text (stringlengths: 15 to 7.82k)
ids (sequencelengths: 1 to 7)
def METHOD_NAME(signal, action):
    try:
        return sun.misc.Signal.handle(signal, action)
    except RuntimeException, err:
        raise ValueError(err.getMessage())
[ 372, 900 ]
def METHOD_NAME(request): return request.param
[ 59 ]
def METHOD_NAME(self): """Method to reset all the global and class varibaled""" pass
[ 285, 656 ]
def METHOD_NAME(self):
    hcl_res = hcl2.loads("""
    resource "google_compute_instance" "default" {
      name         = "test"
      machine_type = "n1-standard-1"
      zone         = "us-central1-a"
      boot_disk {}
      metadata = {
        enable-oslogin = false
      }
    }
    """)
    resource_conf = hcl_res['resource'][0]['google_compute_instance']['default']
    scan_result = check.scan_resource_conf(conf=resource_conf)
    self.assertEqual(CheckResult.FAILED, scan_result)
[ 9, 374 ]
def METHOD_NAME(client):
    assert_route(
        client,
        "https://legacy.httparchive.org/reports",
        301,
        "https://httparchive.org/reports",
    )
[ 9, 3378, 3116 ]
def METHOD_NAME(self) -> Optional[Mapping[str, str]]:
    """
    Resource tags.
    """
    return pulumi.get(self, "tags")
[ 114 ]
def METHOD_NAME(msg):
    if not hasattr(msg, 'correctedMeasurements'):
        return None
    num_corr = len(msg.correctedMeasurements)
    pos_ecef = msg.positionECEF.value
    pos_geo = []
    if len(pos_ecef) > 0:
        pos_geo = ecef2geodetic(pos_ecef)
    pos_std = msg.positionECEF.std
    pos_valid = msg.positionECEF.valid
    slog(f"{num_corr} {pos_geo} {pos_ecef} {pos_std} {pos_valid}")
    return pos_geo, (num_corr, pos_geo, list(pos_ecef), list(msg.positionECEF.std))
[ 276, -1 ]
def METHOD_NAME(topology):
    """Check the parsing of the error log"""
    # No need to sleep, it's not buffered.
    error_lines = topology.standalone.ds_error_log.readlines()
    assert(len(error_lines) > 0)
    error_lines = topology.standalone.ds_error_log.match('.*started.*')
    assert(len(error_lines) > 0)
    assert(
        topology.standalone.ds_error_log.parse_line('[27/Apr/2016:13:46:35.775670167 +1000] slapd started. Listening on All Interfaces port 54321 for LDAP requests') ==  # noqa
        {
            'timestamp': '[27/Apr/2016:13:46:35.775670167 +1000]',
            'message': 'slapd started. Listening on All Interfaces port 54321 for LDAP requests',
            'datetime': datetime.datetime(2016, 4, 27, 13, 0, 0, 775670, tzinfo=tzoffset(None, 36000))
        }
    )
[ 9, 168, 390 ]
def METHOD_NAME(base_length: U256, modulus_length: U256) -> Uint:
    """
    Estimate the complexity of performing a modular exponentiation.

    Parameters
    ----------
    base_length :
        Length of the array representing the base integer.
    modulus_length :
        Length of the array representing the modulus integer.

    Returns
    -------
    complexity : `Uint`
        Complexity of performing the operation.
    """
    max_length = max(Uint(base_length), Uint(modulus_length))
    words = (max_length + 7) // 8
    return words**2
[ 5301 ]
def METHOD_NAME(self): self.assertEqual(parse_url("rs://localhost"), (False, 'localhost', 80))
[ 9, 214, 17107 ]
def METHOD_NAME(self, event): RunCommand("g.manual", entry="wxGUI.vnet")
[ 69, 40 ]
def METHOD_NAME(
    equipment, generic_terms, payment_terms, purpose,
    space_resource_type, test_unit, empty_resource_form_data
):
    data = empty_resource_form_data
    data.update({
        'access_code_type': 'pin6',
        'authentication': 'weak',
        'equipment': equipment.pk,
        'external_reservation_url': 'http://calendar.example.tld',
        'generic_terms': generic_terms.pk,
        'payment_terms': payment_terms.pk,
        'max_period': '02:00:00',
        'min_period': '01:00:00',
        'slot_size': '00:30:00',
        'name_fi': 'Test resource',
        'purposes': purpose.pk,
        'type': space_resource_type.pk,
        'unit': test_unit.pk,
        'periods-0-name': 'Kesäkausi',
        'periods-0-start': '2018-06-06',
        'periods-0-end': '2018-08-01',
        'days-periods-0-0-opens': '08:00',
        'days-periods-0-0-closes': '12:00',
        'days-periods-0-0-weekday': '1',
        'price_type': 'hourly'
    })
    return data
[ 1205, 191, 1029, 365 ]
def METHOD_NAME(): return { "@type": "And", "and": [ { "@type": "AddTriple", "graph": "schema", "object": {"@type": "NodeValue", "node": "owl:Class"}, "predicate": {"@type": "NodeValue", "node": "rdf:type"}, "subject": {"@type": "Value", "node": "Station"}, }, { "@type": "AddTriple", "graph": "schema", "object": { "@type": "NodeValue", "node": "terminus:Document", }, "predicate": { "@type": "NodeValue", "node": "rdfs:subClassOf", }, "subject": {"@type": "Value", "node": "Station"}, }, ], }
[ 4284, 529 ]
def METHOD_NAME(self, current=None): return not self.isDisallow(ocp.CHGRPRESTRICTION)
[ 1046, -1 ]
def METHOD_NAME(self):
    rat = self._create_repo_api_token_obj()
    data = 'permission=rw'
    self.logout()
    self.login_as(self.admin)
    resp = self.client.put(self.user_url, data, 'application/x-www-form-urlencoded')
    self.assertEqual(403, resp.status_code)
[ 9, 1276, 466, 604, 2395 ]
def METHOD_NAME(text: str) -> str:
    text = _remove_commas(text)
    text = _expand_pounds(text)
    text = _expand_dollars(text)
    text = _expand_decimal_point(text)
    text = _expand_ordinal(text)
    text = _expand_number(text)
    return text
[ 1137, 3333 ]
def METHOD_NAME(q, delta, x, i):
    sq = math.sqrt(q)
    G = np.array([[-sq * delta[0, 0], -sq * delta[1, 0], 0, sq * delta[0, 0], sq * delta[1, 0]],
                  [delta[1, 0], -delta[0, 0], -q, -delta[1, 0], delta[0, 0]]])
    G = G / q
    nLM = calc_n_lm(x)
    F1 = np.hstack((np.eye(3), np.zeros((3, 2 * nLM))))
    F2 = np.hstack((np.zeros((2, 3)), np.zeros((2, 2 * (i - 1))),
                    np.eye(2), np.zeros((2, 2 * nLM - 2 * i))))
    F = np.vstack((F1, F2))
    H = G @ F
    return H
[ 2527, 2567 ]
def METHOD_NAME(samples: List[Tuple[np.ndarray, int]], input_name: str):
    def transform_fn(data_item):
        inputs = data_item
        return {input_name: [inputs]}

    return Dataset(samples, transform_fn)
[ 19, 126, 43, 9 ]
def METHOD_NAME(repository):
    """Retrieve production images from the specified repository"""
    log.info(f'Searching for production images in {repository}...')
    response = authed_session.get(f'{REGISTRY_BASE}/{repository}/tags/list')
    response.raise_for_status()
    response_json = response.json()
    for tag in response_json['tags']:
        if is_production_tag(tag):
            yield f'{repository}:{tag}'
    # recurse through child repositories
    for child_repository in response_json['child']:
        yield from METHOD_NAME(f'{repository}/{child_repository}')
[ 1900, 3669 ]
def METHOD_NAME():
    sql_query = "SELECT foo.a, foo.b, bar.c FROM foo JOIN bar ON (foo.a == bar.b);"
    columns_list = SqlLineageSQLParser(sql_query).get_columns()
    columns_list.sort()
    assert columns_list == ["a", "b", "c"]
[ 9, 15656, 1621, 1319, 19, 1951, 41 ]
def METHOD_NAME(self, source):
    """
    :return: latest date of news flash
    """
    latest_date = self.execute(
        "SELECT max(date) FROM news_flash WHERE source=:source",
        {"source": source},
    ).fetchone()[0] or datetime.datetime(1900, 1, 1, 0, 0, 0)
    res = timezones.from_db(latest_date)
    logging.info('Latest time fetched for source {} is {}'
                 .format(source, res))
    return res
[ 19, 893, 153, 47, 1458 ]
def METHOD_NAME(self, command_args):
    super().METHOD_NAME(command_args)
    self._execute_operations()
    return self._output()
[ 1519 ]
def METHOD_NAME(self): self.rating_box = RatingBox()
[ 1111 ]
def METHOD_NAME(self) -> Optional['outputs.ServicesResourceResponseIdentity']:
    """
    Setting indicating whether the service has a managed identity associated with it.
    """
    return pulumi.get(self, "identity")
[ 2989 ]
async def METHOD_NAME(pipeline_response):
    deserialized = self._deserialize('ResourceProviderOperationList', pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)
    return deserialized.next_link or None, AsyncList(list_of_elem)
[ 297, 365 ]
def METHOD_NAME(archive, remove):
    '''
    Verify the egg release file is read from file and written to the archive,
    even if the file cannot be deleted
    '''
    if six.PY3:
        open_name = 'builtins.open'
    else:
        open_name = '__builtin__.open'

    remove.side_effect = OSError('test')
    with patch(open_name, create=True) as mock_open:
        mock_open.side_effect = [mock.mock_open(read_data='/testvalue').return_value]
        c = InsightsConfig()
        d = DataCollector(c)
        d._write_egg_release()
        remove.assert_called_once_with(constants.egg_release_file)
        d.archive.add_metadata_to_archive.assert_called_once_with('/testvalue', '/egg_release')
[ 9, 4429, 586, 171, 203, 61, 6878 ]
def METHOD_NAME() -> None:
    l1 = o3.Linear("5x0e", "5x0e")
    l2 = o3.Linear("5x0e", "5x0e + 3x0o")
    with torch.no_grad():
        l1.weight[:] = l2.weight
    x = torch.randn(3, 5)
    out1 = l1(x)
    out2 = l2(x)
    assert out1.shape == (3, 5)
    assert out2.shape == (3, 8)
    assert torch.allclose(out1, out2[:, :5])
    assert torch.all(out2[:, 5:] == 0)
[ 9, 97, 1737 ]
def METHOD_NAME(self, geometry: o3d.geometry.Geometry3D): ...
[ 188, 1525 ]
def METHOD_NAME(self):
    with backend.StatelessScope() as scope:
        v = backend.Variable(
            initializer=initializers.RandomNormal(), shape=(2, 2)
        )
        self.assertEqual(v._value, None)
        v.assign(np.zeros((2, 2)))
        v.assign_add(2 * np.ones((2, 2)))
        v.assign_sub(np.ones((2, 2)))
        out = scope.get_current_value(v)
        self.assertAllClose(out, np.ones((2, 2)))
[ 9, 2514, 776 ]
def METHOD_NAME(self) -> None:
    import osqp

    osqp
[ 512, 2644 ]
def METHOD_NAME(self):
    self.sites["newmark_disp"] = newmark_displ_from_pga_M(
        pga=self.pga,
        critical_accel=self.sites['crit_accel'],
        M=7.5)

    nd = np.array([0., 0., 0., 2.19233517, 0., 0., 0., 0., 0., 0.])

    np.testing.assert_array_almost_equal(self.sites["newmark_disp"], nd)
[ 9, 7258, 6619 ]
def METHOD_NAME(fast_out_annual):
    """The plant_parts_eia output table."""
    return fast_out_annual.METHOD_NAME().reset_index()
[ 9426, 3646, 16563 ]
def METHOD_NAME(value, escape_html=False):
    """
    Render text as Markdown.
    """
    # Strip HTML tags and render Markdown
    html = md(strip_tags(value), extensions=["fenced_code", "tables"])

    if escape_html:
        html = escape(html)

    return mark_safe(html)
[ 108 ]
def METHOD_NAME():
    extra_vars = {
        'blas_lapack_mt': [False, "Link with multi-threaded BLAS/LAPACK library", CUSTOM],
    }
    return ConfigureMake.METHOD_NAME(extra_vars)
[ 1967, 1881 ]
def METHOD_NAME(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-05-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop(
        "template_url",
        "/subscriptions/{subscriptionId}/providers/Microsoft.CognitiveServices/locations/{location}/commitmentTiers",
    )  # pylint: disable=line-too-long
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
        "location": _SERIALIZER.url("location", location, "str"),
    }

    _url: str = _format_url_section(_url, **path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
[ 56, 245, 377 ]
def METHOD_NAME(self, t: float) -> tuple[float, float, float]: return lambda p: self.homotopy(*p, t)
[ 559, 1541, 104, 791 ]
def METHOD_NAME(  # pylint: disable=keyword-arg-before-vararg
    kind: Literal["local", "rpc"] = "local",
    *args,
    **kwargs,
) -> "Runner":
    """Create a Runner."""
    from . import LocalRunner, RPCRunner  # pylint: disable=import-outside-toplevel

    if kind == "local":
        if "max_workers" in kwargs:
            kwargs.pop("max_workers")
        return LocalRunner(*args, **kwargs)  # type: ignore
    elif kind == "rpc":
        return RPCRunner(*args, **kwargs)  # type: ignore
    raise ValueError(f"Unknown Runner: {kind}")
[ 129 ]
def METHOD_NAME(self, user, user_id, user_name):
    assert "ID" in user
    assert "DisplayName" in user
    id = user["ID"]
    name = user["DisplayName"]
    assert isinstance(id, str)
    assert isinstance(name, str)
    assert id == user_id
    assert name == user_name
    return self
[ 187, 2051 ]
def METHOD_NAME(self): from bodystring.models import PatchAddedModel
[ 9, 1575, 171 ]
def METHOD_NAME(value: str) -> int:
    """Parse an int only if it is only ASCII digits and ``-``.

    This disallows ``+``, ``_``, and non-ASCII digits, which are accepted
    by ``int`` but are not allowed in HTTP header values.

    Any leading or trailing whitespace is stripped
    """
    value = value.strip()
    if _plain_int_re.fullmatch(value) is None:
        raise ValueError

    return int(value)
[ 5982, 962 ]
def METHOD_NAME(client):
    print("pinging node...")
    ping_ok = False
    for _ in range(10):
        try:
            subprocess.check_call(["ping", "-q", "-c1", "-w1", client])
            ping_ok = True
            break
        except subprocess.CalledProcessError:
            pass

    if not ping_ok:
        print("pinging node failed. aborting test.")
        sys.exit(1)
    else:
        print("pinging node succeeded.")

    return ping_ok
[ 7160 ]
def METHOD_NAME(self, index):
    """Return an example's size as a float or tuple. This value is used
    when filtering a dataset with ``--max-positions``."""
    if len(self.sizes) == 1:
        return self.sizes[0][index]
    else:
        return (s[index] for s in self.sizes)
[ 1318 ]
def METHOD_NAME(self, param): return "[" + "|".join(self.levels) + "]"
[ 19, -1 ]
def METHOD_NAME(salt_sub_minion): return [ ("/", saltnado.SaltAPIHandler), ]
[ 991, 2248 ]
def METHOD_NAME(compressed=True, wif=False):
    """Generate a new random keypair and return the corresponding ECKey /
    bytes objects. The private key can also be provided as WIF (wallet
    import format) string instead, which is often useful for wallet RPC
    interaction."""
    privkey = ECKey()
    privkey.generate(compressed)
    pubkey = privkey.get_pubkey().get_bytes()
    if wif:
        privkey = bytes_to_wif(privkey.get_bytes(), compressed)
    return privkey, pubkey
[ 567, 9954 ]
def METHOD_NAME(cls, response: requests.models.Response) -> str:
    error_msg = ""
    try:
        error_msg = response.json()["message"]
    except (json.JSONDecodeError, KeyError):
        error_msg = response.text if response.text else response.reason
    return error_msg
[ 19, 168, 169, 280, 17 ]
def METHOD_NAME(executor_reference, work_queue):
    try:
        while True:
            work_item = work_queue.get(block=True)[-1]
            if work_item is not None:
                work_item.run()
                del work_item
                continue
            executor = executor_reference()
            if _SHUTDOWN or executor is None or executor._shutdown:
                work_queue.put(NULL_ENTRY)
                return None
            del executor
    except BaseException:
        _base.LOGGER.critical("Exception in worker", exc_info=True)
[ 1794 ]
def METHOD_NAME(self) -> Optional[int]:
    """Get the heartbeat interval if it is set.

    Returns:
        The heartbeat interval if it is set, otherwise :obj:`None`.
    """
    raise NotImplementedError()
[ 19, 1797, 3223 ]
def METHOD_NAME(connector, remove_secrets):
    """
    It should fail if no secrets are provided
    """
    assert connector.get_status().status is False
[ 9, 19, 452, 654, 2161 ]
def METHOD_NAME(click_cb):
    button = QPushButton()
    button.clicked.connect(partial(click_cb, button))
    button.setFixedWidth(int(app_state.app.dpi / 3.6))
    return button
[ -1, 1974 ]
def METHOD_NAME(self):
    node = self.nodes[0]
    node.add_p2p_connection(P2PInterface())

    # Mine one period worth of blocks
    node.generate(VB_PERIOD)

    self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")

    # Build one period of blocks with < VB_THRESHOLD blocks signaling some unknown bit
    self.send_blocks_with_version(node.p2p, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
    node.generate(VB_PERIOD - VB_THRESHOLD + 1)

    # Check that we're not getting any versionbit-related errors in get*info()
    assert(not VB_PATTERN.match(node.getmininginfo()["warnings"]))
    assert(not VB_PATTERN.match(node.getnetworkinfo()["warnings"]))

    self.log.info("Check that there is a warning if >50 blocks in the last 100 were an unknown version")

    # Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
    self.send_blocks_with_version(node.p2p, VB_THRESHOLD, VB_UNKNOWN_VERSION)
    node.generate(VB_PERIOD - VB_THRESHOLD)

    # Check that get*info() shows the 51/100 unknown block version error.
    assert(WARN_UNKNOWN_RULES_MINED in node.getmininginfo()["warnings"])
    assert(WARN_UNKNOWN_RULES_MINED in node.getnetworkinfo()["warnings"])

    self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")

    # Mine a period worth of expected blocks so the generic block-version warning
    # is cleared. This will move the versionbit state to ACTIVE.
    node.generate(VB_PERIOD)

    # Stop-start the node. This is required because bitcoind will only warn once about unknown versions or unknown rules activating.
    self.restart_node(0)

    # Generating one block guarantees that we'll get out of IBD
    node.generate(1)
    wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'], timeout=10, lock=mininode_lock)

    # Generating one more block will be enough to generate an error.
    node.generate(1)

    # Check that get*info() shows the versionbits unknown rules warning
    assert(WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"])
    assert(WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"])

    # Check that the alert file shows the versionbits unknown rules warning
    wait_until(lambda: self.versionbits_in_alert_file(), timeout=60)
[ 22, 9 ]
def METHOD_NAME(self, new_dir, fake_elf):
    fake_elf("fake_elf-2.23")

    elf_files = elf_utils.get_elf_files_from_list(new_dir, {"fake_elf-2.23"})

    elf_file = elf_files.pop()
    assert elf_file.execstack_set is False
[ 9, 2892, 529, 14339 ]
def METHOD_NAME(s): """Return true if the pathname refers to a symbolic link.""" try: import Carbon.File return Carbon.File.ResolveAliasFile(s, 0)[2] except: return False
[ 9946 ]
def METHOD_NAME(cls):
    if cls._schema_on_200 is not None:
        return cls._schema_on_200

    cls._schema_on_200 = AAZObjectType()

    _schema_on_200 = cls._schema_on_200
    _schema_on_200.id = AAZStrType(
        flags={"read_only": True},
    )
    _schema_on_200.name = AAZStrType(
        flags={"read_only": True},
    )
    _schema_on_200.properties = AAZObjectType(
        flags={"client_flatten": True},
    )
    _schema_on_200.type = AAZStrType(
        flags={"read_only": True},
    )

    properties = cls._schema_on_200.properties
    properties.created_at = AAZStrType(
        serialized_name="createdAt",
        flags={"read_only": True},
    )
    properties.listener_count = AAZIntType(
        serialized_name="listenerCount",
        flags={"read_only": True},
    )
    properties.requires_client_authorization = AAZBoolType(
        serialized_name="requiresClientAuthorization",
    )
    properties.updated_at = AAZStrType(
        serialized_name="updatedAt",
        flags={"read_only": True},
    )
    properties.user_metadata = AAZStrType(
        serialized_name="userMetadata",
    )

    return cls._schema_on_200
[ 56, 135, 69, 1072 ]
def METHOD_NAME(
    table_name, table_signature, table_capacity=1000, min_size_limiter_size=1
):
    """Creates a uniform table with default parameters.

    Args:
      table_name: string name of the uniform sampling table
      table_signature: Spec for the data the table will hold.
      table_capacity: capacity of the replay table in number of items.
      min_size_limiter_size: Minimum number of items required in the RB before
        sampling can begin.

    Returns:
      an instance of uniform sampling table.
    """
    rate_limiter = reverb.rate_limiters.MinSize(min_size_limiter_size)
    uniform_table = reverb.Table(
        table_name,
        max_size=table_capacity,
        sampler=reverb.selectors.Uniform(),
        remover=reverb.selectors.Fifo(),
        rate_limiter=rate_limiter,
        signature=table_signature,
    )
    return uniform_table
[ 129, 4431, 410 ]
def METHOD_NAME(cls, exception):
    if not cls.is_class():
        return False
    for python_cls in exception.mro():
        if cls.py__name__() == python_cls.__name__ \
                and cls.parent_context.is_builtins_module():
            return True
    return False
[ 250, 590 ]
def METHOD_NAME(cmd,  # pylint: disable=too-many-locals
                client,
                registry_name,
                source_image,
                source_registry=None,
                source_registry_username=None,
                source_registry_password=None,
                target_tags=None,
                resource_group_name=None,
                repository=None,
                force=False,
                no_wait=False):
    if source_registry_username and not source_registry_password:
        raise CLIError(CREDENTIALS_INVALID)

    _, resource_group_name = validate_managed_registry(
        cmd, registry_name, resource_group_name, IMPORT_NOT_SUPPORTED)

    ImportImageParameters, ImportSource, ImportMode = cmd.get_models(
        'ImportImageParameters', 'ImportSource', 'ImportMode')

    registry = None
    if source_registry:
        if is_valid_resource_id(source_registry):
            source = ImportSource(resource_id=source_registry, source_image=source_image)
        else:
            registry = get_registry_from_name_or_login_server(cmd.cli_ctx, source_registry, source_registry)
            if registry:
                # trim away redundant login server name, a common error
                prefix = registry.login_server + '/'
                if source_image.lower().startswith(prefix.lower()):
                    warning = ('The login server name of "%s" in the "--source" argument will be ignored as '
                               '"--registry" already supplies the same information')
                    logger.warning(warning, prefix[:-1])
                    source_image = source_image[len(prefix):]
                # For Azure container registry
                source = ImportSource(resource_id=registry.id, source_image=source_image)
            else:
                # For non-Azure container registry
                raise CLIError(SOURCE_REGISTRY_NOT_FOUND)
    else:
        registry_uri, source_image = _split_registry_and_image(source_image)
        if source_registry_password:
            ImportSourceCredentials = cmd.get_models('ImportSourceCredentials')
            source = ImportSource(registry_uri=registry_uri,
                                  source_image=source_image,
                                  credentials=ImportSourceCredentials(password=source_registry_password,
                                                                      username=source_registry_username))
        else:
            registry = get_registry_from_name_or_login_server(cmd.cli_ctx, registry_uri)
            if registry:
                # For Azure container registry
                source = ImportSource(resource_id=registry.id, source_image=source_image)
            else:
                # For non-Azure container registry
                source = ImportSource(registry_uri=registry_uri, source_image=source_image)

    if not target_tags and not repository:
        index = source_image.find("@")
        if index > 0:
            target_tags = [source_image[:index]]
        else:
            target_tags = [source_image]

    import_parameters = ImportImageParameters(source=source,
                                              target_tags=target_tags,
                                              untagged_target_repositories=repository,
                                              mode=ImportMode.force.value if force else ImportMode.no_force.value)

    try:
        if no_wait:
            logger.warning('Import has started. Due to no-wait option, failures will not be reflected.')
            return sdk_no_wait(no_wait, client.begin_import_image, resource_group_name, registry_name, import_parameters)

        result_poller = client.begin_import_image(
            resource_group_name=resource_group_name,
            registry_name=registry_name,
            parameters=import_parameters)
        return LongRunningOperation(cmd.cli_ctx, 'Importing image...')(result_poller)
    except CLIError as e:
        _handle_import_exception(e, cmd, source_registry, source_image, registry)
[ 2299, 512 ]
def METHOD_NAME(name, get1=get1):
    """Internal function for Bastion().  See source comments."""
    return get1(name)
[ 8119 ]
def METHOD_NAME(self, rtype, name, content):
    # Check whether the record already exists with the same rtype, name & content.
    # If so, claim to have added the record, but don't do anything.
    records = self.list_records(rtype, name, content)
    if records:
        LOGGER.debug(
            "create_record: (ignored, duplicate record): %s", records[0]["id"]
        )
        return True

    # Make sure TXT records are wrapped in quotes
    if content:
        content = self._add_quotes(rtype, content)

    data = {"name": self._full_name(name), "type": rtype, "content": content}
    self._post(f"/zones/{self.domain}/records", data)

    LOGGER.debug("create_record: %s", True)
    return True
[ 129, 148 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_url_param(
            "l2IsolationDomainName", self.ctx.args.resource_name,
            required=True,
        ),
        **self.serialize_url_param(
            "resourceGroupName", self.ctx.args.resource_group,
            required=True,
        ),
        **self.serialize_url_param(
            "subscriptionId", self.ctx.subscription_id,
            required=True,
        ),
    }
    return parameters
[ 274, 386 ]
def METHOD_NAME(self, chip_counter):
    # Skip here, and do later! This is a special case...
    pass
[ 129, 1600, 2128 ]
def METHOD_NAME(
    path: str,
    token: str,
    provider: S3Provider,
):
    if provider is None:
        return None
    api = import_indra_api()
    creds_used = provider.creds_used
    if creds_used == "PLATFORM":
        provider._check_update_creds()
        return api.dataset(
            path,
            origin_path=provider.root,
            token=token,
            aws_access_key_id=provider.aws_access_key_id,
            aws_secret_access_key=provider.aws_secret_access_key,
            aws_session_token=provider.aws_session_token,
            region_name=provider.aws_region,
            endpoint_url=provider.endpoint_url,
            expiration=str(provider.expiration),
        )
    elif creds_used == "ENV":
        return api.dataset(
            path,
            origin_path=provider.root,
            token=token,
            profile_name=provider.profile_name,
        )
    elif creds_used == "DICT":
        return api.dataset(
            path,
            origin_path=provider.root,
            token=token,
            aws_access_key_id=provider.aws_access_key_id,
            aws_secret_access_key=provider.aws_secret_access_key,
            aws_session_token=provider.aws_session_token,
            region_name=provider.aws_region,
            endpoint_url=provider.endpoint_url,
        )
[ 19, -1, 2491, 280, 607, 2275 ]
def METHOD_NAME(self):
    if self.model is not None:
        self.model.to('meta')
[ 5473, 578 ]
async def METHOD_NAME(
    request: GolangciLintRequest.Batch[GolangciLintFieldSet, Any],
    golangci_lint: GolangciLint,
    goroot: GoRoot,
    bash: BashBinary,
    platform: Platform,
    golang_subsystem: GolangSubsystem,
    golang_env_aware: GolangSubsystem.EnvironmentAware,
) -> LintResult:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest((field_set.address for field_set in request.elements)),
    )

    all_source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt[SourcesField]
            for tgt in transitive_targets.closure
            if tgt.has_field(SourcesField)
        ),
    )
    target_source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in request.elements),
    )
    downloaded_golangci_lint_request = Get(
        DownloadedExternalTool,
        ExternalToolRequest,
        golangci_lint.get_request(platform),
    )
    config_files_request = Get(ConfigFiles, ConfigFilesRequest, golangci_lint.config_request())

    (
        target_source_files,
        all_source_files,
        downloaded_golangci_lint,
        config_files,
    ) = await MultiGet(
        target_source_files_request,
        all_source_files_request,
        downloaded_golangci_lint_request,
        config_files_request,
    )

    owning_go_mods = await MultiGet(
        Get(OwningGoMod, OwningGoModRequest(field_set.address)) for field_set in request.elements
    )
    owning_go_mod_addresses = {x.address for x in owning_go_mods}
    go_mod_infos = await MultiGet(
        Get(GoModInfo, GoModInfoRequest(address)) for address in owning_go_mod_addresses
    )
    go_build_opts = await MultiGet(
        Get(GoBuildOptions, GoBuildOptionsFromTargetRequest(address))
        for address in owning_go_mod_addresses
    )
    cgo_enabled = any(build_opts.cgo_enabled for build_opts in go_build_opts)

    # If cgo is enabled, golangci-lint needs to be able to locate the
    # associated tools in its environment. This is injected in $PATH in the
    # wrapper script.
    tool_search_path = ":".join(
        ["${GOROOT}/bin", *(golang_env_aware.cgo_tool_search_paths if cgo_enabled else ())]
    )

    # golangci-lint requires an absolute path to a cache
    golangci_lint_run_script = FileContent(
        "__run_golangci_lint.sh",
        textwrap.dedent(
            f"""\
            export GOROOT={goroot.path}
            sandbox_root="$(/bin/pwd)"
            export PATH="{tool_search_path}"
            export GOPATH="${{sandbox_root}}/gopath"
            export GOCACHE="${{sandbox_root}}/gocache"
            export GOLANGCI_LINT_CACHE="$GOCACHE"
            export CGO_ENABLED={1 if cgo_enabled else 0}
            /bin/mkdir -p "$GOPATH" "$GOCACHE"
            exec "$@"
            """
        ).encode("utf-8"),
    )

    golangci_lint_run_script_digest = await Get(Digest, CreateDigest([golangci_lint_run_script]))
    input_digest = await Get(
        Digest,
        MergeDigests(
            [
                golangci_lint_run_script_digest,
                downloaded_golangci_lint.digest,
                config_files.snapshot.digest,
                target_source_files.snapshot.digest,
                all_source_files.snapshot.digest,
                *(info.digest for info in set(go_mod_infos)),
            ]
        ),
    )

    argv = [
        bash.path,
        golangci_lint_run_script.path,
        downloaded_golangci_lint.exe,
        "run",
        # keep golangci-lint from complaining
        # about concurrent runs
        "--allow-parallel-runners",
    ]
    if golangci_lint.config:
        argv.append(f"--config={golangci_lint.config}")
    elif config_files.snapshot.files:
        argv.append(f"--config={config_files.snapshot.files[0]}")
    else:
        argv.append("--no-config")
    argv.extend(golangci_lint.args)

    process_result = await Get(
        FallibleProcessResult,
        Process(
            argv=argv,
            input_digest=input_digest,
            description="Run `golangci-lint`.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.create(request, process_result)
[ 22, -1, 3060 ]
def METHOD_NAME(path):
    """Test whether a path exists.  Returns True for broken symbolic links"""
    try:
        st = os.lstat(path)
    except OSError:
        return False
    return True
[ 6134 ]
def METHOD_NAME(self):
[ 1658 ]
def METHOD_NAME(self, tasklist, root):
    """Populate a list of all the subtasks and their children, recursively.

    Also collect the list of affected tags which should be refreshed"""
    if root not in tasklist:
        tasklist.append(root)
        [self.update_tags.append(tagname) for tagname in root.get_tags_name()
         if tagname not in self.update_tags]
        [self.METHOD_NAME(tasklist, i) for i in root.get_subtasks()
         if i not in tasklist]
[ 2203, 245, 620 ]
def METHOD_NAME():
    dataLocation = get_tests_data_folder()
    if not os.path.exists(dataLocation):
        print("No Opengate test data available in: " + dataLocation)
        print("I download it for you.")
        download_tests_data(dataLocation)
        print("")
        print("Done")
    else:
        # Check if the commit is correct if file HEAD is present
        if os.path.isfile(os.path.join(dataLocation, "..", "HEAD")):
            f = open(os.path.join(dataLocation, "..", "HEAD"), "r")
            checkoutReferenceDataGit = str(f.readline()).strip()
            if os.path.isfile(os.path.join(dataLocation, "sha.log")):
                f = open(os.path.join(dataLocation, "sha.log"), "r")
                checkoutRealDataGit = str(f.readline()).strip()
                if not checkoutReferenceDataGit == checkoutRealDataGit:
                    shutil.rmtree(dataLocation)
                    print("No correct Opengate test data version in: " + dataLocation)
                    print("I update it for you.")
                    download_tests_data(dataLocation)
                    print("")
                    print("Done")
            else:
                shutil.rmtree(dataLocation)
                print("No Opengate test data available in: " + dataLocation)
                print("I download it for you.")
                download_tests_data(dataLocation)
                print("")
                print("Done")
        # Check if the size of one .raw file is correct to detect lfs
        if "ct_4mm.raw" in os.listdir(dataLocation):
            filesize = os.stat(os.path.join(dataLocation, "ct_4mm.raw")).st_size
            if filesize < 4000000:
                print(
                    "It seems the test data in: "
                    + dataLocation
                    + " do not have the correct size"
                )
                print("Maybe you do not have git-lfs. Execute this:")
                print("Install git-lfs from https://git-lfs.com/")
                print("cd " + dataLocation)
                print("git-lfs pull")
                return False
        else:
            # if the file is not present
            print(
                colored.stylize(
                    "The data are not present in: " + dataLocation,
                    color_error,
                )
            )
            print("Download them with:")
            print("git submodule update --init --recursive")
            return False
    return True
[ 250, 450, 365, 451 ]
def METHOD_NAME(api_client, descriptive_property, unit, user):
    url = (
        reverse("unit-detail", kwargs={"pk": unit.pk})
        + "?include=observable_properties"
    )
    response = api_client.get(url)
    observable_properties = response.data["observable_properties"]
    assert len(observable_properties) > 0
    assert Observation.objects.count() == 0

    authenticate_user(api_client, user)
    count = 0
    for prop in observable_properties:
        url = reverse("observation-list")
        current_time = timezone.now()

        # Test default language
        raw_data = dict(unit=unit.pk, value="test string", property=prop["id"])
        response = api_client.post(url, raw_data, format="json")
        assert response.status_code == 201
        count += 1
        data = response.data
        observation_time = datetime.strptime(data["time"], "%Y-%m-%dT%H:%M:%S.%f%z")
        assert observation_time - current_time < timedelta(seconds=1)
        assert data["value"]["fi"] == raw_data["value"]
        assert data["property"] == raw_data["property"]
        assert data["unit"] == raw_data["unit"]

        current_time = timezone.now()

        # Test all + 1 languages
        raw_data = dict(
            unit=unit.pk,
            value={"fi": "test string", "en": "test string 2", "sv": "test string 3"},
            property=prop["id"],
        )
        response = api_client.post(url, raw_data, format="json")
        assert response.status_code == 201
        count += 1
        data = response.data
        observation_time = datetime.strptime(data["time"], "%Y-%m-%dT%H:%M:%S.%f%z")
        assert observation_time - current_time < timedelta(seconds=1)
        assert data["value"] == raw_data["value"]
        assert data["property"] == raw_data["property"]
        assert data["unit"] == raw_data["unit"]

    assert Observation.objects.count() == count
[ 9, 129, 7366, 476 ]
def METHOD_NAME(self, page):
    extr = text.extract_from(page)
    manga = text.unescape(extr('<h1 class="title">', '</h1>')).strip()
    author = extr('<b>Author</b>: ', '<br')
    artist = extr('<b>Artist</b>: ', '<br')
    results = []

    while True:
        url = extr('<div class="title"><a href="', '"')
        if not url:
            return results
        results.append((url, self.parse_chapter_url(url, {
            "manga": manga,
            "author": author,
            "artist": artist,
            "chapter_string": extr('title="', '"'),
            "group"         : extr('title="', '"'),
        })))
[ 15186 ]
def METHOD_NAME(self): return self.objects.filter(id=self.root_id).first()
[ 1563 ]
def METHOD_NAME(var): return _get_config().get_option(var)
[ 19 ]
def METHOD_NAME(client, name_contains): return client.METHOD_NAME(NameContains=name_contains)
[ 245, 1197 ]
def METHOD_NAME(self, dl_manager):
    downloaded_dir = dl_manager.download_and_extract(self.config.data_url)
    data_dir = os.path.join(downloaded_dir, self.config.data_dir)
    train_split = datasets.SplitGenerator(
        name=datasets.Split.TRAIN,
        gen_kwargs={"filepath": os.path.join(data_dir, "train.tsv"), "split": "train"}
    )
    test_split = datasets.SplitGenerator(
        name=datasets.Split.TEST,
        gen_kwargs={"filepath": os.path.join(data_dir, "test.tsv"), "split": "test"}
    )
    return [train_split, test_split]
[ 265, 942 ]
def METHOD_NAME(url, size=0, rank=0, to_path=None, file_pname=None):
    """
    url: file url
    file_pname: file save name
    chunk_size: chunk size
    resume_download: download from last chunk
    """
    try:
        requests.get(url, stream=True, verify=True)
    except Exception:
        raise ValueError('please check the download file names')
    total_size = size
    if to_path is None:
        to_path = './checkpoints/'
    if file_pname is None:
        file_path = os.path.join(to_path, url.split('/')[-1])
    else:
        file_path = os.path.join(to_path, file_pname)
    if (is_bmt == 1 and bmt.init.is_initialized() and bmt.rank() == 0) or \
            (torch.distributed.is_initialized() and torch.distributed.get_rank() == 0) or \
            (((is_bmt == 1 and not bmt.init.is_initialized()) or is_bmt == 0) and
                not torch.distributed.is_initialized()):
        if not os.path.exists(to_path):
            os.makedirs(to_path)
        if os.path.exists(file_path):
            resume_size = os.path.getsize(file_path)
        else:
            resume_size = 0
        if resume_size == total_size:
            return
        headers = {'Range': 'bytes=%d-' % resume_size}
        res = requests.get(url, stream=True, verify=True, headers=headers)
        progress = tqdm(
            unit="B",
            unit_scale=True,
            unit_divisor=1024,
            total=total_size,
            initial=resume_size,
            desc="Downloading",
        )
        while 1:
            with open(file_path, "ab") as f:
                for chunk in res.iter_content(chunk_size=1024 * 1024):
                    if chunk:
                        f.write(chunk)
                        progress.update(len(chunk))
                        f.flush()
            resume_size = os.path.getsize(file_path)
            if resume_size >= total_size:
                print('-----model downloaded in ', os.getcwd() + to_path[1:])
                break
            else:
                headers = {'Range': 'bytes=%d-' % resume_size}
                res = requests.get(url, stream=True, verify=True, headers=headers)
    else:
        while not os.path.exists(
                file_path) or total_size != os.path.getsize(file_path):
            sleep(1)
[ 136, 280, 274 ]
def METHOD_NAME(bridge, sock, proto_ver):
    global connect_packet, connack_packet

    if not mosq_test.expect_packet(bridge, "connect", connect_packet):
        return 1

    bridge.send(connack_packet)

    if proto_ver == 5:
        opts = mqtt5_opts.MQTT_SUB_OPT_NO_LOCAL | mqtt5_opts.MQTT_SUB_OPT_RETAIN_AS_PUBLISHED
    else:
        opts = 0

    mid = 0
    patterns = [
        "remote/topic/#",
        "remote2/topic/prefix/#",
        "remote3/topic/+/value",
        "remote4/tipic/+",
        "$SYS/broker/clients/total",
    ]
    for pattern in ("remote/topic/#", "remote2/topic/prefix/#", "remote3/topic/+/value"):
        mid += 1
        subscribe_packet = mosq_test.gen_subscribe(mid, pattern, 0 | opts, proto_ver=proto_ver)
        suback_packet = mosq_test.gen_suback(mid, 0, proto_ver=proto_ver)

        if not mosq_test.expect_packet(bridge, "subscribe", subscribe_packet):
            return 1

        bridge.send(suback_packet)

    mid += 1
    subscribe_packet = mosq_test.gen_subscribe(mid, "#", 0 | opts, proto_ver=proto_ver)
    suback_packet = mosq_test.gen_suback(mid, 0, proto_ver=proto_ver)
    sock.send(subscribe_packet)

    if not mosq_test.expect_packet(sock, "suback", suback_packet):
        return 1

    cases = [
        ('local/topic/something', 'remote/topic/something'),
        ('local/topic/some/t/h/i/n/g', 'remote/topic/some/t/h/i/n/g'),
        ('local/topic/value', 'remote/topic/value'),
        # Don't work, #40 must be fixed before
        # ('local/topic', 'remote/topic'),
        ('local2/topic/prefix/something', 'remote2/topic/prefix/something'),
        ('local3/topic/something/value', 'remote3/topic/something/value'),
        ('local4/topic/something', 'remote4/tipic/something'),
        ('test/mosquitto/orgclients/total', '$SYS/broker/clients/total'),
    ]
    for (local_topic, remote_topic) in cases:
        mid += 1
        remote_publish_packet = mosq_test.gen_publish(
            remote_topic, qos=0, mid=mid, payload='', proto_ver=proto_ver)
        local_publish_packet = mosq_test.gen_publish(
            local_topic, qos=0, mid=mid, payload='', proto_ver=proto_ver)
        bridge.send(remote_publish_packet)
        match = mosq_test.expect_packet(sock, "publish", local_publish_packet)
        if not match:
            print("Fail on cases local_topic=%r, remote_topic=%r" % (
                local_topic, remote_topic,
            ))
            return 1

    return 0
[ 921, 9 ]
def METHOD_NAME(self):
[ 1318 ]
def METHOD_NAME(self):
    not_on_or_after = str_to_time(in_a_while(days=1))
    session_info = SESSION_INFO_PATTERN.copy()
    session_info["ava"] = {"surName": ["Jeter"]}
    self.cache.set(nid[0], "bcde", session_info, not_on_or_after)

    (ava, inactive) = self.cache.get_identity(nid[0])
    assert inactive == []
    assert _eq(ava.keys(), ["givenName", "surName"])
    assert ava["givenName"] == ["Derek"]
    assert ava["surName"] == ["Jeter"]
[ 9, 238, 9931, 100 ]
def METHOD_NAME(self, api_id, resources, error_code=None):
    expected_params = {'restApiId': api_id}
    response = {'items': resources}
    self._stub_bifurcator(
        'get_resources', expected_params, response, error_code=error_code)
[ 492, 19, 1614 ]
def METHOD_NAME(s): """Return true if the pathname refers to an existing directory.""" try: st = os.stat(s) except (OSError, ValueError): return False return stat.S_ISDIR(st.st_mode)
[ 5382 ]
def METHOD_NAME(self, prompt: str, max_generated_tokens=100, top_k=50, top_p=0.7, temperature=1):
    input_ids, prefix_mask, past_key_values = self.prepare_input(prompt)
    output_tokens = []
    while True:
        inputs = {
            "input_ids": input_ids,
            "prefix_mask": prefix_mask,
            "use_past": np.array(len(output_tokens) > 0),
        }
        inputs.update(past_key_values)
        logits, *past_key_values = self.session.run(output_names, inputs)
        past_key_values = {k: v for k, v in zip(past_names, past_key_values)}
        next_token = self.sample_next_token(
            logits[0, -1], top_k=top_k, top_p=top_p, temperature=temperature)
        output_tokens += [next_token]
        if next_token == self.eop_token_id or len(output_tokens) > max_generated_tokens:
            break
        input_ids = np.array([[next_token]], dtype=np.longlong)
        prefix_mask = np.concatenate(
            [prefix_mask, np.array([[0]], dtype=np.longlong)], axis=1)
        yield process_response(self.tokenizer.decode(output_tokens))
    return process_response(self.tokenizer.decode(output_tokens))
[ 567, 3972 ]
def METHOD_NAME(self, msg: Message):
    receiver_id = msg.get_receiver_id()
    logging.info("sending message to {}".format(receiver_id))
    # Should I wait?
    tick = time.time()
    rpc.rpc_sync(
        WORKER_NAME.format(receiver_id),
        TRPCCOMMServicer.sendMessage,
        args=(self.process_id, msg),
    )
    MLOpsProfilerEvent.log_to_wandb({"Comm/send_delay": time.time() - tick})
    logging.debug("sent")
[ 353, 277 ]
def METHOD_NAME(self, db: Session, new_password: str) -> None:
    """Updates the user's password to the specified value.

    No validations are performed on the old/existing password within this function.
    """
    hashed_password, salt = FidesUser.hash_password(new_password)
    self.hashed_password = hashed_password  # type: ignore
    self.salt = salt  # type: ignore
    self.password_reset_at = datetime.utcnow()  # type: ignore
    self.save(db)
[ 86, 2897 ]
def METHOD_NAME(self):
    return [
        {
            "endContainer": "/div[1]/article[1]/section[1]/div[1]/div[2]/div[1]",
            "endOffset": 76,
            "startContainer": "/div[1]/article[1]/section[1]/div[1]/div[2]/div[1]",
            "startOffset": 0,
            "type": "RangeSelector",
        },
        {"end": 362, "start": 286, "type": "TextPositionSelector"},
        {
            # pylint: disable=line-too-long
            "exact": "If you wish to install Hypothesis on your own site then head over to GitHub.",
            "prefix": " browser extension.\n ",
            "suffix": "\n \n \n \n ",
            "type": "TextQuoteSelector",
        },
    ]
[ 1030, 2226 ]
def METHOD_NAME(self): self.dev_null.close()
[ 531, 481 ]
def METHOD_NAME(ocrd_tool):
    '''
    Validate OCRD_TOOL as an ocrd-tool.json file.
    '''
    if not ocrd_tool:
        ocrd_tool = 'ocrd-tool.json'
    with codecs.open(ocrd_tool, encoding='utf-8') as f:
        ocrd_tool = loads(f.read())
    _inform_of_result(OcrdToolValidator.validate(ocrd_tool))
[ 187, 9528, 3081 ]
def METHOD_NAME(data_dir=DATA_DIR):
    file_list = os.path.join(data_dir, 'test_list.txt')
    return _reader_creator(file_list, 'test', shuffle=False, data_dir=data_dir)
[ 9 ]
def METHOD_NAME(
    release,
    url,
    insecure,
    cacert,
    verbose,
    distro,
    nochecksum,
    library_only,
    no_download,
    extras_only,
    extra,
    to,
):
    if url is None and release is None:
        # the default release is only used if neither a release or url is given
        release = idaes.config.default_binary_release
    if url is not None and release is not None:
        click.echo("\n* You must provide either a release or url not both.")
    elif url is not None or release is not None:
        click.echo("Getting files...")
        try:
            d = idaes.commands.util.download_bin.download_binaries(
                release,
                url,
                insecure,
                cacert,
                verbose,
                distro,
                nochecksum,
                library_only,
                no_download,
                extras_only,
                extra,
                alt_path=to,
            )
            click.echo("Done")
        except idaes.commands.util.download_bin.UnsupportedPlatformError as e:
            click.echo("")
            click.echo(e)
            click.echo("")
            click.echo("Specify an os with --distro <os>:")
            return
        if no_download:
            for k, i in d.items():
                click.echo(f"{k:14}: {i}")
        else:
            print_extensions_version(library_only)
    else:
        click.echo("\n* You must provide a download URL for IDAES binary files.")
[ 19, 583 ]
def METHOD_NAME(self, text: str = None, content_desc: str = None):
    """
    Android: find view
    :param text:
    :param content_desc:
    :return:
    """
    self.switch_to_app()
    if text is not None:
        attribute = "text"
        _text = text
    elif content_desc is not None:
        attribute = "content-desc"
        _text = content_desc
    else:
        raise ValueError("parameter error, setting text/content_desc")
    for _ in range(3):
        elem = self.__find(class_name="android.view.View", attribute=attribute, text=_text)
        if elem is not None:
            break
        sleep(1)
    else:
        raise ValueError(f"Unable to find -> {text}")
    return elem
[ 416, 1179 ]
def METHOD_NAME(): assert not IS_PULL_REQUEST, "Cannot release from pull requests"
[ 638, 1046, 586 ]
def METHOD_NAME():
    root = test()
    root.mainloop()
[ 57 ]
def METHOD_NAME(self, retry_policy):
    """Inherit backoff parameters from another retry policy.

    :param retry_policy: The retry policy to inherit from.
    :return: An instance of ExponentialBackoffRetryPolicy with inherited parameters.
    :raise ValueError: If the strategy of the retry policy is not ExponentialBackoff.
    """
    if retry_policy.strategy_case != "ExponentialBackoff":
        raise ValueError("Strategy must be exponential backoff")
    return self._inherit_backoff(retry_policy.exponential_backoff)
[ 8450, 4287 ]
def METHOD_NAME(name: str | tuple[str, str]) -> AbstractContextManager[str]: ...
[ 8909 ]
def METHOD_NAME(self):
    if self.normal:
        return 1 if self._start_x < self._end_x else -1
    else:
        return 1 if self._end_x < self._start_x else -1
[ 1104, 1190 ]
def METHOD_NAME(self):
    addr = self.server.server_address
    url = 'http://' + support.HOST + ':' + str(addr[1])
    robots_url = url + "/robots.txt"
    parser = robotparser.RobotFileParser()
    parser.set_url(url)
    parser.read()
    self.assertFalse(parser.can_fetch("*", robots_url))
[ 9, 2897, 814, 1055 ]
def METHOD_NAME(self, final=False):
    """Internal function to add a chunk of data to a started upload"""
    self.buffer.seek(0)
    data = self.buffer.getvalue()

    data_chunks = [
        data[start:end] for start, end in self._to_sized_blocks(end=len(data))
    ]

    for data_chunk in data_chunks:
        self.fs._add_data(handle=self.handle, data=data_chunk)

    if final:
        self.fs._close_handle(handle=self.handle)

    return True
[ 172, 464 ]
def METHOD_NAME(approx_order=1):
    """Define the problem to solve."""
    from sfepy import data_dir

    filename_mesh = data_dir + '/meshes/3d/block.mesh'

    options = {
        'nls' : 'newton',
        'ls' : 'ls',
        'post_process_hook' : 'verify_tractions',
    }

    functions = {
        'linear_tension' : (linear_tension,),
    }

    fields = {
        'displacement': ('real', 3, 'Omega', approx_order),
    }

    materials = {
        'solid' : ({'D': stiffness_from_lame(3, lam=5.769, mu=3.846)},),
        'load' : (None, 'linear_tension')
    }

    variables = {
        'u' : ('unknown field', 'displacement', 0),
        'v' : ('test field', 'displacement', 'u'),
    }

    regions = {
        'Omega' : 'all',
        'Left' : ('vertices in (x < -4.99)', 'facet'),
        # Use a parent region to select only facets belonging to cells in the
        # parent region. Otherwise, each facet is in the region two times, with
        # opposite normals.
        'Middle' : ('vertices in (x > -1e-10) & (x < 1e-10)', 'facet', 'Rhalf'),
        'Rhalf' : 'vertices in x > -1e-10',
        'Right' : ('vertices in (x > 4.99)', 'facet'),
    }

    ebcs = {
        'fixb' : ('Left', {'u.all' : 0.0}),
        'fixt' : ('Right', {'u.[1,2]' : 0.0}),
    }

    integrals = {
        'i' : 2 * approx_order,
    }

    ##
    # Balance of forces.
    equations = {
        'elasticity' :
        """dw_lin_elastic.i.Omega( solid.D, v, u )
         = - dw_surface_ltr.i.Right( load.val, v )""",
    }

    ##
    # Solvers etc.
    solvers = {
        'ls' : ('ls.auto_direct', {}),
        'newton' : ('nls.newton', {
            'i_max' : 1,
            'eps_a' : 1e-10,
            'eps_r' : 1.0,
            'macheps' : 1e-16,
            # Linear system error < (eps_a * lin_red).
            'lin_red' : 1e-2,
            'ls_red' : 0.1,
            'ls_red_warp' : 0.001,
            'ls_on' : 1.1,
            'ls_min' : 1e-5,
            'check' : 0,
            'delta' : 1e-6,
        })
    }

    return locals()
[ 632 ]
def METHOD_NAME(val: Any, output_format: str = "standard", errors: str = "coarse") -> Any:
    """
    Reformat a number string with proper separators and whitespace.

    Parameters
    ----------
    val
        The value of number string.
    output_format
        If output_format = 'compact', return string without any separators or whitespace.
        If output_format = 'standard', return string with proper separators and whitespace.
        Note: in the case of VAT, the compact format is the same as the standard one.
    """
    val = str(val)
    result: Any = []

    if val in NULL_VALUES:
        return [np.nan]

    if not validate_cy_vat(val):
        if errors == "raise":
            raise ValueError(f"Unable to parse value {val}")
        error_result = val if errors == "ignore" else np.nan
        return [error_result]

    if output_format in {"compact", "standard"}:
        result = [vat.compact(val)] + result

    return result
[ 275 ]
def METHOD_NAME(self): assert is_clockwise(self.triangle_cw)
[ 9, 2147, 6562 ]
def METHOD_NAME(self) -> str: return pulumi.get(self, "name")
[ 156 ]