Columns: text — string (lengths 15 to 7.82k) · ids — sequence (lengths 1 to 7)
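Each row below pairs a Python function, with its original name masked as METHOD_NAME (the text column), against a short list of integer ids (the ids column). A minimal sketch of iterating such rows, assuming the dump is stored as JSON Lines with the two field names from the header above — the file name is hypothetical:

import json

# Hypothetical file name; the "text" and "ids" field names come from the column header above.
with open("dataset.jsonl", encoding="utf-8") as f:
    for line in f:
        row = json.loads(line)
        snippet = row["text"]  # masked Python source, 15 to 7.82k characters
        ids = row["ids"]       # sequence of 1 to 7 integers
        assert "METHOD_NAME" in snippet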
def METHOD_NAME(filename):
    with _resource_bytesio(filename) as f:
        with tarfile.open(fileobj=f) as tarf:
            root, _, _ = filename.partition('.')
            assert oct(tarf.getmember(root).mode) == '0o755'
[ 9, 1622, 1563, 1813 ]
def METHOD_NAME(pset, inputID, label):
    setattr(pset, label, cms.PSet(
        inputTag=cms.InputTag(inputID),
        provenanceConfigLabel=cms.string(""),
        idLabel=cms.string(""),
    ))
[ 97, 147 ]
def METHOD_NAME(self, file_desc_proto):
[ 238 ]
def METHOD_NAME(sink_type: str, from_: str = "ingestion") -> Type[Sink]:
    return import_from_module(
        "metadata.{}.sink.{}.{}Sink".format(
            from_,
            get_module_name(sink_type),
            get_class_name_root(sink_type),
        )
    )
[ 512, 1079, 2 ]
def METHOD_NAME(self, path):
    path = path.rstrip('/')
    entries = []
    if path == '':
        for model in ('collection', 'user'):
            info = paramiko.SFTPAttributes()
            info.st_size = 0
            info.st_mode = 0o777 | stat.S_IFDIR
            info.filename = model.encode('utf8')
            entries.append(info)
    elif path in ('/user', '/collection'):
        model = path[1:]
        for doc in ModelImporter.model(model).list(user=self.server.girderUser):
            entries.append(_stat(doc, model))
    else:
        obj = lookUpPath(path, filter=False, user=self.server.girderUser)
        return self._list(obj['model'], obj['document'])
    return entries
[ 245, 451 ]
def METHOD_NAME(self):
    self.admin.assert_icommand('irmdir', 'STDOUT_SINGLELINE', 'No collection names specified.')
[ 9, 9850, 654, 362 ]
def METHOD_NAME(group, action, enabled=True):
    wait_action_in_group(group, action)
    with glib_util.SignalWait(group, f'action-enabled-changed::{action}') as w:
        while group.get_action_enabled(action) != enabled:
            w.wait()
[ 618, 1006, 623, 846, 1111 ]
def METHOD_NAME(self): """ Make sure rows can be retrieved where all items in an array match a given value. """ """ 🐛 Cockroach bug: https://github.com/cockroachdb/cockroach/issues/71908 "could not decorrelate subquery" error under asyncpg """ # noqa: E501 MyTable(value=[1, 1, 1]).save().run_sync() self.assertEqual( MyTable.select(MyTable.value) .where(MyTable.value.all(1)) .first() .run_sync(), {"value": [1, 1, 1]}, ) self.assertEqual( MyTable.select(MyTable.value) .where(MyTable.value.all(0)) .first() .run_sync(), None, )
[ 9, 75 ]
def METHOD_NAME(): """Return base_ports""" return BASE_PORTS
[ 19, 414, 907 ]
def METHOD_NAME(testrepo, tmp_path):
    # pack the repo with the default strategy
    confirm_same_repo_after_packing(testrepo, tmp_path, None)
[ 9, 522, 1699 ]
def METHOD_NAME(self, name, table):
    """Given a reference to a `Table`, manage it and allow operations on it
    to occur through the Manager.

    If a function for `queue_process` is defined (i.e., by
    :obj:`~perspective.PerspectiveTornadoHandler`), bind the function to
    :obj:`~perspective.Table` and have it call the manager's version of
    `queue_process`.
    """
    if self._loop_callback is not None:
        # always bind the callback to the table's state manager
        table._state_manager.queue_process = partial(
            self._loop_callback, table._state_manager.call_process
        )
    self._tables[name] = table
    return name
[ 1806, 410 ]
def METHOD_NAME(
    self, repo=None, namespace=None, is_fork=False, **kwargs
) -> "GitlabProject":
    if is_fork:
        namespace = self.user.get_username()
    return GitlabProject(repo=repo, namespace=namespace, service=self, **kwargs)
[ 19, 155 ]
def METHOD_NAME(f, tag, tagregs, tagimms):
    regs = tagregs[tag]
    imms = tagimms[tag]

    numresults = 0
    numscalarresults = 0
    numscalarreadwrite = 0
    for regtype, regid, toss, numregs in regs:
        if (hex_common.is_written(regid)):
            numresults += 1
            if (hex_common.is_scalar_reg(regtype)):
                numscalarresults += 1
        if (hex_common.is_readwrite(regid)):
            if (hex_common.is_scalar_reg(regtype)):
                numscalarreadwrite += 1

    if (numscalarresults > 1):
        ## The helper is bogus when there is more than one result
        f.write('DEF_HELPER_1(%s, void, env)\n' % tag)
    else:
        ## Figure out how many arguments the helper will take
        if (numscalarresults == 0):
            def_helper_size = len(regs) + len(imms) + numscalarreadwrite + 1
            if hex_common.need_part1(tag):
                def_helper_size += 1
            if hex_common.need_slot(tag):
                def_helper_size += 1
            f.write('DEF_HELPER_%s(%s' % (def_helper_size, tag))
            ## The return type is void
            f.write(', void')
        else:
            def_helper_size = len(regs) + len(imms) + numscalarreadwrite
            if hex_common.need_part1(tag):
                def_helper_size += 1
            if hex_common.need_slot(tag):
                def_helper_size += 1
            f.write('DEF_HELPER_%s(%s' % (def_helper_size, tag))

        ## Generate the qemu DEF_HELPER type for each result
        ## Iterate over this list twice
        ## - Emit the scalar result
        ## - Emit the vector result
        i = 0
        for regtype, regid, toss, numregs in regs:
            if (hex_common.is_written(regid)):
                if (not hex_common.is_hvx_reg(regtype)):
                    gen_def_helper_opn(f, tag, regtype, regid, toss, numregs, i)
                i += 1

        ## Put the env between the outputs and inputs
        f.write(', env')
        i += 1

        # Second pass
        for regtype, regid, toss, numregs in regs:
            if (hex_common.is_written(regid)):
                if (hex_common.is_hvx_reg(regtype)):
                    gen_def_helper_opn(f, tag, regtype, regid, toss, numregs, i)
                i += 1

        ## Generate the qemu type for each input operand (regs and immediates)
        for regtype, regid, toss, numregs in regs:
            if (hex_common.is_read(regid)):
                if (hex_common.is_hvx_reg(regtype) and
                        hex_common.is_readwrite(regid)):
                    continue
                gen_def_helper_opn(f, tag, regtype, regid, toss, numregs, i)
                i += 1
        for immlett, bits, immshift in imms:
            f.write(", s32")

        ## Add the arguments for the instruction slot and part1 (if needed)
        if hex_common.need_slot(tag):
            f.write(', i32')
        if hex_common.need_part1(tag):
            f.write(' , i32')
        f.write(')\n')
[ 370, 1087, 8298 ]
def METHOD_NAME(
    self,
    sort: Union[_NotSetType, str] = ...,
    direction: Union[_NotSetType, str] = ...,
    since: Union[_NotSetType, datetime] = ...,
) -> PaginatedList[PullRequestComment]: ...
[ 19, 3528 ]
def METHOD_NAME( self, v ):
    '''
    Wrapper for the setReferencePoint method passing a vector
    '''
    self.setReferencePoint( array( 'f', [v.x(), v.y(), v.z()] ) )
[ 0, 272, 1669, 3203 ]
def METHOD_NAME(**kwargs: Any) -> HttpRequest:
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2021-01-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "/providers/Microsoft.Storage/operations")

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
[ 56, 245, 377 ]
def METHOD_NAME(self, locs):
    """Compute displacement field at locations.
    """
    (npts, dim) = locs.shape
    disp = numpy.zeros((1, npts, 2), dtype=numpy.float64)
    mask = numpy.logical_and(locs[:, 0] > -2.0e+3, locs[:, 0] < 0.0)
    disp[0, :, 1] = +2.0 * mask
    return disp
[ 6619 ]
def METHOD_NAME(self, split=False):
    num_nodes = 3
    extra_args = [([
        "-exportdir={}/export{}".format(self.options.tmpdir, i),
        '-allowdeprecated=getnewaddress',
        '-allowdeprecated=z_getnewaddress',
    ] + (["-walletrequirebackup"] if i == 0 else [])) for i in range(num_nodes)]
    self.nodes = start_nodes(num_nodes, self.options.tmpdir, extra_args)
[ 102, 1228 ]
def METHOD_NAME(self) -> Optional[str]:
    """
    A custom definition for the User Definition.
    """
    return pulumi.get(self, "custom_data")
[ 343, 365 ]
def METHOD_NAME(self, *args, **kwargs): ...
[ 11185 ]
def METHOD_NAME(cls): """Returns a list of CBC parameters which can be obtained from the class instance.""" return sorted([m[0].replace('_param', '') for m in inspect.getmembers(cls) if m[0].endswith('_param')])
[ 19, 49, 83 ]
def METHOD_NAME(self):
    temp_dir = tempfile.TemporaryDirectory()
    rgb_url = os.path.join(OPENDR_SERVER_URL, 'perception', 'multimodal_human_centric',
                           'rgbd_hand_gesture_learner', 'test_hand_gesture_rgb.png')
    depth_url = os.path.join(OPENDR_SERVER_URL, 'perception', 'multimodal_human_centric',
                             'rgbd_hand_gesture_learner', 'test_hand_gesture_depth.png')
    # retrieve test files
    rgb_file = os.path.join(temp_dir.name, 'rgb.png')
    depth_file = os.path.join(temp_dir.name, 'depth.png')
    urlretrieve(rgb_url, rgb_file)
    urlretrieve(depth_url, depth_file)
    # load test files
    rgb_img = np.asarray(imageio.imread(rgb_file)) / 255.0
    rgb_img = cv2.resize(rgb_img, (224, 224))
    depth_img = np.asarray(imageio.imread(depth_file)) / 65535.0
    depth_img = cv2.resize(depth_img, (224, 224))
    depth_img = np.expand_dims(depth_img, axis=-1)
    img = np.concatenate([rgb_img, depth_img], axis=-1)
    # normalize
    mean = np.asarray([0.485, 0.456, 0.406, 0.0303]).reshape(1, 1, 4)
    std = np.asarray([0.229, 0.224, 0.225, 0.0353]).reshape(1, 1, 4)
    img = (img - mean) / std
    img = Image(img, np.float32)
    # create learner and download pretrained model
    learner = RgbdHandGestureLearner(n_class=16, architecture='mobilenet_v2')
    model_path = os.path.join(temp_dir.name, 'mobilenet_v2')
    learner.download(model_path)
    learner.load(model_path)
    # make inference
    pred = learner.infer(img)
    self.assertTrue(isinstance(pred, Category))
    self.assertTrue(pred.data == 12, msg="Predicted class label must be 12")
    self.assertTrue(pred.confidence <= 1,
                    msg="Confidence of prediction must be less or equal than 1")
    temp_dir.cleanup()
[ 9, 1852 ]
def METHOD_NAME(verts: torch.Tensor, edges: torch.Tensor) -> torch.Tensor:
    """
    Computes the laplacian matrix.
    The definition of the laplacian is
    L[i, j] =    -1       , if i == j
    L[i, j] = 1 / deg(i)  , if (i, j) is an edge
    L[i, j] =    0        , otherwise
    where deg(i) is the degree of the i-th vertex in the graph.

    Args:
        verts: tensor of shape (V, 3) containing the vertices of the graph
        edges: tensor of shape (E, 2) containing the vertex indices of each edge
    Returns:
        L: Sparse FloatTensor of shape (V, V)
    """
    V = verts.shape[0]

    e0, e1 = edges.unbind(1)

    idx01 = torch.stack([e0, e1], dim=1)  # (E, 2)
    idx10 = torch.stack([e1, e0], dim=1)  # (E, 2)
    idx = torch.cat([idx01, idx10], dim=0).t()  # (2, 2*E)

    # First, we construct the adjacency matrix,
    # i.e. A[i, j] = 1 if (i,j) is an edge, or
    # A[e0, e1] = 1 & A[e1, e0] = 1
    ones = torch.ones(idx.shape[1], dtype=torch.float32, device=verts.device)
    A = torch.sparse.FloatTensor(idx, ones, (V, V))

    # the sum of i-th row of A gives the degree of the i-th vertex
    deg = torch.sparse.sum(A, dim=1).to_dense()

    # We construct the Laplacian matrix by adding the non diagonal values
    # i.e. L[i, j] = 1 ./ deg(i) if (i, j) is an edge
    deg0 = deg[e0]
    # pyre-fixme[58]: `/` is not supported for operand types `float` and `Tensor`.
    deg0 = torch.where(deg0 > 0.0, 1.0 / deg0, deg0)
    deg1 = deg[e1]
    # pyre-fixme[58]: `/` is not supported for operand types `float` and `Tensor`.
    deg1 = torch.where(deg1 > 0.0, 1.0 / deg1, deg1)
    val = torch.cat([deg0, deg1])
    L = torch.sparse.FloatTensor(idx, val, (V, V))

    # Then we add the diagonal values L[i, i] = -1.
    idx = torch.arange(V, device=verts.device)
    idx = torch.stack([idx, idx], dim=0)
    ones = torch.ones(idx.shape[1], dtype=torch.float32, device=verts.device)
    L -= torch.sparse.FloatTensor(idx, ones, (V, V))

    return L
[ 3505 ]
def METHOD_NAME(document, description):
    assert compose_create_description(document) == description
[ 9, 166, 129, 1067 ]
def METHOD_NAME(max_retries, backoff_factor, retry_codes):
    """
    Returns a `Requests.Session` object for making an HTTP request.

    :param max_retries: Maximum total number of retries.
    :param backoff_factor: a time factor for exponential backoff. e.g. value 5 means the HTTP
      request will be retried with interval 5, 10, 20... seconds. A value of 0 turns off the
      exponential backoff.
    :param retry_codes: a list of HTTP response error codes that qualifies for retry.
    :return: requests.Session object.
    """
    return _cached_get_request_session(
        max_retries,
        backoff_factor,
        retry_codes,
        _pid=os.getpid(),
    )
[ 19, 377, 240 ]
def METHOD_NAME(fake_hsm, conn_type, connections):
    fake_hsm.connectStorageServer(conn_type, 'SPUID', connections)
    sc = storageServer.ConnectionFactory.connections
    for con in connections:
        assert sc[con["id"]].connected
[ 9, 707 ]
def METHOD_NAME(path, msg=None):
    """Assert that path exists but is not a symlink.
    """
    path = _strpath(path)
    st = _stat_for_assert(path, False, msg)
    if stat.S_ISLNK(st.st_mode):
        if msg is None:
            msg = "Path is a symlink: %r" % path
        raise AssertionError(msg)
[ 638, 130, 9946 ]
def METHOD_NAME(cls, init_data: Dict) -> 'ClusterScopedPermCtx':
    return cls(
        username=init_data['username'],
        force_raise=init_data.get('force_raise', False),
        project_id=init_data['project_id'],
        cluster_id=init_data['cluster_id'],
    )
[ 280, 553 ]
async def METHOD_NAME(self):
    mgr = InboundTransportManager(self.profile, None)
    test_wire_format = async_mock.MagicMock()
    session = await mgr.create_session("http", wire_format=test_wire_format)

    test_outbound = OutboundMessage(payload=None)
    test_outbound.reply_session_id = session.session_id
    with async_mock.patch.object(
        session, "accept_response", return_value=True
    ) as mock_accept:
        assert mgr.return_to_session(test_outbound) is True
        mock_accept.assert_called_once_with(test_outbound)

    test_outbound = OutboundMessage(payload=None)
    test_outbound.reply_session_id = None
    with async_mock.patch.object(
        session, "accept_response", return_value=False
    ) as mock_accept:
        assert mgr.return_to_session(test_outbound) is False
        mock_accept.assert_called_once_with(test_outbound)

    with async_mock.patch.object(
        session, "accept_response", return_value=True
    ) as mock_accept:
        assert mgr.return_to_session(test_outbound) is True
        mock_accept.assert_called_once_with(test_outbound)
[ 9, 1413, 24, 240 ]
def METHOD_NAME(self):
    self.worker.poll(task_list='some_other_tasklist')
    self.worker._swf.poll_for_activity_task.assert_called_with('test', 'some_other_tasklist')
[ 9, 1794, 345, -1 ]
def METHOD_NAME(event):
    self.press = None
    ax.figure.canvas.draw()
[ 69, 586 ]
def METHOD_NAME(df: pd.DataFrame, filename: str, output_path: str,
                output_format: Optional[str] = None, index: bool = False):
    _, file_extension = os.path.splitext(filename)
    if output_format is None:
        if not file_extension:
            default_format = 'txt'
            default_extension = '.{}'.format(default_format)
            separator = get_separator(default_extension)
            filename = '{}{}'.format(filename, default_extension)
        else:
            separator = get_separator(file_extension)
    else:
        selected_extension = '.{}'.format(output_format)
        if file_extension != selected_extension:
            separator = get_separator(selected_extension)
            filename = '{}{}'.format(filename, selected_extension)
            if file_extension:
                logger.warning(
                    'Selected extension mismatches output filename ({}, {}): It will be added => {}'.format(
                        selected_extension, file_extension, filename))
        else:
            separator = get_separator(selected_extension)
    df.to_csv('{}/{}'.format(output_path, filename), sep=separator, index=index)
[ 77, 24, 171 ]
def METHOD_NAME(self):
    DatasetCatalog.register("test_dataset", lambda: TestConvertCOCO.generate_data())
    MetadataCatalog.get("test_dataset").set(thing_classes=["test_label"])
    convert_to_coco_dict("test_dataset")
    DatasetCatalog.pop("test_dataset")
    MetadataCatalog.pop("test_dataset")
[ 9, 197, 24, 777 ]
def METHOD_NAME(
    dependencies: List[DependencyInfo],
):
    dependencies_to_install = get_deps_to_install(dependencies)
    if len(dependencies_to_install) == 0:
        return

    exit_code = subprocess.check_call(
        [
            python_path,
            "-m",
            "pip",
            "install",
            *[pin(dep_info) for dep_info in dependencies_to_install],
            "--disable-pip-version-check",
            "--no-warn-script-location",
        ]
    )
    if exit_code != 0:
        raise ValueError("An error occurred while installing dependencies.")

    for dep_info in dependencies_to_install:
        package_name = dep_info["package_name"]
        version = dep_info["version"]
        installed_packages[package_name] = version
[ 428, 2410, 164 ]
def METHOD_NAME():
    pattern = some.dict.containing(
        {
            "dict": some.dict.containing({"int": some.int.in_range(100, 200)}),
            "list": [None, ~some.error, some.number | some.str],
        }
    )
    log_repr(pattern)
    assert pattern == {
        "list": [None, False, 123],
        "bool": True,
        "dict": {"int": 123, "str": "abc"},
    }
[ 9, 2203 ]
def METHOD_NAME(val_volt):
    keysight_sim = Keysight_34465A(
        "keysight_34465A_sim",
        address="GPIB::1::INSTR",
        pyvisa_sim_file="Keysight_34465A.yaml",
    )

    def get_ask_with_read_mock(original_ask, read_value):
        def ask_with_read_mock(cmd: str) -> str:
            if cmd in ("READ?", "FETCH?"):
                return read_value
            else:
                return original_ask(cmd)

        return ask_with_read_mock

    keysight_sim.ask = get_ask_with_read_mock(keysight_sim.ask, val_volt)

    try:
        yield keysight_sim
    finally:
        Keysight_34465A.close_all()
[ 1988, 41, 203, 61, 1047, 4331 ]
def METHOD_NAME(self, tracker):
    self._decoded_torrent["announce"] = tracker
[ 0, 4102 ]
def METHOD_NAME(self, pa_table: pa.Table) -> Mapping:
    row = self.numpy_arrow_extractor().extract_row(pa_table)
    row = self.python_features_decoder.decode_row(row)
    return self.recursive_tensorize(row)
[ 275, 843 ]
def METHOD_NAME(self):
    # activate gameover in game state
    self.game_state['ctx']['gameover'] = ['0']
    # call on_sync with bot game id
    self.sut.on_sync(self.botmock.game_id, self.game_state)
    self.botmock.gameover.assert_called_once_with(self.game_state['G'], self.game_state['ctx'])
    self.sut.emit.assert_not_called()
[ 9, 69, 164, 14102, 128, 14103, 217 ]
def METHOD_NAME(block, top=1):
    ret = []
    if hascommon(block):
        for key, value in block['common'].items():
            vars_ = {v: block['vars'][v] for v in value}
            ret.append((key, value, vars_))
    elif hasbody(block):
        for b in block['body']:
            ret = ret + METHOD_NAME(b, 0)
    if top:
        tret = []
        names = []
        for t in ret:
            if t[0] not in names:
                names.append(t[0])
                tret.append(t)
        return tret
    return ret
[ -1 ]
def METHOD_NAME(self, id):
    for p in self._properties:
        if id == p['id']:
            return 1
    return 0
[ 220, 1042 ]
def METHOD_NAME(annotated_utterance):
    found_topics = get_topics(annotated_utterance, probs=False, which="all")
    if any([food_topic in found_topics for food_topic in TOPIC_GROUPS["food"]]):
        return True
    elif re.findall(FOOD_COMPILED_PATTERN, annotated_utterance["text"]):
        return True
    else:
        return False
[ 2296, 12002 ]
def METHOD_NAME(self, field_name, model_field):
    field_class, field_kwargs = super().METHOD_NAME(
        field_name, model_field
    )

    if isinstance(model_field, LocalizedField):
        lang = translation.get_language()
        allow_blank = model_field.blank or lang not in model_field.required
        field_kwargs["allow_blank"] = allow_blank

    return field_class, field_kwargs
[ 56, 2356, 101 ]
def METHOD_NAME(value, weight):
    return tf.nest.map_structure(lambda x: x * weight, value)
[ 1998, 1755 ]
def METHOD_NAME(self, type):
    """ get the ordering """
    result = []
    for child, ancestry in type.resolve():
        name = child.name
        if child.name is None:
            continue
        if child.isattr():
            name = '_%s' % child.name
        result.append(name)
    return result
[ 887 ]
def METHOD_NAME(self, imgId, catId):
    p = self.params
    if p.useCats:
        gt = self._gts[imgId, catId]
        dt = self._dts[imgId, catId]
    else:
        gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]]
        dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]]
    if len(gt) == 0 and len(dt) == 0:
        return []
    inds = np.argsort([-d["score"] for d in dt], kind="mergesort")
    dt = [dt[i] for i in inds]
    if len(dt) > p.maxDets[-1]:
        dt = dt[0 : p.maxDets[-1]]

    assert p.iouType == "bbox", "unsupported iouType for iou computation"

    g = [g["bbox"] for g in gt]
    d = [d["bbox"] for d in dt]

    # compute iou between each dt and gt region
    iscrowd = [int(o["iscrowd"]) for o in gt]

    # Note: this function is copied from cocoeval.py in cocoapi
    # and the major difference is here.
    ious = self.compute_iou_dt_gt(d, g, iscrowd)
    return ious
[ 226, 249, 3597 ]
def METHOD_NAME(self, ymd):
    date_key = hmac_sha256(("AWS4" + self.password).encode(), ymd.encode())
    region_key = hmac_sha256(date_key, self.region.encode())
    service_key = hmac_sha256(region_key, b's3')
    signing_key = hmac_sha256(service_key, b'aws4_request')
    self.signing_key = (signing_key, ymd)
[ 86, 4943, 59 ]
def METHOD_NAME(self): ...
[ 7367, 1997, 641 ]
def METHOD_NAME(apps, schema_editor):
    ExtraPackageTypeStorage = apps.get_model("subscription", "ExtraPackageTypeStorage")
    packages = ExtraPackageTypeStorage.objects.filter(
        code__in=("storage", "storage_medium"),
    )
    if packages.count() == 1:
        packages.update(
            code="storage_medium",
            display_name="Medium +1000MB",
            is_public=True,
            megabytes=1000,
        )
    else:
        ExtraPackageTypeStorage.objects.all().delete()
        ExtraPackageTypeStorage.objects.create(
            code="storage_medium",
            display_name="Medium +1000MB",
            is_public=True,
            megabytes=1000,
        )
[ 2461, 948, 360, 119 ]
def METHOD_NAME(wrapped, typed, *args, **kwargs):
    key = _make_key(args, kwargs, typed)
    exists = key in wrapped._cache
    if exists:
        wrapped._cache.pop(key)
    return exists
[ 596, 3359 ]
def METHOD_NAME(self):
    self.assertTrue(parse_args(["site", "-l"]).l)
    self.assertTrue(parse_args(["site", "--lowercase"]).l)
[ 9, 214, 335, 741 ]
def METHOD_NAME(
    user_api_client,
    checkout_with_item,
    graphql_address_data,
):
    checkout = checkout_with_item
    assert checkout.shipping_address is None
    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)
    shipping_address = graphql_address_data
    variables = {
        "checkoutId": checkout_id,
        "token": checkout.token,
        "shippingAddress": shipping_address,
    }

    response = user_api_client.post_graphql(
        MUTATION_CHECKOUT_SHIPPING_ADDRESS_UPDATE, variables
    )
    content = get_graphql_content(response)
    data = content["data"]["checkoutShippingAddressUpdate"]
    assert len(data["errors"]) == 1
    assert not data["checkout"]
    assert data["errors"][0]["code"] == CheckoutErrorCode.GRAPHQL_ERROR.name
[ 9, 2170, 850, 85, 86, 2171, 466 ]
def METHOD_NAME(): """ Converts PyTorch/TorchScript file to ONNX files, for dynamic batchsize and explicit batchsize """ args = get_args() print("Converting PyTorch model to ONNX...") output_dir_path = Path(args.output_dir).absolute() output_dir_path.mkdir(parents=True, exist_ok=True) model_path = Path(args.model).absolute() output_path = Path(args.output_dir, args.output_name).absolute() dynamic_bs_output_path = Path( args.output_dir, args.dynamic_bs_output_name).absolute() print("Loading PyTorch model...") assert Path(model_path).is_file( ), "Cannot find the model file {:}!".format(model_path) device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") model = torch.jit.load(model_path, map_location=device) model.eval() batchsize = 1 input_channels = 1 output_channels = 3 depth, height, width = ROI_SHAPE dummy_input = torch.rand( [batchsize, input_channels, height, width, depth]).float().to(device) dummy_output = torch.rand( [batchsize, output_channels, height, width, depth]).float().to(device) # using opset version 12 torch.onnx.export(model, dummy_input, output_path, opset_version=12, do_constant_folding=False, input_names=['input'], output_names=['output'], example_outputs=dummy_output) torch.onnx.export(model, dummy_input, dynamic_bs_output_path, opset_version=12, do_constant_folding=False, input_names=['input'], output_names=['output'], dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}}, example_outputs=dummy_output) print("Successfully exported model:\n {}\nand\n {}".format( output_path, dynamic_bs_output_path))
[ 57 ]
def METHOD_NAME(self) -> List[float]:
    """m/z values ordered by intensity"""
    pair_lists = sort_mzs_intensities(self.mzs, self.intensities)
    return list(reversed(pair_lists[0]))  # pylint: disable=unsubscriptable-object
[ 15441, 604, 5531 ]
def METHOD_NAME(self) -> list[Match]: ...
[ 19, 3626, 37 ]
def METHOD_NAME(
    N,
    corners,
    conv_iters,
    max_iters,
    warmup,
    conv_threshold,
    perform_check,
    timing,
    verbose,
):
    # A, b = generate_random(N)
    A, b = generate_2D(N, corners)

    print("Solving system...")
    x = np.zeros(A.shape[1])
    r = b - A.dot(x)
    p = r
    rsold = r.dot(r)
    converged = -1
    # Should always converge in fewer iterations than this
    max_iters = (
        min(max_iters, b.shape[0]) if max_iters is not None else b.shape[0]
    )
    timer.start()
    for i in range(-warmup, max_iters):
        if i == 0:
            timer.start()
        Ap = A.dot(p)
        alpha = rsold / (p.dot(Ap))
        x = x + alpha * p
        r = r - alpha * Ap
        rsnew = r.dot(r)
        # We only do the convergence test every conv_iters or on the last
        # iteration
        if (
            i >= 0
            and (i % conv_iters == 0 or i == (max_iters - 1))
            and np.sqrt(rsnew) < conv_threshold
        ):
            converged = i
            break
        if verbose:
            print("Residual: " + str(rsnew))
        beta = rsnew / rsold
        p = r + beta * p
        rsold = rsnew
    total = timer.stop()

    if converged < 0:
        print("Convergence FAILURE!")
    else:
        print("Converged in %d iterations" % (converged))
    if perform_check:
        check(A, x, b)
    if timing:
        print(f"Elapsed Time: {total} ms")
    return total
[ 22, 10452 ]
def METHOD_NAME(self) -> None:
    self._client.METHOD_NAME()
[ 1462 ]
def METHOD_NAME(
    self,
    camera: CamerasBase,
    **kwargs,
):
    image_render, mask_render, depth_render = render_point_cloud_pytorch3d(
        camera[0],
        self._point_cloud,
        render_size=self._render_size,
        point_radius=1e-2,
        topk=10,
        bg_color=0.0,
    )
    return {
        "images_render": image_render.clamp(0.0, 1.0),
        "masks_render": mask_render,
        "depths_render": depth_render,
    }
[ 76 ]
def METHOD_NAME(self):
    site_name = "testserver"
    if allauth.app_settings.SITES_ENABLED:
        from django.contrib.sites.models import Site

        site = Site.objects.get_current()
        site.name = site_name = '<enc&"test>'
        site.save()
    u = get_user_model().objects.create(username="test", email="[email protected]")
    request = RequestFactory().get("/")
    EmailAddress.objects.add_email(request, u, u.email, confirm=True)
    self.assertTrue(mail.outbox[0].subject[1:].startswith(site_name))
[ 9, 487, 10850 ]
def METHOD_NAME(self, mode='explicit'):
    self.add_term_a(mode)
    self.add_term_b(mode)
[ 238, 1814 ]
def METHOD_NAME(self, vectors, assign_clusters=False, trace=False):
    # stores the merge order
    self._dendrogram = Dendrogram(
        [numpy.array(vector, numpy.float64) for vector in vectors]
    )
    return VectorSpaceClusterer.METHOD_NAME(self, vectors, assign_clusters, trace)
[ 2059 ]
def METHOD_NAME(self):
    self.cur_view = 1
    self.proto_view_changed.emit()
[ 69, 696, 1006, 6819 ]
def METHOD_NAME(key):
    escaped = key.replace("\\", "\\\\")
    return f":kbd:`{escaped}`"
[ 24, 1320 ]
def METHOD_NAME(next_link=None):
    if not next_link:
        request = build_list_request(
            api_version=api_version,
            template_url=self.list.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

    else:
        # make call to next link with the client's api-version
        _parsed_next_link = urllib.parse.urlparse(next_link)
        _next_request_params = case_insensitive_dict(
            {
                key: [urllib.parse.quote(v) for v in value]
                for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
            }
        )
        _next_request_params["api-version"] = self._config.api_version
        request = HttpRequest(
            "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        request.method = "GET"
    return request
[ 123, 377 ]
def METHOD_NAME(self):
    if isinstance(self.dataset.METHOD_NAME, list):
        return [s[self._cur_indices.array] for s in self.dataset.METHOD_NAME]
    return self.dataset.METHOD_NAME[self._cur_indices.array]
[ 3120 ]
def METHOD_NAME(self):
[ 9, 2054 ]
def METHOD_NAME(node: Node, params: QueryStateDict, result: Result) -> None:
    result.first_num = int(parse_num(node, result._canonical))
[ 1010, 6640, 865, 106 ]
def METHOD_NAME(self):
    self.cpp_info.set_property("pkg_config_name", "blosc2")
    prefix = "lib" if is_msvc(self) and not self.options.shared else ""
    self.cpp_info.libs = [f"{prefix}blosc2"]
    if self.settings.os in ["Linux", "FreeBSD"]:
        self.cpp_info.system_libs = ["rt", "m", "pthread", "dl"]
[ 360, 100 ]
def METHOD_NAME(
    case_number: str, charges: Tuple[OeciCharge, ...], charges_edits: Dict[str, Dict[str, str]]
) -> Tuple[Tuple[OeciCharge, ...], List[Charge]]:
    charges_without_charge_type = [
        charge for charge in charges if charge.ambiguous_charge_id not in charges_edits.keys()
    ]
    charges_with_charge_type = []
    for edit_action_ambiguous_charge_id, edit in charges_edits.items():
        if edit.get("edit_status", None) == EditStatus.ADD:
            new_charge = RecordEditor._add_charge(case_number, edit_action_ambiguous_charge_id, edit)
            charges_with_charge_type.append(new_charge)
        else:  # edit["edit_status"] is either UPDATE or DELETE
            updated_charge = RecordEditor._update_or_delete_charge(
                charges, case_number, edit_action_ambiguous_charge_id, edit
            )
            if isinstance(updated_charge, Charge):
                charges_with_charge_type.append(updated_charge)
            else:
                charges_without_charge_type.append(updated_charge)
    return tuple(charges_without_charge_type), charges_with_charge_type
[ 2004, 10231 ]
def METHOD_NAME(self) -> github.GitTree.GitTree:
    self._completeIfNotSet(self._tree)
    return self._tree.value
[ 151 ]
def METHOD_NAME(data_file, schema):
    table_name = "NASA 1"
    table = create_table_from_data_file(data_file, table_name, schema)

    num_records = 1393
    expected_row = (
        1,
        "NASA Kennedy Space Center",
        "Application",
        "KSC-12871",
        "0",
        "13/033,085",
        "Polyimide Wire Insulation Repair System",
        None,
    )
    expected_cols = [
        "Center",
        "Status",
        "Case Number",
        "Patent Number",
        "Application SN",
        "Title",
        "Patent Expiration Date",
    ]
    check_csv_upload(
        table, table_name, schema, num_records, expected_row, expected_cols
    )
[ 9, 732, 172 ]
def METHOD_NAME(self, data_keys: List[str], fetch_only: bool = False) -> List:
    """
    Get result of chunks.

    Parameters
    ----------
    data_keys : list
        Data keys.
    fetch_only : bool
        If fetch_only, only fetch data but not return.

    Returns
    -------
    results : list
        Result of chunks if not fetch_only, else return None
    """
[ 19, 831, 1571 ]
def METHOD_NAME(self):  # partly for 100% coverage
    self.assertIn('Redirector', repr(self.redir))
    self.assertIn('Original', repr(self.orig_insert))
[ 9, 92 ]
def METHOD_NAME(): """Return True if the plugin has loaded successfully.""" ok = g.app.gui.guiName() == "qt" if ok: g.plugin_signon(__name__) install_contextmenu_handlers() return ok
[ 176 ]
def METHOD_NAME(self):
    cmake_layout(self, src_folder="src")
[ 571 ]
def METHOD_NAME(self) -> str: """ Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} """ return pulumi.get(self, "id")
[ 147 ]
def METHOD_NAME(pgconn, scs):
    esc = pq.Escaping(pgconn)
    res = pgconn.exec_(f"set standard_conforming_strings to {scs}".encode("ascii"))
    assert res.status == pq.ExecStatus.COMMAND_OK

    special = {b"'": b"''", b"\\": b"\\" if scs == "on" else b"\\\\"}
    for c in range(1, 128):
        data = bytes([c])
        rv = esc.escape_string(data)
        exp = special.get(data) or b"%s" % data
        assert rv == exp
[ 9, 4748, 144, -1 ]
def METHOD_NAME(self):
    if self.inventory_licenses is not None:
        return self.inventory_licenses
    else:
        return ""
[ 19, 2885, 4087 ]
def METHOD_NAME(self) -> Iterator[str]:
    return iter(self.__dict__.get("tags_"))
[ 82, 83, 84 ]
def METHOD_NAME(client, setup_api_test_data):
    """Try to change the name of a sensor with a non admin account"""
    headers = make_headers_for("[email protected]", client)
    sensor = Sensor.query.filter(Sensor.name == "some temperature sensor").one_or_none()
    response = client.patch(
        url_for("SensorAPI:patch", id=sensor.id),
        headers=headers,
        json={
            "name": "try to change the name",
        },
    )
    assert response.status_code == 403
    assert response.json["status"] == "INVALID_SENDER"
[ 9, 1575, 6234, 256, 2870 ]
def METHOD_NAME(self) -> int:
    return 1
[ 417, 3014 ]
def METHOD_NAME(self, node: nodes.AssignName) -> None:
    """Check module level assigned names."""
    # The NameChecker from which this Checker originates knows a lot of different
    # versions of variables, i.e. constants, inline variables etc.
    # To simplify we use only `variable` here, as we don't need to apply different
    # rules to different types of variables.
    frame = node.frame()
    if isinstance(frame, nodes.FunctionDef):
        if node.parent in frame.body:
            # Only perform the check if the assignment was done in within the body
            # of the function (and not the function parameter definition
            # (will be handled in visit_functiondef)
            # or within a decorator (handled in visit_call)
            self._check_name("variable", node.name, node)
    elif isinstance(frame, nodes.ClassDef):
        self._check_name("attr", node.name, node)
    else:
        # Possibilities here:
        # - isinstance(node.assign_type(), nodes.Comprehension) == inlinevar
        # - isinstance(frame, nodes.Module) == variable (constant?)
        # - some other kind of assignment missed but still most likely a variable
        self._check_name("variable", node.name, node)
[ 716, -1 ]
def METHOD_NAME(self, key):
    if 32 <= key <= 127:
        return keys.Key(chr(key)), chr(key)
    elif key in KEYMAP:
        return KEYMAP[key], ""
    else:
        return None, ""
[ 356, 59 ]
def METHOD_NAME(cls, op):
    if has_unknown_shape(*op.inputs):
        yield
    inputs = yield from unify_chunks(
        *[(input, list(range(input.ndim))[::-1]) for input in op.inputs]
    )
    output = op.outputs[0]

    chunk_shapes = [
        t.chunk_shape if hasattr(t, "chunk_shape") else t for t in inputs
    ]
    out_chunk_shape = broadcast_shape(*chunk_shapes)

    out_chunks = []
    nsplits = [[np.nan] * shape for shape in out_chunk_shape]
    get_index = lambda idx, t: tuple(
        0 if t.nsplits[i] == (1,) else ix for i, ix in enumerate(idx)
    )
    for out_idx in itertools.product(*(map(range, out_chunk_shape))):
        in_chunks = [
            t.cix[get_index(out_idx[-t.ndim :], t)] if t.ndim != 0 else t.chunks[0]
            for t in inputs
        ]
        out_chunk = (
            op.copy()
            .reset_key()
            .new_chunk(
                in_chunks,
                shape=in_chunks[1].shape,
                order=output.order,
                index=out_idx,
            )
        )
        out_chunks.append(out_chunk)
        for i, idx, s in zip(itertools.count(0), out_idx, out_chunk.shape):
            nsplits[i][idx] = s

    new_op = op.copy()
    return new_op.new_tensors(
        op.inputs,
        output.shape,
        order=output.order,
        chunks=out_chunks,
        nsplits=nsplits,
    )
[ 4161 ]
def METHOD_NAME(empty_force_field, attribute, value):
    """
    Test that an empty force field has the expected attributes.
    """
    assert getattr(empty_force_field, attribute) == value
[ 9, 176, 35, 1818, 101 ]
def METHOD_NAME(self):
    label = Label()
    code = Bytecode([Instr("JUMP_ABSOLUTE", label)])
    expected = " JUMP_ABSOLUTE <error: unknown label>\n\n"
    self.check_dump_bytecode(code, expected)
[ 9, 5858, 31, 636 ]
def METHOD_NAME(self) -> str | None: ...
[ 913, 147 ]
def METHOD_NAME(completeness):
    completeness_range = range(-1, flex.max(completeness) + 1)
    completeness_counts = [completeness.count(n) for n in completeness_range]
    from matplotlib import pyplot as plt
    plt.plot(completeness_range, completeness_counts, "r+")
    plt.show()
[ 1288, 4600, 10470 ]
def METHOD_NAME(artifact_repository_url: str) -> Optional[List[Dict[str, Any]]]:
    """
    Returns a list of JSON objects representing docker images found under
    |artifact_repository_url|, or None on error.

    Sample JSON object:
    {
      "createTime": "2022-07-11T20:20:23.577823Z",
      "package": "us-docker.pkg.dev/stadia-open-source/amdvlk-public-ci/amdvlk_release_gcc_assertions",
      "tags": "",
      "updateTime": "2022-07-11T20:20:23.577823Z",
      "version": "sha256:e101b6336fa78014e4008df59667dd84616dc8d1b60c2240f3246ab9a1ed6b20"
    }
    """
    ok, text = _run_cmd(['gcloud', 'artifacts', 'docker', 'images', 'list',
                         artifact_repository_url, '--format=json', '--quiet'])
    if not ok:
        return None
    return list(json.loads(text))
[ 539, 3669 ]
def METHOD_NAME(self):
    election = Election.objects.get(slug=self.kwargs["election"])
    if election.nominations_complete:
        messages.error(
            self.request, f"Nominations for {election.name} Election are closed"
        )
        raise Http404(f"Nominations for {election.name} Election are closed")
    return NominationCreateForm
[ 19, 1029, 2 ]
def METHOD_NAME(tmp_path):
    clf = ak.StructuredDataClassifier(
        directory=tmp_path,
        column_names=["a", "b"],
        column_types={"a": "numerical", "b": "numerical"},
    )
    clf.inputs[0].shape = (2,)
    clf.outputs[0].in_blocks[0].shape = (10,)
    init_hp = task_specific.STRUCTURED_DATA_CLASSIFIER[0]
    hp = keras_tuner.HyperParameters()
    hp.values = copy.copy(init_hp)

    clf.tuner.hypermodel.build(hp)

    assert set(init_hp.keys()) == set(hp._hps.keys())
[ 9, 4836, 7310, 176, 15691, 816, 1754 ]
async def METHOD_NAME():
    assert fsspec.asyn.running_async()
[ 1515 ]
def METHOD_NAME(self):
    return self.get_list_of_fxcodes()
[ 219 ]
def METHOD_NAME(self):
    try:
        return self.path.METHOD_NAME
    except:
        return f'PIL_texture_{self.size}'
[ 156 ]
def METHOD_NAME():
    array = np.arange(40, dtype=np.int64)
    container = {"node0-data": array}
    form = """
    {
        "class": "NumpyArray",
        "primitive": "int32",
        "form_key": "node0"
    }
    """
    result = ak.from_buffers(form, array.size, container, highlevel=False)
    assert np.shares_memory(array, result.data)
[ 9, 2481, 2481, 955, 4303, 9007 ]
def METHOD_NAME(skycoord, rng):
    zhat = np.squeeze((skycoord.cartesian / skycoord.cartesian.norm()).xyz)

    # Random vector orthogonal to the pole:
    x = rng.uniform(size=3)
    x /= np.linalg.norm(x)
    xhat = x - (x @ zhat) * zhat
    xhat /= np.linalg.norm(xhat)

    return coord.SkyCoord(coord.CartesianRepresentation(xhat), frame=skycoord.frame)
[ 19, 236, 5329 ]
async def METHOD_NAME(update, context):
    """Bot help"""
    consultant = await check_user_is_declared(update, context)
    if consultant is None:
        return ConversationHandler.END
    msg = _("""Hello. I am just a bot you know. So I won't fake doing incredible things. Here's what I can do for you:
/hello nice way to meet. After this cordial introduction, I may talk to you from time to time to remind you important things to do
/time a fun and easy way to declare your timesheet of the day
/bye you will never be bothered again till you say /hello again
""")
    await update.message.reply_text(msg)
    return ConversationHandler.END
[ 40 ]
def METHOD_NAME(self, session):
    data = self.deserialize_http_content(session)
    self.ctx.set_var(
        "instance",
        data,
        schema_builder=self._build_schema_on_200_201
    )
[ 69, 1072, 4905 ]
def METHOD_NAME(self, image):
    self.left_image_show_lock.acquire()
    self.left_image_to_be_shown = image
    self.left_image_to_be_shown_updated = True
    self.left_image_show_lock.release()
[ 697, 879, 660 ]
def METHOD_NAME():
    assert time.unix_epoch_to_datetime(996877846784536) == datetime.datetime.max
[ 9, 1226, 1165, 24, 884, 41, 1737 ]