text: string (lengths 15–7.82k)
ids: sequence (lengths 1–7)
def METHOD_NAME(res, args):
    from datalad.ui import ui

    if res is None:
        res = []
    if not len(res):
        ui.message("No repos were created... oops")
        return
    items = '\n'.join(map(str, res))
    msg = "{n} installed {obj} available at\n{items}".format(
        obj='items are' if len(res) > 1 else 'item is',
        n=len(res),
        items=items)
    ui.message(msg)
[ 1571, 6599, 4518 ]
async def METHOD_NAME(request):
    content = open(TELEOPDIR + "/static/index.html", "r").read()
    now = time.monotonic()
    request.app['mutable_vals']['last_send_time'] = now
    request.app['mutable_vals']['last_override_time'] = now
    request.app['mutable_vals']['prev_command'] = []
    request.app['mutable_vals']['find_person'] = False
    return web.Response(content_type="text/html", text=content)
[ 724 ]
def METHOD_NAME(fdemType, comp, SrcList, freq, useMu=False, verbose=False):
    cs = 10.0
    ncx, ncy, ncz = 0, 0, 0
    npad = 8
    hx = [(cs, npad, -1.3), (cs, ncx), (cs, npad, 1.3)]
    hy = [(cs, npad, -1.3), (cs, ncy), (cs, npad, 1.3)]
    hz = [(cs, npad, -1.3), (cs, ncz), (cs, npad, 1.3)]
    mesh = TensorMesh([hx, hy, hz], ["C", "C", "C"])

    if useMu is True:
        mapping = [("sigma", maps.ExpMap(mesh)), ("mu", maps.IdentityMap(mesh))]
    else:
        mapping = maps.ExpMap(mesh)

    # don't sample right by the source, slightly off alignment from either staggered grid
    x = (
        np.array(
            [np.linspace(-5.0 * cs, -2.0 * cs, 3), np.linspace(5.0 * cs, 2.0 * cs, 3)]
        )
        + cs / 4.0
    )
    XYZ = utils.ndgrid(x, x, np.linspace(-2.0 * cs, 2.0 * cs, 5))

    Rx0 = getattr(fdem.Rx, "Point" + comp[0])
    if comp[-1] == "r":
        real_or_imag = "real"
    elif comp[-1] == "i":
        real_or_imag = "imag"
    rx0 = Rx0(XYZ, comp[1], real_or_imag)

    Src = []

    for SrcType in SrcList:
        if SrcType == "MagDipole":
            Src.append(
                fdem.Src.MagDipole([rx0], frequency=freq, location=np.r_[0.0, 0.0, 0.0])
            )
        elif SrcType == "MagDipole_Bfield":
            Src.append(
                fdem.Src.MagDipole_Bfield(
                    [rx0], frequency=freq, location=np.r_[0.0, 0.0, 0.0]
                )
            )
        elif SrcType == "CircularLoop":
            Src.append(
                fdem.Src.CircularLoop(
                    [rx0], frequency=freq, location=np.r_[0.0, 0.0, 0.0]
                )
            )
        elif SrcType == "LineCurrent":
            Src.append(
                fdem.Src.LineCurrent(
                    [rx0],
                    frequency=freq,
                    location=np.array([[0.0, 0.0, 0.0], [20.0, 0.0, 0.0]]),
                )
            )
        elif SrcType == "RawVec":
            if fdemType == "e" or fdemType == "b":
                S_m = np.zeros(mesh.nF)
                S_e = np.zeros(mesh.nE)
                S_m[
                    mesh.closest_points_index([0.0, 0.0, 0.0], "Fz")
                    + np.sum(mesh.vnF[:1])
                ] = 1e-3
                S_e[
                    mesh.closest_points_index([0.0, 0.0, 0.0], "Ez")
                    + np.sum(mesh.vnE[:1])
                ] = 1e-3
                Src.append(
                    fdem.Src.RawVec(
                        [rx0], freq, S_m, mesh.get_edge_inner_product() * S_e
                    )
                )
            elif fdemType == "h" or fdemType == "j":
                S_m = np.zeros(mesh.nE)
                S_e = np.zeros(mesh.nF)
                S_m[
                    mesh.closest_points_index([0.0, 0.0, 0.0], "Ez")
                    + np.sum(mesh.vnE[:1])
                ] = 1e-3
                S_e[
                    mesh.closest_points_index([0.0, 0.0, 0.0], "Fz")
                    + np.sum(mesh.vnF[:1])
                ] = 1e-3
                Src.append(
                    fdem.Src.RawVec(
                        [rx0], freq, mesh.get_edge_inner_product() * S_m, S_e
                    )
                )

    if verbose:
        print(" Fetching {0!s} problem".format(fdemType))

    if fdemType == "e":
        survey = fdem.Survey(Src)
        prb = fdem.Simulation3DElectricField(mesh, survey=survey, sigmaMap=mapping)
    elif fdemType == "b":
        survey = fdem.Survey(Src)
        prb = fdem.Simulation3DMagneticFluxDensity(
            mesh, survey=survey, sigmaMap=mapping
        )
    elif fdemType == "j":
        survey = fdem.Survey(Src)
        prb = fdem.Simulation3DCurrentDensity(mesh, survey=survey, sigmaMap=mapping)
    elif fdemType == "h":
        survey = fdem.Survey(Src)
        prb = fdem.Simulation3DMagneticField(mesh, survey=survey, sigmaMap=mapping)
    else:
        raise NotImplementedError()

    try:
        from pymatsolver import Pardiso

        prb.solver = Pardiso
    except ImportError:
        prb.solver = SolverLU
    # prb.solver_opts = dict(check_accuracy=True)

    return prb
[ 19, -1, 3095 ]
def METHOD_NAME(self, cache, helper):
    self.node.get_descendant_count.return_value = STALE_MAX_CALCULATION_SIZE + 1
    self.assertCalculation(cache, helper, force=True)
[ 9, 1038, 564, 4289, 9357 ]
def METHOD_NAME(
    setup_py_path: str,
    egg_info_dir: Optional[str],
    no_user_config: bool,
) -> List[str]:
    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)

    args += ["egg_info"]

    if egg_info_dir:
        args += ["--egg-base", egg_info_dir]

    return args
[ 93, 11349, 4429, 100, 335 ]
def METHOD_NAME(inp):
    """
    Wraps around if we split: make sure last passage isn't too short.
    This is meant to be similar to the DPR preprocessing.
    """
    (nwords, overlap, tokenizer), (title_idx, docid, title, url, content) = inp

    if tokenizer is None:
        words = content.split()
    else:
        words = tokenizer.tokenize(content)

    words_ = (words + words) if len(words) > nwords else words
    passages = [words_[offset:offset + nwords]
                for offset in range(0, len(words) - overlap, nwords - overlap)]

    assert all(len(psg) in [len(words), nwords] for psg in passages), \
        (list(map(len, passages)), len(words))

    if tokenizer is None:
        passages = [' '.join(psg) for psg in passages]
    else:
        passages = [' '.join(psg).replace(' ##', '') for psg in passages]

    if title_idx % 100000 == 0:
        print("#> ", title_idx, '\t\t\t', title)
        for p in passages:
            print("$$$ ", '\t\t', p)
        print()
        print()
        print()
        print()

    return (docid, title, url, passages)
[ 356, 1174 ]
def METHOD_NAME(args):
    web_server = faraday.server.web.WebServer()
    daemonize.create_pid_file(args.port)
    web_server.run()
[ 22, 163 ]
def METHOD_NAME(nameurls):
    DEFAULT_URLNAME = "Unknown"
    METHOD_NAME = {}
    for urlname, url in nameurls:
        urlname = name(urlname, safe_name=True)
        urlname = os.path.splitext(urlname)[0].strip()
        urlname = _RE_PACKS.sub("_", urlname).strip("_")
        if not urlname:
            urlname = DEFAULT_URLNAME
        METHOD_NAME.setdefault(urlname, []).append(url)
    return METHOD_NAME
[ 8557 ]
def METHOD_NAME(self) -> List[Cluster[T]]:
    """
    Returns list of all clusters in clusterization.

    :return: List of all clusters in clusterization.
    """
    return list(self.clusters.values())
[ 19, 75, 1827 ]
def METHOD_NAME(self):
    # Test the last resort to prevent segfault
    obj = DummyHasTraits(names=["A", "B", "C"])
    view = get_view(TabularAdapter(columns=["Name"]))
    with reraise_exceptions(), create_ui(obj, dict(view=view)) as ui:
        (editor,) = ui.get_editors("names")
        model = editor.model

        # sanity check
        self.assertEqual(model.rowCount(None), 3)

        # when
        # -1 is an invalid row. This should not cause segfault.
        model.moveRows([1], -1)

        # then
        mime_data = model.mimeData(
            [
                model.createIndex(i, 0)
                for i in range(model.rowCount(None))
            ]
        )
        content = mime_data.instance()
        self.assertEqual(content, ["A", "C", "B"])
        self.assertEqual(obj.names, content)
[ 9, 132, 1346, 532, 724 ]
def METHOD_NAME(self): """ Get completeness of osm data :return: percentage of completeness :rtype: int """ total = 0 completed = 0 for meta_field in LocalityOSM._meta.get_fields(): field = meta_field.name total += 1 if self._meta.get_field(field).get_internal_type() == 'CharField': if getattr(self, field): completed += 1 return float(completed * 100 / total)
[ 19, 10470 ]
def METHOD_NAME(self, component_name, module_dirs):
    # precondition: A module with this name has to exist provided that
    # the caller has called #can_load() before.
    root_module_name = "%s.%s" % (component_name, self.component_entry_point)
    for p in self._modules(module_dirs, component_name):
        self.logger.debug("Loading module [%s]", p)
        m = importlib.import_module(p)
        if p == root_module_name:
            root_module = m
    return root_module
[ 557, 1007 ]
def METHOD_NAME(self):
    return _write_kegg("DEFINITION", [self.definition])
[ 1208 ]
def METHOD_NAME(predictions, targets, data_weights, mask, expected_loss):
    """
    Test the multiclass MCC loss function by comparing to sklearn's results.
    """
    loss = mcc_multiclass_loss(predictions, targets, data_weights, mask)
    np.testing.assert_almost_equal(loss.item(), expected_loss)
[ 9, 592, 11216 ]
def METHOD_NAME(self):
    gp = self.frame.grid_panel
    gui_loop(0.2)

    csize = self.frame.ClientSize

    # Hide 1 window

    # Hide top left
    gp.hide_viewport(self.frame.red)
    gui_loop(0.2)
    self.assertEqual(self.frame.blue.Size, (csize.x, gp.grid_layout.tr.size.y))
    self.assertEqual(self.frame.purple.Size, gp.grid_layout.bl.size)
    self.assertEqual(self.frame.brown.Size, gp.grid_layout.br.size)
    gp.show_viewport(self.frame.red)

    # Hide top right
    gp.hide_viewport(self.frame.blue)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, (csize.x, gp.grid_layout.tl.size.y))
    self.assertEqual(self.frame.purple.Size, gp.grid_layout.bl.size)
    self.assertEqual(self.frame.brown.Size, gp.grid_layout.br.size)
    gp.show_viewport(self.frame.blue)

    # Hide bottom left
    gp.hide_viewport(self.frame.purple)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, gp.grid_layout.tl.size)
    self.assertEqual(self.frame.blue.Size, gp.grid_layout.tr.size)
    self.assertEqual(self.frame.brown.Size, (csize.x, gp.grid_layout.br.size.y))
    gp.show_viewport(self.frame.purple)

    # Hide bottom right
    gp.hide_viewport(self.frame.brown)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, gp.grid_layout.tl.size)
    self.assertEqual(self.frame.blue.Size, gp.grid_layout.tr.size)
    self.assertEqual(self.frame.purple.Size, (csize.x, gp.grid_layout.bl.size.y))
    gp.show_viewport(self.frame.brown)

    # Hide 2 windows

    # Hide top
    gp.hide_viewport(self.frame.red)
    gp.hide_viewport(self.frame.blue)
    gui_loop(0.2)
    self.assertEqual(self.frame.purple.Size, (csize.x, gp.grid_layout.tl.size.y))
    self.assertEqual(self.frame.brown.Size, (csize.x, gp.grid_layout.bl.size.y))
    gp.show_viewport(self.frame.red)
    gp.show_viewport(self.frame.blue)

    # Hide right
    gp.hide_viewport(self.frame.blue)
    gp.hide_viewport(self.frame.brown)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, (csize.x, gp.grid_layout.tl.size.y))
    self.assertEqual(self.frame.purple.Size, (csize.x, gp.grid_layout.bl.size.y))
    gp.show_viewport(self.frame.brown)
    gp.show_viewport(self.frame.blue)

    # Hide bottom
    gp.hide_viewport(self.frame.purple)
    gp.hide_viewport(self.frame.brown)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, (csize.x, gp.grid_layout.tl.size.y))
    self.assertEqual(self.frame.blue.Size, (csize.x, gp.grid_layout.bl.size.y))
    gp.show_viewport(self.frame.brown)
    gp.show_viewport(self.frame.purple)

    # Hide left
    gp.hide_viewport(self.frame.red)
    gp.hide_viewport(self.frame.purple)
    gui_loop(0.2)
    self.assertEqual(self.frame.blue.Size, (csize.x, gp.grid_layout.tr.size.y))
    self.assertEqual(self.frame.brown.Size, (csize.x, gp.grid_layout.br.size.y))
    gp.show_viewport(self.frame.purple)
    gp.show_viewport(self.frame.red)

    # Hide 3 windows
    gp.set_shown_viewports(self.frame.red)
    gui_loop(0.2)
    self.assertEqual(self.frame.red.Size, csize)

    gp.set_shown_viewports(self.frame.blue)
    gui_loop(0.2)
    self.assertEqual(self.frame.blue.Size, csize)

    gp.set_shown_viewports(self.frame.purple)
    gui_loop(0.2)
    self.assertEqual(self.frame.purple.Size, csize)

    gp.set_shown_viewports(self.frame.brown)
    gui_loop(0.2)
    self.assertEqual(self.frame.brown.Size, csize)

    gp.set_shown_viewports(self.frame.yellow)
    gui_loop(0.2)
    self.assertEqual(self.frame.yellow.Size, csize)

    gp.show_grid_viewports()
[ 9, 753, 1179 ]
def METHOD_NAME(self, name):
[ 447 ]
def METHOD_NAME(self):
[ 9, 584, 2707, 485, 486 ]
def METHOD_NAME(x, window=None, window_len=7):
    r"""
    Computes the periodogram

    .. math::

        I(w) = \frac{1}{n} \Big| \sum_{t=0}^{n-1} x_t e^{itw} \Big|^2

    at the Fourier frequencies :math:`w_j := \frac{2 \pi j}{n}`,
    :math:`j = 0, \dots, n - 1`, using the fast Fourier transform.  Only the
    frequencies :math:`w_j` in :math:`[0, \pi]` and corresponding values
    :math:`I(w_j)` are returned.  If a window type is given then smoothing
    is performed.

    Parameters
    ----------
    x : array_like(float)
        A flat NumPy array containing the data to smooth
    window_len : scalar(int), optional(default=7)
        An odd integer giving the length of the window.  Defaults to 7.
    window : string
        A string giving the window type. Possible values are 'flat',
        'hanning', 'hamming', 'bartlett' or 'blackman'

    Returns
    -------
    w : array_like(float)
        Fourier frequencies at which periodogram is evaluated
    I_w : array_like(float)
        Values of periodogram at the Fourier frequencies

    """
    n = len(x)
    I_w = np.abs(fft(x))**2 / n
    w = 2 * np.pi * np.arange(n) / n           # Fourier frequencies
    w, I_w = w[:int(n/2)+1], I_w[:int(n/2)+1]  # Take only values on [0, pi]
    if window:
        I_w = smooth(I_w, window_len=window_len, window=window)
    return w, I_w
[ -1 ]
def METHOD_NAME(cls, *args, **kwargs):
    """Returns a global `IOLoop` instance.

    Most applications have a single, global `IOLoop` running on the
    main thread.  Use this method to get this instance from another
    thread.  To get the current thread's `IOLoop`, use `current()`.
    """
    # install ZMQIOLoop as the active IOLoop implementation
    # when using tornado 3
    if tornado_version >= (3,):
        PollIOLoop.configure(cls)
    loop = PollIOLoop.METHOD_NAME(*args, **kwargs)
    if not isinstance(loop, cls):
        warnings.warn(
            f"IOLoop.current expected instance of {cls!r}, got {loop!r}",
            RuntimeWarning,
            stacklevel=2,
        )
    return loop
[ 89 ]
def METHOD_NAME(_: WIDParams):
    """
    Please configure IUT's OOB flag with 'Remote OOB data present'

    TODO: The flag will be set when we handle wid 149 - set remote oob data
    """
    return True
[ 5794, 5795, -1 ]
def METHOD_NAME(self) -> str:
    return ""
[ 462 ]
async def METHOD_NAME(monkeypatch):
    mock_nmcli_output = """mock_wpa2:90:no:WPA2
[ 9, 1272, -1 ]
def METHOD_NAME(self):
    definitions = plugin.get_available_decision_definitions()

    # default camunda demo image contains 2 decision definitions, there *may* be
    # additional images deployed for local testing/development
    self.assertGreaterEqual(len(definitions), 2)
    identifiers = {definition.identifier for definition in definitions}
    self.assertTrue(
        {"invoice-assign-approver", "invoiceClassification"}.issubset(identifiers)
    )
[ 9, 245, 356, 2706 ]
def METHOD_NAME(parameters, lr, args, state=None):
    if args.optimizer == "sgd":
        optimizer = get_sgd_optimizer(
            parameters,
            lr,
            momentum=args.momentum,
            weight_decay=args.weight_decay,
            nesterov=args.nesterov,
            bn_weight_decay=args.bn_weight_decay,
        )
    elif args.optimizer == "rmsprop":
        optimizer = get_rmsprop_optimizer(
            parameters,
            lr,
            alpha=args.rmsprop_alpha,
            momentum=args.momentum,
            weight_decay=args.weight_decay,
            eps=args.rmsprop_eps,
            bn_weight_decay=args.bn_weight_decay,
        )
    if state is not None:
        optimizer.load_state_dict(state)

    return optimizer
[ 19, 968 ]
def METHOD_NAME(self, validated_data: dict):
    instance: ProxyProvider = super().METHOD_NAME(validated_data)
    instance.set_oauth_defaults()
    instance.save()
    return instance
[ 129 ]
def METHOD_NAME(samples):
    return np.abs(samples) >= delta
[ 4682, 1674, 667, 9 ]
def METHOD_NAME(self):
    v = self._unpack_from(4, ">f")
    return v
[ 203, 1819 ]
def METHOD_NAME(length, channels, min_timescale=1.0, max_timescale=1.0e4):
    position = torch.arange(length, dtype=torch.float)
    num_timescales = channels // 2
    log_timescale_increment = (
        math.log(float(max_timescale) / float(min_timescale)) / (num_timescales - 1)
    )
    inv_timescales = min_timescale * torch.exp(
        torch.arange(num_timescales, dtype=torch.float) * -log_timescale_increment
    )
    scaled_time = position.unsqueeze(0) * inv_timescales.unsqueeze(1)
    signal = torch.cat([torch.sin(scaled_time), torch.cos(scaled_time)], 0)
    signal = F.pad(signal, [0, 0, 0, channels % 2])
    signal = signal.view(1, channels, length)
    return signal
[ 19, 845, 900, 2481 ]
def METHOD_NAME(page):
    # Display completions.
    page_completions = completions[page * completions_per_page:
                                   (page + 1) * completions_per_page]

    page_row_count = int(math.ceil(len(page_completions) / float(column_count)))
    page_columns = [page_completions[i * page_row_count:(i + 1) * page_row_count]
                    for i in range(column_count)]

    result = []

    for r in range(page_row_count):
        for c in range(column_count):
            try:
                result.append(page_columns[c][r].text.ljust(max_compl_width))
            except IndexError:
                pass
        result.append('\n')

    cli.output.write(''.join(result))
    cli.output.flush()
[ 52 ]
def METHOD_NAME(sleep_binary):
    timeout = 30
    cmd = Command([sleep_binary, str(timeout)], timeout=3)
    start_time = time.time()
    cmd.execute()

    # Check the process is no longer around.
    assert cmd.getpid() is not None
    with pytest.raises(ProcessLookupError):
        os.kill(cmd.getpid(), 0)

    elapsed_time = time.time() - start_time
    assert elapsed_time < timeout
    assert cmd.getstate() == Command.TIMEDOUT
    assert cmd.getretcode() is None
[ 9, 462, 659 ]
def METHOD_NAME(self, power_override, power_set):
    return False
[ 0, 1928, 345 ]
def METHOD_NAME(self, template, dashboard=False):
    if not dashboard:
        return template.item({
            "name": "PostgreSQL CFS: Written byte/s",
            "key": "pgsql.cfs.activity[written_bytes]",
            "units": self.UNITS.bytes_per_second,
            "delay": self.Interval
        }) + template.item({
            "name": "PostgreSQL CFS: Scanned byte/s",
            "key": "pgsql.cfs.activity[scanned_bytes]",
            "units": self.UNITS.bytes_per_second,
            "delay": self.Interval
        }) + template.item({
            "name": "PostgreSQL CFS: Compressed Files",
            "key": "pgsql.cfs.activity[compressed_files]",
            "units": self.UNITS.none,
            "delay": self.Interval
        }) + template.item({
            "name": "PostgreSQL CFS: Scanned Files",
            "key": "pgsql.cfs.activity[scanned_files]",
            "units": self.UNITS.none,
            "delay": self.Interval
        }) + template.item({
            "name": "PostgreSQL CFS: Current Ratio",
            "key": "pgsql.cfs.activity[current_compress_ratio]",
            "units": self.UNITS.none,
            "delay": self.Interval
        }) + template.item({
            "name": "PostgreSQL CFS: Total Ratio",
            "key": "pgsql.cfs.activity[total_compress_ratio]",
            "units": self.UNITS.none,
            "delay": self.Interval
        })
    else:
        return []
[ 1768 ]
def METHOD_NAME(self, oai_set):
    """Return and remove from cache an OAI Set."""
    key = '{}:{}'.format(self.cache_prefix, oai_set)
    # TODO: Remove map as it needs to be string
    oai_sets = set(map(int, self.cache.smembers(key)))
    self.cache.delete(key)
    return oai_sets
[ 760, 1267, 10476, 0 ]
def METHOD_NAME(trivial, outpdf, R, owner, user, aes, metadata, err):
    with pytest.raises(Exception, match=err):
        trivial.save(
            outpdf,
            encryption=pikepdf.Encryption(
                R=R, owner=owner, user=user, aes=aes, metadata=metadata
            ),
        )
[ 9, 1068, 817 ]
def METHOD_NAME(
    self, providers: List[JsonDict], config_path: StrSequence
) -> Iterable[OEmbedEndpointConfig]:
    # Ensure it is the proper form.
    validate_config(
        _OEMBED_PROVIDER_SCHEMA,
        providers,
        config_path=config_path,
    )

    # Parse it and yield each result.
    for provider in providers:
        # Each provider might have multiple API endpoints, each of which
        # might have multiple patterns to match.
        for endpoint in provider["endpoints"]:
            api_endpoint = endpoint["url"]

            # The API endpoint must be an HTTP(S) URL.
            results = urlparse.urlparse(api_endpoint)
            if results.scheme not in {"http", "https"}:
                raise ConfigError(
                    f"Unsupported oEmbed scheme ({results.scheme}) for endpoint {api_endpoint}",
                    config_path,
                )

            patterns = [
                self._glob_to_pattern(glob, config_path)
                for glob in endpoint["schemes"]
            ]
            yield OEmbedEndpointConfig(
                api_endpoint, patterns, endpoint.get("formats")
            )
[ 214, 61, 187, 2275 ]
def METHOD_NAME(self):
    super().METHOD_NAME()
    self.integration = self.create_integration(
        organization=self.organization, external_id="github:1", provider="github"
    )
[ 0, 1 ]
def METHOD_NAME(cls, v, values):
    if v is None:
        filename = values.get("filename")
        if filename:
            mime_content_type, _ = guess_type(filename, strict=False)
            return mime_content_type
    return v
[ 1363, 459, 44 ]
def METHOD_NAME():
    parser = argparse.ArgumentParser(
        description='create the data for training the tydi score normalizer')
    parser.add_argument('--train_file', help='the QA train file', type=str)
    parser.add_argument('--eval_file', help='the QA dev file', type=str)
    parser.add_argument('--test_file', help='the QA test file', type=str)
    parser.add_argument('--search_result_location',
                        help='the output directory with the search result for the '
                             'train, dev and test queries',
                        type=str)
    parser.add_argument('--collection', help='The corpus file with all the passages',
                        type=str)
    parser.add_argument('--output_dir',
                        help='the output directory with the train and dev queries',
                        type=str)
    args = parser.parse_args()
    return args
[ 214, 134 ]
def METHOD_NAME(self):
    for cli in self.clients.values():
        self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=cli)
[ 9, 2901, 6478, 13415 ]
def METHOD_NAME() -> None:
    """(internal)

    Category: General Utility Functions
    """
    return _baplus.METHOD_NAME()
[ 1743, 200, 5220 ]
def METHOD_NAME(self, format="pint"):
    result = ""
    FD_mapping = self.get_prefix_mapping_component("FD")
    for FD in FD_mapping.values():
        FD_par = getattr(self, FD)
        result += FD_par.as_parfile_line(format=format)
    return result
[ 38, 12799 ]
def METHOD_NAME(): """ ArgParser for backend worker. Takes the socket name and socket type. :return: """ parser = argparse.ArgumentParser( prog="model-server-worker", description="Model Server Worker" ) parser.add_argument( "--sock-type", required=True, dest="sock_type", type=str, choices=["unix", "tcp"], help="Socket type the model service worker would use. The options are\n" "unix: The model worker expects to unix domain-socket\n" "tcp: The model worker expects a host-name and port-number", ) parser.add_argument( "--sock-name", required=False, dest="sock_name", type=str, help="If 'sock-type' is 'unix', sock-name is expected to be a string. " 'Eg: --sock-name "test_sock"', ) parser.add_argument( "--host", type=str, help="If 'sock-type' is 'tcp' this is expected to have a host IP address", ) parser.add_argument( "--port", type=str, help="If 'sock-type' is 'tcp' this is expected to have the host port to bind on", ) parser.add_argument( "--metrics-config", dest="metrics_config", type=str, help="Metrics configuration file", ) return parser
[ 578, 549, 1794, 335 ]
def METHOD_NAME(self, locs):
    """Compute bulk modulus field at locations."""
    (npts, dim) = locs.shape
    METHOD_NAME = (p_lambda + 2.0 / 3.0 * p_mu) * numpy.ones((1, npts, 1),
                                                             dtype=numpy.float64)
    return METHOD_NAME
[ 2278, 8322 ]
def METHOD_NAME(
    self,
    token: Union[str, uuid.UUID],
    name: str,
    auth_type: AuthType = AuthType.bearer,
    timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
) -> uuid.UUID:
    """
    Deletes query specified by name.
    """
    headers = {
        "Authorization": f"{auth_type.value} {token}",
    }
    response = self._call(
        method=Method.DELETE,
        url=f"{self.api.endpoints[ENDPOINT_QUERIES]}/{name}",
        headers=headers,
        timeout=timeout,
    )
    return response["id"]
[ 34, 539 ]
def METHOD_NAME(linearLayer, index, dim=1):
    W = paddle.index_select(linearLayer.weight, index, axis=dim).detach()
    if linearLayer.bias is not None:
        if dim == 0:
            b = paddle.assign(linearLayer.bias).detach()
        else:
            b = paddle.assign(
                L.index_select(linearLayer.bias, index, dim=0)
            ).detach()

    linearLayer.weight.stop_gradient = True
    linearLayer.weight.set_value(W)
    linearLayer.weight.stop_gradient = False

    if linearLayer.bias is not None:
        linearLayer.bias.stop_gradient = True
        linearLayer.bias.set_value(b)
        linearLayer.bias.stop_gradient = False
[ 4150, 373, 430 ]
def METHOD_NAME(self):
[ 9, 2439, 5695 ]
def METHOD_NAME(self, color):
    mode = None
    for key, val in SYSLED_MODES.items():
        if val == color:
            mode = key
            break
    if mode is None:
        return False
    else:
        return self._api_helper.write_txt_file(SYSLED_FNODE, mode)
[ 0, 452, 1643 ]
def METHOD_NAME(self):
    return {
        'type': 'rocketchat',
        'rocket_chat_username_override': self.rocket_chat_username_override,
        'rocket_chat_webhook_url': self.rocket_chat_webhook_url,
    }
[ 19, 100 ]
def METHOD_NAME(
    maintenance_test_setup, make_alert_receive_channel, mock_start_disable_maintenance_task
):
    organization, user = maintenance_test_setup
    alert_receive_channel = make_alert_receive_channel(
        organization, integration=AlertReceiveChannel.INTEGRATION_GRAFANA, author=user
    )
    mode = AlertReceiveChannel.MAINTENANCE
    duration = AlertReceiveChannel.DURATION_ONE_HOUR.seconds

    alert_receive_channel.start_maintenance(mode, duration, user)
    with pytest.raises(MaintenanceCouldNotBeStartedError):
        alert_receive_channel.start_maintenance(mode, duration, user)

    assert alert_receive_channel.maintenance_mode == mode
    assert alert_receive_channel.maintenance_duration == AlertReceiveChannel.DURATION_ONE_HOUR
    assert alert_receive_channel.maintenance_uuid is not None
    assert alert_receive_channel.maintenance_started_at is not None
    assert alert_receive_channel.maintenance_author == user
[ 9, 4107, 1911, 5334, 130, 447, 2430 ]
def METHOD_NAME(
    self, resource_group_name: str, vault_name: str, **kwargs: Any
) -> _models.PrivateLinkResourceListResult:
    """Gets the private link resources supported for the key vault.

    :param resource_group_name: Name of the resource group that contains the key vault. Required.
    :type resource_group_name: str
    :param vault_name: The name of the key vault. Required.
    :type vault_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PrivateLinkResourceListResult or the result of cls(response)
    :rtype: ~azure.mgmt.keyvault.v2023_02_01.models.PrivateLinkResourceListResult
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-02-01"))
    cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None)

    request = build_list_by_vault_request(
        resource_group_name=resource_group_name,
        vault_name=vault_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.METHOD_NAME.metadata["url"],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    _stream = False
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
[ 245, 604, 78 ]
def METHOD_NAME(self, eta, fun, args=(), kwargs=None, priority=0):
    kwargs = {} if not kwargs else kwargs
    return self.enter_at(self.Entry(fun, args, kwargs), eta, priority)
[ 128, 1541 ]
def METHOD_NAME(tmp_path: Path, raw, exception):
    config_filepath = tmp_path / "config.yml"
    config_filepath.write_text(raw)
    with raises(exception):
        with mock.patch.dict(environ, {}):
            FixtureConfig(config_filepath)
[ 9, 557, 200, 168 ]
def METHOD_NAME(command_tester_factory: CommandTesterFactory, poetry: Poetry) -> None:
    command_tester_factory("lock").execute()
    assert poetry.locker.lock.exists()
[ 74, 625 ]
def METHOD_NAME(self): """ Retrieves the high critical threshold temperature of thermal Returns: A float number, the high critical threshold temperature of thermal in Celsius up to nearest thousandth of one degree Celsius, e.g. 30.125 """ is_valid, high_crit_threshold = self.sensor.get_threshold("UpperCritical") if not is_valid: return super(Thermal, self).METHOD_NAME() return float(high_crit_threshold)
[ 19, 5020, 257, 853 ]
def METHOD_NAME(sh, escape, cmd, args, env):
    return exec_subprocess([sh, '-c', ' '.join(args)], env)
[ 2929, 597 ]
def METHOD_NAME(self, location: str, **kwargs: Any) -> AsyncIterable["_models.SignalRServiceUsage"]:
    """List resource usage quotas by location.

    :param location: the location like "eastus". Required.
    :type location: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SignalRServiceUsage or the result of
     cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.webpubsub.models.SignalRServiceUsage]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
    cls: ClsType[_models.SignalRServiceUsageList] = kwargs.pop("cls", None)

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        if not next_link:
            request = build_list_request(
                location=location,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.METHOD_NAME.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        deserialized = self._deserialize("SignalRServiceUsageList", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)  # type: ignore
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
[ 245 ]
def METHOD_NAME(filepath):
    with open(filepath, "r") as fp:
        return json.load(fp)
[ 557, 763 ]
def METHOD_NAME(): """ :rtype: list[str] """ # Earlier this was a `glob("%s/*.py" % _root_dir)`. But not anymore, since we have the new package structure. src_files = [] for root, dirs, files in os.walk(_root_dir): if root == _root_dir: root = "" else: assert root.startswith(_root_dir + "/") root = root[len(_root_dir) + 1 :] # relative to the root root += "/" # Ignore tests, or other irrelevant directories. if root == "": dirs[:] = ["returnn", "demos", "tools"] else: dirs[:] = sorted(dirs) # Ignore extern git submodules. dirs[:] = [d for d in dirs if not os.path.exists("%s/%s%s/.git" % (_root_dir, root, d))] for file in sorted(files): if not file.endswith(".py"): continue if file == "_setup_info_generated.py": continue src_files.append(root + file) return src_files
[ 416, 75, 1739, 1458, 1537 ]
def METHOD_NAME(num_classes, custom_color=None):
    """
    Returns the color map for visualizing the segmentation mask,
    which can support arbitrary number of classes.

    Args:
        num_classes (int): Number of classes.
        custom_color (list, optional): Save images with a custom color map.
            Default: None, use paddleseg's default color map.

    Returns:
        (list). The color map.
    """
    num_classes += 1
    color_map = num_classes * [0, 0, 0]
    for i in range(0, num_classes):
        j = 0
        lab = i
        while lab:
            color_map[i * 3] |= (((lab >> 0) & 1) << (7 - j))
            color_map[i * 3 + 1] |= (((lab >> 1) & 1) << (7 - j))
            color_map[i * 3 + 2] |= (((lab >> 2) & 1) << (7 - j))
            j += 1
            lab >>= 3
    color_map = color_map[3:]

    if custom_color:
        color_map[:len(custom_color)] = custom_color
    return color_map
[ 19, 36, 422, 245 ]
def METHOD_NAME():
    player = Player(device)
    with player.changed_volume():
        for chunk in sine(freq, seconds * RATE, PERIOD):
            player.play(chunk)
[ 1443 ]
def METHOD_NAME(xml):
    linear_operator(xml)
    max_valid_change(xml)
    bad_spinup_longwave(xml)
    sources(xml)

    pks = asearch.child_by_name(xml, "PKs")
    for pk in pks:
        pk_type = asearch.child_by_name(pk, "PK type")
        if pk_type.get('value') == 'surface balance implicit':
            print('updating seb monolithic')
            import seb_monolithic_to_evals
            seb_monolithic_to_evals.update_seb(xml)

    fixEvaluator(xml, "surface-snow_skin_potential", "snow-skin_potential")
    fixEvaluator(xml, "surface-snow_conductivity", "snow-conductivity")
    snow_distribution(xml)
    end_time_units(xml)

    import verbose_object
    verbose_object.fixVerboseObject(xml)
[ 86 ]
def METHOD_NAME(self, tb):
    """If bad_cb returns True then keep it.

    :param tb: traceback that caused exception
    :return:
    """
    with LiveExecution.lock:
        if self.bad_cb and not self.bad_cb(tb):
            self.bad_cb = None
[ 128, 1068, 905 ]
def METHOD_NAME(self):
    if self.settings.os in ("Windows", "Macos"):
        raise ConanInvalidConfiguration("Platform is currently not supported")

    if self.options.shared:
        del self.options.fPIC
    del self.settings.compiler.libcxx
    del self.settings.compiler.cppstd
[ 111 ]
def METHOD_NAME(cls):
    for username in cls.USERS_USED_IN_TEST:
        user = model.User.by_name(username)
        if user:
            user.email = username + '@test-domain.com'
            user.apikey = username + '_apikey'
    model.Session.commit()
[ 0, 21, 58, 219 ]
def METHOD_NAME(self, evt, station, det):
    channelBandPassFilter.run(
        evt, station, det,
        passband=passband_low, filter_type=filter_type, order=order_low, rp=0.1)
    channelBandPassFilter.run(
        evt, station, det,
        passband=passband_high, filter_type=filter_type, order=order_high, rp=0.1)
[ 2715, 3036, 527, 8106 ]
def METHOD_NAME(
    phase_factory, map_idea_factory, user, admin, user_factory, group_factory
):
    phase, _, project, item = setup_phase(
        phase_factory, map_idea_factory, phases.CollectPhase
    )
    anonymous, moderator, initiator = setup_users(project)
    creator = item.creator
    (
        project,
        group_member_in_org,
        group_member_in_pro,
        group_member_out,
    ) = setup_group_members(project, group_factory, user_factory)

    assert project.is_public
    with freeze_phase(phase):
        assert rules.has_perm(perm_name, anonymous, item)
        assert rules.has_perm(perm_name, user, item)
        assert rules.has_perm(perm_name, creator, item)
        assert rules.has_perm(perm_name, group_member_out, item)
        assert rules.has_perm(perm_name, group_member_in_org, item)
        assert rules.has_perm(perm_name, group_member_in_pro, item)
        assert rules.has_perm(perm_name, moderator, item)
        assert rules.has_perm(perm_name, initiator, item)
        assert rules.has_perm(perm_name, admin, item)
[ 9, 3200, 923 ]
async def METHOD_NAME(inference_pool: InferencePool):
    worker_pids = [pid for pid in inference_pool._workers]

    await inference_pool.close()

    assert len(inference_pool._workers) == 0
    for worker_pid in worker_pids:
        assert not check_pid(worker_pid)
[ 9, 1462 ]
def METHOD_NAME(http_request) -> dict:
    """
    This function parses the request URL and extracts the following:
    default operation, process identifier, output_ids, default mimetype.
    Info that cannot be determined from the URL will be None (default).

    The url is expected to be in the following format, all the levels are optional.
    [base_url]/[identifier]/[output_ids]

    :param http_request: the request URL
    :return: dict with the extracted info listed:
        base_url - [wps|processes|jobs|api/api_level]
        default_mimetype - determined by the base_url part:
            XML - if the base url == 'wps',
            JSON - if the base URL in ['api'|'jobs'|'processes']
        operation - also determined by the base_url part:
            ['api'|'jobs'] -> 'execute'
            processes -> 'describeprocess' or 'getcapabilities'
                ('describeprocess' if identifier is present as the next item,
                'getcapabilities' otherwise)
        api - api level, only expected if base_url=='api'
        identifier - the process identifier
        output_ids - if exist then it selects raw output with the name output_ids
    """
    operation = api = identifier = output_ids = default_mimetype = base_url = None
    if http_request:
        parts = str(http_request.path[1:]).split('/')
        i = 0
        if len(parts) > i:
            base_url = parts[i].lower()
            if base_url == 'wps':
                default_mimetype = 'xml'
            elif base_url in ['api', 'processes', 'jobs']:
                default_mimetype = 'json'
            i += 1
            if base_url == 'api':
                api = parts[i]
                i += 1
            if len(parts) > i:
                identifier = parts[i]
                i += 1
                if len(parts) > i:
                    output_ids = parts[i]
                    if not output_ids:
                        output_ids = None
    if base_url in ['jobs', 'api']:
        operation = 'execute'
    elif base_url == 'processes':
        operation = 'describeprocess' if identifier else 'getcapabilities'
    d = {}
    if operation:
        d['operation'] = operation
    if identifier:
        d['identifier'] = identifier
    if output_ids:
        d['output_ids'] = output_ids
    if default_mimetype:
        d['default_mimetype'] = default_mimetype
    if api:
        d['api'] = api
    if base_url:
        d['base_url'] = base_url
    return d
[ 214, 721, 274 ]
def METHOD_NAME(self) -> collections.defaultdict:
    """Initializes latent weights dict."""
[ 176, 8067 ]
def METHOD_NAME(cmd, repo):
    repo = repo_url_to_name(repo)
    secret_store = _get_github_token_secret_store(cmd)
    cache = secret_store.load()

    if isinstance(cache, list):
        for entry in cache:
            if isinstance(entry, dict) and repo in entry.get("repos", []):
                return entry.get("value")
    return None
[ 557, 1115, 466, 280, 596 ]
def METHOD_NAME(self): "Cross product with a Vector."
[ 436 ]
def METHOD_NAME(variant_scalar_rgb):
    assert dr.allclose(mi.warp.square_to_tent([0.5, 0.5]), [0, 0])
    assert dr.allclose(mi.warp.square_to_tent([0, 0.5]), [-1, 0])
    assert dr.allclose(mi.warp.square_to_tent([1, 0]), [1, -1])

    check_inverse(mi.warp.square_to_tent, mi.warp.tent_to_square)
    check_warp_vectorization("square_to_tent")
[ 9, 3489, 24, 9006 ]
def METHOD_NAME(new: RunDescriberV2Dict) -> RunDescriberV0Dict:
    """
    Convert a v2 RunDescriber Dict to a v0 RunDescriber Dict
    """
    return v1_to_v0(v2_to_v1(new))
[ 820, 24, 2724 ]
def METHOD_NAME(cli_ctx, *_):
    return get_container_service_client(cli_ctx).container_services
[ 2325, 224, 3186 ]
def METHOD_NAME(self, poly):
    if "points" in poly.attrib:
        self._start_path("M" + poly.attrib["points"])
[ 214, 2853 ]
def METHOD_NAME(self):
    step = MultipleFileUpload(workersrcs=["dir/noexist_path"], masterdest="master_dest")
    yield self.setup_config_single_step(step)

    build = yield self.doForceBuild(wantSteps=True, wantLogs=True)
    self.assertEqual(build['results'], FAILURE)

    res = yield self.checkBuildStepLogExist(build, "Cannot open file")
    self.assertTrue(res)
[ 9, 654, 1985, 107, 171, 172 ]
def METHOD_NAME(self, func, *args, **kwargs):
    self.pending.append(TaskUnit(func, args, kwargs))
[ 238, 758 ]
def METHOD_NAME(self):
    args = self.get_connect_args()
    (token, headers, status, sid, expected_server_sig) = self._scram_auth(
        args["user"], args["password"]
    )
    self.assertEqual(status, 200)
    values = {}
    for kv_str in headers["authentication-info"].split():
        key, _, value = kv_str.rstrip(",").partition("=")
        values[key] = value
    self.assertEqual(values["sid"], sid)
    self.assertIn("data", values)
    server_final = base64.b64decode(values["data"])
    server_sig = scram.parse_server_final_message(server_final)
    self.assertEqual(server_sig, expected_server_sig)

    proto_ver = edbdef.CURRENT_PROTOCOL
    proto_ver_str = f"v_{proto_ver[0]}_{proto_ver[1]}"
    mime_type = f"application/x.edgedb.{proto_ver_str}.binary"
    with self.http_con() as con:
        con.request(
            "POST",
            f"/db/{args['database']}",
            body=protocol.Execute(
                annotations=[],
                allowed_capabilities=protocol.Capability.ALL,
                compilation_flags=protocol.CompilationFlag(0),
                implicit_limit=0,
                command_text="SELECT 42",
                output_format=protocol.OutputFormat.JSON,
                expected_cardinality=protocol.Cardinality.AT_MOST_ONE,
                input_typedesc_id=b"\0" * 16,
                output_typedesc_id=b"\0" * 16,
                state_typedesc_id=b"\0" * 16,
                arguments=b"",
                state_data=b"",
            ).dump()
            + protocol.Sync().dump(),
            headers={
                "Content-Type": mime_type,
                "Authorization": f"Bearer {token.decode('ascii')}",
                "X-EdgeDB-User": args["user"],
            },
        )
        content, headers, status = self.http_con_read_response(con)

    self.assertEqual(status, 200)
    self.assertEqual(headers, headers | {"content-type": mime_type})

    uint32_unpack = struct.Struct("!L").unpack
    msgs = []
    while content:
        mtype = content[0]
        (msize,) = uint32_unpack(content[1:5])
        msg = protocol.ServerMessage.parse(mtype, content[5 : msize + 1])
        msgs.append(msg)
        content = content[msize + 1 :]

    self.assertIsInstance(msgs[0], protocol.CommandDataDescription)
    self.assertIsInstance(msgs[1], protocol.Data)
    self.assertEqual(bytes(msgs[1].data[0].data), b"42")
    self.assertIsInstance(msgs[2], protocol.CommandComplete)
    self.assertEqual(msgs[2].status, "SELECT")
    self.assertIsInstance(msgs[3], protocol.ReadyForCommand)
    self.assertEqual(
        msgs[3].transaction_state,
        protocol.TransactionState.NOT_IN_TRANSACTION,
    )
[ 9, 721, 2433, 17480, 1205 ]
def METHOD_NAME(
    url: str, api_key: str, cluster_id: str, page_size: int, current_page: int, **kwargs
):
    """list jobs in a cluster

    Args:
        url (str): url of the server
        cluster_id (str): id of a milvus instance (for cloud)
        page_size (int): pagination size
        current_page (int): pagination

    Returns:
        json: response of the restful interface
    """
    request_url = f"https://{url}/v1/vector/collections/import/list"

    params = {
        "clusterId": cluster_id,
        "pageSize": page_size,
        "currentPage": current_page,
    }

    resp = _get_request(url=request_url, api_key=api_key, params=params, **kwargs)
    _handle_response(url, resp.json())
    return resp
[ 245, 512, 494 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_query_param(
            "api-version", "2022-01-01",
            required=True,
        ),
    }
    return parameters
[ 539, 386 ]
def METHOD_NAME(self, path):
    """Get previous commit of a given path starting from the revision."""
    return self.repository.METHOD_NAME(path, revision=self.revision)
[ 19, 1511, 1160 ]
def METHOD_NAME() -> str: return "1.2.3"
[ 549, 281 ]
async def METHOD_NAME(self, ctx: commands.Context, option: str = "") -> None:
    """
    Get a random python hacktober issue from Github.

    If the command is run with beginner (`.hacktoberissues beginner`):
    It will also narrow it down to the "good first issue" label.
    """
    async with ctx.typing():
        issues = await self.get_issues(ctx, option)
        if issues is None:
            return
        issue = random.choice(issues["items"])
        embed = self.format_embed(issue)
    await ctx.send(embed=embed)
[ -1 ]
def METHOD_NAME(outreach_connection_config) -> None:
    get_connector(outreach_connection_config).test_connection()
[ 9, -1, 550, 9 ]
def METHOD_NAME(
    a: Sequence[str],
    b: Sequence[str],
    fromfile: str = "",
    tofile: str = "",
    fromfiledate: str = "",
    tofiledate: str = "",
    n: int = 3,
    lineterm: str = "\n",
) -> Iterator[str]: ...
[ 198, 2443 ]
def METHOD_NAME(
    self, mock_mediapackage_channels, mock_medialive_indexed_channels
):
    """Command should do nothing when there is no mediapackage to process."""
    out = StringIO()
    mock_mediapackage_channels.return_value = []
    mock_medialive_indexed_channels.return_value = {}
    call_command("clean_mediapackages", stdout=out)
    self.assertEqual("", out.getvalue())
    out.close()
[ 9, 1356, 1357, 654, 14694 ]
def METHOD_NAME(self, token, client, request):
    return True
[ 250, 204 ]
def METHOD_NAME(
    mocked_logger, mocked_data_from_fxa, phone_user
):
    social_account = SocialAccount.objects.get(user=phone_user)
    sample_subscription_data = {
        "_subscription_type": "web",
        "cancel_at_period_end": False,
        "created": 1673427825,
        "current_period_end": 1681203825,
        "current_period_start": 1678525425,
        "end_at": None,
        "latest_invoice": "D861D444-0006",
        "latest_invoice_items": {},
        "plan_id": "price_1J000000000000000000000p",
        "product_id": "prod_notPhone",
        "product_name": "MDN Plus",
    }
    # Sample subscription data from
    # https://mozilla.sentry.io/issues/4062336484/events/b798a75eb05c4f67937309bf8148ab8e/?project=4503976951152641
    mocked_data_from_fxa.return_value = {"subscriptions": [sample_subscription_data]}

    (
        date_subscribed_phone,
        date_phone_subscription_start,
        date_phone_subscription_end,
    ) = get_phone_subscription_dates(social_account)

    assert date_subscribed_phone is None
    assert date_phone_subscription_start is None
    assert date_phone_subscription_end is None
    mocked_logger.assert_not_called()
[ 9, 19, 3180, 835, 306, 835, 130 ]
def METHOD_NAME():
    return _get_sample_doc('parent_child_data_source.json', DataSourceConfiguration)
[ 19, 365, 1458, 41, 252, 366, 44 ]
def METHOD_NAME(self): """ Test that the test search template has the expected data stored in the database. """ expected_result = frozenset( [ ("name", "template"), ("user", self.user1), ] ) self._test_db_object(expected_result=expected_result, model_cls=SearchTemplate)
[ 9, -1, 578 ]
def METHOD_NAME() -> str:
    print_instruction("Which routing framework?")
    router = input(f"{get_options_string(ROUTERS)}\n") or 0
    return ROUTERS[int(router)]
[ 19, 2704, 1486 ]
def METHOD_NAME(self) -> str: """ Resource Id. """ return pulumi.get(self, "id")
[ 147 ]
def METHOD_NAME(self, label):
    """Add a label to the resource.

    Args:
        label (str): string with the label information.
    """
    if label in self._labels:
        return
    self._labels.append(label)
    self.save()
[ 238, 636 ]
def METHOD_NAME():
    actions.insert("while () ")
    actions.edit.left()
    actions.edit.left()
[ 544, 551, 795 ]
def METHOD_NAME():
    np.set_printoptions(precision=3, linewidth=150)
    print("channel construction Bhattacharyya bounds by Arikan")
    n = 10
    m = 2 ** n
    k = m // 2
    design_snr = 0.0
    mu = 32

    z_params = load_z_parameters(m, design_snr, mu)
    z_bounds = bhattacharyya_bounds(design_snr, m)
    print(z_params[-10:])
    if 0:
        import matplotlib.pyplot as plt
        plt.plot(z_params)
        plt.plot(z_bounds)
        plt.show()
[ 57 ]
def METHOD_NAME(cls):
    print("starting class: {} execution".format(cls.__name__))
    cls.stats_executor = MockStatsExecutor()
    cls.stats_executor.initialize(None)
[ 102, 2 ]
def METHOD_NAME(self, mocked_force_login):
    try:
        self.assertPageIsEditable(self.page)
    except self.failureException:
        pass
    mocked_force_login.assert_called_with(
        self._pageiseditable_superuser, settings.AUTHENTICATION_BACKENDS[0]
    )

    try:
        self.assertPageIsEditable(self.page, user=self.superuser)
    except self.failureException:
        pass
    mocked_force_login.assert_called_with(
        self.superuser, settings.AUTHENTICATION_BACKENDS[0]
    )
[ 9, 137, 6835, 2659, -1 ]
def METHOD_NAME(self, series, urls):
    series_with_urls = []
    for value, url in zip(series, urls):
        series_with_urls.append({
            'value': value,
            'meta': {
                'clickUrl': url,
            },
        })
    return series_with_urls
[ 4045, 41, 2248 ]
def METHOD_NAME(
    self,
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.OperationsListResult"]
    """This lists all the available Microsoft Support REST API operations.

    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either OperationsListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.support.models.OperationsListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationsListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-04-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.METHOD_NAME.metadata['url']  # type: ignore
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        deserialized = self._deserialize('OperationsListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return None, iter(list_of_elem)

    def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            error = self._deserialize(_models.ExceptionResponse, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
[ 245 ]
def METHOD_NAME(self): """De-serialize coreference-related objects (CorefMention, CorefEntity). This internal method will be called automatically whenever any coref-related method is called. It iterates through all nodes in the document and creates the objects based on the info in MISC (stored in attributes Entity, SplitAnte, Bridge). """ if self._eid_to_entity is None: udapi.core.coref.load_coref_from_misc(self)
[ 557, 5225 ]