text: string (lengths 15 to 7.82k)
ids: sequence (lengths 1 to 7)
def METHOD_NAME(self, queries: List[str]) -> List[str]:
    return queries
[ 977, 146, 365, 578 ]
def METHOD_NAME(self, code):
    proc = subprocess.Popen(
        sys.executable, stdin=subprocess.PIPE, stdout=subprocess.PIPE
    )
    proc.stdin.write(utf8(code))
    proc.stdin.close()
    proc.wait()
    stdout = proc.stdout.read()
    proc.stdout.close()
    if proc.returncode != 0:
        raise RuntimeError(
            "Process returned %d. stdout=%r" % (proc.returncode, stdout)
        )
    return to_unicode(stdout)
[ 22, 6492 ]
def METHOD_NAME(self, event, source=None, data=None):
    """@brief Notify subscribers of an event.

    @param self
    @param event Event to send. Must be a hashable object. It is acceptable to notify
        for an event for which there are no subscribers.
    @param source The object sending the notification. If not set, the source defaults
        to self, the object on which the notify() method was called.
    @param data Optional data value to send with the notification.
    """
    # Look up subscribers for this event.
    try:
        event_info = self._subscribers[event]
    except KeyError:
        # Nobody has subscribed to this event, so nothing to do.
        TRACE.debug("Not sending notification because no subscribers: event=%s", event)
        return

    # Look up subscribers for this event + source combo.
    try:
        source_subscribers = event_info[1][source]
    except KeyError:
        # No source-specific subscribers.
        source_subscribers = []

    # Create combined subscribers list. Exit if no subscribers matched.
    subscribers = event_info[0] + source_subscribers
    if not subscribers:
        TRACE.debug("Not sending notification because no matching subscribers: event=%s", event)
        return

    # Create the notification object now that we know there are some subscribers.
    if source is None:
        source = self
    note = Notification(event, source, data)

    TRACE.debug("Sending notification to %d subscribers: %s", len(subscribers), note)

    # Tell everyone!
    for cb in subscribers:
        cb(note)
[ 959 ]
def METHOD_NAME(self, format_arg, keys):
    self.all_formats()[format_arg] = keys
[ 372, 275 ]
def METHOD_NAME(self, organization, config):
    installation = self.get_installation(config.get("installation"), organization.id)
    client = installation.get_client()

    repo_id = config["identifier"]
    instance = installation.model.metadata["instance"]

    try:
        project = client.get_project(repo_id)
    except Exception as e:
        raise installation.raise_error(e)

    config.update(
        {
            "instance": instance,
            "path": project["path_with_namespace"],
            "name": project["name_with_namespace"],
            "external_id": "{}:{}".format(instance, project["id"]),
            "project_id": project["id"],
            "url": project["web_url"],
        }
    )
    return config
[ 19, 1230, 365 ]
def METHOD_NAME(op):
    if op.tag == "conv3d_transpose_ncdhw":
        schedule_direct_conv3d_cuda(
            cfg, s, op.output(0), "NCDHW", "conv3d_transpose_ncdhw.cuda"
        )
[ 1076 ]
def METHOD_NAME(self):
[ 9, 3533, 40, 41, 1745, 584 ]
def METHOD_NAME(tensor, name=None, prefix=None, print_summary=False):
    """Adds an image summary for the given tensor.

    Args:
      tensor: a variable or op tensor with shape [batch,height,width,channels]
      name: the optional name for the summary.
      prefix: An optional prefix for the summary names.
      print_summary: If `True`, the summary is printed to stdout when the
        summary is computed.

    Returns:
      An image `Tensor` of type `string` whose contents are the serialized
      `Summary` protocol buffer.
    """
    summary_name = _get_summary_name(tensor, name, prefix)
    # If print_summary, then we need to make sure that this call doesn't add the
    # non-printing op to the collection. We'll add it to the collection later.
    collections = [] if print_summary else None
    op = summary.image(
        name=summary_name, tensor=tensor, collections=collections)
    if print_summary:
        op = logging_ops.Print(op, [tensor], summary_name)
        ops.add_to_collection(ops.GraphKeys.SUMMARIES, op)
    return op
[ 238, 660, 2718 ]
def METHOD_NAME(self, vals):
    if not self._context.get("copy_pos_config") and "name" not in vals:
        for pos in self:
            sequence = pos.l10n_es_simplified_invoice_sequence_id
            sequence.check_simplified_invoice_unique_prefix()
    if "name" in vals:
        prefix = self.l10n_es_simplified_invoice_prefix.replace(
            self.name, vals["name"]
        )
        if prefix != self.l10n_es_simplified_invoice_prefix:
            self.l10n_es_simplified_invoice_sequence_id.update(
                {
                    "prefix": prefix,
                    "name": (
                        self.l10n_es_simplified_invoice_sequence_id.name.replace(
                            self.name, vals["name"]
                        )
                    ),
                }
            )
    return super().METHOD_NAME(vals)
[ 77 ]
def METHOD_NAME(self, filename, baseaddr, normname):
    try:
        pe = PE.peFromMemoryObject(self, baseaddr)
        vhash = e_symcache.symCacheHashFromPe(pe)
        symcache = self.symcache.getCacheSyms(vhash)
        if symcache is None:
            # Symbol type 0 for now...
            symcache = [(rva, 0, name, e_resolv.SYMSTOR_SYM_SYMBOL)
                        for rva, ord, name in pe.getExports()]
            self.symcache.setCacheSyms(vhash, symcache)
        self.impSymCache(symcache, symfname=normname, baseaddr=baseaddr)
    except Exception as e:
        logger.error('Error Parsing Binary (%s): %s', normname, e)
[ 2773, 214, 808 ]
def METHOD_NAME(self): pass
[ 72, 710 ]
def METHOD_NAME(script, new_name):
    """ The `args` / `kwargs` params are the same as in `api.Script`.

    :param new_name: The new name of the script.
    :param script: The source Script object.
    :return: list of changed lines/changed files
    """
    return Refactoring(_rename(script.usages(), new_name))
[ 2010 ]
def METHOD_NAME(self, size=None):
    ret = []
    data = self.read()
    while data:
        pos = data.find(b"\n")
        if pos < 0:
            ret.append(data)
            data = b""
        else:
            line, data = data[:pos + 1], data[pos + 1:]
            ret.append(line)
    return ret
[ 5357 ]
def METHOD_NAME(self) -> str: ...
[ 2723, 156 ]
def METHOD_NAME(collection, source_filter):
    return find_object(
        collection,
        source_filter,
        EDA_FILTER_PATHS,
        ".py",
    )
[ 416, 1458, 527 ]
def METHOD_NAME():
    def model(T=10, q=1, r=1, phi=0.0, beta=0.0):
        def transition(state, i):
            x0, mu0 = state
            x1 = numpyro.sample("x", dist.Normal(phi * x0, q))
            mu1 = beta * mu0 + x1
            y1 = numpyro.sample("y", dist.Normal(mu1, r))
            numpyro.deterministic("y2", y1 * 2)
            return (x1, mu1), (x1, y1)

        mu0 = x0 = numpyro.sample("x_0", dist.Normal(0, q))
        y0 = numpyro.sample("y_0", dist.Normal(mu0, r))
        _, xy = scan(transition, (x0, mu0), jnp.arange(T))
        x, y = xy
        return jnp.append(x0, x), jnp.append(y0, y)

    T = 10
    num_samples = 100
    kernel = NUTS(model)
    mcmc = MCMC(kernel, num_warmup=100, num_samples=num_samples)
    mcmc.run(random.PRNGKey(0), T=T)
    assert set(mcmc.get_samples()) == {"x", "y", "y2", "x_0", "y_0"}
    mcmc.print_summary()
    samples = mcmc.get_samples()
    x = samples.pop("x")[0]  # take 1 sample of x

    # this tests for the composition of condition and substitute
    # this also tests if we can use `vmap` for predictive.
    future = 5
    predictive = Predictive(
        numpyro.handlers.condition(model, {"x": x}),
        samples,
        return_sites=["x", "y", "y2"],
        parallel=True,
    )
    result = predictive(random.PRNGKey(1), T=T + future)
    expected_shape = (num_samples, T + future)
    assert result["x"].shape == expected_shape
    assert result["y"].shape == expected_shape
    assert result["y2"].shape == expected_shape
    assert_allclose(result["x"][:, :T], jnp.broadcast_to(x, (num_samples, T)))
    assert_allclose(result["y"][:, :T], samples["y"])
[ 9, 793 ]
def METHOD_NAME(self, port_num):
    # Check for invalid port_num
    if port_num < self.port_start or port_num > self.port_end:
        return False
    try:
        reg_file = open("/sys/class/swps/port" + str(port_num) + "/present")
    except IOError as e:
        print("Error: unable to open file: %s" % str(e))
        return False
    reg_value = int(reg_file.readline().rstrip())
    if reg_value == 0:
        return True
    return False
[ 19, 4061 ]
def METHOD_NAME(self):
    """
    Nominal case: existing username, correct password, activated user,
    'remember me' checked.
    Expected: successful login, redirect to homepage, session expiration age set.
    """
    result = self.client.post(
        self.login_url,
        {
            "username": self.correct_username,
            "password": self.correct_password,
            "remember": "remember",
        },
        follow=False,
        REMOTE_ADDR=self.test_ip,
    )
    self.assertRedirects(result, reverse("homepage"))

    # Check cookie setting
    self.assertFalse(self.client.session.get_expire_at_browser_close())
    self.assertEqual(self.client.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)

    # Check IP recording
    profile = Profile.objects.get(user=self.profile.user)
    self.assertEqual(profile.last_ip_address, self.test_ip)
[ 9, 4203, 61, 4204, 1273 ]
def METHOD_NAME(self, index) -> Tuple[np.ndarray, np.ndarray, np.ndarray, Dict[str, Any]]:
    """
    Read a sample from the disk and return (image, mask, joints, extras) tuple
    :param index: Sample index
    :return: Tuple of (image, mask, joints, extras)
        image - Numpy array of [H,W,3] shape, which represents input RGB image
        mask - Numpy array of [H,W] shape, which represents a binary mask with zero values corresponding to an
            ignored region which should not be used for training (contribute to loss)
        joints - Numpy array of [Num Instances, Num Joints, 3] shape, which represents the skeletons of the instances
        extras - Dictionary of extra information about the sample that should be included in `extras` dictionary.
    """
    raise NotImplementedError()
[ 557, 734 ]
def METHOD_NAME(cls, api_version=DEFAULT_API_VERSION):
    """Module depends on the API version:

    * 0.0.0: :mod:`v0.models<multiapisecurity.v0.models>`
    * 1.0.0: :mod:`v1.models<multiapisecurity.v1.models>`
    """
    if api_version == '0.0.0':
        from ..v0 import METHOD_NAME
        return METHOD_NAME
    elif api_version == '1.0.0':
        from ..v1 import METHOD_NAME
        return METHOD_NAME
    raise ValueError("API version {} is not available".format(api_version))
[ 379 ]
def METHOD_NAME(self): pass
[ 1462 ]
def METHOD_NAME(request):
    # Don't try and download IERS during tests
    # See https://github.com/astropy/astropy/issues/12998 for issue that this
    # sidesteps
    old_value = iers.conf.auto_download
    iers.conf.auto_download = False
    yield
    iers.conf.auto_download = old_value
[ 654, 136, 14253 ]
def METHOD_NAME():
    ts = create_track_set()
    serialized_ts = serialize_track_set(ts)
    deserialized_ts = deserialize_track_set(serialized_ts)
    nose.tools.assert_true(compare_track_set(ts, deserialized_ts))
[ 9, 183, 3068, 0 ]
def METHOD_NAME(self):
    self.perform_test(widget, color)
[ 9 ]
def METHOD_NAME(theme, delete_css, css, css_link):
    delete_css(theme, [css, css_link])
    assert not theme.css.exists()
[ 9, 107, 344, 392, 1046, 673, 1108 ]
def METHOD_NAME(self) -> str:
    return pulumi.get(self, "tracker_name")
[ 4102, 156 ]
def METHOD_NAME():
    world_wk = pd.read_csv(
        f"{BASE_URL}/export_world.csv",
        usecols=[
            "world",
            "date",
            "estimated_daily_excess_deaths",
            "estimated_daily_excess_deaths_ci_95_top",
            "estimated_daily_excess_deaths_ci_95_bot",
        ],  # type: ignore
    )
    world_wk_100k = pd.read_csv(
        f"{BASE_URL}/export_world_per_100k.csv",
        usecols=[
            "world",
            "date",
            "estimated_daily_excess_deaths_per_100k",
            "estimated_daily_excess_deaths_ci_95_top_per_100k",
            "estimated_daily_excess_deaths_ci_95_bot_per_100k",
        ],  # type: ignore
    )
    world_cum = pd.read_csv(
        f"{BASE_URL}/export_world_cumulative.csv",
        usecols=[
            "world",
            "date",
            "cumulative_estimated_daily_excess_deaths",
            "cumulative_estimated_daily_excess_deaths_ci_95_top",
            "cumulative_estimated_daily_excess_deaths_ci_95_bot",
        ],  # type: ignore
    )
    world_cum_100k = pd.read_csv(
        f"{BASE_URL}/export_world_per_100k_cumulative.csv",
        usecols=[
            "world",
            "date",
            "cumulative_estimated_daily_excess_deaths_per_100k",
            "cumulative_estimated_daily_excess_deaths_ci_95_top_per_100k",
            "cumulative_estimated_daily_excess_deaths_ci_95_bot_per_100k",
        ],  # type: ignore
    )
    world_wk_merge = pd.merge(world_wk, world_wk_100k, on=["world", "date"], how="outer")
    world_cum_merge = pd.merge(world_cum, world_cum_100k, on=["world", "date"], how="outer")
    world_all = pd.merge(world_cum_merge, world_wk_merge, on=["world", "date"], how="outer")
    return world_all
[ 557, 3021, 365 ]
def METHOD_NAME(pipeline_response):
    deserialized = self._deserialize("RestorableSqlDatabasesListResult", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)  # type: ignore
    return None, iter(list_of_elem)
[ 297, 365 ]
def METHOD_NAME(self):
    self.config = Config()
    self.config.general['log_level'] = logging.DEBUG
    self.maker = CDBMaker(self.config)
    # Building a new CDB from two files (full_build)
    csvs = ['../examples/cdb.csv', '../examples/cdb_2.csv']
    self.cdb = self.maker.prepare_csvs(csvs, full_build=True)
[ 0, 1 ]
def METHOD_NAME(mode):
    """Return True if mode is from a character special device file."""
    return S_IFMT(mode) == S_IFCHR
[ 1305, 10871 ]
def METHOD_NAME() -> None:
    code = """
    import six
    from enum import Enum, EnumMeta

    class FooMeta(EnumMeta):
        pass

    class Foo(six.with_metaclass(FooMeta, Enum)):  #@
        bar = 1
    """
    klass = astroid.extract_node(code)
    assert next(klass.ancestors()).name == "Enum"
[ 9, 26, 41, 11390, 1206, 4490 ]
def METHOD_NAME(cmd):
    """Win32 version of os.system() that works with network shares.

    Note that this implementation returns None, as meant for use in IPython.

    Parameters
    ----------
    cmd : str or list
        A command to be executed in the system shell.

    Returns
    -------
    int : child process' exit code.
    """
    # The controller provides interactivity with both
    # stdin and stdout
    #import _process_win32_controller
    #_process_win32_controller.system(cmd)

    with AvoidUNCPath() as path:
        if path is not None:
            cmd = '"pushd %s &&"%s' % (path, cmd)
        return process_handler(cmd, _system_body)
[ 112 ]
def METHOD_NAME(self, opt, stepSize):
[ 7367, 355, 182 ]
def METHOD_NAME(self):
    irows = []
    for obj in self.table.selectedIndexes():
        irow = obj.row()
        irows.append(irow)
    irows.sort()
    for irow in reversed(irows):
        self.table.removeRow(irow)
        name = self.names.pop(irow)
        del self.cameras[name]
        #print('  removing irow=%s name=%r' % (irow, name))
[ 69, 34 ]
def METHOD_NAME(file, as_float=False):
    sampwidth_types = {
        1: np.uint8,
        2: np.int16,
        4: np.int32
    }
    with wave.open(file, "rb") as wav:
        params = wav.getparams()
        data = wav.readframes(params.nframes)
        if params.sampwidth in sampwidth_types:
            data = np.frombuffer(data, dtype=sampwidth_types[params.sampwidth])
        else:
            raise RuntimeError("Couldn't process file {}: unsupported sample width {}"
                               .format(file, params.sampwidth))
        data = np.reshape(data, (params.nframes, params.nchannels))
        if as_float:
            data = (data - np.mean(data)) / (np.std(data) + 1e-15)
    return params.framerate, data
[ 203, 4097 ]
def METHOD_NAME(sd, prefix):
    sd_k = sd.keys()
    if "{}transformer.resblocks.0.attn.in_proj_weight".format(prefix) in sd_k:
        keys_to_replace = {
            "{}class_embedding".format(prefix): "vision_model.embeddings.class_embedding",
            "{}conv1.weight".format(prefix): "vision_model.embeddings.patch_embedding.weight",
            "{}positional_embedding".format(prefix): "vision_model.embeddings.position_embedding.weight",
            "{}ln_post.bias".format(prefix): "vision_model.post_layernorm.bias",
            "{}ln_post.weight".format(prefix): "vision_model.post_layernorm.weight",
            "{}ln_pre.bias".format(prefix): "vision_model.pre_layrnorm.bias",
            "{}ln_pre.weight".format(prefix): "vision_model.pre_layrnorm.weight",
        }

        for x in keys_to_replace:
            if x in sd_k:
                sd[keys_to_replace[x]] = sd.pop(x)

        if "{}proj".format(prefix) in sd_k:
            sd['visual_projection.weight'] = sd.pop("{}proj".format(prefix)).transpose(0, 1)

        sd = transformers_convert(sd, prefix, "vision_model.", 48)
    return sd
[ 197, 24, 7137 ]
def METHOD_NAME(imported_module=None):
    global _thread, DELAY
    if imported_module:
        _thread = imported_module
        DELAY = 2
    if test_support.verbose:
        print
        print "*** Using %s as _thread module ***" % _thread
    test_support.run_unittest(LockTests, MiscTests, ThreadTests)
[ 9, 57 ]
async def METHOD_NAME() -> Dict[str, List[Dict[str, Optional[datetime]]]]:
    return {
        table: [
            {**record}
            for record in await postgresql_client.fetch(f"SELECT %s FROM public.{table};" % ", ".join(columns))
        ]
        for table, columns in TIMESTAMP_COLUMNS.items()
    }
[ 1047, 3244 ]
def METHOD_NAME(User): pass
[ 176, 379 ]
def METHOD_NAME(self, tmpdir):
    ml = MapLocal()
    with taddons.context(ml) as tctx:
        tmpfile = tmpdir.join("foo.jpg")
        tmpfile.write("foo")
        tctx.configure(ml, map_local=["|//example.org/images|" + str(tmpfile)])
        f = tflow.tflow()
        f.request.url = b"https://example.org/images/foo.jpg"
        f.kill()
        ml.request(f)
        assert not f.response
[ 9, 137, 5622 ]
def METHOD_NAME(subscription_id: str, **kwargs: Any) -> HttpRequest:
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2020-11-01-preview"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.AppPlatform/skus")
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
[ 56, 245, 377 ]
async def METHOD_NAME(
    cls, port: str, loop: Optional[asyncio.AbstractEventLoop]
) -> HeaterShakerDriver:
    """
    Create a heater-shaker driver.

    Args:
        port: port or url of heater shaker
        loop: optional event loop

    Returns: driver
    """
    connection = await AsyncResponseSerialConnection.METHOD_NAME(
        port=port,
        baud_rate=HS_BAUDRATE,
        timeout=DEFAULT_HS_TIMEOUT,
        ack=HS_ACK,
        loop=loop,
        error_keyword=HS_ERROR_KEYWORD,
        async_error_ack=HS_ASYNC_ERROR_ACK,
    )
    return cls(connection=connection)
[ 129 ]
def METHOD_NAME():
    import oracledb

    return oracledb
[ 512, 6610 ]
def METHOD_NAME(self):
    current_dir = pathlib.Path(__file__).resolve().parent
    config_file = current_dir.joinpath("mysql_test.yaml")
    workflow_config = load_config_file(config_file)
    workflow = MetadataWorkflow.create(workflow_config)
    workflow.execute()
    workflow.stop()

    config = workflow.config.workflowConfig.openMetadataServerConfig
    client = OpenMetadata(config).client

    self.assertIsNotNone(
        client.get("/services/databaseServices/name/local_mysql_test")
    )

    client.delete(
        f"/services/databaseServices/"
        f"{client.get('/services/databaseServices/name/local_mysql_test')['id']}"
        f"?hardDelete=true&recursive=true"
    )
[ 9, 750, 1072 ]
def METHOD_NAME(self, program_config: ProgramConfig, predictor_config: CxxConfig) -> bool:
    return True
[ 137, 735, 1205 ]
def METHOD_NAME(contents, name=None):
    """
    Decode an EXR-encoded image's meta data.

    Args:
      contents: A `Tensor` of type `string`. 0-D. The EXR-encoded image.
      name: A name for the operation (optional).

    Returns:
      A `Tensor` of type `uint8` and shape of `[height, width, 4]` (RGBA).
    """
    shape, dtype, channel = core_ops.io_decode_exr_info(contents, name=name)
    return shape, dtype, channel
[ 1268, 7698, 100 ]
def METHOD_NAME(self):
    '''
    Starts (Subscribes) the client.
    '''
    self.sub = rospy.Subscriber(self.topic, Odometry, self.__callback)
[ 447 ]
def METHOD_NAME(cls):
    test_pkg_path = os.path.join(test_settings.TEST_ROOT, "fixtures", "testing_prj", "testing_prj", "pkg")
    management.call_command("packages", operation="load_package", source=test_pkg_path, yes=True)
    cls.add_users()
[ 0, 1, 2 ]
def METHOD_NAME(modeladmin, request, queryset):  # pylint: disable=W0613
    """
    Admin action to change complaint status to 'Resolved'
    Queryset contains the selected complaints and this is a batch SQL UPDATE
    process for the complaint status
    """
    queryset.update(status='Resolved')
[ 1743, 947, 4558 ]
def METHOD_NAME(self):
    if self.status == httplib.UNAUTHORIZED:
        raise InvalidCredsError("Invalid provider credentials")

    body = self.parse_body()

    if isinstance(body, basestring):
        return body + " (HTTP Code: %d)" % self.status

    error = body.get("error", None)
    debug = body.get("debug", None)

    # If we only have one of "error" or "debug", use the one that we have.
    # If we have both, use both, with a space character in between them.
    value = "No message specified"
    if error is not None:
        value = error
    if debug is not None:
        value = debug
    if error is not None and value is not None:
        value = error + " " + value

    value = value + " (HTTP Code: %d)" % self.status

    return value
[ 214, 168 ]
def METHOD_NAME():
    class DummyExecutor(Executor):
        @requests(on='/foo')
        def foo(self, docs, **kwargs):
            ...

        @requests(on='/bar')
        def bar(self, docs, **kwargs):
            ...

    return DummyExecutor
[ 5153 ]
def METHOD_NAME(port, address):
    """
    This method is deprecated. Please use is_port_available().
    """
    warnings.warn("deprecated, use is_port_available() instead.",
                  DeprecationWarning)
    return is_port_available(port, address)
[ 137, 237, 3712 ]
def METHOD_NAME(
    service_region: str, zones: List[str]
) -> List[Dict[str, str]]:
    """Default VPC endpoint service."""
    return BaseBackend.default_vpc_endpoint_service_factory(
        service_region, zones, "codepipeline", policy_supported=False
    )
[ 235, 7755, 841, 549 ]
def METHOD_NAME(self):
    # Staff users have some privileges.
    user = UserProfile.objects.get(email='[email protected]')
    group = Group.objects.create(name='Staff', rules='Admin:*')
    GroupUser.objects.create(group=group, user=user)
    self.client.force_login(UserProfile.objects.get(email='[email protected]'))
    self.assert_status('admin:index', 200, follow=True)
[ 9, 1045, 21 ]
def METHOD_NAME(self): """ensure m4 version matches installed spec""" m4 = which(self.prefix.bin.m4) out = m4("--version", output=str.split, error=str.split) assert str(self.spec.version) in out
[ 9, 281 ]
def METHOD_NAME(self, nb_frames_list):
[ 9, 2935 ]
def METHOD_NAME(screen, scene):
    scenes = []
    effects = [
        Julia(screen),
        ClockFrame(screen, 0, 0),
        ClockFrame(screen, screen.width - 26, 0),
        ClockFrame(screen, 0, screen.height - 13),
        ClockFrame(screen, screen.width - 26, screen.height - 13),
        DemoFrame(screen),
    ]
    scenes.append(Scene(effects, -1))
    screen.play(scenes, stop_on_resize=True, start_scene=scene)
[ 2660 ]
def METHOD_NAME(self):
    self.site_count = Integer(
        "Number of image sites per well",
        1,
        minval=1,
        doc="""\
[ 129, 817 ]
def METHOD_NAME(self, node: pending_xref) -> None:
    docnames = self.citation_refs.setdefault(node['reftarget'], set())
    docnames.add(self.env.docname)
[ 2611, 11836, 272 ]
def METHOD_NAME(data, fn, scale=1.0, cmap='Greys'):
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    sizes = np.shape(data)
    height = float(sizes[0])
    width = float(sizes[1])

    fig = plt.figure()
    fig.set_size_inches(width/height, 1, forward=False)
    ax = plt.Axes(fig, [0., 0., 1., 1.])
    ax.set_axis_off()
    fig.add_axes(ax)

    if len(list(data.shape)) > 2:
        ax.imshow(data)
    else:
        ax.imshow(data, cmap=cmap)
    plt.savefig(fn, dpi=height*scale)
    plt.close()
[ 73, 660 ]
def METHOD_NAME(composite_test_map):
    composite_test_map.plot()
[ 9, 1288, 3209, 422 ]
def METHOD_NAME(self, indicator, positions, chunk, additional=None):
    if additional is None:
        additional = {}
    if self.distribute._has_mdistr() or indicator.use_morphologies():
        try:
            morphologies, rotations = self.distribute._specials(
                self.partitions, indicator, positions
            )
        except EmptySelectionError as e:
            selectors = ", ".join(f"{s}" for s in e.selectors)
            raise DistributorError(
                "%property% distribution of `%strategy.name%` couldn't find any"
                + f" morphologies with the following selector(s): {selectors}",
                "Morphology",
                self,
            ) from None
    elif self.distribute._has_rdistr():
        rotations = self.distribute(
            "rotations", self.partitions, indicator, positions
        )
        morphologies = None
    else:
        morphologies, rotations = None, None
    distr = self.distribute._curry(self.partitions, indicator, positions)
    additional.update(
        {prop: distr(prop) for prop in self.distribute.properties.keys()}
    )
    self.scaffold.METHOD_NAME(
        indicator.cell_type,
        positions=positions,
        rotations=rotations,
        morphologies=morphologies,
        additional=additional,
        chunk=chunk,
    )
[ 2038, 383 ]
def METHOD_NAME(self, text):
    # python re, remove html tags
    METHOD_NAME = re.compile('<.*?>')
    return re.sub(METHOD_NAME, '', text)
[ 1356 ]
def METHOD_NAME(string, why):
    "A do-nothing-special-to-the-input, just-return-it function"
    return string
[ 2989 ]
def METHOD_NAME(data, X, Y):
    """calculates the \sigma_x - \sigma_y term used for z position estimation"""
    A = data.max() - data.min()  # amplitude

    # threshold at half maximum and subtract threshold
    dr = numpy.maximum(data - data.min() - 0.5*A, 0).squeeze()
    dr = dr/dr.sum()

    x0 = (X[:,None]*dr).sum()
    y0 = (Y[None, :]*dr).sum()

    xn = (X - x0)[:,None]
    yn = (Y - y0)[None, :]

    #sig_xl = (numpy.maximum(0, x0 - X)[:,None]*dr).sum()/(drs)
    sig_xl = (numpy.maximum(0, xn*costheta - yn*sintheta)*dr).sum()
    #sig_xr = (numpy.maximum(0, X - x0)[:,None]*dr).sum()/(drs)

    #sig_yu = (numpy.maximum(0, y0 - Y)[None, :]*dr).sum()/(drs)
    sig_yu = (numpy.maximum(0, xn*sintheta + yn*costheta)*dr).sum()
    #sig_yd = (numpy.maximum(0, Y - y0)[None, :]*dr).sum()/(drs)

    return A, x0, y0, sig_xl - sig_yu
[ 1407, 434 ]
def METHOD_NAME(self): """ :param cards: the list of PSOLID cards """ #print("N[%s] = %s" % (self.type, self.n)) if self.n: i = self.property_id.argsort() self.property_id = self.property_id[i] #print("PSOLID.property_id =", self.property_id) self.material_id = self.material_id[i] self.cordm = self.cordm[i] self.integ = self.integ[i] self.stress = self.stress[i] self.isop = self.isop[i] self.fctn = self.fctn[i] unique_pids = unique(self.property_id) if len(unique_pids) != len(self.property_id): raise RuntimeError('There are duplicate PSOLID IDs...') else: self.property_id = array([], dtype='int32') self.material_id = array([], dtype='int32')
[ 56 ]
def METHOD_NAME():
    valid_ips = ['129.241.75.1', '10.0.25.62', '2001:700:1::abcd', 'fe80::baad']
    invalid_ips = ['www.uninett.no', '92835235', '5:4', '-5325']

    for ip in valid_ips:
        assert util.is_valid_ip(ip), "%s should be valid" % ip
    for ip in invalid_ips:
        assert not util.is_valid_ip(ip), "%s should be invalid" % ip
[ 9, 137, 1205, 1213 ]
def METHOD_NAME(img_size, dtype):
    """Asymmetric image."""
    asymmetric_vol = AsymmetricVolume(img_size, C=1, dtype=dtype).generate()
    METHOD_NAME = asymmetric_vol.project(np.eye(3, dtype=dtype))
    pf = pf_transform(METHOD_NAME)
    return METHOD_NAME, pf
[ 5669, 660 ]
def METHOD_NAME(name: str) -> str:
    if name not in function_args:
        return name + "()"
    return name + "(" + ",".join(["uint256"] * function_args[name][0]) + ")"
[ 275, 559, 2701 ]
def METHOD_NAME(self):
[ 250, 3451, 2247 ]
def METHOD_NAME(page: Page, option_text: str, k: int):
    """Delete an option from a multiselect widget.

    Parameters
    ----------
    page : Page
        The playwright page to use.
    option_text : str
        The text of the option to delete.
    k : int
        The index of the multiselect widget to delete from.
    """
    multiselect_elem = page.locator(".stMultiSelect").nth(k)
    multiselect_elem.locator(
        f'span[data-baseweb="tag"] span[title="{option_text}"] + span[role="presentation"]'
    ).first.click()
[ 1269, 280, 17697, 5090 ]
def METHOD_NAME(self):
[ 3125 ]
def METHOD_NAME(self, arg1: str, arg2: str, arg3: str) -> None:
    scale_value = self.gui_scale.get()
    if arg2 == "-1":
        if scale_value <= LARGEST_SCALE - SCALE_INTERVAL:
            self.gui_scale.set(round(scale_value + SCALE_INTERVAL, 2))
        else:
            self.gui_scale.set(round(LARGEST_SCALE, 2))
    elif arg2 == "1":
        if scale_value >= SMALLEST_SCALE + SCALE_INTERVAL:
            self.gui_scale.set(round(scale_value - SCALE_INTERVAL, 2))
        else:
            self.gui_scale.set(round(SMALLEST_SCALE, 2))
[ 270, 930 ]
def METHOD_NAME(self):
    MayaCmds.file(new=True, force=True)
    self.__abcStitcher = [os.environ['AbcStitcher']]
    self.__files = []
[ 0, 1 ]
def METHOD_NAME(seq, values):
    if not values or len(values) == 0:
        return
    postgres = get_postgres_cursor()
    max_id = values[len(values) - 1][0]
    postgres.execute(f"SELECT setval('{seq}', {max_id});")
    connection = postgres.connection
    postgres.close()
    connection.close()
[ 1112, 147 ]
def METHOD_NAME(cls):
    FormProcessorTestUtils.delete_all_cases()
    cls.case_type_obj.delete()
    super().METHOD_NAME()
[ 531, 481, 2 ]
def METHOD_NAME(self, node):
    if not node.inputs['File Path'].is_linked:
        return
    files = node.inputs['File Path'].sv_get()

    if not len(files[0]) == 1:
        print('FCStd write node support just 1 file at once')
        return

    fc_file = files[0][0]

    if node.obj_format == 'mesh':
        if any((node.inputs['Verts'].is_linked, node.inputs['Faces'].is_linked)):
            verts_in = node.inputs['Verts'].sv_get(deepcopy=False)
            pols_in = node.inputs['Faces'].sv_get(deepcopy=False)
            verts, pols = match_long_repeat([verts_in, pols_in])
            fc_write_parts(fc_file, verts, pols, node.part_name, None, node.obj_format)

    elif node.obj_format == 'solid':
        if node.inputs['Solid'].is_linked:
            solid = node.inputs['Solid'].sv_get()
            fc_write_parts(fc_file, None, None, node.part_name, solid, node.obj_format)

    else:
        return
[ 77, 325, 1396 ]
def METHOD_NAME(self, args, return_unknown):
    # add optional args with defaults
    arg_dest = []
    arg_vals = []
    for opt in self.opt:
        arg_dest.append(opt.dest)
        arg_vals.append(opt.default)

    # deal with unknown arguments, if needed
    unknown = []

    def consume_unknown():
        while args and not args[0].startswith("-"):
            unknown.append(args.pop(0))

    # parse all args
    parsed_pos = False
    while args or not parsed_pos:
        if args and args[0].startswith("-") and args[0] != "-" and args[0] != "--":
            # optional arg
            a = args.pop(0)
            if a in ("-h", "--help"):
                self.usage(True)
                sys.exit(0)
            found = False
            for i, opt in enumerate(self.opt):
                if a in opt.names:
                    arg_vals[i] = opt.parse(a, args)
                    found = True
                    break
            if not found:
                if return_unknown:
                    unknown.append(a)
                    consume_unknown()
                else:
                    raise _ArgError("unknown option %s" % a)
        else:
            # positional arg
            if parsed_pos:
                if return_unknown:
                    unknown = unknown + args
                    break
                else:
                    raise _ArgError("extra args: %s" % " ".join(args))
            for pos in self.pos:
                arg_dest.append(pos.dest)
                arg_vals.append(pos.parse(pos.names[0], args))
            parsed_pos = True
            if return_unknown:
                consume_unknown()

    # build and return named tuple with arg values
    values = namedtuple("args", arg_dest)(*arg_vals)
    return (values, unknown) if return_unknown else values
[ 214, 335 ]
def METHOD_NAME(self):
    url = reverse("groups.add_member", locale="en-US", args=[self.group_profile.slug])
    r = self.client.get(url)
    self.assertEqual(405, r.status_code)

    r = self.client.post(url, {"users": self.member.username})
    self.assertEqual(302, r.status_code)
    assert self.member in self.group_profile.group.user_set.all()
[ 9, 238, 1823 ]
def METHOD_NAME(self, task, config, remember_rejected=False):
    """Filter entries already accepted on previous runs."""
    config = self.prepare_config(config)
    if config is False:
        logger.debug('{} is disabled', self.keyword)
        return

    fields = config.get('fields')
    local = config.get('local')

    for entry in task.entries:
        # construct list of values looked
        values = []
        for field in fields:
            if field not in entry:
                continue
            if entry[field] not in values and entry[field]:
                values.append(str(entry[field]))
        if values:
            logger.trace('querying for: {}', ', '.join(values))
            # check if SeenField.value is any of the values
            found = db.search_by_field_values(
                field_value_list=values, task_name=task.name, local=local, session=task.session
            )
            if found:
                logger.debug(
                    "Rejecting '{}' '{}' because of seen '{}'",
                    entry['url'],
                    entry['title'],
                    found.value,
                )
                se = (
                    task.session.query(db.SeenEntry)
                    .filter(db.SeenEntry.id == found.seen_entry_id)
                    .one()
                )
                entry.reject(
                    'Entry with {} `{}` is already marked seen in the task {} at {}'.format(
                        found.field, found.value, se.task, se.added.strftime('%Y-%m-%d %H:%M')
                    ),
                    remember=remember_rejected,
                )
[ 69, 758, 527 ]
def METHOD_NAME(self):
    self.assertEqual("foo", self._cfg.get_team())
[ 9, 19, 2957 ]
def METHOD_NAME(self) -> bool:
    return self._outside_buildarea or self.callDecoration("getBuildPlateNumber") < 0
[ 137, 261, 56, 690 ]
async def METHOD_NAME(history_server):
    """only end return reversed order"""
    end = datetime.utcnow() + timedelta(days=6)
    res = await history_server.var.read_raw_history(None, end, 0)
    assert 20 == len(res)
    assert res[-1].Value.Value == history_server.values[0]
    assert res[0].Value.Value == history_server.values[-1]
[ 9, 351, 486, 203, 75, 41, 1798 ]
def METHOD_NAME(environment, **kwargs):
    INIT_DONE.wait()
    node = NearNodeProxy(environment)
    ft_contract_code = environment.parsed_options.fungible_token_wasm
    num_ft_contracts = environment.parsed_options.num_ft_contracts
    funding_account = NearUser.funding_account
    parent_id = funding_account.key.account_id

    funding_account.refresh_nonce(node.node)

    environment.ft_contracts = []
    # TODO: Create accounts in parallel
    for i in range(num_ft_contracts):
        account_id = environment.account_generator.random_account_id(
            parent_id, '_ft')
        contract_key = key.Key.from_random(account_id)
        ft_account = Account(contract_key)
        ft_contract = FTContract(ft_account, ft_account, ft_contract_code)
        ft_contract.install(node, funding_account)
        environment.ft_contracts.append(ft_contract)
[ 69, 18054, 176 ]
def METHOD_NAME(kube_apis, test_namespace, mtls_secret, tls_secret, policy):
    print(f"Create egress-mtls secret")
    mtls_secret_name = create_secret_from_yaml(kube_apis.v1, test_namespace, mtls_secret)

    print(f"Create tls secret")
    tls_secret_name = create_secret_from_yaml(kube_apis.v1, test_namespace, tls_secret)

    print(f"Create egress-mtls policy")
    pol_name = create_policy_from_yaml(kube_apis.custom_objects, policy, test_namespace)

    return mtls_secret_name, tls_secret_name, pol_name
[ 102, 54 ]
def METHOD_NAME():
    for test_list in pythonToCOM("FLOAT"):
        testhelper(com_obj.mFloat, Single, test_list, equality_func=AlmostEqual)

    # Min/Max float values
    AssertError(OverflowError, com_obj.mFloat, 3.402823e+039)
[ 9, 1819 ]
def METHOD_NAME(self): pass
[ 950 ]
def METHOD_NAME():
    # Load and process GEOSX results
    # File path
    prefix = "../../../../../../../inputFiles/hydraulicFracturing/"
    hdf5File = prefix + "KGD_validation_output.hdf5"
    xmlFile1Path = prefix + "kgdValidation_base.xml"
    xmlFile2Path = prefix + "kgdValidation_benchmark.xml"

    # Read simulation parameters from XML file
    xMin, xMax, yMin, yMax, zMin, zMax, nx, ny, nz = getMeshSettings(xmlFile2Path)
    fracHeight = abs(zMax - zMin)
    dx = abs(xMax - xMin) / nx
    dy = abs(yMax - yMin) / ny
    dz = abs(zMax - zMin) / nz
    zmean = (zMax + zMin) / 2.

    # Read simulation output from HDF5 file
    # Global Coordinate of Element Center
    hf = hdf5_wrapper.hdf5_wrapper(hdf5File)
    xl = hf['pressure elementCenter']
    xl = np.array(xl)
    xcord = xl[-1, :, 0]
    ycord = xl[-1, :, 1]
    zcord = xl[-1, :, 2]
    tl = hf['pressure Time']
    tl = np.array(tl)

    # Load pressure
    fpre = hf['pressure']
    fpre = np.array(fpre)

    # Load elementAperture
    aper = hf['elementAperture']
    aper = np.array(aper)

    # Load elementArea
    area = hf['elementArea']
    area = np.array(area)

    # Query simulation results
    xloc_58 = 0.015
    xloc_57 = 0.041
    xloc_lvdt = 0.0285
    wellPressure = np.zeros([len(tl)])
    G58Pressure = np.zeros([len(tl)])
    G57Pressure = np.zeros([len(tl)])
    LVDTAperture = np.zeros([len(tl)])
    fracArea = np.zeros([len(tl)])
    for j in range(0, len(tl)):
        xcord = xl[j, :, 0]
        ycord = xl[j, :, 1]
        zcord = xl[j, :, 2]
        temp = 0
        for i in range(0, len(aper[0, :])):
            if abs(xcord[i] / (dx / 2.) - 1) < 0.01 and abs(ycord[i]) < 0.01 and abs(zcord[i] / (zmean - dz / 2.) - 1) < 0.01:
                wellPressure[j] = fpre[j, i]
            if abs(xcord[i] - xloc_58) < 0.001 and abs(ycord[i]) < 0.01 and abs(zcord[i] / (zmean - dz / 2.) - 1) < 0.01:
                G58Pressure[j] = fpre[j, i]
            if abs(xcord[i] - xloc_57) < 0.001 and abs(ycord[i]) < 0.01 and abs(zcord[i] / (zmean - dz / 2.) - 1) < 0.01:
                G57Pressure[j] = fpre[j, i]
            if abs(xcord[i] - xloc_lvdt) < 0.001 and abs(ycord[i]) < 0.01 and abs(zcord[i] / (zmean - dz / 2.) - 1) < 0.01:
                LVDTAperture[j] = aper[j, i]
            if aper[j, i] > 1.0e-5:
                temp += area[j, i]
        fracArea[j] = temp

    # Output fracture characteristics
    header = ' time wpressure 58pressure 57pressure Laperture area'
    timehist = []
    for i in range(0, len(tl)):
        time = tl[i]
        pressure1 = wellPressure[i]
        pressure2 = G58Pressure[i]
        pressure3 = G57Pressure[i]
        aperture = LVDTAperture[i]
        farea = fracArea[i]
        timehist.append(
            [float(time), float(pressure1), float(pressure2), float(pressure3), float(aperture), float(farea)])

    np.savetxt('model_results.txt', timehist, header=header, fmt='%10.4g', comments='')
[ 57 ]
def METHOD_NAME(client):
    info = client.scheduler_info()
    memory = 0
    for name, w_info in info['workers'].items():
        memory += w_info['metrics']['memory']
    return memory
[ 1794, 1645 ]
def METHOD_NAME(self, str_dict):
    new_dict = {}
    for key in str_dict.keys():
        new_dict[key.lower()] = str_dict[key]
    return new_dict
[ 24, 826 ]
def METHOD_NAME(self, _):
    """
    Yield a problem when an ini file cannot be opened.
    """
    with muted_logging():
        cfg = Configuration.from_path("invalid path")
        self.assertEqual(len(cfg.get_problems()), 1)
[ 9, 10469, 130, 622 ]
def METHOD_NAME():
    r = Route()

    # driver terminals
    r.driver_terminals.append(Route.build_wire([1,1,1,1], 'M1'))
    r.driver_terminals.append(Route.build_wire([2,1,2,1], 'M1'))
    r.driver_terminals.append(Route.build_wire([3,1,3,1], 'M1'))
    r.driver_terminals.append(Route.build_wire([1,2,1,2], 'M1'))
    r.driver_terminals.append(Route.build_wire([2,2,2,2], 'M1'))
    r.driver_terminals.append(Route.build_wire([3,2,3,2], 'M1'))

    # receiver terminals
    r.receiver_terminals.append(Route.build_wire([6,4,6,4], 'M1'))
    r.receiver_terminals.append(Route.build_wire([7,4,7,4], 'M1'))
    r.receiver_terminals.append(Route.build_wire([8,4,8,4], 'M1'))
    r.receiver_terminals.append(Route.build_wire([6,5,6,5], 'M1'))
    r.receiver_terminals.append(Route.build_wire([7,5,7,5], 'M1'))
    r.receiver_terminals.append(Route.build_wire([8,5,8,5], 'M1'))

    # primitive wiring
    r.wires.append(Route.build_wire([1,1,3,1], 'M2'))
    r.wires.append(Route.build_wire([1,2,3,2], 'M2'))
    r.wires.append(Route.build_wire([6,4,8,4], 'M2'))
    r.wires.append(Route.build_wire([6,5,8,5], 'M2'))

    return r
[ 56, 1441 ]
def METHOD_NAME(self):
    with temporary_env({"SERVICES": "foobar", "EAGER_SERVICE_LOADING": "1"}):
        result = get_enabled_apis()
        assert len(result) == 1
        assert "foobar" in result
[ 9, 343, 549, 529, 237 ]
def METHOD_NAME():
    with pytest.raises(AttributeError) as excinfo:
        CompositeDataSource().query([Filter("type", "=", "indicator")])
    assert "CompositeDataSource has no data sources" == str(excinfo.value)
[ 9, 3209, 914, 539, 45, 168 ]
def METHOD_NAME(self): """ Returns filter shorcuts """ shortcut_list = [ { "name": "no_photo", "label": "No current photo", "query": {"no_photo": ["True"]}, }, { "name": "current_election", "label": "In a current election", "query": {"current_election": ["True"]}, }, { "name": "uploaded_by", "label": "Uploaded by a robot 🤖", "query": {"uploaded_by": ["bot"]}, }, ] query = dict(self.request.GET) METHOD_NAME = {"list": shortcut_list} for shortcut in METHOD_NAME["list"]: shortcut["querystring"] = urlencode(shortcut["query"], doseq=True) if shortcut["query"] == query: shortcut["active"] = True METHOD_NAME["active"] = shortcut return METHOD_NAME
[ 9461 ]
def METHOD_NAME(self): """Run the plugin.""" self.config = self.state.document.settings.env.config self.env = self.state.document.settings.env self.record_dependencies = \ self.state.document.settings.record_dependencies output_nodes = [] location = os.path.normpath( os.path.join(self.env.srcdir, self.config.autoyaml_root + '/' + self.arguments[0])) if os.path.exists(location): print("This path exists") else: raise AutoYAMLException('location "%s" does not exists.' % ( location)) if os.path.isfile(location): logger.debug('[autoyaml] parsing file: %s', location) try: output_nodes.extend(self._parse_file(location)) except Exception as e: raise AutoYAMLException('Failed to parse YAML file: %s' % (location)) from e else: raise AutoYAMLException('%s:%s: location "%s" is not a file.' % ( self.env.doc2path(self.env.docname, None), self.content_offset - 1, location)) self.record_dependencies.add(location) return output_nodes
[ 22 ]
def METHOD_NAME(self, parser):
    create_parser = self.add_subparser(parser, 'create')
    delete_parser = self.add_subparser(parser, 'delete')
    list_parser = self.add_subparser(parser, 'list')

    for parser in [create_parser, delete_parser]:
        parser.add_argument(
            'name', nargs=1,
            help='The product type name. Mandatory.'
        )

    create_parser.add_argument(
        '--coverage-type', '-c', action='append', dest='coverage_type_names',
        default=[], help=(
        )
    )
    create_parser.add_argument(
        '--mask-type', '-m', action='append', dest='mask_type_names',
        default=[], help=(
        )
    )
    create_parser.add_argument(
        '--validity-mask-type', action='append',
        dest='validity_mask_type_names', default=[], help=(
        )
    )
    create_parser.add_argument(
        '--browse-type', '-b', action='append', dest='browse_type_names',
        default=[], help=(
        )
    )

    delete_parser.add_argument(
        '--force', '-f', action='store_true', default=False,
        help='Also remove all products associated with that type.'
    )

    list_parser.add_argument(
        '--no-detail', action="store_false", default=True, dest='detail',
        help="Disable the printing of details of the product type."
    )
[ 238, 134 ]
def METHOD_NAME(self) -> bool: ...
[ 137, 2812 ]
def METHOD_NAME(self, worker_id: str):
    return {'worker_name': f'UNITTEST_MOCK_{worker_id}'}
[ 19, 1794, 156 ]
def METHOD_NAME(tmpdir, datasets_o, datasets_m):
    nn.clear_parameters()

    ctx = get_extension_context(
        'cpu', device_id=0, type_config='float')
    nn.set_default_context(ctx)

    batch_size = 64
    x = nn.Variable([batch_size, 1, 28, 28])
    Affine = PF.affine(x, 1, name='Affine')
    Sigmoid = F.sigmoid(Affine)

    target = nn.Variable([batch_size, 1])
    target.data.fill(1)
    BinaryCrossEntropy = F.binary_cross_entropy(Sigmoid, target)

    solver = S.Adam()
    solver.set_parameters(nn.get_parameters())
    solver.set_learning_rate(5e-4)

    contents = {
        'global_config': {
            'default_context': ctx
        },
        'training_config': {
            'max_epoch': 100,
            'iter_per_epoch': 23,
            'save_best': True,
            'monitor_interval': 10
        },
        'networks': [
            {
                'name': 'Main',
                'batch_size': batch_size,
                'outputs': {'BinaryCrossEntropy': BinaryCrossEntropy},
                'names': {'x': x}
            },
            {
                'name': 'MainValidation',
                'batch_size': batch_size,
                'outputs': {'BinaryCrossEntropy': BinaryCrossEntropy},
                'names': {'x': x}
            },
            {
                'name': 'MainRuntime',
                'batch_size': batch_size,
                'outputs': {'Sigmoid': Sigmoid},
                'names': {'x': x}
            }
        ],
        'datasets': [
            {
                'name': 'dataset1',
                'uri': 'DATASET_TRAINING1',
                'cache_dir': 'here_it_is',
                'shuffle': True,
                'batch_size': batch_size,
                'no_image_normalization': False,
                'variables': {'x': x, 'BinaryCrossEntropy': BinaryCrossEntropy}
            },
            {
                'name': 'dataset2',
                'uri': 'DATASET_TRAINING2',
                'cache_dir': 'here_it_is',
                'shuffle': True,
                'batch_size': batch_size,
                'no_image_normalization': False,
                'variables': {'x': x, 'BinaryCrossEntropy': BinaryCrossEntropy},
            }
        ],
        'optimizers': [
            {
                'name': 'optimizer',
                'solver': solver,
                'network': 'Main',
                'dataset': datasets_o,
                'weight_decay': 0,
                'lr_decay': 1,
                'lr_decay_interval': 1,
                'update_interval': 1
            }
        ],
        'monitors': [
            {
                'name': 'train_error',
                'network': 'MainValidation',
                'dataset': datasets_m
            },
            {
                'name': 'valid_error',
                'network': 'MainValidation',
                'dataset': datasets_m
            }
        ],
        'executors': [
            {
                'name': 'Executor',
                'network': 'MainRuntime',
                'data': ['x'],
                'output': ['Sigmoid']
            }
        ]
    }

    tmpdir.ensure(dir=True)
    tmppath = tmpdir.join('testsave.nnp')
    nnp_file = tmppath.strpath
    nnabla.utils.save.save(nnp_file, contents)
    nnabla.utils.load.load([nnp_file])
[ 9, 73, 557, 457, 4146 ]