text (string, lengths 15-7.82k) | ids (sequence, lengths 1-7) |
---|---|
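Each row below pairs a Python function with its name masked as METHOD_NAME (the `text` column) with a short list of integers (the `ids` column). A minimal sketch of loading and inspecting a dataset with this schema via the `datasets` library, assuming it is hosted on the Hugging Face Hub; the repository path is a placeholder:

```python
# Minimal sketch, assuming a Hub dataset with "text" and "ids" columns;
# "org/method-name-corpus" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("org/method-name-corpus", split="train")
for row in ds.select(range(3)):
    # Each row holds a function whose name is masked as METHOD_NAME,
    # plus a short sequence of integer ids.
    print(row["ids"], row["text"].splitlines()[0])
```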
def METHOD_NAME(self,
plugin_id: int,
type: Literal['recast_critical',
'recast_high',
'recast_medium',
'recast_low',
'recast_info',
'exclude'
],
host: Optional[str] = None,
date: Optional[int] = None
) -> None:
'''
Creates a new plugin rule
Args:
plugin_id (int): The plugin id to modify
type (str): The type of modification to perform
host (str, optional): The host to apply this rule to
date (int, optional): The unix date for this rule to expire
Example:
>>> nessus.plugin_rules.create(
... plugin_id=19506,
... type='exclude',
... host='192.168.0.1',
... date=1645164000
... )
'''
self._post(json={
'plugin_id': str(plugin_id),
'type': type,
'host': host if host else '',
'date': date
}) | [
129
] |
def METHOD_NAME(opt, npar=2):
tst_opt(opt, _tstoptfct.Bohachevsky1, npar) | [
9,
-1
] |
def METHOD_NAME(runner, revision):
"""Downgrade the database to the given REVISION."""
runner.execute_alembic_command('downgrade', revision=revision) | [
8171,
1502
] |
def METHOD_NAME(self, s):
if s is None:
return None
count = len(s.transitions)
look = [None] * count
for alt in range(0, count):
look[alt] = set()
lookBusy = set()
seeThruPreds = False # fail to get lookahead upon pred
self._LOOK(s.transition(alt).target, None, PredictionContext.EMPTY, \
look[alt], lookBusy, set(), seeThruPreds, False)
# Wipe out lookahead for this alternative if we found nothing
# or we had a predicate when we !seeThruPreds
if len(look[alt])==0 or self.HIT_PRED in look[alt]:
look[alt] = None
return look | [
19,
2719,
14430
] |
def METHOD_NAME(self) -> 'outputs.RegistryResponse':
"""
[Required] Additional attributes of the entity.
"""
return pulumi.get(self, "registry_properties") | [
510,
748
] |
def METHOD_NAME(self):
self.config = get_sample_registry_data_source()
mock_helper = create_autospec(
DataRegistryHelper, spec_set=True, instance=True,
visible_domains={"user-reports", "granted-domain"},
participating_domains={"user-reports", "granted-domain", "other-domain"}
)
self.patcher = patch("corehq.apps.userreports.models.DataRegistryHelper", return_value=mock_helper)
self.patcher.start() | [
0,
1
] |
def METHOD_NAME(self):
return LayoutLMv2Config(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
coordinate_size=self.coordinate_size,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
hidden_act=self.hidden_act,
hidden_dropout_prob=self.hidden_dropout_prob,
attention_probs_dropout_prob=self.attention_probs_dropout_prob,
max_position_embeddings=self.max_position_embeddings,
type_vocab_size=self.type_vocab_size,
initializer_range=self.initializer_range,
pad_token_id=self.pad_token_id,
num_class=self.num_classes,
num_labels=self.num_labels,
) | [
19,
200
] |
def METHOD_NAME(nn_data):
norms = smooth_knn(nn_data)
assert_array_almost_equal(
norms,
1.0 + np.log2(10) * np.ones(norms.shape[0]),
decimal=3,
err_msg="Smooth knn-dists does not give expected norms",
) | [
9,
3772,
9896,
1260,
-1
] |
def METHOD_NAME(self, inInputFilter, inPipeAndViewsState): | [
129,
8904,
1179,
17896
] |
def METHOD_NAME(self, smem_args):
raise NotImplementedError("PyOpenCL backend does not support shared memory") | [
215,
1644,
1645,
335
] |
def METHOD_NAME(self):
return self._exact_frontend.METHOD_NAME | [
1418
] |
def METHOD_NAME(self): | [
9,
3129,
875,
43,
-1
] |
def METHOD_NAME():
observer["callback_invoked"] = True | [
1076,
43,
9
] |
def METHOD_NAME(labels: List[np.ndarray]) -> List[List[int]]:
"""Return a list containing the classes in batch."""
return [tensor.reshape((-1, 5))[:, 0].tolist() for tensor in labels] | [
19,
700,
2735,
2,
279,
381
] |
def METHOD_NAME(self):
qc = self.query_context()
session_helper = connection_manager.get_session_helper(self.engine_id, readonly=True)
with session_helper.session_context() as session:
return qc.METHOD_NAME(session.connection()) | [
19,
539,
3224
] |
def METHOD_NAME(output):
"""Removes memory addresses from the test output."""
return re.sub(r'@\w+', '@0x#', output) | [
188,
1645,
1065
] |
def METHOD_NAME(self) -> None:
"""Closes the database connection."""
if hasattr(self, "db") and self.db is not None:
bes = -1
TimDb.instances -= 1
try:
# bes = self.get_pg_connections()
self.db.METHOD_NAME()
except Exception as err:
log_error("close error: " + str(self.num) + " " + str(err))
log_debug(
f"TimDb-dstr {worker_pid:2d} {self.num:6d} {TimDb.instances:2d} {bes:3d} {time.time() - self.time:7.5f} {self.route_path:s}"
)
self.reset_attrs() | [
1462
] |
def METHOD_NAME() -> str:
"""Return platform dependent shared object name."""
if system() in ["Linux", "OS400"] or system().upper().endswith("BSD"):
name = "libxgboost.so"
elif system() == "Darwin":
name = "libxgboost.dylib"
elif system() == "Windows":
name = "xgboost.dll"
else:
raise NotImplementedError(f"System {system()} not supported")
return name | [
124,
156
] |
def METHOD_NAME(path, name, force=False):
if isinstance(force, bool):
return force
else:
for f in force:
if "*" in f or "?" in f:
if fnmatch(path, f):
return True
elif name == f:
return True
return False | [
427,
9392,
6388
] |
def METHOD_NAME(settings: Union[Text, Mapping] = "new") -> Callable:
"""Returns a function for merging new investments into assets.
Available merging functions should be registered with
:py:func:`@register_final_asset_transform<register_final_asset_transform>`.
"""
"""Returns a function for performing initial housekeeping.
For instance, remove technologies with no capacity now or in the future.
Available housekeeping functions should be registered with
:py:func:`@register_initial_asset_transform<register_initial_asset_transform>`.
"""
if isinstance(settings, Text):
name = settings
params: Mapping = {}
else:
params = {k: v for k, v in settings.items() if k != "name"}
name = settings["name"]
transform = FINAL_ASSET_TRANSFORM[name]
def final_assets_transform(old_assets: Dataset, new_assets):
return transform(old_assets, new_assets, **params)
final_assets_transform.__name__ = name
return final_assets_transform | [
3455,
411,
1155
] |
def METHOD_NAME(self):
from music21 import stream
from music21 import note
from music21 import converter
s = stream.Stream()
n1 = note.Note('d2', quarterLength=2.0)
s.append(n1)
s.append(note.Note('g~6', quarterLength=0.25))
temp = converter.freezeStr(s)
post = converter.thawStr(temp)
self.assertEqual(len(post.notes), 2)
self.assertEqual(str(post.notes[0].pitch), 'D2') | [
9,
756,
2629
] |
def METHOD_NAME(self):
"""
Assert that correct url is returned.
"""
project = models.Project(
name="test", homepage="https://example.org", backend=BACKEND
)
exp = "https://pypi.org/pypi/test/json"
obs = backend.PypiBackend.get_version_url(project)
self.assertEqual(obs, exp) | [
9,
19,
281,
274
] |
def METHOD_NAME(self, labels: Tensor, embeddings: Tensor) -> Tensor:
"""Build the triplet loss over a batch of embeddings.
We generate all the valid triplets and average the loss over the positive ones.
Args:
labels: labels of the batch, of size (batch_size,)
embeddings: tensor of shape (batch_size, embed_dim)
margin: margin for triplet loss
squared: Boolean. If true, output is the pairwise squared euclidean distance matrix.
If false, output is the pairwise euclidean distance matrix.
Returns:
Label_Sentence_Triplet: scalar tensor containing the triplet loss
"""
labels = labels.unsqueeze(1)
pdist_matrix = self.distance_metric(embeddings)
adjacency = labels == labels.t()
adjacency_not = ~adjacency
batch_size = torch.numel(labels)
pdist_matrix_tile = pdist_matrix.repeat([batch_size, 1])
mask = adjacency_not.repeat([batch_size, 1]) & (pdist_matrix_tile > torch.reshape(pdist_matrix.t(), [-1, 1]))
mask_final = torch.reshape(torch.sum(mask, 1, keepdims=True) > 0.0, [batch_size, batch_size])
mask_final = mask_final.t()
negatives_outside = torch.reshape(BatchSemiHardTripletLoss._masked_minimum(pdist_matrix_tile, mask), [batch_size, batch_size])
negatives_outside = negatives_outside.t()
negatives_inside = BatchSemiHardTripletLoss._masked_maximum(pdist_matrix, adjacency_not)
negatives_inside = negatives_inside.repeat([1, batch_size])
semi_hard_negatives = torch.where(mask_final, negatives_outside, negatives_inside)
loss_mat = (pdist_matrix - semi_hard_negatives) + self.margin
mask_positives = adjacency.float().to(labels.device) - torch.eye(batch_size, device=labels.device)
mask_positives = mask_positives.to(labels.device)
num_positives = torch.sum(mask_positives)
triplet_loss = torch.sum(torch.max(loss_mat * mask_positives, torch.tensor([0.0], device=labels.device))) / num_positives
return triplet_loss | [
2277,
4169,
388,
12392,
1572
] |
def METHOD_NAME(test):
if test is not None:
return test.cost()
return 1 | [
1654
] |
def METHOD_NAME(self):
self.assertCountryAliases(Pakistan, PK, PAK) | [
9,
1078,
2334
] |
def METHOD_NAME(self):
"""Returns the time stamp for the file as a tuple of values containing
the local time, or an empty tuple if the file does not have a time stamp."""
# RISC OS time is given as a five byte block containing the
# number of centiseconds since 1900 (presumably 1st January 1900).
# Convert the time to the time elapsed since the Epoch (assuming
# 1970 for this value).
date_num = struct.unpack("<Q",
struct.pack("<IBxxx", self.execution_address, self.load_address & 0xff))[0]
centiseconds = date_num - between_epochs
# Convert this to a value in seconds and return a time tuple.
try:
return time.localtime(centiseconds / 100.0)
except ValueError:
return () | [
104,
2418
] |
def METHOD_NAME(self):
ref_key_list = ["BPR:1,1,1", "BPR:10,10,3", "FGOR", "FOPR", "TIME", "WBHP:INJ", "WBHP:PROD",
"WGIR:INJ", "WGIR:PROD", "WGIT:INJ", "WGIT:PROD", "WGOR:PROD", "WGPR:INJ",
"WGPR:PROD", "WGPT:INJ", "WGPT:PROD", "WOIR:INJ", "WOIR:PROD", "WOIT:INJ",
"WOIT:PROD", "WOPR:INJ", "WOPR:PROD", "WOPT:INJ", "WOPT:PROD", "WWIR:INJ",
"WWIR:PROD", "WWIT:INJ", "WWIT:PROD", "WWPR:INJ", "WWPR:PROD", "WWPT:INJ",
"WWPT:PROD"]
ref_keys_pattern = ["WGPR:INJ", "WGPR:PROD", "WOPR:INJ", "WOPR:PROD", "WWPR:INJ", "WWPR:PROD"]
smry1 = ESmry(test_path("data/SPE1CASE1.SMSPEC"))
smry1.make_esmry_file()
ext_smry1 = ESmry(test_path("data/SPE1CASE1.ESMRY"))
list_of_keys = ext_smry1.keys()
self.assertEqual(len(list_of_keys), len(ref_key_list))
for key, ref_key in zip(list_of_keys, ref_key_list):
self.assertEqual(key, ref_key)
for key in list_of_keys:
data = smry1[key]
self.assertEqual(len(smry1), len(data))
list_of_keys2 = ext_smry1.keys("W?PR:*")
self.assertEqual(len(list_of_keys2), len(ref_keys_pattern))
for key, ref in zip(list_of_keys2, ref_keys_pattern):
self.assertEqual(key, ref) | [
9,
13954,
1661
] |
async def METHOD_NAME(
cls,
actor_config: ActorPoolConfig,
process_index: int,
status_queue: multiprocessing.Queue,
):
pool = await TestSubActorPool.create(
{"actor_pool_config": actor_config, "process_index": process_index}
)
await pool.start()
status_queue.put(
SubpoolStatus(status=0, external_addresses=[pool.external_address])
)
actor_config.reset_pool_external_address(process_index, [pool.external_address])
await pool.join() | [
129,
1066,
1567
] |
def METHOD_NAME(cls, serviceInfoDict):
"""Initialization of DB object"""
cls.dataIntegrityDB = DataIntegrityDB(parentLogger=cls.log)
return S_OK() | [
15,
1519
] |
def METHOD_NAME():
"""Test wavefront reader"""
fname_mesh = load_data_file('orig/triceratops.obj.gz')
fname_out = op.join(temp_dir, 'temp.obj')
mesh1 = read_mesh(fname_mesh)
assert_raises(IOError, read_mesh, 'foo.obj')
assert_raises(ValueError, read_mesh, op.abspath(__file__))
assert_raises(ValueError, write_mesh, fname_out, *mesh1, format='foo')
write_mesh(fname_out, mesh1[0], mesh1[1], mesh1[2], mesh1[3])
assert_raises(IOError, write_mesh, fname_out, *mesh1)
write_mesh(fname_out, *mesh1, overwrite=True)
mesh2 = read_mesh(fname_out)
assert_equal(len(mesh1), len(mesh2))
for m1, m2 in zip(mesh1, mesh2):
if m1 is None:
assert_equal(m2, None)
else:
assert_allclose(m1, m2, rtol=1e-5)
# test our efficient normal calculation routine
assert_allclose(mesh1[2], _slow_calculate_normals(mesh1[0], mesh1[1]),
rtol=1e-7, atol=1e-7) | [
9,
-1
] |
def METHOD_NAME(self):
with self.assertRaises(ValidationError) as cm:
_validate_complete_username('[email protected]', 'domain')
self.assertEqual(cm.exception.message,
"The username email domain '@domain2.commcarehq.org' should be '@domain.commcarehq.org'.") | [
9,
442,
3966,
217,
3534,
487,
1674
] |
def METHOD_NAME(self):
if not any(socket.is_linked for socket in self.outputs):
return
sfields_s = self.inputs[S_FIELD_A.idx].sv_get(default=[[None]])
vfields_s = self.inputs[V_FIELD_A.idx].sv_get(default=[[None]])
if not isinstance(sfields_s, (list, tuple)):
sfields_s = [sfields_s]
if not isinstance(vfields_s, (list, tuple)):
vfields_s = [vfields_s]
vfields_out = []
sfields_out = []
for sfields, vfields in zip_long_repeat(sfields_s, vfields_s):
if not isinstance(sfields, (list, tuple)):
sfields = [sfields]
if not isinstance(vfields, (list, tuple)):
vfields = [vfields]
for sfield, vfield in zip_long_repeat(sfields, vfields):
if self.operation == 'GRAD':
vfield = SvScalarFieldGradient(sfield, self.step)
vfields_out.append(vfield)
elif self.operation == 'DIV':
sfield = SvVectorFieldDivergence(vfield, self.step)
sfields_out.append(sfield)
elif self.operation == 'LAPLACE':
sfield = SvScalarFieldLaplacian(sfield, self.step)
sfields_out.append(sfield)
elif self.operation == 'ROTOR':
vfield = SvVectorFieldRotor(vfield, self.step)
vfields_out.append(vfield)
else:
raise Exception("Unsupported operation")
self.outputs[V_FIELD_B.idx].sv_set(vfields_out)
self.outputs[S_FIELD_B.idx].sv_set(sfields_out) | [
356
] |
def METHOD_NAME(session, mimetype, file):
return openList(session, [ScanFile(file, mimetype)]) | [
1452,
171
] |
def METHOD_NAME(self) -> int:
"""Returns the height of the bottom border."""
return 1 | [
7887,
1318
] |
def METHOD_NAME(plane, ymin, ymax, zmin, zmax):
"""Calculate the x,y,z values of the corners of a rectangular plane
Drawing::
.zmin__ _________
. | |
. | plane |
.zmax__|_________|
. | |
. ymin ymax
Parameters
----------
plane : :obj:`list` of :obj:`double`
Parameters of the plane parametrisation ax+by+cz+d = 0.
ymin : double
y-coordinate of top corners
ymax : double
y-coordinate of bottom corners
zmin : double
z-coordinate of top corners
zmax : double
z-coordinate of bottom corners
Returns
-------
np.ndarray
x, y and z of the 4 corners of the plane as matrices for plotting
"""
a, b, c, d = plane.tolist()
yy = np.array([[ymin, ymin], [ymax, ymax]])
zz = np.array([[zmin, zmax], [zmin, zmax]])
return (-d - c * zz - b * yy) / a, yy, zz | [
1088,
24,
-1,
753
] |
def METHOD_NAME(self, location):
self.location = location | [
17679
] |
def METHOD_NAME(
self, line_position: np.ndarray, line_direction: np.ndarray
) -> np.ndarray:
"""Calculate a 3D line-plane intersection."""
return intersect_line_with_plane_3d(
line_position, line_direction, self.position, self.normal
) | [
3801,
41,
534
] |
def METHOD_NAME(
rect: ui.Rect, title: str = "", screen_num: Optional[int] = None
):
"""Allow other modules this screenshot a rectangle"""
selected_screen = get_screen(screen_num)
flash_rect(rect)
img = screen.capture_rect(rect)
path = get_screenshot_path(title)
img.write_file(path) | [
3249,
1539
] |
def METHOD_NAME():
# Doesn't raise
meshgrid()
meshgrid(asarray([1.], dtype=float32))
meshgrid(asarray([1.], dtype=float32), asarray([1.], dtype=float32))
assert_raises(ValueError, lambda: meshgrid(asarray([1.], dtype=float32), asarray([1.], dtype=float64))) | [
9,
11725,
1249,
1096
] |
def METHOD_NAME(**kwargs: Any) -> HttpRequest:
# Construct URL
_url = kwargs.pop("template_url", "/azurespecials/overwrite/x-ms-client-request-id/method/")
return HttpRequest(method="GET", url=_url, **kwargs) | [
56,
19,
377
] |
def METHOD_NAME(zs=None, zs_n=None, batch=None, sigma=None, **kwargs):
'''The InfoNCE (NT-XENT) loss in contrastive learning.
Args:
zs (list, optional): List of tensors of shape [batch_size, z_dim].
zs_n (list, optional): List of tensors of shape [nodes, z_dim].
batch (Tensor, optional): Required when both :obj:`zs` and :obj:`zs_n` are given.
sigma (ndarray, optional): A 2D-array of shape [:obj:`n_views`, :obj:`n_views`] with boolean
values, indicating contrast between which two views are computed. Only required
when number of views is greater than 2. If :obj:`sigma[i][j]` = :obj:`True`,
infoNCE between :math:`view_i` and :math:`view_j` will be computed.
tau (int, optional): The temperature used in NT-XENT.
:rtype: :class:`Tensor`
'''
assert zs is not None or zs_n is not None
if 'tau' in kwargs:
tau = kwargs['tau']
else:
tau = 0.5
if 'norm' in kwargs:
norm = kwargs['norm']
else:
norm = True
mean = kwargs['mean'] if 'mean' in kwargs else True
if zs_n is not None:
if zs is None:
# InfoNCE in GRACE
assert len(zs_n)==2
return (infoNCE_local_intra_node(zs_n[0], zs_n[1], tau, norm, batch)+
infoNCE_local_intra_node(zs_n[1], zs_n[0], tau, norm, batch))*0.5
else:
assert len(zs_n)==len(zs)
assert batch is not None
if len(zs)==1:
return infoNCE_local_global(zs[0], zs_n[0], batch, tau, norm)
elif len(zs)==2:
return (infoNCE_local_global(zs[0], zs_n[1], batch, tau, norm)+
infoNCE_local_global(zs[1], zs_n[0], batch, tau, norm))
else:
assert len(zs)==len(sigma)
loss = 0
for (i, j) in itertools.combinations(range(len(zs)), 2):
if sigma[i][j]:
loss += (infoNCE_local_global(zs[i], zs_n[j], batch, tau, norm)+
infoNCE_local_global(zs[j], zs_n[i], batch, tau, norm))
return loss
if len(zs)==2:
return NT_Xent(zs[0], zs[1], tau, norm)
elif len(zs)>2:
assert len(zs)==len(sigma)
loss = 0
for (i, j) in itertools.combinations(range(len(zs)), 2):
if sigma[i][j]:
loss += NT_Xent(zs[i], zs[j], tau, norm)
return loss | [
3697,
1178,
1572
] |
def METHOD_NAME(filename):
# Load in the molecule and its conformers.
# Note that all conformers of the same molecule are loaded as separate Molecule objects
# If using a OFF Toolkit version before 0.7.0, loading SDFs through RDKit and OpenEye may provide
# different behavior in some cases. So, here we force loading through RDKit to ensure the correct behavior
rdktkw = RDKitToolkitWrapper()
loaded_molecules = Molecule.from_file(filename, toolkit_registry=rdktkw)
# The logic below only works for lists of molecules, so if a
# single molecule was loaded, cast it to list
try:
loaded_molecules = [*loaded_molecules]
except TypeError:
loaded_molecules = [loaded_molecules]
# Collatate all conformers of the same molecule
# NOTE: This isn't necessary if you have already loaded or created multi-conformer molecules;
# it is just needed because our SDF reader does not automatically collapse conformers.
molecule = loaded_molecules.pop(0)
for next_molecule in loaded_molecules:
if next_molecule == molecule:
for conformer in next_molecule.conformers:
molecule.add_conformer(conformer)
else:
# We're assuming the SDF just has multiple conformers of the
# same molecule, so raise an error if that's not the case
raise ValueError("Multiple chemical species loaded")
# Make sure the molecule has a name
if not molecule.name:
molecule.name = molecule.to_hill_formula()
print(
f"Loaded {molecule.n_conformers} conformers"
+ f" of {molecule.to_smiles(explicit_hydrogens=False)!r}"
+ f" ({molecule.name})"
)
# Load the openff-2.1.0 force field appropriate for vacuum calculations (without constraints)
forcefield = ForceField("openff_unconstrained-2.1.0.offxml")
print(f"Parametrizing {molecule.name} (may take a moment to calculate charges)...")
interchange = Interchange.from_smirnoff(forcefield, [molecule])
print("Done.")
integrator = openmm.VerletIntegrator(1 * openmm.unit.femtoseconds)
simulation = interchange.to_openmm_simulation(integrator)
# We'll store energies in two lists
initial_energies = []
minimized_energies = []
# And minimized conformers in a second molecule
minimized_molecule = Molecule(molecule)
minimized_molecule.conformers.clear()
for conformer in molecule.conformers:
# Tell the OpenMM Simulation the positions of this conformer
simulation.context.setPositions(conformer.to_openmm())
# Keep a record of the initial energy
initial_energies.append(
simulation.context.getState(getEnergy=True).getPotentialEnergy()
)
# Perform the minimization
simulation.minimizeEnergy()
# Record minimized energy and positions
min_state = simulation.context.getState(getEnergy=True, getPositions=True)
minimized_energies.append(min_state.getPotentialEnergy())
minimized_molecule.add_conformer(from_openmm(min_state.getPositions()))
n_confs = molecule.n_conformers
print(f"{molecule.name}: {n_confs} conformers")
# Create a copy of the molecule so we can work on it
working_mol = Molecule(molecule)
# Print text header
print("Conformer Initial PE Minimized PE RMSD")
output = [
[
"Conformer",
"Initial PE (kcal/mol)",
"Minimized PE (kcal/mol)",
"RMSD between initial and minimized conformer (Angstrom)",
]
]
for i, (init_energy, init_coords, min_energy, min_coords) in enumerate(
zip(
initial_energies,
molecule.conformers,
minimized_energies,
minimized_molecule.conformers,
)
):
# Clear the conformers from the working molecule
working_mol.conformers.clear()
# Save the minimized conformer to file
working_mol.add_conformer(min_coords)
working_mol.to_file(
f"{molecule.name}_conf{i+1}_minimized.sdf",
file_format="sdf",
)
# Calculate the RMSD between the initial and minimized conformer
working_mol.add_conformer(init_coords)
rdmol = working_mol.to_rdkit()
rmslist = []
rdMolAlign.AlignMolConformers(rdmol, RMSlist=rmslist)
minimization_rms = rmslist[0]
# Record the results
output.append(
[
i + 1,
init_energy.value_in_unit(openmm.unit.kilocalories_per_mole),
min_energy.value_in_unit(openmm.unit.kilocalories_per_mole),
minimization_rms,
]
)
print(
f"{{:5d}} / {n_confs:5d} : {{:8.3f}} kcal/mol {{:8.3f}} kcal/mol {{:8.3f}} Å".format(
*output[-1]
)
)
# Write the results out to CSV
with open(f"{molecule.name}.csv", "w") as of:
of.write(", ".join(output.pop(0)) + "\n")
for line in output:
of.write("{}, {:.3f}, {:.3f}, {:.3f}".format(*line) + "\n") | [
226,
6425,
1811,
280,
171
] |
def METHOD_NAME(self):
"""Close the backing file, forget filename, *do* change to memory mode."""
self._adata.__X = self._adata.X[()]
self._file.close()
self._file = None
self._filename = None | [
24,
1645,
854
] |
def METHOD_NAME(self, resolver, path, parents=None):
# type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
match = get_regex(resolver).search(path) # Django < 2.0
if not match:
return None
if parents is None:
parents = [resolver]
elif resolver not in parents:
parents = parents + [resolver]
new_path = path[match.end() :]
for pattern in resolver.url_patterns:
# this is an include()
if not pattern.callback:
match_ = self.METHOD_NAME(pattern, new_path, parents)
if match_:
return match_
continue
elif not get_regex(pattern).search(new_path):
continue
try:
return self._cache[pattern]
except KeyError:
pass
prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
result = prefix + self._simplify(get_regex(pattern).pattern)
if not result.startswith("/"):
result = "/" + result
self._cache[pattern] = result
return result
return None | [
1014
] |
def METHOD_NAME(self):
with pytest.raises(TypeError):
x509.RevokedCertificateBuilder().add_extension(
"notanextension", False # type: ignore[arg-type]
) | [
9,
238,
532,
2916
] |
def METHOD_NAME(cls, name):
"""
Deletes a single synapse scope from the set of stored scopes.
:return: the name of the scope to delete.
:rtype: Scope
"""
if name in cls.name2synapse_scope.keys():
del cls.name2synapse_scope[name]
return | [
34,
629,
913
] |
def METHOD_NAME(self, elements=None, **options) -> str:
encoded, size = self.boxes_to_b64text(elements, **options)
decoded = encoded.decode("utf8")
# see https://tools.ietf.org/html/rfc2397
return f'<mglyph src="{decoded}" width="{size[0]}px" height="{size[1]}px" />' | [
2877,
24,
13688
] |
def METHOD_NAME(self, obj, event):
# type: (QObject, QEvent) -> bool
if event.type() == QEvent.StatusTip and \
not isinstance(event, QuickHelpTipEvent) and \
hasattr(obj, "whatsThis") and \
callable(obj.whatsThis):
assert isinstance(event, QStatusTipEvent)
tip = event.tip()
try:
text = obj.whatsThis()
except Exception:
text = None
if text:
ev = QuickHelpTipEvent(tip, text if tip else "")
return QCoreApplication.sendEvent(obj, ev)
return super().METHOD_NAME(obj, event) | [
417,
527
] |
def METHOD_NAME(self):
"""This method is not used in this implementation."""
raise NotImplementedError() | [
1171
] |
def METHOD_NAME(self):
hier = hier_block_with_message_inout()
input = block_with_message_output()
output = block_with_message_input()
self.tb.msg_connect(input, "test", hier, "test")
self.tb.msg_connect(hier, "test", output, "test")
self.run_top_block()
self.assert_has_num_subscriptions(input, "test", 1)
self.assert_has_subscription(input, "test", hier.input, "test")
self.assert_has_num_subscriptions(hier, "test", 0)
self.assert_has_num_subscriptions(hier.output, "test", 1)
self.assert_has_subscription(hier.output, "test", output, "test")
self.tb.msg_disconnect(input, "test", hier, "test")
self.tb.msg_disconnect(hier, "test", output, "test")
self.assert_has_num_subscriptions(input, "test", 0)
self.assert_has_num_subscriptions(hier.output, "test", 0) | [
9,
1576,
623,
24,
4688,
24,
1576
] |
def METHOD_NAME(self, node=None):
"""Check that a user is able to execute `ALTER QUOTA` with privileges are granted through a role.
"""
user_name = f"user_{getuid()}"
role_name = f"role_{getuid()}"
if node is None:
node = self.context.node
with user(node, f"{user_name}"), role(node, f"{role_name}"):
with When("I grant the role to the user"):
node.query(f"GRANT {role_name} TO {user_name}")
Suite(run=alter_quota,
examples=Examples("privilege grant_target_name user_name", [
tuple(list(row)+[role_name,user_name]) for row in alter_quota.examples
], args=Args(name="check privilege={privilege}", format_name=True))) | [
2384,
2268,
2321,
2499,
1018
] |
def METHOD_NAME(self):
self.preparer2.prepare.return_value = None
self.preparer2.get_plugin_name.return_value = "my-plugin"
self.preparer2.mock_add_spec(PluginPreparer)
self.installer.preparers = [self.preparer, self.preparer2]
node = self.installer.install(self.host, self.binaries, self.all_node_ips)
self.assertEqual(node, "fake node")
self.preparer.prepare.assert_has_calls([
mock.call(self.host, self.binaries)
])
self.preparer2.prepare.assert_has_calls([
mock.call(self.host, self.binaries)
])
self.preparer.get_config_vars.assert_has_calls([
mock.call(self.host, "fake node", self.all_node_ips)
])
self.preparer2.get_config_vars.assert_has_calls([
mock.call(self.host, None, self.all_node_ips)
])
expected_config_vars = {"fake": "config", "new": "var", "cluster_settings": {"plugin.mandatory": ["my-plugin"]}}
self.installer.config_applier.apply_configs.assert_has_calls([
mock.call(self.host, "fake node", ["/tmp"], expected_config_vars),
mock.call(self.host, None, ["/fake"], expected_config_vars)
])
self.installer.java_home_resolver.resolve_java_home.assert_has_calls([
mock.call(self.host, self.provision_config_instance)
])
self.preparer.invoke_install_hook.assert_has_calls([
mock.call(self.host, BootstrapPhase.post_install, expected_config_vars, {"JAVA_HOME": "/path/to/java/home"})
])
self.preparer2.invoke_install_hook.assert_has_calls([
mock.call(self.host, BootstrapPhase.post_install, expected_config_vars, {"JAVA_HOME": "/path/to/java/home"})
]) | [
9,
6957,
61,
2793,
3732
] |
def METHOD_NAME():
'''Test adding read variables with and without modules. '''
rwi = ReadWriteInfo()
sig_b = Signature("b")
rwi.add_read(sig_b)
correct = set()
correct.add(("", sig_b))
assert rwi.set_of_all_used_vars == correct
assert rwi.read_list == [("", sig_b)]
assert rwi.signatures_read == [sig_b]
# Check that the results are sorted as expected: even though
# 'a' is added later, it must be first in the output list:
sig_a = Signature("a")
rwi.add_read(sig_a)
correct.add(("", sig_a))
assert rwi.set_of_all_used_vars == correct
assert rwi.read_list == [("", sig_a), ("", sig_b)]
assert rwi.signatures_read == [sig_a, sig_b]
sig_c = Signature("c")
rwi.add_read(sig_c, "c_mod")
correct.add(("c_mod", sig_c))
assert rwi.set_of_all_used_vars == correct
assert rwi.read_list == [("", sig_a), ("", sig_b), ("c_mod", sig_c)]
assert rwi.signatures_read == [sig_a, sig_b, sig_c]
assert rwi.is_read(sig_a) is True | [
9,
238,
203
] |
def METHOD_NAME(self, schedule=1):
""" wrapper for the Packages.save_packages_byid() which requires the sysid """
ret = self.save_packages_byid(self.server["id"], schedule=schedule)
# this function is primarily called from outside
# so we have to commit here
rhnSQL.commit()
return ret | [
73,
2975
] |
def METHOD_NAME(self, interface=None):
## this just makes it easy for us to detect whether an ExceptionHook is already installed.
if interface is None:
return ['ExceptionHandler']
else:
return interface == 'ExceptionHandler' | [
2721
] |
def METHOD_NAME(page, headers, code, getRatioValue, pageLength):
threadData = getCurrentThreadData()
if kb.testMode:
threadData.lastComparisonHeaders = listToStrValue(_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)) if headers else ""
threadData.lastComparisonPage = page
threadData.lastComparisonCode = code
if page is None and pageLength is None:
return None
if any((conf.string, conf.notString, conf.regexp)):
rawResponse = "%s%s" % (listToStrValue(_ for _ in headers.headers if not _.startswith("%s:" % URI_HTTP_HEADER)) if headers else "", page)
# String to match in page when the query is True
if conf.string:
return conf.string in rawResponse
# String to match in page when the query is False
if conf.notString:
if conf.notString in rawResponse:
return False
else:
if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()):
return None
else:
return True
# Regular expression to match in page when the query is True and/or valid
if conf.regexp:
return re.search(conf.regexp, rawResponse, re.I | re.M) is not None
# HTTP code to match when the query is valid
if conf.code:
return conf.code == code
seqMatcher = threadData.seqMatcher
seqMatcher.set_seq1(kb.pageTemplate)
if page:
# In case of an DBMS error page return None
if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()) and not kb.negativeLogic:
if not (wasLastResponseHTTPError() and getLastRequestHTTPError() in (conf.ignoreCode or [])):
return None
# Dynamic content lines to be excluded before comparison
if not kb.nullConnection:
page = removeDynamicContent(page)
seqMatcher.set_seq1(removeDynamicContent(kb.pageTemplate))
if not pageLength:
pageLength = len(page)
if kb.nullConnection and pageLength:
if not seqMatcher.a:
errMsg = "problem occurred while retrieving original page content "
errMsg += "which prevents sqlmap from continuation. Please rerun, "
errMsg += "and if the problem persists turn off any optimization switches"
raise SqlmapNoneDataException(errMsg)
ratio = 1. * pageLength / len(seqMatcher.a)
if ratio > 1.:
ratio = 1. / ratio
else:
# Preventing "Unicode equal comparison failed to convert both arguments to Unicode"
# (e.g. if one page is PDF and the other is HTML)
if isinstance(seqMatcher.a, six.binary_type) and isinstance(page, six.text_type):
page = getBytes(page, kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")
elif isinstance(seqMatcher.a, six.text_type) and isinstance(page, six.binary_type):
seqMatcher.a = getBytes(seqMatcher.a, kb.pageEncoding or DEFAULT_PAGE_ENCODING, "ignore")
if any(_ is None for _ in (page, seqMatcher.a)):
return None
elif seqMatcher.a and page and seqMatcher.a == page:
ratio = 1.
elif kb.skipSeqMatcher or seqMatcher.a and page and any(len(_) > MAX_DIFFLIB_SEQUENCE_LENGTH for _ in (seqMatcher.a, page)):
if not page or not seqMatcher.a:
return float(seqMatcher.a == page)
else:
ratio = 1. * len(seqMatcher.a) / len(page)
if ratio > 1:
ratio = 1. / ratio
else:
seq1, seq2 = None, None
if conf.titles:
seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
else:
seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
if seq1 is None or seq2 is None:
return None
seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
if kb.heavilyDynamic:
seq1 = seq1.split("\n")
seq2 = seq2.split("\n")
key = None
else:
key = (hash(seq1), hash(seq2))
seqMatcher.set_seq1(seq1)
seqMatcher.set_seq2(seq2)
if key in kb.cache.comparison:
ratio = kb.cache.comparison[key]
else:
ratio = round(seqMatcher.quick_ratio() if not kb.heavilyDynamic else seqMatcher.ratio(), 3)
if key:
kb.cache.comparison[key] = ratio
# If the url is stable and we did not set yet the match ratio and the
# current injected value changes the url page content
if kb.matchRatio is None:
if ratio >= LOWER_RATIO_BOUND and ratio <= UPPER_RATIO_BOUND:
kb.matchRatio = ratio
logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio)
if kb.testMode:
threadData.lastComparisonRatio = ratio
# If it has been requested to return the ratio and not a comparison
# response
if getRatioValue:
return ratio
elif ratio > UPPER_RATIO_BOUND:
return True
elif ratio < LOWER_RATIO_BOUND:
return False
elif kb.matchRatio is None:
return None
else:
return (ratio - kb.matchRatio) > DIFF_TOLERANCE | [
713
] |
def METHOD_NAME(self) -> updatePrices:
METHOD_NAME = getattr(self, "_update_prices", None)
if METHOD_NAME is None:
METHOD_NAME = updatePrices(self.data)
self._update_prices = METHOD_NAME
return METHOD_NAME | [
86,
2357
] |
def METHOD_NAME(self, user):
"""Return all projects that an user belongs to."""
queryset = self._add_user_projects(self.none(), user, admin=True, member=True)
return queryset.distinct() | [
43,
21
] |
def METHOD_NAME(proto_ver):
if proto_ver == 4:
bridge_protocol = "mqttv311"
proto_ver_connect = 128+4
else:
bridge_protocol = "mqttv50"
proto_ver_connect = 5
(port1, port2) = mosq_test.get_port(2)
conf_file = '06-bridge-reconnect-local-out.conf'
write_config(conf_file, port1, port2, bridge_protocol)
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("bridge-reconnect-test", keepalive=keepalive, proto_ver=proto_ver_connect)
connack_packet = mosq_test.gen_connack(rc=0, proto_ver=proto_ver)
mid = 180
subscribe_packet = mosq_test.gen_subscribe(mid, "bridge/#", 0, proto_ver=proto_ver)
suback_packet = mosq_test.gen_suback(mid, 0, proto_ver=proto_ver)
publish_packet = mosq_test.gen_publish("bridge/reconnect", qos=0, payload="bridge-reconnect-message", proto_ver=proto_ver)
try:
os.remove('mosquitto-%d.db' % (port1))
except OSError:
pass
broker = mosq_test.start_broker(filename=os.path.basename(__file__), port=port1, use_conf=False)
local_cmd = ['../../src/mosquitto', '-c', '06-bridge-reconnect-local-out.conf']
local_broker = mosq_test.start_broker(cmd=local_cmd, filename=os.path.basename(__file__)+'_local1', use_conf=False, port=port2)
if os.environ.get('MOSQ_USE_VALGRIND') is not None:
time.sleep(5)
else:
time.sleep(0.5)
local_broker.terminate()
local_broker.wait()
if os.environ.get('MOSQ_USE_VALGRIND') is not None:
time.sleep(5)
else:
time.sleep(0.5)
local_broker = mosq_test.start_broker(cmd=local_cmd, filename=os.path.basename(__file__)+'_local2', port=port2)
if os.environ.get('MOSQ_USE_VALGRIND') is not None:
time.sleep(5)
else:
time.sleep(0.5)
pub = None
try:
sock = mosq_test.do_client_connect(connect_packet, connack_packet, port=port1)
mosq_test.do_send_receive(sock, subscribe_packet, suback_packet, "suback")
mosq_test.do_send_receive(sock, subscribe_packet, suback_packet, "suback")
# Helper
helper_connect_packet = mosq_test.gen_connect("test-helper", keepalive=keepalive, proto_ver=proto_ver)
helper_connack_packet = mosq_test.gen_connack(rc=0, proto_ver=proto_ver)
helper_publish_packet = mosq_test.gen_publish("bridge/reconnect", qos=1, mid=1, payload="bridge-reconnect-message", proto_ver=proto_ver)
helper_puback_packet = mosq_test.gen_puback(mid=1, proto_ver=proto_ver)
helper_disconnect_packet = mosq_test.gen_disconnect(proto_ver=proto_ver)
helper_sock = mosq_test.do_client_connect(helper_connect_packet, helper_connack_packet, port=port2, connack_error="helper connack")
mosq_test.do_send_receive(helper_sock, helper_publish_packet, helper_puback_packet, "puback")
helper_sock.send(helper_disconnect_packet)
helper_sock.close()
# End of helper
# Should have now received a publish command
mosq_test.expect_packet(sock, "publish", publish_packet)
rc = 0
sock.close()
except mosq_test.TestError:
pass
finally:
os.remove(conf_file)
time.sleep(1)
broker.terminate()
broker.wait()
(stdo, stde) = broker.communicate()
if rc:
print(stde.decode('utf-8'))
local_broker.terminate()
local_broker.wait()
try:
os.remove('mosquitto-%d.db' % (port1))
except OSError:
pass
if rc:
(stdo, stde) = local_broker.communicate()
print(stde.decode('utf-8'))
if pub:
(stdo, stde) = pub.communicate()
print(stdo.decode('utf-8'))
exit(rc) | [
74,
9
] |
def METHOD_NAME(self):
if self._server is None:
self._server = self._create_server()
if self._server_thread is None:
self._server_thread = self._create_server_thread(self._server)
self._server_thread.start() | [
447,
163
] |
def METHOD_NAME() -> None:
opt.minimize(m.training_loss, m.trainable_variables) | [
2123,
367
] |
def METHOD_NAME(s):
return "\n".join([line for line in s.replace(os.linesep, "\n").split("\n") if line]) | [
1137,
171,
459
] |
def METHOD_NAME(server=None):
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
instantiate the operator. The Configuration allows to customize
how the operation will be processed by the operator.
Parameters
----------
server : server.DPFServer, optional
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
return Operator.METHOD_NAME(name="server_path", server=server) | [
235,
200
] |
def METHOD_NAME(self, path: str, splits: List[str]) -> Optional["RepoUrl"]:
if len(splits) == 1:
self.namespace = self.username
self.repo = path
return self
if len(splits) < 2:
# invalid cases
return None
namespace_parts = self._check_fork(splits)
self.namespace = "/".join(namespace_parts)
self.repo = splits[-1]
return self | [
3458,
157
] |
def METHOD_NAME(self, _object, _attributes={}, **_arguments):
"""data size: Return the size in bytes of an object
Required argument: the object whose data size is to be returned
Keyword argument _attributes: AppleEvent attribute dictionary
Returns: the size of the object in bytes
"""
_code = 'core'
_subcode = 'dsiz'
if _arguments: raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode,
_arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
# XXXX Optionally decode result
if _arguments.has_key('----'):
return _arguments['----'] | [
365,
1318
] |
def METHOD_NAME(self):
reproject = Reproject(source=self.source, coordinates=self.coarse_coords.definition, interpolation="bilinear")
o1 = reproject.eval(self.coarse_coords)
o2 = self.source_coarse.eval(self.coarse_coords)
assert_array_equal(o1.data, o2.data)
node = podpac.Node.from_json(reproject.json)
o3 = node.eval(self.coarse_coords)
assert_array_equal(o1.data, o3.data) | [
9,
11222,
1458,
553
] |
def METHOD_NAME(self, response):
self.assertContains(response, "You manage 2 test plan(s), 0 test plan(s) disabled")
for plan, expected_runs_count in ((self.plan_1, 0), (self.plan, 2)):
plan_url = plan.get_absolute_url()
self.assertContains(
response,
f'<td height="27"><a class="link" href="{plan_url}">{plan.name}</a></td>',
html=True,
)
self.assertContains(response, f"<td>{plan.product.name}</td>", html=True)
self.assertContains(response, f"<td>{plan.type}</td>", html=True)
self.assertContains(
response,
f'<td><a href="/runs/?plan={plan.pk}">{expected_runs_count}</a></td>',
html=True,
) | [
638,
9,
3919,
245
] |
def METHOD_NAME(self):
G = nx.DiGraph(self.edges)
x = list(nx.edge_bfs(G, self.nodes, orientation=None))
x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
assert x == x_ | [
9,
8261,
5354,
98
] |
def METHOD_NAME(matrix: NDArrayF64) -> Intrinsics:
"""Get intrinsics data structure from 3x3 matrix."""
intrinsics = Intrinsics(
focal=(matrix[0, 0], matrix[1, 1]),
center=(matrix[0, 2], matrix[1, 2]),
skew=matrix[0, 1],
)
return intrinsics | [
19,
14989,
280,
430
] |
def METHOD_NAME(original_array, new_array):
if type(original_array) is not type(new_array):
# if input was an ndarray subclass and subclasses were OK,
# then view the result as that subclass.
new_array = new_array.view(type=type(original_array))
# Since we have done something akin to a view from original_array, we
# should let the subclass finalize (if it has it implemented, i.e., is
# not None).
if new_array.__array_finalize__:
new_array.__array_finalize__(original_array)
return new_array | [
2946,
1179,
947,
9260
] |
def METHOD_NAME(self):
return os.path.join("lib", "cmake", f"conan-official-{self.name}-targets.cmake") | [
298,
171,
2071,
157
] |
def METHOD_NAME(self):
return os.environ.get('LOCAL_RANK', 0) | [
1056,
398
] |
def METHOD_NAME(self, test_name, expected_xml): | [
9,
1737,
171
] |
def METHOD_NAME(src_bucket, src_prefix, dest_bucket, dest_prefix):
boto3.client("s3").copy_object(
Bucket=dest_bucket,
Key=dest_prefix,
CopySource={"Bucket": src_bucket, "Key": src_prefix}
) | [
607,
215,
279
] |
def METHOD_NAME(df: pd.DataFrame) -> pd.DataFrame:
cols = list(df.columns)
cols = [cols[-1]] + cols[:-1]
df = df[cols]
return df | [
3007,
1951
] |
def METHOD_NAME(
self, cache_key: Hashable
) -> Optional[ICompletionContextDependencyGraph]:
with self._lock:
ret = self._cached.get(cache_key)
if ret is not None:
self.cache_hits += 1
if BaseOptions.DEBUG_CACHE_DEPS:
if ret is not None:
log.info(
"Cache HIT (%s):\n%s\n",
cache_key,
json.dumps(ret.to_dict(), indent=4),
)
else:
log.info(
"Cache MISS (%s):\n%s\n",
cache_key,
" \n".join(
json.dumps(x.to_dict(), indent=4)
for x in self._cached.values()
),
)
return ret | [
19,
175,
2913,
303
] |
def METHOD_NAME(
namespaces: list[NamespaceV1], expected_state: State
):
integration = CNAIntegration(cna_clients={}, namespaces=namespaces)
integration.assemble_desired_states()
assert integration._desired_states == {"test": expected_state} | [
9,
1911,
1893,
5368,
4085
] |
def METHOD_NAME(self, X, y, init_params=True):
self._check_target_array(y)
if init_params:
if self.n_classes is None:
self.n_classes = np.max(y) + 1
self._n_features = X.shape[1]
self.b_, self.w_ = self._init_params(
weights_shape=(self._n_features, self.n_classes),
bias_shape=(self.n_classes,),
random_seed=self.random_seed,
)
self.cost_ = []
y_enc = self._one_hot(y=y, n_labels=self.n_classes, dtype=np.float_)
self.init_time_ = time()
rgen = np.random.RandomState(self.random_seed)
for i in range(self.epochs):
for idx in self._yield_minibatches_idx(
rgen=rgen, n_batches=self.minibatches, data_ary=y, shuffle=True
):
# net_input, softmax and diff -> n_samples x n_classes:
y_probas = self._forward(X[idx])
# w_ -> n_feat x n_classes
# b_ -> n_classes
grad_loss_wrt_w, grad_loss_wrt_b = self._backward(
X[idx], y_true=y_enc[idx], y_probas=y_probas
)
# update in opp. direction of the cost gradient
l2_reg = self.l2 * self.w_
self.w_ += self.eta * (-grad_loss_wrt_w - l2_reg)
self.b_ += self.eta * -grad_loss_wrt_b
# compute cost of the whole epoch
y_probas = self._forward(X)
cross_ent = self._cross_entropy(output=y_probas, y_target=y_enc)
cost = self._cost(cross_ent)
self.cost_.append(cost)
if self.print_progress:
self._print_progress(iteration=i + 1, n_iter=self.epochs, cost=cost)
return self | [
90
] |
def METHOD_NAME(projects):
return Tags(parent_stream=projects, repository_part=True, **auth_params) | [
114
] |
def METHOD_NAME(self,rec):
lst = rec.toList()
for a in lst:
self.file.METHOD_NAME(a)
self.file.METHOD_NAME('$$$$\n') | [
77
] |
def METHOD_NAME(self, name):
sym = self._symbols.get(name)
if sym is None:
flags = self._table.symbols[name]
namespaces = self.__check_children(name)
sym = self._symbols[name] = Symbol(name, flags, namespaces)
return sym | [
1906
] |
def METHOD_NAME():
""" return a dict that remembers insertion order """
return {} if sys.version_info[1] > 5 else OrderedDict() | [
11341
] |
def METHOD_NAME(self) -> 'outputs.SystemDataResponse':
"""
The system meta data relating to this resource.
"""
return pulumi.get(self, "system_data") | [
112,
365
] |
def METHOD_NAME(creators: List[Union[dict, str]], ignore_email=False) -> Tuple[List["Person"], List[str]]:
"""Parse input and return a list of Person."""
creators = creators or []
people = []
no_email_warnings = []
for creator in creators:
person, no_email_warning = construct_creator(creator, ignore_email=ignore_email)
if person:
people.append(person)
if no_email_warning:
no_email_warnings.append(no_email_warning)
return people, no_email_warnings | [
363,
11896
] |
def METHOD_NAME(args):
"""
Executes the *software* subprogram with parsed commandline *args*.
"""
sw_dir = get_sw_dir()
# just print the cache location?
if args.location:
print(sw_dir)
return
# just print the list of dependencies?
if args.print_deps:
print(",".join(args.deps))
return
# just remove the current software cache?
if args.remove:
remove_software_cache(sw_dir)
return
# rebuild the software cache
build_software_cache(sw_dir, dep_names=args.deps) | [
750
] |
def METHOD_NAME(
model_settings: str,
env_params: str,
expected: HuggingFaceSettings,
request: pytest.FixtureRequest,
monkeypatch: pytest.MonkeyPatch,
):
monkeypatch.setenv(PARAMETERS_ENV_NAME, env_params)
assert expected == get_huggingface_settings(request.getfixturevalue(model_settings))
monkeypatch.delenv(PARAMETERS_ENV_NAME) | [
9,
19,
2923,
817
] |
def METHOD_NAME(self):
get(self, **self.conan_data["sources"][self.version], strip_root=True) | [
1458
] |
def METHOD_NAME() -> Tuple[str, ...]:
return ("core20",) | [
19,
616,
7346
] |
def METHOD_NAME(self):
# All nodes should start with 25*11.4375 BTC:
starting_balance = 25 * 11.4375
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
# Generate zcaddress keypairs
zckeypair = self.nodes[0].zcrawkeygen()
zcsecretkey = zckeypair["zcsecretkey"]
zcaddress = zckeypair["zcaddress"]
pool = [0, 1, 2, 3]
for i in range(4):
(total_in, inputs) = gather_inputs(self.nodes[i], 45.75)
pool[i] = self.nodes[i].createrawtransaction(inputs, {})
pool[i] = self.nodes[i].zcrawjoinsplit(pool[i], {}, {zcaddress:45.74}, 45.74, 0)
signed = self.nodes[i].signrawtransaction(pool[i]["rawtxn"])
# send the tx to both halves of the network
self.nodes[0].sendrawtransaction(signed["hex"])
self.nodes[0].generate(1)
self.nodes[2].sendrawtransaction(signed["hex"])
self.nodes[2].generate(1)
pool[i] = pool[i]["encryptednote1"]
sync_blocks(self.nodes[0:2])
sync_blocks(self.nodes[2:4])
# Confirm that the protects have taken place
for i in range(4):
enc_note = pool[i]
receive_result = self.nodes[0].zcrawreceive(zcsecretkey, enc_note)
assert_equal(receive_result["exists"], True)
pool[i] = receive_result["note"]
# Extra confirmations
receive_result = self.nodes[1].zcrawreceive(zcsecretkey, enc_note)
assert_equal(receive_result["exists"], True)
receive_result = self.nodes[2].zcrawreceive(zcsecretkey, enc_note)
assert_equal(receive_result["exists"], True)
receive_result = self.nodes[3].zcrawreceive(zcsecretkey, enc_note)
assert_equal(receive_result["exists"], True)
blank_tx = self.nodes[0].createrawtransaction([], {})
# Create joinsplit {A, B}->{*}
joinsplit_AB = self.nodes[0].zcrawjoinsplit(blank_tx,
{pool[0] : zcsecretkey, pool[1] : zcsecretkey},
{zcaddress:(45.74*2)-0.01},
0, 0.01)
# Create joinsplit {B, C}->{*}
joinsplit_BC = self.nodes[0].zcrawjoinsplit(blank_tx,
{pool[1] : zcsecretkey, pool[2] : zcsecretkey},
{zcaddress:(45.74*2)-0.01},
0, 0.01)
# Create joinsplit {C, D}->{*}
joinsplit_CD = self.nodes[0].zcrawjoinsplit(blank_tx,
{pool[2] : zcsecretkey, pool[3] : zcsecretkey},
{zcaddress:(45.74*2)-0.01},
0, 0.01)
# Create joinsplit {A, D}->{*}
joinsplit_AD = self.nodes[0].zcrawjoinsplit(blank_tx,
{pool[0] : zcsecretkey, pool[3] : zcsecretkey},
{zcaddress:(45.74*2)-0.01},
0, 0.01)
# (a) Node 0 will spend joinsplit AB, then attempt to
# double-spend it with BC. It should fail before and
# after Node 0 mines blocks.
#
# (b) Then, Node 2 will spend BC, and mine 5 blocks.
# Node 1 connects, and AB will be reorg'd from the chain.
# Any attempts to spend AB or CD should fail for
# both nodes.
#
# (c) Then, Node 0 will spend AD, which should work
# because the previous spend for A (AB) is considered
# invalid due to the reorg.
# (a)
AB_txid = self.nodes[0].sendrawtransaction(joinsplit_AB["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[0], joinsplit_BC["rawtxn"])
# Wait until node[1] receives AB before we attempt to double-spend
# with BC.
print("Waiting for AB_txid...\n")
while True:
if self.txid_in_mempool(self.nodes[1], AB_txid):
break
time.sleep(0.2)
print("Done!\n")
self.expect_cannot_joinsplit(self.nodes[1], joinsplit_BC["rawtxn"])
# Generate a block
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:2])
self.expect_cannot_joinsplit(self.nodes[0], joinsplit_BC["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[1], joinsplit_BC["rawtxn"])
# (b)
self.nodes[2].sendrawtransaction(joinsplit_BC["rawtxn"])
self.nodes[2].generate(5)
# Connect the two nodes
connect_nodes(self.nodes, 1, 2)
sync_blocks(self.nodes)
# AB and CD should all be impossible to spend for each node.
self.expect_cannot_joinsplit(self.nodes[0], joinsplit_AB["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[0], joinsplit_CD["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[1], joinsplit_AB["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[1], joinsplit_CD["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[2], joinsplit_AB["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[2], joinsplit_CD["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[3], joinsplit_AB["rawtxn"])
self.expect_cannot_joinsplit(self.nodes[3], joinsplit_CD["rawtxn"])
# (c)
# AD should be possible to send due to the reorg that
# tossed out AB.
self.nodes[0].sendrawtransaction(joinsplit_AD["rawtxn"])
self.nodes[0].generate(1)
sync_blocks(self.nodes) | [
22,
9
] |
def METHOD_NAME(self, *args, **kwargs) -> List["Element"]: # type: ignore
pass | [
4855
] |
def METHOD_NAME(self):
time = self.get_time()
return self.data_3d.sel(time=time, method="nearest").merge(grid) | [
19,
1529,
394
] |
def METHOD_NAME(self, dict_):
"""Group keys within a dict by their type and sort within type."""
type_sorted = sorted(dict_, key=self._type_sort_key)
type_and_value_sorted = []
for _, group in itertools.groupby(type_sorted, self._type_sort_key):
type_and_value_sorted.extend(sorted(group))
return type_and_value_sorted | [
1389,
219,
2690,
604,
44
] |
def METHOD_NAME(test, checks=None):
"""Kubernetescluster create operation"""
if checks is None:
checks = []
test.cmd(
"az networkcloud kubernetescluster create --name {name} --resource-group {rg} "
"--location {location} --extended-location name={extendedLocation} type={extendedLocationType} "
"--kubernetes-version {kubernetesVersion} "
"--admin-username {adminUsername} --ssh-key-values {sshKey} "
"--aad-configuration admin-group-object-ids={adminGroupObjectIds} "
"--initial-agent-pool-configurations {initialNodeConfiguration} "
"--control-plane-node-configuration count={count} vmSkuName={vmSkuName} adminUsername={cpAdminUsername} sshKeyValues={cpSshKeyList} "
"--network-configuration cloud-services-network-id={csnId} cni-network-id={cniId} pod-cidrs={podCidrs} service-cidrs={serviceCidrs} dns-service-ip={dnsServiceIp} "
"bgp-service-load-balancer-configuration.bgp-advertisements={bgpAdvertisements} "
"bgp-service-load-balancer-configuration.fabric-peering-enabled={fabricPeeringEnabled} "
"bgp-service-load-balancer-configuration.ip-address-pools={ipAddressPools} "
"--tags {tags}"
) | [
367,
129
] |
def METHOD_NAME(callable):
"""A hack to make json_tricks work on Python 2, after some unknown module
has injected getfullargspec into inspect.
"""
from functools import partial
from logging import warn
from sys import version_info
from inspect import getargspec
if type(callable) == partial and version_info[0] == 2:
if not hasattr(get_arg_names, '__warned_partial_argspec'):
get_arg_names.__warned_partial_argspec = True
warn("'functools.partial' and 'inspect.getargspec' are not compatible in this Python version; "
"ignoring the 'partial' wrapper when inspecting arguments of {}, which can lead to problems".format(callable))
return set(getargspec(callable.func).args)
argspec = getargspec(callable)
return set(argspec.args) | [
19,
718,
83
] |
def METHOD_NAME(self):
self.model.METHOD_NAME()
# we only sync buffer in the first eval iteration
# so that future eval iterations can be done without communication
self._first_eval_run = True | [
1171
] |
def METHOD_NAME(self):
parsers, args = parse_args(
['slos', 'list', '-p', PROJECT_ID, SERVICE_ID])
self.assertEqual(args.parser, 'slos')
self.assertEqual(args.operation, 'list')
self.assertEqual(args.project, PROJECT_ID)
self.assertEqual(args.service_id, SERVICE_ID) | [
9,
214,
335,
17061,
245
] |
def METHOD_NAME(self, value: str):
self.layer.color_by = value | [
194,
36,
604
] |
def METHOD_NAME(self) -> Dict[str, Union[datasets.Dataset, datasets.DatasetDict]]:
if self.data_config.pad_to_max_length:
padding = "max_length"
else:
# We will pad later, dynamically at batch creation to the max_seq_length in each batch.
padding = False
# We cannot use self.tokenizer as a non-local variable in the preprocess_function if we
# want map to be able to cache the output of the tokenizer. Hence, the preprocess_function
# takes a tokenizer explicitly as an input and we create a closure using functools.partial.
def preprocess_function(tokenizer, padding, max_length, examples):
# Tokenize the texts
return tokenizer(
examples["premise"],
examples["hypothesis"],
padding=padding,
max_length=max_length,
truncation=True,
)
train_dataset = self.raw_datasets["train"].map(
functools.partial(
preprocess_function, self.tokenizer, padding, self.data_config.max_seq_length
),
batched=True,
load_from_cache_file=not self.data_config.overwrite_cache,
)
eval_dataset = self.raw_datasets["validation"].map(
functools.partial(
preprocess_function, self.tokenizer, padding, self.data_config.max_seq_length
),
batched=True,
load_from_cache_file=not self.data_config.overwrite_cache,
)
if self.data_config.pad_to_max_length:
self.collator = transformers.default_data_collator
else:
collator = transformers.DataCollatorWithPadding(
self.tokenizer, pad_to_multiple_of=8 if self.hparams.use_apex_amp else None
)
self.collator = lambda x: collator(x).data
return {"train": train_dataset, "validation": eval_dataset} | [
56,
4146
] |
def METHOD_NAME(self):
"""
Get the list of lock file names under the current lock dir
:returns: a list with the full path of files that match the
lockfile pattern
:rtype: list
"""
try:
files = os.listdir(self.lock_dir)
pattern = re.compile(r'avocado-vt-joblock-[0-9a-f]{40}-[0-9]+'
r'-[0-9a-z]{8}\.pid')
return [os.path.join(self.lock_dir, _) for _ in files
if pattern.match(_)]
except OSError as e:
if e.errno == errno.ENOENT:
return [] | [
19,
625,
1537
] |
def METHOD_NAME(
adata: anndata.AnnData,
moran_i_genes: Union[np.ndarray, list],
num_clusters: int = 5,
layer: Union[str, None] = None,
) -> np.ndarray:
"""Identify archetypes from the anndata object.
Args:
adata: Anndata object of interests.
moran_i_genes: genes that are identified as singificant autocorrelation genes in space based on Moran's I.
num_clusters: number of archetypes.
layers: the layer for the gene expression, can be None which corresponds to adata.X.
Returns:
archetypes: the archetypes within the genes with high moran I scores.
Examples:
>>> archetypes = st.tl.archetypes(adata)
>>> adata.obs = pd.concat((adata.obs, df), 1)
>>> arch_cols = adata.obs.columns
>>> st.pl.space(adata, basis="spatial", color=arch_cols, pointsize=0.1, alpha=1)
"""
if layer is None:
exp = adata[:, moran_i_genes].X.A
else:
exp = adata[:, moran_i_genes].layers[layer].A
METHOD_NAME, clusters, gene_corrs = find_spatial_archetypes(num_clusters, exp.T)
arch_cols = ["archetype %d" % i for i in np.arange(num_clusters)]
df = pd.DataFrame(METHOD_NAME.T, columns=arch_cols)
df.index = adata.obs_names
METHOD_NAME = pd.concat((adata.obs, df), 1)
return METHOD_NAME | [
-1
] |