text (stringlengths 15 to 7.82k) | ids (sequencelengths 1 to 7) |
---|---|
def METHOD_NAME(__a: object, __b: object) -> Any: ... | [
2338
] |
def METHOD_NAME():
target = 'classBackgroundLayer('
should_match = 'class BackgroundLayer(cocos.layer.Layer)'
filling = '\n'.join([
'this is a line',
'this is another'
])
text = should_match + '\n' + filling
lines = text.split('\n')
it = enumerate(lines)
start_line = get_start_line(it, target)
assert start_line == 0
assert six.next(it) == (1, lines[1]) | [
9,
447,
15995,
534,
590
] |
def METHOD_NAME() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument("reservoir_model_path", type=str, help="Input Zarr path")
parser.add_argument("output_path", type=str, help="Directory to save outputs to")
parser.add_argument(
"validation_config_path", type=str, help="Path to validation data config"
)
parser.add_argument(
"n_synchronize",
type=int,
help=(
"Number of timesteps from start of validation to use in reservoir "
"synchronization (not used in prediction)."
),
)
parser.add_argument(
"--n-validation-batches",
type=int,
default=None,
help="Number of batch data netcdfs to use for validation. Defaults to use all.",
)
return parser | [
19,
1319
] |
def METHOD_NAME(pidfile):
"""
internal helper.
:returns FilePath: a path to use for file-locking the given pidfile
"""
return pidfile.sibling("{}.lock".format(pidfile.basename())) | [
10794,
24,
-1
] |
def METHOD_NAME(self):
# test bottomup training
dataset = self.build_atrw_dataset(data_mode='bottomup')
self.assertEqual(len(dataset), 2)
self.check_data_info_keys(dataset[0], data_mode='bottomup')
# test bottomup testing
dataset = self.build_atrw_dataset(data_mode='bottomup', test_mode=True)
self.assertEqual(len(dataset), 2)
self.check_data_info_keys(dataset[0], data_mode='bottomup') | [
9,
6795
] |
def METHOD_NAME(self):
from traits.api import Interface
class IFoo(Interface):
pass
obj = {}
# Global `register_provides`.
register_provides(dict, IFoo)
self.assertEqual(obj, adapt(obj, IFoo)) | [
9,
285,
372,
3075
] |
def METHOD_NAME(self):
return str(toml.dumps(self.to_dict())).strip().split('\n') | [
24,
4842,
144
] |
def METHOD_NAME(self):
dirname = os.path.join(support.TESTFN, 'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
filename = '\xdf-\u66e8\u66e9\u66eb'
with support.temp_cwd(dirname):
with open(filename, 'wb') as f:
f.write((filename + '\n').encode("utf-8"))
os.access(filename,os.R_OK)
os.remove(filename) | [
9,
2851
] |
def METHOD_NAME(self):
assert self.roidbs
if self.proposals and len(self.proposals.keys()) > 0:
logger.info("merge proposals to annos")
for id, record in enumerate(self.roidbs):
image_id = int(record["im_id"])
if image_id not in self.proposals.keys():
logger.info("image id :{} no proposals".format(image_id))
record["proposals"] = np.array(self.proposals.get(image_id, []), dtype=np.float32)
self.roidbs[id] = record | [
411,
7802,
4528
] |
def METHOD_NAME(self):
self.test_graph = create_graph()
from neural_compressor.adaptor.tf_utils.graph_rewriter.generic.fetch_weight_from_reshape import (
FetchWeightFromReshapeOptimizer,
)
convert_graph = FetchWeightFromReshapeOptimizer(self.test_graph).do_transformation()
handled = False
for node in convert_graph.node:
if node.op == "Conv2D" and node.input[1] == "reshape/weight_0":
handled = True
break
self.assertEqual(handled, True) | [
9,
1047,
1336,
280,
3013,
968
] |
def METHOD_NAME(self):
return "DELETE" | [
103
] |
def METHOD_NAME(self, value):
"""Helper function to transform the dictionary from strings or integers to bools"""
if value in [0, 'F', 'f']:
return False
if value in [1, 'T', 't']:
return True
return True | [
1053,
24,
863
] |
def METHOD_NAME(command):
assert not match(command) | [
9,
130,
590
] |
def METHOD_NAME(inputs):
qml.AmplitudeEmbedding(inputs, wires=range(n_wires), pad_with=0, normalize=True)
return qml.expval(qml.PauliZ(0)) | [
1708,
2590
] |
def METHOD_NAME(in_length, out_length, ctc_type):
if ctc_type == "builtin" or ctc_type == "cudnnctc":
_ctcloss_sum = torch.nn.CTCLoss(reduction="sum")
def torch_ctcloss(th_pred, th_target, th_ilen, th_olen):
th_pred = th_pred.log_softmax(2)
loss = _ctcloss_sum(th_pred, th_target, th_ilen, th_olen)
# Batch-size average
loss = loss / th_pred.size(1)
return loss
elif ctc_type == "gtnctc":
pytest.importorskip("gtn")
from espnet.nets.pytorch_backend.gtn_ctc import GTNCTCLossFunction
_ctcloss_sum = GTNCTCLossFunction.apply
def torch_ctcloss(th_pred, th_target, th_ilen, th_olen):
targets = [t.tolist() for t in th_target]
log_probs = torch.nn.functional.log_softmax(th_pred, dim=2)
loss = _ctcloss_sum(log_probs, targets, th_ilen, 0, "none")
return loss
n_out = 7
input_length = numpy.array(in_length, dtype=numpy.int32)
label_length = numpy.array(out_length, dtype=numpy.int32)
np_pred = [
numpy.random.rand(il, n_out).astype(numpy.float32) for il in input_length
]
np_target = [
numpy.random.randint(0, n_out, size=ol, dtype=numpy.int32)
for ol in label_length
]
# NOTE: np_pred[i] seems to be transposed and used axis=-1 in e2e_asr.py
ch_pred = F.separate(F.pad_sequence(np_pred), axis=-2)
ch_target = F.pad_sequence(np_target, padding=-1)
ch_loss = F.connectionist_temporal_classification(
ch_pred, ch_target, 0, input_length, label_length
).data
th_pred = pad_list([torch.from_numpy(x) for x in np_pred], 0.0).transpose(0, 1)
if ctc_type == "gtnctc":
# gtn implementation expects targets as list
th_target = np_target
# keep as B x T x H for gtn
th_pred = th_pred.transpose(0, 1)
else:
th_target = torch.from_numpy(numpy.concatenate(np_target))
th_ilen = torch.from_numpy(input_length)
th_olen = torch.from_numpy(label_length)
th_loss = torch_ctcloss(th_pred, th_target, th_ilen, th_olen).numpy()
numpy.testing.assert_allclose(th_loss, ch_loss, 0.05) | [
9,
3276,
1572
] |
def METHOD_NAME(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name") | [
156
] |
def METHOD_NAME(self, client):
json_input = {"hello":"world"}
client.body_three_types(json_input)
content = b"hello, world"
client.body_three_types(content) | [
9,
2829,
2756,
119
] |
def METHOD_NAME():
"""Returns a tuple of (old_layers, new_layers, and optimizer)."""
model = SimpleModel(num_features=1, num_classes=10)
policy: Mapping[Type[torch.nn.Module], module_surgery.ReplacementFunction] = {
torch.nn.Linear: _CopyLinear.from_linear
}
opt = torch.optim.SGD(model.parameters(), lr=.001)
orig_linear_modules = [model.fc1, model.fc2]
module_surgery.replace_module_classes(model, policies=policy, optimizers=opt)
new_linear_modules = [model.fc1, model.fc2]
return orig_linear_modules, new_linear_modules, opt | [
968,
13927,
551
] |
def METHOD_NAME(masks):
tile = DataTile(
np.ones((2 * 2, 10, 10)),
tile_slice=Slice(origin=(0, 0, 0), shape=Shape((2 * 2, 10, 10), sig_dims=2)),
scheme_idx=0,
)
slice_ = masks.get_masks_for_slice(tile.tile_slice)
assert slice_.shape == (100, 5) | [
9,
43,
-1,
988
] |
def METHOD_NAME(
self
) -> None:
def mock_run_cmd(unused_cmd: str) -> str:
return 'scripts/setup.py\nscripts/setup_gae.py'
with self.swap(common, 'run_cmd', mock_run_cmd):
actual_scripts = (
repo_specific_changes_fetcher.get_setup_scripts_changes_status(
'release_tag'))
expected_scripts = {
'scripts/setup.py': True,
'scripts/setup_gae.py': True,
'scripts/install_third_party_libs.py': False,
'scripts/install_third_party.py': False
}
self.assertEqual(actual_scripts, expected_scripts) | [
9,
19,
102,
2942,
1103,
452,
24
] |
def METHOD_NAME(self, obj):
addObjectProperty(obj, 'FieldName', "S", "App::PropertyString", "Scalar transport",
"Name of the scalar transport field")
addObjectProperty(obj, 'DiffusivityFixed', False, "App::PropertyBool", "Scalar transport",
"Use fixed value for diffusivity rather than viscosity")
# This is actually rho*diffusivity, but this is what OpenFOAM uses
addObjectProperty(obj, 'DiffusivityFixedValue', "0.001 kg/m/s", "App::PropertyQuantity", "Scalar transport",
"Diffusion coefficient for fixed diffusivity")
addObjectProperty(obj, 'RestrictToPhase', False, "App::PropertyBool", "Scalar transport",
"Restrict transport within phase")
addObjectProperty(obj, 'PhaseName', "water", "App::PropertyString", "Scalar transport",
"Transport within phase")
addObjectProperty(obj, 'InjectionRate', '1 kg/s', "App::PropertyQuantity", "Scalar transport",
"Injection rate")
addObjectProperty(obj, 'InjectionPoint', FreeCAD.Vector(0, 0, 0), "App::PropertyPosition", "Scalar transport",
"Location of the injection point") | [
176,
748
] |
def METHOD_NAME(f, v):
var = tkinter.IntVar(f)
var.set(v)
w = tkinter.Entry(f, textvariable=var, validatecommand=validate_posint, validate="all", width=10)
return w, var | [
-1
] |
def METHOD_NAME() -> Likelihood:
return Gaussian(variance=Setup.likelihood_variance) | [
4141
] |
def METHOD_NAME(request, category):
form = DonationGiftForm(data=request.POST, category=category, request=request)
if form.is_valid():
messages.add_message(request, messages.SUCCESS, "Danke für Deine Bestellung!")
form.save(request)
return get_redirect(request)
messages.add_message(request, messages.ERROR, "Form-Fehler!")
return get_redirect(request, next=request.META.get("HTTP_REFERER", "/")) | [
93,
852
] |
def METHOD_NAME(self):
"""
Test unicode(): default arguments with unicode-subclass input.
"""
class UnicodeSubclass(unicode):
pass
s = UnicodeSubclass(u"foo")
loader = Loader()
actual = loader.unicode(s)
self.assertString(actual, u"foo") | [
9,
774,
756,
362,
774,
9260
] |
def METHOD_NAME(users):
return "<br><br>" + "<br>".join(users) | [
275,
277,
43,
1283,
24
] |
def METHOD_NAME(self):
return self.fHprSnap | [
19,
13052,
4792
] |
def METHOD_NAME(self):
"""Verify the method retrieves data from the User API and caches it."""
username = Faker().user_name()
user_data = {
"username": username,
}
user_url = f"{self.site_configuration.user_api_url}accounts/{username}"
responses.add(responses.GET, user_url, body=json.dumps(user_data), content_type=JSON, status=200)
verification_data = {
"username": "jdoe",
"verified_name": "Jonathan Doe",
"profile_name": "Jon Doe",
"verification_attempt_id": 123,
"proctored_exam_attempt_id": None,
"is_verified": True,
"use_verified_name_for_certs": False,
}
verification_url = f"{self.site_configuration.name_verification_api_url}?username={username}"
responses.add(
responses.GET, verification_url, body=json.dumps(verification_data), content_type=JSON, status=200
)
self.mock_access_token_response()
expected_data = user_data
expected_data["verified_name"] = "Jonathan Doe"
expected_data["use_verified_name_for_certs"] = False
actual = self.site_configuration.get_user_api_data(username)
self.assertEqual(actual, expected_data)
self.assertEqual(len(responses.calls), 3)
# Verify the data is cached
responses.reset()
actual = self.site_configuration.get_user_api_data(username)
self.assertEqual(actual, expected_data)
self.assertEqual(len(responses.calls), 0) | [
9,
19,
21,
58,
365,
41,
596
] |
def METHOD_NAME(x):
A, B = get_model_matrix()
start = time.time()
K, _, _ = dlqr(A, B, Q, R)
u = -K @ x
elapsed_time = time.time() - start
print(f"calc time:{elapsed_time:.6f} [sec]")
return u | [
10564,
401
] |
def METHOD_NAME(self):
email = "[email protected]"
cardholder_name = "Tom %s" % random.randint(1, 10000)
customer_id = "%s" % random.randint(1, 10000)
expiration_date = "10/2012"
number = CreditCardNumbers.MasterCard
postal_code = "44444"
customer = Customer.create({
"id": customer_id,
"email": email,
"credit_card": {
"cardholder_name": cardholder_name,
"expiration_date": expiration_date,
"number": number,
"billing_address": {
"postal_code": postal_code
},
"options": {
"verify_card": True
}
}
}).customer
found_verifications = CreditCardVerification.search(
CreditCardVerificationSearch.credit_card_expiration_date == expiration_date,
CreditCardVerificationSearch.credit_card_cardholder_name == cardholder_name,
CreditCardVerificationSearch.credit_card_number == number,
CreditCardVerificationSearch.customer_email == email,
CreditCardVerificationSearch.customer_id == customer_id,
CreditCardVerificationSearch.billing_postal_code == postal_code
)
self.assertEqual(1, found_verifications.maximum_size)
self.assertEqual(customer.credit_cards[0].token, found_verifications.first.credit_card["token"]) | [
9,
75,
526,
342
] |
def METHOD_NAME(osd_label=constants.OSD_APP_LABEL, namespace=None):
"""
Fetches info about osd deployments in the cluster
Args:
osd_label (str): label associated with osd deployments
(default: defaults.OSD_APP_LABEL)
namespace (str): Namespace in which ceph cluster lives
(default: config.ENV_DATA["cluster_namespace"])
Returns:
list: OSD deployment OCS instances
"""
namespace = namespace or config.ENV_DATA["cluster_namespace"]
osds = get_deployments_having_label(osd_label, namespace)
return osds | [
19,
7207,
13356
] |
def METHOD_NAME(self):
r"""
:math:`\tau_+`
:rtype: float
"""
return self.__tau_plus | [
319,
222
] |
def METHOD_NAME(self):
"""
Retrieves the presence of the device
Returns:
bool: True if device is present, False if not
"""
attr_path = HWMON_DIR + self.fan_presence_attr
attr_rv = self.__get_attr_value(attr_path)
if (attr_rv != 'ERR'):
if (attr_rv == '1'):
return True
else:
return False
return None | [
19,
4061
] |
def METHOD_NAME(sparse_nn_data):
with pytest.raises(ValueError):
nearest_neighbors(
sparse_nn_data,
10,
"seuclidean",
{},
False,
np.random,
) | [
9,
7883,
1068,
1341,
2087,
365
] |
def METHOD_NAME():
pytest.importorskip("matplotlib")
plt.figure()
with quantity_support():
x = [1, 2, 3] * u.s
y = [1, 2, 3] * u.m
yerr = [3, 2, 1] * u.cm
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr)
assert ax.xaxis.get_units() == u.s
assert ax.yaxis.get_units() == u.m | [
9,
1878,
-1
] |
def METHOD_NAME(self, collection_id, lib_albums):
lib_ids = {x.mb_albumid for x in lib_albums}
albums_in_collection = self._get_albums_in_collection(collection_id)
remove_me = list(set(albums_in_collection) - lib_ids)
for i in range(0, len(remove_me), FETCH_CHUNK_SIZE):
chunk = remove_me[i:i + FETCH_CHUNK_SIZE]
mb_call(
musicbrainzngs.remove_releases_from_collection,
collection_id, chunk
) | [
188,
1038
] |
def METHOD_NAME(log_records, search_string):
for log_record in logs.records[::-1]:
if isinstance(log_record.msg, str) and search_string in log_record.msg:
return json.loads(log_record.msg) | [
19,
679,
390,
577,
6504,
144
] |
def METHOD_NAME(self):
"""Return irreducible q-points and weights."""
return self._qpoints, self._weights | [
19,
13668
] |
def METHOD_NAME(self, path):
if not HAS_RARFILE:
raise CuckooPackageError("rarfile Python module not installed in guest")
# Check file extension.
path = check_file_extension(path, ".rar")
root = os.environ["TEMP"]
password = self.options.get("password")
exe_regex = re.compile(r"(\.exe|\.scr|\.msi|\.bat|\.lnk)$", flags=re.IGNORECASE)
rarinfos = self.get_infos(path)
self.extract_rar(path, root, password)
file_name = self.options.get("file")
# If no file name is provided via option, take the first file.
if file_name is None:
# No name provided try to find a better name.
if len(rarinfos):
# Attempt to find a valid exe extension in the archive
for f in rarinfos:
if exe_regex.search(f.filename):
file_name = f.filename
break
# Default to the first one if none found
file_name = file_name or rarinfos[0].filename
log.debug("Missing file option, auto executing: %s", file_name)
else:
raise CuckooPackageError("Empty RAR archive")
file_path = os.path.join(root, file_name)
if file_name.lower().endswith(".lnk"):
cmd_path = self.get_path("cmd.exe")
cmd_args = f'/c start /wait "" "{file_path}"'
return self.execute(cmd_path, cmd_args, file_path)
return self.execute(file_path, self.options.get("arguments"), file_path) | [
447
] |
def METHOD_NAME() -> None:
rule_runner = RuleRunner(
rules=[QueryRule(CompleteEnvironmentVars, [EnvironmentName])],
target_types=[DockerEnvironmentTarget],
inherent_environment=EnvironmentName("docker"),
)
localhost_platform = Platform.create_for_localhost()
if localhost_platform == Platform.linux_arm64:
image_sha = "65a4aad1156d8a0679537cb78519a17eb7142e05a968b26a5361153006224fdc"
platform = Platform.linux_arm64.value
else:
image_sha = "a1801b843b1bfaf77c501e7a6d3f709401a1e0c83863037fa3aab063a7fdb9dc"
platform = Platform.linux_x86_64.value
rule_runner.write_files(
{
"BUILD": dedent(
f"""\
docker_environment(
name='docker',
image='centos@sha256:{image_sha}',
platform='{platform}',
)
"""
)
}
)
rule_runner.set_options(["--environments-preview-names={'docker': '//:docker'}"])
result = dict(rule_runner.request(CompleteEnvironmentVars, []))
# HOSTNAME is not deterministic across machines, so we don't care about the value.
assert "HOSTNAME" in result
result.pop("HOSTNAME")
assert dict(result) == {
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
"HOME": "/root",
} | [
9,
223,
676,
485,
1659
] |
def METHOD_NAME(self):
if self._target_os in ("Macos"):
return False
return True | [
220,
947
] |
def METHOD_NAME(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id") | [
147
] |
def METHOD_NAME(self):
parameters = {
**self.serialize_query_param(
"api-version", "2018-11-01",
required=True,
),
}
return parameters | [
539,
386
] |
def METHOD_NAME(self):
setup_class_common("server_config") | [
102,
2
] |
def METHOD_NAME(datapath):
db = "pipe.neu"
TestSection(db)
OpenDatabase(pjoin(datapath,db))
AddMeshPlot()
DrawPlots()
v = View3DAttributes()
v.viewNormal = (-0.786306, 0.379526, 0.487527)
v.focus = (0.149902, 0.212562, 0.124929)
v.viewUp = (0.316186, 0.925114, -0.210215)
v.viewAngle = 30
v.parallelScale = 0.585963
v.nearPlane = -1.17193
v.farPlane = 1.17193
v.imagePan = (0.0173275, 0.033058)
v.imageZoom = 1.45734
v.perspective = 1
v.eyeAngle = 2
v.centerOfRotationSet = 0
v.centerOfRotation = (0.149902, 0.212562, 0.124929)
SetView3D(v)
Test("PATRAN1_0")
v2 = View3DAttributes()
v2.viewNormal = (-0.786306, 0.379526, 0.487527)
v2.focus = (0.149902, 0.212562, 0.124929)
v2.viewUp = (0.316186, 0.925114, -0.210215)
v2.viewAngle = 30
v2.parallelScale = 0.585963
v2.nearPlane = -1.17193
v2.farPlane = 1.17193
v2.imagePan = (-0.108127, 0.0971661)
v2.imageZoom = 6.92887
v2.perspective = 1
v2.eyeAngle = 2
v2.centerOfRotationSet = 0
v2.centerOfRotation = (0.149902, 0.212562, 0.124929)
SetView3D(v2)
Test("PATRAN1_1")
DeleteActivePlots()
AddPseudocolorPlot()
SetView3D(v)
Test("PATRAN1_2")
DeleteActivePlots()
CloseDatabase(pjoin(datapath,db)) | [
2222
] |
def METHOD_NAME(config, str2id, term_file, terms, item_distribution):
train_data = []
neg_samples = []
with io.open(config.train_data, encoding=config.encoding) as f:
for idx, line in enumerate(f):
if idx % 100000 == 0:
log.info("%s readed %s lines" % (config.train_data, idx))
slots = []
for col_idx, col in enumerate(line.strip("\n").split("\t")):
s = col[: config.max_seqlen]
if s not in str2id:
str2id[s] = len(str2id)
term_file.write(str(col_idx) + "\t" + col + "\n")
item_distribution.append(0)
slots.append(str2id[s])
src = slots[0]
dst = slots[1]
neg_samples.append(slots[2:])
train_data.append((src, dst))
train_data = np.array(train_data, dtype="int64")
np.save(os.path.join(config.graph_work_path, "train_data.npy"), train_data)
if len(neg_samples) != 0:
np.save(os.path.join(config.graph_work_path, "neg_samples.npy"), np.array(neg_samples)) | [
557,
548,
2726,
849,
365
] |
def METHOD_NAME(self) -> str:
"""
The ID of the private endpoint connection.
"""
return pulumi.get(self, "id") | [
147
] |
def METHOD_NAME(self):
self._test_mapping(SearchRoute, Route)
queryable_fields = SearchRoute.queryable_fields
self.assertIn('rmina', queryable_fields)
self.assertTrue(queryable_fields['rmina']._range)
self.assertIn('act', queryable_fields)
self.assertIsNotNone(queryable_fields['act']._enum)
self.assertIn('dhei', queryable_fields)
self.assertIn('ralt', queryable_fields) | [
9,
2476,
445
] |
def METHOD_NAME(fname, exp_shape):
"""test_decode_dicom_image"""
dcm_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "test_dicom", fname
)
file_contents = tf.io.read_file(filename=dcm_path)
dcm_image = tfio.image.decode_dicom_image(
contents=file_contents,
dtype=tf.float32,
on_error="strict",
scale="auto",
color_dim=True,
)
assert dcm_image.numpy().shape == exp_shape | [
9,
1268,
7087,
660
] |
def METHOD_NAME(self):
current_func = functions[self.function_name].func
params = [si.sv_get(default=[[]], deepcopy=False) for si in self.inputs]
matching_f = list_match_func[self.list_match]
desired_levels = [2 for p in params]
ops = [current_func, self.list_match, self.output_numpy]
result = recurse_f_level_control(params, ops, logic_numpy, matching_f, desired_levels)
self.outputs[0].sv_set(result) | [
356
] |
f METHOD_NAME(self): | [
9,
446,
0,
-1
] |
def METHOD_NAME(self) -> None:
# language=rst
"""
Reset the pipeline.
"""
self.network.METHOD_NAME()
self.step_count = 0 | [
656,
551,
2045
] |
def METHOD_NAME(self):
with urlopen(self.url) as fd:
content = fd.read().decode()
return content | [
1047,
365
] |
def METHOD_NAME(self):
zone = self.driver.list_zones()[0]
name = "test"
type = RecordType.A
data = "200.150.100.50"
record = self.driver.create_record(name, zone, type, data)
self.assertEqual(record.id, "123")
self.assertEqual(record.name, name)
self.assertEqual(record.type, "A")
self.assertEqual(record.data, data) | [
9,
129,
148
] |
def METHOD_NAME(self, arg):
""" Run tests:
* full - run all tests
* unit - run tests (also for each backend)
* any backend name (e.g. pyside2, pyside, pyqt4, etc.) -
run tests for the given backend
* nobackend - run tests that do not require a backend
* extra - run extra tests (line endings and style)
* lineendings - test line ending consistency
* flake - flake style testing (PEP8 and more)
* docs - test docstring parameters for correctness
* examples - run all examples
* examples [examples paths] - run given examples
"""
# Note: By default, "python make full" *will* produce coverage data,
# whereas vispy.test('full') will not. This is because users won't
# really care about coveraged, but developers will.
if not arg:
return self.help('test')
from vispy import METHOD_NAME
try:
args = arg.split(' ')
METHOD_NAME(args[0], ' '.join(args[1:]), coverage=True)
except Exception as err:
print(err)
if not isinstance(err, RuntimeError):
type_, value, tb = sys.exc_info()
traceback.print_exception(type, value, tb)
raise SystemExit(1) | [
9
] |
def METHOD_NAME(
query_context: QueryContext, query_obj: QueryObject, force_cached: bool = False
) -> dict[str, Any]:
datasource = _get_datasource(query_context, query_obj)
query_obj = copy.copy(query_obj)
query_obj.is_timeseries = False
query_obj.orderby = []
query_obj.metrics = None
query_obj.post_processing = []
qry_obj_cols = []
for o in datasource.columns:
if isinstance(o, dict):
qry_obj_cols.append(o.get("column_name"))
else:
qry_obj_cols.append(o.column_name)
query_obj.columns = qry_obj_cols
query_obj.from_dttm = None
query_obj.to_dttm = None
return _get_full(query_context, query_obj, force_cached) | [
19,
700
] |
def METHOD_NAME(self):
event_yielder = self.yield_events_dag_succeeds()
attribs = {
"start_execution.return_value": ("remote_execution_started", "12345", "abcdefg123"),
"rcv_next.side_effect": event_yielder,
}
self._run_engine(attribs) | [
9,
2437,
1693,
722
] |
def METHOD_NAME(batch, separator_token_id):
"""Segment embeddings as described in [1]
The values {0,1} were found in the repository [2].
Attributes:
batch: torch.Tensor, size [batch_size, block_size]
Batch of input.
separator_token_id: int
The value of the token that separates the segments.
[1] Liu, Yang, and Mirella Lapata. "Text summarization with pretrained encoders."
arXiv preprint arXiv:1908.08345 (2019).
[2] https://github.com/nlpyang/PreSumm (/src/prepro/data_builder.py, commit fac1217)
"""
batch_embeddings = []
for sequence in batch:
sentence_num = -1
embeddings = []
for s in sequence:
if s == separator_token_id:
sentence_num += 1
embeddings.append(sentence_num % 2)
batch_embeddings.append(embeddings)
return torch.tensor(batch_embeddings) | [
226,
466,
44,
308
] |
def METHOD_NAME(self, args: CommonConfig) -> str:
"""Return the base commit or an empty string.""" | [
19,
414,
1160
] |
def METHOD_NAME() -> None:
import trio
# no warning on accessing the submodule
assert trio.tests
# only when accessing a submodule member
with pytest.warns(TrioDeprecationWarning):
assert trio.tests.test_abc # type: ignore[attr-defined] | [
9,
450,
137,
-1
] |
def METHOD_NAME(self, *args, **kwargs):
pass | [
849
] |
def METHOD_NAME(
img: np.ndarray,
model: OnnxModel,
tile_size: TileSize,
separate_alpha: bool,
) -> np.ndarray:
"""Upscales an image with a pretrained model"""
settings = get_settings()
session = get_onnx_session(
model,
settings.gpu_index,
settings.execution_provider,
settings.tensorrt_fp16_mode,
settings.tensorrt_cache_path,
)
input_shape, in_nc, req_width, req_height = get_input_shape(session)
_, out_nc, _, _ = get_output_shape(session)
change_shape = input_shape == "BHWC"
exact_size = None
if req_width is not None:
exact_size = req_width, req_height or req_width
elif req_height is not None:
exact_size = req_width or req_height, req_height
h, w, c = get_h_w_c(img)
logger.debug(f"Image is {h}x{w}x{c}")
return convenient_upscale(
img,
in_nc,
out_nc,
lambda i: upscale(i, session, tile_size, change_shape, exact_size),
separate_alpha,
) | [
6885,
660,
1716
] |
def METHOD_NAME(cls, manager):
"""Adopt *manager* into pyplot and make it the active manager."""
if not hasattr(manager, "_cidgcf"):
manager._cidgcf = manager.canvas.mpl_connect(
"button_press_event", lambda event: cls.set_active(manager))
fig = manager.canvas.figure
fig.number = manager.num
label = fig.get_label()
if label:
manager.set_window_title(label)
cls.set_active(manager) | [
0,
80,
923,
722
] |
def METHOD_NAME(self):
"""mapserver exceptions behave with multiple threads"""
workers = []
for i in range(10):
name = 'e%d' % (i)
thread = threading.Thread(target=trigger_exception, name=name, args=(name,))
workers.append(thread)
thread.start() | [
9,
504,
457,
1573
] |
def METHOD_NAME():
# Testing the initialization when seed=None
# Since internal states randomly generated,
# we just check the output classes
z = function(inputs=[], outputs=[RandomState()])()
aes_res = z[0]
assert isinstance(aes_res, np.random.RandomState)
z = function(inputs=[], outputs=[default_rng()])()
aes_res = z[0]
assert isinstance(aes_res, np.random.Generator) | [
9,
236,
8054,
829,
654,
484
] |
def METHOD_NAME(subparsers):
autoscale_parser = subparsers.add_parser(
"autoscale",
help="Manually scale a service up and down manually, bypassing the normal autoscaler",
)
autoscale_parser.add_argument(
"-s", "--service", help="Service that you want to stop. Like 'example_service'."
).completer = lazy_choices_completer(list_services)
autoscale_parser.add_argument(
"-i",
"--instance",
help="Instance of the service that you want to stop. Like 'main' or 'canary'.",
required=True,
).completer = lazy_choices_completer(list_instances)
autoscale_parser.add_argument(
"-c",
"--cluster",
help="The PaaSTA cluster that has the service instance you want to stop. Like 'pnw-prod'.",
required=True,
).completer = lazy_choices_completer(list_clusters)
autoscale_parser.add_argument(
"--set", help="Set the number to scale to. Must be an Int.", type=int
)
autoscale_parser.set_defaults(command=paasta_autoscale) | [
238,
3509
] |
def METHOD_NAME(self, instance):
attrs = super().METHOD_NAME(instance)
if self.use_row_ordering_attributes:
attrs["id"] = "page_%d" % instance.id
attrs["data-page-title"] = instance.get_admin_display_title()
return attrs | [
19,
843,
1685
] |
def METHOD_NAME():
model = get_model(
io.StringIO(
"\n".join(
[
par_base,
"DMX 15",
"DMX_0001 16 1",
"DMXR1_0001 58000",
"DMXR2_0001 59000",
]
)
)
)
toas = make_fake_toas_uniform(57000, 57900, 10, model)
with pytest.raises(MissingTOAs) as e:
model.validate_toas(toas)
assert e.value.parameter_names == ["DMX_0001"]
fitter = pint.fitter.WLSFitter(toas, model)
with pytest.raises(MissingTOAs):
fitter.fit_toas() | [
9,
11369,
654,
12655
] |
def METHOD_NAME(s: str) -> bool:
if s.startswith(":"):
return True
if ":" in s and "//" in s:
return True
return False | [
6285,
2307,
636
] |
METHOD_NAME(self): | [
19,
526
] |
def METHOD_NAME(self, p):
'''template_func : tf_format_json
| tf_echo
| tf_generic''' | [
2054,
671,
717
] |
async def METHOD_NAME(pipeline_response):
deserialized = self._deserialize("OperationPage", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem) | [
297,
365
] |
def METHOD_NAME(self):
"""
Render navbar from home page with restricted UI set to True.
This restricts the user to be able to view ONLY relationships on the navbar.
It then checks the UI for these restrictions.
"""
self.add_permissions("extras.view_relationship")
user_permissions = self.user.get_all_permissions()
self.browser.visit(self.live_server_url)
for tab_name, groups in self.navbar.items():
tab_flag = False
for _, items in groups.items():
for _, item_details in items.items():
if item_details["permission"] in user_permissions:
tab_flag = True
# XPath to find tabs using the tab name
tabs = self.browser.find_by_xpath(f"//*[@id='navbar']//*[contains(text(), '{tab_name}')]")
if tab_flag:
self.assertEqual(len(tabs), 1)
else:
self.assertEqual(len(tabs), 0) | [
9,
10535,
338,
8611,
882
] |
def METHOD_NAME(tmp_path: Path) -> None:
src_target = tmp_path / "src.py"
python_codegen(metaschema_file_uri, src_target)
assert os.path.exists(src_target)
with open(src_target) as f:
assert f.read() == inspect.getsource(cg_metaschema) | [
9,
1094,
135,
370,
1,
24,
153
] |
def METHOD_NAME(self) -> None:
self.reset()
for driver in list(self._drivers):
self.terminate(driver) | [
950
] |
def METHOD_NAME(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]:
"""Lists all available Relay REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.relay.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2021-11-01"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data) | [
245
] |
def METHOD_NAME(self):
assert resource_string("missing") is None
assert resource_string("missing", "gone") is None
assert resource_string('test/data/empty.txt') == six.b('empty')
assert resource_string('empty.txt', 'test/data') == six.b('empty')
tmp = tempfile.NamedTemporaryFile('w').name
with open(tmp, "w") as fd:
fd.write("test")
try:
print(resource_string(tmp))
assert resource_string(tmp) == 'test'
(d, fn) = os.path.split(tmp)
assert resource_string(fn, d) == 'test'
except IOError as ex:
raise ex
finally:
try:
os.unlink(tmp)
except Exception:
pass | [
9,
191,
144
] |
def METHOD_NAME(components: Iterable[Union[E3dcBat, E3dcCounter, E3dcInverter,
E3dcExternalInverter]]) -> None:
with client as c:
for component in components:
with SingleComponentUpdateContext(component.component_info):
component.update(c) | [
86,
811
] |
def METHOD_NAME(response, api_timeout=None, headers=None, timeout=None):
"""
:param response: request
:param api_timeout: asynchronous API timeout (will wait forever or until error if None)
:param headers: request headers
:param timeout: connect timeout
:return: request
"""
logger = logging.getLogger(__name__)
location_uri = response.headers.get("Location")
if location_uri is None:
raise Exception(f"no Location header in {response}")
start_time = time.time()
if api_timeout is None:
while True:
done, response = call_finished(location_uri, headers, timeout)
if done:
break
time.sleep(1)
else:
for _ in range(api_timeout):
done, response = call_finished(location_uri, headers, timeout)
if done:
break
time.sleep(1)
if response.status_code == 202:
wait_time = time.time() - start_time
logger.warn(f"API request still not completed after {int(wait_time)} seconds: {response}")
return response
logger.debug(f"DELETE API call to {location_uri}")
requests.delete(location_uri, headers=headers, proxies=get_proxies(location_uri), timeout=timeout)
return response | [
618,
43,
958,
58
] |
async def METHOD_NAME(write_client):
await write_client.indices.create(
index="test-mapping",
body={
"settings": {"number_of_shards": 1, "number_of_replicas": 0},
"mappings": {
"date_detection": False,
"properties": {
"title": {
"type": "text",
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
},
"created_at": {"type": "date"},
"comments": {
"type": "nested",
"properties": {
"created": {"type": "date"},
"author": {
"type": "text",
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
},
},
},
},
},
},
)
m = await mapping.AsyncMapping.from_opensearch("test-mapping", using=write_client)
assert ["comments", "created_at", "title"] == list(
sorted(m.properties.properties._d_.keys())
)
assert {
"date_detection": False,
"properties": {
"comments": {
"type": "nested",
"properties": {
"created": {"type": "date"},
"author": {
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
"type": "text",
},
},
},
"created_at": {"type": "date"},
"title": {
"analyzer": "snowball",
"fields": {"raw": {"type": "keyword"}},
"type": "text",
},
},
} == m.to_dict()
# test same with alias
await write_client.indices.put_alias(index="test-mapping", name="test-alias")
m2 = await mapping.AsyncMapping.from_opensearch("test-alias", using=write_client)
assert m2.to_dict() == m.to_dict() | [
9,
445,
9452,
3758,
280,
6957
] |
f METHOD_NAME(self): | [
9,
581
] |
def METHOD_NAME(self):
return "POST" | [
103
] |
def METHOD_NAME(api_client):
response = api_client.get(
'/submissions/authors',
content_type='application/json',
)
assert response.status_code == 401 | [
9,
80,
2997,
579,
529,
1970,
19
] |
def METHOD_NAME(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/AuthorizationRules/{authorizationRuleName}",
**self.url_parameters
) | [
274
] |
def METHOD_NAME(
params: SpinEchoParameters,
platform: Platform,
qubits: Qubits,
) -> SpinEchoData:
"""Data acquisition for SpinEcho"""
# create a sequence of pulses for the experiment:
# Spin Echo 3 Pulses: RX(pi/2) - wait t(rotates z) - RX(pi) - wait t(rotates z) - RX(pi/2) - readout
ro_pulses = {}
RX90_pulses1 = {}
RX_pulses = {}
RX90_pulses2 = {}
sequence = PulseSequence()
for qubit in qubits:
RX90_pulses1[qubit] = platform.create_RX90_pulse(qubit, start=0)
RX_pulses[qubit] = platform.create_RX_pulse(
qubit, start=RX90_pulses1[qubit].finish
)
RX90_pulses2[qubit] = platform.create_RX90_pulse(
qubit, start=RX_pulses[qubit].finish
)
ro_pulses[qubit] = platform.create_qubit_readout_pulse(
qubit, start=RX90_pulses2[qubit].finish
)
sequence.add(RX90_pulses1[qubit])
sequence.add(RX_pulses[qubit])
sequence.add(RX90_pulses2[qubit])
sequence.add(ro_pulses[qubit])
# define the parameter to sweep and its range:
# delay between pulses
ro_wait_range = np.arange(
params.delay_between_pulses_start,
params.delay_between_pulses_end,
params.delay_between_pulses_step,
)
data = SpinEchoData()
# sweep the parameter
for wait in ro_wait_range:
# save data as often as defined by points
for qubit in qubits:
RX_pulses[qubit].start = RX90_pulses1[qubit].finish + wait
RX90_pulses2[qubit].start = RX_pulses[qubit].finish + wait
ro_pulses[qubit].start = RX90_pulses2[qubit].finish
# execute the pulse sequence
results = platform.execute_pulse_sequence(
sequence,
ExecutionParameters(
nshots=params.nshots,
relaxation_time=params.relaxation_time,
acquisition_type=AcquisitionType.INTEGRATION,
averaging_mode=AveragingMode.CYCLIC,
),
)
for qubit in qubits:
result = results[ro_pulses[qubit].serial]
data.register_qubit(
qubit, wait=wait, msr=result.magnitude, phase=result.phase
)
return data | [
1257
] |
def METHOD_NAME(self, equal_resolution=False, numpify=False, torchify=False):
return METHOD_NAME(
batch_size=self.batch_size,
num_channels=self.num_channels,
min_resolution=self.min_resolution,
max_resolution=self.max_resolution,
equal_resolution=equal_resolution,
numpify=numpify,
torchify=torchify,
) | [
123,
660,
1461
] |
def METHOD_NAME(self, terminator: str):
old_terminator = self.handler.terminator
self.handler.terminator = terminator
yield
self.handler.terminator = old_terminator | [
1080,
16521
] |
f METHOD_NAME(self, request, context): | [
416,
4800,
2419
] |
def METHOD_NAME(include_unk, expected):
y = ''' | [
9,
1872,
46
] |
def METHOD_NAME(task_obj):
return os.path.join(settings.DATA_ROOT, str(task_obj.id)) | [
19,
758,
2838
] |
def METHOD_NAME(self):
indices = self.get_codebook_indices()
return (
self.vars.squeeze(0)
.index_select(0, indices)
.view(self.num_vars ** self.groups, -1)
) | [
10543
] |
def METHOD_NAME(some_function) -> callable:
""" Decorator for functions (pages) that require a user to provide identification. Returns
403 (forbidden) or 401 (depending on beiwe-api-version) if the identifying info (username,
password, device ID) are invalid.
In any function wrapped with this decorator provide a parameter named "patient_id" (with the
user's id) and a parameter named "password" with an SHA256 hashed instance of the user's
password. """
@functools.wraps(some_function)
def authenticate_and_call(*args, **kwargs):
request: ParticipantRequest = args[0]
assert isinstance(request, HttpRequest), \
f"first parameter of {some_function.__name__} must be an HttpRequest, was {type(request)}."
correct_for_basic_auth(request)
if validate_post(request, require_password=True, registration=True):
return some_function(*args, **kwargs)
is_ios = kwargs.get("OS_API", None) == IOS_API
return abort(401 if is_ios else 403)
return authenticate_and_call | [
1805,
1615,
2213
] |
def METHOD_NAME() -> None:
theta_3d = create_theta_3d()
N, C, D, W, H = len(theta_3d), 3, 4, 5, 6
data_size = (D, W, H)
for align_corners in (0, 1):
node = onnx.helper.make_node(
"AffineGrid",
inputs=["theta", "size"],
outputs=["grid"],
align_corners=align_corners,
)
original_grid = construct_original_grid(data_size, align_corners)
grid = apply_affine_transform(theta_3d, original_grid)
test_name = "test_affine_grid_3d"
if align_corners == 1:
test_name += "_align_corners"
expect(
node,
inputs=[theta_3d, np.array([N, C, D, W, H], dtype=np.int64)],
outputs=[grid],
name=test_name,
) | [
294,
1529,
654,
272,
8539
] |
def METHOD_NAME(self, cmd, *args, **kwargs):
return self.node.remoter.METHOD_NAME(f'{self.sudo_needed} docker exec {self.docker_id} /bin/sh -c {shlex.quote(cmd)}', *args, **kwargs) | [
22
] |
def METHOD_NAME(self):
router = Router(dispatcher=handler_dispatcher())
@resource("/_localstack/health")
class TestResource:
def on_post(self, request):
return "POST/OK"
router.add(TestResource())
with pytest.raises(MethodNotAllowed):
assert router.dispatch(Request("GET", "/_localstack/health"))
assert router.dispatch(Request("POST", "/_localstack/health")).get_data(True) == "POST/OK" | [
9,
2506,
24,
256,
1153,
103,
45
] |
def METHOD_NAME(self, kube_apis, crd_ingress_controller, virtual_server_setup):
wait_before_test(1)
text = f"{virtual_server_setup.namespace}/{virtual_server_setup.vs_name}"
vs_event_text = f"Configuration for {text} was added or updated"
events_vs = get_events(kube_apis.v1, virtual_server_setup.namespace)
initial_count = assert_event_and_get_count(vs_event_text, events_vs)
vs_src = f"{TEST_DATA}/virtual-server-canned-responses/virtual-server-updated.yaml"
patch_virtual_server_from_yaml(
kube_apis.custom_objects, virtual_server_setup.vs_name, vs_src, virtual_server_setup.namespace
)
wait_and_assert_status_code(501, virtual_server_setup.backend_1_url, virtual_server_setup.vs_host)
resp = requests.get(virtual_server_setup.backend_1_url, headers={"host": virtual_server_setup.vs_host})
resp_content = resp.content.decode("utf-8")
assert resp.headers["content-type"] == "some/type" and resp_content == "{}"
wait_and_assert_status_code(201, virtual_server_setup.backend_2_url, virtual_server_setup.vs_host)
resp = requests.get(virtual_server_setup.backend_2_url, headers={"host": virtual_server_setup.vs_host})
resp_content = resp.content.decode("utf-8")
assert resp.headers["content-type"] == "user-type" and resp_content == "line1\nline2"
vs_events = get_events(kube_apis.v1, virtual_server_setup.namespace)
assert_event_count_increased(vs_event_text, initial_count, vs_events) | [
9,
86
] |
def METHOD_NAME(self):
"""Allows to get u_svd output of the operator
Returns
----------
my_u_svd : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_u_svd = op.outputs.u_svd()
""" # noqa: E501
return self._u_svd | [
3597,
7505
] |
def METHOD_NAME(self):
"""Returns the end value of range (or None if empty)."""
if self.is_empty:
return None
return self.list[-1][1] | [
1798
] |
def METHOD_NAME():
run_unittest(TracebackCases, TracebackFormatTests) | [
9,
57
] |
def METHOD_NAME(error):
'''Returns the message to show for an error
@param error: error object or string
@returns: 2-tuple of: message string and a boolean
whether a traceback should be shown or not
'''
if isinstance(error, Error):
# An "expected" error
return error.msg, False
elif isinstance(error, EnvironmentError):
# Normal error, e.g. OSError or IOError
msg = error.strerror
if hasattr(error, 'filename') and error.filename:
msg += ': ' + error.filename
return msg, False
else:
# An unexpected error, all other Exception's
msg = _('Looks like you found a bug') # T: generic error dialog
return msg, True | [
19,
168,
169
] |
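
Each row above pairs a `text` snippet (a Python function with its name masked as `METHOD_NAME`) with an `ids` integer sequence. Below is a minimal sketch of how such records might be consumed, assuming the data is exported as JSON Lines with `text` and `ids` fields; the file name, field names, and export format are assumptions rather than something the preview states.

```python
import json


def iter_records(path: str):
    """Yield (text, ids) pairs from a hypothetical JSONL export of this dataset."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            record = json.loads(line)
            yield record["text"], record["ids"]


# Example usage: sanity-check each record against the ranges shown in the
# header (text lengths from 15 characters, ids sequences of 1 to 7 integers).
# "train.jsonl" is a placeholder path, not a file named by the preview.
for text, ids in iter_records("train.jsonl"):
    assert len(text) >= 15
    assert 1 <= len(ids) <= 7
```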