text (stringlengths 15–7.82k) | ids (sequencelengths 1–7) |
---|---|
def METHOD_NAME(self) -> str:
"""
Resource location.
"""
return pulumi.get(self, "location") | [708] |
def METHOD_NAME(
cls,
point_1: Iterable[float],
point_2: Iterable[float],
point_3: Iterable[float],
label: str = "",
):
"""
Instantiate a BluemiraPlane from three points.
Parameters
----------
point_1:
First point
point_2:
Second Point
point_3:
Third point
label:
Label of the plane
"""
plane = BluemiraPlane()
plane._shape = cadapi.make_plane_from_3_points(point_1, point_2, point_3)
plane.label = label
return plane | [280, 490, 182] |
def METHOD_NAME(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"") | [15836, 9976] |
def METHOD_NAME(self):
os.environ['BUGWARRIORRC'] = str(
self.basedir / 'example-bugwarriorrc')
load.load_config('general', False, False) | [9, 1441, -1] |
def METHOD_NAME(cls, data: Dict[str, Any]) -> "SentenceTransformersDocumentEmbedder":
"""
Deserialize this component from a dictionary.
"""
return default_from_dict(cls, data) | [280, 553] |
def METHOD_NAME(fname):
ans = PoseGraphWrapper()
# Load pose graph
ans.pose_graph = o3d.io.read_pose_graph(fname)
# Update dicts
ans.dict_nodes, ans.dict_edges = ans._graph2dicts()
return ans | [557] |
def METHOD_NAME(name: str) -> str:
if name:
name = re.sub(r'[_.,\[\]\(\): ]+', ' ', name).strip().lower()
return name | [188, -1] |
def METHOD_NAME(self, key):
"""Return the PSD index for `key`
Args:
key (std): Detector name or mixing matrix key.
Returns:
index (int): PSD index.
"""
return self._indices[key] | [724] |
def METHOD_NAME(self, key):
"""
Find scripts and force-start them
Args:
key (str): The script's key or dbref.
Returns:
nr_started (int): The number of started scripts found.
"""
scripts = ScriptDB.objects.get_all_scripts_on_obj(self.obj, key=key)
num = 0
for script in scripts:
script.METHOD_NAME()
num += 1
return num | [447] |
def METHOD_NAME(apps, schema_editor):
CustomPricingUnitConversion = apps.get_model(
"metering_billing", "CustomPricingUnitConversion"
)
for c in CustomPricingUnitConversion.objects.all():
c.organization = c.plan_version.organization
c.save() | [343, 3292, 805, 1719, 24, 3411] |
def METHOD_NAME(self):
self.assertRaises(RuntimeError, lambda: Key()(zeros(4096))) | [9, 16765] |
def METHOD_NAME(self): | [9, 539, 604, 946] |
def METHOD_NAME(self, basic_dats, basic_json):
term = "missing_term"
assert find_schema(basic_dats, term, basic_json) is None | [9, 1038, 3108] |
def METHOD_NAME(graph, name_prefix='', pb_graph=None, executors_it=None):
"""Visualizes an independent graph, or a graph executor."""
value_map = {}
pb_graph = pb_graph or graph_pb2.GraphDef()
if isinstance(graph, torch._C.GraphExecutorState):
visualize_graph_executor(graph, name_prefix, pb_graph,
partial(METHOD_NAME, pb_graph=pb_graph))
return pb_graph
# Set up an input node
input_node = pb_graph.node.add(op='input', name=name_prefix + 'input')
for i, value in enumerate(graph.param_node().outputs()):
value_map[value.unique()] = name_prefix + 'input:' + str(i)
visualize_rec(graph, value_map, name_prefix, pb_graph, executors_it)
# Gather all outputs
return_node = pb_graph.node.add(op='output', name=name_prefix + 'output')
for value in graph.return_node().inputs():
return_node.input.append(value_map[value.unique()])
return pb_graph | [3701] |
def METHOD_NAME(s):
"""Convert the characters &<>'" in string s to HTML-safe sequences.
Convert newline to <br> too."""
#: E127+1:28
return unicode((s or '').replace('&', '&amp;')
.replace('\n', '<br>\n')) | [-1] |
def METHOD_NAME(self, data, ensure_copy=False):
v = self.real_zero_vector()
v.to_numpy()[:] = data
return v | [1866, 798, 280, 2028] |
def METHOD_NAME():
with mock.patch(
"paasta_tools.secret_providers.vault.SecretProvider.get_vault_ecosystems_for_clusters",
autospec=True,
return_value=["devc"],
), mock.patch(
"paasta_tools.secret_providers.vault.get_vault_client", autospec=True
):
return SecretProvider(
soa_dir="/nail/blah",
service_name="universe",
cluster_names=["mesosstage"],
vault_auth_method="token",
) | [248, 444, 2275] |
def METHOD_NAME(self) -> None:
"""Always test there are no pre-existing data, or it may break tests for nothing."""
# Pre-flight assertions
self.assertEqual(Playlist.objects.count(), 0)
self.assertEqual(PlaylistAccess.objects.count(), 0) | [0, 1] |
def METHOD_NAME(handle, group, par, value):
h = pyiutil.boolDataHandle(bool(value))
set_parameter(handle, group, par, h, inspect.stack()[1])
pyiutil.deleteDataHandle(h) | [0, 863, 12799] |
def METHOD_NAME(path):
if not os.path.isfile(path):
return ""
machine_vars = {}
with open(path) as machine_file:
for line in machine_file:
tokens = line.split('=')
if len(tokens) < 2:
continue
machine_vars[tokens[0]] = tokens[1].strip()
return machine_vars.get("onie_platform") | [-1] |
def METHOD_NAME(self) -> bool:
return False | [137, 16703] |
def METHOD_NAME(certificate_name: Optional[pulumi.Input[str]] = None,
provisioning_service_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDpsCertificateResult]:
"""
Get the certificate from the provisioning service.
:param str certificate_name: Name of the certificate to retrieve.
:param str provisioning_service_name: Name of the provisioning service the certificate is associated with.
:param str resource_group_name: Resource group identifier.
"""
... | [19, 6431, 1548, 146] |
def METHOD_NAME(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs) | [353, 377] |
def METHOD_NAME(filename):
f = numpy.loadtxt(filename)
return f.shape[0] | [-1] |
def METHOD_NAME(self): | [9, 194, 61, 2796] |
def METHOD_NAME(points0, points1, scale=1.0, p_cutoff=0.1):
"""
Match points between two frames
Parameters
----------
points0 : array_like
Mx2 or Mx3 array of points
points1 : array_like
Mx2 or Mx3 array of points
Returns
-------
matches : array_like
Nx2 array of indices of matched points
"""
from scipy.spatial.distance import cdist
from scipy.special import erf
features0 = gen_features2(points0, 10)
features1 = gen_features2(points1, 10)
def _robust_dist(feat1, feat2):
"""
Calculate a robust distance between two feature vectors,
allowing for some points to have no matching points in the other data set.
"""
# find which features match in each set
a = np.abs((feat1[:,None]) - (feat2[None,:]))
i = np.argmin(a,axis=1, keepdims=True)
delta = np.take_along_axis(a, i, axis=1).squeeze()
f2i = feat2[i.squeeze()]
#print('delta:', delta)
#print(np.angle(feat1) - np.angle(f2i))
t = np.mod(np.angle(feat1) - np.angle(f2i), 2*np.pi)
w = 1.0/(1.0 + delta)
t = (t*w).sum()/w.sum()
d = np.abs(feat1 - f2i*np.exp(1j*np.median(t)))
#print(d)
p = 1 - erf(d/scale-1)
#print(p)
return p.sum()
# plt.figure()
# plt.subplot(211)
# plt.plot(features0, 'x')
# plt.subplot(212)
# plt.plot(features1, 'x')
# plt.figure()
# plt.plot(np.real(features0)[:2, :].T, np.imag(features0)[:2, :].T, 'x')
# plt.plot(np.real(features1)[:2, :].T, np.imag(features1)[:2, :].T, '+')
p = cdist(features0, features1, _robust_dist)
#p /= p.sum(0)[None, :]
# de-weight matches where the there is another higher probability match
#p = p*(p/p.max(1)[:,None])**2
#matches = np.zeros(len(points0), dtype=np.int)
print(p)
plt.figure()
plt.imshow(p)
plt.colorbar()
plt.title('Point correspondence matrix')
plt.xlabel('Channel 1')
plt.ylabel('Channel 0')
am = p.argmax(axis=1, keepdims=True)
score = np.take_along_axis(p, am, axis=1)
am, score = am.squeeze(), score.squeeze()
plt.figure()
plt.plot(points0[:,0], points0[:,1], '.')
for i, p in enumerate(points0):
plt.text(p[0], p[1], f'{i}', color='C0')
plt.plot(points1[:,0], points1[:,1], '.')
for i, p in enumerate(points1):
plt.text(p[0], p[1], f'{i}', color='C1', verticalalignment='top')
for i in range(len(points0)):
plt.plot([points0[i, 0], points1[am[i], 0]], [points0[i, 1], points1[am[i], 1]], 'k', lw=1)
plt.title('Matched points')
return am, score | [590, 182] |
def METHOD_NAME(self):
compressed_temp_file = os.path.join(self.temp_folder.name, "m31_query.xml.gz")
table = esa_hubble.query_target(name="m3", filename=compressed_temp_file)
assert 'observation_id' in table.columns | [9, 539, 1030] |
def METHOD_NAME(self, *args):
if LOG_LEVEL >=1:
print("ERROR - ", ' '.join(map(str, args)), file=self.out) | [
168
] |
def METHOD_NAME(objs):
# The Parent can be omitted as long as one object is orphaned
if len(objs) == 2:
InListLength = tuple((len(obj.InList)) for obj in objs)
if InListLength == (0,1):
newchild,oldchild = objs
parent = oldchild.InList[0]
elif InListLength == (1,0):
oldchild,newchild = objs
parent = oldchild.InList[0]
else:
raise ValueError("Selection ambiguous. Please select oldchild,\
newchild and parent")
elif len(objs) == 3:
if objs[2] in objs[0].InList: oldchild, newchild, parent = objs
elif objs[0] in objs[1].InList: parent, oldchild, newchild = objs
elif objs[0] in objs[2].InList: parent, newchild, oldchild = objs
elif objs[1] in objs[0].InList: oldchild, parent, newchild = objs
elif objs[1] in objs[2].InList: newchild, parent, oldchild = objs
elif objs[2] in objs[1].InList: newchild, oldchild, parent = objs
else:
raise ValueError("Cannot determine current parent-child relationship")
else:
raise ValueError("Wrong number of selected objects")
replaceobj(parent,oldchild,newchild)
parent.Document.recompute() | [-1] |
def METHOD_NAME(self, other):
newset = self.copy()
newset.difference_update(other)
return newset | [614] |
def METHOD_NAME(mock_401_returned):
with pytest.raises(KeycloakError) as raised_error:
get_token(module_params_creds)
assert str(raised_error.value) == (
'Could not obtain access token from http://keycloak.url'
'/auth/realms/master/protocol/openid-connect/token: '
'HTTP Error 401: Unauthorized'
) | [9, 168, 2475] |
def METHOD_NAME(io, data, name, print_name=False):
return process_rr(io, data, 'SRV', ['priority', 'weight', 'port', 'target'], name, print_name) | [356, 4617] |
def METHOD_NAME(signer, triples):
transactions = [
create_transaction(signer, verb, name, value)
for verb, name, value in triples
]
txn_signatures = [txn.header_signature for txn in transactions]
header = BatchHeader(
signer_public_key=signer.get_public_key().as_hex(),
transaction_ids=txn_signatures
).SerializeToString()
signature = signer.sign(header)
batch = Batch(
header=header,
transactions=transactions,
header_signature=signature)
batch_list = BatchList(batches=[batch])
return batch_list.SerializeToString() | [129, 2277] |
def METHOD_NAME(
self, default_org, form_data, virtwho_config, target_sat, deploy_type
):
"""Verify " hammer virt-who-config deploy & fetch"
:id: e66bf88a-bd4e-409a-91a8-bc5e005d95dd
:expectedresults:
1. Config can be created and deployed
2. Config can be created, fetch and deploy
:CaseLevel: Integration
:CaseImportance: High
"""
assert virtwho_config['status'] == 'No Report Yet'
if deploy_type == "id":
command = get_configure_command(virtwho_config['id'], default_org.name)
hypervisor_name, guest_name = deploy_configure_by_command(
command, form_data['hypervisor-type'], debug=True, org=default_org.label
)
elif deploy_type == "script":
script = target_sat.cli.VirtWhoConfig.fetch(
{'id': virtwho_config['id']}, output_format='base'
)
hypervisor_name, guest_name = deploy_configure_by_script(
script, form_data['hypervisor-type'], debug=True, org=default_org.label
)
virt_who_instance = target_sat.cli.VirtWhoConfig.info({'id': virtwho_config['id']})[
'general-information'
]['status']
assert virt_who_instance == 'OK'
hosts = [
(hypervisor_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=NORMAL'),
(guest_name, f'product_id={settings.virtwho.sku.vdc_physical} and type=STACK_DERIVED'),
]
for hostname, sku in hosts:
host = target_sat.cli.Host.list({'search': hostname})[0]
subscriptions = target_sat.cli.Subscription.list(
{'organization': default_org.name, 'search': sku}
)
vdc_id = subscriptions[0]['id']
if 'type=STACK_DERIVED' in sku:
for item in subscriptions:
if hypervisor_name.lower() in item['type']:
vdc_id = item['id']
break
result = target_sat.cli.Host.subscription_attach(
{'host-id': host['id'], 'subscription-id': vdc_id}
)
assert result.strip() == 'Subscription attached to the host successfully.' | [9, 2302, 2749, 111, 604, 147, 782] |
def METHOD_NAME(self, data):
self._timestamp = self._parse_property(
data, "timestamp", required=True, type=int
)
self._open = self._parse_property(data, "open", required=True, type=float)
self._high = self._parse_property(data, "high", required=True, type=float)
self._low = self._parse_property(data, "low", required=True, type=float)
self._close = self._parse_property(data, "close", required=True, type=float)
self._volume = self._parse_property(data, "volume", required=True, type=float)
self._dividend = self._parse_property(
data, "dividend", required=False, type=float
)
self._stock_splits = self._parse_property(
data, "stock_splits", required=False, type=float
) | [86] |
def METHOD_NAME(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... | [974] |
def METHOD_NAME(self):
return self.source_expressions | [19, 1458, 7580] |
def METHOD_NAME():
return {
"version": weblate.utils.version.GIT_VERSION,
"vcs": sorted(VCS_REGISTRY.keys()),
"formats": sorted(FILE_FORMATS.keys()),
"mt_services": sorted(MACHINERY.keys()),
"encoding": [sys.getfilesystemencoding(), sys.getdefaultencoding()],
"uid": os.getuid(),
} | [1677] |
def METHOD_NAME(self, input): | [214] |
def METHOD_NAME(path):
files = os.listdir(path)
for file in files:
if path != '.':
file=f'{path}/{file}'
if os.path.isdir(file):
METHOD_NAME(file)
elif os.path.isfile(file):
process_file(file) | [19, 1537] |
def METHOD_NAME(prediction, y, significance=None):
"""Calculates the number of correct predictions made by a conformal
regression model.
"""
if significance is not None:
idx = int(significance * 100 - 1)
prediction = prediction[:, :, idx]
low = y >= prediction[:, 0]
high = y <= prediction[:, 1]
correct = low * high
return y[correct].size | [739, 293, 668] |
def METHOD_NAME(self):
with self.assertRaises(CaikitRuntimeException) as context:
self.model_sizer.get_model_size(
model_id=random_test_id(),
local_model_path="not/a/path/to/anything",
model_type=_random_test_model_type(),
)
self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.status_code) | [9, 1807, 5147, 130, 622, 217, 171] |
def METHOD_NAME(r1, r2):
transformer = _get_transformer(r1)
r2_transformer = _get_transformer(r2)
if transformer != r2_transformer:
transformer = _identity
assert transformer(read_body(r1)) == transformer(read_body(r2)) | [2829] |
def METHOD_NAME(self, order):
# Creates an index that undoes a sort: xs==xs[order][inverse_sort(order)]
return torch.empty_like(order).scatter_(
0, order, torch.arange(0, order.size(0), device=order.device)
) | [3581, 266] |
def METHOD_NAME(__x: _SupportsFloatOrIndex) -> float: ... | [10155] |
def METHOD_NAME():
with object_registration.custom_object_scope(
{"CustomClass": CustomClass, "custom_fn": custom_fn}
):
actual_custom_fn = keras.activations.get("custom_fn")
self.assertEqual(actual_custom_fn, custom_fn)
actual_custom_class = keras.regularizers.get("CustomClass")
self.assertEqual(actual_custom_class.__class__, CustomClass)
with object_registration.custom_object_scope(
{"CustomClass": CustomClass, "custom_fn": custom_fn}
):
actual_custom_fn = keras.activations.get("custom_fn")
self.assertEqual(actual_custom_fn, custom_fn)
actual_custom_class = keras.regularizers.get("CustomClass")
self.assertEqual(actual_custom_class.__class__, CustomClass)
checked_thread = self.checkedThread(METHOD_NAME)
checked_thread.start()
checked_thread.join() | [250, 19, 623, 600] |
def METHOD_NAME(self):
copy(self, "CMakeLists.txt", self.recipe_folder, self.export_sources_folder)
export_conandata_patches(self) | [294, 505] |
def METHOD_NAME(filename):
if filename: matplotlib.pyplot.savefig(filename, dpi=80) | [73, 217] |
def METHOD_NAME(self):
# Test GC-optimization of dynamically constructed tuples.
self.check_track_dynamic(tuple, False) | [9, 3068, 2111] |
def METHOD_NAME(opt):
'completer for legacy options'
opt = opt.replace('-', '_')
if opt == 'colorscheme':
return ('black', 'blue', 'green', 'cyan',
'red', 'magenta', 'yellow', 'white', 'normal')
opt = _legacy_map.get(opt)
if opt:
return config.complete(*opt)
return [] | [-1] |
def METHOD_NAME(
df: pandas.DataFrame,
labels: bool,
id_column_name: str,
label_column_name: str,
label_to_id: Mapping[str, int],
) -> Iterable[str]:
if id_column_name in df.columns:
yield id_column_name
# check value range entity IDs
assert df[id_column_name].isin(label_to_id.values()).all()
if labels:
yield label_column_name
# check value range entity labels
assert df[label_column_name].isin(label_to_id.keys()).all() | [250, 415] |
def METHOD_NAME(self):
parameters = {
**self.serialize_query_param(
"api-version", "2023-06-15",
required=True,
),
}
return parameters | [539, 386] |
async def METHOD_NAME(self, hash, params):
self.create_async_task(self.do_backup()) | [0, 1001, 4607] |
def METHOD_NAME(self, connection, table_name, schema=None, **kw):
pass | [19, 2627, 1126] |
def METHOD_NAME(self):
eval_dataset = ExternalDataset(path=os.path.join(self.temp_dir, "test_images"), dataset_type="test")
results_dict = self.fall_detector.eval(eval_dataset)
self.assertEqual(results_dict['accuracy'], 1.0,
msg="Accuracy is not 1.0.")
self.assertEqual(results_dict['sensitivity'], 1.0,
msg="Sensitivity is not 1.0.")
self.assertEqual(results_dict['specificity'], 1.0,
msg="Specificity is not 1.0.")
self.assertEqual(results_dict['detection_accuracy'], 1.0,
msg="Detection accuracy is not 1.0.")
self.assertEqual(results_dict['no_detections'], 0,
msg="Number of no detections is not 0.") | [
9,
1171
] |
def METHOD_NAME(self, base_config, site_config, expected):
"""
Verify that the footer language selector config is correct.
"""
with patch.dict('django.conf.settings.FEATURES', base_config):
with with_site_configuration_context(configuration=site_config):
assert language_api.footer_language_selector_is_enabled() == expected | [9, 1201, 2938, 5169, 137, 1111] |
def METHOD_NAME(self, in_path, out_path):
""" put a file from local to lxd """
super(Connection, self).METHOD_NAME(in_path, out_path)
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.get_option('remote_addr'))
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
local_cmd = [self._lxc_cmd]
if self.get_option("project"):
local_cmd.extend(["--project", self.get_option("project")])
local_cmd.extend([
"file", "push",
in_path,
"%s:%s/%s" % (self.get_option("remote"), self.get_option("remote_addr"), out_path)
])
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
process.communicate() | [1276, 171] |
def METHOD_NAME(t):
"""
args : args COMMA expr
"""
t[0] = t[1] + [t[3]] | [2054, 335, 978] |
def METHOD_NAME(self, ax):
return self.sps[ax].plots["connect1", "elem"].figure | [9, 1949, 55, 17367, 18182, 9888] |
def METHOD_NAME(self, laneIndex):
return OperatorSubView(self, laneIndex) | [19, 2164] |
def METHOD_NAME(self):
"""Ensure that no error is raised when the library's version is below the function's removal version."""
with patch("super_gradients.__version__", "10.1.0"): # Mocking the version to be below removal version
@deprecated(deprecated_since="3.2.0", removed_from="10.2.0", target=self.new_func)
def deprecated_func_version_below():
return
deprecated_func_version_below() | [9, 654, 168, 1646, 3106, 281, 2234] |
def METHOD_NAME(self):
m1 = pg.Mesh()
m2 = pg.Mesh()
self.assertTrue(m1.hash() == m2.hash())
m1.createNode([1.0, 0.0])
m2.createNode([2.0, 0.0])
self.assertFalse(m1.hash() == m2.hash())
m2.node(0).setPos([1.0, 0.0])
self.assertTrue(m1.hash() == m2.hash()) | [9, 1161, 1949] |
def METHOD_NAME(self, index=None):
"""Creates a cursor on the database.
Creates a cursor on the database, which traverses the keys in their
natural order. It may be associated with an index, where it traverses
the specified index's keys in their natural order.
Args:
index (str; optional) - an optional index; defaults to `None`
Returns:
(:obj:`Cursor`) - a Cursor instance
"""
raise NotImplementedError() | [3230] |
def METHOD_NAME(self):
env = omero.util.Environment("PATH")
env.append("PATH", os.pathsep.join(["bob", "cat"]))
env.append("PATH", os.path.join(os.getcwd(), "lib")) | [
9,
1027
] |
def METHOD_NAME(self, src_tokens, src_lengths, **kwargs):
"""
src_tokens: padded tensor (B, T, C * feat)
src_lengths: tensor of original lengths of input utterances (B,)
"""
B, T, _ = src_tokens.size()
x = src_tokens.transpose(1, 2).contiguous() # (B, feat, T) assuming C == 1
for layer_idx in range(len(self.conv_layers)):
x = self.conv_layers[layer_idx](x)
x = F.glu(x, dim=1)
x = self.dropouts[layer_idx](x)
x = x.transpose(1, 2).contiguous() # (B, T, 908)
x = self.linear_layers[0](x)
x = F.glu(x, dim=2)
x = self.dropouts[-1](x)
x = self.linear_layers[1](x)
assert x.size(0) == B
assert x.size(1) == T
encoder_out = x.transpose(0, 1) # (T, B, vocab_size)
# need to debug this -- find a simpler/elegant way in pytorch APIs
encoder_padding_mask = (
torch.arange(T).view(1, T).expand(B, -1).to(x.device)
>= src_lengths.view(B, 1).expand(-1, T)
).t() # (B x T) -> (T x B)
return {
"encoder_out": encoder_out, # (T, B, vocab_size)
"encoder_padding_mask": encoder_padding_mask, # (T, B)
} | [76] |
def METHOD_NAME(clrd):
clrd = clrd['CumPaidLoss']
w = cl.load_sample('clrd')['EarnedPremDIR'].latest_diagonal
bcl = cl.Chainladder().fit(cl.Development(groupby=['LOB']).fit_transform(clrd))
bbk = cl.Benktander().fit(cl.Development(groupby=['LOB']).fit_transform(clrd), sample_weight=w)
bcc = cl.CapeCod().fit(cl.Development(groupby=['LOB']).fit_transform(clrd), sample_weight=w)
a = bcl.ultimate_.iloc[:10].sum().sum()
b = bcl.predict(clrd.iloc[:10]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bbk.ultimate_.iloc[:10].sum().sum()
b = bbk.predict(clrd.iloc[:10], sample_weight=w.iloc[:10]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcc.ultimate_.iloc[:10].sum().sum()
b = bcc.predict(clrd.iloc[:10], sample_weight=w.iloc[:10]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcl.ultimate_.iloc[150:153].sum().sum()
b = bcl.predict(clrd.iloc[150:153]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bbk.ultimate_.iloc[150:153].sum().sum()
b = bbk.predict(clrd.iloc[150:153], sample_weight=w.iloc[150:153]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcc.ultimate_.iloc[150:153].sum().sum()
b = bcc.predict(clrd.iloc[150:153], sample_weight=w.iloc[150:153]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcl.ultimate_.iloc[150:152].sum().sum()
b = bcl.predict(clrd.iloc[150:152]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bbk.ultimate_.iloc[150:152].sum().sum()
b = bbk.predict(clrd.iloc[150:152], sample_weight=w.iloc[150:152]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcc.ultimate_.iloc[150:152].sum().sum()
b = bcc.predict(clrd.iloc[150:152], sample_weight=w.iloc[150:152]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcl.ultimate_.iloc[150].sum().sum()
b = bcl.predict(clrd.iloc[150]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bbk.ultimate_.iloc[150].sum().sum()
b = bbk.predict(clrd.iloc[150], sample_weight=w.iloc[150]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5
a = bcc.ultimate_.iloc[150].sum().sum()
b = bcc.predict(clrd.iloc[150], sample_weight=w.iloc[150]).ultimate_.sum().sum()
assert abs(a - b) < 1e-5 | [9, 14510, 14511] |
def METHOD_NAME(self, train_data: EncodedDs, dev_data: EncodedDs, args: Optional[dict] = None) -> None:
"""
Fits the linear regression on some data, this refits the model entirely rather than updating it
:param train_data: Regression is fit on this
:param dev_data: This just gets concatenated to the ``train_data``
"""
self.fit(train_data, dev_data) | [2351, 90] |
def METHOD_NAME():
"""
Get transformation function for dataset.
""" | [
19,
1053,
667
] |
def METHOD_NAME(context, value_scope, node, origin_scope=None):
if is_big_annoying_library(context) \
or not context.inference_state.flow_analysis_enabled:
return UNSURE
first_flow_scope = get_parent_scope(node, include_flows=True)
if origin_scope is not None:
origin_flow_scopes = list(_get_flow_scopes(origin_scope))
node_flow_scopes = list(_get_flow_scopes(node))
branch_matches = True
for flow_scope in origin_flow_scopes:
if flow_scope in node_flow_scopes:
node_keyword = get_flow_branch_keyword(flow_scope, node)
origin_keyword = get_flow_branch_keyword(flow_scope, origin_scope)
branch_matches = node_keyword == origin_keyword
if flow_scope.type == 'if_stmt':
if not branch_matches:
return UNREACHABLE
elif flow_scope.type == 'try_stmt':
if not branch_matches and origin_keyword == 'else' \
and node_keyword == 'except':
return UNREACHABLE
if branch_matches:
break
# Direct parents get resolved, we filter scopes that are separate
# branches. This makes sense for autocompletion and static analysis.
# For actual Python it doesn't matter, because we're talking about
# potentially unreachable code.
# e.g. `if 0:` would cause all name lookup within the flow make
# unaccessible. This is not a "problem" in Python, because the code is
# never called. In Jedi though, we still want to infer types.
while origin_scope is not None:
if first_flow_scope == origin_scope and branch_matches:
return REACHABLE
origin_scope = origin_scope.parent
return _break_check(context, value_scope, first_flow_scope, node) | [-1, 250] |
def METHOD_NAME(instrument, elasticapm_client, mc_conn):
elasticapm_client.begin_transaction("transaction.test")
with capture_span("test_pymemcache", "test"):
mc_conn.set("mykey", "a")
assert b"a" == mc_conn.get("mykey")
assert {"mykey": b"a"} == mc_conn.get_many(["mykey", "myotherkey"])
elasticapm_client.end_transaction("BillingView")
transactions = elasticapm_client.events[TRANSACTION]
spans = elasticapm_client.spans_for_transaction(transactions[0])
expected_signatures = {
"test_pymemcache",
"PooledClient.set",
"PooledClient.get",
"PooledClient.get_many",
}
assert {t["name"] for t in spans} == expected_signatures
assert len(spans) == 4 | [9, 14385, 5415, 340] |
def METHOD_NAME(
circuit: cirq.Circuit, qubits: Optional[Sequence[cirq.Qid]] = None
) -> np.ndarray:
"""Given a circuit contract a tensor network into a dense unitary
of the circuit."""
if qubits is None:
qubits = sorted(circuit.all_qubits())
tensors, qubit_frontier, _ = circuit_to_tensors(
circuit=circuit, qubits=qubits, initial_state=None
)
tn = qtn.TensorNetwork(tensors)
i_inds = tuple(f'i0_q{q}' for q in qubits)
f_inds = tuple(f'i{qubit_frontier[q]}_q{q}' for q in qubits)
tn.contract(inplace=True)
return tn.to_dense(f_inds, i_inds) | [768, 7544] |
def METHOD_NAME(one):
"""Convert CTF kind to MNE kind."""
if one["ctfkind"] == int("47314252", 16):
one["kind"] = 1
elif one["ctfkind"] == int("47324252", 16):
one["kind"] = 2
elif one["ctfkind"] == int("47334252", 16):
one["kind"] = 3
else:
one["kind"] = int(one["ctfkind"]) | [
238,
1253
] |
def METHOD_NAME(self, num_samples: int = 1, **kwargs) -> Union[StateVector, StateVectors]:
"""
Must be implemented to properly inherit the parent Model.
"""
return None | [10529] |
def METHOD_NAME():
# insure that we at least have an X display before continuing.
import os
try:
display = os.environ['DISPLAY']
except:
raise unittest.SkipTest, "No $DISPLAY -- skipping gl test"
# touch all the attributes of gl without doing anything
if verbose:
print 'Touching gl module attributes...'
for attr in glattrs:
if verbose:
print 'touching: ', attr
getattr(gl, attr)
# create a small 'Crisscross' window
if verbose:
print 'Creating a small "CrissCross" window...'
print 'foreground'
gl.foreground()
if verbose:
print 'prefposition'
gl.prefposition(500, 900, 500, 900)
if verbose:
print 'winopen "CrissCross"'
w = gl.winopen('CrissCross')
if verbose:
print 'clear'
gl.clear()
if verbose:
print 'ortho2'
gl.ortho2(0.0, 400.0, 0.0, 400.0)
if verbose:
print 'color WHITE'
gl.color(GL.WHITE)
if verbose:
print 'color RED'
gl.color(GL.RED)
if verbose:
print 'bgnline'
gl.bgnline()
if verbose:
print 'v2f'
gl.v2f(0.0, 0.0)
gl.v2f(400.0, 400.0)
if verbose:
print 'endline'
gl.endline()
if verbose:
print 'bgnline'
gl.bgnline()
if verbose:
print 'v2i'
gl.v2i(400, 0)
gl.v2i(0, 400)
if verbose:
print 'endline'
gl.endline()
if verbose:
print 'Displaying window for 2 seconds...'
time.sleep(2)
if verbose:
print 'winclose'
gl.winclose(w) | [9, 57] |
def METHOD_NAME(poly_case):
facade = LibresFacade(poly_case)
args = Namespace(realizations=None)
assert (
model_factory._realizations(args, facade.get_ensemble_size())
== [True] * facade.get_ensemble_size()
) | [9, 235, 5071] |
def METHOD_NAME(self):
"""Check if the service property is set."""
backend = self.backend
with self.subTest(backend=backend.name):
self.assertIsInstance(backend.service, QiskitRuntimeService) | [9, 3127, 549] |
def METHOD_NAME(ngkpt):
"""
This function constructs the input file for the GS calculation:
"""
structure = dict(
angdeg= 3*[60.0],
acell=3* [ 7.1992351952],
natom=2,
ntypat=2,
typat=[1, 2],
znucl=[31, 15],
xred=[
0.0000000000, 0.0000000000, 0.0000000000,
0.2500000000, 0.2500000000, 0.2500000000,
])
#pseudos = ["Ga.psp8", "P.psp8"]
pseudos = abidata.pseudos("Ga.oncvpsp", "P.psp8")
gs_inp = abilab.AbinitInput(structure, pseudos=pseudos)
gs_inp.set_vars(
nband=8,
#nband=13,
ecut=30.0,
#ecut=24.0,
ngkpt=ngkpt,
nshiftk=1,
shiftk=[0, 0, 0],
tolvrs=1.0e-10,
nstep=150,
)
return gs_inp | [93, 9868, 362] |
def METHOD_NAME(test_case):
with test_case.assertRaises(RuntimeError) as context:
ref = flow.tensor([[1], [2]])
value = flow.tensor([[1], [2]])
start = [1]
stop = [-1]
step = [1]
flow._C.slice_update(ref, value, start, stop, step)
test_case.assertTrue(
"The stop list elements must be greater than or equal to 0"
in str(context.exception)
) | [9, 55, 86, 631, 245, 3451] |
def METHOD_NAME(Name, Credential, ConnectionCredential, Ensure):
found = IsUserPresent(Name, ConnectionCredential)
if (found and Ensure == 'Present') or ( not found and Ensure == 'Absent'):
return True
return False | [9] |
def METHOD_NAME(self) -> str:
"""Get architecture for staging packages.
Prior to core20, staging packages had broken behavior in that it would
stage native architecture packages by default.
:return: The appropriate default architecture to stage.
"""
if self._get_build_base() == "core18":
return self.deb_arch
else:
return self.target_arch | [19, 3164, 2975, 1030, 2837] |
def METHOD_NAME(self, filter, page, **options):
"""
common_flow 资源没有属性,返回空
"""
return ListResult(results=[], count=0) | [245, 864, 99] |
def METHOD_NAME(
client, document, acq_order_line_fiction_martigny):
"""Test can delete a document with a linked acquisition order line."""
can, reasons = document.can_delete
assert not can
assert reasons['links']['acq_order_lines'] | [9, 12154, 852, 513, 352, 1046, 34] |
def METHOD_NAME(self):
kernel_url = ('http://security.debian.org/'
'debian-security/pool/updates/main/l/linux-signed-arm64/'
'linux-image-4.19.0-12-arm64_4.19.152-1_arm64.deb')
kernel_sha1 = '2036c2792f80ac9c4ccaae742b2e0a28385b6010'
kernel_deb = self.fetch_asset(kernel_url, asset_hash=kernel_sha1)
kernel_path = self.extract_from_deb(kernel_deb,
"/boot/vmlinuz-4.19.0-12-arm64")
return kernel_path | [1383, 2843, 1885] |
def METHOD_NAME(self) -> t.Tuple[Path, str]:
raise NotImplementedError | [1363] |
def METHOD_NAME() -> str:
return str(uuid.uuid4())[0:8] | [1707, 3668] |
def METHOD_NAME(cls, dikt) -> 'RetrievalSource':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The RetrievalSource of this RetrievalSource. # noqa: E501
:rtype: RetrievalSource
"""
return util.deserialize_model(dikt, cls) | [280, 553] |
def METHOD_NAME(self):
# Arrange
self.wma.update_raw(1.0)
self.wma.update_raw(2.0)
self.wma.update_raw(3.0)
self.wma.update_raw(4.0)
self.wma.update_raw(5.0)
self.wma.update_raw(6.0)
self.wma.update_raw(7.0)
self.wma.update_raw(8.0)
self.wma.update_raw(9.0)
self.wma.update_raw(10.0)
self.wma.update_raw(11.0)
# Act, Assert
assert self.wma.value == 8.0 | [9, 99, 1541, 610, 391, 99] |
def METHOD_NAME(self):
"""Parsing House vote"""
"""This will run the parser over any *house.pdf in the testData folder
If vote is in the voteKey, it will also do an additional check,
otherwise the only test being done is the sanity check in the parser"""
# print vote on a known good result to generate this easily
voteKey = {
"2017_vote_senate.pdf": {
"other_count": 1,
"_type": "vote",
"chamber": "upper",
"yes_count": 39,
"yes_votes": [
"CANDELARIA",
"PINTO",
"LOPEZ",
"SOULES",
"BACA",
"ORTIZ y PINO",
"INGLE",
"SAPIEN",
"CERVANTES",
"PIRTLE",
"MU\xd1OZ",
"WHITE",
"MARTINEZ",
"STEFANICS",
"PADILLA",
"NEVILLE",
"WIRTH",
"IVEY-SOTO",
"SHARER",
"CISNEROS",
"RODRIGUEZ",
"McSORLEY",
"STEINBORN",
"BURT",
"PAPEN",
"KERNAN",
"SHENDO",
"O'NEILL",
"GOULD",
"RUE",
"STEWART",
"CAMPOS",
"PAYNE",
"LEAVELL",
"SMITH",
"GRIGGS",
"SANCHEZ",
"MORALES",
"TALLMAN",
],
"other_votes": ["MOORES"],
"motion": "senate passage",
"sources": [{"url": "2017_vote_senate.pdf"}],
"date": datetime.datetime(2017, 1, 18, 0, 0),
"type": "other",
"no_count": 2,
"no_votes": ["BRANDT", "WOODS"],
"passed": True,
}
}
testData = os.path.join(here, "testData")
pdfs = [f for f in os.listdir(testData) if "house" in f and ".pdf" in f]
for pdf in pdfs:
shutil.copy(os.path.join(testData, pdf), os.path.join(self.tmp, pdf))
hv_text = self.nmBillScraper.scrape_vote(os.path.join(self.tmp, pdf), True)
vote = self.nmBillScraper.parse_house_vote(hv_text, pdf)
ok_(vote, "Vote returned empty or did not complete")
if pdf in voteKey:
eq_(
vote,
voteKey[pdf],
"%s Vote results did not match up with redefined key" % pdf,
) | [9, 10482, 4078] |
def METHOD_NAME(vif):
try:
delcmd = "/sbin/ebtables -t nat -L PREROUTING | grep " + vif
delcmds = pluginlib.do_cmd(['/bin/bash', '-c', delcmd]).split('\n')
for cmd in delcmds:
try:
cmd = '/sbin/ebtables -t nat -D PREROUTING ' + cmd
pluginlib.do_cmd(['/bin/bash', '-c', cmd])
except:
pass
except:
pass | [537, 1634] |
def METHOD_NAME(self): | [19, 1094, 5133] |
def METHOD_NAME(self):
""" Add the module pins """
for i in range(self.num_inputs):
self.add_pin("in_{0}".format(i), "INPUT")
for j in range(self.num_outputs):
self.add_pin("out_{0}".format(j), "OUTPUT")
self.add_pin("vdd", "POWER")
self.add_pin("gnd", "GROUND") | [
238,
3783
] |
def METHOD_NAME(self):
"""
the derivative is a polynomial as well
"""
# 2 things are happening here
# (*) we use the count() iterator; this never terminates
# except that it is embedded in a zip() that will
# terminate when iterating over our own coefficients expires
# (*) here again observe the use of a splat operator
derived_coefs = (n * c for (n, c) in zip(
count(1),
self.coefs[1:]
))
return Polynomial(*derived_coefs) | [7996] |
def METHOD_NAME(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceLoadBalancerListResult"]
"""List all load balancers in a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceLoadBalancerListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_08_01.models.NetworkInterfaceLoadBalancerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceLoadBalancerListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.METHOD_NAME.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceLoadBalancerListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
) | [245] |
def METHOD_NAME(self):
# set up temporary directory to be used for temp files.
self._tempdirName = tempfile.mkdtemp(prefix="key_tmp_")
self.key = paramiko.RSAKey.generate(2048)
keyOutput = io.StringIO()
self.key.write_private_key(keyOutput)
self.private_key = keyOutput.getvalue()
self.public_key = '{} {}'.format(self.key.get_name(), self.key.get_base64()) | [0, 1] |
def METHOD_NAME(bad_uprns):
addresses = Address.objects.filter(pk__in=bad_uprns)
for address in addresses:
print(address.uprn)
address.delete()
print("..deleted")
print("removing bad uprns from uprn lookup")
uprns = UprnToCouncil.objects.filter(pk__in=bad_uprns)
for uprn in uprns:
print(uprn.pk)
uprn.delete()
print(".. deleted") | [
188,
182,
280,
6398
] |
def METHOD_NAME(self):
expanded_method = getattr(self.instance, "fn2(1)")
self.assertEqual(1, expanded_method()) | [9, 1066, 103] |
def METHOD_NAME(self, args, stdout_mock, msg): | [638, 707, 216, 41] |
def METHOD_NAME(self) -> _Reply: ... | [16930] |
def METHOD_NAME(
spc: np.ndarray,
n_fft: int,
n_shift: int,
win_length: int = None,
window: Optional[str] = "hann",
n_iter: Optional[int] = 32,
) -> np.ndarray:
"""Convert linear spectrogram into waveform using Griffin-Lim.
Args:
spc: Linear spectrogram (T, n_fft // 2 + 1).
n_fft: The number of FFT points.
n_shift: Shift size in points.
win_length: Window length in points.
window: Window function type.
n_iter: The number of iterations.
Returns:
Reconstructed waveform (N,).
"""
# assert the size of input linear spectrogram
assert spc.shape[1] == n_fft // 2 + 1
if LooseVersion(librosa.__version__) >= LooseVersion("0.7.0"):
# use librosa's fast Griffin-Lim algorithm
spc = np.abs(spc.T)
y = librosa.griffinlim(
S=spc,
n_iter=n_iter,
hop_length=n_shift,
win_length=win_length,
window=window,
center=True if spc.shape[1] > 1 else False,
)
else:
# use slower version of Griffin-Lim algorithm
logging.warning(
"librosa version is old. use slow version of Griffin-Lim algorithm."
"if you want to use fast Griffin-Lim, please update librosa via "
"`source ./path.sh && pip install librosa==0.7.0`."
)
cspc = np.abs(spc).astype(np.complex).T
angles = np.exp(2j * np.pi * np.random.rand(*cspc.shape))
y = librosa.istft(cspc * angles, n_shift, win_length, window=window)
for i in range(n_iter):
angles = np.exp(
1j
* np.angle(librosa.stft(y, n_fft, n_shift, win_length, window=window))
)
y = librosa.istft(cspc * angles, n_shift, win_length, window=window)
return y | [7463, 7464] |
def METHOD_NAME(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityPerimeters/{networkSecurityPerimeterName}/resourceAssociations/{associationName}",
**self.url_parameters
) | [274] |