text (string, lengths 15 to 7.82k) | ids (sequence, lengths 1 to 7) |
---|---|
def METHOD_NAME(self):
order = 25
ppp = 10
curves, currents, ma = get_ncsx_data(Nt_coils=order, ppp=ppp)
with ScratchDir("."):
coils_to_makegrid("coils.file_to_load", curves, currents, nfp=1)
loaded_coils = load_coils_from_makegrid_file("coils.file_to_load", order, ppp)
gamma = [curve.gamma() for curve in curves]
loaded_gamma = [coil.curve.gamma() for coil in loaded_coils]
loaded_currents = [coil.current for coil in loaded_coils]
coils = [Coil(curve, current) for curve, current in zip(curves, currents)]
for j_coil in range(len(coils)):
np.testing.assert_allclose(
currents[j_coil].get_value(),
loaded_currents[j_coil].get_value()
)
np.testing.assert_allclose(curves[j_coil].x, loaded_coils[j_coil].curve.x)
np.random.seed(1)
bs = BiotSavart(coils)
loaded_bs = BiotSavart(loaded_coils)
points = np.asarray(17 * [[0.9, 0.4, -0.85]])
points += 0.01 * (np.random.rand(*points.shape) - 0.5)
bs.set_points(points)
loaded_bs.set_points(points)
B = bs.B()
loaded_B = loaded_bs.B()
np.testing.assert_allclose(B, loaded_B)
np.testing.assert_allclose(gamma, loaded_gamma) | [
9,
557,
4561,
280,
-1,
171
] |
def METHOD_NAME(self):
    return self.frequency | [
19,
3831
] |
def METHOD_NAME():
    property = default_image_properties[0].copy()
    property.pop('method')
    msg = r"List of properties contains next problems:\n\+ Item #0: property must be of type dict, " \
          r"and include keys \('name', 'method', 'output_type'\)."
    assert_that(calling(validate_properties).with_args([property]), raises(DeepchecksValueError, msg)) | [
9,
187,
748,
41,
3534,
1042,
553
] |
def METHOD_NAME(
    container_from_file: t.Callable[[str], ConfigDictType], version: int | None
):
    OVERRIDE_RUNNERS = f"""\ | [
9,
10665,
830,
1102,
345
] |
def METHOD_NAME(src, dst):
    """The default progress/filter callback; returns True for all files"""
    return dst | [
235,
527
] |
def METHOD_NAME(self, data, t_range=[]):
    self.apm.transTimebase() | [
356
] |
def METHOD_NAME(self, mock_popen): | [
9,
-1,
485,
1659
] |
def METHOD_NAME(self):
    master_seed(seed=301)
    self.backdoor_path = os.path.join(
        os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))),
        "utils",
        "data",
        "backdoors",
        "alert.png",
    )
    super().METHOD_NAME() | [
0,
1
] |
def METHOD_NAME(metafile_path):
    """Load meta data file created by compute_attention_masks.py"""
    with open(metafile_path, "r", encoding="utf-8") as f:
        lines = f.readlines()
    meta_data = []
    for line in lines:
        wav_file, attn_file = line.split("|")
        meta_data.append([wav_file, attn_file])
    return meta_data | [
557,
3998,
361,
1094,
365
] |
def METHOD_NAME() -> int:
    """
    Obtain the organization id from the auth that is logged in.

    Returns
    -------
    int
        Organization id
    """
    if g.user:
        return g.user.organization.id
    elif g.node:
        return g.node.organization.id
    else:
        return g.container["organization_id"] | [
11591,
1044,
147
] |
def METHOD_NAME(git_modules_dir) -> str:
"""
+--------+-------------+----------+
| Name | Requires | Versions |
+--------+-------------+----------+
| std | | 0.0.1 |
+--------+-------------+----------+
| | | 3.2 |
+--------+-------------+----------+
| mod1 | mod3 ~= 0.1 | 0.0.1 |
+--------+-------------+----------+
| | mod3 ~= 0.1 | 3.2 |
+--------+-------------+----------+
| mod2 | | 0.0.1 |
+--------+-------------+----------+
| | | 2016.1 |
+--------+-------------+----------+
| mod3 | | 0.0.1 |
+--------+-------------+----------+
| | | 0.1 |
+--------+-------------+----------+
| badmod | mod2 < 2016 | 0.0.1 |
+--------+-------------+----------+
| | | 0.1 |
+--------+-------------+----------+
| mod5 | | 0.0.1 |
+--------+-------------+----------+
| | | 0.1 |
+--------+-------------+----------+
| mod6 | | 0.0.1 |
+--------+-------------+----------+
| | | 3.2 |
+--------+-------------+----------+
| mod7 | | 0.0.1 |
+--------+-------------+----------+
| | | 3.2 |
+--------+-------------+----------+
| | | 3.2.1 |
+--------+-------------+----------+
| | | 3.2.2 |
+--------+-------------+----------+
| | | 4.0 |
+--------+-------------+----------+
| | | 4.2 |
+--------+-------------+----------+
| | | 4.3 |
+--------+-------------+----------+
| mod8 | | 0.0.1 |
+--------+-------------+----------+
| | | 3.2 |
+--------+-------------+----------+
| | | 3.3.dev |
+--------+-------------+----------+
| mod12 | | 3.2.1 |
+--------+-------------+----------+
| | | 4.0.0.dev0 |
+--------+-------------+----------+
| | | 4.0.0 |
+--------+-------------+----------+"""
tempdir = git_modules_dir
reporoot = os.path.join(tempdir, "repos")
os.makedirs(reporoot)
make_module_simple(reporoot, "std")
make_module_simple(reporoot, "mod1", depends=[("mod3", "~=0.1")])
make_module_simple(reporoot, "mod2", version="2016.1")
mod3 = make_module_simple(reporoot, "mod3", version="0.1")
add_file(mod3, "badsignal", "present", "third commit")
mod4 = make_module_simple(reporoot, "badmod", [("mod2", "<2016")])
add_file(mod4, "badsignal", "present", "third commit")
mod5 = make_module_simple(reporoot, "mod5", version="0.1")
add_file(mod5, "badsignal", "present", "third commit")
mod6 = make_module_simple(reporoot, "mod6")
add_file(mod6, "badsignal", "present", "third commit")
mod7 = make_module_simple(reporoot, "mod7")
add_file(mod7, "nsignal", "present", "third commit", version="3.2.1")
add_file(mod7, "signal", "present", "fourth commit", version="3.2.2")
add_file_and_compiler_constraint(mod7, "badsignal", "present", "fifth commit", version="4.0", compiler_version="1000000.4")
add_file(mod7, "badsignal", "present", "sixth commit", version="4.1")
add_file_and_compiler_constraint(mod7, "badsignal", "present", "fifth commit", version="4.2", compiler_version="1000000.5")
add_file(mod7, "badsignal", "present", "sixth commit", version="4.3")
mod8 = make_module_simple(reporoot, "mod8", [])
add_file(mod8, "devsignal", "present", "third commit", version="3.3.dev2")
add_file(mod8, "mastersignal", "present", "last commit")
mod11 = make_module_simple(reporoot, "mod11")
add_file(mod11, "file", "test", "release version 3.2.1", version="3.2.1")
add_file(mod11, "file", "test", "release version 4.0.0", version="4.0.0")
add_file(mod11, "file", "test", "release version 4.1.0", version="4.1.0")
add_file(mod11, "file", "test", "release version 4.1.2", version="4.1.2")
add_file(mod11, "file", "test", "release version 4.2.0", version="4.2.0")
mod12 = make_module_simple(reporoot, "mod12", version="3.2.1")
add_file(mod12, "file", "test", "release version 4.0.0.dev0", version="4.0.0.dev0")
add_file(mod12, "file", "test", "release version 4.0.0", version="4.0.0")
proj = makemodule(
reporoot, "testproject", [("mod1", None), ("mod2", ">2016"), ("mod5", None)], True, ["mod1", "mod2", "mod6", "mod7"]
)
# results in loading of 1,2,3,6
commitmodule(proj, "first commit")
badproject = makemodule(reporoot, "badproject", [("mod15", None)], True)
commitmodule(badproject, "first commit")
baddep = makemodule(reporoot, "baddep", [("badmod", None), ("mod2", ">2016")], True)
commitmodule(baddep, "first commit")
devproject = makeproject(reporoot, "devproject", imports=["mod8"], install_mode=InstallMode.prerelease)
commitmodule(devproject, "first commit")
masterproject = makeproject(reporoot, "masterproject", imports=["mod8"], install_mode=InstallMode.master)
commitmodule(masterproject, "first commit")
masterproject_multi_mod = makeproject(
reporoot, "masterproject_multi_mod", imports=["mod2", "mod8"], install_mode=InstallMode.master
)
commitmodule(masterproject_multi_mod, "first commit")
nover = makemodule(reporoot, "nover", [])
commitmodule(nover, "first commit")
add_file(nover, "signal", "present", "second commit")
noverproject = makeproject(reporoot, "noverproject", imports=["nover"])
commitmodule(noverproject, "first commit")
"""
for freeze, test from C
A-> B,C,D
C-> E,F,E::a
C::a -> I
E::a -> J
E-> H
D-> F,G
"""
make_module_simple_deps(reporoot, "A", ["B", "C", "D"], project=True)
make_module_simple_deps(reporoot, "B")
c = make_module_simple_deps(reporoot, "C", ["E", "F", "E::a"], version="3.0")
add_file(c, "model/a.cf", "import modI", "add mod C::a", "3.2")
make_module_simple_deps(reporoot, "D", ["F", "G"])
e = make_module_simple_deps(reporoot, "E", ["H"], version="3.0")
add_file(e, "model/a.cf", "import modJ", "add mod E::a", "3.2")
make_module_simple_deps(reporoot, "F")
make_module_simple_deps(reporoot, "G")
make_module_simple_deps(reporoot, "H")
make_module_simple_deps(reporoot, "I")
make_module_simple_deps(reporoot, "J")
return reporoot | [
468,
522
] |
def METHOD_NAME(services, municipalities):
METHOD_NAME = []
max_unit_count = 5
index = 1
unit_id = 0
distinct_service_muni_counts = set()
unit_names = set()
for municipality in municipalities:
for service in services:
if index % max_unit_count > 0:
distinct_service_muni_counts.add((service.id, municipality.id))
for i in range(0, index % max_unit_count):
name = "unit_s{}_m{}_{}".format(service.id, municipality.id, i)
unit = Unit.objects.create(
id=unit_id,
municipality=municipality,
last_modified_time=now(),
name=name,
)
unit_names.add(name)
unit.save()
usd = UnitServiceDetails.objects.create(unit=unit, service=service)
usd.save()
METHOD_NAME.append(unit)
unit_id += 1
index += 1
unit_name = "unit_s0_special_case_no_muni"
unit = Unit.objects.create(
id=500000, municipality=None, last_modified_time=now(), name=unit_name
)
unit_names.add(unit_name)
usd = UnitServiceDetails.objects.create(unit=unit, service=services[0])
usd.save()
METHOD_NAME.append(unit)
# Currently generates the following units
assert unit_names == set(
[
"unit_s0_mhelsinki_0",
"unit_s0_mvantaa_0",
"unit_s1_mhelsinki_0",
"unit_s1_mhelsinki_1",
"unit_s1_mvantaa_0",
"unit_s1_mvantaa_1",
"unit_s2_mhelsinki_0",
"unit_s2_mhelsinki_1",
"unit_s2_mhelsinki_2",
"unit_s2_mvantaa_0",
"unit_s2_mvantaa_1",
"unit_s2_mvantaa_2",
"unit_s3_mhelsinki_0",
"unit_s3_mhelsinki_1",
"unit_s3_mhelsinki_2",
"unit_s3_mhelsinki_3",
"unit_s3_mvantaa_0",
"unit_s3_mvantaa_1",
"unit_s3_mvantaa_2",
"unit_s3_mvantaa_3",
"unit_s0_special_case_no_muni",
]
)
return {"units": METHOD_NAME, "count_rows": len(distinct_service_muni_counts) + 1} | [
1878
] |
def METHOD_NAME(self, soup: BeautifulSoup) -> str:
    # The soup here is the result of `self.get_soup(self.novel_url)`
    pass | [
214,
2893
] |
def METHOD_NAME(self, key: str, *, suppress=False) -> None:
    """
    Delete the given key from the config.

    Implements :meth:`AbstractConfig.delete`.
    """
    if self.config is None:
        raise ValueError('attempt to use a closed config')
    self.config.remove_option(self.SECTION, key)
    self.save() | [
34
] |
def METHOD_NAME(model, metric, data_loader):
    model.eval()
    metric.reset()
    for input_ids, seg_ids, lens, labels in data_loader:
        logits = model(input_ids, seg_ids)
        preds = paddle.argmax(logits, axis=-1)
        n_infer, n_label, n_correct = metric.compute(lens, preds, labels)
        metric.update(n_infer.numpy(), n_label.numpy(), n_correct.numpy())
    precision, recall, f1_score = metric.accumulate()
    print("[EVAL] Precision: %f - Recall: %f - F1: %f" % (precision, recall, f1_score))
    model.train() | [
1195
] |
def METHOD_NAME(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request | [
123,
377
] |
def METHOD_NAME() -> None:
rec = types.Reconstruction()
cam = pygeometry.Camera.create_perspective(0.5, 0, 0)
cam.id = "cam1"
rec.add_camera(cam)
_add_shot(rec, "im0", cam)
for i in range(1, 4):
p, n = "im" + str(i - 1), "im" + str(i)
_add_shot(rec, n, cam)
_add_point(rec, str(i), [p, n])
interior, boundary = reconstruction.shot_neighborhood(
rec, "im2", radius=1, min_common_points=1, max_interior_size=10
)
assert interior == {"im2"}
assert boundary == {"im1", "im3"}
interior, boundary = reconstruction.shot_neighborhood(
rec, "im2", radius=2, min_common_points=1, max_interior_size=10
)
assert interior == {"im1", "im2", "im3"}
assert boundary == {"im0"}
interior, boundary = reconstruction.shot_neighborhood(
rec, "im2", radius=3, min_common_points=1, max_interior_size=10
)
assert interior == {"im0", "im1", "im2", "im3"}
assert boundary == set()
interior, boundary = reconstruction.shot_neighborhood(
rec, "im2", radius=3, min_common_points=1, max_interior_size=3
)
assert interior == {"im1", "im2", "im3"}
assert boundary == {"im0"} | [
9,
10729,
15337,
1783,
303
] |
def METHOD_NAME(self):
"""Acquire a JSON Web Token."""
log.debug('Acquiring a TVDB JWT')
if not self.api_key:
raise AuthError('Missing API key')
response = requests.post(
urljoin(self.api_base, 'login'),
json=self.authorization,
verify=False,
)
try:
self._get_token(response)
finally:
return response | [
273
] |
def METHOD_NAME():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('speakers_sessions_version',
sa.Column('speaker_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('session_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('transaction_id', sa.BIGINT(), autoincrement=False, nullable=False),
sa.Column('operation_type', sa.SMALLINT(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('transaction_id', name='speakers_sessions_version_pkey')
)
op.create_index('ix_speakers_sessions_version_transaction_id', 'speakers_sessions_version', ['transaction_id'], unique=False)
op.create_index('ix_speakers_sessions_version_operation_type', 'speakers_sessions_version', ['operation_type'], unique=False)
op.create_table('sessions_version',
sa.Column('id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('subtitle', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('short_abstract', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('long_abstract', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('comments', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('starts_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('ends_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('track_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('language', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('microlocation_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('session_type_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('slides_url', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('video_url', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('audio_url', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('signup_url', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('event_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('state', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('deleted_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('submitted_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('submission_modifier', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('is_mail_sent', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('transaction_id', sa.BIGINT(), autoincrement=False, nullable=False),
sa.Column('operation_type', sa.SMALLINT(), autoincrement=False, nullable=False),
sa.Column('level', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('creator_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('last_modified_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.Column('send_email', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('is_locked', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=False),
sa.Column('complex_field_values', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', 'transaction_id', name='session_version_pkey')
)
op.create_index('ix_sessions_version_transaction_id', 'sessions_version', ['transaction_id'], unique=False)
op.create_index('ix_sessions_version_operation_type', 'sessions_version', ['operation_type'], unique=False)
# ### end Alembic commands ### | [
1502
] |
def METHOD_NAME(sender, task_id, **kwargs):
# When a task is removed, simply remove clutter
# When a task is re-processed, make sure we can re-share it if we shared a task previously
logger.info("Cleaning up OAM datastore for task {}".format(str(task_id)))
ds.del_key(get_key_for(task_id, "info")) | [
16853,
950
] |
def METHOD_NAME(self):
assert self._send(
"print-query {\"query\": \"slash\", \"unknown\": \"<title>\"}"
) == "\n" | [
9,
532,
335
] |
def METHOD_NAME():
client = boto3.client("apigatewayv2", region_name="eu-west-1")
api = client.create_api(Name="test-api", ProtocolType="HTTP")
api_domain = client.create_domain_name(DomainName="dev.service.io")
v1_mapping = client.create_api_mapping(
DomainName=api_domain["DomainName"],
ApiMappingKey="v1/api",
Stage="$default",
ApiId=api["ApiId"],
)
del v1_mapping["ResponseMetadata"]
get_resp = client.get_api_mappings(DomainName=api_domain["DomainName"])
assert "Items" in get_resp
assert v1_mapping in get_resp.get("Items")
client.delete_api_mapping(
DomainName=api_domain["DomainName"], ApiMappingId=v1_mapping["ApiMappingId"]
)
get_resp = client.get_api_mappings(DomainName=api_domain["DomainName"])
assert "Items" in get_resp
assert v1_mapping not in get_resp.get("Items") | [
9,
34,
58,
445
] |
def METHOD_NAME(defs_str):
    defs = {}
    lines = defs_str.splitlines()
    for line in lines:
        m = DEF_RE.match(line)
        if m:
            defs[m.group(1)] = m.group(2)
    return defs | [
297,
9045
] |
def METHOD_NAME(object, value, namedValues={}):
if isinstance(value, dict):
for key in list(value.keys()):
if isinstance(key, RegexType):
temp = None
for key2 in list(object.keys()):
# Find a key that match the Regex pattern!
match = key.match(key2)
if match is not None:
# For named-capture we compare values
# with the values available in params!
if not JsonUtils.checkDictionary(
match.groupdict(None), namedValues
):
break
temp = object.get(key2, None)
break
else:
temp = object.get(key, None)
# If temp is not None, then a match in json dictionary was
# found!
if temp is None or not JsonUtils.METHOD_NAME(
temp, value[key], namedValues
):
return False
return True
elif isinstance(value, RegexType):
# The compare is performed with a Regex object!
match = value.match(object)
if match is None or not JsonUtils.checkDictionary(
match.groupdict(None), namedValues
):
return False
return True
elif JsonUtils.compareValue(value, object):
return True
return False | [
250,
763,
199
] |
def METHOD_NAME(self) -> str:
    """
    The provider-assigned unique ID for this managed resource.
    """
    return pulumi.get(self, "id") | [
147
] |
def METHOD_NAME(scene):
    background_rect = Rectangle(color=WHITE, fill_opacity=1).scale(2)
    rgb_svg = SVGMobject(get_svg_resource("pixelated_text.svg"))
    scene.add(background_rect, rgb_svg)
    scene.wait() | [
9,
-1,
526
] |
def METHOD_NAME(self, responses: List[rdf_flows.GrrMessage]) -> Text: | [
19,
233,
147
] |
def METHOD_NAME(model_numel, batch_size, seq_len, step_time):
    return model_numel * batch_size * seq_len * 8 / 1e12 / (step_time + 1e-12) | [
19,
-1
] |
def METHOD_NAME(self) -> Optional[str]:
    """
    SecondaryKey of the created AuthorizationRule
    """
    return pulumi.get(self, "secondary_key") | [
3288,
59
] |
def METHOD_NAME(self, *args):
    """Indicate whether or not to enter a case suite"""
    if self.fall or not args:
        return True
    elif self.value in args:  # changed for v1.5, see below
        self.fall = True
        return True
    else:
        return False | [
590
] |
def METHOD_NAME(context,path,media=''):
__M_caller = context.caller_stack._push_frame()
try:
context._push_buffer()
self = context.get('self', UNDEFINED)
__M_writer = context.writer()
__M_writer(u'\n')
if path not in self.seen_css:
__M_writer(u' <link rel="stylesheet" type="text/css" href="')
__M_writer(filters.html_escape(unicode(path)))
__M_writer(u'" media="')
__M_writer(unicode(media))
__M_writer(u'">\n')
__M_writer(u' ')
self.seen_css.add(path)
__M_writer(u'\n')
finally:
__M_buf, __M_writer = context._pop_buffer_and_writer()
context.caller_stack._pop_frame()
__M_writer(filters.trim(__M_buf.getvalue()))
return '' | [
338,
392,
548
] |
def METHOD_NAME(test_class, api_version, is_hsm, **kwargs):
self._skip_if_not_configured(api_version, is_hsm)
if not self.is_logging_enabled:
kwargs.update({"logging_enable": False})
endpoint_url = self.managed_hsm_url if is_hsm else self.vault_url
client = self.create_key_client(endpoint_url, api_version=api_version, **kwargs)
with client:
fn(test_class, client, is_hsm=is_hsm, managed_hsm_url=self.managed_hsm_url, vault_url=self.vault_url) | [
8983
] |
def METHOD_NAME(self):
import paho.mqtt.publish as publish
self.publish = publish
self.setup_output_variables(OUTPUT_INFORMATION)
self.output_setup = True | [
15
] |
def METHOD_NAME(lt_ctx):
data = _mk_random(size=(8, 8, 8, 8), dtype="float32")
dataset = MemoryDataSet(data=data, tileshape=(4, 8, 8),
num_partitions=2, sig_dims=2, sync_offset=-62)
test = SimpleTestByTileNegativeSyncOffsetUDF()
lt_ctx.run_udf(dataset=dataset, udf=test) | [
9,
299,
2927,
1540
] |
def METHOD_NAME(self):
prescribed_carbon_intensity = \
CarbonIntensityDeterminationType.objects.create(
display_order="1",
effective_date="2017-01-01",
the_type="Carbon Intensity"
)
provision = ProvisionOfTheAct.objects.create(
description="Prescribed carbon intensity",
display_order="1",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (a)"
)
ApprovedFuelProvision.objects.create(
fuel=ApprovedFuel.objects.get(name="Petroleum-based gasoline"),
provision_act=provision,
determination_type=prescribed_carbon_intensity
)
provision = ProvisionOfTheAct.objects.create(
description="Prescribed carbon intensity",
display_order="2",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (b)"
)
ApprovedFuelProvision.objects.create(
fuel=ApprovedFuel.objects.get(name="Petroleum-based diesel"),
provision_act=provision,
determination_type=prescribed_carbon_intensity
)
# other fuel types
approved_fuel_code = \
CarbonIntensityDeterminationType.objects.create(
display_order="2",
effective_date="2017-01-01",
the_type="Fuel Code"
)
fuel_types = ApprovedFuel.objects.exclude(
name__in=["Petroleum-based diesel", "Petroleum-based gasoline"]
)
# Section 6 (5) (c)
provision = ProvisionOfTheAct.objects.create(
description="Approved fuel code",
display_order="3",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (c)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=approved_fuel_code
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (i)
default_carbon_intensity = \
CarbonIntensityDeterminationType.objects.create(
display_order="3",
effective_date="2017-01-01",
the_type="Default Carbon Intensity"
)
provision = ProvisionOfTheAct.objects.create(
description="Default Carbon Intensity Value",
display_order="4",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (i)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=default_carbon_intensity
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (ii) (A)
gh_genius = \
CarbonIntensityDeterminationType.objects.create(
display_order="4",
effective_date="2017-01-01",
the_type="GHGenius"
)
provision = ProvisionOfTheAct.objects.create(
description="GHGenius modelled",
display_order="5",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (ii) (A)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=gh_genius
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (ii) (B)
alternative_method = \
CarbonIntensityDeterminationType.objects.create(
display_order="5",
effective_date="2017-01-01",
the_type="Alternative"
)
provision = ProvisionOfTheAct.objects.create(
description="Alternative Method",
display_order="6",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (ii) (B)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=alternative_method
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj) | [
22
] |
def METHOD_NAME(distributed_query):
tmp_fh, tmp_fp = tempfile.mkstemp()
with os.fdopen(tmp_fh, "wb") as tmp_f:
workbook = xlsxwriter.Workbook(tmp_f)
worksheet = workbook.add_worksheet("Results")
columns = distributed_query.result_columns()
row_idx = col_idx = 0
worksheet.write_string(row_idx, col_idx, "serial number")
for column in columns:
col_idx += 1
worksheet.write_string(row_idx, col_idx, column)
worksheet.freeze_panes(1, 0)
for dqr in distributed_query.distributedqueryresult_set.iterator():
row_idx += 1
col_idx = 0
worksheet.write_string(row_idx, col_idx, dqr.serial_number)
for column in columns:
col_idx += 1
val = dqr.row.get(column)
if not val:
worksheet.write_blank(row_idx, col_idx, "")
elif isinstance(val, (int, float)):
worksheet.write_number(row_idx, col_idx, val)
elif isinstance(val, bool):
worksheet.write_boolean(row_idx, col_idx, val)
else:
if not isinstance(val, str):
val = str(val)
worksheet.write_string(row_idx, col_idx, val)
workbook.close()
return tmp_fp | [
294,
-1,
24,
4136,
9009,
171
] |
def METHOD_NAME(edges: List[Tuple[int, int]], num_node: int) -> np.ndarray:
    """Get adjacency matrix from edges.

    Args:
        edges (list[tuple[int, int]]): The edges of the graph.
        num_node (int): The number of nodes of the graph.

    Returns:
        np.ndarray: The adjacency matrix.
    """
    A = np.zeros((num_node, num_node))
    for i, j in edges:
        A[j, i] = 1
    return A | [
12329
] |
def METHOD_NAME():
    """
    Allocate capital to different strategies

    :return: Nothing
    """
    with dataBlob(log_name="Update-Strategy-Capital") as data:
        update_strategy_capital_object = updateStrategyCapital(data)
        update_strategy_capital_object.strategy_allocation()
    return success | [
86,
1554,
11552
] |
def METHOD_NAME(self):
# we can read minc compressed
# Not so for MINC2; hence this small sub-class
for tp in self.test_files:
content = open(tp['fname'], 'rb').read()
openers_exts = [(gzip.open, '.gz'), (bz2.BZ2File, '.bz2')]
if HAVE_ZSTD: # add .zst to test if installed
openers_exts += [(pyzstd.ZstdFile, '.zst')]
with InTemporaryDirectory():
for opener, ext in openers_exts:
fname = 'test.mnc' + ext
fobj = opener(fname, 'wb')
fobj.write(content)
fobj.close()
img = self.module.load(fname)
data = img.get_fdata()
assert_data_similar(data, tp)
del img | [
9,
2303
] |
def METHOD_NAME(headers):
    if MIME_PAOS in headers["Accept"]:
        if "PAOS" in headers:
            if f'ver="{paos.NAMESPACE}";"{SERVICE}"' in headers["PAOS"]:
                return True
    return False | [
12705,
9063
] |
def METHOD_NAME(num, grouping=True):
    # set locale for currency filter
    locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
    to_return = None
    if isinstance(num, (int, float)):
        to_return = locale.currency(round(num), grouping=grouping)
        return to_return[:-3]
    return to_return | [
5251,
-1
] |
def METHOD_NAME(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
self.fail('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
'stopTest']
unittest.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected) | [
9,
22,
128,
852,
374,
623,
9
] |
def METHOD_NAME(self, fullname):
candidate_locations = []
def _get_package_paths(*root_paths, containing_folder='plugins'):
for config_dir in orderedSet(map(Path, root_paths), lazy=True):
with contextlib.suppress(OSError):
yield from (config_dir / containing_folder).iterdir()
# Load from yt-dlp config folders
candidate_locations.extend(_get_package_paths(
*get_user_config_dirs('yt-dlp'),
*get_system_config_dirs('yt-dlp'),
containing_folder='plugins'))
# Load from yt-dlp-plugins folders
candidate_locations.extend(_get_package_paths(
get_executable_path(),
*get_user_config_dirs(''),
*get_system_config_dirs(''),
containing_folder='yt-dlp-plugins'))
candidate_locations.extend(map(Path, sys.path)) # PYTHONPATH
with contextlib.suppress(ValueError): # Added when running __main__.py directly
candidate_locations.remove(Path(__file__).parent)
parts = Path(*fullname.split('.'))
for path in orderedSet(candidate_locations, lazy=True):
candidate = path / parts
if candidate.is_dir():
yield candidate
elif path.suffix in ('.zip', '.egg', '.whl') and path.is_file():
if parts in dirs_in_zip(path):
yield candidate | [
1070,
1081
] |
def METHOD_NAME(self, text='', target=None):
self.unet.train()
self.unet = self.unet.to(self.device)
# Convert to latent space
with torch.no_grad():
latents = self.vae.encode(
target.to(dtype=self.weight_dtype)).latent_dist.sample()
latents = latents * self.vae.config.scaling_factor
# Sample noise that we'll add to the latents
noise = torch.randn_like(latents)
bsz = latents.shape[0]
# Sample a random timestep for each image
timesteps = torch.randint(
0,
self.noise_scheduler.num_train_timesteps, (bsz, ),
device=latents.device)
timesteps = timesteps.long()
# Add noise to the latents according to the noise magnitude at each timestep
# (this is the forward diffusion process)
noisy_latents = self.noise_scheduler.add_noise(latents, noise,
timesteps)
input_ids = self.tokenize_caption(text).to(self.device)
# Get the text embedding for conditioning
with torch.no_grad():
encoder_hidden_states = self.text_encoder(input_ids)[0]
# Get the target for loss depending on the prediction type
if self.noise_scheduler.config.prediction_type == 'epsilon':
target = noise
elif self.noise_scheduler.config.prediction_type == 'v_prediction':
target = self.noise_scheduler.get_velocity(latents, noise,
timesteps)
else:
raise ValueError(
f'Unknown prediction type {self.noise_scheduler.config.prediction_type}'
)
# Predict the noise residual and compute loss
model_pred = self.unet(noisy_latents, timesteps,
encoder_hidden_states).sample
if model_pred.shape[1] == 6:
model_pred, _ = torch.chunk(model_pred, 2, dim=1)
loss = F.mse_loss(model_pred.float(), target.float(), reduction='mean')
output = {OutputKeys.LOSS: loss}
return output | [
76
] |
def METHOD_NAME(username: str, role: str = "editor"):
    token = LoadedAccessToken(uuid4(), username, ROLES.get(role)).encode()
    return "Bearer {}".format(token) | [
93,
1089,
466
] |
def METHOD_NAME(self):
    self.expectNone('bytes=') | [
9,
654,
2757
] |
def METHOD_NAME(self, group_sets_response):
    schema = BlackboardListGroupSetsSchema(
        factories.requests.Response(json_data=group_sets_response)
    )
    result = schema.parse()
    assert result == [{"id": "GROUP_SET_1", "name": "GROUP SET 1"}] | [
9,
1807
] |
def METHOD_NAME(self, name):
    self.model.load_state_dict(torch.load('g2p-{}.ptsd'.format(name))) | [
557,
551,
11341
] |
async def METHOD_NAME(message_type):
"""The client delete the listener"""
async def server_node(ep):
await _shutdown_send(ep, message_type)
await _shutdown_send(ep, message_type)
listener = ucp.create_listener(
server_node,
)
ep = await ucp.create_endpoint(
ucp.get_address(),
listener.port,
)
await _shutdown_recv(ep, message_type)
assert listener.closed() is False
del listener
await _shutdown_recv(ep, message_type) | [
9,
4130,
1269
] |
def METHOD_NAME():
    customer_name = "Shipment Customer"
    customer = frappe.get_all("Customer", fields=["name"], filters={"name": customer_name})
    if len(customer):
        return customer[0]
    else:
        return create_shipment_customer(customer_name) | [
19,
8260,
1487
] |
def METHOD_NAME(self, train: bool) -> Any:
transform = transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]
)
# Use a file lock so that workers on the same node attempt the download one at a time.
# The first worker will actually perform the download, while the subsequent workers will
# see that the dataset is downloaded and skip.
with filelock.FileLock(os.path.join(self.download_directory, "lock")):
return torchvision.datasets.CIFAR10(
root=self.download_directory, train=train, download=True, transform=transform
) | [
136,
126
] |
def METHOD_NAME(self):
    return self._types | [
1205,
119
] |
def METHOD_NAME(resource_group_name: Optional[str] = None,
virtual_network_gateway_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualNetworkGatewayVpnclientIpsecParametersResult:
"""
The Get VpnclientIpsecParameters operation retrieves information about the vpnclient ipsec policy for P2S client of virtual network gateway in the specified resource group through Network resource provider.
:param str resource_group_name: The name of the resource group.
:param str virtual_network_gateway_name: The virtual network gateway name.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['virtualNetworkGatewayName'] = virtual_network_gateway_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:network/v20230501:getVirtualNetworkGatewayVpnclientIpsecParameters', __args__, opts=opts, typ=GetVirtualNetworkGatewayVpnclientIpsecParametersResult).value
return AwaitableGetVirtualNetworkGatewayVpnclientIpsecParametersResult(
dh_group=pulumi.get(__ret__, 'dh_group'),
ike_encryption=pulumi.get(__ret__, 'ike_encryption'),
ike_integrity=pulumi.get(__ret__, 'ike_integrity'),
ipsec_encryption=pulumi.get(__ret__, 'ipsec_encryption'),
ipsec_integrity=pulumi.get(__ret__, 'ipsec_integrity'),
pfs_group=pulumi.get(__ret__, 'pfs_group'),
sa_data_size_kilobytes=pulumi.get(__ret__, 'sa_data_size_kilobytes'),
sa_life_time_seconds=pulumi.get(__ret__, 'sa_life_time_seconds')) | [
19,
162,
1228,
14,
8939,
8940,
386
] |
def METHOD_NAME(self):
    """Monitorize batch queue size"""
    batch_queue_size = self._requests.qsize()
    metrics.log(batch_request_queue=batch_queue_size) | [
2277,
651,
1863
] |
async def METHOD_NAME(next_link=None):
request = prepare_request(next_link)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response | [
19,
243
] |
def METHOD_NAME(self):
"""Parse node/edge data from CSV files and construct DGL.Graphs"""
from .csv_dataset_base import (
DGLGraphConstructor,
EdgeData,
GraphData,
NodeData,
)
meta_yaml = self.meta_yaml
base_dir = self.raw_dir
node_data = []
for meta_node in meta_yaml.node_data:
if meta_node is None:
continue
ntype = meta_node.ntype
data_parser = (
self.ndata_parser
if callable(self.ndata_parser)
else self.ndata_parser.get(ntype, self.default_data_parser)
)
ndata = NodeData.load_from_csv(
meta_node,
base_dir=base_dir,
separator=meta_yaml.separator,
data_parser=data_parser,
)
node_data.append(ndata)
edge_data = []
for meta_edge in meta_yaml.edge_data:
if meta_edge is None:
continue
etype = tuple(meta_edge.etype)
data_parser = (
self.edata_parser
if callable(self.edata_parser)
else self.edata_parser.get(etype, self.default_data_parser)
)
edata = EdgeData.load_from_csv(
meta_edge,
base_dir=base_dir,
separator=meta_yaml.separator,
data_parser=data_parser,
)
edge_data.append(edata)
graph_data = None
if meta_yaml.graph_data is not None:
meta_graph = meta_yaml.graph_data
data_parser = (
self.default_data_parser
if self.gdata_parser is None
else self.gdata_parser
)
graph_data = GraphData.load_from_csv(
meta_graph,
base_dir=base_dir,
separator=meta_yaml.separator,
data_parser=data_parser,
)
# construct graphs
self.graphs, self.data = DGLGraphConstructor.construct_graphs(
node_data, edge_data, graph_data
)
if len(self.data) == 1:
self.labels = list(self.data.values())[0] | [
356
] |
def METHOD_NAME(self, state):  # pylint: disable=unused-argument
    self._assemble() | [
69,
992,
1180
] |
def METHOD_NAME(self, query: Query):
    """Add filters to the query"""
    if self.filters_to_add:
        return query.filter(*self.filters_to_add)
    return query | [
238,
469
] |
def METHOD_NAME(coords):
    axis_order = 'lat,lon'
    if round(coords[0]) == 5 and round(coords[1]) == 52:
        axis_order = 'lon,lat'
    return axis_order | [
19,
2227,
852
] |
def METHOD_NAME(self, **kwargs: Any) -> Iterable["_models.Operation"]:
"""Lists all of the available HDInsight REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.hdinsight.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data) | [
245
] |
def METHOD_NAME(self, tags=None):
self.tags_todelete = tags or self.tags_todelete
if not self.tags_todelete:
# We must at least have something to delete !
return []
# Prepare labels
singular = len(self.tags_todelete)
cancel_text = ngettext("Keep selected tag", "Keep selected tags", singular)
delete_text = ngettext("Permanently remove tag", "Permanently remove tags", singular)
label_text = ngettext("Deleting a tag cannot be undone, "
"and will delete the tag shown below. Tasks containing this tag will not be deleted: ",
"Deleting a tag cannot be undone, "
"and will delete the tag shown below. Tasks containing this tag will not be deleted:",
singular)
label_text = label_text[0:label_text.find(":") + 1]
# we don't want to end with just one task that doesn't fit the
# screen and a line saying "And one more task", so we go a
# little over our limit
tags_count = len(self.tags_todelete)
missing_tags_count = tags_count - self.MAXIMUM_TAGS_TO_SHOW
if missing_tags_count >= 2:
tagslist = self.tags_todelete[:self.MAXIMUM_TAGS_TO_SHOW]
titles_suffix = _("\nAnd %d more tags") % missing_tags_count
else:
tagslist = self.tags_todelete
titles_suffix = ""
if len(tagslist) == 1:
# Don't show a bulleted list if there's only one item
titles = "".join(tag for tag in tagslist)
else:
titles = "".join("\n• " + tag for tag in tagslist)
# Build and run dialog
dialog = Gtk.MessageDialog(transient_for=self.browser, modal=True)
dialog.add_button(cancel_text, Gtk.ResponseType.CANCEL)
delete_btn = dialog.add_button(delete_text, Gtk.ResponseType.YES)
delete_btn.get_style_context().add_class("destructive-action")
dialog.props.use_markup = True
dialog.props.text = "<span weight=\"bold\">" + label_text + "</span>"
dialog.props.secondary_text = titles + titles_suffix
response = dialog.run()
dialog.destroy()
if response == Gtk.ResponseType.YES:
self.on_delete_confirm()
elif response == Gtk.ResponseType.REJECT:
tagslist = []
return tagslist | [
34,
114
] |
def METHOD_NAME(self, object_id, permission, principal):
    permission_key = f"permission:{object_id}:{permission}"
    object_permission_principals = self._store.get(permission_key, set())
    object_permission_principals.add(principal)
    self._store[permission_key] = object_permission_principals | [
238,
3391,
24,
8926
] |
def METHOD_NAME(db):
    """Method to test not deleting ticket holders with no order id but created within expiry time"""
    attendee = AttendeeSubFactory(
        created_at=datetime.datetime.utcnow(),
        modified_at=datetime.datetime.utcnow(),
    )
    db.session.commit()
    attendee_id = attendee.id
    delete_ticket_holders_no_order_id()
    ticket_holder = TicketHolder.query.get(attendee_id)
    assert ticket_holder != None | [
9,
34,
3769,
4411,
152,
4417
] |
def METHOD_NAME(self):
# nonsense strings
self._check_value_error('')
self._check_value_error('\N{greek small letter pi}')
if self.allow_bytes:
self._check_value_error(b'')
# bytes which can't be converted to strings via utf8
self._check_value_error(b"\xFF")
if self.exact_match:
self._check_value_error("there's no way this is supported") | [
9,
909,
99
] |
def METHOD_NAME(self):
assets = self.asset_ids
if self.category_ids:
assets = assets.filtered(
lambda a: a.category_id.id in self.category_ids.ids
)
if self.company_id:
assets = assets.filtered(
lambda a: a.company_id.id in (False, self.company_id.id)
)
if self.date:
assets = assets.filtered(lambda a: a.purchase_date <= self.date)
if self.type_ids:
assets = assets.filtered(
lambda a: any(
[d.type_id.id in self.type_ids.ids for d in a.depreciation_ids]
)
)
return assets | [
527,
3407
] |
def METHOD_NAME(k):
    k = k[np.argsort(k.prod(1))]  # sort small to large
    x, best = metric(k, wh0)
    bpr, aat = (best > thr).float().mean(), (x > thr).float().mean() * n  # best possible recall, anch > thr
    print(f'{prefix}thr={thr:.2f}: {bpr:.4f} best possible recall, {aat:.2f} anchors past thr')
    print(f'{prefix}n={n}, img_size={img_size}, metric_all={x.mean():.3f}/{best.mean():.3f}-mean/best, '
          f'past_thr={x[x > thr].mean():.3f}-mean: ', end='')
    for i, x in enumerate(k):
        print('%i,%i' % (round(x[0]), round(x[1])), end=', ' if i < len(k) - 1 else '\n')  # use in *.cfg
    return k | [
38,
51
] |
def METHOD_NAME(self) -> Optional[str]:
    """
    This will be used to handle Optimistic Concurrency.
    """
    return pulumi.get(self, "etag") | [
431
] |
async def METHOD_NAME(
partition_sda1_service: PartitionService, dbus_session_bus: MessageBus
):
"""Test deleting partition."""
partition_sda1_service.Delete.calls.clear()
sda1 = UDisks2Partition("/org/freedesktop/UDisks2/block_devices/sda1")
with pytest.raises(DBusNotConnectedError):
await sda1.delete(DeletePartitionOptions(tear_down=True))
await sda1.connect(dbus_session_bus)
await sda1.delete(DeletePartitionOptions(tear_down=True))
assert partition_sda1_service.Delete.calls == [
(
{
"tear-down": Variant("b", True),
"auth.no_user_interaction": Variant("b", True),
},
)
] | [
9,
34
] |
def METHOD_NAME(self) -> SP:
    """Return current primary service provider.

    If primary sp not available, such as not reported by SD, connection to SD not established yet
    the name and ports will be empty strings.
    """
    return self._psp | [
19,
1379,
1728
] |
def METHOD_NAME(self, job):
    self._ensure_paths()
    node_path = self.path + "/" + str(job.id)
    value = {
        'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
        'job_state': job.__getstate__()
    }
    data = pickle.dumps(value, self.pickle_protocol)
    try:
        self.client.create(node_path, value=data)
    except NodeExistsError:
        raise ConflictingIdError(job.id) | [
238,
202
] |
def METHOD_NAME(self):
    table = Table(data)
    port_ids = []
    for i in range(10):
        port_ids.append(table.make_port())
    assert port_ids == list(range(1, 11)) | [
9,
93,
237,
3272
] |
def METHOD_NAME(*args, **kw):
    pass | [
7634,
5931,
673,
421
] |
def METHOD_NAME(
hash_type: common_pb2.HashType,
curve: common_pb2.EllipticCurveType,
encoding: ecdsa_pb2.EcdsaSignatureEncoding,
output_prefix_type: tink_pb2.OutputPrefixType = tink_pb2.TINK
) -> tink_pb2.KeyTemplate:
"""Creates a KeyTemplate containing an EcdsaKeyFormat."""
params = ecdsa_pb2.EcdsaParams(
hash_type=hash_type, curve=curve, encoding=encoding)
key_format = ecdsa_pb2.EcdsaKeyFormat(params=params)
key_template = tink_pb2.KeyTemplate(
value=key_format.SerializeToString(),
type_url=_ECDSA_KEY_TYPE_URL,
output_prefix_type=output_prefix_type)
return key_template | [
129,
10300,
59,
671
] |
def METHOD_NAME(self):
    from bodystring.operations import EnumOperations
    with pytest.raises(ImportError):
        from bodystring.operations import _enum_operations_py3
    from bodystring.operations._enum_operations import EnumOperations as EnumOperationsPy2
    assert EnumOperations == EnumOperationsPy2 | [
9,
2206,
861
] |
def METHOD_NAME(grayscale, scaled, max_test_steps=10):
# arbitrarily chosen number for stepping into env. and ensuring all observations are in the required range
env = AtariPreprocessing(
StepAPICompatibility(AtariTestingEnv(), output_truncation_bool=True),
screen_size=84,
grayscale_obs=grayscale,
scale_obs=scaled,
frame_skip=1,
noop_max=0,
)
obs, _ = env.reset()
max_obs = 1 if scaled else 255
assert np.all(0 <= obs) and np.all(obs <= max_obs)
terminated, truncated, step_i = False, False, 0
while not (terminated or truncated) and step_i <= max_test_steps:
obs, _, terminated, truncated, _ = env.step(env.action_space.sample())
assert np.all(0 <= obs) and np.all(obs <= max_obs)
step_i += 1
env.close() | [
9,
9054,
1184,
930
] |
def METHOD_NAME(trial, model, data, objConfig, objParams):
x = []
params_morgan = objParams['model_params']
for smiles in data:
m = smiles2mol(smiles)
fingerprint = rdkit.Chem.AllChem.GetMorganFingerprintAsBitVect(m, **params_morgan)
x.append(fingerprint)
return | [
12074,
24,
15291,
4371
] |
def METHOD_NAME(self):
    vals = [0, -1, -2147483648, 2147483647]
    cmd = 'turo_simple_array_s32 ' + ' '.join(map(str, vals))
    resp = self.exec_turo_cmd(cmd)
    self.assertCountEqual(resp, vals) | [
9,
12827,
53,
877,
7711
] |
def METHOD_NAME(self):
"""
A convenience function to commit any changes to git
"""
git_cmd = ["git", "diff", "--name-only"]
complproc = subprocess.run(git_cmd, stdout=subprocess.PIPE, check=True)
if not complproc.stdout or complproc.stdout.isspace():
print(("Not commiting anything because nothing changed in cleanup "
"task %s" % self._name))
return
print(("Commiting changes for cleanup task %s" % self._name))
with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmpfile:
tmpfile.write(self._name)
tmpfile.write("\n\n")
tmpfile.write(self._description)
git_cmd = ["git", "commit", "-a", "-F", tmpfile.name]
subprocess.run(git_cmd, check=True)
os.remove(tmpfile.name) | [
1160,
24,
1493
] |
def METHOD_NAME(self) -> datetime:
    self._completeIfNotSet(self._created_at)
    return self._created_at.value | [
152,
1541
] |
def METHOD_NAME(self, **kwargs): # noqa: E501
"""get_api_group # noqa: E501
get information of a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_group_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(V1APIGroup, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/authorization.k8s.io/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIGroup', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats) | [
19,
58,
846,
41,
721,
100
] |
def METHOD_NAME() -> bool:
    """
    Whether the logging outputs returned by `forward` can be summed
    across workers prior to calling `reduce_metrics`. Setting this
    to True will improve distributed training speed.
    """
    return True | [
663,
141,
1046,
673,
8599
] |
def METHOD_NAME(c):
    functions = c.functions
    functions = [
        f.full_name
        for f in functions
        if f.visibility in ["public", "external"] and not f.is_constructor and not f.is_fallback
    ]
    variables = c.state_variables
    variables = [
        variable.name + "()" for variable in variables if variable.visibility in ["public"]
    ]
    return list(set(functions + variables)) | [
19,
621
] |
def METHOD_NAME(iff, cmd):
    s = socket.socket()
    ifreq = ioctl(s, cmd, struct.pack("16s16x", iff))
    s.close()
    return ifreq | [
19,
217
] |
def METHOD_NAME(path: StrPath) -> PathDistribution: ... | [
1541
] |
def METHOD_NAME(self): | [
9,
137,
708,
1541,
1079,
15460,
41
] |
def METHOD_NAME(self, key, root):
if not root:
return None
# Header node for easier manipulation
header = Node(None)
header.left = header.right = None
LeftTreeMax = RightTreeMin = header
# Splay the key to the root of the tree
while True:
if key < root.key:
if not root.left:
break
if key < root.left.key:
# Zig-Zig (Right-Right) rotation
root = self.RR_Rotate(root)
if not root.left:
break
RightTreeMin.left = root
RightTreeMin = RightTreeMin.left
root = root.left
RightTreeMin.left = None
elif key > root.key:
if not root.right:
break
if key > root.right.key:
# Zag-Zag (Left-Left) rotation
root = self.LL_Rotate(root)
if not root.right:
break
LeftTreeMax.right = root
LeftTreeMax = LeftTreeMax.right
root = root.right
LeftTreeMax.right = None
else:
# Key found at the root, break the loop
break
# Reassemble the tree
LeftTreeMax.right = root.left
RightTreeMin.left = root.right
root.left = header.right
root.right = header.left
return root | [
-1
] |
def METHOD_NAME(path):
    """
    mkdir if not exists, ignore the exception when multiprocess mkdir together
    """
    if not os.path.exists(path):
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno == errno.EEXIST and os.path.isdir(path):
                logger.warning(
                    'be happy if some process has already created {}'.format(
                        path))
            else:
                raise OSError('Failed to mkdir {}'.format(path)) | [
3456,
217,
130,
1985
] |
def METHOD_NAME(self): | [
531,
481
] |
def METHOD_NAME(self):
    pred_labels = self.clf.predict(self.X_test)
    assert_equal(pred_labels.shape, self.y_test.shape) | [
9,
2726,
415
] |
def METHOD_NAME(worker):
    worker.run('hashing', {"num_hashes": 100*1000, "input_len": 1024}) | [
-1
] |
def METHOD_NAME(self):
    # Get and set the current umask value for testing mode bits.
    umask = os.umask(0o002)
    os.umask(umask)
    mkpath(self.target, 0o700)
    self.assertEqual(
        stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
    mkpath(self.target2, 0o555)
    self.assertEqual(
        stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) | [
9,
6403,
41,
343,
854
] |
def METHOD_NAME(data):
    numrange = data.get("range", [1.0, 100000.0])
    min_value, max_value = numrange[0], numrange[1]
    label = data.get("label")
    widget = forms.NumberInput()
    help_text = data.get("help", f"Enter number between {min_value} and {max_value}")
    initial = data.get("value", 1)
    field = forms.FloatField(widget=widget, initial=initial, min_value=min_value, max_value=max_value,
                             help_text=help_text, label=label, required=False)
    return field | [
1819,
101
] |
def METHOD_NAME(P, W, Conv):
if P.developmentPin.get():
reload(STAR)
W.lDesc.setText(_translate('Conversational', 'CREATING STAR'))
W.iLabel.setPixmap(P.conv_star_l)
#alignment and size
rightAlign = ['ctLabel', 'spLabel', 'xsLabel', 'xsEntry', 'ysLabel', \
'ysEntry', 'liLabel', 'liEntry', 'loLabel', 'loEntry', \
'pLabel', 'pEntry', 'odLabel', 'odEntry', 'idLabel', \
'idEntry', 'aLabel', 'aEntry']
centerAlign = ['lDesc']
rButton = ['intExt', 'centLeft']
pButton = ['preview', 'add', 'undo']
for widget in rightAlign:
W[widget].setAlignment(Qt.AlignRight | Qt.AlignVCenter)
W[widget].setFixedWidth(80)
W[widget].setFixedHeight(24)
for widget in centerAlign:
W[widget].setAlignment(Qt.AlignCenter | Qt.AlignBottom)
W[widget].setFixedWidth(240)
W[widget].setFixedHeight(24)
for widget in rButton:
W[widget].setFixedWidth(80)
W[widget].setFixedHeight(24)
for widget in pButton:
W[widget].setFixedWidth(80)
W[widget].setFixedHeight(24)
#connections
W.conv_material.currentTextChanged.connect(lambda:auto_preview(P, W, Conv))
W.intExt.toggled.connect(lambda:auto_preview(P, W, Conv, 'intext'))
W.centLeft.toggled.connect(lambda:auto_preview(P, W, Conv, 'center'))
W.preview.pressed.connect(lambda:preview(P, W, Conv))
W.add.pressed.connect(lambda:Conv.conv_add_shape_to_file(P, W))
W.undo.pressed.connect(lambda:Conv.conv_undo_shape(P, W))
entries = ['xsEntry', 'ysEntry', 'liEntry', 'loEntry', \
'pEntry', 'odEntry', 'idEntry', 'aEntry']
for entry in entries:
W[entry].textChanged.connect(lambda:entry_changed(P, W, Conv, W.sender()))
W[entry].returnPressed.connect(lambda:preview(P, W, Conv))
#add to layout
if P.landscape:
W.entries.addWidget(W.ctLabel, 0, 0)
W.entries.addWidget(W.intExt, 0, 1)
W.entries.addWidget(W.spLabel, 1, 0)
W.entries.addWidget(W.centLeft, 1, 1)
W.entries.addWidget(W.xsLabel, 2, 0)
W.entries.addWidget(W.xsEntry, 2, 1)
W.entries.addWidget(W.ysLabel, 3, 0)
W.entries.addWidget(W.ysEntry, 3, 1)
W.entries.addWidget(W.liLabel, 4, 0)
W.entries.addWidget(W.liEntry, 4, 1)
W.entries.addWidget(W.loLabel, 5, 0)
W.entries.addWidget(W.loEntry, 5, 1)
W.entries.addWidget(W.pLabel, 6, 0)
W.entries.addWidget(W.pEntry, 6, 1)
W.entries.addWidget(W.odLabel, 7, 0)
W.entries.addWidget(W.odEntry, 7, 1)
W.entries.addWidget(W.idLabel, 8, 0)
W.entries.addWidget(W.idEntry, 8, 1)
W.entries.addWidget(W.aLabel, 9, 0)
W.entries.addWidget(W.aEntry, 9, 1)
for r in [10,11]:
W['s{}'.format(r)] = QLabel('')
W['s{}'.format(r)].setFixedHeight(24)
W.entries.addWidget(W['s{}'.format(r)], r, 0)
W.entries.addWidget(W.preview, 12, 0)
W.entries.addWidget(W.add, 12, 2)
W.entries.addWidget(W.undo, 12, 4)
W.entries.addWidget(W.lDesc, 13 , 1, 1, 3)
W.entries.addWidget(W.iLabel, 0 , 2, 7, 3)
else:
W.entries.addWidget(W.conv_material, 0, 0, 1, 5)
W.entries.addWidget(W.ctLabel, 1, 0)
W.entries.addWidget(W.intExt, 1, 1)
W.entries.addWidget(W.spLabel, 2, 0)
W.entries.addWidget(W.centLeft, 2, 1)
W.entries.addWidget(W.xsLabel, 3, 0)
W.entries.addWidget(W.xsEntry, 3, 1)
W.entries.addWidget(W.ysLabel, 3, 2)
W.entries.addWidget(W.ysEntry, 3, 3)
W.entries.addWidget(W.liLabel, 4, 0)
W.entries.addWidget(W.liEntry, 4, 1)
W.entries.addWidget(W.loLabel, 4, 2)
W.entries.addWidget(W.loEntry, 4, 3)
W.entries.addWidget(W.pLabel, 5, 0)
W.entries.addWidget(W.pEntry, 5, 1)
W.entries.addWidget(W.odLabel, 6, 0)
W.entries.addWidget(W.odEntry, 6, 1)
W.entries.addWidget(W.idLabel, 6, 2)
W.entries.addWidget(W.idEntry, 6, 3)
W.entries.addWidget(W.aLabel, 7, 0)
W.entries.addWidget(W.aEntry, 7, 1)
W.s8 = QLabel('')
W.s8.setFixedHeight(24)
W.entries.addWidget(W.s8, 8, 0)
W.entries.addWidget(W.preview, 9, 0)
W.entries.addWidget(W.add, 9, 2)
W.entries.addWidget(W.undo, 9, 4)
W.entries.addWidget(W.lDesc, 10 , 1, 1, 3)
W.entries.addWidget(W.iLabel, 0 , 5, 7, 3)
W.pEntry.setFocus()
P.convSettingsChanged = False | [
1551
] |
def METHOD_NAME(self, repo_url): | [
670
] |
def METHOD_NAME(self, *args, **kwargs):
    if not isinstance(self._wrapped_protocol, TCompactProtocol):
        return func(self, *args, **kwargs)
    try:
        return func(self, *args, **kwargs)
    except Exception:
        self._wrapped_protocol.state = CLEAR
        raise | [
291
] |
def METHOD_NAME(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
    """Runs the network request through the client's chained policies.

    >>> from azure.core.rest import HttpRequest
    >>> request = HttpRequest("GET", "https://www.example.org/")
    <HttpRequest [GET], url: 'https://www.example.org/'>
    >>> response = client._send_request(request)
    <HttpResponse: 200 OK>

    For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

    :param request: The network request you want to make. Required.
    :type request: ~azure.core.rest.HttpRequest
    :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
    :return: The response of your network call. Does not do error handling on your response.
    :rtype: ~azure.core.rest.HttpResponse
    """
    request_copy = deepcopy(request)
    request_copy.url = self._client.format_url(request_copy.url)
    return self._client.send_request(request_copy, **kwargs) | [
353,
377
] |
def METHOD_NAME(self, poolID: int) -> ContractFunction:
    return self.contract.functions.METHOD_NAME(poolID) | [
-1,
1567,
12591
] |
def METHOD_NAME(start, end, step):
    current = start
    while current < end:
        yield current
        current += step
    while current >= 0:
        yield current
        current -= step | [
6654
] |
def METHOD_NAME(self):
    super().METHOD_NAME()
    self.login_as(user=self.user) | [
0,
1
] |
def METHOD_NAME(config, str2id, term_file, terms, item_distribution):
edges = []
with io.open(config.graph_data, encoding=config.encoding) as f:
for idx, line in enumerate(f):
if idx % 100000 == 0:
log.info("%s readed %s lines" % (config.graph_data, idx))
slots = []
for col_idx, col in enumerate(line.strip("\n").split("\t")):
s = col[:config.max_seqlen]
if s not in str2id:
str2id[s] = len(str2id)
term_file.write(str(col_idx) + "\t" + col + "\n")
item_distribution.append(0)
slots.append(str2id[s])
src = slots[0]
dst = slots[1]
edges.append((src, dst))
edges.append((dst, src))
item_distribution[dst] += 1
edges = np.array(edges, dtype="int64")
return edges | [
557,
303
] |