text (string, length 15 to 7.82k) | ids (sequence, length 1 to 7) |
---|---|
def METHOD_NAME(self):
self.pre_operations()
yield self.VirtualHubsDelete(ctx=self.ctx)()
self.post_operations() | [
750,
710
] |
def METHOD_NAME(self):
super(MapPrint, self).METHOD_NAME() | [
69,
1798,
1219
] |
def METHOD_NAME(d):
for p in os.listdir(d):
yield path_join(d, p) | [
1014,
1190,
192
] |
async def METHOD_NAME(azure_client, unencoded_path, unencoded_query):
await azure_client.skip_url_encoding.get_method_path_valid(unencoded_path)
await azure_client.skip_url_encoding.get_path_valid(unencoded_path)
await azure_client.skip_url_encoding.get_swagger_path_valid()
await azure_client.skip_url_encoding.get_method_query_valid(q1=unencoded_query)
await azure_client.skip_url_encoding.get_path_query_valid(q1=unencoded_query)
await azure_client.skip_url_encoding.get_swagger_query_valid()
await azure_client.skip_url_encoding.get_method_query_null()
await azure_client.skip_url_encoding.get_method_query_null(q1=None) | [
9,
2423,
274,
2300
] |
def METHOD_NAME(self):
urls = [
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/test_forcefield.offxml",
"https://raw.githubusercontent.com/openforcefield/openff-toolkit/master/openff/toolkit/data/test_forcefields/tip3p.offxml",
]
# A generator should work as well
ForceField(iter(urls)) | [
9,
129,
3139,
280,
274,
640
] |
def METHOD_NAME(self):
data = b"room1:10.0.0.8"
b = bulkparse.NetboxBulkParser(data)
with pytest.raises(bulkparse.RequiredFieldMissing):
b.__next__() | [
9,
1707,
534,
427,
241,
168
] |
def METHOD_NAME(self, target):
"""Function to output to a cairo surface.
target is a cairo Context or filename
"""
def output_context(ctx):
target_ctx = target
target_ctx.set_source_surface(ctx.get_target())
target_ctx.paint()
return target_ctx
def output_surface(ctx):
target_ctx = cairo.Context(target)
target_ctx.set_source_surface(ctx.get_target())
target_ctx.paint()
return target_ctx
def output_file(ctx):
root, extension = os.path.splitext(target)
filename = target
extension = extension.lower()
if extension == ".png":
surface = ctx.get_target()
surface.write_to_png(target)
elif extension == ".pdf":
target_ctx = cairo.Context(
cairo.PDFSurface(filename, *self.size_or_default()),
)
target_ctx.set_source_surface(ctx.get_target())
target_ctx.paint()
elif extension in (".ps", ".eps"):
target_ctx = cairo.Context(
cairo.PSSurface(filename, *self.size_or_default()),
)
if extension == ".eps":
target_ctx.set_eps(extension=".eps")
target_ctx.set_source_surface(ctx.get_target())
target_ctx.paint()
elif extension == ".svg":
surface = cairo.SVGSurface(filename, *self.size_or_default())
surface.restrict_to_version(cairo.SVGVersion.VERSION_1_2)
target_ctx = cairo.Context(surface)
target_ctx.set_source_surface(ctx.get_target())
target_ctx.paint()
return filename
if isinstance(target, cairo.Context):
return output_context
elif isinstance(target, cairo.Surface):
return output_surface
else:
return output_file | [
146,
1145
] |
def METHOD_NAME(revs_a, revs_b):
for rev in revs_a:
if rev in revs_b:
return True
return False | [
1152,
590
] |
def METHOD_NAME(self): | [
13733
] |
def METHOD_NAME(self):
with warns(UserWarning):
assert get_base_url("example.com", include_scheme=False) == "example.com"
assert (
get_base_url("example.com/some/path", include_scheme=False)
== "example.com"
)
assert (
get_base_url("example.com.au/some/path", include_scheme=False)
== "example.com.au"
)
assert get_base_url("bad_url", include_scheme=False) == "bad_url" | [
9,
69,
2248,
529,
4932,
529,
2015
] |
def METHOD_NAME(self):
return find_libraries(
["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"],
root=self.prefix,
recursive=True,
) | [
5051
] |
def METHOD_NAME(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name") | [
156
] |
def METHOD_NAME(config_dict: dict, read_only: bool = False) -> "DataHubLiteLocal":
lite_local_config = LiteLocalConfig.parse_obj(config_dict)
lite_type = lite_local_config.type
try:
lite_class = lite_registry.get(lite_type)
except KeyError:
raise Exception(
f"Failed to find a registered lite implementation for {lite_type}. Valid values are {[k for k in lite_registry.mapping.keys()]}"
)
lite_specific_config = lite_class.get_config_class().parse_obj(
lite_local_config.config
)
lite = lite_class(lite_specific_config)
# we only set up forwarding if forwarding config is present and read_only is set to False
if lite_local_config.forward_to and not read_only:
forward_sink_class = sink_registry.get(lite_local_config.forward_to.type)
if forward_sink_class is not None:
current_time = int(time.time() * 1000.0)
try:
forward_to = forward_sink_class.create(
lite_local_config.forward_to.config or {},
PipelineContext(run_id=f"lite-forward_{current_time}"),
)
return DataHubLiteWrapper(lite, forward_to)
except Exception as e:
logger.warning(
f"Failed to set up forwarding due to {e}, will not forward events"
)
logger.debug(
"Failed to set up forwarding, will not forward events", exc_info=e
)
return lite
else:
raise Exception(
f"Failed to find a registered forwarding sink for type {lite_local_config.forward_to.type}. Valid values are {[k for k in sink_registry.mapping.keys()]}"
)
else:
return lite | [
19,
6100,
8609
] |
def METHOD_NAME(plugin):
plugins.manager.install(plugin)
assert plugins.manager.get_active_plugins() == {}
plugins.manager.activate(plugin)
assert plugins.manager.get_active_plugins() == {plugin.get_name(): plugin}
plugins.manager.deactivate(plugin)
assert plugins.manager.get_active_plugins() == {} | [
9,
19,
923,
1294
] |
def METHOD_NAME(self, recv):
if isinstance(recv, Select):
recv._check_no_loop(self)
self._receivers.append(recv)
if recv.notify is not None:
raise Error(self.owned_msg)
recv.notify = self._put
# Avoid race by polling once after installation.
if not recv.empty():
self._put(recv) | [
238
] |
def METHOD_NAME(self):
new_array = arange(0, 40, 2).reshape(10, 2)
with self.assertTraitChanges(
self.data_source, "data_changed", count=1
):
self.data_source.set_data(new_array)
assert_array_equal(new_array, self.data_source._data)
self.assertEqual(self.data_source.get_bounds(), (0, 38))
self.assertEqual(self.data_source.sort_order, "ascending") | [
9,
0,
365
] |
def METHOD_NAME(self):
tools.get(**self.conan_data["sources"][self.version],
destination=self._source_subfolder, strip_root=True) | [
1458
] |
def METHOD_NAME(self):
self.n_train = 100
self.n_test = 100
self.n_features = 80
self.contamination = 0.1
self.roc_floor = 0.8
# Generate sample data
self.X_train, self.X_test, self.y_train, self.y_test = generate_data(
n_train=self.n_train, n_test=self.n_test,
n_features=self.n_features, contamination=self.contamination,
random_state=42)
self.clf = RGraph(n_nonzero=100, transition_steps=20, gamma=50, blocksize_test_data=20,
tau=1, preprocessing=True, active_support=False, gamma_nz=False,
maxiter_lasso=100, contamination=self.contamination,
algorithm='lasso_lars', verbose=0)
self.clf.fit(self.X_train) | [
0,
1
] |
def METHOD_NAME(self, value):
if not value:
return []
# value may come in as a string.
# try to parse and
# ultimately return an empty list if nothing remains -- this will
# eventually raise an `OAuthValidationError` in `validate` where
# it should be anyways.
if not isinstance(value, (list, tuple)):
value = value.split(' ')
# Split values into list
return ' '.join([smart_str(val) for val in value]).split(' ') | [
24,
440
] |
def METHOD_NAME(*, db_session, organization_in: OrganizationCreate) -> Organization:
"""Creates an organization."""
organization = Organization(
**organization_in.dict(exclude={"banner_color"}),
)
if organization_in.banner_color:
organization.banner_color = organization_in.banner_color.as_hex()
# we let the new schema session create the organization
organization = init_schema(engine=engine, organization=organization)
return organization | [
129
] |
def METHOD_NAME(self):
self.assertEqual(_ew.encode('foo', 'utf-8', 'b'), '=?utf-8?b?Zm9v?=') | [
9,
1484
] |
def METHOD_NAME(self, samples):
return Dataset(samples, lambda data: {INPUT_NAME: data}) | [
19,
126
] |
def METHOD_NAME(self) -> str:
"""
The resource identifier.
"""
return pulumi.get(self, "id") | [
147
] |
def METHOD_NAME(self):
# Changing the name of the group shouldn't affect anything
# get a group assigned to the user - should be group 0 or 1
old_group = RandomUserPartitionScheme.get_group_for_user(self.MOCK_COURSE_ID, self.user, self.user_partition)
assert old_group.id in [0, 1]
# Change the group names
groups = [Group(0, 'Group 0'), Group(1, 'Group 1')]
user_partition = UserPartition(
self.TEST_ID,
'Test Partition',
'for testing purposes',
groups,
scheme=RandomUserPartitionScheme
)
# Now, get a new group using the same call
new_group = RandomUserPartitionScheme.get_group_for_user(self.MOCK_COURSE_ID, self.user, user_partition)
assert old_group.id == new_group.id | [
9,
194,
846,
156
] |
def METHOD_NAME(self):
assert path.is_path_inside_base_dir("/æ/øå", "/æ") | [
9,
1646,
3005
] |
def METHOD_NAME(self):
self.assertEqual(self.basicSensor.state_class, self.basicSensorStateClass) | [
9,
756,
6234,
551,
2
] |
def METHOD_NAME(self, message):
if self.film_mode:
hardwareModule.runHardwareTask(self,
message,
lambda : self.setExtControl(True))
self.film_mode = False | [
631,
10327
] |
def METHOD_NAME(df, x_range, y_range, height, width):
cvs = ds.Canvas(x_range=x_range, y_range=y_range,
plot_height=height, plot_width=width)
agg = cvs.points(df, 'x', 'y')
return agg | [
248,
16902,
717
] |
def METHOD_NAME(self):
if self._first_result is not None:
yield self._first_result
for next_result in self._parsed_results:
processed_result = self._process_record(next_result)
if processed_result is not None:
yield processed_result | [
84,
919
] |
def METHOD_NAME(p: DocParagraph):
p.set_attr("settings", "")
return p | [
93,
817
] |
def METHOD_NAME(self):
for i in range(10):
job = self.project.open_job(dict(a=i, b=i * 2))
job.doc.c = float(i)
job.doc.d = float(i * 3)
# Including no keys should return an empty DataFrame
df = self.project.to_dataframe(usecols=[])
assert len(df.columns) == 0
assert len(df) == 0
# Excluding all keys should return an empty DataFrame
def usecols(column):
return column not in ["sp.a", "sp.b", "doc.c", "doc.d"]
df = self.project.to_dataframe(usecols=usecols)
assert len(df.columns) == 0
assert len(df) == 0
# Include one state point column
df = self.project.to_dataframe(usecols=["sp.a"])
assert "sp.a" in df.columns
assert len(df.columns) == 1
assert len(df) == len(self.project)
# Exclude one state point column
def usecols(column):
return column != "sp.b"
df = self.project.to_dataframe(usecols=usecols)
assert "sp.a" in df.columns
assert "sp.b" not in df.columns
assert "doc.c" in df.columns
assert "doc.d" in df.columns
assert len(df.columns) == 3
assert len(df) == len(self.project)
# Include one document column
df = self.project.to_dataframe(usecols=["doc.c"])
assert "doc.c" in df.columns
assert len(df.columns) == 1
assert len(df) == len(self.project)
# Exclude one document column
def usecols(column):
return column != "doc.d"
df = self.project.to_dataframe(usecols=usecols)
assert "sp.a" in df.columns
assert "sp.b" in df.columns
assert "doc.c" in df.columns
assert "doc.d" not in df.columns
assert len(df.columns) == 3
assert len(df) == len(self.project) | [
9,
-1
] |
def METHOD_NAME(
symbol: str, limit: int = 1000, side: Optional[Any] = None
) -> pd.DataFrame:
"""Get last N trades for chosen trading pair. [Source: Coinbase]
Parameters
----------
symbol: str
Trading pair of coins on Coinbase e.g ETH-USDT or UNI-ETH
limit: int
Last `limit` of trades. Maximum is 1000.
side: str
You can choose either the sell or buy side. If side is not set, all trades will be displayed.
Returns
-------
pd.DataFrame
Last N trades for chosen trading pairs.
"""
params = {"limit": limit}
if side is not None and side in ["buy", "sell"]:
params["side"] = side
product_id = check_validity_of_product(symbol)
product = make_coinbase_request(f"/products/{product_id}/trades", params=params)
return pd.DataFrame(product)[["time", "price", "size", "side"]] | [
19,
9225
] |
def METHOD_NAME(self) -> JSON:
return self._resource | [
191
] |
def METHOD_NAME(azure_traffic_collector_name: Optional[pulumi.Input[str]] = None,
collector_policy_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCollectorPolicyResult]:
"""
Gets the collector policy in a specified Traffic Collector
Azure REST API version: 2022-11-01.
:param str azure_traffic_collector_name: Azure Traffic Collector name
:param str collector_policy_name: Collector Policy Name
:param str resource_group_name: The name of the resource group.
"""
... | [
19,
4523,
54,
146
] |
def METHOD_NAME(self): | [
9,
11138
] |
def METHOD_NAME(self, tokens):
"""Converts a sequence of tokens (string) in a single string."""
current_sub_tokens = []
out_string = ""
for token in tokens:
# make sure that special tokens are not decoded using sentencepiece model
if token in self.all_special_tokens:
out_string += self.sp_model.decode(current_sub_tokens) + token
current_sub_tokens = []
else:
current_sub_tokens.append(token)
out_string += self.sp_model.decode(current_sub_tokens)
return out_string.strip() | [
197,
1735,
24,
144
] |
def METHOD_NAME(self):
return self.view() | [
19,
3029,
1179
] |
def METHOD_NAME():
pg = PostgresHook(default_statement_timeout=10)
pg.run_statement_timeout()
actual = pg.run(sql="show statement_timeout;", statement_timeout=0)
assert actual == ("10s",) | [
9,
22,
925,
659
] |
def METHOD_NAME(self):
cmd = "df -BM -T -P"
return cmd | [
56,
462,
144
] |
def METHOD_NAME(self):
right_now = datetime.datetime.now()
# check if the current time is between the trigger time and the trigger time plus leeway
if (self.trigger_time < right_now <
self.trigger_time + datetime.timedelta(minutes=self.leeway)):
# time is currently after the trigger time, but within the leeway
self.step_trigger()
return True
else:
return False | [
250,
2117
] |
def METHOD_NAME(text):
return re.sub('Decoder', '<a href="/Decoder.html" style="color: inherit">Decoder</a>', text) | [
9523,
3642,
2
] |
def METHOD_NAME(self):
if stringVarName.value:
ageSDL = PtGetAgeSDL()
ageSDL.setFlags(stringVarName.value,1,1)
ageSDL.sendToClients(stringVarName.value)
try:
self.enabledStateList = stringShowStates.value.split(",")
for i in range(len(self.enabledStateList)):
self.enabledStateList[i] = int(self.enabledStateList[i].strip())
except:
PtDebugPrint("ERROR: ercaSDLIntShowHide.OnFirstUpdate():\tERROR: couldn't process start state list")
pass
else:
PtDebugPrint("ERROR: ercaSDLIntShowHide.OnFirstUpdate():\tERROR: missing SDL var name")
pass
if AgeStartedIn == PtGetAgeName():
ageSDL = PtGetAgeSDL()
if stringVarName.value:
#PtDebugPrint("Setting notify on %s..." % stringVarName.value)
ageSDL.setNotify(self.key,stringVarName.value,0.0)
try:
SDLvalue = ageSDL[stringVarName.value][0]
except:
PtDebugPrint("ERROR: ercaSDLIntShowHide.OnServerInitComplete():\tERROR: age sdl read failed, SDLvalue = 0 by default. stringVarName = %s" % (stringVarName.value))
SDLvalue = 0
try:
if SDLvalue in self.enabledStateList:
PtDebugPrint("DEBUG: ercaSDLIntShowHide.OnServerInitComplete: Attempting to disable drawing and collision on %s..." % self.sceneobject.getName())
self.sceneobject.draw.disable()
self.sceneobject.physics.suppress(True)
else:
PtDebugPrint("DEBUG: ercaSDLIntShowHide.OnServerInitComplete: Attempting to enable drawing and collision on %s..." % self.sceneobject.getName())
self.sceneobject.draw.enable()
self.sceneobject.physics.suppress(False)
except:
PtDebugPrint("ERROR: ercaSDLIntShowHide.OnServerInitComplete():\tERROR enabling/disabling object %s" % self.sceneobject.getName())
pass
else:
PtDebugPrint("ERROR: ercaSDLIntShowHide.OnServerInitComplete():\tERROR: missing SDL var name")
pass | [
69,
163,
176,
676
] |
def METHOD_NAME(func):
"""Decorator to add a default footprint to morphology functions.
Parameters
----------
func : function
A morphology function such as erosion, dilation, opening, closing,
white_tophat, or black_tophat.
Returns
-------
func_out : function
The function, using a default footprint of same dimension
as the input image with connectivity 1.
"""
@functools.wraps(func)
def func_out(image, footprint=None, *args, **kwargs):
if footprint is None:
footprint = ndi.generate_binary_structure(image.ndim, 1)
return func(image, footprint=footprint, *args, **kwargs)
return func_out | [
235,
9241
] |
def METHOD_NAME(input_dir):
"""Creates and returns function serving unlabelled data for scoring.
Args:
input_dir: string, path to input data.
Returns:
Serving function.
"""
raw_metadata = metadata_io.read_metadata(
posixpath.join(input_dir, constants.PATH_INPUT_SCHEMA))
transform_fn_path = posixpath.join(
input_dir, constants.PATH_INPUT_TRANSFORMATION, 'transform_fn')
return input_fn_maker.build_default_transforming_serving_input_receiver_fn(
raw_metadata=raw_metadata,
transform_savedmodel_dir=transform_fn_path,
exclude_raw_keys=[constants.LABEL_COLUMN],
include_raw_keys=constants.FEATURE_COLUMNS + [constants.KEY_COLUMN]) | [
19,
6565,
362,
667
] |
def METHOD_NAME(self):
self.assertEqual(
self.subject.supported_features,
ClimateEntityFeature.TARGET_TEMPERATURE,
) | [
9,
616,
2247
] |
def METHOD_NAME():
"""Create the DATASET_MAP from recipe datasets to ESGF dataset names.
Run `python -m esmvalcore.esgf.facets` to print an up to date map.
"""
cfg = get_esgf_config()
search_args = dict(cfg["search_connection"])
url = search_args.pop("urls")[0]
connection = pyesgf.search.SearchConnection(url=url, **search_args)
dataset_map = {}
indices = {
'CMIP3': 2,
'CMIP5': 3,
'CMIP6': 3,
'CORDEX': 7,
'obs4MIPs': 2,
}
for project in FACETS:
dataset_map[project] = {}
dataset_key = FACETS[project]['dataset']
ctx = connection.new_context(
project=project,
facets=[dataset_key],
fields=['id'],
latest=True,
)
available_datasets = sorted(ctx.facet_counts[dataset_key])
print(f"The following datasets are available for project {project}:")
for dataset in available_datasets:
print(dataset)
# Figure out the ESGF name of the requested dataset
n_available = len(available_datasets)
for i, dataset in enumerate(available_datasets, 1):
print(f"Looking for dataset name of facet name"
f" {dataset} ({i} of {n_available})")
query = {dataset_key: dataset}
dataset_result = next(iter(ctx.search(batch_size=1, **query)))
print(f"Dataset id: {dataset_result.dataset_id}")
dataset_id = dataset_result.dataset_id
if dataset not in dataset_id:
idx = indices[project]
dataset_alias = dataset_id.split('.')[idx]
print(f"Found dataset name '{dataset_alias}'"
f" for facet '{dataset}',")
dataset_map[project][dataset_alias] = dataset
return dataset_map | [
129,
126,
422
] |
def METHOD_NAME(self):
b = self.pt.shared_arrays['a'].array @ self.fit
np.savez_compressed('b.npz', b=b) | [
278,
1484
] |
def METHOD_NAME(toc):
if type(toc) == type(''):
tracklist = []
for i in range(2, len(toc), 4):
tracklist.append((None,
(int(toc[i:i+2]),
int(toc[i+2:i+4]))))
else:
tracklist = toc
ntracks = len(tracklist)
hash = _dbid((ntracks >> 4) & 0xF) + _dbid(ntracks & 0xF)
if ntracks <= _DB_ID_NTRACKS:
nidtracks = ntracks
else:
nidtracks = _DB_ID_NTRACKS - 1
min = 0
sec = 0
for track in tracklist:
start, length = track
min = min + length[0]
sec = sec + length[1]
min = min + sec / 60
sec = sec % 60
hash = hash + _dbid(min) + _dbid(sec)
for i in range(nidtracks):
start, length = tracklist[i]
hash = hash + _dbid(length[0]) + _dbid(length[1])
return hash | [
12475
] |
def METHOD_NAME(elem: object) -> Dataset:
"""Check that `elem` is a :class:`~pydicom.dataset.Dataset` instance."""
if not isinstance(elem, Dataset):
raise TypeError("Sequence contents must be Dataset instances.")
return elem | [
187,
126
] |
def METHOD_NAME(self) -> int:
"""
The status returned by the Github API
"""
return self.__status | [
452
] |
def METHOD_NAME(params, f, matching_f):
'''Spread params using the matching function (matching_f);
at the lowest level, apply f (function).'''
is_list = [isinstance(l, (list, tuple)) for l in params]
if any(is_list):
res = []
if not all(is_list):
l_temp = []
for l in params:
if not isinstance(l, (list, tuple)):
l_temp.append([l])
else:
l_temp.append(l)
params = l_temp
params = matching_f(params)
for z in zip(*params):
res.append(METHOD_NAME(z,f, matching_f))
return res
else:
return f(params) | [
4541,
474,
-1
] |
def METHOD_NAME(self, method):
"""
Teardown method
"""
try:
self.client.admin_drop_role("usr-sys-admin-test")
time.sleep(1)
except e.InvalidRole:
pass
self.client.close() | [
1843,
103
] |
def METHOD_NAME():
scores = get_label_quality_scores(labels=labels, predictions=predictions)
assert labels.shape == scores.shape
assert isinstance(scores, np.ndarray) | [
9,
146,
555,
44
] |
def METHOD_NAME():
assert False == parse_boolean(None)
assert False == parse_boolean('False')
assert True == parse_boolean('true')
assert True == parse_boolean('TrUE') | [
9,
19,
201
] |
def METHOD_NAME(document: JSON) -> JSON:
"""Utility function to return ES mappings by querying the Template
database against a given index.
Args:
document : One of the values of ES_INTERNAL_INDEX_NAMES (JSON)
Returns:
JSON containing whitelisted keys of the index and corresponding
values.
"""
template = Template.find(document["index"])
# Only keep the whitelisted fields
return {
"properties": {
key: value
for key, value in template.mappings["properties"].items()
if key in document["whitelist"]
}
} | [
19,
1844
] |
def METHOD_NAME(self, obj):
return '<a href="%s" title="%s">%s</a>' % (
obj.get_absolute_url(),
obj.title,
obj.slug
) | [
548
] |
def METHOD_NAME(pipeline_response):
deserialized = self._deserialize("WorkspaceListManagementGroupsResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem) | [
297,
365
] |
def METHOD_NAME(self):
bad_multipolygon = geojson.MultiPolygon(
[[(2.38, 57.322), (23.194, -20.28), (-120.43, 19.15), (2.38)]]
)
bad_feature = geojson.Feature(geometry=bad_multipolygon)
bad_feature_collection = geojson.FeatureCollection([bad_feature])
# Act / Assert
with self.assertRaises(InvalidGeoJson):
# Only geometries of type MultiPolygon are valid
GridService.merge_to_multi_polygon(
geojson.dumps(bad_feature_collection), dissolve=True
) | [
9,
3438,
129,
10513,
41,
532,
8344
] |
def METHOD_NAME(inputs):
m = polynomial.SIP
m = m((21, 23), 2, 3)
m1 = loads(dumps(m))
assert_allclose(m(*inputs), m1(*inputs)) | [
9,
1385,
-1
] |
def METHOD_NAME():
with pytest.raises(ValueError, match="stat_img not to be None"):
threshold_stats_img(None, None, alpha=0.05, height_control="fdr")
with pytest.raises(ValueError, match="stat_img not to be None"):
threshold_stats_img(
None, None, alpha=0.05, height_control="bonferroni"
)
with pytest.raises(ValueError, match="height control should be one of"):
threshold_stats_img(None, None, alpha=0.05, height_control="plop") | [
9,
853,
577,
2029,
1096
] |
def METHOD_NAME(self, message):
"""
Send TCP message as JSON string if the socket is connected.
"""
if self.isConnected():
message_str = message.toJSON() + "\n"
self.socket.write(message_str.encode(self.encoding))
self.socket.flush()
if self.verbose:
print("Sent: \n" + str(message))
else:
print(self.server_name + " socket not connected. \nDid not send:" )
message.setError(True, "Communication Error: " + self.server_name + " socket not connected")
print(message)
self.messageReceived.emit(message) # Return message with error | [
353,
277
] |
def METHOD_NAME(self):
for x in ["INCAR", "POSCAR", "POTCAR", "KPOINTS", "POTCAR.spec"]:
f = Path(self.scratch_dir) / x
if f.exists():
f.unlink() | [
531,
481
] |
def METHOD_NAME(self):
client = APIClient()
client.force_authenticate(user=self.superuser)
response = client.put(
"/api/v2/courses/2/",
{
"code": "123456",
"name": "test course",
"course_url": "Course-Url",
"instance_name": "Fall 2011 day 1 fixed",
"url": "T-00.1000_d0-fixed",
"language": "en",
"starting_time": "2022-01-01T12:00",
"ending_time": "2022-05-31T12:00",
"visible_to_students": False,
"configure_url": "https://grader.cs.aalto.fi/test/url/",
"teachers": [
"staff",
"newteacher"
]
},
format='json'
)
data = response.data
self.assertEqual(data['id'], 2)
course = CourseInstance.objects.get(id=2)
self.assertEqual(course.instance_name, "Fall 2011 day 1 fixed")
self.assertEqual(course.url, "T-00.1000_d0-fixed")
t = map(lambda x: x.user.username, course.teachers)
self.assertIn('staff', t)
self.assertIn('newteacher', t) | [
9,
1276,
1122
] |
def METHOD_NAME(list_of_labels):
generator = LabelGenerator(list_of_labels)
label_collection = list(generator)
assert len(label_collection) == len(list_of_labels)
assert [x for x in label_collection] == list_of_labels | [
9,
1719
] |
def METHOD_NAME(value):
return False | [
962,
1479
] |
def METHOD_NAME():
def wrapper(f, args, kwargs):
return f(*args, **kwargs)
def g():
yield 0
for _ in range(1, 10):
n = yield _
assert _ == n
return
wrap(g, wrapper)
gen = g()
n = next(gen)
channel = [n]
try:
while True:
n = gen.send(n)
channel.append(n)
except StopIteration:
pass
assert list(range(10)) == channel | [
9,
503,
1443,
353
] |
def METHOD_NAME(self):
# Cubes on similar but different meshes -- should *not* combine.
mesh1 = sample_mesh()
mesh2 = sample_mesh(n_edges=0)
self.assertNotEqual(mesh1, mesh2)
cube1 = sample_mesh_cube(mesh=mesh1)
cube2 = sample_mesh_cube(mesh=mesh2)
msg = "Mesh coordinate.* does not match"
with self.assertRaisesRegex(ValueError, msg):
cube1 + cube2 | [
9,
180,
15266,
-1,
1949
] |
def METHOD_NAME(self, doms, stats=0, flags=0):
return [
(dom, {
'vmid': dom.UUIDString()
})
for dom in doms
if dom.UUIDString() in self.vms
] | [
1674,
245,
19,
577
] |
def METHOD_NAME():
verify_batch_matmul(128, 64, 512, 512, rocblas, transa=False, transb=False)
verify_batch_matmul(128, 64, 512, 512, rocblas, transa=False, transb=True)
verify_batch_matmul(128, 64, 512, 512, rocblas, transa=True, transb=False)
verify_batch_matmul(128, 64, 512, 512, rocblas, transa=True, transb=True)
verify_batch_matmul(128, 512, 512, 64, rocblas, transa=False, transb=False)
verify_batch_matmul(128, 512, 512, 64, rocblas, transa=False, transb=True)
verify_batch_matmul(128, 512, 512, 64, rocblas, transa=True, transb=False)
verify_batch_matmul(128, 512, 512, 64, rocblas, transa=True, transb=True)
verify_batch_matmul(128, 512, 64, 512, rocblas, transa=False, transb=False)
verify_batch_matmul(128, 512, 64, 512, rocblas, transa=False, transb=True)
verify_batch_matmul(128, 512, 64, 512, rocblas, transa=True, transb=False)
verify_batch_matmul(128, 512, 64, 512, rocblas, transa=True, transb=True)
verify_batch_matmul(128, 64, 128, 128, rocblas, transa=False, transb=False)
verify_batch_matmul(128, 64, 128, 128, rocblas, transa=False, transb=True)
verify_batch_matmul(128, 64, 128, 128, rocblas, transa=True, transb=False)
verify_batch_matmul(128, 64, 128, 128, rocblas, transa=True, transb=True)
verify_batch_matmul(128, 128, 128, 64, rocblas, transa=False, transb=False)
verify_batch_matmul(128, 128, 128, 64, rocblas, transa=False, transb=True)
verify_batch_matmul(128, 128, 128, 64, rocblas, transa=True, transb=False)
verify_batch_matmul(128, 128, 128, 64, rocblas, transa=True, transb=True) | [
9,
2277,
1496
] |
def METHOD_NAME(self):
data = {"a": [None, None, None, None, None, None, "2019/07/25T09:00:00"]}
tbl = Table(data)
assert tbl.schema() == {"a": datetime} | [
9,
410,
1852,
1187,
884
] |
def METHOD_NAME(self, object, event, method):
for o, e, m, g, t in self.Observations:
if object == o and event == e and method == m:
o.RemoveObserver(t)
self.Observations.remove([o, e, m, g, t]) | [
188,
15295
] |
def METHOD_NAME(self):
uniq_mixin_fields = UniqueFieldMixin._get_fields(PostWithUniqField)
self.assertIsInstance(uniq_mixin_fields, list)
for field in uniq_mixin_fields:
self.assertIsInstance(field, tuple) | [
9,
19,
342,
610,
245,
47,
-1
] |
def METHOD_NAME(self, name):
return getattr(self, normalize(name)) | [
19,
309
] |
def METHOD_NAME(self):
"""Return BGP port."""
return self._bgp_val("port") | [
2260,
237
] |
def METHOD_NAME(self):
from neural_compressor import set_random_seed
set_random_seed(9527)
if args.tune:
from neural_compressor import quantization
from neural_compressor.config import PostTrainingQuantConfig
from neural_compressor.utils.create_obj_from_config import create_dataloader
calib_dataloader_args = {
'batch_size': 10,
'dataset': {"ImageRecord": {'root':args.dataset_location}},
'transform': {'BilinearImagenet':
{'height': 224, 'width': 224}},
'filter': None
}
calib_dataloader = create_dataloader('tensorflow', calib_dataloader_args)
conf = PostTrainingQuantConfig(calibration_sampling_size=[20, 50])
q_model = quantization.fit(model=args.input_graph, conf=conf,
calib_dataloader=calib_dataloader, eval_func=evaluate)
q_model.save(args.output_graph)
if args.benchmark:
from neural_compressor.benchmark import fit
from neural_compressor.config import BenchmarkConfig
if args.mode == 'performance':
conf = BenchmarkConfig(cores_per_instance=4, num_of_instance=1)
from neural_compressor.utils.create_obj_from_config import create_dataloader
dataloader_args = {
'batch_size': args.batch_size,
'dataset': {"ImageRecord": {'root': args.dataset_location}},
'transform': {'BilinearImagenet': {'height': 224, 'width': 224}},
'filter': None
}
eval_dataloader = create_dataloader('tensorflow', dataloader_args)
fit(model=args.input_graph, conf=conf, b_dataloader=eval_dataloader)
else:
from neural_compressor.model import Model
model = Model(args.input_graph).model
accuracy = evaluate(model)
print('Batch size = %d' % args.batch_size)
print("Accuracy: %.5f" % accuracy) | [
22
] |
def METHOD_NAME(self) -> str:
"""Generate a random CID.
:return: CID code.
:Example:
7452
"""
return f"{self.random.randint(1, 9999):04d}" | [
5149
] |
def METHOD_NAME(client, auth_creds, salt_auto_account):
low = {"client": "async", "fun": "key.list_all", "print_event": False, **auth_creds}
ret = client.cmd_sync(low)
assert ret
assert "data" in ret
data = ret["data"]
assert data["success"] is True
assert data["user"] == salt_auto_account.username
assert data["fun"] == "wheel.key.list_all"
assert data["return"]
assert data["return"]["local"] == ["master.pem", "master.pub"] | [
9,
1660,
164
] |
def METHOD_NAME():
para = OpenRectangularParallelepiped(size=(5.0, 1.0, 3.0), center=(0, 0, -1.5),
resolution=(5, 2, 3),
translational_symmetry=False, name="test")
assert para.name == "test"
assert isinstance(para.mesh, Mesh)
assert para.mesh.name == "test_mesh"
assert para.mesh.nb_faces == 42
assert np.isclose(np.abs(para.mesh.vertices[:, 0]).max(), 2.5)
assert np.isclose(np.abs(para.mesh.vertices[:, 1]).max(), 0.5)
assert np.all(para.mesh.vertices[:, 2] <= 0.0)
assert np.all(para.mesh.vertices[:, 2] >= -3.0)
clever_para = OpenRectangularParallelepiped(size=(5.0, 1.0, 3.0), center=(0, 0, -1.5),
resolution=(5, 2, 3),
translational_symmetry=True, name="clever_test")
assert clever_para.mesh.nb_faces == 42
assert isinstance(clever_para.mesh, CollectionOfMeshes)
assert any(isinstance(submesh, TranslationalSymmetricMesh) for submesh in clever_para.mesh) | [
9,
1452,
9658,
552
] |
def METHOD_NAME(self, nii: nib.Nifti1Image) -> nib.Nifti1Image:
"""
Checks the NIfTI header datatype and converts the data to the matching
numpy dtype.
Parameters
----------
nii : nib.Nifti1Image
Input image
Returns
-------
nib.Nifti1Image
Converted input image
"""
header = nii.header.copy()
datatype = int(header["datatype"])
self._warn_suspicious_dtype(datatype)
try:
dtype = NUMPY_DTYPE[datatype]
except KeyError:
return nii
else:
header.set_data_dtype(dtype)
converted = np.asanyarray(nii.dataobj, dtype=dtype)
return nib.Nifti1Image(converted, nii.affine, header) | [
250,
1249
] |
def METHOD_NAME(
self,
request: HttpRequest,
**kwargs: Any
) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client.send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments)
return self._client.METHOD_NAME(request_copy, **kwargs) | [
353,
377
] |
def METHOD_NAME(action_obj, sagemaker_session):
tag = {"Key": "foo", "Value": "bar"}
action_obj.set_tag(tag)
while True:
actual_tags = sagemaker_session.sagemaker_client.list_tags(
ResourceArn=action_obj.action_arn
)["Tags"]
if actual_tags:
break
time.sleep(5)
# When sagemaker-client-config endpoint-url is passed as argument to hit some endpoints,
# length of actual tags will be greater than 1
assert len(actual_tags) > 0
assert actual_tags[0] == tag | [
9,
82
] |
def METHOD_NAME(self, _exaile):
self.__alarm_clock = AlarmClock() | [
1317
] |
def METHOD_NAME(policy, optimizer, gamma):
"""
Training code. Calculates actor and critic loss and performs backprop.
"""
R = 0
saved_actions = policy.saved_actions
policy_losses = [] # list to save actor (policy) loss
value_losses = [] # list to save critic (value) loss
returns = deque() # list to save the true values
# calculate the true value using rewards returned from the environment
for r in policy.rewards[::-1]:
# calculate the discounted value
R = r + gamma * R
returns.appendleft(R)
returns = torch.tensor(returns)
returns = (returns - returns.mean()) / (returns.std() + eps)
for (log_prob, value), R in zip(saved_actions, returns):
advantage = R - value.item()
# calculate actor (policy) loss
policy_losses.append(-log_prob * advantage)
# calculate critic (value) loss using L1 smooth loss
value_losses.append(F.smooth_l1_loss(value, torch.tensor([R])))
# reset gradients
optimizer.zero_grad()
# sum up all the values of policy_losses and value_losses
loss = torch.stack(policy_losses).sum() + torch.stack(value_losses).sum()
# perform backprop
loss.backward()
optimizer.step()
# reset rewards and action buffer
del policy.rewards[:]
del policy.saved_actions[:] | [
1239,
3188
] |
def METHOD_NAME(driver, pages):
pages.load("javascriptPage.html")
shown = driver.find_element(by=By.ID, value="visibleSubElement")
assert shown.is_displayed() is True | [
9,
427,
29,
1532,
947,
2999,
217
] |
def METHOD_NAME():
output = json.loads(r1.vtysh_cmd("show bgp ipv4 unicast json detail"))
expected = {
"routes": {
"192.168.100.101/32": {
"paths": [
{
"valid": True,
"atomicAggregate": None,
"community": None,
}
],
},
"192.168.100.102/32": {
"paths": [
{
"valid": True,
"originatorId": None,
"community": None,
}
],
},
}
}
return topotest.json_cmp(output, expected) | [
2260,
250,
217,
177,
11920
] |
def METHOD_NAME(self):
self.worldNP = render.attach_new_node('World')
# World
self.debugNP = self.worldNP.attach_new_node(BulletDebugNode('Debug'))
self.debugNP.show()
self.debugNP.node().show_wireframe(True)
self.debugNP.node().show_constraints(True)
self.debugNP.node().show_bounding_boxes(False)
self.debugNP.node().show_normals(False)
self.world = BulletWorld()
self.world.set_gravity((0, 0, -9.81))
self.world.set_debug_node(self.debugNP.node())
# Box A
shape = BulletBoxShape((0.5, 0.5, 0.5))
bodyA = BulletRigidBodyNode('Box A')
bodyNP = self.worldNP.attach_new_node(bodyA)
bodyNP.node().add_shape(shape)
bodyNP.set_collide_mask(BitMask32.all_on())
bodyNP.set_pos(-2, 0, 4)
visNP = loader.load_model('models/box.egg')
visNP.clear_model_nodes()
visNP.reparent_to(bodyNP)
self.world.attach(bodyA)
# Box B
shape = BulletBoxShape((0.5, 0.5, 0.5))
bodyB = BulletRigidBodyNode('Box B')
bodyNP = self.worldNP.attach_new_node(bodyB)
bodyNP.node().add_shape(shape)
bodyNP.node().set_mass(1.0)
bodyNP.node().set_deactivation_enabled(False)
bodyNP.set_collide_mask(BitMask32.all_on())
bodyNP.set_pos(0, 0, 0)
visNP = loader.load_model('models/box.egg')
visNP.clear_model_nodes()
visNP.reparent_to(bodyNP)
self.world.attach(bodyB)
# Cone
frameA = TransformState.make_pos_hpr((0, 0, -2), (0, 0, 90))
frameB = TransformState.make_pos_hpr((-5, 0, 0), (0, 0, 0))
cone = BulletConeTwistConstraint(bodyA, bodyB, frameA, frameB)
cone.set_debug_draw_size(2.0)
cone.set_limit(30, 45, 170, softness=1.0, bias=0.3, relaxation=8.0)
self.world.attach(cone) | [
102
] |
def METHOD_NAME(sk: PrivateKey, path: List[int]) -> PrivateKey:
for index in path:
sk = AugSchemeMPL.derive_child_sk_unhardened(sk, index)
return sk | [
229,
157,
17514
] |
def METHOD_NAME(self) -> str:
return "engrams" | [
19,
156
] |
def METHOD_NAME(
input_features, time_steps, eps=1e-5, momentum=0.1, affine=True
):
"""
Generate a torch.nn.ModuleList of 1D Batch Normalization Layer with
length time_steps.
Input to this layer is the same as the vanilla torch.nn.BatchNorm1d
layer.
Batch Normalisation Through Time (BNTT) as presented in:
'Revisiting Batch Normalization for Training Low-Latency Deep Spiking
Neural Networks From Scratch'
By Youngeun Kim & Priyadarshini Panda
arXiv preprint arXiv:2010.01729
Original GitHub repo:
https://github.com/Intelligent-Computing-Lab-Yale/
BNTT-Batch-Normalization-Through-Time
Using LIF neuron as the neuron of choice for the math shown below.
Typically, for a single post-synaptic neuron i, we can represent its
membrane potential :math:`U_{i}^{t}` at time-step t as:
.. math::
U_{i}^{t} = λ u_{i}^{t-1} + \\sum_j w_{ij}S_{j}^{t}
where:
* λ - a leak factor which is less than one
* j - the index of the pre-synaptic neuron
* :math:`S_{j}` - the binary spike activation
* :math:`w_{ij}` - the weight of the connection between the pre & \
post neurons.
With Batch Normalization Through Time, the membrane potential can be
modeled as:
.. math::
U_{i}^{t} = λu_{i}^{t-1} + BNTT_{γ^{t}}
= λu_{i}^{t-1} + γ _{i}^{t} (\\frac{\\sum_j
w_{ij}S_{j}^{t} -
µ_{i}^{t}}{\\sqrt{(σ _{i}^{t})^{2} + ε}})
:param input_features: number of features of the input
:type input_features: int
:param time_steps: number of time-steps of the SNN
:type time_steps: int
:param eps: a value added to the denominator for numerical stability
:type eps: float
:param momentum: the value used for the running_mean and running_var \
computation
:type momentum: float
:param affine: a boolean value that when set to True, the Batch Norm \
layer will have learnable affine parameters
:type affine: bool
Inputs: input_features, time_steps
- **input_features**: same number of features as the input
- **time_steps**: the number of time-steps to unroll in the SNN
Outputs: bntt
- **bntt** of shape `(time_steps)`: toch.nn.ModuleList of \
BatchNorm1d layers for the specified number of time-steps
"""
bntt = nn.ModuleList(
[
nn.BatchNorm1d(
input_features, eps=eps, momentum=momentum, affine=affine
)
for _ in range(time_steps)
]
)
# Disable bias/beta of Batch Norm
for bn in bntt:
bn.bias = None
return bntt | [
2277,
387,
791,
-1
] |
def METHOD_NAME(cls, path) -> "SampleConfig":
file_path = Path(path).resolve()
with safe_open(file_path) as f:
loaded_json = json.load(f)
return cls(loaded_json) | [
280,
763
] |
def METHOD_NAME():
pl = pv.Plotter()
pl.track_mouse_position()
pl.show(auto_close=False)
assert pl.mouse_position is None
x, y = 10, 20
pl.iren._mouse_move(x, y)
assert pl.mouse_position == (x, y)
pl.iren.untrack_mouse_position()
assert "MouseMoveEvent" not in pl.iren._observers.values() | [
9,
3068,
2571,
195
] |
async def METHOD_NAME(
self, interaction: disnake.Interaction, component: disnake.ui.Button | disnake.ui.Select
):
"""
Updates the view such that the passed component is disabled while this context manager is active.
Refreshes the view content after the context exits.
"""
component.disabled = True
await self.refresh_content(interaction)
try:
yield
finally:
component.disabled = False
await self.refresh_content(interaction) | [
193,
1007
] |
def METHOD_NAME(self, payment_mode):
return [("payment_mode_id", "=", payment_mode.id), ("state", "=", "draft")] | [
19,
598,
13,
1674
] |
def METHOD_NAME(self):
"""Testing crystalline structures in the XSF format."""
tmp_file = tempfile.TemporaryFile(mode="w+")
xsf_write_structure(tmp_file, self.mgb2)
xsf_string = \ | [
9,
-1,
77,
1011
] |
def METHOD_NAME(self, data_dict):
"""
Parse unregister request from SPARCS SSO server
:param data_dict: a data dictionary that the server sent
:returns: the user's service id
:raises RuntimeError: raise iff the request is invalid
"""
client_id = data_dict.get("client_id", "")
sid = data_dict.get("sid", "")
timestamp = data_dict.get("timestamp", "")
sign = data_dict.get("sign", "")
if client_id != self.client_id:
raise RuntimeError("INVALID_REQUEST")
elif not self._validate_sign([sid], timestamp, sign):
raise RuntimeError("INVALID_REQUEST")
return sid | [
214,
2468,
377
] |
async def METHOD_NAME(self):
"""Test that duplicate violations are ignored."""
self.csv += self.serious_violation
response = await self.collect(get_request_text=self.csv)
self.assert_measurement(response, value="2", entities=self.expected_entities) | [
9,
1119,
4923
] |
def METHOD_NAME(monkeypatch):
"""Test the parse_paths function"""
monkeypatch.chdir(os.path.dirname(__file__))
cmdline_parser = CmdlineParser()
assert cmdline_parser._parse_paths(__file__) == os.path.abspath(__file__)
values = ["test_resolve_config.py", "test", "nada", __file__]
parsed_values = cmdline_parser._parse_paths(values)
assert parsed_values[0] == os.path.abspath("test_resolve_config.py")
assert parsed_values[1] == "test"
assert parsed_values[2] == "nada"
assert parsed_values[3] == os.path.abspath(__file__) | [
9,
214,
3336
] |
def METHOD_NAME(self, obj):
if callable(obj):
try:
return obj(1)
except:
return None
return json.JSONEncoder.METHOD_NAME(self, obj) | [
235
] |
def METHOD_NAME():
with open(get_test_file_path('pygeoapi-test-config.yml')) as fh:
return yaml_load(fh) | [
200
] |
def METHOD_NAME() -> str:
return "snowflake" | [
19,
4059,
156
] |