text (string, lengths 15 to 7.82k) | ids (sequence, lengths 1 to 7) |
---|---|
def METHOD_NAME(
graphql_context: WorkspaceRequestContext, snapshot
):
result = execute_dagster_graphql(
graphql_context,
SNAPSHOT_OR_ERROR_QUERY_BY_PIPELINE_NAME,
{
"activePipelineSelector": {
"pipelineName": "jkdjfkdj",
"repositoryName": main_repo_name(),
"repositoryLocationName": main_repo_location_name(),
}
},
)
assert not result.errors
assert result.data
assert result.data["pipelineSnapshotOrError"]["__typename"] == "PipelineNotFoundError"
snapshot.assert_match(pretty_dump(result.data)) | [
9,
1047,
394,
894,
168,
604,
923
] |
def METHOD_NAME(self, func, times=3, delay_sec=10):
"""Attempt to execute a function several times with a delay between attempts."""
for i in range(1, times + 1):
try:
func()
break
except Exception:
if i < times:
self.logger.exception(
f"Exception executing {func.__name__}() on attempt {i} of {times}. "
f"Waiting {delay_sec} seconds and trying again.")
time.sleep(delay_sec)
else:
self.logger.exception(
f"Exception executing {func.__name__}() on attempt {i} of {times}.") | [
3142,
750
] |
def METHOD_NAME():
from mpi4py import MPI
commworld = MPI.COMM_WORLD
rank = commworld.Get_rank()
size = commworld.Get_size()
@dace.program
def mpi4py_isend_irecv(rank: dace.int32, size: dace.int32):
src = (rank - 1) % size
dst = (rank + 1) % size
req = np.empty((2, ), dtype=MPI.Request)
sbuf = np.full((1, ), rank, dtype=np.int32)
req[0] = commworld.Isend(sbuf, dst, tag=0)
rbuf = np.empty((1, ), dtype=np.int32)
req[1] = commworld.Irecv(rbuf, src, tag=0)
MPI.Request.Waitall(req)
return rbuf
sdfg = None
if rank == 0:
sdfg = mpi4py_isend_irecv.to_sdfg(simplify=True)
func = utils.distributed_compile(sdfg, commworld)
val = func(rank=rank, size=size)
ref = mpi4py_isend_irecv.f(rank, size)
assert (val[0] == ref[0]) | [
9,
10820,
-1
] |
def METHOD_NAME(self, rules: None = None) -> None:
foirequest = self.message.request
if rules is None:
rules = Rule.objects.all()
if self.active_only:
rules = rules.filter(is_active=True)
rules = (
rules.filter(
Q(jurisdictions=None) | Q(jurisdictions=foirequest.jurisdiction)
)
.filter(Q(publicbodies=None) | Q(publicbodies=foirequest.public_body))
.filter(
Q(categories=None)
| Q(categories__in=foirequest.public_body.categories.all())
)
.order_by("priority")
)
for rule in rules:
if rule.references_re:
if not rule.references_re.search(foirequest.reference):
continue
yield rule | [
527,
1634
] |
def METHOD_NAME(buffer, offset, length, bit_size, packed_type, decode_strings):
byte_size = 1 << bit_size
arr = []
for i in range(length):
item_offset = offset + (i * byte_size)
arr.append(read_buffer(buffer, item_offset, bit_size, packed_type, decode_strings))
return arr | [
203,
877
] |
def METHOD_NAME(settings):
with mock.patch(
"mach.telemetry.resolve_is_employee",
side_effect=requests.exceptions.RequestException("Unlucky"),
), mock.patch("mach.telemetry.record_telemetry_settings") as record_mock:
initialize_telemetry_setting(None, None, None)
assert record_mock.call_count == 0 | [
9,
15,
1952,
1646,
377,
168
] |
def METHOD_NAME(text, file, cwd):
subprocess.call(['git', 'appraise', 'comment', '-d', '-m', text, '-f', file],
cwd=cwd,
stdout=FNULL,
stderr=subprocess.STDOUT) | [
238,
6821,
1591
] |
def METHOD_NAME():
if authed():
user = Users.query.filter_by(id=session["id"]).first()
# Check if the session is still valid
session_hash = session.get("hash")
if session_hash:
if session_hash != hmac(user.password):
logout_user()
if request.content_type == "application/json":
error = 401
else:
error = redirect(url_for("auth.login", next=request.full_path))
abort(error)
return user
else:
return None | [
19,
1056,
21
] |
def METHOD_NAME():
importset = ImportSet('''
from a1.b1 import c1 as x
from a2.b2 import c2 as x
from a2.b2 import c2 as y
''')
expected = {'x': (Import('from a1.b1 import c1 as x'),
Import('from a2.b2 import c2 as x')),
'y': (Import('from a2.b2 import c2 as y'),)}
assert importset.by_import_as == expected | [
9,
512,
0,
604,
512,
947,
1170
] |
def METHOD_NAME(
self,
**kwargs: Any
) -> AsyncIterable["_models.ResourceProviderOperationList"]:
"""List all the available operations the KubernetesConfiguration resource provider supports.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceProviderOperationList or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.kubernetesconfiguration.v2021_11_01_preview.models.ResourceProviderOperationList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceProviderOperationList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
template_url=self.METHOD_NAME.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceProviderOperationList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
) | [
245
] |
def METHOD_NAME(*, content: bytes) -> str:
if "<!DOCTYPE HTML PUBLIC" not in str(content):
raise TypeError
soup = BeautifulSoup(content, "lxml")
result = soup.find("meta", attrs={"http-equiv": "REFRESH"})
if result:
_, text = result["content"].split(";")
if "http" in text:
url = text[text.lower().find("http") :]
url = unquote(url, encoding="utf-8", errors="replace")
url = url[: url.find("?")]
return str(url)
return "" | [
1094,
1736
] |
def METHOD_NAME(self) -> List[str]:
return self.params.get(self._IMAGE_ID_PARAM_NAME).split() | [
660,
308
] |
def METHOD_NAME(dut): | [
193,
-1
] |
def METHOD_NAME(self, bad_value_type): | [
9,
3534,
99,
44,
45
] |
def METHOD_NAME(self, data):
self._p("%s UNKNOWN-DECL" % data) | [
46,
2197
] |
def METHOD_NAME(fd, n):
msg = UNSIGNED_STRUCT.pack(n)
while msg:
nbytes = os.write(fd, msg)
if nbytes == 0:
raise RuntimeError('should not get here')
msg = msg[nbytes:] | [
77,
1715
] |
async def METHOD_NAME(conn: SAConnection, file_id: SimcoreS3FileID) -> FileMetaDataAtDB:
result = await conn.execute(
query=sa.select(file_meta_data).where(file_meta_data.c.file_id == file_id)
)
if row := await result.first():
return FileMetaDataAtDB.from_orm(row)
raise FileMetaDataNotFoundError(file_id=file_id) | [
19
] |
def METHOD_NAME():
bpy.utils.register_class(SvProjectCurveSurfaceNode) | [
372
] |
def METHOD_NAME(self): | [
9,
616,
2539,
43,
59,
44,
532
] |
def METHOD_NAME(self):
input_dominoes = [(1, 2), (3, 1), (2, 3)]
output_chain = can_chain(input_dominoes)
self.assert_correct_chain(input_dominoes, output_chain) | [
9,
2756,
1532
] |
def METHOD_NAME(oclass, obj, name):
classdict = oclass.__dict__
if isinstance(obj, FunctionType):
if not isinstance(classdict[name], StaticMethodType):
return False
else:
if not isinstance(obj, MethodType):
return False
if obj.im_self is not None:
if (not isinstance(classdict[name], ClassMethodType) or
obj.im_self is not oclass):
return False
else:
if not isinstance(classdict[name], FunctionType):
return False
objname = obj.__name__
if objname.startswith("__") and not objname.endswith("__"):
objname = "_%s%s" % (obj.im_class.__name__, objname)
return objname == name | [
5466
] |
def METHOD_NAME(self):
qs = self.model.objects.approved().filter(product=self.kwargs["product_pk"])
# pylint: disable=attribute-defined-outside-init
self.form = SortReviewsForm(self.request.GET)
if self.request.GET and self.form.is_valid():
sort_by = self.form.cleaned_data["sort_by"]
if sort_by == SortReviewsForm.SORT_BY_RECENCY:
return qs.order_by("-date_created")
return qs.order_by("-score") | [
19,
2386
] |
def METHOD_NAME(node: Node, chainArg="", skipGenesis=True, relaunchAssertMessage="Fail to relaunch"):
isRelaunchSuccess=node.relaunch(chainArg=chainArg, timeout=relaunchTimeout, skipGenesis=skipGenesis)
time.sleep(1) # Give a second to replay or resync if needed
assert isRelaunchSuccess, relaunchAssertMessage
return isRelaunchSuccess | [
8364,
1716
] |
def METHOD_NAME(self):
"""Enables to get outputs of the operator by evaluating it
Returns
--------
outputs : OutputsFieldsContainer
"""
return super().METHOD_NAME | [
141
] |
def METHOD_NAME(router, expected_locator_file):
logger.info("checking zebra locator status")
output = json.loads(router.vtysh_cmd("show segment-routing srv6 locator json"))
expected = open_json_file("{}/{}".format(CWD, expected_locator_file))
return topotest.json_cmp(output, expected) | [
250,
13821,
10957
] |
def METHOD_NAME(xml_content, path):
"""Extract the targetPackage value from the <instrumentation> tag."""
# https://developer.android.com/guide/topics/manifest/manifest-element.html
# xmlns:android is the required namespace in an Android manifest.
tree = ET.ElementTree(ET.fromstring(xml_content))
package_key = "{http://schemas.android.com/apk/res/android}targetPackage"
instrumentation_elems = tree.iterfind(
".//instrumentation[@{0}]".format(package_key))
package_names = set(e.attrib[package_key] for e in instrumentation_elems)
if not package_names:
raise ManifestError("No <instrumentation> tag containing "
"the targetPackage attribute is found in the "
"manifest at %s" % path)
if len(package_names) > 1:
raise ManifestError(
"The <instrumentation> tags in the manifest at %s do not "
"reference the same target package: %s" % (path, list(package_names)))
return package_names.pop() | [
297,
1030,
360,
24,
2933
] |
def METHOD_NAME(*args):
package_dir = get_package_share_directory(PACKAGE_NAME)
ur5e_xacro_path = os.path.join(package_dir, 'resource', 'ur5e_with_gripper.urdf.xacro')
ur5e_description = xacro.process_file(ur5e_xacro_path, mappings={'name': 'UR5eWithGripper'}).toxml()
# Define your URDF robots here
# The name of an URDF robot has to match the name of the robot of the driver node
# You can specify the URDF content to use with robot_description
# In case you have relative paths in your URDF, specify the relative_path_prefix as the directory of your xacro file
spawn_URDF_ur5e = URDFSpawner(
name='UR5e',
robot_description=ur5e_description,
relative_path_prefix=os.path.join(package_dir, 'resource'),
translation='0 0 0.62',
rotation='0 0 1 -1.5708',
)
# ROS control spawners
controller_manager_timeout = ['--controller-manager-timeout', '500']
controller_manager_prefix = 'python.exe' if os.name == 'nt' else ''
ur5e_trajectory_controller_spawner = Node(
package='controller_manager',
executable='spawner',
output='screen',
prefix=controller_manager_prefix,
arguments=['ur_joint_trajectory_controller', '-c', 'ur5e/controller_manager'] + controller_manager_timeout,
)
ur5e_joint_state_broadcaster_spawner = Node(
package='controller_manager',
executable='spawner',
output='screen',
prefix=controller_manager_prefix,
arguments=['ur_joint_state_broadcaster', '-c', 'ur5e/controller_manager'] + controller_manager_timeout,
)
ur5e_spawners = [ur5e_trajectory_controller_spawner, ur5e_joint_state_broadcaster_spawner]
abb_trajectory_controller_spawner = Node(
package='controller_manager',
executable='spawner',
output='screen',
prefix=controller_manager_prefix,
arguments=['abb_joint_trajectory_controller', '-c', 'abb/controller_manager'] + controller_manager_timeout,
)
abb_joint_state_broadcaster_spawner = Node(
package='controller_manager',
executable='spawner',
output='screen',
prefix=controller_manager_prefix,
arguments=['abb_joint_state_broadcaster', '-c', 'abb/controller_manager'] + controller_manager_timeout,
)
abb_spawners = [abb_trajectory_controller_spawner, abb_joint_state_broadcaster_spawner]
# Control nodes
ur5e_controller = Node(
package=PACKAGE_NAME,
executable='ur5e_controller',
namespace='ur5e',
output='screen'
)
abb_controller = Node(
package=PACKAGE_NAME,
executable='abb_controller',
namespace='abb',
output='screen'
)
return [
# Request to spawn the URDF robot
spawn_URDF_ur5e,
# Launch the driver node once the URDF robot is spawned.
# You might include other nodes to start them with the driver node.
launch.actions.RegisterEventHandler(
event_handler=launch.event_handlers.OnProcessIO(
target_action=spawn_URDF_ur5e,
on_stdout=lambda event: get_webots_driver_node(
event, [ur5e_controller, abb_controller] + ur5e_spawners + abb_spawners
),
)
),
] | [
19,
-1,
480
] |
async def METHOD_NAME(pipeline_response):
deserialized = self._deserialize("ResourceSkuCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem) | [
297,
365
] |
def METHOD_NAME(
self,
event: Optional[dict] = None,
point: Optional[int] = None,
side_by_side: bool = False,
force_group: bool = True,
fallback: bool = False,
group: int = -1
) -> bool:
if self.applies_to_context_menu(event):
return self.is_enabled(event, point, side_by_side, force_group, fallback, group)
return True | [
137,
2999
] |
def METHOD_NAME(self) -> None:
"""Invalidate underlying filesystem caches."""
filepath = get_filepath_str(self._filepath, self._protocol)
self._fs.invalidate_cache(filepath) | [
3359,
596
] |
def METHOD_NAME(self, agreement_id: str) -> None:
self._agreement_data[agreement_id].payable = True | [
0,
16374
] |
def METHOD_NAME(data_path):
uniform = fn.random.uniform(range=(0., 1.), shape=2)
resize_uniform = fn.random.uniform(range=(256., 480.))
mirror = fn.random.coin_flip(probability=0.5)
jpegs, _ = fn.readers.file(file_root=data_path)
images = fn.decoders.image(jpegs, output_type=types.RGB)
resized_images = fn.fast_resize_crop_mirror(images, crop=(224, 224), crop_pos_x=uniform[0],
crop_pos_y=uniform[1], mirror=mirror,
resize_shorter=resize_uniform)
output = fn.crop_mirror_normalize(resized_images.gpu(), device='gpu', dtype=types.FLOAT16,
mean=[128., 128., 128.], std=[1., 1., 1.])
return output | [
11667,
1148
] |
def METHOD_NAME(self, contexts: List[str]):
for c in contexts:
assert isinstance(c, str)
_wenet.wenet_add_context(self.d, c) | [
238,
198
] |
def METHOD_NAME(name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
scope_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPrivateLinkScopedResourceResult]:
"""
Gets a scoped resource in a private link scope.
Azure REST API version: 2020-08-15-preview.
:param str name: The name of the scoped resource object.
:param str resource_group_name: The name of the resource group.
:param str scope_name: The name of the Azure Arc PrivateLinkScope resource.
"""
... | [
19,
547,
548,
3270,
191,
146
] |
def METHOD_NAME(tarball, out_dir, arches, toolchain):
version = utils.GetVersion()
tarroot = 'dart-%s' % version
origtarname = 'dart_%s.orig.tar.gz' % version
if not exists(tarball):
print('Source tarball not found')
return -1
with utils.TempDir() as temp_dir:
origtarball = join(temp_dir, origtarname)
copyfile(tarball, origtarball)
with tarfile.open(origtarball) as tar:
tar.extractall(path=temp_dir)
# Build source package.
print("Building source package")
RunBuildPackage(['-S', '-us', '-uc'], join(temp_dir, tarroot))
# Build binary package(s).
for arch in arches:
print("Building %s package" % arch)
RunBuildPackage(
['-B', '-a', GN_ARCH_TO_DEBIAN_ARCH[arch], '-us', '-uc'],
join(temp_dir, tarroot))
# Copy the Debian package files to the build directory.
debbase = 'dart_%s' % version
source_package = [
'%s-1.dsc' % debbase,
'%s.orig.tar.gz' % debbase,
'%s-1.debian.tar.xz' % debbase
]
for name in source_package:
copyfile(join(temp_dir, name), join(out_dir, name))
for arch in arches:
name = '%s-1_%s.deb' % (debbase, GN_ARCH_TO_DEBIAN_ARCH[arch])
copyfile(join(temp_dir, name), join(out_dir, name)) | [
56,
1428,
360
] |
def METHOD_NAME(self, name, code):
# Helper that put all \N escapes inside eval'd raw strings,
# to make sure this script runs even if the compiler
# chokes on \N escapes
res = eval(ur'u"\N{%s}"' % name)
self.assertEqual(res, code)
return res | [
17591
] |
def METHOD_NAME(self):
struct = Structure.from_file(filename=f"{TEST_FILES_DIR}/garnet.cif")
spga = SpacegroupAnalyzer(struct, 0.1)
prim = spga.find_primitive()
struct = prim.copy()
struct["Al3+"] = {"Al3+": 0.5, "Ga3+": 0.5}
adaptor = EnumlibAdaptor(struct, 1, 1, enum_precision_parameter=0.01)
adaptor.run()
structures = adaptor.structures
assert len(structures) == 7
for struct in structures:
assert struct.formula == "Ca12 Al4 Ga4 Si12 O48"
struct = prim.copy()
struct["Ca2+"] = {"Ca2+": 1 / 3, "Mg2+": 2 / 3}
adaptor = EnumlibAdaptor(struct, 1, 1, enum_precision_parameter=0.01)
adaptor.run()
structures = adaptor.structures
assert len(structures) == 20
for struct in structures:
assert struct.formula == "Ca4 Mg8 Al8 Si12 O48"
struct = prim.copy()
struct["Si4+"] = {"Si4+": 1 / 3, "Ge4+": 2 / 3}
adaptor = EnumlibAdaptor(struct, 1, 1, enum_precision_parameter=0.01)
adaptor.run()
structures = adaptor.structures
assert len(structures) == 18
for struct in structures:
assert struct.formula == "Ca12 Al8 Si4 Ge8 O48" | [
9,
2351,
-1
] |
def METHOD_NAME(x: int):
return x + 3 | [
53,
6013
] |
def METHOD_NAME(end_time: Optional[pulumi.Input[str]] = None,
entity_id: Optional[pulumi.Input[str]] = None,
kinds: Optional[pulumi.Input[Optional[Sequence[Union[str, 'EntityTimelineKind']]]]] = None,
number_of_bucket: Optional[pulumi.Input[Optional[int]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
start_time: Optional[pulumi.Input[str]] = None,
workspace_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetEntitiesGetTimelineResult]:
"""
Timeline for an entity.
:param str end_time: The end timeline date, so the results returned are before this date.
:param str entity_id: entity ID
:param Sequence[Union[str, 'EntityTimelineKind']] kinds: Array of timeline Item kinds.
:param int number_of_bucket: The number of bucket for timeline queries aggregation.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str start_time: The start timeline date, so the results returned are after this date.
:param str workspace_name: The name of the workspace.
"""
... | [
19,
5399,
19,
6938,
146
] |
def METHOD_NAME(self) -> str | None:
"""
Exposed in avd_switch_facts
"""
if self.shared_utils.configure_parent_for_inband_mgmt:
return self.shared_utils.METHOD_NAME | [
9924,
4891,
1782
] |
def METHOD_NAME(self):
coords = [(0.0,0.1,0.0)]
charges = [1.00]
mol = gto.M(
verbose = 0,
atom = '''C 0.000 -0.300 0.2
Ne 0.310 0.820 0.1''',
basis = 'cc-pvdz',
cart = True)
mf = itrf.mm_charge(scf.RHF(mol), coords, charges)
h = mf.get_hcore()
self.assertAlmostEqual(lib.finger(h), -147.92831183612765, 9)
h = mf.nuc_grad_method().get_hcore()
self.assertEqual(h.shape, (3,30,30))
self.assertAlmostEqual(lib.finger(h), -178.29768724184771, 9) | [
9,
-1,
9697
] |
def METHOD_NAME(self, batch_shape, _instance=None):
new = self._get_checked_instance(OneHotCategorical, _instance)
batch_shape = torch.Size(batch_shape)
new._categorical = self._categorical.METHOD_NAME(batch_shape)
super(OneHotCategorical, new).__init__(
batch_shape, self.event_shape, validate_args=False
)
new._validate_args = self._validate_args
return new | [
2450
] |
async def METHOD_NAME():
# Since Azure Cosmos DB data plane SDK does not cover management operations, we have to create our resources
# with a master key authenticated client for this sample.
await create_sample_resources()
# With this done, you can use your AAD service principal id and secret to create your ClientSecretCredential.
# The async ClientSecretCredentials, like the async client, also have a context manager,
# and as such should be used with the `async with` keywords.
async with ClientSecretCredential(
tenant_id=TENANT_ID,
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET) as aad_credentials:
# Use your credentials to authenticate your client.
async with CosmosClient(HOST, aad_credentials) as aad_client:
print("Showed ClientSecretCredential, now showing DefaultAzureCredential")
# You can also utilize DefaultAzureCredential rather than directly passing in the id's and secrets.
# This is the recommended method of authentication, and uses environment variables rather than in-code strings.
async with DefaultAzureCredential() as aad_credentials:
# Use your credentials to authenticate your client.
async with CosmosClient(HOST, aad_credentials) as aad_client:
# Do any R/W data operations with your authorized AAD client.
db = aad_client.get_database_client(DATABASE_ID)
container = db.get_container_client(CONTAINER_ID)
print("Container info: " + str(container.read()))
await container.create_item(get_test_item(879))
print("Point read result: " + str(container.read_item(item='Item_0', partition_key='Item_0')))
query_results = [item async for item in
container.query_items(query='select * from c', partition_key='Item_0')]
assert len(query_results) == 1
print("Query result: " + str(query_results[0]))
await container.delete_item(item='Item_0', partition_key='Item_0')
# Attempting to do management operations will return a 403 Forbidden exception.
try:
await aad_client.delete_database(DATABASE_ID)
except exceptions.CosmosHttpResponseError as e:
assert e.status_code == 403
print("403 error assertion success")
# To clean up the sample, we use a master key client again to get access to deleting containers/ databases.
await delete_sample_resources()
print("end of sample") | [
22,
734
] |
def METHOD_NAME(input_df_1, input_df_2, input_df_3):
assert isinstance(input_df_1, pd.DataFrame)
assert isinstance(input_df_2, pd.DataFrame)
assert isinstance(input_df_3, pd.DataFrame)
assert all(x.isupper() for x in list(input_df_1.columns))
assert all(x.islower() for x in list(input_df_2.columns))
assert all(x.islower() for x in list(input_df_3.columns)) | [
187
] |
def METHOD_NAME(cls, value, info):
field = cls.model_fields[info.field_name]
field_name = field.alias or info.field_name
if field_name in info.context['configured_fields']:
value = getattr(validators, f'instance_{info.field_name}', identity)(value, field=field)
else:
value = getattr(defaults, f'instance_{info.field_name}', lambda: value)()
return validation.utils.make_immutable(value) | [
187
] |
def METHOD_NAME():
unify_helper(List[str], List[str], List[str])
unify_helper(List[int], List[int], List[int]) | [
9,
1101,
245
] |
def METHOD_NAME(structures):
"""Test scaled_cell's PDB-designated validation: inverse of det(SCALE) = Volume of cell"""
for structure in structures:
# Manual calculation of volume = |a_1 . (a_2 x a_3)|
a_1 = structure.lattice_vectors[0]
a_2 = structure.lattice_vectors[1]
a_3 = structure.lattice_vectors[2]
a_mid_0 = a_2[1] * a_3[2] - a_2[2] * a_3[1]
a_mid_1 = a_2[2] * a_3[0] - a_2[0] * a_3[2]
a_mid_2 = a_2[0] * a_3[1] - a_2[1] * a_3[0]
volume_from_cellpar = abs(
a_1[0] * a_mid_0 + a_1[1] * a_mid_1 + a_1[2] * a_mid_2
)
scale = scaled_cell(structure.lattice_vectors)
volume_from_scale = math.fabs(1 / numpy.linalg.det(scale))
assert volume_from_scale == pytest.approx(volume_from_cellpar) | [
9,
4164,
118,
2619
] |
def METHOD_NAME():
"""Test WorkspaceConfig - no parameter."""
test_config1 = Path(_TEST_DATA).joinpath(pkg_config._CONFIG_FILE)
with custom_mp_config(test_config1):
# Default workspace
_DEF_WS = {
"WorkspaceId": "52b1ab41-869e-4138-9e40-2a4457f09bf3",
"TenantId": "72f988bf-86f1-41af-91ab-2d7cd011db49",
}
ws_config = WorkspaceConfig()
check.is_in("workspace_id", ws_config)
check.equal(ws_config["workspace_id"], _DEF_WS["WorkspaceId"])
check.is_in("tenant_id", ws_config)
check.equal(ws_config["tenant_id"], _DEF_WS["TenantId"])
check.is_not_none(ws_config.code_connect_str)
check.is_true(
ws_config.code_connect_str.startswith("loganalytics://code().tenant(")
and _DEF_WS["WorkspaceId"] in ws_config.code_connect_str
and _DEF_WS["TenantId"] in ws_config.code_connect_str
) | [
9,
9780,
235,
368
] |
def METHOD_NAME(self, path='/opt/couchbase/var/lib/couchbase/data',
index_path='/opt/couchbase/var/lib/couchbase/data'):
data = {'path':path, 'index_path':index_path}
result = self._post('nodes/self/controller/settings', data)
return result | [
15,
1716
] |
def METHOD_NAME(self, program_counter: int) -> Iterator['CodeStream']:
anchor_pc = self.program_counter
self.program_counter = program_counter
try:
yield self
finally:
self.program_counter = anchor_pc | [
336
] |
def METHOD_NAME(driver, pages):
pages.load("nestedElements.html")
element = driver.find_element(By.ID, "test_id_div")
with pytest.raises(NoSuchElementException):
element.find_element(By.ID, "test_id_out") | [
9,
427,
416,
669,
604,
147,
1646
] |
def METHOD_NAME(self, input: Dict[str, Any]) -> Dict[str, Any]:
scores = []
labels = []
masks = []
boxes = []
track_ids = []
for ii in tqdm(range(len(input['iids']))):
img = input['imgs'][ii]
img_meta = input['img_metas'][ii]
iid = input['iids'][ii]
x = np.transpose(img, [2, 0, 1])
x = np.expand_dims(x, 0)
x = torch.from_numpy(x).to(self.device)
with torch.no_grad():
segm_results = self.model(x, img_meta, rescale=True, iid=iid)
_, _, _, vis_sem, vis_tracker, label, binary_mask, track_id, thing_bbox_for_tracking = segm_results
scores.append([0.99] * len(label))
labels.append(label)
masks.append(binary_mask)
boxes.append(thing_bbox_for_tracking)
track_ids.append(track_id)
output = {
'scores': scores,
'labels': labels,
'masks': masks,
'boxes': boxes,
'uuid': track_ids
}
return output | [
76
] |
def METHOD_NAME(self):
if os.sep == '/':
self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
self.assertEqual(ensure_relative('some/path'), 'some/path')
else: # \\
self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') | [
9,
602,
1821
] |
def METHOD_NAME(self, dt: datetime) -> datetime:
return dt.astimezone(utc) | [
24,
1166
] |
def METHOD_NAME(self, lex_mode=lex_mode_e.DBRACKET): | [
243
] |
def METHOD_NAME(
name,
root,
shuffle,
batch_size,
python_multiprocessing,
num_parallel_workers,
drop_remainder,
args,
num_shards=1,
shard_id=0,
is_training=True,
):
"""Create SSD dataset with MindDataset."""
if name == "coco":
if is_training:
mindrecord_file = os.path.join(root, "train", "coco0")
else:
mindrecord_file = os.path.join(root, "val", "coco0")
ds = de.MindDataset(
mindrecord_file,
columns_list=["img_id", "image", "annotation"],
num_shards=num_shards,
shard_id=shard_id,
num_parallel_workers=num_parallel_workers,
shuffle=shuffle,
)
decode = de.vision.Decode()
ds = ds.map(operations=decode, input_columns=["image"])
change_swap_op = de.vision.HWC2CHW()
# Computed from random subset of ImageNet training images
normalize_op = de.vision.Normalize(
mean=[0.485 * 255, 0.456 * 255, 0.406 * 255], std=[0.229 * 255, 0.224 * 255, 0.225 * 255]
)
color_adjust_op = de.vision.RandomColorAdjust(brightness=0.4, contrast=0.4, saturation=0.4)
def compose_map_func(img_id, image, annotation):
return preprocess_fn(img_id, image, annotation, is_training, args)
if is_training:
output_columns = ["image", "box", "label", "num_match"]
trans = [color_adjust_op, normalize_op, change_swap_op]
else:
output_columns = ["img_id", "image", "image_shape"]
trans = [normalize_op, change_swap_op]
ds = ds.map(
operations=compose_map_func,
input_columns=["img_id", "image", "annotation"],
output_columns=output_columns,
column_order=output_columns,
python_multiprocessing=python_multiprocessing,
num_parallel_workers=num_parallel_workers,
)
ds = ds.map(
operations=trans,
input_columns=["image"],
python_multiprocessing=python_multiprocessing,
num_parallel_workers=num_parallel_workers,
)
ds = ds.batch(batch_size, drop_remainder=drop_remainder)
return ds
else:
raise NotImplementedError | [
129,
5481,
126
] |
def METHOD_NAME(self):
self.modelObject.detail.Group.role = self.group.attrib['role']
self.modelObject.detail.Group.name = self.group.attrib['name'] | [
846,
15594
] |
def METHOD_NAME(self):
# Modify the builtins out from under a live generator.
def foo():
x = range(3)
yield len(x)
yield len(x)
self.configure_func(foo)
g = foo()
self.assertEqual(next(g), 3)
with swap_attr(builtins, "len", lambda x: 7):
self.assertEqual(next(g), 7) | [
9,
2444,
4298,
795,
1443,
923
] |
def METHOD_NAME(self):
return prioritize.all_categories() | [
9725,
2065
] |
def METHOD_NAME(self):
asset = assets_service.get_asset(self.asset.id)
self.assertEqual(asset["id"], str(self.asset.id))
assets_service.remove_asset(asset["id"])
self.assertRaises(
AssetNotFoundException,
assets_service.get_asset,
str(self.asset.id),
) | [
9,
19,
3455
] |
def METHOD_NAME(self, subtitle):
if subtitle.content is not None:
return
subtitle.content = self.query(subtitle.language, subtitle.hash).content | [
136,
3332
] |
def METHOD_NAME(self, pyramid_request, blackboard_api_client):
plugin = BlackboardGroupingPlugin.factory(sentinel.context, pyramid_request)
assert isinstance(plugin, BlackboardGroupingPlugin)
# pylint: disable=protected-access
assert plugin._blackboard_api == blackboard_api_client | [
9,
1155
] |
def METHOD_NAME(node):
mappings = set()
for key_node, _ in node.value:
try:
if key_node.value in mappings:
raise yaml.constructor.ConstructorError(
"while constructing a mapping",
node.start_mark,
f"found duplicate key {key_node.value!r}",
node.start_mark,
)
mappings.add(key_node.value)
except TypeError:
# Ignore errors for malformed inputs that will be caught later.
pass | [
250,
1119,
219
] |
def METHOD_NAME(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.message = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.name_available = AAZBoolType(
serialized_name="nameAvailable",
)
_schema_on_200.reason = AAZStrType()
return cls._schema_on_200 | [
56,
135,
69,
1072
] |
def METHOD_NAME(self, request: Request, project: Project) -> Response:
if not features.has(
"organizations:profiling-stacktrace-links", project.organization, actor=request.user
):
return Response(status=404)
serializer = StacktraceLinksSerializer(data=request.GET)
if not serializer.is_valid():
return Response(serializer.errors, status=400)
data = serializer.validated_data
result = {"files": [{"file": file} for file in data["file"]]}
mappings_used = 0
mappings_attempted = 0
configs = get_code_mapping_configs(project)
default_error = "stack_root_mismatch" if configs else "no_code_mappings"
for config in configs:
# find all the files that match the current code mapping's stack_root
# and have not already been resolved by another code mapping
#
# if the's an error from a previous code mapping attempted, but this
# current code mapping can be used, we should try again
files = [
file
for file in result["files"]
if file.METHOD_NAME("sourceUrl") is None and file["file"].startswith(config.stack_root)
]
if not files:
continue
mappings_attempted += 1
# safety to limit the maximum number of mappings used
# to avoid reaching API rate limits
if mappings_used >= MAX_CODE_MAPPINGS_USED:
for file in files:
if not file.METHOD_NAME("error") and file.METHOD_NAME("sourceUrl") is None:
file["error"] = "max_code_mappings_applied"
continue
mappings_used += 1
install = get_installation(config)
# should always be overwritten
error: str | None = "file_not_checked"
# since the same code mapping stack root matches all these files, we only check the
# first file and we will assume the other matching files will resolve the same way
ref = data.METHOD_NAME("ref")
if ref:
error = check_file(install, config, files[0]["file"], ref)
if not ref or error:
ref = config.default_branch
error = check_file(install, config, files[0]["file"], ref)
for file in files:
formatted_path = file["file"].replace(config.stack_root, config.source_root, 1)
url = install.format_source_url(config.repository, formatted_path, ref)
if error:
file["error"] = error
file["attemptedUrl"] = url
else:
file["sourceUrl"] = url
# there may be an error from an previous code mapping, clear it
if "error" in file:
del file["error"]
if "attemptedUrl" in file:
del file["attemptedUrl"]
# number of available code mappings
set_measurement("mappings.found", len(configs))
# number of code mappings that matched a stack root
set_measurement("mappings.attempted", mappings_attempted)
# number of code mappings that was attempted
set_measurement("mappings.used", mappings_used)
for file in result["files"]:
if not file.METHOD_NAME("error") and file.METHOD_NAME("sourceUrl") is None:
file["error"] = default_error
return Response(result, status=200) | [
19
] |
def METHOD_NAME(args, torch_client_filename, inputs):
def get_torchrun_arguments(node_rank):
torchrun_path = subprocess.run(["which", "torchrun"], capture_output=True, text=True).stdout.strip()
return [
torchrun_path,
f"--nnodes={args.n_node_in_silo}",
f"--nproc_per_node={args.n_proc_per_node}",
# "--rdzv_backend=c10d",
f"--rdzv_endpoint={args.master_address}:{args.launcher_rdzv_port}",
f"--node_rank={node_rank}",
"--rdzv_id=hi_fl",
torch_client_filename,
] + inputs
network_interface = None if not hasattr(args, "network_interface") else args.network_interface
print(f"Using network interface {network_interface} for process group and TRPC communication")
env_variables = {
"OMP_NUM_THREADS": "4",
}
if network_interface:
env_variables = {
**env_variables,
"NCCL_SOCKET_IFNAME": network_interface,
"GLOO_SOCKET_IFNAME": network_interface,
}
if args.n_node_in_silo == 1:
args.node_rank = 0
args.manual_launch = True
if not (hasattr(args, "n_proc_per_node") and args.n_proc_per_node):
print("Number of processes per node not specified.")
device_type = get_device_type(args)
if torch.cuda.is_available() and device_type == "gpu":
gpu_count = torch.cuda.device_count()
print(f"Using number of GPUs ({gpu_count}) as number of processeses.")
args.n_proc_per_node = gpu_count
else:
print(f"Using number 1 as number of processeses.")
args.n_proc_per_node = 1
if hasattr(args, "manual_launch") and args.manual_launch:
print(f"Manual Client Launcher")
node_rank = args.node_rank
torchrun_cmd_arguments = get_torchrun_arguments(node_rank)
process_args = torchrun_cmd_arguments
print(f"Launching node {node_rank} of silo {args.rank}")
subprocess.run(process_args, env=dict(os.environ, **env_variables))
else:
print(f"Automatic Client Launcher")
which_pdsh = subprocess.run(["which", "pdsh"], capture_output=True, text=True).stdout.strip()
if not which_pdsh:
raise Exception(
f"Silo {args.rank} has {args.n_node_in_silo} nodes. Automatic Client Launcher for more than 1 nodes requires PSDH."
)
print(f"Launching nodes using pdsh")
os.environ["PDSH_RCMD_TYPE"] = "ssh"
node_addresses = ",".join(args.node_addresses)
pdsh_cmd_aruments = ["pdsh", "-w", node_addresses]
exports = ""
for key, val in env_variables.items():
exports += "export {}={}; ".format(key, val)
prerun_args = [
exports,
f"cd {os.path.abspath('.')};",
]
node_rank = "%n"
torchrun_cmd_arguments = get_torchrun_arguments(node_rank)
process_args = pdsh_cmd_aruments + prerun_args + torchrun_cmd_arguments
subprocess.run(process_args) | [
22,
436,
12233,
3228
] |
def METHOD_NAME(self, results: List[InferenceResult]) -> List[CommentedMap]:
type_map = {
InferenceSignalType.DIMENSION.TIME: "time",
InferenceSignalType.DIMENSION.PRIMARY_TIME: "time",
InferenceSignalType.DIMENSION.CATEGORICAL: "categorical",
}
rendered: List[CommentedMap] = []
for result in results:
if not result.type_node.is_subtype_of(InferenceSignalType.DIMENSION.UNKNOWN):
continue
result_data: CommentedMap = CommentedMap(
{
"name": result.column.column_name,
"type": type_map.get(result.type_node, ConfigFileRenderer.UNKNOWN_FIELD_VALUE),
}
)
if result_data["type"] == ConfigFileRenderer.UNKNOWN_FIELD_VALUE:
result_data.yaml_add_eol_comment(self._fixme("unknown field value"), "type")
if result.type_node.is_subtype_of(InferenceSignalType.DIMENSION.TIME):
type_params: CommentedMap = CommentedMap({"time_granularity": "day"})
if result.type_node.is_subtype_of(InferenceSignalType.DIMENSION.PRIMARY_TIME):
type_params["is_primary"] = True
result_data["type_params"] = type_params
if len(result.problems) > 0:
result_data.yaml_set_comment_before_after_key(
key="name",
before=f"{ConfigFileRenderer.UNKNOWN_FIELD_VALUE}: " + ", ".join(result.problems),
)
rendered.append(result_data)
return rendered | [
338,
1425,
1951
] |
def METHOD_NAME(self):
""" Add or remove user notifiers for the objects observed by the root
observer.
"""
if not self.graph.node.notify:
return
for observable in self.graph.node.iter_observables(self.object):
notifier = self.graph.node.get_notifier(
handler=self.handler,
target=self.target,
dispatcher=self.dispatcher,
)
if self.remove:
notifier.remove_from(observable)
else:
notifier.add_to(observable)
self._processed.append((notifier, observable)) | [
238,
894,
188,
10721
] |
def METHOD_NAME():
'''Test that the vector length setter and getter work as expected,
including raising an exception if the value is invalid.
'''
field_vector_arg = FieldVectorArgMetadata("GH_REAL", "GH_READ", "W0", "2")
with pytest.raises(ValueError) as info:
field_vector_arg.vector_length = "invalid"
assert ("The vector size should be a string containing an integer, "
"but found 'invalid'." in str(info.value))
with pytest.raises(ValueError) as info:
field_vector_arg.vector_length = "1"
assert ("The vector size should be an integer greater than 1 but found 1."
in str(info.value))
field_vector_arg.vector_length = "3"
assert field_vector_arg.vector_length == "3" | [
9,
798,
799,
800,
801
] |
def METHOD_NAME(user: Optional[User], alert_group: AlertGroup) -> bool:
if not user:
return False
has_permission = user_is_authorized(user, [RBACPermission.Permissions.CHATOPS_WRITE])
return user.organization == alert_group.channel.organization and has_permission | [
250,
204
] |
def METHOD_NAME():
results = PostfixMaster(context_wrap(POSTFIXMASTER))
assert len(results) == 25
assert results[0]['service'] == 'smtp'
assert results[0]['type'] == 'inet'
assert results[0]['private'] == 'n'
assert results[0]['command'] == 'smtpd'
assert results[14]['args'] == ['-o syslog_name=postfix/submission',
'-o smtpd_tls_security_level=encrypt',
'-o smtpd_sasl_auth_enable=yes',
'-o smtpd_tls_auth_only=yes',
'-o smtpd_reject_unlisted_recipient=no',
'-o smtpd_client_restrictions=$mua_client_restrictions',
'-o smtpd_helo_restrictions=$mua_helo_restrictions',
'-o smtpd_sender_restrictions=$mua_sender_restrictions',
'-o smtpd_recipient_restrictions=',
'-o smtpd_relay_restrictions=permit_sasl_authenticated,reject',
'-o milter_macro_daemon_name=ORIGINATING']
assert results[-1] == {'service': 'mailman', 'type': 'unix', 'private': '-', 'unpriv': 'n', 'chroot': 'n', 'wakeup': '-', 'maxproc': '-', 'command': 'pipe', 'args': ['flags=FRX user=list argv=/usr/lib/mailman/bin/postfix-to-mailman.py', '${nexthop} ${user}']} | [
9,
3444,
2614
] |
def METHOD_NAME(self, data: Any) -> bool:
return True | [
684,
2334
] |
def METHOD_NAME(self): | [
9,
17902,
1529
] |
def METHOD_NAME(self, command_args):
super().METHOD_NAME(command_args)
self._execute_operations()
return self._output() | [
1519
] |
async def METHOD_NAME(self):
pass | [
958,
0,
1
] |
def METHOD_NAME(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request | [
123,
377
] |
def METHOD_NAME(Application):
"""
Create a new application by copying the template
"""
# create directory ../Mod/<Application>
if not os.path.isdir("../Mod/" + Application):
os.mkdir("../Mod/" + Application)
else:
sys.stdout.write(Application + " already exists. Please enter another name.\n")
sys.exit()
# copying files from _TEMPLATE_ to ../Mod/<Application>
sys.stdout.write("Copying files...")
MakeAppTools.copyTemplate(
"_TEMPLATE_",
"../Mod/" + Application,
"_TEMPLATE_",
Application,
SetupFilter(FilFilter),
SetupFilter(DirFilter),
)
sys.stdout.write("Ok\n")
# replace the _TEMPLATE_ string by <Application>
sys.stdout.write("Modifying files...\n")
MakeAppTools.replaceTemplate("../Mod/" + Application, "_TEMPLATE_", Application)
MakeAppTools.replaceTemplate(
"../Mod/" + Application,
"${CMAKE_SOURCE_DIR}/src/Tools/",
"${CMAKE_SOURCE_DIR}/src/Mod/",
)
# make the configure script executable
# os.chmod("../Mod/" + Application + "/configure", 0777);
sys.stdout.write("Modifying files done.\n")
sys.stdout.write(Application + " module created successfully.\n") | [
129,
991
] |
def METHOD_NAME(t):
outl = ''
for atom in t:
outl += '%4s -' % atom
return outl[:-1] | [
275,
6076
] |
def METHOD_NAME(cls, app):
try:
from .helpers import JSONProvider
app.json = JSONProvider(app)
except ImportError:
from .helpers import JSONEncoder
app.json_encoder = JSONEncoder | [
111,
763,
2300
] |
def METHOD_NAME(name, array, n=1):
dtype_map = {
"int8": "Int8",
"int16": "Int16",
"int32": "Int32",
"int64": "Int64",
"uint8": "UInt8",
"uint16": "UInt16",
"uint32": "UInt32",
"uint64": "UInt64",
"float32": "Float32",
"float64": "Float64",
"str": "String",
}
element = None
if str(array.dtype) in dtype_map.keys():
element = ET.Element("DataArray")
element.set("Name", name)
element.set("NumberOfComponents", str(n))
element.set("type", dtype_map[str(array.dtype)])
element.text = "\t".join(map(str, array.ravel()))
return element | [
877,
24,
669
] |
def METHOD_NAME(self, context):
updateNode(self, context)
#self.inputs['Scale'].hide_safe = self.auto_scale | [
86,
6424
] |
def METHOD_NAME(self, data: str, url: Optional[List[str]] = None) -> None:
self.config.path.write_text(data) | [
278
] |
def METHOD_NAME(self):
return self.fn | [
5661
] |
def METHOD_NAME(self) -> List[Constraint]:
"""Returns constraints describing the domain of the node.
"""
# y > 0.
return [self.args[1] >= 0] | [
1674
] |
def METHOD_NAME(self):
follower = UserFactory()
q = QuestionFactory()
# The above might make follows, which this test isn't about. Clear them out.
Follow.objects.all().delete()
follow(follower, q, actor_only=False)
# Make a new action for the above. This should trigger notifications.
action.send(q.creator, verb="edited", action_object=q)
act = Action.objects.order_by("-id")[0]
notification = Notification.objects.get(action=act)
self.assertEqual(notification.owner, follower)
self.assertEqual(notification.action, act) | [
9,
11164,
1006,
279
] |
def METHOD_NAME(self, prefix, uri):
if not uri:
return
# Jython uses '' instead of None; standardize on None
prefix = prefix or None
self.track_namespace(prefix, uri)
if prefix and uri == "http://www.w3.org/1999/xlink":
self.decls["xmlns:" + prefix] = uri | [
447,
426,
445
] |
def METHOD_NAME(group):
"""Fix up any issues with badly chosen values
"""
if group == 'modifiers':
return 'modifier'
if group == 'media':
return 'consumer'
return group | [
711,
846
] |
def METHOD_NAME( # type: ignore[override]
self,
object: object,
name: str,
mod: str | None = None,
funcs: Mapping[str, str] = {},
classes: Mapping[str, str] = {},
methods: Mapping[str, str] = {},
cl: type | None = None,
) -> str: ... | [
13168
] |
def METHOD_NAME(self, data: bytes) -> None:
"""Feed byte string into the pipe""" | [
353,
321
] |
def METHOD_NAME(public_key, secret_key):
"""
Accepts public/secret key pair from caller
"""
return utils.lib.zcert_new_from(public_key, secret_key) | [
80,
280
] |
def METHOD_NAME(self) -> HtmlBodyMailParts:
tree = self._get_proper_main_body_tree()
return self._distinct_elements(tree) | [
19,
1532
] |
def METHOD_NAME(remove_database, restart_wazuh_daemon, test_case, create_groups):
'''
description: Check that every input message using the 'set_agent_groups' command in wazuh-db socket generates
the proper output to wazuh-db socket. To do this, it performs a query to the socket with a command
taken from the list of test_cases's 'input' field, and compare the result with the test_case's
'output' and 'expected_group' fields.
wazuh_min_version: 4.4.0
parameters:
- remove_database:
type: fixture
brief: Delete databases.
- restart_wazuh:
type: fixture
brief: Reset the 'ossec.log' file and restart Wazuh.
- test_case:
type: fixture
brief: List of test_case stages (dicts with input, output and agent_id and expected_groups keys).
- create_groups:
type: fixture:
brief: Create required groups.
assertions:
- Verify that the socket response matches the expected output.
- Verify that the agent has the expected_group assigned.
input_description:
- Test cases are defined in the set_agent_groups.yaml file. This file contains the command to insert the agents
groups, with different modes and combinations, as well as the expected outputs and results.
expected_output:
- f"Assertion Error - expected {output}, but got {response}"
- 'Unable to add agent'
- 'did not recieve expected groups in Agent.'
tags:
- wazuh_db
- wdb_socket
'''
output = test_case['output']
agent_id = test_case['agent_id']
# Insert test Agent
response = insert_agent_in_db(id=agent_id, connection_status='disconnected', registration_time=str(time.time()))
# Apply preconditions
if 'pre_input' in test_case:
query_wdb(test_case['pre_input'])
# Add tested group
response = query_wdb(test_case["input"])
# validate output
assert response == output, f"Assertion Error - expected {output}, but got {response}"
# Check warnings
if 'expected_warning' in test_case:
callback = test_case['expected_warning']
evm.check_event(callback=callback, file_to_monitor=fw.LOG_FILE_PATH, timeout=20).result()
# get agent data and validate agent's groups
response = query_wdb(f'global get-agent-info {agent_id}')
assert test_case['expected_group_sync_status'] == response[0]['group_sync_status']
if test_case["expected_group"] == 'None':
assert 'group' not in response[0], "Agent has groups data and it was expecting no group data"
else:
assert test_case["expected_group"] == response[0]['group'], "Did not receive the expected groups in the agent." | [
9,
0,
1849,
861
] |
def METHOD_NAME(router_name):
"Return a dict with link state id as key and forwarding addresses as value"
result = dict()
tgen = get_topogen()
router = tgen.gears[router_name]
cmd = "show ip ospf database external\n"
output = topotest.normalize_text(router.vtysh_cmd(cmd))
for line in output.splitlines():
re0 = re.match(r"\s+Link State ID: (\S+) \(External Network Number\)", line)
if re0:
lsa = re0.group(1)
re1 = re.match(r"\s+Forward Address: (\S+)", line)
if re1:
result[lsa] = re1.group(1)
return result | [
10446,
19,
10720,
-1
] |
def METHOD_NAME():
return {aliases: {}} | [
111,
467,
468
] |
def METHOD_NAME(cls, column, _dialect, **kwargs):
"""
Please note that there is a stricter version to verify GUID, as can be seen in the following link:
https://www.techtarget.com/searchwindowsserver/definition/GUID-global-unique-identifier#:~:text=RFC%204122%20specification.-,How%20does%20GUID%20work%3F,-GUIDs%20are%20constructed
However, since the UUID package doesn't seem to enforce it, the chosen regex was the less stricter.
For future purposes, the stricter pattern can be found here as well, commented out.
"""
# regex_pattern = '^(urn:uuid:)?\{?[A-Fa-f0-9]{8}-?[A-Fa-f0-9]{4}-?[1-5][A-Fa-f0-9]{3}-?[89ABab][A-Fa-f0-9]{3}-?[A-Fa-f0-9]{12}\}?$'
regex_pattern = (
"^(urn:uuid:)?\\{?[0-9a-fA-F]{8}(-?[0-9a-fA-F]{4}){3}-?[0-9a-fA-F]{12}\\}?$"
)
return column.regexp_match(regex_pattern) | [
4267
] |
def METHOD_NAME(self):
raise NotImplementedError("This should be implemented.") | [
1067
] |
def METHOD_NAME(server, no, desc='dummy', sleep=True):
cn = '%s%d' % (USER_CN, no)
dn = 'cn=%s,ou=people,%s' % (cn, SUFFIX)
log.fatal('Adding user (%s): ' % dn)
server.add_s(Entry((dn, {'objectclass': ['top', 'person', 'inetuser'],
'sn': ['_%s' % cn],
'description': [desc]})))
if sleep:
time.sleep(2) | [
238,
21
] |
def METHOD_NAME(self):
"""Enables to get outputs of the operator by evaluating it
Returns
--------
outputs : OutputsElementaryDataSelectorFc
"""
return super().METHOD_NAME | [
141
] |
def METHOD_NAME(once=False, threads=1, sleep_time=60):
"""
Starts up the Judge-Repairer threads.
"""
setup_logging(process_name=DAEMON_NAME)
if rucio.db.sqla.util.is_old_db():
raise exception.DatabaseException('Database was not updated, daemon won\'t start')
if once:
rule_repairer(once)
else:
logging.info('Repairer starting %s threads' % str(threads))
threads = [threading.Thread(target=rule_repairer, kwargs={'once': once,
'sleep_time': sleep_time}) for i in range(0, threads)]
[t.start() for t in threads]
# Interruptible joins require a timeout.
while threads[0].is_alive():
[t.join(timeout=3.14) for t in threads] | [
22
] |
def METHOD_NAME(self) -> dict:
"""Creates a YAML friendly dictionary that can be saved for future reloading.
This dictionary will contain only `Python` types that can later be converted to their
proper formats. See `_attr_floris_filter` for detail on which attributes are
removed from the export.
Returns:
dict: All key, value pairs required for class recreation.
"""
return attrs.asdict(self, filter=_attr_floris_filter, value_serializer=_attr_serializer) | [
947,
553
] |