text (stringlengths 15–7.82k) | ids (sequencelengths 1–7) |
---|---|
def METHOD_NAME(self, *points):
self.value.append(("qCurveTo", points)) | [
1010,
461,
24
] |
def METHOD_NAME(self, token: str) -> None: ... | [
1049
] |
def METHOD_NAME(self):
self.assertEqual(self.__test_weather_rain,
weather.find_closest_weather(self.__test_weathers,
self.__test_time_low + 200))
self.assertEqual(self.__test_weather_sun,
weather.find_closest_weather(self.__test_weathers,
self.__test_time_high - 200)) | [
9,
416,
5221,
7677
] |
def METHOD_NAME() -> List[FlashMode]:
flash_xml = ET.parse(os.path.join(utils.CONF_DIR, "flash_modes.xml"))
modes = []
for xml_mode in flash_xml.getroot().findall("mode"):
mode_name = xml_mode.get("name")
vars = {}
for xml_var in xml_mode.findall("variable"):
var_name = xml_var.get("name")
var_value = xml_var.get("value")
vars[var_name] = var_value
mode = FlashMode(mode_name, vars)
for xml_board in xml_mode.find("boards").findall("board"):
board = xml_board.get("name")
mode.boards.append(board)
modes.append(mode)
return modes | [
214,
6062,
3965
] |
def METHOD_NAME(self):
self._hide_all()
self.button_bottom.show()
self.button_bottom.show_refresh()
self.button_bottom.hide_close() | [
697,
7887,
1920
] |
def METHOD_NAME(self):
# Instantiate a BlobServiceClient using a connection string
from azure.storage.blob import BlobServiceClient
blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)
try:
# [START bsc_create_container]
try:
new_container = blob_service_client.create_container("containerfromblobservice")
properties = new_container.get_container_properties()
except ResourceExistsError:
print("Container already exists.")
# [END bsc_create_container]
# [START bsc_list_containers]
# List all containers
all_containers = blob_service_client.list_containers(include_metadata=True)
for container in all_containers:
print(container['name'], container['metadata'])
# Filter results with name prefix
test_containers = blob_service_client.list_containers(name_starts_with='test-')
for container in test_containers:
print(container['name'], container['metadata'])
# [END bsc_list_containers]
finally:
# [START bsc_delete_container]
# Delete container if it exists
try:
blob_service_client.delete_container("containerfromblobservice")
except ResourceNotFoundError:
print("Container already deleted.")
# [END bsc_delete_container] | [
224,
710
] |
def METHOD_NAME(self):
obj = self.base_factory()
self.submission_factory(**{self.relation_to_app: obj}, rejected=True)
qs = RoundsAndLabs.objects.with_progress()
fetched_obj = qs.first()
self.assertEqual(fetched_obj.total_submissions, 1)
self.assertEqual(fetched_obj.closed_submissions, 1)
self.assertEqual(fetched_obj.progress, 100) | [
9,
41,
12729
] |
def METHOD_NAME(word):
"""Return set of symbol pairs in a word.
Word is represented as tuple of symbols (symbols being variable-length strings).
"""
pairs = set()
prev_char = word[0]
for char in word[1:]:
pairs.add((prev_char, char))
prev_char = char
return pairs | [
19,
3151
] |
def METHOD_NAME(self):
rooms = self.env["pms.room"].search([])
_logger.warning("Init Add All today Task")
for room in rooms:
room.add_today_tasks()
return | [
238,
75,
3329,
620
] |
def METHOD_NAME(cls, parser):
parser.add_argument('--target', required=True,
help='target override')
parser.add_argument('--daparg',
help='Additional -da arguments to pyocd tool')
parser.add_argument('--pyocd', default='pyocd',
help='path to pyocd tool, default is pyocd')
parser.add_argument('--flash-opt', default=[], action='append',
help='''Additional options for pyocd flash,
e.g. --flash-opt="-e=chip" to chip erase''')
parser.add_argument('--frequency',
help='SWD clock frequency in Hz')
parser.add_argument('--gdb-port', default=DEFAULT_PYOCD_GDB_PORT,
help='pyocd gdb port, defaults to {}'.format(
DEFAULT_PYOCD_GDB_PORT))
parser.add_argument('--telnet-port', default=DEFAULT_PYOCD_TELNET_PORT,
help='pyocd telnet port, defaults to {}'.format(
DEFAULT_PYOCD_TELNET_PORT))
parser.add_argument('--tui', default=False, action='store_true',
help='if given, GDB uses -tui')
parser.add_argument('--board-id', dest='dev_id',
help='obsolete synonym for -i/--dev-id') | [
74,
238,
1319
] |
def METHOD_NAME(cls, left: Any, right: Any):
head = cls._as_key(left)
tail = cls._as_key(right)
return cls(head._path + tail._path) | [
2003
] |
async def METHOD_NAME(profile, profile_dir):
year, semester, class_number = parse_course(profile)
# fetch the SIS term id, e.g. 2195
term_id = await terms.get_term_id_from_year_sem(
SIS_TERMS_ID, SIS_TERMS_KEY, year, semester
)
term_id = int(term_id)
logger.info(f"{year} {term_id} {class_number}")
s_emails = await student_emails(term_id, class_number)
save_emails(profile_dir, profile, 'students', s_emails)
i_emails = await instructor_emails(term_id, class_number)
save_emails(profile_dir, profile, 'instructors', i_emails) | [
276,
337
] |
def METHOD_NAME(x, y, width, height, thickness, padding):
border = (thickness // 4) + padding
display.line(x + border, y + height - border,
x + (width // 2), y + border)
display.line(x + (width // 2), y + border,
x + width - border, y + height - border) | [
1100,
1
] |
def METHOD_NAME(self, txt):
""" Use <ol> for line numbering """
# Fix Whitespace
txt = txt.replace('\t', ' '*TAB_LENGTH)
txt = txt.replace(" "*4, " ")
txt = txt.replace(" "*3, " ")
txt = txt.replace(" "*2, " ")
# Add line numbers
lines = txt.splitlines()
txt = '<div class="codehilite"><pre><ol>\n'
for line in lines:
txt += '\t<li>%s</li>\n'% line
txt += '</ol></pre></div>\n'
return txt | [
106
] |
def METHOD_NAME():
net = pn.case6495rte()
pp.runpp(net)
assert net.converged
_ppc_element_test(net, 6495, 9019, 1650, 1372) | [
9,
-1
] |
def METHOD_NAME():
map_family, map_style = build_stylemap_names(
family_name="NotoSans",
style_name="ExtraBold",
is_bold=False,
is_italic=False,
linked_style=None,
)
assert map_family == "NotoSans ExtraBold"
assert map_style == "regular" | [
9,
256,
2448
] |
def METHOD_NAME(self) -> URL:
base = f"disk://{self.cluster_name}"
if self.org_name:
base += f"/{self.org_name}"
return URL(f"{base}/{self.project_name}/{self.id}") | [
354
] |
def METHOD_NAME(self):
product = create_product(num_in_stock=1)
page = self.app.get(reverse("catalogue:index"))
self.assertContains(page, product.title)
self.assertContains(page, gettext("Add to basket")) | [
9,
1442,
238,
24,
9487,
1974,
43
] |
def METHOD_NAME(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request | [
123,
377
] |
def METHOD_NAME(self, name: str, sig: bytes) -> bytes:
data = bytearray()
data.append(len(name))
data += self._string_to_bytes(name)
data.append(len(sig))
data += sig
return data | [
7583,
5614,
353,
156
] |
def METHOD_NAME() -> TextData:
return TextData(raw_text=[
"Explicit is better than implicit.",
"Simple is better than complex.",
"Complex is better than complicated.",
"Flat is better than nested.",
"Readability counts.",
"Errors should never pass silently.",
"There should be one-- and preferably only one --obvious way to do it.",
"If the implementation is easy to explain, it may be a good idea.",
]) | [
126,
529,
2968
] |
def METHOD_NAME():
config_file = './experiments/autocal_mtf_50/conf_kg_auto_50.json'
#config_file = './experiments/autocal_mtf_50/conf_dg_auto_50.json'
#mtf = 100
sys.argv.extend(['--config', config_file])
params, _ = ontomatch.utils.util.init()
src_file = params['dataset']['src']
src_onto = ontomatch.utils.util.read_csv_table_ditto_format(src_file)
tgt_file = params['dataset']['tgt']
tgt_onto = ontomatch.utils.util.read_csv_table_ditto_format(tgt_file)
matchfile = params['post_processing']['evaluation_file']
index_matches = ontomatch.evaluate.read_match_file_as_index_set(matchfile, linktypes = [1, 2, 3, 4, 5])
logging.info('ground truth matches=%s', len(index_matches))
params_blocking = params['blocking']
#params_blocking['model_specific']['max_token_occurrences_src'] = mtf
#params_blocking['model_specific']['max_token_occurrences_tgt'] = mtf
#logging.debug('changed params_blocking=%s', params_blocking)
iterator = ontomatch.blocking.create_iterator(src_onto, tgt_onto, params_blocking)
index_pairs = iterator.candidate_matching_pairs
intersection_matches = index_pairs.intersection(index_matches)
false_negatives = index_matches.difference(intersection_matches)
logging.info('number of matches in candidate set=%s, false negatives=%s', len(intersection_matches), len(false_negatives))
df_candidates = pd.DataFrame(index=index_pairs)
df_candidates.index.names = ['ltable_id','rtable_id']
df_candidates['label'] = 0 # nonmatch
df_candidates.at[intersection_matches, 'label'] = 1 # match
#df_candidates.rename(index={'idx_1':'ltable_id', 'idx_2':'rtable_id'}, inplace=True)
logging.info('number of candidate pairs in data frame=%s, matches=%s', len(df_candidates), len(df_candidates[df_candidates['label'] == 1]))
# create split
train_size = 0.6
train, rest = sklearn.model_selection.train_test_split(df_candidates, train_size=train_size, shuffle=True, stratify=df_candidates['label'])
val_size = 0.2
rest_size = val_size / (1 - train_size)
val, test = sklearn.model_selection.train_test_split(rest, train_size=rest_size, shuffle=True, stratify=rest['label'])
logging.info('train / val / test = %s / %s / %s', len(train), len(val), len(test))
logging.info('matches = %s / %s / %s', len(train[train['label'] == 1]), len(val[val['label'] == 1]), len(test[test['label'] == 1]))
train.to_csv('../tmp/train.csv', index=True)
val.to_csv('../tmp/valid.csv', index=True)
test.to_csv('../tmp/test.csv', index=True) | [
129,
849,
1205,
9,
265,
1537
] |
def METHOD_NAME(self): | [
9,
163,
610,
1072
] |
def METHOD_NAME(self):
""" Create and connect the netlist """
# This will create a default set of bitline/wordline names
self.create_all_wordline_names()
self.create_all_bitline_names()
self.add_modules()
self.add_pins()
self.create_instances() | [
129,
5422
] |
def METHOD_NAME(self):
if self.options.shared:
self.options.rm_safe("fPIC") | [
111
] |
def METHOD_NAME(self,m,rank_in,data,data_out):
# data and data_out are in coefficient space
(start_index_in,end_index_in,spin_in) = self.tensor_index(m,rank_in)
rank_out = rank_in+1
(start_index_out,end_index_out,spin_out) = self.tensor_index(m,rank_out)
half = 2**(rank_out-1)
for i in range(2**(rank_out)):
if i//half == 0:
operator = self.op('k+',m,spin_in[i%half])
else:
operator = self.op('k-',m,spin_in[i%half])
np.copyto( data_out[start_index_out[i]:end_index_out[i]],
operator.dot(data[start_index_in[i%half]:end_index_in[i%half]]) ) | [
140
] |
def METHOD_NAME(self):
self.address = {
'openbare_ruimte': 'Amstel',
'huisnummer': 1,
'huisletter': '',
'huisnummer_toevoeging': '',
'postcode': '1011PN',
'woonplaats': 'Amsterdam'
} | [
0,
1
] |
def METHOD_NAME(path):
old_path = os.getcwd()
os.METHOD_NAME(path)
try:
yield
finally:
os.METHOD_NAME(old_path) | [
9227
] |
def METHOD_NAME(self, A):
# Left-hand side of (1) equation
return 2 * A * sinh (self.dx / (2*A)) | [
1523
] |
async def METHOD_NAME(request: web.Request) -> web.Response:
computations = ComputationsApi(request.app)
run_policy = get_project_run_policy(request.app)
assert run_policy # nosec
user_id = UserID(request[RQT_USERID_KEY])
project_id = ProjectID(request.match_info["project_id"])
try:
project_ids: list[ProjectID] = await run_policy.get_runnable_projects_ids(
request, project_id
)
_logger.debug("Project %s will get %d variants", project_id, len(project_ids))
list_computation_tasks = parse_obj_as(
list[ComputationTaskGet],
await asyncio.gather(
*[
computations.get(project_id=pid, user_id=user_id)
for pid in project_ids
]
),
)
assert len(list_computation_tasks) == len(project_ids) # nosec
# NOTE: until changed all the versions of a meta project shall use the same cluster
# this should fail the day that changes
assert all(
c.cluster_id == list_computation_tasks[0].cluster_id
for c in list_computation_tasks
)
return web.json_response(
data={"data": list_computation_tasks[0].dict(by_alias=True)},
dumps=json_dumps,
)
except DirectorServiceError as exc:
return create_error_response(
exc,
reason=exc.reason,
http_error_cls=get_http_error(exc.status) or web.HTTPServiceUnavailable,
)
except ValidationError as exc:
return create_error_response(exc, http_error_cls=web.HTTPInternalServerError) | [
19,
9972
] |
def METHOD_NAME(lower, upper, body_fun, init_val):
val = init_val
for i in range(lower, upper):
val = body_fun(i, val)
return val | [
-1,
1751
] |
def METHOD_NAME(self):
for patch in self.conan_data.get("patches", {}).get(self.version, []):
tools.patch(**patch)
with self._build_context():
autotools = self._configure_autotools()
autotools.make() | [
56
] |
def METHOD_NAME(test, props):
test.start_server(get_app(props))
target = test.table("table")
target.cell(3, 1).click()
with test.hold(Keys.SHIFT):
test.send_keys(Keys.ARROW_DOWN + Keys.ARROW_UP)
for row in range(2, 5):
for col in range(0, 2):
assert target.cell(row, col).is_selected() == (row in [3] and col in [1])
assert test.get_log_errors() == [] | [
9,
-1,
1046,
1472,
481,
868,
1
] |
def METHOD_NAME(
self, strategy_cls, optimizer_class_1, optimizer_class_2): | [
22,
968,
713,
41,
53,
1173,
578
] |
def METHOD_NAME(enrollment_session, target, artifact_version_pk):
if not isinstance(artifact_version_pk, str):
artifact_version_pk = str(artifact_version_pk)
payload = {"avpk": artifact_version_pk,
"esm": enrollment_session._meta.model_name,
"espk": enrollment_session.pk}
if target.enrolled_user:
payload["eupk"] = target.enrolled_user.pk
return signing.dumps(payload, salt="zentral_mdm_legacy_profile") | [
278,
3116,
337,
466
] |
def METHOD_NAME(self):
l = []
fun = lambda x: l.append(x)
scheduler = sched.scheduler(time.time, time.sleep)
for x in [10, 9, 8, 7, 6]:
scheduler.enter(x, 1, fun, (x,))
scheduler.run(blocking=False)
self.assertEqual(l, []) | [
9,
22,
256,
5999
] |
async def METHOD_NAME(self, deployment, entity_id, address, type):
if (
deployment in self._deployments
and entity_id in self._deployments[deployment][type]
):
self._logger.debug(
f'removing connection for deployment {deployment}/{type}/{entity_id} to {address}'
)
await self._deployments[deployment][type][entity_id].remove_connection(
address
)
if not self._deployments[deployment][type][entity_id].has_connections():
self._deployments[deployment][type].pop(entity_id) | [
188,
550
] |
async def METHOD_NAME(self, **kwargs: Any) -> _models.OperationListResult:
"""Lists all of the available REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationListResult or the result of cls(response)
:rtype: ~azure.mgmt.rdbms.mysql.models.OperationListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2017-12-01"))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("OperationListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized | [
245
] |
def METHOD_NAME(graph_def, input_nodes, output_nodes, output_dtypes,
output_quantized, op_name, op_type):
"""Fuse subgraph between input_nodes and output_nodes into a single custom op.
Args:
graph_def: A graph_pb2.GraphDef proto.
input_nodes: input nodes to the subgraph to be fused.
output_nodes: output nodes to the subgraph to be fused.
output_dtypes: A list of output datatypes for the custom op
output_quantized: A boolean flag that indicates if output is quantized
op_name: fused op name.
op_type: fused op type.
Returns:
The GraphDef of the new graph.
Raises:
TypeError: If 'graph_def' is not a graph_pb2.GraphDef proto.
"""
if not isinstance(graph_def, graph_pb2.GraphDef):
raise TypeError("graph_def must be a graph_pb2.GraphDef proto.")
if isinstance(input_nodes, six.string_types):
raise TypeError("input_nodes must be a list.")
if isinstance(output_nodes, six.string_types):
raise TypeError("output_nodes must be a list.")
name_to_input_name, name_to_node, name_to_seq_num = _extract_graph_summary(
graph_def)
_assert_nodes_are_present(name_to_node, input_nodes + output_nodes)
# Nodes upto and including input_nodes
reachable_by_input = _bfs_for_reachable_nodes(input_nodes, name_to_input_name)
# Nodes upto and including output_nodes
reachable_by_output = _bfs_for_reachable_nodes(output_nodes,
name_to_input_name)
# Set of nodes in the list input_nodes
input_nodes_set = set(input_nodes)
# Set of nodes in the list output_nodes
output_nodes_set = set(output_nodes)
nodes_post_output = []
for node in graph_def.node:
n = _node_name(node.name)
if n in reachable_by_output:
if n not in reachable_by_input and n not in output_nodes_set:
# n is between input and output, i.e., part of the fused op
next_to_visit = [n]
visited = set()
while next_to_visit:
cur_node = next_to_visit[0]
visited.add(cur_node)
del next_to_visit[0]
if cur_node in reachable_by_input and cur_node not in input_nodes_set:
raise TypeError("Node %s uses input %s not in input_nodes." %
(n, cur_node))
if cur_node not in input_nodes_set:
next_to_visit += [
input_node for input_node in name_to_input_name[cur_node]
if input_node not in visited
]
elif n not in reachable_by_input:
nodes_post_output.append(n)
# Add all nodes upto the input nodes
out = graph_pb2.GraphDef()
reachable_by_input_sorted = sorted(
list(reachable_by_input), key=lambda n: name_to_seq_num[n])
for node in reachable_by_input_sorted:
out.node.extend([copy.deepcopy(name_to_node[node])])
# Add the custom op
new_node = node_def_pb2.NodeDef()
for node in input_nodes:
new_node.input.append(node)
new_node.attr["_output_types"].list.type[:] = output_dtypes
new_node.attr["_output_quantized"].b = output_quantized
new_node.op = op_type
new_node.name = op_name
out.node.extend([new_node])
# Add the nodes in the output of the custom op
for index, n in enumerate(output_nodes):
assert len(name_to_node[n].input) == 1
new_node = copy.deepcopy(name_to_node[n])
del new_node.input[:]
new_node.input.append(op_name + (":" + str(index) if index != 0 else ""))
out.node.extend([new_node])
# Add the nodes post output_nodes
for n in nodes_post_output:
out.node.extend([copy.deepcopy(name_to_node[n])])
out.library.CopyFrom(graph_def.library)
out.versions.CopyFrom(graph_def.versions)
return out | [
2151,
441
] |
def METHOD_NAME(self, meth):
assert meth in ("one", "set")
i64 = rint(64)
pix = self.missing_pyramid()
q = ("select tb from Thumbnail tb "
"where tb.pixels.id = %s "
"order by tb.id desc ")
q = q % pix
p = ParametersI().page(0, 1)
def get():
return self.query.findByQuery(q, p)
# Before anything has been called, there
# should be no thumbnail
assert not get()
# At this stage, there should still be no
# thumbnail
tb = self.client.sf.createThumbnailStore()
if meth == "one":
tb.setPixelsId(int(pix))
tb.resetDefaults()
assert not tb.thumbnailExists(i64, i64)
assert tb.isInProgress()
# As soon as it's requested, it should have a -1
# version to mark pyramid creation as ongoing.
if meth == "one":
before = tb.getThumbnail(i64, i64)
assert not tb.thumbnailExists(i64, i64)
assert tb.isInProgress()
elif meth == "set":
before = tb.getThumbnailSet(i64, i64, [int(pix)])
before = before[int(pix)]
assert get().version.val == -1
# Now we wait until the pyramid has been created
# and test that a proper version has been set.
event = get_event("test_thumbs")
secs = 20
rps = self.client.sf.createRawPixelsStore()
for x in range(secs):
try:
rps.setPixelsId(int(pix), True)
event = None
break
except MissingPyramidException:
event.wait(1)
if event:
assert "Pyramid was not generated %ss" % secs
if meth == "one":
# Re-load the thumbnail store now that
# the pyramid is generated.
tb.close()
tb = self.client.sf.createThumbnailStore()
if not tb.setPixelsId(int(pix)):
tb.resetDefaults()
tb.close()
tb = self.client.sf.createThumbnailStore()
assert tb.setPixelsId(int(pix))
after = tb.getThumbnail(i64, i64)
assert before != after
assert tb.thumbnailExists(i64, i64)
assert not tb.isInProgress()
elif meth == "set":
tb.getThumbnailSet(i64, i64, [int(pix)])
assert get().version.val >= 0 | [
9,
4137,
281
] |
def METHOD_NAME(self):
self.tmpfp.write(b'a'*16)
mm = memmap(self.tmpfp, dtype='float64')
assert_equal(mm.shape, (2,)) | [
9,
654,
555
] |
def METHOD_NAME(self):
return self.gear_number | [
19,
17583,
106
] |
def METHOD_NAME(self, thread):
return reverse(
"misago:thread-new", kwargs={"slug": thread.slug, "pk": thread.pk}
) | [
19,
600,
80,
72,
274
] |
def METHOD_NAME(batting):
@udf.scalar.python
def num_vowels_map(s: str, include_y: bool = False) -> dict[str, int]:
y = "y" * include_y
vowels = "aeiou" + y
counter = dict.fromkeys(vowels, 0)
for c in s:
if c in vowels:
counter[c] += 1
return counter
batting = batting.limit(100)
expr = batting.select(vowel_dist=num_vowels_map(batting.playerID))
df = expr.execute()
assert not df.empty | [
9,
422,
6013
] |
def METHOD_NAME(dut: Dut) -> None:
dut.serial.erase_flash()
dut.serial.reset_efuses()
dut.burn_wafer_version()
bootloader_path = os.path.join(dut.app.binary_path, 'bootloader/bootloader.bin')
dut.serial.bootloader_flash(bootloader_path)
dut.serial.flash() | [
1290,
447,
3271,
991
] |
def METHOD_NAME(self):
try:
return CHECKS[self.name]
except KeyError:
return None | [
250,
122
] |
def METHOD_NAME(client: Client):
with pytest.raises(RuntimeError):
device.recover(
client,
12,
False,
False,
"label",
"en-US",
client.mnemonic_callback,
)
ret = client.call_raw(
messages.RecoveryDevice(
word_count=12, type=messages.RecoveryDeviceType.ScrambledWords
)
)
assert isinstance(ret, messages.Failure)
assert "Device is already initialized" in ret.message | [
9,
997,
924
] |
def METHOD_NAME(file_list):
"""Filter out non api files."""
return [
f for f in file_list
if f.endswith('.swift') or (f.endswith('.h') and 'Public' in f)
] | [
19,
58,
1537
] |
def METHOD_NAME():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_polygon_transactions_value'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_to_address'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_hash'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_gas_price'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_gas'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_from_address'), table_name='polygon_transactions')
op.drop_index(op.f('ix_polygon_transactions_block_number'), table_name='polygon_transactions')
op.drop_table('polygon_transactions')
op.drop_index(op.f('ix_polygon_labels_transaction_hash'), table_name='polygon_labels')
op.drop_index(op.f('ix_polygon_labels_label'), table_name='polygon_labels')
op.drop_index(op.f('ix_polygon_labels_block_timestamp'), table_name='polygon_labels')
op.drop_index(op.f('ix_polygon_labels_block_number'), table_name='polygon_labels')
op.drop_index(op.f('ix_polygon_labels_address'), table_name='polygon_labels')
op.drop_table('polygon_labels')
op.drop_index(op.f('ix_polygon_blocks_timestamp'), table_name='polygon_blocks')
op.drop_index(op.f('ix_polygon_blocks_hash'), table_name='polygon_blocks')
op.drop_index(op.f('ix_polygon_blocks_block_number'), table_name='polygon_blocks')
op.drop_table('polygon_blocks')
# ### end Alembic commands ### | [
1502
] |
def METHOD_NAME(tree: BoundingBoxTree, midpoint_tree: BoundingBoxTree, mesh: Mesh,
points: npt.NDArray[np.floating]) -> npt.NDArray[np.int32]:
"""Compute closest mesh entity to a point.
Args:
tree: bounding box tree for the entities.
midpoint_tree: A bounding box tree with the midpoints of all
the mesh entities. This is used to accelerate the search.
mesh: The mesh.
points: The points to check for collision, ``shape=(num_points,3)``.
Returns:
Mesh entity index for each point in ``points``. Returns -1 for a
point if the bounding box tree is empty.
"""
return _cpp.geometry.METHOD_NAME(tree._cpp_object, midpoint_tree._cpp_object, mesh._cpp_object, points) | [
226,
5221,
2419
] |
def METHOD_NAME(
theme, delete_media, media, image
):
delete_media(theme, [media, image])
assert not theme.media.exists() | [
9,
107,
344,
1091,
1046,
673,
1108
] |
def METHOD_NAME(self, *args, **kwargs) -> Optional[List[MlHyperParameter]]:
"""Get the Hyper Parameters from the MlModel""" | [
19,
4910,
434
] |
def METHOD_NAME(root, index):
status = root + "/stats?sid=%d" % index
try:
r = requests.get(status, timeout=timeout)
except (RequestException, socket.timeout):
raise ParseError
if r.status_code != 200:
raise ParseError
entries = {}
try:
elm = ET.fromstring(r.content)
except ET.ParseError:
raise ParseError
for e in elm:
entries[e.tag.lower()] = e.text
stream = root + entries["streampath"]
current = entries["currentlisteners"]
peak = entries["peaklisteners"]
peak = str(int(peak))
current = str(int(current))
return Stream(stream, current, peak) | [
19,
919
] |
def METHOD_NAME(linter: PyLinter) -> None:
linter.register_checker(NestedMinMaxChecker(linter)) | [
372
] |
def METHOD_NAME(self, event_type: Optional[CBEventType] = None) -> List[CBEvent]:
"""Get all events for a specific event type."""
if event_type is not None:
return self._event_pairs_by_type[event_type]
return self._sequential_events | [
19,
239
] |
def METHOD_NAME(self, session, uid):
issuer_url = session["client"].wf.discovery_query(uid)
provider_info = session["client"].provider_config(issuer_url)
session["client"].register(provider_info["registration_endpoint"],
**self.client_metadata) | [
372,
41,
2111,
2275
] |
def METHOD_NAME(self, path: str, dispatcher: SimpleXMLRPCDispatcher) -> SimpleXMLRPCDispatcher: ... | [
238,
5780
] |
def METHOD_NAME(self):
wrong = '/(.*)/(\\1/){boom}'
right = '/(.*)/(\\1/){3,}'
self.oracle.set_patterns([wrong, right])
result = self.oracle.ignores('http://www.example.com/foo/foo/foo/foo/foo')
self.assertEqual(result, right) | [
9,
9083,
7234,
652,
41,
46,
511
] |
def METHOD_NAME(self):
config_args = []
for b in ["opencl", "hwloc", "ze", "hip", "cuda"]:
config_args.extend(self.with_or_without(b))
if self.spec.satisfies("%oneapi"):
config_args += ["--with-openmp-flags=-fiopenmp -fopenmp-targets=spir64"]
if self.spec.variants["hip-platform"].value == "amd":
config_args += ["--with-hip-platform=amd"]
if self.spec.variants["hip-platform"].value == "nvidia":
config_args += ["--with-hip-platform=nvidia"]
return config_args | [
111,
335
] |
def METHOD_NAME(pathname):
if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 5):
return globlib.METHOD_NAME(pathname)
else:
return globlib.METHOD_NAME(pathname, recursive=True) | [
1825
] |
def METHOD_NAME(self, dek_template): | [
9,
2196,
443
] |
def METHOD_NAME(
q1: int,
q2: int,
args: sim.CliffordTableauSimulationState,
operations: List[ops.Operation],
qubits: List['cirq.Qid'],
):
protocols.act_on(ops.CNOT, args, qubits=[qubits[q1], qubits[q2]], allow_decompose=False)
operations.append(ops.CNOT(qubits[q1], qubits[q2])) | [
9615
] |
def METHOD_NAME(self):
pll = S6PLL()
pll.register_clkin(Signal(), 100e6)
for i in range(pll.nclkouts_max):
pll.create_clkout(ClockDomain("clkout{}".format(i)), 200e6)
pll.compute_config() | [
9,
-1,
8199
] |
def METHOD_NAME(self, value):
"""Try using ``value`` as a possible candidate improvement.
Return True if it works.
"""
value = self.make_immutable(value)
self.check_invariants(value)
if not self.left_is_better(value, self.current):
if value != self.current and (value == value):
self.debug(
"Rejected %r as worse than self.current=%r" % (value, self.current)
)
return False
if value in self.__seen:
return False
self.__seen.add(value)
if self.__predicate(value):
self.debug("shrinking to %r" % (value,))
self.changes += 1
self.current = value
return True
return False | [
11601
] |
def METHOD_NAME(subject: _MockObject) -> Any:
"""Unwrap mock if *subject* is decorated by mocked object.
If not decorated, returns given *subject* itself.
"""
if ismock(subject) and subject.__sphinx_decorator_args__:
return subject.__sphinx_decorator_args__[0]
else:
return subject | [
-1
] |
def METHOD_NAME():
gdim = 3
shape = "tetrahedron"
degree = 1
domain = ufl.Mesh(element("Lagrange", shape, degree, gdim=gdim, rank=1))
x = [[0., 0., 0.], [0., 1., 0.], [0., 1., 1.], [1, 1., 1]]
cells = [[0, 1, 2, 3]]
mesh = create_mesh(MPI.COMM_WORLD, cells, x, domain)
assert squared_distance(mesh, mesh.topology.dim, [0], [-1.0, -1.0, -1.0]) == pytest.approx(3.0)
assert squared_distance(mesh, mesh.topology.dim, [0], [-1.0, 0.5, 0.5]) == pytest.approx(1.0)
assert squared_distance(mesh, mesh.topology.dim, [0], [0.5, 0.5, 0.5]) == pytest.approx(0.0) | [
9,
1886,
8128
] |
def METHOD_NAME(self, value):
# Always update ResultCapture date when this field is modified
self.setResultCaptureDate(DateTime())
# Ensure result integrity regards to None, empty and 0 values
val = str('' if not value and value != 0 else value).strip()
self.getField('Result').set(self, val) | [
0,
1571
] |
async def METHOD_NAME(coresys: CoreSys, api_client):
"""Test resolution manager suggestion apply api."""
coresys.resolution.suggestions = clear_backup = Suggestion(
SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM
)
assert SuggestionType.CLEAR_FULL_BACKUP == coresys.resolution.suggestions[-1].type
await api_client.delete(f"/resolution/suggestion/{clear_backup.uuid}")
assert clear_backup not in coresys.resolution.suggestions | [
9,
58,
1328,
13458,
1477
] |
def METHOD_NAME(image, height, width, resize_method,
central_fraction=0.875, scope=None):
with tf.compat.v1.name_scope('eval_image'):
if resize_method == 'crop':
shape = tf.shape(input=image)
image = tf.cond(pred=tf.less(shape[0], shape[1]),
true_fn=lambda: tf.image.resize(image,
tf.convert_to_tensor(value=[256, 256 * shape[1] / shape[0]],
dtype=tf.int32)),
false_fn=lambda: tf.image.resize(image,
tf.convert_to_tensor(value=[256 * shape[0] / shape[1], 256],
dtype=tf.int32)))
shape = tf.shape(input=image)
y0 = (shape[0] - height) // 2
x0 = (shape[1] - width) // 2
distorted_image = tf.image.crop_to_bounding_box(image, y0, x0, height, width)
distorted_image.set_shape([height, width, 3])
return distorted_image
else: # bilinear
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Crop the central region of the image with an area containing 87.5% of
# the original image.
if central_fraction:
image = tf.image.central_crop(image, central_fraction=central_fraction)
if height and width:
# Resize the image to the specified height and width.
image = tf.expand_dims(image, 0)
image = tf.image.resize(image, [height, width],
method=tf.image.ResizeMethod.BILINEAR)
image = tf.squeeze(image, [0])
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
return image | [
1171,
660
] |
def METHOD_NAME(self):
"""Enables to get outputs of the operator by evaluating it
Returns
--------
outputs : OutputsDefaultValue
"""
return super().METHOD_NAME | [
141
] |
def METHOD_NAME(self):
REPO_PATH = '/not/here'
FILE_STATUS = dedent("""\
## the-branch
""".rstrip()).replace('\n', '\x00')
LAST_COMMIT = 'd9b34774 The last commit message'
STASH_LIST = dedent("""\
stash@{0}: On fix-1055: /not/here/but_like_a_filename.py
stash@{1}: On fix-1046: fix-1048
""".rstrip())
interface, view = self.create_status_interface(
REPO_PATH, FILE_STATUS, LAST_COMMIT, STASH_LIST
)
# The interface updates async.
yield lambda: view.find('fix-1048', 0, sublime.LITERAL)
actual = view.find_all_results()
expected = []
self.assertEqual(actual, expected) | [
9,
1356,
6907,
1190,
220,
654,
17490
] |
def METHOD_NAME(
self,
diagram: Diagram,
callback: Callable[[set[Presentation]], None] | None = None,
) -> None:
self._paste(diagram, METHOD_NAME, callback) | [
8151,
324
] |
def METHOD_NAME():
assert IndirectReference(1, 2) == IndirectReference(1, 2)
assert IndirectReference(1, 2) != IndirectReference(1, 3)
assert IndirectReference(1, 2) != IndirectObjectDef(1, 2)
assert IndirectReference(1, 2) != (1, 2)
assert IndirectObjectDef(1, 2) == IndirectObjectDef(1, 2)
assert IndirectObjectDef(1, 2) != IndirectObjectDef(1, 3)
assert IndirectObjectDef(1, 2) != IndirectReference(1, 2)
assert IndirectObjectDef(1, 2) != (1, 2) | [
9,
2500,
2925
] |
def METHOD_NAME(chdir, capsys):
"""Validate that we can run a hook's with an arg."""
chdir('cli-fixtures')
with pytest.raises(SystemExit):
tackle('cli-hook-no-context.yaml', 'run', 'help')
out, err = capsys.readouterr()
assert "usage: tackle" in out
# with capsys.disabled():
# print(out) | [
9,
559,
615,
1021,
718,
40
] |
def METHOD_NAME(self):
"""Allows to connect fields_containerB input to the operator.
Parameters
----------
my_fields_containerB : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.cplx_multiply()
>>> op.inputs.fields_containerB.connect(my_fields_containerB)
>>> # or
>>> op.inputs.fields_containerB(my_fields_containerB)
"""
return self._fields_containerB | [
342,
224,
1484
] |
def METHOD_NAME(self):
v0 = View3DAttributes()
v0.viewNormal = (-0.497104, 0.575734, 0.649168)
v0.focus = (5, 0.353448, 2.5)
v0.viewUp = (0.382047, 0.81696, -0.431991)
v0.viewAngle = 30
v0.parallelScale = 6.23027
v0.nearPlane = -11.2018
v0.farPlane = 11.2018
v0.perspective = 1
v1 = View3DAttributes()
v1.viewNormal = (0.384151, 0.536474, 0.751414)
v1.focus = (5, 0.353448, 2.5)
v1.viewUp = (0.028325, 0.80663, -0.590377)
v1.viewAngle = 30
v1.parallelScale = 6.23027
v1.nearPlane = -11.2018
v1.farPlane = 11.2018
v1.perspective = 1
v2 = View3DAttributes()
v2.viewNormal = (0.784207, 0.514617, 0.346682)
v2.focus = (5.47361, -0.0795047, 2.07135)
v2.viewUp = (-0.356176, 0.830834, -0.427613)
v2.viewAngle = 30
v2.parallelScale = 4.98178
v2.nearPlane = -11.2018
v2.farPlane = 11.2018
v2.perspective = 1
v3 = View3DAttributes()
v3.viewNormal = (0.919312, 0.388049, 0.0654506)
v3.focus = (5.47361, -0.0795047, 2.07135)
v3.viewUp = (-0.381256, 0.919449, -0.0962209)
v3.viewAngle = 30
v3.parallelScale = 3.58871
v3.nearPlane = -11.2018
v3.farPlane = 11.2018
v3.perspective = 1
v4 = View3DAttributes()
v4.viewNormal = (0.75288, 0.461699, -0.469047)
v4.focus = (5.47361, -0.0795047, 2.07135)
v4.viewUp = (-0.284761, 0.87102, 0.400295)
v4.viewAngle = 30
v4.parallelScale = 3.58871
v4.nearPlane = -11.2018
v4.farPlane = 11.2018
v4.perspective = 1
v5 = View3DAttributes()
v5.viewNormal = (0.491718, 0.545418, -0.678773)
v5.focus = (5.44464, 0.329047, 2.37865)
v5.viewUp = (-0.322383, 0.83816, 0.439951)
v5.viewAngle = 30
v5.parallelScale = 5.6009
v5.nearPlane = -11.2018
v5.farPlane = 11.2018
v5.perspective = 1
# Save the view control points and weights as part of this object.
self.cpts = (v0, v1, v2, v3, v4, v5)
self.x=[]
for j in range(len(self.cpts)):
self.x = self.x + [float(j) / float(len(self.cpts) - 1)] | [
176,
4632
] |
def METHOD_NAME():
ak_array = ak.highlevel.Array(
[
[[2, 3, 5], [], [], [7, 11, 13]],
[[17, 19, 23], [], [], [29, 31, 37]],
]
)
assert ak.operations.prod(ak_array, axis=-1).to_list() == [
[2 * 3 * 5, 1, 1, 7 * 11 * 13],
[17 * 19 * 23, 1, 1, 29 * 31 * 37],
]
assert ak.operations.prod(ak_array, axis=-2).to_list() == [
[2 * 7, 3 * 11, 5 * 13],
[17 * 29, 19 * 31, 23 * 37],
]
assert ak.operations.prod(ak_array, axis=-3).to_list() == [
[2 * 17, 3 * 19, 5 * 23],
[],
[],
[7 * 29, 11 * 31, 13 * 37],
] | [
9,
2395,
2227,
199
] |
def METHOD_NAME(self) -> str:
"""
The link identifier.
"""
return pulumi.get(self, "link_identifier") | [
548,
769
] |
def METHOD_NAME(
y_true,
y_probas,
title="Lift Curve",
ax=None,
figsize=None,
title_fontsize="large",
text_fontsize="medium",
pos_label=None,
):
"""
This method is copied from scikit-plot package.
See https://github.com/reiinakano/scikit-plot/blob/2dd3e6a76df77edcbd724c4db25575f70abb57cb/scikitplot/metrics.py#L1133
Generates the Lift Curve from labels and scores/probabilities
The lift curve is used to determine the effectiveness of a
binary classifier. A detailed explanation can be found at
http://www2.cs.uregina.ca/~dbd/cs831/notes/lift_chart/lift_chart.html.
The implementation here works only for binary classification.
Args:
y_true (array-like, shape (n_samples)):
Ground truth (correct) target values.
y_probas (array-like, shape (n_samples, n_classes)):
Prediction probabilities for each class returned by a classifier.
title (string, optional): Title of the generated plot. Defaults to
"Lift Curve".
ax (:class:`matplotlib.axes.Axes`, optional): The axes upon which to
plot the learning curve. If None, the plot is drawn on a new set of
axes.
figsize (2-tuple, optional): Tuple denoting figure size of the plot
e.g. (6, 6). Defaults to ``None``.
title_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"large".
text_fontsize (string or int, optional): Matplotlib-style fontsizes.
Use e.g. "small", "medium", "large" or integer-values. Defaults to
"medium".
pos_label (optional): Label for the positive class.
Returns:
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was
drawn.
Example:
>>> lr = LogisticRegression()
>>> lr = lr.fit(X_train, y_train)
>>> y_probas = lr.predict_proba(X_test)
>>> plot_lift_curve(y_test, y_probas)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_lift_curve.png
:align: center
:alt: Lift Curve
"""
y_true = np.array(y_true)
y_probas = np.array(y_probas)
classes = np.unique(y_true)
if len(classes) != 2:
raise ValueError(f"Cannot calculate Lift Curve for data with {len(classes)} category/ies")
# Compute Cumulative Gain Curves
percentages, gains1 = _cumulative_gain_curve(y_true, y_probas[:, 0], classes[0])
percentages, gains2 = _cumulative_gain_curve(y_true, y_probas[:, 1], classes[1])
percentages = percentages[1:]
gains1 = gains1[1:]
gains2 = gains2[1:]
gains1 = gains1 / percentages
gains2 = gains2 / percentages
if ax is None:
_, ax = plt.subplots(1, 1, figsize=figsize)
ax.set_title(title, fontsize=title_fontsize)
label0 = f"Class {classes[0]}"
label1 = f"Class {classes[1]}"
# show (positive) next to the positive class in the legend
if pos_label:
if pos_label == classes[0]:
label0 = f"Class {classes[0]} (positive)"
elif pos_label == classes[1]:
label1 = f"Class {classes[1]} (positive)"
# do not mark positive class if pos_label is not in classes
ax.plot(percentages, gains1, lw=3, label=label0)
ax.plot(percentages, gains2, lw=3, label=label1)
ax.plot([0, 1], [1, 1], "k--", lw=2, label="Baseline")
ax.set_xlabel("Percentage of sample", fontsize=text_fontsize)
ax.set_ylabel("Lift", fontsize=text_fontsize)
ax.tick_params(labelsize=text_fontsize)
ax.grid("on")
ax.legend(loc="best", fontsize=text_fontsize)
return ax | [
1288,
8873,
461
] |
def METHOD_NAME(self):
return self._present_fans | [
2541,
6818
] |
def METHOD_NAME( self, childParameter ) : | [
186,
511,
2275
] |
def METHOD_NAME(email='[email protected]', password='foo') -> User:
user = User(email=email, password=password)
user.individuals.add(Person(name='Timmy'))
session_external = Session(user=user, type=SessionType.External)
session_internal = Session(user=user, type=SessionType.Internal)
db.session.add(user)
db.session.add(session_internal)
db.session.add(session_external)
db.session.commit()
return user | [
129,
21
] |
def METHOD_NAME(self, args_dict):
self.set_custom_option("total_pulses", 0)
self.sensor.clear_totals() | [
537,
395,
2276
] |
def METHOD_NAME(self, disk_static_func, no_path_validation, execute_task):
for _ in range(0, 2):
task = execute_task('test_most_used')
assert task.entries[0].get('path') in [
'/data/90GB,100GB',
'/data/90.5GB,100GB',
], "path %s not in list" % task.entries[0].get('path') | [
9,
759,
1304
] |
def METHOD_NAME(self):
with self.assertRaises(WrongState):
create_session_for_reaction_request(self.signal) | [
9,
129,
240,
909,
551
] |
def METHOD_NAME(self, **kwargs: Any) -> Iterable["_models.Operation"]:
"""Lists all of the available IoT Hub REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Operation or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.iothub.v2021_07_01.models.Operation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-07-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01"))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDetails, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data) | [
245
] |
def METHOD_NAME(self, x, y):
self.sendUpdate("setXY", [x, y]) | [
227,
0,
695
] |
def METHOD_NAME(self):
try:
self.browser.find("#uniccmp").remove()
except Exception:
pass | [
950,
1934
] |
def METHOD_NAME(self, user_inputs, unit_id_emb):
feed_dict = {}
for i in range(69):
feed_dict["user_emb_{}".format(i)] = user_inputs[i]
feed_dict["unit_id_emb"] = unit_id_emb
res = self.exe.run(self.prediction_model,
feed=feed_dict,
fetch_list=self.prediction_model_fetch_vars)
return res[0] | [
1407,
3648
] |
def METHOD_NAME(self, soup: BeautifulSoup) -> str:
# The soup here is the result of `self.get_soup(self.novel_url)`
pass | [
214,
3866
] |
def METHOD_NAME():
"""Test the full task pipeline"""
class FakeModel:
def generate_text(
self, inputs, max_length=100, stop_string=None, output_regex=None
):
return "\n\treturn True\na"
model = FakeModel()
task = PythonCodingTask()
score_data = task.evaluate_model(model)
assert score_data.score_dict["compiled"] == 1, score_data
assert 1 > score_data.score_dict["correct"] > 0, score_data
print(task.get_task_details())
print(score_data) | [
9,
758
] |
def METHOD_NAME(
self,
pot: 'AccountingPot',
event: 'EvmEvent',
other_events: Iterator['EvmEvent'],
) -> None:
"""
Process deposits and withdrawals from protocols that allow to deposit multiple assets
in return for a wrapped token. There are multiple events that we have to consume from
the iterator.
The receive wrapped event needs to have in the extra data field a key `deposit_events_num`
marking the number of events to consume.
The return wrapped event needs to have the key `withdrawal_events_num` with the number
of events in the return event.
"""
method: Literal['acquisition', 'spend']
if event.event_type == HistoryEventType.RECEIVE:
# Pool token is received, which means it is a deposit
events_to_consume = event.extra_data.get('deposit_events_num', None) if event.extra_data is not None else None # noqa: E501
method = 'spend'
else: # Withdrawal
events_to_consume = event.extra_data.get('withdrawal_events_num', None) if event.extra_data is not None else None # noqa: E501
method = 'acquisition'
if events_to_consume is None:
log.debug(
f'Could not find the number of events to consume for a {event.counterparty} '
f'deposit/withdrawal transaction {event.tx_hash.hex()}',
)
return
# Consume the events
for idx in range(events_to_consume):
next_event = next(other_events, None)
if next_event is None:
log.debug(f'Could not consume event nr. {idx} for {event.counterparty} deposit/withdrawal') # noqa: E501
return
if next_event.balance.amount == ZERO:
continue
pot.add_asset_change_event(
method=method,
event_type=AccountingEventType.TRANSACTION_EVENT,
notes=next_event.notes if next_event.notes else '',
location=next_event.location,
timestamp=next_event.get_timestamp_in_sec(),
asset=next_event.asset,
amount=next_event.balance.amount,
taxable=False, # Deposits and withdrawals are not taxable
count_entire_amount_spend=False,
count_cost_basis_pnl=False,
extra_data={'tx_hash': next_event.tx_hash.hex()},
) | [
356,
4634,
894,
9149
] |
def METHOD_NAME():
ipython_display = MagicMock()
return SessionManager(ipython_display) | [
19,
240,
722
] |
def METHOD_NAME(self):
current_dir = pathlib.Path(__file__).resolve().parent
config_file = current_dir.joinpath("mysql_test.yaml")
workflow_config = load_config_file(config_file)
del workflow_config["source"]["serviceConnection"]
workflow_config["workflowConfig"]["openMetadataServerConfig"][
"forceEntityOverwriting"
] = True
with self.assertRaises(AttributeError):
MetadataWorkflow.create(workflow_config) | [
9,
180,
654,
549,
550,
61,
3345
] |
def METHOD_NAME(dir_path: str) -> str:
"""
Remove directory later when the process ends.
Removal failures are ignored.
:param dir_path: Path to directory
:return: The absolute path to the directory.
"""
dir_path = os.path.abspath(dir_path)
def _remove_dir_later():
# noinspection PyBroadException
try:
shutil.rmtree(dir_path, ignore_errors=True)
except BaseException:
pass
atexit.register(_remove_dir_later)
return dir_path | [
188,
1190,
7982
] |
def METHOD_NAME(mocker):
return mocker.patch('thefuck.entrypoints.not_configured._get_shell_pid',
new_callable=MagicMock) | [
2770,
2243
] |
def METHOD_NAME( self ) : | [
9,
634
] |
def METHOD_NAME (self):
"""
Delete file / dir
"""
if self.is_dir ():
try:
shutil.rmtree (self.full_name)
except Exception, e:
print "Failed to delete directory !", e
sys.exit (1)
else:
try:
os.unlink (self.full_name)
except Exception, e:
print "Failed to delete file !", e
sys.exit (1)
logging.info ('[delete] [%s] %s', self.otype, self.full_name)
self.full_name = ""
self.base_name = "" | [
34
] |
def METHOD_NAME(self, score_deleted, expected_completion):
self.call_scorable_block_completion_handler(self.block_key, score_deleted)
completion = BlockCompletion.objects.get(
user=self.user,
context_key=self.context_key,
block_key=self.block_key,
)
assert completion.completion == expected_completion | [
9,
1519,
7720,
1323
] |
def METHOD_NAME(self):
with session.begin():
task = data_setup.create_task()
out = run_client(['bkr', 'task-list', '--xml'])
self.assert_('<task name="%s">\n\t<params/>\n</task>\n' % task.name
in out, out) | [
9,
399,
432
] |
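
Each row above follows the two-column schema from the header: "text" holds a code snippet whose method name is masked as METHOD_NAME, and "ids" holds a short integer sequence (length 1 to 7). A minimal sketch of reading such a dump, assuming it is published as a Hugging Face dataset with exactly these columns; the dataset path and split name below are hypothetical:

from datasets import load_dataset

# Hypothetical dataset path and split; substitute the real ones.
ds = load_dataset("example/method-name-prediction", split="train")
for row in ds.select(range(3)):
    snippet = row["text"]  # code with the target name masked as METHOD_NAME
    ids = row["ids"]       # integer sequence of length 1-7 per the header
    print(len(snippet), ids)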