text (string, lengths 15-7.82k) | ids (sequence, lengths 1-7)
---|---
def METHOD_NAME(self) -> str:
"""
Gets the resource type.
"""
return pulumi.get(self, "type") | [
44
] |
def METHOD_NAME():
return | [
8704
] |
def METHOD_NAME(**kwargs):
if settings.RECAPTCHA_PUBLIC_KEY and settings.RECAPTCHA_PRIVATE_KEY:
if settings.USE_RECAPTCHA_V3:
score_threshold = get_setting('site', 'global', 'recaptchascorelimit')
try:
score_threshold = float(score_threshold)
except ValueError:
score_threshold = 0.5
if score_threshold > 1 or score_threshold < 0:
score_threshold = 0.5
recaptcha_field = ReCaptchaField(label='', widget=ReCaptchaV3)
# set required_score
recaptcha_field.widget.attrs.update({'required_score': score_threshold})
return recaptcha_field
return ReCaptchaField(label='')
return CaptchaField(**kwargs) | [
343,
-1,
101
] |
def METHOD_NAME(self):
for tout_str, exp_str, exp_secs in [ ('1s', '1 second', 1),
('10 sec', '10 seconds', 10),
('2h 1minute', '2 hours 1 minute', 7260),
('42', '42 seconds', 42) ]:
self._verify_tout(TestTimeout(tout_str), exp_str, exp_secs) | [
9,
659,
144
] |
def METHOD_NAME(s: Any, subber: Any = ..., repl: Any = ...): ... | [
3012
] |
def METHOD_NAME(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/NewRelic.Observability/monitors/{monitorName}/listHosts",
**self.url_parameters
) | [
274
] |
def METHOD_NAME(download_gcs_public_data, storage_provider, url, columns_nb, separator, has_header):
# inject temp file path that was downloaded by the test as URL
url = download_gcs_public_data if storage_provider == "local" else url
config = {
"format": "csv",
"dataset_name": "output",
"reader_options": {"sep": separator, "nrows": 42},
"provider": {"storage": storage_provider, "user_agent": False},
"url": url,
}
check_read(config, expected_columns=columns_nb) | [
9,
203,
280,
1609,
2275
] |
def METHOD_NAME(row_payoffs, col_payoffs, lrsnash_max_denom, lrsnash_path):
"""Find all Nash equilibria using the lrsnash solver.
`lrsnash` uses reverse search vertex enumeration on rational polytopes.
For more info, see: http://cgm.cs.mcgill.ca/~avis/C/lrslib/USERGUIDE.html#nash
Args:
row_payoffs: payoffs for row player
col_payoffs: payoffs for column player
lrsnash_max_denom: maximum denominator
lrsnash_path: path for temporary files
Yields:
(row_mixture, col_mixture), numpy vectors of float64s.
"""
num_rows, num_cols = row_payoffs.shape
game_file, game_file_path = tempfile.mkstemp()
try:
game_file = os.fdopen(game_file, "w")
# write dimensions
game_file.write("%d %d\n\n" % (num_rows, num_cols))
# write row-player payoff matrix as fractions
for row in range(num_rows):
game_file.write(
" ".join(to_fraction_str(row_payoffs[row], lrsnash_max_denom)) + "\n")
game_file.write("\n")
# write col-player payoff matrix as fractions
for row in range(num_rows):
game_file.write(
" ".join(to_fraction_str(col_payoffs[row], lrsnash_max_denom)) + "\n")
game_file.write("\n")
game_file.close()
lrs = subprocess.Popen([lrsnash_path or "lrsnash", "-s", game_file_path],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
col_mixtures = []
for line in lrs.stdout:
if len(line) <= 1 or line[:1] == b"*":
continue
line = np.asfarray([fractions.Fraction(x) for x in line.decode().split()])
if line[0] == 2: # col-player
col_mixtures.append(line[1:-1])
else: # row-player
row_mixture = line[1:-1]
# row-mixture forms a Nash with every col-mixture listed directly above
for col_mixture in col_mixtures:
yield (row_mixture, col_mixture)
col_mixtures = []
finally:
os.remove(game_file_path) | [
10020,
283
] |
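The snippet above calls a `to_fraction_str` helper that is not shown. A minimal sketch consistent with how it is used (one payoff row in, rational strings out; the project's actual implementation may differ):

```python
from fractions import Fraction

def to_fraction_str(row, max_denom):
    # lrsnash consumes rational payoffs, so approximate each float entry
    # by a fraction whose denominator is bounded by max_denom
    return [str(Fraction(x).limit_denominator(max_denom)) for x in row]
```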
def METHOD_NAME(date):
"""Update publication information from INSPIRE for all records updated *on* a certain date."""
inspire_ids = get_inspire_records_updated_on(date)
for inspire_id in inspire_ids:
update_record_info.delay(inspire_id)
log.info('Sent task for Inspire ID {}'.format(inspire_id)) | [
86,
2530,
100,
69
] |
def METHOD_NAME(self, pressure, temperature, volume, params):
"""
Since this equation of state does not contain temperature effects, simply return zero. :math:`[unitless]`
"""
return 0.0 | [
-1,
511
] |
def METHOD_NAME(self) -> Any:
"""
[Required] Additional attributes of the entity.
"""
return pulumi.get(self, "data_version_base_properties") | [
365,
281,
414,
748
] |
def METHOD_NAME(self, _):
# iterate through a copied list so we can delete from the original
for key, value in list(self._wait_rx.items()):
request, stream = value
if stream.done():
del self._wait_rx[key]
log.debug('%s %s', 'removing', req_str(request))
for key, value in list(self._wait_tx.items()):
request, stream = value
if stream.done():
del self._wait_tx[key]
log.debug('%s %s', 'removing', req_str(request))
if not self.is_full:
self._not_full.set() | [
919,
676
] |
def METHOD_NAME(self, playlist):
pattern_text = CONFIG.default_pattern
dialog = ExportToFolderDialog(self.plugin_window, pattern_text)
if dialog.run() == Gtk.ResponseType.OK:
directory = dialog.directory_chooser.get_filename()
pattern = FileFromPattern(dialog.pattern_entry.get_text())
task = Task("Export", _("Export Playlist to Folder"),
stop=self.__cancel_copy)
copool.add(self.__copy_songs, task,
playlist.songs, directory, pattern, self.plugin_window,
funcid="export-playlist-folder")
dialog.destroy() | [
2793,
556
] |
def METHOD_NAME(iterable: Generator[Any, None, None]) -> AsyncGenerator[Any, None]:
async def _gen_wrapper() -> AsyncGenerator[Any, None]:
# Wrap the generator such that each iteration runs
# in the executor. Then rationalise the raised
# errors so that it ends.
def _inner() -> Any:
# https://bugs.python.org/issue26221
# StopIteration errors are swallowed by the
# run_in_exector method
try:
return next(iterable)
except StopIteration:
raise StopAsyncIteration()
loop = asyncio.get_running_loop()
while True:
try:
yield await loop.run_in_executor(None, copy_context().run, _inner)
except StopAsyncIteration:
return
return _gen_wrapper() | [
22,
164,
2439
] |
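A short usage sketch for the wrapper above; `run_sync_iterable` is a stand-in for the masked method name, and the wrapper's own imports (`asyncio`, `copy_context`) are assumed to be in scope:

```python
import asyncio

def numbers():
    yield from range(3)

async def main():
    # each next() call runs in the default executor, so the event loop stays responsive
    async for n in run_sync_iterable(numbers()):
        print(n)  # 0, 1, 2

asyncio.run(main())
```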
def METHOD_NAME(self):
return semver.compare(self.version, "1.0.0-alpha") >= 0 | [
3392,
894,
4330
] |
def METHOD_NAME(self, interface):
"""Tests that create_initial_state works with a state-prep operation."""
prep_op = self.DefaultPrep(qml.math.array([1 / 2] * 4, like=interface), wires=[0, 1])
state = create_initial_state([0, 1], prep_operation=prep_op)
assert qml.math.allequal(state, [1 / 2] * 4)
assert qml.math.get_interface(state) == interface | [
9,
129,
2471,
551,
41,
551,
48
] |
def METHOD_NAME(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Communication/communicationServices/{communicationServiceName}/regenerateKey",
**self.url_parameters
) | [
274
] |
def METHOD_NAME(
self, method, path, schema, timeout=DEFAULT_TIMEOUT, params=None
): # pylint:disable=too-many-arguments
"""
Send a Canvas API request, and retry it if there are OAuth problems.
See BasicClient.send() for documentation of parameters, return value
and exceptions raised.
:raise OAuth2TokenError: if the request fails because our Canvas API
access token for the user is missing, expired, or has been deleted
"""
access_token = self._oauth2_token_service.get().access_token
return self._client.METHOD_NAME(
method,
path,
schema,
timeout,
params,
headers={"Authorization": f"Bearer {access_token}"},
) | [
353
] |
def METHOD_NAME(self):
"""test issue #151"""
Profile().populate_random()
Website().delete() | [
9,
34,
3646
] |
def METHOD_NAME():
text = "nothing tainted"
value_parts, sources = taint_ranges_as_evidence_info(text)
assert value_parts == [{"value": text}]
assert sources == [] | [
9,
10996,
2149,
947,
6072,
100,
5996
] |
def METHOD_NAME(self, outfile):
self._lineno += 1
outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno)) | [
77,
4155
] |
def METHOD_NAME(self):
dbconnector.dedicated_dbs['CONFIG_DB'] = os.path.join(mock_db_path, 'empty_config_db')
db = Db()
runner = CliRunner()
# add
result = runner.invoke(
config.config.commands["nvgre-tunnel"].commands["add"],
["tunnel_1", "--src-ip", "10.0.0.1"], obj=db
)
logger.debug("\n" + result.output)
logger.debug(result.exit_code)
assert result.exit_code == SUCCESS
result = runner.invoke(
config.config.commands["nvgre-tunnel"].commands["add"],
["tunnel_2", "--src-ip", "10.0.0.2"], obj=db
)
logger.debug("\n" + result.output)
logger.debug(result.exit_code)
assert result.exit_code == SUCCESS
# verify
self.verify_output(db, runner, "nvgre-tunnel", assert_show_output.show_nvgre_tunnels)
# delete
result = runner.invoke(
config.config.commands["nvgre-tunnel"].commands["delete"],
["tunnel_1"], obj=db
)
logger.debug("\n" + result.output)
logger.debug(result.exit_code)
assert result.exit_code == SUCCESS
result = runner.invoke(
config.config.commands["nvgre-tunnel"].commands["delete"],
["tunnel_2"], obj=db
)
logger.debug("\n" + result.output)
logger.debug(result.exit_code)
assert result.exit_code == SUCCESS
# verify
self.verify_output(db, runner, "nvgre-tunnel", assert_show_output.show_nvgre_tunnel_empty) | [
9,
16510,
16748,
238,
1269
] |
def METHOD_NAME(cluster, testNodeId, testNodeArgs, resultMsgs):
testNode = None
testResult = False
resultDesc = "!!!BUG IS CONFIRMED ON TEST CASE #{} ({})".format(
testNodeId,
testNodeArgs
)
try:
Print(
"Launch node #{} to execute test scenario: {}".format(
testNodeId,
testNodeArgs
)
)
testNode = cluster.getNode(testNodeId)
assert not testNode.verifyAlive() # resets pid so relaunch works
testNode.relaunch(addSwapFlags={"--terminate-at-block": "9999999"})
# Wait for node to start up.
time.sleep(3)
# Check the node stops at the correct block.
checkStatus(testNode, testNodeArgs)
# Kill node after use.
if not testNode.killed:
assert testNode.kill(signal.SIGTERM)
# Replay the blockchain for the node that just finished,
# also checking it stops at the correct block.
checkReplay(testNode, testNodeArgs)
resultDesc = "!!!TEST CASE #{} ({}) IS SUCCESSFUL".format(
testNodeId,
testNodeArgs
)
testResult = True
finally:
Print(resultDesc)
resultMsgs.append(resultDesc)
# Kill node after use.
if testNode and not testNode.killed:
assert testNode.kill(signal.SIGTERM)
return testResult | [
750,
9
] |
def METHOD_NAME(self, state=None):
if self.chan_interlock_state is None:
return
if state is None:
state = self.chan_interlock_state.getValue()
self.interlock_set = state
self.log.debug(
"P11 DetectorDistance / INTERLOCK is %s"
% (self.interlock_set and "SET" or "NOT SET")
) | [
86,
14389,
551
] |
def METHOD_NAME(self):
with self.assertRaises(ValueError):
voltage_to_uint16(np.zeros(0), 0, 0, 0)
linspace_voltage = np.linspace(0, 1, 128)
with self.assertRaises(ValueError):
voltage_to_uint16(linspace_voltage, 0.9, 0, 1)
with self.assertRaises(ValueError):
voltage_to_uint16(linspace_voltage, 1.1, -1, 1)
expected_data = np.arange(0, 128, dtype=np.uint16)
received_data = voltage_to_uint16(linspace_voltage, 0.5, 0.5, 7)
self.assertTrue(np.all(expected_data == received_data)) | [
9,
268,
24,
269
] |
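A minimal `voltage_to_uint16` consistent with these assertions, inferred from the test rather than taken from the project:

```python
import numpy as np

def voltage_to_uint16(voltage, output_amplitude, output_offset, resolution):
    # map [offset - amplitude, offset + amplitude] linearly onto
    # [0, 2**resolution - 1]; reject empty or out-of-range input
    if voltage.size == 0:
        raise ValueError("voltage array is empty")
    lo = output_offset - output_amplitude
    hi = output_offset + output_amplitude
    if voltage.min() < lo or voltage.max() > hi:
        raise ValueError("voltage out of range")
    scaled = (voltage - lo) / (2 * output_amplitude) * (2**resolution - 1)
    return np.rint(scaled).astype(np.uint16)
```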
async def METHOD_NAME(poller_args):
"""Check if all the services are launched after calling Poller.run()
"""
cfg = load_sq_config(config_file=poller_args.config)
poller = Worker(poller_args, cfg)
mks = await run_worker_with_mocks(poller)
# Check if all the functions have been called
for mk in mks:
mks[mk].assert_called() | [
9,
9013,
1794,
22
] |
def METHOD_NAME(bytesequence):
img = []
for i in range(len(bytesequence) // 4):
offset = i * 4
byte1 = bytesequence[offset + 0]
byte2 = bytesequence[offset + 1]
byte3 = bytesequence[offset + 2]
byte4 = bytesequence[offset + 3]
Y = byte1
U = byte2
Y1 = byte3
V = byte4
#B1 = 1.164 * (y1-16) + 2.018 * (u - 128)
#G1 = 1.164 * (y1-16) - 0.813 * (v - 128) - 0.391 * (u - 128)
#R1 = 1.164 * (y1-16) + 1.596*(v - 128)
#pix[j*2, i] = int(R), int(G), int(B)
#B2 = 1.164 * (y2-16) + 2.018 * (u - 128)
#G2 = 1.164 * (y2-16) - 0.813 * (v - 128) - 0.391 * (u - 128)
#R2 = 1.164 * (y2-16) + 1.596*(v - 128)
#pix[j*2+1, i] = int(R), int(G), int(B)
#R1 = y1 + 1.4075 * (v - 128)
#G1 = y1 - 0.3455 * (u - 128) - (0.7169 * (v - 128))
#B1 = y1 + 1.7790 * (u - 128)
#R2 = y2 + 1.4075 * (v - 128)
#G2 = y2 - 0.3455 * (u - 128) - (0.7169 * (v - 128))
#B2 = y2 + 1.7790 * (u - 128)
R1 = Y + 1.4075 * (V - 128)
G1 = Y - 0.3455 * (U - 128) - (0.7169 * (V - 128))
B1 = Y + 1.7790 * (U - 128)
R2 = Y1 + 1.4075 * (V - 128)
G2 = Y1 - 0.3455 * (U - 128) - (0.7169 * (V - 128))
B2 = Y1 + 1.7790 * (U - 128)
img.append(_clamp(int(R1), 0, 255))
img.append(_clamp(int(G1), 0, 255))
img.append(_clamp(int(B1), 0, 255))
img.append(_clamp(int(R2), 0, 255))
img.append(_clamp(int(G2), 0, 255))
img.append(_clamp(int(B2), 0, 255))
return img | [
14630,
24,
2310
] |
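The conversion above assumes a `_clamp` helper; the obvious one-liner:

```python
def _clamp(value, lo, hi):
    # keep each computed RGB channel inside the representable 0..255 range
    return max(lo, min(hi, value))
```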
def METHOD_NAME(*_): pass | [
8593,
6283
] |
def METHOD_NAME(pipeline_response):
deserialized = self._deserialize('ListVpnSiteLinkConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem) | [
297,
365
] |
def METHOD_NAME(self, timeout=None):
super().METHOD_NAME() | [
2831
] |
def METHOD_NAME(self) -> Point:
return self._engine_client.state.geometry.get_labware_position(self._labware_id) | [
19,
18126,
1540
] |
def METHOD_NAME(s):
return s in ['not-accessible', 'accessible-for-notify', 'read-only',
'read-write', 'read-create'] | [
4479,
7930,
232,
1089
] |
def METHOD_NAME(self):
"""
Sets up PRAGMAs.
"""
connection_created.connect(self.activate_pragmas_per_connection)
self.activate_pragmas_on_start()
# Log the settings file that we are running Kolibri with.
# Do this logging here, as this will be after Django has done its processing of
# Any environment variables or --settings command line arguments.
logger.info(
"Running Kolibri with the following settings: {settings}".format(
settings=os.environ["DJANGO_SETTINGS_MODULE"]
)
)
self.check_redis_settings()
# Do this to add an automapping from the Morango UUIDField to the UUIDFilter so that it automatically
# maps to this filter when using the UUIDField in a filter.
from morango.models import UUIDField
FilterSet.FILTER_DEFAULTS.update({UUIDField: {"filter_class": UUIDFilter}})
# Register any django apps that may have kolibri plugin
# modules inside them
registered_plugins.register_non_plugins(settings.INSTALLED_APPS)
# Fixes issue using OuterRef within Cast() that is patched in later Django version
# Patch from https://github.com/django/django/commit/c412926a2e359afb40738d8177c9f3bef80ee04e
# https://code.djangoproject.com/ticket/29142
F.relabeled_clone = lambda self, relabels: self | [
1338
] |
def METHOD_NAME(self):
"Retrieves the forwarding databases of the device"
columns = yield self.retrieve_columns(['dot1qTpFdbPort', 'dot1qTpFdbStatus'])
columns = self.translate_result(columns)
valid = (
row
for row in columns.values()
if row['dot1qTpFdbStatus'] not in ('self', 'invalid')
)
result = []
for row in valid:
index = row[0]
mac = index[1:]
mac = ':'.join("%02x" % o for o in mac[-6:])
port = row['dot1qTpFdbPort']
result.append((mac, port))
defer.returnValue(result) | [
19,
10607,
463
] |
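The MAC-decoding step inside the loop can be checked in isolation; the index tuple below is a hypothetical example value, not real SNMP output:

```python
# row[0] is the table index: FDB id first, then six MAC octets
index = (1, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55)
mac = ':'.join("%02x" % o for o in index[1:][-6:])
print(mac)  # 00:11:22:33:44:55
```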
def METHOD_NAME(bpr, tsd, t):
if has_window_ventilation(bpr) \
and not is_mechanical_ventilation_active(bpr, tsd, t):
# window ventilation in case of non-active mechanical ventilation
return True
else:
return False | [
137,
1092,
-1,
923
] |
def METHOD_NAME(inp: Tensor, column: int) -> Tensor:
mask = torch.ones_like(inp)
mask[:, column] = 0.0
return mask * inp | [
1050,
105,
2232
] |
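For reference, the effect of the masking helper on a small tensor; `zero_column` is a stand-in for the masked method name:

```python
import torch

def zero_column(inp: torch.Tensor, column: int) -> torch.Tensor:
    mask = torch.ones_like(inp)
    mask[:, column] = 0.0
    return mask * inp

x = torch.arange(6.0).reshape(2, 3)
print(zero_column(x, 1))
# tensor([[0., 0., 2.],
#         [3., 0., 5.]])
```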
def METHOD_NAME(self): | [
9,
3102
] |
def METHOD_NAME():
printer = create_printer(indent_level=4)
printer.line("test")
with printer.with_indent():
printer.line("test indent")
assert printer.result() == """test
    test indent""" | [
9,
3721,
4
] |
def METHOD_NAME(self):
return sn.assert_eq(self.job.exitcode, 0) | [
187,
136
] |
def METHOD_NAME(self):
self.is_locked = False | [
2671
] |
def METHOD_NAME():
# Any circuit with a (full connectivity) graph of disjoint lines should be directly
# executable after mapping onto a supporting device topology without the need for
# inserting any swaps.
circuit = construct_valid_circuit()
device = cirq.testing.construct_grid_device(7, 7)
device_graph = device.metadata.nx_graph
mapper = cirq.LineInitialMapper(device_graph)
mapping = mapper.initial_mapping(circuit)
mapped_circuit = circuit.transform_qubits(mapping)
device.validate_circuit(mapped_circuit) | [
9,
1205,
1708
] |
def METHOD_NAME(self): | [
9,
1751,
1030,
41,
654,
1751
] |
def METHOD_NAME(cls):
raise NotImplementedError | [
280,
485
] |
def METHOD_NAME(self): | [
9,
555,
4261,
1646,
246,
7509,
1272
] |
def METHOD_NAME(
cls,
metric: MetricConfiguration,
configuration: Optional[ExpectationConfiguration] = None,
execution_engine: Optional[ExecutionEngine] = None,
runtime_configuration: Optional[dict] = None,
):
"""Returns a dictionary of given metric names and their corresponding configuration, specifying the metric
types and their respective domains"""
dependencies: dict = super().METHOD_NAME(
metric=metric,
configuration=configuration,
execution_engine=execution_engine,
runtime_configuration=runtime_configuration,
)
if (
metric.metric_name
== f"column_values.z_score.under_threshold.{MetricPartialFunctionTypeSuffixes.CONDITION.value}"
):
dependencies[
f"column_values.z_score.{MetricPartialFunctionTypeSuffixes.MAP.value}"
] = MetricConfiguration(
metric_name=f"column_values.z_score.{MetricPartialFunctionTypeSuffixes.MAP.value}",
metric_domain_kwargs=metric.metric_domain_kwargs,
)
if (
metric.metric_name
== f"column_values.z_score.{MetricPartialFunctionTypeSuffixes.MAP.value}"
):
dependencies["column.mean"] = MetricConfiguration(
metric_name="column.mean",
metric_domain_kwargs=metric.metric_domain_kwargs,
)
dependencies["column.standard_deviation"] = MetricConfiguration(
metric_name="column.standard_deviation",
metric_domain_kwargs=metric.metric_domain_kwargs,
)
return dependencies | [
19,
355,
2410
] |
def METHOD_NAME(self):
self.comment.edit("Comment edited by PyGithub")
self.assertEqual(self.comment.body, "Comment edited by PyGithub") | [
9,
2004
] |
def METHOD_NAME(user):
if not user_can_be_archived(user):
msg = "User cannot be removed from service. " "Check that all services have another team member who can manage settings"
raise InvalidRequest(msg, 400)
permission_dao.remove_user_service_permissions_for_all_services(user)
service_users = dao_get_service_users_by_user_id(user.id)
for service_user in service_users:
db.session.delete(service_user)
user.organisations = []
user.auth_type = EMAIL_AUTH_TYPE
user.email_address = get_archived_email_address(user.email_address)
user.mobile_number = None
user.password = str(uuid.uuid4())
# Changing the current_session_id signs the user out
user.current_session_id = "00000000-0000-0000-0000-000000000000"
user.state = "inactive"
db.session.add(user) | [
3463,
1622,
21
] |
def METHOD_NAME(self):
self.assertRaises(SerializationError, JSONSerializer().loads, object())
self.assertRaises(SerializationError, JSONSerializer().loads, "")
self.assertRaises(SerializationError, JSONSerializer().loads, "{{") | [
9,
45,
2109,
168,
69,
557,
168
] |
def METHOD_NAME(x):
# Computes standard normal cumulative distribution function
return (1. + math.erf(x / math.sqrt(2.))) / 2. | [
387,
1889
] |
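A quick sanity check of the formula against known values of the standard normal CDF; `norm_cdf` is a stand-in for the masked name:

```python
import math

def norm_cdf(x):
    return (1. + math.erf(x / math.sqrt(2.))) / 2.

assert abs(norm_cdf(0.0) - 0.5) < 1e-12
assert abs(norm_cdf(1.96) - 0.975) < 1e-3  # the familiar 95% two-sided point
```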
def METHOD_NAME(tmp_path, extension=""):
create_file(
tmp_path
/ "briefcase"
/ "tools"
/ "android_sdk"
/ "cmdline-tools"
/ AndroidSDK.SDK_MANAGER_VER
/ "bin"
/ f"sdkmanager{extension}",
"Android SDK manager",
) | [
129,
966,
722
] |
def METHOD_NAME():
"""
Returns the data of all system wide available repositories.
:return: Repository information
"""
enabled = {os.path.realpath(path) for path in find_repos('/etc/leapp/repos.d') if path.strip()}
all_repos = {os.path.realpath(path) for path in find_repos('/usr/share/leapp-repository') if path.strip()}
repo_data = {}
for repo in all_repos:
repo_id = get_repository_metadata(repo).get('id', None)
if not repo_id:
continue
repo_data[repo_id] = {
'id': repo_id,
'path': repo,
'name': get_repository_name(repo),
'enabled': repo in enabled
}
return repo_data | [
19,
285,
2223,
365
] |
def METHOD_NAME(self):
block = ImageBasic()
value = block.to_python({})
self.assertEqual(image_alt_value(value), "") | [
9,
573,
654,
660,
623,
573
] |
def METHOD_NAME(baseobject,
arg1, arg2, arg3,
arg4=None, arg5=None, arg6=None,
name="Array", use_link=False):
"""Create an Array. DEPRECATED. Use 'make_array'."""
_wrn("Do not use this function directly; instead, use "
"'make_ortho_array', 'make_polar_array', "
"or 'make_circular_array'.")
return make_array(baseobject,
arg1, arg2, arg3,
arg4, arg5, arg6, use_link) | [
93,
877
] |
def METHOD_NAME(self):
f = dll._testfunc_callback_i_if
f.restype = c_int
f.argtypes = None
MyCallback = CFUNCTYPE(c_int, c_int)
def callback(value):
#print "called back with", value
return value
cb = MyCallback(callback)
result = f(self.wrap(-10), self.wrap(cb))
self.assertEqual(result, -18)
# test with prototype
f.argtypes = [c_int, MyCallback]
cb = MyCallback(callback)
result = f(self.wrap(-10), self.wrap(cb))
self.assertEqual(result, -18)
result = f(self.wrap(-10), self.wrap(cb))
self.assertEqual(result, -18)
AnotherCallback = CALLBACK_FUNCTYPE(c_int, c_int, c_int, c_int, c_int)
# check that the prototype works: we call f with wrong
# argument types
cb = AnotherCallback(callback)
self.assertRaises(ArgumentError, f, self.wrap(-10), self.wrap(cb)) | [
9,
2425
] |
def METHOD_NAME(self):
"""Returns the cleaned year value if valid, current year otherwise"""
return self.cleaned_data['year'] if self.is_valid() else _get_current_year() | [
12883,
842
] |
def METHOD_NAME(self, *keys: str) -> JSONSerializable:
if self._parameters is not None:
for key in keys:
if key in self._parameters:
return self._parameters[key]
return self._content.METHOD_NAME(*keys) | [
9033,
386
] |
def METHOD_NAME(self):
self.client.login(username="organizer", password="pass")
resp = self.client.post(reverse("competitions:submission_delete", kwargs={"pk": self.submission_1.pk}))
self.assertRedirects(resp, reverse("competitions:view", kwargs={"pk": self.competition.pk}))
self.assertEqual(0, len(CompetitionSubmission.objects.filter(pk=self.submission_1.pk))) | [
9,
34,
1179,
4268,
24,
1434,
217
] |
def METHOD_NAME(self) -> None: ... | [
1206,
560
] |
def METHOD_NAME(path):
fake_venv_bin = Path(__file__).parent / "fake-venv-bin"
path.mkdir(parents=True)
(path / "bin").symlink_to(fake_venv_bin)
return fake_venv_bin | [
129,
1278,
7415
] |
def METHOD_NAME(x):
return np.exp(-(x ** 2) / 2.0) / _norm_pdf_C | [
387,
4555
] |
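The snippet assumes a module-level constant `_norm_pdf_C`; for the expression to be the standard normal density it must be the usual Gaussian normalization:

```python
import numpy as np

_norm_pdf_C = np.sqrt(2 * np.pi)  # exp(-x**2 / 2) / _norm_pdf_C is then the N(0, 1) pdf
```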
def METHOD_NAME(request, codelist, owner_choices):
codelist_form = CodelistUpdateForm(request.POST, owner_choices=owner_choices)
reference_formset = ReferenceFormSet(
request.POST, queryset=codelist.references.all(), prefix="reference"
)
signoff_formset = SignOffFormSet(
request.POST, queryset=codelist.signoffs.all(), prefix="signoff"
)
codelist_form.is_valid()
if (
codelist_form.is_valid()
and reference_formset.is_valid()
and signoff_formset.is_valid()
):
return handle_valid(
request,
codelist,
codelist_form,
reference_formset,
signoff_formset,
owner_choices,
)
else:
return handle_invalid(
request, codelist_form, reference_formset, signoff_formset, owner_choices
) | [
276,
72
] |
def METHOD_NAME(model_name, file, expected_result):
"""
Full circle test with torchserve
"""
with open(Path(CURR_FILE_PATH) / "test_data" / file, "rb") as f:
response = requests.post(
url=f"http://localhost:8080/predictions/{model_name}", data=f
)
assert response.status_code == 200
result_entries = json.loads(response.text)
assert [result_entries] == expected_result | [
9,
1748,
41,
-1,
578,
72,
947
] |
def METHOD_NAME(self):
self.test_client_conversion_table()
self.test_categories()
self.dump_help()
if self.is_wallet_compiled():
self.wallet_help() | [
22,
9
] |
def METHOD_NAME(region=None):
"""Builds a client to the AWS EMR API."""
client = boto3.client('emr', region_name=region)
return client | [
19,
340
] |
def METHOD_NAME(columns_removed) -> str:
columns_removed_str = f"Column(s) removed: {columns_removed}\n"
return columns_removed_str | [
1951,
674,
671
] |
def METHOD_NAME():
play = Play.load({'hosts': ['host1', 'host2']})
assert play.get_name() == 'host1,host2' | [
9,
64,
654,
156,
3175,
771
] |
def METHOD_NAME(s):
"""Remove the outermost brackets for a model string
Parameter
---------
s : str
A model expression (but can be any text).
Returns
-------
cleaned : str
The outermost () is removed (if there is one).
Examples
--------
>>> clean_bracket('(xsphabs.gal * powlaw1d.pl)')
'xsphabs.gal * powlaw1d.pl'
"""
if s.startswith('(') and s.endswith(')'):
return s[1:-1]
return s | [
1356,
4283
] |
def METHOD_NAME(spec, state):
yield from rewards_helpers.run_test_full_but_partial_participation(spec, state) | [
9,
324,
6563,
2351,
2053,
6191
] |
def METHOD_NAME(deterministic=False):
"""Return stochkv hoc model filename"""
return os.path.join(
script_dir,
'stochkvcell%s.hoc' %
('_det' if deterministic else '')) | [
5092,
12723,
1147
] |
def METHOD_NAME(request, poll_id):
"""
View for registering a vote for the logged in user.
"""
poll = get_object_or_404(Poll, pk=poll_id)
try:
choice = poll.choices.get(pk=request.POST["choice"])
choice.METHOD_NAME(request.user)
except (KeyError, Choice.DoesNotExist):
messages.warning(request, "Du valgte ikke et svaralternativ")
except UserHasVoted:
messages.error(request, "Du har allerede stemt i denne avstemningen!")
else:
messages.success(request, f'Du har svart på "{poll.question}"')
redirect_to = request.POST.get("next", request.META.get("HTTP_REFERER", "/"))
return redirect(redirect_to) | [
9811
] |
def METHOD_NAME(signum, frame):
logging.debug('Caught signal %d, setting fan speed to max.', signum)
platform_chassis.get_fan(0).set_speed(DUTY_MAX)
sys.exit(0) | [
1519
] |
def METHOD_NAME(monkeypatch):
def add_recovery_func(task, module_name, params):
class FakeRecovery(object):
task_proxy = None
args = None
@classmethod
def call(cls, task_proxy, *args):
cls.task_proxy = task_proxy
cls.args = args
# Create a recovery module with the passed module name
module = types.ModuleType(module_name)
module.FakeRecovery = FakeRecovery
# Verify that the fully qualified name of the module is unique
full_name = "vdsm.storage.{}".format(module_name)
if full_name in sys.modules:
raise RuntimeError("Module {} already exists".format(module_name))
# Set task's recovery lookup to refer to our local Recovery class
monkeypatch.setattr(full_name, module, raising=False)
monkeypatch.setitem(sys.modules, full_name, module)
r = Recovery(module_name, module_name, "FakeRecovery", "call", params)
task.pushRecovery(r)
return FakeRecovery
return add_recovery_func | [
238,
1300
] |
def METHOD_NAME(self):
return self._final_result | [
1571
] |
def METHOD_NAME(cls, _db, storage_name=None):
"""Find the ExternalIntegration for the mirror by storage name."""
from .model import ExternalIntegration
qu = _db.query(ExternalIntegration).filter(
ExternalIntegration.goal==cls.STORAGE_GOAL,
ExternalIntegration.name==storage_name
)
integrations = qu.all()
if not integrations:
raise CannotLoadConfiguration(
"No storage integration with name '%s' is configured." % storage_name
)
[integration] = integrations
return integration | [
1911,
604,
156
] |
def METHOD_NAME(v):
# stat.ST_GID is a stat-tuple index, not a callable, and st_mode does not
# carry the group id; read it from the stat result directly
return v.stat.st_gid
1524
] |
async def METHOD_NAME():
CallContext.set("foo", "bar")
CallContext.set("foo2", "bar2")
foo = CallContext.get("foo")
assert foo == "bar"
CallContext.set("foo", "bar2")
assert CallContext.get("foo") == CallContext.get("foo2") | [
9,
1101,
198,
710
] |
def METHOD_NAME(
setup_py_path: str,
egg_info_dir: Optional[str],
no_user_config: bool,
) -> List[str]:
args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
args += ["egg_info"]
if egg_info_dir:
args += ["--egg-base", egg_info_dir]
return args | [
93,
11349,
4429,
100,
335
] |
def METHOD_NAME(line, is_int=False):
if is_int:
return re.sub('[^0-9.+-]', ' ', line)
else:
return re.sub('[^eE0-9.+-]', ' ', line) | [
188,
75,
256,
181
] |
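A usage sketch for the sanitizer above (`keep_numbers` stands in for the masked name); note the non-integer variant also keeps `e`/`E` so exponent notation survives:

```python
import re

def keep_numbers(line, is_int=False):
    if is_int:
        return re.sub('[^0-9.+-]', ' ', line)
    return re.sub('[^eE0-9.+-]', ' ', line)

print(keep_numbers("x = 1.5e-3"))           # '    1.5e-3'
print(keep_numbers("x = 12", is_int=True))  # '    12'
```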
def METHOD_NAME(self):
self._closed = True | [
1462
] |
def METHOD_NAME(queue, timeout):
"""
Background process runner
* Start an SMTP server
* Send the port number through the queue
* Receive message via SMTP
* Send message payload through the queue
"""
smtpserver = SmtpListener(("0.0.0.0", 0), queue)
port = smtpserver.get_port()
queue.put(port)
logger.debug("Waiting on port %s for OTP email", port)
asyncore.loop(5, False, None, timeout)
smtpserver.close() | [
19,
3595,
3562
] |
def METHOD_NAME(self):
return self._obj | [
19,
122
] |
def METHOD_NAME(self):
"""IPA server FQDN
Falls back to ``xmlrpc_uri`` and ``jsonrpc_uri``
"""
if self._server is not None:
return self._server
try:
METHOD_NAME = self.get(self.ipa_section, "server")
except NoOptionError:
# fall back
try:
uri = self.get(self.ipa_section, "xmlrpc_uri")
except NoOptionError:
try:
uri = self.get(self.ipa_section, "jsonrpc_uri")
except NoOptionError:
raise ValueError(
"server, xmlrpc_uri, and jsonrpc_uri missing"
)
METHOD_NAME = urlparse(uri).netloc
self._server = METHOD_NAME
return METHOD_NAME | [
163
] |
def METHOD_NAME(
auth_client,
is_employed,
is_customer_assignee,
is_customer,
expected,
status_code,
django_assert_num_queries,
):
user = auth_client.user
setup_customer_and_employment_status(
user=user,
is_assignee=is_customer_assignee,
is_customer=is_customer,
is_employed=is_employed,
is_external=False,
)
report = ReportFactory.create(duration=timedelta(hours=1))
project = report.task.project
ReportFactory.create(duration=timedelta(hours=2), task=report.task)
report2 = ReportFactory.create(duration=timedelta(hours=4))
project_2 = report2.task.project
task = TaskFactory(project=report.task.project)
ReportFactory.create(duration=timedelta(hours=2), task=task)
url = reverse("project-statistic-list")
with django_assert_num_queries(expected):
result = auth_client.get(
url, data={"ordering": "duration", "include": "customer"}
)
assert result.status_code == status_code
if status_code == status.HTTP_200_OK:
json = result.json()
expected_json = [
{
"type": "project-statistics",
"id": str(report2.task.project.id),
"attributes": {
"duration": "04:00:00",
"name": report2.task.project.name,
"amount-offered": str(project_2.amount_offered.amount),
"amount-offered-currency": project_2.amount_offered_currency,
"amount-invoiced": str(project_2.amount_invoiced.amount),
"amount-invoiced-currency": project_2.amount_invoiced_currency,
"estimated-time": "00:00:00",
"total-remaining-effort": "00:00:00",
},
"relationships": {
"customer": {
"data": {
"type": "customers",
"id": str(project_2.customer.id),
}
}
},
},
{
"type": "project-statistics",
"id": str(report.task.project.id),
"attributes": {
"duration": "05:00:00",
"name": report.task.project.name,
"amount-offered": str(project.amount_offered.amount),
"amount-offered-currency": project.amount_offered_currency,
"amount-invoiced": str(project.amount_invoiced.amount),
"amount-invoiced-currency": project.amount_invoiced_currency,
"estimated-time": "00:00:00",
"total-remaining-effort": "00:00:00",
},
"relationships": {
"customer": {
"data": {
"type": "customers",
"id": str(project.customer.id),
}
}
},
},
]
assert json["data"] == expected_json
assert json["meta"]["total-time"] == "09:00:00" | [
9,
155,
2652,
245
] |
def METHOD_NAME(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = await client._send_request(request)
<AsyncHttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs) | [
353,
377
] |
def METHOD_NAME(self):
self.measure_thread = threading.Thread(target=self.measure_thread)
self.thread = threading.Thread(target=self.run)
self.measure_thread.METHOD_NAME()
self.thread.METHOD_NAME()
print("GUI Thread Started!") | [
447
] |
def METHOD_NAME(
shot1: pymap.Shot, shot2: pymap.Shot, test_mapillary_specific: bool = True
) -> None:
assert shot1.id == shot2.id
assert np.allclose(shot1.pose.get_Rt(), shot2.pose.get_Rt(), 1e-5)
assert shot1.merge_cc == shot2.merge_cc
assert np.allclose(shot1.covariance, shot2.covariance)
assert_metadata_equal(shot1.metadata, shot2.metadata, test_mapillary_specific) | [
638,
5968,
926
] |
def METHOD_NAME():
"""Keep attempting to stop the daemon for 5 seconds, first using
SIGTERM, then using SIGKILL.
"""
pid = get_pid()
if not pid or not pid_exists(pid):
sys.exit("daemon not running")
sig = signal.SIGTERM
i = 0
while True:
sys.stdout.write('.')
sys.stdout.flush()
try:
os.kill(pid, sig)
except OSError as err:
if err.errno == errno.ESRCH:
print("\nstopped (pid %s)" % pid)
return
else:
raise
i += 1
if i == 25:
sig = signal.SIGKILL
elif i == 50:
sys.exit("\ncould not kill daemon (pid %s)" % pid)
time.sleep(0.1) | [
631
] |
def METHOD_NAME(params, transforms, profiles, data, **kwargs):
# Implements equation 4.19 in M. Landreman & R. Jorge (2020)
# doi:10.1017/S002237782000121X.
integrate = surface_integrals_map(transforms["grid"])
data["D_geodesic"] = transforms["grid"].replace_at_axis(
(
integrate(
data["|e_theta x e_zeta|"]
* mu_0
* data["J*B"]
/ data["|grad(psi)|"] ** 3
)
** 2
- integrate(
data["|e_theta x e_zeta|"] * data["|B|^2"] / data["|grad(psi)|"] ** 3
)
* integrate(
data["|e_theta x e_zeta|"]
* mu_0**2
* data["J*B"] ** 2
/ (data["|B|^2"] * data["|grad(psi)|"] ** 3),
)
)
/ (2 * jnp.pi) ** 6,
jnp.nan, # enforce manually because our integration replaces nan with 0
)
# Axis limit does not exist as ‖∇ ψ‖ terms dominate so that D_geodesic
# is of the order ρ⁻² near axis.
return data | [
227,
2623
] |
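Transcribed from the code above into a single expression (following the computation, not the paper's typesetting), with the flux-surface integrals ∮ dA including the |e_theta x e_zeta| area element:

$$
D_{\mathrm{geodesic}} = \frac{1}{(2\pi)^{6}}\left[\left(\oint \frac{\mu_{0}\,\mathbf{J}\cdot\mathbf{B}}{|\nabla\psi|^{3}}\,dA\right)^{2} - \oint \frac{|\mathbf{B}|^{2}}{|\nabla\psi|^{3}}\,dA\;\oint \frac{\mu_{0}^{2}\,(\mathbf{J}\cdot\mathbf{B})^{2}}{|\mathbf{B}|^{2}\,|\nabla\psi|^{3}}\,dA\right]
$$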
def METHOD_NAME(base): | [
365,
3613,
1119,
279,
272
] |
def METHOD_NAME(T):
return T.qualify(Direction.METHOD_NAME) | [
623
] |
def METHOD_NAME(request):
try:
lists = {}
auth = AuthSession(request.session)
client = auth.get_clientFactory()
lists["form_vrf"] = VrfForm() # TODO Alterar depois de edit pra ""
if request.method == 'POST':
# Set data in form
vrf_form = VrfForm(request.POST)
# Return data to form in case of error
lists["form_vrf"] = vrf_form
# Validate
if vrf_form.is_valid():
vrf = vrf_form.cleaned_data["vrf"]
internal_name = vrf_form.cleaned_data["internal_name"]
list_vrf = [{
"vrf": vrf,
"internal_name": internal_name
}]
client.create_api_vrf().create(list_vrf)
messages.add_message(
request, messages.SUCCESS, vrf_messages.get("success_insert"))
return redirect('vrf.list')
except NetworkAPIClientError, e:
logger.error(e)
messages.add_message(request, messages.ERROR, e)
return render_to_response(VRF_CREATE, lists, context_instance=RequestContext(request)) | [
408,
8510
] |
def METHOD_NAME(match): | [
56,
2149
] |
def METHOD_NAME(self, data): | [
69,
13,
377,
1978
] |
def METHOD_NAME(self, req, fp, code, msg, headers):
infourl = urllib.addinfourl(fp, headers, req.get_full_url())
infourl.status = code
infourl.code = code
return infourl | [
721,
168,
1612
] |
def METHOD_NAME(self):
config = MainConfig.instance()
assert(config.SecretKey == 'abc123') | [
9,
485,
486,
144,
199
] |
def METHOD_NAME(self, pattern, start):
"Recursively adds a linear pattern to the AC automaton"
#print("adding pattern", pattern, "to", start)
if not pattern:
#print("empty pattern")
return [start]
if isinstance(pattern[0], tuple):
#alternatives
#print("alternatives")
match_nodes = []
for alternative in pattern[0]:
#add all alternatives, and add the rest of the pattern
#to each end node
end_nodes = self.METHOD_NAME(alternative, start=start)
for end in end_nodes:
match_nodes.extend(self.METHOD_NAME(pattern[1:], end))
return match_nodes
else:
#single token
#not last
if pattern[0] not in start.transition_table:
#transition did not exist, create new
next_node = BMNode()
start.transition_table[pattern[0]] = next_node
else:
#transition exists already, follow
next_node = start.transition_table[pattern[0]]
if pattern[1:]:
end_nodes = self.METHOD_NAME(pattern[1:], start=next_node)
else:
end_nodes = [next_node]
return end_nodes | [
238
] |
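The routine above only relies on each node exposing a `transition_table`; a minimal `BMNode` sufficient to run it (the real lib2to3-style node carries more bookkeeping):

```python
class BMNode(object):
    """Minimal Aho-Corasick-style node: token -> next-node transitions."""
    def __init__(self):
        self.transition_table = {}
```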
def METHOD_NAME(text, save_xml_name='trips_output.xml', save_xml_pretty=True,
offline=False, service_endpoint='drum', service_host=None):
"""Return a TripsProcessor by processing text.
Parameters
----------
text : str
The text to be processed.
save_xml_name : Optional[str]
The name of the file to save the returned TRIPS extraction knowledge
base XML. Default: trips_output.xml
save_xml_pretty : Optional[bool]
If True, the saved XML is pretty-printed. Some third-party tools
require non-pretty-printed XMLs which can be obtained by setting this
to False. Default: True
offline : Optional[bool]
If True, offline reading is used with a local instance of DRUM, if
available. Default: False
service_endpoint : Optional[str]
Selects the TRIPS/DRUM web service endpoint to use. Is a choice between
"drum" (default) and "drum-dev", a nightly build.
service_host : Optional[str]
Address of a service host different from the public IHMC server (e.g., a
locally running service).
Returns
-------
tp : TripsProcessor
A TripsProcessor containing the extracted INDRA Statements
in tp.statements.
"""
if not offline:
html = client.send_query(text, service_endpoint=service_endpoint,
service_host=service_host)
xml = client.get_xml(html)
else:
if offline_reading:
try:
dr = DrumReader()
if dr is None:
raise Exception('DrumReader could not be instantiated.')
except BaseException as e:
logger.error(e)
logger.error('Make sure drum/bin/trips-drum is running in'
' a separate process')
return None
try:
dr.read_text(text)
dr.start()
except SystemExit:
pass
xml = dr.extractions[0]
else:
logger.error('Offline reading with TRIPS/DRUM not available.')
logger.error('Error message was: %s' % offline_err)
msg = """
To install DRUM locally, follow instructions at
https://github.com/wdebeaum/drum.
Next, install the pykqml package either from pip or from
https://github.com/bgyori/pykqml.
Once installed, run drum/bin/trips-drum in a separate process.
"""
logger.error(msg)
return None
if save_xml_name:
client.save_xml(xml, save_xml_name, save_xml_pretty)
return process_xml(xml) | [
356,
526
] |
def METHOD_NAME(self, algo, hash, user=None):
"""
Return the first file that the user has access to given its hash and its
associated hashsum algorithm name.
:param algo: Algorithm the given hash is encoded with.
:param hash: Hash of the file to find.
:param user: User to test access against.
Default (none) is the current user.
:return: A file document.
"""
self._validateAlgo(algo)
query = {algo: hash}
fileModel = FileModel()
cursor = fileModel.find(query)
if not user:
user = self.getCurrentUser()
for file in cursor:
if fileModel.hasAccess(file, user, AccessType.READ):
return file
return None | [
19,
865,
171,
604,
1161
] |
def METHOD_NAME(self, tag: Tag, do_save: bool) -> None:
self._session.METHOD_NAME(tag)
if do_save:
self._session.flush() | [
34
] |
def METHOD_NAME(self):
config_dict = {
"train_batch_size": 2,
"steps_per_print": 1,
"optimizer": {
"type": "Adam",
"params": {
"lr": 0.00015,
"weight_decay": 0.01
}
},
"gradient_clipping": 1.0,
"fp16": {
"enabled": True,
"loss_scale": 0,
"initial_scale_power": 16
},
"data_efficiency": {
"enabled": True,
"seed": 1234,
"data_sampling": {
"enabled": True,
"num_workers": 0,
"curriculum_learning": {
"enabled": True,
"data_cluster_path": "/tmp",
"curriculum_metrics": {
"dummy_metric": {
"index_to_sample_path": "dummy",
"index_to_metric_path": "dummy",
"difficulty_type": "value",
"clustering_type": "single_cluster",
"min_difficulty": 2,
"max_difficulty": 10,
"schedule_type": "fixed_root",
"schedule_config": {
"total_curriculum_step": 8,
"difficulty_step": 2,
"root_degree": 1
}
}
}
}
}
}
}
def data_post_process(data, data_sampler_state_dict):
assert 'dummy_metric' in data_sampler_state_dict['current_difficulties']
return data
hidden_dim = 10
model = SimpleModel(hidden_dim)
dataset = random_dataset(20, hidden_dim, torch.device('cpu'), dtype=torch.half)
model, _, data_loader, _ = deepspeed.initialize(config=config_dict,
model=model,
training_data=dataset,
model_parameters=model.parameters(),
mpu=MPU(1))
if model.mpu.get_data_parallel_rank() == 0 and not os.path.exists('/tmp'):
os.makedirs('/tmp')
model.set_data_post_process_func(data_post_process)
for n, batch in enumerate(data_loader):
x = batch[0].to(get_accelerator().current_device_name())
y = batch[1].to(get_accelerator().current_device_name())
loss = model(x, y)
model.backward(loss)
model.step()
if n >= 10:
break | [
9,
7461,
4960
] |