text: string (lengths 15 – 7.82k)
ids: sequence (lengths 1 – 7)
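Each record below pairs a Python snippet whose method name is masked as METHOD_NAME (the text column) with a short list of integer ids (the ids column — plausibly the tokenized method name, though the dump itself does not say). A minimal sketch of how such a dump might be iterated; the alternating text/ids line layout and the file name are assumptions about this particular export, not a documented format:

# Minimal sketch, assuming each record is one line of code text
# followed by one line holding its list of integer ids.
import ast

def iter_records(path: str):
    with open(path, encoding="utf-8") as f:
        lines = [line.rstrip("\n") for line in f if line.strip()]
    for text, ids_line in zip(lines[0::2], lines[1::2]):
        # ids lines look like "[ 19, 4932, 636 ]"
        yield text, ast.literal_eval(ids_line)

# Hypothetical usage:
# for text, ids in iter_records("dump.txt"):
#     print(len(text), ids)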
def METHOD_NAME(scheme):
    """Convert backend scheme to frontend label."""
    scheme_to_label = current_app.config.get("RDM_RECORDS_IDENTIFIERS_SCHEMES", {})
    return scheme_to_label.get(scheme, {}).get("label", scheme)
[ 19, 4932, 636 ]
def METHOD_NAME(self):
    self.topo_tree = {
        "default": 0,
        "bk_obj_name": "业务",
        "bk_obj_id": "biz",
        "child": [
            {
                "default": 1,
                "bk_obj_name": "集群",
                "bk_obj_id": "set",
                "child": [
                    {"default": 1, "bk_obj_id": "module", "bk_inst_id": 3, "bk_obj_name": "模块", "bk_inst_name": "空闲机"},
                    {"default": 1, "bk_obj_id": "module", "bk_inst_id": 4, "bk_obj_name": "模块", "bk_inst_name": "故障机"},
                ],
                "bk_inst_id": 2,
                "bk_inst_name": "空闲机池",
            },
            {
                "default": 0,
                "bk_obj_name": "集群",
                "bk_obj_id": "set",
                "child": [
                    {"default": 0, "bk_obj_name": "模块", "bk_obj_id": "module", "bk_inst_id": 5, "bk_inst_name": "test1"},
                    {"default": 0, "bk_obj_name": "模块", "bk_obj_id": "module", "bk_inst_id": 6, "bk_inst_name": "test2"},
                    {"default": 0, "bk_obj_name": "模块", "bk_obj_id": "module", "bk_inst_id": 7, "bk_inst_name": "test3"},
                ],
                "bk_inst_id": 3,
                "bk_inst_name": "set2",
            },
            {
                "default": 0,
                "bk_obj_name": "集群",
                "bk_obj_id": "set",
                "child": [
                    {"default": 0, "bk_obj_name": "模块", "bk_obj_id": "module", "bk_inst_id": 8, "bk_inst_name": "test1"},
                    {"default": 0, "bk_obj_name": "模块", "bk_obj_id": "module", "bk_inst_id": 9, "bk_inst_name": "test2"},
                ],
                "bk_inst_id": 4,
                "bk_inst_name": "set3",
            },
        ],
        "bk_inst_id": 2,
        "bk_inst_name": "蓝鲸",
    }
[ 0, 1 ]
def METHOD_NAME(caplog):
    """
    Ensure an empty dict is returned and an error is logged in case
    the config does not contain path=<...>
    """
    with caplog.at_level(logging.ERROR):
        ext_pillar = vault.ext_pillar("testminion", {}, "secret/path")
    assert ext_pillar == {}
    assert "is not a valid Vault ext_pillar config" in caplog.text
[ 9, 532, 200 ]
def METHOD_NAME(self, iEWidget1, iEWidget2):
    iEWidget2._imageEditor.posModel = iEWidget1._imageEditor.posModel
    self._saveShape = iEWidget1._imageEditor.posModel.shape
[ 548 ]
def METHOD_NAME(self): raise NotImplementedError()
[ 19, 441, 968 ]
def METHOD_NAME(self, board: AbstractBoard) -> None:
    """Create a cache for coherent I/O connections"""
    self.iocache = Cache(
        assoc=8,
        tag_latency=50,
        data_latency=50,
        response_latency=50,
        mshrs=20,
        size="1kB",
        tgts_per_mshr=12,
        addr_ranges=board.mem_ranges,
    )
    self.iocache.mem_side = self.membus.cpu_side_ports
    self.iocache.cpu_side = board.get_mem_side_coherent_io_port()
[ 102, 249, 596 ]
def METHOD_NAME(cmd_prefix_outputs=None):
    cmd_prefix_outputs = create_cmd_prefix_outputs(cmd_prefix_outputs)

    def check_output(args, **kwargs):
        cmd = " ".join(args)
        for cmd_prefix, output in cmd_prefix_outputs.items():
            assert isinstance(output, bytes), f"Output for '{cmd_prefix}' must be bytes"
            if cmd.startswith(cmd_prefix):
                return output
        raise AssertionError(f"Unexpected check_output args: {args}")

    return check_output
[ 129, 250, 146 ]
def METHOD_NAME(username='flexget', password='', session=None):
    check = zxcvbn.zxcvbn(password, user_inputs=[username])
    if check['score'] < 3:
        warning = check['feedback']['warning']
        suggestions = ' '.join(check['feedback']['suggestions'])
        message = f'Password \'{password}\' is not strong enough. '
        if warning:
            message += warning + ' '
        if suggestions:
            message += f'Suggestions: {suggestions}'
        raise WeakPassword(message)
    user = get_user(username=username, session=session)
    user.password = str(generate_password_hash(password))
    session.commit()
[ 194, 2897 ]
def METHOD_NAME(self, **options):
    # Replace whitespace and other problematic characters with underscores
    def cleanup_name(text: str) -> str:
        return text.lower(). \
            replace(' ', '_'). \
            replace('-', '_'). \
            replace('/', '_'). \
            replace('(', '_'). \
            replace(')', '_')

    # Collect all translatable items
    data = [i for i in ExerciseCategory.objects.all()] \
        + [i for i in Equipment.objects.all()] \
        + [i.name_en for i in Muscle.objects.all() if i.name_en] \
        + [i for i in RepetitionUnit.objects.all()] \
        + [i for i in WeightUnit.objects.all()]

    # Make entries unique and sort alphabetically
    data = sorted(set([i.__str__() for i in data]))

    #
    # Django - write to .tpl file
    with open('wger/i18n.tpl', 'w') as f:
        out = '{% load i18n %}\n'
        for i in data:
            out += f'{{% translate "{i}" %}}\n'
        f.write(out)
        self.stdout.write(self.style.SUCCESS(f'Wrote content to wger/i18n.tpl'))

    #
    # React - copy the file to src/i18n.tsx in the react repo
    with open('wger/i18n.tsx', 'w') as f:
        out = '''
// This code is autogenerated in the backend repo in extract-i18n.py do not edit!
// Translate dynamic strings that are returned from the server
// These strings such as categories or equipment are returned by the server
// in English and need to be translated here in the application (there are
// probably better ways to do this, but that's the way it is right now).
import { useTranslation } from "react-i18next";

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const DummyComponent = () => {
    const [t] = useTranslation();'''
        for i in data:
            out += f't("server.{cleanup_name(i.__str__())}");\n'
        out += '''
    return (<p></p>);
};'''
        f.write(out)
        self.stdout.write(self.style.SUCCESS(f'Wrote content to wger/i18n.tsx'))

    #
    # Flutter - copy content to the end of lib/l10n/app_en.arb in the flutter repo
    with open('wger/app_en.arb', 'w') as f:
        out = ''
        for i in data:
            out += f'"{cleanup_name(i.__str__())}": "{i}",\n'
            out += f'"@{cleanup_name(i.__str__())}": {{ \n'
            out += f'"description": "Generated entry for translation for server strings"\n'
            out += '},\n'
        f.write(out)
        self.stdout.write(self.style.SUCCESS(f'Wrote content to app_en.arb'))

    # Copy to lib/helpers/i18n.dart in the flutter repo
    with open('wger/i18n.dart', 'w') as f:
        out = '''
/// This code is autogenerated in the backend repo in extract-i18n.py do not edit!
/// Translate dynamic strings that are returned from the server
/// These strings such as categories or equipment are returned by the server
/// in English and need to be translated here in the application (there are
/// probably better ways to do this, but that's the way it is right now).
import 'package:flutter/widgets.dart';
import 'package:flutter_gen/gen_l10n/app_localizations.dart';

String getTranslation(String value, BuildContext context) {
    switch (value) {'''
        for i in data:
            out += f'''
        case '{i}':
            return AppLocalizations.of(context).{cleanup_name(i.__str__())};
'''
        out += '''
        default:
            throw FormatException('Could not translate the server string $value');
    }}'''
        f.write(out)
        self.stdout.write(self.style.SUCCESS('Wrote content to wger/i18n.dart'))
[ 276 ]
def METHOD_NAME(self, cosmo, M, a):
    nu = get_delta_c(cosmo, a, 'EdS_approx') / cosmo.sigmaM(M, a)
    n_eff = -2 * self._dlsigmaR(cosmo, M, a) - 3
    alpha_eff = cosmo.growth_rate(a)

    A = self.a0 * (1 + self.a1 * (n_eff + 3))
    B = self.b0 * (1 + self.b1 * (n_eff + 3))
    C = 1 - self.c_alpha * (1 - alpha_eff)
    arg = A / nu * (1 + nu**2 / B)
    G = self._G_inv(arg, n_eff)
    return C * G
[ 13782 ]
def METHOD_NAME(self): """Unpauses this queue.""" log.info(u'Unpausing queue') self.min_priority = 0
[ 10412 ]
def METHOD_NAME(self): return dict(zip(self.class_names, range(len(self.class_names))))
[ 56, 2, 156, 24, 718 ]
def METHOD_NAME(cvs, gmin, dx, nbin):
    # get the n-dimensional indices from the values
    idx = get_indexes_from_cvs(cvs, gmin, dx, nbin)
    # transform them into a 1D index
    i = idx[-1]
    for j in range(len(nbin) - 1, 0, -1):
        i = i * nbin[j - 1] + idx[j - 1]
    return i
[ 19, 724, 280, 11409 ]
def METHOD_NAME(self): raise NotImplementedError
[ 13, 39 ]
def METHOD_NAME(): return get_token(16)
[ 19, 466, 7635 ]
def METHOD_NAME(self) -> Optional[str]:
    """
    The maintenance configuration Id
    """
    return pulumi.get(self, "maintenance_configuration_id")
[ 4107, 830, 147 ]
def METHOD_NAME(self): return Point(self)
[ 215 ]
def METHOD_NAME(self):
    def fn(x: List[int]):
        return x[slice(None, 5, None)]

    self.checkScript(fn, (range(10),))
[ 9, 55, 631, 246, 41, 2599 ]
def METHOD_NAME(data):
    print('{:<20} {:>12} {:>12} {:>12} {:>25}'.format(
        'Module', 'Average', 'Threshold', 'Stdev', 'Values'))
    for key, val in data.items():
        print('{:<20} {:>12.0f} {:>12.0f} {:>12.0f} {:>25}'.format(
            key, val['average'], val['threshold'], val['stdev'], str(val['values'])))
[ 38, 199 ]
def METHOD_NAME():
    '''Returns the logging level, which might be defined badly by the admin'''
    try:
        return int(prefs.pref('LoggingLevel'))
    except TypeError:
        return 1
[ 663, 33 ]
def METHOD_NAME(self):
    node = self.nodes[0]
    node1 = self.nodes[1]
    node.generate(101)
    self.sync_blocks()
    assert_equal(node.getblockcount(), 101)  # eunos

    # Get addresses and set up account
    account = node.getnewaddress()
    node.utxostoaccount({account: "10@0"})
    node.generate(1)
    self.sync_blocks()

    addressInfo = node.getaddressinfo(account)
    accountkey1 = addressInfo["scriptPubKey"] + "@0"  # key of the first account

    accounts = node1.listaccounts()
    # make sure there is one account in the system
    assert_equal(len(accounts), 1)
    assert_equal(accounts[0]["key"], accountkey1)

    pagination = {
        "start": accounts[len(accounts) - 1]["key"],
        "including_start": True,
    }
    result2 = node1.listaccounts(pagination)

    # check the result has accountkey1
    assert_equal(len(result2), 1)
    assert_equal(result2[0]["key"], accountkey1)

    ###########################################################################################
    # test with two accounts
    # add another account
    account2 = node.getnewaddress()
    node.utxostoaccount({account2: "10@0"})
    node.generate(1)
    self.sync_blocks()

    addressInfo = node.getaddressinfo(account2)
    accountkey2 = addressInfo["scriptPubKey"] + "@0"  # key of the second account

    accounts = node1.listaccounts()
    # make sure we have two accounts in the system
    assert_equal(len(accounts), 2)

    # results are in lexicographic order; perform the checks accordingly
    if accountkey1 < accountkey2:
        assert_equal(accounts[0]["key"], accountkey1)
        assert_equal(accounts[1]["key"], accountkey2)
    else:
        assert_equal(accounts[0]["key"], accountkey2)
        assert_equal(accounts[1]["key"], accountkey1)

    pagination = {
        "start": accounts[len(accounts) - 1]["key"],  # giving the last element of accounts[] as start
        "including_start": True,
    }
    result2 = node1.listaccounts(pagination)

    # check for length
    assert_equal(len(result2), 1)

    # results are in lexicographic order; perform the checks accordingly
    if accountkey1 < accountkey2:
        assert_equal(result2[0]["key"], accountkey2)
    else:
        assert_equal(result2[0]["key"], accountkey1)

    ###########################################################################################
    # Add another account from other node
    account3 = node1.getnewaddress()
    node.sendtoaddress(account3, 50)
    node.generate(1)
    self.sync_blocks()

    node1.utxostoaccount({account3: "10@0"})
    node1.generate(1)
    self.sync_blocks()

    addressInfo = node1.getaddressinfo(account3)

    accounts = node.listaccounts()
    # make sure we have three accounts in the system
    assert_equal(len(accounts), 3)

    pagination = {
        "start": accounts[0]["key"],  # pass the first key in the accounts list
        "including_start": False,
    }
    result2 = node1.listaccounts(pagination)

    # check for length: we should get 2 entries, since the listaccounts RPC
    # should return all accounts even with pagination
    assert_equal(len(result2), 2)
[ 22, 9 ]
def METHOD_NAME(event: E) -> None: event.app.exit(result=True)
[ 2765 ]
def METHOD_NAME(obj):
    """Attempt to convert numpy.ma.core.masked"""
    numpy = get_module("numpy")
    if not numpy:
        raise NotEncodable
    if obj is numpy.ma.core.masked:
        return float("nan")
    else:
        raise NotEncodable
[ 421, 947, 2028 ]
def METHOD_NAME():
    datefmt = '%H:%M:%S.%f'
    fmt = '%(asctime)s %(levelname).1s %(name)s: %(message)s'
    formatter = logging.Formatter(fmt, datefmt)
    formatter.formatTime = _formatTime
    return formatter
[ 390, 19, 2931 ]
def METHOD_NAME(event):
    assert event.run.facets["parent"].run["runId"] == TASK_UUID
    assert event.run.facets["parent"].job["name"] == "TestDBTCloudExtractor.task_id"
    assert event.job.namespace == "default"
    assert event.job.name.startswith("SANDBOX.TEST_SCHEMA.my_new_project")
    if len(event.inputs) > 0:
        assert event.inputs[0].facets["dataSource"].name == "snowflake://gp21411.us-east-1.aws"
        assert event.inputs[0].facets["dataSource"].uri == "snowflake://gp21411.us-east-1.aws"
        assert event.inputs[0].facets["schema"].fields[0].name.upper() == "ID"
        if event.inputs[0].name == "SANDBOX.TEST_SCHEMA.my_first_dbt_model":
            assert event.inputs[0].facets["schema"].fields[0].type.upper() == "NUMBER"
    if len(event.outputs) > 0:
        assert event.outputs[0].facets["dataSource"].name == "snowflake://gp21411.us-east-1.aws"
        assert event.outputs[0].facets["dataSource"].uri == "snowflake://gp21411.us-east-1.aws"
        assert event.outputs[0].facets["schema"].fields[0].name.upper() == "ID"
        if event.outputs[0].name == "SANDBOX.TEST_SCHEMA.my_first_dbt_model":
            assert event.outputs[0].facets["schema"].fields[0].type.upper() == "NUMBER"
[ 2648, 417 ]
def METHOD_NAME(model_params):
    nest.SetDefaults("correlomatrix_detector", model_params)
    cd = nest.Create("correlomatrix_detector")
    return cd
[ 0, 235, 129 ]
def METHOD_NAME(date_last_start=None):
    try:
        if G.ADDON.getSettingBool('use_mysql'):
            client_uuid = G.LOCAL_DB.get_value('client_uuid')
            uuid = G.SHARED_DB.get_value('auto_update_device_uuid')
            if client_uuid != uuid:
                LOG.debug('The auto update has been disabled because another device '
                          'has been set as the main update manager')
                return None
        last_run = date_last_start or G.SHARED_DB.get_value('library_auto_update_last_start',
                                                            datetime.utcfromtimestamp(0))
        if G.ADDON.getSettingInt('lib_auto_upd_mode') == 0:  # Update at Kodi startup
            time = '00:00'
            update_frequency = 0
        else:
            time = G.ADDON.getSetting('lib_auto_upd_start') or '00:00'
            update_frequency = G.ADDON.getSettingInt('lib_auto_upd_freq')
        last_run = last_run.replace(hour=int(time[0:2]), minute=int(time[3:5]))
        next_run = last_run + timedelta(days=[1, 2, 5, 7][update_frequency])
        if next_run >= datetime.now():
            LOG.info('Next library auto update is scheduled for {}', next_run)
        return next_run
    except Exception:  # pylint: disable=broad-except
        # If settings.xml was not created yet, as at first service run,
        # G.ADDON.getSettingBool('use_mysql') will throw a TypeError.
        # If any other error appears, we don't want the service to crash;
        # let's return None in all cases.
        # import traceback
        # LOG.debug(traceback.format_exc())
        LOG.warn('Managed error at _compute_next_schedule')
        return None
[ 226, 243, 507 ]
def METHOD_NAME(project_id, target_dataset, client):
    """Function to create the log table for storing job failures"""
    table_id = project_id + "." + target_dataset + ".report_status_log_tbl"
    schema = [
        bigquery.SchemaField("Timestamp", "STRING"),
        bigquery.SchemaField("FilePath", "STRING"),
        bigquery.SchemaField("Schemaname", "STRING"),
        bigquery.SchemaField("TableName", "STRING"),
        bigquery.SchemaField("Category", "STRING"),
        bigquery.SchemaField("Message", "STRING"),
        bigquery.SchemaField("Status", "STRING"),
        bigquery.SchemaField("Action", "STRING"),
    ]
    table_address = bigquery.Table(table_id, schema=schema)
    table_ref = client.create_table(table_address, exists_ok=True)
    return table_ref
[ 129, 390, 410 ]
def METHOD_NAME(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=publiccaCallTransformer(),
[ 1112, 1537 ]
def METHOD_NAME(self):
    stack = self.stack
    args = stack.pop()
    func = stack[-1]
    # If the function name ends with "_persist", then assume the
    # function wants the Unpickler as the first parameter.
    func_name = func.__name__
    if func_name.endswith('_persist') or func_name.endswith('Persist'):
        value = func(self, *args)
    else:
        # Otherwise, use the existing pickle convention.
        value = func(*args)
    stack[-1] = value
[ 557, 332 ]
def METHOD_NAME(connection, x, y, tag, ip_address=None, strip=True):
    """
    Make a tag deliver to the given connection.

    :param connection: The connection to deliver to.
    :type connection:
        ~spinnman.connections.udp_packet_connections.UDPConnection
    :param int x: The X coordinate of the Ethernet-enabled chip we are
        sending the message to.
    :param int y: The Y coordinate of the Ethernet-enabled chip we are
        sending the message to.
    :param int tag: The ID of the tag to retarget.
    :param str ip_address: What IP address to send the message to. If
        ``None``, the connection is assumed to be connected to a specific
        board already.
    :param bool strip: Whether the tag should strip the SDP header before
        sending to the connection.
    """
    # If the connection itself knows how, delegate to it
    if isinstance(connection, SpallocEIEIOListener):
        connection.update_tag(x, y, tag)
    elif isinstance(connection, SpallocEIEIOConnection):
        connection.update_tag(tag)
    elif ip_address:
        reprogram_tag_to_listener(connection, x, y, ip_address, tag, strip)
    else:
        reprogram_tag(connection, tag, strip)
[ -1, 82 ]
def METHOD_NAME(
    self,
    dataset_name: str,
    feature_name: str,
    num_of_bins: int,
    global_min_value: float,
    global_max_value: float,
) -> Histogram:
    num_of_bins: int = num_of_bins
    df = self.data[dataset_name]
    feature: Series = df[feature_name]
    flattened = feature.ravel()
    flattened = flattened[flattened != np.array(None)]
    buckets = get_std_histogram_buckets(
        flattened, num_of_bins, BinRange(global_min_value, global_max_value)
    )
    return Histogram(HistogramType.STANDARD, buckets)
[ 6069 ]
def METHOD_NAME(address):
    """
    Unpair the bluetooth adapter from a device

    CLI Example:

    .. code-block:: bash

        salt '*' bluetooth.unpair DE:AD:BE:EF:CA:FE

    Where DE:AD:BE:EF:CA:FE is the address of the device to unpair.

    TODO: This function is currently broken, as the bluez-simple-agent program
    no longer ships with BlueZ >= 5.0. It needs to be refactored.
    """
    if not salt.utils.validate.net.mac(address):
        raise CommandExecutionError("Invalid BD address passed to bluetooth.unpair")

    cmd = "bluez-test-device remove {}".format(address)
    out = __salt__["cmd.run"](cmd).splitlines()
    return out
[ 3869 ]
def METHOD_NAME(self, config_stub, qtbot, monkeypatch, cookie, ram_jar, url):
    config_stub.val.content.cookies.accept = 'all'
    config_stub.set_str('content.cookies.accept', 'never',
                        pattern=urlmatch.UrlPattern('http://example.com'))
    org_url = QUrl('http://example.org/')
    with qtbot.wait_signal(ram_jar.changed):
        assert ram_jar.setCookiesFromUrl([cookie], org_url)
    assert ram_jar.cookiesForUrl(org_url)
    with qtbot.assert_not_emitted(ram_jar.changed):
        assert not ram_jar.setCookiesFromUrl([cookie], url)
    assert not ram_jar.cookiesForUrl(url)
[ 9, 2735, 274 ]
def METHOD_NAME(ctx):
    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
                                               os.environ['edge_user_name'],
                                               os.environ['request_id'])
    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
                        level=logging.DEBUG,
                        filename=local_log_filepath)
    try:
        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
    except Exception as err:
        traceback.print_exc()
        append_result("Failed terminating Notebook node.", str(err))
        sys.exit(1)
[ 1602 ]
def METHOD_NAME(self):
    radius = 4
    depth = ImageDepth(radius, napers=500, niters=2, overlap=True, seed=123,
                       progress_bar=False)
    data = np.zeros((300, 400))
    mask = None
    with pytest.warns(AstropyUserWarning,
                      match='One or more flux_limit values was zero'):
        limits = depth(data, mask)
    assert_allclose(limits, (0.0, np.inf))
[ 9, 313, 365 ]
def METHOD_NAME(self, mem_type, devid):
    if mem_type != "noc":
        return
    ddrmc_number = idcode_ddrmc_num_dict.get(devid, 0)
    if ddrmc_number == 0:
        logging.info("Can not get ddrmc/noc number!")
        self.enabled = False
        return
    pre_base_addr = []
    for (dirpath, dirnames, filenames) in os.walk("/proc/device-tree/axi"):
        for file in dirnames:
            if file.startswith("memory-controller"):
                compatible = open(os.path.join(os.path.abspath(os.path.join(
                    dirpath, file)), "compatible"), "rt").read().strip().split(',')
                driver_name = compatible[1].split('-')
                if driver_name[1] != 'ddrmc':
                    continue
                reg = os.path.join(os.path.abspath(
                    os.path.join(dirpath, file)), "reg")
                f = open(reg, "rb")
                numl = list(f.read())
                addr_off = ((numl[20] << 24) + (numl[21] << 16) +
                            (numl[22] << 8) + (numl[23] << 0))
                pre_base_addr.append(int(addr_off))
    pre_base_addr.sort()
    return pre_base_addr[0:ddrmc_number]
[ 19, -1, 414, 990 ]
async def METHOD_NAME(
    client: Any,
    logger: Any,
    register: Callable[[Any], Awaitable[None]],
    unregister: Callable[[Any], Awaitable[None]],
    rpc_dict: RPC_DICT_TYPE,
    event_dict: None | EVENT_DICT_TYPE = None,
    verbose: bool = False,
) -> None:
    async def handle_message(msg: str) -> None:
        try:
            data = json.loads(msg)
        except json.JsonException as e:
            logger.error(f"Invalid data: '{msg}'.")
            logger.debug(e)
            return

        if not isinstance(data, dict):
            logger.error(f"Invalid data: '{data}'.")
            return

        if "request" in data:  # ...then it is RPC
            req_type = data["request"]

            try:
                rpc_cls, rpc_cb = rpc_dict[req_type]
            except KeyError:
                logger.error(f"Unknown RPC request: {data}.")
                return

            assert req_type == rpc_cls.__name__

            try:
                req = rpc_cls.Request.from_dict(data)
            except ValidationError as e:
                logger.error(f"Invalid RPC: {data}, error: {e}")
                return
            except Arcor2Exception as e:
                # this might happen if e.g. some dataclass does additional validation of values in its __post_init__
                try:
                    await client.send(rpc_cls.Response(data["id"], False, messages=[str(e)]).to_json())
                    logger.debug(e, exc_info=True)
                except (KeyError, websockets.exceptions.ConnectionClosed):
                    pass
                return
            else:
                try:
                    rpc_start = time.monotonic()
                    resp = await rpc_cb(req, client)
                    rpc_dur = time.monotonic() - rpc_start
                    if rpc_dur > MAX_RPC_DURATION:
                        logger.warn(f"{req.request} callback took {rpc_dur:.3f}s.")
                except Arcor2Exception as e:
                    logger.debug(e, exc_info=True)
                    resp = rpc_cls.Response(req.id, False, [str(e)])
                else:
                    if resp is None:  # default response
                        resp = rpc_cls.Response(req.id, True)
                    else:
                        assert isinstance(resp, rpc_cls.Response)
                        resp.id = req.id

                try:
                    await client.send(resp.to_json())
                except websockets.exceptions.ConnectionClosed:
                    return

                if logger.level == LogLevel.DEBUG:
                    # Silencing of repetitive log messages
                    # ...maybe this could be done better and in a more general way using logging.Filter?

                    now = time.monotonic()

                    if req.request not in req_last_ts:
                        req_last_ts[req.request] = deque()

                    while req_last_ts[req.request]:
                        if req_last_ts[req.request][0] < now - 5.0:
                            req_last_ts[req.request].popleft()
                        else:
                            break

                    req_last_ts[req.request].append(now)
                    req_per_sec = len(req_last_ts[req.request]) / 5.0

                    if req_per_sec > 2:
                        if req.request not in ignored_reqs:
                            ignored_reqs.add(req.request)
                            logger.debug(f"Request of type {req.request} will be silenced.")
                    elif req_per_sec < 1:
                        if req.request in ignored_reqs:
                            ignored_reqs.remove(req.request)

                    if req.request not in ignored_reqs:
                        logger.debug(f"RPC request: {req}, result: {resp}")

        elif "event" in data:  # ...event from UI
            assert event_dict

            try:
                event_cls, event_cb = event_dict[data["event"]]
            except KeyError as e:
                logger.error(f"Unknown event type: {e}.")
                return

            try:
                event = event_cls.from_dict(data)
            except ValidationError as e:
                logger.error(f"Invalid event: {data}, error: {e}")
                return

            await event_cb(event, client)

        else:
            logger.error(f"unsupported format of message: {data}")

    if event_dict is None:
        event_dict = {}

    req_last_ts: dict[str, deque] = {}
    ignored_reqs: set[str] = set()

    try:
        await register(client)

        loop = asyncio.get_event_loop()

        async for message in client:
            loop.create_task(handle_message(message))

    except websockets.exceptions.ConnectionClosed:
        pass
    finally:
        await unregister(client)
[ 163 ]
def METHOD_NAME(self, cmd_list, plugin_info):
    """Run a list of commands

    :param cmd_list: List of commands to run
    :type cmd_list: `list`
    :param plugin_info: Plugin context information
    :type plugin_info: `dict`
    :return: Command output
    :rtype: `str`
    """
    output = ""
    for command in cmd_list:
        if command != "None":
            output += self.run(command, plugin_info)
    return output
[ 22, 1660, 245 ]
def METHOD_NAME(p):
    """Draws a point at position p (either a 2d or 3d list/tuple)"""
    GL.glBegin(GL.GL_POINTS)
    if len(p) == 2:
        GL.glVertex2f(*p)
    elif len(p) == 3:
        GL.glVertex3f(*p)
    else:
        GL.glVertex3f(p[0], p[1], p[2])
    GL.glEnd()
[ 1669 ]
def METHOD_NAME(gnu_dir):
    r_internals_h = join(gnu_dir, 'Rinternals.h')
    with open(r_internals_h) as f:
        lines = f.readlines()
    use_rinternals_count = 0
    with open('Rinternals.h', 'w') as f:
        for line in lines:
            if '== USE_RINTERNALS section' in line:
                f.write(use_internals_section_and_glob_var_api)
                f.write(line)
                f.write('#endif\n')
            elif 'typedef struct SEXPREC *SEXP' in line:
                f.write(line)
                f.write(sexp)
            elif '#ifdef USE_RINTERNALS' in line:
                if use_rinternals_count > 0:
                    f.write(use_internals_begin)
                else:
                    f.write(line)
                use_rinternals_count = 1
            elif 'macro version of R_CheckStack' in line:
                f.write(use_internals_end)
                f.write(line)
            elif 'LibExtern' in line:
                var = is_internal_var(line)
                if var:
                    rewrite_var(f, var, line)
                else:
                    f.write(line)
            else:
                f.write(line)
[ 6693, 3264, 15357 ]
def METHOD_NAME(self): return _QueueWriter(self)
[ 797 ]
def METHOD_NAME(self): pass
[ 709, 710 ]
def METHOD_NAME(self):
    METHOD_NAME = self._list_ports()
    interfaces = self._list_interfaces()
    filtered_ports = {}
    for port in METHOD_NAME:
        port_iface_uuid = port['interfaces']
        port_ifaces = [
            iface for iface in interfaces
            if iface['_uuid'] == port_iface_uuid
        ]
        if len(port_ifaces):
            port_iface = port_ifaces[0]
            filtered_ports[port['name']] = {
                'interface': port_iface['name'],
                'type': port_iface['type'],
                'options': port_iface['options'],
            }
    return filtered_ports
[ 907 ]
def METHOD_NAME(self, msg):
    """Define debug log"""
    if msg:
        self.log(msg, logging.DEBUG)
[ 290 ]
def METHOD_NAME(source, language, css_class, options, md, attrs, **kwargs) -> str:
    """A superfences formatter to insert an SVG screenshot."""
    import io

    from METHOD_NAME.console import Console

    title = attrs.get("title", "Rich")
    rows = int(attrs.get("lines", 24))
    columns = int(attrs.get("columns", 80))

    console = Console(
        file=io.StringIO(),
        record=True,
        force_terminal=True,
        color_system="truecolor",
        width=columns,
        height=rows,
    )
    error_console = Console(stderr=True)

    globals: dict = {}
    try:
        exec(source, globals)
    except Exception:
        error_console.print_exception()
        # console.bell()

    if "output" in globals:
        console.print(globals["output"])
    output_svg = console.export_svg(title=title)
    return output_svg
[ 6857 ]
def METHOD_NAME(self):
    # This method exists for backward compatibility.
    """Return buy energy profile of the asset."""
    return self._buy_energy_profile.profile
[ 5121, 2007, 1585 ]
def METHOD_NAME(X, drop_probability=0.5, seed=0, training_mode=False, return_mask=False):  # type: ignore
    if drop_probability == 0 or training_mode is False:
        if return_mask is True:
            return X, np.ones(X.shape, dtype=bool)
        else:
            return X

    np.random.seed(seed)
    mask = np.random.uniform(0, 1.0, X.shape) >= drop_probability
    scale = 1 / (1 - drop_probability)
    if return_mask:
        return mask * X * scale, mask.astype(bool)
    return mask * X * scale
[ 3663 ]
def METHOD_NAME(state, preset, instruction_template, extensions, show_controls):
    output = copy.deepcopy(shared.settings)
    exclude = ['name1', 'name2', 'greeting', 'context', 'turn_template']
    for k in state:
        if k in shared.settings and k not in exclude:
            output[k] = state[k]

    output['preset'] = preset
    output['prompt-default'] = state['prompt_menu-default']
    output['prompt-notebook'] = state['prompt_menu-notebook']
    output['character'] = state['character_menu']
    output['instruction_template'] = instruction_template
    output['default_extensions'] = extensions
    output['seed'] = int(output['seed'])
    output['show_controls'] = show_controls

    return yaml.dump(output, sort_keys=False, width=float("inf"))
[ 73, 817 ]
def METHOD_NAME(self, res, expected_in_body=None):
    content_type = res.headers.get("Content-Type")
    assert "application/json" in content_type, content_type
    res_json = self.loads(res.body)
    if expected_in_body:
        assert expected_in_body in res_json or expected_in_body in str(res_json), (
            "Expected to find %r in JSON response %r" % (expected_in_body, res_json)
        )
[ 638, 763, 17 ]
def METHOD_NAME(self, icon):
    if self.__animatedIcon is not None:
        self.__animatedIcon.unregister(self)
        self.__animatedIcon.iconChanged.disconnect(self.__updateIcon)
    self.__animatedIcon = icon
    if self.__animatedIcon is not None:
        self.__animatedIcon.register(self)
        self.__animatedIcon.iconChanged.connect(self.__updateIcon)
        i = self.__animatedIcon.currentIcon()
    else:
        i = qt.QIcon()
    super(AnimatedToolButton, self).setIcon(i)
[ 0, 5808, 875 ]
def METHOD_NAME(cls, *components): return os.path.join(ROOT, *components)
[ 1563, 157 ]
def METHOD_NAME(
    x,
    y,
    colorscale="Earth",
    ncontours=20,
    hist_color=(0, 0, 0.5),
    point_color=(0, 0, 0.5),
    point_size=2,
    title="2D Density Plot",
    height=600,
    width=600,
[ 129, 1085, 2915 ]
def METHOD_NAME(cls, response: requests.models.Response):
    if response.status_code not in [200, 201, 202, 204]:
        err_msg = cls._get_error_msg_from_response(response)
        if 400 <= response.status_code < 500:
            err_msg = "%s Client Error: %s for url: %s" % (response.status_code, err_msg, response.url)
        elif 500 <= response.status_code < 600:
            err_msg = "%s Server Error: %s for url: %s" % (response.status_code, err_msg, response.url)
        raise requests.exceptions.HTTPError(err_msg, response=response)
[ 250, 17 ]
async def METHOD_NAME(writer: asyncio.StreamWriter) -> None:
    """
    Python 3.6-compatible `asyncio.StreamWriter.wait_closed` wrapper.

    :param writer: The `asyncio.StreamWriter` to wait on.
    """
    if sys.version_info >= (3, 7):
        await writer.METHOD_NAME()
        return

    # Python 3.6
    transport = writer.transport
    assert isinstance(transport, asyncio.WriteTransport)

    while not transport.is_closing():
        await asyncio.sleep(0)

    # This is an ugly workaround, but it's the best I can come up with.
    sock = transport.get_extra_info('socket')

    if sock is None:
        # Our transport doesn't have a socket? ...
        # Nothing we can reasonably do.
        return

    while sock.fileno() != -1:
        await asyncio.sleep(0)
[ 618, 4703 ]
def METHOD_NAME():
    assert config.intake_path_dirs([]) == []
    assert config.intake_path_dirs(["paths"]) == ["paths"]
    assert config.intake_path_dirs("") == [""]
    assert config.intake_path_dirs("path1:path2") == ["path1", "path2"]
    assert config.intake_path_dirs("memory://path1:memory://path2") == [
        "memory://path1", "memory://path2"
    ]
[ 9, -1 ]
def METHOD_NAME(self):
    value = True
    expected_can_data = [int(value)]
    data = {"value": value}
    actual_can_data = self.converter.convert({}, data)
    self.assertListEqual(actual_can_data, expected_can_data)
[ 9, 863, 365 ]
def METHOD_NAME(self):
    self._check_one(PartSha1Mismatch, 400, 'part_sha1_mismatch', '', {})
    assert 'Part number my-file-id has wrong SHA1' == str(
        interpret_b2_error(400, 'part_sha1_mismatch', '', {}, {'fileId': 'my-file-id'})
    )
[ 9, 995, 7770, 4030 ]
def METHOD_NAME(array, target, axis, clip, highlevel, behavior):
    axis = regularize_axis(axis)
    layout = ak.operations.to_layout(array, allow_record=False, allow_other=False)
    out = ak._do.pad_none(layout, target, axis, clip=clip)
    return wrap_layout(out, behavior, highlevel, like=array)
[ 2581 ]
async def METHOD_NAME():
    """Commands to run on server startup."""
    logger.debug("Starting up FastAPI server.")
    logger.debug("Connecting to DB with SQLAlchemy")
    Base.metadata.create_all(bind=engine)

    # Read in XLSForms
    read_xlsforms(next(get_db()), xlsforms_path)
[ 4294, 417 ]
def METHOD_NAME(self, rootpath):
    """Assimilate the entire subdirectory structure in rootpath serially."""
    valid_paths = []
    for parent, subdirs, files in os.walk(rootpath):
        valid_paths.extend(self._drone.get_valid_paths((parent, subdirs, files)))
    data = []
    count = 0
    total = len(valid_paths)
    for path in valid_paths:
        newdata = self._drone.assimilate(path)
        self._data.append(newdata)
        count += 1
        logger.info(f"{count}/{total} ({count / total :.2%}) done")
    for d in data:
        self._data.append(json.loads(d, cls=MontyDecoder))
[ 4364, -1 ]
def METHOD_NAME(self):
    G = nx.DiGraph(self.edges)
    x = list(nx.edge_bfs(G, self.nodes))
    x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
    assert x == x_
[ 9, 8261 ]
def METHOD_NAME(self):
    pass
[ 9, 527 ]
def METHOD_NAME(x): return Mul(2, x, evaluate=False)
[ -1 ]
def METHOD_NAME(self):
    # type: () -> Iterator[teradatasql.connection]
    conn = None
    if TERADATASQL_IMPORT_ERROR:
        self.log.error(
            'Teradata SQL Driver module is unavailable. Please double check your installation and refer to the '
            'Datadog documentation for more information. %s',
            TERADATASQL_IMPORT_ERROR,
        )
        raise TERADATASQL_IMPORT_ERROR
    self.log.info('Connecting to Teradata database %s on server %s.', self.config.database, self.config.server)
    try:
        conn = teradatasql.METHOD_NAME(self._connect_params)
        self.log.info('Connected to Teradata.')
        yield conn
    except Exception as e:
        self.log.error('Unable to connect to Teradata. %s.', e)
        raise e
    finally:
        if conn:
            conn.close()
[ 707 ]
def METHOD_NAME(admin_client, alt_admin_client):
    # admin user schedule entry
    alt_admin_rjson = post_schedule(alt_admin_client, TEST_PRIVATE_SCHEDULE_ENTRY)
    alt_admin_entry_name = alt_admin_rjson["name"]

    admin_adjust_alt_admin_response = update_schedule(
        admin_client, alt_admin_entry_name, TEST_SCHEDULE_ENTRY
    )

    validate_response(admin_adjust_alt_admin_response, status.HTTP_200_OK)
[ 9, 2870, 1046, 2444, 75, 109 ]
def METHOD_NAME(url, path):
    response = requests.get(url, stream=True)
    if response.status_code != 200:
        return False
    with open(path, "wb") as f:
        shutil.copyfileobj(response.raw, f)
    return True
[ 136, 171 ]
def METHOD_NAME(self, n): self.db_name = n
[ 0, 1267, 156 ]
def METHOD_NAME(self): """Executed before each test in the class"""
[ 0, 1 ]
def METHOD_NAME(filename, format):
    from OpenSSL import crypto
    types = {
        FILETYPE_PEM: crypto.FILETYPE_PEM,
        FILETYPE_DER: crypto.FILETYPE_ASN1,
    }
    if filename == '-':
        crl = crypto.load_crl(types[format], sys.stdin.buffer.read())
    else:
        with open(filename, 'rb') as f:
            crl = crypto.load_crl(types[format], f.read())
    return set(int(r.get_serial(), 16) for r in crl.get_revoked())
[ 1365, 10984, 298 ]
def METHOD_NAME(self):
    braccept = self.get_executable("braccept")
    bfd_arg = ""
    if self.bfd:
        bfd_arg = "--bfd"
    sudo("%s --artifacts %s %s" % (braccept.executable, self.artifacts, bfd_arg))
[ 22 ]
def METHOD_NAME(self):
    return """\
    color
        Sets the marker color of unselected points, applied only
        when a selection exists.
    opacity
        Sets the marker opacity of unselected points, applied only
        when a selection exists.
    size
        Sets the marker size of unselected points, applied only
        when a selection exists.
    """
[ 1302, 1303 ]
def METHOD_NAME(self, queryset, request, view, count=None):
    # pylint: disable=attribute-defined-outside-init
    page_size = self.get_page_size(request)
    if not page_size:
        return None

    paginator = self.django_paginator_class(queryset, page_size, count_override=count)
    page_number = request.query_params.get(self.page_query_param, 1)
    if page_number in self.last_page_strings:
        page_number = paginator.num_pages

    try:
        self.page = paginator.page(page_number)
    except InvalidPage as exc:
        msg = self.invalid_page_message.format(page_number=page_number, message=str(exc))
        raise NotFound(msg) from exc

    if paginator.num_pages > 1 and self.template is not None:
        self.display_page_controls = True

    self.request = request
    return list(self.page)
[ 11465, 2386 ]
def METHOD_NAME(app): app.add_css_file("style.css")
[ 102 ]
async def METHOD_NAME(decoy: Decoy, mock_state_view: StateView) -> None:
    """It should use a virtual pipette to drop a tip."""
    subject = VirtualTipHandler(state_view=mock_state_view)

    await subject.drop_tip(pipette_id="pipette-id", home_after=None)

    decoy.verify(
        mock_state_view.pipettes.validate_tip_state("pipette-id", True),
        times=1,
    )
[ 9, 162, 1050, 4754 ]
def METHOD_NAME(self) -> GEOSGeometry: ...
[ 3321 ]
def METHOD_NAME(self):
    for parser in self.parsers:
        with self.subTest(parser=parser.__name__):
            MyPolicy = default.clone(message_factory=self.MyMessage)
            msg = parser("To: foo\n\ntest", Message, policy=MyPolicy)
            self.assertNotIsInstance(msg, self.MyMessage)
            self.assertIsInstance(msg, Message)
[ 9, 1155, 718, 2707, 54 ]
def METHOD_NAME(example_command_with_aliases: Command):
    """Test that `command_name in registry` works."""
    registry = CommandRegistry()
    command = example_command_with_aliases

    assert command.name not in registry
    assert "nonexistent_command" not in registry

    registry.register(command)

    assert command.name in registry
    assert "nonexistent_command" not in registry

    for alias in command.aliases:
        assert alias in registry
[ 9, 462, 623, 510 ]
def METHOD_NAME(irreps_in1, irreps_in2, ir_out) -> bool:
    irreps_in1 = o3.Irreps(irreps_in1).simplify()
    irreps_in2 = o3.Irreps(irreps_in2).simplify()
    ir_out = o3.Irrep(ir_out)

    for _, ir1 in irreps_in1:
        for _, ir2 in irreps_in2:
            if ir_out in ir1 * ir2:
                return True
    return False
[ 9302, 157, 954 ]
def METHOD_NAME(self, args):
    '''enable/disable speed report'''
    self.settings.set('speedreporting', not self.settings.speedreporting)
    if self.settings.speedreporting:
        self.console.writeln("Speed reporting enabled", bg='yellow')
    else:
        self.console.writeln("Speed reporting disabled", bg='yellow')
[ 1660, 1942 ]
def METHOD_NAME(self, dvs, testlog):
    self.setup_db(dvs)

    output_dict = {}
    brkoutTbl = swsscommon.Table(self.cdb, "BREAKOUT_CFG")
    brkout_entries = brkoutTbl.getKeys()
    assert len(brkout_entries) == 32

    for key in brkout_entries:
        (status, fvs) = brkoutTbl.get(key)
        assert status

        brkout_mode = fvs[0][1]
        output_dict[key] = brkout_mode

    output = collections.OrderedDict(sorted(output_dict.items(), key=lambda t: t[0]))
    expected_dict = {
        'Ethernet8': '1x100G[40G]', 'Ethernet0': '1x100G[40G]', 'Ethernet4': '1x100G[40G]',
        'Ethernet108': '1x100G[40G]', 'Ethernet100': '1x100G[40G]', 'Ethernet104': '1x100G[40G]',
        'Ethernet68': '1x100G[40G]', 'Ethernet96': '1x100G[40G]', 'Ethernet124': '1x100G[40G]',
        'Ethernet92': '1x100G[40G]', 'Ethernet120': '1x100G[40G]', 'Ethernet52': '1x100G[40G]',
        'Ethernet56': '1x100G[40G]', 'Ethernet76': '1x100G[40G]', 'Ethernet72': '1x100G[40G]',
        'Ethernet32': '1x100G[40G]', 'Ethernet16': '1x100G[40G]', 'Ethernet36': '1x100G[40G]',
        'Ethernet12': '1x100G[40G]', 'Ethernet28': '1x100G[40G]', 'Ethernet88': '1x100G[40G]',
        'Ethernet116': '1x100G[40G]', 'Ethernet80': '1x100G[40G]', 'Ethernet112': '1x100G[40G]',
        'Ethernet84': '1x100G[40G]', 'Ethernet48': '1x100G[40G]', 'Ethernet44': '1x100G[40G]',
        'Ethernet40': '1x100G[40G]', 'Ethernet64': '1x100G[40G]', 'Ethernet60': '1x100G[40G]',
        'Ethernet20': '1x100G[40G]', 'Ethernet24': '1x100G[40G]',
    }
    expected = collections.OrderedDict(sorted(expected_dict.items(), key=lambda t: t[0]))
    assert output == expected
[ 9, 2471, 15227, 854 ]
def METHOD_NAME(self):
    installable = client.system.listLatestInstallablePackages(
        self.session_key, SERVER_ID)
    install_these = []
    for pkg in installable[0:3]:
        install_these.append(pkg['package_id'])
    earliest = datetime.now() + timedelta(3)  # 3 days from now
    dt = xmlrpclib.DateTime(earliest.timetuple())
    client.system.schedulePackageInstall(self.session_key, SERVER_ID,
                                         install_these, dt)
[ 9, 507, 360, 428 ]
def METHOD_NAME():
    config = configparser.ConfigParser()
    config.optionxform = str
    config.read_file(
        open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8")
    )
    env_conf = dict(config.items("environment"))

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-v", "--verbose", action="store_true")
    args = parser.parse_args()
    verbose = args.verbose

    if verbose:
        level = logging.DEBUG
    else:
        level = logging.ERROR
    formatter = "%(asctime)s - %(levelname)s - %(message)s"
    # Add the format/level to the logger
    logging.basicConfig(format=formatter, level=level)

    bctester(
        os.path.join(env_conf["SRCDIR"], "test", "util", "data"),
        "defi-util-test.json",
        env_conf,
    )
[ 57 ]
def METHOD_NAME(address_n: paths.Bip32Path) -> int | None:
    HARDENED = paths.HARDENED  # local_cache_attribute

    if len(address_n) < 2:
        return None
    if address_n[0] == 45 | HARDENED and not address_n[1] & HARDENED:
        return address_n[1]
    return address_n[1] & ~HARDENED
[ 17017, 280, 85, 293 ]
def METHOD_NAME(lists, left, right):
    if left == right:
        return lists[left]
    elif left < right:
        mid = left + (right - left) // 2
        l1 = METHOD_NAME(lists, left, mid)
        l2 = METHOD_NAME(lists, mid + 1, right)
        return mergeTwoLists(l1, l2)
    else:
        return None
[ 411, 4407, 50, 2291, -1 ]
def METHOD_NAME(info, names=None):
    """Load plugins with the given app info object.

    If `names` is None, all installed plugins will be loaded. If `names` is a
    list, then only those plugins in the provided list will be loaded.
    """
    if names is None:
        names = allPlugins()

    for name in names:
        pluginObject = getPlugin(name)
        if pluginObject is None:
            raise GirderException('Plugin %s is not installed' % name)
        pluginObject.load(info)
[ 557, 1294 ]
def METHOD_NAME(self) -> str:
    """
    Resource Etag.
    """
    return pulumi.get(self, "etag")
[ 431 ]
def METHOD_NAME(existing: list[str], new: list[str]) -> list[str]:
    converted_names = []
    unique_names = list(new[:])
    for i, n in enumerate(new):
        if n in existing:
            original_name = n
            fixed_name = n
            duplicate_count = 0
            while fixed_name in existing:
                fixed_name = n + "_" + str(duplicate_count)
                duplicate_count += 1
            unique_names[i] = fixed_name
            converted_names.append(f"{original_name} -> {fixed_name}")
    if converted_names:
        import warnings

        ws = column_name_conflict_doc.format("\n ".join(converted_names))
        warnings.warn(ws, ColumnNameConflict)
    return unique_names
[ 5265, 2768, 2618, 156 ]
def METHOD_NAME():
    client = HDInsightManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="subid",
    )

    response = client.clusters.begin_create(
        resource_group_name="rg1",
        cluster_name="cluster1",
        parameters={
            "properties": {
                "clusterDefinition": {
                    "configurations": {
                        "gateway": {
                            "restAuthCredential.isEnabled": True,
                            "restAuthCredential.password": "**********",
                            "restAuthCredential.username": "admin",
                        }
                    },
                    "kind": "Hadoop",
                },
                "clusterVersion": "3.5",
                "computeProfile": {
                    "roles": [
                        {
                            "hardwareProfile": {"vmSize": "Standard_D3_V2"},
                            "minInstanceCount": 1,
                            "name": "headnode",
                            "osProfile": {
                                "linuxOperatingSystemProfile": {
                                    "password": "**********",
                                    "sshProfile": {"publicKeys": [{"certificateData": "**********"}]},
                                    "username": "sshuser",
                                }
                            },
                            "scriptActions": [],
                            "targetInstanceCount": 2,
                            "virtualNetworkProfile": {
                                "id": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname",
                                "subnet": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname/subnets/vnetsubnet",
                            },
                        },
                        {
                            "hardwareProfile": {"vmSize": "Standard_D3_V2"},
                            "minInstanceCount": 1,
                            "name": "workernode",
                            "osProfile": {
                                "linuxOperatingSystemProfile": {
                                    "password": "**********",
                                    "sshProfile": {"publicKeys": [{"certificateData": "**********"}]},
                                    "username": "sshuser",
                                }
                            },
                            "scriptActions": [],
                            "targetInstanceCount": 4,
                            "virtualNetworkProfile": {
                                "id": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname",
                                "subnet": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname/subnets/vnetsubnet",
                            },
                        },
                        {
                            "hardwareProfile": {"vmSize": "Small"},
                            "minInstanceCount": 1,
                            "name": "zookeepernode",
                            "osProfile": {
                                "linuxOperatingSystemProfile": {
                                    "password": "**********",
                                    "sshProfile": {"publicKeys": [{"certificateData": "**********"}]},
                                    "username": "sshuser",
                                }
                            },
                            "scriptActions": [],
                            "targetInstanceCount": 3,
                            "virtualNetworkProfile": {
                                "id": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname",
                                "subnet": "/subscriptions/subId/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnetname/subnets/vnetsubnet",
                            },
                        },
                    ]
                },
                "osType": "Linux",
                "securityProfile": {
                    "clusterUsersGroupDNs": ["hdiusers"],
                    "directoryType": "ActiveDirectory",
                    "domain": "DomainName",
                    "domainUserPassword": "**********",
                    "domainUsername": "DomainUsername",
                    "ldapsUrls": ["ldaps://10.10.0.4:636"],
                    "organizationalUnitDN": "OU=Hadoop,DC=hdinsight,DC=test",
                },
                "storageProfile": {
                    "storageaccounts": [
                        {
                            "container": "containername",
                            "enableSecureChannel": True,
                            "isDefault": True,
                            "key": "storage account key",
                            "name": "mystorage.blob.core.windows.net",
                        }
                    ]
                },
                "tier": "Premium",
            },
            "tags": {"key1": "val1"},
        },
    ).result()
    print(response)
[ 57 ]
def METHOD_NAME(Obs_xml):
    output_file = Obs_xml.getObservationFilename()
    Obs_data = ObsDATA("amanzi-output/" + output_file)
    Obs_data.getObservationData()
    coords = Obs_xml.getAllCoordinates()
    for obs in Obs_data.observations.values():
        region = obs.region
        obs.coordinate = coords[region]
    return Obs_data
[ 557, 365, 171 ]
def METHOD_NAME():
    scheme = request.environ["wsgi.url_scheme"]
    host = request.host
    url = f"{scheme}://{host}"
    return jsonify(
        {
            **data["openAPI_spec"],
            **{"servers": [{"url": url}]},
        }
    )
[ 19, 1457 ]
def METHOD_NAME(self):
    self.init_driver()
    self.init_driver_config()
    self.init_other_config()
[ 1440 ]
def METHOD_NAME(self, view, row, Column): self.propertiesDialog()
[ 843, 3082 ]
def METHOD_NAME(self) -> Optional[str]:
    """
    When present, the value can be passed to a subsequent query call (together
    with the same query and scopes used in the current request) to retrieve the
    next page of data.
    """
    return pulumi.get(self, "skip_token")
[ 2423, 466 ]
def METHOD_NAME(self):
    """Test moving to a timestep by typing."""
    self.plot()
    self._control.TimeStepDisplay.setText('1.2')
    self._control.TimeStepDisplay.editingFinished.emit()
    self.assertEqual(float(self._control.TimeDisplay.text()), 1)
    self.assertImage('testTimeStepEdit.png')
[ 9, 104, 367, 2004 ]
def METHOD_NAME(func: Callable) -> Callable:
    """
    Attaches "name" and "description" options to the command.
    """
    options = [
        click.option(
            "-n",
            "--name",
            "name",
            type=str,
            help="The name of the generated testplan and test",
        ),
        click.option(
            "-d",
            "--description",
            "description",
            type=str,
            help="Description of the result",
        ),
    ]
    for option_decorator in options[::-1]:
        func = option_decorator(func)
    return func
[ 41, 145, 1881 ]
def METHOD_NAME(self):
    with patch("karrio.mappers.dicom.proxy.http") as mock:
        mock.return_value = ErrorResponseJSON
        parsed_response = (
            Tracking.fetch(self.TrackingRequest).from_(gateway).parse()
        )

        self.assertEqual(
            DP.to_dict(parsed_response), DP.to_dict(ParsedErrorResponse)
        )
[ 9, 214, 168, 17 ]
async def METHOD_NAME(
    self,
    accesspoint_id,
    lookup=True,
    lookup_url="https://lookup.homematic.com:48335/getHost",
    **kwargs,
):
    self.set_token_and_characteristics(accesspoint_id)
    self._lookup_url = lookup_url  # needed for testcases

    if lookup:
        result = await self.api_call(
            lookup_url, json.dumps(self.clientCharacteristics), full_url=True
        )
        self._urlREST = result["urlREST"]
        self._urlWebSocket = result["urlWebSocket"]
    else:  # pragma: no cover
        self._urlREST = "https://ps1.homematic.com:6969"
        self._urlWebSocket = "wss://ps1.homematic.com:8888"
[ 176 ]
def METHOD_NAME(self, path, suffix, temp_dir):
    ttx_filename = os.path.basename(path)
    savepath = os.path.join(temp_dir, ttx_filename.replace(".ttx", suffix))
    font = TTFont(recalcBBoxes=False, recalcTimestamp=False)
    font.importXML(path)
    font.save(savepath, reorderTables=None)
    return font, savepath
[ 296, 2584 ]
def METHOD_NAME(bytes_, encoding=None):
    if isinstance(bytes_, bytes):
        return bytes_.METHOD_NAME(_get_encoding())
    else:
        return bytes_
[ 1268 ]