text (string, lengths 15 to 7.82k)
ids (sequence, lengths 1 to 7)
def METHOD_NAME():
    with pytest.raises(CheckError):

        @repository
        def invalid_repository(_invalid_arg: str):
            return []
[ 9, 532, 1230 ]
async def METHOD_NAME():
    async for page in self.pages:
        for response in page.executions:
            yield response
[ 958, 1443 ]
def METHOD_NAME(self) -> None:
    self.clear()
    self.video_player.draw()
[ 69, 1100 ]
def METHOD_NAME(self, patch_get):
    patch_get.assume_fileid_for_request(
        lambda url, params: f"last-month-{params['tablehead'].split()[-1]}")

    object_name = 'Crab'
    heasarc = Heasarc()

    month_ago = (Time.now() - TimeDelta(30 * u.day)).isot[:10]
    today = Time.now().isot[:10]
    T = month_ago + " .. " + today

    def Q(mission):
        return heasarc.query_object(
            object_name,
            mission=mission,
            time=T,
            resultmax=10000,
            radius='1000 deg'
        )

    with self.isdc_context:
        table_isdc = Q('integral_rev3_scw')

    table_heasarc = Q('intscw')

    # heasarc synchronizes twice a month, and it might or might not be the same at the request time
    assert len(table_isdc) >= len(table_heasarc)
[ 9, 979, 104 ]
def METHOD_NAME():
    default_minute = config_loader.get_config_str("Flask", "minute_processed_paste")
    threshold_stucked_module = config_loader.get_config_int("Module_ModuleInformation", "threshold_stucked_module")
    log_select = {10, 25, 50, 100}
    log_select.add(max_dashboard_logs)
    log_select = list(log_select)
    log_select.sort()

    # Check if update in progress
    background_update = False
    update_message = ''
    if ail_updates.is_update_background_running():
        background_update = True
        update_message = ail_updates.AILBackgroundUpdate(
            ail_updates.get_update_background_version()).get_message()

    return render_template("index.html", default_minute=default_minute,
                           threshold_stucked_module=threshold_stucked_module,
                           log_select=log_select, selected=max_dashboard_logs,
                           background_update=background_update,
                           update_message=update_message)
[ 724 ]
def METHOD_NAME(r_key, regex, item_id, content, max_time=30):
    proc = Proc(target=_regex_match, args=(r_key, regex, content))
    try:
        proc.start()
        proc.join(max_time)
        if proc.is_alive():
            proc.terminate()
            # Statistics.incr_module_timeout_statistic(r_key)
            err_mess = f"{r_key}: processing timeout: {item_id}"
            logger.info(err_mess)
            return False
        else:
            if r_serv_cache.exists(r_key):
                r_serv_cache.delete(r_key)
                return True
            else:
                r_serv_cache.delete(r_key)
                return False
    except KeyboardInterrupt:
        print("Caught KeyboardInterrupt, terminating regex worker")
        proc.terminate()
        sys.exit(0)
[ 211, 590 ]
def METHOD_NAME(self, obj):
    """Get the current user's report on the community post"""
    request = self.context['request'] if 'request' in self.context else None
    if request and request.user.is_authenticated:
        profile = request.user.profile
        return obj.reported_by.filter(id=profile.id).exists()
    return False
[ 19, 220, 21, 13239 ]
def METHOD_NAME(dataset):
    """Converts `dataset` for use with the output of `create_simple_keras_model`.

    Args:
      dataset: An instance of `tf.data.Dataset` to read from.

    Returns:
      An instance of `tf.data.Dataset` after conversion.
    """

    def map_fn(example):
        return collections.OrderedDict([
            ('x', tf.reshape(example['pixels'], [-1])),
            ('y', example['label']),
        ])

    return dataset.map(map_fn)
[ 4098, 126, 280, 12728 ]
def METHOD_NAME(cls, options=None):
    """Disassociate an environment"""
    cls.command_sub = 'remove-environment'
    return cls.execute(cls._construct_command(options))
[ 188, 1027 ]
def METHOD_NAME():
    con = op.get_bind()
    devices = con.execute(f'select * from {get_inv()}.device')
    if not list(devices):
        return

    init_app()
    remove_manual_actions()
    change_lot()
    change_tags()
    remove_placeholders()
[ 1502 ]
def METHOD_NAME(self, session):
    data = self.deserialize_http_content(session)
    self.ctx.set_var(
        "instance",
        data,
        schema_builder=self._build_schema_on_200
    )
[ 69, 1072 ]
def METHOD_NAME(self) -> Type[PTFastBiasCorrectionAlgoBackend]: return PTFastBiasCorrectionAlgoBackend
[ 19, 2602, 1173, 2451, 6080, 3127, 3847 ]
def METHOD_NAME(self): return self.cmd_id is not None
[ 137, 1660, 169 ]
def METHOD_NAME(self):
    p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

    # Build the blockchain
    self.tip = int(self.nodes[0].getbestblockhash(), 16)
    self.block_time = (
        self.nodes[0].getblock(self.nodes[0].getbestblockhash())["time"] + 1
    )

    self.blocks = []

    # Get a pubkey for the coinbase TXO
    coinbase_key = ECKey()
    coinbase_key.generate()
    coinbase_pubkey = coinbase_key.get_pubkey().get_bytes()

    # Create the first block with a coinbase output to our key
    height = 1
    block = create_block(
        self.tip, create_coinbase(height, coinbase_pubkey), self.block_time
    )
    self.blocks.append(block)
    self.block_time += 1
    block.solve()
    # Save the coinbase for later
    self.block1 = block
    self.tip = block.sha256
    height += 1

    # Bury the block 100 deep so the coinbase output is spendable
    for i in range(100):
        block = create_block(self.tip, create_coinbase(height), self.block_time)
        block.solve()
        self.blocks.append(block)
        self.tip = block.sha256
        self.block_time += 1
        height += 1

    # Create a transaction spending the coinbase output with an invalid (null) signature
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
    tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE])))
    tx.calc_sha256()

    block102 = create_block(self.tip, create_coinbase(height), self.block_time)
    self.block_time += 1
    block102.vtx.extend([tx])
    block102.hashMerkleRoot = block102.calc_merkle_root()
    block102.rehash()
    block102.solve()
    self.blocks.append(block102)
    self.tip = block102.sha256
    self.block_time += 1
    height += 1

    # Bury the assumed valid block 2100 deep
    for i in range(2100):
        block = create_block(self.tip, create_coinbase(height), self.block_time)
        block.nVersion = 4
        block.solve()
        self.blocks.append(block)
        self.tip = block.sha256
        self.block_time += 1
        height += 1

    self.nodes[0].disconnect_p2ps()

    # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
    self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
    self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])

    p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
    p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
    p2p2 = self.nodes[2].add_p2p_connection(BaseNode())

    # send header lists to all three nodes
    p2p0.send_header_for_blocks(self.blocks[0:2000])
    p2p0.send_header_for_blocks(self.blocks[2000:])
    p2p1.send_header_for_blocks(self.blocks[0:2000])
    p2p1.send_header_for_blocks(self.blocks[2000:])
    p2p2.send_header_for_blocks(self.blocks[0:200])

    # Send blocks to node0. Block 102 will be rejected.
    self.send_blocks_until_disconnected(p2p0)
    self.assert_blockchain_height(self.nodes[0], 101)

    # Send all blocks to node1. All blocks will be accepted.
    for i in range(2202):
        p2p1.send_message(msg_block(self.blocks[i]))
    # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
    p2p1.sync_with_ping(400)
    assert_equal(
        self.nodes[1].getblock(self.nodes[1].getbestblockhash())["height"], 2202
    )

    # Send blocks to node2. Block 102 will be rejected.
    self.send_blocks_until_disconnected(p2p2)
    self.assert_blockchain_height(self.nodes[2], 101)
[ 22, 9 ]
def METHOD_NAME(create_surf_file_in_directory):
    smooth = fs.SurfaceSmooth()

    # Test underlying command
    assert smooth.cmd == "mri_surf2surf"

    # Test mandatory args exception
    with pytest.raises(ValueError):
        smooth.run()

    # Create testing files
    surf, cwd = create_surf_file_in_directory

    # Test input settings
    smooth.inputs.in_file = surf
    smooth.inputs.subject_id = "fsaverage"
    fwhm = 5
    smooth.inputs.fwhm = fwhm
    smooth.inputs.hemi = "lh"

    # Test the command line
    assert smooth.cmdline == (
        "mri_surf2surf --cortex --fwhm 5.0000 --hemi lh --sval %s --tval %s/lh.a_smooth%d.nii --s fsaverage"
        % (surf, cwd, fwhm)
    )

    # Test identity
    shmooth = fs.SurfaceSmooth(
        subject_id="fsaverage",
        fwhm=6,
        in_file=surf,
        hemi="lh",
        out_file="lh.a_smooth.nii",
    )
    assert smooth != shmooth
[ 9, -1 ]
def METHOD_NAME():
    # given
    export_info = {}

    # when
    warehouse_headers = get_warehouses_headers(export_info)

    # then
    assert warehouse_headers == []
[ 9, 19, 7381, 2131, 7382, 47, 5295 ]
def METHOD_NAME():
    all_versions = {"1.0", "1.1", "2.0", "2.1", "3.0", "3.1"}
    unaffected_ranges = [VersionRange.from_scheme_version_spec_string("semver", "< 1.2")]
    affected_ranges = []
    resolved_ranges = [VersionRange.from_scheme_version_spec_string("semver", ">= 3.0")]

    unaffected_versions, affected_versions = categorize_versions(
        all_versions,
        unaffected_ranges,
        affected_ranges,
        resolved_ranges,
    )

    assert len(unaffected_versions) == 4
    assert "1.0" in unaffected_versions
    assert "1.1" in unaffected_versions
    assert "3.0" in unaffected_versions
    assert "3.1" in unaffected_versions

    assert len(affected_versions) == 2
    assert "2.0" in affected_versions
    assert "2.1" in affected_versions
[ 9, 8689, 295, 529, 859, 2149 ]
def METHOD_NAME(self, jobID):
    """Delete logging records for given jobs"""
    if not jobID:
        return S_OK()

    # Make sure that we have a list of strings of jobIDs
    if isinstance(jobID, int):
        jobList = [str(jobID)]
    elif isinstance(jobID, str):
        jobList = jobID.replace(" ", "").split(",")
    else:
        jobList = list(str(j) for j in jobID)

    req = f"DELETE FROM LoggingInfo WHERE JobID IN ({','.join(jobList)})"
    return self._update(req)
[ 34, 202 ]
def METHOD_NAME(self) -> None: self.finish_called = True
[ 1239 ]
def METHOD_NAME(request):
    request.cls.queue_ids = {}  # type: Dict[str, List[str]]
    request.cls.distribution_policy_ids = {}  # type: Dict[str, List[str]]
    request.cls.exception_policy_ids = {}  # type: Dict[str, List[str]]
    request.cls.classification_policy_ids = {}  # type: Dict[str, List[str]]
    request.cls.worker_ids = {}  # type: Dict[str, List[str]]
    request.cls.job_ids = {}  # type: Dict[str, List[str]]
[ 15, 2 ]
def METHOD_NAME(enum_class): return extract_comparable_values_from_enum_values(*enum_class.__members__.values())
[ 297, 13446, 199, 280, 1206, 2 ]
def METHOD_NAME(): """Test if argument video_length of RecordVideo works properly.""" env = gym.make("CartPole-v1", render_mode="rgb_array_list") env._max_episode_steps = 20 env = RecordVideoV0(env, "videos", step_trigger=lambda x: x == 0, video_length=10) env.reset() for _ in range(10): action = env.action_space.sample() env.step(action) assert env.recording action = env.action_space.sample() env.step(action) assert not env.recording env.close() assert os.path.isdir("videos") mp4_files = [file for file in os.listdir("videos") if file.endswith(".mp4")] assert len(mp4_files) == 1 shutil.rmtree("videos")
[ 9, 148, 1781, 799 ]
def METHOD_NAME(self):
    client = Client()
    response = client.get("/fairs/2017/exhibitors/")
    self.assertEqual(response.status_code, 302)
    self.assertTrue("?next=" in response.url)
    response = client.get("/fairs/2017/exhibitors/view")
    self.assertEqual(response.status_code, 302)
    self.assertTrue("?next=" in response.url)
[ 9, 8273, 1179, 2004, 17732 ]
def METHOD_NAME(self, transport):
    """Handle that connection was established."""
    _LOGGER.debug("Connected to companion device %s:%d", self.host, self.port)
    self.transport = transport
[ 550, 2175 ]
def METHOD_NAME(path: str) -> dict:
    raw_json = load_json_from_file(path)
    json_path = os.path.dirname(path)
    # we want relative paths to be relative to where the mapping file is, not where
    # the executing terminal is, so check if we have an absolute path and if not,
    # append the relative path to the path of the mapping file
    for key, value in raw_json.items():
        if value != os.path.abspath(value):
            path = os.path.join(json_path, value)
        # error check that wherever the path leads, there is a tarball
        if not os.path.isfile(path):
            eprint(f"Tarball does not exist at path: {path}")
        raw_json[key] = path
    return raw_json
[ 557, 754, 445, 280, 171 ]
def METHOD_NAME(self, mocked_init):
    lm_rsh = RSH('', {}, None, None, None)
    lm_info = {'env'    : {'test_env': 'test_value'},
               'env_sh' : 'env/lm_ssh.sh',
               'command': '/usr/bin/ssh'}
    lm_rsh._init_from_info(lm_info)
    lm_env = lm_rsh.get_launcher_env()

    self.assertIn('. $RP_PILOT_SANDBOX/%s' % lm_info['env_sh'], lm_env)
[ 9, 19, 3471, 485 ]
def METHOD_NAME(self):
    GoDaddyMockHttp.type = None
    GoDaddyDNSDriver.connectionCls.conn_class = GoDaddyMockHttp
    self.driver = GoDaddyDNSDriver(*DNS_PARAMS_GODADDY)
[ 0, 1 ]
def METHOD_NAME() -> None:
    for delimiter, test_name in (
        ("", "test_string_split_empty_string_delimiter"),
        (None, "test_string_split_no_delimiter"),
    ):
        node = onnx.helper.make_node(
            "StringSplit",
            inputs=["x"],
            outputs=["substrings", "length"],
            delimiter=delimiter,
            maxsplit=None,
        )

        x = np.array(
            ["hello world !", " hello world !", " hello world ! "]
        ).astype(object)

        substrings = np.array(
            [
                ["hello", "world", "!"],
                ["hello", "world", "!"],
                ["hello", "world", "!"],
            ]
        ).astype(object)

        length = np.array([3, 3, 3], dtype=np.int64)

        expect(
            node,
            inputs=[x],
            outputs=[substrings, length],
            name=test_name,
        )
[ 294, 35, 144, 7871 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_url_param(
            "certificateName", self.ctx.args.certificate_name,
            required=True,
        ),
        **self.serialize_url_param(
            "deploymentName", self.ctx.args.deployment_name,
            required=True,
        ),
        **self.serialize_url_param(
            "resourceGroupName", self.ctx.args.resource_group,
            required=True,
        ),
        **self.serialize_url_param(
            "subscriptionId", self.ctx.subscription_id,
            required=True,
        ),
    }
    return parameters
[ 274, 386 ]
def METHOD_NAME(self) -> Optional[FilterPruningStatistics]:
    """
    Returns statistics of the filter pruning algorithm. If statistics
    have not been collected, `None` will be returned.

    :return: Instance of the `FilterPruningStatistics` class.
    """
    return self._storage.get("filter_pruning")
[ 527, 2421 ]
def METHOD_NAME(caller):
    """Turns caller into a decorator.

    Unlike decorator module, function signature is not preserved.

    Parameters
    ----------
    caller: caller(f, *args, **kwargs)
    """

    def decor(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            return caller(f, *args, **kwargs)

        return wrapper

    return decor
[ 972 ]
def METHOD_NAME(repo_id, begin_date=None, end_date=None):
    """
    Timeseries of pull request acceptance rate (expressed as the ratio of pull
    requests merged on a date to the count of pull requests opened on a date)

    :param repo_group_id: The repository's repo_group_id
    :param repo_id: The repository's repo_id, defaults to None
    :return: DataFrame with ratio/day
    """
    if not begin_date:
        begin_date = '1970-1-1 00:00:01'
    if not end_date:
        end_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    pr_acceptance_rate_sql = s.sql.text("""
        SELECT SUM(EXTRACT(EPOCH FROM (pr_merged_at - pr_created_at))) / COUNT(*) AS duration
        FROM pull_requests
        JOIN repo ON pull_requests.repo_id = repo.repo_id
        WHERE pull_requests.repo_id = :repo_id
        AND pr_merged_at IS NOT NULL
        AND pr_created_at BETWEEN :begin_date AND :end_date
    """)

    results = pd.read_sql(pr_acceptance_rate_sql, engine,
                          params={'repo_id': repo_id, 'begin_date': begin_date,
                                  'end_date': end_date})

    if results.iloc[0]['duration'] is None:
        results.iloc[0]['duration'] = -1
    else:
        results.iloc[0]['duration'] = results.iloc[0]['duration'] / 60 / 60 / 24

    return results
[ 15802, 4381, 2205 ]
def METHOD_NAME(self, event):
    if not event.is_directory:
        self.queue.put(event)
[ 69, 152 ]
def METHOD_NAME(self) -> str:
    """
    Resource type.
    """
    return pulumi.get(self, "type")
[ 44 ]
def METHOD_NAME():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--reload", help="Clear setup config, and reload", action="store_true"
    )
    return parser.METHOD_NAME()
[ 214, 335 ]
def METHOD_NAME(args):
    cmd = [
        '{{devops_home}}/bin/ybcloud.sh', '{{cloud}}',
        '--node_metadata',
        '{"ip":"%s","sshPort":{{custom_ssh_port}},"sshUser":"{{ssh_user}}","nodeName":"@@DEFAULT_NODE_NAME@@"}'
        % (args.ip),
        'instance', 'install-node-agent',
        '--vars_file', '{{vars_file}}',
        '--vault_password_file', '{{vault_password_file}}',
        '--private_key_file', '{{private_key_file}}',
        '--ssh_user', '{{ssh_user}}',
        '--yba_url', args.yba_url,
        '--api_token', args.api_token,
        '--node_name', args.node_name,
        '--node_agent_ip', args.ip,
        '--node_agent_port', '{{node_agent_port}}',
        '--provider_id', '{{provider_id}}',
        '--instance_type', args.instance_type,
        '--zone_name', args.zone_name
    ]
    cmd.append('@@DEFAULT_NODE_NAME@@')
    return cmd
[ 363, 1716, 1849, 462 ]
def METHOD_NAME(self):
    spec = self.spec
    config_args = [
        "--with-tcl={0}".format(spec["tcl"].libs.directories[0]),
        "--x-includes={0}".format(spec["libx11"].headers.directories[0]),
        "--x-libraries={0}".format(spec["libx11"].libs.directories[0]),
    ]
    config_args += self.enable_or_disable("xft")
    config_args += self.enable_or_disable("xss")
    return config_args
[ 111, 335 ]
def METHOD_NAME(monkeypatch):
    monkeypatch.setenv("MOLECULE_EPHEMERAL_DIRECTORY", "foo/bar")
    assert os.path.isabs(scenario.ephemeral_directory())
[ 9, 7850, 2851, 15421, 293, 2499, 485 ]
def METHOD_NAME(name, req):
    action = get_action()
    body = util.extract_json(req.body)
    return action.export(name, body=body)
[ 660, 294 ]
def METHOD_NAME(self, value):
    self.ui.horizontalSlider_y.blockSignals(True)
    self.ui.horizontalSlider_y.setValue(value * 100)
    self.ui.horizontalSlider_y.blockSignals(False)
[ 320, 15636, 1180 ]
def METHOD_NAME(path, config=None):
    """Ensure that the path, or the root of the current package (if path is
    in a package), is in sys.path.
    """
    # FIXME add any src-looking dirs seen too... need to get config for that
    log.debug('Add path %s' % path)
    if not path:
        return []
    added = []
    parent = os.path.dirname(path)
    if (parent and os.path.exists(os.path.join(path, '__init__.py'))):
        added.extend(METHOD_NAME(parent, config))
    elif not path in sys.path:
        log.debug("insert %s into sys.path", path)
        sys.path.insert(0, path)
        added.append(path)
    if config and config.srcDirs:
        for dirname in config.srcDirs:
            dirpath = os.path.join(path, dirname)
            if os.path.isdir(dirpath):
                sys.path.insert(0, dirpath)
                added.append(dirpath)
    return added
[ 238, 157 ]
def METHOD_NAME(scenario):
    """
    Obtain a list of block names and a list of their arguments from the
    command line arguments list.

    :return: A tuple block names, block arguments.
    :rtype: tuple
    """
    block_names = []
    block_args = []
    number_of_blocks = 0
    for token in scenario:
        logging.debug("Token %s", token)
        # If there is no '=' character in the token, consider it as a block name.
        # Initialize the block arguments to an empty dict.
        if '=' not in token:
            logging.debug("- block name")
            block_names.append(token)
            block_args.append({})
            number_of_blocks += 1
            continue
        # Otherwise consider the token to be a block argument in the form
        # key=value.
        logging.debug("- argument")
        # The first '=' in the token separates name from value.
        # The value may contain other '=' characters
        # (e.g. in util.Eval node='node.form = "_"').
        attribute_name, attribute_value = token.split('=', 1)
        if number_of_blocks == 0:
            raise RuntimeError(
                'Block attribute pair %r without a prior block name', token)
        # Put it as a new argument for the previous block
        if attribute_value.isdigit():
            attribute_value = int(attribute_value)
        block_args[-1][attribute_name] = attribute_value
    return block_names, block_args
[ 214, 462, 534, 134 ]
def METHOD_NAME():
    c1 = Condition(lambda cli: True)
    assert isinstance(c1, CLIFilter)
    assert not isinstance(c1, SimpleFilter)

    c2 = Condition(lambda: True)
    assert not isinstance(c2, CLIFilter)
    assert isinstance(c2, SimpleFilter)

    c3 = c1 | c2
    assert not isinstance(c3, CLIFilter)
    assert not isinstance(c3, SimpleFilter)

    c4 = Condition(lambda cli: True)
    c5 = Condition(lambda cli: True)
    c6 = c4 & c5
    c7 = c4 | c5
    assert isinstance(c6, CLIFilter)
    assert isinstance(c7, CLIFilter)
    assert not isinstance(c6, SimpleFilter)
    assert not isinstance(c7, SimpleFilter)

    c8 = Condition(lambda *args: True)
    assert isinstance(c8, CLIFilter)
    assert isinstance(c8, SimpleFilter)
[ 9, 615, 527 ]
def METHOD_NAME() -> None:
    cert_monitor = CertMonitor()
    cert_monitor.monitor()
[ 57 ]
def METHOD_NAME(self):
    for entry in self.driver.get_log('browser'):
        m = entry['message']
        entry['message'] = m[m.find('"'):].replace('\\n', '').strip('" ')
        self.messages.append(entry)
[ 1047, 1107 ]
def METHOD_NAME(self):
    if not self.outputs['Random'].is_linked:
        return
    params = [si.sv_get(default=[[]], deepcopy=False) for si in self.inputs]

    matching_f = list_match_func[self.list_match]
    desired_levels = [1, 1, 1]
    ops = self.output_numpy
    concatenate = 'APPEND' if self.correct_output == 'NONE' else "EXTEND"

    result = recurse_f_level_control(params, ops, random_vector, matching_f,
                                     desired_levels, concatenate=concatenate)

    self.outputs[0].sv_set(result)
[ 356 ]
def METHOD_NAME(self, request_or_url, spider=None, redirect=True, **kwargs):
    from twisted.internet import reactor

    if isinstance(request_or_url, Request):
        request = request_or_url
    else:
        url = any_to_uri(request_or_url)
        request = Request(url, dont_filter=True, **kwargs)
        if redirect:
            request.meta["handle_httpstatus_list"] = SequenceExclude(
                range(300, 400)
            )
        else:
            request.meta["handle_httpstatus_all"] = True
    response = None
    try:
        response, spider = threads.blockingCallFromThread(
            reactor, self._schedule, request, spider
        )
    except IgnoreRequest:
        pass
    self.populate_vars(response, request, spider)
[ 1047 ]
def METHOD_NAME(self):
[ 9, 699, 69, 374, 2707, 1471, 69 ]
def METHOD_NAME():
    splitter = transform.InstanceSplitter(
        target_field=FieldName.TARGET,
        is_pad_field=FieldName.IS_PAD,
        start_field=FieldName.START,
        forecast_start_field=FieldName.FORECAST_START,
        instance_sampler=transform.ExpectedNumInstanceSampler(num_instances=4),
        past_length=100,
        future_length=10,
        time_series_fields=["dynamic_feat", "observed_values"],
    )

    splitter2 = clone(
        splitter,
        {
            "instance_sampler": transform.ExpectedNumInstanceSampler(
                num_instances=5
            )
        },
    )

    assert equals(splitter, clone(splitter))
    assert not equals(splitter, splitter2)
[ 9, 89, 574 ]
def METHOD_NAME(source_file, rulestring):
    """Run a roundtrip test given a sql file and a rule.

    We take a file buffer, lint, fix and lint, finally checking that
    the file fails initially but not after fixing.
    """
    if isinstance(source_file, str):
        # If it's a string, treat it as a path so lets load it.
        with open(source_file) as f:
            source_file = StringIO(f.read())

    filename = "testing.sql"
    # Lets get the path of a file to use
    tempdir_path = tempfile.mkdtemp()
    filepath = os.path.join(tempdir_path, filename)
    # Open the example file and write the content to it
    with open(filepath, mode="w") as dest_file:
        for line in source_file:
            dest_file.write(line)

    runner = CliRunner()
    # Check that we first detect the issue
    result = runner.invoke(lint, ["--rules", rulestring, "--dialect=ansi", filepath])
    assert result.exit_code == 1
    # Fix the file (in force mode)
    result = runner.invoke(
        fix, ["--rules", rulestring, "--dialect=ansi", "-f", filepath]
    )
    assert result.exit_code == 0
    # Now lint the file and check for exceptions
    result = runner.invoke(lint, ["--rules", rulestring, "--dialect=ansi", filepath])
    assert result.exit_code == 0
    shutil.rmtree(tempdir_path)
[ 1680, 3544, 9 ]
def METHOD_NAME(modules): return modules.METHOD_NAME
[ 171 ]
def METHOD_NAME(self):
    self.requires("anyrpc/1.0.2")
    self.requires("qt/6.4.2")
[ 5186 ]
def METHOD_NAME(self):
    dom = FakeDom()
    f = VMFreezer(FakeVM(dom))
    orig = libvirt.virDomain.fsFreeze
    for name in "__doc__", "__name__":
        assert getattr(f.fsFreeze, name) == getattr(orig, name)
[ 9, 8715 ]
def METHOD_NAME(self): self.assertEqual(answer("What is 2 multiplied by -2 multiplied by 3?"), -12)
[ 9, 107, 3651 ]
def METHOD_NAME(self, *args, **options):
    """
    This command sends notifications about newly created unreviewed suggestions
    that were submitted, unapproved or unrejected in the last 7 days. Recipients
    of notifications are users with permission to review them, as well as
    authors of previous translations or comments of the same string.

    The command is designed to run on a weekly basis.
    """
    self.stdout.write("Sending suggestion notifications.")

    today = calendar.day_name[timezone.datetime.today().weekday()]
    day = calendar.day_name[settings.SUGGESTION_NOTIFICATIONS_DAY]

    if today != day and not options["force"]:
        raise CommandError(f"Skipping. Command runs every {day}. Today is {today}.")

    suggestions = self.get_suggestions()

    data = defaultdict(set)

    for suggestion in suggestions:
        self.extract_notifications_data(data, suggestion)

    pks = [user.pk for user in data.keys()]
    recipients = User.objects.filter(
        pk__in=pks, profile__unreviewed_suggestion_notifications=True
    )

    for recipient in recipients:
        project_locales = data[recipient]
        description = render_to_string(
            "projects/suggestion_notification.jinja",
            {"project_locales": project_locales},
        )
        notify.send(
            recipient, recipient=recipient, verb="", description=description
        )

    self.stdout.write(f"Suggestion notifications sent to {len(recipients)} users.")
[ 276 ]
def METHOD_NAME(self): pass
[ 709, 710 ]
def METHOD_NAME(
    user_id: str, exploration_id: str
) -> Optional[datetime.datetime]:
    """Fetches the datetime the exploration was last rated by this user, or
    None if no rating has been awarded. Currently this function is only used
    for testing purposes.

    Args:
        user_id: str. The id of the user.
        exploration_id: str. The id of the exploration.

    Returns:
        datetime.datetime or None. When the exploration was last rated by the
        user, or None if the user has not previously rated the exploration.
    """
    exp_user_data_model = user_models.ExplorationUserDataModel.get(
        user_id, exploration_id)

    return exp_user_data_model.rated_on if exp_user_data_model else None
[ 19, 1646, 5773, 5774 ]
def METHOD_NAME(self, image):
    self.image_show_lock.acquire()
    self.image_to_be_shown = image
    self.image_to_be_shown_updated = True
    self.image_show_lock.release()
[ 697, 660 ]
def METHOD_NAME(const, table):
    metadata = table.metadata
    convention = _get_convention(metadata.naming_convention, type(const))

    if isinstance(const.name, conv):
        return const.name
    elif convention is not None and \
            not isinstance(const.name, conv) and \
            (
                const.name is None or
                "constraint_name" in convention or
                isinstance(const.name, _defer_name)
            ):
        return conv(
            convention % ConventionDict(const, table, metadata.naming_convention)
        )
    elif isinstance(convention, _defer_none_name):
        return None
[ 1126, 156, 43, 410 ]
def METHOD_NAME(layer, init='weight'): run_init(str_init_func_map['eye'], local_extract(locals()), init, layer)
[ 4421 ]
def METHOD_NAME(self, backend):
    """Builds the widgets necessary to configure the backend. If it doesn't
    know how to render a widget, it simply skips it.

    @param backend: the backend that is being configured
    """

    # remove the old parameters UIs
    def _remove_child(self, child, data=None):
        self.remove(child)

    self.foreach(functools.partial(_remove_child, self), None)

    # add new widgets
    backend_parameters = backend.get_parameters()
    if backend_parameters[GenericBackend.KEY_DEFAULT_BACKEND]:
        # if it's the default backend, the user should not mess with it
        return
    for parameter_name, widget in self.parameter_widgets:
        if parameter_name in backend_parameters:
            # FIXME I am not 100% about this change
            self.pack_start(widget(backend), True, True, 0)
    self.show_all()
[ 1920 ]
def METHOD_NAME(self): """Cleanup after test case execution""" self._s1ap_wrapper.cleanup()
[ 531, 481 ]
def METHOD_NAME(self, path: path_type, recursive: bool = False):
    """
    Alias for FileSystem.delete
    """
    return self.delete(path, recursive=recursive)
[ 5528 ]
def METHOD_NAME(
    cloud_mode: bool,
    cloud_base_url: str | None,
    cloud_access_token: str | None,
    cloud_organization_id: str | None,
) -> GXCloudConfig | None:
    if not cloud_mode:
        return None

    cloud_config = CloudDataContext.get_cloud_config(
        cloud_base_url=cloud_base_url,
        cloud_access_token=cloud_access_token,
        cloud_organization_id=cloud_organization_id,
    )
    return cloud_config
[ 176, 4054, 200 ]
def METHOD_NAME(self, path, format, *format_args):
    """
    Equivalent to zconfig_put, accepting a format specifier and variable
    argument list, instead of a single string value.
    """
    utils.lib.zconfig_putf(self._p, utils.to_bytes(path), format, *format_args)
[ -1 ]
def METHOD_NAME(path): """ IterateZip opens the zip file at path and returns a generator of entry objects for each file in it. """ with zipfile.ZipFile(path, 'r') as zip_file: for info in zip_file.infolist(): if info.filename.endswith('/'): continue yield FileEntry(info.filename, None, zip_file.open(info))
[ 3972, 1426 ]
def METHOD_NAME(self, file): self._files.append(file)
[ 238, 171 ]
def METHOD_NAME(self): """An organization administrator should be able to update a file depository.""" organization_access = OrganizationAccessFactory(role=ADMINISTRATOR) playlist = PlaylistFactory(organization=organization_access.organization) file_depository = FileDepositoryFactory(playlist=playlist) jwt_token = UserAccessTokenFactory(user=organization_access.user) data = {"title": "new title", "description": "Hello"} response = self.client.patch( f"/api/filedepositories/{file_depository.id!s}/", data, HTTP_AUTHORIZATION=f"Bearer {jwt_token}", content_type="application/json", ) self.assertEqual(response.status_code, 200) file_depository.refresh_from_db() self.assertEqual("new title", file_depository.title) self.assertEqual("Hello", file_depository.description)
[ 9, 58, 171, 5715, 86, 21, 1089 ]
def METHOD_NAME(
    dag_id,
    destination_dataset_project_id,
    chain='ethereum',
    notification_emails=None,
    load_start_date=datetime(2018, 7, 1),
    schedule_interval='0 0 * * *',
[ 56, 14058, 5791 ]
def METHOD_NAME(self, model):
    data = model.db.get_unit_operation_parameters("screen")

    model.fs.unit.load_parameters_from_database(use_default_removal=True)

    assert model.fs.unit.recovery_frac_mass_H2O[0].fixed
    assert (
        model.fs.unit.recovery_frac_mass_H2O[0].value
        == data["recovery_frac_mass_H2O"]["value"]
    )

    for (t, j), v in model.fs.unit.removal_frac_mass_comp.items():
        assert v.fixed
        if j == "foo":
            assert v.value == data["default_removal_frac_mass_comp"]["value"]
        else:
            assert v.value == data["removal_frac_mass_comp"][j]["value"]

    assert model.fs.unit.energy_electric_flow_vol_inlet.fixed
    assert (
        model.fs.unit.energy_electric_flow_vol_inlet.value
        == data["energy_electric_flow_vol_inlet"]["value"]
    )
[ 9, 557, 386 ]
def METHOD_NAME(self, inputs: Dict[str, Any]) -> Dict[str, Any]: return inputs
[ 1710 ]
def METHOD_NAME(self):
    if not any(output.is_linked for output in self.outputs):
        return

    vertices_s = self.inputs['Vertices'].sv_get(default=[[]])

    out_centers = []
    out_normals = []
    out_directions = []
    out_projections = []
    out_diffs = []
    out_distances = []
    for vertices in vertices_s:
        approx = linear_approximation(vertices)
        out_centers.append(approx.center)
        if self.mode == 'Line':
            line = approx.most_similar_line()
            out_directions.append(tuple(line.direction.normalized()))
            projections = []
            diffs = []
            distances = []
            for vertex in vertices:
                projection = line.projection_of_point(vertex)
                projections.append(tuple(projection))
                diff = projection - Vector(vertex)
                diffs.append(tuple(diff))
                distances.append(diff.length)
            out_projections.append(projections)
            out_diffs.append(diffs)
            out_distances.append(distances)
        elif self.mode == 'Plane':
            plane = approx.most_similar_plane()
            out_normals.append(tuple(plane.normal.normalized()))
            projections = []
            diffs = []
            distances = list(map(float, list(plane.distance_to_points(vertices))))
            projections_np = plane.projection_of_points(vertices)
            vertices_np = np.array(vertices)
            projections = list(map(tuple, list(projections_np)))
            diffs_np = projections_np - vertices_np
            diffs = list(map(tuple, list(diffs_np)))
            out_projections.append(projections)
            out_diffs.append(diffs)
            out_distances.append(distances)

    self.outputs['Center'].sv_set([out_centers])
    self.outputs['Normal'].sv_set([out_normals])
    self.outputs['Direction'].sv_set([out_directions])
    self.outputs['Projections'].sv_set(out_projections)
    self.outputs['Diffs'].sv_set(out_diffs)
    self.outputs['Distances'].sv_set(out_distances)
[ 356 ]
def METHOD_NAME(self, minipools):
    m_d = rp.get_contract_by_name("rocketMinipoolDelegate")
    m = rp.assemble_contract("rocketMinipool", address=minipools[0])
    mc = rp.get_contract_by_name("multicall3")
    lambs = [
        lambda x: (x, rp.seth_sig(m_d.abi, "getNodeFee"),
                   [((x, "NodeFee"), solidity.to_float)]),
        lambda x: (x, rp.seth_sig(m.abi, "getEffectiveDelegate"),
                   [((x, "Delegate"), None)]),
        lambda x: (x, rp.seth_sig(m.abi, "getPreviousDelegate"),
                   [((x, "PreviousDelegate"), None)]),
        lambda x: (x, rp.seth_sig(m.abi, "getUseLatestDelegate"),
                   [((x, "UseLatestDelegate"), None)]),
        lambda x: (x, rp.seth_sig(m.abi, "getNodeDepositBalance"),
                   [((x, "NodeOperatorShare"), lambda i: solidity.to_float(i) / 32)]),
        # get balances of minipool as well
        lambda x: (mc.address, [rp.seth_sig(mc.abi, "getEthBalance"), x],
                   [((x, "EthBalance"), solidity.to_float)])
    ]
    minipool_stats = {}
    batch_size = 10_000 // len(lambs)
    for i in range(0, len(minipools), batch_size):
        i_end = min(i + batch_size, len(minipools))
        log.debug(f"getting minipool stats for {i}-{i_end}")
        addresses = minipools[i:i_end]
        calls = [
            Call(*lamb(a))
            for a in addresses
            for lamb in lambs
        ]
        res = rp.multicall2_do_call(calls)
        # add data to mini pool stats dict (address => {func_name: value})
        # strip get from function name
        for (address, variable_name), value in res.items():
            if address not in minipool_stats:
                minipool_stats[address] = {}
            minipool_stats[address][variable_name] = value
    return minipool_stats
[ 19, 10076, 577 ]
def METHOD_NAME(
    **kwargs: Any
) -> HttpRequest:
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-07-01"))  # type: str
    accept = _headers.pop('Accept', "application/json")

    # Construct URL
    _url = kwargs.pop("template_url", "/providers/Microsoft.KubernetesConfiguration/operations")

    # Construct parameters
    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=_url,
        params=_params,
        headers=_headers,
        **kwargs
    )
[ 56, 245, 377 ]
def METHOD_NAME(self): """Returns the a list of the location(s) of the string.""" return self.location
[ 12193 ]
def METHOD_NAME(self):
[ 9, 366, 144, 2794 ]
def METHOD_NAME(self, mail): pass
[ 353, 3562 ]
def METHOD_NAME(self):
    atc, atc_dict = self._force_atc()
    self._login("osquery.change_automatictableconstruction",
                "osquery.view_automatictableconstruction")
    atc_dict["name"] = get_random_string(12)
    response = self.client.post(reverse("osquery:update_atc", args=(atc.pk,)),
                                atc_dict, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, "osquery/automatictableconstruction_detail.html")
    self.assertContains(response, atc_dict["name"])
    atc = response.context["object"]
    self.assertEqual(atc.name, atc_dict["name"])
[ 9, 86, 4938, 72 ]
def METHOD_NAME(input_path: str, output_path: str, file_name_regex_str: str) -> None:
    compiled_re = re.compile(file_name_regex_str)
    input_cmds = read_json_file(input_path)
    output_cmds = [
        item for item in input_cmds
        if compiled_re.match(os.path.basename(item['file']))
    ]
    logging.info(
        "Filtered compilation commands from %d to %d entries using the regex %s",
        len(input_cmds), len(output_cmds), file_name_regex_str)
    write_json_file(
        output_cmds, output_path,
        description_for_log="filtered compilation commands file")
[ 527, 296, 2458 ]
def METHOD_NAME(self):
    self.assertMarkdownRenders(
        'A very long long long long long long long long long long long long long long long long long long long '
        'long long long long long long long long long long long long long paragraph on 1 line.\n\n'
        'A new long long long long long long long long long long long long long long long '
        'long paragraph on 1 line.',
        '<p>A very long long long long long long long long long long long long long long long long long long '
        'long long long long long long long long long long long long long long paragraph on 1 line.</p>\n'
        '<p>A new long long long long long long long long long long long long long long long '
        'long paragraph on 1 line.</p>'
    )
[ 9, 988, 1759, 524, 534 ]
def METHOD_NAME(
    self, schema_name: str
) -> str:
    return '\n'.join([
        'IF NOT EXISTS (',
        f'SELECT * FROM information_schema.schemata WHERE schema_name = \'{schema_name}\')',
        f'BEGIN EXEC(\'CREATE SCHEMA {schema_name}\') END'
    ])
[ 56, 129, 135, 462 ]
def METHOD_NAME(self):
    self.apiclient = self.testClient.getApiClient()
    self.dbclient = self.testClient.getDbConnection()
    self.cleanup = []
    if not self.vpcSupported:
        self.skipTest("VPC is not supported on %s" % self.hypervisor)
    self.account = Account.create(
        self.apiclient,
        self.services["account"],
        admin=True,
        domainid=self.domain.id
    )
    self.cleanup.append(self.account)
    return
[ 0, 1 ]
def METHOD_NAME(self):
    self.realTest(3,0)
    self.realTest(0,0)
    self.realTest(0,3)
[ 9, 1866, 35 ]
def METHOD_NAME(self):
    self.assertEqual('default', self.processor.name)
    self.assertEqual('Single-scene NetCDF/CF inputs in xcube standard format',
                     self.processor.description)
    self.assertEqual('netcdf4', self.processor.input_reader)
    processor = DefaultInputProcessor(input_reader="zarr")
    self.assertEqual('zarr', processor.input_reader)
[ 9, 3048 ]
async def METHOD_NAME(app: Quart) -> None:
    async with app.app_context():
        g.foo = "bar"
        rendered = await render_template_string("{{ g.foo }}")
        assert rendered == "bar"

    async with app.test_request_context("/"):
        session["foo"] = "bar"
        rendered = await render_template_string(
            "{{ request.method }} {{ request.path }} {{ session.foo }}"
        )
        assert rendered == "GET / bar"
[ 9, 235, 671, 198 ]
def METHOD_NAME(self):
    mapping = model.make_mapping(
        {
            "csv_url": "http://pets.com",
            "entities": {
                "test": {
                    "schema": "Person",
                    "key_literal": "test",
                    "key": ["a", "b"],
                }
            },
        }
    )
    entities = mapping.map({})
    assert len(entities) == 0, entities.keys()
    entities = mapping.map({"a": "aaa", "b": "bbb"})
    ent0 = entities.get("test")
    assert ent0.id == sha1(b"testaaabbb").hexdigest(), ent0
    # assert False, sha1('test').hexdigest()
[ 9, 59, 1479 ]
def METHOD_NAME(_): run_training()
[ 57 ]
def METHOD_NAME(self, stop_time, result):
    import requests

    if self.completed_text is None:
        return
    self.run["result"] = result
    self.run["stop_time"] = stop_time
    self.run["elapsed_time"] = td_format(stop_time - self.run["start_time"])
    data = {
        "username": self.bot_name,
        "icon_emoji": self.icon,
        "text": self.get_completed_text(),
    }
    headers = {"Content-type": "application/json", "Accept": "text/plain"}
    requests.post(self.webhook_url, data=json.dumps(data), headers=headers)
[ 3097, 417 ]
def METHOD_NAME(self) -> str: """ The name of the database account. """ return pulumi.get(self, "name")
[ 156 ]
def METHOD_NAME(
    upstream_and_remote_with_multiple_sources,
):
    workdir, _ = upstream_and_remote_with_multiple_sources
    with cwd(workdir):
        call_real_packit(parameters=["--debug", "srpm", str(workdir)])
        srpm_path = list(Path.cwd().glob("*.src.rpm"))[0]
        assert srpm_path.exists()
        assert (Path.cwd() / "python-ogr.spec").exists()
        build_srpm(srpm_path)
[ 9, 13611, 462, 43, 157, 41, 107 ]
def METHOD_NAME(
    self, request: HttpRequest, **kwargs: Any
) -> Awaitable[AsyncHttpResponse]:
    """Runs the network request through the client's chained policies.

    >>> from azure.core.rest import HttpRequest
    >>> request = HttpRequest("GET", "https://www.example.org/")
    <HttpRequest [GET], url: 'https://www.example.org/'>
    >>> response = await client._send_request(request)
    <AsyncHttpResponse: 200 OK>

    For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart

    :param request: The network request you want to make. Required.
    :type request: ~azure.core.rest.HttpRequest
    :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
    :return: The response of your network call. Does not do error handling on your response.
    :rtype: ~azure.core.rest.AsyncHttpResponse
    """

    request_copy = deepcopy(request)
    request_copy.url = self._client.format_url(request_copy.url)
    return self._client.send_request(request_copy, **kwargs)
[ 353, 377 ]
def METHOD_NAME(
    pruning: str, quantization: bool, kwargs, model: Optional[Module]
):
    actual = recipe_template(
        pruning=pruning, quantization=quantization, model=model, **kwargs
    )
    assert actual
    manager = ScheduledModifierManager.from_yaml(file_path=actual)
    manager.apply(module=model)
[ 9, 3912, 581, 41, 385, 3303, 578 ]
def METHOD_NAME(self) -> set[str]: return {"default"}
[ 19, 1798, -1, 308 ]
def METHOD_NAME(self): assert convert_vector_catch('none') == 0
[ 9, 98, 3 ]
def METHOD_NAME(self, draw):
    # lite requires input has 4 dims and the min_val of shape should > 1
    in_shape = draw(
        st.lists(
            st.integers(
                min_value=2, max_value=64),
            min_size=4,
            max_size=4))
    epsilon = draw(st.floats(min_value=0.0, max_value=0.001))

    def generate_input(*args, **kwargs):
        return np.random.random(in_shape).astype(np.float32)

    def generate_scale(*args, **kwargs):
        return np.random.random([in_shape[1]]).astype(np.float32)

    def generate_bias(*args, **kwargs):
        return np.random.random([in_shape[1]]).astype(np.float32)

    run_op = OpConfig(
        type="instance_norm",
        inputs={
            "X": ["input_data"],
            "Scale": ["scale_data"],
            "Bias": ["bias_data"]
        },
        outputs={
            "Y": ["output_data"],
            "SavedMean": ["mean_data"],
            "SavedVariance": ["variance_data"],
        },
        attrs={
            "epsilon": epsilon  # 0~0.001
        })
    program_config = ProgramConfig(
        ops=[run_op],
        weights={},
        inputs={
            "input_data": TensorConfig(data_gen=partial(generate_input)),
            "scale_data": TensorConfig(data_gen=partial(generate_scale)),
            "bias_data": TensorConfig(data_gen=partial(generate_bias)),
        },
        outputs=["output_data", "mean_data", "variance_data"])
    return program_config
[ 734, 735, 736 ]
def METHOD_NAME():
    assert DummyConstant.get("t1") == DummyConstant.T1
    assert DummyConstant.get("T1") == DummyConstant.T1
    assert DummyConstant.get("t2") == DummyConstant.T2
    assert DummyConstant.get("T2") == DummyConstant.T2
    assert DummyConstant.get("t3") == DummyConstant.t3
    assert DummyConstant.get("T3") == DummyConstant.t3
    assert DummyConstant.get("t4") == DummyConstant.t4
    assert DummyConstant.get("T4") == DummyConstant.t4
    assert DummyConstant.get("t5") == DummyConstant.T5
    assert DummyConstant.get("T5") == DummyConstant.T5
    assert DummyConstant.get("t6") == DummyConstant.t6
    assert DummyConstant.get("T6") == DummyConstant.t6
    assert DummyConstant.get("random5") == DummyConstant.T5
    assert DummyConstant.get("RANDOM5") == DummyConstant.T5
    assert DummyConstant.get("random6") == DummyConstant.t6
    assert DummyConstant.get("RANDOM6") == DummyConstant.t6
    assert DummyConstant.get("t7") == DummyConstant.T7
    assert DummyConstant.get("T7") == DummyConstant.T7
    assert DummyConstant.get("t8") == DummyConstant.t8
    assert DummyConstant.get("T8") == DummyConstant.t8
[ 9, 891, 19, 604, 156, 894, 99 ]
async def METHOD_NAME(self, session: ProfileSession, state: str = None):
    """Change the issuer cred rev record state (default issued)."""
    self.state = state or IssuerCredRevRecord.STATE_ISSUED
    await self.save(session, reason=f"Marked {self.state}")
[ 0, 551 ]
def METHOD_NAME(self):
    tc = CMakeToolchain(self)
    tc.variables["MVK_SRC_DIR"] = self.source_folder.replace("\\", "/")
    tc.variables["MVK_VERSION"] = self.version
    tc.variables["MVK_WITH_SPIRV_TOOLS"] = self.options.with_spirv_tools
    tc.variables["MVK_BUILD_SHADERCONVERTER_TOOL"] = self.options.tools
    if self._has_hide_vulkan_symbols_option and self.options.shared:
        tc.variables["MVK_HIDE_VULKAN_SYMBOLS"] = self.options.hide_vulkan_symbols
    tc.METHOD_NAME()
    deps = CMakeDeps(self)
    deps.METHOD_NAME()
[ 567 ]
def METHOD_NAME(
    self, scope: ConfigScope = ConfigScope.ALL
) -> Mapping[str, Mapping[str, _ParamType]]:
    return {
        section: {name: val[0] for name, val in body.items() if val[1] & scope}
        for section, body in self._config_spec.items()
    }
[ 19, 1457 ]
def METHOD_NAME():
    images = tf.random.uniform((2, 2, 2))
    with pytest.raises(ValueError, match="contains a `kernel`"):
        non_kernel_layer = tf.keras.layers.MaxPooling2D(2, 2)
        wn_wrapper = wrappers.WeightNormalization(non_kernel_layer)
        wn_wrapper(images)
[ 9, 256, 1885, 94 ]