text — string (lengths 15 to 7.82k)
ids — sequence (lengths 1 to 7)
def METHOD_NAME(self):
    ageSDL = PtGetAgeSDL()
    ageSDL.setFlags(stringVarName.value,1,1)
    ageSDL.sendToClients(stringVarName.value)
    ageSDL.setFlags(stringVarSolved.value,1,1)
    ageSDL.sendToClients(stringVarSolved.value)
    ageSDL.setNotify(self.key,stringVarName.value,0.0)
    ageSDL.setNotify(self.key,stringVarSolved.value,0.0)
    solved = ageSDL[stringVarSolved.value][0]
    if (solved):
        PtDebugPrint("solved ",stringVarSolved.value)
    else:
        return
    if (ageSDL[stringVarName.value][0]):
        PtDebugPrint("DEBUG: giraAgeSDLBoolRespondLightpost.OnServerInitComplete:\tRunning true responder on %s, fastforward=%d" % (self.sceneobject.getName(), boolFFOnInit.value))
        respBoolTrue.run(self.key,fastforward=boolFFOnInit.value)
    else:
        PtDebugPrint("DEBUG: giraAgeSDLBoolRespondLightpost.OnServerInitComplete:\tRunning false responder on %s, fastforward=%d" % (self.sceneobject.getName(), boolFFOnInit.value))
        respBoolFalse.run(self.key,fastforward=boolFFOnInit.value)
[ 69, 163, 176, 676 ]
def METHOD_NAME(self):
    # We cannot know whether this step is deterministic because we
    # don't know whether the given revision (if any) refers to a
    # tag or branch.
    return False
[ 137, 4665 ]
METHOD_NAME(self):
[ 1515, 24, 1056, 104, 894, 549, 894 ]
async def METHOD_NAME(pipeline_response):
    deserialized = self._deserialize("NodeList", pipeline_response)
    list_of_elem = deserialized.value
    if cls:
        list_of_elem = cls(list_of_elem)  # type: ignore
    return deserialized.next_link or None, AsyncList(list_of_elem)
[ 297, 365 ]
def METHOD_NAME(self, *args, **kwargs):
    self.__verify_session_status()
    return self.__manager.proxy_call(self.METHOD_NAME.__name__, *args, **kwargs)
[ 353, 1203 ]
def METHOD_NAME(self, db_conn: "LoggingDatabaseConnection") -> None:
    # We need to import here to avoid an import loop.
    from synapse.storage.prepare_database import prepare_database

    if self._is_in_memory:
        # In memory databases need to be rebuilt each time. Ideally we'd
        # reuse the same connection as we do when starting up, but that
        # would involve using adbapi before we have started the reactor.
        #
        # If we have a `prepped_conn` we can use that to initialise the DB,
        # otherwise we need to call `prepare_database`.
        if self._prepped_conn is not None:
            # Initialise the new DB from the pre-prepared DB.
            assert isinstance(db_conn.conn, sqlite3.Connection)
            self._prepped_conn.backup(db_conn.conn)
        else:
            prepare_database(db_conn, self, config=None)

    db_conn.create_function("rank", 1, _rank)
    db_conn.execute("PRAGMA foreign_keys = ON;")

    # Enable WAL.
    # see https://www.sqlite.org/wal.html
    db_conn.execute("PRAGMA journal_mode = WAL;")
    db_conn.commit()
[ 69, 80, 550 ]
METHOD_NAME(t1, t2):
[ 137, 1553 ]
def METHOD_NAME(self): self.submodels["lithium plating"] = pybamm.lithium_plating.NoPlating(self.param)
[ 0, 17756, 18136, 6231 ]
def METHOD_NAME(self):
    # Handle updating the event
    if self.initial:
        self.instance.METHOD_NAME()
    else:
        Event.objects.create(title=self.cleaned_data['title'],
                             category=self.cleaned_data['category'],
                             host=self.host,
                             description=self.cleaned_data['description'],
                             start_date=self.cleaned_data['start_date'],
                             end_date=self.cleaned_data['end_date'],
                             allowed_domains=self.cleaned_data['allowed_domains']
                             )
[ 73 ]
def METHOD_NAME(self): return f'{self.track_group.title}: {self.short_title}' if self.track_group else self.short_title
[ 1707, 2893, 41, 846 ]
def METHOD_NAME(self):
    url = reverse("messages.bulk_action", locale="en-US")
    resp = self.client.post(url, {"id": [], "mark_read": True}, follow=True)
    self.assertContains(resp, "No messages selected")
[ 9, 1743, 2278, 203, 98 ]
def METHOD_NAME(self):
    if self.debugNP.is_hidden():
        self.debugNP.show()
    else:
        self.debugNP.hide()
[ 766, 290 ]
def METHOD_NAME(self): """write failures shall not corrupt the database""" # First prepopulate the database start = 10 for x in range(start, 0, -1): self.t.runSuccess("track tag{0} {1}min ago".format((start + 1)-x, x)) self.save_database() # Now start in with the failures and make sure the database compares # equally TAG_COUNT = 30 error_count = 0 success_count = 0 MAX_ERROR_COUNT=250 for x in range(TAG_COUNT, 0, -1): while True: exitcode, stdout, stderr = self.t("track tag-{0} {1}s ago".format((TAG_COUNT+1)-x, x)) if exitcode != 0: error_count += 1 if error_count >= MAX_ERROR_COUNT: break if not self.compare_dirs(self.t.datadir, self._goldendir): filecmp.dircmp(self.t.datadir, self._goldendir).report_full_closure() self.fail("{0} not equal to {1}".format(self.t.datadir, self._goldendir)) else: success_count += 1 break if error_count >= MAX_ERROR_COUNT: break self.save_database() self.t.runSuccess("export") print("Test pass. Timew returned an error on {0} runs and none on {1} runs.".format(error_count, success_count))
[ 9, 77, 4538, 74, 130, 13403, 463 ]
def METHOD_NAME() -> None:
    # Create a gdb.Parameter for each parameter
    for p in pwndbg.gdblib.config.params.values():
        # We don't need to store this anywhere, GDB will handle this
        Parameter(p)
[ 176, 434 ]
f METHOD_NAME(self, align_corners, half_pixel_centers, data_type):
[ 9, 4665, 1784 ]
async def METHOD_NAME(self):
    async with self.chat_client:
        await self._create_thread()
        await self.chat_client.delete_chat_thread(self.thread_id)

        # delete created users and chat threads
        if not self.is_playback():
            await self.chat_client.delete_chat_thread(self.thread_id)
[ 9, 34, 3337, 600 ]
def METHOD_NAME(project, dataset, src_dataset):
    """Update the schema.yaml files in each query."""
    sql_root = ROOT / "sql" / project / dataset
    for path in sql_root.glob("*"):
        print(f"fetching schema for {path.name}")
        # we can update the schema with the development version of the schema
        schema = dict(fields=get_schema(f"{src_dataset}.{path.name}", project))
        with (path / "schema.yaml").open("w") as fp:
            yaml.dump(schema, fp)
[ 86, 3173 ]
def METHOD_NAME(packer, bus, ldw_stock_values, enabled, steering_pressed, hud_alert, hud_control):
    values = {}
    if len(ldw_stock_values):
        values = {s: ldw_stock_values[s] for s in [
            "LDW_SW_Warnung_links",   # Blind spot in warning mode on left side due to lane departure
            "LDW_SW_Warnung_rechts",  # Blind spot in warning mode on right side due to lane departure
            "LDW_Seite_DLCTLC",       # Direction of most likely lane departure (left or right)
            "LDW_DLC",                # Lane departure, distance to line crossing
            "LDW_TLC",                # Lane departure, time to line crossing
        ]}

    values.update({
        "LDW_Status_LED_gelb": 1 if enabled and steering_pressed else 0,
        "LDW_Status_LED_gruen": 1 if enabled and not steering_pressed else 0,
        "LDW_Lernmodus_links": 3 if hud_control.leftLaneDepart else 1 + hud_control.leftLaneVisible,
        "LDW_Lernmodus_rechts": 3 if hud_control.rightLaneDepart else 1 + hud_control.rightLaneVisible,
        "LDW_Texte": hud_alert,
    })
    return packer.make_can_msg("LDW_02", bus, values)
[ 129, 15168, 10976, 401 ]
def METHOD_NAME(self):
    question_template = "%s %s"
    all_numerical_rows = []
    for attribute in self.all_unique_numerical_attributes:
        attribute_labels = self.attribute_label_list[attribute]
        for attribute_label in attribute_labels:
            for operator in self.operator_dict:
                operator_labels = self.operator_dict[operator]
                for operator_label in operator_labels:
                    question = question_template % (attribute_label, operator_label)
                    row = (question.lower(), 0, 0, attribute, operator)
                    all_numerical_rows.append(row)
    return all_numerical_rows
[ 129, 7365, 713, 6756 ]
def METHOD_NAME(cls): return Junction
[ 19, 2261, 1716, 44 ]
def METHOD_NAME(self):
    super(TestDecimalMarkWithSciNotation, self).METHOD_NAME()
    setRoles(self.portal, TEST_USER_ID, ['Member', 'LabManager'])
    login(self.portal, TEST_USER_NAME)
    # analysis-service-3: Calcium (Ca)
    servs = self.portal.bika_setup.bika_analysisservices
    self.service = servs['analysisservice-3']
    # Original values
    self.orig_as_prec = self.service.getPrecision()
    self.orig_as_expf = self.service.getExponentialFormatPrecision()
    self.orig_as_ldl = self.service.getLowerDetectionLimit()
    self.orig_bs_expf = self.service.getExponentialFormatThreshold()
    self.orig_bs_scin = self.service.getScientificNotationResults()
    self.orig_dm = self.portal.bika_setup.getResultsDecimalMark()
[ 0, 1 ]
def METHOD_NAME(self):
    databases_scope = False
    self.dbhost = "localhost"
    self.dbport = 5432
    self.dbname = "pulpcore"
    self.dbpasswd = ""
    # TODO: read also redis config (we dont expect much customisations)
    # TODO: read also db user (pulp)
    self.staticroot = "/var/lib/pulp/assets"
    self.uploaddir = "/var/lib/pulp/media/upload"

    def separate_value(line, sep=':'):
        # an auxiliary method to parse values from lines like:
        # 'HOST': 'localhost',
        val = line.split(sep)[1].lstrip().rstrip(',')
        if (val.startswith('"') and val.endswith('"')) or \
           (val.startswith('\'') and val.endswith('\'')):
            val = val[1:-1]
        return val

    try:
        # split the lines to "one option per line" format
        for line in open("/etc/pulp/settings.py").read() \
                .replace(',', ',\n').replace('{', '{\n') \
                .replace('}', '\n}').splitlines():
            # skip empty lines and lines with comments
            if not line or line[0] == '#':
                continue
            if line.startswith("DATABASES"):
                databases_scope = True
                continue
            # example HOST line to parse:
            # 'HOST': 'localhost',
            pattern = r"\s*['|\"]%s['|\"]\s*:\s*\S+"
            if databases_scope and match(pattern % 'HOST', line):
                self.dbhost = separate_value(line)
            if databases_scope and match(pattern % 'PORT', line):
                self.dbport = separate_value(line)
            if databases_scope and match(pattern % 'NAME', line):
                self.dbname = separate_value(line)
            if databases_scope and match(pattern % 'PASSWORD', line):
                self.dbpasswd = separate_value(line)
            # if line contains closing '}' database_scope end
            if databases_scope and '}' in line:
                databases_scope = False
            if line.startswith("STATIC_ROOT = "):
                self.staticroot = separate_value(line, sep='=')
            if line.startswith("CHUNKED_UPLOAD_DIR = "):
                self.uploaddir = separate_value(line, sep='=')
    except IOError:
        # fallback when the cfg file is not accessible
        pass
    # set the password to os.environ when calling psql commands to prevent
    # printing it in sos logs
    # we can't set os.environ directly now: other plugins can overwrite it
    self.env = {"PGPASSWORD": self.dbpasswd}
[ 214, 817, 200 ]
def METHOD_NAME(username): print("Hello, %s, now is %s" % (username, time.time()))
[ 9, 1344, 22 ]
def METHOD_NAME(self):
    for func in self.pivoted_cholesky:
        A = numpy.array([[9.0]])
        L, piv, rank = func(A)
        self.assertEqual(L.shape, (1, 1))
        self.assertEqual(piv.shape, (1,))
        self.assertAlmostEqual(L[0, 0], 3.0, delta=1.0e-14)
        self.assertEqual(piv[0], 0)
        self.assertEqual(rank, 1)
[ 9, 14915, 3448, 3844 ]
def METHOD_NAME(self): """Start the monitoring threads.""" self.run() logger.info(f'Started monitoring statistics from {self.statistics_file}')
[ 447 ]
async def METHOD_NAME(self, delay):
    if self.is_live:
        await asyncio.METHOD_NAME(delay)
[ 5893 ]
def METHOD_NAME(self):
    return (
        self.syn.n_high,
        self.syn.n_mid,
        self.syn.n_low,
        self.t1vec.c(),
        self.xvec.c(),
        self.rvec.c(),
    )
[ 1571 ]
def METHOD_NAME(self, filterObj, sortList=[], skip=0, maxitems=1, skip_geometry=False):
    featurecursor = self.featuredb[self.collection].find(filterObj)
    if sortList:
        featurecursor = featurecursor.sort(sortList)
    matchCount = self.featuredb[self.collection].count_documents(filterObj)
    featurecursor.skip(skip)
    featurecursor.limit(maxitems)
    featurelist = list(featurecursor)
    for item in featurelist:
        item['id'] = str(item.pop('_id'))
        if skip_geometry:
            item['geometry'] = None
    return featurelist, matchCount
[ 19, 964, 245 ]
def METHOD_NAME():
    e = '2*(x+1)'
    assert parse_expr(e, evaluate=0) == parse_expr(e, evaluate=False)
[ 9, 13676, -1 ]
def METHOD_NAME():
    assert _attrs._getitem([1, 2, 3], 1) == 2
    assert _attrs._getitem(2, 0) == 2
    expr = _attrs._getitem(Categorical([3, (4, 5), 6], label='a'), 1)
    assert expr.freeze({'a': 3}) == 3
    assert expr.freeze({'a': (4, 5)}) == 5
    assert expr.freeze({'a': 6}) == 6
[ 9, 5181 ]
def METHOD_NAME(self):
    email = self.cleaned_data.get("email")
    if User.objects.filter(email=email).first():
        raise forms.ValidationError("该邮箱已经注册过了")  # "This email address is already registered"
    else:
        return email
[ 1356, 487 ]
def METHOD_NAME(self): """ Random logged-in users. Cannot delete access for playlist they have no role in. """ user = factories.UserFactory() playlist_access_to_delete = factories.PlaylistAccessFactory() self.assert_user_cant_delete_playlist_access(user, playlist_access_to_delete)
[ 9, 34, 556, 1089, 604, 236, 2717 ]
def METHOD_NAME(self):
    super(ConnectionMixin, self).METHOD_NAME()
    self.conn = self.make_connection()
[ 0, 1 ]
def METHOD_NAME(self): return Repeater.objects.by_domain(self.domain)
[ 19, 7543 ]
def METHOD_NAME(obj):
    @functools.wraps(obj)
    def func_wrapped(*args, **kwargs):
        if is_installed(name, version):
            return obj(*args, **kwargs)
        else:
            msg = f'"{obj}" in "{obj.__module__}" requires "{name}'
            if version is not None:
                msg += f" {version}"
            raise ImportError(msg + '"')

    return func_wrapped
[ 972 ]
def METHOD_NAME(self): cmake_layout(self, src_folder="src")
[ 571 ]
def METHOD_NAME(self): """check db_verify version""" self.check_exe_version("db_verify")
[ 9, 1267, 1162 ]
def METHOD_NAME(self): return self._compute
[ 226 ]
def METHOD_NAME(self) -> None:
    super().METHOD_NAME()
    self.patch = patch_datadog()
    self.recorded_metrics = self.patch.__enter__()
[ 0, 1 ]
def METHOD_NAME(a,b):
    df = pd.read_hdf('{}/t2_run{}.h5'.format(tier_dir,sys.argv[1]))
    df['e_cal'] = pd.read_hdf('{}/Spectrum_{}.hdf5'.format(meta_dir,sys.argv[1]))['e_cal']
    df = df.loc[(df.e_cal>=float(a))&(df.e_cal<=float(b))]

    df_2 = pd.read_hdf('{}/t2_run{}.h5'.format(tier_dir,sys.argv[2]))
    df_2['e_cal'] = pd.read_hdf('{}/Spectrum_{}.hdf5'.format(meta_dir,sys.argv[2]))['e_cal']
    df_2 = df_2.loc[(df_2.e_cal>=float(a))&(df_2.e_cal<=float(b))]

    plt.hist(df['current_max']/df['e_cal'], np.arange(0,.2,.0010), histtype='step', density=True, label='run {}, {} < E < {} keV'.format(sys.argv[1],a,b))
    plt.hist(df_2['current_max']/df_2['e_cal'], np.arange(0,.2,.0010), histtype='step', density=True, label='run {}, {} < E < {} keV'.format(sys.argv[2],a,b))
    plt.xlabel('A/E', ha='right', x=1.0)
    plt.ylabel('Counts (normalized)', ha='right', y=1.0)
    plt.legend(frameon=True, loc='best', fontsize='small')
    plt.show()
[ 1288, 1170, 227, 8571 ]
def METHOD_NAME(request, methodid, invoiceid, isreturn=None):
    invoice = get_object_or_404(Invoice, pk=invoiceid, deleted=False, finalized=True)
    if invoice.recipient_user != request.user:
        authenticate_backend_group(request, 'Invoice managers')

    return _invoice_payment(request, methodid, invoice, isreturn)
[ 17826 ]
def METHOD_NAME(self):
    apply_conandata_patches(self)
    if is_msvc(self):
        #==========================
        # TODO: to remove once https://github.com/conan-io/conan/pull/12817 available in conan client
        vcxproj_files = [
            os.path.join(self.source_folder, "_msvc", "zimg", "zimg.vcxproj"),
            os.path.join(self.source_folder, "_msvc", "dll", "dll.vcxproj"),
        ]
        for vcxproj_file in vcxproj_files:
            replace_in_file(
                self, vcxproj_file,
                "<WholeProgramOptimization>true</WholeProgramOptimization>",
                "",
            )
        platform_toolset = MSBuildToolchain(self).toolset
        conantoolchain_props = os.path.join(self.generators_folder, MSBuildToolchain.filename)
        for vcxproj_file in vcxproj_files:
            if Version(self.version) >= "3.0.5":
                replace_in_file(
                    self, vcxproj_file,
                    "<PlatformToolset>v143</PlatformToolset>",
                    f"<PlatformToolset>{platform_toolset}</PlatformToolset>",
                )
            else:
                replace_in_file(
                    self, vcxproj_file,
                    "<PlatformToolset>v142</PlatformToolset>",
                    f"<PlatformToolset>{platform_toolset}</PlatformToolset>",
                )
            replace_in_file(
                self, vcxproj_file,
                "<Import Project=\"$(VCTargetsPath)\\Microsoft.Cpp.targets\" />",
                f"<Import Project=\"{conantoolchain_props}\" /><Import Project=\"$(VCTargetsPath)\\Microsoft.Cpp.targets\" />",
            )
        #==========================

        msbuild = MSBuild(self)
        msbuild.build_type = self._msbuild_configuration
        msbuild.platform = "Win32" if self.settings.arch == "x86" else msbuild.platform
        msbuild.METHOD_NAME(os.path.join(self.source_folder, "_msvc", "zimg.sln"),
                            targets=["dll" if self.options.shared else "zimg"])
    else:
        autotools = Autotools(self)
        autotools.autoreconf()
        autotools.configure()
        autotools.make()
[ 56 ]
def METHOD_NAME(filename):
    data_dir = os.path.join(os.path.dirname(__file__), 'data')
    return os.path.join(data_dir, filename)
[ 365, 157 ]
def METHOD_NAME(self) -> str: """ Get clipboard """ # Since this function need socket response, we can't auto inject it any more s: socket.socket = self.control_socket with self.control_socket_lock: # Flush socket s.setblocking(False) while True: try: s.recv(1024) except BlockingIOError: break s.setblocking(True) # Read package package = struct.pack(">B", const.TYPE_GET_CLIPBOARD) s.send(package) (code,) = struct.unpack(">B", s.recv(1)) assert code == 0 (length,) = struct.unpack(">i", s.recv(4)) return s.recv(length).decode("utf-8")
[ 19, 3238 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_header_param(
            "Accept", "application/json",
        ),
    }
    return parameters
[ 572, 386 ]
def METHOD_NAME(self):
    zaak = ZaakFactory.create()
    zaak_url = get_operation_url("zaak_read", uuid=zaak.uuid)
    roltype = RolTypeFactory.create()
    add_url = reverse("admin:zaken_rol_add")

    get_response = self.app.get(add_url)
    form = get_response.form
    form["zaak"] = zaak.id
    form["_roltype"] = roltype.id
    form["betrokkene_type"] = "natuurlijk_persoon"
    form["betrokkene"] = "http://example.com/betrokkene/1"
    form["roltoelichting"] = "desc"

    response = form.submit()

    self.assertEqual(response.status_code, 302)
    self.assertEqual(Rol.objects.count(), 1)

    rol = Rol.objects.get()
    rol_url = get_operation_url("rol_read", uuid=rol.uuid, zaak_uuid=zaak.uuid)

    self.assertEqual(AuditTrail.objects.count(), 1)

    audittrail = AuditTrail.objects.get()
    self.assertEqual(audittrail.bron, "ZRC")
    self.assertEqual(audittrail.actie, "create")
    self.assertEqual(audittrail.resultaat, 0)
    self.assertEqual(audittrail.applicatie_weergave, "admin")
    self.assertEqual(audittrail.gebruikers_id, f"{self.user.id}"),
    self.assertEqual(audittrail.gebruikers_weergave, self.user.get_full_name()),
    self.assertEqual(audittrail.hoofd_object, f"http://testserver{zaak_url}"),
    self.assertEqual(audittrail.resource, "rol"),
    self.assertEqual(audittrail.resource_url, f"http://testserver{rol_url}"),
    self.assertEqual(audittrail.resource_weergave, rol.unique_representation()),
    self.assertEqual(audittrail.oud, None)

    new_data = audittrail.nieuw
    self.assertEqual(new_data["roltoelichting"], "desc")
[ 9, 129, 8143 ]
def METHOD_NAME(): return SourceFacebookMarketing()
[ 10654, 14063 ]
def METHOD_NAME(self): """ Handles the connection to a PostgreSQL database instance. """ if self.is_connected is True: return self.connection config = { 'host': self.connection_args.get('host'), 'port': self.connection_args.get('port'), 'user': self.connection_args.get('user'), 'password': self.connection_args.get('password'), 'dbname': self.connection_args.get('database') } if self.connection_args.get('sslmode'): config['sslmode'] = self.connection_args.get('sslmode') if self.connection_args.get('schema'): config['options'] = f'-c search_path={self.connection_args.get("schema")},public' connection = psycopg.METHOD_NAME(**config, connect_timeout=10) self.is_connected = True self.connection = connection return self.connection
[ 707 ]
def METHOD_NAME():
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.connect(('8.8.8.8', 80))
        return s.getsockname()[0]
[ 19, 125, 1213 ]
def METHOD_NAME(): """ Sets the verbosity to `logging.ERROR`. """ return set_verbosity(ERROR)
[ 0, 936, 168 ]
def METHOD_NAME():
    _atomic_write(TIMESTAMP_FILE, str(time.time()))
    task_id = flask.request.headers.get('X-AppEngine-TaskName')
    app.logger.info('Processing task %s', task_id)
    resp = processor.process_report(task_id, flask.request.form)
    status = HTTPStatus.CREATED if resp else HTTPStatus.NO_CONTENT
    if resp:
        app.logger.info(resp)
    return (resp, status)
[ 758, 1519 ]
def METHOD_NAME(self):
    suite, _ = self.test_run.build.project.suites.get_or_create(slug='a-suite')
    metadata, _ = models.SuiteMetadata.objects.get_or_create(suite=suite.slug, name='no_metadata_test', kind='test')
    self.test_run.tests.create(metadata=metadata, result=False, suite=suite, build=self.test_run.build, environment=self.test_run.environment)
    response = self.client.get('/mygroup/myproject/build/1/tests/?page=2')
    self.assertEqual(200, response.status_code)
[ 9, 654, 773 ]
def METHOD_NAME(namespace, label): return "%s_%s" % (namespace, label)
[ 19, 4653, 1608, 156 ]
def METHOD_NAME(self, method_uri, session):
    """
    Associate a security method URI with a requests.Session like object.

    Parameters
    ----------
    method_uri : str
        URI representing the security method
    session : object
        the requests.Session like object that will dispatch requests
        for the authentication method provided by method_uri
    """
    self.credentials[method_uri] = session
[ 0 ]
def METHOD_NAME(Phi):
    # Transform input to enforce Neumann boundary conditions.
    # copy input
    PhiOut = Phi
    # capture image size
    m = Phi.shape[0]
    n = Phi.shape[1]
    # deal with corners
    PhiOut[0, 0] = PhiOut[2, 2]
    PhiOut[0, n - 1] = PhiOut[0, -3]
    PhiOut[m - 1, 0] = PhiOut[-3, 2]
    PhiOut[m - 1, n - 1] = PhiOut[-3, -3]
    # deal with edges
    PhiOut[0, 1:-1] = PhiOut[2, 1:-1]
    PhiOut[m - 1, 1:-1] = PhiOut[m - 3, 1:-1]
    PhiOut[1:-1, 0] = PhiOut[1:-1, 2]
    PhiOut[1:-1, n - 1] = PhiOut[1:-1, n - 3]
    return PhiOut
[ 7925, 634 ]
def METHOD_NAME(value):
    """ Convert builtin function to ast expression. """
    if isinstance(value, (type(None), bool)):
        name = str(value)
    else:
        try:
            name = value.__name__
        except AttributeError:
            raise ToNotEval()
    return ast.Attribute(ast.Name('builtins', ast.Load(), None, None),
                         name, ast.Load())
[ 3629, 12241 ]
def METHOD_NAME(namespace, cluster_provider):
    kubernetes.config.load_kube_config(cluster_provider.kubeconfig_file)
    docker_image = get_test_project_docker_image()

    @asset
    def number_y(
        context: AssetExecutionContext,
        ext_k8s_pod: ExtK8sPod,
    ):
        ext_k8s_pod.run(
            context=context,
            namespace=namespace,
            image=docker_image,
            command=[
                "python",
                "-m",
                "numbers_example.number_y",
            ],
            extras={
                "storage_root": "/tmp/",
            },
            env={
                "PYTHONPATH": "/dagster_test/toys/external_execution/",
                "NUMBER_Y": "2",
            },
        )

    result = materialize(
        [number_y],
        resources={"ext_k8s_pod": ExtK8sPod()},
        raise_on_error=False,
    )
    assert result.success
    mats = result.asset_materializations_for_node(number_y.op.name)
    assert "is_even" in mats[0].metadata
    assert mats[0].metadata["is_even"].value is True
[ 9, 1661, 3761, 6635 ]
def METHOD_NAME(x: interfaces.objects.ObjectInterface) -> bool:
    try:
        return not (x.get_private_memory() == 0 and x.ControlArea)
    except AttributeError:
        return False
[ 527, 559 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_query_param(
            "api-version", "2022-01-01",
            required=True,
        ),
    }
    return parameters
[ 539, 386 ]
def METHOD_NAME():
    configs = parse_configs(["relay.backend.use_auto_scheduler=true"])
    assert len(configs) == 1
    assert "relay.backend.use_auto_scheduler" in configs.keys()
    assert configs["relay.backend.use_auto_scheduler"] == True
[ 9, 200, 1205, 200, 863 ]
def METHOD_NAME(self, spec, prefix):
    with working_dir(join_path(glob("*MT64")[0], "build")):
        make("all_r")
[ 56 ]
async def METHOD_NAME(self) -> None:
    await self.close()
    try:
        os.remove(self.filename)
    except FileNotFoundError:  # pragma: nocoverage
        pass
    except OSError as e:
        if e.errno != 22:  # fix: "sqlite://:memory:" in Windows
            raise e
[ 1267, 34 ]
def METHOD_NAME(self): """Test the _write_sheet_views() method with freeze panes""" self.worksheet.select() self.worksheet.freeze_panes(0, 1, 0, 4) self.worksheet.set_selection("A1") self.worksheet._write_sheet_views() exp = '<sheetViews><sheetView tabSelected="1" workbookViewId="0"><pane xSplit="1" topLeftCell="E1" activePane="topRight" state="frozen"/><selection pane="topRight"/></sheetView></sheetViews>' got = self.fh.getvalue() self.assertEqual(got, exp)
[ 9, 77, 4501, 13170 ]
def METHOD_NAME(pos: str):
    """
    Should take in a class and return the plain english labelling for it

    So if I pass in "VTA-1", I should get back: tâpiskôc: wîcihêw
    """
    return read_labels().source_language.get(pos)
[ 1641, 1458 ]
def METHOD_NAME(self, ir): return self._to_java_ir(ir, self._parse_blockmatrix_ir)
[ 24, 5521, -1, 5925 ]
def METHOD_NAME(ls):
    '''
    Return all tail combinations (a la Haskell)

    tails :: [x] -> [[x]]

    >>> tails('abcd')
    ['abcd', 'bcd', 'cd', 'd']
    '''
    for i in range(len(ls)):
        yield ls[i:]
[ 8562 ]
def METHOD_NAME(self, r): self.sendUpdate("setR", [r])
[ 227, 0, 3264 ]
def METHOD_NAME():
    global _shutdown
    _shutdown = True
    items = list(_threads_queues.items())
    for t, q in items:
        q.put(None)
    for t, q in items:
        t.join()
[ 440, 538 ]
def METHOD_NAME(self, module_name: str) -> bool: ...
[ 137, 79, 298 ]
async def METHOD_NAME():
    @dataclasses.dataclass
    class ExampleContext(Context):
        title: str
        status: int
        error: str

    context = ExampleContext(title="Hello", status=200, error="Error message")
    ds = Datasette(memory=True)
    await ds.invoke_startup()
    rendered = await ds.render_template("error.html", context)
    assert "<h1>Hello</h1>" in rendered
    assert "Error message" in rendered
[ 9, 18161, 338, 671, 41, 4051 ]
def METHOD_NAME(self) -> str: ...
[ 156 ]
def METHOD_NAME(message): return encode(message, constants.MAX_REASON_LENGTH)
[ 2293 ]
def METHOD_NAME():
    args = parse_args()

    paddle.set_device(args.device)

    # frontend
    frontend = get_frontend(
        lang=args.lang,
        phones_dict=args.phones_dict,
        tones_dict=args.tones_dict)

    # am_predictor
    am_predictor = get_predictor(
        model_dir=args.inference_dir,
        model_file=args.am + ".pdmodel",
        params_file=args.am + ".pdiparams",
        device=args.device,
        use_trt=args.use_trt,
        use_mkldnn=args.use_mkldnn,
        cpu_threads=args.cpu_threads,
        precision=args.precision)
    # model: {model_name}_{dataset}
    am_dataset = args.am[args.am.rindex('_') + 1:]

    # voc_predictor
    voc_predictor = get_predictor(
        model_dir=args.inference_dir,
        model_file=args.voc + ".pdmodel",
        params_file=args.voc + ".pdiparams",
        device=args.device,
        use_trt=args.use_trt,
        use_mkldnn=args.use_mkldnn,
        cpu_threads=args.cpu_threads,
        precision=args.precision)

    output_dir = Path(args.output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)

    sentences = get_sentences(text_file=args.text, lang=args.lang)

    merge_sentences = True
    fs = 24000 if am_dataset != 'ljspeech' else 22050
    # warmup
    for utt_id, sentence in sentences[:3]:
        with timer() as t:
            mel = get_am_output(
                input=sentence,
                am_predictor=am_predictor,
                am=args.am,
                frontend=frontend,
                lang=args.lang,
                merge_sentences=merge_sentences,
                speaker_dict=args.speaker_dict,
                spk_id=args.spk_id,
            )
            wav = get_voc_output(voc_predictor=voc_predictor, input=mel)
        speed = wav.size / t.elapse
        rtf = fs / speed
        print(
            f"{utt_id}, mel: {mel.shape}, wave: {wav.shape}, time: {t.elapse}s, Hz: {speed}, RTF: {rtf}."
        )

    print("warm up done!")

    N = 0
    T = 0
    for utt_id, sentence in sentences:
        with timer() as t:
            mel = get_am_output(
                input=sentence,
                am_predictor=am_predictor,
                am=args.am,
                frontend=frontend,
                lang=args.lang,
                merge_sentences=merge_sentences,
                speaker_dict=args.speaker_dict,
                spk_id=args.spk_id,
            )
            wav = get_voc_output(voc_predictor=voc_predictor, input=mel)

        N += wav.size
        T += t.elapse
        speed = wav.size / t.elapse
        rtf = fs / speed

        sf.write(output_dir / (utt_id + ".wav"), wav, samplerate=fs)
        print(
            f"{utt_id}, mel: {mel.shape}, wave: {wav.shape}, time: {t.elapse}s, Hz: {speed}, RTF: {rtf}."
        )
        print(f"{utt_id} done!")

    print(f"generation speed: {N / T}Hz, RTF: {fs / (N / T) }")
[ 57 ]
def METHOD_NAME(self, password: str) -> dict: """Get an encryption hash for a given password"""
[ 2897, 1161 ]
def METHOD_NAME(
    cls, path: Path, public_id: Optional[str] = None, format: Optional[str] = None
) -> GraphSource:
    if format is None:
        format = cls.guess_format(path)
    if format is None:
        raise ValueError(f"could not guess format for source {path}")
    return cls(path, format, public_id)
[ 280, 157 ]
def METHOD_NAME(self):
    if hasattr(self, '_taskList'):
        for task in list(self._taskList.values()):
            task.remove()
[ 188, 75, 620 ]
def METHOD_NAME(self) -> str: """ The name of the Azure AD B2C tenant resource. """ return pulumi.get(self, "name")
[ 156 ]
def METHOD_NAME(self):
    logging.getLogger().setLevel(logging.DEBUG)
    now = datetime.datetime.now()
    current_working_directory = os.getcwd()
    data_dir = os.path.join(current_working_directory, "__Test")
    Cache.db_make_directory_if_needed(data_dir)
    try:
        h = NDataHandler.NDataHandler(os.path.join(data_dir, "abc.ndata"))
        with contextlib.closing(h):
            data = numpy.random.randint(0, 10, size=(10, 10))[:,3]  # discontiguous data
            self.assertFalse(data.flags['C_CONTIGUOUS'])
            p = {u"uuid": str(uuid.uuid4())}
            # write properties
            h.write_properties(p, now)
            # write data
            h.write_data(data, now)
            d = h.read_data()
            self.assertEqual(d.shape, data.shape)
            self.assertEqual(d.dtype, data.dtype)
    finally:
        #logging.debug("rmtree %s", data_dir)
        shutil.rmtree(data_dir)
[ 9, 16965, 378, 15074, 365 ]
def METHOD_NAME(self, session): pass
[ 69, 1072 ]
def METHOD_NAME(self, batch, logs=None): pass
[ 69, 2103, 2277, 3287 ]
async def METHOD_NAME(self, r: redis.Redis):
    unicode_string = chr(3456) + "abcd" + chr(3421)
    await r.set("unicode-string", unicode_string)
    cached_val = await r.get("unicode-string")
    assert isinstance(cached_val, str)
    assert unicode_string == cached_val
[ 9, 53, 2300, 61, 5365 ]
def METHOD_NAME(
    self,
    asset_key: CoercibleToAssetKey,
    *,
    python_type: Optional[Type[object]] = None,
    partition_key: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
    resource_config: Optional[Any] = None,
) -> object:
    """Loads the contents of an asset as a Python object.

    Invokes `load_input` on the :py:class:`IOManager` associated with the asset.

    Args:
        asset_key (Union[AssetKey, Sequence[str], str]): The key of the asset to load.
        python_type (Optional[Type]): The python type to load the asset as. This is what will
            be returned inside `load_input` by `context.dagster_type.typing_type`.
        partition_key (Optional[str]): The partition of the asset to load.
        metadata (Optional[Dict[str, Any]]): Input metadata to pass to the :py:class:`IOManager`
            (is equivalent to setting the metadata argument in `In` or `AssetIn`).
        resource_config (Optional[Any]): A dictionary of resource configurations to be passed
            to the :py:class:`IOManager`.

    Returns:
        The contents of an asset as a Python object.
    """
    asset_key = AssetKey.from_coercible(asset_key)
    resource_config = resource_config or {}
    output_metadata = {}

    if asset_key in self._assets_defs_by_key:
        assets_def = self._assets_defs_by_key[asset_key]

        resource_defs = merge_dicts(
            {DEFAULT_IO_MANAGER_KEY: default_job_io_manager_with_fs_io_manager_schema},
            assets_def.resource_defs,
        )
        io_manager_key = assets_def.get_io_manager_key_for_asset_key(asset_key)
        io_manager_def = resource_defs[io_manager_key]
        name = assets_def.get_output_name_for_asset_key(asset_key)
        output_metadata = assets_def.metadata_by_key[asset_key]
        op_def = assets_def.get_op_def_for_asset_key(asset_key)
        asset_partitions_def = assets_def.partitions_def
    elif asset_key in self._source_assets_by_key:
        source_asset = self._source_assets_by_key[asset_key]

        resource_defs = merge_dicts(
            {DEFAULT_IO_MANAGER_KEY: default_job_io_manager_with_fs_io_manager_schema},
            source_asset.resource_defs,
        )
        io_manager_key = source_asset.get_io_manager_key()
        io_manager_def = resource_defs[io_manager_key]
        name = asset_key.path[-1]
        output_metadata = source_asset.raw_metadata
        op_def = None
        asset_partitions_def = source_asset.partitions_def
    else:
        check.failed(f"Asset key {asset_key} not found")

    required_resource_keys = get_transitive_required_resource_keys(
        io_manager_def.required_resource_keys, resource_defs
    ) | {io_manager_key}

    self._ensure_resource_instances_in_cache(
        {k: v for k, v in resource_defs.items() if k in required_resource_keys},
        resource_config=resource_config,
    )
    io_manager = cast(IOManager, self._resource_instance_cache[io_manager_key])

    io_config = resource_config.get(io_manager_key)
    io_resource_config = {io_manager_key: io_config} if io_config else {}

    io_manager_config = get_mapped_resource_config(
        {io_manager_key: io_manager_def}, io_resource_config
    )

    input_context = build_input_context(
        name=None,
        asset_key=asset_key,
        dagster_type=resolve_dagster_type(python_type),
        upstream_output=build_output_context(
            name=name,
            metadata=output_metadata,
            asset_key=asset_key,
            op_def=op_def,
            resource_config=resource_config,
        ),
        resources=self._resource_instance_cache,
        resource_config=io_manager_config[io_manager_key].config,
        partition_key=partition_key,
        asset_partition_key_range=(
            PartitionKeyRange(partition_key, partition_key)
            if partition_key is not None
            else None
        ),
        asset_partitions_def=asset_partitions_def,
        instance=self._instance,
        metadata=metadata,
    )

    return io_manager.load_input(input_context)
[ 557, 3455, 99 ]
def METHOD_NAME(self): """Invert each elemental matrix.""" return self.fromlocal(np.linalg.inv(self.tolocal()))
[ 3581 ]
def METHOD_NAME(signum: "Optional[int]" = None, frame: "Optional[FrameType]" = None) -> None:
    """
    Graceful exit.
    """
    graceful_stop.set()
[ 631 ]
def METHOD_NAME(from_date, to_date, company):
    if from_date and to_date and (from_date >= to_date):
        frappe.throw(_("To Date must be greater than From Date"))

    if not company:
        frappe.throw(_("Please Select a Company"))
[ 187, 469 ]
def METHOD_NAME(servicer, server):
    rpc_method_handlers = {
        'addExperimentRunComment': grpc.unary_unary_rpc_method_handler(
            servicer.addExperimentRunComment,
            request_deserializer=modeldb_dot_Comment__pb2.AddComment.FromString,
            response_serializer=modeldb_dot_Comment__pb2.AddComment.Response.SerializeToString,
        ),
        'updateExperimentRunComment': grpc.unary_unary_rpc_method_handler(
            servicer.updateExperimentRunComment,
            request_deserializer=modeldb_dot_Comment__pb2.UpdateComment.FromString,
            response_serializer=modeldb_dot_Comment__pb2.UpdateComment.Response.SerializeToString,
        ),
        'getExperimentRunComments': grpc.unary_unary_rpc_method_handler(
            servicer.getExperimentRunComments,
            request_deserializer=modeldb_dot_Comment__pb2.GetComments.FromString,
            response_serializer=modeldb_dot_Comment__pb2.GetComments.Response.SerializeToString,
        ),
        'deleteExperimentRunComment': grpc.unary_unary_rpc_method_handler(
            servicer.deleteExperimentRunComment,
            request_deserializer=modeldb_dot_Comment__pb2.DeleteComment.FromString,
            response_serializer=modeldb_dot_Comment__pb2.DeleteComment.Response.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'ai.verta.modeldb.CommentService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
[ 238, 1591, 549, 2711, 24, 163 ]
def METHOD_NAME(self):
    data_table = self.gen_data(1000, 10, 4)
    select_param = FeatureSelectionParam()
    select_param.variance_coe_param.value_threshold = 0.1
    selection_obj = self._make_selection_obj(data_table)
    filter_obj = get_filter(consts.COEFFICIENT_OF_VARIATION_VALUE_THRES, select_param, model=selection_obj)
    select_properties = SelectionProperties()
    select_properties.set_header(self.header)
    select_properties.set_last_left_col_indexes([x for x in range(len(self.header))])
    select_properties.set_select_all_cols()
    filter_obj.set_selection_properties(select_properties)
    res_select_properties = filter_obj.fit(data_table, suffix='').selection_properties

    result = [self.header[idx] for idx, x in enumerate(self.coe_list)
              if x >= select_param.variance_coe_param.value_threshold]

    self.assertEqual(res_select_properties.all_left_col_names, result)
    self.assertEqual(len(res_select_properties.all_left_col_names), 9)
[ 9, 527, 6920 ]
def METHOD_NAME(secret_key: str):
    # encrypts contents
    class OtherData(BaseModel):
        foo: int = 123

    my_secret_key = secret_key.encode()
    my_invitation_code = _fernet_encrypt_as_urlsafe_code(
        data=OtherData().json().encode(), secret_key=my_secret_key
    )

    with pytest.raises(ValidationError):
        decrypt_invitation(
            invitation_code=my_invitation_code,
            secret_key=my_secret_key,
        )

    with pytest.raises(InvalidInvitationCode):
        extract_invitation_content(
            invitation_code=my_invitation_code,
            secret_key=my_secret_key,
        )
[ 9, 532, 4568, 365 ]
def METHOD_NAME(tfb):
    n = 3
    p = 0.1
    K = tc.backend

    def f(inputs, weights):
        state = inputs["state"]
        noise = inputs["noise"]
        c = tc.Circuit(n, inputs=state)
        for i in range(n):
            c.rz(i, theta=weights[i])
        for i in range(n):
            c.depolarizing(i, px=p, py=p, pz=p, status=noise[i])
        return K.real(c.expectation_ps(x=[0]))

    layer = tc.KerasLayer(f, [n])
    v = {"state": K.ones([1, 2**n]) / 2 ** (n / 2), "noise": 0.2 * K.ones([1, n])}
    with tf.GradientTape() as tape:
        l = layer(v)
    g1 = tape.gradient(l, layer.trainable_variables)
    v = {"state": K.ones([2**n]) / 2 ** (n / 2), "noise": 0.2 * K.ones([n])}
    with tf.GradientTape() as tape:
        l = layer(v)
    g2 = tape.gradient(l, layer.trainable_variables)
    np.testing.assert_allclose(g1[0], g2[0], atol=1e-5)
[ 9, 4098, 94, 1461, 553 ]
def METHOD_NAME(self, workbook_md: str) -> str:
    new_workbook_md = ""
    number_of_skipped_lines = 0
    skip = False
    for line in workbook_md.splitlines():
        if "<!-- current users table: start -->" in line:
            # do not copy the old current users table
            skip = True
            # insert the new table including the marker
            new_workbook_md += line + "\n"
            new_workbook_md += self._render_current_users_table()
        elif "<!-- current users table: end -->" in line:
            skip = False
            # insert the marker
            new_workbook_md += line + "\n"
        elif "<!-- tracking table: next row -->" in line:
            # insert the new row including the marker
            new_workbook_md += self._render_tracking_table_row(
                old_number_of_users=number_of_skipped_lines - 2
                if number_of_skipped_lines > 0
                else 0
            )
            new_workbook_md += line + "\n"
        elif not skip:
            new_workbook_md += line + "\n"
        else:
            # count the number of skipped current users table lines
            # this number minus the table header is the old number of users
            number_of_skipped_lines += 1
    return new_workbook_md
[ 86, 6151 ]
def METHOD_NAME(self, parser):
    parser.add_argument(
        '--show-progress',
        action='store_true',
        help='Prints out one dot every 1000 (one thousand) metadata processed'
    )
    parser.add_argument(
        '--num-threads',
        type=int,
        default=2,
        help='Number of simultaneous parallel threads to work'
    )
[ 238, 134 ]
def METHOD_NAME(self) -> Response:
    """
    List all tables in Oracle DB owned by the current user.
    """
    query = """
        SELECT table_name
        FROM user_tables
        ORDER BY 1
    """
    return self.native_query(query)
[ 19, 2253 ]
def METHOD_NAME(VARname, NewSDLValue, myMaxINT):
    PtDebugPrint("islmEmgrPhase0.OutOfRange:\tERROR: Variable %s expected range from 0 - %d. Received value of %d" % (VARname,NewSDLValue,myMaxINT))
[ 1737, 47, 661 ]
def METHOD_NAME(local_engine_empty, readonly_pg_repo):
    # Same as previous but we clone the read-only repo and push to our own namespace
    # to check that the objects we push get their namespaces rewritten to be the
    # unprivileged user, not test.
    destination = clone(readonly_pg_repo)
    destination.images["latest"].checkout()
    destination.run_sql("""UPDATE fruits SET name = 'banana' WHERE fruit_id = 1""")
    destination.commit()

    remote_destination = Repository.from_template(
        readonly_pg_repo,
        namespace=readonly_pg_repo.engine.conn_params["SG_NAMESPACE"],
        engine=readonly_pg_repo.engine,
    )
    destination.upstream = remote_destination

    destination.push(handler="S3")

    object_id = destination.head.get_table("fruits").objects[-1]
    assert (
        remote_destination.objects.get_object_meta([object_id])[object_id].namespace
        == readonly_pg_repo.engine.conn_params["SG_NAMESPACE"]
    )

    # Test we can delete our own repo once we've pushed it
    remote_destination.delete(uncheckout=False)
    assert len(remote_destination.images()) == 0
[ 9, 1013, 5911, 34, 5911, 955, 6824 ]
def METHOD_NAME(self, request):
    logger.debug("request: %s", request.array_specs)
    logger.debug("my spec: %s", self.spec)

    lcm_voxel_size = self.spec.get_lcm_voxel_size(request.array_specs.keys())
    if lcm_voxel_size is None:
        ndims = len(self.coordinates[0])
        lcm_voxel_size = Coordinate((1,) * ndims)

    # shift to center
    total_roi = request.get_total_roi()
    request_center = total_roi.shape / 2 + total_roi.offset

    self.shift = self._get_next_shift(request_center, lcm_voxel_size)
    max_tries = 15
    tries = 0
    while not self.__check_shift(request):
        logger.warning(
            "Location %s (shift %s) skipped"
            % (self.coordinates[self.local_index], self.shift)
        )
        assert tries < max_tries, (
            "Unable to find valid shift after %d tries",
            tries,
        )
        self.shift = self._get_next_shift(request_center, lcm_voxel_size)
        tries += 1

    # Set shift for all requests
    for specs_type in [request.array_specs, request.graph_specs]:
        for key, spec in specs_type.items():
            if isinstance(spec, ArraySpec) and spec.nonspatial:
                continue
            roi = spec.roi.shift(self.shift)
            specs_type[key].roi = roi

    logger.debug(
        "{}'th ({}) shift selected: {}".format(
            self.local_index, self.coordinates[self.local_index], self.shift
        )
    )
[ 123 ]
def METHOD_NAME(
    self,
    *,
    search: Union[str, UnsetType] = unset,
    tags: Union[str, UnsetType] = unset,
    _from: Union[int, UnsetType] = unset,
    to: Union[int, UnsetType] = unset,
    page_limit: Union[int, UnsetType] = unset,
    page_cursor: Union[str, UnsetType] = unset,
) -> collections.abc.Iterable[ProcessSummary]:
    """Get all processes.

    Provide a paginated version of :meth:`list_processes`, returning all items.

    :param search: String to search processes by.
    :type search: str, optional
    :param tags: Comma-separated list of tags to filter processes by.
    :type tags: str, optional
    :param _from: Unix timestamp (number of seconds since epoch) of the start of the query window.
        If not provided, the start of the query window will be 15 minutes before the ``to`` timestamp.
        If neither ``from`` nor ``to`` are provided, the query window will be ``[now - 15m, now]``.
    :type _from: int, optional
    :param to: Unix timestamp (number of seconds since epoch) of the end of the query window.
        If not provided, the end of the query window will be 15 minutes after the ``from`` timestamp.
        If neither ``from`` nor ``to`` are provided, the query window will be ``[now - 15m, now]``.
    :type to: int, optional
    :param page_limit: Maximum number of results returned.
    :type page_limit: int, optional
    :param page_cursor: String to query the next page of results.
        This key is provided with each valid response from the API in ``meta.page.after``.
    :type page_cursor: str, optional

    :return: A generator of paginated results.
    :rtype: collections.abc.Iterable[ProcessSummary]
    """
    kwargs: Dict[str, Any] = {}
    if search is not unset:
        kwargs["search"] = search

    if tags is not unset:
        kwargs["tags"] = tags

    if _from is not unset:
        kwargs["_from"] = _from

    if to is not unset:
        kwargs["to"] = to

    if page_limit is not unset:
        kwargs["page_limit"] = page_limit

    if page_cursor is not unset:
        kwargs["page_cursor"] = page_cursor

    local_page_size = get_attribute_from_path(kwargs, "page_limit", 1000)
    endpoint = self._list_processes_endpoint
    set_attribute_from_path(kwargs, "page_limit", local_page_size, endpoint.params_map)
    pagination = {
        "limit_value": local_page_size,
        "results_path": "data",
        "cursor_param": "page_cursor",
        "cursor_path": "meta.page.after",
        "endpoint": endpoint,
        "kwargs": kwargs,
    }
    return endpoint.call_with_http_info_paginated(pagination)
[ 245, 4491, 41, 2855 ]
def METHOD_NAME(cls, *args, **kwargs):
    if cls._args_schema is not None:
        return cls._args_schema
    cls._args_schema = super().METHOD_NAME(*args, **kwargs)

    # define Arg Group ""

    _args_schema = cls._args_schema
    _args_schema.resource_group = AAZResourceGroupNameArg(
        required=True,
    )
    _args_schema.name = AAZStrArg(
        options=["-n", "--name"],
        help="Name of the VNet gateway.",
        required=True,
        id_part="name",
    )
    return cls._args_schema
[ 56, 134, 135 ]
def METHOD_NAME(
    old_path: pathlib.Path, new_path: pathlib.Path, master_url: str
) -> None:
    """
    maybe_shim_old_cert_store will detect when an old v0 cert store is present
    and will shim it to a v1 cert store.
    """
    if not old_path.exists():
        return None

    # Only try to shim when ONLY the old path exists.
    if not new_path.exists():
        with old_path.open("r") as f:
            pem_content = f.read()

        store = {master_url: pem_content}

        with new_path.open("w") as f:
            json.dump(store, f, indent=4, sort_keys=True)

    old_path.unlink()
[ 2946, 3308, 2228, 1941, 1308 ]
def METHOD_NAME(test_settings):
    settings = test_settings(dict(mode="disabled"))
    wandb.setup(settings=settings)
    wandb.login()
    wandb.finish()
[ 9, 273 ]
async def METHOD_NAME(tag: str):
    """
    Resets the concurrency limit slots set on the specified tag.
    """
    async with get_client() as client:
        try:
            await client.reset_concurrency_limit_by_tag(tag=tag)
        except ObjectNotFound:
            exit_with_error(f"No concurrency limit found for the tag: {tag}")

    exit_with_success(f"Reset concurrency limit set on the tag: {tag}")
[ 656 ]