Columns:
    text: string, lengths 15 to 7.82k
    ids: sequence, lengths 1 to 7
def METHOD_NAME():
[ 19, 923, 100, 681 ]
def METHOD_NAME(version):
    """
    Parses the SQL Server major version out of the full version

    :param version: String representation of full SQL Server version (from @@version)
    :return: integer representation of SQL Server major version (i.e. 2012, 2019)
    """
    match = re.search(r"Microsoft SQL Server (\d+)", version)
    if not match:
        return None
    return int(match.group(1))
[ 214, 11606, 3901, 281 ]
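For illustration, a minimal usage sketch of the parser above (the sample @@version string is an assumption; METHOD_NAME stands in for the real function name):

import re

# Typical @@version output begins with the product name and major version.
version_string = "Microsoft SQL Server 2019 (RTM) - 15.0.2000.5 (X64)"
assert METHOD_NAME(version_string) == 2019
assert METHOD_NAME("not a version string") is None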
def METHOD_NAME(self) -> Tuple[str, ...]:
    # Note: could actually be language code or name.
    return self._language_codes
[ 2938, 1114 ]
def METHOD_NAME(inputs, directory):
    """
    Example:

        export PYTHONPATH=`pwd`
        python -m linkml.utils.execute_tutorial -d /tmp/tutorial/ docs/intro/tutorial01.md

    """
    logging.basicConfig(level=logging.INFO)
    errs = []
    for input in inputs:
        logging.info(f"INPUT={input}")
        blocks = parse_file_to_blocks(input)
        print(f"## {len(blocks)} Blocks")
        localdir = Path(input).stem
        subdir = PurePath(directory, localdir)
        input_errs = execute_blocks(str(subdir), blocks)
        if len(input_errs) > 0:
            logging.error(f"TUTORIAL {input} FAILURES: {len(input_errs)}")
            errs += input_errs
    logging.info(f"Errors = {len(errs)}")
    if len(errs) > 0:
        logging.error(f"Encountered {len(errs)} Errors")
        for err in errs:
            logging.error(f"Error: {err}")
        sys.exit(1)
[ 615 ]
def METHOD_NAME(self, output: Tuple[torch.Tensor, torch.Tensor]) -> None:
    y_pred, y = output
    if y_pred.ndimension() not in (1, 2):
        raise ValueError("Predictions should be of shape (batch_size, n_targets) or (batch_size, ).")
    if y.ndimension() not in (1, 2):
        raise ValueError("Targets should be of shape (batch_size, n_targets) or (batch_size, ).")
[ 250, 555 ]
async def METHOD_NAME(name, attr_list): return
[ 238, 24, 828, 864, 245 ]
def METHOD_NAME(flask_client):
    email = random_email()
    User.create(email=email, password="password", name="Test User", activated=True)
    Session.commit()

    r = flask_client.post(
        url_for("api.auth_login"),
        json={
            "email": email,
            "password": "password",
            "device": "Test Device",
        },
    )

    assert r.status_code == 200
    api_key = r.json["api_key"]
    assert not r.json["mfa_enabled"]
    assert r.json["mfa_key"] is None
    assert r.json["name"] == "Test User"

    # same device, should return same api_key
    r = flask_client.post(
        url_for("api.auth_login"),
        json={
            "email": email,
            "password": "password",
            "device": "Test Device",
        },
    )
    assert r.json["api_key"] == api_key
[ 9, 2433, 273, 398, 1985 ]
def METHOD_NAME(self) -> str: ...
[ 17765 ]
def METHOD_NAME(self, verbos=False):
    return self._checkMapGeneral(self.getFileMapF, self.getFileBitsF,
                                 self.getFileCodesF, verbos=verbos)
[ 250, 422, 171, 474 ]
def METHOD_NAME(self) -> None:
    """
    Test for failure on no host part.
    """
    self.assertRaises(jid.InvalidFormat, jid.parse, "user@")
[ 9, 654, 1806 ]
def METHOD_NAME(self, other):
    for k, v in other.items():
        self[k] = v
[ 86 ]
def METHOD_NAME(elasticity_params=None):
    params = {"available_elasticity_dims": [ElasticityDim.WIDTH.value]}
    if elasticity_params is not None:
        params.update(elasticity_params)
    multi_elasticity_handler, supernet = create_supernet(TwoConvModel, TwoConvModel.INPUT_SIZE, params)
    move_model_to_cuda_if_available(supernet)
    return multi_elasticity_handler.width_handler, supernet
[ 129, 1603, 1306, 2327, 9080 ]
def METHOD_NAME(self, component_config: Union[Dict, TeslaBatSetup, TeslaCounterSetup, TeslaInverterSetup]) -> None:
    if isinstance(component_config, Dict):
        component_type = component_config["type"]
    else:
        component_type = component_config.type
    component_config = dataclass_from_dict(COMPONENT_TYPE_TO_MODULE[
        component_type].component_descriptor.configuration_factory, component_config)
    if component_type in self.COMPONENT_TYPE_TO_CLASS:
        self.components["component"+str(component_config.id)] = (self.COMPONENT_TYPE_TO_CLASS[component_type](
            component_config))
    else:
        raise Exception(
            "illegal component type " + component_type + ". Allowed values: " +
            ','.join(self.COMPONENT_TYPE_TO_CLASS.keys())
        )
[ 238, 1007 ]
def METHOD_NAME(self, v):
    if isinstance(v, np.floating):
        return float
    elif isinstance(v, np.integer):
        return int
    else:
        return type(v)
[ 44 ]
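A quick hypothetical check of the type mapper above, calling the unbound function directly (self is unused in the body):

import numpy as np

assert METHOD_NAME(None, np.float64(1.5)) is float  # NumPy floats map to float
assert METHOD_NAME(None, np.int32(2)) is int        # NumPy ints map to int
assert METHOD_NAME(None, "text") is str             # everything else keeps its own type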
def METHOD_NAME(basename_list, versioning_pattern, env):
    """
    Select the most matching binary for the given pattern in the given
    environment. Works well for binaries that contain versioning.
    """
    binaries = find_by_regex_in_envpath(versioning_pattern, env)

    if not binaries:
        return False
    elif len(binaries) == 1:
        # Return the first found (earliest in PATH) binary for the only
        # found binary name group.
        return list(binaries.values())[0][0]
    else:
        keys = list(binaries.keys())
        keys.sort()

        # If one of the base names match, select that version.
        files = None
        for base_key in basename_list:
            # Cannot use set here as it would destroy precedence.
            if base_key in keys:
                files = binaries[base_key]
                break

        if not files:
            # Select the "newest" available version if there are multiple and
            # none of the base names matched.
            files = binaries[keys[-1]]

        # Return the one earliest in PATH.
        return files[0]
[ 19, 808, 623, 157 ]
def METHOD_NAME(reviews):
    """Builds a dictionary containing the number of reviews and the list of unique reviewers."""
    result = dict()
    result["review_number"] = reviews.totalCount
    reviewers = set()
    for review in reviews:
        if review.user:
            reviewers.add(review.user.login)
    result["unique_reviewers"] = list(reviewers)
    return result
[ 19, 6407, 100 ]
def METHOD_NAME(self):
    n = self.nr_leaf_nodes()
    D = [[0 for _ in range(n)] for _ in range(n)]
    self.path_weight(self.path(0, 2))
    for i in range(n):
        for j in range(i+1, n):
            path = self.path(i, j)
            w = self.path_weight(path)
            D[i][j], D[j][i] = w, w
    return DistanceMatrix(D)
[ 1886, 430 ]
def METHOD_NAME(rng, *shape): return np.asarray(rng.randn(*shape), dtype=np.float32)
[ 9997 ]
def METHOD_NAME(polygon1, polygon2):
    """Check if two SphPolygons overlap."""
    if polygon1.union(polygon2):
        return True
    return False
[ 250, 217, 1603, 6863, 2820 ]
def METHOD_NAME():
    for port_name, nbr in list(ctrl_links.items()):
        check_wpa_supplicant_process(duthost, port_name)
        if isinstance(nbr["host"], EosHost):
            continue
        check_wpa_supplicant_process(nbr["host"], nbr["port"])
    return True
[ 9, 17541, -1, 4491 ]
def METHOD_NAME(self, filename: Union[str, Path], add: bool = True) -> Optional[AudioFile]:
    """Add a song to the library based on filename.

    If 'add' is true, the song will be added and the 'added' signal
    may be fired.

    Example (add=False): load many songs and call Library.add(songs)
    to add all in one go.

    The song is returned if it is in the library after this call.
    Otherwise, None is returned.
    """
    key = normalize_path(filename, True)
    song = None
    if key not in self._contents:
        song = MusicFile(filename)
        if song and add:
            self.add([song])
    else:
        print_d(f"Already got file {filename!r}")
        song = self._contents[key]
    return song
[ 238, 1147 ]
def METHOD_NAME(self) -> Optional[str]:
    """
    Resource ID.
    """
    return pulumi.get(self, "id")
[ 147 ]
def METHOD_NAME(self) -> str: return self.get_default_api_key()
[ 235, 58, 59 ]
def METHOD_NAME(llm: str) -> BaseLanguageModel:
    """Import llm from llm name"""
    return import_class(f"langchain.llms.{llm}")
[ 512, 8474 ]
def METHOD_NAME(dtype):
    nrm2 = _nrm2_memview[_numpy_to_cython(dtype)]

    rng = np.random.RandomState(0)
    x = rng.random_sample(10).astype(dtype, copy=False)
    expected = np.linalg.norm(x)
    actual = nrm2(x)

    assert_allclose(actual, expected, rtol=RTOL[dtype])
[ 9, -1 ]
def METHOD_NAME(db_cursor, tablename):
    """Return True if the table exists, False otherwise."""
    return tablename in connection.introspection.table_names()
[ 410, 954 ]
def METHOD_NAME(self):
[ 0, 1 ]
def METHOD_NAME(R, r):
    """
    Compute the surface area of the intersection of sphere of radius R
    centered at (0, 0, 0) with a ball of radius r centered at (R, 0, 0).

    Parameters
    ----------
    R : float, sphere radius
    r : float, ball radius

    Returns
    -------
    area : float, the surface area.
    """
    x = (2 * R**2 - r**2) / (2 * R)  # x coord of plane
    if x >= -R:
        return 2 * np.pi * R * (R - x)
    if x < -R:
        return 4 * np.pi * R**2
[ 7768, 7812, 2845 ]
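Two sanity checks on the cap-area formula above (hypothetical calls; the values follow from x = (2*R**2 - r**2) / (2*R)): r = 0 puts the cutting plane at the near pole and gives zero area, while r = 2R pushes it to the far pole and returns the full sphere area 4*pi*R**2.

import numpy as np

R = 1.0
assert np.isclose(METHOD_NAME(R, 0.0), 0.0)            # vanishing ball: no intersection
assert np.isclose(METHOD_NAME(R, 2.0 * R), 4 * np.pi)  # ball reaches the far pole: full sphere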
def METHOD_NAME():
    """
    Gets the temperature in degrees Celsius from the humidity sensor.
    """
    return _sensehat.METHOD_NAME()
[ 19, 1729, 280, 2966 ]
def METHOD_NAME(results, outdir):
    """Write the test results out as a simple webpage."""
    backends = []
    benchmarks = {}
    # Transpose the results so that they're accessible by benchmark.
    for btype, backend_results in results.items():
        backends.extend(list(backend_results))
        for bend in backend_results:
            for benchmark_name, res in backend_results[bend].items():
                benchmarks.setdefault(benchmark_name, {})[bend] = res

    # Convert each benchmark into an output comparison page and a row for the
    # comparison table.
    comparisons = {}
    for benchmark_name, benchmark_results in benchmarks.items():
        _build_output_comparison_page(benchmark_name, benchmark_results, outdir)

        # Compare each result to the "kiva.agg" result
        baseline = benchmark_results["kiva.agg"]
        comparisons[benchmark_name] = {
            name: result.compare_to(baseline)
            for name, result in benchmark_results.items()
        }

    # Fill out the comparison table and write the summary index
    comparison_table = _build_comparison_table(backends, comparisons)
    path = os.path.join(outdir, "index.html")
    with open(path, "w", encoding="utf-8") as fp:
        fp.write(_INDEX_TEMPLATE.format(comparison_table=comparison_table))
[ 2411 ]
def METHOD_NAME(self): return DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1))
[ 1413, 884 ]
def METHOD_NAME(value: Optional[T], answer_type: AnswerType) -> Union[Unanswered, T]:
    """
    If the given value is None, return an Unanswered of the given type.
    Otherwise, return the value.
    """
    if value is None:
        return Unanswered(answer_type)
    return value
[ -1 ]
def METHOD_NAME(counts: List[int], available_currents: List[float], missing_currents: List[float],
                expected_current: float):
    # setup
    cp = Chargepoint(4, None)
    ev = Ev(0)
    ev.data.control_parameter.required_currents = [16]*3
    ev.data.control_parameter.required_current = 16
    cp.data.set.charging_ev_data = ev
    cp.data.set.target_current = 10

    # evaluation
    current = common.available_current_for_cp(cp, counts, available_currents, missing_currents)

    # assertion
    assert current == expected_current
[ 9, 1272, 10558, 43, 3596 ]
def METHOD_NAME(self, graph_def):
    """Determines if a StatefulPartitionedCall op exists in the graph."""
    for node in graph_def.node:
        if node.op == "StatefulPartitionedCall":
            return True
    return False
[ 220, 3103, 1816, 128, 441 ]
def METHOD_NAME(self):
    import sqlalchemy

    return sqlalchemy.__version__
[ 1988, 281 ]
def METHOD_NAME(self, label=None, *args, **kwargs):
    _ = args, kwargs
    return self._count(lambda l: self._confusion_matrix[:, l-1].sum(axis=(1, 2)), label)
[ 395, 5610 ]
def METHOD_NAME(self):
    cube = low_res_4d()
    cube.remove_coord("grid_longitude")
    with self.assertRaises(ValueError):
        project(cube, ROBINSON)
[ 9, 1038, 4877 ]
def METHOD_NAME(self) -> bytes:
    # Since headers only contain ASCII characters, we can keep this simple.
    return str(self).encode()
[ 183 ]
def METHOD_NAME(self) -> int:
    """
    (w - R4) value for a given Blake2 flavor.

    Used in the function G
    """
    return self.w - self.R4
[ 2284, 13893 ]
def METHOD_NAME(next_link=None):
    if not next_link:
        request = build_list_operations_request(
            api_version=api_version,
            template_url=self.list_operations.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

    else:
        # make call to next link with the client's api-version
        _parsed_next_link = urllib.parse.urlparse(next_link)
        _next_request_params = case_insensitive_dict(
            {
                key: [urllib.parse.quote(v) for v in value]
                for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
            }
        )
        _next_request_params["api-version"] = self._config.api_version
        request = HttpRequest(
            "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        request.method = "GET"
    return request
[ 123, 377 ]
def METHOD_NAME(model_type: str, feature: str) -> Dict[str, Type["BaseDecoderConfig"]]: return decoder_config_registry[(model_type, feature)]
[ 19, 3642, 393 ]
def METHOD_NAME(self): pass
[ 709, 710 ]
def METHOD_NAME(make_data_dir):
    # Axon
    data_dir = make_data_dir  # tmp_path_factory.mktemp("data")
    layer_names = ["average_10um"] * 3
    data_file = (
        Path(os.path.abspath(__file__)).parents[3]
        / "docs"
        / "notebooks"
        / "pipelines"
        / "BrainLine"
        / "axon_data.json"
    )
    util.download_subvolumes(
        data_dir=data_dir,
        brain_id="pytest",
        layer_names=layer_names,
        dataset_to_save="val",
        data_file=data_file,
    )
    output_dir = data_dir / "brainpytest" / "val"
    files = os.listdir(output_dir)
    assert len(files) == 2

    # Soma
    data_file = (
        Path(os.path.abspath(__file__)).parents[3]
        / "docs"
        / "notebooks"
        / "pipelines"
        / "BrainLine"
        / "soma_data.json"
    )
    util.download_subvolumes(
        data_dir=data_dir,
        brain_id="pytest_download",
        layer_names=layer_names,
        dataset_to_save="val",
        data_file=data_file,
    )
    output_dir = data_dir / "brainpytest_download" / "val"
    files = os.listdir(output_dir)
    assert len(files) == 2
[ 9, 136, -1 ]
def METHOD_NAME(
    subscription_id: str,
    location: str,
    extension_type_name: str,
    **kwargs: Any
) -> HttpRequest:
    api_version = "2021-11-01-preview"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.KubernetesConfiguration/locations/{location}/extensionTypes/{extensionTypeName}/versions')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
        "location": _SERIALIZER.url("location", location, 'str'),
        "extensionTypeName": _SERIALIZER.url("extension_type_name", extension_type_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
[ 56, 245, 377 ]
async def METHOD_NAME(async_iterator, n: int):
    """
    Collect data into fixed-length chunks or blocks.

    >>> list(grouper(3, 'ABCDEFG'))
    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]

    Updated from:
    https://stackoverflow.com/questions/31164731/python-chunking-csv-file-multiproccessing/31170795#31170795

    Modified for async
    """
    chunk = []
    async for item in async_iterator:
        chunk.append(item)
        if len(chunk) >= n:
            yield chunk
            chunk.clear()
    if chunk != []:
        yield chunk
[ 3384 ]
def METHOD_NAME(test_case):
    x_placement = flow.placement("cpu", [0])
    x_sbp = flow.sbp.broadcast
    x = flow.ones(2, 3, placement=x_placement, sbp=x_sbp)
    y_placement = flow.placement("cuda", [0])
    y_sbp = flow.sbp.split(0)
    y = flow.ones(4, 5, placement=y_placement, sbp=y_sbp)
    old_id = id(x)
    x.data = y
    test_case.assertEqual(old_id, id(x))
    test_case.assertTrue(x.shape == (4, 5))
    test_case.assertTrue(x.placement == y_placement)
    test_case.assertTrue(x.sbp[0] == y_sbp)
[ 9, 285, 0, 365 ]
def METHOD_NAME():
    """
    Function to edit and retrieve user profile information
    """
    current_user = get_jwt_identity()
    user = User.query.filter_by(email=current_user).first()
    if request.method == "GET":
        return (
            jsonify(
                {
                    "username": user.email,
                    "bio": user.bio,
                    "first_name": user.first_name,
                    "last_name": user.last_name,
                    "email": user.email,
                    "image": user.image,
                    "id": user.id,
                }
            ),
            200,
        )
    elif request.method == "POST":
        data = request.get_json()
        # print(data['image'])
        user.update_bio(data["bio"])
        user.update_first_name(data["first_name"])
        user.update_last_name(data["last_name"])
        if data["email"] != user.email:
            print("email changed")
            timestamp = datetime.datetime.now()
            timestamp = timestamp.strftime("%d %H:%M:%S")
            md5_digest = hashlib.md5(timestamp.encode()).hexdigest()
            user.update_activation_code(md5_digest)
            confirmation_email(data["email"], md5_digest)
            user.update_email(data["email"])
        db.session.merge(user)
        db.session.commit()
        return jsonify({"msg": "User information changed"}), 200
    else:
        return jsonify({"msg": "profile OK"}), 200
[ 337 ]
def METHOD_NAME(self, mockExists, mockGetcwd, mockMakedirs):
    """
    Test fileCachePlugin
    """
    mockExists.return_value = False  # will create a file
    mockGetcwd.return_value = "/tornado/document/root"
    plugin = FileCacheLoggingPlugin()
    mockGetcwd.assert_called_once()
    mockExists.assert_called_once_with(os.path.join(mockGetcwd.return_value, "pilotlogs"))
    mockMakedirs.assert_called_once_with(os.path.join(mockGetcwd.return_value, "pilotlogs"))

    # sendMessage()
    messsageText = (
        "2022-02-23 13:48:35.123456 UTC DEBUG [PilotParams] JSON file loaded: pilot.json\n"
        + "2022-02-23 13:48:36.123456 UTC DEBUG [PilotParams] JSON file analysed: pilot.json"
    )
    messageJSON = json.dumps(messsageText)
    vo = "anyVO"
    pilotUUID = "78f39a90-2073-11ec-98d7-b496913c0cf4"
    # use a temporary dir, not the one above. Plugin will create the file to write into.
    with tempfile.TemporaryDirectory(suffix="pilottests") as d:
        plugin.meta["LogPath"] = d
        res = plugin.sendMessage(messageJSON, pilotUUID, vo)
        self.assertTrue(res["OK"])
        with open(os.path.join(d, vo, pilotUUID)) as pilotLog:
            content = pilotLog.read()
            self.assertEqual(content, messsageText)

    # failures ?
    with tempfile.TemporaryDirectory(suffix="pilottests") as d:
        plugin.meta["LogPath"] = d
        os.chmod(d, 0o0000)
        res = plugin.sendMessage(messageJSON, pilotUUID, vo)
        self.assertFalse(res["OK"])
        pilotUUID = "whatever"
        res = plugin.sendMessage(messageJSON, pilotUUID, vo)
        self.assertFalse(res["OK"])
[ 9, 171, 596, 2793 ]
def METHOD_NAME(self, method, url, body, headers): return (httplib.OK, "", self.base_headers, httplib.responses[httplib.OK])
[ 2455, 1441, 4430, 238, 3264, 3264, 152 ]
def METHOD_NAME(self):
    objs = [command_context.JobContext, command_context.JobRunContext]
    output = command_context.build_filled_context(*objs)
    assert isinstance(output.base, objs[1])
    assert isinstance(output.next.base, objs[0])
    assert not output.next.next
[ 9, 56, 7133, 198, 357 ]
def METHOD_NAME(self):
    """Set all selected modules and actually install them."""
    self.ensure_one()
    self.module_ids.write({"state": "to install"})
    self.env.cr.commit()  # pylint: disable=invalid-commit
    Registry.new(self.env.cr.dbname, update_module=True)
    self.write({"state": "done"})
    return self.return_same_form_view()
[ 428, 468 ]
def METHOD_NAME(NOT_GOOD):  # [invalid-name]
    """Function with a badly named argument."""
    return NOT_GOOD
[ 717, 1068, 9577 ]
def METHOD_NAME(self, instance):
    self.update_from_config(instance)

    for device in self.list_ups_devices():
        if device not in self.excluded_devices:
            excluded = False
            for r in self.excluded_devices_re:
                if r.match(device):
                    excluded = True
                    break
            if excluded:
                continue

            self.log.debug("querying device: %s", device)

            # query stats
            raw_stats = self.query_ups_device(device)
            stats, tags = self.convert_and_filter_stats(raw_stats)

            # report stats
            for k, v in stats.items():
                self.gauge('upsc.{}'.format(k), v, tags=tags)
[ 250 ]
def METHOD_NAME(self, source, result1, result2, mixer, p):
    assume(result1 is not result2)

    def do_map(value):
        rep = repr(value)
        random = Random(hashlib.sha384((mixer + rep).encode()).digest())
        if random.random() <= p:
            return result1
        else:
            return result2

    return source.flatmap(do_map)
[ -1, 1554 ]
def METHOD_NAME(basename: str) -> str:
    """Restore basename from the trash directory.

    Args:
        basename: Basename of the file in the trash directory.
    Returns:
        The path to the restored file.
    """
    trash_filename = os.path.join(_files_directory, basename)
    info_filename = _get_info_filename(basename)
    if not os.path.exists(info_filename) or (
        not os.path.exists(trash_filename) and not os.path.islink(trash_filename)
    ):
        raise FileNotFoundError(f"File for '{basename}' does not exist")
    original_filename, _ = trash_info(basename)
    if not os.path.isdir(os.path.dirname(original_filename)):
        raise FileNotFoundError(f"Original directory of '{basename}' is not accessible")
    shutil.move(trash_filename, original_filename)
    os.remove(info_filename)
    return original_filename
[ 7588 ]
def METHOD_NAME(self):
    protocol = flask.request.form.get("protocol")
    is_new = False
    error = self.validate_form_fields(protocol)
    if error:
        return error

    id = flask.request.form.get("id")
    if id:
        # Find an existing service to edit
        service = get_one(self._db, ExternalIntegration, id=id, goal=ExternalIntegration.CATALOG_GOAL)
        if not service:
            return MISSING_SERVICE
        if protocol != service.protocol:
            return CANNOT_CHANGE_PROTOCOL
    else:
        # Create a new service
        service, is_new = self._create_integration(
            self.protocols,
            protocol,
            ExternalIntegration.CATALOG_GOAL,
        )
        if isinstance(service, ProblemDetail):
            return service

    name = self.get_name(service)
    if isinstance(name, ProblemDetail):
        self._db.rollback()
        return name
    elif name:
        service.name = name

    [protocol] = [p for p in self.protocols if p.get("name") == protocol]
    result = self._set_integration_settings_and_libraries(service, protocol)
    if isinstance(result, ProblemDetail):
        return result

    external_integration_link = self._set_external_integration_link(service)
    if isinstance(external_integration_link, ProblemDetail):
        return external_integration_link

    library_error = self.check_libraries(service)
    if library_error:
        self._db.rollback()
        return library_error

    if is_new:
        return Response(str(service.id), 201)
    else:
        return Response(str(service.id), 200)
[ 356, 72 ]
def METHOD_NAME(self):
[ 10508, 118 ]
def METHOD_NAME(
    self, time_range_constraint: TimeRangeConstraint, time_granularity: TimeGranularity
) -> TimeRangeConstraint:
    """Change the time range so that the ends are at the ends of the appropriate time granularity windows.

    e.g. [2020-01-15, 2020-2-15] with MONTH granularity -> [2020-01-01, 2020-02-29]
    """
    constraint_start = time_range_constraint.start_time
    constraint_end = time_range_constraint.end_time

    start_ts = pd.Timestamp(time_range_constraint.start_time)
    if not is_period_start(time_granularity, start_ts):
        constraint_start = adjust_to_start_of_period(time_granularity, start_ts).to_pydatetime()

    end_ts = pd.Timestamp(time_range_constraint.end_time)
    if not is_period_end(time_granularity, end_ts):
        constraint_end = adjust_to_end_of_period(time_granularity, end_ts).to_pydatetime()

    if constraint_start < TimeRangeConstraint.ALL_TIME_BEGIN():
        constraint_start = TimeRangeConstraint.ALL_TIME_BEGIN()
    if constraint_end > TimeRangeConstraint.ALL_TIME_END():
        constraint_end = TimeRangeConstraint.ALL_TIME_END()

    return TimeRangeConstraint(start_time=constraint_start, end_time=constraint_end)
[ 270, 104, 661, 24, 10459 ]
def METHOD_NAME(filename, variable):
    """
    Load the value of a variable in a Python file.

    Run the contents of the file in a namespace and return the result of the
    variable named C{variable}.

    @param filename: string
    @param variable: string
    """
    with open(filename) as fileObj:
        data = fileObj.read()
    d = {"__file__": filename}
    codeObj = compile(data, filename, "exec")
    eval(codeObj, d, d)
    value = d[variable]
    return value
[ 557, 99, 280, 171 ]
def METHOD_NAME():
    class Logic(m.Generator2):
        def __init__(self, width=None):
            T = m.Bit if width is None else m.Bits[width]
            self.io = io = m.IO(I=m.In(T), O=m.Out(T))
            io.O @= ~io.I

    class LogicAsserts(m.Generator2):
        def __init__(self, dut, width=None):
            T = m.Bit if width is None else m.Bits[width]
            self.width = width
            self.io = io = m.IO(I=m.In(T), O=m.In(T), other=m.In(m.Bit))
            m.inline_verilog("{I} {O} {other}", I=io.I, O=io.O, other=io.other)
            self.bind2_args = [m.bits(dut.I)[0]]

    m.bind2(Logic, LogicAsserts)

    class Top(m.Circuit):
        T = m.Bits[2]
        io = m.IO(I=m.In(T), O=m.Out(T))
        I = m.bits(list(map(lambda x: Logic()()(x), io.I)))
        io.O @= Logic(2)()(I)

    opts = {
        "output": "mlir",
        "use_native_bind_processor": True,
    }
    _assert_compilation(Top, "test_bind2_generator", "mlir", opts)

    class _CheckLogicAssertsAreBoundModulesPass(CircuitPass):
        def __call__(self, defn):
            if not isinstance(defn, LogicAsserts):
                return
            assert is_bound_module(defn)

    assert not is_bound_module(Top)
    _CheckLogicAssertsAreBoundModulesPass(Top).run()
[ 9, 1443 ]
def METHOD_NAME(self):
    kpts = numpy.random.random((4,3)) * .25
    kpts[3] = -numpy.einsum('ij->j', kpts[:3])
    with_df = mdf.MDF(cell).set(auxbasis='weigend')
    with_df.linear_dep_threshold = 1e-7
    with_df.kpts = kpts
    mo = (numpy.random.random((nao,nao)) +
          numpy.random.random((nao,nao))*1j)
    eri = with_df.get_eri(kpts).reshape((nao,)*4)
    eri0 = numpy.einsum('pjkl,pi->ijkl', eri , mo.conj())
    eri0 = numpy.einsum('ipkl,pj->ijkl', eri0, mo       )
    eri0 = numpy.einsum('ijpl,pk->ijkl', eri0, mo.conj())
    eri0 = numpy.einsum('ijkp,pl->ijkl', eri0, mo       )
    eri1 = with_df.ao2mo(mo, kpts)
    self.assertAlmostEqual(abs(eri1.reshape(eri0.shape)-eri0).sum(), 0, 9)
[ 9, -1 ]
def METHOD_NAME(self): return np.sqrt(self.mean_squared_error())
[ 1563, 314, 9702, 168 ]
def METHOD_NAME(self) -> str: return pulumi.get(self, "collection_name")
[ 1098, 156 ]
def METHOD_NAME(build_node, type_, source):
    node = build_node(source)

    with pytest.raises(InvalidType):
        type_from_annotation(node)
[ 9, 280, 2141, 1479 ]
def METHOD_NAME(directory, recurse=True, **kwargs):
    """
    Work like os.walk but if recurse is False just list current directory
    """
    if recurse:
        for root, dirs, files in os.walk(directory, **kwargs):
            yield root, dirs, files
    else:
        files = []
        for filename in os.listdir(directory):
            if os.path.isfile(os.path.join(directory, filename)):
                files.append(filename)
        yield directory, [], files
[ 350, 4716 ]
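A small usage sketch of the non-recursive branch above (the directory layout and the call name are assumptions):

import os

os.makedirs("demo/sub", exist_ok=True)
open("demo/a.txt", "w").close()

# With recurse=False, exactly one (root, [], files) triple is yielded,
# and subdirectories such as demo/sub are not descended into.
for root, dirs, files in METHOD_NAME("demo", recurse=False):
    print(root, dirs, files)  # demo [] ['a.txt']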
def METHOD_NAME(url: str, params: Dict[str, str]) -> str:
    """
    Given a URL, update the query parameters and return the modified URL.

    >>> update_query_params('http://example.com?foo=bar&biz=baz', {'foo': 'stuff'})
    'http://example.com?foo=stuff&biz=baz'
    """
    scheme, netloc, path, query_string, fragment = urlsplit(url)
    query_params = parse_qs(query_string)
    query_params.update(params)
    new_query_string = urlencode(query_params, doseq=True)
    return urlunsplit(
        (str(scheme), str(netloc), str(path), str(new_query_string), str(fragment))
    )
[ 86, 539, 434 ]
def METHOD_NAME(
    clamav_client: clamav.client.ClamAVClient,
    oci_client: oc.Client,
    image_reference: str,
) -> typing.Generator[clamav.model.MalwareScanResult, None, None]:
    start_time = datetime.datetime.now()
    logger.info(f'starting to scan {image_reference=}')

    try:
        content_iterator = iter_image_files(
            image_reference=image_reference,
            oci_client=oci_client,
        )
        findings = clamav_client.scan_container_image(
            content_iterator=content_iterator,
        )
        yield from findings
        passed_seconds = datetime.datetime.now().timestamp() - start_time.timestamp()
        logger.info(f'scan finished for {image_reference=} after {passed_seconds=}')
        return
    except tarfile.TarError as te:
        passed_seconds = datetime.datetime.now().timestamp() - start_time.timestamp()
        logger.warning(f'{image_reference=}: {te=} - falling back to layer-scan {passed_seconds=}')

    # fallback to layer-wise scan in case we encounter gzip-uncompression-problems
    def iter_layers():
        manifest = oci_client.manifest(image_reference=image_reference)
        for layer in manifest.layers:
            layer_blob = oci_client.blob(
                image_reference=image_reference,
                digest=layer.digest,
                stream=True,
            )
            yield (layer_blob.iter_content(chunk_size=4096), layer.digest)

    findings = clamav_client.scan_container_image(
        content_iterator=iter_layers(),
    )
    yield from findings

    passed_seconds = datetime.datetime.now().timestamp() - start_time.timestamp()
    logger.info(f'{image_reference=} layer-scan finished after {passed_seconds=}')
[ 793, 10358, 660 ]
def METHOD_NAME(self, filename):
    """Preconditions:
    - our agent applies to this entry
    - filename is URL decoded"""
    for line in self.rulelines:
        if line.applies_to(filename):
            return line.METHOD_NAME
    return True
[ 9713 ]
def METHOD_NAME(self):
    Partnership = apps.get_model('rsr.partnership')
    return self.partners(Partnership.IATI_REPORTING_ORGANISATION)
[ -1 ]
def METHOD_NAME(self, obj: ConfigurationBundle) -> dict:
    """Serialize config to json dict.

    Args:
        obj: AbstractConfig object to serialize.

    Returns:
        Representation of object as a dict suitable for serializing to json.
    """
    config: dict = self.schema.dump(obj)
    json_serializable_dict: dict = convert_to_json_serializable(data=config)
    return json_serializable_dict
[ 183 ]
def METHOD_NAME(factory_name: Optional[str] = None,
                private_endpoint_connection_name: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPrivateEndpointConnectionResult:
    """
    Gets a private endpoint connection

    :param str factory_name: The factory name.
    :param str private_endpoint_connection_name: The private endpoint connection name.
    :param str resource_group_name: The resource group name.
    """
    __args__ = dict()
    __args__['factoryName'] = factory_name
    __args__['privateEndpointConnectionName'] = private_endpoint_connection_name
    __args__['resourceGroupName'] = resource_group_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:datafactory/v20180601:getPrivateEndpointConnection', __args__, opts=opts, typ=GetPrivateEndpointConnectionResult).value

    return AwaitableGetPrivateEndpointConnectionResult(
        etag=pulumi.get(__ret__, 'etag'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        properties=pulumi.get(__ret__, 'properties'),
        type=pulumi.get(__ret__, 'type'))
[ 19, 547, 841, 550 ]
def METHOD_NAME(self):
    input = InputCell(1)
    plus_one = ComputeCell(
        [
            input,
        ],
        lambda inputs: inputs[0] + 1,
    )
    minus_one1 = ComputeCell(
        [
            input,
        ],
        lambda inputs: inputs[0] - 1,
    )
    minus_one2 = ComputeCell(
        [
            minus_one1,
        ],
        lambda inputs: inputs[0] - 1,
    )
    output = ComputeCell(
        [
            plus_one,
            minus_one2,
        ],
        lambda inputs: inputs[0] * inputs[1],
    )

    cb1_observer = []
    callback1 = self.callback_factory(cb1_observer)
    output.add_callback(callback1)

    input.value = 4
    self.assertEqual(cb1_observer[-1], 10)
[ 9, 2425, 427, 246, 673, 259, 6353 ]
def METHOD_NAME(evm: Evm) -> None:
    """
    Sign extend operation. In other words, extend a signed number which
    fits in N bytes to 32 bytes.

    Parameters
    ----------
    evm :
        The current EVM frame.
    """
    # STACK
    byte_num = pop(evm.stack)
    value = pop(evm.stack)

    # GAS
    charge_gas(evm, GAS_LOW)

    # OPERATION
    if byte_num > 31:
        # Can't extend any further
        result = value
    else:
        # U256(0).to_be_bytes() gives b'' instead b'\x00'.
        value_bytes = bytes(value.to_be_bytes32())
        # Now among the obtained value bytes, consider only
        # N `least significant bytes`, where N is `byte_num + 1`.
        value_bytes = value_bytes[31 - int(byte_num) :]
        sign_bit = value_bytes[0] >> 7
        if sign_bit == 0:
            result = U256.from_be_bytes(value_bytes)
        else:
            num_bytes_prepend = 32 - (byte_num + 1)
            result = U256.from_be_bytes(
                bytearray([0xFF] * num_bytes_prepend) + value_bytes
            )

    push(evm.stack, result)

    # PROGRAM COUNTER
    evm.pc += 1
[ 2125 ]
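To make the sign-extension rule above concrete, a standalone sketch using plain ints in place of the U256 type (an assumption for illustration; value is treated as a 256-bit word): byte_num selects the width of the signed operand, and its top bit decides whether the upper bytes are filled with 0x00 or 0xFF.

def sign_extend(byte_num: int, value: int) -> int:
    # Mimic EVM SIGNEXTEND semantics on plain Python ints.
    if byte_num > 31:
        return value
    sign_bit_pos = 8 * (byte_num + 1) - 1
    if value & (1 << sign_bit_pos):
        # Negative: set every bit above the sign bit, staying within 256 bits.
        return value | ((1 << 256) - (1 << (sign_bit_pos + 1)))
    # Positive: keep only the selected least significant bytes.
    return value & ((1 << (sign_bit_pos + 1)) - 1)

assert sign_extend(0, 0xFF) == (1 << 256) - 1  # 0xFF as int8 is -1
assert sign_extend(0, 0x7F) == 0x7F            # positive values pass through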
def METHOD_NAME(self, user):
    playlists = user.get_playlists()
    assert isinstance(playlists, deezer.PaginatedList)
    playlist = playlists[0]
    assert isinstance(playlist, deezer.Playlist)
    assert repr(playlist) == "<Playlist: AC/DC>"
    assert len(playlists) == 25
[ 9, 19, 9828 ]
def METHOD_NAME(self, conn): self._conn = conn
[ 123 ]
async def METHOD_NAME(sql_query, additional_elements=None, **kwargs):
    """
    Generate a CSV representation of a query result.

    Parameters
    ----------
    sql_query : str
        SQL query to stream output of
    additional_elements : dict
        Additional columns elements to include along with the query result

    Yields
    ------
    bytes
        Encoded csv lines
    """
    logger = current_app.flowapi_logger
    db_conn_pool = current_app.db_conn_pool
    logger.debug("Starting generator.", request_id=request.request_id)
    yield_header = True
    line = Line()
    writer = csv.writer(line)
    if additional_elements is None:
        additional_elements = {}
    async with db_conn_pool.acquire() as connection:
        # Configure asyncpg to encode/decode JSON values
        await connection.set_type_codec(
            "json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
        )
        logger.debug("Connected.", request_id=request.request_id)
        async with connection.transaction():
            logger.debug("Got transaction.", request_id=request.request_id)
            logger.debug(f"Running {sql_query}", request_id=request.request_id)
            try:
                async for row in connection.cursor(sql_query):
                    if yield_header:
                        writer.writerow(chain(row.keys(), additional_elements.keys()))
                        yield line.read()
                        yield_header = False
                    writer.writerow(chain(row.values(), additional_elements.values()))
                    yield line.read()
                logger.debug("Finishing up.", request_id=request.request_id)
            except Exception as e:
                logger.error(e)
[ 919, 1571, 947, 732 ]
def METHOD_NAME(e, depth, pos=()):
    nodes.append(dotnode(e, styles, labelfunc=labelfunc, pos=pos, repeat=repeat))
    if maxdepth and depth >= maxdepth:
        return
    edges.extend(dotedges(e, atom=atom, pos=pos, repeat=repeat))
    for i, arg in enumerate(e.args):
        if not atom(arg):
            METHOD_NAME(arg, depth + 1, pos + (i,))
[ 2742 ]
def METHOD_NAME(self, index):
    """Handle clicking of an item.

    Args:
        index: The QModelIndex of the clicked item.
    """
    if not index.isValid():
        return
    item = self._model().data(index, downloads.ModelRole.item)
    if item.done and item.successful:
        item.open_file()
        item.remove()
[ 69, 2859 ]
def METHOD_NAME(patient_external_id: str):
    consultation = (
        PatientConsultation.objects.filter(patient__external_id=patient_external_id)
        .order_by("-created_date")
        .first()
    )
    if not consultation:
        return None
    summary_file = generate_and_upload_discharge_summary(consultation)
    return summary_file.read_signed_url(duration=2 * 24 * 60 * 60)
[ 567, 12641, 339, 3000, 274 ]
def METHOD_NAME(tool):
    global color_panel
    if color_panel is not None:
        print('remove color panel')
        color_panel.parent.remove(color_panel)
        color_panel = None
        return
    else:
        print('create color panel')
        color_panel = html.DIV(Class="color_panel")
        container <= color_panel
        color_panel.top = zwidth//10
        color_panel.left = zwidth//10
        color_panel.style.width = int(0.9*zwidth)
        color_panel.style.height = int(0.9*zwidth)
        color = getattr(panel, tool)
        print(color)
        for i, base_color in enumerate(['#ff0000', '#00ff00', '#0000ff']):
            div = html.DIV('&nbsp;',
                           style=dict(position="absolute",
                                      left=int(0.05*zwidth),
                                      top=int((i+1)*0.2*zwidth),
                                      width=int(0.8*zwidth),
                                      backgroundColor=base_color,
                                      lineHeight=int(0.01*zwidth)))
            div.num = i
            div.bind('click', lambda ev: pick_rgb(ev, tool))
            color_panel <= div
            slider = html.DIV('&nbsp;', Class='slider')
            slider.width = zwidth//50
            rgb = int(color[1:][2*i:2*i+2], 16)
            slider.left = int(div.width*rgb/256)
            div <= slider
[ 2981, 36 ]
def METHOD_NAME(self, **kwargs):
    aero_tstep = settings_utils.set_value_or_default(kwargs, 'aero_step', self.data.aero.timestep_info[-1])
    structure_tstep = settings_utils.set_value_or_default(kwargs, 'structural_step', self.data.structure.timestep_info[-1])
    convect_wake = settings_utils.set_value_or_default(kwargs, 'convect_wake', True)
    dt = settings_utils.set_value_or_default(kwargs, 'dt', self.settings['dt'])
    t = settings_utils.set_value_or_default(kwargs, 't', self.data.ts*dt)
    unsteady_contribution = settings_utils.set_value_or_default(kwargs, 'unsteady_contribution', False)

    # generate the wake because the solid shape might change
    self.data.aero.wake_shape_generator.generate({'zeta': aero_tstep.zeta,
                                                  'zeta_star': aero_tstep.zeta_star,
                                                  'gamma': aero_tstep.gamma,
                                                  'gamma_star': aero_tstep.gamma_star,
                                                  'dist_to_orig': aero_tstep.dist_to_orig})
    return self.data
[ 22 ]
def METHOD_NAME(self, mock):
    mock.configure_mock(**self.linear_dipole_attrs)
    x = Moments(mock).calculate(masses=[1, 3], origin='mass')
    assert_almost_equal(x[0], [0.5, 0, 0])
[ 9, 21, 4158, 6840 ]
def METHOD_NAME(self):
    return """\
        font
            Sets this legend group's title font.
        text
            Sets the title of the legend group.
        """
[ 1302, 1303 ]
def METHOD_NAME(self) -> Optional[Mapping[str, str]]:
    """
    Resource tags.
    """
    return pulumi.get(self, "tags")
[ 114 ]
def METHOD_NAME():
    """
    api: paddle.nn.Conv1DTranspose
    op version: 12
    """
    op = Net(padding=[1, 2])
    op.eval()
    # net, name, ver_list, delta=1e-6, rtol=1e-5
    obj = APIOnnx(op, 'nn_Conv1DTranspose', [9, 10, 11, 12, 13])
    obj.set_input_data(
        "input_data",
        paddle.to_tensor(
            randtool("float", -1, 1, [3, 1, 10]).astype('float32')))
    obj.run()
[ 9, 6465, 227, 3386, 4296, 746, 1569 ]
def METHOD_NAME(tls_client_auth):
    """Test different combinations for nsDS5ReplicaTransportInfo values

    :id: a3157108-cb98-43e9-ba16-8fb21a4a03e9
    :setup: Two supplier replication, enabled TLS client auth
    :steps:
        1. Set nsDS5ReplicaTransportInfoCheck: SSL or StartTLS or TLS
        2. Restart the instance
        3. Check that replication works
        4. Set nsDS5ReplicaTransportInfoCheck: LDAPS back
    :expectedresults:
        1. Success
        2. Success
        3. Replication works
        4. Success
    """
    m1 = tls_client_auth.ms['supplier1']
    m2 = tls_client_auth.ms['supplier2']
    repl = ReplicationManager(DEFAULT_SUFFIX)
    replica_m1 = Replicas(m1).get(DEFAULT_SUFFIX)
    replica_m2 = Replicas(m2).get(DEFAULT_SUFFIX)

    agmt_m1 = replica_m1.get_agreements().list()[0]
    agmt_m2 = replica_m2.get_agreements().list()[0]

    if ds_is_older('1.4.0.6'):
        check_list = (('TLS', False),)
    else:
        check_list = (('SSL', True), ('StartTLS', False), ('TLS', False))

    for transport, secure_port in check_list:
        agmt_m1.replace_many(('nsDS5ReplicaTransportInfo', transport),
                             ('nsDS5ReplicaPort', '{}'.format(m2.port if not secure_port else m2.sslport)))
        agmt_m2.replace_many(('nsDS5ReplicaTransportInfo', transport),
                             ('nsDS5ReplicaPort', '{}'.format(m1.port if not secure_port else m1.sslport)))
        repl.test_replication_topology(tls_client_auth)

    if ds_is_older('1.4.0.6'):
        agmt_m1.replace_many(('nsDS5ReplicaTransportInfo', 'SSL'),
                             ('nsDS5ReplicaPort', str(m2.sslport)))
        agmt_m2.replace_many(('nsDS5ReplicaTransportInfo', 'SSL'),
                             ('nsDS5ReplicaPort', str(m1.sslport)))
    else:
        agmt_m1.replace_many(('nsDS5ReplicaTransportInfo', 'LDAPS'),
                             ('nsDS5ReplicaPort', str(m2.sslport)))
        agmt_m2.replace_many(('nsDS5ReplicaTransportInfo', 'LDAPS'),
                             ('nsDS5ReplicaPort', str(m1.sslport)))
    repl.test_replication_topology(tls_client_auth)
[ 9, 1247, 3232 ]
def METHOD_NAME(method_name):
    connector = sqlalchemy_psycopg2_connector.SQLAlchemyPsycopg2Connector()
    assert getattr(connector, method_name)._exceptions_wrapped is True
[ 9, 503, 504, 2350 ]
def METHOD_NAME(
    self,
    model_params,
    tmp_dir,
    source_vocab_size=20,
    target_vocab_size=20,
    num_gpus=0,
    batch_size=2,
):
    training_params = dict(
        model_params,
        batch_size=batch_size,
        optimizer_params=dict(
            learning_rate=0.1,
        ),
        max_gradient_norm=1.0,
    )

    model_obj = Seq2SeqModelCaffe2(
        training_params,
        source_vocab_size,
        target_vocab_size,
        num_gpus,
    )
    model_obj.initialize_from_scratch()

    checkpoint_path_prefix = os.path.join(tmp_dir, 'checkpoint')
    checkpoint_path = model_obj.save(
        checkpoint_path_prefix=checkpoint_path_prefix,
        current_step=0,
    )

    return model_obj, checkpoint_path
[ 56, 7945, 578 ]
def METHOD_NAME(self, model, tokens, mask):
    logits, _ = model(src_tokens=self.get_masked_input(tokens, mask))
    lprobs = F.log_softmax(logits, dim=-1, dtype=torch.float)
    scores = lprobs.gather(2, tokens.unsqueeze(-1)).squeeze(-1)
    mask = mask.type_as(scores)
    scores = (scores * mask).sum(dim=-1) / mask.sum(dim=-1)
    return scores
[ 19, 3833 ]
def METHOD_NAME(self):
    """Return inverse XX+YY gate (i.e. with the negative rotation angle and same phase angle)."""
    return XXPlusYYGate(-self.params[0], self.params[1])
[ 3581 ]
def METHOD_NAME(self):
    config = {key: value for key, value in self.options.items()
              if key in self.cfg.settings and value is not None}
    for key, value in config.items():
        self.cfg.set(key.lower(), value)
[ 557, 200 ]
async def METHOD_NAME(self): assert "name" in dir(task_run)
[ 9, 156, 137, 309 ]
def METHOD_NAME():
    ufos = to_ufos(TESTFONT)
    assert "@SmallCaps = [ A.sc" in ufos[0].features.text
    assert "pos A A.sc 100" in ufos[0].features.text
    assert "pos A A.sc 500" in ufos[1].features.text
    assert "pos A Sacute 150" in ufos[0].features.text
    assert "pos A Sacute 300" in ufos[1].features.text
[ 9, 1798, 24, 1798 ]
def METHOD_NAME():
    op.drop_index(
        op.f("ix_privacyexperiencehistory_regions"),
        table_name="privacyexperiencehistory",
    )
    op.drop_index(
        op.f("ix_privacyexperiencehistory_id"), table_name="privacyexperiencehistory"
    )
    op.drop_table("privacyexperiencehistory")
    op.drop_index(op.f("ix_privacyexperience_regions"), table_name="privacyexperience")
    op.drop_index(op.f("ix_privacyexperience_id"), table_name="privacyexperience")
    op.drop_table("privacyexperience")
    op.drop_index(
        op.f("ix_privacyexperiencetemplate_regions"),
        table_name="privacyexperiencetemplate",
    )
    op.drop_index(
        op.f("ix_privacyexperiencetemplate_id"), table_name="privacyexperiencetemplate"
    )
    op.drop_table("privacyexperiencetemplate")
[ 1502 ]
def METHOD_NAME(src, band, fallback=None, override=None):
    """Figure out what value to use for nodata given a band and
    fallback/override settings

    :param src: Rasterio file
    """
    if override is not None:
        return override

    band0 = band if isinstance(band, int) else band[0]
    nodata = src.nodatavals[band0 - 1]

    if nodata is None:
        return fallback

    return nodata
[ 1014, 5162 ]
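The precedence in the function above is override first, then the band's own nodata value, then the fallback; a toy illustration with a stand-in source object (FakeSrc is hypothetical and only mimics the nodatavals attribute of a Rasterio file):

class FakeSrc:
    nodatavals = (None, -9999.0)  # band 1 has no nodata, band 2 does

src = FakeSrc()
assert METHOD_NAME(src, 1, fallback=0.0) == 0.0                # falls back
assert METHOD_NAME(src, 2, fallback=0.0) == -9999.0            # band value wins
assert METHOD_NAME(src, 2, fallback=0.0, override=1.0) == 1.0  # override wins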
def METHOD_NAME(self, ringNumber):
    """
    Returns a peak model from its ring number.

    If no peaks were found, returns `None`.

    :rtype: Union[PeakModel, None]
    """
    for p in self.__peaks:
        if p.ringNumber() == ringNumber:
            return p
    return None
[ 4626, 280, 5402, 106 ]
def METHOD_NAME() -> Path:
    """Return path to root directory."""
    return Path(__file__).parent.parent
[ 1563, 1190 ]
def METHOD_NAME(folder):
    files = []  # list of image filenames
    dirFiles = os.listdir(folder)  # list of directory files
    dirFiles.sort()  # good initial sort but doesn't sort numerically very well
    sorted(dirFiles)  # sort numerically in ascending order
    for filename in dirFiles:
        # keep only .png files
        if '.png' in filename:
            files.append(filename)
    return files
[ 19, 1427 ]
def METHOD_NAME(mocker, con):
    """
    Check that the connection status is False when we receive an HTTP error
    """
    mocker.patch.object(NetExplorerConnector, '_retrieve_token', side_effect=Exception)
    assert con.get_status().status is False
[ 9, 19, 452, 58, 168 ]
def METHOD_NAME(path):
    """Sometimes kernel paths start with /buildbot/src/partner-android/BRANCH/private/PROJ.
    We want to remove all of this."""
    # Remove the stuff before 'private', since it is build-server dependent
    # and not part of the final URL.
    if '/private/' in path:
        path_parts = path.split('/')
        path_parts = path_parts[path_parts.index('private') + 2:]
        return '/'.join(path_parts)
    return path
[ 19, 1356, 1885, 157 ]
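A quick check of the trimming behavior described above (hypothetical call; the path mirrors the pattern named in the docstring):

path = "/buildbot/src/partner-android/BRANCH/private/PROJ/drivers/gpu/msm.c"
assert METHOD_NAME(path) == "drivers/gpu/msm.c"
assert METHOD_NAME("drivers/gpu/msm.c") == "drivers/gpu/msm.c"  # no /private/: unchanged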