text: string, lengths 15 to 7.82k
ids: sequence, lengths 1 to 7
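The rows below pair a flattened source-code string (text) with a short list of integer ids. A minimal sketch of how a two-column dataset like this could be loaded and inspected, assuming it is published in Hugging Face datasets format; the dataset path below is a hypothetical placeholder, and only the text/ids column names come from the header above:

    # Minimal sketch: load and peek at the text/ids columns.
    # Assumptions: Hugging Face datasets format; "path/to/this-dataset"
    # is a placeholder, not the dataset's real identifier.
    from datasets import load_dataset

    ds = load_dataset("path/to/this-dataset", split="train")
    for row in ds.select(range(3)):          # first three rows
        print(len(row["text"]), row["ids"])  # text length and its id list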
def METHOD_NAME(history_item):
    """ Returns the date of a history item """
    return history_item['date']
[ 19, 351, 153 ]
def METHOD_NAME(self, inline_query_result_mpeg4_gif):
    inst = inline_query_result_mpeg4_gif
    for attr in inst.__slots__:
        assert getattr(inst, attr, "err") != "err", f"got extra slot '{attr}'"
    assert len(mro_slots(inst)) == len(set(mro_slots(inst))), "duplicate slot"
[ 9, 3572, 3573 ]
def METHOD_NAME(request, year, pk):
    fair = get_object_or_404(Fair, year=year)
    user = get_object_or_404(User, pk=pk)
    METHOD_NAME = Profile.objects.filter(user=user).first()
    if not METHOD_NAME:
        METHOD_NAME = Profile.objects.create(user=user, no_dietary_restrictions=False)
    application = RecruitmentApplication.objects.filter(
        user=user, status="accepted", recruitment_period__fair=fair
    ).first()
    return TemplateResponse(
        request,
        "people/profile.html",
        {
            "fair": fair,
            "profile": METHOD_NAME,
            "role": application.delegated_role if application else None,
            "roles": RecruitmentApplication.objects.filter(
                user=user, status="accepted"
            ).order_by("recruitment_period__fair"),
        },
    )
[ 337 ]
def METHOD_NAME():
    # First four batches of labels, predictions: {TP, FP, TN, FN}
    # with a threshold of 0.5:
    # True, 1.0 -> TP;  False, .75 -> FP;  True, .25 -> FN
    # False, 0.0 -> TN; True, 1.0 -> TP;   False, .75 -> FP
    # True, .25 -> FN;  False, 0.0 -> TN;  True, 1.0 -> TP
    # False, .75 -> FP; True, .25 -> FN;   False, 0.0 -> TN
    return dataset_ops.Dataset.from_tensor_slices({
        "labels": [True, False, True, False],
        "predictions": [1.0, 0.75, 0.25, 0.]}).repeat().batch(
            3, drop_remainder=True)
[ 853, 126, 667 ]
def METHOD_NAME(self, build_configuration_file_path, integration_merge_configuration_file_path, disable_cache_on_retry=False):
    """ Loads configurations for external content to pull from source or cache, and attaches it to the Build Class """
    self.build_configuration = yaml.safe_load(open(build_configuration_file_path))
    if disable_cache_on_retry:
        self.cache_enabled = False
    else:
        self.cache_enabled = self.build_configuration[0].get('config', {}).get('cache_enabled', False)
    if not self.cache_enabled:
        self.integration_mutations = OrderedDict(yaml.safe_load(open(integration_merge_configuration_file_path)))
[ 557, 200 ]
def METHOD_NAME():
    self.gui.lap.pause()
[ 3609, 2009 ]
def METHOD_NAME(self):
    log.info = self.old_log
    super(DirUtilTestCase, self).METHOD_NAME()
[ 531, 481 ]
def METHOD_NAME(self):
    nodes = [Defs(), Suite('s'), Family('f'), Task('t')]
    sort_attr = ['event', 'meter', 'label', 'variable', 'all']
    for attr in sort_attr:
        for node in nodes:
            node.sort_attributes(attr)
            node.sort_attributes(attr, True)
            node.sort_attributes(attr, True, ['/path', '/path2'])
    sort_attr = [AttrType.event, AttrType.meter, AttrType.label, AttrType.variable, AttrType.all]
    for attr in sort_attr:
        for node in nodes:
            node.sort_attributes(attr)
            node.sort_attributes(attr, True)
            node.sort_attributes(attr, True, ['/path', '/path2'])
[ 9, 266, 177, 58 ]
def METHOD_NAME(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError as e:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        ) from e

    for member, final_dst in _iter_open_tar(
        tarobj, extract_dir, progress_filter,
    ):
        try:
            # XXX Ugh
            tarobj._extract_member(member, final_dst)
        except tarfile.ExtractError:
            # chown/chmod/mkfifo/mknode/makedev failed
            pass
    return True
[ 789, 6041 ]
def METHOD_NAME(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=configCallTransformer(),
[ 1112, 1537 ]
def METHOD_NAME(*args: Condition[TCallDetails]) -> Condition[TCallDetails]:
    """Returns a filter function that returns True if all filter functions
    assigned matches conditions.

    Args:
        args (function): a list of filter function

    Returns:
        A filter function that returns True if all filter functions assigned
        matches conditions.
    """

    def filter_fn(metadata):
        return all(func(metadata) for func in args)

    return filter_fn
[ 75, 47 ]
def METHOD_NAME(self):
    self.assertEqual(getAdapter(self.portal, ILanguageSchema).display_flags, False)
    self.settings.display_flags = True
    self.assertEqual(getAdapter(self.portal, ILanguageSchema).display_flags, True)
[ 9, 19, 52, 1106 ]
def METHOD_NAME():
    def func(z):
        return z**2 - 1 + 2j

    x0 = 2.0j
    ftol = 1e-4
    sol = root(func, x0, tol=ftol, method='DF-SANE')
    assert_(sol.success)
    f0 = np.linalg.norm(func(x0))
    fx = np.linalg.norm(func(sol.x))
    assert_(fx <= ftol*f0)
[ 9, 2587 ]
def METHOD_NAME(
        docs_file: typing.TextIO) -> Tuple[Union[str, None], List[NodeParseError]]:
    root_group = ParsedGroup("Miscellaneous", "Miscellaneous")
    docs: Dict[str, Union[ParsedGroup, Variable]] = {"file": root_group}
    getter_method_specs: List[GetterMethodSpec] = []

    def visitor(node: Node):
        nonlocal docs
        if isinstance(node.details, Group):
            group = ParsedGroup(node.name, node.details.group_description)
            parent = typing.cast(ParsedGroup, docs[node.json_path])
            parent.sub_groups.append(group)
            docs[f'{node.json_path}.{node.name}'] = group
        else:
            parent = typing.cast(ParsedGroup, docs[node.json_path])
            getter_method_specs.append(GetterMethodSpec(node.name, node.details))
            parent.variables[node.name] = node.details

    errors = visit(docs_file, visitor)
    if errors:
        return None, errors

    sections = render_sections(root_group)
    env = Environment(
        loader=PackageLoader("settings_manifest"), autoescape=select_autoescape())
    template = env.get_template("SettingsManifest.java.jinja")
    return template.render(
        sections=sections,
        getter_method_specs=getter_method_specs,
        writeable_mode=Mode.ADMIN_WRITEABLE), []
[ 567, 1220 ]
def METHOD_NAME():
    assert Ob('x') == Ob('x') and Ob('x') != Ob('y')
[ 9, 11254 ]
def METHOD_NAME(x, p):
    """Derivative of circumferential distance function.

    Args:
        x (float): first angle
        p (float): second angle

    Returns:
        float: The derivative.
    """
    # pylint: disable=chained-comparison,misplaced-comparison-constant
    t = p - x
    if t < -0.5 or (0 < t and t < 0.5):
        return -1
    if t > 0.5 or (-0.5 < t and t < 0):
        return 1
    return 0
[ 7996, 1225, 1260 ]
def METHOD_NAME(options):
    from gevent import spawn, sleep
    return test(spawn, sleep, options)
[ 7608, -1 ]
def METHOD_NAME(self):
    pass
[ 72, 710 ]
def METHOD_NAME(self) -> Optional[float]:
    """
    Number of DNS Services using the DNS zone.
    """
    return pulumi.get(self, "dns_services")
[ 2455, 3186 ]
def METHOD_NAME(self, function_call) -> str:
    r"""
    Print function call.

    :param function_call: a function call
    :type function_call: ASTFunctionCall
    :return: the corresponding string representation
    """
    result = function_call.get_name()

    symbols = {
        "convolve": r"\\text{convolve}"
    }

    for symbol_find, symbol_replace in symbols.items():
        result = re.sub(r"(?<![a-zA-Z])(" + symbol_find + ")(?![a-zA-Z])",
                        symbol_replace, result)  # "whole word" match

    if ASTUtils.needs_arguments(function_call):
        n_args = len(function_call.get_args())
        result += "(" + ", ".join(["%s" for _ in range(n_args)]) + ")"
    else:
        result += "()"

    return result
[ 38, 559, 128 ]
def METHOD_NAME(self):
    teardown_imports()
[ 531, 481 ]
def METHOD_NAME(): """ API Endpoint to update a "trainspace" item using a trainspace ID Params: - trainspace_id: Unique trainspace id - requestData: A dictionary containing the other table attributes to be updated, not including trainspace_id Results: - 200: Trainspace updated successfully - 400: Error in updating trainspace """ try: request_data = json.loads(request.data) trainspace_id = request_data["trainspace_id"] requestData = request_data["requestData"] success = updateTrainspaceData(trainspace_id, requestData) if success: return send_success({"message": "Trainspace updated", "success": success}) else: return send_error("Trainspace not updated") except Exception: print(traceback.format_exc()) return send_traceback_error()
[ 86, 16976, 365, 623, 1267 ]
def METHOD_NAME():
    f = Namespace(Dummy=Namespace())
    _add_framework_name(f)
    assert "Dummy" in f
    assert f.Dummy.name == "Dummy"
[ 9, 238, 1486, 156, 870, 130, 194 ]
def METHOD_NAME(self, value):
    self._format = value
[ 0, 275 ]
def METHOD_NAME(self) -> dict[Any, Any]:
    errors: dict[Any, Any] = {}
    for exception in self._exceptions:
        errors.update(exception.METHOD_NAME())
    return errors
[ 1568, 1107 ]
def METHOD_NAME(given, expect, test_utf8=True):
    givens = [given]
    if not PY3 and test_utf8:
        givens.append(given.encode('utf8'))
    for given in givens:
        (res, count) = scanstring(given, 1, None, True)
        self.assertEqual(len(given), count)
        self.assertEqual(res, expect)
[ 638, 793 ]
def METHOD_NAME(oms_version, curr_oms_version, cpu_bits):
    print("--------------------------------------------------------------------------------")
    print("You are currently running OMS Version {0}. There is a newer version\n"\
          "available which may fix your issue (version {1}).".format(oms_version, curr_oms_version))
    answer = get_input("Do you want to update? (y/n)",
                       (lambda x: x.lower() in ['y', 'yes', 'n', 'no']),
                       "Please type either 'y'/'yes' or 'n'/'no' to proceed.")
    # user does want to update
    if (answer.lower() in ['y', 'yes']):
        print("--------------------------------------------------------------------------------")
        print("Please head to the Github link below and click on 'Download Latest OMS Agent\n"\
              "for Linux ({0})' in order to update to the newest version:".format(cpu_bits))
        print("\n    https://github.com/microsoft/OMS-Agent-for-Linux\n")
        print("And follow the instructions given here:")
        print("\n    https://github.com/microsoft/OMS-Agent-for-Linux/blob/master/docs/"\
              "OMS-Agent-for-Linux.md#upgrade-from-a-previous-release\n")
        return USER_EXIT
    # user doesn't want to update
    elif (answer.lower() in ['n', 'no']):
        print("Continuing on with troubleshooter...")
        print("--------------------------------------------------------------------------------")
        return NO_ERROR
[ 4949, 86, 2228, 281 ]
def METHOD_NAME(self) -> Optional[float]: """Get the well's diameter, if circular.""" return self._definition.METHOD_NAME
[ 9814 ]
def METHOD_NAME():
    picture_dir = get_picture_dir()
    assert isinstance(picture_dir, Path)
    assert picture_dir.is_dir()
[ 9, 12491, 1190 ]
def METHOD_NAME(xcorr_type, filename, jcurrent_selection, event_type, station_id, juser_id):
    if filename is None or station_id is None or xcorr_type is None:
        return {}
    user_id = json.loads(juser_id)
    nurio = provider.get_file_handler(user_id, filename)
    fig = plotly.subplots.make_subplots(rows=1, cols=1)
    keys = nurio.get_header()[station_id].keys()
    if event_type == 'nu':
        if stnp.nu_xcorrelations not in keys:
            return {}
        xcorrs = nurio.get_header()[station_id][stnp.nu_xcorrelations]
    else:
        if stnp.cr_xcorrelations not in keys:
            return {}
        xcorrs = nurio.get_header()[station_id][stnp.cr_xcorrelations]
    if stnp.channels_max_amplitude in keys:
        current_selection = json.loads(jcurrent_selection)
        fig.append_trace(plotly.graph_objs.Scatter(
            x=nurio.get_header()[station_id][stnp.channels_max_amplitude] / units.mV,
            y=[xcorrs[i][xcorr_type] for i in range(len(xcorrs))],
            text=[str(x) for x in nurio.get_event_ids()],
            customdata=[x for x in range(nurio.get_n_events())],
            mode='markers',
            opacity=1,
            selectedpoints=current_selection
        ), 1, 1)
    else:
        return {}
    fig['layout'].update(default_layout)
    fig['layout']['xaxis'].update({'type': 'log', 'title': 'maximum amplitude [mV]'})
    fig['layout']['yaxis'].update({'title': xcorr_type, 'range': [0, 1]})
    fig['layout']['hovermode'] = 'closest'
    return fig
[ 1288, 2429, 4746 ]
def METHOD_NAME(sdfg: dace.SDFG) -> int:
    num = 0
    for n, _ in sdfg.all_nodes_recursive():
        if (isinstance(n, nodes.AccessNode)
                and isinstance(sdfg.arrays[n.data], data.View)):
            num += 1
    return num
[ 29, 4632 ]
def METHOD_NAME(self):
[ 14271, 1046, 132, 12805 ]
def METHOD_NAME(self): """The workspace slug""" return self.get_data("slug")
[ 1231 ]
def METHOD_NAME(self, interface_name, speed):
    """Set interface speed according to the auto negotiation mode. When auto negotiation mode
    is enabled, set the advertised speeds; otherwise, set the force speed.

    Args:
        interface_name (str): Interface name
        speed (str): SONiC style interface speed. E.g, 1G=1000, 10G=10000, 100G=100000.
            If the speed is None and auto negotiation mode is enabled, it sets the
            advertised speeds to all supported speeds.

    Returns:
        boolean: True if success. Usually, the method return False only if the operation
        is not supported or failed.
    """
    return self.host.METHOD_NAME(interface_name, speed)
[ 0, 1942 ]
def METHOD_NAME(neuron, feature, ax, bins=15, normed=True, cumulative=False):
    """
    Plot a histogram of the selected feature for the population of morphologies.

    Plots x-axis versus y-axis on a scatter|histogram|binned values plot.

    Parameters:
        morphologies : neuron list
        feature : str
            The feature of interest.
        bins : int
            Number of bins for the histogram.
        cumulative : bool
            Sets cumulative histogram on.
        ax : axes object
            The axes in which the plot is taking place.
    """
    feature_values = nm.get(feature, neuron)
    # generate histogram
    ax.hist(feature_values, bins=bins, cumulative=cumulative, normed=normed)
[ 6069 ]
def METHOD_NAME(self):
    # Make sure enabled is True when it's not included in the uploaded data
    if 'enabled' not in self.data:
        return True
    else:
        return self.cleaned_data['enabled']
[ 1356, 1111 ]
def METHOD_NAME(test_case, device):
    output = np.array(
        [
            [
                [[-0.0544118, -1.0509688], [-0.2696846, 0.4295622]],
                [[-1.2834904, -0.4838651], [2.0891891, 0.6236691]],
            ],
            [
                [[-0.8555527, -0.3554582], [0.493019, -1.694826]],
                [[1.8035311, 0.4155158], [0.6362644, -0.4424936]],
            ],
        ],
        dtype=np.float32,
    )
    x = flow.tensor(input_arr, dtype=flow.float32, device=flow.device(device))
    m = flow.nn.LayerNorm(x.size()[1:]).to(device=flow.device(device))
    y = m(x)
    test_case.assertTrue(np.allclose(y.numpy(), output, 1e-05, 1e-05))
[ 9, 8569 ]
def METHOD_NAME(self, attribute_name, request=None):
    """
    Returns the URL of a page referenced by a foreign key (or other
    attribute) matching the name ``attribute_name``. If the field value is
    null, or links to something other than a ``Page`` object, an empty
    string is returned. The result is also cached per-object to facilitate
    fast repeat access.

    Raises an ``AttributeError`` if the object has no such field or
    attribute.
    """
    if attribute_name in self._page_url_cache:
        return self._page_url_cache[attribute_name]

    if not hasattr(self, attribute_name):
        raise AttributeError(
            "'{}' object has no attribute '{}'".format(
                self.__class__.__name__, attribute_name
            )
        )

    page = getattr(self, attribute_name)
    if hasattr(page, "specific"):
        url = page.specific.get_url(getattr(self, "_request", None))
    else:
        url = ""

    self._page_url_cache[attribute_name] = url
    return url
[ 19, 1174, 274 ]
def METHOD_NAME(
        name,
        srcs = [],
        data = [],
        deps = [],
        src_prefix = "",
        visibility = None,
        target_compatible_with = None,
        **kwargs):
    """Bazel rule to create a C++ capnproto library from capnp source files

    Args:
        name: library name
        srcs: list of files to compile
        data: additional files to provide to the compiler - data files and includes
            that need not to be compiled
        deps: other cc_capnp_library rules to depend on
        src_prefix: src_prefix for capnp compiler to the source root
        visibility: rule visibility
        target_compatible_with: target compatibility
        **kwargs: rest of the arguments to cc_library rule
    """
    hdrs = [s + ".h" for s in srcs]
    srcs_cpp = [s + ".c++" for s in srcs]

    _capnp_gen(
        name = name + "_gen",
        srcs = srcs,
        deps = [s + "_gen" for s in deps],
        data = data,
        outs = hdrs + srcs_cpp,
        src_prefix = src_prefix,
        visibility = visibility,
        target_compatible_with = target_compatible_with,
    )
    native.cc_library(
        name = name,
        srcs = srcs_cpp,
        hdrs = hdrs,
        deps = deps + ["@capnp-cpp//src/capnp:capnp_runtime"],
        visibility = visibility,
        target_compatible_with = target_compatible_with,
        **kwargs
    )
[ 1298, 9205, 3106 ]
def METHOD_NAME(train_dir, test_dir, output_train_dir, output_test_dir, output_vocab_text, output_vocab_tag):
    print("start construct word dict")
    vocab_text = build_dict(2, 0, train_dir, test_dir)
    with io.open(output_vocab_text, "w", encoding='utf-8') as wf:
        wf.write(u"{}\n".format(str(len(vocab_text))))
    vocab_tag = build_dict(0, 0, train_dir, test_dir)
    with io.open(output_vocab_tag, "w", encoding='utf-8') as wf:
        wf.write(u"{}\n".format(str(len(vocab_tag))))
    print("construct word dict done\n")
    write_paddle(vocab_text, vocab_tag, train_dir, test_dir, output_train_dir, output_test_dir)
[ 8827 ]
def METHOD_NAME(self, name): """Default backend logout actions Send a log message, clear some cookies, increment the logout counter. """ self.log.info("User logged out: %s", name) self.clear_login_cookie() self.statsd.incr('logout')
[ 3127, 2431, 950 ]
def METHOD_NAME(*args: Any) -> None:
    with open(os.path.join(LOG_DIR, "log.txt"), "a+") as log_file:
        print(f"[ERROR] {' '.join(args)}", file=log_file)
[ 38, 168 ]
def METHOD_NAME(root_vo, identity_key, id_type, email, issuer, default=False, password=None, vo='def', *, session: "Session"):
    """
    Adds a membership association between identity and the root account for given VO.

    :param root_vo: The VO whose root needs recovery
    :param identity_key: The identity key name. For example x509 DN, or a username.
    :param id_type: The type of the authentication (x509, gss, userpass, ssh, saml).
    :param email: The Email address associated with the identity.
    :param issuer: The issuer account.
    :param default: If True, the account should be used by default with the provided identity.
    :param password: Password if id_type is userpass.
    :param vo: the VO to act on.
    :param session: The database session in use.
    """
    kwargs = {}
    root_vo = vo_core.map_vo(root_vo)
    if not has_permission(issuer=issuer, vo=vo, action='recover_vo_root_identity', kwargs=kwargs, session=session):
        raise exception.AccessDenied('Account %s can not recover root identity' % (issuer))

    account = InternalAccount('root', vo=root_vo)

    return identity.add_account_identity(identity=identity_key, type_=IdentityType[id_type.upper()], default=default,
                                         email=email, account=account, password=password, session=session)
[ 2986, 3535, 1563, 2989 ]
def METHOD_NAME(name: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                version: Optional[str] = None,
                workspace_name: Optional[str] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEnvironmentSpecificationVersionResult:
    """
    Azure Resource Manager resource envelope.

    :param str name: Container name.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str version: Version identifier.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['version'] = version
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices/v20210301preview:getEnvironmentSpecificationVersion', __args__, opts=opts, typ=GetEnvironmentSpecificationVersionResult).value

    return AwaitableGetEnvironmentSpecificationVersionResult(
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        properties=pulumi.get(__ret__, 'properties'),
        system_data=pulumi.get(__ret__, 'system_data'),
        type=pulumi.get(__ret__, 'type'))
[ 19, 1027, 3255, 281 ]
def METHOD_NAME(viewer): """Context manager checks that progress bar is added on construction""" assert not qt_viewer_has_pbar(viewer) yield assert qt_viewer_has_pbar(viewer)
[ 638, 11126, 4398, 24 ]
def METHOD_NAME(self, contents, overwrite=False, lease=None):
    if self._lease is not None:
        if not self._lease.is_valid(lease):
            raise Exception("Invalid lease!")
    if self.contents is not None or overwrite is True:
        if isinstance(contents, str):
            self.contents = contents.encode("utf8")
        elif isinstance(contents, io.BytesIO):
            self.contents = contents.read()
        elif isinstance(contents, io.StringIO):
            self.contents = contents.read().encode("utf8")
        elif isinstance(contents, bytes):
            self.contents = contents
        else:
            self.contents = contents
[ 172, 365 ]
def METHOD_NAME(self):
    pass
[ 72, 710 ]
def METHOD_NAME(train_batch_size, val_batch_size):
    data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])

    train_loader = DataLoader(
        MNIST(download=True, root=".", transform=data_transform, train=True),
        batch_size=train_batch_size, shuffle=True
    )

    val_loader = DataLoader(
        MNIST(download=False, root=".", transform=data_transform, train=False),
        batch_size=val_batch_size, shuffle=False
    )
    return train_loader, val_loader
[ 19, 365, 196 ]
def METHOD_NAME(delta_rho, a, z, stationSpacing):
    respEW = 0
    gravity_change = []
    xmax = 10.0
    npts = int(1 / stationSpacing)
    x = np.linspace(-xmax, xmax, num=npts)
    y = x.copy()
    X, Y = np.meshgrid(x, y)
    for each in range(npts):
        gravity_change.append(calculategravity(delta_rho, a, z, x[each]))
    return respEW, gravity_change, X, Y
[ -1 ]
def METHOD_NAME(sender, json=None, record=None, index=None, doc_type=None,
                arguments=None, **dummy_kwargs):
    """Signal sent before a record is indexed.

    :param json: The dumped record dictionary which can be modified.
    :param record: The record being indexed.
    :param index: The index in which the record will be indexed.
    :param doc_type: The doc_type for the record.
    """
    if index.split('-')[0] == HoldingsSearch.Meta.index:
        library_pid = None
        organisation_pid = None

        holding = next(HoldingsSearch()
                       .filter('term', pid=record.pid)
                       .source('holdings_type').scan(), None)

        # get the number of items for ui paging
        item_search = ItemsSearch()[0:0]\
            .filter('term', holding__pid=record.pid)
        if holding is not None and holding["holdings_type"] == 'serial':
            item_search = ItemsSearch()[0:0]\
                .filter('term', holding__pid=record.pid)\
                .filter('term', issue__status="received")
        # to compute the number of masked item
        item_search.aggs.bucket('public_items', 'terms', field='_masked')
        results = item_search.source(['organisation', 'library']).execute()
        # number of items
        json['items_count'] = results.hits.total.value
        # number of masked items
        number_of_masked_items = 0
        for bucket in results.aggregations.public_items.buckets:
            if bucket.key_as_string == 'true':
                number_of_masked_items = bucket.doc_count
                break
        json['public_items_count'] = \
            json['items_count'] - number_of_masked_items
[ 644, 14542, 365 ]
def METHOD_NAME(self, key, default_value=None):
    """
    A convenience method to extract a configuration value.

    :type key: str
    :param key: a field to look for in the ``self.config`` field

    :type default_value: anything
    :param default_value: the default value to return if a value for the
                          ``key`` is not available

    :return: a configuration value for the supplied ``key``
    """
    log.debug("Getting config key %s, with supplied default value: %s",
              key, default_value)
    value = default_value
    if isinstance(self.config, dict) and self.config.get(key):
        value = self.config.get(key, default_value)
    elif hasattr(self.config, key) and getattr(self.config, key):
        value = getattr(self.config, key)
    elif (self._config_parser.has_option(self.PROVIDER_ID, key) and
          self._config_parser.get(self.PROVIDER_ID, key)):
        value = self._config_parser.get(self.PROVIDER_ID, key)
    if isinstance(value, six.string_types) and not isinstance(
            value, six.text_type):
        return six.u(value)
    return value
[ 19, 200, 99 ]
def METHOD_NAME(marker): """ Checks if marker has proper suffix. """ for s in OperationFactory.__suffix: if marker.endswith(s): return raise NotSupportedOperationException( "Incorrect marker format {}, suffix is missing.".format(marker) )
[ 250, 1464, 275 ]
def METHOD_NAME(next_link=None):
    if not next_link:
        request = build_list_request(
            api_version=api_version,
            template_url=self.list.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
    else:
        # make call to next link with the client's api-version
        _parsed_next_link = urllib.parse.urlparse(next_link)
        _next_request_params = case_insensitive_dict(
            {
                key: [urllib.parse.quote(v) for v in value]
                for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
            }
        )
        _next_request_params["api-version"] = self._config.api_version
        request = HttpRequest(
            "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        request.method = "GET"
    return request
[ 123, 377 ]
def METHOD_NAME(self):
    top = Topology.load(get_fn("benzene.mol2"))
    for atom in top.sites:
        assert atom.element.name in {"hydrogen", "carbon"}
[ 9, 17160, 9881, 1532 ]
def METHOD_NAME(cls): """Return backend enabled status by checking Setting Model""" try: get_setting(scope='module', scope_category='users', name='facebook_app_id') get_setting(scope='module', scope_category='users', name='facebook_api_secret') except Setting.DoesNotExist: return False return True
[ 1111 ]
def METHOD_NAME(self, instance):
    code_types = ",".join(
        [
            code
            for provider in instance.equipment_provider_manager._get_providers().values()
            for code in provider.code_types
        ]
    )
    if not code_types:
        abort(404, message='No code type exists into equipment provider')

    return "stop_point.has_code_type(" + code_types + ")"
[ 129, 527, 14420 ]
def METHOD_NAME(self) -> None: ...
[ 19, 2938, 144 ]
def METHOD_NAME(self, Source):
    source = Source()
    _, _, extent = source.single_tile(0, 50)
    assert extent == (0, 1, 50, 51)
[ 9, 3523 ]
def METHOD_NAME(self):
    group_url = "/buckets/beers/groups/users"
    group = {**MINIMALIST_GROUP}
    group["data"]["members"].append(self.principal)
    self.app.put_json("/buckets/beers", MINIMALIST_BUCKET, headers=self.headers)
    self.app.put_json(group_url, group, headers=self.headers)
    response = self.app.get("/", headers=self.headers).json["user"]["principals"]
    principals = ("system.Everyone", "system.Authenticated", group_url, self.principal)
    self.assertEqual(sorted(response), sorted(principals))
[ 9, 610, 21, 10775, 217, 218 ]
def METHOD_NAME(self): """Signal to pause the main loop and wait for verification, the refresh settings.""" self.pause_loop = True while not self.verify_pause_loop: time.sleep(0.1) self.logger.info("Refreshing conditional settings") self.initialize_variables() self.pause_loop = False self.verify_pause_loop = False return "Conditional settings successfully refreshed"
[ 1920, 817 ]
def METHOD_NAME(f, option, value):
    f.write(f'{option}="{bool_to_string(value)}"\n')
[ 863, 1335, 797 ]
def METHOD_NAME(args):
    master_handler = None
    master_addr = os.getenv(NodeEnv.DLROVER_MASTER_ADDR, "")
    use_dlrover_launch = False
    if args.standalone:
        master_handler, master_addr = _launch_dlrover_local_master()
        os.environ[NodeEnv.DLROVER_MASTER_ADDR] = master_addr
    if _check_dlrover_master_available(master_addr):
        GlobalMasterClient.MASTER_CLIENT = build_master_client(master_addr)
        use_dlrover_launch = True
    else:
        use_dlrover_launch = False

    if args.standalone and not use_dlrover_launch:
        args.rdzv_backend = "c10d"
        args.rdzv_endpoint = "localhost:29400"
        args.rdzv_id = str(uuid.uuid4())
        logger.info(
            f"\n**************************************\n"
            f"Rendezvous info:\n"
            f"--rdzv-backend={args.rdzv_backend} "
            f"--rdzv-endpoint={args.rdzv_endpoint} "
            f"--rdzv-id={args.rdzv_id}\n"
            f"**************************************\n"
        )

    config, cmd, cmd_args = config_from_args(args)
    setattr(config, "network_check", False)
    setattr(config, "node_unit", 1)
    if hasattr(args, "network_check"):
        config.network_check = args.network_check
    if hasattr(args, "node_unit"):
        config.rdzv_configs["node_unit"] = args.node_unit
    elastic_launch(
        config=config, entrypoint=cmd, use_dlrover_launch=use_dlrover_launch
    )(*cmd_args)
    if master_handler:
        master_handler.close()
[ 22 ]
def METHOD_NAME(self, offer: "Offer") -> bool:
    """Prevent MAEngines from trading their counterpart's offers"""
    return all(offer.id not in engine.forwarded_offers.keys()
               for engine in self.engines)
[ 8655, 6179 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_url_param(
            "resourceGroupName", self.ctx.args.resource_group,
            required=True,
        ),
        **self.serialize_url_param(
            "restorePointCollectionName", self.ctx.args.restore_point_collection_name,
            required=True,
        ),
        **self.serialize_url_param(
            "restorePointName", self.ctx.args.restore_point_name,
            required=True,
        ),
        **self.serialize_url_param(
            "subscriptionId", self.ctx.subscription_id,
            required=True,
        ),
    }
    return parameters
[ 274, 386 ]
def METHOD_NAME():
    np.random.seed(42)
    letters = string.ascii_letters
    categories = [''.join(random.choice(letters) for _ in range(5)) for _ in range(300)]

    train_data = np.concatenate([np.random.randn(1000, 1),
                                 np.random.choice(a=categories, size=(1000, 1))], axis=1)
    test_data = np.concatenate([np.random.randn(1000, 1),
                                np.random.choice(a=categories, size=(1000, 1))], axis=1)

    df_train = pd.DataFrame(train_data,
                            columns=['numeric_without_drift', 'categorical_with_many_categories'])
    df_test = pd.DataFrame(test_data, columns=df_train.columns)

    df_test['categorical_with_many_categories'] = np.random.choice(a=categories[20:280], size=(1000, 1))

    df_train = df_train.astype({'numeric_without_drift': 'float'})
    df_test = df_test.astype({'numeric_without_drift': 'float'})

    label = np.random.randint(0, 2, size=(df_train.shape[0],))
    df_train['target'] = label
    train_ds = Dataset(df_train, cat_features=['categorical_with_many_categories'], label='target')

    label = np.random.randint(0, 2, size=(df_test.shape[0],))
    df_test['target'] = label
    test_ds = Dataset(df_test, cat_features=['categorical_with_many_categories'], label='target')

    check = MultivariateDrift()

    # Act
    result = check.run(train_ds, test_ds)

    # Assert - we only care that it runs
    assert_that(result.value['domain_classifier_auc'])
[ 9, 3217, -1, 2065, 623, 105 ]
def METHOD_NAME(self):
    assert longitude_add(0.0, 179.0, 1000000) == constants.MAX_LON
[ 9, 610, 232, 4877 ]
def METHOD_NAME(self):
    from ckanext.hdx_users.helpers.permissions import Permissions
    showcase_dict = generate_test_showcase(SYSADMIN, 'dataviz_showcase', True)
    showcase_dict['notes'] = 'Modified'
    showcase_dict['image_url'] = None

    user_with_permission = factories.User(name='user_with_permission', email='[email protected]')
    Permissions(user_with_permission['id']).set_permissions(
        {'user': SYSADMIN},
        [Permissions.PERMISSION_MANAGE_CAROUSEL]
    )

    context = {'model': model, 'session': model.Session, 'user': 'user_with_permission'}
    showcase_dict = _get_action('ckanext_showcase_update')(context, showcase_dict)
    assert showcase_dict['in_dataviz_gallery'] is True, \
        'A user with carousel permission should be allowed to update a dataviz showcase'

    del showcase_dict['dataviz_label']
    try:
        _get_action('ckanext_showcase_update')(context, showcase_dict)
        assert False
    except ValidationError as e:
        assert True, 'Validation should fail for dataviz showcases without "dataviz_label"'
[ 9, 86, -1, 41, 13129, 204 ]
def METHOD_NAME(self):
    self._package_path()
    return self.development_path
[ 19, 10305, 157 ]
def METHOD_NAME(
    scope,
    x,
    num_groups=32,
    group_size=None,
    epsilon=1e-6,
    dtype=jnp.float32,
    bias=True,
    scale=True,
    bias_init=initializers.zeros_init(),
    scale_init=initializers.ones_init(),
[ 846, 387 ]
def METHOD_NAME(except_cls, msg, check_context=True):
    return _expect_raises(except_cls, msg=msg, check_context=check_context)
[ 1297, 45, 277, 41, 3736, 198 ]
def METHOD_NAME(entry):
    compiler_name = translated_compiler_name(entry["name"])
    paths = entry["executables"]

    # to instantiate a compiler class we may need a concrete version:
    version = "={}".format(entry["version"])
    arch = entry["arch"]
    operating_system = arch["os"]
    target = arch["target"]

    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    spec = spack.spec.CompilerSpec(compiler_cls.name, version)
    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
    return compiler_cls(spec, operating_system, target, paths)
[ 1436, 280, 475 ]
def METHOD_NAME(self):
    self.update_editor()
[ 414, 274, 1180 ]
def METHOD_NAME(self): """Create a community with no platform, just a constitution community""" constitution_community = ConstitutionCommunity.objects.create(community_name="my community") community = constitution_community.community self.assertEqual(community.community_name, "my community") self.assertEqual(community.constitution_community, constitution_community) self.assertEqual(community.get_platform_communities().count(), 0)
[ 9, 654, 2773, 6649 ]
def METHOD_NAME(self, apt_mock):
    modules = {'apt': apt_mock}
    self.module_patcher = patch.dict('sys.modules', modules)
    self.module_patcher.start()
    self.addCleanup(self.module_patcher.stop)
[ 248, 10991 ]
def METHOD_NAME(self):
    return super().METHOD_NAME()
[ 73 ]
async def METHOD_NAME(project, node):
    await delete("/projects/{}/nodes/{}".format(project["project_id"], node["node_id"]))
[ 34, 1716 ]
def METHOD_NAME(self, p: Any, cycle: bool):
    """iPython (Jupyter) pretty print."""
    if cycle:
        # There should never be a cycle. This is just in case.
        p.text('StateVectorTrialResult(...)')
    else:
        p.text(str(self))
[ 92, 885 ]
def METHOD_NAME(self, command_args):
    super().METHOD_NAME(command_args)
    self._execute_operations()
    return self._output()
[ 1519 ]
def METHOD_NAME(
    staff_api_client,
    gift_card,
    permission_manage_gift_card,
    permission_manage_users,
    permission_manage_apps,
):
    # given
    gift_card.is_active = False
    gift_card.save(update_fields=["is_active"])
    assert not gift_card.is_active

    variables = {"id": graphene.Node.to_global_id("GiftCard", gift_card.id)}

    # when
    response = staff_api_client.post_graphql(
        ACTIVATE_GIFT_CARD_MUTATION,
        variables,
        permissions=[
            permission_manage_gift_card,
            permission_manage_users,
            permission_manage_apps,
        ],
    )

    # then
    content = get_graphql_content(response)
    data = content["data"]["giftCardActivate"]["giftCard"]
    assert data["isActive"]

    events = data["events"]
    assert len(events) == 1
    assert events[0]["type"] == GiftCardEvents.ACTIVATED.upper()
    assert events[0]["user"]["email"] == staff_api_client.user.email
    assert events[0]["app"] is None
[ 9, 1284, 4755, 5427, 604, 1045 ]
def METHOD_NAME(self, x, h_prev, x_mask, h_mask):
    s0 = self._compute_init_state(x, h_prev, x_mask, h_mask)
    states = [s0]
    for i, (name, pred) in enumerate(self.genotype.recurrent):
        s_prev = states[pred]
        if self.training:
            ch = (s_prev * h_mask).mm(self._Ws[i])
        else:
            ch = s_prev.mm(self._Ws[i])
        c, h = torch.split(ch, self.nhid, dim=-1)
        c = c.sigmoid()
        fn = self._get_activation(name)
        h = fn(h)
        s = s_prev + c * (h - s_prev)
        states += [s]
    output = torch.mean(torch.stack([states[i] for i in self.genotype.concat], -1), -1)
    return output
[ 118 ]
def METHOD_NAME(identity, force_restart):
    """Set given identity (one of: 0, 1, 2)

    Might require stopping other smart card services to connect directly to
    the device over CCID interface. These will be restarted after operation,
    if it is required.

    This could be replaced with:
        gpg-connect-agent "SCD APDU 00 85 00 0<IDENTITY>"
    """
    if not identity.isdigit():
        local_critical("identity number must be a digit")

    identity = int(identity)
    if identity < 0 or identity > 2:
        local_critical("identity must be 0, 1 or 2")

    local_print(f"Setting identity to {identity}")
    for x in range(3):
        try:
            gnuk = get_gnuk_device()
            with gnuk.release_on_exit() as gnuk:
                gnuk.cmd_select_openpgp()
                try:
                    gnuk.cmd_set_identity(identity)
                    break
                except USBError:
                    local_print(f"Reset done - now active identity: {identity}")
                    break
        except OnlyBusyICCError:
            local_print(
                "Device is occupied by some other service and cannot be connected to. Identity not changed."
            )
            break
        except ValueError as e:
            if "No ICC present" in str(e):
                local_print(
                    "Device is occupied by some other service and cannot be connected to. Identity not changed."
                )
            else:
                local_critical(e)
        except Exception as e:
            local_critical(e)
[ 0, 2989 ]
def METHOD_NAME(
    question_freeze_after_option_after_deadline,
):
    assert question_freeze_after_option_after_deadline.read_only is True
[ 9, 1745, 1042, 3125, 1887, 1335, 1887 ]
def METHOD_NAME(session):
    stream = FileStream(session, "/path/to/file")

    assert stream.__json__() == {
        "type": "file",
        "path": "/path/to/file",
    }
[ 9, 171, 919, 157 ]
def METHOD_NAME(self):
    def foo(a):
        return {"a": a * 2}

    d = {"a": 2}
    dres = adaptor(foo, {"a": "a"})(d)
    self.assertEqual(dres["a"], 4)

    d = {"b": 2}
    dres = adaptor(foo, {"a": "b"}, {"b": "a"})(d)
    self.assertEqual(dres["b"], 4)
[ 9, 553, 1737 ]
def METHOD_NAME(inpath, outpath, **args):
    try:
        template = Template(filename=inpath)
        rendered = template.render(**args)
        rendered = re.sub(r"\r\n", r"\n", rendered)
        with open(outpath, 'w') as fout:
            fout.write(rendered)
        makoFileList.append(outpath)
        return len(rendered.splitlines())
    except:
        traceback = RichTraceback()
        #for (filename, lineno, function, line) in traceback.traceback:
        #    print("%s(%s) : error in %s" % (filename, lineno, function))
        #    print(line, "\n")
        line = "%s: %s" % (str(traceback.error.__class__.__name__), traceback.error)
        makoErrorList.append(line)
        print(line)
        raise
[ 12900, 77 ]
def METHOD_NAME(self) -> None:
    df = self.df_with_index
    self.data.add_df_index(df, column_name="IndexColumnName")
    self.data.add_df(df)

    self.assertEqual(
        self.data_with_index,
        self.data.build(),
    )
[ 9, 724 ]
def METHOD_NAME(conf_file_name, bdf):
    filestring = "filename=trtype=PCIe traddr=0000." + bdf.replace(":", ".") + " ns=1"
    with open(conf_file_name, "a") as conf_file:
        conf_file.write(filestring + "\n")
[ 238, 1147, 24, 2546 ]
def METHOD_NAME(resource_name: str,
                id: pulumi.Input[str],
                opts: Optional[pulumi.ResourceOptions] = None) -> 'Vendor':
    """
    Get an existing Vendor resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = VendorArgs.__new__(VendorArgs)

    __props__.__dict__["name"] = None
    __props__.__dict__["provisioning_state"] = None
    __props__.__dict__["skus"] = None
    __props__.__dict__["system_data"] = None
    __props__.__dict__["type"] = None
    return Vendor(resource_name, opts=opts, __props__=__props__)
[ 19 ]
def METHOD_NAME(self):
[ 1668, 3551, 126, 41, 246, 7728, 4244 ]
def METHOD_NAME(self): """Test that executing a state measurement with shots raises a warning.""" class MyMeasurement(StateMeasurement): def process_state(self, state, wire_order): return qml.math.sum(state) dev = qml.device("default.qubit.legacy", wires=2, shots=1000) @qml.qnode(dev) def circuit(): return MyMeasurement() with pytest.warns( UserWarning, match="Requested measurement MyMeasurement with finite shots", ): circuit()
[ 9, 734, 479, 41, 5968 ]
def METHOD_NAME(client):
    with pytest.raises(OverflowError):
        deserialize_iso(client.datetime.get_local_negative_offset_uppercase_max_date_time())
[ 9, 19, 125, 2927, 1540, 8391, 232 ]
def METHOD_NAME(self): return "tap_tester_mongodb_oplog_aged_out"
[ 156 ]
def METHOD_NAME(self): """ Cleanup data related to quest. """ del self.quester.db.test_quest_counter
[ 950 ]
def METHOD_NAME(self):
[ 9, 661 ]
def METHOD_NAME(self, ranges=None):
    # Render table
    axes = self.handles['axis']
    element = self.hmap.last
    table = self._render_table(element, axes)
    self.handles['artist'] = table

    # Add to axes
    axes.set_axis_off()
    axes.add_table(table)

    return self._finalize_axis(self.keys[-1], element=element)
[ 15, 1288 ]
def METHOD_NAME(fake_config_dir):
    return fake_config_dir / "default.cfg"
[ 235, 171 ]
def METHOD_NAME(evm: Evm) -> None:
    """
    Signed integer division of the top two elements of the stack. Pushes the
    result back on the stack.

    Parameters
    ----------
    evm :
        The current EVM frame.
    """
    # STACK
    dividend = pop(evm.stack).to_signed()
    divisor = pop(evm.stack).to_signed()

    # GAS
    charge_gas(evm, GAS_LOW)

    # OPERATION
    if divisor == 0:
        quotient = 0
    elif dividend == -U255_CEIL_VALUE and divisor == -1:
        quotient = -U255_CEIL_VALUE
    else:
        sign = get_sign(dividend * divisor)
        quotient = sign * (abs(dividend) // abs(divisor))

    push(evm.stack, U256.from_signed(quotient))

    # PROGRAM COUNTER
    evm.pc += 1
[ 13440 ]
def METHOD_NAME(self): """Returns global batch size.""" return self.batch_size * self._num_devices
[ 285, 2277, 1318 ]
def METHOD_NAME(background_job, execute_fn):
    """
    Common way of `BackgroundTask.submit`
    """
    background_job.save()
    execute_fn.schedule(args=(background_job.id,), delay=10)
[ 579, 604, 2272, 202 ]
def METHOD_NAME(m):
    return "".join(choice([str.upper, str.lower])(c) for c in m[1]), None
[ 214, -1 ]