Dataset columns: code — string (4 to 4.48k chars) · docstring — string (1 to 6.45k chars) · _id — string (24 chars)
def setProxyAddress(self, ip, port):
    try:
        if not len(ip):
            return None
        self.proxyAddress = (ip, port)
        ret = socket.gethostbyname(str(ip))
        if ret != ip:
            self.proxyAddress = (ret, port)
            self.proxyDstHostname = ip
    except Exception as e:
        self.error(e)
        self.onResolveHostnameProxyFailed(err=e)
        return None
    return self.proxyAddress
Set the destination server address

@param ip: destination ip address
@type ip: string
@param port: destination tcp port
@type port: Integer
@return: the resolved (ip, port) tuple, or None on failure
@rtype: tuple
625941b21b99ca400220a842
def read_lines(filename="", nb_lines=0):
    with open(filename, encoding="utf-8") as fp:
        if nb_lines <= 0:
            print(fp.read().rstrip())
        else:
            for _ in range(nb_lines):
                print(fp.readline().rstrip())
Reads the first nb_lines lines of a text file; prints the whole file when nb_lines <= 0.
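A quick usage sketch; the file name below is hypothetical:

# 'notes.txt' is a hypothetical file used only for illustration.
read_lines("notes.txt", nb_lines=2)  # prints the first two lines
read_lines("notes.txt")              # prints the whole file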
625941b232920d7e50b27f65
def __enter__(self):
    return self
Context-manager setup method; returns the instance itself.
625941b2d10714528d5ffa77
def parse(self, json: str) -> Command:
    try:
        json = loads(json)
    except Exception:
        raise ParseError(['Invalid JSON'])
    if not isinstance(json, dict):
        raise ParseError(['Invalid JSON'])
    errors = [e.message for e in self.validator.iter_errors(json)]
    if len(errors) > 0:
        raise ParseError(errors)
    cmd_name = json['name']
    cmd: Command = self.classes.get(cmd_name)()
    if 'args' in json.keys():
        cmd.set_arguments(json['args'])
    return cmd
Parse a JSON stringified representation of a command.

:param json: JSON stringified representation of the command
:return: the corresponding command instance represented in the stringified JSON
:raises ParseError: in case of parsing an invalid command
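A usage sketch, under the assumption that the parser's `classes` registry maps 'move' to a Command subclass; the command name and args are illustrative, not from the source:

# Assumed: 'move' was registered in parser.classes.
cmd = parser.parse('{"name": "move", "args": {"x": 1, "y": 2}}')
# cmd is now a Command instance with its arguments set.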
625941b26aa9bd52df036b33
def __init__(self, *args):
    this = _gnuradio_core_gengen.new_gr_integrate_ii_sptr(*args)
    try:
        self.this.append(this)
    except Exception:
        self.this = this
__init__(self) -> gr_integrate_ii_sptr
__init__(self, p) -> gr_integrate_ii_sptr
625941b27c178a314d6ef1e9
def guid():
    import uuid
    return uuid.uuid4().hex
Generate a UUID hex string.
625941b2187af65679ca4eb7
def test_validate_credentials_valid(self):
    self.request.user = self.user
    result = self.basic_auth_backend.validate_credentials(
        self.request, username='testuser', password='testpassword')
    self.assertEqual(result, (True, None, None))
Testing Basic Auth validate_credentials with valid credentials
625941b2046cf37aa974cadd
def unique(items):
    # Preserve first-occurrence order while dropping duplicates.
    result = []
    for x in items:
        if x not in result:
            result.append(x)
    return result
Returns a copy of the list without duplicates, preserving first-occurrence order.
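A quick example:

unique([3, 1, 3, 2, 1])  # -> [3, 1, 2]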
625941b215fb5d323cde08a0
def scale_point_to_CCF(self, point):
    vxsize = self.atlas._info[-1]['vxsize'] * 1e6
    p_to_ccf = ((self.atlas_view.atlas.shape[1] - point[0]) * vxsize,
                (self.atlas_view.atlas.shape[2] - point[1]) * vxsize,
                (self.atlas_view.atlas.shape[0] - point[2]) * vxsize)
    return p_to_ccf
Returns a tuple (x, y, z) scaled from Item coordinates to CCF coordinates.

Point is a tuple with values x, y, z (ordered).
625941b2f548e778e58cd312
def builder_inited(app):
    if app.builder.name in ['html', 'readthedocs']:
        app.add_node(VariationNode,
                     html=(visit_variation_node, depart_variation_node))
        app.add_directive('only', OnlyVariationDirective, override=True)
This is the earliest event hook possible, hopefully adding stuff here doesn't screw anything up. We only want our stuff to run when we're using the regular old HTML builder.
625941b297e22403b379cd32
def __init__(self, d):
    if not isinstance(d, Distribution):
        raise ditException('`d` must be a Distribution instance.')
    # Duck-type check: string outcomes support concatenation with '',
    # while integer/tuple outcomes raise TypeError.
    try:
        d.outcomes[0] + ''
    except TypeError:
        is_int = True
    else:
        is_int = False
    self.is_int = is_int
    self.L = d.outcome_length()
    self.ctor = d._outcome_ctor
    self.outcome_class = d._outcome_class
Initialize the random variable function creator.

Parameters
----------
d : Distribution
    The distribution used to create the new random variables.

Examples
--------
>>> d = dit.Distribution(['00', '01', '10', '11'], [1/4]*4)
>>> bf = dit.RVFunctions(d)
>>> d = dit.insert_rvf(d, bf.xor([0,1]))
>>> d = dit.insert_rvf(d, bf.xor([1,2]))
>>> d.outcomes
('0000', '0110', '1011', '1101')
625941b2796e427e537b0353
def unparsedEntityDecl(self, name, publicId, systemId, ndata):
    pass
Handle an unparsed entity declaration event.
625941b245492302aab5e052
def __init__(self, config_src: Union[TextIOWrapper, StringIO, str],
             config_schema_path: str = 'yml_schema.json'):
    configuration_schema = self.load_configuration_schema(
        config_schema_path=config_schema_path)
    self.config, self.config_path = self.load_yml(
        config_src=config_src,
        env_tag=self.env_variable_tag,
        env_pattern=self.env_variable_pattern)
    if 'test_mode' in self.config.keys():
        if isinstance(self.config['test_mode'], str):
            self.config['test_mode'] = False if self.config['test_mode'].lower() == 'false' else True
        self.test_mode = self.config['test_mode']
    else:
        self.test_mode = True
    if 'check_interval' in self.config.keys():
        self.config['check_interval'] = int(self.config['check_interval'])
        self.check_interval = self.config['check_interval']
    else:
        self.check_interval = 120
    if 'crawl_interval' in self.config.keys():
        self.config['crawl_interval'] = int(self.config['crawl_interval'])
        self.crawl_interval = self.config['crawl_interval']
    else:
        self.crawl_interval = 15
    if 'anchor_class_name' in self.config.keys():
        self.anchor_class_name = self.config['anchor_class_name']
    else:
        self.anchor_class_name = "highlight"
    logger.debug("Loaded config: %s" % self.config)
    validate_json_schema(self.config, configuration_schema)
    self.lookup_url = self.config['lookup_url']
    self.tag = self.config['tag']
    all_config_attributes = ('datastore', 'cloudstore', 'email_app')
    for config_attribute in all_config_attributes:
        if config_attribute in self.config.keys():
            setattr(self, config_attribute, self.config[config_attribute])
            self.config_attributes.append(config_attribute)
        else:
            setattr(self, config_attribute, None)
The basic constructor. Creates a new configuration instance from the specified YAML source.

:param config_src: the YAML configuration source (file object, StringIO, or path)
:param config_schema_path: path to the JSON schema used to validate the configuration
625941b2dc8b845886cb52c6
def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
    request.client = self._clientgetter(client_id)
    client = request.client
    if hasattr(client, 'validate_redirect_uri'):
        return client.validate_redirect_uri(redirect_uri)
    return redirect_uri in client.redirect_uris
Ensure the client is authorized to redirect to the redirect_uri.

This method is used in the authorization code grant flow and in the
implicit grant flow. By default it checks strictly whether redirect_uri
is in the client's redirect_uris; you can add a `validate_redirect_uri`
function on the client for customized validation.
625941b2d58c6744b42579fa
def fetch(self):
    proxies = []
    for _ in range(10):
        url = ("https://www.proxyscan.io/api/proxy?last_check=9800"
               "&uptime=50&limit=20&_t=" + str(time.time()))
        resp = requests.get(url).json()
        for data in resp:
            protocol = data['Type'][0].lower()
            proxies.append((protocol, data['Ip'], data['Port']))
    return list(set(proxies))
Perform one crawl and return a list whose elements are (protocol, ip, port) tuples; protocol is the protocol name, currently mainly http.

Example return value: [('http', '127.0.0.1', 8080), ('http', '127.0.0.1', 1234)]
625941b2de87d2750b85fb1f
def extract_value(self, data):
    errors = []
    if 'id' not in data:
        errors.append('Must have an `id` field')
    if 'type' not in data:
        errors.append('Must have a `type` field')
    elif data['type'] != self.type_:
        errors.append('Invalid `type` specified')
    if errors:
        raise ValidationError(errors)
    if 'attributes' in data and self.__schema:
        return self.schema.load({'data': data}).data
    return data.get('id')
Extract the id key and validate the request structure.
625941b230bbd722463cbb5c
def get_test_data(statement):
    match = re_stmt.match(statement.text)
    assert match
    tablename = match.group("tablename")
    result = Mock(spec_set=("fetchall", "close"))
    try:
        data = test_data[tablename].pop(0)
    except IndexError:
        data = []
    result.fetchall.return_value = data
    return result
Validate the SQL call and return test data. The tablename is extracted from the SQL statement. On the first call, the test data, if any, is returned. On the second call, an empty list is returned.
625941b2f8510a7c17cf9498
def get_base_packing_price(self, with_properties=True, amount=1):
    return self.get_price(with_properties, amount) * self._calc_packing_amount()
Returns the base packing price of the product.

**Parameters:**

with_properties
    If the instance is a configurable product and with_properties is
    True, the prices of the default properties are added to the price.

amount
    The amount of products for which the price is calculated.
625941b28e71fb1e9831d547
def get_language():
    if not config.LANGUAGE_CALLBACK:
        raise EnvironmentError("Language callback is not configured!")
    return config.LANGUAGE_CALLBACK()
Get current language.
625941b2f9cc0f698b140399
def _row_keys_filter_helper(row_keys):
    filters = []
    for row_key in row_keys:
        filters.append(RowKeyRegexFilter(row_key))
    num_filters = len(filters)
    if num_filters == 0:
        raise ValueError('Must have at least one filter.')
    elif num_filters == 1:
        return filters[0]
    else:
        return RowFilterUnion(filters=filters)
Creates a union filter for a list of rows.

:type row_keys: list
:param row_keys: Iterable containing row keys (as strings).

:rtype: :class:`~google.cloud.bigtable.row.RowFilter`
:returns: The union filter created containing all of the row keys.

:raises: :class:`ValueError <exceptions.ValueError>` if there are no
         filters to union.
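A brief usage sketch; the row keys are invented for illustration:

# One key yields a single RowKeyRegexFilter; several yield a RowFilterUnion.
single = _row_keys_filter_helper(['row-1'])
union = _row_keys_filter_helper(['row-1', 'row-2'])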
625941b27047854f462a11a6
def example2():
    with open("example1.txt") as f:
        data = json.load(f)
        print(data["id"], data["name"], data["age"], data["wage"], data["active"])
    with open("example2.txt") as f:
        data = json.load(f)
        for row in data:
            print(row["id"], row["name"], row["age"])
Demonstrates json.load() on a single JSON object and on a list of objects.
625941b28c3a873295158150
def list(self, path, protocol):
    if path is None:
        path = ""
    return self.receiveFromConnection(["LIST " + self.escapePath(path)], protocol)
Retrieve a file listing into the given protocol instance.

This method issues the 'LIST' FTP command.

@param path: path to get a file listing for.
@param protocol: a L{Protocol} instance, probably a
    L{FTPFileListProtocol} instance. It can cope with most common file
    listing formats.

@return: L{Deferred}
625941b2099cdd3c635f09f0
def test_mhs(self):
    geom = mhs(1)
    expected_fovs = np.array([
        [[0.86, 0.84, 0.82, 0.8, 0.79, 0.77, 0.75, 0.73, 0.71, 0.69,
          0.67, 0.65, 0.63, 0.61, 0.59, 0.57, 0.55, 0.53, 0.51, 0.49,
          0.48, 0.46, 0.44, 0.42, 0.4, 0.38, 0.36, 0.34, 0.32, 0.3,
          0.28, 0.26, 0.24, 0.22, 0.2, 0.18, 0.16, 0.15, 0.13, 0.11,
          0.09, 0.07, 0.05, 0.03, 0.01, -0.01, -0.03, -0.05, -0.07, -0.09,
          -0.11, -0.13, -0.15, -0.16, -0.18, -0.2, -0.22, -0.24, -0.26, -0.28,
          -0.3, -0.32, -0.34, -0.36, -0.38, -0.4, -0.42, -0.44, -0.46, -0.48,
          -0.49, -0.51, -0.53, -0.55, -0.57, -0.59, -0.61, -0.63, -0.65, -0.67,
          -0.69, -0.71, -0.73, -0.75, -0.77, -0.79, -0.8, -0.82, -0.84, -0.86]],
        np.zeros((1, 90))], dtype=np.float64)
    self.assertTrue(np.allclose(geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2))
Test the definition of the mhs instrument
625941b2d268445f265b4c07
def pos_tag_sentence(sentence, tokenizer, neg_suffix_appender=None):
    tokens = tokenizer.tokenize(sentence)
    if neg_suffix_appender is not None:
        tokens = neg_suffix_appender.add_negation_suffixes(tokens)
    return clean_tokens_tags(nltk.pos_tag(tokens))
INPUT: a sentence string
OUTPUT: list of tuples

Given a tokenized sentence, return a list of tuples of form (token, POS)
where POS is the part of speech of token.
625941b2925a0f43d2549c05
def put(self):
    args = requester_parser.parse_args()
    email = args['email']
    password = args['password']
    register_user(email=email, password=password)
    requesterDocument = Requester.objects.get_or_404(email=email)
    if requesterDocument:
        return {'requester_id': str(requesterDocument.id)}
    else:
        return requesterDocument
Create a new requester.

:param str email: requester email address
:param str password: requester password
:returns: JSON with requester_id string to use in future requests.

**Example response:**

.. code-block:: json

    {
        "requester_id": "12345"
    }
625941b2ab23a570cc24ff1a
def exploreIsomer(self, isomer, reactionModel, database):
    if isomer in self.explored:
        logging.warning('Already explored isomer {0} in pressure-dependent network #{1:d}'.format(isomer, self.index))
        return []
    assert [isomer] in self.products
    assert isomer not in self.isomers
    assert isomer not in self.source
    logging.info('Exploring isomer {0} in pressure-dependent network #{1:d}'.format(isomer, self.index))
    self.explored.append(isomer)
    self.isomers.append(isomer)
    self.products.remove([isomer])
    newReactionList = reactionModel.react(database, isomer)
    return newReactionList
Explore a previously-unexplored unimolecular `isomer` in this partial network using the provided core-edge reaction model `reactionModel`, returning the new reactions and new species.
625941b2627d3e7fe0d68be1
def revert(self, pool_id, *args, **kwargs):
    LOG.warning("Reverting mark pool ACTIVE in DB for pool id %s", pool_id)
    self.task_utils.mark_pool_prov_status_error(pool_id)
Mark the pool as broken.

:param pool_id: pool_id that failed to update
:returns: None
625941b29f2886367277a62d
def set_task(self, task):
    raise NotImplementedError
Sets the specified task to the current environment.

Args:
    task: task of the meta-learning environment
625941b2e8904600ed9f1cbc
def tradeoffs_example(refecon, refpho):
    np.random.seed(3)
    inds = np.random.randint(0, high=len(refecon), size=5)
    pts1 = refecon[inds]
    pts2 = -1 * refpho[inds]
    # The randomly drawn points above are immediately overridden by
    # hand-picked coordinates.
    pts1 = (1, 1.4, 0.3)
    pts2 = (-0.5, -1.65, -0.2)
    pts1a = (0.5, 1.2, 0.2)
    pts2a = (-1.85, -1.7, -0.5)
    utimin = 0
    utimax = 1.1 * lake_problem_JDM_uti_1L(0.1)
    phomax = 0
    phomin = -1 * (1.1 * lake_problem_JDM_pho_1L(0.1))
    utilims = (utimin, utimax)
    pholims = (phomin, phomax)
    savewd = os.getcwd()
    fig = plt.figure(dpi=150)
    plt.annotate('', xy=(utimax, phomin), xycoords='data', xytext=(utimin, phomin),
                 arrowprops=dict(facecolor='black', width=3, headwidth=7,
                                 headlength=7, connectionstyle='arc3'))
    plt.annotate('', xy=(utimin, phomax), xycoords='data', xytext=(utimin, phomin),
                 arrowprops=dict(facecolor='black', width=3, headwidth=7,
                                 headlength=7, connectionstyle='arc3'))
    plt.xlabel('A', fontsize=18)
    plt.ylabel('B', fontsize=18)
    plt.xlim(utilims)
    plt.ylim(pholims)
    for spine in plt.gca().spines.values():
        spine.set_visible(False)
    plt.yticks([])
    plt.xticks([])
    plt.scatter(pts1, pts2, c='k', s=200, lw=0.2)
    plt.scatter(pts1a, pts2a, c='k', s=200, lw=0.2, alpha=0.25)
    plt.legend(['Non-Dominated Alternatives', 'Dominated Alternatives'])
    plt.savefig(savewd + '\\fig1.pdf', dpi=300, facecolor='w', edgecolor='w',
                orientation='portrait', transparent=False, pad_inches=0.1)
    plt.savefig(savewd + '\\fig1.png', dpi=300, facecolor='w', edgecolor='w',
                orientation='portrait', transparent=False, pad_inches=0.1)
    return fig
Group 1: Participants flagged "other objectives"
625941b23539df3088e2e0de
def delete(self, resource_group_name, appliance_name, custom_headers=None,
           raw=False, **operation_config):
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Solutions/appliances/{applianceName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url(
            "resource_group_name", resource_group_name, 'str',
            max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'applianceName': self._serialize.url(
            "appliance_name", appliance_name, 'str', max_length=64, min_length=3),
        'subscriptionId': self._serialize.url(
            "self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query(
        "self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')

    def long_running_send():
        request = self._client.delete(url, query_parameters)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_status(status_link, headers=None):
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_output(response):
        if response.status_code not in [204, 202]:
            raise models.ErrorResponseException(self._deserialize, response)
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
Deletes the appliance.

:param resource_group_name: The name of the resource group. The name
 is case insensitive.
:type resource_group_name: str
:param appliance_name: The name of the appliance.
:type appliance_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the deserialized
 response
:rtype:
 :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
 instance that returns None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
 if raw=true
:raises:
 :class:`ErrorResponseException<azure.mgmt.resource.managedapplications.models.ErrorResponseException>`
625941b26e29344779a623aa
def test_lp_url_offline(self):
    brdir = os.path.join(self.dst_dir, "lp_branch")
    os.makedirs(os.path.join(brdir, ".bzr", "branch"))
    branch = BzrBranch(brdir, "lp:something", offline=True)
    self.assertEqual(branch.url, "lp:something")
    parent_loc = "bzr+ssh://previously/resolved"
    branch.write_conf(dict(parent_location=parent_loc))
    branch.update_conf()
    self.assertEqual(branch.parse_conf()["parent_location"], parent_loc)
[offline mode] lp: locations are not to be resolved.

See lp:1249566; resolving lp: performs outgoing requests, and resolving
the remote URL is irrelevant anyway, since it won't be used.
625941b2462c4b4f79d1d463
def set_PublicationDate(self, value):
    super(SearchByKeywordInputSet, self)._set_input('PublicationDate', value)
Set the value of the PublicationDate input for this Choreo.

((optional, date) Limits by date or range of dates. The publication-date
is the date the review was first published in The Times. Format
YYYY-MM-DD. Separate ranges with semicolons.)
625941b2b57a9660fec33612
def licenseKeyFormatting(self, S, K):
    S, k = S.replace('-', '').upper(), 1
    for i in range(len(S) - 1, 0, -1):
        if not k % K:
            S, k = S[:i] + '-' + S[i:], 0
        k += 1
    return S
:type S: str
:type K: int
:rtype: str
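Worked examples; `sol` stands for a hypothetical instance of the class that defines this method:

sol.licenseKeyFormatting("5F3Z-2e-9-w", 4)  # -> "5F3Z-2E9W"
sol.licenseKeyFormatting("2-5g-3-J", 2)     # -> "2-5G-3J"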
625941b250812a4eaa59c0ba
def read_label(trial_path):
    label = pd.read_csv(trial_path + 'label.csv')
    ground_true_valence = int(label['valence']) > 5
    ground_true_arousal = int(label['arousal']) > 5
    return ground_true_valence, ground_true_arousal
Read a trial's ground truth from the trial path.

Parameters:
    trial_path: the path of the trial file

Returns:
    ground_true_valence: the ground truth value for this trial in valence space
    ground_true_arousal: the ground truth value for this trial in arousal space
625941b2e5267d203edcda35
def prepare_graph_date_range():
    dates = Bag.objects.aggregate(Min("bagging_date"), Max('bagging_date'))
    system_start_date = dates['bagging_date__min']
    system_end_date = dates['bagging_date__max']
    daily_edit_counts = []
    month = system_start_date.strftime('%Y-%m')
    daily_edit_counts.append([datetime.strptime(month, '%Y-%m'), 0])
    for dt in rrule.rrule(
        rrule.DAILY,
        dtstart=system_start_date,
        until=system_end_date,
    ):
        if month != dt.strftime('%Y-%m'):
            month = dt.strftime('%Y-%m')
            daily_edit_counts.append([datetime.strptime(month, '%Y-%m'), 0])
    return daily_edit_counts
Several functions use the same code to prepare the dates and values for
the graphing of event data, so we can make a function for it. DRY 4 LYPHE

Returns list of lists.
625941b2cdde0d52a9e52dc9
def __init__(self, payloadEnvelope: PayloadEnvelope,
             destVortexName: Optional[str] = None,
             destVortexUuid: Optional[str] = None,
             timeout: Optional[float] = None,
             resultCheck=True,
             logTimeoutError=True) -> None:
    Deferred.__init__(self)
    if not timeout:
        timeout = self.TIMEOUT
    self._resultCheck = resultCheck
    self._logTimeoutError = logTimeoutError
    self._messageId = str(uuid4()) + str(PayloadResponse.__SEQ)
    PayloadResponse.__SEQ += 1
    payloadEnvelope.filt[self.__messageIdKey] = self._messageId
    self._filt = copy(payloadEnvelope.filt)
    self._destVortexName = destVortexName
    self._status = self.PROCESSING
    self._date = datetime.now(pytz.utc)
    self._endpoint = PayloadEndpoint(self._filt, self._process)
    if destVortexName or destVortexUuid:
        d: Deferred = payloadEnvelope.toVortexMsgDefer()
        d.addCallback(VortexFactory.sendVortexMsg,
                      destVortexName=destVortexName,
                      destVortexUuid=destVortexUuid)
        d.addErrback(self.errback)
    # Capture the current traceback so timeout errors can report where
    # this request was created.
    try:
        raise Exception()
    except Exception:
        self._stack = sys.exc_info()[2]
    self.addTimeout(timeout, reactor)
    self.addErrback(self._timedOut)
Constructor

Tag and optionally send a payload. The timeout starts as soon as the
constructor is called.

:param payloadEnvelope: The payload envelope to send to the remote end,
    and wait for a response for
:param destVortexName: The name of the vortex to send to.
:param destVortexUuid: The UUID of the vortex to send a payload to.
:param timeout: The timeout to wait for a response
:param resultCheck: Should the response payload.result be checked; if
    it fails, errback will be called.
:param logTimeoutError: Whether to log an error when the response times out.
625941b256b00c62f0f143ef
def winning(s, board, player):
    winner = win(board)
    if winner == player:
        return (True, None)
    elif winner == enemy(player):
        return (False, [board])
    else:
        new = board[:]
        new[s(new)] = player
        for i in range(16):
            if new[i] == NEITHER:
                res, game = winning(s, new[:i] + [enemy(player)] + new[i+1:], player)
                if not res:
                    return (False, [board, new] + game)
        return (True, None)
Is s a winning strategy for the given player from the given position on their turn? Return also the losing game.
625941b28c0ade5d55d3e753
def delivery_stats(self):
    self._check_values()
    req = Request(
        __POSTMARK_URL__ + 'deliverystats',
        None,
        {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
            'X-Postmark-Server-Token': self.__api_key,
            'User-agent': self.__user_agent
        }
    )
    try:
        result = urlopen(req)
        with closing(result):
            if result.code == 200:
                return json.loads(result.read())
            else:
                raise PMMailSendException('Return code %d: %s' % (result.code, result.msg))
    except HTTPError as err:
        return err
Returns a summary of inactive emails and bounces by type.
625941b24527f215b584c1f8
def test_user_viewing_detail_post(self):
    post = Post.objects.get(title='Lorem ', body='ipsum')
    serializer = PostSerializer(post)
    response = self.client.get(
        reverse('post_router-detail', kwargs={'pk': post.pk}),
        Authorization=f'Bearer: {self.token}')
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.data, serializer.data)
User is viewing one post:
    get post record -> post.pk
    send GET request with post data and JWT token
    check status code and check response data
625941b215fb5d323cde08a2
def analyze_market_page(auth, params, db):
    session = login(auth)
    market_url = "http://es.ibasketmanager.com/mercado.php"
    market_url = market_url + "?juvenil=" + str(params["juvenil"])
    market_url = market_url + "&tiempos=" + str(params["tiempos"])
    market_url = market_url + "&posiciones=" + str(params["posiciones"])
    market_url = market_url + "&calidad=" + str(params["calidad"])
    market_url = market_url + "&edad=" + str(params["edad"])
    market_url = market_url + "&cdirecta=" + str(params["cdirecta"])
    print(show("market") + ">{ " + market_url + " }")
    r = session.get(market_url)
    load_status = 0
    while load_status != 200:
        load_status = r.status_code
    auctions = get_auctions(r.content)
    for v_auction in auctions:
        id_player = str(int(str(v_auction.player)))
        player = player_page.get_player_data(id_player, auth, session)
        similars = player_page.get_similar_data(id_player, auth, None, session)
        db.auctions.insert_one(v_auction.to_db_collection())
        db.auctions_history.replace_one(
            {'_id': v_auction._id}, v_auction.to_db_collection(), True)
        player_page.insert_player(player, id_player, db)
        player_page.insert_similars(similars, db)
        player_page.updateAuctions(id_player, v_auction._id, db)
Accesses the market page built from the parameters.

Keyword arguments:
    auth -- authentication string for the site.
    params -- parameters used to build the market URL.
    db -- database connection object.
625941b2b7558d58953c4cb8
def suggestion(self, message, **kwargs):
    pass
Suggest on an incoming message.

This is the component's chance to process an incoming message. The
component can rely on any context attribute to be present that gets
created by a call to `pipeline_init` of ANY component, and on any
context attributes created by a call to `suggestion` of components
previous to this one.
625941b216aa5153ce362214
def add(self, v):
    if self.dimension != v.dimension:
        raise TypeError("Please use a vector with the same dimension")
    ret = []
    for index, val in enumerate(self.coordinates):
        ret.append(val + v.coordinates[index])
    return Vector(ret)
This function returns the sum of 2 vectors if they are of the same
dimension. The sum is returned as a new Vector built from a list of
coordinates.
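A usage sketch, assuming Vector is constructed from a list of coordinates (as the return statement suggests):

v = Vector([1, 2, 3]).add(Vector([4, 5, 6]))
# v.coordinates is now [5, 7, 9]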
625941b250485f2cf553cb2d
def stage_one_trimming(self, alignment, window_size, proportion, threshold,
                       min_len, replace_ends=False):
    start, end = self.running_average(alignment, window_size, proportion, threshold)
    s1_trimmed = MultipleSeqAlignment([], Gapped(IUPAC.ambiguous_dna, "-?"))
    for sequence in alignment:
        sequence.seq.alphabet = IUPAC.IUPACAmbiguousDNA()
        if start >= 0 and end:
            trim = sequence[start:end]
            if (set(trim) != set(['-']) and set(trim) != set(['?'])
                    and len(trim) >= min_len):
                if not replace_ends:
                    s1_trimmed.append(sequence[start:end])
                else:
                    repl = self._replace_ends(str(sequence[start:end].seq))
                    s1_trimmed.append(self._record_formatter(repl, sequence.id))
            else:
                s1_trimmed = None
                break
        else:
            s1_trimmed = None
            break
    return s1_trimmed
First stage (of 3) of alignment trimming, to find and trim the edges of
a given alignment. Calls the running_average function above to determine
reasonable alignment start and end trimming for the entire alignment block.
625941b2b5575c28eb68dd91
def preprocess_sub_units(self):
    if self.unit == "char":
        self.preprocess_char()
    elif self.unit == "char-ngram":
        self.preprocess_char_ngram()
    elif self.unit == "morpheme":
        self.preprocess_morpheme()
    elif self.unit == "oracle":
        self.preprocess_oracle()
    elif self.unit == "oracle-db":
        self.preprocess_oracle_db()
    else:
        sys.exit("Unknown unit")
Build dictionaries for sub word units
625941b299cbb53fe679297d
def put_async_relative_retry_no_status(
        self, product=None, custom_headers=None, raw=False, **operation_config):
    # Construct URL
    url = '/lro/error/putasync/retry/nostatus'
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header(
            "self.config.accept_language", self.config.accept_language, 'str')
    # Construct body
    if product is not None:
        body_content = self._serialize.body(product, 'Product')
    else:
        body_content = None

    def long_running_send():
        request = self._client.put(url, query_parameters)
        return self._client.send(
            request, header_parameters, body_content, **operation_config)

    def get_long_running_status(status_link, headers=None):
        request = self._client.get(status_link)
        if headers:
            request.headers.update(headers)
        return self._client.send(request, header_parameters, **operation_config)

    def get_long_running_output(response):
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        header_dict = {}
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)
            header_dict = {
                'Azure-AsyncOperation': 'str',
                'Location': 'str',
                'Retry-After': 'int',
            }
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            client_raw_response.add_headers(header_dict)
            return client_raw_response
        return deserialized

    if raw:
        response = long_running_send()
        return get_long_running_output(response)

    long_running_operation_timeout = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    return AzureOperationPoller(
        long_running_send, get_long_running_output,
        get_long_running_status, long_running_operation_timeout)
Long running put request, service returns a 200 to the initial request,
with an entity that contains ProvisioningState='Creating'. Poll the
endpoint indicated in the Azure-AsyncOperation header for operation
status.

:param product: Product to put
:type product: :class:`Product <fixtures.acceptancetestslro.models.Product>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the deserialized
 response
:rtype:
 :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
 instance that returns :class:`Product <fixtures.acceptancetestslro.models.Product>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
 if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941b282261d6c526ab238
def add(self, value):
    inserted = False
    node = _Set_Node(value, None)
    if self._front is None:
        inserted = True
        self._front = node
    else:
        curr = self._front
        while curr._value != value:
            if curr._next is None:
                inserted = True
                curr._next = node
            curr = curr._next
    return inserted
---------------------------------------------------------
Adds value to the end of the Set, allows only one copy of value.
Use: inserted = set.add(value)
-------------------------------------------------------
Parameters:
    value - a comparable data element (?)
Returns:
    True if value is inserted, False otherwise (boolean)
-------------------------------------------------------
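A brief usage sketch; the empty Set constructor is assumed for illustration:

s = Set()   # hypothetical constructor
s.add(1)    # -> True (inserted)
s.add(1)    # -> False (1 is already present)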
625941b2293b9510aa2c302f
def _build_action_gui(self, _checkHBox):
    _frameActionSelection = gtk.Frame('Action Selection')
    _checkHBox.pack_start(_frameActionSelection, True, True, 5)
    _asVBox = gtk.VBox()
    _frameActionSelection.add(_asVBox)
    self._greedyAction = gtk.RadioButton(None, 'Use ε-greedy selection')
    self._greedyAction.connect('clicked', self.__on_greedy)
    _asVBox.add(self._greedyAction)
    self._eCounter = self._build_counter('ε value:', .1, .9, _asVBox)
    _softmaxAction = gtk.RadioButton(self._greedyAction, 'Use softmax selection')
    _softmaxAction.connect('clicked', self.__on_softmax)
    _asVBox.add(_softmaxAction)
    self._tCounter = self._build_counter('τ value:', 1, 10, _asVBox, .1, 1, False)
Builds the action selection GUI.

Basically, it should allow selecting between choosing action using
ε-greedy method or using softmax selection. In both cases, there is one
parameter to select, tweaking the process (ε or τ).

_checkHBox  HBox holding the widgets built by this function
625941b2ff9c53063f47bf93
def volume_up(self):
    self._player.volume += 1
Volume up media player.
625941b21f037a2d8b945f93
def all_slice_authorities(self):
    return self._uppercase_keys_in_list(
        [e for e in self._delegate_tools.get_config('SERVICE')["SERVICES"]
         if e['service_type'] == self.SA_SERVICE_TYPE])
Return all slice authorities as defined in the configuration (config.json).
625941b2099cdd3c635f09f2
@curry
def as_package(w3: Web3, manifest: Manifest) -> Package:
    return Package(manifest, w3)
Return a Package object instantiated with the provided manifest and web3 instance.
625941b2d268445f265b4c09
def test_world(self):
    deposits = parameters.get('deposits', 12) + NUM_BASES
    self.assertEqual(len(self.world.agents), deposits)
    for agent in self.world.agents:
        self.assertIn(agent.team, {'mineral', 'base'})
Assert that resources have been initialized in world
625941b2507cdc57c6306a66
def retry(max_retry_count=3, start_page=None):
    def real_decorator(org_func):
        def wrapper(*args, **kwargs):
            browser = None
            _start_page = start_page
            for arg in args:
                if not isinstance(arg, Remote):
                    continue
                browser = arg
                break
            else:
                for _, value in kwargs.items():
                    if not isinstance(value, Remote):
                        continue
                    browser = value
                    break
            if not browser:
                print('not able to find browser in parameters!')
                return org_func(*args, **kwargs)
            if max_retry_count == 0:
                print('max retry count is set to 0, this function is useless right now')
                return org_func(*args, **kwargs)
            if not start_page:
                _start_page = browser.current_url
            rv = None
            retry_count = 0
            while True:
                try:
                    rv = org_func(*args, **kwargs)
                    break
                except Exception as e:
                    retry_count += 1
                    if retry_count > max_retry_count:
                        raise e
                    rv = None
                    browser.get(_start_page)
            return rv
        return wrapper
    return real_decorator
Decorator which refreshes the page and tries to execute the function
again. Use it like this: @retry() => the '()' are important because
it's a decorator with params.
625941b20c0af96317bb7f7e
def print_notes(self, query='any:'):
    extra_args = ['printNotes', '/q', query]
    self._call_enscript(extra_args)
Print a set of notes.
625941b22ae34c7f2600cecb
def __del_feature(self, feature_name):
    del self.__train[feature_name]
    self.__max_bin_count.pop(feature_name)
    self.__features_type.pop(feature_name)
Remove a feature and its metadata from the training set.

Parameters
----------
feature_name
    Name of the feature to delete.

Returns
-------
None
625941b2bf627c535bc12f6b
def parameters(self, currency):
    return self._repo.get_one(currency=currency).parameters
Get the parameters of the blockchain

:rtype: sakia.data.entities.BlockchainParameters
625941b263b5f9789fde6e7b
def my_list_enumerate(mylist, start=0):
    result = []
    for i in range(len(mylist)):
        current_position = i + start
        result.append((current_position, mylist[i]))
    return result
Pure finite list version. Does not work with iterables and returns a list. Should work (at least) with lists, tuples, dicts, and sets.
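For example:

my_list_enumerate(['a', 'b', 'c'], start=1)  # -> [(1, 'a'), (2, 'b'), (3, 'c')]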
625941b292d797404e303f27
def cric__random_forest():
    model = sklearn.ensemble.RandomForestClassifier(100, random_state=0)
    # Patch predict so it returns the positive-class probability
    # instead of hard labels.
    model.predict = lambda X: model.predict_proba(X)[:, 1]
    return model
Random Forest
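A usage sketch with synthetic data; the shapes and values are invented for illustration:

import numpy as np
X = np.random.rand(100, 5)              # synthetic features
y = np.random.randint(0, 2, 100)        # synthetic binary labels
model = cric__random_forest()
model.fit(X, y)
probs = model.predict(X)                # positive-class probabilities via the patched predict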
625941b2fb3f5b602dac342c
def Compute_nodes_centroids(self, nodes_to_explore):
    n_nodes_to_explore = len(nodes_to_explore)
    if (n_nodes_to_explore > self.n_clusters
            and len(self.nodes[nodes_to_explore[0]].graph) != 0
            and len(self.nodes[nodes_to_explore[0]].graph[-1]) > 1):
        obs = np.zeros([n_nodes_to_explore,
                        len(self.nodes[nodes_to_explore[0]].graph[-1])])
        for i in range(n_nodes_to_explore):
            node = self.nodes[nodes_to_explore[i]]
            for j in range(len(node.graph[-1])):
                obs[i][j] = node.graph[-1][j].task_id
        minibatch_kmeans = MiniBatchKMeans(self.n_clusters)
        minibatch_kmeans.fit(obs)
        distances = minibatch_kmeans.transform(obs)
        reduced_nodes_to_explore = []
        for i in range(self.n_clusters):
            pos = distances[:, i].argmin()
            reduced_nodes_to_explore.append(nodes_to_explore[pos])
        return reduced_nodes_to_explore
    else:
        return nodes_to_explore
Compute clusters of nodes_to_explore using k-means, compute their
centroids, and finally return the nodes closest to the centroids.
625941b2eab8aa0e5d26d8f3
def high_variance_confounds(series, n_confounds=5, percentile=2., detrend=True):
    if detrend:
        series = _detrend(series)
    var = _mean_of_squares(series)
    var_thr = stats.scoreatpercentile(var, 100. - percentile)
    series = series[:, var > var_thr]
    u, _, _ = linalg.svd(series, full_matrices=False)
    u = u[:, :n_confounds].copy()
    return u
Return confounds time series extracted from series with highest variance.

Parameters
==========
series: numpy.ndarray
    Timeseries. A timeseries is a column in the "series" array.
    shape (sample number, feature number)
n_confounds: int
    Number of confounds to return
percentile: float
    Highest-variance series percentile to keep before computing the
    singular value decomposition, 0. <= `percentile` <= 100.
    series.shape[0] * percentile / 100 must be greater than n_confounds.
detrend: bool
    If True, detrend timeseries before processing.

Returns
=======
v: numpy.ndarray
    highest variance confounds. Shape: (samples, n_confounds)

Notes
======
This method is related to what has been published in the literature as
'CompCor' (Behzadi NeuroImage 2007).

The implemented algorithm does the following:

- compute sum of squares for each time series (no mean removal)
- keep a given percentile of series with highest variances (percentile)
- compute an svd of the extracted series
- return a given number (n_confounds) of series from the svd with
  highest singular values.

See also
========
nilearn.image.high_variance_confounds
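A minimal usage sketch with synthetic data (shapes only; the values are random):

import numpy as np
series = np.random.randn(200, 1000)   # (samples, features)
confounds = high_variance_confounds(series, n_confounds=5)
# confounds.shape == (200, 5)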
625941b25fdd1c0f98dbffce
def remove_job(self, job_id):
    job = self.scheduler.get_job(job_id)
    if job:
        job.remove()
Shortcut for removing scheduler job by id.
625941b2a79ad161976cbedb
def stamp(im, xy, sout):
    # Minimal sketch following the docstring: sample `im` on a
    # sout x sout grid whose center (sout/2, sout/2) maps back onto
    # the source position (x, y), interpolating with map_coordinates.
    grid_y, grid_x = np.mgrid[0:sout, 0:sout]
    src_x = grid_x - sout / 2.0 + xy[0]
    src_y = grid_y - sout / 2.0 + xy[1]
    stamp = ndimage.map_coordinates(im, [src_y, src_x])
    return stamp
Args:
    im - 2D image of arbitrary dimensions (281x281 for GPI)
    xy - 2-tuple containing x,y coordinates of spot/point source
    sout - size of output image, with source shifted to (sout/2, sout/2)

Return:
    stamp - 2D image of size (sout, sout)

Use ndimage.map_coordinates to create the stamp by interpolation.
See lines 365-374 in gpi_satspotcalib_wd.py
625941b28e05c05ec3eea10d
def read_stop_words(path: str) -> set:
    with open(path) as f:
        return set(line for line in f.read().splitlines() if line)
:param path: input file path
:return: stop word set
625941b250812a4eaa59c0bc
def get_seconds_from_duration(time_str: str) -> int:
    import re
    pattern = re.compile(r'\w(\d+)\w\w(\d+)\w(\d+)\w(\d+)\w')
    matches = pattern.search(time_str)
    d, h, m, s = matches.groups()
    seconds = (int(d) * 86400) + (int(h) * 3600) + (int(m) * 60) + int(s)
    return seconds
This function will convert a TM1 duration string to seconds.

:param time_str: P0DT00H01M43S
:return: int
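A worked example using the duration format from the docstring:

get_seconds_from_duration("P0DT00H01M43S")  # -> 103 (0d + 0h + 1*60s + 43s)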
625941b25fc7496912cc371b
def test_dictlookupone():
    t1 = (('foo', 'bar'),
          ('a', 1),
          ('b', 2),
          ('b', 3))
    try:
        dictlookupone(t1, 'foo', strict=True)
    except DuplicateKeyError:
        pass
    else:
        assert False, 'expected error'
    actual = dictlookupone(t1, 'foo', strict=False)
    expect = {'a': {'foo': 'a', 'bar': 1},
              'b': {'foo': 'b', 'bar': 2}}
    eq_(expect, actual)
    t2 = (('foo', 'bar', 'baz'),
          ('a', 1, True),
          ('b', 2, False),
          ('b', 3, True),
          ('b', 3, False))
    actual = dictlookupone(t2, ('foo', 'bar'), strict=False)
    expect = {('a', 1): {'foo': 'a', 'bar': 1, 'baz': True},
              ('b', 2): {'foo': 'b', 'bar': 2, 'baz': False},
              ('b', 3): {'foo': 'b', 'bar': 3, 'baz': True}}
    eq_(expect, actual)
Test the dictlookupone function.
625941b28c0ade5d55d3e755
def __reduce__(self):
    d = {
        'edges': self.edges,
        'connectivity': self.connectivity,
        'sortingLabel': self.sortingLabel,
    }
    atomType = self.atomType
    if atomType is not None:
        atomType = [a.label for a in atomType]
    return (GroupAtom,
            (atomType, self.radicalElectrons, self.charge, self.label, self.lonePairs),
            d)
A helper function used when pickling an object.
625941b20a50d4780f666c24
def generate_chunk():
    global CHUNK_SIZE
    global password_queue
    global password_generator_empty
    count = 0
    while count < CHUNK_SIZE and not password_generator_empty:
        new_range = _generate_range()
        if new_range:
            password_queue.put(new_range)
        count += 1
generate_chunk()

Generate a chunk of password ranges.
625941b224f1403a92600908
def __len__(self):
    warnings.warn(self._deprecation_msg, DeprecationWarning)
    return len(list(iter(self)))
Return the number of available CFN template keys. This is for backwards compatibility with existing code that expects a parsed-JSON template snippet.
625941b207f4c71912b1121c
def replaceLimitRange(self, **kwargs):
    allParams = ['name', 'namespaces', 'body']
    params = locals()
    for (key, val) in params['kwargs'].iteritems():
        if key not in allParams:
            raise TypeError("Got an unexpected keyword argument '%s'"
                            " to method replaceLimitRange" % key)
        params[key] = val
    del params['kwargs']
    resourcePath = '/api/v1beta3/namespaces/{namespaces}/limitranges/{name}'
    resourcePath = resourcePath.replace('{format}', 'json')
    method = 'PUT'
    queryParams = {}
    headerParams = {}
    formParams = {}
    files = {}
    bodyParam = None
    headerParams['Accept'] = 'application/json'
    headerParams['Content-Type'] = '*/*,'
    if 'name' in params:
        replacement = str(self.apiClient.toPathValue(params['name']))
        replacement = urllib.quote(replacement)
        resourcePath = resourcePath.replace('{' + 'name' + '}', replacement)
    if 'namespaces' in params:
        replacement = str(self.apiClient.toPathValue(params['namespaces']))
        replacement = urllib.quote(replacement)
        resourcePath = resourcePath.replace('{' + 'namespaces' + '}', replacement)
    if 'body' in params:
        bodyParam = params['body']
    postData = formParams if formParams else bodyParam
    response = self.apiClient.callAPI(resourcePath, method, queryParams,
                                      postData, headerParams, files=files)
replace the specified LimitRange

Args:
    name, str: name of the LimitRange (required)
    namespaces, str: object name and auth scope, such as for teams and projects (required)
    body, V1beta3_LimitRange: (required)

Returns:
625941b2596a897236089861
def on_error(self, status_code):
    return True
Returns True upon receiving an error in order to continue listening.
625941b2796e427e537b0357
def __init__(self):
    pass
No-op initializer.
625941b291f36d47f21ac28b
def slider_parametro_1(self):
    size = self.sldExample.value()
    self.filtros['params'][1]['val'] = size
    self.mostrar_imagem()
Handler for slider 1.
625941b2b7558d58953c4cba
def on_failure(self, exc):
    self._breaker.open()
    raise CircuitBreakerError('Trial call failed, circuit breaker opened')
Opens the circuit breaker.
625941b2e5267d203edcda38
def rbridge_id_router_ospf_distribute_list_route_map_route_map(**kwargs):
    config = ET.Element("config")
    rbridge_id = ET.SubElement(config, "rbridge-id",
                               xmlns="urn:brocade.com:mgmt:brocade-rbridge")
    if kwargs.pop('delete_rbridge_id', False) is True:
        delete_rbridge_id = config.find('.//*rbridge-id')
        delete_rbridge_id.set('operation', 'delete')
    rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id")
    rbridge_id_key.text = kwargs.pop('rbridge_id')
    if kwargs.pop('delete_rbridge_id', False) is True:
        delete_rbridge_id = config.find('.//*rbridge-id')
        delete_rbridge_id.set('operation', 'delete')
    router = ET.SubElement(rbridge_id, "router")
    if kwargs.pop('delete_router', False) is True:
        delete_router = config.find('.//*router')
        delete_router.set('operation', 'delete')
    ospf = ET.SubElement(router, "ospf",
                         xmlns="urn:brocade.com:mgmt:brocade-ospf")
    if kwargs.pop('delete_ospf', False) is True:
        delete_ospf = config.find('.//*ospf')
        delete_ospf.set('operation', 'delete')
    vrf_key = ET.SubElement(ospf, "vrf")
    vrf_key.text = kwargs.pop('vrf')
    if kwargs.pop('delete_vrf', False) is True:
        delete_vrf = config.find('.//*vrf')
        delete_vrf.set('operation', 'delete')
    distribute_list = ET.SubElement(ospf, "distribute-list")
    if kwargs.pop('delete_distribute_list', False) is True:
        delete_distribute_list = config.find('.//*distribute-list')
        delete_distribute_list.set('operation', 'delete')
    route_map = ET.SubElement(distribute_list, "route-map")
    if kwargs.pop('delete_route_map', False) is True:
        delete_route_map = config.find('.//*route-map')
        delete_route_map.set('operation', 'delete')
    route_map = ET.SubElement(route_map, "route-map")
    if kwargs.pop('delete_route_map', False) is True:
        delete_route_map = config.find('.//*route-map')
        delete_route_map.set('operation', 'delete')
    route_map.text = kwargs.pop('route_map')
    callback = kwargs.pop('callback', _callback)
    return callback(config, mgr=kwargs.pop('mgr'))
Auto Generated Code
625941b2956e5f7376d70c13
def __and__(self, other):
    return Ipv6(self.address & other)
Ipv6.__and__(b) <=> Ipv6 & b -> return Ipv6
625941b2046cf37aa974cae1
def fetch_monitors(scheduler, config):
    monitors_url = urljoin(config['master'].get('endpoint'), 'monitors')
    logger.info('Requesting endpoint for monitors at %s.', monitors_url)
    r = requests.get(monitors_url,
                     auth=(config.get('my_name'), config['master'].get('key')))
    logger.debug('Master returned following Monitors: %s.', r.text)
    monitors = json.loads(r.text).get('monitors')
    for job in scheduler.get_jobs():
        scheduler.cancel(job)
    for monitor in monitors:
        scheduler.schedule(
            scheduled_time=datetime.utcnow(),
            func=run_monitor,
            args=[monitor['id'], monitor['unique_id'], monitor['check'],
                  json.loads(monitor['arguments']),
                  config['master'].get('endpoint')],
            interval=monitor['frequency'] * 60,
            repeat=None)
Fetch Monitors and schedule accordingly.

Arguments:
    scheduler (rq_scheduler.Scheduler): The Scheduler instance initialized.
    config (dict): Entire loaded config.
625941b27b180e01f3dc45a3
def set_molname(gro_file, atomlist, molname):
    if len(molname) > 5:
        raise Exception(
            "molname has {} characters, but can only have 5".format(len(molname))
        )
    for i, line in enumerate(fileinput.input(gro_file, inplace=True)):
        if i - 2 in atomlist:
            line = line[0:5] + "{:<5}".format(molname) + line[10:]
        sys.stdout.write(line)
Set the molecule name of the atoms in atomlist to molname.
625941b2dc8b845886cb52ca
def get_durations(self):
    matches = re.findall("◎片长(.*?)◎", self.source)
    if matches:
        return matches[0].strip(' ').strip('/')
    return ''
Extract the film's running time.
625941b2de87d2750b85fb23
def digit(loc, pid):
    numerator = int(pow(10, loc - 1))
    return (abs(pid) // numerator) % 10
Returns the digit at the given location in the pid.

PID digits (base 10) are: n nr nl nq1 nq2 nq3 nj; the location enum
provides a convenient index into the PID. C++ original:

    int numerator = (int) std::pow(10.0,(loc-1));
    return (abspid(pid)/numerator)%10;
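Worked examples:

digit(1, 211)  # -> 1 (rightmost digit)
digit(3, 211)  # -> 2 (hundreds digit)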
625941b2796e427e537b0358
def _handler_command_autosample(self, *args, **kwargs):
    return ProtocolState.AUTOSAMPLE, (ProtocolState.AUTOSAMPLE, None)
Begin autosample.
625941b28c3a873295158154
def get_icanhaz():
    base_url = 'https://icanhazdadjoke.com/search'
    r = requests.get(base_url)
    last_page = 0
    soup = bsoup(r.content, 'lxml')
    for a in soup.findAll('a', class_='pagination-link '):
        last_page = max(last_page, int(a.text))
    dad_jokes = []
    for i in range(1, last_page + 1):
        url = base_url + f'?term=&page={i}'
        r = requests.get(url)
        soup = bsoup(r.content, 'lxml')
        for pre in soup.findAll('pre', style=True):
            dad_jokes.append(pre.text)
    return dad_jokes
Get all dad jokes from icanhazdadjoke.com Returns --------------------------------- dad_jokes : list list of joke strings
625941b273bcbd0ca4b2be13
def __bool__(self): <NEW_LINE> <INDENT> layer_exists = hasattr(self._packet, self._layer_name) <NEW_LINE> return layer_exists
Returns whether this layer exists in the packet.
625941b2711fe17d82542113
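With __bool__ defined this way, a layer object can be used directly in a conditional. A self-contained stand-in reproducing the semantics (all names here are hypothetical):

class LayerView:
    # Minimal stand-in for the class above: truthy iff the packet has the layer.
    def __init__(self, packet, layer_name):
        self._packet, self._layer_name = packet, layer_name
    def __bool__(self):
        return hasattr(self._packet, self._layer_name)

class FakePacket:
    tcp = object()

print(bool(LayerView(FakePacket(), 'tcp')))   # True: packet has a 'tcp' attribute
print(bool(LayerView(FakePacket(), 'udp')))   # False: no 'udp' attribute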
def generateSmUseExample(self): <NEW_LINE> <INDENT> comments = ('/*状态机%s使用示例:\n* 当前状态机已在SMDF中定义出实体%s,需要做的只是让其运行起来\n' %(self.__fileName,self.__fileName)) <NEW_LINE> comments += ('* 状态机运行方式有两种直接给时钟驱动的SMR和使用自定义记录状态后进驱动的SMRE\n') <NEW_LINE> comments += ('***********************************************\n') <NEW_LINE> comments += ('* SMR方式\n') <NEW_LINE> comments += ('***********************************************\n') <NEW_LINE> comments += ('void test(void){\n') <NEW_LINE> comments += (' while(1){SMR(%s);};\n}\n' %self.__fileName) <NEW_LINE> comments += ('***********************************************\n') <NEW_LINE> comments += ('* SMRE方式\n') <NEW_LINE> comments += ('***********************************************\n') <NEW_LINE> comments += ('void test2(void){\n') <NEW_LINE> comments += (' static %sRec %sRunRec = {%s_sta_init};\n' %(self.__fileName,self.__fileName,self.__fileName)) <NEW_LINE> comments += (' while(1){SMRE(%s, %sRunRec);};\n}\n' %(self.__fileName,self.__fileName)) <NEW_LINE> comments += ('***********************************************/\n') <NEW_LINE> return comments
Generate a usage example for this state machine
625941b2d6c5a10208143ddc
def legvander3d(x, y, z, deg): <NEW_LINE> <INDENT> return pu._vander_nd_flat((legvander, legvander, legvander), (x, y, z), deg)
Pseudo-Vandermonde matrix of given degrees. Returns the pseudo-Vandermonde matrix of degrees `deg` and sample points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`, then the pseudo-Vandermonde matrix is defined by .. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = L_i(x)*L_j(y)*L_k(z), where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading indices of `V` index the points `(x, y, z)` and the last index encodes the degrees of the Legendre polynomials. If ``V = legvander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns of `V` correspond to the elements of a 3-D coefficient array `c` of shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order .. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},... and ``np.dot(V, c.flat)`` and ``legval3d(x, y, z, c)`` will be the same up to roundoff. This equivalence is useful both for least squares fitting and for the evaluation of a large number of 3-D Legendre series of the same degrees and sample points. Parameters ---------- x, y, z : array_like Arrays of point coordinates, all of the same shape. The dtypes will be converted to either float64 or complex128 depending on whether any of the elements are complex. Scalars are converted to 1-D arrays. deg : list of ints List of maximum degrees of the form [x_deg, y_deg, z_deg]. Returns ------- vander3d : ndarray The shape of the returned matrix is ``x.shape + (order,)``, where :math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will be the same as the converted `x`, `y`, and `z`. See Also -------- legvander, legvander3d, legval2d, legval3d Notes ----- .. versionadded:: 1.7.0
625941b2462c4b4f79d1d466
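The least-squares use mentioned in the Notes can be made concrete. A minimal sketch against NumPy's public numpy.polynomial.legendre API; the fitted function and degrees are arbitrary choices:

import numpy as np
from numpy.polynomial.legendre import legvander3d, legval3d

rng = np.random.default_rng(0)
x, y, z = rng.uniform(-1, 1, (3, 200))
target = np.cos(x) * y + z**2
V = legvander3d(x, y, z, [2, 2, 2])            # shape (200, 27)
coef, *_ = np.linalg.lstsq(V, target, rcond=None)
fit = legval3d(x, y, z, coef.reshape(3, 3, 3))  # same as V @ coef, per the docstring
print(np.max(np.abs(fit - target)))             # small residual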
def test_phase(self): <NEW_LINE> <INDENT> self.assertEqual(str(self.complex_a.phase()),str(1.11)) <NEW_LINE> self.assertEqual(str(self.complex_h.phase()),str(0.98)) <NEW_LINE> self.assertEqual(str(self.complex_f.phase()),str(0.79)) <NEW_LINE> self.assertEqual(str(self.complex_b.phase()),str(1.11))
"Test de la función fase de números complejos
625941b2091ae35668666cfd
def recReveal(self, rows, cols): <NEW_LINE> <INDENT> if rows>=self.height or rows<0 or cols>=self.width or cols<0 or self.grid[rows][cols].isRevealed: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.grid[rows][cols].isRevealed=True <NEW_LINE> if self.grid[rows][cols].isBomb: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif self.grid[rows][cols].adj!=0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif self.grid[rows][cols].adj==0: <NEW_LINE> <INDENT> self.recReveal(rows-1, cols-1) <NEW_LINE> self.recReveal(rows-1, cols) <NEW_LINE> self.recReveal(rows-1, cols+1) <NEW_LINE> self.recReveal(rows, cols+1) <NEW_LINE> self.recReveal(rows+1, cols+1) <NEW_LINE> self.recReveal(rows+1, cols) <NEW_LINE> self.recReveal(rows+1, cols-1) <NEW_LINE> self.recReveal(rows, cols-1) <NEW_LINE> <DEDENT> return
@pre Grid is a valid object of type Board; rows and cols give the location in the board to reveal @post reveals the cell at (rows, cols) and, if it has no adjacent bombs, recursively reveals its eight neighbors @return none
625941b2046cf37aa974cae2
def get_orbital_elements_from_binaries( primaries, secondaries, G=None): <NEW_LINE> <INDENT> position = secondaries.position - primaries.position <NEW_LINE> velocity = secondaries.velocity - primaries.velocity <NEW_LINE> mass1 = primaries.mass <NEW_LINE> mass2 = secondaries.mass <NEW_LINE> total_mass = mass1 + mass2 <NEW_LINE> semimajor_axis, eccentricity, true_anomaly, inclination, long_asc_node, arg_per = get_orbital_elements_from_arrays( position, velocity, total_mass, G=G) <NEW_LINE> return ( mass1, mass2, semimajor_axis, eccentricity, true_anomaly, inclination, long_asc_node, arg_per)
Function that computes orbital elements from given primaries and secondaries. Elements are computed for the second particle in this set and the return values are: mass1, mass2, semimajor axis, eccentricity, cosine of true anomaly, cosine of inclination, cosine of the longitude of the ascending node and the cosine of the argument of pericenter. In case of a perfectly circular orbit the true anomaly and argument of pericenter cannot be determined; in this case the return values are 1.0 for both cosines.
625941b27b25080760e391fa
def db_check(item): <NEW_LINE> <INDENT> connection = sq.connect(path) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute('SELECT Link FROM Links WHERE Link = (?)', (item,)) <NEW_LINE> data = cursor.fetchall() <NEW_LINE> cursor.close() <NEW_LINE> connection.close() <NEW_LINE> return bool(data)
Return True if the link already exists in the Links table, otherwise False
625941b299cbb53fe679297f
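A usage sketch; db_check reads a module-level `path`, so the scope shown here is an assumption, and the database file name is a placeholder:

import sqlite3 as sq
path = 'links.db'    # assumed module-level variable read by db_check
conn = sq.connect(path)
conn.execute('CREATE TABLE IF NOT EXISTS Links (Link TEXT)')
conn.execute('INSERT INTO Links VALUES (?)', ('https://example.com',))
conn.commit()
conn.close()
print(db_check('https://example.com'))   # True
print(db_check('https://nope.example'))  # False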
def _init_keys(self): <NEW_LINE> <INDENT> self.schema = SCHEMA_ONBOARD <NEW_LINE> self.sysdef_section = "main" <NEW_LINE> self.add_key("schema-version", "") <NEW_LINE> self.add_key("use-system-defaults", False) <NEW_LINE> self.layout_key = self.add_key("layout", DEFAULT_LAYOUT) <NEW_LINE> self.theme_key = self.add_key("theme", DEFAULT_THEME) <NEW_LINE> self.add_key("system-theme-tracking-enabled", True) <NEW_LINE> self.add_key("system-theme-associations", {}, 'a{ss}') <NEW_LINE> self.add_key("snippets", {}, "as") <NEW_LINE> self.add_key("show-status-icon", True) <NEW_LINE> self.add_key("status-icon-provider", StatusIconProviderEnum.AppIndicator, enum={"auto" : 0, "GtkStatusIcon" : 1, "AppIndicator" : 2, }) <NEW_LINE> self.add_key("start-minimized", False) <NEW_LINE> self.add_key("show-tooltips", True) <NEW_LINE> self.add_key("key-label-font", "") <NEW_LINE> self.add_key("key-label-overrides", {}, "as") <NEW_LINE> self.add_key("current-settings-page", 0) <NEW_LINE> self.add_key("xembed-onboard", False, prop="onboard_xembed_enabled") <NEW_LINE> self.add_key("xembed-aspect-change-range", [0, 1.6]) <NEW_LINE> self.add_key("xembed-background-color", "#0000007F") <NEW_LINE> self.add_key("xembed-background-image-enabled", True) <NEW_LINE> self.add_key("xembed-unity-greeter-offset-x", 85.0) <NEW_LINE> self.keyboard = ConfigKeyboard() <NEW_LINE> self.window = ConfigWindow() <NEW_LINE> self.icp = ConfigICP(self) <NEW_LINE> self.auto_show = ConfigAutoShow() <NEW_LINE> self.universal_access = ConfigUniversalAccess(self) <NEW_LINE> self.theme_settings = ConfigTheme(self) <NEW_LINE> self.lockdown = ConfigLockdown(self) <NEW_LINE> self.scanner = ConfigScanner(self) <NEW_LINE> self.typing_assistance = ConfigTypingAssistance(self) <NEW_LINE> self.gss = ConfigGSS(self) <NEW_LINE> self.gdi = ConfigGDI(self) <NEW_LINE> self.children += [self.keyboard, self.window, self.icp, self.auto_show, self.universal_access, self.theme_settings, self.lockdown, self.gss, self.gdi, self.scanner, self.typing_assistance] <NEW_LINE> for _class in [CSMousetweaks1, CSMousetweaks0]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.mousetweaks = _class() <NEW_LINE> self.children.append(self.mousetweaks) <NEW_LINE> break <NEW_LINE> <DEDENT> except (SchemaError, ImportError, RuntimeError) as e: <NEW_LINE> <INDENT> _logger.info(unicode_str(e)) <NEW_LINE> self.mousetweaks = None <NEW_LINE> <DEDENT> <DEDENT> if self.mousetweaks is None: <NEW_LINE> <INDENT> _logger.warning("mousetweaks GSettings schema not found, " "mousetweaks integration disabled.") <NEW_LINE> <DEDENT> self.unity_greeter = None <NEW_LINE> if self.launched_by == self.LAUNCHER_UNITY_GREETER: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.unity_greeter = ConfigUnityGreeter(self) <NEW_LINE> self.children.append(self.unity_greeter) <NEW_LINE> <DEDENT> except (SchemaError, ImportError) as e: <NEW_LINE> <INDENT> _logger.warning(unicode_str(e))
Create key descriptions
625941b2de87d2750b85fb24
def __init__(self, initial_set=None): <NEW_LINE> <INDENT> if initial_set is None: <NEW_LINE> <INDENT> initial_set = [] <NEW_LINE> <DEDENT> if isinstance(initial_set, list): <NEW_LINE> <INDENT> self.objs = initial_set <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.objs = [initial_set]
Constructor. Stores a list argument as-is and wraps a single object in a list; an empty list is used when nothing is given.
625941b2adb09d7d5db6c52b
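The original signature used a shared mutable default (`initial_set=[]`), which the None sentinel above avoids. A minimal demonstration with a hypothetical class name:

class ObjSet:                         # hypothetical name for the class above
    def __init__(self, initial_set=None):
        self.objs = [] if initial_set is None else (
            initial_set if isinstance(initial_set, list) else [initial_set])

a = ObjSet()
a.objs.append(1)
b = ObjSet()
print(b.objs)    # [] -- with the old 'initial_set=[]' default this printed [1]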
def get_autostep(self): <NEW_LINE> <INDENT> if self._autostep >= 1E100: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._autostep + 1
Get autostep value (property)
625941b266673b3332b91e2d
def link(self, first_node, second_node, connection_type): <NEW_LINE> <INDENT> if not self._are_nodes_related(first_node, second_node, connection_type): <NEW_LINE> <INDENT> relation = rel(first_node, connection_type, second_node) <NEW_LINE> self.handler.create(relation)
This method links two objects of neo4j Node type with an edge labeled with the given connection_type. All types are defined in this class.
625941b23346ee7daa2b2afe
def ls(self, cmds): <NEW_LINE> <INDENT> print("\033[34;1mview current working directory\033[0m") <NEW_LINE> obj = self.socket.recv(4) <NEW_LINE> dir_size = struct.unpack('i', obj)[0] <NEW_LINE> recv_size = 0 <NEW_LINE> recv_bytes = b'' <NEW_LINE> while recv_size < dir_size: <NEW_LINE> <INDENT> temp_bytes = self.socket.recv(settings.max_recv_bytes) <NEW_LINE> recv_bytes += temp_bytes <NEW_LINE> recv_size += len(temp_bytes) <NEW_LINE> <DEDENT> print(recv_bytes.decode('gbk'))
View the file listing of the current working directory
625941b2091ae35668666cfe
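For context, a minimal sketch of the sender side of the framing this client reads: a 4-byte struct 'i' length header, then the gbk-encoded payload. The `conn` socket and the directory-listing payload are assumptions mirroring what the receive loop above expects:

import os
import struct

# 'conn' is an already-connected socket object (assumption).
listing = '\n'.join(os.listdir('.')).encode('gbk')
conn.send(struct.pack('i', len(listing)))   # 4-byte native-endian length header
conn.sendall(listing)                       # payload the client loops to receive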
def openstack_credentials_harvester(self): <NEW_LINE> <INDENT> STREAM.info("==> Get Openstack cluster connection settings") <NEW_LINE> try: <NEW_LINE> <INDENT> configfile, section = self.openstack_cluster.split("::") <NEW_LINE> STREAM.debug(" -> Using user configuration file %s" % configfile) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> configfile = LoadSettings.GENERAL_CONFIG <NEW_LINE> STREAM.debug(" -> Using general configuration file %s" % configfile) <NEW_LINE> section = self.openstack_cluster <NEW_LINE> <DEDENT> config = ConfigParser() <NEW_LINE> config.read(configfile) <NEW_LINE> args = {key: value.strip() for key, value in config.items(section)} <NEW_LINE> self.clusters[section] = args <NEW_LINE> if self.clusters == {}: <NEW_LINE> <INDENT> STREAM.error(" -> There are no connection settings for the Openstack clusters found!\nMake sure" "that parameter(openstack_cluster) specified correctly.") <NEW_LINE> STREAM.error(" -> Export in Openstack passed.") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> STREAM.info(" -> Found connection settings for the Openstack cluster") <NEW_LINE> STREAM.info(" -> Target Openstack cluster set to: %s" % section) <NEW_LINE> target_cluster_name = section <NEW_LINE> return target_cluster_name
Method to get cluster's connection settings from the configuration file
625941b23eb6a72ae02ec270
def put(self, base, value='', query='', tree=0): <NEW_LINE> <INDENT> value = json.dumps(value) <NEW_LINE> if 'linux' in sys.platform: <NEW_LINE> <INDENT> cmd = "put\n{}\n{}\n{}\n{}\n".format(base, query, tree, value) <NEW_LINE> return self._dispatch(cmd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> import requests <NEW_LINE> device_ip, username, password = self._get_device_access_info() <NEW_LINE> device_api = 'http://{}/api/{}/{}'.format(device_ip, base, query) <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.put(device_api, headers={"Content-Type": "application/x-www-form-urlencoded"}, auth=self._get_auth(device_ip, username, password), data={"data": '{}'.format(value)}) <NEW_LINE> <DEDENT> except (requests.exceptions.Timeout, requests.exceptions.ConnectionError): <NEW_LINE> <INDENT> print("Timeout: device at {} did not respond.".format(device_ip)) <NEW_LINE> return None <NEW_LINE> <DEDENT> return json.loads(response.text)
Constructs and sends a put request to update or add specified data to the device router tree. The behavior of this method is contextual: - If the app is installed on (and executed from) a device, it directly updates or adds the specified data to the router tree. - If the app is running remotely on a computer, it calls the HTTP PUT method to update or add the specified data. Args: base: String representing a path to a resource on a router tree (i.e. '/config/system/logging/level'). value: Not required. query: Not required. tree: Not required. Returns: A dictionary containing the response (i.e. {"success": True, "data": {}})
625941b2507cdc57c6306a69
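A hedged usage sketch; `client` stands for an instance of the class defining put(), the path mirrors the docstring's example, and the response shape follows the Returns section above:

# Hypothetical call against the router tree path from the docstring.
result = client.put('config/system/logging', value={'level': 'info'})
if result and result.get('success'):
    print('logging level updated')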
def cotton_york(self, name=None, latex_name=None): <NEW_LINE> <INDENT> n = self._ambient_domain.dimension() <NEW_LINE> if n != 3: <NEW_LINE> <INDENT> raise ValueError("the Cotton-York tensor is only defined for a " + "manifold of dimension 3") <NEW_LINE> <DEDENT> if self._cotton_york is None: <NEW_LINE> <INDENT> cot = self.cotton() <NEW_LINE> eps = self.volume_form(2) <NEW_LINE> cy = eps.contract(0, 1, cot, 2, 1)/2 <NEW_LINE> name = name or 'CY(' + self._name + ')' <NEW_LINE> latex_name = latex_name or r'\mathrm{CY}(' + self._latex_name + ')' <NEW_LINE> cy.set_name(name=name, latex_name=latex_name) <NEW_LINE> self._cotton_york = cy <NEW_LINE> <DEDENT> return self._cotton_york
Return the Cotton-York conformal tensor associated with the metric. The tensor has type (0,2) and is only defined for manifolds of dimension 3. It is defined in terms of the Cotton tensor `C` (see :meth:`cotton`) or the Schouten tensor `S` (see :meth:`schouten`): .. MATH:: CY_{ij} = \frac{1}{2} \epsilon^{kl}_{\ \ \, i} C_{jlk} = \epsilon^{kl}_{\ \ \, i} \nabla_k S_{lj} INPUT: - ``name`` -- (default: ``None``) name given to the Cotton-York tensor; if ``None``, it is set to "CY(g)", where "g" is the metric's name - ``latex_name`` -- (default: ``None``) LaTeX symbol to denote the Cotton-York tensor; if ``None``, it is set to "\\mathrm{CY}(g)", where "g" is the metric's name OUTPUT: - the Cotton-York conformal tensor `CY`, as an instance of :class:`~sage.manifolds.differentiable.tensorfield.TensorField` EXAMPLES: Compute the determinant of the Cotton-York tensor for the Heisenberg group with the left invariant metric:: sage: M = Manifold(3, 'Nil', start_index=1) sage: X.<x,y,z> = M.chart() sage: g = M.riemannian_metric('g') sage: g[1,1], g[2,2], g[2,3], g[3,3] = 1, 1+x^2, -x, 1 sage: g.display() g = dx⊗dx + (x^2 + 1) dy⊗dy - x dy⊗dz - x dz⊗dy + dz⊗dz sage: CY = g.cotton_york() ; CY # long time Tensor field CY(g) of type (0,2) on the 3-dimensional differentiable manifold Nil sage: CY.display() # long time CY(g) = 1/2 dx⊗dx + (-x^2 + 1/2) dy⊗dy + x dy⊗dz + x dz⊗dy - dz⊗dz sage: det(CY[:]) # long time -1/4
625941b291af0d3eaac9b7aa
def complete_all(self, pick_up = True): <NEW_LINE> <INDENT> while self.complete_next(pick_up): <NEW_LINE> <INDENT> pass
Runs all branches until completion. This is a convenience wrapper around complete_next(), and the pick_up argument is passed along. @type pick_up: boolean @param pick_up: Passed on to each call of complete_next().
625941b215baa723493c3d0a
def kmeans_clustering(cluster_list, num_clusters, num_iterations): <NEW_LINE> <INDENT> list_len = len(cluster_list) <NEW_LINE> index_by_pop = list(range(list_len)) <NEW_LINE> index_by_pop.sort(reverse=True, key=lambda idx: cluster_list[idx].total_population()) <NEW_LINE> clust_ctrs = [] <NEW_LINE> for idx in range(num_clusters): <NEW_LINE> <INDENT> clust_ctrs.append(cluster_list[index_by_pop[idx]]) <NEW_LINE> <DEDENT> for dummy_idx0 in range(num_iterations): <NEW_LINE> <INDENT> new_clusters = [alg_cluster.Cluster(set([]), 0, 0, 0, 0) for dummy_idx in range(num_clusters)] <NEW_LINE> for pt_idx in range(list_len): <NEW_LINE> <INDENT> min_dist, closest_ctr = float('inf'), 0 <NEW_LINE> for clust_idx in range(num_clusters): <NEW_LINE> <INDENT> dist = clust_ctrs[clust_idx].distance(cluster_list[pt_idx]) <NEW_LINE> if dist < min_dist: <NEW_LINE> <INDENT> min_dist, closest_ctr = dist, clust_idx <NEW_LINE> <DEDENT> <DEDENT> new_clusters[closest_ctr].merge_clusters(cluster_list[pt_idx]) <NEW_LINE> <DEDENT> for idx in range(num_clusters): <NEW_LINE> <INDENT> new_pos = (new_clusters[idx].horiz_center(), new_clusters[idx].vert_center()) <NEW_LINE> clust_ctrs[idx] = alg_cluster.Cluster(set([]), new_pos[0], new_pos[1], 0, 0) <NEW_LINE> <DEDENT> <DEDENT> return new_clusters
Takes a list of Cluster objects and applies k-means clustering as described in the pseudo-code KMeansClustering from Homework 3 to this list of clusters. This function should compute an initial list of clusters (line 2 in the pseudo-code) with the property that each cluster consists of a single county chosen from the set of the num_cluster counties with the largest populations. The function should then compute num_iterations of k-means clustering and return this resulting list of clusters.
625941b28a43f66fc4b53e09
def UpdateBlacklist(self, success_symbols, fail_symbols): <NEW_LINE> <INDENT> to_write = [] <NEW_LINE> symbol_to_data = {} <NEW_LINE> with open(self._symbol_data_path, 'w') as symbol_file: <NEW_LINE> <INDENT> symbol_writer = csv.DictWriter(symbol_file, const.SYMBOL_DATA_HEADER) <NEW_LINE> if self._symbol_data: <NEW_LINE> <INDENT> symbol_to_data = {row[const.SYMBOL_KEY]: row for row in self._symbol_data} <NEW_LINE> <DEDENT> for symbol in success_symbols: <NEW_LINE> <INDENT> if symbol not in symbol_to_data: <NEW_LINE> <INDENT> to_write.append(self._CreateCsvLine(symbol, 1, 0)) <NEW_LINE> <DEDENT> <DEDENT> for symbol in fail_symbols: <NEW_LINE> <INDENT> if symbol not in symbol_to_data: <NEW_LINE> <INDENT> to_write.append(self._CreateCsvLine(symbol, 0, 1)) <NEW_LINE> <DEDENT> <DEDENT> for symbol, row in symbol_to_data.iteritems(): <NEW_LINE> <INDENT> if symbol in success_symbols: <NEW_LINE> <INDENT> row[const.SUCCESS_KEY] = 1 + int(row[const.SUCCESS_KEY]) <NEW_LINE> <DEDENT> if symbol in fail_symbols: <NEW_LINE> <INDENT> row[const.FAILURE_KEY] = 1 + int(row[const.FAILURE_KEY]) <NEW_LINE> <DEDENT> <DEDENT> symbol_writer.writeheader() <NEW_LINE> symbol_writer.writerows(to_write) <NEW_LINE> symbol_writer.writerows(symbol_to_data.itervalues())
Update stock symbol data using given failure and success records. Args: success_symbols: list of strings, List of symbols that succeeded. fail_symbols: list of strings, List of failed symbols.
625941b207d97122c4178621
def test_mod(self): <NEW_LINE> <INDENT> obj_a = std_wobj("obj_a") <NEW_LINE> obj_b = std_wobj("obj_b", dep_vector=array([8, -1, -4])) <NEW_LINE> obj_c = std_wobj("obj_c", dep_vector=array([6, 0, 0])) <NEW_LINE> aobj = obj_a % obj_b <NEW_LINE> assert aobj == obj_c <NEW_LINE> assert aobj.dep_name == r"obj_a%obj_b" <NEW_LINE> assert ((5 % obj_a).dep_vector == array([5, 0, 1])).all() <NEW_LINE> assert ((obj_a % 2).dep_vector == array([0, 1, 0])).all() <NEW_LINE> obj_a %= 2 <NEW_LINE> assert (obj_a.dep_vector == array([0, 1, 0])).all()
Test __mod__ method behavior.
625941b2460517430c393f29
@CacheLock() <NEW_LINE> def add_cleanup_pod(url): <NEW_LINE> <INDENT> variance = random.uniform(0.1, 1.5) <NEW_LINE> grace = round(settings.KUBERNETES_POD_TERMINATION_GRACE_PERIOD_SECONDS * variance) <NEW_LINE> pods = cache.get('cleanup_pods', {}) <NEW_LINE> pods[url] = (datetime.utcnow() + timedelta(seconds=grace)) <NEW_LINE> cache.set('cleanup_pods', pods) <NEW_LINE> pod = cache.get(url) <NEW_LINE> grace = settings.KUBERNETES_POD_TERMINATION_GRACE_PERIOD_SECONDS <NEW_LINE> pd = datetime.utcnow() + timedelta(seconds=grace) <NEW_LINE> timestamp = str(pd.strftime(MockSchedulerClient.DATETIME_FORMAT)) <NEW_LINE> pod['metadata']['deletionTimestamp'] = timestamp <NEW_LINE> cache.set(url, pod)
Populate the cleanup-pods cache, mapping each pod URL to its termination deadline
625941b2b5575c28eb68dd94