code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def generate_pass(minimum_length=9): <NEW_LINE> <INDENT> password = '' <NEW_LINE> while len(password) < minimum_length: <NEW_LINE> <INDENT> password = '{}_{}'.format(names.get_last_name().lower(), random.randint(0, 9999)) <NEW_LINE> <DEDENT> return password
Return a really simple password. This will NOT actually protect against any attackers. It is just so we can have randomly-generated passwords in our config files and such.
625941b5627d3e7fe0d68c32
def QA_fetch_get_commodity_option_AL_contract_time_to_market(): <NEW_LINE> <INDENT> result = QA_fetch_get_option_list('tdx') <NEW_LINE> rows = [] <NEW_LINE> result['meaningful_name'] = None <NEW_LINE> for idx in result.index: <NEW_LINE> <INDENT> strCategory = result.loc[idx, "category"] <NEW_LINE> strMarket = result.loc[idx, "market"] <NEW_LINE> strCode = result.loc[idx, "code"] <NEW_LINE> strName = result.loc[idx, 'name'] <NEW_LINE> strDesc = result.loc[idx, 'desc'] <NEW_LINE> if strName.startswith("AL"): <NEW_LINE> <INDENT> row = result.loc[idx] <NEW_LINE> rows.append(row) <NEW_LINE> <DEDENT> <DEDENT> return rows
#🛠todo 获取期权合约的上市日期 ? 暂时没有。 :return: list Series
625941b53539df3088e2e12f
def callFromThread(self, f, *args, **kw): <NEW_LINE> <INDENT> assert isinstance(f, collections.Callable), "%s is not callable" % f <NEW_LINE> p = functools.partial(f, *args, **kw) <NEW_LINE> self._io_loop.add_callback(p)
See L{twisted.internet.interfaces.IReactorThreads.callFromThread}.
625941b599fddb7c1c9de178
def silence_pylint(self): <NEW_LINE> <INDENT> assert self
silence pylint
625941b523e79379d52ee34d
def test_read_celsius(self): <NEW_LINE> <INDENT> self.assertAlmostEqual( self.sensor.read_celsius(), 25.0, delta=3 )
Note: We assert almost equal because there are slight floating point differences between python 2 and 3.
625941b5004d5f362079a11c
def find_donor(self, donor: Donor): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.donors.get(donor.id) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return self.donors.get(int(donor))
Searches through the donor list and returns the donor; returns None if not found. Set up to first check for the donor id; if this fails, assumes an integer and will check for that.
625941b5a79ad161976cbf2a
def get_root(self): <NEW_LINE> <INDENT> if self.parent is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> component = self <NEW_LINE> while component.parent: <NEW_LINE> <INDENT> component = component.parent <NEW_LINE> <DEDENT> return component
Returns the root component.
625941b50a366e3fb873e5fb
def retrieve(self, cli_id, **kwargs): <NEW_LINE> <INDENT> parameters = dict(cli_id=cli_id, **kwargs) <NEW_LINE> return self._call('GET', data=ujson.dumps(parameters))[0]
:type cli_id: int :param dict kwargs: creative_id, line_item_id, start_date, end_date, active
625941b592d797404e303f6f
def prepare_dictionary(): <NEW_LINE> <INDENT> s = request_slice() <NEW_LINE> store(BC_QUOTE_NAME, s, 0) <NEW_LINE> store(BC_FLOW_RETURN, s, 1) <NEW_LINE> add_definition('define', s)
setup the initial dictionary
625941b54428ac0f6e5ba5de
def _on_group_expanded(self, expander, paramSpec, group_label): <NEW_LINE> <INDENT> if expander.get_expanded(): <NEW_LINE> <INDENT> self._expanded_groups.add(group_label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._expanded_groups.remove(group_label) <NEW_LINE> <DEDENT> self._preferences.set("expanded-symbol-groups", ",".join(self._expanded_groups))
The Expander for a symbol group has been expanded
625941b5d58c6744b4257a45
def test_email_found(self): <NEW_LINE> <INDENT> response = self.client.post('/password_reset/', {'email': '[email protected]'}) <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> self.assertEqual(len(mail.outbox), 1) <NEW_LINE> self.assertTrue("http://" in mail.outbox[0].body) <NEW_LINE> self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email) <NEW_LINE> self.assertFalse(mail.outbox[0].message().is_multipart())
Email is sent if a valid email address is provided for password reset
625941b597e22403b379cd7d
def process_image(image): <NEW_LINE> <INDENT> image = cv2.cvtColor(cv2.resize(image, (80, 80)), cv2.COLOR_BGR2GRAY) <NEW_LINE> """ 이미지 임계처리""" <NEW_LINE> ret, image = cv2.threshold(image,1,255,cv2.THRESH_BINARY) <NEW_LINE> return image
이미지를 흑백으로, 80 * 80 크기로 잘라냄
625941b5851cf427c661a300
def remove_stateless_service(self, service_type, id_resource): <NEW_LINE> <INDENT> if service_type not in ["rgw"]: <NEW_LINE> <INDENT> raise orchestrator.OrchestratorError( "{} service not supported".format(service_type)) <NEW_LINE> <DEDENT> group = "{}s".format(service_type) <NEW_LINE> if service_type == "rgw": <NEW_LINE> <INDENT> group_prefix = "rgw_zone_{}" <NEW_LINE> <DEDENT> resource_group = group_prefix.format(id_resource) <NEW_LINE> hosts_list = list(InventoryGroup(resource_group, self.ar_client)) <NEW_LINE> limited = ",".join(hosts_list) <NEW_LINE> extravars = {"ireallymeanit": "yes"} <NEW_LINE> playbook_operation = PlaybookOperation(client=self.ar_client, playbook=PURGE_PLAYBOOK, logger=self.log, result_pattern="", params=extravars, querystr_dict={"limit": limited}) <NEW_LINE> playbook_operation.output_wizard = ProcessPlaybookResult(self.ar_client, self.log) <NEW_LINE> playbook_operation.event_filter_list = ["playbook_on_stats"] <NEW_LINE> clean_inventory = {} <NEW_LINE> clean_inventory[resource_group] = hosts_list <NEW_LINE> clean_inventory[group] = hosts_list <NEW_LINE> playbook_operation.clean_hosts_on_success = clean_inventory <NEW_LINE> self.log.info("Removing service %s for resource %s", service_type, id_resource) <NEW_LINE> self._launch_operation(playbook_operation) <NEW_LINE> return playbook_operation
Remove a stateless service providing <sv_id> resources :svc_type : Kind of service (nfs, rgw, mds) :id_resource : Id of the resource provided <zone name> if service is RGW ... : returns : Completion object
625941b56fb2d068a760ee86
def __rxor__(self, other): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return BF("(%s) % (%s)" %(other.expression(), self.expression()), "%s_XOR_%s" %(other.name(), self.name())) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise InvalidBooleanFunctionError("The object is not a Boolean Function")
Finds the XOR of the BFs
625941b53346ee7daa2b2b4d
def ad_get_version(): <NEW_LINE> <INDENT> ad_get_version = libad4_dll.ad_get_version <NEW_LINE> ad_get_version.restype = c_uint32 <NEW_LINE> res = ad_get_version() <NEW_LINE> return res
Return version of *LIBAD4.dll*.
625941b530dc7b7665901750
def get_stats(self): <NEW_LINE> <INDENT> if self.login(): <NEW_LINE> <INDENT> responseinfo = self.Token.get(self.address + '/stats') <NEW_LINE> return responseinfo.json()
salt.netapi.rest_cherrypy.app.Stats! :return: Return a dump of statistics collected from the CherryPy server
625941b5462c4b4f79d1d4b5
def get_n_ring_neighbor(faces, n=1, ordinal=False): <NEW_LINE> <INDENT> n_vtx = np.max(faces) + 1 <NEW_LINE> coo_w = mesh_edges(faces) <NEW_LINE> csr_w = coo_w.tocsr() <NEW_LINE> n_ring_neighbors = [csr_w.indices[csr_w.indptr[i]:csr_w.indptr[i+1]] for i in range(n_vtx)] <NEW_LINE> n_ring_neighbors = [set(i) for i in n_ring_neighbors] <NEW_LINE> if n > 1: <NEW_LINE> <INDENT> one_ring_neighbors = [i.copy() for i in n_ring_neighbors] <NEW_LINE> n_th_ring_neighbors = [i.copy() for i in n_ring_neighbors] <NEW_LINE> for i in range(n-1): <NEW_LINE> <INDENT> for neighbor_set in n_th_ring_neighbors: <NEW_LINE> <INDENT> neighbor_set_tmp = neighbor_set.copy() <NEW_LINE> for v_id in neighbor_set_tmp: <NEW_LINE> <INDENT> neighbor_set.update(one_ring_neighbors[v_id]) <NEW_LINE> <DEDENT> <DEDENT> if i == 0: <NEW_LINE> <INDENT> for v_id in range(n_vtx): <NEW_LINE> <INDENT> n_th_ring_neighbors[v_id].remove(v_id) <NEW_LINE> <DEDENT> <DEDENT> for v_id in range(n_vtx): <NEW_LINE> <INDENT> n_th_ring_neighbors[v_id] -= n_ring_neighbors[v_id] <NEW_LINE> n_ring_neighbors[v_id] |= n_th_ring_neighbors[v_id] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif n == 1: <NEW_LINE> <INDENT> n_th_ring_neighbors = n_ring_neighbors <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError("The number of rings should be equal or greater than 1!") <NEW_LINE> <DEDENT> if ordinal: <NEW_LINE> <INDENT> return n_th_ring_neighbors <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return n_ring_neighbors
get n ring neighbor from faces array :param faces: the array of shape [n_triangles, 3] :param n: integer specifying which ring should be got :param ordinal: bool True: get the n_th ring neighbor False: get the n ring neighbor :return: list each index of the list represents a vertex number each element is a set which includes neighbors of corresponding vertex
625941b51f037a2d8b945fe3
def _add_iso_created_at(self, tweet_dict): <NEW_LINE> <INDENT> if tweet_dict.get('created_at'): <NEW_LINE> <INDENT> tweet_dict['traptor']['created_at_iso'] = self._tweet_time_to_iso( tweet_dict['created_at'] ) <NEW_LINE> <DEDENT> return tweet_dict
Add the created_at_iso to the tweet. :param tweet_dict: tweet in json format :return tweet_dict: with created_at_iso field
625941b5d53ae8145f87a05c
def get_lookup_from_csv(csv_table_uri, key_field): <NEW_LINE> <INDENT> def u(string): <NEW_LINE> <INDENT> if type(string) is StringType: <NEW_LINE> <INDENT> return unicode(string, 'utf-8') <NEW_LINE> <DEDENT> return string <NEW_LINE> <DEDENT> with open(csv_table_uri, 'rU') as csv_file: <NEW_LINE> <INDENT> dialect = csv.Sniffer().sniff('\n'.join(csv_file.readlines(1024)), delimiters=";,") <NEW_LINE> csv_file.seek(0) <NEW_LINE> csv_reader = csv.reader(csv_file, dialect=dialect) <NEW_LINE> header_row = map(lambda s: u(s), csv_reader.next()) <NEW_LINE> key_index = header_row.index(key_field) <NEW_LINE> index_to_field = dict(zip(range(len(header_row)), header_row)) <NEW_LINE> lookup_dict = {} <NEW_LINE> for line_num, line in enumerate(csv_reader): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> key_value = _smart_cast(line[key_index]) <NEW_LINE> <DEDENT> except IndexError as error: <NEW_LINE> <INDENT> LOGGER.error('CSV line %s (%s) should have index %s', line_num, line, key_index) <NEW_LINE> raise error <NEW_LINE> <DEDENT> lookup_dict[key_value] = ( dict([(index_to_field[index], _smart_cast(value)) for index, value in zip(range(len(line)), line)])) <NEW_LINE> <DEDENT> return lookup_dict
Read CSV table file in as dictionary. Creates a python dictionary to look up the rest of the fields in a csv table indexed by the given key_field Args: csv_table_uri (string): a URI to a csv file containing at least the header key_field key_field: (description) Returns: lookup_dict (dict): returns a dictionary of the form {key_field_0: {header_1: val_1_0, header_2: val_2_0, etc.} depending on the values of those fields
625941b51f037a2d8b945fe4
def __init__(self): <NEW_LINE> <INDENT> self.end = False <NEW_LINE> self.children = {} <NEW_LINE> self.debug = False
Initialize your data structure here.
625941b5ab23a570cc24ff64
def get_remote_address(self): <NEW_LINE> <INDENT> return self._get_remote() or request.remote_addr
:return: client ip address
625941b5925a0f43d2549c58
def get_request_for_user(user): <NEW_LINE> <INDENT> request = RequestFactory() <NEW_LINE> request.user = user <NEW_LINE> request.META = {} <NEW_LINE> request.is_secure = lambda: True <NEW_LINE> request.get_host = lambda: "edx.org" <NEW_LINE> return request
Create a request object for user.
625941b5dc8b845886cb5319
def read_analogsignal(self, gid=None, time_unit=pq.ms, t_start=None, t_stop=None, sampling_period=None, id_column=0, time_column=1, value_column=2, value_type=None, value_unit=None, lazy=False): <NEW_LINE> <INDENT> assert not lazy, 'Do not support lazy' <NEW_LINE> return self.__read_analogsignals([gid], time_unit, t_start, t_stop, sampling_period=sampling_period, id_column=id_column, time_column=time_column, value_columns=value_column, value_types=value_type, value_units=value_unit)[0]
Reads an AnalogSignal with specified neuron ID from the DAT data. Arguments ---------- gid : int, default: None The GDF ID of the returned SpikeTrain. gdf_id must be specified if the GDF file contains neuron IDs, the default None then raises an error. Specify an empty list [] to retrieve the spike trains of all neurons. time_unit : Quantity (time), optional, default: quantities.ms The time unit of recorded time stamps. t_start : Quantity (time), optional, default: 0 * pq.ms Start time of SpikeTrain. t_stop : Quantity (time), default: None Stop time of SpikeTrain. t_stop must be specified, the default None raises an error. sampling_period : Quantity (frequency), optional, default: None Sampling period of the recorded data. id_column : int, optional, default: 0 Column index of neuron IDs. time_column : int, optional, default: 1 Column index of time stamps. value_column : int, optional, default: 2 Column index of the analog values recorded. value_type : str, optional, default: None Nest data type of the analog values recorded, eg.'V_m', 'I', 'g_e'. value_unit : Quantity (amplitude), default: None The physical unit of the recorded signal values. lazy : bool, optional, default: False Returns ------- spiketrain : SpikeTrain The requested SpikeTrain object with an annotation 'id' corresponding to the gdf_id parameter.
625941b566656f66f7cbbf8f
def readSDT(self, bits,pos): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.leng['actual']+184 > self.leng['total']: <NEW_LINE> <INDENT> pos.setMinPos((self.leng['total']-self.leng['actual'])*8) <NEW_LINE> pos.setCrc() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.leng['actual']=self.leng['actual']+184 <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> if self.stdItem['service_id'] == None: self.stdItem['service_id'] = getBits(bits,pos,16) <NEW_LINE> if self.stdItem['reserved_1'] == None: self.stdItem['reserved_1'] = getBits(bits,pos,6) <NEW_LINE> if self.stdItem['EIT_schedule_flag'] == None: self.stdItem['EIT_schedule_flag'] = getBits(bits,pos,1) <NEW_LINE> if self.stdItem['EIT_present_following_flag'] == None: self.stdItem['EIT_present_following_flag'] = getBits(bits,pos,1) <NEW_LINE> if self.stdItem['running_status'] == None: self.stdItem['running_status'] = getBits(bits,pos,3) <NEW_LINE> if self.stdItem['free_CA_mode'] == None: self.stdItem['free_CA_mode'] = getBits(bits,pos,1) <NEW_LINE> if self.stdItem['descriptors_loop_length'] == None: self.stdItem['descriptors_loop_length'] = getBits(bits,pos,12) <NEW_LINE> if self.stdItem['DVB_DescriptorTag'] == None: self.stdItem['DVB_DescriptorTag'] = getBits(bits,pos,8) <NEW_LINE> if self.stdItem['DVB_DescriptorTag'] == 72: <NEW_LINE> <INDENT> self.service_descriptor(bits,pos) <NEW_LINE> <DEDENT> self.stdData['data'].append(self.stdItem) <NEW_LINE> self.stdItem = self.getClearStdItem() <NEW_LINE> <DEDENT> <DEDENT> except CRC: <NEW_LINE> <INDENT> if self.stdData['crc_32'] == None: self.stdData['crc_32'] = getBits(bits,pos,32,True) <NEW_LINE> <DEDENT> except PositionZero: <NEW_LINE> <INDENT> return
Method initializes reading of the next STD packet :param bits: Packet data in bits format :return: None
625941b55f7d997b87174880
@jit(float64(float64, float64), nopython=True) <NEW_LINE> def _integrand(φ, lΔ): <NEW_LINE> <INDENT> cos_φ = np.cos(φ) <NEW_LINE> sin_φ = np.sin(φ) <NEW_LINE> lΔ_2 = 0.5 * lΔ <NEW_LINE> temp = np.sin(lΔ_2 * cos_φ) * np.sin(lΔ_2 * sin_φ) / (lΔ * lΔ * cos_φ * sin_φ) <NEW_LINE> return temp * temp
integrand of PWF c.f. eq. 91 in Wu, J. H. P., Balbi, A., Borrill, J., Ferreira, P. G., Hanany, S., Jaffe, A. H., et al. (2001). Asymmetric Beams in Cosmic Microwave Background Anisotropy Experiments. The Astrophysical Journal Supplement Series, 132(1), 1–17. http://doi.org/10.1086/318947
625941b550812a4eaa59c10b
def intersect(self, nums1, nums2): <NEW_LINE> <INDENT> result = [] <NEW_LINE> if not nums1 or not nums2: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> nums1_count = {} <NEW_LINE> for num in nums1: <NEW_LINE> <INDENT> nums1_count[num] = nums1_count.setdefault(num, 0) + 1 <NEW_LINE> <DEDENT> nums2_count = {} <NEW_LINE> for num in nums2: <NEW_LINE> <INDENT> nums2_count[num] = nums2_count.setdefault(num, 0) + 1 <NEW_LINE> <DEDENT> for key in nums1_count: <NEW_LINE> <INDENT> if key in nums2_count: <NEW_LINE> <INDENT> result.extend([key] * min(nums1_count[key], nums2_count[key])) <NEW_LINE> <DEDENT> <DEDENT> return result
Time: O(m+n) 56ms. Space: O(m+n). :type nums1: List[int] :type nums2: List[int] :rtype: List[int]
625941b58e7ae83300e4adb1
def __init__(self, input_size, output_size, hidden_layers, dropout=0.5): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.hidden_layers = nn.ModuleList([nn.Linear(input_size, hidden_layers[0])]) <NEW_LINE> layer_sizes = zip(hidden_layers[:-1], hidden_layers[1:]) <NEW_LINE> self.hidden_layers.extend([nn.Linear(h1, h2) for h1, h2 in layer_sizes]) <NEW_LINE> self.output = nn.Linear(hidden_layers[-1], output_size) <NEW_LINE> self.dropout = nn.Dropout(p=dropout)
Builds a feedforward network with arbitrary hidden layers. Arguments --------- input_size: integer, size of the input output_size: integer, size of the output layer hidden_layers: list of integers, the sizes of the hidden layers dropout: float between 0 and 1
625941b5de87d2750b85fb73
def makepasv(self): <NEW_LINE> <INDENT> if self.af == socket.AF_INET: <NEW_LINE> <INDENT> untrusted_host, port = parse227(self.sendcmd('PASV')) <NEW_LINE> if self.trust_server_pasv_ipv4_address: <NEW_LINE> <INDENT> host = untrusted_host <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> host = self.sock.getpeername()[0] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername()) <NEW_LINE> <DEDENT> return host, port
Internal: Does the PASV or EPSV handshake -> (address, port)
625941b5aad79263cf390820
def execute(self, *args, **options): <NEW_LINE> <INDENT> from django.utils.encoding import smart_str <NEW_LINE> import sys <NEW_LINE> try: <NEW_LINE> <INDENT> self.stdout = options.get('stdout', sys.stdout) <NEW_LINE> self.stderr = options.get('stderr', sys.stderr) <NEW_LINE> if self.requires_model_validation: <NEW_LINE> <INDENT> self.validate() <NEW_LINE> <DEDENT> output = self.handle(*args, **options) <NEW_LINE> if output: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> except CommandError as e: <NEW_LINE> <INDENT> self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
Try to execute this command, performing model validation if needed (as controlled by the attribute ``self.requires_model_validation``). If the command raises a ``CommandError``, intercept it and print it sensibly to stderr.
625941b5d8ef3951e3243323
def __init__(self, df, features_dict): <NEW_LINE> <INDENT> self.df = df <NEW_LINE> self.features_dict = features_dict <NEW_LINE> self.output_df = self.df.copy(deep=True)
df: pandas dataframe features_dict: dict with key encoding type and values list
625941b526238365f5f0ec4e
def c_params(self, need_type=True, need_name=True): <NEW_LINE> <INDENT> if self.params and (need_type or need_name): <NEW_LINE> <INDENT> if need_type and need_name: <NEW_LINE> <INDENT> return ", ".join([param.c() for param in self.params]) <NEW_LINE> <DEDENT> elif need_type: <NEW_LINE> <INDENT> return ", ".join([param.ty for param in self.params]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ", ".join([param.name for param in self.params]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return "void" if need_type else ""
Return the parameter list in C.
625941b594891a1f4081b88d
def do_http_exchange(host, port, resource, file_name): <NEW_LINE> <INDENT> data_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> data_socket.connect((host, port)) <NEW_LINE> request = b'GET ' + resource + b' HTTP/1.1\r\nHost: ' + host + b'\r\n\r\n' <NEW_LINE> data_socket.sendall(request) <NEW_LINE> status_code = read_status_code(data_socket) <NEW_LINE> encoding_type = get_encoding_type(data_socket) <NEW_LINE> message = read_content(encoding_type, data_socket) <NEW_LINE> write_file(file_name, message) <NEW_LINE> return status_code
Get an HTTP resource from a server :param bytes host: the ASCII domain name or IP address of the server machine (i.e., host) to connect to :param int port: port number to connect to on server host :param bytes resource: the ASCII path/name of resource to get. This is everything in the URL after the domain name, including the first /. :param file_name: string (str) containing name of file in which to store the retrieved resource :return: the status code :rtype: int :author: Claudia Poptile + Parker Splitt
625941b5fb3f5b602dac347c
def test_lower(self): <NEW_LINE> <INDENT> self.failUnless('140conf' in find_hash(self.d)) <NEW_LINE> self.failUnless(u'дфса' in find_hash(self.e))
test that case of all hashes is lowered
625941b560cbc95b062c632e
def test_constructor_with_only_a_db(self): <NEW_LINE> <INDENT> index = TextIndex(self.db) <NEW_LINE> self.assertIsInstance(index, TextIndex) <NEW_LINE> self.assertIsNone(index.design_document_id) <NEW_LINE> self.assertIsNone(index.name) <NEW_LINE> self.assertEqual(index.type, 'text') <NEW_LINE> self.assertEqual(index.definition, {})
Test instantiating an TextIndex with a database only. As a side effect this test also tests the design_document_id, name, type, and definition property methods.
625941b515baa723493c3d57
def subs(self, *args, **kwargs): <NEW_LINE> <INDENT> return sum([ Dyadic([(v[0].subs(*args, **kwargs), v[1], v[2])]) for v in self.args])
Substitution on the Dyadic. Examples ======== >>> from sympy.physics.mechanics import ReferenceFrame >>> from sympy import Symbol >>> N = ReferenceFrame('N') >>> s = Symbol('s') >>> a = s * (N.x|N.x) >>> a.subs({s: 2}) 2*(N.x|N.x)
625941b54e4d5625662d41c4
def start_buy(self): <NEW_LINE> <INDENT> self._start(self._buy)
Start buying.
625941b5c432627299f04a2a
def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> if obj1._meta.app_label == 'powerdns_manager' or obj2._meta.app_label == 'powerdns_manager': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return None
Allow any relation if a model in powerdns_manager is involved
625941b5d99f1b3c44c67385
def reset(self): <NEW_LINE> <INDENT> self.t = 0 <NEW_LINE> self.config = np.zeros([self.height, self.width]) <NEW_LINE> self.config[0, :] = self.setup_initial_row() <NEW_LINE> self.build_rule_set()
Initializes the configuration of the cells and converts the entered rule number to a rule set.
625941b5090684286d50eacc
def do_setup(self, context): <NEW_LINE> <INDENT> if hpelefthandclient.version < MIN_CLIENT_VERSION: <NEW_LINE> <INDENT> ex_msg = (_("Invalid hpelefthandclient version found (" "%(found)s). Version %(minimum)s or greater " "required. Run 'pip install --upgrade " "python-lefthandclient' to upgrade the " "hpelefthandclient.") % {'found': hpelefthandclient.version, 'minimum': MIN_CLIENT_VERSION}) <NEW_LINE> LOG.error(ex_msg) <NEW_LINE> raise exception.InvalidInput(reason=ex_msg) <NEW_LINE> <DEDENT> self._do_replication_setup()
Set up LeftHand client.
625941b59c8ee82313fbb563
def __init__(self,list1): <NEW_LINE> <INDENT> self.BtnCount =0 <NEW_LINE> self.root =Tk() <NEW_LINE> self.root.geometry("300x300") <NEW_LINE> self.root.title("MySql破解") <NEW_LINE> self.Lbox =Listbox(self.root,width="40") <NEW_LINE> self.Lbox.grid(row =1,column=0) <NEW_LINE> self.Lbox.insert(END,"查询中......") <NEW_LINE> self.btn =Button(self.root,text="开始破解",command=self.XianCheng) <NEW_LINE> self.btn.grid(row =0,column =0) <NEW_LINE> self.list1 =list1 <NEW_LINE> self.root.mainloop()
初始化函数,账号字典,密码字典
625941b5cc40096d61595739
def get_to_account_id(self): <NEW_LINE> <INDENT> return self.to_account_id
Get to account id. Returns: str: To account id.
625941b5cdde0d52a9e52e14
def time_dependent_test(): <NEW_LINE> <INDENT> diff = np.array([compare_time_dependent(n)[:3] for n in [34, 67]]) <NEW_LINE> stored = [[0.01023591, 5.19786964, 0.93575341], [0.04744501, 4.11548251, 0.69969177]] <NEW_LINE> return np.testing.assert_almost_equal(diff, stored)
Time dependent bed deformation (disc load)
625941b594891a1f4081b88e
def copy_questions(text, report): <NEW_LINE> <INDENT> found = [] <NEW_LINE> new = text <NEW_LINE> p = re.compile('[\[|\(]{1}((?P<preg>\d+)[,\s]*)+[\]|\)]{1}') <NEW_LINE> m = p.finditer(text) <NEW_LINE> for x in m: <NEW_LINE> <INDENT> (px, py) = x.span() <NEW_LINE> found.append((text[px:py], get_new_questions(text[px:py], report))) <NEW_LINE> <DEDENT> for x in found: <NEW_LINE> <INDENT> new = new.replace(x[0], x[1]) <NEW_LINE> <DEDENT> return new
TODO: docstring
625941b54c3428357757c111
def reset_local_db(): <NEW_LINE> <INDENT> print(red(" * deleting local database...")) <NEW_LINE> local('mysql -u %s -p%s -e "drop database if exists %s"' % ( settings.DATABASES['default']['USER'], settings.DATABASES['default']['PASSWORD'], settings.DATABASES['default']['NAME'] )) <NEW_LINE> print(red(" * creating local database...")) <NEW_LINE> local('mysql -u %s -p%s -e "create database %s"' % ( settings.DATABASES['default']['USER'], settings.DATABASES['default']['PASSWORD'], settings.DATABASES['default']['NAME'] )) <NEW_LINE> print(red(" * migrating local database...")) <NEW_LINE> local('cd %s ' '&& . %s ' '&& python manage.py migrate' % (PROJECT_PATH, VIRTUAL_ENVIRONMENT))
resets local database
625941b5711fe17d82542162
def lastRow(self): <NEW_LINE> <INDENT> if not self._readOnly and self._showAddDeleteButtonFlag and not self._updatingFlag>0: <NEW_LINE> <INDENT> return self.rowCount() - 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.rowCount() - 1
Return the last row holding a property. The row with the add new property field is not counted.
625941b530bbd722463cbba8
@pytest.mark.parametrize("random_spectrum", range(4), indirect=True) <NEW_LINE> @pytest.mark.parametrize("another_random_spectrum", range(4), indirect=True) <NEW_LINE> def test_compare_spectra_swapped(random_spectrum, another_random_spectrum, mz_tolerance, min_matched_peaks, compare_spectra_f): <NEW_LINE> <INDENT> mz1, data1 = random_spectrum <NEW_LINE> mz2, data2 = another_random_spectrum <NEW_LINE> comparison = compare_spectra_f(mz1, data1, mz2, data2, mz_tolerance) <NEW_LINE> comparison_swapped = compare_spectra_f(mz2, data2, mz1, data1, mz_tolerance) <NEW_LINE> assert comparison.size == comparison_swapped.size <NEW_LINE> if comparison.size > 0: <NEW_LINE> <INDENT> assert np.array_equal(comparison['ix1'], comparison_swapped['ix2']) <NEW_LINE> assert np.array_equal(comparison['ix2'], comparison_swapped['ix1']) <NEW_LINE> assert np.array_equal(comparison['score'], comparison_swapped['score']) <NEW_LINE> assert np.array_equal(comparison['type'], comparison_swapped['type'])
Comparisons should only differs by indices (swapped) if comparing spectrum 1 with spectrum 2 or spectrum 2 with spectrum 1.
625941b5566aa707497f4361
def _get_item_str(self, res): <NEW_LINE> <INDENT> ipl = self.items_p_line <NEW_LINE> prt_items = sorted([self._get_genestr(itemid) for itemid in res.study_items]) <NEW_LINE> prt_multiline = [prt_items[i:i+ipl] for i in range(0, len(prt_items), ipl)] <NEW_LINE> num_items = len(prt_items) <NEW_LINE> if self.study_items_max is None: <NEW_LINE> <INDENT> genestr = "\n".join([", ".join(str(e) for e in sublist) for sublist in prt_multiline]) <NEW_LINE> return "{N}) {GENES}".format(N=num_items, GENES=genestr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if num_items <= self.study_items_max: <NEW_LINE> <INDENT> gene_lines = [", ".join(str(e) for e in sublist) for sublist in prt_multiline] <NEW_LINE> genestr = "\n".join(gene_lines) <NEW_LINE> return genestr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> short_list = prt_items[:self.study_items_max] <NEW_LINE> short_mult = [short_list[i:i+ipl] for i in range(0, len(short_list), ipl)] <NEW_LINE> short_lines = [", ".join(str(e) for e in sublist) for sublist in short_mult] <NEW_LINE> short_str = "\n".join(short_lines) <NEW_LINE> return "".join(["{N} genes; ".format(N=num_items), short_str, "..."])
Return genes in any of these formats: 1. 19264, 17319, 12520, 12043, 74131, 22163, 12575 2. Ptprc, Mif, Cd81, Bcl2, Sash3, Tnfrsf4, Cdkn1a 3. 7: Ptprc, Mif, Cd81, Bcl2, Sash3...
625941b5287bf620b61d3856
def quickplot(path, num_plot, PhyQty, ref, skiprows, nms, ucols, AspRatio=3): <NEW_LINE> <INDENT> fig = plt.figure(figsize=(10, 5 * len(PhyQty))) <NEW_LINE> fnm = dir2fnm(path) <NEW_LINE> jet = plt.get_cmap('jet') <NEW_LINE> colors = iter(jet(np.linspace(0, 1, len(fnm)))) <NEW_LINE> if len(PhyQty) > 1: <NEW_LINE> <INDENT> gs = fig.add_gridspec(len(PhyQty), 1) <NEW_LINE> plots_ax = [fig.add_subplot(x) for x in gs] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plots_ax = fig.add_subplot(111) <NEW_LINE> <DEDENT> for file in fnm: <NEW_LINE> <INDENT> color = next(colors) <NEW_LINE> data = pd.read_csv(file, sep="\t", skiprows=skiprows, usecols=ucols, names=nms, header=None) <NEW_LINE> data['rxx'] = data.uxx / data.curr * ref <NEW_LINE> data['rxy'] = data.uxy / data.curr * ref <NEW_LINE> data['sxx'] = data['rxx'] / AspRatio / ((data['rxx'] / AspRatio) ** 2 + data['rxy'] ** 2) / e0 ** 2 * h0 <NEW_LINE> data['sxy'] = data['rxy'] / ((data['rxx'] / AspRatio) ** 2 + data['rxy'] ** 2) / e0 ** 2 * h0 <NEW_LINE> if len(PhyQty)>1: <NEW_LINE> <INDENT> for index, phyqty in enumerate(PhyQty): <NEW_LINE> <INDENT> plot_ax = plots_ax[index] <NEW_LINE> plot_ax.plot(data.x, data[phyqty], color=color) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> plots_ax.plot(data.x, data[PhyQty[0]], color=color) <NEW_LINE> <DEDENT> <DEDENT> return plots_ax
Quick plot for multiple files containing the same type of data Arguments: path: The directory of multiple files PhyQty: The physical quantities to be plotted('bf','gate','rxx','rxy','sxx','sxy' and etc) ref: reference resistor skiprows: skipped rows from the header nms: Names for all used columns ucols: Used columns AspRatio: The aspect ratio of the Hall bar. Default is 3 Return: the handle of axes to facilitate further adjustment if necessary
625941b526238365f5f0ec4f
def checkAndCreateDataset(self, conversionStepMap): <NEW_LINE> <INDENT> output = conversionStepMap["outDs"] <NEW_LINE> abstractDb = None <NEW_LINE> try: <NEW_LINE> <INDENT> parameters = self.parseDatasourcePath(datasource=output) <NEW_LINE> parameters['srid'] = conversionStepMap['crs'].split(":")[-1] <NEW_LINE> parameters['isTemplateEdgv'] = True <NEW_LINE> parameters['version'], parameters['templateName'] = { "EDGV 2.1.3" : ("2.1.3", 'template_edgv_213'), "EDGV 2.1.3 F Ter" : ("FTer_2a_Ed", 'template_edgv_fter_2a_ed'), "EDGV 2.1.3 Pro" : ("2.1.3 Pro", 'template_edgv_213_pro'), "EDGV 3.0" : ("3.0", 'template_edgv_3') }[conversionStepMap['edgv']] <NEW_LINE> if 'path' in parameters: <NEW_LINE> <INDENT> parameters['db'] = os.path.basename(os.path.splitext(parameters['path'])[0]) <NEW_LINE> <DEDENT> abstractDb = self.createDataset(parameters=parameters) <NEW_LINE> msg = "" <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if abstractDb is not None: <NEW_LINE> <INDENT> abstractDb.closeDatabase() <NEW_LINE> abstractDb = None <NEW_LINE> <DEDENT> msg = "{0} dataset creation has failed: '{1}'".format(output, "; ".join(map(str, e.args))) <NEW_LINE> <DEDENT> return abstractDb, msg
Checks conversion map for dataset creation necessity and creates it. :param conversionStepMap: (dict) conversion step map. :return: (tuple) new dataset (or None if failed) and failing message (empty, if successful).
625941b599cbb53fe67929cd
@portal_v1.route('/certificates', methods=['POST']) <NEW_LINE> @jwt_required <NEW_LINE> @registrar_required <NEW_LINE> def add_certificate(): <NEW_LINE> <INDENT> errors = check_certificates_keys(request) <NEW_LINE> if errors: <NEW_LINE> <INDENT> return raise_error(400, "Invalid {} key".format(', '.join(errors))) <NEW_LINE> <DEDENT> details = request.get_json() <NEW_LINE> certificate_name = details['certificate_name'] <NEW_LINE> if not certificate_restrictions(certificate_name): <NEW_LINE> <INDENT> return raise_error(400, "Invalid certificate name") <NEW_LINE> <DEDENT> response = CertificatesModel(certificate_name).save() <NEW_LINE> return Serializer.serialize(response, 201, "Certificate added successfully")
Add new certificate.
625941b524f1403a92600950
def move_lines(self, direction): <NEW_LINE> <INDENT> if not direction in ['up', 'down']: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.beginEditBlock() <NEW_LINE> for cursor in self.cursors: <NEW_LINE> <INDENT> sel_start, sel_end, is_reversed = self.get_sel_start_end_reverse(cursor) <NEW_LINE> text, start_in_block, end_in_block = self.get_selected_lines( cursor, sel_start, sel_end, pos_in_block=True ) <NEW_LINE> cursor.removeSelectedText() <NEW_LINE> if direction == 'up': <NEW_LINE> <INDENT> cursor.deletePreviousChar() <NEW_LINE> cursor.movePosition(cursor.StartOfLine, cursor.MoveAnchor) <NEW_LINE> line_start = cursor.position() <NEW_LINE> cursor.insertText(text +'\n') <NEW_LINE> cursor.movePosition(cursor.PreviousBlock, cursor.MoveAnchor) <NEW_LINE> cursor.movePosition(cursor.StartOfLine, cursor.MoveAnchor) <NEW_LINE> line_end = cursor.position() <NEW_LINE> self.restore_selection( cursor, line_start +start_in_block, line_end +end_in_block, is_reversed ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor.deleteChar() <NEW_LINE> cursor.movePosition(cursor.EndOfLine, cursor.MoveAnchor) <NEW_LINE> line_start = cursor.position() +1 <NEW_LINE> cursor.insertText('\n' +text) <NEW_LINE> cursor.movePosition(cursor.StartOfLine, cursor.MoveAnchor) <NEW_LINE> line_end = cursor.position() <NEW_LINE> self.restore_selection( cursor, line_start +start_in_block, line_end +end_in_block, is_reversed ) <NEW_LINE> <DEDENT> <DEDENT> self.endEditBlock() <NEW_LINE> return True
Args: direction (str) : 'up' or 'down' Returns: (bool) Move up/down selected lines on up/down keys.
625941b5091ae35668666d4c
def update_keys(self): <NEW_LINE> <INDENT> keys = pygame.key.get_pressed() <NEW_LINE> self.jump = keys[K_SPACE] <NEW_LINE> self.up = keys[K_w] <NEW_LINE> self.down = keys[K_s] <NEW_LINE> self.left = keys[K_a] <NEW_LINE> self.right = keys[K_d] <NEW_LINE> self.shift = keys[KMOD_SHIFT] <NEW_LINE> self.cam_left = keys[K_LEFT] <NEW_LINE> self.cam_right = keys[K_RIGHT]
Update the key states to those of the keyboard that are being pressed.
625941b5cb5e8a47e48b7897
def setUp(self): <NEW_LINE> <INDENT> self.request = DummyRequest([""]) <NEW_LINE> self.request.site = FakeSite()
Set up a common L{DummyRequest} and L{FakeSite}.
625941b563f4b57ef0000f09
def plot_post_scatter(self, xvar, yvar, xfunc, yfunc): <NEW_LINE> <INDENT> xs = [xfunc(x) for x in self.get_post_attr(xvar)] <NEW_LINE> ys = [yfunc(y) for y in self.get_post_attr(yvar)] <NEW_LINE> fit = np.polyfit(xs, ys, 1) <NEW_LINE> fit_fn = np.poly1d(fit) <NEW_LINE> results = scipy.stats.linregress(xs, ys) <NEW_LINE> r_value = results[2] <NEW_LINE> plt.plot(xs, ys, 'yo', xs, fit_fn(xs), '--k') <NEW_LINE> plt.xlabel(self.get_axis_label(xfunc, xvar)) <NEW_LINE> plt.ylabel(self.get_axis_label(yfunc, yvar)) <NEW_LINE> plt.show() <NEW_LINE> return {'r_squared': r_value ** 2}
:param xvar: (str) name of the attribute to use as x values :param yvar: (str) name of the attribute to use as y values :param xfunc: (func) function to apply to each x-value :param yfunc: (func) function to apply to each y-value :return: (dict) plots the images of the x- and y-values and returns the coefficient of correlation (r squared) for the linear regression
625941b556ac1b37e6263fc6
def round_duration(duration, resolution): <NEW_LINE> <INDENT> resolution_duration = duration_or_delta(resolution) <NEW_LINE> seconds = round(divide_durations(duration, resolution_duration)) * resolution_duration.total_seconds() <NEW_LINE> duration = datetime.timedelta(seconds=seconds) <NEW_LINE> return isodate.duration_isoformat(duration)
Round a timedelta to the units specified by resolution.
625941b5a4f1c619b28afe29
def get_x(self, y): <NEW_LINE> <INDENT> if self.is_vertical : <NEW_LINE> <INDENT> return self._x <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> if self.is_horizontal: <NEW_LINE> <INDENT> raise ArithmeticError <NEW_LINE> <DEDENT> return (float(y) - self._y0) / self.slope
get x by y, raises ArithmeticError for horizontal lines
625941b599fddb7c1c9de179
def request_ceph_pool(self, event): <NEW_LINE> <INDENT> logging.info("Requesting replicated pool") <NEW_LINE> self.ceph_client.create_replicated_pool( self.model.config["pool-name"]) <NEW_LINE> logging.info("Requesting permissions") <NEW_LINE> self.ceph_client.request_ceph_permissions( self.CEPH_CLIENT_NAME, self.CEPH_CAPABILITIES) <NEW_LINE> self.ceph_client.request_osd_settings({ "osd heartbeat grace": 20, "osd heartbeat interval": 5})
Request pool from ceph cluster. :param event: Event :type event: Operator framework event object :returns: This method is called for its side effects :rtype: None
625941b54c3428357757c112
def calc_hij_arq(r_da, q_dot_arq): <NEW_LINE> <INDENT> term_a1 = np.exp(-gamma_arq * q_dot_arq) <NEW_LINE> term_a2 = (C_arq * np.exp(-alpha_arq * (r_da - a_DA_arq) ** 2) + (1 - C_arq) * np.exp(-beta_arq * np.square(r_da - b_DA_arq))) <NEW_LINE> term_a3 = 1 + np.tanh(eps_arq * (r_da - c_DA_arq)) <NEW_LINE> return V_ij_arq * term_a1 * term_a2 * term_a3
Calculates h_ij per Maupin et al. 2006, http://pubs.acs.org/doi/pdf/10.1021/jp053596r, equations 4-6 @param r_da: o_ostar_dist @param q_dot_arq: the square of the three-body term @return: the off-diagonal term
625941b5b7558d58953c4d02
def get_path(self, i, j): <NEW_LINE> <INDENT> intermediate = self.next[i][j] <NEW_LINE> if intermediate is None: <NEW_LINE> <INDENT> assert self.G[i][j] > 0 <NEW_LINE> return [] <NEW_LINE> <DEDENT> return self.get_path(i, intermediate) + [intermediate] + self.get_path(intermediate, j)
returns a list containing the shortest path from node i to node j, exclusive of i & j. example: get_path(1,3) -> 2. where the original shortest path is 1 -> 2 -> 3. returned values are meant to reside in between in an original non-adjacent tour.
625941b5a934411ee3751482
def ignore_import_error(_unused): <NEW_LINE> <INDENT> if not issubclass(sys.exc_info()[0], ImportError): <NEW_LINE> <INDENT> raise
Ignore ImportError during collection. Some modules raise import errors for various reasons, and should be just treated as missing.
625941b51f5feb6acb0c493c
def _build_hlh_matrix(self, index_k, diff_i, diff_j, diff_k): <NEW_LINE> <INDENT> index0 = self.basis_index[0] <NEW_LINE> index1 = self.basis_index[1] <NEW_LINE> func_00 = self.func_hlh(index0, index0, index_k, diff_i, diff_j, diff_k) <NEW_LINE> func_01 = self.func_hlh(index0, index1, index_k, diff_i, diff_j, diff_k) <NEW_LINE> func_10 = self.func_hlh(index1, index0, index_k, diff_i, diff_j, diff_k) <NEW_LINE> func_11 = self.func_hlh(index1, index1, index_k, diff_i, diff_j, diff_k) <NEW_LINE> point_00 = func_00.integral(self.lower_bound, self.upper_bound) <NEW_LINE> point_01 = func_01.integral(self.lower_bound, self.upper_bound) <NEW_LINE> point_10 = func_10.integral(self.lower_bound, self.upper_bound) <NEW_LINE> point_11 = func_11.integral(self.lower_bound, self.upper_bound) <NEW_LINE> return np.array([[point_00, point_01], [point_10, point_11]])
Builds the HLH matrix for a fixed k HLH = [[Lt(0, 0), Lt(0, 1)], [Lt(1, 0), Lt(1, 1)]] Where, Lt(i, j) = H(i, diff_i) * L(j, diff_j) * H(index_k, diff_k) Args: index_k := index on third basis function diff_i := diff on first basis diff_j := diff on second basis diff_k := diff on third basis Returns: matrix HLH
625941b538b623060ff0abd5
def resize_image(self, im): <NEW_LINE> <INDENT> cropped_im = self.crop_image(im) <NEW_LINE> img = resizer.resize_full(cropped_im) <NEW_LINE> mdf = img <NEW_LINE> refiner.color_refinement(mdf) <NEW_LINE> return mdf
Resizes image for recognition Args: im - binary image Returns: mdf - resized image
625941b5f8510a7c17cf94eb
def test__get_tunable_condition_no_match(self): <NEW_LINE> <INDENT> init_params = { 'a_condition': 'not_a_match' } <NEW_LINE> hyperparameters = { 'tunable': { 'this_is_not_conditional': { 'type': 'int', 'default': 1, 'range': [1, 10] }, 'this_is_conditional': { 'type': 'conditional', 'condition': 'a_condition', 'default': { 'type': 'float', 'default': 0.1, 'values': [0, 1] }, 'values': { 'also_not_a_match': { 'type': 'str', 'default': 'a', 'values': ['a', 'b'] }, 'neither_a_match': { 'type': 'int', 'default': 0, 'range': [1, 10] } } } } } <NEW_LINE> tunable = MLBlock._get_tunable(hyperparameters, init_params) <NEW_LINE> expected = { 'this_is_not_conditional': { 'type': 'int', 'default': 1, 'range': [1, 10] }, 'this_is_conditional': { 'type': 'float', 'default': 0.1, 'values': [0, 1] } } <NEW_LINE> assert tunable == expected
If there is a conditional and it does not match, the default is used.
625941b5009cb60464c631a4
def leve_marq_operator(a, b, x, y, k, l): <NEW_LINE> <INDENT> ktk=np.dot(k.T, k) <NEW_LINE> lvma=l*np.diag(ktk.diagonal()) <NEW_LINE> ki = np.dot(inverse(ktk + lvma), k.T) <NEW_LINE> incr_x = np.dot(ki, y) <NEW_LINE> cnx = clipper(a, b, x - incr_x) <NEW_LINE> return cnx, incr_x, None, None
:param a: lower limit of x np.array with 1 dimension :param b: upper limit of x np.array with same length as a :param x: state vector :param y: fnc(x) :param k: dfnc(x) :param l: lambda damping of levenberg dumping :return: cnx (clipped) root of fnc for the linear case, last increment of x
625941b5f7d966606f6a9def
def make_let_frame(bindings, env): <NEW_LINE> <INDENT> if not scheme_listp(bindings): <NEW_LINE> <INDENT> raise SchemeError("bad bindings list in let form") <NEW_LINE> <DEDENT> "*** REPLACE THIS LINE ***" <NEW_LINE> formals = nil <NEW_LINE> vals = nil <NEW_LINE> while bindings: <NEW_LINE> <INDENT> elem = bindings.first <NEW_LINE> check_form(elem, 2, 2) <NEW_LINE> if elem.first is elem.second.first: <NEW_LINE> <INDENT> raise SchemeError("Wrong Assignment.") <NEW_LINE> <DEDENT> formals = Pair(elem.first, formals) <NEW_LINE> vals = Pair(scheme_eval(elem.second.first, env), vals) <NEW_LINE> bindings = bindings.second <NEW_LINE> <DEDENT> return env.make_child_frame(formals, vals)
Create a frame containing bindings from a let expression.
625941b58a43f66fc4b53e50
@BLUEPRINT.route('/sru', methods=['GET']) <NEW_LINE> def sru(): <NEW_LINE> <INDENT> request.environ['PYCSW_IS_SRU'] = True <NEW_LINE> status, headers, content = application_dispatcher(request.environ) <NEW_LINE> return get_response((headers, status, content))
OpenSearch endpoint :returns: HTTP response
625941b567a9b606de4a7ca3
def to_xml_from_string(string): <NEW_LINE> <INDENT> stream = tika.ByteArrayInputStream(tika.JArray_byte(string)) <NEW_LINE> return __handler(stream)
Parse content from a string and return back XHTML representation
625941b5046cf37aa974cb31
def __init__(self, gripper, versioned=False): <NEW_LINE> <INDENT> self.name = gripper + '_gripper' <NEW_LINE> self._cmd_sender = rospy.get_name() + '_%s' <NEW_LINE> self._cmd_sequence = 0 <NEW_LINE> ns = 'robot/end_effector/' + self.name + "/" <NEW_LINE> self._state = None <NEW_LINE> self._prop = EndEffectorProperties(id=-1) <NEW_LINE> self.on_type_changed = baxter_dataflow.Signal() <NEW_LINE> self.on_gripping_changed = baxter_dataflow.Signal() <NEW_LINE> self.on_moving_changed = baxter_dataflow.Signal() <NEW_LINE> self._parameters = dict() <NEW_LINE> self._cmd_pub = rospy.Publisher(ns + 'command', EndEffectorCommand) <NEW_LINE> self._prop_pub = rospy.Publisher(ns + 'rsdk/set_properties', EndEffectorProperties, latch=True ) <NEW_LINE> self._state_pub = rospy.Publisher(ns + 'rsdk/set_state', EndEffectorState, latch=True ) <NEW_LINE> self._state_sub = rospy.Subscriber(ns + 'state', EndEffectorState, self._on_gripper_state ) <NEW_LINE> self._prop_sub = rospy.Subscriber(ns + 'properties', EndEffectorProperties, self._on_gripper_prop ) <NEW_LINE> baxter_dataflow.wait_for( lambda: not self._state is None, timeout=5.0, timeout_msg=("Failed to get state from %s" % (ns + 'state',)) ) <NEW_LINE> baxter_dataflow.wait_for( lambda: not self.type() is None, timeout=5.0, timeout_msg=("Failed to get properties from %s" % (ns + 'properties',)) ) <NEW_LINE> if versioned and self.type() == 'electric': <NEW_LINE> <INDENT> if not self.version_check(): <NEW_LINE> <INDENT> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> self.set_parameters(defaults=True)
Version-checking capable constructor. @type gripper: str @param gripper: robot limb <left/right> on which the gripper is mounted @type versioned: bool @param versioned: True if Gripper firmware version should be checked on initialization. [False] The gripper firmware versions are checked against the version compatibility list in L{baxter_interface.VERSIONS_SDK2GRIPPER}. The compatibility list is updated in each SDK release. By default, this interface class will not check versions, but all examples using Grippers in baxter_examples pass a True and will check. This behavior can be overridden by setting L{baxter_interface.CHECK_VERSION} to False.
625941b5b57a9660fec33665
def _update_paths(self, change): <NEW_LINE> <INDENT> if change['type'] == 'update': <NEW_LINE> <INDENT> name = change['name'] <NEW_LINE> new = change['value'] <NEW_LINE> old = change.get('oldvalue', None) <NEW_LINE> if self.has_root: <NEW_LINE> <INDENT> if name == 'task_name': <NEW_LINE> <INDENT> self.task_database.rename_node(self.task_path, new, old) <NEW_LINE> for name in tagged_members(self, 'child'): <NEW_LINE> <INDENT> child = getattr(self, name) <NEW_LINE> if child: <NEW_LINE> <INDENT> if isinstance(child, list): <NEW_LINE> <INDENT> for aux in child: <NEW_LINE> <INDENT> aux.task_path = self.task_path + '/' + new <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> child.task_path = self.task_path + '/' + new <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> elif name == 'task_path': <NEW_LINE> <INDENT> for name in tagged_members(self, 'child'): <NEW_LINE> <INDENT> child = getattr(self, name) <NEW_LINE> if child: <NEW_LINE> <INDENT> if isinstance(child, list): <NEW_LINE> <INDENT> for aux in child: <NEW_LINE> <INDENT> aux.task_path = new + '/' + self.task_name <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> child.task_path = new + '/' + self.task_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> elif name == 'task_depth': <NEW_LINE> <INDENT> for name in tagged_members(self, 'child'): <NEW_LINE> <INDENT> child = getattr(self, name) <NEW_LINE> if child: <NEW_LINE> <INDENT> if isinstance(child, list): <NEW_LINE> <INDENT> for aux in child: <NEW_LINE> <INDENT> aux.task_depth = new + 1 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> child.task_depth = new + 1
Method taking care that the path of children, the database and the task name remains coherent
625941b592d797404e303f71
def downloaded(name, categories=None, skips=None, retries=10): <NEW_LINE> <INDENT> ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''} <NEW_LINE> deprecation_msg = 'The \'win_update\' module is deprecated, and will be ' 'removed in Salt Fluorine. Please use the \'win_wua\' ' 'module instead.' <NEW_LINE> salt.utils.versions.warn_until('Fluorine', deprecation_msg) <NEW_LINE> ret.setdefault('warnings', []).append(deprecation_msg) <NEW_LINE> if not categories: <NEW_LINE> <INDENT> categories = [name] <NEW_LINE> <DEDENT> log.debug('categories to search for are: {0}'.format(categories)) <NEW_LINE> win_updater = PyWinUpdater() <NEW_LINE> win_updater.SetCategories(categories) <NEW_LINE> win_updater.SetSkips(skips) <NEW_LINE> comment, passed, retries = _search(win_updater, retries) <NEW_LINE> ret['comment'] += comment <NEW_LINE> if not passed: <NEW_LINE> <INDENT> ret['result'] = False <NEW_LINE> return ret <NEW_LINE> <DEDENT> comment, passed, retries = _download(win_updater, retries) <NEW_LINE> ret['comment'] += comment <NEW_LINE> if not passed: <NEW_LINE> <INDENT> ret['result'] = False <NEW_LINE> return ret <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ret['changes'] = win_updater.GetDownloadResults() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> ret['comment'] += 'could not get results, but updates were downloaded.' <NEW_LINE> <DEDENT> return ret
Cache updates for later install. name: if categories is left empty, it will be assumed that you are passing the category option through the name. These are separate because you can only have one name, but can have multiple categories. categories: the list of categories to be downloaded. These are simply strings in the update's information, so there is no enumeration of the categories available. Some known categories: .. code-block:: text Updates Windows 7 Critical Updates Security Updates Update Rollups skips: a list of features of the updates to cull by. Available features: .. code-block:: text 'UI' - User interaction required, skipped by default 'downloaded' - Already downloaded, skipped by default (downloading) 'present' - Present on computer, included by default (installing) 'installed' - Already installed, skipped by default 'reboot' - Reboot required, included by default 'hidden' - skip those updates that have been hidden. 'software' - Software updates, included by default 'driver' - driver updates, skipped by default retries Number of retries to make before giving up. This is total, not per step.
625941b51b99ca400220a897
def write(self): <NEW_LINE> <INDENT> with open(self.path, 'w') as sfile: <NEW_LINE> <INDENT> self.config.write(sfile) <NEW_LINE> <DEDENT> LOGGER.info("Settings written to %s", self.path)
Writes the current configuration to the configured file.
625941b57b25080760e39242
@contextmanager <NEW_LINE> def timeout(time): <NEW_LINE> <INDENT> signal.signal(signal.SIGALRM, raise_timeout) <NEW_LINE> signal.alarm(time) <NEW_LINE> try: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> except TimeoutError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> signal.signal(signal.SIGALRM, signal.SIG_IGN)
Raises a TimeoutError after a duration specified in seconds.
625941b566656f66f7cbbf90
def test_get_shape(): <NEW_LINE> <INDENT> s = get_shape(np.ones((5, 5))) <NEW_LINE> assert(s == (5, 5)) <NEW_LINE> s = get_shape([[1, 2]]) <NEW_LINE> assert(s == (1, 2)) <NEW_LINE> s = get_shape([1, 2, 3]) <NEW_LINE> assert(s == (3,))
Test computation of the shape of nested list.
625941b54f6381625f11482d
def test_zones_patch_403_forbidden_nameservers(self): <NEW_LINE> <INDENT> self.assert_post(self.basepath, self.post_data_two) <NEW_LINE> self.assert_patch_and_403(self.basepath + self.post_data_two['name'], {'nameservers': self.host_three.name})
Trying to patch the nameservers directly is not allowed.
625941b516aa5153ce36225f
def forward(self, input_all): <NEW_LINE> <INDENT> cat_input = torch.stack(input_all) <NEW_LINE> s = cat_input.shape <NEW_LINE> cat_input = cat_input.reshape( s[1], s[0] * self.num_subspaces, int(s[2] / self.num_subspaces) ) <NEW_LINE> res = self.matrix @ cat_input <NEW_LINE> res = res.reshape(len(input_all), -1, input_all[0].shape[1]) <NEW_LINE> res = [res[i] for i in range(len(input_all))] <NEW_LINE> return res
:param input_all: Expects list with n_task entries. Each element is of size batch_size X hidden_dimension :return:
625941b57c178a314d6ef23e
def __repr__(self): <NEW_LINE> <INDENT> num_players = len(self.players) <NEW_LINE> out = num_players * " ───── " <NEW_LINE> out = out + "\n" <NEW_LINE> max_orders = max( [len(player.history) for player in self.players.values()]) <NEW_LINE> for i in range(max_orders + 2): <NEW_LINE> <INDENT> for player_id, player in self.players.items(): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> out = out + " {} ".format(player_id) <NEW_LINE> <DEDENT> elif i == 1: <NEW_LINE> <INDENT> out = out + " ───── " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (i - 2) < len(player.history): <NEW_LINE> <INDENT> trade = player.history[i - 2] <NEW_LINE> trade_value = trade.pot_value * trade.num_shares * (-1) <NEW_LINE> trade_value_str = str(trade_value) <NEW_LINE> if trade_value > 0: <NEW_LINE> <INDENT> trade_value_str = "+" + trade_value_str <NEW_LINE> <DEDENT> out = out + " {} ".format(trade_value_str) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = out + " " <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> out = out + "\n" <NEW_LINE> <DEDENT> return out
Show all players and their trades in a table
625941b5462c4b4f79d1d4b7
def test_bfd_profile_values(): <NEW_LINE> <INDENT> tgen = get_topogen() <NEW_LINE> if tgen.routers_have_failure(): <NEW_LINE> <INDENT> pytest.skip(tgen.errors) <NEW_LINE> <DEDENT> logger.info("waiting for bfd peers to go up and checking profile values") <NEW_LINE> for router in tgen.routers().values(): <NEW_LINE> <INDENT> json_file = "{}/{}/bfd-peers-initial.json".format(CWD, router.name) <NEW_LINE> expected = json.loads(open(json_file).read()) <NEW_LINE> test_func = partial( topotest.router_json_cmp, router, "show bfd peers json", expected ) <NEW_LINE> _, result = topotest.run_and_expect(test_func, None, count=12, wait=0.5) <NEW_LINE> assertmsg = '"{}" JSON output mismatches'.format(router.name) <NEW_LINE> assert result is None, assertmsg
Assert that the BFD peers can find themselves.
625941b5b7558d58953c4d03
def longestUnivaluePath(self, root): <NEW_LINE> <INDENT> self.res = 0 <NEW_LINE> def helper(root): <NEW_LINE> <INDENT> if not root: return 0 <NEW_LINE> l = helper(root.left) if root.left else -1 <NEW_LINE> r = helper(root.right) if root.right else -1 <NEW_LINE> lp = l + 1 if root.left and root.left.val == root.val else 0 <NEW_LINE> rp = r + 1 if root.right and root.right.val == root.val else 0 <NEW_LINE> self.res = max(self.res, lp + rp) <NEW_LINE> return max(lp, rp) <NEW_LINE> <DEDENT> helper(root) <NEW_LINE> return self.res
:type root: TreeNode :rtype: int
625941b5b57a9660fec33666
def __eq__(self, rhs): <NEW_LINE> <INDENT> if sys.version_info <= (3, 0): <NEW_LINE> <INDENT> def test(value): <NEW_LINE> <INDENT> with catch_warning(UnicodeWarning): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return value == rhs <NEW_LINE> <DEDENT> except UnicodeWarning: <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> return value.decode('utf-8') == rhs <NEW_LINE> <DEDENT> elif isinstance(rhs, str): <NEW_LINE> <INDENT> return value == rhs.decode('utf-8') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> def test(value): <NEW_LINE> <INDENT> return value == rhs <NEW_LINE> <DEDENT> <DEDENT> return self._generate_test( lambda value: test(value), ('==', self._path, freeze(rhs)) )
Test a dict value for equality. >>> Query().f1 == 42 :param rhs: The value to compare against
625941b5ab23a570cc24ff66
def test_zip_import(self): <NEW_LINE> <INDENT> testfile = get_module_resource( "account_statement_import_camt", "test_files", "test-camt053.zip" ) <NEW_LINE> with open(testfile, "rb") as datafile: <NEW_LINE> <INDENT> camt_file = base64.b64encode(datafile.read()) <NEW_LINE> self.env["account.statement.import"].create( {"statement_filename": "test import", "statement_file": camt_file} ).import_file_button() <NEW_LINE> bank_st_record = self.env["account.bank.statement"].search( [("name", "in", ["1234Test/2", "1234Test/3"])] ) <NEW_LINE> <DEDENT> self.assertTrue(all([st.line_ids for st in bank_st_record])) <NEW_LINE> self.assertEqual(bank_st_record[0].line_ids.mapped("sequence"), [1, 2, 3])
Test import of multiple statements from zip file.
625941b596565a6dacc8f4bc
def gauss_2D(xy, amplitude, xo, yo, sigma_x, sigma_y, theta, offset): <NEW_LINE> <INDENT> x=xy[0] <NEW_LINE> y=xy[1] <NEW_LINE> xo = float(xo) <NEW_LINE> yo = float(yo) <NEW_LINE> a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) <NEW_LINE> b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) <NEW_LINE> c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) <NEW_LINE> g = offset + amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) <NEW_LINE> return g.ravel()
Model function of a 2D Gaussian profile.
625941b51d351010ab85590c
def has_dimension(self): <NEW_LINE> <INDENT> return self.width and self.height
Renvoyer si les dimensions de l'objet sont valides
625941b52ae34c7f2600cf18
def test_noName(self) -> None: <NEW_LINE> <INDENT> mem = MemorySessionStore() <NEW_LINE> session = self.successResultOf( mem.newSession(True, SessionMechanism.Header) ) <NEW_LINE> to = TestObject(mem) <NEW_LINE> stub = StubTreq(to.router.resource()) <NEW_LINE> response = self.successResultOf( stub.post( "https://localhost/dangling-param", data=dict(), headers={b"X-Test-Session": session.identifier}, ) ) <NEW_LINE> self.assertEqual(response.code, 500) <NEW_LINE> errors = self.flushLoggedErrors(ValueError) <NEW_LINE> self.assertEqual(len(errors), 1) <NEW_LINE> self.assertIn( str(errors[0].value), "Cannot extract unnamed form field." )
A handler for a Form with a Field that doesn't have a name will return an error explaining the problem.
625941b5e1aae11d1e749a9a
def _get_altitude(self): <NEW_LINE> <INDENT> altitude = None <NEW_LINE> for i, line in enumerate(self.file_content): <NEW_LINE> <INDENT> if i == self._header_length: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> line = ''.join(line).lower() <NEW_LINE> if re.search(r"[aA]ltitude.*", line): <NEW_LINE> <INDENT> regex = r"\d*\.\d+|\d+" <NEW_LINE> try: <NEW_LINE> <INDENT> altitude = float(re.findall(regex, line)[0]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> next_line = ''.join(self.file_content[i + 1]) <NEW_LINE> altitude = float(re.findall(regex, next_line)[0]) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> return altitude
Return the altitude value scraped from the header of the file.
625941b58a349b6b435e7f5b
def isValidBST(self, root): <NEW_LINE> <INDENT> if root == None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.recursiveJudge(root.left,-2.**32,root.val) and self.recursiveJudge(root.right,root.val,2.**32)
:type root: TreeNode :rtype: bool
625941b5dc8b845886cb531b
def setUp(self): <NEW_LINE> <INDENT> self.match_history = MatchHistory(data.raw_match_history) <NEW_LINE> self.trend = SummonerTrends(self.match_history, 363043) <NEW_LINE> self.assertEquals(10, len(self.match_history.matches))
Init test
625941b52c8b7c6e89b355ab
def enter_username(self, email) : <NEW_LINE> <INDENT> self.selib.input_text(self.locator.email, email)
Type the given text into the username field
625941b566656f66f7cbbf91
def test_calc_vn_00(self): <NEW_LINE> <INDENT> n = 1 <NEW_LINE> fs = make_simple_class_object() <NEW_LINE> v1 = fs._calc_vn(n) <NEW_LINE> np.testing.assert_almost_equal(v1, -4.75)
test that calc_vn gives correct values
625941b550812a4eaa59c10d
def _remove_git_file_if_invalid(git_file_path): <NEW_LINE> <INDENT> with open(git_file_path, 'rb') as fp: <NEW_LINE> <INDENT> contents = fp.read(10000).decode('utf-8') <NEW_LINE> <DEDENT> if contents.startswith('gitdir: '): <NEW_LINE> <INDENT> gitdir = contents.split(' ', 1)[1].rstrip() <NEW_LINE> if not os.path.isabs(gitdir): <NEW_LINE> <INDENT> gitdir = os.path.join(os.path.dirname(git_file_path), gitdir) <NEW_LINE> <DEDENT> if not os.path.exists(gitdir): <NEW_LINE> <INDENT> os.remove(git_file_path)
Remove invalid .git file. This is needed, because .git file that points to non-existing directory will cause problems with build_resources, since some git commands are executed there and invalid .git file will cause them to fail with error like fatal: Not a git repository: ../.git/modules/shuup Invalid .git files are created when "pip install" copies shuup from a Git submodule to temporary build directory, for example.
625941b5de87d2750b85fb75
def __isub__(self, p): <NEW_LINE> <INDENT> self.x -= p.x <NEW_LINE> self.y -= p.y <NEW_LINE> self.z -= p.z <NEW_LINE> return self
soustraction de deux points
625941b566673b3332b91e7e
def logToFile(self, file, heading=LOG_HEADING_DEFAULT, precision=DEFAULT_PRECISION, dialect=DEFAULT_CSV_DIALECT): <NEW_LINE> <INDENT> heading = bool(heading) <NEW_LINE> if precision <= MINIMUM_PRECISION: <NEW_LINE> <INDENT> precision = MINIMUM_PRECISION <NEW_LINE> <DEDENT> elif precision >= MAXIMUM_PRECISION: <NEW_LINE> <INDENT> precision = MAXIMUM_PRECISION <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> precision = round(precision) <NEW_LINE> <DEDENT> if not file: <NEW_LINE> <INDENT> return self.setLogger("StreamLogger", stdout, heading, dialect, precision) <NEW_LINE> <DEDENT> elif hasattr(file, "write"): <NEW_LINE> <INDENT> return self.setLogger("StreamLogger", file, heading, dialect, precision) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.setLogger("FileLogger", str(file), heading, dialect, precision)
Arranges for this population's log to be written to a file. If *file* is ``None``, or another false value, the standard output is used. If *file* is a file object, or any other object implmenting the **write()** method, it is used as is. Otherwise is is converted to a string and used as a filename, which is opened for writing; the contents of any existing file of that name are lost. The :meth:`close` method will only close a file that has been opened by this population. If an already open file, or ``None``, was pass as the *file* argument this population will never attempt to close it. The file is typically written in Commad Separated Values (CSV) format. The format can be change by providing a different *dialect*, a string. See the documentation of Python's `csv module <https://docs.python.org/3.4/library/csv.html>`_ for details of what dialects are available. If *heading* is not false when first writing to this file a header row is written. Floating point numbers are rounded to the given *precision*, which defaults to four digits after the decimal point.
625941b54e4d5625662d41c5
def copy_upgrade_switch(self, sw, image=None): <NEW_LINE> <INDENT> t = test.Test() <NEW_LINE> switch = t.switch(sw) <NEW_LINE> helpers.log("Switch argument {} handle {}".format(sw, switch.ip())) <NEW_LINE> copy_url = "http://10.6.1.1/export/switchlight/autobuilds/master/latest.switchlight-BT-4.5.0-2cad4a5-powerpc-release.swi" <NEW_LINE> switch.cli("enable") <NEW_LINE> sw_basic_setup = ["boot netdev ma1", "boot netip {}", "boot netmask {}", "boot netgw {}", "boot netdns {}", "boot netdomain qa.bigswitch.com"] <NEW_LINE> switch.config("boot netdev ma1") <NEW_LINE> switch.config("boot netip {}".format(switch.ip())) <NEW_LINE> switch.config("boot netmask 255.255.192.0") <NEW_LINE> switch.config("boot netgw 10.8.0.1") <NEW_LINE> switch.config("boot netdns 10.3.0.4") <NEW_LINE> switch.config("boot netdomain qa.bigswitch.com") <NEW_LINE> switch.config("boot image {}".format(copy_url)) <NEW_LINE> switch.config("copy running-config startup-config")
Copy the image to the switch and upgrade
625941b5f548e778e58cd363
def __init__(self, code='rb2005', frequence='1min', strategy_id='QA_STRATEGY', risk_check_gap=1, portfolio='default', start='2020-01-01', end='2020-05-21', init_cash=1000000, send_wx=False, data_host=eventmq_ip, data_port=eventmq_port, data_user=eventmq_username, data_password=eventmq_password, trade_host=eventmq_ip, trade_port=eventmq_port, trade_user=eventmq_username, trade_password=eventmq_password, taskid=None, mongo_ip=mongo_ip, model='py'): <NEW_LINE> <INDENT> self.username = 'admin' <NEW_LINE> self.password = 'admin' <NEW_LINE> self.trade_host = trade_host <NEW_LINE> self.code = code <NEW_LINE> self.frequence = frequence <NEW_LINE> self.strategy_id = strategy_id <NEW_LINE> self.portfolio = portfolio <NEW_LINE> self.data_host = data_host <NEW_LINE> self.data_port = data_port <NEW_LINE> self.data_user = data_user <NEW_LINE> self.data_password = data_password <NEW_LINE> self.trade_host = trade_host <NEW_LINE> self.trade_port = trade_port <NEW_LINE> self.trade_user = trade_user <NEW_LINE> self.trade_password = trade_password <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.init_cash = init_cash <NEW_LINE> self.taskid = taskid <NEW_LINE> self.running_time = '' <NEW_LINE> self.market_preset = QA.QAARP.MARKET_PRESET() <NEW_LINE> self._market_data = [] <NEW_LINE> self.risk_check_gap = risk_check_gap <NEW_LINE> self.latest_price = {} <NEW_LINE> self.isupdate = False <NEW_LINE> self.model = model <NEW_LINE> self.new_data = {} <NEW_LINE> self._systemvar = {} <NEW_LINE> self._signal = [] <NEW_LINE> self.send_wx = send_wx <NEW_LINE> if isinstance(self.code, str): <NEW_LINE> <INDENT> self.last_order_towards = {self.code: {'BUY': '', 'SELL': ''}} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.last_order_towards = dict( zip(self.code, [{'BUY': '', 'SELL': ''} for i in range(len(self.code))])) <NEW_LINE> <DEDENT> self.dt = '' <NEW_LINE> if isinstance(self.code, str): <NEW_LINE> <INDENT> self.market_type = MARKET_TYPE.FUTURE_CN if re.search( 
r'[a-zA-z]+', self.code) else MARKET_TYPE.STOCK_CN <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.market_type = MARKET_TYPE.FUTURE_CN if re.search( r'[a-zA-z]+', self.code[0]) else MARKET_TYPE.STOCK_CN <NEW_LINE> <DEDENT> self.bar_order = {'BUY_OPEN': 0, 'SELL_OPEN': 0, 'BUY_CLOSE': 0, 'SELL_CLOSE': 0} <NEW_LINE> self._num_cached = 120 <NEW_LINE> self._cached_data = [] <NEW_LINE> self.user_init()
code 可以传入单个标的 也可以传入一组标的(list) 会自动基于code来判断是什么市场 TODO: 支持多个市场同时存在 self.trade_host 交易所在的eventmq的ip [挂ORDER_ROUTER的] /
625941b560cbc95b062c6330
def fake_expanduser(self, path): <NEW_LINE> <INDENT> return path if path in self.fake_files else self.real_expanduser(path)
return itself if it's match fixture name
625941b550485f2cf553cb80
def check_new_value(self, value): <NEW_LINE> <INDENT> if self._function(self._old_best_value, value): <NEW_LINE> <INDENT> self._iterations = -1 <NEW_LINE> self._old_best_value = value
Function to be called after every improvement of the evaluation function. It's also possible to call this function every time when the evaluation value is calculated without ill effects. Parameters ---------- value : int or float Is the best evaluation value found for a solution or the new evaluation value of a solution. It does not matter which one is used.
625941b5dd821e528d63af93
def test_anonymous_user_can_not_update_via_update_view(self): <NEW_LINE> <INDENT> r = self.client.post('/events/1/update/', { 'pub_state': 'public', 'title': '変更後のイベントです', 'body': 'うえーい', 'attendees': [self.user.pk,], 'period_start': datetime.datetime.now()+datetime.timedelta(hours=1), 'period_end': datetime.datetime.now()+datetime.timedelta(hours=3), }) <NEW_LINE> self.assertRedirects(r, settings.LOGIN_URL + '?next=/events/1/update/') <NEW_LINE> self.assertEqual(self.event.title, '変更前のイベントです')
Tests anonymous user can not update event via EventUpdateView It will redirect to LOGIN_URL
625941b5d10714528d5ffac6
def create_intacct_code_mapping_template(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.intacct_codes = self._load_intacct_code_mapping() <NEW_LINE> print( f"Found an existing {INTACCT_CODE_MAPPING_FILENAME} file! Either use it or delete it and run this method again." ) <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> self._build_intacct_code_map_template()
Create a code mapping template from your Toggl entries.
625941b50a50d4780f666c76
def __init__(self, segment, square, label=None): <NEW_LINE> <INDENT> self._segment = segment <NEW_LINE> self._square = square <NEW_LINE> self._label = label
Instantiates a new Axis element
625941b515baa723493c3d59
def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0): <NEW_LINE> <INDENT> if verbosity: <NEW_LINE> <INDENT> print("Calculating pre-filter values for %s, %s ..." % ( trace.stats.station, trace.stats.channel)) <NEW_LINE> <DEDENT> fny = trace.stats.sampling_rate / 2 <NEW_LINE> fc21 = fny - (fny * thi[0] / 100.) <NEW_LINE> fc22 = fny - (fny * thi[1] / 100.) <NEW_LINE> return (tlow[0], tlow[1], fc21, fc22)
takes a `obspy.core.stream.Trace` object, taper parameters tlow and thi and returns the pre-filtering corner frequencies for the cosine taper for further processing :param trace: seismic data trace :type trace: `obspy.core.stream.Trace` :param tlow: tuple or list containing the desired lower corner frequenices for a cosine taper :type tlow: tuple or list :param thi: tuple or list containing the percentage values of the Nyquist frequency for the desired upper corner frequencies of the cosine taper :type thi: tuple or list :param verbosity: verbosity level :type verbosity: int :return: pre-filt cosine taper corner frequencies :rtype: tuple ..example:: >>> st = read() >>> get_prefilt(st[0]) (0.5, 0.9, 47.5, 49.0)
625941b52eb69b55b151c692
def isValid(self, s): <NEW_LINE> <INDENT> stack = [] <NEW_LINE> paren_map = { ')': '(', ']': '[', '}': '{' } <NEW_LINE> print(paren_map) <NEW_LINE> for c in s: <NEW_LINE> <INDENT> if c not in paren_map: <NEW_LINE> <INDENT> stack.append(c) <NEW_LINE> <DEDENT> elif not stack or paren_map[c] != stack.pop(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return not stack
:type s: str :rtype: bool
625941b5c432627299f04a2c