code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def click_login(self):
    """Click the login button and wait for the sign-in page to load."""
    self.q(css=".login-btn").click()
    # Hand off to the SignInPage page object and block until it is ready.
    SignInPage(self.browser).wait_for_page()
Click on login button and go to the next page :return:
625941b4ab23a570cc24ff5a
def RTS(self, opcode = 0x60):
    """Return from subroutine (RTS, default opcode 0x60).

    Pops the 2-byte return address from the stack and resumes execution
    at address + 1 (the pushed address points at the call's last byte).
    """
    PC = (self.stack_pop(2))
    self.set_PC(PC + 1)
return from subroutine
625941b4460517430c393f6b
def pin_node_mem(self, nodes):
    """Change the list of memory nodes for the container.

    Writes *nodes* to the cpuset cgroup, which changes the CT's memory
    affinity (and migrates its memory accordingly).

    :param nodes: memory node list in cpuset format.
    :raises Error: when the cgroup write fails.
    """
    try:
        self._cpusetcg.set_node_list(nodes)
    except IOError as err:
        raise Error('Cgroup write failed: {}'.format(err))
Change list of memory nodes for CT. This function changes CT affinity for memory and migrates CT's memory accordingly.
625941b48a349b6b435e7f50
def setDateFacture(self, new_date_facture):
    """Set the client's invoice date and echo the new value.

    :param new_date_facture: the new invoice date (any type; it is
        stringified for display).
    """
    self.date_facture_client = new_date_facture
    # str() guards against a TypeError: the original concatenated the raw
    # value, which crashed whenever the date was not already a string.
    print("La nouvelle date de facture " + str(self.date_facture_client))
methode qui permet de modifier la date de la facture du client
625941b4236d856c2ad445ba
def start_proxy():
    """Start a local ``kubectl proxy`` to the cluster (127.0.0.1:8001).

    The proxy is launched in the background; the short sleep gives it
    time to come up before callers issue requests.
    """
    bash_proxy = "kubectl proxy &"
    subprocess.call(['bash', '-c', bash_proxy])
    time.sleep(2)
Starts local proxy to Kubernetes cluster, host: 127.0.0.1:8001
625941b4925a0f43d2549c4e
def list_plugins(plugins):
    """Print the given plugins, one "name  description" line each, sorted by name."""
    for plugin_name in sorted(plugins):
        print("%-20s %s" % (plugin_name, plugins[plugin_name].oneliner()))
List given plugins.
625941b415fb5d323cde08e3
def render_video(self, openid, title, desc, mediaId, timestamp=None):
    """Build a WeChat-format XML reply carrying a video message.

    :param openid: recipient account id.
    :param title: video title.
    :param desc: video description.
    :param mediaId: media id obtained by uploading through the WeChat
        media-management API.
    :param timestamp: message creation time (int); defaults to now.
    :return: WeChat-format XML text.
    """
    return _VIDEO_XML.format( openid, self.fromUserName, mediaId, _create_time(timestamp), title, desc )
回复视频消息 :param openid: 接收方帐号 :param title: 视频标题 :param desc: 视频描述 :param mediaId:通过素材管理中的接口上传多媒体文件,得到的id :param timestamp: 消息创建时间 (整型) :return: 微信格式的XML文本
625941b4cdde0d52a9e52e09
def test_remove_introns_for_irrelevant_registers2(self):
    """Ensure Program.remove_introns() drops instructions that cannot
    influence the program output (introns).

    Expected removals: the conditional (b), the r[1] update it guards
    (c), and the later r[1] update (e) — per the scenario, r[1] never
    becomes a relevant register for the final result.
    """
    a = Instruction(mode = 'read-input', target = 0, op = '+', source = 4)
    b = Instruction(mode = 'read-input', target = 0, op = 'if_equal_or_higher_than', source = 2)
    c = Instruction(mode = 'read-register', target = 1, op = '-', source = 0)
    d = Instruction(mode = 'read-input', target = 0, op = '-', source = 0)
    e = Instruction(mode = 'read-input', target = 1, op = '+', source = 7)
    f = Instruction(mode = 'read-register', target = 0, op = 'exp', source = 1)
    g = Instruction(mode = 'read-input', target = 0, op = '*', source = 6)
    instructions = [a,b,c,d,e,f,g]
    instructions_without_introns = Program.remove_introns(instructions)
    instructions.remove(b)
    instructions.remove(c)
    instructions.remove(e)
    self.assertEqual(instructions, instructions_without_introns)
Ensures the algorithm removes correctly. r[0] = r[0] + i[4] if r[0] >= i[2]: # HERE r[1] = r[1] - r[0] # HERE r[0] = r[0] - i[0] r[1] = r[1] + i[7] # HERE r[0] = exp(r[0]) # r[1] shouldn't be added to relevant registers r[0] = r[0] * i[6]
625941b4d8ef3951e3243318
def SaveCal(self) -> int:
    """Save the calendar data to "<prefix>.cal.csv".

    Only writes when the data is dirty; an existing file is first
    copied to "<prefix>.cal.bak.csv" as a backup.

    :return: the value returned by WriteCalToCSV, or 0 when there was
        nothing to save.
    """
    if not self.__dirty:
        # Nothing changed since the last save. Previously this path fell
        # off the end and returned None despite the ``-> int`` annotation.
        return 0
    cal_path = self.__prefix + ".cal.csv"
    if os.path.exists(cal_path):
        # Keep a backup of the previous calendar before overwriting it.
        shutil.copy2(cal_path, self.__prefix + ".cal.bak.csv")
    with open(cal_path, "w", encoding='utf8', errors="ignore") as f:
        i = self.WriteCalToCSV(f)
        self.__dirty = False
        return i
Save the Calender Data to CSV filename <prefix>.cal.csv check if __dirty check if file exists, then copy to *bak.csv
625941b4fb3f5b602dac3472
def syntax_highlight(self, src, lang, hl_lines=None, inline=False):
    """Syntax highlight *src* and return the rendered HTML string.

    :param src: source text to highlight.
    :param lang: language name; empty/None falls back to plain 'text'.
    :param hl_lines: optional list of line numbers to emphasise.
    :param inline: render inline rather than as a block.
    :return: the generated HTML, joined from the internal buffer.
    """
    self.set_view(src, 'text' if not lang else lang)
    self.inline = inline
    # Avoid the shared mutable-default pitfall: allocate a fresh list per
    # call instead of reusing a module-lifetime ``[]`` default object.
    self.hl_lines = [] if hl_lines is None else hl_lines
    self.setup()
    self.html = []
    self.write_body()
    return ''.join(self.html)
Syntax Highlight.
625941b4cc0a2c11143dcc74
def handshake_menuitem(self, *args, **kwargs):
    """Create a menu item that stays disabled until the server
    handshake completes.

    Same arguments as menuitem(); returns the created (disabled) item.
    """
    mi = self.menuitem(*args, **kwargs)
    set_sensitive(mi, False)
    def enable_menuitem(*args):
        # Re-enable once the client signals the handshake finished.
        set_sensitive(mi, True)
    self.client.after_handshake(enable_menuitem)
    return mi
Same as menuitem() but this one will be disabled until we complete the server handshake
625941b4d99f1b3c44c6737b
def disjunct_components(adjacency_list, vertices=None):
    """Break the graph into its disjunct (connected) components.

    Parameters
    ----------
    adjacency_list : dict of dicts
        Adjacency list containing edge data.
    vertices : dict of dicts, optional
        Vertex data; defaults to empty data per vertex.

    Yields
    ------
    (component_adjacency_list, component_vertices) : tuple of dicts
        One pair per component, ordered by each component's first root
        in ``adjacency_list`` iteration order.

    Notes
    -----
    Uses recursive DFS; very deep graphs may hit the interpreter
    recursion limit.
    """
    if vertices is None:
        vertices = {vertex: {} for vertex in adjacency_list}
    visited = []
    def recursive_depth_first_search(vertex):
        # Accumulates into the component_* dicts rebound in the loop below.
        visited.append(vertex)
        component_adjacency_list[vertex] = adjacency_list[vertex]
        component_vertices[vertex] = vertices[vertex]
        for neighbour in adjacency_list[vertex]:
            if neighbour not in visited:
                recursive_depth_first_search(neighbour)
    for root in adjacency_list:
        if root not in visited:
            component_adjacency_list, component_vertices = {}, {}
            recursive_depth_first_search(root)
            yield component_adjacency_list, component_vertices
Break the graph into its disjunct components. Returns a generator object. Parameters ---------- adjacency_list : dict of dicts Adjacency list containing edge data. vertices : dict of dicts List containing vertex data. Returns ------- disjunct_component_generator : generator The generator yields disjunct components in increasing order of their smallest members.
625941b4711fe17d82542158
def rightSideView(self, root: TreeNode) -> List[int]:
    """Return the values visible from the right side of a binary tree.

    DFS traverses right-to-left, recording the first value encountered
    at each new depth. O(n) time.
    """
    view = []
    def visit(node, depth):
        if not node:
            return
        if depth == len(view):
            # First node seen at this depth is the rightmost one.
            view.append(node.val)
        visit(node.right, depth + 1)
        visit(node.left, depth + 1)
    visit(root, 0)
    return view
:desc 输出树的右视图 :way DFS-traverse the tree right-to-left, add values to the view whenever we first reach a new record depth. This is O(n).
625941b4097d151d1a222c3f
def set_ResponseType(self, value):
    """Set the ResponseType input for this Choreo.

    ((optional, string) "json" or "xml" response format; the service
    defaults to xml.)
    """
    super(ListIndustryCodesInputSet, self)._set_input('ResponseType', value)
Set the value of the ResponseType input for this Choreo. ((optional, string) Specify json or xml for the type of response to be returned. Defaults to xml.)
625941b4de87d2750b85fb69
def clone_pod(module, array):
    """Create a pod clone (Ansible module action).

    In check mode the call reports ``changed`` without touching the
    array. Otherwise the clone is only attempted when no pod with the
    target name exists, alive or destroyed.
    """
    changed = True
    if not module.check_mode:
        changed = False
        if get_target(module, array) is None:
            if not get_destroyed_target(module, array):
                try:
                    array.clone_pod(module.params['name'], module.params['target'])
                    changed = True
                except Exception:
                    module.fail_json(msg='Clone pod {0} to pod {1} failed.'.format(module.params['name'], module.params['target']))
            else:
                # A destroyed (not yet eradicated) pod still blocks the name.
                module.fail_json(msg='Target pod {0} already exists but deleted.'.format(module.params['target']))
    module.exit_json(changed=changed)
Create Pod Clone
625941b4377c676e91271f86
def build_guided_model():
    """Return a model with ReLU gradients modified for Guided
    Backpropagation.

    Registers a "GuidedBackProp" gradient once (it passes gradient only
    where both the incoming gradient and the ReLU input are positive),
    then builds the model under a Relu -> GuidedBackProp override.
    """
    if "GuidedBackProp" not in ops._gradient_registry._registry:
        @ops.RegisterGradient("GuidedBackProp")
        def _GuidedBackProp(op, grad):
            dtype = op.inputs[0].dtype
            # Zero out positions with negative gradient or negative input.
            return grad * tf.cast(grad > 0., dtype) * tf.cast(op.inputs[0] > 0., dtype)
    g = tf.compat.v1.get_default_graph()
    with g.gradient_override_map({'Relu': 'GuidedBackProp'}):
        new_model = build_model()
    return new_model
Function returning modified model. Changes gradient function for all ReLu activations according to Guided Backpropagation.
625941b426238365f5f0ec44
def onConnect(self, response):
    """Callback invoked when the server connection is established.

    Resets the factory's reconnect delay so future reconnects start
    from the shortest backoff again.
    """
    print("Server connected: {}".format(response.peer))
    self.factory.resetDelay()
Callback for when the server is connected
625941b430bbd722463cbb9e
def is_user_allowed(self, access_token):
    """Check whether the authenticated user may access the application.

    When ``allowed_orgs`` is configured the user must belong to at
    least one of those GitHub organisations; otherwise every
    authenticated user is allowed.

    :param access_token: a valid access token able to GET the GitHub API.
    :return: True when access should be granted.
    """
    if not self.allowed_orgs:
        return True
    raw = access_token.get("https://api.github.com/user/orgs").read()
    memberships = {org["login"] for org in json.loads(raw)}
    # Allowed iff the user's org set overlaps the configured set.
    return not memberships.isdisjoint(self.allowed_orgs)
Check if the authenticated user is allowed to access the protected application. If this :class:`GitHubService` was created with a list of allowed_orgs, the user must be a member of one or more of the allowed_orgs to get access. If no allowed_orgs were specified, all authenticated users will be allowed. :param access_token: a valid :class:`AccessToken` .. versionadded:: 0.1.3
625941b426068e7796caeab3
def non_winning_cards(self, played: List[Card], playable: Deck) -> Deck:
    """List playable cards that are guaranteed not to win the trick.

    :param played: cards already played this trick; the first card
        sets the lead suit.
    :param playable: cards this player may legally play.
    :return: a Deck of cards that either lose to the best on-suit card
        so far or do not follow the lead suit. Empty when nothing has
        been played yet (any card could still win).
    """
    if not played:
        return Deck([])
    lead = played[0].suit
    # Highest card of the lead suit is currently winning the trick.
    # NOTE(review): relies on Card's ordering for the `<` comparison —
    # presumably rank-based; confirm against the Card class.
    best_card = max(c for c in played if c.suit == lead)
    return Deck([c for c in playable if c < best_card or c.suit != lead])
List playable cards that are guaranteed to not win the trick
625941b47cff6e4e81117762
def send_msg(mobile,datas):
    """Send an SMS verification code via the SMS SDK.

    :param mobile: recipient phone number.
    :param datas: template substitution values for template ``constants.TID``.
    :return: the SDK response.
    """
    sdk=SmsSDK(constants.accId,constants.accToken,constants.accIp)
    tid=constants.TID
    resp=sdk.sendMessage(tid,mobile,datas)
    return resp
发送短信验证码
625941b450485f2cf553cb74
@task
def in_place_deploy():
    """Update an existing production deployment of the site in place.

    Pulls the latest code, rebuilds LESS/CSS, migrates the database,
    publishes static content and restarts services. Dependencies are
    NOT reinstalled.
    """
    env.user = 'deploy'
    env.project_root = os.path.realpath(os.path.join( '/home', env.user, env.production_projects_directory, env.project_name, 'live'))
    __build_env_dictionary()
    git_pull(env.project_root)
    compile_less_css(env.project_root)
    django_sync_and_migrate(env.project_root, True)
    django_publish_static_content(env.project_root)
    restart_services()
    puts(success="In-place deployment finished")
Update an existing production deployment of the site. Code will be updated, but dependencies will not be reinstalled. @todo - make it keep track of the number of deploys/db backups so it doesn't end up creating hundreds by mistake @todo - Extend this so it will provision new servers by: - installing nginx - installing supervisord - uploading base configurations using files.upload_template - creating links in /etc/nginx/conf.d to the project config file - doing the same for supervisord
625941b4a8ecb033257d2eb1
def test_get_destinations_no_default_directory_single_destination(get_destinations_message):
    """Check destination-URL generation when no default directory is
    configured: expect exactly one fully-specified target1 destination."""
    dispatcher = _get_dispatcher(test_yaml_no_default_directory)
    expected_length = 1
    expected_url = 'ftp://ftp.target1.com/input_data/viirs/NOAA-20_201909190919.tif'
    expected_attrs = {'connection_uptime': 20}
    expected_client = "target1"
    res = dispatcher.get_destinations(get_destinations_message)
    _assert_get_destinations_res(res, expected_length, expected_url, expected_attrs, expected_client)
Check getting destination urls when default directory isn't configured.
625941b485dfad0860c3ac34
def set_result_in_gds(self, task_id, execution_result):
    """Store *execution_result* under the "<task_id>_result" Redis key.

    Returns None when *task_id* is falsy; otherwise returns whatever
    the Redis list-insert helper returns.

    TODO: decide whether errors should be stored as separate list
    elements rather than as the first (and only) element.
    """
    if not task_id:
        return None
    result_key = task_id + "_result"
    return self.insert_in_key_list_redis(result_key, execution_result)
TODO: Figure out if errors need to be stored as elements as opposed to the first (and only) element of the list
625941b46e29344779a623f2
def _do_sync_2i(self, obj):
    """Synchronise 2i (secondary) indexes on *obj* when saving.

    The default implementation is a no-op. Subclasses that use 2i
    indexes should set ``uses_2i`` to True and override this method to
    return a RiakObject with its 2i indexes populated.

    :param obj: the RiakObject about to be stored.
    :return: the (possibly modified) RiakObject.
    """
    return obj
保存对象时同步 2i 索引. 如果使用到 2i 索引, 请在子类设置 :attr:`uses_2i` 为 :const:`True`, 并为此方法提供实现. 此方法应返回一个设置好的 :class:`RiakObject <riak.riak_object.RiakObject>` 对象. :param obj: 准备同步 2i 索引的 Riak 对象. :type obj: :class:`RiakObject <riak.riak_object.RiakObject>` :return: 设置好 2i 索引的 Riak 对象. :rtype: :class:`RiakObject <riak.riak_object.RiakObject>`
625941b4a79ad161976cbf21
def create_definition(self, block_type):
    """Allocate a new definition id and record its block type.

    :param block_type: the block type to store.
    :return: the newly allocated definition id.
    """
    definition_id = self._next_id()
    self._definitions[definition_id] = block_type
    return definition_id
Make a definition, storing its block type.
625941b499cbb53fe67929c3
def plant(seed=None):
    """Generate a short (2-4 letter) random plant-like name.

    >>> plant(seed=1234)
    'Mayu'
    >>> plant(seed=1)
    'Wo'
    """
    return random_name(minlength=2, maxlength=4, seed=seed)
>>> plant(seed=1234) 'Mayu' >>> plant(seed=1) 'Wo'
625941b4090684286d50eac2
def joinSmilesListToCompositeSmiles(smiList):
    """Join a list of SMILES strings into one composite SMILES using
    the module-level molecule delimiter."""
    return SMILES_MOL_DELIM.join(smiList)
Simple convenience to join together a set of smiles
625941b494891a1f4081b883
def flush_cloud_db_instance(self, flush_cloud_db_instance_request, **kwargs):
    """Flush a Cloud DB instance.  # noqa: E501

    Synchronous by default; pass ``async=True`` to get the request
    thread instead:

    >>> thread = api.flush_cloud_db_instance(request, async=True)
    >>> result = thread.get()

    :param async bool
    :param FlushCloudDBInstanceRequest flush_cloud_db_instance_request: (required)
    :return: FlushCloudDBInstanceResponse, or the request thread when
        called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return self.flush_cloud_db_instance_with_http_info(flush_cloud_db_instance_request, **kwargs)
    else:
        (data) = self.flush_cloud_db_instance_with_http_info(flush_cloud_db_instance_request, **kwargs)
        return data
flush_cloud_db_instance # noqa: E501 CloudDB Flush # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.flush_cloud_db_instance(flush_cloud_db_instance_request, async=True) >>> result = thread.get() :param async bool :param FlushCloudDBInstanceRequest flush_cloud_db_instance_request: flushCloudDBInstanceRequest (required) :return: FlushCloudDBInstanceResponse If the method is called asynchronously, returns the request thread.
625941b466656f66f7cbbf86
def new_block(self, proof, previous_hash):
    """Create a new Block and append it to the chain.

    :param proof: the proof given by the Proof of Work algorithm.
    :param previous_hash: hash of the previous Block ('1' for genesis).
    :return: the new block dict.
    :raises ValueError: when there are no pending transactions and this
        is not the genesis block.
    """
    if not self.current_transactions and previous_hash != '1':
        # ValueError (still an Exception subclass, so existing handlers
        # keep working) with a message instead of a bare ``raise Exception``.
        raise ValueError('Cannot forge a block with no pending transactions.')
    block = {
        'index': len(self.chain) + 1,
        'timestamp': time(),
        'transactions': self.current_transactions,
        'proof': proof,
        'previous_hash': previous_hash or self.hash(self.chain[-1]),
    }
    # Reset the pending-transaction pool and extend the chain.
    self.current_transactions = []
    self.chain.append(block)
    return block
Create a new Block in the Blockchain :param proof: The proof given by the Proof of Work algorithm :param previous_hash: Hash of previous Block :return: New Block
625941b49b70327d1c4e0baf
def testAddRows(self):
    """Check that addRows() appends the given row to the constraint
    matrix (row count grows, new row's coefficients match row 1) and
    that undo() restores the previous state and clears the history."""
    self.assertEqual(self.glp.getNumRows(), 904)
    newColumArray = self.glp.getRowCoef(1)
    self.glp.addRows({'Mwurstb': (0., 99999., newColumArray)})
    self.assertEqual(self.glp.getNumRows(), 905)
    self.assertEqual(self.glp.getRowCoef(1), self.glp.getRowCoef(905))
    self.glp.undo()
    self.assertEqual(self.glp.getNumRows(), 904)
    self.assertEqual(len(self.glp.history), 0)
Tests if a the specified column is appended to the constraint matrix
625941b4293b9510aa2c3075
def get_terminal_size():
    """Get the (width, height) of the console.

    Works on Linux, macOS (Darwin), Windows and Cygwin; falls back to
    (80, 25) when no probe succeeds.

    Originally adapted from:
    http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
    """
    current_os = platform.system()
    tuple_xy = None
    if current_os == 'Windows':
        tuple_xy = _get_terminal_size_windows()
        if tuple_xy is None:
            # Fall back to tput when the Win32 probe fails.
            tuple_xy = _get_terminal_size_tput()
    if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'):
        tuple_xy = _get_terminal_size_linux()
    if tuple_xy is None:
        # Removed a stray bare ``print`` (Python-2 leftover; under Python 3
        # it was a no-op expression referencing the builtin).
        tuple_xy = (80, 25)
    return tuple_xy
getTerminalSize() - get width and height of console - works on linux,os x,windows,cygwin(windows) originally retrieved from: http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
625941b4be8e80087fb20a2b
def PrioPacketVerify2_write(p):
    """Serialize a PrioPacketVerify2 (thin wrapper over the C library).

    Parameters
    ----------
    p: const_PrioPacketVerify2

    Returns
    -------
    PyObject * : the serialized packet.
    """
    return _libprio.PrioPacketVerify2_write(p)
PrioPacketVerify2_write(const_PrioPacketVerify2 p) -> PyObject * Parameters ---------- p: const_PrioPacketVerify2
625941b48e7ae83300e4ada8
def validate_response(response):
    """Validate a parsed JSON-RPC response object.

    Raises a generic Exception when required members are missing, and
    a ProtocolError when the server reported an error.
    """
    has_error = 'error' in response
    well_formed = (
        'jsonrpc' in response
        and 'id' in response
        and ('result' in response or has_error)
    )
    if not well_formed:
        raise Exception('Server returned invalid response.')
    if has_error:
        error = response['error']
        raise ProtocolError(error['code'], error['message'])
Parses the returned JSON object, verifies that it follows the JSON-RPC spec, and checks for errors, raising exceptions as necessary.
625941b494891a1f4081b884
def classes(self, sort=True) -> List['Class']: <NEW_LINE> <INDENT> return self._filter_doc_objs(Class, sort)
Returns all documented module-level classes in the module, optionally sorted alphabetically, as a list of `pdoc.Class`.
625941b426068e7796caeab4
def discover_episodes(self, base_path, files):
    """Scan *files* for video episodes and group them by series title.

    Skips files without a recognised video extension and skips
    openings/endings (NCOP/NCED/OPnn/EDnn name patterns), then parses
    the episode number and registers the episode under its cleaned
    series title.
    """
    op_or_ed = re.compile(r'(NCOP)|(NCED)|(\sOP[0-9]+)|(\sED[0-9]+)')
    for file in files:
        episode = Path(file)
        if episode.suffix not in EXTENSIONS:
            continue
        if op_or_ed.search(episode.name):
            continue
        title = clean_title(episode.stem)
        episode = Episode(base_path / episode)
        episode.parse_episode_number(title)
        self.series[title].add_or_update_episode(episode)
Search through a directory to find all files which have a close enough name to be considered a part of the same series
625941b4187af65679ca4f00
def findcross(profile1, profile2):
    """Find the cross compositions of two ternary diffusion paths.

    Parameters
    ----------
    profile1, profile2 : Profile1D
        Two 1D diffusion profiles of the same ternary system; their
        X1/X2 attributes hold the path coordinates.

    Returns
    -------
    list of [x, y]
        Coordinates of every segment-pair intersection found.
    """
    l1x, l1y = profile1.X1, profile1.X2
    l2x, l2y = profile2.X1, profile2.X2
    result = []
    for i in range(1, len(l1x)):
        for j in range(1, len(l2x)):
            # Cheap bounding-box rejection before the exact segment test.
            if min(l1x[i-1:i+1]) > max(l2x[j-1:j+1]) or min(l2x[j-1:j+1]) > max(l1x[i-1:i+1]):
                continue
            elif min(l1y[i-1:i+1]) > max(l2y[j-1:j+1]) or min(l2y[j-1:j+1]) > max(l1y[i-1:i+1]):
                continue
            o = np.array([l1x[i-1], l1y[i-1]])
            a = np.array([l1x[i], l1y[i]])
            p = np.array([l2x[j-1], l2y[j-1]])
            b = np.array([l2x[j], l2y[j]])
            # Translate so segment 1 starts at the origin.
            a, p, b = a-o, p-o, b-p
            r = cross(a, b, p)
            if r != -1:
                # cross() returned the intersection relative to o.
                result += [[r[0]+o[0], r[1]+o[1]]]
    return result
Find the cross composition of two ternary diffusion paths. Parameters ---------- profile1, profile2 : Profile1D Two 1D diffusion profiles of the same ternary system Returns ------- list of cross compositions
625941b48c3a87329515819a
def fetch_tweets(api, tweet_count, stop, min_id, max_id, tweets_per_qry, file, query):
    """Append tweets matching *query* to *file* as JSON lines.

    Fetches up to *tweets_per_qry* tweets per request until
    *tweet_count* reaches *stop* or no more tweets are available.
    *min_id* is the newest tweet id to start from; *max_id* tracks the
    oldest tweet fetched so far and is updated every iteration so the
    search pages backwards in time.

    :return: the final tweet count.
    """
    with open(file,'a') as f:
        while tweet_count<stop:
            if (max_id <= 0):
                if (not min_id):
                    new_tweets = api.search(q=query, count=tweets_per_qry)
                else:
                    new_tweets = api.search(q=query, count=tweets_per_qry, since_id=min_id)
            else:
                if (not min_id):
                    new_tweets = api.search(q=query, count=tweets_per_qry, max_id=str(max_id - 1))
                else:
                    new_tweets = api.search(q=query, count=tweets_per_qry, max_id=str(max_id - 1), since_id=min_id)
            if not new_tweets:
                print("No more tweets found")
                break
            for tweet in new_tweets:
                json.dump(tweet._json, f)
                f.write('\n')
            tweet_count += len(new_tweets)
            print(f"Downloaded {tweet_count} tweets")
            # Page backwards: next request fetches tweets older than this.
            max_id = new_tweets[-1].id
    return tweet_count
Tweets fetched for query will be appended to the file and the fetching limit per iteration is tweets_per_qry. The function stops fetching once the tweet_count reaches stop limit. min_id tells the latest tweet id from which the fetch starts and max_id tells the oldest tweet fetched based on tweet_id. max_id and min_id guides the fetching process. For every iteration we update max_id to the oldest tweet_id to keep track of fetch of tweets.
625941b4f9cc0f698b1403e1
def test_function_with_variable(self):
    """Test a variable updated with a function as a callable.

    Only the (1, 0, 2) cell is "special" and tracks md.TIME, so it
    should advance with step() and revert with reset(); all other
    cells stay at their index-derived constants.
    """
    def is_special(facility, situation, criterion):
        return (facility == 1) and (situation == 0) and (criterion == 2)
    def attractiveness(md):
        attr = np.empty((3, 3, 3))
        for index in np.ndindex(*(attr.shape)):
            if is_special(*index):
                attr[index] = md.TIME
            else:
                attr[index] = index[0] * 10 + index[1]
        return attr
    with mn.model() as m:
        mn.variable('Attractiveness', attractiveness, '__model__')
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 0)
    m.step()
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 1)
    m.step()
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 2)
    m.step(10)
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 12)
    m.reset()
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 0)
    m.step()
    self.assertEqual(m['Attractiveness'][''][0, 0, 0], 0)
    self.assertEqual(m['Attractiveness'][''][2, 2, 2], 22)
    self.assertEqual(m['Attractiveness'][''][2, 0, 1], 20)
    self.assertEqual(m['Attractiveness'][''][1, 0, 2], 1)
Test a variable updated with a function as a callable
625941b4baa26c4b54cb0f00
def __init__(self, twitterAPI):
    """Initialise the bot with a ready-to-use Twitter API object.

    Other methods rely on ``self.twitterAPI``, so instantiate as
    ``mybot = bot.bot(api)`` and they will use the right api object.
    Also builds an order-3 Markov generator capped at 140 characters.
    """
    self.twitterAPI = twitterAPI
    self.mymarkov = markov.Markov(3, 140)
This method initializes the bot class with the twitter API in use. Other methods use this value, so when instantiating this class be sure to do it like: mybot = bot.bot(api) then other methods can be accessed and they'll use the right api object
625941b4d268445f265b4c51
def _write_summary_results(output_dir, eval_results, current_global_step):
    """Write eval results into a summary file in *output_dir*.

    Only float / np.float32 valued results become simple values;
    None entries are skipped entirely.
    """
    logging.info('Saving evaluation summary for %d step: %s', current_global_step, _eval_results_to_str(eval_results))
    summary_writer = get_summary_writer(output_dir)
    summary = summary_pb2.Summary()
    for key in eval_results:
        if eval_results[key] is None:
            continue
        value = summary.value.add()
        value.tag = key
        if (isinstance(eval_results[key], np.float32) or isinstance(eval_results[key], float)):
            value.simple_value = float(eval_results[key])
    summary_writer.add_summary(summary, current_global_step)
    summary_writer.close()
Writes eval results into summary file in given dir.
625941b431939e2706e4cc4d
def inconsistent_support_mutex(self, node_s1: PgNode_s, node_s2: PgNode_s):
    """Test a pair of state literals for inconsistent-support mutex.

    True iff every action achieving node_s1 is pairwise mutex with
    every action achieving node_s2, i.e. no pair of actions could make
    both literals true at the same time. Vacuously True when either
    node has no parent actions.

    :param node_s1: PgNode_s
    :param node_s2: PgNode_s
    :return: bool
    """
    return all(a1.is_mutex(a2) for a1 in node_s1.parents for a2 in node_s2.parents)
Test a pair of state literals for mutual exclusion, returning True if there are no actions that could achieve the two literals at the same time, and False otherwise. In other words, the two literal nodes are mutex if all of the actions that could achieve the first literal node are pairwise mutually exclusive with all of the actions that could achieve the second literal node. HINT: The PgNode.is_mutex method can be used to test whether two nodes are mutually exclusive. :param node_s1: PgNode_s :param node_s2: PgNode_s :return: bool
625941b4cb5e8a47e48b788d
def __rmul__(self,other):
    """Reverse multiplication: build a composite 'mul' spectrum with
    *other* as the left operand."""
    return CompositeSpectra(other,self,'mul')
Reverse Multiplication
625941b49c8ee82313fbb559
def test_dart_mime_type(self):
    """Verify that '.dart' files are served with MIME type
    'application/dart' by the local HTTP server."""
    mappings = [ ('', [self.apps_dir]), ]
    server_address = ('http://%s:%u/' % http_server.start_http_server(mappings))
    app_relpath = os.path.relpath(self.dart_app_path, self.apps_dir)
    hello_response = urllib2.urlopen(server_address + app_relpath)
    self.assertEquals(200, hello_response.getcode())
    self.assertTrue('Content-Type' in hello_response.info())
    self.assertEquals('application/dart', hello_response.info().get('Content-Type'))
Verifies that files of '.dart' extension are served with MIME type 'application/dart'.
625941b491f36d47f21ac2d1
def is_email_exist(email):
    """Return True when a user row with *email* exists.

    Uses a parameterized query instead of string interpolation so that
    *email* (untrusted input) cannot inject SQL.
    """
    # NOTE(review): %s assumes a DB-API driver with 'format'/'pyformat'
    # paramstyle (MySQLdb, psycopg2); use '?' for sqlite3 — confirm driver.
    query = """SELECT * FROM users where email = %s"""
    cursor.execute(query, (email,))
    user = cursor.fetchone()
    return user is not None
check if email exists
625941b4b545ff76a8913bfd
def return_empty_preference_filter_state(request, shua_cookie, filters_key):
    """Always return an empty preference-filter state string."""
    empty_state = ''
    return empty_state
Returns an empty string.
625941b43eb6a72ae02ec2b6
def chcuro(self, message):
    """Amend the description of an existing customer routing code.

    Syntax: ``CHCURO customer routing;description``. Validates access,
    verifies the routing code exists, then updates its description
    (left unchanged when no new description is supplied).
    """
    if self.show_access(message, 'CHCURO customer routing;description', 'S') != 0:
        return
    curo, rc = self.extract_field(message, 0, 'CUSTOMER ROUTING CODE')
    if rc > 0:
        return
    data = (curo,)
    sql = 'select desc from routing where routing = ?'
    count, ds_routes = self.db_read(sql, data)
    if count < 0:
        return
    if count == 0:
        print('* CUSTOMER ROUTING CODE DOES NOT EXIST')
        return
    for row in ds_routes:
        routing = row[0]
    # Keep the existing description unless field 1 supplies a new one.
    value, rc = self.extract_field(message, 1, '')
    if rc == 0:
        routing = value
    data = (routing, curo)
    sql = 'update routing set desc = ? where routing = ?'
    if self.db_update(sql, data) != 0:
        return
    print('CUSTOMER ROUTING CHANGED SUCCESSFULLY')
    print(curo + routing)
    return
allows amendment of an existing routing code
625941b4c432627299f04a20
def test_check_answer():
    """check_answer() must reveal positions of a guessed char present in
    the word and leave the mask unchanged otherwise."""
    assert check_answer("word", "wo**", "r") == "wor*", "Incorrect check answer for existed char"
    assert check_answer("word", "wo**", "x") == "wo**", "Incorrect check answer for not existed char"
Test check_answer.
625941b40fa83653e4656d9b
def getRandombuilding():
    """Return a randomly generated building pair (upper and lower parts
    framing a vertical gap), positioned just off the right screen edge."""
    # The gap's top edge is drawn from the middle band of the playfield.
    gapY = random.randrange(0, int(BASEY * 0.6 - buildingGAPSIZE))
    gapY += int(BASEY * 0.2)
    buildingHeight = IMAGES['building'][0].get_height()
    buildingX = SCREENWIDTH + 10
    return [
        {'x': buildingX, 'y': gapY - buildingHeight},
        {'x': buildingX, 'y': gapY + buildingGAPSIZE},
    ]
returns a randomly generated building
625941b41f5feb6acb0c4931
def intersection(self, nums1, nums2):
    """Return the distinct values present in both lists.

    :type nums1: List[int]
    :type nums2: List[int]
    :rtype: List[int] (element order unspecified)
    """
    # A set intersection replaces the original's three Counter
    # constructions and a leftover debug print; O(len(nums1)+len(nums2)).
    return list(set(nums1) & set(nums2))
:type nums1: List[int] :type nums2: List[int] :rtype: List[int]
625941b4b7558d58953c4cf8
def lookups(self, request, model_admin):
    """Return (id, name) choices for the admin filter sidebar, limited
    to categories owned by the current user."""
    return Category.objects.filter(owner=request.user).values_list('id', 'name')
返回要展示的内容和查询用的id
625941b482261d6c526ab27f
def test_fdict_viewvalues():
    """Test fdict values()/viewvalues(): flat and nested access, with
    and without fastview, including the nodes/fullpath variants."""
    a = fdict({'a': {'b': 1, 'c': 2}, 'd': 3})
    assert set(a.values()) == set([1, 2, 3])
    assert set(a['a'].values()) == set([1, 2])
    a = fdict({'a': {'b': 1, 'c': 2, 'e': {'f': 4}}, 'd': 3}, fastview=True)
    assert set(a.values()) == set([1, 2, 3, 4])
    assert set(a['a'].values()) == set([1, 2, 4])
    v1 = list(a.values(nodes=True))
    assert set(['a/e/f']) in v1 and set(['a/b', 'a/c', 'a/e/']) in v1
    v2 = list(a['a'].values(nodes=True))
    assert set(['e/f']) in v2
    v3 = list(a['a'].values(nodes=True, fullpath=True))
    assert set(['a/e/f']) in v3
Test fdict viewvalues()
625941b4004d5f362079a114
def value_iteration(self, T, method="linear", features="random"):
    """Run T sweeps of fitted value iteration over the sample states.

    Each sweep fits a regressor (linear or random forest) to the
    current value estimates, then for every sample state evaluates a
    ring of candidate actions via Monte-Carlo transition samples and
    keeps the action with the best expected reward plus discounted
    value.

    NOTE(review): relies on module-level globals (N, gamma, dist, fine,
    s_fine, noise_mat, d, total, sample_circle, norm_pdf_multivariate)
    — confirm their definitions elsewhere in the module.

    :param T: number of value-iteration sweeps.
    :param method: "linear" or "forest" function approximator.
    :param features: "random" (sampled thetas) or exact theta features.
    :return: (values, policy) — value per sample state and the best
        action displacement per state.
    """
    v_ti = np.zeros((N))
    v_t1 = np.zeros((N))
    policy = [0 for i in range(N)]
    x_mat = np.concatenate((self.sample_states, self.thetas), axis=1)
    for t in range(T):
        if method=="linear":
            clf = linear_model.LinearRegression()
            clf.fit(x_mat, v_ti)
            coeffs = clf.coef_
            intercept = clf.intercept_
        elif method=="forest":
            clf = RandomForestRegressor(max_depth=d * total)
            clf.fit(x_mat, v_ti)
        for i in range(N):
            p = self.sample_states[i]
            actions = sample_circle(p, dist, fine)
            max_val = -1000
            max_action = (0, 0)
            for a in actions:
                # Monte-Carlo sample of next states under Gaussian noise.
                s_p = np.random.multivariate_normal(a, noise_mat, s_fine)
                if features=="random":
                    sp_t = self.draw_y_th(s_p)
                else:
                    sp_t = self.find_all_thetas(s_p)
                feature_mat = np.concatenate((s_p, sp_t), axis=1)
                if method=="linear":
                    vals = np.transpose((np.dot(feature_mat, coeffs) + intercept) * gamma)
                elif method=="forest":
                    vals = clf.predict(feature_mat) * gamma
                probs = []
                for st in s_p:
                    probs.append(norm_pdf_multivariate(np.array(st), np.array(a), noise_mat))
                probs = np.array(probs)
                const = np.sum(probs)
                rewards = np.transpose(self.get_rewards(s_p))
                # Density-weighted expectation, normalised by total density.
                u_val = np.dot(probs, rewards) + np.dot(probs, vals)
                sp_val = u_val / const
                if sp_val > max_val:
                    max_val = sp_val
                    max_action = np.array(a) - np.array(p)
            v_t1[i] = max_val
            policy[i] = max_action
        v_ti = list(v_t1)
    return v_ti, policy
run value iteration over each of the sample points
625941b4be7bc26dc91cd3e3
def get_soup_js(url): <NEW_LINE> <INDENT> session = dryscrape.Session() <NEW_LINE> session.visit(url) <NEW_LINE> response = session.body() <NEW_LINE> soup = bs4.BeautifulSoup(response) <NEW_LINE> return soup
Returns soup from javascript-rendered webpage str -> soup
625941b4796e427e537b039e
def _get_function(col, band, option, renamed=False): <NEW_LINE> <INDENT> band_options = col.bitOptions(renamed) <NEW_LINE> f = lambda img: img <NEW_LINE> if band in band_options: <NEW_LINE> <INDENT> bit_options = band_options[band] <NEW_LINE> if option in bit_options: <NEW_LINE> <INDENT> f = lambda img: col.applyMask(img, band, [option], renamed=renamed) <NEW_LINE> <DEDENT> <DEDENT> return f
Get mask function for given band and option
625941b45fc7496912cc3762
def create_user_bet(user_id, bet_id, deposit): <NEW_LINE> <INDENT> user_bet = UserBet(user_id=user_id, bet_id=bet_id, deposit=deposit) <NEW_LINE> session.add(user_bet) <NEW_LINE> session.commit() <NEW_LINE> return user_bet
Creates a currency for the user. Returns the created currency or None if not exists.
625941b4f8510a7c17cf94e2
def get_model_params(model_name): <NEW_LINE> <INDENT> switcher = { "SVR": lambda: get_SVR_params(), "MLP": lambda: get_MLP_params(), "Random Forest": lambda: get_Random_Forest_params() } <NEW_LINE> return [switcher.get(model_name, None)()]
get the model params :param model_name: model name :return: model params
625941b4627d3e7fe0d68c2a
def roll_dice(self, hold_list): <NEW_LINE> <INDENT> if self.roll_count < 3: <NEW_LINE> <INDENT> for dice in range(5): <NEW_LINE> <INDENT> if hold_list[dice] == 0: <NEW_LINE> <INDENT> self.roll_list[dice] = random.randint(1, 6) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.roll_count += 1
set the dice in the roll list to a random integer 1-6 only if the dice is not being held
625941b492d797404e303f67
def sound_fx(self, fx, param=[0, 0], fileID=None): <NEW_LINE> <INDENT> self.soundFxId = fx <NEW_LINE> if fileID is not None: <NEW_LINE> <INDENT> self.soundFileId = fileID <NEW_LINE> <DEDENT> for i, p in enumerate(param): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> self.soundParam1 = p <NEW_LINE> <DEDENT> elif i == 1: <NEW_LINE> <INDENT> self.soundParam2 = p <NEW_LINE> <DEDENT> <DEDENT> return self
Populates an action with a user specified sound effect and associated parameters. :param fx: :obj:`int` desired sound action :param param: [:obj:`int`] a list of up to 2 sound parameters :param fileID: :obj:`int` file ID of an audio file in the file system :returns: :obj:`PFxAction` self The details of specifying the sound **fx** and **param** items is described in detail in the ICD document. The **pfx.py** file contains convenient pre-defined constants for all of the sound effect types and parameter values. An example of using this method is as follows:: p = [EVT_SOUND_DUR_10S] a = PFxAction().sound_fx(EVT_SOUND_PLAY_DUR, p, 5) This specifies an action to playback an audio file with ID=5 for a fixed duration of 10 seconds.
625941b4f548e778e58cd358
def p_expression_2(p): <NEW_LINE> <INDENT> p[0] = BinaryOpNode(p[2], p[1], p[3]) <NEW_LINE> p[0].pos_info = get_pos(p, 0)
expression : simple_expression relop simple_expression
625941b44428ac0f6e5ba5d6
def get_resource_parts(self): <NEW_LINE> <INDENT> if not self.is_api_request(): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> parts_list = list(filter(lambda x: x.replace(' ', '') != '', self.path.split(API_PATH))) <NEW_LINE> if len(parts_list) <= 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return list(filter(lambda x: x.replace(' ', '') != '' and x[0] != '?', parts_list[0].split('/')))
Returns a list of resource parts: if URL is 'API_PATH/foo/bar' it returns ['foo', 'bar'] If not is a valid API_REQUEST, returns an empty list
625941b4a17c0f6771cbde31
def attach_error_data(error, in_runtime=False): <NEW_LINE> <INDENT> e_type, e_value, e_traceback = sys.exc_info() <NEW_LINE> tb = traceback.extract_tb(e_traceback)[1:] <NEW_LINE> error_data = ErrorData(e_type, e_value, tb, global_origin_info_map) <NEW_LINE> error_data.in_runtime = in_runtime <NEW_LINE> setattr(error, ERROR_DATA, error_data) <NEW_LINE> return error
Attachs error data about original source code information and traceback to an error. Args: error(Exception): An native error. in_runtime(bool): `error` is raised in runtime if in_runtime is True, otherwise in compile time Returns: An error attached data about original source code information and traceback.
625941b416aa5153ce362254
def __call__(self, name, *args, **kwargs): <NEW_LINE> <INDENT> if name in self: <NEW_LINE> <INDENT> self[name](*args, **kwargs)
manuel call of event Arguments: name name of event
625941b431939e2706e4cc4e
def __init__(self): <NEW_LINE> <INDENT> with open(Config._path_config, 'r') as file: <NEW_LINE> <INDENT> self.config = load(file)
Arquivo Config.JSON é lido.
625941b48c0ade5d55d3e79c
def build_report(scenarios): <NEW_LINE> <INDENT> for scenario in scenarios: <NEW_LINE> <INDENT> os.chdir(os.path.dirname(scenario)) <NEW_LINE> subprocess.call(['java', '-jar', CMDRunner_path, "--tool", "Reporter" ,"--generate-png", "response_times_vs_threads.png", '--input-jtl', r"{0}\response_times_vs_threads.jtl" .format(os.path.dirname(scenario)), '--plugin-type', 'TimesVsThreads', '--exclude-labels', 'Авторизация на сайте','--width', '1200', '--height', '900']) <NEW_LINE> subprocess.call(['java', '-jar', CMDRunner_path, "--tool", "Reporter" ,"--generate-png", "response_times_over_time.png", '--input-jtl', r"{0}\response_times_vs_threads.jtl" .format(os.path.dirname(scenario)), '--plugin-type', 'ResponseTimesOverTime', '--exclude-labels', 'Авторизация на сайте', '--width', '1200', '--height', '900']) <NEW_LINE> subprocess.call(['java', '-jar', CMDRunner_path, "--tool", "Reporter" ,"--generate-png", "response_codes_per_second.png", '--exclude-labels', 'Авторизация на сайте', '--input-jtl', r"{0}\response_times_vs_threads.jtl" .format(os.path.dirname(scenario)), '--plugin-type', 'ResponseCodesPerSecond', '--width', '1200', '--height', '900']) <NEW_LINE> subprocess.call(['java', '-jar', CMDRunner_path, "--tool", "Reporter" ,"--generate-csv", "summary.csv", '--input-jtl', r"{0}\response_times_vs_threads.jtl" .format(os.path.dirname(scenario)), '--plugin-type', 'AggregateReport'])
Строит отчеты из полученных данных
625941b47cff6e4e81117763
def getMarketUploadServer(group_id=None, main_photo=None, crop_x=None, crop_y=None, crop_width=None): <NEW_LINE> <INDENT> params = { 'group_id': group_id, 'main_photo': main_photo, 'crop_x': crop_x, 'crop_y': crop_y, 'crop_width': crop_width } <NEW_LINE> result = call('photos.getMarketUploadServer', **params) <NEW_LINE> return parse_response(result)
Returns the server address for market photo upload. https://vk.com/dev/photos.getMarketUploadServer
625941b47c178a314d6ef234
def eh_tabuleiro(arg): <NEW_LINE> <INDENT> return isinstance(arg, list) and len(arg) == 3 and all(isinstance(subli, list) and len(subli) == 3 for subli in arg) and arg[2][0] is None and all(eh_celula(arg[l][c]) for l in range(3) for c in range(3) if (l, c) != (2, 0))
eh_tabuleiro: universal --> booleano Indica se o argumento eh um tabuleiro verificando se eh uma lista de listas com celulas
625941b45166f23b2e1a4f35
def unsuscribe(self, pid=None): <NEW_LINE> <INDENT> if not pid: <NEW_LINE> <INDENT> pid = Process.current() <NEW_LINE> if not pid: <NEW_LINE> <INDENT> raise Exception("The current thread has no PID") <NEW_LINE> <DEDENT> <DEDENT> with self.__lock: <NEW_LINE> <INDENT> self.__suscribers.remove(pid)
Unsuscribe a process to stop receiving notifications.
625941b43539df3088e2e128
def repair(self): <NEW_LINE> <INDENT> self._push_queue() <NEW_LINE> Model.repair(self) <NEW_LINE> self.regenerate_constraints() <NEW_LINE> self.regenerate_variables()
Updates references to variables and constraints :return:
625941b48a43f66fc4b53e47
def option_repr(option, value): <NEW_LINE> <INDENT> if option == 'document_class': <NEW_LINE> <INDENT> if value is dict: <NEW_LINE> <INDENT> return 'document_class=dict' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'document_class=%s.%s' % (value.__module__, value.__name__) <NEW_LINE> <DEDENT> <DEDENT> if option in common.TIMEOUT_OPTIONS and value is not None: <NEW_LINE> <INDENT> return "%s=%s" % (option, int(value * 1000)) <NEW_LINE> <DEDENT> return '%s=%r' % (option, value)
Fix options whose __repr__ isn't usable in a constructor.
625941b4956e5f7376d70c59
def doUndo(self): <NEW_LINE> <INDENT> package = self.undo.undo() <NEW_LINE> self._undoPackageToState(package)
Restore undo package -> current state.
625941b4fff4ab517eb2f216
def Run(vm, target, connections=1, duration=60): <NEW_LINE> <INDENT> threads = min(connections, vm.num_cpus) <NEW_LINE> cmd = ('{wrk} --connections={connections} --threads={threads} ' '--duration={duration} ' '--timeout={timeout} ' '--script={script} {target}').format( wrk=WRK_PATH, connections=connections, threads=threads, script=_LUA_SCRIPT_PATH, target=target, duration=duration, timeout=_TIMEOUT) <NEW_LINE> stdout, _ = vm.RemoteCommand(cmd) <NEW_LINE> for variable, value, unit in _ParseOutput(stdout): <NEW_LINE> <INDENT> yield sample.Sample(variable, value, unit, metadata={'connections': connections, 'threads': threads, 'duration': duration})
Runs wrk against a given target. Args: vm: Virtual machine. target: URL to fetch. connections: Number of concurrent connections. duration: Duration of the test, in seconds. Yields: sample.Sample objects with results.
625941b44a966d76dd550de8
def gen_int_numbers(start_n, end_n): <NEW_LINE> <INDENT> for number in count(start_n): <NEW_LINE> <INDENT> if number > end_n: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield number
Функция, генерирующая целые числа, начиная с указанного
625941b4cdde0d52a9e52e0b
def terminal_paint(): <NEW_LINE> <INDENT> terminal = get_terminal_size() <NEW_LINE> width = terminal.columns - int(terminal.columns * .2) <NEW_LINE> height = width // 4 <NEW_LINE> canvas = [[' ' for i in range(width)] for k in range(height)] <NEW_LINE> brushes = ['#','*','^','~','0','/','=', '|', '-','_','$','¬','+','(',')','.',':','<','>'] <NEW_LINE> x = 0 <NEW_LINE> y = 0 <NEW_LINE> mode = 'MOVE' <NEW_LINE> inp = ' ' <NEW_LINE> prev = ' ' <NEW_LINE> brush_cursor = 0 <NEW_LINE> while inp != 'q': <NEW_LINE> <INDENT> if mode == 'MOVE': <NEW_LINE> <INDENT> canvas[y][x] = prev <NEW_LINE> <DEDENT> elif mode == 'ERASE': <NEW_LINE> <INDENT> canvas[y][x] = ' ' <NEW_LINE> <DEDENT> x,y,mode,canvas,brush_cursor, prev = evaluate_input(inp, x,y, mode,height, width,canvas,brushes, brush_cursor, prev) <NEW_LINE> clean_screen() <NEW_LINE> draw_brushes(brushes,(len(brushes)), brush_cursor) <NEW_LINE> draw_grid(canvas, width,mode) <NEW_LINE> inp = getch() <NEW_LINE> <DEDENT> clean_screen() <NEW_LINE> print("="*20+"\nGoodbye!\n"+"="*20)
Main loop. TODO - refactoring needed! Arguments: None Returns: None
625941b444b2445a33931e7e
def __init__(self, size_multiplier:int = 4, **kwargs): <NEW_LINE> <INDENT> self.size_multiplier = size_multiplier <NEW_LINE> super().__init__(**kwargs)
TODO: docstring
625941b4a8370b771705267f
def get_profile(user): <NEW_LINE> <INDENT> if user.is_anonymous(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return user.get_profile() <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> return None
Rather than throw an error on get_profile, we just return None. Makes handling of anonymous users in non-loggedin areas easier.
625941b466673b3332b91e75
def __init__(self, unit_title, template, stack_config, cf_origins_config, cf_cache_behavior_config, cf_distribution_config): <NEW_LINE> <INDENT> self.stack_config = stack_config <NEW_LINE> super(CFDistributionUnit, self).__init__(title=unit_title, template=template, cf_origins_config=cf_origins_config, cf_cache_behavior_config=cf_cache_behavior_config, cf_distribution_config=cf_distribution_config)
Create a Cloudfront Distribution as a unit, part of an integrated stack :param unit_title: title of the API Gateway as part of an integrated stack :param template: troposphere template :param stack_config: shared stack configuration object to store generated API Gateway endpoint :param cf_origins_config: A list of CFOriginsConfig objects :param cf_cache_behavior_config: A list of CFCacheBehavior objects :param cf_distribution_config: A CFDistributionConfig object
625941b494891a1f4081b885
def window(idf, fsdobject, deletebsd=True, setto000=False): <NEW_LINE> <INDENT> if fsdobject.Surface_Type.upper() == "WINDOW": <NEW_LINE> <INDENT> simpleobject = idf.newidfobject("WINDOW") <NEW_LINE> simpleobject.Name = fsdobject.Name <NEW_LINE> simpleobject.Construction_Name = fsdobject.Construction_Name <NEW_LINE> simpleobject.Building_Surface_Name = fsdobject.Building_Surface_Name <NEW_LINE> simpleobject.Shading_Control_Name = fsdobject.Shading_Control_Name <NEW_LINE> simpleobject.Frame_and_Divider_Name = fsdobject.Frame_and_Divider_Name <NEW_LINE> simpleobject.Multiplier = fsdobject.Multiplier <NEW_LINE> surforigin = fsdorigin(fsdobject, setto000=setto000) <NEW_LINE> simpleobject.Starting_X_Coordinate = surforigin[0] <NEW_LINE> simpleobject.Starting_Z_Coordinate = surforigin[1] <NEW_LINE> simpleobject.Length = fsdobject.width <NEW_LINE> simpleobject.Height = fsdobject.height <NEW_LINE> if deletebsd: <NEW_LINE> <INDENT> idf.removeidfobject(fsdobject) <NEW_LINE> <DEDENT> return simpleobject <NEW_LINE> <DEDENT> return None
return an window object if the fsd (fenestrationsurface:detailed) is a window
625941b4dd821e528d63af89
def __init__(self, pin_type, clk_pin, data_pin, port="/dev/ttyACM0", debug=False): <NEW_LINE> <INDENT> self.board = PyMata(port, bluetooth=False, verbose=debug) <NEW_LINE> self.i2c = self.board.i2c_config(0, pin_type, clk_pin, data_pin)
:param pin_type: DIGITAL 或者 ANALOG :param clk_pin: 时钟总线接入的针脚 :param data_pin: 数据总线接入的针脚 :param port: 虚谷连接I2C设备的COM口,默认为"/dev/ttyACM0" :param debug: 当为True的时候,会输出debug信息
625941b46aa9bd52df036b80
def set_SiteID(self, value): <NEW_LINE> <INDENT> super(SegmentationForVisitorsTodayViaSearchInputSet, self)._set_input('SiteID', value)
Set the value of the SiteID input for this Choreo. ((required, integer) Your request must include the site's ID that you want to access data from. Available from your site preferences page.)
625941b4d10714528d5ffabc
def validate_actions_or_conditionals_list(self, actionOrConditionalList, targetName): <NEW_LINE> <INDENT> for statement in actionOrConditionalList: <NEW_LINE> <INDENT> if self.is_valid_action_or_conditional_statement(statement.strip(), targetName) == False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Validates if the actions or conditionals in the actionOrConditionalList is valid. i.e target goblin: roll1 = d20 if golbin.hp >4 then attack - it will validate these statements individually [roll1 = d20, if golbin.hp >4 then attack] Returns: Bool - true if all the actions and conditionals in the actionOrConditionalList is true, false otherwise
625941b4d99f1b3c44c6737d
def create_app(debug=False): <NEW_LINE> <INDENT> app = Flask(__name__, static_folder='./resources') <NEW_LINE> app.config['SECRET_KEY'] = 'RYVl4Fg3n1JLDaxWyr1m' <NEW_LINE> app.config['MONGO_DBNAME'] = 'chirp' <NEW_LINE> app.config['USERS_COLLECTION'] = 'accounts' <NEW_LINE> app.config['MONITORS_COLLECTION'] = 'monitors' <NEW_LINE> app.config['ARTICLES_COLLECTION'] = 'articles' <NEW_LINE> app.config['GLOBAL_COLLECTION'] = 'global' <NEW_LINE> login_manager.init_app(app) <NEW_LINE> mongo.init_app(app) <NEW_LINE> app.config.update( CELERY_BROKER_URL='redis://localhost:6379', CELERY_RESULT_BACKEND='redis://localhost:6379', CELERYBEAT_SCHEDULE={ 'process_all_rss': { 'task': 'process_all_rss', 'schedule': crontab(minute='*/15') } } ) <NEW_LINE> celery.conf.update(app.config) <NEW_LINE> from .core import core as core_blueprint <NEW_LINE> app.register_blueprint(core_blueprint) <NEW_LINE> app.register_error_handler(404, page_not_found) <NEW_LINE> app.register_error_handler(500, server_error) <NEW_LINE> return app
Create an application context with blueprints.
625941b42eb69b55b151c687
@main.route('/event/<event_id>', methods=['GET']) <NEW_LINE> def event_detail(event_id): <NEW_LINE> <INDENT> guests = Guest.query.all() <NEW_LINE> given_id = request.form.get('event_id') <NEW_LINE> this_event = Event.query.filter_by(id=given_id) <NEW_LINE> return render_template('event_detail.html', this_event=this_event, guests=guests)
Show a single event.
625941b4dc8b845886cb5312
def _CreateAndAddToSelf(self, op_type, inputs, outputs=None, **kwargs): <NEW_LINE> <INDENT> inputs = _RectifyInputOutput(inputs) <NEW_LINE> for input in inputs: <NEW_LINE> <INDENT> if not self.BlobIsDefined(input): <NEW_LINE> <INDENT> assert input.Net() != self <NEW_LINE> self.AddExternalInput(input) <NEW_LINE> <DEDENT> <DEDENT> if outputs is None: <NEW_LINE> <INDENT> outputs = self.NextName(prefix=op_type) <NEW_LINE> <DEDENT> elif type(outputs) is int: <NEW_LINE> <INDENT> outputs = [ self.NextName(prefix=op_type, output_id=i) for i in range(outputs)] <NEW_LINE> <DEDENT> outputs = _RectifyInputOutput(outputs, net=self) <NEW_LINE> op = CreateOperator(op_type, inputs, outputs, **kwargs) <NEW_LINE> self._ExtendOps([op]) <NEW_LINE> workspace.operator_tracebacks[self.Name()][ len(self._net.op) - 1] = _extract_stacktrace() <NEW_LINE> if len(op.output) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif len(op.output) == 1: <NEW_LINE> <INDENT> return BlobReference(op.output[0], self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return tuple(BlobReference(o, self) for o in op.output)
A helper function to create an operator and add it to self.
625941b47d43ff24873a2a81
def test_bt_6lowpan_be_ssh(self): <NEW_LINE> <INDENT> self.bt1.connect_6lowpan_ble(self.bt2) <NEW_LINE> self.bt2.bt0_ssh_check(self.bt1.get_bt0_ip())
Setup two devices with BLE, and remote ssh to self
625941b4be7bc26dc91cd3e4
def test_event_is_relayed(self): <NEW_LINE> <INDENT> pyherc.vtable['\ufdd0:move'](character=self.character, direction=Direction.east) <NEW_LINE> verify(self.listener).receive_event(EventType('move'))
Test that moving will create an event and send it forward
625941b476d4e153a657e90e
def summarize(self): <NEW_LINE> <INDENT> pass
Print a summary of the contents of this object.
625941b48e71fb1e9831d58b
def backward(AL, Y, caches): <NEW_LINE> <INDENT> grads = {} <NEW_LINE> L = len(caches) <NEW_LINE> m = AL.shape[1] <NEW_LINE> Y = Y.reshape(AL.shape) <NEW_LINE> dAL = - (np.divide(Y, AL) - np.divide(1-Y, 1-AL)) <NEW_LINE> current_cache = caches[L-1] <NEW_LINE> grads["dA" + str(L)], grads["dW" + str(L)], grads["db" + str(L)] = linear_activation_backward(dAL, current_cache, activation="sigmoid") <NEW_LINE> for l in reversed(range(L-1)): <NEW_LINE> <INDENT> current_cache = caches[l] <NEW_LINE> dA_prev_temp, dW_temp, db_temp = linear_activation_backward(grads["dA" + str(l+2)], current_cache, activation="relu") <NEW_LINE> grads["dA" + str(l+1)] = dA_prev_temp <NEW_LINE> grads["dW" + str(l+1)] = dW_temp <NEW_LINE> grads["db" + str(l+1)] = db_temp <NEW_LINE> <DEDENT> return grads
Implement the backward propagation for the [LINEAR->RELU] * (L-1) -> LINEAR -> SIGMOID group Arguments: AL -- probability vector, output of the forward propagation (L_model_forward()) Y -- true "label" vector (containing 0 if non-cat, 1 if cat) caches -- list of caches containing: every cache of linear_activation_forward() with "relu" (there are (L-1) or them, indexes from 0 to L-2) the cache of linear_activation_forward() with "sigmoid" (there is one, index L-1) Returns: grads -- A dictionary with the gradients grads["dA" + str(l)] = ... grads["dW" + str(l)] = ... grads["db" + str(l)] = ...
625941b4498bea3a759b9890
def inverted(self): <NEW_LINE> <INDENT> return ~self
Get inverted copy of this color.
625941b4566aa707497f4359
def printMutation(mutation): <NEW_LINE> <INDENT> print(" ----- mutation ----- \n", " residue rmsd") <NEW_LINE> for key in mutation.keys(): <NEW_LINE> <INDENT> print(" %s %6.2lf" % (mutation[key]['label'], mutation[key]['rmsd']))
print out the mutation in formatted form
625941b430c21e258bdfa27a
def check_sgid_ref(self, value, path, parameters, resources): <NEW_LINE> <INDENT> matches = list() <NEW_LINE> allowed_types = [ 'AWS::SSM::Parameter::Value<AWS::EC2::SecurityGroup::Id>', 'AWS::EC2::SecurityGroup::Id' ] <NEW_LINE> if value in parameters: <NEW_LINE> <INDENT> parameter_properties = parameters.get(value) <NEW_LINE> parameter_type = parameter_properties.get('Type') <NEW_LINE> if parameter_type not in allowed_types: <NEW_LINE> <INDENT> path_error = ['Parameters', value, 'Type'] <NEW_LINE> message = "Security Group Id Parameter should be of type [{0}] for {1}" <NEW_LINE> matches.append( RuleMatch( path_error, message.format( ', '.join(map(str, allowed_types)), '/'.join(map(str, path_error))))) <NEW_LINE> <DEDENT> <DEDENT> if value in resources: <NEW_LINE> <INDENT> resource = resources.get(value, {}) <NEW_LINE> resource_type = resource.get('Type', "") <NEW_LINE> if resource_type != 'AWS::EC2::SecurityGroup': <NEW_LINE> <INDENT> message = "Security Group Id resources should be of type AWS::EC2::SecurityGroup for {0}" <NEW_LINE> matches.append( RuleMatch(path, message.format('/'.join(map(str, path))))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resource_properties = resource.get('Properties', {}) <NEW_LINE> vpc_property = resource_properties.get('VpcId', None) <NEW_LINE> if not vpc_property: <NEW_LINE> <INDENT> message = "Security Group Id should reference a VPC based AWS::EC2::SecurityGroup for {0}" <NEW_LINE> matches.append( RuleMatch(path, message.format('/'.join(map(str, path))))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return matches
Check ref for VPC
625941b4a79ad161976cbf23
def _get_label(self, label): <NEW_LINE> <INDENT> if self.is_training: <NEW_LINE> <INDENT> if label not in self._map_label_str2num: <NEW_LINE> <INDENT> label_number = len(self._map_label_str2num) <NEW_LINE> self._map_label_str2num[label] = label_number <NEW_LINE> self._map_label_num2str[label_number] = label <NEW_LINE> <DEDENT> return self._map_label_str2num[label] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if label in self._map_label_str2num: <NEW_LINE> <INDENT> return self._map_label_str2num[label] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1
Return the label in hot encoding format @type label: str @param label: label for which the label id is required @return: integer ID for given label
625941b491f36d47f21ac2d3
def format_template(template, context_dict={}): <NEW_LINE> <INDENT> current_time = datetime.datetime.now() <NEW_LINE> default_context = { "year": current_time.year, "platform": platform.system(), "hour": current_time.hour, "minute": current_time.minute, "second": current_time.second, "user": getpass.getuser(), "config_root": os.path.join(os.path.dirname(__file__), "config") } <NEW_LINE> version_number = context_dict.get("version") <NEW_LINE> if version_number: <NEW_LINE> <INDENT> if isinstance(version_number, int): <NEW_LINE> <INDENT> context_dict["version"] = "v{}".format("{}".format(version_number).zfill(3)) <NEW_LINE> <DEDENT> <DEDENT> default_context.update(context_dict) <NEW_LINE> try: <NEW_LINE> <INDENT> formated_template = template.format(**default_context) <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> formated_template = formated_template.format(**default_context) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> all_keys = {x[1:][:-1] for x in re.findall(r"\{[^\{, .]*\}", template)} <NEW_LINE> provided_keys = set(default_context.keys()) <NEW_LINE> missing_keys = all_keys.difference(provided_keys) <NEW_LINE> error_string = "Missing keys: {}".format(", ".join(list(missing_keys))) <NEW_LINE> raise KeyError(error_string) <NEW_LINE> <DEDENT> return formated_template
Takes a template string and replaces all tokens with values from context dictionary. Tokens in template string are in in {token_in_snake_case} format. The following tokens are provided by the function for you, so you don't have to provide them. If you provide these tokens, the default values are overridden. - year : current year, for example 2018 - platform : current platform, Windows, Darwin or Linux - hour : current hour, for example 01 - minute : current minute, for example 01 - second : current second, for example 01 - user : name of the current user, for example Jan :param template: the template string, containing tokens in {token_in_snake_case} format :param context_dict: dictionary containing all used tokens (or more) as keys and useful values. Example: {'asset_name': 'useful_value'} :return: the template string with all tokens replaced
625941b49b70327d1c4e0bb1
def get__equip_fragment_config(equip_id): <NEW_LINE> <INDENT> _equip_fragment_config = client_configs['equip_fragment_config'] <NEW_LINE> select_equip = lambda config: config['fragment1'] == equip_id <NEW_LINE> result = filter(select_equip, _equip_fragment_config) <NEW_LINE> if result: <NEW_LINE> <INDENT> return result[0] <NEW_LINE> <DEDENT> return None
获取装备碎片表格 ["fragment1","int"], #碎片ID ["fragment2","int"], #万能碎片ID ["num","int"], #所需碎片总数 ["equip","int"], #合成的装备ID
625941b426068e7796caeab6
def pluralize(word, pos=NOUN, gender=MALE, role=SUBJECT, custom={}): <NEW_LINE> <INDENT> w = word.lower().capitalize() <NEW_LINE> if pos == NOUN: <NEW_LINE> <INDENT> for a, b in plural_inflections: <NEW_LINE> <INDENT> if w.endswith(a): <NEW_LINE> <INDENT> return w[:-len(a)] + b <NEW_LINE> <DEDENT> <DEDENT> if w.startswith("ge"): <NEW_LINE> <INDENT> return w <NEW_LINE> <DEDENT> if w.endswith("gie"): <NEW_LINE> <INDENT> return w <NEW_LINE> <DEDENT> if w.endswith("e"): <NEW_LINE> <INDENT> return w + "n" <NEW_LINE> <DEDENT> if w.endswith("ien"): <NEW_LINE> <INDENT> return w[:-2] + "um" <NEW_LINE> <DEDENT> if w.endswith(("au", "ein", "eit", "er", "en", "el", "chen", "mus", u"tät", "tik", "tum", "u")): <NEW_LINE> <INDENT> return w <NEW_LINE> <DEDENT> if w.endswith(("ant", "ei", "enz", "ion", "ist", "or", "schaft", "tur", "ung")): <NEW_LINE> <INDENT> return w + "en" <NEW_LINE> <DEDENT> if w.endswith("in"): <NEW_LINE> <INDENT> return w + "nen" <NEW_LINE> <DEDENT> if w.endswith("nis"): <NEW_LINE> <INDENT> return w + "se" <NEW_LINE> <DEDENT> if w.endswith(("eld", "ild", "ind")): <NEW_LINE> <INDENT> return w + "er" <NEW_LINE> <DEDENT> if w.endswith("o"): <NEW_LINE> <INDENT> return w + "s" <NEW_LINE> <DEDENT> if w.endswith("a"): <NEW_LINE> <INDENT> return w[:-1] + "en" <NEW_LINE> <DEDENT> if w.endswith(("all", "and", "ang", "ank", "atz", "auf", "ock", "opf", "uch", "uss")): <NEW_LINE> <INDENT> umlaut = w[-3] <NEW_LINE> umlaut = umlaut.replace("a", u"ä") <NEW_LINE> umlaut = umlaut.replace("o", u"ö") <NEW_LINE> umlaut = umlaut.replace("u", u"ü") <NEW_LINE> return w[:-3] + umlaut + w[-2:] + "e" <NEW_LINE> <DEDENT> for a, b in ( ("ag", u"äge"), ("ann", u"änner"), ("aum", u"äume"), ("aus", u"äuser"), ("zug", u"züge")): <NEW_LINE> <INDENT> if w.endswith(a): <NEW_LINE> <INDENT> return w[:-len(a)] + b <NEW_LINE> <DEDENT> <DEDENT> return w + "e" <NEW_LINE> <DEDENT> return w
Returns the plural of a given word. The inflection is based on probability rather than gender and role.
625941b48e7ae83300e4adaa
def main(_): <NEW_LINE> <INDENT> metadata_dir = os.path.join(FLAGS.tfds_dir, 'testing', 'metadata') <NEW_LINE> _delete_metadata_dirs(metadata_dir)
Main script.
625941b466656f66f7cbbf88
def _close(self): <NEW_LINE> <INDENT> if self._type == 'term': <NEW_LINE> <INDENT> self._fp_std = None <NEW_LINE> self._fp_err = None <NEW_LINE> <DEDENT> elif self._type == 'file': <NEW_LINE> <INDENT> self._fp_std.close() <NEW_LINE> self._fp_err = None
Close the current log channel
625941b47c178a314d6ef235
def GetBarTabWidth(self, dc, wnd, label, bitmap, ideal=None, small_begin_need_separator=None, small_must_have_separator=None, minimum=None): <NEW_LINE> <INDENT> width = mini = 0 <NEW_LINE> if self._flags & RIBBON_BAR_SHOW_PAGE_LABELS and label.strip(): <NEW_LINE> <INDENT> dc.SetFont(self._tab_active_label_font) <NEW_LINE> width += dc.GetTextExtent(label)[0] <NEW_LINE> mini += min(30, width) <NEW_LINE> if bitmap.IsOk(): <NEW_LINE> <INDENT> width += 4 <NEW_LINE> mini += 2 <NEW_LINE> <DEDENT> <DEDENT> if self._flags & RIBBON_BAR_SHOW_PAGE_ICONS and bitmap.IsOk(): <NEW_LINE> <INDENT> width += bitmap.GetWidth() <NEW_LINE> mini += bitmap.GetWidth() <NEW_LINE> <DEDENT> ideal = width + 16 <NEW_LINE> small_begin_need_separator = mini <NEW_LINE> small_must_have_separator = mini <NEW_LINE> minimum = mini <NEW_LINE> return ideal, small_begin_need_separator, small_must_have_separator, minimum
Calculate the ideal and minimum width (in pixels) of a tab in a ribbon bar. :param `dc`: A device context to use when one is required for size calculations; :param `wnd`: The window onto which the tab will eventually be drawn; :param `label`: The tab's label (or wx.EmptyString if it has none); :param `bitmap`: The tab's icon (or wx.NullBitmap if it has none); :param `ideal`: The ideal width (in pixels) of the tab; :param `small_begin_need_separator`: A size less than the size, at which a tab separator should begin to be drawn (i.e. drawn, but still fairly transparent); :param `small_must_have_separator`: A size less than the size, at which a tab separator must be drawn (i.e. drawn at full opacity); :param `minimum`: A size less than the size, and greater than or equal to zero, which is the minimum pixel width for the tab.
625941b4091ae35668666d44
def test_get_random_range(self): <NEW_LINE> <INDENT> return <NEW_LINE> rand_seq = [] <NEW_LINE> hot_random = HotRandom() <NEW_LINE> for j in range( 0, 200 ): <NEW_LINE> <INDENT> rand_num = hot_random.hot_rand( 91, 37 ) <NEW_LINE> rand_seq.append( rand_num ) <NEW_LINE> <DEDENT> assert are_items_in_range( rand_seq, 91, 37 ) , "Got an out of range number" <NEW_LINE> rand_seq = [] <NEW_LINE> for j in range( 0, 200 ): <NEW_LINE> <INDENT> rand_num = hot_random.hot_rand( 19, 0 ) <NEW_LINE> rand_seq.append( rand_num ) <NEW_LINE> <DEDENT> assert are_items_in_range( rand_seq, 19, 0 ) , "Got an out of range number" <NEW_LINE> rand_seq = [] <NEW_LINE> for j in range( 0, 200 ): <NEW_LINE> <INDENT> rand_num = hot_random.hot_rand( 61, 4 ) <NEW_LINE> rand_seq.append( rand_num ) <NEW_LINE> <DEDENT> assert are_items_in_range( rand_seq, 61, 4 ) , "Got an out of range number"
Get a sequence of random numbers.
625941b47047854f462a11ec
def _filter_link_tag_data(self, source, soup, data, url): <NEW_LINE> <INDENT> link = FILTER_MAPS['link'][source] <NEW_LINE> html = soup.find_all('link', {link['key']: link['pattern']}) <NEW_LINE> for line in html: <NEW_LINE> <INDENT> data['images'].append({ 'src': urljoin(url, line.get('href')), 'type': link['type'], })
This method filters the web page content for link tags that match patterns given in the ``FILTER_MAPS`` :param source: The key of the meta dictionary in ``FILTER_MAPS['link']`` :type source: string :param soup: BeautifulSoup instance to find meta tags :type soup: instance :param data: The response dictionary to manipulate :type data: (dict) :param url: URL used for making an absolute url :type url: string
625941b494891a1f4081b886
def main(): <NEW_LINE> <INDENT> logging.basicConfig(level=logging.INFO) <NEW_LINE> parser = argparse.ArgumentParser(description=__doc__) <NEW_LINE> parser.add_argument( 'old', metavar='OLD', nargs=1, help='file or URL to a .tar.gz tarball for the old HDFS version') <NEW_LINE> parser.add_argument( 'new', metavar='NEW', nargs=1, help='file or URL to a .tar.gz tarball for the new HDFS version') <NEW_LINE> parser.add_argument('--scratch', metavar='DIR', default='', help='set scratch directory') <NEW_LINE> global ARGS <NEW_LINE> ARGS = parser.parse_args() <NEW_LINE> sys.argv = sys.argv[:1] <NEW_LINE> old = ARGS.old[0] <NEW_LINE> new = ARGS.new[0] <NEW_LINE> if not old.endswith('.tar.gz'): <NEW_LINE> <INDENT> logging.error('Must specify .tar.gz file or URL for old release') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> if not new.endswith('.tar.gz'): <NEW_LINE> <INDENT> logging.error('Must specify .tar.gz file or URL for new release') <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> check_envs() <NEW_LINE> if not os.path.exists(get_scratch_dir()): <NEW_LINE> <INDENT> os.makedirs(get_scratch_dir()) <NEW_LINE> <DEDENT> with cd(get_scratch_dir()): <NEW_LINE> <INDENT> extract_tarball(old, '.') <NEW_LINE> extract_tarball(new, '.') <NEW_LINE> MultipleHdfsClusterTestBase.OLD_HADOOP_HOME = os.path.join(get_scratch_dir(), splitext(os.path.basename(old))[0]) <NEW_LINE> MultipleHdfsClusterTestBase.NEW_HADOOP_HOME = os.path.join(get_scratch_dir(), splitext(os.path.basename(new))[0]) <NEW_LINE> unittest.main()
Test API compability between two hadoop versions.
625941b46fece00bbac2d518