Columns: code (string, 4 to 4.48k chars) | docstring (string, 1 to 6.45k chars) | _id (string, 24 chars)
def vcsLog(self, name): <NEW_LINE> <INDENT> isFile = os.path.isfile(name) <NEW_LINE> noEntries, ok = QInputDialog.getInt( None, self.tr("Subversion Log"), self.tr("Select number of entries to show."), self.getPlugin().getPreferences("LogLimit"), 1, 999999, 1) <NEW_LINE> if ok: <NEW_LINE> <INDENT> from .SvnLogDialog import SvnLogDialog <NEW_LINE> self.log = SvnLogDialog(self, isFile=isFile) <NEW_LINE> self.log.show() <NEW_LINE> self.log.raise_() <NEW_LINE> self.log.start(name, noEntries)
Public method used to view the log of a file/directory from the Subversion repository. @param name file/directory name to show the log of (string)
625941b4a934411ee3751468
def rmatmat(self, x): <NEW_LINE> <INDENT> return self.__H.__mul__(x)
Matrix (batch) product with the conjugate transpose. This method is included for compatibility with Scipy only. Please use the ``H`` attribute instead.
625941b416aa5153ce362244
def redundant_entry(entry): <NEW_LINE> <INDENT> global entry_set, word_set <NEW_LINE> word_lst = list(word_set[entry]) <NEW_LINE> if not word_lst: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> u_set = entry_set[word_lst[0]] <NEW_LINE> for word in word_lst[1:]: <NEW_LINE> <INDENT> e_set = entry_set[word] <NEW_LINE> u_set = u_set & e_set <NEW_LINE> <DEDENT> for e in u_set: <NEW_LINE> <INDENT> if word_set[e] > word_set[entry]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
Whether some other entry generates all words this one does and more
625941b4f548e778e58cd348
def multiply_sigmoid_06(x1, x2): <NEW_LINE> <INDENT> expo = 0.6 <NEW_LINE> return signed_expo(x1 * x2, expo)
Computes the function y_i <- sign(x1_i * x2_i) * abs(x1_i * x2_i)^0.6 .
625941b4099cdd3c635f0a29
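The helper signed_expo is referenced but not shown in this row; a minimal sketch consistent with the docstring above (an assumption, not the original implementation):

import numpy as np

def signed_expo(x, expo):
    # Raise |x| to the given power while preserving the sign of x.
    return np.sign(x) * np.abs(x) ** expo

print(signed_expo(-4.0, 0.6))  # -(4 ** 0.6) ~ -2.297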
def pure(x): <NEW_LINE> <INDENT> def parser(state): <NEW_LINE> <INDENT> return State(state.value + [x], state.remaining) <NEW_LINE> <DEDENT> return parser
A parser which simply inserts a new value into the parse result, but does not consume any input.
625941b430dc7b7665901738
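A short usage sketch, assuming State is the simple (value, remaining) container the code implies:

from collections import namedtuple

State = namedtuple('State', ['value', 'remaining'])  # assumed shape

parser = pure(42)
print(parser(State([], 'abc')))
# State(value=[42], remaining='abc') -- no input is consumed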
def getTotalArea(inputList, getFrom="coords", type="inside", R=1*u.m, units=u.m**2, angleUnits=u.rad): <NEW_LINE> <INDENT> if getFrom == "Phi": <NEW_LINE> <INDENT> Phi23_1, Phi31_2, Phi12_3 = inputList <NEW_LINE> <DEDENT> elif getFrom == "coords": <NEW_LINE> <INDENT> a1, a2, a3, d1, d2, d3 = inputList <NEW_LINE> Phi23_1 = getRotationAngle([a2, a3, a1, d2, d3, d1], getFrom="coords", type=type, units=angleUnits) <NEW_LINE> Phi31_2 = getRotationAngle([a3, a1, a2, d3, d1, d2], getFrom="coords", type=type, units=angleUnits) <NEW_LINE> Phi12_3 = getRotationAngle([a1, a2, a3, d1, d2, d3], getFrom="coords", type=type, units=angleUnits) <NEW_LINE> <DEDENT> if angleUnits is not None: <NEW_LINE> <INDENT> piUse = pi * u.rad <NEW_LINE> Phi23_1 = Phi23_1.to(u.rad) <NEW_LINE> Phi31_2 = Phi31_2.to(u.rad) <NEW_LINE> Phi12_3 = Phi12_3.to(u.rad) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> piUse = pi <NEW_LINE> <DEDENT> Omega_ti = R**2 * (Phi23_1 + Phi31_2 + Phi12_3 - piUse) <NEW_LINE> Omega_t = Omega_ti <NEW_LINE> if units is not None: <NEW_LINE> <INDENT> Omega_t = Omega_t.to(units, equivalencies=u.dimensionless_angles()) <NEW_LINE> <DEDENT> return Omega_t
Function to find the total area of a spherical triangle :param inputList: List of possible inputs. If getFrom is "coords" the list is [a1, a2, a3, d1, d2, d3] where ai and di are the alpha and delta of point Pi. If getFrom is "Phi" the list is [Phi23_1, Phi31_2, Phi12_3] where Phiij_k is the rotation angle :param getFrom: String ("coords" or "Phi") defining what type of inputList is used :param type: String ("inside" or "outside") determining whether the inside or outside triangle area is to be found :param R: Radius of the sphere. Default is the unit sphere R=1*u.m. Must be input without astropy units if None is used for the other units :param units: Output units. Set to None if input units are also None :param angleUnits: Units to use for angles. Should be u.rad or None :return Omega_t: The spherical triangle area
625941b45fc7496912cc3752
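The return value follows Girard's theorem: the area of a spherical triangle is R**2 times its spherical excess (sum of interior angles minus pi). A standalone sanity check on the unit-sphere octant, independent of the helpers above:

import numpy as np

# Octant of the unit sphere: three right angles, excess = 3*pi/2 - pi = pi/2.
R = 1.0
angles = [np.pi / 2, np.pi / 2, np.pi / 2]
area = R**2 * (sum(angles) - np.pi)
print(np.isclose(area, 4 * np.pi / 8))  # True: one eighth of the sphere's area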
def initGui(self): <NEW_LINE> <INDENT> icon_path = ':/plugins/NetworkProfiler/icon.png' <NEW_LINE> self.add_action( icon_path, text=self.tr(u'Network Profiler'), callback=self.run, parent=self.iface.mainWindow())
Create the menu entries and toolbar icons inside the QGIS GUI.
625941b47b25080760e39228
def send(self, message): <NEW_LINE> <INDENT> self.data['data']['alert'] = message <NEW_LINE> r = post(self.url, data=dumps(self.data), headers=self.headers) <NEW_LINE> return r.text, r.status_code
:return: Return message, Status code
625941b4d58c6744b4257a2d
def _clean_array(self, value): <NEW_LINE> <INDENT> return shlex.split(value.strip('()'))
Pythonize a bash array
625941b41f037a2d8b945fcc
@receiver(post_save, sender=User) <NEW_LINE> def create_subscriber_for_user(sender, **kwargs): <NEW_LINE> <INDENT> user = kwargs.get('instance') <NEW_LINE> created = kwargs.get('created') <NEW_LINE> raw = kwargs.get('raw') <NEW_LINE> logging.debug('user is %r' % user) <NEW_LINE> if created and not raw: <NEW_LINE> <INDENT> Subscriber.objects.get_or_create_for_user(user)
Create a Subscriber object whenever a user is created. This is useful so that we don't have to patch whatever different registration processes we end up using.
625941b4a4f1c619b28afe10
@dbmanager.command <NEW_LINE> def recreate(): <NEW_LINE> <INDENT> drop() <NEW_LINE> create()
Recreates database tables (same as issuing 'drop' and then 'create')
625941b4ab23a570cc24ff50
def __init__(self): <NEW_LINE> <INDENT> self.set = [False] * 1000000
Initialize your data structure here.
625941b4e1aae11d1e749a81
def get_locales(self, package_name): <NEW_LINE> <INDENT> self._analyse() <NEW_LINE> for locale in self.values[package_name].keys(): <NEW_LINE> <INDENT> yield locale
Retrieve a list of all available locales in a given package_name. :param package_name: the package name to get locales of
625941b4e8904600ed9f1cf6
def test_save_POST(self): <NEW_LINE> <INDENT> response = self.client.post('/', data={'item_text': 'A new list item'}) <NEW_LINE> self.assertEqual(Item.objects.count(), 1) <NEW_LINE> new_item = Item.objects.first() <NEW_LINE> self.assertEqual(new_item.text, 'A new list item') <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> self.assertEqual(response['location'], '/')
Unit test checking that the home page can save a POST request.
625941b48a43f66fc4b53e37
def _updateLabel(self, isLeft): <NEW_LINE> <INDENT> lbl = self._label <NEW_LINE> if self._idx != None: <NEW_LINE> <INDENT> if lbl == None: <NEW_LINE> <INDENT> bw = _BASE_WIDTH <NEW_LINE> num = self._partner_virtual_helix.number() <NEW_LINE> tbr = _FM.tightBoundingRect(str(num)) <NEW_LINE> half_label_h = tbr.height()/2.0 <NEW_LINE> half_label_w = tbr.width()/2.0 <NEW_LINE> labelX = bw/2.0 - half_label_w <NEW_LINE> if self._is_on_top: <NEW_LINE> <INDENT> labelY = -0.25*half_label_h - 0.5 - 0.5*bw <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labelY = 2*half_label_h + 0.5 + 0.5*bw <NEW_LINE> <DEDENT> labelXoffset = 0.25*bw if isLeft else -0.25*bw <NEW_LINE> labelX += labelXoffset <NEW_LINE> if num == 1: labelX -= half_label_w/2.0 <NEW_LINE> lbl = QGraphicsSimpleTextItem(str(num), self) <NEW_LINE> lbl.setPos(labelX, labelY) <NEW_LINE> lbl.setBrush(_ENAB_BRUSH) <NEW_LINE> lbl.setFont(_toHelixNumFont) <NEW_LINE> self._label = lbl <NEW_LINE> <DEDENT> lbl.setText( str(self._partner_virtual_helix.number()) )
Called by updatePositionAndAppearance during init, or later by updateConnectivity. Updates drawing and position of the label.
625941b48a349b6b435e7f42
def __div__(self, rhs): <NEW_LINE> <INDENT> return Duration(self._frame, self._seconds / rhs)
Divide a Duration by a value. = INPUT VARIABLES - rhs The scalar to divide by. = RETURN VALUE - Returns the scaled Duration.
625941b4925a0f43d2549c40
def origin_aliases(): <NEW_LINE> <INDENT> warnings.warn( "origin_aliases is deprecated - please use load_data instead", DeprecationWarning, ) <NEW_LINE> return [PathAlias(**elem) for elem in DATA["origin_alias"]]
Returns: list[:class:`~PathAlias`] A list of aliases relating to origin paths.
625941b4cdde0d52a9e52dfb
def parse_mentoring(xml): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for deal in xml: <NEW_LINE> <INDENT> if xml_text(xml_find("category-id", deal)) == MENTORING_DEAL_ID: <NEW_LINE> <INDENT> result.append((xml_text(xml_find("party-id", deal)), [xml_text(xml_find("id", party)) for party in xml_find("parties", deal)])) <NEW_LINE> <DEDENT> <DEDENT> return dict(result)
Returns a dictionary specifying all mentoring relationships. The key is the person id of the mentor and the value is a list of all person ids of the mentees.
625941b4dc8b845886cb5301
def map_values(e): <NEW_LINE> <INDENT> return col(MapValues(ensure_column(e)))
:rtype: Column
625941b46aa9bd52df036b6f
def manage_addCdefGraphPoint(context, id, REQUEST = None): <NEW_LINE> <INDENT> gp = CdefGraphPoint(id) <NEW_LINE> context._setObject(gp.id, gp) <NEW_LINE> if REQUEST: <NEW_LINE> <INDENT> return context.callZenScreen(REQUEST)
This is here so that Zope will let us copy/paste/rename graphpoints.
625941b4566aa707497f4349
def _GetHighlighterParams(self, parsed_url): <NEW_LINE> <INDENT> query_dict = urlparse.parse_qs(parsed_url.query) <NEW_LINE> return (query_dict.get('highlighter', ['pygments'])[0], query_dict.get('style', ['colorful'])[0])
Get the highlighting parameters from a parsed url.
625941b4de87d2750b85fb5b
def _do_partial_hook(self, cr, uid, ids, context, *args, **kwargs): <NEW_LINE> <INDENT> if isinstance(ids, (int, long)): <NEW_LINE> <INDENT> ids = [ids] <NEW_LINE> <DEDENT> move = kwargs.get('move') <NEW_LINE> assert move, 'missing move' <NEW_LINE> partial_datas = kwargs.get('partial_datas') <NEW_LINE> assert partial_datas, 'missing partial_datas' <NEW_LINE> defaults = super(stock_picking, self)._do_partial_hook(cr, uid, ids, context, *args, **kwargs) <NEW_LINE> location_id = partial_datas.get('move%s'%(move.id), {}).get('location_id') <NEW_LINE> if location_id: <NEW_LINE> <INDENT> defaults.update({'location_id': location_id}) <NEW_LINE> <DEDENT> return defaults
Hook to update the default data of the current object, which is stock.picking. The default data are taken from the _do_partial_hook on the stock_partial_picking osv_memory object used by the delivery wizard. For outgoing shipments.
625941b430bbd722463cbb90
def __sub__(self, left): <NEW_LINE> <INDENT> return (self.v - left.v*pow(radix,self.n-left.n,Mod))%Mod
Difference of prefixes to get the rolling hash of an interval
625941b4be7bc26dc91cd3d4
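A minimal sketch of the prefix-hash convention this subtraction assumes (the class definition is not shown): if v_n is the polynomial hash of the first n characters, then v_n - v_m * radix**(n-m) (mod Mod) is the hash of the slice s[m:n].

radix, Mod = 256, (1 << 61) - 1  # assumed constants

def prefix_hash(s):
    # Polynomial rolling hash: v = v * radix + ord(ch) (mod Mod).
    v = 0
    for ch in s:
        v = (v * radix + ord(ch)) % Mod
    return v

s, m, n = 'abcdef', 2, 6
interval = (prefix_hash(s[:n]) - prefix_hash(s[:m]) * pow(radix, n - m, Mod)) % Mod
print(interval == prefix_hash(s[m:n]))  # True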
def __getitem__(self, item: Identifier) -> Symbol: <NEW_LINE> <INDENT> return self.lookup[item]
Returns a symbol from this map
625941b4be383301e01b5263
def dump_logs(sess,splash_url,id_param): <NEW_LINE> <INDENT> sep = '-'*60 <NEW_LINE> for ru in sess.query(RedirectURL).all(): <NEW_LINE> <INDENT> if ru.access_logs: <NEW_LINE> <INDENT> tpl = ru.to_urltuple(splash_url,id_param) <NEW_LINE> m = f'- {yellow("Splash Link")}: {tpl.splash_link}\n' f'- {yellow("Redirect URL")}: {tpl.redirect_url}\n' f'- {yellow("Access Logs")}:\n\n' <NEW_LINE> counter = 0 <NEW_LINE> for al in ru.access_logs: <NEW_LINE> <INDENT> counter += 1 <NEW_LINE> m+=f'[{counter}][{al.time}] {al.source_ip}\n' <NEW_LINE> <DEDENT> m = m.strip() <NEW_LINE> print(sep+'\n'+m) <NEW_LINE> <DEDENT> <DEDENT> print(sep)
Dump logs in a simple format to stdout.
625941b43539df3088e2e119
def find_elements(self, *locators): <NEW_LINE> <INDENT> logger.debug('find elements %s by %s' % (locators[1], locators[0])) <NEW_LINE> return self.driver.find_elements(*locators)
Find web elements by UI locators :param locators: a tuple of UI locators :return: a list of web elements
625941b46e29344779a623e4
def CollapseDictionary(mapping): <NEW_LINE> <INDENT> def originalPrefixes(item): <NEW_LINE> <INDENT> return item.find('_')+1==1 <NEW_LINE> <DEDENT> revDict = {} <NEW_LINE> for k,v in mapping.items(): <NEW_LINE> <INDENT> revDict.setdefault(v,set()).add(k) <NEW_LINE> <DEDENT> prefixes2Collapse = [] <NEW_LINE> for k,v in revDict.items(): <NEW_LINE> <INDENT> origPrefixes=[] <NEW_LINE> dupePrefixes=[] <NEW_LINE> for rt,items in itertools.groupby(v,originalPrefixes): <NEW_LINE> <INDENT> if rt: <NEW_LINE> <INDENT> dupePrefixes.extend(items) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> origPrefixes.extend(items) <NEW_LINE> <DEDENT> <DEDENT> if origPrefixes and len(v) > 1 and len(dupePrefixes): <NEW_LINE> <INDENT> assert len(origPrefixes)==1 <NEW_LINE> prefixes2Collapse.extend(dupePrefixes) <NEW_LINE> <DEDENT> <DEDENT> return dict([(k,v) for k,v in mapping.items() if k not in prefixes2Collapse])
Takes a dictionary mapping prefixes to URIs and removes prefix mappings that begin with _ and there is already a map to their value >>> a = {'ex': URIRef('http://example.com/')} >>> a['_1'] = a['ex'] >>> len(a) 2 >>> a.values() [rdflib.URIRef('http://example.com/'), rdflib.URIRef('http://example.com/')] >>> CollapseDictionary(a) {'ex': rdflib.URIRef('http://example.com/')} >>> a {'ex': rdflib.URIRef('http://example.com/'), '_1': rdflib.URIRef('http://example.com/')}
625941b42eb69b55b151c677
def FSpecialGauss(size, sigma): <NEW_LINE> <INDENT> radius = size // 2 <NEW_LINE> offset = 0.0 <NEW_LINE> start, stop = -radius, radius + 1 <NEW_LINE> if size % 2 == 0: <NEW_LINE> <INDENT> offset = 0.5 <NEW_LINE> stop -= 1 <NEW_LINE> <DEDENT> x, y = np.mgrid[offset + start:stop, offset + start:stop] <NEW_LINE> assert len(x) == size <NEW_LINE> g = np.exp(-((x**2 + y**2) / (2.0 * sigma**2))) <NEW_LINE> return g / g.sum()
Function to mimic the 'fspecial' gaussian MATLAB function.
625941b4a17c0f6771cbde22
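A quick check of the function above: the kernel sums to 1 by construction.

import numpy as np

g = FSpecialGauss(5, 1.5)
print(g.shape)                   # (5, 5)
print(np.isclose(g.sum(), 1.0))  # True: the kernel is normalized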
def calcKernel(self): <NEW_LINE> <INDENT> k = [[0 for i in range(self.m)] for j in range(self.m)] <NEW_LINE> for i in range(self.m): <NEW_LINE> <INDENT> if i % 100 == 0: <NEW_LINE> <INDENT> print('construct the kernel:', i, self.m) <NEW_LINE> <DEDENT> X = self.trainDataMat[i, :] <NEW_LINE> for j in range(i, self.m): <NEW_LINE> <INDENT> Z = self.trainDataMat[j, :] <NEW_LINE> result = (X - Z) * (X - Z).T <NEW_LINE> result = np.exp(-1 * result / (2 * self.sigma**2)) <NEW_LINE> k[i][j] = result <NEW_LINE> k[j][i] = result <NEW_LINE> <DEDENT> <DEDENT> return k
Compute the kernel function. A Gaussian kernel is used; see "7.3.3 Common Kernel Functions", Eq. 7.90 :return: the Gaussian kernel matrix
625941b40a50d4780f666c5d
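The O(m^2) double loop above can be vectorized; a NumPy sketch of the same Gaussian (RBF) kernel, where X is an (m, d) data matrix and sigma the bandwidth:

import numpy as np

def gaussian_kernel(X, sigma):
    # Pairwise squared distances via ||x||^2 + ||z||^2 - 2*x.z, then the RBF.
    sq = np.sum(X**2, axis=1)
    d2 = sq[:, None] + sq[None, :] - 2 * X @ X.T
    return np.exp(-d2 / (2 * sigma**2))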
def test_timeout(self): <NEW_LINE> <INDENT> node = self.network.new_node('T') <NEW_LINE> cb = mock.Mock(side_effect=lambda: self.kill(node)) <NEW_LINE> self.network.set_timer(node.address, 0.01, cb) <NEW_LINE> self.network.run() <NEW_LINE> self.failUnless(cb.called)
Node's timeouts trigger at the appropriate time
625941b423849d37ff7b2e60
def modInv (a, n): <NEW_LINE> <INDENT> res = eucl(a, n)[0] <NEW_LINE> if res < 0: <NEW_LINE> <INDENT> res = a * modPow(res, 2, n) <NEW_LINE> <DEDENT> return res
Modular inverse (exists only if a and n are coprime).
625941b4656771135c3eb640
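For reference, Python 3.8+ computes the modular inverse natively (valid only when a and n are coprime), which gives a quick check for this function:

print(pow(3, -1, 7))  # 5, since 3 * 5 = 15 == 1 (mod 7)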
def polarU(A: tn.Tensor, pivot_axis: int = -1, mode: Text = "svd", Niter: int = 4) -> tn.Tensor: <NEW_LINE> <INDENT> A_mat = tn.pivot(A, pivot_axis=pivot_axis) <NEW_LINE> if mode == "svd": <NEW_LINE> <INDENT> W, _, Vh, _ = A.backend.svd(A_mat.array) <NEW_LINE> Umat_arr = W@Vh <NEW_LINE> <DEDENT> elif mode == "QDWH": <NEW_LINE> <INDENT> Umat_arr = polarU_QDWH(A_mat, Niter=Niter) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(f"Mode {mode} was invalid.") <NEW_LINE> <DEDENT> U_mat = tn.Tensor(Umat_arr, backend=A.backend) <NEW_LINE> U = U_mat.reshape(A.shape) <NEW_LINE> return U
Computes the isometric factor U in the polar decomposition, U = u @ vh where u and vh are the singular vector matrices in the SVD. Args: A: The input tensor. pivot_axis: Determines where to matricize A. mode: Algorithm used for the decomposition. See vumps_params. Niter: Maximum number of iteration allotted the QDWH algorithm. Returns: U: The decomposed tensor, reshaped to A.shape.
625941b485dfad0860c3ac26
def get_edi_version(): <NEW_LINE> <INDENT> project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) <NEW_LINE> git_dir = os.path.join(project_root, ".git") <NEW_LINE> if os.path.isdir(git_dir): <NEW_LINE> <INDENT> from setuptools_scm import get_version <NEW_LINE> return get_version(root=project_root) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return pkg_resources.get_distribution('edi').version <NEW_LINE> <DEDENT> except pkg_resources.DistributionNotFound: <NEW_LINE> <INDENT> return edi_fallback_version
Get the version of the current edi installation or the version derived from git. :return: full edi version string
625941b4be8e80087fb20a1d
def test_parser_failed_command(self): <NEW_LINE> <INDENT> def parsing_complete(command_failure): <NEW_LINE> <INDENT> response_dict = command_failure.value.results['response'] <NEW_LINE> self.assertEqual(response_dict['status'], 'error', 'The parser did not return an error response.') <NEW_LINE> self.assertTrue('submitted_command' in response_dict['result']) <NEW_LINE> self.assertEqual(response_dict['result']['submitted_command'], 'generate_error') <NEW_LINE> <DEDENT> test_deferred = self.command_parser.parse_command("{\"command\": \"generate_error\",\"destination\":\"test\"}", user_id="4") <NEW_LINE> test_deferred.addErrback(parsing_complete) <NEW_LINE> return test_deferred
This test verifies that the command parser correctly returns an error response when a command fails (i.e. raises an exception). In addition, it tests that the parser defaults to requiring an active user session if the command doesn't provide any meta-data. @note This test checks the "default to requiring an active session" condition successfully because the default mock session coordinator always returns 1 active mock session for any user ID.
625941b4aad79263cf390808
def __getitem__(self, k): <NEW_LINE> <INDENT> if self._data is not None: <NEW_LINE> <INDENT> return self._data[k] <NEW_LINE> <DEDENT> if isinstance(k, slice): <NEW_LINE> <INDENT> if k.start is not None: <NEW_LINE> <INDENT> start = int(k.start) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = None <NEW_LINE> <DEDENT> if k.stop is not None: <NEW_LINE> <INDENT> stop = int(k.stop) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stop = None <NEW_LINE> <DEDENT> if k.step: <NEW_LINE> <INDENT> raise TypeError("Stepping not supported") <NEW_LINE> <DEDENT> self.queryset.query.set_limits(start, stop) <NEW_LINE> return self.execute() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.queryset.query.set_limits(k, k+1) <NEW_LINE> return self.execute()
Support list index and slicing, similar to Django QuerySet.
625941b4d164cc6175782b1b
def get_widget_label(self, item_id): <NEW_LINE> <INDENT> return self._pattern.format(item_id + self._offset)
Get widget identifier for user input description. It should be something similar to the pattern. :param item_id: Position of the widget in the list. :type item_id: int starts from 0.
625941b4dd821e528d63af7a
def _load_items(self, splits): <NEW_LINE> <INDENT> ids = [] <NEW_LINE> for year, name in splits: <NEW_LINE> <INDENT> root = os.path.join(self._root, 'BDD' + str(year)) <NEW_LINE> lf = os.path.join(root, 'ImageSets', 'Main', name + '.txt') <NEW_LINE> with open(lf, 'r') as f: <NEW_LINE> <INDENT> ids += [(root, line.strip()) for line in f.readlines()] <NEW_LINE> <DEDENT> <DEDENT> return ids
Load individual image indices from splits.
625941b46aa9bd52df036b70
@responses.activate <NEW_LINE> def test_async_raise_for_status(): <NEW_LINE> <INDENT> responses.add(responses.GET, 'http://baseurl.com/v1/thing/777', body='{"error": "Not found."}', status=404, content_type='application/json') <NEW_LINE> future = DummyService.async_thing_detail(id=777) <NEW_LINE> with pytest.raises(requests.exceptions.HTTPError): <NEW_LINE> <INDENT> future.result()
Test that we raise a `requests.HTTPError` for 4xx/5xx response statuses
625941b4f548e778e58cd349
def test_letters_undo(self): <NEW_LINE> <INDENT> self.assertEqual(pycg.undo("bob"), "b") <NEW_LINE> self.assertEqual(pycg.undo("Bob"), "B")
Very simple tests
625941b42eb69b55b151c678
def set_value_unit(self, value, original_unit, fmt=None): <NEW_LINE> <INDENT> base_unit = self.postbox.get_value() <NEW_LINE> self.textbox.set_value(value*self.postbox.get_factor(original_unit, base_unit), fmt)
Set the value to the base unit. :param value: :param original_unit: :param fmt: :return:
625941b48e7ae83300e4ad9a
def read_references(audio_path, annotator_id=0): <NEW_LINE> <INDENT> ds_path = os.path.dirname(os.path.dirname(audio_path)) <NEW_LINE> jam_path = os.path.join(ds_path, ds_config.references_dir, os.path.basename(audio_path)[:-4] + ds_config.references_ext) <NEW_LINE> jam = jams.load(jam_path, validate=False) <NEW_LINE> ann = jam.search(namespace='segment_.*')[annotator_id] <NEW_LINE> ref_inters, ref_labels = ann.to_interval_values() <NEW_LINE> ref_times = utils.intervals_to_times(ref_inters) <NEW_LINE> return ref_times, ref_labels
Reads the boundary times and the labels. Parameters ---------- audio_path : str Path to the audio file Returns ------- ref_times : list List of boundary times ref_labels : list List of labels Raises ------ IOError: if `audio_path` doesn't exist.
625941b4aad79263cf390809
def cast(*args): <NEW_LINE> <INDENT> return _itkBinaryImageToStatisticsLabelMapFilterPython.itkBinaryImageToStatisticsLabelMapFilterIUL3IUC3LM3_cast(*args)
cast(itkLightObject obj) -> itkBinaryImageToStatisticsLabelMapFilterIUL3IUC3LM3
625941b445492302aab5e08e
def test_similarity_none_attributes(self): <NEW_LINE> <INDENT> self.obj1.item1.similarity.return_value = Similarity(1.0) <NEW_LINE> self.obj1.item2.similarity.return_value = Similarity(1.0) <NEW_LINE> self.obj1.item2 = None <NEW_LINE> self.obj2.item2 = None <NEW_LINE> similarity = (self.obj1 % self.obj2) <NEW_LINE> self.assertTrue(similarity) <NEW_LINE> self.assertEqual(1.0, similarity)
Verify two empty attributes are not included in similarity.
625941b491af0d3eaac9b7e1
def loop_params(soup): <NEW_LINE> <INDENT> counter_div = soup.find('div', attrs={'class': 'page-nave__desktop'}) <NEW_LINE> counter_page = int(str(counter_div.find_all('a')[-1]).split('>')[1].split('<')[0]) <NEW_LINE> return counter_page
Determine the number of pages that must be traversed # find the div with the page counters # find the last value in it (the last page) # strip the anchor text so that only the number remains !!! note: fails when there is only 1 page
625941b450485f2cf553cb67
def get_entities(self, location, position=None): <NEW_LINE> <INDENT> return self._entities_positions[location.level_id][position or location.position]
Get entities at the given Location.
625941b4b7558d58953c4cea
def Dec2Angle(Dec): <NEW_LINE> <INDENT> import astropy.units as u <NEW_LINE> if type(Dec) is not list: <NEW_LINE> <INDENT> Dec = [Dec] <NEW_LINE> <DEDENT> if type(Dec[0]) is str: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> DecAngle = Angle(Dec, unit=u.deg) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> DecSex = [decstr.replace('.', ':', 2) for decstr in Dec] <NEW_LINE> DecAngle = Angle(DecSex, unit=u.deg) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ValueError('Dec not understood (must be string in ' 'makesourcedb format or float in degrees): {0}'.format(e.message)) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ValueError('Dec not understood (must be string in ' 'makesourcedb format or float in degrees): {0}'.format(e.message)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> DecAngle = Angle(Dec, unit=u.deg) <NEW_LINE> <DEDENT> return DecAngle
Returns Angle objects for input Dec values. Parameters ---------- Dec : str, float or list of str, float Values of Dec to convert. Can be strings in makesourcedb format or floats in degrees Returns ------- DecAngle : astropy.coordinates.Angle object
625941b4004d5f362079a106
def get_by_itag(self, itag: int) -> Optional[Stream]: <NEW_LINE> <INDENT> return self.itag_index.get(int(itag))
Get the corresponding :class:`Stream <Stream>` for a given itag. :param int itag: YouTube format identifier code. :rtype: :class:`Stream <Stream>` or None :returns: The :class:`Stream <Stream>` matching the given itag or None if not found.
625941b4498bea3a759b9880
def ban_owner(self, request, queryset): <NEW_LINE> <INDENT> total = 0 <NEW_LINE> for project in queryset: <NEW_LINE> <INDENT> if project.users.count() == 1: <NEW_LINE> <INDENT> count = (UserProfile.objects .filter(user__projects=project) .update(banned=True)) <NEW_LINE> total += count <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.add_message(request, messages.ERROR, 'Project has multiple owners: {0}'.format(project)) <NEW_LINE> <DEDENT> <DEDENT> if total == 0: <NEW_LINE> <INDENT> messages.add_message(request, messages.ERROR, 'No users banned') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.add_message(request, messages.INFO, 'Banned {0} user(s)'.format(total))
Ban project owner This will only ban single owners, because a malicious user could add a user as a co-owner of the project. We don't want to induce any collateral damage when flagging users.
625941b4e5267d203edcda70
@pytest.mark.parametrize('string, expected_membership', [ ('0', True), ('1', True), ('9', True), ('a', False), ('A', False), ('$', False), ]) <NEW_LINE> def test_is_digit(string: str, expected_membership: bool) -> None: <NEW_LINE> <INDENT> membership: bool = is_digit(string) <NEW_LINE> assert membership == expected_membership
Test seligimus.characters.digits.is_digit.
625941b42c8b7c6e89b35599
def ddpg(n_episodes=1000, print_every=50): <NEW_LINE> <INDENT> scores = [] <NEW_LINE> scores_deque = deque(maxlen=print_every) <NEW_LINE> log = open("log.txt","w+") <NEW_LINE> for i_episode in range(1, n_episodes+1): <NEW_LINE> <INDENT> env_info = env.reset(train_mode=True)[BRAIN_NAME] <NEW_LINE> agent.reset() <NEW_LINE> state = env_info.vector_observations <NEW_LINE> score = np.zeros(NUM_AGENTS)[:, None] <NEW_LINE> while True: <NEW_LINE> <INDENT> action = agent.act(state) <NEW_LINE> env_info = env.step(action)[BRAIN_NAME] <NEW_LINE> next_state = env_info.vector_observations <NEW_LINE> reward = np.array(env_info.rewards)[:, None] <NEW_LINE> done = np.array(env_info.local_done )[:, None] <NEW_LINE> agent.step(state, action, reward, next_state, done) <NEW_LINE> score += reward <NEW_LINE> state = next_state <NEW_LINE> if np.any(done): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> scores_deque.append(score.mean()) <NEW_LINE> scores.append(score.mean()) <NEW_LINE> print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_deque)), end="") <NEW_LINE> if i_episode % print_every == 0: <NEW_LINE> <INDENT> string ='\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_deque)) <NEW_LINE> print(string) <NEW_LINE> log.write(string) <NEW_LINE> torch.save(agent.actor_local.state_dict(), 'checkpoint_actor.pth') <NEW_LINE> torch.save(agent.critic_local.state_dict(), 'checkpoint_critic.pth') <NEW_LINE> <DEDENT> if np.mean(scores_deque)>=30.0 and i_episode>=100: <NEW_LINE> <INDENT> print('\nEnvironment solved in {:d} episodes!\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_deque))) <NEW_LINE> torch.save(agent.actor_local.state_dict(), 'checkpoint_actor.pth') <NEW_LINE> torch.save(agent.critic_local.state_dict(), 'checkpoint_critic.pth') <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> log.close() <NEW_LINE> return scores
DDPG algorithm. Params ====== n_episodes (int): maximum number of training episodes print_every (int): frequency of printing and logging information throughout iteration
625941b45e10d32532c5ecfe
def load_observed_ts(self,site,fn=None,location=None): <NEW_LINE> <INDENT> import pandas as pd <NEW_LINE> if fn is None: <NEW_LINE> <INDENT> if location is None: <NEW_LINE> <INDENT> location = OBSERVED_DIRECTORY <NEW_LINE> <DEDENT> import os <NEW_LINE> from glob import glob <NEW_LINE> matching_files = glob(os.path.join(location,"*%s*.csv"%site)) <NEW_LINE> fn = matching_files[0] <NEW_LINE> <DEDENT> result = pd.read_csv(fn,index_col=0,parse_dates=True,dayfirst=True) <NEW_LINE> self._apply_time_series_helpers(result) <NEW_LINE> return result
Load a time series file of observed loads for a given site.
625941b4f7d966606f6a9dd7
def byte_adaptor(fbuffer): <NEW_LINE> <INDENT> if six.PY3: <NEW_LINE> <INDENT> strings = fbuffer.read().decode('utf-8') <NEW_LINE> fbuffer = six.StringIO(strings) <NEW_LINE> return fbuffer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return fbuffer
provides py3 compatibility by converting byte based file stream to string based file stream Arguments: fbuffer: file like objects containing bytes Returns: string buffer
625941b423849d37ff7b2e61
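A short usage sketch under Python 3:

import io

buf = byte_adaptor(io.BytesIO(b'hello'))
print(buf.read())  # 'hello' -- a str stream, not bytes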
@test_context <NEW_LINE> def test_check_pending_upload_not_expired(app): <NEW_LINE> <INDENT> with app.app_context(), set_time(): <NEW_LINE> <INDENT> expires = time.now() + timedelta(seconds=10) <NEW_LINE> pu_row, file_row = add_pending_upload_and_file_row( len(DATA), DATA_DIGEST, expires, 'us-west-2') <NEW_LINE> session = app.db.session(tables.DB_DECLARATIVE_BASE) <NEW_LINE> grooming.check_pending_upload(session, pu_row) <NEW_LINE> session.commit() <NEW_LINE> eq_(len(tables.PendingUpload.query.all()), 1)
check_pending_upload doesn't check anything if the URL isn't expired yet
625941b4f8510a7c17cf94d4
def make_tomato(size): <NEW_LINE> <INDENT> np.random.seed(0) <NEW_LINE> x = np.random.normal(size=size) <NEW_LINE> tomato_price = 5 + x <NEW_LINE> ketchup_price = (2 * np.sin(2 * x) + x + np.random.normal(scale=0.5, size=size)) / 2 + 3 <NEW_LINE> return pd.DataFrame({ 'tomato_price': tomato_price, 'ketchup_price': ketchup_price })
Simulated regression example.
625941b45fdd1c0f98dc0000
def test_get_state(self): <NEW_LINE> <INDENT> c = initialise_cell() <NEW_LINE> s = c.get_state() <NEW_LINE> assert s <NEW_LINE> assert isinstance(s, Dead)
This method tests the ability of the GoLCell to retrieve its current state. The expected result of this test is for the cell's current state to be correctly retrieved.
625941b466656f66f7cbbf79
def _region_to_opj_arg(self, region_param): <NEW_LINE> <INDENT> arg = None <NEW_LINE> if region_param.mode != 'full': <NEW_LINE> <INDENT> x0 = region_param.pixel_x <NEW_LINE> y0 = region_param.pixel_y <NEW_LINE> x1 = region_param.pixel_x + region_param.pixel_w <NEW_LINE> y1 = region_param.pixel_y + region_param.pixel_h <NEW_LINE> arg = ','.join(map(str, (x0, y0, x1, y1))) <NEW_LINE> <DEDENT> logger.debug('opj region parameter: %s' % (arg,)) <NEW_LINE> return arg
Args: region_param (params.RegionParam) Returns (str): e.g. 'x0,y0,x1,y1'
625941b421bff66bcd684724
@blueprint.route("/post_file", methods=["POST"]) <NEW_LINE> def post_file(): <NEW_LINE> <INDENT> if not api_key_is_correct(): <NEW_LINE> <INDENT> abort(403) <NEW_LINE> <DEDENT> posted_data = request.form["fileinfo"] <NEW_LINE> posted_file = request.files["datafile"].read() <NEW_LINE> file_info = parse_post_data(posted_data, "file") <NEW_LINE> if not file_info: <NEW_LINE> <INDENT> return abort(400) <NEW_LINE> <DEDENT> file = load_file_attachment(file_info, posted_file) <NEW_LINE> if file: <NEW_LINE> <INDENT> return "Файл добавлен на сервер" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> abort(404)
Upload a file to a business process
625941b467a9b606de4a7c8c
def files_exist(path, files): <NEW_LINE> <INDENT> return all(os.path.exists(os.path.join(path, f)) for f in files)
Return True if all files exist in specified path.
625941b4fff4ab517eb2f207
def make_bootstrap_pom(): <NEW_LINE> <INDENT> tf = NamedTemporaryFile(delete=False) <NEW_LINE> pom = BootstrapPOM() <NEW_LINE> pom.write(tf.name) <NEW_LINE> return tf
Creates a Bootstrap POM and returns the temporary file it has been written to. :returns: temporary file :rtype: :class:`NamedTemporaryFile`
625941b4462c4b4f79d1d49e
def mkdir_f(path): <NEW_LINE> <INDENT> if (not os.path.isdir(path)): <NEW_LINE> <INDENT> os.mkdir(path)
Ensure that directory path exists. That is, if path already exists and is a directory, do nothing; otherwise, try to create directory path (and raise appropriate OSError if that doesn't work.)
625941b45fc7496912cc3754
def p_annotation_type_member_declarations_opt2(self, p): <NEW_LINE> <INDENT> p[0] = []
annotation_type_member_declarations_opt : empty
625941b4090684286d50eab6
def cleanup_ovs_ports(netdst, ports): <NEW_LINE> <INDENT> host_bridge = utils_net.find_bridge_manager(netdst) <NEW_LINE> if utils_net.ovs_br_exists(netdst) is True: <NEW_LINE> <INDENT> ports = set(host_bridge.list_ports(netdst)) - set(ports) <NEW_LINE> for p in ports: <NEW_LINE> <INDENT> utils_net.find_bridge_manager(netdst).del_port(netdst, p)
Clean up the ovs ports created in this case :param netdst: netdst taken from the command line :param ports: pre-existing ports that should remain after this test
625941b4d58c6744b4257a30
def Mtp(self, *args): <NEW_LINE> <INDENT> return _DataModel.Tensor_Mtp(self, *args)
Mtp(Tensor self) -> RealQuantity Mtp(Tensor self) -> RealQuantity
625941b46fb2d068a760ee70
def generate_SOM(self,initial_radius,number_of_iterations,initial_learning_rate): <NEW_LINE> <INDENT> k_layer = self.Kohonen_Layer(self.x_size,self.y_size,self.data) <NEW_LINE> data_length = np.arange(0,self.data.shape[0]-1,1) <NEW_LINE> bmu_list = [] <NEW_LINE> cols_list = [] <NEW_LINE> rows_list = [] <NEW_LINE> for i in range(int(number_of_iterations)): <NEW_LINE> <INDENT> random.shuffle(data_length) <NEW_LINE> for j in range(len(data_length)): <NEW_LINE> <INDENT> data_array = self.data[data_length[j]] <NEW_LINE> lambda_value = self.lambda_function(initial_radius,number_of_iterations) <NEW_LINE> radius = self.radial_decay_function(i,lambda_value,initial_radius) <NEW_LINE> BMU = self.BMU_node(k_layer,data_array) <NEW_LINE> bmu_list.append(BMU) <NEW_LINE> indices = np.indices((k_layer.shape[0],k_layer.shape[1])).reshape(2, -1).T <NEW_LINE> nodes_in_r = indices[np.where(np.linalg.norm(BMU-indices,axis=1)<radius)[0]] <NEW_LINE> node_dists = np.linalg.norm(nodes_in_r-BMU,axis=1) <NEW_LINE> rows = nodes_in_r[:,0] <NEW_LINE> cols = nodes_in_r[:,1] <NEW_LINE> cols_list.append(cols) <NEW_LINE> rows_list.append(rows) <NEW_LINE> node_values = k_layer[rows,cols] <NEW_LINE> theta = self.node_influence(node_dists,radius) <NEW_LINE> learn_rate = self.learning_rate(initial_learning_rate,i,lambda_value) <NEW_LINE> k_layer = self.weight_update(k_layer, theta, learn_rate, data_array, node_values, rows, cols) <NEW_LINE> <DEDENT> <DEDENT> return k_layer
A forward process which loops over training data and a specified number of timesteps, updating weights and training the map. :param initial_radius: (int or float) initial radius around the BMU within which, nodes are updated based on the input vector and proximity to the BMU :param number_of_iterations: (int) The number of timesteps over which to train the map :param initial_learning_rate: (float) initial value for the hyperparameter of weight update influence on BMU neighbouring nodes :return: (array) 2-dimensional kohonen layer
625941b4004d5f362079a107
def set_waveform(self, channel=1, function="SIN"): <NEW_LINE> <INDENT> self.check_ch(channel) <NEW_LINE> if channel == 1: <NEW_LINE> <INDENT> set_waveform = self.__write('FUNC {:s}'.format(function)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> set_waveform = self.__write('FUNC:CH2 {:s}'.format(function))
set desired waveform (functions available: SINusoid, SQUare, RAMP, PULSe, NOISe, DC, USER)
625941b496565a6dacc8f4a4
def query(sql, *par): <NEW_LINE> <INDENT> conn, cursor = connection() <NEW_LINE> cursor.execute(sql, par) <NEW_LINE> res = cursor.fetchall() <NEW_LINE> close_connection(conn, cursor) <NEW_LINE> return res
:param sql: SQL query statement :param par: placeholder parameters :return: SQL query result, as a tuple
625941b4c4546d3d9de72807
def test_split_children_type(self): <NEW_LINE> <INDENT> response = self.client.post( self.split_endpoint.format(pk=self.signal_no_image.id), [ { 'text': 'Child #1 Request', 'category': {'sub_category': self.link_subcategory}, 'type': {'code': 'REQ'} }, { 'text': 'Child #2 Maintenance', 'category': {'sub_category': self.link_subcategory}, 'type': {'code': 'MAI'} } ], format='json' ) <NEW_LINE> self.assertEqual(response.status_code, 201) <NEW_LINE> split_json = response.json() <NEW_LINE> split_first_child = split_json['children'][0] <NEW_LINE> response = self.client.get(self.detail_endpoint.format(pk=split_first_child['id'])) <NEW_LINE> split_first_child_json = response.json() <NEW_LINE> self.assertEqual(split_first_child_json['type']['code'], 'REQ') <NEW_LINE> split_second_child = split_json['children'][1] <NEW_LINE> response = self.client.get(self.detail_endpoint.format(pk=split_second_child['id'])) <NEW_LINE> split_second_child_json = response.json() <NEW_LINE> self.assertEqual(split_second_child_json['type']['code'], 'MAI')
When a signal is split its children must inherit certain properties.
625941b42ae34c7f2600cf01
def step(self, session, encoder_inputs, decoder_inputs, target_weights, bucket_id, forward_only): <NEW_LINE> <INDENT> encoder_size, decoder_size = self.buckets[bucket_id] <NEW_LINE> if len(encoder_inputs) != encoder_size: <NEW_LINE> <INDENT> raise ValueError("Encoder length must be equal to the one in bucket," " %d != %d." % (len(encoder_inputs), encoder_size)) <NEW_LINE> <DEDENT> if len(decoder_inputs) != decoder_size: <NEW_LINE> <INDENT> raise ValueError("Decoder length must be equal to the one in bucket," " %d != %d." % (len(decoder_inputs), decoder_size)) <NEW_LINE> <DEDENT> if len(target_weights) != decoder_size: <NEW_LINE> <INDENT> raise ValueError("Weights length must be equal to the one in bucket," " %d != %d." % (len(target_weights), decoder_size)) <NEW_LINE> <DEDENT> input_feed = {} <NEW_LINE> for l in range(encoder_size): <NEW_LINE> <INDENT> input_feed[self.encoder_inputs[l].name] = encoder_inputs[l] <NEW_LINE> <DEDENT> for l in range(decoder_size): <NEW_LINE> <INDENT> input_feed[self.decoder_inputs[l].name] = decoder_inputs[l] <NEW_LINE> input_feed[self.target_weights[l].name] = target_weights[l] <NEW_LINE> <DEDENT> last_target = self.decoder_inputs[decoder_size].name <NEW_LINE> input_feed[last_target] = np.zeros([self.batch_size], dtype='float') <NEW_LINE> if not forward_only: <NEW_LINE> <INDENT> output_feed = [self.updates[bucket_id], self.gradient_norms[bucket_id], self.losses[bucket_id]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_feed = [self.losses[bucket_id]] <NEW_LINE> for l in range(decoder_size): <NEW_LINE> <INDENT> output_feed.append(self.outputs[bucket_id][l]) <NEW_LINE> <DEDENT> <DEDENT> outputs = session.run(output_feed, input_feed) <NEW_LINE> if not forward_only: <NEW_LINE> <INDENT> return outputs[1], outputs[2], None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None, outputs[0], outputs[1:]
Run a step of the model feeding the given inputs. Args: session: tensorflow session to use. encoder_inputs: list of numpy int vectors to feed as encoder inputs. decoder_inputs: list of numpy int vectors to feed as decoder inputs. target_weights: list of numpy float vectors to feed as target weights. bucket_id: which bucket of the model to use. forward_only: whether to do the backward step or only forward. Returns: A triple consisting of gradient norm (or None if we did not do backward), average perplexity, and the outputs. Raises: ValueError: if length of encoder_inputs, decoder_inputs, or target_weights disagrees with bucket size for the specified bucket_id.
625941b491af0d3eaac9b7e2
def reset(self): <NEW_LINE> <INDENT> self.optimized = None <NEW_LINE> self.timestamp = None <NEW_LINE> self.config_dir = None <NEW_LINE> self.output_dir = None <NEW_LINE> self.pipeline_config_path = None <NEW_LINE> self.pipeline_status_path = None <NEW_LINE> self.pipeline_logfile_path = None <NEW_LINE> self.execution_order = [] <NEW_LINE> self.module_inputs = defaultdict(dict) <NEW_LINE> self.module_outputs = defaultdict(dict) <NEW_LINE> self.module_parameters = defaultdict(dict) <NEW_LINE> self.pipeline_outputs = {}
Resets the builder so that another pipeline can be built.
625941b4e8904600ed9f1cf8
def online_frame_generator(): <NEW_LINE> <INDENT> from madmom.audio.signal import Stream <NEW_LINE> hop_size = int(SAMPLE_RATE / FPS) <NEW_LINE> stream = Stream(sample_rate=SAMPLE_RATE, num_channels=1, frame_size=FRAME_SIZE, hop_size=hop_size, queue_size=1) <NEW_LINE> for frame in stream: <NEW_LINE> <INDENT> yield frame
generate signal frames from mic
625941b4cad5886f8bd26db1
def segment_combined_simple(self): <NEW_LINE> <INDENT> for line in self.corp: <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> print(line) <NEW_LINE> <DEDENT> for word in line: <NEW_LINE> <INDENT> if word[1] == "S": <NEW_LINE> <INDENT> v = Verb(word[0]) <NEW_LINE> print("Word to segment:", word[0]) <NEW_LINE> m = 1000000 <NEW_LINE> path = [] <NEW_LINE> for v in v.paths: <NEW_LINE> <INDENT> if len(v) < m: <NEW_LINE> <INDENT> m=len(v) <NEW_LINE> path = v <NEW_LINE> <DEDENT> <DEDENT> if len(path) != 0: <NEW_LINE> <INDENT> word[0] = [m[1] for m in path] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = self.word_morph(word[0]) <NEW_LINE> word[0] = path
Method that uses simple WixNLP
625941b4099cdd3c635f0a2c
def enumerate(vendor_id=0, product_id=0): <NEW_LINE> <INDENT> r = _hidapi.hid_enumerate(vendor_id, product_id) <NEW_LINE> if r is None: <NEW_LINE> <INDENT> raise HIDException('Failed to enumerate devices') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> d = r <NEW_LINE> while d: <NEW_LINE> <INDENT> yield DeviceInformation(d.contents) <NEW_LINE> d = d.contents.next <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> _hidapi.hid_free_enumeration(r)
Enumerate the HID Devices. This function returns an iterator that iterates over all the HID devices attached to the system which match vendor_id and product_id. If vendor_id and product_id are both set to 0, then all HID devices will be returned.
625941b45166f23b2e1a4f28
def get_next_term(self): <NEW_LINE> <INDENT> self.termpos += 1 <NEW_LINE> while self.termpos >= len(self.document_term): <NEW_LINE> <INDENT> self.next_doc_and_term() <NEW_LINE> self.termpos += 1 <NEW_LINE> if self.document_term is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> return self.document_term[self.termpos]
Used to determine whether all the terms of a document have been read
625941b4711fe17d8254214b
def load_widgets(): <NEW_LINE> <INDENT> core_buttons = plugins.load_widgets_from_dir(config.widgets_path, default_section=_('Core widgets')) <NEW_LINE> local_buttons = plugins.load_widgets_from_dir(config.preferences.local_widget_path, default_section=_('Custom widgets')) <NEW_LINE> for path in [config.widgets_path, config.preferences.local_widget_path]: <NEW_LINE> <INDENT> for lang in ['perl', 'lisp']: <NEW_LINE> <INDENT> if lang not in code_writers: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> codegen_name = '%s_codegen' % code_writers[lang].lang_prefix <NEW_LINE> plugins.load_widgets_from_dir( path, submodule=codegen_name ) <NEW_LINE> <DEDENT> <DEDENT> all_widgets = OrderedDict() <NEW_LINE> all_widgets.update(core_buttons) <NEW_LINE> for section in local_buttons: <NEW_LINE> <INDENT> if section not in all_widgets: <NEW_LINE> <INDENT> all_widgets[section] = local_buttons[section] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> all_widgets[section].extend(local_buttons[section]) <NEW_LINE> <DEDENT> <DEDENT> return all_widgets
Load built-in and user widgets. Scans the built-in and user widget directories to find the installed widgets and loads it. @rtype: OrderedDict @see: L{plugins.load_widgets_from_dir()} for more details e.g. the structure of the dictionary.
625941b47b25080760e3922b
def mist_update_psk(mist_url,headers,site_id,key_name,password): <NEW_LINE> <INDENT> return_text = '' <NEW_LINE> api_url = '{}sites/{}/psks?name={}'.format(mist_url,site_id,key_name) <NEW_LINE> response = requests.get(api_url, headers=headers) <NEW_LINE> site_name = get_site_name(mist_url,headers,site_id) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> psk = json.loads(response.content.decode('utf-8')) <NEW_LINE> ssid=psk[0]['ssid'] <NEW_LINE> vlan_id = psk[0]['vlan_id'] <NEW_LINE> mist_add_psk(mist_url,headers,site_id,key_name,ssid,vlan_id,password) <NEW_LINE> return_text = 'Updated key {} with new password'.format(key_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return_text = 'Could not find PSK: {}'.format(key_name) <NEW_LINE> <DEDENT> return return_text
Function for updating a PSK on a Mist site
625941b4baa26c4b54cb0ef3
@require_GET <NEW_LINE> @require_certificate_permission <NEW_LINE> def search_certificates(request): <NEW_LINE> <INDENT> user_filter = request.GET.get("user", "") <NEW_LINE> if not user_filter: <NEW_LINE> <INDENT> msg = _("user is not given.") <NEW_LINE> return HttpResponseBadRequest(msg) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(Q(email=user_filter) | Q(username=user_filter)) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return HttpResponseBadRequest(_("user '{user}' does not exist").format(user=user_filter)) <NEW_LINE> <DEDENT> certificates = api.get_certificates_for_user(user.username) <NEW_LINE> for cert in certificates: <NEW_LINE> <INDENT> cert["course_key"] = unicode(cert["course_key"]) <NEW_LINE> cert["created"] = cert["created"].isoformat() <NEW_LINE> cert["modified"] = cert["modified"].isoformat() <NEW_LINE> cert["regenerate"] = True <NEW_LINE> <DEDENT> course_id = urllib.quote_plus(request.GET.get("course_id", ""), safe=':/') <NEW_LINE> if course_id: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> course_key = CourseKey.from_string(course_id) <NEW_LINE> <DEDENT> except InvalidKeyError: <NEW_LINE> <INDENT> return HttpResponseBadRequest(_("Course id '{course_id}' is not valid").format(course_id=course_id)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if CourseOverview.get_from_id(course_key): <NEW_LINE> <INDENT> certificates = [certificate for certificate in certificates if certificate['course_key'] == course_id] <NEW_LINE> if not certificates: <NEW_LINE> <INDENT> return JsonResponse([{'username': user.username, 'course_key': course_id, 'regenerate': False}]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except CourseOverview.DoesNotExist: <NEW_LINE> <INDENT> msg = _("The course does not exist against the given key '{course_key}'").format(course_key=course_key) <NEW_LINE> return HttpResponseBadRequest(msg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return JsonResponse(certificates)
Search for certificates for a particular user OR along with the given course. Supports search by either username or email address along with course id. First filter the records for the given username/email and then filter against the given course id (if given). Show the 'Regenerate' button if a record found in 'generatedcertificate' model otherwise it will show the Generate button. Arguments: request (HttpRequest): The request object. Returns: JsonResponse Example Usage: GET /certificates/[email protected] GET /certificates/[email protected]&course_id=xyz Response: 200 OK Content-Type: application/json [ { "username": "bob", "course_key": "edX/DemoX/Demo_Course", "type": "verified", "status": "downloadable", "download_url": "http://www.example.com/cert.pdf", "grade": "0.98", "created": 2015-07-31T00:00:00Z, "modified": 2015-07-31T00:00:00Z } ]
625941b44e4d5625662d41ad
def _raise_exception(self): <NEW_LINE> <INDENT> exception_class = NSNitroExceptionsMap[self.errorcode] <NEW_LINE> raise exception_class(self.message, self.errorcode)
Raises the exception with a more specific mapping if one exists; otherwise raises NSNitroError
625941b4d99f1b3c44c6736f
def categorical_attribute_equal(attribute_name: str, attribute_value: str) -> typ.Callable[[graph.TemporalNode], bool]: <NEW_LINE> <INDENT> def __pred(n: graph.TemporalNode) -> bool: <NEW_LINE> <INDENT> return n.get_global_attribute(attribute_name) == attribute_value <NEW_LINE> <DEDENT> return __pred
Checks for equality of global attribute named attribute_name of a node with the provided attribute value. Attribute must be categorical and global.
625941b4fb3f5b602dac3466
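Usage sketch: the function returns a predicate closure, so it composes with ordinary filtering ('nodes' here is a hypothetical iterable of TemporalNode objects):

pred = categorical_attribute_equal('color', 'red')
red_nodes = [n for n in nodes if pred(n)]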
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PostSendFailed): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__
Returns true if both objects are equal
625941b48a43f66fc4b53e3a
def createEditor(self, parent, option, index): <NEW_LINE> <INDENT> if index.column()==1: <NEW_LINE> <INDENT> dataType = str(index.sibling(index.row(), 2).data()) <NEW_LINE> if dataType=='int': <NEW_LINE> <INDENT> editor = QtGui.QLineEdit(parent) <NEW_LINE> editor.setValidator(QtGui.QIntValidator(parent)) <NEW_LINE> <DEDENT> elif dataType=='bool': <NEW_LINE> <INDENT> editor = QtGui.QComboBox(parent) <NEW_LINE> editor.addItem('True') <NEW_LINE> editor.addItem('False') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> editor = QtGui.QItemDelegate.createEditor(self, parent, option, index) <NEW_LINE> <DEDENT> return editor <NEW_LINE> <DEDENT> return None
createEditor(parent: QWidget, option: QStyleOptionViewItem, index: QModelIndex) -> QWidget Return the editing widget depending on columns
625941b46aa9bd52df036b71
def get_users_online(self, *args): <NEW_LINE> <INDENT> return self.session.query(User).join(UserOnline).all()
Method gets users online from db
625941b43cc13d1c6d3c7156
def BoundlessRaster(row, col, dataset): <NEW_LINE> <INDENT> filename = dataset.tilename(row=row, col=col, **kwargs) <NEW_LINE> if os.path.exists(filename): <NEW_LINE> <INDENT> raster, profile = PadRaster(row, col, dataset, padding=padding, **kwargs) <NEW_LINE> nodata = profile['nodata'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filename = dataset.filename(**kwargs) <NEW_LINE> assert os.path.exists(filename) <NEW_LINE> with rio.open(filename) as ds: <NEW_LINE> <INDENT> i0, j0 = ds.index(tile.x0, tile.y0) <NEW_LINE> window = Window(j0 - padding, i0 - padding, width, height) <NEW_LINE> raster = ds.read(1, window=window, boundless=True, fill_value=ds.nodata) <NEW_LINE> nodata = ds.nodata <NEW_LINE> <DEDENT> <DEDENT> return raster, nodata
Read tile with some padding, handling the case if the tile does not exist.
625941b4a05bb46b383ec5fd
def make_credits(lines: str) -> list: <NEW_LINE> <INDENT> result = [] <NEW_LINE> for line in lines.splitlines(): <NEW_LINE> <INDENT> words = [] <NEW_LINE> username = None <NEW_LINE> for word in line.split(): <NEW_LINE> <INDENT> word = word.strip("()") <NEW_LINE> if word.startswith("@"): <NEW_LINE> <INDENT> username = word[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> words.append(word) <NEW_LINE> <DEDENT> <DEDENT> if words and username: <NEW_LINE> <INDENT> result.append({"name": " ".join(words), "github_id": username}) <NEW_LINE> <DEDENT> elif words: <NEW_LINE> <INDENT> result.append({"name": " ".join(words)}) <NEW_LINE> <DEDENT> elif username: <NEW_LINE> <INDENT> result.append({"github_id": username}) <NEW_LINE> <DEDENT> <DEDENT> return result
Convert multiline credits into a list Ex: @acmcgee MycroftAI (@MycroftAI) Tom's great songs
625941b40383005118ecf3b4
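Running the function on the docstring's own example gives:

lines = "@acmcgee\nMycroftAI (@MycroftAI)\nTom's great songs"
print(make_credits(lines))
# [{'github_id': 'acmcgee'},
#  {'name': 'MycroftAI', 'github_id': 'MycroftAI'},
#  {'name': "Tom's great songs"}]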
def login(self, account, pwd): <NEW_LINE> <INDENT> url = 'https://www.ketangpai.com/Home/User/login.html' <NEW_LINE> self.driver.get(url) <NEW_LINE> self.driver.find_element(*self.account_locator).send_keys(account) <NEW_LINE> self.driver.find_element(*self.pwd_locator).send_keys(pwd) <NEW_LINE> self.driver.find_element(*self.button_locator).click()
Log in
625941b4e5267d203edcda71
@view_config(route_name='monitoring', request_method='GET', renderer='json') <NEW_LINE> def check_monitoring(request): <NEW_LINE> <INDENT> core_uri = request.registry.settings['core_uri'] <NEW_LINE> email = request.registry.settings.get('email','') <NEW_LINE> password = request.registry.settings.get('password','') <NEW_LINE> timestamp = datetime.utcnow().strftime("%s") <NEW_LINE> hash = sha256("%s:%s:%s" % (email, timestamp, password)).hexdigest() <NEW_LINE> payload = {'email': email, 'timestamp': timestamp, 'hash': hash, } <NEW_LINE> ret = requests.get(core_uri+request.path, params=payload, verify=False) <NEW_LINE> if ret.status_code == 200: <NEW_LINE> <INDENT> return ret.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response('Service unavailable', 503)
Ask the mist.io service if monitoring is enabled for this machine.
625941b430bbd722463cbb92
def delete(self, post_id): <NEW_LINE> <INDENT> if not is_login(): <NEW_LINE> <INDENT> raise LoginException(message='not logged in') <NEW_LINE> <DEDENT> like = Like.get_by_post_user(long(post_id), current_user_id()) <NEW_LINE> if like: <NEW_LINE> <INDENT> post = Post.get_by_id(long(post_id)) <NEW_LINE> post.likes = post.likes - 1 <NEW_LINE> post.put() <NEW_LINE> like.key.delete() <NEW_LINE> return jsonify(delete=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise OwnerException(message='not auth')
REST delete method Note: This also updates the corresponding Post by subtracting 1 from its likes property Args: post_id: The post's id that will be unliked Returns: A JSON object with a delete key set to true Raises: LoginException: if user is not logged in OwnerException: if like was not found
625941b4a79ad161976cbf15
def delta_batch(source, window): <NEW_LINE> <INDENT> assert source.ndim == 3 <NEW_LINE> w = windowed_batch(source, window + 1) <NEW_LINE> return w[1:] - w[:-1]
:param theano.TensorVariable source: 3d tensor of shape (n_time, n_batch, n_dim) :param int|theano.Variable window: window size :return: tensor of shape (n_time, n_batch, window * n_dim) Similar as numpy.diff. Also called delta. TODO with conv op
625941b4be383301e01b5265
def show_slots(self): <NEW_LINE> <INDENT> slot_numbers = dict() <NEW_LINE> num = 1 <NEW_LINE> for i in self.slots: <NEW_LINE> <INDENT> if self.slots[i]: <NEW_LINE> <INDENT> print(num, i, self.slots[i].name, self.slots[i].armor_points) <NEW_LINE> slot_numbers[num] = self.slots[i].type <NEW_LINE> num += 1 <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> choice = int(input()) <NEW_LINE> if slot_numbers[choice]: <NEW_LINE> <INDENT> self.items.add(self.slots[slot_numbers[choice]]) <NEW_LINE> self.slots[slot_numbers[choice]] = 0 <NEW_LINE> <DEDENT> <DEDENT> except (ValueError, KeyError): <NEW_LINE> <INDENT> pass
View the items in the slots. Exit (with empty input) or unequip an item and put it in the inventory
625941b476d4e153a657e8ff
def bitnot(num): <NEW_LINE> <INDENT> numbits = 16 * 4 <NEW_LINE> return (1 << numbits) - 1 - num
unsigned bitwise not
625941b48da39b475bd64d46
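Worked example: with numbits = 16 * 4 = 64, the function flips all 64 bits of the input value.

print(hex(bitnot(0x0000FFFF)))  # 0xffffffffffff0000
print(bitnot(0))                # 18446744073709551615 == 2**64 - 1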
def _find_reports(self, start_dir, pattern): <NEW_LINE> <INDENT> paths = os.listdir(start_dir) <NEW_LINE> for path in paths: <NEW_LINE> <INDENT> full_path = os.path.join(start_dir, path) <NEW_LINE> if os.path.isfile(full_path): <NEW_LINE> <INDENT> if not VALID_MODULE_NAME.match(path): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not self._match_path(path, full_path, pattern): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> name = self._get_name_from_path(full_path) <NEW_LINE> try: <NEW_LINE> <INDENT> module = self._get_module_from_name(name) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mod_file = os.path.abspath(getattr(module, '__file__', full_path)) <NEW_LINE> realpath = _splitext_first(mod_file) <NEW_LINE> fullpath_noext = _splitext_first(full_path) <NEW_LINE> if realpath.lower() != fullpath_noext.lower(): <NEW_LINE> <INDENT> module_dir = os.path.dirname(realpath) <NEW_LINE> mod_name = _splitext_first(os.path.basename(full_path)) <NEW_LINE> expected_dir = os.path.dirname(full_path) <NEW_LINE> msg = ("%r module incorrectly imported from %r. Expected %r. " "Is this module globally installed?") <NEW_LINE> raise ImportError(msg % (mod_name, module_dir, expected_dir)) <NEW_LINE> <DEDENT> for r in self.loadReportsFromModule(module): <NEW_LINE> <INDENT> yield r <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif os.path.isdir(full_path): <NEW_LINE> <INDENT> if not os.path.isfile(os.path.join(full_path, '__init__.py')): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for report in self._find_reports(full_path, pattern): <NEW_LINE> <INDENT> yield report
Used by discovery. Yields reports it loads.
625941b426238365f5f0ec39
def create_solution(self, dstore=None, verbosity=0): <NEW_LINE> <INDENT> dstore = dstore or self.dstore <NEW_LINE> groups = self.variables['groups'].get_value(dstore=dstore) <NEW_LINE> ginsts = [self.groups[g] for g in groups] <NEW_LINE> s2gnodes = [] <NEW_LINE> for node in self.nodes: <NEW_LINE> <INDENT> gnodes = list(node.variables['gnodes'].get_value(dstore=dstore)) <NEW_LINE> s2gnodes.append(gnodes) <NEW_LINE> <DEDENT> tree_attribs = {} <NEW_LINE> for snindex, sg in enumerate(s2gnodes): <NEW_LINE> <INDENT> for gn in sg: <NEW_LINE> <INDENT> gnode = self.gnodes[gn] <NEW_LINE> gn_group = gnode.ginst.index <NEW_LINE> if gn_group not in tree_attribs: <NEW_LINE> <INDENT> tree_attribs[gn_group] = [[], []] <NEW_LINE> <DEDENT> tree_attribs[gn_group][0].append(snindex) <NEW_LINE> <DEDENT> if len(sg) == 2: <NEW_LINE> <INDENT> gn0, gn1 = self.gnodes[sg[0]], self.gnodes[sg[1]] <NEW_LINE> group0, group1 = gn0.ginst.index, gn1.ginst.index <NEW_LINE> if gn0.cat: <NEW_LINE> <INDENT> tree_attribs[group0][1].append(group1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tree_attribs[group1][1].append(group0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for gindex, sn in tree_attribs.items(): <NEW_LINE> <INDENT> sn.append(set(sn[0])) <NEW_LINE> Sentence.make_tree(tree_attribs, gindex, sn[2]) <NEW_LINE> <DEDENT> trees = list(tree_attribs.items()) <NEW_LINE> trees.sort(key=lambda x: x[0]) <NEW_LINE> trees = [x[1][2] for x in trees] <NEW_LINE> solution = Solution(self, ginsts, s2gnodes, len(self.solutions), trees=trees) <NEW_LINE> self.solutions.append(solution) <NEW_LINE> return solution
Assuming essential variables are determined in a domain store, make a Solution object.
625941b4dd821e528d63af7b
def test_find_module(self): <NEW_LINE> <INDENT> im = self.importer <NEW_LINE> for i in test_modules: <NEW_LINE> <INDENT> assert im.find_module(i)[0] == root
Test find root functionality.
625941b4a17c0f6771cbde24
def detectCycle(self, head): <NEW_LINE> <INDENT> store = set() <NEW_LINE> while head: <NEW_LINE> <INDENT> if head in store: <NEW_LINE> <INDENT> return head <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> store.add(head) <NEW_LINE> <DEDENT> head = head.next <NEW_LINE> <DEDENT> return None
:type head: ListNode :rtype: ListNode
625941b48e7ae83300e4ad9b
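A minimal sketch exercising `detectCycle`, assuming the standard singly-linked `ListNode` the docstring refers to and the usual LeetCode-style `Solution` class around the method:

```python
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None

class Solution:
    detectCycle = detectCycle    # the method above, assumed to live on a Solution class

a, b, c = ListNode(1), ListNode(2), ListNode(3)
a.next, b.next, c.next = b, c, b         # 1 -> 2 -> 3 -> back to 2

print(Solution().detectCycle(a).val)     # 2: the first node revisited is the cycle entry
print(Solution().detectCycle(None))      # None: an empty list has no cycle
```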
@mips_instruction(r"{instr_gap}({register}){args_gap}({label})", lambda args: (args[2], args[3])) <NEW_LINE> def bnez(program: MipsProgram, rd: str, label: str): <NEW_LINE> <INDENT> if program.registers[rd] != 0: <NEW_LINE> <INDENT> program.registers["pc"] = program.labels[label].value - 1
Branch to label if Reg[rd] != 0.
625941b485dfad0860c3ac28
def get_input_data(train_file_path='train.json', train=True): <NEW_LINE> <INDENT> with open(train_file_path, 'r') as train_file: <NEW_LINE> <INDENT> json_data = train_file.read() <NEW_LINE> train_data = json.loads(json_data) <NEW_LINE> <DEDENT> band_1 = [instance['band_1'] for instance in train_data] <NEW_LINE> band_2 = [instance['band_2'] for instance in train_data] <NEW_LINE> ids = [instance['id'] for instance in train_data] <NEW_LINE> band_1 = np.array([np.array(band).astype(np.float32).reshape(75, 75) for band in band_1]) <NEW_LINE> band_2 = np.array([np.array(band).astype(np.float32).reshape(75, 75) for band in band_2]) <NEW_LINE> X_train = np.concatenate([band_1[:, :, :, np.newaxis], band_2[:, :, :, np.newaxis]], axis=-1) <NEW_LINE> if train: <NEW_LINE> <INDENT> y_train = np.array([instance['is_iceberg'] for instance in train_data]) <NEW_LINE> return X_train, y_train, ids <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return X_train, ids
Retrieves training (X) and label (y) matrices. Note that this can take a few seconds to run. Args: train_file_path is the path of the file containing training data. Returns: A tuple containing the X matrix in the first position, the y label matrix in the second position (only when train is True), and the instance ids in the last position. X is of shape (N, 75, 75, 2), where N is the number of images, 75 x 75 is the dimension of each image, and 2 is the number of channels (band_1 and band_2) per image.
625941b46aa9bd52df036b72
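A hedged usage sketch; it assumes a Statoil/C-CORE-style `train.json` on disk, that `json` and `numpy as np` are imported at module level as the function body implies, and the `test.json` file name is hypothetical.

```python
X_train, y_train, ids = get_input_data('train.json', train=True)
print(X_train.shape)    # (N, 75, 75, 2): band_1 and band_2 stacked on the channel axis
print(y_train[:5])      # binary is_iceberg labels

X_test, test_ids = get_input_data('test.json', train=False)  # no labels in this branch
```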
def __init__(self): <NEW_LINE> <INDENT> self.__x_axis_location = random.randint(-500, 500) <NEW_LINE> self.__y_axis_location = random.randint(-500, 500) <NEW_LINE> self.__x_axis_speed = random.randint(-10, 10) <NEW_LINE> self.__y_axis_speed = random.randint(-10, 10) <NEW_LINE> self.__size = 3
Initialize a new Game object. :return: A new Game object.
625941b4009cb60464c6318e
def _on_label_right_click(self, evt): <NEW_LINE> <INDENT> row, col = evt.GetRow(), evt.GetCol() <NEW_LINE> menu_manager = None <NEW_LINE> if row == -1: <NEW_LINE> <INDENT> menu_manager = self.model.get_column_context_menu(col) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> menu_manager = self.model.get_row_context_menu(row) <NEW_LINE> <DEDENT> if menu_manager is not None: <NEW_LINE> <INDENT> menu = menu_manager.create_menu(self._grid) <NEW_LINE> if menu.GetMenuItemCount() > 0: <NEW_LINE> <INDENT> self._grid.PopupMenu(menu, evt.GetPosition().Get()) <NEW_LINE> <DEDENT> <DEDENT> elif col >= 0: <NEW_LINE> <INDENT> cws = getattr(self, "_cached_widths", None) <NEW_LINE> if (cws is not None) and (0 <= col < len(cws)): <NEW_LINE> <INDENT> cws[col] = None <NEW_LINE> self.__autosize() <NEW_LINE> <DEDENT> <DEDENT> evt.Skip()
Called when a right click occurs on a label.
625941b4507cdc57c6306aa2
def serial_sorting(dataset, buffer_size): <NEW_LINE> <INDENT> if (buffer_size <= 2): <NEW_LINE> <INDENT> print("Error: buffer size should be greater than 2") <NEW_LINE> return <NEW_LINE> <DEDENT> result = [] <NEW_LINE> sorted_set = [] <NEW_LINE> start_pos = 0 <NEW_LINE> N = len(dataset) <NEW_LINE> while True: <NEW_LINE> <INDENT> if ((N - start_pos) > buffer_size): <NEW_LINE> <INDENT> subset = dataset[start_pos:start_pos + buffer_size] <NEW_LINE> sorted_subset = qsort(subset) <NEW_LINE> sorted_set.append(sorted_subset) <NEW_LINE> start_pos += buffer_size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subset = dataset[start_pos:] <NEW_LINE> sorted_subset = qsort(subset) <NEW_LINE> sorted_set.append(sorted_subset) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> merge_buffer_size = buffer_size - 1 <NEW_LINE> dataset = sorted_set <NEW_LINE> while True: <NEW_LINE> <INDENT> merged_set = [] <NEW_LINE> N = len(dataset) <NEW_LINE> start_pos = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> if ((N - start_pos) > merge_buffer_size): <NEW_LINE> <INDENT> subset = dataset[start_pos:start_pos + merge_buffer_size] <NEW_LINE> merged_set.append(k_way_merge(subset)) <NEW_LINE> start_pos += merge_buffer_size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subset = dataset[start_pos:] <NEW_LINE> merged_set.append(k_way_merge(subset)) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> dataset = merged_set <NEW_LINE> if (len(dataset) <= 1): <NEW_LINE> <INDENT> result = merged_set <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return result
Perform a serial external sorting method based on sort-merge. The buffer size determines the size of each sub-record set. Arguments: dataset -- the entire record set to be sorted buffer_size -- the buffer size determining the size of each sub-record set Return: result -- the sorted record set
625941b42eb69b55b151c67a
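`serial_sorting` leans on two helpers not shown in this entry, `qsort` and `k_way_merge`. A minimal sketch of plausible implementations, under the assumption that both take and return plain Python lists:

```python
import heapq

def qsort(records):
    # Recursive quicksort; returns a new sorted list and leaves the input untouched.
    if len(records) <= 1:
        return list(records)
    pivot, rest = records[0], records[1:]
    return (qsort([r for r in rest if r < pivot])
            + [pivot]
            + qsort([r for r in rest if r >= pivot]))

def k_way_merge(sorted_lists):
    # Merge k already-sorted lists into one sorted list in O(n log k).
    return list(heapq.merge(*sorted_lists))

# With these in place, serial_sorting(data, 4) returns a one-element list
# whose single entry is the fully sorted record set.
```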
def __init__(self, user, switch, passw=None): <NEW_LINE> <INDENT> self.user = user <NEW_LINE> if passw is None: <NEW_LINE> <INDENT> self.passw = getpass('What is your password: ') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.passw = passw <NEW_LINE> <DEDENT> self.switch = switch
This initializes a switch login object. :param user: The username used to log in. :param switch: The switch to connect to. :param passw: The password for the user; defaults to prompting via getpass for security.
625941b463f4b57ef0000ef3
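A short usage sketch; the class name `SwitchLogin` is hypothetical (the entry shows only `__init__`), and `getpass` is assumed to be imported from the `getpass` module. Deferring to `getpass` keeps credentials out of shell history and source files.

```python
session = SwitchLogin('admin', '192.0.2.1')                    # prompts interactively for the password
scripted = SwitchLogin('admin', '192.0.2.1', passw='s3cret')   # non-interactive, for scripted use
```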
def test_new_block(self): <NEW_LINE> <INDENT> block = self.blockchain.new_block(proof=33, previous_hash="000000") <NEW_LINE> self.assertEqual(type(block), dict, "checking block type") <NEW_LINE> self.assertEqual(block['index'], len(self.blockchain.chain), "checking created block id") <NEW_LINE> self.assertEqual(block['proof'], 33, "checking proof")
Test block creation in the blockchain.
625941b4187af65679ca4ef4
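The `Blockchain.new_block` under test is not shown; below is a minimal sketch consistent with the three assertions, offered as an assumption rather than the original implementation (the timestamp field in particular is a guess, since the test never checks it).

```python
import time

class Blockchain:
    def __init__(self):
        self.chain = []

    def new_block(self, proof, previous_hash):
        # 1-based index, so block['index'] == len(self.chain) right after the append.
        block = {
            'index': len(self.chain) + 1,
            'timestamp': time.time(),        # assumed field, not exercised by the test
            'proof': proof,
            'previous_hash': previous_hash,
        }
        self.chain.append(block)
        return block
```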