code: string (4 – 4.48k chars)
docstring: string (1 – 6.45k chars)
_id: string (24 chars)
def _update_callbacks(self, plot):
    subplots = self.traverse(lambda x: x, [GenericElementPlot])
    merged_tools = {t: list(plot.select({'type': TOOLS[t]}))
                    for t in self._merged_tools}
    for subplot in subplots:
        for cb in subplot.callbacks:
            for c in cb.callbacks:
                for tool, objs in merged_tools.items():
                    if tool in c.args and objs:
                        c.args[tool] = objs[0]
Iterates over all subplots and updates existing CustomJS callbacks with models that were replaced when compositing subplots into a CompositePlot
625941b13c8af77a43ae3528
def copy(self, start_at, stop_at, new_git_branches):
    self._log_memory(NTR('start'))
    self._enforce_start_at(start_at)
    any_changes_since_last_copy = self._discover_changed_branches(self.all_branches())
    repo_empty = p4gf_util.git_empty()
    use_fast_reclone = repo_empty and not any_changes_since_last_copy
    if (not repo_empty
            and p4gf_tag.any_tags_since_last_copy(self.ctx)
            and not any_changes_since_last_copy
            and not new_git_branches):
        p4gf_tag.update_tags(self.ctx)
    if (not repo_empty) and (not any_changes_since_last_copy) and not new_git_branches:
        LOG.debug("No changes since last copy.")
        self.fastimport.cleanup()
        return
    p4gf_branch.attach_depot_branch_info(
        self.ctx.branch_dict(), self.ctx.depot_branch_info_index())
    if not any_changes_since_last_copy and not repo_empty:
        for b in self.all_branches():
            b.any_changes = False
    if repo_empty:
        for b in self.all_branches():
            b.any_changes = True
    if new_git_branches:
        for b in self.all_branches():
            if b.git_branch_name in new_git_branches:
                b.any_changes = True
    with Timer(OVERALL):
        self._log_memory(NTR('pygit2'))
        with Timer(SETUP):
            self._setup(start_at, stop_at)
            self._log_memory('_setup')
            if (not len(self.changes.keys())) and (not self.branch_id_to_graft_num):
                LOG.debug("No new changes found to copy")
                return
        with Timer(PRINT):
            LOG.info('Copying file revisions from Perforce')
            self._copy_print()
            self._log_memory('_copy_print')
        if use_fast_reclone:
            use_fast_reclone = self._copy_fast_reclone()
        if not use_fast_reclone:
            sorted_changes = self._copy_normal_reclone(repo_empty)
        else:
            self.fastimport.cleanup()
        with Timer(PACK):
            self._pack()
            self._log_memory('_pack')
    LOG.getChild("time").debug("\n" + str(self))
    if not use_fast_reclone:
        LOG.info('Done. Changelists: {} File Revisions: {} Seconds: {}'
                 .format(len(sorted_changes),
                         self.printed_rev_count,
                         int(Timer(OVERALL).time)))
    p4gf_mem_gc.report_objects(NTR('after P2G.copy()'))
    self._log_memory(NTR('copy() done'))
copy a set of changelists from Perforce into Git.

:param start_at: must be one of
    * "@{change_num}" or
    * None
    Anything else is rejected.
625941b1507cdc57c6306a52
def predict_expectation(self, X: DataFrame, conditional_after: Optional[ndarray] = None) -> pd.Series:
    subjects = utils._get_index(X)
    v = self.predict_survival_function(X, conditional_after=conditional_after)[subjects]
    return pd.Series(trapz(v.values.T, v.index), index=subjects)
Compute the expected lifetime, :math:`E[T]`, using covariates X.

This algorithm computes the expectation using the fact that
:math:`E[T] = \int_0^\infty P(T > t) dt = \int_0^\infty S(t) dt`.
To compute the integral, we use the trapezoidal rule to approximate it.

Caution
-------
If the survival function doesn't converge to 0, then the expectation is
really infinity and the returned values are meaningless/too large. In that
case, using ``predict_median`` or ``predict_percentile`` would be better.

Parameters
----------
X: numpy array or DataFrame
    a (n,d) covariate numpy array or DataFrame. If a DataFrame, columns can
    be in any order. If a numpy array, columns must be in the same order as
    the training data.
conditional_after: iterable, optional
    Must be equal in size to X.shape[0] (denoted `n` above). An iterable
    (array, list, series) of possibly non-zero values that represent how
    long the subject has already lived for. Ex: if :math:`T` is the unknown
    event time, then this represents :math:`s` in :math:`T | T > s`. This is
    useful for knowing the *remaining* hazard/survival of censored subjects.
    The new timeline is the remaining duration of the subject, i.e.
    normalized back to starting at 0.

Notes
-----
If X is a DataFrame, the order of the columns does not matter. But if X is
an array, then the column ordering is assumed to be the same as the
training dataset.

See Also
--------
predict_median
predict_percentile
625941b15fc7496912cc3707
def serialize_numpy(self, buff, numpy):
    try:
        _x = self
        buff.write(_struct_2IH.pack(_x.stamp.secs, _x.stamp.nsecs, _x.id))
        _x = self.data
        length = len(_x)
        if type(_x) in [list, tuple]:
            buff.write(struct.pack('<I%sB' % length, length, *_x))
        else:
            buff.write(struct.pack('<I%ss' % length, length, _x))
    except struct.error as se:
        self._check_types(se)
    except TypeError as te:
        self._check_types(te)
serialize message with numpy array types into buffer

:param buff: buffer, ``StringIO``
:param numpy: numpy python module
625941b1b545ff76a8913ba2
def bondi_constraints(self, lhs=True, rhs=True):
    return (
        self.bianchi_0(lhs, rhs),
        self.bianchi_1(lhs, rhs),
        self.bianchi_2(lhs, rhs),
        self.constraint_3(lhs, rhs),
        self.constraint_4(lhs, rhs),
        self.constraint_mass_aspect(lhs, rhs),
    )
Compute Bondi-gauge constraint equations

Bondi gauge establishes some relations that the data must satisfy:

    ψ̇₀ = ðψ₁ + 3 σ ψ₂
    ψ̇₁ = ðψ₂ + 2 σ ψ₃
    ψ̇₂ = ðψ₃ + 1 σ ψ₄
    ψ₃ = -∂ðσ̄/∂u
    ψ₄ = -∂²σ̄/∂u²
    Im[ψ₂] = -Im[ð²σ̄ + σ ∂σ̄/∂u]

This function returns a 6-tuple of 2-tuples, corresponding to these 6
equations and their left- and right-hand sides.
625941b18a43f66fc4b53df3
def render(self, screen_surf, offset):
    raise NotImplementedError()
Renders the world to screen.

:Parameters:
    screen_surf : Surface
        The surface to draw on.
    offset : `Vec3`, `Vec2`, tuple
        The offset, actually ignored by the renderers (the renderer
        actually defines the offset).

:Note: This method needs to be implemented.
625941b2cdde0d52a9e52db7
def static_path(self, path, **kw):
    if not os.path.isabs(path):
        if ':' not in path:
            package = caller_package()
            path = '%s:%s' % (package.__name__, path)
    kw['_app_url'] = self.script_name
    return self.static_url(path, **kw)
Generates a path (aka a 'relative URL', a URL minus the host, scheme, and
port) for a static resource.

This function accepts the same argument as
:meth:`pyramid.request.Request.static_url` and performs the same duty. It
just omits the host, port, and scheme information in the return value; only
the script_name, path, query parameters, and anchor data are present in the
returned string.

Example::

    request.static_path('mypackage:static/foo.css') => /static/foo.css

.. note::

    Calling ``request.static_path(apath)`` is the same as calling
    ``request.static_url(apath, _app_url=request.script_name)``.
    :meth:`pyramid.request.Request.static_path` is, in fact, implemented in
    terms of :meth:`pyramid.request.Request.static_url` in just this way.
    As a result, any ``_app_url`` passed within the ``**kw`` values to
    ``static_path`` will be ignored.
625941b2d164cc6175782acf
def test_bucketlist_search_with_wrong_term_returns_404(self):
    self.client.post('/bucketlist/', data=self.bucketlist,
                     content_type="application/json",
                     headers=self.headers_auth)
    response = self.client.get('/bucketlist/?q=hgq&page=1&per_page=10',
                               headers=self.headers_auth)
    self.assertEqual(response.status_code, 404)
    self.assertEqual(json.loads(response.data)["message"],
                     "Bucketlist doesn't exist")
Test that searching for a bucketlist that does not exist returns 404 along with a message
625941b2ec188e330fd5a532
def parse_comand_line():
    usage = "usage: %prog [options] file.hdf5"
    parser = op.OptionParser(usage)
    parser.add_option(
        "-o", "--output", help="Output file (nifti)", default="panoramic.nii"
    )
    parser.add_option(
        "-d", "--distance", type="int", dest="distance", default=3,
        help="Distance between the curves",
    )
    parser.add_option(
        "-n", "--ncurves", type="int", dest="ncurves", default=10,
        help="Number of curves between and after the found curve",
    )
    parser.add_option(
        "-p", "--npoints", type="int", dest="npoints", default=500,
        help="Number of points (pixels) for each curve",
    )
    parser.add_option(
        "-g", "--nctrl_points", type="int", dest="nctrl_points", default=10,
        help="Number of bezier control points",
    )
    parser.add_option(
        "-t", "--threshold", type="int", dest="threshold", default=1500,
        help="Threshold used to determine the dental arcade",
    )
    parser.add_option(
        "-s", "--skeleton", dest="gen_skeleton", action="store_true",
        help="Generate skeleton image",
    )
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error("Incorrect number of arguments")
    filename = args[0]
    return filename, options
Handle command line arguments.
625941b2d7e4931a7ee9dca5
def millisec2sec(self, dat, column=1):
    dat[:, column] /= 1.0e3
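# Usage sketch (hypothetical caller `obj` and float array; assumes the
# time column holds milliseconds):
#   dat = np.array([[0.0, 1500.0], [1.0, 2500.0]])
#   obj.millisec2sec(dat)   # dat[:, 1] becomes [1.5, 2.5] (seconds)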
Convert time from millisecond to second.

Parameters
----------
dat : ndarray
    2D data array; the time column is modified in place.
column : int, optional
    Index of the time column. The default is 1.

Returns
-------
None.
625941b2097d151d1a222be5
def get_all_by_user_id(request, _cls=True):
    return TemplateVersionManager.get_all_version_manager_by_user_id(
        str(request.user.id), _cls
    )
Return all Template Version Managers with given user id.

Returns:
    The list of template version managers belonging to the given user.
625941b2099cdd3c635f09e7
def make_folder(path_to_folder):
    if not os.path.exists(path_to_folder):
        os.mkdir(path_to_folder)
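# Usage sketch (hypothetical path):
#   make_folder("./results")   # creates ./results only if it is missing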
Create a folder if it does not already exist.

Input:
    path_to_folder (str): the path to the folder to create.
625941b27b180e01f3dc4592
def add_cmap(name, cdict):
    yt_colormaps[name] = cc.LinearSegmentedColormap(name, cdict, 256)
    mcm.datad[name] = cdict
    mcm.__dict__[name] = cdict
    try:
        mcm.register_cmap(name, yt_colormaps[name])
    except AttributeError:
        pass
Adds a colormap to the colormaps available in yt for this session
625941b28a349b6b435e7eff
def post_init(self, record, data, model=None, field_data=None):
    if field_data is not None:
        self.__set__(record, field_data)
Core implementation of post_init to support argument loading.
625941b27c178a314d6ef1d9
def type(self):
    return "TDS"
Returns type of step (TDS)
625941b2b57a9660fec33601
def SetChannelStateOn(self, channel, yn):
    if yn:
        self.write("output{}:state on".format(channel))
    else:
        self.write("output{}:state off".format(channel))
Turns a channel on or off

@param channel: The channel to set the state of.
@type channel: int
@param yn: True for On, false for off.
@type yn: Boolean
625941b2b5575c28eb68dd7f
def get_credentials(flags):
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'sheets.googleapis.esperanca.fb.data.json')
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        credentials = tools.run_flow(flow, store, flags)
    return credentials
Gets valid user credentials from storage.

If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.

Returns:
    Credentials, the obtained credential.
625941b2bf627c535bc12f5a
def _create_map(self):
    self._locations = []
    for _ in range(10):
        row = random.randint(0, 9)
        col = random.randint(0, 9)
        location = [col * 64 + 32, row * 64 + 32 + 640]
        if not (location in self._locations):
            self._locations.append(location)
            obstacle_type = random.choice(["tree", "flag"])
            if obstacle_type == "tree":
                img = f"{self.images_path}skier_tree.png"
            elif obstacle_type == "flag":
                img = f"{self.images_path}skier_flag.png"
            obstacle = ObstacleSprite(img, location, obstacle_type)
            self.obstacles.add(obstacle)
Creates one 'screen' of obstacles; the 640 x 640 area uses "blocks" of 64 x 64 pixels, so objects aren't too close together.
625941b263b5f9789fde6e69
def class_to_json(obj):
    return obj.__dict__
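# Usage sketch (hypothetical class):
#   class Point:
#       def __init__(self):
#           self.x, self.y = 1, 2
#   class_to_json(Point())   # -> {'x': 1, 'y': 2}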
Serialize class attributes to dictionary

Args:
    obj (object): object to be serialized
625941b256ac1b37e6263f66
def rebuildCurve(*args, **kwargs):
    pass
This command rebuilds a curve by modifying its parameterization. In some
cases the shape may also change. The rebuildType (-rt) determines how the
curve is to be rebuilt. The optional second curve can be used to specify a
reference parameterization.

Flags:
- caching : cch (bool) [create,query,edit]
    Modifies the node caching mode. See the node documentation for more
    information. Note: For advanced users only.
- constructionHistory : ch (bool) [create]
    Turn the construction history on or off.
- degree : d (int) [create,query,edit]
    The degree of the resulting curve: 1 - linear, 2 - quadratic,
    3 - cubic, 5 - quintic, 7 - heptic. Default: 3
- endKnots : end (int) [create,query,edit]
    End conditions for the curve: 0 - uniform end knots, 1 - multiple end
    knots. Default: 0
- fitRebuild : fr (bool) [create,query,edit]
    If true use the least squares fit rebuild. Otherwise use the convert
    method. Default: true
- frozen : fzn (bool) []
- keepControlPoints : kcp (bool) [create,query,edit]
    If true, the CVs will remain the same. This forces uniform
    parameterization unless rebuildType is matchKnots. Default: false
- keepEndPoints : kep (bool) [create,query,edit]
    If true, keep the endpoints the same. Default: true
- keepRange : kr (int) [create,query,edit]
    Determine the parameterization for the resulting curve:
    0 - reparameterize the resulting curve from 0 to 1, 1 - keep the
    original curve parameterization, 2 - reparameterize the result from 0
    to number of spans. Default: 1
- keepTangents : kt (bool) [create,query,edit]
    If true, keep the end tangents the same. Default: true
- name : n (unicode) [create]
    Sets the name of the newly-created node. If it contains namespace path,
    the new node will be created under the specified namespace; if the
    namespace does not exist, it will be created.
- noChanges : nc (bool) []
- nodeState : nds (int) [create,query,edit]
    Modifies the node state. See the node documentation for more
    information. Note: For advanced users only. Flag can have multiple
    arguments, passed either as a tuple or a list.
- object : o (bool) [create]
    Create the result, or just the dependency node.
- range : rn (bool) [create]
    Force a curve range on complete input curve.
- rebuildType : rt (int) [create,query,edit]
    How to rebuild the input curve: 0 - uniform, 1 - reduce spans,
    2 - match knots, 3 - remove multiple knots, 4 - curvature,
    5 - rebuild ends, 6 - clean. Default: 0
- replaceOriginal : rpo (bool) [create]
    Create in place (i.e., replace).

Advanced flags
- smartSurfaceCurveRebuild : scr (bool) [create,query,edit]
    If true, curve on surface is rebuild in 3D and 2D info is kept.
    Default: false
- smooth : sm (float) []
- spans : s (int) [create,query,edit]
    The number of spans in resulting curve. Used only if rebuildType is
    uniform. Default: 4
- tolerance : tol (float) [create,query,edit]
    The tolerance with which to rebuild. Default: 0.01

Common flags

Derived from mel command `maya.cmds.rebuildCurve`
625941b256b00c62f0f143de
def know(self, word: str) -> bool:
    return self.words[word].knowing in [
        LexiconResponse.KNOW,
        LexiconResponse.DO_NOT_BUT_PROPER_NOUN_TOO]
Check if user knows the word.
625941b2d99f1b3c44c67324
def AddPropertiesFlag(parser):
    parser.add_argument(
        '--properties',
        help='A comma separated, key:value, map '
             'to be used when deploying a template file or composite type directly.',
        type=arg_parsers.ArgDict(operators=dm_api_util.NewParserDict()),
        dest='properties')
Add properties flag.
625941b23539df3088e2e0cf
def create_content(data_dir: str = 'content', size: int = 4000, seed: int = 15):
    fp_members, fm_members = init_data(size, seed)
    score = roc_auc_score(
        y_true=np.concatenate((np.ones_like(fp_members), np.zeros_like(fm_members))),
        y_score=np.concatenate((fp_members, fm_members))
    )
    ratios = [0.02, 0.2, 0.4, 0.6, 0.8, 1]
    content_p, content_m = generate_figures_and_data_splits(ratios, fp_members, fm_members)
    roc_auc_scores = calculate_roc_auc_scores(ratios, content_p, content_m)
    content = {
        'ratios': ratios,
        'content_p': content_p,
        'content_m': content_m,
        'roc_auc_scores': roc_auc_scores,
        'score': score
    }
    os.makedirs(data_dir, exist_ok=True)
    with open(os.path.join(data_dir, 'content.pickle'), 'wb') as f:
        pickle.dump(content, f)
Create (and dump) the content needed to generate the interactive visualization.
625941b2596a89723608984f
def _threshold_youden(self):
    youden_index = np.argmax(np.abs(self.tpr_list - self.fpr_list))
    youden_threshold = self.roc_thresholds[youden_index]
    return youden_index, youden_threshold
Function to calculate the Youden index as a threshold
625941b2d10714528d5ffa69
def test_interpolation_month_end(self, simple_linear_model):
    model = simple_linear_model
    values = np.arange(12, dtype=np.float64)
    p = MonthlyProfileParameter(model, values, interp_day="last")
    model.setup()

    @assert_rec(model, p)
    def expected_func(timestep, scenario_index):
        imth = timestep.month - 1
        days_in_month = calendar.monthrange(timestep.year, timestep.month)[1]
        day = timestep.day
        x = day / days_in_month
        return values[(imth - 1) % 12] * (1 - x) + values[imth] * x

    model.run()
Test interpolating monthly values from last day of the month.
625941b26aa9bd52df036b26
def IsXformSimilarity(xform):
    xform = rhutil.coercexform(xform, True)
    return xform.SimilarityType != Rhino.Geometry.TransformSimilarityType.NotSimilarity
Verifies a matrix is a similarity transformation. A similarity
transformation can be broken into a sequence of dilations, translations,
rotations, and reflections.

Parameters:
    xform = List or Rhino.Geometry.Transform. A 4x4 transformation matrix.

Returns:
    True if this transformation is an orientation preserving similarity,
    otherwise False.

Example:
    import rhinoscriptsyntax as rs
    xform = rs.BlockInstanceXform(block)
    print rs.IsXformSimilarity(xform)

See Also:
    IsXformIdentity
    IsXformZero
625941b297e22403b379cd23
def test_create_chapter(self):
    self.create_book("Morrissey", "Smiths", "80s", "Me")
    response = self.client.post(self.url, {"title": "Apice",
                                           "content": "All world",
                                           "book": 1})
    self.assertEqual(201, response.status_code)
ensure we can create a book chapter
625941b27b180e01f3dc4594
def test_check_install():
    print("Testing utils.check_install")
    from spython.utils import check_install
    is_installed = check_install()
    assert is_installed
    is_not_installed = check_install("fakesoftwarename")
    assert not is_not_installed
check install is used to check if a particular software is installed. If no command is provided, singularity is assumed to be the test case
625941b250485f2cf553cb20
def firstUniqChar(self, s):
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    index = [s.index(x) for x in alphabet if s.count(x) == 1]
    return min(index) if index else -1
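# Usage sketch (assuming the usual LeetCode `Solution` wrapper):
#   Solution().firstUniqChar("leetcode")      # -> 0 ('l' occurs once)
#   Solution().firstUniqChar("loveleetcode")  # -> 2 ('v' occurs once)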
:type s: str
:rtype: int
625941b25e10d32532c5ecb4
def format_name(self, **kwargs):
    name = super(PromptyMethodDocumenter, self).format_name(**kwargs)
    name = name.split('.')[-1]
    return "\\\\" + name
Format the function name like ``\function``.
625941b2711fe17d82542101
def testSetBulk(self):
    self._testSetBulkOrInterrupt('setBulk')
Simplest test: feed some data, must not raise. Also, test setBuffer/getBuffer.
625941b216aa5153ce362204
def update_wrapper(proxy, original):
    func = _update_wrapper(proxy, original)
    func.signature = getargspec(original)
    return func
Update `proxy` to look like `original`.

Uses :func:`functools.update_wrapper` internally and adds the function
signature to the newly created proxy function.
625941b25166f23b2e1a4ee4
@memo
def x_squared_masks(radius, ndim):
    radius = validate_tuple(radius, ndim)
    points = [np.arange(-rad, rad + 1) for rad in radius]
    if len(radius) > 1:
        coords = np.array(np.meshgrid(*points, indexing="ij"))
    else:
        coords = np.array([points[0]])
    r = [(coord/rad)**2 for (coord, rad) in zip(coords, radius)]
    masks = np.asarray(coords**2, dtype=int)
    masks[:, sum(r) > 1] = 0
    return masks
Returns ndim masks with values x^2 inside radius and 0 outside
625941b260cbc95b062c62cd
def __init__(self):
    self.msg_q = []
    self.resp_q = []
AlexaAudio initialization function; sets up empty message and response queues.
625941b230dc7b76659016f7
def stub_resource_handle_op_eager_fallback(container="", shared_name="", name=None, ctx=None):
    _ctx = ctx if ctx else _context.context()
    if container is None:
        container = ""
    container = _execute.make_str(container, "container")
    if shared_name is None:
        shared_name = ""
    shared_name = _execute.make_str(shared_name, "shared_name")
    _inputs_flat = []
    _attrs = ("container", container, "shared_name", shared_name)
    _result = _execute.execute(b"StubResourceHandleOp", 1, inputs=_inputs_flat,
                               attrs=_attrs, ctx=_ctx, name=name)
    _execute.record_gradient(
        "StubResourceHandleOp", _inputs_flat, _attrs, _result, name)
    _result, = _result
    return _result
This is the slowpath function for Eager mode. This is for function stub_resource_handle_op
625941b299fddb7c1c9de11f
def transfer_to_road(self, road, location):
    self.road = road
    self.intersection = None
    self.x = location[0]
    self.y = location[1]
    orientation = road.orientation
    new_velocity = self.rotate_around_origin(self.vx, self.vy, -orientation)
    self.orientation = 0
    self.vx = new_velocity[0]
    self.vy = new_velocity[1]
    return
Takes a road and a local location and sets the car to being on that road
at that location. Cars enter roads parallel to the road.

:param road:
:param location:
:return:
625941b27c178a314d6ef1dc
def _build_uri(base_url, url_params, uri_params, use_builder=False):
    url = _build_url(base_url, url_params)
    if use_builder:
        uri_builder = QgsDataSourceUri()
        for key, value in uri_params.items():
            uri_builder.setParam(key, str(value))
        uri_builder.setParam('url', url)
        return uri_builder.uri()
    param_list = list(uri_params.items()) + [('url', quote_plus(url))]
    param_strings = ('{}={}'.format(key, value) for key, value in param_list)
    return '&'.join(param_strings)
Builds an URI for a QGIS layer. In some cases a builder class should be used and in some cases it shouldn't.
625941b2ff9c53063f47bf83
def submit(fn, io_loop, *args, **kwargs):
    future = Future()

    def execute():
        try:
            result = gen.maybe_future(fn(*args, **kwargs))
        except Exception:
            f = gen.Future()
            f.set_exc_info(sys.exc_info())
            on_done(f)
        else:
            result.add_done_callback(on_done)

    def on_done(tornado_future):
        exception = tornado_future.exception()
        if not exception:
            future.set_result(tornado_future.result())
        else:
            future.set_exception(exception)

    io_loop.add_callback(execute)
    return future
Submit Tornado Coroutine to IOLoop.current().

:param fn: Tornado Coroutine to execute
:param io_loop: Tornado IOLoop where to schedule the coroutine
:param args: Args to pass to coroutine
:param kwargs: Kwargs to pass to coroutine
:returns concurrent.futures.Future: future result of coroutine
625941b263b5f9789fde6e6a
def selectDut(self):
    fname = self.loadFileDialog(fileTypes=TestRunnerGui.duttypes)
    if fname is not None:
        self.runner.setDUT(fname)
        self.edtDUT.SetValue(os.path.relpath(fname))
Show file dialog and set the result as DUT
625941b2e1aae11d1e749a3f
def get_url(self):
    (_, _, url) = self.storage.store(self)
    return url
Return URL pointing to data
625941b292d797404e303f17
def on_epoch_end(self):
    np.random.shuffle(self.x_y)
Method called at the end of every epoch.
625941b256b00c62f0f143e0
def to_wkb_point(c):
    point = {'type': 'Point', 'coordinates': [c[0], c[1]]}
    return geomet.wkb.dumps(point)
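# Usage sketch (hypothetical lon/lat pair):
#   to_wkb_point((12.5, 41.9))   # -> WKB bytes for POINT (12.5 41.9)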
Super quick hack that does not actually belong in here
625941b2e64d504609d745cd
def specs_for_conda_package_names(self, names):
    return self._specs_for_package_names(names, self._conda_specs_for_create_by_name)
Get the full install specs given an iterable of package names.
625941b23346ee7daa2b2aed
def _mk_slice(self, pv):
    if pv.size == 0:
        return slice(0, 0)
    if np.all(np.diff(pv) == 1):
        return slice(pv[0], pv[-1] + 1)
    raise ValueError("invalid partition vector for conversion to slice")
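# Usage sketch (hypothetical caller `obj`, contiguous index vector):
#   obj._mk_slice(np.array([3, 4, 5]))   # -> slice(3, 6)
#   obj._mk_slice(np.array([]))          # -> slice(0, 0)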
Convert index partition vector to slice object: ``start:stop``. Raises ValueError if `pv` cannot be converted to this type of slice object.
625941b24a966d76dd550d96
def extract_compartment_prefix_mapping():
    with open_text(data, "comp_prefix.tsv") as handle:
        mapping = pd.read_csv(handle, sep="\t", index_col="mnx", squeeze=True)
    return mapping
Return the packaged compartment prefix mapping.
625941b282261d6c526ab229
def get_gkeep_config() -> Dict:
    return _get_config(GKEEP_CONF_PATH)
Gets google keep config from file

Returns
-------
Dict
    google keep config dictionary
625941b2627d3e7fe0d68bd4
def libvlc_audio_equalizer_set_preamp(p_equalizer, f_preamp):
    f = (_Cfunctions.get('libvlc_audio_equalizer_set_preamp', None) or
         _Cfunction('libvlc_audio_equalizer_set_preamp', ((1,), (1,),), None,
                    ctypes.c_int, ctypes.c_void_p, ctypes.c_float))
    return f(p_equalizer, f_preamp)
Set a new pre-amplification value for an equalizer.

The new equalizer settings are subsequently applied to a media player by
invoking L{libvlc_media_player_set_equalizer}(). The supplied amplification
value will be clamped to the -20.0 to +20.0 range.

@param p_equalizer: valid equalizer handle, must not be NULL.
@param f_preamp: preamp value (-20.0 to 20.0 Hz).
@return: zero on success, -1 on error.
@version: LibVLC 2.2.0 or later.
625941b2596a897236089851
def read_holding_registers(self, address, count=1, **kwargs):
    request = reg_read_msg.ReadHoldingRegistersRequest(address, count, **kwargs)
    return self.execute(request)
:param address: The starting address to read from
:param count: The number of registers to read
:param unit: The slave unit this request is targeting
:returns: A deferred response handle
625941b285dfad0860c3abe0
def test_templated_method_of_templated_class(self):
    base_qualname = 'Gleb::lerb(U)'
    self._qualname_instances('lerb', base_qualname, [1], [1])
    self._callers_check(base_qualname, [('<b>gl.lerb(true)</b>;', 19)])
    spec_qualname = 'Gleb<char>::lerb(int)'
    self._qualname_instances('lerb', spec_qualname, [2], [2])
    self._callers_check(spec_qualname, [('<b>gl.lerb(3)</b>;', 20)])
Test consistency for a templated method of a templated class and a full specialization.
625941b2956e5f7376d70c03
def main():
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CRUD_functionbasedapi_05.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
Run administrative tasks.
625941b2d486a94d0b98ded6
def _validate(self, bounding_box, order: str = None):
    if self._n_inputs == 1 and not isinstance(bounding_box, dict):
        self[0] = bounding_box
    else:
        self._validate_iterable(bounding_box, order)
Validate and set any representation
625941b25e10d32532c5ecb6
def _compute_fin(node, Graph, partition):
    community_node = partition.get(node)
    e = _compute_e(node, community_node, Graph, partition)
    degree_node = Graph.degree(node)
    degree_community = _compute_degree_of_community(community_node, Graph, partition)
    num_edges = Graph.number_of_edges()
    fin = e - ((degree_node * (degree_community - degree_node)) / (2 * num_edges))
    return fin
This function computes the force F_in for a node, which signifies how
strongly a node is attracted to the current community that it is in. The
force F_in is used to determine if a node should stay in its current
community.

Args:
    node (any): A node in a graph. May be an integer or string or object, etc.
    Graph (graph): A graph consisting of nodes and edges.
    partition (dict): Keys represent nodes, and values represent the
        community that each node belongs to.

Returns:
    fin (float): The force that a node's community exerts on that node.
625941b250485f2cf553cb21
def main():
    groundctrl = urllib.request.urlopen(MAJORTOM)
    helmet = groundctrl.read()
    print(helmet)
    helmetson = json.loads(helmet.decode("utf-8"))
    print(type(helmet))
    print(type(helmetson))
    print(helmetson["number"])
    print(helmetson["people"])
    print(helmetson["people"][0])
    print(helmetson["people"][1])
    print(helmetson["people"][-1])
    for astro in helmetson["people"]:
        print(astro)
    for astro in helmetson["people"]:
        print(astro["name"])
    print('People in Space :', helmetson["number"])
    for astro in helmetson["people"]:
        print(astro["name"], 'on the', astro["craft"])
reading json from api
625941b28a349b6b435e7f03
def isPerfectSquare(self, num):
    r = num
    while r*r > num:
        r = (r + num // r) // 2   # integer Newton's method step
    return num == r*r
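# Usage sketch (assuming the usual LeetCode `Solution` wrapper):
#   Solution().isPerfectSquare(16)   # -> True
#   Solution().isPerfectSquare(14)   # -> False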
:type num: int :rtype: bool
625941b215fb5d323cde0895
def connection_failed(error_msg: str) -> None:
    print(col("") + "ERR" + col(""))
    print_indented(f" {col('')}└{col('')} " +
                   f"{col('')}{error_msg}{col('')}")
Signals that an error has occurred while establishing the connection.
625941b2293b9510aa2c3020
def GetConversationByBlob(self, join_blob, also_join=True):
    l_request = skypekit.XCallRequest("ZR\000\021", 0, 17)
    l_request.add_parm('S', 1, join_blob)
    l_request.add_parm('b', 2, also_join)
    l_response = self.transport.xcall(l_request)
    l_result = module_id2classes[18](l_response.get(1), self.transport)
    return l_result
For public conversations

B{Arguments:}
 - B{join_blob}
 - B{also_join}

B{Return values:}
 - B{conversation}
625941b2ff9c53063f47bf85
def DumpReadable(self, *args):
    return _ogr.Feature_DumpReadable(self, *args)
DumpReadable(self)

void OGR_F_DumpReadable(OGRFeatureH hFeat, FILE *fpOut)

Dump this feature in a human readable form. This dumps the attributes and
geometry; however, it doesn't include definition information (other than
field types and names), nor does it report the geometry spatial reference
system.

This function is the same as the C++ method OGRFeature::DumpReadable().

Parameters:
-----------
hFeat: handle to the feature to dump.
fpOut: the stream to write to, such as strout.
625941b230bbd722463cbb50
def texify(s):
    s = s.replace('_', ' ').replace('-', ' ')
    s = s.replace('>0', '$>0$')
    return s
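# Usage sketch:
#   texify("rate_constant>0")   # -> 'rate constant$>0$'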
Convert a string to be TeX compatible
625941b292d797404e303f19
def plural(n, word):
    if n == 1:
        return '1 ' + word
    return '%d %ss' % (n, word)
Use plural for n!=1.

>>> plural(0, 'egg'), plural(1, 'egg'), plural(2, 'egg')
('0 eggs', '1 egg', '2 eggs')
625941b266673b3332b91e1d
def get_config():
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = "v"
    cfg.parentdir_prefix = "None"
    cfg.versionfile_source = "src/epcsunspecdemo/_version.py"
    cfg.verbose = False
    return cfg
Create, populate and return the VersioneerConfig() object.
625941b223849d37ff7b2e21
@receiver(error_message)
def print_error_message(**kwargs):
    text = kwargs.get('text')
    if text:
        print(Style.BRIGHT + Fore.RED + text)
Receiver to print an error message in red text
625941b2507cdc57c6306a59
def _postprocess(
    results, result_mask_info, output_height, output_width, mask_threshold=0.5
):
    scale_x, scale_y = (
        output_width / results.image_size[1],
        output_height / results.image_size[0],
    )
    results = Instances((output_height, output_width), **results.get_fields())
    output_boxes = results.pred_boxes
    output_boxes.tensor[:, 0::2] *= scale_x
    output_boxes.tensor[:, 1::2] *= scale_y
    output_boxes.clip(results.image_size)
    inds_nonempty = output_boxes.nonempty()
    results = results[inds_nonempty]
    result_masks, result_anchors = result_mask_info
    if result_masks:
        result_anchors.tensor[:, 0::2] *= scale_x
        result_anchors.tensor[:, 1::2] *= scale_y
        result_masks = [x for (i, x) in zip(inds_nonempty.tolist(), result_masks) if i]
        results.pred_masks = _paste_mask_lists_in_image(
            result_masks,
            result_anchors[inds_nonempty],
            results.image_size,
            threshold=mask_threshold,
        )
    return results
Post-process the output boxes for TensorMask.

The input images are often resized when entering an object detector. As a
result, we often need the outputs of the detector in a different resolution
from its inputs. This function will postprocess the raw outputs of
TensorMask to produce outputs according to the desired output resolution.

Args:
    results (Instances): the raw outputs from the detector.
        `results.image_size` contains the input image resolution the
        detector sees. This object might be modified in-place. Note that it
        does not contain the field `pred_masks`, which is provided by
        another input `result_masks`.
    result_mask_info (list[Tensor], Boxes): a pair of two items for mask
        related results. The first item is a list of #detection tensors,
        each is the predicted masks. The second item is the anchors
        corresponding to the predicted masks.
    output_height, output_width: the desired output resolution.

Returns:
    Instances: the postprocessed output from the model, based on the output
    resolution.
625941b232920d7e50b27f5b
def set_ResponseFormat(self, value):
    super(RegisterImageInputSet, self)._set_input('ResponseFormat', value)
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
625941b215baa723493c3d00
def process_item(self, item, spider):
    alias = urllib.unquote(spider.alias).decode('gb2312')
    if spider.name == 'tb_user':
        db_user[alias].insert(dict(item))
    elif spider.name == 'tb_thread':
        db_thread[alias].insert(dict(item))
    elif spider.name == 'wb_userstatus':
        if item.get('pdate', '') in self.wbuserstatus_sid_seen:
            raise DropItem("Duplicate item found: %s" % item)
        elif item.get('text', '') == '':
            raise DropItem("Empty item found: %s" % item)
        else:
            self.wbuserstatus_sid_seen.add(item['pdate'])
            db_wbuserstatus[alias].insert(dict(item))
    return item
For each item, insert into mongodb as dict
625941b2097d151d1a222beb
def _assert_confirmation_email(self, expect_email):
    if expect_email:
        assert len(mail.outbox) == 1
        assert 'Thank you for submitting your photos!' == mail.outbox[0].subject
    else:
        assert len(mail.outbox) == 0
Check that a confirmation email was or was not sent.
625941b2b57a9660fec33606
def teardown():
    os.close(db_fd)
    os.unlink(rPi.app.config['DATABASE'])
Get rid of the database again after each test.
625941b2ec188e330fd5a538
def mask_grid(grid, shape):
    assert grid.ndim == 2
    assert grid.shape[0] == 3
    assert len(shape) == 3
    msk = ((grid[0, ...] < 1) + (grid[0, ...] >= shape[0])
           + (grid[1, ...] < 1) + (grid[1, ...] >= shape[1])
           + (grid[2, ...] < 1) + (grid[2, ...] >= shape[2]))
    return ~msk
Remove voxels that have fallen out of the FOV.

Parameters
----------
grid: 2D array of shape (3, n_points)
    the grid being masked
shape: array of 3 ints
    the shape of the underlying image

Returns
-------
msk: 1D array of n_points bools
    a mask for lattices/voxels/points on the grid that have not fallen out
    of the underlying image's FOV
625941b2460517430c393f1b
@app.route('/logout_route')
def logout_route():
    auth._logout()
    flash(u'ログアウトしました.')  # "Logged out."
    return redirect(url_for('index'))
Logout
625941b2004d5f362079a0c7
@task
def widget():
    pass
A dummy function
625941b2b5575c28eb68dd85
def start(self):
    self.cov = coverage.coverage(source=self.cov_source,
                                 branch=self.cov_branch,
                                 config_file=self.cov_config)
    self.combining_cov = coverage.coverage(source=self.cov_source,
                                           branch=self.cov_branch,
                                           data_file=os.path.abspath(self.cov.config.data_file),
                                           config_file=self.cov_config)
    if self.cov_append:
        self.cov.load()
    else:
        self.cov.erase()
    self.cov.start()
    self.set_env()
Erase any previous coverage data and start coverage.
625941b2dc8b845886cb52bf
def _resource_as_dict(self, member):
    enum, dot, role = str(member.role).partition('.')
    member_id = self.api.from_uuid(member.member_id)
    response = dict(
        address=self.api.path_to(
            'addresses/{}'.format(member.address.email)),
        delivery_mode=member.delivery_mode,
        email=member.address.email,
        list_id=member.list_id,
        member_id=member_id,
        role=role,
        self_link=self.api.path_to('members/{}'.format(member_id)),
    )
    if member.moderation_action is not None:
        response['moderation_action'] = member.moderation_action
    if member.display_name is not None:
        response['display_name'] = member.display_name
    user = member.user
    if user is not None:
        user_id = self.api.from_uuid(user.user_id)
        response['user'] = self.api.path_to('users/{}'.format(user_id))
    return response
See `CollectionMixin`.
625941b2099cdd3c635f09eb
def __init__(self, builder, targets, default_target, demographics_names=None,
             vitals_names=None, lab_measurements_names=None,
             interventions_names=None, description=None, homepage=None,
             citation=None):
    self.has_demographics = demographics_names is not None
    self.has_vitals = vitals_names is not None
    self.has_lab_measurements = lab_measurements_names is not None
    self.has_interventions = interventions_names is not None
    self.default_target = default_target
    metadata = tfds.core.MetadataDict()
    features_dict = {
        'time': Tensor(shape=(None,), dtype=self.time_dtype)
    }
    demo_is_categorical = []
    combined_is_categorical = []
    if self.has_demographics:
        metadata['demographics_names'] = demographics_names
        demo_is_categorical.extend(
            ['=' in demo_name for demo_name in demographics_names])
        features_dict['demographics'] = Tensor(
            shape=(len(demographics_names),), dtype=self.demographics_dtype)
    if self.has_vitals:
        metadata['vitals_names'] = vitals_names
        combined_is_categorical.extend(
            ['=' in name for name in vitals_names])
        features_dict['vitals'] = Tensor(
            shape=(None, len(vitals_names),), dtype=self.vitals_dtype)
    if self.has_lab_measurements:
        metadata['lab_measurements_names'] = lab_measurements_names
        combined_is_categorical.extend(
            ['=' in name for name in lab_measurements_names])
        features_dict['lab_measurements'] = Tensor(
            shape=(None, len(lab_measurements_names),),
            dtype=self.lab_measurements_dtype)
    if self.has_interventions:
        metadata['interventions_names'] = interventions_names
        combined_is_categorical.extend(
            ['=' in name for name in interventions_names])
        features_dict['interventions'] = Tensor(
            shape=(None, len(interventions_names),),
            dtype=self.interventions_dtype)
    metadata['demographics_categorical_indicator'] = demo_is_categorical
    metadata['combined_categorical_indicator'] = combined_is_categorical
    features_dict['targets'] = targets
    features_dict['metadata'] = {'patient_id': self.patient_id_dtype}
    features_dict = FeaturesDict(features_dict)
    if builder.output_raw:
        supervised_keys = None
    else:
        supervised_keys = ("combined", "target")
    super().__init__(
        builder=builder,
        description=description,
        homepage=homepage,
        citation=citation,
        features=features_dict,
        supervised_keys=supervised_keys,
        metadata=metadata
    )
Dataset info for medical time series datasets.

Ensures all datasets follow a similar structure and can be used (almost)
interchangeably.

Args:
    builder: Builder class associated with this dataset info.
    targets: Dictionary of endpoints.
    demographics_names: Names of the demographics.
    vitals_names: Names of the vital measurements.
    lab_measurements_names: Names of the lab measurements.
    interventions_names: Names of the interventions.
    description: Dataset description.
    homepage: Homepage of dataset.
    citation: Citation of dataset.
625941b2f9cc0f698b14038f
def query(self, name, rdtype, timeout=2):
    if self.direct:
        return self.query_strategy.query(name, rdtype, timeout)
    return resolver.query(name, rdtype, self.dnssec)
Lookup.
625941b21f037a2d8b945f88
def is_blackjack(self):
    return len(self._cards) == HAND_START_SIZE and self.optimal_value() == HAND_LIMIT
Returns whether this hand has a blackjack/natural.
625941b2d268445f265b4bfd
def factorint(n):
    # P is assumed to be a module-level list of primes; div(n, p) is assumed
    # to return the quotient and remainder of n / p (like divmod).
    global P
    n = abs(n)
    d = {}
    while n > 1:
        n2 = n
        for p in P:
            q, r = div(n, p)
            if r == 0:
                n2 = q
                try:
                    d[p] += 1
                except KeyError:
                    d[p] = 1
                break
        if n2 == n:
            # no prime in P divides n; stop
            break
        n = n2
    return d
factorint(12) returns {2: 2, 3: 1}, where a: b means a key of prime number a
and b is the multiplicity of that prime. So 12 = 2**2 * 3**1 is the unique
factorization. This makes the integers a unique factorization domain (UFD).
625941b2293b9510aa2c3023
def implementation_content(
        classname, parentname, namespace="See", project='see-object'
):
    cls_camel_name, cls_func_name, cls_caps_name, cls_namespace_caps_name = (
        get_class_names(classname, namespace)
    )
    par_camel_name, par_func_name, par_caps_name, _ = get_class_names(parentname)
    return IMPLEMENTATION_INIT.format(
        ClassName=classname,
        lowername=classname.lower(),
        CamelCaseName=cls_camel_name,
        object_name=cls_func_name[(len(namespace) + 1):],
        function_name=cls_func_name,
        CLASS_NAME_CAPS=cls_caps_name,
        ParentCamelCaseName=par_camel_name,
        parent_name=par_func_name[(len(namespace) + 1):],
        parent_func_name=par_func_name,
        PARENT_CLASS_NAME_CAPS=par_caps_name,
        NAMESPACE_CAP=cls_namespace_caps_name,
        PROJECT=project
    )
Generate the boilerplate for the file that implements the new class.

@param classname [in] The CamelCase name for the class
@param parentname [in] The CamelCase name for the parent
@param namespace [in] The namespace of the library used to prepend the
    names of the functions
625941b2287bf620b61d37fb
@error.context_aware
def run(test, params, env):
    def kill_vm_by_signal_15():
        vm_pid = vm.get_pid()
        logging.info("VM: %s, PID: %s" % (vm.name, vm_pid))
        thread_pid = os.getpid()
        logging.info("Main Process ID is %s" % thread_pid)
        utils_misc.kill_process_tree(vm_pid, 15)
        return thread_pid

    def killer_report(re_str):
        output = vm.process.get_output()
        results = re.findall(re_str, output)
        if results:
            return results
        else:
            return False

    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()
    re_str = "terminating on signal 15 from pid ([0-9]+)"
    re_str = params.get("qemu_error_re", re_str)
    error.context("Kill VM by signal 15", logging.info)
    thread_pid = kill_vm_by_signal_15()
    results = utils_misc.wait_for(lambda: killer_report(re_str), 60, 2, 2)
    error.context("Check that QEMU can report who killed it", logging.info)
    if not results:
        raise error.TestFail("QEMU did not tell us who killed it")
    elif int(results[-1]) != thread_pid:
        msg = "QEMU identified the process that killed it incorrectly. "
        msg += "Killer PID: %s, " % thread_pid
        msg += "QEMU reported PID: %s" % int(results[-1])
        raise error.TestFail(msg)
    else:
        logging.info("QEMU identified the process that killed it properly")
Test that QEMU reports the process ID that sent it kill signals.

1) Start a VM.
2) Kill VM by signal 15 in another process.
3) Check that QEMU reports the process ID that sent it kill signals.

:param test: QEMU test object
:param params: Dictionary with the test parameters.
:param env: Dictionary with test environment.
625941b2d8ef3951e32432c6
def singleRequestResponse(self):
    done = False
    if not self.__mapr.report_id:
        (mapr, report_id) = self.actions
    while not done:
        try:
            map_instance = self.queue.get(timeout=1)
            while map_instance.report_id is None:
                rconn = self.redis_cache.getConnection()
                rconn.load_api_object(obj=map_instance)
                if map_instance.report_id is None:
                    pass
        except queue.Empty:
            logging.debug('Queue timed out, assuming closed.')
            done = True
Begin consuming map references and generating reports on them (also capable of resuming monitoring of running reports).
625941b2507cdc57c6306a5a
def get(self, key):
    key = bytes_to_str(key)
    LOGGER.debug("Getting CosmosDB document %s/%s/%s",
                 self._database_name, self._collection_name, key)
    try:
        document = self._client.ReadDocument(
            self._get_document_link(key),
            self._get_partition_key(key))
    except HTTPFailure as ex:
        if ex.status_code != ERROR_NOT_FOUND:
            raise
        return None
    else:
        return document.get("value")
Read the value stored at the given key.

Args:
    key: The key for which to read the value.
625941b223849d37ff7b2e23
def ShowCoverage(self):
    webbrowser.open(self.COVERAGE_INDEX)
Opens coverage summary HTML file in the default Internet browser.
625941b215baa723493c3d02
def is_email_valid_extended_finally(mailing_list):
    final_users_list = []
    try:
        for key, email in mailing_list.items():
            if '@' in email[1]:
                final_users_list.append(key)
            else:
                raise EmailNotValidError('Email format not valid.')
    except EmailNotValidError:
        print('An email in the mailing list is not valid.')
    finally:
        return final_users_list
This function extends the previous one by adding a `finally` block to
return the user ids for the users with valid email addresses.

Example:
    '41c30786-aa84-4d60-9879-0c53f8fad970': ['cgoodleyh', '[email protected]', 'active'],
    '480fb04a-d7cd-47c5-8079-b580cb14b4d9': ['csheraton4', 'pgatherell6-1.com', 'active']

This should return:
    - '41c30786-aa84-4d60-9879-0c53f8fad970', which is the id of the user
      with a valid email
    - Print a user-friendly message for the casted exception:
      'An email in the mailing list is not valid.'

:param mailing_list: the current mailing list with the active users
:return: the list of user ids with valid email addresses
625941b2d164cc6175782ad7
def create_object(self, object):
    adapter = self.adapter
    result = ListCanvasItem(
        canvas=self,
        hidden=self.hidden
    ).set(
        object=object,
        mutable_theme=adapter.get_mutable_theme(object),
        debug=adapter.get_debug(object),
        monitor=adapter.get_monitor(object)
    )
    self.hidden = False
    return result
Creates a specified HasTraits object as a new list canvas item.
625941b2462c4b4f79d1d459
def solve_enh(self):
    self.flow.set_Re_dependents()
    self.flow.h_conv = self.flow.Nu_D * self.flow.k / self.flow.D
    self.set_eta()
    self.set_h_and_P()
Runs all the other methods that need to run.

Methods:
    self.set_enh_geometry
    self.set_eta
    self.set_h_and_P
625941b282261d6c526ab22d
@blueprint.route('/api/users/<endpoint_id>')
@secure
def users(endpoint_id):
    with session_scope() as db_session:
        users_hits = get_users(db_session, endpoint_id)
        dicts = []
        for uh in users_hits:
            dicts.append({'user': uh[0], 'hits': uh[1]})
        return jsonify(dicts)
:param endpoint_id: integer
:return: A JSON-list with all users of a specific endpoint (user
    represented by a string)
625941b2097d151d1a222bed
def add_face(self, frame, alignment):
    logger.debug("Adding face to frame: '%s'", frame)
    self.data[frame].append(alignment)
    retval = self.count_faces_in_frame(frame) - 1
    logger.debug("Returning new face index: %s", retval)
    return retval
Add a new face for a frame and return its index
625941b224f1403a926008fc
def groupKey(params, pubKeys=[]):
    (G, g, h, o) = params
    pub = pubKeys[0]
    for key in pubKeys[1:]:
        pub = pub + key
    return pub
Generate a group public key from a list of public keys
625941b24527f215b584c1ee
def get_save_slug(extra_field=None):
    if extra_field:
        def save_slug(**kwargs):
            extra_filters = {}
            extra_filters[extra_field] = getattr(kwargs.get('instance'), extra_field)
            kwargs['instance'].slug = create_or_check_slug(kwargs['instance'],
                                                           kwargs['sender'],
                                                           **extra_filters)
    else:
        def save_slug(**kwargs):
            kwargs['instance'].slug = create_or_check_slug(kwargs['instance'],
                                                           kwargs['sender'])
    return save_slug
Returns a function to add or make an instance's slug unique

:param extra_field: field needed in case of a unique_together.
625941b26aa9bd52df036b2b
def test_expected_exception_not_passed_to_server_span_finish(self):
    class Handler(TestService.Iface):
        def example(self, context):
            raise TestService.ExpectedException()

    handler = Handler()
    server_span_observer = mock.Mock(spec=ServerSpanObserver)
    with serve_thrift(handler, TestService, server_span_observer) as server:
        with raw_thrift_client(server.endpoint, TestService) as client:
            with self.assertRaises(TestService.ExpectedException):
                client.example()
    server_span_observer.on_start.assert_called_once_with()
    server_span_observer.on_finish.assert_called_once_with(None)
If the server returns an expected exception, don't count it as failure.
625941b24f88993c3716bdff
def fibonacci_generate_generator():
    index, val1, val2 = 0, 0, 1
    while True:
        yield val1
        val1, val2 = val2, val1 + val2
        index = index + 1
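# Usage sketch:
#   fib = fibonacci_generate_generator()
#   [next(fib) for _ in range(7)]   # -> [0, 1, 1, 2, 3, 5, 8]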
Generate a generator for the fibonacci sequence.
625941b20383005118ecf36e
def process_sparse_grad2(grads):
    client_grads = []
    indices = grads[0].indices
    values = grads[0].values
    first_layer_dense = np.zeros((80, 8))
    for i in range(indices.shape[0]):
        first_layer_dense[indices[i], :] = values[i, :]
    client_grads.append(first_layer_dense)
    for i in range(1, len(grads)):
        client_grads.append(grads[i])
    return client_grads
:param grads: grad returned by LSTM model (only for the shakespeare
    dataset) (with indices)
:return: grads with the same shape as weights
625941b285dfad0860c3abe3
def key_search(inchikey, **kwargs):
    if inchikey.startswith('InChIKey='):
        prefix, inchikey = inchikey.split('=', 1)
    base_url = 'https://www.ebi.ac.uk/unichem/rest/key_search'
    search_url = '{base_url}/{StandardInChIKey}'.format(
        base_url=base_url,
        StandardInChIKey=inchikey)
    return connectivity_query(search_url, **kwargs)
Search by InChIKeys.
625941b28c0ade5d55d3e749
def extract_file(xml_file_path):
    dump = mwxml.Dump.from_file(mwtypes.files.reader(xml_file_path))
    site_name = dump.site_info.dbname
    for page in dump.pages:
        if (page.namespace not in [0, "0"]) or page.redirect:
            continue
        for revision in page:
            if revision.deleted.text or not revision.text:
                continue
            meta = extract_revision(revision)
            meta["site_name"] = site_name
            meta["page_title"] = page.title
            yield meta
Iterate over all revisions of all pages not in a namespace from the dump. Yields dicts. If we are processing one of the 'current' dumps, there will be only one Revision per Page.
625941b26aa9bd52df036b2c
def build_graph(parameters):
    input_tensor = tf.placeholder(
        dtype=parameters["input_dtype"],
        name="input",
        shape=parameters["input_shape"])
    if parameters["const_axis"]:
        axis = parameters["axis"]
        input_tensors = [input_tensor]
    else:
        if isinstance(parameters["axis"], list):
            shape = [len(parameters["axis"])]
        else:
            shape = [0]
        axis = tf.placeholder(dtype=tf.int32, name="axis", shape=shape)
        input_tensors = [input_tensor, axis]
    out = tf.reduce_mean(
        input_tensor, axis=axis, keepdims=parameters["keepdims"])
    return input_tensors, [out]
Build the mean op testing graph.
625941b2b5575c28eb68dd87
def list(self, request):
    a_viewset = [
        'enter your Key',
    ]
    return Response({'message': 'Hello', 'a_viewset': a_viewset})
Return a hello message together with the `a_viewset` list.
625941b28c3a873295158148
def get_permutations(sequence):
    seq_list = list(sequence)
    if len(sequence) == 1:
        return [sequence]
    else:
        per_list = []
        for i in range(len(seq_list)):
            let = seq_list[i]
            sub_seq = seq_list[:i] + seq_list[i+1:]
            for p in get_permutations(sub_seq):
                # join so permutations of a string come back as strings,
                # matching the docstring example
                per_list.append(let + ''.join(p))
        return per_list
Enumerate all permutations of a given string

sequence (string): an arbitrary string to permute. Assume that it is a
non-empty string.

You MUST use recursion for this part. Non-recursive solutions will not be
accepted.

Returns: a list of all permutations of sequence

Example:
>>> get_permutations('abc')
['abc', 'acb', 'bac', 'bca', 'cab', 'cba']

Note: depending on your implementation, you may return the permutations in
a different order than what is listed here.
625941b25166f23b2e1a4eea
@g.command('session-snapshot-save')
def session_snapshot_save_command(event):
    c = event.get('c')
    m = g.app.sessionManager
    if c and m:
        m.save_snapshot(c=c)
Save a snapshot of the present session to the leo.session file.
625941b2460517430c393f1d
def GetTrajectoryStatus(manipulator):
    raise NotImplementedError('GetTrajectoryStatus not supported on manipulator.'
                              ' Use returned TrajectoryFuture instead.')
Gets the status of the current (or previous) trajectory executed by OWD.

@return status of the current (or previous) trajectory executed
625941b2796e427e537b034c
def __len__(self):
    return len(self.images)
obtain the length of the data-items

:return: len => length
625941b2f8510a7c17cf9491
def checkForGameOver():
    # locateOnScreen returns a single box (or None), which is what
    # pyautogui.center() expects
    result = pyautogui.locateOnScreen(imPath('you_win.png'),
                                      region=(GAME_REGION[0] + 188,
                                              GAME_REGION[1] + 94, 262, 60))
    if result is not None:
        pyautogui.click(pyautogui.center(result))
        return LEVEL_WIN_MESSAGE
    result = pyautogui.locateOnScreen(imPath('you_failed.png'),
                                      region=(GAME_REGION[0] + 167,
                                              GAME_REGION[1] + 133, 314, 39))
    if result is not None:
        logging.info('Game over. Quitting')
        sys.exit()
Checks the screen for the "You Win" or "You Fail" message. On winning, returns the string in LEVEL_WIN_MESSAGE. On losing, the program terminates.
625941b273bcbd0ca4b2be07
def decode_to_text(stream, encoding=locale.getpreferredencoding(), errors='strict'):
    if hasattr(stream, 'decode'):
        return stream.decode(encoding, errors)
    if isinstance(stream, string_types):
        return stream
    raise TypeError("Unable to decode stream into a string-like type")
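# Usage sketch (assumes `string_types` from six and a UTF-8 byte string):
#   decode_to_text(b'caf\xc3\xa9', encoding='utf-8')   # -> 'café'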
Decode a byte stream to text.

:param stream: string stream
:param encoding: encoding type
:param errors: error handling to use while decoding (strict, replace,
    ignore, ...)
:return: decoded text
625941b2adb09d7d5db6c522