Columns: text (string, lengths 78 to 104k) · score (float64, range 0 to 0.18)
Each record below is a Python source sample (the "text" column) followed by its score value.
def get_stats_action(cachespec, interval):
    """Action for getting bcache statistics for a given cachespec.

    Cachespec can either be a device name, eg. 'sdb', which will retrieve
    cache stats for the given device, or 'global', which will retrieve
    stats for all cachesets.
    """
    if cachespec == 'global':
        caches = get_bcache_fs()
    else:
        caches = [Bcache.fromdevice(cachespec)]
    res = dict((c.cachepath, c.get_stats(interval)) for c in caches)
    return json.dumps(res, indent=4, separators=(',', ': '))
0.001848
def register_patch(self, name, path):
    """
    Registers given patch.

    :param name: Patch name.
    :type name: unicode
    :param path: Patch path.
    :type path: unicode
    :return: Method success.
    :rtype: bool
    """
    patch = foundations.strings.get_splitext_basename(path)
    LOGGER.debug("> Current patch: '{0}'.".format(patch))

    directory = os.path.dirname(path)
    if directory not in sys.path:
        sys.path.append(directory)

    module = __import__(patch)
    if hasattr(module, "apply") and hasattr(module, "UID"):
        self.__patches[name] = Patch(name=name,
                                     path=path,
                                     module=module,
                                     apply=getattr(module, "apply"),
                                     uid=getattr(module, "UID"))
    else:
        raise umbra.exceptions.PatchInterfaceError(
            "{0} | '{1}' is not a valid patch and has been rejected!".format(self.__class__.__name__, patch))
    return True
0.003604
def total_size(self):
    """
    Determine the size (in bytes) of this node.

    If an array, returns size of the entire array.
    """
    if self.inst.is_array:
        # Total size of arrays is technically supposed to be:
        #   self.inst.array_stride * (self.inst.n_elements - 1) + self.size
        # However this opens up a whole slew of ugly corner cases that the
        # spec designers may not have anticipated.
        # Using a simplified calculation for now until someone actually cares
        return self.inst.array_stride * self.inst.n_elements
    else:
        return self.size
0.004608
def do_title(s):
    """Return a titlecased version of the value. I.e. words will start with
    uppercase letters, all remaining characters are lowercase.
    """
    return ''.join(
        [item[0].upper() + item[1:].lower()
         for item in _word_beginning_split_re.split(soft_unicode(s))
         if item])
0.003175
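A minimal usage sketch for do_title above. The module-level helpers are not part of the excerpt, so both are assumed here: _word_beginning_split_re splitting on word boundaries while keeping the separators (Jinja2-style), and soft_unicode as a plain str coercion.

import re

_word_beginning_split_re = re.compile(r'([-\s(\{\[<]+)')  # assumed, Jinja2-style

def soft_unicode(s):  # assumed: plain string coercion
    return str(s)

print(do_title("my first POST"))  # -> "My First Post"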
def from_node(cls, work):
    """Initialize an instance from a :class:`Work` instance."""
    new = super().from_node(work)

    # Will put all files found in outdir in GridFs.
    # Warning: assuming binary files.
    d = {os.path.basename(f): f for f in work.outdir.list_filepaths()}
    new.register_gridfs_files(**d)

    return new
0.00551
def GetParserAndPluginNames(cls, parser_filter_expression=None):
  """Retrieves the parser and parser plugin names.

  Args:
    parser_filter_expression (Optional[str]): parser filter expression,
        where None represents all parsers and plugins.

  Returns:
    list[str]: parser and parser plugin names.
  """
  parser_and_plugin_names = []
  for parser_name, parser_class in cls.GetParsers(
      parser_filter_expression=parser_filter_expression):
    parser_and_plugin_names.append(parser_name)
    if parser_class.SupportsPlugins():
      for plugin_name, _ in parser_class.GetPlugins():
        parser_and_plugin_names.append(
            '{0:s}/{1:s}'.format(parser_name, plugin_name))

  return parser_and_plugin_names
0.006536
def dropEvent(self, event):
    """
    Handles the drop event.

    :param      event | <QDropEvent>
    """
    tags = nativestring(event.mimeData().text())

    # handle an internal move
    if event.source() == self:
        curr_item = self.selectedItems()[0]
        create_item = self.createItem()

        # don't allow moving of the creation item
        if curr_item == create_item:
            return

        targ_item = self.itemAt(event.pos())
        if not targ_item:
            targ_item = create_item

        curr_idx = self.row(curr_item)
        targ_idx = self.row(targ_item)
        if targ_idx == self.count() - 1:
            targ_idx -= 1

        # don't bother moving the same item
        if curr_idx == targ_idx:
            return

        self.takeItem(self.row(curr_item))
        self.insertItem(targ_idx, curr_item)
        self.setCurrentItem(curr_item)

    elif tags:
        for tag in tags.split(','):
            tag = tag.strip()
            if self.isTagValid(tag):
                self.addTag(tag)
    else:
        event.accept()
0.017319
def replace(self, scaling_group, name, cooldown, min_entities,
            max_entities, metadata=None):
    """
    Replace an existing ScalingGroup configuration. All of the attributes
    must be specified. If you wish to delete any of the optional
    attributes, pass them in as None.
    """
    return self._manager.replace(scaling_group, name, cooldown,
                                 min_entities, max_entities,
                                 metadata=metadata)
0.006696
def to_tensor(X, device, accept_sparse=False):
    """Turn input data to torch tensor.

    Parameters
    ----------
    X : input data
      Handles the cases:
        * PackedSequence
        * numpy array
        * torch Tensor
        * scipy sparse CSR matrix
        * list or tuple of one of the former
        * dict with values of one of the former

    device : str, torch.device
      The compute device to be used. If set to 'cuda', data in torch
      tensors will be pushed to cuda tensors before being sent to the
      module.

    accept_sparse : bool (default=False)
      Whether to accept scipy sparse matrices as input. If False,
      passing a sparse matrix raises an error. If True, it is
      converted to a torch COO tensor.

    Returns
    -------
    output : torch Tensor

    """
    to_tensor_ = partial(to_tensor, device=device)

    if is_torch_data_type(X):
        return X.to(device)
    if isinstance(X, dict):
        return {key: to_tensor_(val) for key, val in X.items()}
    if isinstance(X, (list, tuple)):
        return [to_tensor_(x) for x in X]
    if np.isscalar(X):
        return torch.as_tensor(X, device=device)
    if isinstance(X, Sequence):
        return torch.as_tensor(np.array(X), device=device)
    if isinstance(X, np.ndarray):
        return torch.as_tensor(X, device=device)
    if sparse.issparse(X):
        if accept_sparse:
            return torch.sparse_coo_tensor(
                X.nonzero(), X.data, size=X.shape).to(device)
        raise TypeError("Sparse matrices are not supported. Set "
                        "accept_sparse=True to allow sparse matrices.")

    raise TypeError("Cannot convert this data type to a torch tensor.")
0.000585
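A short usage sketch for to_tensor above, assuming the function's own dependencies (torch, numpy, scipy.sparse, functools.partial and the is_torch_data_type helper) are in scope as the function itself requires:

import numpy as np

X = {'X1': np.zeros((2, 3)), 'X2': np.ones(4)}
t = to_tensor(X, device='cpu')
print(t['X1'].shape)  # torch.Size([2, 3])
print(t['X2'].dtype)  # torch.float64 (numpy default dtype is preserved)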
def run_action(self, feature, action, run_if_error=False, raise_exception=True):
    """ Run an action, and log its output in case of errors """
    if len(self._error_dict[feature]) > 0 and not run_if_error:
        return

    error = None
    instance = self.features[feature]
    try:
        getattr(instance, action)()
    # catch a generic exception within a feature
    except Exception as e:
        e = sys.exc_info()[1]
        self.logger.info("An exception occurred with action %s in feature %s!" % (action, feature))
        self.logger.debug("Exception", exc_info=sys.exc_info())
        error = str(e)
        self.log_feature_error(feature, str(e))
    # any error in a feature should fail immediately - unless it occurred
    # from the remove() method in which case continue the rest of the
    # feature removal from there
    if error is not None and raise_exception:
        exception_msg = "%s action failed for feature %s: %s" % (action, feature, error)
        if self.phase == PHASE.REMOVE:
            raise FormulaException(exception_msg)
        else:
            raise SprinterException(exception_msg)
    return error
0.004573
def parse_cluster(self, global_params, region, cluster):
    """
    Parse a single EMR cluster

    :param global_params: Parameters shared for all regions
    :param region: Name of the AWS region
    :param cluster: EMR cluster
    """
    cluster_id = cluster['Id']
    cluster = api_clients[region].describe_cluster(ClusterId=cluster_id)['Cluster']
    cluster['id'] = cluster.pop('Id')
    cluster['name'] = cluster.pop('Name')
    # The EMR API won't disclose the VPC ID, so wait until all configs have
    # been fetched and look up the VPC based on the subnet ID
    vpc_id = 'TODO'
    manage_dictionary(self.vpcs, vpc_id, VPCConfig(self.vpc_resource_types))
    self.vpcs[vpc_id].clusters[cluster_id] = cluster
0.009926
def get_resource_allocation(self):
    """Get the :py:class:`ResourceAllocation` element instance.

    Returns:
        ResourceAllocation: Resource allocation used to access information
        about the resource where this PE is running.

    .. versionadded:: 1.9
    """
    if hasattr(self, 'resourceAllocation'):
        return ResourceAllocation(
            self.rest_client.make_request(self.resourceAllocation),
            self.rest_client)
0.009009
def make_sine_surface(dims=DEFAULT_DIMS, offset=0.5, scale=1.0):
    """Makes a surface from the 3D sine function.

    Args:
        dims (pair): the dimensions of the surface to create
        offset (float): an offset applied to the function
        scale (float): a scale applied to the sine frequency

    Returns:
        surface: A surface.
    """
    gradients = (np.array(make_gradients(dims)) - offset) * scale * np.pi
    return np.sin(np.linalg.norm(gradients, axis=0))
0.002075
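A runnable sketch of how make_sine_surface above might be driven. DEFAULT_DIMS and make_gradients are not shown in the excerpt, so both are assumed here: gradients as unit-interval coordinate grids built with np.meshgrid.

import numpy as np

DEFAULT_DIMS = (64, 64)  # assumed default

def make_gradients(dims):  # assumed helper: one [0, 1] gradient per axis
    return np.meshgrid(*(np.linspace(0.0, 1.0, d) for d in dims))

surface = make_sine_surface()
print(surface.shape)  # (64, 64); values lie in [0, 1] for these parameters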
def dumps(obj, decimals=16):
    """
    Dump a GeoJSON-like `dict` to a WKT string.
    """
    try:
        geom_type = obj['type']
        exporter = _dumps_registry.get(geom_type)

        if exporter is None:
            _unsupported_geom_type(geom_type)

        # Check for empty cases
        if geom_type == 'GeometryCollection':
            if len(obj['geometries']) == 0:
                return 'GEOMETRYCOLLECTION EMPTY'
        else:
            # Geom has no coordinate values at all, and must be empty.
            if len(list(util.flatten_multi_dim(obj['coordinates']))) == 0:
                return '%s EMPTY' % geom_type.upper()
    except KeyError:
        raise geomet.InvalidGeoJSONException('Invalid GeoJSON: %s' % obj)

    result = exporter(obj, decimals)

    # Try to get the SRID from `meta.srid`
    meta_srid = obj.get('meta', {}).get('srid')
    # Also try to get it from `crs.properties.name`:
    crs_srid = obj.get('crs', {}).get('properties', {}).get('name')
    if crs_srid is not None:
        # Shave off the EPSG prefix to give us the SRID:
        crs_srid = crs_srid.replace('EPSG', '')

    if (meta_srid is not None and crs_srid is not None
            and str(meta_srid) != str(crs_srid)):
        raise ValueError(
            'Ambiguous CRS/SRID values: %s and %s' % (meta_srid, crs_srid)
        )

    srid = meta_srid or crs_srid

    # TODO: add tests for CRS input
    if srid is not None:
        # Prepend the SRID
        result = 'SRID=%s;%s' % (srid, result)

    return result
0.000649
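A hedged usage sketch for dumps above. The exporter registry and exception types come from the surrounding module (this reads like geomet's WKT writer), so the exact decimal formatting depends on the registered exporters:

point = {'type': 'Point', 'coordinates': [0.0, 1.0]}
print(dumps(point, decimals=1))  # e.g. 'POINT (0.0 1.0)'

# SRID handling follows directly from the code above:
point_with_srid = {'type': 'Point', 'coordinates': [0.0, 1.0],
                   'meta': {'srid': 4326}}
print(dumps(point_with_srid, decimals=1))  # e.g. 'SRID=4326;POINT (0.0 1.0)'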
def new_run(self):
    """Creates a new RunData object and increments pointers"""
    self.current_run += 1
    self.runs.append(RunData(self.current_run + 1))
0.011696
def g(self, id):
    """ If the given id is known, the numerical representation is returned,
    otherwise a new running number is assigned to the id and returned"""
    if id not in self._m:
        if self.orig_ids:
            self._m[id] = id
            if self.warn:
                try:
                    int(id)
                except ValueError:
                    sys.stderr.write(
                        'Warning: ID "%s" is not an integer.\n' % id)
                    self.warn = False
        else:
            self._m[id] = self.index
            self.index += 1
    return self._m[id]
0.004511
def convert(self, destination_units):
    """Convert units.

    Parameters
    ----------
    destination_units : string (optional)
        Units to convert into.
    """
    if not wt_units.is_valid_conversion(self.units, destination_units):
        kind = wt_units.kind(self.units)
        valid = list(wt_units.dicts[kind].keys())
        raise wt_exceptions.UnitsError(valid, destination_units)
    if self.units is None:
        return

    def f(dataset, s, destination_units):
        dataset[s] = wt_units.converter(dataset[s], dataset.units, destination_units)

    self.chunkwise(f, destination_units=destination_units)
    self.units = destination_units
0.004115
def get_values(self, obj):
    """get label and shape for classes.

    The label contains all attributes and methods
    """
    label = obj.title
    if obj.shape == "interface":
        label = "«interface»\\n%s" % label
    if not self.config.only_classnames:
        label = r"%s|%s\l|" % (label, r"\l".join(obj.attrs))
        for func in obj.methods:
            args = [arg.name for arg in func.args.args if arg.name != "self"]
            label = r"%s%s(%s)\l" % (label, func.name, ", ".join(args))
        label = "{%s}" % label
    if is_exception(obj.node):
        return dict(fontcolor="red", label=label, shape="record")
    return dict(label=label, shape="record")
0.004054
def _getExperimentDescriptionSchema():
    """
    Returns the experiment description schema. This implementation loads it in
    from file experimentDescriptionSchema.json.

    Parameters:
    --------------------------------------------------------------------------
    Returns:    returns a dict representing the experiment description schema.
    """
    installPath = os.path.dirname(os.path.abspath(__file__))
    schemaFilePath = os.path.join(installPath, "experimentDescriptionSchema.json")
    # use a context manager so the file handle is closed
    with open(schemaFilePath, 'r') as schemaFile:
        return json.loads(schemaFile.read())
0.011194
def produce_characteristic_explorer(corpus,
                                    category,
                                    category_name=None,
                                    not_category_name=None,
                                    not_categories=None,
                                    characteristic_scorer=DenseRankCharacteristicness(),
                                    term_ranker=termranking.AbsoluteFrequencyRanker,
                                    term_scorer=RankDifference(),
                                    **kwargs):
    '''
    Parameters
    ----------
    corpus : Corpus
        It is highly recommended to use a stoplisted, unigram corpus--
        `corpus.get_stoplisted_unigram_corpus()`
    category : str
    category_name : str
    not_category_name : str
    not_categories : list
    characteristic_scorer : CharacteristicScorer
    term_ranker
    term_scorer
    term_acceptance_re : SRE_Pattern
        Regular expression to identify valid terms
    kwargs : dict
        remaining produce_scattertext_explorer keywords

    Returns
    -------
    str HTML of visualization
    '''
    if not_categories is None:
        not_categories = [c for c in corpus.get_categories() if c != category]
    category_name, not_category_name = get_category_names(
        category, category_name, not_categories, not_category_name)

    zero_point, characteristic_scores = characteristic_scorer.get_scores(corpus)
    corpus = corpus.remove_terms(set(corpus.get_terms()) - set(characteristic_scores.index))
    characteristic_scores = characteristic_scores.loc[corpus.get_terms()]

    term_freq_df = term_ranker(corpus).get_ranks()
    scores = term_scorer.get_scores(
        term_freq_df[category + ' freq'],
        term_freq_df[[c + ' freq' for c in not_categories]].sum(axis=1)
    )
    kwargs['scores'] = kwargs.get('scores', scores)

    max_score = np.floor(np.max(kwargs['scores']) * 100) / 100
    min_score = np.ceil(np.min(kwargs['scores']) * 100) / 100
    if min_score < 0 and max_score > 0:
        central = 0
    else:
        central = 0.5

    scores_scaled_for_charting = scale_neg_1_to_1_with_zero_mean_abs_max(kwargs['scores'])

    html = produce_scattertext_explorer(
        corpus=corpus,
        category=category,
        category_name=category_name,
        not_category_name=not_category_name,
        not_categories=not_categories,
        minimum_term_frequency=0,
        sort_by_dist=False,
        x_coords=characteristic_scores,
        y_coords=scores_scaled_for_charting,
        y_axis_labels=kwargs.get('y_axis_labels',
                                 ['More ' + not_category_name, 'Even', 'More ' + category_name]),
        x_label=kwargs.get('x_label', 'Characteristic to Corpus'),
        y_label=kwargs.get('y_label', term_scorer.get_name()),
        vertical_lines=kwargs.get('vertical_lines', []),
        characteristic_scorer=kwargs.get('characteristic_scorer', characteristic_scorer),
        **kwargs
    )
    return html
0.002607
def bounds(self):
    """Gets the bounds of a tile represented as the most west and south
    point and the most east and north point"""
    google_x, google_y = self.google
    pixel_x_west, pixel_y_north = google_x * TILE_SIZE, google_y * TILE_SIZE
    pixel_x_east, pixel_y_south = (google_x + 1) * TILE_SIZE, (google_y + 1) * TILE_SIZE

    point_min = Point.from_pixel(pixel_x=pixel_x_west, pixel_y=pixel_y_south, zoom=self.zoom)
    point_max = Point.from_pixel(pixel_x=pixel_x_east, pixel_y=pixel_y_north, zoom=self.zoom)
    return point_min, point_max
0.011986
def valid_file(cls, filename):
    """
    Check if the provided file is a valid file for this plugin.

    :arg filename: the path to the file to check.
    """
    return not os.path.isdir(filename) \
        and os.path.basename(filename).startswith('Session ') \
        and filename.endswith('.mqo')
0.006154
def norm(x, mu, sigma=1.0):
    """ Scipy norm function """
    return stats.norm(loc=mu, scale=sigma).pdf(x)
0.009174
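A quick check of the wrapper above (scipy required; `stats` is scipy.stats in the source module):

from scipy import stats

print(norm(0.0, mu=0.0))             # ~0.3989, the standard normal peak
print(norm(1.0, mu=0.0, sigma=2.0))  # pdf of N(0, 2) evaluated at x=1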
def _compile(self, p):
    """ Recursively compiles the regexs in the pattern (p). """
    if self._is_value_filter(p) and p[0] == '=~':
        try:
            p[2] = re.compile(p[2])
        except:  # Python doesn't document exactly what exceptions re.compile throws
            raise ValueError('Bad regex - {0}'.format(p[2]))
    elif self._is_operator(p):
        for operator_or_filter in (p[1] if p[0] != '!' else [p[1]]):
            self._compile(operator_or_filter)
0.007561
def spawn_isolated_child(self):
    """
    Fork or launch a new child off the target context.

    :returns:
        mitogen.core.Context of the new child.
    """
    return self.get_chain(use_fork=True).call(
        ansible_mitogen.target.spawn_isolated_child
    )
0.006645
def _chown(self, path, uid, gid):
    """Change the *owner* of a resource."""
    if uid is None or gid is None:
        info = self.getinfo(path, namespaces=('access',))
        uid = uid or info.get('access', 'uid')
        gid = gid or info.get('access', 'gid')
    self._sftp.chown(path, uid, gid)
0.005988
def stream_download(self, chunk_size: Optional[int] = None,
                    callback: Optional[Callable] = None) -> AsyncIterator[bytes]:
    """Generator for streaming request body data."""
    chunk_size = chunk_size or CONTENT_CHUNK_SIZE

    async def async_gen(resp):
        while True:
            chunk = await resp.content.read(chunk_size)
            if not chunk:
                break
            if callback is not None:  # callback is optional; guard before invoking
                callback(chunk, resp)
            # yield the chunk so callers can actually consume the stream, as
            # the AsyncIterator[bytes] return annotation implies
            yield chunk

    return async_gen(self.internal_response)
0.007952
def current_custom_claims():
    """
    This method returns any custom claims in the current jwt
    """
    jwt_data = get_jwt_data_from_app_context()
    return {k: v for (k, v) in jwt_data.items() if k not in RESERVED_CLAIMS}
0.004367
def make_qs(n, m=None):
    """Make sympy symbols q0, q1, ...

    Args:
        n(int), m(int, optional):
            If both n and m are specified, returns [qn, q(n+1), ..., q(m-1)].
            If only n is specified, returns [q0, q1, ..., q(n-1)].

    Return:
        tuple(Symbol): Tuple of sympy symbols.
    """
    try:
        import sympy
    except ImportError:
        raise ImportError("This function requires sympy. Please install it.")
    if m is None:
        syms = sympy.symbols(" ".join(f"q{i}" for i in range(n)))
        if isinstance(syms, tuple):
            return syms
        return (syms,)
    syms = sympy.symbols(" ".join(f"q{i}" for i in range(n, m)))
    if isinstance(syms, tuple):
        return syms
    return (syms,)
0.002584
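A behavior sketch for make_qs above (sympy required); note the half-open range semantics documented in the docstring:

print(make_qs(3))     # (q0, q1, q2)
print(make_qs(1, 4))  # (q1, q2, q3)
print(make_qs(1))     # (q0,) -- the single-symbol case still returns a tuple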
def zeroize():
    '''
    Resets the device to default factory settings

    CLI Example:

    .. code-block:: bash

        salt 'device_name' junos.zeroize
    '''
    conn = __proxy__['junos.conn']()
    ret = {}
    ret['out'] = True
    try:
        conn.cli('request system zeroize')
        ret['message'] = 'Completed zeroize and rebooted'
    except Exception as exception:
        ret['message'] = 'Could not zeroize due to : "{0}"'.format(exception)
        ret['out'] = False

    return ret
0.001984
def opener(ip_address, port, delay=1):
    """
    Wait a little and then open a web browser page for the control panel.
    """
    global WEBPAGE_OPENED
    if WEBPAGE_OPENED:
        return
    WEBPAGE_OPENED = True
    raw_opener(ip_address, port, delay)
0.003876
def no_param_shortcut(parser, token):
    """ Shortcut to transmogrify thumbnail """
    bits = smart_split(token.contents)
    tagname = bits.next()
    try:
        imageurl = bits.next()
    except StopIteration:
        raise template.TemplateSyntaxError("%r tag requires at least the image url" % tagname)
    return MogrifyNode(imageurl, [(tagname, ), ])
0.00542
def wait(fs, timeout=-1, return_when=ALL_COMPLETED):
    """Wait for the futures in the given sequence to complete. Using this
    function may prevent a worker from executing.

    :param fs: The sequence of Futures to wait upon.
    :param timeout: The maximum number of seconds to wait. If negative or not
                    specified, then there is no limit on the wait time.
    :param return_when: Indicates when this function should return. The
                        options are:

        =============== ================================================
        FIRST_COMPLETED Return when any future finishes or is cancelled.
        FIRST_EXCEPTION Return when any future finishes by raising an
                        exception. If no future raises an exception then
                        it is equivalent to ALL_COMPLETED.
        ALL_COMPLETED   Return when all futures finish or are cancelled.
        =============== ================================================

    :return: A named 2-tuple of sets. The first set, named 'done', contains
             the futures that completed (is finished or cancelled) before
             the wait completed. The second set, named 'not_done', contains
             uncompleted futures."""
    DoneAndNotDoneFutures = namedtuple('DoneAndNotDoneFutures', 'done not_done')
    if timeout < 0:
        # Negative timeout means blocking.
        if return_when == FIRST_COMPLETED:
            next(_waitAny(*fs))
        elif return_when in [ALL_COMPLETED, FIRST_EXCEPTION]:
            for _ in _waitAll(*fs):
                pass
        done = set(f for f in fs if f.done())
        not_done = set(fs) - done
        return DoneAndNotDoneFutures(done, not_done)
    elif timeout == 0:
        # Zero-value entry means non-blocking
        control.execQueue.flush()
        control.execQueue.updateQueue()
        done = set(f for f in fs if f._ended())
        not_done = set(fs) - done
        return DoneAndNotDoneFutures(done, not_done)
    else:
        # Any other value means blocking for a given time.
        done = set()
        start_time = time.time()
        while time.time() - start_time < timeout:
            # Flush futures on local queue (to be executed remotely)
            control.execQueue.flush()
            # Block until data arrives (to free CPU time)
            control.execQueue.socket._poll(time.time() - start_time)
            # Update queue
            control.execQueue.updateQueue()
            for f in fs:
                if f._ended():
                    done.add(f)
            not_done = set(fs) - done
            if return_when == FIRST_COMPLETED and len(done) > 0:
                break
            if len(not_done) == 0:
                break
        return DoneAndNotDoneFutures(done, not_done)
0.000721
def distributions_route(self, request):
    """Given a tag and single run, return an array of compressed histograms."""
    tag = request.args.get('tag')
    run = request.args.get('run')
    try:
        (body, mime_type) = self.distributions_impl(tag, run)
        code = 200
    except ValueError as e:
        (body, mime_type) = (str(e), 'text/plain')
        code = 400
    return http_util.Respond(request, body, mime_type, code=code)
0.011547
def setColor(self, poiID, color):
    """setColor(string, (integer, integer, integer, integer)) -> None

    Sets the rgba color of the poi.
    """
    self._connection._beginMessage(
        tc.CMD_SET_POI_VARIABLE, tc.VAR_COLOR, poiID, 1 + 1 + 1 + 1 + 1)
    self._connection._string += struct.pack("!BBBBB", tc.TYPE_COLOR, int(
        color[0]), int(color[1]), int(color[2]), int(color[3]))
    self._connection._sendExact()
0.004338
def on_decks(self, *args):
    """Inform the cards of their deck and their index within the deck;
    extend the ``_hint_offsets`` properties as needed; and trigger a
    layout.
    """
    if None in (
            self.canvas,
            self.decks,
            self.deck_x_hint_offsets,
            self.deck_y_hint_offsets
    ):
        Clock.schedule_once(self.on_decks, 0)
        return
    self.clear_widgets()
    decknum = 0
    for deck in self.decks:
        cardnum = 0
        for card in deck:
            if not isinstance(card, Card):
                raise TypeError("You must only put Card in decks")
            if card not in self.children:
                self.add_widget(card)
            if card.deck != decknum:
                card.deck = decknum
            if card.idx != cardnum:
                card.idx = cardnum
            cardnum += 1
        decknum += 1
    if len(self.deck_x_hint_offsets) < len(self.decks):
        self.deck_x_hint_offsets = list(self.deck_x_hint_offsets) + [0] * (
            len(self.decks) - len(self.deck_x_hint_offsets)
        )
    if len(self.deck_y_hint_offsets) < len(self.decks):
        self.deck_y_hint_offsets = list(self.deck_y_hint_offsets) + [0] * (
            len(self.decks) - len(self.deck_y_hint_offsets)
        )
    self._trigger_layout()
0.001364
def recurring(self, offset=0, count=25):
    '''Return all the recurring jobs'''
    return self.client('jobs', 'recurring', self.name, offset, count)
0.012658
def read(url, **args):
    """Loads an object from a data URI."""
    info, data = url.path.split(',')
    info = data_re.search(info).groupdict()
    mediatype = info.setdefault('mediatype', 'text/plain;charset=US-ASCII')
    if ';' in mediatype:
        mimetype, params = mediatype.split(';', 1)
        params = [p.split('=') for p in params.split(';')]
        params = dict((k.strip(), v.strip()) for k, v in params)
    else:
        mimetype, params = mediatype, dict()
    data = base64.b64decode(data) if info['base64'] else urllib.unquote(data)
    return content_types.get(mimetype).parse(data, **params)
0.001623
def selection(self):
    """
    Selection property.

    :return: None if no font is selected and font family name if one is
             selected.
    :rtype: None or str
    """
    selection = self.listbox.curselection()
    # `is 0` relied on CPython small-int caching; use equality instead
    if len(selection) == 0:
        return None
    return self.font_indexes[selection[0]]
0.010753
def readedf(filename):
    """Read an ESRF data file (measured at beamlines ID01 or ID02)

    Inputs
    ------
    filename: string
        the input file name

    Output
    ------
    the imported EDF structure in a dict. The scattering pattern is under
    key 'data'.

    Notes
    -----
    Only datatype ``FloatValue`` is supported right now.
    """
    edf = header.readehf(filename)
    # use a context manager so the file handle is always closed
    with open(filename, 'rb') as f:
        f.read(edf['EDF_HeaderSize'])  # skip header.
        if edf['DataType'] == 'FloatValue':
            dtype = np.float32
        else:
            raise NotImplementedError(
                'Not supported data type: %s' % edf['DataType'])
        edf['data'] = np.fromstring(f.read(edf['EDF_BinarySize']), dtype).reshape(
            edf['Dim_1'], edf['Dim_2'])
    return edf
0.001287
def get_users_by_tag(self, tag_id, next_open_id=""):
    """
    Get the list of followers under a tag.

    :param tag_id: tag ID
    :param next_open_id: OPENID of the first user to pull; defaults to
                         pulling from the beginning
    :return: the returned JSON data package
    """
    return self.post(
        url="https://api.weixin.qq.com/cgi-bin/user/tag/get",
        data={
            "tagid": tag_id,
            "next_openid": next_open_id
        }
    )
0.004762
def focusInEvent(self, event):
    """
    When this widget gains focus, try to emit the record changed event
    signal.
    """
    self._changedRecord = -1
    super(XOrbRecordBox, self).focusInEvent(event)
0.008368
def _count_citations(aux_file):
    '''
    Counts the citations in an aux-file.

    @return: defaultdict(int) - {citation_name: number, ...}
    '''
    counter = defaultdict(int)
    with open(aux_file) as fobj:
        content = fobj.read()
    for match in CITE_PATTERN.finditer(content):
        name = match.groups()[0]
        counter[name] += 1

    return counter
0.002674
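The module-level CITE_PATTERN used by _count_citations above is not shown; for LaTeX .aux files it is presumably something like the pattern assumed below. A self-contained sketch of the counting logic on an in-memory string:

import re
from collections import defaultdict

CITE_PATTERN = re.compile(r'\\citation\{([^}]+)\}')  # assumed pattern

aux_content = "\\citation{knuth1984}\n\\citation{lamport1994}\n\\citation{knuth1984}\n"
counter = defaultdict(int)
for match in CITE_PATTERN.finditer(aux_content):
    counter[match.groups()[0]] += 1
print(dict(counter))  # {'knuth1984': 2, 'lamport1994': 1}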
def insert_graph(cur, nodelist, edgelist, encoded_data=None):
    """Insert a graph into the cache.

    A graph is stored by number of nodes, number of edges and a
    json-encoded list of edges.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        nodelist (list): The nodes in the graph.

        edgelist (list): The edges in the graph.

        encoded_data (dict, optional): If a dictionary is provided, it
            will be populated with the serialized data. This is useful for
            preventing encoding the same information many times.

    Notes:
        This function assumes that the nodes are index-labeled and range
        from 0 to num_nodes - 1.

        In order to minimize the total size of the cache, it is a good
        idea to sort the nodelist and edgelist before inserting.

    Examples:
        >>> nodelist = [0, 1, 2]
        >>> edgelist = [(0, 1), (1, 2)]
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_graph(cur, nodelist, edgelist)

        >>> nodelist = [0, 1, 2]
        >>> edgelist = [(0, 1), (1, 2)]
        >>> encoded_data = {}
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_graph(cur, nodelist, edgelist, encoded_data)
        >>> encoded_data['num_nodes']
        3
        >>> encoded_data['num_edges']
        2
        >>> encoded_data['edges']
        '[[0,1],[1,2]]'

    """
    if encoded_data is None:
        encoded_data = {}

    if 'num_nodes' not in encoded_data:
        encoded_data['num_nodes'] = len(nodelist)
    if 'num_edges' not in encoded_data:
        encoded_data['num_edges'] = len(edgelist)
    if 'edges' not in encoded_data:
        encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':'))

    insert = \
        """
        INSERT OR IGNORE INTO graph(num_nodes, num_edges, edges)
        VALUES (:num_nodes, :num_edges, :edges);
        """

    cur.execute(insert, encoded_data)
0.00049
def _run_server(self):
    """ Start the HTTP server """
    try:
        if __conf__.DEBUG:
            self._webapp.listen(self._port)
        else:
            server = HTTPServer(self._webapp)
            server.bind(self._port)
            server.start(0)
        IOLoop.current().start()
    except KeyboardInterrupt:
        print("exit ...")
0.007109
def _update_targets(vesseldicts, environment_dict):
    """
    <Purpose>
        Connects to the nodes in the vesseldicts and adds them to the list
        of valid targets.

    <Arguments>
        vesseldicts:
            A list of vesseldicts obtained through SeattleClearinghouseClient
            calls.

    <Side Effects>
        All valid targets that the user can access on the specified nodes
        are added to the list of targets.

    <Exceptions>
        None

    <Returns>
        None
    """
    # Compile a list of the nodes that we need to check
    nodelist = []
    for vesseldict in vesseldicts:
        nodeip_port = vesseldict['node_ip'] + ':' + str(vesseldict['node_port'])
        if not nodeip_port in nodelist:
            nodelist.append(nodeip_port)

    # we'll output a message about the new keys later...
    newidlist = []
    faillist = []

    # Clear the list so that the user doesn't target vessels acquired from
    # previous requests when targeting this group
    seash_global_variables.targets['acquired'] = []

    print nodelist

    # currently, if I browse more than once, I look up everything again...
    retdict = seash_helper.contact_targets(
        nodelist,
        seash_helper.browse_target,
        environment_dict['currentkeyname'],
        'acquired')

    # parse the output so we can print out something intelligible
    for nodename in retdict:
        if retdict[nodename][0]:
            newidlist = newidlist + retdict[nodename][1]
        else:
            faillist.append(nodename)

    seash_helper.print_vessel_errors(retdict)

    if len(newidlist) == 0:
        print "Could not add any new targets."
    else:
        print "Added targets: " + ", ".join(newidlist)

    if len(seash_global_variables.targets['acquired']) > 0:
        num_targets = str(len(seash_global_variables.targets['acquired']))
        print "Added group 'acquired' with " + num_targets + " targets"
0.013445
def mavlink_packet(self, m):
    '''handle mavlink packets'''
    if m.get_type() == 'GLOBAL_POSITION_INT':
        if self.settings.target_system == 0 or self.settings.target_system == m.get_srcSystem():
            self.packets_mytarget += 1
        else:
            self.packets_othertarget += 1
0.009288
def get_ht_mcs(mcs):
    """http://git.kernel.org/cgit/linux/kernel/git/jberg/iw.git/tree/util.c?id=v3.17#n591.

    Positional arguments:
    mcs -- bytearray.

    Returns:
    Dict.
    """
    answers = dict()

    max_rx_supp_data_rate = (mcs[10] & ((mcs[11] & 0x3) << 8))
    tx_mcs_set_defined = not not (mcs[12] & (1 << 0))
    tx_mcs_set_equal = not (mcs[12] & (1 << 1))
    tx_max_num_spatial_streams = ((mcs[12] >> 2) & 3) + 1
    tx_unequal_modulation = not not (mcs[12] & (1 << 4))

    if max_rx_supp_data_rate:
        answers['HT Max RX data rate (Mbps)'] = max_rx_supp_data_rate

    if tx_mcs_set_defined and tx_mcs_set_equal:
        answers['HT TX/RX MCS rate indexes supported'] = get_mcs_index(mcs)
    elif tx_mcs_set_defined:
        answers['HT RX MCS rate indexes supported'] = get_mcs_index(mcs)
        answers['TX unequal modulation supported'] = bool(tx_unequal_modulation)
        answers['HT TX Max spatial streams'] = tx_max_num_spatial_streams
    else:
        answers['HT RX MCS rate indexes supported'] = get_mcs_index(mcs)

    return answers
0.001854
def _mine_delete(self, load):
    '''
    Allow the minion to delete a specific function from its own mine

    :param dict load: A payload received from a minion

    :rtype: bool
    :return: Boolean indicating whether or not the given function was
             deleted from the mine
    '''
    load = self.__verify_load(load, ('id', 'fun', 'tok'))
    if load is False:
        return {}
    else:
        return self.masterapi._mine_delete(load)
0.006237
def forum_post_update(self, topic_id, body):
    """Update a specific forum post (Requires login)(Moderator+)(UNTESTED).

    Parameters:
        topic_id (int): Forum topic id.
        body (str): Post content.
    """
    params = {'forum_post[body]': body}
    return self._get('forum_posts/{0}.json'.format(topic_id), params,
                     method='PUT', auth=True)
0.004926
def _call(ins):
    """ Calls a function XXXX (or address XXXX)
    2nd parameter contains size of the returning result if any, and will be
    pushed onto the stack.
    """
    output = []
    output.append('call %s' % str(ins.quad[1]))

    try:
        val = int(ins.quad[2])
        if val == 1:
            output.append('push af')  # Byte
        else:
            if val > 4:
                output.extend(_fpush())
            else:
                if val > 2:
                    output.append('push de')
                if val > 1:
                    output.append('push hl')
    except ValueError:
        pass

    return output
0.001555
def getMeta(self, uri):
    """Return meta information about an action. Cache the result as
    specified by the server."""
    action = urlparse(uri).path

    mediaKey = self.cacheKey + '_meta_' + action
    mediaKey = mediaKey.replace(' ', '__')

    meta = cache.get(mediaKey, None)

    # Nothing found -> Retrieve it from the server and cache it
    if not meta:
        r = self.doQuery('meta/' + uri)

        # Get the content if there is no problem. If there is, meta will stay None
        if r.status_code == 200:
            meta = r.json()

        if 'expire' not in r.headers:
            expire = 5 * 60  # 5 minutes of cache if the server didn't specify anything
        else:
            # Use the server value for the cache duration
            expire = int((parser.parse(r.headers['expire']) -
                          datetime.datetime.now(tzutc())).total_seconds())

        if expire > 0:  # Does the server want us to cache?
            cache.set(mediaKey, meta, expire)

    return meta
0.005825
def fromEpoch(cls, epoch_time):
    '''
    a method for constructing a labDT object from epoch timestamp

    :param epoch_time: number with epoch timestamp info
    :return: labDT object
    '''
    # validate input
    title = 'Epoch time input for labDT.fromEpoch'
    if not isinstance(epoch_time, float) and not isinstance(epoch_time, int):
        raise TypeError('\n%s must be an integer or float.' % title)

    # construct labDT from epoch time
    dT = datetime.utcfromtimestamp(epoch_time).replace(tzinfo=pytz.utc)
    dt_kwargs = {
        'year': dT.year,
        'month': dT.month,
        'day': dT.day,
        'hour': dT.hour,
        'minute': dT.minute,
        'second': dT.second,
        'microsecond': dT.microsecond,
        'tzinfo': dT.tzinfo
    }
    return labDT(**dt_kwargs)
0.003319
def _variant_levels(level, variant):
    """
    Gets the level for the variant.

    :param int level: the current variant level
    :param int variant: the value for this level if variant

    :returns: a level for the object and one for the function
    :rtype: int * int
    """
    return (level + variant, level + variant) \
        if variant != 0 else (variant, level)
0.007299
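The branch logic of _variant_levels above is easiest to see with concrete values:

print(_variant_levels(1, 0))  # (0, 1) -- variant == 0 branch returns (variant, level)
print(_variant_levels(1, 2))  # (3, 3) -- otherwise both levels are level + variant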
def removeLogbook(self, menu=None):
    '''Remove logbook menu set.'''
    if self.logMenuCount > 1 and menu is not None:
        menu.removeMenu()
        self.logMenus.remove(menu)
        self.logMenuCount -= 1
0.008584
def create(vm_):
    '''
    Create a single VM from a data dict
    '''
    try:
        # Check for required profile parameters before sending any API calls.
        if vm_['profile'] and config.is_profile_configured(
                __opts__,
                __active_provider_name__ or 'azure',
                vm_['profile'],
                vm_=vm_) is False:
            return False
    except AttributeError:
        pass

    __utils__['cloud.fire_event'](
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('creating', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    log.info('Creating Cloud VM %s', vm_['name'])
    conn = get_conn()

    label = vm_.get('label', vm_['name'])
    service_name = vm_.get('service_name', vm_['name'])
    service_kwargs = {
        'service_name': service_name,
        'label': label,
        'description': vm_.get('desc', vm_['name']),
    }

    loc_error = False
    if 'location' in vm_:
        if 'affinity_group' in vm_:
            loc_error = True
        else:
            service_kwargs['location'] = vm_['location']
    elif 'affinity_group' in vm_:
        service_kwargs['affinity_group'] = vm_['affinity_group']
    else:
        loc_error = True

    if loc_error:
        raise SaltCloudSystemExit(
            'Either a location or affinity group must be specified, but not both'
        )

    ssh_port = config.get_cloud_config_value('port', vm_, __opts__,
                                             default=22, search_global=True)

    ssh_endpoint = azure.servicemanagement.ConfigurationSetInputEndpoint(
        name='SSH',
        protocol='TCP',
        port=ssh_port,
        local_port=22,
    )

    network_config = azure.servicemanagement.ConfigurationSet()
    network_config.input_endpoints.input_endpoints.append(ssh_endpoint)
    network_config.configuration_set_type = 'NetworkConfiguration'

    if 'win_username' in vm_:
        system_config = azure.servicemanagement.WindowsConfigurationSet(
            computer_name=vm_['name'],
            admin_username=vm_['win_username'],
            admin_password=vm_['win_password'],
        )

        smb_port = '445'
        if 'smb_port' in vm_:
            smb_port = vm_['smb_port']

        smb_endpoint = azure.servicemanagement.ConfigurationSetInputEndpoint(
            name='SMB',
            protocol='TCP',
            port=smb_port,
            local_port=smb_port,
        )

        network_config.input_endpoints.input_endpoints.append(smb_endpoint)

        # Domain and WinRM configuration not yet supported by Salt Cloud
        system_config.domain_join = None
        system_config.win_rm = None

    else:
        system_config = azure.servicemanagement.LinuxConfigurationSet(
            host_name=vm_['name'],
            user_name=vm_['ssh_username'],
            user_password=vm_['ssh_password'],
            disable_ssh_password_authentication=False,
        )

    # TODO: Might need to create a storage account
    media_link = vm_['media_link']
    # TODO: Probably better to use more than just the name in the media_link
    media_link += '/{0}.vhd'.format(vm_['name'])
    os_hd = azure.servicemanagement.OSVirtualHardDisk(vm_['image'], media_link)

    vm_kwargs = {
        'service_name': service_name,
        'deployment_name': service_name,
        'deployment_slot': vm_['slot'],
        'label': label,
        'role_name': vm_['name'],
        'system_config': system_config,
        'os_virtual_hard_disk': os_hd,
        'role_size': vm_['size'],
        'network_config': network_config,
    }

    if 'virtual_network_name' in vm_:
        vm_kwargs['virtual_network_name'] = vm_['virtual_network_name']
        if 'subnet_name' in vm_:
            network_config.subnet_names.append(vm_['subnet_name'])

    log.debug('vm_kwargs: %s', vm_kwargs)

    event_kwargs = {'service_kwargs': service_kwargs.copy(),
                    'vm_kwargs': vm_kwargs.copy()}
    del event_kwargs['vm_kwargs']['system_config']
    del event_kwargs['vm_kwargs']['os_virtual_hard_disk']
    del event_kwargs['vm_kwargs']['network_config']

    __utils__['cloud.fire_event'](
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('requesting', event_kwargs, list(event_kwargs)),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    log.debug('vm_kwargs: %s', vm_kwargs)

    # Azure lets you open winrm on a new VM
    # Can open up specific ports in Azure; but not on Windows
    try:
        conn.create_hosted_service(**service_kwargs)
    except AzureConflictHttpError:
        log.debug('Cloud service already exists')
    except Exception as exc:
        error = 'The hosted service name is invalid.'
        if error in six.text_type(exc):
            log.error(
                'Error creating %s on Azure.\n\n'
                'The hosted service name is invalid. The name can contain '
                'only letters, numbers, and hyphens. The name must start with '
                'a letter and must end with a letter or a number.',
                vm_['name'],
                # Show the traceback if the debug logging level is enabled
                exc_info_on_loglevel=logging.DEBUG
            )
        else:
            log.error(
                'Error creating %s on Azure\n\n'
                'The following exception was thrown when trying to '
                'run the initial deployment: \n%s',
                vm_['name'], exc,
                # Show the traceback if the debug logging level is enabled
                exc_info_on_loglevel=logging.DEBUG
            )
        return False

    try:
        result = conn.create_virtual_machine_deployment(**vm_kwargs)
        log.debug('Request ID for machine: %s', result.request_id)
        _wait_for_async(conn, result.request_id)
    except AzureConflictHttpError:
        log.debug('Conflict error. The deployment may already exist, trying add_role')
        # Deleting two useless keywords
        del vm_kwargs['deployment_slot']
        del vm_kwargs['label']
        del vm_kwargs['virtual_network_name']
        result = conn.add_role(**vm_kwargs)
        _wait_for_async(conn, result.request_id)
    except Exception as exc:
        error = 'The hosted service name is invalid.'
        if error in six.text_type(exc):
            log.error(
                'Error creating %s on Azure.\n\n'
                'The VM name is invalid. The name can contain '
                'only letters, numbers, and hyphens. The name must start with '
                'a letter and must end with a letter or a number.',
                vm_['name'],
                # Show the traceback if the debug logging level is enabled
                exc_info_on_loglevel=logging.DEBUG
            )
        else:
            log.error(
                'Error creating %s on Azure.\n\n'
                'The Virtual Machine could not be created. If you '
                'are using an already existing Cloud Service, '
                'make sure you set up the `port` variable corresponding '
                'to the SSH port exists and that the port number is not '
                'already in use.\nThe following exception was thrown when trying to '
                'run the initial deployment: \n%s',
                vm_['name'], exc,
                # Show the traceback if the debug logging level is enabled
                exc_info_on_loglevel=logging.DEBUG
            )
        return False

    def wait_for_hostname():
        '''
        Wait for the IP address to become available
        '''
        try:
            conn.get_role(service_name, service_name, vm_['name'])
            data = show_instance(vm_['name'], call='action')
            if 'url' in data and data['url'] != six.text_type(''):
                return data['url']
        except AzureMissingResourceHttpError:
            pass
        time.sleep(1)
        return False

    hostname = salt.utils.cloud.wait_for_fun(
        wait_for_hostname,
        timeout=config.get_cloud_config_value(
            'wait_for_fun_timeout', vm_, __opts__, default=15 * 60),
    )

    if not hostname:
        log.error('Failed to get a value for the hostname.')
        return False

    vm_['ssh_host'] = hostname.replace('http://', '').replace('/', '')
    vm_['password'] = config.get_cloud_config_value(
        'ssh_password', vm_, __opts__
    )

    ret = __utils__['cloud.bootstrap'](vm_, __opts__)

    # Attaching volumes
    volumes = config.get_cloud_config_value(
        'volumes', vm_, __opts__, search_global=True
    )
    if volumes:
        __utils__['cloud.fire_event'](
            'event',
            'attaching volumes',
            'salt/cloud/{0}/attaching_volumes'.format(vm_['name']),
            args=__utils__['cloud.filter_event']('attaching_volumes', vm_, ['volumes']),
            sock_dir=__opts__['sock_dir'],
            transport=__opts__['transport']
        )

        log.info('Create and attach volumes to node %s', vm_['name'])
        created = create_attach_volumes(
            vm_['name'],
            {
                'volumes': volumes,
                'service_name': service_name,
                'deployment_name': vm_['name'],
                'media_link': media_link,
                'role_name': vm_['name'],
                'del_all_vols_on_destroy': vm_.get('set_del_all_vols_on_destroy', False)
            },
            call='action'
        )
        ret['Attached Volumes'] = created

    data = show_instance(vm_['name'], call='action')
    log.info('Created Cloud VM \'%s\'', vm_)
    log.debug('\'%s\' VM creation details:\n%s', vm_['name'], pprint.pformat(data))

    ret.update(data)

    __utils__['cloud.fire_event'](
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(vm_['name']),
        args=__utils__['cloud.filter_event']('created', vm_, ['name', 'profile', 'provider', 'driver']),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )

    return ret
0.001057
def handle_challenge(self, data):
    """
    Executed when the server requests additional authentication
    """
    # Send challenge response
    self.send_message(AUTHENTICATE(
        signature=self.password,
        extra={}
    ))
0.025455
def _call(self, method, params):
    """Call a method.

    :param method: method to call
    :param params: dict with the HTTP parameters needed to call
        the given method

    :raises ConduitError: when an error is returned by the server
    """
    url = self.URL % {'base': self.base_url, 'method': method}

    # Conduit and POST parameters
    params['__conduit__'] = {'token': self.api_token}

    data = {
        'params': json.dumps(params, sort_keys=True),
        'output': 'json',
        '__conduit__': True
    }

    logger.debug("Phabricator Conduit client requests: %s params: %s",
                 method, str(data))

    r = self.fetch(url, payload=data, method=HttpClient.POST, verify=False)

    # Check for possible Conduit API errors
    result = r.json()

    if result['error_code']:
        raise ConduitError(error=result['error_info'],
                           code=result['error_code'])

    return r.text
0.001932
def db_exec_literal(self, sql: str) -> int:
    """Executes SQL without modification. Returns rowcount."""
    self.ensure_db_open()
    cursor = self.db.cursor()
    debug_sql(sql)
    try:
        cursor.execute(sql)
        return cursor.rowcount
    except:  # nopep8
        log.exception("db_exec_literal: SQL was: " + sql)
        raise
0.005222
def create_account(self, email, password=None, attrs=None):
    """
    :param email:    Full email with domain eg: [email protected]
    :param password: Password for local auth
    :param attrs:    a dictionary of attributes to set ({key:value,...})
    :returns:        the created zobjects.Account
    """
    # avoid a mutable default argument for attrs
    attrs = [{'n': k, '_content': v} for k, v in (attrs or {}).items()]

    params = {'name': email, 'a': attrs}

    if password:
        params['password'] = password

    resp = self.request_single('CreateAccount', params)

    return zobjects.Account.from_dict(resp)
0.003236
def sample_multinomial(N, p, size=None):
    r"""
    Draws fixed number of samples N from different multinomial distributions
    (with the same number dice sides).

    :param int N: How many samples to draw from each distribution.
    :param np.ndarray p: Probabilities specifying each distribution.
        Sum along axis 0 should be 1.
    :param size: Output shape. ``int`` or tuple of ``int``s. If the given
        shape is, e.g., ``(m, n, k)``, then m * n * k samples are drawn
        for each distribution. Default is None, in which case a single
        value is returned for each distribution.

    :rtype: np.ndarray
    :return: Array of shape ``(p.shape, size)`` or p.shape if size is ``None``.
    """
    # ensure s is array
    s = np.array([1]) if size is None else np.array([size]).flatten()

    def take_samples(ps):
        # we have to flatten to make apply_along_axis work.
        return np.random.multinomial(N, ps, np.prod(s)).flatten()

    # should have shape (prod(size)*ps.shape[0], ps.shape[1:])
    samples = np.apply_along_axis(take_samples, 0, p)

    # should have shape (size, p.shape)
    samples = samples.reshape(np.concatenate([s, p.shape]))

    # should have shape (p.shape, size)
    samples = samples.transpose(np.concatenate(
        [np.arange(s.ndim, p.ndim + s.ndim), np.arange(s.ndim)]
    ))

    if size is None:
        # get rid of trailing singleton dimension.
        samples = samples[..., 0]

    return samples
0.001342
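A shape-focused usage sketch for sample_multinomial above (numpy required): two three-sided distributions stored column-wise, so p sums to 1 along axis 0 as the docstring requires:

import numpy as np

p = np.array([[0.2, 0.7],
              [0.3, 0.2],
              [0.5, 0.1]])  # columns are distributions
counts = sample_multinomial(100, p)
print(counts.shape)        # (3, 2), same as p (size=None drops the size axis)
print(counts.sum(axis=0))  # [100 100]: each column's draws sum to N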
def add(self, entries=None, force=False, allow_address_duplication=False):
    """
    Add instances of HostsEntry to the instance of Hosts.

    :param entries: A list of instances of HostsEntry
    :param force: Remove matching before adding
    :param allow_address_duplication: Allow using multiple entries for same address
    :return: The counts of successes and failures
    """
    ipv4_count = 0
    ipv6_count = 0
    comment_count = 0
    invalid_count = 0
    duplicate_count = 0
    replaced_count = 0
    import_entries = []
    existing_addresses = [x.address for x in self.entries if x.address]
    existing_names = []
    for item in self.entries:
        if item.names:
            existing_names.extend(item.names)
    existing_names = dedupe_list(existing_names)

    for entry in entries:
        if entry.entry_type == 'comment':
            entry.comment = entry.comment.strip()
            if entry.comment[0] != "#":
                entry.comment = "# " + entry.comment
            import_entries.append(entry)
        elif entry.address in ('0.0.0.0', '127.0.0.1') or allow_address_duplication:
            # Allow duplicates entries for addresses used for adblocking
            if set(entry.names).intersection(existing_names):
                if force:
                    for name in entry.names:
                        self.remove_all_matching(name=name)
                    import_entries.append(entry)
                else:
                    duplicate_count += 1
            else:
                import_entries.append(entry)
        elif entry.address in existing_addresses:
            if not force:
                duplicate_count += 1
            elif force:
                self.remove_all_matching(address=entry.address)
                replaced_count += 1
                import_entries.append(entry)
        elif set(entry.names).intersection(existing_names):
            if not force:
                duplicate_count += 1
            else:
                for name in entry.names:
                    self.remove_all_matching(name=name)
                replaced_count += 1
                import_entries.append(entry)
        else:
            import_entries.append(entry)

    for item in import_entries:
        if item.entry_type == 'comment':
            comment_count += 1
            self.entries.append(item)
        elif item.entry_type == 'ipv4':
            ipv4_count += 1
            self.entries.append(item)
        elif item.entry_type == 'ipv6':
            ipv6_count += 1
            self.entries.append(item)

    return {'comment_count': comment_count,
            'ipv4_count': ipv4_count,
            'ipv6_count': ipv6_count,
            'invalid_count': invalid_count,
            'duplicate_count': duplicate_count,
            'replaced_count': replaced_count}
0.001287
def _compute_missing_rates(self, currency):
    """Fill missing rates of a currency.

    This is done by linear interpolation of the two closest available rates.

    :param str currency: The currency to fill missing rates for.
    """
    rates = self._rates[currency]

    # tmp will store the closest rates forward and backward
    tmp = defaultdict(lambda: [None, None])
    for date in sorted(rates):
        rate = rates[date]
        if rate is not None:
            closest_rate = rate
            dist = 0
        else:
            dist += 1
        tmp[date][0] = closest_rate, dist

    for date in sorted(rates, reverse=True):
        rate = rates[date]
        if rate is not None:
            closest_rate = rate
            dist = 0
        else:
            dist += 1
        tmp[date][1] = closest_rate, dist

    for date in sorted(tmp):
        (r0, d0), (r1, d1) = tmp[date]
        rates[date] = (r0 * d1 + r1 * d0) / (d0 + d1)
        if self.verbose:
            print(('{0}: filling {1} missing rate using {2} ({3}d old) and '
                   '{4} ({5}d later)').format(currency, date, r0, d0, r1, d1))
0.003984
# decorator implied by the `with` usage shown in the docstring
@contextlib.contextmanager
def maintained_selection():
    """Maintain selection during context

    Example:
        >>> with maintained_selection():
        ...     # Modify selection
        ...     node.setSelected(on=False, clear_all_selected=True)
        >>> # Selection restored
    """
    previous_selection = hou.selectedNodes()
    try:
        yield
    finally:
        if previous_selection:
            for node in previous_selection:
                node.setSelected(on=True)
        else:
            for node in previous_selection:
                node.setSelected(on=False)
0.001761
def append_cluster(self, cluster, data=None, marker='.', markersize=None, color=None):
    """!
    @brief Appends cluster for visualization.

    @param[in] cluster (list): cluster that may consist of indexes of objects from the data or object itself.
    @param[in] data (list): If defines that each element of cluster is considered as a index of object from the data.
    @param[in] marker (string): Marker that is used for displaying objects from cluster on the canvas.
    @param[in] markersize (uint): Size of marker.
    @param[in] color (string): Color of marker.

    @return Returns index of cluster descriptor on the canvas.

    """
    if len(cluster) == 0:
        raise ValueError("Empty cluster is provided.")

    markersize = markersize or 5

    if color is None:
        index_color = len(self.__clusters) % len(color_list.TITLES)
        color = color_list.TITLES[index_color]

    cluster_descriptor = canvas_cluster_descr(cluster, data, marker, markersize, color)
    self.__clusters.append(cluster_descriptor)
0.013274
def redirect_from_callback(self):
    '''Redirect to the callback URL after a successful authentication.'''
    state = toolkit.request.params.get('state')
    came_from = get_came_from(state)
    toolkit.response.status = 302
    toolkit.response.location = came_from
0.006944
def write_comment(self, comment):
    """
    Write a comment into the header
    """
    self._FITS.write_comment(self._ext + 1, str(comment))
0.012739
def addNewRole(self, txn):
    """
    Adds a new client or steward to this node based on transaction type.
    """
    # If the client authenticator is a simple authenticator then add verkey.
    # For a custom authenticator, handle appropriately.
    # NOTE: The following code should not be used in production
    if isinstance(self.clientAuthNr.core_authenticator, SimpleAuthNr):
        txn_data = get_payload_data(txn)
        identifier = txn_data[TARGET_NYM]
        verkey = txn_data.get(VERKEY)
        v = DidVerifier(verkey, identifier=identifier)
        if identifier not in self.clientAuthNr.core_authenticator.clients:
            role = txn_data.get(ROLE)
            if role not in (STEWARD, TRUSTEE, None):
                logger.debug("Role if present must be {} and not {}".
                             format(Roles.STEWARD.name, role))
                return
            self.clientAuthNr.core_authenticator.addIdr(identifier,
                                                        verkey=v.verkey,
                                                        role=role)
0.002564
def merge(assembled_gtfs, ref_file, gtf_file, num_cores, data):
    """
    run cuffmerge on a set of assembled GTF files
    """
    assembled_file = tempfile.NamedTemporaryFile(delete=False).name
    with open(assembled_file, "w") as temp_handle:
        for assembled in assembled_gtfs:
            temp_handle.write(assembled + "\n")
    out_dir = os.path.join("assembly", "cuffmerge")
    merged_file = os.path.join(out_dir, "merged.gtf")
    out_file = os.path.join(out_dir, "assembled.gtf")
    if file_exists(out_file):
        return out_file
    if not file_exists(merged_file):
        with file_transaction(data, out_dir) as tmp_out_dir:
            cmd = ("cuffmerge -o {tmp_out_dir} --ref-gtf {gtf_file} "
                   "--num-threads {num_cores} --ref-sequence {ref_file} "
                   "{assembled_file}")
            cmd = cmd.format(**locals())
            message = ("Merging the following transcript assemblies with "
                       "Cuffmerge: %s" % ", ".join(assembled_gtfs))
            do.run(cmd, message)
    clean, _ = clean_assembly(merged_file)
    fixed = fix_cufflinks_attributes(gtf_file, clean, data)
    classified = annotate_gtf.annotate_novel_coding(fixed, gtf_file, ref_file, data)
    filtered = annotate_gtf.cleanup_transcripts(classified, gtf_file, ref_file)
    shutil.move(filtered, out_file)
    return out_file
0.000702
def get_release_data(self, package_name: str, version: str) -> Tuple[str, str, str]:
    """
    Returns ``(package_name, version, manifest_uri)`` associated with the
    given package name and version, *if* they are published to the currently
    set registry.

    * Parameters:
        * ``name``: Must be a valid package name.
        * ``version``: Must be a valid package version.
    """
    validate_package_name(package_name)
    validate_package_version(version)
    self._validate_set_registry()
    release_id = self.registry._get_release_id(package_name, version)
    return self.get_release_id_data(release_id)
0.007485
def write_result(self, data):
    """Write the results received to the database

    :param dict data: the data to save in database
    :return: None
    """
    data['custom_timers'] = ujson.dumps(data['custom_timers'])
    self.results.append(data)
    if len(self.results) >= 150:  # 150 rows for SQLite default limit
        with db.execution_context():
            with db.atomic():
                Result.insert_many(self.results).execute()
        del self.results[:]
0.003831
def start_connect(self):
    """Tries to connect to the Heron Server

    ``loop()`` method needs to be called after this.
    """
    Log.debug("In start_connect() of %s" % self._get_classname())
    # TODO: specify buffer size, exception handling
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)

    # when ready, handle_connect is called
    self._connecting = True
    self.connect(self.endpoint)
0.002433
def get_param(self, param, default=None):
    """
    Get a parameter in config (handle default value)

    :param param: name of the parameter to recover
    :type param: string
    :param default: the default value, raises an exception if param is
        not in configuration and default is None (which is the default
        value).
    :type default: string or None
    :rtype: the value of the parameter or the default value if not set
        in configuration
    """
    if param in self.config:
        return self.config[param]
    elif default is not None:
        return default
    else:
        raise MissingParameter('ppolicy', param)
0.002821
def _set_emails(self, emails, global_substitutions=None, is_multiple=False, p=0):
    """Adds emails to the Personalization object

    :param emails: An Email or list of Email objects
    :type emails: Email, list(Email)
    :param global_substitutions: A dict of substitutions for all recipients
    :type global_substitutions: dict
    :param is_multiple: Create a new personalization for each recipient
    :type is_multiple: bool
    :param p: p is the Personalization object or Personalization object index
    :type p: Personalization, integer, optional
    """
    # Send multiple emails to multiple recipients
    if is_multiple is True:
        if isinstance(emails, list):
            for email in emails:
                personalization = Personalization()
                personalization.add_email(email)
                self.add_personalization(personalization)
        else:
            personalization = Personalization()
            personalization.add_email(emails)
            self.add_personalization(personalization)

        if global_substitutions is not None:
            if isinstance(global_substitutions, list):
                for substitution in global_substitutions:
                    for p in self.personalizations:
                        p.add_substitution(substitution)
            else:
                for p in self.personalizations:
                    p.add_substitution(global_substitutions)
    else:
        try:
            personalization = self._personalizations[p]
            has_internal_personalization = True
        except IndexError:
            personalization = Personalization()
            has_internal_personalization = False

        if isinstance(emails, list):
            for email in emails:
                personalization.add_email(email)
        else:
            personalization.add_email(emails)

        if global_substitutions is not None:
            if isinstance(global_substitutions, list):
                for substitution in global_substitutions:
                    personalization.add_substitution(substitution)
            else:
                personalization.add_substitution(global_substitutions)

        if not has_internal_personalization:
            self.add_personalization(personalization, index=p)
0.000796
def upgrade():
    """Upgrade database."""
    op.create_table(
        'oauthclient_remoteaccount',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=255), nullable=False),
        sa.Column('extra_data', sqlalchemy_utils.JSONType(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'client_id')
    )
    op.create_table(
        'oauthclient_useridentity',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('method', sa.String(length=255), nullable=False),
        sa.Column('id_user', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id', 'method')
    )
    op.create_index(
        'useridentity_id_user_method',
        'oauthclient_useridentity',
        ['id_user', 'method'],
        unique=True
    )
    op.create_table(
        'oauthclient_remotetoken',
        sa.Column('id_remote_account', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sqlalchemy_utils.EncryptedType(), nullable=False),
        sa.Column('secret', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ['id_remote_account'], [u'oauthclient_remoteaccount.id'],
            name='fk_oauthclient_remote_token_remote_account'
        ),
        sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
    )
0.000587
def find_level_aliases():
    """
    Find log level names which are aliases of each other.

    :returns: A dictionary that maps aliases to their canonical name.

    .. note:: Canonical names are chosen to be the alias with the longest
              string length so that e.g. ``WARN`` is an alias for ``WARNING``
              instead of the other way around.

    Here's what the result looks like by default (when no custom levels or
    level names have been defined):

    >>> from coloredlogs import find_level_aliases
    >>> find_level_aliases()
    {'WARN': 'WARNING', 'FATAL': 'CRITICAL'}
    """
    mapping = collections.defaultdict(list)
    for name, value in find_defined_levels().items():
        mapping[value].append(name)
    aliases = {}
    for value, names in mapping.items():
        if len(names) > 1:
            names = sorted(names, key=lambda n: len(n))
            canonical_name = names.pop()
            for alias in names:
                aliases[alias] = canonical_name
    return aliases
0.000978
def get_product(config):
    """Get the /product/<product> resource from LTD Keeper."""
    product_url = config['keeper_url'] + '/products/{p}'.format(
        p=config['ltd_product'])
    r = requests.get(product_url)
    if r.status_code != 200:
        raise RuntimeError(r.json())
    product_info = r.json()
    return product_info
0.002924
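A hedged sketch of calling get_product; the config keys mirror the function above, while the URL and product slug are hypothetical.

config = {
    'keeper_url': 'https://keeper.example.org',
    'ltd_product': 'my-docs',
}
# Issues GET https://keeper.example.org/products/my-docs
product_info = get_product(config)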
def is_condition_met(self, hand, win_tile, melds, is_tsumo):
        """
        Three closed pon sets; the other sets need not be closed
        :param hand: list of hand's sets
        :param win_tile: 136 tiles format
        :param melds: list Meld objects
        :param is_tsumo:
        :return: true|false
        """
        win_tile //= 4

        open_sets = [x.tiles_34 for x in melds if x.opened]

        chi_sets = [x for x in hand if (is_chi(x) and win_tile in x and x not in open_sets)]
        pon_sets = [x for x in hand if is_pon(x)]

        closed_pon_sets = []
        for item in pon_sets:
            if item in open_sets:
                continue

            # if we ron on a shanpon wait, the completed pon is considered open,
            # unless the hand also has a chi containing the winning tile
            # (e.g. a 789999 shape)
            if win_tile in item and not is_tsumo and not len(chi_sets):
                continue

            closed_pon_sets.append(item)

        return len(closed_pon_sets) == 3
0.003074
def tags(self): '''Return a list of all tags that have this semantic tag, sorted by name. :rtype: list of ckan.model.tag.Tag objects ''' q = meta.Session.query(_tag.Tag) q = q.join(TagSemanticTag) q = q.filter_by(tag_id=self.id) # q = q.filter_by(state='active') q = q.order_by(_tag.Tag.name) tags = q.all() return tags
0.035294
def clone(cls, srcpath, destpath): """Copy a main repository to a new location.""" try: os.makedirs(destpath) except OSError as e: if not e.errno == errno.EEXIST: raise cmd = [SVNADMIN, 'dump', '--quiet', '.'] dump = subprocess.Popen( cmd, cwd=srcpath, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) repo = cls.create(destpath) repo.load(dump.stdout) stderr = dump.stderr.read() dump.stdout.close() dump.stderr.close() dump.wait() if dump.returncode != 0: raise subprocess.CalledProcessError(dump.returncode, cmd, stderr) return repo
0.00274
def walk(self, node, *listeners:RDLListener): """ Initiates the walker to traverse the current ``node`` and its children. Calls the corresponding callback for each of the ``listeners`` provided in the order that they are listed. Parameters ---------- node : :class:`~systemrdl.node.Node` Node to start traversing. Listener traversal includes this node. listeners : list List of :class:`~RDLListener` that are invoked during node traversal. Listener callbacks are executed in the same order as provided by this parameter. """ for listener in listeners: self.do_enter(node, listener) for child in node.children(unroll=self.unroll, skip_not_present=self.skip_not_present): self.walk(child, *listeners) for listener in listeners: self.do_exit(node, listener)
0.007277
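A sketch of a listener that could be handed to walk(); the enter_Component/exit_Component callback names and the import path follow the systemrdl-compiler convention and are assumptions here.

from systemrdl import RDLListener, RDLWalker

class PrintingListener(RDLListener):
    def enter_Component(self, node):
        # Runs before any of node's children are traversed
        print('enter', node.get_path())

    def exit_Component(self, node):
        # Runs after all of node's children have been traversed
        print('exit ', node.get_path())

# RDLWalker().walk(root_node, PrintingListener())  # root_node from elaboration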
def omim_terms(case_obj): """Extract all OMIM phenotypes available for the case Args: case_obj(dict): a scout case object Returns: disorders(list): a list of OMIM disorder objects """ LOG.info("Collecting OMIM disorders for case {}".format(case_obj.get('display_name'))) disorders = [] case_disorders = case_obj.get('diagnosis_phenotypes') # array of OMIM terms if case_disorders: for disorder in case_disorders: disorder_obj = { "id" : ':'.join([ 'MIM', str(disorder)]) } disorders.append(disorder_obj) return disorders
0.007899
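A tiny demonstration of omim_terms with a minimal case object; the field names come from the function above, the MIM numbers are invented.

case_obj = {
    'display_name': 'internal_id_1',
    'diagnosis_phenotypes': [121270, 613091],
}
disorders = omim_terms(case_obj)
# -> [{'id': 'MIM:121270'}, {'id': 'MIM:613091'}]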
def filter_pem(data): '''Processes the bytes for PEM certificates. Returns: ``set`` containing each certificate ''' assert isinstance(data, bytes), 'Expect bytes. Got {}.'.format(type(data)) certs = set() new_list = [] in_pem_block = False for line in re.split(br'[\r\n]+', data): if line == b'-----BEGIN CERTIFICATE-----': assert not in_pem_block in_pem_block = True elif line == b'-----END CERTIFICATE-----': assert in_pem_block in_pem_block = False content = b''.join(new_list) content = rewrap_bytes(content) certs.add(b'-----BEGIN CERTIFICATE-----\n' + content + b'\n-----END CERTIFICATE-----\n') new_list = [] elif in_pem_block: new_list.append(line) return certs
0.001117
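A self-contained check of filter_pem with a dummy (not real) certificate body, assuming the module's rewrap_bytes helper is importable alongside it.

pem_bytes = (
    b'leading junk\n'
    b'-----BEGIN CERTIFICATE-----\n'
    b'QUJDREVG\n'
    b'-----END CERTIFICATE-----\n'
    b'trailing junk\n'
)
certs = filter_pem(pem_bytes)
assert len(certs) == 1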
def comments(self): # pylint: disable=E0202 """Return forest of comments, with top-level comments as tree roots. May contain instances of MoreComment objects. To easily replace these objects with Comment objects, use the replace_more_comments method then fetch this attribute. Use comment replies to walk down the tree. To get an unnested, flat list of comments from this attribute use helpers.flatten_tree. """ if self._comments is None: self.comments = Submission.from_url( # pylint: disable=W0212 self.reddit_session, self._api_link, comments_only=True) return self._comments
0.002933
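A sketch of consuming this attribute the way the docstring suggests, using praw 3-era helpers; the submission object is assumed to exist already.

from praw.helpers import flatten_tree

submission.replace_more_comments(limit=None)   # swap MoreComments for Comments
flat_comments = flatten_tree(submission.comments)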
def trace_error(function_index=2): """ This will return the line number and line text of the last error :param function_index: int to tell what frame to look from :return: int, str of the line number and line text """ info = function_info(function_index) traces = traceback.format_stack(limit=10) for trace in traces: file_, line_number, line_text = trace.split(',', 2) if file_ == ' File "%s"' % info['file'] and\ line_number != 'line %s' % info['line_number']: return line_number.split()[-1], line_text.strip() return None, None
0.003231
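An illustrative call site for trace_error; risky_operation is a hypothetical callable, and the exact frame that the default function_index resolves to depends on where the call is made.

try:
    risky_operation()
except Exception:
    line_number, line_text = trace_error()
    print(line_number, line_text)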
def new_model(self, info): """ Handles the new Graph action. """ if info.initialized: retval = confirm(parent = info.ui.control, message = "Replace existing graph?", title = "New Graph", default = YES) if retval == YES: self.model = Graph()
0.031008
def sort_descendants(self, attr="name"):
        """
        This function sorts the branches of a given tree by
        considering node names. After the tree is sorted, nodes are
        labeled using ascending numbers.  This can be used to ensure
        that nodes in a tree with the same node names are always labeled
        in the same way. Note that if duplicated names are present, extra
        criteria should be added to sort nodes.

        Unique id is stored as a node._nid attribute
        """
        node2content = self.get_cached_content(store_attr=attr, container_type=list)
        for n in self.traverse():
            if not n.is_leaf():
                n.children.sort(key=lambda x: str(sorted(node2content[x])))
0.004065
def bend(mapping, source, context=None): """ The main bending function. mapping: the map of benders source: a dict to be bent returns a new dict according to the provided map. """ context = {} if context is None else context transport = Transport(source, context) return _bend(mapping, transport)
0.002994
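This bend signature matches the public API of the jsonbender library; assuming its S selector is available, a mapping is applied like so:

from jsonbender import S

MAPPING = {'first_name': S('name', 'first')}
source = {'name': {'first': 'Ada', 'last': 'Lovelace'}}
result = bend(MAPPING, source)   # {'first_name': 'Ada'}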
def compile_instance_masks(cls):
        """ Compiles instance masks into a master mask that is usable by the
        IO expander. Also determines whether or not the pump should be on.
        Method is generalized to support multiple IO expanders for possible
        future expansion. """

        # Compute the required number of IO expanders, clear mask variable.
        number_IO_expanders = ((len(cls._list) - 1) // 4) + 1
        cls.master_mask = [0, 0] * number_IO_expanders

        for ctrlobj in cls:
            # Or masks together bank-by-bank
            cls.master_mask[ctrlobj.bank] |= ctrlobj.mask
            # Handle the pump request separately
            if ctrlobj.pump_request == 1:
                cls.master_mask[cls.pump_bank] |= 1 << cls.pump_pin
0.003676
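The per-bank accumulation above is a plain bitwise OR; a stripped-down illustration independent of the controller class:

masks = [0b0001, 0b0100, 0b0010]   # per-instance pin masks for one bank
master = 0
for m in masks:
    master |= m
assert master == 0b0111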
def connect_to_endpoints_blocking(self, *endpoints: ConnectionConfig, timeout: int=30) -> None: """ Connect to the given endpoints and block until the connection to every endpoint is established. Raises a ``TimeoutError`` if connections do not become available within ``timeout`` seconds (default 30 seconds). """ self._throw_if_already_connected(*endpoints) for endpoint in endpoints: wait_for_path_blocking(endpoint.path, timeout) self._connect_if_not_already_connected(endpoint)
0.012478
def save_repo_cache(i): """ Input: {} Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 } """ r=save_json_to_file({'json_file':work['dir_cache_repo_uoa'], 'dict':cache_repo_uoa}) if r['return']>0: return r r=save_json_to_file({'json_file':work['dir_cache_repo_info'], 'dict':cache_repo_info}) if r['return']>0: return r return {'return':0}
0.026769
def weights(self, other): """ Compute weights, given a scale or time-frequency representation :param other: A time-frequency representation, or a scale :return: a numpy array of weights """ try: return self._wdata(other) except AttributeError: frequency_dim = other.dimensions[-1] return self._wdata(frequency_dim.scale)
0.004854
def LOS_get_sample(D, u, dL, DL=None, dLMode='abs', method='sum', Test=True):
    """ Return the sampled line, with the specified method

    'linspace': return the N+1 edges, including the first and last point
    'sum' :     return the N midpoints of the segments
    'simps':    return the N+1 edges, where N has to be even
                (scipy's Simpson integration requires an even number of intervals)
    'romb' :    return the N+1 edges, where N+1 = 2**k+1
                (fed to scipy.romb for integration)
    """
    if Test:
        assert all([type(dd) is np.ndarray and dd.shape==(3,) for dd in [D,u]])
        assert not hasattr(dL,'__iter__')
        assert DL is None or all([hasattr(DL,'__iter__'), len(DL)==2,
                                  all([not hasattr(dd,'__iter__') for dd in DL])])
        assert dLMode in ['abs','rel']
        assert type(method) is str and method in ['linspace','sum','simps','romb']

    # Compute the minimum number of intervals to satisfy the specified resolution
    N = int(np.ceil((DL[1]-DL[0])/dL)) if dLMode=='abs' else int(np.ceil(1./dL))

    # Modify N according to the desired method
    if method=='simps':
        N = N if N%2==0 else N+1
    elif method=='romb':
        N = 2**int(np.ceil(np.log(N)/np.log(2.)))

    # Derive k and dLr
    if method=='sum':
        dLr = (DL[1]-DL[0])/N
        k = DL[0] + (0.5+np.arange(0,N))*dLr
    else:
        k, dLr = np.linspace(DL[0], DL[1], N+1,
                             endpoint=True, retstep=True, dtype=float)

    Pts = D[:,np.newaxis] + k[np.newaxis,:]*u[:,np.newaxis]
    return Pts, k, dLr
0.018792
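A quick numeric sketch, assuming LOS_get_sample is importable: sampling segment midpoints ('sum') along the unit z axis between DL=[0, 1] at an absolute resolution of 0.25.

import numpy as np

D = np.array([0., 0., 0.])
u = np.array([0., 0., 1.])
Pts, k, dLr = LOS_get_sample(D, u, 0.25, DL=[0., 1.], method='sum')
# k   -> [0.125, 0.375, 0.625, 0.875]
# dLr -> 0.25, Pts.shape -> (3, 4)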
def setUserPasswdCredentials(self, username, password): """Set username and password in ``disk.0.os.credentials``.""" self.setCredentialValues(username=username, password=password)
0.010152
def values(self, desc=None):
        '''Return the underlying data as a numpy array; asarray does not copy data'''
        if self._ts:
            res = asarray(self._ts)
            if desc is True:
                # reversed view, still no copy
                return res[::-1]
            else:
                return res
        else:
            return ndarray([0, 0])
0.019868
def evaluate(self, dataset, metric='auto', batch_size=None, verbose=True): """ Evaluate the model by making predictions of target values and comparing these to actual values. Parameters ---------- dataset : SFrame Dataset of new observations. Must include columns with the same names as the feature and target columns used for model training. Additional columns are ignored. metric : str, optional Name of the evaluation metric. Possible values are: - 'auto' : Returns all available metrics. - 'accuracy' : Classification accuracy (micro average). - 'auc' : Area under the ROC curve (macro average) - 'precision' : Precision score (macro average) - 'recall' : Recall score (macro average) - 'f1_score' : F1 score (macro average) - 'confusion_matrix' : An SFrame with counts of possible prediction/true label combinations. - 'roc_curve' : An SFrame containing information needed for an ROC curve verbose : bool, optional If True, prints prediction progress. Returns ------- out : dict Dictionary of evaluation results where the key is the name of the evaluation metric (e.g. `accuracy`) and the value is the evaluation score. See Also ---------- create, predict Examples ---------- .. sourcecode:: python >>> results = model.evaluate(data) >>> print(results['accuracy']) """ if self.target not in dataset.column_names(): raise _ToolkitError("Must provide ground truth column, '" + self.target + "' in the evaluation dataset.") predicted = self._predict_with_probabilities(dataset, batch_size, verbose) avail_metrics = ['accuracy', 'auc', 'precision', 'recall', 'f1_score', 'confusion_matrix', 'roc_curve'] _tkutl._check_categorical_option_type( 'metric', metric, avail_metrics + ['auto']) metrics = avail_metrics if metric == 'auto' else [metric] ret = {} if 'accuracy' in metrics: ret['accuracy'] = _evaluation.accuracy( dataset[self.target], predicted[self.target]) if 'auc' in metrics: ret['auc'] = _evaluation.auc( dataset[self.target], predicted['probability'], index_map=self._class_to_index) if 'precision' in metrics: ret['precision'] = _evaluation.precision( dataset[self.target], predicted[self.target]) if 'recall' in metrics: ret['recall'] = _evaluation.recall( dataset[self.target], predicted[self.target]) if 'f1_score' in metrics: ret['f1_score'] = _evaluation.f1_score( dataset[self.target], predicted[self.target]) if 'confusion_matrix' in metrics: ret['confusion_matrix'] = _evaluation.confusion_matrix( dataset[self.target], predicted[self.target]) if 'roc_curve' in metrics: ret['roc_curve'] = _evaluation.roc_curve( dataset[self.target], predicted['probability'], index_map=self._class_to_index) return ret
0.00496
def send_mail(recipient_list, subject, body, html=False, from_address=None): """ :param recipient_list: List of recipients i.e. ['[email protected]', 'Stephen Brown <[email protected]>'] :param subject: The subject :param body: The email body :param html: Is this a html email? Defaults to False :param from_address: From email address or name and address i.e. 'Test System <[email protected]> :return: """ if not _configured: raise Exception('LFS Mailer hasn\'t been configured') if from_address is None: from_address = default_email_from mime_type = 'html' if html else 'plain' log.debug('Sending {} mail to {}: {}'.format(mime_type, ', '.join(recipient_list), subject)) if dump_email_body: log.info(body) s = smtplib.SMTP(host, port) if use_tls: s.ehlo() s.starttls() s.ehlo() if username: s.login(username, password) if email_to_override: subject = '[to %s] %s' % (', '.join(recipient_list), subject) recipient_list = [email_to_override] log.info('Using email override: %s' % ', '.join(recipient_list)) msg = MIMEText(body, mime_type, 'utf-8') msg['To'] = ', '.join(recipient_list) msg['Subject'] = subject msg['From'] = from_address msg['Date'] = email.utils.formatdate() s.sendmail(from_address, recipient_list, msg.as_string()) s.quit()
0.004193
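A usage sketch for send_mail; it presumes the module-level configuration (host, port, credentials and the _configured flag) has already been set up, and the addresses are invented.

send_mail(
    ['ops@example.com', 'Jane Doe <jane@example.com>'],
    subject='Nightly import finished',
    body='<p>All rows loaded.</p>',
    html=True,
    from_address='Test System <system@example.com>')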
def remove_repo(self, repo, team): """Remove ``repo`` from ``team``. :param str repo: (required), form: 'user/repo' :param str team: (required) :returns: bool """ for t in self.iter_teams(): if team == t.name: return t.remove_repo(repo) return False
0.005988
def train(self, x, drop=False, na_rm=False): """ Train discrete range """ self.range = scale_discrete.train(x, self.range, drop, na_rm=na_rm)
0.011561
def send_data(self, endpoint=None, **kwargs):
        """Sends data to the API.

        This call is similar to ``fetch``, but **sends** data to the API instead
        of retrieving it.

        Returned data will appear in the ``items`` key of the resulting
        dictionary.

        Sending data **requires** that the ``token`` is set.

        :param endpoint: (string) **(Required)** The API end point being called. Available endpoints are listed on
            the official `API Documentation <https://github.com/Charcoal-SE/metasmoke/wiki/API-Documentation>`__.

            If no end point is passed, a ``ValueError`` will be raised
        :param kwargs: Parameters accepted by individual endpoints. These parameters **must** be named the same
            as described in the endpoint documentation
        :rtype: (dictionary) A dictionary containing wrapper data regarding the API call
            and the results of the call in the `items` key. If multiple
            pages were received, all of the results will appear in the
            ``items`` tag.
        """
        if not endpoint:
            raise ValueError('No end point provided.')
        if not self.token:
            raise ValueError('A write token has not been set. This is required for all MetaSmoke API routes. This can\n'
                             'be set by setting the "token" parameter of your SmokeAPI object.')

        self._endpoint = endpoint

        params = {
            "key": self._api_key,
            "token": self.token
        }

        if 'ids' in kwargs:
            # NOTE: ids is joined here but not currently used downstream
            ids = ';'.join(str(x) for x in kwargs['ids'])
            kwargs.pop('ids', None)
        else:
            ids = None

        params.update(kwargs)

        data = []

        base_url = "{}{}/".format(self._base_url, endpoint)
        response = requests.post(base_url, data=params, proxies=self.proxy)
        self._previous_call = response.url
        response = response.json()

        try:
            code = response["error_code"]
            name = response["error_name"]
            message = response["error_message"]
            raise SmokeAPIError(self._previous_call, code, name, message)
        except KeyError:
            pass  # This means there is no error

        data.append(response)

        r = []
        for d in data:
            r.extend(d['items'])

        items = list(chain(r))
        result = {'has_more': data[-1]['has_more'],
                  'page': params.get('page', 1),
                  'total': len(items),
                  'items': items}

        return result
0.003873
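A hedged usage sketch for send_data; the endpoint name, key, and token are hypothetical, and the SmokeAPI construction follows the smokeapi library's documented pattern.

from smokeapi import SmokeAPI

smoke = SmokeAPI('your-api-key')
smoke.token = 'your-write-token'
result = smoke.send_data('w/post/report',
                         post_url='https://example.com/q/1', page=1)
print(result['items'])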