Columns: text (string, lengths 78 to 104k characters), score (float64, range 0 to 0.18)
def indent(text: str, num: int = 2) -> str:
    """Indent a piece of text."""
    lines = text.splitlines()
    return "\n".join(indent_iterable(lines, num=num))
0.006211
def groups_set_topic(self, room_id, topic, **kwargs):
    """Sets the topic for the private group."""
    return self.__call_api_post('groups.setTopic', roomId=room_id, topic=topic, kwargs=kwargs)
0.014706
def process_multinest_run(file_root, base_dir, **kwargs):
    """Loads data from a MultiNest run into the nestcheck dictionary format for
    analysis.

    N.B. producing the required output file containing information about the
    iso-likelihood contours within which points were sampled (where they were
    "born") requires MultiNest version 3.11 or later.

    Parameters
    ----------
    file_root: str
        Root name for output files. When running MultiNest, this is determined
        by the nest_root parameter.
    base_dir: str
        Directory containing output files. When running MultiNest, this is
        determined by the nest_root parameter.
    kwargs: dict, optional
        Passed to ns_run_utils.check_ns_run (via process_samples_array)

    Returns
    -------
    ns_run: dict
        Nested sampling run dict (see the module docstring for more details).
    """
    # Load dead and live points
    dead = np.loadtxt(os.path.join(base_dir, file_root) + '-dead-birth.txt')
    live = np.loadtxt(os.path.join(base_dir, file_root) + '-phys_live-birth.txt')
    # Remove unnecessary final columns
    dead = dead[:, :-2]
    live = live[:, :-1]
    assert dead[:, -2].max() < live[:, -2].min(), (
        'final live points should have greater logls than any dead point!',
        dead, live)
    ns_run = process_samples_array(np.vstack((dead, live)), **kwargs)
    assert np.all(ns_run['thread_min_max'][:, 0] == -np.inf), (
        'As MultiNest does not currently perform dynamic nested sampling, all '
        'threads should start by sampling the whole prior.')
    ns_run['output'] = {}
    ns_run['output']['file_root'] = file_root
    ns_run['output']['base_dir'] = base_dir
    return ns_run
0.000573
def set_index(self, index):
    """Display the data of the given index

    :param index: the index to paint
    :type index: QtCore.QModelIndex
    :returns: None
    :rtype: None
    :raises: None
    """
    self.index = index
    self.reftrack = index.model().index(index.row(), 18, index.parent()).data(REFTRACK_OBJECT_ROLE)
    self.set_maintext(self.index)
    self.set_identifiertext(self.index)
    self.set_type_icon(self.index)
    self.disable_restricted()
    self.hide_restricted()
    self.set_top_bar_color(self.index)
    self.set_status_buttons()
    self.set_menu()
0.004601
def _get_network(self, network_id):
    """Function to get specific network (eth0, total, etc)"""
    if self._data is not None:
        for network in self._data["network"]:
            if network["device"] == network_id:
                return network
0.007194
def get_all_results_from_jobs(user, j_id):
    """Get all results from job."""
    job = v1_utils.verify_existence_and_get(j_id, _TABLE)

    if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
        raise dci_exc.Unauthorized()

    # get test cases from tests_results
    query = sql.select([models.TESTS_RESULTS]). \
        where(models.TESTS_RESULTS.c.job_id == job['id'])
    all_tests_results = flask.g.db_conn.execute(query).fetchall()

    results = []
    for test_result in all_tests_results:
        test_result = dict(test_result)
        results.append({'filename': test_result['name'],
                        'name': test_result['name'],
                        'total': test_result['total'],
                        'failures': test_result['failures'],
                        'errors': test_result['errors'],
                        'skips': test_result['skips'],
                        'time': test_result['time'],
                        'regressions': test_result['regressions'],
                        'successfixes': test_result['successfixes'],
                        'success': test_result['success'],
                        'file_id': test_result['file_id']})

    return flask.jsonify({'results': results,
                          '_meta': {'count': len(results)}})
0.000754
def index_get(index, hosts=None, profile=None):
    '''
    Check for the existence of an index and if it exists, return it

    index
        Index name

    CLI example::

        salt myminion elasticsearch.index_get testindex
    '''
    es = _get_instance(hosts, profile)

    try:
        return es.indices.get(index=index)
    except elasticsearch.exceptions.NotFoundError:
        return None
    except elasticsearch.TransportError as e:
        raise CommandExecutionError("Cannot retrieve index {0}, server returned code {1} with message {2}".format(index, e.status_code, e.error))
0.003378
def _reset_suffix_links(self):
    '''
    Reset all suffix links in all nodes in this trie.
    '''
    self._suffix_links_set = False
    for current, _parent in self.dfs():
        current.suffix = None
        current.dict_suffix = None
        current.longest_prefix = None
0.006452
def export_image(input, output, timeout=20, palette='white', resolution=150,
                 layers=None, command=None, mirror=False, showgui=False):
    '''
    Export an eagle .sch or .brd file into an image file.
    GUI is not displayed if ``pyvirtualdisplay`` is installed.
    If export is blocked somehow (e.g. a popup window is displayed), then the
    operation is canceled after the timeout with an exception.
    The problem can be investigated by setting the 'showgui' flag.

    Exporting generates an image file with a format corresponding to the given
    filename extension. The following image formats are available:

        .bmp  Windows Bitmap Files
        .png  Portable Network Graphics Files
        .pbm  Portable Bitmap Files
        .pgm  Portable Grayscale Bitmap Files
        .ppm  Portable Pixelmap Files
        .tif  TIFF Files
        .xbm  X Bitmap Files
        .xpm  X Pixmap Files

    :param input: eagle .sch or .brd file name
    :param output: image file name, existing file will be removed first!
    :param palette: background color [None,black,white,colored]
    :param resolution: image resolution in dpi (50..2400)
    :param timeout: operation is canceled after this timeout (sec)
    :param showgui: eagle GUI is displayed
    :param layers: list, layers to be displayed ['top','pads']
    :param command: string, direct eagle command
    :param mirror: Bool
    :rtype: None
    '''
    input = norm_path(input)
    output = norm_path(output)
    if palette:
        palette = palette.lower()
    if palette == 'none':
        palette = None

    cmds = []
    if palette is not None:
        cmds += ['SET PALETTE {palette}'.format(palette=palette)]
    if layers is not None:
        cmds += ['DISPLAY NONE ' + ' '.join(layers)]
    if command is not None:
        cmds += [command]

    if mirror:
        f = tempfile.NamedTemporaryFile(suffix='.png', prefix='eagexp_')
        fout = f.name
    else:
        fout = output

    commands = export_command(output=fout, output_type='image',
                              commands=cmds, resolution=resolution)
    command_eagle(input=input, timeout=timeout, commands=commands, showgui=showgui)

    if mirror:
        im = Image.open(fout)
        # save dpi info
        info = im.info
        im = ImageOps.mirror(im)
        im.save(output, **info)
0.001296
def get_rgb_hex(self):
    """
    Converts the RGB value to a hex value in the form of: #RRGGBB

    :rtype: str
    """
    rgb_r, rgb_g, rgb_b = self.get_upscaled_value_tuple()
    return '#%02x%02x%02x' % (rgb_r, rgb_g, rgb_b)
0.007874
def edit_dedicated_fwl_rules(self, firewall_id, rules):
    """Edit the rules for dedicated firewall.

    :param integer firewall_id: the instance ID of the dedicated firewall
    :param list rules: the rules to be pushed on the firewall as defined by
                       SoftLayer_Network_Firewall_Update_Request_Rule
    """
    mask = ('mask[networkVlan[firewallInterfaces'
            '[firewallContextAccessControlLists]]]')
    svc = self.client['Network_Vlan_Firewall']
    fwl = svc.getObject(id=firewall_id, mask=mask)
    network_vlan = fwl['networkVlan']

    for fwl1 in network_vlan['firewallInterfaces']:
        if fwl1['name'] == 'inside':
            continue
        for control_list in fwl1['firewallContextAccessControlLists']:
            if control_list['direction'] == 'out':
                continue
            fwl_ctx_acl_id = control_list['id']

    template = {'firewallContextAccessControlListId': fwl_ctx_acl_id,
                'rules': rules}

    svc = self.client['Network_Firewall_Update_Request']
    return svc.createObject(template)
0.001724
def stop(self):
    """Use this method to manually stop the Client.
    Requires no parameters.

    Raises:
        ``ConnectionError`` in case you try to stop an already stopped Client.
    """
    if not self.is_started:
        raise ConnectionError("Client is already stopped")

    if self.takeout_id:
        self.send(functions.account.FinishTakeoutSession())
        log.warning("Takeout session {} finished".format(self.takeout_id))

    Syncer.remove(self)
    self.dispatcher.stop()

    for _ in range(self.DOWNLOAD_WORKERS):
        self.download_queue.put(None)

    for i in self.download_workers_list:
        i.join()
    self.download_workers_list.clear()

    for _ in range(self.UPDATES_WORKERS):
        self.updates_queue.put(None)

    for i in self.updates_workers_list:
        i.join()
    self.updates_workers_list.clear()

    for i in self.media_sessions.values():
        i.stop()
    self.media_sessions.clear()

    self.is_started = False
    self.session.stop()

    return self
0.00266
def _set_fcoe_map(self, v, load=False):
    """
    Setter method for fcoe_map, mapped from YANG variable /fcoe/fcoe_map (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_fcoe_map is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_fcoe_map() directly.

    YANG Description: The list of FCoE maps. Each row contains the FCoE map
    name and the list of FCoE fabric maps and CEE maps.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=YANGListType("fcoe_map_name",fcoe_map.fcoe_map, yang_name="fcoe-map", rest_name="fcoe-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fcoe-map-name', extensions={u'tailf-common': {u'info': u'Configure an FCoE Map', u'alt-name': u'fcoe-map', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'debug', u'callpoint': u'fcoe_map_cp', u'cli-mode-name': u'config-fcoe-map-$(fcoe-map-name)'}}), is_container='list', yang_name="fcoe-map", rest_name="fcoe-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure an FCoE Map', u'alt-name': u'fcoe-map', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'debug', u'callpoint': u'fcoe_map_cp', u'cli-mode-name': u'config-fcoe-map-$(fcoe-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """fcoe_map must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("fcoe_map_name",fcoe_map.fcoe_map, yang_name="fcoe-map", rest_name="fcoe-map", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='fcoe-map-name', extensions={u'tailf-common': {u'info': u'Configure an FCoE Map', u'alt-name': u'fcoe-map', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'debug', u'callpoint': u'fcoe_map_cp', u'cli-mode-name': u'config-fcoe-map-$(fcoe-map-name)'}}), is_container='list', yang_name="fcoe-map", rest_name="fcoe-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure an FCoE Map', u'alt-name': u'fcoe-map', u'cli-suppress-list-no': None, u'cli-full-command': None, u'hidden': u'debug', u'callpoint': u'fcoe_map_cp', u'cli-mode-name': u'config-fcoe-map-$(fcoe-map-name)'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='list', is_config=True)""",
        })

    self.__fcoe_map = t
    if hasattr(self, '_set'):
        self._set()
0.003459
def set_portfast(self, name, value=None, default=False, disable=False):
    """Configures the portfast value for the specified interface

    Args:
        name (string): The interface identifier to configure. The name
            must be the full interface name (eg Ethernet1, not Et1)
        value (bool): True if portfast is enabled otherwise False
        default (bool): Configures the portfast parameter to its default
            value using the EOS CLI default config command
        disable (bool): Negates the portfast parameter using the EOS
            CLI no config command

    Returns:
        True if the command succeeds, otherwise False

    Raises:
        ValueError: Raised if an invalid interface name is specified
        TypeError: Raised if the value keyword argument does not evaluate
            to a valid boolean
    """
    if value is False:
        disable = True
    string = 'spanning-tree portfast'
    cmds = self.command_builder(string, value=value, default=default,
                                disable=disable)
    return self.configure_interface(name, cmds)
0.001674
def doc_stream(path):
    """
    Generator to feed tokenized documents (treating each line as a document).
    """
    with open(path, 'r') as f:
        for line in f:
            if line.strip():
                yield line
0.004444
def updatePrefShockProcess(self):
    '''
    Make a discrete preference shock structure for each period in the cycle
    for this agent type, storing them as attributes of self for use in the
    solution (and other methods).

    Parameters
    ----------
    none

    Returns
    -------
    none
    '''
    time_orig = self.time_flow
    self.timeFwd()

    PrefShkDstn = []  # discrete distributions of preference shocks
    for t in range(len(self.PrefShkStd)):
        PrefShkStd = self.PrefShkStd[t]
        PrefShkDstn.append(approxMeanOneLognormal(N=self.PrefShkCount,
                                                  sigma=PrefShkStd,
                                                  tail_N=self.PrefShk_tail_N))

    # Store the preference shocks in self (time-varying) and restore time flow
    self.PrefShkDstn = PrefShkDstn
    self.addToTimeVary('PrefShkDstn')
    if not time_orig:
        self.timeRev()
0.006218
def _parse_details(self, tag_list):
    """Given a list of tags from either a search page or the KAT home page,
    parse the details and return a list of Torrents"""
    result = list()
    for i, item in enumerate(tag_list):
        title = item.find("a", class_="cellMainLink")
        title_text = title.text
        link = title.get("href")
        tds = item.find_all("td", class_="center")  # Better name here.
        size = tds[0].text
        files = tds[1].text
        age = tds[2].text
        seed = tds[3].text
        leech = tds[4].text
        magnet = item.find("a", class_="imagnet icon16")
        download = item.find("a", class_="idownload icon16")
        isVerified = item.find("a", class_="iverify icon16") is not None

        # Home page doesn't have magnet or download links
        if magnet:
            magnet = magnet.get("href")
        if download:
            download = download.get("href")

        # How we get the category changes depending on whether we're parsing
        # the home page or a search page.
        if self.current_url == self.base_url:
            category = self._get_torrent_category(item, result=i)
        else:
            category = self._get_torrent_category(item)

        result.append(Torrent(title_text, category, link, size, seed, leech,
                              magnet, download, files, age, isVerified))
    return result
0.031941
def parseBEDString(line, scoreType=int, dropAfter=None):
    """
    Parse a string in BED format and return a GenomicInterval object.

    :param line: the string to be parsed
    :param dropAfter: an int indicating that any fields after and including
                      this field should be ignored as they don't conform to
                      the BED format. By default, None, meaning we use all
                      fields. Index from zero.
    :return: GenomicInterval object built from the BED string representation
    """
    pieces = line.split("\t")
    if dropAfter is not None:
        pieces = pieces[0:dropAfter]
    if len(pieces) < 3:
        raise GenomicIntervalError("BED elements must have at least chrom, " +
                                   "start and end; found only " +
                                   str(len(pieces)) + " in " + line)
    chrom = pieces[0]
    start = pieces[1]
    end = pieces[2]

    name = None
    score = None
    strand = None

    if len(pieces) >= 4:
        name = pieces[3]
    if len(pieces) >= 5:
        score = pieces[4]
    if len(pieces) >= 6:
        strand = pieces[5]

    return GenomicInterval(chrom, start, end, name, score, strand, scoreType)
0.01268
def lint(self, targets):
    """Run linters in parallel and sort all results.

    Args:
        targets (list): List of files and folders to lint.
    """
    LinterRunner.targets = targets
    linters = self._config.get_linter_classes()
    with Pool() as pool:
        out_err_none = pool.map(LinterRunner.run, linters)
    out_err = [item for item in out_err_none if item is not None]
    stdout, stderr = zip(*out_err)
    return sorted(chain.from_iterable(stdout)), chain.from_iterable(stderr)
0.003683
def _is_url_like_archive(url):
    # type: (str) -> bool
    """Return whether the URL looks like an archive."""
    filename = Link(url).filename
    for bad_ext in ARCHIVE_EXTENSIONS:
        if filename.endswith(bad_ext):
            return True
    return False
0.003704
def git_fetch(repo_dir, remote=None, refspec=None, verbose=False, tags=True):
    """Do a git fetch of `refspec` in `repo_dir`.

    If 'remote' is None, all remotes will be fetched.
    """
    command = ['git', 'fetch']
    if not remote:
        command.append('--all')
    else:
        remote = pipes.quote(remote)
        command.extend(['--update-head-ok'])
    if tags:
        command.append('--tags')
    if verbose:
        command.append('--verbose')
    if remote:
        command.append(remote)
    if refspec:
        command.append(refspec)
    return execute_git_command(command, repo_dir=repo_dir)
0.001637
def rule_low_registers(self, arg):
    """Low registers are R0 - R7"""
    r_num = self.check_register(arg)
    if r_num > 7:
        raise iarm.exceptions.RuleError(
            "Register {} is not a low register".format(arg))
0.008097
def __experimental_range(start, stop, var, cond, loc={}):
    '''Utility function made to reproduce range() with unit integer step
    but with the added possibility of specifying a condition
    on the looping variable (e.g. var % 2 == 0)
    '''
    locals().update(loc)
    if start < stop:
        for __ in range(start, stop):
            locals()[var] = __
            if eval(cond, globals(), locals()):
                yield __
    else:
        for __ in range(start, stop, -1):
            locals()[var] = __
            if eval(cond, globals(), locals()):
                yield __
0.001669
def process_nxml_str(nxml_str, output_fmt='json', outbuf=None, cleanup=True,
                     key='', **kwargs):
    """Return processor with Statements extracted by reading an NXML string.

    Parameters
    ----------
    nxml_str : str
        The string value of the NXML-formatted paper to be read.
    output_fmt: Optional[str]
        The output format to obtain from Sparser, with the two options being
        'json' and 'xml'. Default: 'json'
    outbuf : Optional[file]
        A file like object that the Sparser output is written to.
    cleanup : Optional[bool]
        If True, the temporary file created in this function,
        which is used as an input file for Sparser, as well as the output
        file created by Sparser are removed. Default: True
    key : Optional[str]
        A key which is embedded into the name of the temporary file
        passed to Sparser for reading. Default is empty string.

    Returns
    -------
    SparserXMLProcessor or SparserJSONProcessor depending on what output
    format was chosen.
    """
    tmp_fname = 'PMC%s_%d.nxml' % (key, mp.current_process().pid)
    with open(tmp_fname, 'wb') as fh:
        fh.write(nxml_str.encode('utf-8'))
    try:
        sp = process_nxml_file(tmp_fname, output_fmt, outbuf, cleanup, **kwargs)
    finally:
        if cleanup and os.path.exists(tmp_fname):
            os.remove(tmp_fname)
    return sp
0.000695
def clear(self):
    '''
    Clear the command list.

    This command will be sent to the vehicle only after you call
    :py:func:`upload() <Vehicle.commands.upload>`.
    '''
    # Add home point again.
    self.wait_ready()
    home = None
    try:
        home = self._vehicle._wploader.wp(0)
    except:
        pass
    self._vehicle._wploader.clear()
    if home:
        self._vehicle._wploader.add(home, comment='Added by DroneKit')
    self._vehicle._wpts_dirty = True
0.007463
def _find_base_tds_url(catalog_url):
    """Identify the base URL of the THREDDS server from the catalog URL.

    Will retain URL scheme, host, port and username/password when present.
    """
    url_components = urlparse(catalog_url)
    if url_components.path:
        return catalog_url.split(url_components.path)[0]
    else:
        return catalog_url
0.002793
def sort_func(variant=VARIANT1, case_sensitive=False):
    """A function generator that can be used for sorting.

    All keywords are passed to `normalize()` and generate keywords that
    can be passed to `sorted()`::

        >>> key = sort_func()
        >>> print(sorted(["fur", "far"], key=key))
        [u'far', u'fur']

    Please note that `sort_func` returns a function.
    """
    return lambda x: normalize(
        x, variant=variant, case_sensitive=case_sensitive)
0.00211
def attach_service(cls, service):
    """ Allows you to attach one TCP and one HTTP service

    deprecated:: 2.1.73
        use http and tcp specific methods

    :param service: A trellio TCP or HTTP service that needs to be hosted
    """
    if isinstance(service, HTTPService):
        cls._http_service = service
    elif isinstance(service, TCPService):
        cls._tcp_service = service
    else:
        cls._logger.error('Invalid argument attached as service')
    cls._set_bus(service)
0.003759
def collection(self, user_id):
    """
    Get the song collection of a user.

    :param user_id: ID of a user.
    :rtype: list of :class:`Song`
    """
    # TODO further evaluation of the page param, I don't know where the
    # limit is.
    dct = {'userID': user_id, 'page': 0}
    r = 'userGetSongsInLibrary'
    result = self.connection.request(r, dct, self.connection.header(r))
    songs = result[1]['Songs']
    return [Song.from_response(song, self.connection) for song in songs]
0.003711
def next(self):
    """ Where to redirect after authorization """
    next = request.args.get('next')
    if next is None:
        params = self.default_redirect_params
        next = url_for(self.default_redirect_endpoint, **params)
    return next
0.007326
def describe_parameters(name, Source=None, MaxRecords=None, Marker=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Returns a list of `DBParameterGroup` parameters.

    CLI example to get a description of parameters::

        salt myminion boto_rds.describe_parameters parametergroupname\\
                region=us-east-1
    '''
    res = __salt__['boto_rds.parameter_group_exists'](name, tags=None,
                                                      region=region, key=key,
                                                      keyid=keyid,
                                                      profile=profile)
    if not res.get('exists'):
        return {'result': False,
                'message': 'Parameter group {0} does not exist'.format(name)}
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        if not conn:
            return {'result': False,
                    'message': 'Could not establish a connection to RDS'}

        kwargs = {}
        kwargs.update({'DBParameterGroupName': name})
        for key in ('Marker', 'Source'):
            if locals()[key] is not None:
                kwargs[key] = str(locals()[key])  # future lint: disable=blacklisted-function
        if locals()['MaxRecords'] is not None:
            kwargs['MaxRecords'] = int(locals()['MaxRecords'])

        pag = conn.get_paginator('describe_db_parameters')
        pit = pag.paginate(**kwargs)

        keys = ['ParameterName', 'ParameterValue', 'Description',
                'Source', 'ApplyType', 'DataType', 'AllowedValues',
                'IsModifiable', 'MinimumEngineVersion', 'ApplyMethod']

        parameters = odict.OrderedDict()
        ret = {'result': True}

        for p in pit:
            for result in p['Parameters']:
                data = odict.OrderedDict()
                for k in keys:
                    data[k] = result.get(k)
                parameters[result.get('ParameterName')] = data

        ret['parameters'] = parameters
        return ret
    except ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
0.000931
def p_switch_statement(self, p):
    """switch_statement : SWITCH LPAREN expr RPAREN case_block"""
    # this uses a completely different type that corrects a
    # subtly wrong interpretation of this construct.
    # see: https://github.com/rspivak/slimit/issues/94
    p[0] = self.asttypes.Switch(expr=p[3], case_block=p[5])
    p[0].setpos(p)
    return
0.005208
def is_set(self):
    """Returns True if the request has finished or False if it is still pending.

    Raises [LinkException](AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
    if the request failed due to a network related problem.
    """
    if self.__event.is_set():
        if self.exception is not None:
            # todo better way to raise errors on behalf of other Threads?
            raise self.exception  # pylint: disable=raising-bad-type
        return True
    return False
0.007421
def send_image(self, number, path, caption=None):
    """
    Send image message

    :param str number: phone number with cc (country code)
    :param str path: image file path
    """
    return self._send_media_path(number, path,
                                 RequestUploadIqProtocolEntity.MEDIA_TYPE_IMAGE,
                                 caption)
0.009615
def nuclear_norm(data):
    r"""Nuclear norm

    This method computes the nuclear (or trace) norm of the input data.

    Parameters
    ----------
    data : np.ndarray
        Input data array

    Returns
    -------
    float nuclear norm value

    Examples
    --------
    >>> from modopt.math.matrix import nuclear_norm
    >>> a = np.arange(9).reshape(3, 3)
    >>> nuclear_norm(a)
    15.49193338482967

    Notes
    -----
    Implements the following equation:

    .. math::
        \|\mathbf{A}\|_* = \sum_{i=1}^{\min\{m,n\}} \sigma_i (\mathbf{A})

    """
    # Get SVD of the data.
    u, s, v = np.linalg.svd(data)

    # Return nuclear norm.
    return np.sum(s)
0.001466
def _retrieve(self):
    """
    Retrieve the data from the database.
    """
    if self._authorization() and "whois_db" not in PyFunceble.INTERN:
        # The usage of the whois database is activated.

        if PyFunceble.path.isfile(self.whois_db_path):
            # The database file exists.

            # We merge our current database into the already initiated one.
            PyFunceble.INTERN["whois_db"] = Dict().from_json(
                File(self.whois_db_path).read()
            )
        else:
            # The database file does not exist.

            # We initiate an empty database.
            PyFunceble.INTERN["whois_db"] = {}
0.002813
def _parse_band(cls, kw):
    """Returns photometric band from inifile keyword
    """
    m = re.search(r'([a-zA-Z0-9]+)(_\d+)?', kw)
    if m:
        if m.group(1) in cls._not_a_band:
            return None
        else:
            return m.group(1)
0.010526
def create_seq(self):
    """Create a sequence and store it in self.sequence

    :returns: None
    :rtype: None
    :raises: None
    """
    name = self.name_le.text()
    desc = self.desc_pte.toPlainText()
    try:
        seq = djadapter.models.Sequence(name=name, project=self._project, description=desc)
        seq.save()
        self.sequence = seq
        self.accept()
    except:
        log.exception("Could not create new sequence")
0.007874
def x(self, x):
    """Project x as y"""
    if x is None:
        return None
    if self._force_vertical:
        return super(HorizontalLogView, self).x(x)
    return super(XLogView, self).y(x)
0.009091
def guid(valu=None):
    '''
    Get a 16 byte guid value.

    By default, this is a random guid value.

    Args:
        valu: Object used to construct the guid valu from. This must be able
            to be msgpack'd.

    Returns:
        str: 32 character, lowercase ascii string.
    '''
    if valu is None:
        return binascii.hexlify(os.urandom(16)).decode('utf8')

    # Generate a "stable" guid from the given item
    byts = s_msgpack.en(valu)
    return hashlib.md5(byts).hexdigest()
0.001996
def announce_job_results(self,
                         pacts: List[Pact],
                         emulator_results_list: List[List[EmulatorResult]],
                         verification_results_list: List[List[VerificationResult]],
                         results_published: bool,
                         succeeded: bool) -> None:
    """The ugliest logger."""
    lines: List[str] = []
    for group in zip(pacts, emulator_results_list, verification_results_list):
        lines += _format_pact_results(*group)
    if results_published:
        lines.append(Fore.BLACK + '**Results for passing pacts were published**')
    print('\n'.join(lines))
    print(Style.RESET_ALL)
0.013661
def do_profile(self, args):
    """Select nephele profile

    profile -h for more details
    """
    parser = CommandArgumentParser("profile")
    parser.add_argument(dest="profile", help="Profile name")
    parser.add_argument('-v', '--verbose', dest="verbose", action='store_true', help='verbose')
    args = vars(parser.parse_args(args))

    profile = args['profile']
    verbose = args['verbose']
    if verbose:
        print "Selecting profile '{}'".format(profile)

    selectedProfile = {}
    if profile in Config.config['profiles']:
        selectedProfile = Config.config['profiles'][profile]
    selectedProfile['name'] = profile
    Config.config['selectedProfile'] = selectedProfile

    awsProfile = profile
    if 'awsProfile' in selectedProfile:
        awsProfile = selectedProfile['awsProfile']

    AwsConnectionFactory.resetInstance(profile=awsProfile)
0.009385
def pubmed_url(args=sys.argv[1:], resolve_doi=True, out=sys.stdout):
    """
    Get a publication URL via the command line using a PubMed ID or PubMed URL
    """
    parser = argparse.ArgumentParser(
        description='Get a publication URL using a PubMed ID or PubMed URL')
    parser.add_argument('query', help='PubMed ID or PubMed URL')
    parser.add_argument(
        '-d', '--doi', action='store_false', help='get DOI URL')
    parser.add_argument(
        '-e', '--email', action='store', help='set user email', default='')
    args = parser.parse_args(args=args)

    lookup = PubMedLookup(args.query, args.email)
    publication = Publication(lookup, resolve_doi=args.doi)
    out.write(publication.url + '\n')
0.001377
def versionString(version):
    """Create version string.

    For a sequence containing version information such as (2, 0, 0, 'pre'),
    this returns a printable string such as '2.0pre'.
    The micro version number is only excluded from the string if it is zero.
    """
    ver = list(map(str, version))
    numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:]
    return '.'.join(numbers) + '-'.join(rest)
0.00241
def hostname(self, hostname):
    """ hostname setter """
    if not isinstance(hostname, six.string_types):
        raise TypeError("hostname must be a string. {0} was passed.".format(type(hostname)))

    # If a host name is passed and it's not valid, raise; otherwise set the
    # hostname. Empty strings are the docker default.
    if hostname and not is_valid_hostname(hostname):
        raise ValueError("{0} isn't a valid hostname".format(hostname))
    else:
        self._hostname = hostname
0.007634
def _define_end_episode(self, agent_indices):
    """Implement the branch of end_episode() entered during training."""
    episodes, length = self._current_episodes.data(agent_indices)
    space_left = self._config.update_every - self._num_finished_episodes
    use_episodes = tf.range(tf.minimum(
        tf.shape(agent_indices)[0], space_left))
    episodes = tools.nested.map(lambda x: tf.gather(x, use_episodes), episodes)
    append = self._finished_episodes.replace(
        episodes, tf.gather(length, use_episodes),
        use_episodes + self._num_finished_episodes)
    with tf.control_dependencies([append]):
        increment_index = self._num_finished_episodes.assign_add(
            tf.shape(use_episodes)[0])
    with tf.control_dependencies([increment_index]):
        memory_full = self._num_finished_episodes >= self._config.update_every
        return tf.cond(memory_full, self._training, str)
0.00442
def client_list(self, name=None, name_only=None, all_enrolled=None):
    """
    Get list of clients. Uses GET to /clients interface.

    :Kwargs:
        * *name*: (str) If specified, returns the client information for
          this client only.
        * *name_only*: (bool) If true, returns only the names of the
          clients requested
        * *all_enrolled*: (bool) If true, will return all enrolled clients

    :Returns: (list) List of dictionaries with the client information as
        requested.
    """
    params = {}
    if name:  # When a specific name value is provided
        params["name"] = name
    if name_only:  # (Boolean) "True": only keyword "name" is provided
        params["name"] = ""
    if all_enrolled:  # (Boolean) "True": returns all enrolled clients
        params["all_enrolled"] = all_enrolled

    response = self._get(url.clients, params=params)
    self._check_response(response, 200)
    if name:
        return response.json()
    return self._create_response(response).get("clients")
0.006233
def find_neighbor_throats(self, pores, mode='union', flatten=True):
    r"""
    Returns a list of throats neighboring the given pore(s)

    Parameters
    ----------
    pores : array_like
        Indices of pores whose neighbors are sought
    flatten : boolean, optional
        If ``True`` (default) a 1D array of unique throat indices is
        returned. If ``False`` the returned array contains arrays of
        neighboring throat indices for each input pore, in the order
        they were sent.
    mode : string
        Specifies logic to filter the resulting list. Options are:

        **'or'** : (default) All neighbors of the input pores. This is
        also known as the 'union' in set theory or 'any' in boolean logic.
        Both keywords are accepted and treated as 'or'.

        **'xor'** : Only neighbors of one and only one input pore. This
        is useful for finding the throats that are not shared by any of
        the input pores.

        **'xnor'** : Neighbors that are shared by two or more input
        pores. This is equivalent to finding all neighbors with 'or',
        minus those found with 'xor', and is useful for finding neighbors
        that the inputs have in common.

        **'and'** : Only neighbors shared by all input pores. This is
        also known as 'intersection' in set theory and (sometimes) as
        'all' in boolean logic. Both keywords are accepted and treated
        as 'and'.

    Returns
    -------
    If ``flatten`` is ``True``, returns a 1D array of throat indices
    filtered according to the specified mode. If ``flatten`` is
    ``False``, returns a list of lists, where each list contains the
    neighbors of the corresponding input pores.

    Notes
    -----
    The ``logic`` options are applied to neighboring bonds only, thus it
    is not possible to obtain bonds that are part of the global set but
    not neighbors. This is because (a) the list of global bonds might be
    very large, and (b) it is not possible to return a list of neighbors
    for each input site if global sites are considered.

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[5, 5, 5])
    >>> Ts = pn.find_neighbor_throats(pores=[0, 1])
    >>> print(Ts)
    [  0   1 100 101 200 201]
    >>> Ts = pn.find_neighbor_throats(pores=[0, 1], flatten=False)
    >>> print(Ts)
    [array([  0, 100, 200]), array([  0,   1, 101, 201])]

    """
    pores = self._parse_indices(pores)
    if sp.size(pores) == 0:
        return sp.array([], ndmin=1, dtype=int)
    if 'lil' not in self._im.keys():
        self.get_incidence_matrix(fmt='lil')
    neighbors = topotools.find_neighbor_bonds(sites=pores, logic=mode,
                                              im=self._im['lil'],
                                              flatten=flatten)
    return neighbors
0.000646
def execute(self, logical_form: str):
    """Executes a logical form, using whatever predicates you have defined."""
    if not hasattr(self, '_functions'):
        raise RuntimeError("You must call super().__init__() in your Language constructor")
    logical_form = logical_form.replace(",", " ")
    expression = util.lisp_to_nested_expression(logical_form)
    return self._execute_expression(expression)
0.009259
def _imm_init_setattr(self, name, value):
    '''
    An immutable's initial setattr allows only params to be set and does not
    run checks on the new parameters until a full parameter-set has been
    specified, at which point it runs all checks and switches over to a normal
    setattr and getattr method.
    '''
    params = _imm_param_data(self)
    if name in params:
        tx_fn = params[name][1]
        value = value if tx_fn is None else tx_fn(value)
        # Set the value
        object.__getattribute__(self, '__dict__')[name] = value
        # No checks are run, as we're in initialization mode...
    else:
        raise TypeError(
            'Attempt to change non-parameter \'%s\' of initializing immutable' % name)
0.005435
def _check_stop_list(stop):
    """
    Check stop words list

    ref: https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/feature_extraction/text.py#L87-L95
    """
    if stop == "thai":
        return THAI_STOP_WORDS
    elif isinstance(stop, six.string_types):
        raise ValueError("not a built-in stop list: %s" % stop)
    elif stop is None:
        return None
    # assume it's a collection
    return frozenset(stop)
0.004525
def server_to_dict(server):
    """
    Returns the :class:`dict` representation of a server object.

    The returned :class:`dict` is meant to be consumed by
    :class:`~bang.deployers.cloud.ServerDeployer` objects.
    """
    return {
        A.server.ID: server.id,
        A.server.PUBLIC_IPS: [server.public_dns_name],
        A.server.PRIVATE_IPS: [server.private_dns_name],
    }
0.002439
def remove(name=None, pkgs=None, recursive=True, **kwargs):
    '''
    name
        The name of the package to be deleted.

    recursive
        Also remove dependent packages (not required elsewhere).
        Default mode: enabled.

    Multiple Package Options:

    pkgs
        A list of packages to delete. Must be passed as a python list. The
        ``name`` parameter will be ignored if this option is passed.

    Returns a list containing the removed packages.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.remove <package name> [recursive=False]
        salt '*' pkg.remove <package1>,<package2>,<package3> [recursive=False]
        salt '*' pkg.remove pkgs='["foo", "bar"]' [recursive=False]
    '''
    try:
        pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](
            name, pkgs
        )
    except MinionError as exc:
        raise CommandExecutionError(exc)

    if not pkg_params:
        return {}

    old = list_pkgs()

    # keep only installed packages
    targets = [x for x in pkg_params if x in old]
    if not targets:
        return {}

    cmd = ['xbps-remove', '-y']
    if recursive:
        cmd.append('-R')
    cmd.extend(targets)

    __salt__['cmd.run'](cmd, output_loglevel='trace')
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()

    return salt.utils.data.compare_dicts(old, new)
0.000725
def find_in_mailbox(cls, session, mailbox_or_id):
    """Get the users that are associated to a Mailbox.

    Args:
        session (requests.sessions.Session): Authenticated session.
        mailbox_or_id (MailboxRef or int): Mailbox or the ID of the
            mailbox to get the folders for.

    Returns:
        RequestPaginator(output_type=helpscout.models.User): Users
            iterator.
    """
    if hasattr(mailbox_or_id, 'id'):
        mailbox_or_id = mailbox_or_id.id
    return cls(
        '/mailboxes/%d/users.json' % mailbox_or_id,
        session=session,
    )
0.00311
def revoke_access(src, dst='any', port=None, proto=None):
    """
    Revoke access to an address or subnet

    :param src: address (e.g. 192.168.1.234) or subnet (e.g. 192.168.1.0/24).
    :param dst: destination of the connection; if the machine has multiple IPs
        and connections to only one of them should be accepted, this field has
        to be set.
    :param port: destination port
    :param proto: protocol (tcp or udp)
    """
    return modify_access(src, dst=dst, port=port, proto=proto, action='delete')
0.001805
def end(self):
    """Get or set the end of the event.

    |  Will return an :class:`Arrow` object.
    |  May be set to anything that :func:`Arrow.get` understands.
    |  If set to a non null value, removes any already existing duration.
    |  Setting to None will have unexpected behavior if begin is not None.
    |  Must not be set to an inferior value than self.begin.
    """
    if self._duration:
        # if end is duration defined, return the beginning + duration
        return self.begin + self._duration
    elif self._end_time:
        # if end is time defined
        if self.all_day:
            return self._end_time
        else:
            return self._end_time
    elif self._begin:
        # if end is not defined
        if self.all_day:
            return self._begin + timedelta(days=1)
        else:
            # instant event
            return self._begin
    else:
        return None
0.001955
def to_csv(col, options={}):
    """
    Converts a column containing a :class:`StructType` into a CSV string.
    Throws an exception, in the case of an unsupported type.

    :param col: name of column containing a struct.
    :param options: options to control converting. accepts the same options as the CSV datasource.

    >>> from pyspark.sql import Row
    >>> data = [(1, Row(name='Alice', age=2))]
    >>> df = spark.createDataFrame(data, ("key", "value"))
    >>> df.select(to_csv(df.value).alias("csv")).collect()
    [Row(csv=u'2,Alice')]
    """
    sc = SparkContext._active_spark_context
    jc = sc._jvm.functions.to_csv(_to_java_column(col), options)
    return Column(jc)
0.002894
def remove_pointer(type_):
    """removes pointer from the type definition

    If type is not pointer type, it will be returned as is.
    """
    nake_type = remove_alias(type_)
    if not is_pointer(nake_type):
        return type_
    elif isinstance(nake_type, cpptypes.volatile_t) and \
            isinstance(nake_type.base, cpptypes.pointer_t):
        return cpptypes.volatile_t(nake_type.base.base)
    elif isinstance(nake_type, cpptypes.const_t) and \
            isinstance(nake_type.base, cpptypes.pointer_t):
        return cpptypes.const_t(nake_type.base.base)
    elif isinstance(nake_type, cpptypes.volatile_t) \
            and isinstance(nake_type.base, cpptypes.const_t) \
            and isinstance(nake_type.base.base, cpptypes.pointer_t):
        return (
            cpptypes.volatile_t(cpptypes.const_t(nake_type.base.base.base))
        )
    return nake_type.base
0.001121
def create_authors(project_dir=os.curdir):
    """
    Creates the authors file, if not in a package.

    Returns:
        None

    Raises:
        RuntimeError: If the authors could not be retrieved
    """
    pkg_info_file = os.path.join(project_dir, 'PKG-INFO')
    authors_file = os.path.join(project_dir, 'AUTHORS')
    if os.path.exists(pkg_info_file):
        return

    authors = get_authors(project_dir=project_dir)
    with open(authors_file, 'wb') as authors_fd:
        authors_fd.write(
            b'\n'.join(a.encode('utf-8') for a in authors) + b'\n'
        )
0.001724
def items(self):
    """Get all the message's header fields and values.

    These will be sorted in the order they appeared in the original
    message, or were added to the message, and may contain duplicates.
    Any fields deleted and re-inserted are always appended to the header
    list.
    """
    return [(k, self.policy.header_fetch_parse(k, v))
            for k, v in self._headers]
0.004684
def name(self, name):
    """This function will take the given name and split it into components
    weight, width, customName, and possibly the full name.

    This is what Glyphs 1113 seems to be doing, approximately.
    """
    weight, width, custom_name = self._splitName(name)
    self.set_all_name_components(name, weight, width, custom_name)
0.005376
def window_design(self, window_length, beta):
    """Kaiser window design

    Args:
        window_length: Length of the window in number of samples
        beta: Beta value for Kaiser window design

    Returns:
        window: Window designed using the beta and length provided as inputs
    """
    self.window = np.kaiser(window_length, beta)
    return self.window
0.007092
def _get_generic_schema(self):
    """ Returns whoosh's generic schema. """
    schema = Schema(
        identifier=ID(stored=True),  # Partition versioned id
        type=ID(stored=True),
        name=NGRAM(phrase=True, stored=True, minsize=2, maxsize=8))
    return schema
0.006711
def get_certificate(self, certificate_id):
    """Get certificate by id.

    :param str certificate_id: The certificate id (Required)
    :returns: Certificate object
    :rtype: Certificate
    """
    api = self._get_api(iam.DeveloperApi)
    certificate = Certificate(api.get_certificate(certificate_id))
    self._extend_certificate(certificate)
    return certificate
0.00489
def cassist(self, dc, dt, dt2, nodiag=False, memlimit=-1):
    """Calculates probability of gene i regulating gene j with continuous data
    assisted method, with the recommended combination of multiple tests.

    Probabilities are converted from likelihood ratios separately for each A.
    This gives better predictions when the number of secondary targets (dt2)
    is large. (Check program warnings.)

    dc: numpy.ndarray(nt,ns,dtype=ftype(='f4' by default))
        Continuous anchor data. Entry dc[i,j] is anchor i's value for sample j.
        Anchor i is used to infer the probability of gene i -> any other gene.
    dt: numpy.ndarray(nt,ns,dtype=ftype(='=f4' by default))
        Gene expression data for A.
        Entry dt[i,j] is gene i's expression level for sample j.
    dt2: numpy.ndarray(nt2,ns,dtype=ftype(='=f4' by default))
        Gene expression data for B.
        dt2 has the same format as dt, and can be identical with, different
        from, or a superset of dt. When dt2 is a superset of (or identical
        with) dt, dt2 must be arranged to be identical with dt at its upper
        submatrix, i.e. dt2[:nt,:]=dt, and set parameter nodiag = 1.
    nodiag: skip diagonal regulations, i.e. regulation A->B for A=B.
        This should be set to True when A is a subset of B and aligned
        correspondingly.
    memlimit: The approximate memory usage limit in bytes for the library.
        For datasets that require a larger memory, calculation will be split
        into smaller chunks. If the memory limit is smaller than the minimum
        required, calculation can fail with an error message. memlimit=0
        defaults to unlimited memory usage.

    Return: dictionary with following keys:
        ret: 0 iff execution succeeded.
        p: numpy.ndarray((nt,nt2),dtype=ftype(='=f4' by default)).
            Probability function for the recommended combination of multiple
            tests. For more information on tests, see paper.

    ftype can be found in auto.py.

    Example: see findr.examples.geuvadis5
    """
    return _cassist_any(self, dc, dt, dt2, "pij_cassist", nodiag=nodiag, memlimit=memlimit)
0.025115
def wait(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[bool]:
    """Wait for `.notify`.

    Returns a `.Future` that resolves ``True`` if the condition is notified,
    or ``False`` after a timeout.
    """
    waiter = Future()  # type: Future[bool]
    self._waiters.append(waiter)
    if timeout:
        def on_timeout() -> None:
            if not waiter.done():
                future_set_result_unless_cancelled(waiter, False)
            self._garbage_collect()
        io_loop = ioloop.IOLoop.current()
        timeout_handle = io_loop.add_timeout(timeout, on_timeout)
        waiter.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle))
    return waiter
0.006536
def http_request(self, path="/", method="GET", host=None, port=None,
                 json=False, data=None):
    """
    perform an HTTP request

    :param path: str, path within the request, e.g. "/api/version"
    :param method: str, HTTP method
    :param host: str, if None, set to 127.0.0.1
    :param port: str or int, if None, set to 8080
    :param json: bool, should we expect json?
    :param data: data to send (can be dict, list, str)
    :return: dict
    """
    host = host or '127.0.0.1'
    port = port or 8080
    url = get_url(host=host, port=port, path=path)
    return self.http_session.request(method, url, json=json, data=data)
0.004335
def edge_predicate(func: DictEdgePredicate) -> EdgePredicate:  # noqa: D202
    """Decorate an edge predicate function that only takes a dictionary as its
    singular argument.

    Apply this as a decorator to a function that takes a single argument, a
    PyBEL edge data dictionary, to make sure that it can also accept a
    BELGraph plus the source node, target node, and key identifying an edge.
    """
    @wraps(func)
    def _wrapped(*args):
        x = args[0]
        if isinstance(x, BELGraph):
            u, v, k = args[1:4]
            return func(x[u][v][k])
        return func(*args)

    return _wrapped
0.006579
def start(self):
    """Start the Consumers.

    :return:
    """
    if not self.connection:
        self.create_connection()
    while True:
        try:
            channel = self.connection.channel()
            channel.queue.declare('simple_queue')
            channel.basic.consume(self, 'simple_queue', no_ack=False)
            channel.start_consuming()
            if not channel.consumer_tags:
                channel.close()
        except amqpstorm.AMQPError as why:
            LOGGER.exception(why)
            self.create_connection()
        except KeyboardInterrupt:
            self.connection.close()
            break
0.002801
def to_html(self, **kwargs):
    """Render as html

    :returns: the html representation (<hr>)
    :rtype: str

    We pass the kwargs on to the base class so an exception is raised
    if invalid keywords were passed. See:

    http://stackoverflow.com/questions/13124961/
    how-to-pass-arguments-efficiently-kwargs-in-python
    """
    super(HorizontalRule, self).__init__(**kwargs)
    return '<hr%s/>\n' % self.html_attributes()
0.004193
def _update_value_inert(self, index, value):
    """
    is only called by _update_inert
    """
    # get field descriptor
    field_descriptor = self._table._dev_descriptor.get_field_descriptor(index)

    # prepare value
    value = field_descriptor.deserialize(value, index)

    # unregister previous link if relevant
    if isinstance(value, Link):
        # de-activate current link if any
        current_link = self._data.get(index)
        if current_link is not None:
            current_link.unregister()

    # unregister previous hook if relevant
    if isinstance(value, RecordHook):
        current_record_hook = self._data.get(index)
        if current_record_hook is not None:
            current_record_hook.unregister()

    # unregister previous external file if relevant
    if isinstance(value, ExternalFile):
        current_external_file = self._data.get(index)
        if current_external_file is not None:
            current_external_file._dev_unregister()

    # if None, remove and leave
    if value in (None, NONE_RECORD_HOOK, NONE_LINK, NONE_EXTERNAL_FILE):
        # we don't check required, because this method is called by
        # _update_inert which does the job
        self._dev_set_none_without_unregistering(index, check_not_required=False)
        return

    # if relevant, store current pk to signal table
    old_hook = None
    if index == 0 and not self._table._dev_auto_pk:
        # we use get, because record may not have a pk yet if it is being created
        old_hook = self._data.get(0)

    # set value
    self._data[index] = value

    # signal pk update if relevant
    if old_hook is not None:
        self._table._dev_record_pk_was_updated(old_hook.target_value)
0.003236
def sumApprox(self, timeout, confidence=0.95):
    """
    .. note:: Experimental

    Approximate operation to return the sum within a timeout
    or meet the confidence.

    >>> rdd = sc.parallelize(range(1000), 10)
    >>> r = sum(range(1000))
    >>> abs(rdd.sumApprox(1000) - r) / r < 0.05
    True
    """
    jrdd = self.mapPartitions(lambda it: [float(sum(it))])._to_java_object_rdd()
    jdrdd = self.ctx._jvm.JavaDoubleRDD.fromRDD(jrdd.rdd())
    r = jdrdd.sumApprox(timeout, confidence).getFinalValue()
    return BoundedFloat(r.mean(), r.confidence(), r.low(), r.high())
0.004724
def grab_idx(x, i, batch_first: bool = True):
    "Grab the `i`-th batch in `x`, `batch_first` stating the batch dimension."
    if batch_first:
        return ([o[i].cpu() for o in x] if is_listy(x) else x[i].cpu())
    else:
        return ([o[:, i].cpu() for o in x] if is_listy(x) else x[:, i].cpu())
0.047782
def filter_reads(self, input_bam, output_bam, metrics_file, paired=False, cpus=16, Q=30):
    """
    Remove duplicates, filter for >Q, remove multiple mapping reads.
    For paired-end reads, keep only proper pairs.
    """
    nodups = re.sub(r"\.bam$", "", output_bam) + ".nodups.nofilter.bam"

    cmd1 = self.tools.sambamba + " markdup -t {0} -r --compression-level=0 {1} {2} 2> {3}".format(cpus, input_bam, nodups, metrics_file)

    cmd2 = self.tools.sambamba + ' view -t {0} -f bam --valid'.format(cpus)
    if paired:
        cmd2 += ' -F "not (unmapped or mate_is_unmapped) and proper_pair'
    else:
        cmd2 += ' -F "not unmapped'
    cmd2 += ' and not (secondary_alignment or supplementary) and mapping_quality >= {0}"'.format(Q)
    cmd2 += ' {0} |'.format(nodups)
    cmd2 += self.tools.sambamba + " sort -t {0} /dev/stdin -o {1}".format(cpus, output_bam)

    cmd3 = "if [[ -s {0} ]]; then rm {0}; fi".format(nodups)
    cmd4 = "if [[ -s {0} ]]; then rm {0}; fi".format(nodups + ".bai")

    return [cmd1, cmd2, cmd3, cmd4]
0.006329
def plot_mv_voltages(self, **kwargs):
    """
    Plots voltages in MV grid on grid topology plot.

    For more information see :func:`edisgo.tools.plots.mv_grid_topology`.
    """
    if self.network.pypsa is not None:
        try:
            v_res = self.network.results.v_res()
        except:
            logging.warning("Voltages `pfa_v_mag_pu` from power flow "
                            "analysis must be available to plot them.")
            return
        plots.mv_grid_topology(
            self.network.pypsa, self.network.config,
            timestep=kwargs.get('timestep', None),
            node_color='voltage',
            filename=kwargs.get('filename', None),
            grid_district_geom=kwargs.get('grid_district_geom', True),
            background_map=kwargs.get('background_map', True),
            voltage=v_res,
            limits_cb_nodes=kwargs.get('limits_cb_nodes', None),
            xlim=kwargs.get('xlim', None),
            ylim=kwargs.get('ylim', None),
            title=kwargs.get('title', ''))
    else:
        logging.warning("pypsa representation of MV grid needed to "
                        "plot voltages.")
0.002408
def guard_retract(worksheet):
    """Return whether the transition retract can be performed or not to the
    worksheet passed in. Since the retract transition from worksheet is a
    shortcut to retract transitions from all analyses the worksheet contains,
    this guard only returns True if retract transition is allowed for all
    analyses the worksheet contains
    """
    analyses = worksheet.getAnalyses()
    detached = ['rejected', 'retracted']
    num_detached = 0
    for analysis in analyses:
        if api.get_workflow_status_of(analysis) in detached:
            num_detached += 1
        elif not isTransitionAllowed(analysis, "retract"):
            return False
    return analyses and num_detached < len(analyses) or False
0.001346
def factors(self, rank):
    """Returns KTensor factors for models with specified rank."""
    self._check_rank(rank)
    return [result.factors for result in self.results[rank]]
0.01005
def expand_as_args(args):
    """Returns `True` if `args` should be expanded as `*args`."""
    return (isinstance(args, collections.Sequence) and
            not _is_namedtuple(args) and not _force_leaf(args))
0.014706
def stream_file(self, path, fast_lane=True):
    """
    Create a temp file, stream it to the server if online and append its
    content using the write() method.

    This makes sure that we have all the newest data of this file on the
    server directly. At the end of the job, the content the server received
    is stored as a git blob on the server. It is then committed locally and
    pushed. Git detects that the server already has the version (through the
    continuous streaming) and won't push it again. Very handy for rather
    large files that will append over time (like channel data, logs).

    Example:

        self.log_stream = git.stream_file('log.txt')

        self.log_stream.write("new line\n");
        self.log_stream.write("another line\n");
    """
    # create temp file
    # open temp file
    # register stream file and write locally
    # on end() git_commit that file locally

    # create socket connection to server
    # stream file to server
    # on end() send server end signal, so it can store its content in git as blob as well.
    # A git push would detect that both sides have the same content already,
    # except when server connection broke between start() and end().
    # Result -> already transmitted logs/channel data (probably many MBs) won't be transferred
    # twice when doing a git-push.

    # return handler to write to this file
    full_path = os.path.normpath(self.temp_path + '/stream-blob/' + self.job_id + '/' + path)
    if not os.path.exists(os.path.dirname(full_path)):
        os.makedirs(os.path.dirname(full_path))

    handle = open(full_path, 'wb')
    self.streamed_files[path] = handle

    class Stream():
        def __init__(self, git):
            self.git = git

        def write(self, data):
            if path not in self.git.streamed_files:
                # already committed to server
                return

            if hasattr(data, 'encode'):
                data = data.encode("utf-8", 'replace')

            try:
                self.git.stream_files_lock.acquire()
                if not handle.closed:
                    handle.write(data)
                    handle.flush()
            except IOError as e:
                handle.close()
                if 'No space left' in e.__str__():
                    sys.stderr.write(traceback.format_exc() + '\n')
                    self.git.logger.error(e.__str__())
            finally:
                self.git.stream_files_lock.release()

            if self.git.client.online is not False:
                self.git.client.send({'type': 'stream-blob', 'path': path, 'data': data}, channel='' if fast_lane else 'files')

    return Stream(self)
0.00619
def create_key(key_type='RSA',
               key_length=1024,
               name_real='Autogenerated Key',
               name_comment='Generated by SaltStack',
               name_email=None,
               subkey_type=None,
               subkey_length=None,
               expire_date=None,
               use_passphrase=False,
               user=None,
               gnupghome=None):
    '''
    Create a key in the GPG keychain

    .. note::

        GPG key generation requires *a lot* of entropy and randomness.
        Difficult to do over a remote connection, consider having
        another process available which is generating randomness for
        the machine. Also especially difficult on virtual machines,
        consider the `rng-tools
        <http://www.gnu.org/software/hurd/user/tlecarrour/rng-tools.html>`_
        package.

        The create_key process takes awhile so increasing the timeout
        may be necessary, e.g. -t 15.

    key_type
        The type of the primary key to generate. It must be capable of
        signing. 'RSA' or 'DSA'.

    key_length
        The length of the primary key in bits.

    name_real
        The real name of the user identity which is represented by the key.

    name_comment
        A comment to attach to the user id.

    name_email
        An email address for the user.

    subkey_type
        The type of the secondary key to generate.

    subkey_length
        The length of the secondary key in bits.

    expire_date
        The expiration date for the primary and any secondary key.
        You can specify an ISO date, A number of days/weeks/months/years,
        an epoch value, or 0 for a non-expiring key.

    use_passphrase
        Whether to use a passphrase with the signing key. Passphrase is
        received from Pillar.

    user
        Which user's keychain to access, defaults to user Salt is running as.
        Passing the user as ``salt`` will set the GnuPG home directory to the
        ``/etc/salt/gpgkeys``.

    gnupghome
        Specify the location where GPG keyring and related files are stored.

    CLI Example:

    .. code-block:: bash

        salt -t 15 '*' gpg.create_key

    '''
    ret = {
        'res': True,
        'fingerprint': '',
        'message': ''
    }

    create_params = {'key_type': key_type,
                     'key_length': key_length,
                     'name_real': name_real,
                     'name_comment': name_comment,
                     }

    gpg = _create_gpg(user, gnupghome)

    if name_email:
        create_params['name_email'] = name_email

    if subkey_type:
        create_params['subkey_type'] = subkey_type

    if subkey_length:
        create_params['subkey_length'] = subkey_length

    if expire_date:
        create_params['expire_date'] = expire_date

    if use_passphrase:
        gpg_passphrase = __salt__['pillar.get']('gpg_passphrase')
        if not gpg_passphrase:
            ret['res'] = False
            ret['message'] = "gpg_passphrase not available in pillar."
            return ret
        else:
            create_params['passphrase'] = gpg_passphrase

    input_data = gpg.gen_key_input(**create_params)

    key = gpg.gen_key(input_data)
    if key.fingerprint:
        ret['fingerprint'] = key.fingerprint
        ret['message'] = 'GPG key pair successfully generated.'
    else:
        ret['res'] = False
        ret['message'] = 'Unable to generate GPG key pair.'
    return ret
0.000573
def _getWorkerCommandList(self):
    """Generate the workerCommand as list"""
    c = []
    c.extend(self._WorkerCommand_environment())
    c.extend(self._WorkerCommand_launcher())
    c.extend(self._WorkerCommand_options())
    c.extend(self._WorkerCommand_executable())
    return c
0.006369
def delete(self):
    """
    Deletes this NIO.
    """
    if self._input_filter or self._output_filter:
        yield from self.unbind_filter("both")
    yield from self._hypervisor.send("nio delete {}".format(self._name))
    log.info("NIO {name} has been deleted".format(name=self._name))
0.006231
def zobrist_hash(board: chess.Board, *,
                 _hasher: Callable[[chess.Board], int] = ZobristHasher(POLYGLOT_RANDOM_ARRAY)) -> int:
    """
    Calculates the Polyglot Zobrist hash of the position.

    A Zobrist hash is an XOR of pseudo-random values picked from
    an array. Which values are picked is decided by features of the
    position, such as piece positions, castling rights and en passant
    squares.
    """
    return _hasher(board)
0.004525
def query(cls, select, qb=None):
    """
    :param select: QBO SQL query select statement
    :param qb:
    :return: Returns list
    """
    if not qb:
        qb = QuickBooks()

    json_data = qb.query(select)

    obj_list = []

    if cls.qbo_object_name in json_data["QueryResponse"]:
        for item_json in json_data["QueryResponse"][cls.qbo_object_name]:
            obj_list.append(cls.from_json(item_json))

    return obj_list
0.004057
def update_startup_byteman_script(self, byteman_startup_script):
    """
    Update the byteman startup script, i.e., rule injected before the node starts.

    :param byteman_startup_script: the relative path to the script
    :raise common.LoadError: if the node does not have byteman installed
    """
    if self.byteman_port == '0':
        raise common.LoadError('Byteman is not installed')
    self.byteman_startup_script = byteman_startup_script
    self.import_config_files()
0.005769
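A minimal usage sketch, assuming a ccm-style node object; the rule file path is a made-up example relative to the node's configuration directory.

# inject a hypothetical byteman rule, then start the node so it takes effect
node.update_startup_byteman_script('byteman/fail_reads.btm')
node.start()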
def str_or_unicode(text):
    """
    Handle Python 3 unicode and Python 2.7 byte strings.
    """
    encoding = sys.stdout.encoding
    if sys.version_info > (3, 0):
        return text.encode(encoding).decode(encoding)
    return text.encode(encoding)
0.004082
def toRanks(A):
    """
    converts the columns of A to 0-based integer ranks
    """
    AA = sp.zeros_like(A)
    for i in range(A.shape[1]):
        AA[:, i] = st.rankdata(A[:, i])
    AA = sp.array(sp.around(AA), dtype="int") - 1
    return AA
0.026906
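A small check of the column-wise rank transform above, using the sp/st aliases the function assumes (older SciPy re-exported numpy functions; with newer SciPy, numpy could be substituted for sp):

import scipy as sp
import scipy.stats as st

A = sp.array([[3.0, 10.0],
              [1.0, 30.0],
              [2.0, 20.0]])
print(toRanks(A))
# [[2 0]
#  [0 2]
#  [1 1]]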
def dump(self, file, payload):
    """Dump a JSON object to an open file.

    Writes JSON with 2-space indentation.

    Args:
      file: Open file-like object. Must be open for writing.
      payload: The JSON object to write to file.

    Returns:
      None.
    """
    json.dump(payload, file, indent=2, ensure_ascii=False)
0.005333
def _connect_mitogen_su(spec): """ Return ContextService arguments for su as a first class connection. """ return { 'method': 'su', 'kwargs': { 'username': spec.remote_user(), 'password': spec.password(), 'python_path': spec.python_path(), 'su_path': spec.become_exe(), 'connect_timeout': spec.timeout(), 'remote_name': get_remote_name(spec), } }
0.002165
def reset(self):
    """
    This command sends a reset message to the Arduino. The response tables will be reinitialized.

    :return: No return value.
    """
    # set all output pins to a value of 0
    for pin in range(0, self._command_handler.total_pins_discovered):
        # look up the currently configured mode for this specific pin
        if self._command_handler.digital_response_table[pin][self._command_handler.RESPONSE_TABLE_MODE] \
                == self.PWM:
            self.analog_write(pin, 0)
        elif self._command_handler.digital_response_table[pin][self._command_handler.RESPONSE_TABLE_MODE] \
                == self.SERVO:
            self.analog_write(pin, 0)
        elif self._command_handler.digital_response_table[pin][self._command_handler.RESPONSE_TABLE_MODE] \
                == self.TONE:
            data = [self.TONE_NO_TONE, pin]
            self._command_handler.send_sysex(self._command_handler.TONE_PLAY, data)
        else:
            self.digital_write(pin, 0)

    self._command_handler.system_reset()
0.006718
def get(**kwargs): ''' Return system rc configuration variables CLI Example: .. code-block:: bash salt '*' sysrc.get includeDefaults=True ''' cmd = 'sysrc -v' if 'file' in kwargs: cmd += ' -f '+kwargs['file'] if 'jail' in kwargs: cmd += ' -j '+kwargs['jail'] if 'name' in kwargs: cmd += ' '+kwargs['name'] elif kwargs.get('includeDefaults', False): cmd += ' -A' else: cmd += ' -a' sysrcs = __salt__['cmd.run'](cmd) if "sysrc: unknown variable" in sysrcs: # raise CommandExecutionError(sysrcs) return None ret = {} for sysrc in sysrcs.split("\n"): line_components = sysrc.split(': ') rcfile = line_components[0] if len(line_components) > 2: var = line_components[1] val = line_components[2] else: var = line_components[1].rstrip(':') val = '' if rcfile not in ret: ret[rcfile] = {} ret[rcfile][var] = val return ret
0.000943
def Buscar(self, nro_doc, tipo_doc=80):
    "Returns True if the record was found and sets attributes with its data"
    # cuit: taxpayer's unique tax identification code (without dashes)
    self.cursor.execute("SELECT * FROM padron WHERE "
                        " tipo_doc=? AND nro_doc=?", [tipo_doc, nro_doc])
    row = self.cursor.fetchone()
    for key in [k for k, l, t, d in FORMATO]:
        if row:
            val = row[key]
            if not isinstance(val, basestring):
                val = str(row[key])
            setattr(self, key, val)
        else:
            setattr(self, key, '')
    # compare against the query parameter (an int); the attributes set
    # above are strings, so comparing self.tipo_doc to 80 would never match
    if tipo_doc == 80:
        self.cuit = self.nro_doc
    elif tipo_doc == 96:
        self.dni = self.nro_doc
    # determine the VAT (IVA) category (tentative)
    try:
        cat_iva = int(self.cat_iva)
    except ValueError:
        cat_iva = None
    if cat_iva:
        pass
    elif self.imp_iva in ('AC', 'S'):
        self.cat_iva = 1  # RI
    elif self.imp_iva == 'EX':
        self.cat_iva = 4  # EX
    elif self.monotributo:
        self.cat_iva = 6  # MT
    else:
        self.cat_iva = 5  # CF
    return True if row else False
0.001521
def evaluate(estimator, eval_args): """Runs evaluation on the latest model checkpoint & logs to tensorboard. Args: estimator: A tf.Estimator object. eval_args: Dictionary of {eval_name: (input_fn, eval_steps)} where eval_name is the name of the evaluation set, e.g. "train" or "val", input_fn is an input function returning a tuple (features, labels), and eval_steps is the number of steps for which to evaluate the model. If None, evaluates until input_fn raises an end-of-input exception. Returns: A dict of metric values from the evaluation. May be empty, e.g. if the training job has not yet saved a checkpoint or the checkpoint is deleted by the time the TPU worker initializes. """ values = {} # Default return value if evaluation fails. checkpoint_path = estimator.latest_checkpoint() if not checkpoint_path: # This is expected if the training job has not yet saved a checkpoint. return values tf.logging.info("Starting evaluation on checkpoint %s", checkpoint_path) for eval_name in eval_args: input_fn, eval_steps = eval_args[eval_name] metric_values = estimator.evaluate( input_fn, steps=eval_steps, name=eval_name, checkpoint_path=checkpoint_path) for key, val in metric_values.iteritems(): values[eval_name + "/" + key] = val tf.logging.info(values) return values
0.008541
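A hedged sketch of how the eval_args mapping above might be assembled; train_input_fn and val_input_fn are hypothetical input functions returning (features, labels) tuples.

eval_args = {
    "train": (train_input_fn, 100),  # evaluate 100 steps of training data
    "val": (val_input_fn, None),     # evaluate val set until end-of-input
}
values = evaluate(estimator, eval_args)
# values might then contain keys like "train/loss" and "val/loss"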
def __checkTextMatches(self, folders, items, buffer, windowInfo, immediate=False): """ Check for an abbreviation/predictive match among the given folder and items (scripts, phrases). @return: a tuple possibly containing an item to execute, or a menu to show """ itemMatches = [] folderMatches = [] for item in items: if item.check_input(buffer, windowInfo): if not item.prompt and immediate: return item, None else: itemMatches.append(item) for folder in folders: if folder.check_input(buffer, windowInfo): folderMatches.append(folder) break # There should never be more than one folder match anyway if self.__menuRequired(folderMatches, itemMatches, buffer): self.lastStackState = buffer #return (None, PopupMenu(self, folderMatches, itemMatches)) return None, (folderMatches, itemMatches) elif len(itemMatches) == 1: self.lastStackState = buffer return itemMatches[0], None else: return None, None
0.005838
def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613 ''' Find the first file to match the path and ref, read the file out of git and send the path to the newly cached file ''' return _gitfs().find_file(path, tgt_env=tgt_env, **kwargs)
0.003663
def goto_definitions(self):
    """
    Return the definition of the symbol under the cursor via exact match.
    Goes to that definition with a buffer.
    """
    element = self._evaluator.get_definition()
    if element is not None:
        return BaseDefinition(self._user_context, element)
    else:
        return None
0.005525
def autohard(equation):
    """
    Automatically solve a hard maths problem.

    :type equation: string
    :param equation: The equation to solve.

    >>> autohard("log 10")
    2.302585092994046
    """
    try:
        # Try to set a variable to an integer
        num1 = int(equation.split(" ")[1])
    except ValueError:
        # Try to set a variable to a decimal
        num1 = float(equation.split(" ")[1])

    # If the lowercase version of the operation equals 'log'
    if equation.split(" ")[0].lower() == "log":
        # Return the answer
        return math.log(num1)
    # If the lowercase version of the operation equals 'acos'
    elif equation.split(" ")[0].lower() == "acos":
        # Return the answer
        return math.acos(num1)
    # If the lowercase version of the operation equals 'asin'
    elif equation.split(" ")[0].lower() == "asin":
        # Return the answer
        return math.asin(num1)
    # If the lowercase version of the operation equals 'atan'
    elif equation.split(" ")[0].lower() == "atan":
        # Return the answer
        return math.atan(num1)
    # If the lowercase version of the operation equals 'cos'
    elif equation.split(" ")[0].lower() == "cos":
        # Return the answer
        return math.cos(num1)
    # If the lowercase version of the operation equals 'hypot'
    elif equation.split(" ")[0].lower() == "hypot":
        try:
            # Try to set a variable to an integer
            num2 = int(equation.split(" ")[2])
        except ValueError:
            # Try to set a variable to a decimal
            num2 = float(equation.split(" ")[2])
        # Return the answer
        return math.hypot(num1, num2)
    # If the lowercase version of the operation equals 'sin'
    elif equation.split(" ")[0].lower() == "sin":
        # Return the answer
        return math.sin(num1)
    # If the lowercase version of the operation equals 'tan'
    elif equation.split(" ")[0].lower() == "tan":
        # Return the answer
        return math.tan(num1)
    # Raise an error for an unsupported operation
    raise ValueError("Invalid operation entered.")
0.000474
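Per the code above, the "hypot" branch reads a second operand from the equation string, so it can be exercised the same way as the doctest:

>>> autohard("hypot 3 4")
5.0
>>> autohard("cos 0")
1.0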
def produce(self, obj, val, ctx=None):
    """ factory function to create primitives

    :param pyswagger.spec.v2_0.objects.Schema obj: spec to construct primitives
    :param val: value to construct primitives

    :return: the created primitive
    """
    val = obj.default if val == None else val
    if val == None:
        return None

    obj = deref(obj)
    ctx = {} if ctx == None else ctx
    if 'name' not in ctx and hasattr(obj, 'name'):
        ctx['name'] = obj.name
    if 'guard' not in ctx:
        ctx['guard'] = CycleGuard()
    if 'addp_schema' not in ctx:
        # Schema Object of additionalProperties
        ctx['addp_schema'] = None
    if 'addp' not in ctx:
        # additionalProperties
        ctx['addp'] = False
    if '2nd_pass' not in ctx:
        # 2nd pass processing function
        ctx['2nd_pass'] = None
    if 'factory' not in ctx:
        # primitive factory
        ctx['factory'] = self
    if 'read' not in ctx:
        # default is in 'read' context
        ctx['read'] = True

    # cycle guard
    ctx['guard'].update(obj)

    ret = None
    if obj.type:
        creater, _2nd = self.get(_type=obj.type, _format=obj.format)
        if not creater:
            raise ValueError('Can\'t resolve type from:(' + str(obj.type) + ', ' + str(obj.format) + ')')

        ret = creater(obj, val, ctx)
        if _2nd:
            val = _2nd(obj, ret, val, ctx)
            ctx['2nd_pass'] = _2nd
    elif len(obj.properties) or obj.additionalProperties:
        ret = Model()
        val = ret.apply_with(obj, val, ctx)

    if isinstance(ret, (Date, Datetime, Byte, File)):
        # it's meaningless to handle allOf for these types.
        return ret

    def _apply(o, r, v, c):
        if hasattr(ret, 'apply_with'):
            v = r.apply_with(o, v, c)
        else:
            _2nd = c['2nd_pass']
            if _2nd == None:
                _, _2nd = self.get(_type=o.type, _format=o.format)
            if _2nd:
                _2nd(o, r, v, c)

            # update it back to context
            c['2nd_pass'] = _2nd

        return v

    # handle allOf for Schema Object
    allOf = getattr(obj, 'allOf', None)
    if allOf:
        not_applied = []
        for a in allOf:
            a = deref(a)
            if not ret:
                # try to find right type for this primitive.
                ret = self.produce(a, val, ctx)
                is_member = hasattr(ret, 'apply_with')
            else:
                val = _apply(a, ret, val, ctx)

            if not ret:
                # if we still can't determine the type,
                # keep this Schema object for later use.
                not_applied.append(a)
        if ret:
            for a in not_applied:
                val = _apply(a, ret, val, ctx)

    if ret != None and hasattr(ret, 'cleanup'):
        val = ret.cleanup(val, ctx)

    return ret
0.002807
def get_all_licenses(self): """Retrieve license type, key, installation date, etc.""" data = self._execute_command('GET_ALL_LICENSES', 'RIB_INFO', 'read') d = {} for key, val in data['GET_ALL_LICENSES']['LICENSE'].items(): if isinstance(val, dict): d[key] = data['GET_ALL_LICENSES']['LICENSE'][key]['VALUE'] return d
0.005208
def check_completeness_table(completeness_table, catalogue):
    '''
    Check to ensure the completeness table is in the correct format,
    ``completeness_table = np.array([[year_i, mag_i]])`` for i in the number of bins

    :param np.ndarray completeness_table:
        Completeness table in format [[year, mag]]
    :param catalogue:
        Instance of openquake.hmtk.seismicity.catalogue.Catalogue class

    :returns:
        Correct completeness table
    '''
    if isinstance(completeness_table, np.ndarray):
        assert np.shape(completeness_table)[1] == 2
        return completeness_table
    elif isinstance(completeness_table, list):
        # Assuming list has only two elements
        assert len(completeness_table) == 2
        return np.array([[completeness_table[0], completeness_table[1]]])
    else:
        # Accepts the minimum magnitude and earliest year of the catalogue
        return np.array([[np.min(catalogue.data['year']),
                          np.min(catalogue.data['magnitude'])]])
0.000986
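A minimal usage sketch for the validator above; the year/magnitude pairs are made-up completeness bins, and catalogue may be None here because it is only read in the fallback branch.

import numpy as np

table = np.array([[1990.0, 4.0],
                  [1960.0, 5.0],
                  [1900.0, 6.0]])
assert check_completeness_table(table, catalogue=None).shape == (3, 2)

# a two-element list is promoted to a single-row array
assert check_completeness_table([1990.0, 4.0], catalogue=None).shape == (1, 2)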