text (string, lengths 78 to 104k) · score (float64, range 0 to 0.18)
def find_roots(disconnected, index, shared):
    """Find appropriate "root" objects from which to recurse the hierarchies

    Will generate a synthetic root for anything which doesn't have
    any parents...
    """
    log.warn(
        '%s disconnected objects in %s total objects',
        len(disconnected), len(index))
    natural_roots = [
        x for x in disconnected
        if x.get('refs') and not x.get('parents')
    ]
    log.warn('%s objects with no parents at all', len(natural_roots))
    for natural_root in natural_roots:
        recurse_module(natural_root, index, shared)
        yield natural_root
    rest = [x for x in disconnected if x.get('totsize') is None]
    un_found = {
        'type': 'module',
        'name': '<disconnected objects>',
        'children': rest,
        'parents': [],
        'size': 0,
        'totsize': sum([x['size'] for x in rest], 0),
        'address': new_address(index),
    }
    index[un_found['address']] = un_found
    yield un_found
0.017154
def zrem(self, key, *members):
    """Removes the specified members from the sorted set stored at key.
    Non existing members are ignored. An error is returned when key
    exists and does not hold a sorted set.

    .. note:: **Time complexity**: ``O(M*log(N))`` with ``N`` being the
       number of elements in the sorted set and ``M`` the number of
       elements to be removed.

    :param key: The key of the sorted set
    :type key: :class:`str`, :class:`bytes`
    :param members: One or more member values to remove
    :type members: :class:`str`, :class:`bytes`
    :rtype: int
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    return self._execute([b'ZREM', key] + list(members))
0.002574
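A minimal usage sketch for the `zrem` wrapper above; it assumes an already-connected tredis-style client named `client`, and the key and member names are illustrative:

# assumes `client` is a connected client exposing the zrem method above
future = client.zrem('myset', 'member1', 'member2')
# equivalent Redis command sent over the wire: ZREM myset member1 member2
# the resolved result is the number of members actually removed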
def clear_highlight(self, src_id, line_start=0, line_end=-1, async_=None,
                    **kwargs):
    """Clear highlights from the buffer."""
    async_ = check_async(async_, kwargs, True)
    self.request('nvim_buf_clear_highlight', src_id, line_start,
                 line_end, async_=async_)
0.009317
import fcntl
import os
import select
import subprocess

def _run_popen(command, print_output=False):
    """
    subprocess has the most terrible interface ever. Envoy is an option
    but too heavyweight for this. This is a convenience wrapper around
    subprocess.Popen.

    Also, this merges STDOUT and STDERR together, since there isn't a
    good way of interleaving them without threads.
    """
    output = ''
    po = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    fcntl.fcntl(
        po.stdout.fileno(),
        fcntl.F_SETFL,
        fcntl.fcntl(po.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK,
    )
    while po.poll() is None:
        stream = po.stdout
        readx = select.select([stream.fileno()], [], [])[0]
        if readx:
            chunk = stream.read()
            output += chunk
            if print_output:
                print(chunk)
    return Result(output, po.returncode)
0.001083
def set_tunnel(self, host, port):
    '''Sets up the host and the port for the HTTP CONNECT tunnelling.'''
    url = host
    if port:
        url = url + u':' + port
    var_host = VARIANT.create_bstr_from_str(url)
    var_empty = VARIANT.create_empty()
    _WinHttpRequest._SetProxy(
        self, HTTPREQUEST_PROXYSETTING_PROXY, var_host, var_empty)
0.005168
import os

def get_num_part_files():
    """Get the number of PART.html files currently saved to disk."""
    num_parts = 0
    for filename in os.listdir(os.getcwd()):
        if filename.startswith('PART') and filename.endswith('.html'):
            num_parts += 1
    return num_parts
0.003623
def completion():
    """Output completion (to be eval'd).

    For bash or zsh, add the following to your .bashrc or .zshrc:

        eval "$(doitlive completion)"

    For fish, add the following to ~/.config/fish/completions/doitlive.fish:

        eval (doitlive completion)
    """
    shell = env.get("SHELL", None)
    if shell:
        echo(
            click_completion.get_code(
                shell=shell.split(os.sep)[-1], prog_name="doitlive"
            )
        )
    else:
        echo(
            "Please ensure that the {SHELL} environment "
            "variable is set.".format(SHELL=style("SHELL", bold=True))
        )
        sys.exit(1)
0.001471
import os
import vtk

def set_error_output_file(filename):
    """Sets a file to write out the VTK errors."""
    filename = os.path.abspath(os.path.expanduser(filename))
    fileOutputWindow = vtk.vtkFileOutputWindow()
    fileOutputWindow.SetFileName(filename)
    outputWindow = vtk.vtkOutputWindow()
    outputWindow.SetInstance(fileOutputWindow)
    return fileOutputWindow, outputWindow
0.00271
def add_source(self, name, src_dict, free=None, save_source_maps=True,
               use_pylike=True, use_single_psf=False):
    """Add a new source to the model.  Source properties
    (spectrum, spatial model) are set with the src_dict argument.

    Parameters
    ----------
    name : str
        Source name.
    src_dict : dict or `~fermipy.roi_model.Source` object
        Dictionary or Source object defining the properties of the
        source.
    free : bool
        Initialize the source with the normalization parameter free.
    save_source_maps : bool
        Write the source map for this source to the source maps file.
    use_pylike : bool
    use_single_psf : bool
    """
    # if self.roi.has_source(name):
    #     msg = 'Source %s already exists.' % name
    #     self.logger.error(msg)
    #     raise Exception(msg)

    srcmap_utils.delete_source_map(self.files['srcmap'], name)

    src = self.roi[name]
    if self.config['gtlike']['expscale'] is not None and \
            name not in self._src_expscale:
        self._src_expscale[name] = self.config['gtlike']['expscale']

    if self._like is None:
        return

    if not use_pylike:
        self._update_srcmap_file([src], True)

    pylike_src = self._create_source(src)

    # Initialize source as free/fixed
    if free is not None:
        pylike_src.spectrum().normPar().setFree(free)

    if hasattr(pyLike, 'PsfIntegConfig') and \
            hasattr(pyLike.PsfIntegConfig, 'set_use_single_psf'):
        config = pyLike.BinnedLikeConfig(self.like.logLike.config())
        config.psf_integ_config().set_use_single_psf(use_single_psf)
        self.like.addSource(pylike_src, config)
    else:
        self.like.addSource(pylike_src)

    self.like.syncSrcParams(str(name))
    self.like.logLike.buildFixedModelWts()
    if save_source_maps and \
            not self.config['gtlike']['use_external_srcmap']:
        self.like.logLike.saveSourceMaps(str(self.files['srcmap']))

    self.set_exposure_scale(name)
0.001361
async def collect_wallets(self, uid):
    """ Asynchronous generator """
    logging.debug(self.types)
    logging.debug(uid)
    for coinid in self.types:
        logging.debug(coinid)
        await asyncio.sleep(0.5)
        # Connect to appropriate database
        database = self.client[self.collection]
        logging.debug(database)
        collection = database[coinid]
        logging.debug(collection)
        # Get wallets
        wallet = await collection.find_one({"uid": int(uid)})
        logging.debug(wallet)
        wallet["amount_active"] = int(wallet["amount_active"])
        wallet["amount_frozen"] = int(wallet["amount_frozen"])
        del wallet["_id"]
        yield wallet
0.0368
def run_experiment(experiment, roleouts, episodes, in_cloud=False,
                   dynProfile=None):
    """Runs the given experiment and returns the results."""
    def run():
        if dynProfile is None:
            maxsteps = len(experiment.profile)  # episode length
        else:
            maxsteps = dynProfile.shape[1]
        na = len(experiment.agents)
        ni = roleouts * episodes * maxsteps

        all_action = zeros((na, 0))
        all_reward = zeros((na, 0))
        epsilon = zeros((na, ni))  # exploration rate

        # Converts to action vector in percentage markup values.
        vmarkup = vectorize(get_markup)

        for roleout in range(roleouts):
            if dynProfile is not None:
                # Apply new load profile before each roleout (week).
                i = roleout * episodes  # index of first profile value
                experiment.profile = dynProfile[i:i + episodes, :]
            # print("PROFILE:", experiment.profile, episodes)

            experiment.doEpisodes(episodes)  # number of samples per learning step

            nei = episodes * maxsteps  # num interactions per roleout
            epi_action = zeros((0, nei))
            epi_reward = zeros((0, nei))

            for i, (task, agent) in \
                    enumerate(zip(experiment.tasks, experiment.agents)):
                action = copy(agent.history["action"])
                reward = copy(agent.history["reward"])

                for j in range(nei):
                    if isinstance(agent.learner, DirectSearchLearner):
                        action[j, :] = task.denormalize(action[j, :])
                        k = nei * roleout
                        epsilon[i, k:k + nei] = agent.learner.explorer.sigma[0]
                    elif isinstance(agent.learner, ValueBasedLearner):
                        action[j, :] = vmarkup(action[j, :], task)
                        k = nei * roleout
                        epsilon[i, k:k + nei] = agent.learner.explorer.epsilon
                    else:
                        action = vmarkup(action, task)

                # FIXME: Only stores action[0] for all interactions.
                epi_action = c_[epi_action.T, action[:, 0].flatten()].T
                epi_reward = c_[epi_reward.T, reward.flatten()].T

                if hasattr(agent, "module"):
                    print("PARAMS:", agent.module.params)

                agent.learn()
                agent.reset()

            all_action = c_[all_action, epi_action]
            all_reward = c_[all_reward, epi_reward]

        return all_action, all_reward, epsilon

    if in_cloud:
        import cloud
        job_id = cloud.call(run, _high_cpu=False)
        result = cloud.result(job_id)
        all_action, all_reward, epsilon = result
    else:
        all_action, all_reward, epsilon = run()

    return all_action, all_reward, epsilon
0.002772
def register_ascii_series_io(array_type, format='txt', identify=True,
                             **defaults):
    """Register ASCII read/write/identify methods for the given array
    """
    def _read(filepath, **kwargs):
        kwgs = defaults.copy()
        kwgs.update(kwargs)
        return read_ascii_series(filepath, array_type=array_type, **kwgs)

    def _write(series, output, **kwargs):
        kwgs = defaults.copy()
        kwgs.update(kwargs)
        return write_ascii_series(series, output, **kwgs)

    io_registry.register_reader(format, array_type, _read)
    io_registry.register_writer(format, array_type, _write)
    if identify:
        io_registry.register_identifier(format, array_type,
                                        identify_factory(format))
0.001282
def start_transmit(self, blocking=False):
    """Start transmit on port.

    :param blocking: True - wait for traffic end, False - return after
        traffic start.
    """
    self.session.start_transmit(blocking, False, self)
0.0125
def build_subtree_strut(self, result, *args, **kwargs):
    """
    Returns a dictionary in form of
    {node: Resource, children: {node_id: Resource}}

    :param result:
    :return:
    """
    return self.service.build_subtree_strut(result=result, *args, **kwargs)
0.006803
def _get_minute_message(self, dt, algo, metrics_tracker):
    """Get a perf message for the given datetime."""
    rvars = algo.recorded_vars

    minute_message = metrics_tracker.handle_minute_close(
        dt,
        self.data_portal,
    )

    minute_message['minute_perf']['recorded_vars'] = rvars
    return minute_message
0.005263
def color_code(self, fore=None, back=None, style=None):
    """Return the codes for this style/colors."""
    # Map from style type to raw code formatter function.
    colorcodes = []
    resetcodes = []
    userstyles = {'style': style, 'back': back, 'fore': fore}
    for stype in userstyles:
        stylearg = userstyles.get(stype, None)
        if not stylearg:
            # No value for this style name, don't use it.
            continue
        # Get escape code for this style.
        code = self.get_escape_code(stype, stylearg)
        stylename = str(stylearg).lower()
        if (stype == 'style') and (stylename in ('0',)):
            resetcodes.append(code)
        elif stylename.startswith('reset'):
            resetcodes.append(code)
        else:
            colorcodes.append(code)
    # Reset codes come first, to not override colors.
    return ''.join((''.join(resetcodes), ''.join(colorcodes)))
0.001982
def upload(self, local_path):
    """Upload a file to the camera's permanent storage.

    :param local_path: Path to file to copy
    :type local_path: str/unicode
    """
    camerafile_p = ffi.new("CameraFile**")
    with open(local_path, 'rb') as fp:
        lib.gp_file_new_from_fd(camerafile_p, fp.fileno())
        lib.gp_camera_folder_put_file(
            self._cam._cam, self.path.encode() + b"/",
            os.path.basename(local_path).encode(),
            backend.FILE_TYPES['normal'], camerafile_p[0], self._cam.ctx)
0.003367
def mousePressEvent(self, event):
    """
    Make sure on a mouse press event that we have a current item.  If
    no item is current, then our edit item will become current.

    :param      event | <QMouseEvent>
    """
    item = self.itemAt(event.pos())

    # set the tag creation item as active
    if item is None:
        create_item = self.createItem()
        if create_item:
            self.setCurrentItem(create_item)
            self.editItem(create_item)

    # check to see if we're removing a tag
    else:
        rect = self.visualItemRect(item)
        if rect.right() - 14 < event.pos().x():
            # make sure the item is allowed to be removed via the widget
            if self.itemsRemovable():
                self.takeItem(self.row(item))

                # emit the removed signal
                if not self.signalsBlocked():
                    self.tagRemoved.emit(item.text())

            event.ignore()
            return

    super(XMultiTagEdit, self).mousePressEvent(event)
0.013093
def padded_ds(ll_input, size=(250, 300), resize_method=ResizeMethod.CROP,
              padding_mode='zeros', **kwargs):
    "For a LabelList `ll_input`, resize each image to `size` using `resize_method` and `padding_mode`."
    return ll_input.transform(tfms=crop_pad(), size=size,
                              resize_method=resize_method,
                              padding_mode=padding_mode)
0.01506
def from_str(cls: Type[BlockUIDType], blockid: str) -> BlockUIDType:
    """
    :param blockid: The block id
    """
    data = BlockUID.re_block_uid.match(blockid)
    if data is None:
        raise MalformedDocumentError("BlockUID")

    try:
        number = int(data.group(1))
    except AttributeError:
        raise MalformedDocumentError("BlockUID")

    try:
        sha_hash = data.group(2)
    except AttributeError:
        raise MalformedDocumentError("BlockHash")

    return cls(number, sha_hash)
0.003509
def registerAugmentation(self, *names):
    """Register table extension.

    SNMP SMI provides a way to extend already existing SMI table with
    another table. This method registers dependent (extending) table (or
    type :py:class:`MibTableRow`) to already existing table.

    Whenever a row of the parent table is created or destroyed, the same
    mass columnar operation is applied on the extending table row.

    Parameters
    ----------
    names: :py:class:`tuple`
        One or more `tuple`'s of `str` referring to the extending table
        by MIB module name (first `str`) and :py:class:`MibTableRow`
        object name (second `str`).
    """
    for name in names:
        if name in self._augmentingRows:
            raise error.SmiError(
                'Row %s already augmented by %s::%s' % (
                    self.name, name[0], name[1])
            )
        self._augmentingRows.add(name)
    return self
0.002982
def _create_combined_words(words, startindex):
    """
    Helper for create_match_bool, used to combine words inside single
    quotes from a list into a single string.

    :param words: List of words.
    :param startindex: Index where search is started.
    :return: (str, int) or (None, 0) if no closing quote is found.
    """
    for i, word in enumerate(words[startindex + 1:]):
        if "'" in word:
            return " ".join(words[startindex:startindex + i + 2]), i + 1
    return None, 0
0.004057
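A worked example of the quote-combining helper above, traced directly through its code path:

words = ["name", "=", "'John", "Doe'", "and", "age"]
combined, skipped = _create_combined_words(words, 2)
print(combined)  # "'John Doe'" - words 2..3 rejoined into one quoted token
print(skipped)   # 1 - extra words consumed after the start index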
def sign(key, qs):
    """Signs the query string using the key."""
    sig = derive_signature(key, qs)
    return "%s&%s" % (qs, urlencode([("sig", sig)]))
0.006452
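A hedged usage sketch for `sign` above; `derive_signature` lives elsewhere in the source module, so the signature value shown is illustrative only:

signed = sign('secret-key', 'user=alice&action=delete')
# e.g. 'user=alice&action=delete&sig=9f2b...' - the "sig" value is
# URL-encoded, so the result can be appended to a request URL as-is.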
def _create_model_by_type(self, type):
    """
    Create a new model instance by type.

    :rtype: Model
    """
    klass = None

    for cls in eloquent.orm.model.Model.__subclasses__():
        morph_class = cls.__morph_class__ or cls.__name__
        if morph_class == type:
            klass = cls
            break

    return klass()
0.005195
async def is_anonymous(request):
    """Check if user is anonymous.

    User is considered anonymous if there is no identity in the request.
    """
    identity_policy = request.config_dict.get(IDENTITY_KEY)
    if identity_policy is None:
        return True
    identity = await identity_policy.identify(request)
    if identity is None:
        return True
    return False
0.002639
def print_data(zone_id, connection):
    """fetch data from database (use zone_id if not empty/None) and print
    to console"""
    result = connection.execute(
        # explicitly pass zone id before related data
        select([cast(zone_id.encode('utf-8'), BYTEA), test_table]))
    result = result.fetchall()
    ZONE_ID_INDEX = 0
    print("use zone_id: ", zone_id)
    print("{:<3} - {} - {} - {}".format("id", 'zone', "data", "raw_data"))
    for row in result:
        print(
            "{:<3} - {} - {} - {}\n".format(
                row['id'],
                row[ZONE_ID_INDEX].decode('utf-8'),
                row['data'].decode('utf-8', errors='ignore'),
                row['raw_data']))
0.004511
def item(*args, **kwargs):
    '''
    Return one or more grains

    CLI Example:

    .. code-block:: bash

        salt '*' grains.item os
        salt '*' grains.item os osrelease oscodename

    Sanitized CLI Example:

    .. code-block:: bash

        salt '*' grains.item host sanitize=True
    '''
    ret = {}
    for arg in args:
        try:
            ret[arg] = __grains__[arg]
        except KeyError:
            pass
    if salt.utils.data.is_true(kwargs.get('sanitize')):
        for arg, func in six.iteritems(_SANITIZERS):
            if arg in ret:
                ret[arg] = func(ret[arg])
    return ret
0.001597
def _smartos_computenode_data():
    '''
    Return useful information from a SmartOS compute node
    '''
    # Provides:
    #   vms_total
    #   vms_running
    #   vms_stopped
    #   vms_type
    #   sdc_version
    #   vm_capable
    #   vm_hw_virt
    grains = {}

    # collect vm data
    vms = {}
    for vm in __salt__['cmd.run']('vmadm list -p -o uuid,alias,state,type').split("\n"):
        vm = dict(list(zip(['uuid', 'alias', 'state', 'type'], vm.split(':'))))
        vms[vm['uuid']] = vm
        del vms[vm['uuid']]['uuid']

    # set vm grains
    grains['computenode_vms_total'] = len(vms)
    grains['computenode_vms_running'] = 0
    grains['computenode_vms_stopped'] = 0
    grains['computenode_vms_type'] = {'KVM': 0, 'LX': 0, 'OS': 0}
    for vm in vms:
        if vms[vm]['state'].lower() == 'running':
            grains['computenode_vms_running'] += 1
        elif vms[vm]['state'].lower() == 'stopped':
            grains['computenode_vms_stopped'] += 1
        if vms[vm]['type'] not in grains['computenode_vms_type']:
            # NOTE: be prepared for when bhyve gets its own type
            grains['computenode_vms_type'][vms[vm]['type']] = 0
        grains['computenode_vms_type'][vms[vm]['type']] += 1

    # sysinfo derived grains
    sysinfo = salt.utils.json.loads(__salt__['cmd.run']('sysinfo'))
    grains['computenode_sdc_version'] = sysinfo['SDC Version']
    grains['computenode_vm_capable'] = sysinfo['VM Capable']
    if sysinfo['VM Capable']:
        grains['computenode_vm_hw_virt'] = sysinfo['CPU Virtualization']

    # sysinfo derived smbios grains
    grains['manufacturer'] = sysinfo['Manufacturer']
    grains['productname'] = sysinfo['Product']
    grains['uuid'] = sysinfo['UUID']

    return grains
0.001138
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """
    Read the data encoding the RevokeRequestPayload object and decode it
    into its constituent parts.

    Args:
        istream (Stream): A data stream containing encoded object data,
            supporting a read method; usually a BytearrayStream object.
        kmip_version (KMIPVersion): An enumeration defining the KMIP
            version with which the object will be decoded. Optional,
            defaults to KMIP 1.0.
    """
    super(RevokeRequestPayload, self).read(
        istream,
        kmip_version=kmip_version
    )
    tstream = BytearrayStream(istream.read(self.length))

    self.unique_identifier = attributes.UniqueIdentifier()
    self.unique_identifier.read(tstream, kmip_version=kmip_version)

    self.revocation_reason = objects.RevocationReason()
    self.revocation_reason.read(tstream, kmip_version=kmip_version)

    if self.is_tag_next(enums.Tags.COMPROMISE_OCCURRENCE_DATE, tstream):
        self.compromise_occurrence_date = primitives.DateTime(
            tag=enums.Tags.COMPROMISE_OCCURRENCE_DATE)
        self.compromise_occurrence_date.read(
            tstream,
            kmip_version=kmip_version
        )

    self.is_oversized(tstream)
    self.validate()
0.001435
def make_xeditable(instance=None, extra_attrs=[], *args, **kwargs):
    """
    Converts the contents of the column into an ``<a>`` tag with the
    required DOM attributes to power the X-Editable UI.

    The following keyword arguments are all optional, but may be provided
    when pre-calling the helper, to customize the output of the helper
    once it is run per object record:

    * ``type`` - Defaults to the basic type of the HTML input ("text",
      "number", "datetime")
    * ``title`` - Defaults to an empty string, controls the HTML "title"
      attribute.
    * ``placeholder`` - Defaults to whatever "title" is, controls the
      HTML "placeholder" attribute.
    * ``url`` - Defaults to the ``request.path`` of the view, which will
      automatically serve the X-Editable interface as long as it
      inherits from ``XEditableDatatableView``.
    * ``source`` - Defaults to the ``request.path`` of the view, which
      will automatically serve X-Editable requests for ``choices`` data
      about a field.

    Supplying a list of names via ``extra_attrs`` will enable arbitrary
    other keyword arguments to be rendered in the HTML as attributes as
    well.  ``extra_attrs`` serves as a whitelist of extra names so that
    unintended kwargs don't get rendered without your permission.
    """
    if instance is None:
        # Preloading kwargs into the helper for deferred execution
        helper = partial(make_xeditable, extra_attrs=extra_attrs, *args, **kwargs)
        return helper

    # Immediate finalization, return the xeditable structure
    data = kwargs.get('default_value', instance)
    rich_data = kwargs.get('rich_value', data)

    # Compile values to appear as "data-*" attributes on the anchor tag
    default_attr_names = ['pk', 'type', 'url', 'source', 'title', 'placeholder']
    valid_attr_names = set(default_attr_names + list(extra_attrs))
    attrs = {}
    for k, v in kwargs.items():
        if k in valid_attr_names:
            if k.startswith('data_'):
                k = k[5:]
            attrs['data-{0}'.format(k)] = v

    attrs['data-xeditable'] = "xeditable"

    # Assign default values where they are not provided
    field_name = kwargs['field_name']  # sent as a default kwarg to helpers
    if isinstance(field_name, (tuple, list)):
        # Legacy syntax
        field_name = field_name[1]
        if isinstance(field_name, (tuple, list)):
            raise ValueError("'make_xeditable' helper needs a single-field "
                             "data column, not {0!r}".format(field_name))
    attrs['data-name'] = field_name

    if isinstance(rich_data, Model):
        attrs['data-value'] = rich_data.pk
    else:
        attrs['data-value'] = rich_data

    if 'data-pk' not in attrs:
        attrs['data-pk'] = instance.pk

    if 'data-url' not in attrs:
        # Look for a backup data-url
        provider_name = 'get_update_url'
        url_provider = getattr(kwargs.get('view'), provider_name, None)
        if not url_provider:
            url_provider = getattr(instance, provider_name, None)
            if not url_provider and 'view' in kwargs:
                url_provider = lambda field_name: kwargs['view'].request.path
            else:
                raise ValueError("'make_xeditable' cannot determine a value for 'url'.")
        if url_provider:
            attrs['data-url'] = url_provider(field_name=field_name)

    if 'data-placeholder' not in attrs:
        attrs['data-placeholder'] = attrs.get('data-title', "")

    if 'data-type' not in attrs:
        if hasattr(instance, '_meta'):
            # Try to fetch a reasonable type from the field's class
            if field_name == 'pk':  # special field name not in Model._meta.fields
                field = instance._meta.pk
            else:
                field = resolve_orm_path(instance, field_name)

            if field.choices:
                field_type = 'select'
            else:
                field_type = XEDITABLE_FIELD_TYPES.get(field.get_internal_type(), 'text')
        else:
            field_type = 'text'
        attrs['data-type'] = field_type

    # type=select elements need to fetch their valid choice options from
    # an AJAX endpoint.  Register the view for this lookup.
    if attrs['data-type'] in ('select', 'select2'):
        if 'data-source' not in attrs:
            if 'view' in kwargs:
                attrs['data-source'] = "{url}?{field_param}={fieldname}".format(**{
                    'url': kwargs['view'].request.path,
                    'field_param': kwargs['view'].xeditable_fieldname_param,
                    'fieldname': field_name,
                })
                if attrs['data-type'] == 'select2':
                    attrs['data-source'] += '&select2=true'
            else:
                raise ValueError("'make_xeditable' cannot determine a value for 'source'.")

        # Choice fields will want to display their readable label instead of db data
        data = getattr(instance, 'get_{0}_display'.format(field_name), lambda: data)()

    data = u"""<a href="#"{attrs}>{data}</a>""".format(attrs=flatatt(attrs), data=data)
    return data
0.004669
def _instantiate_task(api, kwargs):
    """Create a Task object from raw kwargs"""
    file_id = kwargs['file_id']
    kwargs['file_id'] = file_id if str(file_id).strip() else None
    kwargs['cid'] = kwargs['file_id'] or None
    kwargs['rate_download'] = kwargs['rateDownload']
    kwargs['percent_done'] = kwargs['percentDone']
    kwargs['add_time'] = get_utcdatetime(kwargs['add_time'])
    kwargs['last_update'] = get_utcdatetime(kwargs['last_update'])
    is_transferred = (kwargs['status'] == 2 and kwargs['move'] == 1)
    if is_transferred:
        kwargs['pid'] = api.downloads_directory.cid
    else:
        kwargs['pid'] = None
    del kwargs['rateDownload']
    del kwargs['percentDone']
    if 'url' in kwargs:
        if not kwargs['url']:
            kwargs['url'] = None
    else:
        kwargs['url'] = None
    task = Task(api, **kwargs)
    if is_transferred:
        task._parent = api.downloads_directory
    return task
0.001058
def widgets(self):
    '''
    List widgets whose filter returns True for this node (or widgets
    without a filter).

    Remove button is prepended if :property:can_remove returns true.

    :returns: list of widgets
    :rtype: list of namedtuple instances
    '''
    widgets = []
    if self.can_remove:
        widgets.append(
            self.plugin_manager.create_widget(
                'entry-actions',
                'button',
                file=self,
                css='remove',
                endpoint='remove'
            )
        )
    return widgets + self.plugin_manager.get_widgets(file=self)
0.002933
def _update_data(self):
    """Update the internal data values."""
    _old = self.folds
    self.folds = _get_fold_levels(self.editor)

    # only update our dropdown lists if the folds have changed.
    if self.folds != _old:
        self.classes, self.funcs = _split_classes_and_methods(self.folds)
        self.populate_dropdowns()
0.00551
def to_sa_pair_form(self, sparse=True):
    """
    Convert this instance of `DiscreteDP` to SA-pair form

    Parameters
    ----------
    sparse : bool, optional(default=True)
        Should the `Q` matrix be stored as a sparse matrix?
        If true the CSR format is used

    Returns
    -------
    ddp_sa : DiscreteDP
        The corresponding DiscreteDP instance in SA-pair form

    Notes
    -----
    If this instance is already in SA-pair form then it is returned
    un-modified
    """
    if self._sa_pair:
        return self
    else:
        s_ind, a_ind = np.where(self.R > - np.inf)
        RL = self.R[s_ind, a_ind]
        if sparse:
            QL = sp.csr_matrix(self.Q[s_ind, a_ind])
        else:
            QL = self.Q[s_ind, a_ind]
        return DiscreteDP(RL, QL, self.beta, s_ind, a_ind)
0.00216
def _build_commands(self, ip_dest, next_hop, **kwargs):
    """Build the EOS command string for ip route interactions.

    Args:
        ip_dest (string): The ip address of the destination in the
            form of A.B.C.D/E
        next_hop (string): The next hop interface or ip address
        **kwargs['next_hop_ip'] (string): The next hop address on
            destination interface
        **kwargs['distance'] (string): Administrative distance for this
            route
        **kwargs['tag'] (string): Route tag
        **kwargs['route_name'] (string): Route name

    Returns the ip route command string to be sent to the switch for
    the given set of parameters.
    """
    commands = "ip route %s %s" % (ip_dest, next_hop)

    next_hop_ip = kwargs.get('next_hop_ip', None)
    distance = kwargs.get('distance', None)
    tag = kwargs.get('tag', None)
    route_name = kwargs.get('route_name', None)

    if next_hop_ip is not None:
        commands += " %s" % next_hop_ip
    if distance is not None:
        commands += " %s" % distance
    if tag is not None:
        commands += " tag %s" % tag
    if route_name is not None:
        commands += " name %s" % route_name

    return commands
0.001508
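An illustration of the command string the builder above produces; `routes` stands in for whatever object exposes `_build_commands`:

cmd = routes._build_commands('10.1.1.0/24', 'Ethernet1',
                             next_hop_ip='10.1.1.254', distance='2',
                             tag='50', route_name='backup')
print(cmd)
# ip route 10.1.1.0/24 Ethernet1 10.1.1.254 2 tag 50 name backup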
def list_nodes_select(nodes, selection, call=None):
    '''
    Return a list of the VMs that are on the provider, with select fields
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_nodes_select function must be called '
            'with -f or --function.'
        )

    if 'error' in nodes:
        raise SaltCloudSystemExit(
            'An error occurred while listing nodes: {0}'.format(
                nodes['error']['Errors']['Error']['Message']
            )
        )

    ret = {}
    for node in nodes:
        pairs = {}
        data = nodes[node]
        for key in data:
            if six.text_type(key) in selection:
                value = data[key]
                pairs[key] = value
        ret[node] = pairs

    return ret
0.001271
def set_as_object(self, *args):
    """
    Sets a new value to map element specified by its index.
    When the index is not defined, it resets the entire map value.
    This method has double purpose because method overrides are not
    supported in JavaScript.

    :param args: objects to set
    """
    if len(args) == 1:
        self.set_as_map(args[0])
    elif len(args) == 2:
        self.put(args[0], args[1])
0.006579
async def services(self, *, dc=None, watch=None, consistency=None):
    """Lists services in a given DC

    Parameters:
        dc (str): Specify datacenter that will be used.
            Defaults to the agent's local datacenter.
        watch (Blocking): Do a blocking query
        consistency (Consistency): Force consistency

    Returns:
        ObjectMeta: where value is a dict

    It returns a JSON body like this::

        {
            "consul": [],
            "redis": [],
            "postgresql": ["master", "slave"]
        }

    The keys are the service names, and the array values provide all
    known tags for a given service.
    """
    params = {"dc": dc}
    response = await self._api.get("/v1/catalog/services",
                                   params=params,
                                   watch=watch,
                                   consistency=consistency)
    return consul(response)
0.001878
def debug(self, text):
    """
    Posts a debug message adding a timestamp and logging level to it for
    both file and console handlers.

    Logger uses a redraw rate because of console flickering. That means
    it will not draw new messages or progress at the very time they are
    being logged but their timestamp will be captured at the right time.
    Logger will redraw at a given time period AND when new messages or
    progress are logged. If you still want to force redraw immediately
    (may produce flickering) then call 'flush' method.

    :param text: The text to log into file and console.
    """
    self.queue.put(dill.dumps(LogMessageCommand(text=text,
                                                level=logging.DEBUG)))
0.009682
def load_scene(self, item):
    """Load scene from json."""
    scene = Scene.from_config(self.pyvlx, item)
    self.add(scene)
0.014388
def attribute(self, name):
    """Expression for an input attribute.

    An input attribute is an attribute on the input port of the
    operator invocation.

    Args:
        name(str): Name of the attribute.

    Returns:
        Expression: Expression representing the input attribute.
    """
    return super(Map, self).attribute(self._inputs[0], name)
0.007389
def compute(self):
    """
    Compute a MaxSAT solution. First, the method checks whether or not
    the set of hard clauses is satisfiable. If not, the method returns
    ``False``. Otherwise, it adds the soft clauses to the oracle and
    calls the MaxSAT algorithm (see :func:`_compute`).

    Note that the soft clauses are added to the oracle after being
    augmented with additional *selector* literals. The selector literals
    are then used as *assumptions* when calling the SAT oracle and are
    needed for extracting unsatisfiable cores.
    """
    if self.oracle.solve():  # hard part is satisfiable
        # create selectors and a mapping from selectors to clause ids
        self.sels, self.vmap = [], {}
        self.scpy = [True for cl in self.soft]

        # adding soft clauses to oracle
        for i in range(len(self.soft)):
            self.topv += 1

            self.soft[i].append(-self.topv)
            self.sels.append(self.topv)
            self.oracle.add_clause(self.soft[i])

            self.vmap[self.topv] = i

        self._compute()
        return True
    else:
        return False
0.001584
def from_stream(klass, stream, header, path=None, use_bgzf=None):
    """Create new :py:class:`Writer` from file

    Note that for getting bgzf support, you have to pass in a stream
    opened in binary mode. Further, you either have to provide a ``path``
    ending in ``".gz"`` or set ``use_bgzf=True``. Otherwise, you will get
    the notorious "TypeError: 'str' does not support the buffer
    interface".

    :param stream: ``file``-like object to write to
    :param header: VCF header to use, lines and samples are deep-copied
    :param path: optional string with path to store (for display only)
    :param use_bgzf: indicator whether to write bgzf to ``stream`` if
        ``True``, prevent if ``False``, interpret ``path`` if ``None``
    """
    if use_bgzf or (use_bgzf is None and path and path.endswith(".gz")):
        stream = bgzf.BgzfWriter(fileobj=stream)
    return Writer(stream, header, path)
0.002043
import re

def split(value, dash_ranges=True):
    """Split a value into a list of strings.

    Accepts a list, a colon- or comma-separated string, or (when
    ``dash_ranges`` is True) a dash range such as ``"3-6"``.
    """
    if isinstance(value, list):
        value = [str(v) for v in value]
    else:
        str_value = str(value)
        dash_matches = re.match(pattern=r'(\d+)\-(\d+)', string=str_value)
        if ':' in str_value or ',' in str_value:
            value = [v.strip() for v in str_value.replace(',', ':').split(':')]
        elif dash_ranges and dash_matches:
            start_range = int(dash_matches.group(1))
            end_range = int(dash_matches.group(2)) + 1
            rng = range(start_range, end_range)
            value = [str(r) for r in rng]
        else:
            value = [str_value]
    return value
0.008119
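Worked examples for `split` above, one per branch of the code:

split('3-6')     # ['3', '4', '5', '6']  (dash range expanded)
split('a,b, c')  # ['a', 'b', 'c']       (comma list, whitespace stripped)
split('x:y')     # ['x', 'y']            (colon list)
split([1, 2])    # ['1', '2']            (list coerced to strings)
split('single')  # ['single']            (fallback: one-element list)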
def dataproc(cls, graph, data):
    """ example datagetter function, make any local modifications here """
    for thing in data:
        graph.add_trip(*thing)
    raise NotImplementedError('You need to implement this yourself!')
0.008097
def register_measurements(self, end, rows, between, refresh_presision):
    """Register the measurements if there are any and close the
    configuration; if there are none, clean the temporal file on disk.

    Keyword arguments:
    end -- datetime of the moment when the configuration goes inactive
    rows -- measurement rows to append
    between -- time between integral_measurements in seconds
    refresh_presision -- time between sensor values that compose the
        integral_measurements
    """
    if not self.end and len(rows) > 0:
        self.append_rows(rows, between, refresh_presision)
    self.go_inactive(end)
    self.save()
0.024116
def strptime(cls, date_string, format):
    'string, format -> new datetime parsed from a string (like time.strptime()).'
    import _strptime
    return _strptime._strptime_datetime(cls, date_string, format)
0.013636
def register_tile(self, hw_type, api_major, api_minor, name, fw_major,
                  fw_minor, fw_patch, exec_major, exec_minor, exec_patch,
                  slot, unique_id):
    """Register a tile with this controller.

    This function adds the tile immediately to its internal cache of
    registered tiles and queues RPCs to send all config variables and
    start tile rpcs back to the tile.
    """
    api_info = (api_major, api_minor)
    fw_info = (fw_major, fw_minor, fw_patch)
    exec_info = (exec_major, exec_minor, exec_patch)
    address = 10 + slot

    info = TileInfo(hw_type, name, api_info, fw_info, exec_info, slot,
                    unique_id, state=TileState.JUST_REGISTERED,
                    address=address)
    self.tile_manager.insert_tile(info)

    debug = int(self.tile_manager.debug_mode)

    if self.tile_manager.safe_mode:
        run_level = RunLevel.SAFE_MODE
        info.state = TileState.SAFE_MODE
        config_rpcs = []
    else:
        run_level = RunLevel.START_ON_COMMAND
        info.state = TileState.BEING_CONFIGURED
        config_rpcs = self.config_database.stream_matching(address, name)

    self.tile_manager.queue.put_nowait((info, config_rpcs))

    return [address, run_level, debug]
0.004769
def info(self, text=None):
    """Shows and persists info symbol and text and exits.

    Parameters
    ----------
    text : None, optional
        Text to be shown alongside info symbol.

    Returns
    -------
    self
    """
    return self.stop_and_persist(symbol=LogSymbols.INFO.value, text=text)
0.005831
def rollforward(self, date):
    """Roll date forward to nearest start of year"""
    if self.onOffset(date):
        return date
    else:
        return date + YearBegin(month=self.month)
0.009569
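The snippet above mirrors the pandas-style `YearBegin` offset; a hedged sketch of the same rolling behaviour using pandas itself (the surrounding library is an assumption):

import pandas as pd
from pandas.tseries.offsets import YearBegin

offset = YearBegin(month=1)
offset.rollforward(pd.Timestamp('2020-01-01'))  # 2020-01-01, already on offset
offset.rollforward(pd.Timestamp('2020-03-15'))  # 2021-01-01, next year start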
def update_note(note_id: NoteId, body: Body = None, done: Done = None) -> Note:
    """Update an existing note."""
    if note_id != 1:
        raise NotFoundError('Note does not exist')
    new_note = note.copy()
    if body is not None:
        new_note['body'] = body
    if done is not None:
        new_note['done'] = done
    return new_note
0.014577
def _defaggr(name, type, func):
    'Define aggregator `name` that calls func(col, rows)'
    func.type = type
    func.__name__ = name
    return func
0.013423
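A hedged example of defining an aggregator via `_defaggr` above; the `col.getValue(row)` accessor follows VisiData-style conventions and is an assumption here:

# hypothetical column-mean aggregator; only name/type/callable matter to _defaggr
mean = _defaggr('mean', float,
                lambda col, rows: sum(col.getValue(r) for r in rows) / len(rows))
print(mean.__name__, mean.type)  # mean <class 'float'>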
def output_summary(self, output_stream=sys.stdout):
    """outputs a usage tip and the list of acceptable commands.
    This is useful as the output of the 'help' option.

    parameters:
        output_stream - an open file-like object suitable for use as the
                        target of a print function
    """
    if self.app_name or self.app_description:
        print('Application: ', end='', file=output_stream)
    if self.app_name:
        print(self.app_name, self.app_version, file=output_stream)
    if self.app_description:
        print(self.app_description, file=output_stream)
    if self.app_name or self.app_description:
        print('', file=output_stream)

    names_list = self.get_option_names()
    print(
        "usage:\n%s [OPTIONS]... " % self.app_invocation_name,
        end='',
        file=output_stream
    )
    bracket_count = 0
    # this section prints the non-switch command line arguments
    for key in names_list:
        an_option = self.option_definitions[key]
        if an_option.is_argument:
            if an_option.default is None:
                # there's no option, assume the user must set this
                print(an_option.name, end='', file=output_stream)
            elif (
                inspect.isclass(an_option.value)
                or inspect.ismodule(an_option.value)
            ):
                # this is already set and it could have expanded, most
                # likely this is a case where a sub-command has been
                # loaded and we're looking to show the help for it.
                # display it as a constant already provided rather
                # than as an option the user must provide
                print(an_option.default, end='', file=output_stream)
            else:
                # this is an argument that the user may alternatively
                # provide
                print("[ %s" % an_option.name, end='', file=output_stream)
                bracket_count += 1
    print(']' * bracket_count, '\n', file=output_stream)

    names_list.sort()
    if names_list:
        print('OPTIONS:', file=output_stream)

    pad = ' ' * 4

    for name in names_list:
        if name in self.options_banned_from_help:
            continue
        option = self._get_option(name)

        line = ' ' * 2  # always start with 2 spaces
        if option.short_form:
            line += '-%s, ' % option.short_form
        line += '--%s' % name
        line += '\n'

        doc = option.doc if option.doc is not None else ''
        if doc:
            line += '%s%s\n' % (pad, doc)
        try:
            value = option.value
            type_of_value = type(value)
            converter_function = to_string_converters[type_of_value]
            default = converter_function(value)
        except KeyError:
            default = option.value

        if default is not None:
            if (
                (option.secret or 'password' in name.lower())
                and not self.option_definitions.admin.expose_secrets.default
            ):
                default = '*********'
            if name not in ('help',):
                # don't bother with certain dead obvious ones
                line += '%s(default: %s)\n' % (pad, default)

        print(line, file=output_stream)
0.000557
def addApplicationManifest(self, pchApplicationManifestFullPath, bTemporary):
    """
    Adds an application manifest to the list to load when building the
    list of installed applications. Temporary manifests are not
    automatically loaded
    """
    fn = self.function_table.addApplicationManifest
    result = fn(pchApplicationManifestFullPath, bTemporary)
    return result
0.00978
from subprocess import Popen, PIPE

def entrez(db, acc):
    """
    search entrez using specified database and accession
    """
    c1 = ['esearch', '-db', db, '-query', acc]
    c2 = ['efetch', '-db', 'BioSample', '-format', 'docsum']
    p1 = Popen(c1, stdout=PIPE, stderr=PIPE)
    p2 = Popen(c2, stdin=p1.stdout, stdout=PIPE, stderr=PIPE)
    return p2.communicate()
0.031429
def create_dictionary_of_element_from_dictionary(self, property_name, datas):
    """Populate a dictionary of elements
    """
    response = {}
    if (property_name in datas
            and datas[property_name] is not None
            and isinstance(datas[property_name], collections.Iterable)):
        for key, value in datas[property_name].items():
            response[key] = self.create_from_name_and_dictionary(key, value)
    return response
0.00883
def wait_until_page_does_not_contain(self, text, timeout=None, error=None):
    """Waits until `text` disappears from current page.

    Fails if `timeout` expires before the `text` disappears. See
    `introduction` for more information about `timeout` and its
    default value.

    `error` can be used to override the default error message.

    See also `Wait Until Page Contains`, `Wait Until Page Contains
    Element`, `Wait Until Page Does Not Contain Element` and BuiltIn
    keyword `Wait Until Keyword Succeeds`.
    """
    def check_present():
        present = self._is_text_present(text)
        if not present:
            return
        else:
            return error or "Text '%s' did not disappear in %s" % (
                text, self._format_timeout(timeout))

    self._wait_until_no_error(timeout, check_present)
0.003363
def islice_extended(iterable, *args):
    """An extension of :func:`itertools.islice` that supports negative
    values for *stop*, *start*, and *step*.

        >>> iterable = iter('abcdefgh')
        >>> list(islice_extended(iterable, -4, -1))
        ['e', 'f', 'g']

    Slices with negative values require some caching of *iterable*, but
    this function takes care to minimize the amount of memory required.

    For example, you can use a negative step with an infinite iterator:

        >>> from itertools import count
        >>> list(islice_extended(count(), 110, 99, -2))
        [110, 108, 106, 104, 102, 100]

    """
    s = slice(*args)
    start = s.start
    stop = s.stop
    if s.step == 0:
        raise ValueError('step argument must be a non-zero integer or None.')
    step = s.step or 1

    it = iter(iterable)

    if step > 0:
        start = 0 if (start is None) else start

        if start < 0:
            # Consume all but the last -start items
            cache = deque(enumerate(it, 1), maxlen=-start)
            len_iter = cache[-1][0] if cache else 0

            # Adjust start to be positive
            i = max(len_iter + start, 0)

            # Adjust stop to be positive
            if stop is None:
                j = len_iter
            elif stop >= 0:
                j = min(stop, len_iter)
            else:
                j = max(len_iter + stop, 0)

            # Slice the cache
            n = j - i
            if n <= 0:
                return

            for index, item in islice(cache, 0, n, step):
                yield item
        elif (stop is not None) and (stop < 0):
            # Advance to the start position
            next(islice(it, start, start), None)

            # When stop is negative, we have to carry -stop items while
            # iterating
            cache = deque(islice(it, -stop), maxlen=-stop)

            for index, item in enumerate(it):
                cached_item = cache.popleft()
                if index % step == 0:
                    yield cached_item
                cache.append(item)
        else:
            # When both start and stop are positive we have the normal
            # case
            yield from islice(it, start, stop, step)
    else:
        start = -1 if (start is None) else start

        if (stop is not None) and (stop < 0):
            # Consume all but the last items
            n = -stop - 1
            cache = deque(enumerate(it, 1), maxlen=n)
            len_iter = cache[-1][0] if cache else 0

            # If start and stop are both negative they are comparable and
            # we can just slice. Otherwise we can adjust start to be
            # negative and then slice.
            if start < 0:
                i, j = start, stop
            else:
                i, j = min(start - len_iter, -1), None

            for index, item in list(cache)[i:j:step]:
                yield item
        else:
            # Advance to the stop position
            if stop is not None:
                m = stop + 1
                next(islice(it, m, m), None)

            # stop is positive, so if start is negative they are not
            # comparable and we need the rest of the items.
            if start < 0:
                i = start
                n = None
            # stop is None and start is positive, so we just need items
            # up to the start index.
            elif stop is None:
                i = None
                n = start + 1
            # Both stop and start are positive, so they are comparable.
            else:
                i = None
                n = start - stop
                if n <= 0:
                    return

            cache = list(islice(it, n))

            yield from cache[i::step]
0.000266
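Usage examples for `islice_extended` above (the function ships in more-itertools, where these slices behave like their list counterparts):

from more_itertools import islice_extended

list(islice_extended(iter(range(10)), -3, None))  # [7, 8, 9]
list(islice_extended(iter('abcdef'), 4, 0, -1))   # ['e', 'd', 'c', 'b']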
def perform_ops(self):
    """
    Performs the stored operations on the database connection.
    """
    with self.db:
        with closing(self.db.cursor()) as cursor:
            cursor.execute('BEGIN TRANSACTION')
            self._perform_ops(cursor)
0.006873
def get_user_orders(self):
    """Return user's orders that are currently open.

    :return: User's orders currently open.
    :rtype: [dict]
    """
    self._log('get user orders')
    return self._rest_client.post(
        endpoint='/open_orders',
        payload={'book': self.name}
    )
0.006079
def _set_alarm_entry(self, v, load=False):
    """
    Setter method for alarm_entry, mapped from YANG variable
    /rmon/alarm_entry (list).

    If this variable is read-only (config: false) in the source YANG
    file, then _set_alarm_entry is considered as a private method.
    Backends looking to populate this variable should do so via calling
    thisObj._set_alarm_entry() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=YANGListType(
                "alarm_index", alarm_entry.alarm_entry,
                yang_name="alarm-entry", rest_name="alarm", parent=self,
                is_container='list', user_ordered=False,
                path_helper=self._path_helper, yang_keys='alarm-index',
                extensions={u'tailf-common': {
                    u'info': u'RMON alarm',
                    u'cli-no-key-completion': None,
                    u'cli-suppress-mode': None,
                    u'cli-suppress-list-no': None,
                    u'cli-full-no': None,
                    u'alt-name': u'alarm',
                    u'cli-compact-syntax': None,
                    u'cli-sequence-commands': None,
                    u'cli-suppress-key-abbreviation': None,
                    u'cli-incomplete-command': None,
                    u'callpoint': u'rmon_alarm'}}),
            is_container='list', yang_name="alarm-entry",
            rest_name="alarm", parent=self,
            path_helper=self._path_helper, extmethods=self._extmethods,
            register_paths=True,
            extensions={u'tailf-common': {
                u'info': u'RMON alarm',
                u'cli-no-key-completion': None,
                u'cli-suppress-mode': None,
                u'cli-suppress-list-no': None,
                u'cli-full-no': None,
                u'alt-name': u'alarm',
                u'cli-compact-syntax': None,
                u'cli-sequence-commands': None,
                u'cli-suppress-key-abbreviation': None,
                u'cli-incomplete-command': None,
                u'callpoint': u'rmon_alarm'}},
            namespace='urn:brocade.com:mgmt:brocade-rmon',
            defining_module='brocade-rmon', yang_type='list',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """alarm_entry must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("alarm_index",alarm_entry.alarm_entry, yang_name="alarm-entry", rest_name="alarm", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='alarm-index', extensions={u'tailf-common': {u'info': u'RMON alarm', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'alarm', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'rmon_alarm'}}), is_container='list', yang_name="alarm-entry", rest_name="alarm", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'RMON alarm', u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'alt-name': u'alarm', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-suppress-key-abbreviation': None, u'cli-incomplete-command': None, u'callpoint': u'rmon_alarm'}}, namespace='urn:brocade.com:mgmt:brocade-rmon', defining_module='brocade-rmon', yang_type='list', is_config=True)""",
        })

    self.__alarm_entry = t
    if hasattr(self, '_set'):
        self._set()
0.003459
def _glob(self, curdir, this, rest):
    """
    Handle glob flow.

    There are really only a couple of cases:

    - File name.
    - File name pattern (magic).
    - Directory.
    - Directory name pattern (magic).
    - Extra slashes `////`.
    - `globstar` `**`.
    """
    is_magic = this.is_magic
    dir_only = this.dir_only
    target = this.pattern
    is_globstar = this.is_globstar

    if is_magic and is_globstar:
        # Glob star directory `**`.

        # Throw away multiple consecutive `globstars`
        # and acquire the pattern after the `globstars` if available.
        this = rest.pop(0) if rest else None
        globstar_end = this is None
        while this and not globstar_end:
            if this:
                dir_only = this.dir_only
                target = this.pattern
            if this and this.is_globstar:
                this = rest.pop(0) if rest else None
                if this is None:
                    globstar_end = True
            else:
                break

        if globstar_end:
            target = None

        # We match `**/next` during a deep glob, so what ever comes back,
        # we will send back through `_glob` with pattern after `next`
        # (`**/next/after`). So grab `after` if available.
        this = rest.pop(0) if rest else None

        # Deep searching is the unique case where we might feed in a
        # `None` for the next pattern to match. Deep glob will account
        # for this.
        matcher = self._get_matcher(target)

        # If our pattern ends with `curdir/**`, but does not start with
        # `**` it matches zero or more, so it should return `curdir/`,
        # signifying `curdir` + no match.
        # If a pattern follows `**/something`, we always get the
        # appropriate return already, so this isn't needed in that case.
        # There is one quirk though with Bash, if `curdir` had magic
        # before `**`, Bash omits the trailing `/`. We don't worry about
        # that.
        if globstar_end and curdir:
            yield os.path.join(curdir, self.empty)

        # Search
        for path in self._glob_dir(curdir, matcher, dir_only, deep=True):
            if this:
                yield from self._glob(path, this, rest[:])
            else:
                yield path

    elif not dir_only:
        # Files: no need to recursively search at this point as we are
        # done.
        matcher = self._get_matcher(target)
        yield from self._glob_dir(curdir, matcher)

    else:
        # Directory: search current directory against pattern
        # and feed the results back through with the next pattern.
        this = rest.pop(0) if rest else None
        matcher = self._get_matcher(target)
        for path in self._glob_dir(curdir, matcher, True):
            if this:
                yield from self._glob(path, this, rest[:])
            else:
                yield path
0.002205
def get_object_ids(model, meteor_ids):
    """Return all object IDs for the given meteor_ids."""
    if model is ObjectMapping:
        # this doesn't make sense - raise TypeError
        raise TypeError("Can't map ObjectMapping instances through self.")

    # Django model._meta is now public API -> pylint: disable=W0212
    meta = model._meta
    alea_unique_fields = [
        field for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique and not field.null
    ]
    result = collections.OrderedDict(
        (str(meteor_id), None) for meteor_id in meteor_ids
    )
    if len(alea_unique_fields) == 1:
        aid = alea_unique_fields[0].name
        query = model.objects.filter(**{
            '%s__in' % aid: meteor_ids,
        }).values_list(aid, 'pk')
    else:
        content_type = ContentType.objects.get_for_model(model)
        query = ObjectMapping.objects.filter(
            content_type=content_type,
            meteor_id__in=meteor_ids,
        ).values_list('meteor_id', 'object_id')
    for meteor_id, object_id in query:
        result[meteor_id] = object_id
    return result
0.000855
def find_by_ref(self, ref_type, ref_id):
    """
    Returns an object of type "item", "status" or "task" as a stream
    object. This is useful when a new status has been posted and should
    be rendered directly in the stream without reloading the entire
    stream.

    For details, see:
    https://developers.podio.com/doc/stream/get-stream-object-80054
    """
    return self.transport.GET(url='/stream/%s/%s' % (ref_type, ref_id))
0.00641
from itertools import chain, islice

def _chunks(iterable, n):
    """
    Splits an iterable into chunks of size n.
    """
    iterable = iter(iterable)
    while True:
        # store one line in memory,
        # chain it to an iterator on the rest of the chunk
        try:
            yield chain([next(iterable)], islice(iterable, n - 1))
        except StopIteration:
            # under PEP 479 (Python 3.7+) a bare StopIteration inside a
            # generator raises RuntimeError, so end the generator explicitly
            return
0.00346
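A usage note for `_chunks` above: each yielded chunk is a lazy iterator over the same underlying stream, so consume each chunk fully before requesting the next one:

chunks = _chunks(range(7), 3)
print([list(c) for c in chunks])  # [[0, 1, 2], [3, 4, 5], [6]]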
def reset(self):
    '''
    Reset Stan model and all tracked distributions and parameters.
    '''
    self.parameters = []
    self.transformed_parameters = []
    self.expressions = []
    self.data = []
    self.transformed_data = []
    self.X = {}
    self.model = []
    self.mu_cont = []
    self.mu_cat = []
    self._original_names = {}

    # variables to suppress in output. Stan uses limited set for variable
    # names, so track variable names we may need to simplify for the
    # model code and then sub back later.
    self._suppress_vars = ['yhat', 'lp__']
0.003115
def analyzeWeightPruning(args):
    """
    Multiprocess function used to analyze the impact of nonzeros and
    accuracy after pruning low weights and units with low dutycycle of a
    pre-trained model.

    :param args: tuple with the following arguments:
        - experiment path: The experiment results path
        - configuration parameters: The parameters used in the experiment run
        - minWeight: min weight to prune. If zero then no pruning
        - minDutycycle: min threshold to prune. If less than zero then no pruning
        - progress bar position: When 'minWeight' is zero
    :type args: tuple

    :return: Pandas DataFrame with the nonzero count for every weight
        variable in the model and the evaluation results after pruning
        the weights.
    :rtype: :class:`pandas.DataFrame`
    """
    path, params, minWeight, minDutycycle, position = args

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Dataset transformations used during training.
    # See mnist_sparse_experiment.py
    transform = transforms.Compose([transforms.ToTensor(),
                                    transforms.Normalize((0.1307,), (0.3081,))])

    # Initialize MNIST test dataset for this experiment
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST(params["datadir"], train=False, download=True,
                       transform=transform),
        batch_size=params["test_batch_size"], shuffle=True)

    # Load pre-trained model and evaluate with test dataset
    model = torch.load(os.path.join(path, "model.pt"), map_location=device)

    label = str(minWeight)
    name = params["name"]
    desc = "{}.minW({}).minD({})".format(name, minWeight, minDutycycle)

    model.pruneWeights(minWeight)
    model.pruneDutycycles(minDutycycle)

    # Collect nonzero
    nonzero = {}
    register_nonzero_counter(model, nonzero)
    results = evaluateModel(model=model, loader=test_loader, device=device,
                            progress={"desc": desc, "position": position})
    unregister_counter_nonzero(model)

    # Create table with results
    table = pd.DataFrame.from_dict(nonzero)
    noise_score = results["total_correct"]
    table = table.assign(accuracy=results["accuracy"])

    # Compute noise score
    noise_values = tqdm([0.05, 0.1, 0.15, 0.2, 0.25,
                         0.3, 0.35, 0.4, 0.45, 0.5],
                        position=position)
    for noise in noise_values:
        noise_values.set_description("{}.noise({})".format(desc, noise))

        # Add noise to dataset transforms
        transform.transforms.append(
            RandomNoise(noise, whiteValue=0.1307 + 2 * 0.3081))

        # Evaluate model with noise
        results = evaluateModel(model=model, loader=test_loader,
                                device=device)

        # Remove noise from dataset transforms
        transform.transforms.pop()

        # Update noise score
        noise_score += results["total_correct"]

    table = table.assign(noise_score=noise_score)

    # Filter result for the 'weight' variable only
    table = pd.DataFrame({label: table.xs("weight")})
    table.drop(["input", "output"], inplace=True)
    table.dropna(inplace=True)

    return table
0.011935
def find_node(self, node, path):
    """Finds a node by the given path from the given node."""
    for hash_value in path:
        if isinstance(node, LeafStatisticsNode):
            break
        for stats in node.get_child_keys():
            if hash(stats) == hash_value:
                node = node.get_child_node(stats)
                break
        else:
            break
    return node
0.004556
def setup_callables(self):
    """Setup Callable attributes that belong to this object."""
    defaults = self.get_default_callables()
    for key, value in list(defaults.items()):
        self._postponed_callables.setdefault(key, value)
    for key in self.callables:
        value = self._postponed_callables.pop(key)
        value.setup_callable_system(self.system, init=True)
        setattr(self, key, value)
0.004283
def up(tag, sql, revision):
    """Upgrade to revision"""
    alembic_command.upgrade(
        config=get_config(),
        revision=revision,
        sql=sql,
        tag=tag
    )
0.005464
def parse(self, value):
    """Parse date"""
    value = super(DateOpt, self).parse(value)
    if value is None:
        return None
    if isinstance(value, str):
        value = self.parse_date(value)
    if isinstance(value, datetime) and self.date_only:
        value = value.date()
    return value
0.005602
def com_google_fonts_check_metadata_menu_and_latin(family_metadata):
    """METADATA.pb should contain at least "menu" and "latin" subsets."""
    missing = []
    for s in ["menu", "latin"]:
        if s not in list(family_metadata.subsets):
            missing.append(s)

    if missing != []:
        yield FAIL, ("Subsets \"menu\" and \"latin\" are mandatory,"
                     " but METADATA.pb is missing"
                     " \"{}\"").format(" and ".join(missing))
    else:
        yield PASS, "METADATA.pb contains \"menu\" and \"latin\" subsets."
0.013308
def read_single_knmi_file(filename):
    """reads a single file of KNMI's meteorological time series

    data availability: www.knmi.nl/nederland-nu/klimatologie/uurgegevens

    Args:
        filename: the file to be opened

    Returns:
        pandas data frame including time series
    """
    hourly_data_obs_raw = pd.read_csv(
        filename,
        parse_dates=[['YYYYMMDD', 'HH']],
        date_parser=lambda yyyymmdd, hh: pd.datetime(int(str(yyyymmdd)[0:4]),
                                                     int(str(yyyymmdd)[4:6]),
                                                     int(str(yyyymmdd)[6:8]),
                                                     int(hh) - 1),
        skiprows=31,
        skipinitialspace=True,
        na_values='',
        keep_date_col=True,
    )
    hourly_data_obs_raw.index = hourly_data_obs_raw['YYYYMMDD_HH']
    hourly_data_obs_raw.index = hourly_data_obs_raw.index + pd.Timedelta(hours=1)

    columns_hourly = ['temp', 'precip', 'glob', 'hum', 'wind', 'ssd']

    hourly_data_obs = pd.DataFrame(
        index=hourly_data_obs_raw.index,
        columns=columns_hourly,
        data=dict(
            temp=hourly_data_obs_raw['T'] / 10 + 273.15,
            precip=hourly_data_obs_raw['RH'] / 10,
            glob=hourly_data_obs_raw['Q'] * 10000 / 3600.,
            hum=hourly_data_obs_raw['U'],
            wind=hourly_data_obs_raw['FH'] / 10,
            ssd=hourly_data_obs_raw['SQ'] * 6,
        ),
    )

    # remove negative values
    negative_values = hourly_data_obs['precip'] < 0.0
    hourly_data_obs.loc[negative_values, 'precip'] = 0.0

    return hourly_data_obs
0.001218
def progress_updater(size, total):
    """Progress reporter for checksum verification."""
    current_task.update_state(
        state=state('PROGRESS'),
        meta=dict(size=size, total=total)
    )
0.004975
def db_aws_delete_instance(self, instance_id):
    '''Delete AWS DB instance'''
    interactive = False
    if instance_id is None:
        interactive = True

    instances = self.db_services.get_db_instances()
    instance_list = [dbinst.id for dbinst in instances]

    if interactive:
        valid = False
        if len(instances) == 0:
            print("There are no instances you can delete currently. Use "
                  "`db aws_create_instance` to make one.")
            return
        print("Here are the available instances you can delete:")
        for inst in instances:
            print("\t %s (%s)" % (inst.id, inst.status))
        while not valid:
            instance_id = raw_input('Enter the instance identity you would '
                                    'like to delete: ')
            res = self.db_services.validate_instance_id(instance_id)
            if res is True:
                valid = True
            else:
                print(res + " Try again, instance name not valid. Check "
                      "for typos.")
            if instance_id in instance_list:
                valid = True
            else:
                valid = False
                print("Try again, instance not present in this account. "
                      "Try again checking for typos.")
    else:
        res = self.db_services.validate_instance_id(instance_id)
        if res is not True:
            print("*** Error, instance name not valid. Try again "
                  "checking for typos.")
            return
        if instance_id not in instance_list:
            print("*** Error, this instance is not present in this "
                  "account. Try again checking for typos. Run "
                  "`db aws_list_instances` to see the valid list.")
            return

    user_input = raw_input(
        "Deleting an instance will erase all your data associated with the "
        "database in that instance. Really quit? y or n:"
    )
    if user_input == 'y':
        res = self.db_services.delete_db_instance(instance_id)
        if res:
            print("AWS RDS database instance %s deleted. Run "
                  "`db aws_list_instances` for current status."
                  % instance_id)
        else:
            print("*** Error deleting database instance %s. "
                  "It may be because it is still being created, deleted, "
                  "or is being backed up. Run `db aws_list_instances` for "
                  "current status." % instance_id)
    else:
        return
0.007609
def create_contact(self, email=None, first_name=None, last_name=None,
                   phone_number=None):
    """
    Create a contact which is later passed to payment.
    """
    result = {}
    if email:
        result['email'] = email
    if first_name is not None:
        result['first_name'] = first_name
    if last_name is not None:
        result['last_name'] = last_name
    if phone_number is not None:
        result['phone_number'] = phone_number
    return result if len(result) > 0 else None
0.005525
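# Minimal usage sketch; `client` stands in for whatever payment-client class
# the method is defined on (hypothetical name).
contact = client.create_contact(email='jane@example.com', first_name='Jane')
# -> {'email': 'jane@example.com', 'first_name': 'Jane'}
assert client.create_contact() is None  # nothing set -> None, not an empty dict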
def rotate_and_detach_tab_labels(self):
    """Rotates tab labels of the left-bar notebooks by 90 degrees and makes them detachable.

    Iterates over all notebooks in the left bar and, for every page, swaps the plain tab label
    for a rotated header label, then marks the page as reorderable and detachable.
    """
    icons = {'Libraries': constants.SIGN_LIB, 'States Tree': constants.ICON_TREE,
             'Global Variables': constants.ICON_GLOB, 'Modification History': constants.ICON_HIST,
             'Execution History': constants.ICON_EHIST, 'network': constants.ICON_NET}
    for notebook in self.left_bar_notebooks:
        for i in range(notebook.get_n_pages()):
            child = notebook.get_nth_page(i)
            tab_label = notebook.get_tab_label(child)
            tab_label_text = tab_label.get_text()
            notebook.set_tab_label(child, gui_helper_label.create_tab_header_label(tab_label_text, icons))
            notebook.set_tab_reorderable(child, True)
            notebook.set_tab_detachable(child, True)
0.007882
def rename(self, old_file_path, new_file_path, force_replace=False): """Renames a FakeFile object at old_file_path to new_file_path, preserving all properties. Args: old_file_path: Path to filesystem object to rename. new_file_path: Path to where the filesystem object will live after this call. force_replace: If set and destination is an existing file, it will be replaced even under Windows if the user has permissions, otherwise replacement happens under Unix only. Raises: OSError: if old_file_path does not exist. OSError: if new_file_path is an existing directory (Windows, or Posix if old_file_path points to a regular file) OSError: if old_file_path is a directory and new_file_path a file OSError: if new_file_path is an existing file and force_replace not set (Windows only). OSError: if new_file_path is an existing file and could not be removed (Posix, or Windows with force_replace set). OSError: if dirname(new_file_path) does not exist. OSError: if the file would be moved to another filesystem (e.g. mount point). """ ends_with_sep = self.ends_with_path_separator(old_file_path) old_file_path = self.absnormpath(old_file_path) new_file_path = self.absnormpath(new_file_path) if not self.exists(old_file_path, check_link=True): self.raise_os_error(errno.ENOENT, old_file_path, 2) if ends_with_sep: self._handle_broken_link_with_trailing_sep(old_file_path) old_object = self.lresolve(old_file_path) if not self.is_windows_fs: self._handle_posix_dir_link_errors( new_file_path, old_file_path, ends_with_sep) if self.exists(new_file_path, check_link=True): new_file_path = self._rename_to_existing_path( force_replace, new_file_path, old_file_path, old_object, ends_with_sep) if not new_file_path: return old_dir, old_name = self.splitpath(old_file_path) new_dir, new_name = self.splitpath(new_file_path) if not self.exists(new_dir): self.raise_os_error(errno.ENOENT, new_dir) old_dir_object = self.resolve(old_dir) new_dir_object = self.resolve(new_dir) if old_dir_object.st_dev != new_dir_object.st_dev: self.raise_os_error(errno.EXDEV, old_file_path) if not S_ISDIR(new_dir_object.st_mode): self.raise_os_error( errno.EACCES if self.is_windows_fs else errno.ENOTDIR, new_file_path) if new_dir_object.has_parent_object(old_object): self.raise_os_error(errno.EINVAL, new_file_path) object_to_rename = old_dir_object.get_entry(old_name) old_dir_object.remove_entry(old_name, recursive=False) object_to_rename.name = new_name new_name = new_dir_object._normalized_entryname(new_name) if new_name in new_dir_object.contents: # in case of overwriting remove the old entry first new_dir_object.remove_entry(new_name) new_dir_object.add_entry(object_to_rename)
0.000598
def format_logstash_v0(self, record):
    '''
    Messages are formatted in logstash's expected format.
    '''
    host = salt.utils.network.get_fqhostname()
    message_dict = {
        '@timestamp': self.formatTime(record),
        '@fields': {
            'levelname': record.levelname,
            'logger': record.name,
            'lineno': record.lineno,
            'pathname': record.pathname,
            'process': record.process,
            'threadName': record.threadName,
            'funcName': record.funcName,
            'processName': record.processName
        },
        '@message': record.getMessage(),
        '@source': '{0}://{1}/{2}'.format(
            self.msg_type,
            host,
            self.msg_path
        ),
        '@source_host': host,
        '@source_path': self.msg_path,
        '@tags': self.tags,
        '@type': self.msg_type,
    }

    if record.exc_info:
        message_dict['@fields']['exc_info'] = self.formatException(
            record.exc_info
        )

    # Add any extra attributes to the message field
    for key, value in six.iteritems(record.__dict__):
        if key in ('args', 'asctime', 'created', 'exc_info', 'exc_text',
                   'filename', 'funcName', 'id', 'levelname', 'levelno',
                   'lineno', 'module', 'msecs', 'message', 'msg',
                   'name', 'pathname', 'process', 'processName',
                   'relativeCreated', 'thread', 'threadName'):
            # These are already handled above or not handled at all
            continue

        if value is None:
            message_dict['@fields'][key] = value
            continue

        if isinstance(value, (six.string_types, bool, dict, float, int, list)):
            message_dict['@fields'][key] = value
            continue

        message_dict['@fields'][key] = repr(value)
    return message_dict
0.001444
def get(self, name, ns=None, default=None): """ Get the value of an attribute by name. @param name: The name of the attribute. @type name: basestring @param ns: The optional attribute's namespace. @type ns: (I{prefix}, I{name}) @param default: An optional value to be returned when either the attribute does not exist or has no value. @type default: basestring @return: The attribute's value or I{default}. @rtype: basestring @see: __getitem__() """ attr = self.getAttribute(name, ns) if attr is None or attr.value is None: return default return attr.getValue()
0.002829
def BC_0Displacement0Slope(self):
    """
    0Displacement0Slope boundary condition for 0 deflection.

    This requires that nothing be done to the edges of the solution array,
    because the lack of the off-grid terms implies that they go to 0.

    Here we just turn the cells outside the array into nan, to ensure that
    we are not accidentally including the wrong cells here (and for
    consistency with the other solution types -- this takes negligible time).
    """
    if self.BC_W == '0Displacement0Slope':
        i = 0
        self.l2[i] = np.nan
        self.l1[i] = np.nan
        self.c0[i] += 0
        self.r1[i] += 0
        self.r2[i] += 0
        i = 1
        self.l2[i] = np.nan
        self.l1[i] += 0
        self.c0[i] += 0
        self.r1[i] += 0
        self.r2[i] += 0
    if self.BC_E == '0Displacement0Slope':
        i = -2
        self.l2[i] += 0
        self.l1[i] += 0
        self.c0[i] += 0
        self.r1[i] += 0
        self.r2[i] = np.nan
        i = -1
        self.l2[i] += 0
        self.l1[i] += 0
        self.c0[i] += 0
        self.r1[i] = np.nan
        self.r2[i] = np.nan
0.029012
def __checkExpiration(self, mtime=None):
    '''
        __checkExpiration - Check if we have expired

        @param mtime <int> - Optional mtime if known, otherwise will be gathered

        @return <bool> - True if we did expire, otherwise False
    '''
    if not self.maxLockAge:
        return False

    if mtime is None:
        try:
            mtime = os.stat(self.lockPath).st_mtime
        except FileNotFoundError:
            return False

    if mtime < time.time() - self.maxLockAge:
        return True

    return False
0.006515
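# Standalone sketch of the same mtime-based expiry check, usable without the
# lock class; the semantics mirror __checkExpiration above.
import os
import time

def lock_is_expired(lock_path, max_lock_age):
    """Return True when the lock file is older than max_lock_age seconds."""
    if not max_lock_age:
        return False  # expiry disabled
    try:
        mtime = os.stat(lock_path).st_mtime
    except FileNotFoundError:
        return False  # lock vanished; nothing left to expire
    return mtime < time.time() - max_lock_age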
def hist_series(self, by=None, ax=None, grid=True, xlabelsize=None,
                xrot=None, ylabelsize=None, yrot=None, figsize=None,
                bins=10, **kwds):
    """
    Draw histogram of the input series using matplotlib.

    Parameters
    ----------
    by : object, optional
        If passed, then used to form histograms for separate groups
    ax : matplotlib axis object
        If not passed, uses gca()
    grid : bool, default True
        Whether to show axis grid lines
    xlabelsize : int, default None
        If specified changes the x-axis label size
    xrot : float, default None
        rotation of x axis labels
    ylabelsize : int, default None
        If specified changes the y-axis label size
    yrot : float, default None
        rotation of y axis labels
    figsize : tuple, default None
        figure size in inches by default
    bins : integer or sequence, default 10
        Number of histogram bins to be used. If an integer is given, bins + 1
        bin edges are calculated and returned. If bins is a sequence, gives
        bin edges, including left edge of first bin and right edge of last
        bin. In this case, bins is returned unmodified.
    `**kwds` : keywords
        To be passed to the actual plotting function

    See Also
    --------
    matplotlib.axes.Axes.hist : Plot a histogram using matplotlib.
    """
    import matplotlib.pyplot as plt

    if by is None:
        if kwds.get('layout', None) is not None:
            raise ValueError("The 'layout' keyword is not supported when "
                             "'by' is None")
        # hack until the plotting interface is a bit more unified
        fig = kwds.pop('figure', plt.gcf() if plt.get_fignums()
                       else plt.figure(figsize=figsize))
        if (figsize is not None and tuple(figsize) !=
                tuple(fig.get_size_inches())):
            fig.set_size_inches(*figsize, forward=True)
        if ax is None:
            ax = fig.gca()
        elif ax.get_figure() != fig:
            raise AssertionError('passed axis not bound to passed figure')
        values = self.dropna().values

        ax.hist(values, bins=bins, **kwds)
        ax.grid(grid)
        axes = np.array([ax])

        _set_ticks_props(axes, xlabelsize=xlabelsize, xrot=xrot,
                         ylabelsize=ylabelsize, yrot=yrot)

    else:
        if 'figure' in kwds:
            raise ValueError("Cannot pass 'figure' when using the "
                             "'by' argument, since a new 'Figure' instance "
                             "will be created")
        axes = grouped_hist(self, by=by, ax=ax, grid=grid, figsize=figsize,
                            bins=bins, xlabelsize=xlabelsize, xrot=xrot,
                            ylabelsize=ylabelsize, yrot=yrot, **kwds)

    if hasattr(axes, 'ndim'):
        if axes.ndim == 1 and len(axes) == 1:
            return axes[0]
    return axes
0.000331
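# Usage sketch: hist_series backs pandas' Series.hist, so the typical call
# goes through the Series method; the numbers here are illustrative.
import numpy as np
import pandas as pd

s = pd.Series(np.random.randn(1000))
ax = s.hist(bins=30, grid=False, xrot=45)  # returns a single matplotlib Axes
ax.set_title('randn sample')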
def addSlider2D( self, sliderfunc, xmin, xmax, value=None, pos=4, s=0.04, title="", c=None, showValue=True ): """Add a slider widget which can call an external custom function. :param sliderfunc: external function to be called by the widget :param float xmin: lower value :param float xmax: upper value :param float value: current value :param list pos: position corner number: horizontal [1-4] or vertical [11-14] it can also be specified by corners coordinates [(x1,y1), (x2,y2)] :param str title: title text :param bool showValue: if true current value is shown .. hint:: |sliders| |sliders.py|_ """ return addons.addSlider2D(sliderfunc, xmin, xmax, value, pos, s, title, c, showValue)
0.008516
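# Hedged usage sketch assuming the vtkplotter/vedo Plotter API; the callback
# receives the VTK slider widget plus the event name and reads the current
# value from the widget's representation.
from vtkplotter import Plotter, Sphere

vp = Plotter()
sph = Sphere()

def set_opacity(widget, event):
    sph.alpha(widget.GetRepresentation().GetValue())

vp.addSlider2D(set_opacity, 0.0, 1.0, value=1.0, pos=4, title='opacity')
vp.show(sph)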
def __print_namespace_help(self, session, namespace, cmd_name=None):
    """
    Prints the documentation of all the commands in the given namespace,
    or only of the given command

    :param session: Session Handler
    :param namespace: Namespace of the command
    :param cmd_name: Name of the command to show, None to show them all
    """
    session.write_line("=== Name space '{0}' ===", namespace)

    # Get all commands in this namespace
    if cmd_name is None:
        names = sorted(self._commands[namespace])
    else:
        names = [cmd_name]

    first_cmd = True
    for command in names:
        if not first_cmd:
            # Print an empty line
            session.write_line("\n")

        self.__print_command_help(session, namespace, command)
        first_cmd = False
0.002169
def rename_nfa_states(nfa: dict, suffix: str):
    """ Side effect on input! Renames all the states of the NFA by
    prepending **suffix** to each state name (despite the parameter
    name, the string is added at the beginning).

    It is a utility function used to avoid automata having states with
    names in common. Avoid strings that can lead to special names like
    "as", "and", ...

    :param dict nfa: input NFA.
    :param str suffix: string to be added at the beginning of each state name.
    """
    conversion_dict = {}
    new_states = set()
    new_initials = set()
    new_accepting = set()
    for state in nfa['states']:
        conversion_dict[state] = suffix + state
        new_states.add(suffix + state)
        if state in nfa['initial_states']:
            new_initials.add(suffix + state)
        if state in nfa['accepting_states']:
            new_accepting.add(suffix + state)

    nfa['states'] = new_states
    nfa['initial_states'] = new_initials
    nfa['accepting_states'] = new_accepting

    new_transitions = {}
    for transition in nfa['transitions']:
        new_arrival = set()
        for arrival in nfa['transitions'][transition]:
            new_arrival.add(conversion_dict[arrival])
        new_transitions[
            conversion_dict[transition[0]], transition[1]] = new_arrival
    nfa['transitions'] = new_transitions
    return nfa
0.000761
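# Usage sketch on a two-state NFA dict in the layout the function expects.
nfa = {
    'alphabet': {'a'},
    'states': {'s0', 's1'},
    'initial_states': {'s0'},
    'accepting_states': {'s1'},
    'transitions': {('s0', 'a'): {'s1'}},
}
rename_nfa_states(nfa, 'copy_')
print(nfa['states'])       # {'copy_s0', 'copy_s1'}
print(nfa['transitions'])  # {('copy_s0', 'a'): {'copy_s1'}}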
def get_contradiction_summary(graph: BELGraph) -> Iterable[Tuple[BaseEntity, BaseEntity, Set[str]]]:
    """Yield triplets of (source node, target node, set of relations) for (source node, target node) pairs
    that have multiple, contradictory relations.
    """
    for u, v in set(graph.edges()):
        relations = {data[RELATION] for data in graph[u][v].values()}
        if relation_set_has_contradictions(relations):
            yield u, v, relations
0.006608
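# Consumption sketch; `graph` is assumed to be an existing pybel.BELGraph
# that already contains edges with conflicting relation types.
for u, v, relations in get_contradiction_summary(graph):
    print('{} -> {} has contradictory relations: {}'.format(u, v, sorted(relations)))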
def log(self, *args, **kwargs):
    """Convenience function for printing indented debug output."""
    if self.verbose:
        print(' ' * self.depth, *args, **kwargs)
0.010929
def rotate(self, axis, angleDeg): """ Rotate geometry. axis: axis of rotation (array of floats) angleDeg: rotation angle in degrees """ ax = Vector(axis[0], axis[1], axis[2]).unit() cosAngle = math.cos(math.pi * angleDeg / 180.) sinAngle = math.sin(math.pi * angleDeg / 180.) def newVector(v): vA = v.dot(ax) vPerp = v.minus(ax.times(vA)) vPerpLen = vPerp.length() if vPerpLen == 0: # vector is parallel to axis, no need to rotate return v u1 = vPerp.unit() u2 = u1.cross(ax) vCosA = vPerpLen*cosAngle vSinA = vPerpLen*sinAngle return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA))) for poly in self.polygons: for vert in poly.vertices: vert.pos = newVector(vert.pos) normal = vert.normal if normal.length() > 0: vert.normal = newVector(vert.normal)
0.001876
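# NumPy sketch of the same decomposition: split the vector into components
# parallel and perpendicular to the axis and rotate only the perpendicular
# part. np.cross(v_perp, ax) reproduces u1.cross(ax) above, so the sense of
# rotation matches the method rather than the right-hand-rule convention.
import math
import numpy as np

def rotate_about_axis(v, axis, angle_deg):
    ax = np.asarray(axis, dtype=float)
    ax /= np.linalg.norm(ax)
    v = np.asarray(v, dtype=float)
    theta = math.radians(angle_deg)
    v_par = ax * v.dot(ax)          # unchanged component along the axis
    v_perp = v - v_par              # component actually rotated
    u2 = np.cross(v_perp, ax)       # same magnitude as v_perp, orthogonal to both
    return v_par + v_perp * math.cos(theta) + u2 * math.sin(theta)

print(rotate_about_axis([1, 0, 0], [0, 0, 1], 90))  # ~[0, -1, 0] in this convention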
def __compute_mode_from_string(self, path, mode_string): """ Scan a unix-style mode string and apply it to ``path``. :type mode_string: str :param mode_string: see ``man chmod`` for details. ``X``, ``s`` and ``t`` modes are not supported. The string should match the following regular expression: ``[ugoa]*[-+=]([rwx]*)``. :rtype: int :return: a new mode integer resulting from applying ``mode_string`` to ``path``. :raises: :exc:`~exceptions.ValueError` if ``mode_string`` is invalid. """ Char_to_perm_byte = {'r': 4, 'w': 2, 'x': 1} Fields = (('u', 6), ('g', 3), ('o', 0)) # -- m = re.match(r"\s*([ugoa]*)([-+=])([rwx]*)\s*", mode_string) if not m: raise ValueError("Invalid mode string %s" % mode_string) who = m.group(1) what_op = m.group(2) which_perm = m.group(3) # -- old_mode = self.fs.get_path_info(path)['permissions'] # The mode to be applied by the operation, repeated three # times in a list, for user, group, and other respectively. # Initially these are identical, but some may change if we # have to respect the umask setting. op_perm = [ reduce(ops.ior, [Char_to_perm_byte[c] for c in which_perm]) ] * 3 if 'a' in who: who = 'ugo' elif who == '': who = 'ugo' # erase the umask bits inverted_umask = ~self.__get_umask() for i, field in enumerate(Fields): op_perm[i] &= (inverted_umask >> field[1]) & 0x7 # for each user, compute the permission bit and set it in the mode new_mode = 0 for i, tpl in enumerate(Fields): field, shift = tpl # shift by the bits specified for the field; keep only the # 3 lowest bits old = (old_mode >> shift) & 0x7 if field in who: if what_op == '-': new = old & ~op_perm[i] elif what_op == '=': new = op_perm[i] elif what_op == '+': new = old | op_perm[i] else: raise RuntimeError( "unexpected permission operation %s" % what_op ) else: # copy the previous permissions new = old new_mode |= new << shift return new_mode
0.000787
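# Standalone sketch of the same `[ugoa]*[-+=][rwx]*` parsing, minus the
# umask handling, so the bit manipulation can be tried on plain integers.
import operator as ops
import re
from functools import reduce

PERM = {'r': 4, 'w': 2, 'x': 1}
FIELDS = (('u', 6), ('g', 3), ('o', 0))

def apply_mode_string(old_mode, mode_string):
    m = re.match(r"\s*([ugoa]*)([-+=])([rwx]*)\s*$", mode_string)
    if not m:
        raise ValueError("invalid mode string %r" % mode_string)
    who, op, perms = m.groups()
    op_perm = reduce(ops.ior, (PERM[c] for c in perms), 0)
    if 'a' in who or not who:
        who = 'ugo'
    new_mode = 0
    for field, shift in FIELDS:
        old = (old_mode >> shift) & 0x7
        if field in who:
            new = {'-': old & ~op_perm, '=': op_perm, '+': old | op_perm}[op]
        else:
            new = old
        new_mode |= new << shift
    return new_mode

assert apply_mode_string(0o640, 'g+w') == 0o660
assert apply_mode_string(0o777, 'o-rwx') == 0o770
assert apply_mode_string(0o644, 'a=rx') == 0o555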
def clone(self, data=None, shared_data=True, *args, **overrides): """Clones the object, overriding data and parameters. Args: data: New data replacing the existing data shared_data (bool, optional): Whether to use existing data new_type (optional): Type to cast object to link (bool, optional): Whether clone should be linked Determines whether Streams and Links attached to original object will be inherited. *args: Additional arguments to pass to constructor **overrides: New keyword arguments to pass to constructor Returns: Cloned object """ with item_check(not shared_data and self._check_items): return super(MultiDimensionalMapping, self).clone(data, shared_data, *args, **overrides)
0.004338
def set_pre_processing_parameters(self, image_input_names = [], is_bgr = False, red_bias = 0.0, green_bias = 0.0, blue_bias = 0.0, gray_bias = 0.0, image_scale = 1.0): """Add pre-processing parameters to the neural network object Parameters ---------- image_input_names: [str] Name of input blobs that are images is_bgr: boolean | dict() Channel order for input blobs that are images. BGR if True else RGB. To specify a different value for each image input, provide a dictionary with input names as keys. red_bias: float | dict() Image re-centering parameter (red channel) blue_bias: float | dict() Image re-centering parameter (blue channel) green_bias: float | dict() Image re-centering parameter (green channel) gray_bias: float | dict() Image re-centering parameter (for grayscale images) image_scale: float | dict() Value by which to scale the images. See Also -------- set_input, set_output, set_class_labels """ spec = self.spec if not image_input_names: return # nothing to do here if not isinstance(is_bgr, dict): is_bgr = dict.fromkeys(image_input_names, is_bgr) if not isinstance(red_bias, dict): red_bias = dict.fromkeys(image_input_names, red_bias) if not isinstance(blue_bias, dict): blue_bias = dict.fromkeys(image_input_names, blue_bias) if not isinstance(green_bias, dict): green_bias = dict.fromkeys(image_input_names, green_bias) if not isinstance(gray_bias, dict): gray_bias = dict.fromkeys(image_input_names, gray_bias) if not isinstance(image_scale, dict): image_scale = dict.fromkeys(image_input_names, image_scale) # Add image inputs for input_ in spec.description.input: if input_.name in image_input_names: if input_.type.WhichOneof('Type') == 'multiArrayType': array_shape = tuple(input_.type.multiArrayType.shape) channels, height, width = array_shape if channels == 1: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('GRAYSCALE') elif channels == 3: if input_.name in is_bgr: if is_bgr[input_.name]: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('BGR') else: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB') else: input_.type.imageType.colorSpace = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value('RGB') else: raise ValueError("Channel Value %d not supported for image inputs" % channels) input_.type.imageType.width = width input_.type.imageType.height = height preprocessing = self.nn_spec.preprocessing.add() preprocessing.featureName = input_.name scaler = preprocessing.scaler if input_.name in image_scale: scaler.channelScale = image_scale[input_.name] else: scaler.channelScale = 1.0 if input_.name in red_bias: scaler.redBias = red_bias[input_.name] if input_.name in blue_bias: scaler.blueBias = blue_bias[input_.name] if input_.name in green_bias: scaler.greenBias = green_bias[input_.name] if input_.name in gray_bias: scaler.grayBias = gray_bias[input_.name]
0.011914
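# Hedged sketch of calling the method on a coremltools NeuralNetworkBuilder;
# the bias values are the usual ImageNet channel means, shown only as an
# illustration, and the builder is assumed to have its layers added already.
from coremltools.models import datatypes
from coremltools.models.neural_network import NeuralNetworkBuilder

input_features = [('image', datatypes.Array(3, 224, 224))]
output_features = [('probs', datatypes.Array(1000))]
builder = NeuralNetworkBuilder(input_features, output_features)
# ... layers would be added here ...
builder.set_pre_processing_parameters(
    image_input_names=['image'],
    is_bgr=True,
    red_bias=-123.68, green_bias=-116.78, blue_bias=-103.94,
    image_scale=1.0,
)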
def check_ntp_status(self, ntp_status_int):
    """ check the NTP status """
    # convert the ntp_status integer value into a human-readable value
    ntp_status_string = self.ntp_status.get(ntp_status_int, "unknown")
    if ntp_status_string == "unknown":
        return unknown, ("NTP status: " + ntp_status_string)
    # the NTP status should be synchronized (new MIB) or normalOperationPPS (old MIB)
    elif ntp_status_string != "synchronized" and ntp_status_string != "normalOperationPPS":
        # that is a critical condition, because the time reference will not be reliable anymore
        return critical, ("NTP status: " + ntp_status_string)
    return None
0.006906
def _advapi32_interpret_dsa_key_blob(bit_size, public_blob, private_blob): """ Takes a CryptoAPI DSS private key blob and converts it into the ASN.1 structures for the public and private keys :param bit_size: The integer bit size of the key :param public_blob: A byte string of the binary data after the public key header :param private_blob: A byte string of the binary data after the private key header :return: A 2-element tuple of (asn1crypto.keys.PublicKeyInfo, asn1crypto.keys.PrivateKeyInfo) """ len1 = 20 len2 = bit_size // 8 q_offset = len2 g_offset = q_offset + len1 x_offset = g_offset + len2 y_offset = x_offset p = int_from_bytes(private_blob[0:q_offset][::-1]) q = int_from_bytes(private_blob[q_offset:g_offset][::-1]) g = int_from_bytes(private_blob[g_offset:x_offset][::-1]) x = int_from_bytes(private_blob[x_offset:x_offset + len1][::-1]) y = int_from_bytes(public_blob[y_offset:y_offset + len2][::-1]) public_key_info = keys.PublicKeyInfo({ 'algorithm': keys.PublicKeyAlgorithm({ 'algorithm': 'dsa', 'parameters': keys.DSAParams({ 'p': p, 'q': q, 'g': g, }) }), 'public_key': core.Integer(y), }) private_key_info = keys.PrivateKeyInfo({ 'version': 0, 'private_key_algorithm': keys.PrivateKeyAlgorithm({ 'algorithm': 'dsa', 'parameters': keys.DSAParams({ 'p': p, 'q': q, 'g': g, }) }), 'private_key': core.Integer(x), }) return (public_key_info, private_key_info)
0.000571
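# The [::-1] slices above exist because CryptoAPI stores these integers
# little-endian while asn1crypto's int_from_bytes expects big-endian;
# a two-byte illustration:
little_endian = b'\x00\x01'                          # 256 in little-endian
assert int.from_bytes(little_endian[::-1], 'big') == 256
assert int.from_bytes(little_endian, 'big') == 1     # wrong without the flip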
def goes_requires(self, regs):
    """ Returns whether any of the goes_to blocks require any of
    the given registers.
    """
    if len(self) and self.mem[-1].inst == 'call' and self.mem[-1].condition_flag is None:
        for block in self.calls:
            if block.is_used(regs, 0):
                return True

            d = block.destroys()
            if not len([x for x in regs if x not in d]):
                return False  # If all registers are destroyed, they're not used

    for block in self.goes_to:
        if block.is_used(regs, 0):
            return True

    return False
0.00607
def process_node_search(self, node, q, **kwargs): ''' API: process_node_search(self, node, q, **kwargs) Description: Used by search() method. Process nodes along the search. Should not be called by user directly. Input: node: Name of the node being processed. q: Queue data structure. kwargs: Keyword arguments. Post: 'priority' attribute of the node may get updated. ''' if isinstance(q, PriorityQueue): self.get_node(node).set_attr('priority', q.get_priority(node))
0.003339
def _create_lacp(self, datapath, port, req): """create a LACP packet.""" actor_system = datapath.ports[datapath.ofproto.OFPP_LOCAL].hw_addr res = slow.lacp( actor_system_priority=0xffff, actor_system=actor_system, actor_key=req.actor_key, actor_port_priority=0xff, actor_port=port, actor_state_activity=req.LACP_STATE_PASSIVE, actor_state_timeout=req.actor_state_timeout, actor_state_aggregation=req.actor_state_aggregation, actor_state_synchronization=req.actor_state_synchronization, actor_state_collecting=req.actor_state_collecting, actor_state_distributing=req.actor_state_distributing, actor_state_defaulted=req.LACP_STATE_OPERATIONAL_PARTNER, actor_state_expired=req.LACP_STATE_NOT_EXPIRED, partner_system_priority=req.actor_system_priority, partner_system=req.actor_system, partner_key=req.actor_key, partner_port_priority=req.actor_port_priority, partner_port=req.actor_port, partner_state_activity=req.actor_state_activity, partner_state_timeout=req.actor_state_timeout, partner_state_aggregation=req.actor_state_aggregation, partner_state_synchronization=req.actor_state_synchronization, partner_state_collecting=req.actor_state_collecting, partner_state_distributing=req.actor_state_distributing, partner_state_defaulted=req.actor_state_defaulted, partner_state_expired=req.actor_state_expired, collector_max_delay=0) self.logger.info("SW=%s PORT=%d LACP sent.", dpid_to_str(datapath.id), port) self.logger.debug(str(res)) return res
0.001086
def _modify_run_to_states(self, state):
    """ This is a special case: a step_over is triggered inside a hierarchy state and affects its
    last child. In this case self.run_to_states has to be modified to contain the parent of the
    hierarchy state; otherwise the execution won't respect the step_over anymore and will run until
    the end of the state machine. The same holds for a step_out. The reason for this is that
    handle_execution_mode() cannot be called between the last state of a hierarchy state and the
    termination of the hierarchy state itself.
    """
    if self._status.execution_mode is StateMachineExecutionStatus.FORWARD_OVER or \
            self._status.execution_mode is StateMachineExecutionStatus.FORWARD_OUT:
        for state_path in copy.deepcopy(self.run_to_states):
            if state_path == state.get_path():
                logger.verbose("Modifying run_to_states; triggered by state %s!", state.name)
                self.run_to_states.remove(state_path)
                from rafcon.core.states.state import State
                if isinstance(state.parent, State):
                    from rafcon.core.states.library_state import LibraryState
                    if isinstance(state.parent, LibraryState):
                        parent_path = state.parent.parent.get_path()
                    else:
                        parent_path = state.parent.get_path()
                    self.run_to_states.append(parent_path)
                break
0.006879
def _broadcast(value, target): """Broadcast a value to match the batching dimensions of a target. If necessary the value is converted into a tensor. Both value and target should be of the same dtype. Args: value: A value to broadcast. target: A `Tensor` of shape [b1, ..., bn, d]. Returns: A `Tensor` of shape [b1, ..., bn] and same dtype as the target. """ return tf.broadcast_to( tf.convert_to_tensor(value=value, dtype=target.dtype), distribution_util.prefer_static_shape(target)[:-1])
0.005671
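# Usage sketch (TF2 eager assumed): both a Python scalar and an already
# batch-shaped tensor come back with the event dimension stripped off.
import tensorflow as tf

target = tf.zeros([2, 3, 4])                      # batch dims [2, 3], event dim 4
print(_broadcast(0.5, target).shape)              # (2, 3)
print(_broadcast(tf.ones([2, 3]), target).shape)  # (2, 3)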
def complex_mult(A, B, shifts, start):
    """ Generate a shift-and-add multiplier that can shift and add multiple bits per clock cycle.
    Uses substantially more space than `simple_mult()` but is much faster.

    :param WireVector A, B: two input wires for the multiplication
    :param int shifts: number of bit positions the registers are shifted per clock cycle
        (cannot be greater than the length of `A` or `B`)
    :param WireVector start: one-bit start signal
    :returns: Register containing the product; the "done" signal
    """
    alen = len(A)
    blen = len(B)
    areg = pyrtl.Register(alen)
    breg = pyrtl.Register(alen + blen)
    accum = pyrtl.Register(alen + blen)
    done = (areg == 0)  # Multiplication is finished when a becomes 0

    if (shifts > alen) or (shifts > blen):
        raise pyrtl.PyrtlError("shift is larger than one or both of the parameters A or B, "
                               "please choose a smaller shift")

    # During multiplication, shift areg right by 'shifts' bits every cycle
    # and breg left by 'shifts' bits every cycle
    with pyrtl.conditional_assignment:
        with start:  # initialization
            areg.next |= A
            breg.next |= B
            accum.next |= 0
        with ~done:  # don't run when there's no work to do
            # "Multiply" shifted breg by LSB of areg by cond. adding
            areg.next |= libutils._shifted_reg_next(areg, 'r', shifts)  # right shift
            breg.next |= libutils._shifted_reg_next(breg, 'l', shifts)  # left shift
            accum.next |= accum + _one_cycle_mult(areg, breg, shifts)

    return accum, done
0.003108
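# Hedged usage sketch: build and simulate the multiplier with PyRTL, assuming
# the helpers referenced above (_one_cycle_mult, libutils._shifted_reg_next)
# are importable alongside complex_mult. 12 * 5 should yield 60 once done is high.
import pyrtl

pyrtl.reset_working_block()
a, b = pyrtl.Input(8, 'a'), pyrtl.Input(8, 'b')
start = pyrtl.Input(1, 'start')
product, done = complex_mult(a, b, shifts=2, start=start)
prod_out, done_out = pyrtl.Output(name='prod'), pyrtl.Output(name='done')
prod_out <<= product
done_out <<= done

sim = pyrtl.Simulation(tracer=pyrtl.SimulationTrace())
sim.step({'a': 12, 'b': 5, 'start': 1})     # operands latched at the clock edge
sim.step({'a': 0, 'b': 0, 'start': 0})      # first shift-and-add cycle
while sim.inspect('done') == 0:
    sim.step({'a': 0, 'b': 0, 'start': 0})  # keep shifting until areg drains
print(sim.inspect('prod'))                  # expect 60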