Columns: text (string, lengths 78 to 104k) and score (float64, range 0 to 0.18).
def profile_v3_to_proofs(profile, fqdn, refresh=False, address=None):
    """ Convert profile format v3 to proofs """
    proofs = []
    try:
        test = profile.items()
    except:
        return proofs
    if 'account' in profile:
        accounts = profile['account']
    else:
        return proofs
    for account in accounts:
        # skip if proof service is not supported
        if 'service' in account and account['service'].lower() not in SITES:
            continue
        if 'proofType' in account and account['proofType'] == "http":
            try:
                proof = {"service": account['service'],
                         "proof_url": account['proofUrl'],
                         "identifier": account['identifier'],
                         "valid": False}
                if is_valid_proof(account['service'], account['identifier'],
                                  fqdn, account['proofUrl'], address=address):
                    proof["valid"] = True
                proofs.append(proof)
            except Exception as e:
                pass
    return proofs
0.006233
def show_metal(self):
    """Visualize metal coordination."""
    metal_complexes = self.plcomplex.metal_complexes
    if not len(metal_complexes) == 0:
        self.select_by_ids('Metal-M', self.metal_ids)
        for metal_complex in metal_complexes:
            cmd.select('tmp_m', 'id %i' % metal_complex.metal_id)
            cmd.select('tmp_t', 'id %i' % metal_complex.target_id)
            if metal_complex.location == 'water':
                cmd.select('Metal-W', 'Metal-W or id %s' % metal_complex.target_id)
            if metal_complex.location.startswith('protein'):
                cmd.select('tmp_t', 'tmp_t & %s' % self.protname)
                cmd.select('Metal-P', 'Metal-P or (id %s & %s)'
                           % (metal_complex.target_id, self.protname))
            if metal_complex.location == 'ligand':
                cmd.select('tmp_t', 'tmp_t & %s' % self.ligname)
                cmd.select('Metal-L', 'Metal-L or (id %s & %s)'
                           % (metal_complex.target_id, self.ligname))
            cmd.distance('MetalComplexes', 'tmp_m', 'tmp_t')
            cmd.delete('tmp_m or tmp_t')
    if self.object_exists('MetalComplexes'):
        cmd.set('dash_color', 'violetpurple', 'MetalComplexes')
        cmd.set('dash_gap', 0.5, 'MetalComplexes')
        # Show water molecules for metal complexes
        cmd.show('spheres', 'Metal-W')
        cmd.color('lightblue', 'Metal-W')
0.003418
def delete(self, key_name):
    """Delete the key and return true if the key was deleted, else false"""
    self.db.remove(Query().name == key_name)
    return self.get(key_name) == {}
0.009756
def _handle_actions(self, state, current_run, func, sp_addr, accessed_registers):
    """
    For a given state and current location of execution, will update a
    function by adding the offsets of appropriate actions to the stack
    variable or argument registers for the function.

    :param SimState state: upcoming state.
    :param SimSuccessors current_run: possible result states.
    :param knowledge.Function func: current function.
    :param int sp_addr: stack pointer address.
    :param set accessed_registers: set of previously accessed registers.
    """
    se = state.solver
    if func is not None and sp_addr is not None:
        # Fix the stack pointer (for example, skip the return address on the stack)
        new_sp_addr = sp_addr + self.project.arch.call_sp_fix
        actions = [a for a in state.history.recent_actions
                   if a.bbl_addr == current_run.addr]
        for a in actions:
            if a.type == "mem" and a.action == "read":
                try:
                    addr = se.eval_one(a.addr.ast, default=0)
                except (claripy.ClaripyError, SimSolverModeError):
                    continue
                if (self.project.arch.call_pushes_ret and addr >= new_sp_addr) or \
                        (not self.project.arch.call_pushes_ret and addr >= new_sp_addr):
                    # TODO: What if a variable located higher than the stack is modified as well? We probably want
                    # TODO: to make sure the accessing address falls in the range of stack
                    offset = addr - new_sp_addr
                    func._add_argument_stack_variable(offset)
            elif a.type == "reg":
                offset = a.offset
                if a.action == "read" and offset not in accessed_registers:
                    func._add_argument_register(offset)
                elif a.action == "write":
                    accessed_registers.add(offset)
    else:
        l.error("handle_actions: Function not found, or stack pointer is None. "
                "It might indicate an unbalanced stack.")
0.005457
def main(args):
    '''
    register_retinotopy.main(args) can be given a list of arguments, such as
    sys.argv[1:]; these arguments may include any options and must include at
    least one subject id. All subjects whose ids are given are registered to a
    retinotopy model, and the resulting registration, as well as the
    predictions made by the model in the registration, are exported.
    '''
    m = register_retinotopy_plan(args=args)
    # force completion
    files = m['files']
    if len(files) > 0:
        return 0
    else:
        print('Error: No files exported.', file=sys.stderr)
        return 1
0.006494
def numparser(strict=False):
    """Return a function that will attempt to parse the value as a number,
    trying :func:`int`, :func:`long`, :func:`float` and :func:`complex` in
    that order. If all fail, return the value as-is, unless ``strict=True``,
    in which case raise the underlying exception.
    """
    def f(v):
        try:
            return int(v)
        except (ValueError, TypeError):
            pass
        try:
            return long(v)
        except (ValueError, TypeError):
            pass
        try:
            return float(v)
        except (ValueError, TypeError):
            pass
        try:
            return complex(v)
        except (ValueError, TypeError) as e:
            if strict:
                raise e
        return v
    return f
0.001272
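A brief usage sketch for the numparser entry above (assumes the function is in scope; the `long` branch implies Python 2, so under Python 3 that branch would need to be dropped):

parse = numparser()
parse('42')      # -> 42 (int)
parse('3.14')    # -> 3.14 (float)
parse('1+2j')    # -> (1+2j) (complex)
parse('abc')     # -> 'abc' (returned as-is)
numparser(strict=True)('abc')  # raises ValueError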
def say(text=None, preference_program="festival", background=False,
        silent=True, filepath=None):
    """
    Say specified text to speakers or to file, as specified. Determine the
    program to use based on the specified program preference and availability,
    then say the text to speakers or synthesize speech of the text and save it
    to file, as specified.
    """
    if not text:
        if not silent:
            print("text not specified")
        return False
    # Determine the program to use based on program preference and program
    # availability.
    preference_order_programs = [
        "festival",
        "espeak",
        "pico2wave",
        "deep_throat.py"
    ]
    # Remove the specified preference program from the default program
    # preferences order and prioritise it.
    preference_order_programs.remove(preference_program)
    preference_order_programs.insert(0, preference_program)
    # Determine first program that is available in the programs order of
    # preference.
    preference_order_programs_available = \
        [program for program in preference_order_programs
         if shijian.which(program) is not None]
    if not preference_order_programs_available:
        if not silent:
            print("text-to-speech program unavailable")
        return False
    program = preference_order_programs_available[0]
    if program != preference_program and not silent:
        print("text-to-speech preference program unavailable, using {program}".format(program=program))
    if program == "festival":
        if not filepath:
            command = """
            echo "{text}" | festival --tts
            """.format(text=text)
        else:
            command = """
            echo "{text}" | text2wave -o {filepath}
            """.format(text=text, filepath=filepath)
    elif program == "espeak":
        if not filepath:
            command = """
            echo "{text}" | espeak
            """.format(text=text)
        else:
            command = """
            echo "{text}" | espeak -w {filepath}
            """.format(text=text, filepath=filepath)
    elif program == "pico2wave":
        if not filepath:
            command = """
            pico2wave --wave="{filepath}" "{text}"
            aplay --quiet "{filepath}"
            """.format(text=text, filepath=shijian.tmp_filepath() + ".wav")
        else:
            command = """
            pico2wave --wave="{filepath}" "{text}"
            """.format(text=text, filepath=filepath)
    elif program == "deep_throat.py":
        if not filepath:
            command = """
            echo "{text}" | deep_throat.py
            """.format(text=text)
        else:
            command = """
            deep_throat.py --text="{text}" --savetowavefile --outfile="{filepath}"
            """.format(text=text, filepath=filepath)
    if filepath:
        background = False
    if background:
        command = command.rstrip().rstrip("\n") + " &"
    command = textwrap.dedent(command)
    engage_command(command=command, background=background)
0.016013
def addPluginPath(cls, pluginpath):
    """
    Adds the plugin path for this class to the given path. The inputted
    pluginpath value can either be a list of strings, or a string containing
    paths separated by the OS specific path separator (':' on Mac & Linux,
    ';' on Windows)

    :param pluginpath | [<str>, ..] || <str>
    """
    prop_key = '_%s__pluginpath' % cls.__name__
    curr_path = getattr(cls, prop_key, None)
    if not curr_path:
        curr_path = []
        setattr(cls, prop_key, curr_path)

    if isinstance(pluginpath, basestring):
        pluginpath = pluginpath.split(os.path.pathsep)

    for path in pluginpath:
        if not path:
            continue
        path = os.path.expanduser(os.path.expandvars(path))
        paths = path.split(os.path.pathsep)
        if len(paths) > 1:
            cls.addPluginPath(paths)
        else:
            curr_path.append(path)
0.002953
def get_cts_property(self, prop, lang=None):
    """ Get given property in CTS Namespace

    .. example::
        collection.get_cts_property("groupname", "eng")

    :param prop: Property to get (without namespace)
    :param lang: Language to get for given value
    :return: Value or default if lang is set, else whole set of values
    :rtype: dict or Literal
    """
    x = {
        obj.language: obj
        for obj in self.metadata.get(RDF_NAMESPACES.CTS.term(prop))
    }
    if lang is not None:
        if lang in x:
            return x[lang]
        return next(iter(x.values()))
    return x
0.004566
def setup_extended_logging(opts):
    '''
    Setup any additional logging handlers, internal or external
    '''
    if is_extended_logging_configured() is True:
        # Don't re-configure external loggers
        return

    # Explicit late import of salt's loader
    import salt.loader

    # Let's keep a reference to the current logging handlers
    initial_handlers = logging.root.handlers[:]

    # Load any additional logging handlers
    providers = salt.loader.log_handlers(opts)

    # Let's keep track of the new logging handlers so we can sync the stored
    # log records with them
    additional_handlers = []

    for name, get_handlers_func in six.iteritems(providers):
        logging.getLogger(__name__).info('Processing `log_handlers.%s`', name)
        # Keep a reference to the logging handlers count before getting the
        # possible additional ones.
        initial_handlers_count = len(logging.root.handlers)

        handlers = get_handlers_func()
        if isinstance(handlers, types.GeneratorType):
            handlers = list(handlers)
        elif handlers is False or handlers == [False]:
            # A false return value means not configuring any logging handler on
            # purpose
            logging.getLogger(__name__).info(
                'The `log_handlers.%s.setup_handlers()` function returned '
                '`False` which means no logging handler was configured on '
                'purpose. Continuing...', name
            )
            continue
        else:
            # Make sure we have an iterable
            handlers = [handlers]

        for handler in handlers:
            if not handler and \
                    len(logging.root.handlers) == initial_handlers_count:
                logging.getLogger(__name__).info(
                    'The `log_handlers.%s`, did not return any handlers '
                    'and the global handlers count did not increase. This '
                    'could be a sign of `log_handlers.%s` not working as '
                    'supposed', name, name
                )
                continue

            logging.getLogger(__name__).debug(
                'Adding the \'%s\' provided logging handler: \'%s\'',
                name, handler
            )
            additional_handlers.append(handler)
            logging.root.addHandler(handler)

    for handler in logging.root.handlers:
        if handler in initial_handlers:
            continue
        additional_handlers.append(handler)

    # Sync the null logging handler messages with the temporary handler
    if LOGGING_STORE_HANDLER is not None:
        LOGGING_STORE_HANDLER.sync_with_handlers(additional_handlers)
    else:
        logging.getLogger(__name__).debug(
            'LOGGING_STORE_HANDLER is already None, can\'t sync messages '
            'with it'
        )

    # Remove the temporary queue logging handler
    __remove_queue_logging_handler()

    # Remove the temporary null logging handler (if it exists)
    __remove_null_logging_handler()

    global __EXTERNAL_LOGGERS_CONFIGURED
    __EXTERNAL_LOGGERS_CONFIGURED = True
0.00032
def _evaluatelinearForces(Pot, x, t=0.):
    """Raw, undecorated function for internal use"""
    if isinstance(Pot, list):
        sum = 0.
        for pot in Pot:
            sum += pot._force_nodecorator(x, t=t)
        return sum
    elif isinstance(Pot, linearPotential):
        return Pot._force_nodecorator(x, t=t)
    else:  # pragma: no cover
        raise PotentialError("Input to 'evaluateForces' is neither a linearPotential-instance or a list of such instances")
0.025751
def register_token(
        self,
        registry_address_hex: typing.AddressHex,
        token_address_hex: typing.AddressHex,
        retry_timeout: typing.NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
) -> TokenNetwork:
    """ Register a token with the raiden token manager.

    Args:
        registry_address_hex (string): a hex encoded registry address.
        token_address_hex (string): a hex encoded token address.

    Returns:
        The token network proxy.
    """
    registry_address = decode_hex(registry_address_hex)
    token_address = decode_hex(token_address_hex)

    registry = self._raiden.chain.token_network_registry(registry_address)

    contracts_version = self._raiden.contract_manager.contracts_version
    if contracts_version == DEVELOPMENT_CONTRACT_VERSION:
        token_network_address = registry.add_token_with_limits(
            token_address=token_address,
            channel_participant_deposit_limit=UINT256_MAX,
            token_network_deposit_limit=UINT256_MAX,
        )
    else:
        token_network_address = registry.add_token_without_limits(
            token_address=token_address,
        )

    # Register the channel manager with the raiden registry
    waiting.wait_for_payment_network(
        self._raiden,
        registry.address,
        token_address,
        retry_timeout,
    )

    return self._raiden.chain.token_network(token_network_address)
0.001975
def _classify_load_constant(self, regs_init, regs_fini, mem_fini, written_regs, read_regs):
    """Classify load-constant gadgets.
    """
    matches = []

    # Check for "dst_reg <- constant" pattern.
    for dst_reg, dst_val in regs_fini.items():
        # Make sure the *dst* register was written.
        if dst_reg not in written_regs:
            continue

        # Check restrictions...
        if dst_val == regs_init[dst_reg]:
            continue

        dst_val_ir = ReilImmediateOperand(dst_val, self._arch_regs_size[dst_reg])
        dst_reg_ir = ReilRegisterOperand(dst_reg, self._arch_regs_size[dst_reg])

        matches.append({
            "src": [dst_val_ir],
            "dst": [dst_reg_ir]
        })

    return matches
0.00612
def infer_dims(self, x, y, dims_x, dims_y, dims_out):
    """Infer probable output from input x, y
    """
    OptimizedInverseModel.infer_x(self, y)
    assert len(x) == len(dims_x)
    assert len(y) == len(dims_y)

    if len(self.fmodel.dataset) == 0:
        return [[0.0] * self.dim_out]
    else:
        _, index = self.fmodel.dataset.nn_dims(x, y, dims_x, dims_y, k=1)
        guesses = [self.fmodel.dataset.get_dims(index[0], dims_out)]

        result = []
        for g in guesses:
            res = cma.fmin(lambda q: self._error_dims(q, dims_x, dims_y, dims_out),
                           g, self.cmaes_sigma,
                           options={'bounds': [self.lower, self.upper],
                                    'verb_log': 0,
                                    'verb_disp': False,
                                    'maxfevals': self.maxfevals,
                                    'seed': self.seed})
            result.append((res[1], res[0]))

        return sorted(result)[0][1]
0.010848
def add(self, urls):
    """
    Add the provided urls to this purge request

    The urls argument can be a single string, a list of strings, a queryset
    or model instance. Models must implement `get_absolute_url()`.
    """
    if isinstance(urls, (list, tuple)):
        self.urls.extend(urls)
    elif isinstance(urls, basestring):
        self.urls.append(urls)
    elif isinstance(urls, QuerySet):
        for obj in urls:
            self.urls.append(obj.get_absolute_url())
    elif hasattr(urls, 'get_absolute_url'):
        self.urls.append(urls.get_absolute_url())
    else:
        raise TypeError("Don't know how to handle %r" % urls)
0.002797
def git_tag_to_semver(git_tag: str) -> SemVer:
    """
    :git_tag: A string representation of a Git tag.

    Searches a Git tag's string representation for a SemVer, and returns that
    as a SemVer object.
    """
    pattern = re.compile(r'[0-9]+\.[0-9]+\.[0-9]+$')
    match = pattern.search(git_tag)
    if match:
        version = match.group(0)
    else:
        raise InvalidTagFormatException('Tag passed contains no SemVer.')
    return SemVer.from_str(version)
0.00211
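A hedged usage sketch for the git_tag_to_semver entry above (assumes its SemVer class and InvalidTagFormatException are importable from the same project): because the regex is anchored with `$`, prefixed tags still parse.

git_tag_to_semver('v1.2.3')          # -> SemVer for 1.2.3
git_tag_to_semver('release-0.10.1')  # -> SemVer for 0.10.1
# git_tag_to_semver('nightly') would raise InvalidTagFormatException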
def _recursive_cleanup(foo):
    """
    Aggressively cleans up things that look empty.
    """
    if isinstance(foo, dict):
        for (key, val) in list(foo.items()):
            if isinstance(val, dict):
                _recursive_cleanup(val)
            if val == "" or val == [] or val == {}:
                del foo[key]
0.00304
def processed_shape(self, shape):
    """
    Shape of preprocessed state given original shape.

    Args:
        shape: original state shape

    Returns:
        processed state shape
    """
    for processor in self.preprocessors:
        shape = processor.processed_shape(shape=shape)
    return shape
0.00597
def _deploy_iapp(self, iapp_name, actions, deploying_device):
    '''Deploy iapp to add trusted device

    :param iapp_name: str -- name of iapp
    :param actions: dict -- actions definition of iapp sections
    :param deploying_device: ManagementRoot object -- device where the iapp
        will be created
    '''
    tmpl = deploying_device.tm.sys.application.templates.template
    serv = deploying_device.tm.sys.application.services.service
    tmpl.create(name=iapp_name, partition=self.partition, actions=actions)
    pollster(deploying_device.tm.sys.application.templates.template.load)(
        name=iapp_name, partition=self.partition
    )
    serv.create(
        name=iapp_name,
        partition=self.partition,
        template='/%s/%s' % (self.partition, iapp_name)
    )
0.002275
def apt_purge(packages, fatal=False):
    """Purge one or more packages."""
    cmd = ['apt-get', '--assume-yes', 'purge']
    if isinstance(packages, six.string_types):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Purging {}".format(packages))
    _run_apt_command(cmd, fatal)
0.003236
def linebuffered_stdout():
    """ Always line buffer stdout so pipes and redirects are CLI friendly. """
    if sys.stdout.line_buffering:
        return sys.stdout
    orig = sys.stdout
    new = type(orig)(orig.buffer, encoding=orig.encoding,
                     errors=orig.errors, line_buffering=True)
    new.mode = orig.mode
    return new
0.002882
def polygon_to_geohashes(polygon, precision, inner=True):
    """
    :param polygon: shapely polygon.
    :param precision: int. Geohashes' precision that form resulting polygon.
    :param inner: bool, default 'True'. If false, geohashes that are
        completely outside from the polygon are ignored.
    :return: set. Set of geohashes that form the polygon.
    """
    inner_geohashes = set()
    outer_geohashes = set()

    envelope = polygon.envelope
    centroid = polygon.centroid

    testing_geohashes = queue.Queue()
    testing_geohashes.put(geohash.encode(centroid.y, centroid.x, precision))

    while not testing_geohashes.empty():
        current_geohash = testing_geohashes.get()

        if current_geohash not in inner_geohashes and current_geohash not in outer_geohashes:
            current_polygon = geohash_to_polygon(current_geohash)

            condition = envelope.contains(current_polygon) if inner else envelope.intersects(current_polygon)

            if condition:
                if inner:
                    if polygon.contains(current_polygon):
                        inner_geohashes.add(current_geohash)
                    else:
                        outer_geohashes.add(current_geohash)
                else:
                    if polygon.intersects(current_polygon):
                        inner_geohashes.add(current_geohash)
                    else:
                        outer_geohashes.add(current_geohash)
                for neighbor in geohash.neighbors(current_geohash):
                    if neighbor not in inner_geohashes and neighbor not in outer_geohashes:
                        testing_geohashes.put(neighbor)

    return inner_geohashes
0.002946
def to_triangulation(self):
    """
    Returns the mesh as a matplotlib.tri.Triangulation instance. (2D only)
    """
    from matplotlib.tri import Triangulation
    conn = self.split("simplices").unstack()
    coords = self.nodes.coords.copy()
    node_map = pd.Series(data=np.arange(len(coords)), index=coords.index)
    conn = node_map.loc[conn.values.flatten()].values.reshape(*conn.shape)
    return Triangulation(coords.x.values, coords.y.values, conn)
0.012903
def entries(self):
    """return the actual list of entries tagged with this tag"""
    Tags = Query()
    tag = self.table.get(Tags.name == self.name)
    posts = tag['post_ids']
    for id in posts:
        post = self.db.posts.get(doc_id=id)
        if not post:  # pragma: no coverage
            raise ValueError("No post found for doc_id %s" % id)
        yield Entry(os.path.join(CONFIG['content_root'], post['filename']), id)
0.006494
def lsqfit(self, data=None, pdata=None, prior=None, p0=None, **kargs):
    """ Compute least-squares fit of models to data.

    :meth:`MultiFitter.lsqfit` fits all of the models together, in a
    single fit. It returns the |nonlinear_fit| object from the fit.

    To see plots of the fit data divided by the fit function
    with the best-fit parameters use

        fit.show_plots()

    This method has optional keyword arguments ``save`` and ``view``;
    see documentation for :class:`lsqfit.MultiFitter.show_plots`
    for more information. Plotting requires module :mod:`matplotlib`.

    To bootstrap a fit, use ``fit.bootstrapped_fit_iter(...)``; see
    :meth:`lsqfit.nonlinear_fit.bootstrapped_fit_iter` for more
    information.

    Args:
        data: Input data. One of ``data`` or ``pdata`` must be
            specified but not both. ``pdata`` is obtained from ``data``
            by collecting the output from ``m.builddata(data)``
            for each model ``m`` and storing it in a dictionary
            with key ``m.datatag``.
        pdata: Input data that has been processed by the
            models using :meth:`MultiFitter.process_data` or
            :meth:`MultiFitter.process_dataset`. One of
            ``data`` or ``pdata`` must be specified but not both.
        prior (dict): Bayesian prior for fit parameters used by the models.
        p0: Dictionary, indexed by parameter labels, containing
            initial values for the parameters in the fit. Setting
            ``p0=None`` implies that initial values are extracted from
            the prior. Setting ``p0="filename"`` causes the fitter to
            look in the file with name ``"filename"`` for initial
            values and to write out best-fit parameter values after
            the fit (for the next call to ``self.lsqfit()``).
        kargs: Arguments that (temporarily) override parameters specified
            when the :class:`MultiFitter` was created. Can also include
            additional arguments to be passed through to the :mod:`lsqfit`
            fitter.
    """
    # gather parameters
    if prior is None:
        raise ValueError('no prior')
    kargs, oldargs = self.set(**kargs)
    # save parameters for bootstrap (in case needed)
    fitter_args_kargs = (
        self.chained_lsqfit,
        dict(data=data, prior=prior, pdata=pdata, models=self.models),
        dict(kargs),
    )
    # build prior, data and function
    fitprior = self.buildprior(prior=prior, mopt=self.mopt)
    fitdata = self.builddata(
        mopt=self.mopt, data=data, pdata=pdata, prior=prior
    )
    fitfcn = self.buildfitfcn()
    # fit
    self.fit = lsqfit.nonlinear_fit(
        data=fitdata, prior=fitprior, fcn=fitfcn, p0=p0, **self.fitterargs
    )
    if len(self.flatmodels) > 1:
        fname = self.fitname(
            '(' + ','.join([self.fitname(k.datatag) for k in self.flatmodels]) + ')'
        )
    else:
        fname = self.fitname(self.flatmodels[0].datatag)
    self.fit.chained_fits = collections.OrderedDict([(fname, self.fit)])

    # add methods for printing and plotting
    def _formatall(*args, **kargs):
        " Add-on method for fits returned by chained_lsqfit. "
        ans = ''
        for x in self.fit.chained_fits:
            ans += 10 * '=' + ' ' + str(x) + '\n'
            ans += self.fit.chained_fits[x].format(*args, **kargs)
            ans += '\n'
        return ans[:-1]
    self.fit.formatall = _formatall

    def _show_plots(save=False, view='ratio'):
        MultiFitter.show_plots(
            fitdata=fitdata, fitval=fitfcn(self.fit.p),
            save=save, view=view,
        )
    self.fit.show_plots = _show_plots

    # restore default keywords
    self.set(**oldargs)

    # add bootstrap method
    fitter_args_kargs[1]['p0'] = self.fit.pmean

    def _bstrap_iter(n=None, datalist=None, pdatalist=None, **kargs):
        return MultiFitter._bootstrapped_fit_iter(
            fitter_args_kargs, n=n, datalist=datalist,
            pdatalist=pdatalist, **kargs
        )
    self.fit.bootstrapped_fit_iter = _bstrap_iter

    return self.fit
0.001095
def update_delivery_note_item(self, delivery_note_item_id, delivery_note_item_dict):
    """
    Updates a delivery note item

    :param delivery_note_item_id: delivery note item id
    :param delivery_note_item_dict: dict
    :return: dict
    """
    return self._create_put_request(
        resource=DELIVERY_NOTE_ITEMS,
        billomat_id=delivery_note_item_id,
        send_data=delivery_note_item_dict
    )
0.006536
def set_res(self, res):
    """ reset the private Pst.res attribute

    Parameters
    ----------
    res : (varies)
        something to use as Pst.res attribute

    """
    if isinstance(res, str):
        res = pst_utils.read_resfile(res)
    self.__res = res
0.013423
def call_decorator(cls, func):
    """class function that MUST be specified as decorator to the `__call__`
    method overridden by sub-classes.
    """
    @wraps(func)
    def _wrap(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except Exception:
            self.logger.exception('While executing benchmark')
            if not (self.catch_child_exception or False):
                raise
    return _wrap
0.003984
def __query(self, input_string):
    # type: (text_type)->text_type
    """* What you can do
    - It takes the result of Juman++
    - This function monitors the time it takes to get the result.
    """
    signal.signal(signal.SIGALRM, self.__notify_handler)
    signal.alarm(self.timeout_second)

    self.process_analyzer.sendline(input_string)
    buffer = ""
    while True:
        line_string = self.process_analyzer.readline()  # type: text_type
        if line_string.strip() == input_string:
            """Skip if process returns the same input string"""
            continue
        elif line_string.strip() == self.pattern:
            buffer += line_string
            signal.alarm(0)
            return buffer
        else:
            buffer += line_string
0.003505
def save_model(self, file_name='model.sbgn'):
    """Save the assembled SBGN model in a file.

    Parameters
    ----------
    file_name : Optional[str]
        The name of the file to save the SBGN network to.
        Default: model.sbgn
    """
    model = self.print_model()
    with open(file_name, 'wb') as fh:
        fh.write(model)
0.005249
def velocity_from_bundle(self, bundle):
    """[DEPRECATED] Return velocity, given the `coefficient_bundle()` return value."""
    coefficients, days_per_set, T, twot1 = bundle
    coefficient_count = coefficients.shape[2]

    # Chebyshev derivative:
    dT = np.empty_like(T)
    dT[0] = 0.0
    dT[1] = 1.0
    dT[2] = twot1 + twot1
    for i in range(3, coefficient_count):
        dT[i] = twot1 * dT[i-1] - dT[i-2] + T[i-1] + T[i-1]
    dT *= 2.0
    dT /= days_per_set

    return (dT.T * coefficients).sum(axis=2)
0.005226
def _get_hanging_wall_term(self, C, dists, rup):
    """
    Compute and return hanging wall model term, see page 1038.
    """
    if rup.dip == 90.0:
        return np.zeros_like(dists.rx)
    else:
        Fhw = np.zeros_like(dists.rx)
        Fhw[dists.rx > 0] = 1.
        # Compute taper t1
        T1 = np.ones_like(dists.rx)
        T1 *= 60. / 45. if rup.dip <= 30. else (90. - rup.dip) / 45.0
        # Compute taper t2 (eq 12 at page 1039) - a2hw set to 0.2 as
        # indicated at page 1041
        T2 = np.zeros_like(dists.rx)
        a2hw = 0.2
        if rup.mag > 6.5:
            T2 += (1. + a2hw * (rup.mag - 6.5))
        elif rup.mag > 5.5:
            T2 += (1. + a2hw * (rup.mag - 6.5) -
                   (1. - a2hw) * (rup.mag - 6.5)**2)
        else:
            T2 *= 0.
        # Compute taper t3 (eq. 13 at page 1039) - r1 and r2 specified at
        # page 1040
        T3 = np.zeros_like(dists.rx)
        r1 = rup.width * np.cos(np.radians(rup.dip))
        r2 = 3. * r1
        #
        idx = dists.rx < r1
        T3[idx] = (np.ones_like(dists.rx)[idx] * self.CONSTS['h1'] +
                   self.CONSTS['h2'] * (dists.rx[idx] / r1) +
                   self.CONSTS['h3'] * (dists.rx[idx] / r1)**2)
        #
        idx = ((dists.rx >= r1) & (dists.rx <= r2))
        T3[idx] = 1. - (dists.rx[idx] - r1) / (r2 - r1)
        # Compute taper t4 (eq. 14 at page 1040)
        T4 = np.zeros_like(dists.rx)
        #
        if rup.ztor <= 10.:
            T4 += (1. - rup.ztor**2. / 100.)
        # Compute T5 (eq 15a at page 1040) - ry1 computed according to
        # suggestions provided at page 1040
        T5 = np.zeros_like(dists.rx)
        ry1 = dists.rx * np.tan(np.radians(20.))
        #
        idx = (dists.ry0 - ry1) <= 0.0
        T5[idx] = 1.
        #
        idx = (((dists.ry0 - ry1) > 0.0) & ((dists.ry0 - ry1) < 5.0))
        T5[idx] = 1. - (dists.ry0[idx] - ry1[idx]) / 5.0
        # Finally, compute the hanging wall term
        return Fhw * C['a13'] * T1 * T2 * T3 * T4 * T5
0.000898
def genderStats(self, asFractions=False):
    """Creates a dict (`{'Male' : maleCount, 'Female' : femaleCount,
    'Unknown' : unknownCount}`) with the numbers of male, female and unknown
    names in the collection.

    # Parameters

    _asFractions_ : `optional bool`

    > Default `False`, if `True` the counts will be divided by the total
    > number of names, giving the fraction of names in each category instead
    > of the raw counts.

    # Returns

    `dict[str:int]`

    > A dict with three keys `'Male'`, `'Female'` and `'Unknown'` mapping to
    > their respective counts
    """
    maleCount = 0
    femaleCount = 0
    unknownCount = 0
    for R in self:
        m, f, u = R.authGenders(_countsTuple=True)
        maleCount += m
        femaleCount += f
        unknownCount += u
    if asFractions:
        tot = maleCount + femaleCount + unknownCount
        return {'Male': maleCount / tot,
                'Female': femaleCount / tot,
                'Unknown': unknownCount / tot}
    return {'Male': maleCount,
            'Female': femaleCount,
            'Unknown': unknownCount}
0.015179
async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse:  # type: ignore
    """Send the request using this HTTP sender.
    """
    requests_kwargs = self._configure_send(request, **kwargs)
    return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs)
0.0125
def FetchFileContent(self):
    """Fetch as much of the file's content as possible.

    This drains the pending_files store by checking which blobs we already
    have in the store and issuing calls to the client to receive outstanding
    blobs.
    """
    if not self.state.pending_files:
        return

    # Check what blobs we already have in the blob store.
    blob_hashes = []
    for file_tracker in itervalues(self.state.pending_files):
        for hash_response in file_tracker.get("hash_list", []):
            blob_hashes.append(rdf_objects.BlobID.FromBytes(hash_response.data))

    # This is effectively a BlobStore call.
    existing_blobs = data_store.BLOBS.CheckBlobsExist(blob_hashes)

    self.state.blob_hashes_pending = 0

    # If we encounter hashes that we already have, we will update
    # self.state.pending_files right away so we can't use an iterator here.
    for index, file_tracker in list(self.state.pending_files.items()):
        for i, hash_response in enumerate(file_tracker.get("hash_list", [])):
            # Make sure we read the correct pathspec on the client.
            hash_response.pathspec = file_tracker["stat_entry"].pathspec

            if existing_blobs[rdf_objects.BlobID.FromBytes(hash_response.data)]:
                # If we have the data we may call our state directly.
                self.CallStateInline(
                    messages=[hash_response],
                    next_state="WriteBuffer",
                    request_data=dict(index=index, blob_index=i))
            else:
                # We don't have this blob - ask the client to transmit it.
                self.CallClient(
                    server_stubs.TransferBuffer,
                    hash_response,
                    next_state="WriteBuffer",
                    request_data=dict(index=index, blob_index=i))
0.004566
def lookup_cell(self, uri):
    """Looks up a local actor by its location relative to this actor."""
    steps = uri.steps

    if steps[0] == '':
        found = self.root
        steps.popleft()
    else:
        found = self

    for step in steps:
        assert step != ''
        found = found.get_child(step)
        if not found:
            break
        found = found._cell

    return found
0.004405
def check_dns_txt(domain, prefix, code):
    """
    Validates a domain by checking that {prefix}={code} is present in the TXT
    DNS record of the domain to check.

    Returns true if verification succeeded.
    """
    token = '{}={}'.format(prefix, code)
    try:
        for rr in dns.resolver.query(domain, 'TXT'):
            if token in rr.to_text():
                return True
    except:
        logger.debug('', exc_info=True)
    return False
0.006593
def validate_basic_smoother():
    """Run Friedman's test from Figure 2b."""
    x, y = sort_data(*smoother_friedman82.build_sample_smoother_problem_friedman82())
    plt.figure()
    # plt.plot(x, y, '.', label='Data')
    for span in smoother.DEFAULT_SPANS:
        my_smoother = smoother.perform_smooth(x, y, span)
        friedman_smooth, _resids = run_friedman_smooth(x, y, span)
        plt.plot(x, my_smoother.smooth_result, '.-', label='pyace span = {0}'.format(span))
        plt.plot(x, friedman_smooth, '.-', label='Friedman span = {0}'.format(span))
    finish_plot()
0.006908
def enum(self, other, rmax, process=None, bunch=100000, **kwargs):
    """ cross correlate with other, for all pairs
        closer than rmax, iterate.

        >>> def process(r, i, j, **kwargs):
        >>>     ...
        >>> A.enum(... process, **kwargs):
        >>>     ...

        where r is the distance, i and j are the original
        input array index of the data. arbitrary args can be passed
        to process via kwargs.
    """
    rall = None
    if process is None:
        rall = [numpy.empty(0, 'f8')]
        iall = [numpy.empty(0, 'intp')]
        jall = [numpy.empty(0, 'intp')]

        def process(r1, i1, j1, **kwargs):
            rall[0] = numpy.append(rall[0], r1)
            iall[0] = numpy.append(iall[0], i1)
            jall[0] = numpy.append(jall[0], j1)

    _core.KDNode.enum(self, other, rmax, process, bunch, **kwargs)

    if rall is not None:
        return rall[0], iall[0], jall[0]
    else:
        return None
0.00286
def outdent(value):
    """
    remove common whitespace prefix from lines
    :param value:
    :return:
    """
    try:
        num = 100
        lines = toString(value).splitlines()
        for l in lines:
            trim = len(l.lstrip())
            if trim > 0:
                num = min(num, len(l) - len(l.lstrip()))
        return CR.join([l[num:] for l in lines])
    except Exception as e:
        if not _Log:
            _late_import()
        _Log.error("can not outdent value", e)
0.006024
def start(self, hash, name=None, service='facebook'):
    """ Start a recording for the provided hash

        :param hash: The hash to start recording with
        :type hash: str
        :param name: The name of the recording
        :type name: str
        :param service: The service for this API call (facebook, etc)
        :type service: str
        :return: dict of REST API output with headers attached
        :rtype: :class:`~datasift.request.DictResponse`
        :raises: :class:`~datasift.exceptions.DataSiftApiException`,
            :class:`requests.exceptions.HTTPError`
    """
    params = {'hash': hash}
    if name:
        params['name'] = name

    return self.request.post(service + '/start', params)
0.002532
def get_next(self):
    """Return next iteration time related to loop time"""
    return self.loop_time + (self.croniter.get_next(float) - self.time)
0.012739
def get_value(self):
    """Returns the value of the constant."""
    if self.value is not_computed:
        self.value = self.value_provider()
    if self.value is not_computed:
        return None
    return self.value
0.007937
def get_guest_property_value(self, property_p):
    """Reads a value from the machine's guest property store.

    in property_p of type str
        The name of the property to read.

    return value of type str
        The value of the property. If the property does not exist then this
        will be empty.

    raises :class:`VBoxErrorInvalidVmState`
        Machine session is not open.
    """
    if not isinstance(property_p, basestring):
        raise TypeError("property_p can only be an instance of type basestring")
    value = self._call("getGuestPropertyValue", in_p=[property_p])
    return value
0.007194
async def stop_async(self):
    """
    Stop the EventHubClient and all its Sender/Receiver clients.
    """
    log.info("%r: Stopping %r clients", self.container_id, len(self.clients))
    self.stopped = True
    await self._close_clients_async()
0.011029
def create_query(self, fields=None):
    """Convenience method to create a Query with the Index's fields.

    Args:
        fields (iterable, optional): The fields to include in the Query,
            defaults to the Index's `all_fields`.

    Returns:
        Query: With the specified fields or all the fields in the Index.
    """
    if fields is None:
        return Query(self.fields)

    non_contained_fields = set(fields) - set(self.fields)
    if non_contained_fields:
        raise BaseLunrException(
            "Fields {} are not part of the index", non_contained_fields
        )

    return Query(fields)
0.002941
def obj_box_coord_centroid_to_upleft_butright(coord, to_int=False):
    """Convert one coordinate [x_center, y_center, w, h] to [x1, y1, x2, y2]
    in up-left and bottom-right format.

    Parameters
    ------------
    coord : list of 4 int/float
        One coordinate.
    to_int : boolean
        Whether to convert output as integer.

    Returns
    -------
    list of 4 numbers
        New bounding box.

    Examples
    ---------
    >>> coord = obj_box_coord_centroid_to_upleft_butright([30, 40, 20, 20])
    [20, 30, 40, 50]
    """
    if len(coord) != 4:
        raise AssertionError("coordinate should be 4 values : [x, y, w, h]")
    x_center, y_center, w, h = coord
    x = x_center - w / 2.
    y = y_center - h / 2.
    x2 = x + w
    y2 = y + h
    if to_int:
        return [int(x), int(y), int(x2), int(y2)]
    else:
        return [x, y, x2, y2]
0.002296
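A quick worked check of the centroid-to-corners conversion above (pure function, no imports needed): a box centered at (30, 40) with width and height 20 has corners (20, 30) and (40, 50).

print(obj_box_coord_centroid_to_upleft_butright([30, 40, 20, 20]))
# -> [20.0, 30.0, 40.0, 50.0]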
def display_path(path):
    # type: (Union[str, Text]) -> str
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path
0.00207
def process_quote(self, data):
    """Handle pushed quote data"""
    for ix, row in data.iterrows():
        symbol = row['code']

        tick = self._tick_dict.get(symbol, None)
        if not tick:
            tick = TinyQuoteData()
            tick.symbol = symbol
            self._tick_dict[symbol] = tick

        tick.date = row['data_date'].replace('-', '')
        tick.time = row['data_time']
        # with GLOBAL.dt_lock:
        if tick.date and tick.time:
            tick.datetime = datetime.strptime(' '.join([tick.date, tick.time]), '%Y%m%d %H:%M:%S')
        else:
            return

        tick.openPrice = row['open_price']
        tick.highPrice = row['high_price']
        tick.lowPrice = row['low_price']
        tick.preClosePrice = row['prev_close_price']

        # 1.25: added order-book price spread to make it easier to compute the
        # correct order submission price; requires Futu (NiuNiu) client
        # version v3.42.4961.125 or later
        if 'price_spread' in row:
            tick.priceSpread = row['price_spread']

        tick.lastPrice = row['last_price']
        tick.volume = row['volume']

        new_tick = copy(tick)
        self._notify_new_tick_event(new_tick)
0.002564
def edmcompletion(A, reordered=True, **kwargs):
    """
    Euclidean distance matrix completion. The routine takes an EDM-completable
    cspmatrix :math:`A` and returns a dense EDM :math:`X`
    that satisfies

    .. math::
         P( X ) = A

    :param A:                 :py:class:`cspmatrix`
    :param reordered:         boolean
    """
    assert isinstance(A, cspmatrix) and A.is_factor is False, "A must be a cspmatrix"
    tol = kwargs.get('tol', 1e-15)
    X = matrix(A.spmatrix(reordered=True, symmetric=True))

    symb = A.symb
    n = symb.n
    snptr = symb.snptr
    sncolptr = symb.sncolptr
    snrowidx = symb.snrowidx

    # visit supernodes in reverse (descending) order
    for k in range(symb.Nsn - 1, -1, -1):

        nn = snptr[k + 1] - snptr[k]
        beta = snrowidx[sncolptr[k]:sncolptr[k + 1]]
        nj = len(beta)
        if nj - nn == 0:
            continue
        alpha = beta[nn:]
        nu = beta[:nn]
        eta = matrix([matrix(range(beta[kk] + 1, beta[kk + 1])) for kk in range(nj - 1)] +
                     [matrix(range(beta[-1] + 1, n))])
        ne = len(eta)

        # Compute Yaa, Yan, Yea, Ynn, Yee
        Yaa = -0.5 * X[alpha, alpha] - 0.5 * X[alpha[0], alpha[0]]
        blas.syr2(X[alpha, alpha[0]], matrix(1.0, (nj - nn, 1)), Yaa, alpha=0.5)

        Ynn = -0.5 * X[nu, nu] - 0.5 * X[alpha[0], alpha[0]]
        blas.syr2(X[nu, alpha[0]], matrix(1.0, (nn, 1)), Ynn, alpha=0.5)

        Yee = -0.5 * X[eta, eta] - 0.5 * X[alpha[0], alpha[0]]
        blas.syr2(X[eta, alpha[0]], matrix(1.0, (ne, 1)), Yee, alpha=0.5)

        Yan = -0.5 * X[alpha, nu] - 0.5 * X[alpha[0], alpha[0]]
        Yan += 0.5 * matrix(1.0, (nj - nn, 1)) * X[alpha[0], nu]
        Yan += 0.5 * X[alpha, alpha[0]] * matrix(1.0, (1, nn))

        Yea = -0.5 * X[eta, alpha] - 0.5 * X[alpha[0], alpha[0]]
        Yea += 0.5 * matrix(1.0, (ne, 1)) * X[alpha[0], alpha]
        Yea += 0.5 * X[eta, alpha[0]] * matrix(1.0, (1, nj - nn))

        # EVD: Yaa = Z*diag(w)*Z.T
        w = matrix(0.0, (Yaa.size[0], 1))
        Z = matrix(0.0, Yaa.size)
        lapack.syevr(Yaa, w, jobz='V', range='A', uplo='L', Z=Z)

        # Pseudo-inverse: Yp = pinv(Yaa)
        lambda_max = max(w)
        Yp = Z * spmatrix([1.0 / wi if wi > lambda_max * tol else 0.0 for wi in w],
                          range(len(w)), range(len(w))) * Z.T

        # Compute update
        tmp = -2.0 * Yea * Yp * Yan + matrix(1.0, (ne, 1)) * Ynn[::nn + 1].T + \
            Yee[::ne + 1] * matrix(1.0, (1, nn))

        X[eta, nu] = tmp
        X[nu, eta] = tmp.T

    if reordered:
        return X
    else:
        return X[symb.ip, symb.ip]
0.02947
def toStringArray(name, a, width=0):
    """
    Returns an array (any sequence of floats, really) as a string.
    """
    string = name + ": "
    cnt = 0
    for i in a:
        string += "%4.2f " % i
        if width > 0 and (cnt + 1) % width == 0:
            string += '\n'
        cnt += 1
    return string
0.012579
def mark_regex(regex, text, split_locations):
    """
    Regex that adds a 'SHOULD_SPLIT' marker at the end location of each
    matching group of the given regex.

    Arguments
    ---------
        regex : re.Expression
        text : str, same length as split_locations
        split_locations : list<int>, split decisions.
    """
    for match in regex.finditer(text):
        end_match = match.end()
        if end_match < len(split_locations):
            split_locations[end_match] = SHOULD_SPLIT
0.001976
def fetch(self, log_group_name, start=None, end=None, filter_pattern=None):
    """
    Fetch logs from all streams under the given CloudWatch Log Group and
    yields in the output. Optionally, caller can filter the logs using a
    pattern or a start/end time.

    Parameters
    ----------
    log_group_name : string
        Name of CloudWatch Logs Group to query.
    start : datetime.datetime
        Optional start time for logs.
    end : datetime.datetime
        Optional end time for logs.
    filter_pattern : str
        Expression to filter the logs by. This is passed directly to
        CloudWatch, so any expression supported by CloudWatch Logs API is
        supported here.

    Yields
    ------
    samcli.lib.logs.event.LogEvent
        Object containing the information from each log event returned by
        CloudWatch Logs
    """
    kwargs = {
        "logGroupName": log_group_name,
        "interleaved": True
    }

    if start:
        kwargs["startTime"] = to_timestamp(start)

    if end:
        kwargs["endTime"] = to_timestamp(end)

    if filter_pattern:
        kwargs["filterPattern"] = filter_pattern

    while True:
        LOG.debug("Fetching logs from CloudWatch with parameters %s", kwargs)
        result = self.cw_client.filter_log_events(**kwargs)

        # Several events will be returned. Yield one at a time
        for event in result.get('events', []):
            yield LogEvent(log_group_name, event)

        # Keep iterating until there are no more logs left to query.
        next_token = result.get("nextToken", None)
        kwargs["nextToken"] = next_token
        if not next_token:
            break
0.003313
def rargmax(x, eps=1e-8):
    """Argmax with random tie-breaking

    Args:
        x: a 1-dim numpy array

    Returns:
        the argmax index
    """
    idxs = np.where(abs(x - np.max(x, axis=0)) < eps)[0]
    return np.random.choice(idxs)
0.004115
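A small sketch of the tie-breaking behaviour of rargmax above (assumes numpy imported as np and the entry in scope): among near-equal maxima, each index is chosen uniformly at random, whereas np.argmax alone would always return the first.

x = np.array([1.0, 3.0, 3.0, 2.0])
picks = {rargmax(x) for _ in range(1000)}
print(sorted(picks))  # -> [1, 2] (both tied indices occur)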
def container_describe(object_id, input_params={}, always_retry=True, **kwargs):
    """
    Invokes the /container-xxxx/describe API method.

    For more info, see:
    https://wiki.dnanexus.com/API-Specification-v1.0.0/Containers-for-Execution#API-method%3A-%2Fcontainer-xxxx%2Fdescribe
    """
    return DXHTTPRequest('/%s/describe' % object_id, input_params,
                         always_retry=always_retry, **kwargs)
0.010076
def get_tournaments(self, only_active=True):
    """Get all tournaments

    Args:
        only_active (bool): Flag to indicate whether only active tournaments
            should be returned or all of them. Defaults to True.

    Returns:
        list of dicts: list of tournaments

        Each tournaments' dict contains the following items:

            * id (`str`)
            * name (`str`)
            * tournament (`int`)
            * active (`bool`)

    Example:
        >>> NumerAPI().get_tournaments()
        [{'id': '2ecf30f4-4b4f-42e9-8e72-cc5bd61c2733',
          'name': 'alpha',
          'tournament': 1,
          'active': True},
         {'id': '6ff44cca-263d-40bd-b029-a1ab8f42798f',
          'name': 'bravo',
          'tournament': 2,
          'active': True},
         {'id': 'ebf0d62b-0f60-4550-bcec-c737b168c65d',
          'name': 'charlie',
          'tournament': 3,
          'active': False},
         {'id': '5fac6ece-2726-4b66-9790-95866b3a77fc',
          'name': 'delta',
          'tournament': 4,
          'active': True}]
    """
    query = """
      query {
        tournaments {
          id
          name
          tournament
          active
        }
      }
    """
    data = self.raw_query(query)['data']['tournaments']
    if only_active:
        data = [d for d in data if d['active']]
    return data
0.001237
def delete_lower(script, layer_num=None):
    """ Delete all layers below the specified one.

    Useful for MeshLab ver 2016.12, which will only output layer 0.
    """
    if layer_num is None:
        layer_num = script.current_layer()
    if layer_num != 0:
        change(script, 0)
        for i in range(layer_num):
            delete(script, 0)
    return None
0.002778
def sils_cut(T, f, c, d, h):
    """solve_sils -- solve the lot sizing problem with cutting planes
       - start with a relaxed model
       - add cuts until there are no fractional setup variables
    Parameters:
        - T: number of periods
        - f[t]: set-up costs (on period t)
        - c[t]: variable costs
        - d[t]: demand values
        - h[t]: holding costs
    Returns the final model solved, with all necessary cuts added.
    """
    Ts = range(1, T + 1)

    model = sils(T, f, c, d, h)
    y, x, I = model.data

    # relax integer variables
    for t in Ts:
        y[t].vtype = "C"

    # compute D[i,j] = sum_{t=i}^j d[t]
    D = {}
    for t in Ts:
        s = 0
        for j in range(t, T + 1):
            s += d[j]
            D[t, j] = s

    EPS = 1.e-6
    cuts = True
    while cuts:
        model.optimize()
        cuts = False
        for ell in Ts:
            lhs = 0
            S, L = [], []
            for t in range(1, ell + 1):
                yt = model.getVal(y[t])
                xt = model.getVal(x[t])
                if D[t, ell] * yt < xt:
                    S.append(t)
                    lhs += D[t, ell] * yt
                else:
                    L.append(t)
                    lhs += xt
            if lhs < D[1, ell]:
                # add cutting plane constraint
                model.addCons(quicksum([x[t] for t in L]) +
                              quicksum(D[t, ell] * y[t] for t in S) >= D[1, ell])
                cuts = True
    model.data = y, x, I
    return model
0.017744
def _grow_trees(self):
    """
    Adds new trees to the forest according to the specified growth method.
    """
    if self.grow_method == GROW_AUTO_INCREMENTAL:
        self.tree_kwargs['auto_grow'] = True

    while len(self.trees) < self.size:
        self.trees.append(Tree(data=self.data, **self.tree_kwargs))
0.008523
def get_total_mass(self):
    """Returns the total mass in g/mol.

    Args:
        None

    Returns:
        float:
    """
    try:
        mass = self.loc[:, 'mass'].sum()
    except KeyError:
        mass_molecule = self.add_data('mass')
        mass = mass_molecule.loc[:, 'mass'].sum()
    return mass
0.005602
def genfile(*paths):
    '''
    Create or open ( for read/write ) a file path join.

    Args:
        *paths: A list of paths to join together to make the file.

    Notes:
        If the file already exists, the fd returned is opened in ``r+b`` mode.
        Otherwise, the fd is opened in ``w+b`` mode.

    Returns:
        io.BufferedRandom: A file-object which can be read/written to.
    '''
    path = genpath(*paths)
    gendir(os.path.dirname(path))
    if not os.path.isfile(path):
        return io.open(path, 'w+b')
    return io.open(path, 'r+b')
0.001779
def dict_as_tuple_list(d, as_list=False):
    """
    Format a dict to a list of tuples

    :param d: the dictionary
    :param as_list: return a list of lists rather than a list of tuples
    :return: formatted dictionary list
    """
    dd = list()
    for k, v in d.items():
        dd.append([k, v] if as_list else (k, v))
    return dd
0.002933
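A minimal usage example for dict_as_tuple_list above (pure function; ordering follows dict insertion order on Python 3.7+):

d = {'a': 1, 'b': 2}
dict_as_tuple_list(d)                # -> [('a', 1), ('b', 2)]
dict_as_tuple_list(d, as_list=True)  # -> [['a', 1], ['b', 2]]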
def ColorfullyWrite(log: str, consoleColor: int = -1, writeToFile: bool = True,
                    printToStdout: bool = True, logFile: str = None) -> None:
    """
    log: str.
    consoleColor: int, a value in class `ConsoleColor`, such as `ConsoleColor.DarkGreen`.
    writeToFile: bool.
    printToStdout: bool.
    logFile: str, log file path.
    ColorfullyWrite('Hello <Color=Green>Green</Color> !!!'), color name must be in Logger.ColorNames.
    """
    text = []
    start = 0
    while True:
        index1 = log.find('<Color=', start)
        if index1 >= 0:
            if index1 > start:
                text.append((log[start:index1], consoleColor))
            index2 = log.find('>', index1)
            colorName = log[index1 + 7:index2]
            index3 = log.find('</Color>', index2 + 1)
            text.append((log[index2 + 1:index3], Logger.ColorNames[colorName]))
            start = index3 + 8
        else:
            if start < len(log):
                text.append((log[start:], consoleColor))
            break
    for t, c in text:
        Logger.Write(t, c, writeToFile, printToStdout, logFile)
0.004955
def execute(self, action):
    """Execute the indicated action within the environment and
    return the resulting immediate reward dictated by the reward
    program.

    Usage:
        immediate_reward = scenario.execute(selected_action)

    Arguments:
        action: The action to be executed within the current situation.

    Return:
        A float, the reward received for the action that was executed,
        or None if no reward is offered.
    """
    self.logger.debug('Executing action: %s', action)

    reward = self.wrapped.execute(action)
    if reward:
        self.total_reward += reward
    self.steps += 1

    self.logger.debug('Reward received on this step: %.5f', reward or 0)
    self.logger.debug('Average reward per step: %.5f', self.total_reward / self.steps)

    return reward
0.002134
def Client(api_version, *args, **kwargs):
    """Return a neutron client.

    @param api_version: only 2.0 is supported now
    """
    neutron_client = utils.get_client_class(
        API_NAME,
        api_version,
        API_VERSIONS,
    )
    return neutron_client(*args, **kwargs)
0.003472
def parse_250_row(row: list) -> BasicMeterData:
    """ Parse basic meter data record (250) """
    return BasicMeterData(row[1], row[2], row[3], row[4], row[5], row[6], row[7],
                          float(row[8]), parse_datetime(row[9]), row[10], row[11],
                          row[12], float(row[13]), parse_datetime(row[14]), row[15],
                          row[16], row[17], float(row[18]), row[19], row[20],
                          parse_datetime(row[21]), parse_datetime(row[22]))
0.010453
def all_enclosing_scopes(scope, allow_global=True):
    """Utility function to return all scopes up to the global scope enclosing
    a given scope."""

    _validate_full_scope(scope)

    # TODO: validate scopes here and/or in `enclosing_scope()` instead of
    # assuming correctness.
    def scope_within_range(tentative_scope):
        if tentative_scope is None:
            return False
        if not allow_global and tentative_scope == GLOBAL_SCOPE:
            return False
        return True

    while scope_within_range(scope):
        yield scope
        scope = (None if scope == GLOBAL_SCOPE else enclosing_scope(scope))
0.015332
def query_by_slug(slug):
    '''
    Query all chapters for the category with the given slug.
    '''
    cat_rec = MCategory.get_by_slug(slug)
    if cat_rec:
        cat_id = cat_rec.uid
    else:
        return None

    if cat_id.endswith('00'):
        cat_con = TabPost2Tag.par_id == cat_id
    else:
        cat_con = TabPost2Tag.tag_id == cat_id

    recs = TabPost.select().join(
        TabPost2Tag,
        on=(TabPost.uid == TabPost2Tag.post_id)
    ).where(
        cat_con
    ).order_by(
        TabPost.time_update.desc()
    )
    return recs
0.003367
def averageOnTime(vectors, numSamples=None):
    """
    Returns the average on-time, averaged over all on-time runs.

    Parameters:
    -----------------------------------------------
    vectors:        the vectors for which the onTime is calculated. Row 0
                    contains the outputs from time step 0, row 1 from time
                    step 1, etc.
    numSamples:     the number of elements for which on-time is calculated.
                    If not specified, then all elements are looked at.

    Returns:    (scalar average on-time of all outputs,
                 list containing frequency counts of each encountered on-time)
    """
    # Special case given a 1 dimensional vector: it represents a single column
    if vectors.ndim == 1:
        vectors.shape = (-1, 1)
    numTimeSteps = len(vectors)
    numElements = len(vectors[0])

    # How many samples will we look at?
    if numSamples is None:
        numSamples = numElements
        countOn = range(numElements)
    else:
        countOn = numpy.random.randint(0, numElements, numSamples)

    # Compute the on-times and accumulate the frequency counts of each on-time
    # encountered
    sumOfLengths = 0.0
    onTimeFreqCounts = None
    n = 0
    for i in countOn:
        (onTime, segments, durations) = _listOfOnTimesInVec(vectors[:, i])
        if onTime != 0.0:
            sumOfLengths += onTime
            n += segments
            onTimeFreqCounts = _accumulateFrequencyCounts(durations, onTimeFreqCounts)

    # Return the average on time of each element that was on.
    if n > 0:
        return (sumOfLengths / n, onTimeFreqCounts)
    else:
        return (0.0, onTimeFreqCounts)
0.016917
def complete_shells(line, text, predicate=lambda i: True):
    """Return the shell names to include in the completion"""
    res = [i.display_name + ' ' for i in dispatchers.all_instances()
           if i.display_name.startswith(text) and
           predicate(i) and
           ' ' + i.display_name + ' ' not in line]
    return res
0.003003
def _generateDDL(self):
    """Generate DDL statements for SQLite"""
    sql = []
    # Next convert each set of columns into a table structure
    for dataset_name in sorted(self.datasets.keys()):
        # SQL to drop the table if it already exists
        sql.append('''drop table if exists %s;''' % dataset_name)
        # Generate the SQL for the cols
        cols = self.datasets[dataset_name]
        col_defs = []
        for col in cols:
            sql_datatype = self.getSQLDataType(col["vartype"])
            col_defs.append("%s %s" % (col["varname"], sql_datatype,))
        stmt = 'CREATE TABLE %s (%s)' % (dataset_name, ','.join(col_defs))
        sql.append(stmt)
    return sql
0.002632
def get(self, cid, fields=[], **kwargs):
    '''taobao.itemprops.get Get standard product category properties

    Q: Can the properties and property values under a specific category be
       obtained through a graphical interface?
    A: Please use the property tool to obtain this data directly through the
       graphical interface.
    Q: What is the difference between key properties, non-key properties and
       sales properties?
    A: A product's key properties are required; the key properties plus the
       category id identify a product. Non-key properties are the properties
       of a category other than the key properties and sales properties. A
       sales property is a property that can only be determined for an actual
       physical item, e.g. an N73 in red or black; it cannot be determined
       without the physical item.
    Q: How are sales properties related to SKUs?
    A: Sales properties are the special properties that make up a SKU; they
       affect the buyer's purchase and the seller's inventory management, such
       as a garment's "color", "bundle" and "size". A SKU, the minimum
       purchase unit or minimum stock-keeping unit, is a combination of sales
       properties; for example, "a red Nokia N95" is one SKU.'''
    request = TOPRequest('taobao.itemprops.get')
    if not fields:
        itemProp = ItemProp()
        fields = itemProp.fields
    request['fields'] = fields
    request['cid'] = cid
    for k, v in kwargs.iteritems():
        if k not in ('pid', 'parent_pid', 'is_key_prop', 'is_sale_prop',
                     'is_color_prop', 'is_enum_prop', 'is_input_prop',
                     'is_item_prop', 'child_path') and v == None:
            continue
        request[k] = v
    self.create(self.execute(request))
    return self.item_props
0.013039
def parent(self):
    """
    Select the direct parent of the UI element(s) given by the query
    expression, see ``QueryCondition`` for more details about the selectors.

    Warnings:
        Experimental method, may not be available for all drivers.

    Returns:
        :py:class:`UIObjectProxy <poco.proxy.UIObjectProxy>`: a new UI proxy
        object representing the direct parent of the first UI element.
    """
    sub_query = build_query(None)  # as placeholder
    query = ('^', (self.query, sub_query))
    obj = UIObjectProxy(self.poco)
    obj.query = query
    return obj
0.006126
def format_base64(data):
    """
    <Purpose>
      Return the base64 encoding of 'data' with whitespace and '=' signs
      omitted.

    <Arguments>
      data:
        Binary or buffer of data to convert.

    <Exceptions>
      securesystemslib.exceptions.FormatError, if the base64 encoding fails
      or the argument is invalid.

    <Side Effects>
      None.

    <Returns>
      A base64-encoded string.
    """
    try:
        return binascii.b2a_base64(data).decode('utf-8').rstrip('=\n ')
    except (TypeError, binascii.Error) as e:
        raise securesystemslib.exceptions.FormatError('Invalid base64'
                                                      ' encoding: ' + str(e))
0.009917
def open(self, method, url):
    '''
    Opens the request.

    method:
        the request VERB 'GET', 'POST', etc.
    url:
        the url to connect
    '''
    flag = VARIANT.create_bool_false()
    _method = BSTR(method)
    _url = BSTR(url)
    _WinHttpRequest._Open(self, _method, _url, flag)
0.005797
def file_download_using_requests(self, url):
    '''It will download file specified by url using requests module'''
    file_name = url.split('/')[-1]
    if os.path.exists(os.path.join(os.getcwd(), file_name)):
        print 'File already exists'
        return
    #print 'Downloading file %s '%file_name
    #print 'Downloading from %s'%url
    try:
        r = requests.get(url, stream=True, timeout=200)
    except requests.exceptions.SSLError:
        try:
            response = requests.get(url, stream=True, verify=False, timeout=200)
        except requests.exceptions.RequestException as e:
            print e
            quit()
    except requests.exceptions.RequestException as e:
        print e
        quit()
    chunk_size = 1024
    total_size = int(r.headers['Content-Length'])
    total_chunks = total_size / chunk_size
    file_iterable = r.iter_content(chunk_size=chunk_size)
    tqdm_iter = tqdm(iterable=file_iterable, total=total_chunks,
                     unit='KB', leave=False)
    with open(file_name, 'wb') as f:
        for data in tqdm_iter:
            f.write(data)
    #total_size=float(r.headers['Content-Length'])/(1024*1024)
    '''print 'Total size of file to be downloaded %.2f MB '%total_size
    total_downloaded_size=0.0
    with open(file_name,'wb') as f:
        for chunk in r.iter_content(chunk_size=1*1024*1024):
            if chunk:
                size_of_chunk=float(len(chunk))/(1024*1024)
                total_downloaded_size+=size_of_chunk
                print '{0:.0%} Downloaded'.format(total_downloaded_size/total_size)
                f.write(chunk)'''
    print 'Downloaded file %s '%file_name
0.053691
def get_proficiencies(self):
    """Gets all ``Proficiencies``.

    return: (osid.learning.ProficiencyList) - a list of ``Proficiencies``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.ResourceLookupSession.get_resources
    # NOTE: This implementation currently ignores plenary view
    collection = JSONClientValidated('learning',
                                     collection='Proficiency',
                                     runtime=self._runtime)
    result = collection.find(self._view_filter()).sort('_id', DESCENDING)
    return objects.ProficiencyList(result, runtime=self._runtime, proxy=self._proxy)
0.003413
def serialize_endnote(ctx, document, el, root):
    "Serializes endnotes."
    footnote_num = el.rid
    if el.rid not in ctx.endnote_list:
        ctx.endnote_id += 1
        ctx.endnote_list[el.rid] = ctx.endnote_id
    footnote_num = ctx.endnote_list[el.rid]

    note = etree.SubElement(root, 'sup')
    link = etree.SubElement(note, 'a')
    link.set('href', '#')
    link.text = u'{}'.format(footnote_num)

    fire_hooks(ctx, document, el, note, ctx.get_hook('endnote'))
    return root
0.002012
def chunk(self, seek=None, lenient=False):
    """
    Read the next PNG chunk from the input file; returns a
    (*chunk_type*, *data*) tuple. *chunk_type* is the chunk's type as a
    byte string (all PNG chunk types are 4 bytes long). *data* is the
    chunk's data content, as a byte string.

    If the optional `seek` argument is specified then it will keep reading
    chunks until it either runs out of file or finds the chunk_type
    specified by the argument. Note that in general the order of chunks in
    PNGs is unspecified, so using `seek` can cause you to miss chunks.

    If the optional `lenient` argument evaluates to `True`, checksum
    failures will raise warnings rather than exceptions.
    """
    self.validate_signature()

    while True:
        # http://www.w3.org/TR/PNG/#5Chunk-layout
        if not self.atchunk:
            self.atchunk = self.chunklentype()
        length, chunk_type = self.atchunk
        self.atchunk = None

        data = self.file.read(length)
        if len(data) != length:
            raise ChunkError('Chunk %s too short for required %i octets.'
                             % (chunk_type, length))
        checksum = self.file.read(4)
        if len(checksum) != 4:
            raise ChunkError('Chunk %s too short for checksum.', chunk_type)
        if seek and chunk_type != seek:
            continue
        verify = zlib.crc32(strtobytes(chunk_type))
        verify = zlib.crc32(data, verify)
        # Whether the output from zlib.crc32 is signed or not varies
        # according to hideous implementation details, see
        # http://bugs.python.org/issue1202 .
        # We coerce it to be positive here (in a way which works on
        # Python 2.3 and older).
        verify &= 2**32 - 1
        verify = struct.pack('!I', verify)
        if checksum != verify:
            (a, ) = struct.unpack('!I', checksum)
            (b, ) = struct.unpack('!I', verify)
            message = ("Checksum error in %s chunk: 0x%08X != 0x%08X."
                       % (chunk_type, a, b))
            if lenient:
                warnings.warn(message, RuntimeWarning)
            else:
                raise ChunkError(message)
        return chunk_type, data
0.001635
def get_assigned_object(self, object_type=None):
    """Return the current assigned object UUID.

    :param object_type: If it's specified, returns only if the PID
        object_type is the same, otherwise returns None. (default: None).
    :returns: The object UUID.
    """
    if object_type is not None:
        if self.object_type == object_type:
            return self.object_uuid
        else:
            return None
    return self.object_uuid
0.004008
def is_left(point0, point1, point2):
    """
    Tests if a point is Left|On|Right of an infinite line.

    Ported from the C++ version: on http://geomalgorithms.com/a03-_inclusion.html

    .. note:: This implementation only works in 2-dimensional space.

    :param point0: Point P0
    :param point1: Point P1
    :param point2: Point P2
    :return:
        >0 for P2 left of the line through P0 and P1
        =0 for P2 on the line
        <0 for P2 right of the line
    """
    return ((point1[0] - point0[0]) * (point2[1] - point0[1])) - \
           ((point2[0] - point0[0]) * (point1[1] - point0[1]))
0.005051
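A quick check of the sign convention, calling the function above with the line through (0, 0) and (1, 0), i.e. the positive x-axis (test points chosen for illustration):

p0, p1 = (0, 0), (1, 0)

print(is_left(p0, p1, (0.5, 1)))    # 1.0  -> P2 is left of the line
print(is_left(p0, p1, (0.5, 0)))    # 0.0  -> P2 is on the line
print(is_left(p0, p1, (0.5, -1)))   # -1.0 -> P2 is right of the line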
def connect(self, chassis_list): """Establish connection to one or more chassis. Arguments: chassis_list -- List of chassis (IP addresses or DNS names) Return: List of chassis addresses. """ self._check_session() if not isinstance(chassis_list, (list, tuple, set, dict, frozenset)): chassis_list = (chassis_list,) if len(chassis_list) == 1: status, data = self._rest.put_request( 'connections', chassis_list[0]) data = [data] else: params = {chassis: True for chassis in chassis_list} params['action'] = 'connect' status, data = self._rest.post_request('connections', None, params) return data
0.002587
def plotDutyCycles(dutyCycle, filePath): """ Create plot showing histogram of duty cycles :param dutyCycle: (torch tensor) the duty cycle of each unit :param filePath: (str) Full filename of image file """ _,entropy = binaryEntropy(dutyCycle) bins = np.linspace(0.0, 0.3, 200) plt.hist(dutyCycle, bins, alpha=0.5, label='All cols') plt.title("Histogram of duty cycles, entropy=" + str(float(entropy))) plt.xlabel("Duty cycle") plt.ylabel("Number of units") plt.savefig(filePath) plt.close()
0.021277
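The snippet leans on a binaryEntropy helper that is not shown. A minimal sketch of what the call site seems to expect, a per-unit tensor plus a scalar total (this signature and behaviour are inferred from the call above, not the original helper):

import torch

def binaryEntropy(p, eps=1e-12):
    # Treat each duty cycle as the probability of a unit being active
    # and compute its binary entropy in bits; clamp avoids log(0).
    p = p.clamp(eps, 1.0 - eps)
    h = -p * torch.log2(p) - (1 - p) * torch.log2(1 - p)
    return h, h.sum()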
def delta(self, other): ''' Return the error between this and another bearing. This will be an angle in degrees, positive or negative depending on the direction of the error. self other \ / \ / \__/ \/ <- angle will be +ve other self \ / \ / \__/ \/ <- angle will be -ve :param other: bearing to compare to :type other: Bearing :returns: error angle :rtype: float ''' difference = float(other) - float(self) while difference < -180: difference += 360 while difference > 180: difference -= 360 return difference
0.014528
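The method only needs float(self) and float(other), so a float subclass is enough to demonstrate the wraparound (a hypothetical stand-in class reusing the logic above; the bearings are illustrative):

class Bearing(float):
    def delta(self, other):
        difference = float(other) - float(self)
        while difference < -180:
            difference += 360
        while difference > 180:
            difference -= 360
        return difference

print(Bearing(350).delta(Bearing(10)))   # 20.0  -> shortest turn is clockwise
print(Bearing(10).delta(Bearing(350)))   # -20.0 -> shortest turn is anticlockwise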
def setting(self): """ Load setting (Amps, Watts, or Ohms depending on program mode) """ prog_type = self.__program.program_type return self._setting / self.SETTING_DIVIDES[prog_type]
0.008969
def __validate1 (property): """ Exit with error if property is not valid. """ assert isinstance(property, Property) msg = None if not property.feature.free: feature.validate_value_string (property.feature, property.value)
0.012
def _startNextChunk(self) -> None: """ Close current and start next chunk """ if self.currentChunk is None: self._useLatestChunk() else: self._useChunk(self.currentChunkIndex + self.chunkSize)
0.007813
def main(argString=None):
    """The main function.

    :param argString: the options.

    :type argString: list

    These are the steps performed by this module:

    1. Prints the options of the module.
    2. Computes the number of markers in the input file
       (:py:func:`computeNumberOfMarkers`).
    3. If there are no markers, the module stops.
    4. Computes the Bonferroni threshold (:math:`0.05 / \\textrm{nbMarkers}`).
    5. Runs Plink to find failed markers with the Bonferroni threshold.
    6. Runs Plink to find failed markers with the default threshold.
    7. Compares the ``bim`` files for the Bonferroni threshold.
    8. Compares the ``bim`` files for the default threshold.
    9. Computes the "in between" marker list, which is the markers from the
       default threshold and the Bonferroni threshold.

    """
    # Getting and checking the options
    args = parseArgs(argString)
    checkArgs(args)

    logger.info("Options used:")
    for key, value in vars(args).iteritems():
        logger.info("  --{} {}".format(key.replace("_", "-"), value))

    # Compute the number of markers
    logger.info("Counting the number of markers")
    nbMarkers = computeNumberOfMarkers(args.bfile + ".bim")

    if nbMarkers <= 0:
        logger.info("  - There are no markers: STOPPING NOW!")
    else:
        logger.info("  - There are {} markers".format(nbMarkers))

        customThreshold = str(0.05 / nbMarkers)

        # Run the plink command
        logger.info("Computing the HW equilibrium for {}".format(
            customThreshold,
        ))
        computeHWE(args.bfile, customThreshold,
                   args.out + ".threshold_" + customThreshold)

        logger.info("Computing the HW equilibrium for {}".format(args.hwe))
        computeHWE(args.bfile, args.hwe, args.out + ".threshold_" + args.hwe)

        # Compare the BIM files
        logger.info("Creating the flagged SNP list for {}".format(
            customThreshold,
        ))
        custom_snps = compareBIMfiles(
            args.bfile + ".bim",
            args.out + ".threshold_" + customThreshold + ".bim",
            args.out + ".snp_flag_threshold_" + customThreshold,
        )

        logger.info("Creating the flagged SNP list for {}".format(args.hwe))
        hwe_snps = compareBIMfiles(
            args.bfile + ".bim",
            args.out + ".threshold_" + args.hwe + ".bim",
            args.out + ".snp_flag_threshold_" + args.hwe,
        )

        logger.info("Creating the in between SNP list ([{}, {}[)".format(
            args.hwe,
            customThreshold,
        ))
        file_name = args.out + ".snp_flag_threshold_between_{}-{}".format(
            args.hwe,
            customThreshold,
        )
        try:
            with open(file_name, 'w') as output_file:
                differences = hwe_snps - custom_snps
                if len(differences) > 0:
                    print >>output_file, "\n".join(differences)
        except IOError:
            msg = "{}: can't write file".format(file_name)
            raise ProgramError(msg)
0.000324
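To make the threshold arithmetic and the "in between" set concrete (the marker count and SNP names here are invented for illustration):

nbMarkers = 1000
customThreshold = str(0.05 / nbMarkers)   # '5e-05': the Bonferroni threshold

hwe_snps = {'rs1', 'rs2', 'rs3'}    # flagged at the default threshold
custom_snps = {'rs1'}               # flagged at the stricter Bonferroni threshold

# Markers failing the default test but not the Bonferroni one form the
# "in between" list written out at the end of main().
print(hwe_snps - custom_snps)       # {'rs2', 'rs3'} (set order may vary)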
def malloc(self, sim_size): """ A somewhat faithful implementation of libc `malloc`. :param sim_size: the amount of memory (in bytes) to be allocated :returns: the address of the allocation, or a NULL pointer if the allocation failed """ raise NotImplementedError("%s not implemented for %s" % (self.malloc.__func__.__name__, self.__class__.__name__))
0.010593
def get_bundles(): """ Used to cache the bundle definitions rather than loading from config every time they're used """ global _cached_bundles if not _cached_bundles: _cached_bundles = BundleManager() for bundle_conf in bundles_settings.BUNDLES: _cached_bundles[bundle_conf[0]] = Bundle(bundle_conf) return _cached_bundles
0.005305
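The same lazy module-level cache pattern in miniature (all names here are illustrative, not from the snippet):

_cached_value = None

def _load_from_config():
    # Stand-in for an expensive build step (parsing settings, etc.).
    return {'built': True}

def get_value():
    global _cached_value
    if _cached_value is None:           # build only on the first call
        _cached_value = _load_from_config()
    return _cached_value

assert get_value() is get_value()       # later calls reuse the same object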
def command_loop(self, run_script_event):
    """ This is the debugger command loop that processes (protocol) client requests.
    """
    while True:
        obj = remote_client.receive(self)
        command = obj["command"]  # TODO: ensure we always have a command if receive returns
        args = obj.get('args', {})

        if command == 'getBreakpoints':
            breakpoints_list = IKBreakpoint.get_breakpoints_list()
            remote_client.reply(obj, breakpoints_list)
            _logger.b_debug("getBreakpoints(%s) => %s", args, breakpoints_list)

        elif command == "setBreakpoint":
            # Set a new breakpoint. If the lineno line doesn't exist for the
            # filename passed as argument, return an error message.
            # The filename should be in canonical form, as described in the
            # canonic() method.
            file_name = args['file_name']
            line_number = args['line_number']
            condition = args.get('condition', None)
            enabled = args.get('enabled', True)
            _logger.b_debug("setBreakpoint(file_name=%s, line_number=%s,"
                            " condition=%s, enabled=%s) with CWD=%s",
                            file_name, line_number, condition, enabled,
                            os.getcwd())
            error_messages = []
            result = {}
            c_file_name = self.normalize_path_in(file_name)
            if not c_file_name:
                err = "Failed to find file '%s'" % file_name
                _logger.g_error("setBreakpoint error: %s", err)
                msg = "IKP3db error: Failed to set a breakpoint at %s:%s "\
                      "(%s)." % (file_name, line_number, err)
                error_messages = [msg]
                command_exec_status = 'error'
            else:
                err, bp_number = self.set_breakpoint(c_file_name,
                                                     line_number,
                                                     condition=condition,
                                                     enabled=enabled)
                if err:
                    _logger.g_error("setBreakpoint error: %s", err)
                    msg = "IKP3db error: Failed to set a breakpoint at %s:%s "\
                          "(%s)." % (file_name, line_number, err,)
                    error_messages = [msg]
                    command_exec_status = 'error'
                else:
                    result = {'breakpoint_number': bp_number}
                    command_exec_status = 'ok'
            remote_client.reply(obj, result,
                                command_exec_status=command_exec_status,
                                error_messages=error_messages)

        elif command == "changeBreakpointState":
            # Allows to:
            #  - activate or deactivate breakpoint
            #  - set or remove condition
            _logger.b_debug("changeBreakpointState(%s)", args)
            bp_number = args.get('breakpoint_number', None)
            if bp_number is None:
                result = {}
                msg = "changeBreakpointState() error: missing required "\
                      "breakpointNumber parameter."
                _logger.g_error(" "+msg)
                error_messages = [msg]
                command_exec_status = 'error'
            else:
                err = self.change_breakpoint_state(bp_number,
                                                   args.get('enabled', False),
                                                   condition=args.get('condition', ''))
                result = {}
                error_messages = []
                if err:
                    msg = "changeBreakpointState() error: \"%s\"" % err
                    _logger.g_error(" "+msg)
                    error_messages = [msg]
                    command_exec_status = 'error'
                else:
                    command_exec_status = 'ok'
            remote_client.reply(obj, result,
                                command_exec_status=command_exec_status,
                                error_messages=error_messages)
            _logger.b_debug(" command_exec_status => %s", command_exec_status)

        elif command == "clearBreakpoint":
            _logger.b_debug("clearBreakpoint(%s)", args)
            bp_number = args.get('breakpoint_number', None)
            if bp_number is None:
                result = {}
                msg = "IKP3db error: Failed to delete breakpoint (Missing "\
                      "required breakpointNumber parameter)."
                error_messages = [msg]
                command_exec_status = 'error'
            else:
                err = self.clear_breakpoint(args['breakpoint_number'])
                result = {}
                error_messages = []
                if err:
                    msg = "IKP3db error: Failed to delete breakpoint (%s)." % err
                    _logger.g_error(msg)
                    error_messages = [msg]
                    command_exec_status = 'error'
                else:
                    command_exec_status = 'ok'
            remote_client.reply(obj, result,
                                command_exec_status=command_exec_status,
                                error_messages=error_messages)

        elif command == 'runScript':
            #TODO: handle a 'stopAtEntry' arg
            _logger.x_debug("runScript(%s)", args)
            remote_client.reply(obj, {'executionStatus': 'running'})
            run_script_event.set()

        elif command == 'suspend':
            _logger.x_debug("suspend(%s)", args)
            # We return a running status which is True at that point. Next
            # programBreak will change status to 'stopped'
            remote_client.reply(obj, {'executionStatus': 'running'})
            self.setup_suspend()

        elif command == 'resume':
            _logger.x_debug("resume(%s)", args)
            remote_client.reply(obj, {'executionStatus': 'running'})
            self._command_q.put({'cmd':'resume'})

        elif command == 'stepOver':  # <=> Pdb n(ext)
            _logger.x_debug("stepOver(%s)", args)
            remote_client.reply(obj, {'executionStatus': 'running'})
            self._command_q.put({'cmd':'stepOver'})

        elif command == 'stepInto':  # <=> Pdb s(tep)
            _logger.x_debug("stepInto(%s)", args)
            remote_client.reply(obj, {'executionStatus': 'running'})
            self._command_q.put({'cmd':'stepInto'})

        elif command == 'stepOut':  # <=> Pdb r(eturn)
            _logger.x_debug("stepOut(%s)", args)
            remote_client.reply(obj, {'executionStatus': 'running'})
            self._command_q.put({'cmd':'stepOut'})

        elif command == 'evaluate':
            _logger.e_debug("evaluate(%s)", args)
            if self.tracing_enabled and self.status == 'stopped':
                self._command_q.put({
                    'cmd':'evaluate',
                    'obj': obj,
                    'frame': args['frame'],
                    'expression': args['expression'],
                    'global': args['global'],
                    'disableBreak': args['disableBreak']
                })
                # reply will be done in _tracer() where result is available
            else:
                remote_client.reply(obj, {'value': None, 'type': None})

        elif command == 'getProperties':
            _logger.e_debug("getProperties(%s,%s)", args, obj)
            if self.tracing_enabled and self.status == 'stopped':
                if args.get('id'):
                    self._command_q.put({
                        'cmd':'getProperties',
                        'obj': obj,
                        'id': args['id']
                    })
                    # reply will be done in _tracer() when result is available
                else:
                    result = {}
                    command_exec_status = 'error'
                    error_messages = ["IKP3db received getProperties command sent without target variable 'id'."]
                    remote_client.reply(obj, result,
                                        command_exec_status=command_exec_status,
                                        error_messages=error_messages)
            else:
                remote_client.reply(obj, {'value': None, 'type': None})

        elif command == 'setVariable':
            _logger.e_debug("setVariable(%s)", args)
            if self.tracing_enabled and self.status == 'stopped':
                self._command_q.put({
                    'cmd':'setVariable',
                    'obj': obj,
                    'frame': args['frame'],
                    'name': args['name'],  # TODO: Rework plugin to send var's id
                    'value': args['value']
                })
                # reply will be done in _tracer() when result is available
            else:
                remote_client.reply(obj, {'value': None, 'type': None})

        elif command == 'reconnect':
            _logger.n_debug("reconnect(%s)", args)
            remote_client.reply(obj, {'executionStatus': self.status})

        elif command == 'getThreads':
            _logger.x_debug("getThreads(%s)", args)
            threads_list = self.get_threads()
            remote_client.reply(obj, threads_list)

        elif command == 'setDebuggedThread':
            _logger.x_debug("setDebuggedThread(%s)", args)
            ret_val = self.set_debugged_thread(args['ident'])
            if ret_val['error']:
                remote_client.reply(obj,
                                    {},  # result
                                    command_exec_status='error',
                                    error_messages=[ret_val['error']])
            else:
                remote_client.reply(obj, ret_val['result'])

        elif command == '_InternalQuit':
            # '_InternalQuit' is an IKP3db internal message, generated by
            # IKPdbConnectionHandler when a socket.error occurred.
            # Usually this occurs when the socket has been destroyed because
            # the debugged program called sys.exit().
            # So we leave the command loop to stop the debugger thread
            # in order to allow the debugged program to shut down correctly.
            # This message must NEVER be sent by the remote client.
            _logger.e_debug("_InternalQuit(%s)", args)
            self._command_q.put({'cmd':'_InternalQuit'})
            return

        else:  # unrecognized command; just log and ignore
            _logger.g_critical("Unsupported command '%s' ignored.", command)

        if IKPdbLogger.enabled:
            _logger.b_debug("Current breakpoints list [any_active_breakpoint=%s]:",
                            IKBreakpoint.any_active_breakpoint)
            _logger.b_debug(" IKBreakpoint.breakpoints_by_file_and_line:")
            if not IKBreakpoint.breakpoints_by_file_and_line:
                _logger.b_debug(" <empty>")
            for file_line, bp in list(IKBreakpoint.breakpoints_by_file_and_line.items()):
                _logger.b_debug(" %s => #%s, enabled=%s, condition=%s, %s",
                                file_line, bp.number, bp.enabled,
                                repr(bp.condition), bp)
            _logger.b_debug(" IKBreakpoint.breakpoints_files = %s",
                            IKBreakpoint.breakpoints_files)
            _logger.b_debug(" IKBreakpoint.breakpoints_by_number = %s",
                            IKBreakpoint.breakpoints_by_number)
0.004672
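The loop above implements a request/reply protocol over dicts; one exchange sketched as plain data (the request field names follow the handler, but the path, the wire framing, and the reply key names are assumptions, since remote_client.reply() hides them):

# Client -> debugger: set a breakpoint.
request = {
    'command': 'setBreakpoint',
    'args': {
        'file_name': '/path/to/script.py',   # hypothetical path
        'line_number': 42,
        'condition': None,
        'enabled': True,
    },
}

# Debugger -> client on success (shape inferred from remote_client.reply()).
reply = {
    'result': {'breakpoint_number': 1},
    'commandExecStatus': 'ok',
    'errorMessages': [],
}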
def ping(): ''' Returns true if the device is reachable, else false. ''' try: session, cookies, csrf_token = logon() logout(session, cookies, csrf_token) except salt.exceptions.CommandExecutionError: return False except Exception as err: log.debug(err) return False return True
0.002899
def message(self, category, subject, msg_file): """Send message to all users in `category`.""" users = getattr(self.sub, category) if not users: print('There are no {} users on {}.'.format(category, self.sub)) return if msg_file: try: msg = open(msg_file).read() except IOError as error: print(str(error)) return else: print('Enter message:') msg = sys.stdin.read() print('You are about to send the following message to the users {}:' .format(', '.join([str(x) for x in users]))) print('---BEGIN MESSAGE---\n{}\n---END MESSAGE---'.format(msg)) if input('Are you sure? yes/[no]: ').lower() not in ['y', 'yes']: print('Message sending aborted.') return for user in users: user.send_message(subject, msg) print('Sent to: {}'.format(user))
0.002018
def unique_scene_labels(scene_list): """Find the unique scene labels Parameters ---------- scene_list : list, shape=(n,) A list containing scene dicts Returns ------- labels: list, shape=(n,) Unique labels in alphabetical order """ if isinstance(scene_list, dcase_util.containers.MetaDataContainer): return scene_list.unique_scene_labels else: labels = [] for item in scene_list: if 'scene_label' in item and item['scene_label'] not in labels: labels.append(item['scene_label']) labels.sort() return labels
0.001575
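Usage with a plain list of dicts, exercising the fallback branch (assuming dcase_util is importable as in the snippet; the entries are illustrative):

scene_list = [
    {'scene_label': 'office'},
    {'scene_label': 'street'},
    {'scene_label': 'office'},
    {'filename': 'a.wav'},               # no scene_label -> skipped
]
print(unique_scene_labels(scene_list))   # ['office', 'street']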
def set_tunnel(self, host, port=None, headers=None): """ Sets up the host and the port for the HTTP CONNECT Tunnelling. The headers argument should be a mapping of extra HTTP headers to send with the CONNECT request. """ self._tunnel_host = host self._tunnel_port = port if headers: self._tunnel_headers = headers else: self._tunnel_headers.clear()
0.004577
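This mirrors the standard-library method of the same name; typical use tunnels HTTPS through a proxy (host names, port, and credentials below are placeholders):

import http.client

# Open a connection to the proxy, then CONNECT-tunnel to the destination.
conn = http.client.HTTPSConnection('proxy.example.com', 8080)
conn.set_tunnel('www.example.com', 443,
                headers={'Proxy-Authorization': 'Basic ...'})
conn.request('GET', '/')
response = conn.getresponse()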
def getVariances(self): """ get variances """ var = [] var.append(self.Cr.K().diagonal()) if self.bgRE: var.append(self.Cg.K().diagonal()) var.append(self.Cn.K().diagonal()) var = sp.array(var) return var
0.006944
def filtered_search( self, id_list: List, negated_classes: List, limit: Optional[int] = 100, taxon_filter: Optional[int] = None, category_filter: Optional[str] = None, method: Optional[SimAlgorithm] = SimAlgorithm.PHENODIGM) -> SimResult: """ Owlsim2 filtered search, resolves taxon and category to a namespace, calls search_by_attribute_set, and converts to SimResult object """ if len(negated_classes) > 0: logging.warning("Owlsim2 does not support negation, ignoring neg classes") namespace_filter = self._get_namespace_filter(taxon_filter, category_filter) owlsim_results = search_by_attribute_set(self.url, tuple(id_list), limit, namespace_filter) return self._simsearch_to_simresult(owlsim_results, method)
0.006873
def set_display_name(self, display_name): """Sets a display name. A display name is required and if not set, will be set by the provider. arg: display_name (string): the new display name raise: InvalidArgument - ``display_name`` is invalid raise: NoAccess - ``Metadata.isReadonly()`` is ``true`` raise: NullArgument - ``display_name`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ self._my_map['displayName'] = self._get_display_text(display_name, self.get_display_name_metadata())
0.005
def _set_packet_error_counters(self, v, load=False): """ Setter method for packet_error_counters, mapped from YANG variable /mpls_state/rsvp/statistics/packet_error_counters (container) If this variable is read-only (config: false) in the source YANG file, then _set_packet_error_counters is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_packet_error_counters() directly. YANG Description: RSVP error packet counters """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=packet_error_counters.packet_error_counters, is_container='container', presence=False, yang_name="packet-error-counters", rest_name="packet-error-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-rsvp-packet-error-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """packet_error_counters must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=packet_error_counters.packet_error_counters, is_container='container', presence=False, yang_name="packet-error-counters", rest_name="packet-error-counters", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-rsvp-packet-error-counters', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""", }) self.__packet_error_counters = t if hasattr(self, '_set'): self._set()
0.005564
def plot_psrrates(pkllist, outname=''):
    """ Plot cumulative rate histograms. List of pkl files in order, as for make_psrrates.
    """

    if not outname:
        outname = 'tmp.png'

    labels = {0: 'Flux at 0\'', 1: 'Flux at 7\'', 2: 'Flux at 15\'', 3: 'Flux at 25\''}
    labelsr = {1: 'Flux Ratio 7\' to 0\'', 2: 'Flux Ratio 15\' to 0\'', 3: 'Flux Ratio 25\' to 0\''}
    colors = {0: 'b.', 1: 'r.', 2: 'g.', 3: 'y.'}

    rates = make_psrrates(pkllist)
    plt.clf()
    fig = plt.figure(1, figsize=(10,8), facecolor='white')
    ax = fig.add_subplot(211, axis_bgcolor='white')
    for kk in rates.keys():
        flux, rate = rates[kk]
        plt.plot(flux, rate, colors[kk], label=labels[kk])

    plt.setp(ax.get_xticklabels(), visible=False)
    plt.ylabel('Flux (Jy)', fontsize='20')
    plt.legend(numpoints=1)
    plt.loglog()

    ax2 = fig.add_subplot(212, sharex=ax, axis_bgcolor='white')
    flux0, rate0 = rates[0]
    for kk in rates.keys():
        flux, rate = rates[kk]
        if kk == 1:
            r10 = [rate[i]/rate0[np.where(flux0 == flux[i])[0][0]] for i in range(len(rate))]
            plt.plot(flux, r10, colors[kk], label=labelsr[kk])
        elif kk == 2:
            r20 = [rate[i]/rate0[np.where(flux0 == flux[i])[0][0]] for i in range(len(rate))]
            plt.plot(flux, r20, colors[kk], label=labelsr[kk])
        elif kk == 3:
            r30 = [rate[i]/rate0[np.where(flux0 == flux[i])[0][0]] for i in range(len(rate))]
            plt.plot(flux, r30, colors[kk], label=labelsr[kk])

    plt.xlabel('Rate (1/s)', fontsize='20')
    plt.ylabel('Flux ratio', fontsize='20')
    plt.legend(numpoints=1)
    plt.subplots_adjust(hspace=0)

    # find typical ratio. avoid pulsar period saturation and low-count regimes (high and low ends)
    if len(rates) == 4:
        logger.info('flux ratio, lowest common (1/0, 2/0, 3/0): %s',
                    (r10[len(r30)-1], r20[len(r30)-1], r30[-1]))
        logger.info('flux ratio, high end (1/0, 2/0, 3/0): %s',
                    (r10[-1], r20[-1], r30[-1]))
    elif len(rates) == 3:
        logger.info('flux ratio, lowest common (1/0, 2/0): %s',
                    (r10[len(r20)-1], r20[-1]))
        logger.info('flux ratio, high end (1/0, 2/0): %s',
                    (r10[-1], r20[-1]))

    plt.savefig(outname)
0.00584