Columns: text (string, lengths 78 to 104k) · score (float64, range 0 to 0.18)
def from_apps(cls, apps):
    "Takes in an Apps and returns a VersionedProjectState matching it"
    app_models = {}
    for model in apps.get_models(include_swapped=True):
        model_state = VersionedModelState.from_model(model)
        app_models[(model_state.app_label, model_state.name.lower())] = model_state
    return cls(app_models)
0.008174
def construct_all(templates, **unbound_var_values):
    """Constructs all the given templates in a single pass without redundancy.

    This is useful when the templates have a common substructure and you want
    the smallest possible graph.

    Args:
      templates: A sequence of templates.
      **unbound_var_values: The unbound_var values to replace.

    Returns:
      A list of results corresponding to templates.

    Raises:
      TypeError: If any value in templates is unsupported.
      ValueError: If the unbound_var values specified are not complete or
        contain unknown values.
    """
    def _merge_dicts(src, dst):
        for k, v in six.iteritems(src):
            if dst.get(k, v) != v:
                raise ValueError('Conflicting values bound for %s: %s and %s'
                                 % (k, v, dst[k]))
            else:
                dst[k] = v

    # pylint: disable=protected-access
    all_unbound_vars = {}
    context = {}
    for x in templates:
        if isinstance(x, _DeferredLayer):
            _merge_unbound_var_dicts(x.unbound_vars, all_unbound_vars)
            _merge_dicts(x._partial_context, context)
        else:
            raise TypeError('Unexpected type: %s' % type(x))
    _merge_dicts(
        _assign_values_to_unbound_vars(all_unbound_vars, unbound_var_values),
        context)
    # We need to create a result of known size to avoid client pylint errors.
    result = list(templates)
    for i, x in enumerate(result):
        if isinstance(x, _DeferredLayer):
            result[i] = x._construct(context)
    return result
0.012212
def delete_folder(self, folder):
    '''Delete a folder. It will recursively delete all the content.

    Args:
        folder (str): The UUID of the folder to be deleted.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: 403
        StorageNotFoundException: 404
        HTTPError: other non-20x error codes
    '''
    if not is_valid_uuid(folder):
        raise StorageArgumentException(
            'Invalid UUID for folder: {0}'.format(folder))
    self._authenticated_request \
        .to_endpoint('folder/{}/'.format(folder)) \
        .delete()
0.002853
def _node_rating_count(self):
    """
    Return node_rating_count record or create it if it does not exist

    usage:
        node = Node.objects.get(pk=1)
        node.rating_count
    """
    try:
        return self.noderatingcount
    except ObjectDoesNotExist:
        node_rating_count = NodeRatingCount(node=self)
        node_rating_count.save()
        return node_rating_count
0.002604
def gpu_a_trous():
    """
    Simple convenience function so that the a trous kernels can be easily
    accessed by any function.
    """
    ker1 = SourceModule("""
        __global__ void gpu_a_trous_row_kernel(float *in1, float *in2, float *wfil, int *scale)
        {
            const int len = gridDim.x*blockDim.x;
            const int col = (blockDim.x * blockIdx.x + threadIdx.x);
            const int i = col;
            const int row = (blockDim.y * blockIdx.y + threadIdx.y);
            const int j = row*len;
            const int tid2 = i + j;
            const int lstp = exp2(float(scale[0] + 1));
            const int sstp = exp2(float(scale[0]));

            in2[tid2] = wfil[2]*in1[tid2];

            if (row < lstp)
                { in2[tid2] += wfil[0]*in1[col + len*(lstp - row - 1)]; }
            else
                { in2[tid2] += wfil[0]*in1[tid2 - lstp*len]; }

            if (row < sstp)
                { in2[tid2] += wfil[1]*in1[col + len*(sstp - row - 1)]; }
            else
                { in2[tid2] += wfil[1]*in1[tid2 - sstp*len]; }

            if (row >= (len - sstp))
                { in2[tid2] += wfil[3]*in1[col + len*(2*len - row - sstp - 1)]; }
            else
                { in2[tid2] += wfil[3]*in1[tid2 + sstp*len]; }

            if (row >= (len - lstp))
                { in2[tid2] += wfil[4]*in1[col + len*(2*len - row - lstp - 1)]; }
            else
                { in2[tid2] += wfil[4]*in1[tid2 + lstp*len]; }
        }
        """, keep=True)

    ker2 = SourceModule("""
        __global__ void gpu_a_trous_col_kernel(float *in1, float *in2, float *wfil, int *scale)
        {
            const int len = gridDim.x*blockDim.x;
            const int col = (blockDim.x * blockIdx.x + threadIdx.x);
            const int i = col;
            const int row = (blockDim.y * blockIdx.y + threadIdx.y);
            const int j = row*len;
            const int tid2 = i + j;
            const int lstp = exp2(float(scale[0] + 1));
            const int sstp = exp2(float(scale[0]));

            in2[tid2] = wfil[2]*in1[tid2];

            if (col < lstp)
                { in2[tid2] += wfil[0]*in1[j - col + lstp - 1]; }
            else
                { in2[tid2] += wfil[0]*in1[tid2 - lstp]; }

            if (col < sstp)
                { in2[tid2] += wfil[1]*in1[j - col + sstp - 1]; }
            else
                { in2[tid2] += wfil[1]*in1[tid2 - sstp]; }

            if (col >= (len - sstp))
                { in2[tid2] += wfil[3]*in1[j + 2*len - sstp - col - 1]; }
            else
                { in2[tid2] += wfil[3]*in1[tid2 + sstp]; }

            if (col >= (len - lstp))
                { in2[tid2] += wfil[4]*in1[j + 2*len - lstp - col - 1]; }
            else
                { in2[tid2] += wfil[4]*in1[tid2 + lstp]; }
        }
        """, keep=True)

    return ker1.get_function("gpu_a_trous_row_kernel"), ker2.get_function("gpu_a_trous_col_kernel")
0.004394
def _node_filter(self, node, ancestors, filtrates):
    '''_node_filter

    Low-level api: Remove unrelated nodes in config. This is a recursive
    method.

    Parameters
    ----------

    node : `Element`
        A node to be processed.

    ancestors : `list`
        A list of ancestors of filtrates.

    filtrates : `list`
        A list of filtrates which are result of xpath evaluation.

    Returns
    -------

    None
        There is no return of this method.
    '''
    if node in filtrates:
        return
    elif node in ancestors:
        if node.tag != config_tag:
            s_node = self.get_schema_node(node)
        if node.tag != config_tag and \
           s_node.get('type') == 'list':
            for child in node.getchildren():
                s_node = self.get_schema_node(child)
                if s_node.get('is_key') or child in filtrates:
                    continue
                elif child in ancestors:
                    self._node_filter(child, ancestors, filtrates)
                else:
                    node.remove(child)
        else:
            for child in node.getchildren():
                if child in filtrates:
                    continue
                elif child in ancestors:
                    self._node_filter(child, ancestors, filtrates)
                else:
                    node.remove(child)
    else:
        node.getparent().remove(node)
0.001262
async def set_key_metadata(wallet_handle: int, verkey: str, metadata: str) -> None:
    """
    Saves or replaces the metadata stored with the given key in the wallet.

    :param wallet_handle: Wallet handle (created by open_wallet).
    :param verkey: the key (verkey, key id) to store metadata.
    :param metadata: the meta information that will be store with the key.
    :return: Error code
    """
    logger = logging.getLogger(__name__)
    logger.debug("set_key_metadata: >>> wallet_handle: %r, verkey: %r, metadata: %r",
                 wallet_handle,
                 verkey,
                 metadata)

    if not hasattr(set_key_metadata, "cb"):
        logger.debug("set_key_metadata: Creating callback")
        set_key_metadata.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32))

    c_wallet_handle = c_int32(wallet_handle)
    c_verkey = c_char_p(verkey.encode('utf-8'))
    c_metadata = c_char_p(metadata.encode('utf-8'))

    await do_call('indy_set_key_metadata',
                  c_wallet_handle,
                  c_verkey,
                  c_metadata,
                  set_key_metadata.cb)

    logger.debug("set_key_metadata: <<<")
0.001693
def add_subscriber(self, connection_id, subscriptions, last_known_block_id):
    """Register the subscriber for the given event subscriptions.

    Raises:
        InvalidFilterError
            One of the filters in the subscriptions is invalid.
    """
    with self._subscribers_cv:
        self._subscribers[connection_id] = \
            EventSubscriber(connection_id, subscriptions, last_known_block_id)

    LOGGER.debug('Added Subscriber %s for %s', connection_id, subscriptions)
0.005172
def set_file_to_upload(self, file_to_upload):
    # type: (str) -> None
    """Delete any existing url and set the file uploaded to the local path provided

    Args:
        file_to_upload (str): Local path to file to upload

    Returns:
        None
    """
    if 'url' in self.data:
        del self.data['url']
    self.file_to_upload = file_to_upload
0.010076
def start_container(self, conf, tty=True, detach=False, is_dependency=False, no_intervention=False):
    """Start up a single container"""
    # Make sure we can bind to our specified ports!
    if not conf.harpoon.docker_api.base_url.startswith("http"):
        self.find_bound_ports(conf.ports)

    container_id = conf.container_id
    container_name = conf.container_name

    conf.harpoon.network_manager.register(conf, container_name)

    log.info("Starting container %s (%s)", container_name, container_id)
    try:
        if not detach and not is_dependency:
            self.start_tty(conf, interactive=tty, **conf.other_options.start)
        else:
            conf.harpoon.docker_api.start(container_id, **conf.other_options.start)
    except docker.errors.APIError as error:
        if str(error).startswith("404 Client Error: Not Found"):
            log.error("Container died before we could even get to it...")

    inspection = None
    if not detach and not is_dependency:
        inspection = self.get_exit_code(conf)

    if inspection and not no_intervention:
        if not inspection["State"]["Running"] and inspection["State"]["ExitCode"] != 0:
            self.stage_run_intervention(conf)
            raise BadImage("Failed to run container", container_id=container_id,
                           container_name=container_name,
                           reason="nonzero exit code after launch")

    if not is_dependency and conf.harpoon.intervene_afterwards and not no_intervention:
        self.stage_run_intervention(conf, just_do_it=True)
0.006028
def indirectInitialMatrix(self, initialState):
    """
    Given some initial state, this iteratively determines new states.
    We repeatedly call the transition function on unvisited states in the
    frontier set. Each newly visited state is put in a dictionary called
    'mapping' and the rates are stored in a dictionary.
    """
    mapping = {}
    rates = OrderedDict()

    # Check whether the initial state is defined and of the correct type,
    # and convert to a tuple or int.
    convertedState = self.checkInitialState(initialState)

    if isinstance(convertedState, set):
        # If initialstates is a set, include all states in the set in the mapping.
        frontier = set(convertedState)
        for idx, state in enumerate(convertedState):
            mapping[state] = idx
            if idx == 0:
                # Test the return type of the transition function (dict or numpy).
                usesNumpy = self.checkTransitionType(initialState)
    else:
        # Otherwise include only the single state.
        frontier = set([convertedState])
        usesNumpy = self.checkTransitionType(initialState)
        mapping[convertedState] = 0

    while len(frontier) > 0:
        fromstate = frontier.pop()
        fromindex = mapping[fromstate]
        if usesNumpy:
            # If numpy is used, convert to a dictionary with tuples and rates.
            transitions = self.transition(np.array(fromstate))
            transitions = self.convertToTransitionDict(transitions)
        else:
            transitions = self.transition(fromstate)
        for tostate, rate in transitions.items():
            if tostate not in mapping:
                frontier.add(tostate)
                mapping[tostate] = len(mapping)
            toindex = mapping[tostate]
            rates[(fromindex, toindex)] = rate

    # Inverse the keys and values in mapping to get a dictionary with indices and states.
    self.mapping = {value: key for key, value in list(mapping.items())}

    # Use the `rates` dictionary to fill a sparse dok matrix.
    D = dok_matrix((self.size, self.size))
    D.update(rates)
    return D.tocsr()
0.018783
def checkMain():
    """
    This script performs two checks. First it tries to import nupic.bindings
    to check that it is correctly installed. Then it tries to import the C
    extensions under nupic.bindings. Appropriate user-friendly status messages
    are printed depending on the outcome.
    """
    try:
        checkImportBindingsInstalled()
    except ImportError as e:
        print ("Could not import nupic.bindings. It must be installed before use. "
               "Error message:")
        print e.message
        return

    try:
        checkImportBindingsExtensions()
    except ImportError as e:
        print ("Could not import C extensions for nupic.bindings. Make sure that "
               "the package was properly installed. Error message:")
        print e.message
        return

    print "Successfully imported nupic.bindings."
0.011335
def convert_pro_to_hyp(pro):
    """Converts a pro residue to a hydroxypro residue.

    All metadata associated with the original pro will be lost i.e. tags.
    As a consequence, it is advisable to relabel all atoms in the structure
    in order to make them contiguous.

    Parameters
    ----------
    pro: ampal.Residue
        The proline residue to be mutated to hydroxyproline.

    Examples
    --------
    We can create a collagen model using isambard and convert every third
    residue to hydroxyproline:

    >>> import isambard
    >>> col = isambard.specifications.CoiledCoil.tropocollagen(aa=21)
    >>> col.pack_new_sequences(['GPPGPPGPPGPPGPPGPPGPP']*3)
    >>> to_convert = [
    ...     res for (i, res) in enumerate(col.get_monomers())
    ...     if not (i + 1) % 3]
    >>> for pro in to_convert:
    ...     isambard.ampal.non_canonical.convert_pro_to_hyp(pro)
    >>> col.sequences
    ['GPXGPXGPXGPXGPXGPXGPX', 'GPXGPXGPXGPXGPXGPXGPX', 'GPXGPXGPXGPXGPXGPXGPX']
    """
    with open(str(REF_PATH / 'hydroxyproline_ref_1bkv_0_6.pickle'), 'rb') as inf:
        hyp_ref = pickle.load(inf)
    align_nab(hyp_ref, pro)
    to_remove = ['CB', 'CG', 'CD']
    for (label, atom) in pro.atoms.items():
        if atom.element == 'H':
            to_remove.append(label)
    for label in to_remove:
        del pro.atoms[label]
    for key, val in hyp_ref.atoms.items():
        if key not in pro.atoms.keys():
            pro.atoms[key] = val
    pro.mol_code = 'HYP'
    pro.mol_letter = 'X'
    pro.is_hetero = True
    pro.tags = {}
    pro.states = {'A': pro.atoms}
    pro.active_state = 'A'
    for atom in pro.get_atoms():
        atom.ampal_parent = pro
        atom.tags = {'bfactor': 1.0, 'charge': ' ', 'occupancy': 1.0, 'state': 'A'}
    return
0.001113
def get_noise_level(cell):
    """
    Gets the noise level of a network / cell.
    @param string cell
        A network / cell from iwlist scan.

    @return string
        The noise level of the network.
    """
    noise = matching_line(cell, "Noise level=")
    if noise is None:
        return ""
    noise = noise.split("=")[1]
    return noise.split(' ')[0]
0.002762
def _bisearch(ucs, table):
    """
    Auxiliary function for binary search in interval table.

    :arg int ucs: Ordinal value of unicode character.
    :arg list table: List of starting and ending ranges of ordinal values,
        in form of ``[(start, end), ...]``.
    :rtype: int
    :returns: 1 if ordinal value ucs is found within lookup table, else 0.
    """
    lbound = 0
    ubound = len(table) - 1

    if ucs < table[0][0] or ucs > table[ubound][1]:
        return 0
    while ubound >= lbound:
        mid = (lbound + ubound) // 2
        if ucs > table[mid][1]:
            lbound = mid + 1
        elif ucs < table[mid][0]:
            ubound = mid - 1
        else:
            return 1

    return 0
0.001393
def bindings(self, entity_id, typ, service):
    """
    Get me all the bindings that are registered for a service entity

    :param entity_id:
    :param typ:
    :param service:
    :return:
    """
    return self.service(entity_id, typ, service)
0.007634
def easybake(css_in, html_in=sys.stdin, html_out=sys.stdout, last_step=None,
             coverage_file=None, use_repeatable_ids=False):
    """Process the given HTML file stream with the css stream."""
    html_doc = etree.parse(html_in)
    oven = Oven(css_in, use_repeatable_ids)
    oven.bake(html_doc, last_step)

    # serialize out HTML
    print(etree.tostring(html_doc, method="xml").decode('utf-8'),
          file=html_out)

    # generate CSS coverage_file file
    if coverage_file:
        print('SF:{}'.format(css_in.name), file=coverage_file)
        print(oven.get_coverage_report(), file=coverage_file)
        print('end_of_record', file=coverage_file)
0.00149
def datetime_f(dttm):
    """Formats datetime to take less room when it is recent"""
    if dttm:
        dttm = dttm.isoformat()
        now_iso = datetime.now().isoformat()
        if now_iso[:10] == dttm[:10]:
            dttm = dttm[11:]
        elif now_iso[:4] == dttm[:4]:
            dttm = dttm[5:]
    return '<nobr>{}</nobr>'.format(dttm)
0.002865
def store_report_link(backend, user, response, *args, **kwargs):
    '''
    Part of the Python Social Auth Pipeline.
    Stores the result service URL reported by the LMS / LTI tool consumer
    so that we can use it later.
    '''
    if backend.name == 'lti':
        assignment_pk = response.get('assignment_pk', None)
        assignment = get_object_or_404(Assignment, pk=assignment_pk)
        lti_result, created = LtiResult.objects.get_or_create(assignment=assignment, user=user)
        if created:
            logger.debug("LTI result record not found, creating it.")
        else:
            logger.debug("LTI result record found, updating it.")
        # Expected, check LTI standard
        lti_result.lis_result_sourcedid = response.get('lis_result_sourcedid')
        lti_result.lis_outcome_service_url = response.get('lis_outcome_service_url')
        lti_result.save()
0.005708
def get_activations_stimulate(self):
    """Extract Activation INDRA Statements via stimulation."""
    # TODO: extract to other patterns:
    # - Stimulation by EGF activates ERK
    # - Stimulation by EGF leads to ERK activation
    # Search for stimulation event
    stim_events = self.tree.findall("EVENT/[type='ONT::STIMULATE']")
    for event in stim_events:
        event_id = event.attrib.get('id')
        if event_id in self._static_events:
            continue
        controller = event.find("arg1/[@role=':AGENT']")
        affected = event.find("arg2/[@role=':AFFECTED']")
        # If either the controller or the affected is missing, skip
        if controller is None or affected is None:
            continue
        controller_id = controller.attrib.get('id')
        # Here, implicitly, we require that the controller is a TERM
        # and not an EVENT
        controller_term = self.tree.find("TERM/[@id='%s']" % controller_id)
        affected_id = affected.attrib.get('id')
        # Here it is implicit that the affected is an event not
        # a TERM
        affected_event = self.tree.find("EVENT/[@id='%s']" % affected_id)
        if controller_term is None or affected_event is None:
            continue
        controller_term_type = controller_term.find('type')
        # The controller term must be a molecular entity
        if controller_term_type is None or \
                controller_term_type.text not in molecule_types:
            continue
        controller_agent = self._get_agent_by_id(controller_id, None)
        if controller_agent is None:
            continue
        affected_event_type = affected_event.find('type')
        if affected_event_type is None:
            continue
        # Construct evidence
        ev = self._get_evidence(event)
        ev.epistemics['direct'] = False
        location = self._get_event_location(affected_event)
        if affected_event_type.text == 'ONT::ACTIVATE':
            affected = affected_event.find(".//*[@role=':AFFECTED']")
            if affected is None:
                continue
            affected_agent = self._get_agent_by_id(affected.attrib['id'],
                                                   affected_id)
            if affected_agent is None:
                continue
            for a1, a2 in _agent_list_product((controller_agent,
                                               affected_agent)):
                st = Activation(a1, a2, evidence=[deepcopy(ev)])
                _stmt_location_to_agents(st, location)
                self.statements.append(st)
        elif affected_event_type.text == 'ONT::ACTIVITY':
            agent_tag = affected_event.find(".//*[@role=':AGENT']")
            if agent_tag is None:
                continue
            affected_agent = self._get_agent_by_id(agent_tag.attrib['id'],
                                                   affected_id)
            if affected_agent is None:
                continue
            for a1, a2 in _agent_list_product((controller_agent,
                                               affected_agent)):
                st = Activation(a1, a2, evidence=[deepcopy(ev)])
                _stmt_location_to_agents(st, location)
                self.statements.append(st)
0.000569
def nub(it):
    '''Dedups an iterable in arbitrary order.

    Uses memory proportional to the number of unique items in ``it``.
    '''
    seen = set()
    for v in it:
        h = hash(v)
        if h in seen:
            continue
        seen.add(h)
        yield v
0.003704
def pow(val: Any, exponent: Any, default: Any = RaiseTypeErrorIfNotProvided) -> Any:
    """Returns `val**factor` of the given value, if defined.

    Values define an extrapolation by defining a __pow__(self, exponent) method.
    Note that the method may return NotImplemented to indicate a particular
    extrapolation can't be done.

    Args:
        val: The value or iterable of values to exponentiate.
        exponent: The extrapolation factor. For example, if this is 0.5 and val
            is a gate then the caller is asking for a square root of the gate.
        default: Determines the fallback behavior when `val` doesn't have
            an extrapolation defined. If `default` is not set and that occurs,
            a TypeError is raised instead.

    Returns:
        If `val` has a __pow__ method that returns something besides
        NotImplemented, that result is returned. Otherwise, if a default value
        was specified, the default value is returned.

    Raises:
        TypeError: `val` doesn't have a __pow__ method (or that method returned
            NotImplemented) and no `default` value was specified.
    """
    raiser = getattr(val, '__pow__', None)
    result = NotImplemented if raiser is None else raiser(exponent)
    if result is not NotImplemented:
        return result

    if default is not RaiseTypeErrorIfNotProvided:
        return default
    if raiser is None:
        raise TypeError("object of type '{}' "
                        "has no __pow__ method.".format(type(val)))
    raise TypeError("object of type '{}' does have a __pow__ method, "
                    "but it returned NotImplemented.".format(type(val)))
0.001188
def detail(self, block_identifier: BlockSpecification) -> ChannelDetails:
    """ Returns the channel details. """
    return self.token_network.detail(
        participant1=self.participant1,
        participant2=self.participant2,
        block_identifier=block_identifier,
        channel_identifier=self.channel_identifier,
    )
0.00554
def get_queue_system_lock(self, queue):
    """
    Get system lock timeout

    Returns time system lock expires or None if lock does not exist
    """
    key = self._key(LOCK_REDIS_KEY, queue)
    return Semaphore.get_system_lock(self.connection, key)
0.007168
def remove_neighbours(self):
    """
    Remove all the pixels at max order located at the bound of the moc
    """
    time_delta = 1 << (2 * (IntervalSet.HPY_MAX_ORDER - self.max_order))
    intervals_arr = self._interval_set._intervals
    intervals_arr[:, 0] = np.minimum(intervals_arr[:, 0] + time_delta, (1 << 58) - 1)
    intervals_arr[:, 1] = np.maximum(intervals_arr[:, 1] - time_delta, 0)
    good_intervals = intervals_arr[:, 1] > intervals_arr[:, 0]
    self._interval_set = IntervalSet(intervals_arr[good_intervals])
0.005291
def render_search(self, ctx, data):
    """
    Render some UI for performing searches, if we know about a search
    aggregator.
    """
    if self.username is None:
        return ''
    translator = self._getViewerPrivateApplication()
    searchAggregator = translator.getPageComponents().searchAggregator
    if searchAggregator is None or not searchAggregator.providers():
        return ''
    return ctx.tag.fillSlots(
        'form-action', translator.linkTo(searchAggregator.storeID))
0.00369
def parts(path):
    # type: (Text) -> List[Text]
    """Split a path in to its component parts.

    Arguments:
        path (str): Path to split in to parts.

    Returns:
        list: List of components

    Example:
        >>> parts('/foo/bar/baz')
        ['/', 'foo', 'bar', 'baz']
    """
    _path = normpath(path)
    components = _path.strip("/")

    _parts = ["/" if _path.startswith("/") else "./"]
    if components:
        _parts += components.split("/")
    return _parts
0.002037
def reload_module(
        module: typing.Union[str, types.ModuleType],
        recursive: bool,
        force: bool
) -> bool:
    """
    Reloads the specified module, which can either be a module object or
    a string name of a module. Will not reload a module that has not been
    imported.

    :param module:
        A module object or string module name that should be refreshed if
        its currently loaded version is out of date or the action is forced.
    :param recursive:
        When true, any imported sub-modules of this module will also be
        refreshed if they have been updated.
    :param force:
        When true, all modules will be refreshed even if it doesn't appear
        that they have been updated.
    :return:
    """
    if isinstance(module, str):
        module = get_module(module)

    if module is None or not isinstance(module, types.ModuleType):
        return False

    try:
        step = session.project.get_internal_project().current_step
        modified = step.last_modified if step else None
    except AttributeError:
        modified = 0

    if modified is None:
        # If the step has no modified time it hasn't been run yet and
        # a reload won't be needed
        return False

    newer_than = modified if not force and modified else 0

    if recursive:
        children_reloaded = reload_children(module, newer_than)
    else:
        children_reloaded = False

    reloaded = do_reload(module, newer_than)
    return reloaded or children_reloaded
0.000657
def _minimal_common_integer_splitted(si_0, si_1):
    """
    Calculates the minimal integer that appears in both StridedIntervals.
    It's equivalent to finding an integral solution for equation `ax + b = cy + d`
    that makes `ax + b` minimal, si_0.stride, si_1.stride being a and c, and
    si_0.lower_bound, si_1.lower_bound being b and d, respectively. Upper bounds
    are used to check whether the minimal common integer exceeds the bound or
    not. None is returned if no minimal common integers can be found within
    the range.

    Some assumptions:
    # - None of the StridedIntervals straddles the south pole. Consequently, we have
    #   x <= max_int(si.bits) and y <= max_int(si.bits)
    # - a, b, c, d are all positive integers
    # - x >= 0, y >= 0

    :param StridedInterval si_0: the first StridedInterval
    :param StridedInterval si_1: the second StrideInterval
    :return: the minimal common integer, or None if there is no common integer
    """
    a, c = si_0.stride, si_1.stride
    b, d = si_0.lower_bound, si_1.lower_bound

    # if any of them is an integer
    if si_0.is_integer:
        if si_1.is_integer:
            return None if si_0.lower_bound != si_1.lower_bound else si_0.lower_bound
        elif si_0.lower_bound >= si_1.lower_bound and \
                si_0.lower_bound <= si_1.upper_bound and \
                (si_0.lower_bound - si_1.lower_bound) % si_1.stride == 0:
            return si_0.lower_bound
        else:
            return None
    elif si_1.is_integer:
        return StridedInterval._minimal_common_integer_splitted(si_1, si_0)

    # shortcut
    if si_0.upper_bound < si_1.lower_bound or si_1.upper_bound < si_0.lower_bound:
        # They don't overlap at all
        return None

    if (d - b) % StridedInterval.gcd(a, c) != 0:
        # They don't overlap
        return None

    """
    Given two strided intervals a = sa[lba, uba] and b = sb[lbb, ubb], the first
    integer shared by them is found by finding the minimum values of ka and kb
    which solve the equation:
        ka * sa + lba = kb * sb + lbb
    In particular one can solve the above diophantine equation and find the
    parameterized solutions of ka and kb, with respect to a parameter t.
    The minimum natural value of the parameter t which gives two positive
    natural values of ka and kb is used to resolve ka and kb, and finally to
    solve the above equation and get the minimum shared integer.
    """
    x, y = StridedInterval.diop_natural_solution_linear(-(b - d), a, -c)
    if x is None or y is None:
        return None
    first_integer = x * a + b
    assert first_integer == y * c + d
    if first_integer >= si_0.lower_bound and first_integer <= si_0.upper_bound and \
            first_integer >= si_1.lower_bound and first_integer <= si_1.upper_bound:
        return first_integer
    else:
        return None
0.005516
def get_vasp_input(self, vasp_input_set=MPRelaxSet, **kwargs):
    """
    Returns VASP input as a dict of vasp objects.

    Args:
        vasp_input_set (pymatgen.io.vaspio_set.VaspInputSet): input set
            to create vasp input files from structures
    """
    d = vasp_input_set(self.final_structure, **kwargs).get_vasp_input()
    d["transformations.json"] = json.dumps(self.as_dict())
    return d
0.004484
def xsl_text(self, text, parent):
    """Construct an XSLT 'text' element containing `text`.

    `parent` is this element's parent.
    """
    res = ET.SubElement(parent, "text")
    res.text = text
    return res
0.008368
def build(args):
    """
    %prog build current.fasta Bacteria_Virus.fasta prefix

    Build assembly files after a set of clean-ups:
    1. Use cdhit (100%) to remove duplicate scaffolds
    2. Screen against the bacteria and virus database (remove scaffolds 95% id, 50% cov)
    3. Mask matches to UniVec_Core
    4. Sort by decreasing scaffold sizes
    5. Rename the scaffolds sequentially
    6. Build the contigs by splitting scaffolds at gaps
    7. Rename the contigs sequentially
    """
    from jcvi.apps.cdhit import deduplicate
    from jcvi.apps.vecscreen import mask
    from jcvi.formats.fasta import sort

    p = OptionParser(build.__doc__)
    p.add_option("--nodedup", default=False, action="store_true",
                 help="Do not deduplicate [default: deduplicate]")
    opts, args = p.parse_args(args)

    if len(args) != 3:
        sys.exit(not p.print_help())

    fastafile, bacteria, pf = args
    dd = deduplicate([fastafile, "--pctid=100"]) \
        if not opts.nodedup else fastafile
    screenfasta = screen([dd, bacteria])
    tidyfasta = mask([screenfasta])
    sortedfasta = sort([tidyfasta, "--sizes"])
    scaffoldfasta = pf + ".assembly.fasta"
    format([sortedfasta, scaffoldfasta, "--prefix=scaffold_", "--sequential"])
    gapsplitfasta = pf + ".gapSplit.fasta"
    cmd = "gapSplit -minGap=10 {0} {1}".format(scaffoldfasta, gapsplitfasta)
    sh(cmd)
    contigsfasta = pf + ".contigs.fasta"
    format([gapsplitfasta, contigsfasta, "--prefix=contig_", "--sequential"])
0.001969
def update(self, new_routing_table):
    """ Update the current routing table with new routing information
    from a replacement table.
    """
    self.routers.replace(new_routing_table.routers)
    self.readers.replace(new_routing_table.readers)
    self.writers.replace(new_routing_table.writers)
    self.last_updated_time = self.timer()
    self.ttl = new_routing_table.ttl
    log_debug("[#0000]  S: <ROUTING> table=%r", self)
0.004264
def _build(config_cls, dictionary, validate=False):  # noqa
    """
    Builds an instance of ``config_cls`` using ``dictionary``.

    :param type config_cls: The class to use for building
    :param dict dictionary: The dictionary to use for building ``config_cls``
    :param bool validate: Performs validation before building ``config_cls``,
        defaults to False, optional
    :return: An instance of ``config_cls``
    :rtype: object
    """
    if not is_config_type(config_cls):
        raise ValueError(
            f"cannot build {config_cls!r} from {dictionary!r}, "
            f"{config_cls!r} is not a config"
        )

    # perform jsonschema validation on the given dictionary
    # (simplifies dynamic typecasting)
    if validate:
        jsonschema.validate(dictionary, build_schema(config_cls))

    kwargs = {}
    for var in attr.fields(config_cls):
        if not is_config_var(var):
            continue

        entry = var.metadata[CONFIG_KEY]
        arg_key = entry.name if entry.name else var.name
        arg_default = var.default if var.default is not None else None

        if callable(entry.decoder):
            kwargs[var.name] = entry.decoder(dictionary.get(arg_key, arg_default))
            continue

        if is_array_type(entry.type):
            if is_typing_type(entry.type) and len(entry.type.__args__) > 0:
                nested_type = entry.type.__args__[0]
                if is_config_type(nested_type):
                    kwargs[var.name] = [
                        _build(nested_type, item)
                        for item in dictionary.get(arg_key, [])
                    ]
                else:
                    kwargs[var.name] = typecast(entry.type, dictionary.get(arg_key, []))
        elif is_object_type(entry.type):
            item = dictionary.get(arg_key, {})
            if is_typing_type(entry.type) and len(entry.type.__args__) == 2:
                (_, value_type) = entry.type.__args__
                kwargs[var.name] = {
                    key: _build(value_type, value)
                    if is_config_type(value_type)
                    else typecast(value_type, value)
                    for (key, value) in item.items()
                }
            else:
                kwargs[var.name] = typecast(entry.type, item)
        elif is_config_type(entry.type):
            if arg_key not in dictionary:
                # if the default value for a nested config is the nested config class
                # then build the empty state of the nested config
                if is_config_type(arg_default) and entry.type == arg_default:
                    kwargs[var.name] = _build(entry.type, {})
                else:
                    kwargs[var.name] = arg_default
            else:
                kwargs[var.name] = _build(
                    entry.type, dictionary.get(arg_key, arg_default)
                )
        else:
            if arg_key not in dictionary:
                kwargs[var.name] = arg_default
            else:
                kwargs[var.name] = typecast(
                    entry.type, dictionary.get(arg_key, arg_default)
                )

    return config_cls(**kwargs)
0.001255
def Grigoras(Tc=None, Pc=None, Vc=None):
    r'''Relatively recent (1990) relationship for estimating critical
    properties from each other. Two of the three properties are required.
    This model uses the "critical surface", a general plot of Tc vs Pc vs Vc.
    The model used 137 organic and inorganic compounds to derive the equation.
    The general equation is in [1]_:

    .. math::
        P_c = 2.9 + 20.2 \frac{T_c}{V_c}

    Parameters
    ----------
    Tc : float
        Critical temperature of fluid (optional) [K]
    Pc : float
        Critical pressure of fluid (optional) [Pa]
    Vc : float
        Critical volume of fluid (optional) [m^3/mol]

    Returns
    -------
    Tc, Pc or Vc : float
        Critical property of fluid [K], [Pa], or [m^3/mol]

    Notes
    -----
    The prediction of Tc from Pc and Vc is not tested, as this is not necessary
    anywhere, but it is implemented.
    Internal units are bar, cm^3/mol, and K. A slight error occurs when
    Pa, cm^3/mol and K are used instead, on the order of <0.2%.
    This equation is less accurate than that of Ihmels, but surprisingly close.
    The author also investigated an early QSPR model.

    Examples
    --------
    Succinic acid [110-15-6]

    >>> Grigoras(Tc=851.0, Vc=0.000308)
    5871233.766233766

    References
    ----------
    .. [1] Grigoras, Stelian. "A Structural Approach to Calculate Physical
       Properties of Pure Organic Substances: The Critical Temperature,
       Critical Volume and Related Properties." Journal of Computational
       Chemistry 11, no. 4 (May 1, 1990): 493-510. doi:10.1002/jcc.540110408
    '''
    if Tc and Vc:
        Vc = Vc*1E6  # m^3/mol to cm^3/mol
        Pc = 2.9 + 20.2*Tc/Vc
        Pc = Pc*1E5  # bar to Pa
        return Pc
    elif Tc and Pc:
        Pc = Pc/1E5  # Pa to bar
        Vc = 202.0*Tc/(10*Pc - 29.0)
        Vc = Vc/1E6  # cm^3/mol to m^3/mol
        return Vc
    elif Pc and Vc:
        Pc = Pc/1E5  # Pa to bar
        Vc = Vc*1E6  # m^3/mol to cm^3/mol
        Tc = 1.0/202*(10*Pc - 29.0)*Vc
        return Tc
    else:
        raise Exception('Two of Tc, Pc, and Vc must be provided')
0.000459
def cell_thermal_mass(temperature, conductivity):
    """
    Sample interval is measured in seconds.
    Temperature in degrees.
    CTM is calculated in S/m.
    """
    alpha = 0.03  # Thermal anomaly amplitude.
    beta = 1.0 / 7  # Thermal anomaly time constant (1/beta).
    sample_interval = 1 / 15.0
    a = 2 * alpha / (sample_interval * beta + 2)
    b = 1 - (2 * a / alpha)
    dCodT = 0.1 * (1 + 0.006 * (temperature - 20))
    dT = np.diff(temperature)
    ctm = -1.0 * b * conductivity + a * (dCodT) * dT  # [S/m]
    return ctm
0.001835
def _parse_precinct_size(spcod):
    """Compute precinct size from SPcod or SPcoc."""
    spcod = np.frombuffer(spcod, dtype=np.uint8)
    precinct_size = []
    for item in spcod:
        ep2 = (item & 0xF0) >> 4
        ep1 = item & 0x0F
        precinct_size.append((2 ** ep1, 2 ** ep2))
    return tuple(precinct_size)
0.003106
def __check_status(result, function, arguments):
    """
    Check the result of a vcinpl function call and raise appropriate exception
    in case of an error. Used as errcheck function when mapping C functions
    with ctypes.

    :param result:
        Function call numeric result
    :param callable function:
        Called function
    :param arguments:
        Arbitrary arguments tuple
    :raise:
        :class:VCITimeout
        :class:VCIRxQueueEmptyError
        :class:StopIteration
        :class:VCIError
    """
    if isinstance(result, int):
        # Real return value is an unsigned long
        result = ctypes.c_ulong(result).value

    if result == constants.VCI_E_TIMEOUT:
        raise VCITimeout("Function {} timed out".format(function._name))
    elif result == constants.VCI_E_RXQUEUE_EMPTY:
        raise VCIRxQueueEmptyError()
    elif result == constants.VCI_E_NO_MORE_ITEMS:
        raise StopIteration()
    elif result == constants.VCI_E_ACCESSDENIED:
        pass  # not a real error, might happen if another program has initialized the bus
    elif result != constants.VCI_OK:
        raise VCIError(vciFormatError(function, result))

    return result
0.002433
def select_multi_items(self, select_name):
    """
    Select multiple options from select with label (recommended), name, or
    id. Pass a multiline string of options. e.g.

    .. code-block:: gherkin

        When I select the following from "Contact Methods":
            \"\"\"
            Email
            Phone
            Fax
            \"\"\"
    """
    # Ensure only the options selected are actually selected
    option_names = self.multiline.split('\n')
    select_box = find_field(world.browser, 'select', select_name)
    assert select_box, "Cannot find a '{}' select.".format(select_name)

    select = Select(select_box)
    select.deselect_all()

    for option in option_names:
        try:
            select.select_by_value(option)
        except NoSuchElementException:
            try:
                select.select_by_visible_text(option)
            except NoSuchElementException:
                raise AssertionError("Cannot find option: '{}'.".format(option))
0.002022
def validate_srec_checksum(srec):
    """
    Validate if the checksum of the supplied s-record is valid

    Returns: True if valid, False if not
    """
    checksum = srec[len(srec)-2:]
    # Strip the original checksum and compare with the computed one
    if compute_srec_checksum(srec[:len(srec) - 2]) == int(checksum, 16):
        return True
    else:
        return False
0.002577
def order_assets(self, asset_ids, composition_id):
    """Reorders a set of assets in a composition.

    arg:    asset_ids (osid.id.Id[]): ``Ids`` for a set of ``Assets``
    arg:    composition_id (osid.id.Id): ``Id`` of the ``Composition``
    raise:  NotFound - ``composition_id`` not found or, an ``asset_id``
            not related to ``composition_id``
    raise:  NullArgument - ``instruction_ids`` or ``agenda_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure

    *compliance: mandatory -- This method must be implemented.*
    """
    self._provider_session.order_assets(asset_ids, composition_id)
0.002551
def update_with(self, update_fn, *maps):
    """
    Return a new PMap with the items in Mappings maps inserted. If the same
    key is present in multiple maps the values will be merged using update_fn
    going from left to right.

    >>> from operator import add
    >>> m1 = m(a=1, b=2)
    >>> m1.update_with(add, m(a=2))
    pmap({'a': 3, 'b': 2})

    The reverse behaviour of the regular merge. Keep the leftmost element
    instead of the rightmost.

    >>> m1 = m(a=1)
    >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
    pmap({'a': 1})
    """
    evolver = self.evolver()
    for map in maps:
        for key, value in map.items():
            evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)

    return evolver.persistent()
0.005995
def remove_vdir(name, site, app='/'):
    '''
    Remove an IIS virtual directory.

    :param str name: The virtual directory name.
    :param str site: The IIS site name.
    :param str app: The IIS application.

    Example of usage with only the required arguments:

    .. code-block:: yaml

        site0-foo-vdir-remove:
            win_iis.remove_vdir:
                - name: foo
                - site: site0

    Example of usage specifying all available arguments:

    .. code-block:: yaml

        site0-foo-vdir-remove:
            win_iis.remove_vdir:
                - name: foo
                - site: site0
                - app: v1
    '''
    ret = {'name': name,
           'changes': {},
           'comment': str(),
           'result': None}

    current_vdirs = __salt__['win_iis.list_vdirs'](site, app)

    if name not in current_vdirs:
        ret['comment'] = 'Virtual directory has already been removed: {0}'.format(name)
        ret['result'] = True
    elif __opts__['test']:
        ret['comment'] = 'Virtual directory will be removed: {0}'.format(name)
        ret['changes'] = {'old': name, 'new': None}
    else:
        ret['comment'] = 'Removed virtual directory: {0}'.format(name)
        ret['changes'] = {'old': name, 'new': None}
        ret['result'] = __salt__['win_iis.remove_vdir'](name, site, app)

    return ret
0.001415
def _adaptSynapses(self, inputVector, activeColumns, synPermActiveInc, synPermInactiveDec):
    """
    The primary method in charge of learning. Adapts the permanence values of
    the synapses based on the input vector, and the chosen columns after
    inhibition round. Permanence values are increased for synapses connected to
    input bits that are turned on, and decreased for synapses connected to
    inputs bits that are turned off.

    Parameters:
    ----------------------------
    @param inputVector:
                    A numpy array of 0's and 1's that comprises the input to
                    the spatial pooler. There exists an entry in the array
                    for every input bit.
    @param activeColumns:
                    An array containing the indices of the columns that
                    survived inhibition.
    @param synPermActiveInc:
                    Permanence increment for active inputs
    @param synPermInactiveDec:
                    Permanence decrement for inactive inputs
    """
    inputIndices = numpy.where(inputVector > 0)[0]
    permChanges = numpy.zeros(self.getNumInputs(), dtype=REAL_DTYPE)
    permChanges.fill(-1 * synPermInactiveDec)
    permChanges[inputIndices] = synPermActiveInc
    perm = numpy.zeros(self.getNumInputs(), dtype=REAL_DTYPE)
    potential = numpy.zeros(self.getNumInputs(), dtype=REAL_DTYPE)
    for i in activeColumns:
        self.getPermanence(i, perm)
        self.getPotential(i, potential)
        maskPotential = numpy.where(potential > 0)[0]
        perm[maskPotential] += permChanges[maskPotential]
        self._updatePermanencesForColumn(perm, i, raisePerm=False)
0.004232
def _set_get_stp_brief_info(self, v, load=False):
    """
    Setter method for get_stp_brief_info, mapped from YANG variable
    /brocade_xstp_ext_rpc/get_stp_brief_info (rpc)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_get_stp_brief_info is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_get_stp_brief_info() directly.

    YANG Description: RPC to return spanning tree information similar to the
    CLI 'show spanning-tree'.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=get_stp_brief_info.get_stp_brief_info, is_leaf=True, yang_name="get-stp-brief-info", rest_name="get-stp-brief-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'show-spanning-tree-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='rpc', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """get_stp_brief_info must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=get_stp_brief_info.get_stp_brief_info, is_leaf=True, yang_name="get-stp-brief-info", rest_name="get-stp-brief-info", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'show-spanning-tree-action-point'}}, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='rpc', is_config=True)""",
        })

    self.__get_stp_brief_info = t
    if hasattr(self, '_set'):
        self._set()
0.005423
def mul_pdf(mean1, var1, mean2, var2):
    """
    Multiply Gaussian (mean1, var1) with (mean2, var2) and return the
    results as a tuple (mean, var, scale_factor).

    Strictly speaking the product of two Gaussian PDFs is a Gaussian
    function, not Gaussian PDF. It is, however, proportional to a Gaussian
    PDF. `scale_factor` provides this proportionality constant

    Parameters
    ----------
    mean1 : scalar
        mean of first Gaussian
    var1 : scalar
        variance of first Gaussian
    mean2 : scalar
        mean of second Gaussian
    var2 : scalar
        variance of second Gaussian

    Returns
    -------
    mean : scalar
        mean of product
    var : scalar
        variance of product
    scale_factor : scalar
        proportionality constant

    Examples
    --------
    >>> mul_pdf(1, 2, 3, 4)
    (1.6666666666666667, 1.3333333333333333, 0.11669...)

    References
    ----------
    Bromily. "Products and Convolutions of Gaussian Probability Functions",
    Tina Memo No. 2003-003.
    http://www.tina-vision.net/docs/memos/2003-003.pdf
    """
    mean = (var1*mean2 + var2*mean1) / (var1 + var2)
    var = 1. / (1./var1 + 1./var2)

    S = math.exp(-(mean1 - mean2)**2 / (2*(var1 + var2))) / \
        math.sqrt(2 * math.pi * (var1 + var2))

    return mean, var, S
0.001514
def defined_annotation_keywords(self) -> Set[str]:
    """Get the set of all keywords defined as annotations in this graph."""
    return (
        set(self.annotation_pattern)
        | set(self.annotation_url)
        | set(self.annotation_list)
    )
0.00722
def local_id(personal):
    """
    Executor for `globus endpoint local-id`
    """
    if personal:
        try:
            ep_id = LocalGlobusConnectPersonal().endpoint_id
        except IOError as e:
            safeprint(e, write_to_stderr=True)
            click.get_current_context().exit(1)

        if ep_id is not None:
            safeprint(ep_id)
        else:
            safeprint("No Globus Connect Personal installation found.")
            click.get_current_context().exit(1)
0.002033
def _handle_system_status_event(self, event: SystemStatusEvent) -> None:
    """
    DISARMED -> ARMED_AWAY -> EXIT_DELAY_START -> EXIT_DELAY_END
    (trip):      -> ALARM -> OUTPUT_ON -> ALARM_RESTORE
    (disarm):    -> DISARMED -> OUTPUT_OFF
    (disarm):    -> DISARMED
    (disarm before EXIT_DELAY_END): -> DISARMED -> EXIT_DELAY_END

    TODO(NW): Check ALARM_RESTORE state transition to move back into
    ARMED_AWAY state
    """
    if event.type == SystemStatusEvent.EventType.UNSEALED:
        return self._update_zone(event.zone, True)
    elif event.type == SystemStatusEvent.EventType.SEALED:
        return self._update_zone(event.zone, False)
    elif event.type == SystemStatusEvent.EventType.ALARM:
        return self._update_arming_state(ArmingState.TRIGGERED)
    elif event.type == SystemStatusEvent.EventType.ALARM_RESTORE:
        if self.arming_state != ArmingState.DISARMED:
            return self._update_arming_state(ArmingState.ARMED)
    elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_START:
        return self._update_arming_state(ArmingState.ENTRY_DELAY)
    elif event.type == SystemStatusEvent.EventType.ENTRY_DELAY_END:
        pass
    elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_START:
        return self._update_arming_state(ArmingState.EXIT_DELAY)
    elif event.type == SystemStatusEvent.EventType.EXIT_DELAY_END:
        # Exit delay finished - if we were in the process of arming update
        # state to armed
        if self.arming_state == ArmingState.EXIT_DELAY:
            return self._update_arming_state(ArmingState.ARMED)
    elif event.type in Alarm.ARM_EVENTS:
        return self._update_arming_state(ArmingState.ARMING)
    elif event.type == SystemStatusEvent.EventType.DISARMED:
        return self._update_arming_state(ArmingState.DISARMED)
    elif event.type == SystemStatusEvent.EventType.ARMING_DELAYED:
        pass
0.001473
def register_master():
    """Register the SDP Master device."""
    tango_db = Database()
    device = "sip_sdp/elt/master"
    device_info = DbDevInfo()
    device_info._class = "SDPMasterDevice"
    device_info.server = "sdp_master_ds/1"
    device_info.name = device

    devices = tango_db.get_device_name(device_info.server, device_info._class)
    if device not in devices:
        LOG.info('Registering device "%s" with device server "%s"',
                 device_info.name, device_info.server)
        tango_db.add_device(device_info)
0.001842
def iter_generic_bases(type_):
    """Iterates over all generics `type_` derives from, including origins.

    This function is only necessary because, in typing 3.5.0, a generic
    doesn't get included in the list of bases when it constructs a
    parameterized version of itself. This was fixed in aab2c59; now it would
    be enough to just iterate over the MRO.
    """
    for t in type_.__mro__:
        if not isinstance(t, typing.GenericMeta):
            continue
        yield t
        t = t.__origin__
        while t:
            yield t
            t = t.__origin__
0.005172
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'system') and self.system is not None:
        _dict['system'] = self.system._to_dict()
    return _dict
0.008333
def close(self):
    """ Close TCP connection """
    if self.device is not None:
        self.device.shutdown(socket.SHUT_RDWR)
        self.device.close()
0.011628
def can_approve(self, user, **data):
    """
    Only sys admins can approve an organisation, or a reseller sending
    pre_verified=true

    :param user: a User
    :param data: data that the user wants to update
    """
    is_admin = user.is_admin()
    is_reseller_preverifying = user.is_reseller() and data.get('pre_verified', False)
    raise Return(is_admin or is_reseller_preverifying)
0.009501
def log(args, kwargs):
    """Log to a file."""
    logfile = os.path.join(args.logdir, 'log.tsv')
    if 'log_created' not in globals():
        if os.path.exists(logfile):
            logging.error('Logfile %s already exists.', logfile)
            sys.exit(1)
        global log_created
        log_created = sorted(kwargs.keys())
        header = '\t'.join((str(k) for k in log_created)) + '\n'
        with open(logfile, 'w') as f:
            f.write(header)

    # Log variables shouldn't change during training
    assert log_created == sorted(kwargs.keys())
    with open(logfile, 'a') as f:
        f.write('\t'.join((str(kwargs[k]) for k in log_created)) + '\n')
0.001477
def exists(instance_id=None, name=None, tags=None, region=None, key=None,
           keyid=None, profile=None, in_states=None, filters=None):
    '''
    Given an instance id, check to see if the given instance id exists.

    Returns True if the given instance with the given id, name, or tags
    exists; otherwise, False is returned.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_ec2.exists myinstance
    '''
    instances = find_instances(instance_id=instance_id, name=name, tags=tags,
                               region=region, key=key, keyid=keyid,
                               profile=profile, in_states=in_states,
                               filters=filters)
    if instances:
        log.info('Instance exists.')
        return True
    else:
        log.warning('Instance does not exist.')
        return False
0.002427
def _WebSafeComponent(c, alt=False):
    '''Convert a color component to its web safe equivalent.

    Parameters:
      :c:
        The component value [0...1]
      :alt:
        If True, return the alternative value instead of the nearest one.

    Returns:
      The web safe equivalent of the component value.
    '''
    # This sucks, but floating point between 0 and 1 is quite fuzzy...
    # So we just change the scale a while to make the equality tests
    # work, otherwise it gets wrong at some decimal far to the right.
    sc = c * 100.0

    # If the color is already safe, return it straight away
    d = sc % 20
    if d == 0:
        return c

    # Get the lower and upper safe values
    l = sc - d
    u = l + 20

    # Return the 'closest' value according to the alt flag
    if alt:
        if (sc - l) >= (u - sc):
            return l / 100.0
        else:
            return u / 100.0
    else:
        if (sc - l) >= (u - sc):
            return u / 100.0
        else:
            return l / 100.0
0.012712
def is_armed(self):
    """Return True or False if the system is armed in any way"""
    alarm_code = self.get_armed_status()

    if alarm_code == YALE_STATE_ARM_FULL:
        return True

    if alarm_code == YALE_STATE_ARM_PARTIAL:
        return True

    return False
0.006667
def get_outerframe_skip_importlib_frame(level):
    """ There's a bug in Python3.4+, see http://bugs.python.org/issue23773,
    remove this and use sys._getframe(3) when bug is fixed """
    if sys.version_info < (3, 4):
        return sys._getframe(level)
    else:
        currentframe = inspect.currentframe()
        levelup = 0
        while levelup < level:
            currentframe = currentframe.f_back
            if currentframe.f_globals['__name__'] == 'importlib._bootstrap':
                continue
            else:
                levelup += 1
        return currentframe
0.004552
def get(self, name_or_id):
    """
    Get alert by name or id

    :param name_or_id: The alert's name or id
    :type name_or_id: str

    :return: A list of matching tags. An empty list is returned if there
        are not any matches
    :rtype: list of dict

    :raises: This will raise a
        :class:`ServerException<logentries_api.exceptions.ServerException>`
        if there is an error from Logentries
    """
    return [
        tag
        for tag in self.list_tags()
        if name_or_id == tag.get('id') or name_or_id == tag.get('name')
    ]
0.004615
def _handle_exception(ignore_callback_errors, print_callback_errors, obj,
                      cb_event=None, node=None):
    """Helper for printing errors in callbacks

    See EventEmitter._invoke_callback for a use example.
    """
    if not hasattr(obj, '_vispy_err_registry'):
        obj._vispy_err_registry = {}
    registry = obj._vispy_err_registry

    if cb_event is not None:
        cb, event = cb_event
        exp_type = 'callback'
    else:
        exp_type = 'node'
    type_, value, tb = sys.exc_info()
    tb = tb.tb_next  # Skip *this* frame
    sys.last_type = type_
    sys.last_value = value
    sys.last_traceback = tb
    del tb  # Get rid of it in this namespace

    # Handle
    if not ignore_callback_errors:
        raise
    if print_callback_errors != "never":
        this_print = 'full'
        if print_callback_errors in ('first', 'reminders'):
            # need to check to see if we've hit this yet
            if exp_type == 'callback':
                key = repr(cb) + repr(event)
            else:
                key = repr(node)
            if key in registry:
                registry[key] += 1
                if print_callback_errors == 'first':
                    this_print = None
                else:  # reminders
                    ii = registry[key]
                    # Use logarithmic selection
                    # (1, 2, ..., 10, 20, ..., 100, 200, ...)
                    if ii == (2 ** int(np.log2(ii))):
                        this_print = ii
                    else:
                        this_print = None
            else:
                registry[key] = 1
        if this_print == 'full':
            logger.log_exception()
            if exp_type == 'callback':
                logger.error("Invoking %s for %s" % (cb, event))
            else:  # == 'node':
                logger.error("Drawing node %s" % node)
        elif this_print is not None:
            if exp_type == 'callback':
                logger.error("Invoking %s repeat %s" % (cb, this_print))
            else:  # == 'node':
                logger.error("Drawing node %s repeat %s" % (node, this_print))
0.000454
def on_resized(self):
    """ Update linked views """
    d = self.declaration
    if not self.is_root and d.parent.multi_axis:
        if self.viewbox:
            self.viewbox.setGeometry(self.widget.vb.sceneBoundingRect())
            self.viewbox.linkedViewChanged(self.widget.vb, self.viewbox.XAxis)
0.012232
def as_dictionary(self, is_proof=True):
    """
    Return the DDO as a JSON dict.

    :param is_proof: if False then do not include the 'proof' element.
    :return: dict
    """
    if self._created is None:
        self._created = DDO._get_timestamp()

    data = {
        '@context': DID_DDO_CONTEXT_URL,
        'id': self._did,
        'created': self._created,
    }
    if self._public_keys:
        values = []
        for public_key in self._public_keys:
            values.append(public_key.as_dictionary())
        data['publicKey'] = values
    if self._authentications:
        values = []
        for authentication in self._authentications:
            values.append(authentication)
        data['authentication'] = values
    if self._services:
        values = []
        for service in self._services:
            values.append(service.as_dictionary())
        data['service'] = values
    if self._proof and is_proof:
        data['proof'] = self._proof

    return data
0.001794
def cli(env, identifier, path, name):
    """Adds an attachment to an existing ticket."""
    mgr = SoftLayer.TicketManager(env.client)
    ticket_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'ticket')

    if path is None:
        raise exceptions.ArgumentError("Missing argument --path")

    if not os.path.exists(path):
        raise exceptions.ArgumentError("%s does not exist" % path)

    if name is None:
        name = os.path.basename(path)

    attached_file = mgr.upload_attachment(ticket_id=ticket_id,
                                          file_path=path,
                                          file_name=name)
    env.fout("File attached: \n%s" % attached_file)
0.001458
def register(self, kind, handler):
    """Register a handler for a given type, class, interface, or abstract base class.

    View registration should happen within the `start` callback of an extension.
    For example, to register the previous `json` view example:

        class JSONExtension:
            def start(self, context):
                context.view.register(tuple, json)

    The approach of explicitly referencing a view handler isn't very easy to
    override without also replacing the extension originally adding it, however
    there is another approach. Using named handlers registered as discrete
    plugins (via the `entry_point` argument in `setup.py`) allows the extension
    to easily ask "what's my handler?"

        class JSONExtension:
            def start(self, context):
                context.view.register(tuple, context.view.json)

    Otherwise unknown attributes of the view registry will attempt to look up
    a handler plugin by that name.
    """
    if __debug__:  # In production this logging is completely skipped, regardless of logging level.
        if py3 and not pypy:  # Where possible, we shorten things to just the canonical name.
            log.debug("Registering view handler.", extra=dict(type=name(kind), handler=name(handler)))
        else:  # Canonical name lookup is not entirely reliable on some combinations.
            log.debug("Registering view handler.", extra=dict(type=repr(kind), handler=repr(handler)))

    # Add the handler to the pool of candidates. This adds to a list instead of replacing the "dictionary item".
    self._map.add(kind, handler)

    return handler
0.033736
def request_ocsp(self):
    """
    Called to request that the server sends stapled OCSP data, if
    available. If this is not called on the client side then the server
    will not send OCSP data. Should be used in conjunction with
    :meth:`Context.set_ocsp_client_callback`.
    """
    rc = _lib.SSL_set_tlsext_status_type(
        self._ssl, _lib.TLSEXT_STATUSTYPE_ocsp
    )
    _openssl_assert(rc == 1)
0.004435
def rebuild(self):
    """
    Rebuilds the widget based on the position and current size/location
    of its parent.
    """
    if not self.isVisible():
        return

    self.raise_()

    max_size = self.maximumPixmapSize()
    min_size = self.minimumPixmapSize()

    widget = self.window()
    rect = widget.rect()
    rect.setBottom(rect.bottom() - widget.statusBar().height())
    rect.setTop(widget.menuBar().height())
    offset = self.padding()

    # align this widget to the north
    if self.position() == XDockToolbar.Position.North:
        self.move(rect.left(), rect.top())
        self.resize(rect.width(), min_size.height() + offset)

    # align this widget to the east
    elif self.position() == XDockToolbar.Position.East:
        self.move(rect.left(), rect.top())
        self.resize(min_size.width() + offset, rect.height())

    # align this widget to the south
    elif self.position() == XDockToolbar.Position.South:
        self.move(rect.left(), rect.top() - min_size.height() - offset)
        self.resize(rect.width(), min_size.height() + offset)

    # align this widget to the west
    else:
        self.move(rect.right() - min_size.width() - offset, rect.top())
        self.resize(min_size.width() + offset, rect.height())
0.007463
def daily404summary(date, return_format=None):
    """Returns daily summary information of submitted 404 Error Page Information.

    :param date: string or datetime.date() (required)
    """
    uri = 'daily404summary'
    if date:
        try:
            uri = '/'.join([uri, date.strftime("%Y-%m-%d")])
        except AttributeError:
            uri = '/'.join([uri, date])
    return _get(uri, return_format)
0.002398
@contextmanager  # yield-based body implies this decorator; assumed from the docstring's "context manager"
def fullscreen(self):
    """Return a context manager that enters fullscreen mode while inside it
    and restores normal mode on leaving."""
    self.stream.write(self.enter_fullscreen)
    try:
        yield
    finally:
        self.stream.write(self.exit_fullscreen)
0.006711
def wait_for_lime(self, listen_port, listen_address="0.0.0.0", max_tries=20, wait=1):
    """
    Wait for lime to load unless max_tries is exceeded

    :type listen_port: int
    :param listen_port: port LiME is listening for connections on
    :type listen_address: str
    :param listen_address: address LiME is listening for connections on
    :type max_tries: int
    :param max_tries: maximum number of checks that LiME has loaded
    :type wait: int
    :param wait: time to wait between checks
    """
    tries = 0
    pattern = self.commands.lime_pattern.value.format(listen_address, listen_port)
    lime_loaded = False
    while tries < max_tries and lime_loaded is False:
        lime_loaded = self.check_for_lime(pattern)
        tries = tries + 1
        time.sleep(wait)
    return lime_loaded
0.003099
def print_report(self):
    """
    Print Compare report.

    :return: None
    """
    report = compare_report_print(self.sorted, self.scores, self.best_name)
    print(report)
0.009302
def emit(self, arg=None):
    """Emits the signal, passing the optional argument"""
    for model, name in self.__get_models__():
        model.notify_signal_emit(name, arg)
0.016304
def do_hook_actions(self, actions, hook_type):
    """ call hook actions.

    Args:
        actions (list): each action in actions list maybe in two format.

            format1 (dict): assignment, the value returned by hook function
                will be assigned to variable.
                {"var": "${func()}"}
            format2 (str): only call hook functions.
                ${func()}

        hook_type (enum): setup/teardown
    """
    logger.log_debug("call {} hook actions.".format(hook_type))
    for action in actions:
        if isinstance(action, dict) and len(action) == 1:
            # format 1
            # {"var": "${func()}"}
            var_name, hook_content = list(action.items())[0]
            hook_content_eval = self.session_context.eval_content(hook_content)
            logger.log_debug(
                "assignment with hook: {} = {} => {}".format(
                    var_name, hook_content, hook_content_eval
                )
            )
            self.session_context.update_test_variables(
                var_name, hook_content_eval
            )
        else:
            # format 2
            logger.log_debug("call hook function: {}".format(action))
            # TODO: check hook function if valid
            self.session_context.eval_content(action)
0.002845
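Illustrative hook action lists covering both documented formats; the function and variable names here are placeholders, not part of the original API:

setup_hooks = [
    {"token": "${gen_token($user, $password)}"},  # format 1: assign result to "token"
    "${setup_database()}",                        # format 2: call purely for side effects
]
runner.do_hook_actions(setup_hooks, "setup")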
def process_task_topic_list(app, doctree, fromdocname):
    """Process the ``task_topic_list`` node to generate a rendered listing
    of Task, Configurable, or Config topics (as determined by the types key
    of the ``task_topic_list`` node).

    This is called during the "doctree-resolved" phase so that the
    ``lsst_task_topics`` environment attribute is fully set.
    """
    logger = getLogger(__name__)
    logger.debug('Started process_task_topic_list')

    env = app.builder.env

    for node in doctree.traverse(task_topic_list):
        try:
            topics = env.lsst_task_topics
        except AttributeError:
            message = (
                "Environment does not have 'lsst_task_topics', "
                "can't process the listing."
            )
            logger.warning(message)
            node.replace_self(nodes.paragraph(text=message))
            continue

        root = node['root_namespace']

        # Sort tasks by the topic's class name.
        # NOTE: if the presentation of the link is changed to the fully
        # qualified name, with full Python namespace, then the topic_names
        # should be changed to match that.
        topic_keys = [k for k, topic in topics.items()
                      if topic['type'] in node['types']
                      if topic['fully_qualified_name'].startswith(root)]
        topic_names = [topics[k]['fully_qualified_name'].split('.')[-1]
                       for k in topic_keys]
        topic_keys = [
            k for k, _ in
            sorted(zip(topic_keys, topic_names), key=lambda pair: pair[1])]

        if len(topic_keys) == 0:
            # Fallback if no topics are found
            p = nodes.paragraph(text='No topics.')
            node.replace_self(p)
            continue

        dl = nodes.definition_list()
        for key in topic_keys:
            topic = topics[key]
            class_name = topic['fully_qualified_name'].split('.')[-1]
            summary_text = topic['summary_node'][0].astext()

            # Each topic in the listing is a definition list item. The term is
            # the linked class name and the description is the summary
            # sentence from the docstring _or_ the content of the
            # topic directive.
            dl_item = nodes.definition_list_item()

            # Can insert an actual reference since the doctree is resolved.
            ref_node = nodes.reference('', '')
            ref_node['refdocname'] = topic['docname']
            ref_node['refuri'] = app.builder.get_relative_uri(
                fromdocname, topic['docname'])
            # NOTE: Not appending an anchor to the URI because task topics
            # are designed to occupy an entire page.
            link_label = nodes.Text(class_name, class_name)
            ref_node += link_label
            term = nodes.term()
            term += ref_node
            dl_item += term

            # We're degrading the summary to plain text to avoid syntax
            # issues and also because it may be distracting.
            def_node = nodes.definition()
            def_node += nodes.paragraph(text=summary_text)
            dl_item += def_node

            dl += dl_item

        # Replace the task_list node (a placeholder) with this renderable
        # content.
        node.replace_self(dl)
0.000301
def cachier(stale_after=None, next_time=False, pickle_reload=True,
            mongetter=None):
    """A persistent, stale-free memoization decorator.

    The positional and keyword arguments to the wrapped function must be
    hashable (i.e. Python's immutable built-in objects, not mutable
    containers). Also, notice that since objects which are instances of
    user-defined classes are hashable but all compare unequal (their hash
    value is their id), equal objects across different sessions will not
    yield identical keys.

    Arguments
    ---------
    stale_after (optional) : datetime.timedelta
        The time delta after which a cached result is considered stale. Calls
        made after the result goes stale will trigger a recalculation of the
        result, but whether a stale or fresh result will be returned is
        determined by the optional next_time argument.
    next_time (optional) : bool
        If set to True, a stale result will be returned when finding one, not
        waiting for the calculation of the fresh result to return. Defaults
        to False.
    pickle_reload (optional) : bool
        If set to True, in-memory cache will be reloaded on each cache read,
        enabling different threads to share cache. Should be set to False for
        faster reads in single-thread programs. Defaults to True.
    mongetter (optional) : callable
        A callable that takes no arguments and returns a pymongo.Collection
        object with writing permissions. If unset, a local pickle cache is
        used instead.
    """
    # print('Inside the wrapper maker')
    # print('mongetter={}'.format(mongetter))
    # print('stale_after={}'.format(stale_after))
    # print('next_time={}'.format(next_time))

    if mongetter:
        core = _MongoCore(mongetter, stale_after, next_time)
    else:
        core = _PickleCore(  # pylint: disable=R0204
            stale_after, next_time, pickle_reload)

    def _cachier_decorator(func):
        core.set_func(func)

        @wraps(func)
        def func_wrapper(*args, **kwds):  # pylint: disable=C0111,R0911
            # print('Inside general wrapper for {}.'.format(func.__name__))
            ignore_cache = kwds.pop('ignore_cache', False)
            overwrite_cache = kwds.pop('overwrite_cache', False)
            verbose_cache = kwds.pop('verbose_cache', False)
            _print = lambda x: None
            if verbose_cache:
                _print = print
            if ignore_cache:
                return func(*args, **kwds)
            key, entry = core.get_entry(args, kwds)
            if overwrite_cache:
                return _calc_entry(core, key, func, args, kwds)
            if entry is not None:  # pylint: disable=R0101
                _print('Entry found.')
                if entry.get('value', None) is not None:
                    _print('Cached result found.')
                    if stale_after:
                        now = datetime.datetime.now()
                        if now - entry['time'] > stale_after:
                            _print('But it is stale... :(')
                            if entry['being_calculated']:
                                if next_time:
                                    _print('Returning stale.')
                                    return entry['value']  # return stale val
                                _print('Already calc. Waiting on change.')
                                try:
                                    return core.wait_on_entry_calc(key)
                                except RecalculationNeeded:
                                    return _calc_entry(
                                        core, key, func, args, kwds)
                            if next_time:
                                _print('Async calc and return stale')
                                try:
                                    core.mark_entry_being_calculated(key)
                                    _get_executor().submit(
                                        _function_thread, core, key, func,
                                        args, kwds)
                                finally:
                                    core.mark_entry_not_calculated(key)
                                return entry['value']
                            _print('Calling decorated function and waiting')
                            return _calc_entry(core, key, func, args, kwds)
                    _print('And it is fresh!')
                    return entry['value']
                if entry['being_calculated']:
                    _print('No value but being calculated. Waiting.')
                    try:
                        return core.wait_on_entry_calc(key)
                    except RecalculationNeeded:
                        return _calc_entry(core, key, func, args, kwds)
            _print('No entry found. No current calc. Calling like a boss.')
            return _calc_entry(core, key, func, args, kwds)

        def clear_cache():
            """Clear the cache."""
            core.clear_cache()

        def clear_being_calculated():
            """Marks all entries in this cache as not being calculated."""
            core.clear_being_calculated()

        func_wrapper.clear_cache = clear_cache
        func_wrapper.clear_being_calculated = clear_being_calculated
        return func_wrapper

    return _cachier_decorator
0.000558
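A usage sketch of the decorator above; the wrapped function and its arguments are illustrative. The wrapper itself pops the ignore_cache/overwrite_cache/verbose_cache keywords before calling the decorated function:

import datetime

@cachier(stale_after=datetime.timedelta(days=3))
def fetch_report(user_id):
    ...  # expensive computation

fetch_report(42)                       # computed once, then cached
fetch_report(42)                       # served from cache
fetch_report(42, ignore_cache=True)    # recomputed, cache bypassed
fetch_report.clear_cache()             # wipe this function's cache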
def synonym(name): """ Utility function mimicking the behavior of the old SA synonym function with the new hybrid property semantics. """ return hybrid_property(lambda inst: getattr(inst, name), lambda inst, value: setattr(inst, name, value), expr=lambda cls: getattr(cls, name))
0.002833
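A sketch of using the synonym helper above on a declarative model; the model, columns, and SQLAlchemy imports are illustrative assumptions:

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    _email = Column('email', String)
    email = synonym('_email')  # reads/writes proxy to the _email attribute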
def create_config_files(directory): """ Initialize directory ready for vpn walker :param directory: the path where you want this to happen :return: """ # Some constant strings config_zip_url = "https://s3-us-west-1.amazonaws.com/heartbleed/linux/linux-files.zip" if not os.path.exists(directory): os.makedirs(directory) logging.info("Starting to download PureVPN config file zip") url_opener = urllib.URLopener() zip_path = os.path.join(directory, '../linux_files.zip') url_opener.retrieve(config_zip_url, zip_path) logging.info("Extracting zip file") unzip(zip_path, os.path.join(directory, '../')) # remove zip file os.remove(zip_path) # copy ca and key to root path shutil.copyfile(os.path.join(directory, '../Linux OpenVPN Updated files', 'ca.crt'), os.path.join(directory, '../ca.crt')) shutil.copyfile(os.path.join(directory, '../Linux OpenVPN Updated files', 'Wdc.key'), os.path.join(directory, '../Wdc.key')) # move all config files to /vpns orig_path = os.path.join(directory, '../Linux OpenVPN Updated files/TCP') server_country = {} for filename in os.listdir(orig_path): if filename.endswith('.ovpn'): country = filename.split('-')[0] if '(V)' in country: country = country[:country.find('(V)')] file_path = os.path.join(orig_path, filename) lines = [line.rstrip('\n') for line in open(file_path)] # get ip address for this vpn ip = "" for line in lines: if line.startswith('remote'): hostname = line.split(' ')[1] ip = socket.gethostbyname(hostname) break if len(ip) > 0: new_path = os.path.join(directory, ip + '.ovpn') shutil.copyfile(file_path, new_path) server_country[ip] = country # remove extracted folder shutil.rmtree(os.path.join(directory, '../Linux OpenVPN Updated files')) # add dns update options to each file logging.info("Appending DNS update options") for filename in os.listdir(directory): file_path = os.path.join(directory, filename) with open(file_path, 'a') as f: f.write("\n") f.write("up /etc/openvpn/update-resolv-conf\n") f.write("down /etc/openvpn/update-resolv-conf\n") print os.path.join(directory, 'servers.txt'), len(server_country) with open(os.path.join(directory, 'servers.txt'), 'w') as f: for ip in server_country: f.write('|'.join([ip, server_country[ip]]) + '\n')
0.001477
def loads(string, filename=None, includedir=''):
    '''Load the contents of ``string`` to a Python object

    The returned object is a subclass of ``dict`` that exposes string keys
    as attributes as well.

    Example:

        >>> config = libconf.loads('window: { title: "libconfig example"; };')
        >>> config['window']['title']
        'libconfig example'
        >>> config.window.title
        'libconfig example'
    '''
    try:
        f = io.StringIO(string)
    except TypeError:
        raise TypeError("libconf.loads() input string must be unicode")

    return load(f, filename=filename, includedir=includedir)
0.001575
def find_near(lat, lon, *, n=10, session=None): """Return n results for a given latitude and longitude""" search_params = {'npoints': n, 'clat': lat, 'clon': lon, 'Columns[]': ['Subregion', 'Notes', 'CollectionYear', 'ReservoirAge', 'ReservoirErr', 'C14age', 'C14err', 'LabID', 'Delta13C', 'nextime', 'Genus', 'Species', 'Feeding', 'Name']} resp = _query_near(session=session, **search_params) df = _response_to_dataframe(resp) df_clean = _clean_dataframe(df) return df_clean
0.007949
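A usage sketch for the query helper above; the coordinates are illustrative, and the session argument is assumed to be a requests.Session-style object consumed by _query_near:

import requests

# Ten nearest records around a point in the Mediterranean.
with requests.Session() as session:
    df = find_near(43.73, 7.42, n=10, session=session)
print(df.head())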
def basic_consume(self, queue='', consumer_tag='', no_local=False, no_ack=False, exclusive=False, nowait=False, callback=None, arguments=None, on_cancel=None): """Start a queue consumer This method asks the server to start a "consumer", which is a transient request for messages from a specific queue. Consumers last as long as the channel they were created on, or until the client cancels them. RULE: The server SHOULD support at least 16 consumers per queue, unless the queue was declared as private, and ideally, impose no limit except as defined by available resources. PARAMETERS: queue: shortstr Specifies the name of the queue to consume from. If the queue name is null, refers to the current queue for the channel, which is the last declared queue. RULE: If the client did not previously declare a queue, and the queue name in this method is empty, the server MUST raise a connection exception with reply code 530 (not allowed). consumer_tag: shortstr Specifies the identifier for the consumer. The consumer tag is local to a connection, so two clients can use the same consumer tags. If this field is empty the server will generate a unique tag. RULE: The tag MUST NOT refer to an existing consumer. If the client attempts to create two consumers with the same non-empty tag the server MUST raise a connection exception with reply code 530 (not allowed). no_local: boolean do not deliver own messages If the no-local field is set the server will not send messages to the client that published them. no_ack: boolean no acknowledgement needed If this field is set the server does not expect acknowledgments for messages. That is, when a message is delivered to the client the server automatically and silently acknowledges it on behalf of the client. This functionality increases performance but at the cost of reliability. Messages can get lost if a client dies before it can deliver them to the application. exclusive: boolean request exclusive access Request exclusive consumer access, meaning only this consumer can access the queue. RULE: If the server cannot grant exclusive access to the queue when asked, - because there are other consumers active - it MUST raise a channel exception with return code 403 (access refused). nowait: boolean do not send a reply method If set, the server will not respond to the method. The client should not wait for a reply method. If the server could not complete the method it will raise a channel or connection exception. callback: Python callable function/method called with each delivered message For each message delivered by the broker, the callable will be called with a Message object as the single argument. If no callable is specified, messages are quietly discarded, no_ack should probably be set to True in that case. """ args = AMQPWriter() args.write_short(0) args.write_shortstr(queue) args.write_shortstr(consumer_tag) args.write_bit(no_local) args.write_bit(no_ack) args.write_bit(exclusive) args.write_bit(nowait) args.write_table(arguments or {}) self._send_method((60, 20), args) if not nowait: consumer_tag = self.wait(allowed_methods=[ (60, 21), # Channel.basic_consume_ok ]) self.callbacks[consumer_tag] = callback if on_cancel: self.cancel_callbacks[consumer_tag] = on_cancel if no_ack: self.no_ack_consumers.add(consumer_tag) return consumer_tag
0.000871
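A minimal consumer sketch against the channel API above; the queue name and handler are illustrative, and basic_ack/basic_cancel are assumed to follow the same amqplib-style channel API:

def on_message(message):
    print(message.body)
    channel.basic_ack(message.delivery_tag)

tag = channel.basic_consume(queue='tasks', callback=on_message)
# ... the client's wait/drain loop dispatches deliveries to on_message ...
channel.basic_cancel(tag)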
def find_partition_multiplex(graphs, partition_type, **kwargs):
  """ Detect communities for multiplex graphs.

  Each graph should be defined on the same set of vertices, only the edges may
  differ for different graphs. See
  :func:`Optimiser.optimise_partition_multiplex` for a more detailed
  explanation.

  Parameters
  ----------
  graphs : list of :class:`ig.Graph`
    List of graphs to optimise, one per layer (each graph is wrapped in a
    partition of type ``partition_type``).

  partition_type : type of :class:`MutableVertexPartition`
    The type of partition to use for optimisation (identical for all graphs).

  **kwargs
    Remaining keyword arguments, passed on to constructor of ``partition_type``.

  Returns
  -------
  list of int
    membership of nodes.

  float
    Improvement in quality of combined partitions, see
    :func:`Optimiser.optimise_partition_multiplex`.

  Notes
  -----
  We don't return a partition in this case because a partition is always
  defined on a single graph. We therefore simply return the membership (which
  is the same for all layers).

  See Also
  --------
  :func:`Optimiser.optimise_partition_multiplex`

  :func:`slices_to_layers`

  Examples
  --------
  >>> n = 100
  >>> G_1 = ig.Graph.Lattice([n], 1)
  >>> G_2 = ig.Graph.Lattice([n], 1)
  >>> membership, improvement = louvain.find_partition_multiplex([G_1, G_2],
  ...                                                            louvain.ModularityVertexPartition)
  """
  n_layers = len(graphs)
  partitions = []
  layer_weights = [1]*n_layers
  for graph in graphs:
    partitions.append(partition_type(graph, **kwargs))
  optimiser = Optimiser()
  improvement = optimiser.optimise_partition_multiplex(partitions, layer_weights)
  return partitions[0].membership, improvement
0.006877
def __get_stored_instances(self, factory_name): # type: (str) -> List[StoredInstance] """ Retrieves the list of all stored instances objects corresponding to the given factory name :param factory_name: A factory name :return: All components instantiated from the given factory """ with self.__instances_lock: return [ stored_instance for stored_instance in self.__instances.values() if stored_instance.factory_name == factory_name ]
0.005272
def _create_filter_by(self): """Transform the json-server filter arguments to model-resource ones.""" filter_by = [] for name, values in request.args.copy().lists(): # copy.lists works in py2 and py3 if name not in _SKIPPED_ARGUMENTS: column = _re_column_name.search(name).group(1) if column not in self._model_columns: continue for value in values: if name.endswith('_ne'): filter_by.append(name[:-3] + '!=' + value) elif name.endswith('_lte'): filter_by.append(name[:-4] + '<=' + value) elif name.endswith('_gte'): filter_by.append(name[:-4] + '>=' + value) elif name.endswith('_like'): filter_by.append(name[:-5] + '::like::%' + value + '%') else: filter_by.append(name.replace('__', '.') + '==' + value) filter_by += self._create_fulltext_query() return ','.join(filter_by)
0.004452
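An illustrative trace of the translation this helper performs, derived directly from the suffix handling in the code above (the request is hypothetical):

# For the request:
#   GET /users?age_gte=18&age_lte=65&name_like=smi&role_ne=admin
# the returned filter string would be:
#   'age>=18,age<=65,name::like::%smi%,role!=admin'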
def date(self): """ Return date object with same year, month and day. :rtype: :py:class:`khayyam.JalaliDate` """ return khayyam.JalaliDate(self.year, self.month, self.day)
0.009434
def credit_card_account_query(self, number, date): """CC Statement request""" return self.authenticated_query(self._ccreq(number, date))
0.013158
def concretize(self, **kwargs): """ Return a concretization of the contents of the file, as a flat bytestring. """ size = self.state.solver.min(self._size, **kwargs) data = self.load(0, size) kwargs['cast_to'] = kwargs.get('cast_to', bytes) kwargs['extra_constraints'] = tuple(kwargs.get('extra_constraints', ())) + (self._size == size,) return self.state.solver.eval(data, **kwargs)
0.008929
def from_birth_date(birth_date):
        """Take a person's birth date (datetime.date) and return a new DOB
        object suitable for that person."""
        if birth_date > datetime.date.today():
            raise ValueError('birth_date can\'t be in the future')
        date_range = DateRange(birth_date, birth_date)
        return DOB(date_range=date_range)
0.00831
def parse_classi_or_classii_allele_name(name, infer_pair=True): """ Handle different forms of both single and alpha-beta allele names. Alpha-beta alleles may look like: DPA10105-DPB110001 HLA-DPA1*01:05-DPB1*100:01 hla-dpa1*0105-dpb1*10001 dpa1*0105-dpb1*10001 HLA-DPA1*01:05/DPB1*100:01 Other class II alleles may look like: DRB1_0102 DRB101:02 HLA-DRB1_0102 """ species, name = split_species_prefix(name) # Handle the case where alpha/beta pairs are separated with a /. name = name.replace("/", "-") # Ignored underscores, such as with DRB1_0102 name = name.replace("_", "*") parts = name.split("-") if len(parts) == 2: alpha_string, beta_string = parts alpha = parse_allele_name(alpha_string) beta = parse_allele_name(beta_string) return (alpha, beta) elif len(parts) == 1: parsed = parse_allele_name(name, species) if parsed.species == "HLA" and infer_pair: alpha = infer_alpha_chain(parsed) if alpha is not None: return (alpha, parsed) return (parsed,) else: raise AlleleParseError( "Allele has too many parts: %s" % name)
0.000808
async def _sasl_respond(self):
        """ Respond to SASL challenge with response. """
        # Formulate a response.
        if self._sasl_client:
            try:
                response = self._sasl_client.process(self._sasl_challenge)
            except puresasl.SASLError:
                response = None

            if response is None:
                self.logger.warning('SASL challenge processing failed: aborting SASL authentication.')
                await self._sasl_abort()
                # Nothing valid to send; bail out before encoding.
                return
        else:
            response = b''

        response = base64.b64encode(response).decode(self.encoding)
        to_send = len(response)
        self._sasl_challenge = b''

        # Send response in chunks.
        while to_send > 0:
            await self.rawmsg('AUTHENTICATE', response[:RESPONSE_LIMIT])
            response = response[RESPONSE_LIMIT:]
            to_send -= RESPONSE_LIMIT

        # If our message fit exactly into RESPONSE_LIMIT-byte chunks, send an
        # empty message to indicate we're done.
        if to_send == 0:
            await self.rawmsg('AUTHENTICATE', EMPTY_MESSAGE)
0.003653
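A standalone sketch of the chunked-send pattern used above; the limit value is illustrative, not the library's actual RESPONSE_LIMIT:

LIMIT = 400

def sasl_chunks(payload):
    """Split an encoded response, ending with '' when it fills its chunks exactly."""
    for i in range(0, len(payload), LIMIT):
        yield payload[i:i + LIMIT]
    if len(payload) % LIMIT == 0:
        yield ''  # terminator chunk: tells the server the response is complete

assert list(sasl_chunks('a' * 400)) == ['a' * 400, '']
assert list(sasl_chunks('a' * 401)) == ['a' * 400, 'a']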
def _distribution_distance(simulated_trajectories, observed_trajectories_lookup, distribution): """ Returns the distance between the simulated and observed trajectory, w.r.t. the assumed distribution :param simulated_trajectories: Simulated trajectories :type simulated_trajectories: list[:class:`means.simulation.Trajectory`] :param observed_trajectories_lookup: A dictionary of (trajectory.description: trajectory) of observed trajectories :type observed_trajectories_lookup: dict :param distribution: Distribution to use. See :func:`_eval_density` for the list of available distributions :return: """ mean_variance_lookup = _compile_mean_variance_lookup(simulated_trajectories) # get moment expansion result with current parameters log_likelihood = 0 for trajectory in observed_trajectories_lookup.itervalues(): moment = trajectory.description assert(isinstance(moment, Moment)) assert(moment.order == 1) species = np.where(moment.n_vector == 1)[0][0] mean_variance = mean_variance_lookup[species] if (mean_variance.mean < 0).any() or (mean_variance.variance < 0).any(): return float('inf') term = _eval_density(mean_variance.mean, mean_variance.variance, trajectory.values, distribution) log_likelihood += term dist = -log_likelihood return dist
0.005747
def update_json(self, fields=None): """Update the current entity. Call :meth:`update_raw`. Check the response status code, decode JSON and return the decoded JSON as a dict. :param fields: See :meth:`update`. :return: A dict consisting of the decoded JSON in the server's response. :raises: ``requests.exceptions.HTTPError`` if the response has an HTTP 4XX or 5XX status code. :raises: ``ValueError`` If the response JSON can not be decoded. """ response = self.update_raw(fields) response.raise_for_status() return response.json()
0.003096
def get_node_sum(self, age=None): """Get sum of all branches in the tree. Returns: int: The sum of all nodes grown until the age. """ if age is None: age = self.age return age if self.comp == 1 else int((pow(self.comp, age+1) - 1) / (self.comp - 1))
0.009524
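The closed form above is the finite geometric series sum of comp**k for k from 0 to age; a quick numeric check with illustrative values:

# comp = 2, age = 3: nodes per level are 1, 2, 4, 8
assert sum(2 ** k for k in range(3 + 1)) == int((2 ** (3 + 1) - 1) / (2 - 1)) == 15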
def get_title(self, group=None): """Adds number of comments to title.""" title = super(CommentsPlugin, self).get_title() if group is not None: count = GroupComments.objects.filter(group=group).count() else: count = None if count: title = u'%s (%d)' % (title, count) return title
0.005525
def parse_repo_slug_from_url(github_url): """Get the slug, <owner>/<repo_name>, for a GitHub repository from its URL. Parameters ---------- github_url : `str` URL of a GitHub repository. Returns ------- repo_slug : `RepoSlug` Repository slug with fields ``full``, ``owner``, and ``repo``. See `RepoSlug` for details. Raises ------ RuntimeError Raised if the URL cannot be parsed. """ match = GITHUB_SLUG_PATTERN.match(github_url) if not match: message = 'Could not parse GitHub slug from {}'.format(github_url) raise RuntimeError(message) _full = '/'.join((match.group('org'), match.group('name'))) return RepoSlug(_full, match.group('org'), match.group('name'))
0.00125
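A usage sketch for the parser above; the URL is illustrative, and GITHUB_SLUG_PATTERN is assumed to expose the 'org' and 'name' groups used in the function body:

slug = parse_repo_slug_from_url('https://github.com/lsst-sqre/ltd-mason')
print(slug.full)   # 'lsst-sqre/ltd-mason'
print(slug.owner)  # 'lsst-sqre'
print(slug.repo)   # 'ltd-mason'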
def add_splash_ids(splash_mapping_file_pth, conn, db_type='sqlite'):
    """ Add splash ids to the database (in case they are stored in a different
    file to the msp files, as for MoNA).

    Example:
        >>> from msp2db.db import get_connection
        >>> from msp2db.parse import add_splash_ids
        >>> conn = get_connection('sqlite', 'library.db')
        >>> add_splash_ids('splash_mapping_file.csv', conn, db_type='sqlite')

    Args:
        splash_mapping_file_pth (str): Path to the splash mapping file (needs
            to be csv format and have no headers). It should contain two
            columns: the first the accession number, the second the splash,
            e.g. "AU100601, splash10-0a4i-1900000000-d2bc1c887f6f99ed0f74".
        conn: Database connection object, as returned by get_connection.
        db_type (str): Database type, either 'sqlite' or a flavour that uses
            '%s' parameter placeholders (e.g. 'mysql').
    """
    # get dictionary of accession and library_spectra_meta id
    cursor = conn.cursor()
    cursor.execute("SELECT id, accession FROM library_spectra_meta")

    accession_d = {row[1]: row[0] for row in cursor}

    if db_type == 'sqlite':
        type_sign = '?'
    else:
        type_sign = '%s'

    rows = []
    c = 0

    # loop through the splash mapping file, flushing updates in batches of 200
    with open(splash_mapping_file_pth, "r") as f:
        for line in f:
            c += 1
            line = line.rstrip()
            line_l = line.split(',')

            accession = line_l[0]
            splash = line_l[1]

            try:
                aid = accession_d[accession]
            except KeyError:
                print("can't find accession {}".format(accession))
                continue

            rows.append((splash, aid))

            if c > 200:
                cursor.executemany("UPDATE library_spectra_meta SET splash = {t} "
                                   "WHERE id = {t} ".format(t=type_sign), rows)
                conn.commit()
                rows = []
                c = 0

    # flush any remaining rows
    cursor.executemany("UPDATE library_spectra_meta SET splash = {t} "
                       "WHERE id = {t} ".format(t=type_sign), rows)
    conn.commit()
0.003978
def pool_function(p, nick, rutaDescarga, avoidProcessing=True, avoidDownload=True, verbosity=1):
    """
    Wrapper for being able to launch all the threads of getPageWrapper.

    We receive the parameters for getPageWrapper as a tuple.

    Args:
    -----
        p: Platform where the information is stored.
        nick: Nick to be searched.
        rutaDescarga: Local file where saving the obtained information.
        avoidProcessing: Boolean var that defines whether the profiles will
            NOT be processed (stored in this version).
        avoidDownload: Boolean var that defines whether the profiles will NOT
            be downloaded (stored in this version).
        verbosity: The verbosity level: 1, shows errors; 2, shows warnings.

    Return:
    -------
        A dictionary with the following structure:
        {
            "platform": "Platform",
            "status": "DONE",
            "data": "<data>"
        }
        Data is None or a serialized representation of the dictionary.
    """
    try:
        #res = getPageWrapper(p, nick, rutaDescarga, avoidProcessing, avoidDownload, outQueue)
        res = p.getInfo(
            query=nick,
            mode="usufy",
            process=True
        )
        return {"platform": str(p), "status": "Ok", "data": res}
    except Exception as e:
        # Show errors at verbosity >= 1 and warnings at verbosity >= 2.
        if (isinstance(e, OSRFrameworkError) and verbosity >= 1) or \
                (isinstance(e, OSRFrameworkException) and verbosity >= 2):
            print(str(e))
        return {"platform": str(p), "status": e, "data": e.generic}
0.008929
def rotate_defaultbasis(self, C): """Rotate all parameters to the basis where the running down-type quark and charged lepton mass matrices are diagonal and where the running up-type quark mass matrix has the form V.S, with V unitary and S real diagonal, and where the CKM and PMNS matrices have the standard phase convention.""" v = sqrt(2*C['m2'].real/C['Lambda'].real) Mep = v/sqrt(2) * (C['Ge'] - C['ephi'] * v**2/self.scale_high**2/2) Mup = v/sqrt(2) * (C['Gu'] - C['uphi'] * v**2/self.scale_high**2/2) Mdp = v/sqrt(2) * (C['Gd'] - C['dphi'] * v**2/self.scale_high**2/2) Mnup = -v**2 * C['llphiphi'] UeL, Me, UeR = ckmutil.diag.msvd(Mep) UuL, Mu, UuR = ckmutil.diag.msvd(Mup) UdL, Md, UdR = ckmutil.diag.msvd(Mdp) Unu, Mnu = ckmutil.diag.mtakfac(Mnup) UuL, UdL, UuR, UdR = ckmutil.phases.rephase_standard(UuL, UdL, UuR, UdR) Unu, UeL, UeR = ckmutil.phases.rephase_pmns_standard(Unu, UeL, UeR) return definitions.flavor_rotation(C, Uq=UdL, Uu=UuR, Ud=UdR, Ul=UeL, Ue=UeR)
0.003604
def Compare(fromMo, toMo, diff):
	""" Internal method to support CompareManagedObject functionality. """
	from UcsBase import UcsUtils
	if (fromMo.classId != toMo.classId):
		return CompareStatus.TypesDifferent

	for prop in UcsUtils.GetUcsPropertyMetaAttributeList(str(fromMo.classId)):
		propMeta = UcsUtils.IsPropertyInMetaIgnoreCase(fromMo.classId, prop)
		if propMeta is not None:
			if ((propMeta.access == UcsPropertyMeta.Internal) or
					(propMeta.access == UcsPropertyMeta.ReadOnly) or
					(prop in toMo._excludePropList)):
				continue
			if ((toMo.__dict__.has_key(prop)) and
					(fromMo.getattr(prop) != toMo.getattr(prop))):
				diff.append(prop)

	if (len(diff) > 0):
		return CompareStatus.PropsDifferent
	return CompareStatus.Equal
0.027027
def dskopn(fname, ifname, ncomch): """ Open a new DSK file for subsequent write operations. https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dskopn_c.html :param fname: Name of a DSK file to be opened. :type fname: str :param ifname: Internal file name. :type ifname: str :param ncomch: Number of comment characters to allocate. :type ncomch: int :return: Handle assigned to the opened DSK file. :rtype: int """ fname = stypes.stringToCharP(fname) ifname = stypes.stringToCharP(ifname) ncomch = ctypes.c_int(ncomch) handle = ctypes.c_int() libspice.dskopn_c(fname, ifname, ncomch, ctypes.byref(handle)) return handle.value
0.008392
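A sketch of pairing dskopn with the matching SPICE close routine; the file name and internal name are illustrative, and the dskcls keyword shown is an assumption about the SpiceyPy-style wrapper:

handle = dskopn('phobos.bds', 'phobos-dsk', 0)
# ... write DSK segments to the file (e.g. via the type-2 writer) ...
dskcls(handle, optmiz=False)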
def role_show(self, role_id, **kwargs): "https://developer.zendesk.com/rest_api/docs/chat/roles#get-role" api_path = "/api/v2/roles/{role_id}" api_path = api_path.format(role_id=role_id) return self.call(api_path, **kwargs)
0.007843