text: string, lengths 78 to 104k
score: float64, range 0 to 0.18
def _specialize_curve(nodes, start, end):
    """Specialize a curve to a re-parameterization

    .. note::

       This assumes the curve is degree 1 or greater but doesn't check.

    .. note::

       There is also a Fortran implementation of this function, which
       will be used if it can be built.

    Args:
        nodes (numpy.ndarray): Control points for a curve.
        start (float): The start point of the interval we are specializing to.
        end (float): The end point of the interval we are specializing to.

    Returns:
        numpy.ndarray: The control points for the specialized curve.
    """
    # NOTE: There is no corresponding "enable", but the disable only applies
    #       in this lexical scope.
    # pylint: disable=too-many-locals
    _, num_nodes = np.shape(nodes)
    # Uses start-->0, end-->1 to represent the specialization used.
    weights = ((1.0 - start, start), (1.0 - end, end))
    partial_vals = {
        (0,): de_casteljau_one_round(nodes, *weights[0]),
        (1,): de_casteljau_one_round(nodes, *weights[1]),
    }

    for _ in six.moves.xrange(num_nodes - 2, 0, -1):
        new_partial = {}
        for key, sub_nodes in six.iteritems(partial_vals):
            # Our keys are ascending so we increment from the last value.
            for next_id in six.moves.xrange(key[-1], 1 + 1):
                new_key = key + (next_id,)
                new_partial[new_key] = de_casteljau_one_round(
                    sub_nodes, *weights[next_id]
                )

        partial_vals = new_partial

    result = np.empty(nodes.shape, order="F")
    for index in six.moves.xrange(num_nodes):
        key = (0,) * (num_nodes - index - 1) + (1,) * index
        result[:, [index]] = partial_vals[key]

    return result
0.000565
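A minimal usage sketch (not from the source): the control-point layout, columns in Fortran order, is inferred from the function body, and the de_casteljau_one_round dependency is assumed importable.

import numpy as np

# Quadratic Bezier curve with points stored as columns (2 x 3).
nodes = np.asfortranarray([
    [0.0, 0.5, 1.0],
    [0.0, 1.0, 0.0],
])
# Control points of the same curve re-parameterized to [0.25, 0.75].
sub_nodes = _specialize_curve(nodes, 0.25, 0.75)
print(sub_nodes)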
def add(self, item):
    """Add an item in the history."""
    self._check_index()
    # Possibly truncate the history up to the current point.
    self._history = self._history[:self._index + 1]
    # Append the item
    self._history.append(item)
    # Increment the index.
    self._index += 1
    self._check_index()
    # Check that the current element is what was provided to the function.
    assert id(self.current_item) == id(item)
0.004132
def _imm_getattribute(self, name):
    '''
    An immutable's getattribute calculates lazy values when not yet cached
    in the object then adds them as attributes.
    '''
    if _imm_is_init(self):
        return _imm_init_getattribute(self, name)
    else:
        dd = object.__getattribute__(self, '__dict__')
        if name == '__dict__':
            return dd
        curval = dd.get(name, dd)
        if curval is not dd:
            return dd[name]
        values = _imm_value_data(self)
        if name not in values:
            return object.__getattribute__(self, name)
        (args, memfn, _) = values[name]
        value = memfn(*[getattr(self, arg) for arg in args])
        dd[name] = value
        # if this is a const, it may have checks to run
        if name in _imm_const_data(self):
            # #TODO
            # Note that there's a race condition that eventually needs to be
            # handled here: if dd[name] is set then a check fails, there may
            # have been something that read the improper value in the meantime
            try:
                _imm_check(self, [name])
            except:
                del dd[name]
                raise
        # if those pass, then we're fine
        return value
0.0057
def surrogate(self, u_sparse, q_sparse):
    '''Combines the train and predict methods to create a surrogate model
    function fitted to the input/output combinations given in u_sparse
    and q_sparse.

    :param numpy.ndarray u_sparse: input values at which the output values
        are obtained. Must be the same as the quadrature points defined by
        the getQuadraturePoints method.
    :param numpy.ndarray q_sparse: output values corresponding to the input
        values given in u_sparse to which the surrogate is fitted
    :return: surrogate model fitted to u_sparse and q_sparse
    :rtype: function

    *Sample Usage*::

        >>> thePC = PolySurrogate(dimensions=2)
        >>> U = thePC.getQuadraturePoints()
        >>> Q = [myFunc(u) for u in U]
        >>> surrogateFunc = thePC.surrogate(U, Q)
    '''
    self.train(q_sparse)

    def model(u):
        return self.predict(u)

    return model
0.00295
def fix_display(self):
    """If this is being run on a headless system the Matplotlib backend must
    be changed to one that doesn't need a display.
    """
    try:
        tkinter.Tk()
    except (tkinter.TclError, NameError):
        # If there is no display.
        try:
            import matplotlib as mpl
        except ImportError:
            pass
        else:
            print("Setting matplotlib backend to Agg")
            mpl.use('Agg')
0.006
def RegisterDecoder(cls, decoder):
    """Registers a decoder for a specific encoding method.

    Args:
      decoder (type): decoder class.

    Raises:
      KeyError: if the corresponding decoder is already set.
    """
    encoding_method = decoder.ENCODING_METHOD.lower()
    if encoding_method in cls._decoders:
        raise KeyError(
            'Decoder for encoding method: {0:s} already set.'.format(
                decoder.ENCODING_METHOD))

    cls._decoders[encoding_method] = decoder
0.004049
def _pending_of(self, workload):
    """Return the number of pending tests in a workload."""
    pending = sum(list(scope.values()).count(False)
                  for scope in workload.values())
    return pending
0.014493
def char_span_to_token_span(token_offsets: List[Tuple[int, int]],
                            character_span: Tuple[int, int]) -> Tuple[Tuple[int, int], bool]:
    """
    Converts a character span from a passage into the corresponding token span in the
    tokenized version of the passage.  If you pass in a character span that does not
    correspond to complete tokens in the tokenized version, we'll do our best, but the
    behavior is officially undefined.  We return an error flag in this case, and have
    some debug logging so you can figure out the cause of this issue (in SQuAD, these
    are mostly either tokenization problems or annotation problems; there's a fair
    amount of both).

    The basic outline of this method is to find the token span that has the same offsets
    as the input character span.  If the tokenizer tokenized the passage correctly and
    has matching offsets, this is easy.  We try to be a little smart about cases where
    they don't match exactly, but mostly just find the closest thing we can.

    The returned ``(begin, end)`` indices are `inclusive` for both ``begin`` and ``end``.
    So, for example, ``(2, 2)`` is the one word span beginning at token index 2,
    ``(3, 4)`` is the two-word span beginning at token index 3, and so on.

    Returns
    -------
    token_span : ``Tuple[int, int]``
        `Inclusive` span start and end token indices that match as closely as possible
        to the input character spans.
    error : ``bool``
        Whether the token spans match the input character spans exactly.  If this is
        ``False``, it means there was an error in either the tokenization or the
        annotated character span.
    """
    # We have token offsets into the passage from the tokenizer; we _should_ be able to
    # just find the tokens that have the same offsets as our span.
    error = False
    start_index = 0
    while start_index < len(token_offsets) and token_offsets[start_index][0] < character_span[0]:
        start_index += 1
    # start_index should now be pointing at the span start index.
    if token_offsets[start_index][0] > character_span[0]:
        # In this case, a tokenization or labeling issue made us go too far - the
        # character span we're looking for actually starts in the previous token.
        # We'll back up one.
        logger.debug("Bad labelling or tokenization - start offset doesn't match")
        start_index -= 1
    if token_offsets[start_index][0] != character_span[0]:
        error = True
    end_index = start_index
    while end_index < len(token_offsets) and token_offsets[end_index][1] < character_span[1]:
        end_index += 1
    if end_index == start_index and token_offsets[end_index][1] > character_span[1]:
        # Looks like there was a token that should have been split, like "1854-1855",
        # where the answer is "1854".  We can't do much in this case, except keep the
        # answer as the whole token.
        logger.debug("Bad tokenization - end offset doesn't match")
    elif token_offsets[end_index][1] > character_span[1]:
        # This is a case where the given answer span is more than one token, and the
        # last token is cut off for some reason, like "split with Luckett and Rober",
        # when the original passage said "split with Luckett and Roberson".  In this
        # case, we'll just keep the end index where it is, and assume the intent was to
        # mark the whole token.
        logger.debug("Bad labelling or tokenization - end offset doesn't match")
    if token_offsets[end_index][1] != character_span[1]:
        error = True
    return (start_index, end_index), error
0.007699
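A hypothetical usage sketch (token offsets invented for illustration) showing the inclusive token span returned for an exactly matching character span:

# Character offsets for the three tokens of "The cat sat".
token_offsets = [(0, 3), (4, 7), (8, 11)]
span, error = char_span_to_token_span(token_offsets, (4, 7))
print(span, error)  # (1, 1) False: the single token "cat", matched exactly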
def _get_run_results(self, run_id, request_type="plan", timeout_count=120):
    """
    Wait for plan/apply results, else timeout

    :param run_id: ID for the run
    :return: Returns object of the results.
    """
    # NOTE: strings must be compared with `!=`, not the identity check
    # `is not`, which is unreliable for string values.
    if request_type != "plan" and request_type != "apply":
        raise KeyError("request_type must be Plan or Apply")

    for x in range(0, timeout_count):
        request = self.client.get(path="/runs/" + run_id).json()
        if request['data']['attributes']['status'] != "planning" and \
                request['data']['attributes']['status'] != "applying":
            return request['data']
        print("Job Status: " + request_type + "ing | " + str(x * 10) + " seconds")
        time.sleep(10)
    raise TimeoutError("Plan took too long to resolve")
0.005767
def get_span_datas(self, span):
    """Extracts a list of SpanData tuples from a span

    :rtype: list of opencensus.trace.span_data.SpanData
    :return: list of SpanData tuples
    """
    span_datas = [
        span_data_module.SpanData(
            name=ss.name,
            context=self.span_context,
            span_id=ss.span_id,
            parent_span_id=ss.parent_span.span_id if ss.parent_span else None,
            attributes=ss.attributes,
            start_time=ss.start_time,
            end_time=ss.end_time,
            child_span_count=len(ss.children),
            stack_trace=ss.stack_trace,
            time_events=ss.time_events,
            links=ss.links,
            status=ss.status,
            same_process_as_parent_span=ss.same_process_as_parent_span,
            span_kind=ss.span_kind
        )
        for ss in span
    ]
    return span_datas
0.002016
def make_symmetric(dict):
    """Makes the given dictionary symmetric. Values are assumed to be
    unique."""
    for key, value in list(dict.items()):
        dict[value] = key
    return dict
0.010526
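A short usage sketch; the dictionary is mutated in place and also returned:

pairs = {'(': ')', '[': ']'}
make_symmetric(pairs)
assert pairs[')'] == '(' and pairs[']'] == '['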
def nonzero_pixels(self):
    """ Return an array of the nonzero pixels.

    Returns
    -------
    :obj:`numpy.ndarray`
        Nx2 array of the nonzero pixels
    """
    nonzero_px = np.where(np.sum(self.raw_data, axis=2) > 0)
    nonzero_px = np.c_[nonzero_px[0], nonzero_px[1]]
    return nonzero_px
0.005831
def to_intermediate(self):
    """
    Converts the NetJSON configuration dictionary (self.config)
    to the intermediate data structure (self.intermediate_data) that will
    be then used by the renderer class to generate the router configuration
    """
    self.validate()
    self.intermediate_data = OrderedDict()
    for converter_class in self.converters:
        # skip unnecessary loop cycles
        if not converter_class.should_run_forward(self.config):
            continue
        converter = converter_class(self)
        value = converter.to_intermediate()
        # maintain backward compatibility with backends
        # that are currently in development by GSoC students
        # TODO for >= 0.6.2: remove once all backends have upgraded
        if value and isinstance(value, (tuple, list)):  # pragma: nocover
            value = OrderedDict(value)
        if value:
            self.intermediate_data = merge_config(
                self.intermediate_data, value, list_identifiers=['.name'])
0.00253
def p_lpartselect(self, p):
    'lpartselect : identifier LBRACKET expression COLON expression RBRACKET'
    p[0] = Partselect(p[1], p[3], p[5], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
0.014354
def vms(message, level=1):
    """Writes the specified message *only* if verbose output is enabled."""
    if verbose is not None and verbose != False:
        if isinstance(verbose, bool) or (isinstance(verbose, int) and level <= verbose):
            std(message)
0.011321
def _center_transform(self, transform):
    '''
    Works like setupTransform of a version of java nodebox
    http://dev.nodebox.net/browser/nodebox-java/branches/rewrite/src/java/net/nodebox/graphics/Grob.java
    '''
    dx, dy = self._get_center()
    t = cairo.Matrix()
    t.translate(dx, dy)
    t = transform * t
    t.translate(-dx, -dy)
    return t
0.005
def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK):
    r"""
    Compare `a` and `b` (lists of strings); return a `Differ`-style delta.

    Optional keyword parameters `linejunk` and `charjunk` are for filter
    functions (or None):

    - linejunk: A function that should accept a single string argument, and
      return true iff the string is junk.  The default is None, and is
      recommended; as of Python 2.3, an adaptive notion of "noise" lines is
      used that does a good job on its own.

    - charjunk: A function that should accept a string of length 1.  The
      default is module-level function IS_CHARACTER_JUNK, which filters out
      whitespace characters (a blank or tab; note: bad idea to include
      newline in this!).

    Tools/scripts/ndiff.py is a command-line front-end to this function.

    Example:

    >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
    ...              'ore\ntree\nemu\n'.splitlines(1))
    >>> print ''.join(diff),
    - one
    ?  ^
    + ore
    ?  ^
    - two
    - three
    ?  -
    + tree
    + emu
    """
    return Differ(linejunk, charjunk).compare(a, b)
0.000882
def enabled(self):
    """ True if coloring is currently enabled """
    # In auto-detection mode color enabled when terminal attached
    if self._mode == COLOR_AUTO:
        return sys.stdout.isatty()
    return self._mode == COLOR_ON
0.007813
def kill_all_processes(self, check_alive=True, allow_graceful=False):
    """Kill all of the processes.

    Note that this is slower than necessary because it calls kill, wait,
    kill, wait, ... instead of kill, kill, ..., wait, wait, ...

    Args:
        check_alive (bool): Raise an exception if any of the processes were
            already dead.
    """
    # Kill the raylet first. This is important for suppressing errors at
    # shutdown because we give the raylet a chance to exit gracefully and
    # clean up its child worker processes. If we were to kill the plasma
    # store (or Redis) first, that could cause the raylet to exit
    # ungracefully, leading to more verbose output from the workers.
    if ray_constants.PROCESS_TYPE_RAYLET in self.all_processes:
        self._kill_process_type(
            ray_constants.PROCESS_TYPE_RAYLET,
            check_alive=check_alive,
            allow_graceful=allow_graceful)

    # We call "list" to copy the keys because we are modifying the
    # dictionary while iterating over it.
    for process_type in list(self.all_processes.keys()):
        self._kill_process_type(
            process_type,
            check_alive=check_alive,
            allow_graceful=allow_graceful)
0.001489
def ex_best_offers_overrides(best_prices_depth=None, rollup_model=None, rollup_limit=None,
                             rollup_liability_threshold=None, rollup_liability_factor=None):
    """
    Create filter to specify whether to accumulate market volume info, how deep a book
    to return and rollup methods if accumulation is selected.

    :param int best_prices_depth: The maximum number of prices to return on each side
        for each runner.
    :param str rollup_model: method to use to accumulate market orders.
    :param int rollup_limit: The volume limit to use when rolling up returned sizes.
        The exact definition of the limit depends on the rollupModel. If no limit is
        provided it will use minimum stake
    :param float rollup_liability_threshold: Only applicable when rollupModel is
        MANAGED_LIABILITY. The rollup model switches from being stake based to
        liability based at the smallest lay price which is >= rollupLiabilityThreshold
    :param int rollup_liability_factor: Only applicable when rollupModel is
        MANAGED_LIABILITY. (rollupLiabilityFactor * rollupLimit) is the minimum
        liability the user is deemed to be comfortable with. After the
        rollupLiabilityThreshold price, subsequent volumes will be rolled up to the
        minimum value such that the liability >= the minimum liability.

    :returns: parameters for inclusion in market data requests.
    :rtype: dict
    """
    args = locals()
    return {
        to_camel_case(k): v for k, v in args.items() if v is not None
    }
0.007199
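A hypothetical usage sketch; only the non-None arguments survive the filter, with keys camel-cased by to_camel_case:

filters = ex_best_offers_overrides(best_prices_depth=3, rollup_model='STAKE')
# expected: {'bestPricesDepth': 3, 'rollupModel': 'STAKE'}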
def i_from_v(resistance_shunt, resistance_series, nNsVth, voltage,
             saturation_current, photocurrent, method='lambertw'):
    '''
    Device current at the given device voltage for the single diode model.

    Uses the single diode model (SDM) as described in, e.g.,
    Jain and Kapoor 2004 [1].
    The solution is per Eq 2 of [1] except when resistance_series=0,
    in which case the explicit solution for current is used.
    Ideal device parameters are specified by resistance_shunt=np.inf and
    resistance_series=0.
    Inputs to this function can include scalars and pandas.Series, but it is
    the caller's responsibility to ensure that the arguments are all float64
    and within the proper ranges.

    Parameters
    ----------
    resistance_shunt : numeric
        Shunt resistance in ohms under desired IV curve conditions.
        Often abbreviated ``Rsh``.
        0 < resistance_shunt <= numpy.inf

    resistance_series : numeric
        Series resistance in ohms under desired IV curve conditions.
        Often abbreviated ``Rs``.
        0 <= resistance_series < numpy.inf

    nNsVth : numeric
        The product of three components. 1) The usual diode ideal factor (n),
        2) the number of cells in series (Ns), and 3) the cell thermal voltage
        under the desired IV curve conditions (Vth). The thermal voltage of
        the cell (in volts) may be calculated as ``k*temp_cell/q``, where k is
        Boltzmann's constant (J/K), temp_cell is the temperature of the p-n
        junction in Kelvin, and q is the charge of an electron (coulombs).
        0 < nNsVth

    voltage : numeric
        The voltage in Volts under desired IV curve conditions.

    saturation_current : numeric
        Diode saturation current in amperes under desired IV curve
        conditions. Often abbreviated ``I_0``.
        0 < saturation_current

    photocurrent : numeric
        Light-generated current (photocurrent) in amperes under desired
        IV curve conditions. Often abbreviated ``I_L``.
        0 <= photocurrent

    method : str
        Method to use: ``'lambertw'``, ``'newton'``, or ``'brentq'``. *Note*:
        ``'brentq'`` is limited to 1st quadrant only.

    Returns
    -------
    current : np.ndarray or scalar

    References
    ----------
    [1] A. Jain, A. Kapoor, "Exact analytical solutions of the parameters of
    real solar cells using Lambert W-function", Solar Energy Materials and
    Solar Cells, 81 (2004) 269-277.
    '''
    if method.lower() == 'lambertw':
        return _singlediode._lambertw_i_from_v(
            resistance_shunt, resistance_series, nNsVth, voltage,
            saturation_current, photocurrent
        )
    else:
        # Calculate points on the IV curve using either 'newton' or 'brentq'
        # methods. Voltages are determined by first solving the single diode
        # equation for the diode voltage V_d then backing out voltage
        args = (voltage, photocurrent, saturation_current, resistance_series,
                resistance_shunt, nNsVth)
        I = _singlediode.bishop88_i_from_v(*args, method=method.lower())
        # find the right size and shape for returns
        size, shape = _singlediode._get_size_and_shape(args)
        if size <= 1:
            if shape is not None:
                I = np.tile(I, shape)
        if np.isnan(I).any() and size <= 1:
            I = np.repeat(I, size)
            if shape is not None:
                I = I.reshape(shape)
        return I
0.001421
def geq_multiple(self, other):
    """
    Return the next multiple of this time value,
    greater than or equal to ``other``.
    If ``other`` is zero, return this time value.

    :rtype: :class:`~aeneas.exacttiming.TimeValue`
    """
    if other == TimeValue("0.000"):
        return self
    return int(math.ceil(other / self)) * self
0.005319
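A worked example (hypothetical, assuming TimeValue supports decimal-exact division and int multiplication): ceil(0.100 / 0.040) = 3, so the result is 3 * 0.040 = 0.120.

step = TimeValue("0.040")
print(step.geq_multiple(TimeValue("0.100")))  # expected: 0.120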
def query(sql, format='df'):
    '''
    Submit an `sql` query (string) to treasury.io and return a pandas DataFrame.

    For example::

        print('Operating cash balances for May 22, 2013')
        print(treasuryio.query('SELECT * FROM "t1" WHERE "date" = \'2013-05-22\';'))

    Return a dict::

        treasuryio.query('SELECT * FROM "t1" WHERE "date" = \'2013-05-22\';', format='dict')
    '''
    url = 'http://api.treasury.io/cc7znvq/47d80ae900e04f2/sql/'
    query_string = urlencode({'q': sql})
    handle = urlopen(url + '?' + query_string)
    if handle.code == 200:
        d = load(handle)
        if format == 'df':
            return DataFrame(d)
        elif format == 'dict':
            return d
        else:
            raise ValueError('format must equal "df" or "dict"')
    else:
        raise ValueError(handle.read())
0.005903
def signature(self, name=None):
    """Return our function signature as a string.

    By default this function uses the annotated name of the function,
    however if you need to override that with a custom name you can pass
    name=<custom name>

    Args:
        name (str): Optional name to override the default name given in
            the function signature.

    Returns:
        str: The formatted function signature
    """
    self._ensure_loaded()

    if name is None:
        name = self.name

    num_args = len(self.arg_names)

    num_def = 0
    if self.arg_defaults is not None:
        num_def = len(self.arg_defaults)

    num_no_def = num_args - num_def

    args = []
    for i in range(0, len(self.arg_names)):
        typestr = ""
        if self.arg_names[i] in self.annotated_params:
            typestr = "{} ".format(self.annotated_params[self.arg_names[i]].type_name)

        if i >= num_no_def:
            default = str(self.arg_defaults[i - num_no_def])
            if len(default) == 0:
                default = "''"

            args.append("{}{}={}".format(typestr, str(self.arg_names[i]), default))
        else:
            args.append(typestr + str(self.arg_names[i]))

    return "{}({})".format(name, ", ".join(args))
0.002888
def _append_expectation(self, expectation_config):
    """Appends an expectation to `DataAsset._expectations_config` and drops existing
    expectations of the same type.

    If `expectation_config` is a column expectation, this drops existing expectations
    that are specific to that column and only if it is the same expectation type as
    `expectation_config`. Otherwise, if it's not a column expectation, this drops
    existing expectations of the same type as `expectation config`. After expectations
    of the same type are dropped, `expectation_config` is appended to
    `DataAsset._expectations_config`.

    Args:
        expectation_config (json): \
            The JSON-serializable expectation to be added to the DataAsset expectations
            in `_expectations_config`.

    Notes:
        May raise future errors once json-serializable tests are implemented to check
        for correct arg formatting
    """
    expectation_type = expectation_config['expectation_type']

    # Test to ensure the new expectation is serializable.
    # FIXME: If it's not, are we sure we want to raise an error?
    # FIXME: Should we allow users to override the error?
    # FIXME: Should we try to convert the object using something like
    # recursively_convert_to_json_serializable?
    json.dumps(expectation_config)

    # Drop existing expectations with the same expectation_type.
    # For column_expectations, _append_expectation should only replace expectations
    # where the expectation_type AND the column match
    # !!! This is good default behavior, but
    # !!! it needs to be documented, and
    # !!! we need to provide syntax to override it.
    if 'column' in expectation_config['kwargs']:
        column = expectation_config['kwargs']['column']

        self._expectations_config.expectations = [f for f in filter(
            lambda exp: (exp['expectation_type'] != expectation_type) or (
                'column' in exp['kwargs'] and exp['kwargs']['column'] != column),
            self._expectations_config.expectations
        )]
    else:
        self._expectations_config.expectations = [f for f in filter(
            lambda exp: exp['expectation_type'] != expectation_type,
            self._expectations_config.expectations
        )]

    self._expectations_config.expectations.append(expectation_config)
0.006019
def wait_until(name, state, timeout=300):
    '''
    Wait until a specific state has been reached on a node
    '''
    start_time = time.time()
    node = show_instance(name, call='action')
    while True:
        if node['state'] == state:
            return True
        time.sleep(1)
        if time.time() - start_time > timeout:
            return False
        node = show_instance(name, call='action')
0.002433
def request(self, *args, **kwargs):
    """
    Makes an API request based on arguments.

    :Parameters:
        - `args`: Non-keyword arguments
        - `kwargs`: Keyword arguments
    """
    return self.session.request(*args, **self.get_kwargs(**kwargs))
0.006993
def is_valid_ipv4(ip):
    """
    Return True if given ip is a valid IPv4 address.
    """
    if not _ipv4_re.match(ip):
        return False
    a, b, c, d = [int(i) for i in ip.split(".")]
    return a <= 255 and b <= 255 and c <= 255 and d <= 255
0.007937
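A usage sketch; the module-level _ipv4_re pattern is not shown in the source, so it is assumed here to accept dotted quads of 1-3 digit groups:

print(is_valid_ipv4("192.168.0.1"))  # True
print(is_valid_ipv4("300.1.1.1"))    # False: 300 fails the <= 255 range check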
def _planck_spi(self, lam, Teff):
    """
    Computes the spectral index of the monochromatic blackbody intensity
    using the Planck function. The spectral index is defined as:

        B(lambda) = 5 + d(log I)/d(log lambda),

    where I is the Planck function.

    @lam: wavelength in m
    @Teff: effective temperature in K

    Returns: the spectral index of monochromatic blackbody intensity
    """
    hclkt = self.h * self.c / lam / self.k / Teff
    expterm = np.exp(hclkt)
    return hclkt * expterm / (expterm - 1)
0.003534
def new_tag(self, label, cfrom=None, cto=None, tagtype=None, **kwargs):
    ''' Create a new tag on this token '''
    if cfrom is None:
        cfrom = self.cfrom
    if cto is None:
        cto = self.cto
    tag = Tag(label=label, cfrom=cfrom, cto=cto, tagtype=tagtype, **kwargs)
    return self.add_tag(tag)
0.0059
def run_op(self, op, sched):
    """
    Handle the operation:

    * if coro is in STATE_RUNNING, send or throw the given op

    * if coro is in STATE_NEED_INIT, call the init function and if it
      doesn't return a generator, set STATE_COMPLETED and set the result
      to whatever the function returned.

      * if StopIteration is raised, set STATE_COMPLETED and return self.

      * if any other exception is raised, set STATE_FAILED, handle error
        or send it to the caller, return self

    Returning self is used as an optimization. Coroutine is also an
    Operation which handles its own completion (resuming the caller and
    the waiters).
    """
    if op is self:
        import warnings
        warnings.warn("Running coro %s with itself. Something is fishy." % op)
    assert self.state < self.STATE_COMPLETED, \
        "%s called with %s op %r, coroutine state (%s) should be less than %s!" % (
            self,
            isinstance(op, CoroutineException) and op or
            (hasattr(op, 'state') and {0: 'RUNNING', 1: 'FINALIZED', 2: 'ERRORED'}[op.state] or 'NOP'),
            op,
            self._state_names[self.state],
            self._state_names[self.STATE_COMPLETED]
        )
    #~ assert self.state < self.STATE_COMPLETED, \
    #~     "%s called with:%s, last one:%s, expected state less than %s!" % (
    #~         self,
    #~         op,
    #~         isinstance(self.lastop, CoroutineException) and
    #~             ''.join(traceback.format_exception(*self.lastop.message)) or self.lastop,
    #~         self._state_names[self.STATE_COMPLETED]
    #~     )
    #~ self.lastop = op
    if self.debug:
        print
        if isinstance(op, CoroutineException):
            print 'Running %r with exception:' % self,
            if len(op.args) == 3:
                print '[[['
                traceback.print_exception(*op.args)
                print ']]]'
            else:
                print op.args
        else:
            print 'Running %r with: %r' % (self, op)
    global ident
    ident = self
    try:
        if self.state == self.STATE_RUNNING:
            if self.debug:
                traceback.print_stack(self.coro.gi_frame)
            if isinstance(op, CoroutineException):
                rop = self.coro.throw(*op.args)
            else:
                rop = self.coro.send(op and op.finalize(sched))
        elif self.state == self.STATE_NEED_INIT:
            assert op is None
            self.coro = self.coro(*self.f_args, **self.f_kws)
            del self.f_args
            del self.f_kws
            if self._valid_gen(self.coro):
                self.state = self.STATE_RUNNING
                rop = None
            else:
                self.state = self.STATE_COMPLETED
                self.result = self.coro
                self.coro = None
                rop = self
        else:
            return None
    except StopIteration, e:
        self.state = self.STATE_COMPLETED
        self.result = e.args and e.args[0]
        if hasattr(self.coro, 'close'):
            self.coro.close()
        rop = self
    except (KeyboardInterrupt, GeneratorExit, SystemExit):
        raise
    except:
        self.state = self.STATE_FAILED
        self.result = None
        self.exception = sys.exc_info()
        if hasattr(self.coro, 'close'):
            self.coro.close()
        if not self.caller:
            self.handle_error(op)
        rop = self
        sys.exc_clear()
    finally:
        ident = None
    if self.debug:
        print "Yields %s." % rop
    return rop
0.006787
def onecons_qcqp(z, f, tol=1e-6):
    """ Solves a nonconvex problem
      minimize ||x-z||_2^2
      subject to f(x) = x^T P x + q^T x + r ~ 0
      where the relation ~ is given by f.relop (either <= or ==)
    """
    # if constraint is ineq and z is feasible: z is the solution
    if f.relop == '<=' and f.eval(z) <= 0:
        return z

    if f.eigh is None:
        Psymm = (f.P + f.P.T) / 2.
        f.eigh = LA.eigh(np.asarray(Psymm.todense()))

    lmb, Q = f.eigh
    zhat = Q.T.dot(z)
    qhat = Q.T.dot(f.qarray)

    # now solve a transformed problem
    #   minimize ||xhat - zhat||_2^2
    #   subject to sum(lmb_i xhat_i^2) + qhat^T xhat + r = 0
    # constraint is now equality from complementary slackness
    xhat = lambda nu: -np.divide(nu * qhat - 2 * zhat, 2 * (1 + nu * lmb))
    phi = lambda xhat: lmb.dot(np.power(xhat, 2)) + qhat.dot(xhat) + f.r

    s = -np.inf
    e = np.inf
    for l in lmb:
        if l > 0:
            s = max(s, -1. / l)
        if l < 0:
            e = min(e, -1. / l)
    if s == -np.inf:
        s = -1.
        while phi(xhat(s)) <= 0:
            s *= 2.
    if e == np.inf:
        e = 1.
        while phi(xhat(e)) >= 0:
            e *= 2.

    while e - s > tol:
        m = (s + e) / 2.
        p = phi(xhat(m))
        if p > 0:
            s = m
        elif p < 0:
            e = m
        else:
            s = e = m
            break
    nu = (s + e) / 2.
    return Q.dot(xhat(nu))
0.007386
def before_render(self):
    """Before template render hook
    """
    super(PricelistsView, self).before_render()
    # Render the Add button if the user has the AddPricelist permission
    if check_permission(AddPricelist, self.context):
        self.context_actions[_("Add")] = {
            "url": "createObject?type_name=Pricelist",
            "icon": "++resource++bika.lims.images/add.png"
        }
    # Don't allow any context actions on the Methods folder
    self.request.set("disable_border", 1)
0.003617
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'name') and self.name is not None:
        _dict['name'] = self.name
    if hasattr(self, 'dbpedia_resource') and self.dbpedia_resource is not None:
        _dict['dbpedia_resource'] = self.dbpedia_resource
    if hasattr(self, 'subtype') and self.subtype is not None:
        _dict['subtype'] = self.subtype
    return _dict
0.004032
def eval(self, code, mode="single"):
    """Evaluate code in the context of the frame."""
    if isinstance(code, string_types):
        if PY2 and isinstance(code, text_type):  # noqa
            code = UTF8_COOKIE + code.encode("utf-8")
        code = compile(code, "<interactive>", mode)
    return eval(code, self.globals, self.locals)
0.00551
def get_all_names(self, offset=None, count=None, include_expired=False):
    """
    Get the set of all registered names, with optional pagination
    Returns the list of names.
    """
    if offset is not None and offset < 0:
        offset = None

    if count is not None and count < 0:
        count = None

    cur = self.db.cursor()
    names = namedb_get_all_names(cur, self.lastblock, offset=offset,
                                 count=count, include_expired=include_expired)
    return names
0.015385
def derive_value(self, value):
    """Derives a new event from this one setting the ``value`` attribute.

    Args:
        value (any): The value associated with the derived event.

    Returns:
        IonEvent: The newly generated non-thunk event.
    """
    return IonEvent(
        self.event_type,
        self.ion_type,
        value,
        self.field_name,
        self.annotations,
        self.depth
    )
0.004073
async def error(self, status=500, allowredirect=True, close=True,
                showerror=None, headers=[]):
    """
    Show default error response
    """
    if showerror is None:
        showerror = self.showerrorinfo
    if self._sendHeaders:
        if showerror:
            typ, exc, tb = sys.exc_info()
            if exc:
                await self.write('<span style="white-space:pre-wrap">\n', buffering=False)
                await self.writelines(
                    (self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)),
                    buffering=False)
                await self.write('</span>\n', close, False)
    elif allowredirect and status in self.protocol.errorrewrite:
        await self.rewrite(self.protocol.errorrewrite[status], b'GET')
    elif allowredirect and status in self.protocol.errorredirect:
        await self.redirect(self.protocol.errorredirect[status])
    else:
        self.start_response(status, headers)
        typ, exc, tb = sys.exc_info()
        if showerror and exc:
            await self.write('<span style="white-space:pre-wrap">\n', buffering=False)
            await self.writelines(
                (self.nl2br(self.escape(v)) for v in traceback.format_exception(typ, exc, tb)),
                buffering=False)
            await self.write('</span>\n', close, False)
        else:
            await self.write(b'<h1>' + _createstatus(status) + b'</h1>', close, False)
0.016173
def prepare_intervals(data, region_file, work_dir):
    """Prepare interval regions for targeted and gene based regions.
    """
    target_file = os.path.join(work_dir, "%s-target.interval_list" % dd.get_sample_name(data))
    if not utils.file_uptodate(target_file, region_file):
        with file_transaction(data, target_file) as tx_out_file:
            params = ["-T", "PreprocessIntervals", "-R", dd.get_ref_file(data),
                      "--interval-merging-rule", "OVERLAPPING_ONLY",
                      "-O", tx_out_file]
            if dd.get_coverage_interval(data) == "genome":
                params += ["--bin-length", "1000", "--padding", "0"]
            else:
                params += ["-L", region_file, "--bin-length", "0", "--padding", "250"]
            _run_with_memory_scaling(params, tx_out_file, data)
    return target_file
0.003505
def batch_means(x, f=lambda y: y, theta=.5, q=.95, burn=0):
    """
    TODO: Use Bayesian CI.

    Returns the half-width of the frequentist confidence interval
    (q'th quantile) of the Monte Carlo estimate of E[f(x)].

    :Parameters:
        x : sequence
            Sampled series. Must be a one-dimensional array.
        f : function
            The MCSE of E[f(x)] will be computed.
        theta : float between 0 and 1
            The batch length will be set to len(x) ** theta.
        q : float between 0 and 1
            The desired quantile.

    :Example:
        >>> batch_means(x, f=lambda x: x**2, theta=.5, q=.95)

    :Reference:
        Flegal, James M. and Haran, Murali and Jones, Galin L. (2007).
        Markov chain Monte Carlo: Can we trust the third significant figure?
        <Publication>

    :Note:
        Requires SciPy
    """
    try:
        import scipy
        from scipy import stats
    except ImportError:
        raise ImportError('SciPy must be installed to use batch_means.')

    x = x[burn:]
    n = len(x)
    b = np.int(n ** theta)
    a = n / b
    t_quant = stats.t.isf(1 - q, a - 1)
    Y = np.array([np.mean(f(x[i * b:(i + 1) * b])) for i in xrange(a)])
    sig = b / (a - 1.) * sum((Y - np.mean(f(x))) ** 2)
    return t_quant * sig / np.sqrt(n)
0.000764
def capture_on_device_name(device_name, callback):
    """
    :param device_name: the name (guid) of a device as provided by
        WinPcapDevices.list_devices()
    :param callback: a function to call with each intercepted packet
    """
    with WinPcap(device_name) as capture:
        capture.run(callback=callback)
0.008902
def load_log(args):
    """Load a `logging.Logger` object.

    Arguments
    ---------
    args : `argparse.Namespace` object
        Namespace containing required settings:
        {`args.debug`, `args.verbose`, and `args.log_filename`}.

    Returns
    -------
    log : `logging.Logger` object
    """
    from astrocats.catalog.utils import logger

    # Determine verbosity ('None' means use default)
    log_stream_level = None
    if args.debug:
        log_stream_level = logger.DEBUG
    elif args.verbose:
        log_stream_level = logger.INFO

    # Create log
    log = logger.get_logger(
        stream_level=log_stream_level, tofile=args.log_filename)
    log._verbose = args.verbose
    log._debug = args.debug
    return log
0.001344
def _init_impl(self, data, ctx_list):
    """Sets data and grad."""
    self._ctx_list = list(ctx_list)
    self._ctx_map = [[], []]
    for i, ctx in enumerate(self._ctx_list):
        dev_list = self._ctx_map[ctx.device_typeid & 1]
        while len(dev_list) <= ctx.device_id:
            dev_list.append(None)
        dev_list[ctx.device_id] = i
    self._data = [data.copyto(ctx) for ctx in self._ctx_list]
    self._init_grad()
0.006356
def generate_defect_structure(self, supercell=(1, 1, 1)):
    """
    Returns Defective Vacancy structure, decorated with charge

    Args:
        supercell (int, [3x1], or [[]] (3x3)): supercell integer, vector,
            or scaling matrix
    """
    defect_structure = self.bulk_structure.copy()
    defect_structure.make_supercell(supercell)

    # create a trivial defect structure to find where supercell
    # transformation moves the lattice
    struct_for_defect_site = Structure(
        self.bulk_structure.copy().lattice,
        [self.site.specie],
        [self.site.frac_coords],
        to_unit_cell=True)
    struct_for_defect_site.make_supercell(supercell)
    defect_site = struct_for_defect_site[0]

    poss_deflist = sorted(
        defect_structure.get_sites_in_sphere(defect_site.coords, 2, include_index=True),
        key=lambda x: x[1])
    defindex = poss_deflist[0][2]
    defect_structure.remove_sites([defindex])
    defect_structure.set_charge(self.charge)
    return defect_structure
0.007745
def snip_line(line, max_width, split_at):
    """Shorten a line to a maximum length."""
    if len(line) < max_width:
        return line
    return line[:split_at] + " … " \
        + line[-(max_width - split_at - 3):]
0.004566
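A worked usage sketch: with max_width=20 and split_at=8 the head keeps 8 characters, the " … " separator takes 3, and the tail keeps the remaining 9.

line = "a" * 20 + "b" * 20
print(snip_line(line, max_width=20, split_at=8))
# 'aaaaaaaa … bbbbbbbbb' (8 head + 3 separator + 9 tail = 20 characters)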
def locate_range(self, chrom, start=None, stop=None):
    """Locate slice of index containing all entries within the range
    `key`:`start`-`stop` **inclusive**.

    Parameters
    ----------
    chrom : object
        Chromosome or contig.
    start : int, optional
        Position start value.
    stop : int, optional
        Position stop value.

    Returns
    -------
    loc : slice
        Slice object.

    Examples
    --------
    >>> import allel
    >>> chrom = ['chr2', 'chr2', 'chr1', 'chr1', 'chr1', 'chr3']
    >>> pos = [1, 4, 2, 5, 5, 3]
    >>> idx = allel.ChromPosIndex(chrom, pos)
    >>> idx.locate_range('chr1')
    slice(2, 5, None)
    >>> idx.locate_range('chr2', 1, 4)
    slice(0, 2, None)
    >>> idx.locate_range('chr1', 3, 7)
    slice(3, 5, None)
    >>> try:
    ...     idx.locate_range('chr3', 4, 9)
    ... except KeyError as e:
    ...     print(e)
    ('chr3', 4, 9)
    """
    slice_chrom = self.locate_key(chrom)
    if start is None and stop is None:
        return slice_chrom
    else:
        pos_chrom = SortedIndex(self.pos[slice_chrom])
        try:
            slice_within_chrom = pos_chrom.locate_range(start, stop)
        except KeyError:
            raise KeyError(chrom, start, stop)
        loc = slice(slice_chrom.start + slice_within_chrom.start,
                    slice_chrom.start + slice_within_chrom.stop)
        return loc
0.001269
def save(self):
    """Saves this order to Holvi, returns a tuple with the order itself
    and checkout_uri"""
    if self.code:
        raise HolviError("Orders cannot be updated")
    send_json = self.to_holvi_dict()
    send_json.update({
        'pool': self.api.connection.pool
    })
    url = six.u(self.api.base_url + "order/")
    stat = self.api.connection.make_post(url, send_json)
    # Maybe slightly ugly but I don't want to basically reimplement
    # all but uri formation of the api method
    code = stat["details_uri"].split("/")[-2]
    return (stat["checkout_uri"], self.api.get_order(code))
0.006211
def Clift(Re):
    r'''Calculates drag coefficient of a smooth sphere using the method in
    [1]_ as described in [2]_.

    .. math::
        C_D = \left\{ \begin{array}{ll}
        \frac{24}{Re} + \frac{3}{16} & \mbox{if $Re < 0.01$}\\
        \frac{24}{Re}(1 + 0.1315Re^{0.82 - 0.05\log Re}) & \mbox{if $0.01 < Re < 20$}\\
        \frac{24}{Re}(1 + 0.1935Re^{0.6305}) & \mbox{if $20 < Re < 260$}\\
        10^{[1.6435 - 1.1242\log Re + 0.1558[\log Re]^2]} & \mbox{if $260 < Re < 1500$}\\
        10^{[-2.4571 + 2.5558\log Re - 0.9295[\log Re]^2 + 0.1049[\log Re]^3]} & \mbox{if $1500 < Re < 12000$}\\
        10^{[-1.9181 + 0.6370\log Re - 0.0636[\log Re]^2]} & \mbox{if $12000 < Re < 44000$}\\
        10^{[-4.3390 + 1.5809\log Re - 0.1546[\log Re]^2]} & \mbox{if $44000 < Re < 338000$}\\
        29.78 - 5.3\log Re & \mbox{if $338000 < Re < 400000$}\\
        0.19\log Re - 0.49 & \mbox{if $400000 < Re < 1000000$}
        \end{array} \right.

    Parameters
    ----------
    Re : float
        Particle Reynolds number of the sphere using the surrounding fluid
        density and viscosity, [-]

    Returns
    -------
    Cd : float
        Drag coefficient [-]

    Notes
    -----
    Range is Re <= 1E6.

    Examples
    --------
    >>> Clift(200)
    0.7756342422322543

    References
    ----------
    .. [1] R. Clift, J.R. Grace, M.E. Weber, Bubbles, Drops, and Particles,
       Academic, New York, 1978.
    .. [2] Barati, Reza, Seyed Ali Akbar Salehi Neyshabouri, and Goodarz
       Ahmadi. "Development of Empirical Models with High Accuracy for
       Estimation of Drag Coefficient of Flow around a Smooth Sphere: An
       Evolutionary Approach." Powder Technology 257 (May 2014): 11-19.
       doi:10.1016/j.powtec.2014.02.045.
    '''
    if Re < 0.01:
        return 24. / Re + 3 / 16.
    elif Re < 20:
        return 24. / Re * (1 + 0.1315 * Re ** (0.82 - 0.05 * log10(Re)))
    elif Re < 260:
        return 24. / Re * (1 + 0.1935 * Re ** (0.6305))
    elif Re < 1500:
        return 10 ** (1.6435 - 1.1242 * log10(Re) + 0.1558 * (log10(Re)) ** 2)
    elif Re < 12000:
        return 10 ** (-2.4571 + 2.5558 * log10(Re) - 0.9295 * (log10(Re)) ** 2
                      + 0.1049 * log10(Re) ** 3)
    elif Re < 44000:
        return 10 ** (-1.9181 + 0.6370 * log10(Re) - 0.0636 * (log10(Re)) ** 2)
    elif Re < 338000:
        return 10 ** (-4.3390 + 1.5809 * log10(Re) - 0.1546 * (log10(Re)) ** 2)
    elif Re < 400000:
        return 29.78 - 5.3 * log10(Re)
    else:
        return 0.19 * log10(Re) - 0.49
0.003271
def hash(hash_type, input_text):
    '''Hash input_text with the algorithm choice'''
    hash_funcs = {'MD5': hashlib.md5,
                  'SHA1': hashlib.sha1,
                  'SHA224': hashlib.sha224,
                  'SHA256': hashlib.sha256,
                  'SHA384': hashlib.sha384,
                  'SHA512': hashlib.sha512}

    if hash_type == 'All':
        hash_type = ['MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512']
    else:
        hash_type = [hash_type]

    return [{'Algorithm': h, 'Hash': hash_funcs[h](input_text).hexdigest()}
            for h in hash_type]
0.015025
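A usage sketch; note the stdlib hashlib constructors require bytes, so input_text must be bytes here:

for row in hash('All', b'hello'):
    print(row['Algorithm'], row['Hash'])
# e.g. MD5 5d41402abc4b2a76b9719d911017c592, followed by SHA1, SHA224, ...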
def closing_plugin(self, cancelable=False):
    """Perform actions before parent main window is closed"""
    state = self.splitter.saveState()
    self.set_option('splitter_state', qbytearray_to_str(state))
    filenames = []
    editorstack = self.editorstacks[0]

    active_project_path = None
    if self.projects is not None:
        active_project_path = self.projects.get_active_project_path()
    if not active_project_path:
        self.set_open_filenames()
    else:
        self.projects.set_project_filenames(
            [finfo.filename for finfo in editorstack.data])

    self.set_option('layout_settings',
                    self.editorsplitter.get_layout_settings())
    self.set_option('windows_layout_settings',
                    [win.get_layout_settings() for win in self.editorwindows])
    # self.set_option('filenames', filenames)
    self.set_option('recent_files', self.recent_files)

    # Stop autosave timer before closing windows
    self.autosave.stop_autosave_timer()
    try:
        if not editorstack.save_if_changed(cancelable) and cancelable:
            return False
        else:
            for win in self.editorwindows[:]:
                win.close()
            return True
    except IndexError:
        return True
0.003539
def serve(application, host='127.0.0.1', port=8080, **options):
    """Tornado's HTTPServer.

    This is a high quality asynchronous server with many options.
    For details, please visit:

        http://www.tornadoweb.org/en/stable/httpserver.html#http-server
    """
    # Wrap our WSGI application (potentially stack) in a Tornado adapter.
    container = tornado.wsgi.WSGIContainer(application)

    # Spin up a Tornado HTTP server using this container.
    http_server = tornado.httpserver.HTTPServer(container, **options)
    http_server.listen(int(port), host)

    # Start and block on the Tornado IO loop.
    tornado.ioloop.IOLoop.instance().start()
0.036335
def orthologize(self, ortho_species_id, belast):
    """Decanonical ortholog name used"""
    if (
        self.orthologs
        and ortho_species_id in self.orthologs
        and ortho_species_id != self.species_id
    ):
        self.orthology_species = ortho_species_id
        self.canonical = self.orthologs[ortho_species_id]["canonical"]
        self.decanonical = self.orthologs[ortho_species_id]["decanonical"]
        self.update_nsval(nsval=self.decanonical)
        self.orthologized = True
    elif self.species_id and ortho_species_id not in self.orthologs:
        self.orthologized = False
        belast.partially_orthologized = True

    return self
0.002743
def reset(rh):
    """
    Reset a virtual machine.

    Input:
       Request Handle with the following properties:
          function    - 'POWERVM'
          subfunction - 'RESET'
          userid      - userid of the virtual machine
          parms['maxQueries'] - Maximum number of queries to issue. Optional.
          parms['maxWait']    - Maximum time to wait in seconds. Optional,
                                unless 'maxQueries' is specified.
          parms['poll']       - Polling interval in seconds. Optional,
                                unless 'maxQueries' is specified.

    Output:
       Request Handle updated with the results.
       Return code - 0: ok, non-zero: error
    """

    rh.printSysLog("Enter powerVM.reset, userid: " + rh.userid)

    # Log off the user
    parms = ["-T", rh.userid]
    results = invokeSMCLI(rh, "Image_Deactivate", parms)
    if results['overallRC'] != 0:
        if results['rc'] == 200 and results['rs'] == 12:
            # Tolerated error. Machine is already in the desired state.
            results['overallRC'] = 0
            results['rc'] = 0
            results['rs'] = 0
        else:
            # SMAPI API failed.
            rh.printLn("ES", results['response'])
            rh.updateResults(results)    # Use results from invokeSMCLI

    # Wait for the logoff to complete
    if results['overallRC'] == 0:
        results = waitForVMState(rh, rh.userid, "off",
                                 maxQueries=30, sleepSecs=10)

    # Log the user back on
    if results['overallRC'] == 0:
        parms = ["-T", rh.userid]
        results = invokeSMCLI(rh, "Image_Activate", parms)
        if results['overallRC'] != 0:
            # SMAPI API failed.
            rh.printLn("ES", results['response'])
            rh.updateResults(results)    # Use results from invokeSMCLI

    if results['overallRC'] == 0 and 'maxQueries' in rh.parms:
        if rh.parms['desiredState'] == 'up':
            results = waitForOSState(
                rh,
                rh.userid,
                rh.parms['desiredState'],
                maxQueries=rh.parms['maxQueries'],
                sleepSecs=rh.parms['poll'])
        else:
            results = waitForVMState(
                rh,
                rh.userid,
                rh.parms['desiredState'],
                maxQueries=rh.parms['maxQueries'],
                sleepSecs=rh.parms['poll'])
        if results['overallRC'] == 0:
            rh.printLn("N", rh.userid + ": " + rh.parms['desiredState'])
        else:
            rh.updateResults(results)

    rh.printSysLog("Exit powerVM.reset, rc: " + str(rh.results['overallRC']))
    return rh.results['overallRC']
0.001088
def neighbor(args):
    """
    %prog neighbor agpfile componentID

    Check overlaps of a particular component in agpfile.
    """
    p = OptionParser(neighbor.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    agpfile, componentID = args
    fastadir = "fasta"

    cmd = "grep"
    cmd += " --color -C2 {0} {1}".format(componentID, agpfile)
    sh(cmd)

    agp = AGP(agpfile)
    aorder = agp.order
    if not componentID in aorder:
        print("Record {0} not present in `{1}`."
              .format(componentID, agpfile), file=sys.stderr)
        return

    i, c = aorder[componentID]
    north, south = agp.getNorthSouthClone(i)

    if not north.isCloneGap:
        ar = [north.component_id, componentID, "--dir=" + fastadir]
        if north.orientation == '-':
            ar += ["--qreverse"]
        overlap(ar)

    if not south.isCloneGap:
        ar = [componentID, south.component_id, "--dir=" + fastadir]
        if c.orientation == '-':
            ar += ["--qreverse"]
        overlap(ar)
0.003717
def delete_global_cache(appname='default'):
    """ Removes the global cache shelf file for the given application """
    # close_global_shelf(appname)
    shelf_fpath = get_global_shelf_fpath(appname)
    util_path.remove_file(shelf_fpath, verbose=True, dryrun=False)
0.007576
def lockToColumn(self, index):
    """
    Sets the column that the tree view will lock to.  If None is supplied,
    then locking will be removed.

    :param      index | <int> || None
    """
    self._lockColumn = index

    if index is None:
        self.__destroyLockedView()
        return
    else:
        if not self._lockedView:
            view = QtGui.QTreeView(self.parent())
            view.setModel(self.model())
            view.setSelectionModel(self.selectionModel())
            view.setItemDelegate(self.itemDelegate())
            view.setFrameShape(view.NoFrame)
            view.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
            view.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
            view.setRootIsDecorated(self.rootIsDecorated())
            view.setUniformRowHeights(True)
            view.setFocusProxy(self)
            view.header().setFocusProxy(self.header())
            view.setStyleSheet(self.styleSheet())
            view.setAutoScroll(False)
            view.setSortingEnabled(self.isSortingEnabled())
            view.setPalette(self.palette())
            view.move(self.x(), self.y())

            self.setAutoScroll(False)
            self.setUniformRowHeights(True)

            view.collapsed.connect(self.collapse)
            view.expanded.connect(self.expand)
            view.expanded.connect(self.__updateLockedView)
            view.collapsed.connect(self.__updateLockedView)

            view_head = view.header()
            for i in range(self.columnCount()):
                view_head.setResizeMode(i, self.header().resizeMode(i))

            view.header().sectionResized.connect(self.__updateStandardSection)
            self.header().sectionResized.connect(self.__updateLockedSection)

            vbar = view.verticalScrollBar()
            self.verticalScrollBar().valueChanged.connect(vbar.setValue)

            self._lockedView = view

        self.__updateLockedView()
0.005616
def relatedItems(self):
    '''
    If this item is associated with a registration, then return all other
    items associated with the same registration.
    '''
    if self.registration:
        return self.registration.revenueitem_set.exclude(pk=self.pk)
0.010676
def parse_value(self, value):
    """Parse string into instance of `datetime`."""
    if isinstance(value, datetime.datetime):
        return value
    if value:
        return parse(value)
    else:
        return None
0.008097
def to_markdown(self):
    """Converts to markdown

    :return: item in markdown format
    """
    if self.type == "text":
        return self.text
    elif self.type == "url" or self.type == "image":
        return "[" + self.text + "](" + self.attributes["ref"] + ")"
    elif self.type == "title":
        return "#" * int(self.attributes["size"]) + " " + self.text
    return None
0.004695
def e():
    """This is a hypothetical reference radiator. All wavelengths in CIE
    illuminant E are weighted equally with a relative spectral power of 100.0.
    """
    lmbda = 1.0e-9 * numpy.arange(300, 831)
    data = numpy.full(lmbda.shape, 100.0)
    return lmbda, data
0.00361
def FileHashIndexQuery(self, subject, target_prefix, limit=100):
    """Search the index for matches starting with target_prefix.

    Args:
      subject: The index to use. Should be a urn that points to the sha256
        namespace.
      target_prefix: The prefix to match against the index.
      limit: Either a tuple of (start, limit) or a maximum number of results
        to return.

    Yields:
      URNs of files which have the same data as this file - as read from the
      index.
    """
    if isinstance(limit, (tuple, list)):
        start, length = limit  # pylint: disable=unpacking-non-sequence
    else:
        start = 0
        length = limit

    prefix = (DataStore.FILE_HASH_TEMPLATE % target_prefix).lower()
    results = self.ResolvePrefix(subject, prefix, limit=limit)

    for i, (_, hit, _) in enumerate(results):
        if i < start:
            continue
        if i >= start + length:
            break
        yield rdfvalue.RDFURN(hit)
0.008359
def populate_keys_tree(self):
    """Reads the HOTKEYS global variable and insert all data in
    the TreeStore used by the preferences window treeview.
    """
    for group in HOTKEYS:
        parent = self.store.append(None, [None, group['label'], None, None])
        for item in group['keys']:
            if item['key'] == "show-hide" or item['key'] == "show-focus":
                accel = self.settings.keybindingsGlobal.get_string(item['key'])
            else:
                accel = self.settings.keybindingsLocal.get_string(item['key'])
            gsettings_path = item['key']
            keycode, mask = Gtk.accelerator_parse(accel)
            keylabel = Gtk.accelerator_get_label(keycode, mask)
            self.store.append(parent, [gsettings_path, item['label'], keylabel, accel])
    self.get_widget('treeview-keys').expand_all()
0.006601
def change_frozen_attr(self):
    """Changes frozen state of cell if there is no selection"""
    # Selections are not supported
    if self.grid.selection:
        statustext = _("Freezing selections is not supported.")
        post_command_event(self.main_window, self.StatusBarMsg,
                           text=statustext)

    cursor = self.grid.actions.cursor
    frozen = self.grid.code_array.cell_attributes[cursor]["frozen"]

    if frozen:
        # We have a frozen cell that has to be unfrozen
        # Delete frozen cache content
        self.grid.code_array.frozen_cache.pop(repr(cursor))
    else:
        # We have a non-frozen cell that has to be frozen
        # Add frozen cache content
        res_obj = self.grid.code_array[cursor]
        self.grid.code_array.frozen_cache[repr(cursor)] = res_obj

    # Set the new frozen state / code
    selection = Selection([], [], [], [], [cursor[:2]])
    self.set_attr("frozen", not frozen, selection=selection)
0.001878
def dump_selected_keys_or_addrs(wallet_obj, used=None, zero_balance=None):
    '''
    Works for both public key only or private key access
    '''
    if wallet_obj.private_key:
        content_str = 'private keys'
    else:
        content_str = 'addresses'

    if not USER_ONLINE:
        # NOTE: the original passed two arguments to a single %s placeholder,
        # which raises TypeError; one argument is correct here.
        puts(colored.red('\nInternet connection required, would you like to dump *all* %s instead?' % content_str))
        if confirm(user_prompt=DEFAULT_PROMPT, default=True):
            dump_all_keys_or_addrs(wallet_obj=wallet_obj)
        else:
            return

    mpub = wallet_obj.serialize_b58(private=False)

    if wallet_obj.private_key is None:
        puts('Displaying Public Addresses Only')
        puts('For Private Keys, please open bcwallet with your Master Private Key:\n')
        priv_to_display = '%s123...' % first4mprv_from_mpub(mpub=mpub)
        print_bcwallet_basic_priv_opening(priv_to_display=priv_to_display)

    chain_address_objs = get_addresses_on_both_chains(
        wallet_obj=wallet_obj,
        used=used,
        zero_balance=zero_balance,
    )

    if wallet_obj.private_key and chain_address_objs:
        print_childprivkey_warning()

    addr_cnt = 0
    for chain_address_obj in chain_address_objs:
        if chain_address_obj['index'] == 0:
            print_external_chain()
        elif chain_address_obj['index'] == 1:
            print_internal_chain()
        print_key_path_header()
        for address_obj in chain_address_obj['chain_addresses']:
            print_path_info(
                address=address_obj['pub_address'],
                wif=address_obj.get('wif'),
                path=address_obj['path'],
                coin_symbol=coin_symbol_from_mkey(mpub),
            )
            addr_cnt += 1

    if addr_cnt:
        puts(colored.blue('\nYou can compare this output to bip32.org'))
    else:
        puts('No matching %s in this subset. Would you like to dump *all* %s instead?' % (
            content_str,
            content_str,
        ))
        if confirm(user_prompt=DEFAULT_PROMPT, default=True):
            dump_all_keys_or_addrs(wallet_obj=wallet_obj)
0.001792
def buckets(bucket=None, account=None, matched=False, kdenied=False,
            errors=False, dbpath=None, size=None, denied=False,
            format=None, incomplete=False, oversize=False, region=(),
            not_region=(), inventory=None, output=None, config=None,
            sort=None, tagprefix=None, not_bucket=None):
    """Report on stats by bucket"""

    d = db.db(dbpath)

    if tagprefix and not config:
        raise ValueError(
            "account tag value inclusion requires account config file")

    if config and tagprefix:
        with open(config) as fh:
            data = json.load(fh).get('accounts')
            account_data = {}
            for a in data:
                for t in a['tags']:
                    if t.startswith(tagprefix):
                        account_data[a['name']] = t[len(tagprefix):]

    buckets = []
    for b in sorted(d.buckets(account),
                    key=operator.attrgetter('bucket_id')):
        if bucket and b.name not in bucket:
            continue
        if not_bucket and b.name in not_bucket:
            continue
        if matched and not b.matched:
            continue
        if kdenied and not b.keys_denied:
            continue
        if errors and not b.error_count:
            continue
        if size and b.size < size:
            continue
        if inventory and not b.using_inventory:
            continue
        if denied and not b.denied:
            continue
        if oversize and b.scanned <= b.size:
            continue
        if incomplete and b.percent_scanned >= incomplete:
            continue
        if region and b.region not in region:
            continue
        if not_region and b.region in not_region:
            continue
        if tagprefix:
            setattr(b, tagprefix[:-1], account_data[b.account])
        buckets.append(b)

    if sort:
        key = operator.attrgetter(sort)
        buckets = list(reversed(sorted(buckets, key=key)))

    formatter = format == 'csv' and format_csv or format_plain
    keys = tagprefix and (tagprefix[:-1],) or ()
    formatter(buckets, output, keys=keys)
0.000471
def read_names(rows, source_id=1):
    """Return an iterator of rows ready to insert into table "names".

    Adds columns "is_primary" (identifying the primary name for each tax_id
    with a value of 1) and "is_classified" (always None).

    * rows - iterator of lists (eg, output from read_archive or read_dmp)
    * unclassified_regex - a compiled re matching "unclassified" names

    From the NCBI docs:

    Taxonomy names file (names.dmp):
        tax_id      -- the id of node associated with this name
        name_txt    -- name itself
        unique name -- the unique variant of this name if name not unique
        name class  -- (synonym, common name, ...)
    """
    ncbi_keys = ['tax_id', 'tax_name', 'unique_name', 'name_class']
    extra_keys = ['source_id', 'is_primary', 'is_classified']

    # is_classified applies to species only; we will set this value later
    is_classified = None

    tax_id = ncbi_keys.index('tax_id')
    tax_name = ncbi_keys.index('tax_name')
    unique_name = ncbi_keys.index('unique_name')
    name_class = ncbi_keys.index('name_class')

    yield ncbi_keys + extra_keys

    for tid, grp in itertools.groupby(rows, itemgetter(tax_id)):
        # confirm that each tax_id has exactly one scientific name
        num_primary = 0
        for r in grp:
            is_primary = r[name_class] == 'scientific name'
            # fix primary key uniqueness violation
            if r[unique_name]:
                r[tax_name] = r[unique_name]
            num_primary += is_primary
            yield (r + [source_id, is_primary, is_classified])
        assert num_primary == 1
0.000617
def get_config_status():
    '''
    Get the status of the current DSC Configuration

    Returns:
        dict: A dictionary representing the status of the current DSC
            Configuration on the machine

    CLI Example:

    .. code-block:: bash

        salt '*' dsc.get_config_status
    '''
    cmd = 'Get-DscConfigurationStatus | ' \
          'Select-Object -Property HostName, Status, MetaData, ' \
          '@{Name="StartDate";Expression={Get-Date ($_.StartDate) -Format g}}, ' \
          'Type, Mode, RebootRequested, NumberofResources'
    try:
        return _pshell(cmd, ignore_retcode=True)
    except CommandExecutionError as exc:
        if 'No status information available' in exc.info['stderr']:
            raise CommandExecutionError('Not Configured')
        raise
0.002522
def register(self, method, uri, call_back):
    """Register a class instance function to handle a request.

    :param method: string - HTTP Verb
    :param uri: string - URI for the request
    :param call_back: class instance function that handles the request
    :returns: n/a
    """
    found = False
    self.create_route(uri, False)
    self.routes[uri]['handlers'].register_method(method, call_back)
0.004016
def render(gpg_data, saltenv='base', sls='', argline='', **kwargs):
    '''
    Create a gpg object given a gpg_keydir, and then use it to try to decrypt
    the data to be rendered.
    '''
    if not _get_gpg_exec():
        raise SaltRenderError('GPG unavailable')
    log.debug('Reading GPG keys from: %s', _get_key_dir())

    translate_newlines = kwargs.get('translate_newlines', False)
    return _decrypt_object(gpg_data, translate_newlines=translate_newlines)
0.002137
def protected(self, *tests, **kwargs):
    """Factory of decorators for limiting access to views.

    :tests: *function, optional
        One or more functions that take the args and kwargs of the view and
        return either `True` or `False`. All tests must return True to show
        the view.

    Options:

    :role: str, optional
        Test for the user having a role with this name.
    :roles: list, optional
        Test for the user having **any** role in this list of names.
    :csrf: bool, None, optional
        If ``None`` (the default), the decorator will check the value of the
        CSRF token for POST, PUT or DELETE requests.
        If ``True`` it will do the same also for all requests.
        If ``False``, the value of the CSRF token will not be checked.
    :url_sign_in: str, function, optional
        If any required condition fails, redirect to this place.
        Overrides the default URL. This can also be a callable.
    :request: obj, optional
        Overwrite the request for testing.

    The rest of the ``key=value`` pairs in ``kwargs`` are interpreted as
    tests. The user must have a property `key` with a value equal to
    `value`. If the user has a method named `key`, that method is called
    with `value` as a single argument and must return True to show the view.
    """
    _role = kwargs.pop('role', None)
    _roles = kwargs.pop('roles', None) or []
    _csrf = kwargs.pop('csrf', None)
    _url_sign_in = kwargs.pop('url_sign_in', None)
    _request = kwargs.pop('request', None)
    if _role:
        _roles.append(_role)
    _roles = [to_unicode(r) for r in _roles]
    _tests = tests
    _user_tests = kwargs

    def decorator(f):

        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            logger = logging.getLogger(__name__)
            request = _request or self.request or args and args[0]
            url_sign_in = self._get_url_sign_in(request, _url_sign_in)

            user = self.get_user()
            if not user:
                return self._login_required(request, url_sign_in)

            if hasattr(user, 'has_role') and _roles:
                if not user.has_role(*_roles):
                    logger.debug(u'User `{0}`: has_role fail'.format(user.login))
                    logger.debug(u'User roles: {0}'.format(
                        [r.name for r in user.roles]))
                    return self.wsgi.raise_forbidden()

            for test in _tests:
                test_pass = test(user, *args, **kwargs)
                if not test_pass:
                    logger.debug(u'User `{0}`: test fail'.format(user.login))
                    return self.wsgi.raise_forbidden()

            for name, value in _user_tests.items():
                user_test = getattr(user, name)
                test_pass = user_test(value, *args, **kwargs)
                if not test_pass:
                    logger.debug(u'User `{0}`: test fail'.format(user.login))
                    return self.wsgi.raise_forbidden()

            disable_csrf = _csrf == False  # noqa
            if (not self.wsgi.is_idempotent(request) and not disable_csrf) or _csrf:
                if not self.csrf_token_is_valid(request):
                    logger.debug(u'User `{0}`: invalid CSRF token'.format(user.login))
                    return self.wsgi.raise_forbidden("CSRF token isn't valid")

            return f(*args, **kwargs)

        return wrapper

    return decorator
0.002691
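A self-contained sketch of the test-composition idea behind protected(): stack role checks and arbitrary predicate functions in front of a view. This is a simplification (no CSRF handling, no redirects), and the User class and test signature are assumptions for illustration:

# Simplified, standalone version of composing access tests (illustrative;
# User and the wrapper signature are assumptions, not the original library).
import functools

class User:
    def __init__(self, login, roles):
        self.login = login
        self.roles = roles

    def has_role(self, *names):
        return any(r in self.roles for r in names)

def protected(*tests, roles=()):
    def decorator(f):
        @functools.wraps(f)
        def wrapper(user, *args, **kwargs):
            if roles and not user.has_role(*roles):
                return '403 Forbidden'
            if not all(test(user, *args, **kwargs) for test in tests):
                return '403 Forbidden'
            return f(user, *args, **kwargs)
        return wrapper
    return decorator

@protected(lambda u: u.login != 'guest', roles=['admin'])
def dashboard(user):
    return '200 OK'

print(dashboard(User('alice', ['admin'])))  # 200 OK
print(dashboard(User('guest', ['admin'])))  # 403 Forbidden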
def unreduce_tensor(tensor, shape, axis, keepdims):
  """Reverse summing over a dimension.

  See utils.py.

  Args:
    tensor: The tensor that was reduced.
    shape: A list, the original shape of the tensor before reduction.
    axis: The axis or axes that were summed.
    keepdims: Whether these axes were kept as singleton axes.

  Returns:
    A tensor with axes broadcast to match the shape of the original tensor.
  """
  if not keepdims:
    if axis is None:
      axis = range(len(shape))
    elif isinstance(axis, int):
      axis = axis,  # wrap a single axis in a tuple
    for ax in sorted(axis):
      tensor = tf.expand_dims(tensor, ax)
  # Integer division: tf.tile expects integer multiples, and true division
  # would produce floats under Python 3.
  tile_shape = np.array(shape) // np.array(shape_as_list(tensor))
  return tf.tile(tensor, tile_shape)
0.01108
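The broadcast-back idea is easy to see in plain NumPy. A hedged sketch of what unreduce_tensor does with tf.expand_dims and tf.tile, using NumPy equivalents rather than the TF implementation:

# NumPy analogue of unreduce_tensor: broadcast a reduced array back to the
# pre-reduction shape (illustrative sketch, not the TF implementation).
import numpy as np

original = np.arange(6.0).reshape(2, 3)
reduced = original.sum(axis=0)            # shape (3,), keepdims=False

# Re-insert the summed axis, then tile up to the original shape.
expanded = np.expand_dims(reduced, 0)     # shape (1, 3)
reps = np.array(original.shape) // np.array(expanded.shape)
unreduced = np.tile(expanded, reps)       # shape (2, 3)
print(unreduced.shape)  # (2, 3)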
def setInstitutionLogo(self, pathList: tuple):
        """
        Takes one or more [logo].svg paths.
        If a logo should be clickable, set
        pathList = ( (my_path1.svg, www.something1.html),
                     (my_path2.svg, www.something2.html),
                     ...)
        """
        for p in pathList:
            url = None
            if isinstance(p, (list, tuple)):
                p, url = p
            logo = QtSvg.QSvgWidget(p)
            s = logo.sizeHint()
            aR = s.height() / s.width()
            h = 150
            w = h / aR
            logo.setFixedSize(int(w), int(h))
            self.layout().addWidget(logo)
            if url:
                logo.mousePressEvent = lambda evt, u=url: self._openUrl(evt, u)
0.002522
def _apply_section_children(self, section, hosts): """ Add the variables for each entry in a 'children' section to the hosts belonging to that entry. """ for entry in section['entries']: for hostname in self._group_get_hostnames(entry['name']): host = hosts[hostname] for var_key, var_val in entry['hostvars'].items(): host['hostvars'][var_key] = var_val
0.004367
def equals(self, data):
        """Adds a new `IN` or `=` condition depending on whether a list or a string was provided

        :param data: string or list of values

        :raise:
            - QueryTypeError: if `data` is of an unexpected type
        """

        if isinstance(data, six.string_types):
            return self._add_condition('=', data, types=[int, str])
        elif isinstance(data, list):
            return self._add_condition('IN', ",".join(map(str, data)), types=[str])

        raise QueryTypeError('Expected value of type `str` or `list`, not %s' % type(data))
0.008606
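A tiny standalone sketch of how such a condition might serialize into a query fragment. The fragment syntax below mirrors the `=`/`IN` split described in the docstring, but the exact serialization is an assumption, not the real library internals:

# Illustrative sketch of serializing an equals() condition into a query
# fragment (syntax is an assumption, not the real library's output).
def equals_fragment(field, data):
    if isinstance(data, str):
        return '%s=%s' % (field, data)
    if isinstance(data, list):
        return '%sIN%s' % (field, ','.join(map(str, data)))
    raise TypeError('Expected str or list, not %s' % type(data))

print(equals_fragment('state', 'open'))      # state=open
print(equals_fragment('priority', [1, 2]))   # priorityIN1,2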
def error(bot, update, error): """Log Errors caused by Updates.""" logger.error('Update {} caused error {}'.format(update, error), extra={"tag": "err"})
0.0125
def parse(text, encoding='utf8'): """Parse the querystring into a normalized form.""" # Decode the text if we got bytes. if isinstance(text, six.binary_type): text = text.decode(encoding) return Query(text, split_segments(text))
0.003937
def main(command_line=True, **kwargs):
    """
    NAME
        sio_magic.py

    DESCRIPTION
        converts SIO .mag format files to magic_measurements format files

    SYNTAX
        sio_magic.py [command line options]

    OPTIONS
        -h: prints the help message and quits.
        -usr USER: identify user, default is ""
        -f FILE: specify .mag format input file, required
        -fsa SAMPFILE : specify er_samples.txt file relating samples, sites and location names,
            default is none -- values in SAMPFILE will override selections for -loc (location),
            -spc (designate specimen), and -ncn (sample-site naming convention)
        -F FILE: specify output file, default is magic_measurements.txt
        -Fsy: specify er_synthetics file, default is er_synthetics.txt
        -LP [colon delimited list of protocols, include all that apply]
            AF: af demag
            T: thermal including thellier but not trm acquisition
            S: Shaw method
            I: IRM (acquisition)
            I3d: 3D IRM experiment
            N: NRM only
            TRM: trm acquisition
            ANI: anisotropy experiment
            D: double AF demag
            G: triple AF demag (GRM protocol)
            CR: cooling rate experiment.
                The treatment coding of the measurement file should be: XXX.00,XXX.10, XXX.20 ...XX.70 etc. (XXX.00 is optional)
                where XXX is the temperature and .10,.20... are running numbers of the cooling rate steps.
                XXX.00 is an optional zerofield baseline. XXX.70 is the alteration check.
                syntax in sio_magic is: -LP CR xxx,yyy,zzz,..... xxx -A
                where xxx, yyy, zzz...xxx are cooling times in [K/minutes], separated by commas,
                ordered in the same order as XXX.10,XXX.20 ...XX.70
                if you use a zerofield step then no need to specify the cooling rate for the zerofield
                It is important to add the -A option to the command line so the measurements will not be averaged.
                But users need to make sure that there are no duplicate measurements in the file
        -V [1,2,3] units of IRM field in volts using ASC coil #1,2 or 3
        -spc NUM : specify number of characters to designate a specimen, default = 0
        -loc LOCNAME : specify location/study name, must have either LOCNAME or SAMPFILE or be a synthetic
        -syn INST TYPE: sets these specimens as synthetics created at institution INST and of type TYPE
        -ins INST : specify which demag instrument was used (e.g., SIO-Suzy or SIO-Odette), default is ""
        -dc B PHI THETA: dc lab field (in micro tesla) and phi,theta, default is none
            NB: use PHI, THETA = -1 -1 to signal that it changes, i.e. in anisotropy experiment
        -ac B : peak AF field (in mT) for ARM acquisition, default is none
        -ncn NCON: specify naming convention: default is #1 below
        -A: don't average replicate measurements

    Sample naming convention:
        [1] XXXXY: where XXXX is an arbitrary length site designation and Y
            is the single character sample designation.  e.g., TG001a is the
            first sample from site TG001.    [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
        [5] site name same as sample
        [6] site is entered under a separate column NOT CURRENTLY SUPPORTED
        [7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
            NB: all others you will have to customize yourself
                or e-mail [email protected] for help.
        [8] synthetic - has no site name
        [9] ODP naming convention

    INPUT
        Best to put separate experiments (all AF, thermal, thellier, trm acquisition,
        Shaw, etc.) in separate .mag files (e.g., af.mag, thermal.mag, etc.)

        Format of SIO .mag files:
        Spec Treat CSD Intensity Declination Inclination [optional metadata string]

        Spec: specimen name
        Treat: treatment step
            XXX T in Centigrade
            XXX AF in mT
            for special experiments:
                Thellier:
                    XXX.0 first zero field step
                    XXX.1 first in field step [XXX.0 and XXX.1 can be done in any order]
                    XXX.2 second in-field step at lower temperature (pTRM check)
                    XXX.3 second zero-field step after infield (pTRM check step)
                        XXX.3 MUST be done in this order [XXX.0, XXX.1 [optional XXX.2] XXX.3]
                AARM:
                    X.00 baseline step (AF in zero bias field - high peak field)
                    X.1 ARM step (in field step) where
                        X is the step number in the 15 position scheme
                        (see Appendix to Lecture 13 - http://magician.ucsd.edu/Essentials_2)
                ATRM:
                    X.00 optional baseline
                    X.1 ATRM step (+X)
                    X.2 ATRM step (+Y)
                    X.3 ATRM step (+Z)
                    X.4 ATRM step (-X)
                    X.5 ATRM step (-Y)
                    X.6 ATRM step (-Z)
                    X.7 optional alteration check (+X)
                TRM:
                    XXX.YYY XXX is temperature step of total TRM
                            YYY is dc field in microtesla

        Intensity assumed to be total moment in 10^3 Am^2 (emu)
        Declination: Declination in specimen coordinate system
        Inclination: Inclination in specimen coordinate system

        Optional metadata string: mm/dd/yy;hh:mm;[dC,mT];xx.xx;UNITS;USER;INST;NMEAS
            hh in 24 hours.
            dC or mT units of treatment XXX (see Treat above) for thermal or AF respectively
            xx.xxx DC field
            UNITS of DC field (microT, mT)
            INST: instrument code, number of axes, number of positions
                (e.g., G34 is 2G, three axes, measured in four positions)
            NMEAS: number of measurements in a single position (1,3,200...)
    """
    # initialize some stuff
    mag_file = None
    codelist = None
    infile_type="mag"
    noave=0
    methcode,inst="LP-NO",""
    phi,theta,peakfield,labfield=0,0,0,0
    pTRM,MD,samp_con,Z=0,0,'1',1
    dec=[315,225,180,135,45,90,270,270,270,90,180,180,0,0,0]
    inc=[0,0,0,0,0,-45,-45,0,45,45,45,-45,-90,-45,45]
    tdec=[0,90,0,180,270,0,0,90,0]
    tinc=[0,0,90,0,0,-90,0,0,90]
    missing=1
    demag="N"
    er_location_name=""
    citation='This study'
    args=sys.argv
    fmt='old'
    syn=0
    synfile='er_synthetics.txt'
    samp_infile,Samps='',[]
    trm=0
    irm=0
    specnum=0
    coil=""
    mag_file=""
    #
    # get command line arguments
    #
    meas_file="magic_measurements.txt"
    user=""
    if not command_line:
        user = kwargs.get('user', '')
        meas_file = kwargs.get('meas_file', '')
        syn_file = kwargs.get('syn_file', '')
        mag_file = kwargs.get('mag_file', '')
        labfield = kwargs.get('labfield', '')
        if labfield:
            labfield = float(labfield) *1e-6
        else:
            labfield = 0
        phi = kwargs.get('phi', 0)
        if phi:
            phi = float(phi)
        else:
            phi = 0
        theta = kwargs.get('theta', 0)
        if theta:
            theta=float(theta)
        else:
            theta = 0
        peakfield = kwargs.get('peakfield', 0)
        if peakfield:
            peakfield=float(peakfield) *1e-3
        else:
            peakfield = 0
        specnum = kwargs.get('specnum', 0)
        samp_con = kwargs.get('samp_con', '1')
        er_location_name = kwargs.get('er_location_name', '')
        samp_infile = kwargs.get('samp_infile', '')
        syn = kwargs.get('syn', 0)
        institution = kwargs.get('institution', '')
        syntype = kwargs.get('syntype', '')
        inst = kwargs.get('inst', '')
        noave = kwargs.get('noave', 0)
        codelist = kwargs.get('codelist', '')
        coil = kwargs.get('coil', '')
        cooling_rates = kwargs.get('cooling_rates', '')

    if command_line:
        if "-h" in args:
            print(main.__doc__)
            return False
        if "-usr" in args:
            ind=args.index("-usr")
            user=args[ind+1]
        if '-F' in args:
            ind=args.index("-F")
            meas_file=args[ind+1]
        if '-Fsy' in args:
            ind=args.index("-Fsy")
            synfile=args[ind+1]
        if '-f' in args:
            ind=args.index("-f")
            mag_file=args[ind+1]
        if "-dc" in args:
            ind=args.index("-dc")
            labfield=float(args[ind+1])*1e-6
            phi=float(args[ind+2])
            theta=float(args[ind+3])
        if "-ac" in args:
            ind=args.index("-ac")
            peakfield=float(args[ind+1])*1e-3
        if "-spc" in args:
            ind=args.index("-spc")
            specnum=int(args[ind+1])
        if "-loc" in args:
            ind=args.index("-loc")
            er_location_name=args[ind+1]
        if "-fsa" in args:
            ind=args.index("-fsa")
            samp_infile = args[ind+1]
        if '-syn' in args:
            syn=1
            ind=args.index("-syn")
            institution=args[ind+1]
            syntype=args[ind+2]
            if '-fsy' in args:
                ind=args.index("-fsy")
                synfile=args[ind+1]
        if "-ins" in args:
            ind=args.index("-ins")
            inst=args[ind+1]
        if "-A" in args: noave=1
        if "-ncn" in args:
            ind=args.index("-ncn")
            samp_con=sys.argv[ind+1]
        if '-LP' in args:
            ind=args.index("-LP")
            codelist=args[ind+1]
        if "-V" in args:
            ind=args.index("-V")
            coil=args[ind+1]

    # make sure all initial values are correctly set up (whether they come from the command line or a GUI)
    if samp_infile:
        Samps, file_type = pmag.magic_read(samp_infile)
    if coil:
        coil = str(coil)
        methcode="LP-IRM"
        irmunits = "V"
        if coil not in ["1","2","3"]:
            print(main.__doc__)
            print('not a valid coil specification')
            return False, '{} is not a valid coil specification'.format(coil)
    if mag_file:
        try:
            #with open(mag_file,'r') as finput:
            #    lines = finput.readlines()
            lines=pmag.open_file(mag_file)
        except:
            print("bad mag file name")
            return False, "bad mag file name"
    if not mag_file:
        print(main.__doc__)
        print("mag_file field is required option")
        return False, "mag_file field is required option"

    if specnum!=0:
        specnum=-specnum
    #print 'samp_con:', samp_con
    if samp_con:
        if "4" == samp_con[0]:
            if "-" not in samp_con:
                print("naming convention option [4] must be in form 4-Z where Z is an integer")
                print('---------------')
                return False, "naming convention option [4] must be in form 4-Z where Z is an integer"
            else:
                Z=samp_con.split("-")[1]
                samp_con="4"
        if "7" == samp_con[0]:
            if "-" not in samp_con:
                print("option [7] must be in form 7-Z where Z is an integer")
                return False, "option [7] must be in form 7-Z where Z is an integer"
            else:
                Z=samp_con.split("-")[1]
                samp_con="7"

    if codelist:
        codes=codelist.split(':')
        if "AF" in codes:
            demag='AF'
            if '-dc' not in args: methcode="LT-AF-Z"
            if '-dc' in args: methcode="LT-AF-I"
        if "T" in codes:
            demag="T"
            if '-dc' not in args: methcode="LT-T-Z"
            if '-dc' in args: methcode="LT-T-I"
        if "I" in codes:
            methcode="LP-IRM"
            irmunits="mT"
        if "I3d" in codes:
            methcode="LT-T-Z:LP-IRM-3D"
        if "S" in codes:
            demag="S"
            methcode="LP-PI-TRM:LP-PI-ALT-AFARM"
            trm_labfield=labfield
            ans=input("DC lab field for ARM step: [50uT] ")
            if ans=="":
                arm_labfield=50e-6
            else:
                arm_labfield=float(ans)*1e-6
            ans=input("temperature for total trm step: [600 C] ")
            if ans=="":
                trm_peakT=600+273 # convert to kelvin
            else:
                trm_peakT=float(ans)+273 # convert to kelvin
        if "G" in codes: methcode="LT-AF-G"
        if "D" in codes: methcode="LT-AF-D"
        if "TRM" in codes:
            demag="T"
            trm=1
        if "CR" in codes:
            demag="T"
            cooling_rate_experiment=1
            if command_line:
                ind=args.index("CR")
                cooling_rates=args[ind+1]
                cooling_rates_list=cooling_rates.split(',')
            else:
                cooling_rates_list=str(cooling_rates).split(',')
    if demag=="T" and "ANI" in codes:
        methcode="LP-AN-TRM"
    if demag=="T" and "CR" in codes:
        methcode="LP-CR-TRM"
    if demag=="AF" and "ANI" in codes:
        methcode="LP-AN-ARM"
        if labfield==0: labfield=50e-6
        if peakfield==0: peakfield=.180
    SynRecs,MagRecs=[],[]
    version_num=pmag.get_version()
    ##################################

    if 1:
    #if infile_type=="SIO format":
        for line in lines:
            instcode=""
            if len(line)>2:
                SynRec={}
                MagRec={}
                MagRec['er_location_name']=er_location_name
                MagRec['magic_software_packages']=version_num
                MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
                MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
                MagRec["treatment_ac_field"]='0'
                MagRec["treatment_dc_field"]='0'
                MagRec["treatment_dc_field_phi"]='0'
                MagRec["treatment_dc_field_theta"]='0'
                meas_type="LT-NO"
                rec=line.split()
                if rec[1]==".00":rec[1]="0.00"
                treat=rec[1].split('.')
                if methcode=="LP-IRM":
                    if irmunits=='mT':
                        labfield=float(treat[0])*1e-3
                    else:
                        labfield=pmag.getfield(irmunits,coil,treat[0])
                    if rec[1][0]!="-":
                        phi,theta=0.,90.
                    else:
                        phi,theta=0.,-90.
                    meas_type="LT-IRM"
                    MagRec["treatment_dc_field"]='%8.3e'%(labfield)
                    MagRec["treatment_dc_field_phi"]='%7.1f'%(phi)
                    MagRec["treatment_dc_field_theta"]='%7.1f'%(theta)
                if len(rec)>6:
                    code1=rec[6].split(';') # break e.g., 10/15/02;7:45 into date and time
                    if len(code1)==2: # old format with AM/PM
                        missing=0
                        code2=code1[0].split('/') # break date into mon/day/year
                        code3=rec[7].split(';') # break e.g., AM;C34;200 into time;instr/axes/measuring pos;number of measurements
                        yy=int(code2[2])
                        if yy <90:
                            yyyy=str(2000+yy)
                        else: yyyy=str(1900+yy)
                        mm=int(code2[0])
                        if mm<10:
                            mm="0"+str(mm)
                        else: mm=str(mm)
                        dd=int(code2[1])
                        if dd<10:
                            dd="0"+str(dd)
                        else: dd=str(dd)
                        time=code1[1].split(':')
                        hh=int(time[0])
                        if code3[0]=="PM":hh=hh+12
                        if hh<10:
                            hh="0"+str(hh)
                        else: hh=str(hh)
                        min=int(time[1])
                        if min<10:
                            min= "0"+str(min)
                        else: min=str(min)
                        MagRec["measurement_date"]=yyyy+":"+mm+":"+dd+":"+hh+":"+min+":00.00"
                        MagRec["measurement_time_zone"]='SAN'
                        if inst=="":
                            if code3[1][0]=='C':instcode='SIO-bubba'
                            if code3[1][0]=='G':instcode='SIO-flo'
                        else:
                            instcode=''
                        MagRec["measurement_positions"]=code3[1][2]
                    elif len(code1)>2: # newest format (cryo7 or later)
                        if "LP-AN-ARM" not in methcode:labfield=0
                        fmt='new'
                        date=code1[0].split('/') # break date into mon/day/year
                        yy=int(date[2])
                        if yy <90:
                            yyyy=str(2000+yy)
                        else: yyyy=str(1900+yy)
                        mm=int(date[0])
                        if mm<10:
                            mm="0"+str(mm)
                        else: mm=str(mm)
                        dd=int(date[1])
                        if dd<10:
                            dd="0"+str(dd)
                        else: dd=str(dd)
                        time=code1[1].split(':')
                        hh=int(time[0])
                        if hh<10:
                            hh="0"+str(hh)
                        else: hh=str(hh)
                        min=int(time[1])
                        if min<10:
                            min= "0"+str(min)
                        else:
                            min=str(min)
                        MagRec["measurement_date"]=yyyy+":"+mm+":"+dd+":"+hh+":"+min+":00.00"
                        MagRec["measurement_time_zone"]='SAN'
                        if inst=="":
                            if code1[6][0]=='C':
                                instcode='SIO-bubba'
                            if code1[6][0]=='G':
                                instcode='SIO-flo'
                        else:
                            instcode=''
                        if len(code1)>1:
                            MagRec["measurement_positions"]=code1[6][2]
                        else:
                            MagRec["measurement_positions"]=code1[7] # takes care of awkward format with bubba and flo being different
                        if user=="":user=code1[5]
                        if code1[2][-1]=='C':
                            demag="T"
                        if code1[4]=='microT' and float(code1[3])!=0. and "LP-AN-ARM" not in methcode:
                            labfield=float(code1[3])*1e-6
                        if code1[2]=='mT' and methcode!="LP-IRM":
                            demag="AF"
                            if code1[4]=='microT' and float(code1[3])!=0.:
                                labfield=float(code1[3])*1e-6
                        if code1[4]=='microT' and labfield!=0. and meas_type!="LT-IRM":
                            phi,theta=0.,-90.
                            if demag=="T": meas_type="LT-T-I"
                            if demag=="AF": meas_type="LT-AF-I"
                            MagRec["treatment_dc_field"]='%8.3e'%(labfield)
                            MagRec["treatment_dc_field_phi"]='%7.1f'%(phi)
                            MagRec["treatment_dc_field_theta"]='%7.1f'%(theta)
                        if code1[4]=='' or labfield==0. and meas_type!="LT-IRM":
                            if demag=='T':meas_type="LT-T-Z"
                            if demag=="AF":meas_type="LT-AF-Z"
                            MagRec["treatment_dc_field"]='0'
                if syn==0:
                    MagRec["er_specimen_name"]=rec[0]
                    MagRec["er_synthetic_name"]=""
                    MagRec["er_site_name"]=""
                    if specnum!=0:
                        MagRec["er_sample_name"]=rec[0][:specnum]
                    else:
                        MagRec["er_sample_name"]=rec[0]
                    if samp_infile and Samps: # if samp_infile was provided AND yielded sample data
                        samp=pmag.get_dictitem(Samps,'er_sample_name',MagRec['er_sample_name'],'T')
                        if len(samp)>0:
                            MagRec["er_location_name"]=samp[0]["er_location_name"]
                            MagRec["er_site_name"]=samp[0]["er_site_name"]
                        else:
                            MagRec['er_location_name']=''
                            MagRec["er_site_name"]=''
                    elif int(samp_con)!=6:
                        site=pmag.parse_site(MagRec['er_sample_name'],samp_con,Z)
                        MagRec["er_site_name"]=site
                    if MagRec['er_site_name']=="":
                        print('No site name found for: ',MagRec['er_specimen_name'],MagRec['er_sample_name'])
                    if MagRec["er_location_name"]=="":
                        print('no location name for: ',MagRec["er_specimen_name"])
                else:
                    MagRec["er_specimen_name"]=rec[0]
                    if specnum!=0:
                        MagRec["er_sample_name"]=rec[0][:specnum]
                    else:
                        MagRec["er_sample_name"]=rec[0]
                    MagRec["er_site_name"]=""
                    MagRec["er_synthetic_name"]=MagRec["er_specimen_name"]
                    SynRec["er_synthetic_name"]=MagRec["er_specimen_name"]
                    site=pmag.parse_site(MagRec['er_sample_name'],samp_con,Z)
                    SynRec["synthetic_parent_sample"]=site
                    SynRec["er_citation_names"]="This study"
                    SynRec["synthetic_institution"]=institution
                    SynRec["synthetic_type"]=syntype
                    SynRecs.append(SynRec)
                if float(rec[1])==0:
                    pass
                elif demag=="AF":
                    if methcode != "LP-AN-ARM":
                        MagRec["treatment_ac_field"]='%8.3e' %(float(rec[1])*1e-3) # peak field in tesla
                        if meas_type=="LT-AF-Z":
                            MagRec["treatment_dc_field"]='0'
                    else: # AARM experiment
                        if treat[1][0]=='0':
                            meas_type="LT-AF-Z:LP-AN-ARM:"
                            MagRec["treatment_ac_field"]='%8.3e' %(peakfield) # peak field in tesla
                            MagRec["treatment_dc_field"]='%8.3e'%(0)
                            if labfield!=0 and methcode!="LP-AN-ARM":
                                print("Warning - inconsistency in mag file with lab field - overriding file with 0")
                        else:
                            meas_type="LT-AF-I:LP-AN-ARM"
                            ipos=int(treat[0])-1
                            MagRec["treatment_dc_field_phi"]='%7.1f' %(dec[ipos])
                            MagRec["treatment_dc_field_theta"]='%7.1f'% (inc[ipos])
                            MagRec["treatment_dc_field"]='%8.3e'%(labfield)
                            MagRec["treatment_ac_field"]='%8.3e' %(peakfield) # peak field in tesla
                elif demag=="T" and methcode == "LP-AN-TRM":
                    MagRec["treatment_temp"]='%8.3e' % (float(treat[0])+273.) # temp in kelvin
                    if treat[1][0]=='0':
                        meas_type="LT-T-Z:LP-AN-TRM"
                        MagRec["treatment_dc_field"]='%8.3e'%(0)
                        MagRec["treatment_dc_field_phi"]='0'
                        MagRec["treatment_dc_field_theta"]='0'
                    else:
                        MagRec["treatment_dc_field"]='%8.3e'%(labfield)
                        if treat[1][0]=='7': # alteration check as final measurement
                            meas_type="LT-PTRM-I:LP-AN-TRM"
                        else:
                            meas_type="LT-T-I:LP-AN-TRM"

                        # find the direction of the lab field in two ways:
                        # (1) using the treatment coding (XX.1=+x, XX.2=+y, XX.3=+z, XX.4=-x, XX.5=-y, XX.6=-z)
                        ipos_code=int(treat[1][0])-1
                        # (2) using the magnetization
                        DEC=float(rec[4])
                        INC=float(rec[5])
                        if INC < 45 and INC > -45:
                            if DEC>315 or DEC<45: ipos_guess=0
                            if DEC>45 and DEC<135: ipos_guess=1
                            if DEC>135 and DEC<225: ipos_guess=3
                            if DEC>225 and DEC<315: ipos_guess=4
                        else:
                            if INC >45: ipos_guess=2
                            if INC <-45: ipos_guess=5
                        # prefer the guess over the code
                        ipos=ipos_guess
                        MagRec["treatment_dc_field_phi"]='%7.1f' %(tdec[ipos])
                        MagRec["treatment_dc_field_theta"]='%7.1f'% (tinc[ipos])
                        # check it
                        if ipos_guess!=ipos_code and treat[1][0]!='7':
                            print("-E- ERROR: check specimen %s step %s, ATRM measurements, coding does not match the direction of the lab field!"%(rec[0],".".join(list(treat))))

                elif demag=="S": # Shaw experiment
                    if treat[1][1]=='0':
                        if int(treat[0])!=0:
                            MagRec["treatment_ac_field"]='%8.3e' % (float(treat[0])*1e-3) # AF field in tesla
                            MagRec["treatment_dc_field"]='0'
                            meas_type="LT-AF-Z" # first AF
                        else:
                            meas_type="LT-NO"
                            MagRec["treatment_ac_field"]='0'
                            MagRec["treatment_dc_field"]='0'
                    elif treat[1][1]=='1':
                        if int(treat[0])==0:
                            MagRec["treatment_ac_field"]='%8.3e' %(peakfield) # peak field in tesla
                            MagRec["treatment_dc_field"]='%8.3e'%(arm_labfield)
                            MagRec["treatment_dc_field_phi"]='%7.1f'%(phi)
                            MagRec["treatment_dc_field_theta"]='%7.1f'%(theta)
                            meas_type="LT-AF-I"
                        else:
                            MagRec["treatment_ac_field"]='%8.3e' % ( float(treat[0])*1e-3) # AF field in tesla
                            MagRec["treatment_dc_field"]='0'
                            meas_type="LT-AF-Z"
                    elif treat[1][1]=='2':
                        if int(treat[0])==0:
                            MagRec["treatment_ac_field"]='0'
                            MagRec["treatment_dc_field"]='%8.3e'%(trm_labfield)
                            MagRec["treatment_dc_field_phi"]='%7.1f'%(phi)
                            MagRec["treatment_dc_field_theta"]='%7.1f'%(theta)
                            MagRec["treatment_temp"]='%8.3e' % (trm_peakT)
                            meas_type="LT-T-I"
                        else:
                            MagRec["treatment_ac_field"]='%8.3e' % ( float(treat[0])*1e-3) # AF field in tesla
                            MagRec["treatment_dc_field"]='0'
                            meas_type="LT-AF-Z"
                    elif treat[1][1]=='3':
                        if int(treat[0])==0:
                            MagRec["treatment_ac_field"]='%8.3e' %(peakfield) # peak field in tesla
                            MagRec["treatment_dc_field"]='%8.3e'%(arm_labfield)
                            MagRec["treatment_dc_field_phi"]='%7.1f'%(phi)
                            MagRec["treatment_dc_field_theta"]='%7.1f'%(theta)
                            meas_type="LT-AF-I"
                        else:
                            MagRec["treatment_ac_field"]='%8.3e' % ( float(treat[0])*1e-3) # AF field in tesla
                            MagRec["treatment_dc_field"]='0'
                            meas_type="LT-AF-Z"

                # Cooling rate experiment
                # added by rshaar
                elif demag=="T" and methcode == "LP-CR-TRM":
                    MagRec["treatment_temp"]='%8.3e' % (float(treat[0])+273.) # temp in kelvin
                    if treat[1][0]=='0':
                        meas_type="LT-T-Z:LP-CR-TRM"
                        MagRec["treatment_dc_field"]='%8.3e'%(0)
                        MagRec["treatment_dc_field_phi"]='0'
                        MagRec["treatment_dc_field_theta"]='0'
                    else:
                        MagRec["treatment_dc_field"]='%8.3e'%(labfield)
                        if treat[1][0]=='7': # alteration check as final measurement
                            meas_type="LT-PTRM-I:LP-CR-TRM"
                        else:
                            meas_type="LT-T-I:LP-CR-TRM"
                        MagRec["treatment_dc_field_phi"]='%7.1f' % (phi) # labfield phi
                        MagRec["treatment_dc_field_theta"]='%7.1f' % (theta) # labfield theta

                    indx=int(treat[1][0])-1
                    # alteration check marked as 0.7 in the measurement file
                    if indx==6:
                        cooling_time= cooling_rates_list[-1]
                    else:
                        cooling_time=cooling_rates_list[indx]
                    MagRec["measurement_description"]="cooling_rate"+":"+cooling_time+":"+"K/min"

                elif demag!='N':
                    if len(treat)==1:treat.append('0')
                    MagRec["treatment_temp"]='%8.3e' % (float(treat[0])+273.) # temp in kelvin
                    if trm==0:  # demag=T and not trmaq
                        if treat[1][0]=='0':
                            meas_type="LT-T-Z"
                        else:
                            MagRec["treatment_dc_field"]='%8.3e' % (labfield) # labfield in tesla (convert from microT)
                            MagRec["treatment_dc_field_phi"]='%7.1f' % (phi) # labfield phi
                            MagRec["treatment_dc_field_theta"]='%7.1f' % (theta) # labfield theta
                            if treat[1][0]=='1':meas_type="LT-T-I" # in-field thermal step
                            if treat[1][0]=='2':
                                meas_type="LT-PTRM-I" # pTRM check
                                pTRM=1
                            if treat[1][0]=='3':
                                MagRec["treatment_dc_field"]='0'  # this is a zero field step
                                meas_type="LT-PTRM-MD" # pTRM tail check
                    else:
                        labfield=float(treat[1])*1e-6
                        MagRec["treatment_dc_field"]='%8.3e' % (labfield) # labfield in tesla (convert from microT)
                        MagRec["treatment_dc_field_phi"]='%7.1f' % (phi) # labfield phi
                        MagRec["treatment_dc_field_theta"]='%7.1f' % (theta) # labfield theta
                        meas_type="LT-T-I:LP-TRM" # trm acquisition experiment

                MagRec["measurement_csd"]=rec[2]
                MagRec["measurement_magn_moment"]='%10.3e'% (float(rec[3])*1e-3) # moment in Am^2 (from emu)
                MagRec["measurement_dec"]=rec[4]
                MagRec["measurement_inc"]=rec[5]
                MagRec["magic_instrument_codes"]=instcode
                MagRec["er_analyst_mail_names"]=user
                MagRec["er_citation_names"]=citation
                if "LP-IRM-3D" in methcode : meas_type=methcode
                #MagRec["magic_method_codes"]=methcode.strip(':')
                MagRec["magic_method_codes"]=meas_type
                MagRec["measurement_flag"]='g'
                MagRec["er_specimen_name"]=rec[0]
                if 'std' in rec[0]:
                    MagRec["measurement_standard"]='s'
                else:
                    MagRec["measurement_standard"]='u'
                MagRec["measurement_number"]='1'
                #print MagRec['treatment_temp']
                MagRecs.append(MagRec)

    MagOuts=pmag.measurements_methods(MagRecs,noave)
    pmag.magic_write(meas_file,MagOuts,'magic_measurements')
    print("results put in ",meas_file)
    if len(SynRecs)>0:
        pmag.magic_write(synfile,SynRecs,'er_synthetics')
        print("synthetics put in ",synfile)
    return True, meas_file
0.023407
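The "XXX.YYY" treatment convention described in the docstring above is easy to demonstrate in isolation. A hedged sketch of how a TRM acquisition step string splits into a kelvin temperature and a dc field in tesla (this is not the PmagPy parser, just the stated convention):

# Hedged sketch of the TRM treatment-step convention "XXX.YYY": XXX is the
# temperature step in Centigrade, YYY a dc field in microtesla.
def parse_trm_step(treat):
    temp_c, dc_ut = treat.split('.')
    temp_k = float(temp_c) + 273.0     # Centigrade -> kelvin
    dc_tesla = float(dc_ut) * 1e-6     # microtesla -> tesla
    return temp_k, dc_tesla

print(parse_trm_step("500.40"))  # (773.0, 4e-05)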
def enqueue(self, item, queue=None):
        """
        Enqueue items.
        If "self.invalid_key_list" (a sequence) is defined, this method
        filters each item against it before putting the item on the
        queue; the list operates as a blacklist.
        This method expects the "item" argument to have a dict-typed
        "data" attribute.
        """
        if queue is None:
            queue = self.queue

        is_enqueue_item = True

        if self.invalid_key_list is not None:
            for entry in self.invalid_key_list:
                if entry in item.data['key']:
                    is_enqueue_item = False
                    log_message = (
                        '{key} is filtered by "invalid_key_list".'
                        ''.format(key=item.data['key'])
                    )
                    self.logger.debug(log_message)
                    break

        if is_enqueue_item:
            try:
                queue.put(item, block=False)
                return True
            except Full:
                self.logger.error('Blackbird item Queue is Full!!!')
                return False
        else:
            return False
0.001682
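A minimal standalone version of the blacklist-then-enqueue pattern, using the stdlib queue module. The dict-shaped item is an assumption standing in for the item object's "data" attribute:

# Standalone sketch of the blacklist filter used by enqueue() (stdlib
# queue; the dict-shaped item is an assumption for illustration).
from queue import Queue, Full

invalid_key_list = ['debug', 'internal']

def enqueue(queue, item):
    if any(entry in item['key'] for entry in invalid_key_list):
        return False  # filtered out by the blacklist
    try:
        queue.put(item, block=False)
        return True
    except Full:
        return False

q = Queue(maxsize=10)
print(enqueue(q, {'key': 'cpu.load'}))       # True
print(enqueue(q, {'key': 'debug.counter'}))  # False (blacklisted)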
def decrement_display_ref_count(self, amount: int=1): """Decrement display reference count to indicate this library item is no longer displayed.""" assert not self._closed self.__display_ref_count -= amount if self.__display_ref_count == 0: self.__is_master = False if self.__data_item: for _ in range(amount): self.__data_item.decrement_data_ref_count()
0.011521
def order_search(self, article_code, **kwargs):
        '''taobao.vas.order.search - export order records

        Used by ISVs to query the order records of their own applications and billing items. All applications combined are currently rate-limited to 200 calls per minute on this API, i.e. within any one minute the total number of calls made by all applications may not exceed 200.'''
        request = TOPRequest('taobao.vas.order.search')
        request['article_code'] = article_code
        for k, v in kwargs.iteritems():
            if k not in ('item_code', 'nick', 'start_created', 'end_created', 'biz_type', 'biz_order_id', 'order_id', 'page_size','page_no') and v is None: continue
            request[k] = v
        self.create(self.execute(request), fields=['article_biz_orders','total_item'], models={'article_biz_orders':ArticleBizOrder})
        return self.article_biz_orders
0.017266
def delete(self, force=False, client=None): """Delete this bucket. The bucket **must** be empty in order to submit a delete request. If ``force=True`` is passed, this will first attempt to delete all the objects / blobs in the bucket (i.e. try to empty the bucket). If the bucket doesn't exist, this will raise :class:`google.cloud.exceptions.NotFound`. If the bucket is not empty (and ``force=False``), will raise :class:`google.cloud.exceptions.Conflict`. If ``force=True`` and the bucket contains more than 256 objects / blobs this will cowardly refuse to delete the objects (or the bucket). This is to prevent accidental bucket deletion and to prevent extremely long runtime of this method. If :attr:`user_project` is set, bills the API request to that project. :type force: bool :param force: If True, empties the bucket's objects then deletes it. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket contains more than 256 objects / blobs. """ client = self._require_client(client) query_params = {} if self.user_project is not None: query_params["userProject"] = self.user_project if force: blobs = list( self.list_blobs( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( "Refusing to delete bucket with more than " "%d objects. If you actually want to delete " "this bucket, please delete the objects " "yourself before calling Bucket.delete()." ) % (self._MAX_OBJECTS_FOR_ITERATION,) raise ValueError(message) # Ignore 404 errors on delete. self.delete_blobs(blobs, on_error=lambda blob: None, client=client) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). client._connection.api_request( method="DELETE", path=self.path, query_params=query_params, _target_object=None, )
0.001124
def shuffle(self): """ Shuffle the deque Deques themselves do not support this, so this will make all items into a list, shuffle that list, clear the deque, and then re-init the deque. """ args = list(self) random.shuffle(args) self.clear() super(DogeDeque, self).__init__(args)
0.00565
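The list round-trip is the whole trick: a deque has no shuffle of its own, so the items are copied out, shuffled, and put back. A self-contained demonstration on a plain collections.deque (the DogeDeque subclass itself is not reproduced here):

# Standalone demonstration of shuffling a deque by round-tripping through
# a list, as the shuffle() method above does.
import random
from collections import deque

def shuffle_deque(d):
    items = list(d)
    random.shuffle(items)
    d.clear()
    d.extend(items)

d = deque([1, 2, 3, 4, 5])
shuffle_deque(d)
print(sorted(d))  # [1, 2, 3, 4, 5] -- same items, new order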
def imagetransformer_b10l_4h_big_uncond_dr03_lr025_tpu(): """TPU related small model.""" hparams = imagetransformer_bas8l_8h_big_uncond_dr03_imgnet() update_hparams_for_tpu(hparams) hparams.batch_size = 4 hparams.num_heads = 4 # heads are expensive on tpu hparams.num_decoder_layers = 10 hparams.learning_rate = 0.25 hparams.learning_rate_warmup_steps = 8000 hparams.layer_preprocess_sequence = "none" hparams.layer_postprocess_sequence = "dan" # hparams.unconditional = True return hparams
0.025194
def to_ascii_bytes(self, filter_func=None): """ Attempt to encode the headers block as ascii If encoding fails, call percent_encode_non_ascii_headers() to encode any headers per RFCs """ try: string = self.to_str(filter_func) string = string.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): self.percent_encode_non_ascii_headers() string = self.to_str(filter_func) string = string.encode('ascii') return string + b'\r\n'
0.003565
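The try-ascii-first, percent-encode-on-failure pattern above can be shown in a few lines. This is a simplification of the real class (it percent-encodes every header value wholesale rather than only the offending ones), and the header-tuple input format is an assumption:

# Sketch of the try-ascii-then-percent-encode fallback (simplified: all
# values are re-encoded on failure; input format is an assumption).
from urllib.parse import quote

def headers_to_ascii(headers):
    text = '\r\n'.join('%s: %s' % kv for kv in headers)
    try:
        return text.encode('ascii') + b'\r\n'
    except UnicodeEncodeError:
        encoded = [(k, quote(v, safe='')) for k, v in headers]
        return headers_to_ascii(encoded)

print(headers_to_ascii([('X-Title', 'caf\u00e9')]))
# b'X-Title: caf%C3%A9\r\n'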
def transform(data, channels, transform_fxn, def_channels = None):
    """
    Apply some transformation function to flow cytometry data.

    This function is a template transformation function, intended to be
    used by other specific transformation functions. It performs basic
    checks on `channels` and `data`. It then applies `transform_fxn` to the
    specified channels. Finally, it rescales ``data.range`` if necessary.

    Parameters
    ----------
    data : FCSData or numpy array
        NxD flow cytometry data where N is the number of events and D is
        the number of parameters (aka channels).
    channels : int, str, list of int, list of str, optional
        Channels on which to perform the transformation. If `channels` is
        None, use def_channels.
    transform_fxn : function
        Function that performs the actual transformation.
    def_channels : int, str, list of int, list of str, optional
        Default set of channels in which to perform the transformation.
        If `def_channels` is None, use all channels.

    Returns
    -------
    data_t : FCSData or numpy array
        NxD transformed flow cytometry data.

    """

    # Copy data array
    data_t = data.copy().astype(np.float64)
    # Default
    if channels is None:
        if def_channels is None:
            channels = range(data_t.shape[1])
        else:
            channels = def_channels
    # Convert channels to iterable
    if not (hasattr(channels, '__iter__') \
            and not isinstance(channels, six.string_types)):
        channels = [channels]
    # Apply transformation
    data_t[:,channels] = transform_fxn(data_t[:,channels])
    # Apply transformation to ``data.range``
    if hasattr(data_t, '_range'):
        for channel in channels:
            # Transform channel name to index if necessary
            channel_idx = data_t._name_to_index(channel)
            if data_t._range[channel_idx] is not None:
                data_t._range[channel_idx] = \
                    transform_fxn(data_t._range[channel_idx])

    return data_t
0.002872
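A hedged usage sketch of the per-channel slicing that transform() performs, in plain NumPy (the FCSData range bookkeeping is omitted; np.log10 stands in for any transform_fxn):

# Applying a transform function to selected channels of an
# events-by-channels array, as transform() does (NumPy only).
import numpy as np

data = np.array([[100., 10.], [1000., 100.]])   # 2 events x 2 channels
data_t = data.copy()
channels = [0]                                  # transform channel 0 only
data_t[:, channels] = np.log10(data_t[:, channels])
print(data_t)
# [[   2.   10.]
#  [   3.  100.]]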
def _relay(self, **kwargs): """Send the request through the server and return the HTTP response.""" retval = None delay_time = 2 # For connection retries read_attempts = 0 # For reading from socket while retval is None: # Evict can return False sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock_fp = sock.makefile('rwb') # Used for pickle try: sock.connect((self.host, self.port)) cPickle.dump(kwargs, sock_fp, cPickle.HIGHEST_PROTOCOL) sock_fp.flush() retval = cPickle.load(sock_fp) except: # pylint: disable=W0702 exc_type, exc, _ = sys.exc_info() socket_error = exc_type is socket.error if socket_error and exc.errno == 111: # Connection refused sys.stderr.write('Cannot connect to multiprocess server. I' 's it running? Retrying in {0} seconds.\n' .format(delay_time)) time.sleep(delay_time) delay_time = min(64, delay_time * 2) elif exc_type is EOFError or socket_error and exc.errno == 104: # Failure during socket READ if read_attempts >= 3: raise ClientException('Successive failures reading ' 'from the multiprocess server.') sys.stderr.write('Lost connection with multiprocess server' ' during read. Trying again.\n') read_attempts += 1 else: raise finally: sock_fp.close() sock.close() if isinstance(retval, Exception): raise retval # pylint: disable=E0702 return retval
0.001542
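The retry policy in _relay() is a capped exponential backoff: start at 2 seconds, double after each refused connection, and never wait more than 64 seconds. A self-contained sketch of just that loop, with the socket work replaced by a callable (the function names here are illustrative):

# Self-contained sketch of the capped exponential backoff used by _relay()
# when the server connection is refused (retry loop only, no sockets).
import time

def with_backoff(attempt_fn, first_delay=2, max_delay=64):
    delay = first_delay
    while True:
        try:
            return attempt_fn()
        except ConnectionRefusedError:
            time.sleep(delay)
            delay = min(max_delay, delay * 2)  # 2, 4, 8, ... capped at 64

# Example: fails twice, then succeeds (tiny delay so the demo runs fast).
attempts = {'n': 0}
def flaky():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise ConnectionRefusedError
    return 'ok'

print(with_backoff(flaky, first_delay=0.01))  # 'ok' after two retries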
async def substr(self, name, start, end=-1): """ Return a substring of the string at key ``name``. ``start`` and ``end`` are 0-based integers specifying the portion of the string to return. """ return await self.execute_command('SUBSTR', name, start, end)
0.00678
def change_last_focused_widget(self, old, now):
        """Keep track of the last focused widget."""
        if (now is None
                and QApplication.activeWindow() is not None):
            QApplication.activeWindow().setFocus()
            self.last_focused_widget = QApplication.focusWidget()
        elif now is not None:
            self.last_focused_widget = now

        self.previous_focused_widget = old
0.007177
def duration(self): """Get duration of composition """ return max([x.comp_location + x.duration for x in self.segments])
0.012195
def correct(self, image, keepSize=False, borderValue=0): ''' remove lens distortion from given image ''' image = imread(image) (h, w) = image.shape[:2] mapx, mapy = self.getUndistortRectifyMap(w, h) self.img = cv2.remap(image, mapx, mapy, cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT, borderValue=borderValue ) if not keepSize: xx, yy, ww, hh = self.roi self.img = self.img[yy: yy + hh, xx: xx + ww] return self.img
0.003247
def idle_task(self): '''called rapidly by mavproxy''' now = time.time() if now-self.last_sent > self.system_time_settings.interval: self.last_sent = now time_us = time.time() * 1000000 if self.system_time_settings.verbose: print("ST: Sending system time: (%u/%u)" % (time_us, self.uptime(),)) self.master.mav.system_time_send(time_us, self.uptime()) if (now-self.last_sent_timesync > self.system_time_settings.interval_timesync): self.last_sent_timesync = now time_ns = time.time() * 1000000000 time_ns += 1234 if self.system_time_settings.verbose: print("ST: Sending timesync request") self.master.mav.timesync_send(0, time_ns) self.last_sent_ts1 = time_ns
0.003254
def value(self, dcode, dextra): """Decode value of symbol together with the extra bits. >>> d = DistanceAlphabet('D', NPOSTFIX=2, NDIRECT=10) >>> d[34].value(2) (0, 35) """ if dcode<16: return [(1,0),(2,0),(3,0),(4,0), (1,-1),(1,+1),(1,-2),(1,+2),(1,-3),(1,+3), (2,-1),(2,+1),(2,-2),(2,+2),(2,-3),(2,+3) ][dcode] if dcode<16+self.NDIRECT: return (0,dcode-16) #we use the original formulas, instead of my clear explanation POSTFIX_MASK = (1 << self.NPOSTFIX) - 1 ndistbits = 1 + ((dcode - self.NDIRECT - 16) >> (self.NPOSTFIX + 1)) hcode = (dcode - self.NDIRECT - 16) >> self.NPOSTFIX lcode = (dcode - self.NDIRECT - 16) & POSTFIX_MASK offset = ((2 + (hcode & 1)) << ndistbits) - 4 distance = ((offset + dextra) << self.NPOSTFIX) + lcode + self.NDIRECT + 1 return (0,distance)
0.038974
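The distance formula above is easier to trust with a worked number. This standalone check reproduces the doctest's case (NPOSTFIX=2, NDIRECT=10, dcode=34, dextra=2) step by step using the same formulas:

# Worked numeric check of the distance formula, with the doctest's inputs.
NPOSTFIX, NDIRECT = 2, 10
dcode, dextra = 34, 2

x = dcode - NDIRECT - 16                 # 8
ndistbits = 1 + (x >> (NPOSTFIX + 1))    # 2 extra bits
hcode = x >> NPOSTFIX                    # 2
lcode = x & ((1 << NPOSTFIX) - 1)        # 0
offset = ((2 + (hcode & 1)) << ndistbits) - 4            # 4
distance = ((offset + dextra) << NPOSTFIX) + lcode + NDIRECT + 1
print(distance)  # 35, matching the doctest's (0, 35)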
def add_empty_fields(untl_dict): """Add empty values if UNTL fields don't have values.""" # Iterate the ordered UNTL XML element list to determine # which elements are missing from the untl_dict. for element in UNTL_XML_ORDER: if element not in untl_dict: # Try to create an element with content and qualifier. try: py_object = PYUNTL_DISPATCH[element]( content='', qualifier='', ) except: # Try to create an element with content. try: py_object = PYUNTL_DISPATCH[element](content='') except: # Try to create an element without content. try: py_object = PYUNTL_DISPATCH[element]() except: raise PyuntlException( 'Could not add empty element field.' ) else: untl_dict[element] = [{'content': {}}] else: # Handle element without children. if not py_object.contained_children: untl_dict[element] = [{'content': ''}] else: untl_dict[element] = [{'content': {}}] else: # Handle element without children. if not py_object.contained_children: untl_dict[element] = [{'content': '', 'qualifier': ''}] else: untl_dict[element] = [{'content': {}, 'qualifier': ''}] # Add empty contained children. for child in py_object.contained_children: untl_dict[element][0].setdefault('content', {}) untl_dict[element][0]['content'][child] = '' return untl_dict
0.002077
def gauge(self, key, gauge=None, default=float("nan"), **dims): """Adds gauge with dimensions to the registry""" return super(RegexRegistry, self).gauge( self._get_key(key), gauge=gauge, default=default, **dims)
0.008368
def pages(self): """A generator of all pages in the stream. Returns: types.GeneratorType[google.cloud.bigquery_storage_v1beta1.ReadRowsPage]: A generator of pages. """ # Each page is an iterator of rows. But also has num_items, remaining, # and to_dataframe. avro_schema, column_names = _avro_schema(self._read_session) for block in self._reader: self._status = block.status yield ReadRowsPage(avro_schema, column_names, block)
0.003731
def fromtimestamp(cls, t, tz=None): """Construct a datetime from a POSIX timestamp (like time.time()). A timezone info object may be passed in as well. """ _check_tzinfo_arg(tz) converter = _time.localtime if tz is None else _time.gmtime t, frac = divmod(t, 1.0) us = int(frac * 1e6) # If timestamp is less than one microsecond smaller than a # full second, us can be rounded up to 1000000. In this case, # roll over to seconds, otherwise, ValueError is raised # by the constructor. if us == 1000000: t += 1 us = 0 y, m, d, hh, mm, ss, weekday, jday, dst = converter(t) ss = min(ss, 59) # clamp out leap seconds if the platform has them result = cls(y, m, d, hh, mm, ss, us, tz) if tz is not None: result = tz.fromutc(result) return result
0.002174
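The divmod split and the rollover guard in fromtimestamp() can be illustrated on their own. A standalone sketch (the rollover branch fires only in the rare case where floating-point rounding pushes the fraction to a full second's worth of microseconds):

# Standalone illustration of the divmod split and the microsecond
# rollover guard described in fromtimestamp() above.
import time

t = time.time()
secs, frac = divmod(t, 1.0)
us = int(frac * 1e6)
if us == 1000000:   # rounding pushed us to a full second: roll it over
    secs += 1
    us = 0
print(int(secs), us)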
def _legacy_add_user(self, name, password, read_only, **kwargs): """Uses v1 system to add users, i.e. saving to system.users. """ # Use a Collection with the default codec_options. system_users = self._collection_default_options('system.users') user = system_users.find_one({"user": name}) or {"user": name} if password is not None: user["pwd"] = auth._password_digest(name, password) if read_only is not None: user["readOnly"] = read_only user.update(kwargs) # We don't care what the _id is, only that it has one # for the replace_one call below. user.setdefault("_id", ObjectId()) try: system_users.replace_one({"_id": user["_id"]}, user, True) except OperationFailure as exc: # First admin user add fails gle in MongoDB >= 2.1.2 # See SERVER-4225 for more information. if 'login' in str(exc): pass # First admin user add fails gle from mongos 2.0.x # and 2.2.x. elif (exc.details and 'getlasterror' in exc.details.get('note', '')): pass else: raise
0.00161
def subkey_for_path(self, path): """ path: a path of subkeys denoted by numbers and slashes. Use H or p for private key derivation. End with .pub to force the key public. Examples: 1H/5/2/1 would call subkey(i=1, is_hardened=True) .subkey(i=5).subkey(i=2).subkey(i=1) and then yield the private key 0/0/458.pub would call subkey(i=0).subkey(i=0) .subkey(i=458) and then yield the public key You should choose one of the H or p convention for private key derivation and stick with it. """ force_public = (path[-4:] == '.pub') if force_public: path = path[:-4] key = self if path: invocations = path.split("/") for v in invocations: is_hardened = v[-1] in ("'pH") if is_hardened: v = v[:-1] v = int(v) key = key.subkey(i=v, is_hardened=is_hardened, as_private=key.secret_exponent() is not None) if force_public and key.secret_exponent() is not None: key = key.public_copy() return key
0.00256
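The path grammar accepted by subkey_for_path() is worth seeing in isolation: a trailing H or p (or ') marks hardened derivation and a trailing ".pub" forces the public key. A self-contained parsing-only sketch (no key math; parse_path is an illustrative name):

# Self-contained sketch of parsing the "1H/5/2/1" path convention
# (parsing only, no key derivation; parse_path is a hypothetical helper).
def parse_path(path):
    force_public = path.endswith('.pub')
    if force_public:
        path = path[:-4]
    steps = []
    for part in path.split('/'):
        hardened = part[-1] in "'pH"
        if hardened:
            part = part[:-1]
        steps.append((int(part), hardened))
    return steps, force_public

print(parse_path("1H/5/2/1"))
# ([(1, True), (5, False), (2, False), (1, False)], False)
print(parse_path("0/0/458.pub"))
# ([(0, False), (0, False), (458, False)], True)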
def get_fields(self, db_name, table_name): """ Parameters: - db_name - table_name """ self.send_get_fields(db_name, table_name) return self.recv_get_fields()
0.005348
def proxy(self, signal_source, *signal_names, weak_ref=False): """ :meth:`.WSignalProxyProto.proxy` implementation """ callback = self.__callback if weak_ref is False else self.__weak_ref_callback for signal_name in signal_names: signal_source.callback(signal_name, callback)
0.024476