def _send_command(self, command, raw_text=False):
    """
    Wrapper for NX-API show method.

    Allows more code sharing between NX-API and SSH.
    """
    return self.device.show(command, raw_text=raw_text)
0.008658
def get_temperature_from_humidity(self):
    """
    Returns the temperature in Celsius from the humidity sensor
    """
    self._init_humidity()  # Ensure humidity sensor is initialised
    temp = 0
    data = self._humidity.humidityRead()
    if (data[2]):  # Temp valid
        temp = data[3]
    return temp
0.005731
def _select_nonblock(sockets, remain=None):
    """This function is called during sendrecv() routine to select
    the available sockets.
    """
    # pcap sockets aren't selectable, so we return all of them
    # and ask the selecting functions to use nonblock_recv instead of recv
    def _sleep_nonblock_recv(self):
        try:
            res = self.nonblock_recv()
            if res is None:
                time.sleep(conf.recv_poll_rate)
            return res
        except TimeoutElapsed:
            return None
    return sockets, _sleep_nonblock_recv
0.001767
def conditional_expected_number_of_purchases_up_to_time(self, t, frequency,
                                                        recency, T):
    """
    Conditional expected number of repeat purchases up to time t.

    Calculate the expected number of repeat purchases up to time t for a
    randomly chosen individual from the population, given they have
    purchase history (frequency, recency, T).

    See Wagner, U. and Hoppe D. (2008).

    Parameters
    ----------
    t: array_like
        times to calculate the expectation for.
    frequency: array_like
        historical frequency of customer.
    recency: array_like
        historical recency of customer.
    T: array_like
        age of the customer.

    Returns
    -------
    array_like
    """
    x = frequency
    r, alpha, a, b = self._unload_params("r", "alpha", "a", "b")

    hyp_term = hyp2f1(r + x, b + x + 1, a + b + x, t / (alpha + T + t))
    first_term = (a + b + x) / (a - 1)
    second_term = 1 - hyp_term * ((alpha + T) / (alpha + t + T)) ** (r + x)
    numerator = first_term * second_term
    denominator = 1 + (a / (b + x)) * ((alpha + T) / (alpha + recency)) ** (r + x)

    return numerator / denominator
0.00318
def image_predict_proba(self, X):
    """
    Predicts class probabilities for the entire image.

    Parameters:
    -----------
    X: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_bands]
        Array of images to be classified.

    Returns:
    --------
    probabilities: array, shape = [n_samples, n_pixels_x, n_pixels_y, n_classes]
        Predicted target probabilities.
    """
    self._check_image(X)
    probabilities = self.pixel_classifier.image_predict_proba(X)
    patches, _ = self._to_patches(probabilities)

    row_steps = self._image_size[0] // self.patch_size[0]
    col_steps = self._image_size[1] // self.patch_size[1]
    ps = self.patch_size[0] * self.patch_size[1]

    # how can this be optimised?
    for i, j, k in itertools.product(range(row_steps), range(col_steps),
                                     range(self._samples)):
        patches[k, i, j, 0] = np.sum(patches[k, i, j, 0]) / ps
        patches[k, i, j, 1] = np.sum(patches[k, i, j, 1]) / ps

    return probabilities
0.003806
def get_location(self, location_id: int, timeout: int = None):
    """Get information about a location

    Parameters
    ----------
    location_id: int
        A location ID.
        See https://github.com/RoyaleAPI/cr-api-data/blob/master/json/regions.json
        for a list of acceptable location IDs
    timeout: Optional[int] = None
        Custom timeout that overwrites Client.timeout
    """
    url = self.api.LOCATIONS + '/' + str(location_id)
    return self._get_model(url, timeout=timeout)
0.009191
def module_set_id(self) -> str:
    """Compute unique id of YANG modules comprising the data model.

    Returns:
        String consisting of hexadecimal digits.
    """
    fnames = sorted(["@".join(m) for m in self.schema_data.modules])
    return hashlib.sha1("".join(fnames).encode("ascii")).hexdigest()
0.006024
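A minimal, self-contained sketch of the hashing scheme above, using a hypothetical list of (module, revision) pairs in place of self.schema_data.modules. Sorting the joined names first makes the id independent of module order.

import hashlib

# Hypothetical (name, revision) pairs standing in for self.schema_data.modules.
modules = [("ietf-interfaces", "2018-02-20"), ("ietf-yang-types", "2013-07-15")]

fnames = sorted("@".join(m) for m in modules)
digest = hashlib.sha1("".join(fnames).encode("ascii")).hexdigest()
print(digest)  # 40 hex digits; stable across runs for the same module set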
def rotateInZMat(theta_deg):
    """Rotate a vector theta degrees around the z-axis

    Equivalent to yaw left. Rotates the vector in the sense that the
    x-axis is rotated towards the y-axis. If looking along the z-axis
    (which is not the way you usually look at it), the vector rotates
    clockwise. If sitting on the vector [1,0,0], the rotation is towards
    the left.

    Input:
    theta_deg (float)
        Angle through which vectors should be rotated in degrees

    Returns:
    A matrix

    To rotate a vector, premultiply by this matrix.
    To rotate the coord sys underneath the vector, post multiply
    """
    ct = np.cos(np.radians(theta_deg))
    st = np.sin(np.radians(theta_deg))

    rMat = np.array([
        [ct, -st, 0],
        [st,  ct, 0],
        [0,   0,  1],
    ])
    return rMat
0.00907
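A quick, self-contained check of the matrix above: premultiplying the x unit vector by a 90-degree rotation should land it on the y-axis.

import numpy as np

def rotateInZMat(theta_deg):
    ct, st = np.cos(np.radians(theta_deg)), np.sin(np.radians(theta_deg))
    return np.array([[ct, -st, 0], [st, ct, 0], [0, 0, 1]])

# Rotating the x unit vector by 90 degrees should yield the y unit vector.
v = np.array([1.0, 0.0, 0.0])
print(np.round(rotateInZMat(90) @ v, 6))  # -> [0. 1. 0.]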
def _delete_json(self, instance, space=None, rel_path=None,
                 extra_params=None, id_field=None, append_to_path=None):
    """
    Base level method for removing data from the API
    """
    model = type(instance)

    # Only API.spaces and API.event should not provide
    # the `space` argument
    if space is None and model not in (Space, Event):
        raise Exception(
            'In general, `API._delete_json` should always '
            'be called with a `space` argument.'
        )

    if not extra_params:
        extra_params = {}

    if not id_field:
        id_field = 'number'

    if not instance.get(id_field, None):
        raise AttributeError(
            '%s does not have a value for the id field \'%s\'' % (
                instance.__class__.__name__, id_field
            )
        )

    # Generate the url to hit
    url = '{0}/{1}/{2}/{3}{4}.json?{5}'.format(
        settings.API_ROOT_PATH,
        settings.API_VERSION,
        rel_path or model.rel_path,
        instance[id_field],
        append_to_path or '',
        urllib.urlencode(extra_params),
    )

    # Fetch the data
    response = requests.delete(
        url=url,
        headers={
            'X-Api-Key': self.key,
            'X-Api-Secret': self.secret,
            'Content-type': "application/json",
        },
    )

    if response.status_code == 204:  # OK
        return True
    else:  # Most likely a 404 Not Found
        raise Exception(
            'Code {0} returned from `{1}`. Response text: "{2}".'.format(
                response.status_code, url, response.text
            )
        )
0.001618
def to_si(self, values, from_unit):
    """Return values in SI and the units to which the values have been converted."""
    if from_unit in self._si_units:
        return values, from_unit
    elif from_unit == 'degF-hours':
        return self.to_unit(values, 'degC-hours', from_unit), 'degC-hours'
    else:
        return self.to_unit(values, 'degC-days', from_unit), 'degC-days'
0.007299
def place_order(self, amount, price, side, ord_type, symbol='btcusd', exchange='bitfinex'):
    """
    Submit a new order.
    :param amount:
    :param price:
    :param side:
    :param ord_type:
    :param symbol:
    :param exchange:
    :return:
    """
    payload = {
        "request": "/v1/order/new",
        "nonce": self._nonce,
        "symbol": symbol,
        "amount": amount,
        "price": price,
        "exchange": exchange,
        "side": side,
        "type": ord_type
    }

    signed_payload = self._sign_payload(payload)
    r = requests.post(self.URL + "/order/new", headers=signed_payload, verify=True)
    json_resp = r.json()

    # A successful response contains an 'order_id'; otherwise the API
    # returns an error 'message' instead.
    try:
        json_resp['order_id']
    except KeyError:
        return json_resp['message']

    return json_resp
0.005663
def add_chapter(self, title):
    '''
    Adds a new chapter to the report.

    :param str title: Title of the chapter.
    '''
    chap_id = 'chap%s' % self.chap_counter
    self.chap_counter += 1

    self.sidebar += '<a href="#%s" class="list-group-item">%s</a>\n' % (
        chap_id, title)
    self.body += '<h1 id="%s">%s</h1>\n' % (chap_id, title)
0.005115
def should_be_hidden_as_cause(exc):
    """ Used everywhere to decide if some exception type should be displayed
    or hidden as the cause of an error """
    # reduced traceback in case of HasWrongType (instance_of checks)
    from valid8.validation_lib.types import HasWrongType, IsWrongType
    return isinstance(exc, (HasWrongType, IsWrongType))
0.00578
def _single_orbit_find_actions(orbit, N_max, toy_potential=None,
                               force_harmonic_oscillator=False):
    """
    Find approximate actions and angles for samples of a phase-space orbit,
    `w`, at times `t`. Uses toy potentials with known, analytic action-angle
    transformations to approximate the true coordinates as a Fourier sum.

    This code is adapted from Jason Sanders'
    `genfunc <https://github.com/jlsanders/genfunc>`_

    .. todo::

        Wrong shape for w -- should be (6,n) as usual...

    Parameters
    ----------
    orbit : `~gala.dynamics.Orbit`
    N_max : int
        Maximum integer Fourier mode vector length, |n|.
    toy_potential : Potential (optional)
        Fix the toy potential class.
    force_harmonic_oscillator : bool (optional)
        Force using the harmonic oscillator potential as the toy potential.
    """
    if orbit.norbits > 1:
        raise ValueError("must be a single orbit")

    if toy_potential is None:
        toy_potential = fit_toy_potential(
            orbit, force_harmonic_oscillator=force_harmonic_oscillator)
    else:
        logger.debug("Using *fixed* toy potential: {}"
                     .format(toy_potential.parameters))

    if isinstance(toy_potential, IsochronePotential):
        orbit_align = orbit.align_circulation_with_z()
        w = orbit_align.w()

        dxyz = (1, 2, 2)
        circ = np.sign(w[0, 0]*w[4, 0] - w[1, 0]*w[3, 0])
        sign = np.array([1., circ, 1.])
        orbit = orbit_align
    elif isinstance(toy_potential, HarmonicOscillatorPotential):
        dxyz = (2, 2, 2)
        sign = 1.
        w = orbit.w()
    else:
        raise ValueError("Invalid toy potential.")

    t = orbit.t.value

    # Now find toy actions and angles
    aaf = toy_potential.action_angle(orbit)
    if aaf[0].ndim > 2:
        aa = np.vstack((aaf[0].value[..., 0], aaf[1].value[..., 0]))
    else:
        aa = np.vstack((aaf[0].value, aaf[1].value))

    if np.any(np.isnan(aa)):
        ix = ~np.any(np.isnan(aa), axis=0)
        aa = aa[:, ix]
        t = t[ix]
        warnings.warn("NaN value in toy actions or angles!")
        if sum(ix) > 1:
            raise ValueError("Too many NaN values in toy actions or angles!")

    t1 = time.time()
    A, b, nvecs = _action_prepare(aa, N_max, dx=dxyz[0], dy=dxyz[1],
                                  dz=dxyz[2])
    actions = np.array(solve(A, b))
    logger.debug("Action solution found for N_max={}, size {} symmetric"
                 " matrix in {} seconds"
                 .format(N_max, len(actions), time.time()-t1))

    t1 = time.time()
    A, b, nvecs = _angle_prepare(aa, t, N_max, dx=dxyz[0], dy=dxyz[1],
                                 dz=dxyz[2], sign=sign)
    angles = np.array(solve(A, b))
    logger.debug("Angle solution found for N_max={}, size {} symmetric"
                 " matrix in {} seconds"
                 .format(N_max, len(angles), time.time()-t1))

    # Just some checks
    if len(angles) > len(aa):
        warnings.warn("More unknowns than equations!")

    J = actions[:3]  # * sign
    theta = angles[:3]
    freqs = angles[3:6]  # * sign

    return dict(actions=J*aaf[0].unit,
                angles=theta*aaf[1].unit,
                freqs=freqs*aaf[2].unit,
                Sn=actions[3:], dSn_dJ=angles[6:],
                nvecs=nvecs)
0.000911
def merge_groups(self, indices):
    """Extend the lists within the DICOM groups dictionary.

    The indices will indicate which list have to be extended by which
    other list.

    Parameters
    ----------
    indices: list or tuple of 2 iterables of int, both having the same len
        The indices of the lists that have to be merged, both iterables
        items will be read pair by pair, the first is the index to the
        list that will be extended with the list of the second index.
        The indices can be constructed with Numpy e.g.,
        indices = np.where(square_matrix)
    """
    try:
        merged = merge_dict_of_lists(self.dicom_groups, indices,
                                     pop_later=True, copy=True)
        self.dicom_groups = merged
    except IndexError:
        raise IndexError('Index out of range to merge DICOM groups.')
0.002116
def form_valid(self, post_form, attachment_formset, poll_option_formset, **kwargs):
    """ Processes valid forms. """
    save_poll_option_formset = poll_option_formset is not None \
        and not self.preview

    valid = super().form_valid(
        post_form, attachment_formset,
        poll_option_formset=poll_option_formset, **kwargs)

    if save_poll_option_formset:
        poll_option_formset.topic = self.forum_post.topic
        poll_option_formset.save(
            poll_question=post_form.cleaned_data.pop('poll_question', None),
            poll_max_options=post_form.cleaned_data.pop('poll_max_options', None),
            poll_duration=post_form.cleaned_data.pop('poll_duration', None),
            poll_user_changes=post_form.cleaned_data.pop('poll_user_changes', None),
        )

    return valid
0.009227
def dependency_of_targets(targets, op):
    """
    Check that op is in the subgraph induced by the dependencies of targets.
    The result is memoized.

    This is useful if some SessionRunHooks should be run only together with certain ops.

    Args:
        targets: a tuple of ops or tensors. The targets to find dependencies of.
        op (tf.Operation or tf.Tensor):

    Returns:
        bool: True if any one of `targets` depend on `op`.
    """
    # TODO tensorarray? sparsetensor?
    if isinstance(op, tf.Tensor):
        op = op.op
    assert isinstance(op, tf.Operation), op

    from tensorflow.contrib.graph_editor import get_backward_walk_ops
    # alternative implementation can use graph_util.extract_sub_graph
    dependent_ops = get_backward_walk_ops(targets, control_inputs=True)
    return op in dependent_ops
0.003597
def assembly_length(self):
    """
    Use SeqIO.parse to extract the total number of bases in each assembly file
    """
    for sample in self.metadata:
        # Only determine the assembly length if it has not been previously calculated
        if not GenObject.isattr(sample, 'assembly_length'):
            # Create the assembly_length attribute, and set it to 0
            sample.assembly_length = 0
            for record in SeqIO.parse(sample.bestassemblyfile, 'fasta'):
                # Update the assembly_length attribute with the length of the current contig
                sample.assembly_length += len(record.seq)
            # Write the updated object to file
            self.write_json(sample)
0.006527
def apply_transform(
        t: Union[List[List[float]], np.ndarray],
        pos: Tuple[float, float, float],
        with_offsets=True) -> Tuple[float, float, float]:
    """
    Change of base using a transform matrix. Primarily used to render a point
    in space in a way that is more readable for the user.

    :param t: A transformation matrix from one 3D space [A] to another [B]
    :param pos: XYZ point in space A
    :param with_offsets: Whether to apply the transform as an affine transform
                         or as a standard transform. You might use
                         with_offsets=False
    :return: corresponding XYZ point in space B
    """
    extended = 1 if with_offsets else 0
    return tuple(dot(t, list(pos) + [extended])[:3])
0.001309
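A self-contained illustration of the affine trick above, using a hypothetical 4x4 translation matrix: appending 1 to the point lets the matrix's offset column participate, while appending 0 applies only the rotation/scale part.

import numpy as np

# Hypothetical transform: identity rotation plus a (10, 20, 30) offset.
t = np.array([[1, 0, 0, 10],
              [0, 1, 0, 20],
              [0, 0, 1, 30],
              [0, 0, 0, 1]], dtype=float)
pos = (1.0, 2.0, 3.0)

with_offsets = tuple(t.dot(list(pos) + [1])[:3])     # -> (11.0, 22.0, 33.0)
without_offsets = tuple(t.dot(list(pos) + [0])[:3])  # -> (1.0, 2.0, 3.0)
print(with_offsets, without_offsets)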
def modulePath(self):
    """
    Returns the module path information for this proxy plugin. This path
    will represent the root module that will be imported when the first
    instance of this plugin is created.

    :return <str>
    """
    base_path = os.path.dirname(self.filepath())
    module_path = self.importPath()
    module_path = os.path.expanduser(os.path.expandvars(module_path))
    if module_path.startswith('.'):
        module_path = os.path.abspath(os.path.join(base_path, module_path))
    return module_path
0.005051
def received_sig_option_changed(self, option, value):
    """
    Called when sig_option_changed is received.

    If option being changed is autosave_mapping, then synchronize new
    mapping with all editor stacks except the sender.
    """
    if option == 'autosave_mapping':
        for editorstack in self.editorstacks:
            if editorstack != self.sender():
                editorstack.autosave_mapping = value
    self.sig_option_changed.emit(option, value)
0.003831
def fetch(self, request, spider):
    """download_func"""
    info = self._extract_key_info(request)
    ret = self.store.fetch_file(request.url, info['key'], info['bucket'])
    return Response(request.url, body=json.dumps(ret))
0.00813
def load_entry_point_actions(self, entry_point_group):
    """Load actions from an entry point group.

    :param entry_point_group: The entrypoint for extensions.
    """
    for ep in pkg_resources.iter_entry_points(group=entry_point_group):
        self.register_action(ep.load())
0.006601
def parse_md_code_options(options):
    """Parse 'python class key="value"' into
    [('python', None), ('class', None), ('key', 'value')]"""
    metadata = []
    while options:
        name_and_value = re.split(r'[\s=]+', options, maxsplit=1)
        name = name_and_value[0]

        # Equal sign in between name and what's next?
        if len(name_and_value) == 2:
            sep = options[len(name):-len(name_and_value[1])]
            has_value = sep.find('=') >= 0
            options = name_and_value[1]
        else:
            has_value = False
            options = ''

        if not has_value:
            metadata.append((name, None))
            continue

        try:
            value = loads(options)
            options = ''
        except JSONDecodeError as err:
            try:
                split = err.colno - 1
            except AttributeError:
                # str(err) is like: "ValueError: Extra data: line 1 column 7
                # - line 1 column 50 (char 6 - 49)"
                match = re.match(r'.*char ([0-9]*)', str(err))
                split = int(match.groups()[0])
            value = loads(options[:split])
            options = options[split:]

        metadata.append((name, value))

    return metadata
0.002427
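A minimal, self-contained look at the two building blocks the parser combines: re.split to peel off the next token, and json.loads to decode a quoted value (the original module imports loads and JSONDecodeError from json; the sample option string here is made up).

import re
from json import loads

options = 'key="value" .hide'
name, rest = re.split(r'[\s=]+', options, maxsplit=1)
sep = options[len(name):-len(rest)]        # the text between token and remainder
print(name, '=' in sep, loads('"value"'))  # key True value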
def results(self, times='all', t_precision=12, **kwargs):
    r"""
    Fetches the calculated quantity from the algorithm and returns it as an
    array.

    Parameters
    ----------
    times : scalar or list
        Time steps to be returned. The default value is 'all' which results
        in returning all time steps. If a scalar is given, only the
        corresponding time step is returned. If a range is given
        (e.g., 'range(0, 1, 1e-3)'), time steps in this range are returned.

    t_precision : integer
        The time precision (number of decimal places). Default value is 12.

    Notes
    -----
    The keyword steps is interpreted in the same way as times.
    """
    if 'steps' in kwargs.keys():
        times = kwargs['steps']
    t_pre = t_precision
    quantity = self.settings['quantity']
    q = [k for k in list(self.keys()) if quantity in k]
    if times == 'all':
        t = q
    elif type(times) in [float, int]:
        n = int(-dc(str(round(times, t_pre))).as_tuple().exponent *
                (round(times, t_pre) != int(times)))
        t_str = (str(int(round(times, t_pre)*10**n)) +
                 ('e-'+str(n))*(n != 0))
        t = [k for k in q if t_str == k.split('@')[-1]]
    elif 'range' in times:
        t = times.replace(' ', '')
        t = t[6:-1]
        t = t.split(',')
        out = np.arange(float(t[0]), float(t[1]), float(t[2]))
        out = np.append(out, float(t[1]))
        out = np.unique(out)
        out = np.around(out, decimals=t_pre)
        t = []
        for i in out:
            n = int(-dc(str(round(i, t_pre))).as_tuple().exponent *
                    (round(i, t_pre) != int(i)))
            j = (str(int(round(i, t_pre)*10**n)) + ('e-'+str(n))*(n != 0))
            t_str = [k for k in q if j == k.split('@')[-1]]
            t += (t_str)
    d = {k: self[k] for k in t}
    return d
0.000975
def _write_packed_data(self, data_out, table):
    """This is kind of a legacy function - this functionality may be useful
    for some people, so even though the default for writing CSV is now
    writing unpacked data (divided by independent variable) this method is
    still available and accessible if the ``pack`` flag is specified in
    Writer's options.

    :param data_out: output file like object to which data will be written
    :param table: input table
    :type table: hepdata_converter.parsers.Table
    """
    headers = []
    data = []
    qualifiers_marks = []
    qualifiers = {}

    self._extract_independent_variables(table, headers, data, qualifiers_marks)

    for dependent_variable in table.dependent_variables:
        self._parse_dependent_variable(dependent_variable, headers,
                                       qualifiers, qualifiers_marks, data)

    self._write_metadata(data_out, table)
    self._write_csv_data(data_out, qualifiers, qualifiers_marks, headers, data)
0.007874
def _init_tag_params(self, tag, params):
    """
    Alternative constructor used when the tag parameters are added to the
    HTMLElement (HTMLElement(tag, params)).

    This method just creates a string and then passes it to
    :meth:`_init_tag`.

    Args:
        tag (str): HTML tag as string.
        params (dict): HTML tag parameters as dictionary.
    """
    self._element = tag
    self.params = params
    self._parseTagName()
    self._istag = True
    self._isendtag = False
    self._isnonpairtag = False

    self._element = self.tagToString()
0.0032
def update_tags(self, idlist, tags_add=None, tags_remove=None):
    """
    Updates the 'tags' field for a bug.
    """
    tags = {}
    if tags_add:
        tags["add"] = self._listify(tags_add)
    if tags_remove:
        tags["remove"] = self._listify(tags_remove)

    d = {
        "ids": self._listify(idlist),
        "tags": tags,
    }

    return self._proxy.Bug.update_tags(d)
0.004545
def chunks(raw):
    """Yield successive EVENT_SIZE sized chunks from raw."""
    for i in range(0, len(raw), EVENT_SIZE):
        yield struct.unpack(EVENT_FORMAT, raw[i:i+EVENT_SIZE])
0.005405
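A runnable sketch of the same pattern with a hypothetical record format (two little-endian unsigned shorts per event) standing in for the module's EVENT_FORMAT and EVENT_SIZE constants.

import struct

EVENT_FORMAT = "<HH"                         # hypothetical: two little-endian uint16s
EVENT_SIZE = struct.calcsize(EVENT_FORMAT)   # 4 bytes

raw = struct.pack("<HHHH", 1, 2, 3, 4)       # two packed events
for i in range(0, len(raw), EVENT_SIZE):
    print(struct.unpack(EVENT_FORMAT, raw[i:i + EVENT_SIZE]))
# (1, 2)
# (3, 4)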
def residual_histogram(df, col_true, col_pred=None):
    """
    Compute histogram of residuals of a predicted DataFrame.

    Note that this method will trigger the defined flow to execute.

    :param df: predicted data frame
    :type df: DataFrame
    :param col_true: column name of true value
    :type col_true: str
    :param col_pred: column name of predicted value, 'prediction_score' by default.
    :type col_pred: str

    :return: histograms for every column, containing histograms and bins.
    """
    if not col_pred:
        col_pred = get_field_name_by_role(df, FieldRole.PREDICTED_VALUE)
    return _run_evaluation_node(df, col_true, col_pred)['hist']
0.002981
def get_objective_bank_form_for_create(self, objective_bank_record_types=None):
    """Gets the objective bank form for creating new objective banks.

    A new form should be requested for each create transaction.

    arg:    objectiveBankRecordTypes (osid.type.Type): array of
            objective bank record types
    return: (osid.learning.ObjectiveBankForm) - the objective bank form
    raise:  NullArgument - objectiveBankRecordTypes is null
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    raise:  Unsupported - unable to get form for requested record types.
    compliance: mandatory - This method must be implemented.
    """
    if objective_bank_record_types is None:
        pass  # Still need to deal with the record_types argument
    objective_bank_form = objects.ObjectiveBankForm()
    self._forms[objective_bank_form.get_id().get_identifier()] = not CREATED
    return objective_bank_form
0.002804
def threaded_quit(self, arg):
    """ quit command when several threads are involved. """
    threading_list = threading.enumerate()
    mythread = threading.currentThread()
    for t in threading_list:
        if t != mythread:
            ctype_async_raise(t, Mexcept.DebuggerQuit)
    raise Mexcept.DebuggerQuit
0.007874
def check_connection(host='localhost', port=27017, username=None, password=None,
                     authdb=None, max_delay=1):
    """Check if a connection could be made to the mongo process specified

    Args:
        host(str)
        port(int)
        username(str)
        password(str)
        authdb (str): database to use for authentication
        max_delay(int): Number of milliseconds to wait for connection

    Returns:
        bool: If connection could be established
    """
    # uri looks like:
    # mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
    if username and password:
        uri = ("mongodb://{}:{}@{}:{}/{}"
               .format(quote_plus(username), quote_plus(password), host, port, authdb))
        log_uri = ("mongodb://{}:****@{}:{}/{}"
                   .format(quote_plus(username), host, port, authdb))
    else:
        log_uri = uri = "mongodb://%s:%s" % (host, port)

    LOG.info("Test connection with uri: %s", log_uri)
    client = MongoClient(uri, serverSelectionTimeoutMS=max_delay)

    try:
        client.server_info()
    except (ServerSelectionTimeoutError, OperationFailure) as err:
        LOG.warning(err)
        return False

    return True
0.007188
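The quote_plus escaping matters whenever credentials contain URI-reserved characters. A standalone illustration with made-up credentials (quote_plus lives in urllib.parse on Python 3; the original module presumably imports it from there or via a compatibility shim):

from urllib.parse import quote_plus

user, password = "app-user", "p@ss:word/1"  # hypothetical credentials
uri = "mongodb://{}:{}@{}:{}/{}".format(
    quote_plus(user), quote_plus(password), "localhost", 27017, "admin")
print(uri)  # mongodb://app-user:p%40ss%3Aword%2F1@localhost:27017/admin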
def from_text(cls, filename, **kwargs):
    '''
    Create a constellation by reading a catalog in from a text file,
    as long as it's formatted as in to_text() with identifiers,
    coordinates, magnitudes.

    Parameters
    ----------
    filename : str
        The filename to read in.

    **kwargs are passed to astropy.io.ascii.read()
    '''
    # FIXME -- add something here to parse id, mag, errors from the table?

    # load the table
    t = ascii.read(filename, **kwargs)

    '''
    # which column is the coordinates?
    i_coordinates = t.colnames.index('ra')

    # everything before the coordinates is an identifier
    identifiers = Table(t.columns[:i_coordinates])

    # the complete coordinates are stored in one
    c = t.columns[i_coordinates:i_coordinates+6]
    coordinates = coord.SkyCoord(**c)
    coordinates.obstime = Time(cls.epoch, format='decimalyear')

    # everything after coordinates is magnitudes
    magnitudes = Table(t.columns[i_coordinates+1:])

    newtable = hstack([Table(identifiers),
                       Table({'coordinates': coordinates}),
                       Table(magnitudes)])
    '''

    this = cls(t)
    this.speak('loaded constellation from {}'.format(filename))
    return this
0.002208
def fletcher16_checkbytes(binbuf, offset):
    """Calculates the Fletcher-16 checkbytes returned as 2 byte binary-string.

    Including the bytes into the buffer (at the position marked by offset) the  # noqa: E501
    global Fletcher-16 checksum of the buffer will be 0. Thus it is easy to verify  # noqa: E501
    the integrity of the buffer on the receiver side.

    For details on the algorithm, see RFC 2328 chapter 12.1.7 and RFC 905 Annex B.  # noqa: E501
    """
    # This is based on the GPLed C implementation in Zebra <http://www.zebra.org/>  # noqa: E501
    if len(binbuf) < offset:
        raise Exception("Packet too short for checkbytes %d" % len(binbuf))

    binbuf = binbuf[:offset] + b"\x00\x00" + binbuf[offset + 2:]
    (c0, c1) = _fletcher16(binbuf)

    x = ((len(binbuf) - offset - 1) * c0 - c1) % 255
    if (x <= 0):
        x += 255

    y = 510 - c0 - x
    if (y > 255):
        y -= 255
    return chb(x) + chb(y)
0.00104
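The helper _fletcher16 is not shown in this snippet; below is a minimal sketch of the standard Fletcher-16 algorithm (two running mod-255 sums) that the checkbyte arithmetic above builds on — an assumption about the helper's behavior, not its actual source.

def fletcher16(data: bytes):
    """Plain Fletcher-16 over `data`, returning the (c0, c1) pair."""
    c0 = c1 = 0
    for byte in data:
        c0 = (c0 + byte) % 255
        c1 = (c1 + c0) % 255
    return c0, c1

print(fletcher16(b"abcde"))  # (240, 200); combined 16-bit sum 0xC8F0, the
                             # well-known "abcde" test vector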
def pack_nibbles(nibbles):
    """pack nibbles to binary

    :param nibbles: a nibbles sequence. may have a terminator
    """
    if nibbles[-1] == NIBBLE_TERMINATOR:
        flags = 2
        nibbles = nibbles[:-1]
    else:
        flags = 0

    oddlen = len(nibbles) % 2
    flags |= oddlen  # set lowest bit if odd number of nibbles
    if oddlen:
        nibbles = [flags] + nibbles
    else:
        nibbles = [flags, 0] + nibbles
    o = b''
    for i in range(0, len(nibbles), 2):
        o += ascii_chr(16 * nibbles[i] + nibbles[i + 1])
    return o
0.001776
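For illustration, a self-contained version of the packer, assuming NIBBLE_TERMINATOR is 16 and that ascii_chr produces a single byte (both assumptions; this mirrors hex-prefix encoding as used in Ethereum-style tries):

NIBBLE_TERMINATOR = 16  # assumed; marks terminated key paths in hex-prefix encoding

def pack_nibbles(nibbles):
    if nibbles[-1] == NIBBLE_TERMINATOR:
        flags, nibbles = 2, nibbles[:-1]
    else:
        flags = 0
    oddlen = len(nibbles) % 2
    flags |= oddlen  # lowest bit set when the nibble count is odd
    nibbles = [flags] + nibbles if oddlen else [flags, 0] + nibbles
    return bytes(16 * nibbles[i] + nibbles[i + 1]
                 for i in range(0, len(nibbles), 2))

print(pack_nibbles([1, 2, 3]).hex())                     # 1123 (odd, no terminator)
print(pack_nibbles([1, 2, 3, NIBBLE_TERMINATOR]).hex())  # 3123 (odd, terminated)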
def create_sum(self, bound=1):
    """
    Create a totalizer object encoding a cardinality constraint on the
    new list of relaxation literals obtained in :func:`process_sels`
    and :func:`process_sums`. The clauses encoding the sum of the
    relaxation literals are added to the SAT oracle. The sum of the
    totalizer object is encoded up to the value of the input
    parameter ``bound``, which is set to ``1`` by default.

    :param bound: right-hand side for the sum to be created
    :type bound: int

    :rtype: :class:`.ITotalizer`

    Note that if Minicard is used as a SAT oracle, native cardinality
    constraints are used instead of :class:`.ITotalizer`.
    """
    if self.solver != 'mc':  # standard totalizer-based encoding
        # new totalizer sum
        t = ITotalizer(lits=self.rels, ubound=bound, top_id=self.topv)

        # updating top variable id
        self.topv = t.top_id

        # adding its clauses to oracle
        for cl in t.cnf.clauses:
            self.oracle.add_clause(cl)
    else:
        # for minicard, use native cardinality constraints instead of the
        # standard totalizer, i.e. create a new (empty) totalizer sum and
        # fill it with the necessary data supported by minicard
        t = ITotalizer()
        t.lits = self.rels

        self.topv += 1  # a new variable will represent the bound

        # proper initial bound
        t.rhs = [None] * (len(t.lits))
        t.rhs[bound] = self.topv

        # new atmostb constraint instrumented with
        # an implication and represented natively
        rhs = len(t.lits)
        amb = [[-self.topv] * (rhs - bound) + t.lits, rhs]

        # add constraint to the solver
        self.oracle.add_atmost(*amb)

    return t
0.001015
def to_shcoeffs(self, nmax=None, normalization='4pi', csphase=1):
    """
    Return the spherical harmonic coefficients using the first n
    Slepian coefficients.

    Usage
    -----
    s = x.to_shcoeffs([nmax])

    Returns
    -------
    s : SHCoeffs class instance
        The spherical harmonic coefficients obtained from using the
        first n Slepian expansion coefficients.

    Parameters
    ----------
    nmax : int, optional, default = x.nmax
        The maximum number of expansion coefficients to use when
        calculating the spherical harmonic coefficients.
    normalization : str, optional, default = '4pi'
        Normalization of the output class: '4pi', 'ortho' or 'schmidt'
        for geodesy 4pi-normalized, orthonormalized, or Schmidt
        semi-normalized coefficients, respectively.
    csphase : int, optional, default = 1
        Condon-Shortley phase convention: 1 to exclude the phase
        factor, or -1 to include it.
    """
    if type(normalization) != str:
        raise ValueError('normalization must be a string. '
                         'Input type was {:s}'
                         .format(str(type(normalization))))

    if normalization.lower() not in set(['4pi', 'ortho', 'schmidt']):
        raise ValueError(
            "normalization must be '4pi', 'ortho' or 'schmidt'. "
            "Provided value was {:s}".format(repr(normalization))
        )

    if csphase != 1 and csphase != -1:
        raise ValueError(
            "csphase must be 1 or -1. Input value was {:s}"
            .format(repr(csphase))
        )

    if nmax is None:
        nmax = self.nmax

    if self.galpha.kind == 'cap':
        shcoeffs = _shtools.SlepianCoeffsToSH(self.falpha,
                                              self.galpha.coeffs, nmax)
    else:
        shcoeffs = _shtools.SlepianCoeffsToSH(self.falpha,
                                              self.galpha.tapers, nmax)

    temp = SHCoeffs.from_array(shcoeffs, normalization='4pi', csphase=1)
    if normalization != '4pi' or csphase != 1:
        return temp.convert(normalization=normalization, csphase=csphase)
    else:
        return temp
0.000834
def generate_rsa_public_and_private(bits=_DEFAULT_RSA_KEY_BITS):
    """
    <Purpose>
        Generate public and private RSA keys with modulus length 'bits'.
        The public and private keys returned conform to
        'securesystemslib.formats.PEMRSA_SCHEMA' and have the form:

        '-----BEGIN RSA PUBLIC KEY----- ...'

        or

        '-----BEGIN RSA PRIVATE KEY----- ...'

        The public and private keys are returned as strings in PEM format.
        'generate_rsa_public_and_private()' enforces a minimum key size of
        2048 bits. If 'bits' is unspecified, a 3072-bit RSA key is generated,
        which is the key size recommended by TUF.

        >>> public, private = generate_rsa_public_and_private(2048)
        >>> securesystemslib.formats.PEMRSA_SCHEMA.matches(public)
        True
        >>> securesystemslib.formats.PEMRSA_SCHEMA.matches(private)
        True

    <Arguments>
        bits:
            The key size, or key length, of the RSA key. 'bits' must be
            2048, or greater. 'bits' defaults to 3072 if not specified.

    <Exceptions>
        securesystemslib.exceptions.FormatError, if 'bits' does not contain
        the correct format.

    <Side Effects>
        The RSA keys are generated from pyca/cryptography's
        rsa.generate_private_key() function.

    <Returns>
        A (public, private) tuple containing the RSA keys in PEM format.
    """

    # Does 'bits' have the correct format?  This check will ensure 'bits'
    # conforms to 'securesystemslib.formats.RSAKEYBITS_SCHEMA'. 'bits' must
    # be an integer object, with a minimum value of 2048. Raise
    # 'securesystemslib.exceptions.FormatError' if the check fails.
    securesystemslib.formats.RSAKEYBITS_SCHEMA.check_match(bits)

    # Generate the public and private RSA keys. The pyca/cryptography 'rsa'
    # module performs the actual key generation. The 'bits' argument is used,
    # and a 2048-bit minimum is enforced by
    # securesystemslib.formats.RSAKEYBITS_SCHEMA.check_match().
    private_key = rsa.generate_private_key(public_exponent=65537,
                                           key_size=bits,
                                           backend=default_backend())

    # Extract the public & private halves of the RSA key and generate their
    # PEM-formatted representations. Return the key pair as a
    # (public, private) tuple, where each RSA is a string in PEM format.
    private_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption())

    # Need to generate the public pem from the private key before
    # serialization to PEM.
    public_key = private_key.public_key()
    public_pem = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo)

    return public_pem.decode('utf-8'), private_pem.decode('utf-8')
0.009417
def run(self):
    """Request client connection and start the main loop."""
    if self.args.roster_cache and os.path.exists(self.args.roster_cache):
        logging.info(u"Loading roster from {0!r}"
                     .format(self.args.roster_cache))
        try:
            self.client.roster_client.load_roster(self.args.roster_cache)
        except (IOError, ValueError) as err:
            logging.error(u"Could not load the roster: {0!r}".format(err))
    self.client.connect()
    self.client.run()
0.00531
def process_data(self, new_data):
    """
    handles incoming data from the `IrcProtocol` connection.
    Main data processing/routing is handled by the _process_line method,
    inherited from `ServerConnection`
    """
    self.buffer.feed(new_data)

    # process each non-empty line after logging all lines
    for line in self.buffer:
        log.debug("FROM SERVER: %s", line)
        if not line:
            continue
        self._process_line(line)
0.003953
def doc_to_help(doc):
    """Takes a __doc__ string and reformats it as help."""

    # Get rid of starting and ending white space. Using lstrip() or even
    # strip() could drop more than maximum of first line and right space
    # of last line.
    doc = doc.strip()

    # Get rid of all empty lines.
    whitespace_only_line = re.compile('^[ \t]+$', re.M)
    doc = whitespace_only_line.sub('', doc)

    # Cut out common space at line beginnings.
    doc = trim_docstring(doc)

    # Just like this module's comment, comments tend to be aligned somehow.
    # In other words they all start with the same amount of white space.
    # 1) keep double new lines;
    # 2) keep ws after new lines if not empty line;
    # 3) all other new lines shall be changed to a space;
    # Solution: Match new lines between non white space and replace with space.
    doc = re.sub(r'(?<=\S)\n(?=\S)', ' ', doc, flags=re.M)

    return doc
0.021277
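The final regex is the interesting step: a lookbehind/lookahead pair joins hard-wrapped lines while leaving blank-line paragraph breaks and indented continuations alone. A standalone check:

import re

doc = "First line\nwrapped onto a second.\n\nNew paragraph\n  indented line"
print(re.sub(r'(?<=\S)\n(?=\S)', ' ', doc))
# First line wrapped onto a second.
#
# New paragraph
#   indented line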
def delete(self, username, napp):
    """Delete a NApp.

    Raises:
        requests.HTTPError: If 400 <= status < 600.
    """
    api = self._config.get('napps', 'api')
    endpoint = os.path.join(api, 'napps', username, napp, '')
    content = {'token': self._config.get('auth', 'token')}
    response = self.make_request(endpoint, json=content, method='DELETE')
    response.raise_for_status()
0.004598
def dispatch(self, request, *args, **kwargs):
    """
    Does request processing for the return_url query parameter and
    redirects when it's missing.

    We can't do that in the get method, as it does not exist in the View
    base class and child mixins implementing get do not call super().get
    """
    self.return_url = request.GET.get('return_url', None)
    referrer = request.META.get('HTTP_REFERER', None)

    # leave alone POST and ajax requests and if return_url is explicitly left empty
    if (request.method != "GET" or request.is_ajax() or self.return_url
            or referrer is None
            or self.return_url is None and 'return_url' in request.GET):
        return super().dispatch(request, *args, **kwargs)

    if not self.return_url:
        url = request.get_full_path()
        if url.find("?") < 0:
            url = "?return_url=".join((url, referrer))
        else:
            url = "&return_url=".join((url, referrer))
        return HttpResponseRedirect(url)
0.004537
def _clean_create_kwargs(**kwargs):
    '''
    Sanitize kwargs to be sent to create_server
    '''
    VALID_OPTS = {
        'name': six.string_types,
        'image': six.string_types,
        'flavor': six.string_types,
        'auto_ip': bool,
        'ips': list,
        'ip_pool': six.string_types,
        'root_volume': six.string_types,
        'boot_volume': six.string_types,
        'terminate_volume': bool,
        'volumes': list,
        'meta': dict,
        'files': dict,
        'reservation_id': six.string_types,
        'security_groups': list,
        'key_name': six.string_types,
        'availability_zone': six.string_types,
        'block_device_mapping': dict,
        'block_device_mapping_v2': dict,
        'nics': list,
        'scheduler_hints': dict,
        'config_drive': bool,
        'disk_config': six.string_types,  # AUTO or MANUAL
        'admin_pass': six.string_types,
        'wait': bool,
        'timeout': int,
        'reuse_ips': bool,
        'network': dict,
        'boot_from_volume': bool,
        'volume_size': int,
        'nat_destination': six.string_types,
        'group': six.string_types,
        'userdata': six.string_types,
    }

    extra = kwargs.pop('extra', {})

    for key, value in six.iteritems(kwargs.copy()):
        if key in VALID_OPTS:
            if isinstance(value, VALID_OPTS[key]):
                continue
            log.error('Error %s: %s is not of type %s', key, value, VALID_OPTS[key])
        kwargs.pop(key)

    return __utils__['dictupdate.update'](kwargs, extra)
0.001281
def randn(*shape, **kwargs):
    """Draw random samples from a normal (Gaussian) distribution.

    Samples are distributed according to a normal distribution parametrized
    by *loc* (mean) and *scale* (standard deviation).

    Parameters
    ----------
    loc : float or NDArray
        Mean (centre) of the distribution.
    scale : float or NDArray
        Standard deviation (spread or width) of the distribution.
    shape : int or tuple of ints
        The number of samples to draw. If shape is, e.g., `(m, n)` and `loc`
        and `scale` are scalars, output shape will be `(m, n)`. If `loc` and
        `scale` are NDArrays with shape, e.g., `(x, y)`, then output will
        have shape `(x, y, m, n)`, where `m*n` samples are drawn for each
        `[loc, scale)` pair.
    dtype : {'float16', 'float32', 'float64'}
        Data type of output samples. Default is 'float32'
    ctx : Context
        Device context of output. Default is current context. Overridden by
        `loc.context` when `loc` is an NDArray.
    out : NDArray
        Store output to an existing NDArray.

    Returns
    -------
    NDArray
        If input `shape` has shape, e.g., `(m, n)` and `loc` and `scale` are
        scalars, output shape will be `(m, n)`. If `loc` and `scale` are
        NDArrays with shape, e.g., `(x, y)`, then output will have shape
        `(x, y, m, n)`, where `m*n` samples are drawn for each
        `[loc, scale)` pair.

    Examples
    --------
    >>> mx.nd.random.randn()
    2.21220636
    <NDArray 1 @cpu(0)>
    >>> mx.nd.random.randn(2, 2)
    [[-1.856082   -1.9768796 ]
     [-0.20801921  0.2444218 ]]
    <NDArray 2x2 @cpu(0)>
    >>> mx.nd.random.randn(2, 3, loc=5, scale=1)
    [[4.19962    4.8311777  5.936328  ]
     [5.357444   5.7793283  3.9896927 ]]
    <NDArray 2x3 @cpu(0)>
    """
    loc = kwargs.pop('loc', 0)
    scale = kwargs.pop('scale', 1)
    dtype = kwargs.pop('dtype', _Null)
    ctx = kwargs.pop('ctx', None)
    out = kwargs.pop('out', None)
    assert isinstance(loc, (int, float))
    assert isinstance(scale, (int, float))
    return _random_helper(_internal._random_normal, _internal._sample_normal,
                          [loc, scale], shape, dtype, ctx, out, kwargs)
0.003166
def union_with_variable(self, variable: str, replacement: VariableReplacement) -> 'Substitution':
    """Try to create a new substitution with the given variable added.

    See :meth:`try_add_variable` for a version of this method that
    modifies the substitution in place.

    Args:
        variable:
            The name of the variable to add.
        replacement:
            The substitution for the variable.

    Returns:
        The new substitution with the variable added or merged.

    Raises:
        ValueError:
            if the variable cannot be merged because it conflicts with
            the existing substitution for the variable.
    """
    new_subst = Substitution(self)
    new_subst.try_add_variable(variable, replacement)
    return new_subst
0.005841
def create_session(self, alias, url, headers={}, cookies={}, auth=None,
                   timeout=None, proxies=None, verify=False, debug=0,
                   max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create a HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can
    also be provided. Defaults to False.

    ``debug`` Enable http verbosity option; more information at
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between retries

    ``disable_warnings`` Disable requests warnings; useful when you have a
    large number of testcases
    """
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, '
                'cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, '
                'debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                               proxies, verify, debug))

    return self._create_session(
        alias, url, headers, cookies, auth, timeout, max_retries,
        backoff_factor, proxies, verify, debug, disable_warnings)
0.004724
def receive(self):
    """ receive the next PDU from the watchman service

    If the client has activated subscriptions or logs then
    this PDU may be a unilateral PDU sent by the service to
    inform the client of a log event or subscription change.

    It may also simply be the response portion of a request
    initiated by query.

    There are clients in production that subscribe and call
    this in a loop to retrieve all subscription responses,
    so care should be taken when making changes here.
    """
    self._connect()
    result = self.recvConn.receive()
    if self._hasprop(result, "error"):
        raise CommandError(result["error"])

    if self._hasprop(result, "log"):
        self.logs.append(result["log"])

    if self._hasprop(result, "subscription"):
        sub = result["subscription"]
        if sub not in self.subs:
            self.subs[sub] = []
        self.subs[sub].append(result)

        # also accumulate in {root,sub} keyed store
        root = os.path.normpath(os.path.normcase(result["root"]))
        if root not in self.sub_by_root:
            self.sub_by_root[root] = {}
        if sub not in self.sub_by_root[root]:
            self.sub_by_root[root][sub] = []
        self.sub_by_root[root][sub].append(result)

    return result
0.002845
def create_object(self, alias, *args, **kwargs):
    """Constructs the type with the given alias using the given args and kwargs.

    NB: aliases may be the alias' object type itself if that type is known.

    :API: public

    :param alias: Either the type alias or the type itself.
    :type alias: string|type
    :param *args: These pass through to the underlying callable object.
    :param **kwargs: These pass through to the underlying callable object.
    :returns: The created object.
    """
    object_type = self._type_aliases.get(alias)
    if object_type is None:
        raise KeyError('There is no type registered for alias {0}'.format(alias))
    return object_type(*args, **kwargs)
0.004292
def _process_gradient(self, backward, dmdp):
    """
    backward: `callable`
        callable that backpropagates the gradient of the model
        w.r.t to preprocessed input through the preprocessing
        to get the gradient of the model's output w.r.t. the
        input before preprocessing
    dmdp: gradient of model w.r.t. preprocessed input
    """
    if backward is None:  # pragma: no cover
        raise ValueError('Your preprocessing function does not provide'
                         ' an (approximate) gradient')

    dmdx = backward(dmdp)
    assert dmdx.dtype == dmdp.dtype
    return dmdx
0.003058
def GetWindowText(handle: int) -> str:
    """
    GetWindowText from Win32.
    handle: int, the handle of a native window.
    Return str.
    """
    arrayType = ctypes.c_wchar * MAX_PATH
    values = arrayType()
    ctypes.windll.user32.GetWindowTextW(ctypes.c_void_p(handle), values, MAX_PATH)
    return values.value
0.006211
def mk_external_entity(metamodel, s_ee):
    '''
    Create a python object from a BridgePoint external entity with bridges
    realized as python member functions.
    '''
    bridges = many(s_ee).S_BRG[19]()
    names = [brg.Name for brg in bridges]
    EE = collections.namedtuple(s_ee.Key_Lett, names)

    funcs = list()
    for s_brg in many(s_ee).S_BRG[19]():
        fn = mk_bridge(metamodel, s_brg)
        funcs.append(fn)

    return EE(*funcs)
0.002198
def define(cls, name, **kwargs):
    """
    Utility to quickly and easily declare Stream classes. Designed
    for interactive use such as notebooks and shouldn't replace
    parameterized class definitions in source code that is imported.

    Takes a stream class name and a set of keywords where each
    keyword becomes a parameter. If the value is already a
    parameter, it is simply used otherwise the appropriate parameter
    type is inferred and declared, using the value as the default.

    Supported types: bool, int, float, str, dict, tuple and list
    """
    params = {'name': param.String(default=name)}
    for k, v in kwargs.items():
        kws = dict(default=v, constant=True)
        if isinstance(v, param.Parameter):
            params[k] = v
        elif isinstance(v, bool):
            params[k] = param.Boolean(**kws)
        elif isinstance(v, int):
            params[k] = param.Integer(**kws)
        elif isinstance(v, float):
            params[k] = param.Number(**kws)
        elif isinstance(v, str):
            params[k] = param.String(**kws)
        elif isinstance(v, dict):
            params[k] = param.Dict(**kws)
        elif isinstance(v, tuple):
            params[k] = param.Tuple(**kws)
        elif isinstance(v, list):
            params[k] = param.List(**kws)
        elif isinstance(v, np.ndarray):
            params[k] = param.Array(**kws)
        else:
            params[k] = param.Parameter(**kws)

    # Dynamic class creation using type
    return type(name, (Stream,), params)
0.001192
def create_pool(hostname, username, password, name, members=None,
                allow_nat=None, allow_snat=None, description=None,
                gateway_failsafe_device=None, ignore_persisted_weight=None,
                ip_tos_to_client=None, ip_tos_to_server=None,
                link_qos_to_client=None, link_qos_to_server=None,
                load_balancing_mode=None, min_active_members=None,
                min_up_members=None, min_up_members_action=None,
                min_up_members_checking=None, monitor=None, profiles=None,
                queue_depth_limit=None, queue_on_connection_limit=None,
                queue_time_limit=None, reselect_tries=None,
                service_down_action=None, slow_ramp_time=None):
    '''
    A function to connect to a bigip device and create a pool.

    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    name
        The name of the pool to create.
    members
        List of comma delimited pool members to add to the pool.
        i.e. 10.1.1.1:80,10.1.1.2:80,10.1.1.3:80
    allow_nat
        [yes | no]
    allow_snat
        [yes | no]
    description
        [string]
    gateway_failsafe_device
        [string]
    ignore_persisted_weight
        [enabled | disabled]
    ip_tos_to_client
        [pass-through | [integer]]
    ip_tos_to_server
        [pass-through | [integer]]
    link_qos_to_client
        [pass-through | [integer]]
    link_qos_to_server
        [pass-through | [integer]]
    load_balancing_mode
        [dynamic-ratio-member | dynamic-ratio-node | fastest-app-response |
        fastest-node | least-connections-members | least-connections-node |
        least-sessions | observed-member | observed-node |
        predictive-member | predictive-node |
        ratio-least-connections-member | ratio-least-connections-node |
        ratio-member | ratio-node | ratio-session | round-robin |
        weighted-least-connections-member | weighted-least-connections-node]
    min_active_members
        [integer]
    min_up_members
        [integer]
    min_up_members_action
        [failover | reboot | restart-all]
    min_up_members_checking
        [enabled | disabled]
    monitor
        [name]
    profiles
        [none | profile_name]
    queue_depth_limit
        [integer]
    queue_on_connection_limit
        [enabled | disabled]
    queue_time_limit
        [integer]
    reselect_tries
        [integer]
    service_down_action
        [drop | none | reselect | reset]
    slow_ramp_time
        [integer]

    CLI Example::

        salt '*' bigip.create_pool bigip admin admin my-pool 10.1.1.1:80,10.1.1.2:80,10.1.1.3:80 monitor=http
    '''
    params = {
        'description': description,
        'gateway-failsafe-device': gateway_failsafe_device,
        'ignore-persisted-weight': ignore_persisted_weight,
        'ip-tos-to-client': ip_tos_to_client,
        'ip-tos-to-server': ip_tos_to_server,
        'link-qos-to-client': link_qos_to_client,
        'link-qos-to-server': link_qos_to_server,
        'load-balancing-mode': load_balancing_mode,
        'min-active-members': min_active_members,
        'min-up-members': min_up_members,
        'min-up-members-action': min_up_members_action,
        'min-up-members-checking': min_up_members_checking,
        'monitor': monitor,
        'profiles': profiles,
        'queue-on-connection-limit': queue_on_connection_limit,
        'queue-depth-limit': queue_depth_limit,
        'queue-time-limit': queue_time_limit,
        'reselect-tries': reselect_tries,
        'service-down-action': service_down_action,
        'slow-ramp-time': slow_ramp_time
    }

    # some options take yes/no, others take true/false. Figure out when to
    # use which without confusing the end user
    toggles = {
        'allow-nat': {'type': 'yes_no', 'value': allow_nat},
        'allow-snat': {'type': 'yes_no', 'value': allow_snat}
    }

    # build payload
    payload = _loop_payload(params)
    payload['name'] = name

    # determine toggles
    payload = _determine_toggles(payload, toggles)

    # specify members if provided
    if members is not None:
        payload['members'] = _build_list(members, 'ltm:pool:members')

    # build session
    bigip_session = _build_session(username, password)

    # post to REST
    try:
        response = bigip_session.post(
            BIG_IP_URL_BASE.format(host=hostname) + '/ltm/pool',
            data=salt.utils.json.dumps(payload)
        )
    except requests.exceptions.ConnectionError as e:
        return _load_connection_error(hostname, e)

    return _load_response(response)
0.001622
def setHeader(self, fileHeader):
    """
    Sets the file header
    """
    self.technician = fileHeader["technician"]
    self.recording_additional = fileHeader["recording_additional"]
    self.patient_name = fileHeader["patientname"]
    self.patient_additional = fileHeader["patient_additional"]
    self.patient_code = fileHeader["patientcode"]
    self.equipment = fileHeader["equipment"]
    self.admincode = fileHeader["admincode"]
    self.gender = fileHeader["gender"]
    self.recording_start_time = fileHeader["startdate"]
    self.birthdate = fileHeader["birthdate"]
    self.update_header()
0.003026
def insert_source_info(result):
    """Adds info about source of test result if available."""
    comment = result.get("comment")
    # don't change comment if it already exists
    if comment:
        return

    source = result.get("source")
    job_name = result.get("job_name")
    run = result.get("run")
    source_list = [source, job_name, run]
    if not all(source_list):
        return

    source_note = "/".join(source_list)
    source_note = "Source: {}".format(source_note)
    result["comment"] = source_note
0.001912
def stop_task(cls, task_tag, stop_dependent=True, stop_requirements=False):
    """ Stop started task from registry

    :param task_tag: same as in :meth:`.WTaskDependencyRegistryStorage.stop_task` method
    :param stop_dependent: same as in :meth:`.WTaskDependencyRegistryStorage.stop_task` method
    :param stop_requirements: same as in :meth:`.WTaskDependencyRegistryStorage.stop_task` method

    :return: None
    """
    registry = cls.registry_storage()
    registry.stop_task(task_tag, stop_dependent=stop_dependent,
                       stop_requirements=stop_requirements)
0.025594
def unlink_rich_menu_from_user(self, user_id, timeout=None):
    """Call unlink rich menu from user API.

    https://developers.line.me/en/docs/messaging-api/reference/#unlink-rich-menu-from-user

    :param str user_id: ID of the user
    :param timeout: (optional) How long to wait for the server
        to send data before giving up, as a float,
        or a (connect timeout, read timeout) float tuple.
        Default is self.http_client.timeout
    :type timeout: float | tuple(float, float)
    """
    self._delete(
        '/v2/bot/user/{user_id}/richmenu'.format(user_id=user_id),
        timeout=timeout
    )
0.002967
def _build_codes_reverse(
        codes: Dict[str, Dict[str, str]]) -> Dict[str, Dict[str, str]]:
    """ Build a reverse escape-code to name map, based on an existing
    name to escape-code map.
    """
    built = {}  # type: Dict[str, Dict[str, str]]
    for codetype, codemap in codes.items():
        for name, escapecode in codemap.items():
            # Skip shortcut aliases to avoid overwriting long names.
            if len(name) < 2:
                continue
            if built.get(codetype, None) is None:
                built[codetype] = {}
            built[codetype][escapecode] = name
    return built
0.0016
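A quick demonstration of the inversion with a toy color map (made up for illustration); the length-1 'r' alias is skipped, so the escape code maps back to the long name:

codes = {"fore": {"red": "\x1b[31m", "r": "\x1b[31m", "blue": "\x1b[34m"}}

built = {}
for codetype, codemap in codes.items():
    for name, escapecode in codemap.items():
        if len(name) < 2:          # skip one-letter shortcut aliases
            continue
        built.setdefault(codetype, {})[escapecode] = name

print(built)  # {'fore': {'\x1b[31m': 'red', '\x1b[34m': 'blue'}}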
def pairs(args):
    """
    %prog pairs pairsfile <fastbfile|fastqfile>

    Parse ALLPATHS pairs file, and write pairs IDs and single read IDs in
    respective ids files: e.g. `lib1.pairs.fastq`, `lib2.pairs.fastq`,
    and single `frags.fastq` (with single reads from lib1/2).
    """
    from jcvi.assembly.preprocess import run_FastbAndQualb2Fastq

    p = OptionParser(pairs.__doc__)
    p.add_option("--header", default=False, action="store_true",
                 help="Print header only [default: %default]")
    p.add_option("--suffix", default=False, action="store_true",
                 help="Add suffix /1, /2 to read names")
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    pairsfile, fastqfile = args
    pf = op.basename(fastqfile).split(".")[0]
    p = PairsFile(pairsfile)
    print(p.header, file=sys.stderr)

    if opts.header:
        return

    if fastqfile.endswith(".fastb"):
        fastbfile = fastqfile
        fastqfile = fastbfile.replace(".fastb", ".fastq")
        run_FastbAndQualb2Fastq(infile=fastbfile, outfile=fastqfile)

    p1file = "{0}.1.corr.fastq"
    p2file = "{0}.2.corr.fastq"
    fragsfile = "{0}.corr.fastq"
    p1fw = [open(p1file.format(x), "w") for x in p.libnames]
    p2fw = [open(p2file.format(x), "w") for x in p.libnames]
    fragsfw = open(fragsfile.format(pf), "w")

    extract_pairs(fastqfile, p1fw, p2fw, fragsfw, p, suffix=opts.suffix)
0.001381
def _setup_edge(self, capabilities):
    """Setup Edge webdriver

    :param capabilities: capabilities object
    :returns: a new local Edge driver
    """
    edge_driver = self.config.get('Driver', 'edge_driver_path')
    self.logger.debug("Edge driver path given in properties: %s", edge_driver)
    return webdriver.Edge(edge_driver, capabilities=capabilities)
0.007634
def update_bios_data_by_post(self, data):
    """Update bios data by post

    :param data: default bios config data
    """
    bios_settings_data = {
        'Attributes': data
    }
    self._conn.post(self.path, data=bios_settings_data)
0.007463
def process(self, items_block):
    """Return items as they come, updating their metadata__enriched_on field.

    :param items_block:
    :return: hits blocks as they come, updating their metadata__enriched_on
        field. Namedtuple containing:
        - processed: number of processed hits
        - out_items: a list containing items ready to be written.
    """
    out_items = []
    for hit in items_block:
        # Stamp each hit with the enrichment time before passing it through
        hit['_source']['metadata__enriched_on'] = datetime.datetime_utcnow().isoformat()
        out_items.append(hit)

    return self.ProcessResults(processed=0, out_items=out_items)
0.007342
def correct(args):
    """
    %prog correct *.fastq

    Correct reads using ErrorCorrection. Only PE will be used to build the
    K-mer table.
    """
    p = OptionParser(correct.__doc__)
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) < 1:
        sys.exit(not p.print_help())

    lstfile = "reads2cor.lst"
    fw = open(lstfile, "w")
    print("\n".join(x for x in args if x[:2] == "PE"), file=fw)
    fw.close()

    p1 = args[0]
    offset = guessoffset([p1])
    cpus = opts.cpus

    freq = "output.freq.cz"
    freqlen = freq + ".len"
    if need_update(args, (freq, freqlen)):
        cmd = "KmerFreq_AR_v2.0 -k 17 -c -1 -q {0}".format(offset)
        cmd += " -m 1 -t {0}".format(cpus)
        cmd += " -p output {0}".format(lstfile)
        sh(cmd)

    fw = open(lstfile, "w")
    print("\n".join(args), file=fw)
    fw.close()

    cmd = "Corrector_AR_v2.0 -k 17 -l 3 -m 5 -c 5 -a 0 -e 1 -w 0 -r 45"
    cmd += " -Q {0} -q 30 -x 8 -t {1} -o 1 ".format(offset, cpus)
    cmd += " {0} {1} {2}".format(freq, freqlen, lstfile)
    sh(cmd)
0.001862
def media(self):
    """
    Combines media of both components and adds a small script that unchecks
    the clear box when a value in any wrapped input is modified.
    """
    return self.widget.media + self.checkbox.media + Media(self.Media)
0.007547
def __get_username():
    """ Returns the effective username of the current process. """
    if WINDOWS:
        return getpass.getuser()
    import pwd
    return pwd.getpwuid(os.geteuid()).pw_name
0.005051
def raw(self, raw):
    """
    Sets the raw of this RuntimeRawExtension.
    Raw is the underlying serialization of this object.

    :param raw: The raw of this RuntimeRawExtension.
    :type: str
    """
    if raw is None:
        raise ValueError("Invalid value for `raw`, must not be `None`")
    if raw is not None and not re.search('^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', raw):
        raise ValueError("Invalid value for `raw`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`")

    self._raw = raw
0.015552
def string_to_daterange(str_range, delimiter='-', as_dates=False, interval=CLOSED_CLOSED):
    """
    Convert a string to a DateRange type. If you put only one date, it
    generates the relevant range for just that date or datetime till 24
    hours later. You can optionally use mixtures of []/() around the
    DateRange for OPEN/CLOSED interval behaviour.

    Parameters
    ----------
    str_range : `String`
        The range as a string of dates separated by one delimiter.
    delimiter : `String`
        The separator between the dates, using '-' as default.
    as_dates : `Boolean`
        True if you want the date-range to use datetime.date rather than
        datetime.datetime.
    interval : `int`
        CLOSED_CLOSED, OPEN_CLOSED, CLOSED_OPEN or OPEN_OPEN.
        **Default is CLOSED_CLOSED**.

    Returns
    -------
    `arctic.date.DateRange` : the DateRange parsed from the string.

    Examples
    --------
    >>> from arctic.date import string_to_daterange
    >>> string_to_daterange('20111020', as_dates=True)
    DateRange(start=datetime.date(2011, 10, 20), end=datetime.date(2011, 10, 21))

    >>> string_to_daterange('201110201030')
    DateRange(start=datetime.datetime(2011, 10, 20, 10, 30), end=datetime.datetime(2011, 10, 21, 10, 30))

    >>> string_to_daterange('20111020-20120120', as_dates=True)
    DateRange(start=datetime.date(2011, 10, 20), end=datetime.date(2012, 1, 20))

    >>> string_to_daterange('[20111020-20120120)', as_dates=True)
    DateRange(start=datetime.date(2011, 10, 20), end=datetime.date(2012, 1, 20))
    """
    num_dates = str_range.count(delimiter) + 1
    if num_dates > 2:
        raise ValueError('Too many dates in input string [%s] with delimiter (%s)'
                         % (str_range, delimiter))

    # Allow the user to use the [date-date), etc. range syntax to specify the interval.
    range_mode = Ranges.get(str_range[0] + str_range[-1], None)
    if range_mode:
        return string_to_daterange(str_range[1:-1], delimiter, as_dates,
                                   interval=range_mode)

    if as_dates:
        parse_dt = lambda s: parse(s).date() if s else None
    else:
        parse_dt = lambda s: parse(s) if s else None

    if num_dates == 2:
        d = [parse_dt(x) for x in str_range.split(delimiter)]
        oc = interval
    else:
        start = parse_dt(str_range)
        d = [start, start + datetime.timedelta(1)]
        oc = CLOSED_OPEN  # Always use closed-open for a single date/datetime.

    return DateRange(d[0], d[1], oc)
0.006017
def pause(self):
    """Pauses playback"""
    if self.isPlaying is True:
        self._execute("pause")
        self._changePlayingState(False)
0.0125
def observe(cls, *args, **kwargs):
    """
    Mark a method as receiving notifications. Comes in two flavours:

    .. method:: observe(name, **types)
       :noindex:

       A decorator living in the class. Can be applied more than once to
       the same method, provided the names differ.

       *name* is the property we want to be notified about as a string.

       .. Note::

          Alternatively, *name* can be a pattern for matching property
          names, meaning it can contain wildcards character like in module
          `fnmatch <http://docs.python.org/library/fnmatch.html>`_ in
          Python library. However, if wildcards are used in name, only
          *one* `observe` can be used for a given notification method, or
          else `ValueError` exception is raised when the Observer class
          is instantiated.

          .. versionadded:: 1.99.2

       *types* are boolean values denoting the types of notifications
       desired. At least one of the following has to be passed as True:
       assign, before, after, signal.

       Excess keyword arguments are passed to the method as part of the
       info dictionary.

    .. method:: observe(callable, name, **types)
       :noindex:

       An instance method to define notifications at runtime. Works as
       above.

       *callable* is the method to send notifications to. The effect will
       be as if this had been decorated.

    In all cases the notification method must take exactly three arguments:
    the model object, the name of the property that changed, and an
    :class:`NTInfo` object describing the change.

    .. warning::

       Due to limitation in the dynamic registration (in version 1.99.1),
       declarations of dynamic notifications must occur before registering
       self as an observer of the models whose properties the
       notifications are supposed to be observing. A hack for this
       limitation, is to first relieve any interesting model before
       dynamically register the notifications, and then re-observe those
       models.

    .. versionadded:: 1.99.1
    """

    @decorators.good_decorator
    def _decorator(_notified):
        # marks the method with observed properties
        _list = getattr(_notified, Observer._CUST_OBS_, list())
        _list.append((name, kwargs))
        setattr(_notified, Observer._CUST_OBS_, _list)
        return _notified

    # handles arguments
    if args and isinstance(args[0], cls):
        # Used as instance method, for declaring notifications
        # dynamically
        if len(args) != 3:
            raise TypeError("observe() takes exactly three arguments"
                            " when called (%d given)" % len(args))

        self = args[0]
        notified = args[1]
        name = args[2]

        assert isinstance(self, Observer), "Method Observer.observe " \
            "must be called with an Observer instance as first argument"
        if not callable(notified):
            raise TypeError("Second argument of observe() "
                            "must be a callable")
        if not isinstance(name, str):
            raise TypeError("Third argument of observe() must be a string")

        self.__register_notification(name, notified, kwargs)
        return None

    # used statically as decorator
    if len(args) != 1:
        raise TypeError("observe() takes exactly one argument when used"
                        " as decorator (%d given)" % len(args))
    name = args[0]
    if not isinstance(name, str):
        raise TypeError("First argument of observe() must be a string")
    return _decorator
0.000506
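A minimal decorator-flavour sketch of observe() above, assuming a gtkmvc-style Observer base class. MyObserver, the property name 'counter', and the info.old/info.new fields are illustrative assumptions based on the assign-notification convention described in the docstring.

class MyObserver(Observer):

    # notified whenever the model's 'counter' property is assigned
    @Observer.observe('counter', assign=True)
    def on_counter_changed(self, model, name, info):
        # info is the NTInfo describing the change (assumed fields)
        print("%s: %r -> %r" % (name, info.old, info.new))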
def _GetRelPath(self, filename): """Get relative path of a file according to the current directory, given its logical path in the repo.""" assert filename.startswith(self.subdir), (filename, self.subdir) return filename[len(self.subdir):].lstrip(r"\/")
0.023077
def _submitQuery(self, gitquery, gitvars={}, verbose=False, rest=False): """Send a curl request to GitHub. Args: gitquery (str): The query or endpoint itself. Examples: query: 'query { viewer { login } }' endpoint: '/user' gitvars (Optional[Dict]): All query variables. Defaults to empty. verbose (Optional[bool]): If False, stderr prints will be suppressed. Defaults to False. rest (Optional[bool]): If True, uses the REST API instead of GraphQL. Defaults to False. Returns: { 'statusNum' (int): The HTTP status code. 'headDict' (Dict[str]): The response headers. 'linkDict' (Dict[int]): Link based pagination data. 'result' (str): The body of the response. } """ errOut = DEVNULL if not verbose else None authhead = 'Authorization: bearer ' + self.__githubApiToken bashcurl = 'curl -iH TMPauthhead -X POST -d TMPgitquery https://api.github.com/graphql' if not rest \ else 'curl -iH TMPauthhead https://api.github.com' + gitquery bashcurl_list = bashcurl.split() bashcurl_list[2] = authhead if not rest: gitqueryJSON = json.dumps({'query': gitquery, 'variables': json.dumps(gitvars)}) bashcurl_list[6] = gitqueryJSON fullResponse = check_output(bashcurl_list, stderr=errOut).decode() _vPrint(verbose, "\n" + fullResponse) fullResponse = fullResponse.split('\r\n\r\n') heads = fullResponse[0].split('\r\n') if len(fullResponse) > 1: result = fullResponse[1] else: result = "" http = heads[0].split() statusNum = int(http[1]) # Parse headers into a useful dictionary headDict = {} headDict["http"] = heads[0] for header in heads[1:]: h = header.split(': ') headDict[h[0]] = h[1] # Parse any Link headers even further linkDict = None if "Link" in headDict: linkProperties = headDict["Link"].split(', ') propDict = {} for item in linkProperties: divided = re.split(r'<https://api.github.com|>; rel="|"', item) propDict[divided[2]] = divided[1] linkDict = propDict return {'statusNum': statusNum, 'headDict': headDict, 'linkDict': linkDict, 'result': result}
0.00194
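The Link-header parsing at the end of _submitQuery above, demonstrated standalone; the sample header value is made up but follows GitHub's pagination format.

import re

link_header = ('<https://api.github.com/repositories?since=100>; rel="next", '
               '<https://api.github.com/repositories{?since}>; rel="first"')

propDict = {}
for item in link_header.split(', '):
    divided = re.split(r'<https://api.github.com|>; rel="|"', item)
    # divided[1] is the path after the API root, divided[2] the rel name
    propDict[divided[2]] = divided[1]

print(propDict)  # {'next': '/repositories?since=100', 'first': '/repositories{?since}'}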
def DomainsGet(self, parameters = None, domain_id = -1):
        """
        This method returns the domains of the current user. The list also contains the domains to which the user has not yet been accepted.

        @param parameters (dictionary) - Dictionary containing the parameters of the request.

        @return (bool) - Boolean indicating whether DomainsGet was successful.
        """
        url = ''
        if parameters is None and domain_id != -1:
            url = '/domains/{0}.json'.format(domain_id)
        else:
            url = '/domains.json'

        if self.__SenseApiCall__(url, 'GET', parameters = parameters):
            return True
        else:
            self.__error__ = "api call unsuccessful"
            return False
0.017689
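A brief usage sketch for DomainsGet above. `api` stands for an already-authenticated client instance; construction and login are out of scope here and are an assumption.

if api.DomainsGet():                 # GET /domains.json (all domains)
    print('domains fetched')
if api.DomainsGet(domain_id=42):     # GET /domains/42.json (one domain)
    print('domain 42 fetched')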
def filter(self, **config) -> CallbackDataFilter: """ Generate filter :param config: :return: """ for key in config.keys(): if key not in self._part_names: raise ValueError(f"Invalid field name '{key}'") return CallbackDataFilter(self, config)
0.006098
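A hedged sketch of how this filter() is typically used with aiogram's CallbackData factory, which the method appears to belong to; the 'vote' prefix and part names are illustrative.

from aiogram.utils.callback_data import CallbackData

vote_cb = CallbackData('vote', 'post_id', 'action')

data = vote_cb.new(post_id=17, action='up')   # builds 'vote:17:up'
flt = vote_cb.filter(action='up')             # restricts a handler, e.g.
                                              # @dp.callback_query_handler(flt)

try:
    vote_cb.filter(direction='up')            # unknown field name
except ValueError as exc:
    print(exc)                                # Invalid field name 'direction'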
def DOM_setFileInputFiles(self, files, **kwargs): """ Function path: DOM.setFileInputFiles Domain: DOM Method name: setFileInputFiles WARNING: This function is marked 'Experimental'! Parameters: Required arguments: 'files' (type: array) -> Array of file paths to set. Optional arguments: 'nodeId' (type: NodeId) -> Identifier of the node. 'backendNodeId' (type: BackendNodeId) -> Identifier of the backend node. 'objectId' (type: Runtime.RemoteObjectId) -> JavaScript object id of the node wrapper. No return value. Description: Sets files for the given file input element. """ assert isinstance(files, (list, tuple) ), "Argument 'files' must be of type '['list', 'tuple']'. Received type: '%s'" % type( files) expected = ['nodeId', 'backendNodeId', 'objectId'] passed_keys = list(kwargs.keys()) assert all([(key in expected) for key in passed_keys] ), "Allowed kwargs are ['nodeId', 'backendNodeId', 'objectId']. Passed kwargs: %s" % passed_keys subdom_funcs = self.synchronous_command('DOM.setFileInputFiles', files= files, **kwargs) return subdom_funcs
0.038194
def request_param_update(self, var_id):
        """Place a param update request on the queue"""
        self._useV2 = self.cf.platform.get_protocol_version() >= 4
        pk = CRTPPacket()
        pk.set_header(CRTPPort.PARAM, READ_CHANNEL)
        if self._useV2:
            pk.data = struct.pack('<H', var_id)
        else:
            pk.data = struct.pack('<B', var_id)
        logger.debug('Requesting update of param [%d]', var_id)
        self.request_queue.put(pk)
0.004158
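The two wire encodings chosen above, standalone: protocol version 4 and later packs the parameter id as a little-endian unsigned short, older versions as a single unsigned byte.

import struct

print(struct.pack('<H', 300))   # b',\x01' -- two bytes, little-endian
print(struct.pack('<B', 42))    # b'*'     -- one byte; ids must fit in 0-255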
def descendants(self, collapseLists=True):
        """
        A more genteel way of accessing an object's children than self.children.
        If collapseLists is True, the returned list is guaranteed not to be a list of lists.
        If collapseLists is False, the returned list *may* be a list of lists (cf. self.children)
        in the case of optionality among children; e.g., a line's children is a list of lists of Words.
        """
        if not collapseLists:
            return self.children
        if not self.children:
            return []
        if type(self.children[0]) == type([]):
            return [x[0] for x in self.children]
        return self.children
0.033445
def insert_sections_some(ol,*secs,**kwargs):
    '''
        ol = initRange(0,20,1)
        ol
        loc = 6
        rslt = insert_sections_some(ol,['a','a','a'],['c','c','c','c'],index=loc)
        rslt
        ####
    '''
    mode = kwargs.get("mode", "new")
    loc = kwargs['index']
    secs = list(secs)
    secs = [concat(*secs)]
    locs = [loc]
    return insert_sections_many(ol, secs, locs, mode=mode)
0.015152
def start(jar_path=None, nthreads=-1, enable_assertions=True, max_mem_size=None, min_mem_size=None, ice_root=None, log_dir=None, log_level=None, port="54321+", name=None, extra_classpath=None, verbose=True, jvm_custom_args=None, bind_to_localhost=True):
        """
        Start new H2O server on the local machine.

        :param jar_path: Path to the h2o.jar executable. If not given, then we will search for h2o.jar in the
            locations returned by `._jar_paths()`.
        :param nthreads: Number of threads in the thread pool. This should be related to the number of CPUs used.
            -1 means use all CPUs on the host. A positive integer specifies the number of CPUs directly.
        :param enable_assertions: If True, pass `-ea` option to the JVM.
        :param max_mem_size: Maximum heap size (jvm option Xmx), in bytes.
        :param min_mem_size: Minimum heap size (jvm option Xms), in bytes.
        :param log_dir: Directory for H2O logs to be stored if a new instance is started. Default directory is
            determined by H2O internally.
        :param log_level: The logger level for H2O if a new instance is started.
        :param ice_root: A directory where H2O stores its temporary files. Default location is determined by
            tempfile.mkdtemp().
        :param port: Port on which to start the new server. This could be either an integer, or a string of the form
            "DDDDD+", indicating that the server should start looking for an open port starting from DDDDD and up.
        :param name: Name of the h2o cluster to be started.
        :param extra_classpath: List of paths to libraries that should be included on the Java classpath.
        :param verbose: If True, then connection info will be printed to the stdout.
        :param jvm_custom_args: Custom, user-defined arguments for the JVM H2O is instantiated in.
        :param bind_to_localhost: A flag indicating whether access to the H2O instance should be restricted to the
            local machine (default) or if it can be reached from other computers on the network.
            Only applicable when H2O is started from the Python client.

        :returns: a new H2OLocalServer instance
        """
        assert_is_type(jar_path, None, str)
        assert_is_type(port, None, int, str)
        assert_is_type(name, None, str)
        assert_is_type(nthreads, -1, BoundInt(1, 4096))
        assert_is_type(enable_assertions, bool)
        assert_is_type(min_mem_size, None, int)
        assert_is_type(max_mem_size, None, BoundInt(1 << 25))
        assert_is_type(log_dir, str, None)
        assert_is_type(log_level, str, None)
        assert_satisfies(log_level, log_level in [None, "TRACE", "DEBUG", "INFO", "WARN", "ERRR", "FATA"])
        assert_is_type(ice_root, None, I(str, os.path.isdir))
        assert_is_type(extra_classpath, None, [str])
        assert_is_type(jvm_custom_args, list, None)
        assert_is_type(bind_to_localhost, bool)
        if jar_path:
            assert_satisfies(jar_path, jar_path.endswith("h2o.jar"))

        if min_mem_size is not None and max_mem_size is not None and min_mem_size > max_mem_size:
            raise H2OValueError("`min_mem_size`=%d is larger than the `max_mem_size`=%d" % (min_mem_size, max_mem_size))
        if port is None:
            port = "54321+"
        baseport = None
        # TODO: get rid of this port gimmick and have 2 separate parameters.
        if is_type(port, str):
            if port.isdigit():
                port = int(port)
            else:
                if not (port[-1] == "+" and port[:-1].isdigit()):
                    raise H2OValueError("`port` should be of the form 'DDDD+', where D is a digit. Got: %s" % port)
                baseport = int(port[:-1])
                port = 0

        hs = H2OLocalServer()
        hs._verbose = bool(verbose)
        hs._jar_path = hs._find_jar(jar_path)
        hs._extra_classpath = extra_classpath
        hs._ice_root = ice_root
        hs._name = name
        if not ice_root:
            hs._ice_root = tempfile.mkdtemp()
            hs._tempdir = hs._ice_root

        if verbose:
            print("Attempting to start a local H2O server...")
        hs._launch_server(port=port, baseport=baseport, nthreads=int(nthreads), ea=enable_assertions,
                          mmax=max_mem_size, mmin=min_mem_size, jvm_custom_args=jvm_custom_args,
                          bind_to_localhost=bind_to_localhost, log_dir=log_dir, log_level=log_level)
        if verbose:
            print("  Server is running at %s://%s:%d" % (hs.scheme, hs.ip, hs.port))
        atexit.register(lambda: hs.shutdown())
        return hs
0.006439
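A hedged usage sketch for start() above, based on its documented parameters; the thread count, heap size, and log level are illustrative assumptions.

hs = H2OLocalServer.start(
    nthreads=4,
    max_mem_size=4 << 30,   # 4 GiB heap
    port="54321+",          # probe upward from 54321 for a free port
    log_level="INFO",
)
print(hs.scheme, hs.ip, hs.port)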
def delete(self, path='', **params): """ Make a DELETE request to the given path, and return the JSON-decoded result. Keyword parameters will be converted to URL parameters. DELETE requests ask to delete the object represented by this URL. """ params = jsonify_parameters(params) url = ensure_trailing_slash(self.url + path.lstrip('/')) return self._json_request('delete', url, params=params)
0.004292
def run_filter(vrn_file, align_bam, ref_file, data, items): """Filter and annotate somatic VCFs with damage/bias artifacts on low frequency variants. Moves damage estimation to INFO field, instead of leaving in FILTER. """ if not should_filter(items) or not vcfutils.vcf_has_variants(vrn_file): return data else: raw_file = "%s-damage.vcf" % utils.splitext_plus(vrn_file)[0] out_plot_files = ["%s%s" % (utils.splitext_plus(raw_file)[0], ext) for ext in ["_seq_bias_simplified.pdf", "_pcr_bias_simplified.pdf"]] if not utils.file_uptodate(raw_file, vrn_file) and not utils.file_uptodate(raw_file + ".gz", vrn_file): with file_transaction(items[0], raw_file) as tx_out_file: # Does not apply --qcSummary plotting due to slow runtimes cmd = ["dkfzbiasfilter.py", "--filterCycles", "1", "--passOnly", "--tempFolder", os.path.dirname(tx_out_file), vrn_file, align_bam, ref_file, tx_out_file] do.run(cmd, "Filter low frequency variants for DNA damage and strand bias") for out_plot in out_plot_files: tx_plot_file = os.path.join("%s_qcSummary" % utils.splitext_plus(tx_out_file)[0], "plots", os.path.basename(out_plot)) if utils.file_exists(tx_plot_file): shutil.move(tx_plot_file, out_plot) raw_file = vcfutils.bgzip_and_index(raw_file, items[0]["config"]) data["vrn_file"] = _filter_to_info(raw_file, items[0]) out_plot_files = [x for x in out_plot_files if utils.file_exists(x)] data["damage_plots"] = out_plot_files return data
0.003937
def _dmpaft_cmd(self, time_fields): ''' issue a command to read the archive records after a known time stamp. ''' records = [] # convert time stamp fields to buffer tbuf = struct.pack('2H', *time_fields) # 1. send 'DMPAFT' cmd self._cmd('DMPAFT') # 2. send time stamp + crc crc = VProCRC.get(tbuf) crc = struct.pack('>H', crc) # crc in big-endian format log_raw('send', tbuf + crc) self.port.write(tbuf + crc) # send time stamp + crc ack = self.port.read(len(self.ACK)) # read ACK log_raw('read', ack) if ack != self.ACK: return # if bad ack, return # 3. read pre-amble data raw = self.port.read(DmpStruct.size) log_raw('read', raw) if not VProCRC.verify(raw): # check CRC value log_raw('send ESC', self.ESC) self.port.write(self.ESC) # if bad, escape and abort return log_raw('send ACK', self.ACK) self.port.write(self.ACK) # send ACK # 4. loop through all page records dmp = DmpStruct.unpack(raw) log.info('reading %d pages, start offset %d' % (dmp['Pages'], dmp['Offset'])) for i in xrange(dmp['Pages']): # 5. read page data raw = self.port.read(DmpPageStruct.size) log_raw('read', raw) if not VProCRC.verify(raw): # check CRC value log_raw('send ESC', self.ESC) self.port.write(self.ESC) # if bad, escape and abort return log_raw('send ACK', self.ACK) self.port.write(self.ACK) # send ACK # 6. loop through archive records page = DmpPageStruct.unpack(raw) offset = 0 # assume offset at 0 if i == 0: offset = dmp['Offset'] * ArchiveAStruct.size while offset < ArchiveAStruct.size * 5: log.info('page %d, reading record at offset %d' % (page['Index'], offset)) if self._use_rev_b_archive(page['Records'], offset): a = ArchiveBStruct.unpack_from(page['Records'], offset) else: a = ArchiveAStruct.unpack_from(page['Records'], offset) # 7. verify that record has valid data, and store if a['DateStamp'] != 0xffff and a['TimeStamp'] != 0xffff: records.append(a) offset += ArchiveAStruct.size log.info('read all pages') return records
0.001153
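The DMPAFT framing performed in step 2 above, standalone: the time stamp is packed as two unsigned shorts (native byte order, as in the code) and the 16-bit CRC is appended big-endian. VProCRC.get is the driver's own helper; the stamp values are illustrative.

import struct

date_stamp, time_stamp = 0x2A95, 1330            # vantage-encoded date, HHMM time
tbuf = struct.pack('2H', date_stamp, time_stamp)
crc = VProCRC.get(tbuf)                          # CRC over the time-stamp bytes
frame = tbuf + struct.pack('>H', crc)            # what gets written to the port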
def send(self, messages=None, api_key=None, secure=None, test=None, **request_args): '''Send batch request to Postmark API. Returns result of :func:`requests.post`. :param messages: Batch messages to send to the Postmark API. :type messages: A list of :class:`Message` :param api_key: Your Postmark API key. Defaults to `self.api_key`. :param test: Make a test request to the Postmark API. Defaults to `self.test`. :param secure: Use the https Postmark API. Defaults to `self.secure`. :param \*\*request_args: Passed to :func:`requests.request` :rtype: :class:`BatchSendResponse` ''' return super(BatchSender, self).send(message=messages, test=test, api_key=api_key, secure=secure, **request_args)
0.005549
def desaturate(self):
        """Desaturates the layer, making it grayscale.

        Instantly removes all color information from the layer,
        while maintaining its alpha channel.
        """
        alpha = self.img.split()[3]
        self.img = self.img.convert("L")
        self.img = self.img.convert("RGBA")
        self.img.putalpha(alpha)
0.01626
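The same desaturation recipe as above, standalone with Pillow; the file names are assumptions.

from PIL import Image

img = Image.open("layer.png").convert("RGBA")
alpha = img.split()[3]       # keep the alpha band
img = img.convert("L")       # drop colour information
img = img.convert("RGBA")    # back to four bands (gray replicated)
img.putalpha(alpha)          # restore the original transparency
img.save("layer_gray.png")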
def Animation_resolveAnimation(self, animationId): """ Function path: Animation.resolveAnimation Domain: Animation Method name: resolveAnimation Parameters: Required arguments: 'animationId' (type: string) -> Animation id. Returns: 'remoteObject' (type: Runtime.RemoteObject) -> Corresponding remote object. Description: Gets the remote object of the Animation. """ assert isinstance(animationId, (str,) ), "Argument 'animationId' must be of type '['str']'. Received type: '%s'" % type( animationId) subdom_funcs = self.synchronous_command('Animation.resolveAnimation', animationId=animationId) return subdom_funcs
0.042773
def update_parameters(url, parameters, encoding='utf8'):
  """
  Updates a URL's existing GET parameters.

  :param url: a base URL to which to add additional parameters.
  :param parameters: a dictionary of parameters, any mix of
      unicode and string objects as the parameters and the values.
  :param encoding: the byte encoding to use when passed unicode
      for the base URL or for keys and values of the parameters dict. This is
      necessary because `urllib.urlencode` calls the `str()` function on all of
      its inputs. This raises a `UnicodeDecodeError` when it encounters a
      unicode string with characters outside of the default ASCII charset.
  :rtype: a string URL.
  """
  # Convert the base URL to the default encoding.
  if isinstance(url, unicode):
    url = url.encode(encoding)
  parsed_url = urlparse.urlparse(url)
  existing_query_parameters = urlparse.parse_qsl(parsed_url.query)

  # Convert unicode parameters to the default encoding.
  byte_parameters = []
  for key, value in (existing_query_parameters + parameters.items()):
    if isinstance(key, unicode):
      key = key.encode(encoding)
    if isinstance(value, unicode):
      value = value.encode(encoding)
    byte_parameters.append((key, value))

  # Generate the final URL with all of the updated parameters. Read
  # http://docs.python.org/2/library/urlparse.html#urlparse.urlparse if this is
  # confusing.
  return urlparse.urlunparse((
    parsed_url.scheme,
    parsed_url.netloc,
    parsed_url.path,
    parsed_url.params,
    urlencode(byte_parameters),
    parsed_url.fragment
  ))
0.009387
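An example call to update_parameters() above (Python 2, matching the code): existing query parameters are kept and the new ones are appended.

url = 'http://example.com/search?q=books'
print(update_parameters(url, {'page': u'2', u'lang': 'en'}))
# http://example.com/search?q=books&page=2&lang=en  (order of the added
# parameters may vary, since they come from a dict)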
def _decode_ctrl_packet(self, version, packet):
        """Decode a control packet into the list of sensors."""
        for i in range(5):
            input_bit = packet[i]
            # log each byte as its 8-bit binary representation (MSB first)
            self._debug(PROP_LOGLEVEL_DEBUG,
                        "Byte " + str(i) + ": " + format(input_bit, '08b'))

        for sensor in self._ctrl_sensor:
            if (sensor.sensor_type == PROP_SENSOR_FLAG):
                sensor.value = (packet[sensor.index // 8] >> (sensor.index % 8)) & 1
            elif (sensor.sensor_type == PROP_SENSOR_RAW):
                sensor.value = packet
0.005405
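The flag extraction used above, standalone: sensor flags are packed one per bit across the packet bytes and addressed by a flat bit index, least-significant bit first within each byte.

packet = bytes([0b00000001, 0b10000000])

def flag(packet, index):
    # byte at index // 8, bit at index % 8
    return (packet[index // 8] >> (index % 8)) & 1

print(flag(packet, 0))    # 1 -- bit 0 of byte 0
print(flag(packet, 15))   # 1 -- bit 7 of byte 1
print(flag(packet, 3))    # 0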
def delete_intent(self, name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ Deletes the specified intent. Example: >>> import dialogflow_v2 >>> >>> client = dialogflow_v2.IntentsClient() >>> >>> name = client.intent_path('[PROJECT]', '[INTENT]') >>> >>> client.delete_intent(name) Args: name (str): Required. The name of the intent to delete. Format: ``projects/<Project ID>/agent/intents/<Intent ID>``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. if 'delete_intent' not in self._inner_api_calls: self._inner_api_calls[ 'delete_intent'] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_intent, default_retry=self._method_configs['DeleteIntent'].retry, default_timeout=self._method_configs['DeleteIntent'] .timeout, client_info=self._client_info, ) request = intent_pb2.DeleteIntentRequest(name=name, ) self._inner_api_calls['delete_intent']( request, retry=retry, timeout=timeout, metadata=metadata)
0.002593
def construct(parent=None, defaults=None, **kwargs):
    """
    Random variable constructor.

    Args:
        cdf: Cumulative distribution function. Optional if ``parent`` is used.
        bnd: Boundary interval. Optional if ``parent`` is used.
        parent (Dist): Distribution used as basis for new distribution. Any
            other argument that is omitted will instead take its function from
            ``parent``.
        doc (str): Documentation for the distribution.
        str (str, :py:data:`typing.Callable`): Pretty print of the variable.
        pdf: Probability density function.
        ppf: Point percentile function.
        mom: Raw moment generator.
        ttr: Three terms recursion coefficient generator.
        init: Custom initialiser method.
        defaults (dict): Default values to provide to initialiser.

    Returns:
        (Dist): New custom distribution.
    """
    for key in kwargs:
        assert key in LEGAL_ATTRS, "{} is not legal input".format(key)
    if parent is not None:
        for key, value in LEGAL_ATTRS.items():
            if key not in kwargs and hasattr(parent, value):
                kwargs[key] = getattr(parent, value)

    assert "cdf" in kwargs, "cdf function must be defined"
    assert "bnd" in kwargs, "bnd function must be defined"
    if "str" in kwargs and isinstance(kwargs["str"], str):
        string = kwargs.pop("str")
        kwargs["str"] = lambda *args, **kwargs: string
    defaults = defaults if defaults else {}
    for key in defaults:
        assert key in LEGAL_ATTRS, "invalid default value {}".format(key)

    def custom_distribution(**kws):

        prm = defaults.copy()
        prm.update(kws)
        dist = Dist(**prm)

        for key, function in kwargs.items():
            attr_name = LEGAL_ATTRS[key]
            setattr(dist, attr_name, types.MethodType(function, dist))
        return dist

    if "doc" in kwargs:
        custom_distribution.__doc__ = kwargs["doc"]

    return custom_distribution
0.000935
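A hedged sketch of construct() above, building a uniform distribution from cdf/bnd callables. The callback signatures (self, the evaluation points, then the declared parameters) are an assumption modelled on the docstring, not confirmed against the library.

Uniform = construct(
    cdf=lambda self, x, lo, up: (x - lo) / (up - lo),   # assumed signature
    bnd=lambda self, x, lo, up: (lo, up),               # assumed signature
    defaults=dict(lo=0., up=1.),
    doc="Uniform distribution on [lo, up].",
)
dist = Uniform(lo=2., up=4.)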
def create_calc_dh_dv(estimator):
    """
    Return the function that can be used in the various gradient and hessian
    calculations to calculate the derivative of the transformation with
    respect to the index.

    Parameters
    ----------
    estimator : an instance of the estimation.LogitTypeEstimator class.
        Should contain a `design` attribute that is a 2D ndarray representing
        the design matrix for this model and dataset.

    Returns
    -------
    Callable.
        Will accept a 1D array of systematic utility values, a 1D array of
        alternative IDs, (shape parameters if there are any) and miscellaneous
        args and kwargs. Should return a 2D array whose elements contain the
        derivative of the transformed utility vector with respect to the
        vector of systematic utilities. The dimensions of the returned vector
        should be `(design.shape[0],  design.shape[0])`.
    """
    dh_dv = diags(np.ones(estimator.design.shape[0]), 0, format='csr')
    # Create a function that will take in the pre-formed matrix, replace its
    # data in-place with the new data, and return the correct dh_dv on each
    # iteration of the minimizer
    calc_dh_dv = partial(_asym_transform_deriv_v,
                         ref_position=estimator.shape_ref_pos,
                         output_array=dh_dv)
    return calc_dh_dv
0.000729
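The sparse identity trick used above, standalone: diags() with offset 0 builds an n-by-n CSR matrix whose .data buffer the partial'd function can later overwrite in place on each iteration.

import numpy as np
from scipy.sparse import diags

dh_dv = diags(np.ones(4), 0, format='csr')
print(dh_dv.toarray())
# [[1. 0. 0. 0.]
#  [0. 1. 0. 0.]
#  [0. 0. 1. 0.]
#  [0. 0. 0. 1.]]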
def _serialize_iterable(obj):
    """
    Only for serializing lists, tuples, and sets. Dicts and
    strings/unicode are treated differently: strings normally need no
    further serialization, and recursing into them would cause a maximum
    recursion error.
    :param obj:
    :return:
    """
    if isinstance(obj, (tuple, set)):
        # make the sequence assignable by casting it to a list
        obj = list(obj)

    # enumerate instead of list.index(): index() always returns the first
    # match, so duplicate items would be written to the wrong slot
    for i, item in enumerate(obj):
        obj[i] = serialize_obj(item)
    return obj
0.001969
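Why the enumerate loop in the version above matters: list.index() always returns the first match, so an item that collides with an already-transformed element gets written to the wrong slot. A contrived illustration of the broken pattern:

obj = ['a', 'a!']
for item in obj:
    obj[obj.index(item)] = item + '!'
print(obj)   # ['a!!', 'a!'] -- the second element was never transformed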
def _pwl1_to_poly(self, generators): """ Converts single-block piecewise-linear costs into linear polynomial. """ for g in generators: if (g.pcost_model == PW_LINEAR) and (len(g.p_cost) == 2): g.pwl_to_poly() return generators
0.00678
def sort(self, *sorting, **kwargs): """Sort resources.""" sorting_ = [] for name, desc in sorting: field = self.meta.model._meta.fields.get(name) if field is None: continue if desc: field = field.desc() sorting_.append(field) if sorting_: return self.collection.order_by(*sorting_) return self.collection
0.004587
def _program_files_from_executable(self, executable, required_paths, parent_dir=False): """ Get a list of program files by expanding a list of path patterns and interpreting it as relative to the executable. This method can be used as helper for implementing the method program_files(). Contrary to the default implementation of program_files(), this method does not explicitly add the executable to the list of returned files, it assumes that required_paths contains a path that covers the executable. @param executable: the path to the executable of the tool (typically the result of executable()) @param required_paths: a list of required path patterns @param parent_dir: whether required_paths are relative to the directory of executable or the parent directory @return a list of paths as strings, suitable for result of program_files() """ base_dir = os.path.dirname(executable) if parent_dir: base_dir = os.path.join(base_dir, os.path.pardir) return util.flatten( util.expand_filename_pattern(path, base_dir) for path in required_paths)
0.008432
def _run_hooks(self): """Calls any registered hooks providing the current state.""" for hook in self.hooks: getattr(self, hook)(self._state)
0.011905
def get_ext_outputs(self): """Get a list of relative paths to C extensions in the output distro""" all_outputs = [] ext_outputs = [] paths = {self.bdist_dir: ''} for base, dirs, files in sorted_walk(self.bdist_dir): for filename in files: if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: all_outputs.append(paths[base] + filename) for filename in dirs: paths[os.path.join(base, filename)] = (paths[base] + filename + '/') if self.distribution.has_ext_modules(): build_cmd = self.get_finalized_command('build_ext') for ext in build_cmd.extensions: if isinstance(ext, Library): continue fullname = build_cmd.get_ext_fullname(ext.name) filename = build_cmd.get_ext_filename(fullname) if not os.path.basename(filename).startswith('dl-'): if os.path.exists(os.path.join(self.bdist_dir, filename)): ext_outputs.append(filename) return all_outputs, ext_outputs
0.001653
def PushPopItem(obj, key, value):
    '''
    A context manager to replace and restore a value using a getter and setter.

    :param object obj: The object to replace/restore.
    :param object key: The key to replace/restore in the object.
    :param object value: The value to replace.

    Example::

      with PushPopItem(sys.modules, 'alpha', None):
          with pytest.raises(ImportError):
              import alpha
    '''
    if key in obj:
        old_value = obj[key]
        obj[key] = value
        try:
            yield value
        finally:
            # restore the original value even if the body raised
            obj[key] = old_value
    else:
        obj[key] = value
        try:
            yield value
        finally:
            del obj[key]
0.001616
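PushPopItem above used with a plain dict, assuming it is wrapped with contextlib.contextmanager in the original source (the decorator is not shown in the snippet).

settings = {'debug': False}
with PushPopItem(settings, 'debug', True):
    print(settings['debug'])   # True
print(settings['debug'])       # False (restored on exit)

with PushPopItem(settings, 'temp', 1):
    print(settings['temp'])    # 1
print('temp' in settings)      # False -- missing keys are deleted again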
def copy(self, clr=None, d=0.0):
        """
        Returns a copy of the range.

        Optionally, supply a color to get a range copy
        limited to the hue of that color.
        """
        cr = ColorRange()
        cr.name = self.name
        cr.h = deepcopy(self.h)
        cr.s = deepcopy(self.s)
        cr.b = deepcopy(self.b)
        cr.a = deepcopy(self.a)
        cr.grayscale = self.grayscale
        if not self.grayscale:
            cr.black = self.black.copy()
            cr.white = self.white.copy()
        if clr is not None:
            cr.h, cr.a = clr.h + d * (random() * 2 - 1), clr.a
        return cr
0.004739
def get_participants_for_section(section, person=None): """ Returns a list of gradebook participants for the passed section and person. """ section_label = encode_section_label(section.section_label()) url = "/rest/gradebook/v1/section/{}/participants".format(section_label) headers = {} if person is not None: headers["X-UW-Act-as"] = person.uwnetid data = get_resource(url, headers) participants = [] for pt in data["participants"]: participants.append(_participant_from_json(pt)) return participants
0.001767