code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def bake_action( obj, *, action, frames, **kwargs ): <NEW_LINE> <INDENT> if not (kwargs.get("do_pose") or kwargs.get("do_object")): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> action, = bake_action_objects( [(obj, action)], frames=frames, **kwargs, ) <NEW_LINE> return action
:arg obj: Object to bake. :type obj: :class:`bpy.types.Object` :arg action: An action to bake the data into, or None for a new action to be created. :type action: :class:`bpy.types.Action` or None :arg frames: Frames to bake. :type frames: iterable of int :return: an action or None :rtype: :class:`bpy.types.Action`
625941b38e05c05ec3eea124
def ConfigureLexer(self, keyword): <NEW_LINE> <INDENT> import peppy.editra.style_specs as style_specs <NEW_LINE> custom = None <NEW_LINE> if hasattr(self, 'stc_lexer_id') and self.stc_lexer_id is not None: <NEW_LINE> <INDENT> lexer = self.stc_lexer_id <NEW_LINE> if hasattr(lexer, 'styleText'): <NEW_LINE> <INDENT> custom = self.stc_lexer_id <NEW_LINE> lexer = wx.stc.STC_LEX_CONTAINER <NEW_LINE> <DEDENT> <DEDENT> elif keyword in style_specs.stc_lexer_id: <NEW_LINE> <INDENT> lexer = style_specs.stc_lexer_id[keyword] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dprint("keyword not found: %s" % keyword) <NEW_LINE> lexer = wx.stc.STC_LEX_NULL <NEW_LINE> <DEDENT> if lexer in [ wx.stc.STC_LEX_HTML, wx.stc.STC_LEX_XML]: <NEW_LINE> <INDENT> self.SetStyleBits(7) <NEW_LINE> <DEDENT> elif lexer == wx.stc.STC_LEX_NULL: <NEW_LINE> <INDENT> self.NullLexer() <NEW_LINE> return True <NEW_LINE> <DEDENT> elif custom: <NEW_LINE> <INDENT> self.SetStyleBits(custom.getStyleBits()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.SetStyleBits(5) <NEW_LINE> <DEDENT> self.SetLexer(lexer) <NEW_LINE> try: <NEW_LINE> <INDENT> self.SetKeyWordsFromClassprefs() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> keywords = style_specs.keywords_mapping[keyword] <NEW_LINE> self.SetKeyWordsFromDict(keywords) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> dprint("No keywords found for %s" % keyword) <NEW_LINE> <DEDENT> <DEDENT> if self.stc_syntax_style_specs is not None: <NEW_LINE> <INDENT> synspec = self.stc_syntax_style_specs <NEW_LINE> <DEDENT> elif custom: <NEW_LINE> <INDENT> synspec = custom.getEditraStyleSpecs() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> synspec = style_specs.syntax_style_specs[keyword] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> dprint("No style specs found for %s" % keyword) <NEW_LINE> synspec = [] <NEW_LINE> <DEDENT> <DEDENT> self.SetSyntax(synspec) <NEW_LINE> if self.stc_extra_properties 
is not None: <NEW_LINE> <INDENT> props = self.stc_extra_properties <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> props = style_specs.extra_properties[keyword] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> dprint("No extra properties found for %s" % keyword) <NEW_LINE> props = [] <NEW_LINE> <DEDENT> <DEDENT> self.SetProperties(props) <NEW_LINE> self.dprint("GetLexer = %d" % self.GetLexer()) <NEW_LINE> return True
Sets Lexer and Lexer Keywords for the specified keyword @param keyword: a peppy major mode keyword
625941b3a17c0f6771cbde0e
def add_card(name): <NEW_LINE> <INDENT> query_params = { "name": name, "idList": TO_DO_LIST_ID, "key": TRELLO_KEY, "token": TRELLO_TOKEN} <NEW_LINE> requrl = f"{TRELLO_URL}/1/cards" <NEW_LINE> response = requests.post(requrl, params=query_params) <NEW_LINE> return(response.json())
Adds a new card on the 'To Do' list to the 'ToDo App using Trello' board in Trello Args: name: The name of the card. This comes from ndex.html, Returns: json object from requests library.
625941b326238365f5f0ec1c
def __init__(self, logger): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> self.api = _TaigaAPI(host=config.TAIGA_HOST)
Init TaigaAPI object and logger.
625941b3be7bc26dc91cd3ba
def appendInside(self, *__args): <NEW_LINE> <INDENT> pass
QWebElement.appendInside(QString) QWebElement.appendInside(QWebElement)
625941b35f7d997b8717484f
def actions(board): <NEW_LINE> <INDENT> possibleActions = set() <NEW_LINE> for i, row in enumerate(board): <NEW_LINE> <INDENT> for j, value in enumerate(row): <NEW_LINE> <INDENT> if value == EMPTY: <NEW_LINE> <INDENT> possibleActions.add((i, j)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return possibleActions
Returns set of all possible actions (i, j) available on the board.
625941b330bbd722463cbb76
def put_blobs(self, blobs): <NEW_LINE> <INDENT> blobrefs = {} <NEW_LINE> for blob in blobs: <NEW_LINE> <INDENT> blobrefs[compute_hash(blob)] = blob <NEW_LINE> <DEDENT> blobrefs_all = frozenset(blobrefs.keys()) <NEW_LINE> stat_res = self._stat(blobrefs_all) <NEW_LINE> upload_url = stat_res['uploadUrl'] <NEW_LINE> max_upload_size = stat_res['maxUploadSize'] <NEW_LINE> blobrefs_stat = frozenset([s['blobRef'] for s in stat_res['stat']]) <NEW_LINE> blobrefs_missing = blobrefs_all - blobrefs_stat <NEW_LINE> blobrefs_skipped = blobrefs_all - blobrefs_missing <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> log.debug('blobs missing: {0}'.format(blobrefs_missing)) <NEW_LINE> log.debug('blobs skipped: {0}'.format(blobrefs_skipped)) <NEW_LINE> <DEDENT> res = {'skipped': stat_res['stat'], 'received': []} <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> log.debug('Starting first upload batch') <NEW_LINE> <DEDENT> batch_size = 0 <NEW_LINE> r_files = {} <NEW_LINE> for br in blobrefs_missing: <NEW_LINE> <INDENT> blob = blobrefs[br] <NEW_LINE> bref = compute_hash(blob) <NEW_LINE> if isinstance(blob, basestring): <NEW_LINE> <INDENT> blob_content = blob <NEW_LINE> blob_size = len(blob) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> blob_content = blob.read() <NEW_LINE> blob.seek(0, 2) <NEW_LINE> blob_size = blob.tell() <NEW_LINE> <DEDENT> if batch_size + blob_size > max_upload_size: <NEW_LINE> <INDENT> if DEBUG: <NEW_LINE> <INDENT> log.debug('Upload first batch before continue, batch size:{0}'.format(batch_size)) <NEW_LINE> <DEDENT> batch_res = self._put_blobs(upload_url, r_files) <NEW_LINE> upload_url = batch_res['uploadUrl'] <NEW_LINE> res['received'].extend(batch_res['received']) <NEW_LINE> r_files = {} <NEW_LINE> batch_size = 0 <NEW_LINE> <DEDENT> r_files[bref] = (bref, blob_content) <NEW_LINE> batch_size += blob_size <NEW_LINE> <DEDENT> if r_files.keys(): <NEW_LINE> <INDENT> if DEBUG: <NEW_LINE> <INDENT> log.debug('Current batch size: {0}'.format(batch_size)) <NEW_LINE> log.debug('Uploading last 
batch') <NEW_LINE> <DEDENT> batch_res = self._put_blobs(upload_url, r_files) <NEW_LINE> res['received'].extend(batch_res['received']) <NEW_LINE> <DEDENT> blobs_received = [d['blobRef'] for d in res['received']] <NEW_LINE> blobs_skipped = [d['blobRef'] for d in res['skipped']] <NEW_LINE> blobrefs_failed = set(blobrefs_all.difference(blobs_received)) <NEW_LINE> blobrefs_failed.difference_update(blobs_skipped) <NEW_LINE> if len(blobrefs_failed): <NEW_LINE> <INDENT> if DEBUG: <NEW_LINE> <INDENT> for br in blobrefs_failed: <NEW_LINE> <INDENT> log.debug('Blob with br:{0}, content:{1} failed.'.format(br, blobrefs[br])) <NEW_LINE> <DEDENT> <DEDENT> raise Exception('Some blobs failed to upload: {0}'.format(blobrefs_failed)) <NEW_LINE> <DEDENT> res['success'] = list(blobs_received) <NEW_LINE> res['success'].extend(blobs_skipped) <NEW_LINE> return res
Upload blobs using with standard multi-part upload. Returns a dict with received (blobref and size) and skipped (blobref only)
625941b3a79ad161976cbefa
def test_lidar(self): <NEW_LINE> <INDENT> rospy.init_node('test_node', anonymous=True) <NEW_LINE> msg = rospy.wait_for_message( "/carla/ego_vehicle/lidar", PointCloud2, timeout=TIMEOUT) <NEW_LINE> self.assertEqual(msg.header.frame_id, "ego_vehicle/lidar")
Tests Lidar sensor node
625941b3167d2b6e31218952
def __init__(self, assess=None, count=None, id_2=None): <NEW_LINE> <INDENT> self._assess = None <NEW_LINE> self._count = None <NEW_LINE> self._id_2 = None <NEW_LINE> self.discriminator = None <NEW_LINE> if assess is not None: <NEW_LINE> <INDENT> self.assess = assess <NEW_LINE> <DEDENT> self.count = count <NEW_LINE> if id_2 is not None: <NEW_LINE> <INDENT> self.id_2 = id_2
CompatibilitiesSsdActiveIdParams - a model defined in Swagger
625941b3656771135c3eb626
def checkInputCRS(self): <NEW_LINE> <INDENT> crsList = [] <NEW_LINE> for param in self.parameters: <NEW_LINE> <INDENT> if isinstance(param, (ParameterRaster, ParameterVector, ParameterMultipleInput)): <NEW_LINE> <INDENT> if param.value: <NEW_LINE> <INDENT> if isinstance(param, ParameterMultipleInput): <NEW_LINE> <INDENT> layers = param.value.split(';') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> layers = [param.value] <NEW_LINE> <DEDENT> for item in layers: <NEW_LINE> <INDENT> crs = dataobjects.getObject(item).crs() <NEW_LINE> if crs not in crsList: <NEW_LINE> <INDENT> crsList.append(crs) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return len(crsList) < 2
It checks that all input layers use the same CRS. If so, returns True. False otherwise.
625941b385dfad0860c3ac0c
def format_grid( self, visited_style: str = "reverse", add_coords: bool = True, coords_style: str = "italic bold fg:ansibrightcyan") -> FormattedText: <NEW_LINE> <INDENT> style_pairs = [] <NEW_LINE> coord_column_padding = len("{0:d}".format(self.height)) <NEW_LINE> for i, row in enumerate(self.grid): <NEW_LINE> <INDENT> if add_coords: <NEW_LINE> <INDENT> style_pairs.append((coords_style, "{0:{1}d}".format(self.height - (i+1), coord_column_padding))) <NEW_LINE> style_pairs.append(("", GRID_COLUMN_SEPARATOR)) <NEW_LINE> <DEDENT> for tile in row: <NEW_LINE> <INDENT> if tile.visited: <NEW_LINE> <INDENT> style_pairs.append((visited_style, tile.label)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> style_pairs.append(("", tile.label)) <NEW_LINE> <DEDENT> style_pairs.append(("", GRID_COLUMN_SEPARATOR)) <NEW_LINE> <DEDENT> style_pairs.pop() <NEW_LINE> style_pairs.append(("", GRID_ROW_SEPARATOR)) <NEW_LINE> <DEDENT> if add_coords: <NEW_LINE> <INDENT> style_pairs.append(("", " "*coord_column_padding)) <NEW_LINE> style_pairs.append(("", GRID_COLUMN_SEPARATOR)) <NEW_LINE> for number in range(self.width): <NEW_LINE> <INDENT> style_pairs.append((coords_style, "{0:{1}d}".format(number, CHARS_PER_TILE))) <NEW_LINE> style_pairs.append(("", GRID_COLUMN_SEPARATOR)) <NEW_LINE> <DEDENT> <DEDENT> return FormattedText(style_pairs)
Format the grid as a single string. Args: visited_style: The style string to apply to tiles which have been visited. add_coords: Show coordinates in the top and left edges of the screen. coords_style: The style string to apply to the coordinates.
625941b330bbd722463cbb77
def table_model(self): <NEW_LINE> <INDENT> model = PandasModel(self.df) <NEW_LINE> self.tableview.setModel(model) <NEW_LINE> header = self.tableview.horizontalHeader() <NEW_LINE> header.setSectionResizeMode(0, QHeaderView.ResizeToContents) <NEW_LINE> header.setSectionResizeMode(1, QHeaderView.ResizeToContents) <NEW_LINE> header.setSectionResizeMode(2, QHeaderView.ResizeToContents) <NEW_LINE> header.setSectionResizeMode(3, QHeaderView.ResizeToContents) <NEW_LINE> header.setSectionResizeMode(4, QHeaderView.ResizeToContents)
Tabla con los datos.
625941b360cbc95b062c62fd
def active(self, pos): <NEW_LINE> <INDENT> self._active_pos = pos
Set a given position active :param: string pos This will set a given gridref as being an active cell for the ciphertext, colouring it red on the grid
625941b3e64d504609d745f4
def scoreExtension(seq, seqbdd, mat, posSeq=0, posSeqbdd=0, sens=1): <NEW_LINE> <INDENT> s = [0] <NEW_LINE> sum_score = 0 <NEW_LINE> seuil = 15 <NEW_LINE> debut = 0 <NEW_LINE> if(sens == -1): <NEW_LINE> <INDENT> debut = -1 <NEW_LINE> l = -(min(posSeq, posSeqbdd)+1) <NEW_LINE> if(posSeq == 0 or posSeqbdd == 0): <NEW_LINE> <INDENT> return (posSeq, posSeqbdd, 0) <NEW_LINE> <DEDENT> <DEDENT> elif(sens == 1): <NEW_LINE> <INDENT> l = min((len(seq)-posSeq), (len(seqbdd)-posSeqbdd)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> for i in range(debut, l, sens): <NEW_LINE> <INDENT> score_AA = mat.score(seq[posSeq+i], seqbdd[posSeqbdd+i]) <NEW_LINE> sum_score += score_AA <NEW_LINE> s.append(s[i]+score_AA) <NEW_LINE> maxi = max(s) <NEW_LINE> if((maxi - s[i+1]) >= seuil): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return (posSeq+i, posSeqbdd+i, sum_score)
Calcul le score (à partir de la matrice de score mat) de l'allongement d'un hit à la position posSeq (séquence seq) et posSeqbdd (séquence seqbdd) vers la droite (sens = 1) ou la gauche (sens = -1). Retourne les position d'arret de l'alignement sur les 2 seq et le score.
625941b30a50d4780f666c43
def bb_i2c_open(self, SDA, SCL, baud=100000): <NEW_LINE> <INDENT> extents = [struct.pack("I", baud)] <NEW_LINE> return _u2i(_pigpio_command_ext( self.sl, _PI_CMD_BI2CO, SDA, SCL, 4, extents))
This function selects a pair of gpios for bit banging I2C at a specified baud rate. Bit banging I2C allows for certain operations which are not possible with the standard I2C driver. o baud rates as low as 50 o repeated starts o clock stretching o I2C on any pair of spare gpios SDA:= 0-31 SCL:= 0-31 baud:= 50-500000 Returns 0 if OK, otherwise PI_BAD_USER_GPIO, PI_BAD_I2C_BAUD, or PI_GPIO_IN_USE. NOTE: The gpios used for SDA and SCL must have pull-ups to 3V3 connected. As a guide the hardware pull-ups on pins 3 and 5 are 1k8 in value. ... h = pi.bb_i2c_open(4, 5, 50000) # bit bang on gpio 4/5 at 50kbps ...
625941b338b623060ff0abaa
def getNoticeHtml_Reply_NewsComment(self, notice, news_comment): <NEW_LINE> <INDENT> check = self.writeNewsCommentInfos(notice, news_comment) <NEW_LINE> return site_helper.page_render_nobase.user.notices.ReplyNewsComment(notice, news_comment) if check else None
../page/user/notices/ReplyNewsComment.html
625941b36fece00bbac2d4ef
def south(self, distance): <NEW_LINE> <INDENT> return self.pointAtDistance(distance, bearing = pi)
Create a Point at the specified distance due sourth of this Point :param distance: Distance in meters :type distance: float :returns: :class:`model.Point` -- A Point south of this point
625941b355399d3f05588467
def reset_stats(self): <NEW_LINE> <INDENT> self.ship_left = self.ai_settings.ship_limit <NEW_LINE> self.score = 0 <NEW_LINE> self.level = 1
Reset statistics for new game.
625941b37c178a314d6ef20c
def _display_smaller(apartment_list, amount, list_new): <NEW_LINE> <INDENT> ok = 0 <NEW_LINE> if check_number(amount) == False: <NEW_LINE> <INDENT> return -2 <NEW_LINE> <DEDENT> for apt in apartment_list: <NEW_LINE> <INDENT> if apt["amount"] < int(amount): <NEW_LINE> <INDENT> ok = 1 <NEW_LINE> list_new.append(apt) <NEW_LINE> <DEDENT> <DEDENT> if ok == 1: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return -1
The following function : > Copies all the entries that have the expenses smaller then a given amount & puts them in a new list >>>input : Apartment List; Apartment Number; New Empty List; >>>output : New List; Or an Numeric Value;
625941b350812a4eaa59c0db
def value_to_class_index(bin_arr, val_arr): <NEW_LINE> <INDENT> return np.digitize(val_arr,bin_arr,right=True)-1
Compare the values to a bin list and return the classs indices according to the results. Parameter(s): bin_arr: A list of numbers. val_arr: The array to be classified. Return(s): Classes' indexes.
625941b38e7ae83300e4ad80
def download_files(filenames, server, dirname=''): <NEW_LINE> <INDENT> filenames = to_filename_strings(filenames, strip_names=False, strip_dirs=True, use_cal_dict=True) <NEW_LINE> back_end_fn = _get_back_end(server, 'download_files') <NEW_LINE> if dirname: <NEW_LINE> <INDENT> if not os.path.isdir(dirname): <NEW_LINE> <INDENT> os.mkdir(dirname) <NEW_LINE> <DEDENT> if not os.access(dirname, os.W_OK): <NEW_LINE> <INDENT> raise IOError('directory not writeable: {0}'.format(dirname)) <NEW_LINE> <DEDENT> <DEDENT> back_end_fn(filenames, dirname) <NEW_LINE> return [os.path.join(dirname, fn) for fn in filenames]
Download a list of files from the specified server, skipping any that already exist in the target directory. User authentication is not supported yet (ie. can't get proprietary data). Parameters ---------- filenames : list of (convertible to str) or str or calibration dict The names of the files to request from the server. If a calibration dictionary (in the format produced by calibrations.init_cal_dict()) is supplied, all the raw calibration files listed in it will be downloaded. server : str, function Name of the service to be used for the look-up (which determines the back-end function selected from this library). The currently-available options are: 'gemini' (default). Alternatively, the user may supply a look-up function/object, which must accept a list of filename strings and a directory name as arguments (and should implement skipping existing files if needed). dirname : str, optional Writeable directory in which to place the files. This path may be absolute or relative to the current directory. If it doesn't exist, it will be created. Returns ------- list of str A list of the file(s) downloaded, for syntactical convenience, eg. allowing "DataFileList(download_files(some_list, server='gemini'))". This is just a copy of the input name string(s) that are derived from `filenames`, prefixed with `dirname`.
625941b382261d6c526ab257
def split_message(self, message): <NEW_LINE> <INDENT> work_0 = message % self.modulos[0] <NEW_LINE> message //= self.modulos[0] <NEW_LINE> work_1 = message % self.modulos[1] <NEW_LINE> return [ work_0, work_1 ]
Split message into working parts. Return a list of parts.
625941b3ec188e330fd5a55d
def factory(self, configparser, *args, **kwargs): <NEW_LINE> <INDENT> class_type = configparser['class_name'] <NEW_LINE> new_module = __import__(self.module_name + '.lib.models.' + class_type, fromlist=[type]) <NEW_LINE> new_class = getattr(new_module, class_type) <NEW_LINE> return new_class(configparser, **kwargs)
Factory function for Asset Class objects :param configparser: Configuration dictonary :return factory_class: Asset Class decendent of type listed in config_dict
625941b3d164cc6175782b02
def enqueue(self, item): <NEW_LINE> <INDENT> if self.is_full(): <NEW_LINE> <INDENT> raise OverflowQueueError() <NEW_LINE> <DEDENT> self.tail += 1 <NEW_LINE> self.data.insert(0, item)
add new item into the queue
625941b3293b9510aa2c304e
@utils.arg('id', metavar='<SERVER_ID>', help='ID of virtual server to start') <NEW_LINE> @utils.arg('--qemu-version', metavar='<QEMU_VERSION>', help='Suggest a qemu version') <NEW_LINE> def do_server_start(client, args): <NEW_LINE> <INDENT> kwargs = {} <NEW_LINE> if args.qemu_version: <NEW_LINE> <INDENT> kwargs['qemu_version'] = args.qemu_version <NEW_LINE> <DEDENT> guest = client.guests.perform_action(args.id, 'start', **kwargs) <NEW_LINE> utils.print_dict(guest)
Start a virtual server
625941b330c21e258bdfa251
def chi_square_calculator(tweets_data_dic): <NEW_LINE> <INDENT> index = 0 <NEW_LINE> classes_index = dict() <NEW_LINE> for c in np.unique(np.array(list(tweets_data_dic['airline_sentiment']))): <NEW_LINE> <INDENT> classes_index.update({c: index}) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> terms_index = dict() <NEW_LINE> index = 0 <NEW_LINE> for tweet in tweets_data_dic['text']: <NEW_LINE> <INDENT> tweet = tweet.split() <NEW_LINE> for term in tweet: <NEW_LINE> <INDENT> if term not in terms_index.keys(): <NEW_LINE> <INDENT> terms_index.update({term: index}) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> contingency_table = np.zeros((len(classes_index), len(terms_index)), dtype=float) <NEW_LINE> for index in range(len(tweets_data_dic['text'])): <NEW_LINE> <INDENT> c = tweets_data_dic['airline_sentiment'][index] <NEW_LINE> tweet = tweets_data_dic['text'][index].split() <NEW_LINE> for term in tweet: <NEW_LINE> <INDENT> contingency_table[classes_index[c]][terms_index[term]] += 1 <NEW_LINE> <DEDENT> <DEDENT> chi_square_table = np.zeros((len(classes_index), len(terms_index)), dtype=float) <NEW_LINE> total_counts = sum(sum(contingency_table)) <NEW_LINE> for t_index in terms_index.values(): <NEW_LINE> <INDENT> for c_index in classes_index.values(): <NEW_LINE> <INDENT> observed = contingency_table[c_index][t_index] <NEW_LINE> expected = sum(contingency_table[c_index, :]) * sum( contingency_table[:, t_index]) / total_counts <NEW_LINE> chi_square_table[c_index][t_index] = pow(observed - expected, 2) / expected <NEW_LINE> <DEDENT> <DEDENT> return chi_square_table, terms_index, classes_index
:param tweets_data_dic: a dictionary of tweets data that only have 'text' and 'airline_sentiment' keys :return chi_square_table: an np.array object that cell ij contains chi_square value for term j and class i
625941b330c21e258bdfa252
def setResponse(self, *args): <NEW_LINE> <INDENT> return _DataModel.Sensor_setResponse(self, *args)
setResponse(Sensor self, std::string const & response)
625941b3c432627299f049fb
def type_of_prev(sequence, interval, firstValue=None, absentValue=None): <NEW_LINE> <INDENT> return _sequence_operation(Oper_type_of_prev, sequence, interval, firstValue, false, absentValue)
Returns an integer expression that represents the type of the interval variable that is previous. This function returns an integer expression that represents the type of the interval variable that is previous to *interval* in sequence variable *sequence*. When *interval* is present and is the first interval of *sequence*, it returns the constant integer value *firstValue* (zero by default). When *interval* is absent, it returns the constant integer value *absentValue* (zero by default). Args: sequence: Sequence variable. interval: Interval variable. firstValue: Value to return if interval variable interval is the first one in sequence. absentValue: Value to return if interval variable interval becomes absent. Returns: An integer expression
625941b3a934411ee3751450
def test_worker_sets_death(self): <NEW_LINE> <INDENT> q = Queue() <NEW_LINE> w = Worker([q]) <NEW_LINE> w.register_death() <NEW_LINE> death_date = w.death_date <NEW_LINE> self.assertIsNotNone(death_date) <NEW_LINE> self.assertEqual(type(death_date).__name__, 'datetime')
Ensure worker correctly sets worker death date.
625941b3711fe17d82542131
def get_kline(self, market, type=None, since=None, size=None): <NEW_LINE> <INDENT> URL = 'http://api.zb.cn/data/v1/kline' <NEW_LINE> params = {'market': market} <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> params['type'] = type <NEW_LINE> <DEDENT> if since is not None: <NEW_LINE> <INDENT> params['since'] = since <NEW_LINE> <DEDENT> if size is not None: <NEW_LINE> <INDENT> params['size'] = size <NEW_LINE> <DEDENT> return self._http.get(URL, params)
K line https://www.zb.com/i/developer/restApi#market :param str market: ex) btc_usdt :param str type: 1min 3min 5min 15min 30min 1day 3day 1week 1hour 2hour 4hour 6hour 12hour :param int since: From this timestamp :param int size: Limit of returning data(default 1000,it only return 1000 data if that more than 1000 date ) :return: json object
625941b3e64d504609d745f5
def compute_per(ref, hyp, normalize=True): <NEW_LINE> <INDENT> phone_set = set(ref + hyp) <NEW_LINE> phone2char = dict(zip(phone_set, range(len(phone_set)))) <NEW_LINE> phones_ref = [chr(phone2char[p]) for p in ref] <NEW_LINE> phones_hyp = [chr(phone2char[p]) for p in hyp] <NEW_LINE> per = lev.distance(''.join(phones_ref), ''.join(phones_hyp)) <NEW_LINE> if normalize: <NEW_LINE> <INDENT> per /= len(ref) <NEW_LINE> <DEDENT> return per
Compute Phone Error Rate. Args: ref (list): phones in the reference transcript hyp (list): phones in the predicted transcript normalize (bool, optional): if True, divide by the length of str_true Returns: per (float): Phone Error Rate between str_true and str_pred
625941b32ae34c7f2600cee6
def get(self, save): <NEW_LINE> <INDENT> tree = fromstring(requests.get(self.START_URL).text) <NEW_LINE> yearobjects = tree.xpath('//*/ul[@class="tab-header"]/li//a') <NEW_LINE> year_links = [ self.START_URL + l.attrib["href"] for l in yearobjects if "href" in l.attrib ] <NEW_LINE> for year_link in year_links: <NEW_LINE> <INDENT> tree = fromstring(requests.get(year_link).text) <NEW_LINE> linkobjects = tree.xpath('//*[@class="annual-details"]/ul/li/p//a') <NEW_LINE> links = [ self.BASE_URL + l.attrib["href"] for l in linkobjects if "href" in l.attrib ] <NEW_LINE> self.process_links(links) <NEW_LINE> <DEDENT> return self.releases
Fetches articles from Banco Santander Central Hispano
625941b301c39578d7e74bf9
def strtime(time, accurate=False): <NEW_LINE> <INDENT> time = time_tuple(int(time)) <NEW_LINE> if time[0] > 0: <NEW_LINE> <INDENT> return '{:02}:{:02}:{:02}'.format(*time[:-1]) <NEW_LINE> <DEDENT> elif accurate: <NEW_LINE> <INDENT> return '{:02}:{:02}.{}0'.format(*time[1:3], time[3] // 100) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '{:02}:{:02}.00'.format(*time[1:3])
Return a string from the given milliseconds time. :returns: hh:mm:ss when > 59min mm:ss:00 when < 1h and accurate=False mm:ss:z0 when < 1h and accurate=True
625941b30c0af96317bb7f9e
def unifiedorder(self, product, openid=None, trade_type=None): <NEW_LINE> <INDENT> assert isinstance(product, dict) <NEW_LINE> assert trade_type in ('JSAPI', 'NATIVE') <NEW_LINE> post_dict = { 'appid': self.appid, 'attach': product['attach'], 'body': product['body'], 'mch_id': self.mch_id, 'nonce_str': self.generate_nonce_str(), 'notify_url': self.notify_url, 'out_trade_no': product['out_trade_no'], 'product_id': product['product_id'], 'spbill_create_ip': self.ip, 'total_fee': int(product['total_fee'] * 100), 'trade_type': trade_type, } <NEW_LINE> if trade_type == 'JSAPI' and openid is None: <NEW_LINE> <INDENT> raise MissingParameter(u'JSAPI必须传入openid') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if openid: <NEW_LINE> <INDENT> post_dict['openid'] = openid <NEW_LINE> <DEDENT> <DEDENT> post_dict['sign'] = self.generate_sign(post_dict) <NEW_LINE> ret_xml = dict2xml(post_dict, wrap='xml') <NEW_LINE> r = requests.post(self.URL_UINFIEDORDER, data=ret_xml.encode('utf-8')) <NEW_LINE> r.encoding = 'UTF-8' <NEW_LINE> data = r.text.encode('utf-8') <NEW_LINE> ret_dict = {} <NEW_LINE> x = ElementTree.fromstring(data) <NEW_LINE> if x.find('return_code').text.upper() == 'FAIL': <NEW_LINE> <INDENT> raise ParameterValueError(x.find('return_msg').text) <NEW_LINE> <DEDENT> if x.find('result_code').text.upper() == 'FAIL': <NEW_LINE> <INDENT> raise ParameterValueError(x.find('err_code').text) <NEW_LINE> <DEDENT> if trade_type == 'NATIVE': <NEW_LINE> <INDENT> ret_dict['prepay_id'] = x.find('prepay_id').text <NEW_LINE> ret_dict['code_url'] = x.find('code_url').text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret_dict['prepay_id'] = x.find('prepay_id').text <NEW_LINE> <DEDENT> return ret_dict
统一下单接口
625941b3004d5f362079a0ed
def _generate_audio_file(self): <NEW_LINE> <INDENT> pico2wave_exec_path = ["/usr/bin/pico2wave"] <NEW_LINE> tmp_path = self.file_path+".wav" <NEW_LINE> pico2wave_options = ["-l=%s" % self.language, "-w=%s" % tmp_path] <NEW_LINE> final_command = list() <NEW_LINE> final_command.extend(pico2wave_exec_path) <NEW_LINE> final_command.extend(pico2wave_options) <NEW_LINE> final_command.append(self.words) <NEW_LINE> logger.debug("Pico2wave command: %s" % final_command) <NEW_LINE> subprocess.call(final_command, stderr=sys.stderr) <NEW_LINE> os.rename(tmp_path, self.file_path)
Generic method used as a Callback in TTSModule - must provided the audio file and write it on the disk .. raises:: FailToLoadSoundFile
625941b3283ffb24f3c556c3
def get_height(self) -> Optional[int]: <NEW_LINE> <INDENT> flags = decode_flags(VmbFrameFlags, self._frame.receiveFlags) <NEW_LINE> if VmbFrameFlags.Dimension not in flags: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._frame.height
Get image height in pixels. Returns: Image height in pixels if dimension data is provided by the camera. None if dimension data is not provided by the camera.
625941b321a7993f00bc7a9d
def get_schema(): <NEW_LINE> <INDENT> return { 'schema': { 'schema_version': { 'type': 'integer', 'default': 2, }, 'name': { 'schema_version': 1, 'title': 'Graphite connection name', 'comment': 'Unique Graphite connection name', 'type': 'string', 'required': True, 'empty': False, 'unique': True, }, 'carbon_address': { 'schema_version': 1, 'title': 'Carbon daemon address', 'comment': '', 'type': 'string', 'required': True, 'empty': False, }, 'carbon_port': { 'schema_version': 1, 'title': 'Carbon daemon port', 'comment': '', 'type': 'integer', 'empty': False, 'default': 2004 }, 'graphite_address': { 'schema_version': 1, 'title': 'Graphite address', 'comment': '', 'type': 'string', 'required': True, 'empty': False, }, 'graphite_port': { 'schema_version': 1, 'title': 'Graphite port', 'comment': '', 'type': 'integer', 'empty': False, 'default': 8080 }, 'prefix': { 'schema_version': 1, 'title': 'Metrics prefix', 'comment': 'Prefix that will be prepended to the metrics sent to this TS DB.', 'type': 'string', 'default': '', }, 'realms_prefix': { 'schema_version': 2, "title": "Realms prefix", "comment": "Include the realms prefix for the metrics sent to this TS DB.", 'type': 'boolean', 'default': True }, 'grafana': { 'schema_version': 1, 'title': 'Grafana relation', 'comment': 'If set, the Alignak backend will use this Grafana relation for ' 'the metrics sent to the Influx DB. 
It will create/update the ' 'Grafana panels accordindgly.', 'type': 'objectid', 'data_relation': { 'resource': 'grafana', 'embeddable': True }, 'nullable': True, 'default': None }, 'statsd': { 'schema_version': 1, 'title': 'StatsD relation', 'comment': 'If set, the Alignak backend will use this StatsD relation for ' 'the metrics sent to the Influx DB.', 'type': 'objectid', 'data_relation': { 'resource': 'statsd', 'embeddable': True }, 'nullable': True, 'default': None }, '_realm': { 'schema_version': 1, 'title': 'Realm', 'comment': 'Realm this element belongs to.', 'type': 'objectid', 'data_relation': { 'resource': 'realm', 'embeddable': True }, 'required': True, }, '_sub_realm': { 'schema_version': 1, 'title': 'Sub-realms', 'comment': 'Is this element visible in the sub-realms of its realm?', 'type': 'boolean', 'default': True }, '_users_read': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, '_users_update': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, '_users_delete': { 'schema_version': 1, 'type': 'list', 'schema': { 'type': 'objectid', 'data_relation': { 'resource': 'user', 'embeddable': True, } }, }, }, 'schema_deleted': {} }
Schema structure of this resource :return: schema dictionary :rtype: dict
625941b3bde94217f3682bb2
def parse(line): <NEW_LINE> <INDENT> data = json.loads(line) <NEW_LINE> return data['PRODUCT_ID'], data['CATEGORY_ID'], data['BRAND'], data['DESCRIPTION'], data['NAME'], data['PRICE'], data['LAST_UPDATE_DATE']
Parsing JSON messages from Kafka Producer
625941b3d58c6744b4257a16
def test_list_config_file(self): <NEW_LINE> <INDENT> pass
Test case for list_config_file List config files
625941b324f1403a92600927
def get_i18n(factory=I18n, key=_i18n_registry_key, request=None): <NEW_LINE> <INDENT> request = request or webapp2.get_request() <NEW_LINE> i18n = request.registry.get(key) <NEW_LINE> if not i18n: <NEW_LINE> <INDENT> i18n = request.registry[key] = factory(request) <NEW_LINE> <DEDENT> return i18n
Returns an instance of :class:`I18n` from the request registry. It'll try to get it from the current request registry, and if it is not registered it'll be instantiated and registered. A second call to this function will return the same instance. :param factory: The callable used to build and register the instance if it is not yet registered. The default is the class :class:`I18n` itself. :param key: The key used to store the instance in the registry. A default is used if it is not set. :param request: A :class:`webapp2.Request` instance used to store the instance. The active request is used if it is not set.
625941b399fddb7c1c9de14d
def test_remove_unregistered_torrent(self): <NEW_LINE> <INDENT> self.ltmgr.initialize() <NEW_LINE> mock_handle = MockObject() <NEW_LINE> mock_handle.is_valid = lambda: False <NEW_LINE> alert = type('torrent_removed_alert', (object, ), dict(handle=mock_handle, info_hash='0'*20)) <NEW_LINE> self.ltmgr.process_alert(alert()) <NEW_LINE> self.assertNotIn('0'*20, self.ltmgr.torrents)
Tests a successful removal status of torrents which aren't known
625941b35fc7496912cc373b
def chooseAppropriateRule(self, ruleSymbol): <NEW_LINE> <INDENT> RightRule = Rules[ruleSymbol][0] <NEW_LINE> for RightRule in Rules[ruleSymbol]: <NEW_LINE> <INDENT> if (self.inputSymbol in self.findFirstSymbols(RightRule)): <NEW_LINE> <INDENT> return RightRule <NEW_LINE> <DEDENT> <DEDENT> return None
Effects: Returns rule that begins with symbol "inputSymbol" if more than one rule has this left part.
625941b310dbd63aa1bd2966
def _extract_alignment_region(alignment_seq_with_flanking, annotation): <NEW_LINE> <INDENT> align_stripped = alignment_seq_with_flanking.strip("-") <NEW_LINE> display_start = int(annotation["al_display_start"]) <NEW_LINE> if int(annotation["al_start"]) <= int(annotation["al_stop"]): <NEW_LINE> <INDENT> start = int(annotation["al_start"]) - display_start <NEW_LINE> end = int(annotation["al_stop"]) - display_start + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = display_start - int(annotation["al_start"]) <NEW_LINE> end = display_start - int(annotation["al_stop"]) + 1 <NEW_LINE> <DEDENT> end += align_stripped.count("-") <NEW_LINE> if start < 0 or start >= end or end > len(align_stripped): <NEW_LINE> <INDENT> raise ValueError( "Problem with sequence start/stop,\n%s[%i:%i]\n%s" % (alignment_seq_with_flanking, start, end, annotation) ) <NEW_LINE> <DEDENT> return align_stripped[start:end]
Extract alignment region (PRIVATE). Helper function for the main parsing code. To get the actual pairwise alignment sequences, we must first translate the un-gapped sequence based coordinates into positions in the gapped sequence (which may have a flanking region shown using leading - characters). To date, I have never seen any trailing flanking region shown in the m10 file, but the following code should also cope with that. Note that this code seems to work fine even when the "sq_offset" entries are present as a result of using the -X command line option.
625941b3b57a9660fec33633
def set_kickstart(self,kickstart): <NEW_LINE> <INDENT> if kickstart == "<<inherit>>": <NEW_LINE> <INDENT> self.kickstart = kickstart <NEW_LINE> return True <NEW_LINE> <DEDENT> if utils.find_kickstart(kickstart): <NEW_LINE> <INDENT> self.kickstart = kickstart <NEW_LINE> return True <NEW_LINE> <DEDENT> raise CX(_("kickstart not found"))
Sets the kickstart. This must be a NFS, HTTP, or FTP URL. Or filesystem path. Minor checking of the URL is performed here.
625941b3cdde0d52a9e52dea
def get_age_group(age_joined): <NEW_LINE> <INDENT> bins = [17,25,35,45,55,120] <NEW_LINE> group_names = ['18-25','26-35','36-45','46-55','>55'] <NEW_LINE> age_joined['age_decade'] = pd.cut(age_joined['Age'], bins, labels=group_names) <NEW_LINE> return age_joined
Create a new column as age_decade Input: ------- pandas dataframe Output: ------- pandas dataframe with a new column 'age_decade'
625941b3adb09d7d5db6c549
def __init__(self, baseurl=None, orKeywords=False, version="1.0"): <NEW_LINE> <INDENT> if not baseurl: baseurl = RegistryService.STSCI_REGISTRY_BASEURL <NEW_LINE> super(RegistryQuery, self).__init__(baseurl, "vaoreg", version) <NEW_LINE> self._kw = [] <NEW_LINE> self._preds = [] <NEW_LINE> self._svctype = None <NEW_LINE> self._band = None <NEW_LINE> self._orKw = orKeywords <NEW_LINE> self._doSort = True <NEW_LINE> self._dalonly = False
create the query instance Parameters ---------- baseurl : str the base URL for the VAO registry. If None, it will be set to the public VAO registry at STScI. orKeywords : bool if True, keyword constraints will by default be OR-ed together; that is, a resource that matches any of the keywords will be returned. If FALSE, the keywords will be AND-ed, thus requiring a resource to match all the keywords.
625941b326068e7796caea8d
def simulationWithDrug(numViruses, maxPop, maxBirthProb, clearProb, resistances, mutProb, numTrials): <NEW_LINE> <INDENT> viruses = list(ResistantVirus(maxBirthProb, clearProb, resistances, mutProb) for i in range(numViruses)) <NEW_LINE> totalv = [0] * 300 <NEW_LINE> resisv = [0] * 300 <NEW_LINE> for i in range(numTrials): <NEW_LINE> <INDENT> patient = TreatedPatient(viruses, maxPop) <NEW_LINE> for i in range(0,150): <NEW_LINE> <INDENT> patient.update() <NEW_LINE> totalv[i] += (patient.getResistPop([])) <NEW_LINE> resisv[i] += (patient.getResistPop(['guttagonol'])) <NEW_LINE> <DEDENT> patient.addPrescription('guttagonol') <NEW_LINE> for i in range(150,300): <NEW_LINE> <INDENT> patient.update() <NEW_LINE> totalv[i] += (patient.getResistPop([])) <NEW_LINE> resisv[i] += (patient.getResistPop(['guttagonol'])) <NEW_LINE> <DEDENT> <DEDENT> for i in range(300): <NEW_LINE> <INDENT> totalv[i] = totalv[i] / (numTrials) <NEW_LINE> resisv[i] = resisv[i] / (numTrials) <NEW_LINE> <DEDENT> pylab.plot(totalv, label = 'Virus Population') <NEW_LINE> pylab.plot(resisv, label = 'Resistant Virus Population') <NEW_LINE> pylab.title('Simulation Without Drug') <NEW_LINE> pylab.xlabel('Timestep') <NEW_LINE> pylab.ylabel('Virus Population') <NEW_LINE> pylab.legend(loc='best') <NEW_LINE> pylab.show()
Runs simulations and plots graphs for problem 5. For each of numTrials trials, instantiates a patient, runs a simulation for 150 timesteps, adds guttagonol, and runs the simulation for an additional 150 timesteps. At the end plots the average virus population size (for both the total virus population and the guttagonol-resistant virus population) as a function of time. numViruses: number of ResistantVirus to create for patient (an integer) maxPop: maximum virus population for patient (an integer) maxBirthProb: Maximum reproduction probability (a float between 0-1) clearProb: maximum clearance probability (a float between 0-1) resistances: a dictionary of drugs that each ResistantVirus is resistant to (e.g., {'guttagonol': False}) mutProb: mutation probability for each ResistantVirus particle (a float between 0-1). numTrials: number of simulation runs to execute (an integer)
625941b3d7e4931a7ee9dcd1
def setUp(self): <NEW_LINE> <INDENT> TestController.setUp(self) <NEW_LINE> self.serials = []
setup the Test Controller
625941b3435de62698dfda0a
def TestInstStatusCollector(): <NEW_LINE> <INDENT> enabled_hypervisors = qa_config.GetEnabledHypervisors() <NEW_LINE> is_xen = (constants.HT_XEN_PVM in enabled_hypervisors or constants.HT_XEN_HVM in enabled_hypervisors) <NEW_LINE> if not is_xen: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> AssertCommand([MON_COLLECTOR, "inst-status-xen"]) <NEW_LINE> node1 = qa_config.AcquireNode() <NEW_LINE> node2 = qa_config.AcquireNode() <NEW_LINE> template = qa_config.GetDefaultDiskTemplate() <NEW_LINE> instance = CreateInstanceByDiskTemplate([node1, node2], template) <NEW_LINE> AssertCommand([MON_COLLECTOR, "inst-status-xen"], node=node1) <NEW_LINE> AssertCommand([MON_COLLECTOR, "inst-status-xen"], node=node2) <NEW_LINE> RemoveInstance(instance) <NEW_LINE> node1.Release() <NEW_LINE> node2.Release()
Test the Xen instance status collector.
625941b30383005118ecf39a
def clear_all_object_of_bucket(oss_instance, bucket): <NEW_LINE> <INDENT> return clear_all_objects_in_bucket(oss_instance, bucket)
clean all objects in bucket, after that, it will delete this bucket.
625941b331939e2706e4cc27
def chunk_and_write_dataframe(dataframe_to_write: pd.DataFrame, measurement: str, user_id: str, df_client, batch_size: int = 5000) -> bool: <NEW_LINE> <INDENT> chunk_nb = math.ceil(len(dataframe_to_write) / batch_size) <NEW_LINE> dataframe_chunk_list = np.array_split(dataframe_to_write, chunk_nb) <NEW_LINE> for chunk in dataframe_chunk_list: <NEW_LINE> <INDENT> tags = {USER_PARAM_NAME: user_id} <NEW_LINE> df_client.write_points(chunk, measurement=measurement, tags=tags, protocol="json") <NEW_LINE> <DEDENT> return True
Split the input dataframe in chunk and write them sequentially for perfomance issues :param dataframe_to_write: pandas dataframe to write in influxDB :param measurement: measurement name of dataframe to write :param user_id: id of user :return:
625941b363f4b57ef0000ee1
def test_notification_getunreadcount(self): <NEW_LINE> <INDENT> pass
Test case for notification_getunreadcount Get the number of unread notifications # noqa: E501
625941b34527f215b584c217
def nginx_accel(request,app): <NEW_LINE> <INDENT> allowed = False <NEW_LINE> if allowed: <NEW_LINE> <INDENT> response = HttpResponse() <NEW_LINE> response['Content-Type']="" <NEW_LINE> response['X-Accel-Redirect'] = request.get_full_path() <NEW_LINE> return response <NEW_LINE> <DEDENT> return HttpResponseForbidden()
default django view, where id is an argument that identifies the ressource to be protected
625941b323e79379d52ee31f
def __call__(self, e1, e2): <NEW_LINE> <INDENT> if self.nobias: <NEW_LINE> <INDENT> return bilinear.bilinear(e1, e2, self.W) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return bilinear.bilinear(e1, e2, self.W, self.V1, self.V2, self.b)
Applies the bilinear function to inputs and the internal parameters. Args: e1 (~chainer.Variable): Left input. e2 (~chainer.Variable): Right input. Returns: ~chainer.Variable: Output variable.
625941b3507cdc57c6306a87
def _send_mail(self, request, user, **kwargs): <NEW_LINE> <INDENT> kwargs['context'] = self.get_context_data(request, user) <NEW_LINE> mail_kwargs = { "subject": self.get_subject(**kwargs), "message": self.get_message(**kwargs), "from_email": (settings.DEFAULT_FROM_EMAIL), "recipient_list": [user.email], } <NEW_LINE> try: <NEW_LINE> <INDENT> number_sent = send_mail(**mail_kwargs) <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> self.log_mail_error(error=error, **mail_kwargs) <NEW_LINE> if isinstance(error, BadHeaderError): <NEW_LINE> <INDENT> err_code = 'badheader' <NEW_LINE> <DEDENT> elif isinstance(error, SMTPException): <NEW_LINE> <INDENT> err_code = 'smtperror' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> err_code = 'unexpectederror' <NEW_LINE> <DEDENT> return (False, err_code) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if number_sent > 0: <NEW_LINE> <INDENT> return (True, None) <NEW_LINE> <DEDENT> <DEDENT> self.log_mail_error(**mail_kwargs) <NEW_LINE> return (False, 'unknownerror')
_send_mail() will actually send the email
625941b366656f66f7cbbf62
@cli.command() <NEW_LINE> @click.argument('mode', type=click.Choice(['update', 'replace']), metavar='<update|replace>') <NEW_LINE> @click.option('-a', '--all-repos', is_flag=True, help='Use all accessible repositories.') <NEW_LINE> @click.option('-d', '--dry-run', is_flag=True, help='Doesn\'t make any changes to GitHub, just prints them.') <NEW_LINE> @click.option('-v', '--verbose', is_flag=True, help='Turns on logs on standard output.') <NEW_LINE> @click.option('-q', '--quiet', is_flag=True, help='Turns off all logs.') <NEW_LINE> @click.option('-r', '--template-repo', default='', help='Repository to use as a template.') <NEW_LINE> @click.pass_context <NEW_LINE> def run(ctx, mode, all_repos, dry_run, verbose, quiet, template_repo): <NEW_LINE> <INDENT> session = set_session() <NEW_LINE> config = ctx.obj.get('config') <NEW_LINE> template_repository = template_repo if template_repo else config.get('others', 'template-repo', fallback='') <NEW_LINE> labels = get_labels(template_repository, config) <NEW_LINE> repos = get_repos(all_repos, config) <NEW_LINE> logging = 1 if verbose and not quiet else 2 if quiet and not verbose else 0 <NEW_LINE> perform_operation(True if mode == 'replace' else False, repos, labels, dry_run, logging, session)
Run labels processingpython -m pip install --extra-index-url https://test.pypi.org/pypi labelord_klememi1
625941b3167d2b6e31218954
def sanity_filter(frames): <NEW_LINE> <INDENT> lst = [frame for frame in frames if check_frame(frame, debug=global_debug)] <NEW_LINE> print('Dropped {} frames'.format(len(frames) - len(lst)), file=sys.stderr) <NEW_LINE> return lst
Filter the frames to those passed the sanity check
625941b34f88993c3716be2b
def _get_links(): <NEW_LINE> <INDENT> import lxml.html as lh <NEW_LINE> url = 'http://gdelt.utdallas.edu/data/dailyupdates/?O=D' <NEW_LINE> page = requests.get(url) <NEW_LINE> text = lh.fromstring(page.content) <NEW_LINE> urls = text.xpath('//a/@href') <NEW_LINE> urls = [url for url in urls if '.zip' in url] <NEW_LINE> return urls
Private function to obtain the links for the daily update files from the GDELT website. Requires requests and lxmlself.
625941b3de87d2750b85fb43
def _create_dialog_with_widget(self, title, bundle, widget_class, *args, **kwargs): <NEW_LINE> <INDENT> parent = self._get_dialog_parent() <NEW_LINE> widget = self._create_widget(widget_class, *args, **kwargs) <NEW_LINE> self._apply_external_stylesheet(bundle, widget) <NEW_LINE> dialog = self._create_dialog(title, bundle, widget, parent) <NEW_LINE> return (dialog, widget)
Convenience method to create an sgtk TankQDialog with a widget instantiated from widget_class embedded in the main section. .. note:: For more information, see the documentation for :meth:`show_dialog()`. :param title: The title of the window :param bundle: The app, engine or framework object that is associated with this window :param widget_class: The class of the UI to be constructed. This must derive from QWidget. :type widget_class: :class:`PySide.QtGui.QWidget` Additional parameters specified will be passed through to the widget_class constructor.
625941b3b545ff76a8913bd7
def enumerate(self): <NEW_LINE> <INDENT> pass
Enumerate system attributes
625941b3d7e4931a7ee9dcd2
def createDictionary(): <NEW_LINE> <INDENT> dictionaryFile = open("dictionary.txt") <NEW_LINE> for line in dictionaryFile: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> DICTIONARY.append(line) <NEW_LINE> <DEDENT> dictionaryFile.close()
Reads from "dictionary.txt" and construct a list of valid words.
625941b3d164cc6175782b03
def _get_all_descendant_nodes(name_object, taxid: str) -> List[str]: <NEW_LINE> <INDENT> descendant_nodes: List[str] = [taxid] <NEW_LINE> if len(name_object[taxid].children) > 0: <NEW_LINE> <INDENT> for child in name_object[taxid].children: <NEW_LINE> <INDENT> descendant_nodes = descendant_nodes + _get_all_descendant_nodes(name_object, child) <NEW_LINE> <DEDENT> <DEDENT> return descendant_nodes
Get all descendant of a node
625941b338b623060ff0abac
def extract_matching_pattern(regex, group_name, unfiltered_list): <NEW_LINE> <INDENT> result = '' <NEW_LINE> filtered_list = filter(regex.search, unfiltered_list) <NEW_LINE> if len(filtered_list) == 1: <NEW_LINE> <INDENT> filtered_string = ''.join(filtered_list) <NEW_LINE> result = regex.search(filtered_string).group(group_name) <NEW_LINE> <DEDENT> return result
Return the desired group_name from a list of matching patterns @param regex : a regular expression with named groups @param group_name : the desired matching group name value @param unfiltered_list : a list of matches @rtype : the string value
625941b37cff6e4e81117741
def _GenServerPrimaryKey(self, file): <NEW_LINE> <INDENT> pass
Generate the data members that define the object primary key Identifier _key_members contains (string, name) which is already present in IFMapIdentifier.
625941b350485f2cf553cb4f
def kolonnerTilListe(lst, svar): <NEW_LINE> <INDENT> a, b, c = delInnListe(lst) <NEW_LINE> if svar == "v": <NEW_LINE> <INDENT> return b+a+c <NEW_LINE> <DEDENT> elif svar == "m": <NEW_LINE> <INDENT> return a+b+c <NEW_LINE> <DEDENT> elif svar == "h": <NEW_LINE> <INDENT> return a+c+b
Denne funksjonen tar inn en liste og et svar, undersoker hva svaret er, og lager en ny liste av den gamle listen utifra hva svaret var.
625941b3507cdc57c6306a88
def getResults(self): <NEW_LINE> <INDENT> if self._results: <NEW_LINE> <INDENT> return self._results <NEW_LINE> <DEDENT> if self._xmlDoc: <NEW_LINE> <INDENT> entryElements = self._xmlDoc.getElementsByTagNameNS(ATOM_NS, 'entry') <NEW_LINE> entries = [] <NEW_LINE> for entryElement in entryElements: <NEW_LINE> <INDENT> changeEntry = ChangeEntry(self._cmisClient, self._repository, entryElement) <NEW_LINE> entries.append(changeEntry) <NEW_LINE> <DEDENT> self._results = entries <NEW_LINE> <DEDENT> return self._results
Overriding to make it work with a list instead of a dict.
625941b39b70327d1c4e0b8a
def ReleaseUnmanagedResources(self,*args): <NEW_LINE> <INDENT> pass
ReleaseUnmanagedResources(self: GroundConductorSizeSet)
625941b38e7ae83300e4ad82
def reset(self): <NEW_LINE> <INDENT> self.counter[2] = 0
将第 2 级别的计数置为 0
625941b3091ae35668666d1d
def setup_scanner(hass, config: dict, see, discovery_info=None): <NEW_LINE> <INDENT> username = config.get(CONF_USERNAME) <NEW_LINE> password = config.get(CONF_PASSWORD) <NEW_LINE> account = config.get(CONF_ACCOUNTNAME, slugify(username.partition('@')[0])) <NEW_LINE> max_interval = config.get(CONF_MAX_INTERVAL) <NEW_LINE> gps_accuracy_threshold = config.get(CONF_GPS_ACCURACY_THRESHOLD) <NEW_LINE> icloudaccount = Icloud(hass, username, password, account, max_interval, gps_accuracy_threshold, see) <NEW_LINE> if icloudaccount.api is not None: <NEW_LINE> <INDENT> ICLOUDTRACKERS[account] = icloudaccount <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.error("No ICLOUDTRACKERS added") <NEW_LINE> return False <NEW_LINE> <DEDENT> def lost_iphone(call): <NEW_LINE> <INDENT> accounts = call.data.get(ATTR_ACCOUNTNAME, ICLOUDTRACKERS) <NEW_LINE> devicename = call.data.get(ATTR_DEVICENAME) <NEW_LINE> for account in accounts: <NEW_LINE> <INDENT> if account in ICLOUDTRACKERS: <NEW_LINE> <INDENT> ICLOUDTRACKERS[account].lost_iphone(devicename) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> hass.services.register(DOMAIN, 'icloud_lost_iphone', lost_iphone, schema=SERVICE_SCHEMA) <NEW_LINE> def update_icloud(call): <NEW_LINE> <INDENT> accounts = call.data.get(ATTR_ACCOUNTNAME, ICLOUDTRACKERS) <NEW_LINE> devicename = call.data.get(ATTR_DEVICENAME) <NEW_LINE> for account in accounts: <NEW_LINE> <INDENT> if account in ICLOUDTRACKERS: <NEW_LINE> <INDENT> ICLOUDTRACKERS[account].update_icloud(devicename) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> hass.services.register(DOMAIN, 'icloud_update', update_icloud, schema=SERVICE_SCHEMA) <NEW_LINE> def reset_account_icloud(call): <NEW_LINE> <INDENT> accounts = call.data.get(ATTR_ACCOUNTNAME, ICLOUDTRACKERS) <NEW_LINE> for account in accounts: <NEW_LINE> <INDENT> if account in ICLOUDTRACKERS: <NEW_LINE> <INDENT> ICLOUDTRACKERS[account].reset_account_icloud() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> hass.services.register(DOMAIN, 'icloud_reset_account', 
reset_account_icloud, schema=SERVICE_SCHEMA) <NEW_LINE> def setinterval(call): <NEW_LINE> <INDENT> accounts = call.data.get(ATTR_ACCOUNTNAME, ICLOUDTRACKERS) <NEW_LINE> interval = call.data.get(ATTR_INTERVAL) <NEW_LINE> devicename = call.data.get(ATTR_DEVICENAME) <NEW_LINE> for account in accounts: <NEW_LINE> <INDENT> if account in ICLOUDTRACKERS: <NEW_LINE> <INDENT> ICLOUDTRACKERS[account].setinterval(interval, devicename) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> hass.services.register(DOMAIN, 'icloud_set_interval', setinterval, schema=SERVICE_SCHEMA) <NEW_LINE> return True
Set up the iCloud Scanner.
625941b37d43ff24873a2a5a
def configure( always_display_bottom = None, arrow_head_character = None, arrow_tail_character = None, arrow_head_color = None, arrow_tail_color = None, code_color = None, display_arrow = None, display_link = None, display_locals = None, display_timestamp = None, display_trace_locals = None, exception_above = None, exception_arg_color = None, exception_below = None, exception_color = None, exception_file_color = None, filename_color = None, filename_display = None, full_line_newline = None, function_color = None, header_color = None, infix = None, inner_exception_message = None, inner_exception_separator = None, line_color = None, line_length = None, line_number_color = None, line_number_first = None, lines_after = None, lines_before = None, link_color = None, local_len_color = None, local_name_color = None, local_value_color = None, name = None, postfix = None, prefix = None, reset_stdout = None, separator_character = None, show_suppressed = None, stack_depth = None, syntax_error_color = None, timestamp_color = None, timestamp_function = None, top_first = None, trace_lines_after = None, trace_lines_before = None, truncate_code = None, truncate_locals = None ): <NEW_LINE> <INDENT> config.configure( always_display_bottom = always_display_bottom, arrow_head_character = arrow_head_character, arrow_tail_character = arrow_tail_character, arrow_head_color = arrow_head_color, arrow_tail_color = arrow_tail_color, code_color = code_color, display_arrow = display_arrow, display_link = display_link, display_locals = display_locals, display_timestamp = display_timestamp, display_trace_locals = display_trace_locals, exception_above = exception_above, exception_arg_color = exception_arg_color, exception_below = exception_below, exception_color = exception_color, exception_file_color = exception_file_color, filename_color = filename_color, filename_display = filename_display, full_line_newline = full_line_newline, function_color = function_color, header_color = header_color, 
infix = infix, inner_exception_message = inner_exception_message, inner_exception_separator = inner_exception_separator, line_color = line_color, line_length = line_length, line_number_color = line_number_color, line_number_first = line_number_first, lines_after = lines_after, lines_before = lines_before, link_color = link_color, local_len_color = local_len_color, local_name_color = local_name_color, local_value_color = local_value_color, name = name, postfix = postfix, prefix = prefix, reset_stdout = reset_stdout, separator_character = separator_character, show_suppressed = show_suppressed, stack_depth = stack_depth, syntax_error_color = syntax_error_color, timestamp_color = timestamp_color, timestamp_function = timestamp_function, top_first = top_first, trace_lines_after = trace_lines_after, trace_lines_before = trace_lines_before, truncate_code = truncate_code, truncate_locals = truncate_locals )
Configure settings governing how exceptions are displayed.
625941b316aa5153ce36222e
@templatefunc('revset(query[, formatargs...])') <NEW_LINE> def revset(context, mapping, args): <NEW_LINE> <INDENT> if not len(args) > 0: <NEW_LINE> <INDENT> raise error.ParseError(_("revset expects one or more arguments")) <NEW_LINE> <DEDENT> raw = evalstring(context, mapping, args[0]) <NEW_LINE> ctx = mapping['ctx'] <NEW_LINE> repo = ctx.repo() <NEW_LINE> def query(expr): <NEW_LINE> <INDENT> m = revsetmod.match(repo.ui, expr) <NEW_LINE> return m(repo) <NEW_LINE> <DEDENT> if len(args) > 1: <NEW_LINE> <INDENT> formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]] <NEW_LINE> revs = query(revsetmod.formatspec(raw, *formatargs)) <NEW_LINE> revs = list(revs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> revsetcache = mapping['cache'].setdefault("revsetcache", {}) <NEW_LINE> if raw in revsetcache: <NEW_LINE> <INDENT> revs = revsetcache[raw] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> revs = query(raw) <NEW_LINE> revs = list(revs) <NEW_LINE> revsetcache[raw] = revs <NEW_LINE> <DEDENT> <DEDENT> return templatekw.showrevslist("revision", revs, **mapping)
Execute a revision set query. See :hg:`help revset`.
625941b3d6c5a10208143dfd
def test_init_wrong_mistyped_domain(self): <NEW_LINE> <INDENT> self.assertRaises( TypeError, ServerName, **{ "domain" : 1234, } )
Tests that an ServerName object cannot be instantiated and a TypeError exception is raised if a domain is passed in, but it's not a string.
625941b363b5f9789fde6e9c
def fetch_master_data(params): <NEW_LINE> <INDENT> result = select(params) <NEW_LINE> return result.fetchall()
Fetch official unique company name from database Arg: req_params (dict): Http request parameters using search (database or BigQuery) or identify name string. Return: list: fetch result. converted SQLAlchemy.ResultProxy obj to list obj
625941b3f7d966606f6a9dbf
def setPositionsAndForm ( self, pos, form ): <NEW_LINE> <INDENT> self.positions = numpy.array(pos ) <NEW_LINE> self.functionalForms = form
Construct and ideal tube directly from an array of positions and functional forms :param pos: Array of points where the peaks or edges should be in Metres :param form: Array of functional forms of the points 1=peak, 2=edge
625941b36e29344779a623cd
def extend_volume(self, volume, new_size): <NEW_LINE> <INDENT> client = self._login() <NEW_LINE> try: <NEW_LINE> <INDENT> volume_info = client.getVolumeByName(volume['name']) <NEW_LINE> options = {'size': int(new_size) * units.Gi} <NEW_LINE> client.modifyVolume(volume_info['id'], options) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> raise exception.VolumeBackendAPIException(ex) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._logout(client)
Extend the size of an existing volume.
625941b3ec188e330fd5a55f
def save_metadata(self): <NEW_LINE> <INDENT> labels = [] <NEW_LINE> if os.path.isdir(self.metadata_dest_dir): <NEW_LINE> <INDENT> shutil.rmtree(self.metadata_dest_dir, ignore_errors=True) <NEW_LINE> <DEDENT> os.makedirs(self.metadata_dest_dir) <NEW_LINE> filenames = glob.glob(os.path.join(self.dest_dir, '*')) <NEW_LINE> filenames.sort() <NEW_LINE> for filename in filenames: <NEW_LINE> <INDENT> if 'sub-hc' in filename: <NEW_LINE> <INDENT> labels.append(0) <NEW_LINE> <DEDENT> elif 'sub-pd' in filename: <NEW_LINE> <INDENT> labels.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Error: Counld not determine label! exiting...') <NEW_LINE> <DEDENT> <DEDENT> filenames = np.array(filenames) <NEW_LINE> labels = np.array(labels) <NEW_LINE> labels = to_categorical(labels) <NEW_LINE> np.save(os.path.join(self.metadata_dest_dir, 'data.npy'), filenames) <NEW_LINE> np.save(os.path.join(self.metadata_dest_dir, 'labels.npy'), labels) <NEW_LINE> filenames_shuffled, labels_shuffled = shuffle(filenames, labels) <NEW_LINE> np.save(os.path.join(self.metadata_dest_dir, 'data_shuffled.npy'), filenames_shuffled) <NEW_LINE> np.save(os.path.join(self.metadata_dest_dir, 'labels_shuffled.npy'), labels_shuffled)
Function to get metadata infomation such as list of image names, and labels
625941b3d164cc6175782b04
def create_response_and_send_mail(wizard, token, partner_id, email): <NEW_LINE> <INDENT> url = wizard.survey_id.public_url <NEW_LINE> url = urlparse.urlparse(url).path[1:] <NEW_LINE> if token: <NEW_LINE> <INDENT> url = url + '/' + token <NEW_LINE> <DEDENT> values = { 'model': None, 'res_id': None, 'subject': wizard.subject, 'body': wizard.body.replace("__URL__", url), 'body_html': wizard.body.replace("__URL__", url), 'parent_id': None, 'attachment_ids': wizard.attachment_ids and [(6, 0, wizard.attachment_ids.ids)] or None, 'email_from': wizard.email_from or None, 'auto_delete': True, } <NEW_LINE> if partner_id: <NEW_LINE> <INDENT> values['recipient_ids'] = [(4, partner_id)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> values['email_to'] = email <NEW_LINE> <DEDENT> Mail.create(values).send()
Create one mail by recipients and replace __URL__ by link with identification token
625941b3293b9510aa2c3050
def anchors_for_shape( image_shape, pyramid_levels=None, ratios=None, scales=None, strides=None, sizes=None, shapes_callback=None, ): <NEW_LINE> <INDENT> if pyramid_levels is None: <NEW_LINE> <INDENT> pyramid_levels = [2, 3, 4] <NEW_LINE> <DEDENT> if strides is None: <NEW_LINE> <INDENT> strides = [2 ** x for x in pyramid_levels] <NEW_LINE> <DEDENT> if sizes is None: <NEW_LINE> <INDENT> sizes = [2 ** (x + 2) for x in pyramid_levels] <NEW_LINE> <DEDENT> if ratios is None: <NEW_LINE> <INDENT> ratios = np.array([1]) <NEW_LINE> <DEDENT> if scales is None: <NEW_LINE> <INDENT> scales = np.array([2 ** 0, 2 ** (1.0 / 3.0), 2 ** (2.0 / 3.0)]) <NEW_LINE> <DEDENT> if shapes_callback is None: <NEW_LINE> <INDENT> shapes_callback = guess_shapes <NEW_LINE> <DEDENT> image_shapes = shapes_callback(image_shape, pyramid_levels) <NEW_LINE> all_anchors = np.zeros((0, 4)) <NEW_LINE> for idx, p in enumerate(pyramid_levels): <NEW_LINE> <INDENT> anchors = generate_anchors(base_size=sizes[idx], ratios=ratios, scales=scales) <NEW_LINE> shifted_anchors = shift(image_shapes[idx], strides[idx], anchors) <NEW_LINE> all_anchors = np.append(all_anchors, shifted_anchors, axis=0) <NEW_LINE> <DEDENT> return all_anchors
Generators anchors for a given shape. Args image_shape: The shape of the image. pyramid_levels: List of ints representing which pyramids to use (defaults to [3, 4, 5, 6, 7]). ratios: List of ratios with which anchors are generated (defaults to [0.5, 1, 2]). scales: List of scales with which anchors are generated (defaults to [2^0, 2^(1/3), 2^(2/3)]). strides: Stride per pyramid level, defines how the pyramids are constructed. sizes: Sizes of the anchors per pyramid level. shapes_callback: Function to call for getting the shape of the image at different pyramid levels. Returns np.array of shape (N, 4) containing the (x1, y1, x2, y2) coordinates for the anchors.
625941b3293b9510aa2c3051
def print_menu(exits, room_items, inv_items): <NEW_LINE> <INDENT> print("You can:") <NEW_LINE> for direction in exits: <NEW_LINE> <INDENT> print_exit(direction, exit_leads_to(exits, direction)) <NEW_LINE> <DEDENT> for item_takeable in room_items: <NEW_LINE> <INDENT> print("TAKE " + item_takeable["id"].upper() + " to take the " + item_takeable["name"] + ".") <NEW_LINE> <DEDENT> for item_dropable in inv_items: <NEW_LINE> <INDENT> print("DROP " + item_dropable["id"].upper() + " to drop your " + item_dropable["name"] + ".") <NEW_LINE> <DEDENT> print("What do you want to do?")
This function displays the menu of available actions to the player. The argument exits is a dictionary of exits as exemplified in map.py. The arguments room_items and inv_items are the items lying around in the room and carried by the player respectively. The menu should, for each exit, call the function print_exit() to print the information about each exit in the appropriate format. The room into which an exit leads is obtained using the function exit_leads_to(). Then, it should print a list of commands related to items: for each item in the room print "TAKE <ITEM ID> to take <item name>." and for each item in the inventory print "DROP <ITEM ID> to drop <item name>." For example, the menu of actions available at the Reception may look like this: You can: GO EAST to your personal tutor's office. GO WEST to the parking lot. GO SOUTH to MJ and Simon's room. TAKE BISCUITS to take a pack of biscuits. TAKE HANDBOOK to take a student handbook. DROP ID to drop your id card. DROP LAPTOP to drop your laptop. DROP MONEY to drop your money. What do you want to do?
625941b373bcbd0ca4b2be33
def run_scan(self): <NEW_LINE> <INDENT> self.load_net_config() <NEW_LINE> self.scan_ips() <NEW_LINE> self.save_to_file()
Spusti skenovani dle nactene konfigurace
625941b399cbb53fe679299e
@pytest.fixture <NEW_LINE> def get_token() -> str: <NEW_LINE> <INDENT> env_path = Path(__file__).parent.parent / ".env" <NEW_LINE> load_dotenv(dotenv_path=env_path, override=True, verbose=True) <NEW_LINE> joplin_key = os.getenv("JOPLIN_KEY") <NEW_LINE> if not joplin_key: <NEW_LINE> <INDENT> raise EnvironmentError("no JOPLIN_KEY set in .env file") <NEW_LINE> <DEDENT> return joplin_key
put the token of the Joplin WebClipper config page :return:
625941b3cc40096d6159570b
def generate_random_uint_between(low, high, rand_func=generate_random_bytes): <NEW_LINE> <INDENT> if not (is_integer(low) and is_integer(high)): <NEW_LINE> <INDENT> raise TypeError("unsupported argument types(s): %r and %r" % (type(low).__name__, type(high).__name__)) <NEW_LINE> <DEDENT> if low >= high: <NEW_LINE> <INDENT> raise ValueError("high value must be greater than low value.") <NEW_LINE> <DEDENT> substrate = high - low - 1 <NEW_LINE> bits = integer_bit_size(substrate) <NEW_LINE> value = generate_random_uint_atmost(bits, rand_func=rand_func) <NEW_LINE> while value > substrate: <NEW_LINE> <INDENT> value = generate_random_uint_atmost(bits, rand_func=rand_func) <NEW_LINE> <DEDENT> return low + value
Generates a random long integer between low and high, not including high. :param low: Low :param high: High :param rand_func: Random bytes generator function. :returns: Random unsigned long integer value.
625941b3ff9c53063f47bfb5
def construct_graph(self, _contract): <NEW_LINE> <INDENT> for f in _contract.functions + _contract.constructor_as_list: <NEW_LINE> <INDENT> if f.name in ['slitherConstructorVariables', 'slitherConstructorConstantVariables'] or not f.is_public_or_external: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for dependency in f.depends_on: <NEW_LINE> <INDENT> written_f, sr = dependency[0], dependency[1] <NEW_LINE> n1 = self.get_node(f.name) <NEW_LINE> n2 = self.get_node(written_f.name) <NEW_LINE> if self.edge_dic.get((n1, n2)): <NEW_LINE> <INDENT> e = self.edge_dic[(n1, n2)] <NEW_LINE> old_label = e.get_label() <NEW_LINE> e.set_label(f'{old_label.strip()}, {sr.name} ') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.construct_edge(n1, n2) <NEW_LINE> e = self.edge_dic[(n1, n2)] <NEW_LINE> e.set_label(f'{sr.name} ')
Constructs the graph by connecting nodes with edges.
625941b323849d37ff7b2e49
def filter_macro(func, *args, **kwargs): <NEW_LINE> <INDENT> filter_partial = partial(func, *args, **kwargs) <NEW_LINE> class FilterMacroMeta(FilterMeta): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> for attr in WRAPPER_ASSIGNMENTS: <NEW_LINE> <INDENT> if hasattr(func, attr): <NEW_LINE> <INDENT> attrs[attr] = getattr(func, attr) <NEW_LINE> <DEDENT> <DEDENT> return super(FilterMacroMeta, mcs) .__new__(mcs, func.__name__, bases, attrs) <NEW_LINE> <DEDENT> def __call__(cls, *runtime_args, **runtime_kwargs): <NEW_LINE> <INDENT> return filter_partial(*runtime_args, **runtime_kwargs) <NEW_LINE> <DEDENT> <DEDENT> class FilterMacro(with_metaclass(FilterMacroMeta, FilterMacroType)): <NEW_LINE> <INDENT> def _apply(self, value): <NEW_LINE> <INDENT> return self.__class__()._apply(value) <NEW_LINE> <DEDENT> <DEDENT> return FilterMacro
Promotes a function that returns a filter into its own filter type. Example:: @filter_macro def String(): return Unicode | Strip | NotEmpty # You can now use `String` anywhere you would use a regular Filter: (String | Split(':')).apply('...') You can also use ``filter_macro`` to create partials, allowing you to preset one or more initialization arguments:: Minor = filter_macro(Max, max_value=18, inclusive=False) Minor(inclusive=True).apply(18)
625941b31f037a2d8b945fb5
def test_get_a_specific_template(self): <NEW_LINE> <INDENT> uid_list = ('centos-7', 'debian-8') <NEW_LINE> for uid in uid_list: <NEW_LINE> <INDENT> template = Template.get_template(uid) <NEW_LINE> self.assertEqual(uid, template.uid)
get_template should return the correct template
625941b33346ee7daa2b2b1e
def collect_sql(self, plan): <NEW_LINE> <INDENT> statements = [] <NEW_LINE> state = None <NEW_LINE> for migration, backwards in plan: <NEW_LINE> <INDENT> with self.connection.schema_editor(collect_sql=True, atomic=migration.atomic) as schema_editor: <NEW_LINE> <INDENT> if state is None: <NEW_LINE> <INDENT> state = self.project_state((migration.app_label, migration.name), at_end=False) <NEW_LINE> <DEDENT> if not backwards: <NEW_LINE> <INDENT> state = migration.apply(state, schema_editor, collect_sql=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> state = migration.unapply(state, schema_editor, collect_sql=True) <NEW_LINE> <DEDENT> <DEDENT> statements.extend(schema_editor.collected_sql) <NEW_LINE> <DEDENT> return statements
Take a migration plan and return a list of collected SQL statements that represent the best-efforts version of that plan.
625941b3d58c6744b4257a17
def send_activation_mail(*, request, user): <NEW_LINE> <INDENT> token = default_token_generator.make_token(user=user) <NEW_LINE> uidb64 = urlsafe_base64_encode(force_bytes(user.pk)) <NEW_LINE> domain = request.META['HTTP_HOST'] <NEW_LINE> path = reverse('register_confirm', kwargs={'uidb64': uidb64, 'token': token}) <NEW_LINE> url = 'http://{domain}{path}'.format(domain=domain, path=path) <NEW_LINE> template = loader.get_template('email/registration.html') <NEW_LINE> message = template.render(context={'url': url}) <NEW_LINE> user.email_user( subject=_('rExe portal - aktivacija korisničkog računa'), message=message, from_email=settings.DEFAULT_FROM_EMAIL )
Generate activation token, construct the activation URL, then email it to the user. :param request: (HttpRequest) Current request; its host header is used to build the absolute activation URL. :param user: (User) Registered user instance.
625941b3099cdd3c635f0a13
def __init__(self, iface): <NEW_LINE> <INDENT> super(SortNumber, self).__init__() <NEW_LINE> self.iface = iface <NEW_LINE> self.plugin_dir = os.path.dirname(__file__) <NEW_LINE> locale = QSettings().value('locale/userLocale')[0:2] <NEW_LINE> locale_path = os.path.join( self.plugin_dir, 'i18n', 'SortNumber_{}.qm'.format(locale)) <NEW_LINE> if os.path.exists(locale_path): <NEW_LINE> <INDENT> self.translator = QTranslator() <NEW_LINE> self.translator.load(locale_path) <NEW_LINE> if qVersion() > '4.3.3': <NEW_LINE> <INDENT> QCoreApplication.installTranslator(self.translator) <NEW_LINE> <DEDENT> <DEDENT> self.dlg = SortNumberDialog() <NEW_LINE> self.actions = [] <NEW_LINE> self.menu = self.tr(u'&Sort and Number') <NEW_LINE> self.toolbar = self.iface.addToolBar(u'SortNumber') <NEW_LINE> self.toolbar.setObjectName(u'SortNumber')
Constructor. :param iface: An interface instance that will be passed to this class which provides the hook by which you can manipulate the QGIS application at run time. :type iface: QgsInterface
625941b315baa723493c3d28
def fixedp(f,x0,tol=10e-6,maxiter=100): <NEW_LINE> <INDENT> e = 1 <NEW_LINE> l=.99 <NEW_LINE> i = 0 <NEW_LINE> xp = [] <NEW_LINE> while(e > tol and i < maxiter): <NEW_LINE> <INDENT> print("iter %d --> x = %e\n" % (i,x0) ) <NEW_LINE> x = l*x0+(1-l)*f(x0) <NEW_LINE> l = .99*l <NEW_LINE> e = norm(x0-x) <NEW_LINE> x0 = x <NEW_LINE> xp.append(x0) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> print("Done! Plotting the points...\n") <NEW_LINE> return x,xp
Fixed point algorithm
625941b33346ee7daa2b2b1f
def test_manual_baudrate(self, serial_ports, serial, mock_format_response): <NEW_LINE> <INDENT> main("--baudrate 115200 --format".split()) <NEW_LINE> serial.assert_called_once_with("/dev/ttyUSB5", 115200, timeout=2.0)
Baudrate should be overrideable.
625941b32ae34c7f2600cee8
def test(): <NEW_LINE> <INDENT> pass
test func :return: none
625941b3004d5f362079a0ef
def get_nodes_lines(self, **kwargs): <NEW_LINE> <INDENT> params = {'Nodes': util.ints_to_string(kwargs.get('nodes', []))} <NEW_LINE> result = self.make_request('bus', 'get_nodes_lines', **params) <NEW_LINE> if not util.check_result(result): <NEW_LINE> <INDENT> return False, result.get('resultDescription', 'UNKNOWN ERROR') <NEW_LINE> <DEDENT> values = util.response_list(result, 'resultValues') <NEW_LINE> return True, [emtype.NodeLinesItem(**a) for a in values]
Obtain stop IDs, coordinates and line information. Args: nodes (list[int] | int): nodes to query, may be empty to get all nodes. Returns: Status boolean and parsed response (list[NodeLinesItem]), or message string in case of error.
625941b3627d3e7fe0d68c05
def test_GetPlayerBans_str_valid(self): <NEW_LINE> <INDENT> status = self.iSteamUser.GetPlayerBans( steamids=PUBLIC_PROFILE, ).json.status_code <NEW_LINE> self.assertEqual( status, 200, 'Error retrieving player bans. Received Status ' + ' ({0}) instead of Status (200).'.format(status), )
Test to make sure we get a response code of 200 OK from a public profile.
625941b356ac1b37e6263f98
def __init__(self, value: int, position: list = []) -> None: <NEW_LINE> <INDENT> super().__init__(position) <NEW_LINE> self._value = value <NEW_LINE> object_data = const.MAZE_OBJ[value] <NEW_LINE> if object_data[1] is not None: <NEW_LINE> <INDENT> self.load_from_file(path.join(const.IMG_FOLDER, object_data[1])) <NEW_LINE> if len(object_data) > 2 and object_data[2] is not None: <NEW_LINE> <INDENT> self.crop(object_data[2][0], object_data[2][1], object_data[2][2], object_data[2][3]) <NEW_LINE> self.scale(const.SPRITE_SIZE) <NEW_LINE> <DEDENT> if len(object_data) > 3 and object_data[3] is not None: <NEW_LINE> <INDENT> self.set_colorkey(object_data[3]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.image = pygame.Surface(const.SPRITE_SIZE) <NEW_LINE> self.image.fill((255, 255, 255)) <NEW_LINE> self.rect = self.image.get_rect()
Create a new MazeObject. - Value should be an integer representing the type of case, value should be as in MAZE_OBJECT_TYPE. - Position should be a list of 2 integers representing the position as Row -> Column or None if not yet set. The image file, or any cropping data are loaded from "const.py"
625941b37b25080760e3921a
def test_missing_requirements_file(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> activate('pip>=0.8.3') <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> raise SkipTest("This version of pip is so old that it doesn't even " "handle the IOError opening the file itself properly.") <NEW_LINE> <DEDENT> with running_setup_py(False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.install_from_path('nonexistent.txt') <NEW_LINE> <DEDENT> except CalledProcessError as exc: <NEW_LINE> <INDENT> eq_(exc.returncode, SOMETHING_WENT_WRONG) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fail("Peep exited successfully but shouldn't have.")
Check that pip's InstallationError exception (when the specified requirements file doesn't exist) is handled gracefully.
625941b3d10714528d5ffa9b
def test_build(self): <NEW_LINE> <INDENT> _, temp = tempfile.mkstemp() <NEW_LINE> body = { 'sha': uuid.uuid4().hex, 'slug_size': 4096000, 'procfile': json.dumps({'web': 'node server.js'}), 'url': 'http://deis.local/slugs/1c52739bbf3a44d3bfb9a58f7bbdd5fb.tar.gz', 'checksum': uuid.uuid4().hex, } <NEW_LINE> with open(temp, 'w') as f: <NEW_LINE> <INDENT> f.write(json.dumps(body)) <NEW_LINE> <DEDENT> child = pexpect.spawn( 'cat {} | {} builds:create - --app=test-app'.format(temp, CLI)) <NEW_LINE> child.expect('Usage: ')
Test that a user can publish a new build.
625941b35fc7496912cc373d
def test_Failure_string_representation(self): <NEW_LINE> <INDENT> self.assertEqual("Failure(42, Because it is that way)", str(Failure(42, "Because it is that way")))
Validates the string representation of the `Failure` object
625941b394891a1f4081b85f
def RemoveSource(self,source): <NEW_LINE> <INDENT> self._sources.RemoveSource(source)
Remove a source object Parameters ---------- source : Source Source object to remove from active sources
625941b3e8904600ed9f1ce0
def transform(self, X: Iterable[TFingerprint], y: Optional[numpy.ndarray] = None, **transform_params: Any) -> numpy.ndarray: <NEW_LINE> <INDENT> return numpy.array([unpack_fingerprint(x) for x in X], dtype=numpy.float64)
:param X: source of fingerprints :param y: unused :param transform_params: unused :return: a numpy array containing the densely unpacked fingerprints
625941b35fdd1c0f98dbffec
def jump(self, oldr, oldc, newr, newc): <NEW_LINE> <INDENT> self.move(oldr, oldc, newr, newc) <NEW_LINE> jumpr = (oldr + newr) // 2 <NEW_LINE> jumpc = (oldc + newc) // 2 <NEW_LINE> self.squares[(jumpr, jumpc)].clear_checker()
CheckersGame.jump(oldr,oldc,newr,newc) jumps the piece that's on square (oldr,oldc) to square (newr,newc) and removes the piece in between that got jumped over
625941b3b5575c28eb68ddb4