Column           Type            Range
query            stringlengths   9 – 9.05k
document         stringlengths   10 – 222k
metadata         dict
negatives        listlengths     30 – 30
negative_scores  listlengths     30 – 30
document_score   stringlengths   4 – 10
document_rank    stringclasses   2 values
Concatenate train set and test set, so our filling data won't overfit on the train set.
def ConcatDF(train_set, test_set):
    return pd.concat([train_set, test_set], sort=True).reset_index(drop=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_training_and_testing_sets(data, Y):\r\n data = pd.concat([data, Y], axis=1)\r\n x,y=data.shape\r\n train_X_sub1=data[0:x//6]\r\n dev_X_sub1 = data[x//6:x//6 + x//12]\r\n test_X_sub1 = data[x//6 + x//12:x//3]\r\n\r\n train_X_sub2 = data[x//3:x//3+x//6]\r\n dev_X_sub2 = data[x//6 + x//3:x//3 + x//6 + x//12]\r\n test_X_sub2 = data[x//3 + x//6 + x//12:2*x//3]\r\n\r\n train_X_sub3 = data[2*x//3:(2*x//3) +x//6]\r\n dev_X_sub3 = data[x//6 + 2*x//3: (2*x//3) + x//6 + x//12]\r\n test_X_sub3 = data[2*x//3 + x//6 + x//12:x]\r\n\r\n train_X=train_X_sub1.append(train_X_sub2,ignore_index = True)\r\n train_X =train_X.append(train_X_sub3,ignore_index = True)\r\n dev_X= dev_X_sub1.append(dev_X_sub2,ignore_index = True)\r\n dev_X = dev_X.append(dev_X_sub3,ignore_index = True)\r\n test_X = test_X_sub1.append(test_X_sub2,ignore_index = True)\r\n test_X = test_X.append(test_X_sub3,ignore_index = True)\r\n\r\n\r\n train_X = util.shuffle(train_X)\r\n train_X = train_X.reset_index(drop=True)\r\n\r\n dev_X = util.shuffle(dev_X)\r\n dev_X = dev_X.reset_index(drop=True)\r\n\r\n test_X = util.shuffle(test_X)\r\n test_X = test_X.reset_index(drop=True)\r\n\r\n train_X_final=train_X\r\n dev_X_final = dev_X\r\n test_X_final = test_X\r\n x, y = train_X_final.shape\r\n train_X = train_X_final.iloc[:, 0:y - 1]\r\n train_Y = train_X_final.iloc[:, y - 1]\r\n\r\n x, y = test_X_final.shape\r\n test_X = test_X_final.iloc[:, 0:y - 1]\r\n test_Y = test_X_final.iloc[:, y - 1]\r\n\r\n x, y = dev_X_final.shape\r\n dev_X = dev_X_final.iloc[:, 0:y - 1]\r\n dev_Y = dev_X_final.iloc[:, y - 1]\r\n\r\n return train_X, train_Y, dev_X,dev_Y,test_X, test_Y", "def ConcatDF(train_set, test_set):\n df_all = pd.concat([train_set, test_set], sort=True).reset_index(drop=True)\n df_all.trn_len = train_set.shape[0]\n return df_all", "def clean_train_test2(train, test):\n\n # Species, Street, Trap\n labeller = LabelEncoder()\n labeller.fit(np.concatenate((train.Species.values, test.Species.values)))\n train.Species = labeller.transform(train.Species.values)\n test.Species = labeller.transform(test.Species.values)\n\n labeller.fit(np.concatenate((train.Street.values, test.Street.values)))\n train.Street = labeller.transform(train.Street.values)\n test.Street = labeller.transform(test.Street.values)\n\n labeller.fit(np.concatenate((train.Trap.values, test.Trap.values)))\n train.Trap = labeller.transform(train.Trap.values)\n test.Trap = labeller.transform(test.Trap.values)\n\n return train, test", "def merge(self, other):\r\n self._train_datas = np.concatenate(\r\n [self._train_datas, other._train_datas], 0)\r\n self._train_labels = np.concatenate(\r\n [self._train_labels, other._train_labels], 0)", "def generate_train_test(self):\n x, y = self.read_data()\n x_train, y_train, x_test, y_test = self.sample_data(x, y)\n self.train = (x_train, y_train)\n self.test = (x_test, y_test)", "def create_train_valid_set(self):\n\n if not self.eq_train:\n X_train_high_level, X_valid_high_level, X_train_low_level, X_valid_low_level, train_w, valid_w, y_train, y_valid = train_test_split(self.X_train_high_level, self.X_train_low_level, self.train_weights, self.y_train,\n train_size=0.7, test_size=0.3\n )\n else:\n X_train_high_level, X_valid_high_level, X_train_low_level, X_valid_low_level, train_w, valid_w, w_train_eq, w_valid_eq, y_train, y_valid = train_test_split(self.X_train_high_level, self.X_train_low_level,\n self.train_weights, self.train_weights_eq, self.y_train,\n train_size=0.7, test_size=0.3\n )\n self.train_weights_eq = w_train_eq\n\n #NOTE: 
might need to re-equalise weights in each folds as sumW_sig != sumW_bkg anymroe!\n self.train_weights = train_w\n self.valid_weights = valid_w #validation weights should never be equalised weights!\n\n print 'creating validation dataset'\n self.X_train_high_level = X_train_high_level\n self.X_train_low_level = self.join_objects(X_train_low_level)\n\n self.X_valid_high_level = X_valid_high_level\n self.X_valid_low_level = self.join_objects(X_valid_low_level)\n print 'finished creating validation dataset'\n\n self.y_train = y_train\n self.y_valid = y_valid", "def trainSet(self):\r\n self.currIdx = 0\r\n random.shuffle(self.trainSamples)\r\n self.samples = self.trainSamples[:self.numTrainSamplesPerEpoch]", "def build_full_trainset(self):\n\n return self.construct_trainset(self.raw_ratings)", "def create_train_test_sets(self,x,y,lenTest):\n \n nbInd = x.shape[0]\n shuffler = np.random.permutation(nbInd)\n x_train = x[shuffler][0:(nbInd-lenTest),]\n y_train = y[shuffler][0:(nbInd-lenTest),]\n\n x_test = x[shuffler][(nbInd-lenTest):nbInd,]\n y_test = y[shuffler][(nbInd-lenTest):nbInd,]\n\n return x_train,y_train,x_test,y_test", "def createTrainTestSets():\n tweets = open(noDuplicatesFilename, 'r').read().splitlines()\n name_mapping = loadNameMapping()\n holdoutLocations = [u'Frederiksberg, Danmark', u'T\\xe5rnby, Danmark', u'Kolding, Danmark', u'T\\xe4by, Sverige', u'Kungsbacka, Sverige', u'Kristianstad, Sverige', u'Bod\\xf8, Norge', u'Kvinnherad, Norge', u'Ullensaker, Norge']\n testSetLocation = []\n rest = []\n for tweet in tweets:\n if stringToTweet(tweet).getFullName() in holdoutLocations:\n testSetLocation.append(tweet)\n else:\n rest.append(tweet)\n tweets = rest\n testIndex = int(round(len(tweets) * (1 - test_set_ratio)))\n random.seed(1)\n random.shuffle(tweets)\n trainSet = tweets[:testIndex]\n testSet = tweets[testIndex:]\n open(trainSetFilename, 'w').write('\\n'.join(trainSet))\n open(testSetNormalFilename, 'w').write('\\n'.join(testSet))\n open(testSetLocationFilename, 'w').write('\\n'.join(testSetLocation))\n print \"Wrote %d tweets to train set\" % len(trainSet)\n print \"Wrote %d tweets to normal test set\" % len(testSet)\n print \"Wrote %d tweets to location test set\" % len(testSetLocation)", "def train_test_official(self):\n return self.sub_set(self.train_idcs), self.sub_set(self.test_idcs)", "def train_test_official(self):\n return self.sub_set(self.idcs_train), self.sub_set(self.idcs_test)", "def make_training_set(ind_list, training_data): \n \n exp = training_data[ind_list[0]] \n X_train = exp[0]\n u_train = exp[1] \n\n for i in ind_list[1:]: \n exp = training_data[i]\n X_train = np.append(X_train, exp[0], axis=0)\n u_train = np.append(u_train, exp[1], axis=0)\n\n return X_train, u_train", "def triples(self):\n return pd.concat((self._load_train(), self._load_valid(), self._load_test()))", "def generateTrainAndValidateset(trainSets, validateSets, validatePercentage=20):\n\tvalidateFiles = []\n\ttrainFiles = []\n\n\tfor validateSet in validateSets:\n\t\tif \".\" in validateSet:\n\t\t\tvalidateSet, percentage = validateSet.split(\".\")\n\n\t\t\tif percentage == \"all\":\n\t\t\t\t#overwrite any further checks and security measures, just append all files:\n\t\t\t\tvalidateFiles += getAllFiles([validateSet])\n\t\t\t\tcontinue\n\n\t\t\tpercentage = int(percentage)\n\t\telse:\n\t\t\tpercentage = validatePercentage\n\n\t\tif validateSet not in _dataSets:\n\t\t\traise ValueError(\"Not a valid validate set: \" + validateSet)\n\n\t\tallFiles = sorted(filter(lambda x: 
x.endswith(\".txt\"), os.listdir(_dataSets[validateSet])))\n\t\tallFiles = list(map(lambda x: _dataSets[validateSet] + x, allFiles))\n\t\trandom.seed(42) #make sure all lists are randomized equally each time\n\t\trandom.shuffle(allFiles)\n\n\t\tallAroused = list(filter(lambda x: isAroused(x), allFiles))\n\t\tallNonAroused = list(filter(lambda x: not isAroused(x), allFiles))\n\n\t\tvalidateFiles += allAroused[len(allAroused) - int(percentage * len(allFiles) / 100 / 2):]\n\t\tvalidateFiles += allNonAroused[len(allNonAroused) - int(percentage * len(allFiles) / 100 / 2):]\n\n\n\tfor trainSet in trainSets:\n\t\tif \".\" in trainSet:\n\t\t\ttrainSet, percentage = trainSet.split(\".\", 1)\n\n\t\t\tif percentage == \"all\":\n\t\t\t\t#overwrite any further checks and security measures, just append all files:\n\t\t\t\ttrainFiles += getAllFiles([trainSet])\n\t\t\t\tcontinue\n\n\t\t\tpercentage = int(percentage)\n\t\telse:\n\t\t\tpercentage = 100 - validatePercentage\n\t\t\tvalidatePercentage = validatePercentage\n\n\t\tif trainSet not in _dataSets:\n\t\t\traise ValueError(\"Not a valid train set: \" + trainSet)\n\n\t\tallFiles = sorted(filter(lambda x: x.endswith(\".txt\"), os.listdir(_dataSets[trainSet])))\n\t\tallFiles = list(map(lambda x: _dataSets[trainSet] + x, allFiles))\n\t\trandom.seed(42) #make sure all lists are randomized equally each time\n\t\trandom.shuffle(allFiles)\n\n\t\tallAroused = list(filter(lambda x: isAroused(x), allFiles))\n\t\tallNonAroused = list(filter(lambda x: not isAroused(x), allFiles))\n\n\t\ttrainFiles += filter(lambda x: x not in validateFiles, allAroused[:int(percentage * len(allFiles) / 100 / 2)])\n\t\ttrainFiles += filter(lambda x: x not in validateFiles, allNonAroused[:int(percentage * len(allFiles) / 100 / 2)])\n\n\tif not any(map(lambda x: x.endswith(\".all\"), list(trainSets) + list(validateSets))):\n\t\t#assert no validatefiles are also trainfiles\n\t\tassert(set(trainFiles) - set(validateFiles) == set(trainFiles))\n\t\t#assert an equal amount of aroused and non-aroused validatefiles\n\t\tassert(len(list(filter(isAroused, validateFiles))) == len(validateFiles) / 2)\n\n\treturn trainFiles, validateFiles", "def train_test_split_soy(soy_data):\n\n print('[ INFO ]: Creating training and testing set for soy data...')\n\n train_set_size = round(0.67*len(soy_data))\n soy_data['index'] = soy_data.index.tolist()\n\n # Set any record with an index less than 2/3 of the number of records\n # in the data frame to the training set\n train_set = soy_data[soy_data['index'] < train_set_size]\n train_set = train_set.drop('index', axis=1)\n\n # Assign the next 1/3 to the testing set\n test_set = soy_data[soy_data['index'] >= train_set_size]\n test_set = test_set.drop('index', axis=1)\n\n return train_set, test_set", "def create_sets(test, data, test_size=0.2, write=False):\n y_test = test['y_old']\n X_test = test.drop('y_old', 1)\n y_data = data['y_old']\n X_data = data.drop('y_old', 1)\n X_train, X_val, y_train, y_val = train_test_split(X_data, y_data, test_size=test_size, random_state=123)\n if write:\n pickle.dump((X_train, X_val, y_train, y_val), open(obj_save_path+'train_val_df.p', 'wb'))\n #X_train, X_val, y_train, y_val = pickle.load(open(obj_save_path+'train_val_df.p', 'rb'))\n return X_train, y_train, X_val, y_val, X_test, y_test", "def prep_data_fn(self, st_train_dt, end_train_dt, st_val_dt, end_val_dt, st_test_dt, end_test_dt):\n df = self.get_prep_data()\n train = df[(df['ft_data_dt'] >= st_train_dt) & (df['ft_data_dt'] <= end_train_dt)]\n val = df[(df['ft_data_dt'] 
>= st_val_dt) & (df['ft_data_dt'] <= end_val_dt)].sample(frac=0.4, random_state=2021)\n test = df[(df['ft_data_dt'] >= st_test_dt) & (df['ft_data_dt'] <= end_test_dt)]\n print(f'----train----')\n print(train[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----validation----')\n print(val[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n print(f'----test----')\n print(test[['ft_data_dt', 'target', 'idd']].groupby(['ft_data_dt', 'target']).agg(['count']))\n self.set_train(train)\n self.set_validation(val)\n self.set_test(test)\n train_X = train[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n train_y = train['target']\n val_X = val[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n val_y = val['target']\n test_X = test[[c for c in train.columns if c not in ['idd', 'ft_data_dt', 'target']]]\n test_y = test['target']\n self.set_train_X(train_X)\n self.set_train_y(train_y)\n self.set_val_X(val_X)\n self.set_val_y(val_y)\n self.set_test_X(test_X)\n self.set_test_y(test_y)", "def concatenate_tasks(\n tasks,\n concat_train=True,\n concat_valid=True,\n concat_test=True,\n):\n new_task = deepcopy(tasks[0])\n new_task._name = \"+\".join(task.name for task in tasks)\n if concat_train:\n new_task._train_data = ConcatDataset(\n [task.train_data for task in tasks])\n if concat_valid:\n new_task._valid_data = ConcatDataset(\n [task.valid_data for task in tasks])\n if concat_test:\n new_task._test_data = ConcatDataset([task.test_data for task in tasks])", "def _split_train_tst(self):\n num_samples = self.Y.shape[0]\n mapper_file = self.checkpointer.get_mapper_file_location()\n if not self.checkpointer.is_mapper_checkpointed():\n print 'No mapper checkpoint found. Fresh loading in progress ...'\n # Now shuffle the data\n sample_id = range(num_samples)\n random.shuffle(sample_id)\n print 'Dumping the mapper shuffle for reuse.'\n Pickle.dump(sample_id, open(mapper_file, 'wb'))\n print 'Dump complete. Moving Forward...'\n else:\n print 'Mapper Checkpoint found... Reading from mapper dump'\n sample_id = Pickle.load(open(mapper_file, 'rb'))\n print 'Mapping unpickling complete.. 
Moving forward...'\n\n self.X_fwd = self.X_fwd[sample_id]\n self.X_bwd = self.X_bwd[sample_id]\n self.Y = self.Y[sample_id]\n # Now divide the data into test ans train set\n test_fraction = 0.01\n self.test_size = int(test_fraction * num_samples)\n self.train_size = num_samples - self.test_size\n # Forward review\n self.X_trn_fwd = self.X_fwd[0:self.train_size]\n self.X_tst_fwd = self.X_fwd[self.train_size:num_samples]\n # Backward review\n self.X_trn_bwd = self.X_bwd[0:self.train_size]\n self.X_tst_bwd = self.X_bwd[self.train_size:num_samples]\n # Summary\n self.Y_trn = self.Y[0:self.train_size]\n self.Y_tst = self.Y[self.train_size:num_samples]", "def test_set(self):\n if self._testset is None: # loads the data to memory once and when requested.\n testset_raw = self.read_dataset(self._testset_path)\n testset_spacy = self.read_spacy_pickle(self._testset_spacy_path)\n self._testset = pd.concat([testset_raw, testset_spacy], axis=1)\n\n self._testset['language'] = self._language\n self._testset['dataset_name'] = self._dataset_name\n\n return self._testset", "def get_combined_data(self, file_path: str, train_file_name: str,\n test_file_name: str) -> pd.DataFrame:\n train_data=self.load_dataset(file_path,train_file_name)\n train_data=train_data.drop('Survived', 1)\n test_data=self.load_dataset(file_path,test_file_name)\n\n combined_data = train_data.append(test_data)\n combined_data.reset_index(inplace=True)\n combined_data.drop('index', inplace=True, axis=1)\n\n return combined_data", "def initSets(self):\n data_frame = pd.read_csv(self.train_file, header=None)\n data_frame = data_frame.drop(columns=self.drop_cols)\n features = data_frame.iloc[:, :-1].values\n labels = data_frame.iloc[:, -1].values\n if self.test_file is None:\n self.train_feat, self.test_feat, self.train_labl, self.test_labl = train_test_split(features, labels, test_size=self.test_size)\n else:\n data_frame = pd.read_csv(self.test_file, header=None)\n data_frame = data_frame.drop(columns=self.drop_cols)\n self.train_feat, _, self.train_labl, _ = train_test_split(features, labels, test_size=self.test_size)\n features = data_frame.iloc[:, :-1].values\n labels = data_frame.iloc[:, -1].values\n _, self.test_feat, _, self.test_labl = train_test_split(features, labels, test_size=self.test_size)\n # kfold = KFold(n_splits=3)\n # self.train_index, self.test_index = kfold.split(features,labels)", "def train(self, train_set) -> None:\n super().train(train_set)\n # split into data and target\n xlist, y = zip(*train_set)\n x = sparse.vstack(xlist)\n self._classifier.fit(x, y)", "def split_train_test(ratings):\r\n ratings = ratings.sample(frac=1).reset_index(drop=True)\r\n train_user_list = []\r\n train_item_list = []\r\n train_rating_list = []\r\n test_user_list = []\r\n test_item_list = []\r\n test_rating_list = []\r\n user_pool = set(ratings['userId'].unique())\r\n for idx in user_pool:\r\n flag = 0\r\n items = ratings[ratings['userId']==idx][['itemId','rating']]\r\n for i, row in items.iterrows():\r\n if flag == 0:\r\n test_user_list.append(int(idx))\r\n test_item_list.append(int(row['itemId']))\r\n test_rating_list.append(row['rating'])\r\n flag = 1\r\n else:\r\n train_user_list.append(int(idx))\r\n train_item_list.append(int(row['itemId']))\r\n train_rating_list.append(row['rating'])\r\n\r\n train = pd.DataFrame({'userId': train_user_list, 'itemId': train_item_list, 'rating': train_rating_list}, columns=['userId', 'itemId', 'rating'])\r\n test = pd.DataFrame({'userId': test_user_list, 'itemId': test_item_list, 'rating': 
test_rating_list}, columns=['userId', 'itemId', 'rating'])\r\n return [train, test]\r\n \r\n\r\n \r\n #train, test = train_test_split(ratings, test_size=0.1, shuffle=True)\r\n #return [train, test]\r", "def split_data(self):\n self.train, self.val, self.test_x, self.test_y = [], [], [], []\n train_size = self.horizon\n # This assumes all countries have the same length.\n # The minus two gives space for the validation and test sets as they will overshoot.\n k_folds = len(self.countries[0].data)//self.horizon - 2\n for _ in range(k_folds):\n tr, v, te_x, te_y = self.cross_validate(train_size)\n self.train.append(tr), self.val.append(v), self.test_x.append(te_x), self.test_y.append(te_y)\n train_size += self.horizon", "def combine_all(self):\n combined = copy.deepcopy(self.train)\n\n def _combine_data(data):\n for img_path, pid, camid in data:\n\n if pid in self._junk_pids:\n continue\n #pdb.set_trace()\n pid = self.dataset_name + \"_\" + str(pid)\n camid = self.dataset_name + \"_\" + str(camid)\n combined.append((img_path, pid, camid))\n\n _combine_data(self.query)\n _combine_data(self.gallery)\n\n self.train = combined\n self.num_train_pids = self.get_num_pids(self.train)", "def _load_training_and_test_sets(normalize):\n class_labels = []\n test_labels = []\n norm = None\n if normalize == True:\n norm = loading.get_normalize_vector()\n\n for i in range(0, 10):\n [training, test] = loading.load_number_set(i, 0.7, norm_vector=norm)\n labels = [str(i)] * training.shape[0]\n tlabels = [str(i)] * test.shape[0]\n if i == 0:\n train_points = training\n test_points = test\n else:\n train_points = np.concatenate((train_points, training), axis = 0)\n test_points = np.concatenate((test_points, test), axis = 0)\n class_labels.extend(labels)\n test_labels.extend(tlabels)\n\n return train_points, test_points, class_labels, test_labels", "def train_test_split(self):\n random.seed(self.args.seed)\n nodes = [node for node in range(self.ncount)]\n random.shuffle(nodes)\n self.train_nodes = torch.LongTensor(nodes[0:self.args.training_size])\n self.validation_nodes = torch.LongTensor(nodes[self.args.training_size:self.args.training_size+self.args.validation_size])\n self.test_nodes = torch.LongTensor(nodes[self.args.training_size+self.args.validation_size:])", "def refresh_test_dataset(self):\n inputs_id, inputs_starts, inputs_paths, inputs_ends, inputs_label = self.build_data(self.reader, self.test_items, self.option.max_path_length)\n self.test_dataset = CodeDataset(inputs_id, inputs_starts, inputs_paths, inputs_ends, inputs_label)" ]
[ "0.73496187", "0.7224804", "0.65727186", "0.654394", "0.6478421", "0.64367497", "0.6360613", "0.6349956", "0.6343068", "0.6336615", "0.63026404", "0.62977755", "0.62607616", "0.61374307", "0.61365265", "0.6113406", "0.61098444", "0.60983783", "0.60931194", "0.6065454", "0.6063612", "0.60592717", "0.60472846", "0.60396624", "0.6029524", "0.60172474", "0.60126764", "0.60112345", "0.5997387", "0.59938097" ]
0.72472507
1
Gets all binary entries for a given Keepass database entry.
def get_binaries(kdb, entry):
    xml = objectify.fromstring(entry.dump_xml())
    binaries = list(xml.xpath('./Binary'))
    for binary in binaries:
        yield (binary.Key.text, Binary(kdb, binary))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_binaries(name_only=False):\n\n bins = list()\n\n dtf_db = sqlite3.connect(DTF_DB)\n cur = dtf_db.cursor()\n\n # This just returns the name\n if name_only:\n\n sql = ('SELECT name '\n 'FROM binaries ')\n\n for binary in cur.execute(sql):\n bins.append(binary[0])\n\n # This returns a list of items\n else:\n\n sql = ('SELECT name, version, '\n 'about, author '\n 'FROM binaries '\n 'ORDER BY name')\n\n cur.execute(sql)\n\n while True:\n\n item = dtf.core.item.Item()\n line = cur.fetchone()\n if line is None:\n break\n\n item.type = dtf.core.item.TYPE_BINARY\n item.name = line[0]\n item.version = line[1]\n item.about = line[2]\n item.author = line[3]\n\n bins.append(item)\n\n return bins", "def get_entries_all(self):\n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n return self.database.find_entries_by_title('.*', \n regex=True)", "def getAllFileBytes(self) -> List[ghidra.program.database.mem.FileBytes]:\n ...", "def entries():\n\n\treturn [entry.value for entry in db.session.query(Entry).all()]", "def getThings(dbn='core', env=None):\n global gDbEnv\n\n if env is None:\n env = gDbEnv\n\n if env is None:\n raise DatabaseError(\"Database environment not set up\")\n\n entries = []\n subDb = gDbEnv.open_db(dbn.encode(\"utf-8\"), dupsort=True) # open named sub db named dbn within env\n with gDbEnv.begin(db=subDb) as txn: # txn is a Transaction object\n with txn.cursor() as cursor:\n if cursor.first(): # first key in database\n while True:\n key = cursor.key().decode()\n if len(key) == DID_LENGTH and \"/\" not in key:\n value = cursor.value().decode()\n ser, sep, sig = value.partition(SEPARATOR)\n try:\n dat = json.loads(ser, object_pairs_hook=ODict)\n except ValueError as ex:\n if cursor.next():\n continue\n else:\n break\n try:\n did, index = dat[\"signer\"].rsplit(\"#\", maxsplit=1)\n except (AttributeError, ValueError) as ex:\n if cursor.next():\n continue\n else:\n break\n\n if did != key: # not self signed so thing\n entries.append(key)\n if not cursor.next(): # next key in database if any\n break\n return entries", "def all(self):\n self.scan()\n return self.entries", "def get_entry(self, entry):\n hash_key = self._calculate_hash(entry)\n sql = \"SELECT * FROM {t_id} WHERE hash = '{hash_key}'\".format(\n t_id=self.table_id, hash_key=hash_key)\n resp = self.fusiontables.query().sql(sql=sql).execute()\n return resp.get('rows')", "def get_all_entries():\n conn = sqlite3.connect(CONF.database, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)\n curs = conn.cursor()\n try:\n return curs.execute(\"SELECT date_time, price FROM rates ORDER BY date_time DESC\").fetchall()\n finally:\n curs.close()\n conn.close()", "def get_entries(self):\n return self._netdis.loxone.entries", "def binary(self):\n return self.data.binary.values", "def all():\n # results = [String.from_dict(redis.hgetall(key)) for key in redis.keys() if key != 'index']\n results = []\n for key in redis_store.keys(String.generate_key('*')):\n data = pickle.loads(redis_store.get(key))\n string = String(data['key']).deserialize(data)\n results.append(string)\n return results", "def get_all_casks(self):", "def get_all_files_to_instrument():\n sql=\"SELECT * FROM files\"\n conn=sqlite3.connect(CONNECTION_STRING)\n c=conn.cursor()\n c.execute(sql)\n results=c.fetchall()\n conn.close()\n return results", "def to_bytes(self) -> \"list[int]\":\n entries_per_byte = 8 // int(self.offset_type)\n byte_array = []\n for i in range(0, len(self.entries), entries_per_byte):\n byte = 0\n 
for j in range(0, entries_per_byte):\n byte |= self.entries[i + j] << (j * int(self.offset_type))\n byte_array.append(byte)\n return byte_array", "async def get_entries(self, *args,convert = True, listed=False, as_dict=False):\r\n consts = args\r\n condition = condition = \" AND \".join(consts)\r\n if not consts:\r\n query = \"SELECT * FROM {table_name}\"\r\n else:\r\n query = \"SELECT * FROM {table_name} WHERE {condition}\"\r\n query = query.format(condition = condition, table_name=self.name)\r\n cur = await self.data.db.execute(query)\r\n data = await cur.fetchall()\r\n await cur.close()\r\n if not data:\r\n return []\r\n if (convert and listed) or (convert and as_dict):\r\n raise ArgumentError(\"Incorrect arguments passed. only one can be True between arguments (convert, listed, as_dict)\")\r\n #Data contains all the info retrieved. Compile into dicts and also get the primary key data\r\n if listed:\r\n data = self.compile_as_list(data)\r\n return data\r\n if as_dict:\r\n data = self.compile_as_dict(data)\r\n return data\r\n data = self.compile_as_obj(data)\r\n return Records(data)", "def get_all(self):\n return self.db", "def readentries(self):\n return list(x for x in self)", "def listAll(self):\n red = self.dbConnect()\n return red.keys()", "def get_keys(weat_db):\n import updater\n keys = updater.list_keys(weat_db, verbose=False)\n return keys", "def get_entries(\n self,\n entry\n ):\n\n try:\n return self._cache[self._alias[entry]]\n except:\n pass\n\n if entry in self._header:\n \n # get the index\n idx = self._header[entry]\n\n entries = []\n\n for row in self._array:\n tmp = [0 for i in row]\n for i,cell in enumerate(row):\n if cell != 0:\n tmp[i] = self[cell][idx]\n entries.append(tmp)\n\n # add entries to cache\n self._cache[self._alias[entry]] = entries\n\n return entries", "def __update_binary(item):\n\n conn = sqlite3.connect(DTF_DB)\n cur = conn.cursor()\n\n # Remove the line first.\n sql = ('DELETE FROM binaries '\n \"WHERE name='%s'\" % item.name)\n\n cur.execute(sql)\n\n entry = [(item.name, item.version, item.author,\n item.install_name)]\n\n # Update a Binary Entry\n sql = ('INSERT INTO binaries (name, version, '\n 'author, install_name)'\n 'VALUES (?, ?, ?, ?)')\n\n cur.executemany(sql, entry)\n conn.commit()\n\n return cur.rowcount", "def get_all_element_of_table(path, table):\n conn = sqlite3.connect(path)\n c = conn.cursor()\n to_return = []\n for row in c.execute('SELECT * FROM '+table).fetchall():\n for x in row:\n to_return.append(x)\n conn.close()\n return to_return", "def get_db_entries(location: str='') -> list:\n db = CarsDb() # pylint: disable=invalid-name\n results = db.get_cars(location)\n db.commit()\n db.close()\n return results", "def get_entries(uri):\n if not uri.endswith('/entries'):\n uri += '/entries'\n results = VGOCache(uri).results\n\n results = [ adjust_entry(x) for x in results ]\n return results", "def get_ssh_entries(kdb):\n entries = kdb.entries\n entries = [try_parse_ssh_entry(kdb,e) for e in entries]\n entries = [e for e in entries if e]\n return entries", "def _get_binary_filesystem(self, cr, uid, ids, name, arg, context=None):\n res = {}\n attachment_obj = self.pool.get('ir.attachment')\n\n for record in self.browse(cr, uid, ids, context=context):\n res[record.id] = False\n attachment_ids = attachment_obj.search(cr, uid, [('res_model','=',self._name),('res_id','=',record.id),('binary_field','=',name)], context=context)\n import logging\n #_logger = logging.getLogger(__name__)\n #_logger.info('res %s', attachment_ids)\n if 
attachment_ids:\n img = attachment_obj.browse(cr, uid, attachment_ids, context=context)[0].datas\n #_logger.info('res %s', img)\n res[record.id] = img\n return res", "def get_entries(self):\n return self.find_by_st(\"urn:schemas-denon-com:device:ACT-Denon:1\")", "def get_file_contents(db_cursor):\n\n db_cursor.execute(\"\"\"SELECT * FROM data\"\"\")\n db_rows = db_cursor.fetchall()\n return {row[0]: row[1] for row in db_rows if row != []}", "def get_all_rows(self):\n cur = self.cursor()\n sql = (\"SELECT * FROM snapshot_log;\")\n cur.execute(sql)\n r = cur.fetchall()\n #cur.close()\n self.close()\n return r", "def get_all(self):\n cursor = self._dbcon.cursor()\n cursor.execute(u\"select rowid,* from books\")\n result = cursor.fetchall()\n cursor.close()\n return [self._book_from_query_result(x) for x in result]" ]
[ "0.6717193", "0.6395717", "0.6036679", "0.6013244", "0.5961055", "0.5801163", "0.5788147", "0.56636417", "0.5656254", "0.5552676", "0.5549219", "0.5496913", "0.54446685", "0.5436322", "0.54299235", "0.5419369", "0.5414017", "0.5394177", "0.535307", "0.53367394", "0.53030056", "0.5282332", "0.52793074", "0.523584", "0.5202643", "0.51922923", "0.5154425", "0.51473504", "0.5133337", "0.5130191" ]
0.7077578
0
Extracts, decodes and decompresses the binary data for this block. Returns the data as bytes.
def content(self):
    if self._content is not None:
        return self._content
    binaries = self._kdb.kdb.obj_root.Meta.Binaries
    xpath = './Binary[@ID="{}"]'.format(self.ref)
    binary = binaries.xpath(xpath)[0]
    result = b64decode(binary.text)
    if (binary.attrib['Compressed']):
        result = zlib.decompress(result, 16+zlib.MAX_WBITS)
    self._content = result
    return self._content
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_binary(self):\n length = self.read_uint32()\n bytes = self.data[:length]\n self.data = self.data[length:]\n return bytes", "def get_data_block_contents_bytes(self):\n bb = self.volume.blkdev.block_bytes\n if self.volume.is_ffs:\n return bb\n else:\n return bb - 24", "def raw_data(self):\n return self._buf[self.data_offset():self.data_offset() + self.size()]", "def _decode_binary(data):\n try:\n data = data.decode('utf-8')\n except UnicodeDecodeError: # pragma: no cover\n # for data written an upstream java App\n data = data.decode('latin-1')\n return data", "def binary(self):\n return self.data.binary.values", "def decode(self, data: bytes) -> bytes:\n ...", "def decomptcptxbytes(self) :\n\t\ttry :\n\t\t\treturn self._decomptcptxbytes\n\t\texcept Exception as e:\n\t\t\traise e", "def data(self):\n if self._data is None:\n return BinaryData(strencoding=\"base64\")\n return self._data", "def read_binary(self):\n with self.open(\"rb\") as f:\n return f.read()", "def data(self):\n return self._buf[self._offset : self._offset + self._size]", "def get_data(self):\n return self.data[self._size:self._size + self._len]", "def as_bytes(self) -> bytes:\n\n return bytes(self.data_bytes)", "def read_bytes(self) -> bytes:\n t = self.pc\n while self.data[self.pc] != 0:\n self.pc += 1\n result = self.data[t:self.pc]\n self.pc += 1 # jump '\\0'\n return result", "def decomptcprxbytes(self) :\n\t\ttry :\n\t\t\treturn self._decomptcprxbytes\n\t\texcept Exception as e:\n\t\t\traise e", "def bdecode_buffer(data):\n\tif isinstance(data, str):\n\t\tdata = data.encode()\n\twith BytesIO(data) as f:\n\t\treturn bdecode(f)", "def decode(self, data):\n\n # Tested:\n # types: z, T, a\n # nested_structure\n # repeated\n if not hasattr(data, 'read'):\n data = io.BytesIO(data)\n\n if self._kv_fmt:\n return dict(self._decode_wire(data))\n else:\n return tuple(self._decode_wire(data))", "def unpack(self, data):\n ptr = 0\n try:\n ptr, self.transaction_id = bbclib_binary.get_bigint(ptr, data)\n self.idlen_conf[\"transaction_id\"] = len(self.transaction_id)\n ptr, num = bbclib_binary.get_n_byte_int(ptr, 2, data)\n if num == 1:\n ptr, self.asset_id = bbclib_binary.get_bigint(ptr, data)\n self.idlen_conf[\"asset_id\"] = len(self.asset_id)\n else:\n self.asset_id = None\n except:\n return False\n return True", "def get_binary(self):\n data = bytes()\n\n for tag in self._tags:\n value = 0\n if tag in self.fields.keys():\n value = self.fields[tag]\n try:\n data += struct.pack(\"<I\", value)\n except struct.error as e:\n raise TypeError(f\"expected integer value for {tag} but got {type(value)}: {value}\")\n\n return data", "def to_bytes(self):\n return bytes(self.data)", "def decrypt_blocks(self):\n full_bin_data = bytearray()\n for n in range(0, self.num_of_chunks + 1):\n with open('block_{:02d}.json'.format(n)) as f:\n json_input = json.load(f)\n try:\n b64 = json_input\n json_k = ['nonce', 'header', 'ciphertext', 'tag']\n jv = {k: b64decode(b64[k]) for k in json_k}\n cipher = AES.new(self.key, AES.MODE_EAX, nonce=jv['nonce'])\n cipher.update(jv['header'])\n plaintext = cipher.decrypt_and_verify(jv['ciphertext'],\n jv['tag'])\n full_bin_data.extend(plaintext)\n except ValueError:\n print(\"Incorrect decryption\")\n return bytes(full_bin_data)", "def bytes(self):\n return self._payload", "def decryptByteArray(self, data, keyobj):\n\n nrOfBlocks = int(math.ceil(len(data)/self.blockLengthBytes))\n M = bytearray()\n\n for i in range(nrOfBlocks):\n c = bytearray(self.blockLengthBytes)\n for j in 
range(self.blockLengthBytes):\n index = i*self.blockLengthBytes+j\n if index < len(data):\n c[j] = data[index]\n m = self.encryptor.decrypt(c, keyobj)\n for mb in m:\n M.append(mb)\n\n return M", "def decode(self):\n s = self.encoded_content\n if self.encoded_content:\n if self.encoding:\n if self.encoding == u'base64':\n s = decode_base64(s)\n else:\n raise Exception(u'unknown data encoding %s' % (self.encoding))\n if self.compression:\n if self.compression == u'gzip':\n s = decompress_gzip(s)\n else:\n raise Exception(u'unknown data compression %s' %(self.compression))\n else:\n raise Exception(u'no encoded content to decode')\n self.decoded_content = []\n for idx in xrange(0, len(s), 4):\n val = ord(str(s[idx])) | (ord(str(s[idx + 1])) << 8) | \\\n (ord(str(s[idx + 2])) << 16) | (ord(str(s[idx + 3])) << 24)\n self.decoded_content.append(val)\n # generate the 2D version\n self._gen_2D()", "def read_gzip_bytes(self):\n with gzip.open(self, 'rb') as f:\n return f.read()", "def decode(data): #@NoSelf", "def __bytes__(self) -> bytes:\n from hathor.merged_mining.bitcoin import encode_bytearray, encode_list\n struct_bytes = self.header_head\n struct_bytes += encode_bytearray(self.coinbase_head)\n struct_bytes += encode_bytearray(self.coinbase_tail)\n struct_bytes += encode_list(self.merkle_path)\n struct_bytes += self.header_tail\n return struct_bytes", "def getData(self):\n return pickle.loads(self._data)", "def bytes(self):\r\n return self._info_data", "def unpackb(value):\n return load(io.BytesIO(value))", "def _decode_bytes(data: BencodedString) -> bytes:\n # Get byte string length\n delimiter_index = data.bytes.find(COLON)\n\n if delimiter_index > 0:\n length_prefix = data.get_prefix(delimiter_index)\n string_length = int(length_prefix.decode(\"ascii\"))\n data.del_prefix(delimiter_index + 1)\n else:\n raise ValueError(\n \"Cannot decode a byte string, it doesn't contain a delimiter. \"\n \"Most likely the bencoded string is incomplete or incorrect.\"\n )\n\n # Get byte string data\n if len(data.bytes) >= string_length:\n result_bytes = data.get_prefix(string_length)\n data.del_prefix(string_length)\n else:\n raise ValueError(\n f\"Cannot decode a byte string (prefix length \"\n f\"- {string_length}, real_length - {len(data.bytes)}. \"\n \"Most likely the bencoded string is incomplete or incorrect.\"\n )\n\n return result_bytes" ]
[ "0.67371094", "0.6383952", "0.6359461", "0.6287681", "0.6122764", "0.61174524", "0.61125195", "0.6038299", "0.59979355", "0.59773207", "0.59676695", "0.59675443", "0.59668946", "0.59567606", "0.59340024", "0.58919317", "0.58896726", "0.58458877", "0.5815826", "0.5805911", "0.5786289", "0.5780835", "0.5780566", "0.577518", "0.57691187", "0.57212156", "0.57125217", "0.5683903", "0.56726426", "0.5666036" ]
0.6506396
1
Retrieves the SSH entry's passphrase (from the Keepass password field)

>>> entry = KP_DB.find_entries_by_path('embedded_keys/id_rsa')[0]
>>> ssh_entry = SshEntry(KP_DB,entry)
>>> ssh_entry.passphrase.decode() == entry.password
True
def passphrase(self):
    password = self.entry.password
    if password:
        return self.entry.password.encode('UTF-8')
    else:
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_key_from_keyring(self):\n private_key = keyring.get_password(self.keyring_service_name, \"private_key\")\n\n if private_key is not None:\n return base64.b64decode(private_key)\n else:\n return None", "def passphrase(password):\n\tnow = int(time.time())\n\tkey = sha256(password).hexdigest()\n\tphrase = sha256(str(now)+key).hexdigest()\n\n\treturn (now, phrase)", "def get_password(self):\n return self.controller.dbfilter.db.get('passwd/user-password')", "def get_pass(self, item):\n text = str(self.get_contents(item), encoding=\"utf-8\")\n lines = text.split(\"\\n\")\n password = lines[0]\n return password", "def get_passwd(self):\n if self.__password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__password, self.__aes_key)", "def get_password_data(self, instance_id, private_key_file = None, passphrase = None):\n response = instance.get_password_data(self.url, self.verb,\n self.headers, self.version,\n instance_id)\n if response is not None :\n res = GetPasswordDataResponse.GetPasswordDataResponse()\n parseString(str(response.text), res)\n if not private_key_file == None :\n res.password_data = utils.decrypt_instance_password(res.password_data, private_key_file, passphrase)\n return res\n else :\n return None", "def passkey(self) -> Optional[str]:\n return pulumi.get(self, \"passkey\")", "def decrypt_password(pass_to_decrypt):\n\n pass_to_decrypt = fk.decrypt(pass_to_decrypt)\n return pass_to_decrypt.decode()", "def _get_pwd_key_from_config():\n return b64decode(config['app']['auth']['pwd_key_secret'].encode())", "def _decrypt_pvtkey(self, pvtkey_file: str, passphrase: str) -> str:\n\n keydata: str = None\n if pvtkey_file:\n try:\n keydata = asyncssh.public_key.read_private_key(pvtkey_file,\n passphrase)\n except Exception as e:\n self.logger.error(\n f\"ERROR: Unable to read private key file {pvtkey_file}\"\n f\"for jump host due to {str(e)}\")\n\n return keydata", "def _get_password(self):\r\n return self._password", "def private_key(self):\n if self._private_key is not None:\n return self._private_key[0]\n\n spk = self.serialized_private_key\n passphrase = self.passphrase\n\n try:\n self._private_key = [\n serialization.load_pem_private_key(\n self.serialized_private_key,\n backend=default_backend(),\n password=self.passphrase)]\n\n return self._private_key[0]\n\n except:\n raise\n self._private_key = [None]\n return self._private_key[0]", "def _get_password(self):\n return self._password", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def get_crypt_key():\n\n get_crypt_query = 'SELECT crypt.crypt_key ' \\\n 'FROM crypt ' \\\n 'WHERE key_id = 1'\n\n my_cursor.execute(get_crypt_query)\n stored_key = my_cursor.fetchone()\n\n # 'fetchone()' returns a union or tuple. 
To get the key, we take the first value:\n stored_key = stored_key[0]\n return stored_key", "def passwd_decryption(self):\n with open(self.key_path, 'rb') as input_key:\n for line in input_key:\n key = line\n with open(self.pass_path, 'rb') as input_password:\n for line in input_password:\n password = line\n cipher_suit = Fernet(key)\n plain_password = cipher_suit.decrypt(password)\n plain_password = bytes(plain_password).decode('utf-8')\n \n return plain_password", "def getPassword(self):\n\t\treturn self.Password", "def dump_priv_key(litecoinaddress):\n try:\n priv_key = subprocess.check_output([\"litecoin-cli\", \"dumpprivkey\", litecoinaddress])\n except:\n sys.exit(1)\n\n return priv_key.decode().strip()", "def passphrase():\n a = []\n for i in range(1,6):\n a.append(password())\n # join words into phrase\n p = \" \".join(a)\n # split phrase into a list\n p = list(p)\n # substitute a random character\n rc = \"\"\"1~!#$%^2&*()-=3+[]\\{}4:;\"'<>5?/01236456789\"\"\"\n p[secrets.choice(range(0,len(p)))] = rc[secrets.choice(range(0,len(rc)))]\n # put phrase back together\n p = \"\".join(p)\n return p", "def get_lc_passwd(self):\n if self.__lc_password:\n aes_cipher = AESCipher()\n return aes_cipher.decrypt(self.__lc_password, self.__aes_key)", "def get_private_key_in_der(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def getPasswd( self, par, path ):\n\n return self.db.getPasswdPar( par, path )", "def load_key():\n return open(\"pass.key\", \"rb\").read()", "def getpass(self, prompt):\r\n return getpass.getpass(prompt)", "def GetPassword(self):\n return self._password", "def recover_encrypt_pass(self):\n with open(self.key_path) as input_file:\n key = input_file.readlines()\n cipher_suite = Fernet(key[0])\n bin_passwd = bytes(self.password, 'utf-8')\n ciphered_text = cipher_suite.encrypt(bin_passwd)\n return ciphered_text", "def get_password(self):\n return self.__password", "def private_key(self):\n return self.__get_option('private_key')", "def reveal_seed():\n password = getpass.getpass('Password from keystore: ') # Prompt the user for a password of keystore file\n\n configuration = Configuration().load_configuration()\n api = get_api()\n\n try:\n wallet = api.get_private_key(configuration, password)\n click.echo('Account prv key: %s' % str(wallet.get_private_key().hex()))\n\n except InvalidPasswordException:\n click.echo('Incorrect password!')" ]
[ "0.6186439", "0.580254", "0.5719748", "0.57165724", "0.5687516", "0.5640266", "0.55456614", "0.5474176", "0.5474158", "0.5451181", "0.5404241", "0.54017514", "0.5397471", "0.5372742", "0.5372742", "0.537041", "0.5367618", "0.5357364", "0.5343646", "0.5339588", "0.53367853", "0.5332456", "0.5331691", "0.53267807", "0.53094447", "0.5295704", "0.5288386", "0.5283028", "0.5269342", "0.52529085" ]
0.731931
0
Parses the KeeAgent settings for the provided entry.

>>> entry = KP_DB.find_entries_by_path('embedded_keys/id_rsa')[0]
>>> ssh_entry = SshEntry(KP_DB,entry)
>>> ssh_entry.settings
def settings(self):
    if self._settings is not None:
        return self._settings
    settings = self.binaries['KeeAgent.settings'].content
    self._settings = objectify.fromstring(settings)
    return self._settings
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_settings() -> Dict[str, Any]:\n settings = dict()\n with open(\"config,ini\") as file_config:\n for line in file_config:\n try:\n key = line.split(\":\")[0]\n value = line.split(\":\")[1].strip().split() if key == 'invisible_manes' else line.split(\":\")[1].strip()\n settings[key] = value\n except IndexError:\n pass\n return settings", "def read_enc_settings():\n print(\"Decrypting {}\".format(ENC_SETTINGS))\n try:\n output = subprocess.check_output(['gpg', '-d', ENC_SETTINGS])\n except subprocess.SubprocessError:\n print(\"Decryption failed, ignoring\")\n return\n config = ConfigParser()\n config.read_string(output.decode('utf8', errors='ignore'))\n return config", "def _getSettings(checks):\r\n parser = _RCESettingsParser()\r\n\r\n if PATH not in parser.read(PATH):\r\n raise NoValidSettings('Config file is missing.')\r\n\r\n try:\r\n return _Settings.load(parser, checks)\r\n except (Error, ValueError) as e:\r\n raise NoValidSettings(str(e))", "def encryption_settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EncryptionSettingsElementArgs']]]]:\n return pulumi.get(self, \"encryption_settings\")", "def getSettings(self):\n reComment = re.compile(';.*')\n reSection = re.compile(r'^\\[\\s*(.+?)\\s*\\]$')\n reSetting = re.compile(r'(.+?)\\s*=(.*)')\n #--Read ini file\n #self.ensureExists()\n iniFile = GPath(self.path).open('r')\n settings = {} #settings[section][key] = value (stripped!)\n sectionSettings = None \n for line in iniFile:\n stripped = reComment.sub('',line).strip()\n maSection = reSection.match(stripped)\n maSetting = reSetting.match(stripped)\n if maSection:\n sectionSettings = settings[LString(maSection.group(1))] = {}\n elif maSetting:\n if sectionSettings == None:\n sectionSettings = settings.setdefault(LString('General'),{})\n self.isCorrupted = True\n sectionSettings[LString(maSetting.group(1))] = maSetting.group(2).strip()\n iniFile.close()\n return settings", "def _get_db_settings(self):\n config_path = os.path.expanduser(self.config.get_val('DATABASE_SETTINGS_FILE'))\n settings = {}\n with FileOperations.open(config_path, 'r') as f:\n for line in f:\n line = line.rstrip()\n # Ignore empty/comment lines.\n if not line or line.startswith('#'):\n continue\n try:\n key, value = line.split(':')\n settings[key.strip()] = value.strip()\n except ValueError:\n self.error_handler.abort_framework(\"Problem in config file: '%s' -> Cannot parse line: %s\" %\n (config_path, line))\n return settings", "def sentry_config() -> Dict:\n with open(script_dir + 'config.yml', 'r') as yamlfile:\n cfg = yaml.load(yamlfile, Loader=yaml.SafeLoader)\n sentry_cfg = cfg['sentry']\n result = {\n 'dns': sentry_cfg['dns']\n }\n return result", "def settings():\n return _get_settings()[1]", "def get_settings():\n settings_path = os.path.join(get_config_home(), 'tcharmap', 'settings.yaml')\n try:\n return yaml.safe_load(open(settings_path))\n except FileNotFoundError:\n return {'auto_copy': False}", "def _read_settings_file(cls, settings_path=''):\n if not settings_path:\n return {}\n\n if os.path.isdir(settings_path):\n settings_path = os.path.join(settings_path, '.' 
+ cls.__name__)\n if not os.path.isfile(settings_path):\n return {}\n\n d = {} # returned\n try:\n with open(settings_path) as f:\n lines = f.readlines()\n except BaseException: # FileNotFoundError?!\n return d\n\n settings_dict = DecoSettingsMapping.get_deco_class_settings_dict(cls.__name__)\n for line in lines:\n line = line.strip()\n # Allow blank lines & comments\n if not line or line[0] == '#':\n continue\n\n try:\n setting, val_txt = line.split('=', 1) # only split at first '='\n except ValueError:\n # fail silently. (Or, TODO: report error? ill-formed line)\n continue # bad line\n setting = setting.strip()\n val_txt = val_txt.strip()\n\n if setting not in settings_dict or not val_txt:\n # fail silently. (Or, TODO: report error? ill-formed line)\n continue\n\n # special case: None\n if val_txt == 'None':\n if settings_dict[setting].allow_falsy:\n d[setting] = None\n continue\n\n # If val_txt is enclosed in quotes (single or double)\n # and ends in '=' (indirect value) then let val = val_txt;\n # otherwise, defer to settings_dict[setting].value_from_str\n is_indirect = (is_quoted_str(val_txt) and\n len(val_txt) >= 3 and\n val_txt[-2] == '=')\n if is_indirect:\n val = val_txt[1:-1] # remove quotes\n else:\n try:\n val = settings_dict[setting].value_from_str(val_txt)\n except ValueError as e:\n # fail silently. (Or, TODO: report error? bad value)\n continue # bad line\n\n d[setting] = val\n\n return d", "def _get_sftp_config(cls, entry: Entry):\n # parse url\n parsed = urlparse(entry['url'])\n host: str = parsed.hostname\n username: str = parsed.username\n password: str = parsed.password\n port: int = parsed.port or DEFAULT_SFTP_PORT\n\n # get private key info if it exists\n private_key: str = entry.get('private_key')\n private_key_pass: str = entry.get('private_key_pass')\n\n entry_host_key_config: dict = entry.get('host_key')\n host_key: Optional[HostKey] = None\n if entry_host_key_config:\n host_key = HostKey(\n entry_host_key_config['key_type'], entry_host_key_config['public_key']\n )\n\n config: Optional[SftpConfig] = None\n\n if parsed.scheme == 'sftp':\n config = SftpConfig(\n host, port, username, password, private_key, private_key_pass, host_key\n )\n else:\n logger.warning('Scheme does not match SFTP: {}', entry['url'])\n\n return config", "def read_settings(self):\n config = ConfigParser.ConfigParser()\n config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'digital_ocean.ini')\n config.read(config_path)\n\n # Credentials\n if config.has_option('digital_ocean', 'api_token'):\n self.api_token = config.get('digital_ocean', 'api_token')\n\n # Cache related\n if config.has_option('digital_ocean', 'cache_path'):\n self.cache_path = config.get('digital_ocean', 'cache_path')\n if config.has_option('digital_ocean', 'cache_max_age'):\n self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')\n\n # Private IP Address\n if config.has_option('digital_ocean', 'use_private_network'):\n self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')\n\n # Group variables\n if config.has_option('digital_ocean', 'group_variables'):\n self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))", "def _get_settings():\n # store_last_good=True tells config component to update the config file\n # in a cron job. 
Here we just read from the datastore.\n rev, cfg = config.get_self_config(\n SETTINGS_CFG_FILENAME, config_pb2.SettingsCfg, store_last_good=True)\n cfg = cfg or config_pb2.SettingsCfg()\n return rev, cfg", "def get_setting_definition(cls, key, **kwargs):\n settings = kwargs.get('settings', cls.SETTINGS)\n\n key = str(key).strip().upper()\n\n if settings is not None and key in settings:\n return settings[key]\n else:\n return {}", "def get_object(cls, user_name, s_key):\n return Setting.all().ancestor(get_user(user_name)).filter('s_key =', s_key).get()", "def find_settings():\n return Setting()", "def get(self, entry: ConfigEntry) -> any:\n value = self.root\n if value is None:\n return None\n\n for key in entry.key_path:\n if self.ignore_case_in_keys:\n key = key.lower()\n value = value.get(key)\n if value is None:\n return entry.value\n\n return value", "def try_parse_ssh_entry(kdb,entry):\n try:\n return SshEntry(kdb,entry)\n except:\n return None", "def read_settings(args):\r\n # Default values\r\n state = 48\r\n district = 7\r\n leg_body = 'US-REP'\r\n census_year = '2016'\r\n election_year = '2018'\r\n voting_precincts = None\r\n voting_results = None\r\n \r\n # Set values in settings.ini\r\n settings = configparser.ConfigParser()\r\n settings.read('settings.ini') # change example.settings.ini to settings.ini\r\n\r\n # Census API Key\r\n census_api_key = settings.get( 'census', 'CENSUS_API_KEY' )\r\n\r\n if args.census_year:\r\n census_year=args.census_year\r\n if args.election_year:\r\n election_year=args.election_year\r\n if args.state:\r\n state = args.state\r\n if args.district:\r\n district = args.district\r\n if args.leg_body:\r\n leg_body = args.leg_body\r\n if args.voting_precincts:\r\n voting_precincts = args.voting_precincts\r\n if args.voting_results:\r\n voting_results = args.voting_results\r\n\r\n settings_dict = { \r\n \"census_api_key\": census_api_key,\r\n \"state\": state,\r\n \"district\": district,\r\n \"leg_body\": leg_body,\r\n \"census_year\": census_year,\r\n \"election_year\": election_year,\r\n \"voting_precincts\": voting_precincts,\r\n \"voting_results\": voting_results\r\n }\r\n\r\n return settings_dict", "def _init_key_settings(self):\n self.minKeySize = 1023\n self.maxKeySize = 8193\n self.rsaSigHashes = list(RSA_SIGNATURE_HASHES)\n self.rsaSchemes = list(RSA_SCHEMES)\n self.dsaSigHashes = list(DSA_SIGNATURE_HASHES)\n self.virtual_hosts = []\n # DH key settings\n self.eccCurves = list(CURVE_NAMES)\n self.dhParams = None\n self.dhGroups = list(ALL_DH_GROUP_NAMES)\n self.defaultCurve = \"secp256r1\"\n self.keyShares = [\"secp256r1\", \"x25519\"]\n self.padding_cb = None\n self.use_heartbeat_extension = True\n self.heartbeat_response_callback = None", "def load_settings(self):\n self.settings = db.get_settings()\n if len(self.settings) < 2:\n while(True):\n consumer_key = raw_input(\"Enter your consumer key\")\n consumer_secret = raw_input(\"Enter your consumer_secret\")\n if len(consumer_key) > 5 and len(consumer_secret) > 5:\n db.add_settings(consumer_key, consumer_secret)\n break", "def loadSettings(self, e):\n if e.tag == self.type:\n c = e\n else:\n c = e.find(\".//\" + self.type)\n if c is not None:\n g = c.get(\"enabled\")\n self.enabled = (g == 'true')\n g = c.get(\"mode\")\n self.setMode((GRANT if g == 'grant' else LIMIT))\n g = c.get(\"priority\")\n try:\n self.setPriority(int(g))\n except:\n self.setPriority(0)\n self.name = c.get(\"name\")\n else:\n print \"Error: no settings found for constrain %s\" % self.name\n return c", "def 
read_settings():\n settings_path = join(dirname(dirname(__file__)), '.settings')\n filename = settings_path\n settings = configparser.ConfigParser()\n settings.read(filename)\n return settings", "def parse_settings(filename, env={}):\n\n if not exists(filename):\n return {}\n\n with open(filename, 'r') as settings:\n for line in settings:\n if line[0] == '#' or len(line.strip()) == 0: # ignore comments and newlines\n continue\n try:\n k, v = map(lambda x: x.strip(), line.split(\"=\", 1))\n env[k] = expandvars(v, env)\n except Exception:\n echo(\"Error: malformed setting '{}', ignoring file.\".format(line), fg='red')\n return {}\n return env", "def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings", "def get_settings():\n settings = {}\n for setting in cfg.displayable_setting:\n settings[setting] = getattr(cfg, setting)\n return settings", "def _extract_settings(config):\n # Some helper functions to get typed fields with good error reporting\n def get(key):\n value = getattr(config, key, None)\n if value is None:\n raise Error(\"Required 'Akara' configuration %r is missing\" % (key,))\n return value\n \n def getstring(key):\n value = get(key)\n if not isinstance(value, basestring):\n raise Error(\"'Akara' configuration %r must be a string, not %r\" %\n (key, value))\n return value\n\n def getint(key):\n value = get(key)\n try:\n return int(value)\n except ValueError:\n raise Error(\"'Akara' configuration %r must be an integer, not %r\" % \n (key, value))\n \n def getpositive(key):\n value = get(key)\n if value <= 0:\n raise Error(\n \"'Akara' configuration %r must be a positive integer, not %r\" %\n (key, value))\n return value\n\n def getnonnegative(key):\n value = getint(key)\n if value <= 0:\n raise Error(\n \"'Akara' configuration %r must be a non-negative integer, not %r\" %\n (key, value))\n return value\n\n\n settings = {}\n\n # The value for 'Listen' can be:\n # <port> as in 8080\n # -or-\n # <host>:<port> as in \"localhost:8081\"\n addr = get('Listen')\n if isinstance(addr, int):\n host, port = (\"\", addr)\n else:\n if ':' in addr:\n host, port_s = addr.rsplit(':', 1)\n else:\n host, port_s = '', addr\n try:\n port = int(port_s)\n if port <= 0:\n raise ValueError\n except ValueError:\n raise Error(\"Listen port must be a positive integer, not %r\" % port_s)\n\n settings[\"server_address\"] = (host, port)\n\n # Used to contract the full OpenSearch template to a given service.\n # If not present, use the Listen host and port.\n # (And if the host isn't present, use 'localhost'. It's not a good\n # default but I'm not going to do a FQDN lookup here since that has\n # side effects. 
Basically, if you need the name right, then set it.)\n try:\n server_root = getstring('ServerRoot')\n except Error:\n if port == 80:\n fmt = \"http://%(host)s/\"\n else:\n fmt = \"http://%(host)s:%(port)s/\"\n server_root = fmt % dict(host = (host or \"localhost\"), port = port)\n \n # Uses only when an Akara service wants to call another Akara service.\n # Needed for the (rare) cases when the listen server has a different\n # local name than the published server.\n try:\n internal_server_root = getstring('InternalServerRoot')\n except Error:\n internal_server_root = server_root\n \n settings[\"server_root\"] = server_root\n settings[\"internal_server_root\"] = internal_server_root\n\n config_root = getstring('ConfigRoot')\n config_root = os.path.expanduser(config_root)\n settings[\"config_root\"] = os.path.abspath(config_root)\n\n pid_file = getstring('PidFile')\n settings[\"pid_file\"] = os.path.join(config_root, pid_file)\n\n error_log = getstring('ErrorLog')\n settings[\"error_log\"] = os.path.join(config_root, error_log)\n\n access_log = getstring('AccessLog')\n settings[\"access_log\"] = os.path.join(config_root, access_log)\n\n module_dir = getstring(\"ModuleDir\")\n settings[\"module_dir\"] = os.path.join(config_root, module_dir)\n \n module_cache = getstring(\"ModuleCache\")\n settings[\"module_cache\"] = os.path.join(config_root, module_cache)\n\n log_level_orig = getstring('LogLevel')\n log_level_s = log_level_orig.upper()\n if log_level_s in _valid_log_levels:\n log_level = _valid_log_levels[log_level_s]\n else:\n raise Error(\n \"global setting 'LogLevel' is %r but must be one of: %s\" %\n (log_level_s, \", \".join(map(repr, _valid_log_levels))))\n \n settings[\"log_level\"] = log_level\n\n\n\n settings[\"max_servers\"] = getpositive(\"MaxServers\")\n settings[\"min_spare_servers\"] = getnonnegative(\"MinSpareServers\")\n settings[\"max_spare_servers\"] = getnonnegative(\"MaxSpareServers\")\n if settings[\"max_spare_servers\"] < settings[\"min_spare_servers\"]:\n raise Error(\"MaxSpareServers (%r) must be greater than MinSpareServers (%r)\" %\n (settings[\"max_spare_servers\"], settings[\"min_spare_servers\"]))\n settings[\"max_requests_per_server\"] = getpositive(\"MaxRequestsPerServer\")\n\n return settings", "def cmd_account_settings(client, args):\n account_settings = client.get_account_settings(args.username)\n data = account_settings.__dict__\n generate_output({'account_settings': data})", "def parseSettings(settings_file):\n\t# Make a new settings object\n\tsetting_object = settings.Settings()\n\n\t# Read the file line by line\n\tfor line in settings_file:\n\t\tthis_line = line.split()\n\t\tif this_line == []:\n\t\t\tpass\n\t\telif this_line[0] == 'input':\n\t\t\tfor filename in this_line[1:]:\n\t\t\t\tsetting_object.addInput(filename)\n\t\telif this_line[0] == 'atom':\n\t\t\tsymbol = this_line[1]\n\t\t\tnumber = this_line[2]\n\t\t\tmass = this_line[3]\n\t\t\tcharge = this_line[4]\n\t\t\tsigma = this_line[5]\n\t\t\teps = this_line[6]\n\t\t\tsetting_object.addAtom(symbol, number, mass, charge, sigma, eps)\n\t\telif this_line[0] == 'mix':\n\t\t\tsetting_object.mix()\n\t\telif this_line[0] == 'bond':\n\t\t\tatom1 = this_line[1]\n\t\t\tatom2 = this_line[2]\n\t\t\tdistance = this_line[3]\n\t\t\tbond_length = this_line[4]\n\t\t\tforce_constant = this_line[5]\n\t\t\tsetting_object.addBond(atom1, atom2, distance, bond_length, force_constant)\n\t\telif this_line[0] == 'angle':\n\t\t\tatom1 = this_line[1]\n\t\t\tatom2 = this_line[2]\n\t\t\tatom3 = this_line[3]\n\t\t\tangle = 
this_line[4]\n\t\t\tangle_constant = this_line[5]\n\t\t\tsetting_object.addAngle(atom1, atom2, atom3, angle, angle_constant)\n\t\telif this_line[0] == 'molecule':\n\t\t\tresidue = this_line[1]\n\t\t\tnmol = this_line[2]\n\t\t\tnrexcl = this_line[3]\n\t\t\tsetting_object.addMolecule(residue, nmol, nrexcl)\n\t\telif this_line[0] == 'output':\n\t\t\toutput = this_line[1]\n\t\t\tsetting_object.addOutput(output)\n\t\telif this_line[0] == 'system':\n\t\t\tsystem = \"\".join(this_line[1:])\n\t\t\tsetting_object.addSystem(system)\n\t\telif this_line[0] == '#':\n\t\t\tpass\n\treturn setting_object", "def get_settings():\n with open('config/config.json') as data_file:\n settings = json.load(data_file)\n return settings" ]
[ "0.5546932", "0.53139675", "0.5293759", "0.5165881", "0.51479965", "0.51145625", "0.50411206", "0.5008838", "0.4938931", "0.4936226", "0.49296352", "0.49288568", "0.49022695", "0.48895726", "0.4882038", "0.48787072", "0.48775393", "0.4863503", "0.48420894", "0.48408088", "0.48185197", "0.4793218", "0.4792586", "0.47898725", "0.47854173", "0.47854173", "0.4744258", "0.47409937", "0.47282344", "0.47116068" ]
0.57558864
0
Returns the serialized variant of the stored private key for this entry. SSH keys can be stored either as attachments or as references to keyfiles on disk. This property supports both and will automatically read the data from the right place. It returns the respective private key file as bytes.
>>> entry = KP_DB.find_entries_by_path('embedded_keys/id_rsa')[0]
>>> ssh_entry = SshEntry(KP_DB,entry)
>>> ssh_entry.serialized_private_key
def serialized_private_key(self):
    if self._serialized_private_key is not None:
        return self._serialized_private_key

    location = self.settings.Location
    if location.AttachmentName:
        self._serialized_private_key = self.binaries[location.AttachmentName.text].content
        return self._serialized_private_key
    else:
        with open(location.FileName.text, 'rb') as file:
            self._serialized_private_key = file.read()
        return self._serialized_private_key
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_private_key_in_der(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.DER,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def get_private_key(self):\n# _log.debug(\"get_private_key: node_name={}\".format(self.node_name))\n with open(os.path.join(self.runtime_dir, \"private\", \"private.key\"), 'rb') as f:\n return f.read()", "def get_private_key_in_pem(self):\n serialized_private = self.private_key_obj.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption()\n )\n return serialized_private", "def private_key(self):\n if self._private_key is not None:\n return self._private_key[0]\n\n spk = self.serialized_private_key\n passphrase = self.passphrase\n\n try:\n self._private_key = [\n serialization.load_pem_private_key(\n self.serialized_private_key,\n backend=default_backend(),\n password=self.passphrase)]\n\n return self._private_key[0]\n\n except:\n raise\n self._private_key = [None]\n return self._private_key[0]", "def get_private_key(self):\n return self._private_key", "def _serialize_private_key(private_key, password=None):\n error = None\n pvt_key_loaders = [\n load_pem_private_key, load_der_private_key\n ]\n pvt_key = None\n for loader in pvt_key_loaders:\n if not pvt_key:\n try:\n pvt_key = loader(\n private_key.encode('utf-8'),\n password=password,\n backend=default_backend()\n )\n error = False\n break\n except (ValueError, UnsupportedAlgorithm) as err:\n error = err\n if error:\n raise errors.InvalidPrivateKeyError(error)\n else:\n return pvt_key", "def get_key_from_keyring(self):\n private_key = keyring.get_password(self.keyring_service_name, \"private_key\")\n\n if private_key is not None:\n return base64.b64decode(private_key)\n else:\n return None", "def serializePrivateKey(private_key):\n\treturn private_key.private_bytes(\n\t\tencoding=serialization.Encoding.PEM,\n\t\tformat=serialization.PrivateFormat.PKCS8,\n\t\tencryption_algorithm=serialization.NoEncryption()\n\t)", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def get_private_key(self) -> str:\n\t\treturn self._privateKey", "def _get_decryption_key(self, **options):\n\n return self._private_key", "def private_key(self):\n return PrivateKey(self._sk.private_bytes(\n encoding=serialization.Encoding.Raw,\n format=serialization.PrivateFormat.Raw,\n encryption_algorithm=serialization.NoEncryption()))", "def get_private_key_file(self):\n private_key_file = (self.private_key_file\n if self.private_key_file is not None\n else (self.delegate.get_private_key_file()\n if self.delegate is not None\n else None))\n return private_key_file", "def get_private_key_file(self):\n private_key_file = (self.private_key_file\n if self.private_key_file is not None\n else (self.delegate.get_private_key_file()\n if self.delegate is not None\n else None))\n return private_key_file", "def _get_private_key_file(self):\n # TODO(dittrich): Should make this more robust.\n private_key_file = None\n for k, v in self.app.secrets._secrets.items():\n if k.find('private_key_file') > 0:\n private_key_file = v\n return private_key_file", "def get_private_key():\n if not os.path.exists(_private_key_path):\n return None\n\n try:\n with open(_private_key_path) as 
secret_file:\n return secret_file.read()\n\n except Exception as exc:\n log.error(f'Could not read private key.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def read_keys(path):\n with open(path) as walletfile:\n b_keys = walletfile.read()\n p_keys = base64.b64decode(b_keys)\n return pickle.loads(p_keys)", "def private_key(self):\n return self.__get_option('private_key')", "def load_key(self):\n\t return open(\"key.key\", \"rb\").read()", "def get_private_key(self) -> str:\n raise NotImplementedError(\"Please implement your own get_public_key() method\")", "def private_key_path(self):\n if self._private_key_path is not None:\n return self._private_key_path\n\n location = self.settings.Location\n if location.AttachmentName:\n self._private_key_path = 'kdbx-attachment:///{}/{}'.format(\n self.entry.path, location.AttachmentName.text)\n return self._private_key_path\n else:\n self._private_key_path = location.FileName.text\n return self._private_key_path", "async def retrieve_private_key(self) -> Tuple[str, str]:\n\n filename, file_path = random.choice(self._private_keys)\n async with aiofiles.open(file_path, mode='r') as file:\n private_key = await file.read()\n return private_key, self._create_public_key_identifier(filename)", "def _get_private_key(self, privkey=None):\n\n # read private keys from keyring\n privkeys = self.gpg.list_keys(True) # True => private keys\n if len(privkeys) > 0 and privkeys[-1].has_key('fingerprint'):\n fingerprints = []\n for k in privkeys:\n fingerprints.append(k['fingerprint'])\n else:\n # no private key in keyring\n return None\n\n if privkey:\n # check for existence of private key received as argument\n # DEVEL: check for expiration as well\n if len(privkey) > 7 and len(privkey) <= 40:\n for fp in fingerprints:\n if fp.endswith(privkey):\n # work with last 16 significant chars internally,\n # even if only 8 are required in trac.ini\n privkey = fp[-16:]\n break\n # no fingerprint matching key ID\n else:\n privkey = None\n else:\n # reset invalid key ID\n privkey = None\n else:\n # select (last) private key from keyring\n privkey = fingerprints[-1][-16:]\n\n return privkey", "def raw(self) -> bytes:\n return bytes(self._signing_key)", "def raw_key(self) -> bytes:\n return bytes(self.data_bytes[ProofPath._Positions.KEY_POS : ProofPath._Positions.KEY_POS + KEY_SIZE])", "def private_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"private_key\")", "def save_rsa_private_key(private_key: RSAPrivateKeyWithSerialization, file_path: str, password: str = None,\n encoding: Encoding = Encoding.PEM) -> None:\n if password:\n if isinstance(password, str):\n password_bytes = password.encode('utf-8')\n else:\n password_bytes = password\n enc = serialization.BestAvailableEncryption(password=password_bytes) if password else serialization.NoEncryption()\n pem_data = private_key.private_bytes(encoding, serialization.PrivateFormat.PKCS8, enc)\n with open(file_path, 'wb') as f:\n f.write(pem_data)", "def get_key(self, key_id):\r\n return self.sshkey.getObject(id=key_id)" ]
[ "0.7089883", "0.7009223", "0.69112986", "0.6858685", "0.64298254", "0.63974696", "0.6390561", "0.63333625", "0.6255549", "0.6255549", "0.62499315", "0.62322474", "0.61910164", "0.61403877", "0.61403877", "0.6104574", "0.60905373", "0.6054348", "0.6047586", "0.59691155", "0.59685516", "0.59569174", "0.580132", "0.5776268", "0.5757849", "0.573153", "0.57045007", "0.57045007", "0.5700298", "0.56829095" ]
0.7920088
0
Returns the path for the private key file associated with the SSH entry. If the private key file is stored as an attachment in the Keepass database, a kdbx-attachment:// pseudo-path pointing at the attachment is returned; otherwise the configured path of the keyfile on disk is returned.
def private_key_path(self):
    if self._private_key_path is not None:
        return self._private_key_path

    location = self.settings.Location
    if location.AttachmentName:
        self._private_key_path = 'kdbx-attachment:///{}/{}'.format(
            self.entry.path, location.AttachmentName.text)
        return self._private_key_path
    else:
        self._private_key_path = location.FileName.text
        return self._private_key_path
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_path_to_key_file():\n\n if 'private_key_path' not in ctx.node.properties:\n raise NonRecoverableError(\n 'Unable to get key file path, private_key_path not set.')\n\n return os.path.expanduser(ctx.node.properties['private_key_path'])", "def PRIVATE_RSA_KEYFILE_PATH() :\n return os.path.join( config.CONFIG_PATH(), \"%s-private.pem\" % RSA_KEYPAIR_PREFIX() )", "def _get_private_key_file(self):\n # TODO(dittrich): Should make this more robust.\n private_key_file = None\n for k, v in self.app.secrets._secrets.items():\n if k.find('private_key_file') > 0:\n private_key_file = v\n return private_key_file", "def get_private_key_file(self):\n private_key_file = (self.private_key_file\n if self.private_key_file is not None\n else (self.delegate.get_private_key_file()\n if self.delegate is not None\n else None))\n return private_key_file", "def get_private_key_file(self):\n private_key_file = (self.private_key_file\n if self.private_key_file is not None\n else (self.delegate.get_private_key_file()\n if self.delegate is not None\n else None))\n return private_key_file", "def get_private_key(self):\n# _log.debug(\"get_private_key: node_name={}\".format(self.node_name))\n with open(os.path.join(self.runtime_dir, \"private\", \"private.key\"), 'rb') as f:\n return f.read()", "def get_key_filename(vm_):\n key_filename = config.get_cloud_config_value(\n \"ssh_private_key\", vm_, __opts__, search_global=False, default=None\n )\n if key_filename is not None:\n key_filename = os.path.expanduser(key_filename)\n if not os.path.isfile(key_filename):\n raise SaltCloudConfigError(\n \"The defined ssh_private_key '{}' does not exist\".format(key_filename)\n )\n\n return key_filename", "def _get_private_key(self, privkey=None):\n\n # read private keys from keyring\n privkeys = self.gpg.list_keys(True) # True => private keys\n if len(privkeys) > 0 and privkeys[-1].has_key('fingerprint'):\n fingerprints = []\n for k in privkeys:\n fingerprints.append(k['fingerprint'])\n else:\n # no private key in keyring\n return None\n\n if privkey:\n # check for existence of private key received as argument\n # DEVEL: check for expiration as well\n if len(privkey) > 7 and len(privkey) <= 40:\n for fp in fingerprints:\n if fp.endswith(privkey):\n # work with last 16 significant chars internally,\n # even if only 8 are required in trac.ini\n privkey = fp[-16:]\n break\n # no fingerprint matching key ID\n else:\n privkey = None\n else:\n # reset invalid key ID\n privkey = None\n else:\n # select (last) private key from keyring\n privkey = fingerprints[-1][-16:]\n\n return privkey", "def private_key(self):\n return self.__get_option('private_key')", "def serialized_private_key(self):\n if self._serialized_private_key is not None:\n return self._serialized_private_key\n\n location = self.settings.Location\n if location.AttachmentName:\n self._serialized_private_key = self.binaries[location.AttachmentName.text].content\n return self._serialized_private_key\n else:\n with open(location.FileName.text, 'rb') as file:\n self._serialized_private_key = file.read()\n return self._serialized_private_key", "def key_file(self):\n return self._get('key_file')", "def get_private_key(self) -> str:\n\t\treturn self._privateKey", "def get_private_key():\n if not os.path.exists(_private_key_path):\n return None\n\n try:\n with open(_private_key_path) as secret_file:\n return secret_file.read()\n\n except Exception as exc:\n log.error(f'Could not read private key.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def get_key_file(self):\n 
return self.configuration.get(\"pg_host_key\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"private_key\")", "def get_private_key(self):\n return self._private_key", "def key_path(self):\n keypath = self._get_field('System', 'keypath')\n localpath = \"/\".join(__file__.split('/')[:-1])\n return join(localpath, keypath)", "def private_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"private_key\")", "def getPrivateKeys(self):\n privateKeys = {}\n for filename in os.listdir(self.dataRoot):\n if filename[:9] == 'ssh_host_' and filename[-4:]=='_key':\n fullPath = os.path.join(self.dataRoot, filename)\n try:\n key = keys.Key.fromFile(fullPath)\n except IOError as e:\n if e.errno == errno.EACCES:\n # Not allowed, let's switch to root\n key = runAsEffectiveUser(\n 0, 0, keys.Key.fromFile, fullPath)\n privateKeys[key.sshType()] = key\n else:\n raise\n except Exception as e:\n log.msg('bad private key file %s: %s' % (filename, e))\n else:\n privateKeys[key.sshType()] = key\n return privateKeys", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"private_key\")", "def private_key(self):\n if self._private_key is not None:\n return self._private_key[0]\n\n spk = self.serialized_private_key\n passphrase = self.passphrase\n\n try:\n self._private_key = [\n serialization.load_pem_private_key(\n self.serialized_private_key,\n backend=default_backend(),\n password=self.passphrase)]\n\n return self._private_key[0]\n\n except:\n raise\n self._private_key = [None]\n return self._private_key[0]", "def private_key(self):\n return f'PrivateKey = {self._peer.private_key}'", "async def retrieve_private_key(self) -> Tuple[str, str]:\n\n filename, file_path = random.choice(self._private_keys)\n async with aiofiles.open(file_path, mode='r') as file:\n private_key = await file.read()\n return private_key, self._create_public_key_identifier(filename)" ]
[ "0.7557831", "0.75441337", "0.7296165", "0.7130421", "0.7130421", "0.6899545", "0.67883974", "0.6745132", "0.67365414", "0.66425455", "0.6623382", "0.6521915", "0.64749527", "0.63988084", "0.6377845", "0.6377845", "0.63119304", "0.63106614", "0.6268237", "0.6268237", "0.6199935", "0.6198969", "0.6198969", "0.6198969", "0.6198969", "0.6198969", "0.6198969", "0.61962724", "0.61909753", "0.617179" ]
0.81234384
0
Attempts to wrap a Keepass entry with the `SshEntry` class. Returns the `SshEntry` instance if successful and `None` otherwise.
def try_parse_ssh_entry(kdb,entry):
    try:
        return SshEntry(kdb,entry)
    except:
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_sftp_config(cls, entry: Entry):\n # parse url\n parsed = urlparse(entry['url'])\n host: str = parsed.hostname\n username: str = parsed.username\n password: str = parsed.password\n port: int = parsed.port or DEFAULT_SFTP_PORT\n\n # get private key info if it exists\n private_key: str = entry.get('private_key')\n private_key_pass: str = entry.get('private_key_pass')\n\n entry_host_key_config: dict = entry.get('host_key')\n host_key: Optional[HostKey] = None\n if entry_host_key_config:\n host_key = HostKey(\n entry_host_key_config['key_type'], entry_host_key_config['public_key']\n )\n\n config: Optional[SftpConfig] = None\n\n if parsed.scheme == 'sftp':\n config = SftpConfig(\n host, port, username, password, private_key, private_key_pass, host_key\n )\n else:\n logger.warning('Scheme does not match SFTP: {}', entry['url'])\n\n return config", "def create_entry(hass: HomeAssistant) -> MockConfigEntry:\n entry = MockConfigEntry(\n domain=DOMAIN,\n data={\n CONF_URL: URL,\n CONF_API_KEY: API_KEY,\n CONF_VERIFY_SSL: False,\n },\n )\n\n entry.add_to_hass(hass)\n return entry", "def getSSHConnection(host):\n try:\n ssh = SSHWrapper()\n ssh.connect(host.getID())\n return ssh\n except:\n return None", "def __call__(self, entry):\n return self", "def create_entry_for_topic(cls, topic, entry_id, content_hash):\n\t\tkey = cls.create_key(topic, entry_id)\n\t\treturn cls(key_name=key.name(),\n\t\t\t\t\t\t\t parent=key.parent(),\n\t\t\t\t\t\t\t entry_id=entry_id,\n\t\t\t\t\t\t\t entry_id_hash=utils.sha1_hash(entry_id),\n\t\t\t\t\t\t\t entry_content_hash=content_hash)", "def GetLinkedFileEntry(self):\n link = self._GetLink()\n if not link:\n return None\n\n parent_path_spec = getattr(self.path_spec, 'parent', None)\n path_spec = hfs_path_spec.HFSPathSpec(\n location=link, parent=parent_path_spec)\n\n is_root = bool(link == self._file_system.LOCATION_ROOT)\n\n return HFSFileEntry(\n self._resolver_context, self._file_system, path_spec, is_root=is_root)", "def SSH(*args, **kwargs):\n method = import_class(settings.ORCHESTRATION_SSH_METHOD_BACKEND)\n return method(*args, **kwargs)", "def remove_entry(self, entry: Union[int, str, Entry]) -> Optional[Entry]:\n if isinstance(entry, Entry):\n target = entry\n elif isinstance(entry, int):\n target = self.__entries[entry]\n else:\n target = None\n for e in self.__entries:\n if e.get_name() == entry:\n target = e\n if target is None:\n return None\n if isinstance(target, Directory) and target.is_populated():\n return None\n self.__entries.remove(target)\n return target", "def get_entry(self, entry_id):\n entry = self.entries.find_one({'id': entry_id}, projection={'_id': 0})\n return entry", "def _create_entry():\r\n entry_widget = tk.Entry(password_window, bd=0, font=('Helvetica', 16), width=40,\r\n bg='gray15', fg='white', insertbackground='white')\r\n entry_widget.place(x=10, y=105)\r\n\r\n entry_widget.focus()\r\n\r\n return entry_widget", "def _get_ssh_connection(cls, host, user=None):\n if not user:\n user = cls.user\n\n ssh_opts = ()\n ssh_opts += ('-oPasswordAuthentication=no',\n '-oStrictHostKeyChecking=no',\n '-oPort=22',\n '-oConnectTimeout=10')\n\n keyfile = None\n if 'ssh_keyfile' in cls.config:\n keyfile = cls.config['ssh_keyfile']\n\n ssh_opts += ('-o', 'IdentityFile=%s' % keyfile)\n\n if cls.use_controlpersist:\n ssh_opts += ('-oControlMaster=auto',\n '-oControlPersist=4h',\n '-oControlPath=~/.ssh/glusto-ssh-%r@%h:%p')\n\n scp_opts = ssh_opts\n\n ssh_opts += ('-T',)\n\n conn_name = \"%s@%s\" % (user, host)\n # if no existing connection, 
create one\n if conn_name not in cls._ssh_connections:\n cls.log.debug(\"Creating connection: %s\" % conn_name)\n try:\n ssh = SshMachine(host, user,\n ssh_opts=ssh_opts, scp_opts=scp_opts)\n except:\n cls.log.error(\"Exception trying to establish SshMachine\")\n return None\n cls._ssh_connections[conn_name] = ssh\n else:\n cls.log.debug(\"Retrieved connection from cache: %s\" % conn_name)\n ssh = cls._ssh_connections[conn_name]\n\n if ssh:\n return ssh\n\n print(\"oops. did not get ssh for %s\", conn_name)\n return None", "def download_entry(cls, entry: Entry, config: dict, sftp: SftpClient) -> None:\n path: str = unquote(urlparse(entry['url']).path) or '.'\n delete_origin: bool = config['delete_origin']\n recursive: bool = config['recursive']\n to: str = config['to']\n\n try:\n to = render_from_entry(to, entry)\n except RenderError as e:\n logger.error('Could not render path: {}', to)\n entry.fail(str(e)) # type: ignore\n return\n\n try:\n sftp.download(path, to, recursive, delete_origin)\n except SftpError as e:\n entry.fail(e) # type: ignore", "def create_entry(entry):\n Entry.create(**entry)\n return entry", "async def async_setup_entry(\n hass: HomeAssistant,\n config_entry: ConfigEntry,\n async_add_entities: AddEntitiesCallback,\n) -> None:\n wrapper: RpcDeviceWrapper | BlockDeviceWrapper | None = None\n if get_device_entry_gen(config_entry) == 2:\n if rpc_wrapper := hass.data[DOMAIN][DATA_CONFIG_ENTRY][\n config_entry.entry_id\n ].get(RPC):\n wrapper = cast(RpcDeviceWrapper, rpc_wrapper)\n else:\n if block_wrapper := hass.data[DOMAIN][DATA_CONFIG_ENTRY][\n config_entry.entry_id\n ].get(BLOCK):\n wrapper = cast(BlockDeviceWrapper, block_wrapper)\n\n if wrapper is not None:\n entities = []\n\n for button in BUTTONS:\n if not button.supported(wrapper):\n continue\n entities.append(ShellyButton(wrapper, button))\n\n async_add_entities(entities)", "async def setup_mocked_integration(hass: HomeAssistant) -> MockConfigEntry:\n\n mock_config_entry = MockConfigEntry(**FIXTURE_CONFIG_ENTRY)\n mock_config_entry.add_to_hass(hass)\n\n assert await hass.config_entries.async_setup(mock_config_entry.entry_id)\n await hass.async_block_till_done()\n\n return mock_config_entry", "def get_access_entry(self, key) -> AccessEntry:\n try:\n return self.model.objects.get(key=key)\n except AccessEntry.DoesNotExist:\n logger.exception(\"Access entry does not exist\")\n raise AccessEntry.DoesNotExist", "def ssh(obj: dict[str, Any], name: str):\n profile = Profile.get_by(name=USERNAME)\n login = Login.get_by(name=name)\n\n if profile.name != USERNAME:\n click.echo(\n click.style(\n f\"You need to be logged in as {profile.name}\"\n \" to access this login\",\n fg=\"red\",\n bold=True))\n raise click.Abort(\"Authentication failed!\")\n\n decrypted_password = None\n if login.password:\n fernet = authenticate_user_and_get_fernet(profile)\n decrypted_password = fernet.decrypt(login.password.encode()).decode()\n\n click.echo(\n click.style(f\"💫 Logging you in to {login.name} ({login.host})\",\n fg=\"cyan\"))\n shell = obj[\"shell\"]\n if decrypted_password is None:\n args = shlex.split(f\"{shell} \\'ssh {login.username}@{login.host}\\'\")\n p = subprocess.Popen(args)\n else:\n args = shlex.split(f\"{shell} 'sshpass -p \\\"{decrypted_password}\\\" \"\n f\"ssh {login.username}@{login.host}'\")\n p = subprocess.Popen(args, stdout=subprocess.PIPE)\n p.communicate()\n if p.returncode == 0:\n click.echo(\n click.style(f\"✅ Logged you in to {login.name} as {login.username}\",\n fg=\"green\"))\n else:\n 
click.echo(f\"p.returncode: {p.returncode}\")", "def mock_config_entry() -> MockConfigEntry:\n return MockConfigEntry(\n title=\"homeassistant.github\",\n domain=DOMAIN,\n data={CONF_TAILNET: \"homeassistant.github\", CONF_API_KEY: \"tskey-MOCK\"},\n unique_id=\"homeassistant.github\",\n )", "def _parse_result_entry(result):\n entry = ParsedEntry()\n\n if \"content\" in result and len(result.content) > 0:\n entry.content = result.content[0].value\n # if not html, have to escape\n if result.content[0].type not in HTML_MIME_TYPES:\n entry.content = cgi.escape(entry.content)\n elif \"summary_detail\" in result:\n entry.content = result.summary_detail.value\n # if not html, have to escape\n if result.summary_detail.type not in HTML_MIME_TYPES:\n entry.content = cgi.escape(entry.content)\n else:\n entry.content = \"\"\n entry.link = result.get(\"link\", None)\n entry.title = result.get(\"title\", None)\n if \"author_detail\" in result and \"name\" in result.author_detail:\n entry.author = result.author_detail.name\n else:\n entry.author = None\n if \"updated_parsed\" in result and result.updated_parsed is not None:\n entry.date = int(calendar.timegm(result.updated_parsed))\n elif \"published_parsed\" in result and result.published_parsed is not None:\n entry.date = int(calendar.timegm(result.published_parsed))\n else:\n entry.date = int(time.time())\n # try to find something to use as GUID, or fall back to static string\n guid_content = result.get(\"id\", entry.title)\n if guid_content is None:\n guid_content = \"None\"\n entry.guid = hashlib.sha1(guid_content.encode('utf-8')).hexdigest()\n return entry", "def get_entry(self, entry: str) -> Optional[Union['Directory', NormalFile, VirusFile, Entry]]:\n for e in self.get_entries():\n if e.get_name() == entry:\n return e", "def sshclient_from_instance(instance, ssh_key_file,\r\n host_key_file='~/.ssh/known_hosts',\r\n user_name='root', ssh_pwd=None):\r\n s = FakeServer(instance, ssh_key_file)\r\n return SSHClient(s, host_key_file, user_name, ssh_pwd)", "def get_entry(self):\n # Filter out any fields that are invalid for the type of a new entry.\n properties = {\n field: value\n for field, value in self.properties.items()\n if field in self.type_cls.entry_fields\n }\n\n return self.type_cls.from_proxy(self.name, self.description,\n self.updated, self.notes, properties)", "def SSHToInstance(self, args, instance):\n args = self._DefaultArgsForSSH(args)\n\n external_nat = ssh_utils.GetExternalIPAddress(instance)\n log.status.Print(\n 'Trying to SSH to VM with NAT IP:{}'.format(external_nat))\n args.ssh_key_file = ssh.Keys.DEFAULT_KEY_FILE\n\n ssh_helper = ssh_utils.BaseSSHCLIHelper()\n ssh_helper.Run(args)\n identity_file = ssh_helper.keys.key_file\n\n user, _ = ssh_utils.GetUserAndInstance(args.name)\n host_keys = self._GetHostKeyFromInstance(args.zone, ssh_helper, instance)\n options = self._GetSSHOptions(args.name, ssh_helper,\n instance, host_keys)\n\n public_key = ssh_helper.keys.GetPublicKey().ToEntry(include_comment=True)\n oslogin_state = ssh.GetOsloginState(\n instance,\n ssh_helper.GetProject(\n self.client, properties.VALUES.core.project.Get(required=True)),\n user,\n public_key,\n None,\n self.release_track,\n username_requested=False,\n messages=self.client.messages)\n user = oslogin_state.user\n\n remote = ssh.Remote(external_nat, user)\n if not oslogin_state.oslogin_enabled:\n self._WaitForSSHKeysToPropagate(ssh_helper, remote, identity_file, user,\n instance, options)\n\n extra_flags = []\n # Ctpu seems to be forwarding some other 
ports on what\n # seems like the TPU node. Need to understand better before enabling.\n if args.forward_ports:\n extra_flags.extend(\n ['-A', '-L', '6006:localhost:6006', '-L', '8888:localhost:8888'])\n ssh_cmd_args = {\n 'remote': remote,\n 'identity_file': identity_file,\n 'options': options,\n 'extra_flags': extra_flags\n }\n\n cmd = ssh.SSHCommand(**ssh_cmd_args)\n max_attempts = 10\n sleep_interval = 30\n # Since the instance was just created, it can take a while for the instance\n # to be ready to accept ssh connections, therefore retry up to 5m. Doesn't\n # need to be backed off, regular interval retry is sufficient since we\n # aren't looking to throttle.\n for i in range(max_attempts):\n try:\n log.status.Print('SSH Attempt #{}...'.format(i))\n # Errors from SSH itself result in an ssh.CommandError being raised\n return_code = cmd.Run(\n ssh_helper.env,\n putty_force_connect=properties.VALUES.ssh.putty_force_connect.GetBool())\n if return_code:\n # This is the return code of the remote command.\n # Problems with SSH itself will result in ssh.CommandError\n # being raised above.\n sys.exit(return_code)\n except ssh.CommandError as e:\n if i == max_attempts - 1:\n raise e\n log.status.Print(\n 'Retrying: SSH command error: {}'.format(six.text_type(e)))\n time.sleep(sleep_interval)\n continue\n break", "def config_entry(hass: HomeAssistant) -> MockConfigEntry:\n config_entry = MockConfigEntry(\n domain=DOMAIN,\n unique_id=DEVICE_MAC,\n data={CONF_HOST: \"1.1.1.1\"},\n title=DEVICE_NAME,\n )\n config_entry.add_to_hass(hass)\n return config_entry", "async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:\n # As there currently is no way to import options from yaml\n # when setting up a config entry, we fallback to adding\n # the options to the config entry and pull them out here if\n # they are missing from the options\n _async_import_options_from_data_if_missing(hass, entry)\n\n address = entry.data[CONF_HOST]\n name = entry.data[CONF_NAME]\n data = HarmonyData(hass, address, name, entry.unique_id)\n await data.connect()\n\n await _migrate_old_unique_ids(hass, entry.entry_id, data)\n\n cancel_listener = entry.add_update_listener(_update_listener)\n\n async def _async_on_stop(event):\n await data.shutdown()\n\n cancel_stop = hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, _async_on_stop)\n\n hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {\n HARMONY_DATA: data,\n CANCEL_LISTENER: cancel_listener,\n CANCEL_STOP: cancel_stop,\n }\n\n await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)\n\n return True", "def upsert_entry(self, entry_group_name, entry_id, entry):\n entry_name = '{}/entries/{}'.format(entry_group_name, entry_id)\n try:\n persisted_entry = self.get_entry(entry_name)\n self.__log_entry_operation('already exists', entry_name=entry_name)\n if self.__entry_was_updated(persisted_entry, entry):\n persisted_entry = self.update_entry(entry)\n else:\n self.__log_entry_operation('is up-to-date',\n entry=persisted_entry)\n return persisted_entry\n except exceptions.PermissionDenied:\n self.__log_entry_operation('does not exist', entry_name=entry_name)\n persisted_entry = self.create_entry(\n entry_group_name=entry_group_name,\n entry_id=entry_id,\n entry=entry)\n return persisted_entry\n except exceptions.FailedPrecondition as e:\n logging.warning('Entry was not updated: %s', entry_name)\n raise e", "def fetch_entry(self, entry_id, **args):\n return self.fetch(\"/entry/\" + entry_id, **args)", "def create_entry(hass: HomeAssistant, 
device_id: str = DEVICE_UNIQUE_ID) -> ConfigEntry:\n entry = MockConfigEntry(\n domain=DOMAIN,\n title=\"Anova\",\n data={\n CONF_USERNAME: \"[email protected]\",\n CONF_PASSWORD: \"sample\",\n \"devices\": [(device_id, \"type_sample\")],\n },\n unique_id=\"[email protected]\",\n )\n entry.add_to_hass(hass)\n return entry", "def ssh(self) -> Optional[pulumi.Input['LinuxProfilePropertiesSshArgs']]:\n return pulumi.get(self, \"ssh\")", "def GetLinkedFileEntry(self):\n link = self._GetLink()\n if not link:\n return None\n\n # TODO: is there a way to determine the MFT entry here?\n link_mft_entry = None\n\n parent_path_spec = getattr(self.path_spec, 'parent', None)\n path_spec = ntfs_path_spec.NTFSPathSpec(\n location=link, parent=parent_path_spec)\n\n is_root = bool(\n link == self._file_system.LOCATION_ROOT or\n link_mft_entry == self._file_system.MFT_ENTRY_ROOT_DIRECTORY)\n\n return NTFSFileEntry(\n self._resolver_context, self._file_system, path_spec, is_root=is_root)" ]
[ "0.49446183", "0.49134016", "0.4838572", "0.47650325", "0.47643816", "0.47172263", "0.46717143", "0.4544288", "0.45227647", "0.45134276", "0.44612974", "0.44435745", "0.44380474", "0.4422858", "0.44197652", "0.44181973", "0.4392723", "0.43927157", "0.4390919", "0.43838602", "0.43483457", "0.43454877", "0.4344575", "0.4331198", "0.43150443", "0.43054456", "0.42948672", "0.42817432", "0.42808375", "0.42593315" ]
0.6858242
0
Iterates over all entries in a Keepass database and keeps only the entries containing KeeAgent settings.
>>> get_ssh_entries(KP_DB)
[<SshEntry ...>, ...]
def get_ssh_entries(kdb):
    entries = kdb.entries
    entries = [try_parse_ssh_entry(kdb,e) for e in entries]
    entries = [e for e in entries if e]
    return entries
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_entries_all(self):\n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n return self.database.find_entries_by_title('.*', \n regex=True)", "def GetSSHKeys():\n keydict = {}\n for rec in database.db.itervalues():\n if 'keys' in rec:\n keydict[rec['name']] = rec['keys']\n return keydict", "def get_keys(weat_db):\n import updater\n keys = updater.list_keys(weat_db, verbose=False)\n return keys", "def getThings(dbn='core', env=None):\n global gDbEnv\n\n if env is None:\n env = gDbEnv\n\n if env is None:\n raise DatabaseError(\"Database environment not set up\")\n\n entries = []\n subDb = gDbEnv.open_db(dbn.encode(\"utf-8\"), dupsort=True) # open named sub db named dbn within env\n with gDbEnv.begin(db=subDb) as txn: # txn is a Transaction object\n with txn.cursor() as cursor:\n if cursor.first(): # first key in database\n while True:\n key = cursor.key().decode()\n if len(key) == DID_LENGTH and \"/\" not in key:\n value = cursor.value().decode()\n ser, sep, sig = value.partition(SEPARATOR)\n try:\n dat = json.loads(ser, object_pairs_hook=ODict)\n except ValueError as ex:\n if cursor.next():\n continue\n else:\n break\n try:\n did, index = dat[\"signer\"].rsplit(\"#\", maxsplit=1)\n except (AttributeError, ValueError) as ex:\n if cursor.next():\n continue\n else:\n break\n\n if did != key: # not self signed so thing\n entries.append(key)\n if not cursor.next(): # next key in database if any\n break\n return entries", "def get_hostkey_list(self):\n return self.hostkey", "def get_keys(self):\r\n\t\tlogger.debug(\"Getting the keys\")\r\n\t\t\r\n\t\treturn db.get_items('keys')", "def retrieve_ths_entries(user, passwd):\n srv = connect(user=user, passwd=passwd)\n if srv:\n ths_collection = srv[\"aaew_ths\"]\n yield from apply_view(ths_collection, \"ths/all_active_thsentry_objects\")\n else:\n print(\"could not connect to couchdb server\")\n return []", "def readHydroShareEntries(context):\n return GenericMetadata._readEntriesForSection(context.projectDir, GenericMetadata.HYDROSHARE_SECTION)", "def _ReadEntries(self):\n scope = {}\n filename = os.path.join(self._root_dir, self._options.entries_filename)\n if not os.path.exists(filename):\n return []\n exec(gclient_utils.FileRead(filename), scope)\n return scope[\"entries\"]", "def get_entries(self, history=False, first=False, recursive=True,\n path=None, group=None, **kwargs):\n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n if 'regex' in kwargs:\n kwargs['regex'] = is_truthy(kwargs['regex']) \n return self.database.find_entries(recursive=recursive,\n path=path, \n group=group,\n history=history, \n first=first, \n **kwargs)", "def get_keys(self):\r\n\r\n #using database\r\n\r\n if self.using_database:\r\n aprint('GET KEYS')\r\n value_tuple = (notebookname,)\r\n db_cursor.execute(\"SELECT keyword\"\r\n +\" FROM keys_to_indexes\"\r\n +\" WHERE notebook=?;\",\r\n value_tuple)\r\n fetched = db_cursor.fetchall()\r\n if fetched:\r\n return {key[0] for key in fetched}\r\n\r\n return set()\r\n\r\n #using shelf\r\n\r\n return self.key_dict.keys()", "def get_all( # type: ignore\n self,\n limit: t.Optional[int] = None, # dead: disable\n offset: t.Optional[int] = None, # dead: disable\n ) -> t.List[KeyValueEntity]:\n items = dict(os.environ).items()\n return [KeyValueEntity(uuid=k, val=v) for k, v in items]", "def _internal_get_ssh_device_configs(self) -> List[dict]:\n\n ssh_device_config_list = []\n\n for device_config in self._internal_get_device_configs():\n 
dev_type = device_config[\"deviceType\"]\n\n if dev_type == \"network/ssh\":\n ssh_device_config_list.append(device_config)\n\n return ssh_device_config_list", "def sshkeys(self):\n return self._authenticated_server_proxy.sshkeys", "def get_entries(self):\n return self._netdis.loxone.entries", "def get_entries(self, chemsys):\n\n new_q = dict(self.query)\n new_q[\"chemsys\"] = {\"$in\": list(self.chemsys_permutations(chemsys))}\n fields = {f: 1 for f in [\"structure\", \"material_id\", \"thermo.energy\", \"unit_cell_formula\", \"calc_settings\"]}\n data = list(self.materials().find(new_q, fields))\n\n all_entries = []\n\n for d in data:\n parameters = {\"is_hubbard\": d[\"calc_settings\"][\"is_hubbard\"],\n \"hubbards\": d[\"calc_settings\"][\"hubbards\"],\n \"potcar_spec\": d[\"calc_settings\"][\"potcar_spec\"],\n \"run_type\": d[\"calc_settings\"][\"run_type\"]\n }\n\n entry = ComputedEntry(Composition(d[\"unit_cell_formula\"]),\n d[\"thermo\"][\"energy\"], 0.0, parameters=parameters,\n entry_id=d[\"material_id\"],\n data={\"oxide_type\": oxide_type(Structure.from_dict(d[\"structure\"]))})\n\n all_entries.append(entry)\n\n return set(all_entries)", "def iterkeys(self, essid):\n with SessionContext(self.SessionClass) as session:\n q = session.query(PAW2_DBObject.key)\n q = q.join(PYR2_DBObject).join(ESSID_DBObject)\n q = q.filter(ESSID_DBObject.essid == essid)\n keys = q.all()\n return (c[0] for c in keys)", "def get_entries_by_password(self, password, regex=False, flags=None,\n group=None, history=False, first=False): \n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n return self.database.find_entries_by_password(password, \n regex, \n flags, \n group, \n history, \n first)", "def list_keys(self, label=None):\r\n _filter = NestedDict({})\r\n if label:\r\n _filter['sshKeys']['label'] = query_filter(label)\r\n\r\n return self.client['Account'].getSshKeys(filter=_filter.to_dict())", "def _GetHostKeyFromInstance(self, zone, ssh_helper, instance):\n instance_ref = instance_flags.SSH_INSTANCE_RESOLVER.ResolveResources(\n [instance.name], compute_scope.ScopeEnum.ZONE, zone,\n self.resources,\n scope_lister=instance_flags.GetInstanceZoneScopeLister(self.client))[0]\n project = ssh_helper.GetProject(self.client, instance_ref.project)\n host_keys = ssh_helper.GetHostKeysFromGuestAttributes(\n self.client, instance_ref, instance, project)\n\n if host_keys is not None and not host_keys:\n # Only display this message if there was an attempt to retrieve\n # host keys but it was unsuccessful(yielded empty dict). If Guest\n # Attributes is disabled, there is no attempt to retrieve host keys.\n log.status.Print('Unable to retrieve host keys from instance metadata. 
'\n 'Continuing.')\n return host_keys", "def iterkeys(self, essid):\n return self.cli.essids.keys(essid).__iter__()", "def entries():\n\n\treturn [entry.value for entry in db.session.query(Entry).all()]", "def get_electrode_entries(self, chemsys,working_ions):\n\n self.logger.info(\"Getting entries for: {}\".format(chemsys))\n\n new_q = dict(self.query)\n new_q[\"chemsys\"] = {\"$in\": list(chemsys_permutations(chemsys))}\n fields = [\"structure\", self.materials.key, \"thermo.energy\",\n \"unit_cell_formula\", \"calc_settings.is_hubbard\",\n \"calc_settings.hubbards\", \"calc_settings.potcar_spec\",\n \"calc_settings.run_type\"]\n data = list(self.materials.query(fields, new_q))\n\n all_entries = []\n\n for d in data:\n parameters = {\"is_hubbard\": d[\"calc_settings\"][\"is_hubbard\"],\n \"hubbards\": d[\"calc_settings\"][\"hubbards\"],\n \"potcar_spec\": d[\"calc_settings\"][\"potcar_spec\"],\n \"run_type\": d[\"calc_settings\"][\"run_type\"]\n }\n\n entry = ComputedEntry(Composition(d[\"unit_cell_formula\"]),\n d[\"thermo\"][\"energy\"], 0.0, parameters=parameters,\n entry_id=d[self.materials.key],\n data={\"oxide_type\": oxide_type(Structure.from_dict(d[\"structure\"]))})\n\n all_entries.append(entry)\n\n self.logger.info(\"Total entries in {} : {}\".format(\n chemsys, len(all_entries)))\n\n return all_entries", "def get_all_keys(self):\n return self.psettings.allKeys()", "def get_ssh_device_configs(self, exclude_upnp=False) -> List[dict]:\n ssh_device_config_list = []\n\n for devinfo in self._internal_get_device_configs():\n dev_type = devinfo[\"deviceType\"]\n\n if exclude_upnp and dev_type == \"network/upnp\":\n continue\n\n if \"ssh\" in devinfo:\n ssh_device_config_list.append(devinfo)\n\n return ssh_device_config_list", "def get_entries_by_path(self, entry_path_str, regex=False, flags=None,\n group=None, history=False, first=False): \n if self.database is None:\n raise DatabaseNotOpened('No KeePass Database Opened.')\n else:\n entry_path_list = entry_path_str.split('/')\n return self.database.find_entries_by_path(entry_path_list,\n regex,\n flags,\n group,\n history,\n first)", "def get_entries_for_topic(cls, topic, entry_id_list):\n\t\tresults = cls.get([cls.create_key(topic, entry_id)\n\t\t\t\t\t\t\t\t\t\t\t for entry_id in entry_id_list])\n\t\t# Filter out those pesky Nones.\n\t\treturn [r for r in results if r]", "def iterentries(self):\n for key in self.iterkeys():\n yield self.get(key)", "def get_ssh_keys(self, user_id):\n _gu = self.get_user(user_id)\n if _gu is None:\n return []\n\n # build URL and make request\n return self._get('/users/{0}/keys'.format(_gu['id']))", "def getEntities(dbn='core', env=None):\n global gDbEnv\n\n if env is None:\n env = gDbEnv\n\n if env is None:\n raise DatabaseError(\"Database environment not set up\")\n\n entries = []\n subDb = gDbEnv.open_db(dbn.encode(\"utf-8\"), dupsort=True) # open named sub db named dbn within env\n with gDbEnv.begin(db=subDb) as txn: # txn is a Transaction object\n with txn.cursor() as cursor:\n if cursor.first(): # first key in database\n while True:\n key = cursor.key().decode()\n if len(key) == DID_LENGTH and \"/\" not in key:\n value = cursor.value().decode()\n ser, sep, sig = value.partition(SEPARATOR)\n try:\n dat = json.loads(ser, object_pairs_hook=ODict)\n except ValueError as ex:\n if cursor.next():\n continue\n else:\n break\n\n try:\n did, index = dat[\"signer\"].rsplit(\"#\", maxsplit=1)\n except (AttributeError, ValueError) as ex:\n if cursor.next():\n continue\n else:\n break\n\n entry = 
ODict(did=key)\n if did == key: # self signed so agent\n entry[\"kind\"] = \"agent\"\n else: # not self signed so thing\n entry[\"kind\"] = \"thing\"\n entries.append(entry)\n\n if not cursor.next(): # next key in database if any\n break\n return entries" ]
[ "0.63077664", "0.59141475", "0.5772629", "0.5723269", "0.54638463", "0.53971255", "0.5370203", "0.53014195", "0.52838165", "0.51979524", "0.5167056", "0.5166374", "0.51619893", "0.51327246", "0.5125212", "0.51061887", "0.50975007", "0.50789243", "0.5063901", "0.502756", "0.5024316", "0.5016562", "0.5011692", "0.50050706", "0.49878037", "0.49855015", "0.49777833", "0.49426326", "0.49370363", "0.49249202" ]
0.78091073
0
Initializes the message queue according to a user choice: zmq (default) or rabbitmq.
def __init__(self, mq_choice="zmq"):
    self.mq = mq_choice
    func = getattr(self, "_init_{}".format(self.mq))
    func()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_email_queue():\n g.setdefault('email_queue', [])", "def on_init(self, queue=None, **kwargs):\n self.queue = queue if queue else Queue()", "def __init__(self, address, queue_name):\n self.connection = pika.BlockingConnection(\n pika.ConnectionParameters(address))\n self.queue_name = queue_name\n\n # create the channel\n self.channel = self.connection.channel()\n\n # declare the queue\n self.channel.queue_declare(queue=queue_name, durable=True)\n\n logging.info(\"Message Broker connected to {0}\".format(address))", "def setup_queue(self):\n self.logger.info('declaring queue %s', self.queue)\n if self.otq:\n self._channel.queue_declare(self.on_queue_declareok, self.queue, auto_delete=True)\n else:\n self._channel.queue_declare(self.on_queue_declareok, self.queue)", "def __init__(self, queue_id):\n self.queue_id = queue_id\n self.action_type = 'set_queue'", "def perform_setup():\n global credentials, connection, channel\n credentials = pika.PlainCredentials('guest', 'guest') # AUTH via Default guest user on RabbitMQ\n connection = pika.BlockingConnection(pika.ConnectionParameters(\"127.0.0.1\", 5672, '/', credentials)) # Using rabbit-mq container name to access the RabbitMQ container from other containers\n channel = connection.channel()\n channel.queue_declare(queue='poll', durable=True)", "def __init__(self, queue_name, **kwargs):\n super(Queue, self).__init__(**kwargs)\n self.value = queue_name", "def _setup_tubes(self):\n chan = self.channel\n inp = self.config[self.MODULE_NAME]['amqp']['in']\n out = self.config[self.MODULE_NAME]['amqp']['out']\n if inp['exchange']:\n log.info('generating Input Queue'+ str(inp))\n chan.exchange_declare(**inp)\n self.qname = chan.queue_declare(exclusive=True).queue\n chan.queue_bind(exchange=inp['exchange'],queue=self.qname)\n self.consume = lambda cb : chan.basic_consume(cb,queue=self.qname,no_ack=True)\n self.start_loop = lambda : pika.asyncore_loop()\n\n if out['exchange']:\n log.info('generating Output Exchange'+ str(out))\n chan.exchange_declare(**out)\n self.publish = lambda msg: self.channel.basic_publish(exchange=out['exchange'],routing_key='',body=msg)", "def preProcess(self, msg):\n\n # open connection\n self.conn = Connection(\n user=self.user, password=self.password,\n vhost=self.vhost, host=self.host,\n heartbeat=self.heartbeat, debug=self.debug)\n\n # create AMQP channel\n self.channel = self.conn.channel()\n self.channel.exchange.declare(self.exchange, self.exchange_type)\n self.channel.queue.declare(self.queue, self.auto_delete)\n self.channel.queue.bind(self.queue, self.exchange, self.routing_key)", "def preProcess(self, msg):\n\n # open connection\n self.conn = Connection(\n user=self.user, password=self.password,\n vhost=self.vhost, host=self.host,\n heartbeat=self.heartbeat, debug=self.debug)\n\n # create AMQP channel\n self.channel = self.conn.channel()\n self.channel.exchange.declare(self.exchange, self.exchange_type)\n self.channel.queue.declare(self.queue, self.auto_delete)\n self.channel.queue.bind(self.queue, self.exchange, self.routing_key)", "def preProcess(self, msg):\n\n # open connection\n self.conn = Connection(\n user=self.user, password=self.password,\n vhost=self.vhost, host=self.host,\n heartbeat=self.heartbeat, debug=self.debug)\n\n # create AMQP channel\n self.channel = self.conn.channel()\n self.channel.exchange.declare(self.exchange, self.exchange_type)\n self.channel.queue.declare(self.queue, self.auto_delete)\n self.channel.queue.bind(self.queue, self.exchange, self.routing_key)", "def __init__(self):\n 
self.queues=[]", "def setup_queues_and_bindings(self):\n self._channel.exchange_declare(self.setup_queue, exchange=self.exchange, passive=True)", "def __init__(self, queue_id=None):\n super().__init__()\n self.queue_id = queue_id", "def __init__(__self__, *,\n endpoint_type: pulumi.Input[str],\n queue_name: Optional[pulumi.Input[str]] = None,\n resource_id: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"endpoint_type\", 'StorageQueue')\n if queue_name is not None:\n pulumi.set(__self__, \"queue_name\", queue_name)\n if resource_id is not None:\n pulumi.set(__self__, \"resource_id\", resource_id)", "def __init__(self, config, queue_name):\n self.work_queue_client = WorkQueueClient(config, queue_name)", "def __init__(self, config, queue_name):\n self.work_queue_client = WorkQueueClient(config, queue_name)", "def _queue_create(self, **kwargs):\n name = self.generate_random_name()\n return self.clients(\"zaqar\").queue(name, **kwargs)", "def __init__(self, queue, usercallback):\n self.queue = queue\n self.usercallback = usercallback", "def __init__(self):\n self.queue = Queue()", "def setup_queue(self, channel, queue_name):\n logger.info('Declaring queue %s', queue_name)\n channel.queue_declare(queue = queue_name, \n durable = True,\n auto_delete = False)", "def amqp(self, **options):\n pass", "def setUpClass(cls):\n cls.queue = RabbitQueue(QUEUE_CONN_PARAMS)", "def __init__(self) -> None:\n self._queue = []", "def _create_queue(self):\n # Instantiate\n queue = pbs.queue(verbose=not self.quiet)\n\n if self.q == 'ember':\n # Submitting to Utah ember cluster\n ppn = 12\n cpus = ppn if self.cpus is None else min(self.cpus, ppn)\n walltime = self.walltime if int(self.walltime.split(':')[0]) < 72 else '72:00:00'\n queue.create(label=self.label, nodes=self.nodes, qos=self.qos, umask=self.umask,\n walltime=walltime, ppn=ppn, cpus=cpus, partition='ember', alloc='sdss')\n elif self.q is not None:\n # All other self.q values expected for Portsmouth cluster,\n # sciama. In this case, the number of nodes is queue\n # dependent, and qos is not set\n if self.q == 'sciama1.q':\n ppn = 12\n elif self.q == 'sciama3.q':\n ppn = 20\n else:\n ppn = 16\n cpus = ppn if self.cpus is None else min(self.cpus, ppn)\n queue.create(label=self.label, nodes=self.nodes, umask=self.umask,\n walltime=self.walltime, queue=self.q, ppn=ppn, cpus=cpus)\n else:\n # self.q can be None when submitting to both the Portsmouth\n # and Utah clusters. In this case, the default queue\n # destination and ppn is correct. 
qos is also set, but this\n # should only be used when submitting to Utah.\n ppn = 16\n cpus = ppn if self.cpus is None else min(self.cpus, ppn)\n queue.create(label=self.label, nodes=self.nodes, qos=self.qos, umask=self.umask,\n walltime=self.walltime, ppn=ppn, cpus=cpus)\n\n return queue", "def setup_queues():\n sqs = boto.connect_sqs()\n sqs.create_queue('mls_parse_requests')\n sqs.create_queue('mls_fetcher')", "def __init__(self, ip='127.0.0.1', port='50020'):\n self.ip = ip \n self.port = port\n self.ctx = zmq.Context()\n self.socket = zmq.Socket(self.ctx, zmq.REQ) # this is pub socket", "def __init__(self, pyrps, queue, consumer_id):\n self.pyrps = pyrps\n self.queue = queue\n self.consumer_id = consumer_id", "def send_msg(self, my_queue, my_msg):", "def init_connect_mq(self):\n try:\n mq_username = Configs.mq_username\n mq_pwd = Configs.mq_pwd\n mq_ip_addr = Configs.mq_ip_addr\n mq_port_num = Configs.mq_port_num\n mq_vhost = Configs.mq_vhost\n\n mq_credentials = pika.PlainCredentials(mq_username, mq_pwd)\n mq_connection = pika.BlockingConnection(\n pika.ConnectionParameters(host=mq_ip_addr, port=mq_port_num, virtual_host=mq_vhost,\n credentials=mq_credentials))\n # connect to mq channel\n self.mq_channel = mq_connection.channel()\n self.mq_channel.exchange_declare(exchange=Configs.mq_exchange_name, exchange_type='topic', durable='true')\n # self.mq_channel.queue_declare(queue='test', durable=False, arguments={'x-message-ttl': 10000})\n self.mq_conn_flag = True\n print(\" ************** MQ Connect Success ************** \")\n except Exception as e:\n print(e)" ]
[ "0.69520867", "0.6691533", "0.6441849", "0.63638926", "0.63620484", "0.63039374", "0.6296402", "0.6273781", "0.6147966", "0.6147966", "0.6147966", "0.6134453", "0.6087962", "0.60812384", "0.6079611", "0.603834", "0.603834", "0.60343426", "0.60171574", "0.5870343", "0.58562565", "0.5831944", "0.5790656", "0.57673466", "0.5763094", "0.5753716", "0.5742362", "0.5721876", "0.57214606", "0.5721067" ]
0.7284444
0
Update the ESGF SLCS token.
def generate_esgf_slcs_token(self):
    client = ESGFSLCSClient(self.request)
    if client.get_token():
        try:
            client.refresh_token()
        except Exception as err:
            self.session.flash('Could not refresh token: {}'.format(escape(err.message)), queue="danger")
        else:
            self.session.flash('ESGF token was updated.', queue="success")
        return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))
    else:
        try:
            auth_url = client.authorize()
        except Exception as err:
            self.session.flash('Could not retrieve token: {}'.format(escape(err.message)), queue="danger")
            return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))
        else:
            return HTTPFound(location=auth_url)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _update_token(token):\n session.token = token", "def update(self):\n token = request_token(self.client_id, self.client_secret)\n self.request_time = datetime.now()\n self._initialized = True\n self.token = token", "def update_cloud_token(self):\n self._cloud_token = rest_util.get_server_access_token(self._url_login, self._credential, REST_HEADERS,\n self._token_prefix, self._token_suffix)", "def forget_esgf_slcs_token(self):\n client = ESGFSLCSClient(self.request)\n client.delete_token()\n self.session.flash(\"ESGF token removed.\", queue='info')\n return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))", "async def token(self, ctx):\n logger.info(\"token command issued by {0}\".format(ctx.message.author.name))\n await ctx.message.delete()\n spotify_token = spotipy.util.prompt_for_user_token_auto(self.spotify_username, self.spotify_scope, self.spotify_id, self.spotify_secret)\n self.spotify_client = spotipy.Spotify(auth=spotify_token)\n await ctx.send(\"Spotify refresh token updated\")", "def update_threatexchange_token() -> t.Dict:\n token = bottle.request.json[\"token\"]\n is_valid_token = try_api_token(token)\n if is_valid_token:\n AWSSecrets(secrets_prefix).update_te_api_token(token)\n return {}\n\n bottle.response.status_code = 400\n return {}", "def refresh_token():\n try:\n deserialized_message = peek_app_token()\n app_id = deserialized_message.get('app_id')\n installation_id = deserialized_message.get('installation_id')\n store_token(get_token(app_id, installation_id))\n\n except Exception as exc:\n log.error(f'Could not refresh token.\\n{exc}')\n traceback.print_exc(file=sys.stderr)", "def _update_token(self, request):\n\n # Refresh our source credentials.\n self._source_credentials.refresh(request)\n\n body = {\n \"delegates\": self._delegates,\n \"scope\": self._target_scopes,\n \"lifetime\": str(self._lifetime) + \"s\"\n }\n\n headers = {\n 'Content-Type': 'application/json',\n }\n\n # Apply the source credentials authentication info.\n self._source_credentials.apply(headers)\n\n self.token, self.expiry = _make_iam_token_request(\n request=request,\n principal=self._target_principal,\n headers=headers,\n body=body)", "def refresh_token(self):\n token = json.loads(get_metadata(\n 'instance/service-accounts/%s/token' % self.service_account,\n ))\n seconds = token['expires_in'] - 60\n self._expiration_time = (\n datetime.datetime.now() + datetime.timedelta(seconds=seconds)\n )\n self._token = token['access_token']", "def update_token(token):\n try:\n payload = jwt.decode(token, os.environ.get('SECRET', 'test'))\n payload['exp'] = datetime.utcnow() + timedelta(days=100)\n jwt_bytes = jwt.encode(\n payload,\n os.environ.get('SECRET', 'test'),\n algorithm='HS256'\n )\n return jwt_bytes.decode('utf-8')\n except Exception as e:\n raise Exception(str(e))", "def __header_update_token(self) -> None:\n cookies = self.session.cookies.get_dict()\n self.session.headers.update({\n 'Referer': 'https://efdsearch.senate.gov/search/',\n 'X-CSRFToken': cookies['csrftoken'],\n })", "def refresh_auth_token(self):\n self._auth_token = self.generate_auth_token()", "def refresh_token(self):\n url = 'https://www.yikyak.com/api/auth/token/refresh'\n token = self._request('POST', url)\n self.session.headers.update({'x-access-token': token})", "def update_token(self, token_response):\n self.access_token = token_response['access_token']\n self.access_token_expires = datetime.fromtimestamp(\n time.time() + token_response['expires_in'],\n )\n if 'refresh_token' 
in token_response:\n self.refresh_token = token_response['refresh_token']", "async def refresh_tokens(hass: HomeAssistant, entry: ConfigEntry):\n config = entry.data\n account_session = aiohttp_client.async_get_clientsession(hass)\n\n account = C4Account(config[CONF_USERNAME], config[CONF_PASSWORD], account_session)\n await account.getAccountBearerToken()\n\n controller_unique_id = config[CONF_CONTROLLER_UNIQUE_ID]\n director_token_dict = await account.getDirectorBearerToken(controller_unique_id)\n director_session = aiohttp_client.async_get_clientsession(hass, verify_ssl=False)\n\n director = C4Director(\n config[CONF_HOST], director_token_dict[CONF_TOKEN], director_session\n )\n director_token_expiry = director_token_dict[\"token_expiration\"]\n\n _LOGGER.debug(\"Saving new tokens in hass data\")\n entry_data = hass.data[DOMAIN][entry.entry_id]\n entry_data[CONF_ACCOUNT] = account\n entry_data[CONF_DIRECTOR] = director\n entry_data[CONF_DIRECTOR_TOKEN_EXPIRATION] = director_token_expiry", "def send_lsp_update(lsp_name, new_path):\n print(\"Updating \", lsp_name, \"on NorthStar Controller\")\n requs = requests.get(\n 'https://' + server_ip +\n ':8443/NorthStar/API/v1/tenant/1/topology/1/te-lsps/',\n headers=auth_header, verify=False)\n dump = json.dumps(requs.json())\n lsp_list = json.loads(dump)\n # Find target LSP to use lspIndex\n for lsp in lsp_list:\n if lsp['name'] == lsp_name:\n break\n # Fill only the required fields\n # ero = ero_input\n ero = []\n\n # Build new ERO Data\n\n print lsp\n for ip_address in new_path:\n hop = {\n \"topoObjectType\": \"ipv4\",\n \"address\": ip_address,\n # \"loose\" : True,\n }\n ero.append(hop)\n new_lsp = {}\n# \"provisioningType\":\"SR\"\n for key in ('from', 'to', 'name', 'lspIndex', 'pathType', 'provisioningType'):\n new_lsp[key] = lsp[key]\n\n new_lsp['plannedProperties'] = {\n \"bandwidth\": \"100M\",\n 'ero': ero\n # 'calculatedEro' : []\n #'preferredEro' : ero\n }\n response = requests.put(\n 'https://10.10.2.64:8443/NorthStar/API/v1/tenant/1/topology/1/te-lsps/' + str(new_lsp[\n 'lspIndex']),\n json=new_lsp, headers=auth_header, verify=False)\n print(\"LSP Updated on NorthStar Controller\")\n print response", "def fusion_api_edit_lsg(self, body, uri, api=None, headers=None):\n return self.lsg.update(body, uri, api, headers)", "def _set_csp_update(self, response, event):\n cache_key = 'custom_csp_update:{}'.format(event.id)\n update = cache.get(cache_key)\n if update is not None:\n # it was set, use that and exit early\n if update:\n response._csp_update = update\n return\n\n if not event.template:\n return\n if event.is_upcoming() or event.is_live() or not event.is_scheduled():\n return\n if 'vid.ly' not in event.template.name.lower():\n return\n if not event.template_environment.get('tag'):\n return\n\n tag = event.template_environment['tag']\n update = get_vidly_csp_headers(tag, private=not event.is_public())\n cache.set(cache_key, update, 60 * 60)\n # Now we've figured out what headers to update, set it on the response\n if update:\n response._csp_update = update", "def update(self):\n self.__token += self.__lines[self.__i]\n self.__i += 1", "def renew_token(cls, token_obj: \"AuthToken\") -> None:\n token_obj.renew_token(renewed_by=cls)", "def _upgrade_token(self, http_body):\n self.token_string = auth_sub_string_from_body(http_body)", "def updateSecurityContext(self, server_tok):\n\n resp = gss.initSecContext(self.service_name,\n context=self.ctx,\n input_token=server_tok,\n flags=self.flags,\n mech_type=self.mech_type,\n 
ttl=self.ttl)\n\n (self.ctx, _, _, self.token, self.last_ttl, _) = resp\n return self.token", "def updateTag(self, authenticationToken, tag):\r\n pass", "async def token(self, token):\n # [p]set token <token>\n\n if len(token) < 50:\n await self.bot.say(\"Invalid token.\")\n else:\n CacheAPI.set(key='dwarf_token', value=token, timeout=None)\n await self.bot.say(\"Token set. Restart me.\")\n log.debug(\"Token changed.\")", "def update(self):\n _LOGGER.debug(\"update called.\")\n try:\n # Get our Authentication Token from SEMS Portal API\n _LOGGER.debug(\"SEMS - Getting API token\")\n\n # Prepare Login Headers to retrieve Authentication Token\n login_headers = {\n 'Content-Type': 'application/json',\n 'Accept': 'application/json',\n 'token': '{\"version\":\"v2.1.0\",\"client\":\"ios\",\"language\":\"en\"}',\n }\n\n # Prepare Login Data to retrieve Authentication Token\n login_data = '{\"account\":\"'+self._config.get(CONF_USERNAME)+'\",\"pwd\":\"'+self._config.get(CONF_PASSWORD)+'\"}'\n\n # Make POST request to retrieve Authentication Token from SEMS API\n login_response = requests.post(_URL, headers=login_headers, data=login_data, timeout=_RequestTimeout)\n\n # Process response as JSON\n jsonResponse = json.loads(login_response.text)\n\n # Get all the details from our response, needed to make the next POST request (the one that really fetches the data)\n requestTimestamp = jsonResponse[\"data\"][\"timestamp\"]\n requestUID = jsonResponse[\"data\"][\"uid\"]\n requestToken = jsonResponse[\"data\"][\"token\"]\n\n _LOGGER.debug(\"SEMS - API Token recieved: \"+ requestToken)\n # Get the status of our SEMS Power Station\n _LOGGER.debug(\"SEMS - Making Power Station Status API Call\")\n\n # Prepare Power Station status Headers\n headers = {\n 'Content-Type': 'application/json',\n 'Accept': 'application/json',\n 'token': '{\"version\":\"v2.1.0\",\"client\":\"ios\",\"language\":\"en\",\"timestamp\":\"'+str(requestTimestamp)+'\",\"uid\":\"'+requestUID+'\",\"token\":\"'+requestToken+'\"}',\n }\n\n data = '{\"powerStationId\":\"'+self._config.get(CONF_STATION_ID)+'\"}' \n\n response = requests.post(_PowerStationURL, headers=headers, data=data, timeout=_RequestTimeout)\n\n # Process response as JSON\n jsonResponseFinal = json.loads(response.text)\n\n _LOGGER.debug(\"REST Response Recieved\")\n\n for key, value in jsonResponseFinal[\"data\"][\"inverter\"][0][\"invert_full\"].items():\n if(key is not None and value is not None):\n self._attributes[key] = value\n _LOGGER.debug(\"Updated attribute %s: %s\", key, value)\n except Exception as exception:\n _LOGGER.error(\n \"Unable to fetch data from SEMS. 
%s\", exception)", "def updateResource(self, authenticationToken, resource):\r\n pass", "def __update_token(self) -> bool:\r\n\r\n self.__sess.cookies.clear()\r\n\r\n r = self.__sess.get(f'{DOMAIN}/')\r\n m = re.search(r'var token = \\'(\\S{42,48})\\';', r.text)\r\n\r\n if not m:\r\n self.__log_msg(f'No token found!', is_err=True)\r\n return False\r\n\r\n old_token = self.__payload.get('token', None)\r\n self.__payload['token'] = m[1]\r\n\r\n # midnight today\r\n self.__token_expiration_date = datetime.now(self.__tz).replace(hour=0, minute=0, second=0, microsecond=0) + timedelta(1)\r\n\r\n if old_token:\r\n self.__log_msg(f'TOKEN UPDATED: \"{old_token}\" -> \"{m[1]}\"')\r\n else:\r\n self.__log_msg(f'TOKEN SET: \"{m[1]}\"')\r\n return True", "def refreshAuthentication(self, authenticationToken):\r\n pass", "def refresh_auth_token(self):\r\n \r\n # For some reason, the auth token in the root path only works if you're \r\n # unauthenticated. To get around that, we check if this is an authed\r\n # session and, if so, get the token from the profile page.\r\n \r\n if self.is_authed:\r\n req = self.session.get(f\"https://archiveofourown.org/users/{self.username}\")\r\n else:\r\n req = self.session.get(\"https://archiveofourown.org\")\r\n \r\n if req.status_code == 429:\r\n raise utils.HTTPError(\"We are being rate-limited. Try again in a while or reduce the number of requests\")\r\n \r\n soup = BeautifulSoup(req.content, \"lxml\")\r\n token = soup.find(\"input\", {\"name\": \"authenticity_token\"})\r\n if token is None:\r\n raise utils.UnexpectedResponseError(\"Couldn't refresh token\")\r\n self.authenticity_token = token.attrs[\"value\"]", "def refresh_token():\n return current_app.library_registry.admin_controller.refresh_token()" ]
[ "0.637459", "0.5720527", "0.57121843", "0.5614682", "0.55534583", "0.54942715", "0.5491389", "0.53698987", "0.5318541", "0.5290018", "0.52841055", "0.5265981", "0.5220092", "0.5216296", "0.52154887", "0.51749265", "0.51682895", "0.5163536", "0.5147349", "0.51034975", "0.508143", "0.50172293", "0.4985844", "0.49710292", "0.495915", "0.49504873", "0.4937275", "0.4930456", "0.49164927", "0.49066797" ]
0.6638531
0
Forget ESGF SLCS token.
def forget_esgf_slcs_token(self): client = ESGFSLCSClient(self.request) client.delete_token() self.session.flash("ESGF token removed.", queue='info') return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def revoke_token(token):\n token.delete_instance()", "def unfetch(self, token):\n\n self._token = token\n return", "def __del__(self):\n self.token_revoke()", "def delete_token(self):\n config.update(outlook_token=None)", "def invalidateSyncToken(self):\n self._cachedSyncToken = None", "async def revoke_token(self, request: Request, token: str) -> None:\n token_record = ...\n token_record.revoked = True\n token_record.save()", "def remove(self, token):\n self.rpc.call(MsfRpcMethod.AuthTokenRemove, [token])", "def pop_token(self):\n return self.tokens.pop()", "def remove_token(self, amount):\n self.M -= amount", "def revoke_refresh_token(cls, jti: str) -> None:\n redis = cls._conn_redis(cls)\n expired_time = int(timedelta(days=cls._REFRESH_TOKEN_EXPIRES).total_seconds())\n redis.setex(jti,expired_time,'true')", "async def token(self, ctx):\n logger.info(\"token command issued by {0}\".format(ctx.message.author.name))\n await ctx.message.delete()\n spotify_token = spotipy.util.prompt_for_user_token_auto(self.spotify_username, self.spotify_scope, self.spotify_id, self.spotify_secret)\n self.spotify_client = spotipy.Spotify(auth=spotify_token)\n await ctx.send(\"Spotify refresh token updated\")", "def revoke_token():\n return server.create_endpoint_response(RevocationEndpoint.ENDPOINT_NAME)", "def service_token_delete(self):\n\n self._client.delete(\n \"{}/servicetoken\".format(LKECluster.api_endpoint), model=self\n )", "def revoke_access_token(cls, jti: str) -> None:\n redis = cls._conn_redis(cls)\n expired_time = int(timedelta(minutes=cls._ACCESS_TOKEN_EXPIRES).total_seconds())\n redis.setex(jti,expired_time,'true')", "def clear(token):\n # type: (str) -> None\n if token == \"cache\" and ku.confirm():\n iwm.cache_clear()\n if token == \"recent\" and ku.confirm():\n iwm.cache_clear()", "def unpop_token(self, tok):\n self.tokens.append(tok)", "def reset_token(self, reset_token):\n\n self._reset_token = reset_token", "def destroy_read_token(mastertoken, config, name):\n mt_path = mastertoken['paths']['self']\n tokens = get_read_tokens(mastertoken, config)\n\n for token in tokens:\n if token['name'] == name:\n print(\"Found token with name: {}\".format(name))\n try:\n url = \"{}{}/read_tokens/{}\".format(config['domain_base'],\n mt_path, token['id'])\n resp = (api_call(url, 'delete', config['debug']))\n except ValueError as ex:\n abort(\"Unexpected response from packagecloud API: \"\n \"{}\".format(ex.message))\n if resp.status_code == 204:\n print(\"Token destroyed, name: {}\".format(name))\n print(\"Result: {}\".format(resp))\n return token['value']\n else:\n eprint(\"ERROR: Destroying token {} failed\".format(name))\n eprint(\"Result: {}\".format(resp))", "def generate_esgf_slcs_token(self):\n client = ESGFSLCSClient(self.request)\n if client.get_token():\n try:\n client.refresh_token()\n except Exception as err:\n self.session.flash('Could not refresh token: {}'.format(escape(err.message)), queue=\"danger\")\n else:\n self.session.flash('ESGF token was updated.', queue=\"success\")\n return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))\n else:\n try:\n auth_url = client.authorize()\n except Exception as err:\n self.session.flash('Could not retrieve token: {}'.format(escape(err.message)), queue=\"danger\")\n return HTTPFound(location=self.request.route_path('profile', userid=self.userid, tab='esgf_slcs'))\n else:\n return HTTPFound(location=auth_url)", "def delete(self):\n return self.request.delete_cookie('token')", "def deltoken(confirm, 
name):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n if not unlock_wallet(stm):\n return\n mph.wallet.removeTokenFromPublicName(name)\n set_shared_morphene_instance(stm)", "def reset(self) -> None:\n self._value = proxy.UndefToken", "def revoke(self, token):\n client = self.connect(VAULT_TOKEN)\n client.revoke_token(token)", "def make_reset_token(self, expiration=3600):\n return self._make_token({'id': self.id, 'op': 'reset'}, expiration)", "def revoke_token(self, subid):\n from expfactory.database.models import Participant\n\n p = Participant.query.filter(Participant.id == subid).first()\n if p is not None:\n p.token = \"revoked\"\n self.session.commit()\n return p", "def shutdown(api, settings):\n if api.check_token():\n update_tokenfile(api, settings)\n else:\n delete_tokenfile(settings)", "def clear(self, event, ts=None):\n return self.r.delete(self._keygen(event, ts))", "def revoke_token(decoded_token):\n jti = decoded_token['jti']\n user_identity = decoded_token[current_app.config['JWT_IDENTITY_CLAIM']]\n expires = datetime.fromtimestamp(decoded_token['exp'])\n\n db_token = BlacklistedToken(\n jti=jti,\n user_identity=user_identity,\n expires=expires\n )\n db.session.add(db_token)\n prune_if_necessary()\n db.session.commit()", "def soft_reset():", "def deauth(request):\n\n if(request.token):\n request.token.delete()\n return JsonResponse({'message': 'Your token is revoked'}) \n else:\n return HttpResponseBadRequest('It does not make sense to revoke a token ' +\n 'if no token are supplied to the request')" ]
[ "0.6220367", "0.62018794", "0.6077641", "0.5832691", "0.5752881", "0.5749907", "0.57359105", "0.56649905", "0.5649498", "0.5644481", "0.56320244", "0.558962", "0.5478687", "0.54679763", "0.5448393", "0.5416185", "0.53862184", "0.5349465", "0.52755475", "0.52732354", "0.5240098", "0.52008027", "0.5192559", "0.51886934", "0.5185509", "0.51663435", "0.51621217", "0.5161221", "0.5148091", "0.5136494" ]
0.7006974
0
print message if package not found in repository
def pkg_not_found_mess(pkgname: str, reponame: str) -> None: meta = MainData() print(('{0}Package {1}{2} {0}not found in \'{3}\' ' 'repository.{4}').format(meta.clrs['red'], meta.clrs['lcyan'], pkgname, reponame, meta.clrs['reset']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def package_info(self, package, repo):\n\n cmd_output = admin_tasks.yum_info(package, repo)\n if cmd_output:\n self.log.info(\"%s package exists\" % package)\n print(\"Command output: \\n\" + cmd_output)\n else:\n self.log.error(\"%s package was not found\" % package)\n sys.exit(1)", "def _print_missing(packages, verbose):\n if not packages:\n print(\"## No Rez packages were found.\")\n print(\"No data found\")\n\n return\n\n print(\"## Your command affects these Rez packages.\")\n\n template = \"{package.name}\"\n\n if verbose:\n template = \"{package.name}: {path}\"\n\n for line in sorted(\n template.format(package=package, path=finder.get_package_root(package))\n for package in packages\n ):\n print(line)", "def non_existing_package_error_test(self):\n client = TestClient()\n error = client.run(\"upload Pkg/0.1@user/channel -p hash1\", ignore_error=True)\n self.assertTrue(error)\n self.assertIn(\"ERROR: There is no local conanfile exported as Pkg/0.1@user/channel\",\n client.user_io.out)", "def package_exists (package_name, package_version, lang):\n\n url = make_package_url (package_name, package_version, lang)\n victim_file = download_file (url)\n\n if victim_file is None:\n return False\n else:\n return True", "def check_in_repo():\n if not os.path.isfile(\"setup.py\"):\n return \"Not in root-level PyTorch repo, no setup.py found\"\n with open(\"setup.py\") as f:\n s = f.read()\n if \"PyTorch\" not in s:\n return \"Not in PyTorch repo, 'PyTorch' not found in setup.py\"", "def test_packages(self):\n for pkg in self.expected_packages:\n status, output = commands.getstatusoutput('pkg_info -qx %s' % pkg)\n assert status == 0", "def find_pkg(self, pkg):\n pass", "def pkg_exists(self, repo_id, pkgname):\n self.send(repo_id, 'pkg_exists', pkgname)", "def _package_available(package_name: str) -> bool:\n try:\n return find_spec(package_name) is not None\n except ModuleNotFoundError:\n return False", "def show_packagelist(user, repo, packages, distro=False, version=False,\n name=False, match=False, pkgtype=False):\n\n print('Currently {}/{} contains these matching packages:'.format(\n user, repo))\n\n numpkgs = 0\n for package in packages:\n if (distro and not package['distro_version'] == distro) or \\\n (version and not package['version'] == version) or \\\n (name and not package['name'] == name) or \\\n (pkgtype and not package['type'] == pkgtype) or \\\n (match and match not in package['filename']):\n continue\n\n print(fmt_pkg(user, repo, package))\n numpkgs += 1\n\n print(\"Repo contains {} matching packages.\".format(numpkgs))", "def test_fetch_missing(self):\n saved_pkg = self.db.fetch(\"missing_pkg-1.2.tar.gz\")\n self.assertIsNone(saved_pkg)", "def test_fetch_missing(self):\n saved_pkg = self.db.fetch(\"missing_pkg-1.2.tar.gz\")\n self.assertIsNone(saved_pkg)", "def test_fetch_missing(self):\n saved_pkg = self.db.fetch(\"missing_pkg-1.2.tar.gz\")\n self.assertIsNone(saved_pkg)", "def test_package_can_not_be_found_in_registry(self):\n with self.with_config_update():\n with patch(\n \"aea.cli.registry.utils.get_package_meta\",\n side_effects=Exception(\"expected!\"),\n ), patch(\n \"aea.cli.registry.utils.find_item_locally\",\n side_effects=Exception(\"expected!\"),\n ), pytest.raises(\n ClickException,\n match=r\"Package .* details can not be fetched from the registry!\",\n ):\n self.runner.invoke(\n cli,\n [\n \"upgrade\",\n *self.LOCAL,\n self.ITEM_TYPE,\n f\"{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:latest\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n 
)", "def _is_present(module, path, pkgname):\n cmd = \"./LuaDist/bin/luadist list \" + pkgname\n ret_code, out, err = module.run_command(cmd, cwd=path)\n if ret_code != 0:\n module.fail_json(\n rc=ret_code,\n stdout=out,\n stderr=err,\n msg=\"Cannot check the status of one or more packages.\",\n )\n return pkgname in out", "def test_subversion_package_installed(host):\n assert host.package(PACKAGE).is_installed", "def test_get_missing_svn_repo(self):\n repo = 'testgetmissingrepo'\n svn = SpokeSVN(self.org_name, self.user_id)\n self.assertFalse(svn.get(repo)['data'])", "def test_returns_503_when_package_name_not_found(self, get_package, clone):\n from registry.tasks import TimeoutError\n # Mock the bowerlib.get_package method to avoid I/O\n # We pretend Bower knows what 'wat' is and a task has been dispatched\n # to clone it.\n get_package.return_value = {'name': 'wat', 'url': 'git://a-url.git'}\n\n # Mock the clone_repo task dispatch; throw an exception so we don't\n # wait.\n task = mock.MagicMock()\n clone.delay.return_value = task\n task.get.side_effect = TimeoutError()\n\n Package.objects.create(name=\"ember\", url=\"/foo\")\n url = reverse(\"find\", kwargs={'name': 'wat'})\n\n response = self.client.get(url)\n\n self.assertEqual(503, response.status_code)\n\n upstream = settings.UPSTREAM_BOWER_REGISTRY\n get_package.assert_called_once_with(upstream, 'wat')\n\n clone.delay.assert_called_once_with('wat', 'git://a-url.git')", "def test_source_package_exists(self):\n response = self.client.head(\n f'/filemanager/api/{self.upload_id}/content',\n headers={'Authorization': self.token}\n )\n self.assertEqual(response.status_code, status.OK)", "def repo_of_package(self, package_name: str) -> str:\n if package_name not in self.all_packages_dict:\n return Colors.BOLD(Colors.LIGHT_MAGENTA(\"local/\") + package_name)\n package = self.all_packages_dict[package_name]\n if package.type_of is PossibleTypes.AUR_PACKAGE or package.type_of is PossibleTypes.DEVEL_PACKAGE:\n return Colors.BOLD(Colors.LIGHT_MAGENTA(\"aur/\") + package_name)\n if package.repo is None:\n return Colors.BOLD(Colors.LIGHT_MAGENTA(\"local/\") + package_name)\n else:\n return Colors.BOLD(Colors.LIGHT_MAGENTA(\"{}/\".format(package.repo)) + package_name)", "def main():\n\n local_pkgs = set(os.listdir(GIT_FOLDER))\n local_pkgs = set([it.replace('.git', '') for it in local_pkgs])\n\n pkgdb_info = pkgdb_pkg_branch()\n\n pkgdb_pkgs = set(pkgdb_info.keys())\n\n ## Commented out as we keep the git of retired packages while they won't\n ## show up in the information retrieved from pkgdb.\n\n #if (local_pkgs - pkgdb_pkgs):\n #print 'Some packages are present locally but not on pkgdb:'\n #print ', '.join(sorted(local_pkgs - pkgdb_pkgs))\n\n if (pkgdb_pkgs - local_pkgs):\n print 'Some packages are present in pkgdb but not locally:'\n print ', '.join(sorted(pkgdb_pkgs - local_pkgs))\n\n tofix = set()\n for pkg in sorted(pkgdb_info):\n pkgdb_branches = pkgdb_info[pkg]\n git_branches = get_git_branch(pkg)\n diff = (pkgdb_branches - git_branches)\n if diff:\n print '%s missing: %s' % (pkg, ','.join(sorted(diff)))\n tofix.add(pkg)\n branch_package(pkg, diff)\n\n if tofix:\n print 'Packages fixed (%s): %s' % (\n len(tofix), ', '.join(sorted(tofix)))", "def check_module_path(pkg):\n src_dir_root = ''\n print(\"[root-get] DEBUG: Checking module path\")\n check_module_name = os.system('find %s -mindepth 2 -type d -name \"%s\" ! -path \"*tutorials*\" ! 
-path \"*dictpch*\"' % (ROOT_SOURCES, pkg))\n if check_module_name != 0:\n print(\"Not a ROOT package (we are working only with ROOT packages for now.)\")\n return False\n else:\n # if have such directory in root then we can try to get it's real path\n path = PathChecker()\n src_dir_root = path.path4module(pkg, ROOT_SOURCES)\n if src_dir_root != None:\n print(\"[root-get] We would use a module from {0:s}\".format(src_dir_root))\n else:\n print(\"Package not present in rootbase.\")\n print(\"Please provide manifest file path, else enter 'NA'\")\n p_manifest = raw_input()\n if p_manifest != 'NA':\n value = yaml_validator(p_manifest)\n if value == 1:\n print(\"Not a valid yml. Please provide valid yml. Exiting now.\")\n else:\n print(\"Downloading package using url.\")\n dn_path = downloader(p_manifest)\n #get path for downloaded directory\n filepath = Path(dn_path + \"/CMakeLists.txt\")\n if filepath.is_file():\n src_dir_root = dn_path\n else:\n print(\"No CMakeLists.txt present. Creating using manifest.\")\n rule_name = re.compile(\".*name:.*\")\n with open(p_manifest) as mn:\n read = mn.read()\n name = rule_name.findall(read)\n parc_name = [x.lstrip(' name: ') for x in name]\n cml = open(dn_path + \"/CMakeLists.txt\", 'a')\n cml.write(\"ROOT_STANDARD_LIBRARY_PACKAGE(\" + parc_name[0] + \" DEPENDENCIES RIO)\")\n src_dir_root = dn_path\n\n else:\n print(\"Can you provide package path..(if available)\")\n dir_path = raw_input()\n filepath = Path(dir_path + \"/CMakeLists.txt\")\n if filepath.is_file():\n src_dir_root = dir_path\n else:\n print(\"No CMakeLists.txt present. Creating using manifest.\")\n rule_name = re.compile(\".*name:.*\")\n with open(p_manifest) as mn:\n read = mn.read()\n name = rule_name.findall(read)\n parc_name = [x.lstrip(' name: ') for x in name]\n cml = open(dn_path + \"/CMakeLists.txt\", 'a')\n cml.write(\"ROOT_STANDARD_LIBRARY_PACKAGE(\" + parc_name[0] + \" DEPENDENCIES RIO)\")\n src_dir_root = dn_path\n\n print(\"[root-get] We would use a module from {0:s}\".format(src_dir_root))\n return src_dir_root", "def package(id = 0):\n\tresults = queries.package(id)\n\tif not results:\n\t\treturn render_template('package_not_found.html')\n\treturn render_template('package.html', package=results)", "def test_download_package__not_found(bucket_and_keys):\n\n with pytest.raises(SystemExit) as exit_error:\n download.download_package(\n bucket_and_keys[0],\n parse_package(\"package-unknown\"),\n )\n\n assert \"Package package-unknown not found\" in exit_error.value.args\n\n with pytest.raises(SystemExit) as specific_error:\n download.download_package(\n bucket_and_keys[0],\n parse_package(\"something==1.2.3\")\n )\n\n assert \"Package something==1.2.3 not found\" in specific_error.value.args", "def use(name):\n click.echo(\"now using repo {}\".format(name))", "def has_package(self, doc):\n return doc.package is not None", "def verify_package(dut, packane_name):\n command = \"dpkg -s {} | grep Status\".format(packane_name)\n output = st.config(dut, command, skip_error_check=True)\n if \"package '{}' is not installed\".format(packane_name) in output:\n st.log(\"Package '{}' is not installed in DUT\".format(packane_name))\n return False\n return True", "def test_no_mpkg(data: TestData) -> None:\n finder = make_test_finder(find_links=[data.find_links])\n req = install_req_from_line(\"pkgwithmpkg\")\n found = finder.find_requirement(req, False)\n assert found is not None\n assert found.link.url.endswith(\"pkgwithmpkg-1.0.tar.gz\"), found", "def check_install():\n if platform.dist()[0] 
not in ['fedora', 'redhat', 'centos']:\n print \"{} not supported\".format(platform.dist()[0])\n sys.exit(1)\n print \"\\ndetected {} {} ...\".format(platform.dist()[0], platform.dist()[1])\n\n import yum\n # Remove loggin. Taken from: https://stackoverflow.com/a/46716482\n from yum.logginglevels import __NO_LOGGING\n yumloggers = [\n 'yum.filelogging.RPMInstallCallback', 'yum.verbose.Repos',\n 'yum.verbose.plugin', 'yum.Depsolve', 'yum.verbose', 'yum.plugin',\n 'yum.Repos', 'yum', 'yum.verbose.YumBase', 'yum.filelogging',\n 'yum.verbose.YumPlugins', 'yum.RepoStorage', 'yum.YumBase',\n 'yum.filelogging.YumBase', 'yum.verbose.Depsolve'\n ]\n for loggername in yumloggers:\n logger = logging.getLogger(loggername)\n logger.setLevel(__NO_LOGGING)\n\n yumbase = yum.YumBase()\n pkg = 'Percona-XtraDB-Cluster-server-<%= @percona_major_version %>'\n if yumbase.rpmdb.searchNevra(name=pkg):\n pkg_list = yumbase.rpmdb.searchNevra(name=pkg)\n print 'detected {} ...'.format(pkg_list[0])\n else:\n print \"{}{} not installed{}\".format(RED, pkg, WHITE)\n sys.exit(1)\n return 'percona'", "def test_repository(self):\n path = Template().get_repository()\n self.assertTrue(os.path.exists(path))" ]
[ "0.69587296", "0.6549966", "0.65376604", "0.6391485", "0.6359235", "0.63099056", "0.61865646", "0.6125522", "0.6084877", "0.6035341", "0.6022625", "0.6022625", "0.6022625", "0.60137427", "0.60116285", "0.60014516", "0.60005105", "0.5989728", "0.5980288", "0.5926141", "0.5914313", "0.58725184", "0.5852901", "0.58251727", "0.5818606", "0.5795383", "0.57797164", "0.5757909", "0.574069", "0.5721189" ]
0.74416703
0
return list of packages in the current directory
def get_packages_in_current_dir() -> list: from os import listdir pkgs = [] ext = ('.tgz', '.txz') for file_in_current_dir in sorted(listdir()): if file_in_current_dir.endswith(ext): pkgs.append(file_in_current_dir) return pkgs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_packages():\n\n shelf_dir = settings.shelf_dir\n\n package_list = os.listdir(shelf_dir)\n\n package_list.sort()\n\n return package_list", "def packages(self):\n return []", "def get_packages(package):\n return [\n dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))\n ]", "def get_packages(package):\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]", "def get_packages(package):\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]", "def get_packages(package):\n return [dirpath\n for dirpath, dirnames, filenames in os.walk(package)\n if os.path.exists(os.path.join(dirpath, '__init__.py'))]", "def get_packages(package):\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]", "def list_packages(self):\n\n # First extract loaded module names from sys.modules\n sys_modules = sys.modules.keys()\n\n packages = {}\n\n # First add moduels in sys.modules (built-ins,\n # preloads and already loaded ones)\n for name in sys_modules:\n d = self.find_package(name)\n if not d: continue\n try:\n pkginfo = packages[d['type']]\n pkginfo[d['name']] = d['path']\n except Exception, e:\n packages[d['type']] = { d['name'] : d['path'] }\n\n #import site\n # Loop through all directories in sys.path and check for modules\n # Dont iterate through <prefix>/lib directory\n libdir = os.path.join(sys.prefix, 'lib')\n\n walked = []\n for top_level in self.paths:\n if not os.path.isdir(top_level):\n continue\n\n # Dont iterate through libdir\n if os.path.abspath(top_level) == os.path.abspath(libdir):\n continue\n\n walked.append(top_level)\n for item in os.listdir(top_level):\n\n fullpath = os.path.join(top_level, item)\n if fullpath in walked: continue\n\n walked.append(fullpath)\n # Remove the extension\n idx = item.find('.')\n if idx != -1: item = item[:idx]\n d = self.find_package(item)\n if not d: continue\n try:\n pkginfo = packages[d['type']]\n pkginfo[d['name']] = d['path']\n except Exception, e:\n packages[d['type']] = { d['name'] : d['path'] } \n\n for key,item in packages.items():\n print\n print self.pkgTypeInfo(key)\n print\n\n # Print sorted\n listofitems = item.keys()\n listofitems.sort()\n\n for key2 in listofitems:\n print key2,':',item[key2]", "def get_packages(root):\n root = os.path.realpath(root)\n proot = parent(root) + \"/\"\n py_files = [file.rsplit(proot)[1] for file in listfiles(root)]\n packages = list(np.unique([parent(file).replace(\"/\", \".\") for file in py_files]))\n # return list(np.unique([parent(file).replace(\"/\", \".\").split(\".{name_root}.\".format(name_root=name(root)))[1]\n # for file in py_files]))\n return packages", "def packages():", "def get_packages():\n\n packages = find_packages()\n packages = ['{}.{}'.format('uniq', package) for package in packages]\n packages.append('uniq')\n return packages", "def packages(self):\r\n return self._packages", "def get_packages():\n packages = []\n for repo in repositories:\n packages.extend(repo.get_packages())\n return packages", "def find_packages( root ):\n for path, directories, files in os.walk( root ):\n if is_package( path ):\n yield path.replace( '/','.' 
)", "def getusersitepackages():\n\tpass", "def get_all_packages(self):\n return self._package_cache.values()", "def getsitepackages():\n\tpass", "def get_packages_with_prefixes():\n return get_resources('packages')", "def get_package_list():\n pip_freeze = subprocess.check_output(('pip', 'freeze')).decode('utf8')\n package_list = [x.strip().split('==') for x in pip_freeze.split('\\n') if x.find('==') != -1]\n package_list = [(x[0].lower(), x[1]) for x in package_list]\n return package_list", "def get_packages(path):\n\n files = [y for x in os.walk(path) for y in glob(os.path.join(x[0], 'package.xml'))]\n packages = []\n for file in files:\n tree = ET.parse(file)\n root = tree.getroot()\n\n name = root.find('name').text\n path = os.path.dirname(os.path.abspath(file))\n \n packages.append(Package(name, path))\n\n return packages", "def my_find_packages(*args):\n import os\n packages = []\n for root_module_dir in args:\n for root, dirs, files in os.walk(root_module_dir):\n if '__init__.py' in files:\n packages.append(root)\n return packages", "def packages(self):\n\n if self._packages:\n return self._packages\n\n self._load()\n return self._packages", "def create_package_list(base):\n\n return [base] + [\"{}.{}\".format(base, pkg) for pkg in find_packages(base)]", "def packages_in_folder(path):\n for pkg_path in catkin_pkg.packages.find_package_paths(path):\n yield os.path.join(path, pkg_path)", "def get_installed_packages():\n global INSTALLED_PACKAGES\n chk = Popen(\"{} -m pip freeze\".format(sys.executable),\n shell=True, stdout=PIPE)\n installed = chk.communicate()[0].decode().splitlines()\n for pkg in installed:\n item = pkg.split(\"==\")\n INSTALLED_PACKAGES[item[0]] = item[1]", "def getInstalledPackages():\n reqs = subprocess.check_output([sys.executable,\n '-m', 'pip', 'freeze'])\n installed_packages = [r.decode().split('==')[0]\n for r in reqs.split()]\n return installed_packages", "def get_packages(self):\n raise NotImplementedError(\"get_packages is not implemented\")", "def get_site_packages():\n # Another hack...\n # Relies on the fact that os.py is in the dir above site_packages\n os_location = os.path.dirname(os.__file__)\n site_packages = []\n # Consider Debian/Ubuntu custom\n for site in [\"site-packages\", \"dist-packages\"]:\n site_path = os.path.join(os_location, site)\n if os.path.isdir(site_path):\n site_packages.append(site_path)\n return site_packages", "def find_packages(self):\n\n # scan src_dir for __init__.py\n root = _Path(self.src_dir)\n reg_paths = set(\n [d.parent.relative_to(root) for d in root.rglob(\"**/__init__.py\")]\n )\n\n # add all namespace packages that houses regular packages\n pkg_paths = set(reg_paths)\n for dir in reg_paths:\n pkg_paths |= set(dir.parents)\n\n # convert path to str\n pkg_dirs = [path.as_posix() for path in pkg_paths]\n\n # convert dir to package notation\n return [_dir_to_pkg(self.package_name, dir) for dir in pkg_dirs]", "def list_package(all: bool = False) -> List[List[str]]:\n if not all:\n pkgs_info = read_installation_records()\n else:\n pkgs_info = []\n for pkg in pkg_resources.working_set:\n pkgs_info.append([pkg.project_name, pkg.version])\n\n return pkgs_info" ]
[ "0.77534187", "0.770658", "0.76226324", "0.7619127", "0.7619127", "0.7619127", "0.75383526", "0.75368935", "0.7454178", "0.738317", "0.73001754", "0.71758336", "0.7129305", "0.712208", "0.71175563", "0.7054623", "0.70003355", "0.69973624", "0.6979044", "0.69747645", "0.69697404", "0.6947126", "0.6914133", "0.6913354", "0.6888221", "0.68843794", "0.6858265", "0.6787502", "0.67846656", "0.67648643" ]
0.8072811
0
Get the size of the remote file
def get_remote_file_size(url: str = '', httpresponse: object = False) -> int: need_to_close = False if not httpresponse: httpresponse = url_is_alive(url) if not httpresponse: error_open_mess(url) return 0 need_to_close = True content_length = httpresponse.getheader('Content-Length') if need_to_close: httpresponse.close() return int(content_length) if content_length else 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_file_size(url: str):\n header = requests.head(url).headers\n if \"Content-Length\" in header and header[\"Content-Length\"] != 0:\n return int(header[\"Content-Length\"])\n elif \"Location\" in header:\n h = requests.head(header[\"Location\"]).headers\n return int(h.get(\"Content-Length\", 0))\n else:\n return 0", "def file_size():\n return os.path.getsize(FILE_NAME)", "def _getsize(path, transport_params):\n with smart_open.open(path, 'rb', ignore_ext=True, transport_params=transport_params) as fin:\n fin.seek(0, io.SEEK_END)\n return fin.tell()", "def getFileSize( self, path ):\n res = self.__checkArgumentFormat( path )\n if not res['OK']:\n return res\n urls = res['Value']\n successful = {}\n failed = {}\n gLogger.debug( \"DIPStorage.getFileSize: Attempting to obtain size for %s files.\" % len( urls ) )\n res = self.getFileMetadata( urls )\n if not res['OK']:\n return res\n for url, urlDict in res['Value']['Successful'].items():\n if urlDict['Exists']:\n successful[url] = urlDict['Size']\n else:\n failed[url] = 'File does not exist'\n for url, error in res['Value']['Failed'].items():\n failed[url] = error\n resDict = {'Failed':failed, 'Successful':successful}\n return S_OK( resDict )", "def _query(self, remote_filename):\n\n file_id = self.get_file_id(remote_filename)\n if file_id is None:\n return {'size': -1}\n response = self.http_client.get(self.metadata_url + 'nodes/' + file_id)\n response.raise_for_status()\n\n return {'size': response.json()['contentProperties']['size']}", "def getsize(url):\n \n o = urlparse(url)\n conn = httplib.HTTPConnection(o.netloc)\n conn.request(\"HEAD\", o.path)\n res = conn.getresponse()\n\n if res.status == 301 or res.status == 302:\t# poprawic na kod opisowy\n # print res.reason, \": \", res.getheader('location')\n return getsize(res.getheader('location'))\n\n elif res.status == 200:\n # inne interesujace tagi: etag\n # print res.getheader('content-length')\n return res.getheader('content-length')\n else:\n print \"getsize() UNKNOWN PROBLEM\"\n print res.reason, \": \", res.getheader('location')\n print res.getheaders()\n raise IOError", "def file_size(file_ref, config=None):\n _authenticate()\n file_id = _get_id_fname(file_ref)[0]\n dx_file = dxpy.get_handler(file_id)\n desc = dx_file.describe(fields={\"size\": True})\n return desc[\"size\"] / (1024.0 * 1024.0)", "def file_size(self,file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return self.convert_bytes(file_info.st_size)", "def filesize(self, path):\n arinfo = self._handle.getmember(path)\n return arinfo.size", "def filesize(self, path):\n arinfo = self._handle.getmember(path)\n return arinfo.size", "def file_size(self):\n return self.context.getObjSize(self.context)", "def GetSize(filename):\n return os.path.getsize(filename)", "def get_file_size(self):\n try:\n return os.path.getsize(self.get_full_path())\n except Exception as e:\n raise SystemExit(f\"Could not complete operation: {e}\")", "def _get_file_size(self):\n return self.s3_file.size", "def file_size(self, file_id: int):\n file_path = self._path_to_file(file_id)\n return os.path.getsize(file_path)", "def getsize(self):\n return os.path.getsize(self.path)", "def get_file_size(self) -> int:\n return self.get_main_information()['FileSize']", "def get_file_size(self, report_symlinks=False):\n if self.is_symlinked() and not report_symlinks:\n return 0\n\n try:\n return self.datafile.size\n except ValueError: # file is not local\n return 0", "def fileSize(pathAndFilename):\n return 
os.stat(pathAndFilename).st_size", "def getsize(self, path):\n return os.path.getsize(path)", "def _remote_file_size_modtime(ftpobj, remote_file):\n size_in_bytes = ftpobj.size(remote_file)\n modification_time = ftpobj.get_file_mtime(remote_file)\n\n return size_in_bytes, modification_time", "def file_size(self):\n return self._fileSize", "def filesize(self, path):\n return self._handle.getinfo(path).file_size", "def filesize(self, path):\n return self._handle.getinfo(path).file_size", "def GetFileSize(file_path):\n return os.path.getsize(file_path)", "def getsize(path):\n return get_instance(path).getsize(path)", "def file_size(self):\n if self.fn is not None:\n return self.fn.stat().st_size", "def ftp_SIZE(self, line):\n path = self.fs.ftp2fs(line)\n if self.fs.isdir(path):\n self.respond(\"550 Could not get a directory size.\")\n return\n try:\n size = self.fs.getsize(path)\n except OSError, err:\n why = _strerror(err)\n self.log('FAIL SIZE \"%s\". %s' %(self.fs.ftpnorm(line), why))\n self.respond('550 %s.' %why)\n else:\n self.respond(\"213 %s\" %size)\n self.log('OK SIZE \"%s\".' %self.fs.ftpnorm(line))", "def get_size(filename):\n fileinfo = os.stat(filename)\n return fileinfo", "def size(request, pagename, filename):\n fpath = getFilename(request, pagename, filename)\n return os.path.getsize(fpath)" ]
[ "0.758814", "0.7333418", "0.73246276", "0.7305222", "0.72909725", "0.7271828", "0.7233082", "0.71817094", "0.7140134", "0.7140134", "0.7100435", "0.70998996", "0.7093384", "0.7069803", "0.7066487", "0.70585775", "0.7030291", "0.7018734", "0.6991961", "0.69666433", "0.6960497", "0.6954003", "0.69517696", "0.69517696", "0.6950835", "0.6940542", "0.69377", "0.6930702", "0.691506", "0.69005746" ]
0.8357053
0
get md5sum of remote or local file
def get_md5_hash(file_path: str) -> str: from hashlib import md5 # local file if file_path.startswith('/'): return md5(open(file_path, 'rb').read()).hexdigest() # remote file httpresponse = url_is_alive(file_path) if not httpresponse: error_open_mess(file_path) return '' md5hash = md5() max_file_size = 100 * 1024 * 1024 total_read = 0 while True: data = httpresponse.read(4096) total_read += 4096 if not data or total_read > max_file_size: break md5hash.update(data) httpresponse.close() return md5hash.hexdigest()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_remote_md5(self):\n E = action_element_maker()\n top = E.top(\n E.FileSystem(\n E.Files(\n E.File(\n E.SrcName(self.src),\n E.Operations(\n E.md5sum()\n )\n )\n )\n )\n )\n\n\n nc_get_reply = self.device.action(top)\n reply_ele = etree.fromstring(nc_get_reply.xml)\n md5sum = find_in_action('md5sum', reply_ele)\n\n if md5sum is not None:\n return md5sum.text.strip()", "def calculate_md5sum_of_a_file(context, file_name, file_path):\n command = \"md5sum \" + file_path + \"/\" + file_name + \" | awk {'print $1'}\"\n return context.cme_session.send_ssh_command(command=command)", "def get_md5sum(host, fqpath):\n command = \"md5sum %s\" % fqpath\n rcode, rout, rerr = g.run(host, command)\n\n if rcode == 0:\n return rout.strip()\n\n g.log.error('md5sum failed: %s' % rerr)\n return None", "def md5sum(file_name):\n f = open(file_name, mode='rb')\n h = hashlib.md5()\n h.update(f.read())\n return h.hexdigest()", "def CalcMD5(filepath):\n with open(filepath,'rb') as f:\n md5obj = hashlib.md5()\n md5obj.update(f.read())\n return md5obj.hexdigest()", "def md5sum(fileSrc):\n md5 = hashlib.md5()\n try:\n with open(fileSrc, \"rb\") as fd:\n while True:\n content = fd.read(2**20)\n if not content:\n break\n md5.update(content)\n except IOError:\n print(fileSrc + \" Not found\")\n exit(1)\n return md5.hexdigest()", "def checksumFile(filename):\n return md5File(filename)", "def checksum(path):\n with open(path, 'r') as f:\n return md5(f.read()).digest()", "def GetFileMd5(file_path):\n return binascii.hexlify(GetFileHashes(file_path, do_md5=True)['md5'])", "def compute_checksum(filename):\n cmd = 'md5sum ' + filename\n return pipe(cmd)", "def get_checksum(input_fname):\n with open(input_fname, \"rb\") as infile:\n file_contents = infile.read()\n\n checksum = hashlib.md5(file_contents).hexdigest()\n return checksum", "def _get_local_md5(self, blocksize=2**20):\n m = hashlib.md5()\n with open(self.dst, \"rb\") as f:\n buf = f.read(blocksize)\n while buf:\n m.update(buf)\n buf = f.read(blocksize)\n return m.hexdigest()", "def md5sum(fname):\n\tdef read_chunks(fh):\n\t\tfh.seek(0)\n\t\tchunk = fh.read(8096)\n\t\twhile chunk:\n\t\t\tyield chunk\n\t\t\tchunk = fh.read(8096)\n\t\telse: #最后要将游标放回文件开头\n\t\t\tfh.seek(0)\n\n\tm = hashlib.md5()\n\tif isinstance(fname, str) and os.path.exists(fname):\n\t\tfh = open(fname, \"rb\")\n\t\tfor chunk in read_chunks(fh):\n\t\t\tm.update(chunk)\n\t#上传的文件缓存或已打开的文件流\n\telif fname.__class__.__name__ in [\"StringIO\", \"StringO\"] or isinstance(fname, file):\n\t\tfor chunk in read_chunks(fname):\n\t\t\tm.update(chunk)\n\telse:\n\t\treturn \"\"\n\treturn m.hexdigest()", "def md5_checksum(file_path):\n with open(file_path, 'rb') as fh:\n m = hashlib.md5()\n while True:\n data = fh.read(8192)\n if not data:\n break\n m.update(data)\n return m.hexdigest()", "def md5_sum_file(path):\n with open(path, 'rb') as f:\n m = hashlib.md5()\n while True:\n data = f.read(8192)\n if not data:\n break\n m.update(data)\n return m.hexdigest()", "def get_file_checksum(file_path):\n with open(file_path) as f:\n content = f.read()\n return md5(content.encode()).hexdigest()", "def get_checksum(filename):\n # You could use popen here. 
I read about it, and subprocess is meant\n # to replace os.popen, so I used it instead.\n\n # First, run the command md5 sum with filename as input.\n # It's stored as a subprocess.CompletedProcess\n process = subprocess.run(['md5sum',filename], capture_output=True)\n \n # Use the method stdout from subprocess.CompletedProcess (seen in\n # the Python docs) to get the output. As seen in the book, md5sum will\n # output the checksum follwed by the filename. split() will put\n # those two elements into a list, and [0] will take the first element,\n # which will be the checksum.\n checksum = process.stdout.split()[0]\n return checksum", "def _get_md5(name, path):\n output = run_stdout(\n name, f'md5sum \"{path}\"', chroot_fallback=True, ignore_retcode=True\n )\n try:\n return output.split()[0]\n except IndexError:\n # Destination file does not exist or could not be accessed\n return None", "def local_md5(filepath, blocksize=65536):\n hasher = hashlib.md5()\n with open(filepath, 'rb') as source:\n buf = source.read(blocksize)\n while len(buf) > 0:\n hasher.update(buf)\n buf = source.read(blocksize)\n return hasher.hexdigest()", "def checksum(self, filepath) -> str:\n if os.path.exists(filepath):\n hash_md5 = md5()\n with open(filepath, \"rb\") as f:\n for chunk in iter(lambda: f.read(4096), b\"\"):\n hash_md5.update(chunk)\n return urlsafe_b64encode(hash_md5.digest()).decode('utf-8')\n\n return \"\"", "def md5get(filename):\n with open(filename, mode='rb') as f:\n d = hashlib.md5()\n for buf in iter(partial(f.read, 128), b''):\n d.update(buf)\n return d.hexdigest()", "def md5sum(filename):\n with open(filename, mode='rb') as f:\n d = hashlib.md5()\n for buf in iter(functools.partial(f.read, 1024*100), b''):\n d.update(buf)\n return d.hexdigest()", "def md5sum_file(filepath):\n hasher = hashlib.md5()\n with open(filepath, 'rb') as infile:\n for chunk in util.chunk_reader(infile):\n hasher.update(chunk)\n return hasher.hexdigest()", "def md5sum(fname):\n hash_md5 = hashlib.md5()\n with open(fname, \"rb\") as f:\n for chunk in iter(lambda: f.read(4096), b\"\"):\n hash_md5.update(chunk)\n return hash_md5.hexdigest()", "def md5sum_file(filename: str, hr: bool = True) -> str:\n block_size = 256 * 128\n\n md5 = hashlib.md5()\n with open(filename, \"rb\") as f:\n for chunk in iter(lambda: f.read(block_size), b\"\"):\n md5.update(chunk)\n if hr:\n return md5.hexdigest()\n return md5.digest()", "def md5(self):\n return md5file(self.abspath)", "def md5checksum(file_name):\n from hashlib import md5\n hash_md5 = md5()\n with open(file_name, \"rb\") as f:\n for chunk in iter(lambda: f.read(32768), b\"\"):\n hash_md5.update(chunk)\n return hash_md5.hexdigest()", "def svn_fs_file_md5_checksum(*args):\r\n return _fs.svn_fs_file_md5_checksum(*args)", "def get_md5(filepath):\n md5 = hashlib.md5()\n with open(filepath, 'rb') as f:\n while True:\n data = f.read(BUF_SIZE)\n if not data:\n break\n md5.update(data)\n return md5.hexdigest()", "def calc_file_md5(file_path):\n hash_md5 = str()\n method = hashlib.md5()\n if not os.path.exists(file_path):\n logger.error(\"File(%s) don not exist, can not calculation file hash\" % file_path)\n return hash_md5\n\n with open(file_path, 'rb') as f:\n for chunk in read_chunks(f, 1024 * 1024):\n method.update(chunk)\n return method.hexdigest()" ]
[ "0.79094136", "0.7866285", "0.7696837", "0.7590059", "0.75882936", "0.75156426", "0.7503798", "0.7437231", "0.7426028", "0.7385944", "0.73829806", "0.73817605", "0.7344087", "0.73372847", "0.73297226", "0.7321175", "0.73094076", "0.7299891", "0.7289331", "0.7272034", "0.72614074", "0.72589374", "0.7256654", "0.72553426", "0.7244839", "0.7215819", "0.72073394", "0.71996504", "0.719527", "0.7192484" ]
0.79310167
0
check md5sum of two files
def check_md5sum(file1: str, file2: str) -> bool: return get_md5_hash(file1) == get_md5_hash(file2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_md5(file1, file2):\r\n with open(file1, \"rb\") as f1:\r\n h1 = hashlib.md5(f1.read()).digest()\r\n with open(file2, \"rb\") as f2:\r\n h2 = hashlib.md5(f2.read()).digest()\r\n return h1 == h2", "def equal_file_sum(file1_paht, file2_paht):\n md5_sum1 = generate_sum(file1_path)\n md5_sum2 = generate_sum(file2_path)\n return (md5_sum1 == md5_sum2)", "def md5check(fname, md5fname):\n\tmd5fh = open(md5fname, \"r\")\n\treturn (md5sum(fname) == md5fh.readline())", "def _check_md5(self):\n\n self.log.info('-' * 80)\n self.log.info('Check md5 sum')\n\n self.log.info(self._ref_value)\n self.log.info(self._output_file)\n\n code, out = cmd_exec(['md5sum', self._output_file], shell=False, log=self.log)\n if code:\n self.log.error(out)\n return False\n self.log.info(out)\n\n md5sum, _ = out.split(' ')\n\n self.log.info(f'reference md5: {self._ref_value}')\n self.log.info(f'actual md5: {md5sum}')\n\n if self._ref_value != md5sum:\n return False\n\n return True", "def test_md5sum(self, changes_file):\n for file in changes_file['Files']:\n log.debug('Checking md5sum of %s' % file['name'])\n filename = os.path.join(pylons.config['debexpo.upload.incoming'], file['name'])\n if not os.path.isfile(filename):\n raise OSError(\"Missing file %s in incoming\" % (file['name']))\n sum = md5sum(filename)\n\n if sum != file['md5sum']:\n log.critical('%s != %s' % (sum, file['md5sum']))\n raise OSError(\"MD5 sum mismatch in file %s: %s != %s\" % (file['name'], sum, file['md5sum']))\n\n return True", "def test_checksum(size1, size2, lines, tmpdir):\n fp = tmpdir.join(\"temp-data.txt\").strpath\n data = \"\\n\".join(lines)\n with open(fp, 'w') as f:\n f.write(data)\n exp = hashlib.new(\"md5\", data.encode(\"utf-8\")).hexdigest()\n res1 = checksum(fp, size1)\n res2 = checksum(fp, size2)\n assert exp == res1\n assert res1 == res2\n assert res2 == exp", "def verify_sum(file_path, md5_sum):\n file_md5_sum = generate_sum(file_path)\n return (file_md5_sum == md5_sum)", "def md5_match(file_path, reference_md5):\n\n with open(file_path, \"rb\") as f:\n\n data = f.read()\n\n file_md5 = md5(data).hexdigest()\n\n return file_md5 == reference_md5", "def _compare_files(self, first_file, second_file):\n\n self.log.info('-' * 80)\n self.log.info('Compare files')\n\n code, out = cmd_exec(['cmp', str(first_file), str(second_file)], shell=False, log=self.log)\n if code:\n self.log.warning('md5 checksum IS NOT SAME with ffmpeg sw decode')\n self.log.warning(out)\n return False\n\n self.log.info('md5 checksum IS SAME with ffmpeg sw decode')\n return True", "def _check_md5sum(_setup_str, src_host, src_pfn):\n\n error = PilotErrors()\n\n _cmd = '%suberftp %s \"quote cksm md5sum 0 -1 %s\"' % (_setup_str, src_host, src_pfn)\n estat, coutp = commands.getstatusoutput(_cmd)\n tolog('md5 uberftp done <%s> (%s): %s' % (_cmd, estat, coutp))\n\n if estat != 0:\n check_syserr(estat, coutp)\n if coutp.find('not understood') >= 0:\n tolog('!!WARNING!!2999!! MD5 unsupported by the server')\n return error.ERR_FAILEDMD5, coutp\n try:\n tmp0 = coutp.split('\\n')[-1]\n fmd5usm = tmp0.split()[1]\n # split removes also the trailing \"\\r\" that uberftp returns, no fmd5sum.strip()\n except:\n tolog('!!WARNING!!2999!! 
Unable to parse MD5')\n fmd5usm = ''\n return 0, fmd5usm", "def checksumFile(filename):\n return md5File(filename)", "def CalcMD5(filepath):\n with open(filepath,'rb') as f:\n md5obj = hashlib.md5()\n md5obj.update(f.read())\n return md5obj.hexdigest()", "def md5(filename):\n d = hashlib.md5()\n try:\n d.update(open(filename).read())\n except Exception,e:\n return False\n else:\n return d.hexdigest()", "def checksum_compare(source_file, dest_file):\n\n con_ssh = ControllerClient.get_active_controller()\n\n LOG.info(\"Compare checksums on source file and destination file\")\n cmd = \"getfattr -m . -d {}\"\n\n exitcode, source_sha = con_ssh.exec_cmd(cmd.format(source_file))\n LOG.info(\"Raw source file checksum is: {}\".format(source_sha))\n source_sha2 = source_sha.split(\"\\n\")\n print(\"This is source_sha2: {}\".format(source_sha2))\n assert source_sha2 != [''], \"No signature on source file\"\n\n if source_file.startswith(\"/\"):\n source_sha = source_sha2[2] + \" \" + source_sha2[3]\n else:\n source_sha = source_sha2[1] + \" \" + source_sha2[2]\n\n LOG.info(\"Extracted source file checksum: {}\".format(source_sha))\n\n exitcode, dest_sha = con_ssh.exec_cmd(cmd.format(dest_file))\n LOG.info(\"Raw symlink checksum is: {}\".format(dest_sha))\n dest_sha2 = dest_sha.split(\"\\n\")\n\n if dest_file.startswith(\"/\"):\n dest_sha = dest_sha2[2] + \" \" + dest_sha2[3]\n else:\n dest_sha = dest_sha2[1] + \" \" + dest_sha2[2]\n\n LOG.info(\"Extracted destination file checksum: {}\".format(dest_sha))\n\n if source_sha == dest_sha:\n return True\n else:\n return False", "def CheckMd5(filename, md5filename):\n try:\n hasher = hashlib.md5()\n with open(filename) as check_file:\n with open(md5filename) as golden_file:\n for chunk in iter(lambda: check_file.read(128*hasher.block_size), ''):\n hasher.update(chunk)\n md5_contents = golden_file.read()\n if md5_contents:\n golden_digest_and_more = md5_contents.split(' ')\n if golden_digest_and_more:\n return golden_digest_and_more[0] == hasher.hexdigest()\n logging.warning('MD5 checksum match failed for %s', filename)\n return False\n except IOError:\n logging.warning('MD5 hasher read failed for %s', filename)\n return False", "def check_md5(filename, stored_md5):\n computed_md5 = _get_file_md5(filename)\n if stored_md5 != computed_md5:\n print (\"MD5 checksum of filename\", filename, \"failed. 
Expected MD5 was\", stored_md5,\n \"but computed MD5 was\", computed_md5, '\\n',\n \"Please check if the data has been downloaded correctly or if the upstream data has changed.\")", "def test_check_md5_crit_md5sum_mismatch(self, mock_generate_md5):\n jdata = b'{\"/etc/swift/object.ring.gz\": ' \\\n b'\"6b4f3a0ef3731f18291ecd053ce0d9b6\", ' \\\n b'\"/etc/swift/account.ring.gz\": ' \\\n b'\"93fc4ae496a7343362ebf13988a137e7\", ' \\\n b'\"/etc/swift/container.ring.gz\": ' \\\n b'\"0ea1ec9585ef644ce2b5c5b1dced4128\"}'\n pmock_jdata = PropertyMock(return_value=jdata)\n mock_generate_md5.return_value = 'xxxx'\n with patch('urllib.request.urlopen') as mock_urlopen:\n mock_urlopen.return_value = MagicMock(read=pmock_jdata)\n result = check_md5('.')\n mock_urlopen.assert_called_with('.ringmd5')\n expected_result = [(STATUS_CRIT,\n 'Ringfile /etc/swift/{}.ring.gz '\n 'MD5 sum mismatch'.format(name))\n for name in ('object', 'account', 'container')]\n self.assertEqual(result, expected_result)", "def md5sum(file_name):\n f = open(file_name, mode='rb')\n h = hashlib.md5()\n h.update(f.read())\n return h.hexdigest()", "def calculate_md5sum_of_a_file(context, file_name, file_path):\n command = \"md5sum \" + file_path + \"/\" + file_name + \" | awk {'print $1'}\"\n return context.cme_session.send_ssh_command(command=command)", "def fsum(fpath):\n import hashlib\n import codecs\n with codecs.open(fpath, \"r\", \"utf-8\") as filep:\n buff = filep.read()\n cksum = hashlib.md5(buff.encode(\"utf-8\"))\n return cksum.hexdigest()", "def compute_checksum(filename):\n cmd = 'md5sum ' + filename\n return pipe(cmd)", "def _check_final_md5(self, key, file_name):\r\n fp = open(file_name, 'r')\r\n if key.bucket.connection.debug >= 1:\r\n print 'Checking md5 against etag.'\r\n hex_md5 = key.compute_md5(fp)[0]\r\n if hex_md5 != key.etag.strip('\"\\''):\r\n file_name = fp.name\r\n fp.close()\r\n os.unlink(file_name)\r\n raise ResumableDownloadException(\r\n 'File changed during download: md5 signature doesn\\'t match '\r\n 'etag (incorrect downloaded file deleted)',\r\n ResumableTransferDisposition.ABORT)", "def svn_fs_file_md5_checksum(*args):\r\n return _fs.svn_fs_file_md5_checksum(*args)", "def rsync_and_md5(old_name, new_name, md5sum=None):\n if md5sum is None:\n md5sum = md5(old_name)\n\n syscall(\"rsync \" + old_name + \" \" + new_name)\n new_md5sum = md5(new_name)\n\n if new_md5sum != md5sum:\n raise Exception(\n \"Error copying file \"\n + old_name\n + \" -> \"\n + new_name\n + \"\\n. 
md5s do not match\"\n )\n else:\n return md5sum", "def md5sum(fname):\n hash_md5 = hashlib.md5()\n with open(fname, \"rb\") as f:\n for chunk in iter(lambda: f.read(4096), b\"\"):\n hash_md5.update(chunk)\n return hash_md5.hexdigest()", "def test_local_md5sum(self):\n cwl_local_path = os.path.abspath('testdata/md5sum.cwl')\n workflow_attachment_path = os.path.abspath('testdata/dockstore-tool-md5sum.cwl')\n output_filepath, _ = run_cwl_md5sum(cwl_input='file://' + cwl_local_path,\n workflow_attachment='file://' + workflow_attachment_path)\n\n self.assertTrue(check_for_file(output_filepath), 'Output file was not found: ' + str(output_filepath))", "def md5sum(fileSrc):\n md5 = hashlib.md5()\n try:\n with open(fileSrc, \"rb\") as fd:\n while True:\n content = fd.read(2**20)\n if not content:\n break\n md5.update(content)\n except IOError:\n print(fileSrc + \" Not found\")\n exit(1)\n return md5.hexdigest()", "def md5sum_file(filepath):\n hasher = hashlib.md5()\n with open(filepath, 'rb') as infile:\n for chunk in util.chunk_reader(infile):\n hasher.update(chunk)\n return hasher.hexdigest()", "def md5sum(fname):\n\tdef read_chunks(fh):\n\t\tfh.seek(0)\n\t\tchunk = fh.read(8096)\n\t\twhile chunk:\n\t\t\tyield chunk\n\t\t\tchunk = fh.read(8096)\n\t\telse: #最后要将游标放回文件开头\n\t\t\tfh.seek(0)\n\n\tm = hashlib.md5()\n\tif isinstance(fname, str) and os.path.exists(fname):\n\t\tfh = open(fname, \"rb\")\n\t\tfor chunk in read_chunks(fh):\n\t\t\tm.update(chunk)\n\t#上传的文件缓存或已打开的文件流\n\telif fname.__class__.__name__ in [\"StringIO\", \"StringO\"] or isinstance(fname, file):\n\t\tfor chunk in read_chunks(fname):\n\t\t\tm.update(chunk)\n\telse:\n\t\treturn \"\"\n\treturn m.hexdigest()", "def MD5(self) -> _n_0_t_3[_n_0_t_9]:" ]
[ "0.8655619", "0.81953096", "0.75696427", "0.7422941", "0.73577124", "0.7291288", "0.72639024", "0.71650517", "0.714502", "0.6992843", "0.6956816", "0.6924982", "0.68845403", "0.68717086", "0.68108", "0.68018496", "0.6792291", "0.67812496", "0.6757795", "0.67240214", "0.6716728", "0.6711767", "0.6698437", "0.66775095", "0.6652523", "0.6650045", "0.66343313", "0.66330284", "0.66285527", "0.65513706" ]
0.87421346
0
Helper function to extract ground truth communities from LFR Benchmark
def detect_ground_truth_communities(self, G): print("Detecting Ground - Truth communities") gt_communities = {frozenset(G.nodes[v]['community']) for v in G} return [list(fs) for fs in gt_communities]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_ground_truth():\n\n true_ps, obs_xs = util.io.load(os.path.join(get_root(), 'observed_data'))\n return true_ps, obs_xs", "def fetchChannelMapping(verbose=False):\n\n # Check (from pbworks):\n #High freq. 305 473 360 170.312 \n #Medium freq. 154 424 209 140.820 \n #Low freq. 079 400 134 126.172 \n\n calibrator_chans=[55, 57, 63, 66, 68, 69, 71, 72, 73, 76, 77, 79, 83, 84, 85, 86, 90, 92, 99, 100, 101, 103, 104, 107, 108, 110, 117, 119, 121, 122, 123, 126, 130, 131, 134, 136, 137, 140, 141, 145, 147, 150, 151, 156, 157, 161, 162, 168, 170, 175, 176, 178, 185, 188, 196, 197, 201, 202, 209, 213, 217, 221, 224, 230, 235, 238, 242, 246, 254, 255, 258, 259, 260, 264, 268, 269, 271, 274, 278, 279, 280, 283, 286, 291, 295, 297, 301, 306, 307, 312, 313, 314, 317, 319, 322, 323, 325, 328, 329, 332, 341, 343, 344, 346, 347, 352, 356, 360, 361, 364, 369, 371, 382, 383, 390, 398, 401, 404, 411, 412, 414, 419]\n\n lofar2beams={}; cal2field={}; field2cal={}; cal2lofar={}; field2lofar={}\n\n zero_chan=55\n num_field_chans=366\n num_total_chans=488\n for nf in range(zero_chan,zero_chan+num_total_chans):\n lofar_chan=nf\n field_chan=lofar_chan-zero_chan\n field2lofar[field_chan]=lofar_chan\n if lofar_chan in calibrator_chans:\n cal_chan=num_field_chans+calibrator_chans.index(lofar_chan)\n if verbose:\n print lofar_chan, field_chan, ' C ', cal_chan\n lofar2beams[lofar_chan]=(field_chan,cal_chan)\n cal2field[cal_chan]=field_chan\n field2cal[field_chan]=cal_chan\n cal2lofar[cal_chan]=lofar_chan\n else:\n if verbose:\n print lofar_chan, lofar_chan-zero_chan, ' - ', ' - '\n lofar2beams[lofar_chan]=(field_chan,None)\n field2cal[field_chan]=None\n\n return lofar2beams,cal2field,field2cal,cal2lofar,field2lofar", "def intialize_source():\n raw = loadmat(\"p300backrec2.mat\")\n channels_raw = raw['channels']\n channels = []\n for i in channels_raw[0]:\n channels.extend(list(i))\n X = raw['data']\n marker = raw['marker']\n return X,channels,marker", "def get_GroundTruth(self):\n\n # set first pose to identity\n # first_pose = self.dataset.oxts[0].T_w_imu\n # first_pose_inv = src.se3.inversePose(first_pose)\n # do not correct the orientation\n # first_pose_inv[:3, :3] = np.eye(3)\n\n # do not set first pose to identity\n first_pose_inv = np.eye(4)\n\n for o in self.dataset.oxts:\n\n normalized_pose_original = first_pose_inv @ o.T_w_imu\n self.poses_gt.append(normalized_pose_original)\n\n # gt pose is from I to G\n for i, pose in enumerate(self.poses_gt):\n\n # get gt position\n gt_position = np.reshape(pose[0:3, 3], (-1, 1))\n\n self.gt_position.append(gt_position)\n\n # get gt orientation\n R_wIMU = pose[0:3, 0:3]\n self.gt_orientation.append(R_wIMU)", "def brain(msg):\n\n def check_message(msg):\n \"\"\"\n Check wich neuron to use.\n :param msg:\n :return:\n \"\"\"\n words_of_message = msg.split()\n find = False\n for key in gc_words:\n if words_of_message in gc_words[key]['groups']:\n getattr(neuron.general_conversations, key)()\n find = True\n break\n for key in fc_words:\n if words_of_message in fc_words[key]['groups']:\n getattr(neuron.forecast, key)()\n find = True\n break\n for key in twitter_words:\n if words_of_message in twitter_words[key]['groups']:\n getattr(neuron.twitter, key)()\n find = True\n break\n for key in pipo_words:\n if words_of_message in pipo_words[key]['groups']:\n getattr(neuron.pipotron, key)()\n find = True\n break\n if not find:\n neuron.general_conversations.undefined()\n\n check_message(msg)", "def brain_extraction( x ):\n bxtmethod = 't1combined[5]' # better for individual 
subjects\n bxt = antspynet.brain_extraction( x, bxtmethod ).threshold_image(2,3).iMath(\"GetLargestComponent\")\n return bxt", "def state_to_features(self, game_state: dict) -> np.array:\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n # This is the dict before the game begins and after it ends\n if game_state is None:\n return None\n\n \n #get global information as a 17x17 channel\n x = game_state['field']\n x = np.swapaxes(x,0,1)\n for i in range(len(game_state['coins'])):\n a = game_state['coins'][i][1]\n b = game_state['coins'][i][0]\n x[a][b] = 4\n for i in range(len(game_state['bombs'])):\n a = game_state['bombs'][i][0][1]\n b = game_state['bombs'][i][0][0]\n x[a][b] = -(5+game_state['bombs'][i][1])\n for i in game_state['others']:\n if i[2]:\n x[i[3][1]][i[3][0]] = -10\n else:\n x[i[3][1]][i[3][0]] = -11\n if game_state['self'][2]:\n x[game_state['self'][3][1]][game_state['self'][3][0]] = 5\n else:\n x[game_state['self'][3][1]][game_state['self'][3][0]] = 6\n expl_List = np.argwhere(game_state['explosion_map'] != 0)\n for i in expl_List:\n x[i[1]][i[0]] = -4\n channel1 = x.copy()\n \n \n #prep local channel\n if self.modelToUse != 0:\n #get simpele direction to and aways from closest coin or crate if no coin on the field\n x_axis,y_axis,coin_creat_encoding = directionToNearestCoin_Crate(game_state['coins'], game_state['self'][3], game_state['field'])\n if x_axis == \"left\":\n if x[game_state['self'][3][1]][game_state['self'][3][0]-1] == 0:\n x[game_state['self'][3][1]][game_state['self'][3][0]-1] = coin_creat_encoding\n if x[game_state['self'][3][1]][game_state['self'][3][0]+1] == 0:\n x[game_state['self'][3][1]][game_state['self'][3][0]+1] = -2\n if x_axis == \"right\":\n if x[game_state['self'][3][1]][game_state['self'][3][0]-1] == 0:\n x[game_state['self'][3][1]][game_state['self'][3][0]-1] = -2\n if x[game_state['self'][3][1]][game_state['self'][3][0]+1] == 0:\n x[game_state['self'][3][1]][game_state['self'][3][0]+1] = coin_creat_encoding\n if y_axis == \"up\":\n if x[game_state['self'][3][1]-1][game_state['self'][3][0]] == 0:\n x[game_state['self'][3][1]-1][game_state['self'][3][0]] = coin_creat_encoding\n if x[game_state['self'][3][1]+1][game_state['self'][3][0]] == 0:\n x[game_state['self'][3][1]+1][game_state['self'][3][0]] = -2\n if y_axis == \"down\":\n if x[game_state['self'][3][1]-1][game_state['self'][3][0]] == 0:\n x[game_state['self'][3][1]-1][game_state['self'][3][0]] = -2\n if x[game_state['self'][3][1]+1][game_state['self'][3][0]] == 0:\n x[game_state['self'][3][1]+1][game_state['self'][3][0]] = coin_creat_encoding\n \n \n \n #get information of bombs: on which position the explotion will be and how far away the bomb is\n bombs = game_state['bombs']\n bombs.sort(key=lambda x: x[1],reverse=True)\n x = np.pad(x, (3,3), 'constant', constant_values=(-1))\n for i in (bombs):\n y_bomb = i[0][1] + 3\n x_bomb = i[0][0] + 3\n for j in range(4):\n if abs(x[y_bomb,x_bomb+j]) != 1 and x[y_bomb,x_bomb+j] != -4:\n blocked = False\n for l in range(j):\n if x[y_bomb,x_bomb+j-l] == -1:\n blocked = True\n if blocked == False:\n x[y_bomb,x_bomb+j] = -(9-j)\n #print(\"test1\")\n if abs(x[y_bomb,x_bomb-j]) != 1 and x[y_bomb,x_bomb-j] != -4:\n blocked = False\n for l in range(j):\n if x[y_bomb,x_bomb-j+l] == -1:\n blocked = True\n if blocked == False:\n x[y_bomb,x_bomb-j] = -(9-j)\n #print(\"test2\")\n if abs(x[y_bomb+j,x_bomb]) != 1 and x[y_bomb+j,x_bomb] != -4:\n blocked = False\n for l in range(j):\n if x[y_bomb+j-l,x_bomb] == -1:\n blocked = True\n if 
blocked == False:\n x[y_bomb+j,x_bomb] = -(9-j)\n #print(\"test3\")\n if abs(x[y_bomb-j,x_bomb]) != 1 and x[y_bomb-j,x_bomb] != -4:\n blocked = False\n for l in range(j):\n if x[y_bomb-j+l,x_bomb] == -1:\n blocked = True\n if blocked == False:\n x[y_bomb-j,x_bomb] = -(9-j)\n #print(\"test4\")\n x = x[3:-3,3:-3]\n \n \n #get local view and concatenate it with channel 1 (will be sliced apart in the model later)\n z = np.zeros(17)\n y = x[game_state['self'][3][1]-1:game_state['self'][3][1]+2,game_state['self'][3][0]-1:game_state['self'][3][0]+2]\n y = y.flatten()\n z[0:9] = y\n #get correct input for the model used\n if self.modelToUse == 2:\n z = Variable(torch.from_numpy(z)).to(device).to(torch.float)\n z = z.unsqueeze(0).unsqueeze(0).unsqueeze(0)\n channel1 = Variable(torch.from_numpy(channel1)).to(device).to(torch.float)\n channel1 = channel1.unsqueeze(0).unsqueeze(0)\n return torch.cat((channel1,z),2)\n elif self.modelToUse == 1:\n y = Variable(torch.from_numpy(y)).to(device).to(torch.float)\n y = y.unsqueeze(0)\n return y\n else:\n channel1 = Variable(torch.from_numpy(channel1)).to(device).to(torch.float)\n channel1 = channel1.unsqueeze(0).unsqueeze(0)\n return channel1\n return", "def grib2nc(f_hrrr, output=None, external_logger=None):\n\tstart = time.time()\n\tif external_logger == None:\n\t\tfmt = \"%(levelname)s: %(msg)s\"\n\t\tlog = logging.getLogger(__name__)\n\t\tcoloredlogs.install(logger=log, fmt=fmt)\n\n\n\tmsg = \"GRIB2NC Converter Utility\"\n\tlog.info(msg)\n\tlog.info(\"=\" * len(msg))\n\n\t# criteria dictionary for extracting variables, CASE MATTERS\n\tcriteria = {'air_temp': {\n\t \t'wgrib2 keys':[\":TMP Temperature\",\"2 m\"]},\n\n\t\t\t\t'dew_point': {\n\t\t\t\t'wgrib2 keys':[\":DPT\",\"2 m\"]},\n\n\t\t\t\t'relative_humidity': {\n\t\t\t\t'wgrib2 keys':[\":RH Relative Humidity\",\"2 m\"]\n\t },\n\t 'wind_u': {\n\t\t\t\t'wgrib2 keys':[\":UGRD U-Component\",\"10 m\"]\n\n\t },\n\t 'wind_v': {\n\t\t\t\t'wgrib2 keys':[\":VGRD V-Component\",\"10 m\"]\n\n\t },\n\t 'precip_int': {\n\t\t\t\t'wgrib2 keys':[\":APCP Total Precipitation\"]\n\n\t },\n\t 'short_wave': {\n\t\t\t\t'wgrib2 keys':['Downward Short-Wave Radiation Flux', ':surface']\n\t },\n\t }\n\n\t# No output file name used, use the original plus a new extension\n\tif output == None:\n\t\toutput = \".\".join(os.path.basename(f_hrrr).split(\".\")[0:-1]) + \".nc\"\n\n\tgrib_vars = \"\"\n\tvar_count = 0\n\t# Cycle through all the variables and export the grib var names\n\tfor k,v in criteria.items():\n\t\tlog.info(\"Attempting to extract grib name for {} \".format(k))\n\n\t\tcmd = \"wgrib2 -v {} \".format(f_hrrr)\n\n\t\t# Add all the search filters\n\t\tfor kw in v[\"wgrib2 keys\"]:\n\t\t\tcmd += '| egrep \"({})\" '.format(kw)\n\t\t# Run the command\n\n\t\t#cmd += \" -netcdf {}\".format(output)\n\t\ts = check_output(cmd, shell=True).decode('utf-8')\n\n\t\t# Check if we only identify one variable based on line returns\n\t\treturn_count = len([True for c in s if c == '\\n'])\n\n\t\tif return_count != 1:\n\t\t\tlog.warning(\"Found multiple variable entries for keywords \"\n\t\t\t\t\t\t\"associated with {}\".format(k))\n\t\t\tvar_count += return_count\n\t\telse:\n\t\t\tvar_count += 1\n\t\t# Add the grib var name to our running string/list\n\t\tgrib_vars += s\n\n\tlog.info(\"Extracting {} variables and converting to netcdf...\".format(var_count))\n\tlog.info(\"Outputting to: {}\".format(output))\n\n\t# Using the var names we just collected run wgrib2 for netcdf conversion\n\tcmd = 'echo \"{}\" | wgrib2 -i {} -netcdf 
{}'.format(grib_vars, f_hrrr, output)\n\ts = check_output(cmd, shell=True)\n\n\tlog.info(\"Complete! Elapsed {:0.0f}s\".format(time.time()-start))", "def get_aff_net(sta):\n pass", "def test_flmb(self):\n self.create_sample_data_set_dir(\"node10p1.dat\", TELEM_DIR, \"node59p1.dat\")\n self.assert_initialize()\n result = self.data_subscribers.get_samples(DataParticleType.METADATA_TELEMETERED,1,30)\n result = self.data_subscribers.get_samples(DataParticleType.SAMPLE_TELEMETERED,5,30)", "def identify_ground_truth(log, log_name):\n if \"BPI2015\" in log_name:\n for trace in log:\n for event in trace:\n if \"AH_I\" in str(event['concept:name']):\n event['truth'] = 0\n continue\n elif \"AH_II\" in str(event['concept:name']):\n event['truth'] = 1\n continue\n elif \"AP\" in str(event['concept:name']):\n event['truth'] = 2\n continue\n elif 'AWB' in str(event['concept:name']):\n event['truth'] = 3\n continue\n elif 'BB' in str(event['concept:name']):\n event['truth'] = 4\n continue\n elif 'BPT' in str(event['concept:name']):\n event['truth'] = 5\n continue\n elif 'CRD' in str(event['concept:name']):\n event['truth'] = 6\n continue\n elif 'DRZ' in str(event['concept:name']):\n event['truth'] = 7\n continue\n elif 'EIND' in str(event['concept:name']):\n event['truth'] = 8\n continue\n elif 'GBH' in str(event['concept:name']):\n event['truth'] = 9\n continue\n elif 'HOOFD' in str(event['concept:name']):\n event['truth'] = 10\n continue\n elif 'LGSD' in str(event['concept:name']):\n event['truth'] = 11\n continue\n elif 'LGSV' in str(event['concept:name']):\n event['truth'] = 12\n continue\n elif 'NGV' in str(event['concept:name']):\n event['truth'] = 13\n continue\n elif 'NOCODE' in str(event['concept:name']):\n event['truth'] = 14\n continue\n elif 'OLO' in str(event['concept:name']):\n event['truth'] = 15\n continue\n elif 'OPS' in str(event['concept:name']):\n event['truth'] = 16\n continue\n elif 'UOV' in str(event['concept:name']):\n event['truth'] = 17\n continue\n elif 'VD' in str(event['concept:name']):\n event['truth'] = 18\n continue\n elif 'VRIJ' in str(event['concept:name']):\n event['truth'] = 19\n continue\n else:\n print(event['concept:name'])\n return log\n elif \"BPI2017\" in log_name:\n for trace in log:\n for event in trace:\n if event['EventOrigin'] == 'Application':\n event['truth'] = 0\n elif event['EventOrigin'] == 'Offer':\n event['truth'] = 1\n elif event['EventOrigin'] == 'Workflow':\n event['truth'] = 2\n else:\n print(event['concept:name'])\n return log\n elif \"BPI2018\" in log_name:\n for trace in log:\n for event in trace:\n if event['subprocess'] == 'Application':\n event['truth'] = 0\n elif event['subprocess'] == 'Change':\n event['truth'] = 1\n elif event['subprocess'] == 'Declared':\n event['truth'] = 2\n elif event['subprocess'] == 'Main':\n event['truth'] = 3\n elif event['subprocess'] == 'Objection':\n event['truth'] = 4\n elif event['subprocess'] == 'On-Site':\n event['truth'] = 5\n elif event['subprocess'] == 'Remote':\n event['truth'] = 6\n elif event['subprocess'] == 'Reported':\n event['truth'] = 7\n else:\n print(event['subprocess'])\n return log\n elif \"BPI2020\" in log_name:\n for trace in log:\n for event in trace:\n if event['org:role'] == 'ADMINISTRATION':\n event['truth'] = 0\n elif event['org:role'] == 'BUDGET OWNER':\n event['truth'] = 1\n elif event['org:role'] == 'EMPLOYEE':\n event['truth'] = 2\n elif event['org:role'] == 'PRE_APPROVER':\n event['truth'] = 3\n elif event['org:role'] == 'MISSING':\n event['truth'] = 4\n elif event['org:role'] == 
'SUPERVISOR':\n event['truth'] = 5\n elif event['org:role'] == 'UNDEFINED':\n event['truth'] = 6\n elif event['org:role'] == 'DIRECTOR':\n event['truth'] = 7\n else:\n print(event['org:role'])\n return log\n else:\n print(\"Event log not found\")", "def get_bodyparts(project_dir):\n print(f\"\\n\\n\\nLoading data\")\n df_paths = sorted(glob.glob(os.path.join(project_dir, '*.h5')))\n points_2d_df = utils.create_dlc_points_2d_file(df_paths)\n arr = points_2d_df[points_2d_df[\"frame\"]==0][[\"marker\"]][points_2d_df[\"camera\"]==0].values\n final_arr = arr.flatten().tolist()\n return(final_arr)", "def get_groundtruth(path):\n pklFile = open(path, \"rb\")\n groundTruth = pickle.load(pklFile)\n\n return [[item[0]] for item in groundTruth]", "def main():\n\n # Establish network inferface to MPL at address below\n # h = NfuUdp(Hostname=\"192.168.1.111\")\n h = NfuUdp(hostname=\"localhost\")\n h.connect()\n\n # Run a quick motion test to verify joints are working\n num_arm_joints = 7\n num_hand_joints = 20\n arm_position = [0.0] * num_arm_joints\n hand_position = [0.0] * num_hand_joints\n\n # goto zero position\n h.send_joint_angles(arm_position + hand_position)\n # time.sleep(3)\n\n # goto elbow bent position\n arm_position[3] = 0.3\n h.send_joint_angles(arm_position + hand_position)\n # time.sleep(3)\n\n # test percept decoding\n f = open(os.path.join(os.path.dirname(__file__), \"../../tests/heartbeat.bin\"), \"r\")\n\n print('Testing heartbeat uint8 decoding...')\n heartbeat = np.fromfile(f, dtype=np.uint8)\n decode_heartbeat_msg(heartbeat)\n\n print('Testing heartbeat byte decoding...')\n bytes_heartbeat = heartbeat.tostring()\n decode_heartbeat_msg(bytes_heartbeat)\n\n f = open(os.path.join(os.path.dirname(__file__), \"../../tests/percepts.bin\"), \"r\")\n u = np.fromfile(f, dtype=np.uint8)\n\n print('Testing cpch uint8 decoding...')\n uint8_cpch = u[0:1366]\n decode_cpch_msg(uint8_cpch)\n\n print('Testing cpch byte decoding...')\n bytes_cpch = uint8_cpch.tostring()\n decode_cpch_msg(bytes_cpch)\n\n print('Testing percept uint8 decoding...')\n uint8_percept = u[1366:]\n decode_percept_msg(uint8_percept)\n\n print('Testing percept byte decoding...')\n bytes_percept = uint8_percept.tostring()\n decode_percept_msg(bytes_percept)\n\n h.close()\n logging.info('Ending NfuUdp')\n logging.info('-----------------------------------------------')", "def parse_forward_msg(self, req):\n batch_id = req.batch_id\n bytes_outputs_of_lower = req.output_matrix\n bytes_labels = req.labels\n is_train = req.is_train\n\n outputs_of_lower = pkl.loads(bytes_outputs_of_lower)\n labels = pkl.loads(bytes_labels)\n return batch_id, outputs_of_lower, labels, is_train", "def test_read_lxyr(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n # gt_file = os.path.join(cwd, 'test_files/test_gt.lxyr')\n # ground_truths = read_lxyr(gt_file)\n test_dir = os.path.join(cwd, 'test_files/')\n ground_truths = read_lxyr(test_dir, 'test_gt')\n # print ground_truths\n self.assertTrue(any(\n gt for gt in ground_truths\n if gt.x == 553 and gt.y == 132\n and gt.radius == 16.64 and gt.class_value == 3))\n self.assertTrue(any(\n gt for gt in ground_truths\n if gt.x == 119 and gt.y == 631\n and gt.radius == 15.0 and gt.class_value == 4))", "def readgeonet(geonetfile):\n f = open(geonetfile,'rt')\n tracelist = []\n headerlist = []\n try:\n hdrlines = _readheaderlines(f)\n except:\n pass\n while len(hdrlines[-1]):\n hdrdict = _readheader(hdrlines)\n numlines = int(np.ceil(hdrdict['npts']/10.0))\n data = []\n for i in range(0,numlines):\n 
line = f.readline()\n parts = line.strip().split()\n mdata = [float(p) for p in parts]\n data = data + mdata\n data = np.array(data)\n header = hdrdict.copy()\n stats = Stats(hdrdict)\n trace = Trace(data,header=stats)\n #apply the calibration and convert from mm/s^2 to m/s^2\n trace.data = trace.data * trace.stats['calib'] * 0.001 #convert to m/s^2\n tracelist.append(trace.copy())\n headerlist.append(header.copy())\n hdrlines = _readheaderlines(f)\n\n f.close()\n return (tracelist,headerlist)", "def get_light_sensors(self):\n x=self.send_packet_check_response('\\x50')\n LS=[]\n for i in range(8):\n a=bytearray(x[i*3:(i+1)*3])\n LS.append(a[0]|(a[1]&0xf)<<8)\n LS.append(a[1]>>4|a[2]<<4)\n return LS", "def all_net(configuration):\n net_dict_all = {\n \"design\" : ['H1', 'L1', 'V1' ],\n \"GW170817\" : ['H1', 'L1', 'V1' ],\n \"GW170814\" : ['H1', 'L1', 'V1' ],\n \"GW170817_without_Virgo\" : ['H1', 'L1' ],\n \"ET\" : [\"ET_L_Eu\", \"ET_L_Eu_2\"], # Triangular ET\n \"ET1\" : ['H1', 'L1', 'V1', 'ETdet1', 'ETdet2' ], # Triangular ET +LVC\n \"ET2\" : ['H1', 'L1', 'V1', 'ETdet1', 'ETdet3' ], # L-shaped at 2 places +LVC\n \"ET3\" : ['ETdet1', 'ETdet3', 'ETdet4'], # 3 L-shaped ET at three different places\n \"ET3L_EU\" : [\"ET_L_Eu\", \"ET_L_Aus_Eu\", \"ET_L_Argentina\"],\n \"3ET\" : [\"ET_L_US\", \"ET_L_Aus_US\", \"ET_L_Central_Africa\"],\n \"3CE\" : [\"CE_US\", \"CE_Aus_US\", \"CE_Central_Africa\"],\n \"1CE-ET\" : [\"CE_US\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n \"2CE-ET\" : [\"CE_US\", \"CE_Aus_US\", \"ET_L_Eu\", \"ET_L_Eu_2\"], #named 1 and 2 to distinguish from CE-ET (below) in Mills et al 2018.\n \"CE-ET\" : [\"CE_US\", \"CE_Aus_US\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n \"Voyager-ET\" : [\"LBB_H1\", \"LBB_L1\", \"LBB_I1\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n # next three networks are for calculating the impact of duty cycle on the Voyager-ET network\n \"VoyagerLI-ET\" : [\"LBB_L1\", \"LBB_I1\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n \"VoyagerHI-ET\" : [\"LBB_H1\", \"LBB_I1\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n \"VoyagerHL-ET\" : [\"LBB_H1\", \"LBB_L1\", \"ET_L_Eu\", \"ET_L_Eu_2\"],\n \n \"VoyagerETtri\" : [\"LBB_H1\", \"LBB_L1\", \"LBB_I1\", \"ET_Tri_Eu_1\", \"ET_Tri_Eu_2\", \"ET_Tri_Eu_3\"],\n \"Voyager\" : [\"LBB_H1\", \"LBB_L1\", \"LBB_I1\"],\n \"VoyagerWithAL\" : [\"LBB_H1\", \"LBB_L1\", \"LBB_I1\", \"ALV1\", \"ALK1\"],\n \"3_TriangularET\" : [\"ET_L_US\", \"ET_L_Aus_US\", \"ET_L_Central_Africa\",\"ET_L_US_2\", \"ET_L_Aus_US_2\", \"ET_L_Central_Africa_2\"],\n # for comparing to klimenko et al 2011:\n 'LHVA2' : [\"LBB_L1\",\"LBB_H1\",\"LBB_V1\",\"LBB_A-\"],\n 'LHVA' : [\"LBB_L1\",\"LBB_H1\",\"LBB_V1\",\"LBB_A\"],\n 'LHVJ' : [\"LBB_L1\",\"LBB_H1\",\"LBB_V1\",\"LBB_K1\"],\n 'LHVAJ' : [\"LBB_L1\",\"LBB_H1\",\"LBB_V1\",\"LBB_A\",\"LBB_K1\"],\n # for calculating alignment factor distributions in inclincation paper\n \"HL\" : [\"H1\", \"L1\"],\n \"HLV\" : [\"H1\", \"L1\", \"V1\" ],\n \"HLVK\" : [\"L1\",\"H1\",\"V1\",\"K1\"],\n \"HLVKI\" : [\"L1\",\"H1\",\"V1\",\"K1\", \"I1\"],\n \n\n #for optimizing the orientations of ET3L_EU w.r.t. 
polarization metric (see optimizing polarization notebook)\n #first optimize for the two detector network:\n \"ET2L_EU\" : [\"ET_L_Eu\", \"ET_L_Aus_Eu\"],\n \"2ET\" : [\"ET_L_US\", \"ET_L_Aus_US\"],\n #ranges\n }\n return(net_dict_all[configuration])", "def run(mu_v, Sigma_w, Sigma_z, a_mu, l_sensor):\n N = 1000\n # Init tracking\n mu_x = np.zeros(N) # Belief or estimation of hidden state \n F = np.zeros(N) # Free Energy of AI neuron\n mu_y = np.zeros(N) # Belief or prediction of sensory signal \n x = np.zeros(N) # True hidden state\n y = np.zeros(N) # Sensory signal as input to AI neuron\n\n robot_brain = pp_unit(dt, mu_v, Sigma_w, Sigma_z, a_mu) #make pp object\n \n \n\n start_time = time.time()\n for i in np.arange(1, N):\n #Active inference\n y[i] = l_sensor.ambient_light_intensity #take sensor reading\n print('light reading', y[i])\n F[i], mu_x[i], mu_y[i] = robot_brain.inference_step(i, mu_v, y[i])\n\n\n t_elapsed = time.time() - start_time\n\n print(\"Elapsed Time\", t_elapsed, \"sec\")\n return F, mu_x, mu_y, x, y", "def get_nmea_data(port):\n \n# q = 1 # quality factor\n \n # Wait for RMC message :\n rmc = port.readline().decode(\"utf-8\")\n while not 'RMC' in rmc:\n if rmc: \n print(\"Wait for RMC : \", rmc)\n rmc = port.readline().decode(\"utf-8\")\n\n \n # Read GGA+GST+ZDA messages :\n gga = port.readline().decode(\"utf-8\")\n gst = port.readline().decode(\"utf-8\")\n zda = port.readline().decode(\"utf-8\")\n \n t = np.float(gga[7:16])\n \n # Print messages :\n print(\"Trames :\")\n print(\" RMC: \",rmc)\n print(\" GGA: \",gga)\n print(\" GST: \",gst)\n print(\" ZDA: \",zda)\n \n # Quality check :\n if not 'GGA' in gga or not 'GST' in gst or not 'ZDA' in zda:\n print(\"Issue with GGA/GST/ZDA frame decoding !\\nMessage:\\nGGA:{0}\\nGST:{1}\\nZDA:{2}\".format(gga, gst, zda))\n rmc, gga, gst, zda, t = get_nmea_data(port)\n \n return rmc, gga, gst, zda, t", "def get_monitoring_channels(self, data):\n X, Y, Y_ = data\n state = X\n rval = OrderedDict()\n\n for layer in self.layers[:-1]:\n ch = layer.get_monitoring_channels()\n for key in ch:\n rval[layer.layer_name+'_'+key] = ch[key]\n state = layer.fprop(state)\n args = [state]\n if layer is self.layers[-1]:\n args.append(Y)\n ch = layer.get_monitoring_channels_from_state(*args)\n if not isinstance(ch, OrderedDict):\n raise TypeError(str((type(ch), layer.layer_name)))\n for key in ch:\n rval[layer.layer_name+'_'+key] = ch[key]\n\n if isinstance(state, tuple):\n states = state\n else:\n states = [state, state]\n \"\"\" \n import pdb\n pdb.set_trace()\n states = [state,state]\n \"\"\"\n for layer, state in zip(self.layers[-1], states):\n if isinstance(layer, NestedMLP):\n if layer is self.layers[-1][0]:\n ch = layer.get_monitoring_channels((state, Y))\n else:\n ch = layer.get_monitoring_channels((state, Y_))\n for key in ch:\n rval[layer.layer_name+'_'+key] = ch[key]\n else:\n ch = layer.get_monitoring_channels()\n for key in ch:\n rval[layer.layer_name+'_'+key] = ch[key]\n state = layer.fprop(state)\n args = [state]\n if layer is self.layers[-1][0]:\n args.append(Y)\n else:\n args.append(Y_)\n ch = layer.get_monitoring_channels_from_state(*args)\n if not isinstance(ch, OrderedDict):\n raise TypeError(str((type(ch), layer.layer_name)))\n for key in ch:\n rval[layer.layer_name+'_'+key] = ch[key]\n\n return rval", "def geocube():", "def get_on_neurons(Fish,state):\r\n state_snap= Fish[state,:]\r\n \r\n label=[]\r\n for i in range(len(state_snap)):\r\n if state_snap[i]>2:\r\n label.append(1)\r\n else: \r\n label.append(0)\r\n return 
label", "def test_read_multiple_lxyrs(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n test_dir = os.path.join(cwd, 'test_files/')\n ground_truths = read_lxyrs(test_dir)\n self.assertEquals(len(ground_truths), 3)\n self.assertEquals(len(ground_truths['test1']), 3)\n self.assertEquals(len(ground_truths['test_gt']), 2)", "def get_batt_state(self, rmp):\n\t\trmp_items = rmp.sensor_items\n\t\trmp_values = rmp.sensor_values\n\t\t\n\t\tjoint_state = JointState()\n\t\t\n\t\tnames = [self.link_left_front, self.link_right_front,\n\t\t\t\t\tself.link_left_rear, self.link_right_rear, self.link_caster]\n\t\tpos = [0,0,0,0,0]\n\t\tvel = [0,0,0,0,0]\n\t\t\n\t\t\"\"\"\n\t\tget the values for the feedback items needed\n\t\t\"\"\"\n\t\tfor x in range(0, len(rmp_items)):\n\t\t\tif rmp_items[x] == 'left_front_pos_m':\n\t\t\t\tpos[0] = -((rmp_values[x]/self.circumference) % 1.0)*(2*math.pi)\n\t\t\telif rmp_items[x] == 'right_front_pos_m':\n\t\t\t\tpos[1] = ((rmp_values[x]/self.circumference) % 1.0)*(2*math.pi)\n\t\t\telif rmp_items[x] == 'left_rear_pos_m':\n\t\t\t\tpos[2] = -((rmp_values[x]/self.circumference) % 1.0)*(2*math.pi)\n\t\t\telif rmp_items[x] == 'right_rear_pos_m':\n\t\t\t\tpos[3] = ((rmp_values[x]/self.circumference) % 1.0)*(2*math.pi)\n\t\t\telif rmp_items[x] == 'left_front_vel_mps':\n\t\t\t\tvel[0] = -rmp_values[x]\n\t\t\telif rmp_items[x] == 'right_front_vel_mps':\n\t\t\t\tvel[1] = rmp_values[x]\n\t\t\telif rmp_items[x] == 'left_rear_vel_mps':\n\t\t\t\tvel[2] = -rmp_values[x]\n\t\t\telif rmp_items[x] == 'right_rear_vel_mps':\n\t\t\t\tvel[3] = rmp_values[x]\n\t\t\t\t\n\t\tif self.has_two_wheels:\n\t\t\tnum = 2\n\t\telse:\n\t\t\tnum = 4\n\t\t\t\n\t\tfor x in range(0,num):\n\t\t\tjoint_state.name.append(names[x])\n\t\t\tjoint_state.position.append(pos[x])\n\t\t\tjoint_state.velocity.append(vel[x])\n\t\t\n\t\t\"\"\"\n\t\tcalculate (very rough) caster wheel position\n\t\t\"\"\"\n\t\tif self.has_caster_wheel:\n\t\t\tleft_vel = -vel[0]\n\t\t\tright_vel = vel[1]\n\t\t\tif abs(left_vel) < 0.01:\n\t\t\t\tleft_vel = 0\n\t\t\tif abs(right_vel) < 0.01:\n\t\t\t\tright_vel = 0\n\t\t\tif left_vel != 0 or right_vel != 0:\n\t\t\t\tstep = .04\n\t\t\t\ttarget = math.atan2(right_vel, left_vel) - math.pi/4\n\t\t\t\tif target > math.pi:\n\t\t\t\t\ttarget -= 2*math.pi\n\t\t\t\tif target < -math.pi:\n\t\t\t\t\ttarget += 2*math.pi\n\t\t\t\tpos[4] = self.prev_caster_pos\n\t\t\t\t#calculate direction to turn\n\t\t\t\tif abs(pos[4] - target) >= step:\n\t\t\t\t\tdirection = 1\n\t\t\t\t\tif pos[4] > target:\n\t\t\t\t\t\tif abs(target - pos[4]) < (2*math.pi + target - pos[4]):\n\t\t\t\t\t\t\tdirection = -1\n\t\t\t\t\telse:\n\t\t\t\t\t\tif (2*math.pi - (pos[4] - target)) < (pos[4] - target):\n\t\t\t\t\t\t\tdirection = -1\n\t\t\t\t\t#turn to target\n \t\t \tpos[4] += direction*step\n\t\t\t\t\twhile pos[4] <= -math.pi:\n\t\t\t\t\t\tpos[4] += 2*math.pi\n\t\t\t\t\twhile pos[4] > math.pi:\n\t\t\t\t\t\tpos[4] -= 2*math.pi\n\t\t\t\telse:\n\t\t\t\t\tpos[4] = target\n\t\t\t\tself.prev_caster_pos = pos[4]\n\t\t\telse:\n\t\t\t\tpos[4] = self.prev_caster_pos\n\n\t\tjoint_state.name.append(names[4])\n\t\tjoint_state.position.append(pos[4])\n\t\tjoint_state.velocity.append(vel[4])\n\t\t\n\t\t\"\"\"\n\t\tPublish the state of the wheels/joints\n\t\t\"\"\"\n\t\tjoint_state.header.stamp = rospy.Time.now()\n\t\tself.jointStatePub.publish(joint_state)", "def get_joints(joint_listener): \n if LOCAL_TEST: # dummy\n return np.array([-0.5596, 0.5123, 0.5575, -1.6929, 0.2937, 1.6097, -1.237, 0.04, 0.04])\n else:\n joints = 
joint_listener.joint_position\n print('robot joints', joints)\n return joints", "def never_used_if():\r\n net_connect = ConnectHandler(**devices)\r\n output = net_connect.send_command(\"Sh int | inc Gig|Last input\")\r\n\r\n \"\"\" divide string in lines \"\"\"\r\n output_ = output.split(\"GigabitEthernet\")\r\n\r\n \"\"\" define function to extract word after string \"\"\"\r\n def after(value, a):\r\n # Find and validate first part.\r\n pos_a = value.rfind(a)\r\n if pos_a == -1:\r\n return \"\"\r\n # Returns chars after the found string.\r\n adjusted_pos_a = pos_a + len(a)\r\n if adjusted_pos_a >= len(value):\r\n return \"\"\r\n return value[adjusted_pos_a:]\r\n\r\n\r\n \"\"\" loop over the lines to extract info \"\"\"\r\n dict_upper = {}\r\n for line in output_:\r\n line = line.replace(\"\\n\", \"\")\r\n dict_lower = {}\r\n dict_lower[\"Status\"] = (\r\n after(line, f\"{line.split(' ')[0]} is \").split(\" \")[0].replace(\",\", \"\")\r\n )\r\n dict_lower[\"Line protocol\"] = (\r\n after(line, \"line protocol is \").split(\" \")[0].replace(\",\", \"\")\r\n )\r\n dict_lower[\"Last input\"] = after(line, \"Last input \").split(\" \")[0].replace(\",\", \"\")\r\n dict_lower[\"Last output\"] = (\r\n after(line, f\"Last input {dict_lower['Last input']}, output \")\r\n .split(\" \")[0]\r\n .replace(\",\", \"\")\r\n )\r\n dict_lower[\"Output hang\"] = (\r\n after(line, \"output hang \").split(\" \")[0].replace(\",\", \"\")\r\n )\r\n dict_upper[f\"GigabitEthernet{line.split(' ')[0]}\"] = dict_lower\r\n\r\n\r\n \"\"\" find the gigabitinterfaces that never been used \"\"\"\r\n never_used_interfaces = []\r\n for i in dict_upper.keys():\r\n if dict_upper[i][\"Last input\"] == \"never\":\r\n never_used_interfaces.append(i)\r\n \r\n return never_used_interfaces", "def get_wires(inp):\n\n rows = []\n for line in inp.splitlines():\n words = line.split(',')\n pos = np.zeros((1, 2), dtype=int)\n row = [pos]\n for word in words:\n dir,length = word[0], int(word[1:])\n piece = np.zeros((length, 2), dtype=int)\n l = np.arange(1, length + 1)\n if dir == 'R':\n piece[:, 0] = l\n elif dir == 'L':\n piece[:, 0] = -l\n elif dir == 'U':\n piece[:, 1] = l\n elif dir == 'D':\n piece[:, 1] = -l\n else:\n assert False, f'Invalid direction {dir}!'\n row.append(pos + piece)\n pos += piece[-1, :]\n rows.append(np.concatenate(row))\n\n # find common bounding box, construct ndarray with shape (nwire, posx, posy)\n minvals = np.min([row.min(0) for row in rows], 0)\n maxvals = np.max([row.max(0) for row in rows], 0)\n\n size = (len(rows), maxvals[0] - minvals[0] + 1, maxvals[1] - minvals[1] + 1)\n origin = -minvals\n wires = np.zeros(size, dtype=bool)\n # True where there's a wire, False where there's none\n for i,row in enumerate(rows):\n shifted = row - minvals\n wires[i, shifted[:, 0], shifted[:, 1]] = True\n\n return origin,wires", "def get_translate_table(self):\n self.groundstate = []\n if self.header['CIMETHOD'] == 'DOCI':\n reg = r'^(\\d+)\\s+(\\d+)\\s+([\\-+]?\\d+\\.*\\d*[eEdD]?[\\-+]?\\d*)' \n else:\n reg = r'^(\\d+)\\s+(\\d+)\\s+(\\d+)\\s+([\\-+]?\\d+\\.*\\d*[eEdD]?[\\-+]?\\d*)' \n self.mapdict = {}\n with open(self.filename,\"r\") as file:\n index = 0 \n for line in file:\n match = re.search(reg,line)\n if match:\n if self.header['CIMETHOD'] == 'DOCI':\n self.mapdict[match.group(2)+ '|' + match.group(2)] = int(match.group(1))\n self.groundstate.append(float(match.group(3)))\n else:\n self.mapdict[match.group(2)+'|'+match.group(3) ] = int(match.group(1))\n self.groundstate.append(float(match.group(4)))\n assert(index 
== int(match.group(1)) ), str(index) + 'not found ' + 'WARNING' + match.group(1)\n index += 1\n if \"#Significant determinants\" in line:\n break\n assert(len(self.mapdict) == self.header['dim']), 'length dict: ' + str(len(self.mapdict)) +'header: '+ str(self.header['dim'])+ self.filename\n assert(len(self.groundstate) == self.header['dim']), 'length groundstate: ' + str(len(self.mapdict)) +'header: '+ str(self.header['dim']) + self.filename\n self.groundstate = np.array(self.groundstate) \n #if self.groundstate != None:\n #print 'We have read in the groundstate vec: ', self.groundstate" ]
[ "0.55977696", "0.52905", "0.5226053", "0.52051973", "0.5124833", "0.50913644", "0.49951893", "0.49718392", "0.49665216", "0.49590883", "0.49517453", "0.49391073", "0.4916794", "0.4913647", "0.49049315", "0.48985282", "0.48870233", "0.48777327", "0.4859775", "0.48231226", "0.4822618", "0.48208812", "0.48081636", "0.48041943", "0.48034972", "0.480203", "0.48016748", "0.48011395", "0.48001224", "0.47409782" ]
0.5451886
1
Calculate the concentration of the input pollutant at point (x, y).
def concentration(rFile, eFile, x, y, pollutant): sheets = pd.read_excel(eFile, sheet_name = None) if pollutant in ['NO2', 'PM10', 'PM25', 'EC']: c_traffic = traffic_concentration(rFile, sheets, x, y, pollutant) if c_traffic == 'e1': print("The calculation point is more than 60 meters far away from the street.") return None c_background = background_concentration(sheets, x, y, pollutant) # print(c_traffic, c_background) return round(c_traffic + c_background, 1) else: print("Pollutant {} is not supported yet.".format(pollutant)) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __calc_concentration(self, diam, data, dmin, dmax):\n\n dp = np.log10(diam*1e-9)\n conc = data # smoothed\n dmin = np.max((np.log10(dmin),dp[0]))\n dmax = np.min((np.log10(dmax),dp[-1]))\n dpi = np.arange(dmin,dmax,0.001)\n conci = np.sum(interp1d(dp,conc,kind='nearest')(dpi)*0.001,axis=1)\n return conci", "def concentration(self, time: float) -> _VectorisedFloat:\n return (self._normed_concentration_cached(time) * \n self.normalization_factor())", "def concentration(self, time: float) -> _VectorisedFloat:\n concentration = self.concentration_model.concentration(time)\n for interaction in self.short_range:\n concentration += interaction.short_range_concentration(self.concentration_model, time)\n return concentration", "def estimateCs(y, inp):\n\treturn 1 -(math.tanh(getK1(inp) - (y/getY90(inp)) / (2 * getD0(inp)) + (y/getY90(inp) - 1/3.0)**3 / (3 * getD0(inp))))**2", "def c(self, y, t):\n return 1 / 2 * (np.sum((y - t) ** 2) ** 0.5) ** 2", "def C(self, y, x):\n return self.minor(y,x).det()*(-1.0)**(y+x+2.0)", "def conductivity(self):\n m = 1.67296736e-02 # Determined from optimisation\n c = 8.54665149e-05 # Determined from optimisation\n return m * self.concentration + c", "def _causal_measure(self, x, y):\r\n\t\tC_xy = self._cross_cumulant_4th(x, y)\r\n\t\tC_yx = self._cross_cumulant_4th(y, x)\r\n\t\tR = C_xy**2 - C_yx**2\r\n\t\treturn R", "def cx(x):\n return cw(x - global_min_x)", "def background_concentration(sheets, x, y, pollutant):\r\n i, j = coor2idx(x, y)\r\n \r\n # get bc from excel. The year is hard coded to 2015\r\n f = sheets[\"Backgroundconc\"]\r\n idx = f[f['XiYI'] == str(i) + \"-\" + str(j)].index\r\n if len(idx) == 0:\r\n print(\"BCError: No location found. 0 returned.\")\r\n return 0\r\n return float(f[pollutant+'_2015'][idx])", "def cid(x, y):\n assert(len(x.shape) == 2 and x.shape == y.shape) # time series must have same length and dimensionality\n ce_x = np.sqrt(np.sum(np.square(np.diff(x, axis=0)), axis=0) + 1e-9)\n ce_y = np.sqrt(np.sum(np.square(np.diff(y, axis=0)), axis=0) + 1e-9)\n d = np.sqrt(np.sum(np.square(x - y), axis=0)) * np.divide(np.maximum(ce_x, ce_y), np.minimum(ce_x, ce_y))\n return np.sum(d)", "def _get_concentration(self, state):\n return self.fc(state.float_features).exp() + self.EPSILON", "def cc_coefficient(x, y):\n cor = np.sum( (x-np.mean(x)) * (y-np.mean(y)) )\n norm = sqrt( np.sum((x-np.mean(x))**2) * np.sum((x-np.mean(x))**2) )\n r = cor/norm\n return r", "def confidence_interval(self):\r\n coh_var = np.zeros((self.input.data.shape[0],\r\n self.input.data.shape[0],\r\n self._L), 'd')\r\n for i in range(self.input.data.shape[0]):\r\n for j in range(i):\r\n if i != j:\r\n coh_var[i, j] = tsu.jackknifed_coh_variance(\r\n self.spectra[i],\r\n self.spectra[j],\r\n self.eigs,\r\n adaptive=self._adaptive\r\n )\r\n\r\n idx = triu_indices(self.input.data.shape[0], 1)\r\n coh_var[idx[0], idx[1], ...] 
= coh_var[idx[1], idx[0], ...].conj()\r\n\r\n coh_mat_xform = tsu.normalize_coherence(self.coherence,\r\n 2 * self.df - 2)\r\n\r\n lb = coh_mat_xform + dist.t.ppf(self.alpha / 2,\r\n self.df - 1) * np.sqrt(coh_var)\r\n ub = coh_mat_xform + dist.t.ppf(1 - self.alpha / 2,\r\n self.df - 1) * np.sqrt(coh_var)\r\n\r\n # convert this measure with the normalizing function\r\n tsu.normal_coherence_to_unit(lb, 2 * self.df - 2, lb)\r\n tsu.normal_coherence_to_unit(ub, 2 * self.df - 2, ub)\r\n\r\n return ub - lb", "def concentration(self):\n return self._gev_bijector.concentration", "def convection(pivData, image, winsize, step=None, shape=None):\n x = pivData.sort_values(by=['x']).x.drop_duplicates()\n if step == None:\n # Need to infer the step size from pivData\n step = x.iat[1] - x.iat[0]\n \n if shape == None:\n # Need to infer shape from pivData\n y = pivData.y.drop_duplicates()\n shape = (len(y), len(x))\n \n # check coarse-grained image shape\n X, Y, I = corrLib.divide_windows(image, windowsize=[winsize, winsize], step=step)\n assert(I.shape==shape)\n \n X = np.array(pivData.x).reshape(shape)\n Y = np.array(pivData.y).reshape(shape)\n U = np.array(pivData.u).reshape(shape)\n V = np.array(pivData.v).reshape(shape)\n \n # compute gradient of concentration\n # NOTE: concentration is negatively correlated with intensity. \n # When computing gradient of concentration, the shifting direction should reverse.\n \n dcx = np.gradient(I, -step, axis=1)\n dcy = np.gradient(I, -step, axis=0)\n \n udc = U * dcx + V * dcy\n \n return udc", "def icc(x,y=None,verbose=0):\r\n TINY = 1.0e-20\r\n if y:\r\n all = N.concatenate([x,y],0)\r\n else:\r\n all = x+0\r\n x = all[:,0]\r\n y = all[:,1]\r\n totalss = ass(all-mean(all))\r\n pairmeans = (x+y)/2.\r\n withinss = ass(x-pairmeans) + ass(y-pairmeans)\r\n withindf = float(len(x))\r\n betwdf = float(len(x)-1)\r\n withinms = withinss / withindf\r\n betweenms = (totalss-withinss) / betwdf\r\n rho = (betweenms-withinms)/(withinms+betweenms)\r\n t = rho*math.sqrt(betwdf/((1.0-rho+TINY)*(1.0+rho+TINY)))\r\n prob = abetai(0.5*betwdf,0.5,betwdf/(betwdf+t*t),verbose)\r\n return rho, prob", "def c(\n dp: np.ndarray,\n ddp: np.ndarray,\n ) -> np.ndarray:\n\n return \\\n np.sqrt((ddp[2, :] * dp[1, :] - dp[2, :] * ddp[1, :])**2 +\n (ddp[0, :] * dp[2, :] - dp[0, :] * ddp[2, :])**2 +\n (ddp[1, :] * dp[0, :] - dp[1, :] * ddp[0, :])**2) / \\\n (dp[0, :]**2 + dp[1, :]**2 + dp[2, :]**2)**1.5", "def ccw(self, b: PointOrIterable, c: PointOrIterable) -> float:\n try:\n return ((b.x - self.x) * (c.y - self.y)) - ((c.x - self.x) * (b.y - self.y))\n except AttributeError:\n pass\n\n return ((b[0] - self.x) * (c[1] - self.y)) - ((c[0] - self.x) * (b[1] - self.y))", "def cone(individual, position, height, width):\n value = 0.0\n for x, p in zip(individual, position):\n value += (x - p)**2\n return height - width * math.sqrt(value)", "def test_concentration_profile(self):\n # TODO: add an output for average particle concentration", "def calc_cophenetic_coeff(self):\n c, d = cophenet(self.__linkage, self.__distance_matrix)\n return round(c, 3)", "def c(\n dp: np.ndarray,\n ddp: np.ndarray,\n ) -> np.ndarray:\n\n return \\\n np.abs(ddp[0, :]*dp[1, :] - dp[0, :]*ddp[1, :]) / \\\n (dp[0, :]**2 + dp[1, :]**2)**1.5", "def _concentration_at_time(cls, t, hl_a, hl_e, return_diff=False):\n res = cls._concentration(2, t, hl_a, hl_e, {0: 1}, return_diff)\n if return_diff:\n return res[0][1], res[1][1]\n return res[1]", "def _GetConcentrationCorrection(self): \n # Shorthand for coeff * 
log(concentration)\n mult_log_c_list = [c.coeff * numpy.log(c.phase.Value())\n for c in self.reactants]\n\n # Compute log(Q) - the log of the reaction quotient\n log_Q = sum(mult_log_c_list)\n \n _r = constants.R\n _t = constants.DEFAULT_TEMP\n return _r * _t * log_Q", "def calc_isoconc_point(r_min, r_max):\n I = np.sqrt((r_min**2.0 + r_min * r_max + r_max**2.0)/3.0)\n if not isinstance(I, float):\n msg = 'isoconcentration point calc error: {} is not a float.'\n raise TypeError, msg.format(I)\n return I", "def ccc_v(y_true, y_pred):\n x = y_true[:, 0]\n y = y_pred[:, 0]\n mx = K.mean(x, axis=0)\n my = K.mean(y, axis=0)\n xm, ym = x - mx, y - my\n rho = K.sum(xm * ym) / (K.sqrt(K.sum(xm ** 2)) * K.sqrt(K.sum(ym ** 2)))\n x_s = K.std(x)\n y_s = K.std(y)\n ccc = 2 * rho * x_s * y_s / (x_s ** 2 + y_s ** 2 + (mx - my) ** 2)\n return ccc", "def test_concentration_increase_decrease(self):\n\n t, x_n, x_p, r_n, r_p = self.t, self.x_n, self.x_p, self.r_n, self.r_p\n\n if self.model.options[\"particle\"] in [\"quadratic profile\", \"quartic profile\"]:\n # For the assumed polynomial concentration profiles the values\n # can increase/decrease within the particle as the polynomial shifts,\n # so we just check the average instead\n neg_diff = self.c_s_n_rav(t[1:], x_n) - self.c_s_n_rav(t[:-1], x_n)\n pos_diff = self.c_s_p_rav(t[1:], x_p) - self.c_s_p_rav(t[:-1], x_p)\n neg_end_vs_start = self.c_s_n_rav(t[-1], x_n) - self.c_s_n_rav(t[0], x_n)\n pos_end_vs_start = self.c_s_p_rav(t[-1], x_p) - self.c_s_p_rav(t[0], x_p)\n else:\n neg_diff = self.c_s_n(t[1:], x_n, r_n) - self.c_s_n(t[:-1], x_n, r_n)\n pos_diff = self.c_s_p(t[1:], x_p, r_p) - self.c_s_p(t[:-1], x_p, r_p)\n neg_end_vs_start = self.c_s_n(t[-1], x_n, r_n) - self.c_s_n(t[0], x_n, r_n)\n pos_end_vs_start = self.c_s_p(t[-1], x_p, r_p) - self.c_s_p(t[0], x_p, r_p)\n\n if self.operating_condition == \"discharge\":\n np.testing.assert_array_less(neg_diff, 1e-16)\n np.testing.assert_array_less(-1e-16, pos_diff)\n np.testing.assert_array_less(neg_end_vs_start, 0)\n np.testing.assert_array_less(0, pos_end_vs_start)\n elif self.operating_condition == \"charge\":\n np.testing.assert_array_less(-1e-16, neg_diff)\n np.testing.assert_array_less(pos_diff, 1e-16)\n np.testing.assert_array_less(0, neg_end_vs_start)\n np.testing.assert_array_less(pos_end_vs_start, 0)\n elif self.operating_condition == \"off\":\n np.testing.assert_array_almost_equal(neg_diff, 0)\n np.testing.assert_array_almost_equal(pos_diff, 0)\n np.testing.assert_array_almost_equal(neg_end_vs_start, 0)\n np.testing.assert_array_almost_equal(pos_end_vs_start, 0)", "def calc_cogen_const(q_heat_Wh, thermal_eff, electrical_eff):\n q_fuel_Wh = q_heat_Wh / thermal_eff\n p_el_Wh = q_fuel_Wh * electrical_eff\n q_anth_Wh = q_fuel_Wh - (q_heat_Wh + p_el_Wh)\n return q_fuel_Wh, p_el_Wh, q_anth_Wh", "def get_distance(self, point, cpoint):\n distance = 0.0\n for m, s in zip(point, cpoint):\n distance += pow(m - s, 2)\n distance = math.sqrt(distance)\n return distance" ]
[ "0.6304987", "0.6212258", "0.6192664", "0.6185161", "0.61454153", "0.6080291", "0.6044118", "0.5986667", "0.5929356", "0.5894591", "0.58592665", "0.5858337", "0.58366704", "0.5809042", "0.57952833", "0.5746893", "0.5744297", "0.57334715", "0.5693117", "0.5643129", "0.5623406", "0.5618083", "0.56146115", "0.5612929", "0.56017935", "0.56015056", "0.55982924", "0.55967593", "0.55491567", "0.5545664" ]
0.6773205
0
Given a point p, find the nearest road that p belongs to. Return the road type and the distance from p to this road.
def nearest_road(p, file): with fiona.open(roadFile, 'r') as roads: nearestRoad = roads[0] minDis = p.distance(shape(roads[0]['geometry'])) for road in roads: dis = p.distance(shape(road['geometry'])) if dis < minDis: nearestRoad = road minDis = dis return nearestRoad, minDis
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _nearest_to_point(self, point):\n ptvertex = point.get_vertex(crs=self.crs)\n segments = zip(self.vertices.slice(0, -1), self.vertices.slice(1, 0))\n\n if isinstance(self.crs, CartesianCRS):\n func = _cvectorgeo.pt_nearest_planar\n def func(seg):\n return _cvectorgeo.pt_nearest_planar(ptvertex[0], ptvertex[1],\n seg[0][0], seg[0][1], seg[1][0], seg[1][1])\n else:\n fwd = self.crs.forward\n inv = self.crs.inverse\n def func(seg):\n return _cvectorgeo.pt_nearest_proj(fwd, inv, ptvertex,\n seg[0], seg[1], tol=0.01)\n\n point_dist = map(func, segments)\n min_point = None\n min_dist = -1.0\n for i, (point, dist) in enumerate(point_dist):\n if dist < min_dist or (i == 0):\n min_point = point\n min_dist = dist\n\n return min_dist, min_point", "def closest_point(g, p):\n\n nodes = [n for n in g.nodes]\n\n tree = KDTree(nodes)\n idx = tree.query([p], k=1, return_distance=False)[0][0]\n return nodes[idx]", "def point_to_point_distance(p1:Point, p2: Point) -> float:\n return round(geopy.distance.distance((p1.y, p1.x), (p2.y, p2.x)).km,2)", "def closest_point(self, l):\n cos = np.dot(self.direction, l.direction)\n n = 1 - cos ** 2\n if n < sys.float_info.epsilon:\n # Lines are parallel.\n return self.zero\n\n d0 = l.zero - self.zero\n a = np.dot(d0, self.direction)\n b = np.dot(d0, l.direction)\n return self.zero + self.direction * ( a - b * cos) / n", "def closest_point(self, point, start_param=None, Ns=25):\n x, z = self.rotate_to_xz_plane(point)\n la = self._closest_point(x, z, start_param, Ns)\n return la", "def closest_point_to(self, p):\n p = np.array(p)\n # align with z-axis so all triangle have same z-coord\n tri_rot, rot = self.align_with([0,0,1])\n tri_rot_z = tri_rot.a[-1]\n p_rot = np.dot(rot, p)\n\n p_2d = p_rot[:2]\n tri_2d = geometry2d.Triangle(tri_rot.a[:2], tri_rot.b[:2], tri_rot.c[:2])\n\n if tri_2d.is_inside(p_2d):\n # projects onto triangle, so return difference in z\n return np.dot(np.linalg.inv(rot), np.array(list(p_2d) + [tri_rot_z]))\n else:\n closest_pt_2d = tri_2d.closest_point_to(p_2d)[1]\n\n closest_pt_3d = np.array(list(closest_pt_2d) + [tri_rot_z])\n\n return np.dot(np.linalg.inv(rot), closest_pt_3d)", "def find_nearest_d(self, point, layers):\n if isinstance(point, tuple):\n point = geojson.Point(coordinates=[point[0], point[1]])\n if len(point) == 3:\n point = self.transform(point, point[2], self.db_proj)\n gd = self.find_nearest(point, layers)\n if gd:\n return gd, distance(point, gd.data)\n else:\n return None, None", "def distance_to(self, p):\n closest_pt = self.closest_point_to(p)\n return np.linalg.norm(p - closest_pt)", "def find_closest_point(point, street, streetvolume):\r\n streetdf = streetvolume[streetvolume['streetname'] == street]\r\n if streetdf.shape[0] == 0:\r\n streetdf = streetvolume\r\n streetdf['pdistance'] = streetdf['geometry'].apply(lambda x: point.distance(x))\r\n streetdf.sort_values(by = 'pdistance', ascending = True, inplace = True)\r\n return streetdf['lineid'].iloc[0]", "def nearest_on_boundary(self, point):\n _, minpt = self._nearest_to_point(point)\n return Point(minpt, crs=self.crs)", "def getNearestEdge(self, point):\n edge = mm.idx.nearest((point.getPoint().x, point.getPoint().y), objects=True)\n edges = [e.object for e in edge]\n if len(edges) == 1:\n result = edges[0]\n else:\n dist = 99999999999999999999999999999999999999999\n for edge in edges:\n distance = point.getPoint().distance(edge.getGeometry())\n if distance < dist:\n dist = distance\n result = edge\n return result", "def distance_to_line(a, b, p):\n return 
distance(closest_point(a, b, p), p)", "def closest_point(a, b, p):\n ap = [p[0]-a[0], p[1]-a[1]]\n ab = [b[0]-a[0], b[1]-a[1]]\n mag = float(ab[0]**2 + ab[1]**2)\n proj = dot(ap, ab)\n if mag ==0 :\n dist = 0\n else:\n dist = proj / mag\n if dist < 0:\n return [a[0], a[1]]\n elif dist > 1:\n return [b[0], b[1]]\n else:\n return [a[0] + ab[0] * dist, a[1] + ab[1] * dist]", "def nearest_point(pt):\n nearest_point = None\n min_dist = float(\"inf\")\n for p in cur_points:\n dist = euclidean_dist(pt, p.to_tuple())\n if dist < min_dist:\n min_dist, nearest_point = dist, p\n\n return nearest_point.to_tuple()", "def dist_to_point(self, point):\n\t\treturn dist_to_line2d_seg((self.a.to_tuple(),self.b.to_tuple()), point.to_tuple())", "def getDistanceToPoint(self, p, returnParaPerp = False):\n if not isinstance(p, VectorN) or len(p) != len(self.mOrigin):\n raise ValueError(\"p must be a point of dimension \" + str(len(self.mOrigin)))\n dirToP = p - self.mOrigin\n if dirToP.dot(self.mDirection) < 0:\n return None\n paraPart = dirToP.dot(self.mDirection) * self.mDirection\n perpPart = dirToP - paraPart\n if returnParaPerp:\n return (perpPart.magnitude(), paraPart, perpPart)\n else:\n return perpPart.magnitude()", "def closest_point_on_segment(point, segment):\n a, b = segment\n p = closest_point_on_line(point, segment)\n d = distance_point_point_sqrd(a, b)\n d1 = distance_point_point_sqrd(a, p)\n d2 = distance_point_point_sqrd(b, p)\n if d1 > d or d2 > d:\n if d1 < d2:\n return a\n return b\n return p", "def FindClosestPointWithinRadius(self, p_float, , p_float_4):\n ...", "def get_nearest_node(G, point, method='haversine', return_dist=False):\n start_time = time.time()\n\n if not G or (G.number_of_nodes() == 0):\n raise ValueError('G argument must be not be empty or should contain at least one node')\n\n # dump graph node coordinates into a pandas dataframe indexed by node id\n # with x and y columns\n coords = [[node, data['x'], data['y']] for node, data in G.nodes(data=True)]\n df = pd.DataFrame(coords, columns=['node', 'x', 'y']).set_index('node')\n\n # add columns to the dataframe representing the (constant) coordinates of\n # the reference point\n df['reference_y'] = point[0]\n df['reference_x'] = point[1]\n\n # calculate the distance between each node and the reference point\n if method == 'haversine':\n # calculate distance vector using haversine (ie, for\n # spherical lat-long geometries)\n distances = great_circle_vec(lat1=df['reference_y'],\n lng1=df['reference_x'],\n lat2=df['y'],\n lng2=df['x'])\n\n elif method == 'euclidean':\n # calculate distance vector using euclidean distances (ie, for projected\n # planar geometries)\n distances = euclidean_dist_vec(y1=df['reference_y'],\n x1=df['reference_x'],\n y2=df['y'],\n x2=df['x'])\n\n else:\n raise ValueError('method argument must be either \"haversine\" or \"euclidean\"')\n\n # nearest node's ID is the index label of the minimum distance\n nearest_node = distances.idxmin()\n # log('Found nearest node ({}) to point {} in {:,.2f} seconds'.format(nearest_node, point, time.time()-start_time))\n\n # if caller requested return_dist, return distance between the point and the\n # nearest node as well\n if return_dist:\n return nearest_node, distances.loc[nearest_node]\n else:\n return nearest_node", "def shortest_distance_to(self, pt):\n return self._nearest_to_point(pt)[0]", "def closest_point(p1: Vector3, p2: Vector3, p3: Vector3) -> Vector3:\n k = ((p2.y - p1.y) * (p3.x - p1.x) - (p2.x - p1.x) * (p3.y - p1.y)) / ((p2.y - p1.y) ** 2 + (p2.x - 
p1.x) ** 2)\n x4 = p3.x - k * (p2.y - p1.y)\n y4 = p3.y + k * (p2.x - p1.x)\n\n return Vector3(x4, y4, 0)", "def closest(point, points):\n pts = [(Point.distance(point, p), p) for p in points]\n pts.sort()\n return pts[0][1]", "def line_point_shortest_dist(r: np.ndarray, v: np.ndarray, p: np.ndarray) -> Tuple[float, float]:\n\n t = np.dot(v, p - r) / np.dot(v, v)\n d = np.linalg.norm((r + v * t) - p)\n return d, t", "def get_closest_relationship(self, point, n=1):\n n = min(n,len(self.rtype_vectors))#prevent index error\n if n > 1:\n tmp = zip(*self.rkdtree.query(point,n))\n return [(d, self.rkdtree_keys[i]) for d,i in tmp]\n else:\n dist, id = self.rkdtree.query(point,n)\n return [(dist, self.rkdtree_keys[id])]", "def closest_point_to(self, x):\n x = np.array(x)\n v = self.p1 - self.p0\n b = self.p0 - x\n\n t = -np.dot(v, b) / np.dot(v, v)\n if (0 <= t <= 1):\n closest = t*(self.p1 - self.p0) + self.p0\n return closest\n else:\n if np.linalg.norm(x - self.p0) < np.linalg.norm(x - self.p1):\n return self.p0\n else:\n return self.p1", "def get_closest_waypoint(self, x, y):\n closest_idx = self.waypoint_tree.query([x, y])[1] # ckd tree (1st closest, idx)\n\n # Check if closest waypoint is ahead or behind vehicle\n closest_coord = self.waypoints_2d[closest_idx]\n prev_coord = self.waypoints_2d[closest_idx - 1]\n\n # Equation for hyperplane through closest_coors\n cl_vect = np.array(closest_coord)\n prev_vect = np.array(prev_coord)\n pos_vect = np.array([x, y])\n\n val = np.dot(cl_vect - prev_vect, pos_vect - cl_vect)\n # Car is ahead of the closest waypoint\n if val > 0:\n closest_idx = (closest_idx + 1) % len(self.waypoints_2d)\n\n return closest_idx", "def project_point_to_object(point, geometry):\n nearest_point = None\n min_dist = float(\"inf\")\n \n if isinstance(geometry, Polygon):\n for seg_start, seg_end in pairs(list(geometry.exterior.coords)):\n line_start = Point(seg_start)\n line_end = Point(seg_end)\n \n intersection_point = project_point_to_line(point, line_start, line_end)\n cur_dist = point.distance(intersection_point)\n \n if cur_dist < min_dist:\n min_dist = cur_dist\n nearest_point = intersection_point\n \n elif isinstance(geometry, LineString):\n for seg_start, seg_end in pairs(list(geometry.coords)):\n line_start = Point(seg_start)\n line_end = Point(seg_end)\n \n intersection_point = project_point_to_line(point, line_start, line_end)\n cur_dist = point.distance(intersection_point)\n \n if cur_dist < min_dist:\n min_dist = cur_dist\n nearest_point = intersection_point\n else:\n raise NotImplementedError(\"project_point_to_object not implemented for\"+\n \" geometry type '\" + geometry.type + \"'.\")\n return nearest_point", "def find_nearest_neighbour_from_point(point_cloud:np.ndarray, point:int) -> int:\n pass", "def find_nearest_edge(pt, tri):\n \n tri_coords = list(tri.coords)\n edge0 = LineString([tri_coords[0], tri_coords[1]])\n edge1 = LineString([tri_coords[1], tri_coords[2]])\n edge2 = LineString([tri_coords[2], tri_coords[3]])\n \n minDist = 100000000000\n nearestEdge = edge0\n edges = [edge0, edge1, edge2]\n \n for edge in edges:\n currentDist = pt.distance(edge)\n if currentDist < minDist:\n minDist = currentDist\n nearestEdge = edge\n \n return nearestEdge", "def get_distance(self, point):\n if not isinstance(point, Point):\n point = Point(*point)\n\n distances = [(point.distance_to_point(p), p) for p in self.points]\n sortpoints = sorted(distances, key=lambda x: x[0])\n closest = sortpoints[0][1]\n\n vc = Vector(*closest)\n d1 = vc.dot(vc)\n\n secondc = 
sortpoints[1][1]\n vs = Vector(*secondc)\n v1 = Vector(*point) - (vc+vs)/2\n v2 = vs-vc\n v2.unitize()\n d2 = v1.dot(v2)\n\n return abs(min(d1, d2)) - self.thickness/2" ]
[ "0.6380749", "0.6323016", "0.6234743", "0.6233291", "0.62017536", "0.61731505", "0.610106", "0.605958", "0.6042641", "0.6024658", "0.601331", "0.59856397", "0.5979267", "0.59519076", "0.5893517", "0.588874", "0.58503014", "0.5850296", "0.58121365", "0.5811686", "0.57973415", "0.57890403", "0.5784737", "0.577825", "0.577563", "0.57303756", "0.5720712", "0.57040536", "0.56895465", "0.56517935" ]
0.673218
0
Get the background concentration value at point (x, y) from the template file.
def background_concentration(sheets, x, y, pollutant): i, j = coor2idx(x, y) # get bc from excel. The year is hard coded to 2015 f = sheets["Backgroundconc"] idx = f[f['XiYI'] == str(i) + "-" + str(j)].index if len(idx) == 0: print("BCError: No location found. 0 returned.") return 0 return float(f[pollutant+'_2015'][idx])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def min_background_concentration(self) -> _VectorisedFloat:\n return self.CO2_atmosphere_concentration", "def GetBackgroundValue(self) -> \"short\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"short\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS2IUS2_GetBackgroundValue(self)", "def concentration(rFile, eFile, x, y, pollutant):\r\n sheets = pd.read_excel(eFile, sheet_name = None)\r\n if pollutant in ['NO2', 'PM10', 'PM25', 'EC']:\r\n c_traffic = traffic_concentration(rFile, sheets, x, y, pollutant)\r\n if c_traffic == 'e1':\r\n print(\"The calculation point is more than 60 meters far away from the street.\")\r\n return None\r\n c_background = background_concentration(sheets, x, y, pollutant)\r\n# print(c_traffic, c_background)\r\n return round(c_traffic + c_background, 1)\r\n else:\r\n print(\"Pollutant {} is not supported yet.\".format(pollutant))\r\n return None", "def getCl(filename):\n powSpec = pf.getdata(filename,1)\n temps = powSpec.field('TEMPERATURE')\n ell = np.arange(temps.size)\n return ell,temps", "def GetBackgroundValue(self) -> \"float\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"float\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_GetBackgroundValue(self)", "def min_background_concentration(self) -> _VectorisedFloat:\n return 0.", "def get_bg(bg_reader):\n print('Type your current bg.')\n bg = bg_reader() # Type your current bg\n try:\n bg_as_integer = int(bg)\n return bg_as_integer\n except Exception as e:\n return math.nan", "def template_height(self, x, y):\n\n tx = float(x) / self.world_size\n tx = tx * self.global_template.size\n\n ty = float(y) / self.world_size\n ty = ty * self.global_template.size\n\n tx1 = int(tx)\n dx = tx - tx1\n tx2 = tx1 + 1\n\n ty1 = int(ty)\n dy = ty - ty1\n ty2 = ty1 + 1\n\n if tx2 > self.global_template.size-1:\n tx2 = tx1\n if ty2 > self.global_template.size-1:\n ty2 = ty1\n\n A = self.global_template[tx1, ty1]\n B = self.global_template[tx2, ty1]\n C = self.global_template[tx1, ty2]\n D = self.global_template[tx2, ty2]\n\n E = self.cosine_interpolate(A, B, dx)\n F = self.cosine_interpolate(C, D, dx)\n\n return self.cosine_interpolate(E, F, dy)\n #if G == 0:\n #return 0\n #if G > 0:\n #G = G ** (1/2.)\n #else:\n #G = -(abs(G) ** (1/2.))\n\n #G = int(round(G\n\n #return G", "def GetBackgroundValue(self) -> \"unsigned char\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned char\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_GetBackgroundValue(self)", "def background(num_energies, num_samples):\n fixed_header = (\n 1*8 # SSID\n + 4*8 # SCET Coarse time\n + 2*8 # SCET Fine time\n + 2*8 # Integration time\n + 1 # Comp Schema background S\n + 3 # Comp Schema background K\n + 3 # Comp Schema background M\n + 1 # Comp Schema trigger S\n + 3 # Comp Schema trigger K\n + 3 # Comp Schema trigger M\n + 1 # Energy bin mask upper boundary\n + 4*8 
# Energy bin mask lower boundary\n + 1. # Spare\n + 1*8 # Number of energies\n + num_energies*2*8 # Number data points\n + 2*8 # Number of data points\n )\n\n variable = (\n num_energies*num_samples*8 # Compressed background\n + num_samples*8 # Compressed triggers\n )\n\n return fixed_header, variable", "def GetBackgroundValue(self) -> \"short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS3_GetBackgroundValue(self)", "def f2c_file_read_function():\n with open('data.txt', 'r') as infile:\n data = [i.strip().split() for i in infile] # store data as list\n\n F = float(data[-1][-1]) # last item in data should be value\n C = 5/9.0*F - 32\n print(\"The temperatire in Celcius is {:g}\".format(C))", "def GetBackgroundValue(self) -> \"short\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"short\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_GetBackgroundValue(self)", "def ci(x, y, z):\n\n return (x * 16 + z) * CHUNK_HEIGHT + y", "def bg_ex(self, f):\n dat = sif.readSIF(os.path.join(self.path, f))[0][0]\n return dat - np.min(dat[:511, :511])", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterISS2_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned char\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUC3_GetBackgroundValue(self)", "def get_background(self):\n\n log(\"Getting background for {} at {}\".format(self._location, self._t0))\n suffix = 'products/radar_transparencies/IDR{}.background.png'\n url = self.get_url(suffix.format(self._radar_id))\n background = self.get_image(url)\n if background is None:\n return None\n for layer in ('topography', 'locations', 'range'):\n log(\"Getting {} for {} at {}\".format(layer, self._location, self._t0))\n suffix = 'products/radar_transparencies/IDR{}.{}.png'.format(\n self._radar_id,\n layer\n )\n url = self.get_url(suffix)\n image = self.get_image(url)\n if image is not None:\n background = self._pilimg.alpha_composite(background, image)\n return background", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkBinaryGrindPeakImageFilterPython.itkBinaryGrindPeakImageFilterIUS2_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"unsigned short\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_GetBackgroundValue(self)", "def GetBackgroundValue(self) -> \"float\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_GetBackgroundValue(self)", "def GetBackgroundValue(self):\n return _itkLabelShapeOpeningImageFilterPython.itkLabelShapeOpeningImageFilterIUS3_GetBackgroundValue(self)" ]
[ "0.5851373", "0.57091177", "0.569069", "0.56186575", "0.55539846", "0.5522965", "0.5509472", "0.5504668", "0.54553366", "0.5387881", "0.53565407", "0.5343399", "0.5340344", "0.5300976", "0.5293775", "0.5271966", "0.52227193", "0.5221222", "0.5219034", "0.52049077", "0.5195402", "0.5173573", "0.5164221", "0.51640123", "0.51161945", "0.5114105", "0.5110073", "0.50826204", "0.50591034", "0.5055137" ]
0.6470138
0
Get wind speed at point (x, y) from Excel.
def wind_speed(sheets, x, y):
    i, j = coor2idx(x, y)
    # get ws from excel. The year is hard coded to 2012
    f = sheets["Meteo CAR-VL3.0"]
    idx = f[f['Search key'] == int(str(i) + str(j) + "2012")].index
    if len(idx) == 0:
        print("WSError: No location found.")
        return 0
    return float(f['Windspeed'][idx])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wind_speed(self):\n names = ['anc_mean_wind_speed']\n return self.sensor.get_with_fallback('wind_speed', names)", "def wind_speed(self):\r\n return self._yesterdays_weather.get_average_wind_speed()", "def get_windtspeed(self):\n return self.read_register(4111, 0, 3)", "def native_wind_speed(self) -> float:\r\n return self._first_timeserie[\"data\"][\"instant\"][\"details\"][\"wind_speed\"]", "def wind_speed(self):\n return self.flow_field.wind_speed", "def wind_speed(self):\r\n raise NotImplementedError", "def windspeed(self):\r\n try:\r\n return str(self.connect()['wind']['speed'])\r\n except:\r\n return '@weather_windspeed'", "def windSpeed(self, json, units):\n windSpeed = str(json['forecast']['simpleforecast']['forecastday'][0]['avewind'][units])\n return windSpeed", "def getWindSpeed():\r\n global pageText\r\n getWebPage()\r\n windSpeedPattern = \"Wind[^&]*&nbsp;([0-9]+)\\\\<\"\r\n windDirectionPattern = \"Wind[^\\\\>]*\\\\>[^\\\\>]*\\\\>[^\\\\>]*\\\\>[^\\\\>]*\\\\>[^\\\\>]*\\\\>([A-Za-z]+)(\\\\&nbsp;|\\\\<)\"\r\n windSpeed = float(re.search(windSpeedPattern, pageText).group(1))\r\n windDirection = re.search(windDirectionPattern, pageText).group(1)\r\n if windDirection == \"Calm\":\r\n windSpeed = 0.0\r\n windSpeed += random.choice(range(-4,5))/10. # !! vary it a little (for CS1 purposes; shouldn't really be here)\r\n if (windSpeed<0):\r\n windSpeed=0 # prevent the random variation from making windSpeed negative\r\n #print \"Wind Speed:\", windSpeed\r\n return windSpeed", "def getWindSpeed(self, *args):\n raise NotImplementedError(\n \"getWindSpeed method must be implemented by class {}\".format(\n type(self).__name__))", "def get_wind_speed(self, anemometer_type: Anemometer = Anemometer.black) -> float:\n wind_sensor = self.get_wind_sensor()\n\n if anemometer_type == Anemometer.black:\n if wind_sensor > 0:\n wind_speed = (wind_sensor * 0.84) + 3\n else:\n wind_speed = 0\n elif anemometer_type == Anemometer.gray:\n wind_speed = wind_sensor\n else:\n raise NotImplementedError(\"Unsupported anemometer type\")\n\n return round(wind_speed, 2)", "def get_wind_sensor(self) -> int:\n self.serial.write(b\"V!\")\n wind_sensor = self.__extract_int(self.__read_response(1)[0], b\"!w\")\n\n return wind_sensor", "def get_wind_values(self):\n return (\n int(self.data[2]), # dir\n float(self.data[3]) / 10, # gust\n float(self.data[4]) / 10, # avg\n float(self.data[5]) / 10, # chill\n )", "def wind_bearing(self) -> float:\r\n return self._first_timeserie[\"data\"][\"instant\"][\"details\"][\r\n \"wind_from_direction\"\r\n ]", "def adc_to_wind_speed(val):\n voltage_val = val / 65535 * 3.3\n return map_range(voltage_val, 0.4, 2, 0, 32.4)", "def extract_wind(source,la,lo,lats,lons,wd,ws):\r\n lat = source[la]\r\n lon = source[lo]\r\n wdir = []\r\n wspd = [] \r\n for coor in zip(lon,lat): \r\n in_lon = coor[0]\r\n in_lat = coor[1]\r\n # since lons are 0 thru 360, convert to -180 thru 180\r\n converted_lons = lons - ( lons.astype(np.int32) / 180) * 360\r\n # get cell of facility\r\n lat_idx = geo_idx(in_lat, lats)\r\n lon_idx = geo_idx(in_lon, converted_lons)\r\n #extract winddirection and wind speed from that cell\r\n d = wd[:,lat_idx,lon_idx][0]\r\n wdir.append(d)\r\n s = ws[:,lat_idx,lon_idx][0]\r\n wspd.append(s)\r\n \r\n return wdir,wspd", "def speed(self):\n self.convert_window(\"Speed\", \"meters/second\", [\"Mach number\", \"Nm/24hr\", \"centimeters/minute\", \"centimeters/second\", \"feet/hour\", \"feet/minute\", \"feet/second\", \"inches/minute\", \"inches/second\", \"kilometers/hour\", 
\"kilometers/second\", \"knots\", \"meters/hour\", \"meters/minute\", \"meters/second\", \"miles/hour\", \"miles/minute\", \"miles/second\", \"nautical miles/hour\", \"speed of light\", \"speed of sound\", \"yards/hour\", \"yards/minute\", \"yards/second\"])", "def get_speed(self):\r\n return self.__x_speed, self.__y_speed", "def get_speed(self):\n return self.get_par(\"slew_speed\")", "def native_wind_speed(self) -> float | None:\n return self._wind_speed", "def compute_windchill(temperature, windspeed):\n\n a = 35.74\n b = 0.6215\n c = 35.75\n d = 0.4275\n\n windspeed16 = windspeed ** 0.16\n wind_chill_index = a + (b * temperature) - (c * windspeed16) + (d * temperature * windspeed16)\n\n return wind_chill_index", "def wind(self):\n return self._wind", "def get_position(self):\r\n msg = struct.pack('>2B', 56, 00)\r\n response = self.query(msg)\r\n # Read and decode wavelength value (unknown units)\r\n encoded_wl = response[:2]\r\n wl = struct.unpack('>H', encoded_wl)[0]\r\n units, to_nm_multiplier = self.get_units()\r\n return wl * to_nm_multiplier", "def get_speed_x(self):\r\n return self.__X_speed", "def get_speed(self):\n return self.__corrds[self.X_SPEED], self.__corrds[self.Y_SPEED]", "def getMotorSpeed(self):\n cmd = 'E'\n vel = [-1,-1]\n out = self.getData(cmd)\n out = str(out, 'utf-8')\n if self.debug:\n print(out)\n if out[0] == 'e':\n isStart = False\n j = 0\n for i in range(len(out)):\n if isStart:\n if out[i] == ',':\n vel[j] = int(data)\n j = j + 1\n isStart = False\n else:\n data=data+out[i]\n if out[i] == ',':\n isStart = True\n data = ''\n vel[j] = int(data)\n return vel", "def get_speed_x(self):\n return self.__speed_x", "def calc_windrun(key, data, db_manager=None):\n if 'windSpeed' not in data or 'interval' not in data:\n raise weewx.CannotCalculate(key)\n\n if data['windSpeed'] is not None:\n if data['usUnits'] == weewx.US:\n val = data['windSpeed'] * data['interval'] / 60.0\n u = 'mile'\n elif data['usUnits'] == weewx.METRIC:\n val = data['windSpeed'] * data['interval'] / 60.0\n u = 'km'\n elif data['usUnits'] == weewx.METRICWX:\n val = data['windSpeed'] * data['interval'] * 60.0 / 1000.0\n u = 'km'\n else:\n raise weewx.ViolatedPrecondition(\"Unknown unit system %s\" % data['usUnits'])\n else:\n val = None\n u = 'mile'\n return weewx.units.convertStd((val, u, 'group_distance'), data['usUnits'])", "def dist_of_wind_speed(wspd, max_speed=30, max_y_value=None, return_data=False):\n freq_dist = dist(wspd, var_to_bin_against=None, bins=np.arange(-0.5, max_speed+1, 1), bin_labels=None,\n x_label='Wind Speed [m/s]', max_y_value=max_y_value, aggregation_method='%frequency',\n return_data=True)\n if return_data:\n return freq_dist[0], freq_dist[1]\n return freq_dist[0]", "def getWindAt(self, pos, wind):\r\n return wind[int(pos[0]), int(pos[1])]" ]
[ "0.659362", "0.6417548", "0.63081926", "0.62631917", "0.6230827", "0.61567974", "0.60879445", "0.60721374", "0.605982", "0.59834135", "0.5885346", "0.57687396", "0.5756765", "0.5713098", "0.57006425", "0.56810087", "0.5661978", "0.5649398", "0.563927", "0.55981606", "0.5597171", "0.5585403", "0.5580551", "0.55783", "0.5501174", "0.5462171", "0.5449701", "0.54436845", "0.54415447", "0.5374738" ]
0.7899671
0
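An illustrative usage sketch for the wind_speed record above; the workbook name is hypothetical, and it assumes the sheets argument is the dict of DataFrames pandas returns when reading every sheet:

import pandas as pd

sheets = pd.read_excel("meteo_2012.xlsx", sheet_name=None)  # hypothetical file; dict of DataFrames keyed by sheet name
ws = wind_speed(sheets, x=100000, y=100000)  # speed for the grid cell containing (x, y)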
Translate coordinates into indices (as used in CAR VL3.0). In fact, I don't know which CRS the coordinates are in; I just followed what was done in Excel.
def coor2idx(x, y):
    a = round(x/4000, 0)*4000
    b = (round_down(y/4000, 0) + 0.5)*4000
    i = int((a - 24000)/4000) + 1
    j = int((b - 22000)/4000) + 1
    return i, j
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Indexes(self, latitudes, longitudes):\n res = self._transform.TransformPoints(\n np.column_stack((longitudes, latitudes)))\n res = list(zip(*res))\n x, y = np.array(res[0]), np.array(res[1])\n idx_col = self._inv_txf[0] + self._inv_txf[1] * x + self._inv_txf[2] * y\n idx_row = self._inv_txf[3] + self._inv_txf[4] * x + self._inv_txf[5] * y\n return idx_row.astype(int), idx_col.astype(int)", "def _indices_to_coords(c,r):\n\n column = _index_to_column(c)\n row = r + 1\n\n return {'c': column, 'r': row, 'coord': f'{column}{row}'}", "def mni2vxl(coord):\n i = int((90.0 - coord[0]) / 2)\n j = int((coord[1] + 126) / 2)\n k = int((coord[2] + 72) / 2)\n return (i, j, k)", "def grid_to_index(mapdata, x, y):\n i = (y * mapdata.info.width) + x\n return int (i)", "def XYToIdx(cls, pos):\n return cls.rev_pos_mapping[int(pos[0])][int(pos[1])]", "def xy_to_index(x, y):\n index = y * columns + x\n return index", "def to_indices(self, world_coords):\n (x, y, z) = world_coords\n (cx, cy, cz) = self.center\n\n px = x - cx\n py = y - cy\n pz = z - cz\n\n # project u and v\n # TODO: Can this be done with some built-in Numpy operation?\n (ux, uy, uz) = self.u_dir\n (vx, vy, vz) = self.v_dir\n u = px * ux + py * uy + pz * uz\n v = px * vx + py * vy + pz * vz\n\n # rescale to pixel coordinates. u is up, v is down\n (H, W, _) = self.shape\n i = rescale(-1, 1, H - 1, 0, v)\n j = rescale(-1, 1, 0, W -1, u)\n\n return (i, j)", "def toind(x, y):\n return x + 15 * y", "def convert_coordinates(coordinates):\r\n row = coordinates[1] - 1\r\n column = letters.index(coordinates[0])\r\n return column, row", "def ind2coord(self, index):\n\n # assert (index >= 0)\n # assert(index < self.n - 1)\n\n col = index // self.rows\n row = index % self.rows\n\n return [row, col]", "def index_coords(data, origin=None):\r\n ny, nx = data.shape[:2]\r\n if origin is None :\r\n origin_x, origin_y = nx // 2, ny // 2\r\n else :\r\n origin_x, origin_y = origin\r\n x, y = np.meshgrid(np.arange(nx), np.arange(ny))\r\n x -= origin_x\r\n y -= origin_y\r\n return x, y", "def _raster_index_to_coords(i, j, bounds = [[-100, -100], [100, 100]],\n dx = 1, dy = 1):\n x = (j+0.5)*dx + bounds[0][0]\n y = (i+0.5)*dy + bounds[0][1]\n return x, y", "def index_coords(data, origin=None):\n ny, nx = data.shape[:2]\n if origin is None:\n origin_x, origin_y = nx // 2, ny // 2\n else:\n origin_x, origin_y = origin\n x, y = np.meshgrid(np.arange(nx), np.arange(ny))\n x -= origin_x\n y -= origin_y\n return x, y", "def xy2ind(self, x, y):\n return self.sub2ind(*self.xy2sub(x, y))", "def ij_coordinates(self):\n\n x = np.arange(self.nx)\n y = np.arange(self.ny)\n return np.meshgrid(x, y)", "def i_coords(self):\n ref_x = np.arange(-self.ref_w / 2, self.ref_w / 2 + 0.002, 0.002)\n\n if self.ref_shape == 'c': # Curved reflector\n dist_coords1 = [(ref_x[i], pos_on_semicircle(ref_x[i], self.R, self.c_xy)) for i in range(self.I)]\n dist_coords2 = [(ref_x[i + 1], pos_on_semicircle(ref_x[i + 1], self.R, self.c_xy)) for i in range(self.I)]\n a_i = [distance(dist_coords1[i], dist_coords2[i]) for i in range(self.I)]\n\n cx_i = [ref_x[i] + (ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n cy_i = [pos_on_semicircle(x, self.R, self.c_xy) for x in cx_i]\n i_coords = list(zip(cx_i, cy_i))\n else: # Flat reflector\n a_i = [(ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n cx_i = [ref_x[i] + (ref_x[i + 1] - ref_x[i]) / 2 for i in range(self.I)]\n i_coords = [(x, self.h) for x in cx_i]\n d = {'ref_x': ref_x, 'A_i': a_i, 'I_coords': i_coords, 'cx_i': cx_i}\n\n return d", 
"def coord (i, j):\r\n return j, i", "def coord2ind(self, coord):\n\n [row, col] = coord\n\n assert (row < self.rows)\n assert (col < self.cols)\n\n return col * self.rows + row", "def _get_grid_cell_indexes(proj, xs, ys, bounding_box):\n # Unpack values from the projection\n eq_rad = proj.semi_major_axis\n polar_rad = proj.semi_minor_axis\n h = proj.perspective_point_height + eq_rad\n lon0 = proj.longitude_of_projection_origin\n \n # Unpack values from the area we want to grab the data\n min_lat, min_lon = bounding_box.sw_corner()\n max_lat, max_lon = bounding_box.ne_corner()\n \n with np.errstate(invalid='ignore'):\n # Calculate the lat and lon grids\n xs, ys = np.meshgrid(xs, ys)\n a_vals = np.power(np.sin(xs), 2.0) + \\\n np.power(np.cos(xs), 2.0) * (np.power(np.cos(ys), 2.0) + \\\n eq_rad * eq_rad / polar_rad / polar_rad * np.power(np.sin(ys), 2.0))\n b_vals = -2 * h * np.cos(xs) * np.cos(ys)\n c_val = h * h - eq_rad * eq_rad\n \n rs = (-b_vals - np.sqrt(np.power(b_vals, 2.0) - 4 * a_vals * c_val)) / (2 * a_vals)\n \n sx = rs * np.cos(xs) * np.cos(ys)\n sy = -rs * np.sin(xs)\n sz = rs * np.cos(xs) * np.sin(ys)\n \n lats = np.arctan((eq_rad *eq_rad * sz) \\\n / (polar_rad * polar_rad * np.sqrt(np.power(h - sx, 2.0) + np.power(sy, 2.0))))\n lats = np.degrees(lats)\n \n lons = np.radians(lon0) - np.arctan(sy / (h - sx))\n lons = np.degrees(lons)\n \n # Flatten the arrays so we get a 1D list of indexes\n lats = lats.flatten()\n lons = lons.flatten()\n \n # Filter out values not in our bounding box\n lats = np.where(np.logical_and(lats >= min_lat, lats <= max_lat))[0]\n lons = np.where(np.logical_and(lons >= min_lon, lons <= max_lon))[0]\n idxs = list(set(lons).intersection(set(lats)))\n \n return idxs", "def coordinates(self):", "def get_idx(velocity, coord):\n d = velocity.node_intervals\n dx=d[0]\n dz=d[1]\n dy=d[2]\n mn = velocity.min_coords\n mnx=mn[0]\n mnz=mn[1]\n mny=mn[2]\n ix = int((coord[0] - mnx)/dx)\n iz = int((coord[1] - mnz)/dz)\n iy = int((coord[2] - mny)/dy)\n return (ix, iz, iy)", "def n_coords(self):\n trans_x = np.arange(-self.trans_dia / 2, self.trans_dia / 2 + 0.002, 0.002)\n a_n = [(trans_x[n + 1] - trans_x[n]) / 2 for n in range(self.N)]\n cx_n = [trans_x[n] + (trans_x[n + 1] - trans_x[n]) / 2 for n in range(self.N)]\n coords = [(x, 0) for x in cx_n]\n d = {'trans_x': trans_x, 'A_n': a_n, 'N_coords': coords}\n return d", "def get_index(self, x, y):\n i = (y - self.y0) // self.dy\n j = (x - self.x0) // self.dx\n i = min(max(i, 0), self.n-1)\n j = min(max(j, 0), self.m-1)\n return [i, j]", "def xy_to_idx(self, xs, ys, mask=None, mask_outside=False, nodata=-1):\n _, ncol = self.shape\n r, c = self.rowcol(xs, ys, mask=mask, mask_outside=mask_outside, nodata=nodata)\n mask = r != nodata\n idx = np.full(r.shape, nodata, dtype=int)\n idx[mask] = r[mask] * ncol + c[mask]\n return idx", "def index_to_xy(index):\n x = index % columns\n y = index // columns\n return x, y", "def obs_ijpos(gridfile,lons,lats,coor):\n\n gfh= netCDF4.Dataset(gridfile)\n cartesian=0\n if (coor=='r'):\n try:\n \n latr=gfh.variables['lat_rho'][:,:]\n lonr=gfh.variables['lon_rho'][:,:]\n except:\n latr=gfh.variables['latitude'][:,:]\n lonr=gfh.variables['longitude'][:,:]\n \n\n try:\n xr=gfh.variables['xi_rho'][:]\n yr=gfh.variables['eta_rho'][:]\n except:\n try:\n xr=gfh.variables['x_rho'][:]\n yr=gfh.variables['y_rho'][:]\n except:\n print('Neither xi_rho/eta_rho or x_rho/y_rho on file.')\n print('This might slow down the calculations')\n\n\n elif (coor=='u'):\n latr=gfh.variables['lat_u'][:,:]\n 
lonr=gfh.variables['lon_u'][:,:]\n try:\n xr=gfh.variables['xi_u'][:]\n yr=gfh.variables['eta_u'][:]\n except:\n xr=gfh.variables['x_u'][:]\n yr=gfh.variables['y_u'][:]\n elif (coor=='v'):\n latr=gfh.variables['lat_v'][:,:]\n lonr=gfh.variables['lon_v'][:,:]\n try:\n xr=gfh.variables['xi_v'][:]\n yr=gfh.variables['eta_v'][:]\n except:\n xr=gfh.variables['x_v'][:]\n yr=gfh.variables['y_v'][:]\n\n IN = point_in_polygon(lonr, latr, lons, lats)\n ind=np.where(IN)[0]\n \n if lats.size >1: \n lons=lons[ind]; lats=lats[ind]\n # If there's no lons, lats left at this stage, return oipos, ojpos with -999 everywhere\n if not len(lons):\n return np.ones_like(IN)*-999, np.ones_like(IN)*-999\n \n try:\n try:\n mapstr=str(gfh.variables['h'].getncattr('mapping'))\n except:\n try:\n mapstr=str(gfh.variables['h'].getncattr('grid_mapping'))\n except:\n pass\n try:\n projstring=(gfh.variables[mapstr]).getncattr('proj4')\n except:\n try:\n projstring=(gfh.variables[mapstr]).getncattr('proj4string')\n except:\n pass\n try:\n projstring=(gfh.variables['grid_mapping']).getncattr('proj4')\n except:\n try:\n projstring=(gfh.variables['grid_mapping']).getncattr('proj4string')\n except:\n pass\n\n gridproj=proj.Proj(str(projstring))\n hasproj=1\n except:\n hasproj=0\n\n # Check if lat, lon spacing is uniform\n dx1=np.abs(lonr[0,1]-lonr[0,0])\n dx2=np.abs(lonr[0,-1]-lonr[0,-2])\n n=int(np.round(lonr.shape[1]/2))\n dx3=np.abs(lonr[0,n]-lonr[0,n-1])\n\n dy1=np.abs(latr[1,0]-latr[0,0])\n dy2=np.abs(latr[-1,0]-latr[-2,0])\n n=int(np.round(latr.shape[0]/2))\n dy3=np.abs(latr[n,0]-latr[n-1,0])\n\n if ( (dx1 == dx2) & (dx1==dx3) & (dx2==dx3) & (dy1 == dy2) & (dy1==dy3) & (dy2==dy3) ):\n cartesian=1\n gridproj=proj.Proj(\"+proj=latlong +datum=WGS84\")\n \n\n \n if hasproj:\n dx=xr[1]-xr[0]\n dy=yr[1]-yr[0]\n [x,y]=gridproj(lons,lats)\n ipos=(x-xr[0])/dx\n jpos=(y-yr[0])/dy\n\n elif cartesian:\n [x1,y1]=gridproj(lonr[0,0],latr[0,0])\n [x2,y2]=gridproj(lonr[0,1],latr[0,1])\n dx=x2-x1\n [x2,y2]=gridproj(lonr[1,0],latr[1,0])\n dy=y2-y1\n [x,y]=gridproj(lons,lats)\n [x0,y0]=gridproj(lonr[0,0],latr[0,0])\n\n ipos=(x-x0)/dx\n jpos=(y-y0)/dy\n\n else:\n x=np.linspace(0,lonr.shape[1]-1,lonr.shape[1])\n y=np.linspace(0,lonr.shape[0]-1,lonr.shape[0])\n xi=np.zeros_like(lonr); yi=np.zeros([lonr.shape[1],lonr.shape[0]])\n xi[:,:]=x; yi[:,:]=y; yi=np.swapaxes(yi,1,0)\n zi=scipy.interpolate.griddata((lonr.flatten(),latr.flatten()),xi.flatten(),(lons,lats))\n ipos=zi\n zi=scipy.interpolate.griddata((lonr.flatten(),latr.flatten()),yi.flatten(),(lons,lats))\n jpos=zi\n \n if 'ind' in locals():\n oipos=np.ones(IN.shape)*-999.; ojpos=np.ones(IN.shape)*-999.\n oipos[ind]=ipos; ojpos[ind]=jpos\n else:\n oipos=ipos\n ojpos=jpos\n if not IN:\n oipos = np.array([-999.])\n ojpos = np.array([-999.])\n gfh.close()\n return oipos,ojpos", "def footprint_corner_indices():", "def get_idx(lons, lats, lon, lat):\n dist = ((lons - lon) ** 2 + (lats - lat) ** 2) ** 0.5\n return np.unravel_index(dist.argmin(), dist.shape)", "def row_to_indices(row):\r\n return [(row, col) for col in range(0, 9)]", "def pose_to_index(self, pose):\n x = pose.position.x\n y = pose.position.y\n x = x - self.origin.position.x\n y = y - self.origin.position.y\n print(\"Y: \", y)\n height = self.resized_height * self.robot.size\n print(\"Height: \", height)\n print(\"Resized height: \", self.resized_height)\n print(\"Robot size: \", self.robot.size)\n y = height - y\n\n i = int(math.floor(y / self.robot.size))\n j = int(math.floor(x / self.robot.size))\n return (i, j)" ]
[ "0.7147023", "0.70279527", "0.69961184", "0.6830627", "0.67562884", "0.66991967", "0.6639779", "0.663841", "0.66000515", "0.65586907", "0.65067756", "0.65027595", "0.64892167", "0.6450959", "0.64251804", "0.642326", "0.63962543", "0.62964547", "0.62945944", "0.627512", "0.6259023", "0.6241981", "0.6214004", "0.6211312", "0.62031376", "0.61960703", "0.6146644", "0.6134005", "0.60782796", "0.60666126" ]
0.71679586
0
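A worked example for coor2idx above (it relies on the round_down helper that appears in the next record): with x = y = 100000, a = 100000 and b = 102000, so the call returns (20, 21).

i, j = coor2idx(100000, 100000)  # -> (20, 21)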
Round a number down to n decimal places (same as ROUNDDOWN in Excel).
def round_down(n, decimals=0):
    multiplier = 10 ** decimals
    return int(n * multiplier) / multiplier
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def round_to(n, precision):\n correction = 0.5 if n >= 0 else -0.5\n return int(n / precision + correction) * precision", "def float_round(num, n):\n num = float(num)\n num = round(num, n)\n return num", "def round_to_n(x, n=8):\n n = 1 + n - int(np.floor(np.log10(abs(x) + .1)))\n\n return round(x, n)", "def two_decimal_places(n):\n return float('{:.2f}'.format(n))", "def roundy(v, n):\n\n \"\"\"\n t = 10**n\n return math.floor(v * t + 0.5) / t\n \"\"\"\n\n return v", "def __round(num):\n return float(round(decimal.Decimal(num), DataGen.precision))", "def true_round(number: Decimal, nDigits: int = None):\n number = Decimal(number)\n\n if nDigits is None:\n nDigits = 0\n\n normal = number * 10**nDigits\n\n rest = normal - int(normal)\n\n if rest >= 0.5:\n return (normal - rest + 1) * 10**-nDigits\n else:\n return (normal - rest) * 10**-nDigits", "def decimalize(n):\n return '{:.2f}'.format(n)", "def _round_to_nearest_multiple_down(x, n=5):\n return n * math.floor(float(x) / n)", "def _round_to_nearest_multiple_up(x, n=5):\n return n * math.ceil(float(x) / n)", "def _pow_10_round(n, up=True):\n if up:\n return 10 ** math.ceil(math.log(n, 10))\n else:\n return 10 ** math.floor(math.log(n, 10))", "def round_up(number, decimals=0):\n multiplier = 10 ** decimals\n return math.ceil(number * multiplier) / multiplier", "def round(x):\n return int(x + copysign(0.5, x))", "def round(n, precision=DEFAULT_ERROR):\n n = parse_number(n)\n sign = -1 if n < 0 else 1\n\n base = int(n)\n if abs(n) + precision >= abs(base) + 1:\n return sign * (abs(base) + 1)\n elif abs(n) - precision <= abs(base):\n return base\n else:\n return n", "def my_round(x, base=10):\n return base * round(x / base)", "def roundUP(x):\n\treturn int(ceil(x / 10.0)) * 10", "def r(num, places=4):\n precision = '1.{0}'.format('0' * places)\n return Decimal(num or 0).quantize(Decimal(precision), rounding=ROUND_HALF_UP)", "def round_down(x):\n return int(math.floor(x / 10.0)) * 10", "def round_molden(num, p=6):\n # Digit at pth position after dot.\n p_digit = math.floor(abs(num) * 10 ** p) % 10\n # If the 6th digit after dot is greater than 5, but is not 7,\n # round the number upto 6th place.\n # Else truncate at 6th digit after dot.\n if p_digit > 5 and p_digit != 7:\n return round(num, p)\n if num >= 0:\n return math.floor(num * 10 ** p) / 10 ** p\n else:\n return math.ceil(num * 10 ** p) / 10 ** p", "def py3round(number):\n if abs(round(number) - number) == 0.5:\n return int(2.0 * round(number / 2.0))\n\n return int(round(number))", "def round_near(n, p):\n base = 10**p\n\n return math.ceil(round(base * n * 2) / 2) / base", "def float_round(x, prec=2, base=.05):\n return round(base * round(float(x) / base), prec)", "def round_half_up(number):\n return number.quantize(decimal.Decimal(\"0.01\"), rounding=decimal.ROUND_HALF_UP)", "def xround(x, ndigits):\n\n return round(x,ndigits) if (x is not None) else None", "def roundto(x, to=10.0):\n if to and not math.isnan(x):\n return int(round(x / to)) * to\n else:\n return x", "def round_counts(n):\n n = int(n)\n assert n == math.floor(n) # make sure it is an integer; shouldn't be needed with above\n assert n >= 0\n if 0 <= n < 15: return LESS_THAN_15\n if 15 <= n <= 99: return str(nearest( n, 10))\n if 100 <= n <= 999: return str(nearest( n, 50))\n if 1000 <= n <= 9999: return str(nearest( n, 100))\n if 10000 <= n <= 99999: return str(nearest( n, 500))\n if 100000 <= n <= 999999: return str(nearest( n, 1000))\n return round4_decimal(n)", "def rnd(n, nPlaces=3):\n return round(n * (10 ** 
nPlaces) + 0.5) / (10 ** nPlaces)", "def fround(val):\r\n if val==0:\r\n return \"0.0\"\r\n lval = math.log10(val)\r\n if lval < 0:\r\n lval -= 1\r\n rval = -int(lval) + 2\r\n if lval > 3:\r\n return str(int(round(val, rval)))\r\n return str(round(val, rval))", "def round_of_rating(number):\n return round(number * 2) / 2", "def round_down(number: float, decimals: int = 2):\n if number < 0:\n return round(number, 2)\n if not isinstance(decimals, int):\n raise TypeError(\"decimal places must be an integer\")\n elif decimals < 0:\n raise ValueError(\"decimal places has to be 0 or more\")\n elif decimals == 0:\n return math.floor(number)\n\n factor = 10 ** decimals\n return math.floor(number * factor) / factor" ]
[ "0.7621153", "0.7491323", "0.7403502", "0.7221512", "0.7186418", "0.71188676", "0.706864", "0.7044281", "0.6993371", "0.69815147", "0.69485044", "0.6904951", "0.6862087", "0.6807377", "0.6804079", "0.67433345", "0.67120963", "0.66838527", "0.6665037", "0.6659999", "0.66458374", "0.6618203", "0.66179", "0.66089565", "0.6597943", "0.6584928", "0.6578299", "0.65705055", "0.65147835", "0.6506087" ]
0.75088334
1
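A few quick checks of round_down above (it truncates toward zero, matching Excel's ROUNDDOWN):

round_down(3.14159, 2)  # 3.14
round_down(2.77, 1)     # 2.7
round_down(7)           # 7.0 (decimals defaults to 0)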
Small utility returning a record reader that can read gzip'ed files.
def _gzip_reader_fn():
    return tf.TFRecordReader(
        options=tf.python_io.TFRecordOptions(
            compression_type=tf.python_io.TFRecordCompressionType.GZIP))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gzip_reader_fn(filenames):\n return tf.data.TFRecordDataset(\n filenames,\n compression_type='GZIP')", "def _gzip_reader_fn(filenames):\n return tf.data.TFRecordDataset(\n filenames,\n compression_type='GZIP')", "def _gzip_reader_fn(filenames):\n return tf.data.TFRecordDataset(filenames, compression_type=\"GZIP\")", "def _gzip_reader_fn(filenames):\n return tf.data.TFRecordDataset(filenames, compression_type='GZIP')", "def _gzip_reader_fn(filenames):\n return tf.data.TFRecordDataset(\n filenames,\n compression_type='GZIP')", "def open_gz(filename, mode):\n return gzip.open(filename, mode)", "def gzopen(f):\n return gzip.open(f, 'rb') if f.endswith('.gz') else open(f, 'r')", "def parse(path):\n data = gzip.open(path, 'rb')\n for byte_line in data:\n yield eval(byte_line) # return generator instance to save memory", "def parse_fastq(filepath):\n if REGEX_GZIPPED.match(filepath):\n logging.debug('Opening \"%s\" as gzipped file', filepath)\n # using os.popen with zcat since it is much faster than gzip.open or gzip.open(io.BufferedReader)\n # http://aripollak.com/pythongzipbenchmarks/\n # assumes Linux os with zcat installed\n import os\n with os.popen('zcat < {}'.format(filepath)) as f:\n yield from _parse_fastq(f)\n else:\n with open(filepath, 'r') as f:\n yield from _parse_fastq(f)", "def open_gzip(fn):\n magic = b'\\x1f\\x8b\\x08'\n l = len(magic)\n with open(fn, 'rb') as f:\n file_start = f.read(l)\n f.seek(0)\n # check if the file is compressed\n if file_start.startswith(magic):\n return gzip.open(fn, 'rt')\n # not compressed\n return open(fn, 'rt')", "def _CreateTaskStorageMergeReader(self, path):\n return gzip_file.GZIPStorageMergeReader(self, path)", "def iter_records(self):\n\n decomp_type = 'gzip'\n block_size = 16384\n\n self.reader = DecompressingBufferedReader(self.fh,\n block_size=block_size)\n self.offset = self.fh.tell()\n\n next_line = None\n\n while True:\n try:\n record = self._next_record(next_line)\n yield record\n except EOFError:\n break\n\n self.read_to_end(record)\n\n # for non-compressed, consume blank lines here\n if not self.reader.decompressor:\n next_line = self._consume_blanklines()\n if next_line is None:\n # at end of file\n break\n\n # reset reader for next member\n else:\n self.reader.read_next_member()", "def bz2_file_reader(path):\n return bz2.open(path, 'rt')", "def get_stream_reader(fh, tmp_dir):\n magic_dict = {\n b\"\\x1f\\x8b\\x08\": _get_stream_readers_for_gzip,\n b\"\\x42\\x5a\\x68\": _get_stream_readers_for_bz2,\n b\"\\x50\\x4b\\x03\\x04\": _get_stream_readers_for_zip,\n }\n start_of_file = fh.read(CHUNK_SIZE)\n try:\n fh.seek(0)\n except UnsupportedOperation: # This happens if fh has been created by urlopen\n fh = _download_file(start_of_file, fh)\n try: # Check if file is tar file\n if tarfile.open(fileobj=StringIO(start_of_file)):\n return _get_stream_readers_for_tar(fh, tmp_dir)\n except tarfile.ReadError:\n pass\n for k, v in magic_dict.items():\n if start_of_file.startswith(k):\n return v(fh, tmp_dir)\n return [fh]", "def open_(filename, mode=None, compresslevel=9):\n if filename[-3:] == '.gz':\n if mode is None: mode = 'rt'\n return closing(gzip.open(filename, mode, compresslevel))\n else:\n if mode is None: mode = 'r'\n return open(filename, mode)", "def get_infile(filename):\r\n if filename.endswith(\".gz\"):\r\n fin = GzipFile(filename, \"rb\")\r\n else:\r\n fin = open(filename, \"U\")\r\n return fin", "def get_infile(filename):\r\n if filename.endswith(\".gz\"):\r\n fin = GzipFile(filename, \"rb\")\r\n else:\r\n fin = 
open(filename, \"U\")\r\n return fin", "def open_gzipped(infile, mode='rt'):\n import gzip\n import bz2\n if mode.startswith('r'):\n tmode = 'rt'\n bmode = 'r'\n elif mode.startswith('w'):\n tmode = 'wt'\n bmode = 'w'\n elif mode.startswith('a'):\n tmode = 'at'\n bmode = 'a'\n if hasattr(infile, 'write'):\n return infile\n if isinstance(infile, str):\n if infile.endswith('.gz'):\n return gzip.open(infile, tmode)\n if infile.endswith('.bz2'):\n if hasattr(bz2, 'open'):\n return bz2.open(infile, tmode)\n else:\n return bz2.BZ2File(infile, bmode)\n return open(infile, tmode)", "def _raw_record_reader(stream):\n while True:\n header = stream.read(4)\n if len(header) < 4:\n return\n size, rec_type = struct.unpack(\">HH\", header)\n rec_type = rec_type // 256\n yield (rec_type, header + stream.read(size - 4))", "def load(filename):\n file = gzip.GzipFile(filename, 'rb')\n buffer = \"\"\n while True:\n data = file.read()\n if data == \"\":\n break\n buffer += data\n object = pickle.loads(buffer)\n file.close()\n return object", "def load(filename):\n file = gzip.GzipFile(filename, 'rb')\n buffer = \"\"\n while True:\n data = file.read()\n if data == \"\":\n break\n buffer += data\n object = pickle.loads(buffer)\n file.close()\n return object", "def load(filename):\n file = gzip.GzipFile(filename, 'rb')\n buffer = \"\"\n while 1:\n data = file.read()\n if data == \"\":\n break\n buffer += data\n object = pickle.loads(buffer)\n file.close()\n return object", "def get_reader(self) -> ArchiveFileReader:\n return ArchiveFileReader(\n filename=self.datafile,\n serializer=self.serializer,\n compression=self.compression,\n decoder=self.decoder\n )", "def read_gzipped(response):\r\n if response.info().get('Content-Encoding') == 'gzip':\r\n with io.BytesIO(response.read()) as buf:\r\n with gzip.GzipFile(fileobj=buf) as unzipped:\r\n data = unzipped.read()\r\n else:\r\n data = response.read()\r\n return data", "def bz2_file_bytes_reader(path):\n return bz2.open(path, 'rb')", "def read(self, filename: str, as_memory_io: bool = False):\n data = ungzip(self.driver.read(self.join(filename)))\n\n if as_memory_io:\n data = to_in_memory_io(data)\n\n return data", "def _get_stream_readers_for_zip(fh, tmp_dir):\n fasta_zip = zipfile.ZipFile(fh, 'r')\n rval = []\n for member in fasta_zip.namelist():\n fasta_zip.extract(member, tmp_dir)\n rval.append(open(os.path.join(tmp_dir, member), 'rb'))\n return rval", "def return_file_handle(input_file):\n if str(input_file).endswith(\".gz\"):\n gzipped_file_handle = gzip.open(input_file, \"rt\")\n return gzipped_file_handle\n else:\n normal_fh = open(input_file, \"r\")\n return normal_fh", "def read_file(filename, allow_missing=True, zname=None):\n\n if file_is_missing(filename, allow_missing):\n pass\n elif zname is None:\n with open(filename) as f:\n for line in f:\n yield line\n else:\n with zipfile.ZipFile(filename) as z:\n with z.open(zname) as f:\n for line in f:\n yield line.decode('ascii')", "def reader_for_streaming(io):\n if not hasattr(io, 'read'):\n raise TypeError('{0} must be an opened file.'.format(io))\n if hasattr(io, 'encoding'):\n raise TypeError('{0} must be opened in binary mode'.format(io))\n return reader.Reader.read_headers(io)" ]
[ "0.7269404", "0.7269404", "0.7256386", "0.72512805", "0.7213792", "0.66633874", "0.6593823", "0.6543221", "0.65036976", "0.64691526", "0.64597785", "0.64412504", "0.63976043", "0.62297446", "0.61427087", "0.61095643", "0.61095643", "0.6041713", "0.60404605", "0.6019779", "0.59753215", "0.5974862", "0.59691733", "0.596365", "0.59388787", "0.5924695", "0.58954895", "0.58650815", "0.58258027", "0.5802303" ]
0.777774
0
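A sketch of how the reader above could be wired into a TensorFlow 1.x queue-based input pipeline (the file name is hypothetical):

filename_queue = tf.train.string_input_producer(["examples.tfrecord.gz"])
reader = _gzip_reader_fn()
_, serialized_example = reader.read(filename_queue)  # one serialized record per call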
Create a modified whirl plot for an arbitrary polygon. Traditionally whirl plots are limited to n-gons; to plot an arbitrary polygon we express the construction of the whirl as successive connections between points while solving the mice problem.
def whirl_plot(polygons, iterations, step, fpath, **kwargs):
    for polygon in polygons:
        for _ in range(iterations):
            # pylint: disable=invalid-name
            xy = np.vstack([polygon, polygon[0]])
            plt.plot(*np.hsplit(xy, 2), **kwargs)
            diff = polygon - np.roll(polygon, 1, axis=0)
            norm_velocity = diff / np.linalg.norm(diff, axis=1, keepdims=True)
            polygon -= step * norm_velocity
    plt.gca().set_aspect("equal")
    plt.gca().axis("off")
    canvas = FigureCanvasAgg(plt.gcf())
    canvas.draw()
    stream, (width, height) = canvas.print_to_buffer()
    img = np.fromstring(stream, np.uint8).reshape((height, width, 4))
    plt.imsave(fpath, _trim_border(img))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_simple_polygonisation(n_points=20):\n # generate random sample points.\n sample_points = np.random.random_sample((n_points,2))*10\n # generate simple polygon\n seq = simple_polygonisation(sample_points)\n # plot polygon\n plt.figure()\n plt.plot(seq[:,0], seq[:,1], color=\"blue\", marker=\"s\", alpha=0.5)", "def _plot_wires(ax, hot, gnd, v, **kw):\n #get x and y coordinates\n L = len(hot)\n x = np.array([c.x for c in hot + gnd])\n y = np.array([c.y for c in hot + gnd])\n #calculate the scaling factor\n scale = _fields_plots_xs_wireperc*max(np.absolute(v))/max(np.absolute(y))\n if('scale' in kw):\n if(kw['scale'] is False):\n scale = 1.0\n #plot\n if(hot):\n kw['H'].append(ax.plot(x[:L], scale*y[:L], 'ko')[0])\n kw['L'].append('Conductors')\n if(gnd):\n kw['H'].append(ax.plot(x[L:], scale*y[L:], 'o', color='gray')[0])\n kw['L'].append('Grounded Conductors')", "def optimal_polygon(y, w=0.5, debug=False):\n # Make sure that we use numpy array\n y = np.array(y)\n x = np.arange(len(y))\n\n # Initialization\n y = np.round(y, 6)\n p_plus = (x[0], y[0] + w)\n l_plus = (x[0], y[0] + w)\n r_plus = (x[1], y[1] + w)\n s_plus = {(x[0], y[0] + w): (x[1], y[1] + w)}\n t_plus = {(x[1], y[1] + w): (x[0], y[0] + w)}\n p_minus = (x[0], y[0] - w)\n l_minus = (x[0], y[0] - w)\n r_minus = (x[1], y[1] - w)\n s_minus = {(x[0], y[0] - w): (x[1], y[1] - w)}\n t_minus = {(x[1], y[1] - w): (x[0], y[0] - w)}\n q = []\n i = 2\n\n while i < len(y):\n # Updating CH_plus (convex hull) and CH_minus\n p = (x[i - 1], y[i - 1] + w)\n p_i_plus = (x[i], y[i] + w)\n while (p != p_plus) and _angle(p_i_plus, p, t_plus[p], '+') > np.pi:\n p = t_plus[p]\n s_plus[p] = p_i_plus\n t_plus[p_i_plus] = p\n\n p = (x[i - 1], y[i - 1] - w)\n p_i_minus = (x[i], y[i] - w)\n while (p != p_minus) and _angle(p_i_minus, p, t_minus[p], '-') > np.pi:\n p = t_minus[p]\n s_minus[p] = p_i_minus\n t_minus[p_i_minus] = p\n\n # Check if CH_plus and CH_minus intersect\n if _angle(p_i_plus, l_plus, r_minus, '+') < np.pi:\n q.append((_intersect(l_plus, r_minus, p_plus, p_minus), l_plus, r_minus, p_plus, p_minus))\n p_minus = r_minus\n p_plus = _intersect(l_plus, r_minus, (x[i - 1], y[i - 1] + w), p_i_plus)\n s_plus[p_plus] = p_i_plus\n t_plus[p_i_plus] = p_plus\n r_plus = p_i_plus\n r_minus = p_i_minus\n l_plus = p_plus\n l_minus = p_minus\n while _angle(l_minus, r_plus, s_minus[l_minus], '-') < np.pi:\n l_minus = s_minus[l_minus]\n elif _angle(p_i_minus, l_minus, r_plus, '-') < np.pi:\n q.append((_intersect(l_minus, r_plus, p_minus, p_plus), l_minus, r_plus, p_minus, p_plus))\n p_plus = r_plus\n p_minus = _intersect(l_minus, r_plus, (x[i - 1], y[i - 1] - w), p_i_minus)\n s_minus[p_minus] = p_i_minus\n t_minus[p_i_minus] = p_minus\n r_minus = p_i_minus\n r_plus = p_i_plus\n l_minus = p_minus\n l_plus = p_plus\n while _angle(l_plus, r_minus, s_plus[l_plus], '+') < np.pi:\n l_plus = s_plus[l_plus]\n else:\n # Updating the two seperating and supporting lines\n if _angle(p_i_plus, l_minus, r_plus, '+') < np.pi:\n r_plus = p_i_plus\n while _angle(p_i_plus, l_minus, s_minus[l_minus], '+') < np.pi:\n l_minus = s_minus[l_minus]\n\n if _angle(p_i_minus, l_plus, r_minus, '-') < np.pi:\n r_minus = p_i_minus\n while _angle(p_i_minus, l_plus, s_plus[l_plus], '-') < np.pi:\n l_plus = s_plus[l_plus]\n i += 1\n\n # Add last change point\n a = _intersect(l_plus, r_minus, p_plus, p_minus)\n b = _intersect(l_minus, r_plus, p_minus, p_plus)\n p = ((a[0] + b[0]) / 2, (a[1] + b[1]) / 2)\n q.append((p, r_minus, r_plus, p_minus, p_plus))\n\n end_a = _intersect(p, 
r_plus, p_i_minus, p_i_plus)\n end_b = _intersect(p, r_minus, p_i_minus, p_i_plus)\n end = ((end_a[0] + end_b[0]) / 2, (end_a[1] + end_b[1]) / 2)\n q.append((end, (None, None), (None, None), p_i_minus, p_i_plus))\n\n if debug:\n return np.array(q)\n else:\n return np.array([o[0] for o in q])", "def generatePolygons():", "def plot_polygon(polygon, size_points_distrib=50):\n # Get the points\n list_points = list(polygon.exterior.coords)\n distances = np.array(scipy.spatial.distance.euclidean([elt[0] for elt in list_points], [elt[1] for elt in list_points]))\n avg_dist = distances.mean()\n\n # Get the boundaries\n minx, miny, maxx, maxy = polygon.bounds\n box_points = box(minx, miny, maxx, maxy, ccw=True)\n\n fig, ax = plt.subplots(figsize=(10, 10))\n\n # Box\n plt.scatter(*zip(*list(box_points.exterior.coords)), color='black', linestyle=\"--\", alpha=0.2)\n plt.plot(*zip(*list(box_points.exterior.coords)), color='black', linestyle=\"--\", alpha=0.2)\n\n # Polygon\n plt.scatter(*zip(*list_points), color='blue')\n plt.plot(*zip(*list(list_points)), color='blue', linestyle=\"-.\", alpha=0.2)\n ax.set(xlim=[minx, maxx])\n ax.set(ylim=[miny, maxy])\n\n # Limits\n rdm_points = gen_rdm_points_square(polygon, size_points_distrib)\n # creates mask\n is_in_distrib = point_in_polygons(polygon, rdm_points)\n print(rdm_points[is_in_distrib])\n\n # Points in\n x_in, y_in = zip(*rdm_points[is_in_distrib])\n plt.scatter(x_in, y_in, color='green', alpha=0.2, marker=\"+\")\n\n # Points out\n x_out, y_out = zip(*rdm_points[~is_in_distrib])\n plt.scatter(x_out, y_out, color='red', alpha=0.2, marker=\"+\")\n\n plt.show()", "def plot_weights_for_getting_smooth_spectrum(wlm,\n s,\n running_wave,\n running_step_median,\n fit_median,\n fit_median_interpolated,\n weight_fit_median,\n wave_min,\n wave_max,\n exclude_wlm, \n show_plot=False):\n fig_size = 12\n fig, ax = plt.subplots(figsize=(fig_size, fig_size / 2.5))\n ax.plot(wlm, s, alpha=0.5)\n ax.plot(running_wave, running_step_median, \"+\", ms=15, mew=3)\n ax.plot(wlm, fit_median, label=\"fit median\")\n ax.plot(wlm, fit_median_interpolated, label=\"fit median_interp\")\n ax.plot(wlm, weight_fit_median * fit_median + (1 - weight_fit_median) * fit_median_interpolated, label=\"weighted\")\n\n extra_display = old_div((np.nanmax(fit_median) - np.nanmin(fit_median)), 10)\n ax.set_ylim(\n np.nanmin(fit_median) - extra_display, np.nanmax(fit_median) + extra_display\n )\n ax.set_xlim(wlm[0] - 10, wlm[-1] + 10)\n ax.tick_params(axis='both', which='minor')\n ax.legend(frameon=False, loc=1, ncol=1)\n\n ax.axvline(x=wave_min, color=\"k\", linestyle=\"--\")\n ax.axvline(x=wave_max, color=\"k\", linestyle=\"--\")\n\n ax.set_xlabel(r\"Wavelength [$\\AA$]\")\n\n if exclude_wlm[0][0] != 0:\n for i in range(len(exclude_wlm)):\n ax.axvspan(exclude_wlm[i][0], exclude_wlm[i][1], color=\"r\", alpha=0.1)\n\n if show_plot:\n plt.show()\n return fig", "def drawPoles(wn):\n wn.setworldcoordinates(-1, -5, 3, 20)\n t = turtle.Turtle()\n t.speed(0)\n t.pensize(3)\n t.up()\n t.goto(-.5, 0)\n t.down()\n t.goto(2.5, 0)\n t.up()\n for i in range(3):\n t.goto(i, 0)\n t.down()\n t.goto(i, 10)\n t.up()\n t.hideturtle()", "def plotSeismogram(d, rho, v, wavf, wavA=1., noise = 0., usingT=True, wavtyp='RICKER'):\n\n tseis, seis, twav, wav, tref, rseriesconv = syntheticSeismogram(d, rho, v, wavf, wavA, usingT,wavtyp)\n\n noise = noise*np.max(np.abs(seis))*np.random.randn(seis.size)\n filt = np.arange(1.,15.)\n filtr = filt[::-1]\n filt = np.append(filt,filtr[1:])*1./15.\n noise = 
np.convolve(noise,filt)\n noise = noise[0:seis.size]\n\n seis = seis + noise\n\n plt.figure(num=0, figsize = (8, 5))\n\n plt.subplot(131)\n plt.plot(wav,twav,linewidth=1,color='black')\n plt.title('Wavelet')\n plt.xlim((-2.,2.))\n plt.grid()\n plt.ylim((tseis.min()-tseis.mean(),tseis.max()-tseis.mean()))\n plt.gca().invert_yaxis()\n plt.setp(plt.xticks()[1],rotation='90',fontsize=9)\n plt.setp(plt.yticks()[1],fontsize=9)\n plt.gca().set_xlabel('Amplitude',fontsize=9)\n plt.gca().set_ylabel('Time (s)',fontsize=9)\n\n plt.subplot(132)\n plt.plot(np.zeros(tref.size),(tseis.max(),tseis.min()),linewidth=2,color='black')\n plt.hlines(tref,np.zeros(len(rseriesconv)),rseriesconv,linewidth=2) #,'marker','none'\n plt.title('Reflectivity')\n plt.grid()\n plt.ylim((0,tseis.max()))\n plt.gca().invert_yaxis()\n plt.xlim((-2.,2.))\n plt.setp(plt.xticks()[1],rotation='90',fontsize=9)\n plt.setp(plt.yticks()[1],fontsize=9)\n plt.gca().set_xlabel('Amplitude',fontsize=9)\n plt.gca().set_ylabel('Time (s)',fontsize=9)\n\n plt.subplot(133)\n plt.plot(seis,tseis,color='black',linewidth=1)\n plt.title('Seismogram')\n plt.grid()\n plt.ylim((tseis.min(),tseis.max()))\n plt.gca().invert_yaxis()\n plt.xlim((-0.95,0.95))\n plt.setp(plt.xticks()[1],rotation='90',fontsize=9)\n plt.setp(plt.yticks()[1],fontsize=9)\n plt.gca().set_xlabel('Amplitude',fontsize=9)\n plt.gca().set_ylabel('Time (s)',fontsize=9)\n\n plt.tight_layout()\n plt.show()", "def plotSeismogramV2(d, rho, v, wavf, wavA=1., noise = 0., usingT=True, wavtyp='RICKER'):\n\n dpth, rholog, vlog, zlog, rseries = getLogs(d, rho, v, usingT)\n tseis, seis, twav, wav, tref, rseriesconv = syntheticSeismogram(d, rho, v, wavf, wavA, usingT,wavtyp)\n\n noise = noise*np.max(np.abs(seis))*np.random.randn(seis.size)\n filt = np.arange(1.,21.)\n filtr = filt[::-1]\n filt = np.append(filt,filtr[1:])*1./21.\n noise = np.convolve(noise,filt)\n noise = noise[0:seis.size]\n\n xlimrho = (1.95,5.05)\n xlimv = (0.25,4.05)\n xlimz = (xlimrho[0]*xlimv[0], xlimrho[1]*xlimv[1])\n\n seis = seis + noise\n\n plt.figure(num=0, figsize = (8, 5))\n\n plt.subplot(141)\n plt.plot(wav,twav,linewidth=1,color='black')\n plt.title('Wavelet')\n plt.xlim((-1.,1.))\n plt.ylim((tseis.min()-tseis.mean(),tseis.max()-tseis.mean()))\n plt.grid()\n plt.gca().invert_yaxis()\n plt.setp(plt.xticks()[1],rotation='90',fontsize=9)\n plt.setp(plt.yticks()[1],fontsize=9)\n plt.gca().set_xlabel('Amplitude',fontsize=9)\n plt.gca().set_ylabel('Time (s)',fontsize=9)\n\n plt.subplot(142)\n plotLogFormat(rholog*10**-3,dpth,xlimrho,'blue')\n plt.title('$\\\\rho$')\n plt.xlabel('Density \\n $\\\\times 10^3$ (kg /m$^3$)',fontsize=9)\n plt.ylabel('Depth (m)',fontsize=9)\n\n plt.subplot(143)\n plotLogFormat(vlog*10**-3,dpth,xlimv,'red')\n plt.title('$v$')\n plt.xlabel('Velocity \\n $\\\\times 10^3$ (m/s)',fontsize=9)\n plt.ylabel('Depth (m)',fontsize=9)\n\n plt.subplot(144)\n plt.plot(seis,tseis,color='black',linewidth=1)\n plt.title('Seismogram')\n plt.grid()\n plt.ylim((tseis.min(),tseis.max()))\n plt.gca().invert_yaxis()\n plt.xlim((-1.,1.))\n plt.setp(plt.xticks()[1],rotation='90',fontsize=9)\n plt.setp(plt.yticks()[1],fontsize=9)\n plt.gca().set_xlabel('Amplitude',fontsize=9)\n plt.gca().set_ylabel('Time (s)',fontsize=9)\n\n plt.tight_layout()\n plt.show()", "def polygon(n,r):\n \n window = turtle.Screen()\n\n david = turtle.Turtle()\n david.pensize(2)\n\n a = float(360 / n) \t\t #this is the angle the turtle will turn each time\n l = 2 * (math.sin(math.radians(a / 2)) * r) #this is the length of the sides\n\n 
david.penup()\n david.speed(0)\n david.right(90)\n david.forward(r * math.cos(math.radians(a / 2)))\n david.right(90)\n david.forward(l / 2)\n david.left(180)\n david.pendown()\n david.speed(1/2)\n\n for x in range(n):\n david.forward(l)\n david.left(a)", "def plotWeights(w):\n w = w[:,:,0,:]\n # rescale w to 0.0 - 1.0\n mincode = np.amin(w)\n maxcode = np.amax(w)\n w = (w - mincode) / (maxcode - mincode)\n\n out = np.zeros((15, 15))\n for x in range(0,4):\n for y in range(0,4):\n c = x*4+y\n out[x*4:x*4+3, y*4:y*4+3] = w[:,:,c]\n return out", "def drawPolygon(self, Pol, colour=None):\n #plt.plot(Pol[:,0], Pol[:,1], 'bo-')\n\n x = Pol[:,0]#[k[0] for k in Pol]\n y = Pol[:,1]#[k[1] for k in Pol]\n if colour is None:\n self.ax.plot(x,y)\n else:\n self.ax.plot(x,y,colour)\n self.fig.canvas.draw()", "def __plot_convex_hull(self, ax=None) -> None:\n ax.plot(self.points[:, 0], self.points[:, 1], \"o\")\n for simplex in self.hull.simplices:\n ax.plot(self.points[simplex, 0], self.points[simplex, 1], \"k-\")", "def generate(pts):\n cmds.polyCreateFacet(name=\"shirt\", p=points)\n cmds.polyTriangulate()\n cmds.polySubdivideFacet(dv=SUBDIVISIONS)\n cmds.polyTriangulate()", "def __init__(self, model, polygon, segments = None, strength = 1,\r\n variables = [], priors=[], snap_distance = 1E-10,\r\n snap = False, influence = None):\r\n\r\n import numpy as np\r\n import copy\r\n import matplotlib.path\r\n import math\r\n \r\n # Append this element to the specified model\r\n self.model = model\r\n model.elementlist.append(self)\r\n \r\n # This element adds water, so it also requires an influence range\r\n if influence is None:\r\n self.influence = self.model.domain_radius*2\r\n else:\r\n self.influence = influence\r\n \r\n # Complexify the polygon, if it isn't already complex\r\n polygon = self.complexify(polygon)\r\n \r\n # Prepare the polygon variable\r\n self.polygon = polygon\r\n \r\n # Is the polygon closed? If not, close it temporarily\r\n self.snap_distance = snap_distance\r\n if np.abs(self.polygon[0]-self.polygon[-1]) > self.snap_distance:\r\n self.polygon = np.asarray(list(self.polygon)+[self.polygon[0]])\r\n \r\n # Also create an array with real coordinates\r\n self.polygon_XY = np.column_stack((\r\n np.real(copy.copy(self.polygon))[:,np.newaxis],\r\n np.imag(copy.copy(self.polygon))[:,np.newaxis] ))\r\n\r\n # Is the polygon counter-clockwise? If not, correct it\r\n if self.are_vertices_clockwise(self.polygon_XY):\r\n self.polygon = np.flip(self.polygon)\r\n self.polygon_XY = np.flipud(self.polygon_XY)\r\n \r\n # Do we wish to subdivide the polygon?\r\n # First, check if the user specified a desired segment count\r\n if segments is None:\r\n self.segments = self.polygon.shape[0]-1\r\n else:\r\n self.segments = segments\r\n \r\n if self.segments < self.polygon.shape[0]-1:\r\n raise Exception('Prescribed number of line segments '+str(self.segments)+\" mustn't be smaller than the number of vertices \"+str(polygon.shape[0]-1)+'.')\r\n \r\n # Subdivide the polygon, if desired\r\n if self.segments > self.polygon.shape[0]-1:\r\n self.polygon_XY = self.subdivide_line(self.polygon_XY,self.segments)\r\n self.polygon = self.polygon_XY[:,0] + 1j*self.polygon_XY[:,1]\r\n \r\n # This is a hack: We shrink the polygon by a small amount. 
This should ensure \r\n # that no issues arise from evaluating points directly on the boundary; \r\n # there might be other ways to solve this issue alternatively\r\n self.polygon_XY = self.shrink_polygon(\r\n polygon = self.polygon_XY,\r\n offset = 1E-10)\r\n self.polygon = self.polygon_XY[:,0] + 1j*self.polygon_XY[:,1]\r\n \r\n # Un-close the polygon again\r\n self.polygon_XY = self.polygon_XY[:-1,:]\r\n self.polygon = self.polygon[:-1]\r\n \r\n # If vertex snapping is enabled, snap all outside vertices onto the domain edge\r\n if snap:\r\n self.snap_to_domain()\r\n \r\n # =====================================================================\r\n # Now some area-sink-specific work\r\n # =====================================================================\r\n \r\n # Get the angles of all segments to the x axis\r\n # required for the local coordinates, Strack 1989, 37.19\r\n self.alpha = np.zeros(self.segments)\r\n for seg in range(self.segments):\r\n if seg == self.segments-1:\r\n nextseg = 0\r\n else:\r\n nextseg = seg+1\r\n \r\n # Get the side vector, then normalize it \r\n temp = self.polygon[nextseg]-self.polygon[seg]\r\n temp /= np.abs(temp)\r\n \r\n self.alpha[seg] = math.asin(np.imag(temp))\r\n \r\n \r\n # Get the central point of the polygon\r\n self.zc = np.mean(self.polygon)\r\n \r\n # Calculate the area of the polygon with the shoelace formula:\r\n self.A = self.get_polygon_area()\r\n \r\n # Calculate the coefficients c0, c1, c2 for all segments\r\n self.L = np.zeros(self.segments)\r\n for seg in range(self.segments):\r\n \r\n if seg == self.segments-1:\r\n nextseg = 0\r\n else:\r\n nextseg = seg+1\r\n \r\n # Save the length of the segment\r\n self.L[seg] = np.abs(self.polygon[nextseg]-self.polygon[seg])\r\n \r\n # Get strength parameters for each vertex\r\n self.strength = strength\r\n \r\n # Extract target variables\r\n self.variables = variables\r\n self.priors = priors\r\n \r\n # # Prepare the matrix block containing the effect of this element onto \r\n # # itself for future use in solving the linear system. The matrix requires\r\n # # subtraction of the A_star variable from its diagonal entries for completion\r\n # self.block = self.matrix_contribution()\r\n \r\n # Check if the prior matches the number of parameters\r\n if len(self.priors) != len(self.variables):\r\n raise Exception('Number of priors must match number of unknown variables. 
Number of priors: '+str(self.priors)+' / Number of unknown variables: '+str(len(self.variables)))\r\n \r\n if len(self.variables) > 0:\r\n # There are some model variables specified\r\n for idx,var in enumerate(self.variables):\r\n self.model.num_params += 1\r\n exec(\"self.model.params += [self.%s]\" % var)\r\n self.model.priors += [self.priors[idx]]\r\n self.model.variables += [var]\r\n if 'name' in list(self.priors[idx].keys()):\r\n self.model.param_names += [self.priors[idx]['name']] \r\n else: \r\n self.model.param_names += ['unknown']", "def decorate_scene():\n make_polygon( (100,100),(120,140),(270,70) )\n make_polygon( (300,10), (300,550), (340,452),(380,300), (330,50))\n make_polygon( (200,450), (100,450), (100,500), (200,500) )\n make_polygon( (130,320), (150,300), (140,280) )\n return", "def generatePolygons(self, *args, **kwargs): \n return 'var PloneMapPolygons = [' + \\\n ''.join([\"{ 'id': '%s', 'path' : %s,'title':'%s'},\" % (object.id, object.polygon, object.Title()) \n for object in self.context.objectValues() \n if hasattr(object, 'polygon') and len(object.polygon) > 0 ])[:-1] \\\n + '];'", "def squarepot(VV = np.array([0,1,0]), xx = np.array([-2, -1, 1, 2]), neigs = 40):\n\n elt = square_well(xx, VV)\n\n f, axarr = plt.subplots(2)\n plot_potential1( VV, xx, axarr[0] )\n\n l = checked_resonances(elt, neigs)\n\n # #TODO: Fix plot point sizes\n l_full = checked_resonances(elt)\n axarr[1].scatter(l_full.real, l_full.imag)\n axarr[1].set_title('Pole locations')\n # plt.axis('equal')\n plt.show()\n\n return l", "def regular_polygon(self, n, field = QQ):\n npi = 3.14159265359\n verts = []\n for i in range(n):\n t = 2*npi*i/n\n verts.append([sin(t),cos(t)])\n verts = [[field(RDF(x)) for x in y] for y in verts]\n return Polyhedron(vertices = verts, field = field)", "def wadic_slopes(self,terms=None):\n\t\tNP = [(a,self.multiplicity(a)) for a in range(self.num_coefs)]\n\t\tif terms==None:\n\t\t\treturn NewtonPolygon(NP).slopes()\n\t\telse:\n\t\t\treturn NewtonPolygon(NP).slopes()[0:terms]", "def fit_polynomial(binary_warped, plot = False):\n image_shape = binary_warped.shape\n left_curverad = left_line.radius_of_curvature\n right_curverad = right_line.radius_of_curvature\n # Find our lane pixels first\n if (not left_line.detected and left_line.frames_not_detected > \\\n reset_search) or len(left_line.last_n_fits) == 0:\n leftx, lefty, rightx, righty = find_lane_pixels(binary_warped)\n else:\n leftx, lefty, rightx, righty = \\\n polyfit_using_prev(binary_warped, left_line.last_n_fits[-1], right_line.last_n_fits[-1])\n # leftx, lefty, rightx, righty = find_lane_pixels(binary_warped)\n \n # Generate x and y values for plotting\n ploty = np.linspace(0, image_shape[0]-1, image_shape[0])\n if len(lefty) !=0 and len(righty) != 0:\n left_fit = np.polyfit(lefty, leftx, 2)\n right_fit = np.polyfit(righty, rightx, 2)\n left_curverad, right_curverad = measure_curvature_real(left_fit, right_fit, image_shape[0]-1)\n left_line.radius_of_curvature = left_curverad\n right_line.radius_of_curvature = right_curverad\n \n left_fitx = eval_at_y(left_fit, ploty)\n right_fitx = eval_at_y(right_fit, ploty)\n if not sanity_check(left_fitx, right_fitx):\n left_line.detected = False\n right_line.detected = False\n else:\n left_line.detected = True\n right_line.detected = True\n else:\n left_fit =[]\n right_fit = []\n left_line.detected = False\n right_line.detected = False\n \n \n if not plot:\n left_fit = left_line.calculate_fit(left_fit)\n right_fit = right_line.calculate_fit(right_fit)\n \n \n offset = 
0\n # calculate offset from center of lane\n if len(left_fit) != 0:\n left_fitx = eval_at_y(left_fit, ploty)\n right_fitx = eval_at_y(right_fit, ploty)\n\n left_bottom_x = left_fitx[-1]\n right_bottom_x = right_fitx[-1]\n # offset calculation\n lane_midpoint = (left_bottom_x + right_bottom_x)/2.0\n camera_midpoint = (image_shape[1]-1)/2.0\n offset = (camera_midpoint - lane_midpoint)*xm_per_pix\n \n if plot:\n ## Visualization ##\n # Colors in the left and right lane regions\n # Create an output image to draw on and visualize the result\n # Create an output image to draw on and visualize the result\n out_img = np.dstack((binary_warped, binary_warped, binary_warped))\n plt.figure(3)\n if not left_line.detected:\n print('Sanity check failed, displaying rejected lines')\n out_img[lefty, leftx] = [255, 0, 0]\n out_img[righty, rightx] = [0, 0, 255]\n\n # Plots the left and right polynomials on the lane lines\n plt.plot(left_fitx, ploty, color='yellow')\n plt.plot(right_fitx, ploty, color='yellow')\n # plt.xlim(0, image_shape[1])\n # plt.ylim(image_shape[0], 0)\n plt.imshow(out_img)\n plt.title(\"Lane lines identified\")\n return left_fitx, right_fitx, ploty, left_curverad, right_curverad, offset", "def plotgwsrc(gwb):\n theta, phi, omega, polarization = gwb.gw_dist()\n\n rho = phi - N.pi\n eta = 0.5 * N.pi - theta\n\n # I don't know how to get rid of the RuntimeWarning -- RvH, Oct 10, 2014:\n # /Users/vhaaster/env/dev/lib/python2.7/site-packages/matplotlib/projections/geo.py:485:\n # RuntimeWarning: invalid value encountered in arcsin theta = np.arcsin(y / np.sqrt(2))\n # old_settings = N.seterr(invalid='ignore')\n\n P.title(\"GWB source population\")\n _ = P.axes(projection=\"mollweide\")\n\n foo = P.scatter(rho, eta, marker=\".\", s=1)\n # bar = N.seterr(**old_settings)\n\n return foo", "def flatNoisePellicle():\n #Get data\n wdir = '/home/rallured/Dropbox/AXRO/Metrology/' \\\n 'NoiseStudy/SolarBwPellicle/'\n d1,dx1 = met.read4DFits(wdir+'161209_Avg8_Meas1.fits')\n d2,dx2 = met.read4DFits(wdir+'161209_Avg8_Meas2.fits')\n d3,dx3 = met.read4DFits(wdir+'161209_Avg8_Meas3.fits')\n d4,dx4 = met.read4DFits(wdir+'161209_Avg8_Meas4.fits')\n\n #Construct power spectra\n f12,pow12 = fourier.meanPSD((d1-d2)[:,100:-100],\\\n win=np.hanning,dx=dx1,irregular=True)\n f23,pow23 = fourier.meanPSD((d2-d3)[:,100:-100],\\\n win=np.hanning,dx=dx1,irregular=True)\n f34,pow34 = fourier.meanPSD((d3-d4)[:,100:-100],\\\n win=np.hanning,dx=dx1,irregular=True)\n f14,pow14 = fourier.meanPSD((d1-d4)[:,100:-100],\\\n win=np.hanning,dx=dx1,irregular=True)\n\n #Mid frequency\n midfreq = [1000*np.sqrt(np.sum(p[np.logical_and(f>.1,f<1.)])) \\\n for f,p in zip([f12,f23,f34,f14],[pow12,pow23,pow34,pow14])]\n\n #Plot\n plt.loglog(f12,pow12/f12[0],label='1-2: %.2f' % midfreq[0])\n plt.loglog(f23,pow23/f23[0],label='2-3: %.2f' % midfreq[1])\n plt.loglog(f34,pow34/f34[0],label='3-4: %.2f' % midfreq[2])\n plt.loglog(f14,pow14/f14[0],label='1-4: %.2f' % midfreq[3])\n plt.legend(loc='lower left')\n plt.grid()\n plt.title('4D Repeatability: SolarB Flat+Pellicle')\n plt.xlabel('Frequency (1/mm)')\n plt.ylabel('Power ($\\mu$m$^2$ mm)')\n\n print midfreq\n\n return f12,pow12", "def generate_worley_noise(width, height, npoints, option, noise_background):\n\n points = [(random.randint(0, width), random.randint(0, height)) for _ in range(npoints)]\n image_worley = np.full((height, width), fill_value=noise_background, dtype=np.float64)\n\n for y in nb.prange(height):\n for x in nb.prange(width):\n distances = [np.sqrt((p[0] - x) ** 2 + (p[1] - 
y) ** 2) for p in points]\n image_worley[y, x] = sorted(distances)[option]\n return image_worley", "def plot_shp(in_polys=None,in_shp=None, ax=None,\r\n extent=None,radius=500., cmap='Dark2',\r\n edgecolor='scaled', facecolor='scaled',\r\n a=None, masked_values=None,\r\n **kwargs):\r\n import matplotlib.pyplot as plt\r\n \r\n if 'vmin' in kwargs:\r\n vmin = kwargs.pop('vmin')\r\n else:\r\n vmin = None\r\n\r\n if 'vmax' in kwargs:\r\n vmax = kwargs.pop('vmax')\r\n else:\r\n vmax = None\r\n\r\n if ax is None:\r\n ax = plt.gca()\r\n cm = plt.get_cmap(cmap)\r\n pc,bpc = shp_to_patchcollection(in_polys=in_polys,in_shp=in_shp,radius=radius)\r\n pc.set(**kwargs)\r\n if a is None:\r\n nshp = len(pc.get_paths())\r\n cccol = cm(1. * np.arange(nshp) / nshp)\r\n if facecolor == 'scaled':\r\n pc.set_facecolor(cccol)\r\n else:\r\n pc.set_facecolor(facecolor)\r\n if edgecolor == 'scaled':\r\n pc.set_edgecolor(cccol)\r\n else:\r\n pc.set_edgecolor(edgecolor)\r\n else:\r\n pc.set_cmap(cm)\r\n if masked_values is not None:\r\n for mval in masked_values:\r\n a = np.ma.masked_equal(a, mval)\r\n if edgecolor == 'scaled':\r\n pc.set_edgecolor('none')\r\n else:\r\n pc.set_edgecolor(edgecolor)\r\n pc.set_array(a)\r\n pc.set_clim(vmin=vmin, vmax=vmax)\r\n # add the patch collection to the axis\r\n ax.add_collection(pc)\r\n \r\n # overlap polygons with white/blank polygons of interior holes\r\n if bpc is not None:\r\n bpc.set_edgecolor('none')\r\n bpc.set_facecolor('w')\r\n ax.add_collection(bpc)\r\n \r\n if (extent is not None):\r\n ax.axis(extent)\r\n else:\r\n ax.axis(poly_bound_to_extent(in_polys))\r\n plt.show()\r\n return ax,pc", "def create_spectral_bandpass_interpol(interpol_wavelen, interpol_rad, center_wvl,\n save_dir):\n\n save_dir = os.path.join(save_dir, r'look_up_table')\n if not os.path.exists(save_dir):\n os.makedirs(save_dir)\n\n\n center_wvl1 = np.arange(min(center_wvl), max(center_wvl), 2)\n\n\n\n\n for j in np.arange(0, interpol_wavelen.shape[1]):\n #print(j)\n dframe = pd.DataFrame()\n wavelen = interpol_wavelen[:, j]\n\n radiance = interpol_rad[:, j]\n sampled_wvl = np.arange(min(wavelen), max(wavelen), 0.01)\n fit_params = interp1d(wavelen, radiance, kind='slinear')\n fitted_val = fit_params(sampled_wvl)\n #peak_val = np.where(fitted_val==max(fitted_val))[0]\n #print(peak_val)\n #peak_shift = sampled_wvl[peak_val] - CW1[j]\n\n\n# if peak_shift >0:\n# sampled_wvl = sampled_wvl - peak_shift\n# elif peak_shift <0:\n# sampled_wvl = sampled_wvl + peak_shift\n# else:\n# sampled_wvl = sampled_wvl\n#\n# print(sampled_wvl[peak_val] - CW1[j])\n\n dframe['Wavelength'] = sampled_wvl\n dframe['Radiance'] = fitted_val\n dframe.round(4).to_csv(save_dir + '/' + 'bandpass_' + \\\n str(round(center_wvl1[j], 2))+'_nm.csv')\n plt.plot(sampled_wvl, fitted_val/np.max(fitted_val), 'g.--')\n plt.grid(True, linestyle=':')\n plt.xlabel('Wavelength (nm)')\n plt.ylabel('Normalized Spectral Response')\n plt.title('TEMPO Spectral Bandpass (WL = ' + str(round(center_wvl1[j], 2)) + ' nm)')\n plt.ylim(0, 1.1)\n plt.xlim(min(wavelen), max(wavelen))\n #plt.show()\n\n # Now let us save the spectral bandpass data and spectral bandpass plot\n plt.savefig(save_dir + '/' + 'bandpass_' + str(round(center_wvl1[j], 2))+'_nm.png',\n dpi=100)\n plt.close('all')", "def triangular_wave_plot(ax=None):\n import numpy as np\n import matplotlib.pyplot as plt\n\n if ax is None:\n fig, ax = plt.subplots(1, 1, figsize=(9, 3))\n t = np.array([-3, -2, -1, 0, 1, 2, 3])*np.pi\n x = np.array([0, 1, 0, 1, 0, 1, 0])\n ax.plot(t, x, linewidth=3, 
label=r'Square wave')\n ax.spines['bottom'].set_position('zero')\n ax.spines['top'].set_color('none')\n ax.spines['left'].set_position('zero')\n ax.spines['right'].set_color('none')\n ax.xaxis.set_ticks_position('bottom')\n ax.yaxis.set_ticks_position('left')\n ax.tick_params(axis='both', direction='inout', which='both', length=5)\n ax.set_xlim((-3*np.pi, 3*np.pi))\n ax.set_ylim((-0.1, 1.1))\n ax.set_xticks(np.linspace(-3*np.pi-0.1, 3*np.pi+0.1, 7))\n ax.set_xticklabels(['$-3\\pi$', '$-2\\pi$', '$-\\pi$', '$0$', '$\\pi$', '$2\\pi$', '$3\\pi$'],\n fontsize=16)\n plt.locator_params(axis='y', nbins=3)\n ax.annotate(r'$t$', xy=(3*np.pi, 0.1), xycoords = 'data', xytext=(0, 0),\n textcoords = 'offset points', size=18, color='k')\n ax.annotate(r'$x[t]$', xy=(.1, 1.03), xycoords = 'data', xytext=(0, 0),\n textcoords = 'offset points', size=18, color='k')\n ax.grid()\n fig.tight_layout()\n\n return ax", "def produce_polygon(polygon_ordered_coordinates: List, zoom: int, plot_polygon: bool = False) -> Path:\n polygon_tile_points = []\n for item in polygon_ordered_coordinates:\n polygon_tile_points += [Utility.get_tile(*item, zoom)]\n polygon_tile_points += [polygon_tile_points[0]]\n polygon = Path(polygon_tile_points)\n if plot_polygon:\n fig = plt.figure()\n ax = fig.add_subplot(111)\n patch = patches.PathPatch(polygon, facecolor='orange', lw=2)\n ax.add_patch(patch)\n ax.set_xlim(min(polygon_tile_points, key = lambda item: item[0])[0], max(polygon_tile_points, key = lambda item: item[0])[0])\n ax.set_ylim(min(polygon_tile_points, key = lambda item: item[1])[1], max(polygon_tile_points, key = lambda item: item[1])[1])\n plt.show()\n return polygon", "def wdraw_polygon(self, wcoords, fill, outline):\r\n dpoints = []\r\n for i in range(0, len(wcoords), 2):\r\n dpoints += self.w_to_d(wcoords[i], wcoords[i+1])\r\n self.canvas.create_polygon(dpoints, fill=fill, outline=outline)", "def wolf_visualize_policy(policy: List[float], player: int):\n src_point = np.asarray(\n [[policy[idx][0], policy[idx][1]] for idx in range(len(policy) - 1)]\n )\n dst_point = np.asarray(\n [[policy[idx + 1][0], policy[idx + 1][1]] for idx in range(len(policy) - 1)]\n )\n\n for src, dst in zip(src_point, dst_point):\n plt.plot([src[0]], [src[1]], marker=\"o\", markersize=3, color=\"red\")\n plt.plot([src[0], dst[0]], [src[1], dst[1]], \"k-\")\n\n plt.plot(\n [dst_point[-1][0]], [dst_point[-1][1]], marker=\"o\", markersize=3, color=\"red\"\n )\n\n plt.xlim((0, 1))\n plt.ylim((0, 1))\n\n plt.xlabel(\"Pr(Rock)\")\n plt.ylabel(\"Pr(Paper)\")\n if player == 1:\n plt.title(\"RPS 1st player policy visualisation\")\n elif player == 2:\n plt.title(\"RPS 2nd player policy visualisation\")\n plt.savefig(f\"q2_wolf_agent{player}_pi.pdf\", format=\"pdf\")\n\n plt.show()" ]
[ "0.57888085", "0.57761914", "0.57043", "0.55698246", "0.55198437", "0.55105144", "0.54731727", "0.54656243", "0.5344655", "0.53189474", "0.5304088", "0.52775407", "0.52313", "0.5193505", "0.5174574", "0.5149485", "0.5147914", "0.51264966", "0.51240855", "0.5112683", "0.50915915", "0.5069218", "0.50590533", "0.5056148", "0.50556946", "0.5047582", "0.50373214", "0.50371784", "0.5027873", "0.50237507" ]
0.65695775
0
Trims white space border of a numpy image.
def _trim_border(img):
    for i in range(img.shape[0]):
        if np.any(img[i, :, :] != 255):
            img = img[i:, :, :]
            break
    for i in range(img.shape[0] - 1, 0, -1):
        if np.any(img[i, :, :] != 255):
            img = img[: i + 1, :, :]
            break
    for i in range(img.shape[1]):
        if np.any(img[:, i, :] != 255):
            img = img[:, i:, :]
            break
    for i in range(img.shape[1] - 1, 0, -1):
        if np.any(img[:, i, :] != 255):
            img = img[:, : i + 1, :]
            break
    return img
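A minimal, self-contained sketch of the same trimming idea, using a vectorized bounding-box crop instead of the four scan loops; only NumPy is assumed, and the toy image below is purely illustrative.

import numpy as np

# Toy 8x10 RGB image: all white except a 3x4 colored patch.
img = np.full((8, 10, 3), 255, dtype=np.uint8)
img[2:5, 3:7] = [10, 20, 30]

# Vectorized equivalent of the border trim: keep the bounding box of every
# pixel that is not pure white on all three channels.
non_white = np.argwhere(np.any(img != 255, axis=2))
(r0, c0), (r1, c1) = non_white.min(axis=0), non_white.max(axis=0)
trimmed = img[r0:r1 + 1, c0:c1 + 1]

print(trimmed.shape)  # (3, 4, 3): only the colored patch remains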
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reduce_whitespace(self, border: int = 5) -> None:\n if self.img is None:\n raise FileExistsError(\"Load an image first with from_url.\")\n\n pix = np.asarray(self.img)\n\n pix = pix[:, :, 0:3] # Drop the alpha channel\n idx = np.where(pix - 255)[0:2] # Drop the color when finding edges\n bbox = list(map(min, idx))[::-1] + list(map(max, idx))[::-1]\n larger_box = add_whitespace(bbox, border)\n\n self.img = self.img.crop(larger_box)", "def remove_border(src): #---- remove blank border\r\n rows = src.shape[0]; VMIN= 0; VMAX= rows; \r\n cols = src.shape[0]; UMIN= 0; UMAX= cols;\r\n for ky in range(1,rows):\r\n sum0 = np.sum(src[ky,:,:]);\r\n sum1 = np.sum(src[rows-ky-1,:,:]);\r\n if sum0== 0 and VMIN== ky-1: VMIN= ky;\r\n if sum1== 0 and VMAX== rows-ky+1: VMAX= rows-ky;\r\n for kx in range(1,cols):\r\n sum0 = np.sum(src[:,kx,:]);\r\n sum1 = np.sum(src[:,cols-kx-1,:]);\r\n if sum0== 0 and UMIN== kx-1: UMIN= kx;\r\n if sum1== 0 and UMAX== cols-kx+1: UMAX= cols-kx;\r\n #--- --- \r\n DV = np.minimum(VMIN, rows-VMAX);\r\n DU = np.minimum(UMIN, cols-UMAX);\r\n return src[DV:(rows-DV), DU:(cols-DU), :];", "def trim(im):\n \n bg = Image.new(im.mode, im.size, im.getpixel((0,0)))\n diff = ImageChops.difference(im, bg)\n diff = ImageChops.add(diff, diff, 2.0, -100)\n bbox = diff.getbbox()\n if bbox:\n return im.crop(bbox)", "def _trim_margins(self, img):\n oldsize = (0, 0)\n while oldsize != img.shape: # while the size is changing\n oldsize = img.shape\n for i in range(4): # 4 times\n img = num.rot90(img) # rotate 90\n if num.std(img[0, :]) < self.trim_std: # if low std\n img = img[1:, :] # trim edge\n\n return img", "def cut_array_border(array): \n array[:, [0, array.shape[1]-1]]=0\n array[[0, array.shape[0]-1], :]=0\n \n \n return array", "def trim_image(image):\n bbox = image.getbbox()\n return image.crop(bbox)", "def trim(self):\n result = library.MagickTrimImage(self.wand)\n if not result:\n self.raise_exception()", "def trim_floating_solid(im):\n holes = find_disconnected_voxels(~im)\n im[holes] = True\n return im", "def remove_dark_background(self, image_array):\n\n cut_off = self.get_image_balance(image_array, False)\n if cut_off < 200:\n cut_off = 200\n new_array = image_array.copy()\n new_array.setflags(write=1)\n for row_number, each_row in enumerate(new_array):\n for pixel_number, each_pixel in enumerate(each_row):\n if reduce(lambda x, y: int(x) + int(y), each_pixel[:3]) / 3 > cut_off:\n new_array[row_number][pixel_number] = image_array[row_number][pixel_number]\n else:\n new_array[row_number][pixel_number] = [0, 0, 0] # Black\n return new_array", "def trim(x):\n # make sure we get a 3D stack not 2D slice\n assert (x.shape) != 3\n if x.shape[-1] > 576:\n newx = x[:,32:-32, 32:-32]\n else:\n newx = x\n return newx[np.newaxis,...]", "def trim_whitespace(matrix, details, min_gap):\r\n if details == -1:\r\n row = matrix[0, ]\r\n else:\r\n row = matrix[matrix.shape[0] - 1, ]\r\n\r\n min_left = np.argmin(row)\r\n min_right = np.argmin(row[::-1])\r\n\r\n if min_left > min_gap:\r\n matrix = matrix[:, min_left - min_gap:]\r\n\r\n if min_right > min_gap:\r\n matrix = matrix[:, 0:len(row) - (min_right - min_gap)]\r\n\r\n return matrix", "def crop_white_space(image, threshold=255):\n # una maschera di valori booleani. 
Ha la stessa struttura dell'immagine.\n # True se il pixel non e' bianco.\n img_mask = image < threshold\n # mask.any(1), mask.any(0) producono rispettivamente le maschere per righe e colonne:\n # True se la riga (o la colonna) contiene almeno un pixel nero.\n # sono monodimensionali.\n row_mask = img_mask.any(1)\n col_mask = img_mask.any(0)\n # np.ix_ costruisce gli indici che genereranno il prodotto fra le due maschere\n return image[np.ix_(row_mask, col_mask)]", "def remove_padding(im, pad):\n\n return im[pad:-pad, pad:-pad]", "def remove_border_vals(img, x: torch.Tensor, y: torch.Tensor, c: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: \r\n\r\n new_x = img.shape[3] - 8\r\n new_y = img.shape[2] - 8\r\n \r\n mask = x.ge(8) & x.le(new_x) & y.ge(8) & y.le(new_y)\r\n x = torch.masked_select(x, mask)\r\n y = torch.masked_select(y, mask)\r\n c = torch.masked_select(c, mask)\r\n\r\n return x, y, c", "def _clip(image):\r\n return np.clip(image, 0, 255).astype(np.uint8)", "def process_image(im, border_size=5, im_size=50):\n\n\tim = im[border_size:-border_size, border_size:-border_size]\n\n\t\n\t'''for i in range(0,len(im)):\n\t\tfor j in range(0,len(im[i])):\n\t\t\tim[i][j] = 255 if im[i][j] > 64 else 0'''\n\t\t\t\t\n\tim = resize(im, (im_size, im_size))\n\n\treturn im", "def image_to_black_and_white(image_array, cut_off):\n new_array = image_array.copy()\n new_array.setflags(write=1)\n for row_number, each_row in enumerate(new_array):\n for pixel_number, each_pixel in enumerate(each_row):\n if reduce(lambda x, y: int(x) + int(y), each_pixel[:3]) / 3 > cut_off:\n new_array[row_number][pixel_number] = [255, 255, 255] # White\n else:\n new_array[row_number][pixel_number] = [0, 0, 0] # Black\n return new_array", "def strip(self):\n result = library.MagickStripImage(self.wand)\n if not result:\n self.raise_exception()", "def remove_background(img):\n mask = np.zeros(img.shape[:2], np.uint8)\n bgdModel = np.zeros((1, 65), np.float64)\n fgdModel = np.zeros((1, 65), np.float64)\n rect = (50, 50, 450, 290)\n cv.grabCut(img, mask, rect, bgdModel, fgdModel, 5, cv.GC_INIT_WITH_RECT)\n mask2 = np.where((mask == 2)|(mask == 0), 0, 1).astype('uint8')\n img = img*mask2[:, :, np.newaxis]\n return img", "def remove_border(contour, ary):\n # Use a rotated rectangle (should be a good approximation of a border).\n # If it's far from a right angle, it's probably two sides of a border and\n # we should use the bounding box instead.\n c_im = np.zeros(ary.shape)\n r = cv2.minAreaRect(contour)\n degs = r[2]\n if angle_from_right(degs) <= 10.0:\n box = cv2.boxPoints(r)\n box = np.int0(box)\n cv2.drawContours(c_im, [box], 0, 255, -1)\n cv2.drawContours(c_im, [box], 0, 0, 4)\n else:\n x1, y1, x2, y2 = cv2.boundingRect(contour)\n cv2.rectangle(c_im, (x1, y1), (x2, y2), 255, -1)\n cv2.rectangle(c_im, (x1, y1), (x2, y2), 0, 4)\n\n return np.minimum(c_im, ary)", "def strip_zeros(a):\n\n return np.trim_zeros(a, trim='b')", "def cut_transformed_array_borders(array): \n for col in range(array.shape[1]): \n col_=array[:, col]\n \n where=np.where(col_>0)\n \n if len(where[0])>0:\n \n col_[[np.min(where[0]),np.min(where[0])+1, np.max(where[0]), np.max(where[0])-1 ]]=0\n \n array[:,col]=col_\n \n for row in range(array.shape[0]): \n row_=array[row,:]\n \n where=np.where(row_>0)\n if len(where[0])>0:\n\n row_[[np.min(where[0]),np.min(where[0])+1, np.max(where[0]), np.max(where[0])-1 ]]=0\n \n array[row,:]=row_\n \n return array", "def trim2DArray(input_arr, threshold=0): \n\n print(input_arr.shape)\n ul_x, ul_y = 
calculateTrimOffsetForward(input_arr, threshold)\n lr_x, lr_y = calculateTrimOffsetBackward(input_arr, threshold)\n\n output_arr = input_arr[ul_y:lr_y, ul_x:lr_x]\n\n print(output_arr.shape)\n\n return (output_arr)", "def clean(img):\n\n label_img = label(img, connectivity=2)\n props = sorted(regionprops(label_img), key=lambda x: x.area)\n clean = morphology.binary_closing(img)\n\n clean = morphology.remove_small_holes(clean)\n return morphology.remove_small_objects(clean,\n int(np.floor(props[-1].area) / 10), connectivity=2)", "def remove_color(image):\n return image[:, :, 0]", "def remove_color(image):\n return image[:, :, 0]", "def removeMinimum(img, x, y, w, h):\n minmax = cvMinMaxLoc(img)\n cvRectangle(img, cvPoint(x-int(w/2), y-int(h/2)), cvPoint(x+int(w/2), y+int(h/2)), cvScalar(minmax[1], 0, 0, 0), CV_FILLED)", "def remove_border(input_path, output_path):\n if os.path.exists(output_path):\n shutil.rmtree(output_path)\n os.makedirs(output_path)\n img_fn_list = get_images(input_path)\n epsilon = 0.0001\n for img_fn in img_fn_list:\n print('===============')\n print(img_fn)\n start = time.time()\n try:\n img_gray = cv2.imread(img_fn,cv2.IMREAD_GRAYSCALE)\n except:\n print(\"Error reading image {}!\".format(img_fn))\n continue\n h, w = img_gray.shape[:2]\n img_blank = np.ones(shape=[h, w], dtype=np.uint8)*255\n img_binary = cv2.threshold(img_gray, 128, 255, cv2.THRESH_BINARY)[1]\n _, contours, _ = cv2.findContours(img_binary,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)\n area = []\n for cnt in contours:\n approx = cv2.approxPolyDP(cnt,epsilon*cv2.arcLength(cnt,True),True)\n area .append(cv2.contourArea(cnt))\n # sort by contour area\n top_cnt_area = np.argsort(-1*np.array(area))\n # drawing has not been pre-processed\n # select the thrid largest contour which fit the drawing broader\n ind = top_cnt_area[2]\n approx = cv2.approxPolyDP(contours[ind],epsilon*cv2.arcLength(contours[ind],True),True)\n cv2.drawContours(img_blank, [approx], 0, (0), thickness = -1, lineType=8)\n # combine image with masks\n img_gray = cv2.bitwise_or(img_blank, img_gray)\n cv2.imwrite(os.path.join(output_path, os.path.basename(img_fn)), img_gray)", "def trim_zeros(x):\n assert len(x.shape) == 2\n return x[~np.all(x == 0, axis=1)]", "def mask2trimap(self, mask):\n fg_mask = (mask > 0).float()\n bg_mask = (mask < 0).float()\n trimap_width = getattr(self.opt, 'trimap_width', 20)\n trimap_width *= bg_mask.shape[-1] / self.opt.width\n trimap_width = int(trimap_width)\n bg_mask = cv2.erode(bg_mask.numpy(), kernel=np.ones((trimap_width, trimap_width)), iterations=1)\n bg_mask = torch.from_numpy(bg_mask)\n mask = fg_mask - bg_mask\n return mask" ]
[ "0.72116804", "0.71161026", "0.7112761", "0.6987696", "0.68486035", "0.68222636", "0.6805708", "0.6737506", "0.65461594", "0.6485702", "0.64720446", "0.6417604", "0.6281844", "0.6267445", "0.62330204", "0.6230816", "0.6140212", "0.61345065", "0.61139953", "0.60943305", "0.60912275", "0.6086031", "0.6050355", "0.6050102", "0.60269153", "0.60269153", "0.5984107", "0.59795123", "0.59511554", "0.5944015" ]
0.805428
0
Return a set of unique field values from a list of DICOM files
def get_unique_field_values(dcm_file_list, field_name):
    field_values = set()
    for dcm in dcm_file_list:
        field_values.add(str(DicomFile(dcm).get_attributes(field_name)))
    return field_values
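A hedged, self-contained variant of the same idea written directly against pydicom instead of the project's DicomFile wrapper (whose API is not shown here); the tag name and file paths in the commented call are placeholders.

import pydicom

def unique_field_values(dcm_paths, field_name):
    values = set()
    for path in dcm_paths:
        header = pydicom.dcmread(path, stop_before_pixels=True)
        # getattr keeps a missing tag from raising; None marks absent fields.
        values.add(str(getattr(header, field_name, None)))
    return values

# unique_field_values(['scan_000.dcm', 'scan_001.dcm'], 'PatientID')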
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fastLoad(f_list):\n\n data_list = []\n t_1 = datetime.now()\n for i, f in enumerate(f_list):\n t_data = loadFile(f)\n data_list.extend(t_data)\n data_list = [dict(r) for r in set([tuple(d.items()) for d in data_list])]\n print i, datetime.now() - t_1, \"removing duplicates...\"\n print \"Done removing duplicates.\"\n return data_list", "def getGuids(fileList):\n\n guids = []\n # loop over all files\n for thisfile in fileList:\n guids.append(str(thisfile.getAttribute(\"ID\")))\n\n return guids", "def _all_data_fields(field):\n all_fields = PhotoTech.objects.all().values()\n return list(set([all_fields[x][field]\n for x in range(len(all_fields))]))", "def unique_files(event_list):\n\n if isinstance(event_list, dcase_util.containers.MetaDataContainer):\n return event_list.unique_files\n\n else:\n files = {}\n for event in event_list:\n if 'file' in event:\n files[event['file']] = event['file']\n\n elif 'filename' in event:\n files[event['filename']] = event['filename']\n\n files = list(files.keys())\n files.sort()\n return files", "def read_cuis(file_path):\n\n file_as_string = open(file_path).read()\n return set(file_as_string.split())", "def get_fields(self):\n \n fields = []\n for img in self.img_lst:\n fields += img.get_fields()\n \n fields = list(set(fields))\n \n return fields", "def get_unique_values(local_data, attr):\n\tvalues = []\n\tfor element in local_data:\n\t\tif element[attr] not in values:\n\t\t\tvalues.extend([element[attr]])\n\treturn values", "def select_unique_ids(self):\n utk = self.metadata\n utk_ids = []\n for gg in set(utk['gender']):\n for rg in set(utk['race']):\n for ag in set(utk['age']):\n try:\n intersection_ids = list(utk[np.logical_and(utk['gender'] == gg,\n np.logical_and(utk['race'] == rg,\n utk['age'] == ag))]['filename'])\n if len(intersection_ids) <= CAP:\n utk_ids += intersection_ids\n else:\n x = list(np.random.choice(intersection_ids, CAP, replace=False))\n utk_ids += x\n\n except:\n continue\n self.unique_ids = utk_ids\n return utk_ids", "def unique_list(src_list):\n return list(OrderedDict.fromkeys(src_list).keys())", "def _uniq( list ) : \r\n \r\n d = {} \r\n for e in list : \r\n d[e] = 1 \r\n \r\n return d.keys()", "def _find_valid_dicom_files(\n files: Set[Path], file_errors: DefaultDict[Path, List[str]]\n) -> List[DicomDataset]:\n studies = _get_headers_by_study(files=files, file_errors=file_errors)\n result = []\n for key in studies:\n headers = studies[key][\"headers\"]\n set_name = studies[key][\"name\"]\n if not headers:\n continue\n\n n_files = len(headers)\n n_time = len(\n {\n int(header[\"data\"].TemporalPositionIndex)\n for header in headers\n if \"TemporalPositionIndex\" in header[\"data\"]\n }\n )\n sop_class_uids = [header[\"data\"].SOPClassUID for header in headers]\n\n arbitrary_header = headers[0][\"data\"]\n try:\n n_slices_per_file = len(\n arbitrary_header.PerFrameFunctionalGroupsSequence\n )\n except AttributeError:\n n_slices_per_file = int(\n getattr(arbitrary_header, \"NumberOfFrames\", 1)\n )\n n_slices = n_files * n_slices_per_file\n\n if \"1.2.840.10008.5.1.4.1.1.77.1.6\" in sop_class_uids:\n for d in headers:\n file_errors[d[\"file\"]].append(\n format_error(\"WSI-DICOM not supported by DICOM builder\")\n )\n elif n_time < 2:\n # Not a 4d dicom file\n result.append(\n DicomDataset(\n name=set_name,\n headers=headers,\n n_time=None,\n n_slices=n_slices,\n n_slices_per_file=n_slices_per_file,\n )\n )\n elif len(headers) % n_time > 0:\n # Invalid 4d dicom file\n for d in headers:\n file_errors[d[\"file\"]].append(\n 
format_error(\"Number of slices per time point differs\")\n )\n else:\n # Valid 4d dicom file\n result.append(\n DicomDataset(\n name=set_name,\n headers=headers,\n n_time=n_time,\n n_slices=n_slices // n_time,\n n_slices_per_file=n_slices_per_file,\n )\n )\n\n del studies\n return result", "def unique(list1):\n # insert the list to the set\n list_set = set(list1)\n # convert the set to the list\n unique_list = (list(list_set))\n for x in unique_list:\n return(x,)", "def uniq(listinput):\n\t\"\"\" This will be provided for the student. \"\"\"\n\toutput = []\n\tfor x in listinput:\n\t\tif x not in output:\n\t\t\toutput.append(x)\n\treturn output", "def uniqField(self, fieldname):\n\t\tretval = list(set(self.getField(fieldname)))\n\t\tretval.sort()\n\t\treturn retval", "def get_fields_from_id_set(id_set_file: Dict[str, List]) -> List[str]:\n return (\n get_all_incident_and_indicator_fields_from_id_set(id_set_file, \"layout\")\n + [field.lower() for field in BUILT_IN_FIELDS]\n + LAYOUT_AND_MAPPER_BUILT_IN_FIELDS\n )", "def load_data(filename):\r\n f = open(filename)\r\n rawData = np.array([[float(i) for i in line.split()] for line in f.read().splitlines()])\r\n uniqueData = np.array([list(x) for x in set(tuple(x) for x in rawData)])\r\n print('\\n\\nData Overview: rawData :', len(rawData), ' uniqueData :', len(uniqueData))\r\n return rawData, uniqueData", "def unique_list(inlist):\n return set(inlist)", "def group_dicom_files(dicom_paths, hdr_field='PatientID'):\n dicom_groups = defaultdict(list)\n try:\n for dcm in dicom_paths:\n hdr = dicom.read_file(dcm)\n group_key = getattr(hdr, hdr_field)\n dicom_groups[group_key].append(dcm)\n except KeyError as ke:\n raise KeyError('Error reading field {} from file {}.'.format(hdr_field, dcm)) from ke\n\n return dicom_groups", "def load_dcm_series(files: List[str]):\n volume = []\n files.sort(key=get_slice_location)\n for file in files:\n dcm = pydicom.dcmread(file, force=True)\n if not dcm.file_meta.get('TransferSyntaxUID'):\n dcm.file_meta.TransferSyntaxUID = pydicom.uid.ImplicitVRLittleEndian\n volume.append(dcm.pixel_array)\n return files, np.stack(volume)", "def unique(input_list):\n output = []\n for item in input_list:\n if item not in output:\n output.append(item)\n return output", "def loadAllFiles(f_list):\n\n t_1 = datetime.now()\n\n # first list\n i_list = []\n id_list = []\n for f in f_list:\n f_t = loadFile(f)\n i_list.extend(f_t)\n # clean list\n ln_i_list = len(i_list)\n c_list = []\n for i, f in enumerate(i_list):\n if f[\"id\"] not in id_list:\n id_list.append(f[\"id\"])\n c_list.append(f)\n print (i * 100.) 
/ ln_i_list, datetime.now() - t_1, i\n return c_list", "def _read_dataset(a_files):\n return [(list(ifields[TXT_IDX]), ifields[GLD_IDX])\n for ifile in a_files for ifields in iterlines(ifile)]", "def select_unique_ids(self):\n ccd = self.metadata\n ccd_ids = []\n for dg in set(ccd['isDark']):\n for gg in set(ccd['Gender']):\n for sg in set(ccd['Skin']):\n for ag in set(ccd['Age']):\n try:\n intersection_ids = list(ccd[np.logical_and(ccd['isDark'] == dg,\n np.logical_and(ccd['Gender'] == gg,\n np.logical_and(ccd['Skin'] == sg,\n ccd['Age'] == ag)))]['ImageID'])\n if len(intersection_ids) <= CAP:\n ccd_ids += intersection_ids\n else:\n x = list(np.random.choice(intersection_ids, CAP, replace=False))\n ccd_ids += x\n\n except:\n continue\n self.unique_ids = ccd_ids\n return ccd_ids", "def getSet(unique_name):", "def getSet(unique_name):", "def filter_json(in_list):\n\n found_list = []\n for device in in_list:\n for key, val in device.items():\n if key == \"sensor\":\n for k, v in val.items():\n if k == \"id\":\n found_list.append(int(v))\n\n myset = set(found_list) # stop duplicate values being added\n unique_list = list(myset)\n # print (sorted(unique_list))\n return sorted(unique_list)", "def unique_values(self):\n for key in self.metadb.unique_values():\n yield key, self.datadb[key]", "def find_unique_elements(molecule_map):\n atoms = []\n for molec_name in molecule_map.keys():\n atoms += [subst['atom'] for subst in molecule_map[molec_name]]\n return set(atoms)", "def unique(input_list): \n try:\n # intilize a null list \n unique_list = [] \n # traverse for all elements \n for x in input_list: \n # check if exists in unique_list or not \n if x not in unique_list: \n unique_list.append(x)\n return(unique_list)\n except TypeError as detail:\n return (\"int object is not iterable\")", "def _unique(li):\n return list(set(li))" ]
[ "0.59633434", "0.5882781", "0.58684295", "0.5741516", "0.56883377", "0.5653832", "0.56346226", "0.5630092", "0.5630056", "0.562653", "0.5568489", "0.5567887", "0.55291784", "0.5525892", "0.55244976", "0.54796493", "0.5465706", "0.5440886", "0.5437382", "0.54364234", "0.5433216", "0.54275894", "0.538042", "0.53709626", "0.53709626", "0.5362254", "0.5344649", "0.5341862", "0.53023463", "0.529922" ]
0.8470222
0
Returns the set of DICOM files found within root_path.
def find_all_dicom_files(root_path):
    dicoms = set()
    try:
        for fpath in get_all_files(root_path):
            if is_dicom_file(fpath):
                dicoms.add(fpath)
    except IOError as ioe:
        raise IOError('Error reading file {0}.'.format(fpath)) from ioe
    return dicoms
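A hedged, self-contained sketch of the same walk-and-filter pattern, using os.walk and pydicom in place of the repository helpers get_all_files and is_dicom_file; it assumes pydicom is installed.

import os
import pydicom

def find_dicom_files(root_path):
    dicoms = set()
    for dirpath, _dirnames, filenames in os.walk(root_path):
        for name in filenames:
            fpath = os.path.join(dirpath, name)
            try:
                pydicom.dcmread(fpath, stop_before_pixels=True)
            except Exception:
                continue  # anything pydicom cannot parse is skipped
            dicoms.add(fpath)
    return dicoms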
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getFiles(self):\n\t\treturn os.listdir(self.getPath())", "def get_datapaths(input_dir):\n image_paths = []\n assert os.path.isdir(input_dir), f\"{input_dir} is not existed\"\n\n for root, _, names in os.walk(input_dir):\n for name in names:\n path = os.path.join(root, name)\n image_paths.append(path)\n return image_paths", "def find_data(self):\n data_list = []\n for root, dirs, files in os.walk(pathfinder.data_path()):\n for name in files:\n data_list.append(os.path.join(root, name))\n return data_list", "def getDataFiles(directoryName):\r\n \r\n return listdir(directoryName)", "def get_all_files(self):\n\t\tfiles_list = []\n\t\tfor path, subdirs, files in os.walk(self.root):\n\t\t for name in files:\n\t\t \tfiles_list.append(os.path.join(self.root, name))\n\t\treturn files_list[0:-1]", "def getfiles(path): \n global picture_list\n try:\n # dir_list has all files and directories in path\n # any directory is WITHOUT ending '/'\n dir_list = os.listdir(path)\n except:\n # path may not be a directory or permission error\n print \"ERROR: in getfiles, picture_list:\", picture_list\n picture_list = None\n return\n \n for line in dir_list:\n file = path + \"/\" + line\n if os.path.isdir(file):\n getfiles( file) # dig into subdirectory\n elif isPicture(file):\n picture_list.append(file)\n else: \n # neither picture file nor directory; ignore \n pass\n return", "def list_dir(self, path):", "def get_all_files(cwd):\n return os.listdir(cwd)", "def get_lst_images(file_path):\n return [i for i in os.listdir(file_path) if i != '.DS_Store']", "def scandir(path_):\n return os.listdir", "def get_data_images(path):\n\n return sorted(\n [os.path.join(root, filename) for root, dirnames, filenames in os.walk(path) for filename in\n filenames if\n filename.endswith('.jpg') and os.path.getsize(os.path.join(root, filename)) > 0]\n )", "def files_in_dir(path):\n return os.listdir(path)", "def build_files_list(root_dir):\n return [\n os.path.join(dirpath, file_path)\n for dirpath, subdirs, files in os.walk(root_dir)\n for file_path in files\n ]", "def build_files_list(root_dir):\n return [\n os.path.join(dirpath, file_path)\n for dirpath, subdirs, files in os.walk(root_dir)\n for file_path in files\n ]", "def list_all_files(root):\n local_files = []\n for path, dirs, files in os.walk(os_path(root), followlinks=False):\n if len(files) > 0:\n path_wo_root = path[(len(root) + len(slash)):] # remove root part\n local_files.extend([os.path.join(path_wo_root, f) for f in files])\n return local_files", "def listfiles(self, *path):\n dir = self.localpath(*path)\n files = []\n for root, dirs, fnms in os.walk(dir):\n for f in fnms:\n if f[-5:] == '.info' and os.path.exists(os.path.join(root, f[:-5])):\n try:\n _open_file_info(os.path.join(root, f))\n files.append(\n path + tuple(_split_path(\n os.path.relpath(os.path.join(root, f[:-5]), start=dir)\n )))\n except ValueError:\n pass\n return files", "def get_files(self):\n\n for path, dirs, files in os.walk(self.data_path):\n for dir in dirs:\n self.original_files[dir] = []\n self.imitation_files[dir] = []\n for file in os.listdir(path + \"/\" + dir):\n if( \"original\" in file ):\n self.original_files[dir].append(path + \"/\" + dir + \"/\" + file)\n else:\n self.imitation_files[dir].append(path + \"/\" + dir + \"/\" + file)\n\n return", "def _path_files(self):\n\n if not os.path.exists(self.path):\n return None\n\n directory_content = os.listdir(self.path)\n files = []\n\n while len(directory_content) != 0:\n\n if not directory_content[0].startswith(self.path):\n 
directory_obj = os.path.join(self.path, directory_content[0])\n else:\n directory_obj = directory_content[0]\n\n if os.path.isfile(directory_obj):\n files.append(directory_obj)\n elif os.path.exists(directory_obj):\n temp_directory_content = os.listdir(directory_obj)\n for obj in temp_directory_content:\n directory_content.append(os.path.join(directory_obj, obj))\n directory_content.pop(0)\n\n return files", "def get_dataset_files(dataset_info, mode, root):\n basepath = dataset_info.basepath\n base = os.path.join(root, basepath, mode)\n\n # usually of form '{}-of-{}.tfrecord'\n files = sorted(os.listdir(base))\n\n return [os.path.join(base, file) for file in files]", "def _get_files(self, path):\n result = []\n for f in os.listdir(path):\n if os.path.isdir(os.path.join(path, f)):\n result += self._get_files(os.path.join(path, f))\n else:\n result.append(os.path.join(path, f))\n return result", "def read_files(filepath,forc):\n \n dir_all = []\n dir_sub = []\n \n path = os.path.join(str(filepath))\n for dir_name in os.listdir(path):\n filad = str(dir_name)\n sub_path = str(filepath)+filad+'/*.*'\n \n if forc == 'cropped':\n dir_sub = import_image(str(sub_path))\n \n if forc == 'full':\n dir_sub = import_fimage(str(sub_path))\n \n \n dir_sub = np.array(dir_sub)\n \n dir_all.append(dir_sub.transpose())\n \n return(np.array(dir_all))", "def scanDir(dcmdir):\n\n if not enabled():\n raise RuntimeError('dcm2niix is not available or is too old')\n\n dcmdir = op.abspath(dcmdir)\n cmd = f'{dcm2niix()} -b o -ba n -f %s -o . \"{dcmdir}\"'\n series = []\n\n with tempdir.tempdir() as td:\n\n with open(os.devnull, 'wb') as devnull:\n sp.call(shlex.split(cmd), stdout=devnull, stderr=devnull)\n\n files = glob.glob(op.join(td, '*.json'))\n\n if len(files) == 0:\n return []\n\n for fn in files:\n with open(fn, 'rt') as f:\n meta = json.load(f)\n meta['DicomDir'] = dcmdir\n # SeriesDescription is not\n # guaranteed to be present\n if 'SeriesDescription' not in meta:\n meta['SeriesDescription'] = meta['SeriesNumber']\n series.append(meta)\n\n # sort by series number\n def key(s):\n return s.get('SeriesNumber', sys.maxsize)\n\n series = list(sorted(series, key=key))\n\n return series", "def listFiles(root):\n for dirpath, dirnames, filenames in os.walk(root):\n for file in filenames:\n yield os.path.join(dirpath, file)", "def filelist(root):\n allfiles = []\n for path, subdirs, files in os.walk(root):\n for name in files:\n if name.find(\"xls\") >= 0:\n allfiles.append(os.path.join(path, name))\n return allfiles", "def getAllFiles(self):\n\n\t\treturn self.getFilesForDirs([])", "def list_files(path):\n ls_output = os.listdir(path)\n return ls_output", "def listDir(path):\n filenames = []\n for root, dirs, files in os.walk(path):\n for i in files:\n filenames.append(os.path.join(root, i))\n return filenames", "def getGlobusFiles(self):\n\t\treturn self.transfer_client.operation_ls(self.transfer_client.endpoint_search(DATA_ENDPOINT_NAME)[0]['name'])", "def get_file_list(rootdir): #{{{\n file_list = []\n for f in os.listdir(rootdir):\n if f == None or not f.endswith(\".csv\"):\n continue\n file_list.append(os.path.join(rootdir, f))\n \n return file_list", "def read_paths(path):\n images = [[] for _ in range(2)]\n for dirname, dirnames, _ in os.walk(path):\n for subdirname in dirnames:\n filepath = os.path.join(dirname, subdirname)\n for filename in os.listdir(filepath):\n try:\n imgpath = str(os.path.join(filepath, filename))\n images[0].append(imgpath)\n limit = re.findall('[0-9]+', filename)\n 
images[1].append(limit[0])\n except IOError as err:\n print(\"I/O error\")\n except:\n print(\"I/O error 2\")\n raise\n return images" ]
[ "0.6847716", "0.6676826", "0.66070926", "0.65441406", "0.65221864", "0.65035594", "0.6451133", "0.6429791", "0.6397234", "0.6359657", "0.63431734", "0.6332761", "0.63222665", "0.63222665", "0.6319204", "0.63162965", "0.63142794", "0.6311132", "0.62893206", "0.62882096", "0.62783235", "0.62765384", "0.6269318", "0.62659794", "0.62656575", "0.6246176", "0.6227571", "0.62231296", "0.6213558", "0.6211634" ]
0.7571755
0
Tries to read the file using dicom.read_file; if the file exists and dicom.read_file does not raise an Exception, returns True. False otherwise.
def is_dicom_file(filepath):
    if not os.path.exists(filepath):
        raise IOError('File {} not found.'.format(filepath))
    filename = os.path.basename(filepath)
    if filename == 'DICOMDIR':
        return False
    try:
        _ = dicom.read_file(filepath)
    except Exception as exc:
        log.debug('Checking if {0} was a DICOM, but returned '
                  'False.'.format(filepath))
        return False
    return True
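As a hedged aside, a cheaper probe is possible because standard DICOM Part 10 files carry a 128-byte preamble followed by the ASCII marker DICM; some older files omit the preamble, which is why actually parsing the file, as above, is the more robust check.

def looks_like_dicom(filepath):
    with open(filepath, 'rb') as fh:
        fh.seek(128)  # skip the 128-byte preamble
        return fh.read(4) == b'DICM'

# looks_like_dicom('scan_000.dcm')  # path is a placeholder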
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def file_exists(file):\n try:\n Cryptography.read(file)\n return True\n except (FileNotFoundError, FileExistsError):\n return False", "def FileCheck(fn):\n try:\n open(fn, \"r\")\n return 1\n except IOError:\n print(\"Error: File does not exist.\")\n return 0", "def checkFileExistance(filePath):\n\n try:\n with open(filePath, 'r') as f:\n logger.info(\"Se encontro {}\".format(filePath))\n return True\n except FileNotFoundError as e:\n return False\n except IOError as e:\n return False", "def file_exists(path):\n\n try:\n with open(path):\n return True\n except IOError:\n return False", "def _file_exists(name):\n try:\n f = open(name)\n f.close()\n return True\n except IOError:\n return False", "def check_file_exist(self):\n return False", "def fileExists(fileName):\n try:\n fileOpen = open(fileName, 'rt')\n fileOpen.close()\n except FileNotFoundError:\n return False\n else:\n return True", "def read(self, filename):\n try:\n with open(filename, 'r') as _file:\n self.readstream(_file)\n self._filename = filename\n return True\n\n except IOError:\n self._reset()\n return False", "def is_file_exists(self):\n pass", "def check_file(filename: str):\n if os.path.isfile(filename):\n return True\n else:\n raise FileExistsError", "def fileCheck(filePath):\n if not os.path.isfile(filePath):\n return False\n return True", "def _does_file_exist(file_path):\n return os.path.exists(file_path) and os.path.getsize(file_path) > 0", "def file_exist() -> bool:\n pass", "def file_checker(file_name):\n if os.path.islink(file_name):\n print \"Crypto device Symlink %s exists\" % file_name\n return True\n else: \n try:\n with open(file_name):\n print \"File %s exists\" % file_name\n return True\n except IOError:\n print \"File %s does not exists\" % file_name\n return False", "def file_exists(self):\r\n if os.path.exists(self.file_path):\r\n return True\r\n else:\r\n return False", "def _check_valid_file(self, file):\n\n try:\n _ = open(f\"{file}\")\n except FileNotFoundError:\n raise ValueError", "def can_handle(file_io):\r\n\r\n try:\r\n file_io.seek(0)\r\n parsed = etree.parse(file_io)\r\n except XMLSyntaxError:\r\n # IF etree can't parse it, it's not our file.\r\n return False\r\n can_handle = False\r\n can_handle = DelXMLImporter._is_delicious_format(parsed,\r\n can_handle)\r\n\r\n # make sure we reset the file_io object so that we can use it again\r\n return can_handle", "def read_file(self):\n try:\n self.json_parsed_file = parse_progress_report(self.in_file)\n self.output_message += 'Student: {}, {}\\n'.format(self.json_parsed_file['id'],\n self.json_parsed_file['name'])\n return True\n\n except IOError:\n self.output_message += \"File does not exist\\n\"\n self.is_parsed_pdf_valid = False\n return False\n\n except TypeError:\n self.output_message += \"There is an issue with the file\\n\"\n self.is_parsed_pdf_valid = False\n return False", "def fileExist(file):\r\n return os.path.exists(file) and os.path.isfile(file)", "def checkFile(filename):\n\n\tfileRepo = repertoire + filename + extension # Position du fichier\n\n\ttry: # Essaye d'ouvir en lecture\n\t\ttest = open(fileRepo, \"r\")\n\texcept: # Si on arrive pas a ouvrir le fichier\n\t\treturn 0 # Indique le fichier non existant\n\telse: # Sinon, si le fichier s'est ouvert\n\t\ttest.close() # S'assure de fermer le fichier\n\t\treturn 1 # Indique que le fichier existe", "def test_is_delicious_file(self):\r\n good_file = self._get_del_file()\r\n self.assertTrue(\r\n DelXMLImporter.can_handle(good_file),\r\n \"DelXMLImporter should handle this 
file\")\r\n good_file.close()", "def test_file_read_missing_file(self):\n with (self.assertRaises(IOError)):\n FileReader(self.bogus_path).read()", "def fileExist(file):\n return os.path.exists(file) and os.path.isfile(file)", "def fileExist(file):\n return os.path.exists(file) and os.path.isfile(file)", "def fileExist(file):\n return os.path.exists(file) and os.path.isfile(file)", "def fileExist(file):\n return os.path.exists(file) and os.path.isfile(file)", "def test_is_delicious_file(self):\r\n good_file = self._get_del_file()\r\n\r\n self.assertTrue(\r\n DelImporter.can_handle(good_file),\r\n \"DelImporter should handle this file\")\r\n\r\n good_file.close()", "def FileExists(file):\n return os.path.exists(file)", "def _check_file(cls, file: IO[DiskType]):\n if file.closed:\n raise ValueError(f\"File is closed\")", "def isFileExist(file_name):\n return os.path.exists(file_name)" ]
[ "0.67407846", "0.65363544", "0.65280926", "0.6463532", "0.6462749", "0.64249706", "0.631542", "0.62879235", "0.6253404", "0.6228707", "0.6214524", "0.62095505", "0.61903375", "0.6189899", "0.6187227", "0.6137085", "0.61295354", "0.61180586", "0.6101576", "0.60735005", "0.6066212", "0.6028182", "0.60194325", "0.60194325", "0.60194325", "0.60194325", "0.6014321", "0.60028595", "0.5983043", "0.5974513" ]
0.7037445
0
Group all the DICOM files in dicom_paths into a dictionary, keyed by the given `hdr_field` tag value.
def group_dicom_files(dicom_paths, hdr_field='PatientID'):
    dicom_groups = defaultdict(list)
    try:
        for dcm in dicom_paths:
            hdr = dicom.read_file(dcm)
            group_key = getattr(hdr, hdr_field)
            dicom_groups[group_key].append(dcm)
    except KeyError as ke:
        raise KeyError('Error reading field {} from file {}.'.format(hdr_field, dcm)) from ke
    return dicom_groups
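A hedged, self-contained variant of the same grouping written against pydicom's current dcmread API rather than the older dicom.read_file; the tag name and paths in the commented call are placeholders.

from collections import defaultdict
import pydicom

def group_by_tag(dicom_paths, hdr_field='SeriesInstanceUID'):
    groups = defaultdict(list)
    for path in dicom_paths:
        header = pydicom.dcmread(path, stop_before_pixels=True)
        groups[getattr(header, hdr_field)].append(path)
    return groups

# group_by_tag(['slice_000.dcm', 'slice_001.dcm'], 'PatientID')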
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def group_visits(wdir):\n all_files = glob(os.path.join(wdir, '*flc.fits'))\n group = dict()\n for file in all_files:\n visit = fits.getheader(file)['LINENUM'].split('.')[0]\n if visit not in group:\n group[str(visit)] = [str(file)]\n elif visit in group:\n group[str(visit)].append(str(file))\n\n return group", "def _iter_field_paths(grp):\n field_paths = []\n for field_name in grp:\n if isinstance(grp[field_name], h5py.Group):\n for subfield in grp[field_name]:\n\n # if it is a sparse field don't do the subfields since\n # they will be _sparse_idxs and data which are not\n # what we want here\n if field_name not in grp.file['_settings/sparse_fields']:\n field_paths.append(field_name + '/' + subfield)\n else:\n field_paths.append(field_name)\n return field_paths", "def build_filedic(data_path, lanczos_path):\n filedic = {'CERA': sorted(glob.glob(data_path + 'CERA20C/*.nc')),\n 'lanczos(CERA)': sorted(glob.glob(lanczos_path + 'CERA_7*.nc')),\n 'lanczos(20CR)': sorted(glob.glob(lanczos_path + '20CRv3_5*.nc'))}\n return filedic", "def process_dicom_file_list(dicom_file_list, parent_sorting_field=\"PatientName\", verbose=False):\n dicom_series_dict_parent = {}\n\n for i, dicom_file in enumerate(sorted(dicom_file_list)):\n if verbose is True:\n logger.debug(\" Sorting file %d\", i)\n\n dicom_file = dicom_file.as_posix()\n\n if \"dicomdir\" in dicom_file.lower():\n logger.warning(\n \"DICOMDIR is not supported in this tool, images are read directly. Skipping.\"\n )\n continue\n\n dicom_object = pydicom.read_file(dicom_file, force=True)\n\n parent_sorting_field_data = dicom_object[parent_sorting_field].value\n\n if parent_sorting_field_data not in dicom_series_dict_parent.keys():\n dicom_series_dict_parent[parent_sorting_field_data] = {}\n\n series_uid = dicom_object.SeriesInstanceUID\n\n if series_uid not in dicom_series_dict_parent[parent_sorting_field_data].keys():\n dicom_series_dict_parent[parent_sorting_field_data][series_uid] = [dicom_file]\n\n else:\n dicom_series_dict_parent[parent_sorting_field_data][series_uid].append(dicom_file)\n\n return dicom_series_dict_parent", "def collect(dname='.'):\n files = {}\n\n for paths in os.walk(dname):\n for fname in paths[2]:\n flen = len(fname)\n fpath = os.path.join(paths[0], fname)\n try:\n files[flen].append(fpath)\n except KeyError:\n files[flen] = [fpath]\n\n return files", "def group_by_dir(urlist):\n\n dir_groups = {}\n for url in urlist:\n net_subdir, filename = url_unquote(url).rsplit('/',1)\n if net_subdir in dir_groups:\n dir_groups[net_subdir].append((url, filename))\n else:\n dir_groups[net_subdir] = [(url, filename)]\n return dir_groups", "def _get_headers_by_study(\n files: Set[Path], file_errors: DefaultDict[Path, List[str]]\n):\n study_key_type = Tuple[str, ...]\n studies: Dict[study_key_type, Dict[str, Any]] = {}\n indices: Dict[str, Dict[study_key_type, int]] = {}\n\n for file in files:\n if not file.is_file():\n continue\n with file.open(\"rb\") as f:\n try:\n # Read header only, skip reading the pixel data for now\n ds = pydicom.dcmread(f, stop_before_pixels=True)\n\n # Group by series instance uid or by stack ID (for 4D images)\n # Additionally also group by SOP class UID to skip over extra\n # raw data (dose reports for example) that are sometimes stored\n # under the same series instance UID.\n key: study_key_type = (\n ds.StudyInstanceUID,\n getattr(ds, \"StackID\", ds.SeriesInstanceUID),\n ds.SOPClassUID,\n )\n\n studies[key] = studies.get(key, {})\n indices[ds.StudyInstanceUID] = indices.get(\n ds.StudyInstanceUID, {}\n 
)\n\n try:\n index = indices[ds.StudyInstanceUID][key]\n except KeyError:\n index = len(indices[ds.StudyInstanceUID])\n indices[ds.StudyInstanceUID][key] = index\n\n headers = studies[key].get(\"headers\", [])\n headers.append({\"file\": file, \"data\": ds})\n studies[key][\"headers\"] = headers\n\n # Since we might need to combine multiple images with different\n # series instance UID (in 4D images), we cannot use the series\n # as the unique file name - instead, we use the study instance\n # uid and a counter (index) per study\n studies[key][\"name\"] = f\"{ds.StudyInstanceUID}-{index}\"\n\n except Exception as e:\n file_errors[file].append(format_error(str(e)))\n\n return studies", "def group_by_filenames(self):\n package = self.container.config.output.package\n class_map = collections.group_by(self.container, key=get_location)\n groups = self.group_common_paths(class_map.keys())\n\n for keys in groups:\n if len(keys) == 1:\n common_path = os.path.dirname(keys[0])\n else:\n common_path = os.path.commonpath(keys)\n\n for key in keys:\n items = class_map[key]\n suffix = \".\".join(Path(key).parent.relative_to(common_path).parts)\n\n package_name = f\"{package}.{suffix}\" if suffix else package\n self.assign(items, package_name, module_name(key))", "def field_filter(paths, fields, catchall='rois'):\n filt_paths = {} \n\n for p in paths:\n captured = False\n for (name, filts) in fields.items():\n ps = p.strip('/')\n elts = ps.split('/')\n goes_in = False\n for i in range(len(filts)):\n if len(filts[i]):\n if len(elts) > i:\n if elts[i] in filts[i]:\n goes_in = True\n else:\n captured = True\n break\n \n if goes_in:\n filt_paths.setdefault(name, []).append(p)\n captured = True\n\n # If a path was not explicitly captured by a filter rule, and the\n # catchall didn't have any filters, we include it in the catchall\n if not captured and not fields.setdefault(catchall, []):\n filt_paths.setdefault(catchall, []).append(p)\n\n return filt_paths", "def loadObjects(basePath, snapNum, gName, nName, fields):\n result = {}\n\n # make sure fields is not a single element\n if isinstance(fields, six.string_types):\n fields = [fields]\n\n # load header from first chunk\n with h5py.File(gcPath(basePath, snapNum), 'r') as f:\n\n header = dict(f['Header'].attrs.items())\n result['count'] = f['Header'].attrs['N' + nName + '_Total']\n\n if not result['count']:\n print('warning: zero groups, empty return (snap=' + str(snapNum) + ').')\n return result\n\n # if fields not specified, load everything\n if not fields:\n fields = list(f[gName].keys())\n\n for field in fields:\n # verify existence\n if field not in f[gName].keys():\n raise Exception(\"Group catalog does not have requested field [\" + field + \"]!\")\n\n # replace local length with global\n shape = list(f[gName][field].shape)\n shape[0] = result['count']\n\n # allocate within return dict\n result[field] = np.zeros(shape, dtype=f[gName][field].dtype)\n\n # loop over chunks\n wOffset = 0\n\n for i in range(header['NumFiles']):\n f = h5py.File(gcPath(basePath, snapNum, i), 'r')\n\n if not f['Header'].attrs['N'+nName+'_ThisFile']:\n continue # empty file chunk\n\n # loop over each requested field\n for field in fields:\n if field not in f[gName].keys():\n raise Exception(\"Group catalog does not have requested field [\" + field + \"]!\")\n\n # shape and type\n shape = f[gName][field].shape\n\n # read data local to the current file\n if len(shape) == 1:\n result[field][wOffset:wOffset+shape[0]] = f[gName][field][0:shape[0]]\n else:\n 
result[field][wOffset:wOffset+shape[0], :] = f[gName][field][0:shape[0], :]\n\n wOffset += shape[0]\n f.close()\n\n # only a single field? then return the array instead of a single item dict\n if len(fields) == 1:\n return result[fields[0]]\n\n return result", "def records_to_filepaths(\n records: Union[kapture.RecordsCamera, kapture.RecordsWifi, kapture.RecordsLidar, kapture.RecordsGnss],\n kapture_dirpath: str\n) -> Dict[str, str]:\n return {filename: path_secure(path.join(kapture_dirpath, RECORD_DATA_DIRNAME, filename))\n for _, _, filename in kapture.flatten(records)}", "def get_header_file_map(dir_name):\n result = defaultdict(list)\n for filename in get_files(dir_name, \".h\"):\n parts = Path(os.path.dirname(filename)).parts\n include_prefix = os.path.join(parts[-2], parts[-1])\n result[os.path.basename(filename)].append(include_prefix)\n return result", "def images_to_filepaths(images: kapture.RecordsCamera, kapture_dirpath: str) -> Dict[str, str]:\n return records_to_filepaths(images, kapture_dirpath)", "def build_groupings(idir: str) -> dict:\n bkg_group = {key: [ifile for ifile in glob(f'{idir}/*_{key}_*.root')] for key in bkgs}\n pw_group = {key: [ifile for ifile in glob(f'{idir}/{key}*.root')] for key in powhegs}\n wh_pw_group = [ifile for name in wh_powhegs for ifile in glob(f'{idir}/{name}*.root')]\n ungrouped = [ifile for ifile in glob(f'{idir}/*.root') if 'madgraph' in ifile or 'JHU' in ifile]\n\n group = {}\n for key, files in bkg_group.items():\n if len(files) > 0:\n group[key] = files\n\n for key, files in pw_group.items():\n if len(files) > 0:\n group[key] = files\n\n for ifile in ungrouped:\n name = ifile.split('/')[-1].replace('.root', '')\n name = name.split('_SYST')[0].replace('-', '_')\n name = name.replace('_ggH125', '').replace('_VBF125', '').replace('_WH125', '').replace('_ZH125', '')\n group[name] = [ifile]\n\n if len(wh_pw_group) > 0:\n group['wh125_powheg'] = wh_pw_group\n\n return group", "def by_fileid(response):\n return {\n record['fileid']: record for record in response\n }", "def _get_field_path_grp(self, run_idx, traj_idx, field_path):\n\n # check if it is compound\n if '/' in field_path:\n # split it\n grp_name, field_name = field_path.split('/')\n # get the hdf5 group\n grp = self.h5['{}/{}/{}/{}/{}'.format(RUNS, run_idx, TRAJECTORIES, traj_idx, grp_name)]\n # its simple so just return the root group and the original path\n else:\n grp = self.h5\n field_name = field_path\n\n return grp, field_name", "def read_filtered_directory_content(dirpath, *filters):\n def filter_directory_files(dirpath, *filters):\n return it.chain.from_iterable(glob.iglob(dirpath + '/' + filter)\n for filter in filters)\n\n content_dict = {}\n for filename in filter_directory_files(dirpath, *filters):\n content = \"\"\n with open(os.path.join(filename), 'rb') as obj:\n content = obj.read()\n try:\n # If the filter specified binary files then\n # these will need to be base64 encoded so that\n # they can be transferred over RPC and stored in DB\n content.decode('utf-8')\n except UnicodeError:\n content = base64.encode_as_text(content)\n content_dict['base64_encoded_files'] = \\\n content_dict.get(\"base64_encoded_files\", []) + [filename]\n\n content_dict[filename] = content\n return content_dict", "def dicom_headers(acqpath):\n log.info('anonymizer.py dicom_headers {0}'.format(acqpath))\n\n subj_path = path(acqpath)\n log.info('Anonymizing DICOM files in folder {0}'.format(subj_path))\n\n if subj_path.isfile():\n anonymize_dicom_file(subj_path)\n else:\n for ext in 
dicom_file_extensions:\n file_lst = subj_path.glob('*' + ext)\n for dcm_file in file_lst:\n try:\n anonymize_dicom_file(dcm_file)\n except Exception as e:\n log.error('Could not anonymize file ' + dcm_file)\n return -1\n return 0", "def map_files(key):\n \n datadir=os.path.join(os.path.dirname(__file__),'ncnr_sample_data')\n filedict={'empty_1m':os.path.join(datadir,'SILIC001.SA3_SRK_S101'),\n 'empty_4m':os.path.join(datadir,'SILIC002.SA3_SRK_S102'),\n 'empty_cell_1m':os.path.join(datadir,'SILIC003.SA3_SRK_S103'),\n 'blocked_1m':os.path.join(datadir,'SILIC004.SA3_SRK_S104'),\n 'trans_empty_cell_4m':os.path.join(datadir,'SILIC005.SA3_SRK_S105'),\n 'trans_sample_4m':os.path.join(datadir,'SILIC006.SA3_SRK_S106'),\n 'blocked_4m':os.path.join(datadir,'SILIC007.SA3_SRK_S107'),\n 'empty_cell_4m':os.path.join(datadir,'SILIC008.SA3_SRK_S108'),\n 'sample_1m':os.path.join(datadir,'SILIC009.SA3_SRK_S109'),\n 'sample_4m':os.path.join(datadir,'SILIC010.SA3_SRK_S110'),\n 'mask':os.path.join(datadir,'DEFAULT.MASK'),\n 'div':os.path.join(datadir,'PLEX_2NOV2007_NG3.DIV'),\n }\n return filedict[key]", "def get_files(metadata_dir, images_dir, image_format, metadata_format):\n all_metadata_files = [x for x in set(os.listdir(metadata_dir)) if x.endswith(metadata_format)]\n all_image_files = [x for x in set(os.listdir(images_dir)) if x.endswith(image_format)]\n images_and_metadata = {}\n for metadata, image in itertools.product(all_metadata_files, all_image_files):\n if image.split('.')[0] in metadata:\n images_and_metadata[metadata] = image\n return images_and_metadata", "def manage_headers(dem_header_file, header_paths):\n dem_header = parse_dem_header(dem_header_file)\n # find param files containing filename dates\n if len(header_paths) == 2:\n headers = [parse_epoch_header(hp) for hp in header_paths]\n combined_header = combine_headers(headers[0], headers[1], dem_header)\n else:\n # probably have DEM or incidence file\n combined_header = dem_header\n combined_header[ifc.DATA_TYPE] = ifc.DEM\n\n return combined_header", "def get_fileinfo(project_dir: Path, paths: List[str]) -> List[Dict[str, Optional[str]]]:\n data = []\n for path in paths:\n file_path = project_dir / path\n md5 = get_checksum(file_path) if file_path.exists() else None\n data.append({\"path\": path, \"md5\": md5})\n return data", "def groupby(self, extractor, field):\r\n # First we only want to consider values of field which are contained\r\n # in both extractors\r\n subset = self.get_field(field)\r\n other_subset = extractor.get_field(field)\r\n intersection = list(subset.intersection(other_subset))\r\n\r\n # Next we will group the datasets in each extractor together by common\r\n # field values\r\n kwargs = {field: intersection}\r\n results = self.query(**kwargs)\r\n\r\n groups = []\r\n for meta in results:\r\n val = self._match_filter(meta, field)\r\n kwargs.update({field: val})\r\n match = extractor.query(**kwargs)\r\n groups.append((meta, match))\r\n\r\n return groups", "def combine_files(self, patient_dir: Path, channels: str, value: str):\n files = {'gray': medim(patient_dir / f\"{str(patient_dir)[-11:]}_{channels}_{value}.mhd\"),\n 'gt': medim(patient_dir / f\"{str(patient_dir)[-11:]}_{channels}_{value}_gt.mhd\")}\n\n return files", "def list_data_files(data_path=\"./data/*.csv\"):\n dict_files = {}\n files = glob.glob(data_path)\n for file in files:\n freq = file.split(\".\")[1].split(\"_\")[1]\n if freq in list(dict_files.keys()):\n dict_files[freq].append(file)\n else:\n dict_files[freq] = [file]\n return dict_files", "def 
get_stats(filtered_paths : dict) -> dict:\n stats_dict = defaultdict(dict)\n for mag in filtered_paths.keys():\n for spec,paths in filtered_paths[mag].items():\n counter = Counter()\n for path in paths:\n img = cv2.imread(path, -1)\n count = Counter(list(img.ravel()))\n counter += count\n stats_dict[mag][spec] = counter\n print(mag)\n return stats_dict", "def prepare_keys_davis(folder_path):\n print('Reading image path list ...')\n img_path_list = sorted(list(scandir(folder_path, suffix='jpg', recursive=True)))\n keys = [v.split('.jpg')[0] for v in img_path_list] # example: 000/00000000\n\n return img_path_list, keys", "def writeDrizKeywords(hdr, imgnum, drizdict):\n _keyprefix = 'D%03d' % imgnum\n\n for key in drizdict:\n val = drizdict[key]['value']\n if val is None: val = \"\"\n comment = drizdict[key]['comment']\n if comment is None: comment = \"\"\n hdr[_keyprefix + key] = (val, drizdict[key]['comment'])", "def FindLogFiles(base_dir):\n logcat_filter = re.compile(r'^logcat_(\\S+)_(\\d+)$')\n # list of tuples (<device_id>, <seq num>, <full file path>)\n filtered_list = []\n for cur_file in os.listdir(base_dir):\n matcher = logcat_filter.match(cur_file)\n if matcher:\n filtered_list += [(matcher.group(1), int(matcher.group(2)),\n os.path.join(base_dir, cur_file))]\n filtered_list.sort()\n file_map = {}\n for device_id, _, cur_file in filtered_list:\n if device_id not in file_map:\n file_map[device_id] = []\n\n file_map[device_id] += [cur_file]\n return file_map", "def group_fields(r, group_name, fields):\n g = {}\n for f in fields.keys():\n g[fields[f]] = r.pop(f, None)\n r[group_name] = g\n return r" ]
[ "0.5577567", "0.55713826", "0.55147266", "0.533641", "0.5316674", "0.52081", "0.51825505", "0.51507807", "0.51382357", "0.51257515", "0.5118948", "0.5054843", "0.5034878", "0.50288385", "0.5027249", "0.5012812", "0.5007348", "0.49748573", "0.4972229", "0.49720487", "0.4963073", "0.495323", "0.49476925", "0.49439517", "0.49421495", "0.49337763", "0.4933235", "0.48993084", "0.4878129", "0.4859096" ]
0.79826343
0
Decompress all .dcm files recursively found in DICOM_DIR. This uses 'gdcmconv --raw'. It works when 'dcm2nii' shows the `Unsupported Transfer Syntax` error, which is usually caused by missing JPEG2000 support in the dcm2nii build.
def decompress(input_dir, dcm_pattern='*.dcm'):
    dcmfiles = sorted(recursive_glob(input_dir, dcm_pattern))
    for dcm in dcmfiles:
        cmd = 'gdcmconv --raw -i "{0}" -o "{0}"'.format(dcm)
        log.debug('Calling {}.'.format(cmd))
        subprocess.check_call(cmd, shell=True)
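A hedged sketch of the same gdcmconv call issued with an argument list instead of a shell string, which sidesteps quoting problems in paths with spaces; it assumes gdcmconv is on PATH, and the directory in the commented call is a placeholder.

import glob
import os
import subprocess

def decompress_dir(input_dir, dcm_pattern='*.dcm'):
    pattern = os.path.join(input_dir, '**', dcm_pattern)
    for dcm in sorted(glob.glob(pattern, recursive=True)):
        subprocess.check_call(['gdcmconv', '--raw', '-i', dcm, '-o', dcm])

# decompress_dir('/data/study_001')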
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dicom_to_nrrd(self, dicom_root_dir, nrrd_files_dir):\n TEMP_FILE = '/Users/chunwei/Downloads/_TEMP'\n SYSTEM_COMMAND = 'gdcmconv -w {0} {1}'\n\n for i, subject_folder in enumerate(glob.glob(dicom_root_dir + '/*')):\n nrrd_file = nrrd_files_dir + '/'\\\n + re.search(self.KEY_WORD_FLODER, subject_folder).group()\\\n + '_%02d.nrrd' % (i + 1)\n print 'Processing ' + nrrd_file\n\n if not os.path.exists(nrrd_files_dir):\n os.makedirs(nrrd_files_dir)\n\n data_3d = None\n\n dicom_files = glob.glob(subject_folder + '/*')\n for j, dicom_file in enumerate(dicom_files):\n # prompt\n ratio = 100 * float(j)/float(len(dicom_files))\n sys.stdout.write('\\r%d%%' % ratio)\n sys.stdout.flush()\n\n # uncompress the dicom image\n command = SYSTEM_COMMAND.format(dicom_file, TEMP_FILE)\n call(command.split(), shell=False)\n\n # concatenate dicom image layer by layer\n ds = dicom.read_file(TEMP_FILE)\n data = ds.pixel_array\n data_3d = self.concatenate_layers(data_3d, data) # bottom up\n\n # get nrrd options\n options = self.load_dicom_options(TEMP_FILE, len(dicom_file))\n\n # transpose the data\n data_3d = numpy.swapaxes(data_3d, 0, 1)\n data_3d = data_3d[:, :, ::-1]\n\n # write the stack files in nrrd format\n nrrd.write(nrrd_file, data_3d, options)\n\n print", "def scanDir(dcmdir):\n\n if not enabled():\n raise RuntimeError('dcm2niix is not available or is too old')\n\n dcmdir = op.abspath(dcmdir)\n cmd = f'{dcm2niix()} -b o -ba n -f %s -o . \"{dcmdir}\"'\n series = []\n\n with tempdir.tempdir() as td:\n\n with open(os.devnull, 'wb') as devnull:\n sp.call(shlex.split(cmd), stdout=devnull, stderr=devnull)\n\n files = glob.glob(op.join(td, '*.json'))\n\n if len(files) == 0:\n return []\n\n for fn in files:\n with open(fn, 'rt') as f:\n meta = json.load(f)\n meta['DicomDir'] = dcmdir\n # SeriesDescription is not\n # guaranteed to be present\n if 'SeriesDescription' not in meta:\n meta['SeriesDescription'] = meta['SeriesNumber']\n series.append(meta)\n\n # sort by series number\n def key(s):\n return s.get('SeriesNumber', sys.maxsize)\n\n series = list(sorted(series, key=key))\n\n return series", "def convert_directory(dicom_directory, output_folder, compression=True, reorient=True):\n # sort dicom files by series uid\n dicom_series = {}\n for root, _, files in os.walk(dicom_directory):\n for dicom_file in files:\n file_path = os.path.join(root, dicom_file)\n # noinspection PyBroadException\n try:\n if common.is_dicom_file(file_path):\n # read the dicom as fast as possible\n # (max length for SeriesInstanceUID is 64 so defer_size 100 should be ok)\n\n dicom_headers = pydicom.read_file(file_path,\n defer_size=\"1 KB\",\n stop_before_pixels=False,\n force=dicom2nifti.settings.pydicom_read_force)\n if not _is_valid_imaging_dicom(dicom_headers):\n logger.info(\"Skipping: %s\" % file_path)\n continue\n logger.info(\"Organizing: %s\" % file_path)\n if dicom_headers.SeriesInstanceUID not in dicom_series:\n dicom_series[dicom_headers.SeriesInstanceUID] = []\n dicom_series[dicom_headers.SeriesInstanceUID].append(dicom_headers)\n except: # Explicitly capturing all errors here to be able to continue processing all the rest\n logger.warning(\"Unable to read: %s\" % file_path)\n traceback.print_exc()\n\n # start converting one by one\n for series_id, dicom_input in dicom_series.items():\n base_filename = \"\"\n # noinspection PyBroadException\n try:\n # construct the filename for the nifti\n base_filename = \"\"\n if 'SeriesNumber' in dicom_input[0]:\n base_filename = _remove_accents('%s' % 
dicom_input[0].SeriesNumber)\n if 'SeriesDescription' in dicom_input[0]:\n base_filename = _remove_accents('%s_%s' % (base_filename,\n dicom_input[0].SeriesDescription))\n elif 'SequenceName' in dicom_input[0]:\n base_filename = _remove_accents('%s_%s' % (base_filename,\n dicom_input[0].SequenceName))\n elif 'ProtocolName' in dicom_input[0]:\n base_filename = _remove_accents('%s_%s' % (base_filename,\n dicom_input[0].ProtocolName))\n else:\n base_filename = _remove_accents(dicom_input[0].SeriesInstanceUID)\n logger.info('--------------------------------------------')\n logger.info('Start converting %s' % base_filename)\n if compression:\n nifti_file = os.path.join(output_folder, base_filename + '.nii.gz')\n else:\n nifti_file = os.path.join(output_folder, base_filename + '.nii')\n convert_dicom.dicom_array_to_nifti(dicom_input, nifti_file, reorient)\n gc.collect()\n except: # Explicitly capturing app exceptions here to be able to continue processing\n logger.info(\"Unable to convert: %s\" % base_filename)\n traceback.print_exc()", "def convert_to_dicom(file_name):\n\tpath = get_testdata_file(\"CT_small.dcm\")\n\tds = pydicom.dcmread(path)\n\timg = Image.open(file_name+\".bmp\")\n\tnpa = np.asarray(img)\n\tds.PixelData = img.tobytes()\n\tname = update_destination_file_name(file_name)\n\tds.save_as(name+'.dcm')\n\tprint(\"DONE\\t \"+name+\".dcm\")", "def img2dcm_from_bmp(file_name):\n\tname = update_destination_file_name(file_name)\n\tos.system('img2dcm -i BMP '+file_name+'.bmp '+name+'.dcm ')\t\n\tprint(\"DONE\\t \"+name+\".dcm\")", "def decompress_files(self):\n crx2rnx_path = os.path.join(ROOT_DIR, 'CRX2RNX')\n if not os.path.isfile(crx2rnx_path):\n raise OSError('Cannot find CRX2RNX binary in project directory!')\n\n if glob('{}/*'.format(self.__directory)):\n subprocess.run([\"gunzip\", \"-dr\", self.__directory])\n # convert Hatanaka compressed RINEX to standard RINEX\n for f in glob('{}/*.??d'.format(self.__directory)):\n subprocess.run([crx2rnx_path, f])\n else:\n raise RuntimeError(\n 'Could not decompress. 
No files were downloaded from FTP server.')", "def process(mydir, valid):\n os.chdir(f\"/mesonet/data/madis/{mydir}\")\n # Collapse files down.\n for hr in range(0, 24):\n i = 300\n found = False\n while i >= 0:\n fn = f\"{valid:%Y%m%d}_{hr:02.0f}00_{i}.nc\"\n if os.path.isfile(fn):\n if not found:\n found = True\n cmd = [\"cp\", \"-f\", fn, f\"{valid:%Y%m%d}_{hr:02.0f}00.nc\"]\n subprocess.call(cmd)\n os.unlink(fn)\n i -= 1\n # gzip\n files = glob.glob(f\"{valid:%Y%m%d}*nc\")\n if not files:\n LOG.info(\"No files found for %s\", mydir)\n return\n subprocess.call([\"gzip\", *files])\n files = glob.glob(f\"{valid:%Y%m%d}*nc.gz\")\n if not files:\n LOG.info(\"No gzip files found for %s\", mydir)\n return\n # rsync\n cmd = [\n \"rsync\",\n \"-a\",\n \"--remove-source-files\",\n \"--rsync-path\",\n f\"mkdir -p /stage/iemoffline/madis/{mydir}/{valid:%Y} && rsync\",\n *files,\n f\"{HOST}:/stage/iemoffline/madis/{mydir}/{valid:%Y}/\",\n ]\n subprocess.call(cmd)", "def dcm2niix() -> str:\n fsldir = fslplatform.platform.fsldir\n candidates = [\n shutil.which('dcm2niix')\n ]\n\n if fsldir is not None:\n candidates.insert(0, op.join(fsldir, 'bin', 'dcm2niix'))\n\n for c in candidates:\n if c is not None and op.exists(c):\n return c\n\n return 'dcm2niix'", "def dicom_to_nii(acqpath):\n log.info('anonymizer.py dicom_to_nii {0}'.format(acqpath))\n\n subj_path = get_abspath(acqpath)\n\n if subj_path.isfile():\n try:\n subprocess.call('dcm2nii {0}'.format(subj_path), shell=True)\n except Exception as e:\n log.error('Error calling dcm2nii on {0}'.format(subj_path))\n return -1\n\n else:\n for ext in dicom_file_extensions:\n regex = '*' + ext\n if subj_path.glob(regex):\n try:\n subprocess.call('dcm2nii {0}'.format(subj_path.joinpath(regex)), shell=True)\n except Exception as e:\n log.error('Error calling dcm2nii on {0}'.format(subj_path.joinpath(regex)))\n return -1\n\n return 0", "def read_img(img_path):\n img_list=[]\n print('image loading...')\n for _,_,files in os.walk(img_path):\n for f in files:\n if f.find('.dcm')>=0:\n tmp_img=dicom.dcmread(os.path.join(img_path,f))\n tmp_img=tmp_img.pixel_array#[0::2,0::2]\n img_list.append(tmp_img)\n img_data=np.array(img_list)\n print('done')\n return img_data", "def extract_nac_pet(dicom_folder):\n from glob import glob\n import os\n import shutil\n import re\n from nipype.interfaces.dcm2nii import Dcm2nii\n\n def atoi(text):\n return int(text) if text.isdigit() else text\n\n def natural_keys(text):\n return [atoi(c) for c in re.split('(\\d+)', text)]\n\n files = glob(os.path.join(os.path.abspath(dicom_folder), '*'))\n sorted_files = sorted(files, key=natural_keys)\n nac_pet_files = sorted_files[-127:]\n for f in nac_pet_files:\n shutil.copy(f, os.getcwd())\n dcm2nii = Dcm2nii()\n dcm2nii.inputs.source_dir = os.getcwd()\n nii_outputs = dcm2nii.run().outputs.converted_files\n print (nii_outputs)\n return nii_outputs[0]", "def convert_dataset(src_dir, dest_dir):\n subdirs = get_subdirs(src_dir)\n detector = dlib.simple_object_detector(MODEL_PATH)\n for img_dir in tqdm(subdirs):\n\tprint(img_dir)\n jpegs = get_img_paths_in_dir(img_dir)\n target_dir = dest_dir + img_dir.split('/')[-1]\n if not os.path.exists(target_dir):\n os.makedirs(target_dir)\n for src_path in jpegs:\n target_path = target_dir + '/' + src_path.split('/')[-1]\n img = io.imread(src_path)\n dets = detector(img)\n bounding_boxes = get_bounding_boxes(dets)\n if bounding_boxes:\n square_box = find_square_box(bounding_boxes[0])\n if is_valid(square_box, img):\n box = bounding_boxes[0]\n square_box = 
find_square_box(box)\n cropped_img = crop_frame(img, square_box)\n PIL_img = PIL.Image.fromarray(cropped_img)\n resized_img = PIL_img.resize((54,54), PIL.Image.BILINEAR)\n\t\t resized_img.save(target_path)\n print(target_path)\n # grey_img = resized_img.convert('L')\n # grey_img.save(target_path)", "def download_LIDC(output_folder, debug=False):\n\n # Creating config file with path to dataset\n _, config_file = create_config(output_folder, debug, \"fed_lidc_idri\")\n\n # Get patient X study\n patientXstudy = pd.read_json(\n client.get_patient_study(collection=\"LIDC-IDRI\").read().decode(\"utf-8\")\n )\n\n # Get study X series\n series = pd.read_json(\n client.get_series(modality=\"CT\", collection=\"LIDC-IDRI\").read().decode(\"utf-8\")\n )\n\n # Join both of them\n patientXseries = patientXstudy.merge(series).iloc[:]\n\n # there are some images with missing slices. We remove them\n # for reference their loc: 385, 471, 890, 129, 110, 245, 80, 618, 524\n bad_patientID = [\n \"LIDC-IDRI-0418\",\n \"LIDC-IDRI-0514\",\n \"LIDC-IDRI-0672\",\n \"LIDC-IDRI-0146\",\n \"LIDC-IDRI-0123\",\n \"LIDC-IDRI-0267\",\n \"LIDC-IDRI-0085\",\n \"LIDC-IDRI-0979\",\n \"LIDC-IDRI-0572\",\n ]\n patientXseries = patientXseries[~patientXseries[\"PatientID\"].isin(bad_patientID)]\n\n if debug:\n patientXseries = patientXseries[:10]\n\n # Download associated DICOMs\n pool = multiprocessing.Pool(processes=n_cpus)\n downloaded_paths = pool.starmap(\n download_dicom_series,\n zip(patientXseries.SeriesInstanceUID.tolist(), itertools.repeat(output_folder)),\n )\n\n # Download XML annotations\n annotations_path = download_zip_from_url(ANNOTATION_URL, output_folder)\n\n # Unzip everything and remove archives\n zipped_folders = [\n str(p) for p in Path(output_folder).glob(\"./*/\") if str(p).endswith(\".zip\")\n ]\n\n # Check zip integrity, and download corrupted files again\n for zipped_f in zipped_folders:\n try:\n while zipfile.ZipFile(zipped_f).testzip() is not None:\n os.remove(zipped_f)\n download_dicom_series(os.path.splitext(zipped_f)[0], output_folder)\n except zipfile.BadZipFile:\n os.remove(zipped_f)\n download_dicom_series(os.path.splitext(zipped_f)[0], output_folder)\n print(f\"Bad zip file: {zipped_f}\")\n\n for zipped_f in zipped_folders:\n with zipfile.ZipFile(zipped_f, \"r\") as zip_ref:\n zip_file_name = re.sub(\".zip\", \"\", zipped_f)\n # extract only if it does not exist or it is empty\n if not os.path.isdir(zip_file_name) or len(os.listdir(zip_file_name)) == 0:\n os.makedirs(re.sub(\".zip\", \"\", zipped_f), exist_ok=True)\n zip_ref.extractall(zip_file_name)\n os.remove(zipped_f)\n\n # For each patient we record the location of its DICOM\n patientXseries[\"extraction_location\"] = downloaded_paths\n\n # We tie back annotations to the original DICOMS\n xmlfiles = glob.glob(os.path.join(annotations_path, \"tcia-lidc-xml\", \"*\", \"*.xml\"))\n df = pd.DataFrame()\n df[\"annotation_file\"] = xmlfiles\n # We initialize a dask dataframe to speed up computations\n ddf = dd.from_pandas(df, npartitions=8)\n df[\"SeriesInstanceUID\"] = ddf.map_partitions(\n lambda d: d[\"annotation_file\"].apply(get_SeriesUID_from_xml)\n ).compute(scheduler=\"processes\")\n df = df[df.SeriesInstanceUID != \"not found\"]\n df = df[df.SeriesInstanceUID != \"notfound\"]\n # there are several xml files which have the same seriesInstanceUID\n # but the same content, therefore here df has len of 1026.\n # Next, we are removing the duplicates. 
The correct number of files will be now 1018\n df = df.drop_duplicates(subset=[\"SeriesInstanceUID\"], keep=\"first\")\n patientXseries = df.merge(patientXseries, on=\"SeriesInstanceUID\")\n # Update yaml file\n write_value_in_config(config_file, \"download_complete\", True)\n return patientXseries", "def dicom_load():\n # Identify folders with EPI data\n dirs = [i for i in os.listdir(dcm_dir) if os.path.isdir(os.path.join(dcm_dir, i))]\n d_cnt = 0\n for d in dirs:\n dcm_file = os.path.join(dcm_dir,d,os.listdir(os.path.join(dcm_dir,d))[0])\n try:\n dcm_data = pydicom.dcmread(dcm_file)\n except:\n pass\n else:\n # If data is EPI then get start time, etc\n if 'EPI' in dcm_data.ImageType:\n dcm_dict[d_cnt] = {}\n dcm_dict[d_cnt]['dcm_file'] = dcm_file\n dcm_dict[d_cnt]['task_name'] = dcm_data.SeriesDescription\n dcm_dict[d_cnt]['task_name'] = dcm_dict[d_cnt]['task_name'].replace('_','-')\n date = dcm_data.SeriesDate\n start = dcm_data.SeriesTime\n start_time = '%s-%s-%s %s:%s:%s'%(date[0:4],date[4:6],date[6:],start[0:2],start[2:4],start[4:])\n dcm_dict[d_cnt]['start_time'] = datetime.fromisoformat(start_time)\n dcm_dict[d_cnt]['run_length'] = dcm_data[0x0019,0x105a].value/1000\n dcm_dict[d_cnt]['end_time'] = dcm_dict[d_cnt]['start_time'] + timedelta(milliseconds=dcm_dict[d_cnt]['run_length'])\n d_cnt = d_cnt+1", "def get_dems(src_dir):\n \n method_dir, pair_dir = os.path.split(src_dir)\n _, method = os.path.split(method_dir)\n \n method_patterns = {'pairs': ('*_dem.tif',),\n 'pc_align_reg': ('*_dem.tif', '*DEM.tif'),\n 'icesat_reg': ('*dem_reg.tif',),\n 'nuth_reg': ('*dem.tif', '*dem_trans.tif')}\n \n # Get all DEMs in srcdir (should be two)\n dems = []\n for pattern in method_patterns[method]:\n dems_pattern = os.path.join(src_dir, pattern)\n dems.extend(glob.glob(dems_pattern))\n \n return dems", "def decompose_images(data_dir, output_dir, save_individually, **kwargs):\n\n # parse parameters\n opt = TestOptions()\n opt.parse(kwargs)\n # print(kwargs)\n\n # torch setting\n pytorch_settings.set_(with_random=False, determine=True)\n\n # visualize\n V.create_a_visualizer(opt)\n\n # NIID-Net Manager\n model = create_model(opt)\n model.switch_to_eval()\n\n # List all image files in the directory (exclude subdirectory)\n image_file_list = list_files(data_dir, ['jpg', 'jpeg', 'png', 'tif', 'JPG'])\n print('Total image in the directory %s: %d' % (data_dir, len(image_file_list)))\n\n # Decompose images\n for file_name in image_file_list:\n # Read image\n img_path = os.path.join(data_dir, file_name)\n o_img = Image.open(img_path)\n o_img = o_img.convert(\"RGB\")\n\n # Resize input image\n # input_img = resize_image(o_img)\n input_img = o_img\n\n # Predict\n input_img = TF.to_tensor(input_img).unsqueeze(0)\n pred_N, pred_R, pred_L, pred_S, rendered_img = model.predict({'input_srgb': input_img}, normal=True, IID=True)\n\n # Save results\n idx = 0\n pred_imgs = {\n 'pred_N': pred_N[idx].cpu(),\n 'pred_R': pred_R[idx].cpu(),\n 'pred_L': pred_L[idx].cpu(),\n 'pred_S': pred_S[idx].cpu(),\n 'rendered_img': rendered_img[idx].cpu(),\n 'input_srgb': input_img[idx],\n }\n f = '%s_decomposed' % (file_name[:file_name.rfind('.')])\n image_util.save_intrinsic_images(output_dir, pred_imgs, label=f, individual=save_individually)\n torch.save(pred_imgs, os.path.join(output_dir, f+'.pth.tar'))\n print('Decompose %s successfully!' 
% file_name)", "def download_cif(output_dir: str, query: dict = None, config: typing.Dict[str, typing.Any] = None) -> None:\n\n # Initialise inputs.\n if config is None:\n config = {}\n if output_dir is None:\n output_dir = 'pullcif/data'\n\n # If query is empty, then search all material ids.\n if query is None:\n query = search_all_id()\n\n # To designate output path, if it isn't a directory, then create it.\n _output_dir = Path(output_dir)\n if not _output_dir.is_dir():\n _output_dir.mkdir(parents=True)\n\n # To loop all query ids.\n for id in query:\n url = 'https://materialsproject.org/materials/' + id + '/cif?type=symmetrized'\n\n # To get cif files.\n res = requests.get(url)\n\n # To query material reduced formulae.\n material_property = MPR(get_api_key()).query(criteria=id, properties=['material_id', 'pretty_formula'])\n formula = material_property[0]['pretty_formula']\n material_id = material_property[0]['material_id']\n\n # To write and save files in designated path.\n fileName = '\\\\' + material_id + '_' + formula + '.cif'\n cif_file = open(output_dir + fileName, 'w')\n cif_file.write(res.text)\n cif_file.close()", "def uncompress(file_path, dname):\n\n supported_types = [ \"Shapefile\", \"CSV\" ]\n\n if not os.path.isdir(dname):\n with zipfile.ZipFile(file_path, 'r') as zout:\n zout.extractall(dname)\n\n filepaths = {}\n for root, dirs, files in os.walk(dname):\n for f in files:\n path = os.path.join(dname, root, f)\n filepaths[path] = magic.from_file(path)\n\n for f, t in filepaths.items():\n for supported_type in supported_types:\n if supported_type in t:\n return f, supported_type", "def convert(src: str, tag: str, size: int = 0, unzip=unzip):\n t1 = glob.glob(f'{src}/*GG/*/*t1.nii.gz')\n t2 = glob.glob(f'{src}/*GG/*/*t2.nii.gz')\n flair = glob.glob(f'{src}/*GG/*/*flair.nii.gz')\n t1ce = glob.glob(f'{src}/*GG/*/*t1ce.nii.gz')\n seg = glob.glob(f'{src}/*GG/*/*seg.nii.gz') # Ground Truth\n pat = re.compile('.*_(\\w*)\\.nii\\.gz')\n\n data_paths = [{\n pat.findall(item)[0]: item\n for item in items\n }\n for items in list(zip(t1, t2, t1ce, flair, seg))]\n\n if not size:\n size = len(data_paths)\n total = len(data_paths[:size])\n step = 25 / total\n\n for i, imgs in enumerate(data_paths[:size]):\n try:\n [unzip(imgs[m], tag) for m in ['t1', 't2', 't1ce', 'flair', 'seg']]\n print('\\r\\n' + f'Progress: '\n f\"[{'=' * int((i + 1) * step) + ' ' * (24 - int((i + 1) * step))}]\"\n f\"({math.ceil((i + 1) * 100 / (total))} %)\" + '\\r\\n',\n end=''\n )\n except Exception as e:\n print(f'Something went wrong with {imgs[\"t1\"]}, skipping...\\n Exception:\\n{str(e)}')\n continue", "def load_dcm_series(files: List[str]):\n volume = []\n files.sort(key=get_slice_location)\n for file in files:\n dcm = pydicom.dcmread(file, force=True)\n if not dcm.file_meta.get('TransferSyntaxUID'):\n dcm.file_meta.TransferSyntaxUID = pydicom.uid.ImplicitVRLittleEndian\n volume.append(dcm.pixel_array)\n return files, np.stack(volume)", "def load_all_dicom_images(self, verbose=True):\n if verbose: print(\"Loading dicom files ... 
This may take a moment.\")\n\n path = self.get_path_to_dicom_files()\n fnames = [fname for fname in os.listdir(path)\n if fname.endswith('.dcm') and not fname.startswith(\".\")]\n images = []\n for fname in fnames:\n image = dicom.dcmread(os.path.join(path,fname))\n\n seid = str(image.SeriesInstanceUID).strip()\n stid = str(image.StudyInstanceUID).strip()\n\n if seid == self.series_instance_uid and\\\n stid == self.study_instance_uid:\n images.append(image)\n\n # ##############################################\n # Clean multiple z scans.\n #\n # Some scans contain multiple slices with the same `z` coordinate \n # from the `ImagePositionPatient` tag.\n # The arbitrary choice to take the slice with lesser \n # `InstanceNumber` tag is made.\n # This takes some work to accomplish...\n zs = [float(img.ImagePositionPatient[-1]) for img in images]\n inums = [float(img.InstanceNumber) for img in images]\n inds = list(range(len(zs)))\n while np.unique(zs).shape[0] != len(inds):\n for i in inds:\n for j in inds:\n if i!=j and zs[i] == zs[j]:\n k = i if inums[i] > inums[j] else j\n inds.pop(inds.index(k))\n\n # Prune the duplicates found in the loops above.\n zs = [zs[i] for i in range(len(zs)) if i in inds]\n images = [images[i] for i in range(len(images)) if i in inds]\n\n # Sort everything by (now unique) ImagePositionPatient z coordinate.\n sort_inds = np.argsort(zs)\n images = [images[s] for s in sort_inds]\n # End multiple z clean.\n # ##############################################\n\n return images", "def extract_microfossils_in_dir(source_dir, destination_dir,\n crop_dims, min_microfossil_size, clean_particles):\n if os.path.isdir(source_dir) is False:\n raise Exception(\"Not a valid source path\")\n if os.path.isdir(destination_dir) is False:\n os.makedirs(destination_dir)\n\n print(\"Currently processing images in dir: {}\".format(source_dir))\n image_extensions = [\".tif\", \".TIF\", \".png\", \".PNG\"]\n sub_dirs = []\n images_in_dir = []\n for file in os.listdir(source_dir):\n if os.path.isdir(os.path.join(source_dir, file)) and os.path.join(source_dir, file) != destination_dir:\n sub_dirs.append(file)\n # If it's an image with the given extensions\n elif reduce((lambda x, y: x or y), [file.endswith(ext) for ext in image_extensions]):\n images_in_dir.append(file)\n\n # Now process the images\n processed_images = 0\n generated_crops = 0\n for image_path in images_in_dir:\n full_image_path = os.path.join(source_dir, image_path)\n grayscale_image = cv2.imread(full_image_path, cv2.IMREAD_GRAYSCALE)\n if grayscale_image is None:\n print(\"Couldn't read image and was skipped: {}\".format(full_image_path))\n continue\n\n unfiltered_crops, filtered_crops = extract_microfossils(grayscale_image, min_microfossil_size,\n crop_dims, clean_particles)\n processed_images += 1\n for idx, crop in enumerate(unfiltered_crops):\n crop_file_name = \"{}_crop_{}_unfiltered.png\".format(os.path.splitext(image_path)[0], idx)\n cv2.imwrite(os.path.join(destination_dir, crop_file_name), crop)\n for idx, crop in enumerate(filtered_crops):\n crop_file_name = \"{}_crop_{}_filtered.png\".format(os.path.splitext(image_path)[0], idx)\n cv2.imwrite(os.path.join(destination_dir, crop_file_name), crop)\n\n generated_crops += len(unfiltered_crops) + len(filtered_crops)\n\n # Recursively apply to all subdirs\n for subdir in sub_dirs:\n source_subdir = os.path.join(source_dir, subdir)\n destination_subdir = os.path.join(destination_dir, subdir)\n sub_processed_images, sub_generated_crops = 
extract_microfossils_in_dir(source_subdir, destination_subdir,\n crop_dims, min_microfossil_size, clean_particles)\n processed_images += sub_processed_images\n generated_crops += sub_generated_crops\n\n return processed_images, generated_crops", "def __invokeCdx2Cml(self, inputFile, outputDir):\n tmpFs = None\n try:\n tmpFs = self.iceContext.fs.unzipToTempDirectory(inputFile)\n toDir = tmpFs.absPath()\n tmpFs.unzipToDirectory(inputFile, toDir)\n tmpFs.makeDirectory(\"raw\")\n tmpFs.makeDirectory(\"cml\")\n cmdPath = self.__getChemDrawCmd()\n self.iceContext.system.execute2(cmdPath, \"-INDIR\", toDir,\n \"-INSUFF\", '\"\"',\n \"-RAWDIR\", \"../raw\",\n \"-RAWSUFF\", \".xml\",\n \"-OUTDIR\", \"../cml\", printErr = False)\n _, name, _ = tmpFs.splitPathFileExt(inputFile)\n for file in tmpFs.listFiles(\"cml\"):\n srcFile = tmpFs.join(\"cml\", file)\n outFile = tmpFs.join(outputDir, file.replace(\"Object \", name + \"-\"))\n tmpFs.copy(srcFile, outFile)\n finally:\n if tmpFs != None:\n tmpFs.delete()", "def raw_clean(delete, invert, raw_dir, trash, raw_ext):\n raw_image_ext = f\".{raw_ext.upper()}\"\n\n # Basic user input check\n if not os.path.exists(raw_dir):\n print(f\"No '{raw_dir}' directory found!\")\n sys.exit(1)\n\n # Get list of images in different formats\n image_dir = os.getcwd()\n raw_dir = os.path.abspath(raw_dir)\n\n jpgs = set(\n [f.split('.')[0]\n for f in os.listdir(image_dir) if f.endswith(COMP_IMAGE_EXT)]\n )\n raws = set(\n [f.split('.')[0]\n for f in os.listdir(raw_dir) if f.endswith(raw_image_ext)]\n )\n\n # Find missing pairs\n paired = raws & jpgs\n jpgs_without_raw = jpgs - paired\n raws_without_jpg = raws - paired\n\n # Decide what set of files to process\n if not invert:\n images = raws_without_jpg\n workdir = raw_dir\n ext = raw_image_ext\n else:\n images = jpgs_without_raw\n workdir = image_dir\n ext = COMP_IMAGE_EXT\n\n # Process files\n for image in images:\n image_path = os.path.join(workdir, f\"{image}{ext}\")\n if not trash and not delete:\n print(f\"No pair found for '{image_path}' \")\n elif delete:\n print(f\"Deleting '{image_path}'...\")\n os.unlink(image_path)\n elif trash:\n print(f\"Trashing '{image_path}'...\")\n send2trash.send2trash(image_path)", "def convert(self):\n \n vrtlist = sorted(glob.glob(self.fullPath + '/*vrt'))\n splitAt = len(self.fullPath) + 1\n \n if len(vrtlist)!=0:\n for i in range(0,len(vrtlist)):\n prefix = str(vrtlist[i].split(\".vrt\")[0])\n prefix = prefix[:splitAt] + 'full' + prefix[splitAt:]\n ct = pymodis.convertmodis_gdal.convertModisGDAL(hdfname = vrtlist[i], \n prefix = prefix, subset = self.subset, res = self.resolution, \n outformat = self.outformat, wkt = self.projection, resampl = 'NEAREST_NEIGHBOR', vrt = True)\n ct.run()\n mosdel = glob.glob(self.fullPath + '/*mos.tif')\n for f in mosdel:\n os.remove(f)\n xmldel = glob.glob(self.fullPath + '/*mos.tif.xml') \n for f in xmldel:\n os.remove(f)\n vrtdel = glob.glob(self.fullPath + '/*.vrt')\n for f in vrtdel:\n os.remove(f)\n tifCount = len(glob.glob(self.fullPath + '/*.tif'))\n dataCount = self.subset.count('1')\n logger.log('SUCCESS', 'Conversion complete! The %d bands of %d mosaicked images were successfully converted to %d %s files.' 
% (dataCount, len(vrtlist), tifCount, str(self.outformat)))\n \n \n if len(vrtlist)==0: \n \n hdflist = sorted(glob.glob(self.fullPath + '/*.hdf'))\n for i in range(len(hdflist)):\n ms = pymodis.convertmodis_gdal.createMosaicGDAL(hdfnames = [hdflist[i]], subset = self.subset, outformat = 'GTiff')\n ms.run(str(hdflist[i].split('.h')[0]) + 'mos.tif')\n ms.write_vrt(output = str(hdflist[i].split('.h')[0]), separate = True)\n\n vrtlist = sorted(glob.glob(self.fullPath + '/*vrt'))\n splitAt = len(self.fullPath) + 1\n \n for i in range(0,len(vrtlist)):\n prefix = str(vrtlist[i].split(\".vrt\")[0])\n prefix = prefix[:splitAt] + 'full' + prefix[splitAt:]\n ct = pymodis.convertmodis_gdal.convertModisGDAL(hdfname = vrtlist[i], \n prefix = prefix, subset = self.subset, res = self.resolution, \n outformat = self.outformat, wkt = self.projection, resampl = 'NEAREST_NEIGHBOR', vrt = True)\n ct.run()\n \n mosdel = glob.glob(self.fullPath + '/*mos.tif')\n for f in mosdel:\n os.remove(f)\n xmldel = glob.glob(self.fullPath + '/*mos.tif.xml') \n for f in xmldel:\n os.remove(f)\n vrtdel = glob.glob(self.fullPath + '/*.vrt')\n for f in vrtdel:\n os.remove(f)\n tifCount = len(glob.glob(self.fullPath + '/full*.tif'))\n dataCount = self.subset.count('1')\n logger.log('SUCCESS', 'Conversion complete! The %d bands of %d HDF files were successfully converted to %d %s files.' % (dataCount, len(hdflist), tifCount, str(self.outformat)))", "def dicom2array(path, voi_lut=True, fix_monochrome=True):\n # Use the pydicom library to read the dicom file\n dicom = pydicom.read_file(path)\n\n # VOI LUT (if available by DICOM device) is used to\n # transform raw DICOM data to \"human-friendly\" view\n if voi_lut:\n data = apply_voi_lut(dicom.pixel_array, dicom)\n else:\n data = dicom.pixel_array\n\n # The XRAY may look inverted\n # - If we want to fix this we can\n if fix_monochrome and dicom.PhotometricInterpretation == \"MONOCHROME1\":\n data = np.amax(data) - data\n\n # Normalize the image array and return\n data = data - np.min(data)\n data = data / np.max(data)\n data *= 255\n return data.astype(np.uint8)", "def OpenDicomSerie(dirname=None):\n\tglobal volume, dim_x, dim_y, dim_z, spacing, origin, CT_open, filename_CT, dir_ini\n ct_swapY, ct_swapZ = False, False\n \n\tprint 'Opening DICOM serie ... 
'\n\n\t# Opening file\n\tif(dirname==None):\n\t\tfile_path = tkFileDialog.askopenfilename(initialdir = dir_ini, filetypes = [('DICOM files', '*.dcm')])\n\t\tfilelist = os.listdir(os.path.dirname(file_path))\n\telse:\n\t\tfilelist = os.listdir(dirname)\n\t\tfile_path = dirname + filelist[0]\n\n\tfilename_CT = file_path\n dir_ini = str(file_path.rsplit('/', 1)[0])+'/'\n\n\t# Getting dimensions\n\tds = pydicom.read_file(file_path)\n\tsp = ds.PixelSpacing\n\tds.file_meta.TransferSyntaxUID = pydicom.uid.ImplicitVRLittleEndian\n\tct_swapZ =(ds.ImageOrientationPatient[0:3] == [1, 0, 0])\n\tct_swapY =(ds.ImageOrientationPatient[3:6] == [0, 1, 0])\n\n dim_x = 0\n for f in filelist:\n if f.endswith(\".dcm\"): dim_x = dim_x + 1 \n\n\tdim_y, dim_z = np.shape(ds.pixel_array)[1], np.shape(ds.pixel_array)[0]\n \n\tvolume = np.zeros((dim_x, dim_y,dim_z))\n slicelocation = np.zeros(dim_x)\n\n\t# creating volume\n\tfor f,i in zip(filelist,range(dim_x)):\n\t\tif f.endswith(\".dcm\"):\n\t\t\tds = pydicom.read_file(os.path.dirname(file_path)+'/'+f)\n\t\t\tds.file_meta.transfersyntaxuid = pydicom.uid.ImplicitVRLittleEndian \n\t\t\tvolume[i,:,:] = ds.pixel_array\n\t\t\tif('slicelocation' in ds):\tslicelocation[i] = ds.SliceLocation\n\t\t\telse:\tslicelocation[i] = ds.ImagePositionPatient[2]\n \n\torder = np.argsort(slicelocation)\n slicelocation = slicelocation[order] # slicelocation is now sorted\n \n\tspacing = [float(slicelocation[1] - slicelocation[0]),float(sp[1]), float(sp[0])]\n\torigin = [float(slicelocation[0]),float(ds.ImagePositionPatient[1]),float(ds.ImagePositionPatient[0])]\n\tvolume = volume[order,:,:] # volume is now sorted\n\n\tif (\"RescaleSlope\" in ds):\tvolume = float(ds.RescaleSlope)*volume\n\tif (\"RescaleIntercept\" in ds):\tvolume = volume + float(ds.RescaleIntercept)\n\n\t# Dealing with image orientation\n print ' ct_swapY, ct_swapZ :', ct_swapY, ct_swapZ\n\tif(ct_swapY == True):\n volume = np.flip(volume,1) # flip volume, Y direction\n origin[1] = origin[1] + dim_y*spacing[1] \n if(ct_swapZ == True):\n volume = np.flip(volume,2) # flip volume, Z direction\n origin[2] = origin[2] + dim_z*spacing[2] \n if(ct_swapZ == True)and(ct_swapY == True): spacing[1], spacing[2] = spacing[2], spacing[1]\n\n\tSet_axes_lim_init()\n\tSet_scales()\n\tCT_open = True\n\tUpdate_all()\n\n\tprint(' file successfully opened!')", "def test_nonfree_dmca(self):\n image_files = self.extract_images(full_path('../samples/nonfree/dmca.pdf'))\n assert image_files[0].endswith('bmp')", "def batch_dicom_to_nrrd(self, dicom_root, nrrd_root):\n dicom_files_dirs = glob.glob(dicom_root + '/*')\n for dicom_subject in dicom_files_dirs:\n subject = re.search(self.KEY_WORD_FLODER, dicom_subject).group()\n nrrd_subject = nrrd_root + '/' + subject\n self.dicom_to_nrrd(dicom_subject, nrrd_subject)", "def _anonymize_files(dicom_directory_in, dicom_directory_out, fields_to_keep):\n\n # Make sure we have absolute paths\n dicom_directory_in = os.path.abspath(dicom_directory_in)\n dicom_directory_out = os.path.abspath(dicom_directory_out)\n\n # looping over all files\n for root, _, file_names in os.walk(dicom_directory_in):\n # New directory\n\n for file_name in file_names:\n # Create instance_UID\n fields_to_keep['SOPInstanceUID'] = pydicom.uid.generate_uid()\n\n dicom_file_in = os.path.join(root, file_name)\n current_dir = root[len(dicom_directory_in) + 1:]\n dicom_file_out = os.path.join(dicom_directory_out, current_dir, file_name)\n if common.is_dicom_file(dicom_file_in):\n logging.info(\"Processing \" + dicom_file_in)\n 
_anonymize_file(dicom_file_in, dicom_file_out, fields_to_keep)\n else:\n logging.info(\"Skipping \" + dicom_file_in + \", no dicom file\")" ]
[ "0.5886228", "0.56491315", "0.5584978", "0.54134685", "0.539382", "0.5364296", "0.5216882", "0.5209561", "0.51926136", "0.51270014", "0.50320065", "0.50242835", "0.49604785", "0.49584168", "0.49448156", "0.491124", "0.49083465", "0.49060026", "0.48979783", "0.48747438", "0.48700917", "0.48056448", "0.47933492", "0.4747993", "0.4747186", "0.4734388", "0.47150862", "0.4703241", "0.46951985", "0.46875167" ]
0.78430045
0
This function retrieves articles by article indexes and returns the found articles.
def retrieve_articles(article_indexes): articles = [] for index in article_indexes: filename, position = index.split('@') with open(filename, 'r', encoding='utf-8') as articles_file: articles_file.seek(int(position)) line = articles_file.readline() article = line while line[0] != '}': line = articles_file.readline() article += line articles.append(article) return articles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetIndexArticles(self, article):\n index_articles = [a for a in self.site_config.GetFlattenedArticles() if \\\n isinstance(a, IndexArticle)]\n out = [i for i in index_articles if i.Match(article.type_name)]\n\n return out", "def articles ():\n\n offset = int (arg ('offset', '0', re_integer_arg))\n limit = clip (arg ('limit', str (MAX_RESULTS), re_integer_arg), 1, MAX_RESULTS)\n\n with current_app.config.dba.engine.begin () as conn:\n res = execute (conn, r\"\"\"\n SELECT no\n FROM article\n ORDER BY no\n LIMIT :limit\n OFFSET :offset\n \"\"\", { 'offset' : offset, 'limit' : limit })\n\n return make_articles_response (res, limit)", "def index_news_articles(self):\n # Get the RSS feed\n print('Fetching the RSS feed')\n item_list = rss_fetch.get_all_feed_urls(self.rss_url_file)\n # Index all the feed items into ES\n print('Going to index {0} news articles...'.format(len(item_list)))\n drop_count=0\n for item in item_list:\n try:\n # Use item specific id while indexing to avoid duplication\n self.es.index(index=self.index, doc_type=self.doc_type, id=item['id'], body=item)\n except KeyError:\n drop_count += 1\n traceback.print_exc()\n except elasticsearch.exceptions.RequestError:\n drop_count += 1\n traceback.print_exc()\n\n print('Indexed {0} Dropped {1}'.format(len(item_list)-drop_count, drop_count))\n print('Current index size {0}'.format(self.get_index_size()))", "def get_articles(db:Session):\n return db.query(ArticleModel).all()", "def search(self, json_query):\n res = self.es.search(index=self.index, body=json_query)\n #print(json.dumps(res, indent=2))\n # Process the results and return the article objects only\n articel_list = [src['_source'] for src in res['hits']['hits']]\n return articel_list", "def subset(dataset, article_indices):\n indices = []\n for a_id in article_indices:\n indices += dataset.get_article_indices(a_id)\n return Subset(dataset, indices)", "def list_articles():\n\n return template(\"index\", articles=get_articles())", "def articles():\n\n # Store the 'geo' part of the URL as a string called 'geo'. Check 'geo' loaded, and produce runtime error if not.\n # e.g. '12589'\n geo = request.args.get(\"geo\")\n if not geo:\n raise RuntimeError(\"missing geo\")\n\n # Run 'geo' through 'lookup()' function, store resulting list of objects in 'rows'.\n # e.g. [{'link':'www.website1.com','title':'article_title1'},{'link':'www.website2.com','title':'article_title2'}]\n rows = lookup(geo)\n\n # Run 'rows' through 'jsonify()'' function, and return resulting dictionary w/ up to 5 objects. The 'jsonify()' function modifies the input to JSON.\n # e.g. 
[{'link':'www.website1.com','title':'article_title1'},{'link':'www.website2.com','title':'article_title2'}]\n if len(rows) > 5:\n return jsonify(rows[0], rows[1], rows[2], rows[3], rows[4])\n else:\n return jsonify(rows)", "def get_article_by_entity(cls, entities):\n # Exist check by key\n entity_key = \"entity:\" + str(entity.entity_key)\n entity_obj = cls.db.hgetall(entity_key)\n if type(entity_obj) is not dict:\n return None\n\n # Extract\n articles = json.loads(entity_obj[\"articles\"])\n article_list = list()\n for key in articles:\n article_list.append(Article.build(cls.db.get(\"article:\" + key)))\n return article_list", "def fetch_all(): \n client, index_name = connection_es()\n res = client.search(index = index_name+\"*\")\n return res", "def list_articles():\n wiki = listdir(\"wiki\")\n return template(\"index\", wiki = wiki)", "def get_article_indices(self, article_id):\n start, end, _ = self.article_features[article_id]\n return list(range(start, end + 1))", "def search_by_keyword(self, keyword):\n if self.indexes and keyword in self.indexes.keys():\n articles_indexes = self.indexes[keyword][:self.no_returned_articles]\n return retrieve_articles(articles_indexes)\n else:\n return None", "def get_articles(cls, CATEGORY=None, TAG=None, NUM=100):\n if CATEGORY:\n article_list = cls.objects.filter(\n Q(status=0) & Q(category__name__icontains=CATEGORY))[:NUM]\n return article_list\n if TAG:\n article_list = cls.objects.filter(\n Q(status=0) & Q(tags__icontains=TAG))[:NUM]\n return article_list\n return cls.objects.filter(status=0)[:NUM]", "def get_articles(self, publish_status):\n query_str = (\n \"SELECT Id,KnowledgeArticleId,Title,UrlName FROM {} \"\n \"WHERE PublishStatus='{}' AND language='en_US'\"\n ).format(\n settings.SALESFORCE_ARTICLE_TYPE,\n publish_status,\n )\n result = self.api.query(query_str)\n return result['records']", "def search_by_keywords(self, keywords, operator='or'):\n if operator == 'or' and self.indexes:\n articles_indexes = []\n for keyword in keywords:\n if keyword in self.indexes.keys():\n articles_indexes += self.indexes[keyword]\n articles_indexes = list(set(articles_indexes))[:self.no_returned_articles]\n return retrieve_articles(articles_indexes)\n\n if operator == 'and' and self.indexes:\n if keywords[0] in self.indexes.keys():\n article_indexes = self.indexes[keywords[0]]\n for keyword in keywords:\n article_indexes = list(set(article_indexes) & set(self.indexes[keyword]))\n article_indexes = article_indexes[:self.no_returned_articles]\n return retrieve_articles(article_indexes)\n\n return None", "def related_articles(self, num):\n related_articles = None\n try:\n related_articles = Article.objects.values('id', 'title', 'view_times', 'update_time', 'author').\\\n filter(tags__icontains=self.tags_list()[0]).\\\n exclude(id=self.id)[:num]\n except IndexError:\n pass\n\n if not related_articles:\n related_articles = Article.objects.values('id', 'title', 'view_times', 'update_time', 'author').\\\n filter(category=self.category).\\\n exclude(id=self.id)[:num]\n\n return related_articles", "def list(limit, export):\n GetArticles.get_all_articles(limit, export)", "def articles(self):\n return self.get_queryset().filter(content_type__model='article').order_by('-articles__published_at')", "def articles(self, audience_filter=None):\n articles = ArticlePage.objects.live().descendant_of(self)\n if audience_filter is not None:\n articles = articles.filter(audience__name=audience_filter)\n articles = articles.order_by('-date')\n return articles", "async def 
get_article_links(self):\n urls = []\n for page in range(self._start, self._end+1):\n urls.append(self._searchURL + str(page))\n result_list = await self._connect(urls)\n\n self._urls = []\n hares_links = []\n for result in result_list:\n soup = result[1]\n search_links = soup.find_all(class_='search-title')\n article_links = re.findall(r'url=(.*?)\\\"', str(search_links))\n for l in article_links:\n l = unquote(l)\n if 'hare48.pixnet.net' in l:\n hares_links.append(l)\n else:\n self._urls.append(l)\n self._urls.extend(await self._transform_hares(hares_links))", "def get_articles():\n _, articles = base_query(db_session)\n return jsonify([p.serialize for p in articles])", "def get_articles(city):\n article_list = []\n\n url = 'https://news.search.yahoo.com/search;?p=' + city\n source = requests.get(url, timeout=5)\n plain_text = source.text\n soup = BeautifulSoup(plain_text, \"html5lib\")\n\n articles = soup.findAll('div', {'class': 'NewsArticle'})\n\n i = 0\n for item in articles:\n if i < 3:\n title = item.find('a', attrs={'class':'thmb'})['title']\n source = item.find('span', attrs={'class':'mr-5'}).text\n header4 = item.find('h4', attrs={'class':'fz-16'})\n link = header4.find('a')['href']\n\n article = format_article_list(title, source, \"| \" + link)\n article_list.append(article)\n\n i += 1\n else:\n break\n\n return article_list", "def articles(self, subject_filter=None):\n articles = ArticlePage.objects.live().descendant_of(self)\n if subject_filter is not None:\n articles = articles.filter(\n Q(subject_1=subject_filter) | Q(subject_2=subject_filter))\n articles = articles.order_by('-date')\n return articles", "def articles(self, page=None, per_page=None, sort=None):\r\n params = base.get_params(None, locals())\r\n request = http.Request('GET', self.get_url(), params)\r\n\r\n return request, parsers.parse_json", "def get_articles_pagination(\n pageNumber: int = 0,\n limit: int = 10\n):\n res = articles_db.get_articles_mongo(\n articles,\n pageNumber,\n limit\n )\n\n return res", "def article_list(request):\n try:\n logger.info('Calling the api' + APIURL + '/articles/?format=json&limit=' + str(COUNT))\n response = requests.get(APIURL + '/articles/?format=json&limit=' + str(COUNT))\n parser = json.loads(response.content)\n preview_article = random_article(parser)\n next_read = read_next()\n return render(request, 'article/article_list.html', {'articlelist':parser, 'preview_article': preview_article, 'next_read': next_read})\n except:\n logger.error('Calling the api error in article_list')\n raise Http404(\"Article does not exist\")", "def pull_articles(ls):\n # pull articles\n doi = self.search_articles(file)\n els_key = self.els_key\n\n for i in doi:\n els_url = 'https://api.elsevier.com/content/article/doi/' + doi + '?APIKey=' + els_key\n r = requests.get(els_url)\n for num in range(len(ls)):\n with open(folder + f'/write_test_els_paper{num}.xml', 'wb') as file:\n file.write(r.content)", "def get_articles(self):\n\t\tarticles = Blog.objects.all()\\\n\t\t\t.filter(publication_date__lte=datetime.date.today())\\\n\t\t\t.order_by('publication_date')\n\t\ti = random.randint(0, articles.count()-1)\n\t\treturn articles, articles[i]", "def search_articles(query, case_sensitive, all_articles):\n articles_with_matches = 0\n total_matches = 0\n for title in all_articles:\n article_content = all_articles[title][\"content\"]\n matches = list(get_matches(query, case_sensitive, article_content))\n\n if matches:\n print(f\"{title}:\")\n for snippet in matches:\n print(snippet)\n print()\n 
articles_with_matches += 1\n total_matches += len(matches)\n\n print(f\"Found {total_matches} mentions of '{query}' in {articles_with_matches} articles.\")" ]
[ "0.7164512", "0.6909187", "0.65383947", "0.6387886", "0.63624847", "0.6330164", "0.63012826", "0.63006276", "0.6228014", "0.6227102", "0.61900175", "0.61707246", "0.60809773", "0.6071758", "0.60450137", "0.60200983", "0.60097104", "0.59602565", "0.59474", "0.5942827", "0.59186435", "0.59165543", "0.591124", "0.58345205", "0.58213705", "0.57828116", "0.57733804", "0.57700413", "0.57254046", "0.5723194" ]
0.7359794
0
This method implements searching by more than one keyword; you can search for articles that include all keywords or at least one keyword.
def search_by_keywords(self, keywords, operator='or'): if operator == 'or' and self.indexes: articles_indexes = [] for keyword in keywords: if keyword in self.indexes.keys(): articles_indexes += self.indexes[keyword] articles_indexes = list(set(articles_indexes))[:self.no_returned_articles] return retrieve_articles(articles_indexes) if operator == 'and' and self.indexes: if keywords[0] in self.indexes.keys(): article_indexes = self.indexes[keywords[0]] for keyword in keywords: article_indexes = list(set(article_indexes) & set(self.indexes[keyword])) article_indexes = article_indexes[:self.no_returned_articles] return retrieve_articles(article_indexes) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_by_keyword(self, keyword):\n if self.indexes and keyword in self.indexes.keys():\n articles_indexes = self.indexes[keyword][:self.no_returned_articles]\n return retrieve_articles(articles_indexes)\n else:\n return None", "def search(self, term):", "def fetchRelatedkeywords(self, keyword, meta_keyword):\n prefix = [\"how\", \"which\", \"why\", \"where\", \"who\", \"when\", \"are\", \"what\"]\n suffix = [\"\", \"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\", \"j\", \"k\", \"l\",\n \"m\", \"n\", \"o\", \"p\", \"q\", \"r\", \"s\", \"t\", \"u\", \"v\", \"w\" \"x\", \"y\", \"z\"]\n suffix_arr = list(map(lambda x: keyword+\" \"+x, suffix))\n prefix_arr = list(map(lambda x: x+\" \"+keyword, prefix))\n suffix_arr.extend(prefix_arr)\n # removes duplicates for a seed keyword\n duplicates = set()\n for word in suffix_arr:\n suggestion = self.fetchSuggestion(word, keyword, meta_keyword)\n if suggestion == False:\n return False\n self.api_rate_limit+=1\n for query in suggestion:\n if query['keyword'] not in duplicates:\n duplicates.add(query['keyword'])\n # allows same keywords with multiple keywords\n # self.results.append(query)\n if query['keyword'] not in self.already_fetched:\n # does not allow same keyword with multiple keywords\n # this line is temporary need to remove after fetching 10 categories\n self.results.append(query)\n self.queue.add(query['keyword']) \n self.keywords_count += len(self.results)", "def search(self, **kwargs):\n return keyword_search(self._rq_list, **kwargs)", "def keyword_search(self, *args, batch_func=None, batch_number=10):\n batch_func_ = batch_func\n keywords = args\n request_url = self._assemble_kw_url(keywords)\n # print(request_url)\n\n # xml_files = self._send_request_xml(request_url, batch_func=None, batch_number=1000)\n # data = self._construct_data_xml(xml_files)\n\n data = self._send_request_xml(request_url, batch_func=batch_func, batch_number=batch_number)\n\n return data", "def search(self, *args, **kwargs):", "def recommend_by_keywords(self, key_words_list=None):\n pass", "def search_articles(query, case_sensitive, all_articles):\n articles_with_matches = 0\n total_matches = 0\n for title in all_articles:\n article_content = all_articles[title][\"content\"]\n matches = list(get_matches(query, case_sensitive, article_content))\n\n if matches:\n print(f\"{title}:\")\n for snippet in matches:\n print(snippet)\n print()\n articles_with_matches += 1\n total_matches += len(matches)\n\n print(f\"Found {total_matches} mentions of '{query}' in {articles_with_matches} articles.\")", "def find(self, search_terms, _keywords=None):\n objects = super().get_queryset().order_by(\"name\")\n term_query = Q()\n for t in search_terms:\n term_query.add(Q(name__iexact=t), Q.OR)\n term_query.add(Q(search_tokens__icontains=t), Q.OR)\n return objects.filter(term_query)", "def search(self, query, maxhits=100):", "def search_multiple_words(words):\n # YOUR CODE HERE #\n pass # delete this when you write your code", "def keyword_search(keywords):\n try:\n return itunespy.search(keywords)[0]\n except LookupError:\n return None", "def _add_better_search_words(self):\n for kw in self.better_search_kw:\n self.search_query += kw", "def search(self, query, k):\n docs={}\n for term in set(query.split(' ')):\n for article in self.tf_idf:\n if term in self.tf_idf[article]:\n if article in docs:\n docs[article]+=self.tf_idf[article][term]\n else:\n docs[article]=self.tf_idf[article][term]\n docs_sort=sorted(docs.items(), key=lambda p: (p[1],p[0]), reverse=True)\n 
docs_sort=[x for x in docs_sort if x[1] >= 0]\n if len(docs_sort)<k:\n print (docs)\n return docs\n else:\n print (docs_sort[:k])\n return docs_sort[:k]", "def search(self, *args, **kwargs): # real signature unknown\n pass", "def process_article(sentences: List[Dict[str, str]],\n article: str,\n keyword: str,\n collect_all: bool\n ) -> List[Dict[str, str]]:\n with open(article, 'r') as txt:\n for line in txt.read().split('\\n'):\n if collect_all or keyword.lower() in line.lower():\n sentences.append({\n \"sentence\": line,\n \"keyword\": keyword\n })\n \n return sentences", "def search(self, query):", "def search_professors_keywords(search_term):\n print(search_term)\n # print(\"search_keywords must not\",must_not_term)\n if search_term == \"\" or search_term is None:\n return json.dumps([])\n else:\n # pandas_index_list = elastic_dash.test_search(search_term, must_not_term)\n pandas_index_list = elastic_dash.test_search_standard(search_term, \"\")\n # pandas_index_list = elastic_dash.test_search_desc2(search_term, must_not_term)\n # pandas_index_list = elastic_dash.test_search_fivegrams(search_term, must_not_term)\n return json.dumps(pandas_index_list)", "def keyword_search(self, keyword, apply=None):\n self.open(self.SEARCH_BY_KEYWORD_URL)\n self.getControl('keywords').displayValue = [keyword]\n self.getControl('Search').click()\n if apply:\n self.getControl('Apply on selected persons').displayValue = [apply]\n self.getControl(name='form.buttons.apply').click()", "def articleSearch(article_name):\n search_article_name = article_name.split(\"\")\n search_name_format = \"+\".join(search_article_name)\n searched_articles = search_articles(search_name_format)\n\n return render_template('search.html',articles = searched_articles)", "def set_keywords(self):\n\n if len(self.get_keywords()) == 0 and len(self.get_files()) > 0:\n self.keywords = self.files[0].get_parent()[\"title\"].split(\" \")\n for keyword in self.keywords:\n if str(keyword) in str(self.text):\n self.keywords = []", "def search(self, search):\n raise NotImplementedError", "def get_links(self: 'WebScraper', \n keyword: str\n ) -> Generator[req.Response, None, None]:\n print(f\"Collecting articles for the keyword '{keyword}'...\")\n \n # Create strainer that only searched for links with the corresponding \n # class specified in the constant LINKS_CLASS\n only_links = SoupStrainer(\n 'a', {'class': LINKS_CLASS}\n )\n parameters = {'q': keyword}\n \n # Iterate through the pages of the search\n for i in count(1):\n\n # Stop when the page limit has been reached\n if i > PAGE_LIMIT:\n return None\n \n # for keyword in keyword_synonyms:\n parameters['page'] = i\n res = self.get_request(SEARCH_URL, parameters)\n links = {\n link['href'] \n for link in BeautifulSoup(\n res.text, 'lxml', \n parse_only=only_links\n ).find_all('a', href=True) \n if self.verify(link['href'])\n }\n \n for link in links:\n this = self.get_request(link)\n if keyword.lower() in this.text.lower():\n yield this", "def search_news(request):\n try:\n query_string = ''\n if request.GET['search_text'].strip() != '':\n query_string = '&title='+request.GET['search_text']\n response = requests.get(APIURL + '/articles/?format=json'+query_string)\n parser = json.loads(response.content)\n return render_to_response('article/search_result.html', {'articlelist':parser})\n except:\n raise Http404(\"Search Item error\")", "def search_keyword(self,keyword):\n for entry in self.available_fields_list:\n for x in entry:\n if keyword in x:\n print(entry)\n break\n return", "def 
get_article_keywords(article,\n keywords,\n preprocess_type=PreprocessWordType.LEMMATIZE):\n matches = set()\n for word in article.words:\n preprocessed_word = query_utils.preprocess_word(word,\n preprocess_type)\n if preprocessed_word in keywords:\n matches.add(preprocessed_word)\n return sorted(list(matches))", "def search(query_string):", "def search_keywords(self, sources, start_date, end_date, num_keywords=5,\n num_associations=5, auth_token=None, term_query=\"\"):\n if not auth_token:\n auth_token = self.auth_token\n if not isinstance(sources, list):\n sources = [sources]\n query = \"\"\"{\"bool\" : {\n \"must\" : [\n {\n \"date\" : {\n \"gte\":\"%s\",\n \"lte\":\"%s\"\n }\n },<<term_query>>\n ]\n }}\n \"\"\" % (start_date, end_date)\n if len(term_query) > 0:\n term_query = ',%s' % term_query\n query = query.replace(',<<term_query>>', term_query)\n query = json.loads(query)\n data = dict(sources=sources, query=query, count=num_keywords,\n associations=num_associations)\n data = json.dumps(data)\n headers = {'Authorization': 'Bearer %s' % auth_token,\n 'Content-Type': 'application/json'}\n url = '/'.join([self.base_url, self.KEYWORD_ENDPOINT])\n r = requests.post(url,\n data=data,\n headers=headers)\n if r.status_code == 200:\n return json.loads(r.content)['result']\n return r", "def search(request):\n if 'q' in request.GET:\n term = request.GET['q']\n story_list = Story.objects.filter(Q(title__contains=term)|Q(markdown_content__contains=term))\n heading = \"Search results\"\n return render_to_response(\"cms/story_list.html\",locals())", "def _keyword_search(id_to_text, raw_keywords, modified_keywords):\n\t# The raw keywords and modified keywords should be two paired lists where the elements correspond to one another.\n\t# The modifications done to the keywords should already match the modifications done to the texts in the input dictionary so they can be directly compared.\n\tassert len(raw_keywords) == len(modified_keywords)\n\tid_to_found_keywords = {i:[r_kw for r_kw,m_kw in zip(raw_keywords,modified_keywords) if m_kw in text] for i,text in id_to_text.items()}\n\tid_to_num_found_keywords = {i:len(kw_list) for i,kw_list in id_to_found_keywords.items()}\n\treturn(id_to_found_keywords, id_to_num_found_keywords)" ]
[ "0.68880993", "0.65679824", "0.6564382", "0.654274", "0.64528704", "0.6317772", "0.6299783", "0.62631476", "0.62608874", "0.6253983", "0.62516856", "0.622099", "0.62086266", "0.61941206", "0.6184246", "0.61816686", "0.6169494", "0.61456", "0.6141546", "0.61017454", "0.6096363", "0.6064143", "0.60623974", "0.60509115", "0.6046026", "0.60272914", "0.6026001", "0.6021229", "0.60108864", "0.5991511" ]
0.73257583
0
Parse command line arguments and liftoff configuration.
def parse_options() -> Namespace: opt_parser = OptionParser( "liftoff", [ "script", "config_path", "procs_no", "gpus", "per_gpu", "no_detach", "verbose", "copy_to_clipboard", "time_limit", # This should be removed in favour of start_by "start_by", "end_by", "optimize", "args", "filters", "results_path", "name", "max_runs", "shuffle", ], ) return opt_parser.parse_args()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_commandline():\n parser = optparse.OptionParser(usage = __doc__,version=git_version.verbose_msg)\n\n parser.add_option(\"-t\", \"--timeFile\",help=\"Text file with central times.\",default = 'centralTimes.txt')\n\tparser.add_option(\"-c\", \"--channel\",help=\"IFO channel.\",default = 'L1:GDS-CALIB_STRAIN')\n\tparser.add_option(\"-w\", \"--workdirectory\",help=\"Working directory.\",default = '.')\n\tparser.add_option(\"-s\", \"--sourceDir\",help=\"Name of source directory.\",default = 'source')\n\n opts, args = parser.parse_args()\n return opts", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Run NCF..\")\n parser.add_argument(\n \"--config_file\",\n nargs=\"?\",\n type=str,\n default=\"../configs/ncf_default.json\",\n help=\"Specify the config file name. Only accept a file from ../configs/\",\n )\n # If the following settings are specified with command line,\n # These settings will used to update the parameters received from the config file.\n parser.add_argument(\n \"--dataset\",\n nargs=\"?\",\n type=str,\n help=\"Options are: tafeng, dunnhunmby and instacart\",\n )\n parser.add_argument(\n \"--data_split\",\n nargs=\"?\",\n type=str,\n help=\"Options are: leave_one_out and temporal\",\n )\n parser.add_argument(\n \"--root_dir\", nargs=\"?\", type=str, help=\"working directory\",\n )\n parser.add_argument(\n \"--emb_dim\", nargs=\"?\", type=int, help=\"Dimension of the embedding.\"\n )\n parser.add_argument(\"--lr\", nargs=\"?\", type=float, help=\"Intial learning rate.\")\n parser.add_argument(\"--max_epoch\", nargs=\"?\", type=int, help=\"Number of max epoch.\")\n parser.add_argument(\n \"--batch_size\", nargs=\"?\", type=int, help=\"Batch size for training.\"\n )\n parser.add_argument(\"--optimizer\", nargs=\"?\", type=str, help=\"OPTI\")\n parser.add_argument(\"--activator\", nargs=\"?\", type=str, help=\"activator\")\n parser.add_argument(\"--alpha\", nargs=\"?\", type=float, help=\"ALPHA\")\n return parser.parse_args()", "def main():\n cli = CommandLineInterface(NAME, package=\"nemo_nowcast\", description=__doc__)\n cli.build_parser()\n parsed_args = cli.parser.parse_args()\n config = Config()\n config.load(parsed_args.config_file)\n msg = _configure_logging(config)\n logger.info(f\"running in process {os.getpid()}\")\n logger.info(f\"read config from {config.file}\")\n logger.info(msg)\n run(config)", "def parseArgs():\n parser = argparse.ArgumentParser()\n parser.add_argument('--dataset', default='fsod', help='training dataset') # use fsod dataset for default\n parser.add_argument('--cfg', dest='cfg_file', required=True, help='optional config file')\n parser.add_argument('--load_ckpt', help='path to load checkpoint')\n parser.add_argument('--load_detectron', help='path to load detectron weight pickle file')\n parser.add_argument('--output_dir', help='output directory to save the testing results.')\n parser.add_argument('--range', help='[start, end)', type=int, nargs=2)\n parser.add_argument('--visualize', dest='visualize', help='output images of detection', action='store_true')\n return parser.parse_args()", "def main_parse_args():\n parser = ArgumentParser()\n parser = cf.add_config_args(parser)\n args = parser.parse_args()\n config_opts = sys.argv[1:]\n # add working_dir to config_opts\n found_wd = False\n for opt in ['-wd', '--working_dir']:\n if opt in config_opts:\n found_wd = True\n if not found_wd:\n config_opts.extend(['-wd', args.working_dir])\n # remove src_classes from config_opts\n for opt in ['-srcs', '--src_classes']:\n if opt in 
config_opts:\n idx = config_opts.index(opt)\n config_opts.pop(idx)\n # pop next item\n config_opts.pop(idx)\n args.config_opts = \" \".join(config_opts)\n return args", "def _parse_command_line(self):\n DESCRIPTION = (\n \"Application for searching PyLith .cfg parameter files.\"\n )\n\n parser = argparse.ArgumentParser(description=DESCRIPTION,\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument(\"--path\", action=\"store\",\n dest=\"searchpath\", default=\".\", help=\"Search path for .cfg files.\")\n parser.add_argument(\"--display\", action=\"store\",\n dest=\"display\", default=\"all\", help=\"List of metadata to display in search results.\")\n parser.add_argument(\"--verbose\", action=\"store_true\", dest=\"verbose\",\n help=\"Report missing metadata.\")\n\n parser.add_argument(\"--keywords\", action=\"store\", dest=\"keywords\",\n help=\"Comma delimited list of keywords for filtering search results.\")\n parser.add_argument(\"--features\", action=\"store\", dest=\"features\",\n help=\"Comma delimited list of features for filtering search results.\")\n parser.add_argument(\"--authors\", action=\"store\", dest=\"authors\",\n help=\"Comma delimited list of authors for filtering search results.\")\n parser.add_argument(\"--version\", action=\"store\", dest=\"version\",\n help=\"PyLith version for filtering search results.\")\n parser.add_argument(\"--incompatible\", action=\"store_true\", dest=\"incompatible\",\n help=\"Filter search results to show incompatible parameter files.\")\n parser.add_argument(\"--output-format\", action=\"store\", dest=\"output_format\", \n help=\"Output format\", default=\"txt\", choices=[\"text\", \"markdown\"])\n\n args = parser.parse_args()\n\n return args", "def parse_command_line():\n\n desc = \"Perform fluid dynamics simulations.\"\n parser = argparse.ArgumentParser(description=desc)\n\n # Parameter file\n help_txt = \"name of the configuration file (default is 'config.ini.')\"\n parser.add_argument(\"-f\", \"--file\", metavar=\"FILE\", default=\"config.ini\",\n required=False, dest=\"config_file\", help=help_txt)\n\n return parser.parse_args()", "def _read_cmd_args():\n\n # Check if argument count is correct.\n if len(sys.argv) != 5:\n print(\"[ERR] Invalid number of command line arguments!\")\n _usage()\n sys.exit(1)\n\n # Get path to config file\n configfile = sys.argv[1]\n if not os.path.exists(configfile):\n print(f\"[ERR] Config file {configfile} does not exist!\")\n sys.exit(1)\n\n # Get top directory of LIS data\n topdatadir = sys.argv[2]\n if not os.path.exists(topdatadir):\n print(f\"[ERR] LIS data directory {topdatadir} does not exist!\")\n sys.exit(1)\n\n # Get valid year and month\n yyyymm = sys.argv[3]\n if len(yyyymm) != 6:\n print(\"[ERR] Invalid length of YYYYMM, must be 6 characters!\")\n sys.exit(1)\n year = int(yyyymm[0:4])\n month = int(yyyymm[4:6])\n try:\n startdate = datetime.datetime(year, month, day=1)\n except ValueError:\n print(\"[ERR] Invalid YYYYMM passed to script!\")\n sys.exit(1)\n\n # Get model forcing ID\n model_forcing = sys.argv[4]\n\n return configfile, topdatadir, startdate, model_forcing", "def parse_command_line(self, argv):\n from optparse import OptionParser\n usage = \"usage: %prog [options]\"\n parser = OptionParser(usage)\n\n (options, args) = parser.parse_args(argv)", "def _parse_args(argv):\n parser = make_parser()\n args = parser.parse_args(argv)\n LOGGER.setLevel(to_log_level(args.loglevel))\n\n if not args.inputs:\n if args.list:\n tlist = \", \".join(API.list_types())\n 
_exit_with_output(\"Supported config types: \" + tlist)\n elif args.env:\n cnf = os.environ.copy()\n _output_result(cnf, args.output, args.otype or \"json\", None, None)\n sys.exit(0)\n else:\n parser.print_usage()\n sys.exit(1)\n\n if args.validate and args.schema is None:\n _exit_with_output(\"--validate option requires --scheme option\", 1)\n\n return args", "def parse_command_line():\n\n parser=OptionParser(usage=\"%prog [options] \",\n description=\" updates tracker\" )\n parser.add_option(\"-c\", \"--candidate\", action=\"store\", type=\"string\",\n dest=\"candidate\", default=\"\", help=\"candidate name\")\n parser.add_option(\"-u\", \"--username\", action=\"store\", type=\"string\",\n dest=\"username\", default=\"gzhou\",\n help=\"username\")\n parser.add_option(\"-p\",\"--password\", action=\"store\",\n dest=\"password\", default=\"egghead\", help=\"password\")\n parser.add_option(\"-i\",\"--input\", action=\"store\",\n dest=\"input\", default=\"tracker_temp.txt\", help=\"Input file\") \n parser.add_option(\"-l\",\"--upload\", action=\"store\",\n dest=\"upload\", default=None, help=\"upload file\") \n parser.add_option(\"-d\",\"--description\", action=\"store\",\n dest=\"description\", default=None, help=\"descirption\") \n (options, args)=parser.parse_args()\n\n return options, args", "def parse_args():\n\n parser = argparse.ArgumentParser(description='CLI to store Actisense-NGT Gateway values to InfluxDB and publish via MQTT')\n parser.add_argument('--config', '-c', type=str, required=True, help='JSON configuraton file with path')\n return parser.parse_args()", "def parse_args(self, argv):\n super(UpdaterDaemon, self).parse_args(argv)\n\n self.stdout = self.options.log_file\n self.stderr = self.options.error_log\n\n config = self.options.config\n if config is None:\n config = os.path.join(os.path.dirname(__file__), 'config.py')\n config = os.path.normpath(os.path.abspath(config))\n configdir, configfile = os.path.split(config)\n configfile, ext = os.path.splitext(configfile)\n if configdir not in sys.path:\n sys.path.insert(0, configdir)\n self.config = __import__(configfile)", "def handleCmdLine(self):\n description = \"Nagios monitoring script to check for open ports\\n\"\n usage = (\"%prog <options>\\n\")\n parser = OptionParser(usage=usage, description=description)\n\n parser.add_option(\"-c\", \"--config\",\n type=\"string\",\n help=\"path to open ports configuration file\")\n parser.add_option(\"-l\", \"--list\",\n type=\"string\",\n help=\"supply list of allowed ports seperated by comma.\")\n\n (self.options, args) = parser.parse_args()", "def parse_args():\n parser = argparse.ArgumentParser(description=\"Create timezone info JSON file from tzdata files\")\n parser.add_argument(\"-v\", \"--vzic\", dest=\"vzic_path\", required=True,\n help=\"\"\"Path to the `vzic` executable. This must be\n downloaded from https://code.google.com/p/tzurl/ and\n compiled.\"\"\")\n parser.add_argument(\"-t\", \"--tzdata\", dest=\"tzdata_path\",\n help=\"\"\"Path to a directory containing the IANA\n timezone data. If this argument is omitted, the data\n will be downloaded from ftp.iana.org.\"\"\")\n return parser.parse_args()", "def parse_args():\n parser = ArgumentParser(\n description=\"This is a script for auto apply ipex optimization.\"\n \"\\n################################# Basic usage ############################# \\n\"\n \"\\n 1. Apply ipex optimization with fp32 data type\\n\"\n \"\\n >>> python -m intel_extension_for_pytorch.cpu.auto_ipex python_script args \\n\"\n \"\\n 2. 
Apply ipex optimization with bf16 data type\\n\"\n \"\\n >>> python -m intel_extension_for_pytorch.cpu.auto_ipex --dtype bfloat16 python_script args \\n\",\n formatter_class=RawTextHelpFormatter,\n )\n\n add_auto_ipex_params(parser, auto_ipex_default_enabled=True)\n\n # positional\n parser.add_argument(\n \"program\",\n type=str,\n help=\"The full path to the proram/script to be launched. \"\n \"followed by all the arguments for the script\",\n )\n # rest from the training program\n parser.add_argument(\"program_args\", nargs=REMAINDER)\n return parser.parse_args()", "def command_line_start(argv, program_name):\n cl_parser = argparse.ArgumentParser(description='Tinkerforge Data Logger')\n\n cl_parser.add_argument('config_file', help=\"Path to the configuration file\")\n cl_parser.add_argument('-v', action=\"store_true\", dest=\"validate\",\n help=\"Just process the validation of the configuration file\")\n\n results = cl_parser.parse_args(argv)\n\n arguments_map = {}\n arguments_map[CONSOLE_CONFIG_FILE] = results.config_file\n arguments_map[CONSOLE_VALIDATE_ONLY] = results.validate\n\n return arguments_map", "def _parse_args():\n parser = argparse.ArgumentParser(description='main.py')\n \n # General system running and configuration options\n parser.add_argument('--do_nearest_neighbor', dest='do_nearest_neighbor', default=False, action='store_true', help='run the nearest neighbor model')\n\n parser.add_argument('--train_path', type=str, default='data/geo_train.tsv', help='path to train data')\n parser.add_argument('--dev_path', type=str, default='data/geo_dev.tsv', help='path to dev data')\n parser.add_argument('--test_path', type=str, default='data/geo_test.tsv', help='path to blind test data')\n parser.add_argument('--test_output_path', type=str, default='geo_test_output.tsv', help='path to write blind test results')\n parser.add_argument('--domain', type=str, default='geo', help='domain (geo for geoquery)')\n \n # Some common arguments for your convenience\n parser.add_argument('--seed', type=int, default=0, help='RNG seed (default = 0)')\n parser.add_argument('--epochs', type=int, default=100, help='num epochs to train for')\n parser.add_argument('--lr', type=float, default=.001)\n parser.add_argument('--batch_size', type=int, default=2, help='batch size')\n # 65 is all you need for GeoQuery\n parser.add_argument('--decoder_len_limit', type=int, default=65, help='output length limit of the decoder')\n\n # Feel free to add other hyperparameters for your input dimension, etc. to control your network\n # 50-200 might be a good range to start with for embedding and LSTM sizes\n args = parser.parse_args()\n return args", "def _read_cmd_args():\n\n # Check if argument count is correct.\n if len(sys.argv) != 4:\n print(\"[ERR] Invalid number of command line arguments!\")\n print(len(sys.argv))\n print(sys.argv[:])\n _usage()\n sys.exit(1)\n\n # Check if lis.config template exists.\n lis_config_template = sys.argv[1]\n if not os.path.exists(lis_config_template):\n print(f\"[ERR] {lis_config_template} does not exist!\")\n sys.exit(1)\n\n # Check if directory for restart files exists. 
Actual restart file\n # shall be checked later.\n restart_dir = sys.argv[2]\n if not os.path.exists(restart_dir):\n print(f\"[ERR] Directory {restart_dir} does not exist!\")\n sys.exit(1)\n\n # Get start date of new LIS run.\n yyyymmdd = sys.argv[3]\n if len(yyyymmdd) != 8:\n print(\"[ERR] Invalid length for YYYYMMDD, must be 8 characters!\")\n sys.exit(1)\n year = int(yyyymmdd[0:4])\n month = int(yyyymmdd[4:6])\n day = int(yyyymmdd[6:8])\n try:\n startdate = datetime.date(year, month, day)\n except ValueError:\n print(\"[ERR] Invalid YYYYMMDD passed to script!\")\n sys.exit(1)\n\n return lis_config_template, restart_dir, startdate", "def parse_commandline():\n parser = optparse.OptionParser()\n\n parser.add_option(\"-l\",\"--lamp\", default=100, type=int)\n parser.add_option(\"-c\",\"--doCompile\", action=\"store_true\", default=False)\n parser.add_option(\"--doLamp\", action=\"store_true\", default=False)\n\n opts, args = parser.parse_args()\n\n return opts", "def parse_config(cmdline_opts):\n cmdline_opts.add_argument(\n '-p', '--port', help='Enter port number', default=8001)\n cmdline_opts.add_argument(\n '--host', help='Enter host name', default='localhost')\n cmdline_opts.add_argument(\n '-c', '--config', help='Enter config file', default='config.json')", "def parse_args():\n\n parser = ArgumentParser()\n parser.add_argument(\"config\", help=\"Path to config file\")\n parser.add_argument(\"-ncdc\", \"--download-ncdc\", action=\"store_true\", dest=\"d_ncdc\",\n help=\"Download new NCDC data (overwrites existing)\")\n arguments = parser.parse_args()\n\n return arguments", "def main(argv=None):\n\n parser = ArgParser(\n description=\"Extrapolate input data to required lead times.\")\n parser.add_argument(\"input_filepath\", metavar=\"INPUT_FILEPATH\",\n type=str, help=\"Path to input NetCDF file.\")\n\n group = parser.add_mutually_exclusive_group()\n group.add_argument(\"--output_dir\", metavar=\"OUTPUT_DIR\", type=str,\n default=\"\", help=\"Directory to write output files.\")\n group.add_argument(\"--output_filepaths\", nargs=\"+\", type=str,\n help=\"List of full paths to output nowcast files, in \"\n \"order of increasing lead time.\")\n\n optflw = parser.add_argument_group('Advect using files containing the x '\n ' and y components of the velocity')\n optflw.add_argument(\"--eastward_advection_filepath\", type=str, help=\"Path\"\n \" to input file containing Eastward advection \"\n \"velocities.\")\n optflw.add_argument(\"--northward_advection_filepath\", type=str, help=\"Path\"\n \" to input file containing Northward advection \"\n \"velocities.\")\n\n speed = parser.add_argument_group('Advect using files containing speed and'\n ' direction')\n speed.add_argument(\"--advection_speed_filepath\", type=str, help=\"Path\"\n \" to input file containing advection speeds,\"\n \" usually wind speeds, on multiple pressure levels.\")\n speed.add_argument(\"--advection_direction_filepath\", type=str,\n help=\"Path to input file containing the directions from\"\n \" which advection speeds are coming (180 degrees from\"\n \" the direction in which the speed is directed). The\"\n \" directions should be on the same grid as the input\"\n \" speeds, including the same vertical levels.\")\n speed.add_argument(\"--pressure_level\", type=int, default=75000, help=\"The\"\n \" pressure level in Pa to extract from the multi-level\"\n \" advection_speed and advection_direction files. 
The\"\n \" velocities at this level are used for advection.\")\n parser.add_argument(\"--orographic_enhancement_filepaths\", nargs=\"+\",\n type=str, default=None, help=\"List or wildcarded \"\n \"file specification to the input orographic \"\n \"enhancement files. Orographic enhancement files are \"\n \"compulsory for precipitation fields.\")\n parser.add_argument(\"--json_file\", metavar=\"JSON_FILE\", default=None,\n help=\"Filename for the json file containing \"\n \"required changes to the metadata. Information \"\n \"describing the intended contents of the json file \"\n \"is available in \"\n \"improver.utilities.cube_metadata.amend_metadata.\"\n \"Every output cube will have the metadata_dict \"\n \"applied. Defaults to None.\", type=str)\n parser.add_argument(\"--max_lead_time\", type=int, default=360,\n help=\"Maximum lead time required (mins).\")\n parser.add_argument(\"--lead_time_interval\", type=int, default=15,\n help=\"Interval between required lead times (mins).\")\n\n accumulation_args = parser.add_argument_group(\n 'Calculate accumulations from advected fields')\n accumulation_args.add_argument(\n \"--accumulation_fidelity\", type=int, default=0,\n help=\"If set, this CLI will additionally return accumulations\"\n \" calculated from the advected fields. This fidelity specifies the\"\n \" time interval in minutes between advected fields that is used to\"\n \" calculate these accumulations. This interval must be a factor of\"\n \" the lead_time_interval.\")\n accumulation_args.add_argument(\n \"--accumulation_period\", type=int, default=15,\n help=\"The period over which the accumulation is calculated (mins). \"\n \"Only full accumulation periods will be computed. At lead times \"\n \"that are shorter than the accumulation period, no accumulation \"\n \"output will be produced.\")\n accumulation_args.add_argument(\n \"--accumulation_units\", type=str, default='m',\n help=\"Desired units in which the accumulations should be expressed,\"\n \"e.g. 
mm\")\n\n # Load Cubes\n args = parser.parse_args(args=argv)\n\n metadata_dict = load_json_or_none(args.json_file)\n\n upath, vpath = (args.eastward_advection_filepath,\n args.northward_advection_filepath)\n spath, dpath = (args.advection_speed_filepath,\n args.advection_direction_filepath)\n\n # load files and initialise advection plugin\n input_cube = load_cube(args.input_filepath)\n orographic_enhancement_cube = load_cube(\n args.orographic_enhancement_filepaths, allow_none=True)\n\n speed_cube = direction_cube = ucube = vcube = None\n if (upath and vpath) and not (spath or dpath):\n ucube = load_cube(upath)\n vcube = load_cube(vpath)\n elif (spath and dpath) and not (upath or vpath):\n level_constraint = Constraint(pressure=args.pressure_level)\n try:\n speed_cube = load_cube(spath, constraints=level_constraint)\n direction_cube = load_cube(dpath, constraints=level_constraint)\n except ValueError as err:\n raise ValueError(\n '{} Unable to extract specified pressure level from given '\n 'speed and direction files.'.format(err))\n else:\n raise ValueError('Cannot mix advection component velocities with speed'\n ' and direction')\n\n # Process Cubes\n accumulation_cubes, forecast_to_return = process(\n input_cube, ucube, vcube, speed_cube, direction_cube,\n orographic_enhancement_cube, metadata_dict, args.max_lead_time,\n args.lead_time_interval, args.accumulation_fidelity,\n args.accumulation_period, args.accumulation_units)\n\n # Save Cube\n if args.output_filepaths and \\\n len(args.output_filepaths) != len(forecast_to_return):\n raise ValueError(\"Require exactly one output file name for each \"\n \"forecast lead time\")\n for i, cube in enumerate(forecast_to_return):\n # save to a suitably-named output file\n if args.output_filepaths:\n file_name = args.output_filepaths[i]\n else:\n file_name = os.path.join(\n args.output_dir, generate_file_name(cube))\n save_netcdf(cube, file_name)\n\n if args.accumulation_fidelity > 0:\n # return accumulation cubes\n for i, cube in enumerate(accumulation_cubes):\n file_name = os.path.join(args.output_dir, generate_file_name(cube))\n save_netcdf(cube, file_name)", "def parseArgs():\n # Configure the option parser for CLI options to the script\n usage = \"usage: %prog [options] userName password configlet xlfile\"\n parser = argparse.ArgumentParser(description=\"Excel File to JSON Configlet Builder\")\n parser.add_argument(\"--userName\", help='Username to log into CVP')\n parser.add_argument(\"--password\", help='Password for CVP user to login')\n parser.add_argument(\"--target\", nargs=\"*\", metavar='TARGET', default=[],\n help='List of CVP appliances to get snapshot from URL,URL')\n parser.add_argument(\"--snapshot\", help='CVP Snapshot containing Show Inventory and Show LLDP neighbor data')\n parser.add_argument(\"--opticType\", default='PSM4', help=\"Optic Type to look for\")\n parser.add_argument(\"--verbose\", default=False, help='Return more information to the command line')\n args = parser.parse_args()\n return checkArgs( args )", "def parse_args():\n parser = argparse.ArgumentParser()\n parser.add_argument('-e', '--env', default='production',\n help='Environment to check: integration, staging, production.')\n parser.add_argument('-l', '--log_type', default='govuk_assets',\n help='Which logs to check: govuk_assets, govuk_www.')\n parser.add_argument('-c', '--critical_age_minutes', type=int, default=60,\n help='If the newest logs are older than this many minutes, '\n 'return CRITICAL status.')\n parser.add_argument('-F', '--fake_time', 
type=fromisoformat,\n help='For testing purposes, use the given time as if it\\'s the current '\n 'time. Requires the format YYYY-MM-DDTHH:MM. Assumes UTC.')\n parser.add_argument('-v', '--verbose', action='count',\n help='Show DEBUG log messages.')\n return parser.parse_args()", "def arg_parse():\n p = ap.ArgumentParser()\n p.add_argument('infile',\n help='path to file containing objects')\n p.add_argument('n1',\n help='night 1')\n p.add_argument('n2',\n help='night 2')\n p.add_argument('observatory',\n help='Astropy name of observatory')\n return p.parse_args()", "def main(args):\n cli = CLI()\n # Check arguments\n cli.parse_arguments(args)", "def parse_args():\n hpo_warning = 'Flag overwrites config value if set, used for HPO and PBT runs primarily'\n parser = argparse.ArgumentParser('train.py')\n add_arg = parser.add_argument\n add_arg('config', nargs='?', default='configs/hello.yaml')\n add_arg('-d', '--distributed', choices=['ddp-file', 'ddp-mpi', 'cray'])\n add_arg('-v', '--verbose', action='store_true')\n add_arg('--ranks-per-node', default=8)\n add_arg('--gpu', type=int)\n add_arg('--rank-gpu', action='store_true')\n add_arg('--resume', action='store_true', help='Resume from last checkpoint')\n add_arg('--show-config', action='store_true')\n add_arg('--interactive', action='store_true')\n add_arg('--output-dir', help='override output_dir setting')\n add_arg('--seed', type=int, default=0, help='random seed')\n add_arg('--fom', default=None, choices=['last', 'best'],\n help='Print figure of merit for HPO/PBT')\n add_arg('--n-train', type=int, help='Override number of training samples')\n add_arg('--n-valid', type=int, help='Override number of validation samples')\n add_arg('--batch-size', type=int, help='Override batch size. %s' % hpo_warning)\n add_arg('--n-epochs', type=int, help='Specify subset of total epochs to run')\n add_arg('--real-weight', type=float, default=None,\n help='class weight of real to fake edges for the loss. %s' % hpo_warning)\n add_arg('--lr', type=float, default=None,\n help='Learning rate. %s' % hpo_warning)\n add_arg('--hidden-dim', type=int, default=None,\n help='Hidden layer dimension size. %s' % hpo_warning)\n add_arg('--n-graph-iters', type=int, default=None,\n help='Number of graph iterations. %s' % hpo_warning)\n add_arg('--weight-decay', type=float)\n return parser.parse_args()", "def parse_args():\n hpo_warning = 'Flag overwrites config value if set, used for HPO and PBT runs primarily'\n parser = argparse.ArgumentParser('train.py')\n add_arg = parser.add_argument\n add_arg('config', nargs='?', default='GraphLearning/configs/myconfig.yaml')\n add_arg('results_dir', nargs='?', default='GraphLearning/results/withnoise')\n \n return parser.parse_args()", "def parse_args():\n \n parser = argparse.ArgumentParser()\n parser.add_argument(\n 'config',\n help='Config file')\n parser.add_argument(\n '--quiet',\n '-q',\n action='store_true',\n help='do not print to console'\n )\n parser.add_argument(\n '--password',\n '-p',\n action='store_true',\n help='Set password in keyring.'\n )\n parser.add_argument(\n '--update',\n '-u',\n action='store_true',\n help='Only add transactions after last date in database.'\n )\n parser.add_argument(\n '--mark_seen',\n '-m',\n action='store_true',\n help='Mark fetched emails as seen.'\n )\n\n return parser.parse_args()" ]
[ "0.63322717", "0.6321284", "0.6277704", "0.6245351", "0.6236686", "0.61924034", "0.6171203", "0.614352", "0.61078733", "0.60279673", "0.6020191", "0.6009043", "0.5987942", "0.59697133", "0.59659815", "0.5956007", "0.5943503", "0.59413415", "0.59382457", "0.5930941", "0.5920589", "0.5910325", "0.5903821", "0.59005755", "0.589238", "0.588992", "0.58855844", "0.58767784", "0.5870761", "0.5868596" ]
0.6638938
0
Returns the command for a pid if that process exists.
def get_command_for_pid(pid: int) -> str:
    try:
        result = subprocess.run(
            f"ps -p {pid:d} -o cmd h", stdout=subprocess.PIPE, shell=True
        )
        return result.stdout.decode("utf-8").strip()
    except subprocess.CalledProcessError as _e:
        return ""
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_command(pid):", "def getpid(command):\n try:\n _pidof = executeCommand(command)\n except Exception as er:\n print (\" not able to get pid\")\n return False\n return _pidof", "def check_pid(pid):\n result = None\n try:\n s = os.stat('/proc/' + pid)\n if s.st_uid == our_uid:\n cwd = os.path.realpath('/proc/' + pid + '/cwd')\n if cwd == kill_dir and int(pid) != our_pid:\n f = open('/proc/' + pid + '/cmdline')\n cmdline = f.read().split('\\x00')[:-1]\n f.close()\n result = cmdline\n except OSError:\n # We can't read all our processes; that's ok\n pass\n return result", "def process_exists(name):\n for pid in [pid for pid in os.listdir(\"/proc\") if pid.isdigit()]:\n try:\n exe_name = os.readlink(os.path.join(\"/proc/\", pid, \"exe\"))\n except OSError:\n continue\n if exe_name and exe_name.endswith(os.path.join(\"/\", name)):\n return pid\n return None", "def get_process(self, pid):\n return self.processes.get(pid, None)", "def get_pid_name(pid):\n try:\n with open(os.path.join('/proc/', pid, 'cmdline'), 'r') as pidfile:\n try:\n cmd = pidfile.readline().split()[0]\n return os.path.basename(cmd).rstrip('\\x00')\n except IndexError:\n # no cmd returned\n return \"<NO NAME>\"\n except IOError:\n # upstream wait any string, no matter if we couldn't read proc\n return \"no_such_process\"", "def pfind(pid):\n for p in list_foreach(\"allproc\", \"p_list\"):\n if p['p_pid'].cast(gdb.lookup_type(\"int\")) == pid:\n return p\n raise gdb.error(\"No process with pid {} exists\".format(pid))", "def comm_for_pid(pid):\n try:\n return slurp('/proc/%d/comm' % pid)\n except IOError:\n return None", "def get_process_name(pid):\n proc = subprocess.Popen(['ps', '-p', pid, '-o', 'comm='],\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n out, err=proc.communicate()\n return out.strip().decode('utf-8')", "def get_process_object(pid, die=True):\n try:\n return psutil.Process(pid)\n except psutil.NoSuchProcess as e:\n if die:\n raise e\n else:\n return None", "def get_command_line(pid, default=None):\n try:\n return only(\n process.Properties_(\"CommandLine\").Value\n for process in win32com.client.GetObject('winmgmts:').InstancesOf('Win32_Process')\n if process.Properties_(\"ProcessID\").Value == pid\n )\n except TooFewItemsError:\n return default", "def pidof(processname = None):\n processname = os.path.basename(processname)\n pidpath = os.path.join(pid_path,processname + \".pid\")\n if processname is not None and os.path.exists(pidpath):\n f = open (pidpath)\n pids = f.readlines()\n f.close()\n return pids\n else:\n return False", "def pidExists(self, pid):\n\n prochash = self.getHash( 'datahash' ) # safely get copy of process dict\n\n try:\n prochash[pid]\n return 1\n except KeyError:\n return 0", "def pidExists(self, pid):\n\n prochash = self.getHash( 'datahash' ) # safely get copy of process dict\n\n try:\n prochash[pid]\n return 1\n except KeyError:\n return 0", "def read_piddir_command(pid_dir):\n comm_path = os.path.join(pid_dir, 'comm')\n comm = None\n try:\n with open(comm_path, 'r') as commfile:\n comm = commfile.read().strip()\n except IOError as exc:\n if exc.errno in (errno.ENOENT, ):\n pass\n else:\n raise\n\n cmdline_path = os.path.join(pid_dir, 'cmdline')\n try:\n with open(cmdline_path, 'r') as cmdlinefile:\n cmdline = cmdlinefile.read().strip().split('\\0')[0]\n comm_from_cmdline = cmdline.rsplit('/', 1)[-1]\n if not comm_from_cmdline or comm == comm_from_cmdline:\n return comm\n elif not comm:\n return comm_from_cmdline\n else:\n # Strip starting - for login shells\n if 
comm_from_cmdline[0] == '-' and comm_from_cmdline[1:].startswith(comm):\n comm_from_cmdline = comm_from_cmdline[1:]\n if comm == comm_from_cmdline:\n return comm\n # comm may be truncated\n if comm_from_cmdline.startswith(comm):\n return comm + comm_from_cmdline[len(comm):].split(' ', 1)[0]\n\n # Combine cmdline and comm\n return '{}<{}>'.format(comm_from_cmdline, comm)\n except IOError as exc:\n if exc.errno in (errno.ENOENT, ):\n pass\n else:\n raise\n return comm", "def check_process_for_pid(pid, process_name):\n pid = int(pid)\n proc = psutil.Process(pid)\n return proc.name() == process_name", "def get_process(proc_name):\n #LOG = log.getLogger(__name__)\n procList = []\n try:\n for pr in psutil.process_iter():\n for args in pr.cmdline():\n if proc_name in args:\n procList.append(pr.pid)\n return procList\n except BaseException as e:\n print(\"Error in fetching process: {}\".format(e))\n return None", "def get_cmd(cmd):\n try:\n logger.debug(\"get_cmd: %s\" % cmd)\n return subprocess.check_output(cmd, shell=True, \n stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n logger.warn(\"error executing command: %s\" % e)\n return None", "def get_process_by_process_id(self, process_id):\n try:\n process = Process.objects.get(pk=process_id)\n except Process.DoesNotExist:\n process = None\n\n return process", "def get_process_pid(robot_name):\n\n try:\n result = check_output(['pgrep', 'x{0}'.format(robot_name)])\n return int(result.strip())\n except:\n return None", "def get_command(self, ctx, cmd_name):\n path = \"%s.%s\" % (__name__, cmd_name)\n path = path.replace(\"-\", \"_\")\n try:\n module = importlib.import_module(path)\n return getattr(module, 'cli')\n except ModuleNotFoundError as ex:\n print(ex.name)\n return None", "def process_cmdline(pid_info):\n\tif pid_info[\"cmdline\"]:\n\t\treturn reduce(lambda a, b: a + \" %s\" % b, pid_info[\"cmdline\"]).strip()\n\n\treturn pid_info[\"stat\"][\"comm\"]", "def is_process_running(pid):\n return os.path.exists(\"/proc/%s\" % pid)", "def get_command(command):\n for _cmd in commands:\n if _cmd.command == command:\n return _cmd\n raise UserWarning(\"telegram command not found.\")", "def get_process_by_port(port):\n pcons = [proc for proc in psutil.net_connections() if proc.laddr.port == port and proc.status == \"LISTEN\"]\n if pcons:\n pid = pcons[0].pid\n if not pid:\n raise j.exceptions.Runtime(\"No pid found maybe permission denied on the process\")\n return psutil.Process(pid)", "def pid_is_running(pid):\n try:\n os.kill(pid, 0)\n\n except OSError:\n return\n\n else:\n return pid", "def __read_command_line(self, pid):\n pf = None\n try:\n pf = file('/proc/%d/cmdline' % pid, 'r')\n return pf.read().strip()\n finally:\n if pf is not None:\n pf.close()", "def pid_exists(pid):\n # http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid\n if os.name == 'posix':\n # OS X and Linux\n import errno\n if pid < 0:\n return False\n try:\n os.kill(pid, 0)\n except OSError as e:\n return e.errno == errno.EPERM\n else:\n return True\n else:\n # Windows\n import ctypes\n kernel32 = ctypes.windll.kernel32\n HANDLE = ctypes.c_void_p\n DWORD = ctypes.c_ulong\n LPDWORD = ctypes.POINTER(DWORD)\n class ExitCodeProcess(ctypes.Structure):\n _fields_ = [ ('hProcess', HANDLE),\n ('lpExitCode', LPDWORD)]\n\n SYNCHRONIZE = 0x100000\n process = kernel32.OpenProcess(SYNCHRONIZE, 0, pid)\n if not process:\n return False\n\n ec = ExitCodeProcess()\n out = kernel32.GetExitCodeProcess(process, ctypes.byref(ec))\n 
if not out:\n err = kernel32.GetLastError()\n if kernel32.GetLastError() == 5:\n # Access is denied.\n logging.warning(\"Access is denied to get pid info.\")\n kernel32.CloseHandle(process)\n return False\n elif bool(ec.lpExitCode):\n # print ec.lpExitCode.contents\n # There is an exist code, it quit\n kernel32.CloseHandle(process)\n return False\n # No exit code, it's running.\n kernel32.CloseHandle(process)\n return True", "def get_rtmpdump_cmd(pid):\n url = URL_BASE + CONVERT.format(pid)\n root = lxml.html.parse(url)\n codes = root.findall('.//p/code')\n if len(codes) == 1:\n return codes[0].text\n elif len(codes) < 1:\n print 'pid2rtmpdump: <p><code> not found!'\n return False\n else:\n print 'pid2rtmpdump: more than one <p><code> elements found: {0}'.format(len(codes))\n return False", "def check_ps_cmd():\n try:\n p1 = Popen([\"ps\", \"aux\"], stdout=PIPE)\n p2 = Popen([\"grep\", PROC_SCRIPT_NAME], stdin=p1.stdout, \\\n stdout=PIPE)\n p3 = Popen([\"grep\", \"-v\", \"grep\"], stdin=p2.stdout, stdout=PIPE)\n output = p3.communicate()[0]\n return output\n except Exception, e:\n print >>sys.stderr, \"Execution failed:\", e\n return None" ]
[ "0.7689884", "0.72743213", "0.7050953", "0.6783562", "0.6736396", "0.6593299", "0.6572618", "0.6539528", "0.6495064", "0.64656216", "0.64396435", "0.6315611", "0.6309004", "0.6309004", "0.6278498", "0.6276674", "0.6251536", "0.618571", "0.6111853", "0.6089545", "0.6050999", "0.5998401", "0.5994654", "0.5992435", "0.5967901", "0.5964996", "0.5943097", "0.59371895", "0.59142715", "0.5900433" ]
0.7887666
0
Checks if a subprocess is still active.
def still_active(pid: int, cmd: str) -> bool:
    os_cmd = get_command_for_pid(pid)
    return cmd in os_cmd
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _proc_is_alive(self):\n if self._proc is None:\n return False\n\n return self._proc.poll() is None", "def isRunning(self):\n if not self.running:\n return False\n elif self.process.poll() == 0 or self.process.returncode >= 0:\n return False\n else:\n return True", "def is_running(self):\n if self._process:\n return self._process.poll() is None\n else:\n return False", "def _isSubProcessRunning(self): \n # Check if child process has terminated. Set and return returncode attribute.\n if self.__process.poll() is None:\n return True\n else:\n return False", "def running(self):\n return self.sub_process and self.sub_process.is_alive()", "def is_running(self):\n if self._process and self._process.poll() is None:\n return True\n return False", "def _is_alive(self) -> bool:\n\n if self._on:\n return True\n\n try:\n os.kill(self.proc.pid, 0)\n except (OSError, ProcessLookupError):\n return False\n\n return True", "def proc_is_alive(pid):\n try:\n os.kill(pid, 0)\n except OSError as e:\n if e.errno == errno.EPERM:\n return True\n if e.errno == errno.ESRCH:\n return False\n raise # something else went wrong\n else:\n return True", "def alive(self):\n\n return self.subprocess.poll() is None and not self.thread_stop.is_set()", "def check_parent_processes_alive():\n cur_process = psutil.Process()\n parent = cur_process.parent()\n while True:\n time.sleep(1)\n if not parent.is_running():\n break\n\n logger.warning(\"Parent process is terminated abnormally. Process exits.\")\n cur_process.kill()", "def alive(self):\n return self._proc is not None and self._proc.poll() is None", "def get_status(self) -> bool:\n try:\n self.__driver.service.assert_process_still_running()\n return True\n except AttributeError:\n return False", "def proc_is_alive(pid):\n handle = windll.kernel32.OpenProcess(\n win32con.SYNCHRONIZE | win32con.PROCESS_QUERY_INFORMATION, 0, pid)\n if handle == 0:\n return False\n\n # If the process exited recently, a pid may still exist for the handle.\n # So, check if we can get the exit code.\n exit_code = DWORD()\n rval = windll.kernel32.GetExitCodeProcess(handle, byref(exit_code))\n windll.kernel32.CloseHandle(handle)\n if rval == 0: # GetExitCodeProcess failure\n raise WinError()\n return exit_code.value == win32con.STILL_ACTIVE", "def _is_alive(self, pid):\n process = next(x for x in self._processes if x.pid == pid)\n return process.is_alive()", "def isRunning(self):\n if not self.hasBeenStarted():\n return False\n \n if not self._slave_dhcp_client_proc.poll(): # Poll our direct child (sudo)\n return False\n \n for pid in self._all_processes_pid:\n if not self._checkPid(pid):\n return False\n \n return True", "def is_proc_alive(pid):\n return os.path.isdir(\"/proc/%i\" % pid)", "def _is_running(self):\n try:\n # Process is not killed, os.kill(pid, 0) does nothing but raise if process does not\n # exist.\n os.kill(self.pid, 0)\n except ProcessLookupError:\n return False\n else:\n return True", "def is_proc_running(name):\n\n for p in psutil.process_iter(['name']):\n if p.info['name'] == name:\n return True\n\n return False", "def is_running(self):\r\n if self._gone:\r\n return False\r\n try:\r\n # Checking if pid is alive is not enough as the pid might\r\n # have been reused by another process.\r\n # pid + creation time, on the other hand, is supposed to\r\n # identify a process univocally.\r\n return self.create_time == \\\r\n self.get_process_create_time()\r\n except NoSuchProcess:\r\n self._gone = True\r\n return False", "def is_alive(pid):\n pid = int(pid)\n return 
psutil.pid_exists(pid)", "def is_process_running(pid):\n return os.path.exists(\"/proc/%s\" % pid)", "def is_process_running(name):\n if not hasattr(is_process_running, \"proc\"):\n is_process_running.proc = None # it doesn't exist yet, so init it\n\n if is_process_running.proc:\n if is_process_running.proc.is_running():\n return True\n else:\n is_process_running.proc = None\n return False\n else:\n for p in psutil.process_iter():\n if p.name() == name:\n is_process_running.proc = p\n return True\n #\n return False", "def is_alive(self):\n result = execute('ps -Ao pgid', check_pg_alive=False, stdout=PIPE)\n pgids = result['stdout'].decode('utf8').split()\n return str(self.process.pid) in pgids", "def is_running(self):\n qstat = self._grep_qstat('running')\n if qstat:\n return True\n return False", "def check_pid_is_running(self):\n if not os.path.exists(self.__file):\n return True\n\n with open(self.__file, \"r\") as f:\n try:\n pid = int(f.read().strip())\n except Exception:\n return True\n\n try:\n os.kill(pid, 0)\n except OSError:\n return True\n\n return self.check_process_cmd_line(pid)", "def check_finish(self):\r\n return not self.proc.is_alive()", "def is_instance_running(self):\n try:\n self.instance.wait(timeout=1)\n except psutil.TimeoutExpired:\n pass\n return self.instance.is_running()", "def alive(self):\n return self._process.is_alive()", "def check_command(self):\n return self.process is not None and self.process.poll() is None", "def is_running(self):\n if self.__process.poll() is not None: # process has ended\n for nbsr in (\"stdout\", \"stderr\"):\n getattr(self, nbsr).finalise()\n return False\n return True" ]
[ "0.7778317", "0.75869554", "0.7559739", "0.7541411", "0.7532069", "0.751048", "0.74623996", "0.732467", "0.7226817", "0.72260404", "0.72010744", "0.7168987", "0.71515125", "0.7148874", "0.7137039", "0.7127399", "0.7125717", "0.70777947", "0.70722777", "0.7040544", "0.7028164", "0.70178145", "0.7004424", "0.6910155", "0.6856732", "0.6840615", "0.681995", "0.68129456", "0.68101984", "0.6805673" ]
0.7768741
1
This function gets the previous list of running processes, the resources, and return the new list of pids. The resources are modified if some processes ended.
def refresh_pids(active_pids, resources):
    still_active_pids = []
    no_change = True
    for info in active_pids:
        pid, gpu, title, cmd, lock_path = info
        if still_active(pid, cmd):
            still_active_pids.append(info)
        else:
            print(f"[{time.strftime(time.ctime())}] {title} seems to be over.")
            os.remove(lock_path)
            resources.free(gpu=gpu)
            no_change = False
    return still_active_pids, no_change
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_processes_running():\r\n p = [] #array of processes\r\n if platform == \"linux\" or platform == \"linux2\":\r\n for proc in psutil.process_iter():\r\n try:\r\n tmp=Process(proc.name(),int(proc.pid),proc.username(),int(0),int(0))\r\n p.append(tmp)\r\n except:\r\n continue\r\n return (p)\r\n\t\t\t\r\n tasks = check_output(['tasklist']).decode('cp866', 'ignore').split(\"\\r\\n\")\r\n for task in tasks:\r\n m = re.match(b'(.*?)\\\\s+(\\\\d+)\\\\s+(\\\\w+)\\\\s+(\\\\w+)\\\\s+(.*?)\\\\s.*', task.encode())\r\n if m is not None:\r\n tmp=Process(m.group(1).decode(),int(m.group(2).decode()),m.group(3).decode(),int(m.group(4).decode()),int(m.group(5).decode('ascii', 'ignore')))\r\n p.append(tmp)\r\n #m.group(1).decode() image name\r\n #m.group(2).decode() process id\r\n #m.group(3).decode() session_name\r\n #m.group(4).decode() session_num\r\n #m.group(5).decode('ascii', 'ignore') memory usage\r\n return(p)", "def pids(self):\r\n return copy(self._pids)", "def pslist(self) -> Generator[dict, None, None]:\n\n # Function to switch fields to represent a parent\n def _convert_to_parent_fields(process: dict) -> dict:\n output = {}\n for left, right in [\n (FieldNames.PROCESS_IMAGE, FieldNames.PARENT_PROCESS_IMAGE),\n (FieldNames.PROCESS_ID, FieldNames.PARENT_PROCESS_ID),\n (FieldNames.COMMAND_LINE, FieldNames.PARENT_COMMAND_LINE),\n (FieldNames.PROCESS_IMAGE_PATH, FieldNames.PARENT_PROCESS_IMAGE_PATH),\n ]:\n output[right] = process[left]\n\n return output\n\n # Use the pstree dict output to get a mapping from pid -> proc\n procs = self.session.plugins.pstree()._make_process_dict()\n\n parent_procs: Dict[int, dict] = {}\n\n # Add the system idle process\n parent_procs[0] = {\n FieldNames.PARENT_PROCESS_ID: 0,\n FieldNames.PARENT_COMMAND_LINE: \"\",\n FieldNames.PARENT_PROCESS_IMAGE: \"System Idle Process\",\n FieldNames.PARENT_PROCESS_IMAGE_PATH: \"\\\\\",\n }\n\n for proc in procs.values():\n\n parent_pid = proc.InheritedFromUniqueProcessId\n\n # Get the current processes info\n command_line = str(proc.Peb.ProcessParameters.CommandLine)\n image_path = str(proc.Peb.ProcessParameters.ImagePathName)\n\n if int(proc.pid) == 4:\n process_image = \"SYSTEM\"\n process_image_path = \"\\\\\"\n else:\n process_image, process_image_path = split_path(image_path)\n\n current_proc = {\n FieldNames.EVENT_TYPE: EventTypes.PROCESS_LAUNCHED,\n FieldNames.PROCESS_ID: int(proc.pid),\n FieldNames.COMMAND_LINE: command_line,\n FieldNames.PROCESS_IMAGE: process_image,\n FieldNames.PROCESS_IMAGE_PATH: process_image_path,\n }\n\n # Keep track of the processes.\n self.processes[int(proc.pid)] = current_proc\n\n current_as_parent = _convert_to_parent_fields(current_proc)\n parent_procs[int(proc.pid)] = current_as_parent\n\n # Parse the parent process\n if parent_pid not in parent_procs:\n\n # Do we the _EPROCESS for this process?\n if int(parent_pid) in procs:\n parent = procs[int(parent_pid)]\n parent_image_path = parent.Peb.ProcessParameters.ImagePathName\n\n parent_process_image, parent_process_image_path = split_path(\n str(parent_image_path)\n )\n\n parent_proc = {\n FieldNames.PARENT_PROCESS_ID: int(parent.pid),\n FieldNames.PARENT_COMMAND_LINE: parent.Peb.ProcessParameters.CommandLine,\n FieldNames.PARENT_PROCESS_IMAGE: parent_process_image,\n FieldNames.PARENT_PROCESS_IMAGE_PATH: parent_process_image_path,\n }\n\n # If not, make a dummy one with the PID\n else:\n parent_proc = {\n FieldNames.PARENT_PROCESS_ID: int(parent_pid),\n FieldNames.PARENT_COMMAND_LINE: \"\",\n FieldNames.PARENT_PROCESS_IMAGE: \"\",\n 
FieldNames.PARENT_PROCESS_IMAGE_PATH: \"\",\n }\n\n parent_procs[int(parent_pid)] = parent_proc\n\n yield {**current_proc, **parent_procs[int(parent_pid)]}", "def get_processes():\n yield from psutil.process_iter()", "def get_pid_list():\r\n pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]\r\n return pids", "def get_running_processes(self):\n\n all_processes = []\n for _process in self.processes:\n all_processes.append(_process[\"pid\"])\n return all_processes", "def get_process_list() -> Dict:\n return {proc.pid: proc.name() for proc in psutil.process_iter()}", "def processes(self):\n # MODIFIED 11/1/16 OLD:\n return list(item.process for item in self.process_tuples)\n # # MODIFIED 11/1/16 NEW:\n # return sorted(list(item.process for item in self.process_tuples), key=lambda process: process.name)\n # MODIFIED 11/1/16 END", "def process_iter():\r\n def add(pid):\r\n proc = Process(pid)\r\n _pmap[proc.pid] = proc\r\n return proc\r\n\r\n def remove(pid):\r\n _pmap.pop(pid, None)\r\n\r\n a = set(get_pid_list())\r\n b = set(_pmap.keys())\r\n new_pids = a - b\r\n gone_pids = b - a\r\n\r\n for pid in gone_pids:\r\n remove(pid)\r\n for pid, proc in sorted(list(_pmap.items()) + \\\r\n list(dict.fromkeys(new_pids).items())):\r\n try:\r\n if proc is None: # new process\r\n yield add(pid)\r\n else:\r\n # use is_running() to check whether PID has been reused by\r\n # another process in which case yield a new Process instance\r\n if proc.is_running():\r\n yield proc\r\n else:\r\n yield add(pid)\r\n except NoSuchProcess:\r\n remove(pid)\r\n except AccessDenied:\r\n # Process creation time can't be determined hence there's\r\n # no way to tell whether the pid of the cached process\r\n # has been reused. Just return the cached version.\r\n yield proc", "def get_all_current_processes():\n p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)\n out, err = p.communicate()\n return out", "def getActiveProcesses():\n active = []\n\n for p in PROCESSRUNNER_PROCESSES:\n if p.is_alive():\n active.append(p)\n\n return active", "def pids(self):\n return self._pidToProcess.iterkeys()", "def waiting_procs(self):\n return [p.model_id for p in self.primary_scheduler.queue_nodes.wait_q]", "def running_procs(self) -> List[int]:\n return [p.model_id for p in self.primary_scheduler.queue_nodes.run_q]", "def _select_processes(self):\n\n # check if at least one process is running\n is_running = False\n for pid in self.__pids:\n if ProcessMonitor.__is_running(pid):\n is_running = True\n break # at least one process is running\n\n if is_running:\n if not self.__aggregate_multiple_processes:\n return self.__pids\n\n # aggregate metrics, check the last discovered time\n if (\n self.__last_discovered\n and time.time() * 1000 - self.__last_discovered\n < self.__process_discovery_interval * 1000\n ):\n return self.__pids\n\n ps = ProcessList()\n if self.__commandline_matcher:\n self.__last_discovered = time.time() * 1000\n if self.__include_child_processes:\n matched_processes = ps.get_matches_commandline_with_children(\n self.__commandline_matcher\n )\n else:\n matched_processes = ps.get_matches_commandline(\n self.__commandline_matcher\n )\n self.__pids = matched_processes\n\n if not self.__aggregate_multiple_processes and len(self.__pids) > 1:\n # old behaviour where multiple processes were not supported for aggregation\n self._logger.warning(\n \"Multiple processes match the command '%s'. Returning existing pid. 
\"\n \"You can turn on the multi process aggregation support by adding the \"\n \"aggregate_multiple_processes configuration to true\"\n % self.__commandline_matcher,\n limit_once_per_x_secs=300,\n limit_key=\"linux-process-monitor-existing-pid\",\n )\n self.__pids = [self.__pids[0]]\n else:\n # See if the specified target pid is running. If so, then return it.\n # Special cases:\n # '$$' mean this process.\n # '$$TBD' mean that the PID of the target process has not been determined yet and it will be set later.\n pids = []\n if self.__target_pids:\n for t_pid in self.__target_pids:\n if t_pid == \"$$\":\n t_pid = int(os.getpid())\n\n # skip this until it will be replaced with a real PID.\n elif t_pid == \"$$TBD\":\n continue\n else:\n t_pid = int(t_pid)\n pids.append(t_pid)\n self.__pids = pids\n return self.__pids", "def existing_pipe_ids():\n ids_list = []\n if not os.path.exists(os.path.dirname(__file__) + LAST_RUN_FILE): # Check if record file exist\n pipe_id_file = open(os.path.dirname(__file__) + LAST_RUN_FILE, \"a+\") # if not then create\n else:\n pipe_id_file = open(os.path.dirname(__file__) + LAST_RUN_FILE, \"r+\") # else, start checking the list\n pipelines = []\n for existing_pipeline in pipe_id_file:\n pipelines = existing_pipeline.split(\",\")\n\n ids_list = [int(pipeline) for pipeline in pipelines]\n\n pipe_id_file.close()\n return ids_list", "def PIDs():\n from ctypes import windll,c_ulong,byref,sizeof\n PIDs = (c_ulong*512)()\n size_of_PIDs = c_ulong()\n windll.psapi.EnumProcesses(byref(PIDs),sizeof(PIDs),byref(size_of_PIDs))\n nPIDs = size_of_PIDs.value/sizeof(c_ulong())\n pidProcess = sorted([int(i) for i in PIDs][:nPIDs])\n return pidProcess", "def pids(self):\n resp = self.server.request(\"get\", \"/jobs/%s/%s/pids\" % (\n self.sessionid, self.name))\n result = self.server.json_body(resp)\n return result['pids']", "def __iter__(self):\n seen = self.seen\n if time_now() - self.last_cleanup_time > self.cleanup_seen_interval:\n # Time to cleanup seen set\n to_remove = set()\n for pid in seen:\n # Remove from seen if PID no longer running\n if not P.exists(P.join(PROC_DIR, str(pid))):\n to_remove.add(pid)\n\n seen -= to_remove\n self.last_cleanup_time = time_now()\n\n for file in os.listdir(PROC_DIR):\n try:\n pid = int(file)\n if pid not in seen:\n self._new_pids.append(pid)\n\n except ValueError:\n # Non PID file in /proc\n pass\n\n seen.update(self._new_pids)\n\n return self", "def pid_processes(self):\n return [(process.namespec(), process.infos[self.address_name]['pid'])\n for process in self.processes.values()\n if process.pid_running_on(self.address_name)]", "def get_pid(name: str) -> Set[int]:\n process_pids = set()\n for proc in psutil.process_iter():\n if name == proc.name():\n pid = proc.pid\n process_pids.add(pid)\n return process_pids", "def get_pids(pid):\n\n pids=set([pid])\n for child in get_children(pid):\n pids.update(traverse_tree(child,pids))\n \n return list(pids)", "def returncodes(self):\n for p in self.processes:\n p.wait()\n codes = [p.poll() for p in self.processes]\n if set(codes) == set([0]):\n return []\n return codes", "def get_unstopped_processes(self):\r\n return [ x for x in self.processes.values() if x.get_state() not in\r\n STOPPED_STATES ]", "def get_running_processes(self, dev_handler):\n # Get the list of running processes on each device\n running_processes = NvmlHandler.exec_nvml_function(nvmlDeviceGetComputeRunningProcesses,dev_handler)\n\n # Turns these process objects into dicts\n running_processes_dicts = [obj.__dict__ for obj in 
running_processes if obj]\n\n # Enhance these dicts with information from psutil\n new_dicts = []\n for running_processes_dict in running_processes_dicts:\n\n # Init the new dict with the current information\n more_ps_infos = {}\n more_ps_infos.update(running_processes_dict)\n\n # Rename the usedGpuMemory key, if any\n if 'usedGpuMemory' in more_ps_infos:\n more_ps_infos['gpu_memory_used'] = utils.psutil_parse_readable_bytes(\n more_ps_infos.get('usedGpuMemory')\n )\n del more_ps_infos['usedGpuMemory']\n\n # Try to retreive info about the process using psutil\n try:\n pid = running_processes_dict.get('pid')\n more_ps_infos.update(utils.psutil_snapshot_process(pid))\n except Exception as e:\n logger.warning('Cannot gather info from process {}'.format(pid))\n\n new_dicts.append(more_ps_infos)\n\n return new_dicts", "def ListProcesses(self):\n stdout, stderr = self.RunCmdOnDevice(\n [\n '/bin/ps', '--no-headers', '-A', '-o', 'pid,ppid,args:4096,state'\n ],\n quiet=True)\n assert stderr == '', stderr\n procs = []\n for l in stdout.split('\\n'):\n if l == '':\n continue\n m = re.match(r'^\\s*(\\d+)\\s+(\\d+)\\s+(.+)\\s+(.+)', l, re.DOTALL)\n assert m\n procs.append((int(m.group(1)), m.group(3).rstrip(), int(m.group(2)),\n m.group(4)))\n logging.debug(\"ListProcesses(<predicate>)->[%i processes]\" % len(procs))\n return procs", "def get_pids(name=None):\n results = []\n for process in win32com.client.GetObject('winmgmts:').InstancesOf('Win32_Process'):\n if name is None or process.Properties_(\"Name\").Value == name:\n results.append(process.Properties_(\"ProcessID\").Value)\n return results", "def ListProcesses(self):\n stdout, stderr = self.RunCmdOnDevice(\n ['/bin/ps', '--no-headers', '-A', '-o', 'pid,ppid,args:4096,state'],\n quiet=True)\n assert stderr == '', stderr\n procs = []\n for l in stdout.split('\\n'):\n if l == '':\n continue\n m = re.match(r'^\\s*(\\d+)\\s+(\\d+)\\s+(.+)\\s+(.+)', l, re.DOTALL)\n assert m\n procs.append(\n (int(m.group(1)), m.group(3).rstrip(), int(m.group(2)), m.group(4)))\n logging.debug(\"ListProcesses(<predicate>)->[%i processes]\" % len(procs))\n return procs", "def processes(start, end, processes):\n end_things = [processes[x][2] for x in range(len(processes))]\n if start == end or end not in end_things:\n return []\n\n seq = []\n seen = 0\n inp = ''\n out = ''\n do = ''\n \n for i in range(len(processes)):\n if processes[i][2] == end:\n out = processes[i][2]\n inp = processes[i][1]\n do = processes[i][0]\n seq.append(do)\n seen += 1\n break\n\n while seen < len(processes):\n for i in range(len(processes)):\n if processes[i][2] == inp:\n out = processes[i][2]\n inp = processes[i][1]\n do = processes[i][0]\n seq.append(do)\n seen += 1\n\n seq.reverse()\n return seq", "def pidof(process_name):\n\n\tpids = []\n\n\tif 'licornd' in process_name:\n\t\t# licorn / linux 3.x specifiq : we can match 'licornd/wmi'\n\t\t# faster than 'licornd-wmi', and in some case the 'cmdline'\n\t\t# is empty, whereas the 'comm' is not.\n\t\tnames = [ process_name, process_name.replace('/', '-') ]\n\n\telse:\n\t\tnames = [ process_name ]\n\n\tfor entry in os.listdir('/proc'):\n\t\tif entry.isdigit():\n\t\t\ttry:\n\n\t\t\t\tif cgroup and open('/proc/%s/cpuset' % entry).read().strip() != cgroup:\n\t\t\t\t\tlogging.progress(_(u'Skipped process @{0} which is not '\n\t\t\t\t\t\t\t\t\t\tu'in the same cgroup.').format(entry))\n\t\t\t\t\tcontinue\n\n\t\t\t\ttry:\n\t\t\t\t\t# Linux 3.x only\n\t\t\t\t\tcommand_line1 = open('/proc/%s/comm' % 
entry).read().strip()\n\t\t\t\texcept:\n\t\t\t\t\tcommand_line1 = ''\n\n\t\t\t\tcommand_line2 = open('/proc/%s/cmdline' % entry).read().strip()\n\n\t\t\t\tfor pname in names:\n\t\t\t\t\tif pname == command_line1 or pname+'\\0' in command_line2:\n\t\t\t\t\t\tpids.append(int(entry))\n\n\t\t\texcept (IOError, OSError), e:\n\t\t\t\t# in rare cases, the process vanishes during iteration. This\n\t\t\t\t# is harmless. Any other error is not cool, raise it.\n\t\t\t\tif e.errno != errno.ENOENT:\n\t\t\t\t\traise e\n\n\treturn pids" ]
[ "0.66955024", "0.6464206", "0.6179278", "0.6140418", "0.611959", "0.60622585", "0.6036306", "0.6031246", "0.59804934", "0.5914358", "0.5854065", "0.5840564", "0.58357877", "0.5798018", "0.5787222", "0.5764939", "0.57292414", "0.5698399", "0.56616515", "0.5657792", "0.56011176", "0.56001633", "0.5579416", "0.5577748", "0.5553523", "0.5552634", "0.5551955", "0.55409783", "0.55298185", "0.5524617" ]
0.7045869
0
Takes a generator and returns a shuffled generator.
def shuffle(some_generator):
    seq = list(some_generator)
    random.shuffle(seq)
    for x in seq:
        yield x
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_array_shuffler(rng):\n def nruter(in_array):\n return jax.random.permutation(rng, jnp.asarray(in_array))\n\n return nruter", "def shuffled(iterable):\n items = list(iterable)\n random.shuffle(items)\n return items", "def shuffle(data, shuffle_size=10000):\n buf = []\n for sample in data:\n buf.append(sample)\n if len(buf) >= shuffle_size:\n random.shuffle(buf)\n for x in buf:\n yield x\n buf = []\n # The sample left over\n random.shuffle(buf)\n for x in buf:\n yield x", "def shuffle(list_, random_seed=123):\n random.Random(random_seed).shuffle(list_)", "def shuffle(lol, seed):\n for l in lol:\n random.seed(seed)\n random.shuffle(l)", "def population_gen(population):\n pop_sort = [item for item in population]\n random.shuffle(pop_sort)\n\n for item in pop_sort:\n yield item", "def shuffle(stream, buffer_size, seed=None):\n\n rng = random.Random(seed)\n\n # If stream is not a generator, then we coerce it to one\n if not isinstance(stream, types.GeneratorType):\n stream = iter(stream)\n\n # Initialize the buffer with the first buffer_size elements of the stream\n buffer = list(itertools.islice(stream, buffer_size))\n\n # Deplete the stream until it is empty\n for element in stream:\n\n # Pick a random element from the buffer and yield it\n i = rng.randint(0, len(buffer) - 1)\n yield buffer[i]\n\n # Replace the yielded element from the buffer with the new element from the stream\n buffer[i] = element\n\n # Shuffle the remaining buffer elements and yield them one by one\n rng.shuffle(buffer)\n for element in buffer:\n yield element", "def shuffle( self ):\n random.shuffle(self.__deck)", "def shuffle(reader, buf_size):\n\n def data_reader():\n buf = []\n for e in reader():\n buf.append(e)\n if len(buf) >= buf_size:\n random.shuffle(buf)\n for b in buf:\n yield b\n buf = []\n\n if len(buf) > 0:\n random.shuffle(buf)\n for b in buf:\n yield b\n\n return data_reader", "def iter_shuffle(iterable, bufsize=1000):\n iterable = iter(iterable)\n buf = []\n try:\n while True:\n for _ in range(random.randint(1, bufsize - len(buf))):\n buf.append(next(iterable))\n random.shuffle(buf)\n for _ in range(random.randint(1, bufsize)):\n if buf:\n yield buf.pop()\n else:\n break\n except StopIteration:\n random.shuffle(buf)\n while buf:\n yield buf.pop()\n return", "def get_generator(generator: Generator, **kwargs) -> Generator:\n return generator(**kwargs)", "def shuffle(L):\n return [L[i] for i in permutation(len(L))]", "def shuffle(self, inp: Tensor):\n _seed = self._seed() if callable(self._seed) else self._seed\n inp._reset(_shuffle(inp=inp, seed=_seed, handle=self._handle))", "def _shuffle():\n\n random.shuffle(deck)", "def stable_shuffle(self, seq):\n seq = numpy.asarray(seq)\n if len(seq) != len(self._argshuf):\n # Reset the rng using seq length as the seed.\n # Why not just use the same seed every time? 
Dunno.\n rng = numpy.random.default_rng(len(seq))\n # Save the first permutation generated thereby.\n self._argshuf = rng.permutation(len(seq))\n return seq[self._argshuf]", "def strategize(generator):\r\n @functools.wraps(generator)\r\n def strategy_generator(random, args):\r\n candidate = generator(random, args)\r\n n = len(candidate)\r\n candidate.extend([random.random() for _ in range(n)])\r\n return candidate\r\n return strategy_generator", "def generator(self, args, gen):\n import random\n\n if args.seed:\n random.seed(args.seed)\n seqs = [s for s in gen]\n sample_indices = random.sample(range(len(seqs)), min(len(seqs), args.number))\n for i in sample_indices:\n yield seqs[i]", "def calc_granger_shuffle(self):\n if not hasattr(self, 'input_data'):\n self.preprocess_and_check_stationarity()\n temp_series = [np.stack([np.random.permutation(x)\n for x in self.input_data.T]).T\n for i in trange(self.n_shuffles)]\n\n outs_temp = parallelize(self.calc_granger, temp_series, n_jobs=30)\n outs_temp = [x[0] for x in outs_temp]\n self.shuffle_outs = np.array(outs_temp)", "def rand_flip_graph(graph, edge):\n return rand_zero_or_one(0.5)\n # return rand_zero_or_one(edge_prob(graph, edge))", "def shuffle_with_seed(lst, seed=None):\n # Create our own Random object so we can mess with its state without\n # affecting global random state\n r = random.Random()\n r.seed(seed)\n # .shuffle shuffles in place, this is the best way to shuffle not in place\n shuffled = sorted(lst, key=lambda item: r.random())\n return shuffled", "def shuffled_deck(deck):\n random.shuffle(deck)\n return deck", "def shuffle(self):\n x = len(self.org)\n result = self.org[:]\n var = x\n for i in range(x):\n id = random.randrange(0, var)\n result[id], result[var - 1] = result[var - 1], result[id]\n var -= 1\n\n return result", "def shuffle(self) -> List[int]:\n runs = self.nums.copy()\n # Fisher-Yates Algorithm\n n = len(runs)\n for i in range(n):\n j = random.randint(i, n - 1)\n runs[i], runs[j] = runs[j], runs[i]\n return runs", "def batch_generator(data, batch_size):\r\n data = np.array(data)\r\n n_batches = int(np.ceil(len(data) / float(batch_size)))\r\n \r\n idx = np.random.permutation(len(data))\r\n data_shuffled = data[idx]\r\n \r\n for i in range(n_batches):\r\n start = i * batch_size\r\n end = start + batch_size\r\n\r\n batch = data_shuffled[start:end]\r\n if len(batch) < batch_size:\r\n # Pad with zeros \r\n pad = np.zeros((batch_size - batch.shape[0], batch.shape[1]),\r\n dtype=batch.dtype)\r\n batch = np.vstack((batch, pad))\r\n\r\n yield batch", "def shuffle(input):\n deck = input[:]\n for i in xrange(len(deck)-1):\n # Find a random index between i and end of deck\n dest = random.randint(i+1,len(deck)-1)\n deck[i], deck[dest] = deck[dest], deck[i]\n \n return deck", "def shuffle(values):\n num_values = len(values)\n for v in range(num_values):\n # Get a random, different index\n s = v + int(random() * (num_values - v))\n # Swap values\n values[s], values[v] = values[v], values[s]\n return values", "def ScrambleMutation(item):\n item=copy.deepcopy(item)\n countryNo = len(item)\n [start,end] = sorted(random.sample(range(1,countryNo+1),2))\n shuffle_slice(item,start,end)\n return item", "def _shuffle(inputs):\n\texpand_inputs = tf.stack(inputs, axis = -1)\n\tshuffled_inputs = tf.random_shuffle(expand_inputs)\n\treturn tf.unstack(shuffled_inputs)", "def shuffle(self, seed=None):\n if seed is None:\n return Split(self)\n\n random.seed(seed)\n sbj_list = list(self.sbj_list)\n random.shuffle(sbj_list)\n\n n0 = 
len(self[self.grp0])\n return Split({self.grp0: sbj_list[:n0],\n self.grp1: sbj_list[n0:]})", "def main():\n input_1 = [7, 6, 5, 4, 3, 2, 1]\n print shuffle(input_1)\n print input_1" ]
[ "0.6300271", "0.62952805", "0.62433124", "0.61444193", "0.6055796", "0.6012041", "0.59882706", "0.59802055", "0.5964028", "0.59303206", "0.58966327", "0.58935845", "0.5879163", "0.5858825", "0.58194625", "0.57744366", "0.5760689", "0.57575315", "0.5751348", "0.56938046", "0.56866163", "0.5681034", "0.56802386", "0.564879", "0.5646796", "0.5639084", "0.5633654", "0.56180984", "0.5596447", "0.55939215" ]
0.80171114
0
Basic HTTP auth decorator
def basic_http_auth(f):
    def wrap(request, *args, **kwargs):
        if request.META.get('HTTP_AUTHORIZATION', False):
            authtype, auth = request.META['HTTP_AUTHORIZATION'].split(' ')
            auth = base64.b64decode(auth)
            username, password = auth.split(':')
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    return f(request, *args, **kwargs)
                else:
                    r = HttpResponse("Auth Required", status = 401)
                    r['WWW-Authenticate'] = 'Basic realm="ThatPanda DDNS"'
                    return r
        r = HttpResponse("Auth Required", status = 401)
        r['WWW-Authenticate'] = 'Basic realm="ThatPanda DDNS"'
        return r
    return wrap
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def http_basic_auth(func):\r\n\t@wraps(func)\r\n\tdef _decorator(request, *args, **kwargs):\r\n\r\n\t\tif request.META.has_key('HTTP_AUTHORIZATION'):\r\n\t\t\ttry:\r\n\t\t\t\tauthmeth, auth = request.META['HTTP_AUTHORIZATION'].split(' ', 1)\r\n\t\t\t\tif authmeth.lower() == 'basic':\r\n\t\t\t\t\tauth = auth.strip().decode('base64')\r\n\t\t\t\t\tusername, password = auth.split(':', 1)\r\n\t\t\t\t\tuser = authenticate(username=username, password=password)\r\n\r\n\t\t\t\t\tif user:\r\n\r\n\t\t\t\t\t\tlogin(request, user)\r\n\t\t\t\t\t\r\n\t\t\t\t\telse:\r\n\r\n\t\t\t\t\t\treturn HttpResponseForbidden()\r\n\r\n\t\t\texcept ValueError:\r\n\t\t\t\t# Bad HTTP_AUTHORIZATION header\r\n\t\t\t\treturn HttpResponseForbidden()\r\n\t\t\t\t\r\n\t\treturn func(request, *args, **kwargs)\r\n\treturn _decorator", "def requires_http_basic_auth(self, f: Callable):\n\n @functools.wraps(f)\n def decorated(*args, **kwargs):\n # Try to authenticate user from HTTP basic auth headers (failure will raise appropriate exception).\n self.authenticate_basic(request)\n # TODO: optionally pass access_token and user_id from authentication result?\n return f(*args, **kwargs)\n\n return decorated", "def add_basic_auth(blueprint: Blueprint, username, password, realm='api'):\n\n @blueprint.before_request\n def basic_http_auth(*args, **kwargs):\n auth = request.authorization\n if auth is None or auth.password != password or auth.username != username:\n return Response('Please login', 401, {'WWW-Authenticate': f'Basic realm=\"{realm}\"'})", "def basicauth(self, user = None, password = None, realm = None):\n if user is None or password is None:\n user = self.config.basicauth_user\n password = self.config.basicauth_password\n if realm is None:\n if hasattr(self.config, 'realm'):\n realm = self.config.realm\n else:\n realm = 'Auth'\n self.user = user\n self.password = password\n self.realm = realm\n\n def decorate(func, *args, **kws):\n \"\"\"\n A method that actually called as a decorator.\n It performs BASIC authentication, checking header and return special\n header for authentication.\n \n :param func : a function to be decorated.\n \"\"\"\n self.func = func\n def do_authenticate():\n auth_header = self.request.headers.get('Authorization', '')\n if auth_header.split():\n scheme, code = auth_header.split()\n else:\n scheme = 'Basic'\n code = ''\n if scheme != 'Basic':\n raise ValueError('The authentication scheme is not BASIC')\n if b64decode(code):\n user, password = b64decode(code).split(':')\n else:\n user = password = ''\n if self.user == user and self.password == password:\n # the request already had valid authentication header.\n return self.func(*args, **kws)\n resp = self.response\n resp.set_status(401)\n self.render('Auth')\n resp.headers['WWW-Authenticate'] = 'Basic realm=\"%s\"' % self.realm\n\n return do_authenticate\n\n return decorate", "def decorate(func, *args, **kws):\n self.func = func\n def do_authenticate():\n auth_header = self.request.headers.get('Authorization', '')\n if auth_header.split():\n scheme, code = auth_header.split()\n else:\n scheme = 'Basic'\n code = ''\n if scheme != 'Basic':\n raise ValueError('The authentication scheme is not BASIC')\n if b64decode(code):\n user, password = b64decode(code).split(':')\n else:\n user = password = ''\n if self.user == user and self.password == password:\n # the request already had valid authentication header.\n return self.func(*args, **kws)\n resp = self.response\n resp.set_status(401)\n self.render('Auth')\n resp.headers['WWW-Authenticate'] = 'Basic 
realm=\"%s\"' % self.realm\n\n return do_authenticate", "def authenticate():\n return Response('Not Authorized', 401, {'WWW-Authenticate': 'Basic realm=\"api\"'})", "def basic_auth_required(fn):\n @wraps(fn)\n def _wrapper(request, *args, **kwargs):\n authentication = request.headers.get('Authentication', None)\n\n if authentication:\n if not authentication.startswith(\"Basic \"):\n request.response.status = 401\n\n return {\n 'error': \"Authentication failed!\"\n }\n\n auth_data = authentication[6:]\n\n try:\n username, password = base64.urlsafe_b64decode(auth_data).decode(\"UTF8\").split(\":\")\n\n user = request.dbsession.query(User).filter(\n User.email == username\n ).one()\n\n if user.is_password(password.encode(\"UTF8\")):\n return fn(request, *args, **kwargs)\n except (ValueError, NoResultFound):\n pass\n\n request.response.status = 401\n\n return {\n 'error': 'Authentication failed!'\n }\n\n return _wrapper", "def __auth(username, password, type=\"basic\"): # pylint:disable=redefined-builtin\n # TODO: Handle encrypted passwords.\n if type.lower() == \"basic\":\n return HTTPBasicAuth(username, password)\n return HTTPDigestAuth(username, password)", "def basic_auth(user, password):\n return AuthToken(\"basic\", user, password)", "def auth_handler(self, url, method, timeout, headers, data):\n username = self.username\n password = self.password\n return basic_auth_handler(url, method, timeout, headers, data, username,\n password)", "def authenticate():\n return Response(\n '', 401, {'WWW-Authenticate': 'Basic realm=\"Login Required\"'}\n )", "def auth():\n pass", "def auth():\n pass", "def basic_auth_required(view_func):\n # http://djangosnippets.org/snippets/448/\n def _auth(request, *args, **kwargs):\n if 'HTTP_AUTHORIZATION' in request.META:\n auth = request.META['HTTP_AUTHORIZATION'].split()\n if len(auth) == 2:\n if auth[0].lower() == \"basic\":\n uname, passwd = base64.b64decode(auth[1]).split(':')\n user = authenticate(username=uname, password=passwd)\n if user is not None:\n if user.is_active:\n return view_func(request, *args, **kwargs)\n response = HttpResponse(\"Authorization Required\", status=401)\n response['WWW-Authenticate'] = 'Basic realm=\"Secure Area\"'\n return response\n return _auth", "def basic_auth(user=\"user\", passwd=\"passwd\"):\n\n if not check_basic_auth(user, passwd):\n return status_code(401)\n\n return jsonify(authenticated=True, user=user)", "def hidden_basic_auth(user=\"user\", passwd=\"passwd\"):\n\n if not check_basic_auth(user, passwd):\n return status_code(404)\n return jsonify(authenticated=True, user=user)", "def authenticate():\n return Response(\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def basic_auth_required(realm=None, test_func=None, callback_func=None):\r\n if realm is None:\r\n realm = getattr(settings, 'HTTP_AUTHENTICATION_REALM', _('Restricted Access'))\r\n if test_func is None:\r\n test_func = lambda u: u.is_authenticated()\r\n\r\n def decorator(view_func):\r\n def basic_auth(request, *args, **kwargs):\r\n # Just return the original view because already logged in\r\n if test_func(request.user):\r\n return view_func(request, *args, **kwargs)\r\n\r\n # Not logged in, look if login credentials are provided\r\n if 'HTTP_AUTHORIZATION' in request.META: \r\n auth_method, auth = request.META['HTTP_AUTHORIZATION'].split(' ',1)\r\n if 'basic' == auth_method.lower():\r\n auth = auth.strip().decode('base64')\r\n username, password = auth.split(':',1)\r\n user = 
authenticate(username=username, password=password)\r\n if user is not None:\r\n if user.is_active:\r\n if callback_func is not None and callable(callback_func):\r\n callback_func(request, user, *args, **kwargs)\r\n return view_func(request, *args, **kwargs)\r\n\r\n response = HttpResponse(_('Authorization Required'), mimetype=\"text/plain\")\r\n response.status_code = 401\r\n response['WWW-Authenticate'] = 'Basic realm=\"%s\"' % realm\r\n return response\r\n return basic_auth\r\n return decorator", "def authenticate():\n return flask.Response('Login required.', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def http_basic_auth():\n users = ['administrator', 'admin']\n passwords = ['administrator', 'admin']\n protectedResource = 'http://localhost/secured_path'\n foundPass = False\n for user in users:\n if foundPass:\n break\n for passwd in passwords:\n encoded = base64.encodestring(user + ':' + passwd)\n response = requests.get(protectedResource, auth=(user, passwd))\n if response.status_code != 401:\n print('User Found!')\n print('User: %s, Pass: %s' % (user, passwd))\n foundPass = True\n break", "def authenticate():\n return Response(\n 'Could not verify your credentials for that url', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n\treturn Response(\n\t'Could not verify your access level for that URL.\\n'\n\t'You have to login with proper credentials', 401,\n\t{'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials.', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def authenticate():\n return Response(\n 'Could not verify your access level for that URL.\\n'\n 'You have to login with proper credentials', 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})" ]
[ "0.8461793", "0.79010516", "0.7828926", "0.78109825", "0.7766358", "0.7603452", "0.7481247", "0.7302498", "0.7254346", "0.7246135", "0.721293", "0.7180321", "0.7180321", "0.7162722", "0.7149473", "0.71488553", "0.71381325", "0.7052642", "0.70506674", "0.7043808", "0.6986596", "0.69799805", "0.6979108", "0.6961464", "0.6961464", "0.6961464", "0.6961464", "0.6961464", "0.6961464", "0.6961464" ]
0.79767287
1
Service an update request in the form of /update?ipv6=&ipv4=&domain=
def update(request): from pprint import pformat if 'ipv4' not in request.GET and 'ipv6' not in request.GET: return HttpResponse("Must specify one or both of ipv4/ipv6 address\nParams:%s" % pformat(request.GET.dict()), status=400) if not u'domain' in request.GET: return HttpResponse("Must specify domain\nParams:%s" % pformat(request.GET.dict()), status=400) for ipvx, record_type in ((u'ipv4', 'A'), (u'ipv6', 'AAAA')): if ipvx not in request.GET: continue record, created = Record.objects.get_or_create( name=request.GET['domain'], type=record_type, ) record.domain_id = 1 record.ttl = 1 record.auth = True record.content = request.GET[ipvx] record.save() return HttpResponse("Saved record(s)")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateHosts(request):\n\n updater = HostUpdater()\n updater.run()\n return http.HttpResponse(\"Ok\")", "def update():\n return 'update api in put'", "def handleTransitUpdateRequest(self, request:CSERequest) -> Result:\n\t\tif (url := self._getForwardURL(request.id)) is None:\n\t\t\treturn Result(rsc=RC.notFound, dbg=f'forward URL not found for id: {request.id}')\n\t\tif len(request.originalArgs) > 0:\t# pass on other arguments, for discovery\n\t\t\turl += '?' + urllib.parse.urlencode(request.originalArgs)\n\t\tLogging.log(f'Forwarding Update request to: {url}')\n\t\treturn self.sendUpdateRequest(url, request.headers.originator, data=request.data)", "def update(self, ip):\n timeout = 60\n LOG.debug(\"Updating '%s' to '%s' at service '%s'\", self.hostname, ip, self._updateurl)\n params = {\"myip\": ip, \"hostname\": self.hostname}\n req = requests.get(self._updateurl, params=params, headers=constants.REQUEST_HEADERS_DEFAULT,\n auth=(self.__userid, self.__password), timeout=timeout)\n LOG.debug(\"status %i, %s\", req.status_code, req.text)\n if req.status_code == 200:\n # responses can also be \"nohost\", \"abuse\", \"911\", \"notfqdn\"\n if req.text.startswith(\"good \") or req.text.startswith(\"nochg\"):\n return ip\n return req.text\n return \"invalid http status code: %s\" % req.status_code", "def update(self, update):\n\n params = shlex.split(update)\n if params[0] in self.addr:\n self.addr[params[0]].update(*params)\n\n else:\n a = Addr(self)\n # add both name and IP address\n self.addr[params[0]] = a\n self.addr[params[1]] = a\n a.update(*params)\n self.notify(\"addrmap_added\", *[a], **{})", "def HandleDynamicUpdater(request):\n print \"[*] %s Got Static C&C Updater request: %s\" % (time.ctime(), request.uri)\n print \"\\t[+] Host: %s\" % request.host\n\n message = getUpdaterResponse(CNC_IP, CNC_PORT)\n request.write(\"HTTP/1.1 200 OK\\r\\nContent-Length: %d\\r\\n\\r\\n%s\" % (len(message), message))", "def sendUpdateRequest(self, url:str, originator:str, data:Any, parameters:Parameters=None, ct:ContentSerializationType=None, targetResource:Resource=None) -> Result:\n\t\tif Utils.isHttpUrl(url):\n\t\t\tCSE.event.httpSendUpdate() # type: ignore\n\t\t\treturn CSE.httpServer.sendHttpRequest(requests.put, url, originator, data=data, parameters=parameters, ct=ct, targetResource=targetResource)\n\t\tLogging.logWarn(dbg := f'unsupported url scheme: {url}')\n\t\treturn Result(status=True, rsc=RC.badRequest, dbg=dbg)", "def update(*args):", "def event_update(req):\n event_id = req.match_dict['event_id']\n try:\n data = utils.find_keys(req.form, _event_args)\n db_conn.event_update(**data)\n json = {'updated': True}\n except Exception as e:\n json = {'errors': [str(e)]}\n return req.Response(json=json)", "def update(self) -> requests.request:\n # Check if id is set\n if self.args.id is None:\n raise Exception('Provide id of asset you want to update')\n\n # Check URL validity\n if self.args.url is not None and self.check_url_invalidity():\n raise Exception('Provided URL is not valid')\n\n # Send PUT request\n return requests.put(\n self.REQUEST_URL + str(self.args.id),\n {'title': self.args.title, 'label': self.args.label, 'url': self.args.url}\n )", "def format_check_update_connection_request(request):\n if request is None:\n return None\n # Split out and get first\n data = {}\n if type(request) == str:\n tmp = request.split(\",\")\n data_to_proc = []\n for d in tmp:\n data_to_proc.append(d.split(\":\"))\n else:\n data_to_proc = request\n for d in data_to_proc:\n if len(d) == 9:\n pass\n 
elif len(d) == 7:\n d.insert(1, \"LAST\")\n d.insert(3, \"LAST\")\n else:\n print(\"Invalid format for connection update request.\")\n continue\n dkey = d[0] + \":\" + d[2] + \":\" + d[4] + \":\" + d[5]\n if dkey in data.keys():\n data[dkey].append(d)\n else:\n data[dkey] = [d]\n return data", "def update_query(self, **updates):\r\n self._url_updates.update(updates)", "def send_update_message(peer_ip):\n LOG.debug('Try to send update message to peer %s', peer_ip)\n json_request = flask.request.get_json()\n attr = json_request.get('attr')\n nlri = json_request.get('nlri')\n withdraw = json_request.get('withdraw')\n if attr:\n attr = {int(k): v for k, v in attr.items()}\n if 5 not in attr:\n # default local preference\n attr[5] = 100\n if (attr and nlri) or withdraw:\n return flask.jsonify(api_utils.send_update(peer_ip, attr, nlri, withdraw))\n elif 14 in attr or 15 in attr:\n return flask.jsonify(api_utils.send_update(peer_ip, attr, nlri, withdraw))\n\n else:\n return flask.jsonify({\n 'status': False,\n 'code': 'please check your post data'\n })", "def solr_update(config, solr_host, update_body, solr_collection_name, r=False):\n # solr_collection_name = config['solr_collection_name']\n\n url = f'{solr_host}{solr_collection_name}/update?commit=true'\n\n if r:\n return requests.post(url, json=update_body)\n else:\n requests.post(url, json=update_body)", "def update(self, url, updates):\r\n url = \"%s%s\" % (self.base_url, url)\r\n log.debug(\"PUT %s\" % (url))\r\n self.__connection.connect()\r\n \r\n put_headers = {\"Content-Type\": \"application/json\"}\r\n put_headers.update(self.__headers)\r\n request = self.__connection.request(\"PUT\", url, simplejson.dumps(updates), put_headers)\r\n response = self.__connection.getresponse()\r\n data = response.read()\r\n self.__connection.close()\r\n \r\n log.debug(\"PUT %s status %d\" % (url,response.status))\r\n log.debug(\"OUTPUT: %s\" % data)\r\n \r\n result = {}\r\n if response.status == 200:\r\n result = simplejson.loads(data)\r\n \r\n elif response.status == 204:\r\n raise EmptyResponseWarning(\"%d %s @ https://%s%s\" % (response.status, response.reason, self.host, url))\r\n \r\n elif response.status == 404:\r\n log.debug(\"%s returned 404 status\" % url)\r\n raise HTTPException(\"%d %s @ https://%s%s\" % (response.status, response.reason, self.host, url))\r\n \r\n elif response.status >= 400:\r\n _result = simplejson.loads(data)\r\n log.debug(\"OUTPUT %s\" % _result)\r\n raise HTTPException(\"%d %s @ https://%s%s\" % (response.status, response.reason, self.host, url))\r\n \r\n return result", "def dnsUpdate(portId, ipAddr='', action='create'):\n\tzone = 'osdev.skrill.net.'\n\trevZone = '23.32.10.in-addr.arpa'\n\tcname = portId + '.' 
+ zone\n\tttl = 300\n\tnsServer = '10.32.29.99'\n key = 'yw0ADuZjXAhcGgMOYg/Clx1128iUSfhlOHdsY4CzVNIVVVXismrAe+WKMBxocLhbrIVHGvmR94jDC46K18K6oQ=='\n keyRing = dns.tsigkeyring.from_text({zone : key})\n\thostName = genHostname(ipAddr)\n\tdnsUpdate = dns.update.Update(zone, keyring=keyRing)\n\tipAddr = str(ipAddr)\n\thostName = str(hostName)\n\tif action == 'create':\n\t\tdnsUpdate.replace( hostName.split('.')[0], ttl, 'A', ipAddr )\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS A record updated for: ' + hostName)\n\t\tdnsUpdate.replace(portId, ttl, 'CNAME', hostName)\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS CNAME record updated for: ' + hostName)\n\t\tdnsUpdate = dns.update.Update(revZone, keyring=keyRing)\n\t\tdnsUpdate.replace(ipAddr.split('.')[3], ttl, 'PTR', hostName)\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS PTR record updated for: ' + hostName)\n\tif action == 'delete':\n\t\ttry:\n\t\t\thostName = dns.resolver.query(cname, 'CNAME')[0].to_text()\n\t\t\tipAddr = dns.resolver.query(hostName, 'A')[0].to_text()\n\t\texcept Exception, e:\n\t\t\tlogging.exception('DNS query failed for cname and A records: ' + cname + ' ' + hostName)\n\t\t\thostName = ''\n\t\t\treturn hostName\n\t\tdnsUpdate.delete(cname, 'CNAME')\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS CNAME record deleted for: ' + portId + ' to ' + hostName)\n\t\tdnsUpdate.delete(hostName.split('.')[0])\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS A record deleted for: ' + hostName)\n\t\tdnsUpdate = dns.update.Update(revZone, keyring=keyRing)\n dnsUpdate.delete(ipAddr.split('.')[3])\n\t\tdnsResponse = dns.query.tcp(dnsUpdate, nsServer )\n\t\tlogging.info('DNS PTR record deleted for: ' + hostName)\n\t\treturn hostName", "def update(self, request, pk=None):\n\n return Response({'http_method': 'PUT'})", "def update(self, params):", "def _update_from_rest_data(self) -> None:", "def post_update():\n req_data = request.get_json()\n function = req_data['function']\n if function == 'finish':\n flask_wms.update_finish(req_data['name'])\n print('App, finished request for manga {0}'.format(req_data['name']))\n elif function == 'chapter':\n flask_wms.update_chapter_number(req_data['name'], req_data['new_chapter'])\n print(\"chapter method, changes stuff\")\n return 'Request recieved, update method'", "def _update(self, host):\n pass", "def update(self, request, phone):\n try:\n attrs = self.flatten_dict(request.POST)\n #if self.exists(**attrs):\n #return rc.DUPLICATE_ENTRY\n #else:\n endpoint = Endpoint.objects.get(uid__exact=phone, site__name__exact=request.user)\n if attrs.get('effective_caller_id_name'):\n endpoint.effective_caller_id_name = attrs.get('effective_caller_id_name')\n if attrs.get('password'):\n endpoint.password = attrs.get('password')\n if attrs.get('description'):\n endpoint.description = attrs.get('description')\n if attrs.get(\"enabled\") == \"false\":\n endpoint.enable = False\n elif attrs.get(\"enabled\") == \"true\":\n endpoint.enable = True\n if attrs.get(\"enable\") == \"false\":\n endpoint.enable = False\n elif attrs.get(\"enable\") == \"true\":\n endpoint.enable = True\n endpoint.save()\n return endpoint\n except:\n return rc.NOT_HERE", "def xnat_workflow_info_update(args):\n\trequest_url = \"http://\" + args.server + \"/data/services/workflows/workflowid/\" + args.workflow_id + \"?format=json\"\n\tprint(\"xnat_workflow_info update: request_url: \" + 
request_url)\n\tresponse = requests.get(request_url, auth=(args.username, args.password))\n\n\tjson_response = json.loads(response.text)\n\tjson_items = json_response['items']\n\tjson_item = json_items[0]\n\tjson_data_fields = json_item['data_fields']\n\n\tput_url = \"http://\" + args.server + \"/REST/workflows\"\n\n\t# workflow identifying information\n\tput_url += \"?wrk:workflowData/id=\" + json_data_fields['ID']\n \tput_url += \"&wrk:workflowData/pipeline_name=\" + json_data_fields['pipeline_name']\n\tput_url += \"&wrk:workflowData/launch_time=\" + json_data_fields['launch_time']\n\tput_url += \"&wrk:workflowData/data_type=\" + json_data_fields['data_type']\n\t# workflow information to be updated\n \tput_url += \"&wrk:workflowData/status=\" + \"In Progress\"\n \tput_url += \"&wrk:workflowData/current_step_id=\" + args.step_id\n\tput_url += \"&wrk:workflowData/step_description=\" + args.step_description\n\tput_url += \"&wrk:workflowData/percentageComplete=\" + args.percent_complete\n\tput_url += \"&wrk:workflowData/current_step_launch_time=\" + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n\n\tput_url = put_url.replace(\" \", \"%20\");\n\n\tprint(\"xnat_workflow_info update: put_url: \" + put_url)\n\n\tresponse = requests.put(put_url, auth=(args.username, args.password))\n\tif (response.status_code != 200):\n\t\tprint(\"Cannot update workflow\")\n\t\tprint(\"response.status_code: \" + str(response.status_code))\n\n\txnat_workflow_info_show(args)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)", "def Update(self, request, global_params=None):\n config = self.GetMethodConfig('Update')\n return self._RunMethod(\n config, request, global_params=global_params)" ]
[ "0.6419556", "0.622346", "0.6201386", "0.6055032", "0.603461", "0.6001513", "0.5956058", "0.5926744", "0.5919697", "0.5771281", "0.5758475", "0.5752289", "0.5746953", "0.57087284", "0.5706464", "0.5696872", "0.5688933", "0.568207", "0.5681439", "0.5669635", "0.56664", "0.5656596", "0.5655695", "0.564367", "0.564367", "0.564367", "0.564367", "0.564367", "0.564367", "0.564367" ]
0.69290113
0
Init a Paddle ASR Connection Handler instance
def __init__(self, asr_engine): super().__init__() logger.debug( "create an paddle asr connection handler to process the websocket connection" ) self.config = asr_engine.config # server config self.model_config = asr_engine.executor.config self.asr_engine = asr_engine # model_type, sample_rate and text_feature is shared for deepspeech2 and conformer self.model_type = self.asr_engine.executor.model_type self.sample_rate = self.asr_engine.executor.sample_rate # tokens to text self.text_feature = self.asr_engine.executor.text_feature # extract feat, new only fbank in conformer model self.preprocess_conf = self.model_config.preprocess_config self.preprocess_args = {"train": False} self.preprocessing = Transformation(self.preprocess_conf) # frame window and frame shift, in samples unit self.win_length = self.preprocess_conf.process[0]['win_length'] self.n_shift = self.preprocess_conf.process[0]['n_shift'] assert self.preprocess_conf.process[0]['fs'] == self.sample_rate, ( self.sample_rate, self.preprocess_conf.process[0]['fs']) self.frame_shift_in_ms = int( self.n_shift / self.preprocess_conf.process[0]['fs'] * 1000) self.continuous_decoding = self.config.get("continuous_decoding", False) self.init_decoder() self.reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_handler(self):\n return PaddleASRConnectionHanddler(self)", "def __init__(self, tts_engine):\n super().__init__()\n logger.debug(\n \"Create PaddleTTSConnectionHandler to process the tts request\")\n\n self.tts_engine = tts_engine\n self.executor = self.tts_engine.executor\n self.config = self.tts_engine.config\n self.frontend = self.executor.frontend\n self.am_inference = self.executor.am_inference\n self.voc_inference = self.executor.voc_inference", "def __init__(self):\n\n\t\tself.connection = self.get_connection()", "def __init__(self):\n\n self.loop = asyncio.get_event_loop()\n self.aiohttp = web.Application(\n loop=self.loop,\n middlewares=[unhandled_route],\n )\n self.client = ClientSession()\n self.ws = WebSocketHandler(self)\n self.cert = self._load_ssl_certificate()\n\n self.config()", "def initialize(self) -> None:\n conn = self.optionally_wrap_socket(self.client.connection)\n conn.setblocking(False)\n self.client = TcpClientConnection(conn=conn, addr=self.addr)\n if b'ProtocolHandlerPlugin' in self.config.plugins:\n for klass in self.config.plugins[b'ProtocolHandlerPlugin']:\n instance = klass(self.config, self.client, self.request)\n self.plugins[instance.name()] = instance", "def __init__(self):\n self.try_to_connect()", "def __init__(self):\n\n # labjack connection handle (default: None. If connected: labjack handler instance)\n self.connection_handle = None\n\n # labjack connection state (default: None, connection_error: False, connected: True)\n self.connection_state = False\n\n # try to connect\n self.connect()", "def __init__(self, name: str, ap_id: str, rac_config: RadioAccessNetworkConfiguration):\n super().__init__(name=name)\n\n self.simple = rac_config.bypass_amf # Simple A&C bypasses AMF\n\n # inputs/outputs to UEs\n self.input_access_request = Port(AccessRequest, 'input_access_request')\n self.input_disconnect_request = Port(DisconnectRequest, 'input_disconnect_request')\n self.input_rrc = Port(RadioResourceControl, 'input_rrc')\n self.input_ho_ready = Port(HandOverReady, 'input_ho_ready')\n self.input_ho_response = Port(HandOverResponse, 'input_ho_response')\n self.output_access_response = Port(AccessResponse, 'output_access_response')\n self.output_disconnect_response = Port(DisconnectResponse, 'output_disconnect_response')\n self.output_ho_started = Port(HandOverStarted, 'output_ho_started')\n self.output_ho_finished = Port(HandOverFinished, 'output_ho_finished')\n self.add_in_port(self.input_access_request)\n self.add_in_port(self.input_disconnect_request)\n self.add_in_port(self.input_rrc)\n self.add_in_port(self.input_ho_ready)\n self.add_in_port(self.input_ho_response)\n self.add_out_port(self.output_access_response)\n self.add_out_port(self.output_disconnect_response)\n self.add_out_port(self.output_ho_started)\n self.add_out_port(self.output_ho_finished)\n\n # inputs/outputs to APs\n self.input_start_ho_request = Port(StartHandOverRequest, 'input_start_ho_request')\n self.input_start_ho_response = Port(StartHandOverResponse, 'input_start_ho_response')\n self.output_start_ho_request = Port(StartHandOverRequest, 'output_start_ho_request')\n self.output_start_ho_response = Port(StartHandOverResponse, 'output_start_ho_response')\n self.add_in_port(self.input_start_ho_request)\n self.add_in_port(self.input_start_ho_response)\n self.add_out_port(self.output_start_ho_request)\n self.add_out_port(self.output_start_ho_response)\n\n # inputs/outputs for core network\n if not self.simple:\n self.output_create_path_request = Port(CreatePathRequest, 
'output_create_path_request')\n self.output_remove_path_request = Port(RemovePathRequest, 'output_remove_path_request')\n self.output_switch_path_request = Port(SwitchPathRequest, 'output_switch_path_request')\n self.input_create_path_response = Port(CreatePathResponse, 'input_create_path_response')\n self.input_remove_path_response = Port(RemovePathResponse, 'input_remove_path_response')\n self.input_switch_path_response = Port(SwitchPathResponse, 'input_switch_path_response')\n self.add_out_port(self.output_create_path_request)\n self.add_out_port(self.output_remove_path_request)\n self.add_out_port(self.output_switch_path_request)\n self.add_in_port(self.input_create_path_response)\n self.add_in_port(self.input_remove_path_response)\n self.add_in_port(self.input_switch_path_response)\n\n # AP internal inputs/outputs\n self.output_connected_ue_list = Port(EnableChannels, 'output_connected_ue_list')\n self.add_out_port(self.output_connected_ue_list)\n\n self.ap_id = ap_id # AP ID\n self.header = rac_config.header # Header for application packets\n self.ue_path = dict() # dictionary {UE ID: UE status}\n self.ue_to_ho_to = dict() # dictionary of connected_ap UE to be handed over {node_id: new_ap_id}\n self.ue_to_ho_from = dict() # dictionary of UE to be connected_ap via hand over {node_id: prev_ap_id}", "def init(a: str, h: str, c: str, r: bool, A: str, lock: Lock) -> None:\n global host, action, report, router, algorithm\n\n action = a\n algorithm = A\n\n if r:\n report = r\n if h:\n host = h\n if c:\n lock.acquire()\n try:\n router = PyOSRM(c, use_shared_memory=False, algorithm=algorithm)\n LOGGER.debug(\"Router instantiated\")\n finally:\n lock.release()", "def init_connection(self, connection):", "def __init__(self):\n self.client_id = None\n self.bridge_config = {}\n self.bridge_config_answer_status = None", "def __init__(self):\n\n # For now, we'll connect to the target via the Apollo debug controller.\n # This should be replaced by a high-speed USB link soon; but for now\n # we'll use the slow debug connection.\n self._debugger = ApolloDebugger()\n self._serial = self._find_serial_connection()", "def init(self, userdata, conn):\r\n pass", "def __init__(self):\r\n self._map1 = {\r\n \"CIRC\" : self.circ_status_event,\r\n \"STREAM\" : self.stream_status_event,\r\n \"ORCONN\" : self.or_conn_status_event,\r\n \"STREAM_BW\" : self.stream_bw_event,\r\n \"BW\" : self.bandwidth_event,\r\n \"DEBUG\" : self.msg_event,\r\n \"INFO\" : self.msg_event,\r\n \"NOTICE\" : self.msg_event,\r\n \"WARN\" : self.msg_event,\r\n \"ERR\" : self.msg_event,\r\n \"NEWDESC\" : self.new_desc_event,\r\n \"ADDRMAP\" : self.address_mapped_event,\r\n \"NS\" : self.ns_event,\r\n \"NEWCONSENSUS\" : self.new_consensus_event,\r\n \"BUILDTIMEOUT_SET\" : self.buildtimeout_set_event,\r\n \"GUARD\" : self.guard_event,\r\n \"TORCTL_TIMER\" : self.timer_event\r\n }\r\n self.c = None # Gets set by Connection.set_event_hanlder()\r\n self.pre_listeners = []\r\n self.post_listeners = []", "async def init(self):\n self.init_connection_params()\n self._pool = await self._create_pool()\n\n return self", "def init_drone(self):\n #if self.log_level:\n self.drone.log.set_level(0)\n self.drone.connect()\n self.set_video_encoder_rate(3)\n self.drone.start_video()\n\n self.drone.subscribe(self.drone.EVENT_FLIGHT_DATA,\n self.flight_data_handler)\n self.drone.subscribe(self.drone.EVENT_LOG_DATA,\n self.log_data_handler)\n self.drone.subscribe(self.drone.EVENT_FILE_RECEIVED,\n self.handle_flight_received)", "def initialize(self):\n if 
self.real:\n self.agent.connect(self)\n else:\n self.connect() # Connect python client to VREP\n self.agent.connect(self)", "def init(self):\n return self.conn.init()", "def __init__(self, *args, **kwargs):\n self._initialize_protocols()\n super().__init__(*args, **kwargs)", "def __init__(self, endpoint, playerport1, playerport2, controlport):\n self.__endpoint = endpoint\n self.__player1 = playerport1\n self.__player2 = playerport2\n self.__control = controlport", "def __init__(self) -> None:\n super().__init__()\n self.handler = IxnHandler()", "def init_stream_handler(\n self, \n logger, \n loop, \n netconf_ip, \n netconf_port,\n statistics,\n xml_to_json_translator):\n self._logger = logger\n self._asyncio_loop = loop\n self._encoding = \"xml\"\n self._netconf_ip = netconf_ip\n self._netconf_port = netconf_port\n self._stat = statistics\n self._xml_to_json_translator = xml_to_json_translator", "def _handler_init(self):\r\n\t\tself._handlers[\"player-join\"] = FunctionDelegate()\r\n\t\tself._handlers[\"player-quit\"] = FunctionDelegate()\r\n\t\tself._handlers[\"game-start\"] = FunctionDelegate()\r\n\t\tself._handlers[\"game-stop\"] = FunctionDelegate()", "def __init__(self):\n CRTPDriver.__init__(self)\n self._radio_manager = None\n self.uri = ''\n self.link_error_callback = None\n self.link_quality_callback = None\n self.in_queue = None\n self.out_queue = None\n self._thread = None\n self.needs_resending = True", "def __init__(self):\n self.sp, self.user = self.init_auth_client()\n self.logger = logging.getLogger(__name__)", "def __init__(self):\n super(Handler, self).__init__()\n logging.warning('Initializing coffeeHandler....')\n\n # get an active token and get prepared for sending request\n self.coffee_session = requests.session()", "def connect(self):\n from labrad.wrappers import connectAsync\n self.cxn = yield connectAsync(name='Protection_Beam_Server')\n self.arduino = self.cxn.arduinottl\n self.pmt = self.cxn.normalpmtflow\n self.enable_protection_shutter(self, self.enable_shutter)\n self.setupListeners()", "def __init__(self,connection):\n self.onedir = connection\n super(myEventHandler,self).__init__()", "def __init__(self, addr):\r\n asyncore.dispatcher.__init__(self)\r\n self.accept_channel = None\r\n self.addr = addr\r\n self.create_socket(socket.AF_INET, socket.SOCK_STREAM)\r\n self.bind(addr)\r\n self.listen(5)\r\n \r\n # Start the asyncore polling loop if it's not already running\r\n if not asyncore_loop.running:\r\n stackless.tasklet(asyncore_loop)()", "def init_connection_callback(sender, **signal_args):\n sender.args = sender\n object_args = signal_args['kwargs']\n sender.connection = get_connection(AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY_ID)" ]
[ "0.70649016", "0.6452158", "0.63979906", "0.6249045", "0.61649555", "0.6112942", "0.61059374", "0.6089189", "0.6074557", "0.60720205", "0.604152", "0.6030205", "0.6029689", "0.60231495", "0.60225475", "0.59963256", "0.598929", "0.5983683", "0.5979326", "0.597095", "0.59434724", "0.5895799", "0.5861819", "0.5859877", "0.5849538", "0.5837251", "0.5825471", "0.5820477", "0.58178043", "0.5807499" ]
0.68113375
1
When in continuous decoding, reset for next utterance.
def reset_continuous_decoding(self): self.global_frame_offset = self.num_frames self.model_reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset(self):\n log.debug('Reseting decoder.')\n # State values\n self.state = c.STATE_INIT_COPY\n self.ustate = 0\n self.nextstate = 0\n\n # Input / output buffers\n self.buf = ''\n self.output_buf = ''\n # Temporary buffers for storing data in processing\n self.in_buf = ''\n self.out_buf = ''\n self.len_buf = ''\n self.csbuf = ''\n self.htmldec = ''\n\n # Holders for urlencodeding / htmldecoder\n self.c1 = 0\n self.c2 = 0\n\n # Integers / ptrs we need.\n self.buf_ptr = 0\n self.i = 0\n self.j = 0\n # Assorted flags\n self.csum = 0\n self.ml = 0\n self.m = 0\n self.k = 0\n self.hd = 0\n self.utf8 = 0\n self.len = 0", "def reset(self):\n\t\tself.buf = []", "def reset(self):\n self.observation = None\n self.episode_done = True", "def resetDetector (self):\n self.mpr121._reset ()", "def _reset(self) -> None:", "def _reset(self) -> None:", "def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None\n self.prev_attn = None", "def _reset(self):", "def _reset(self) -> None:\n\n self._reset_slots()\n self._paused = False\n self.latest_action = {}\n self.latest_message = [] #clear a list in python\n self.latest_bot_utterance = BotUttered.empty()\n self.followup_action = ACTION_LISTEN_NAME\n self.active_loop = {}", "def reset(self):\n self.state = self.resolve_et({NFA.START})", "def manual_reset(self):\n\n status = self.read()\n if status != CurtainsStatus.STOPPED and status != CurtainsStatus.DANGER:\n return\n self.__remove_event_detect__()\n\n distance_to_min_step = abs(self.steps() - self.__min_step__)\n distance_to_max_step = abs(self.__max_step__ - self.steps())\n\n if distance_to_min_step <= distance_to_max_step:\n if self.steps() > self.__min_step__:\n self.__close__()\n else:\n self.__open__()\n self.curtain_closed.wait_for_active()\n self.__stop__()\n self.rotary_encoder.steps = self.__min_step__\n else:\n if self.steps() > self.__max_step__:\n self.__close__()\n else:\n self.__open__()\n self.curtain_open.wait_for_active()\n self.__stop__()\n self.rotary_encoder.steps = self.__max_step__\n\n self.__event_detect__()", "def reset():", "def reset():", "def reset():", "def tearDown(self):\n self.codec.stream.flush()\n self.codec.stream.close()", "def reset(self):\n # type: () -> None\n self.digest.clear()\n self.offset.clear()\n self.buffer.clear()\n self.position = 0\n self.counter = 0\n self.finished = False", "def async_reset(self) -> None:", "def reset(self):\n self.algo_state = {}\n self.actual_repetitions = 0\n self.next_session = -1\n self.last_session = -1\n self.past_quality = []", "def reset():\r\n pass", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\r\n _debug('api.reset()')\r\n self.write('*RST')\r\n self.query('*IDN?') # Pauses operation until fully reset?\r", "def reset(self):\n TxtFileParser.reset(self)\n self.timestamp = None\n self.rack_barcode = None\n self.position_map = dict()", "def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None", "def restore(self):\n self._result.unparse_seq = self._unparse_seq", "def reset(self):\n self.desc.put(self.desc.pvname.split(\".\")[0])\n self.scan.put(\"Passive\")\n self.calc.put(\"0\")\n 
self.prec.put(\"5\")\n self.dold.put(0)\n self.doln.put(\"\")\n self.dopt.put(\"Use VAL\")\n self.flnk.put(\"0\")\n self.odly.put(0)\n self.oopt.put(\"Every Time\")\n self.outn.put(\"\")\n for letter in self.channels.read_attrs:\n channel = self.channels.__getattr__(letter)\n channel.reset()", "def _reset(self):\n pass", "def after_encoding_negotiation(self, status):\n if status.cancelled():\n self.log.debug('encoding negotiation cancelled')\n return", "def reset(self):\n print('call reset()')\n self.cur = 0\n if self.shuffle:\n random.shuffle(self.seq)" ]
[ "0.63061255", "0.5913596", "0.5910032", "0.5909014", "0.5876435", "0.5876435", "0.5869188", "0.5829226", "0.580801", "0.5803396", "0.57463163", "0.57294697", "0.57294697", "0.57294697", "0.5713042", "0.5710891", "0.5707014", "0.5701592", "0.57004386", "0.56940204", "0.56940204", "0.56940204", "0.56940204", "0.56460565", "0.5645623", "0.5632028", "0.5627994", "0.5621414", "0.55267954", "0.55055475" ]
0.6538203
0
Decorate a function or method to have its first positional argument be treated as an (x, y, z) tuple which must fit inside chunk boundaries of 16, CHUNK_HEIGHT, and 16, respectively. A warning will be raised if the bounds check fails.
def check_bounds(f): @wraps(f) def deco(chunk, coords, *args, **kwargs): x, y, z = coords # Coordinates were out-of-bounds; warn and run away. if not (0 <= x < 16 and 0 <= z < 16 and 0 <= y < CHUNK_HEIGHT): warn("Coordinates %s are OOB in %s() of %s, ignoring call" % (coords, f.func_name, chunk), ChunkWarning, stacklevel=2) # A concession towards where this decorator will be used. The # value is likely to be discarded either way, but if the value is # used, we shouldn't horribly die because of None/0 mismatch. return 0 return f(chunk, coords, *args, **kwargs) return deco
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sync_coords_to_chunk(f):\n\n @wraps(f)\n def decorated(self, coords, *args, **kwargs):\n x, y, z = coords\n\n bigx, smallx, bigz, smallz = split_coords(x, z)\n bigcoords = bigx, bigz\n if bigcoords in self.chunk_cache:\n chunk = self.chunk_cache[bigcoords]\n elif bigcoords in self.dirty_chunk_cache:\n chunk = self.dirty_chunk_cache[bigcoords]\n else:\n raise ChunkNotLoaded(\"Chunk (%d, %d) isn't loaded\" % bigcoords)\n\n return f(self, chunk, (smallx, y, smallz), *args, **kwargs)\n\n return decorated", "def coords_to_chunk(f):\n\n @wraps(f)\n def decorated(self, coords, *args, **kwargs):\n x, y, z = coords\n\n bigx, smallx, bigz, smallz = split_coords(x, z)\n d = self.request_chunk(bigx, bigz)\n\n @d.addCallback\n def cb(chunk):\n return f(self, chunk, (smallx, y, smallz), *args, **kwargs)\n\n return d\n\n return decorated", "def bounds(self): # -> tuple[()]:\n ...", "def check_cutout(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [length, num_patches], _ = parse_user_args(method, *args, **kwargs)\n\n check_value(length, (1, FLOAT_MAX_INTEGER))\n check_value(num_patches, (1, FLOAT_MAX_INTEGER))\n\n return method(self, *args, **kwargs)\n\n return new_method", "def check_inputs(function):\n def decorated(self, data, *args, **kwargs):\n if not (isinstance(data, np.ndarray) and len(data.shape) == 2 and data.shape[1] == 1):\n raise ValueError('The argument `data` must be a numpy.ndarray with shape (n, 1).')\n\n return function(self, data, *args, **kwargs)\n\n decorated.__doc__ = function.__doc__\n return decorated", "def check_resize_interpolation(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [size, interpolation], _ = parse_user_args(method, *args, **kwargs)\n check_resize_size(size)\n if interpolation is not None:\n type_check(interpolation, (Inter,), \"interpolation\")\n\n return method(self, *args, **kwargs)\n\n return new_method", "def __length_hint__(self, *args, **kwargs): # real signature unknown\n pass", "def __length_hint__(self, *args, **kwargs): # real signature unknown\n pass", "def bounds(self, pos):", "def test_arguments_same_name() -> None:\n\n @argcomb(a=\"b\")\n def f(a: Any = None, /, b: Any = None, **kwargs: Any) -> None:\n ...\n\n with pytest.warns(UserWarning):\n f(1, 2, a=3) # pylint: disable=E1124", "def validate_and_maintain_frames(func):\n\n def decorator(self, *args, **kwargs):\n if len(args) == 0 and len(kwargs) == 0:\n obj2 = None\n elif len(args) == 0:\n obj2 = next(iter(kwargs.values()))\n else:\n obj2 = args[0]\n\n if (\n not hasattr(obj2, \"_frame\")\n or (self._frame is None or obj2._frame is None)\n or (self._frame == obj2._frame)\n ):\n result = func(self, *args, **kwargs)\n if hasattr(result, \"_frame\"):\n result._frame = self._frame\n if result._frame is None and hasattr(obj2, \"_frame\"):\n result._frame = obj2._frame\n\n return result\n else:\n raise ValueError(\n f\"The objects {self} and {obj2} are defined in different coordinate frames.\"\n )\n\n return decorator", "def check_mix_up_batch_c(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [alpha], _ = parse_user_args(method, *args, **kwargs)\n check_positive(alpha, \"alpha\")\n check_pos_float32(alpha)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def block_coords_to_chunk_coords(\n *args: int, sub_chunk_size: int = 16\n) -> Tuple[int, ...]:\n return tuple(int(math.floor(coord / sub_chunk_size)) for coord in args)", "def measurements_decorator(func):\n @wraps(func)\n def wrapper(nth_nmb: int) -> tuple:\n 
pass # TODO: Replace with implementation!\n\n return wrapper", "def require_arguments(required):\n\n def decorator(func):\n def wrapper(request):\n request_params = get_dict_from_request(request)\n for param in required:\n if param not in request_params:\n return APIMissingArgumentResponse(error_msg=param)\n return func(request)\n\n return wrapper\n\n return decorator", "def test_extra_argument(self):\n @converters.wrap\n def inner_test():\n \"\"\"This shouldn't be called, converting should fail.\"\"\"\n pass\n self.assert_raises_request_error(lambda: inner_test(param=3), 3102)", "def test_nested_one_arg_short():\n\n @type_checked\n def _run_test(thing:(float, int, str)): pass\n\n with pytest.raises(TypeError) as error:\n _run_test((\"123\", 123.12))\n\n assert error.exconly() == (\n \"TypeError: Argument length mismatch. \"\n \"Expected a tuple of float, int, str.\"\n )", "def check_bounding_box_augment_cpp(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [transform, ratio], _ = parse_user_args(method, *args, **kwargs)\n type_check(ratio, (float, int), \"ratio\")\n check_value(ratio, [0., 1.], \"ratio\")\n type_check(transform, (TensorOp,), \"transform\")\n return method(self, *args, **kwargs)\n\n return new_method", "def _in_bounds(self, x, y):\r\n return 0 <= x < 8 and 0 <= y < 8", "def check_resize(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [size], _ = parse_user_args(method, *args, **kwargs)\n check_resize_size(size)\n\n return method(self, *args, **kwargs)\n\n return new_method", "def check_mix_up(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [batch_size, alpha, is_single], _ = parse_user_args(method, *args, **kwargs)\n\n check_value(batch_size, (1, FLOAT_MAX_INTEGER))\n check_positive(alpha, \"alpha\")\n type_check(is_single, (bool,), \"is_single\")\n\n return method(self, *args, **kwargs)\n\n return new_method", "def extra_coords(self) -> ExtraCoordsABC:", "def _assert_valid(self, y: int, x: int) -> None:\n if not (0 <= y < self.size[0] and 0 <= x < self.size[1]):\n raise ValueError('Coordinates out of image boundary, {}'.format(self.size))", "def check_cut_mix_batch_c(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [image_batch_format, alpha, prob], _ = parse_user_args(method, *args, **kwargs)\n type_check(image_batch_format, (ImageBatchFormat,), \"image_batch_format\")\n check_pos_float32(alpha)\n check_positive(alpha, \"alpha\")\n check_value(prob, [0, 1], \"prob\")\n return method(self, *args, **kwargs)\n\n return new_method", "def check_posterize(method):\n\n @wraps(method)\n def new_method(self, *args, **kwargs):\n [bits], _ = parse_user_args(method, *args, **kwargs)\n if bits is not None:\n type_check(bits, (list, tuple, int), \"bits\")\n if isinstance(bits, int):\n check_value(bits, [1, 8])\n if isinstance(bits, (list, tuple)):\n if len(bits) != 2:\n raise TypeError(\"Size of bits should be a single integer or a list/tuple (min, max) of length 2.\")\n for item in bits:\n check_uint8(item, \"bits\")\n # also checks if min <= max\n check_range(bits, [1, 8])\n return method(self, *args, **kwargs)\n\n return new_method", "def validate_position(position: Tuple[int, int], bound: int) -> bool:\n if position[0] < 0 or position[0] >= bound:\n return False\n if position[1] < 0 or position[1] >= bound:\n return False\n return True", "def test_limit_gives_helpful_err_message_with_misuse() -> None:\n msg = r\"Please pass arguments to decorator `@restricted`\"\n with 
pytest.raises(ValueError, match=msg):\n\n @restricted # type: ignore\n def f(x: int) -> int:\n return x", "def filtered_xyz(self) -> tuple[int, int, int]:", "def test_arguments(self):\n calls = []\n decorator = self.decorator()\n\n @decorator\n def func(a, b, c):\n calls.append((a, b, c))\n\n func(1, 2, c=3)\n self.assertEqual(calls, [(1, 2, 3)])", "def check(\n ureg: UnitRegistry, *args: Optional[Union[str, UnitsContainer, Unit]]\n) -> Callable[[F], F]:\n dimensions = [\n ureg.get_dimensionality(dim) if dim is not None else None for dim in args\n ]\n\n def decorator(func):\n count_params = len(signature(func).parameters)\n if len(dimensions) != count_params:\n raise TypeError(\n \"%s takes %i parameters, but %i dimensions were passed\"\n % (func.__name__, count_params, len(dimensions))\n )\n\n assigned = tuple(\n attr for attr in functools.WRAPPER_ASSIGNMENTS if hasattr(func, attr)\n )\n updated = tuple(\n attr for attr in functools.WRAPPER_UPDATES if hasattr(func, attr)\n )\n\n @functools.wraps(func, assigned=assigned, updated=updated)\n def wrapper(*args, **kwargs):\n list_args, empty = _apply_defaults(func, args, kwargs)\n\n for dim, value in zip(dimensions, list_args):\n if dim is None:\n continue\n\n if not ureg.Quantity(value).check(dim):\n val_dim = ureg.get_dimensionality(value)\n raise DimensionalityError(value, \"a quantity of\", val_dim, dim)\n return func(*args, **kwargs)\n\n return wrapper\n\n return decorator" ]
[ "0.5693027", "0.56747705", "0.5548657", "0.5380239", "0.536203", "0.5236589", "0.5205486", "0.5205486", "0.5182123", "0.5156525", "0.5111526", "0.5082493", "0.50712085", "0.50699216", "0.50692797", "0.5059036", "0.505624", "0.5047516", "0.50207466", "0.50109756", "0.500859", "0.50057584", "0.49687794", "0.49685422", "0.49599823", "0.49526754", "0.49471173", "0.49425885", "0.4930703", "0.49285978" ]
0.65880805
0
Set up glow tables. These tables provide glow maps for illuminated points.
def make_glows(): glow = [None] * 16 for i in range(16): dim = 2 * i + 1 glow[i] = array("b", [0] * (dim**3)) for x, y, z in product(xrange(dim), repeat=3): distance = abs(x - i) + abs(y - i) + abs(z - i) glow[i][(x * dim + y) * dim + z] = i + 1 - distance glow[i] = array("B", [clamp(x, 0, 15) for x in glow[i]]) return glow
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setMyColorTable(lr,lg,lb):\n dislin.myvlt(lr,lg,lb,len(lr))", "def _TableSetup(self):\n global _tablesetup\n global singlestarLocation\n if not _tablesetup:\n singlestar.star_setup(singlestarLocation)\n _tablesetup = True", "def _pre_draw_bge(self):\r\n self._pre_draw_common()\r\n # draw rays\r\n self._drawRays()", "def setColourMap(self):\n cmap = self.config['cmap']\n\n pos, colour, mode = colourMaps.colourMaps(cmap)\n\n cmap = pg.ColorMap(pos, colour,mode)\n self.lut = cmap.getLookupTable(0.0, 1.0, 256)\n minsg = np.min(self.sg)\n maxsg = np.max(self.sg)\n self.colourStart = (self.config['brightness'] / 100.0 * self.config['contrast'] / 100.0) * (maxsg - minsg) + minsg\n self.colourEnd = (maxsg - minsg) * (1.0 - self.config['contrast'] / 100.0) + self.colourStart", "def setUp(self):\n\n _gray_data = {'red': [(0., 0, 0), (1., 1.0, 1.0)],\n 'green': [(0., 0, 0), (1., 1.0, 1.0)],\n 'blue': [(0., 0, 0), (1., 1.0, 1.0)]}\n\n self.colormap = ColorMapper.from_segment_map(_gray_data)\n self.colormap.range = DataRange1D()", "def create_layers_table():\n\n table_name = f\"{BQ_LAYERS_TABLE}\"", "def setColourMap(self):\n cmap = self.config['cmap']\n\n pos, colour, mode = colourMaps.colourMaps(cmap)\n\n cmap = pg.ColorMap(pos, colour, mode)\n self.lut = cmap.getLookupTable(0.0, 1.0, 256)\n minsg = np.min(self.sg)\n maxsg = np.max(self.sg)\n self.colourStart = (self.config['brightness'] / 100.0 * self.config['contrast'] / 100.0) * (\n maxsg - minsg) + minsg\n self.colourEnd = (maxsg - minsg) * (1.0 - self.config['contrast'] / 100.0) + self.colourStart", "def setColourLevels(self):\n minsg = np.min(self.sg)\n maxsg = np.max(self.sg)\n brightness = self.brightnessSlider.value()\n contrast = self.contrastSlider.value()\n colourStart = (brightness / 100.0 * contrast / 100.0) * (maxsg - minsg) + minsg\n colourEnd = (maxsg - minsg) * (1.0 - contrast / 100.0) + colourStart\n for btn in self.picbuttons:\n btn.stopPlayback()\n btn.setImage(self.lut, colourStart, colourEnd, False)\n btn.update()", "def _gen_table_style_lines(self):\n yield '.heatmap {border: none; border-collapse: collapse; border-spacing: 0}'\n yield '.heatmap td {padding: 0; margin: 0; font-family: monospace;}'", "def setColorTable(table='rainbow'):\n colortabledict = {'small':'SMALL','vga':'VGA','rainbow':'RAIN',\n 'violet':'SPEC', 'greyscale':'GREY', 'reverse rainbow':'RRAIN', \n 'reverse violet':'RSPEC', 'reverse grey':'RGREY'} \n dislin.setvlt(colortabledict[table])", "def setup( self ):\n glClearColor(*self.background)\n glClearDepth(1.0)\n glDepthFunc(GL_LEQUAL)\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n '''\n ambientLight = [0.2, 0.2, 0.2, 1.0]\n diffuseLight = [0.8, 0.8, 0.8, 1.0]\n specularLight = [0.5, 0.5, 0.5, 1.0]\n lightPos = [0.0, 0.0, -30.0, 1.0]\n glLightfv(GL_LIGHT0, GL_AMBIENT, ambientLight)\n glLightfv(GL_LIGHT0, GL_DIFFUSE, diffuseLight)\n glLightfv(GL_LIGHT0, GL_SPECULAR, specularLight)\n glLightfv(GL_LIGHT0, GL_POSITION, lightPos)\n glEnable(GL_LIGHTING)\n glEnable(GL_LIGHT0)\n \n mat = [1.0, 0.0, 0.1, 1.0]\n glMaterialfv(GL_FRONT, GL_AMBIENT, mat)\n mat[0] = 1.0; mat[1] = 0.0; mat[2] = 0.0\n glMaterialfv(GL_FRONT, GL_DIFFUSE, mat)\n mat[0] = 1.0; mat[1] = 1.0; mat[2] = 1.0\n glMaterialfv(GL_FRONT, GL_SPECULAR, mat)\n glMaterialf(GL_FRONT, GL_SHININESS, 0.6*128.0)\n glEnable(GL_FOG)\n fogColor = [1.0, 0.0, 1.0, 1.0]\n \n global fogMode\n fogMode = GL_EXP2\n glFogi (GL_FOG_MODE, fogMode)\n glFogfv (GL_FOG_COLOR, fogColor)\n glFogf (GL_FOG_DENSITY, 0.0001)\n glHint (GL_FOG_HINT, GL_NICEST)\n glFogf 
(GL_FOG_START, 10.0)\n glFogf (GL_FOG_END, -1000)\n glClearColor(0.0, 0.0, 0.1, 1.0)\n '''\n glEnable(GL_DEPTH_TEST) # Enables Depth Testing\n glShadeModel(GL_SMOOTH) # Enables smooth color shading\n glMatrixMode(GL_PROJECTION)\n glLoadIdentity() \n # Set up perspective view\n gluPerspective(50.0, float(self.size[0])/float(self.size[1]), 0.1, 5000.0)\n # Set up an orthographic view\n #glOrtho(-float(width)/2, float(width)/2, -float(height)/2, float(height)/2, -1.0, 1.0)\n glMatrixMode(GL_MODELVIEW)\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)\n display.flip() # For interactiveness sake\n return", "def setup_steps(self):\n step1 = ground_step.Ground(5745, 495, 40, 44)\n step2 = ground_step.Ground(5788, 452, 40, 44)\n step3 = ground_step.Ground(5831, 409, 40, 44)\n step4 = ground_step.Ground(5874, 366, 40, 176)\n\n step5 = ground_step.Ground(6001, 366, 40, 176)\n step6 = ground_step.Ground(6044, 408, 40, 40)\n step7 = ground_step.Ground(6087, 452, 40, 40)\n step8 = ground_step.Ground(6130, 495, 40, 40)\n\n step9 = ground_step.Ground(6345, 495, 40, 40)\n step10 = ground_step.Ground(6388, 452, 40, 40)\n step11 = ground_step.Ground(6431, 409, 40, 40)\n step12 = ground_step.Ground(6474, 366, 40, 40)\n step13 = ground_step.Ground(6517, 366, 40, 176)\n\n step14 = ground_step.Ground(6644, 366, 40, 176)\n step15 = ground_step.Ground(6687, 408, 40, 40)\n step16 = ground_step.Ground(6728, 452, 40, 40)\n step17 = ground_step.Ground(6771, 495, 40, 40)\n\n step18 = ground_step.Ground(7760, 495, 40, 40)\n step19 = ground_step.Ground(7803, 452, 40, 40)\n step20 = ground_step.Ground(7845, 409, 40, 40)\n step21 = ground_step.Ground(7888, 366, 40, 40)\n step22 = ground_step.Ground(7931, 323, 40, 40)\n step23 = ground_step.Ground(7974, 280, 40, 40)\n step24 = ground_step.Ground(8017, 237, 40, 40)\n step25 = ground_step.Ground(8060, 194, 40, 40)\n step26 = ground_step.Ground(8103, 194, 40, 360)\n\n step27 = ground_step.Ground(8488, 495, 40, 40)\n\n self.step_group = pygame.sprite.Group(step1, step2,\n step3, step4,\n step5, step6,\n step7, step8,\n step9, step10,\n step11, step12,\n step13, step14,\n step15, step16,\n step17, step18,\n step19, step20,\n step21, step22,\n step23, step24,\n step25, step26,\n step27)", "def setUp(self):\n self.mixing_ratio = np.array([0.1, 0.2, 0.3], dtype=np.float32)\n self.specific_heat = np.array([1089.5, 1174.0, 1258.5], dtype=np.float32)\n self.latent_heat = np.array([2531771.0, 2508371.0, 2484971.0], dtype=np.float32)\n self.temperature = np.array([185.0, 260.65, 338.15], dtype=np.float32)", "def setUp(self):\n\n self.thresholds = np.array([276, 277], dtype=np.float32)\n self.rain_name = \"probability_of_falling_rain_level_above_surface\"\n self.snow_name = \"probability_of_falling_snow_level_below_surface\"\n\n rain_prob = np.array(\n [\n [[0.5, 0.1, 1.0], [0.0, 0.2, 0.5], [0.1, 0.1, 0.3]],\n [[0.5, 0.1, 1.0], [0.0, 0.2, 0.5], [0.1, 0.1, 0.3]],\n ],\n dtype=np.float32,\n )\n self.rain_prob_cube = set_up_probability_cube(\n rain_prob, self.thresholds, variable_name=self.rain_name\n )\n\n snow_prob = np.array(\n [\n [[0.0, 0.4, 0.0], [0.5, 0.3, 0.1], [0.0, 0.4, 0.3]],\n [[0.0, 0.4, 0.0], [0.5, 0.3, 0.1], [0.0, 0.4, 0.3]],\n ],\n dtype=np.float32,\n )\n self.snow_prob_cube = set_up_probability_cube(\n snow_prob, self.thresholds, variable_name=self.snow_name\n )\n\n high_prob = np.array(\n [\n [[1.0, 0.7, 0.2], [0.8, 0.8, 0.7], [0.9, 0.9, 0.7]],\n [[1.0, 0.7, 0.2], [0.8, 0.8, 0.7], [0.9, 0.9, 0.7]],\n ],\n dtype=np.float32,\n )\n self.high_prob_cube = 
set_up_probability_cube(\n high_prob, self.thresholds, variable_name=self.snow_name\n )", "def setup_image(self):\n # Create the correct size image for the table\n rows = self.table.count('\\n')\n columns = self.table.split('\\n')[0].count('-') + self.table.split('\\n')[0].count('+')\n self.img = Image.new('RGB', ((columns * 12) + 24, rows * 21 + 48), color=(54, 57, 63))\n\n # Initialize font and drawing object\n self.font = ImageFont.truetype('../extra_files/cour.ttf', 20)\n self.draw = ImageDraw.Draw(self.img)\n\n # Draw the table without markings\n for x in range(5):\n self.draw.text((12, 12), self.table, font=self.font, fill=(255, 255, 255))", "def setupStockTable(self):\n # Get the date\n # NOTE: This is probably un\n date = datetime.date()\n dateStr = date.month() + \"/\" + date.day() + \"/\" + date.year()\n\n stocks = (\"INTC\", \"AAPL\", \"GOOG\", \"YHOO\", \"SYK\", \"VZ\")\n\n for stock in stocks:\n stockObj = self.securityFactory(stock)\n stockObj.queryAPI()\n\n self.stockDB.query(\"INSERT INTO basic_info (ticker, price, daily_change, company, year_high, year_low, \\\n daily_percent, date, streak) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\", (stockObj.target, stockObj.curr, \\\n stockObj.daily_change, stockObj.company,\\\n stockObj.year_high, stockObj.year_low,\\\n stockObj.daily_percent, dateStr, 0))", "def define_figure(width, height, dataframe, hover, itype):\n\n # Mapper colors\n # I left here the ones just in case\n colors_auld = ['#800000', '#860000', '#8c0000', '#930000', '#990000', '#9f0000', '#a60000', '#ac0000', '#b20000', '#b90000', '#bf0000', '#c50000', '#cc0000', '#d20000', '#d80000', '#df0000', '#e50000', '#eb0000', '#f20000', '#f80000', '#ff0000', '#ff0700', '#ff0e00', '#ff1500', '#ff1c00', '#ff2300', '#ff2a00', '#ff3100', '#ff3800', '#ff3f00', '#ff4600', '#ff4d00', '#ff5400', '#ff5b00', '#ff6200', '#ff6900', '#ff7000', '#ff7700', '#ff7e00', '#ff8500', '#ff8c00', '#ff9100', '#ff9700', '#ff9d00', '#ffa300', '#ffa800', '#ffae00', '#ffb400', '#ffba00', '#ffbf00', '#ffc500', '#ffcb00', '#ffd100', '#ffd600', '#ffdc00', '#ffe200', '#ffe800', '#ffed00', '#fff300', '#fff900', '#ffff00', '#f2ff00', '#e5ff00', '#d8ff00', '#ccff00', '#bfff00', '#b2ff00', '#a5ff00', '#99ff00', '#8cff00', '#7fff00', '#72ff00', '#66ff00', '#59ff00', '#4cff00', '#3fff00', '#33ff00', '#26ff00', '#19ff00', '#0cff00', '#00ff00', '#0afc0a', '#15fa15', '#1ff81f', '#2af62a', '#34f434', '#3ff13f', '#49ef49', '#54ed54', '#5eeb5e', '#69e969', '#74e674', '#7ee47e', '#89e289', '#93e093', '#9ede9e', '#a8dba8', '#b3d9b3', '#bdd7bd', '#c8d5c8', '#d3d3d3']\n colors_ylorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#ffeea3', '#fff0a7', 
'#fff1ab', '#fff3ae', '#fff4b2', '#fff6b6', '#fff7b9', '#fff9bd', '#fffac1', '#fffcc4', '#fffdc8', '#ffffcc']\n colors_grorrd = ['#800026', '#850026', '#8a0026', '#8f0026', '#940026', '#990026', '#9e0026', '#a30026', '#a80026', '#ad0026', '#b20026', '#b70026', '#bd0026', '#c00225', '#c30424', '#c60623', '#c90822', '#cc0a21', '#d00d21', '#d30f20', '#d6111f', '#d9131e', '#dc151d', '#df171c', '#e31a1c', '#e51e1d', '#e7221e', '#e9271f', '#eb2b20', '#ed2f21', '#ef3423', '#f13824', '#f33c25', '#f54126', '#f74527', '#f94928', '#fc4e2a', '#fc532b', '#fc582d', '#fc5d2e', '#fc6330', '#fc6831', '#fc6d33', '#fc7234', '#fc7836', '#fc7d37', '#fc8239', '#fc873a', '#fd8d3c', '#fd903d', '#fd933e', '#fd9640', '#fd9941', '#fd9c42', '#fd9f44', '#fda245', '#fda546', '#fda848', '#fdab49', '#fdae4a', '#feb24c', '#feb54f', '#feb853', '#febb56', '#febf5a', '#fec25d', '#fec561', '#fec864', '#fecc68', '#fecf6b', '#fed26f', '#fed572', '#fed976', '#feda79', '#fedc7d', '#fede80', '#fedf84', '#fee187', '#fee38b', '#fee48e', '#fee692', '#fee895', '#fee999', '#feeb9c', '#ffeda0', '#fbeaa4', '#f7e8a8', '#f4e6ac', '#f0e4b1', '#ece2b5', '#e9e0b9', '#e5ddbd', '#e1dbc2', '#ded9c6', '#dad7ca', '#d6d5ce', '#d3d3d3']\n colors_inferno = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02010E', '#020210', '#030212', '#040314', '#040316', '#050418', '#06041B', '#07051D', '#08061F', '#090621', '#0A0723', '#0B0726', '#0D0828', '#0E082A', '#0F092D', '#10092F', '#120A32', '#130A34', '#140B36', '#160B39', '#170B3B', '#190B3E', '#1A0B40', '#1C0C43', '#1D0C45', '#1F0C47', '#200C4A', '#220B4C', '#240B4E', '#260B50', '#270B52', '#290B54', '#2B0A56', '#2D0A58', '#2E0A5A', '#300A5C', '#32095D', '#34095F', '#350960', '#370961', '#390962', '#3B0964', '#3C0965', '#3E0966', '#400966', '#410967', '#430A68', '#450A69', '#460A69', '#480B6A', '#4A0B6A', '#4B0C6B', '#4D0C6B', '#4F0D6C', '#500D6C', '#520E6C', '#530E6D', '#550F6D', '#570F6D', '#58106D', '#5A116D', '#5B116E', '#5D126E', '#5F126E', '#60136E', '#62146E', '#63146E', '#65156E', '#66156E', '#68166E', '#6A176E', '#6B176E', '#6D186E', '#6E186E', '#70196E', '#72196D', '#731A6D', '#751B6D', '#761B6D', '#781C6D', '#7A1C6D', '#7B1D6C', '#7D1D6C', '#7E1E6C', '#801F6B', '#811F6B', '#83206B', '#85206A', '#86216A', '#88216A', '#892269', '#8B2269', '#8D2369', '#8E2468', '#902468', '#912567', '#932567', '#952666', '#962666', '#982765', '#992864', '#9B2864', '#9C2963', '#9E2963', '#A02A62', '#A12B61', '#A32B61', '#A42C60', '#A62C5F', '#A72D5F', '#A92E5E', '#AB2E5D', '#AC2F5C', '#AE305B', '#AF315B', '#B1315A', '#B23259', '#B43358', '#B53357', '#B73456', '#B83556', '#BA3655', '#BB3754', '#BD3753', '#BE3852', '#BF3951', '#C13A50', '#C23B4F', '#C43C4E', '#C53D4D', '#C73E4C', '#C83E4B', '#C93F4A', '#CB4049', '#CC4148', '#CD4247', '#CF4446', '#D04544', '#D14643', '#D24742', '#D44841', '#D54940', '#D64A3F', '#D74B3E', '#D94D3D', '#DA4E3B', '#DB4F3A', '#DC5039', '#DD5238', '#DE5337', '#DF5436', '#E05634', '#E25733', '#E35832', '#E45A31', '#E55B30', '#E65C2E', '#E65E2D', '#E75F2C', '#E8612B', '#E9622A', '#EA6428', '#EB6527', '#EC6726', '#ED6825', '#ED6A23', '#EE6C22', '#EF6D21', '#F06F1F', '#F0701E', '#F1721D', '#F2741C', '#F2751A', '#F37719', '#F37918', '#F47A16', '#F57C15', '#F57E14', '#F68012', '#F68111', '#F78310', '#F7850E', '#F8870D', '#F8880C', '#F88A0B', '#F98C09', '#F98E08', '#F99008', '#FA9107', '#FA9306', '#FA9506', '#FA9706', '#FB9906', '#FB9B06', '#FB9D06', '#FB9E07', '#FBA007', '#FBA208', '#FBA40A', '#FBA60B', '#FBA80D', '#FBAA0E', '#FBAC10', '#FBAE12', '#FBB014', '#FBB116', 
'#FBB318', '#FBB51A', '#FBB71C', '#FBB91E', '#FABB21', '#FABD23', '#FABF25', '#FAC128', '#F9C32A', '#F9C52C', '#F9C72F', '#F8C931', '#F8CB34', '#F8CD37', '#F7CF3A', '#F7D13C', '#F6D33F', '#F6D542', '#F5D745', '#F5D948', '#F4DB4B', '#F4DC4F', '#F3DE52', '#F3E056', '#F3E259', '#F2E45D', '#F2E660', '#F1E864', '#F1E968', '#F1EB6C', '#F1ED70', '#F1EE74', '#F1F079', '#F1F27D', '#F2F381', '#F2F485', '#F3F689', '#F4F78D', '#F5F891', '#F6FA95', '#F7FB99', '#F9FC9D', '#FAFDA0', '#FCFEA4']\n colors_magma = ['#000003', '#000004', '#000006', '#010007', '#010109', '#01010B', '#02020D', '#02020F', '#030311', '#040313', '#040415', '#050417', '#060519', '#07051B', '#08061D', '#09071F', '#0A0722', '#0B0824', '#0C0926', '#0D0A28', '#0E0A2A', '#0F0B2C', '#100C2F', '#110C31', '#120D33', '#140D35', '#150E38', '#160E3A', '#170F3C', '#180F3F', '#1A1041', '#1B1044', '#1C1046', '#1E1049', '#1F114B', '#20114D', '#221150', '#231152', '#251155', '#261157', '#281159', '#2A115C', '#2B115E', '#2D1060', '#2F1062', '#301065', '#321067', '#341068', '#350F6A', '#370F6C', '#390F6E', '#3B0F6F', '#3C0F71', '#3E0F72', '#400F73', '#420F74', '#430F75', '#450F76', '#470F77', '#481078', '#4A1079', '#4B1079', '#4D117A', '#4F117B', '#50127B', '#52127C', '#53137C', '#55137D', '#57147D', '#58157E', '#5A157E', '#5B167E', '#5D177E', '#5E177F', '#60187F', '#61187F', '#63197F', '#651A80', '#661A80', '#681B80', '#691C80', '#6B1C80', '#6C1D80', '#6E1E81', '#6F1E81', '#711F81', '#731F81', '#742081', '#762181', '#772181', '#792281', '#7A2281', '#7C2381', '#7E2481', '#7F2481', '#812581', '#822581', '#842681', '#852681', '#872781', '#892881', '#8A2881', '#8C2980', '#8D2980', '#8F2A80', '#912A80', '#922B80', '#942B80', '#952C80', '#972C7F', '#992D7F', '#9A2D7F', '#9C2E7F', '#9E2E7E', '#9F2F7E', '#A12F7E', '#A3307E', '#A4307D', '#A6317D', '#A7317D', '#A9327C', '#AB337C', '#AC337B', '#AE347B', '#B0347B', '#B1357A', '#B3357A', '#B53679', '#B63679', '#B83778', '#B93778', '#BB3877', '#BD3977', '#BE3976', '#C03A75', '#C23A75', '#C33B74', '#C53C74', '#C63C73', '#C83D72', '#CA3E72', '#CB3E71', '#CD3F70', '#CE4070', '#D0416F', '#D1426E', '#D3426D', '#D4436D', '#D6446C', '#D7456B', '#D9466A', '#DA4769', '#DC4869', '#DD4968', '#DE4A67', '#E04B66', '#E14C66', '#E24D65', '#E44E64', '#E55063', '#E65162', '#E75262', '#E85461', '#EA5560', '#EB5660', '#EC585F', '#ED595F', '#EE5B5E', '#EE5D5D', '#EF5E5D', '#F0605D', '#F1615C', '#F2635C', '#F3655C', '#F3675B', '#F4685B', '#F56A5B', '#F56C5B', '#F66E5B', '#F6705B', '#F7715B', '#F7735C', '#F8755C', '#F8775C', '#F9795C', '#F97B5D', '#F97D5D', '#FA7F5E', '#FA805E', '#FA825F', '#FB8460', '#FB8660', '#FB8861', '#FB8A62', '#FC8C63', '#FC8E63', '#FC9064', '#FC9265', '#FC9366', '#FD9567', '#FD9768', '#FD9969', '#FD9B6A', '#FD9D6B', '#FD9F6C', '#FDA16E', '#FDA26F', '#FDA470', '#FEA671', '#FEA873', '#FEAA74', '#FEAC75', '#FEAE76', '#FEAF78', '#FEB179', '#FEB37B', '#FEB57C', '#FEB77D', '#FEB97F', '#FEBB80', '#FEBC82', '#FEBE83', '#FEC085', '#FEC286', '#FEC488', '#FEC689', '#FEC78B', '#FEC98D', '#FECB8E', '#FDCD90', '#FDCF92', '#FDD193', '#FDD295', '#FDD497', '#FDD698', '#FDD89A', '#FDDA9C', '#FDDC9D', '#FDDD9F', '#FDDFA1', '#FDE1A3', '#FCE3A5', '#FCE5A6', '#FCE6A8', '#FCE8AA', '#FCEAAC', '#FCECAE', '#FCEEB0', '#FCF0B1', '#FCF1B3', '#FCF3B5', '#FCF5B7', '#FBF7B9', '#FBF9BB', '#FBFABD', '#FBFCBF']\n colors_ylgnbl = ['#081d58', '#0a1e5d', '#0c2062', '#0f2267', '#11246c', '#142671', '#162876', '#182a7b', '#1b2c80', '#1d2e85', '#20308a', '#22328f', '#253494', '#243795', '#243b97', '#243e99', '#24429a', '#23459c', '#23499e', 
'#234c9f', '#2350a1', '#2253a3', '#2257a4', '#225aa6', '#225ea8', '#2162aa', '#2166ac', '#206aae', '#206fb0', '#1f73b2', '#1f77b4', '#1f7bb6', '#1e80b8', '#1e84ba', '#1d88bc', '#1d8cbe', '#1d91c0', '#2094c0', '#2397c0', '#269ac1', '#299dc1', '#2ca0c1', '#2fa3c2', '#32a6c2', '#35a9c2', '#38acc3', '#3bafc3', '#3eb2c3', '#41b6c4', '#46b7c3', '#4bb9c2', '#50bbc1', '#55bdc1', '#5abfc0', '#60c1bf', '#65c3be', '#6ac5be', '#6fc7bd', '#74c9bc', '#79cbbb', '#7fcdbb', '#85cfba', '#8bd1b9', '#91d4b9', '#97d6b8', '#9dd8b8', '#a3dbb7', '#a9ddb6', '#afdfb6', '#b5e2b5', '#bbe4b5', '#c1e6b4', '#c7e9b4', '#caeab3', '#cdebb3', '#d0ecb3', '#d3eeb3', '#d6efb2', '#daf0b2', '#ddf1b2', '#e0f3b2', '#e3f4b1', '#e6f5b1', '#e9f6b1', '#edf8b1', '#eef8b4', '#f0f9b7', '#f1f9bb', '#f3fabe', '#f4fac1', '#f6fbc5', '#f7fcc8', '#f9fccb', '#fafdcf', '#fcfdd2', '#fdfed5', '#ffffd9']\n colors_rdylbl = ['#a50026', '#a60529', '#a80a2c', '#aa0f2f', '#ac1432', '#ae1a35', '#b01f38', '#b1243b', '#b3293e', '#b52e42', '#b73445', '#b93948', '#bb3e4b', '#bc434e', '#be4851', '#c04e54', '#c25357', '#c4585b', '#c65d5e', '#c76261', '#c96864', '#cb6d67', '#cd726a', '#cf776d', '#d17c70', '#d28274', '#d48777', '#d68c7a', '#d8917d', '#da9680', '#dc9c83', '#dda186', '#dfa689', '#e1ab8d', '#e3b090', '#e5b693', '#e7bb96', '#e8c099', '#eac59c', '#ecca9f', '#eed0a2', '#f0d5a6', '#f2daa9', '#f3dfac', '#f5e4af', '#f7eab2', '#f9efb5', '#fbf4b8', '#fdf9bb', '#ffffbf', '#fafabe', '#f6f6bd', '#f2f2bc', '#eeeebb', '#e9eaba', '#e5e6b9', '#e1e2b9', '#dddeb8', '#d9dab7', '#d4d5b6', '#d0d1b5', '#cccdb4', '#c8c9b3', '#c4c5b3', '#bfc1b2', '#bbbdb1', '#b7b9b0', '#b3b5af', '#afb1ae', '#aaacad', '#a6a8ad', '#a2a4ac', '#9ea0ab', '#9a9caa', '#9598a9', '#9194a8', '#8d90a7', '#898ca7', '#8588a6', '#8083a5', '#7c7fa4', '#787ba3', '#7477a2', '#7073a1', '#6b6fa1', '#676ba0', '#63679f', '#5f639e', '#5b5f9d', '#565a9c', '#52569b', '#4e529b', '#4a4e9a', '#464a99', '#414698', '#3d4297', '#393e96', '#353a95', '#313695']\n colors = colors_grorrd\n colors.reverse()\n mapper = LinearColorMapper(palette=colors, low=0, high=100)\n\n #Bokeh figure\n p = figure(\n plot_width= width,\n plot_height=height,\n #title=\"Example freq\",\n y_range=list(dataframe.Id.drop_duplicates()),\n x_range=list(dataframe.Position.drop_duplicates()),\n tools=[\"hover\",\"tap\",\"save\",\"reset\",\"wheel_zoom\"], \n x_axis_location=\"above\",\n active_drag=None,\n toolbar_location=\"right\",\n toolbar_sticky = False,\n min_border_top = 200,#leave some space for x-axis artificial labels\n min_border_bottom = 0,\n )\n\n # Create rectangle for heatmap\n mysource = ColumnDataSource(dataframe)\n p.rect(\n y=\"Id\", \n x=\"Position\",\n width=1, \n height=1, \n source=mysource,\n line_color=\"white\", \n fill_color=transform(itype, mapper),\n\n # set visual properties for selected glyphs\n selection_line_color=\"black\",\n selection_fill_color=transform(itype, mapper),\n # set visual properties for non-selected glyphs\n nonselection_fill_color=transform(itype, mapper),\n nonselection_fill_alpha=1,\n nonselection_line_alpha=1,\n nonselection_line_color=\"white\"\n )\n\n #Very poor way of creating X-axis labels. 
Necessary for having linejumps inside the axis labels\n x_cord = 0\n y_cord = len(list(dataframe.Id.drop_duplicates()))+11#Position: 11 spaces above the plot's top border\n foolabel = Label(x=-1,\n y=y_cord,\n text='\\nA: \\nB: \\nC: \\nF: \\n\\n\\nA: \\nB: \\nC: \\nF: \\n',\n render_mode='css', \n border_line_alpha=1.0,\n text_font_size = \"10pt\",\n background_fill_color = \"#FFFFFF\")\n p.add_layout(foolabel)\n\n #Fore every unique position in the set, add a label in axis\n for position in list(dataframe.Position.drop_duplicates()):\n position = position.replace(\"Ligand\",\"Lig\\n\\n\\n\\n\\n\")\n foolabel = Label(x=x_cord,\n y=y_cord,\n text=position,\n render_mode='css', \n border_line_alpha=1.0,\n background_fill_color = \"#FFFFFF\",\n text_font_size = \"10pt\")\n p.add_layout(foolabel)\n x_cord +=1\n\n # Setting axis\n p.axis.axis_line_color = None\n p.axis.major_tick_line_color = None\n p.xaxis.major_label_text_font_size = \"10pt\"\n p.yaxis.major_label_text_font_size = \"10pt\"\n p.yaxis.visible = False\n p.xaxis.visible = False\n p.axis.major_label_standoff = 0\n p.xaxis.major_label_orientation = 1\n\n # Adding hover\n p.add_tools(hover)\n\n # Needed later\n return(mysource,p)", "def eg_pre():\n\n print(\"\\teg3\")\n\n d = 1\n\n for _ in range(10):\n t1 = []\n t2 = []\n\n for _ in range(32):\n t1.append(utils.gaussian(10, 1))\n t2.append(utils.gaussian(d * 10, 1))\n\n print(\"\", \"\", d, d < 1.1, stats.bootstrap(\n t1, t2), stats.bootstrap(t1, t1), sep=\"\\t\")\n\n d = round(d + .05, 2)", "def snowfall_on_begin(self) -> None:\n self.globalsnode.tint = (0.74, 0.74, 0.78)\n # self.globalsnode.tint = (1.4, 1.4, 1.6)\n self.globalsnode.ambient_color = (1, 1, 1)\n self.globalsnode.shadow_ortho = True\n self.globalsnode.vignette_outer = (0.86, 0.86, 0.86)\n self.globalsnode.vignette_inner = (0.95, 0.95, 0.99)\n self.globalsnode.vr_near_clip = 0.5\n self.snowfall()\n return# self.on_begin_old()", "def make_glow_model(im_in, bin_x=1, bin_y=1):\n im=im_in.copy()\n im[0]=im[2]\n im[1]=im[2]\n im[-1]=im[-2]\n \n #glow image\n glow=np.zeros_like(im)\n \n #meshgrid\n x, y = np.meshgrid(np.arange(im.shape[1]), np.arange(im.shape[0]))\n \n \n def model_corner(im, x0, y0, xw, yw, iparams, std_clip=0):\n \"\"\" std_clip is the y height of the small corner to use to exclude\n spectra in the large corner,\n \n (iparams=(glow amp, x center, y center, xwid, ywid, xy amount)\n \n positions and initial params adjusted automatically for binning\n pass coordinates in 4k positions\n \"\"\"\n x0/=bin_x\n y0/=bin_y\n xw/=bin_x\n yw/=bin_y\n iparams=list(iparams)\n iparams[1]/=bin_x\n iparams[2]/=bin_y\n iparams[3]/=bin_x\n iparams[4]/=bin_y\n \n corner=im[y0:y0+yw,x0:x0+xw].copy()\n if std_clip:\n small_corner=im[y0:y0+std_clip,x0:x0+xw].copy()\n patch_locs=corner>2*small_corner.std()\n patch_locs[:y0+std_clip,:]=False\n corner[patch_locs]=np.median(small_corner)\n cim, param= gaussfit2D(corner, iparams)\n param=list(param)\n param[-1]=0\n param[1]+=x0\n param[2]+=y0\n return gauss2D(( x,y), *param)\n \n #Lower R\n try:\n tmp=model_corner(im, 3996, 2, 100, 100,\n (150, 58, -7, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Lower R glow model failed: {}'.format(str(e))\n\n #Lower L\n try:\n tmp=model_corner(im, 0, 2, 100, 100,\n (150, 40, 0, 30.0, 20.0, 0, 0),\n std_clip=50)\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 
'Lower L glow model failed: {}'.format(str(e))\n \n\n #Upper L\n try:\n tmp=model_corner(im, 0, 4012, 100, 100,\n (150, 40, 100, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Upper L glow model failed: {}'.format(str(e))\n\n #Upper R\n try:\n tmp=model_corner(im, 3996, 4000, 100, 100,\n (150, 58, 100, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Upper R glow model failed: {}'.format(str(e))\n \n return glow", "def initialise_shadow_map(self):\n self.shadow_map = np.zeros( self.x_len + 1, np.int8)\n \n for i in range(1, self.x_len + 1):\n self.shadow_map[i] = int((math.tan(math.radians(15)) * i) * (1 / self.slab_ratio))", "def place_headlamp_light():\n\n lx = 1.0\n ly = light_height\n lz = 2.0\n #light_position = [lx, ly, lz, 1.0]\n light_position = [0.0, 0.0, 0.0, 1]\n light_ambient = [ 1*brightness, 1*brightness, 1*brightness, 1.0 ]\n light_diffuse = [ 1*brightness, 1*brightness, 1*brightness, 1.0 ]\n light_specular = [ 1*brightness, 1*brightness, 1*brightness, 1.0 ]\n light_direction = [1.0, -1.0, 1.0, 0.0] # Light points down\n # glViewport(0, 0, win_width, win_height)\n # glMatrixMode(GL_PROJECTION)\n # glLoadIdentity()\n # gluPerspective(40.0, float(win_width) / float(win_height), 0.01, 100.0)\n #\n # glMatrixMode(GL_MODELVIEW)\n # glLoadIdentity()\n # glPushMatrix()\n glLightfv(GL_LIGHT4, GL_POSITION, light_position)\n\n\n\n #glLightfv(GL_LIGHT4, GL_POSITION, (GLfloat * 4)(0.0, 0.0, 0.0, 1))\n glLightfv(GL_LIGHT4, GL_AMBIENT, light_ambient)\n glLightfv(GL_LIGHT4, GL_DIFFUSE, light_diffuse)\n glLightfv(GL_LIGHT4, GL_SPECULAR, light_specular)\n\n # Constant attenuation (for distance, etc.)\n # Only works for fixed light locations! 
Otherwise disabled\n # glLightf(GL_LIGHT1, GL_CONSTANT_ATTENUATION, 2.0)\n # glLightf(GL_LIGHT1, GL_LINEAR_ATTENUATION, 0.0)\n # glLightf(GL_LIGHT1, GL_QUADRATIC_ATTENUATION, 0.0)\n\n glLightf(GL_LIGHT4, GL_CONSTANT_ATTENUATION, 3.0)\n glLightf(GL_LIGHT4, GL_LINEAR_ATTENUATION, 0.0)\n glLightf(GL_LIGHT4, GL_QUADRATIC_ATTENUATION, 0.0)\n\n # Create a spotlight effect (none at the moment)\n if headlamp_is_on:\n glLightf(GL_LIGHT4, GL_SPOT_CUTOFF, 30.0)\n glLightf(GL_LIGHT4, GL_SPOT_EXPONENT, 0.0)\n glLightfv(GL_LIGHT4, GL_SPOT_DIRECTION, light_direction)\n else:\n glLightf(GL_LIGHT4, GL_SPOT_CUTOFF, 180.0)\n glLightf(GL_LIGHT4, GL_SPOT_EXPONENT, 0.0)\n\n glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, use_lv)\n glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, GL_TRUE)\n # Try GL_TRUE - but then watch what happens when light is low\n\n glEnable(GL_LIGHT4)\n\n # This part draws a SELF-COLORED sphere (in spot where light is!)\n glPushMatrix()\n glTranslatef(lx, ly, lz)\n glDisable(GL_LIGHTING)\n glColor3f(brightness, brightness, brightness)\n glutSolidSphere(0.5, 20, 20)\n glEnable(GL_LIGHTING)\n glPopMatrix()", "def Build_Background_Template(numBGPhotons, bgTemplate, PSFTableFront, PSFTableBack,flatLevel = 0.0,HESS = False,outputSize=300,angularSize=10.0):\r\n \r\n numPhotons = numBGPhotons\r\n numHigh = int(round(.32 *numPhotons))\r\n numLow = numPhotons-numHigh\r\n \r\n bgEventsX = []\r\n bgEventsY = []\r\n \r\n bgTemplate = bgTemplate *(1.0-flatLevel) + flatLevel\r\n# import matplotlib.pyplot as plt\r\n# plt.imshow(bgTemplate,'jet',vmin=0, vmax=1)\r\n# plt.colorbar()\r\n# plt.show()\r\n\r\n app=float(angularSize)/float(outputSize) # angle per pixel\r\n for i in range(numPhotons):\r\n x ,y = 0, 0\r\n while True:\r\n x,y = np.random.randint(0,high = len(bgTemplate)),np.random.randint(0,high = len(bgTemplate))\r\n if (np.random.ranf() < bgTemplate[y][x]):\r\n break\r\n # Shift and scale coordinates to output map and then compute PSF modification to the position.\r\n psfMod = PSF_Spread(PSFTableFront,PSFTableBack, HESS =HESS)\r\n dx = psfMod[0]*math.cos(psfMod[1]) # PSF shift in deg\r\n dy = psfMod[0]*math.sin(psfMod[1]) # PSF shift in deg\r\n \r\n bgEventsX.append((x-outputSize/2.0)*app + dx)\r\n bgEventsY.append((y-outputSize/2.0)*app + dy)\r\n \r\n return (bgEventsX, bgEventsY)", "def set_up_tables(self):\n tables = []\n tables.append({'groupname': 'metadata',\n 'tablename': 'sim_info',\n 'description': desc.SimInfoRow,\n 'tabletitle': 'Simulation Information'})\n tables.append({'groupname': 'metadata',\n 'tablename': 'sim_timeseries',\n 'description': desc.SimTimeseriesRow,\n 'tabletitle': 'Simulation Power Data'})\n tables.append({'groupname': 'th',\n 'tablename': 'th_params',\n 'description': desc.ThMetadataRow,\n 'tabletitle': 'TH Component Parameters'})\n tables.append({'groupname': 'th',\n 'tablename': 'th_timeseries',\n 'description': desc.ThTimeseriesRow,\n 'tabletitle': 'TH Timeseries'})\n tables.append({'groupname': 'neutronics',\n 'tablename': 'neutronics_timeseries',\n 'description': desc.NeutronicsTimeseriesRow,\n 'tabletitle': 'Neutronics Timeseries'})\n tables.append({'groupname': 'neutronics',\n 'tablename': 'neutronics_params',\n 'description': desc.NeutronicsParamsRow,\n 'tabletitle': 'Neutronics Metadata'})\n tables.append({'groupname': 'neutronics',\n 'tablename': 'zetas',\n 'description': desc.ZetasTimestepRow,\n 'tabletitle': 'Neutron Precursor Concentrations'})\n tables.append({'groupname': 'neutronics',\n 'tablename': 'omegas',\n 'description': desc.OmegasTimestepRow,\n 
'tabletitle': 'Decay Heat Fractions'})\n return tables", "def generate_huawei_2g_cell_level_discrepancies(self):\n engine = create_engine('postgresql://bodastage:password@database/bts')\n vendor_pk = 2\n tech_pk = 1\n schema_name = 'hua_cm_2g'\n\n conn = psycopg2.connect(\"dbname=bts user=bodastage password=password host=database\")\n conn.autocommit = True\n cur = conn.cursor()\n\n # Get MO\n sql = \"\"\"\n SELECT DISTINCT\n t3.name as mo,\n t3.pk as pk,\n t3.affect_level\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n AND t3.affect_level = 1\n \"\"\".format(vendor_pk, tech_pk)\n cur.execute(sql)\n mo_list = cur.fetchall()\n\n for mo in mo_list:\n mo_name, mo_pk, mo_affect_level = mo\n\n # Get parameters\n sql = \"\"\"\n SELECT \n t2.name as pname,\n t2.pk as pk\n FROM \n live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk \n INNER JOIN network_entities t4 on t4.pk = t3.affect_level\n AND t3.vendor_pk = {} AND t3.tech_pk = {}\n WHERE\n t3.name = '{}'\n \"\"\".format(vendor_pk, tech_pk, mo_name)\n cur.execute(sql)\n\n parameters = cur.fetchall()\n\n attr_list = [p[0] for p in parameters]\n\n str_param_values = \",\".join([\"t_mo.{0}{1}{0}\".format('\"', p) for p in attr_list])\n str_param_names = \",\".join([\"{0}{1}{0}\".format('\\'', p) for p in attr_list])\n\n # Join all cell level mos with the primary cell mo i.e. GCELL\n cell_level_join = \"\"\" INNER JOIN {0}.GCELL gcell ON gcell.\"CELLID\" = t_mo.\"CELLID\" AND gcell.neid = t_mo.neid \n AND gcell.module_type = t_mo.module_type \"\"\".format(schema_name)\n\n # Add new entries\n sql = \"\"\"\n INSERT INTO network_audit.network_baseline \n (node, site, cellname, mo, parameter, bvalue, nvalue, vendor, technology, age, modified_by, added_by, date_added, date_modified)\n SELECT TT1.* FROM (\n SELECT\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 1 as age,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n '{2}' as \"MO\",\n gcell.\"CELLNAME\" as cellname,\n gcell.\"varDateTime\" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = 'Radio'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1\n LEFT JOIN network_audit.network_baseline TT2 on TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT2.cellname is NULL\n \"\"\".format(str_param_names, str_param_values, mo_name, 
cell_level_join)\n print(sql)\n cur.execute(sql)\n\n # Delete old entries\n sql = \"\"\"\n WITH rd AS (\n SELECT TT2.* FROM \n network_audit.network_baseline TT2\n LEFT JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n TRIM(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n '{2}' as \"MO\",\n gcell.\"CELLNAME\" as cellname,\n gcell.\"varDateTime\" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = 'Radio'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n WHERE\n TT1.cellname IS NULL\n )\n DELETE FROM network_audit.network_baseline t1\n WHERE t1.pk IN (SELECT pk from rd)\n \"\"\".format(str_param_names, str_param_values, mo_name, cell_level_join)\n print(sql)\n cur.execute(sql)\n\n # Update old entries\n sql = \"\"\"\n WITH rd AS (\n SELECT TT2.pk, TT1.* FROM \n network_audit.network_baseline TT2\n INNER JOIN \n (\n select\n t8.name as node,\n t7.name as site,\n t4.cellname,\n t3.name as mo,\n t2.name as parameter,\n t1.value as bvalue,\n trim(t4.pvalue) as nvalue,\n t9.name as vendor,\n t10.name as technology,\n 0 as modified_by,\n 0 as added_by,\n date_time as date_added,\n date_time as date_modified\n from live_network.base_line_values t1\n INNER JOIN vendor_parameters t2 on t2.pk = t1.parameter_pk\n INNER JOIN managedobjects t3 on t3.pk = t2.parent_pk\n INNER JOIN live_network.baseline_parameter_config t5 on t5.mo_pk = t3.pk AND t5.parameter_pk = t2.pk\n INNER JOIN (\n SELECT * FROM (\n SELECT\n '{2}' as \"MO\",\n gcell.\"CELLNAME\" as cellname,\n gcell.\"varDateTime\" as date_time,\n unnest(array[{0}]) AS pname,\n unnest(array[{1}]) AS pvalue\n FROM\n hua_cm_2g.{2} t_mo\n {3}\n WHERE\n t_mo.module_type = 'Radio'\n ) TT\n ) t4 on t4.pname = t2.name AND trim(t4.pvalue) != t1.value\n INNER JOIN live_network.cells t6 on t6.name = t4.cellname\n INNER JOIN live_network.sites t7 on t7.pk = t6.site_pk\n INNER JOIN live_network.nodes t8 on t8.pk = t7.node_pk\n INNER JOIN vendors t9 on t9.pk = t6.vendor_pk\n INNER JOIN technologies t10 ON t10.pk = t6.tech_pk\n ) TT1 ON TT2.node = TT1.node\n AND TT2.site = TT1.site \n AND TT2.cellname = TT1.cellname\n AND TT2.mo = TT1.mo\n AND TT2.parameter = TT1.parameter\n AND TT2.bvalue = TT1.bvalue\n AND TT2.nvalue = TT1.nvalue\n )\n UPDATE network_audit.network_baseline AS nb\n SET \n date_modified = rd.date_added, \n age=DATE_PART('day',AGE(nb.date_added, rd.date_added))\n FROM \n rd \n where \n rd.pk = nb.pk\n \"\"\".format(str_param_names, str_param_values, mo_name, 
cell_level_join)\n print(sql)\n cur.execute(sql)", "def setup_maps(self):\n super().setup_maps()\n sprite_classes = {\n \"walls\": Wall,\n \"play\": Background,\n \"exit\": Background,\n }\n island_map = TiledMap((\"images/qwerty_game_1.tmx\"), sprite_classes)\n self.add_map(island_map)", "def config_gb_min(self):\n\n self._config_min()\n self.title = \"GB Minimization\"\n self.cntrl[\"cut\"] = 999.0\n self.cntrl[\"igb\"] = 1", "def setUp(self):\n self.tb = gr.top_block()", "def setUp(self):\n\n self.male_years = HeightCurveMaleYears().make()\n self.male_months = HeightCurveMaleMonths().make()\n self.female_years = HeightCurveFemaleYears().make()\n self.female_months = HeightCurveFemaleMonths().make()", "def init():\n global tube, ball, faceTextureName, woodTextureName\n tube = gluNewQuadric()\n gluQuadricDrawStyle(tube, GLU_FILL)\n ball = gluNewQuadric()\n gluQuadricDrawStyle(ball, GLU_FILL)\n\n # Set up lighting and depth-test\n glEnable(GL_LIGHTING)\n glEnable(GL_NORMALIZE) # Inefficient...\n glEnable(GL_DEPTH_TEST) # For z-buffering!\n\n generateCheckerBoardTexture()\n faceTextureName = loadImageTexture(\"brick.jpg\")\n woodTextureName = loadImageTexture(\"wood.jpg\")" ]
[ "0.5263766", "0.51500183", "0.5125642", "0.5118011", "0.5108372", "0.5051885", "0.50198096", "0.500046", "0.49911672", "0.49805543", "0.4939404", "0.49271867", "0.49168372", "0.4901901", "0.48378637", "0.48280895", "0.48219773", "0.48054692", "0.4797667", "0.47891274", "0.47848246", "0.47773197", "0.47763887", "0.47744033", "0.4770405", "0.4760123", "0.47532117", "0.47466493", "0.47447592", "0.47432366" ]
0.54783833
0
Composite a light source onto a lightmap. The exact operation is not quite unlike an add.
def composite_glow(target, strength, x, y, z): ambient = glow[strength] xbound, zbound, ybound = 16, CHUNK_HEIGHT, 16 sx = x - strength sy = y - strength sz = z - strength ex = x + strength ey = y + strength ez = z + strength si, sj, sk = 0, 0, 0 ei, ej, ek = strength * 2, strength * 2, strength * 2 if sx < 0: sx, si = 0, -sx if sy < 0: sy, sj = 0, -sy if sz < 0: sz, sk = 0, -sz if ex > xbound: ex, ei = xbound, ei - ex + xbound if ey > ybound: ey, ej = ybound, ej - ey + ybound if ez > zbound: ez, ek = zbound, ek - ez + zbound adim = 2 * strength + 1 # Composite! Apologies for the loops. for (tx, ax) in zip(range(sx, ex), range(si, ei)): for (tz, az) in zip(range(sz, ez), range(sk, ek)): for (ty, ay) in zip(range(sy, ey), range(sj, ej)): ambient_index = (ax * adim + az) * adim + ay target[ci(tx, ty, tz)] += ambient[ambient_index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge_light_catalogue():\n output_filename = os.path.join(constants.DESTINATION,\n 'concatenated',\n 'iphas-dr2-light.fits')\n\n instring = ''\n for lon in np.arange(25, 215+1, constants.STRIPWIDTH):\n for part in ['a', 'b']:\n path = os.path.join(constants.DESTINATION,\n 'concatenated',\n 'light',\n 'iphas-dr2-{0:03d}{1}-light.fits'.format(\n lon, part))\n instring += 'in={0} '.format(path)\n\n # Warning: a bug in stilts causes long fieldIDs to be truncated if -utype S15 is not set\n param = {'stilts': constants.STILTS,\n 'in': instring,\n 'out': output_filename}\n\n cmd = '{stilts} tcat {in} countrows=true lazy=true ofmt=colfits-basic out={out}'\n mycmd = cmd.format(**param)\n log.debug(mycmd)\n status = os.system(mycmd)\n log.info('concat: '+str(status))\n\n return status", "def source_surface_brightness(self, kwargs_source, kwargs_lens=None, kwargs_extinction=None, kwargs_special=None,\n unconvolved=False, de_lensed=False, k=None, update_pixelbased_mapping=True):\n kwargs_lens_i, kwargs_source_i, _, _, kwargs_extinction_i = self.select_kwargs(\n kwargs_lens,\n kwargs_source,\n kwargs_lens_light=None,\n kwargs_ps=None,\n kwargs_extinction=kwargs_extinction)\n return self._source_surface_brightness(kwargs_source_i, kwargs_lens_i, kwargs_extinction=kwargs_extinction_i,\n kwargs_special=kwargs_special, unconvolved=unconvolved,\n de_lensed=de_lensed, k=k,\n update_pixelbased_mapping=update_pixelbased_mapping)", "def load_overlay(self, source, surf_type, vmin=None, vmax=None, colormap=None):\n if isinstance(source, np.ndarray):\n name = 'new_overlay'\n data = source\n else:\n name = os.path.basename(source).split('.')[0]\n data = read_data(source, self.surf[surf_type].get_vertices_num())\n self.overlay_list.append(ScalarData(name, data,\n vmin=vmin, vmax=vmax,\n colormap=colormap))", "def merge_into(self, dst):\n # We must respect layer visibility, because saving a\n # transparent PNG just calls this function for each layer.\n src = self\n dst.strokes.extend(self.strokes)\n for tx, ty in dst._surface.get_tiles():\n surf = dst._surface.get_tile_memory(tx, ty, readonly=False)\n surf[:,:,:] = dst.effective_opacity * surf[:,:,:]\n for tx, ty in src._surface.get_tiles():\n surf = dst._surface.get_tile_memory(tx, ty, readonly=False)\n src._surface.composite_tile(surf, tx, ty,\n opacity=self.effective_opacity,\n mode=self.compositeop)\n dst.opacity = 1.0", "def half_light_radius_source(self, kwargs_source, deltaPix=None, numPix=None):\n if numPix is None:\n numPix = 1000\n if deltaPix is None:\n deltaPix = 0.005\n x_grid, y_grid = util.make_grid(numPix=numPix, deltapix=deltaPix)\n source_light = self.SourceModel.surface_brightness(x_grid, y_grid, kwargs_source)\n R_h = analysis_util.half_light_radius(source_light, x_grid, y_grid, center_x=kwargs_source[0]['center_x'], center_y=kwargs_source[0]['center_y'])\n return R_h", "def __init__(self, camera=None, light=None, name=\"\",\r\n x=0.0, y=0.0, z=0.0,\r\n rx=0.0, ry=0.0, rz=0.0,\r\n sx=1.0, sy=1.0, sz=1.0,\r\n cx=0.0, cy=0.0, cz=0.0):\r\n super(MergeShape, self).__init__(camera, light, name, x, y, z,\r\n rx, ry, rz, sx, sy, sz, cx, cy, cz)\r\n\r\n if VERBOSE:\r\n print(\"Creating Merge Shape ...\")\r\n\r\n self.vertices = []\r\n self.normals = []\r\n self.tex_coords = []\r\n self.indices = [] #stores all indices for single render\r\n\r\n self.buf = []\r\n self.buf.append(Buffer(self, self.vertices, self.tex_coords, self.indices, self.normals))", "def calc_source_lightcurve(source, target, log):\n\n log.info('\\n')\n\n for f in ['i', 'r', 
'g']:\n\n idx = np.where(target.lightcurves[f]['mag_err'] > 0)[0]\n\n dmag = np.zeros(len(target.lightcurves[f]['mag']))\n dmag.fill(99.99999)\n dmerr = np.zeros(len(target.lightcurves[f]['mag']))\n dmerr.fill(-9.9999)\n\n dmag[idx] = target.lightcurves[f]['mag'][idx] - getattr(target,f)\n dmerr[idx] = np.sqrt( (target.lightcurves[f]['mag_err'][idx])**2 + getattr(target,'sig_'+f)**2 )\n\n lc = Table()\n lc['images'] = target.lightcurves[f]['images']\n lc['hjd'] = target.lightcurves[f]['hjd']\n lc['mag'] = getattr(source,f) + dmag\n lc['mag_err'] = np.zeros(len(lc['mag']))\n lc['mag_err'] = dmerr\n\n lc['mag_err'][idx] = np.sqrt( dmerr[idx]*dmerr[idx] + (getattr(source,'sig_'+f))**2 )\n\n log.info('Calculated the source flux lightcurve in '+f)\n\n source.lightcurves[f] = lc\n\n return source", "def comp_add_ao(self):\n scene = self.set_as_active()\n scene.use_nodes = True\n tree = scene.node_tree\n tree.nodes.clear()\n\n # creating the nodes\n node_rlayer = tree.nodes.new('CompositorNodeRLayers')\n node_rlayer.location = -300, 100\n node_rlayer.scene = scene\n node_rlayer.layer = w_var.rlname\n\n node_mixcolor = tree.nodes.new('CompositorNodeMixRGB')\n node_mixcolor.location = 0, 50\n node_mixcolor.blend_type = 'MULTIPLY'\n node_mixcolor.inputs[0].default_value = 0.730\n\n node_comp = tree.nodes.new('CompositorNodeComposite')\n node_comp.location = 300, 130\n\n node_viewer = tree.nodes.new('CompositorNodeViewer')\n node_viewer.location = 300, -100\n\n # connecting the nodes\n links = tree.links\n links.new(node_rlayer.outputs[0], node_mixcolor.inputs[1])\n links.new(node_rlayer.outputs[10], node_mixcolor.inputs[2])\n links.new(node_mixcolor.outputs[0], node_comp.inputs[0])\n links.new(node_mixcolor.outputs[0], node_viewer.inputs[0])\n\n for node in tree.nodes:\n node.select = False", "def _addSourceToTile(self, tile, sourceEntry, corners, scale):\n source = self._sources[sourceEntry['sourcenum']]\n ts = self._openSource(source, sourceEntry['kwargs'])\n # If tile is outside of bounding box, skip it\n bbox = source['bbox']\n if (corners[2][0] <= bbox['left'] or corners[0][0] >= bbox['right'] or\n corners[2][1] <= bbox['top'] or corners[0][1] >= bbox['bottom']):\n return tile\n transform = bbox.get('transform')\n srccorners = (\n list(np.dot(bbox['inverse'], np.array(corners).T).T)\n if transform is not None else corners)\n x = y = 0\n # If there is no transform or the diagonals are positive and there is\n # no sheer, use getRegion with an appropriate size (be wary of edges)\n if (transform is None or\n transform[0][0] > 0 and transform[0][1] == 0 and\n transform[1][0] == 0 and transform[1][1] > 0):\n scaleX = transform[0][0] if transform is not None else 1\n scaleY = transform[1][1] if transform is not None else 1\n region = {\n 'left': srccorners[0][0], 'top': srccorners[0][1],\n 'right': srccorners[2][0], 'bottom': srccorners[2][1],\n }\n output = {\n 'maxWidth': (corners[2][0] - corners[0][0]) // scale,\n 'maxHeight': (corners[2][1] - corners[0][1]) // scale,\n }\n if region['left'] < 0:\n x -= region['left'] * scaleX // scale\n output['maxWidth'] += int(region['left'] * scaleX // scale)\n region['left'] = 0\n if region['top'] < 0:\n y -= region['top'] * scaleY // scale\n output['maxHeight'] += int(region['top'] * scaleY // scale)\n region['top'] = 0\n if region['right'] > source['metadata']['sizeX']:\n output['maxWidth'] -= int(\n (region['right'] - source['metadata']['sizeX']) * scaleX // scale)\n region['right'] = source['metadata']['sizeX']\n if region['bottom'] > 
source['metadata']['sizeY']:\n output['maxHeight'] -= int(\n (region['bottom'] - source['metadata']['sizeY']) * scaleY // scale)\n region['bottom'] = source['metadata']['sizeY']\n for key in region:\n region[key] = int(round(region[key]))\n self.logger.debug('getRegion: ts: %r, region: %r, output: %r', ts, region, output)\n sourceTile, _ = ts.getRegion(\n region=region, output=output, frame=sourceEntry.get('frame', 0),\n format=TILE_FORMAT_NUMPY)\n # Otherwise, get an area twice as big as needed and use\n # scipy.ndimage.affine_transform to transform it\n else:\n # TODO\n msg = 'Not implemented'\n raise TileSourceError(msg)\n # Crop\n # TODO\n tile = self._mergeTiles(tile, sourceTile, x, y)\n return tile", "def _blend_layers(self, imagecontent, (z, x, y)):\n result = self._tile_image(imagecontent)\n # Paste each layer\n for (layer, opacity) in self._layers:\n try:\n # Prepare tile of overlay, if available\n overlay = self._tile_image(layer.tile((z, x, y)))\n except (DownloadError, ExtractionError), e:\n logger.warn(e)\n continue\n # Extract alpha mask\n overlay = overlay.convert(\"RGBA\")\n r, g, b, a = overlay.split()\n overlay = Image.merge(\"RGB\", (r, g, b))\n a = ImageEnhance.Brightness(a).enhance(opacity)\n overlay.putalpha(a)\n mask = Image.merge(\"L\", (a,))\n result.paste(overlay, (0, 0), mask)\n # Read result\n return self._image_tile(result)", "def recolorRC(src,dst):\n b,g,r=cv2.split(src)\n cv2.addWeighted(b,0.5,g,0.5,0,b) #arguements(in order):first src array,a weight applied\n # to array, scnd src array, a weight applied to array\n # a constant added to the result and a destination array\n cv2.merge((b,b,r),dest) #replace b and g with modified b(which has both and g)", "def merge_layers(self, l_src, l_tgt, revert=False):\n if l_src == l_tgt:\n return\n u_src = self.gs[l_src]\n u_tgt = self.gs[l_tgt]\n s_src = self.states[l_src + 1]\n s_tgt = self.states[l_tgt + 1]\n\n if self.overlap:\n u_src_base = self.__get_base_u(u_src)[0]\n u_tgt_base = self.__get_base_u(u_tgt)[0]\n else:\n u_src_base = u_src\n u_tgt_base = u_tgt\n\n intersection = u_src_base.new_vertex_property(\"int64_t\", -1)\n\n u_tgt_vmap = u_tgt_base.vp[\"vmap\"]\n vmap = {}\n for v in u_tgt_base.vertices():\n vmap[u_tgt_vmap[v]] = v\n\n u_src_vmap = u_src_base.vp[\"vmap\"]\n for v in u_src_base.vertices():\n w = u_src_vmap[v]\n if w in vmap:\n intersection[v] = int(vmap[w])\n\n if self.overlap:\n u_tgt_base.ep[\"b\"] = self.states[l_tgt + 1].get_edge_blocks()\n u_src_base.ep[\"b\"] = self.states[l_src + 1].get_edge_blocks()\n else:\n u_tgt_base.vp[\"b\"] = self.states[l_tgt + 1].b\n u_src_base.vp[\"b\"] = self.states[l_src + 1].b\n\n tgt_bmap = {}\n src_rbmap = {}\n r_max = 0\n for r in range(self.B):\n if self.bmap.has(l_tgt + 1, r):\n tgt_bmap[r] = self.bmap.get(l_tgt + 1, r)\n r_max = max(r_max, tgt_bmap[r])\n if self.bmap.has(l_src + 1, r):\n src_rbmap[self.bmap.get(l_src + 1, r)] = r\n\n r_missing = list(set(range(r_max)) - set(tgt_bmap.values()))\n r_max += 1\n\n if self.overlap:\n b = u_src_base.ep[\"b\"].copy()\n for e in u_src_base.edges():\n nb = []\n for r in b[e]:\n nb.append(src_rbmap[r])\n for i, r in enumerate(nb):\n if r in tgt_bmap:\n nb[i] = tgt_bmap[r]\n else:\n if len(r_missing) > 0:\n rr = r_missing[0]\n del r_missing[0]\n else:\n rr = r_max\n r_max += 1\n self.bmap.set(l_tgt + 1, r, rr)\n nb[i] = rr\n tgt_bmap[r] = rr\n b[e] = nb\n b_src = b\n b_tgt = u_tgt_base.ep[\"b\"]\n u_tgt_base.ep[\"weight\"] = u_tgt_base.new_edge_property(\"int\", 1)\n u_tgt_base.vp[\"weight\"] = 
u_tgt_base.new_vertex_property(\"int\", 1)\n u_src_base.ep[\"weight\"] = u_src_base.new_edge_property(\"int\", 1)\n u_src_base.vp[\"weight\"] = u_src_base.new_vertex_property(\"int\", 1)\n else:\n b = u_src_base.vp[\"b\"].copy()\n for v in u_src_base.vertices():\n r = src_rbmap[b[v]]\n if r in tgt_bmap:\n b[v] = tgt_bmap[r]\n else:\n if len(r_missing) > 0:\n rr = r_missing[0]\n del r_missing[0]\n else:\n rr = r_max\n r_max += 1\n self.bmap.set(l_tgt + 1, r, rr)\n b[v] = rr\n tgt_bmap[r] = rr\n b_src = b\n b_tgt = u_tgt_base.vp[\"b\"]\n\n props = [(b_tgt, b_src),\n (u_tgt_base.vp[\"vmap\"], u_src_base.vp[\"vmap\"]),\n (u_tgt_base.vp[\"weight\"], u_src_base.vp[\"weight\"]),\n (u_tgt_base.ep[\"weight\"], u_src_base.ep[\"weight\"])]\n\n if not self.overlap:\n props.append((u_tgt_base.vp[\"brmap\"],\n u_src_base.vp[\"brmap\"]))\n\n u, props = graph_union(u_tgt_base, u_src_base,\n intersection=intersection,\n props=props,\n include=False)\n\n if self.overlap:\n u.ep[\"b\"] = props[0]\n else:\n u.vp[\"b\"] = props[0]\n u.vp[\"brmap\"] = props[4]\n\n u.vp[\"vmap\"] = props[1]\n u.vp[\"weight\"] = props[2]\n u.ep[\"weight\"] = props[3]\n\n if self.overlap:\n u, b, node_index, half_edges, eindex = half_edge_graph(u, u.ep[\"b\"],\n self.B)\n u.vp[\"vmap\"] = node_index\n u.vp[\"weight\"] = u.new_vertex_property(\"int\", 1)\n u.vp[\"b\"] = b\n self.gs[l_tgt] = u\n self.states[l_tgt + 1] = self.__gen_state(self.gs[l_tgt])\n else:\n self.gs[l_tgt] = u\n self.states[l_tgt + 1] = self.__gen_state(self.gs[l_tgt])\n\n del self.states[l_src + 1]\n del self.gs[l_src]\n\n old_ec = self.ec.copy()\n self.ec.a[self.ec.a == l_src] = l_tgt\n self.ec.a[self.ec.a > l_src] -= 1\n if self.overlap:\n old_base_ec = self.base_ec.copy()\n self.base_ec.a[self.base_ec.a == l_src] = l_tgt\n self.base_ec.a[self.base_ec.a > l_src] -= 1\n self.C -= 1\n old_bmap = self.bmap.copy()\n self.bmap.del_c(l_src + 1)\n self.__bg = None\n old_layer_entropy = self.__layer_entropy\n self.__layer_entropy = None\n\n yield\n\n if revert:\n self.gs.insert(l_src, u_src)\n self.gs[l_tgt] = u_tgt\n self.states.insert(l_src + 1, s_src)\n self.states[l_tgt + 1] = s_tgt\n self.ec.a[:] = old_ec.a\n if self.overlap:\n self.base_ec.a[:] = old_base_ec.a\n self.C += 1\n self.bmap = old_bmap\n self.__layer_entropy = old_layer_entropy", "def __init__(self):\n super(LinearAggregationLayer, self).__init__()", "def add_clay_to_selected(self):\n scene = self.set_as_active()\n\n # if the user selected a material, use it\n if w_var.cb_mat_clay:\n clay_mat = bpy.data.materials[w_var.mat_clay_name]\n\n # else, create a new one with the color selected\n else:\n clay_color = w_var.color_clay\n\n # separating rgb and alpha\n clay_color_rgb = clay_color[0:3]\n clay_color_alpha = clay_color[-1]\n clay_mat = bpy.data.materials.new('clay')\n \n renderengine = scene.wirebomb.data_renderengine\n \n if renderengine == 'CYCLES':\n clay_mat.use_nodes = True\n tree = clay_mat.node_tree\n tree.nodes.clear()\n\n # creating the nodes\n node_transparent = tree.nodes.new('ShaderNodeBsdfTransparent')\n node_transparent.location = -300, 100\n\n node_diffuse = tree.nodes.new('ShaderNodeBsdfDiffuse')\n node_diffuse.location = -300, -100\n node_diffuse.inputs[0].default_value = clay_color_rgb + (1.0, )\n node_diffuse.color = clay_color_rgb\n node_diffuse.name = 'addon_clay_color' # referencing to this ID in the real-time change\n\n node_mixshader = tree.nodes.new('ShaderNodeMixShader')\n node_mixshader.location = 0, 50\n node_mixshader.inputs[0].default_value = clay_color_alpha\n 
node_mixshader.name = 'addon_clay_alpha' # referencing to this ID in the real-time change\n\n node_output = tree.nodes.new('ShaderNodeOutputMaterial')\n node_output.location = 300, 50\n\n # connecting the nodes\n tree.links.new(node_transparent.outputs[0], node_mixshader.inputs[1])\n tree.links.new(node_diffuse.outputs[0], node_mixshader.inputs[2])\n tree.links.new(node_mixshader.outputs[0], node_output.inputs[0])\n\n for node in tree.nodes:\n node.select = False\n\n # sets the viewport color\n clay_mat.diffuse_color = clay_color_rgb\n \n elif renderengine == 'BLENDER_RENDER':\n clay_mat.diffuse_color = clay_color_rgb\n clay_mat.use_transparency = True\n clay_mat.alpha = clay_color_alpha\n\n previous_area = bpy.context.area.type\n bpy.context.area.type = 'VIEW_3D'\n previous_layers = tuple(scene.layers)\n\n # can't enter edit mode on objects on inactive layers\n scene.layers = (True,)*20\n\n for obj in scene.objects:\n if obj.select:\n # only enters edit mode on active object\n scene.objects.active = obj\n obj.data.materials.append(clay_mat)\n clay_index = obj.data.materials.find(clay_mat.name)\n obj.active_material_index = clay_index\n\n bpy.ops.object.mode_set(mode='EDIT')\n bpy.ops.mesh.select_all(action='SELECT')\n bpy.ops.object.material_slot_assign()\n bpy.ops.mesh.select_all(action='SELECT')\n bpy.ops.object.mode_set(mode='OBJECT')\n\n bpy.context.area.type = previous_area\n scene.layers = previous_layers\n\n return clay_mat", "def place_red_light():\n glMatrixMode(GL_MODELVIEW)\n lx = 4.0\n ly = light_height\n lz = 2.0\n light_position = [lx, ly, lz, 1.0]\n lightr_ambient = [1.0, 0, 0, 1] # red\n lightb_diffuse = [0.4, 0.4, 0.6, 1] # blue\n lightb_specular = [0.0, 0, 0.8, 1] # blue\n light_direction = [1.0, -1.0, 1.0, 0.0] # Light points down\n\n\n # For Light 1 (red), set position, ambient, diffuse, and specular values\n glLightfv(GL_LIGHT1, GL_POSITION, light_position)\n glLightfv(GL_LIGHT1, GL_AMBIENT, lightr_ambient)\n glLightfv(GL_LIGHT1, GL_DIFFUSE, lightb_diffuse)\n glLightfv(GL_LIGHT1, GL_SPECULAR, lightb_specular)\n\n # Constant attenuation (for distance, etc.)\n # Only works for fixed light locations! 
Otherwise disabled\n glLightf(GL_LIGHT1, GL_CONSTANT_ATTENUATION, 2.0)\n glLightf(GL_LIGHT1, GL_LINEAR_ATTENUATION, 0.0)\n glLightf(GL_LIGHT1, GL_QUADRATIC_ATTENUATION, 0.0)\n\n # Create a spotlight effect (none at the moment)\n if red_light:\n glLightf(GL_LIGHT1, GL_SPOT_CUTOFF, 45.0)\n glLightf(GL_LIGHT1, GL_SPOT_EXPONENT, 0.0)\n glLightfv(GL_LIGHT1, GL_SPOT_DIRECTION, light_direction)\n else:\n glLightf(GL_LIGHT1, GL_SPOT_CUTOFF, 180.0)\n glLightf(GL_LIGHT1, GL_SPOT_EXPONENT, 0.0)\n\n glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, use_lv)\n glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, GL_TRUE)\n # Try GL_TRUE - but then watch what happens when light is low\n\n glEnable(GL_LIGHT1)\n\n # This part draws a SELF-COLORED sphere (in spot where light is!)\n glPushMatrix()\n glTranslatef(lx, ly, lz)\n glDisable(GL_LIGHTING)\n glColor3f(brightness, 0, 0)\n glutSolidSphere(0.5, 20, 20)\n glEnable(GL_LIGHTING)\n glPopMatrix()", "def merge_on_top(\n low_layer: np.array,\n high_layer: np.array,\n width: int,\n height: int,\n with_shadow: bool = False,\n) -> np.array:\n merged_layer = np.copy(low_layer)\n\n for row in range(height):\n for col in range(width):\n if high_layer[row, col] != 0:\n # Overwrite bottom with top value\n merged_layer[row, col] = high_layer[row, col]\n elif with_shadow and merged_layer[row, col] != 0:\n # So we know that the top layer at position `[row, col]` is blank but\n # the bottom one is not. So now we check if we should set this pixel to\n # zero because of shadowing.\n if (\n high_layer[\n max(row - 1, 0) : min(row + 2, height),\n max(col - 1, 0) : min(col + 2, width),\n ]\n > 0\n ).any():\n # Apply shadow\n merged_layer[row, col] = 0\n\n return merged_layer", "def alpha_composite(self, im, dest=(0, 0), source=(0, 0)):\r\n\r\n if not isinstance(source, (list, tuple)):\r\n raise ValueError(\"Source must be a tuple\")\r\n if not isinstance(dest, (list, tuple)):\r\n raise ValueError(\"Destination must be a tuple\")\r\n if not len(source) in (2, 4):\r\n raise ValueError(\"Source must be a 2 or 4-tuple\")\r\n if not len(dest) == 2:\r\n raise ValueError(\"Destination must be a 2-tuple\")\r\n if min(source) < 0:\r\n raise ValueError(\"Source must be non-negative\")\r\n if min(dest) < 0:\r\n raise ValueError(\"Destination must be non-negative\")\r\n\r\n channels, depth = self._get_channels_and_depth(im)\r\n _mode = self._get_mode(im.shape, im.dtype)\r\n _im = self._new(_mode, (im.shape[1], im.shape[0]))\r\n if len(source) == 2:\r\n source = source + _im.size\r\n\r\n # over image, crop if it's not the whole thing.\r\n if source == (0, 0) + _im.size:\r\n overlay = _im\r\n else:\r\n overlay = _im.crop(source)\r\n\r\n # target for the paste\r\n box = dest + (dest[0] + overlay.width, dest[1] + overlay.height)\r\n\r\n # destination image. 
don't copy if we're using the whole image.\r\n if box == (0, 0) + self.size:\r\n background = self._instance\r\n else:\r\n background = self.crop(box)\r\n\r\n result = alpha_composite(background, overlay)\r\n self.paste(result, box)", "def __init__(self):\n\n super(ColorMap, self).__init__()\n self.by_id = dict()\n\n for color in [Color.white(), Color.black()]:\n self.push_color(color)\n\n # only black and white are added ny now\n self.black_and_white = True", "def __add__(self, other):\n cmap = find_map(self.coors, other.coors)\n desc = self.descs[0]\n aux = merge_mesh(self.coors, self.cmesh.vertex_groups,\n self.get_conn(desc), self.cmesh.cell_groups,\n other.coors, other.cmesh.vertex_groups,\n other.get_conn(desc), other.cmesh.cell_groups,\n cmap)\n coors, ngroups, conn, mat_ids = aux\n\n mesh = Mesh.from_data(self.name + ' + ' + other.name,\n coors, ngroups, [conn], [mat_ids], [desc])\n\n return mesh", "def __init__(self, game, world_file):\n self.game = game\n self.world_file = world_file\n self.floor_batch = game.floor_batch\n self.wall_batch = game.wall_batch\n self.lightmap = LightMap()\n self.tiles = {}\n self.load_world()\n self.load_tileset()\n self.player_light = self.lightmap.add_light(0,0,15)", "def init_multicomponent_source(\n sky_coord,\n frame,\n observations,\n coadd=None,\n coadd_rms=None,\n flux_percentiles=None,\n thresh=1,\n symmetric=True,\n monotonic=\"flat\",\n min_grad=0.1,\n obs_ref=None,\n):\n try:\n iter(observations)\n except TypeError:\n observations = [observations]\n\n if obs_ref is None:\n if len(observations) == 1:\n obs_ref = observations[0]\n else:\n # The observation that lives in the same plane as the frame\n loc = np.where([type(obs) is Observation for obs in observations])\n # If more than one element is an `Observation`, then pick the first one as a reference (arbitrary)\n obs_ref = observations[loc[0]]\n\n if flux_percentiles is None:\n flux_percentiles = [25]\n\n # Initialize the first component as an extended source\n sed, morph, bbox = init_extended_source(\n sky_coord,\n frame,\n observations,\n coadd=coadd,\n coadd_rms=coadd_rms,\n thresh=thresh,\n symmetric=symmetric,\n monotonic=monotonic,\n min_grad=min_grad,\n )\n # create a list of components from base morph by layering them on top of\n # each other so that they sum up to morph\n K = len(flux_percentiles) + 1\n\n Ny, Nx = morph.shape\n morphs = np.zeros((K, Ny, Nx), dtype=morph.dtype)\n morphs[0, :, :] = morph[:, :]\n max_flux = morph.max()\n percentiles_ = np.sort(flux_percentiles)\n last_thresh = 0\n for k in range(1, K):\n perc = percentiles_[k - 1]\n flux_thresh = perc * max_flux / 100\n mask_ = morph > flux_thresh\n morphs[k - 1][mask_] = flux_thresh - last_thresh\n morphs[k][mask_] = morph[mask_] - flux_thresh\n last_thresh = flux_thresh\n\n # renormalize morphs: initially Smax\n for k in range(K):\n if np.all(morphs[k] <= 0):\n msg = \"Zero or negative morphology for component {} at y={}, x={}\"\n logger.warning(msg.format(k, *sky_coord))\n morphs[k] /= morphs[k].max()\n\n # optimal SEDs given the morphologies, assuming img only has that source\n boxed_img = bbox.extract_from(obs_ref.images)\n spectra = get_best_fit_spectra(morphs, boxed_img)\n\n for k in range(K):\n if np.all(spectra[k] <= 0):\n # If the flux in all channels is <=0,\n # the new sed will be filled with NaN values,\n # which will cause the code to crash later\n msg = \"Zero or negative spectrum {} for component {} at y={}, x={}\".format(\n spectra[k], k, *sky_coord\n )\n logger.warning(msg)\n\n # avoid using the 
same box for multiple components\n boxes = tuple(bbox.copy() for k in range(K))\n\n # # define minimal boxes (NOTE: dangerous due to box truncation)\n # morphs_ = []\n # boxes = []\n # threshold = 0\n # for k in range(K):\n # morph, bbox = trim_morphology(sky_coord, frame, morphs[k], threshold)\n # morphs_.append(morph)\n # boxes.append(bbox)\n # morphs = morphs_\n\n return spectra, morphs, boxes", "def _blend_layers(self, imagecontent, (z, x, y)):\n result = self._tile_image(imagecontent)\n # Paste each layer\n for (layer, opacity) in self._layers:\n try:\n # Prepare tile of overlay, if available\n overlay = self._tile_image(layer.tile((z, x, y)))\n except (DownloadError, ExtractionError), e:\n logger.warn(e)\n continue\n # Extract alpha mask\n overlay = overlay.convert(\"RGBA\")\n r, g, b, a = overlay.split()\n overlay = Image.merge(\"RGB\", (r, g, b))\n a = ImageEnhance.Brightness(a).enhance(opacity)\n overlay.putalpha(a)\n mask = Image.merge(\"L\", (a,))\n result.paste(overlay, (0, 0), mask)\n # Read result\n return self._image_tile(result)", "def apply(self,src,dst):\n b,g,r=cv2.split(src)\n utils.applyLookupArray(self._bLookupArray,b,b)\n utils.applyLookupArray(self._gLookupArray,g,g)\n utils.applyLookupArray(self._rLookupArray,r,r)\n cv2.merge([b,g,r],dst)", "def build_light(self, item):\n\n # Validete NMS object.\n if \"ObjectID\" not in item:\n return\n\n # Get object id from item.\n object_id = item[\"ObjectID\"]\n # Find light data\n if object_id not in self.lights_dictionary:\n return\n\n # Build Lights\n light_information = self.lights_dictionary[object_id]\n for idx, light_values in enumerate(light_information.values()):\n # Get Light Properties.\n light_type = light_values[\"type\"]\n light_location = light_values[\"location\"]\n\n # Create light.\n light = bpy.ops.object.light_add(\n type=light_type.upper(),\n location=light_location\n )\n light = bpy.context.object\n light[\"NMS_LIGHT\"] = True\n light.name = \"{0}_light{1}\".format(item.name, idx)\n data_copy = deepcopy(light_values)\n\n # Remove invalid blender properties.\n data_copy.pop(\"type\")\n data_copy.pop(\"location\")\n\n # Apply all other properties to blender object.\n for key, value in data_copy.items():\n if isinstance(value, list):\n value = mathutils.Vector(tuple(value))\n setattr(light.data, key, value)\n\n # Parent to object.\n utils.parent(light, item)\n\n # Disable Selection.\n light.hide_viewport = True\n light.hide_select = True", "def __init__(self,\n overlay,\n overlayList,\n displayCtx,\n canvas,\n threedee,\n init=None,\n preinit=None):\n\n glimageobject.GLImageObject.__init__(self,\n overlay,\n overlayList,\n displayCtx,\n canvas,\n threedee)\n\n name = self.name\n\n self.cmapTexture = textures.ColourMapTexture('{}_cm'.format(name))\n\n self.shader = None\n self.modulateImage = None\n self.clipImage = None\n self.colourImage = None\n self.modulateOpts = None\n self.clipOpts = None\n self.colourOpts = None\n self.modulateTexture = None\n self.clipTexture = None\n self.colourTexture = None\n\n # Make sure we are registered with the\n # auxillary images if any of them are set.\n opts = self.opts\n\n if opts.colourImage is not None: self.registerAuxImage('colour')\n if opts.modulateImage is not None: self.registerAuxImage('modulate')\n if opts.clipImage is not None: self.registerAuxImage('clip')\n\n self.addListeners()\n\n def initWrapper():\n if init is not None:\n init()\n self.notify()\n\n self.refreshColourMapTexture()\n self.refreshAuxTexture('modulate')\n self.refreshAuxTexture('clip')\n 
self.refreshAuxTexture('colour')\n\n if preinit is not None:\n preinit()\n\n idle.idleWhen(initWrapper, self.texturesReady)", "def add(self, a, b):\n # first remove if already visible\n if self.visible:\n self.remove()\n \n # now add it\n self.source.SetBounds(a[0], b[0], a[1], b[1], a[2], b[2])\n \n self.mapper.SetInputConnection(self.source.GetOutputPort())\n \n self.actor.SetMapper(self.mapper)\n self.setColour(self.currentColour)\n \n self.ren.AddActor(self.actor)\n \n self.visible = 1", "def addLight(self, id):\r\n\t\t\r\n\t\tnewLight = Light(id)\r\n\t\tself.lights[id] = newLight", "def add_multiple_lights(properties,object,dist,numLight,gravity=[0,0,-9.81],tgt=None,color=[1.,1.,1.], \\\n spotlight=False,radius=15.,falloff=20.,tightness=10., \\\n area=0.,sample=9,adaptive=True,jitter=True): \n #normalize gravity\n g=op.mul(gravity,-1/op.norm(gravity))\n \n #compute frame\n gabs=[abs(gi) for gi in g]\n id=gabs.index(min(gabs))\n t0=[1. if i==id else 0. for i in range(3)]\n t1=op.cross(t0,g)\n t1=op.mul(t1,1/op.norm(t1))\n t0=op.cross(t1,g)\n \n #find highest direction\n bb=compute_bb(object)\n ctr=op.mul(op.add(bb[0],bb[1]),0.5)\n distg=sum([abs((bb[1][i]-bb[0][i])/2*g[i]) for i in range(3)])\n \n #add each light\n for i in range(numLight):\n angle=math.pi*2*i/numLight\n d0=op.mul(g,distg)\n d1=op.mul(t0,math.sin(angle)*dist)\n d2=op.mul(t1,math.cos(angle)*dist)\n add_light(properties,op.add(d0,op.add(d1,d2)),ctr,color,\n spotlight,radius,falloff,tightness,area,sample,adaptive,jitter)", "def add_new_source_sink(self):\n source = self.source()\n sink = self.sink()\n for arc in self.out_arcs_lists[source]:\n self.arc_info[arc][\"lower_bound\"] = 0\n self.arc_info[arc][\"upper_bound\"] = float('inf')\n for arc in self.in_arcs_lists[sink]:\n self.arc_info[arc][\"lower_bound\"] = 0\n self.arc_info[arc][\"upper_bound\"] = float('inf')\n for vert in self.vertices:\n if vert != source and vert != sink:\n if self.get_arc(source, vert) is None:\n self.add_inexact_edge(source, vert, 0, float('inf'))\n if self.get_arc(vert, sink) is None:\n self.add_inexact_edge(vert, sink, 0, float('inf'))", "def add_light(self, location, time_start, duration):\n min_state = self.location_to_state(location, 0)\n max_state = self.location_to_state(location, self.max_time - 1)\n\n # times the red light is on\n # if time_start = 2, duration = 3, max_time = 10\n # _ _ - - - _ _ _ - -, where '_' is green and '-' red\n for i in range(time_start, self.max_time, duration * 2):\n for redd in range(i, i + duration):\n if min_state + redd <= max_state:\n self.rewards[min_state + redd] = [1, 0, 0, 0, 0]" ]
[ "0.5610279", "0.53546923", "0.53237617", "0.53043604", "0.5270605", "0.5213873", "0.5177985", "0.50907487", "0.49780676", "0.4953777", "0.49467075", "0.49402687", "0.4936739", "0.49170655", "0.4907632", "0.49029806", "0.48886353", "0.48827502", "0.4881445", "0.48701435", "0.48643214", "0.4862747", "0.4844743", "0.48334783", "0.48298243", "0.48290035", "0.48209465", "0.48206562", "0.48204637", "0.48187605" ]
0.5379295
1
Calculate the amount of light that should be shone on a block. ``glow`` is the brightest neighboring light. ``block`` is the slot of the block being illuminated. The return value is always a valid light value.
def neighboring_light(glow, block): return clamp(glow - blocks[block].dim, 0, 15)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_row(row):\n def filter_row(col, rw):\n \"\"\"For a given pixel position, turn on if it matches our row\n \"\"\"\n return rw == row\n light(MAX_BRIGHTNESS, filter_row)", "def block_reward_calc(block_number):\n\n # FIXME: Magic number? Unify\n return int((remaining_emission(config.dev.max_coin_supply, block_number - 1)\n - remaining_emission(config.dev.max_coin_supply, block_number)) * 100000000)", "def lightness(self):\n min_component = min(self.red, self.green, self.blue)\n max_component = max(self.red, self.green, self.blue)\n avg = (max_component + min_component) / 2\n light = avg / 255\n return light", "def lightness(color):\n\n strongest = max(color.red, color.green, color.blue)\n weakest = min(color.red, color.green, color.blue)\n return 0.5 * (strongest + weakest) / 255", "def composite_glow(target, strength, x, y, z):\n\n ambient = glow[strength]\n\n xbound, zbound, ybound = 16, CHUNK_HEIGHT, 16\n\n sx = x - strength\n sy = y - strength\n sz = z - strength\n\n ex = x + strength\n ey = y + strength\n ez = z + strength\n\n si, sj, sk = 0, 0, 0\n ei, ej, ek = strength * 2, strength * 2, strength * 2\n\n if sx < 0:\n sx, si = 0, -sx\n\n if sy < 0:\n sy, sj = 0, -sy\n\n if sz < 0:\n sz, sk = 0, -sz\n\n if ex > xbound:\n ex, ei = xbound, ei - ex + xbound\n\n if ey > ybound:\n ey, ej = ybound, ej - ey + ybound\n\n if ez > zbound:\n ez, ek = zbound, ek - ez + zbound\n\n adim = 2 * strength + 1\n\n # Composite! Apologies for the loops.\n for (tx, ax) in zip(range(sx, ex), range(si, ei)):\n for (tz, az) in zip(range(sz, ez), range(sk, ek)):\n for (ty, ay) in zip(range(sy, ey), range(sj, ej)):\n ambient_index = (ax * adim + az) * adim + ay\n target[ci(tx, ty, tz)] += ambient[ambient_index]", "def _setBlock(o,block):\n o.board.add(block)\n o._cachedShadow = None\n clearedLines = o.board.clearLines()\n o.lines += clearedLines\n if clearedLines > 0 and o.onClearLines is not None:\n o.onClearLines(clearedLines)\n o.board.addRows(o.penalty)\n o.penalty = 0\n o._initBlock(o.queue.pop())\n if o.lines >= o.level*10: o.level+=1\n o.canHold = True\n isGameOver = all(sq.y >= o.board.height for sq in block) \\\n or any(o.board[sq.x,sq.y] != None for sq in o.block)\n if isGameOver and o.onGameOver: o.onGameOver(o)", "def shadowBlock(o):\n return o._cachedShadow if o._cachedShadow is not None \\\n else o.board.dropLocation(o.block)", "def proc_dark_block(block, **kwa):\n exp = kwa.get('exp', None)\n detname = kwa.get('det', None)\n int_lo = kwa.get('int_lo', 1) # lowest intensity accepted for dark evaluation\n int_hi = kwa.get('int_hi', 16000) # highest intensity accepted for dark evaluation\n intnlo = kwa.get('intnlo', 6.0) # intensity ditribution number-of-sigmas low\n intnhi = kwa.get('intnhi', 6.0) # intensity ditribution number-of-sigmas high\n rms_lo = kwa.get('rms_lo', 0.001) # rms ditribution low\n rms_hi = kwa.get('rms_hi', 16000) # rms ditribution high\n rmsnlo = kwa.get('rmsnlo', 6.0) # rms ditribution number-of-sigmas low\n rmsnhi = kwa.get('rmsnhi', 6.0) # rms ditribution number-of-sigmas high\n fraclm = kwa.get('fraclm', 0.1) # allowed fraction limit\n fraclo = kwa.get('fraclo', 0.05) # fraction of statistics below low gate limit\n frachi = kwa.get('frachi', 0.95) # fraction of statistics below high gate limit\n frac05 = 0.5\n nrecs1 = kwa.get('nrecs1', None) # number of records for the 1st stage processing\n\n logger.debug('in proc_dark_block for exp=%s det=%s, block.shape=%s' % (exp, detname, str(block.shape)))\n logger.info(info_ndarr(block, 'Begin processing of the data 
block:\\n ', first=100, last=105))\n logger.debug('fraction of statistics for gate limits low: %.3f high: %.3f' % (fraclo, frachi))\n\n t0_sec = time()\n\n nrecs, ny, nx = block.shape\n shape = (ny, nx)\n if nrecs1 is None or nrecs1>nrecs: nrecs1 = nrecs\n\n arr1_u16 = np.ones(shape, dtype=np.uint16)\n arr1 = np.ones(shape, dtype=np.uint64)\n\n t1_sec = time()\n\n \"\"\"\n NOTE:\n - our data is uint16.\n - np.median(block, axis=0) or np.quantile(...,interpolation='linear') return result rounded to int\n - in order to return interpolated float values apply the trick:\n data_block + random [0,1)-0.5\n - this would distort data in the range [-0.5,+0.5) ADU, but would allow to get better interpolation for median and quantile values\n - use nrecs1 (< nrecs) due to memory and time consumption\n \"\"\"\n #blockf64 = np.random.random(block.shape) - 0.5 + block\n blockf64 = np.random.random((nrecs1, ny, nx)) - 0.5 + block[:nrecs1,:]\n logger.debug(info_ndarr(blockf64, '1-st stage conversion uint16 to float64, add random [0,1)-0.5 time = %.3f sec '%(time()-t1_sec), first=100, last=105))\n\n t1_sec = time()\n #arr_med = np.median(block, axis=0)\n arr_med = np.quantile(blockf64, frac05, axis=0, interpolation='linear')\n arr_qlo = np.quantile(blockf64, fraclo, axis=0, interpolation='linear')\n arr_qhi = np.quantile(blockf64, frachi, axis=0, interpolation='linear')\n logger.debug('block array median/quantile(0.5) for med, qlo, qhi time = %.3f sec' % (time()-t1_sec))\n\n med_med = np.median(arr_med)\n med_qlo = np.median(arr_qlo)\n med_qhi = np.median(arr_qhi)\n\n arr_dev_3d = block[:,] - arr_med # .astype(dtype=np.float64)\n arr_abs_dev = np.median(np.abs(arr_dev_3d), axis=0)\n med_abs_dev = np.median(arr_abs_dev)\n\n logger.info(info_ndarr(arr_med, ' arr_med[100:105] ', first=100, last=105))\n logger.info(info_ndarr(arr_qlo, ' arr_qlo[100:105] ', first=100, last=105))\n logger.info(info_ndarr(arr_qhi, ' arr_qhi[100:105] ', first=100, last=105))\n logger.info(info_ndarr(arr_abs_dev, ' abs_dev[100:105] ', first=100, last=105))\n\n s = 'Pre-processing time %.3f sec' % (time()-t0_sec)\\\n + '\\nResults for median over pixels intensities:'\\\n + '\\n %.3f fraction of the event spectrum is below %.3f ADU - pedestal estimator' % (frac05, med_med)\\\n + '\\n %.3f fraction of the event spectrum is below %.3f ADU - gate low limit' % (fraclo, med_qlo)\\\n + '\\n %.3f fraction of the event spectrum is below %.3f ADU - gate upper limit' % (frachi, med_qhi)\\\n + '\\n event spectrum spread median(abs(raw-med)): %.3f ADU - spectral peak width estimator' % med_abs_dev\n logger.info(s)\n\n logger.debug(info_ndarr(arr_med, '1st iteration proc time = %.3f sec arr_av1' % (time()-t0_sec)))\n\n # 2nd loop over recs in block to evaluate gated parameters\n logger.debug('Begin 2nd iteration')\n\n sta_int_lo = np.zeros(shape, dtype=np.uint64)\n sta_int_hi = np.zeros(shape, dtype=np.uint64)\n\n arr_max = np.zeros(shape, dtype=block.dtype)\n arr_min = np.ones (shape, dtype=block.dtype) * 0x3fff\n\n gate_lo = arr1_u16 * int_lo\n gate_hi = arr1_u16 * int_hi\n\n gate_lo = np.maximum(arr_qlo, gate_lo).astype(dtype=block.dtype)\n gate_hi = np.minimum(arr_qhi, gate_hi).astype(dtype=block.dtype)\n cond = gate_hi>gate_lo\n gate_hi[np.logical_not(cond)] +=1\n\n logger.debug(info_ndarr(gate_lo, ' gate_lo '))\n logger.debug(info_ndarr(gate_hi, ' gate_hi '))\n\n arr_sum0 = np.zeros(shape, dtype=np.uint64)\n arr_sum1 = np.zeros(shape, dtype=np.float64)\n arr_sum2 = np.zeros(shape, dtype=np.float64)\n\n for nrec in range(nrecs):\n raw = 
block[nrec,:]\n rawdbl = raw.astype(dtype=np.uint64) # blockdbl[nrec,:]\n\n logger.debug('nrec:%03d median(raw-ave): %f' % (nrec, np.median(raw.astype(dtype=np.float64) - arr_med)))\n\n condlist = (np.logical_not(np.logical_or(raw<gate_lo, raw>gate_hi)),)\n\n arr_sum0 += np.select(condlist, (arr1,), 0)\n arr_sum1 += np.select(condlist, (rawdbl,), 0)\n arr_sum2 += np.select(condlist, (np.square(rawdbl),), 0)\n\n sta_int_lo += np.select((raw<int_lo,), (arr1,), 0)\n sta_int_hi += np.select((raw>int_hi,), (arr1,), 0)\n\n arr_max = np.maximum(arr_max, raw)\n arr_min = np.minimum(arr_min, raw)\n\n arr_av1 = divide_protected(arr_sum1, arr_sum0)\n arr_av2 = divide_protected(arr_sum2, arr_sum0)\n\n nevlm = int(fraclm * nrecs)\n\n arr_rms = np.sqrt(arr_av2 - np.square(arr_av1))\n #rms_ave = arr_rms.mean()\n rms_ave = mean_constrained(arr_rms, rms_lo, rms_hi)\n\n rms_min, rms_max = evaluate_limits(arr_rms, rmsnlo, rmsnhi, rms_lo, rms_hi, cmt='RMS')\n ave_min, ave_max = evaluate_limits(arr_av1, intnlo, intnhi, int_lo, int_hi, cmt='AVE')\n\n arr_sta_rms_hi = np.select((arr_rms>rms_max,), (arr1,), 0)\n arr_sta_rms_lo = np.select((arr_rms<rms_min,), (arr1,), 0)\n arr_sta_int_hi = np.select((sta_int_hi>nevlm,), (arr1,), 0)\n arr_sta_int_lo = np.select((sta_int_lo>nevlm,), (arr1,), 0)\n arr_sta_ave_hi = np.select((arr_av1>ave_max,), (arr1,), 0)\n arr_sta_ave_lo = np.select((arr_av1<ave_min,), (arr1,), 0)\n\n logger.info('Bad pixel status:'\\\n +'\\n status 1: %8d pixel rms > %.3f' % (arr_sta_rms_hi.sum(), rms_max)\\\n +'\\n status 2: %8d pixel rms < %.3f' % (arr_sta_rms_lo.sum(), rms_min)\\\n +'\\n status 4: %8d pixel intensity > %g in more than %g fraction (%d/%d) of non-empty events'%\\\n (arr_sta_int_hi.sum(), int_hi, fraclm, nevlm, nrecs)\\\n +'\\n status 8: %8d pixel intensity < %g in more than %g fraction (%d/%d) of non-empty events'%\\\n (arr_sta_int_lo.sum(), int_lo, fraclm, nevlm, nrecs)\\\n +'\\n status 16: %8d pixel average > %g' % (arr_sta_ave_hi.sum(), ave_max)\\\n +'\\n status 32: %8d pixel average < %g' % (arr_sta_ave_lo.sum(), ave_min)\\\n )\n\n #0/1/2/4/8/16/32 for good/hot-rms/saturated/cold/cold-rms/average above limit/average below limit,\n arr_sta = np.zeros(shape, dtype=np.uint64)\n arr_sta += arr_sta_rms_hi # hot rms\n arr_sta += arr_sta_rms_lo*2 # cold rms\n arr_sta += arr_sta_int_hi*4 # satturated\n arr_sta += arr_sta_int_lo*8 # cold\n arr_sta += arr_sta_ave_hi*16 # too large average\n arr_sta += arr_sta_ave_lo*32 # too small average\n\n absdiff_av1_med = np.abs(arr_av1-arr_med)\n logger.debug(info_ndarr(absdiff_av1_med, 'np.abs(arr_av1-arr_med)', first=100, last=105))\n logger.info('estimator of difference between gated average and median np.median(np.abs(arr_av1-arr_med)): %.3f' % np.median(absdiff_av1_med))\n\n cond = absdiff_av1_med > med_abs_dev\n arr_av1[cond] = arr_med[cond]\n\n arr_sta_bad = np.select((cond,), (arr1,), 0)\n frac_bad = arr_sta_bad.sum()/float(arr_av1.size)\n logger.debug('fraction of panel pixels with gated average deviated from and replaced by median: %.6f' % frac_bad)\n\n logger.info('data block processing time = %.3f sec' % (time()-t0_sec))\n logger.debug(info_ndarr(arr_av1, 'arr_av1 [100:105] ', first=100, last=105))\n logger.debug(info_ndarr(arr_rms, 'pixel_rms [100:105] ', first=100, last=105))\n logger.debug(info_ndarr(arr_sta, 'pixel_status[100:105] ', first=100, last=105))\n logger.debug(info_ndarr(arr_med, 'arr mediane [100:105] ', first=100, last=105))\n\n return arr_av1, arr_rms, arr_sta", "def brightness(pixel):\n red = pixel[0]\n green = 
pixel[1]\n blue = pixel[2]\n return (21*red + 72*green + 7*blue) // 100", "def part_1() -> int:\n initial_input = _load_input()\n rows = len(initial_input)\n cols = len(initial_input[0])\n\n input = initial_input.copy()\n total_glow_count = 0\n\n for _ in range(100):\n flashed = list()\n for row in range(rows):\n for col in range(cols):\n coords = [[col, row]]\n new_input, glow_count = _get_glow_counts(coords, input, flashed)\n input = new_input\n total_glow_count += glow_count\n\n return total_glow_count", "def distance_to_block(self, block):\n return math.sqrt(\n (self.center_x - block.center_x) ** 2 + (self.center_y - block.center_y) ** 2\n )", "def get_rms(self, block):\n # https://stackoverflow.com/a/25871132\n count = len(block) / 2\n block_format = \"%dh\" % (count)\n shorts = struct.unpack(block_format, block)\n\n sum_squares = 0.0\n for sample in shorts:\n # sample is a signed short in +/- 32768, normalize it to 1.0\n sum_squares += pow(sample * SHORT_NORMALIZE, 2)\n\n return sqrt(sum_squares / count)", "def getLightSensor() -> int:\n pass", "def grow_block_by_one_site(growing_block, ground_state_wf, system, \n\t number_of_states_kept):\n if growing_block not in ('left', 'right'):\n\traise DMRGException('Growing side must be left or right.')\n system.set_growing_side(growing_block)\n rho = ground_state_wf.build_reduced_density_matrix(growing_block)\n evals, evecs = diagonalize(rho)\n truncated_evals, truncation_matrix = truncate(evals, evecs,\n\t\t number_of_states_kept)\n entropy = calculate_entropy(truncated_evals)\n truncation_error = calculate_truncation_error(truncated_evals)\n set_block_hamiltonian_to_AF_Heisenberg(system)\n set_operators_to_update_to_AF_Heisenberg(system)\n system.update_all_operators(truncation_matrix)\n return entropy, truncation_error", "def _get_glow_counts(\n coord_stack: List[List[int]],\n input: List[List[int]],\n flashed: List[List[int]],\n glow_count: int = 0\n) -> Tuple[List[List[int]], int]:\n if not coord_stack:\n return input, glow_count\n\n coord = coord_stack.pop()\n row = coord[0]\n col = coord[1]\n\n if flashed and coord in flashed:\n return _get_glow_counts(coord_stack, input, flashed, glow_count)\n\n if input[row][col] < 9:\n input[row][col] += 1\n return _get_glow_counts(coord_stack, input, flashed, glow_count)\n\n # handle a flashing octo\n glow_count += 1\n input[row][col] = 0\n\n if coord not in flashed:\n flashed.append(coord)\n\n for ad in ADJACENT_DIRS.values():\n new_row = row + ad[0]\n new_col = col + ad[1]\n\n if 0 <= new_row < 10 and 0 <= new_col < 10:\n coord_stack.append([new_row, new_col])\n\n return _get_glow_counts(coord_stack, input, flashed, glow_count)", "def mine(self, block):\r\n for n in range(self.maxNonce):\r\n if int(block.generate_hash(), 16) <= self.chain.targetHash:\r\n self.chain.add(block)\r\n break\r\n else:\r\n block.nonce += 1", "def take(self) -> int:\n for z in range(self.size - 1, -1, -1):\n if self.__get_pixel_color(z) is not None:\n color = self.__get_pixel_color(z)\n self.add_block_to_hopper(color)\n self.__set_pixel_color(z, None)\n self.memory.set_pixelColor((self.position[0], self.position[1], z), None)\n break\n else:\n raise Exception(\"Can't take block here, no block found\")\n\n if self.last_touched_color == color:\n time_elapsed = 2\n else:\n time_elapsed = 3\n self.last_touched_color = color\n self.__display()\n return time_elapsed", "def brightness(colors):\n return np.sum(colors * const_bright, -1)", "def hash(block):\n block_string = json.dumps(block, sort_keys=True).encode()\n return 
hashlib.sha256(block_string).hexdigest()", "def hash(block):\n block_string = json.dumps(block, sort_keys=True).encode()\n return hashlib.sha256(block_string).hexdigest()", "def hash(block):\r\n block_string = json.dumps(block, sort_keys=True).encode()\r\n return hashlib.sha256(block_string).hexdigest()", "def lighten(self, amount):\n h, light, s = colorsys.rgb_to_hls(self.r, self.g, self.b)\n\n light = light + amount\n\n if light < 0.0:\n light = 0.0\n if light > 1.0:\n light = 1.0\n\n r, g, b = colorsys.hls_to_rgb(h, light, s)\n return Color(from_rgba=(c(r), c(g), c(b), c(self.a)))", "def set_block(self, coords, block):\n\n x, y, z = coords\n index, section_y = divmod(y, 16)\n\n column = x * 16 + z\n\n if self.get_block(coords) != block:\n self.sections[index].set_block((x, section_y, z), block)\n\n if not self.populated:\n return\n\n # Regenerate heightmap at this coordinate.\n if block:\n self.heightmap[column] = max(self.heightmap[column], y)\n else:\n # If we replace the highest block with air, we need to go\n # through all blocks below it to find the new top block.\n height = self.heightmap[column]\n if y == height:\n for y in range(height, -1, -1):\n if self.get_block((x, y, z)):\n break\n self.heightmap[column] = y\n\n # Do the blocklight at this coordinate, if appropriate.\n if block in glowing_blocks:\n composite_glow(self.blocklight, glowing_blocks[block],\n x, y, z)\n bl = [clamp(light, 0, 15) for light in self.blocklight]\n self.blocklight = array(\"B\", bl)\n\n # And the skylight.\n glow = max(self.get_skylight((nx, ny, nz))\n for nx, nz, ny in iter_neighbors((x, z, y)))\n self.set_skylight((x, y, z), neighboring_light(glow, block))\n\n self.dirty = True\n self.damage(coords)", "def _calcBrightness(self, brightness):\n if 0 <= int(brightness) <= 100:\n return int(float(brightness) / 100 * 0xFF)\n raise Exception('Brightness must be an integer betwenn 0 and 100')", "def calc_low_energy_bulb_ratio(lighting_outlets_total, lighting_outlets_low_energy):\n return int(100 * float(lighting_outlets_low_energy) / lighting_outlets_total + 0.5) / 100.0", "def get_block_mount_point(block):\n # type: (str) -> Tuple[str, bool]\n\n mount_point = find_block_mount_point(block)\n if mount_point is None:\n return mount_block(block), False\n else:\n return mount_point, True", "def read_light_bump(self, light_bump):\n data = self._read_packet(light_bump, Bump.LIGHT_DATA_BYTES)\n\n if len(data) == Bump.LIGHT_DATA_BYTES:\n return struct.unpack(\">h\", data)[0]\n else:\n return 0", "def luminance(self):\n \n return (self.r + self.g + self.b) // 3", "def _mine_block(block: Block, final_nonce, found_event: Event, cutoff_time: int) -> None:\n\n while time.time() < cutoff_time:\n block.nonce = random.randint(0, 2**64-1)\n block.block_id = block.compute_block_id()\n try:\n block.verificate_pow()\n if final_nonce.value is None:\n final_nonce.value = block.nonce\n found_event.set()\n return None\n except Exception as e:\n pass\n found_event.set()\n return None", "def light(brightness, filter):\n brightness = clamp(MIN_BRIGHTNESS, round(brightness), MAX_BRIGHTNESS)\n for col in range(DISPLAY_WIDTH):\n for row in range(DISPLAY_HEIGHT):\n if filter(col, row):\n microbit.display.set_pixel(col, row, brightness)" ]
[ "0.52353996", "0.51834047", "0.5176242", "0.5041428", "0.50006485", "0.4954916", "0.4940197", "0.49317864", "0.47643924", "0.4748225", "0.46979314", "0.46752948", "0.46199557", "0.4596901", "0.45848566", "0.45458958", "0.45410168", "0.45358545", "0.44717708", "0.44717708", "0.44675288", "0.4454629", "0.44529152", "0.44429192", "0.44403416", "0.4437165", "0.44261685", "0.43983907", "0.43980816", "0.43946" ]
0.7323498
0
Regenerate the height map array. The height map is merely the position of the tallest block in any xz-column.
def regenerate_heightmap(self): for x in range(16): for z in range(16): column = x * 16 + z for y in range(255, -1, -1): if self.get_block((x, y, z)): break self.heightmap[column] = y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_map(self, size, random_data = False):\n heightmap = []\n\n if random_data:\n # random noise background\n for x in range(size):\n heightmap.append([])\n for y in range(size):\n heightmap[-1].append(random.random())\n else:\n # black background\n for x in range(size):\n heightmap.append([])\n for y in range(size):\n heightmap[-1].append(0.0)\n\n return heightmap", "def _split_heightmap(self, height):\n half = height.shape[1] // 2\n self._half = half\n height_s = height[:, half:].copy()\n return height_s", "def generate_pre_heights(self):\n\n config = self.config\n\n def get_lands_oceans():\n oceans, lands = [], []\n for x in xrange(self.size):\n for y in xrange(self.size):\n coord = x, y\n if self[coord] <= 0:\n oceans.append(coord)\n else:\n lands.append(coord)\n return lands, oceans\n\n def add_heights():\n \"\"\"Add pre heights for diamond-square\n \"\"\"\n fac_min = 50\n fac_max = 40\n\n print 'Get lands and oceans'\n t = time.time()\n lands, oceans = get_lands_oceans()\n print 'lands and oceans getted: ', time.time() - t\n\n # TODO: create one def with params: mount_level and other for create heights\n # add default heights\n for coord in lands:\n self[coord] = self.config.land_mount_level[1]\n\n for coord in oceans:\n self[coord] = -self.config.mid_mount_level[1]\n\n # add low heights for lands\n count_land = int(round(len(lands) * config.factor_low_mount / 100.))\n land_coords = []\n\n starts = random.randint(count_land / fac_min, count_land / fac_max)\n for start in xrange(starts):\n start_coord = lands[random.randint(0, len(lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n if coord not in land_coords:\n self[coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n # -------------------------------------------------------------------------------\n # add mid heights for lands\n count_land = int(round(len(target_lands) * (config.factor_mid_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 3), count_land / (fac_max*3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n\n if land_coords == []:\n return\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n\n # -------------------------------------------------------------------------------\n # add high heights for lands\n count_land = int(round(len(target_lands) * (config.factor_high_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 4), count_land / (fac_max * 3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n 
land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n try:\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n except ValueError:\n coord = lands[random.randint(0, len(lands) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n\n\n def square_diamond(sx, sy, size, strong):\n \"\"\"Algorithm Square-diamond generate terrain heights\n\n -> http://www.lighthouse3d.com/opengl/terrain/index.php?mpd2\n \"\"\"\n if size == 1:\n return\n\n dsize = size/2\n ex = sx+size-1\n ey = sy+size-1\n # lets get math style\n\n\n # SQUARE STEP\n\n A = sx, sy\n B = ex, sy\n C = sx, ey\n D = ex, ey\n E = sx+dsize, sy+dsize\n F = sx, sy + dsize\n G = sx + dsize, sy\n H = ex, sy + dsize\n I = sx + dsize, ey\n\n def RAND(X):\n return random.randint(-strong, strong)\n\n ### for coasts dont disappear\n\n def normalize(add_z, X):\n if self[X] <= 0:\n if add_z > 0:\n add_z = -5\n else:\n if add_z <= 0:\n add_z = 5\n return add_z\n\n # Generate heights\n # E = (A+B+C+D) / 4 + RAND(d)\n # F = (A + C + E + E) / 4 + RAND(d)\n # G = (A + B + E + E) / 4 + RAND(d)\n # H = (B + D + E + E) / 4 + RAND(d)\n # I = (C + D + E + E) / 4 + RANS(d)\n\n ### E\n\n try:\n\n add_z = ((self[A] + self[B] + self[C] + self[D]) / 4) + RAND(E)\n\n except KeyError, e:\n print A, B, C, D, size, dsize, len(self)\n raise e\n\n\n self[E] = normalize(add_z, E)\n\n ### F\n\n add_z = (self[A] + self[C] + self[E] + self[E]) / 4 + RAND(F)\n\n self[F] = normalize(add_z, F)\n\n ### G\n\n add_z = (self[A] + self[B] + self[E] + self[E]) / 4 + RAND(G)\n\n self[G] = normalize(add_z, G)\n\n ### H\n\n add_z = (self[B] + self[D] + self[E] + self[E]) / 4 + RAND(H)\n\n self[H] = normalize(add_z, H)\n\n ### I\n add_z = (self[C] + self[D] + self[E] + self[E]) / 4 + RAND(I)\n\n self[I] = normalize(add_z, I)\n\n\n # DIAMOND STEP\n\n # get coordinates\n # 0 - x, 1 - y\n\n x, y = 0, 1\n\n dx = (G[x] - A[x]) / 2\n dy = (F[y] - A[y]) / 2\n\n J = A[x] + dx, A[y] + dy\n K = G[x] + dx, G[y] + dy\n L = F[x] + dx, F[y] + dy\n M = E[x] + dx, E[y] + dy\n\n N = A[x], A[y] + dy\n O = A[x] + dx, A[y]\n P = G[x], G[y] + dy\n Q = A[x] + dx, F[y]\n\n # Generate Heights\n # J = (A + G + F + E)/4 + RAND(d)\n # K = (G + B + E + H)/4 + RAND(d)\n # L = (F + E + C + I)/4 + RAND(d)\n # M = (E + H + I + D)/4 + RAND(d)\n\n # J\n add_z = ((self[A] + self[G] + self[F] + self[E]) / 4) + RAND(J)\n self[J] = normalize(add_z, J)\n\n # K\n add_z = ((self[G] + self[B] + self[E] + self[H]) / 4) + RAND(K)\n self[K] = normalize(add_z, K)\n\n # L\n add_z = ((self[F] + self[E] + self[C] + self[I]) / 4) + RAND(L)\n self[L] = normalize(add_z, L)\n\n # M\n add_z = ((self[E] + self[H] + self[I] + self[D]) / 4) + RAND(M)\n self[M] = normalize(add_z, M)\n\n # N = (K + A + J + F)/4 + RAND(d)\n # O = (L + A + G + J)/4 + RAND(d)\n # P = (J + G + K + E)/4 + RAND(d)\n # Q = (F + J + E + L)/4 + RAND(d)\n\n # N\n add_z = ((self[K] + self[A] + self[J] + self[F]) / 4) + RAND(N)\n self[N] = normalize(add_z, N)\n\n # O\n add_z = ((self[L] + self[A] + self[G] + self[J]) / 4) + RAND(O)\n self[O] = normalize(add_z, O)\n\n # P\n add_z = ((self[J] + self[G] + self[K] + self[E]) / 4) + RAND(P)\n self[P] = normalize(add_z, P)\n\n # Q\n add_z = 
((self[F] + self[J] + self[E] + self[L]) / 4) + RAND(Q)\n self[Q] = normalize(add_z, Q)\n\n # N = (A + J + F)/3 + RAND(d)\n # O = (A + G + J)/3 + RAND(d)\n\n # N\n add_z = ((self[A] + self[J] + self[F]) / 3) + RAND(N)\n self[N] = normalize(add_z, N)\n\n # O\n add_z = ((self[A] + self[G] + self[J]) / 3) + RAND(N)\n self[O] = normalize(add_z, O)\n\n\n ### Start recurse for diamond alg\n square_diamond(A[0], A[1], dsize, strong)\n square_diamond(G[0], G[1], dsize, strong)\n square_diamond(F[0], F[1], dsize, strong)\n square_diamond(E[0], E[1], dsize, strong)\n\n # align\n def align_it(start, strong):\n \"\"\"Deprecated\n \"\"\"\n water = 0\n #map3d = self.copy()\n size = (abs(start)*2) + self.size - strong\n start = start + strong\n coords_map = []\n for x in xrange(start,size):\n for y in xrange(start,size):\n coords_map.append( (x, y) )\n\n random.shuffle(coords_map)\n\n lens = strong * (3.0 ** 2)\n for coord in coords_map:\n average = 0.0\n x, y = coord\n #rounds = self.get_round_xy_land(coord, -strong, False)\n #for r_coord in rounds:\n #average += self[r_coord]\n for x in xrange(-strong, strong+1):\n for y in xrange(-strong, strong+1):\n average += self[x, y]\n\n height = int(round(average / lens))\n #height = int(round(average / float(len(rounds))))\n if self[coord] <= water and height > water:\n height = water\n elif self[coord] > water and height <= water:\n height = water + 1\n\n #print self[coord], '->', height\n\n self[coord] = height\n\n if self.config.add_pre_heights:\n print 'Add heights start'\n add_heights()\n print 'Diamond-Square start'\n for x in xrange(1):\n square_diamond(\n sx = 0,\n sy = 0,\n size = self.size, strong=100)", "def height_at(self, x, z):\n\n return self.heightmap[x * 16 + z]", "def prepare_map(self):\n for y, row in enumerate(self.contents):\n for x, tile in enumerate(row):\n bm = self.get_tile(tile)\n self.image[\n y * TILE_SIZE : (y + 1) * TILE_SIZE,\n x * TILE_SIZE : (x + 1) * TILE_SIZE,\n ] = bm", "def reset(self) -> None:\n self.map = []\n for col in range(self.width):\n self.map.append([])\n for cell in range(self.height):\n if col > 1 and col < self.width - 2:\n if cell == 0:\n # World Barrier - Top Middle\n self.map[col].append(StaticTile('wall_3', self.graphicsLibrary.get('wall_3'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif cell == self.height - 1:\n # World Barrier - Bottom Middle\n self.map[col].append(StaticTile('wall_12', self.graphicsLibrary.get('wall_12'), (self.scaleWidth,self.scaleHeight), barrier=True))\n else:\n # Playable Map Area\n if (col % 2) != 0 and (cell % 2) == 0:\n # Hard-Barrier Generation\n self.map[col].append(StaticTile('solid', self.graphicsLibrary.get('solid'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif (col,cell) in self.spawn_buffers:\n # Preserve Potential Spawn Points\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n elif random.randint(0, 2) == 0:\n # Soft-Barrier Generation\n self.map[col].append(DynamicTile('destructable_new', self.graphicsLibrary.get('destructable_new'), (self.scaleWidth,self.scaleHeight), destructable=\"True\", barrier=True, death_animation=self.animations_library.get('destructable_death')))\n else:\n # Fill Remaining Terrain\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n else:\n # World Barrier - Side Sections\n if col == 0 or col == self.width - 1:\n # Roof\n right_most_columns = False\n if 
col == self.width - 1:\n right_most_columns = True\n\n if cell == self.height - 1:\n self.map[col].append(StaticTile('wall_10', self.graphicsLibrary.get('wall_10'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_5', self.graphicsLibrary.get('wall_5'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif col == 1 or col == self.width - 2:\n # Floor \n right_most_columns = False\n if col == self.width - 2:\n right_most_columns = True\n\n if cell == self.height -1:\n self.map[col].append(StaticTile('wall_11', self.graphicsLibrary.get('wall_11'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_9', self.graphicsLibrary.get('wall_9'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_2', self.graphicsLibrary.get('wall_2'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 1:\n self.map[col].append(StaticTile('wall_6', self.graphicsLibrary.get('wall_6'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_7', self.graphicsLibrary.get('wall_7'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n self.map[col][cell].place_at(topleft=(self.scaleWidth * col, self.scaleHeight * cell))", "def prepare_map(self):\n for y_coord, row in enumerate(self.contents):\n for x_coord, tile in enumerate(row):\n bit_map = self.get_tile_bitmap(tile)\n self.image[y_coord * TILE_SIZE:(y_coord+1) * TILE_SIZE,\n x_coord * TILE_SIZE:(x_coord+1) * TILE_SIZE] = bit_map", "def create_hard_blocks(self):\n for x in xrange(1, self.map_size[0], 2):\n for y in xrange(1, self.map_size[1], 2):\n self.create_hard_block_at(x, y)", "def populate_blocks_with_blockheights(self):\n for (height, block) in enumerate(self.blocks):\n block[\"height\"] = height", "def height(self, x):\n\t\treturn np.interp(x, self.x, self.z)", "def yank(self):\r\n self.block.bucket_array.yank_cell(self)", "def add_heights():\n fac_min = 50\n fac_max = 40\n\n print 'Get lands and oceans'\n t = time.time()\n lands, oceans = get_lands_oceans()\n print 'lands and oceans getted: ', time.time() - t\n\n # TODO: create one def with params: mount_level and other for create heights\n # add default heights\n for coord in lands:\n self[coord] = self.config.land_mount_level[1]\n\n for coord in oceans:\n self[coord] = -self.config.mid_mount_level[1]\n\n # add low heights for lands\n count_land = int(round(len(lands) * config.factor_low_mount / 100.))\n land_coords = []\n\n starts = random.randint(count_land / fac_min, count_land / fac_max)\n for start in xrange(starts):\n start_coord = lands[random.randint(0, len(lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n 
coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n if coord not in land_coords:\n self[coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n # -------------------------------------------------------------------------------\n # add mid heights for lands\n count_land = int(round(len(target_lands) * (config.factor_mid_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 3), count_land / (fac_max*3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n\n if land_coords == []:\n return\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n\n # -------------------------------------------------------------------------------\n # add high heights for lands\n count_land = int(round(len(target_lands) * (config.factor_high_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 4), count_land / (fac_max * 3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n try:\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n except ValueError:\n coord = lands[random.randint(0, len(lands) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n land_coords.append(coord)\n count_land -= 1", "def get_main_array_bottom(self):\n return self.bitcell_array_inst.by()", "def get_heights_with_blocks(self):\n all_heights = list(self.chain.keys())\n all_heights.sort()\n return all_heights", "def getHeights(self):\n if self.heights: return self.heights\n reader = self.getReader()\n subData = reader.findSubRecord('VHGT','LAND')\n if not subData: return None\n height0 = struct.unpack('f',subData[:4])[0]\n import array\n deltas = array.array('b',subData[4:4+65*65])\n iheights = array.array('i')\n iheights.append(0)\n for index in xrange(1,65*65):\n if index % 65:\n iheights.append(iheights[-1] + deltas[index])\n else:\n iheights.append(iheights[-65] + deltas[index])\n heights = self.heights = array.array('f')\n for index in xrange(65*65):\n heights.append(8*(height0 + iheights[index]))\n return self.heights", "def _createMap(self):\n width = self.map_size[0] * self.chunk_size\n height = self.map_size[1] * self.chunk_size\n map_array = np.zeros((height, width), dtype=float)\n chunks = {}\n clist = []\n for i in range(0, self.map_size[0]*self.map_size[1]):\n chunks[i+1] = Chunk(self)\n chunk_array = np.asarray(list(chunks.keys()))\n chunk_array.resize(self.map_size[0], self.map_size[1])\n return map_array, 
chunk_array, chunks", "def reindex(self):\n super().reindex()\n self._depths, self._heights = None, None\n for p in self.positions():\n self._compute_depth(p)\n self._compute_height(p)", "def get_all_highest_piece_blocks(self):\n self.reset_state()\n self.update_state()\n\n active_piece_blocks = self.active_piece.get_block_positions()\n result_blocks = {}\n for i in range(self.MAX_X + 1):\n result_blocks[i] = (i, -1)\n for x in range(self.MAX_X):\n for y in range(self.MAX_Y, self.MIN_Y - 1, -1):\n if self.state[y][x] is not None and (x, y) not in active_piece_blocks:\n result_blocks[x] = (x, y)\n break\n return result_blocks", "def complete_mapping(self):\r\n\r\n self._reset_map()\r\n #position_prey = self.prey.position\r\n #self.complete_map[position_prey[1], position_prey[0]] = 1.0\r\n position_body = [part.position for part in self.body]\r\n\r\n for position in position_body:\r\n self.complete_map[position[1], position[0]] = 1\r\n\r\n return self.complete_map", "def update_height(self, height):\n \n import time\n\n self.height = height\n \n gradZx, gradZy = self.delaunay_grad(height)\n self.slope = np.sqrt(gradZx**2+gradZy**2) \n \n # Initialise the downhill/uphill data structures \n\n wall_time = time.clock()\n self._sort_nodes_by_height()\n\n if self.verbose:\n print \" - Sorted all nodes by height (high to low) \", time.clock() - wall_time, \"s\"\n wall_time = time.clock()\n\n wall_time = time.clock()\n self._build_downhill_matrices()\n \n if self.verbose:\n print \" - Built downhill matrices \", time.clock() - wall_time, \"s\"\n wall_time = time.clock()\n\n # Ensure no outdated node chain information is kept\n\n self.node_chain_lookup = None\n self.node_chain_list = None", "def test_degrade_map_recarray(self):\n random.seed(seed=12345)\n\n nside_coverage = 32\n nside_map = 1024\n nside_new = 256\n\n dtype = [('col1', 'f8'), ('col2', 'f8'), ('col3', 'i4')]\n sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, primary='col1')\n pixel = np.arange(20000)\n values = np.zeros_like(pixel, dtype=dtype)\n values['col1'] = random.random(size=pixel.size)\n values['col2'] = random.random(size=pixel.size)\n values['col3'] = random.poisson(size=pixel.size, lam=2)\n sparse_map.update_values_pix(pixel, values)\n\n ra, dec = hpg.pixel_to_angle(nside_map, pixel)\n\n # Make the test values\n hpmap_col1 = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN\n hpmap_col2 = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN\n hpmap_col3 = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN\n hpmap_col1[pixel] = values['col1']\n hpmap_col2[pixel] = values['col2']\n hpmap_col3[pixel] = values['col3']\n\n # Degrade healpix maps\n hpmap_col1 = hp.ud_grade(hpmap_col1, nside_out=nside_new, order_in='NESTED', order_out='NESTED')\n hpmap_col2 = hp.ud_grade(hpmap_col2, nside_out=nside_new, order_in='NESTED', order_out='NESTED')\n hpmap_col3 = hp.ud_grade(hpmap_col3, nside_out=nside_new, order_in='NESTED', order_out='NESTED')\n ipnest_test = hpg.angle_to_pixel(nside_new, ra, dec)\n\n # Degrade the old map\n new_map = sparse_map.degrade(nside_out=nside_new)\n testing.assert_almost_equal(new_map.get_values_pos(ra, dec, lonlat=True)['col1'],\n hpmap_col1[ipnest_test])\n testing.assert_almost_equal(new_map.get_values_pos(ra, dec, lonlat=True)['col2'],\n hpmap_col2[ipnest_test])\n testing.assert_almost_equal(new_map.get_values_pos(ra, dec, lonlat=True)['col3'],\n hpmap_col3[ipnest_test])\n\n # Test degrade-on-read\n self.test_dir = tempfile.mkdtemp(dir='./', 
prefix='TestHealSparse-')\n\n fname = os.path.join(self.test_dir, 'test_recarray_degrade.hs')\n sparse_map.write(fname)\n\n new_map2 = healsparse.HealSparseMap.read(fname, degrade_nside=nside_new)\n\n testing.assert_almost_equal(new_map2.get_values_pos(ra, dec, lonlat=True)['col1'],\n hpmap_col1[ipnest_test])\n testing.assert_almost_equal(new_map2.get_values_pos(ra, dec, lonlat=True)['col2'],\n hpmap_col2[ipnest_test])\n testing.assert_almost_equal(new_map2.get_values_pos(ra, dec, lonlat=True)['col3'],\n hpmap_col3[ipnest_test])", "def calculate_min_max_tiles(self):", "def reset(self):\n self._cells = [[0 for dummy_col in range(self._grid_width)] for dummy_row in range(self._grid_height)]\n self.new_tile()\n self.new_tile()\n #return self._cells", "def update_pop_matrix(self):\n for row in self.unique_rows[1:-1]: # First and last cell is water\n for col in self.unique_cols[1:-1]: # First and last cell is water\n cell = self.landscape[(row, col)]\n if cell.is_mainland:\n # print(cell)\n self.herb_pop_matrix[row - 1][col - 1] = cell.herb_count\n self.carn_pop_matrix[row - 1][col - 1] = cell.carn_count", "def get_node_heights(self):\n\n # reset lists\n self.extant_h = []\n self.not_extant_h = []\n self.not_yet_sampled_h = []\n\n # the total height of the tree is the maximum distance from root to any tip\n root = self.hosttree.get_tree_root()\n apex = self.hosttree.get_farthest_leaf()[0]\n total_height = self.hosttree.get_distance(root, apex)\n\n # iterate through all nodes in the tree\n self.host_nodes = []\n for node in self.hosttree.traverse():\n # the node's depth is its distance from the root\n depth = self.hosttree.get_distance(root, node)\n # the node's height is the difference between its depth and the total height\n height = total_height-depth\n node.add_feature('height', height) # modify TreeNode in place \n\n # we do not allow zero branch length\n # node.dist = node.dist + (np.finfo(float).eps)\n if (node.dist == 0):\n greaterThan = False\n while greaterThan == False:\n node.dist += float_info.epsilon # add a small amount\n if (node.height+node.dist<=node.height):\n greaterThan = False\n else:\n greaterThan = True\n node.height += node.dist\n\n if node.is_leaf():\n # keep track of leaf nodes\n if node.height == 0:\n self.extant_h.append(node)\n continue # do not append to host_nodes list\n else:\n self.not_yet_sampled_h.append(node)\n\n # store the host node and its height\n self.host_nodes.append((node.height, node))\n\n self.host_nodes.sort() # order by ascending node height", "def de_zigzag(zigzag_blocks):\n blocks = []\n zigzag_arr = []\n last_DC = 0\n for zb in zigzag_blocks:\n for s, v in zb:\n if s == _ZRL:\n zigzag_arr.extend([0] * 15)\n elif s == _EOB:\n zigzag_arr.extend([0] * (64 - len(zigzag_arr)))\n blocks.append(zigzag(zigzag_arr))\n zigzag_arr.clear()\n elif s == _DC:\n last_DC += v\n zigzag_arr.append(last_DC)\n else: # AC\n zigzag_arr.extend([0] * s)\n zigzag_arr.append(v)\n return blocks", "def recalculate_map(self, clear=True):\r\n if clear:\r\n self._clear_map()\r\n changed = True\r\n while changed:\r\n changed = False\r\n for x in range(0, self.width):\r\n for y in range(0, self.height):\r\n if (x,y) in self.walls:\r\n continue\r\n lowest_neighbor = self._get_lowest_neighbor_value(x, y)\r\n if self.tiles[x][y] > lowest_neighbor + 1:\r\n self.tiles[x][y] = lowest_neighbor + 1\r\n changed = True", "def create_map():\n pass\n # for line in range(0, shared.lines):\n # map_data[line][0] = (1, -1)\n # map_data[line][shared.columns - 1] = (1, -1)\n #\n # for column in 
range(0, shared.columns):\n # map_data[0, column] = (-1, 1)\n # # if column <= shared.left_space or column > shared.columns - shared.left_space:\n # map_data[shared.lines - 1, column] = (-1, 1)", "def _set_z_block_size(self):\n self._scene_gen.block_dimensions = (self._scene_gen.block_dimensions[X],\n self._scene_gen.block_dimensions[Y],\n self._block_size_z_spinbox.value())\n self._refresh_view()", "def __build_map(self):\n columns = []\n\n for i in range(self.__dimensions):\n columns.append([])\n\n for i in range(self.__dimensions):\n self.map.append(columns)" ]
[ "0.58731276", "0.5858455", "0.5805188", "0.5782575", "0.57770145", "0.5629142", "0.55918765", "0.55523336", "0.5498356", "0.5236181", "0.5209923", "0.51482797", "0.50665766", "0.50564885", "0.5053328", "0.50506574", "0.5037577", "0.5036161", "0.50310326", "0.49994954", "0.49697927", "0.4953855", "0.49426544", "0.4924681", "0.49136394", "0.48947927", "0.48903498", "0.48737365", "0.48623723", "0.48526964" ]
0.8081936
0
Regenerate the ambient light map. Each block's individual light comes from two sources. The ambient light comes from the sky. The height map must be valid for this method to produce valid results.
def regenerate_skylight(self): # Create an array of skylights, and a mask of dimming blocks. lights = [0xf] * (16 * 16) mask = [0x0] * (16 * 16) # For each y-level, we're going to update the mask, apply it to the # lights, apply the lights to the section, and then blur the lights # and move downwards. Since empty sections are full of air, and air # doesn't ever dim, ignoring empty sections should be a correct way # to speed things up. Another optimization is that the process ends # early if the entire slice of lights is dark. for section in reversed(self.sections): if not section: continue for y in range(15, -1, -1): # Early-out if there's no more light left. if not any(lights): break # Update the mask. for x, z in XZ: offset = x * 16 + z block = section.get_block((x, y, z)) mask[offset] = blocks[block].dim # Apply the mask to the lights. for i, dim in enumerate(mask): # Keep it positive. lights[i] = max(0, lights[i] - dim) # Apply the lights to the section. for x, z in XZ: offset = x * 16 + z section.set_skylight((x, y, z), lights[offset]) # XXX blur the lights # And continue moving downward.
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def regenerate_heightmap(self):\n\n for x in range(16):\n for z in range(16):\n column = x * 16 + z\n for y in range(255, -1, -1):\n if self.get_block((x, y, z)):\n break\n\n self.heightmap[column] = y", "def regenerate(self):\n\n self.regenerate_heightmap()\n self.regenerate_blocklight()\n self.regenerate_skylight()\n\n self.dirty = True", "def add_heights():\n fac_min = 50\n fac_max = 40\n\n print 'Get lands and oceans'\n t = time.time()\n lands, oceans = get_lands_oceans()\n print 'lands and oceans getted: ', time.time() - t\n\n # TODO: create one def with params: mount_level and other for create heights\n # add default heights\n for coord in lands:\n self[coord] = self.config.land_mount_level[1]\n\n for coord in oceans:\n self[coord] = -self.config.mid_mount_level[1]\n\n # add low heights for lands\n count_land = int(round(len(lands) * config.factor_low_mount / 100.))\n land_coords = []\n\n starts = random.randint(count_land / fac_min, count_land / fac_max)\n for start in xrange(starts):\n start_coord = lands[random.randint(0, len(lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n if coord not in land_coords:\n self[coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n # -------------------------------------------------------------------------------\n # add mid heights for lands\n count_land = int(round(len(target_lands) * (config.factor_mid_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 3), count_land / (fac_max*3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n\n if land_coords == []:\n return\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n\n # -------------------------------------------------------------------------------\n # add high heights for lands\n count_land = int(round(len(target_lands) * (config.factor_high_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 4), count_land / (fac_max * 3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n try:\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n except ValueError:\n coord = lands[random.randint(0, len(lands) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = 
random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n land_coords.append(coord)\n count_land -= 1", "def __init__(self, emap, light):\r\n super(ShadowCaster, self).__init__(\"shadow_caster\")\r\n # load shader for casting shadows and camera\r\n self.cshader = Shader(\"uv_flat\")\r\n self.mshader = Shader(\"mat_flat\")\r\n # keep copy of ElevationMap\r\n self.emap = emap\r\n self.emap.set_material((0.0, 0.0, 0.0)) # hide bits below ground\r\n #TODO doesn't cope with z light positions\r\n self.eye = [-500.0 * i for i in light.lightpos] # good distance away\r\n if self.eye[1] <= 0: # must have +ve y\r\n self.eye[1] = 500.0\r\n if abs(self.eye[0]) > abs(self.eye[2]): #x val is bigger than z val\r\n #change scale so map just fits on screen\r\n if self.eye[0] < 0:\r\n su, sv = 1.0, 1.0\r\n else:\r\n su, sv = -1.0, -1.0\r\n self.scaleu = float(self.iy) / self.emap.width\r\n self.scalev = float(self.ix)/ self.emap.depth\r\n self.eye[2] = 0\r\n self.scaleu = self.scaleu / self.eye[1] * (self.eye[0]**2 + self.eye[1]**2)**0.5\r\n self.emap.unif[50] = 1.0 #orientation flag\r\n self.emap.unif[53] = -3.0 * su / self.emap.width * self.eye[0] / self.eye[1] #height adjustment\r\n else:\r\n #change scale so map just fits on screen\r\n if self.eye[2] < 0:\r\n su, sv = 1.0, -1.0\r\n else:\r\n su, sv = -1.0, 1.0\r\n self.scaleu = float(self.iy) / self.emap.depth\r\n self.scalev = float(self.ix)/ self.emap.width\r\n self.eye[0] = 0\r\n self.scaleu = self.scaleu / self.eye[1] * (self.eye[2]**2 + self.eye[1]**2)**0.5\r\n self.emap.unif[50] = 0.0\r\n self.emap.unif[53] = -3.0 * su / self.emap.width * self.eye[2] / self.eye[1]\r\n if abs(self.scaleu) > abs(self.scalev):\r\n self.scale = 3.0 * self.scalev # multiplication factor to reduce pixeliness\r\n else:\r\n self.scale = 3.0 * self.scaleu\r\n self.scaleu = su * self.scale / self.scaleu # reused later in end_cast\r\n self.scalev = sv * self.scale / self.scalev\r\n self.camera0 = Camera() # default instance created as normal, just in case!\r\n self.camera = Camera(is_3d=False, eye=self.eye, scale=self.scale)\r\n # load shader for drawing map with shadows\r\n self.dshader = Shader(\"shadowcast\")", "def generate_lut(self):\n colormap = self.get_colormap()\n\n if self.block_type == \"equidistant\":\n ev_colormap = colors.colormap_to_ev_blocks_equidistant(colormap, self.exposure_values)\n if self.test:\n self.print_colormap(self.name, ev_colormap)\n return self.generate_spi3d_from_evs(ev_colormap)\n elif self.block_type == \"centered\":\n ev_colormap = colors.colormap_to_ev_blocks_centered(colormap, self.exposure_values)\n if self.test:\n self.print_colormap(self.name, ev_colormap)\n return self.generate_spi3d_from_evs(ev_colormap)\n elif self.block_type == \"stretched\":\n ev_colormap = colors.colormap_to_ev_blocks_stretched(colormap, self.exposure_values)\n if self.test:\n self.print_colormap(self.name, ev_colormap)\n return self.generate_spi3d_from_evs(ev_colormap)", "def __init__(self, mapfile, camera=None, light=None,\r\n width=100.0, depth=100.0, height=10.0,\r\n divx=0, divy=0, ntiles=1.0, name=\"\",\r\n x=0.0, y=0.0, z=0.0, rx=0.0, ry=0.0, rz=0.0,\r\n sx=1.0, sy=1.0, sz=1.0, cx=0.0, cy=0.0, cz=0.0, smooth=True, cubic=False):\r\n super(ElevationMap, self).__init__(camera, light, name, x, y, z, rx, ry, rz,\r\n sx, sy, sz, cx, cy, cz)\r\n if divx > 200 or divy > 200:\r\n print(\"... Map size can't be bigger than 200x200 divisions\")\r\n divx = 200\r\n divy = 200\r\n if issubclass(type(mapfile), type(\"\")): #HORRIBLE. 
Only way to cope with python2v3\r\n if mapfile[0] != '/':\r\n mapfile = sys.path[0] + '/' + mapfile\r\n if VERBOSE:\r\n print(\"Loading height map ...\", mapfile)\r\n\r\n im = Image.open(mapfile)\r\n im = ImageOps.invert(im)\r\n else:\r\n im = mapfile #allow image files to be passed as mapfile\r\n ix, iy = im.size\r\n if (ix > 200 and divx == 0) or (divx > 0):\r\n if divx == 0:\r\n divx = 200\r\n divy = 200\r\n im = im.resize((divx, divy), Image.ANTIALIAS)\r\n ix, iy = im.size\r\n if not im.mode == \"P\":\r\n im = im.convert('P', palette=Image.ADAPTIVE)\r\n\r\n im = im.transpose(Image.FLIP_TOP_BOTTOM)\r\n im = im.transpose(Image.FLIP_LEFT_RIGHT)\r\n self.pixels = im.load()\r\n self.width = width\r\n self.depth = depth\r\n self.height = height\r\n self.ix = ix\r\n self.iy = iy\r\n self.ttype = GL_TRIANGLE_STRIP\r\n\r\n if VERBOSE:\r\n print(\"Creating Elevation Map ...\", ix, iy)\r\n\r\n wh = width * 0.5\r\n hh = depth * 0.5\r\n ws = width / ix\r\n hs = depth / iy\r\n ht = height / 255.0\r\n tx = 1.0*ntiles / ix\r\n ty = 1.0*ntiles / iy\r\n\r\n verts = []\r\n norms = []\r\n tex_coords = []\r\n idx = []\r\n\r\n for y in xrange(0, iy):\r\n for x in xrange(0, ix):\r\n hgt = (self.pixels[x, y])*ht\r\n this_x = -wh + x*ws\r\n this_z = -hh + y*hs\r\n if cubic:\r\n \"\"\" this is a bit experimental. It tries to make the map either zero\r\n or height high. Vertices are moved 'under' adjacent ones if there is\r\n a step to make vertical walls. Goes wrong in places - mainly because\r\n it doesn't check diagonals\r\n \"\"\"\r\n if hgt > height / 2:\r\n hgt = height\r\n else:\r\n hgt = 0.0\r\n if hgt == 0 and y > 0 and y < iy-1 and x > 0 and x < ix-1:\r\n if self.pixels[x-1, y] > 127:\r\n this_x = -wh + (x-1)*ws\r\n elif self.pixels[x+1, y] > 127:\r\n this_x = -wh + (x+1)*ws\r\n elif self.pixels[x, y-1] > 127:\r\n this_z = -hh + (y-1)*hs\r\n elif self.pixels[x, y+1] > 127:\r\n this_z = -hh + (y+1)*hs\r\n elif self.pixels[x-1, y-1] > 127:\r\n this_x = -wh + (x-1)*ws\r\n this_z = -hh + (y-1)*hs\r\n elif self.pixels[x-1, y+1] > 127:\r\n this_x = -wh + (x-1)*ws\r\n this_z = -hh + (y+1)*hs\r\n elif self.pixels[x+1, y-1] > 127:\r\n this_x = -wh + (x+1)*ws\r\n this_z = -hh + (y-1)*hs\r\n elif self.pixels[x+1, y+1] > 127:\r\n this_x = -wh + (x+1)*ws\r\n this_z = -hh + (y+1)*hs\r\n verts.append((this_x, hgt, this_z))\r\n tex_coords.append(((ix-x) * tx,(iy-y) * ty))\r\n\r\n s = 0\r\n #create one long triangle_strip by alternating X directions\r\n for y in range(0, iy-1):\r\n for x in range(0, ix-1):\r\n i = (y * ix)+x\r\n idx.append((i, i+ix, i+ix+1))\r\n idx.append((i+ix+1, i+1, i))\r\n s += 2\r\n\r\n self.buf = []\r\n self.buf.append(Buffer(self, verts, tex_coords, idx, None, smooth))", "def reset(self) -> None:\n self.map = []\n for col in range(self.width):\n self.map.append([])\n for cell in range(self.height):\n if col > 1 and col < self.width - 2:\n if cell == 0:\n # World Barrier - Top Middle\n self.map[col].append(StaticTile('wall_3', self.graphicsLibrary.get('wall_3'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif cell == self.height - 1:\n # World Barrier - Bottom Middle\n self.map[col].append(StaticTile('wall_12', self.graphicsLibrary.get('wall_12'), (self.scaleWidth,self.scaleHeight), barrier=True))\n else:\n # Playable Map Area\n if (col % 2) != 0 and (cell % 2) == 0:\n # Hard-Barrier Generation\n self.map[col].append(StaticTile('solid', self.graphicsLibrary.get('solid'), (self.scaleWidth,self.scaleHeight), barrier=True))\n elif (col,cell) in self.spawn_buffers:\n # Preserve Potential 
Spawn Points\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n elif random.randint(0, 2) == 0:\n # Soft-Barrier Generation\n self.map[col].append(DynamicTile('destructable_new', self.graphicsLibrary.get('destructable_new'), (self.scaleWidth,self.scaleHeight), destructable=\"True\", barrier=True, death_animation=self.animations_library.get('destructable_death')))\n else:\n # Fill Remaining Terrain\n self.map[col].append(StaticTile('terrain', self.graphicsLibrary.get('terrain'), (self.scaleWidth,self.scaleHeight), barrier=False))\n else:\n # World Barrier - Side Sections\n if col == 0 or col == self.width - 1:\n # Roof\n right_most_columns = False\n if col == self.width - 1:\n right_most_columns = True\n\n if cell == self.height - 1:\n self.map[col].append(StaticTile('wall_10', self.graphicsLibrary.get('wall_10'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_1', self.graphicsLibrary.get('wall_1'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_5', self.graphicsLibrary.get('wall_5'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif col == 1 or col == self.width - 2:\n # Floor \n right_most_columns = False\n if col == self.width - 2:\n right_most_columns = True\n\n if cell == self.height -1:\n self.map[col].append(StaticTile('wall_11', self.graphicsLibrary.get('wall_11'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == self.height - 2:\n self.map[col].append(StaticTile('wall_9', self.graphicsLibrary.get('wall_9'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 0:\n self.map[col].append(StaticTile('wall_2', self.graphicsLibrary.get('wall_2'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n elif cell == 1:\n self.map[col].append(StaticTile('wall_6', self.graphicsLibrary.get('wall_6'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n else:\n self.map[col].append(StaticTile('wall_7', self.graphicsLibrary.get('wall_7'), (self.scaleWidth,self.scaleHeight), flip_x=right_most_columns, barrier=True))\n self.map[col][cell].place_at(topleft=(self.scaleWidth * col, self.scaleHeight * cell))", "def _reset_map(self):\n if not self.scenario_name.startswith('random'):\n self._map = self._fixed_original_map.copy()\n else:\n from environment.scenarios import generate_random_map\n self._map = generate_random_map(self.scenario_name)\n\n # Precompute wall channel and positions since they are static\n self._walls_channel = (self._map == WALL).astype(int)\n xs, ys = np.where(self._walls_channel)\n self._wall_positions = list(zip(xs, ys))\n\n # Set avatar position bidirectional caches (first thieves then guardians)\n xs_t, ys_t = np.where(self._map == THIEF)\n xs_g, ys_g = np.where(self._map == GUARDIAN)\n xs = np.concatenate([xs_t, xs_g])\n ys = np.concatenate([ys_t, ys_g])\n for avatar_id, (x, y) in enumerate(zip(xs, ys)):\n self._id2pos[avatar_id] = x, y\n self._pos2id[(x, y)] = avatar_id\n\n self._chased_treasure_pos = _coords_where(self._map == TREASURE)\n self._chased_thief_id = 0", "def 
mapAdd(block, posMap):\n for (x, y) in block.coords:\n theFallener(x + block.x, y + block.y, block.color, posMap)", "def __init__(self, mapfile, camera=None, light=None,\n width=100.0, depth=100.0, height=10.0,\n divx=0, divy=0, ntiles=1.0, name=\"\",\n x=0.0, y=0.0, z=0.0, rx=0.0, ry=0.0, rz=0.0,\n sx=1.0, sy=1.0, sz=1.0, cx=0.0, cy=0.0, cz=0.0, smooth=True, cubic=False):\n super(ElevationMap, self).__init__(camera, light, name, x, y, z, rx, ry, rz,\n sx, sy, sz, cx, cy, cz)\n if mapfile[0] != '/':\n mapfile = sys.path[0] + '/' + mapfile\n if VERBOSE:\n print(\"Loading height map ...\", mapfile)\n\n if divx > 200 or divy > 200:\n print(\"... Map size can't be bigger than 200x200 divisions\")\n divx = 200\n divy = 200\n\n im = Image.open(mapfile)\n im = ImageOps.invert(im)\n ix, iy = im.size\n if (ix > 200 and divx == 0) or (divx > 0):\n if divx == 0:\n divx = 200\n divy = 200\n im = im.resize((divx, divy), Image.ANTIALIAS)\n ix, iy = im.size\n if not im.mode == \"P\":\n im = im.convert('P', palette=Image.ADAPTIVE)\n\n im = im.transpose(Image.FLIP_TOP_BOTTOM)\n im = im.transpose(Image.FLIP_LEFT_RIGHT)\n self.pixels = im.load()\n self.width = width\n self.depth = depth\n self.height = height\n self.ix = ix\n self.iy = iy\n self.ttype = GL_TRIANGLE_STRIP\n\n if VERBOSE:\n print(\"Creating Elevation Map ...\", ix, iy)\n\n wh = width * 0.5\n hh = depth * 0.5\n ws = width / ix\n hs = depth / iy\n ht = height / 255.0\n tx = 1.0*ntiles / ix\n ty = 1.0*ntiles / iy\n\n verts = []\n norms = []\n tex_coords = []\n idx = []\n\n for y in xrange(0, iy):\n for x in xrange(0, ix):\n hgt = (self.pixels[x, y])*ht\n this_x = -wh + x*ws\n this_z = -hh + y*hs\n if cubic:\n \"\"\" this is a bit experimental. It tries to make the map either zero\n or height high. Vertices are moved 'under' adjacent ones if there is\n a step to make vertical walls. 
Goes wrong in places - mainly because\n it doesn't check diagonals\n \"\"\"\n if hgt > height / 2:\n hgt = height\n else:\n hgt = 0.0\n if hgt == 0 and y > 0 and y < iy-1 and x > 0 and x < ix-1:\n if self.pixels[x-1, y] > 127:\n this_x = -wh + (x-1)*ws\n elif self.pixels[x+1, y] > 127:\n this_x = -wh + (x+1)*ws\n elif self.pixels[x, y-1] > 127:\n this_z = -hh + (y-1)*hs\n elif self.pixels[x, y+1] > 127:\n this_z = -hh + (y+1)*hs\n elif self.pixels[x-1, y-1] > 127:\n this_x = -wh + (x-1)*ws\n this_z = -hh + (y-1)*hs\n elif self.pixels[x-1, y+1] > 127:\n this_x = -wh + (x-1)*ws\n this_z = -hh + (y+1)*hs\n elif self.pixels[x+1, y-1] > 127:\n this_x = -wh + (x+1)*ws\n this_z = -hh + (y-1)*hs\n elif self.pixels[x+1, y+1] > 127:\n this_x = -wh + (x+1)*ws\n this_z = -hh + (y+1)*hs\n verts.append((this_x, hgt, this_z))\n tex_coords.append(((ix-x) * tx,(iy-y) * ty))\n\n s = 0\n #create one long triangle_strip by alternating X directions\n for y in range(0, iy-1):\n for x in range(0, ix-1):\n i = (y * ix)+x\n idx.append((i, i+ix, i+ix+1))\n idx.append((i+ix+1, i+1, i))\n s += 2\n\n self.buf = []\n self.buf.append(Buffer(self, verts, tex_coords, idx, None, smooth))", "def end_cast(self):\r\n #draw the actual map\r\n self.emap.draw(shader=self.mshader, camera=self.camera)\r\n super(ShadowCaster, self)._end()\r\n # set third texture to this ShadowCaster texture\r\n texs = self.emap.buf[0].textures\r\n if len(texs) == 2:\r\n texs.append(self)\r\n else:\r\n texs[2] = self\r\n # change background back to blue\r\n opengles.glClearColor(ctypes.c_float(0.4), ctypes.c_float(0.8), \r\n ctypes.c_float(0.8), ctypes.c_float(1.0))\r\n # work out left, top, right, bottom for shader\r\n self.emap.unif[48] = 0.5 * (1.0 + self.scaleu) # left [16][0]\r\n self.emap.unif[49] = 0.5 * (1.0 + self.scalev) # top [16][1]\r\n self.emap.unif[51] = 1.0 - self.emap.unif[48] # right [17][0]\r\n self.emap.unif[52] = 1.0 - self.emap.unif[49] # bottom [17][1]\r\n \r\n du = float(self.location[0] / self.emap.width)\r\n dv = float(self.location[2] / self.emap.depth)\r\n self.emap.unif[48] -= self.scaleu * (du if self.emap.unif[50] == 1.0 else dv)\r\n self.emap.unif[49] += self.scalev * (dv if self.emap.unif[50] == 1.0 else du)\r\n self.emap.unif[51] -= self.scaleu * (du if self.emap.unif[50] == 1.0 else dv)\r\n self.emap.unif[52] += self.scalev * (dv if self.emap.unif[50] == 1.0 else du)", "def init_map(self, size, random_data = False):\n heightmap = []\n\n if random_data:\n # random noise background\n for x in range(size):\n heightmap.append([])\n for y in range(size):\n heightmap[-1].append(random.random())\n else:\n # black background\n for x in range(size):\n heightmap.append([])\n for y in range(size):\n heightmap[-1].append(0.0)\n\n return heightmap", "def generate_pre_heights(self):\n\n config = self.config\n\n def get_lands_oceans():\n oceans, lands = [], []\n for x in xrange(self.size):\n for y in xrange(self.size):\n coord = x, y\n if self[coord] <= 0:\n oceans.append(coord)\n else:\n lands.append(coord)\n return lands, oceans\n\n def add_heights():\n \"\"\"Add pre heights for diamond-square\n \"\"\"\n fac_min = 50\n fac_max = 40\n\n print 'Get lands and oceans'\n t = time.time()\n lands, oceans = get_lands_oceans()\n print 'lands and oceans getted: ', time.time() - t\n\n # TODO: create one def with params: mount_level and other for create heights\n # add default heights\n for coord in lands:\n self[coord] = self.config.land_mount_level[1]\n\n for coord in oceans:\n self[coord] = -self.config.mid_mount_level[1]\n\n # add low 
heights for lands\n count_land = int(round(len(lands) * config.factor_low_mount / 100.))\n land_coords = []\n\n starts = random.randint(count_land / fac_min, count_land / fac_max)\n for start in xrange(starts):\n start_coord = lands[random.randint(0, len(lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n if coord not in land_coords:\n self[coord] = random.randint(self.config.low_mount_level[0], self.config.low_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n # -------------------------------------------------------------------------------\n # add mid heights for lands\n count_land = int(round(len(target_lands) * (config.factor_mid_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 3), count_land / (fac_max*3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n\n if land_coords == []:\n return\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.mid_mount_level[0],\n self.config.mid_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n target_lands = land_coords\n\n\n # -------------------------------------------------------------------------------\n # add high heights for lands\n count_land = int(round(len(target_lands) * (config.factor_high_mount / 100.)))\n land_coords = []\n\n starts = random.randint(count_land / (fac_min * 4), count_land / (fac_max * 3))\n for start in xrange(starts):\n start_coord = target_lands[random.randint(0, len(target_lands)-1)]\n land_coords.append(start_coord)\n self[start_coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n\n while count_land > 0:\n # for lands\n if count_land > 0:\n dx = random.randint(-1,1)\n dy = random.randint(-1,1)\n try:\n coord = land_coords[random.randint(0, len(land_coords) - 1)]\n except ValueError:\n coord = lands[random.randint(0, len(lands) - 1)]\n coord = coord[0] + dx, coord[1] + dy\n #if coord not in land_coords:\n self[coord] = random.randint(self.config.high_mount_level[0],\n self.config.high_mount_level[1])\n land_coords.append(coord)\n count_land -= 1\n\n\n\n\n def square_diamond(sx, sy, size, strong):\n \"\"\"Algorithm Square-diamond generate terrain heights\n\n -> http://www.lighthouse3d.com/opengl/terrain/index.php?mpd2\n \"\"\"\n if size == 1:\n return\n\n dsize = size/2\n ex = sx+size-1\n ey = sy+size-1\n # lets get math style\n\n\n # SQUARE STEP\n\n A = sx, sy\n B = ex, sy\n C = sx, ey\n D = ex, ey\n E = sx+dsize, sy+dsize\n F = sx, sy + dsize\n G = sx + dsize, sy\n H = ex, sy + dsize\n I = sx + dsize, ey\n\n def RAND(X):\n return random.randint(-strong, strong)\n\n ### for coasts dont disappear\n\n def normalize(add_z, X):\n if self[X] <= 0:\n if add_z > 0:\n add_z = -5\n else:\n if add_z <= 0:\n add_z = 5\n return add_z\n\n # Generate heights\n # 
E = (A+B+C+D) / 4 + RAND(d)\n # F = (A + C + E + E) / 4 + RAND(d)\n # G = (A + B + E + E) / 4 + RAND(d)\n # H = (B + D + E + E) / 4 + RAND(d)\n # I = (C + D + E + E) / 4 + RANS(d)\n\n ### E\n\n try:\n\n add_z = ((self[A] + self[B] + self[C] + self[D]) / 4) + RAND(E)\n\n except KeyError, e:\n print A, B, C, D, size, dsize, len(self)\n raise e\n\n\n self[E] = normalize(add_z, E)\n\n ### F\n\n add_z = (self[A] + self[C] + self[E] + self[E]) / 4 + RAND(F)\n\n self[F] = normalize(add_z, F)\n\n ### G\n\n add_z = (self[A] + self[B] + self[E] + self[E]) / 4 + RAND(G)\n\n self[G] = normalize(add_z, G)\n\n ### H\n\n add_z = (self[B] + self[D] + self[E] + self[E]) / 4 + RAND(H)\n\n self[H] = normalize(add_z, H)\n\n ### I\n add_z = (self[C] + self[D] + self[E] + self[E]) / 4 + RAND(I)\n\n self[I] = normalize(add_z, I)\n\n\n # DIAMOND STEP\n\n # get coordinates\n # 0 - x, 1 - y\n\n x, y = 0, 1\n\n dx = (G[x] - A[x]) / 2\n dy = (F[y] - A[y]) / 2\n\n J = A[x] + dx, A[y] + dy\n K = G[x] + dx, G[y] + dy\n L = F[x] + dx, F[y] + dy\n M = E[x] + dx, E[y] + dy\n\n N = A[x], A[y] + dy\n O = A[x] + dx, A[y]\n P = G[x], G[y] + dy\n Q = A[x] + dx, F[y]\n\n # Generate Heights\n # J = (A + G + F + E)/4 + RAND(d)\n # K = (G + B + E + H)/4 + RAND(d)\n # L = (F + E + C + I)/4 + RAND(d)\n # M = (E + H + I + D)/4 + RAND(d)\n\n # J\n add_z = ((self[A] + self[G] + self[F] + self[E]) / 4) + RAND(J)\n self[J] = normalize(add_z, J)\n\n # K\n add_z = ((self[G] + self[B] + self[E] + self[H]) / 4) + RAND(K)\n self[K] = normalize(add_z, K)\n\n # L\n add_z = ((self[F] + self[E] + self[C] + self[I]) / 4) + RAND(L)\n self[L] = normalize(add_z, L)\n\n # M\n add_z = ((self[E] + self[H] + self[I] + self[D]) / 4) + RAND(M)\n self[M] = normalize(add_z, M)\n\n # N = (K + A + J + F)/4 + RAND(d)\n # O = (L + A + G + J)/4 + RAND(d)\n # P = (J + G + K + E)/4 + RAND(d)\n # Q = (F + J + E + L)/4 + RAND(d)\n\n # N\n add_z = ((self[K] + self[A] + self[J] + self[F]) / 4) + RAND(N)\n self[N] = normalize(add_z, N)\n\n # O\n add_z = ((self[L] + self[A] + self[G] + self[J]) / 4) + RAND(O)\n self[O] = normalize(add_z, O)\n\n # P\n add_z = ((self[J] + self[G] + self[K] + self[E]) / 4) + RAND(P)\n self[P] = normalize(add_z, P)\n\n # Q\n add_z = ((self[F] + self[J] + self[E] + self[L]) / 4) + RAND(Q)\n self[Q] = normalize(add_z, Q)\n\n # N = (A + J + F)/3 + RAND(d)\n # O = (A + G + J)/3 + RAND(d)\n\n # N\n add_z = ((self[A] + self[J] + self[F]) / 3) + RAND(N)\n self[N] = normalize(add_z, N)\n\n # O\n add_z = ((self[A] + self[G] + self[J]) / 3) + RAND(N)\n self[O] = normalize(add_z, O)\n\n\n ### Start recurse for diamond alg\n square_diamond(A[0], A[1], dsize, strong)\n square_diamond(G[0], G[1], dsize, strong)\n square_diamond(F[0], F[1], dsize, strong)\n square_diamond(E[0], E[1], dsize, strong)\n\n # align\n def align_it(start, strong):\n \"\"\"Deprecated\n \"\"\"\n water = 0\n #map3d = self.copy()\n size = (abs(start)*2) + self.size - strong\n start = start + strong\n coords_map = []\n for x in xrange(start,size):\n for y in xrange(start,size):\n coords_map.append( (x, y) )\n\n random.shuffle(coords_map)\n\n lens = strong * (3.0 ** 2)\n for coord in coords_map:\n average = 0.0\n x, y = coord\n #rounds = self.get_round_xy_land(coord, -strong, False)\n #for r_coord in rounds:\n #average += self[r_coord]\n for x in xrange(-strong, strong+1):\n for y in xrange(-strong, strong+1):\n average += self[x, y]\n\n height = int(round(average / lens))\n #height = int(round(average / float(len(rounds))))\n if self[coord] <= water and height > water:\n height = 
water\n elif self[coord] > water and height <= water:\n height = water + 1\n\n #print self[coord], '->', height\n\n self[coord] = height\n\n if self.config.add_pre_heights:\n print 'Add heights start'\n add_heights()\n print 'Diamond-Square start'\n for x in xrange(1):\n square_diamond(\n sx = 0,\n sy = 0,\n size = self.size, strong=100)", "def generate_materials_dict(self):\n c = 299792458.0\n w_mat = 2 * np.pi * c / self.l_mat - self.w0\n l2_mat = (self.l_mat * 1e6) ** 2\n\n n_air = 1 + 0.05792105 * l2_mat / (238.0185 * l2_mat - 1) + 0.00167917 * l2_mat / (57.362 * l2_mat - 1)\n air_ip = interp1d(w_mat, n_air, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['air'] = air_ip\n\n n_fs = np.sqrt(1 + 0.6961663 * l2_mat / (l2_mat - 0.0684043 ** 2) +\n 0.4079426 * l2_mat / (l2_mat - 0.1162414 ** 2) +\n 0.8974794 * l2_mat / (l2_mat - 9.896161 ** 2))\n fs_ip = interp1d(w_mat, n_fs, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['fs'] = fs_ip\n\n n_mgf2 = np.sqrt(1 + 0.48755108 * l2_mat / (l2_mat - 0.04338408 ** 2) +\n 0.39875031 * l2_mat / (l2_mat - 0.09461442 ** 2) +\n 2.3120353 * l2_mat / (l2_mat - 23.793604 ** 2))\n mgf2_ip = interp1d(w_mat, n_mgf2, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['mgf2'] = mgf2_ip\n\n n_sapphire_o = np.sqrt(1 + 1.4313493 * l2_mat / (l2_mat - 0.0726631 ** 2) +\n 0.65054713 * l2_mat / (l2_mat - 0.1193242 ** 2) +\n 5.3414021 * l2_mat / (l2_mat - 18.028251 ** 2))\n sapphire_o_ip = interp1d(w_mat, n_sapphire_o, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['sapphire_o'] = sapphire_o_ip\n\n n_sapphire_e = np.sqrt(1 + 1.5039759 * l2_mat / (l2_mat - 0.0740288 ** 2) +\n 0.55069141 * l2_mat / (l2_mat - 0.1216529 ** 2) +\n 6.5927379 * l2_mat / (l2_mat - 20.072248 ** 2))\n sapphire_e_ip = interp1d(w_mat, n_sapphire_e, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['sapphire_e'] = sapphire_e_ip\n\n n_bbo_o = np.sqrt(2.7405 + 0.0184 / (l2_mat - 0.0179) - 0.0155 * l2_mat)\n bbo_o_ip = interp1d(w_mat, n_bbo_o, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['bbo_o'] = bbo_o_ip\n\n n_bbo_e = np.sqrt(2.3730 + 0.0128 / (l2_mat - 0.0156) - 0.0044 * l2_mat)\n bbo_e_ip = interp1d(w_mat, n_bbo_e, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['bbo_e'] = bbo_e_ip\n\n materials_files = os.listdir(self.materials_path)\n logger.info(\"Found {0:d}\".format(materials_files.__len__()))\n for mat_file in materials_files:\n logger.debug(mat_file)\n self.read_material(''.join((self.materials_path, '/', mat_file)))", "def setup_terrain(self):\r\n self.terrain_scale = LVector3(512, 512, 100)\r\n self.terrain_pos = LVector3(-256, -256, -70)\r\n # sample values for a 4096 x 4096px heightmap.\r\n #self.terrain_scale = LVector3(4096, 4096, 1000)\r\n #self.terrain_pos = LVector3(-2048, -2048, -70)\r\n \"\"\"\r\n Diamond_subdivision is an alternating triangulation scheme and may\r\n produce better results.\r\n \"\"\"\r\n use_diamond_subdivision = True\r\n \r\n \"\"\"\r\n Construct the terrain\r\n Without scaling, any ShaderTerrainMesh is 1x1x1 units.\r\n \"\"\"\r\n self.terrain_node = ShaderTerrainMesh()\r\n \"\"\"\r\n Set a heightfield, the heightfield should be a 16-bit png and\r\n have a quadratic size of a power of two.\r\n \"\"\"\r\n heightfield = Texture()\r\n heightfield.read(self.heightfield_fn)\r\n heightfield.set_keep_ram_image(True) \r\n self.terrain_node.heightfield = heightfield\r\n \r\n # 
Display characteristic values of the heightfield texture\r\n #minpoint, maxpoint, avg = LPoint3(), LPoint3(), LPoint3()\r\n #heightfield.calc_min_max(minpoint, maxpoint)\r\n #heightfield.calc_average_point(avg, 0.5, 0.5, 0.5)\r\n #print(\"avg: {} min: {} max: {}\".format(avg.x, minpoint.x, maxpoint.x))\r\n\r\n \"\"\"\r\n Set the target triangle width. For a value of 10.0 for example,\r\n the ShaderTerrainMesh will attempt to make every triangle 10 pixels\r\n wide on screen.\r\n \"\"\"\r\n self.terrain_node.target_triangle_width = 10.0\r\n if use_diamond_subdivision:\r\n \"\"\"\r\n This has to be specified before calling .generate()\r\n The default is false.\r\n \"\"\"\r\n load_prc_file_data(\"\", \"stm-use-hexagonal-layout true\")\r\n \r\n self.terrain_node.generate()\r\n \"\"\"\r\n Attach the terrain to the main scene and set its scale. With no scale\r\n set, the terrain ranges from (0, 0, 0) to (1, 1, 1)\r\n \"\"\"\r\n self.terrain = self.render.attach_new_node(self.terrain_node)\r\n self.terrain.set_scale(self.terrain_scale)\r\n self.terrain.set_pos(self.terrain_pos)\r\n \"\"\"\r\n Set a vertex and a fragment shader on the terrain. The\r\n ShaderTerrainMesh only works with an applied shader.\r\n \"\"\"\r\n terrain_shader = Shader.load(Shader.SL_GLSL, \r\n \"samples/shader-terrain/terrain.vert.glsl\", \r\n \"samples/shader-terrain/terrain.frag.glsl\")\r\n self.terrain.set_shader(terrain_shader)\r\n self.terrain.set_shader_input(\"camera\", base.camera)\r\n # Set some texture on the terrain\r\n grass_tex = self.loader.load_texture(\r\n \"samples/shader-terrain/textures/grass.png\")\r\n grass_tex.set_minfilter(SamplerState.FT_linear_mipmap_linear)\r\n grass_tex.set_anisotropic_degree(16)\r\n self.terrain.set_texture(grass_tex)\r\n\r\n \"\"\"\r\n Set up the DynamicHeightfield (it's a type of PfmFile). We load the\r\n same heightfield image as with ShaderTerrainMesh.\r\n \"\"\"\r\n self.DHF = DynamicHeightfield()\r\n self.DHF.read(self.heightfield_fn)\r\n \"\"\"\r\n Set up empty PfmFiles to prepare stuff in that is going to\r\n dynamically modify our terrain.\r\n \"\"\"\r\n self.StagingPFM = PfmFile()\r\n self.RotorPFM = PfmFile()\r\n \r\n \"\"\"\r\n Set up the BulletHeightfieldShape (=collision terrain) and give it\r\n some sensible physical properties.\r\n \"\"\"\r\n self.HFS = BulletHeightfieldShape(self.DHF, self.terrain_scale.z,\r\n STM=True)\r\n if use_diamond_subdivision:\r\n self.HFS.set_use_diamond_subdivision(True)\r\n HFS_rigidbody = BulletRigidBodyNode(\"BulletTerrain\")\r\n HFS_rigidbody.set_static(True)\r\n friction = 2.0\r\n HFS_rigidbody.set_anisotropic_friction(\r\n LVector3(friction, friction, friction/1.3))\r\n HFS_rigidbody.set_restitution(0.3)\r\n HFS_rigidbody.add_shape(self.HFS)\r\n self.world.attach(HFS_rigidbody)\r\n \r\n HFS_NP = NodePath(HFS_rigidbody)\r\n HFS_NP.reparent_to(self.worldNP)\r\n \"\"\"\r\n This aligns the Bullet terrain with the ShaderTerrainMesh rendered\r\n terrain. 
It will be exact as long as the terrain vertex shader from\r\n the STM sample is used and no additional tessellation shader.\r\n For Bullet (as for other physics engines) the origin of objects is at\r\n the center.\r\n \"\"\"\r\n HFS_NP.set_pos(self.terrain_pos + self.terrain_scale/2)\r\n HFS_NP.set_sx(self.terrain_scale.x / heightfield.get_x_size())\r\n HFS_NP.set_sy(self.terrain_scale.y / heightfield.get_y_size())\r\n \r\n # Disables Bullet debug rendering for the terrain, because it is slow.\r\n #HFS_NP.node().set_debug_enabled(False)\r\n \r\n \"\"\"\r\n Finally, link the ShaderTerrainMesh and the BulletHeightfieldShape to\r\n the DynamicHeightfield. From now on changes to the DynamicHeightfield\r\n will propagate to the (visible) ShaderTerrainMesh and the (collidable)\r\n BulletHeightfieldShape.\r\n \"\"\"\r\n self.HFS.set_dynamic_heightfield(self.DHF)\r\n self.terrain_node.set_dynamic_heightfield(self.DHF)", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "def InitLightBasic(self):\r\n\t\t\r\n\t\taLight = AmbientLight(\"AmbientLight\")\r\n\t\taLight.setColor(Vec4(0.3, 0.3, 0.3, 1))\r\n\t\trender.setLight(render.attachNewNode(aLight))\r\n\t\r\n\t\tdLight1 = DirectionalLight(\"DirectionalLight1\")\r\n\t\tdLight1.setColor(Vec4(0.65, 0.6, 0.6, 1))\t\t\r\n\t\tdLight1NP = render.attachNewNode(dLight1)\r\n\t\tdLight1NP.setHpr(100, -40, 0)\r\n\t\trender.setLight(dLight1NP)\r\n\t\r\n\t\tdLight2 = DirectionalLight(\"DirectionalLight2\")\r\n\t\tdLight2.setColor(Vec4(0.35, 0.35, 0.3, 1))\r\n\t\tdLight2NP = render.attachNewNode(dLight2)\r\n\t\tdLight2NP.setHpr(150, -60, 0)\r\n\t\trender.setLight(dLight2NP)", "def _updateLight(self):\n azimuth = numpy.radians(self._azimuth)\n delta = numpy.pi/2. 
- numpy.radians(self._altitude)\n if delta == 0.: # Avoids zenith position\n delta = 0.0001\n z = - numpy.sin(delta) * numpy.cos(azimuth)\n x = - numpy.sin(delta) * numpy.sin(azimuth)\n y = - numpy.cos(delta)\n self._light.direction = x, y, z", "def prepare_map(self):\n for y, row in enumerate(self.contents):\n for x, tile in enumerate(row):\n bm = self.get_tile(tile)\n self.image[\n y * TILE_SIZE : (y + 1) * TILE_SIZE,\n x * TILE_SIZE : (x + 1) * TILE_SIZE,\n ] = bm", "def make_irac_lightmap(id_keep, hr_segmap, hr_mask, irac_psf, irac_drz, irac_output, blur_threshold=0.1, sigma=1.0):\n # First step, zero-out the non cluster members\n mask = filter_segmap(hr_segmap, id_keep, hr_mask, blur_kernel=irac_psf, \n threshold=blur_threshold)\n # Now we have a mask image in high-res, drizzle the pixels onto the low-res\n # pixel grid\n if os.path.exists(\"irac_mask.fits\"):\n os.system('rm irac_mask.fits')\n drizzle_mask(hr_mask, irac_drz, \"irac_mask.fits\")\n irac_input = pyfits.getdata(irac_drz)\n irac_mask = pyfits.getdata(\"irac_mask.fits\")\n irac_map = np.where(irac_mask>0, irac_input, 0.)\n # Also smooth the output light map with a Gaussian kernel\n if sigma > 0:\n print \"Smoothing the IRAC mask...\"\n irac_map = filters.gaussian_filter(irac_map, sigma)\n irac_hdr = pyfits.getheader(irac_drz)\n os.system('rm %s' % irac_output)\n pyfits.append(irac_output, data=irac_map, header=irac_hdr)\n print \"Done.\"", "def generate_world(world_seed, biome_min, biome_max, w, h):\n\n while True:\n\n try:\n\n # Set the initial seed for the random module (random.seed())\n seed(world_seed)\n\n # Create a blank map (2D list filled with '0' strings\n world = [[0 for y in range(h)] for x in range(w)]\n # Generates the random values for the terrain construction\n terrain = [randrange(20) + 40 for _ in range(w)]\n\n #Empty biome map\n biomes = []\n\n #Generates biomes\n for __ in range(w//biome_min):\n\n #Biome at cursor\n biome_select = choice(list(biome_data))\n\n #Biomes size\n for _ in range(randint(biome_min, biome_max)):\n biomes.append(biome_select)\n\n #World size met\n if len(biomes) >= w:\n biomes = biomes[:w] #Truncate selection\n break\n\n\n # ----- Construct the Terrain\n # Counter that changes dynamically to check through all blocks in the terrain list\n cur_pos = 0\n # Runs through all the generated numbers in a while loop\n while cur_pos < w:\n\n # print(\".\", end=\"\")\n\n # Check to see if terrain gap is too large\n\n if abs(terrain[cur_pos] - terrain[cur_pos - 1]) > biome_data[str(biomes[cur_pos])][\"maxh\"]: # if terrain gap is larger than threshhold (too big)\n\n for n in range(randint(biome_data[str(str(biomes[cur_pos]))][\"minx\"], biome_data[str(str(biomes[cur_pos]))][\"maxx\"])):\n # Insert a new value into the terrain list between the values that are too far apart\n terrain.insert(cur_pos, (terrain[cur_pos] + terrain[cur_pos - 1]) // 2)\n\n else: # Difference between the two blocks is not too big\n\n # Check next block\n cur_pos += 1\n\n # ----- Transfer Terrain To Empty World\n # Run through every space in the empty world\n for x in range(len(world)): # runs through each level\n for y in range(len(world[x])): # runs through each individual space\n\n # Generates structures\n if y > terrain[x]:\n\n #Top layer\n if y - terrain[x] == 1:\n\n #Sets the layer with block specified in biome config\n world[x][y] = block_lookup[biome_data[biomes[x]][\"layer\"][\"top\"]]\n\n if randint(0, 10) == 0 and x + 10 < w:\n world = generate_structure(x, y - 1, world, 
choice(biome_data[biomes[x]][\"structure\"]))\n\n #Middle layer\n elif y - terrain[x] < randint(3, 8):\n world[x][y] = block_lookup[biome_data[biomes[x]][\"layer\"][\"middle\"]]\n\n #Base\n else:\n world[x][y] = block_lookup[biome_data[biomes[x]][\"layer\"][\"lower\"]]\n\n #Generate ores\n # Coal\n if 10 + terrain[x] > y > 5 + terrain[x] and randint(0, 200) == 0:\n for cluster in range(randint(3, 10)):\n world[x + randint(-4, 4)][y + randint(-4, 4)] = block_lookup[\"Coal Ore\"]\n\n # Iron\n if 30 + terrain[x] > y > 20 + terrain[x] and randint(0, 200) == 0:\n\n for cluster in range(randint(3, 6)):\n world[x + randint(-4, 4)][y + randint(-4, 4)] = block_lookup[\"Iron Ore\"]\n\n # Gold\n if 80 > y > 65 and randint(0, 400) == 0:\n for cluster in range(randint(3, 6)):\n world[x + randint(-4, 4)][y + randint(-4, 4)] = block_lookup[\"Gold Ore\"]\n\n # Diamonds\n if 80 > y > 70 and randint(0, 500) == 0:\n for cluster in range(randint(1, 5)):\n world[x + randint(-3, 3)][y + randint(-3, 3)] = block_lookup[\"Diamond Ore\"]\n\n # Bedrock\n if y > 92 or y > 87 and randint(0, 3) == 0:\n world[x][y] = block_lookup[\"Bed Rock\"]\n\n # Last edit, adding extras to the top of the world to prevent problems\n world = [[0] * 40 + x for x in world]\n\n # Return the world object for use\n return np.array(world)\n\n except:\n world_seed += '1'", "def fill_sinks(self, evolved_elevation):\n\n # assign variables\n depressionless_elevation = 'depressionless_elevation'\n direction = 'flow_direction'\n\n # fill sinks\n gscript.run_command('r.fill.dir',\n input=evolved_elevation,\n output=depressionless_elevation,\n direction=direction,\n overwrite=True)\n\n # update elevation\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{evolved_elevation} = {depressionless_elevation}\".format(\n evolved_elevation=evolved_elevation,\n depressionless_elevation=depressionless_elevation),\n overwrite=True)\n gscript.run_command(\n 'r.colors',\n map=evolved_elevation,\n color='elevation')\n\n # remove temporary maps\n gscript.run_command(\n 'g.remove',\n type='raster',\n name=['depressionless_elevation',\n 'flow_direction'],\n flags='f')\n\n return evolved_elevation", "def update_hpx_skymap_allsky(map_in, map_out):\n if map_out is None:\n in_hpx = map_in.hpx\n out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,\n None, in_hpx.ebins, None, in_hpx.conv, None)\n data_out = map_in.expanded_counts_map()\n print(data_out.shape, data_out.sum())\n map_out = HpxMap(data_out, out_hpx)\n else:\n map_out.data += map_in.expanded_counts_map()\n return map_out", "def _propagate_material_settings(self, bm, layer):\n state = layer.state\n\n # Shade Flags\n if not bm.use_mist:\n state.shadeFlags |= hsGMatState.kShadeNoFog # Dead in CWE\n state.shadeFlags |= hsGMatState.kShadeReallyNoFog\n\n if bm.use_shadeless:\n state.shadeFlags |= hsGMatState.kShadeWhite\n\n # Colors\n layer.ambient = utils.color(bpy.context.scene.world.ambient_color)\n layer.preshade = utils.color(bm.diffuse_color)\n layer.runtime = utils.color(bm.diffuse_color)\n layer.specular = utils.color(bm.specular_color)\n\n layer.specularPower = min(100.0, float(bm.specular_hardness))\n layer.LODBias = -1.0 # Seems to be the Plasma default\n\n if bm.emit > 0.0:\n # Use the diffuse colour as the emit, scaled by the emit amount\n # (maximum 2.0, so we'll also scale that by 0.5)\n emit_scale = bm.emit * 0.5\n layer.ambient = hsColorRGBA(bm.diffuse_color.r * emit_scale,\n bm.diffuse_color.g * emit_scale,\n bm.diffuse_color.b * emit_scale,\n 1.0)", "def 
initialise_shadow_map(self):\n self.shadow_map = np.zeros( self.x_len + 1, np.int8)\n \n for i in range(1, self.x_len + 1):\n self.shadow_map[i] = int((math.tan(math.radians(15)) * i) * (1 / self.slab_ratio))", "def _update_farness_map(self,ind):", "def lightness_correction(self):\n points = self.color_lookup_table_points\n lightness_max_value = math.sqrt(3 * (255**2))\n deadpool = list()\n for index, point in enumerate(points[0]):\n point = self.get_value_tuple(index)\n lightness = int(math.sqrt(point[0]**2 + point[1]**2 + point[2]**2) * 255 / lightness_max_value)\n if not self.to_dark < lightness < self.to_bright:\n deadpool.append(index)\n self.color_lookup_table_points = (np.delete(points[0], deadpool),\n np.delete(points[1], deadpool),\n np.delete(points[2], deadpool))\n self.point_count = len(self.color_lookup_table_points[0])", "def set_locations():\n STATUS['locations']['monster'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['monster'][1] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][1] = generate_random_coord(STATUS['grid_size'])", "def set_block(self, coords, block):\n\n x, y, z = coords\n index, section_y = divmod(y, 16)\n\n column = x * 16 + z\n\n if self.get_block(coords) != block:\n self.sections[index].set_block((x, section_y, z), block)\n\n if not self.populated:\n return\n\n # Regenerate heightmap at this coordinate.\n if block:\n self.heightmap[column] = max(self.heightmap[column], y)\n else:\n # If we replace the highest block with air, we need to go\n # through all blocks below it to find the new top block.\n height = self.heightmap[column]\n if y == height:\n for y in range(height, -1, -1):\n if self.get_block((x, y, z)):\n break\n self.heightmap[column] = y\n\n # Do the blocklight at this coordinate, if appropriate.\n if block in glowing_blocks:\n composite_glow(self.blocklight, glowing_blocks[block],\n x, y, z)\n bl = [clamp(light, 0, 15) for light in self.blocklight]\n self.blocklight = array(\"B\", bl)\n\n # And the skylight.\n glow = max(self.get_skylight((nx, ny, nz))\n for nx, nz, ny in iter_neighbors((x, z, y)))\n self.set_skylight((x, y, z), neighboring_light(glow, block))\n\n self.dirty = True\n self.damage(coords)", "def get_alms(maps=None,\n mask=None,\n maplabel='353',\n showI=False,\n pol=True,\n intensity=True,\n rewrite=False,\n writemap=False,\n savealms=True,\n masktype='PowerSpectra',#'GalPlane2',\n lmax=100):\n\n\n newname = 'alms_lmax{}_mask_{}__'.format(lmax, masktype) + maplabel + '.npy'\n \n\n \n if not os.path.exists(data_path + newname) or rewrite:\n print 'alms file {} does not exist; calculating alms...'.format(newname)\n if mask is None:\n if masktype == 'PowerSpectra':\n maskname = 'HFI_PowerSpect_Mask_2048_R1.10.fits'\n maskfield = 0\n elif masktype == 'GalPlane60':\n maskname = 'HFI_Mask_GalPlane-apo0_2048_R2.00.fits',\n maskfield = 2\n elif masktype == 'no':\n maskname = 'HFI_PowerSpect_Mask_2048_R1.10.fits'\n maskfield = 0\n mask = hp.read_map(data_path + maskname, field=maskfield)\n if masktype == 'no':\n mask = mask*0. 
+ 1.\n masknside = hp.get_nside(mask)\n if maps is None:\n Imap,Qmap,Umap = hp.read_map( data_path + 'HFI_SkyMap_{}_2048_R2.02_full.fits'.format(maplabel),hdu=1, field=(0,1,2) )\n mapnside = hp.get_nside(Imap)\n else:\n if intensity and pol:\n Imap = maps[0]\n Qmap = maps[1]\n Umap = maps[2]\n mapnside = hp.get_nside(Imap)\n elif intensity and not pol:\n Imap = maps[0]\n mapnside = hp.get_nside(Imap)\n elif pol and not intensity:\n Qmap = maps[0]\n Umap = maps[1]\n mapnside = hp.get_nside(Qmap)\n \n if masknside != mapnside:\n print 'adjusting mask to match map resolution...'\n mask = hp.pixelfunc.ud_grade(mask, nside_out=mapnside)\n\n if showI:\n hp.mollview(Imap*mask)\n\n alms = []\n if intensity:\n Imap = Imap*mask\n Tlm = hp.map2alm(Imap, lmax=lmax)\n alms.append(Tlm)\n if pol:\n Qmap *= mask\n Umap *= mask\n Elm,Blm = hp.map2alm_spin( (Qmap,Umap), 2, lmax=lmax )\n alms.append(Elm)\n alms.append(Blm)\n\n #this will only work if get_intensity and get_pol\n if writemap and intensity and pol:\n hp.fitsfunc.write_map( data_path + newname, [Imap, Qmap, Umap])\n \n if savealms and intensity and pol:\n np.save(data_path + newname, alms)\n\n return alms\n\n\n else:\n alms = np.load(data_path + newname, 'r')\n if intensity and pol:\n return alms[0], alms[1], alms[2]\n else:\n if intensity:\n return alms[0]\n if pol:\n return alms[1], alms[2]" ]
[ "0.63644713", "0.618652", "0.5876813", "0.58216393", "0.56143755", "0.54006845", "0.5389068", "0.53830737", "0.53595954", "0.53500336", "0.5324122", "0.5251416", "0.5230712", "0.5167854", "0.51654243", "0.5148389", "0.51166826", "0.51093215", "0.49948815", "0.49848995", "0.49513847", "0.49305776", "0.49282858", "0.4925614", "0.4922874", "0.4921339", "0.4913357", "0.4904555", "0.48430622", "0.48165178" ]
0.65239954
0
Determine whether any damage is pending on this chunk.
def is_damaged(self): return self.all_damaged or bool(self.damaged)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_damaged(self):\n return self.damaged", "def damaged(self) -> bool:\n return len(self._damaged_cells) > 0", "def can_take_damage(self):\n result = True\n if self.side_effects[\"shield\"] > 0:\n result = False\n return result", "def is_bankrupted(self):\n return self.status == self.PLAYER_BANKRUPT", "def is_pending(self):\n return self.is_disarming() or self.is_arming()", "def is_dead(self):\n return self.hp <= 0", "def has_pending_packets_to_be_sent(self):\n return self.num_packets != 0", "def hasCustomEffects(self):\n return not self.getHandle().effects.isEmpty()", "def is_dead(self):\n return self.hearts <= 0", "def is_hungry(self):\r\n if self._hunger > 0:\r\n return True\r\n else:\r\n return False", "def hasEnergyExpended(self, flags):\r\n return (flags & 0x08) != 0", "def is_out_of_stock(self) -> bool:\n return self.on_hand == 0", "def isCombatOver(self):\n\t\treturn len(set([creature.type for creature in self.positionToCreature.values()])) <= 1", "def data_available(self):\n return (self.status & 0x08) != 0", "def need_attention(self):\n msg = [\"not staged\", \"behind\", \"ahead\", \"Untracked\"]\n status_msg = self.status()\n if any([each in status_msg for each in msg]):\n return True\n return False", "def is_over(self):\n return self.is_dead", "def is_over(self):\n return self.is_dead", "def is_over(self):\n return self.is_dead", "def take_damage(self, damage):\n if random.random() < self.chance_dodge:\n self.set_health(self.health - damage)\n return True\n return False", "def still_in_hand(self):\n return len(self.hand.cards)!=0", "def is_dead(self):\n if self.killer:\n if self.killer.stype == 'fire' and not (self.killer in self.pjs.fires):\n return True\n elif self.killer.stype == 'enemy' and self.timeout == 0:\n return True\n else:\n return False", "def is_pending(self):\n status = self.get_status()\n return status[\"status\"] == 3", "def is_attacking(self) -> bool:\n return self.orders and self.orders[0].ability.id in (\n AbilityId.ATTACK,\n AbilityId.ATTACK_ATTACK,\n AbilityId.ATTACK_ATTACKTOWARDS,\n AbilityId.ATTACK_ATTACKBARRAGE,\n AbilityId.SCAN_MOVE,\n )", "def damage(self):\n if not self.damage_mode and not self.attack_mode and not self.death_mode:\n self.damage_mode = True\n self.cut_frame_update = 0", "def deal_dmg(self):\n return self.damage", "def is_few_remaining(self) -> bool:\n return self.on_hand <= self.warn_limit", "def has_flush(self):\n self.suit_hist()\n for val in self.suits.values():\n if val >= 5:\n self.rank_per_hand['4'] = \"flush\"\n return True\n return False", "def check_loss(self):\n return POKEMON in self.get_game()", "def has_flush(self):\n self.suit_hist()\n for val in self.suits.values():\n if val >= 5:\n return True\n return False", "def is_over(self):\n return self._is_dead" ]
[ "0.7358269", "0.72853124", "0.69238526", "0.64603144", "0.6391741", "0.6032398", "0.6017207", "0.6011936", "0.59953684", "0.5980947", "0.59143126", "0.58919996", "0.5880647", "0.5855917", "0.5853071", "0.578088", "0.578088", "0.578088", "0.5766825", "0.576636", "0.5748378", "0.5747514", "0.5740845", "0.5725201", "0.5720666", "0.5713729", "0.56957763", "0.5688015", "0.5685584", "0.5685278" ]
0.732875
1
Make a packet representing the current damage on this chunk. This method is not private, but some care should be taken with it, since it wraps some fairly cryptic internal data structures. If this chunk is currently undamaged, this method will return an empty string, which should be safe to treat as a packet. Please check with `is_damaged()` before doing this if you need to optimize this case. To avoid extra overhead, this method should really be used in conjunction with `Factory.broadcast_for_chunk()`. Do not forget to clear this chunk's damage! Callers are responsible for doing this. >>> packet = chunk.get_damage_packet() >>> factory.broadcast_for_chunk(packet, chunk.x, chunk.z) >>> chunk.clear_damage()
def get_damage_packet(self): if self.all_damaged: # Resend the entire chunk! return self.save_to_packet() elif not self.damaged: # Send nothing at all; we don't even have a scratch on us. return "" elif len(self.damaged) == 1: # Use a single block update packet. Find the first (only) set bit # in the damaged array, and use it as an index. coords = next(iter(self.damaged)) block = self.get_block(coords) metadata = self.get_metadata(coords) x, y, z = coords return make_packet("block", x=x + self.x * 16, y=y, z=z + self.z * 16, type=block, meta=metadata) else: # Use a batch update. records = [] for coords in self.damaged: block = self.get_block(coords) metadata = self.get_metadata(coords) x, y, z = coords record = x << 28 | z << 24 | y << 16 | block << 4 | metadata records.append(record) data = "".join(pack(">I", record) for record in records) return make_packet("batch", x=self.x, z=self.z, count=len(records), data=data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def damage(self, damage):\n return self.damage", "def damage(self):\n out = (self.blurbs[self.state][\"damage\"])\n self.next_state(\"damage\")\n return out", "def get_damage(self):\n return self.__damage", "def get_damage():\n\n return character['Damage']", "def ship_took_damage(self, damage: Damage):\n pass", "def take_damage(self, dmg, dtype = 1):\n self.game.hit_sound.play()\n \n #DR% = 1 - (100 / x). \n damageMultiplier = 100.0 / float(self.defense)\n #Apply defense buffs/debuffs\n #calculate damage:\n dmg -= self.absorbtion\n dmg *= damageMultiplier\n #apply damage\n self.hp[0] -= dmg", "def get_damage(self, amount: float) -> None:\n self.health = self.health - amount", "def get_damage(self, amount: float) -> None:\n self.health = self.health - amount * self.DMG_TO_VEHICLE\n rnd_operator = random.choice(self.__operators)\n rnd_operator.get_damage(amount * self.DMG_TO_ONE_OPER)\n for operator in self.__operators:\n if operator != rnd_operator:\n operator.get_damage(amount * self.DMG_TO_OPER)\n self.estimate_total_health()\n self.check_is_alive()", "def direct_damage(self, pokemon, damage):\n if damage < 1:\n damage = 1 # always do at least 1 damage\n else:\n damage = int(damage)\n\n pokemon.hp -= damage\n if pokemon.hp <= 0:\n self.faint(pokemon, Cause.DIRECT)", "def deal_dmg(self):\n return self.damage", "def packet(self):\n return self.server.packet(context=self.ctx)", "def take_damage(self, damage):\n alive_units = self.get_alive_units()\n units_count = len(alive_units)\n if units_count == 0:\n return\n damage_for_unit = damage / units_count\n for unit in alive_units:\n unit.take_damage(damage_for_unit)", "def attack(self):\n\t if self.damage == 0:\n\t\treturn None\n\t elif self.name == \"die\":\n\t roll = random.randint(1,20)\n\t if roll == 1:\n\t return 0\n\t else:\n\t return 1\n\t elif self.damage == 1 or self.damage == 2:\n\t\treturn self.damage\n\t elif self.damage == 3:\n\t\treturn random.randint(3,5)\n\t elif self.damage == -4:\n\t return 4\n\t elif self.damage == 10:\n\t\trandomInt = random.randint(1,4)\n\t\tif randomInt == 1:\n\t\t return 10\n\t\telse:\n\t\t return 0\n\t else:\n\t return self.damage", "def take_damage(self, damage):\n damage /= len(self.__units)\n for i in self.__units:\n i.take_damage(damage)", "def take_damage(self, damage: int, attacker: str) -> tuple:\n if self.hp < damage:\n self.hp = damage\n new_hp = self.hp - damage\n self.hp = new_hp\n if self.hp == 0:\n return True, f'{self.name} has been killed by {attacker}!'\n return False, f'{self.name} was hit for {damage} HP by {attacker} and now has {self.hp} HP left!'", "def take_damage(self, damage):\n if self.hp - damage <= 0:\n self.hp = 0\n self.die()\n else:\n self.hp -= damage", "def dealdamage(self, pokemon, movedata):\n mod = getmodifier(movedata['type'].lower(), pokemon.types)\n if movedata['category'].lower() == 'physical':\n damage = (((2*self.level/5+2)*movedata['power']*self.currentStats['ATK']/pokemon.currentStats['DEF'])/50+2)*mod\n if movedata['category'].lower() == 'special':\n damage = (((2*self.level/5+2)*movedata['power']*self.currentStats['SPATK']/pokemon.currentStats['SPDEF'])/50+2)*mod\n print(f\"HP: {pokemon.currentStats['HP']}, Damage: {damage}\")\n pokemon.takedamage(damage)\n if mod == 0:\n return [0, damage]\n if mod == 0.25:\n return [1, damage]\n if mod == 0.5:\n return [2, damage]\n if mod == 1:\n return [3, damage]\n if mod == 2:\n return [4, damage]\n if mod == 4:\n return [5, damage]", "def playerrawdmg(self):\n playerstr = globalvalues.p1.getstrength()\n # see 
combatvaluetable.xlsx to see some possible values of\n # playerrawdamage. Base formula is below:\n #\n rawdmg = int((playerstr - 4) * 102 * 0.32)\n\n # Things that will deviate the amount of damage done.\n level = globalvalues.p1.getlevel() - globalvalues.ai.getstatus()[0]\n modvalue = float(1 + level * 0.05)\n rngfactor = float(1 + float(random.randint(85, 105)) / 100)\n\n return int(rawdmg * modvalue * rngfactor)", "def getDamage(self):\n \n weapon_dmg = self.weapon.getDamage()\n cat_bonus, att_cats = self.getCatBonus(self.attacking_kittens,\n \"attacking\")\n true_dmg = weapon_dmg + cat_bonus + self.getBonusDamageFromInsanity()\n return true_dmg, att_cats", "def take_damage(self, damage):\n attack = damage - (0.05 + self.__experience / 1000)\n self.set_health(self.get_health - attack)", "def Hit(self, damage):\n self.health -= damage", "def trump(self):\n attack = None\n other = self.enemy.enemy_trump(self.other_hand.get_hand())\n me = self.player.player_trump(self.my_hand.get_hand())\n if other == None and me != None:\n attack = 0\n else:\n if other != None and me == None:\n attack = 1\n else:\n if other == None and me == None:\n attack = randint(0, 1)\n else:\n if other.weight < me.weight:\n attack = 1\n else:\n attack = 0\n return attack", "def is_damaged(self):\n return self.damaged", "def get_attack_damage(self, by: str):\n if by == 'spell':\n if self.spells:\n # get the spell with the maximum damage that we have enough mana for\n available_spells = [spell for spell in self.spells if self._mana >= spell.mana_cost]\n if not available_spells:\n return None\n\n spell = max(available_spells, key= lambda spell: spell.damage) # type: Spell\n if spell:\n return spell\n else:\n print('{} does not know any spells.'.format(self.name))\n return None\n else:\n return self.weapon.damage", "def save_to_packet(self):\n\n mask = 0\n packed = []\n\n ls = segment_array(self.blocklight)\n\n for i, section in enumerate(self.sections):\n if any(section.blocks):\n mask |= 1 << i\n packed.append(section.blocks.tostring())\n\n for i, section in enumerate(self.sections):\n if mask & 1 << i:\n packed.append(pack_nibbles(section.metadata))\n\n for i, l in enumerate(ls):\n if mask & 1 << i:\n packed.append(pack_nibbles(l))\n\n for i, section in enumerate(self.sections):\n if mask & 1 << i:\n packed.append(pack_nibbles(section.skylight))\n\n # Fake the biome data.\n packed.append(\"\\x00\" * 256)\n\n packet = make_packet(\"chunk\", x=self.x, z=self.z, continuous=True,\n primary=mask, add=0x0, data=\"\".join(packed))\n return packet", "def attack(self):\n return random.randint(self.max_damage//2, self.max_damage)", "def take_damage(self, damage):\n if random.random() < self.chance_dodge:\n self.set_health(self.health - damage)\n return True\n return False", "def absorb(self, damage):\n\n dealt_damage = damage\n\n if self.health > 0:\n delta = min(self.health, damage) * self.absorb_coeff\n self.health = int(self.health - delta)\n dealt_damage = (1.0 - self.absorb_coeff) * damage\n\n return dealt_damage", "def getDamage(self, player, is_random=True):\n \n if \"restrained\" in self.debuffs:\n return 0, 0\n \n mitigation, num_cats = player.getCatBonus(player.defending_kittens,\n \"defending\")\n raw_dmg = random.randint(self._damage[0], self._damage[1])\n \n true_dmg = raw_dmg - mitigation\n if true_dmg < 0:\n true_dmg = 0\n \n return true_dmg, num_cats", "def damage(self):\n if not self.damage_mode and not self.attack_mode and not self.death_mode:\n self.damage_mode = True\n self.cut_frame_update = 0" ]
[ "0.59331006", "0.579906", "0.57893956", "0.5656282", "0.55813575", "0.5487684", "0.54682094", "0.54240894", "0.5413487", "0.5369096", "0.5364261", "0.5301248", "0.5289231", "0.5221748", "0.52184904", "0.52123284", "0.52100843", "0.519872", "0.5175159", "0.5127847", "0.5119192", "0.5070927", "0.506044", "0.50532234", "0.50204104", "0.50149995", "0.50134975", "0.5012352", "0.49997017", "0.49936834" ]
0.77795994
0
Clear this chunk's damage.
def clear_damage(self): self.damaged.clear() self.all_damaged = False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deal_damage(self, damage):\n # Another cool trick\n self.current_health = max(\n 0,\n self.current_health-damage\n )", "def Hit(self, damage):\n self.health -= damage", "def reset(self):\n self.damage_dealt = 0\n self.kills = 0\n self.got_killed = False\n self.fitness = 0", "def magic_damage_dealt(self, magic_damage_dealt):\n\n self._magic_damage_dealt = magic_damage_dealt", "def take_damage(self, damage):\n if self.hp - damage <= 0:\n self.hp = 0\n self.die()\n else:\n self.hp -= damage", "def clear(self):\n self.__hasTABLE = False\n self.__hasGRAPHS = False\n self.__ndoubledollar = 0\n buffer.clear(self)", "def __del__(self):\r\n self.chunk = None", "def take_damage(self, damage):\n if damage <= 0: return\n self.hit_points[0] -= damage\n if self.hit_points[0] <= 0: self.die()", "def clear(self):\n self.length = 0", "def deal_dmg(self):\n return self.damage", "def clear(self):\n\t\tself.PTerm = 0.0\n\t\tself.ITerm = 0.0\n\t\tself.DTerm = 0.0\n\t\tself.clearing = True\n\n\t\tself.output = 0.0", "def purge(self):\n self.remaining = 0", "def damage(self):\n if not self.damage_mode and not self.attack_mode and not self.death_mode:\n self.damage_mode = True\n self.cut_frame_update = 0", "def clear(self) -> None:\n self._loss_dict.clear()", "def reset(self):\n self.reward = 0", "def reset(self):\n self._stat = CardMeta()", "def reset(self, hard=False):\n self.hp = self.starting_hp\n if self.concentrating:\n self.conc_fx() # TODO this looks fishy\n self.healing_spells = self.starting_healing_spells\n if hard:\n self.tally = {'damage': 0, 'hp': 0, 'hits': 0, 'misses': 0, 'rounds': 0, 'healing_spells': 0, 'battles': 0,\n 'dead': 0}", "def clean(self):\n self.decay()\n self.hit_count = 0.0\n self.out_layer.cost = 0.0\n self.batch_loss = 0.0", "def remove_from_hand(self):\n pass", "def get_damage(self, amount: float) -> None:\n self.health = self.health - amount", "def heal(self):\n self.infected = False", "def take_damage(self, damage):\n damage /= len(self.__units)\n for i in self.__units:\n i.take_damage(damage)", "def clear(self) -> None:\n self._tiles.clear()\n self._chunks.clear()", "def die(self):\n self.pjs.bombermen.remove(self)\n for block in self.physics.blocks[self.stype]:\n if block == self.rects[0]:\n self.physics.blocks[self.stype].remove(block)", "def reset(self):\n if self.monotonic_energy is not None:\n self.monotonic_energy.reset()\n if self.chunk_energy is not None:\n self.chunk_energy.reset()\n self.bd_L_prev = 0\n self.key_tail = None", "def clear(self):\n self.hand = []\n self.cards = []\n self.escobas = []\n self.points += self.round_points\n self.round_points = 0", "def clear_data(cls):\n cls.__data.clear()\n cls.__counters.clear()", "def __clearBonuses(self, hp=1):\n if hp:\n self.hpBonuses = [{}, {}, {}, {}]\n else:\n self.kbBonuses = [{}, {}, {}, {}]", "def set_health(self):\n self.health -= 1", "def clear(self) -> None:\n self.block = None\n self.tx = None\n self.msg = None\n self.block_batch = None\n self.tx_batch = None\n self.new_icon_score_mapper = None\n self.cumulative_step_used = 0\n self.step_counter = None\n self.event_logs = None\n self.logs_bloom = None\n self.traces = None\n self.func_type = IconScoreFuncType.WRITABLE\n\n self.msg_stack.clear()" ]
[ "0.65471846", "0.6530023", "0.6179079", "0.61029494", "0.6038493", "0.5985546", "0.5968368", "0.59555227", "0.5935097", "0.58794016", "0.5859021", "0.5855798", "0.58450335", "0.58394456", "0.58167946", "0.58163226", "0.5799631", "0.5789179", "0.5785586", "0.57779443", "0.57740206", "0.57663643", "0.5762727", "0.5761753", "0.5733724", "0.5709342", "0.5708273", "0.57054967", "0.5692399", "0.569238" ]
0.8044922
0
Generate a chunk packet.
def save_to_packet(self): mask = 0 packed = [] ls = segment_array(self.blocklight) for i, section in enumerate(self.sections): if any(section.blocks): mask |= 1 << i packed.append(section.blocks.tostring()) for i, section in enumerate(self.sections): if mask & 1 << i: packed.append(pack_nibbles(section.metadata)) for i, l in enumerate(ls): if mask & 1 << i: packed.append(pack_nibbles(l)) for i, section in enumerate(self.sections): if mask & 1 << i: packed.append(pack_nibbles(section.skylight)) # Fake the biome data. packed.append("\x00" * 256) packet = make_packet("chunk", x=self.x, z=self.z, continuous=True, primary=mask, add=0x0, data="".join(packed)) return packet
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def writeChunk(chunk):", "def chunk(f, n, data):\n\n\t# Chunk ID\n\tf.write(number(2, n))\n\t# Chunk length\n\tf.write(number(4, len(data)))\n\t# Data\n\tf.write(data)", "def create(cls, raw_data, chunk_dir, prefix='chunk'):\n other_names = [f for f in os.listdir(chunk_dir) if \\\n f.startswith(prefix)]\n other_names.sort()\n last_digit = 0\n if other_names:\n last_digit = int(other_names[-1].split('_')[-1])\n filename = '%s_%s' % (prefix, str(last_digit + 1).zfill(9))\n logger.debug('creating chunk %s' % filename)\n file_path = os.path.join(chunk_dir, filename)\n f = open(file_path, 'wb')\n f.write(raw_data)\n f.close()\n append_checksum(file_path)\n return ClientChunk(file_path)", "def send_chunk(chnk, sock):\n length = len(chnk)\n data = str(length).zfill(MAX_CHUNK_SIZE).encode() + chnk\n sock.send(data)", "def send_chunk(chunk, send_socket):\n length = len(chunk)\n data = str(length).zfill(MAX_CHUNK_SIZE).encode() + chunk\n send_socket.send(data)", "def generate_chunk(self, filename):\n\n # open resource file in binary\n with open(filename, 'rb') as resource:\n\n # instantiate chunk start byte and trailing line string\n p = 0\n overlap = ''\n\n while p <= self.file_size:\n\n try:\n if self.file_size - p < self.chunk_size:\n buffer = overlap + resource.read(self.file_size - p).decode(\"UTF-8\")\n else:\n buffer = overlap + resource.read(self.chunk_size).decode(\"UTF-8\")\n except:\n p += self.chunk_size\n continue\n\n # remove and store trailing sentence\n buffer, overlap = buffer.rsplit('\\n', maxsplit=1)\n\n yield buffer\n\n p += self.chunk_size", "def recv_chunk(self, data):", "def _get_arbitrary_chunk(post: Post) -> str:\n instance_template = Template(config.PLAN_POSTS_INSTANCE_CHUNK)\n instance_chunk = instance_template.render(Context({\"post\": post}))\n return instance_chunk", "def chunks(sequence, chunk_size):\r\n\r\n # YOUR CODE HERE\r", "def get_damage_packet(self):\n\n if self.all_damaged:\n # Resend the entire chunk!\n return self.save_to_packet()\n elif not self.damaged:\n # Send nothing at all; we don't even have a scratch on us.\n return \"\"\n elif len(self.damaged) == 1:\n # Use a single block update packet. 
Find the first (only) set bit\n # in the damaged array, and use it as an index.\n coords = next(iter(self.damaged))\n\n block = self.get_block(coords)\n metadata = self.get_metadata(coords)\n\n x, y, z = coords\n\n return make_packet(\"block\",\n x=x + self.x * 16,\n y=y,\n z=z + self.z * 16,\n type=block,\n meta=metadata)\n else:\n # Use a batch update.\n records = []\n\n for coords in self.damaged:\n block = self.get_block(coords)\n metadata = self.get_metadata(coords)\n\n x, y, z = coords\n\n record = x << 28 | z << 24 | y << 16 | block << 4 | metadata\n records.append(record)\n\n data = \"\".join(pack(\">I\", record) for record in records)\n\n return make_packet(\"batch\", x=self.x, z=self.z,\n count=len(records), data=data)", "def chunk_generator( callback, request, c ):", "def _chunk_data(self):\n for n in range(0, len(self.data) + 1, len(self.data) //\n self.num_of_chunks):\n yield self.data[0 + n:len(self.data) // self.num_of_chunks + n]", "def generate_bytestream(self):\n # Start empty\n stream = bytearray()\n # Source ID\n stream.extend([self.data_source])\n # Dest ID\n stream.extend([self.data_dest])\n # Param block\n stream.extend(bytearray([len(self.parameters)]))\n for parameter in self.parameters:\n stream.extend(parameter)\n # Content pointer (not used)\n stream.extend([0xFF]) # Content inline\n # Content itself\n stream.extend(self.content)\n return stream", "def generate():\n with open(remote_path, \"rb\") as f:\n for chunk in iter(lambda: f.read(buffer_size), b''):\n yield chunk", "def send_chunk(self, chunk):\n print \"ChunkedTwistedConnection: send chunk\"\n return self.body.send(chunk)", "def chunk_bytes(buf):\n assert len(buf) >= CHUNK_SIZE / 2\n n = len(buf)\n if n < CHUNK_SIZE:\n yield buf[: CHUNK_SIZE // 2] + buf[-CHUNK_SIZE // 2 :], n\n return\n\n for i in range(0, len(buf), CHUNK_SIZE):\n if i + CHUNK_SIZE <= n:\n yield buf[i : i + CHUNK_SIZE], CHUNK_SIZE\n else:\n yield buf[n - CHUNK_SIZE :], n - i", "def request_chunk(self, x, z):\n\n if (x, z) in self.chunk_cache:\n returnValue(self.chunk_cache[x, z])\n elif (x, z) in self.dirty_chunk_cache:\n returnValue(self.dirty_chunk_cache[x, z])\n elif (x, z) in self._pending_chunks:\n # Rig up another Deferred and wrap it up in a to-go box.\n retval = yield self._pending_chunks[x, z].deferred()\n returnValue(retval)\n\n chunk = Chunk(x, z)\n yield maybeDeferred(self.serializer.load_chunk, chunk)\n\n if chunk.populated:\n self.chunk_cache[x, z] = chunk\n self.postprocess_chunk(chunk)\n #self.factory.scan_chunk(chunk)\n returnValue(chunk)\n\n if self.async:\n from ampoule import deferToAMPProcess\n from bravo.remote import MakeChunk\n\n d = deferToAMPProcess(MakeChunk,\n x=x,\n z=z,\n seed=self.seed,\n generators=configuration.getlist(self.config_name, \"generators\")\n )\n\n # Get chunk data into our chunk object.\n def fill_chunk(kwargs):\n chunk.blocks = fromstring(kwargs[\"blocks\"],\n dtype=uint8).reshape(chunk.blocks.shape)\n chunk.heightmap = fromstring(kwargs[\"heightmap\"],\n dtype=uint8).reshape(chunk.heightmap.shape)\n chunk.metadata = fromstring(kwargs[\"metadata\"],\n dtype=uint8).reshape(chunk.metadata.shape)\n chunk.skylight = fromstring(kwargs[\"skylight\"],\n dtype=uint8).reshape(chunk.skylight.shape)\n chunk.blocklight = fromstring(kwargs[\"blocklight\"],\n dtype=uint8).reshape(chunk.blocklight.shape)\n\n return chunk\n d.addCallback(fill_chunk)\n else:\n # Populate the chunk the slow way. 
:c\n for stage in self.pipeline:\n stage.populate(chunk, self.seed)\n\n chunk.regenerate()\n d = succeed(chunk)\n\n # Set up our event and generate our return-value Deferred. It has to\n # be done early becaues PendingEvents only fire exactly once and it\n # might fire immediately in certain cases.\n pe = PendingEvent()\n # This one is for our return value.\n retval = pe.deferred()\n # This one is for scanning the chunk for automatons.\n #pe.deferred().addCallback(self.factory.scan_chunk)\n self._pending_chunks[x, z] = pe\n\n def pp(chunk):\n chunk.populated = True\n chunk.dirty = True\n\n self.postprocess_chunk(chunk)\n\n self.dirty_chunk_cache[x, z] = chunk\n del self._pending_chunks[x, z]\n\n return chunk\n\n # Set up callbacks.\n d.addCallback(pp)\n d.chainDeferred(pe)\n\n # Because multiple people might be attached to this callback, we're\n # going to do something magical here. We will yield a forked version\n # of our Deferred. This means that we will wait right here, for a\n # long, long time, before actually returning with the chunk, *but*,\n # when we actually finish, we'll be ready to return the chunk\n # immediately. Our caller cannot possibly care because they only see a\n # Deferred either way.\n retval = yield retval\n returnValue(retval)", "def send_chunked(self, chunks, payload, trailers):\r\n\r\n chunk_list = chunks.split(',')\r\n pointer = 0\r\n for cwidth in chunk_list:\r\n cwidth = int(cwidth)\r\n # send chunk length indicator\r\n self.wfile.write(format(cwidth, 'x').upper() + \"\\r\\n\")\r\n # send chunk payload\r\n self.wfile.write(payload[pointer:pointer + cwidth] + \"\\r\\n\")\r\n pointer += cwidth\r\n\r\n # is there another chunk that has not been configured? Send it anyway for the sake of completeness..\r\n if len(payload) > pointer:\r\n # send chunk length indicator\r\n self.wfile.write(format(len(payload) - pointer, 'x').upper() + \"\\r\\n\")\r\n # send chunk payload\r\n self.wfile.write(payload[pointer:] + \"\\r\\n\")\r\n\r\n # we're done with the payload. 
Send a zero chunk as EOF indicator\r\n self.wfile.write('0'+\"\\r\\n\")\r\n\r\n # if there are trailing headers :-) we send them now..\r\n for trailer in trailers:\r\n self.wfile.write(\"%s: %s\\r\\n\" % (trailer[0], trailer[1]))\r\n\r\n # and finally, the closing ceremony...\r\n self.wfile.write(\"\\r\\n\")", "def pkt_gen(self):\n for i in range(self.num_pkts):\n # create the test packets\n pkt = Ether()/IP()/TCP()/'hello there pretty world!!!'\n rank = random.sample(range(0, 100), 1)[0]\n pkt_id = i\n tuser = Tuser(len(pkt), 0b00000001, 0b00000100, rank, pkt_id)\n print ('@ {:.2f} - Send: {} || {}'.format(self.env.now, pkt.summary(), tuser))\n # write the pkt and metadata into storage\n self.pkt_in_pipe.put((pkt, tuser))\n\n # wait for 10 cycles\n #for j in range(PREAMBLE + len(pkt) + IFG):\n yield self.wait_line_clks(self.PREAMBLE + len(pkt) + self.IFG)", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.step)\n return header", "def make_beagle_chunks(self, window: int, overlap: int, outfile: str, verbose: bool = True)->str:\n\n if self.makeBGLCHUNKS_folder is None:\n raise Exception(\"Provide the folder for the makeBGLCHUNKS binary\")\n\n Arg = namedtuple('Argument', 'option value')\n\n args = [Arg('--vcf', self.vcf), Arg('--window', window),\n Arg('--overlap', overlap), Arg('--output', outfile)]\n\n runner = RunProgram(path=\"{0}/\".format(self.makeBGLCHUNKS_folder),\n program='makeBGLCHUNKS', args=args)\n\n print(runner.cmd_line)\n if verbose is True:\n print(\"Command line for running makeBGLCHUNKS is: {0}\".format(runner.cmd_line))\n\n runner.run_checkoutput()\n\n return outfile", "def get_chunks(size):\n chunk_start = 0\n chunk_size = 0x20000\n\n while chunk_start + chunk_size < size:\n yield (chunk_start, chunk_size)\n chunk_start += chunk_size\n if chunk_size < 0x100000:\n chunk_size += 0x20000\n\n if chunk_start < size:\n yield (chunk_start, size - chunk_start)", "def make_packet(self, type, data): \n return (\"{}\\x00{}\\x00{}\".format(type, data, self.ID)).encode()", "def chunked(self, length, overlap):\n def new_gen():\n buffer = self.read(length)\n while True:\n yield np.array([buffer]) #pack into one more dimension\n new_elems = self.read(length - overlap)\n if new_elems.shape[0] == 0:\n # Reached the end of the stream\n break\n buffer[:overlap] = buffer[length-overlap:]\n buffer[overlap:] = new_elems\n return Stream(new_gen(), chunk_size=1)", "def _generateblocks(self, n):\n if self.key is None:\n raise AssertionError('generator must be seeded before use')\n result = b''\n for i in range(n):\n result += self._cipher.encrypt(self.counter())\n return result", "def make_chunks(l, chunk_length):\n for i in range(0, len(l), chunk_length):\n yield l[i:i + chunk_length]", "def generate():\n PackCommandExecutor().pack()\n GenerateCommandExecutor().generate()", "def generate_data(self, network_name: Name, chunk_size: int = 4096):\n try:\n fs_name = self._files_in_repo[network_name.to_string()]\n except:\n return False\n with open(fs_name, \"r+\") as f:\n # open file and determine number of chunks\n file = mmap.mmap(f.fileno(), 0)\n file_length = len(file)\n num_chunks = math.ceil(file_length / chunk_size)\n # generate data packets (manifest and chunk)\n chunk_names = list()\n for n in range(0, num_chunks):\n # extract chunk and compute digest\n chunk = file[chunk_size * n: min(chunk_size * (n + 1), file_length)]\n m = hashlib.sha256()\n m.update(chunk)\n digest = m.hexdigest()\n chunk_network_name = Name(network_name.to_string() + '/chunk/' + digest)\n # add to 
cache and chunk list\n chunk_names.append(chunk_network_name.to_string())\n self.add_to_cache(Content(chunk_network_name, chunk))\n # generate manifest\n manifest_data = \"\\n\".join(chunk_names)\n manifest = Content(network_name, manifest_data)\n self.add_to_cache(manifest)\n return True", "def _serialise_chunk(\n self, chunk: Union[Chunk, None], dimension: Dimension, change_no: int\n ) -> ChunkRecord:\n if chunk is None:\n return None\n\n os.makedirs(self._temp_dir, exist_ok=True)\n path = os.path.join(\n self._temp_dir,\n f\"chunk.{dimension}.{chunk.cx}.{chunk.cz}.{change_no}.pickle.gz\",\n )\n\n chunk.pickle(path)\n\n return path", "def make_block(self, in_size, out_size, **kwargs):\n raise NotImplementedError(\"Abstract\")" ]
[ "0.6074541", "0.5996047", "0.5944307", "0.5832408", "0.58029795", "0.57758665", "0.57101524", "0.56944996", "0.5658533", "0.5651672", "0.5611162", "0.5574393", "0.5542967", "0.55011904", "0.5488767", "0.5452836", "0.5403471", "0.5397011", "0.533897", "0.5329054", "0.5314494", "0.5311399", "0.5307619", "0.5305499", "0.5302241", "0.52961963", "0.5294763", "0.52888936", "0.5283482", "0.5275802" ]
0.6450546
0
Look up skylight value.
def get_skylight(self, coords): x, y, z = coords index, y = divmod(y, 16) return self.sections[index].get_skylight((x, y, z))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getLight(self):\n return self.light", "def getLightSensor() -> int:\n pass", "def get_lighting_value(self, lighting):\n if lighting:\n return self.lighting_map[lighting]\n else:\n return 'U'", "def get_light(name):\n if name not in _cache:\n get_lights() # do nothing with response\n\n if name in _cache:\n return _cache[name]\n else:\n return None", "def get_light():\n return 'do some magic!'", "def sky_temperature(self) -> float:\n\n return 0.0552 * (self.ambient_temperature**1.5)", "def get_gas_light():\n\tresult = ser.readline()\n\treturn map(float,result.split(\";\")[0:4])", "def get_light(self, light, text=False):\n if not self.have_light(light):\n return None\n if text:\n return text_switch[self.light_status[light]]\n return self.light_status[light]", "def lightSpeed():\n return const.c.value", "def ambient_light(self):\n return self._ambient_light", "def specular_light(self):\n return self._specular_light", "def light_color(self):\n return self._spots[constants.CROSSING_LOCATION - 1].light_color()", "def _get_snow_rain_value(self, _dict):\n if _dict.get(\"1h\"):\n return _dict.get(\"1h\")\n if _dict.get(\"3h\"):\n return _dict.get(\"3h\")\n if _dict.get(\"all\"):\n return _dict.get(\"all\")\n return None", "def intensity(self, now):\n return self.model.place.state", "def calculateLighting(x,y,z, xnormal, ynormal, znormal):\n dummy = 0\n clr = dislin.getlit(x,y,z,xn,yn,zn,dummy)", "def get_value(self, x, y, z):\n\t\treturn self.data[ self.xyz_to_offset(x,y,z) ]", "def Tsky(self, source):\n\n if not _usePyGSM:\n raise ImportError('PyGSM is not available: cannot access sky temperatures')\n if not isinstance(source, astropy.coordinates.sky_coordinate.SkyCoord):\n if isinstance(source,str):\n # assume .par file\n source=parfile2SkyCoord(source)\n else:\n raise TypeError('Do not know how to interpret an object of type %s' % source.__class__)\n\n source=source.galactic\n T=healpy.pixelfunc.get_interp_val(self.map,\n source.l.value,\n source.b.value,\n lonlat=True)\n return T*u.K", "def process_traffic_lights(self):\n #DONE find the closest visible traffic light (if one exists within LOOKAHEAD_WPS)\n if self.init:\n state = self.get_light_state(0)\n return -1, TrafficLight.UNKNOWN\n elif self.ntlwp:\n state = self.get_light_state(self.ntlwp)\n # state = TrafficLight.RED\n return self.ntlwp, state\n return -1, TrafficLight.UNKNOWN", "def diffuse_light(self):\n return self._diffuse_light", "def check_light(light: pykulersky.Light):\n light.connect()\n light.get_color()", "def getFar(self):\n return self.light.node().getLens().getFar()", "def get_sky_data_flag(self,data):\n sky_data_flag = ~Calibration.get_vane_flag(data['level1']) \n features = self.getFeatures(data)\n features = np.log10(features)/np.log10(2)\n sky_data_flag = sky_data_flag & np.isfinite(features) & (features != 16)\n\n return sky_data_flag", "def intensity(self) -> int:", "def brightness(self):\n _LOGGER.error(\"inside brightness\")\n url = self.urlx + '/dimstate'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = int(int(json_data['dimState'])*1.5)\n\n # if int(self._dimmer) < 170:\n self._dimmer = state\n\n return self._dimmer", "def __lightness(self, color):\n hsv = color.toHsv()\n return hsv.valueF()", "def get_lux(self):\n\n svc = \"urn:micasaverde-com:serviceId:LightSensor1\"\n if not svc in self.services:\n raise RuntimeError, 
\"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentLevel\")", "def test_scalar_skycoord():\n\n data = make_4gaussians_image()\n wcs = make_wcs(data.shape)\n skycoord = wcs.pixel_to_world(90, 60)\n aper = SkyCircularAperture(skycoord, r=0.1 * u.arcsec)\n tbl = aperture_photometry(data, aper, wcs=wcs)\n assert isinstance(tbl['sky_center'], SkyCoord)", "def get_light_state(self, light):\n\treturn light.state \n\n\t#if(not self.has_image):\n # self.prev_light_loc = None\n # return False", "def skydir(self):\n return self._skydir", "def __getitem__(self, item) -> float:\n return self.source.hsl[item]" ]
[ "0.67105234", "0.65349597", "0.631679", "0.6164893", "0.6125347", "0.6088632", "0.60547674", "0.5975332", "0.5879654", "0.5829457", "0.5809481", "0.5787087", "0.57246304", "0.56965107", "0.56496423", "0.5648658", "0.56404704", "0.56019545", "0.5598698", "0.557053", "0.5568632", "0.55641377", "0.5561465", "0.5558979", "0.55404085", "0.55125904", "0.550802", "0.550101", "0.5491201", "0.54882705" ]
0.66485906
1
Destroy the block at the given coordinates. This may or may not set the block to be full of air; it uses the block's preferred replacement. For example, ice generally turns to water when destroyed. This is safe as a noop; for example, destroying a block of air with no metadata is not going to cause state changes.
def destroy(self, coords): block = blocks[self.get_block(coords)] self.set_block(coords, block.replace) self.set_metadata(coords, 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def destroy(self):\n self._set_block(self._pos, _AIR)\n self._set_block(self._pos + _Vec3(0, 1, 0), _AIR)", "def destroy(self, chunk, coords):\n\n chunk.destroy(coords)", "async def remove_block(\n self,\n position: typing.Union[\n typing.Tuple[int, int, int],\n typing.Any,\n ],\n immediate: bool = True,\n block_update: bool = True,\n block_update_self: bool = True,\n network_sync=True,\n reason=None,\n ):\n raise NotImplementedError", "def delete_block(self, block):\n raise NotImplementedError('delete_block')", "def destroy_all():\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)", "def sync_destroy(self, chunk, coords):\n\n chunk.destroy(coords)", "def destroy(self):\r\n self.city_map.get_tile_at_position(self.position).car = None", "def erase_block(self):\n self.blocks[self.editor_cursor_position[1]][self.editor_cursor_position[0]] = '0'", "def _do_remove_block(self, args):\r\n bus_type = args[1]\r\n slave_id = int(args[2])\r\n name = args[3]\r\n if bus_type == 'rtu':\r\n slave = self.server._servers[0].get_slave(slave_id)\r\n elif bus_type == 'tcp':\r\n slave = self.server._servers[1].get_slave(slave_id)\r\n slave.remove_block(name)", "def remove_block(self, block):\n raise NotImplementedError()", "def exit_block(self, parent=None, **kwargs):\n block = self.newblock(parent, have_code=False, is_exit=True, **kwargs)\n self.blocks.pop()\n return block", "def removeBlock(self, aBlock: gp.Block):\n \n for y, row in iter(self.blocks):\n for x, block in iter(row):\n if block is aBlock:\n self.blocks[y][x] = None\n self.playerSprites.remove(aBlock.sprite)\n return", "def removeBlock(self, block: ghidra.program.model.mem.MemoryBlock, monitor: ghidra.util.task.TaskMonitor) -> None:\n ...", "def bdev_zone_block_delete(client, name):\n params = {'name': name}\n return client.call('bdev_zone_block_delete', params)", "def remove_blocks(self, block_ids):\n self.smd3.remove_blocks(block_ids)\n self.logic.update(self.smd3)\n self.header.update(self.smd3)", "def delete(self, block, name):\n self._kvs.delete(self._key(block, name))", "def mapDel(block, posMap):\n for (x, y) in block.coords:\n theFallener(x + block.x, y + block.y, 0, posMap)", "def free(self, name: str):\n # Find the block to be freed\n to_free = None\n for b in self.used_list:\n if b.name == name:\n to_free = b\n break\n # Return if the user requested to free a block that is not in the used list\n if to_free is None:\n print(\"free FAIL; no such name={}\", to_free)\n return\n # Remove the block from the used list\n to_free.used = False\n self.used_list.remove(to_free)\n \n # Check if either of the neighbors of this block are free and need to be coalesced with this block\n left_end_address = to_free.start()\n right_start_address = to_free.end()\n left_block = None\n right_block = None\n for block in self.free_list:\n if (block.end() == left_end_address):\n left_block = block\n elif (block.start() == right_start_address):\n right_block = block\n\n # Coalesce neighboring blocks if necessary, add blocks to free list\n if (left_block is not None and right_block is not None):\n # Remove these blocks so they can be coalesced with each other and with to_free\n self.free_list.remove(left_block)\n self.free_list.remove(right_block)\n start: int = left_block.start()\n end: int = 
right_block.end()\n new_name: str = left_block.name + name + right_block.name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n elif (left_block is not None):\n # Remove this block so it can be coalesced with to_free\n self.free_list.remove(left_block)\n start: int = left_block.start()\n end: int = right_start_address\n new_name: str = left_block.name + name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n elif (right_block is not None):\n # Remove this block so it can be coalesced with to_free\n self.free_list.remove(right_block)\n start: int = left_end_address\n end: int = right_block.end()\n new_name: str = name + right_block.name\n new_free = Block(start, end-start, new_name)\n self.free_list.append(new_free)\n else:\n # None of to_free's neighbors are free, so just add it alone to the free list\n self.free_list.append(to_free)", "def destroy(self):\n self._obstacleCourse.destroy()", "def kill(self, coord):\n if self.chart[coord] == ALIVE:\n self.create_rectangle((coord[0] + 1) * UNIT, coord[1] * UNIT,\n coord[0] * UNIT, (coord[1] + 1) * UNIT,\n fill=DEATH, width=THICKNESS, outline='black')\n self.chart[coord] = DEAD", "def remove_blocks(self, *vertices):\n for vertex in vertices:\n try:\n self.world[vertex] = None\n self.shown.pop(vertex)\n for vtx in self._shown[vertex]:\n vtx.delete()\n except KeyError:\n pass\n except IndexError:\n pass", "def finalize_block_construction(self, pyomo_block):\n pass", "def die(self):\n self.pjs.bombermen.remove(self)\n for block in self.physics.blocks[self.stype]:\n if block == self.rects[0]:\n self.physics.blocks[self.stype].remove(block)", "def set_block(self, coords, block):\n\n x, y, z = coords\n index, section_y = divmod(y, 16)\n\n column = x * 16 + z\n\n if self.get_block(coords) != block:\n self.sections[index].set_block((x, section_y, z), block)\n\n if not self.populated:\n return\n\n # Regenerate heightmap at this coordinate.\n if block:\n self.heightmap[column] = max(self.heightmap[column], y)\n else:\n # If we replace the highest block with air, we need to go\n # through all blocks below it to find the new top block.\n height = self.heightmap[column]\n if y == height:\n for y in range(height, -1, -1):\n if self.get_block((x, y, z)):\n break\n self.heightmap[column] = y\n\n # Do the blocklight at this coordinate, if appropriate.\n if block in glowing_blocks:\n composite_glow(self.blocklight, glowing_blocks[block],\n x, y, z)\n bl = [clamp(light, 0, 15) for light in self.blocklight]\n self.blocklight = array(\"B\", bl)\n\n # And the skylight.\n glow = max(self.get_skylight((nx, ny, nz))\n for nx, nz, ny in iter_neighbors((x, z, y)))\n self.set_skylight((x, y, z), neighboring_light(glow, block))\n\n self.dirty = True\n self.damage(coords)", "def uncover(self, loc: tuple[int, int]) -> None:\n self.field.uncover(loc)\n\n if self.field.is_triggered():\n self.game_over = True\n [queue.clear() for queue in\n (self.clear_queue, self.auto_queue, self.hyper_queue)]\n self.status_label.config(text=GAME_OVER_MSG)\n return\n\n self.safes_left -= 1\n\n if self.field.is_all_clear():\n self.win = True\n self.status_label.config(text=ALL_CLEAR_MSG)\n\n if loc in self.clear_queue:\n self.clear_queue.remove(loc)\n\n if self.field[loc].surrounding_mines == 0:\n block = Block(self.field, loc)\n self.clear_queue.add_batch(block.unknown_neighbors,\n emphasis=self.emphasis[\"add_batch\"],\n color=\"new_clear\")\n elif self.auto_solving.get():\n block = Block(self.field, loc)\n useful_neighbors = 
block.naked_neighbors\n useful_neighbors.add(loc)\n [self.hyper_queue.remove(cell) for cell in useful_neighbors]\n self.auto_queue.add_batch(useful_neighbors,\n emphasis=self.emphasis[\"add_batch\"],\n color=\"new_auto\")\n self.auto_queue.clean_up(emphasis=self.emphasis[\"redundant\"])\n self.hyper_queue.clean_up(emphasis=self.emphasis[\"redundant\"])\n\n if not self.clear_queue.is_busy:\n self.clear_queue.is_busy = True\n self.process(self.clear_queue)", "def _place(self, loc, exclude=None, block_=None):\n if not self._inventory:\n raise Exception('Inventory empty')\n\n if block_ is None:\n for key in self._inventory:\n if key != exclude:\n block_ = key\n break\n else:\n raise Exception((\n 'You requested not to place %s, but it is the only '\n 'block in the inventory.' % exclude\n ))\n\n if block_ not in self._inventory:\n raise Exception('Block %s is not in the inventory' % block_)\n\n if self._inventory[block_] == 1:\n del self._inventory[block_]\n else:\n self._inventory[block_] -= 1\n\n self._set_block(loc, block_)", "def delete_blockreplica(self, block_replica):\n raise NotImplementedError('delete_blockreplica')", "def remove_block(self, name):\n\n if not self._ast or not name in self._block_map:\n raise ValueError(u\"Block '{0}' does not exist\"\n .format(common.from_utf8(name)))\n\n block_idx = self._block_map[name]\n\n # remove block\n self._ast[2].pop(block_idx)\n del self._block_map[name]", "def makeTheHouse(pos, blockTypeMain= wool, blockTypeSecond= wool,\n mainColor= wMagenta, secondColor= wWhite,\n myDoor= wDoorWood):\n\n ### FRONT (& BACK )###\n for Front in range(0,22,21): #This is the trick for the back copy...\n \n mc.setBlocks(pos.x-4, pos.y,pos.z+6+Front,\n pos.x+7, pos.y+9, pos.z+6+Front, blockTypeMain, mainColor)\n mc.setBlocks(pos.x-3, pos.y+1,pos.z+6+Front,\n pos.x+6, pos.y+8, pos.z+6+Front, blockTypeSecond, secondColor)\n # FRONT - Remove blocks\n # Small trick to remove the 6 empty space by a loop\n #[[x,y],[x,y],[x,y],...]\n for i in [[-1,+1],[5,+1],[+2,0],[-1,+5],[2,+5],[5,+5]]:\n mc.setBlocks(pos.x+i[0], pos.y+i[1],pos.z+6+Front,\n pos.x+i[0]-1, pos.y+i[1]+2, pos.z+6+Front, air)\n #let's put the Glasses (that's almost the same than remove actually...)\n for i in [[-1,+1],[5,+1],[-1,+5],[2,+5],[5,+5]]:\n mc.setBlocks(pos.x+i[0], pos.y+i[1],pos.z+6+Front,\n pos.x+i[0]-1, pos.y+i[1]+2, pos.z+6+Front, wGlass_Pane)\n # The door at Entrance\n mc.setBlock(pos.x+1, pos.y, pos.z+6+Front, myDoor,4)\n mc.setBlock(pos.x+1, pos.y+1, pos.z+6+Front, myDoor,8)\n mc.setBlock(pos.x+2, pos.y, pos.z+6+Front, myDoor,1)\n mc.setBlock(pos.x+2, pos.y+1, pos.z+6+Front, myDoor,8)\n \n # ************\n \n # FRONT - Small top\n mc.setBlocks(pos.x-3, pos.y+10,pos.z+6+Front,\n pos.x+6, pos.y+14, pos.z+6+Front, blockTypeSecond, secondColor)\n mc.setBlocks(pos.x-1, pos.y+10,pos.z+6+Front,\n pos.x+4, pos.y+13, pos.z+6+Front, blockTypeMain, mainColor)\n mc.setBlocks(pos.x, pos.y+10,pos.z+6+Front,\n pos.x+3, pos.y+12, pos.z+6+Front, blockTypeSecond, secondColor)\n # FRONT-Small top Remove Blocks\n mc.setBlocks(pos.x+1, pos.y+11,pos.z+6+Front,\n pos.x+2, pos.y+12, pos.z+6+Front, air)\n # small trick to remove as \"stairs\" - funny ? 
no ?\n for i in range(0,10,1):\n iy = i\n if i > 5:\n iy=9-i\n #print i, iy\n mc.setBlocks(pos.x-3+i, pos.y+11+iy,pos.z+6+Front,\n pos.x-3+i, pos.y+15, pos.z+6+Front, air)\n # FRONT-Small Top put Glass\n mc.setBlocks(pos.x+1, pos.y+11,pos.z+6+Front,\n pos.x+2, pos.y+12, pos.z+6+Front, wGlass_Pane)\n\n\n # FRONT-Right & Left side \n for i in range(0,19,18):\n #print i\n mc.setBlocks(pos.x-4+i, pos.y,pos.z+7+Front,\n pos.x-11+i, pos.y+8, pos.z+7+Front, blockTypeMain, mainColor)\n mc.setBlocks(pos.x-5+i, pos.y+1,pos.z+7+Front,\n pos.x-10+i, pos.y+7, pos.z+7+Front, blockTypeSecond, secondColor)\n # blocks removal\n mc.setBlocks(pos.x-6+i, pos.y+1,pos.z+7+Front,\n pos.x-9+i, pos.y+7, pos.z+7+Front, wGlass_Pane)\n # the line\n mc.setBlocks(pos.x-5+i, pos.y+4,pos.z+7+Front,\n pos.x-11+i, pos.y+4, pos.z+7+Front, blockTypeMain, mainColor)\n \n #remove 2 extra columns\n mc.setBlocks(pos.x-4, pos.y, pos.z+7,\n pos.x-4, pos.y+8, pos.z+7, air)\n mc.setBlocks(pos.x-4+11, pos.y, pos.z+7,\n pos.x-4+11, pos.y+8, pos.z+7, air)\n\n\n ### MAIN WALLS RIGHT & LEFT SIDE ###\n for wall in range(0,26,25):\n mc.setBlocks(pos.x-11+wall, pos.y, pos.z+8,\n pos.x-11+wall, pos.y+8, pos.z+28, blockTypeMain, mainColor)\n\n mc.setBlocks(pos.x-11+wall, pos.y+1, pos.z+8,\n pos.x-11+wall, pos.y+7, pos.z+27, blockTypeSecond, secondColor)\n\n for i in range(0,15,7):\n mc.setBlocks(pos.x-11+wall, pos.y+1,pos.z+9+i,\n pos.x-11+wall, pos.y+7, pos.z+12+i, wGlass_Pane)\n \n # the 3 lines\n mc.setBlocks(pos.x-11+wall, pos.y, pos.z+14,\n pos.x-11+wall, pos.y+8, pos.z+14, blockTypeMain, mainColor)\n mc.setBlocks(pos.x-11+wall, pos.y, pos.z+21,\n pos.x-11+wall, pos.y+8, pos.z+21, blockTypeMain, mainColor)\n mc.setBlocks(pos.x-11+wall, pos.y+4, pos.z+8,\n pos.x-11+wall, pos.y+4, pos.z+28, blockTypeMain, mainColor)\n\n\n \n\n #same \n #removeBlocks(pos.x-1, pos.y+2, pos.z+6, 2, \n pass", "def clear(self):\n self.blocks.clear()" ]
[ "0.6853528", "0.610607", "0.5840603", "0.57335365", "0.5707883", "0.56459004", "0.5503273", "0.5412381", "0.5361791", "0.5271652", "0.5247244", "0.5239406", "0.5233161", "0.52149624", "0.51414394", "0.5088466", "0.5077469", "0.50766724", "0.5050439", "0.50012434", "0.4969171", "0.49632356", "0.49618694", "0.4959833", "0.49454203", "0.4930625", "0.48867133", "0.48776463", "0.4868517", "0.48561093" ]
0.78478986
0
Get the height of an xzcolumn of blocks.
def height_at(self, x, z): return self.heightmap[x * 16 + z]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __column_height(self, x):\n\t\tcolumn = self.board[:, x]\n\t\treturn np.count_nonzero(column)", "def height(self, x):\n\t\treturn np.interp(x, self.x, self.z)", "def height(self):\n return self.row", "def height(self):\n yy = self.yy\n return max(yy) - min(yy)", "def get_height(self,c):\r\n return self.h", "def height(self):\n try:\n return max(elem.height for elem in self[1:])+1\n except ValueError:\n return 0", "def get_grid_height(self):\n # replace with your code\n return self._height", "def height(self):\n return self.i_node.distance(self.n_node)", "def get_grid_height(self):\r\n return self._height", "def get_dimension_height(self):\n pass", "def get_grid_height(self):\r\n return self.height", "def get_grid_height(self):\n return self._height", "def get_grid_height(self):\n return self._height", "def height(self):\n return self.__size[1]", "def bottom_height_px(self):\n return self.bottom_pieces * PipePair.PIECE_HEIGHT", "def get_height(self):\n height = 0\n for layer, ldata in self.conf['Layers'].items():\n layer_t = ldata['params']['thickness']\n height += layer_t\n return height", "def height(self):\n self._updateExtents()\n return self._mHeight", "def get_surface_height_at(x: float, z: float, routing_surface: CommonSurfaceIdentifier) -> float:\n # noinspection PyUnresolvedReferences\n return services.terrain_service.terrain_object().get_routing_surface_height_at(x, z, routing_surface)", "def height(self):\n return _libsbml.Dimensions_height(self)", "def height(self):\n return self.board.shape[0]", "def height(self) -> int:\n\t\treturn self._raw_result['data']['height']", "def height(self):\n return self[\"height\"]", "def height(self):\n return self[\"height\"]", "def height(self):\n return (self.__height)", "def zone_height(self):\n return self._zone_height", "def height(self):\n return self.get_delta_value(self.Y_INDEX)", "def get_height_at_pixel(in_height_file: str, mlines: int, mwidth: int, ref_azlin: int, ref_rpix: int) -> float:\n height = read_bin(in_height_file, mlines, mwidth)\n\n return height[ref_azlin, ref_rpix]", "def get_grid_height(self):\n return self.grid_height", "def get_grid_height(self):\n return self.grid_height", "def get_grid_height(self):\r\n\r\n return self._grid_height" ]
[ "0.70412153", "0.6653234", "0.6424809", "0.6393254", "0.6381678", "0.63737303", "0.63215065", "0.6287072", "0.6262861", "0.62594336", "0.62550616", "0.62349355", "0.62349355", "0.62310463", "0.6210102", "0.6205395", "0.6204255", "0.6203204", "0.61770934", "0.6157957", "0.6151543", "0.6150315", "0.6150315", "0.6133604", "0.61299527", "0.61247367", "0.6121144", "0.61190367", "0.61190367", "0.6117347" ]
0.7296541
0
Execute a search and replace on all blocks in this chunk. Named after the ubiquitous Unix tool. Does a semantic s/search/replace/g on this chunk's blocks.
def sed(self, search, replace): for section in self.sections: for i, block in enumerate(section.blocks): if block == search: section.blocks[i] = replace self.all_damaged = True self.dirty = True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def change_content(options):\n call_command('''grep -r -l -- '%(patrn)s' . | tr '\\\\n' '\\\\0' | xargs -0 sed -i \"s/%(patrn)s/%(repl)s/g\"''', options)", "def run(self, edit):\n\t\t\n\t\tfor region in self.view.find_all(\"“\"):\n\t\t\tself.view.replace(edit, region, \"\\\"\")\n\n\t\tfor region in self.view.find_all(\"”\"):\n\t\t\tself.view.replace(edit, region, \"\\\"\")\n\t\t\n\t\tfor region in self.view.find_all(\"‘\"):\n\t\t\tself.view.replace(edit, region, \"\\'\")\n\n\t\tfor region in self.view.find_all(\"’\"):\n\t\t\tself.view.replace(edit, region, \"\\'\")\n\n\t\t\"\"\" \n\t\tAdd markers after \"// comment\" comments, so that we can add back in linebreaks later.\n\t\tfor region in self.view.find_all(r';.*\\/\\/.*\\n'):\n\t\t\tself.view.insert(edit, region, \"jjlinebreak\")\n\n\t\tRemove all linebreaks not preceeded with semicolon\n\t\tfor region in self.view.find_all(r'\\n(?<!;\\n)'):\n\t\t\tself.view.replace(edit, region, \"\")\n\n\t\tAdd linebreaks where we have \"jjlinebreak\"\n\t\tfor region in self.view.find_all(\"jjlinebreak\"):\n\t\t\tself.view.replace(edit, region, \"\\n\")\n\t\t\"\"\"", "def find_replace_all(self):\n old_term = self.text_find.get()\n new_term = self.text_replace.get()\n while True:\n idx = '1.0'\n idx = self.text.search(old_term, idx, nocase=1,\n stopindex=tk.END)\n lastidx = '% s+% dc' % (idx, len(old_term))\n if not idx:\n break\n self.text.delete(idx, lastidx)\n self.text.insert(idx, new_term)", "def run(self, content):\n parts = []\n offset = 0\n for match in self.regexp.finditer(content):\n parts.append(content[offset:match.start(0)])\n parts.append(self.replace(match))\n offset = match.end(0)\n parts.append(content[offset:])\n return ''.join(parts)", "def replace_all(self):\n i = 0\n while self.textedit.textCursor().hasSelection():\n self.textedit.textCursor().insertText(self.ui.textToReplace.text())\n self.find()\n i += 1\n self.show_message(unicode(self.tr(\"Replaced {0} occurrence(s)\")).format(i))", "def run(self, text):\r\n for i in range(self.markdown.htmlStash.html_counter):\r\n html, safe = self.markdown.htmlStash.rawHtmlBlocks[i]\r\n if self.markdown.safeMode and not safe:\r\n if str(self.markdown.safeMode).lower() == 'escape':\r\n html = self.escape(html)\r\n elif str(self.markdown.safeMode).lower() == 'remove':\r\n html = ''\r\n else:\r\n html = self.markdown.html_replacement_text\r\n if self.isblocklevel(html) and (safe or not self.markdown.safeMode):\r\n text = text.replace(\"<p>%s</p>\" % \r\n (self.markdown.htmlStash.get_placeholder(i)),\r\n html + \"\\n\")\r\n text = text.replace(self.markdown.htmlStash.get_placeholder(i), \r\n html)\r\n return text", "def findAndReplace(self):\n\n # Prompts user for find regex and replace text\n findText, replaceText = Model.FindAndReplaceDialogBox.getResults(self)\n\n # Gets the current selection from the current tab\n selectionModel = self.getCurrentView().selectionModel()\n\n # Pass to panda\n self.getCurrentPanda().findAndReplace(findText, replaceText, selectionModel)", "def run(self):\n if self.multi != None:\n print(\"Running multi-replacement script.\")\n self.multi_repl(self.multi)\n elif self.dirkey != None:\n print(\"Running individual replacements.\")\n self.ind_repl(self.dirkey, self.filekey, self.txtkey)\n else:\n print(\"Guide file is broken. 
Please check.\")", "def do_repl(self, args):\n args = args.split()\n if len(args) != 2:\n print 'usage: scan pat'\n return\n pat = args[0]\n repl = args[1]\n self.regexprutils.replace(pat, repl)", "def _do_fenced_code_blocks(self, text):\r\n return self._fenced_code_block_re.sub(self._fenced_code_block_sub, text)", "def _apply_patch_odoo(self):\n paths = [os.path.join('openerp', 'tools', 'translate.py'),\n os.path.join('odoo', 'tools', 'translate.py')]\n for path in paths:\n s_file = os.path.join(self._server_path, path)\n if not os.path.isfile(s_file):\n continue\n cmd = [\"sed\", \"-i\", \"-e\",\n r\"s/translation'] = src/translation'] = ''/g\",\n s_file]\n print \" \".join(cmd)\n subprocess.call(cmd)", "def hxlreplace():\n run_script(hxlreplace_main)", "def executeAll(lines):", "def update_uses(cfg):\r\n for block in cfg._blockmap.values():\r\n for i in range(len(block.body)):\r\n instr = block.body[i]\r\n a1,a2 = instr.arg1, instr.arg2\r\n t1,t2 = get_root(a1),get_root(a2)\r\n if a1 != None and type(a1) == str and a1[0] == \"%\" and a1[0] != '%_':\r\n for j in range(i, -1, -1):\r\n if block.body[j].dest != None and get_root(block.body[j].dest) == t1:\r\n instr.arg1 = block.body[j].dest\r\n if a2 != None and type(a2) == str and a2[0] == \"%\" and a2[0] != '%_':\r\n for k in range(i, -1, -1):\r\n if block.body[k].dest != None and get_root(block.body[k].dest) == t2:\r\n instr.arg2 = block.body[k].dest", "def _replace_keyword(self, keyword, replacement, count=0):\n\n def replace_with_inline(e, doc):\n if type(e) == Str and e.text == keyword:\n doc.num_matches += 1\n if not count or doc.num_matches <= count:\n return replacement\n\n def replace_with_block(e, doc):\n '''\n It's difficult to replace a keyword with an entire Block element.\n\n This is because the keyword is of type Str (an Inline) and the parent\n object of a Str can only contain Inlines and not Blocks\n (e.g. Para can contain Inlines, not Divs)\n\n Implications:\n\n 1) If the Str that contains the keyword is inside another\n Inline instead of a Block (e.g. Div -> Emph -> Str)\n then we have to do a trick:\n when .walk() touches an Emph that contains Str(keyword),\n it replaces the Emph with Str(keyword).\n\n 2) If the element that contains the Str(keyword) has multiple children,\n then we are in a bind as replacing it will destroy information.\n Thus, we can't do do it\n\n 3) If the element that contains the Str(keyword) does so in a DictContainer\n instead of a ListContainer, then we cannot retrieve the \"first and only\n element\" easily, so we also abort (happens with metadata elements).\n '''\n\n # Here we can check that e.content is ListContainer (i.e. 
not DictContainer)\n # or check that e is not a Metavalue (\"not isinstance(e, MetaValue)\")\n\n if hasattr(e, 'content') and isinstance(e.content, ListContainer) and len(e.content) == 1:\n ee = e.content[0]\n if type(ee) == Str and ee.text == keyword:\n if isinstance(e, Block):\n doc.num_matches += 1\n if not count or doc.num_matches <= count:\n return replacement\n elif isinstance(e, Inline):\n return Str(keyword)\n else:\n pass # not implemented\n\n doc = self.doc\n if doc is None:\n raise Exception('No root document')\n doc.num_matches = 0\n if isinstance(replacement, Inline):\n return self.walk(replace_with_inline, doc)\n elif isinstance(replacement, Block):\n return self.walk(replace_with_block, doc)\n else:\n raise NotImplementedError(type(replacement))", "def run(self, edit, text):\n\n self.view.replace(edit, sublime.Region(0, self.view.size()), text)", "def replace_code(self, pattern, repl):\n regex = re.compile(pattern, re.DOTALL)\n for cell in self.content.cells:\n if cell.cell_type == \"code\" and regex.findall(cell.source):\n cell.source = regex.sub(repl, cell.source)\n print(f\"- code removed from {self.filename}\")", "def extend_template(base, text):\n\n block_search = re.compile(\"{{(block) (\\w+)}}\")\n has_blocks = re.search(block_search, base)\n if not has_blocks:\n return base\n else:\n find_content = re.compile(\"({{block \"+has_blocks.group(2)+\"}})(.*?)({{endblock}})\", re.DOTALL)\n \n content = re.search(find_content, text).group(2)\n base = re.sub(\"{{block \"+has_blocks.group(2)+\"}}\", content, base)\n return extend_template(base, text)", "def run_search(self, links):\n for s in links:\n self._run_command(\" s \\\"{}\\\" \\n\".format(s))", "def run(self, edit):\r\n self.view.erase_regions('json_errors')\r\n for region in self.view.sel():\r\n\r\n selected_entire_file = False\r\n\r\n # If no selection, use the entire file as the selection\r\n if region.empty() and s.get(\"use_entire_file_if_no_selection\", True):\r\n selection = sublime.Region(0, self.view.size())\r\n selected_entire_file = True\r\n else:\r\n selection = region\r\n\r\n try:\r\n obj = json.loads(self.view.substr(selection),\r\n object_pairs_hook=OrderedDict,\r\n parse_float=decimal.Decimal)\r\n\r\n self.view.replace(edit, selection, json.dumps(obj,\r\n ensure_ascii=s.get(\"ensure_ascii\", False),\r\n sort_keys=s.get(\"sort_keys\", False),\r\n separators=(',', ':'),\r\n use_decimal=True))\r\n\r\n if selected_entire_file:\r\n self.change_syntax()\r\n\r\n except Exception:\r\n exc = sys.exc_info()[1]\r\n self.highlight_error(str(exc))\r\n sublime.status_message(str(exc))", "def run(self, **kwargs: Any) -> None:\n get_nodes = (\n self.document.findall # docutils 0.18+\n if hasattr(self.document, \"findall\")\n else self.document.traverse # docutils <= 0.17.x\n )\n for node in list(get_nodes(nodes.table)):\n new_node = nodes.container(classes=[\"table-wrapper\"])\n new_node.update_all_atts(node)\n node.parent.replace(node, new_node)\n new_node.append(node)\n\n for node in list(get_nodes(nodes.math_block)):\n new_node = nodes.container(classes=[\"math-wrapper\"])\n new_node.update_all_atts(node)\n node.parent.replace(node, new_node)\n new_node.append(node)", "def __PerformSubstitutions(self, text):\n\n for substitution in self.substitutions:\n pattern, replacement = self.SplitValue(substitution)\n text = re.compile(pattern,re.M).sub(replacement, text)\n return text", "def run(self, edit):\n\n obj = ConvertPythonSrc2Obj().convert(self.view.substr(sublime.Region(0, self.view.size())))[0]\n\n if obj is 
None:\n return\n if not obj.get('name'):\n error('A valid name must be provided!')\n elif obj.get('scope') is None and obj.get('find') is None:\n error('A valid find pattern or scope must be provided!')\n elif not self.is_existing_name(obj['name']):\n try:\n if obj.get('find') is not None:\n if obj.get('selection_inputs', False):\n pass\n elif obj.get('literal', False):\n flags = 0\n pattern = re.escape(obj['find'])\n if obj.get('literal_ignorecase', False):\n flags = re.I\n re.compile(pattern, flags)\n else:\n extend = sublime.load_settings(\n 'reg_replace.sublime-settings'\n ).get('extended_back_references', False)\n if extend:\n bre.compile_search(obj['find'])\n else:\n re.compile(obj['find'])\n settings = sublime.load_settings('reg_replace_rules.sublime-settings')\n rules = settings.get('replacements', {})\n rules[obj['name']] = obj\n settings.set('replacements', rules)\n sublime.save_settings('reg_replace_rules.sublime-settings')\n self.view.settings().set('regreplace.name', obj['name'])\n except Exception as e:\n error('Regex compile failed!\\n\\n%s' % str(e))", "def run(jsglobals, target):\n\n with open(target, \"rt\") as f:\n text = f.read()\n text = process_text(text, jsglobals)\n\n in_place_replace(target, text)\n\n return 0", "def substitute(files: str, pattern: str, replacement: str):\n with fileinput.input(\n files=glob.glob(files, recursive=True), inplace=True\n ) as file:\n for line in file:\n print(re.sub(pattern, replacement, line), end='')", "def __replaceFiles(self):\n self.ui.showReplaceFilesDialog(self.textForFind())", "def run(self, lines):\n self.lines = [line.strip() for line in lines]\n for index, line in enumerate(lines, start=1):\n self.truncate_line(line)\n line = self.remove_links(line)\n if line.startswith(CODE_BLOCK_DELIMITER):\n self.in_code_block = not self.in_code_block\n self.check_line(index, line)", "def run(self, edit):\n\n obj, test = ConvertPythonSrc2Obj().convert(self.view.substr(sublime.Region(0, self.view.size())))\n\n # Something went wrong.\n if test is None or obj is None:\n return\n\n # Ensure test command is valid.\n if not self.process_test_cmd(test):\n return\n\n # Copy all regex rules that are to be included in the test sequence\n test_rules = {}\n rules = sublime.load_settings('reg_replace_rules.sublime-settings').get('replacements', {})\n for x in test['replacements']:\n if x in rules:\n test_rules[x] = rules[x]\n\n # Ensure the bare minimum items are in the current test rule\n # and ensure the regex (if any) compiles.\n # If all is well, execute the command.\n if not obj.get('name'):\n error('A valid name must be provided!')\n elif obj.get('scope') is None and obj.get('find') is None:\n error('A valid find pattern or scope must be provided!')\n else:\n try:\n if obj.get('find') is not None:\n if obj.get('selection_inputs', False):\n pass\n elif obj.get('literal', False):\n flags = 0\n pattern = re.escape(obj['find'])\n if obj.get('literal_ignorecase', False):\n flags = re.I\n re.compile(pattern, flags)\n else:\n extend = sublime.load_settings(\n 'reg_replace.sublime-settings'\n ).get('extended_back_references', False)\n if extend:\n bre.compile_search(obj['find'])\n else:\n re.compile(obj['find'])\n test_rules[obj['name']] = obj\n settings = sublime.load_settings('reg_replace_test.sublime-settings')\n settings.set('format', '3.2')\n settings.set('replacements', test_rules)\n window = sublime.active_window()\n if window is not None:\n view = window.active_view()\n if view is not None:\n test[\"use_test_buffer\"] = True\n 
view.run_command('reg_replace', test)\n except Exception as e:\n error('Regex compile failed!\\n\\n%s' % str(e))", "def find_values_to_replace(self):\n regexp = re.compile(self.raw_pattern)\n self.to_replace = regexp.findall(self.raw_sql)", "def run(self, lines):\n\n text = \"\\n\".join(lines)\n while 1:\n m = self.FENCED_BLOCK_RE.search(text)\n if m:\n lang = \"\"\n if m.group(\"lang\"):\n lang = m.group(\"lang\")\n html = highlight(\n m.group(\"code\"), self.config, self.markdown.tab_length, lang=lang\n )\n placeholder = self.markdown.htmlStash.store(html)\n text = \"%s\\n%s\\n%s\" % (text[: m.start()], placeholder, text[m.end():])\n else:\n break\n return text.split(\"\\n\")" ]
[ "0.5799908", "0.56883264", "0.55834264", "0.5576345", "0.53871274", "0.5384175", "0.53761226", "0.5295978", "0.5126247", "0.5111896", "0.4971735", "0.4959113", "0.49545926", "0.4946214", "0.49248308", "0.49042535", "0.48981127", "0.48809478", "0.48734272", "0.48707765", "0.48387244", "0.48379388", "0.48107985", "0.48074698", "0.48066556", "0.47950026", "0.4793637", "0.4787225", "0.47863463", "0.47779602" ]
0.72197974
0
tests the init method of MicrophoneToText
def test_init(self): mic = mi.MicrophoneToText() self.assertTrue(mic.switch) self.assertIsNotNone(mic.resultkeywords) self.assertIsNotNone(mic.result) self.assertIsNotNone(mic.keywordsshort) # tests also chunk and maxbuffer self.assertIsNotNone(mic.q) self.assertIsNotNone(mic.keywords) self.assertIsNotNone(mic.resultkeywords) self.assertIsNotNone(mic.speech_to_text) # tests also audio, format, channel and rate self.assertIsNotNone(mic.stream) self.assertIsNotNone(mic.audio_source)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, src='mic', debug = False):\n self.recog_obj = speech_recognition.Recognizer() #setting the recognizer object (a instance of Recognizer module of speech_recognition package)\n self.mic_obj = speech_recognition.Microphone() #setting the microphone object (a instance of Microphone module of speech_recognition package)\n self.debug = debug #debug option\n self.src = src #source option\n self.tcr = TextConvRules() #instance Text conversion module that can later used to define conversion rule in the text such as punctuation", "def setUp(self):\n\n audio = open(SAMPLE_AUDIO, 'rb')\n self.converter = Converter(FileStorage(audio), 'en')\n audio.close()", "def setUp(self):\n super(test_phonenumbers,self).setUp()\n self.pn = bss_phonumbers_fields.phonenumber()\n self.samples = [\n '9545551234,US',\n '0411234567,CH',\n '041123456é,CH',\n '',\n '11111111111111111',\n ]", "def initAudio(self):\n\t\t# Initialize pitch detection\n\t\tself.listener = PitchDetect(channels=1)\n\t\tself.listener.listen()\n\t\tself.recording = False\n\t\tself.paused = False", "def test_init(self):\n self._api.Init(lang=\"eng+osd\")\n self.assertEqual(self._api.GetInitLanguagesAsString(), \"eng+osd\")\n self._api.Init(lang=\"eng\")\n self.assertEqual(self._api.GetInitLanguagesAsString(), \"eng\")\n self._api.Init(oem=tesserocr.OEM.TESSERACT_ONLY)\n self.assertEqual(self._api.oem(), tesserocr.OEM.TESSERACT_ONLY)", "def test_init(self):\n orig = \"\"\n r = self.SequenceClass(orig)\n self.assertEqual(str(r), orig)\n\n orig = \"TCAGGA\"\n r = self.SequenceClass(orig)\n self.assertEqual(r._data, array([6, 62]))\n self.assertEqual(str(r), orig)", "def make_silence_phones_txt(self):\n raise NotImplementedError", "def test_analyze_text(self):\n\n mic = mi.MicrophoneToText()\n\n with open('../examples/result.txt', 'w', encoding='utf-8') as f:\n f.write('x transcript\": straße lautet aarbergerstraße }x\\n')\n f.write('x transcript\": ort lautet testort }x\\n')\n f.write('x transcript\": einkommen lautet testeinkommen }x\\n')\n f.write('x transcript\": kaufpreis lautet testkaufpreis }x\\n')\n f.write('x transcript\": eigenkapital lautet testkapital }x\\n')\n\n #mic.threader()\n\n mic.switchoff()\n print(mic.keywords.values())\n with open('../examples/result.txt', 'r', encoding='utf-8') as f:\n filestring = f.read()\n print(filestring)\n self.assertTrue(' straße lautet aarbergerstraße ' in filestring)", "def test_device_unicode(self):\n with captured_output() as (out, err):\n devices = AudioUtilities.GetAllDevices()\n print(\"devices: %s\" % devices)\n for device in devices:\n print(\"device: %s\" % device)", "def __init__(self, port):\n self._ser = Serial(port, 9600, 8, 'N', 1, timeout=1)\n self._ser.write(b\"\\x03\")\n time.sleep(0.1)\n self._ser.write(b\" \")\n time.sleep(0.1)\n init_response = self._ser.read(100)\n if init_response != b\" SMC24 v2.12\\r\\n\":\n click.echo(\"Monochromator not initialized correctly\", err=True)\n sys.exit(-1)\n self._pos_regex = re.compile(\"^Z\\\\s+(-?\\\\d+).*$\")", "def test_init(self):\n orig = \"\"\n r = self.SequenceClass(orig)\n self.assertEqual(str(r), orig)\n\n orig = \"TCAGGA\"\n r = self.SequenceClass(orig)\n self.assertEqual(r._data, array([0, 1, 2, 3, 3, 2]))\n self.assertEqual(str(r), orig)", "def setUp(self):\n\n subtitles_binary = open(SAMPLE_SUBTITLES_PATH, 'rb')\n subtitles_file = FileStorage(subtitles_binary)\n self.sp = SubtitleParser(subtitles_file, SAMPLE_SUBTITLES_LANGUAGE, SAMPLE_AUDIO_LANGUAGE)\n subtitles_binary.close()", "def initInfo(self):\n\t\tinfoCx, 
infoCy = self.width/2, self.height*0.85\n\t\tself.pitchText = self.createText( infoCx, infoCy, \n\t\t\t\t\t\t\t\t\t\t\tNone, self.pitchTextFont)\n\t\tself.pitchText.text = \"Currently: No pitch detected.\"\n\n\t\tself.title = self.createText( infoCx, self.height*0.15, None, self.titleFont)\n\t\tself.title.text = \"Tuner\"\n\t\tself.subtitle = self.createText(infoCx, self.height*0.2, None, self.labelFont)\n\t\tself.subtitle.text = \"(equal temperament)\"", "def test_init(self):\n orig = \"TC---\"\n seq = self.SequenceClass(orig)\n self.assertEqual(str(seq), orig)", "def init_recording(self):\n self.statusBar().showMessage('Initialising...')\n self.streams = resolve_stream('type', 'EEG')\n self.inlet = StreamInlet(self.streams[0])\n self.timeObj = []\n self.sampleObj = []", "def __init__(self):\r\n\t\tself.introducer()\r\n\t\tif self.code_mode == \"1\":\r\n\t\t\tif self.input_mode == \"1\":\r\n\t\t\t\tself.encrypt_message()\r\n\t\t\telse:\r\n\t\t\t\tself.encrypt_text_file()\r\n\t\t\t\t#print(\"work in progress\")\r\n\t\telif self.code_mode == \"2\":\r\n\t\t\tif self.input_mode == \"1\":\r\n\t\t\t\tself.decrypt_message()\r\n\t\t\telse:\r\n\t\t\t\tself.decrypt_text_file()\r\n\t\telse:\r\n\t\t\tif self.input_mode == \"1\":\r\n\t\t\t\tself.hack_message()\r\n\t\t\telse:\r\n\t\t\t\tself.hack_text_file()", "def __init__(self):\n self._eng = pyttsx.init()\n self._eng.connect(\"started-utterance\", self._onStart)\n self._eng.connect(\"started-word\", self._onWord)\n self._eng.connect(\"finished-utterance\", self._onEnd)", "def __init__(self):\n super().__init__(interface.Audio, DEFAULT_PRIORITIES)", "def __init__(self):\n super().__init__()\n self.printTag = 'SAMPLER MONTECARLO'\n self.samplingType = None\n self.limit = None", "def init(self):\n self.AOMBoxConnection = pyArdDAC.ARD_DAC(HOST=self.AOMBox_IP, PORT=8888, DEBUG=False)#connects to arduino in High frequency Na AOM box\n #channel number should be defined in subclass\n self.INTEGER_MIN = 0\n self.INTEGER_MAX = 65535\n self.VOLTAGE_MIN = 0.0\n self.VOLTAGE_MAX = 5.0\n self.initialised=True\n return \"%s init successful\" % self.hardwareActionName", "def __init__(self, text):\n\n self.text = text", "def speech_recognize_from_microphone():\n speech_config = speechsdk.SpeechConfig(subscription=speech_key, region=service_region)\n speech_config.request_word_level_timestamps()\n speech_config.output_format = speechsdk.OutputFormat(1)\n\n speech_recognizer = speechsdk.SpeechRecognizer(speech_config=speech_config)\n\n done = False\n\n def stop_cb(evt):\n \"\"\"callback that signals to stop continuous recognition upon receiving an event `evt`\"\"\"\n print('CLOSING on {}'.format(evt))\n nonlocal done\n done = True\n\n def recognized_cb(evt):\n \"\"\"callback for recognized event\"\"\"\n if evt.result.reason == speechsdk.ResultReason.RecognizedSpeech:\n #print('RECOGNIZED: {}'.format(evt.result.text))\n #print('All params: {}'.format(evt.result))\n #print(evt.result.json)\n response = json.loads(evt.result.json)\n #print('All params: {}'.format(response))\n Text = response[\"DisplayText\"]\n duration = 0;\n for word in response[\"NBest\"][0][\"Words\"]:\n duration += word[\"Duration\"]\n duration = duration / 10000000\n print(\"dur :\"+str(duration)+\" text: \" + Text)\n\n # Connect callbacks to the events fired by the speech recognizer\n speech_recognizer.recognized.connect(recognized_cb)\n speech_recognizer.session_started.connect(lambda evt: print('SESSION STARTED: {}'.format(evt)))\n speech_recognizer.session_stopped.connect(lambda evt: 
print('SESSION STOPPED {}'.format(evt)))\n speech_recognizer.canceled.connect(lambda evt: print('CANCELED {}'.format(evt)))\n # stop continuous recognition on either session stopped or canceled events\n speech_recognizer.session_stopped.connect(stop_cb)\n speech_recognizer.canceled.connect(stop_cb)\n\n # Start keyword recognition\n speech_recognizer.start_continuous_recognition()\n\n while not done:\n time.sleep(.5)\n\n speech_recognizer.stop_continuous_recognition()", "def speech_recognizer_function(self, text_widget):\r\n label_listening = Label(self.root, text=\"listening to input...\",\r\n font=self.text_font, bg=self.bg_color)\r\n label_listening.pack(pady=10)\r\n recognizer = speech_recognition.Recognizer()\r\n microphone = speech_recognition.Microphone()\r\n with microphone as source:\r\n recognizer.adjust_for_ambient_noise(source)\r\n audio = recognizer.listen(source)\r\n try:\r\n text = recognizer.recognize_google(audio)\r\n text += \" \"\r\n except:\r\n text = \"\"\r\n text_widget.insert(END, text)\r\n label_listening.destroy()\r\n self.thread_speech_is_running = False", "def test_convert_audio_to_text(self):\n\n text = self.converter.convert_audio_to_text(START, END, [WORD], lambda: False)\n text = text.strip()\n self.assertEqual(text, WORD)", "def initFormat(self):\n pass", "def test_initialization(self):\r\n self.assertEqual(str(self.p), '0% [....................]')", "def _init_display(self):\n raise NotImplementedError", "def __init__(self, text):\n self.text = text", "def __init__(self, text):\n self.text = text", "def __init__(self, data=None, texOutput=None):\n self.data = data\n self.texOutput = texOutput" ]
[ "0.65088475", "0.6317328", "0.6297638", "0.5739098", "0.5717202", "0.57086253", "0.5613694", "0.5601155", "0.55943924", "0.55922794", "0.5561357", "0.55337906", "0.55214226", "0.5499641", "0.5494486", "0.5462692", "0.5460193", "0.5424771", "0.5377457", "0.5363363", "0.5362899", "0.53464365", "0.532879", "0.5327954", "0.532167", "0.5319505", "0.5316305", "0.5313697", "0.5313697", "0.52660984" ]
0.80085844
0
tests the switchoff method of MicrophoneToText
def test_switchoff(self): mic = mi.MicrophoneToText() mic.switchoff() with self.assertRaises(OSError): mic.stream.is_active() self.assertFalse(mic.switch) self.assertFalse(mic.audio_source.is_recording) self.assertTrue(mic.result.closed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def off(config: dict):\n switch_device(config, config[\"inching\"], \"off\")", "def _turn_off(self):\n self._turn_display('OFF')", "def turn_off(self):\n print(\"Turning the lights off\")\n self.led.all_off()\n self.client.publish(STATE_TOPIC, OFF) #publish", "def rtsOff():\n pass", "def turn_off(self, **kwargs: Any) -> None:\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands = [{\"code\": DPCODE_LIGHT, \"value\": False}]\n else:\n commands = [{\"code\": DPCODE_SWITCH, \"value\": False}]\n self._send_command(commands)", "def off_switch(self):\n self._switch_callback = None", "def test_turn_off(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.OFF\n power_supply.turn_off()\n assert power_supply.state() == tango.DevState.OFF", "def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)", "def switch_off(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def turn_output_off(self):\n self.instr.write('RF0')\n time.sleep(self.sleep_time)", "def turnOff(self):\n self.write(\"E;O0;E;\")\n return self.output()", "def turn_off(self, **kwargs):\n self.smartplug.turn_off()", "def turnLightingSystemOff():\n dislin.light('OFF')", "def OnStopPress(self, event):\n\t\tself.onOffText.SetLabel('Off')\n\t\tself.isBaselineRunning = False\n\t\tself.hasBaselineEnded = True", "def off(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 1:\n __sauna.control.togglePortValue(\"Light Switch\")\n __sauna.control.resetPortValue(\"Light Switch\")\n\n if __sauna.control.getPortValue(\"Oven Sensor\") == 1:\n __sauna.control.togglePortValue(\"Oven Switch\")\n __sauna.control.resetPortValue(\"Oven Switch\")\n\n if __sauna.control.getPortValue(\"Power Sensor\") == 1:\n __sauna.control.togglePortValue(\"Power Switch\")\n __sauna.control.resetPortValue(\"Power Switch\")\n\n __login.logout_user()\n\n str_list = []\n str_list.append('Sauna power is switched OFF.\\n')\n str_list.append('Sauna oven is switched OFF.\\n')\n str_list.append('Sauna light is switched OFF.\\n')\n str_list.append('You are logged OUT.\\n')\n temperature = __sauna.control.getPortValue(\"Temperature Sensor\")\n str_list.append(\"Temp is \" + str(temperature) + \" C.\\n\")\n update.message.reply_text(''.join(str_list))", "def turn_off(self):\n self.write(\"OUT0\\n\")", "def sm_output_off(self):\n self.sm.output_off()", "def turn_off(self, **kwargs: Any) -> None:\n self._device.power_on = False\n _LOGGER.debug(\"Turn off light %s\", self._device.ip)", "def testToggleStopRecord(self):\n self.mgr.captureMode = CAPTURE_MODE_VIDEO\n self.mgr.isRecording = True\n self.mgr.handleRecordCommand( CAPTURE_MODE_VIDEO, RECORD_COMMAND_TOGGLE )\n self.mgr.sendGoProCommand.assert_called_with(mavutil.mavlink.GOPRO_COMMAND_SHUTTER, (0, 0, 0, 0))", "def _off_received(self):\n self._call_subscribers(on_level=0x00)", "def turn_off(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn off\"):\n self.wemo.off()", "def turn_off_modem(self):\n if self.is_power_on():\n self._logger.debug(\"Switching modem 
off...\")\n self.set_pin()\n GPIO.cleanup()\n # give modem some time to log out\n time.sleep(5)\n else:\n self._logger.debug(\"GSM modem is already OFF...\")", "def lightoff(update: Update, context: CallbackContext) -> None:\n if __sauna.control.getPortValue(\"Light Sensor\") == 1:\n # TODO Mit Stromstossrelais ist dieser Code richtig\n # __sauna.control.togglePortValue(\"Light Switch\")\n update.message.reply_text(\"Light is off\")\n else:\n update.message.reply_text(\"Light was already off\")\n\n __sauna.control.resetPortValue(\"Light Switch\")\n val = __sauna.control.getPortValue(\"Light Switch\")\n update.message.reply_text(\"Light Switch := \" + str(val))", "def power_off(self):\n return self.inst.write(':OUTP OFF')", "def turn_off(self):\n GPIO.output(self.gpio, False) # turn off light", "async def async_turn_off(self, **kwargs: Any) -> None:\n self._device.light_on = False", "def turn_off(self) -> None:\n self._media_title = None\n self._state = self._player.turn_off()", "def off(self):", "def turn_display_off(turn_off):\n if turn_off:\n send_command(0xAE)\n else:\n send_command(0xAF)", "def pswitchoff(chan) :\n s.phaseSwitching(False, chan)" ]
[ "0.65265304", "0.64437073", "0.63737315", "0.62635034", "0.61954254", "0.6169329", "0.61235493", "0.6122673", "0.60922056", "0.60602874", "0.60577524", "0.6016153", "0.599097", "0.5986121", "0.5974701", "0.5949988", "0.59367454", "0.5933775", "0.5930321", "0.5915988", "0.5910866", "0.5888893", "0.5831824", "0.58147424", "0.58135575", "0.580999", "0.5773308", "0.57730174", "0.5763803", "0.5754266" ]
0.7986152
0
tests the analyze_text method of MicrophoneToText
def test_analyze_text(self):

    mic = mi.MicrophoneToText()

    with open('../examples/result.txt', 'w', encoding='utf-8') as f:
        f.write('x transcript": straße lautet aarbergerstraße }x\n')
        f.write('x transcript": ort lautet testort }x\n')
        f.write('x transcript": einkommen lautet testeinkommen }x\n')
        f.write('x transcript": kaufpreis lautet testkaufpreis }x\n')
        f.write('x transcript": eigenkapital lautet testkapital }x\n')

    #mic.threader()

    mic.switchoff()
    print(mic.keywords.values())
    with open('../examples/result.txt', 'r', encoding='utf-8') as f:
        filestring = f.read()
    print(filestring)
    self.assertTrue(' straße lautet aarbergerstraße ' in filestring)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_convert_audio_to_text(self):\n\n text = self.converter.convert_audio_to_text(START, END, [WORD], lambda: False)\n text = text.strip()\n self.assertEqual(text, WORD)", "def speech_recognizer_function(self, text_widget):\r\n label_listening = Label(self.root, text=\"listening to input...\",\r\n font=self.text_font, bg=self.bg_color)\r\n label_listening.pack(pady=10)\r\n recognizer = speech_recognition.Recognizer()\r\n microphone = speech_recognition.Microphone()\r\n with microphone as source:\r\n recognizer.adjust_for_ambient_noise(source)\r\n audio = recognizer.listen(source)\r\n try:\r\n text = recognizer.recognize_google(audio)\r\n text += \" \"\r\n except:\r\n text = \"\"\r\n text_widget.insert(END, text)\r\n label_listening.destroy()\r\n self.thread_speech_is_running = False", "def speech_recognize_from_microphone():\n speech_config = speechsdk.SpeechConfig(subscription=speech_key, region=service_region)\n speech_config.request_word_level_timestamps()\n speech_config.output_format = speechsdk.OutputFormat(1)\n\n speech_recognizer = speechsdk.SpeechRecognizer(speech_config=speech_config)\n\n done = False\n\n def stop_cb(evt):\n \"\"\"callback that signals to stop continuous recognition upon receiving an event `evt`\"\"\"\n print('CLOSING on {}'.format(evt))\n nonlocal done\n done = True\n\n def recognized_cb(evt):\n \"\"\"callback for recognized event\"\"\"\n if evt.result.reason == speechsdk.ResultReason.RecognizedSpeech:\n #print('RECOGNIZED: {}'.format(evt.result.text))\n #print('All params: {}'.format(evt.result))\n #print(evt.result.json)\n response = json.loads(evt.result.json)\n #print('All params: {}'.format(response))\n Text = response[\"DisplayText\"]\n duration = 0;\n for word in response[\"NBest\"][0][\"Words\"]:\n duration += word[\"Duration\"]\n duration = duration / 10000000\n print(\"dur :\"+str(duration)+\" text: \" + Text)\n\n # Connect callbacks to the events fired by the speech recognizer\n speech_recognizer.recognized.connect(recognized_cb)\n speech_recognizer.session_started.connect(lambda evt: print('SESSION STARTED: {}'.format(evt)))\n speech_recognizer.session_stopped.connect(lambda evt: print('SESSION STOPPED {}'.format(evt)))\n speech_recognizer.canceled.connect(lambda evt: print('CANCELED {}'.format(evt)))\n # stop continuous recognition on either session stopped or canceled events\n speech_recognizer.session_stopped.connect(stop_cb)\n speech_recognizer.canceled.connect(stop_cb)\n\n # Start keyword recognition\n speech_recognizer.start_continuous_recognition()\n\n while not done:\n time.sleep(.5)\n\n speech_recognizer.stop_continuous_recognition()", "def processText(self, text: str, filename: str) :\n execution_time = 0.\n\n directory = os.path.join(self.execution_time_dir, AUDIO_DIR, self.getTTS().getName())\n make_dir(directory)\n time_for_generating_audio_fpath = os.path.join(directory, filename + \".txt\")\n \n audio_fpath = self.getTTS().getAudioPath(\n text=text, audio_dir=self.audio_dir, filename=filename)\n \n if self.recompute or not os.path.exists(audio_fpath):\n # print(audio_fpath)\n start_time = time.time()\n self.getTTS().generateAudio(text=text, audio_fpath=audio_fpath)\n save_execution_time(fpath=time_for_generating_audio_fpath, execution_time=time.time() - start_time)\n \n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_generating_audio_fpath) \n \n transcription_dir = os.path.join(self.transcription_dir, self.getTTS().getName())\n \n transcriptions = {}\n for asr in self.asrs :\n 
directory = os.path.join(\n self.execution_time_dir, TRANSCRIPTION_DIR, self.getTTS().getName(), asr.getName())\n make_dir(directory)\n time_for_recognizing_audio_fpath = os.path.join(\n directory, filename + \".txt\")\n\n if self.recompute :\n start_time = time.time()\n # TODO: \n # change recognize audio -> input audio instead of fpath\n # audio = asr.loadAudio(audio_fpath=audio_fpath)\n # transcription = asr.recognizeAudio(audio=audio)\n # asr.saveTranscription(transcription_fpath, transcription)\n transcription = asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.setTranscription(transcription)\n asr.saveTranscription(transcription_dir=transcription_dir, filename=filename)\n save_execution_time(fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n \n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n num_retry = 0\n while transcription == \"\" and num_retry < self.max_num_retry :\n start_time = time.time()\n asr.recognizeAudio(audio_fpath=audio_fpath)\n asr.saveTranscription(\n transcription_dir=transcription_dir, filename=filename)\n save_execution_time(\n fpath=time_for_recognizing_audio_fpath, execution_time=time.time() - start_time)\n transcription = asr.loadTranscription(\n transcription_dir=transcription_dir, filename=filename)\n\n if asr.getName() == \"wit\" :\n random_number = float(random.randint(9, 47))/10.\n time.sleep(random_number)\n\n num_retry += 1\n\n transcriptions[asr.getName()] = preprocess_text(transcription)\n\n ## add execution time for generating audio\n execution_time += get_execution_time(\n fpath=time_for_recognizing_audio_fpath) \n \n\n cases = self.caseDeterminer(text, transcriptions)\n # if sum(cases.values()) == 0 :\n # print(text)\n # print(transcriptions[\"wav2vec2\"])\n # print(cases)\n # print()\n \n for asr_name, case in cases.items() :\n self.saveCase(self.case_dir, self.getTTS().getName(), asr_name, filename, str(case))\n\n # print(f\"Execution time: {execution_time}\")\n return cases, execution_time", "def make_silence_phones_txt(self):\n raise NotImplementedError", "def test_init(self):\n mic = mi.MicrophoneToText()\n\n self.assertTrue(mic.switch)\n self.assertIsNotNone(mic.resultkeywords)\n self.assertIsNotNone(mic.result)\n self.assertIsNotNone(mic.keywordsshort)\n # tests also chunk and maxbuffer\n self.assertIsNotNone(mic.q)\n self.assertIsNotNone(mic.keywords)\n self.assertIsNotNone(mic.resultkeywords)\n self.assertIsNotNone(mic.speech_to_text)\n # tests also audio, format, channel and rate\n self.assertIsNotNone(mic.stream)\n self.assertIsNotNone(mic.audio_source)", "def tts(model, text):\n\tif USE_CUDA:\n\t\tmodel = model.cuda()\n\t\n\t# NOTE: dropout in the decoder should be activated for generalization!\n\t# model.decoder.eval()\n\tmodel.encoder.eval()\n\tmodel.postnet.eval()\n\n\tsequence = np.array(text_to_sequence(text))\n\tsequence = Variable(torch.from_numpy(sequence)).unsqueeze(0)\n\tif USE_CUDA:\n\t\tsequence = sequence.cuda()\n\n\t# Greedy decoding\n\tmel_outputs, linear_outputs, gate_outputs, alignments = model(sequence)\n\n\tlinear_output = linear_outputs[0].cpu().data.numpy()\n\tspectrogram = audio._denormalize(linear_output)\n\talignment = alignments[0].cpu().data.numpy()\n\n\t# Predicted audio signal\n\twaveform = audio.inv_spectrogram(linear_output.T)\n\n\treturn waveform, alignment, spectrogram", "def stats_text(test):\n\n stats_text_en(test) \n \n stats_text_cn(test)", "def audio2text(audio):\n r = sr.Recognizer()\n with sr.AudioFile(audio) as 
source:\n audio = r.listen(source)\n\n text = r.recognize_google(audio, language=\"de_DE.utf8\")\n print(f'fetched {file}: {text}')\n return text", "def analyse_text(custom_text, classifier, Resource, threshold, language='en'):\n return [(bytes(custom_text, 'utf-8'),\n _minimal_analysis(bytes(custom_text, 'utf-8'), classifier, Resource, threshold, language))]", "def analyze(self, text):\n\n # start from 0 for each Analyser variable\n self.positives = 0\n self.negatives = 0\n\n # precise self text value\n self.text = text\n\n # declare a tokenased word\n tokenizer = nltk.tokenize.TweetTokenizer()\n tokens = tokenizer.tokenize(text)\n\n # indicate the length of list tokens\n size = len(tokens)\n\n # all the word stuff to ckeck\n for word in tokens:\n\n # chaque mots est converti en mot sans majuscule\n word = str.lower(word)\n\n linespos = [line.rstrip('\\n') for line in open('positive-words.txt')]\n linesneg = [line.rstrip('\\n') for line in open('negative-words.txt')]\n\n # check for positive or negative or neutral words\n if word in linespos:\n self.positives += 1\n elif word in linesneg:\n self.negatives += 1\n else:\n continue\n\n # score calculculated and reurned\n score = self.positives - self.negatives\n\n return score", "def text_to_speech(entry):\n text = entry.get_text()\n if text:\n subprocess.call([\"milena_say\", text])", "def analyze_text (self, testing_string): \n self.length = len(self.testing_string)\n self.total_words = (self.testing_string).split()\n self.total_unique_words = set(self.total_words)\n\n self.total_characters = (int)(0)\n for ch in self.testing_string :\n if(ch.isspace() != True):\n self.total_characters = self.total_characters + 1 \n\n self.total_unique_characters = set(self.testing_string)\n \n Linguist.about_given_string[\"Length\"] = self.length\n Linguist.about_given_string[\"Total_words\"] = len(self.total_words)\n Linguist.about_given_string[\"Total_unique_words\"] = len(self.total_unique_words)\n Linguist.about_given_string[\"Total_characters\"] = self.total_characters\n Linguist.about_given_string[\"Total_unique_characters\"] = len(self.total_unique_characters)", "def test_analysis_screen_with_clean_text(client, text_to_analyse):\n path = reverse('text_analysis:analysis')\n response = client.get(path, {'fulltext': text_to_analyse})\n assert response.status_code == 200, 'Should return an `OK` status code'", "def detect_text(file_name):\n client = vision.ImageAnnotatorClient()\n with io.open(file_name, 'rb') as image_file:\n content = image_file.read()\n image = types.Image(content=content)\n response = client.text_detection(image=image)\n texts = response.text_annotations\n for text in texts:\n print('\\n\"{}\"'.format(text.description))\n print('{} {}'.format('Amount of values in returning list:', len(texts)))\n whole = values_builder(texts)\n # voltage = values_builder(texts, 'voltage')\n # current = values_builder(texts, 'current')\n # charge_amt = values_builder(texts, 'charge_amt')\n # return voltage, current, charge_amt\n return whole", "def test_analyze_text():\n # Create a lexer instance and analyze a text with some rules\n new_dict = lex._lexer(None, None).analyze_text(\n \"test\", [lex_bases.rule(\"JUMP_LINE\", r\"\\n\"), lex_bases.rule(\"TEST\", r\"test\")]\n )\n\n # Check if the returned values are correct\n assert (\n new_dict[\"token\"] == lex_bases.token(\"TEST\", \"test\")\n and new_dict[\"fit_with_a_rule\"]\n and new_dict[\"rule_that_matched\"] == lex_bases.rule(\"TEST\", r\"test\")\n )", "def getTextFromSpeak(self):\n raise 
NotImplementedError", "def test_text(self):\n result = self._do_output(o.TextOutput(o.Color.Never), self._demo_msgs)\n self.assertEqual(result,\n \"mock: mock.cmake(1): error: short text\\n\"\n \"mock: mock.cmake(2): warning: short text\\n\"\n \"mock: mock.cmake(3): notice: short text\\n\"\n \"mock: error: short text\\n\"\n \"mock: mock.cmake: error: short text\\n\"\n )", "def test_recognize(self):\n\n rec = mi.MyRecognizeCallback()\n rec.on_close()\n rec.on_connected()\n rec.on_data('\"final\": true truetestd')\n rec.on_error(\"testerror\")\n rec.on_hypothesis(\"testh\")\n rec.on_inactivity_timeout(\"testerrorinac\")\n rec.on_listening()\n rec.on_transcription(\"testtr\")\n self.assertIsNotNone(rec)", "def test_find_word(self):\n mic = mi.MicrophoneToText()\n\n teststring = 'x transcript\": ort lautet testort }x'\n\n word = mic.find_word(teststring)\n\n self.assertEqual(word, ' ort lautet testort ')", "def process_text(self, text, language):", "def process_speak_listen(device_index, mp3_filename, text, record, flag):\n\n mp3_filename = mp3_filename + \".mp3\"\n try:\n tts = gTTS(text=text, lang='en', slow=False)\n tts.save(mp3_filename)\n playsound(mp3_filename)\n os.remove(mp3_filename)\n\n if flag != 1:\n with sr.Microphone(device_index=device_index) as source:\n record.adjust_for_ambient_noise(source, duration=1)\n print(\"Speak:\")\n os.system(\"zenity --progress --width=400 --height=200 --title='Speak Now' \"\n \"--text='Speak Now......No need to click OK button' --no-cancel &\")\n try:\n audio = record.listen(source, timeout=5)\n text = record.recognize_google(audio)\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(text)\n except LookupError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : LookupError - Could not able to understand\")\n text = None\n except speech_recognition.WaitTimeoutError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : WaitTimeoutError - Could not able to listen anything for 5 seconds\")\n text = None\n except speech_recognition.UnknownValueError:\n os.system(\"ps -ef|grep zenity|awk '{print $2}'|head -1|xargs kill -9\")\n print(\"ERROR : UnknownValueError - Could not able to listen anything for 5 seconds\")\n text = None\n except gtts.tts.gTTSError:\n print(\"ERROR : Connection Error : No internet connection.\")\n exit_program()\n except PermissionError:\n print(\"ERROR : No permission\")\n exit_program()\n\n return text", "def m() -> str:\n r = sr.Recognizer()\n with sr.Microphone() as source:\n audio = r.adjust_for_ambient_noise(source)\n logger.info(\"Microphone Active! 
Waiting for prompt!\")\n audio = r.listen(source)\n\n s = r.recognize_google(audio) #Send the audio to google\n result = s.lower()\n return result", "def analyse(self):\n logging.info(\"transferring text to CorpusCook...\")\n\n paragraphs = self.text.split('\\n\\n')\n print(\"mean length of splitted lines\", (mean([len(p) for p in paragraphs])))\n\n # If TIKA resolved '\\n'\n if (mean([len(p) for p in paragraphs])) > 80:\n paragraphs = [re.sub(r\"- *\\n\", '', p) for p in paragraphs]\n paragraphs = [p.replace('\\n', \" \") for p in paragraphs]\n paragraphs = [p.replace(';', \" \") for p in paragraphs]\n joiner = \" \"\n else:\n # If TIKA did not\n joiner = \" \"\n\n processed_text = joiner.join([p\n for p in paragraphs\n if\n p and\n ks_2samp(self.normal_data, list(p)).pvalue > self.threshold\n ]\n )\n\n return processed_text.strip()[:self.length_limit]", "def get_text(self, file_number):\n\n with io.open(self.file_name.format(file_number), 'rb') as audio_file:\n content = audio_file.read()\n audio = types.RecognitionAudio(content=content)\n response = self.client.recognize(self.config, audio)\n texts = self._format_response(response)\n return texts", "def act(self, audio_file=None):\n #file as source\n if self.src == 'file':\n if audio_file is None:\n raise ValueError(\"Please provide a audio_file\")\n return None\n elif not os.path.exists(audio_file):\n raise FileNotFoundError(\"Specified file not found\")\n return None\n else:\n file = speech_recognition.AudioFile(audio_file)\n with file:\n speech = self.recog_obj.record(file)\n \n #mic as source\n elif self.src == 'mic':\n if audio_file is not None:\n print(\"WARNING: source is set to device microphone. Audio file will be ignored\\n\")\n \n try:\n with self.mic_obj:\n print(\"Speak into the mic....\\n\")\n self.recog_obj.adjust_for_ambient_noise(self.mic_obj)\n speech = self.recog_obj.listen(self.mic_obj)\n #if microphone is not detected\n except OSError:\n print(\"Error: Microphone not detected\")\n return None\n \n \n try:\n print(\"Please wait while we transcribe...\\n\")\n text = self.recog_obj.recognize_google(speech, language='en', show_all=self.debug)\n \n #if audio is not detected\n except speech_recognition.UnknownValueError:\n print(\"Error: Sorry audio not detected by device microphone\")\n return None\n \n #if there is connection issue or api issue\n except speech_recognition.RequestError:\n print(\"Error: API for transcription is not reachable. 
There may be some connection issue or server side issue\")\n return None\n \n #for imposing various rules to text \n #But if debug mode is enabled, transcript variable will store a dictionary of various transcriptions \n #along with their confidence probabilities, so conversion rules are disabled meanwhile \n transcript = self.tcr.deconcat(text) if not self.debug else text\n return transcript", "def result(target_text):\n\n display_text(target_text)\n readability(target_text)", "def handle(text, mic, profile, wxbot=None):\n logger = logging.getLogger(__name__)\n # get config\n if SLUG not in profile or \\\n 'age' not in profile[SLUG]:\n mic.say('性别检测插件配置有误,插件使用失败', cache=True)\n return\n age = profile[SLUG]['age']\n try:\n gen = guess()\n age = guess(model_dir='/home/.dingdang/myplugins/plugincode/22801',class_type='age')#使用绝对路径路径\n logger.debug(\"genda report: \", gen)\n if gen=='M':\n mic.say('帅哥你好!', cache=True)\n print('prediction:',age)\n else:\n mic.say('美女你好!', cache=True)\n print('prediction:',age)\n except Exception, e:\n logger.error(e)", "def test_read_text(pdf_path):\n pdf_reader = PdfReader(path=pdf_path)\n text = pdf_reader.ocr_text()\n\n # We hard code this comparison to keep track of all changes to this metric\n assert pdf_reader.mean_confidence == 89\n assert pdf_reader.page_confidences == [86, 91]\n\n # Check if we have two pages seperated by pagebreaks\n assert len(text.split('\\f')) == 2\n\n # The same content can be extracted from the pages property\n assert '\\f'.join(pdf_reader.pages) == text\n\n # Content on the first page (important that this is at the beginning)\n assert 'Norwegian University of Science and Technology' in text[:50]\n\n # Content on second page (important that this is at the end)\n assert 'two requirements' in text[-50:]\n\n # The double-f in affine is hard for bad OCR algorithms\n assert 'affine' in text", "def detect_text(img):\n \n with io.open(img, 'rb') as image_file:\n content = image_file.read()\n\n image = vision.types.Image(content=content)\n response = client.text_detection(image=image) # returns TextAnnotation\n df = pd.DataFrame(columns=['description'])\n texts = response.text_annotations\n for text in texts:\n df = df.append(\n dict(\n \n description= clean_text (text.description)\n ),\n ignore_index=True\n )\n \n porter = PorterStemmer()\n\n try:\n text= (df['description'][0])\n text = porter.stem(text)\n except IndexError:\n text = 'i am neutral'\n # print (analyze(text))\n \n \n # print(df['description'])\n print(text)\n if len (text.split())<3:\n text = 'i am neutral'\n\n sentiment_dict= analyze2(text) \n if sentiment_dict >= 0.008: \n Category.append('Positive') \n return('Positive') \n\n elif (sentiment_dict > - 0.008) & (sentiment_dict < 0.008): \n Category.append('Random')\n return('Random')\n\n elif (sentiment_dict <= -0.008):\n Category.append('Negative')\n return('Negative')" ]
[ "0.6532657", "0.6448702", "0.6332463", "0.6206798", "0.6167052", "0.6119095", "0.5889082", "0.5853762", "0.58503145", "0.5848887", "0.58040744", "0.5803421", "0.57464606", "0.56818646", "0.5678315", "0.5678252", "0.5670355", "0.5666109", "0.5643918", "0.56433135", "0.5622431", "0.56174165", "0.5604354", "0.55998343", "0.55995744", "0.55985326", "0.55776143", "0.5577081", "0.55691564", "0.5555263" ]
0.7974098
0
tests the find_correct_keyword method from MicrophoneToText
def test_get_correct_keyword(self):
    mic = mi.MicrophoneToText()

    mic.keywordsshort = {'street': ['straße lautet aarberger straße '],
                         'location': ['ort lautet berlin'],
                         'income': ['einkommen lautet vierzigtausend'],
                         'capital': ['eigenkapital lautet hundertfünfundzwanzigtausend'],
                         'price': ['kaufpreis lautet fünfhunderttausend']}

    mic.find_correct_keyword()

    self.assertEqual(mic.resultkeywords['street'], ['aarberger straße'])
    self.assertEqual(mic.resultkeywords['location'], ['berlin'])
    self.assertEqual(mic.resultkeywords['income'], [40000])
    self.assertEqual(mic.resultkeywords['capital'], [125000])
    self.assertEqual(mic.resultkeywords['price'], [500000])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_findcorrectkeyword(self):\n mic = mi.MicrophoneToText()\n\n mic.keywordsshort[\"street\"] = [\"adresse lautet amselweg\", 'useless']\n mic.keywordsshort['location'] = [\"der ort lautet berlin\", 'useless']\n mic.keywordsshort['capital'] = [\"der Kaufpreis lautet vierhunderttausend\", 'useless']\n mic.keywordsshort['income'] = [\"das Eigenkapital lautet 200000\", 'useless']\n mic.keywordsshort['price'] = [\"der kaufpreis beträgt fünfundzwanzigtausend\", 'useless']\n\n mic.find_correct_keyword()\n\n self.assertEqual(mic.resultkeywords['street'], ['amselweg'])\n self.assertEqual(mic.resultkeywords['location'], ['berlin'])\n self.assertEqual(mic.resultkeywords['capital'], [400000])\n self.assertEqual(mic.resultkeywords['income'], [200000])\n self.assertEqual(mic.resultkeywords['price'], [25000])", "def test_find_word(self):\n mic = mi.MicrophoneToText()\n\n teststring = 'x transcript\": ort lautet testort }x'\n\n word = mic.find_word(teststring)\n\n self.assertEqual(word, ' ort lautet testort ')", "def test_analyze_text(self):\n\n mic = mi.MicrophoneToText()\n\n with open('../examples/result.txt', 'w', encoding='utf-8') as f:\n f.write('x transcript\": straße lautet aarbergerstraße }x\\n')\n f.write('x transcript\": ort lautet testort }x\\n')\n f.write('x transcript\": einkommen lautet testeinkommen }x\\n')\n f.write('x transcript\": kaufpreis lautet testkaufpreis }x\\n')\n f.write('x transcript\": eigenkapital lautet testkapital }x\\n')\n\n #mic.threader()\n\n mic.switchoff()\n print(mic.keywords.values())\n with open('../examples/result.txt', 'r', encoding='utf-8') as f:\n filestring = f.read()\n print(filestring)\n self.assertTrue(' straße lautet aarbergerstraße ' in filestring)", "def test_find_word(self):\n self.assertEqual(find_word('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word('ABSENT'), [])\n self.assertEqual(find_word('PW'), [(1, 7), (3, 7), (0, 8)])", "def test__parse_matched_keyword():\n matched_keyword = 'owo'\n \n for input_data, expected_output in (\n ({}, None),\n ({'matched_keyword': None}, None),\n ({'matched_keyword': ''}, None),\n ({'matched_keyword': str(matched_keyword)}, matched_keyword),\n ):\n output = parse_matched_keyword(input_data)\n vampytest.assert_eq(output, expected_output)", "def test_desy_keyword_translation(self):\n spi_search = \"find dk \\\"B --> pi pi\\\"\"\n inv_search = \"695__a:\\\"B --> pi pi\\\"\"\n self._compare_searches(inv_search, spi_search)", "def test_find_phrase_matches1(self):\n\t\ttest = sentiment.LibraryRun(self.text1, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator1)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0]]})", "def search(self, word):", "def test_caption_multi_word(self):\n inv_search = \"caption:quark and caption:mass\"\n spi_search = \"find caption quark mass\"\n self._compare_searches(inv_search, spi_search)", "def test_find_phrase_matches2(self):\n\t\ttest = sentiment.LibraryRun(self.text2, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator2)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0], [4, -1, 0]]})", "def test_get_keyword_string(self):\n \n # Create a Resource object\n resource = Resource(1, \"White Noise\", Name(\"Don\", \"\", \"DeLillo\"), \n \"Delillo's White Noise follows narrator Jack \"\\\n \"Gladney, a professor at a small Liberal Arts \"\\\n \"college and describes an academic year. 
Jack \"\\\n \"teaches at a school called the \"\\\n \"College-on-the-Hill, where he serves as the \"\\\n \"department chair of Hitler studies. He lives in \"\\\n \"Blacksmith, a quiet college town, with his wife, \"\\\n \"Babette, and four of their children from earlier \"\\\n \"marriages: Heinrich, Steffie, Denise, and \"\\\n \"Wilder. Throughout the novel, various \"\\\n \"half-siblings and ex-spouses drift in and out \"\\\n \"of the family’s home.\",\n \"sci-fi\", \"English\", 1985, \"US\", 326, \"book\",\n [\"culture\", \"survival\", \"life\", \"society\"])\n \n # Assert the expected result\n self.assertEqual(resource.get_keyword_string(),\n \"culture, life, society, survival\")", "def test_find_phrase_matches3(self):\n\t\ttest = sentiment.LibraryRun(self.text3, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator3)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0]], 'not very good': [[4, -1, 0]]})", "def testing():\n\n # lists which contains paths of keyword and non-keyword utterances\n non_kw_clips, kw_clips = generate_clips_kwds()\n\n non_kw_sent_dict, kw_sent_dict = {}, {}\n templates_dict = {}\n\n # calculate and store MFCC features in a dictionary\n for kw in listdir(kw_path):\n templates_dict[kw] = proc_one(kw_path + kw)\n\n for sent in non_kw_clips:\n filename = sent[:-3] + 'wav'\n non_kw_sent_dict[filename] = proc_one(filename)\n\n for word, paths in kw_clips.items():\n for path in paths:\n filename = path[:-3] + 'wav'\n kw_sent_dict[filename] = (proc_one(filename), word)\n\n final_results = {}\n\n # non-keyword comparisons\n for i, (non_kw_utterance, clip_feat) in enumerate(non_kw_sent_dict.items()):\n\n print(i, '/', len(non_kw_sent_dict))\n\n final_results[non_kw_utterance] = {}\n\n for keyword, kw_feat in templates_dict.items():\n print(\"Comparing keyword and non-kw sentence:\", keyword, non_kw_utterance)\n\n lmd = compare_all(clip_feat, kw_feat)\n final_results[non_kw_utterance][keyword] = (lmd, 0)\n\n with open(results_json, 'w') as f:\n json.dump(final_results, f)\n\n # keyword comparisons\n for i, (kw_utterance, (clip_feat, word)) in enumerate(kw_sent_dict.items()):\n\n print(i, '/', len(kw_sent_dict))\n final_results[kw_utterance] = {}\n\n for keyword, kw_feat in templates_dict.items():\n\n print(\"Comparing keyword and kw sentence:\", keyword, kw_utterance)\n\n lmd = compare_all(clip_feat, kw_feat)\n\n if keyword.split('_')[0] == word:\n final_results[kw_utterance][keyword] = (lmd, 1)\n else:\n final_results[kw_utterance][keyword] = (lmd, 0)\n\n with open(results_json, 'w') as f:\n json.dump(final_results, f)", "def test_queryKeywordFlag(self):\n self._keywordFilteringTest(\"keyword\")", "def test_multiple_word(self):\n score = location.match_weight('weston super mare UK', ['weston super mare'])\n\n self.assertEqual(score, 1000)", "def test_keywords(self):\n\n test_cases = (\n makeTestCase('adele 21',\n AlbumResultMatcher(title=Equals('21'), artist=Equals('adele')),\n ArtistResultMatcher(title=Equals('adele'))),\n makeTestCase('kanye power',\n TrackResultMatcher(title=Equals('power', artist=Equals('kanye west'))),\n ArtistResultMatcher(title=Equals('kanye west')),\n AlbumResultMatcher(title=Equals('my beautiful dark twisted fantasy'))),\n makeTestCase('ratat party with children',\n TrackResultMatcher(title=Equals('party with children', artist=Equals('ratatat'))),\n ArtistResultMatcher(title=Equals('ratatat'))),\n makeTestCase('flobot fight with tools handlebars',\n TrackResultMatcher(title=Equals('handlebars')),\n 
ArtistResultMatcher(title=Equals('flobots')),\n AlbumResultMatcher(title=Equals('fight with tools')))\n )\n\n self._run_tests(tests, {})", "def test_find_word2(self):\n self.assertEqual(find_word2('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word2('ABSENT'), [])\n self.assertEqual(find_word2('PW'), [(1, 7), (3, 7), (0, 8)])", "def find_keyword(ciphertext, keyword_length):\n A = str_to_matrix(scrub_string(ciphertext), keyword_length)\n return \"\".join([find_keyword_letter(A[j]) for j in range(keyword_length)])", "def test_single_word(self):\n score = location.match_weight('clevedon', ['clevedon'])\n\n self.assertEqual(score, 1000)", "def test_single_word_with_junk(self):\n score = location.match_weight('clevedon UK', ['clevedon'])\n\n self.assertEqual(score, 1000)", "def test_identify_tone_01():\n # Test all seven tones.\n # Test all vowels.\n # Test final ⁿ, up to two final and three initial consonants.\n # Test diacritic on first of two vowels.\n assert U.identify_tone('chhiong') == ('yīnpíng', '○')\n assert U.identify_tone('kui') == ('yīnpíng', '○')\n assert U.identify_tone('gîm') == ('yángpíng', '○')\n assert U.identify_tone('bî') == ('yángpíng', '○')\n assert U.identify_tone('bêng') == ('yángpíng', '○')\n assert U.identify_tone('ngớⁿ') == ('yīnshǎng', '●')\n assert U.identify_tone('óng') == ('yīnshǎng', '●')\n assert U.identify_tone('àm') == ('yīnqù', '●')\n assert U.identify_tone('pòan') == ('yīnqù', '●')\n assert U.identify_tone('sòe') == ('yīnqù', '●')\n assert U.identify_tone('bān') == ('yángqù', '●')\n assert U.identify_tone('iā') == ('yángqù', '●')\n assert U.identify_tone('ngơ̄ⁿ') == ('yángqù', '●')\n assert U.identify_tone('sek') == ('yīnrù', '●')\n assert U.identify_tone('khip') == ('yīnrù', '●')\n assert U.identify_tone('to̍k') == ('yángrù', '●')\n assert U.identify_tone('bu̍t') == ('yángrù', '●')", "def _keywordFilteringTest(self, keyword):\n # Check all the printable exclusions\n self.assertEqual(\n '(%s twistedrocks)' % (keyword.upper(),),\n imap4.Query(**{keyword: r'twisted (){%*\"\\] rocks'}))\n\n # Check all the non-printable exclusions\n self.assertEqual(\n '(%s twistedrocks)' % (keyword.upper(),),\n imap4.Query(**{\n keyword: 'twisted %s rocks' % (\n ''.join(chr(ch) for ch in range(33)),)}))", "def test_keyword_extractor(self):\n data = [{\"Header\": \"This is a Header\", \"Paragraph\": \"This is a Paragraph\", \"slide\": 10}]\n keywords = keyword_extractor(data)\n data[0][\"Header_keywords\"] = [\"header\"]\n data[0][\"Paragraph_keywords\"] = [\"paragraph\"]\n self.assertEqual(keywords, data)", "def test_caption(self):\n inv_search = \"caption:muon\"\n spi_search = \"find caption muon\"\n self._compare_searches(inv_search, spi_search)", "def _keyword_search(id_to_text, raw_keywords, modified_keywords):\n\t# The raw keywords and modified keywords should be two paired lists where the elements correspond to one another.\n\t# The modifications done to the keywords should already match the modifications done to the texts in the input dictionary so they can be directly compared.\n\tassert len(raw_keywords) == len(modified_keywords)\n\tid_to_found_keywords = {i:[r_kw for r_kw,m_kw in zip(raw_keywords,modified_keywords) if m_kw in text] for i,text in id_to_text.items()}\n\tid_to_num_found_keywords = {i:len(kw_list) for i,kw_list in id_to_found_keywords.items()}\n\treturn(id_to_found_keywords, id_to_num_found_keywords)", "def test_search_key_phrase(self):\n # search via key phrase.\n test = self.data.search(key_phrase='testing entries.', all_names=True)\n 
self.assertIn('testing entries.', test[0].notes)", "def test_guided():\n top_n = 5\n seed_keywords = [\"time\", \"night\", \"day\", \"moment\"]\n keywords = model.extract_keywords(doc_one,\n min_df=1,\n top_n=top_n,\n seed_keywords=seed_keywords)\n\n assert isinstance(keywords, list)\n assert isinstance(keywords[0], tuple)\n assert isinstance(keywords[0][0], str)\n assert isinstance(keywords[0][1], float)\n assert len(keywords) == top_n", "def check_spellings(text):\n\n for word in vocabulary:\n text = correct(word, text, 0.7)\n return text", "def _fe_keyword_match(self, sample):\n result = OrderedDict()\n\n for item in self._keywords:\n result[item + \"_kw\"] = 1 if item in sample['fqdn'] else 0\n\n return result", "def test_spires_keyword_distribution_before_conjunctions(self):\n spi_search = 'find journal phys.lett. 0903 024'\n inv_search = '(journal:phys.lett.,0903,024)'\n self._compare_searches(inv_search, spi_search)" ]
[ "0.8400151", "0.69928145", "0.6940127", "0.623138", "0.61756164", "0.6096718", "0.6020738", "0.5958086", "0.59253246", "0.5919958", "0.5907681", "0.59044945", "0.5880023", "0.5837451", "0.58224136", "0.5795018", "0.5792562", "0.57838136", "0.57676005", "0.5748379", "0.5716321", "0.5690782", "0.56851476", "0.5677552", "0.56190044", "0.56086063", "0.55552965", "0.5504682", "0.5483225", "0.548144" ]
0.8242476
1
tests the find_word method from MicrophoneToText
def test_find_word(self):
    mic = mi.MicrophoneToText()

    teststring = 'x transcript": ort lautet testort }x'

    word = mic.find_word(teststring)

    self.assertEqual(word, ' ort lautet testort ')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_find_word(self):\n self.assertEqual(find_word('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word('ABSENT'), [])\n self.assertEqual(find_word('PW'), [(1, 7), (3, 7), (0, 8)])", "def test_find_word2(self):\n self.assertEqual(find_word2('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word2('ABSENT'), [])\n self.assertEqual(find_word2('PW'), [(1, 7), (3, 7), (0, 8)])", "def search(self, word):", "def word_finder(word, text):\r\n word = word.lower()\r\n text = str(text).lower()\r\n match = re.search(word, text)\r\n if match:\r\n return True\r\n return False", "def test_convert_audio_to_text(self):\n\n text = self.converter.convert_audio_to_text(START, END, [WORD], lambda: False)\n text = text.strip()\n self.assertEqual(text, WORD)", "def test_analyze_text(self):\n\n mic = mi.MicrophoneToText()\n\n with open('../examples/result.txt', 'w', encoding='utf-8') as f:\n f.write('x transcript\": straße lautet aarbergerstraße }x\\n')\n f.write('x transcript\": ort lautet testort }x\\n')\n f.write('x transcript\": einkommen lautet testeinkommen }x\\n')\n f.write('x transcript\": kaufpreis lautet testkaufpreis }x\\n')\n f.write('x transcript\": eigenkapital lautet testkapital }x\\n')\n\n #mic.threader()\n\n mic.switchoff()\n print(mic.keywords.values())\n with open('../examples/result.txt', 'r', encoding='utf-8') as f:\n filestring = f.read()\n print(filestring)\n self.assertTrue(' straße lautet aarbergerstraße ' in filestring)", "def findWord(data: str) -> str:\n # find the 【 and 】string or 〖 and 〗string\n # either one will work\n # if 【 or 】string doesn't exist in the image, try the 〖 and 〗string\n if data.find('【') == -1 or data.find('】') == -1:\n indStart = find(data, '〖 ')\n indEnd = find(data, '〗')\n else:\n indStart = find(data,'【')\n indEnd = find(data,'】')\n\n # if both 【 and 】string and〖 and 〗string doesn't exist, means image not properly take\n if len(indStart) == 0 or len(indEnd) == 0:\n raise RuntimeError(\"Image not properly taken\")\n\n # slice that string\n word = data[int(indStart[0]+1):indEnd[0]]\n\n # lastly, get rid of spaces\n word = word.replace(\" \", \"\")\n return word", "def getWords(speech):\r\n return speech.split()", "def test_wordMatch(self):\n words = []\n for line in self.output:\n words.extend(string.split(line))\n self.failUnless(self.sampleSplitText == words)", "def words(self, word):\n pass", "def test_findcorrectkeyword(self):\n mic = mi.MicrophoneToText()\n\n mic.keywordsshort[\"street\"] = [\"adresse lautet amselweg\", 'useless']\n mic.keywordsshort['location'] = [\"der ort lautet berlin\", 'useless']\n mic.keywordsshort['capital'] = [\"der Kaufpreis lautet vierhunderttausend\", 'useless']\n mic.keywordsshort['income'] = [\"das Eigenkapital lautet 200000\", 'useless']\n mic.keywordsshort['price'] = [\"der kaufpreis beträgt fünfundzwanzigtausend\", 'useless']\n\n mic.find_correct_keyword()\n\n self.assertEqual(mic.resultkeywords['street'], ['amselweg'])\n self.assertEqual(mic.resultkeywords['location'], ['berlin'])\n self.assertEqual(mic.resultkeywords['capital'], [400000])\n self.assertEqual(mic.resultkeywords['income'], [200000])\n self.assertEqual(mic.resultkeywords['price'], [25000])", "def findExample(data: str, word: str) -> str:\n # find strings with 「 and 」\n start = find(data, '「')\n end = find(data, '」')\n\n # initialize a list that contains each example\n example_list = []\n for i in range(len(start)):\n example = data[int(start[i]+1):end[i]]\n # remove spacing\n example = example.replace(' ', '')\n # remove new lines\n 
example = example.replace('\\n', '')\n\n if '一' in example or word in example:\n example_list.append(example)\n\n\n # finally, return the first element in list\n if len(example_list) > 0:\n return example_list[0]\n else:\n return ''", "def test_score_word(self):\n self.assertEqual(1, score_word('a', 'a'))\n self.assertEqual(1, score_word('aa', 'ab'))\n self.assertEqual(1, score_word('ba', 'bb'))\n self.assertEqual(0, score_word('a', 'b'))\n self.assertEqual(0, score_word('ab', 'ba'))\n self.assertEqual(2, score_word('aba', 'cba'))\n self.assertEqual(2, score_word('abc', 'abd'))", "def test_output_get_word(self):\n actual = get_words('../corpus/alice.txt')\n expected = [\"alice\"]\n self.assertEqual(actual, expected)", "def spell_a_word(cls, voice_transcript, skill, **kwargs):\n tags = cls._extract_tags(voice_transcript, skill['tags'])\n for tag in tags:\n reg_ex = re.search(tag + ' ([a-zA-Z]+)', voice_transcript)\n try:\n if reg_ex:\n search_text = reg_ex.group(1)\n for letter in search_text:\n cls.response(letter)\n time.sleep(2)\n except Exception as e:\n logging.debug(e)\n cls.response(\"I can't spell the word\")", "def word(word_time):\n return word_time[0]", "def test_get_correct_keyword(self):\n mic = mi.MicrophoneToText()\n\n mic.keywordsshort = {'street': ['straße lautet aarberger straße '], 'location': ['ort lautet berlin'], 'income': ['einkommen lautet vierzigtausend']\n , 'capital': ['eigenkapital lautet hundertfünfundzwanzigtausend'], 'price': ['kaufpreis lautet fünfhunderttausend']}\n\n mic.find_correct_keyword()\n\n self.assertEqual(mic.resultkeywords['street'], ['aarberger straße'])\n self.assertEqual(mic.resultkeywords['location'], ['berlin'])\n self.assertEqual(mic.resultkeywords['income'], [40000])\n self.assertEqual(mic.resultkeywords['capital'], [125000])\n self.assertEqual(mic.resultkeywords['price'], [500000])", "def onWordRecognised(self, *_args):\n # Unsubscribe to the event when talking,\n # to avoid repetitions\n memory.unsubscribeToEvent(\"WordRecognized\",\"AudioRecognition\")\n\n # We access to the word recognised in the memory\n word = memory.getData(\"WordRecognized\")\n\n # Debug : Print the word recognised\n print(\"Mot :\")\n print(word[0])\n print(\"Indice de confiance :\")\n print(word[1])\n print\n\n\n # We acknoledge a word if the trust is high enough\n if (word[1] > 0.28):\n self.mot = word[0]\n #self.tts.say(\"Le mot reconnu est :\"+self.mot)\n StateManager(self)\n \n\n # Subscribe again to the event\n memory.subscribeToEvent(\"WordRecognized\",\n \"AudioRecognition\",\n \"onWordRecognised\")", "def whole_word_matches(self):\n start = '1.0'\n while True:\n start = self.text.search(self.term, start, stopindex=tk.END)\n if not start:\n break\n end = start + ' wordend'\n # whole word includes a space before\n found = self.text.get(start + '-1c', end)\n if found == ' ' + self.term:\n self.text.tag_add('found', start, end)\n start = end", "def whole_word_matches(self):\n start = '1.0'\n while True:\n start = self.text.search(self.term, start, stopindex=tk.END)\n if not start:\n break\n end = start + ' wordend'\n # whole word includes a space before\n found = self.text.get(start + '-1c', end)\n if found == ' ' + self.term:\n self.text.tag_add('found', start, end)\n start = end", "def test_word_translation(self):\n self.assertEqual(translator.translate_word(\"hour\"), \"ourhay\")\n self.assertEqual(translator.translate_word(\"\"), \"\")\n self.assertEqual(translator.translate_word(\"aaa\"), \"aaayay\")", "def test_find_word_chess(self, initial, goal, 
words):\n\n self.assertEqual(len(words),\n len(self.search_function(len(words))(initial,\n goal)))", "def words(text):\n text = \" \".join(text) if text else 'We are the knights who say \"NI\"!'\n xml.words(text)\n return 0", "def choose_word():\n pass", "def find(self, event: Event = None) -> None:\n if not self.loaded:\n return\n c, n, p = self.c, 0, self.c.p\n sc = self.spellController\n w = c.frame.body.wrapper\n c.selectPosition(p)\n s = w.getAllText().rstrip()\n ins = w.getInsertPoint()\n # New in Leo 5.3: use regex to find words.\n last_p = p.copy()\n while True:\n for m in self.re_word.finditer(s[ins:]):\n start, word = m.start(0), m.group(0)\n if word in self.seen:\n continue\n n += 1\n # Ignore the word if numbers precede or follow it.\n # Seems difficult to do this in the regex itself.\n k1 = ins + start - 1\n if k1 >= 0 and s[k1].isdigit():\n continue\n k2 = ins + start + len(word)\n if k2 < len(s) and s[k2].isdigit():\n continue\n alts: list[str] = sc.process_word(word)\n if alts:\n self.currentWord = word\n i = ins + start\n j = i + len(word)\n self.showMisspelled(p)\n self.tab.fillbox(alts, word)\n c.invalidateFocus()\n c.bodyWantsFocus()\n w.setSelectionRange(i, j, insert=j)\n k = g.see_more_lines(s, j, 4)\n w.see(k)\n return\n self.seen.add(word)\n # No more misspellings in p\n p.moveToThreadNext()\n if p:\n ins = 0\n s = p.b\n else:\n g.es(\"no more misspellings\")\n c.selectPosition(last_p)\n self.tab.fillbox([])\n c.invalidateFocus()\n c.bodyWantsFocus()\n return", "def _get_word_at(self, position: Position) -> Optional[str]:\n line = self._get_line(position.line)\n\n for match in re.finditer(r'\\w+', line):\n if match.start() <= position.character <= match.end():\n return match.group(0)\n\n return None", "def match_sfx(uni_word, morphs):\n uni_morph = unicode(morphs[-1].lex, 'UTF-8')\n if uni_word.endswith(uni_morph): # just one morpheme ends with word\n return len(uni_morph), 1\n for i in range(-2, -(len(morphs)+1), -1):\n submorphs = ''.join([morph.lex for morph in morphs[i:]])\n submorphs_dec = decompose(submorphs)\n for k in range(-1, -len(unicode(submorphs, 'UTF-8')), -1):\n word_dec = decompose(uni_word[k:])\n # logging.debug(' %s(%s:%s) <- %s(%s:%s)', uni_word[k:].encode('UTF-8'), word_dec, to_hex(word_dec),\n # submorphs, submorphs_dec, to_hex(submorphs_dec))\n if word_dec == submorphs_dec:\n return -k, -i\n morphs_str = ' + '.join([str(morph) for morph in morphs])\n logging.debug('SFX: %s(%s): %s', uni_word.encode('UTF-8'), decompose(uni_word), morphs_str)\n return -1, -1", "def query_word(self, word):\n raise NotImplementedError", "def _onWord(self, name, location, length):\n logging.debug(\"onWord...\")", "def test_forward_end_word_start_of_word(self):\n before_b = \"\"\"\\\n Americans live in the most severe weather-prone country on Earth. Each year, Americans cope with an average of 10,000 thunderstorms, 2,500 floods, 1,000 tornadoes, as well as an average of 6 deadly hurricanes. Potentially deadly weather impacts every American. Communities can now rely on the National Weather Service’s StormReady program to help them guard against the ravages of Mother Nature.\n\n Some 90% of all presidentially declared disasters are weather related, leading to around 500 deaths per year and nearly $14 billion in damage. StormReady, a program started in 1999 in Tulsa, OK, helps arm America's communities with the communication and safety skills needed to save lives and property– before and during the event. 
StormReady helps community leaders and emergency managers strengthen local safety programs.\n\n StormReady communities are better prepared to save lives from the onslaught of severe weather through better planning, education, and awareness. No community is storm proof, but StormReady can help communities save lives. Does StormReady make a difference?\n \"\"\"\n after_b = \"\"\"\\\n Americans live in the most severe weather-prone country on Earth. Each year, Americans cope with an average of 10,000 thunderstorms, 2,500 floods, 1,000 tornadoes, as well as an average of 6 deadly hurricanes. Potentially deadly weather impacts every American. Communities can now rely on the National Weather Service’s StormReady program to help them guard against the ravages of Mother Nature.\n\n Some 90% of all presidentially declared disasters are weather related, leading to around 500 deaths per year and nearly $14 billion in damage. StormReady, a program started in 1999 in Tulsa, OK, helps arm America's communities with the communication and safety skills needed to save lives and property– before and during the event. StormReady helps community leaders and emergency managers strengthen local safety programs.\n\n StormReady communities are better prepared to save lives from the onslaught of severe weather through better planning, education, and awareness. No community is storm proof, but StormReady can help communities save lives. Does StormReady make a difference?\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"1.310\", \"1.310\"),\n after_sel=(\"1.317\", \"1.317\"),\n command_name=\"forward-end-word\",\n )" ]
[ "0.75302786", "0.67486084", "0.6694181", "0.6322505", "0.6256139", "0.62458503", "0.62292504", "0.61650485", "0.6157438", "0.61508054", "0.61019486", "0.6084692", "0.60277694", "0.6024091", "0.6019456", "0.6004083", "0.5993218", "0.5982803", "0.5946654", "0.5946654", "0.59424937", "0.5918648", "0.59052783", "0.5904119", "0.5868233", "0.58553404", "0.5851087", "0.5849893", "0.58424944", "0.5837149" ]
0.8740787
0
tests the MyRecognizeCallback class and checks whether the expected console output appears: Connection closed, Connection was successful
def test_recognize(self):

    rec = mi.MyRecognizeCallback()
    rec.on_close()
    rec.on_connected()
    rec.on_data('"final": true truetestd')
    rec.on_error("testerror")
    rec.on_hypothesis("testh")
    rec.on_inactivity_timeout("testerrorinac")
    rec.on_listening()
    rec.on_transcription("testtr")
    self.assertIsNotNone(rec)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def callback_connect(self):\n pass", "def callback_connect(self):\n pass", "def callback_connect(self):\n pass", "def isConnected():", "def check_connection():\n if connected():\n ws.emit(Message('mycroft.internet.connected'))\n # check for pairing, if not automatically start pairing\n if not is_paired():\n # begin the process\n payload = {\n 'utterances': [\"pair my device\"],\n 'lang': \"en-us\"\n }\n ws.emit(Message(\"recognizer_loop:utterance\", payload))\n else:\n thread = Timer(1, check_connection)\n thread.daemon = True\n thread.start()", "def on_connection_closed(self):", "def testCaptureResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_CAPTURE_MODE, mavutil.mavlink.GOPRO_REQUEST_SUCCESS, (CAPTURE_MODE_BURST, 0, 0, 0))\n self.mgr.get_response_callback('vehicle','name', message)\n self.assertEqual( self.mgr.captureMode, CAPTURE_MODE_BURST)\n self.mgr.processMsgQueue.assert_called_with()\n self.mgr.sendState.assert_called_with()", "def recognize(callback, timeout):\n # Reset globals\n global CALLBACK, RUNNING, RECORD_SECONDS\n CALLBACK = callback\n RECORD_SECONDS = timeout\n RUNNING = True\n # Connect to websocket interfaces\n headers = {}\n userpass = \":\".join(get_auth())\n headers[\"Authorization\"] = \"Basic \" + base64.b64encode(\n userpass.encode()).decode()\n url = get_url()\n ws = websocket.WebSocketApp(url,\n header=headers,\n on_message=on_message,\n on_error=on_error,\n on_open=on_open)\n # This hands control to the WebSocketApp. It's a blocking call, so it won't\n # return until ws.close() gets called\n ws.run_forever()", "def test_parse_message_success_connect(self, mock_connect):\n for test_case in self.success_test_params_connect:\n expected = test_case[KEY_EXPECTED]\n mock_connect = app.on_connect()\n self.assertNotEqual(expected, mock_connect)", "async def on_connect(self) -> None:", "def ok_callback(self):\n pass", "def testCaptureModeResponse(self):\n message = (mavutil.mavlink.GOPRO_COMMAND_CAPTURE_MODE, mavutil.mavlink.GOPRO_REQUEST_SUCCESS)\n self.mgr.set_response_callback('vehicle','name', message)\n self.mgr.processMsgQueue.assert_called_with()", "def test_parse_message_success_disconnect(self):\n for test_case in self.success_test_params_disconnect:\n expected = test_case[KEY_EXPECTED]\n mock_disconnect = app.on_disconnect()\n self.assertEqual(expected, mock_disconnect)", "def reconnecting(self) -> bool:", "def on_connect(client, userdata, flags, rc_value):\n print(f\"Connected with result code : {rc_value}\")", "def test_recognize(self):\n self._api.SetImageFile(self._image_file)\n # timeout after 1 milliseconds (likely)\n res = self._api.Recognize(1)\n self.assertFalse(res)\n self._api.SetImageFile(self._image_file)\n # timeout after 10 seconds (unlikely)\n res = self._api.Recognize(10000)\n self.assertTrue(res)\n self._api.SetImageFile(self._image_file)\n # no timeout\n res = self._api.Recognize()\n self.assertTrue(res)", "def connection_listener(connected, info):\n if connected:\n logging.info('Success: {}'.format(info))\n else:\n logging.error('Fail: {}'.format(info))", "def is_incall_connected(self) -> bool:", "def connection_closed(self) -> bool:", "def onConnect(self, fetcher, connectionRespInfo): #$NON-NLS-1$\r", "def on_connect(client, userdata, flags, rc):\n if rc == 0:\n print(\"Connected to broker\")\n client.connected_flag = True\n else:\n print(\"Connection failed\")\n client.connected_flag = False", "def test_incoming_k(self):\n m_interface = Mock()\n m_interface.callback.return_value = True\n m_interface.read.return_value = ''\n 
upb = UPB(m_interface)\n upb.onCommand(address=(22,255), callback=m_interface.callback)\n m_interface.read.return_value = \"PU07141610FF3090\\x0DPU07151610FF308F\\x0D\"\n# time.sleep(4000)\n time.sleep(2)\n m_interface.callback.assert_called_with(address=(22,255), command='status', source=upb)\n m_interface.read.return_value = ''", "def connectSetupFinished(self, linkURI): \n print(\"Connection is finished\");", "def test_verify_state_of_a_device_when_disconnected_from_the_device():", "def connectionLost(reason):", "def on_connection_end() -> None:\r\n print(\"Connection lost with G-Earth\")\r\n print()", "def _on_connection_success(self):\n if self.connect_handler:\n self.connect_handler()", "def is_call_ended(self) -> bool:", "def accept_connection(self):\n pass", "def _handleEvConnected(self):\r\n print(\"Start SLAC matching\")\r\n self.whitebeet.slacStartMatching()\r\n print(\"Set duty cycle to 5%\")\r\n self.whitebeet.controlPilotSetDutyCycle(5)\r\n try:\r\n if self.whitebeet.slacMatched() == True:\r\n print(\"SLAC matching successful\")\r\n self._handleNetworkEstablished()\r\n return True\r\n else:\r\n print(\"SLAC matching failed\")\r\n return False\r\n except TimeoutError as e:\r\n print(e)\r\n return False" ]
[ "0.6066561", "0.6066561", "0.6066561", "0.60609084", "0.6043428", "0.6035044", "0.6002975", "0.59806365", "0.594079", "0.5937401", "0.59252286", "0.591624", "0.58994645", "0.589289", "0.5883992", "0.5849009", "0.58283126", "0.5797944", "0.5786313", "0.5773687", "0.5753486", "0.57212394", "0.57172424", "0.57081985", "0.5706717", "0.5694614", "0.56932575", "0.56908315", "0.56881726", "0.56877476" ]
0.7577983
0
tests the find_correct_keyword method a second time
def test_findcorrectkeyword(self):
    mic = mi.MicrophoneToText()

    mic.keywordsshort["street"] = ["adresse lautet amselweg", 'useless']
    mic.keywordsshort['location'] = ["der ort lautet berlin", 'useless']
    mic.keywordsshort['capital'] = ["der Kaufpreis lautet vierhunderttausend", 'useless']
    mic.keywordsshort['income'] = ["das Eigenkapital lautet 200000", 'useless']
    mic.keywordsshort['price'] = ["der kaufpreis beträgt fünfundzwanzigtausend", 'useless']

    mic.find_correct_keyword()

    self.assertEqual(mic.resultkeywords['street'], ['amselweg'])
    self.assertEqual(mic.resultkeywords['location'], ['berlin'])
    self.assertEqual(mic.resultkeywords['capital'], [400000])
    self.assertEqual(mic.resultkeywords['income'], [200000])
    self.assertEqual(mic.resultkeywords['price'], [25000])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_correct_keyword(self):\n mic = mi.MicrophoneToText()\n\n mic.keywordsshort = {'street': ['straße lautet aarberger straße '], 'location': ['ort lautet berlin'], 'income': ['einkommen lautet vierzigtausend']\n , 'capital': ['eigenkapital lautet hundertfünfundzwanzigtausend'], 'price': ['kaufpreis lautet fünfhunderttausend']}\n\n mic.find_correct_keyword()\n\n self.assertEqual(mic.resultkeywords['street'], ['aarberger straße'])\n self.assertEqual(mic.resultkeywords['location'], ['berlin'])\n self.assertEqual(mic.resultkeywords['income'], [40000])\n self.assertEqual(mic.resultkeywords['capital'], [125000])\n self.assertEqual(mic.resultkeywords['price'], [500000])", "def test_queryKeywordFlag(self):\n self._keywordFilteringTest(\"keyword\")", "def search(self, word):", "def test_desy_keyword_translation(self):\n spi_search = \"find dk \\\"B --> pi pi\\\"\"\n inv_search = \"695__a:\\\"B --> pi pi\\\"\"\n self._compare_searches(inv_search, spi_search)", "def test_find_word(self):\n self.assertEqual(find_word('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word('ABSENT'), [])\n self.assertEqual(find_word('PW'), [(1, 7), (3, 7), (0, 8)])", "def test_find_word2(self):\n self.assertEqual(find_word2('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word2('ABSENT'), [])\n self.assertEqual(find_word2('PW'), [(1, 7), (3, 7), (0, 8)])", "def test_keyword_extractor(self):\n data = [{\"Header\": \"This is a Header\", \"Paragraph\": \"This is a Paragraph\", \"slide\": 10}]\n keywords = keyword_extractor(data)\n data[0][\"Header_keywords\"] = [\"header\"]\n data[0][\"Paragraph_keywords\"] = [\"paragraph\"]\n self.assertEqual(keywords, data)", "def test_keywords(self):\n\n test_cases = (\n makeTestCase('adele 21',\n AlbumResultMatcher(title=Equals('21'), artist=Equals('adele')),\n ArtistResultMatcher(title=Equals('adele'))),\n makeTestCase('kanye power',\n TrackResultMatcher(title=Equals('power', artist=Equals('kanye west'))),\n ArtistResultMatcher(title=Equals('kanye west')),\n AlbumResultMatcher(title=Equals('my beautiful dark twisted fantasy'))),\n makeTestCase('ratat party with children',\n TrackResultMatcher(title=Equals('party with children', artist=Equals('ratatat'))),\n ArtistResultMatcher(title=Equals('ratatat'))),\n makeTestCase('flobot fight with tools handlebars',\n TrackResultMatcher(title=Equals('handlebars')),\n ArtistResultMatcher(title=Equals('flobots')),\n AlbumResultMatcher(title=Equals('fight with tools')))\n )\n\n self._run_tests(tests, {})", "def test_search_test_search_returns_correct_menu(self):\n # create some db records\n dataset = self.create_mixed_test_data()\n test_search_string = 'bravo'\n\n with patch('builtins.input', side_effect=test_search_string):\n result = self.menu.search_text_search()\n\n expected_result = self.menu.present_next_result\n\n self.assertEqual(expected_result, result)", "def test_execute_keyword():\n keyword = 'store_in_repo'\n data_repository = {'db_obj': False, 'war_file_type': 'Case', 'wt_results_execdir': None,\\\n 'wt_logs_execdir': None, 'wt_name': 'test', 'step_num':None}\n args_repository = {'datavar': 'a', 'datavalue': 'b'}\n warrior.Framework.Utils.testcase_Utils.pStep = MagicMock(return_value=None)\n package_list = [warrior.Actions.CommonActions]\n warrior.Framework.Utils.data_Utils.update_datarepository = MagicMock(return_value=None)\n warrior.Framework.Utils.config_Utils.data_repository = MagicMock(return_value=data_repository)\n result = kw_driver.execute_keyword(keyword, data_repository, args_repository, 
package_list)\n assert result == {'db_obj': False, 'war_file_type': 'Case', 'wt_results_execdir': None,\\\n 'wt_logs_execdir': None, 'wt_name': 'test', 'step_num': None, 'step-None_status': True}\n del warrior.Framework.Utils.data_Utils.update_datarepository\n del warrior.Framework.Utils.config_Utils.data_repository\n del warrior.Framework.Utils.testcase_Utils.pStep", "def test__parse_matched_keyword():\n matched_keyword = 'owo'\n \n for input_data, expected_output in (\n ({}, None),\n ({'matched_keyword': None}, None),\n ({'matched_keyword': ''}, None),\n ({'matched_keyword': str(matched_keyword)}, matched_keyword),\n ):\n output = parse_matched_keyword(input_data)\n vampytest.assert_eq(output, expected_output)", "def fix_keyword_detection_issues(\n dset: List, organ2ind: Dict\n): \n \n \"\"\"SOLVE CARDIA\"\"\" \n\n for ind, abstract in tqdm(enumerate(dset)):\n keywords = abstract[\"keywords\"]\n occ_organ_indices = abstract[\"occ_organ_indices\"]\n occ_organ_names = abstract[\"occ_organ_names\"]\n organ_names = abstract[\"organ_names\"]\n if (\n \"cardiac\" in keywords\n and \"stomach\" in occ_organ_names\n and any(\n [\n item in organ_names\n for item in [\"atrium\", \"ventricle\", \"myocardium\", \"pericardium\"]\n ]\n )\n ):\n occ_organ_indices.remove(organ2ind[\"stomach\"])\n occ_organ_names.remove(\"stomach\")\n if (\n \"cardia\" in keywords\n and \"myocardium\" in occ_organ_names\n and any([item in organ_names for item in [\"stomach\"]])\n ):\n occ_organ_indices.remove(organ2ind[\"myocardium\"])\n occ_organ_names.remove(\"myocardium\")\n abstract[\"occ_organ_indices\"] = occ_organ_indices\n abstract[\"occ_organ_names\"] = occ_organ_names\n\n inds = []\n for ind, abstract in tqdm(enumerate(dset)):\n keywords = abstract[\"keywords\"]\n occ_organ_indices = abstract[\"occ_organ_indices\"]\n occ_organ_names = abstract[\"occ_organ_names\"]\n organ_names = abstract[\"organ_names\"]\n if \"cardiac\" in keywords and \"stomach\" in occ_organ_names:\n inds.append(ind)\n\n \"\"\"SOLVE THE LIVER - DELIVER PROBLEM\"\"\"\n\n for ind, abstract in tqdm(enumerate(dset)):\n keywords = abstract[\"keywords\"]\n occ_organ_indices = abstract[\"occ_organ_indices\"]\n occ_organ_names = abstract[\"occ_organ_names\"]\n organ_names = abstract[\"organ_names\"]\n if (\n any(\n [\n keyword in keywords\n for keyword in [\"delivery\", \"delivered\", \"deliver\", \"delivering\"]\n ]\n )\n and \"liver\" not in organ_names\n ):\n occ_organ_indices.remove(organ2ind[\"liver\"])\n occ_organ_names.remove(\"liver\")\n keywords = [\n keyword\n for keyword in keywords\n if keyword not in [\"delivery\", \"delivered\", \"deliver\", \"delivering\"]\n ]\n abstract[\"occ_organ_indices\"] = occ_organ_indices\n abstract[\"occ_organ_names\"] = occ_organ_names\n abstract[\"keywords\"] = keywords\n\n \"\"\"SOLVE THE COLON - COLONISE PROBLEM\"\"\"\n\n inds = []\n for ind, abstract in tqdm(enumerate(dset)):\n keywords = abstract[\"keywords\"]\n occ_organ_indices = abstract[\"occ_organ_indices\"]\n occ_organ_names = abstract[\"occ_organ_names\"]\n organ_names = abstract[\"organ_names\"]\n if (\n any(\n [\n keyword in keywords\n for keyword in [\n \"colonize\",\n \"colonise\",\n \"colonized\",\n \"colonised\",\n \"colonies\",\n ]\n ]\n )\n and \"colon\" not in organ_names\n ):\n occ_organ_indices.remove(organ2ind[\"colon\"])\n occ_organ_names.remove(\"colon\")\n keywords = [\n keyword\n for keyword in keywords\n if keyword\n not in [\"colonize\", \"colonise\", \"colonized\", \"colonised\", \"colonies\"]\n ]\n 
abstract[\"occ_organ_indices\"] = occ_organ_indices\n abstract[\"occ_organ_names\"] = occ_organ_names\n abstract[\"keywords\"] = keywords\n\n \"\"\"SOLVE THE BLADDER - GALLBLADDER PROBLEM\"\"\"\n\n \"\"\"Gallbladder doesn't cause the bladder keyword\"\"\"\n \"\"\"Bladder does cause problems\"\"\"\n\n for ind, abstract in tqdm(enumerate(dset)):\n keywords = abstract[\"keywords\"]\n occ_organ_indices = abstract[\"occ_organ_indices\"]\n occ_organ_names = abstract[\"occ_organ_names\"]\n organ_names = abstract[\"organ_names\"]\n if (\n any([keyword in keywords for keyword in [\"bladder\", \"bladders\"]])\n and any(\n [\n keyword in keywords\n for keyword in [\n \"gall\",\n \"gallbladder\",\n \"gall-bladder\",\n \"gallbladders\",\n \"gall-bladders\",\n ]\n ]\n )\n and \"gallbladder\" in organ_names\n ):\n occ_organ_indices.remove(organ2ind[\"urinary bladder\"])\n occ_organ_names.remove(\"urinary bladder\")\n keywords = [\n keyword\n for keyword in keywords\n if keyword not in [\"bladder\", \"bladders\"]\n ]\n abstract[\"occ_organ_indices\"] = occ_organ_indices\n abstract[\"occ_organ_names\"] = occ_organ_names\n abstract[\"keywords\"] = keywords\n\n return dset", "def testRandomWord(self):\n word1 = self.searcher.randomWord()\n word2 = self.searcher.randomWord()\n self.assertTrue(len(word1) > 1, 'Word length too short')\n self.assertTrue(len(word2) > 1, 'Word length too short')\n self.assertNotEqual(word1, word2, 'Found the same word')", "def search_single_word(word):\n # YOUR CODE HERE #\n pass # delete this when you write your code", "def test_queryUnkeywordFlag(self):\n self._keywordFilteringTest(\"unkeyword\")", "def test_keyword(self):\n\n url = '/%s/job-types/?keyword=%s' % (self.api, self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['name'], self.job_type1.name)\n\n url = '/%s/job-types/?keyword=%s' % (self.api, 'job-type')\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 6)\n\n url = '/%s/job-types/?keyword=%s' % (self.api, 'job-type-for-view-test')\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 3)\n\n url = '/%s/job-types/?keyword=%s&keyword=%s' % (self.api, 'job-type-for-view-test', self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 4)", "def search(self, term):", "def test_absorbs_naked_a_search(self):\n invenio_search = \"author:ellis\"\n naked_search = \"a ellis\"\n self._compare_searches(invenio_search, naked_search)", "def test_analyze_a_recipe_search_query(self):\n pass", "def test_keyword(self):\n\n url = '/%s/job-type-names/?keyword=%s' % (self.api, self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['name'], self.job_type1.name)\n\n url = 
'/%s/job-type-names/?keyword=%s' % (self.api, 'job-type')\n response = self.client.generic('GET', url)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 4)\n\n url = '/%s/job-type-names/?keyword=%s' % (self.api, 'job-type-for-view-test')\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['latest_version'], '1.10.0')\n\n url = '/%s/job-type-names/?keyword=%s&keyword=%s' % (self.api, 'job-type-for-view-test', self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 2)", "def test_find_phrase_matches2(self):\n\t\ttest = sentiment.LibraryRun(self.text2, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator2)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0], [4, -1, 0]]})", "def _keyword_search(id_to_text, raw_keywords, modified_keywords):\n\t# The raw keywords and modified keywords should be two paired lists where the elements correspond to one another.\n\t# The modifications done to the keywords should already match the modifications done to the texts in the input dictionary so they can be directly compared.\n\tassert len(raw_keywords) == len(modified_keywords)\n\tid_to_found_keywords = {i:[r_kw for r_kw,m_kw in zip(raw_keywords,modified_keywords) if m_kw in text] for i,text in id_to_text.items()}\n\tid_to_num_found_keywords = {i:len(kw_list) for i,kw_list in id_to_found_keywords.items()}\n\treturn(id_to_found_keywords, id_to_num_found_keywords)", "def test_spires_keyword_distribution_before_conjunctions(self):\n spi_search = 'find journal phys.lett. 0903 024'\n inv_search = '(journal:phys.lett.,0903,024)'\n self._compare_searches(inv_search, spi_search)", "def test_find_word_chess(self, initial, goal, words):\n\n self.assertEqual(len(words),\n len(self.search_function(len(words))(initial,\n goal)))", "def test_post_foods_search(self):\n pass", "def test_find_phrase_matches3(self):\n\t\ttest = sentiment.LibraryRun(self.text3, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator3)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0]], 'not very good': [[4, -1, 0]]})", "def test_autocomplete_recipe_search(self):\n pass", "def test_find_phrase_matches1(self):\n\t\ttest = sentiment.LibraryRun(self.text1, self.lib)\n\t\tobj_ut = test.find_phrase_matches(self.tokens_generator1)[0]\n\t\tself.assertEqual(dict(obj_ut),\n\t\t\t{'not good': [[2, -1, 0]]})", "def test_search(self):\n pass", "def test_search(self):\n pass" ]
[ "0.7309068", "0.6687913", "0.6643784", "0.65354556", "0.6508402", "0.6308688", "0.6193006", "0.61645675", "0.61492896", "0.61248577", "0.6112851", "0.6052495", "0.60482764", "0.6041861", "0.60333693", "0.6031148", "0.59902656", "0.59857243", "0.59632605", "0.5960688", "0.5956268", "0.59367776", "0.59219307", "0.59054554", "0.5895159", "0.587564", "0.58744943", "0.58737224", "0.5871058", "0.5871058" ]
0.762991
0
Can create a service with a blank name.
def test_create_service_with_empty_name(self): response = self.tenant_client.create_service( type_=self.type, description=self.description) service = response.entity self.addCleanup(self.tenant_client.delete_service, response.entity.id_) self.assertEqual(response.status_code, 200) self.assertIsNone(service.name) self.assertEqual(service.type_, self.type) self.assertEqual(service.description, self.description)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_service(self, service_name, *args, **kwargs):\n\n creator = self._service_creators.get(service_name, None)\n\n if creator is None:\n return None\n\n return creator(*args, **kwargs)", "def test_creation_when_missing_service_name(self):\n self.data = {\n \"service_name\": \"\",\n \"service_price\": \"5000\",\n \"service_description\": \"See Kendrick perform live at the yard\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. Missing Service Name.\", str(response2.data))", "def test_001_create_empty(self):\n ret = svcmgr.main(argv=[\"create\", \"-s\", SVCNAME])\n assert ret == 0", "def unrecognised_service(service_name):\n print('Service {} not (yet) supported.'.format(service_name))\n pass", "def check_name(name, allow_services=False):", "def createService(data):\n return Service(data).create()", "def test_ipam_services_create(self):\n pass", "def test_create__requires_service_param(self) -> None:\n with pytest.raises(TypeError) as e:\n RecurringRideFactory.create() # type: ignore\n assert \"required positional argument: 'service'\" in str(e.value)", "def test_cannot_create_with_same_category_and_name(self):\n # Create an initial service\n self.project.services.create(name = \"service1\", category = self.category)\n # Then try to create the same service using the serializer\n serializer = ServiceSerializer(\n data = dict(name = \"service1\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'unique')", "def test_cannot_create_with_invalid_name(self):\n # Test with a blank name\n serializer = ServiceSerializer(\n data = dict(name = \"\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'blank')\n # Test with whitespace\n serializer = ServiceSerializer(\n data = dict(name = \"service 1\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'invalid')\n # Test with capital letters\n serializer = ServiceSerializer(\n data = dict(name = \"SERVICE1\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'invalid')\n # Test with unicode characters\n serializer = ServiceSerializer(\n data = dict(name = \"sèrvíçë1\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'invalid')", "def service_present(\n name, service_type, description=None, profile=None, **connection_args\n):\n ret = {\n \"name\": name,\n \"changes\": {},\n \"result\": True,\n \"comment\": 'Service \"{}\" already exists'.format(name),\n }\n\n # Check if 
service is already present\n role = __salt__[\"keystone.service_get\"](\n name=name, profile=profile, **connection_args\n )\n\n if \"Error\" not in role:\n return ret\n else:\n if __opts__.get(\"test\"):\n ret[\"result\"] = None\n ret[\"comment\"] = 'Service \"{}\" will be added'.format(name)\n return ret\n # Create service\n __salt__[\"keystone.service_create\"](\n name, service_type, description, profile=profile, **connection_args\n )\n ret[\"comment\"] = 'Service \"{}\" has been added'.format(name)\n ret[\"changes\"][\"Service\"] = \"Created\"\n\n return ret", "def create_service_entry(service_name, service_type, service_desc, owner=None):\n manager = get_manager()\n for service in [s._info for s in manager.api.services.list()]:\n if service['name'] == service_name:\n log(\"Service entry for '%s' already exists.\" % service_name,\n level=DEBUG)\n return\n\n manager.api.services.create(service_name,\n service_type,\n description=service_desc)\n log(\"Created new service entry '%s'\" % service_name, level=DEBUG)", "def test_creation_when_service_name_has_invalid_characters(self):\n self.data = {\n \"service_name\": \"@#$%^&*((\",\n \"service_price\": \"5000\",\n \"service_description\": \"See Kendrick perform live at the yard\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. 
Service Name Has Invalid Characters.\", str(response2.data))", "def test_build__requires_name_param(self) -> None:\n with pytest.raises(TypeError) as e:\n Service() # type: ignore\n assert \"required positional argument: 'name'\" in str(e.value)", "def service_create(path, service_name, definition):\n compose_result, loaded_definition, err = __load_compose_definitions(\n path, definition\n )\n if err:\n return err\n services = compose_result[\"compose_content\"][\"services\"]\n if service_name in services:\n msg = \"Service {} already exists\".format(service_name)\n return __standardize_result(False, msg, None, None)\n services[service_name] = loaded_definition\n return __dump_compose_file(\n path,\n compose_result,\n \"Service {} created\".format(service_name),\n already_existed=True,\n )", "def test_creation_when_service_attributes_name_is_empty(self):\n self.data = {\n \"service_name\": \"Live at the yard\",\n \"service_price\": \"5000\",\n \"service_description\": \"See Kendrick perform live at the yard\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"attribute name cannot be empty.\", str(response2.data))", "def create_service(self, service_id, service_ref):\n raise exception.NotImplemented() # pragma: no cover", "def test_create_missing_service_type(self):\n self.mockTicketGetTicket()\n maint_params = {\n 'master_ticket' : '080102-00121',\n 'description' : 'do stuff',\n 'expedite' : False,\n #'billing_text' : 'send me the bill',\n 'additional_duration_minutes': '60',\n #'service_type_id' : 1,\n 'employee_contact_id' : 1\n }\n response = self.app.post(url_for(controller='/maintenances', action='create'), params=maint_params, status=400)\n self.assertEqual(response.status, 400)", "def service_create(service, service_type, api, endpoint):\n db = model.Session()\n _assert_absent(db, model.Service, service)\n api = _must_find(db, model.API, api)\n service = model.Service(service, service_type, api, endpoint)\n db.add(service)\n db.commit()", "def test_build__valid_input(self, valid_service: fixture) -> None:\n service: Service = valid_service\n\n assert service.name == 'Testing Service'", "def test_create_service_ticket(self):\n st = ServiceTicketFactory()\n self.assertTrue(st.ticket.startswith(st.TICKET_PREFIX))", "def test_add_virtual_service(self):\n pass", "def test_create_confirm_service_details(self):\n pass", "def test_creation_when_missing_service_category(self):\n self.data = {\n \"service_name\": \"Live at the yard\",\n \"service_price\": \"5000\",\n \"service_description\": \"See Kendrick perform live at the yard\",\n \"service_category\": \"\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n 
store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. Missing Service Category\", str(response2.data))", "def test_creation_of_service_in_store_that_does_not_exist(self):\n response2 = self.client.post(store_url + '5a2bc733791e4bbc9a26f7a5/service/',\n data=json.dumps(self.service_zero),\n headers=self.my_header)\n self.assertEqual(response2.status, \"404 NOT FOUND\")\n self.assertIn(\"That Store does not exist.\", str(response2.data))", "def create_service(cls, proto_py_module, service_name):\n\n return cls.create_services(proto_py_module, service_name)", "def test_create_service(mock_send_message):\n event = {}\n OranDmaap.create_service(event)\n mock_send_message.assert_called_once_with('POST',\n 'Create Service via Dmaap',\n (f\"{BASE_URL}/events/A1-POLICY-AGENT-READ/\"),\n data=event,\n headers=HEADER)", "def test_creation_when_missing_service_description(self):\n\n self.data = {\n \"service_name\": \"Live at the yard\",\n \"service_price\": \"5000\",\n \"service_description\": \"\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. Missing Service Description.\", str(response2.data))", "def test_invalid_json_create_service(self):\n # create a payload with invalid json blob\n attack_string = self.create_invalid_json(2500)\n kwargs = {\"data\": attack_string}\n print kwargs\n resp = self.client.create_service(service_name=self.service_name,\n domain_list=self.domain_list,\n origin_list=self.origin_list,\n caching_list=self.caching_list,\n flavor_id=self.flavor_id,\n requestslib_kwargs=kwargs)\n if 'location' in resp.headers:\n self.service_url = resp.headers['location']\n else:\n self.service_url = ''\n \n self.assertTrue(resp.status_code < 503)", "def test_otoroshi_controllers_adminapi_tcp_service_api_controller_create_action(self):\n pass" ]
[ "0.68884873", "0.6859313", "0.6785968", "0.65416306", "0.6514902", "0.6495302", "0.6493249", "0.6388759", "0.63853484", "0.6314513", "0.6299523", "0.6253874", "0.62526363", "0.62521815", "0.62285614", "0.6164396", "0.61389124", "0.6129556", "0.6096557", "0.60756034", "0.6054856", "0.60394007", "0.599658", "0.59861606", "0.5962093", "0.5957862", "0.58878994", "0.58812654", "0.5840095", "0.5817088" ]
0.79796934
0
Can create two different services with the same name, type and description.
def test_create_service_with_duplicate_data(self): first_response = self.tenant_client.create_service( name=self.name, type_=self.type, description=self.description) first_service = first_response.entity self.assertEqual(first_response.status_code, 200) self.addCleanup(self.tenant_client.delete_service, first_service.id_) second_response = self.tenant_client.create_service( name=self.name, type_=self.type, description=self.description) second_service = second_response.entity self.assertEqual(second_response.status_code, 200) self.addCleanup(self.tenant_client.delete_service, second_service.id_) self.assertNotEqual(first_service, second_service) self.assertEqual(second_service.name, first_service.name) self.assertEqual(second_service.type_, first_service.type_) self.assertEqual(first_service.description, second_service.description)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_ipam_services_create(self):\n pass", "def test_create_services_with_tag(self):\n tag1 = sample_tag(user=self.user, name='Electrical')\n tag2 = sample_tag(user=self.user, name='Distribution')\n\n payload = {\n 'title' : 'Fitting Job',\n 'tags' : [tag1.id, tag2.id],\n 'price' : 100.00\n }\n\n res = self.client.post(SERVICES_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n services = Service.objects.get(id=res.data['id'])\n tags = services.tags.all()\n self.assertEqual(tags.count(), 2)\n self.assertIn(tag1, tags)\n self.assertIn(tag2, tags)", "def test_create_service_with_empty_name(self):\n response = self.tenant_client.create_service(\n type_=self.type,\n description=self.description)\n service = response.entity\n self.addCleanup(self.tenant_client.delete_service, response.entity.id_)\n self.assertEqual(response.status_code, 200)\n self.assertIsNone(service.name)\n self.assertEqual(service.type_, self.type)\n self.assertEqual(service.description, self.description)", "def test_cannot_create_with_same_category_and_name(self):\n # Create an initial service\n self.project.services.create(name = \"service1\", category = self.category)\n # Then try to create the same service using the serializer\n serializer = ServiceSerializer(\n data = dict(name = \"service1\", category = self.category.pk),\n context = dict(project = self.project)\n )\n self.assertFalse(serializer.is_valid())\n self.assertEqual(serializer.errors['name'][0].code, 'unique')", "def add_service_endpoint(key, name, description, type, url, region):\n service_names = {service.name: service.id for service in key.services.list()}\n if name in service_names.keys():\n service_id = service_names[name]\n else:\n service = key.services.create(name=name, service_type=type, description=description)\n print(\"Created service '{}' of type '{}'\".format(name, type))\n service_id = service.id\n\n for endpoint in key.endpoints.list():\n if endpoint.service_id == service_id:\n if endpoint.publicurl == url and endpoint.adminurl == url and endpoint.internalurl == url:\n return True\n else:\n key.endpoints.delete(endpoint.id)\n\n key.endpoints.create(region=region, service_id=service_id, publicurl=url, adminurl=url, internalurl=url)\n print(\"Added service endpoint '{}' at '{}'\".format(name, url))\n return True", "def services(**kwargs):\n pass", "def test_add_virtual_service(self):\n pass", "def test_create_confirm_service_details(self):\n pass", "def test_creation_of_duplicate_service_in_store(self):\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.service_zero),\n headers=self.my_header)\n response3 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.service_zero),\n headers=self.my_header)\n self.assertEqual(response3.status, \"409 CONFLICT\")\n self.assertIn(\"Sorry. 
Live at the yard already exists in this store.\", str(response3.data))", "def add(name, other, send_events=True, allow_services=False):", "def service_create(service, service_type, api, endpoint):\n db = model.Session()\n _assert_absent(db, model.Service, service)\n api = _must_find(db, model.API, api)\n service = model.Service(service, service_type, api, endpoint)\n db.add(service)\n db.commit()", "def service_present(\n name, service_type, description=None, profile=None, **connection_args\n):\n ret = {\n \"name\": name,\n \"changes\": {},\n \"result\": True,\n \"comment\": 'Service \"{}\" already exists'.format(name),\n }\n\n # Check if service is already present\n role = __salt__[\"keystone.service_get\"](\n name=name, profile=profile, **connection_args\n )\n\n if \"Error\" not in role:\n return ret\n else:\n if __opts__.get(\"test\"):\n ret[\"result\"] = None\n ret[\"comment\"] = 'Service \"{}\" will be added'.format(name)\n return ret\n # Create service\n __salt__[\"keystone.service_create\"](\n name, service_type, description, profile=profile, **connection_args\n )\n ret[\"comment\"] = 'Service \"{}\" has been added'.format(name)\n ret[\"changes\"][\"Service\"] = \"Created\"\n\n return ret", "def test_create_services_with_components(self):\n\n component1 = sample_componenets(user=self.user, name='switch')\n component2 = sample_componenets(user=self.user, name='switchboard')\n\n payload = {\n 'title' : 'Fitting Job',\n 'components' : [component1.id, component2.id],\n 'price' : 100.00\n }\n\n res =self.client.post(SERVICES_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n services = Service.objects.get(id=res.data['id'])\n components = Service.components.all()\n self.assertEqual(components.count(), 2)\n self.assertIn(component1, components)\n self.assertIn(component2, components)", "def _add_services(self):\n # Services and relations which are present merely to satisfy\n # required_interfaces and workload status are not inspected.\n # Fix me. 
Inspect those too.\n this_service = {'name': 'neutron-openvswitch'}\n other_services = [\n {'name': 'nova-compute'},\n {'name': 'nova-cloud-controller'},\n {'name': 'rabbitmq-server'},\n {'name': 'keystone'},\n {'name': 'glance'},\n {'name': 'neutron-api'},\n self.get_percona_service_entry(),\n ]\n if self._get_openstack_release() >= self.bionic_train:\n other_services.append({'name': 'placement'})\n super(NeutronOVSBasicDeployment, self)._add_services(this_service,\n other_services)", "def create_service(self, url_data):\n data = {key: value[0] for key, value in url_data}\n\n publish_key = uuid.uuid4().hex\n service_id = uuid.uuid4().hex\n service_name = data['name']\n\n self.fastly_cache[service_name] = {\n 'service_details': {\n u'comment': '',\n u'locked': False,\n u'updated_at': u'2014-11-13T14:29:10+00:00',\n u'created_at': u'2014-11-13T14:29:10+00:00',\n u'testing': None,\n u'number': 1,\n u'staging': None,\n u'active': None,\n u'service_id': service_id,\n u'deleted_at': None,\n u'inherit_service_id': None,\n u'deployed': None},\n 'service_name': service_name\n }\n self.fastly_cache[service_id] = self.fastly_cache[service_name]\n\n create_service = {\n u'comment': '',\n u'publish_key': publish_key,\n u'name': service_name,\n u'versions': [{u'comment': '', u'locked': u'0',\n u'service': service_id,\n u'updated_at': u'2014-11-12T18:43:21',\n u'created_at': u'2014-11-12T18:43:21',\n u'testing': None, u'number': u'1',\n u'staging': None,\n u'active': None,\n u'service_id': service_id,\n u'deleted_at': None,\n u'inherit_service_id': None,\n u'deployed': None,\n u'backend': 0}],\n u'created_at': u'2014-11-12T18:43:21+00:00',\n u'updated_at': u'2014-11-12T18:43:21+00:00',\n u'customer_id': data['customer_id'],\n u'id': service_id}\n return create_service", "def test_create_different_devices(self):\n command_line = self._MENU + [self._POOLNAME] + _DEVICE_STRATEGY()\n self.check_error(StratisCliNameConflictError, command_line, _ERROR)", "def definition_of_services(self):\r\n return True", "def _add_services(self):\n this_service = {'name': 'swift-proxy'}\n other_services = [\n {'name': 'percona-cluster'},\n {'name': 'keystone'},\n {'name': 'glance'},\n {'name': 'swift-storage'}\n ]\n super(SwiftProxyBasicDeployment, self)._add_services(this_service,\n other_services)", "def new_t1_vpn_service(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n display_name = kwargs['display_name']\n t1g = kwargs['tier1_gateway']\n service = kwargs['service_type']\n\n if service == 'ipsec':\n json_data = {\n \"resource_type\": \"IPSecVpnService\",\n \"display_name\": display_name,\n \"id\": display_name,\n \"enabled\": True\n }\n json_response_status_code = new_t1_ipsec_vpn_service_json(proxy, session_token, json_data, display_name, t1g)\n if json_response_status_code == 200:\n sys.exit(f'T1 IPSec VPN service {display_name} has been created successfully.')\n else:\n print('There was an error')\n sys.exit(1)\n elif service == 'l2vpn':\n json_data = {\n \"resource_type\": \"L2VPNService\",\n \"display_name\": display_name,\n \"id\": display_name\n }\n json_response_status_code = new_t1_l2vpn_service_json(proxy, session_token, json_data, display_name, t1g)\n if json_response_status_code == 200:\n sys.exit(f'T1 L2VPN service {display_name} has been created successfully.')\n else:\n print('There was an error')\n sys.exit(1)\n else:\n print(f'The supplied service is not correct. 
Please either provide \"ipsec\" or \"l2vpn\" as your option')\n sys.exit(1)", "def test_create_same_devices(self):\n command_line = self._MENU + [self._POOLNAME] + self.devices\n self.check_error(StratisCliNameConflictError, command_line, _ERROR)", "def add_service(project, env_spec_name, service_type, variable_name=None):\n failed = _check_problems(project)\n if failed is not None:\n return failed\n\n known_types = project.plugin_registry.list_service_types()\n found = None\n for known in known_types:\n if known.name == service_type:\n found = known\n break\n\n if found is None:\n return SimpleStatus(success=False,\n description=\"Unable to add service.\",\n errors=[\n \"Unknown service type '%s', we know about: %s\" %\n (service_type, \", \".join(map(lambda s: s.name, known_types)))\n ])\n\n if variable_name is None:\n variable_name = found.default_variable\n\n assert len(known_types) == 1 # when this fails, see change needed in the loop below\n\n requirement_already_exists = False\n existing_requirements = project.find_requirements(project.default_env_spec_name, env_var=variable_name)\n if len(existing_requirements) > 0:\n requirement = existing_requirements[0]\n if isinstance(requirement, ServiceRequirement):\n assert requirement.service_type == service_type\n # when the above assertion fails, add the second known type besides\n # redis in test_project_ops.py::test_add_service_already_exists_with_different_type\n # and then uncomment the below code.\n # if requirement.service_type != service_type:\n # return SimpleStatus(success=False, description=\"Unable to add service.\",\n # errors=[\"Service %s already exists but with type '%s'\" %\n # (variable_name, requirement.service_type)])\n # else:\n requirement_already_exists = True\n else:\n return SimpleStatus(success=False,\n description=\"Unable to add service.\",\n errors=[\"Variable %s is already in use.\" % variable_name])\n\n if not requirement_already_exists:\n project.project_file.set_value(_path_to_service(env_spec_name, variable_name), service_type)\n\n return _commit_requirement_if_it_works(project, variable_name, env_spec_name=env_spec_name)", "def _add_services(self):\n this_service = {'name': 'keystone'}\n other_services = [\n {'name': 'percona-cluster', 'constraints': {'mem': '3072M'}},\n {'name': 'rabbitmq-server'}, # satisfy wrkload stat\n {'name': 'cinder'},\n ]\n super(KeystoneBasicDeployment, self)._add_services(this_service,\n other_services)", "def create_service_entry(service_name, service_type, service_desc, owner=None):\n manager = get_manager()\n for service in [s._info for s in manager.api.services.list()]:\n if service['name'] == service_name:\n log(\"Service entry for '%s' already exists.\" % service_name,\n level=DEBUG)\n return\n\n manager.api.services.create(service_name,\n service_type,\n description=service_desc)\n log(\"Created new service entry '%s'\" % service_name, level=DEBUG)", "def create_TestService(test_case, # type: AnyMagpieTestCaseType\n override_service_name=null, # type: Optional[Str]\n override_service_type=null, # type: Optional[Str]\n override_headers=null, # type: Optional[HeadersType]\n override_cookies=null, # type: Optional[CookiesType]\n ): # type: (...) 
-> JSON\n app_or_url = get_app_or_url(test_case)\n svc_name = override_service_name if override_service_name is not null else test_case.test_service_name\n svc_type = override_service_type if override_service_type is not null else test_case.test_service_type\n data = {\n \"service_name\": svc_name,\n \"service_type\": svc_type,\n \"service_url\": \"http://localhost:9000/{}\".format(svc_name)\n }\n if svc_name:\n test_case.extra_service_names.add(svc_name) # indicate potential removal at a later point\n resp = test_request(app_or_url, \"POST\", \"/services\", json=data,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies,\n expect_errors=True)\n if resp.status_code == 409:\n path = \"/services/{svc}\".format(svc=svc_name)\n resp = test_request(app_or_url, \"GET\", path,\n headers=override_headers if override_headers is not null else test_case.json_headers,\n cookies=override_cookies if override_cookies is not null else test_case.cookies)\n body = check_response_basic_info(resp, 200, expected_method=\"GET\")\n if TestVersion(test_case.version) < TestVersion(\"0.9.1\"):\n body.update({\"service\": body[svc_name]})\n body.pop(svc_name)\n return body\n return check_response_basic_info(resp, 201, expected_method=\"POST\")", "def test_creation_when_missing_service_name(self):\n self.data = {\n \"service_name\": \"\",\n \"service_price\": \"5000\",\n \"service_description\": \"See Kendrick perform live at the yard\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. 
Missing Service Name.\", str(response2.data))", "def sample_services(user, **params):\n defaults = {\n 'title' : 'Sample services',\n 'price' : 5.00\n\n }\n defaults.update(params)\n\n return Service.objects.create(user=user, **defaults)", "def add_services(self):\n # first get the names\n names = str(self.client.console_execute('services -c name {0}\\n'.format(self.ip))[b'data'])\n while not 'name' in names:\n sleep(10)\n names = self.client.console_read()\n names = names.split('\\n')\n for row in names:\n if self.ip in row:\n row = strip_whitespaces(row)\n self.services.append({'name': row.split(' ')[1]})\n\n # get the ports by service name\n ports = str(self.client.console_execute('services -c port {0}\\n'.format(self.ip))[b'data'])\n while not 'port' in ports:\n sleep(10)\n ports = self.client.console_read()\n ports = ports.split('\\n')\n for row in ports:\n for service in self.services:\n if service['name'] in row:\n row = strip_whitespaces(row)\n service['port'] = row.split(' ')[1]\n\n # get some information by service name (only useful if a report shall be generated)\n info = str(self.client.console_execute('services -c info {0}\\n'.format(self.ip))[b'data'])\n while not 'info' in info:\n sleep(10)\n info = self.client.console_read()\n info = info.split('\\n')\n for row in info:\n for service in self.services:\n if service['name'] in row:\n row = strip_whitespaces(row)\n service['info'] = row.split(' ')[1]", "def createService(data):\n return Service(data).create()", "def test_creation_when_missing_service_description(self):\n\n self.data = {\n \"service_name\": \"Live at the yard\",\n \"service_price\": \"5000\",\n \"service_description\": \"\",\n \"service_category\": \"Music\",\n \"service_subcategory\": \"Live\",\n \"service_attributes\": {\n \"duration\": \"as long \",\n \"width\": \"20\",\n \"length\": \"20\",\n \"height\": \"20\"\n }\n }\n\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.data),\n headers=self.my_header)\n self.assertEqual(response2.status, \"400 BAD REQUEST\")\n self.assertIn(\"Error. Missing Service Description.\", str(response2.data))", "def service_create(path, service_name, definition):\n compose_result, loaded_definition, err = __load_compose_definitions(\n path, definition\n )\n if err:\n return err\n services = compose_result[\"compose_content\"][\"services\"]\n if service_name in services:\n msg = \"Service {} already exists\".format(service_name)\n return __standardize_result(False, msg, None, None)\n services[service_name] = loaded_definition\n return __dump_compose_file(\n path,\n compose_result,\n \"Service {} created\".format(service_name),\n already_existed=True,\n )" ]
[ "0.6295653", "0.6286004", "0.61355627", "0.6034166", "0.5985182", "0.5976208", "0.5959228", "0.58849037", "0.58821464", "0.5851764", "0.58444655", "0.5827681", "0.58261716", "0.5814517", "0.580832", "0.5779143", "0.5774439", "0.5768419", "0.57149935", "0.5706826", "0.56977415", "0.5695985", "0.56848216", "0.5659256", "0.5656654", "0.56441736", "0.56440604", "0.56230146", "0.5601895", "0.5560355" ]
0.72636217
0
Initialise all the values that are going to be used in the program. Create files by the given url and download the given url, then call the pywget_inside_crawler() function.
def initialise(url, depth): dir_string = url[url.find('/')+2 : url.rfind('/')+1] # the directory name that is going to be created format of .../.../.../ dir_string_list = dir_string.split('/') root_dir_name = dir_string_list[0] # the root directory's name. useful to check collisions filename = url[url.rfind('/')+1:] root_dir_name = handle_collision("dir", root_dir_name, root_dir_name, '') # handle directory name collisions start_dir = os.getcwd() # the location of this py file start_file = os.path.splitext(filename)[0] # the first file that is downloaded. useful to avoid cycles dir_string_list[0] = root_dir_name dir_string = '/'.join(dir_string_list) # change the directory names if collision happened os.makedirs(dir_string, exist_ok=True) os.chdir(dir_string) urllib.request.urlretrieve(url, filename) pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name) # start crawlling and recursion
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pywget_inside_crawler(url):\n\n # open and read the url\n content = ''\n try:\n request = urllib.request.urlopen(url)\n content = request.read().decode(\"utf-8\")\n except:\n pass\n\n # find all contents we need which are links and srcs using regex\n match = re.findall(r'<a href=\"(.*?)\"', content) + \\\n re.findall(r'<img src=\"(.*?)\"', content) + \\\n re.findall(r'<a href = \"(.*?)\"', content) + \\\n re.findall(r'<img src = \"(.*?)\"', content)\n\n domain_name = url[0 : url.rfind('/')]\n\n all_item_list = []\n\n # if it's an absolute link, add it to all_item_list\n # if it's a relative link, add prefix in the front and add it to the list\n if match:\n for item in match:\n if item.startswith(\"http://\") or item.startswith(\"https://\") or item.startswith(\"//\"):\n if item.startswith(domain_name):\n all_item_list.append(item)\n else:\n all_item_list.append(domain_name + \"/\" + item)\n\n # apply pywget_download_inside\n for item in all_item_list:\n pywget(item, first_time=False)", "def pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name):\n depth -= 1\n\n content = ''\n try:\n request = urllib.request.urlopen(url)\n content = request.read().decode(\"utf-8\")\n except:\n pass\n\n # all the information that's inside <a href> and <img src> tags\n match = re.findall(r'<a href=\"(.*?)\"', content) + \\\n re.findall(r'<a href = \"(.*?)\"', content) + \\\n re.findall(r'<img src=\"(.*?)\"', content) + \\\n re.findall(r'<img src = \"(.*?)\"', content)\n\n prefix = url[0 : url.rfind('/')] # a prefix of the link. useful to check if a link is under the same domain\n\n all_item_list = add_item_to_list(match, prefix) # add information to a list\n\n for item in all_item_list:\n pywget_recursive(item, depth, start_dir, start_file, root_dir_name) # recursively download the information", "def __init__(self, url, epRange):\n self.driver = webdriver.PhantomJS()\n self.downloads = OrderedDict() # sort episodes in asending order\n self.pbar = \"\" # Download Progressbar\n self.Main(url, epRange)", "def main():\n # the url for african daily and global daily\n african_dialy_url = \"https://data.chc.ucsb.edu/products/CHIRPS-2.0/africa_daily/tifs/p25/\"\n global_daily_url = \"https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/tifs/p25/\"\n\n\n each_year_list = GetRasterYears(url=african_dialy_url)\n new_path = makenewdir(each_year_list)\n years_new_list = fecthrasterurl(url=african_dialy_url)\n downloadwithwget(each_year_list, years_new_list, new_path)", "def __init__(self, main_url, year, folder):\n\n # logger setting\n logging.basicConfig(\n filename='crawler.log',\n level=logging.INFO,\n format='[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s'\n '- %(message)s',\n datefmt='%H:%M:%S'\n )\n\n # set up logging to console\n console = logging.StreamHandler()\n console.setLevel(logging.DEBUG)\n # set a format which is simpler for console use\n formatter = logging.Formatter('%(name)-12s: %(levelname)-8s '\n '%(message)s')\n console.setFormatter(formatter)\n # add the handler to the root logger\n logging.getLogger('crawler_app').addHandler(console)\n\n self.logger = logging.getLogger('crawler_app')\n\n # configuration / init\n\n self.shelve_obj = None\n self.maven_url = main_url\n self.year = year\n self.file_ext = '.txt'\n self.counter = 0\n self.url_to_parse = list()\n self.list_year_month_url = list()\n self.folder = 'mailbox/'\n self.meta_file_name = self.folder + 'meta.shelve'\n self.process_folder(folder)\n\n list_url = self.parse_main_page()\n msg_year_month = 
self.parse_year_month_link()\n self.parse_raw_msg()", "def main():\n download_insert_title_basics()\n download_insert_title_principals()\n download_insert_name_basics()\n download_insert_title_ratings()\n scrap_keywords()\n create_and_insert_soup()\n return", "def pywget(url, first_time=True):\n if not isinstance(url, str):\n print(\"Error: url is not a string\")\n return None\n\n filename = url[url.rfind('/')+1:]\n extension = os.path.splitext(filename)[1][1:].strip().lower()\n name_without_extension = os.path.splitext(filename)[0]\n\n # handle name collision\n i = 1;\n while os.path.isfile(filename):\n filename = name_without_extension + '.' + str(i) + '.' + extension\n i += 1\n\n try:\n urllib.request.urlretrieve(url, filename)\n if first_time:\n pywget_inside_crawler(url)\n except:\n pass", "def wget_content(url):\n\n try:\n\n for i in range(len(url)):\n url[i].replace(' ', \"%20\") if i > url.find('?') else url[i]\n\n with TemporaryDirectory() as dirname:\n retval = ''\n retcode = subprocess.Popen([\"wget\", \"--tries=5\", '--timeout=10', url, \"-O\", os.path.join(dirname, \"1.txt\")])\n retcode.wait()\n file_name = os.path.join(dirname, \"1.txt\")\n handle = open(file_name)\n if handle:\n retval = handle.read()\n\n\n except Exception as ex:\n if url.startswith(\"https://\") and \"handshake failure\" in retval:\n return wget_content(url.replace(\"https://\", \"http://\"))\n else:\n wxpush(\"Crawler module failure\", traceback.extract_stack(), True)\n\n return retval or \"\"", "def run_downloader(self):\n \"\"\"calls to the file downloader\"\"\"\n try:\n html = self.get_page(self.url)\n soup = self.get_soup(html)\n if soup is not None: # If we have soup -\n self.get_links(soup)\n self.get_files()\n else:\n self.producer(\"THESS_ENV_CITYOFTHESS_DAILY_YEARLY_DATA_ERROR\", 'data source format is not as expected',\n e)\n return False\n except Exception as e:\n self.producer(\"THESS_ENV_CITYOFTHESS_DAILY_YEARLY_DATA_ERROR\", 'data source format is not as expected', e)\n\n return False\n return True", "def __init__(self, url_download, dependency, source, cwd):\n self.url_download = url_download\n self.dependency = dependency\n self.source = source\n self.cwd = cwd", "def download_and_prepare(self):\n self._download_and_prepare()", "def fetch_pwc():\n for url in [\n \"https://production-media.paperswithcode.com/about/papers-with-abstracts.json.gz\",\n \"https://production-media.paperswithcode.com/about/links-between-papers-and-code.json.gz\",\n \"https://production-media.paperswithcode.com/about/evaluation-tables.json.gz\",\n \"https://production-media.paperswithcode.com/about/methods.json.gz\",\n \"https://production-media.paperswithcode.com/about/datasets.json.gz\",\n ]:\n logging.info(f\"Fetching and saving url {url}\")\n fetch_save(url)", "def rxnorm_crawler():\n # Target webpage\n weburls=[\n 'https://www.nlm.nih.gov/research/umls/rxnorm/docs/rxnormfiles.html',\n 'https://www.nlm.nih.gov/research/umls/rxnorm/docs/rxnormarchive.html'\n ]\n for weburl in weburls:\n # Get contents of webpage\n conn = urllib2.urlopen(weburl)\n html = conn.read()\n # Find urls of all RxNorm files\n pattern = '<a\\s*href=[\\'|\"](.*?/kss/rxnorm/RxNorm_full_\\d+.zip)[\\'|\"]>'\n rxnorm_urls = re.findall(pattern, html)\n for url in rxnorm_urls:\n r = requests.get(url)\n if r.status_code == 200:\n #upload the file\n file_name = re.findall('.*?(\\d+.zip)', url)[0]\n k = Key(bucket)\n k.key = 'rxnorm/' + file_name\n k.content_type = r.headers['content-type']\n k.set_contents_from_string(r.content)\n # Need to add 
cookies information", "def scrape_main() -> None:\n\n logger.info(\"Starting scrape\")\n search_info = construct_scrape_regex_patterns(grab_scrape_info())\n links = run_scrape(\n url=search_info['url'],\n seasons_regex=search_info['seasons'],\n episodes_regex=search_info['episodes']\n )\n if links:\n logger.debug(\"Writing urls to file\")\n with open('urls.txt', 'w') as f:\n for link in links:\n f.write(link + '\\n')\n else:\n logger.warning(\"No links available\")", "def download_images(main_keyword, supplemented_keywords, download_dir): \n image_links = set()\n print('Process {0} Main keyword: {1}'.format(os.getpid(), main_keyword))\n\n # create a directory for a main keyword\n img_dir = download_dir + main_keyword + '/'\n if not os.path.exists(img_dir):\n os.makedirs(img_dir)\n\n for j in range(len(supplemented_keywords)):\n print('Process {0} supplemented keyword: {1}'.format(os.getpid(), supplemented_keywords[j]))\n search_query = quote(main_keyword + ' ' + supplemented_keywords[j])\n # url = 'https://www.google.com/search?q=' + search_query + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'\n url = 'https://www.google.com/search?q=' + search_query + '&source=lnms&tbm=isch'\n image_links = image_links.union(parse_page(url))\n print('Process {0} get {1} links so far'.format(os.getpid(), len(image_links)))\n time.sleep(2)\n print (\"Process {0} get totally {1} links\".format(os.getpid(), len(image_links)))\n\n print (\"Start downloading...\")\n count = 1\n for link in image_links:\n try:\n req = urllib.request.Request(link, headers = {\"User-Agent\": generate_user_agent()})\n response = urllib.request.urlopen(req)\n data = response.read()\n file_path = img_dir + '{0}.jpg'.format(count)\n with open(file_path,'wb') as wf:\n wf.write(data)\n print('Process {0} fininsh image {1}/{2}.jpg'.format(os.getpid(), main_keyword, count))\n count += 1\n except urllib.error.URLError as e:\n logging.error('URLError while downloading image {0}\\nreason:{1}'.format(link, e.reason))\n continue\n except urllib.error.HTTPError as e:\n logging.error('HTTPError while downloading image {0}\\nhttp code {1}, reason:{2}'.format(link, e.code, e.reason))\n continue\n except Exception as e:\n logging.error('Unexpeted error while downloading image {0}\\nerror type:{1}, args:{2}'.format(link, type(e), e.args))\n continue\n\n print(\"Finish downloading, total {0} errors\".format(len(image_links) - count))", "def main():\n show_banner()\n args = parse_args(sys.argv[1:])\n urls = get_urls(args.inputfiles)\n if args.only_urls:\n print(\"URL\")\n else:\n print('{:70.70} {}'.format(\"URL\", \"Response\"))\n\n loop = asyncio.get_event_loop()\n loop.run_until_complete(download(urls,\n args.concurrency,\n args.only_success,\n args.outputfile,\n args.only_urls))", "def run(self):\n\n for url in self.urls:\n try:\n # Use requests to retrieve web page data\n print(url)\n response = session.get(url, ) # allow_redirects=True)\n\n if response.status_code != 200:\n print('Failed to retrieve page, URL: {0}, error: {1}\\n'.format(url, response.status_code))\n return\n\n # Get web page data from HTML response\n content = get_json_data(response.text)\n\n # Compile data into dictionary to be used for reporting\n summary_data = generate_report(content)\n\n # Generate/print report\n print_report(summary_data)\n\n except Exception as error:\n print('Scraper failed to run for URL {0}, error: {1}, {2}\\n'.format(\n url, type(error).__name__, error\n ))\n\n # time.sleep(1) # for load 
concerns", "def _download(self) -> None:\n download_url(\n self.url,\n self.root,\n filename=self.data_dir,\n md5=self.md5 if self.checksum else None,\n )\n self._extract()", "def download_files(self):", "def downloadTempGrab(self, url):\n if os.path.exists(\"temp.dat\"):\n os.remove(\"temp.dat\")\n cmd = \"wget -q -T 3 -t 1\" # 1 attempt (no retries)\n cmd += \" -O %s %s\" % (\"temp.dat\", url)\n self.log(cmd)\n process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n process.wait()", "def run(self):\n urls_to_download = self._get_links()\n results = ThreadPool(8).imap_unordered(self._download_url, urls_to_download)\n for path in results:\n print(path)", "def spider(given_url):\n \n url_to_crawl = given_url\n\n source_code = requests.get(url_to_crawl)\n plain_text = source_code.text\n soup = BeautifulSoup(plain_text)\n # name = soup.find('h1', {'class': 'pl-header-title'})\n # name = name.string\n # name = str(name)\n # name = name.strip('')\n # fw = open('links of' + name + '.txt', 'w')\n # fw2 = open('names of' + name + '.txt', 'w')\n fw = open('links.txt', 'w')\n fw2 = open('names.txt', 'w')\n for link in soup.findAll('a', {'class': 'pl-video-title-link yt-uix-tile-link yt-uix-sessionlink spf-link '}):\n my_href = 'https://www.youtube.com' + link.get('href')\n title = link.string\n #print(my_href, title)\n fw.write(my_href + '\\n')\n fw2.write(title)\n\n fw.close()\n fw2.close()\n\n \"\"\"Downloading Part\"\"\"\n try:\n os.system(\"youtube-dl --max-quality FORMAT -a links.txt\")\n return\n except:\n print(\"Something went wrong related to downloading\")\n exit(2)", "def __init__(self, base_url, start_urls, config, helper_outfile, verbose):\n\n # setup class variables\n self.base_url = base_url\n self.config = config\n self.helper_outfile = helper_outfile\n self.verbose = verbose\n self.found_urls = set()\n self.crawled_urls = {}\n self.crawled_paths = {}\n self.param_infos = {}\n self.helper_pid = None\n self.found_cookies = []\n self.comments = {}\n self.redirects = {}\n self.driver = None\n\n # figure out domain\n parsed_url = urllib.parse.urlparse(base_url)\n self.domain = parsed_url.hostname\n self.port = parsed_url.port\n if not self.port:\n self.port = 80 if parsed_url.scheme == \"http\" else 443\n self.protocol_prefix = \"%s://\" % parsed_url.scheme\n\n # compile exclude path regexes from config\n self.exclude_paths = []\n if self.config.get(\"exclude_paths\", \"\"):\n exclude_paths_str = util.parse_as_csv(self.config.get(\"exclude_paths\", \"\"))\n for path_str in exclude_paths_str:\n self.exclude_paths.append(re.compile(path_str))\n\n # parse cookies from config\n self.cookies = {}\n for key_val_pair in self.config[\"cookie_str\"].split(\";\"):\n if not key_val_pair:\n continue\n if \"=\" not in key_val_pair:\n self.cookies[key_val_pair.strip()] = \"\"\n else:\n key, val = key_val_pair.strip().split(\"=\")\n self.cookies[key.strip()] = val.strip()\n\n # setup start urls\n self.start_urls = set([base_url])\n for url in start_urls:\n # skip paths that are excluded from crawling\n if self.exclude_paths and url.count(\"/\") > 2:\n check_str = \"/\" + \"/\".join(url.split(\"/\")[3:])\n if any(re_path.match(check_str) for re_path in self.exclude_paths):\n continue\n self.start_urls.add(url)\n self.start_urls = list(self.start_urls)\n\n # create unix socket for IPC with crawler helper\n if os.path.exists(UNIX_SOCK_ADDR):\n os.remove(UNIX_SOCK_ADDR)\n self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n self.socket.bind(UNIX_SOCK_ADDR)\n\n # setup selenium if 
it is configured to be used\n if config[\"use_selenium\"].lower() == \"true\":\n import logging\n logging.getLogger(\"seleniumwire\").setLevel(logging.ERROR)\n from seleniumwire import webdriver\n from selenium.webdriver.chrome.options import Options\n chrome_options = Options()\n chrome_options.add_argument(\"--headless\")\n chrome_options.add_argument(\"--user-agent=%s\" % self.config[\"user_agent\"])\n\n # on Linux running Selenium as root requires '--no-sandbox' option\n if os.geteuid() == 0 and sys.platform.startswith(\"linux\"):\n chrome_options.add_argument(\"--no-sandbox\")\n self.driver = webdriver.Chrome(options=chrome_options)\n\n # disallow downloads via Selenium (see https://stackoverflow.com/a/47366981)\n self.driver.command_executor._commands[\"send_command\"] = (\"POST\", \"/session/$sessionId/chromium/send_command\")\n params = {\"cmd\": \"Page.setDownloadBehavior\", \"params\": {\"behavior\": \"disallow\", \"downloadPath\": \"\"}}\n command_result = self.driver.execute(\"send_command\", params)\n\n # add cookies\n self.driver.get(self.base_url) # initial request required to add cookies\n self.driver.delete_all_cookies()\n for key, val in self.cookies.items():\n self.driver.add_cookie({\"name\": key, \"value\": val, \"domain\": self.domain})", "def download_from_url(url, output_path):\n\n print('Pulling data from {} to {}'.format(url, output_path))\n wget.download(url, output_path)\n print('done')", "def download(self):\n\n with open(self.dataset_path) as dataset_file:\n dataset = json.load(dataset_file)\n\n path = \"\".join([POST_HIT_PATH, dataset[\"dataset\"][\"data_path\"]])\n if not os.path.exists(path):\n os.makedirs(path)\n\n protocole = dataset[\"dataset\"][\"protocole\"]\n\n download_links = []\n\n for resource in dataset[\"dataset\"][\"resources\"]:\n file_path = \"\".join([path, resource[\"filename\"]])\n\n #Check if the the download link has not been used before (One download link for all)\n if resource[\"download_link\"] not in download_links:\n \n print(\"DOWNLOADING : {}\".format(resource[\"filename\"]))\n f = urllib.request.urlopen(resource[\"download_link\"])\n data = f.read()\n with open(file_path, \"wb\") as donwload_file:\n donwload_file.write(data)\n\n download_links.append(resource[\"download_link\"])\n\n \n #Extract all files from the tar archives if necessary\n if tarfile.is_tarfile(file_path):\n tf = tarfile.open(file_path)\n tf.exractall()", "def fetch_web_cont(self):\n with open(self.input_file) as input_file:\n data = yaml.load(input_file, yaml.FullLoader)\n url_list = data.get(self.url_access)\n regex_list = data.get(self.regex_access)\n\n print('Fetching data:')\n\n for url in url_list:\n # This restores the same behavior as before.\n # Enabling certificate verification by default for stdlib http clients\n context = ssl._create_unverified_context()\n run_time = datetime.now().strftime(\"Date: %d-%m-%Y Time: %I:%M:%S:%f_%p\")\n start = time.perf_counter()\n web_resp = request.urlopen(url, context=context)\n respData = web_resp.read()\n resp_time = '%0.2f s' % (time.perf_counter() - start)\n\n for regex in regex_list:\n contents = re.findall(regex, str(respData))\n with open(self.output_file, 'a') as file:\n if not contents:\n print(run_time, ' | URL: ', url, '| content not found with this regex: ', regex,\n file=file)\n\n else:\n for content in contents:\n print(run_time, ' | URL: ', url, ' | Response Time: ', resp_time,\n url, ' | Contents: ', content, file=file)\n \n with open(self.output_file, 'a') as file:\n \n 
print('\\n#################################\\n', file=file)", "def main(file_path, urls):\n # format urls input\n with open(urls, 'r') as file:\n urls = file.read().replace('\\n', '')\n\n urls = urls.strip('[]')\n urls = re.findall(r'\\([^\\)\\(]*\\)', urls)\n\n for file in urls:\n\n file_name, url = tuple(file.strip('()').split(', '))\n\n # check if file is already downloaded\n if os.path.exists(os.path.join(file_path, file_name)):\n print(\"%s already exists.\\n\" % file_name)\n continue\n else:\n print(\"Starting download for %s...\\n\" % file_name)\n\n # Create the data subdirectory if it doesn't exist\n os.makedirs(file_path, exist_ok=True)\n\n # create response object\n r = requests.get(url, stream=True)\n widgets = [\"Progress: \",\n progressbar.DataSize(), \"| \",\n progressbar.Timer()]\n bar = progressbar.ProgressBar(widgets=widgets,\n max_value=progressbar.UnknownLength)\n value = 0\n # download started\n with open(os.path.join(file_path, file_name), 'wb') as f:\n for chunk in r.iter_content(chunk_size=64*1024):\n if chunk:\n f.write(chunk)\n value += len(chunk)\n bar.update(value)\n\n print(\"\\n%s downloaded!\\n\" % file_name)\n\n print(\"All files downloaded!\")", "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def __init__(self, urls_file_, file_spider_='no', target_format_='', ignored_links_file_='',\n allow_clean_url_='no', time_out_=60, work_path_='./',\n max_recursion_depth_=0, one_bite_='no', white_list_path_=\"\"):\n self.__urls = Crawler.__read_file(urls_file_)\n self.__file_spider = file_spider_\n self.__target_format = target_format_\n self.__allow_clean_url = allow_clean_url_\n self.__one_bite = one_bite_\n self.__white_list_path = white_list_path_\n self.__white_list = []\n\n # loads white list in beginning in case an argument was passed for it\n if self.__file_spider == 'yes' and self.__white_list_path != '':\n self.__white_list = Crawler.__read_white_list(self.__white_list_path)\n\n # link titles that should be ignored during recursions\n self.__ignored_links = Crawler.__read_file(ignored_links_file_)\n\n self.__time_out = time_out_\n self.__work_path = os.path.join(work_path_.rstrip('/')+'/', 'DATA')\n self.__recursion_max_depth = max_recursion_depth_\n self.__extensions = ['txt', 'html', 'csv', 'tsv', 'tar', 'raw']\n\n logging.info('''Crawler Has been Initialized With The Below Configurations:\n-------------------------------------------------------------------\n-urls: %s\n-file_spider: %s\n-target_format: %s\n-ignored_links_file: %s\n-allow_clean_url: %s\n-time_out: %s\n-work_path: %s\n-max_recursion_depth: %s\n-one_bite: %s\n-white_list_path: %s\n''', self.__urls, self.__file_spider, self.__target_format, self.__ignored_links,\n self.__allow_clean_url, self.__time_out, self.__work_path,\n self.__recursion_max_depth, self.__one_bite, self.__white_list_path)", "def main(url, localfile):\n ph.download_file(url, localfile)" ]
[ "0.7065692", "0.655426", "0.65318704", "0.6516066", "0.64950967", "0.6479882", "0.64755625", "0.6383626", "0.6380122", "0.6354922", "0.63388115", "0.6297223", "0.6268197", "0.62476665", "0.62240934", "0.62218875", "0.61970305", "0.61833394", "0.61645144", "0.6163209", "0.6153797", "0.61524135", "0.6136798", "0.61130995", "0.60933715", "0.6088484", "0.6088464", "0.60865617", "0.60836047", "0.6076477" ]
0.7149152
0
Crawl the given url, find all <a> and <img> tags, get the information inside the tags and apply pywget_recursive() function on each of them
def pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name):
    depth -= 1

    content = ''
    try:
        request = urllib.request.urlopen(url)
        content = request.read().decode("utf-8")
    except:
        pass

    # all the information that's inside <a href> and <img src> tags
    match = re.findall(r'<a href="(.*?)"', content) + \
            re.findall(r'<a href = "(.*?)"', content) + \
            re.findall(r'<img src="(.*?)"', content) + \
            re.findall(r'<img src = "(.*?)"', content)

    prefix = url[0 : url.rfind('/')]  # a prefix of the link. useful to check if a link is under the same domain

    all_item_list = add_item_to_list(match, prefix)  # add information to a list

    for item in all_item_list:
        pywget_recursive(item, depth, start_dir, start_file, root_dir_name)  # recursively download the information
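For illustration, a minimal sketch of how the link-extraction step above behaves; the sample URL and HTML snippet are made-up assumptions, not values taken from this record.

import re

url = "http://example.com/site/index.html"   # hypothetical start page
content = '<a href="docs/page1.html"> <img src="http://example.com/site/logo.png">'

# same regex idea as above: pull href/src values out of the page source
match = re.findall(r'<a href="(.*?)"', content) + re.findall(r'<img src="(.*?)"', content)
prefix = url[0 : url.rfind('/')]

print(match)    # ['docs/page1.html', 'http://example.com/site/logo.png']
print(prefix)   # http://example.com/site  -- relative links get resolved against this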
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parsing_all_page(url):\n html_doc = get_html(url)\n# html_doc = get_html_local()\n page_count = get_html_count(html_doc)\n print 'All have find pages %d' % page_count\n\n projects = []\n\n for page in range(1, page_count + 1):\n print 'Parsing %d%%' % (page*100/page_count)\n\n url = BASE_URL + '?page=%d' % page\n projects.extend(process_page(url))\n\n return projects", "def pywget_inside_crawler(url):\n\n # open and read the url\n content = ''\n try:\n request = urllib.request.urlopen(url)\n content = request.read().decode(\"utf-8\")\n except:\n pass\n\n # find all contents we need which are links and srcs using regex\n match = re.findall(r'<a href=\"(.*?)\"', content) + \\\n re.findall(r'<img src=\"(.*?)\"', content) + \\\n re.findall(r'<a href = \"(.*?)\"', content) + \\\n re.findall(r'<img src = \"(.*?)\"', content)\n\n domain_name = url[0 : url.rfind('/')]\n\n all_item_list = []\n\n # if it's an absolute link, add it to all_item_list\n # if it's a relative link, add prefix in the front and add it to the list\n if match:\n for item in match:\n if item.startswith(\"http://\") or item.startswith(\"https://\") or item.startswith(\"//\"):\n if item.startswith(domain_name):\n all_item_list.append(item)\n else:\n all_item_list.append(domain_name + \"/\" + item)\n\n # apply pywget_download_inside\n for item in all_item_list:\n pywget(item, first_time=False)", "def visit(self, max_depth = DEPTH, response_handler=record, html_rendering=False, no_expand=lambda url, doc: False):\n if self.depth >= max_depth:\n return\n if self.url.name in pool:\n return\n else:\n pool.add(self.url.name)\n \n print(f\"Requesting {self.url.name}...\")\n \n# host for relative href\n try:\n host = re.search(r\"(?:(?:https?:)?//)?([^/]+)\", self.url.name).group(1)\n except Exception:\n host = None\n\n# indicate if the request is successful\n flag = False\n site = None\n html = ''\n\n for req in self.url.request_string():\n if html_rendering:\n renderer.render(req, timeout=10)\n while not renderer.ready:\n time.sleep(1)\n html = renderer.html\n site = bs4.BeautifulSoup(html, 'html5lib')\n if html:\n flag = True\n else:\n try:\n # print(f\"Site: {req}\")\n r = requests.get(req, timeout = 5)\n if r.status_code != 200:\n print(f\"Warning: HTTP response for {req} is {r.status_code} but 200\")\n else:\n # print(\"OK\")\n flag = True\n html = r.content.decode('utf-8')\n site = bs4.BeautifulSoup(html, 'html5lib')\n break\n except requests.exceptions.Timeout:\n # print(f\"Request time out : {req}\")\n pass\n except Exception:\n # print(f\"Failed to connect : {req}\")\n pass\n\n if not site:\n return\n\n if not flag:\n return\n\n urls = []\n\n # handle the response\n response_handler(self.url.name, html)\n\n # find successors\n for tag in site.find_all('a'):\n urls.append(tag.get('href'))\n # print('Link to', tag.get('href'))\n \n if no_expand(self.url.name, html):\n # stop expanding\n return\n\n thread_pool = []\n for url in urls:\n if not url:\n continue\n # add host if started with a slash\n if url[0] == '/':\n if len(url) > 1 and url[1] == '/':\n url = url.lstrip('/')\n else:\n url = host + url\n url = url.rstrip('/')\n\n searchTask = URL(url)\n\n if not searchTask.valid:\n # print(f\"Invalid URL: {url}\")\n continue\n else:\n # if the website has been visited\n if searchTask.name in pool:\n continue\n else:\n thread = threading.Thread(target=Node(searchTask, self.depth + 1).visit, args=(max_depth, response_handler))\n thread.start()\n thread_pool.append(thread)\n\n while thread_pool:\n for thread in thread_pool:\n 
thread.join(timeout=0)\n if not thread.is_alive():\n thread_pool.remove(thread)\n time.sleep(1)", "def crawl(self, url):\n\n url = self.url_util.normalise_url(url)\n hostname = self.url_util.get_hostname(url)\n\n urlsToVisit = [url]\n urlsVisted = []\n output = []\n # Each iteration of this loop processes the next URL to visit.\n while (len(urlsToVisit) > 0):\n \n url = urlsToVisit.pop(0)\n urlsVisted.append(url)\n\n html = self.html_requester.get_html(url)\n links = self.html_parser.get_links(html)\n same_hostname_urls = self.html_parser.get_same_hostname_urls(hostname, links)\n assets = self.html_parser.get_assets(same_hostname_urls)\n web_pages = self.html_parser.get_web_pages(same_hostname_urls)\n \n output.append({\"url\":url,\"assets\":assets})\n print json.dumps({\"url\":url,\"assets\":assets}, indent=4)\n \n for web_page in web_pages:\n # Do not visit a page more than once\n if not web_page in urlsToVisit and web_page not in urlsVisted:\n urlsToVisit.append(web_page)\n \n return json.dumps(output, indent=4).splitlines()", "def link_scraping(final_links, driver):\n\n for final_link in final_links:\n tags = extract_all_tags(final_link, driver)\n if len(tags) != 0:\n final_tags = find_usefull_tags(tags, tagmodel, tag_count_vect)\n if len(final_tags) != 0:\n print('Extracting(classname): ', final_link)\n scrape_data(final_link, final_tags, driver)\n else:\n print('Extracting(tag): ', final_link)\n scrape_data_tag(final_link, driver)\n else:\n print('Extracting(tag): ', final_link)\n scrape_data_tag(final_link, driver)", "def harvest_urls():\n manifest = []\n category = {}\n subcategory = {}\n directoryfiles = \"%s/directory_listing/\" % config['PREFIX']\n # ^^ the directory containing the HTML from the Technorati site.\n\n #Set up directory for intermediate data: MANIFEST\n #MANIFEST contains: Category, Subcategory, Title and URL.\n #and is a roster of URLs of blogs to autodiscover.\n if not os.path.exists(prefix + \"meta\"):\n os.mkdir(prefix + \"meta\")\n else:\n #TO DO: What if meta exists but MANIFEST got deleted?\n logging.info(\"Blog URLs already harvested. Skipping...\")\n return\n\n #Iterate through each file in the directory and extract blog URLs.\n for infile in glob.glob(os.path.join(directoryfiles, '*.html')):\n logging.info(\"Harvesting blog URLs from %s.\" % infile)\n dirpage = file(infile)\n root = parse(dirpage).getroot()\n #Rather than infer the category from the filename, just extract\n #it from the file. 
Not the best way to do this, hit is minimal.\n\tpieces = infile.split('/')[-1].split('_')\n\tcat = pieces[1]\n\tsubcat = None\n\tif len(pieces) == 4:\n\t\tsubcat = pieces[2]\n blogs = root.xpath(\"//td[@class='site-details']\")\n #Iterate through all of the blogs listed on the page.\n for blog in blogs:\n url = blog.xpath(\"a[@class='offsite']\")[0].text\n title = blog.xpath('h3/a')[0].text\n OUT = open(prefix + \"meta/MANIFEST\", \"a\")\n #Store the category of the blog.\n category[url] = cat\n if subcat:\n output = [cat, subcat, title.encode('utf-8').replace(' ', ' '), url]\n subcategory[url] = subcat\n print >> OUT, ' '.join(output)\n else:\n output = [cat, \"NA\", title.encode('utf-8').replace(' ', ' '), url]\n print >> OUT, '\\t'.join(output)\n manifest.append(output)\n OUT.close()\n # This is a hack to get around having to use a database.\n # TODO: Reimplement using a database.\n BLOGCATS = open(prefix + \"blogcats.pickle\", \"w\")\n cPickle.dump(category, BLOGCATS)\n BLOGCATS.close()\n return manifest", "def parse(url, conf):\n try:\n result = []\n html = urlopen(url)\n dom = soupparser.fromstring(html)\n items = dom.xpath(conf['xpath'])\n\n for item in items:\n result.append(conf['parse_func'](item.getchildren()))\n return result\n except Exception , e:\n raise e", "def crawl(self, url, crawl_timeout, target_url, max_depth):\n try:\n\n max_depth = int(max_depth)\n if not max_depth >= 1:\n return []\n\n url_list = []\n httpClient = HttpClient()\n info = httpClient.send(url, float(crawl_timeout))\n #info = open(\"./res\").read()\n htmlUrl = HtmlUrl(url, target_url)\n htmlUrl.feed(info)\n imgUrl = ImgUrl(url, target_url)\n imgUrl.feed(info)\n htmlUrl.urls.extend(imgUrl.urls)\n map((lambda item: url_list.append(\n item.decode('utf-8').encode('utf8'))), htmlUrl.urls)\n url_list = list(set(url_list))\n\n url_final_list = copy.deepcopy(url_list)\n for url in url_list:\n url_final_list.extend(\n self.crawl(\n url,\n crawl_timeout,\n target_url,\n max_depth -\n 1))\n\n self.logger.info(\n \"crawl url %s,current max_depth is %s\" %\n (url, max_depth))\n return url_final_list\n except Exception as e:\n self.logger.info(\"crawl url %s error,reason %s\" % (url, e))\n return []", "def process(self, url):\n cache = set()\n\n def populate(url):\n try:\n resp = requests.get(url, headers={\"User-agent\": \"Mozilla/5.0\"}, verify=False)\n except Exception:\n return\n\n # handle redirects\n if any(r.status_code in (301, 302) for r in resp.history):\n if self.redirect_callback(url):\n return\n\n # handle errors\n if resp.status_code >= 400:\n if self.error_callback(\"%03d: %s\" % (resp.status_code, url)):\n return\n\n # handle non-html\n if not resp.headers.get(\"content-type\", \"\").startswith(\"text/html\"):\n self.nonhtml_callback(url)\n return\n\n # parse pages\n self.link_content_callback(dict(url=url, content=resp.text))\n\n for item in re.finditer(self.LINK_PATTERN, resp.text):\n link = urlparse.urljoin(url, item.group(1))\n\n # populate all parsed link (if not cached)\n if link not in cache:\n if self.ext_link_test(url, link):\n cache.add(link)\n self.link_callback(link)\n\n self.stop_callback() # check for stopping\n\n if self._recursive:\n populate(link)\n else:\n self.ext_link_callback(link)\n\n populate(url)", "def main():\n goods = '书包'\n # 爬取深度\n depth = 3\n start_url = 'https://s.taobao.com/search?q=' + goods\n # 输出结果的列表\n infoList = []\n # 使用for循环对每一个页面进行处理\n for i in range(depth):\n try:\n # 每个页面的URL链接\n url = start_url + '' + str(44*i)\n html = getHTMLText(url)\n parsePage(infoList, 
html)\n except:\n continue\n printGoodsList(infoList)", "def deep_link_scraping(final_links, driver):\n\n import re\n second_links = [] \n for website2 in final_links:\n links2 = extract_all_links(website2, driver)\n final_links1 = find_usefull_links(links2, classmodel, class_count_vect)\n final_links2 = list(set(final_links1) - set(final_links))\n second_links += final_links2\n\n \n second_links = list(dict.fromkeys(second_links))\n second_links1 = find_usefull_links(second_links, classmodel, class_count_vect)\n second_links2 = []\n for link in second_links1:\n if re.search('#', link):\n x = re.search('#', link)\n link = link[:int(x.span()[0])]\n second_links2.append(link)\n else:\n second_links2.append(link)\n\n second_links2 = list(dict.fromkeys(second_links2))\n for final_link in second_links2:\n tags = extract_all_tags(final_link, driver)\n if len(tags) != 0:\n final_tags = find_usefull_tags(tags, tagmodel, tag_count_vect)\n if len(final_tags) != 0:\n scrape_data(final_link, final_tags, driver)\n else:\n scrape_data_tag(final_link, driver)\n else:\n scrape_data_tag(final_link, driver)\n return second_links2", "async def crawl(self):\n fetch_urls = [self.start_url]\n results = []\n while len(fetch_urls):\n \"\"\"\n slicing array urls with max_async_call arg and then run extract_data_urls\n extract_data_urls return a object that contains url, data, found_urls, and all_urls\n url is a url that we crawled\n data is Html content of the url\n found_urls are new urls that we have to crawl that\n all_urls are all links in the html page\n \"\"\"\n urls = await self.extract_data_urls(fetch_urls[0:self.max_async_call])\n del fetch_urls[0:self.max_async_call]\n for url, data, found_urls, all_urls in urls:\n fetch_urls.extend(found_urls)\n result = self.parse_html_content(data)\n result['urls'] = all_urls\n results.append((url, result))\n return results", "def scrape_data(final_link, tags, driver):\n #driver = webdriver.Chrome(executable_path=\"ChromeDriver/chromedriver.exe\")\n driver.get(str(final_link))\n errcount = 0\n for tag in tags:\n try:\n children = driver.find_elements_by_css_selector(tag)\n for child in children:\n try:\n links = child.find_elements_by_tag_name('a')\n images = child.find_elements_by_tag_name('img')\n if len(child.text) == 0:\n continue\n else:\n infotext = []\n sociallinks = []\n imageslinks = [] \n checklen = len(child.text.split(\"\\n\"))\n if checklen > 0 and checklen < 30:\n infotext = child.text.split(\"\\n\")\n\n for link in links:\n sociallinks.append(link.get_attribute('href'))\n\n for linki in imageslinks:\n imageslinks.append(linki.get_attribute('href'))\n\n except:\n continue\n \n infolen = len(infotext)\n sociallen = len(sociallinks)\n if sociallen > 0 and sociallen <= 10 and infolen != 0:\n dump_data(infotext, sociallinks, imageslinks)\n \n else:\n if infolen == 0 or sociallen == 0:\n errcount += 1\n \n except:\n continue\n \n if errcount == len(tags):\n scrape_data_tag(final_link, driver)\n \n elif errcount > 0:\n scrape_data_tag(final_link, driver)\n \n #driver.quit()", "def run(self):\n\n # The url is too deep, skip the url.. Work is done!\n if self.depth_ > self.depth:\n return\n\n # Get doc id corresponds to the url. Add a new entry into doc index if there is no entry.\n doc_id = self.crawler.document_id(self.curr_url)\n\n # Check if the doc_id has been visited/processed by any of crawler_threads. 
Add doc_id to seen if not so.\n if self.crawler.checkDocVisitedAndUpdate(doc_id):\n return\n\n # Process the document corresponds to the url\n socket = None\n try:\n socket = urllib2.urlopen(self.curr_url, timeout=self.timeout)\n soup = BeautifulSoup(socket.read())\n self._curr_depth = self.depth_ + 1\n self._curr_doc_id = doc_id\n # Traverse the document as deep as possible and add those newly discovered urls into url queue\n self._index_document(soup)\n # Store (wordId, docId) and (word, url) into inverted_index and resolved_inverted_index respectively.\n self.crawler._add_words_to_document(self._curr_words, self._curr_doc_id)\n except:\n pass\n finally:\n if socket:\n socket.close()", "def _grab_tags(self, url):\n a = self._api_request(url)\n return bs4.BeautifulSoup(a,features=\"html.parser\")", "def find_tag_urls(r):\n parser = MyHTMLParser()\n parser.feed(r)\n return parser.url_list", "def parse_url_bfs(html, layer = 1):\n\tcur_layer_n = 1\n\tcur_layer = []\n\tcur_layer.append(html)\n\tvisited.append(html)\n\tnext_layer = []\n\tnum = 0\n\twhile (cur_layer_n <= layer):\n\t\tfor link in cur_layer:\n\t\t\tif len(link) > 23 and \"https://tw.buy.yahoo.com\" in link:\n\t\t\t\ttry:\n\t\t\t\t\t#print (link + \"\\n\")\n\t\t\t\t\t#num += 1\n\t\t\t\t\t#print (num)\n\t\t\t\t\tpage = urllib.request.urlopen(link, timeout = 1)\n\t\t\t\t\tsoup = BeautifulSoup(page.read(),\"lxml\")\n\t\t\t\t\tfor a_line in soup.findAll('a', href=True):\n\t\t\t\t\t\turl = a_line.get(\"href\")\n\t\t\t\t\t\tif url[0:2] == \"/?\" :\n\t\t\t\t\t\t\turl = \"https://tw.buy.yahoo.com\" + url\n\t\t\t\t\t\tif url[0:1] == \"?\":\n\t\t\t\t\t\t\turl = \"https://tw.buy.yahoo.com/\" + url\n\t\t\t\t\t\tif url not in visited:\n\t\t\t\t\t\t\tvisited.append(url)\n\t\t\t\t\t\t\tnext_layer.append(url)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\t#print (\"except :\" + url + \"\\n\")\n\t\t\t\t\t#print (e)\n\t\t\t\t\tpass\n\t\tcur_layer = list(next_layer)\n\t\tcur_layer_n+=1\n\tnum = 0\n\tf = open(\"url_all.txt\", \"w+\")\n\tfor url in visited:\n\t\tf.write(url+ \"\\n\")\n\t\tnum += 1\n\t\t#print (url+\"\\n\")\n\tf.close()\n\tprint (num)", "def recursive_urls(urls):\n if len(urls) == 0:\n return\n rs = [grequests.get(url, hooks=dict(args=print_url)) for url in urls]\n responses = grequests.map(rs)\n url_lists = [get_urls_from_response(response) for response in responses]\n urls = sum(url_lists, []) # flatten list of lists into a list\n recursive_urls(urls)", "def crawl(url):\n while True:\n try:\n proxy=get_random_proxy()\n proxies = {'http': 'http://' + proxy}\n logger.info(proxies)\n resp = requests.get(url, proxies=proxies,timeout=3) # 设置代理,抓取每个公司的连接\n resp.encoding = resp.apparent_encoding # 可以正确解码\n if resp.status_code==200:\n html = etree.HTML(resp.text)\n logger.info(\"成功获得公司信息url!!!\")\n break\n else:\n continue\n except:\n logger.info(\"没获取到\")\n continue\n return html", "def _scrape(self):", "def scrape_data_tag(final_link, driver):\n\n import time\n #driver = webdriver.Chrome(executable_path=\"ChromeDriver/chromedriver.exe\")\n driver.get(final_link)\n time.sleep(2)\n tags = ['li', 'p', 'tr']\n for tag in tags:\n children = driver.find_elements_by_tag_name(tag)\n for child in children:\n try:\n links = child.find_elements_by_tag_name('a')\n images = child.find_elements_by_tag_name('img')\n if len(child.text) == 0:\n continue\n else:\n infotext = []\n sociallinks = []\n imageslinks = [] \n checklen = len(child.text.split(\"\\n\"))\n if checklen > 0 and checklen < 30:\n infotext = child.text.split(\"\\n\")\n\n for link in links:\n 
sociallinks.append(link.get_attribute('href'))\n \n for link in imageslinks:\n imageslinks.append(link.get_attribute('href'))\n\n except:\n continue\n\n infolen = len(infotext)\n sociallen = len(sociallinks)\n if sociallen > 0 and sociallen <= 10 and infolen != 0:\n try:\n dump_data(infotext, sociallinks, imageslinks)\n except:\n continue\n elif sociallen == 0 and infolen != 0:\n try:\n sociallinks = ['No Available Social Media Links']\n dump_data(infotext, sociallinks, imageslinks)\n except:\n continue\n \n\n #driver.quit()", "def parse_inner_urls(self, response):\n s = Selector(response)\n\n jobs_per_site = s.xpath('//div[@class=\"col-lg-12 col-md-12 col-sm-12 aggelia-view-title\"]//a/@href').extract()\n print(jobs_per_site)\n\n for inner_site in jobs_per_site:\n url = urljoin(\"https://www.skywalker.gr/\", inner_site)\n yield scrapy.Request(url, callback=self.parse_items)", "def crawl(start_url):\n pool = eventlet.GreenPool()\n seen = set()\n fetch(start_url, seen, pool)\n pool.waitall()\n return seen", "def crawler(website_url):\n\n try:\n\n # open and read the website\n pageFile = urllib2.urlopen(website_url)\n pageHtml = pageFile.read()\n pageFile.close()\n\n # call BeautifulSoup on an array of lines in string format\n soup = BeautifulSoup(\"\".join(pageHtml), \"html.parser\")\n # print soup.prettify()[0:1000]\n\n # find all links with hashtag cat, limit to 100 results\n # FIXME add hashtag cat requirement (string = \"#cat\")\n pageLinks = soup.findAll(\"a\", {\"href\": True}, limit=100)\n # import pdb; pdb.set_trace()\n page_URLs = []\n\n for pageLink in pageLinks:\n pageLink = pageLink['href']\n\n # if URL does not have a domain, add the main page's domain'\n if pageLink[0] == '/' and pageLink[:1] != '//':\n pageLink = website_url + pageLink\n\n # check if item in db, if not - add to db and commit\n existing_page = session.query(Page).filter_by(page_URL=pageLink).first()\n\n # add to array of link strings\n page_URLs.append(pageLink)\n\n if not existing_page:\n page_URL = Page(page_URL=pageLink)\n session.add(page_URL)\n session.commit()\n\n # import pdb; pdb.set_trace()\n return page_URLs\n\n except urllib2.URLError as e:\n # exception handling for URLError\n if hasattr(e, 'reason'):\n print \"We failed to reach a server.\"\n print \"Reason: \", e.reason\n # exception handling for HTTPError\n elif hasattr(e, 'code'):\n print 'The server couldn\\'t fulfill the request.'\n print 'Error code; ', e.code\n else:\n print 'Everything is fine.'", "def crawl(self, pages, depth=2):\n # Only go so deep\n for _ in range(depth):\n newpages = {}\n for page in pages:\n try:\n # Open page\n contents = urllib.request.urlopen(page)\n except Exception:\n print(\"Could not open %s\" % page)\n continue\n try:\n # Read page\n soup = BeautifulSoup(contents.read())\n self.add_to_index(page, soup)\n # get all the links\n links = soup('a')\n for link in links:\n if 'href' in dict(link.attrs):\n url = urljoin(page, link['href'])\n if url.find(\"'\") != -1:\n continue\n url = url.split('#')[0] # remove location portion\n if url[0:4] == 'http' and not self.is_indexed(url):\n newpages[url] = 1\n link_text = self.get_text_only(link)\n self.add_link_ref(page, url, link_text)\n self.dbcommit()\n except Exception:\n print(\"Could not parse page %s\" % page)\n pages = newpages", "def __start_recursion(self, url, first_run_p, recursion_depth_p, prev_link_size_p):\n links_titles = []\n print 'Crawling Link: {}'.format(url)\n # storing current link length to count recursion depth.\n recursion_settings = 
Crawler.__count_recursion_depth(len(str(url).rstrip('/').split('/'))\n , recursion_depth_p,\n prev_link_size_p,\n first_run_p)\n\n try:\n source_code = requests.get(url, timeout=self.__time_out)\n plain_text = source_code.text\n except requests.RequestException as excep:\n logging.error('Error In URL %s, Reason: %s', url, excep.message)\n\n # if true: means that only one bite crawling was asked (no recursion)\n if Crawler.__handle_one_bite(self.__one_bite, BeautifulSoup(plain_text),\n url, self.__work_path):\n return\n\n iteration_result = Crawler.__iterate_link(BeautifulSoup(plain_text).find_all('a')\n , url, self.__ignored_links,\n links_titles, self.__extensions)\n\n # empty original given root urls to fill them with the sub links for next recursion\n if self.__allow_clean_url == 'yes':\n links_titles = Crawler.__clean_url(links_titles)\n\n # in case the crawling found ftp folders (means there should be more iteration)\n sub_urls = Crawler.__setup_recursion(iteration_result[0], iteration_result[1])\n\n # downloads data and configure current crawling status like recursion settings\n crawling_status = Crawler.__fetch_data(self.__file_spider, links_titles,\n self.__white_list, self.__target_format,\n self.__time_out, self.__work_path,\n recursion_settings[0],\n self.__recursion_max_depth,\n recursion_settings[1], recursion_settings[2])\n\n if self.__recursion_max_depth != 0:\n # if true: stop iterating and start from next element of the previous\n # recursion urls\n if crawling_status[0] == self.__recursion_max_depth:\n return\n\n for url in sub_urls:\n self.__start_recursion(url, crawling_status[2], crawling_status[0], crawling_status[1])", "def get_multiple_tags(url, xpathExpressionList, params=None):\n headers = {'User-Agent': 'curl/7.35.0'}\n page = requests.get(url, params=params, headers=headers)\n page.raise_for_status()\n tree = html.fromstring(page.text)\n out = []\n for expression in xpathExpressionList:\n out.append(tree.xpath(expression))\n return out", "def download_images(main_keyword, supplemented_keywords, download_dir): \n image_links = set()\n print('Process {0} Main keyword: {1}'.format(os.getpid(), main_keyword))\n\n # create a directory for a main keyword\n img_dir = download_dir + main_keyword + '/'\n if not os.path.exists(img_dir):\n os.makedirs(img_dir)\n\n for j in range(len(supplemented_keywords)):\n print('Process {0} supplemented keyword: {1}'.format(os.getpid(), supplemented_keywords[j]))\n search_query = quote(main_keyword + ' ' + supplemented_keywords[j])\n # url = 'https://www.google.com/search?q=' + search_query + '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'\n url = 'https://www.google.com/search?q=' + search_query + '&source=lnms&tbm=isch'\n image_links = image_links.union(parse_page(url))\n print('Process {0} get {1} links so far'.format(os.getpid(), len(image_links)))\n time.sleep(2)\n print (\"Process {0} get totally {1} links\".format(os.getpid(), len(image_links)))\n\n print (\"Start downloading...\")\n count = 1\n for link in image_links:\n try:\n req = urllib.request.Request(link, headers = {\"User-Agent\": generate_user_agent()})\n response = urllib.request.urlopen(req)\n data = response.read()\n file_path = img_dir + '{0}.jpg'.format(count)\n with open(file_path,'wb') as wf:\n wf.write(data)\n print('Process {0} fininsh image {1}/{2}.jpg'.format(os.getpid(), main_keyword, count))\n count += 1\n except urllib.error.URLError as e:\n logging.error('URLError while downloading image 
{0}\\nreason:{1}'.format(link, e.reason))\n continue\n except urllib.error.HTTPError as e:\n logging.error('HTTPError while downloading image {0}\\nhttp code {1}, reason:{2}'.format(link, e.code, e.reason))\n continue\n except Exception as e:\n logging.error('Unexpeted error while downloading image {0}\\nerror type:{1}, args:{2}'.format(link, type(e), e.args))\n continue\n\n print(\"Finish downloading, total {0} errors\".format(len(image_links) - count))", "async def parse_links(self, parent_url, html):\n soup = BeautifulSoup(html, features='html.parser')\n urls = [a.get('href', '') for a in soup.find_all('a', href=True)]\n urls += [a.get('src', '') for a in soup.find_all('img')]\n urls = set([urldefrag(urljoin(str(parent_url), u))[0] for u in urls[:]])\n unseen_urls = urls.difference(self.seen_urls)\n for u in unseen_urls:\n if u not in self.seen_urls and u.startswith(self.root_url):\n await self.queue.put((u, parent_url))\n self.seen_urls.add(u)", "def scan_links_from_url(url):\n\n\t#Get the url\n\thtml_io = StringIO.StringIO()\n\n\tcurl = pycurl.Curl()\n\tcurl.setopt(pycurl.URL, str(url))\n\tcurl.setopt(pycurl.WRITEFUNCTION, html_io.write)\n\tcurl.perform()\n\n\thtml = html_io.getvalue()\n\n\thtml_io.close()\n\tcurl.close()\n\n\t#Apply the regex expression and fetch all links from source\n\tregexp = re.compile(\"\"\"http\\:\\/\\/rapidshare\\.(?:com|de)\\/files\\/[\\d]*\\/.*?\\..*?[^\"\\s\\<\\>]*[^.,;'\">\\:\\s\\<\\>\\)\\]\\!]\"\"\")\n\n\treturn regexp.findall(html)" ]
[ "0.63029546", "0.62863165", "0.6204318", "0.61818653", "0.6096154", "0.6072784", "0.6072309", "0.6070269", "0.6025325", "0.59984726", "0.59908885", "0.5942623", "0.59376717", "0.5925007", "0.5916456", "0.59152985", "0.588088", "0.5869225", "0.57900435", "0.57351506", "0.5735136", "0.5731734", "0.57185477", "0.56973684", "0.56972253", "0.56732684", "0.5666549", "0.5653127", "0.5651098", "0.56466526" ]
0.67279756
0
Recursively create directories and download files by the given url
def pywget_recursive(url, depth, start_dir, start_file, root_dir_name):
    dir_string = url[url.find('/')+2 : url.rfind('/')+1]  # the directory name that is going to be created
    dir_string_list = dir_string.split('/')
    dir_string_list[0] = root_dir_name
    dir_string = '/'.join(dir_string_list)  # change the directory name if collision happened

    filename = url[url.rfind('/')+1:]
    filename_without_extension = os.path.splitext(filename)[0]
    filename_extension = os.path.splitext(filename)[1][1:].strip().lower()
    filename = handle_collision("file", filename, filename_without_extension, filename_extension)

    os.chdir(start_dir)
    os.makedirs(dir_string, exist_ok=True)
    os.chdir(dir_string)

    if filename_without_extension != start_file:  # do not download if it's the same file with the start file
        urllib.request.urlretrieve(url, filename)

        if depth > 0:
            pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name)
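A small, hypothetical walk-through of the string slicing above; the URL is an assumption chosen only to show how the directory path and filename are derived.

import os

url = "http://example.com/a/b/pic.png"                   # made-up URL
dir_string = url[url.find('/')+2 : url.rfind('/')+1]     # "example.com/a/b/"
filename = url[url.rfind('/')+1:]                        # "pic.png"

print(dir_string)                   # example.com/a/b/
print(os.path.splitext(filename))   # ('pic', '.png') -> name and extension used for collision handling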
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialise(url, depth):\n dir_string = url[url.find('/')+2 : url.rfind('/')+1] # the directory name that is going to be created format of .../.../.../\n dir_string_list = dir_string.split('/')\n root_dir_name = dir_string_list[0] # the root directory's name. useful to check collisions\n filename = url[url.rfind('/')+1:]\n\n root_dir_name = handle_collision(\"dir\", root_dir_name, root_dir_name, '') # handle directory name collisions\n\n start_dir = os.getcwd() # the location of this py file\n start_file = os.path.splitext(filename)[0] # the first file that is downloaded. useful to avoid cycles\n\n dir_string_list[0] = root_dir_name\n dir_string = '/'.join(dir_string_list) # change the directory names if collision happened\n\n os.makedirs(dir_string, exist_ok=True)\n os.chdir(dir_string)\n\n urllib.request.urlretrieve(url, filename)\n pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name) # start crawlling and recursion", "def download_all_files(self, root_url, version):\n file_list = self._http_client.get(root_url + '?ref=refs/tags/' + version)\n for file in file_list.json():\n if file['type'] == 'file':\n download_url = file['download_url']\n download_path = self.get_module_and_path('next/' + file['path'].replace(self._main_dir + '/', ''))\n self.download_file(download_url.replace('refs/tags/', ''), download_path)\n elif file['type'] == 'dir':\n path = self.get_module_and_path('next/' + file['path'].replace(self._main_dir + '/', ''))\n os.mkdir(path)\n self.download_all_files(root_url + '/' + file['name'], version) # Recurse into the subdirectory.\n\n file_list.close()", "def download_files(urls, folder): \n\n if not urls: \n return None\n if not folder: \n return None\n \n folder_path = Path(folder)\n if not folder_path.exists():\n os.makedirs(folder_path)", "def download(cls, root):\n path_dirname = os.path.join(root, cls.dirname)\n path_name = os.path.join(path_dirname, cls.name)\n if not os.path.isdir(path_dirname):\n for url in cls.urls:\n filename = os.path.basename(url)\n zpath = os.path.join(path_dirname, filename)\n if not os.path.isfile(zpath):\n if not os.path.exists(os.path.dirname(zpath)):\n os.makedirs(os.path.dirname(zpath))\n print(f'Download {filename} from {url} to {zpath}')\n download_from_url(url, zpath)\n extract_to_dir(zpath, path_name)\n\n return path_name", "def get_files(self):\n # self.folder= +str(int(time.time()))\n if not os.path.exists(self.folder):\n os.mkdir(self.folder)\n while len(self.url_queue): # If we have URLs to crawl - we crawl\n href = self.url_queue.popleft() # We grab a URL from the left of the list\n filename = href.rsplit('/', 1)[-1]\n print(\"Downloading %s to %s...\" % (href, filename))\n fullname = os.path.join(self.folder, filename)\n urlretrieve(href, fullname)\n self.xlfnames.append(filename)", "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def fetch_files_from_urls(urls, dir):\n makedir(dir)\n try:\n pool = []\n for url in urls:\n p = Process(target=download, args=(url, dir,))\n p.start()\n pool.append(p)\n for p in pool:\n p.join()\n except KeyboardInterrupt:\n print \"Shutdown requested...exiting\"\n # except Exception:\n # traceback.print_exc(file=sys.stdout)\n\n # print(\"removing temporary files from current directory\")\n map(os.remove, glob.glob(\"*.tmp\"))", "def _process_resource(self, 
url):\n url_parts = urlparse.urlsplit(url)\n rel_path = url_parts.path[1:]\n fs_path = os.path.join(self.fileserver_path, rel_path)\n self.logger.info('Downloading {0} to {1}'.format(url, fs_path))\n self._execute_command('curl --create-dirs -Lo {0} {1}'\n .format(fs_path, url), retries=2)\n url = url.replace(url_parts.netloc, self.fs_base_url)\n url = url.replace(url_parts.scheme, 'http')\n return url", "def make_dir(url):\n parts = url.strip('/').split('/')\n done = []\n for part in parts:\n path = os.path.join(STORAGE_PATH, '/'.join(done), part)\n if not os.path.exists(path):\n os.mkdir(path)\n done.append(part)", "def download(urls, dest_folder):\n pass", "def regular_download(self) -> NoReturn:\n\n if not path.isdir(self.name):\n mkdir(self.name)\n\n for chapter in self.chapters.keys():\n\n chapter_folder = f\"{self.name}/{chapter}/\"\n curr_chapter = self.chapters[chapter]\n base_url = f\"{curr_chapter['server']}{curr_chapter['hash']}/\"\n\n if not path.isdir(chapter_folder):\n mkdir(chapter_folder)\n\n for image in curr_chapter[\"images\"]:\n\n image_url = f\"{base_url}{image}\"\n image_file = f\"{chapter_folder}{image}\"\n response = requests.get(image_url, headers={\"Connection\":\"close\"})\n\n if response and response.status_code == 200:\n with open(image_file, \"wb\") as img_file:\n img_file.write(response.content)\n else:\n print(f\"Error downloading chapter: {curr_chapter['num']} Image: {image}\")", "def pywget_inside_crawler(url, depth, start_dir, start_file, root_dir_name):\n depth -= 1\n\n content = ''\n try:\n request = urllib.request.urlopen(url)\n content = request.read().decode(\"utf-8\")\n except:\n pass\n\n # all the information that's inside <a href> and <img src> tags\n match = re.findall(r'<a href=\"(.*?)\"', content) + \\\n re.findall(r'<a href = \"(.*?)\"', content) + \\\n re.findall(r'<img src=\"(.*?)\"', content) + \\\n re.findall(r'<img src = \"(.*?)\"', content)\n\n prefix = url[0 : url.rfind('/')] # a prefix of the link. useful to check if a link is under the same domain\n\n all_item_list = add_item_to_list(match, prefix) # add information to a list\n\n for item in all_item_list:\n pywget_recursive(item, depth, start_dir, start_file, root_dir_name) # recursively download the information", "def init_downloads(self, pdir):\n self.log.info(\"init_downloads(\" + pdir + \")\")\n contents = os.listdir(pdir)\n for name in contents:\n path = os.path.join(pdir, name)\n if os.path.isdir(path):\n # recursively call with subdir\n self.init_downloads(path)\n else:\n # add file\n download = {}\n s3_uri = self.s3_prefix + path[(len(self.s3_dir)+1):]\n download['s3_uri'] = s3_uri\n download['state'] = 'COMPLETE'\n download[\"local_filepath\"] = path\n self.downloads[s3_uri] = download\n self.update_download(s3_uri) # update file properties", "def prepare_url(self, url, **kwargs):\n (self.base_path / url).mkdir(mode=kwargs.get(\"dir_mode\", 0o755), parents=True)", "def download_file(url, file_path, force=False):\n\n if os.path.exists(file_path) and not force:\n return\n dirname = os.path.dirname(file_path)\n Path(dirname).mkdir(parents=True, exist_ok=True)\n gdown.download(url, file_path, quiet=False)", "def url_files_download(url, ext, outdir, check_exist=False, create_dir=False,\n remove_files=False, bar_opt='tqdm'):\n file_msg = fd.Program_Msg(__file__)\n ## Checking for file type\n # 'URL'\n if not isinstance(url, str):\n msg = '{0} `url` ({1}) is not a valid type. 
It must be a STRING!'\n msg = msg.format(file_msg, type(url))\n raise TypeError(msg)\n # File extension\n if not isinstance(ext, str):\n msg = '{0} `ext` ({1}) is not a valid type. It must be a STRING!'\n msg = msg.format(file_msg, type(ext))\n raise TypeError(msg)\n # Output directory\n if not isinstance(outdir, str):\n msg = '{0} `outdir` ({1}) is not a valid type. It must be a STRING!'\n msg = msg.format(file_msg, type(outdir))\n raise TypeError(msg)\n # `check_exist`\n if not (isinstance(check_exist, bool)):\n msg = '`check_exist` ({0}) must be of `boolean` type!'.format(\n type(check_exist))\n raise TypeError(msg)\n # `create_dir`\n if not (isinstance(create_dir, bool)):\n msg = '`create_dir` ({0}) must be of `boolean` type!'.format(\n type(create_dir))\n raise TypeError(msg)\n # `bar` - Type\n if not (isinstance(bar_opt, str)):\n msg = '`bar_opt` ({0}) must be of `boolean` type!'.format(\n type(bar_opt))\n raise TypeError(msg)\n # Progress bar - Value\n if not (bar_opt in ['tqdm', 'native']):\n msg = '{0} `bar_opt` ({1}) is not a valid option! Exiting'\n msg = msg.format(file_msg, bar_opt)\n raise LSSUtils_Error(msg)\n ##\n ## List of files in the URL\n files_arr = url_file_list(url, ext)\n # Creating directory\n if create_dir:\n cfutils.Path_Folder(outdir)\n # Check for its existence\n if check_exist:\n if not (os.path.exists(outdir)):\n msg = '`outdir` ({0}) was not found!'.format(\n outdir)\n raise FileNotFoundError(msg)\n ##\n ## Downloading files to output directory\n if len(files_arr) > 0:\n if (bar_opt == 'tqdm'):\n tqdm_desc = 'Downloading files: '\n for file_ii in tqdm(files_arr, desc=tqdm_desc):\n # Local file\n file_ii_local = os.path.join( outdir,\n os.path.basename(file_ii))\n # Checking if local file exists\n if os.path.exists(file_ii_local):\n if remove_files:\n os.remove(file_ii_local)\n wget_opt = True\n else:\n wget_opt = False\n else:\n wget_opt = True\n ##\n ## Only downloading if necessary\n if wget_opt:\n wget.download(file_ii, out=outdir, bar=None)\n elif (bar_opt == 'native'):\n for file_ii in files_arr:\n # Local file\n file_ii_local = os.path.join( outdir,\n os.path.basename(file_ii))\n # Checking if local file exists\n if os.path.exists(file_ii_local):\n if remove_files:\n os.remove(file_ii_local)\n wget_opt = True\n else:\n wget_opt = False\n else:\n wget_opt = True\n ##\n ## Only downloading if necessary\n if wget_opt:\n wget.download(file_ii, out=outdir)\n else:\n msg = '{0} Number of files is ZERO!'.format(file_msg)\n print(msg)", "def maybe_download(directory, filename, url):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n filepath = os.path.join(directory, filename)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (url, filepath))\n filepath, _ = urllib.request.urlretrieve(url, filepath)\n statinfo = os.stat(filepath)\n print(\"Succesfully downloaded\", filename, statinfo.st_size, \"bytes\")\n return filepath", "def download_photos(urls, folder=''):\n folder_path = os.path.join('photos', folder)\n if not os.path.exists(folder_path):\n os.mkdir(folder_path)\n for url in urls:\n image = requests.get(url)\n filename = os.path.join(folder_path, url.split('/')[-1])\n with open(filename, 'wb') as f:\n f.write(image.content)", "def download(cls, root, check=None):\n path = os.path.join(root, cls.name)\n check = path if check is None else check\n if not os.path.isdir(check):\n for url in cls.urls:\n if isinstance(url, tuple):\n url, filename = url\n else:\n filename = 
os.path.basename(url)\n zpath = os.path.join(path, filename)\n if not os.path.isfile(zpath):\n if not os.path.exists(os.path.dirname(zpath)):\n os.makedirs(os.path.dirname(zpath))\n print('downloading {}'.format(filename))\n download_from_url(url, zpath)\n ext = os.path.splitext(filename)[-1]\n if ext == '.zip':\n with zipfile.ZipFile(zpath, 'r') as zfile:\n print('extracting')\n zfile.extractall(path)\n elif ext in ['.gz', '.tgz']:\n with tarfile.open(zpath, 'r:gz') as tar:\n dirs = [member for member in tar.getmembers()]\n tar.extractall(path=path, members=dirs)\n elif ext in ['.bz2', '.tar']:\n with tarfile.open(zpath) as tar:\n dirs = [member for member in tar.getmembers()]\n tar.extractall(path=path, members=dirs)\n\n return os.path.join(path, cls.dirname)", "def maybe_download(directory, filename, url):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n filepath = os.path.join(directory, filename)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (url, filepath))\n filepath, _ = urllib.request.urlretrieve(url, filepath)\n statinfo = os.stat(filepath)\n print(\"Succesfully downloaded\", filename, statinfo.st_size, \"bytes\")\n return filepath", "def download_files(self):", "def maybe_download(directory, filename, url):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n filepath = os.path.join(directory, filename)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (url, filepath))\n filepath, _ = urllib.request.urlretrieve(url, filepath)\n statinfo = os.stat(filepath)\n print(\"Successfully downloaded\", filename, statinfo.st_size, \"bytes\")\n return filepath", "def download_url(url, path=None, name=None):\n r = requests.get(url, allow_redirects=True)\n if path:\n paths = []\n paths.append(path)\n make_dir_from_list(paths)\n open(os.path.join(paths[0], name), 'wb').write(r.content)\n return r.content.decode('utf-8')", "def download_url(url: str, root: Path, filename: Optional[str] = None) -> Path:\n\n root = root.expanduser()\n if filename is None:\n filename = Path(urlparse(url).path).name\n fpath = root / filename\n if not root.exists():\n root.mkdir(parents=True, exist_ok=True)\n\n # downloads file\n if fpath.is_file():\n print(f\"Using downloaded and verified file: {fpath}\")\n else:\n try:\n print(f\"Downloading {url} to {fpath}\")\n urlretrieve(\n url,\n str(fpath),\n reporthook=gen_bar_updater(tqdm(unit=\"B\", unit_scale=True)),\n )\n except OSError:\n print(f\"Failed to download from url: {url}\")\n\n return fpath", "def download_if_not_exist(self):\n for (fname, furl) in cornell_file_urls:\n # dir_path = os.path.dirname(os.path.realpath(__file__))\n input_folder = '{input_dir}/cornell'.format(input_dir=self.input_dir)\n full_dirname = input_folder\n full_fname = '/'.join([full_dirname, fname])\n if not file_exists(full_fname):\n remote_file = urlopen(furl)\n data = remote_file.read()\n remote_file.close()\n # Try creating the dir\n try_create_dir(full_dirname)\n print('download if not exist fname:', fname, 'url:', furl)\n # Write the file\n with open(full_fname, 'wb') as f:\n f.write(data)", "def downloadLocal(url_list,path):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = 
requests.get(url_list[i], stream =True)\n print(filename)\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n shutil.move(filename,path)\n print('Done!')", "def find_URLs(directory, options):\n\n files = os.listdir(directory)\n filtered_files = []\n files_for_download = []\n for item in files:\n if item.endswith(\".json\"):\n filtered_files.append(item)\n\n for item in filtered_files:\n file_path = os.path.join(directory, item)\n\n with open(file_path, \"r\") as json_file:\n payload = json.load(json_file)\n for message in payload:\n if (\"subtype\" in message\n and message.get(\"subtype\") == \"file_share\"):\n\n download_URL = message.get(\"file\").get(\"url_download\")\n\n if options.remote_name:\n download_filename = message.get(\"file\").get(\"id\")\n else:\n download_filename = message.get(\"file\").get(\"name\")\n if download_filename.startswith(\"-.\"):\n download_filename = download_filename.lstrip(\"-\")\n download_filename = \"{}{}\".format(\n message.get(\"file\").get(\"id\"),\n download_filename)\n\n files_for_download.append(\n (download_filename, download_URL))\n\n download_URLs(files_for_download, directory)", "def _create_local_download_paths(self, urls):\n\n # URL: https://ftp.ripe.net/rpki/afrinic.tal/2019/08/01/roas.csv\n # Path: /tmp/bgp_Historical_ROAs_Parser/rpki/2019/08/01/\n\n download_paths = []\n for url in urls:\n download_path = os.path.join(self.path, url[url.index('rpki'):])\n # p flag creates necessary parent directories\n # slicing off the 'roas.csv'\n utils.run_cmds(f'mkdir -p {download_path[:-8]}')\n download_paths.append(download_path)\n\n return download_paths", "def download_url(url):\n # use url_checker to verify URL is using the full address\n url_name = url_checker(url)\n if url_name:\n print(f'Requesting page {url_name}')\n tstamp = get_tstamp()\n # set the headers like we are a browser\n headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko)'\n ' Chrome/72.0.3626.109 Safari/537.36'}\n # download the page\n response = requests.get(url, headers=headers)\n\n # create directory for saving file\n URL_DIR_NAME = os.path.join(OUTPUT_DIR, str(url_name))\n URL_TM_DIR_NAME = os.path.join(URL_DIR_NAME, str(tstamp))\n # create directory using url name and timestamp for directories\n ensure_dir(URL_TM_DIR_NAME)\n # save downloaded page as a .txt file\n with open(f'{URL_TM_DIR_NAME}{slash}response.html', 'w') as f:\n print(response.text, file=f)\n # use beautiful soup to extract links\n links = []\n soup = BeautifulSoup(response.text, 'html.parser')\n tags = soup.find_all('a')\n # append links to links list\n for tag in tags:\n links.append(tag.get('href'))\n # get only unique values and sort\n my_set = set(links)\n u_links = list(my_set)\n u_links.sort()\n # save links as a .txt file\n with open(f'{URL_TM_DIR_NAME}{slash}links.txt', 'w') as f:\n for list_item in u_links:\n f.write(f'{list_item}\\n')", "def download(url, fname, directory):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n else:\n print(\"Directory exists: %s\" % directory)\n filepath = os.path.join(directory, fname)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (fname, filepath))\n local_fname, _ = request.urlretrieve(url + fname, filepath)\n statinfo = os.stat(filepath)\n print(\"Successfully downloaded %s bytes %s\\n\" % 
(fname, statinfo.st_size))\n else:\n print(\"File %s exists in %s\\n\" % (fname, filepath))\n return filepath" ]
[ "0.7181396", "0.71674454", "0.7146739", "0.6819767", "0.6666435", "0.66409063", "0.6602934", "0.65131176", "0.649386", "0.6482578", "0.64362377", "0.64026994", "0.63971454", "0.6395716", "0.6394178", "0.6333323", "0.6332076", "0.63098013", "0.6302904", "0.62709564", "0.6262722", "0.621656", "0.620306", "0.6198924", "0.6197574", "0.6164178", "0.6160459", "0.61036444", "0.6099673", "0.60996306" ]
0.75172716
0
Add information inside <a href> and <img src> tags to the given list. If it is an absolute link, check if it's under the same domain; if so, add it to the list, otherwise ignore it. If it is a relative link, add the prefix in front and add it to the list.
def add_item_to_list(given_list, prefix):
    new_list = []
    if given_list:
        for item in given_list:
            item.lstrip()
            if item.startswith("http://") or item.startswith("https://") or item.startswith("//"):
                if item.startswith(prefix):
                    new_list.append(item)
            else:
                new_list.append(prefix + '/' + item)
    return new_list
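An illustrative call with made-up inputs, assuming the behaviour described in the query above:

prefix = "http://example.com/site"
links = ["page.html", "http://example.com/site/a.png", "http://other.org/x.html"]

print(add_item_to_list(links, prefix))
# ['http://example.com/site/page.html', 'http://example.com/site/a.png']
# the off-domain absolute link is ignored; the relative link gets the prefix added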
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mk_link_list(self, BS_object, base_url):\n link_list = []\n body = BS_object.find('body')\n for element in body.find_all('a'):\n # for link in BS_object.find_all('a'): # TEST if there are any links in html head\n \n raw_link = element.get('href')\n print \"GETS RAW LINK: %r, type:\" % raw_link, type(raw_link)\n if type(raw_link) is not unicode:\n print \"mk_link_list: FAILED TO EXTRACT USABLE LINK, SKIPPING...\"\n continue\n\n if raw_link.startswith(\"https:/\") or raw_link.startswith(\"http:/\"):\n if not raw_link.endswith(\"/\"): # maintaining constant url format\n raw_link + \"/\"\n print \"mk_link_list: FULL LINK\"\n if raw_link.startswith(base_url): # Internal URL check\n print \"mk_link_list: FULL LINK STARTS WITH BASE URL AND IS GOOD FOR LINK LIST\"\n link_list.append(raw_link)\n else:\n print \"mk_link_list: THIS FULL LINK IS NOT INTERNAL LINK\"\n else:\n # when part link found it will be always internal link\n print \"mk_link_list:FOUND PART LINK\", raw_link\n try:\n raw_link.strip()\n except:\n pass\n print \"mk_link_list: MAKING FULL LINK FROM PART\"\n full_link = urlparse.urljoin(base_url, raw_link)\n print \"mk_link_list: FULL LINK MADE FROM PART LINK\", full_link\n if full_link.startswith(base_url): # Internal URL check\n print \"mk_link_list: FULL LINK STARTS WITH BASE URL AND IS GOOD FOR LINK LIST\"\n link_list.append(full_link)\n else:\n print \"mk_link_list: THIS FROM PART TO FULL LINK IS NOT INTERNAL LINK\"\n\n\n\n dedupli_list = c_m.remove_duplicates(link_list) # \n dedupli_list.sort()\n try:\n dedupli_list.remove(base_url) # we do not need retriving base url html again\n print \"mk_link_list: LINK LIST AFTER BASE URL REMOVAL\", len(dedupli_list)\n except ValueError:\n print \"mk_link_list: NO BASE URL FOUND IN BASE URL(HOMEPAGE)\"\n\n return dedupli_list", "def updateLinks(haloList, index):\n found = False\n while not found and index != -1:\n if haloList[-1].descid == haloList[index].ID:\n found = True\n haloList[-1].desc = haloList[index]\n if haloList[-1].mmp == 1:\n haloList[index].parent = haloList[-1]\n else:\n haloList[index].fullParents.append(haloList[-1])\n else:\n index -= 1\n return haloList", "def rewrite_relative_links(soup: bs4.BeautifulSoup, base_url: str):\n for tag_name, attrib in URL_REWRITE_PAIRS:\n for tag in soup.find_all(tag_name, attrs={attrib: True}):\n try:\n tag[attrib] = urllib.parse.urljoin(base_url, tag[attrib])\n except ValueError as e:\n logger.info('Could not rewrite link: %s', e)", "def __url_list(self, page):\n url_list = []\n for tag_a in page.find_all('a'):\n href = str(tag_a.get('href'))\n if self.__verify(href):\n url = parse.quote(self.__add_main_site(href), '/:#')\n url_list.append(url)\n return url_list", "def rewrite_local_links_to_relative(db_data: Optional[DbData], link: str) -> str:\n\n if db_data:\n realm_uri_prefix = db_data.realm_uri + \"/\"\n if link.startswith((realm_uri_prefix + \"#\", realm_uri_prefix + \"user_uploads/\")):\n return link[len(realm_uri_prefix) :]\n\n return link", "def _build_links(links):\n for link in links:\n link['href'] = link['href'].replace('servers', 'instances')\n return links", "def _parse_links(self, item, start, links_list):\n result_list = []\n target_str_1 = start.strftime(\"%m-%d-%Y\").replace(\" 0\", \" \")\n target_str_2 = start.strftime(\"%m-%d-%y\").replace(\" 0\", \" \")\n for item in links_list:\n if item[\"date\"] in target_str_1 or item[\"date\"] in target_str_2:\n new_dict = {}\n new_dict[\"href\"] = item[\"href\"]\n new_dict[\"title\"] = item[\"title\"]\n 
result_list.append(new_dict)\n return result_list", "def navlist(stu, stulist, rooturl='/dist'):\n parts = []\n for x in stulist:\n if x == stu:\n parts.append('<b>' + x + '</b>')\n else:\n parts.append('<a href=\"' + rooturl + '/' + x + '/\">' + x + '</a>')\n return ' '.join(parts)", "def createCompleteLink(link, domain):\n if link is not None and len(link) > 0:\n if re.match('^http', link) is not None:\n return link\n else:\n #Remove the first / to avoid //\n if link[0] == '/':\n link = link[1:]\n return domain + link\n return domain", "def replace_relative_links(soup,base_url,debug=False):\n # only extract hyperlinks with href atributte\n if soup is None:\n return None\n\n links = soup.findAll('a', {\"href\" : True})\n links_replaced = 0\n for link in links: \n url = link['href']\n if url.startswith(\"./\"):\n link['href'] = base_url + url[1:len(url)] \n links_replaced += 1\n if debug is True:\n print(f\"soup_converter.replace_relative_links: {links_replaced} links replaced\")\n return soup", "def old_list_links(self, link_list, dd):\n link_names = []\n for link in link_list:\n if \"subgroup\" in link:\n sublinks = list(link[\"subgroup\"])\n for sublink in sublinks:\n link_names.append(sublink[\"name\"])\n else:\n link_names.append(link[\"name\"])\n return link_names", "def href_to_link(href, domains=[\"\"]):\n\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36\"\n }\n\n href = href.strip()\n\n # Some \"domains\" given might not actually be the root, this looks to add them\n for domain in domains:\n uri_parsed = urlparse(domain)\n tmp_domain = '{uri.scheme}://{uri.netloc}/'.format(uri=uri_parsed)\n\n if merge_link(domain, '/') != merge_link(tmp_domain, '/'):\n domains.append(tmp_domain)\n\n if domains[0] != \"\":\n domains.insert(0, \"\")\n\n for domain in domains:\n temp_url = merge_link(domain, href)\n print(temp_url)\n #temp_url = add_protocol(temp_url)\n\n # if not \".\" in temp_url:\n # continue\n\n try:\n r = requests.head(temp_url, headers=headers)\n print(r.status_code)\n if r.status_code == 200:\n return temp_url\n except:\n print(f\"{temp_url} failed\")\n pass\n\n try:\n print(switch_protocol(temp_url))\n r = requests.head(switch_protocol(temp_url), headers=headers)\n print(r.status_code)\n if r.status_code == 200:\n return switch_protocol(temp_url)\n except:\n print(f\"{switch_protocol(temp_url)} failed\")\n pass\n\n return href", "def merge_link(url_domain, url_path):\n\n # Ensure domain is not empty\n if url_domain.strip() == \"\":\n return url_path\n\n # Strip / at end of domain\n if url_domain[-1] == \"/\":\n url_domain = url_domain[0:-1]\n\n # Strip / at beginning of path\n if url_path[0] == \"/\":\n url_path = url_path[1:]\n\n url_full = \"/\".join([url_domain, url_path])\n\n return url_full", "def simplify_links(proj,exp,links):\n simple_links =[] \n\n for key in links:\n (node_name,x,y) = key.rpartition(':')\n node_name = node_name+\".\"+exp+\".\"+proj+\".emulab.net\"\n simple_links.append((node_name,links[key]['ipaddr']))\n\n return simple_links", "def correctlink(self, lien, current_url, current_full_url, current_directory, protocol, encoding):\n\n if lien is None:\n return current_full_url\n\n # No destination anchor\n if \"#\" in lien:\n lien = lien.split(\"#\")[0]\n\n # No leading or trailing whitespaces\n lien = lien.strip()\n\n if lien == \"\":\n return current_full_url\n\n if lien == \"..\":\n lien = \"../\"\n # bad protocols\n llien = lien.lower()\n if 
(llien.startswith(\"telnet:\") or\n llien.startswith(\"ftp:\") or\n llien.startswith(\"mailto:\") or\n llien.startswith(\"javascript:\") or\n llien.startswith(\"news:\") or\n llien.startswith(\"file:\", 0) or\n llien.startswith(\"gopher:\") or\n llien.startswith(\"irc:\", 0)):\n return None\n # Good protocols or relatives links\n else:\n # full url, nothing to do :)\n if lien.startswith(\"http://\") or lien.startswith(\"https://\"):\n pass\n else:\n # Protocol relative URLs\n if lien.startswith(\"//\"):\n lien = protocol + \":\" + lien\n # root-url related link\n elif lien[0] == '/':\n lien = \"{0}://{1}{2}\".format(protocol, self.server, lien)\n else:\n # same page + query string\n if lien[0] == '?':\n lien = current_url + lien\n # current_url directory related link\n else:\n lien = current_directory + lien\n\n args = \"\"\n if \"?\" in lien:\n lien, args = lien.split(\"?\", 1)\n # if args is a unicode string, encode it according to the\n # charset of the webpage (if known)\n if encoding and isinstance(args, unicode):\n args = args.encode(encoding, \"ignore\")\n\n # a hack for auto-generated Apache directory index\n if args in [\"C=D;O=A\", \"C=D;O=D\", \"C=M;O=A\", \"C=M;O=D\",\n \"C=N;O=A\", \"C=N;O=D\", \"C=S;O=A\", \"C=S;O=D\"]:\n args = \"\"\n\n if \"&\" in args:\n args = args.split(\"&\")\n args = [i for i in args if i != \"\" and \"=\" in i]\n for i in self.bad_params:\n for j in args:\n if j.startswith(i + \"=\"):\n args.remove(j)\n args = \"&\".join(args)\n\n # First part of the url (path) must be encoded with UTF-8\n if isinstance(lien, unicode):\n lien = lien.encode(\"UTF-8\", \"ignore\")\n lien = urllib.quote(lien, safe='/#%[]=:;$&()+,!?*')\n\n # remove useless slashes repetitions (expect those from the protocol)\n lien = re.sub(\"([^:])//+\", r\"\\1/\", lien)\n if lien[-2:] == \"/.\":\n lien = lien[:-1]\n\n # It should be safe to parse now\n parsed = urlparse.urlparse(lien)\n path = parsed.path\n\n # links going to a parrent directory (..)\n while re.search(\"/([~:!,;a-zA-Z0-9\\.\\-+_]+)/\\.\\./\", path) is not None:\n path = re.sub(\"/([~:!,;a-zA-Z0-9\\.\\-+_]+)/\\.\\./\", \"/\", path)\n while re.search(\"/\\./\", path) is not None:\n path = re.sub(\"/\\./\", \"/\", path)\n if path == \"\":\n path = '/'\n\n # Fix for path going back up the root directory (eg: http://srv/../../dir/)\n path = re.sub(r'^(/?\\.\\.//*)*', '', path)\n if not path.startswith('/'):\n path = '/' + path\n\n lien = \"%s://%s%s\" % (parsed.scheme, parsed.netloc, path)\n if args != \"\":\n # Put back the query part\n lien = \"%s?%s\" % (lien, args)\n return lien", "def replace_local_hyperlinks(\n text,\n base_url=\"https://github.com/project-rig/nengo_spinnaker/blob/master/\"\n ):\n def get_new_url(url):\n return base_url + url[2:]\n\n # Deal with anonymous URLS\n for match in re.finditer(r\"^__ (?P<url>\\./.*)\", text, re.MULTILINE):\n orig_url = match.groupdict()[\"url\"]\n url = get_new_url(orig_url)\n\n text = re.sub(\"^__ {}\".format(orig_url),\n \"__ {}\".format(url), text, flags=re.MULTILINE)\n\n # Deal with named URLS\n for match in re.finditer(r\"^\\.\\. _(?P<identifier>[^:]*): (?P<url>\\./.*)\",\n text, re.MULTILINE):\n identifier = match.groupdict()[\"identifier\"]\n orig_url = match.groupdict()[\"url\"]\n url = get_new_url(orig_url)\n\n text = re.sub(\n \"^\\.\\. _{}: {}\".format(identifier, orig_url),\n \".. _{}: {}\".format(identifier, url),\n text, flags=re.MULTILINE)\n\n # Deal with image URLS\n for match in re.finditer(r\"^\\.\\. 
image:: (?P<url>\\./.*)\",\n text, re.MULTILINE):\n orig_url = match.groupdict()[\"url\"]\n url = get_new_url(orig_url)\n\n text = text.replace(\".. image:: {}\".format(orig_url),\n \".. image:: {}\".format(url))\n\n return text", "def middleaddlistitems(self, items, pos):\n self._linklist.insert(pos, items)", "def addLinkToFrontier(link):\n\n\tif link not in variables.crawlFrontier:\n\t\tvariables.crawlFrontier.append(link)", "def add_prefix_to_list_items(prefix: str, items: list) -> list:\r\n new_items = []\r\n for item in items:\r\n new_items.append(f\"{prefix}{str(item)}\")\r\n return new_items", "def fix_links():\n pass", "def get_clean_urls(text_list, list_to_exclude=['twitter']):\n ans_ls = []\n for x in text_list:\n rex = re.findall(\n '(?:http:|https:)\\/\\/.*\\/.*?(?:\\.cms|\\.[a-zA-Z]*|\\/[a-zA-Z0-9-\\ ]+[a-zA-z0-9])', x[1])\n for rx in rex:\n if rx and not any(z in rx for z in\n list_to_exclude) and not rx == 'http://' and not rx == 'https://' and not rx.endswith(\n '.') and 't.c' not in rx:\n if '\\xa0' in x[1]:\n for y in x[1].split('\\xa0'):\n # print(x[0],y)\n ans_ls.append((x[0], y.replace(' ', '')))\n elif '@' in x[1]:\n ans_ls.append((x[0], y.split('@')[0].replace(' ', '')))\n\n else:\n ans_ls.append((x[0], x[1].replace(' ', '')))\n return (ans_ls)", "def good_url(a, start_url):\n for i in range(len(a)):\n par=a[i].find('?')\n if par!=-1:\n a[i]=a[i][:par]\n anc=a[i].find('#')\n if anc!=-1:\n a[i]=a[i][:anc]\n if a[i]!='' and a[i][0]=='/':\n a[i]=str(start_url)+a[i][1:i]\n #print(a[i]) \n return list(set(a))", "def add_link(self, url, *, note=\"\"):\n url = url.strip()\n if url.startswith(\"www.\"):\n url = \"http://{}\".format(url)\n if re.match(r\"\\A@[A-Za-z]+\\Z\", url):\n url = \"https://twitter.com/{}\".format(url[1:])\n self.links.append({\"note\": note, \"url\": url})", "def list_urls(self, prefix: str = \"\", etl_name: str = None) -> Iterable[str]:", "def list_to_link(lst):\n \"*** YOUR CODE HERE ***\"\n #if lst == []:\n #return\n #elif len(lst) == 1:\n #return Link(lst[0])\n if lst == []:\n return Link.empty # This is great. Notice that you're calling list_to_link([])\n # within the call list_to_link([last_elem])\n else:\n return Link(lst[0], list_to_link(lst[1:])) # remember: you don't have to specify the end index\n # I want from index 1 until the end of the list", "def removeurl(wordlist):\n newlist=[]\n for w in wordlist:\n phrases=str(w[0]).split()\n for phrase in phrases:\n if(phrase.startswith('http') is True):\n phrase=\"\"\n newlist.append((phrases,w[1])) \n return newlist", "def add_links(update: Update, context: CallbackContext):\n urls = update.message.parse_entities([\"url\", \"text_link\"]).values()\n\n if urls:\n logging.info(f\"Got content of type url, text_link: {urls}\")\n\n with db.connect() as connection:\n existing_links = db.get_links(connection, update.message.from_user.id)\n if existing_links:\n distinct_links = set([url.casefold() for url in urls]) - set(\n [link.url for link in existing_links]\n )\n else:\n distinct_links = set([url.casefold() for url in urls])\n\n if distinct_links:\n success = context.bot.send_message(\n chat_id=update.message.chat_id,\n text=f\"⏳ Saving your link{'s' if len(distinct_links) > 1 else ''}... 
⏳\",\n disable_notification=True,\n )\n\n db.add_links(connection, distinct_links, update.message.from_user.id)\n\n context.bot.edit_message_text(\n chat_id=update.message.chat_id,\n message_id=success.message_id,\n text=f\"✨ {len(distinct_links)} link{'s' if len(distinct_links) > 1 else ''} saved ✨\",\n )\n else:\n context.bot.send_message(\n chat_id=update.message.chat_id,\n text=f\"You already have that link saved! Look it up with *View all* or */all*\",\n parse_mode=telegram.ParseMode.MARKDOWN,\n )", "def update_links(self):\n for a in self.book.xpath(\"//a[@href]\"):\n href = a.xpath(\"@href\")[0]\n index_list = a.xpath(\"@data-index\")\n \n ### If there is no data-index it is assumed link comes from initial book landing page (the index page)\n if index_list == []:\n index = self.manager.get_page_index(\"index.html\")\n else:\n index = index_list[0]\n \n ### Fix people who are bad at links\n if href.startswith(\"www.\"):\n href = \"https://\" + href\n a.set(\"href\", href)\n \n ## Correct for ambiguity (Naive assumption that this error only occours on index page)\n if href == \"./\":\n href = \"index.html\"\n \n if not href:\n return None\n \n href = self.manager.convert_link(href, index)\n a.set(\"href\", href)", "def add_link():\n return True", "def matchWildcardUrls(url, listOfUrls):\n if not url or not listOfUrls:\n return None\n pattern = re.compile('^[a-zA-Z][+a-zA-Z0-9.-]*:.*')\n if not pattern.search(str(url)) and not url.startswith('//'):\n url = '//' + url\n cspUrl = urlparse(str(url)) \n host = cspUrl.netloc.lower() or \"\"\n hostHasWildcard = host.startswith(\"*.\")\n wildcardFreeHost = re.sub(\"^\\*\", \"\", host, flags=re.IGNORECASE)\n path = cspUrl.path or ''\n hasPath = len(cspUrl.path) > 0 \n\n for url2 in listOfUrls:\n url = urlparse(str(url2))\n domain = url.netloc.lower() or \"\"\n domainHasWildCard = domain.startswith(\"*.\")\n if (not domainHasWildCard):\n if (not domain.endswith(wildcardFreeHost) ): \n continue\n if (not hostHasWildcard and host != domain):\n continue\n else:\n domainparts = list(reversed(domain.split('.')))\n hostparts = list(reversed(host.split('.')))\n stop = False\n domainlen = len(domain.split('.'))\n hostlen = len(host.split('.'))\n \n for idx, domainpart in enumerate(domainparts):\n if idx < hostlen:\n hostpart = hostparts[idx]\n if hostpart != domainpart and (domainpart != '*' and hostpart != '*'):\n stop = True\n if stop:\n continue\n if (hasPath):\n if (path.endswith('/')): \n if (not url.path.startswith(path)):\n continue\n elif (url.path != path):\n continue\n\n return url\n\n return None" ]
[ "0.5817685", "0.5556544", "0.5537983", "0.55320865", "0.5474401", "0.54573244", "0.541493", "0.5372431", "0.53233933", "0.53223795", "0.53190553", "0.53161633", "0.52707195", "0.5259274", "0.5230734", "0.52237004", "0.51826096", "0.514296", "0.5105568", "0.5102637", "0.5101266", "0.5095202", "0.50763834", "0.50711226", "0.5065738", "0.50520915", "0.5040854", "0.5022638", "0.501791", "0.49957538" ]
0.72055155
0
Test the setting of the target temperature with range.
async def test_set_target_temp_range(opp):
    state = opp.states.get(ENTITY_ECOBEE)
    assert state.attributes.get(ATTR_TEMPERATURE) is None
    assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)
    assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)

    await common.async_set_temperature(
        opp, target_temp_high=25, target_temp_low=20, entity_id=ENTITY_ECOBEE
    )
    await opp.async_block_till_done()

    state = opp.states.get(ENTITY_ECOBEE)
    assert state.attributes.get(ATTR_TEMPERATURE) is None
    assert 20.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)
    assert 25.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_temperatures_value(self):\n self.assertEqual(self.TminValue, 450.0)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 113\n await common.async_set_temperature(opp, 114, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 114", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 30, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 30.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n await common.async_set_temperature(opp, 110, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 110", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 20 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 21, ENTITY_HEATPUMP)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 21.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_get_temperature_range(hass, hk_driver, cls):\n entity_id = 'climate.test'\n\n hass.states.async_set(entity_id, STATE_OFF)\n await hass.async_block_till_done()\n acc = cls.thermostat(hass, hk_driver, 'Climate', entity_id, 2, None)\n\n hass.states.async_set(entity_id, STATE_OFF,\n {ATTR_MIN_TEMP: 20, ATTR_MAX_TEMP: 25})\n await hass.async_block_till_done()\n assert acc.get_temperature_range() == (20, 25)\n\n acc._unit = TEMP_FAHRENHEIT\n hass.states.async_set(entity_id, STATE_OFF,\n {ATTR_MIN_TEMP: 60, ATTR_MAX_TEMP: 70})\n await hass.async_block_till_done()\n assert acc.get_temperature_range() == (15.6, 21.1)", "async def test_set_target_temp_range_bad_attr(opp):\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(\n opp,\n temperature=None,\n entity_id=ENTITY_ECOBEE,\n target_temp_low=None,\n target_temp_high=None,\n )\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)", "def set_temp_range(self, temp_range=(0, 0, 1)):\n args = list(temp_range)\n assert len(args) == 3\n minimum, maximum, step = args\n if all([isinstance(i, int) for i in args]):\n if (maximum - minimum) % step == 0:\n maximum += 1\n self.temperatures = np.arange(minimum, maximum, step, dtype=float)\n self.qptanalyzer.temperatures = self.temperatures", "def set_temperature(self, **kwargs):\n low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)\n high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n if low_temp is not None:\n low_temp = round(low_temp)\n self._device.set_setpoint_heat(low_temp)\n if high_temp is not None:\n high_temp = round(high_temp)\n self._device.set_setpoint_cool(high_temp)", "def set_temperature(self, **kwargs):\n self._target_temperature_low = 
kwargs.get(ATTR_TARGET_TEMP_LOW)\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temp = kwargs.get(ATTR_TEMPERATURE)\n if self.current_operation == 'Heat & Cool' and self._target_temperature_low is not None \\\n and self._target_temperature_high is not None:\n self._api._heatto = self._target_temperature_low\n self._api._coolto = self._target_temperature_high\n elif temp is not None:\n if self.current_operation == 'Heat only':\n self._api._heatto = temp\n self._api._coolto = temp + 10\n elif self.current_operation == 'Cool only':\n self._api._heatto = temp - 10\n self._api._coolto = temp \n self._api.set()\n self.schedule_update_ha_state()", "def target_temperature_high(self):\n return self._device.setpoint_cool", "def target_temperature(self):\n if (self._device.mode == self._device.MODE_HEAT) or (\n self._device.mode == self._device.MODE_HEAT_EMERGENCY):\n return self._device.setpoint_heat\n if self._device.mode == self._device.MODE_COOL:\n return self._device.setpoint_cool\n return None", "async def async_set_temperature(self, **kwargs: Any) -> None:\n target_temp = kwargs.get(ATTR_TEMPERATURE)\n target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n if target_temp is not None:\n if self.hvac_mode == HVACMode.COOL:\n target_temp_high = target_temp\n if self.hvac_mode == HVACMode.HEAT:\n target_temp_low = target_temp\n if target_temp_low is not None:\n await self._node.set_climate_setpoint_heat(int(target_temp_low))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_low = target_temp_low\n if target_temp_high is not None:\n await self._node.set_climate_setpoint_cool(int(target_temp_high))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_high = target_temp_high\n self.async_write_ha_state()", "def check(self, temp=None):\n if temp == None:\n temp = self._temp.get_tempC()\n # Get target temperature from file\n target_temp = env['thermostat']['targetTemp']\n msg = \"{} {} {} {}\".format(\"Temp:\", temp, \" Target Temp:\", target_temp)\n self._logger.debug(msg) \n if temp > target_temp:\n self._fan.set(Fan.ON)\n else:\n self._fan.set(Fan.OFF)", "async def async_set_temperature(self, **kwargs: Any) -> None:\n changed = await self._set_climate_attribute(\n kwargs.get(ATTR_TEMPERATURE),\n CONF_TEMP_COMMAND_TOPIC,\n CONF_TEMP_COMMAND_TEMPLATE,\n CONF_TEMP_STATE_TOPIC,\n \"_attr_target_temperature\",\n )\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_LOW),\n CONF_TEMP_LOW_COMMAND_TOPIC,\n CONF_TEMP_LOW_COMMAND_TEMPLATE,\n CONF_TEMP_LOW_STATE_TOPIC,\n \"_attr_target_temperature_low\",\n )\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_HIGH),\n CONF_TEMP_HIGH_COMMAND_TOPIC,\n CONF_TEMP_HIGH_COMMAND_TEMPLATE,\n CONF_TEMP_HIGH_STATE_TOPIC,\n \"_attr_target_temperature_high\",\n )\n\n if not changed:\n return\n self.async_write_ha_state()", "def check_sample(temp, alert_settings):\n\n # If no range settings, we default to true\n ret = Alert.ALERT_TYPE_TEMP_GOOD;\n \n # Check against upper limit only if one exists\n if alert_settings[HIGH_LIMIT] and temp > Decimal(alert_settings[HIGH_LIMIT]):\n ret = Alert.ALERT_TYPE_TEMP_HIGH\n # Check against lower limit only if one exists\n elif alert_settings[LOW_LIMIT] and temp < Decimal(alert_settings[LOW_LIMIT]):\n ret = Alert.ALERT_TYPE_TEMP_LOW \n #logger.debug(\" Temp %s between %s and %s: %s\" % (temp, alert_settings[LOW_LIMIT], 
alert_settings[HIGH_LIMIT], ret))\n\n return ret", "def target_temperature(self) -> float | None:\n return self._device.setpoint", "def target_temperature(self):\n return self._boiler.setpoint", "def target_temperature(self):\n if self.current_operation == 'Heat & Cool':\n return None\n if self.current_operation == 'Heat only':\n return int(self._api._heatto)\n elif self.current_operation == 'Cool only':\n return int(self._api._coolto)\n return None", "def set_target_temperature_high(self, value: int = 0):\r\n if self._temperature_scale == \"F\":\r\n self._target_temperature_high = celsius_to_kelvin(\r\n fahrenheit_to_celsius(value)\r\n )\r\n elif self._temperature_scale == \"C\":\r\n self._target_temperature_high = celsius_to_kelvin(value)\r\n else:\r\n self._target_temperature_high = value\r\n\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"target_temperature_high\", value))", "def target_temperature_low(self):\n return self._device.setpoint_heat", "def set_temperature(self, **kwargs):\n set_temp = True\n operation_mode = kwargs.get(ATTR_HVAC_MODE)\n temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temperature = kwargs.get(ATTR_TEMPERATURE)\n\n if operation_mode and self._mode_map.get(operation_mode) != self._client.mode:\n set_temp = self._set_operation_mode(operation_mode)\n\n if set_temp:\n if (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_HEAT\n ):\n success = self._client.set_setpoints(temperature, self._client.cooltemp)\n elif (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_COOL\n ):\n success = self._client.set_setpoints(self._client.heattemp, temperature)\n elif (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_AUTO\n ):\n success = self._client.set_setpoints(temp_low, temp_high)\n else:\n success = False\n _LOGGER.error(\n (\n \"The thermostat is currently not in a mode \"\n \"that supports target temperature: %s\"\n ),\n operation_mode,\n )\n\n if not success:\n _LOGGER.error(\"Failed to change the temperature\")\n self.schedule_update_ha_state()", "def target_temperature(self) -> int:\r\n # TODO: Find a better way to do this. 
This is ugly.\r\n if self._hvac_mode == \"cool\":\r\n return self.target_temperature_low\r\n elif self._hvac_mode == \"heat\":\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"heat-cool\":\r\n # TODO: Fix this so that heat or cool is chosen.\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.target_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.target_temperature_high\r\n elif self._hvac_mode == \"eco\":\r\n if self._ambient_temperature >= self._target_temperature:\r\n return self.eco_temperature_low\r\n elif self._ambient_temperature <= self._target_temperature:\r\n return self.eco_temperature_high\r\n elif self._hvac_mode == \"off\":\r\n return self.ambient_temperature\r\n\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"ambient_temperature\"))", "def target_temperature_step(self):\n return 1", "def target_temperature_step(self):\n return 1", "def target_temperature(self) -> float:\n return self._thermostat.setpoint_temperature", "def set_target_temperature_low(self, value: int = 0):\r\n if self._temperature_scale == \"F\":\r\n self._target_temperature_low = celsius_to_kelvin(\r\n fahrenheit_to_celsius(value)\r\n )\r\n elif self._temperature_scale == \"C\":\r\n self._target_temperature_low = celsius_to_kelvin(value)\r\n else:\r\n self._target_temperature_low = value\r\n\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"target_temperature_low\", value))", "def target_temperature(self) -> float | None:\n if self._device.mode == ThermostatMode.COOL and self._device.cooling_setpoint:\n return self._device.scaled_cooling_setpoint\n\n if self._device.heating_setpoint:\n return self._device.scaled_heating_setpoint\n\n return None", "async def async_set_temperature(self, **kwargs: Any) -> None:\n if kwargs.get(ATTR_TEMPERATURE) is not None:\n self._target_temperature = kwargs.get(ATTR_TEMPERATURE)\n if (\n kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None\n and kwargs.get(ATTR_TARGET_TEMP_LOW) is not None\n ):\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self.async_write_ha_state()", "def set_temperature(self):\n self.temperature = self.gui.doubleSpinBox_temperature.value()\n self.logger.debug('Changing the temperature to {}K'.format(self.temperature))\n\n self.anc350_instrument.temperature = self.temperature\n self.anc350_instrument.set_temperature_limits()\n\n self.max_dclevel_V = self.anc350_instrument.max_dC_level\n\n self.logger.debug('Changed the scanner piezo limits to {}'.format(self.max_dclevel_V))" ]
[ "0.712122", "0.6892608", "0.685135", "0.68328077", "0.6799549", "0.67770034", "0.6686436", "0.6629148", "0.6591793", "0.6582087", "0.6493302", "0.6487197", "0.64121413", "0.6303357", "0.62739325", "0.6270018", "0.62598914", "0.6257415", "0.6252457", "0.620402", "0.6201357", "0.6196246", "0.6195153", "0.61826754", "0.61826754", "0.61619776", "0.612471", "0.6097167", "0.6081014", "0.6072077" ]
0.753526
0
Test setting the target temperature range without attribute.
async def test_set_target_temp_range_bad_attr(opp):
    state = opp.states.get(ENTITY_ECOBEE)
    assert state.attributes.get(ATTR_TEMPERATURE) is None
    assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)
    assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)

    with pytest.raises(vol.Invalid):
        await common.async_set_temperature(
            opp,
            temperature=None,
            entity_id=ENTITY_ECOBEE,
            target_temp_low=None,
            target_temp_high=None,
        )
    await opp.async_block_till_done()

    state = opp.states.get(ENTITY_ECOBEE)
    assert state.attributes.get(ATTR_TEMPERATURE) is None
    assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)
    assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test_set_target_temp_range(opp):\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)\n\n await common.async_set_temperature(\n opp, target_temp_high=25, target_temp_low=20, entity_id=ENTITY_ECOBEE\n )\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 20.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 25.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n await common.async_set_temperature(opp, 110, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 110", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 30, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 30.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119", "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_CLIMATE)\n\n await opp.async_block_till_done()\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 113\n await common.async_set_temperature(opp, 114, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 114", "def test_temperatures_value(self):\n self.assertEqual(self.TminValue, 450.0)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 20 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 21, ENTITY_HEATPUMP)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 21.0 == state.attributes.get(ATTR_TEMPERATURE)", "def set_temp_range(self, temp_range=(0, 0, 1)):\n args = list(temp_range)\n assert len(args) == 3\n minimum, maximum, step = args\n if all([isinstance(i, int) for i in args]):\n if (maximum - minimum) % step == 0:\n maximum += 1\n self.temperatures = np.arange(minimum, maximum, step, dtype=float)\n self.qptanalyzer.temperatures = self.temperatures", "def set_temperature(self, **kwargs):\n low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)\n high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n if low_temp is not None:\n low_temp = round(low_temp)\n self._device.set_setpoint_heat(low_temp)\n if high_temp is not None:\n high_temp = round(high_temp)\n self._device.set_setpoint_cool(high_temp)", "def set_temperature(self, **kwargs):\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self._target_temperature_high = 
kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temp = kwargs.get(ATTR_TEMPERATURE)\n if self.current_operation == 'Heat & Cool' and self._target_temperature_low is not None \\\n and self._target_temperature_high is not None:\n self._api._heatto = self._target_temperature_low\n self._api._coolto = self._target_temperature_high\n elif temp is not None:\n if self.current_operation == 'Heat only':\n self._api._heatto = temp\n self._api._coolto = temp + 10\n elif self.current_operation == 'Cool only':\n self._api._heatto = temp - 10\n self._api._coolto = temp \n self._api.set()\n self.schedule_update_ha_state()", "async def async_set_temperature(self, **kwargs: Any) -> None:\n target_temp = kwargs.get(ATTR_TEMPERATURE)\n target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n if target_temp is not None:\n if self.hvac_mode == HVACMode.COOL:\n target_temp_high = target_temp\n if self.hvac_mode == HVACMode.HEAT:\n target_temp_low = target_temp\n if target_temp_low is not None:\n await self._node.set_climate_setpoint_heat(int(target_temp_low))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_low = target_temp_low\n if target_temp_high is not None:\n await self._node.set_climate_setpoint_cool(int(target_temp_high))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_high = target_temp_high\n self.async_write_ha_state()", "async def async_set_temperature(self, **kwargs: Any) -> None:\n if kwargs.get(ATTR_TEMPERATURE) is not None:\n self._target_temperature = kwargs.get(ATTR_TEMPERATURE)\n if (\n kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None\n and kwargs.get(ATTR_TARGET_TEMP_LOW) is not None\n ):\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self.async_write_ha_state()", "async def test_get_temperature_range(hass, hk_driver, cls):\n entity_id = 'climate.test'\n\n hass.states.async_set(entity_id, STATE_OFF)\n await hass.async_block_till_done()\n acc = cls.thermostat(hass, hk_driver, 'Climate', entity_id, 2, None)\n\n hass.states.async_set(entity_id, STATE_OFF,\n {ATTR_MIN_TEMP: 20, ATTR_MAX_TEMP: 25})\n await hass.async_block_till_done()\n assert acc.get_temperature_range() == (20, 25)\n\n acc._unit = TEMP_FAHRENHEIT\n hass.states.async_set(entity_id, STATE_OFF,\n {ATTR_MIN_TEMP: 60, ATTR_MAX_TEMP: 70})\n await hass.async_block_till_done()\n assert acc.get_temperature_range() == (15.6, 21.1)", "async def async_set_temperature(self, **kwargs: Any) -> None:\n changed = await self._set_climate_attribute(\n kwargs.get(ATTR_TEMPERATURE),\n CONF_TEMP_COMMAND_TOPIC,\n CONF_TEMP_COMMAND_TEMPLATE,\n CONF_TEMP_STATE_TOPIC,\n \"_attr_target_temperature\",\n )\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_LOW),\n CONF_TEMP_LOW_COMMAND_TOPIC,\n CONF_TEMP_LOW_COMMAND_TEMPLATE,\n CONF_TEMP_LOW_STATE_TOPIC,\n \"_attr_target_temperature_low\",\n )\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_HIGH),\n CONF_TEMP_HIGH_COMMAND_TOPIC,\n CONF_TEMP_HIGH_COMMAND_TEMPLATE,\n CONF_TEMP_HIGH_STATE_TOPIC,\n \"_attr_target_temperature_high\",\n )\n\n if not changed:\n return\n self.async_write_ha_state()", "def target_temperature_high(self):\n return self._device.setpoint_cool", "def target_temperature(self) -> float | None:\n return self._device.setpoint", "def set_temperature(self, **kwargs):\n if kwargs.get(ATTR_TEMPERATURE) is not None:\n self._target_temperature 
= kwargs.get(ATTR_TEMPERATURE)\n if (self._power_state == HYSEN_POWERON):\n self.send_tempset_command(kwargs.get(ATTR_TEMPERATURE))\n self.schedule_update_ha_state()", "def target_temperature_low(self):\n return self._device.setpoint_heat", "def target_temperature(self):\n if (self._device.mode == self._device.MODE_HEAT) or (\n self._device.mode == self._device.MODE_HEAT_EMERGENCY):\n return self._device.setpoint_heat\n if self._device.mode == self._device.MODE_COOL:\n return self._device.setpoint_cool\n return None", "def target_temperature(self):\n return self._boiler.setpoint", "def target_temperature_step(self):\n return 1", "def target_temperature_step(self):\n return 1", "def set_target_temperature_low(self, value: int = 0):\r\n if self._temperature_scale == \"F\":\r\n self._target_temperature_low = celsius_to_kelvin(\r\n fahrenheit_to_celsius(value)\r\n )\r\n elif self._temperature_scale == \"C\":\r\n self._target_temperature_low = celsius_to_kelvin(value)\r\n else:\r\n self._target_temperature_low = value\r\n\r\n self._logger.info(log_message_formatter(\r\n \"set\", f\"{self}\", \"target_temperature_low\", value))", "def setup_target_mask(self):\n if self.region is not None:\n region_value = self.region.get(\"value\", None)\n if region_value is not None:\n self._obs_file.target_mask = MV2.not_equal(\n self.sftlf[\"target_grid\"], region_value\n )", "async def set_temperature(self, **kwargs):\n if await self.atag.dhw_set_temp(kwargs.get(ATTR_TEMPERATURE)):\n self.async_schedule_update_ha_state(True)", "async def async_set_temperature(self, **kwargs: Any) -> None:\n target_temperature = (\n float(kwargs[ATTR_TEMPERATURE]) - self._offset\n ) / self._scale\n if self._data_type in (\n DataType.INT16,\n DataType.INT32,\n DataType.INT64,\n DataType.UINT16,\n DataType.UINT32,\n DataType.UINT64,\n ):\n target_temperature = int(target_temperature)\n as_bytes = struct.pack(self._structure, target_temperature)\n raw_regs = [\n int.from_bytes(as_bytes[i : i + 2], \"big\")\n for i in range(0, len(as_bytes), 2)\n ]\n registers = self._swap_registers(raw_regs, 0)\n\n if self._data_type in (\n DataType.INT16,\n DataType.UINT16,\n ):\n if self._target_temperature_write_registers:\n result = await self._hub.async_pb_call(\n self._slave,\n self._target_temperature_register,\n [int(float(registers[0]))],\n CALL_TYPE_WRITE_REGISTERS,\n )\n else:\n result = await self._hub.async_pb_call(\n self._slave,\n self._target_temperature_register,\n int(float(registers[0])),\n CALL_TYPE_WRITE_REGISTER,\n )\n else:\n result = await self._hub.async_pb_call(\n self._slave,\n self._target_temperature_register,\n [int(float(i)) for i in registers],\n CALL_TYPE_WRITE_REGISTERS,\n )\n self._attr_available = result is not None\n await self.async_update()", "def set_temperature(self, **kwargs):\n if self._on:\n if kwargs.get(ATTR_TEMPERATURE) is not None:\n self._target_temperature = kwargs.get(ATTR_TEMPERATURE)\n hub.session.set_heat_pump_target_temperature(\n self.heatpump_id, self._target_temperature)\n self.schedule_update_ha_state()", "def set_temperature(self, **kwargs):\n set_temp = True\n operation_mode = kwargs.get(ATTR_HVAC_MODE)\n temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temperature = kwargs.get(ATTR_TEMPERATURE)\n\n if operation_mode and self._mode_map.get(operation_mode) != self._client.mode:\n set_temp = self._set_operation_mode(operation_mode)\n\n if set_temp:\n if (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_HEAT\n 
):\n success = self._client.set_setpoints(temperature, self._client.cooltemp)\n elif (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_COOL\n ):\n success = self._client.set_setpoints(self._client.heattemp, temperature)\n elif (\n self._mode_map.get(operation_mode, self._client.mode)\n == self._client.MODE_AUTO\n ):\n success = self._client.set_setpoints(temp_low, temp_high)\n else:\n success = False\n _LOGGER.error(\n (\n \"The thermostat is currently not in a mode \"\n \"that supports target temperature: %s\"\n ),\n operation_mode,\n )\n\n if not success:\n _LOGGER.error(\"Failed to change the temperature\")\n self.schedule_update_ha_state()", "def target_temperature(self) -> float:\n return self._thermostat.setpoint_temperature" ]
[ "0.7508329", "0.7287901", "0.7128554", "0.70946324", "0.69626224", "0.69421434", "0.6896598", "0.67694557", "0.6697478", "0.6474526", "0.64601743", "0.63387096", "0.63202316", "0.63062906", "0.624502", "0.6209591", "0.6144897", "0.61321884", "0.6119349", "0.61118054", "0.6096367", "0.6087682", "0.6087682", "0.6032859", "0.60198295", "0.6002337", "0.5987271", "0.5975694", "0.59043384", "0.5889169" ]
0.7497247
1
Test setting the target humidity without required attribute.
async def test_set_target_humidity_bad_attr(opp):
    state = opp.states.get(ENTITY_CLIMATE)
    assert 67 == state.attributes.get(ATTR_HUMIDITY)

    with pytest.raises(vol.Invalid):
        await common.async_set_humidity(opp, None, ENTITY_CLIMATE)
    await opp.async_block_till_done()

    state = opp.states.get(ENTITY_CLIMATE)
    assert 67 == state.attributes.get(ATTR_HUMIDITY)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119", "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_CLIMATE)\n\n await opp.async_block_till_done()\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_target_humidity(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 67 == state.attributes.get(ATTR_HUMIDITY)\n\n await common.async_set_humidity(opp, 64, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 64.0 == state.attributes.get(ATTR_HUMIDITY)", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n await common.async_set_temperature(opp, 110, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 110", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 30, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 30.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_target_temp_range_bad_attr(opp):\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(\n opp,\n temperature=None,\n entity_id=ENTITY_ECOBEE,\n target_temp_low=None,\n target_temp_high=None,\n )\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 20 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 21, ENTITY_HEATPUMP)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 21.0 == state.attributes.get(ATTR_TEMPERATURE)", "def set_mist(self, humidity):\n if humidity > 100:\n humidity = 100\n elif humidity < 0:\n humidity = 0\n # could set acknolage\n self.__hum = humidity", "async def set_target_humidity(self, humidity):\n\n range_info = self._get_humidity_range()\n if range_info and not (range_info[0] <= humidity <= range_info[1]):\n raise ValueError(f\"Target humidity out of range: {humidity}\")\n keys = self._get_cmd_keys(CMD_STATE_TARGET_HUM)\n await self.set(keys[0], keys[1], key=keys[2], value=humidity)", "async def test_set_target_temp_range(opp):\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)\n\n await common.async_set_temperature(\n opp, target_temp_high=25, target_temp_low=20, 
entity_id=ENTITY_ECOBEE\n )\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 20.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 25.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 113\n await common.async_set_temperature(opp, 114, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 114", "async def test_thermostat_heatit_z_trm3_no_value(\n hass: HomeAssistant, client, climate_heatit_z_trm3_no_value, integration\n) -> None:\n # When the config parameter that specifies what sensor to use has no value, we fall\n # back to the first temperature sensor found on the device\n state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY)\n assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.5", "def set_humidity(self, humidity):\n raise NotImplementedError()", "def test_rh_specific_humidity():\n p = 1013.25 * units.mbar\n temperature = 20. * units.degC\n q = 0.012 * units.dimensionless\n rh = relative_humidity_from_specific_humidity(p, temperature, q)\n assert_almost_equal(rh, 82.71759 * units.percent, 3)", "def target_humidity(self):\n return self._client.hum_setpoint", "async def test_invalid_state_characteristic(hass: HomeAssistant) -> None:\n assert await async_setup_component(\n hass,\n \"sensor\",\n {\n \"sensor\": [\n {\n \"platform\": \"statistics\",\n \"name\": \"test_numeric\",\n \"entity_id\": \"sensor.test_monitored\",\n \"state_characteristic\": \"invalid\",\n \"sampling_size\": 20,\n },\n {\n \"platform\": \"statistics\",\n \"name\": \"test_binary\",\n \"entity_id\": \"binary_sensor.test_monitored\",\n \"state_characteristic\": \"variance\",\n \"sampling_size\": 20,\n },\n ]\n },\n )\n await hass.async_block_till_done()\n\n hass.states.async_set(\n \"sensor.test_monitored\",\n str(VALUES_NUMERIC[0]),\n {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS},\n )\n await hass.async_block_till_done()\n\n state = hass.states.get(\"sensor.test_numeric\")\n assert state is None\n state = hass.states.get(\"sensor.test_binary\")\n assert state is None", "def test_missing_attribute(self):\n assert setup.setup_component(\n self.opp,\n \"binary_sensor\",\n {\n \"binary_sensor\": {\n \"platform\": \"trend\",\n \"sensors\": {\n \"test_trend_sensor\": {\n \"entity_id\": \"sensor.test_state\",\n \"attribute\": \"missing\",\n }\n },\n }\n },\n )\n self.opp.block_till_done()\n\n self.opp.states.set(\"sensor.test_state\", \"State\", {\"attr\": \"2\"})\n self.opp.block_till_done()\n self.opp.states.set(\"sensor.test_state\", \"State\", {\"attr\": \"1\"})\n self.opp.block_till_done()\n state = self.opp.states.get(\"binary_sensor.test_trend_sensor\")\n assert state.state == \"off\"", "async def async_set_temperature(self, **kwargs: Any) -> None:\n if kwargs.get(ATTR_TEMPERATURE) is not None:\n self._target_temperature = kwargs.get(ATTR_TEMPERATURE)\n if (\n kwargs.get(ATTR_TARGET_TEMP_HIGH) is not None\n and kwargs.get(ATTR_TARGET_TEMP_LOW) is not None\n ):\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self.async_write_ha_state()", "def test_status_target_met(self):\n measurement = self.measurement(\n self.metric(),\n sources=[\n {\"source_uuid\": SOURCE_ID, \"value\": \"0\", 
\"total\": \"100\", \"parse_error\": None, \"connection_error\": None},\n {\n \"source_uuid\": SOURCE_ID2,\n \"value\": \"0\",\n \"total\": \"100\",\n \"parse_error\": None,\n \"connection_error\": None,\n },\n ],\n )\n self.assertEqual(\"target_met\", measurement.status())", "def set_temperature(self, **kwargs):\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temp = kwargs.get(ATTR_TEMPERATURE)\n if self.current_operation == 'Heat & Cool' and self._target_temperature_low is not None \\\n and self._target_temperature_high is not None:\n self._api._heatto = self._target_temperature_low\n self._api._coolto = self._target_temperature_high\n elif temp is not None:\n if self.current_operation == 'Heat only':\n self._api._heatto = temp\n self._api._coolto = temp + 10\n elif self.current_operation == 'Cool only':\n self._api._heatto = temp - 10\n self._api._coolto = temp \n self._api.set()\n self.schedule_update_ha_state()", "async def test_set_aux_heat_bad_attr(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF\n\n with pytest.raises(vol.Invalid):\n await common.async_set_aux_heat(opp, None, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF", "def test_change_brightness_of_the_device_false():", "async def async_set_temperature(self, **kwargs: Any) -> None:\n target_temp = kwargs.get(ATTR_TEMPERATURE)\n target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n if target_temp is not None:\n if self.hvac_mode == HVACMode.COOL:\n target_temp_high = target_temp\n if self.hvac_mode == HVACMode.HEAT:\n target_temp_low = target_temp\n if target_temp_low is not None:\n await self._node.set_climate_setpoint_heat(int(target_temp_low))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_low = target_temp_low\n if target_temp_high is not None:\n await self._node.set_climate_setpoint_cool(int(target_temp_high))\n # Presumptive setting--event stream will correct if cmd fails:\n self._target_temp_high = target_temp_high\n self.async_write_ha_state()", "def set_humidity(self, humidity):\n self.humidity = humidity", "def test_specific_humidity_from_mixing_ratio_no_units():\n w = 0.01215\n q = specific_humidity_from_mixing_ratio(w)\n assert_almost_equal(q, 0.01200, 5)", "def test_sensor_humidity(self):\n with patch.dict(TYPES, {'HumiditySensor': self.mock_type}):\n state = State('sensor.humidity', '20',\n {ATTR_DEVICE_CLASS: 'humidity',\n ATTR_UNIT_OF_MEASUREMENT: '%'})\n get_accessory(None, state, 2, {})", "def test_humidity(self, mock_co2, mock_humidity):\n hw = HardwareEmulator(\n co2=False,\n hardware_id=123456789,\n location='Room 567')\n hw.emulate_data()\n mock_humidity.assert_called_once_with()\n mock_co2.assert_not_called()", "async def async_set_humidity(self, humidity: int) -> None:\n self._target_humidity = humidity\n self.async_write_ha_state()", "async def async_set_temperature(self, **kwargs: Any) -> None:\n changed = await self._set_climate_attribute(\n kwargs.get(ATTR_TEMPERATURE),\n CONF_TEMP_COMMAND_TOPIC,\n CONF_TEMP_COMMAND_TEMPLATE,\n CONF_TEMP_STATE_TOPIC,\n \"_attr_target_temperature\",\n )\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_LOW),\n CONF_TEMP_LOW_COMMAND_TOPIC,\n CONF_TEMP_LOW_COMMAND_TEMPLATE,\n CONF_TEMP_LOW_STATE_TOPIC,\n \"_attr_target_temperature_low\",\n 
)\n\n changed |= await self._set_climate_attribute(\n kwargs.get(ATTR_TARGET_TEMP_HIGH),\n CONF_TEMP_HIGH_COMMAND_TOPIC,\n CONF_TEMP_HIGH_COMMAND_TEMPLATE,\n CONF_TEMP_HIGH_STATE_TOPIC,\n \"_attr_target_temperature_high\",\n )\n\n if not changed:\n return\n self.async_write_ha_state()", "def test_dewpoint_specific_humidity_old_signature():\n p = 1013.25 * units.mbar\n temperature = 20. * units.degC\n q = 0.012 * units.dimensionless\n with pytest.raises(ValueError, match='changed in 1.0'):\n dewpoint_from_specific_humidity(q, temperature, p)" ]
[ "0.74001616", "0.73816055", "0.7277736", "0.72660315", "0.717504", "0.70658773", "0.67734814", "0.6699962", "0.6597737", "0.6581366", "0.6550373", "0.63467187", "0.6337172", "0.6316925", "0.6301849", "0.6280884", "0.6259297", "0.6242652", "0.62400675", "0.6235142", "0.6220746", "0.61639535", "0.6154962", "0.6150845", "0.6149331", "0.61432445", "0.61418426", "0.6119271", "0.6094439", "0.60935587" ]
0.7392733
1
Test the setting of the target humidity.
async def test_set_target_humidity(opp):
    state = opp.states.get(ENTITY_CLIMATE)
    assert 67 == state.attributes.get(ATTR_HUMIDITY)

    await common.async_set_humidity(opp, 64, ENTITY_CLIMATE)
    await opp.async_block_till_done()

    state = opp.states.get(ENTITY_CLIMATE)
    assert 64.0 == state.attributes.get(ATTR_HUMIDITY)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def target_humidity(self):\n return self._client.hum_setpoint", "def test_rh_specific_humidity():\n p = 1013.25 * units.mbar\n temperature = 20. * units.degC\n q = 0.012 * units.dimensionless\n rh = relative_humidity_from_specific_humidity(p, temperature, q)\n assert_almost_equal(rh, 82.71759 * units.percent, 3)", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n await common.async_set_temperature(opp, 110, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 110", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 30, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 30.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 20 == state.attributes.get(ATTR_TEMPERATURE)\n\n await common.async_set_temperature(opp, 21, ENTITY_HEATPUMP)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_HEATPUMP)\n assert 21.0 == state.attributes.get(ATTR_TEMPERATURE)", "async def set_target_humidity(self, humidity):\n\n range_info = self._get_humidity_range()\n if range_info and not (range_info[0] <= humidity <= range_info[1]):\n raise ValueError(f\"Target humidity out of range: {humidity}\")\n keys = self._get_cmd_keys(CMD_STATE_TARGET_HUM)\n await self.set(keys[0], keys[1], key=keys[2], value=humidity)", "def test_sensor_humidity(self):\n with patch.dict(TYPES, {'HumiditySensor': self.mock_type}):\n state = State('sensor.humidity', '20',\n {ATTR_DEVICE_CLASS: 'humidity',\n ATTR_UNIT_OF_MEASUREMENT: '%'})\n get_accessory(None, state, 2, {})", "def test_get_fan_speed_setting(loaded_fridge):\n assert loaded_fridge.get_fan_speed_setting() == 750", "async def test_thermostat_heatit_z_trm3_no_value(\n hass: HomeAssistant, client, climate_heatit_z_trm3_no_value, integration\n) -> None:\n # When the config parameter that specifies what sensor to use has no value, we fall\n # back to the first temperature sensor found on the device\n state = hass.states.get(CLIMATE_FLOOR_THERMOSTAT_ENTITY)\n assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.5", "def test_humidity(self, mock_co2, mock_humidity):\n hw = HardwareEmulator(\n co2=False,\n hardware_id=123456789,\n location='Room 567')\n hw.emulate_data()\n mock_humidity.assert_called_once_with()\n mock_co2.assert_not_called()", "def test_specific_humidity_from_dewpoint():\n p = 1013.25 * units.mbar\n q = specific_humidity_from_dewpoint(p, 16.973 * units.degC)\n assert_almost_equal(q, 0.012 * units.dimensionless, 3)", "def target_humidity(self) -> int | None:\n return self._target_humidity", "async def test_set_only_target_temp_with_convert(opp):\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 113\n await common.async_set_temperature(opp, 114, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"temperature\") == 114", "def get_humidity(self):\n return randint(25, 50)", "async def test_set_target_temp_range(opp):\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 21.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 24.0 == 
state.attributes.get(ATTR_TARGET_TEMP_HIGH)\n\n await common.async_set_temperature(\n opp, target_temp_high=25, target_temp_low=20, entity_id=ENTITY_ECOBEE\n )\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_TEMPERATURE) is None\n assert 20.0 == state.attributes.get(ATTR_TARGET_TEMP_LOW)\n assert 25.0 == state.attributes.get(ATTR_TARGET_TEMP_HIGH)", "def set_humidity(self, humidity):\n self.humidity = humidity", "def set_mist(self, humidity):\n if humidity > 100:\n humidity = 100\n elif humidity < 0:\n humidity = 0\n # could set acknolage\n self.__hum = humidity", "def set_humidity(self, humidity):\n raise NotImplementedError()", "def target_temperature_high(self):\n return self._device.setpoint_cool", "def humidity(self):\r\n self._read_temperature()\r\n hum = self._read_register(_BME280_REGISTER_HUMIDDATA, 2)\r\n #print(\"Humidity data: \", hum)\r\n adc = float(hum[0] << 8 | hum[1])\r\n #print(\"adc:\", adc)\r\n\r\n # Algorithm from the BME280 driver\r\n # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c\r\n var1 = float(self._t_fine) - 76800.0\r\n #print(\"var1 \", var1)\r\n var2 = (self._humidity_calib[3] * 64.0 + (self._humidity_calib[4] / 16384.0) * var1)\r\n #print(\"var2 \",var2)\r\n var3 = adc - var2\r\n #print(\"var3 \",var3)\r\n var4 = self._humidity_calib[1] / 65536.0\r\n #print(\"var4 \",var4)\r\n var5 = (1.0 + (self._humidity_calib[2] / 67108864.0) * var1)\r\n #print(\"var5 \",var5)\r\n var6 = 1.0 + (self._humidity_calib[5] / 67108864.0) * var1 * var5\r\n #print(\"var6 \",var6)\r\n var6 = var3 * var4 * (var5 * var6)\r\n humidity = var6 * (1.0 - self._humidity_calib[0] * var6 / 524288.0)\r\n\r\n if humidity > _BME280_HUMIDITY_MAX:\r\n return _BME280_HUMIDITY_MAX\r\n if humidity < _BME280_HUMIDITY_MIN:\r\n return _BME280_HUMIDITY_MIN\r\n # else...\r\n return humidity", "async def test_set_target_humidity_bad_attr(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 67 == state.attributes.get(ATTR_HUMIDITY)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_humidity(opp, None, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert 67 == state.attributes.get(ATTR_HUMIDITY)", "def test_dewpoint_specific_humidity():\n p = 1013.25 * units.mbar\n temperature = 20. 
* units.degC\n q = 0.012 * units.dimensionless\n td = dewpoint_from_specific_humidity(p, temperature, q)\n assert_almost_equal(td, 16.973 * units.degC, 3)", "def set_temperature(self, **kwargs):\n self._target_temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW)\n self._target_temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)\n temp = kwargs.get(ATTR_TEMPERATURE)\n if self.current_operation == 'Heat & Cool' and self._target_temperature_low is not None \\\n and self._target_temperature_high is not None:\n self._api._heatto = self._target_temperature_low\n self._api._coolto = self._target_temperature_high\n elif temp is not None:\n if self.current_operation == 'Heat only':\n self._api._heatto = temp\n self._api._coolto = temp + 10\n elif self.current_operation == 'Cool only':\n self._api._heatto = temp - 10\n self._api._coolto = temp \n self._api.set()\n self.schedule_update_ha_state()", "def test_ambient_tmeperature_sensor(self):\n self.assertEqual(self.tag_outdoor.outdoor_probe_has_ambient_temperature, True)\n\n sensor = self.tag_outdoor.sensor[CONST.SENSOR_AMBIENT_TEMPERATURE]\n self.assertIsNotNone(sensor)\n self.assertEqual(sensor.value, 23.43)\n\n self.assertIn('ambient temp:', str(self.tag_outdoor))", "def test_temperatures_value(self):\n self.assertEqual(self.TminValue, 450.0)", "def test_values_single_level(self):\n result = WetBulbTemperature().process(\n CubeList([self.temperature, self.relative_humidity, self.pressure])\n )\n self.assertArrayAlmostEqual(result.data, self.expected_wbt_data, decimal=3)\n self.assertEqual(result.units, Unit(\"K\"))", "async def async_set_humidity(self, humidity: int) -> None:\n self._target_humidity = humidity\n self.async_write_ha_state()", "def test_change_brightness_of_the_device_false():", "def test_status_target_met(self):\n measurement = self.measurement(\n self.metric(),\n sources=[\n {\"source_uuid\": SOURCE_ID, \"value\": \"0\", \"total\": \"100\", \"parse_error\": None, \"connection_error\": None},\n {\n \"source_uuid\": SOURCE_ID2,\n \"value\": \"0\",\n \"total\": \"100\",\n \"parse_error\": None,\n \"connection_error\": None,\n },\n ],\n )\n self.assertEqual(\"target_met\", measurement.status())", "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119" ]
[ "0.70812947", "0.688115", "0.6815197", "0.67476773", "0.67109245", "0.6650672", "0.66435766", "0.657569", "0.65534496", "0.6538978", "0.6525243", "0.64943826", "0.6469849", "0.64688325", "0.64436466", "0.64250076", "0.6409314", "0.6397432", "0.63508534", "0.63238615", "0.63232464", "0.63078976", "0.6299449", "0.629681", "0.62958926", "0.62769115", "0.6231675", "0.6224461", "0.6210553", "0.6206113" ]
0.74006915
0
Test setting hvac mode without required attribute. Also check the state.
async def test_set_hvac_bad_attr_and_state(opp): state = opp.states.get(ENTITY_CLIMATE) assert state.attributes.get(ATTR_HVAC_ACTION) == CURRENT_HVAC_COOL assert state.state == HVAC_MODE_COOL with pytest.raises(vol.Invalid): await common.async_set_hvac_mode(opp, None, ENTITY_CLIMATE) await opp.async_block_till_done() state = opp.states.get(ENTITY_CLIMATE) assert state.attributes.get(ATTR_HVAC_ACTION) == CURRENT_HVAC_COOL assert state.state == HVAC_MODE_COOL
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_hvac_settings_mode() -> None:\n response: models.KamereonVehicleDataResponse = fixtures.get_file_content_as_schema(\n f\"{fixtures.KAMEREON_FIXTURE_PATH}/vehicle_data/hvac-settings.json\",\n schemas.KamereonVehicleDataResponseSchema,\n )\n response.raise_for_error_code()\n\n vehicle_data = cast(\n models.KamereonVehicleHvacSettingsData,\n response.get_attributes(schemas.KamereonVehicleHvacSettingsDataSchema),\n )\n\n assert vehicle_data.mode == \"scheduled\"", "def hvac_mode(self):\n dps_mode = self._device.get_property(PROPERTY_TO_DPS_ID[ATTR_HVAC_MODE])\n\n if dps_mode is not None:\n return GoldairTuyaDevice.get_key_for_value(HVAC_MODE_TO_DPS_MODE, dps_mode)\n else:\n return STATE_UNAVAILABLE", "def hvac_mode(self) -> str | None:\n\n if self._device.tcs.system_mode is None:\n return # unable to determine\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.AWAY:\n return HVACMode.AUTO\n if self._device.tcs.system_mode[CONF_SYSTEM_MODE] == SystemMode.HEAT_OFF:\n return HVACMode.OFF\n\n if self._device.mode is None or self._device.mode[ATTR_SETPOINT] is None:\n return # unable to determine\n if (\n self._device.config\n and self._device.mode[ATTR_SETPOINT] <= self._device.config[\"min_temp\"]\n ):\n return HVACMode.OFF\n return HVACMode.HEAT", "def hvac_mode(self):\n return self._hvac_mode", "def set_hvac_mode(self, hvac_mode: str) -> None:\n if hvac_mode == HVAC_MODE_OFF:\n self.turn_off()\n elif hvac_mode == HVAC_MODE_AUTO:\n if self.hvac_mode == HVAC_MODE_OFF:\n self.turn_on()\n self.set_preset_mode(PRESET_SCHEDULE)\n elif hvac_mode == HVAC_MODE_HEAT:\n self.set_preset_mode(PRESET_BOOST)", "def hvac_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"hvac_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n ac_mode = self.ac.status.ac_mode\n value = self.HVAC_MODE_MAPPING[ac_mode]\n _LOGGER.debug(f\"hvac_mode: returning {value} (derived from {ac_mode})\")\n return value\n else:\n _LOGGER.debug(f\"hvac_mode: returning HVAC_MODE_OFF - device is off\")\n return HVAC_MODE_OFF", "async def test_set_hvac(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.state == HVAC_MODE_COOL\n\n await common.async_set_hvac_mode(opp, HVAC_MODE_HEAT, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.state == HVAC_MODE_HEAT", "def hvac_mode(self) -> HVACMode:\n if self._client.mode == self._client.MODE_HEAT:\n return HVACMode.HEAT\n if self._client.mode == self._client.MODE_COOL:\n return HVACMode.COOL\n if self._client.mode == self._client.MODE_AUTO:\n return HVACMode.AUTO\n return HVACMode.OFF", "def hvac_mode(self) -> HVACMode:\n if self._thermostat.mode == MODE_AUTO:\n return HVACMode.AUTO\n return HVACMode.HEAT", "def hvac_mode(self) -> HVACMode:\n return self._hvac_mode", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n _LOGGER.debug(\"Requested operation mode %s\", hvac_mode)\n await self._node.set_climate_mode(HA_HVAC_TO_ISY.get(hvac_mode))\n # Presumptive setting--event stream will correct if cmd fails:\n self._hvac_mode = hvac_mode\n self.async_write_ha_state()", "def hvac_mode(self) -> HVACMode:\n if not (hvac_mode := self._node.aux_properties.get(CMD_CLIMATE_MODE)):\n return HVACMode.OFF\n\n # Which state values used depends on the mode property's UOM:\n uom = hvac_mode.uom\n # Handle special case for ISYv4 Firmware:\n if uom in (UOM_ISYV4_NONE, \"\"):\n uom = (\n UOM_HVAC_MODE_INSTEON\n if self._node.protocol == PROTO_INSTEON\n else 
UOM_HVAC_MODE_GENERIC\n )\n return (\n try_parse_enum(HVACMode, UOM_TO_STATES[uom].get(hvac_mode.value))\n or HVACMode.OFF\n )", "def set_hvac_mode(self, hvac_mode):\n\n if hvac_mode == HVAC_MODE_OFF:\n self._on = False\n self._device.set_location_to_off()\n self._current_operation_mode = CONST_MODE_OFF\n\n elif hvac_mode == HVAC_MODE_AUTO:\n self._on = True\n self._device.set_temperature_to_auto()\n self._current_operation_mode = CONST_MODE_PROGRAM\n\n elif hvac_mode == HVAC_MODE_HEAT:\n self._on = True\n self._device.set_temperature_to_manual()\n self._current_operation_mode = CONST_MODE_FIXED\n\n else:\n raise InvalidStateError", "def hvac_mode(self) -> str | None:\n\n if self._device.system_mode is None:\n return # unable to determine\n if self._device.system_mode[CONF_SYSTEM_MODE] == SystemMode.HEAT_OFF:\n return HVACMode.OFF\n if self._device.system_mode[CONF_SYSTEM_MODE] == SystemMode.AWAY:\n return HVACMode.AUTO # users can't adjust setpoints in away mode\n return HVACMode.HEAT", "def set_hvac_mode(self, hvac_mode: str) -> None:\n self.svc_set_system_mode(MODE_TO_TCS.get(hvac_mode))", "def hvac_mode(self) -> HVACMode:\n if self._device.mode in self._deconz_to_hvac_mode:\n return self._deconz_to_hvac_mode[self._device.mode]\n return HVACMode.HEAT if self._device.state_on else HVACMode.OFF", "def hvac_modes(self):\n return SUPPORT_HVAC_HEAT", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n if hvac_mode == HVACMode.AUTO:\n await self._thermostat.auto()\n else:\n await self._thermostat.manual()\n await self.coordinator.async_request_refresh()", "def hvac_mode(self) -> str:\r\n self._logger.debug(log_message_formatter(\r\n \"get\", f\"{self}\", \"hvac_mode\"))\r\n return self._hvac_mode", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n _LOGGER.debug(\"Setting operation mode of %s to %s\", self._unique_id, hvac_mode)\n if hvac_mode == HVACMode.OFF:\n await self.async_turn_off()\n else:\n await self._device.command(HA_STATE_TO_AC[hvac_mode])\n if self._on != \"1\":\n await self.async_turn_on()", "def set_hvac_mode(self, hvac_mode: str) -> None:\n if hvac_mode == HVACMode.AUTO: # FollowSchedule\n self.svc_reset_zone_mode()\n elif hvac_mode == HVACMode.HEAT: # TemporaryOverride\n self.svc_set_zone_mode(mode=ZoneMode.PERMANENT, setpoint=25) # TODO:\n else: # HVACMode.OFF, PermentOverride, temp = min\n self.svc_set_zone_mode(self._device.set_frost_mode) # TODO:", "def hvac_mode(self) -> HVACMode | None:\n return _CLIMATE_MODES.from_esphome(self._state.mode)", "def set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n if hvac_mode == HVACMode.OFF:\n self.vera_device.turn_off()\n elif hvac_mode == HVACMode.HEAT_COOL:\n self.vera_device.turn_auto_on()\n elif hvac_mode == HVACMode.COOL:\n self.vera_device.turn_cool_on()\n elif hvac_mode == HVACMode.HEAT:\n self.vera_device.turn_heat_on()\n\n self.schedule_update_ha_state()", "def hvac_mode(self) -> HVACMode:\n mode = self.vera_device.get_hvac_mode()\n if mode == \"HeatOn\":\n return HVACMode.HEAT\n if mode == \"CoolOn\":\n return HVACMode.COOL\n if mode == \"AutoChangeOver\":\n return HVACMode.HEAT_COOL\n return HVACMode.OFF", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n if hvac_mode not in self._attr_hvac_modes:\n raise ValueError(f\"Unsupported HVAC mode {hvac_mode}\")\n\n if len(self._attr_hvac_modes) == 2: # Only allow turn on and off thermostat\n await self.gateway.api.sensors.thermostat.set_config(\n id=self._device.resource_id,\n on=hvac_mode != HVACMode.OFF,\n )\n else:\n 
await self.gateway.api.sensors.thermostat.set_config(\n id=self._device.resource_id,\n mode=HVAC_MODE_TO_DECONZ[hvac_mode],\n )", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n payload = self._command_templates[CONF_MODE_COMMAND_TEMPLATE](hvac_mode)\n await self._publish(CONF_MODE_COMMAND_TOPIC, payload)\n\n if self._optimistic or self._topic[CONF_MODE_STATE_TOPIC] is None:\n self._attr_hvac_mode = hvac_mode\n self.async_write_ha_state()", "def set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n self._set_operation_mode(hvac_mode)\n self.schedule_update_ha_state()", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n self._hvac_mode = hvac_mode\n self.async_write_ha_state()", "def hvac_mode(self):\n return HVAC_MAP_WARMUP_HEAT.get(self._current_operation_mode, HVAC_MODE_AUTO)", "async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:\n await self._client.climate_command(\n key=self._key, mode=_CLIMATE_MODES.from_hass(hvac_mode)\n )" ]
[ "0.7346793", "0.659513", "0.6553336", "0.6547794", "0.64788115", "0.6474456", "0.6468106", "0.6432124", "0.6405655", "0.6400266", "0.6380518", "0.6365761", "0.6360496", "0.6344345", "0.6340621", "0.6338978", "0.63107705", "0.63105696", "0.6308598", "0.6301044", "0.62510014", "0.6243038", "0.6230263", "0.6223767", "0.62143886", "0.610287", "0.6060785", "0.60366", "0.6024978", "0.59966063" ]
0.6619193
1
Test setting the hold mode eco.
async def test_set_hold_mode_eco(opp): await common.async_set_preset_mode(opp, PRESET_ECO, ENTITY_ECOBEE) await opp.async_block_till_done() state = opp.states.get(ENTITY_ECOBEE) assert state.attributes.get(ATTR_PRESET_MODE) == PRESET_ECO
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test_set_hold_mode_away(opp):\n await common.async_set_preset_mode(opp, PRESET_AWAY, ENTITY_ECOBEE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_ECOBEE)\n assert state.attributes.get(ATTR_PRESET_MODE) == PRESET_AWAY", "async def test_set_away_mode_on(opp):\n await common.async_set_away_mode(opp, True, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"away_mode\") == \"on\"", "async def test_set_away_mode_off(opp):\n await common.async_set_away_mode(opp, False, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"away_mode\") == \"off\"", "def test_turn_off(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.OFF\n power_supply.turn_off()\n assert power_supply.state() == tango.DevState.OFF", "def test_door_pause_protocol(enable_door_safety_switch):\n pause_mgr = PauseManager(door_state=DoorState.CLOSED)\n assert pause_mgr.queue == []\n\n pause_mgr.set_door(door_state=DoorState.OPEN)\n pause_mgr.pause(PauseType.PAUSE)\n assert pause_mgr.queue == [PauseType.PAUSE]\n\n with pytest.raises(PauseResumeError):\n pause_mgr.resume(PauseType.PAUSE)\n assert pause_mgr.queue == [PauseType.PAUSE]\n\n pause_mgr.set_door(door_state=DoorState.CLOSED)\n assert pause_mgr.queue == [PauseType.PAUSE]\n\n pause_mgr.resume(PauseType.PAUSE)\n assert pause_mgr.queue == []", "def testGetConfigPowerAlarmClearHoldOff(self):\n self.ports.getconfig_power_alarm_clear_holdoff(file_name = 'get_power_alarm_clear_holdoff.xml', port_ids = portsDict['port_ids'], power_alarm_clear_holdoff = portsDict['power_alarm_clear_holdoff'])", "def _isstandby(self):\n return self.dp.state()==PyTango.DevState.STANDBY", "def test_turn_on(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.ON\n power_supply.current = 5.0\n power_supply.turn_on()\n assert power_supply.state() == tango.DevState.ON", "async def test_set_away_mode_bad_attr(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"away_mode\") == \"off\"\n with pytest.raises(vol.Invalid):\n await common.async_set_away_mode(opp, None, ENTITY_WATER_HEATER)\n assert state.attributes.get(\"away_mode\") == \"off\"", "def is_onhold(self) -> bool:", "def testWrongMode(self):\n self.mgr.status = mavutil.mavlink.GOPRO_HEARTBEAT_STATUS_DISCONNECTED\n self.mgr.handleRecordCommand( CAPTURE_MODE_VIDEO, RECORD_COMMAND_TOGGLE )\n self.assertFalse(self.mgr.sendGoProCommand.called)", "def test_gain(self):\n self.plr.piles[Piles.DECK].set(\"Duchy\")\n self.plr.test_input = [\"Get Estate\"]\n self.plr.gain_card(\"Cursed Village\")\n self.assertNotIn(\"Curse\", self.plr.piles[Piles.DISCARD])\n self.assertIsNotNone(self.plr.piles[Piles.DISCARD][\"Estate\"])\n self.assertIn(\"Duchy\", self.g.trashpile)", "def setWindowHold(mode='hold'):\n sdict = {'hold':'FULL','nohold':'NOHOLD','noerase':'NOERASE','none':'NONE'}\n dislin.winmod(sdict[mode])", "async def test_set_operation(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"operation_mode\") == \"eco\"\n assert state.state == \"eco\"\n await common.async_set_operation_mode(opp, \"electric\", ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"operation_mode\") == \"electric\"\n assert state.state == \"electric\"", "def test_right_mode(self):\n self.dp.setRewindingMode('AUTO')\n self.assertEqual(self.dp.getRewindingMode(), 'AUTO')\n 
self.dp.setRewindingMode('MANUAL')", "def test_conditions(self):\n if not CalculatorUtils.clear_calc(self.device):\n Utils.start_home(self.serial)\n AppUtils.kill_app(self.serial, self.package)\n AppUtils.open_app(self.device, self.serial, self.app)\n Utils.wait_short()", "def testGetPowerAlarmClearHoldOff(self):\n self.ports.get_power_alarm_clear_holdoff(file_name = 'get_power_alarm_clear_holdoff.xml', port_ids = portsDict['port_ids'], power_alarm_clear_holdoff = portsDict['power_alarm_clear_holdoff'])", "def on_hold(self):\n return self.state == self.STATES.on_hold", "def check_manual_mode_change(self, event):\n if self.vehicle.get_manual_mode_change(reset=True):\n data = lambda: None\n data.mode_to_set = \"Inactive\"\n self.set_companion_mode(data)", "def set_mode(self,mode,state=True):\n\t\tprint \"SET_MODE START\"\n\t\tfor key,val in self.ms_all.iteritems():\n\t\t\tif val.index(mode) is not None:\n\t\t\t\tif state:\n\t\t\t\t\tval.activate( val.index(mode) )\n\t\t\t\telse:\n\t\t\t\t\tval.deactivate( val.index(mode) )\n\t\t\"\"\"\n\t\tprint \"SET_MODE DONE -- ALSO DOING EXPERIMENTAL -- \"\n\t\t# DEBUG / EXPERIMENTAL\n\t\tif self.int_encoder is not None:\n\t\t\tif mode == 'volume' and state == True and 'mode_timeout' in self.cfg_gpio and self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. GPIO/VOLUME ({0}:{1}).. disabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.remove_event_detect(13)\n\t\t\t\tself.gpio.remove_event_detect(6)\n\t\t\t\tself.int_enabled = False\n\t\t\telif mode != 'volume' and state == True and 'mode_timeout' in self.cfg_gpio and not self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. GPIO/NOT VOLUME ({0}:{1}).. enabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.setup((13,6), self.gpio.IN, pull_up_down=self.gpio.PUD_DOWN)\n\t\t\t\tself.gpio.add_event_detect(13, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime \n\t\t\t\tself.gpio.add_event_detect(6, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime\n\t\t\t\tself.int_enabled = True\n\t\t\telif mode == 'volume' and state == True and 'mode_timeout' not in self.cfg_gpio and not self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. ECA/VOLUME ({0}:{1}).. enabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.setup((13,6), self.gpio.IN, pull_up_down=self.gpio.PUD_DOWN)\n\t\t\t\tself.gpio.add_event_detect(13, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime \n\t\t\t\tself.gpio.add_event_detect(6, self.gpio.RISING, callback=self.int_encoder) # NO bouncetime\n\t\t\t\tself.int_enabled = True\n\t\t\telif mode != 'volume' and state == True and 'mode_timeout' not in self.cfg_gpio and self.int_enabled:\n\t\t\t\tprint \"DEBUG2.. ECA/NOT VOLUME ({0}:{1}).. disabling our interrupts..\".format(mode,state)\n\t\t\t\tself.gpio.remove_event_detect(13)\n\t\t\t\tself.gpio.remove_event_detect(6)\n\t\t\t\tself.int_enabled = False\n\t\t\tprint \"DEBUG2.. done\"\n\t\t\"\"\"", "def test_init(power_supply):\n power_supply.Init()\n assert power_supply.state() == tango.DevState.STANDBY", "def test_keyboard(self):\n ## Note that the enclosed double-quotes are important. 
They are part of the config syntax\n assert self.rc_conf.has_key('keymap')\n assert self.rc_conf['keymap'] == '\"uk.cp850\"'\n assert self.rc_conf['keyrate'] == '\"fast\"'", "def eco_mode_enabled(self) -> bool:\n return self._device_info[\"EcoMode\"] == \"on\"", "def is_hold(self):\n status = self.gpio.input(self.pin)\n\n if status == 1:\n if not self.time_set_status:\n self.time_set_status = time.time()\n if time.time() - self.time_set_status > self.hold_time:\n self.time_set_status = time.time()\n return 1\n else:\n self.time_set_status = None\n return 0", "async def test_set_swing(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert \"Off\" == state.attributes.get(ATTR_SWING_MODE)\n\n await common.async_set_swing_mode(opp, \"Auto\", ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert \"Auto\" == state.attributes.get(ATTR_SWING_MODE)", "def sleepCheck(self):\n if self.sleep_active:\n if self.system_awake:\n if self.sleepPeriodValidate():\n self.logQ.put('Sleep period: Start = {0} -- Stop = {1}'.format(self.sleep_start, self.sleep_stop))\n self.notifyPut('Current time is {0} -- Now entering Sleep period of the GSDM'.format(self.current_time))\n self.logQ.put('Current time is {0} -- Now entering Sleep period of the GSDM'.format(self.current_time))\n time.sleep(3)\n self.stopGsdm()\n time.sleep(8)\n \n self.system_awake = False\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n self.db['system_awake'] = self.system_awake\n print 'After setting to false system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n print 'After setting to false db[system_awake] = {0}'.format(self.db['system_awake']) # TESTING ++++++++++++++++\n self.db.close()\n \n self.notifyPut('Sleep period will Stop at {1}'.format(self.sleep_start, self.sleep_stop))\n else:\n pass\n elif not self.system_awake:\n if not self.sleepPeriodValidate():\n \n self.system_awake = True\n self.db = shelve.open(os.path.join(self.xlocal, 'Launch Manager Utils\\\\launch.data'))\n self.db['system_awake'] = self.system_awake\n print 'After setting to true system_awake = {0}'.format(self.system_awake) # TESTING ++++++++++++++++\n print 'After setting to true db[system_awake] = {0}'.format(self.db['system_awake']) # TESTING ++++++++++++++++\n self.db.close()\n \n self.notifyPut('Sleep period: Start = {0} -- Stop = {1}'.format(self.sleep_start, self.sleep_stop))\n self.logQ.put('Sleep period: Start = {0} -- Stop = {1}'.format(self.sleep_start, self.sleep_stop))\n time.sleep(3)\n self.notifyPut('Current time is {0} -- Now exiting Sleep period of the GSDM'.format(self.current_time))\n self.logQ.put('Current time is {0} -- Now exiting Sleep period of the GSDM'.format(self.current_time))\n time.sleep(3)\n self.checkMetrics()\n time.sleep(1)\n self.stopGsdm() # to clear any unwanted sessions\n time.sleep(5)\n if self.metrics_match == False:\n self.noMatchLaunch()\n else:\n self.matchLaunch()\n else:\n pass", "def test_enable_maintence_mode1(self):\n pass", "def event_m10_29_1140():\r\n \"\"\"State 0,2: [Preset] Door that opens in conjunction with the gimmick door_SubState\"\"\"\r\n assert event_m10_29_x25(z38=10291010, z39=10290405)\r\n \"\"\"State 1: Finish\"\"\"\r\n EndMachine()\r\n Quit()", "def test_play(self):\n self.plr.piles[Piles.DECK].set(\"Province\")\n self.plr.add_card(self.card, Piles.HAND)\n self.plr.test_input = [\"keep\"]\n self.plr.play_card(self.card)\n self.assertEqual(self.plr.coins.get(), 2)\n 
self.assertIn(\"Province\", self.plr.piles[Piles.DECK])\n self.assertNotIn(\"Province\", self.plr.piles[Piles.DISCARD])", "def _isoff(self):\n return self.dp.state()==PyTango.DevState.OFF" ]
[ "0.7248443", "0.6252217", "0.6161196", "0.6013288", "0.5826798", "0.5784178", "0.5759576", "0.57385975", "0.5711461", "0.5701929", "0.5627311", "0.561392", "0.55675197", "0.55644256", "0.55418855", "0.54940426", "0.5468023", "0.546363", "0.54626715", "0.5459663", "0.5440452", "0.53907114", "0.5388101", "0.5385431", "0.5364556", "0.5360843", "0.5347721", "0.5342641", "0.53373915", "0.5328995" ]
0.7302554
0
Test setting the auxiliary heater without required attribute.
async def test_set_aux_heat_bad_attr(opp): state = opp.states.get(ENTITY_CLIMATE) assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF with pytest.raises(vol.Invalid): await common.async_set_aux_heat(opp, None, ENTITY_CLIMATE) await opp.async_block_till_done() assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119", "def test_thermallyExpands(self):\n self.assertFalse(self.component.THERMAL_EXPANSION_DIMS)", "async def test_default_setup_params(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"min_temp\") == 110\n assert state.attributes.get(\"max_temp\") == 140", "async def test_set_aux_heat_on(opp):\n await common.async_set_aux_heat(opp, True, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_ON", "async def test_set_aux_heat_off(opp):\n await common.async_set_aux_heat(opp, False, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_thermallyExpands(self):\n self.assertTrue(self.component.THERMAL_EXPANSION_DIMS)", "def test_set_hx(self):\n s = State(substance=\"water\")\n s.hx = Q_(1624328.2430353598, \"J/kg\"), Q_(0.5, \"dimensionless\")\n # Pylance does not support NumPy ufuncs\n assert np.isclose(s.T, Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(245769.34557103913, \"Pa\")) # type: ignore\n assert np.isclose(s.xT[1], Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.xT[0], Q_(0.5, \"dimensionless\")) # type: ignore\n assert np.isclose(s.u, Q_(1534461.5163075812, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(4329.703956664546, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cp, Q_(4056.471547685226, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cv, Q_(2913.7307270395363, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.v, Q_(0.3656547423394701, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(1624328.2430353598, \"J/kg\")) # type: ignore\n assert np.isclose(s.x, Q_(0.5, \"dimensionless\")) # type: ignore", "def test_for_arbitrarily_complicated_substance():\n verify_atomic_weight_for_substance(\"Al4O2H2\", 141.94015428)", "def test_thickness_hydrostatic_isothermal_subset():\n pressure = np.arange(1000, 500 - 1e-10, -10) * units.hPa\n temperature = np.zeros_like(pressure) * units.degC\n thickness = thickness_hydrostatic(pressure, temperature, bottom=850 * units.hPa,\n depth=350 * units.hPa)\n assert_almost_equal(thickness, 4242.527 * units.m, 2)", "def test_set_hp(self):\n s = State(substance=\"water\")\n s.hp = Q_(1061602.391543017, \"J/kg\"), Q_(101325.0, \"Pa\")\n # Pylance does not support NumPy ufuncs\n assert np.isclose(s.T, Q_(373.1242958476843, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.hp[0], 
Q_(1061602.391543017, \"J/kg\")) # type: ignore\n assert np.isclose(s.hp[1], Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.u, Q_(1013250, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(3028.9867985920914, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.v, Q_(0.4772010021515822, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(1061602.391543017, \"J/kg\")) # type: ignore\n assert np.isclose(s.x, Q_(0.28475636946248034, \"dimensionless\")) # type: ignore\n s.hp = Q_(3336406.139862406, \"J/kg\"), Q_(101325.0, \"Pa\")\n assert np.isclose(s.T, Q_(700.9882316847855, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.hp[0], Q_(3336406.139862406, \"J/kg\")) # type: ignore\n assert np.isclose(s.hp[1], Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.u, Q_(3013250, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(8623.283568815832, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.v, Q_(3.189303132125469, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(3336406.139862406, \"J/kg\")) # type: ignore\n assert s.x is None", "async def test_set_only_target_temp(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n await common.async_set_temperature(opp, 110, ENTITY_WATER_HEATER)\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 110", "def testWaterOrientation_raisesForHydroxylGroups(self):\n\n\t\tcurrBinResObj = binResHelp.getEmptyBinResultsFromMinMaxAndWidthStandard(-90,90,10,extremesAtCentre=False)\n\t\tfakeOxyIndices, fakeHyIndices = [ [1], [2,3] ] #Settings wrong indices here SHOULDNT matter\n\t\tcurrArgs = [currBinResObj, fakeOxyIndices, fakeHyIndices]\n\t\tcurrKwargs = {\"angleType\":\"pitch\"}\n\t\tself.distrOptObjs = [ distrOptObjHelp.WaterOrientationOptions(*currArgs,**currKwargs) ]\n\n\t\twith self.assertRaises(AssertionError):\n\t\t\tself.createTestObjs()", "def test_set_xh(self):\n s = State(substance=\"water\")\n s.xh = Q_(0.5, \"dimensionless\"), Q_(1624328.2430353598, \"J/kg\")\n # Pylance does not support NumPy ufuncs\n assert np.isclose(s.T, Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(245769.34557103913, \"Pa\")) # type: ignore\n assert np.isclose(s.xT[1], Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.xT[0], Q_(0.5, \"dimensionless\")) # type: ignore\n assert np.isclose(s.u, Q_(1534461.5163075812, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(4329.703956664546, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cp, Q_(4056.471547685226, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cv, Q_(2913.7307270395363, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.v, Q_(0.3656547423394701, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(1624328.2430353598, \"J/kg\")) # type: ignore\n assert np.isclose(s.x, Q_(0.5, \"dimensionless\")) # type: ignore", "def test_set_hv(self):\n s = State(substance=\"water\")\n s.hv = Q_(1061602.391543017, \"J/kg\"), Q_(0.4772010021515822, \"m**3/kg\")\n # Pylance does not support NumPy ufuncs\n assert np.isclose(s.T, Q_(373.1242958476843, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.hv[0], Q_(1061602.391543017, \"J/kg\")) # type: ignore\n assert np.isclose(s.hv[1], Q_(0.4772010021515822, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.u, Q_(1013250, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(3028.9867985920914, \"J/(kg*K)\")) # 
type: ignore\n assert np.isclose(s.v, Q_(0.4772010021515822, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(1061602.391543017, \"J/kg\")) # type: ignore\n assert np.isclose(s.x, Q_(0.28475636946248034, \"dimensionless\")) # type: ignore", "def test_bad_property_setting(self):\n s = State(substance=\"water\")\n with pytest.raises(AttributeError):\n # Should be lowercase p\n s.TP = Q_(400.0, \"K\"), Q_(101325.0, \"Pa\")", "def test_set_hT(self):\n s = State(substance=\"water\")\n s.hT = Q_(2730301.3859201893, \"J/kg\"), Q_(400.0, \"K\")\n # Pylance does not support NumPy ufuncs\n assert np.isclose(s.T, Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.p, Q_(101325.0, \"Pa\")) # type: ignore\n assert np.isclose(s.hT[1], Q_(400.0, \"K\")) # type: ignore\n assert np.isclose(s.hT[0], Q_(2730301.3859201893, \"J/kg\")) # type: ignore\n assert np.isclose(s.u, Q_(2547715.3635084038, \"J/kg\")) # type: ignore\n assert np.isclose(s.s, Q_(7496.2021523754065, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cp, Q_(2009.2902478486988, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.cv, Q_(1509.1482452129906, \"J/(kg*K)\")) # type: ignore\n assert np.isclose(s.v, Q_(1.801983936953226, \"m**3/kg\")) # type: ignore\n assert np.isclose(s.h, Q_(2730301.3859201893, \"J/kg\")) # type: ignore\n assert s.x is None", "def testGetThermoData(self):\n spc = Species().fromSMILES('CCC')\n\n self.assertFalse(spc.thermo)\n spc.getThermoData()\n self.assertTrue(spc.thermo)\n thermo = spc.thermo\n spc.getThermoData()\n\n self.assertEquals(id(thermo), id(spc.thermo))\n \n spc.thermo = None\n spc.getThermoData()\n self.assertNotEquals(id(thermo), id(spc.thermo))", "async def test_set_only_target_temp_bad_attr(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n\n with pytest.raises(vol.Invalid):\n await common.async_set_temperature(opp, None, ENTITY_CLIMATE)\n\n await opp.async_block_till_done()\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)", "def test_bad_dimensions(self, prop: str):\n kwargs = {prop: Q_(1.0, \"dimensionless\")}\n if prop == \"v\":\n kwargs[\"T\"] = Q_(300.0, \"K\")\n else:\n kwargs[\"v\"] = Q_(1.0, \"m**3/kg\")\n with pytest.raises(StateError):\n State(substance=\"water\", **kwargs)", "def test_default_rise(self):\n self.employee.give_raise()\n self.assertEqual(self.employee.anual_salary, self.salary + 5000)", "def setUP(self):\r\n print(\"=====Begin test=====\")", "def test_missing_attribute(self):\n assert setup.setup_component(\n self.opp,\n \"binary_sensor\",\n {\n \"binary_sensor\": {\n \"platform\": \"trend\",\n \"sensors\": {\n \"test_trend_sensor\": {\n \"entity_id\": \"sensor.test_state\",\n \"attribute\": \"missing\",\n }\n },\n }\n },\n )\n self.opp.block_till_done()\n\n self.opp.states.set(\"sensor.test_state\", \"State\", {\"attr\": \"2\"})\n self.opp.block_till_done()\n self.opp.states.set(\"sensor.test_state\", \"State\", {\"attr\": \"1\"})\n self.opp.block_till_done()\n state = self.opp.states.get(\"binary_sensor.test_trend_sensor\")\n assert state.state == \"off\"", "async def test_setup_params(opp):\n state = opp.states.get(ENTITY_WATER_HEATER)\n assert state.attributes.get(\"temperature\") == 119\n assert state.attributes.get(\"away_mode\") == \"off\"\n assert state.attributes.get(\"operation_mode\") == \"eco\"" ]
[ "0.61905926", "0.6005518", "0.5960821", "0.58043325", "0.5786635", "0.5720514", "0.5720514", "0.5720514", "0.5720514", "0.5720514", "0.5720514", "0.5720514", "0.5720514", "0.571382", "0.56762445", "0.5656435", "0.565543", "0.5654403", "0.5651507", "0.56441265", "0.5606535", "0.5603533", "0.5602392", "0.5496779", "0.5490827", "0.5490013", "0.548443", "0.548405", "0.5481445", "0.54812264" ]
0.6089088
1
Test setting the auxiliary heater off/false.
async def test_set_aux_heat_off(opp): await common.async_set_aux_heat(opp, False, ENTITY_CLIMATE) await opp.async_block_till_done() state = opp.states.get(ENTITY_CLIMATE) assert state.attributes.get(ATTR_AUX_HEAT) == STATE_OFF
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def async_turn_aux_heat_off(self) -> None:\n self._aux = False\n self.async_write_ha_state()", "def turn_aux_heat_off(self):\n self.set_operation_mode(STATE_HEAT)", "def test_active_off(self):\n\n self.feature_test.set_percentage(0)\n self.assertFalse(self.feature_test.is_active)", "def turn_aux_heat_on(self):\n self._device.set_mode(self._device.MODE_HEAT_EMERGENCY)\n self._device.set_fan(self._device.FAN_AUTO)", "async def async_turn_aux_heat_off(self) -> None:\n await self._set_aux_heat(False)", "def test_turn_off(power_supply):\n power_supply.Init()\n assert power_supply.state() != tango.DevState.OFF\n power_supply.turn_off()\n assert power_supply.state() == tango.DevState.OFF", "def is_aux_heat_on(self):\n return self._device.mode == self._device.MODE_HEAT_EMERGENCY", "def test_change_brightness_of_the_device_false():", "async def async_turn_aux_heat_on(self) -> None:\n self._aux = True\n self.async_write_ha_state()", "def _isoff(self):\n return self.dp.state()==PyTango.DevState.OFF", "def test_thermallyExpands(self):\n self.assertFalse(self.component.THERMAL_EXPANSION_DIMS)", "def all_off():\n print(\"Climate is within set parameters; toggling systems off if any are on\")\n GPIO.output(HEATPIN, RELAYOFF)\n GPIO.output(COOLPIN, RELAYOFF)\n GPIO.output(FANPIN, RELAYOFF)\n time.sleep(30)", "def reset_energizer_flag(self): \r\n self.energizer_flag = False", "async def test_set_aux_heat_on(opp):\n await common.async_set_aux_heat(opp, True, ENTITY_CLIMATE)\n await opp.async_block_till_done()\n\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.attributes.get(ATTR_AUX_HEAT) == STATE_ON", "async def async_turn_aux_heat_on(self) -> None:\n await self._set_aux_heat(True)", "def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)", "def turn_off(self, **kwargs):\n _LOGGER.error(\"DALI TURN OFF\")\n self._state = False\n\n url = self.urlx + '/toggle'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n _LOGGER.error(json_data)\n\n state = json_data['state']\n\n self._dimmer = 0\n\n self._state = state == 'on'", "def turn_off(self, **kwargs):\n self.heater.turn_off()", "async def test_set_away_mode_off(opp):\n await common.async_set_away_mode(opp, False, ENTITY_WATER_HEATER_CELSIUS)\n state = opp.states.get(ENTITY_WATER_HEATER_CELSIUS)\n assert state.attributes.get(\"away_mode\") == \"off\"", "def test_hiding_demo_state(self):\n demo.setup(self.hass, {demo.DOMAIN: {'hide_demo_state': 1}})\n\n self.assertIsNone(self.hass.states.get('a.Demo_Mode'))", "def turn_test_mode_off_by_default(test_mode_off):", "def takeOff(self):\n\t\tself._altHoldController.setTarget(self._MAX_ALTITUDE)\n\t\tself._taking_off = False", "def is_off(self) -> bool:\n return not self.is_on", "def setPowerIfNecessary(self):\n if self.p.power == 0 and self.p.powerDensity > 0:\n self.setPowerFromDensity()", "def _isstandby(self):\n return self.dp.state()==PyTango.DevState.STANDBY", "def is_off(self):\n return self.value == OFF", "def turn_eht_off(self):\n raise NotImplementedError", "def 
is_aux_heat(self) -> bool | None:\n return self._aux", "def heat_on():\n global PAUSED\n print(\"Temp is low; toggling heat on\")\n GPIO.output(COOLPIN, RELAYOFF)\n GPIO.output(FANPIN, RELAYOFF)\n GPIO.output(HEATPIN, RELAYON)\n while (all_temps_avg < TEMPMID or max_temp < TEMPLOW) and (PAUSED == False):\n time.sleep(10)", "def is_water(self):\n return False" ]
[ "0.660986", "0.6608852", "0.64839536", "0.6451368", "0.63626975", "0.63294744", "0.6312525", "0.6208631", "0.61349434", "0.610655", "0.6094739", "0.60530114", "0.60337293", "0.6031825", "0.60150415", "0.583133", "0.5802599", "0.5797975", "0.5787792", "0.57814157", "0.5779655", "0.5750653", "0.57272977", "0.5691497", "0.5683528", "0.56809574", "0.5677227", "0.5658814", "0.5653541", "0.5641305" ]
0.6653747
0
Assert that this platform has a certain feature or raise an exception otherwise.
def assert_has_feature(self, feature_name): if not self.features.get("has_{}".format(feature_name), False): self.raise_config_error("Platform {} does not support to configure {feature_name}. " "Please make sure the platform " "you configured for {feature_name} actually supports that type " "of devices.".format(self.__class__, feature_name=feature_name), 99)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_supported_features(self):", "def is_capable(self, capability):\n return DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def setup(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def is_available(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def is_rooted(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def is_booted(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def verify_support():\n ostype, majorrelease, _ = get_os_release_data()\n if ostype not in _supported_os:\n _logger.info('OS type %s is not supported.', ostype)\n return False\n if majorrelease not in _supported_release:\n _logger.info('OS %s %s is not supported', ostype, majorrelease)\n return False\n return True", "def test_available(self):\n feature_guard = _make_requires(True, \"Error text\")\n results = []\n\n @feature_guard\n def inner():\n results.append(True)\n return True\n\n assert inner() is True\n assert [True] == results", "def test_unavailable(self):\n feature_guard = _make_requires(False, \"Error text\")\n\n @feature_guard\n def inner(): # pragma: nocover\n pytest.fail(\"Should not be called\")\n\n with pytest.raises(NotImplementedError) as e:\n inner()\n\n assert \"Error text\" in str(e.value)", "def has_intel_os(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def check_feature(feature, expected_name='Adriatic Sea',\n expected_type='Polygon'):\n assert feature['properties']['name'] == expected_name\n assert feature['properties']['component'] == 'ocean'\n assert feature['geometry']['type'] == expected_type", "def test_not_supported():\n assert get_accessory(None, State('demo.demo', 'on'), 2, config=None) \\\n is None", "def _platform_compatible():\r\n raise NotImplementedError", "def test_validate_media_player_features():\n config = {}\n attrs = {ATTR_SUPPORTED_FEATURES: 20873}\n entity_state = State(\"media_player.demo\", \"on\", attrs)\n assert validate_media_player_features(entity_state, config) is True\n\n config = {FEATURE_ON_OFF: None}\n assert validate_media_player_features(entity_state, config) is True\n\n entity_state = State(\"media_player.demo\", \"on\")\n assert validate_media_player_features(entity_state, config) is False", "def check_platform(target_platform):\n if target_platform == PLATFORM_LINUX:\n pass\n elif target_platform == PLATFORM_WINDOWS:\n # requires wine\n try:\n subprocess.run([\"wine\", \"--help\"], check=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n except:\n log_error(\"wine needs to be installed\")\n else:\n log_error(f\"something is strange with the platform type '{target_platform}'\")", "def test_back_compat_attributes(self):\n cap = DeviceCapabilities.create(True)\n self.assertTrue(cap.iot_edge)", "def verify_capabilities(self, capabilities) -> bool:\n _pinfo = self.provider_features()\n not_supported = {} # type: Dict[str, Union[str, List[str]]]\n for key, val in capabilities.items():\n if isinstance(val, str):\n if val not in _pinfo.get(key, \"\"):\n not_supported[key] = val\n elif isinstance(val, bool):\n if not _pinfo.get(key) and val:\n not_supported[key] = \"\"\n elif isinstance(val, list):\n unsup = []\n for v in val:\n if v not in _pinfo.get(key, \"\"):\n unsup.append(v)\n if unsup:\n not_supported[key] = unsup\n if not_supported:\n logger.error(\n \"Server does not support the following features: %s\", not_supported\n )\n return False\n return True", "def test_invalid_feature_key(self):\n with 
pytest.raises(AssertionError):\n parse_command({'sleep 1000': {'backgroundish': True}})", "def should_fake_it(self):\n try:\n environment.get(\"FakeIt\")\n return True\n except KeyError:\n return False", "def require_setting(name, feature='this feature'):\n if not self.settings.get(name):\n raise Exception('You must define the \"%s\" setting in your '\n 'application to use %s' % (name, feature))", "def checkAllowed(\n self,\n feature,\n message,\n lineNo=None,\n level=Levels.Fatal ):\n # type: (Text, Text, int, Level, Text) -> bool\n is_allowed = feature in self.allowedFeatures\n if not is_allowed:\n if lineNo is None:\n Issue(self, level=level, message=message )\n else:\n LocalizedSourceIssue(\n sourceFile=self,\n level=level,\n message=message,\n line=lineNo\n )\n return is_allowed", "def test_is_active_of_homework_positive():\n assert oop_hw.is_active()", "async def test_bad_trigger_platform(hass):\n with pytest.raises(vol.Invalid) as ex:\n await async_validate_trigger_config(hass, [{\"platform\": \"not_a_platform\"}])\n assert \"Invalid platform 'not_a_platform' specified\" in str(ex)", "def check_requirement(self):\n raise NotImplementedError", "def __call__(self, feature):\n return self.is_enabled(feature)", "def test_os_system(self):\n self.assertEqual(self.settings.OS_SYSTEM, platform.system())", "def validate_no_win32() -> None:\n try:\n assert sys.platform != \"win32\"\n except AssertionError:\n logger.exception(\"This application cannot run on Windows!\")\n sys.exit(1)", "def is_system(self) -> undefined.UndefinedOr[bool]:", "def retrieve_os_version(self):\n return DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def test_not_supported_requirement(self, space_each_type):\n with pytest.raises(TypeError) as exc:\n build_required_space(space_each_type, type_requirement=\"fasdfasf\")\n assert \"Unsupported\" in str(exc.value)" ]
[ "0.6979583", "0.62230426", "0.61273456", "0.5998881", "0.5865305", "0.5852499", "0.5747555", "0.57348573", "0.572911", "0.56581134", "0.56463474", "0.56033766", "0.559378", "0.5572093", "0.5548158", "0.55380577", "0.5529435", "0.55059284", "0.54845357", "0.54813004", "0.54713666", "0.54681385", "0.5438678", "0.54220766", "0.5355205", "0.5336573", "0.52697986", "0.5263626", "0.5255105", "0.52495915" ]
0.80035317
0
Return config spec for this platform.
def get_config_spec(cls): return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_configspec():\n files = sorted(pkg_resources.resource_listdir(__name__, \"\"))\n # NOTE:\n # Explicit convert the filter results to a list, since the returned\n # iterator can ONLY be used ONCE.\n specfiles = list(filter(lambda fn: fn.endswith(\".conf.spec\"), files))\n if os.environ.get(\"DEBUG_FG21SIM\"):\n print(\"DEBUG: Found config specifications: %s\" % \", \".join(specfiles),\n file=sys.stderr)\n # NOTE:\n # `resource_string()` returns the resource in *binary/bytes* string\n configspec = \"\\n\".join([\n pkg_resources.resource_string(__name__, fn).decode(\"utf-8\")\n for fn in specfiles\n ]).split(\"\\n\")\n return configspec", "def config(self) -> 'outputs.DeviceConfigResponse':\n return pulumi.get(self, \"config\")", "def config(self) -> 'outputs.CSIPowerMaxRevProxySpecConfig':\n return pulumi.get(self, \"config\")", "def device_config(self):\n\t\ttry:\n\t\t\treturn self._dev\n\t\texcept:\n\t\t\treturn 0", "def config(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"config\")", "def spec(self):\n return self._spec", "def spec(self):\n return self._spec", "def convert_spec(spec):\n config = configobj.ConfigObj(configspec=spec)\n\n return config.configspec", "def config(self):\n annotations = IAnnotations(self.context)\n return annotations.get(CONFIGURATION_KEY, {})", "def get_config_template(self) -> cconfig.Config:", "def get_config(self):\n return ConfigFile.from_file(path.join(self.run_dir, \"os-stdin\"))", "def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)", "def get_pecan_config():\n filename = api_config.__file__.replace('.pyc', '.py')\n return filename", "def get_config():\n return CONFIG", "def config(self):\n return self[CONFIG_KEY]", "def get_config():\n return _CONFIG", "def config(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"config\")", "def config(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:\n return pulumi.get(self, \"config\")", "def config(self):\n return self.namespace['config']", "def schema_for_config(self) -> Dict[str, Any]:\n return json.loads(self.__sim.config_schema())", "def getConfig(self):\n \n return self.config", "def get_config_descr(self, name):\n return self.configs[name][1]", "def config(self) -> pulumi.Output['outputs.ConfigResponse']:\n return pulumi.get(self, \"config\")", "def get_config(self, name):\n return self.configs[name][0]", "def getConfiguration(self):\n raise NotImplementedError", "def GetPlatform(self):\n arch = \"None\"\n # check architecture name\n if \"CMTCONFIG\" in os.environ:\n arch = os.environ[\"CMTCONFIG\"]\n elif \"SCRAM_ARCH\" in os.environ:\n arch = os.environ[\"SCRAM_ARCH\"]\n return arch", "def get_system_spec():\n import pkg_resources\n import platform\n\n if sys.platform == 'darwin':\n system_info = 'macOS {} {}'.format(\n platform.mac_ver()[0],\n platform.architecture()[0],\n )\n else:\n system_info = '{} {} {} {}'.format(\n platform.system(),\n '_'.join(platform.architecture()),\n platform.release(),\n platform.machine(),\n )\n\n system_spec = dict(\n raiden=pkg_resources.require(raiden.__name__)[0].version,\n python_implementation=platform.python_implementation(),\n python_version=platform.python_version(),\n system=system_info,\n )\n return system_spec", "def get_config(self):\n return self.config", "def config(self) -> pulumi.Input['ConfigArgs']:\n return pulumi.get(self, \"config\")", "def env_spec(self):\n return self._env_spec" ]
[ "0.7266349", "0.6795872", "0.6613083", "0.64155155", "0.63677096", "0.6321028", "0.6321028", "0.62734276", "0.6209236", "0.6196945", "0.61863", "0.6152768", "0.614142", "0.6137562", "0.61059767", "0.6094757", "0.6090384", "0.6090384", "0.60408014", "0.60370034", "0.60344994", "0.6025086", "0.60045683", "0.60010695", "0.59881127", "0.59853214", "0.59708834", "0.596719", "0.5938015", "0.5935415" ]
0.7307367
0
Perform a firmware update.
def update_firmware(self) -> str:
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_firmware(self):\n self.execute_command(CMD_UPDATE_FIRMWARE)", "async def update(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n\n # tell the user we are updating\n self.add_to_output(f\"Updating...\")\n # create ssh connection to miner\n try:\n conn = await self.get_connection(\"root\", \"admin\")\n # tell the user we are sending the update file\n self.add_to_output(\"Sending upgrade file...\")\n # send the update file\n await self.send_file(UPDATE_FILE_S9, \"/tmp/firmware.tar\")\n # install the update and collect the result\n result = await conn.run(f'sysupgrade /tmp/firmware.tar')\n self.add_to_output(result.stdout.strip())\n # tell the user the update completed\n self.add_to_output(f\"Update completed...\")\n except OSError:\n self.add_to_output(f\"Unknown error...\")", "def update_firmware(self) -> None:\n\n BROADCAST_ID = 0xFFF\n firmware_update_message = self.__set_module_state(\n BROADCAST_ID, Module.State.UPDATE_FIRMWARE, Module.State.PNP_OFF\n )\n self._send_q.put(firmware_update_message)\n self.__delay()", "def update_firmware(firmware_path, script_path):\n\n args = ['uflash', '-r', firmware_path, script_path]\n subprocess.call(args)", "def fusion_api_li_upgrade_firmware(self, body=None, uri=None, api=None, param='', headers=None):\n param = '/firmware'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)", "def update_firmware(self):\n return self._dll.JLINKARM_UpdateFirmwareIfNewer()", "def update_firmware(self, file_url, component_type):\n fw_update_uri = self._get_firmware_update_service_resource()\n action_data = {\n 'Action': 'InstallFromURI',\n 'FirmwareURI': file_url,\n }\n\n # perform the POST\n LOG.debug(self._('Flashing firmware file: %s ...'), file_url)\n status, headers, response = self._rest_post(\n fw_update_uri, None, action_data)\n if status != 200:\n msg = self._get_extended_error(response)\n raise exception.IloError(msg)\n\n # wait till the firmware update completes.\n common.wait_for_ris_firmware_update_to_complete(self)\n\n try:\n state, percent = self.get_firmware_update_progress()\n except exception.IloError:\n msg = 'Status of firmware update not known'\n LOG.debug(self._(msg)) # noqa\n return\n\n if state == \"ERROR\":\n msg = 'Unable to update firmware'\n LOG.debug(self._(msg)) # noqa\n raise exception.IloError(msg)\n elif state == \"UNKNOWN\":\n msg = 'Status of firmware update not known'\n LOG.debug(self._(msg)) # noqa\n else: # \"COMPLETED\" | \"IDLE\"\n LOG.info(self._('Flashing firmware file: %s ... 
done'), file_url)", "def fusion_api_upgrade_appliance_firmware(self, localfile, api=None, headers=None):\n param = '?file=%s' % localfile\n return self.appfirmware.update(api, headers, param)", "def update_firmware(self, file_url, reinstall=False,\n exclude_npar_fw=False):\n try:\n update_service_inst = self._sushy.get_update_service()\n update_service_inst.flash_firmware(\n self, file_url, reinstall, exclude_npar_fw)\n except sushy.exceptions.SushyError as e:\n msg = (self._('The Redfish controller failed to update firmware '\n 'with firmware %(file)s Error %(error)s') %\n {'file': file_url, 'error': str(e)})\n LOG.debug(msg)\n raise exception.SDFlexError(msg)", "def command_update_hw(self, cmd):\n # TODO\n pass", "def update(self):\n ckresult(_dll.FMOD_System_Update(self._ptr))", "def update():", "def update():", "def update( ):\r\n pass", "def doFirmwareUpgrade(self, serial, unitId, fwFile):\n \n b = self.getBridge(serial)\n \n if unitId != 0:\n # We are going to upgrade the motes\n b.upgradeThread = SkymoteFirmwareUpgraderThread(b, fwFile, upgradeMotes = True, recovery = False)\n else:\n # We are going to upgrade the bridge\n b.upgradeThread = SkymoteFirmwareUpgraderThread(b, fwFile, upgradeMotes = False, recovery = False)\n \n b.upgradeThread.start()\n \n return True", "def update_firmware(self, node, port):\n return hpsum_controller.update_firmware(node)", "def run_update():\n\n args = _parse_arguments()\n\n # get dependencies\n dependencies = get_dependencies(args.folder)\n\n # get update config of dependencies\n update_info = get_update_info()\n\n install_queue = build_queue(\n update_info, dependencies, args.archive\n )\n\n print(\"install_queue\", install_queue)\n if install_queue is not None:\n build_wheels(install_queue)\n install_wheels(install_queue)", "def fusion_api_le_firmware_update(self, body=None, uri=None, api=None, headers=None, etag=None):\n return self.logical_enclosure.patch(body, uri, api, headers, etag)", "def update_data(update_method):\n log.debug('Starting update')\n cmd = ['/usr/bin/python', wf.workflowfile('update.py')]\n if update_method == 'force':\n cmd.append('--update')\n cmd.append('force')\n\n # Update projects data\n log.debug('Run update command : {}'.format(cmd))\n run_in_background('update', cmd)\n\n return 0", "def update(self):\n try:\n self._device.update()\n except requests.exceptions.HTTPError as ex:\n _LOGGER.warning(\"Fritzhome connection error: %s\", ex)\n self._fritz.login()", "def main():\n parser = argparse.ArgumentParser()\n register_device_args(parser)\n register_update_args(parser, default_os_check='update', default_pave=False)\n args = parser.parse_args()\n update(args.system_image_dir, args.os_check, args.target_id,\n args.serial_num, args.pave)", "def update(self):\n self.device.update()", "def update(self):\n self.device.update()", "def with_firmware_update(self, firmware_handler: FirmwareHandler): # type: ignore\n self.logger.debug(f\"Firmware handler: {firmware_handler}\")\n if self.file_management is None:\n raise RuntimeError(\n \"File management must be enabled before firmware update\"\n )\n self.firmware_update = OSFirmwareUpdate(\n firmware_handler, self._on_firmware_update_status\n )\n\n return self", "def update(self):\n return self._process('update')", "def update(self):\n self._device.update()", "def do_update(self, addon):\n self.update_error.emit(\n addon,\n NotImplementedError('An updater is not installed.')\n )", "def update(*args):", "def _update(self, force=False):\n if self.autoupdate:\n self.update(force)", 
"def update():\n print \"Attempting to update....\"\n program_location = sys.executable\n program_name = \"youtube-dl.exe\"\n update_arg = \"--update\"\n command = [program_name, update_arg]\n result = subprocess.call(command)\n print \"Command result: \", result\n time.sleep(5)\n print \"Finished updating.\"" ]
[ "0.8272702", "0.7195315", "0.71505", "0.71459144", "0.67883563", "0.67392296", "0.664074", "0.6583564", "0.6561077", "0.65192825", "0.64113843", "0.63289976", "0.63289976", "0.62914103", "0.62672305", "0.62541914", "0.62110126", "0.61896014", "0.6174201", "0.6156075", "0.6145443", "0.6107857", "0.6107857", "0.6075549", "0.59981036", "0.59710443", "0.59659773", "0.5922935", "0.583808", "0.5834344" ]
0.7670653
1
Subclass this method in a platform module to configure the DMD. This method should return a reference to the DMD's platform interface method which will receive the frame data.
def configure_dmd(self) -> "DmdPlatformInterface": raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure_dmd(self):\n raise NotImplementedError", "def configure_rgb_dmd(self, name: str) -> \"DmdPlatformInterface\":\n raise NotImplementedError", "def __init__(self, dataFrame):\n self.dataFrame = dataFrame", "def __init__(self):\n super().__init__()\n self.dmdParams = {} # dmd settings container\n self.printTag = 'DMD' # print tag\n self._dynamicHandling = True # This ROM is able to manage the time-series on its own. No need for special treatment outside\n self.pivotParameterID = None # pivot parameter\n # variables filled up in the training stages\n self._amplitudes = {} # {'target1': vector of amplitudes,'target2':vector of amplitudes, etc.}\n self._eigs = {} # {'target1': vector of eigenvalues,'target2':vector of eigenvalues, etc.}\n self._modes = {} # {'target1': matrix of dynamic modes,'target2':matrix of dynamic modes, etc.}\n self.__Atilde = {} # {'target1': matrix of lowrank operator from the SVD,'target2':matrix of lowrank operator from the SVD, etc.}\n self.pivotValues = None # pivot values (e.g. time)\n self.KDTreeFinder = None # kdtree weighting model\n self.timeScales = {} # time-scales (training and dmd). {'training' and 'dmd':{t0:float,'dt':float,'intervals':int}}\n self.featureVals = None # feature values", "def expose_data(self):\r\n return _ExposedFarmData(self._platforms, self._awaiting, self._channels)", "def configure(self):\n\n self.platform.configure()", "def __init__(self):\n self.hmd = None\n self.vr_render_models = None\n self.render_width = 0\n self.render_height = 0", "def __init__(self):\r\n super(DataTarget, self).__init__()", "def configure_rgb_dmd(self):\n raise NotImplementedError", "def from_hdf(self, hdf=None, group_name=None):\n super(ParameterMaster, self).from_hdf(hdf=hdf, group_name=group_name)\n with self.project_hdf5.open(\"input\") as hdf5_input:\n self.iteration_frame = pandas.DataFrame(hdf5_input[\"dataframe\"])", "def __call__(self):\n\n # create dataframes of relevant sections from the INP\n for ix, sect in enumerate(self.config['inp_sections']):\n if ix == 0:\n df = create_dataframeINP(self.inp.path, sect, comment_cols=False)\n else:\n df_other = create_dataframeINP(self.inp.path, sect, comment_cols=False)\n df = df.join(df_other)\n\n if self.rpt:\n for rpt_sect in self.config['rpt_sections']:\n df = df.join(create_dataframeRPT(self.rpt.path, rpt_sect))\n\n # add conduit coordinates\n xys = df.apply(lambda r: get_link_coords(r, self.inp.coordinates, self.inp.vertices), axis=1)\n df = df.assign(coords=xys.map(lambda x: x[0]))\n\n # make inlet/outlet node IDs string type\n df.InletNode = df.InletNode.astype(str)\n df.OutletNode = df.OutletNode.astype(str)\n\n return df", "def _ensure_dframe(self):\n if self.dframe is None:\n self.dframe = self.dataset.dframe()", "def initDataFrame(self,referenceID, content):\r\n # Strip any colons in the mac address\r\n self.referenceID = referenceID\r\n\r\n # Set the frame content\r\n self.content = str(content)\r\n\r\n # Set the content length\r\n self.contentLength = len(self.content)\r\n\r\n # Set the correct frame message type\r\n self.mesgType = MULTIPLEXER_DATA_FORWARD", "def setDataFrame(self, dataFrame, copyDataFrame=False):\n if not isinstance(dataFrame, pandas.core.frame.DataFrame):\n raise TypeError(\"not of type pandas.core.frame.DataFrame\")\n\n self.layoutAboutToBeChanged.emit()\n if copyDataFrame:\n self._dataFrame = dataFrame.copy()\n else:\n self._dataFrame = dataFrame\n\n self._columnDtypeModel = ColumnDtypeModel(dataFrame)\n 
self._columnDtypeModel.dtypeChanged.connect(self.propagateDtypeChanges)\n self._columnDtypeModel.changeFailed.connect(\n lambda columnName, index, dtype: self.changingDtypeFailed.emit(columnName, index, dtype)\n )\n self.layoutChanged.emit()\n self.dataChanged.emit()\n self.dataFrameChanged.emit()", "def __init__(self, epics_only=False, *args, **kwargs):\n self._kwargs = {}\n self._detectors = {}\n self._det_list = [] \n self._det_aliases = {}\n self._psplots = {}\n self._event_functions = {}\n self._source_attrs = []\n self._evt_time_last = (0,0)\n self.ievent = 0\n self._reloadOnLoadRun = False\n self._reloadOnNextEvent = False\n self.psana_cfg_dict = {}\n self._default_module_path = ''\n\n# self._user_attrs = {}\n# self._histograms = {}\n \n for key in kwargs:\n self._kwargs[key] = kwargs[key] \n if key in self._exp_defaults:\n setattr(self,key,kwargs[key])\n print 'setting ',key, kwargs[key]\n\n self._device_config = read_device_config(**kwargs)\n self._device_sets = self._device_config['device_sets'] \n self._device_types = self._device_config['device_types'] \n\n for det in self._device_sets:\n if 'det' in self._device_sets[det]:\n if ('detName' in self._device_sets[det]['det'] or\n 'typeName' in self._device_sets[det]['det']):\n self._det_list.append(det)\n if 'det_key' in self._device_sets[det]['det']:\n det_key = self._device_sets[det]['det']['det_key']\n self._det_aliases[det_key] = det \n else:\n pass\n \n# if 'pvs' in self._device_sets[det]:\n# for attr in self._device_sets[det]['pvs']:\n# pvbase = self._device_sets[det]['pvs'][attr]['base']\n# alias = '_'.join([det,attr])\n# self.add_pv(pvbase, alias)\n\n self.set_exp_defaults(**kwargs)\n if not self._kwargs.get('noload'):\n self.data_source = self.get_data_source(**kwargs)\n print 'Data Source = ', self.data_source\n else:\n self.data_source = None\n\n if not self.data_source:\n self._kwargs['noload'] = True\n else:\n kwargs['run'] = self.run\n\n# if self._kwargs.get('noload') or self.live:\n# if self._kwargs.get('epics_live'):\n# self.set_kwargs(ami=True)\n \n if self._kwargs.get('ami'):\n print 'loading ami'\n self.load_ami(**kwargs)\n\n if not self._kwargs.get('noload'):\n print 'loading run'\n self.load_run(*args, **kwargs)\n self._no_epicsStore = False\n \n print 'Instrument = ', self.instrument\n\n if self._kwargs.get('epics_live'): # and self._kwargs.get('epics_file'):\n print 'loading epics'\n self.load_epicsLive(**kwargs)\n\n if self.ds and self.live:\n self.next_event()\n \n if self.ds and self._reloadOnNextEvent:\n self.next_event()\n \n if not self.ds:\n self._no_epicsStore = True\n self._no_evtData = True\n for det in self._device_sets:\n if 'pvs' in self._device_sets[det]:\n print 'Adding epics ',det\n self.add_detector(det)", "def from_platform(self):\n project_name = self.platform_params['project_name']\n project_id = self.platform_params['project_id']\n dataset_name = self.platform_params['dataset_name']\n dataset_id = self.platform_params['dataset_id']\n item_filepath = self.platform_params['item_filepath']\n item_id = self.platform_params['item_id']\n\n # load remote item\n if dataset_id is None:\n self.project = dl.projects.get(project_name=project_name, project_id=project_id)\n if self.project is None:\n raise ValueError('Project doesnt exists. 
name: %s, id: %s' % (project_name, project_id))\n self.dataset = self.project.datasets.get(dataset_name=dataset_name, dataset_id=dataset_id)\n else:\n self.dataset = dl.datasets.get(dataset_id=dataset_id)\n if self.dataset is None:\n raise ValueError('Dataset doesnt exists. name: %s, id: %s' % (dataset_name, dataset_id))\n self.item = self.dataset.items.get(filepath=item_filepath, item_id=item_id)\n if self.item is None:\n raise ValueError('Item doesnt exists. name: %s, id: %s' % (item_filepath, item_id))\n self.labels = {label.tag: label.rgb for label in self.dataset.labels}\n _, ext = os.path.splitext(self.item.filename[1:])\n video_filename = os.path.join(self.dataset.__get_local_path__(), self.item.filename[1:])\n if not os.path.isdir(os.path.dirname(video_filename)):\n os.makedirs(os.path.dirname(video_filename))\n if not os.path.isfile(video_filename):\n self.item.download(local_path=os.path.dirname(video_filename), to_items_folder=False)\n self.video_source = video_filename\n self.video_annotations = self.item.annotations.list()", "def __init__(self, dataframe):\n self._dataframe = dataframe \n self._data_grouped_by_manufacturer = self._group_by_manufacturer()\n self._data_agg_by_mean_value = self._agg_by_mean()\n self._formatted_data = self._format_data()", "def setup(self):\r\n ScriptedLoadableModuleWidget.setup(self)\r\n\r\n # Load widget from .ui file (created by Qt Designer).\r\n # Additional widgets can be instantiated manually and added to self.layout.\r\n uiWidget = slicer.util.loadUI(self.resourcePath('UI/RecordHerniaData.ui'))\r\n self.layout.addWidget(uiWidget)\r\n self.ui = slicer.util.childWidgetVariables(uiWidget)\r\n\r\n # Set scene in MRML widgets. Make sure that in Qt designer the top-level qMRMLWidget's\r\n # \"mrmlSceneChanged(vtkMRMLScene*)\" signal in is connected to each MRML widget's.\r\n # \"setMRMLScene(vtkMRMLScene*)\" slot.\r\n uiWidget.setMRMLScene(slicer.mrmlScene)\r\n\r\n # Create logic class. 
Logic implements all computations that should be possible to run\r\n # in batch mode, without a graphical user interface.\r\n self.logic = TMSRecordDataModuleLogic()\r\n self.recordingStarted = False\r\n self.camerasStarted = False\r\n self.moduleDir = os.path.dirname(slicer.modules.tmsrecorddatamodule.path)\r\n self.logic.setupScene()\r\n\r\n # Buttons\r\n self.ui.StartStopRecordingButton.connect('clicked(bool)', self.onStartStopRecordingClicked)\r\n self.ui.startCamerasButton.connect('clicked(bool)',self.onStartStopCamerasClicked)", "def __init__(self, df):\n self.df = df", "def __init__(self, *args, **kwargs):\n ignore_version = kwargs.pop('ignore_version', False)\n\n super(Hdf5, self).__init__(*args, **kwargs)\n\n # If True, always translate __getitem__ requests according to the\n # schema, even if __getitem__ requests a dataset that exists\n self.always_translate = False\n\n self._version = self.attrs.get('version')\n if isinstance(self._version, bytes):\n self._version = self._version.decode()\n self._timesteps = {}\n\n # Connect the schema map to this object\n if self._version in SCHEMA:\n self.schema = SCHEMA[self._version]\n elif self._version is None:\n self.schema = {}\n elif not ignore_version:\n raise KeyError(\"Unknown schema version %s\" % self._version)\n\n # Connect the schema dataset providers to this object\n if self._version in SCHEMA_DATASET_PROVIDERS:\n self.dataset_providers = SCHEMA_DATASET_PROVIDERS[self._version]\n else:\n self.dataset_providers = {}", "def __init__(self, cfg):\n super(DKInfluxDB, self).__init__(cfg, 'influxdb')", "def setup(self, ds):\n pass", "def __init__(self, platform_name, sensor_key, data_service):\n super().__init__(platform_name, sensor_key, data_service)\n\n self._attributes = {}", "def __init__(self, parent): \n \n self.parent = parent\n \n self.custom_channel_name = _qstring(parent.rhd)\n self.native_channel_name = _qstring(parent.rhd)\n self.native_order = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.custom_order = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.signal_type = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.channel_enabled = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.chip_channel = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.board_stream = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.spike_scope_voltage_trigger_mode= np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.spike_scope_voltage_threshold = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.spike_scope_digital_trigger_channel = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.spike_scope_digital_edge_polarity = np.int16(struct.unpack('h', parent.rhd.read(2)))[0]\n self.electrode_impedance_magnitude = np.float32(struct.unpack('f', parent.rhd.read(4)))[0]\n self.electrode_impedance_phase = np.float32(struct.unpack('f', parent.rhd.read(4)))[0]\n\n if self.signal_type == 0 and self.channel_enabled:#Add name to the amplifier channel list\n parent._AMPLIFIER_CHANNELS.append(self.native_channel_name)\n\n if self.signal_type == 1 and self.channel_enabled:#Add name to the aux channel list\n parent._AUX_CHANNELS.append(self.native_channel_name)\n\n if self.signal_type == 2 and self.channel_enabled:#Supply voltage\n parent._SUPPLY_VOLTAGE_CHANNELS.append(self.native_channel_name)\n\n if self.signal_type == 3 and self.channel_enabled:#usb board adc input channel\n parent._ADC_INPUT_CHANNELS.append(self.native_channel_name)\n\n if self.signal_type == 4 and 
self.channel_enabled:#usb board digital input channel\n parent._DIGITAL_INPUT_CHANNELS.append(self.native_channel_name)", "def port_maker(self, platform):\n raise NotImplementedError()", "def _configure(self):\n Component._configure(self)\n self.dataDim = self.inventory.dataDim\n self.reader = self.inventory.reader\n self.coordsys = self.inventory.coordsys\n return", "def __init__(self, link=None, first_slice_angle=None, floor=None, plot_empty_cells_type=None, auto_scaling=None, style=None, series_axis=None, value_axis=None, show_data_table=None, is3_d=None, chart_area=None, elevation=None, side_wall=None, type=None, title=None, walls=None, back_wall=None, chart_data_table=None, height_percent=None, gap_width=None, legend=None, chart_object=None, is_rectangular_cornered=None, second_category_axis=None, second_value_axis=None, placement=None, name=None, size_with_window=None, right_angle_axes=None, plot_visible_cells=None, show_legend=None, pivot_source=None, depth_percent=None, print_size=None, gap_depth=None, shapes=None, walls_and_gridlines2_d=None, n_series=None, rotation_angle=None, plot_area=None, category_axis=None, perspective=None, hide_pivot_field_buttons=None, page_setup=None, **kw):\n self.container = {}\n\t\t \n \"\"\"\n Chart - a model defined in Swagger\n \"\"\"\n\n self.container['link'] = None\n self.container['first_slice_angle'] = None\n self.container['floor'] = None\n self.container['plot_empty_cells_type'] = None\n self.container['auto_scaling'] = None\n self.container['style'] = None\n self.container['series_axis'] = None\n self.container['value_axis'] = None\n self.container['show_data_table'] = None\n self.container['is3_d'] = None\n self.container['chart_area'] = None\n self.container['elevation'] = None\n self.container['side_wall'] = None\n self.container['type'] = None\n self.container['title'] = None\n self.container['walls'] = None\n self.container['back_wall'] = None\n self.container['chart_data_table'] = None\n self.container['height_percent'] = None\n self.container['gap_width'] = None\n self.container['legend'] = None\n self.container['chart_object'] = None\n self.container['is_rectangular_cornered'] = None\n self.container['second_category_axis'] = None\n self.container['second_value_axis'] = None\n self.container['placement'] = None\n self.container['name'] = None\n self.container['size_with_window'] = None\n self.container['right_angle_axes'] = None\n self.container['plot_visible_cells'] = None\n self.container['show_legend'] = None\n self.container['pivot_source'] = None\n self.container['depth_percent'] = None\n self.container['print_size'] = None\n self.container['gap_depth'] = None\n self.container['shapes'] = None\n self.container['walls_and_gridlines2_d'] = None\n self.container['n_series'] = None\n self.container['rotation_angle'] = None\n self.container['plot_area'] = None\n self.container['category_axis'] = None\n self.container['perspective'] = None\n self.container['hide_pivot_field_buttons'] = None\n self.container['page_setup'] = None\n\n if link is not None:\n self.link = link\n if first_slice_angle is not None:\n self.first_slice_angle = first_slice_angle\n if floor is not None:\n self.floor = floor\n if plot_empty_cells_type is not None:\n self.plot_empty_cells_type = plot_empty_cells_type\n if auto_scaling is not None:\n self.auto_scaling = auto_scaling\n if style is not None:\n self.style = style\n if series_axis is not None:\n self.series_axis = series_axis\n if value_axis is not None:\n self.value_axis = value_axis\n if 
show_data_table is not None:\n self.show_data_table = show_data_table\n if is3_d is not None:\n self.is3_d = is3_d\n if chart_area is not None:\n self.chart_area = chart_area\n if elevation is not None:\n self.elevation = elevation\n if side_wall is not None:\n self.side_wall = side_wall\n if type is not None:\n self.type = type\n if title is not None:\n self.title = title\n if walls is not None:\n self.walls = walls\n if back_wall is not None:\n self.back_wall = back_wall\n if chart_data_table is not None:\n self.chart_data_table = chart_data_table\n if height_percent is not None:\n self.height_percent = height_percent\n if gap_width is not None:\n self.gap_width = gap_width\n if legend is not None:\n self.legend = legend\n if chart_object is not None:\n self.chart_object = chart_object\n if is_rectangular_cornered is not None:\n self.is_rectangular_cornered = is_rectangular_cornered\n if second_category_axis is not None:\n self.second_category_axis = second_category_axis\n if second_value_axis is not None:\n self.second_value_axis = second_value_axis\n if placement is not None:\n self.placement = placement\n if name is not None:\n self.name = name\n if size_with_window is not None:\n self.size_with_window = size_with_window\n if right_angle_axes is not None:\n self.right_angle_axes = right_angle_axes\n if plot_visible_cells is not None:\n self.plot_visible_cells = plot_visible_cells\n if show_legend is not None:\n self.show_legend = show_legend\n if pivot_source is not None:\n self.pivot_source = pivot_source\n if depth_percent is not None:\n self.depth_percent = depth_percent\n if print_size is not None:\n self.print_size = print_size\n if gap_depth is not None:\n self.gap_depth = gap_depth\n if shapes is not None:\n self.shapes = shapes\n if walls_and_gridlines2_d is not None:\n self.walls_and_gridlines2_d = walls_and_gridlines2_d\n if n_series is not None:\n self.n_series = n_series\n if rotation_angle is not None:\n self.rotation_angle = rotation_angle\n if plot_area is not None:\n self.plot_area = plot_area\n if category_axis is not None:\n self.category_axis = category_axis\n if perspective is not None:\n self.perspective = perspective\n if hide_pivot_field_buttons is not None:\n self.hide_pivot_field_buttons = hide_pivot_field_buttons\n if page_setup is not None:\n self.page_setup = page_setup", "def _setup(self):\n\n from AlGDock.topology import Topology\n self.top = Topology(self.args)\n self.top_RL = Topology(self.args, includeReceptor=True)\n\n # Initialize rmsd calculation function\n from AlGDock.RMSD import hRMSD\n self.get_rmsds = hRMSD(self.args.FNs['prmtop']['L'], \\\n self.top.inv_prmtop_atom_order_L)\n\n # Obtain reference pose\n if self.data['CD'].pose > -1:\n if ('starting_poses' in self.data['CD'].confs.keys()) and \\\n (self.data['CD'].confs['starting_poses'] is not None):\n starting_pose = np.copy(self.data['CD'].confs['starting_poses'][0])\n else:\n (confs, Es) = self._get_confs_to_rescore(site=False, \\\n minimize=False, sort=False)\n if self.args.params['CD']['pose'] < len(confs):\n starting_pose = np.copy(confs[self.args.params['CD']['pose']])\n self.data['CD'].confs['starting_poses'] = [np.copy(starting_pose)]\n else:\n self._clear('CD')\n self._store_infinite_f_RL()\n raise Exception('Pose index greater than number of poses')\n else:\n starting_pose = None\n\n from AlGDock.system import System\n self.system = System(self.args,\n self.log,\n self.top,\n self.top_RL,\n starting_pose=starting_pose)\n\n # Measure the binding site\n if 
(self.args.params['CD']['site'] == 'Measure'):\n self.args.params['CD']['site'] = 'Sphere'\n if self.args.params['CD']['site_measured'] is not None:\n (self.args.params['CD']['site_max_R'],self.args.params['CD']['site_center']) = \\\n self.args.params['CD']['site_measured']\n else:\n print '\\n*** Measuring the binding site ***'\n self.system.setParams(\n self.system.paramsFromAlpha(1.0, 'CD', site=False))\n (confs, Es) = self._get_confs_to_rescore(site=False, minimize=True)\n if len(confs) > 0:\n # Use the center of mass for configurations\n # within 20 RT of the lowest energy\n cutoffE = Es['total'][-1] + 20 * (R * self.T)\n coms = []\n for (conf, E) in reversed(zip(confs, Es['total'])):\n if E <= cutoffE:\n self.top.universe.setConfiguration(\n Configuration(self.top.universe, conf))\n coms.append(np.array(self.top.universe.centerOfMass()))\n else:\n break\n print ' %d configurations fit in the binding site' % len(coms)\n coms = np.array(coms)\n center = (np.min(coms, 0) + np.max(coms, 0)) / 2\n max_R = max(\n np.ceil(np.max(np.sqrt(np.sum(\n (coms - center)**2, 1))) * 10.) / 10., 0.6)\n self.args.params['CD']['site_max_R'] = max_R\n self.args.params['CD']['site_center'] = center\n self.top.universe.setConfiguration(\n Configuration(self.top.universe, confs[-1]))\n if ((self.args.params['CD']['site_max_R'] is None) or \\\n (self.args.params['CD']['site_center'] is None)):\n raise Exception('No binding site parameters!')\n else:\n self.args.params['CD']['site_measured'] = \\\n (self.args.params['CD']['site_max_R'], \\\n self.args.params['CD']['site_center'])\n\n # Read the reference ligand and receptor coordinates\n import AlGDock.IO\n IO_crd = AlGDock.IO.crd()\n if self.args.FNs['inpcrd']['R'] is not None:\n if os.path.isfile(self.args.FNs['inpcrd']['L']):\n lig_crd = IO_crd.read(self.args.FNs['inpcrd']['L'], multiplier=0.1)\n self.data['CD'].confs['receptor'] = IO_crd.read(\\\n self.args.FNs['inpcrd']['R'], multiplier=0.1)\n elif self.args.FNs['inpcrd']['RL'] is not None:\n complex_crd = IO_crd.read(self.args.FNs['inpcrd']['RL'], multiplier=0.1)\n lig_crd = complex_crd[self.top_RL.L_first_atom:self.top_RL.L_first_atom + \\\n self.top.universe.numberOfAtoms(),:]\n self.data['CD'].confs['receptor'] = np.vstack(\\\n (complex_crd[:self.top_RL.L_first_atom,:],\\\n complex_crd[self.top_RL.L_first_atom + self.top.universe.numberOfAtoms():,:]))\n elif self.args.FNs['inpcrd']['L'] is not None:\n self.data['CD'].confs['receptor'] = None\n if os.path.isfile(self.args.FNs['inpcrd']['L']):\n lig_crd = IO_crd.read(self.args.FNs['inpcrd']['L'], multiplier=0.1)\n else:\n lig_crd = None\n\n if lig_crd is not None:\n self.data['CD'].confs['ligand'] = lig_crd[self.top.\n inv_prmtop_atom_order_L, :]\n self.top.universe.setConfiguration(\\\n Configuration(self.top.universe,self.data['CD'].confs['ligand']))\n if self.top_RL.universe is not None:\n self.top_RL.universe.setConfiguration(\\\n Configuration(self.top_RL.universe, \\\n np.vstack((self.data['CD'].confs['receptor'],self.data['CD'].confs['ligand']))))\n\n if self.args.params['CD']['rmsd'] is not False:\n if self.args.params['CD']['rmsd'] is True:\n if lig_crd is not None:\n rmsd_crd = lig_crd[self.top.inv_prmtop_atom_order_L, :]\n else:\n raise Exception('Reference structure for rmsd calculations unknown')\n else:\n rmsd_crd = IO_crd.read(self.args.params['CD']['rmsd'], \\\n natoms=self.top.universe.numberOfAtoms(), multiplier=0.1)\n rmsd_crd = rmsd_crd[self.top.inv_prmtop_atom_order_L, :]\n self.data['CD'].confs['rmsd'] = rmsd_crd\n\n 
self.get_rmsds.set_ref_configuration(self.data['CD'].confs['rmsd'])\n\n # If configurations are being rescored, start with a docked structure\n (confs, Es) = self._get_confs_to_rescore(site=False, minimize=False)\n if len(confs) > 0:\n self.top.universe.setConfiguration(\n Configuration(self.top.universe, confs[-1]))\n\n from AlGDock.simulation_iterator import SimulationIterator\n self.iterator = SimulationIterator(self.args, self.top, self.system)\n\n # Load progress\n from AlGDock.postprocessing import Postprocessing\n Postprocessing(self.args, self.log, self.top, self.top_RL, self.system, self.data, self.save).run(readOnly=True)\n\n self.calc_f_L(readOnly=True)\n self.calc_f_RL(readOnly=True)\n\n if self.args.random_seed > 0:\n np.random.seed(self.args.random_seed)", "def __init__(self, df, extras=None, **kwtraits):\n\n super(PandasPlotData, self).__init__(**kwtraits) #Trait initialization \n \n if len(df.shape) > 2:\n raise NotImplementedError('Multidimensional dfs of order 3 or higher \\\n\t are not supported by in PandasPlotData') #Do 1d arrays work?\n \n self.set_df(df)", "def get_frame_data(self):\n # FrameObject is a dictionary of slot names and values.\n frameObject = self.pgdb.sendPgdbFnCall('get-frame-object', self.frameid)\n if not frameObject:\n raise PythonCycError(\"Could not retrieve frame \"+self.frameid+\" from organism (orgid) \"+self.pgdb._orgid)\n else:\n self._gotframe = True\n # Modify slot names to allow Python's syntax (e.g., '_' instead of '-').\n for slot in frameObject:\n self.__dict__[convertLispIdtoPythonId(slot)] = frameObject[slot]\n return self" ]
[ "0.6838673", "0.57695377", "0.5759185", "0.5749999", "0.5472397", "0.54186225", "0.540797", "0.5396732", "0.5386362", "0.5366188", "0.5345183", "0.529332", "0.5250661", "0.5228156", "0.52225274", "0.5218811", "0.52103", "0.52002627", "0.5152777", "0.514994", "0.51410395", "0.51262885", "0.51037025", "0.51020676", "0.50894606", "0.5088446", "0.5085413", "0.5085135", "0.50845426", "0.5074343" ]
0.7256855
0
Return a reference to the hardware sound interface.
def configure_hardware_sound_system(self) -> "HardwareSoundPlatformInterface": raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_mixer_dev(self):\n\t\treturn call_sdk_function('PrlVmDevSound_GetMixerDev', self.handle)", "def get_output_dev(self):\n\t\treturn call_sdk_function('PrlVmDevSound_GetOutputDev', self.handle)", "def getSound(self):\r\n return self._shipsound", "def get_sound(self, ):\r\n return _channeldata[self.chan].sound", "def device(self):\n hw = self.hw()\n if hw: return hw.device()", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def _get_interface(self):\n return self.__interface", "def sound(self, where, stream=True):\n cook = cookie()\n S = Sound(cook, self)\n self.call('sound', cook, where, stream and 1 or 0)\n return S", "def get_sound_mixer_dev(self, nIndex):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfg_GetSoundMixerDev', self.handle, nIndex))", "def get_sound_dev(self, nIndex):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetSoundDev', self.handle, nIndex))", "def interface(self) -> type:\n return self.get_interface()", "def hardware(self):\n return self._hardware", "def _get_bus(self, name: str, interface: str) -> Optional[dbus.Interface]:\n try:\n bus = self.session.get_object(\"org.mpris.MediaPlayer2.\" + name, \"/org/mpris/MediaPlayer2\")\n return dbus.Interface(bus, interface)\n except dbus.exceptions.DBusException:\n return None", "def interface(self):\n return self._interface", "def GetWiredInterface(self):\n return str(self.wired.wired_interface)", "def _get_interface_ref(self):\n return self.__interface_ref", "def _get_interface_ref(self):\n return self.__interface_ref" ]
[ "0.6874726", "0.655885", "0.6412684", "0.63888556", "0.6375075", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.60455745", "0.59725666", "0.59484816", "0.58796483", "0.5839662", "0.5839225", "0.5826897", "0.5800476", "0.5789186", "0.57485104", "0.57485104" ]
0.7013568
0
Return additional config section for segment displays.
def get_segment_display_config_section(cls) -> Optional[str]: return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_section(self,name):\n if self.__config.has_section(name):\n data={}\n for opt,val in self.__config.items(name):\n data[opt]=val\n return data\n else:\n raise Exception(_('EVOGTK: Section \"%s\" does not exist in this preferences instance') % name)", "def section(self):\n return SECTION_NAME_TO_SECTION[self.section_name]", "def get_switch_config_section(cls):\n return None", "def get_switch_config_section(cls) -> Optional[str]:\n return None", "def get_config_main_sections(self):\n self.sections_in_config = self.config_handle.sections()", "def configure(self, section):", "def get_stepper_config_section(cls) -> Optional[str]:\n return None", "def _config_sections(self):\n data = []\n section_data = []\n for index, line in enumerate(self.running_config):\n if self._nextline_startswith_space(index):\n section_data.append(line)\n else:\n if len(section_data) > 0:\n section_data.append(line)\n data.append(section_data)\n section_data = []\n return data", "def get_section(self, section=None, set_section=False, add_section=False, search_in_default_config=None):\r\n section = self._check_section(section, search_in_default_config=search_in_default_config)\r\n return self._cfg.get_section(section=section, set_section=set_section, add_section=add_section)", "def get_coil_config_section(cls) -> Optional[str]:\n return None", "def add_config(self):\n\n config = {\n 'byte_to_integer': ByteToInteger,\n 'integer_to_byte': IntegerToByte,\n 'integer_to_double_integer': IntegerToDoubleInteger,\n 'integer_to_string': IntegerToString,\n 'double_integer_to_integer': DoubleIntegerToInteger,\n 'double_integer_to_real': DoubleIntegerToReal,\n 'double_integer_to_string': DoubleIntegerToString,\n 'binary_coded_decimal_to_integer': BinaryCodedDecimalToInteger,\n 'integer_to_binary_coded_decimal': IntegerToBinaryCodedDecimal,\n 'round': Round,\n 'truncate': Truncate,\n 'real_to_string': RealToString,\n 'integer_to_ascii': IntegerToASCII,\n 'double_integer_to_ascii': DoubleIntegerToASCII,\n 'real_to_ascii': RealToASCII,\n 'ascii_to_hexadecimal': ASCIIToHexadecimal,\n 'hexadecimal_to_ascii': HexadecimalToASCII,\n 'string_to_integer': StringToInteger,\n 'string_to_double_integer': StringToDoubleInteger,\n 'string_to_real': StringToReal,\n 'decode': Decode,\n 'encode': Encode,\n 'segment': Segment\n }\n\n return config", "def config_section_data():\n config_data = u\"\"\"[fn_sep]\nsep_base_path=/sepm/api/v1\nsep_auth_path=/sepm/api/v1/identity/authenticate\nsep_host=<SEPM server dns name or ip address>\nsep_port=8446\nsep_username=<username>\nsep_password=<password>\nsep_domain=<SEP domain name>\n# Optional settings for access to SEPM via a proxy.\n#http_proxy=http://proxy:80\n#https_proxy=http://proxy:80\n# Limit result sent to Resilient, add full result as an attachment.\nsep_results_limit=200\n# Period of time (seconds) to wait for all endpoints to return a scan result.\nsep_scan_timeout=1800\n\"\"\"\n return config_data", "def _add_section(self, name, last_section=None):\n if last_section is None:\n last_section = self.sections\n last_section[name] = Section()\n return last_section[name]", "def fill_global_display_section():\n section = _SectionData(\"GlobalDisplay\")\n section.props.append((\"DisplayLocators\", int(_property_utils.get_by_type(bpy.types.GlobalSCSProps.display_locators))))\n section.props.append((\"LocatorSize\", _property_utils.get_by_type(bpy.types.GlobalSCSProps.locator_size)))\n section.props.append((\"LocatorEmptySize\", 
_property_utils.get_by_type(bpy.types.GlobalSCSProps.locator_empty_size)))\n section.props.append((\"DisplayConnections\", int(_property_utils.get_by_type(bpy.types.GlobalSCSProps.display_connections))))\n section.props.append((\"CurveSegments\", _property_utils.get_by_type(bpy.types.GlobalSCSProps.curve_segments)))\n section.props.append((\"DisplayTextInfo\", _property_utils.get_by_type(bpy.types.GlobalSCSProps.display_info)))\n return section", "def validate_segment_display_section(self, segment_display, config) -> dict:\n if self.get_segment_display_config_section():\n spec = self.get_segment_display_config_section() # pylint: disable-msg=assignment-from-none\n config = segment_display.machine.config_validator.validate_config(spec, config, segment_display.name)\n elif config:\n raise AssertionError(\"No platform_config supported but not empty {} for segment display {}\".\n format(config, segment_display.name))\n\n return config", "def add_new_section(self, name, context=...):\n ...", "def config_section_data():\n config_data = u\"\"\"[feeds]\n# comma separated section names. ex. sqlserver_feed,file_feed\nfeed_names=<your feeds>\nreload=true\n# use reload_types to limit the types of objects when reload=true.\n# Ex: incident,task,note,artifact,attachment,<data_table_api_name>\nreload_types=\n# set to true if ElasticSearch errors occur during reload=true\nreload_query_api_method=false\n\n# feed_data is the default message destination that will be listened to\nqueue=feed_data\n\n# set to true if attachment data should be part of payload send to plugins\ninclude_attachment_data=false\n# if necessary, specify the supported workspace (by label, case sensitive) and the list of feeds associated with it\n# ex: 'Default Workspace': ['sqlserver_feed'], 'workspace A': ['kafka_feed', 'resilient_feed']\nworkspaces=\n\"\"\"\n return config_data", "def get_section_config(self, section):\n params = self._parse_params(ConfigStorage(self.config[section]))\n return params", "def get_rec_config(self):\n conf_map = {}\n if len(self.reconstructions.text()) > 0:\n conf_map['reconstructions'] = str(self.reconstructions.text())\n if len(self.device.text()) > 0:\n conf_map['device'] = str(self.device.text()).replace('\\n', '')\n if len(self.alg_seq.text()) > 0:\n conf_map['algorithm_sequence'] = str(self.alg_seq.text()).replace('\\n', '')\n if len(self.beta.text()) > 0:\n conf_map['beta'] = str(self.beta.text())\n if len(self.support_area.text()) > 0:\n conf_map['support_area'] = str(self.support_area.text()).replace('\\n', '')\n if self.cont.isChecked():\n conf_map['cont'] = 'true'\n if len(self.cont_dir_button.text().strip()) > 0:\n conf_map['continue_dir'] = '\"' + str(self.cont_dir_button.text()).strip() + '\"'\n print('cont_dir', conf_map['continue_dir'])\n\n for feat_id in self.features.feature_dir:\n self.features.feature_dir[feat_id].add_config(conf_map)\n\n return conf_map", "def add_section(self, section_name: str) -> None:\n pass", "def add_section(self, section_name: str) -> None:\n pass", "def fill_header_section():\n section = _SectionData(\"Header\")\n section.props.append((\"FormatVersion\", 1))\n section.props.append((\"Source\", get_combined_ver_str()))\n section.props.append((\"Type\", \"Configuration\"))\n section.props.append((\"Note\", \"User settings of SCS Blender Tools\"))\n author = bpy.context.user_preferences.system.author\n if author:\n section.props.append((\"Author\", str(author)))\n section.props.append((\"ConfigStoragePlace\", 
_property_utils.get_by_type(bpy.types.GlobalSCSProps.config_storage_place)))\n section.props.append((\"DumpLevel\", _property_utils.get_by_type(bpy.types.GlobalSCSProps.dump_level)))\n return section", "def add_segment(self):\n last_seg = c.coords(self.segments[0].instance)\n x = last_seg[2] - SEG_SIZE\n y = last_seg[3] - SEG_SIZE\n self.segments.insert(0, Segment(x, y))", "def add_section(self,name,values):\n if not self.__config.has_section(name):\n self.__config.add_section(name)\n # Add values to this section\n for option,value in values.items():\n self.__config.set(name, option, str(value))\n else:\n raise Exception(_('EVOGTK: Section \"%s\" already exists in this preferences instance') % name)", "def add_sections(self, op):\n if(is_listing(op)):\n self.__sections += op\n else:\n self.__sections += [op]", "def add_section(self):\n section = CodeBuilder(self.indent_level)\n self.code.append(section)\n return section", "def add_section(self):\n section = CodeBuilder(self.indent_level)\n self.code.append(section)\n return section", "def print_config_main_sections(self):\n try:\n print(\"Main Sections in config file : \", self.sections_in_config)\n except:\n print(\"Invalid Config File.\")", "def show_config() -> None:\n with _config_lock:\n config_util.show_config(\n _section_descriptions, cast(Dict[str, ConfigOption], _config_options)\n )", "def get_coil_config_section(cls):\n return None" ]
[ "0.6262563", "0.5879021", "0.58110625", "0.57781315", "0.56835556", "0.56570494", "0.5653879", "0.5543324", "0.55162793", "0.5471141", "0.54562134", "0.5451342", "0.5448683", "0.5439083", "0.5400168", "0.53734565", "0.53500634", "0.53457993", "0.53192", "0.53049344", "0.53049344", "0.52941626", "0.52009976", "0.5190001", "0.51754445", "0.5166659", "0.5166659", "0.51580805", "0.5155097", "0.51529676" ]
0.6996485
0
Validate segment display config for platform.
def validate_segment_display_section(self, segment_display, config) -> dict: if self.get_segment_display_config_section(): spec = self.get_segment_display_config_section() # pylint: disable-msg=assignment-from-none config = segment_display.machine.config_validator.validate_config(spec, config, segment_display.name) elif config: raise AssertionError("No platform_config supported but not empty {} for segment display {}". format(config, segment_display.name)) return config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_display_option(display):\n display_options = get_display_options(verbose=False)\n if display not in display_options:\n err_str = \"The display value (%s) does not correspond to a possible \\\n display value in ENA\" % (display)\n raise ValueError(err_str)", "async def configure_segment_display(self, number: str, display_size: int,\n platform_settings) -> \"SegmentDisplayPlatformInterface\":\n raise NotImplementedError", "def get_segment_display_config_section(cls) -> Optional[str]:\n return None", "def _validate_config(self):\n pass", "def validate_config(self):\n pass", "def validate_config(self):\n pass", "async def configure_segment_display(self, number: str, platform_settings) -> LightSegmentDisplay:\n settings = self.machine.config_validator.validate_config(\"light_segment_displays\", platform_settings)\n return LightSegmentDisplay(number, lights=settings['lights'], segment_type=settings['type'])", "def check_segment(self, segment):\n network_type = segment[api.NETWORK_TYPE]\n return network_type in [constants.TYPE_LOCAL, constants.TYPE_GRE,\n constants.TYPE_VXLAN, constants.TYPE_VLAN]", "def check_segment(self, segment):\n network_type = segment[api.NETWORK_TYPE]\n return network_type in [constants.TYPE_LOCAL, constants.TYPE_GRE,\n constants.TYPE_VXLAN, constants.TYPE_VLAN]", "def _validate_subcloud_config(self,\n context,\n name,\n management_subnet_str,\n management_start_ip_str,\n management_end_ip_str,\n management_gateway_ip_str,\n systemcontroller_gateway_ip_str):\n\n # Validate the name\n if name.isdigit():\n pecan.abort(400, _(\"name must contain alphabetic characters\"))\n\n if name in [consts.DEFAULT_REGION_NAME,\n consts.SYSTEM_CONTROLLER_NAME]:\n pecan.abort(400, _(\"name cannot be %(bad_name1)s or %(bad_name2)s\")\n % {'bad_name1': consts.DEFAULT_REGION_NAME,\n 'bad_name2': consts.SYSTEM_CONTROLLER_NAME})\n\n # Parse/validate the management subnet\n subcloud_subnets = []\n subclouds = db_api.subcloud_get_all(context)\n for subcloud in subclouds:\n subcloud_subnets.append(IPNetwork(subcloud.management_subnet))\n\n MIN_MANAGEMENT_SUBNET_SIZE = 8\n # subtract 3 for network, gateway and broadcast addresses.\n MIN_MANAGEMENT_ADDRESSES = MIN_MANAGEMENT_SUBNET_SIZE - 3\n\n management_subnet = None\n try:\n management_subnet = validate_network_str(\n management_subnet_str,\n minimum_size=MIN_MANAGEMENT_SUBNET_SIZE,\n existing_networks=subcloud_subnets)\n except ValidateFail as e:\n LOG.exception(e)\n pecan.abort(400, _(\"management-subnet invalid: %s\") % e)\n\n # Parse/validate the start/end addresses\n management_start_ip = None\n try:\n management_start_ip = validate_address_str(\n management_start_ip_str, management_subnet)\n except ValidateFail as e:\n LOG.exception(e)\n pecan.abort(400, _(\"management-start-ip invalid: %s\") % e)\n\n management_end_ip = None\n try:\n management_end_ip = validate_address_str(\n management_end_ip_str, management_subnet)\n except ValidateFail as e:\n LOG.exception(e)\n pecan.abort(400, _(\"management-end-ip invalid: %s\") % e)\n\n if not management_start_ip < management_end_ip:\n pecan.abort(\n 400,\n _(\"management-start-ip not less than management-end-ip\"))\n\n if not len(IPRange(management_start_ip, management_end_ip)) >= \\\n MIN_MANAGEMENT_ADDRESSES:\n pecan.abort(\n 400,\n _(\"management address range must contain at least %d \"\n \"addresses\") % MIN_MANAGEMENT_ADDRESSES)\n\n # Parse/validate the gateway\n try:\n validate_address_str(\n management_gateway_ip_str, management_subnet)\n except ValidateFail as e:\n 
LOG.exception(e)\n pecan.abort(400, _(\"management-gateway-ip invalid: %s\") % e)\n\n # Ensure subcloud management gateway is not within the actual subcloud\n # management subnet address pool for consistency with the\n # systemcontroller gateway restriction below. Address collision\n # is not a concern as the address is added to sysinv.\n subcloud_mgmt_address_start = IPAddress(management_start_ip_str)\n subcloud_mgmt_address_end = IPAddress(management_end_ip_str)\n subcloud_mgmt_gw_ip = IPAddress(management_gateway_ip_str)\n if ((subcloud_mgmt_gw_ip >= subcloud_mgmt_address_start) and\n (subcloud_mgmt_gw_ip <= subcloud_mgmt_address_end)):\n pecan.abort(400, _(\"management-gateway-ip invalid, \"\n \"is within management pool: %(start)s - \"\n \"%(end)s\") %\n {'start': subcloud_mgmt_address_start,\n 'end': subcloud_mgmt_address_end})\n\n # Ensure systemcontroller gateway is in the management subnet\n # for the systemcontroller region.\n management_address_pool = self._get_management_address_pool(context)\n systemcontroller_subnet_str = \"%s/%d\" % (\n management_address_pool.network,\n management_address_pool.prefix)\n systemcontroller_subnet = IPNetwork(systemcontroller_subnet_str)\n try:\n validate_address_str(\n systemcontroller_gateway_ip_str, systemcontroller_subnet)\n except ValidateFail as e:\n LOG.exception(e)\n pecan.abort(400, _(\"systemcontroller-gateway-ip invalid: %s\") % e)\n # Ensure systemcontroller gateway is not within the actual\n # management subnet address pool to prevent address collision.\n mgmt_address_start = IPAddress(management_address_pool.ranges[0][0])\n mgmt_address_end = IPAddress(management_address_pool.ranges[0][1])\n systemcontroller_gw_ip = IPAddress(systemcontroller_gateway_ip_str)\n if ((systemcontroller_gw_ip >= mgmt_address_start) and\n (systemcontroller_gw_ip <= mgmt_address_end)):\n pecan.abort(400, _(\"systemcontroller-gateway-ip invalid, \"\n \"is within management pool: %(start)s - \"\n \"%(end)s\") %\n {'start': mgmt_address_start, 'end': mgmt_address_end})", "def validate(self):\n valid = (\n self.speaker != \"inter_segment_gap\"\n and self.text\n and self.text != \"ignore_time_segment_in_scoring\"\n and self.label in [\"<o,f0,male>\", \"<o,f0,female>\", \"<o,f0,mixed>\"]\n )\n\n try:\n self.start = clean_float(self.start)\n self.stop = clean_float(self.stop)\n valid = valid and float(self.start) < float(self.stop)\n except Exception as exc:\n valid = False\n print(exc)\n\n if not valid:\n LOGGER.error(\n \"\"\"Skipping segment due to validation error.\nPlease note that this invalidates WER calculations based on the entire file.\nSegment: %s\"\"\",\n json.dumps(self.__dict__),\n )\n\n if \"-\" in self.filename:\n self.filename = self.filename.replace(\"-\", \"_\")\n print(\"Please rename audio file to replace hyphens with underscores\")\n\n return valid", "def state_preview_validate(cfg, app, win, events):", "def USBD_ValidateConfigurationDescriptor(self, emu, argv, ctx={}):\n rv = ddk.STATUS_SUCCESS\n ConfigDesc, BufferLength, Level, Offset, Tag = argv\n\n return rv", "def validate_config(self):\n\n # LOCALHOST\n if self.location == 'localhost':\n if 'browserName' not in self.config.keys():\n msg = \"Add the 'browserName' in your local_config: e.g.: 'Firefox', 'Chrome', 'Safari'\" # noqa\n self.runner.critical_log(msg)\n raise BromeBrowserConfigException(msg)\n\n # EC2\n elif self.location == 'ec2':\n self.validate_ec2_browser_config()\n\n # VIRTUALBOX\n elif self.location == 'virtualbox':\n self.validate_virtualbox_config()", "def 
validateConfigUI(valuesDict, typeId, devId):\n\n errors = indigo.Dict()\n isValid = True\n # The Shelly 1 needs to ensure the user has selected a Broker device, supplied the address, and supplied the message type.\n # If the user has indicated that announcement messages are separate, then they need to supply that message type as well.\n\n # Validate the broker\n brokerId = valuesDict.get('broker-id', None)\n if not brokerId.strip():\n isValid = False\n errors['broker-id'] = u\"You must select the broker to which the Shelly is connected to.\"\n\n # Validate the address\n address = valuesDict.get('address', None)\n if not address.strip():\n isValid = False\n errors['address'] = u\"You must enter the MQTT topic root for the Shelly.\"\n\n # Validate the message type\n messageType = valuesDict.get('message-type', None)\n if not messageType.strip():\n isValid = False\n errors['message-type'] = u\"You must enter the message type that this Shelly will be associated with.\"\n\n # Validate the announcement message type\n hasSameAnnounceMessageType = valuesDict.get('announce-message-type-same-as-message-type', True)\n if not hasSameAnnounceMessageType: # We would expect a supplied message type for announcement messages\n announceMessageType = valuesDict.get('announce-message-type', None)\n if not announceMessageType.strip():\n isValid = False\n errors['announce-message-type'] = u\"You must supply the message type that will be associated with the announce messages.\"\n\n return isValid, valuesDict, errors", "def validate_config(self):\r\n c = self.config\r\n \r\n # Make sure that we have a database_path, and an image_path...\r\n assert 'database_path' in c\r\n assert 'image_path' in c\r\n # We should probably check if these paths exist and make them as well...\r\n \r\n # Set the default values.\r\n graph_draw_frequency = c['graph_draw_frequency']\r\n for period, interval in self.default_config['graph_draw_frequency'].iteritems():\r\n graph_draw_frequency.setdefault(period, interval)\r\n \r\n # A quick check to make sure that our port is an integer.\r\n c['httpd_port'] = int(c['httpd_port'])\r\n \r\n # Make sure that no duplicate IDs exist, and that the template exists as well.\r\n ids = set()\r\n for graph in c['graphs']:\r\n graph.setdefault('config', {})\r\n graph['config'].setdefault('periods', [])\r\n assert graph['id'] not in ids\r\n ids.add(graph['id'])\r\n assert(template_exists(graph['template']))", "def validate_config(self, config: Dict) -> bool:\n raise NotImplementedError", "def supports_display(handler_input):\n # type: (HandlerInput) -> bool\n try:\n if hasattr(\n handler_input.request_envelope.context.system.device.\n supported_interfaces, 'display'):\n return (\n handler_input.request_envelope.context.system.device.\n supported_interfaces.display is not None)\n except:\n return False", "def test_launch_config_sg_valid(self):\n if self.prod_env:\n launch_config_name = 'saints-xctf-server-prod-lc'\n launch_config_sg = 'saints-xctf-prod-server-lc-security-group'\n else:\n launch_config_name = 'saints-xctf-server-dev-lc'\n launch_config_sg = 'saints-xctf-dev-server-lc-security-group'\n\n lcs = self.autoscaling.describe_launch_configurations(\n LaunchConfigurationNames=[launch_config_name],\n MaxRecords=1\n )\n\n launch_config = lcs.get('LaunchConfigurations')[0]\n security_group_id = launch_config.get('SecurityGroups')[0]\n\n security_group = self.ec2_client.describe_security_groups(GroupIds=[security_group_id]).get('SecurityGroups')[0]\n\n self.assertTrue(all([\n 
security_group.get('GroupName') == launch_config_sg,\n self.validate_launch_config_sg_rules(\n security_group.get('IpPermissions'),\n security_group.get('IpPermissionsEgress')\n )\n ]))", "def validate_project(self):\n\t\tif not os.path.exists(self.segment_path):\n\t\t\terror = \"%s does not exist\" % self.segment_path\n\t\t\traise DLAProjectNotFound,error", "def validate(self):\n\n # Check if motherboard record exists\n motherboard_record_exists = False\n board_info_records = self.groups[constants.RecordType.BASEBOARD_RECORD]\n for handle_id in board_info_records:\n record = self.records[handle_id]\n if 'Type' in record.props and record.props['Type'].val == 'Motherboard':\n motherboard_record_exists = True\n break\n if not motherboard_record_exists:\n self.err_msgs['Motherboard SMBIOS record is missing.'] = (\n 'There should be at least one structure defining the motherboard '\n '(Board Type: 0xA).')\n\n return self.err_msgs", "def validate_switch_section(self, switch: \"Switch\", config: dict) -> dict:\n if self.get_switch_config_section():\n spec = self.get_switch_config_section() # pylint: disable-msg=assignment-from-none\n config = switch.machine.config_validator.validate_config(spec, config, switch.name)\n elif config:\n raise AssertionError(\"No platform_config supported but not empty {} for switch {}\".\n format(config, switch.name))\n\n return config", "def parse_and_validate_num_segs(segment_str):\n # try to parse numSegments\n num_segments = 0\n try:\n num_segments = int(segment_str)\n divs = math.log(num_segments, 2)\n if num_segments < 2:\n raise ValidationError(NUMSEG_ERR_SMALL_VAL)\n elif int(divs) != divs:\n raise ValidationError(NUMSEG_ERR_BAD_POW)\n except ValidationError as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(err.args[0])\n sys.exit()\n except ValueError:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(NUMSEG_ERR_BAD_PARSE)\n sys.exit()\n except BaseException as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(\"Unexpected error\")\n print \"Error was:\\n\\t\", err\n sys.exit()\n return num_segments", "def check_segment(self, segment, host):\n\n # TODO(ijw): naive - doesn't check host, or configured\n # physnets on the host. Should work out if the binding\n # can't be achieved before accepting it\n\n network_type = segment[api.NETWORK_TYPE]\n if network_type not in self.allowed_network_types:\n LOG.debug(\n 'Network %(network_id)s is %(network_type)s, '\n 'but this driver only supports types '\n '%(allowed_network_types)s. 
'\n 'The type must be supported if binding is to succeed.',\n {'network_id': segment['id'],\n 'network_type': network_type,\n 'allowed_network_types':\n ', '.join(self.allowed_network_types)}\n )\n return False\n\n if network_type in [plugin_constants.TYPE_FLAT,\n plugin_constants.TYPE_VLAN]:\n physnet = segment[api.PHYSICAL_NETWORK]\n if not self.physnet_known(host, physnet):\n LOG.debug(\n 'Network %(network_id)s is on physical '\n 'network %(physnet)s, but the physical network '\n 'is not one the host %(host)s has attached.',\n {'network_id': segment['id'],\n 'physnet': physnet,\n 'host': host}\n )\n return False\n\n return True", "def seg_known(self, segment, normalize=True):\n if normalize:\n segment = FeatureTable.normalize(segment)\n return segment in self.seg_dict", "def validate_switch_section(self, switch: Switch, config: dict) -> dict:\n base_spec = [\"device\"]\n if self.__class__.get_switch_config_section():\n base_spec.append(self.__class__.get_switch_config_section())\n switch.machine.config_validator.validate_config(\n \"switches\", config, switch.name,\n base_spec=base_spec)\n return config", "def validate_auth(config):\n token = config.get(CONF_ACCESS_TOKEN)\n if config[CONF_DEVICE_CLASS] == \"tv\" and not token:\n raise vol.Invalid(\n f\"When '{CONF_DEVICE_CLASS}' is 'tv' then '{CONF_ACCESS_TOKEN}' is required.\",\n path=[CONF_ACCESS_TOKEN],\n )\n return config", "def validateConfig(self):\n ## (boolean with the result of the validation, eventual error message)\n return (True, '')", "def _check_config(self):", "def _verify_options(config: configuration.Config) -> None:\n\n if not config.config['species']:\n log._logger.error('You must specify a species (-s/--species)')\n exit(1)\n\n if config.config['hpc'] and config.config['local']:\n log._logger.error('You can only use one of the config options (hpc/local)')\n exit(1)\n\n if config.config['hpc'] and config.config['custom']:\n log._logger.error('You can only use one of the config options (hpc/custom)')\n exit(1)\n\n if config.config['local'] and config.config['custom']:\n log._logger.error('You can only use one of the config options (local/custom)')\n exit(1)\n\n if (not config.config['hpc']) and\\\n (not config.config['local']) and\\\n (not config.config['custom']):\n log._logger.error(\n 'You must specify a compute cluster environment (hpc/local/custom)'\n )\n exit(1)\n\n if config.config['custom'] and (not config.config['scheduler']):\n log._logger.error(\n 'The custom compute environment requires a scheduler address to be set'\n )\n exit(1)" ]
[ "0.6280102", "0.60534555", "0.6044687", "0.57998556", "0.577863", "0.577863", "0.571655", "0.55797225", "0.55797225", "0.5537359", "0.53069067", "0.5244744", "0.52330995", "0.5228028", "0.51318383", "0.5114275", "0.5112588", "0.5101397", "0.50893086", "0.50835794", "0.5073127", "0.50569385", "0.5054249", "0.5021388", "0.5015813", "0.49998456", "0.4989727", "0.49770388", "0.4964377", "0.49393496" ]
0.81158113
0
Subclass this method in a platform module to configure a segment display. This method should return a reference to the segment display platform interface which will receive the text to show.
async def configure_segment_display(self, number: str, display_size: int, platform_settings) -> "SegmentDisplayPlatformInterface": raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def configure_segment_display(self, number: str, platform_settings) -> LightSegmentDisplay:\n settings = self.machine.config_validator.validate_config(\"light_segment_displays\", platform_settings)\n return LightSegmentDisplay(number, lights=settings['lights'], segment_type=settings['type'])", "def _init_display(self):\n raise NotImplementedError", "def get_virtual_display(self):\n pass", "def setupScreenText(self) :\n\t\t# Create object to show avatar's position on the screen.\n\t\t# Update actual text using setText method on object.\n\t\tself.avPos = showText(0.92)\n\n \t\t# Create object to show a list of visible avatars\n \t\tself.showNumVisible = showText(0.85)\n \t\tself.visList = []\n\n\t\t# Create object for displaying keyboard shortcuts\n\t\tself.helpText = showText(0.78)\n\t\tself.helpText.setText(\"h: for help\")", "def get_segment_display_config_section(cls) -> Optional[str]:\n return None", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_segment_displays'] = True", "def configure(self, manager):\n success, display = manager.c.eval(\"self.core.display_name\")\n assert success\n self.env[\"WAYLAND_DISPLAY\"] = display", "def render(self):\n mode = self.check_mode()\n if mode == MODE_DISPLAY_ALL:\n self.display_all()\n elif mode == MODE_DISPLAY_STATUS:\n self.display_status()\n else:\n self.display_active_sensor()", "def draw(self, screen):\n\n x, y = self.get_abs_x(), self.get_abs_y()\n font_size = self.get_property('font_size')\n min_width, min_height = get_text_size(self.get_text(), font_size=font_size)\n width, height = self.get_properties('width', 'height')\n pad_left, pad_right = self.get_properties('pad_left', 'pad_right')\n pad_top, pad_bottom = self.get_properties('pad_top', 'pad_bottom')\n text = self.get_text()\n\n super().draw(screen)\n font_size = self.get_property('font_size')\n show_text(screen, text, x + width / 2, y + height / 2,\n font_size=font_size)\n\n return self", "def get_spice_console(self, instance):\n raise NotImplementedError()", "def start_displayhook(self):\n pass", "def display_text(self, text):\n self.write_to_serial(':DISP:TEXT \\'' + text + '\\'')", "def update_display(self): #pylint: disable=too-many-branches\n if self.speed_enabled or self.cadence_enabled:\n speed, cadence = self.read_s_and_c()\n\n if self.heart_enabled:\n heart = self.read_heart()\n if not self._setup:\n self._hr_label = self._label_maker('{} bpm'.format(heart), 50, self._heart_y) # 75\n self.splash.append(self._hr_label)\n else:\n self._hr_label.text = '{} bpm'.format(heart)\n\n if self.speed_enabled:\n if not self._setup:\n self._sp_label = self._label_maker('{} mph'.format(speed), 50, self._speed_y) # 120\n self.splash.append(self._sp_label)\n else:\n self._sp_label.text = '{} mph'.format(speed)\n\n if self.cadence_enabled:\n if not self._setup:\n self._cadence_label = self._label_maker('{} rpm'.format(cadence), 50,\n self._cadence_y)\n self.splash.append(self._cadence_label)\n else:\n self._cadence_label.text = '{} rpm'.format(cadence)\n\n if self.ams_enabled:\n ams = self.read_ams()\n if not self._setup:\n self._ams_label = self._label_maker('{}'.format(ams), 50, self._ams_y,\n font=self.arial16)\n self.splash.append(self._ams_label)\n else:\n self._ams_label.text = '{}'.format(ams)\n\n self._setup = True", "def display_text(self, display_text):\n\n self._display_text = display_text", "def show(self):\r\n display(self.grid_part)", "def display_eng(self):\n self.clear_terminal()\n self.menu_eng()\n self.handle_selection_eng()", "def 
__init__(self, display):\n self.display = display\n self.labels = dict()", "def display(self):\n self.displaycontrol |= self.LCD_DISPLAYON\n self.write_lcd(self.LCD_DATA_E1, self.LCD_DISPLAYCONTROL | self.displaycontrol)\n self.write_lcd(self.LCD_DATA_E2, self.LCD_DISPLAYCONTROL | self.displaycontrol)", "def displaySetup(app, **options):\n\n display = app.display\n\n display.setup(**options)", "def display(self, args):\n from sage.repl.rich_output import get_display_manager\n dm = get_display_manager()\n args = args.strip().split()\n if not args:\n print(dm.preferences)\n return\n arg0 = args[0]\n # deprecated values\n if arg0 == 'simple':\n dm.preferences.text = 'plain'\n elif arg0 == 'typeset':\n dm.preferences.text = 'latex'\n elif arg0 in ['ascii_art', 'unicode_art'] and len(args) > 1:\n try:\n max_width = int(args[1])\n except ValueError:\n max_width = 0\n if max_width <= 0:\n raise ValueError(\n \"max width must be a positive integer\")\n import sage.typeset.character_art as character_art\n character_art.MAX_WIDTH = max_width\n dm.preferences.text = arg0\n # Unset all\n elif arg0 in ['default', 'None']: # un-stringify \"%display None\"\n for option in map(str, dm.preferences.available_options()):\n delattr(dm.preferences, option)\n # Normal argument handling\n elif arg0 in map(str, dm.preferences.available_options()) and len(args) <= 2:\n if len(args) == 1:\n # \"%display text\" => get current value\n print(getattr(dm.preferences, arg0))\n else:\n # \"%display text latex\" => set new value\n assert len(args) == 2\n if args[1] in ['default', 'None']:\n delattr(dm.preferences, arg0)\n else:\n try:\n setattr(dm.preferences, arg0, args[1])\n except ValueError as err:\n print(err) # do not show traceback\n # If all else fails: assume text\n else:\n try:\n dm.preferences.text = arg0\n except ValueError as err:\n print(err) # do not show traceback", "def show(self):\n # Used for testing because there is obviously no way back\n # from VISU_Gen.SetCurrentStudy\n if not self.display:\n return\n\n # Desactivation : Load the med file in the PARAVIS component\n #import smeca_utils.visu_utils as VU\n #log.info(\"Loading Paravis module...\")\n #msg = VU.load_med_file(self.read_fname())\n #log.info(msg)", "def display(self):\n self.command(SSD1306_COLUMNADDR)\n self.command(0) # Column start address. (0 = reset)\n self.command(self.width-1) # Column end address.\n self.command(SSD1306_PAGEADDR)\n self.command(0) # Page start address. 
(0 = reset)\n self.command(self._pages-1) # Page end address.\n # Write buffer data.\n if self._spi is not None:\n # Set DC high for data.\n self._gpio.set_high(self._dc)\n # Write buffer.\n self._spi.write(self._buffer)\n else:\n for i in range(0, len(self._buffer), 16):\n control = 0x40 # Co = 0, DC = 0\n self._i2c.writeList(control, self._buffer[i:i+16])", "def showDisplay(self, type=\"DEFAULT\"):\n gd = mamba.getDisplayer() # <- trick to ensure the root windows is created and hidden\n if type==\"DEFAULT\":\n # First if there is any display already opened it is showed\n no_display = True\n if self._displayUsr:\n self._displayUsr.show()\n no_display = False\n if self._displayVtk:\n self._displayVtk.show()\n no_display = False\n if self._displayPjt:\n self._displayPjt.show()\n no_display = False\n \n if no_display:\n # If no display is yet open we create one\n # preferentially using user defines display\n # or if not VTK\n if self._displayerUsr:\n self._displayUsr = self._displayerUsr(self.name)\n if self._displayUsr:\n self._displayUsr.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayUsr.updateim()\n else:\n self._displayVtk = self._displayerVtk(self.name)\n if self._displayVtk:\n self._displayVtk.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayVtk.updateim()\n \n elif type==\"USER\":\n if self._displayerUsr:\n if self._displayUsr:\n self._displayUsr.show()\n else:\n self._displayUsr = self._displayerUsr(self.name)\n if self._displayUsr:\n self._displayUsr.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayUsr.updateim()\n \n elif type==\"PROJECTION\":\n if self._displayerPjt:\n if self._displayPjt:\n self._displayPjt.show()\n else:\n self._displayPjt = self._displayerPjt(self.name)\n if self._displayPjt:\n self._displayPjt.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayPjt.updateim()\n \n elif type==\"VTK\":\n if self._displayerVtk:\n if self._displayVtk:\n self._displayVtk.show()\n else:\n self._displayVtk = self._displayerVtk(self.name)\n if self._displayVtk:\n self._displayVtk.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayVtk.updateim()", "def show(self):\n self.set_text(self.read())", "def __init__(self):\r\n self.label = \"Bulk Layout Text Replace\"\r\n self.alias = \" Jake's Toolbox Alias Property True\"\r\n self.description = \"\"\r\n self.canRunInBackground = False", "def display_synth_editor(self, sid):\n pass", "def display( self, value=True ):\n\t\tif value:\n\t\t\tself._displaycontrol |= LCD_DISPLAYON\n\t\telse:\n\t\t\tself._displaycontrol &= (0xFF ^ LCD_DISPLAYON)\n\t\tself.command( LCD_DISPLAYCONTROL | self._displaycontrol )", "def text_plot(self):\n if self.stext is not None:\n # Create text object :\n self.stextmesh = visu.Text(text=self.stext, color=self.stextcolor,\n font_size=self.stextsize, pos=self.xyz,\n bold=True, name='SourcesText')\n\n # Set text texture :\n self.stextmesh.set_gl_state('translucent', depth_test=True)\n\n # Apply a transformation to text elements to not cover sources :\n self.stextmesh.transform = vist.STTransform(\n translate=self.stextshift)\n else:\n self.stextmesh = visu.Text(name='NoneText')", "def display(self, buffer = None):\n raise NotImplementedError", "def update_displays(self):\n for key, value in self.lnp.settings:\n if key in list(self.controls.keys()):\n if isinstance(self.controls[key], Entry):\n self.controls[key].delete(0, END)\n self.controls[key].insert(0, value)\n else:\n 
self.controls[key][\"text\"] = (\n self.controls[key][\"text\"].split(':')[0] + ': ' +\n value)" ]
[ "0.6924744", "0.6400069", "0.6285432", "0.6190356", "0.6038278", "0.57669413", "0.57462823", "0.5709245", "0.56849116", "0.56409514", "0.5581522", "0.5579373", "0.5565821", "0.5550693", "0.55264884", "0.55241746", "0.545651", "0.542362", "0.53820467", "0.5363494", "0.5354753", "0.5350616", "0.5312604", "0.531226", "0.53120345", "0.53089684", "0.5302518", "0.5302138", "0.5294861", "0.52914625" ]
0.72853565
0
Register display for flash task.
def _handle_software_flash(self, display): self._displays.add(display)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def start_flash(display):\n ck_display[str(display)].configure(relief=tkinter.RAISED, bd=0, highlightbackground='blue', highlightthickness=8)", "def start_displayhook(self):\n pass", "def attach_display(self, display):\n self.extra_displays.append(display)\n self.addDockWidget(Qt.RightDockWidgetArea, display)\n #self.display_attached.emit(display)", "def displaySetup(app, **options):\n\n display = app.display\n\n display.setup(**options)", "def end_flash(display):\n time.sleep(0.3)\n ck_display[str(display)].configure(relief=tkinter.SUNKEN, bd=5, highlightbackground='white', highlightthickness=2)", "def opt_display(self, display):\n key = get_enum_key(display, DISPLAYS)\n if key is not None:\n self.conf[\"display\"] = key\n self.display = DISPLAYS[key]\n print(\"Set display %r\" % key)\n else:\n print(\"Unknown display %r\" % display)", "def do_standalone_display(self):\n stage = clutter.Stage()\n stage.connect('destroy', clutter.main_quit)\n stage.connect('key-press-event', lambda x,y: clutter.main_quit())\n stage.set_fullscreen(True)\n stage.set_color(clutter.color_from_string('black'))\n stage.add(self.group)\n stage.show_all()\n clutter.main()", "def setup(self, callback=False, display=\"lcd\"):\n self.display_medium = display\n self._setup_gpio_in()\n if callback:\n self._add_event_detect()\n self._add_event_callback()", "def showDisplay(self, type=\"DEFAULT\"):\n gd = mamba.getDisplayer() # <- trick to ensure the root windows is created and hidden\n if type==\"DEFAULT\":\n # First if there is any display already opened it is showed\n no_display = True\n if self._displayUsr:\n self._displayUsr.show()\n no_display = False\n if self._displayVtk:\n self._displayVtk.show()\n no_display = False\n if self._displayPjt:\n self._displayPjt.show()\n no_display = False\n \n if no_display:\n # If no display is yet open we create one\n # preferentially using user defines display\n # or if not VTK\n if self._displayerUsr:\n self._displayUsr = self._displayerUsr(self.name)\n if self._displayUsr:\n self._displayUsr.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayUsr.updateim()\n else:\n self._displayVtk = self._displayerVtk(self.name)\n if self._displayVtk:\n self._displayVtk.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayVtk.updateim()\n \n elif type==\"USER\":\n if self._displayerUsr:\n if self._displayUsr:\n self._displayUsr.show()\n else:\n self._displayUsr = self._displayerUsr(self.name)\n if self._displayUsr:\n self._displayUsr.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayUsr.updateim()\n \n elif type==\"PROJECTION\":\n if self._displayerPjt:\n if self._displayPjt:\n self._displayPjt.show()\n else:\n self._displayPjt = self._displayerPjt(self.name)\n if self._displayPjt:\n self._displayPjt.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayPjt.updateim()\n \n elif type==\"VTK\":\n if self._displayerVtk:\n if self._displayVtk:\n self._displayVtk.show()\n else:\n self._displayVtk = self._displayerVtk(self.name)\n if self._displayVtk:\n self._displayVtk.connect(list(map(lambda im: im.mbIm, self.seq)), self.name)\n self._displayVtk.updateim()", "def registrar(self):\r\n self.hide()\r\n self.ventana_registrar = VentanaRegistrar()\r\n self.ventana_registrar.show()", "def open_display (self, *display_args, **kw):\n new_disp = kw.get('cls', Display)((0, 0, 0, 0), *display_args)\n self.displays.append(new_disp)\n return (new_disp, self._arrange_displays())", "def configure(self, manager):\n 
success, display = manager.c.eval(\"self.core.display_name\")\n assert success\n self.env[\"WAYLAND_DISPLAY\"] = display", "def display(self):\n self.displaycontrol |= self.LCD_DISPLAYON\n self.write_lcd(self.LCD_DATA_E1, self.LCD_DISPLAYCONTROL | self.displaycontrol)\n self.write_lcd(self.LCD_DATA_E2, self.LCD_DISPLAYCONTROL | self.displaycontrol)", "def update_display(self):\r\n\t\tfor message in self._scheduled_messages:\r\n\t\t\tmessage['Delay'] -= 1\r\n\t\t\tif (message['Delay'] == 0):\r\n\t\t\t\tif (message['Parameter'] != None):\r\n\t\t\t\t\tmessage['Message'](message['Parameter'])\r\n\t\t\t\telse:\r\n\t\t\t\t\tmessage['Message']()\r\n\t\t\t\t\tdel self._scheduled_messages[self._scheduled_messages.index(message)]\r\n\r\n\t\tfor callback in self._timer_callbacks:\r\n\t\t\tcallback()\r\n\t\tself._timer = (self._timer + 1) % 256\r\n\t\tif(self._timer == 0):\r\n\t\t\tself._selector._shift_pressed_timer = -12\r\n\t\tself.flash()", "def translate_display(self, translate_display):\n\n self._translate_display = translate_display", "def __init__(self, machine):\n super().__init__(machine)\n self._displays = set()\n self._display_flash_task = None", "def setFlash(self,txt):\n self.flash = txt", "def launch (transparent=False): \n core.registerNew(resonance, str_to_bool(transparent))", "def _flash(self,id,msg,duration=30.0):\n if duration>0:\n pass #gtk.timeout_add(duration,'')\n return self.statusbar.push(id,msg)", "def open_display (self, *display_args, **kw):\n # first display argument is always rect; crop it to fit on the screen\n rect = pygame.Rect(display_args[0]).clip(self.screen.get_rect())\n if any(rect.colliderect(d.rect) for d in self.displays):\n raise ValueError('rect overlaps other displays')\n if rect.w == rect.h == 0:\n raise ValueError('rect outside of screen')\n # create display\n new_disp = kw.get('cls', Display)(rect, *display_args[1:])\n self.displays.append(new_disp)\n return new_disp", "def display_session(self):\n self.user['display_manager'] = {'name': self.user['display']}\n if self.user['display'] is not None:\n\n # Set display manager name\n self.user['display_manager']['name'] = \\\n self.packages['display_manager']['name'][self.user['display']]\n\n # Append display manager packages\n self.user['display_manager']['packages'] = \\\n self.packages['display_manager']['packages'][self.user['display']]\n\n # Append display manager greeter\n if self.user['greeter'] is not None:\n self.user['display_manager']['packages'] += ' {x}'.format(\n x=self.packages['greeter']['packages'][self.user['greeter']])\n\n self.user['display_manager']['session'] = \\\n self.packages['greeter']['session'][self.user['greeter']]", "def tftDisplay(libtft):\n tempMsg = getTemperature()\n print(\"%s\" % tempMsg)\n\n timeMsg = getTime()\n print(\"%s\" % timeMsg)\n\n ipMsg = GetIPAddr()\n print(\"%s\" % ipMsg)\n\n lightMsg = \"UKN\" #getLight()\n print(\"%s\" % lightMsg)\n \n libtft.ClearScreen()\n \n libtft.PutString(0, 0, \"IP \", libtft.YELLOW)\n libtft.PutString(0, 20, ipMsg, libtft.WHITE)\n\n libtft.PutString(0, 40, \"Time\", libtft.YELLOW)\n libtft.PutString(0, 60, timeMsg, libtft.WHITE)\n\n libtft.PutString(0, 80, \"Temp\", libtft.YELLOW)\n libtft.PutString(0, 100, tempMsg, libtft.WHITE)", "def setup_display() -> Surface:\n game_window = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n pygame.display.set_caption(DISPLAY_NAME)\n game_window.fill(BG_COLOR)\n pygame.display.update()\n\n return game_window", "def display(self, s):\n pygame.display.flip()\n time.sleep(s)\n 
self.screen.fill(self._background)", "def register_lab_node(module_name, class_obj):\n global server_task_graph\n if server_task_graph is None:\n server_task_graph = TaskGraph()\n server_task_graph.start_labwidget()\n server_task_graph.register_node(module_name, class_obj)", "def _init_display(self):\n raise NotImplementedError", "def bs_addHeadsUpDisplay():\n # remove all headsUpDisplay.\n if pm.windows.headsUpDisplay(lh=True):\n for each in pm.windows.headsUpDisplay(lh=True):\n pm.windows.headsUpDisplay(each, rem=True)\n # add new heads up displays.\n pm.windows.headsUpDisplay('sceneNameHUD', l='Scene Name:- ', allowOverlap=True, b=0, s=4, dataFontSize='small',\n command=bspb_sceneName)\n pm.windows.headsUpDisplay('artistNameHUD', l='Artist Name:- ', allowOverlap=True, b=1, s=5, dataFontSize='small',\n command=bspb_artistName)\n pm.windows.headsUpDisplay('dateTimeHUD', l='Date And Time:- ', allowOverlap=True, b=0, s=5, dataFontSize='small',\n command=bspb_dateTime)\n pm.windows.headsUpDisplay('frameCounterHUD', l='Frame Number:- ', allowOverlap=True, b=1, s=9, dataFontSize='small',\n command=bspb_frameCounter)\n pm.windows.headsUpDisplay('focalLengthHUD', l='Focal Length:- ', allowOverlap=True, b=0, s=9, dataFontSize='small',\n command=bspb_focalLength)\n pm.windows.headsUpDisplay('camNameHUD', l='Cam :- ', allowOverlap=True, b=0, s=7, dataFontSize='small',\n command=bspb_getCurrentCam)\n # add colors in heads up display.\n # pm.mel.eval(\"displayColor -dormant headsUpDisplayLabels 19\")\n # pm.mel.eval(\"displayColor -dormant headsUpDisplayValues 14\")\n # add expressions.\n bspb_frameCounterUpdate()\n bspb_focalLengthUpdate()", "def initDisplay(self):\n display = self.getDisplay()\n\n os.environ[\"DISPLAY\"] = display\n os.environ[\"XAUTHORITY\"] = self.xauthFile\n\n if self.verbose:\n for var in ['DISPLAY', 'XAUTHORITY']:\n print \"%s=%s\" % (var, os.environ[var])\n\n return display", "def show(self, display):\n if self.visible == True:\n pg.draw.rect(display, self.bgColor, self.panel)\n\n for element in self.elements:\n element.show(display)", "def _display(port=None, height=None, print_message=False, display_handle=None):\n if height is None:\n height = 600\n\n if port is None:\n infos = manager.get_all()\n if not infos:\n raise ValueError(\"Can't display TensorBoard: no known instances running.\")\n else:\n info = max(manager.get_all(), key=lambda x: x.start_time)\n port = info.port\n else:\n infos = [i for i in manager.get_all() if i.port == port]\n info = (\n max(infos, key=lambda x: x.start_time)\n if infos\n else None\n )\n\n if print_message:\n if info is not None:\n message = (\n \"Selecting TensorBoard with {data_source} \"\n \"(started {delta} ago; port {port}, pid {pid}).\"\n ).format(\n data_source=manager.data_source_from_info(info),\n delta=_time_delta_from_info(info),\n port=info.port,\n pid=info.pid,\n )\n print(message)\n else:\n # The user explicitly provided a port, and we don't have any\n # additional information. There's nothing useful to say.\n pass\n\n fn = {\n _CONTEXT_COLAB: _display_colab,\n _CONTEXT_IPYTHON: _display_ipython,\n _CONTEXT_NONE: _display_cli,\n }[_get_context()]\n return fn(port=port, height=height, display_handle=display_handle)" ]
[ "0.6220367", "0.6065056", "0.56963325", "0.55691284", "0.5387061", "0.5368584", "0.5353149", "0.5286806", "0.52129376", "0.5203391", "0.51823187", "0.5180706", "0.51797324", "0.51759416", "0.51678824", "0.51358366", "0.512952", "0.51293564", "0.5108795", "0.51070505", "0.5089288", "0.50664604", "0.50103056", "0.4997893", "0.49936464", "0.49865696", "0.49607697", "0.49602893", "0.4959889", "0.4951221" ]
0.6626366
0
Initialise I2C platform and set feature.
def __init__(self, machine): super().__init__(machine) self.features['has_i2c'] = True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def __init__(self, machine):\n self.machine = machine # type: MachineController\n self.features = {}\n super().__init__()\n self.debug = False\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_dmds'] = False\n self.features['has_rgb_dmds'] = False\n self.features['has_accelerometers'] = False\n self.features['has_i2c'] = False\n self.features['has_servos'] = False\n self.features['has_lights'] = False\n self.features['has_switches'] = False\n self.features['has_drivers'] = False\n self.features['tickless'] = False\n self.features['has_segment_displays'] = False\n self.features['has_hardware_sound_systems'] = False\n self.features['has_steppers'] = False\n self.features['allow_empty_numbers'] = False\n self.features['hardware_eos_repulse'] = False", "def __init__(self, machine):\n self.machine = machine\n self.features = {}\n self.log = None\n self.debug = False\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_dmd'] = False\n self.features['has_rgb_dmd'] = False\n self.features['has_accelerometers'] = False\n self.features['has_i2c'] = False\n self.features['has_servos'] = False\n self.features['has_matrix_lights'] = False\n self.features['has_gis'] = False\n self.features['has_leds'] = False\n self.features['has_switches'] = False\n self.features['has_drivers'] = False\n self.features['tickless'] = False", "def _init_hardware(self):\n return", "def _initialize_hardware(self):\n # Import\n try:\n import board\n import busio\n import adafruit_vl6180x\n except Exception as ex:\n logging.error(\n '\\n *** ERROR importing Adafruit libraries: {}'.format(\n ex,\n ),\n )\n\n # Things failed, so we must be running locally, not on a widget;\n # don't bother hooking up the VL6180X\n return\n\n # Initialize I2C and VL6180X\n try:\n i2c = busio.I2C(board.SCL, board.SDA)\n self._sensor = adafruit_vl6180x.VL6180X(i2c)\n except Exception as ex:\n logging.error(\n '\\n *** ERROR initializing I2C/LSM303: {}'.format(ex),\n )\n\n self._initialize_id_led()", "def __init__(self, i2c, address=_SGP30_DEFAULT_I2C_ADDR):\n self._i2c = i2c\n self._addr = address\n self.serial = self._i2c_read_words_from_cmd(command=[0x36, 0x82], reply_size=3, delay=0.01)\n featureset = self._i2c_read_words_from_cmd([0x20, 0x2f], 1, 0.01)\n if featureset[0] != _SGP30_FEATURESET:\n raise RuntimeError('SGP30 Not detected')\n self.initialise_indoor_air_quality()", "def __init__(self, i2c: I2C, address: int = _SGP30_DEFAULT_I2C_ADDR) -> None:\n self._device = I2CDevice(i2c, address)\n\n # get unique serial, its 48 bits so we store in an array\n self.serial = self._i2c_read_words_from_cmd([0x36, 0x82], 0.01, 3)\n # get featureset\n featureset = self._i2c_read_words_from_cmd([0x20, 0x2F], 0.01, 1)\n if featureset[0] not in _SGP30_FEATURESETS:\n raise 
RuntimeError(\"SGP30 Not detected\")\n self.iaq_init()", "def __init__(self):\n i2c.Pn532_i2c.__init__(self)\n self._uid = False", "def configure(self):\n\n self.platform.configure()", "def __init__(self):\n self.hw = dev_hwinfo.device()\n self.ethKey=\"Ethernet\"\n self.ethAllInterfaceName=[]\n dir_path = os.path.dirname(os.path.realpath(__file__))\n self.myDefine = init_define.main()\n self.mPlatform=self.hw.getPlatform()", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_hardware_sound_systems'] = True", "def Initialise(self):\n self.__m_Platform.Initialise()\n self.__m_Pump.Initialise( False )", "def platform_start(self):\n self.platform.start()", "def _setup_io_devices(self) -> None:\n # Add PCI\n self.platform.pci_host.pio = self.iobus.mem_side_ports\n\n # Add Ethernet card\n self.ethernet = IGbE_e1000(\n pci_bus=0, pci_dev=0, pci_func=0, InterruptLine=1, InterruptPin=1\n )\n\n self.ethernet.host = self.platform.pci_host\n self.ethernet.pio = self.iobus.mem_side_ports\n self.ethernet.dma = self.iobus.cpu_side_ports\n\n if self.get_cache_hierarchy().is_ruby():\n for device in self._off_chip_devices + self._on_chip_devices:\n device.pio = self.iobus.mem_side_ports\n\n else:\n for device in self._off_chip_devices:\n device.pio = self.iobus.mem_side_ports\n for device in self._on_chip_devices:\n device.pio = self.get_cache_hierarchy().get_mem_side_port()\n\n self.bridge = Bridge(delay=\"10ns\")\n self.bridge.mem_side_port = self.iobus.cpu_side_ports\n self.bridge.cpu_side_port = (\n self.get_cache_hierarchy().get_mem_side_port()\n )\n self.bridge.ranges = [\n AddrRange(dev.pio_addr, size=dev.pio_size)\n for dev in self._off_chip_devices\n ]\n\n # PCI\n self.bridge.ranges.append(AddrRange(0x2F000000, size=\"16MB\"))\n self.bridge.ranges.append(AddrRange(0x30000000, size=\"256MB\"))\n self.bridge.ranges.append(AddrRange(0x40000000, size=\"512MB\"))", "def setUp(self):\n self.platform = wirelesstagpy.WirelessTags(username=USERNAME, password=PASSWORD)\n self.tag_outdoor = wirelesstagpy.SensorTag(MOCK.OUTDOOR_PROBE, self.platform)\n self.platform._tags[\"fake-1\"] = self.tag_outdoor # pylint: disable=protected-access", "def _initialize_hardware(self):\n # Import\n try:\n from gpiozero import MCP3008\n except Exception as ex:\n logging.error('\\n *** ERROR importing gpiozero: {}'.format(ex))\n\n # Things failed, must be running locally, not on a widget, so don't\n # bother initializing the MCP3008\n return\n\n # Initialize the MCP3008\n try:\n self._sensor = MCP3008(channel=0)\n except Exception as ex:\n logging.error('\\n *** ERROR initializing MCP3008: {}'.format(ex))\n return\n\n # Start force loop thread\n threading.Thread(target=self._force_loop, daemon=True).start()", "def __init__(self):\n\n super().__init__()\n\n self.active = True\n self.driver = Driver.instance()\n self.sensor_manager = SensorManager.instance()\n\n self.pwm = Adafruit_PCA9685.PCA9685(address=0x40, busnum=1) # create PCA9685-object at I2C-port\n self.pwm.set_pwm_freq(50)\n\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(20, GPIO.OUT)\n GPIO.setup(21, GPIO.OUT)\n GPIO.setup(26, GPIO.OUT)\n self.driven_distance = 0", "def __init__(self):\n try: \n self.i2c = busio.I2C(board.SCL, board.SDA)\n self.mpu = adafruit_mpu6050.MPU6050(self.i2c)\n \n except: \n print(\"No IMU connection\")", "def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n 
self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)", "def _setup(self) -> None:\n self._api = get_api(\n self._password,\n self._host,\n self._username,\n self._port,\n self._ssl,\n )\n\n self._info = self._api.get_info()\n self.device_name = self._info.get(\"DeviceName\", DEFAULT_NAME)\n self.model = self._info.get(\"ModelName\")\n self.firmware_version = self._info.get(\"Firmwareversion\")\n\n for model in MODELS_V2:\n if self.model.startswith(model):\n self._method_version = 2", "def open(self):\n self._i2c.open(bus=self._i2c_bus)\n self._configure_i2c_library_functions()\n if self.debug:\n print('VL53L1X: Opened I2C bus {}'.format(self._i2c_bus))", "def use_i2c():\n _LIB.oled_click_use_i2c()", "def initialize(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def doInitializeDevice(self):\n super().doInitializeDevice()", "async def init_provider(self):\n self.dsp_name = \"OpenStack\"\n await self._provider.init(image_names=self.config[\"images\"].values())", "def __init__(self, address=0x68, **kwargs):\n I2CDevice.__init__(self, address, **kwargs)\n logger.info(\"Created new si5324 instance with address 0x{:02X}.\".format(address))\n self.iCAL_required = True # An iCAL is required at least once before run", "def __init__(self):\n GPIO.setwarnings(False)\n GPIO.cleanup() # Reset the high and low levels of the GPIO port\n #The following code defines the GPIO used to control the L298N chip. This definition is different for different Raspberry Pi driver boards.\n self.Motor_A_EN = 17\n self.Motor_B_EN = 4\n self.Motor_A_Pin1 = 27\n self.Motor_A_Pin2 = 18\n self.Motor_B_Pin1 = 21\n self.Motor_B_Pin2 = 26\n self.setup()", "def _init_io(self):\n GPIO.setwarnings(False)\n GPIO.setmode( GPIO.BCM )\n pins = [ self._spi_dc ]\n for pin in pins:\n GPIO.setup( pin, GPIO.OUT )", "def test_setup_platform(self, store_mock):\n config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n with assert_setup_component(1, ip.DOMAIN):\n setup_component(self.hass, ip.DOMAIN, config)\n self.hass.block_till_done()\n\n assert self.hass.states.get(\"image_processing.microsoftface_demo_camera\")" ]
[ "0.69220597", "0.69220597", "0.6695689", "0.66690606", "0.6616049", "0.647331", "0.6393633", "0.63410026", "0.6271916", "0.6267543", "0.61609125", "0.61256635", "0.60582995", "0.6043946", "0.60244364", "0.59609616", "0.59184724", "0.5909054", "0.5897445", "0.5890526", "0.5881234", "0.58695465", "0.58637387", "0.5825859", "0.582018", "0.58066183", "0.5801807", "0.5755231", "0.5738275", "0.57247293" ]
0.77334565
1
Configure a servo device in platform.
async def configure_servo(self, number: str) -> "ServoPlatformInterface": raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure_servo(self, board):\n self.servo = board.get_pin(f\"d:{self.pin}:p\")\n board.servo_config(\n pin = self.pin,\n min_pulse = 544,\n max_pulse = 2400,\n angle = 93\n )", "def configure_servo(self, config):\n raise NotImplementedError", "def servo_on(self):\n self.logger.info('Setting servo ON')\n self.electronics.move_servo(1)\n self.config['servo']['status'] = 1", "def set_param_motor():\n servo.setSpeed(0, 0) # max = 255\n servo.setAccel(0, 0)\n servo.setSpeed(1, 150) # max = 255\n servo.setAccel(1, 150)", "def SelectServo(self, servo):\n if servo == 'none':\n self._servo_port = None\n elif servo == 'any':\n self._servo_port = 0\n else:\n self._servo_port = int(servo)\n self._out.Notice('Servo port %s' % str(self._servo_port))", "def set_servo(name,servo,value):\n name = _lookup(name)\n servo_data = list(name) + [-1,-1,-1,-1]\n servo_data[servo + 1] = value\n mc.set('servo_values',servo_data)", "def servo_set_target(ch, pulse):\n\n # Pulse number is 4x pulse width (in microseconds)\n p_num = 4 * int(pulse)\n\n # Send command to servo controller\n servo_send_cmd(cmd_set_target, ch, p_num)", "def setservo(pidevice, axes, states=None, toignore=None, **kwargs):\n if not isdeviceavailable([GCS2Commands, GCS21Commands], pidevice):\n raise TypeError('Type %s of pidevice is not supported!' % type(pidevice).__name__)\n\n if not pidevice.HasSVO():\n return False\n if not axes:\n return True\n axes, states = getitemsvaluestuple(axes, states)\n if pidevice.HasRNP():\n axestorelax = [axis for axis, state in list(getservo(pidevice, axes).items()) if not state]\n if axestorelax:\n pidevice.RNP(axestorelax, [0.0] * len(axestorelax))\n waitonready(pidevice, **kwargs)\n eaxaxes = [axes[i] for i in range(len(axes)) if states[i]]\n enableaxes(pidevice, axes=eaxaxes, **kwargs)\n success = True\n toignore = [] if toignore is None else toignore\n toignore = [toignore] if not isinstance(toignore, list) else toignore\n toignore += [gcserror.E5_PI_CNTR_MOVE_WITHOUT_REF_OR_NO_SERVO, gcserror.E23_PI_CNTR_ILLEGAL_AXIS]\n for i, axis in enumerate(axes):\n try:\n pidevice.SVO(axis, states[i])\n except GCSError as exc: # no GCSRaise() because we want to log a warning\n if exc in toignore:\n debug('could not set servo for axis %r to %s: %s', axis, states[i], exc)\n success = False\n else:\n raise\n waitonready(pidevice, **kwargs)\n return success", "def servo_config(self, pin, min_pulse=544, max_pulse=2400, angle=0):\n if pin > len(self.digital) or self.digital[pin].mode == UNAVAILABLE:\n raise IOError(\"Pin %s is not a valid servo pin\")\n data = itertools.chain([pin], to_two_bytes(min_pulse),\n to_two_bytes(max_pulse))\n self.send_sysex(SERVO_CONFIG, data)\n \n # set pin._mode to SERVO so that it sends analog messages\n # don't set pin.mode as that calls this method\n self.digital[pin]._mode = SERVO\n self.digital[pin].write(angle)", "def configure(self):\n\n self.platform.configure()", "async def servo_config(self, pin, min_pulse=544, max_pulse=2400):\n #command = [pin, min_pulse & 0x7f, (min_pulse >> 7) & 0x7f, max_pulse & 0x7f,\n # (max_pulse >> 7) & 0x7f]\n\n self._digital_pins_directly[pin].ConfigServo(min_pulse, max_pulse)\n #await self._send_sysex(PrivateConstants.SERVO_CONFIG, command)", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])", "def servo_make_default(self):\n self.servo_config.save_as_default_config()", 
"def _DutControl(self, args):\n if self._servo_port is None:\n raise IOError('No servo access available, please use --servo')\n if self._servo_port:\n args.extend(['-p', '%s' % self._servo_port])\n return self._tools.Run('dut-control', args)", "def resetservo(self):\n debug('ControllerStartup.resetservo()')\n if self.servostates is not None:\n setservo(self.pidevice, self.servostates)\n elif self._databuf['servobuf']:\n setservo(self.pidevice, self._databuf['servobuf'])", "def servo_force(self, *args, **kwargs) -> Any:\n pass", "def init_servos():\n for i in range(0, 7):\n kit.servo[i].actuation_range = 180\n kit.servo[i].set_pulse_width_range(450, 2550)", "def set_servo(self, servo: int, position: Optional[ServoPosition]) -> None:\n if servo < 0 or servo >= self._num_servos:\n raise RuntimeError(\"That servo does not exist.\")", "def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> \"SwitchPlatformInterface\":\n raise NotImplementedError", "def setup_platform(hass, config, add_devices, discovery_info=None):\n name = config.get(CONF_NAME)\n mac = config.get(CONF_MAC)\n pin = config.get(CONF_PIN)\n\n add_devices([ProgtimeSwitch(mac, pin, name)])", "async def servo_config(self, pin, min_pulse=544, max_pulse=2400):\n command = [pin, min_pulse & 0x7f, (min_pulse >> 7) & 0x7f, max_pulse & 0x7f,\n (max_pulse >> 7) & 0x7f]\n\n await self._send_sysex(PrivateConstants.SERVO_CONFIG, command)", "async def configure_stepper(self, number: str, config: dict) -> \"StepperPlatformInterface\":\n raise NotImplementedError", "def app_principal_led():\n \"\"\"\n import serial\n ser = serial.Serial(0) # open first serial port\n print ser.portstr # check which port was really used\n ser.write(\"hello\") # write a string\n ser.close() # close port \n \"\"\"\n\n\n start = mpa.ModuloPyArduino()\n p, v = start.config_arduino()\n con = start.set_conection(p, v)\n\n\n print \"\\n Status of conection: \", con\n if con != 0:\n start.serial_loop_app(con, 1)\n else:\n pass\n\n con.close()", "def setup(self):\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(self.Motor_A_EN, GPIO.OUT)\n GPIO.setup(self.Motor_B_EN, GPIO.OUT)\n GPIO.setup(self.Motor_A_Pin1, GPIO.OUT)\n GPIO.setup(self.Motor_A_Pin2, GPIO.OUT)\n GPIO.setup(self.Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(self.Motor_B_Pin2, GPIO.OUT)\n self.motorStop() # Avoids automatic motor rotation after initialization\n try: # Try is used here to avoid errors due to repeated setting of PWM\n self.pwm_A = GPIO.PWM(self.Motor_A_EN, 1000)\n self.pwm_B = GPIO.PWM(self.Motor_B_EN, 1000)\n except:\n pass", "def setup_motor(self,pin_num):\n pi.set_servo_pulsewidth(pin_num, 2000)\n sleep(2)\n pi.set_servo_pulsewidth(pin_num, 500 )\n sleep(2)", "def __init__(self, address: int = PCA9685_ADDRESS, i2c = None, \n frequency: int = 26500000, resolution: int = 4096,\n servo_frequency: int = 50, **kwargs):\n i2c = ensureI2C(i2c)\n self._servos = {}\n self._servo_frequency = servo_frequency\n self._frequency = frequency\n self._resolution = resolution\n self._address = address\n self._device = i2c.get_i2c_device(address, **kwargs)\n\n self.set_all_pwm(0, 0)\n self._device.write8(MODE2, OUTDRV)\n self._device.write8(MODE1, ALLCALL)\n\n time.sleep(0.005) # wait for oscillator\n mode = self._device.readU8(MODE1)\n mode = mode & ~SLEEP # wake up (reset sleep)\n self._device.write8(MODE1, mode)\n time.sleep(0.005) # wait for oscillator\n self.set_pwm_freq(self._servo_frequency)\n logger.info(\"Registered controller on address %d\" % address)", "def 
configure(self):\n\n # instantiate Serial\n self.serial = serial.Serial()\n\n # set port_path, e.g. '/dev/ttyUSBx' or 'COMx'\n self.serial.port = self.port.device\n\n # set baudrate\n self.serial.baudrate = 115200", "def _on_config_changed(self, _):\n self._configure_pod()", "def pibooth_configure(cfg):", "def _connect_to_hardware(self):\n if False: # !!!TEMP:need to validate config...\n if len(self.config['ports']) > 1:\n self.log.fatal(\"only one slave com port is supported\")\n if len(self.config['ports']) == 0:\n self.log.warning(\"no communication port setted!\")\n return\n port = self.config['ports'][0]\n self.communicator = RaspSerialCommunicator(\n platform=self, port=port,\n baud=self.config['baud'])\n self.communicator = RaspSerialCommunicator(\n platform=self, port='/dev/ttyAMA0',\n baud=115200)" ]
[ "0.76254755", "0.7315433", "0.66170114", "0.64923394", "0.63826114", "0.6192792", "0.6184836", "0.6113773", "0.61004287", "0.6094591", "0.6075602", "0.6039231", "0.5992154", "0.5920275", "0.5867684", "0.58557546", "0.585078", "0.58460164", "0.58157", "0.58004034", "0.57591856", "0.5736523", "0.5727847", "0.57108986", "0.5700336", "0.5689581", "0.5677986", "0.5653145", "0.5632431", "0.55897665" ]
0.75352657
1
Return config section for additional stepper config items.
def get_stepper_config_section(cls) -> Optional[str]: return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_step_conf(self):\n return self.step_conf", "def get_section(self,name):\n if self.__config.has_section(name):\n data={}\n for opt,val in self.__config.items(name):\n data[opt]=val\n return data\n else:\n raise Exception(_('EVOGTK: Section \"%s\" does not exist in this preferences instance') % name)", "def validate_stepper_section(self, stepper: \"Stepper\", config: dict) -> dict:\n if self.get_stepper_config_section():\n spec = self.get_stepper_config_section() # pylint: disable-msg=assignment-from-none\n config = stepper.machine.config_validator.validate_config(spec, config, stepper.name)\n elif config:\n raise AssertionError(\"No platform_config supported but not empty {} for stepper {}\".\n format(config, stepper.name))\n\n return config", "def get_config_main_sections(self):\n self.sections_in_config = self.config_handle.sections()", "def _config_sections(self):\n data = []\n section_data = []\n for index, line in enumerate(self.running_config):\n if self._nextline_startswith_space(index):\n section_data.append(line)\n else:\n if len(section_data) > 0:\n section_data.append(line)\n data.append(section_data)\n section_data = []\n return data", "def to_config(self):\n steps = OrderedDict()\n for i, pset in enumerate(self.current):\n key_val = OrderedDict()\n for key in pset:\n if pset[key]['hidden']:\n continue\n key_val[key] = pset.get_value(key)\n steps[\"{}: {}\".format(i + 1, self.stepnames[i])] = key_val\n return configobj.ConfigObj(steps)", "def get_switch_config_section(cls):\n return None", "def configure(self, section):", "def get_switch_config_section(cls) -> Optional[str]:\n return None", "def get_config(self):\n return {'reduction': self.reduction, 'name': self.name}", "def config(self):\n return \"\\n\".join([ c.config(True) for p, c in self.configs_ ])", "def get_config_on_json(self):\n # load section CONFIG from data\n try:\n return self.json_data[\"CONFIG\"]\n except:\n constant.get_error(constant.ERROR_004)", "def get_coil_config_section(cls) -> Optional[str]:\n return None", "def get_config(self):\n\n return {section: self.sections[section].get_values() for section in self.sections}", "def _opt_config(self):\n return self._opt_method.config", "def section(self):\n return SECTION_NAME_TO_SECTION[self.section_name]", "def get_rec_config(self):\n conf_map = {}\n if len(self.reconstructions.text()) > 0:\n conf_map['reconstructions'] = str(self.reconstructions.text())\n if len(self.device.text()) > 0:\n conf_map['device'] = str(self.device.text()).replace('\\n', '')\n if len(self.alg_seq.text()) > 0:\n conf_map['algorithm_sequence'] = str(self.alg_seq.text()).replace('\\n', '')\n if len(self.beta.text()) > 0:\n conf_map['beta'] = str(self.beta.text())\n if len(self.support_area.text()) > 0:\n conf_map['support_area'] = str(self.support_area.text()).replace('\\n', '')\n if self.cont.isChecked():\n conf_map['cont'] = 'true'\n if len(self.cont_dir_button.text().strip()) > 0:\n conf_map['continue_dir'] = '\"' + str(self.cont_dir_button.text()).strip() + '\"'\n print('cont_dir', conf_map['continue_dir'])\n\n for feat_id in self.features.feature_dir:\n self.features.feature_dir[feat_id].add_config(conf_map)\n\n return conf_map", "def getSection(self, section, item):\n if self.config.has_section(section):\n if self.config.has_option(section, item):\n return self.config.get(section, item)\n return None", "def items(self):\n\t\treturn self.config_parser.items(self.section_name)", "def get_next_config(self):\n\n self.reset_trial()\n self._cur_config = self.get_default()\n 
return self._cur_config if len(self._results) == 0 else None", "def get_next_configuration():\n iteration_config = self.__iterables[self.__iterables_counter]\n\n for static_option in self.__static:\n if static_option not in self._config_dict:\n raise ConfigReaderError(\"Simulation configuration option '{0}' not found. Please check the SimulationConfigReader class for typos.\".format(static_option))\n\n iteration_config[static_option] = self._config_dict[static_option]\n\n return iteration_config", "def config_section_data():\n config_data = u\"\"\"[fn_sep]\nsep_base_path=/sepm/api/v1\nsep_auth_path=/sepm/api/v1/identity/authenticate\nsep_host=<SEPM server dns name or ip address>\nsep_port=8446\nsep_username=<username>\nsep_password=<password>\nsep_domain=<SEP domain name>\n# Optional settings for access to SEPM via a proxy.\n#http_proxy=http://proxy:80\n#https_proxy=http://proxy:80\n# Limit result sent to Resilient, add full result as an attachment.\nsep_results_limit=200\n# Period of time (seconds) to wait for all endpoints to return a scan result.\nsep_scan_timeout=1800\n\"\"\"\n return config_data", "def get_config(self):\n return {\"name\": self.name, \"tunable\": self.tunable}", "def get_config(self):\n return super().get_config()", "def get_config(self):\n config = {\n 'multichannel': self._multichannel,\n 'complex_part': self._complex_part\n }\n base_config = super().get_config()\n return {**base_config, **config}", "def get_coil_config_section(cls):\n return None", "def get_config_descr(self, name):\n return self.configs[name][1]", "def config_section_data():\n config_data = u\"\"\"[feeds]\n# comma separated section names. ex. sqlserver_feed,file_feed\nfeed_names=<your feeds>\nreload=true\n# use reload_types to limit the types of objects when reload=true.\n# Ex: incident,task,note,artifact,attachment,<data_table_api_name>\nreload_types=\n# set to true if ElasticSearch errors occur during reload=true\nreload_query_api_method=false\n\n# feed_data is the default message destination that will be listened to\nqueue=feed_data\n\n# set to true if attachment data should be part of payload send to plugins\ninclude_attachment_data=false\n# if necessary, specify the supported workspace (by label, case sensitive) and the list of feeds associated with it\n# ex: 'Default Workspace': ['sqlserver_feed'], 'workspace A': ['kafka_feed', 'resilient_feed']\nworkspaces=\n\"\"\"\n return config_data", "def get_config_section(self, title_startswith, return_all=True):\n for section in self._config_sections:\n if section[0].startswith(title_startswith):\n if return_all:\n yield section\n else:\n return section", "def gather_configuration(self, config):\n config['log']['logging_level'] = self.logDisplay.get_logging_level()\n\n # MIDI\n config['midi']['winch_midi_input'] = self.winchMidiInputCombo.current_item()\n config['midi']['midi_output'] = self.midiOutputCombo.current_item()\n\n # OSC\n addr, port = self.oscListenerConfig.get_OSC_port()\n config['osc']['listener_addr'] = addr\n config['osc']['listener_port'] = str(port)\n addr, port = self.oscSenderConfig.get_OSC_port()\n config['osc']['sender_addr'] = addr\n config['osc']['sender_port'] = str(port)\n\n # DMX\n config['dmx']['dmx_output_serial_port'] = self.dmxSelect.current_item()\n\n # winches\n for i, winchSelect in enumerate(self.winchSelects):\n key = \"winch_%d_output_serial_port\" % (i+1)\n config['winches'][key] = winchSelect.current_item()\n\n return" ]
[ "0.633057", "0.6099127", "0.58716476", "0.58429116", "0.5834169", "0.5792962", "0.5772797", "0.56489396", "0.5637136", "0.5587155", "0.5559611", "0.55529994", "0.55025566", "0.5501261", "0.54880255", "0.54546607", "0.544704", "0.5441845", "0.54394555", "0.5427645", "0.5409087", "0.5400461", "0.5389107", "0.5385117", "0.53795576", "0.53410095", "0.53164184", "0.5304725", "0.5286367", "0.52648044" ]
0.7092227
0
Validate a stepper config for platform.
def validate_stepper_section(self, stepper: "Stepper", config: dict) -> dict: if self.get_stepper_config_section(): spec = self.get_stepper_config_section() # pylint: disable-msg=assignment-from-none config = stepper.machine.config_validator.validate_config(spec, config, stepper.name) elif config: raise AssertionError("No platform_config supported but not empty {} for stepper {}". format(config, stepper.name)) return config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _validate_config(self):\n pass", "def validate_config(self):\n pass", "def validate_config(self):\n pass", "def config_validate(ctx, **kwargs):\n # Validates pf9-express config file and obtains Auth Token\n #Load Active Config into ctx\n GetConfig(ctx).GetActiveConfig()\n #Get Token\n token = GetToken().get_token_v3(\n ctx.params[\"du_url\"],\n ctx.params[\"du_username\"],\n ctx.params[\"du_password\"],\n ctx.params[\"du_tenant\"] )\n if token is not None:\n click.echo('Config Validated!')\n click.echo('Token: %s' % token)\n else:\n click.echo('Config Validation Failed!')", "def validate_config(app: App, config: Config):\n for state_machine in config.state_machines.values():\n _validate_state_machine(app, state_machine)", "def validate_config(self, config: Dict) -> bool:\n raise NotImplementedError", "def _validate(self):\n config = self.config\n\n # Reject unknown sections.\n valid_sections = set((\n self.CUSTOM_HOOKS_SECTION,\n self.BUILTIN_HOOKS_SECTION,\n self.BUILTIN_HOOKS_OPTIONS_SECTION,\n self.TOOL_PATHS_SECTION,\n self.OPTIONS_SECTION,\n ))\n bad_sections = set(config.sections()) - valid_sections\n if bad_sections:\n raise ValidationError('%s: unknown sections: %s' %\n (self.paths, bad_sections))\n\n # Reject blank custom hooks.\n for hook in self.custom_hooks:\n if not config.get(self.CUSTOM_HOOKS_SECTION, hook):\n raise ValidationError('%s: custom hook \"%s\" cannot be blank' %\n (self.paths, hook))\n\n # Reject unknown builtin hooks.\n valid_builtin_hooks = set(rh.hooks.BUILTIN_HOOKS.keys())\n if config.has_section(self.BUILTIN_HOOKS_SECTION):\n hooks = set(config.options(self.BUILTIN_HOOKS_SECTION))\n bad_hooks = hooks - valid_builtin_hooks\n if bad_hooks:\n raise ValidationError('%s: unknown builtin hooks: %s' %\n (self.paths, bad_hooks))\n elif config.has_section(self.BUILTIN_HOOKS_OPTIONS_SECTION):\n raise ValidationError('Builtin hook options specified, but missing '\n 'builtin hook settings')\n\n if config.has_section(self.BUILTIN_HOOKS_OPTIONS_SECTION):\n hooks = set(config.options(self.BUILTIN_HOOKS_OPTIONS_SECTION))\n bad_hooks = hooks - valid_builtin_hooks\n if bad_hooks:\n raise ValidationError('%s: unknown builtin hook options: %s' %\n (self.paths, bad_hooks))\n\n # Verify hooks are valid shell strings.\n for hook in self.custom_hooks:\n try:\n self.custom_hook(hook)\n except ValueError as e:\n raise ValidationError('%s: hook \"%s\" command line is invalid: '\n '%s' % (self.paths, hook, e))\n\n # Verify hook options are valid shell strings.\n for hook in self.builtin_hooks:\n try:\n self.builtin_hook_option(hook)\n except ValueError as e:\n raise ValidationError('%s: hook options \"%s\" are invalid: %s' %\n (self.paths, hook, e))\n\n # Reject unknown tools.\n valid_tools = set(rh.hooks.TOOL_PATHS.keys())\n if config.has_section(self.TOOL_PATHS_SECTION):\n tools = set(config.options(self.TOOL_PATHS_SECTION))\n bad_tools = tools - valid_tools\n if bad_tools:\n raise ValidationError('%s: unknown tools: %s' %\n (self.paths, bad_tools))\n\n # Reject unknown options.\n valid_options = set(self.VALID_OPTIONS)\n if config.has_section(self.OPTIONS_SECTION):\n options = set(config.options(self.OPTIONS_SECTION))\n bad_options = options - valid_options\n if bad_options:\n raise ValidationError('%s: unknown options: %s' %\n (self.paths, bad_options))", "def _validate_runner_config(self, runner_config):\n runner_config_validator = create_runner_config_validator()\n runner_config_validator.validate(runner_config)", "def validate_config(self):\n\n # LOCALHOST\n if 
self.location == 'localhost':\n if 'browserName' not in self.config.keys():\n msg = \"Add the 'browserName' in your local_config: e.g.: 'Firefox', 'Chrome', 'Safari'\" # noqa\n self.runner.critical_log(msg)\n raise BromeBrowserConfigException(msg)\n\n # EC2\n elif self.location == 'ec2':\n self.validate_ec2_browser_config()\n\n # VIRTUALBOX\n elif self.location == 'virtualbox':\n self.validate_virtualbox_config()", "def validate_config(self):\n reference = data_file(\"../config/template/minimum_aiscalator.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"In Global Application Configuration file \"\n _validate_configs(self._app_conf, ref, msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/aiscalator.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"In Global Application Configuration file \"\n _validate_configs(self._app_conf, ref, msg,\n missing_exception=False,\n type_mismatch_exception=True)\n if self._step_name:\n reference = data_file(\"../config/template/minimum_step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in step named \" + self._step_name\n _validate_configs(self._step,\n ref[\"steps\"][\"Untitled\"],\n msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in step named \" + self._step_name\n _validate_configs(self._step,\n ref[\"steps\"][\"Untitled\"],\n msg,\n missing_exception=False,\n type_mismatch_exception=True)\n if self._dag_name:\n reference = data_file(\"../config/template/minimum_dag.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in dag named \" + self._dag_name\n _validate_configs(self._dag,\n ref[\"dags\"][\"Untitled\"],\n msg,\n missing_exception=True,\n type_mismatch_exception=True)\n reference = data_file(\"../config/template/step.conf\")\n ref = pyhocon.ConfigFactory.parse_file(reference)\n msg = \"in dag named \" + self._dag_name\n _validate_configs(self._dag,\n ref[\"dags\"][\"Untitled\"],\n msg,\n missing_exception=False,\n type_mismatch_exception=True)", "def properties_validation(config_data: Dict = None) -> bool:\n\n if config_data is None:\n config_file = os.path.join(\n os.path.dirname(__file__), 'server-config.json')\n with open(config_file) as config:\n config_data = json.load(config)\n platform_properties, err = PlatformPropertiesSchema().load(config_data)\n\n # Raise error if required property is not provided\n if err:\n raise MissingRequiredParameterError(err)\n\n # Raise error if unsupported protocol or module\n for protocol in platform_properties.supported_transfer_protocols:\n if protocol not in SUPPORTED_PROTOCOLS:\n err = str.format(\"Unsupported protocol {}\", protocol)\n raise ValueError(err)\n for module in platform_properties.supported_modules:\n if module not in SUPPORTED_MODULES:\n err = str.format(\"Unsupported module {}\", module)\n raise ValueError(err)\n\n # Raise error if https not in supported protocols\n if \"https\" not in platform_properties.supported_transfer_protocols:\n raise MissingRequiredParameterError(\n 'CARMIN 0.3 requires https support')\n\n # Raise error if minTimeout is greater than maxTimeout\n if (platform_properties.max_authorized_execution_timeout != 0\n and platform_properties.min_authorized_execution_timeout >\n platform_properties.max_authorized_execution_timeout):\n raise ValueError('maxTimeout must be greater than minTimeout')\n return 
True", "def valid_configuration(self):\n valid = True\n\n if (not self.__config.suffix()) and (self.__config.output_dir() == self.__config.input_dir()):\n print(\"ERROR: output_dir directory cannot be the same as input_dir with an empty suffix!\")\n valid = False\n if not self.__config.public_key():\n print(\"ERROR: public_key not set! Set it through 'pdfworkshop config public_key <your_key>'. \"\n \"A free API key can be obtained from https://developer.ilovepdf.com/\")\n valid = False\n return valid", "def validate_config(config):\n # check if paths are valid\n check_paths = {\n 'data_path': r'data$',\n 'master_list_path': r'master_list\\.csv$',\n 'duplicate_list_path': r'duplicate_list\\.csv$',\n 'log_path': r'data[\\\\\\/]jobfunnel.log$',\n 'filter_list_path': r'data[\\\\\\/]filter_list\\.json$',\n }\n\n for path, pattern in check_paths.items():\n if not re.search(pattern, config[path]):\n raise ConfigError(path)\n # check if the provider list only consists of supported providers\n if not set(config['providers']).issubset(PROVIDERS):\n raise ConfigError('providers')\n\n # check validity of region settings\n validate_region(config['search_terms']['region'])\n\n # check validity of delay settings\n validate_delay(config['delay_config'])\n\n # check the validity of max_listing_days settings\n if(config['max_listing_days'] is not None and config['max_listing_days'] < 0):\n raise ConfigError('max_listing_days')", "def validate_machine_config(self):\n\n if len(self.rotors) < 3 or self.reflector is None:\n raise ValueError('Invalid configuration. Enigma Machine must have at least 3 rotors and 1 reflector')", "def validate_settings(_cfg, _ctx):\n pass", "def validate(config):\n runner = ScenarioRunner._get_cls(config.get(\"type\", \"continuous\"))\n jsonschema.validate(config, runner.CONFIG_SCHEMA)", "def validate_config(self):\n config = self.config\n\n # which doc types are enabled\n need_at_least_one = ['GOOGLE_DRIVE_ENABLED','GITHUB_ENABLED','DISQUS_ENABLED']\n found_one = False\n for n in need_at_least_one:\n if n in config.keys():\n found_one = True\n break\n if not found_one:\n raise Exception(\"Error: need at least one of: %s\"%(\", \".join(need_at_least_one)))\n\n if 'GOOGLE_DRIVE_ENABLED' in config.keys():\n if config['GOOGLE_DRIVE_ENABLED']:\n if 'GOOGLE_DRIVE_CREDENTIALS_FILE' in config.keys():\n if os.path.basename(config['GOOGLE_DRIVE_CREDENTIALS_FILE']) != 'credentials.json':\n raise Exception(\"Error: the file specified with GOOGLE_DRIVE_CREDENTIALS_FILE in the config file must have a filename of 'credentials.json'\")", "def state_failsafe_validate(cfg, app, win, events):", "def validate_config_dict(self):\n config_options = [\"pipeline_name\",\n \"num_processors\",\n \"num_sessions_at_once\",\n \"available_memory\",\n \"cluster_system\",\n \"output_directory\",\n \"working_directory\",\n \"template_head_for_anat\",\n \"exclude_zeros\",\n \"start_idx\",\n \"stop_idx\",\n \"write_report\",\n \"write_graph\",\n \"write_all_outputs\",\n \"upload_to_s3\",\n \"bucket_prefix\",\n \"bucket_out_prefix\",\n \"local_prefix\",\n \"bucket_name\",\n \"creds_path\"]\n invalid = []\n for param in self._config.keys():\n if param not in config_options:\n invalid.append(param)\n if len(invalid) > 0:\n err = \"\\n[!] The following parameters in your configuration \" \\\n \"file are not recognized. 
Double-check the pipeline \" \\\n \"configuration template.\\n\"\n err += \"\\n\".join([x for x in invalid])\n raise Exception(err)\n else:\n return 0", "def validateConfig(config):\n if getattr(config, 'TaskWorker', None) is None:\n return False, \"Configuration problem: Task worker section is missing. \"\n return True, 'Ok'", "def state_processing_validate(cfg, app, win, events):", "def test_valid_configuration(self):\n\n conf = [\n 'gasoline', '228i', 'model_luxury_line', 'silver', 'rims_384',\n 'tapistry_black', 'steptronic', 'smoker_package', 'tow_hook'\n ]\n\n attr_val_ids = self.get_attr_val_ids(conf)\n validation = self.cfg_tmpl.validate_configuration(attr_val_ids)\n self.assertTrue(validation, \"Valid configuration failed validation\")", "def validate(self):\n AcceleratorType.validate(self.accelerator_type)\n gcp.validate_machine_configuration(self.cpu_cores,\n self.memory,\n self.accelerator_type,\n self.accelerator_count)", "def validate_config(self, changed):\n logger.debug(\"[%s] Validating config (Legacy path)\", self.name)\n if not self.to_validate(changed):\n return\n # Validate (Legacy Path)\n from noc.cm.engine import Engine\n\n engine = Engine(self)\n try:\n engine.check()\n except: # noqa\n logger.error(\"Failed to validate config for %s\", self.name)\n error_report()", "def validate_config(self):\n\n ServerHeraldNotifyBase.validate_config(self)\n\n # Prowl requires an API key\n if not self.config_has('prowl'):\n print ('`prowl` notification type requires a Prowl API key to be '\n 'specified in the config file.')\n sys.exit(1)\n\n if not self.config_has('prowl', 'apikey'):\n print 'Prowl requires an API key in the config file'\n sys.exit(1)", "def _validate(self, config):\n assert isinstance(config, BaseConfig), \\\n \"Configuration should be instance of `BaseConfig`, but given {}\".format(type(config))", "def validate(self, config_json):\n pass", "def _verify_options(config: configuration.Config) -> None:\n\n if not config.config['species']:\n log._logger.error('You must specify a species (-s/--species)')\n exit(1)\n\n if config.config['hpc'] and config.config['local']:\n log._logger.error('You can only use one of the config options (hpc/local)')\n exit(1)\n\n if config.config['hpc'] and config.config['custom']:\n log._logger.error('You can only use one of the config options (hpc/custom)')\n exit(1)\n\n if config.config['local'] and config.config['custom']:\n log._logger.error('You can only use one of the config options (local/custom)')\n exit(1)\n\n if (not config.config['hpc']) and\\\n (not config.config['local']) and\\\n (not config.config['custom']):\n log._logger.error(\n 'You must specify a compute cluster environment (hpc/local/custom)'\n )\n exit(1)\n\n if config.config['custom'] and (not config.config['scheduler']):\n log._logger.error(\n 'The custom compute environment requires a scheduler address to be set'\n )\n exit(1)", "def check_config(config):\n pass", "def _check_config(self):" ]
[ "0.6644383", "0.6638077", "0.6638077", "0.62906194", "0.6161882", "0.61592", "0.6156101", "0.61285514", "0.6109975", "0.60931396", "0.59612024", "0.5943361", "0.5935539", "0.59314954", "0.59131944", "0.5908969", "0.58302087", "0.5828581", "0.57923645", "0.5778556", "0.5759289", "0.5753191", "0.5746433", "0.5743605", "0.5716185", "0.5684707", "0.5675019", "0.566476", "0.5652916", "0.56346095" ]
0.75444114
0
Configure a smart stepper (axis) device in platform.
async def configure_stepper(self, number: str, config: dict) -> "StepperPlatformInterface": raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure_stepper(self):\n self.logger.info('configurating stepper')\n if 'Z' in self.current_axis:\n self.anc350_instrument.configure_stepper('ZPiezoStepper', self.settings['amplitudeZ'] * ur('V'), self.settings['frequencyZ'] * ur('Hz'))\n else:\n self.anc350_instrument.configure_stepper('XPiezoStepper', self.settings['amplitudeX'] * ur('V'), self.settings['frequencyX'] * ur('Hz'))\n self.anc350_instrument.configure_stepper('YPiezoStepper', self.settings['amplitudeY'] * ur('V'), self.settings['frequencyY'] * ur('Hz'))\n\n self.gui.groupBox_actions.setObjectName(\"Colored_actions\")\n self.gui.groupBox_actions.setStyleSheet(\"QGroupBox#Colored_actions {border: 1px solid blue; border-radius: 9px;}\")\n\n self.gui.stackedWidgetMoving.setEnabled(True)\n\n self.get_move()", "def configure(self):\n\n self.platform.configure()", "def _setup(self) -> None:\n # Call base implementation\n super()._setup()\n\n # Configure the low-level integrator\n engine_options = self.simulator.engine.get_options()\n engine_options[\"stepper\"][\"iterMax\"] = 0\n engine_options[\"stepper\"][\"dtMax\"] = min(0.02, self.step_dt)\n engine_options[\"stepper\"][\"logInternalStepperSteps\"] = False\n\n # Set maximum computation time for single internal integration steps\n if self.debug:\n engine_options[\"stepper\"][\"timeout\"] = 0.0\n else:\n engine_options[\"stepper\"][\"timeout\"] = 2.0\n\n # Enable logging of geometries in debug mode\n if self.debug:\n engine_options[\"telemetry\"][\"isPersistent\"] = True\n\n # Update engine options\n self.simulator.engine.set_options(engine_options)\n\n # Set robot in neutral configuration\n qpos = self._neutral()\n framesForwardKinematics(\n self.robot.pinocchio_model, self.robot.pinocchio_data, qpos)", "async def test_manual_configuration_update_configuration(hass):\n device = await setup_axis_integration(hass)\n\n result = await hass.config_entries.flow.async_init(\n AXIS_DOMAIN, context={\"source\": \"user\"}\n )\n\n assert result[\"type\"] == \"form\"\n assert result[\"step_id\"] == \"user\"\n\n mock_device = Mock()\n mock_device.vapix.params.system_serialnumber = MAC\n\n with patch(\n \"homeassistant.components.axis.config_flow.get_device\",\n return_value=mock_device,\n ):\n result = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n user_input={\n CONF_HOST: \"2.3.4.5\",\n CONF_USERNAME: \"user\",\n CONF_PASSWORD: \"pass\",\n CONF_PORT: 80,\n },\n )\n\n assert result[\"type\"] == \"abort\"\n assert result[\"reason\"] == \"already_configured\"\n assert device.host == \"2.3.4.5\"", "def setup_platform(hass, config, add_devices, discovery_info=None):\n name = config.get(CONF_NAME)\n mac = config.get(CONF_MAC)\n pin = config.get(CONF_PIN)\n\n add_devices([ProgtimeSwitch(mac, pin, name)])", "def setup_figure(self):\n # connect ui widgets to measurement/hardware settings or functions\n self.settings.save_video.connect_to_widget(self.ui.save_video_checkBox)\n self.settings.track_ant.connect_to_widget(self.ui.track_ant_checkBox)\n \n self.ui.start_pushButton.clicked.connect(self.start)\n self.ui.interrupt_pushButton.clicked.connect(self.interrupt)\n self.ui.up_pushButton.clicked.connect(self.daqmotor.operations['up'])\n self.ui.down_pushButton.clicked.connect(self.daqmotor.operations['down'])\n self.ui.left_pushButton.clicked.connect(self.daqmotor.operations['left'])\n self.ui.right_pushButton.clicked.connect(self.daqmotor.operations['right'])\n self.daqmotor.settings.manual.connect_to_widget(self.ui.manual_checkBox)\n 
self.daqmotor.settings.manual_steps.connect_to_widget(self.ui.manual_steps_doubleSpinBox)\n \n self.daqmotor.settings.x.connect_to_widget(self.ui.x_doubleSpinBox)\n self.daqmotor.settings.y.connect_to_widget(self.ui.y_doubleSpinBox)\n self.daqmotor.settings.move_to_x.connect_to_widget(self.ui.move_to_x_doubleSpinBox)\n self.daqmotor.settings.move_to_y.connect_to_widget(self.ui.move_to_y_doubleSpinBox)\n self.ui.move_to_pushButton.clicked.connect(self.daqmotor.operations['move_to'])\n self.ui.zero_pushButton.clicked.connect(self.daqmotor.operations['zero'])\n self.ui.home_pushButton.clicked.connect(self.daqmotor.operations['home'])\n \n # Set up pyqtgraph graph_layout in the UI\n self.wide_cam_layout=pg.GraphicsLayoutWidget()\n self.track_cam_layout=pg.GraphicsLayoutWidget()\n self.tracker_layout=pg.GraphicsLayoutWidget()\n self.ui.wide_cam_groupBox.layout().addWidget(self.wide_cam_layout)\n self.ui.track_cam_groupBox.layout().addWidget(self.track_cam_layout)\n self.ui.tracker_groupBox.layout().addWidget(self.tracker_layout)\n \n #create camera image graphs\n self.wide_cam_view=pg.ViewBox()\n self.wide_cam_layout.addItem(self.wide_cam_view)\n self.wide_cam_image=pg.ImageItem()\n self.wide_cam_view.addItem(self.wide_cam_image)\n \n self.track_cam_view=pg.ViewBox()\n self.track_cam_layout.addItem(self.track_cam_view)\n self.track_cam_image=pg.ImageItem()\n self.track_cam_view.addItem(self.track_cam_image)\n \n self.tracker_view=pg.ViewBox()\n self.tracker_layout.addItem(self.tracker_view)\n self.tracker_image=pg.ImageItem()\n self.tracker_view.addItem(self.tracker_image)\n \n # initiate tracker buffer\n self.tracker_data = np.zeros((64,64),dtype = np.uint8)\n \n #counter used for reducing refresh rate\n self.wide_disp_counter = 0\n self.track_disp_counter = 0", "def configure_step(self):\n\n pass", "def configure_step(self):\n pass", "def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and 
mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n 
_update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True", "def _setup_sensor ( self ):\n self.spectral = Spectral ( np.array([450, 520, 630, 770., 1550, 2090.] ),\n np.array([ 520, 600, 690, 900., 1750., 2350.] ) )", "def _setup_sensor ( self ):\n self.spectral = Spectral ( np.array([500, 610, 780, 1580.] ),\n np.array([590, 680, 890, 1750.] ) )", "def configure_servo(self, board):\n self.servo = board.get_pin(f\"d:{self.pin}:p\")\n board.servo_config(\n pin = self.pin,\n min_pulse = 544,\n max_pulse = 2400,\n angle = 93\n )", "async def test_flow_manual_configuration(hass):\n result = await hass.config_entries.flow.async_init(\n AXIS_DOMAIN, context={\"source\": \"user\"}\n )\n\n assert result[\"type\"] == \"form\"\n assert result[\"step_id\"] == \"user\"\n\n with patch(\"axis.AxisDevice\") as mock_device:\n\n setup_mock_axis_device(mock_device)\n\n result = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n user_input={\n CONF_HOST: \"1.2.3.4\",\n CONF_USERNAME: \"user\",\n CONF_PASSWORD: \"pass\",\n CONF_PORT: 80,\n },\n )\n\n assert result[\"type\"] == \"create_entry\"\n assert result[\"title\"] == f\"prodnbr - {MAC}\"\n assert result[\"data\"] == {\n CONF_HOST: \"1.2.3.4\",\n CONF_USERNAME: \"user\",\n CONF_PASSWORD: \"pass\",\n CONF_PORT: 80,\n CONF_MAC: MAC,\n CONF_MODEL: \"prodnbr\",\n CONF_NAME: \"prodnbr 0\",\n }", "def setup_device(device):\n try:\n # Gets around \"Resource busy\" errors\n device.detach_kernel_driver(0)\n except Exception:\n pass\n device.set_configuration()", "def pibooth_configure(cfg):", "def setup_platform(hass, config, add_devices, discovery_info=None):\n thread1 = QQ(config[QQ_NUMBER])\n thread1.start()\n object_qq = Qqsensor(hass, QQ_NUMBER, thread1)\n add_devices([object_qq])", "def setup(self):\n\n self._enable_torque(self._reg.TORQUE_ENABLE)\n self.change_operating_mode(self._reg.MODE_EXT_POSI)\n # set to max velocity\n self.change_veloity(self._default_velocity)", "def configure(self):\n\n # instantiate Serial\n self.serial = serial.Serial()\n\n # set port_path, e.g. 
'/dev/ttyUSBx' or 'COMx'\n self.serial.port = self.port.device\n\n # set baudrate\n self.serial.baudrate = 115200", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n add_devices_callback([\n HE853Switch('OviSwitch', STATE_ON),\n HE853Switch('AC', STATE_OFF)\n ])", "def setup_platform(hass, config, add_entities, discovery_info=None):\n devices = []\n dev = discovery_info.get(\"dev\")\n param = discovery_info.get(\"param\")\n devices = []\n for idx in dev['data']:\n if dev['devtype'] in OT_SENSOR_TYPES and idx in [\"Z\",\"V\",\"P3\",\"P4\"]:\n devices.append(LifeSmartSensor(dev,idx,dev['data'][idx],param))\n else:\n devices.append(LifeSmartSensor(dev,idx,dev['data'][idx],param))\n add_entities(devices)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n # Add devices\n add_devices([SemsSensor(\"SEMS Portal\", config)], True)", "def setup_platform(hass, config, add_entities, discovery_info=None):\n name = config.get(CONF_NAME)\n host = config.get(CONF_HOST)\n port = config.get(CONF_PORT)\n icon = config.get(CONF_ICON)\n if not icon:\n icon = 'mdi:television'\n server = 'http://192.168.0.12:8008/ssdp/device-desc.xml'\n client = pydial.DialClient(server)\n device = client.get_device_description()\n status = device.friendly_name\n add_entities([DialSensor(name,host,port,icon,status)])", "def setup_mock_axis_device(mock_device):\n\n def mock_constructor(host, username, password, port, web_proto):\n \"\"\"Fake the controller constructor.\"\"\"\n mock_device.host = host\n mock_device.username = username\n mock_device.password = password\n mock_device.port = port\n return mock_device\n\n mock_device.side_effect = mock_constructor\n mock_device.vapix.params.system_serialnumber = MAC\n mock_device.vapix.params.prodnbr = \"prodnbr\"\n mock_device.vapix.params.prodtype = \"prodtype\"\n mock_device.vapix.params.firmware_version = \"firmware_version\"", "def continue_setup_platform(hass, config, token, add_devices, discovery_info=None):\n if \"trakt\" in _CONFIGURING:\n hass.components.configurator.request_done(_CONFIGURING.pop(\"trakt\"))\n \n add_devices([TraktMyShowCalendarSensor(hass, config, token)], True)", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n lights = []\n for channel, device_config in config[CONF_DEVICES].items():\n device = {}\n device[\"name\"] = device_config[CONF_NAME]\n device[\"dimmable\"] = device_config[\"dimmable\"]\n device[\"channel\"] = channel\n device[\"driver\"] = config[CONF_DRIVER]\n device[\"host\"] = config[CONF_HOST]\n device[\"port\"] = config[CONF_PORT]\n lights.append(FutureNowLight(device))\n\n add_entities(lights, True)", "def init():\n\n global leftDriverStick\n global rightDriverStick\n global goGamePad\n\n try:\n leftDriverStick = T16000M(0)\n except:\n print('OI: Error - Could not instantiate Left Driver Stick on USB port 0!!!')\n\n try:\n rightDriverStick = T16000M(1)\n except:\n print('OI: Error - Could not instantiate Right Driver Stick on USB port 0!!!')\n\n try:\n goGamePad = Joystick(2)\n except:\n print('OI: Error - Could not instantiate Right Driver Stick on USB port 2!!!')\n\n\n # ----------------------------------------------------------\n # Driver 
Controls\n # ----------------------------------------------------------\n #global resetYawBtn\n #resetYawBtn = JoystickButton(rightDriverStick, config.btnResetYawAngleIndex)\n #resetYawBtn.whenPressed(NavxResetYawAngle())\n\n global btnDriveSlow\n btnDriveSlow = JoystickButton(leftDriverStick, config.btnDriveSlow)\n \n global btnEnableLightSensor\n btnEnableLightSensor = JoystickButton(leftDriverStick, config.btnEnableLightSensorIndex)\n\n global btnExtendAll\n btnExtendAll = JoystickButton(rightDriverStick, config.btnExtendAllIndex)\n btnExtendAll.whenPressed(ExtendAll())\n\n global btnRetract\n btnRetract = JoystickButton(rightDriverStick, config.btnRetractAllIndex)\n btnRetract.whenPressed(RetractAll())\n\n global btnExtendFront\n btnExtendFront = JoystickButton(rightDriverStick, config.btnExtendFrontIndex)\n btnExtendFront.whenPressed(ExtendFront())\n\n global btnExtendBack\n btnExtendBack = JoystickButton(rightDriverStick, config.btnExtendBackIndex)\n btnExtendBack.whenPressed(ExtendBack())\n\n global btnRetractFront\n btnRetractFront = JoystickButton(rightDriverStick, config.btnRetractFrontIndex)\n btnRetractFront.whenPressed(RetractFront())\n\n global btnCargoGrabTog\n btnCargoGrabTog = JoystickButton(goGamePad, config.btnHatchGrabTogIndex)\n btnCargoGrabTog.whenPressed(ExtendBack())\n \n \"\"\"\n global btnResetEncoders\n btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)\n btnResetEncoders.whenPressed(TankDriveResetEncoders())\n \"\"\"\n\n \"\"\"\n global axisElevator\n axisElevator = JoystickAxis(goGamePad, config.axisElevatorIndex)\n axisElevator. #??? idk how to configure joystick axis\n \"\"\"\n\n \"\"\"\n global btnRampTog\n btnRampTog = JoystickButton(goGamePad, config.btnRampTogIndex)\n btnRampTog.whenPressed(ExtendFront())\n \"\"\"\n #global btnResetEncoders\n #btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)\n #btnResetEncoders.whenPressed(TankDriveResetEncoders())\n\n # These variable names are inconsistent, need to be fixed!!!!\n #global btnRampExtendTog\n #btnRampExtendTog = JoystickButton(goGamePad, config.btnRampExtendTogIndex)\n #btnRampExtendTog.whenPressed(RampExtend())\n\n #global btnRampRetractTog\n #btnRampRetractTog = JoystickButton(goGamePad, config.btnRampRetractTogIndex)\n #btnRampRetractTog.whenPressed(RampRetract())", "def robotInit(self):\n\n #Initialize Networktables\n self.sd = NetworkTables.getTable('SmartDashboard')\n\n \n #Set up motors to drive robot\n self.M2 = wpilib.VictorSP(2)\n self.M3 = wpilib.VictorSP(3)\n #self.M2.setInverted(True)\n #self.M3.setInverted(True)\n self.left = wpilib.SpeedControllerGroup(self.M2,self.M3)\n \n self.M0 = wpilib.VictorSP(0)\n self.M1 = wpilib.VictorSP(1)\n self.right = wpilib.SpeedControllerGroup(self.M0,self.M1)\n self.drive = wpilib.drive.DifferentialDrive(self.left, self.right)\n \n \n self.stick = wpilib.Joystick(1)\n self.timer = wpilib.Timer()\n #Camera\n wpilib.CameraServer.launch()\n #Servo\n self.SV1 = wpilib.Servo(9)\n self.SV2 = wpilib.Servo(8) \n #Dashboard\n NetworkTables.initialize(server='10.61.62.2')\n #Switches\n self.SW0 = wpilib.DigitalInput(0)\n self.SW1 = wpilib.DigitalInput(1)\n #Elevator\n self.E = wpilib.VictorSP(5)\n self.prepareCubeFlag = 0\n self.grabCubeFlag = 0\n self.deliverCubeFlag = 0\n self.adjustLeftFlag=0\n self.adjustRightFlag=0\n self.driveFlag=0\n #Gyro\n self.gyro = wpilib.ADXRS450_Gyro(0)\n self.gyro.reset()\n #All possible autonomous routines in a sendable chooser\n '''\n self.chooser = 
wpilib.SendableChooser()\n self.chooser.addDefault(\"None\", '4')\n self.chooser.addObject(\"left-LeftScale\", '1')\n self.chooser.addObject(\"Middle-LeftScale\", '2')\n self.chooser.addObject(\"Right-LeftScale\", '3')\n self.chooser.addObject(\"Left-RightScale\", '5')\n '''\n #wpilib.SmartDashboard.putData('Choice', self.chooser)\n #Encoders\n self.EC1 = wpilib.Encoder(2,3)\n self.EC2 = wpilib.Encoder(4,5)\n self.EC1.reset()\n self.EC2.reset()", "def setup_platform(hass, config, add_devices, discovery_info=None):\n # Only act if loaded via mysensors by discovery event.\n # Otherwise gateway is not setup.\n if discovery_info is None:\n return\n\n for gateway in mysensors.GATEWAYS.values():\n # Define the S_TYPES and V_TYPES that the platform should handle as\n # states. Map them in a dict of lists.\n pres = gateway.const.Presentation\n set_req = gateway.const.SetReq\n map_sv_types = {\n pres.S_TEMP: [set_req.V_TEMP],\n pres.S_HUM: [set_req.V_HUM],\n pres.S_BARO: [set_req.V_PRESSURE, set_req.V_FORECAST],\n pres.S_WIND: [set_req.V_WIND, set_req.V_GUST],\n pres.S_RAIN: [set_req.V_RAIN, set_req.V_RAINRATE],\n pres.S_UV: [set_req.V_UV],\n pres.S_WEIGHT: [set_req.V_WEIGHT, set_req.V_IMPEDANCE],\n pres.S_POWER: [set_req.V_WATT, set_req.V_KWH],\n pres.S_DISTANCE: [set_req.V_DISTANCE],\n pres.S_LIGHT_LEVEL: [set_req.V_LIGHT_LEVEL],\n pres.S_IR: [set_req.V_IR_RECEIVE],\n pres.S_WATER: [set_req.V_FLOW, set_req.V_VOLUME],\n pres.S_CUSTOM: [set_req.V_VAR1,\n set_req.V_VAR2,\n set_req.V_VAR3,\n set_req.V_VAR4,\n set_req.V_VAR5],\n pres.S_SCENE_CONTROLLER: [set_req.V_SCENE_ON,\n set_req.V_SCENE_OFF],\n }\n if float(gateway.protocol_version) < 1.5:\n map_sv_types.update({\n pres.S_AIR_QUALITY: [set_req.V_DUST_LEVEL],\n pres.S_DUST: [set_req.V_DUST_LEVEL],\n })\n if float(gateway.protocol_version) >= 1.5:\n map_sv_types.update({\n pres.S_COLOR_SENSOR: [set_req.V_RGB],\n pres.S_MULTIMETER: [set_req.V_VOLTAGE,\n set_req.V_CURRENT,\n set_req.V_IMPEDANCE],\n pres.S_SOUND: [set_req.V_LEVEL],\n pres.S_VIBRATION: [set_req.V_LEVEL],\n pres.S_MOISTURE: [set_req.V_LEVEL],\n pres.S_AIR_QUALITY: [set_req.V_LEVEL],\n pres.S_DUST: [set_req.V_LEVEL],\n })\n map_sv_types[pres.S_LIGHT_LEVEL].append(set_req.V_LEVEL)\n\n if float(gateway.protocol_version) >= 2.0:\n map_sv_types.update({\n pres.S_INFO: [set_req.V_TEXT],\n pres.S_GAS: [set_req.V_FLOW, set_req.V_VOLUME],\n pres.S_GPS: [set_req.V_POSITION],\n pres.S_WATER_QUALITY: [set_req.V_TEMP, set_req.V_PH,\n set_req.V_ORP, set_req.V_EC]\n })\n map_sv_types[pres.S_CUSTOM].append(set_req.V_CUSTOM)\n map_sv_types[pres.S_POWER].extend(\n [set_req.V_VAR, set_req.V_VA, set_req.V_POWER_FACTOR])\n\n devices = {}\n gateway.platform_callbacks.append(mysensors.pf_callback_factory(\n map_sv_types, devices, add_devices, MySensorsSensor))", "def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> \"SwitchPlatformInterface\":\n raise NotImplementedError" ]
[ "0.6743401", "0.6004976", "0.5996683", "0.587907", "0.5700965", "0.56874853", "0.56152296", "0.5609609", "0.5586614", "0.556677", "0.5564428", "0.55539554", "0.55473626", "0.55334884", "0.5523117", "0.55059314", "0.548029", "0.5454373", "0.5443241", "0.54360276", "0.5430548", "0.542754", "0.5427523", "0.54074997", "0.54069376", "0.54058826", "0.5399985", "0.53952247", "0.5339223", "0.5331231" ]
0.6466725
1
Parse light number to a list of channels.
def parse_light_number_to_channels(self, number: str, subtype: str):\n    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rgb_light():\n return list(light.rgb())", "def get_channel_numbers(self):\n channel_numbers = {\"vDeflection\": None, \"hDeflection\": None, \"height\": None, \"capacitiveSensorHeight\": None}\n for key, value in self.general.items():\n if value == \"vDeflection\":\n channel_numbers[value] = re.search(r'(?<=lcd-info\\.)\\d(?=\\.channel.name)', key).group()\n if value == \"hDeflection\":\n channel_numbers[value] = re.search(r'(?<=lcd-info\\.)\\d(?=\\.channel.name)', key).group()\n if value == \"height\":\n channel_numbers[value] = re.search(r'(?<=lcd-info\\.)\\d(?=\\.channel.name)', key).group()\n if value == \"capacitiveSensorHeight\":\n channel_numbers[value] = re.search(r'(?<=lcd-info\\.)\\d(?=\\.channel.name)', key).group()\n return channel_numbers", "def parse_color(raw_color) -> list[float]:\n if isinstance(raw_color, str):\n return list(to_rgba(raw_color))\n return list(raw_color)", "def parse_rooms_to_put_in_darkness(line: str):\n return [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]", "def convert_color(self, color):\n return [color[0]*16, color[1]*16, color[2]*16]", "def get_channels(hexcode):\n assert len(hexcode) in (7, 9)\n assert hexcode[0] == \"#\"\n rgb = hexcode[1:3], hexcode[3:5], hexcode[5:7], hexcode[7:]\n rgb = [int(x, 16) for x in rgb if x != \"\"]\n return np.array(rgb, dtype=np.uint8)", "def get_color_data(self):\n color = []\n data = self.read_byte_data(APDS_9960.CLEAR_DATA_LOW_BYTE_REG_ADDRESS, 8)\n for i in range(4):\n channel_low = data[2 * i]\n channel_high = data[2 * i + 1]\n color.append((channel_high << 8) | channel_low)\n return color", "def get_colors(num_colors):\n import colorsys\n colors = []\n for i in np.arange(0., 360., 360. / num_colors):\n hue = i/360.\n lightness = (50 + np.random.rand() * 10)/100.\n saturation = (90 + np.random.rand() * 10)/100.\n colors.append(colorsys.hls_to_rgb(hue, lightness, saturation))\n return colors", "def characteristic_to_light_states(description):\n fragments = description.split()\n\n pattern_type, groups = parse_pattern(fragments.pop(0))\n colour, fragments = get_colour_code(fragments)\n try:\n period = parse_period(fragments)\n except IndexError:\n if must_have_period(pattern_type, groups):\n raise\n period = None\n if period is not None and cannot_have_period(pattern_type, groups):\n raise ValueError('Period is not allowed in this type of light')\n return TYPES[pattern_type](groups, colour, period)", "def init_channel_numbers(self, channel_numbers):\n if isinstance(channel_numbers, int):\n self._channel_numbers = [channel_numbers]\n\n elif isinstance(channel_numbers, list):\n self._channel_numbers = channel_numbers\n\n elif isinstance(channel_numbers, str):\n if ',' in channel_numbers:\n channel_numbers = channel_numbers.split(',')\n else:\n channel_numbers = [channel_numbers,]\n\n def expand_number_range(range_list):\n \"\"\"Expands any requests that include a range (x-y). 
Inclusive range.\"\"\"\n if '-' in range_list:\n range_list = range_list.split('-')\n assert len(range_list) == 2\n range_list = list(range(int(range_list[0]), int(range_list[1])+1))\n else: # Not a range, just return the number as a list.\n range_list = [int(range_list),]\n return range_list\n\n # Squash list of lists to a 1-D numpy array.\n channel_numbers = np.concatenate([expand_number_range(r) for r\n in channel_numbers])\n self._channel_numbers = np.unique(channel_numbers).tolist()\n print('channel numbers: ', self._channel_numbers)\n else:\n self._channel_numbers = None", "def _color_brew(n):\n color_list = []\n\n # Initialize saturation & value; calculate chroma & value shift\n s, v = 0.75, 0.9\n c = s * v\n m = v - c\n\n for h in np.arange(25, 385, 360. / n).astype(int):\n # Calculate some intermediate values\n h_bar = h / 60.\n x = c * (1 - abs((h_bar % 2) - 1))\n # Initialize RGB with same hue & chroma as our color\n rgb = [(c, x, 0),\n (x, c, 0),\n (0, c, x),\n (0, x, c),\n (x, 0, c),\n (c, 0, x),\n (c, x, 0)]\n r, g, b = rgb[int(h_bar)]\n # Shift the initial RGB values to match value and store\n rgb = [(int(255 * (r + m))),\n (int(255 * (g + m))),\n (int(255 * (b + m)))]\n color_list.append(rgb)\n\n return color_list", "def parse_color(color):\n try:\n color = webcolors.name_to_rgb(color)\n return color.red, color.green, color.blue\n except ValueError:\n pass\n\n try:\n color = webcolors.hex_to_rgb(color)\n return color.red, color.green, color.blue\n except ValueError:\n pass\n\n try:\n data = color.split(\",\")\n return int(data[0]), int(data[1]), int(data[2])\n except Exception:\n pass\n\n return None", "def extract_channels(self, index: int) -> ListLike:\n cmd_pieces = self[index].split()\n channels = []\n for i, piece in enumerate(cmd_pieces):\n if piece in [\"--channel\", \"-c\"]:\n channels.append(cmd_pieces[i + 1])\n return channels", "def selectedchannels(chans=None, shapeLength=64):\n import re\n import numpy as np\n\n x = []\n #split string into substrings\n if(chans.find(',') != -1):\n n1 = re.split(r',', chans)\n elif(chans.find(';') != -1):\n n1 = re.split(r';', chans)\n else:\n n1=[chans]\n\n for s in n1:\n n2 = re.findall(\"\\d+\", s)\n if ( s.find('~') != -1):\n x += [i for i in range(max(0,int(n2[0])), min(int(n2[1])+1, shapeLength))]\n elif (s.find('>') != -1):\n x += [i for i in range(max(0,int(n2[0])+1), shapeLength)]\n elif (s.find('<') != -1):\n x += [i for i in range(0, min(int(n2[0]),shapeLength))]\n else:\n x += [int(n2[0])]\n\n return x", "def lights(self):\n return list(self.GetLights())", "def parse_color(color):\n return (color[0], color[1], color[2])", "async def Lights_Description() -> List[Dict[str, Any]]:\n result = []\n for index, light in enumerate(busylightapi.manager.lights):\n result.append(\n {\n \"light_id\": index,\n \"name\": light.name,\n \"info\": light.info,\n \"is_on\": light.is_on,\n \"color\": rgb_to_hex(*light.color),\n }\n )\n return result", "def light_number(self, number, position):\n for [x, y] in number:\n uh.set_pixel(x+position[0], y+position[1], 183, 0, 255)\n uh.show()", "def get_colors(self, url):\n fd = urlopen(url)\n f = io.BytesIO(fd.read())\n im = Image.open(f)\n palette = im.quantize(colors=len(self.lights)).getpalette()\n return self.extract_colors(palette, len(self.lights))", "def channels(self): # type: (...) 
-> List[BlendingRangePair]\n return self._channels", "def read_channels(self, fid):\r\n bones = [[] for i in self.vertices]\r\n num_channels = 0\r\n for vertex in self.vertices:\r\n num_channels = num_channels + len(vertex.meta['channels'])\r\n\r\n lin = self.read_line(fid)\r\n while lin != ':DEGREES':\r\n lin = self.read_line(fid)\r\n if lin == '':\r\n raise ValueError('Could not find :DEGREES in ' + fid.name)\r\n\r\n counter = 0\r\n lin = self.read_line(fid)\r\n while lin:\r\n parts = lin.split()\r\n if len(parts)==1:\r\n frame_no = int(parts[0])\r\n if frame_no:\r\n counter += 1\r\n if counter != frame_no:\r\n raise ValueError('Unexpected frame number.')\r\n else:\r\n raise ValueError('Single bone name ...')\r\n else:\r\n ind = self.get_index_by_name(parts[0])\r\n bones[ind].append(np.array([float(channel) for channel in parts[1:]]))\r\n lin = self.read_line(fid)\r\n\r\n num_frames = counter\r\n\r\n channels = np.zeros((num_frames, num_channels))\r\n\r\n end_val = 0\r\n for i in range(len(self.vertices)):\r\n vertex = self.vertices[i]\r\n if len(vertex.meta['channels'])>0: \r\n start_val = end_val\r\n end_val = end_val + len(vertex.meta['channels'])\r\n for j in range(num_frames):\r\n channels[j, start_val:end_val] = bones[i][j]\r\n self.resolve_indices(i, start_val)\r\n\r\n self.smooth_angle_channels(channels)\r\n return channels", "def get_light_sensors(self):\n x=self.send_packet_check_response('\\x50')\n LS=[]\n for i in range(8):\n a=bytearray(x[i*3:(i+1)*3])\n LS.append(a[0]|(a[1]&0xf)<<8)\n LS.append(a[1]>>4|a[2]<<4)\n return LS", "def _read_channels(self, info):\n channels = []\n if info.desc().child(\"channels\").empty():\n return channels\n\n channel = info.desc().child(\"channels\").child(\"channel\")\n for _ in range(info.channel_count()):\n channel_name = channel.child_value(\"label\")\n # If the data stream has a TRG channel, rename it so it doesn't\n # conflict with the marker channel.\n if channel_name == 'TRG' and self._marker_inlets:\n channel_name = \"TRG_device_stream\"\n channels.append(channel_name)\n channel = channel.next_sibling()\n\n for appended_channel in self._appended_channels:\n channels.append(appended_channel)\n\n trg_marker_index = self._trigger_inlet_index()\n for i, inlet in enumerate(self._marker_inlets):\n col = inlet_name(inlet)\n if i == trg_marker_index:\n col = 'TRG'\n channels.append(col)\n\n return channels", "def parseColor(c):\n if c in baseColors:\n return baseColors[c]\n if len(c) == 6:\n return tuple(map(lambda x: int(x, 16), (c[:2], c[2:4], c[4:])))\n if len(c) == 3:\n return tuple(map(lambda x: 16*int(x, 16), c))\n raise ValueError(\"Can't find color '{}'\".format(c))", "def split_show_channel(line):\n show,channel=line.split(\",\")\n return (show, channel)", "def get_light_list(self):\n return self.light_array", "def getColor(self,number):\n if number >= 0:\n if self.inverse:\n ret = cs.hsv_to_rgb(0,0,abs(number/self.maxp))\n else:\n ret = cs.hsv_to_rgb(0,0,1-abs(number/self.maxp))\n else:\n if self.inverse:\n ret = cs.hsv_to_rgb(0,1-abs(number/self.maxn),1)\n else:\n ret = cs.hsv_to_rgb(0,abs(number/self.maxn),1)\n return [ret[0]*255.0,ret[1]*255.0,ret[2]*255.0]", "def preparethelights(value):\n print \"These shouldn't be empty ---> \"+str(TIME)+\" and \"+str(COLOR)\n stringvals = value\n global TIME\n TIME = None\n global COLOR\n COLOR = None\n global TUB\n TUB = None\n global PROTOCOL\n PROTOCOL = []\n print \"These should be empty ---> \"+str(TIME)+\" and \"+str(COLOR)\n for stringvalue in stringvals:\n TUB = str(stringvalue)\n 
print TUB\n COLOR = str(TUB[TUB.index('#')+1:TUB.index('#')+7])\n timeholder = str(TUB[TUB.index('e')+3:len(TUB)-1])\n TIME = re.sub(\"[^0-9.]\", \"\", timeholder)\n rgbcol = hextorgb(COLOR)\n print COLOR+' '+TIME\n PROTOCOL.append([rgbcol, TIME])\n print PROTOCOL", "def _identify_channels(self, name):\n\n channel_list = []\n if self.nuke_node.Class() == \"Cryptomatte\":\n # nuke_node is a keyer gizmo\n channel_list = self.nuke_node.node('Input1').channels()\n else:\n # nuke_node might a read node\n channel_list = self.nuke_node.channels()\n\n relevant_channels = [x for x in channel_list if x.startswith(name)]\n pure_channels = []\n for channel in relevant_channels:\n suffix = \".red\"\n if not channel.endswith(suffix):\n continue\n # to do: validate this somewhere else\n pure_channel = channel[:-len(suffix)]\n pure_channels.append(pure_channel)\n\n return sorted(pure_channels)", "def read_channels(fp=None, tree=None):\n if fp:\n et = ElementTree()\n tree = et.parse(fp)\n return [elem_to_channel(elem) for elem in tree.findall('channel')]" ]
[ "0.6089877", "0.57426196", "0.5585461", "0.55491596", "0.5453368", "0.5392379", "0.53050375", "0.5286392", "0.5272585", "0.5230747", "0.52130616", "0.5209771", "0.51865584", "0.5179176", "0.5165454", "0.5161469", "0.5122844", "0.5068625", "0.5067793", "0.5067252", "0.5040409", "0.5007239", "0.5003579", "0.49969557", "0.49940506", "0.49701637", "0.4966085", "0.49396095", "0.49302518", "0.49280205" ]
0.7634664
0
Subclass this method in a platform module to configure a light. This method should return a reference to the light object which will be called to access the hardware.
def configure_light(self, number: str, subtype: str, config: LightConfig, platform_settings: dict) -> "LightPlatformInterface":\n    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, light: pykulersky.Light):\n self._light = light\n self._hs_color = None\n self._brightness = None\n self._white_value = None\n self._available = True", "def __init__(self, light, lights, settings):\n\n if 'name' in lights[light]:\n self.name = lights[light]['name']\n else:\n self.name = light\n if 'gpio' in lights[light]:\n self.gpio = lights[light]['gpio']\n else:\n self.gpio = 18 # GPIO pin 18 is the default for testing\n if 'on' in lights[light]:\n self.on = lights[light]['on']\n else:\n self.on = 'continuous'\n\n GPIO.setup(self.gpio, GPIO.OUT)\n if self.on == 'continuous':\n self.turn_on()\n else: # set up light on/off cyclying other than continuous\n pass # for example, during certain hours", "def __init__(self, **kwargs) -> None:\n super(Light, self).__init__(**kwargs)\n\n get = kwargs.get\n if get('light') is None:\n raise Exception('Light is required')\n if get('button') is None:\n raise Exception('Button is required')\n\n self._light: LED = LED(get('light'))\n self._button: Button = Button(get('button'), pull_up=False)\n self._state: bool = get('state', False)", "def getLight(self):\n return self.light", "def __init__(\n self,\n hass,\n cl,\n name,\n lights_ct,\n lights_rgb,\n lights_xy,\n lights_brightness,\n disable_brightness_adjust,\n min_brightness,\n max_brightness,\n sleep_entity,\n sleep_state,\n sleep_colortemp,\n sleep_brightness,\n disable_entity,\n disable_state,\n initial_transition,\n ):\n self.hass = hass\n self._cl = cl\n self._name = name\n self._entity_id = \"switch.\" + slugify(f\"circadian_lighting {name}\")\n self._state = None\n self._icon = ICON\n self._hs_color = None\n self._lights_ct = lights_ct\n self._lights_rgb = lights_rgb\n self._lights_xy = lights_xy\n self._lights_brightness = lights_brightness\n self._disable_brightness_adjust = disable_brightness_adjust\n self._min_brightness = min_brightness\n self._max_brightness = max_brightness\n self._sleep_entity = sleep_entity\n self._sleep_state = sleep_state\n self._sleep_colortemp = sleep_colortemp\n self._sleep_brightness = sleep_brightness\n self._disable_entity = disable_entity\n self._disable_state = disable_state\n self._initial_transition = initial_transition\n self._attributes = {\"hs_color\": self._hs_color, \"brightness\": None}\n\n self._lights = lights_ct + lights_rgb + lights_xy + lights_brightness\n\n # Register callbacks\n dispatcher_connect(hass, CIRCADIAN_LIGHTING_UPDATE_TOPIC, self.update_switch)\n track_state_change(hass, self._lights, self.light_state_changed)\n if self._sleep_entity is not None:\n track_state_change(hass, self._sleep_entity, self.sleep_state_changed)\n if self._disable_entity is not None:\n track_state_change(hass, self._disable_entity, self.disable_state_changed)", "def __init__(self, LightFun):\n self.setParameters()\n self.Light = LightFun", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 256}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def __init__(self, name, host):\n\n self._device = OppleLightDevice(host)\n\n self._name = name\n self._is_on = None\n self._brightness = None\n self._color_temp = None", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 64}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def __init__(self, 
device: SensemeDevice) -> None:\n super().__init__(device, f\"{device.name} Light\")\n self._attr_supported_color_modes = {ColorMode.COLOR_TEMP}\n self._attr_color_mode = ColorMode.COLOR_TEMP\n self._attr_min_mireds = color_temperature_kelvin_to_mired(\n device.light_color_temp_max\n )\n self._attr_max_mireds = color_temperature_kelvin_to_mired(\n device.light_color_temp_min\n )", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_levels\": 32}\n self.light = mochad.MochadLight(self.hass, controller_mock, dev_dict)", "def test_light_sensor(self):\n with patch.dict(TYPES, {'LightSensor': self.mock_type}):\n state = State('sensor.light', '900',\n {ATTR_DEVICE_CLASS: 'illuminance'})\n get_accessory(None, state, 2, {})", "def _create_example_light():\n return Light({\"warning\": False, \"off\": True})", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_light')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: True,\n FEATURE_PERCENT: True,\n FEATURE_NUMBER_OF_STEPS: 100\n })", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n name = config[CONF_NAME]\n host = config[CONF_HOST]\n entity = OppleLight(name, host)\n\n add_entities([entity])\n\n _LOGGER.debug(\"Init light %s %s\", host, entity.unique_id)", "def configure_matrixlight(self, config):\n raise NotImplementedError", "def turn_on(self, **kwargs: Any) -> None:\n commands = []\n _LOGGER.debug(\"light kwargs-> %s\", kwargs)\n\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands += [{\"code\": DPCODE_LIGHT, \"value\": True}]\n else:\n commands += [{\"code\": DPCODE_SWITCH, \"value\": True}]\n\n if ATTR_BRIGHTNESS in kwargs:\n if self._work_mode().startswith(WORK_MODE_COLOUR):\n colour_data = self._get_hsv()\n v_range = self._tuya_hsv_v_range()\n colour_data[\"v\"] = int(\n self.remap(kwargs[ATTR_BRIGHTNESS], 0, 255, v_range[0], v_range[1])\n )\n commands += [\n {\"code\": self.dp_code_colour, \"value\": json.dumps(colour_data)}\n ]\n else:\n new_range = self._tuya_brightness_range()\n tuya_brightness = int(\n self.remap(\n kwargs[ATTR_BRIGHTNESS], 0, 255, new_range[0], new_range[1]\n )\n )\n commands += [{\"code\": self.dp_code_bright, \"value\": tuya_brightness}]\n\n if ATTR_HS_COLOR in kwargs:\n colour_data = self._get_hsv()\n # hsv h\n colour_data[\"h\"] = int(kwargs[ATTR_HS_COLOR][0])\n # hsv s\n ha_s = kwargs[ATTR_HS_COLOR][1]\n s_range = self._tuya_hsv_s_range()\n colour_data[\"s\"] = int(\n self.remap(\n ha_s,\n HSV_HA_SATURATION_MIN,\n HSV_HA_SATURATION_MAX,\n s_range[0],\n s_range[1],\n )\n )\n # hsv v\n ha_v = self.brightness\n v_range = self._tuya_hsv_v_range()\n colour_data[\"v\"] = int(self.remap(ha_v, 0, 255, v_range[0], v_range[1]))\n\n commands += [\n {\"code\": self.dp_code_colour, \"value\": json.dumps(colour_data)}\n ]\n if self.tuya_device.status[DPCODE_WORK_MODE] != \"colour\":\n commands += [{\"code\": DPCODE_WORK_MODE, \"value\": \"colour\"}]\n\n if ATTR_COLOR_TEMP in kwargs:\n # temp color\n new_range = self._tuya_temp_range()\n color_temp = self.remap(\n self.max_mireds - kwargs[ATTR_COLOR_TEMP] + self.min_mireds,\n self.min_mireds,\n self.max_mireds,\n new_range[0],\n new_range[1],\n )\n commands += [{\"code\": self.dp_code_temp, 
\"value\": int(color_temp)}]\n\n # brightness\n ha_brightness = self.brightness\n new_range = self._tuya_brightness_range()\n tuya_brightness = self.remap(\n ha_brightness, 0, 255, new_range[0], new_range[1]\n )\n commands += [{\"code\": self.dp_code_bright, \"value\": int(tuya_brightness)}]\n\n if self.tuya_device.status[DPCODE_WORK_MODE] != \"white\":\n commands += [{\"code\": DPCODE_WORK_MODE, \"value\": \"white\"}]\n\n self._send_command(commands)", "def __init__( self, dev, port ):\n super( Grove_Light_Sensor, self ).__init__( dev, port )", "def getLightSensor() -> int:\n pass", "def test_light_interface(light_name='head_green_light'):\n l = Lights()\n rospy.loginfo(\"All available lights on this robot:\\n{0}\\n\".format(\n ', '.join(l.list_all_lights())))\n rospy.loginfo(\"Blinking Light: {0}\".format(light_name))\n on_off = lambda x: 'ON' if l.get_light_state(x) else 'OFF'\n rospy.loginfo(\"Initial state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn off light\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # turn on light\n l.set_light_state(light_name, True)\n rospy.sleep(1)\n rospy.loginfo(\"New state: {0}\".format(on_off(light_name)))\n # reset output\n l.set_light_state(light_name, False)\n rospy.sleep(1)\n rospy.loginfo(\"Final state: {0}\".format(on_off(light_name)))", "def __init__(self, light, controller, send_immediately):\n self._controller = controller\n\n # Fixture configuration\n self._channel = light.get(CONF_CHANNEL)\n self._name = light.get(CONF_NAME)\n self._type = light.get(CONF_TYPE, CONF_LIGHT_TYPE_DIMMER)\n self._fade_time = light.get(CONF_TRANSITION)\n self._brightness = light.get(CONF_DEFAULT_LEVEL,\n controller.default_level)\n self._rgb = light.get(CONF_DEFAULT_COLOR, COLOR_MAP.get(self._type))\n self._white_value = light.get(ATTR_WHITE_VALUE, 0)\n\n # Apply maps and calculations\n self._channel_count = CHANNEL_COUNT_MAP.get(self._type, 1)\n self._channels = [channel for channel in range(self._channel,\n self._channel +\n self._channel_count)]\n self._features = FEATURE_MAP.get(self._type)\n\n # Brightness needs to be set to the maximum default RGB level, then\n # scale up the RGB values to what HA uses\n if self._rgb:\n self._brightness = max(self._rgb)\n self._rgb = scale_rgb_to_brightness(self._rgb, self._brightness)\n\n if self._brightness >= 0 or self._white_value >= 0:\n self._state = STATE_ON\n else:\n self._state = STATE_OFF\n\n # Send default levels to the controller\n self._controller.set_channels(self._channels, self.dmx_values,\n send_immediately)", "def set_light_mode(self, is_lid):\n raise NotImplementedError()", "def setupLights(self) :\n\t\tself.ambientLight = render.attachNewNode(AmbientLight( \\\n\t\t\t\t\t\"ambientLight\"))\n\t\tself.ambientLight.node().setColor(Vec4(.8,.8,.8,1))\n\t\trender.setLight(self.ambientLight)\n\n\t\tdLight1 = DirectionalLight(\"dLight1\")\n\t\tdLight1.setColor(Vec4(6,5,7,1))\n\t\tdLight1.setDirection(Vec3(1,1,1))\n\t\tdlnp1 = render.attachNewNode(dLight1)\n\t\tdlnp1.setHpr(30,-160,0)\n\t\trender.setLight(dlnp1)\n\n\t\tdLight2 = DirectionalLight(\"dLight2\")\n\t\tdLight2.setColor(Vec4(.6,.7,1,1))\n\t\tdLight2.setDirection(Vec3(-1,-1,-1))\n\t\tself.dlnp2 = render.attachNewNode(dLight2)\n\t\tself.dlnp2.node().setScene(render)\n\t\tself.dlnp2.setHpr(-70,-60,0)\n\t\trender.setLight(self.dlnp2)", "async def 
light_fixture(\n hass: HomeAssistant, mock_entry: MockEntityFixture, mock_light: Light\n):\n\n # disable pydantic validation so mocking can happen\n Light.__config__.validate_assignment = False\n\n light_obj = mock_light.copy(deep=True)\n light_obj._api = mock_entry.api\n light_obj.name = \"Test Light\"\n light_obj.is_light_on = False\n\n mock_entry.api.bootstrap.lights = {\n light_obj.id: light_obj,\n }\n\n await hass.config_entries.async_setup(mock_entry.entry.entry_id)\n await hass.async_block_till_done()\n\n assert_entity_counts(hass, Platform.LIGHT, 1, 1)\n\n yield (light_obj, \"light.test_light\")\n\n Light.__config__.validate_assignment = True", "def InitLightBasic(self):\r\n\t\t\r\n\t\taLight = AmbientLight(\"AmbientLight\")\r\n\t\taLight.setColor(Vec4(0.3, 0.3, 0.3, 1))\r\n\t\trender.setLight(render.attachNewNode(aLight))\r\n\t\r\n\t\tdLight1 = DirectionalLight(\"DirectionalLight1\")\r\n\t\tdLight1.setColor(Vec4(0.65, 0.6, 0.6, 1))\t\t\r\n\t\tdLight1NP = render.attachNewNode(dLight1)\r\n\t\tdLight1NP.setHpr(100, -40, 0)\r\n\t\trender.setLight(dLight1NP)\r\n\t\r\n\t\tdLight2 = DirectionalLight(\"DirectionalLight2\")\r\n\t\tdLight2.setColor(Vec4(0.35, 0.35, 0.3, 1))\r\n\t\tdLight2NP = render.attachNewNode(dLight2)\r\n\t\tdLight2NP.setHpr(150, -60, 0)\r\n\t\trender.setLight(dLight2NP)", "def set_light_on(self):\r\n self._light = \"ON\"", "def __init__(self, envirophat, use_leds):\n self.envirophat = envirophat\n self.use_leds = use_leds\n # sensors readings\n self.light = None\n self.light_red = None\n self.light_green = None\n self.light_blue = None\n self.accelerometer_x = None\n self.accelerometer_y = None\n self.accelerometer_z = None\n self.magnetometer_x = None\n self.magnetometer_y = None\n self.magnetometer_z = None\n self.temperature = None\n self.pressure = None\n self.voltage_0 = None\n self.voltage_1 = None\n self.voltage_2 = None\n self.voltage_3 = None", "def update(self):\n try:\n if not self._light.connected:\n self._light.connect()\n # pylint: disable=invalid-name\n r, g, b, w = self._light.get_color()\n except pykulersky.PykulerskyException as exc:\n if self._available:\n _LOGGER.warning(\"Unable to connect to %s: %s\", self._light.address, exc)\n self._available = False\n return\n if not self._available:\n _LOGGER.info(\"Reconnected to %s\", self.entity_id)\n self._available = True\n\n hsv = color_util.color_RGB_to_hsv(r, g, b)\n self._hs_color = hsv[:2]\n self._brightness = int(round((hsv[2] / 100) * 255))\n self._white_value = w", "def __init__(self, *args, light_control = light_control_dummy, light_channels = [LC.GROWTH], settings, working_directory = os.getcwd(), **kwargs):\n\n # set up the camera super class\n super().__init__(light_control = light_control, working_directory = working_directory)\n\n # give the camera a unique ID per brand/kind/etc, software uses this ID to determine whether the\n # camera is calibrated or not\n self.CAM_ID = 2\n # enable update function to update gains\n self.HAS_UPDATE = True\n\n # bind the settings to the camera object\n self.settings = settings\n\n # set up the light channel array\n self.light_channels = []\n for channel in light_channels:\n if channel in self.settings.allowed_channels:\n self.light_channels.append(channel)\n\n # load config file and check if it matches the cam id, if so, assume calibrated\n try:\n self.load_config_from_file()\n if self.config[\"cam_id\"] == self.CAM_ID:\n self.CALIBRATED = True\n d_print(\"Succesfully loaded suitable camera configuration.\", 1)\n else:\n self.CALIBRATED = False\n 
d_print(\"Found camera configuration file, but contents are not suitable for current camera.\", 3)\n except (EnvironmentError, ValueError):\n d_print(\"No suitable camera configuration file found!\", 3)\n self.CALIBRATED = False\n\n # set multiprocessing to spawn (so NOT fork)\n try:\n mp.set_start_method('spawn')\n except RuntimeError:\n pass", "def get_light():\n return 'do some magic!'" ]
[ "0.7045976", "0.68374443", "0.67335045", "0.66650575", "0.6658996", "0.6635369", "0.65825313", "0.65682316", "0.65436727", "0.6541785", "0.6537106", "0.6503492", "0.63855004", "0.6335902", "0.63337713", "0.63275725", "0.6303935", "0.6284611", "0.6283159", "0.62752515", "0.6273373", "0.6257014", "0.62380034", "0.62325066", "0.6227442", "0.6198215", "0.6192971", "0.61888856", "0.6127808", "0.6113838" ]
0.7442126
0
Subclass this method in a platform module to configure a switch. This method should return a reference to the switch's platform interface object which will be called to access the hardware.
def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> "SwitchPlatformInterface":\n    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure_switch(self, config):\n raise NotImplementedError", "def _init_hardware(self):\n return", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])", "def port_maker(self, platform):\n raise NotImplementedError()", "def configure_hardware_sound_system(self) -> \"HardwareSoundPlatformInterface\":\n raise NotImplementedError", "def __init__(self,\n device_name,\n create_device_func,\n props,\n hub_name_prop,\n primary_port_prop,\n secondary_port_prop,\n ethernet_switch_prop,\n ethernet_port_prop,\n get_switchboard_if_initialized,\n power_and_data_share_cable=False,\n pre_off_func=None):\n super().__init__(device_name=device_name)\n\n self._create_device_func = create_device_func\n self._hub_name_prop = hub_name_prop\n self._primary_port_prop = primary_port_prop\n self._secondary_port_prop = secondary_port_prop\n self._props = props\n self._ethernet_switch = None\n\n # Set the properties\n self._get_switchboard_if_initialized = get_switchboard_if_initialized\n self._power_and_data_share_cable = power_and_data_share_cable\n self._pre_off_func = pre_off_func\n self._ethernet_switch_prop = ethernet_switch_prop\n self._ethernet_port_prop = ethernet_port_prop", "def configure_light(self, number: str, subtype: str, config: LightConfig,\n platform_settings: dict) -> \"LightPlatformInterface\":\n raise NotImplementedError", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. 
Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def setPlatform(self):\n\t\treturn None", "def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict) -> \"DriverPlatformInterface\":\n raise NotImplementedError", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n add_devices_callback([\n HE853Switch('OviSwitch', STATE_ON),\n HE853Switch('AC', STATE_OFF)\n ])", "def create_switch():\n connection = MagicMock()\n connection.address = 'addr'\n connection.port = 'port'\n connection.protocol.version = 0x04\n switch = Switch('00:00:00:00:00:00:00:01', connection)\n switch._enabled = True\n return switch", "def configure(self):\n\n self.platform.configure()", "def _connect_to_hardware(self):\n if False: # !!!TEMP:need to validate config...\n if len(self.config['ports']) > 1:\n self.log.fatal(\"only one slave com port is supported\")\n if len(self.config['ports']) == 0:\n self.log.warning(\"no communication port setted!\")\n return\n port = self.config['ports'][0]\n self.communicator = RaspSerialCommunicator(\n platform=self, port=port,\n baud=self.config['baud'])\n self.communicator = RaspSerialCommunicator(\n platform=self, port='/dev/ttyAMA0',\n baud=115200)", "def __init__(\n self,\n netatmo_device: NetatmoDevice,\n ) -> None:\n super().__init__(netatmo_device.data_handler)\n\n self._switch = cast(NaModules.Switch, netatmo_device.device)\n\n self._id = self._switch.entity_id\n self._attr_name = self._device_name = self._switch.name\n self._model = self._switch.device_type\n self._config_url = CONF_URL_CONTROL\n\n self._home_id = self._switch.home.entity_id\n\n self._signal_name = f\"{HOME}-{self._home_id}\"\n self._publishers.extend(\n [\n {\n \"name\": HOME,\n \"home_id\": self._home_id,\n SIGNAL_NAME: self._signal_name,\n },\n ]\n )\n self._attr_unique_id = f\"{self._id}-{self._model}\"\n self._attr_is_on = self._switch.on", "def __init__(self, mb_info, switch_config):\n self.microblaze = Arduino(mb_info, ARDUINO_MAILBOX_PROGRAM)\n self.iop_switch_config = switch_config", "def setup_platform(hass, config, add_devices, discovery_info=None):\n name = config.get(CONF_NAME)\n mac = config.get(CONF_MAC)\n pin = config.get(CONF_PIN)\n\n add_devices([ProgtimeSwitch(mac, pin, name)])", "def platform_init(self):\n if isinstance(self.imu, MockImuController) or isinstance(self.pwm_controller, MockPWMController):\n print(\"Mock components detected, creating mock antenna controller\")\n platform = MockPlatformController(self.azimuth_servo, self.elevation_servo, self.imu)\n else:\n print(\"Initializing PIDAntennaController class\")\n platform = PIDPlatformController(\n self.azimuth_servo,\n self.elevation_servo,\n self.imu,\n pid_output_limits=self.pid_config.get(\"output_limits\"),\n pid_frequency=self.pid_config.get(\"period\"),\n p=self.pid_config.get(\"p\"),\n i=self.pid_config.get(\"i\"),\n d=self.pid_config.get(\"d\")\n )\n \n self.platform = platform\n\n if not isinstance(self.gps, MockGPSController):\n self.gps_update_loop = GPSLocationController(self.gps)\n self.gps_update_loop.start()\n else:\n self.gps_update_loop = None\n \n return platform", "def getPlatform(self):\n\t\treturn None", "def __init__(self, parent, endpoint):\n Wemo_Endpoint.__init__(self, parent, endpoint)\n self.device_type = self._Parent._DeviceTypes.get('wemo_switch')\n self.FEATURES.update({\n FEATURE_BRIGHTNESS: False,\n FEATURE_PERCENT: 
False,\n FEATURE_NUMBER_OF_STEPS: False\n })", "def __init__(self):\n self.hw = dev_hwinfo.device()\n self.ethKey=\"Ethernet\"\n self.ethAllInterfaceName=[]\n dir_path = os.path.dirname(os.path.realpath(__file__))\n self.myDefine = init_define.main()\n self.mPlatform=self.hw.getPlatform()", "def setup_platform(hass, config, add_devices, discovery_info=None) -> None:\n friendly_name = config.get(CONF_FRIENDLY_NAME)\n mac_addr = config.get(CONF_MAC)\n add_devices([Switchmate(mac_addr, friendly_name)], True)", "def setup_platform(hass, config, add_devices, discovery_info=None):\n switches = []\n for coil in config.get(\"coils\"):\n switches.append(ModbusCoilSwitch(\n coil.get(CONF_NAME),\n coil.get(CONF_SLAVE),\n coil.get(CONF_COIL)))\n add_devices(switches)", "def get_switch(self, conf, dpid):\n\t\tpass", "def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n 
hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n _update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True", "def connect_to_switches(self):\n for p4switch in self.topo.get_p4switches():\n thrift_port = self.topo.get_thrift_port(p4switch)\n self.controllers[p4switch] = SimpleSwitchThriftAPI(thrift_port)", "def __init__(self):\n GPIO.setwarnings(False)\n GPIO.cleanup() # Reset the high and low levels of the GPIO port\n #The following code defines the GPIO used to control the L298N chip. 
This definition is different for different Raspberry Pi driver boards.\n self.Motor_A_EN = 17\n self.Motor_B_EN = 4\n self.Motor_A_Pin1 = 27\n self.Motor_A_Pin2 = 18\n self.Motor_B_Pin1 = 21\n self.Motor_B_Pin2 = 26\n self.setup()", "def __init__(self, hdw=['Soundcard'], devicename='dev1'):\n self.debugFlag = False\n self.task = None # NI Task\n self.required_hardware = hdw # Require specific hardware \n self.hardware = [] # list of hardware actually found on this system\n self.find_hardware(device_info={'devicename': devicename}) # population the self.hardware list", "def setup_platform(hass, config, add_devices, discovery_info=None):\n devices = config.get(CONF_SWITCHES, {})\n cmdrgbwlight = []\n\n for object_id, device_config in devices.items():\n value_template = device_config.get(CONF_STATE_VALUE_TEMPLATE)\n\n if value_template is not None:\n value_template.hass = hass\n\n cmdrgbwlight.append(\n CommandSwitch(\n hass,\n object_id,\n device_config.get(CONF_NAME),\n device_config.get(CONF_COMMAND_ON),\n device_config.get(CONF_COMMAND_OFF),\n device_config.get(CONF_COMMAND_STATE),\n device.config.get(CONF_BRIGHTNESS_STATE),\n device.config.get(CONF_BRIGHTNESS_COMMAND),\n device.config.get(CONF_BRIGHTNESS_VALUE_TEMPLATE),\n device.config.get(CONF_RGB_STATE),\n device.config.get(CONF_RGB_COMMAND),\n device.config.get(CONF_RGB_VALUE_TEMPLATE),\n device.config.get(CONF_FRIENDLY_NAME, object_id),\n device.config.get(CONF_BRIGHTNESS_SCALE),\n value_template\n )\n )\n\n if not cmdrgbwlight:\n _LOGGER.error(\"No switches added\")\n return False\n\n add_devices(cmdrgbwlight)" ]
[ "0.6538836", "0.6523317", "0.6505792", "0.6399605", "0.63249785", "0.6282614", "0.6255226", "0.62351584", "0.62351584", "0.6210984", "0.6205586", "0.61878955", "0.6154461", "0.61467355", "0.6083941", "0.6024437", "0.6023775", "0.60223734", "0.59842956", "0.5959211", "0.5924008", "0.591562", "0.5894989", "0.58884716", "0.58763784", "0.5837445", "0.58322656", "0.5776327", "0.5765848", "0.57382655" ]
0.75649524
0
Return config section for additional switch config items.
def get_switch_config_section(cls) -> Optional[str]:\n    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_switch_config_section(cls):\n return None", "def get_section(self,name):\n if self.__config.has_section(name):\n data={}\n for opt,val in self.__config.items(name):\n data[opt]=val\n return data\n else:\n raise Exception(_('EVOGTK: Section \"%s\" does not exist in this preferences instance') % name)", "def configure(self, section):", "def get_stepper_config_section(cls) -> Optional[str]:\n return None", "def get_config(self):\n config = {\n 'multichannel': self._multichannel,\n 'complex_part': self._complex_part\n }\n base_config = super().get_config()\n return {**base_config, **config}", "def get_config(self):\n return {\"name\": self.name, \"tunable\": self.tunable}", "def get_coil_config_section(cls) -> Optional[str]:\n return None", "def get_config_main_sections(self):\n self.sections_in_config = self.config_handle.sections()", "def config(self):\n return \"\\n\".join([ c.config(True) for p, c in self.configs_ ])", "def getSection(self, section, item):\n if self.config.has_section(section):\n if self.config.has_option(section, item):\n return self.config.get(section, item)\n return None", "def get_config_descr(self, name):\n return self.configs[name][1]", "def get_config_section(self, title_startswith, return_all=True):\n for section in self._config_sections:\n if section[0].startswith(title_startswith):\n if return_all:\n yield section\n else:\n return section", "def get_switch_overwrite_section(cls):\n return None", "def get_config_on_json(self):\n # load section CONFIG from data\n try:\n return self.json_data[\"CONFIG\"]\n except:\n constant.get_error(constant.ERROR_004)", "def get_coil_config_section(cls):\n return None", "def get_config(self):\n return {'reduction': self.reduction, 'name': self.name}", "def gather_configuration(self, config):\n config['log']['logging_level'] = self.logDisplay.get_logging_level()\n\n # MIDI\n config['midi']['winch_midi_input'] = self.winchMidiInputCombo.current_item()\n config['midi']['midi_output'] = self.midiOutputCombo.current_item()\n\n # OSC\n addr, port = self.oscListenerConfig.get_OSC_port()\n config['osc']['listener_addr'] = addr\n config['osc']['listener_port'] = str(port)\n addr, port = self.oscSenderConfig.get_OSC_port()\n config['osc']['sender_addr'] = addr\n config['osc']['sender_port'] = str(port)\n\n # DMX\n config['dmx']['dmx_output_serial_port'] = self.dmxSelect.current_item()\n\n # winches\n for i, winchSelect in enumerate(self.winchSelects):\n key = \"winch_%d_output_serial_port\" % (i+1)\n config['winches'][key] = winchSelect.current_item()\n\n return", "def __getitem__(self, item):\n return self._config[item]", "def subconfig(self, subsection):\n if config.is_config(self.config):\n raise PluginFeatureError(\"subconfig() incompatible with plugin.Config, \"\n \"use config.option_map()\")\n section = self.plugin_name() + '/' + subsection\n if section not in self.bot.config_root:\n self.bot.config_root[section] = {}\n return self.bot.config_root[section]", "def get_config(self, name):\n return self.configs[name][0]", "def _opt_config(self):\n return self._opt_method.config", "def get_config(self):\n return super().get_config()", "def section(self):\n return SECTION_NAME_TO_SECTION[self.section_name]", "def get_rec_config(self):\n conf_map = {}\n if len(self.reconstructions.text()) > 0:\n conf_map['reconstructions'] = str(self.reconstructions.text())\n if len(self.device.text()) > 0:\n conf_map['device'] = str(self.device.text()).replace('\\n', '')\n if len(self.alg_seq.text()) > 0:\n conf_map['algorithm_sequence'] = 
str(self.alg_seq.text()).replace('\\n', '')\n if len(self.beta.text()) > 0:\n conf_map['beta'] = str(self.beta.text())\n if len(self.support_area.text()) > 0:\n conf_map['support_area'] = str(self.support_area.text()).replace('\\n', '')\n if self.cont.isChecked():\n conf_map['cont'] = 'true'\n if len(self.cont_dir_button.text().strip()) > 0:\n conf_map['continue_dir'] = '\"' + str(self.cont_dir_button.text()).strip() + '\"'\n print('cont_dir', conf_map['continue_dir'])\n\n for feat_id in self.features.feature_dir:\n self.features.feature_dir[feat_id].add_config(conf_map)\n\n return conf_map", "def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)", "def get_config(self):\n\n return {section: self.sections[section].get_values() for section in self.sections}", "def config_list_options(section):\n return __CONFIG.items(section)", "def _config_sections(self):\n data = []\n section_data = []\n for index, line in enumerate(self.running_config):\n if self._nextline_startswith_space(index):\n section_data.append(line)\n else:\n if len(section_data) > 0:\n section_data.append(line)\n data.append(section_data)\n section_data = []\n return data", "def get(self, name, section=section_default):\n return self.config[section][name]", "def get_architecture_config_section(architecture_name: str) -> BaseArchitectureConfigSection:\n architecture_module = _import_architecture_module(architecture_name)\n return architecture_module.ArchitectureConfigSection()" ]
[ "0.7165999", "0.6003366", "0.5735365", "0.5721832", "0.5691188", "0.569049", "0.56896424", "0.5651286", "0.5600963", "0.5576065", "0.5570528", "0.55503464", "0.5539065", "0.5482666", "0.5422852", "0.5421628", "0.54125065", "0.53546286", "0.53522193", "0.53439945", "0.53227085", "0.53196234", "0.52948934", "0.5271917", "0.5256359", "0.5254117", "0.52536285", "0.524673", "0.52359176", "0.523084" ]
0.70081085
1
Validate a switch config for platform.
def validate_switch_section(self, switch: "Switch", config: dict) -> dict:
    if self.get_switch_config_section():
        spec = self.get_switch_config_section()  # pylint: disable-msg=assignment-from-none
        config = switch.machine.config_validator.validate_config(spec, config, switch.name)
    elif config:
        raise AssertionError("No platform_config supported but not empty {} for switch {}".
                             format(config, switch.name))
    return config
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_switch_section(self, switch: Switch, config: dict) -> dict:\n base_spec = [\"device\"]\n if self.__class__.get_switch_config_section():\n base_spec.append(self.__class__.get_switch_config_section())\n switch.machine.config_validator.validate_config(\n \"switches\", config, switch.name,\n base_spec=base_spec)\n return config", "def _validate_config(self):\n pass", "def validate_config(self):\n pass", "def validate_config(self):\n pass", "def command_validate_switch():\n\n def duplicate_port(entry, name):\n dpid = entry['dpid']\n\n print 'Warning: switch %s duplicate interface names: %s' % (dpid, name)\n if bigsh.debug_backtrace:\n for port in entry['ports']:\n if port['name'] == name:\n print 'SWTICH %s:%s PORT %s' % (entry, name, port)\n\n def not_case_sensitive(entry, name):\n dpid = entry['dpid']\n\n ports = {}\n for port in entry['ports']:\n if port['name'].lower() == name:\n ports[port['name']] = port\n\n print 'Warning: switch %s case insentive interface names: %s' % \\\n (dpid, ' - '.join(ports.keys()))\n if bigsh.debug_backtrace:\n for port in ports:\n print 'SWTICH %s PORT %s' % (dpid, port)\n\n bigdb = bigsh.bigdb\n try:\n (schema, entries) = bigdb.schema_and_result('core/switch', {})\n except Exception, e:\n print 'command_validate_switch:', e\n traceback.print_exc()\n return\n\n if entries:\n for entry in entries.iter():\n dpid = entry['dpid']\n\n # verify that the port names are unique even when case\n # sensitive\n all_names = [p['name'] for p in entry['interface']]\n one_case_names = utif.unique_list_from_list([x.lower() for x in all_names])\n if len(all_names) != len(one_case_names):\n # Something is rotten, find out what.\n for (i, port_name) in enumerate(all_names):\n # use enumerate to drive upper-triangle comparison\n for other_name in all_names[i+1:]:\n if port_name == other_name:\n duplicate_port(entry, port_name)\n elif port_name.lower() == other_name.lower():\n not_case_sensitive(entry, port_name)", "def validate_config(self):\n\n # LOCALHOST\n if self.location == 'localhost':\n if 'browserName' not in self.config.keys():\n msg = \"Add the 'browserName' in your local_config: e.g.: 'Firefox', 'Chrome', 'Safari'\" # noqa\n self.runner.critical_log(msg)\n raise BromeBrowserConfigException(msg)\n\n # EC2\n elif self.location == 'ec2':\n self.validate_ec2_browser_config()\n\n # VIRTUALBOX\n elif self.location == 'virtualbox':\n self.validate_virtualbox_config()", "def validate_config(self, config: Dict) -> bool:\n raise NotImplementedError", "def check_config(config):\n pass", "def validate_config(self, changed):\n logger.debug(\"[%s] Validating config (Legacy path)\", self.name)\n if not self.to_validate(changed):\n return\n # Validate (Legacy Path)\n from noc.cm.engine import Engine\n\n engine = Engine(self)\n try:\n engine.check()\n except: # noqa\n logger.error(\"Failed to validate config for %s\", self.name)\n error_report()", "def state_failsafe_validate(cfg, app, win, events):", "def _check_config(self):", "def validate_config(params, error_callback):\n local_params = dict(params)\n _validate_value_formats(local_params, error_callback)\n _validate_in_cidr(local_params, error_callback)\n _validate_dhcp_range(local_params, error_callback)\n _validate_inspection_range(local_params, error_callback)\n _validate_no_overlap(local_params, error_callback)\n _validate_ips(local_params, error_callback)\n _validate_interface_exists(local_params, error_callback)", "def state_chosen_validate(cfg, app, win, events):", "def validate_settings(_cfg, _ctx):\n pass", 
"def test_valid_configuration(self):\n\n conf = [\n 'gasoline', '228i', 'model_luxury_line', 'silver', 'rims_384',\n 'tapistry_black', 'steptronic', 'smoker_package', 'tow_hook'\n ]\n\n attr_val_ids = self.get_attr_val_ids(conf)\n validation = self.cfg_tmpl.validate_configuration(attr_val_ids)\n self.assertTrue(validation, \"Valid configuration failed validation\")", "def validate_config(config: NeedlemanWunschAlgorithmConfig):\n\n parameters_names_list = [\"SAME\", \"DIFF\", \"GAP_PENALTY\", \"MAX_NUMBER_PATHS\", \"MAX_SEQ_LENGTH\"]\n\n for param_name in parameters_names_list:\n if not isinstance(config[param_name], int):\n return False, f\"Parameter {param_name} is not int!\"\n \n for param_name in parameters_names_list[0:3]:\n if config[param_name] == 0:\n return False, f\"Parameter {param_name} can not be equal to 0!\"\n\n for param_name in parameters_names_list[3:]:\n if config[param_name] < 1:\n return False, f\"Parameter {param_name} can not be less than 1!\"\n\n if config.SAME <= config.DIFF:\n return False, f\"Parameter SAME must be greater than parameter DIFF!\"\n\n if config.MAX_SEQ_LENGTH > constants.MAXIMUM_SEQ_LEN:\n return False, f\"Value of parameter MAX_SEQ_LENGTH is too big. It should be less than {constants.MAXIMUM_SEQ_LEN}\"\n\n if config.MAX_NUMBER_PATHS > constants.MAXIMUM_NUMBER_PATHS:\n return False, f\"Value of parameter MAX_NUMBER_PATHS is too big. It should be less than {constants.MAXIMUM_NUMBER_PATHS}\"\n\n return True, \"\"", "def validate_config(app: App, config: Config):\n for state_machine in config.state_machines.values():\n _validate_state_machine(app, state_machine)", "def validate(config):\n valid, error = validate_relays(config['relays'])\n if not valid:\n click.echo(\"Error: Configuration invalid: {}\".format(error))\n else:\n click.echo(\"OK: Configuration is valid.\")", "def _validatePortConfig(self):\n if config.BindHTTPPorts:\n if config.HTTPPort == 0:\n raise UsageError(\n \"HTTPPort required if BindHTTPPorts is not empty\"\n )\n elif config.HTTPPort != 0:\n config.BindHTTPPorts = [config.HTTPPort]\n if config.BindSSLPorts:\n if config.SSLPort == 0:\n raise UsageError(\n \"SSLPort required if BindSSLPorts is not empty\"\n )\n elif config.SSLPort != 0:\n config.BindSSLPorts = [config.SSLPort]", "def _verify_switch_created(self, switch):\n if not (\n hasattr(switch, \"switch_power\") and\n isinstance(switch.switch_power, switch_power_base.SwitchPowerBase)):\n raise errors.CapabilityNotReadyError(\n msg=\"'switch_power' capability is missing in hub device {} ({}),\"\n \" or is not an instance of SwitchPowerBase\".format(\n self.hub_name,\n type(switch).__name__),\n device_name=self._device_name)", "def state_choose_validate(cfg, app, win, events):", "def config_validate(ctx, **kwargs):\n # Validates pf9-express config file and obtains Auth Token\n #Load Active Config into ctx\n GetConfig(ctx).GetActiveConfig()\n #Get Token\n token = GetToken().get_token_v3(\n ctx.params[\"du_url\"],\n ctx.params[\"du_username\"],\n ctx.params[\"du_password\"],\n ctx.params[\"du_tenant\"] )\n if token is not None:\n click.echo('Config Validated!')\n click.echo('Token: %s' % token)\n else:\n click.echo('Config Validation Failed!')", "def check_config_mode(self, check_string=\")#\", pattern=\"\"):\n return super().check_config_mode(check_string=check_string)", "def config_sanity_check(config: dict) -> dict:\n\n # back compatibility support\n config = parse_v011(config)\n\n # check model\n if config[\"train\"][\"method\"] == \"conditional\":\n if 
config[\"dataset\"][\"train\"][\"labeled\"] is False: # unlabeled\n raise ValueError(\n \"For conditional model, data have to be labeled, got unlabeled data.\"\n )\n\n return config", "def validate(self):\n\n print(\"Checking for supported board.\")\n if self.board == \"\": \n sys.exit(\"Unknown board type. Exiting.\")\n\n supportedboards = supportedBoards()\n\n if not self.board in supportedboards:\n sys.exit(\"Board %s is not supported.\" % self.board)\n return False\n\n if not self.getpath(): \n sys.exit(\"%s unable to find binary file to upload in \\\n specified path or current working directory %s. \\\n Exiting now.\" % (errstr, str(array[0])))\n\n array = self.getfiletype()\n if not (array[0] or array[1]):\n return False\n\n self.arch = array[0]\n self.filetype = array[1]\n return True", "def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> \"SwitchPlatformInterface\":\n raise NotImplementedError", "def validate(self):\n AcceleratorType.validate(self.accelerator_type)\n gcp.validate_machine_configuration(self.cpu_cores,\n self.memory,\n self.accelerator_type,\n self.accelerator_count)", "def check_config(cfg):", "def check_config_mode(self, check_string=\"(config\", pattern=\"\"):\n return super().check_config_mode(check_string=check_string, pattern=pattern)", "def validate_coil_section(self, driver, config) -> dict:\n if self.get_coil_config_section():\n spec = self.get_coil_config_section() # pylint: disable-msg=assignment-from-none\n config = driver.machine.config_validator.validate_config(spec, config, driver.name)\n elif config:\n raise AssertionError(\"No platform_config supported but not empty {} for driver {}\".\n format(config, driver.name))\n\n return config" ]
[ "0.7353518", "0.6292539", "0.62700325", "0.62700325", "0.6195892", "0.60210854", "0.59656155", "0.57471776", "0.5687998", "0.56748194", "0.5667317", "0.56560016", "0.5586307", "0.55708754", "0.55694824", "0.5551827", "0.5543095", "0.5540105", "0.5536376", "0.5514801", "0.5511961", "0.5496445", "0.54927295", "0.5489427", "0.5485643", "0.5470686", "0.5459893", "0.54567796", "0.5435321", "0.5416106" ]
0.77033305
0
Get all hardware switch states. Subclass this method in a platform module to return the hardware states of all the switches on that platform. This method should return a dict with the switch numbers as keys and the hardware state of the switches as values (0 = inactive, 1 = active). This method should not compensate for NO or NC status; rather, it should return the raw hardware states of the switches.
async def get_hw_switch_states(self) -> Dict[str, bool]:
    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_hw_switch_states(self):\n hw_states = dict()\n #k = self._kp.keypad()\n k = \"\"\n for number, sw in self.switches.items():\n if number == k:\n hw_states[number] = 1\n else:\n hw_states[number] = 0\n return hw_states", "def get_hw_switch_states(self):\n raise NotImplementedError", "def switches(self):\n return {k:v for k, v in self._data.items() \n if v[\"type\"] == \"SWITCH\"}", "async def get_switches(self):\n return await self.get_states_by_tag_prefix(\"led\")", "def switches(self) -> List[dict]:\n return self.items_by_domain(\"switch\")", "def list_switches(self):\n return [x for x,y in self.devices.items() if y.device_type == \"Switch\"]", "def get_switch_states(self):\n switches_states = []\n for connection in self.connections:\n if connection.start.is_switch_output():\n switches_states.append((connection.start.switch,\n connection.start.output_nr))\n if connection.end.is_switch_output():\n switches_states.append((connection.end.switch,\n connection.end.output_nr))\n return switches_states", "def get_switch_map(self, do_sort=True):\n switch_map = []\n for switch in self.machine.switches.values():\n switch_map.append(SwitchMap(switch.hw_switch.get_board_name(), switch))\n\n # sort by board + driver number\n if do_sort:\n switch_map.sort(key=lambda x: (self._natural_key_sort(x[0]),\n self._natural_key_sort(str(x[1].hw_switch.number))))\n return switch_map", "def getSwitchInfo():\n swDB = switchdb.DB()\n raw_info = swDB.getAllSummary()\n switchList = []\n for row in raw_info:\n row = list(row)\n switch = {}\n switch[\"name\"] = row[0]\n switch[\"serial\"] = row[1]\n switch[\"swver\"] = row[2]\n switch[\"ip\"] = row[3]\n switch[\"check\"] = row[4]\n switch[\"total\"] = row[5]\n switch[\"up\"] = row[6]\n switch[\"down\"] = row[7]\n switch[\"disabled\"] = row[8]\n if switch[\"total\"] == 0:\n switch[\"capacity\"] = 0\n else:\n switch[\"capacity\"] = (switch[\"up\"] / switch[\"total\"]) * 100\n switchList.append(switch)\n swDB.close()\n return switchList", "def check_switching_action(self):\n current_switching = {}\n for devices in self.switching_systems:\n command = self.build_command(devices, \"get_closed_channels\")\n switching = str(self.vcw.query(devices, command)).strip()\n switching = self.pick_switch_response(devices, switching)\n current_switching.update({devices[\"Device_name\"]: switching})\n self.settings[\"settings\"][\"current_switching\"][\n devices[\"Device_name\"]\n ] = current_switching\n return current_switching", "def get_all_switches(name):\n return [False,False,False,False] #TODO Implement", "def get_switches(self) -> tuple:\n return self.switches", "def get_all_switch(self, conf):\n\t\tpass", "def get_switch_state(self, path, params):\n switch = params.get('switch')\n port = params.get('port')\n host = self._extract_url_base(path)\n reply = self._faucet_collector.get_switch_state(switch, port, host)\n self._augment_state_reply(reply, path)\n return reply", "def get_power_state(self):\n\n doc = self.client.enumerate(uris.CIM_ComputerSystem)\n\n enabled_state = doc.find(\n './/s:Body/wsen:EnumerateResponse/wsman:Items/wsinst:CIM_HostComputerSystem/wsinst:EnabledState', wsman.NS_MAP_COMPUTER_SYSTEM)\n return constants._get_enabled_state(enabled_state.text)", "def get_of_switches(self):\n try:\n of_response = requests.get(self.url + \"restconf/operational/opendaylight-inventory:nodes\",\n headers=self.headers)\n error_text = \"Openflow response {}: {}\".format(of_response.status_code, of_response.text)\n if of_response.status_code != 200:\n 
self.logger.warning(\"get_of_switches \" + error_text)\n raise OpenflowConnUnexpectedResponse(\"Error get_of_switches \" + error_text)\n\n self.logger.debug(\"get_of_switches \" + error_text)\n info = of_response.json()\n\n if not isinstance(info, dict):\n self.logger.error(\"get_of_switches. Unexpected response, not a dict: %s\", str(info))\n raise OpenflowConnUnexpectedResponse(\"Unexpected response, not a dict. Wrong version?\")\n\n nodes = info.get('nodes')\n if type(nodes) is not dict:\n self.logger.error(\"get_of_switches. Unexpected response at 'nodes', not found or not a dict: %s\",\n str(type(info)))\n raise OpenflowConnUnexpectedResponse(\"Unexpected response at 'nodes', not found or not a dict.\"\n \" Wrong version?\")\n\n node_list = nodes.get('node')\n if type(node_list) is not list:\n self.logger.error(\"get_of_switches. Unexpected response, at 'nodes':'node', \"\n \"not found or not a list: %s\", str(type(node_list)))\n raise OpenflowConnUnexpectedResponse(\"Unexpected response, at 'nodes':'node', not found \"\n \"or not a list. Wrong version?\")\n\n switch_list = []\n for node in node_list:\n node_id = node.get('id')\n if node_id is None:\n self.logger.error(\"get_of_switches. Unexpected response at 'nodes':'node'[]:'id', not found: %s\",\n str(node))\n raise OpenflowConnUnexpectedResponse(\"Unexpected response at 'nodes':'node'[]:'id', not found. \"\n \"Wrong version?\")\n\n if node_id == 'controller-config':\n continue\n\n node_ip_address = node.get('flow-node-inventory:ip-address')\n if node_ip_address is None:\n self.logger.error(\"get_of_switches. Unexpected response at 'nodes':'node'[]:'flow-node-inventory:\"\n \"ip-address', not found: %s\", str(node))\n raise OpenflowConnUnexpectedResponse(\"Unexpected response at 'nodes':'node'[]:\"\n \"'flow-node-inventory:ip-address', not found. 
Wrong version?\")\n\n node_id_hex = hex(int(node_id.split(':')[1])).split('x')[1].zfill(16)\n switch_list.append((':'.join(a+b for a, b in zip(node_id_hex[::2], node_id_hex[1::2])),\n node_ip_address))\n return switch_list\n\n except requests.exceptions.RequestException as e:\n error_text = type(e).__name__ + \": \" + str(e)\n self.logger.error(\"get_of_switches \" + error_text)\n raise OpenflowConnConnectionException(error_text)\n except ValueError as e:\n # ValueError in the case that JSON can not be decoded\n error_text = type(e).__name__ + \": \" + str(e)\n self.logger.error(\"get_of_switches \" + error_text)\n raise OpenflowConnUnexpectedResponse(error_text)", "def get_switch_ids():\n\n device_id_list = []\n url = 'https://' + APIC_EM + '/network-device'\n header = {'accept': 'application/json', 'X-Auth-Token': APIC_EM_TICKET}\n device_response = requests.get(url, headers=header, verify=False)\n device_json = device_response.json()\n device_info = device_json['response']\n for items in device_info:\n if items.get('family') == 'Switches and Hubs':\n device_id = items.get('id')\n device_id_list.append(device_id)\n return device_id_list", "def load_switches(self):\n new_switches = list()\n for site in self.sites:\n switches = self.get_switches_stats(site_id=site['id'])\n for switch in switches:\n if len(switch['name']) < 1:\n switch['name'] = ':'.join([switch['mac'][i:i + 2].upper() for i in range(0, len(switch['mac']), 2)])\n new_switch = {\n \"name\": switch['name'],\n \"site\": site['name'],\n \"site_id\": site['id'],\n \"device_id\": switch['id'],\n \"mac\": switch['mac'],\n \"mac_str\": ':'.join([switch['mac'][i:i + 2].upper() for i in range(0, len(switch['mac']), 2)]),\n \"ip_config\": switch['ip_config'],\n \"ip_actual\": switch['ip_stat'],\n \"net_obj\": get_network(address=switch['ip_config']['ip'], netmask=switch['ip_config']['netmask']) if 'ip' in switch['ip_config'] else None\n }\n for vlan, addr in new_switch['ip_actual']['ips'].items():\n if new_switch['ip_actual']['ip'] == addr:\n new_switch['ip_actual']['vlan'] = vlan.strip('vlan')\n else:\n new_switch['ip_actual']['vlan'] = 0\n if new_switch['ip_config']['network'] and new_switch['ip_config']['network'] != \"default\":\n new_switch['ip_config']['vlan'] = site['network_template']['networks'][new_switch['ip_config']['network']]['vlan_id']\n logger.debug(f\"Matched {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n elif new_switch['ip_config']['network'] and new_switch['ip_config']['network'] == \"default\":\n new_switch['ip_config']['vlan'] = 1\n logger.debug(f\"Matched {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n else:\n new_switch['ip_config']['vlan'] = 0\n logger.error(f\"Did not match {new_switch['name']} management network '{new_switch['ip_config']['network']}' to VLAN {new_switch['ip_config']['vlan']}\")\n new_switches.append(new_switch)\n self.switches = new_switches", "def getSwitchDetail(serial):\n swDB = switchdb.DB()\n raw_info = swDB.getSwitchDetail(serial)\n switch = {}\n for row in raw_info:\n switch[\"name\"] = row[0]\n switch[\"serial\"] = row[1]\n switch[\"model\"] = row[2]\n switch[\"swver\"] = row[3]\n switch[\"ip\"] = row[4]\n switch[\"check\"] = row[5]\n switch[\"total\"] = row[6]\n switch[\"up\"] = row[7]\n switch[\"down\"] = row[8]\n switch[\"disabled\"] = row[9]\n switch[\"int10m\"] = row[10]\n switch[\"int100m\"] = row[11]\n switch[\"int1g\"] = 
row[12]\n switch[\"int10g\"] = row[13]\n switch[\"int25g\"] = row[14]\n switch[\"int40g\"] = row[15]\n switch[\"int100g\"] = row[16]\n switch[\"copper\"] = row[17]\n switch[\"sfp\"] = row[18]\n switch[\"virtual\"] = row[19]\n if switch[\"total\"] == 0:\n switch[\"capacity\"] = 0\n else:\n switch[\"capacity\"] = int((switch[\"up\"] / switch[\"total\"]) * 100)\n swDB.close()\n return switch", "def get_switching_options(self):\n\n return self._switch_opt_infos.iterkeys()", "def get_switches_stats(self, site_id: str) -> List:\n try:\n stats = self.api.get(host=self.host, endpoint=f\"/api/v1/sites/{site_id}/stats/devices?type=switch\")\n except Exception as e:\n logger.error(f\"{TextColors.FAIL}Error getting switch stats:{TextColors.ENDC} {e}\")\n raise e\n return stats", "def get_system_state(self):\n byte = self.system_state\n return {\n 'chksum': bool(byte & (1 << 6)),\n 'ack': bool(byte & (1 << 4)),\n 'FPGAboot': bool(byte & (1 << 2)),\n 'FPGArun': bool(byte & (1 << 1)),\n 'FPGAcom': bool(byte & (1 << 0)),\n }", "def get_switch_port_map(self,switch_name):\n\n # Now do a sort and return a map having port nos & connected devices\n myswitch_pmap = []\n self.sw_port_mapping[switch_name].sort()\n idx = 1\n for swname in self.sw_port_mapping[switch_name]:\n myswitch_pmap.append( (idx, swname) )\n idx = idx + 1\n return myswitch_pmap", "def get_switch_port_mapping(self,switch_name):\n switch_list = []\n switch_list = self.__graph_dict[switch_name]\n return switch_list", "def GetKillSwitchEnabled(self):\n status = self.wifi.GetKillSwitchStatus()\n return status", "def get_health_dashboard(self):\n result = {}\n fabric_switches_dns, fabric_switches_rns = self.get_fabric_switches()\n for fabric_switch in fabric_switches_rns:\n result[fabric_switch] = {}\n # Switch health\n Health_Inst_mo = self.moDir.lookupByDn('topology/pod-1/' + fabric_switch + '/sys/health')\n result[fabric_switch]['Health'] = Health_Inst_mo.cur\n\n # Switch Policy CAM table\n cam_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +\n '/sys/eqptcapacity/CDeqptcapacityPolUsage5min')\n result[fabric_switch]['Policy CAM table'] = cam_usage_mo.polUsageCum + ' of ' + cam_usage_mo.polUsageCapCum\n\n # Switch MAC table\n multicast_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +\n '/sys/eqptcapacity/CDeqptcapacityMcastUsage5min')\n result[fabric_switch]['Multicast'] = multicast_usage_mo.localEpCum + ' of ' + multicast_usage_mo.localEpCapCum\n\n # VLAN\n vlan_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +\n '/sys/eqptcapacity/CDeqptcapacityVlanUsage5min')\n result[fabric_switch]['VLAN'] = vlan_usage_mo.totalCum + ' of ' + vlan_usage_mo.totalCapCum\n return result", "def _get_system_hardware(self):\n return self._get_system_status()[\"hardware\"]", "def get_logical_switch(cls, client_object, get_logical_switch=None):\n _ = get_logical_switch\n header_keys = ['VNI', 'Controller IP Address', 'Link Status']\n attribute_map = {'vni': 'switch_vni',\n 'controller ip address': 'controller_ip',\n 'link status': 'controller_status'}\n nsxa_socket = cls._get_nsxa_socket(client_object)\n cmd = ('%s -t %s vni/list ' % (cls.CLI, nsxa_socket))\n out = client_object.connection.request(cmd).response_data\n horizontal_parser = horizontal_table_parser.HorizontalTableParser()\n switch_dicts = horizontal_parser.get_parsed_data(\n out, header_keys=header_keys)['table']\n for switch_dict in switch_dicts:\n replication_mode = cls.get_replication_mode(\n client_object, 
switch_vni=switch_dict['vni'])\n switch_dict['replication_mode'] = replication_mode\n for dict_key in switch_dict.keys():\n switch_dict[dict_key] = switch_dict[dict_key].lower()\n mapped_pydict = utilities.map_attributes(\n attribute_map, {'table': switch_dicts})\n return logical_switch_schema.LogicalSwitchSchema(py_dict=mapped_pydict)", "def get_switch(self, *labels):\n if len(labels) == 1 and not isinstance(labels[0], str):\n try:\n labels = list(labels[0])\n except TypeError:\n pass\n results = {}\n for label in labels:\n if isinstance(label, (ArduinoSwitchControlSwitch,\n ArduinoSwitchControlConnector)):\n label = label.label\n if label in self.switches:\n par = self.parameters[f'switch_{label}_mode']\n elif label in self.inputs:\n par = self.parameters[f'route_{label}_mode']\n elif label.startswith('switch_'):\n if label[7:] not in [str(lab) for lab in self.switches]:\n raise SwitchError(f\"No switch with label {label[7:]}\")\n par = self.parameters[f'{label}_mode']\n elif label.startswith('route_'):\n if label[6:] not in [str(lab) for lab in self.inputs]:\n raise ConnectorError(f\"No input with label {label[6:]}\")\n if f'{label}_mode' not in self.parameters:\n raise RouteError(f\"No route starting at input {label[6:]}\")\n par = self.parameters[f'{label}_mode']\n else:\n raise Exception(f\"parameter label {label} not recognized.\")\n\n results[label] = par()\n return results", "def _get_hardware_info(self) -> list:\n model = ctypes.create_string_buffer(8)\n model_size = ctypes.c_ulong(8)\n type_num = ctypes.c_ushort()\n channel_num = ctypes.c_ushort()\n notes = ctypes.create_string_buffer(48)\n notes_size = ctypes.c_ulong(48)\n firmware_version = ctypes.c_ulong()\n hardware_version = ctypes.c_ushort()\n modification_state = ctypes.c_ushort()\n\n ret = self._dll.LS_GetHardwareInfo(\n self._serial_number,\n ctypes.byref(model), model_size,\n ctypes.byref(type_num), ctypes.byref(channel_num),\n ctypes.byref(notes), notes_size, ctypes.byref(firmware_version),\n ctypes.byref(hardware_version), ctypes.byref(modification_state)\n )\n\n self._check_error(ret)\n return [model.value, type_num.value, channel_num.value,\n notes.value, firmware_version.value, hardware_version.value,\n modification_state.value]" ]
[ "0.8233298", "0.8158193", "0.6846591", "0.68295014", "0.6802005", "0.6770927", "0.64101845", "0.62502646", "0.6102917", "0.5934764", "0.58256066", "0.5736322", "0.567661", "0.5668026", "0.56561065", "0.5612815", "0.559791", "0.5555421", "0.5537869", "0.55164707", "0.5462406", "0.54437745", "0.54188085", "0.5414023", "0.54122674", "0.5375886", "0.53722036", "0.53528357", "0.5343803", "0.5329285" ]
0.8395653
0
Subclass this method in a platform module to configure a driver. This method should return a reference to the driver's platform interface object which will be called to access the hardware.
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict) -> "DriverPlatformInterface":
    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure(self):\n\n self.platform.configure()", "def configure_hardware_sound_system(self) -> \"HardwareSoundPlatformInterface\":\n raise NotImplementedError", "def _init_hardware(self):\n return", "def connect(self):\n\n log.info('Connecting to device \"{0}\" using {1} at \"{2}\".'.format(\n self.name, self.driver, self.connection_resource))\n\n if self.driver == drivers.pyvisa:\n try:\n if not (legacyVisa):\n rm = pyvisa.ResourceManager()\n self.device = rm.open_resource(**self.connection_resource)\n else:\n self.device = pyvisa.Instrument(**self.connection_resource)\n except pyvisa.VisaIOError as e:\n raise DeviceNotFoundError(\n 'Could not open device at \"{0}\".'.format(self.connection_resource), e)\n\n elif self.driver == drivers.telnet:\n self.device = telnetlib.Telnet(\n timeout=2, **self.connection_resource)\n elif self.driver == drivers.requests:\n r = requests.get(self.request_address)\n if r.status_code != 200:\n raise DeviceNotFoundError(\n 'Could not connect to device at \"{0}\".'.format(self.connection_resource), e)\n\n elif self.driver == drivers.lgpib:\n try:\n self.device = Gpib.Gpib(**self.connection_resource)\n except gpib.GpibError as e:\n raise DeviceNotFoundError(\n 'Could not open device at \"{0}\".'.format(self.connection_resource), e)\n elif self.driver == drivers.pyvisa_usb:\n try:\n if not (legacyVisa):\n rm = pyvisa.ResourceManager()\n self.device = rm.open_resource(**self.connection_resource)\n else:\n class USBDevice(pyvisa.Instrument):\n \"\"\"\n Using USB devices with PyVISA requires a small hack: the object must be an Instrument, but we can't call Instrument.__init__.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n # Bypass the initialization in visa.Instrument, due to \"send_end\" not being valid for USB.\n pyvisa.ResourceTemplate.__init__(\n self, *args, **kwargs)\n\n self.device = USBDevice(**self.connection_resource)\n\n except pyvisa.VisaIOError as e:\n raise DeviceNotFoundError(\n 'Could not open device at \"{0}\".'.format(self.connection_resource), e)\n\n try:\n self._connected()\n except Exception as e:\n raise DeviceNotFoundError('Could not finish connection to device at \"{0}\".'.format(\n self.connection_resource), e)", "def platform_init(self):\n if isinstance(self.imu, MockImuController) or isinstance(self.pwm_controller, MockPWMController):\n print(\"Mock components detected, creating mock antenna controller\")\n platform = MockPlatformController(self.azimuth_servo, self.elevation_servo, self.imu)\n else:\n print(\"Initializing PIDAntennaController class\")\n platform = PIDPlatformController(\n self.azimuth_servo,\n self.elevation_servo,\n self.imu,\n pid_output_limits=self.pid_config.get(\"output_limits\"),\n pid_frequency=self.pid_config.get(\"period\"),\n p=self.pid_config.get(\"p\"),\n i=self.pid_config.get(\"i\"),\n d=self.pid_config.get(\"d\")\n )\n \n self.platform = platform\n\n if not isinstance(self.gps, MockGPSController):\n self.gps_update_loop = GPSLocationController(self.gps)\n self.gps_update_loop.start()\n else:\n self.gps_update_loop = None\n \n return platform", "def configure_driver(self, config):\n raise NotImplementedError", "def setPlatform(self):\n\t\treturn None", "def _configure_device():\n vendor_id = 0x04D8 # These ids are microchip's libusb based device\n product_id = 0x0204 # ids\n dev = usb.core.find(idVendor=vendor_id, idProduct = product_id)\n try:\n dev.set_configuration()\n return dev\n except:\n return None", "def getPlatform(self):\n\t\treturn None", "def _setup_io_devices(self) -> None:\n # Add 
PCI\n self.platform.pci_host.pio = self.iobus.mem_side_ports\n\n # Add Ethernet card\n self.ethernet = IGbE_e1000(\n pci_bus=0, pci_dev=0, pci_func=0, InterruptLine=1, InterruptPin=1\n )\n\n self.ethernet.host = self.platform.pci_host\n self.ethernet.pio = self.iobus.mem_side_ports\n self.ethernet.dma = self.iobus.cpu_side_ports\n\n if self.get_cache_hierarchy().is_ruby():\n for device in self._off_chip_devices + self._on_chip_devices:\n device.pio = self.iobus.mem_side_ports\n\n else:\n for device in self._off_chip_devices:\n device.pio = self.iobus.mem_side_ports\n for device in self._on_chip_devices:\n device.pio = self.get_cache_hierarchy().get_mem_side_port()\n\n self.bridge = Bridge(delay=\"10ns\")\n self.bridge.mem_side_port = self.iobus.cpu_side_ports\n self.bridge.cpu_side_port = (\n self.get_cache_hierarchy().get_mem_side_port()\n )\n self.bridge.ranges = [\n AddrRange(dev.pio_addr, size=dev.pio_size)\n for dev in self._off_chip_devices\n ]\n\n # PCI\n self.bridge.ranges.append(AddrRange(0x2F000000, size=\"16MB\"))\n self.bridge.ranges.append(AddrRange(0x30000000, size=\"256MB\"))\n self.bridge.ranges.append(AddrRange(0x40000000, size=\"512MB\"))", "def _connect_to_hardware(self):\n if False: # !!!TEMP:need to validate config...\n if len(self.config['ports']) > 1:\n self.log.fatal(\"only one slave com port is supported\")\n if len(self.config['ports']) == 0:\n self.log.warning(\"no communication port setted!\")\n return\n port = self.config['ports'][0]\n self.communicator = RaspSerialCommunicator(\n platform=self, port=port,\n baud=self.config['baud'])\n self.communicator = RaspSerialCommunicator(\n platform=self, port='/dev/ttyAMA0',\n baud=115200)", "def port_maker(self, platform):\n raise NotImplementedError()", "def configure_dmd(self) -> \"DmdPlatformInterface\":\n raise NotImplementedError", "def driver(self):\n \n return self.__driver", "def doInitializeDevice(self):\n try:\n\n if self.serialNumber == \"*\" or self.serialNumber == \".*\":\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct)\n else:\n self.device = OISpectrometer.matchUniqueUSBDevice( idProduct=self.idProduct,\n serialNumber=self.serialNumber)\n\n \"\"\" Below are all the USB protocol details. This requires reading\n the USB documentation, the Spectrometer documentation and many other \n details. What follows may sound like gibberish.\n\n There is a single USB Configuration (default) with a single USB Interface \n without alternate settings, so we can use (0,0).\n \"\"\"\n self.device.set_configuration()\n self.configuration = self.device.get_active_configuration()\n self.interface = self.configuration[(0,0)]\n\n \"\"\"\n We are working on the reasonable assumption from the documentation\n that the first input and output endpoints are the main endpoints and the\n second input is the data endpoint. If that is not the case, the subclass can\n simply reassign the endpoints properly in its __init__ function. \n \"\"\"\n for endpoint in self.interface:\n \"\"\" The endpoint address has the 8th bit set to 1 when it is an input.\n We can check with the bitwise operator & (and) 0x80. It will be zero\n if an output and non-zero if an input. \"\"\"\n if endpoint.bEndpointAddress & 0x80 != 0:\n self.inputEndpoints.append(endpoint)\n else:\n self.outputEndpoints.append(endpoint)\n\n\n if len(self.inputEndpoints) >= 2 or len(self.outputEndpoints) > 0:\n \"\"\" We have at least 2 input endpoints and 1 output. 
We assign the\n endpoints according to the documentation, otherwise\n the subclass will need to assign them.\"\"\"\n self.epCommandOut = self.outputEndpoints[self.epCommandOutIdx]\n self.epMainIn = self.inputEndpoints[self.epMainInIdx]\n self.epSecondaryIn = self.inputEndpoints[self.epSecondaryInIdx]\n self.epParameters = self.inputEndpoints[self.epParametersIdx]\n self.epStatus = self.inputEndpoints[self.epStatusIdx]\n\n self.flushEndpoints()\n self.sendCommand(b'0x01')\n time.sleep(0.1)\n self.getCalibration()\n except Exception as err:\n raise UnableToInitialize(\"Error when initializing device: {0}\".format(err))", "def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> \"SwitchPlatformInterface\":\n raise NotImplementedError", "def setup_usb(self):\n global DEVICE\n global epBulkWriter\n global epBulkReader\n global VID\n global PID\n\n DEVICE = usb.core.find(idVendor=0x2AB9,idProduct=0xFFFF)\n if DEVICE is None:#If not a LVPM, look for an HVPM.\n DEVICE = usb.core.find(idVendor=0x04d8,idProduct=0x000b)\n VID = '0x4d8'\n PID = '0xb'\n if \"Linux\" == platform.system():\n try:\n DEVICE.detach_kernel_driver(0)\n except:\n pass # already unregistered\n DEVICE.set_configuration()\n\n cfg = DEVICE.get_active_configuration()\n intf = cfg[(0,0)]\n\n epBulkWriter = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_OUT)\n epBulkReader = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_IN)", "def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n 
elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n _update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, 
_select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True", "def get_device(self):\n raise NotImplementedError()", "def driver(self) -> 'outputs.CSIPowerMaxSpecDriver':\n return pulumi.get(self, \"driver\")", "def setup_platform(hass, config, add_devices_callback, discovery_info=None):\n host = config.get(CONF_HOST)\n name = config.get(CONF_NAME)\n token = config.get('token')\n\n add_devices_callback([MiroboSwitch(name, host, token)])", "def __init__(__self__, *,\n driver: 'outputs.CSIPowerStoreSpecDriver'):\n pulumi.set(__self__, \"driver\", driver)", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def __init__(self, machine):\n super().__init__(machine)\n\n # Set default platform features. Each platform interface can change\n # these to notify the framework of the specific features it supports.\n self.features['has_drivers'] = True\n self.features['max_pulse'] = 255", "def get_platform(self):\n return self._platform", "def device_connect(self):\n pass", "def __init__(self):\n self.hw = dev_hwinfo.device()\n self.ethKey=\"Ethernet\"\n self.ethAllInterfaceName=[]\n dir_path = os.path.dirname(os.path.realpath(__file__))\n self.myDefine = init_define.main()\n self.mPlatform=self.hw.getPlatform()", "def get_driver(self):\n\t\treturn self.driver", "def driver(self) -> 'outputs.CSIUnitySpecDriver':\n return pulumi.get(self, \"driver\")", "def _create_driver(self, config):\n raise NotImplementedError(\"Must override WebAccess::_create_driver.\")" ]
[ "0.6744216", "0.646518", "0.6447951", "0.6382508", "0.63278764", "0.6251928", "0.6243054", "0.62248623", "0.62218076", "0.61590505", "0.60939956", "0.6066648", "0.6025949", "0.59284776", "0.5910068", "0.5901698", "0.58893013", "0.5842704", "0.58287215", "0.5820805", "0.5807339", "0.5799669", "0.57696825", "0.57696825", "0.5735643", "0.57356197", "0.57271767", "0.5723177", "0.5713638", "0.5701723" ]
0.71012676
0
Subclass this method in a platform module to clear a hardware switch rule for this switch. Clearing a hardware rule means actions on this switch will no longer affect coils. Another way to think of this is that it 'disables' a hardware rule. This is what you'd use to disable flippers and autofire_coils during tilt, game over, etc.
def clear_hw_rule(self, switch: SwitchSettings, coil: DriverSettings):
    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear_hw_rule(self, switch, coil):\n raise NotImplementedError", "def clear_hw_rule(self, switch, coil):\n self.log.info(\"clear_hw_rule(coil=%s sw=%s)\" %\n (coil.hw_driver.number, switch.hw_switch.number))\n self.communicator.rule_clear(coil.hw_driver.number, switch.hw_switch.number)", "def turn_off(self, **kwargs):\n self.smartplug.turn_off()", "def turn_off(self):\n self._state = False\n if(self._device['type'] == '_DT-PLUG' or self._device['type'] == '_THIMR'):\n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"op\":0 }', 5)\n if(self._device['type'] == '_REALY2' or self._device['type'] == '_REALY4'): \n self._send_cmd(self._device, 'cmd=ctrl&devices={[' + self._device[\"sid\"] + ']}&op={\"cmd\":5,\"'+ self._data_key +'\":0 }', 5)", "def turn_off(self, **kwargs):\n self._state = False\n self.schedule_update_ha_state()\n self._hs_color = None\n self._attributes[\"hs_color\"] = self._hs_color\n self._attributes[\"brightness\"] = None", "def pibooth_reset(cfg, hard):", "def turn_eht_off(self):\n raise NotImplementedError", "def _force_off(self):\n self._interface.set('fw_wp_vref', self._fw_wp_vref)\n self._interface.set('fw_wp_en', 'on')\n self._interface.set('fw_wp', 'off')", "def clear(self):\n self.cmd(0x33) # $33 8-bit mode\n self.cmd(0x32) # $32 8-bit mode\n self.cmd(0x28) # $28 8-bit mode\n self.cmd(0x0C) # $0C 8-bit mode\n self.cmd(0x06) # $06 8-bit mode\n self.cmd(0x01) # $01 8-bit mode", "def switch_off(self):\n raise DeviceException(DeviceException.FEATURE_NOT_IMPLEMENTED)", "def resetDeviceStates(self):", "def off(config: dict):\n switch_device(config, config[\"inching\"], \"off\")", "def turn_aux_heat_off(self):\n self.set_operation_mode(STATE_HEAT)", "def turn_off(self, **kwargs) -> None:\n self._device.writeCharacteristic(self._handle, b'\\x01', True)\n self._state = False\n self.schedule_update_ha_state()", "def off_switch(self):\n self._switch_callback = None", "def setOff(self, command):\r\n self.setDriver('ST', 0)", "def reset_config(modem, disable_auto_linking, monitor_mode, auto_led, deadman):\n modem.configuration[DISABLE_AUTO_LINKING].set_value(not disable_auto_linking)\n modem.configuration[MONITOR_MODE].set_value(not monitor_mode)\n modem.configuration[AUTO_LED].set_value(not auto_led)\n modem.configuration[DEADMAN].set_value(not deadman)", "def clear_single_switch_rules(switch_id,in_port,out_port):\n print(\"** Remove flows from {}\".format(switch_id))\n in_rule = \"in_port={}\".format(in_port)\n out_rule = \"in_port={}\".format(out_port)\n subprocess.Popen([\"ovs-ofctl\",\"-O\",\"OpenFlow13\",\"del-flows\",switch_id,in_rule],\n stdout=subprocess.PIPE).wait()\n subprocess.Popen([\"ovs-ofctl\",\"-O\",\"OpenFlow13\",\"del-flows\",switch_id,out_rule],\n stdout=subprocess.PIPE).wait()\n\n ### If debugging, remove the comments below to see what the flow rules are\n # result = subprocess.Popen([\"ovs-ofctl\",\"-O\",\"OpenFlow13\",\"dump-flows\",switch_id],\n # stdout=subprocess.PIPE).communicate()[0]\n # print (result)", "def turn_off(self, **kwargs: Any) -> None:\n self._device.power_on = False\n _LOGGER.debug(\"Turn off light %s\", self._device.ip)", "def turn_off(self):\n if self._module_type == NA_VALVE:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id,\n self._room_id,\n STATE_NETATMO_MANUAL,\n DEFAULT_MIN_TEMP,\n )\n elif self.hvac_mode != HVAC_MODE_OFF:\n self._data.homestatus.setroomThermpoint(\n self._data.home_id, self._room_id, STATE_NETATMO_OFF\n )\n self.update_without_throttle = True\n 
self.schedule_update_ha_state()", "def kill_all(self):\n self.settings['lights_on'] = 12\n self.settings['lights_off'] = 12\n self.settings['overhead_level'] = 0\n self.settings['soil_1'] = 0\n self.settings['soil_2'] = 0\n self.settings['soil_3'] = 0\n self.settings['soil_4'] = 0\n self.scale_overhead_level.set(self.settings['overhead_level'])\n self.scale_smc1.set(self.settings['soil_1'])\n self.scale_smc2.set(self.settings['soil_2'])\n self.scale_smc3.set(self.settings['soil_3'])\n self.scale_smc4.set(self.settings['soil_4'])\n self.active_changes = True # (flag) Once changes are retrieved, we assume that they will be sent to the controller", "def _reset(self):\n self._interface.set('fw_wp_en', 'off')", "def _doDisableRegulation(self):\n self._cmdRegulOff()", "def set_light_off(self):\r\n self._light = \"OFF\"", "def turn_off(self, **kwargs: Any) -> None:\n if (\n DPCODE_LIGHT in self.tuya_device.status\n and DPCODE_SWITCH not in self.tuya_device.status\n ):\n commands = [{\"code\": DPCODE_LIGHT, \"value\": False}]\n else:\n commands = [{\"code\": DPCODE_SWITCH, \"value\": False}]\n self._send_command(commands)", "def _reset(cls):\r\n cls._CONFIGURED = False\r\n cls._ENABLED = {}", "def turn_off(self, **kwargs) -> None:\n self.wink.set_state(False)", "def deconfigure(self):\n\n self.platform.deconfigure()", "def off(self, include_ethernet=False):\n if not self.healthy:\n self.health_check()\n if self._pre_off_func:\n self._pre_off_func()\n switchboard = self._get_switchboard_if_initialized()\n if self._power_and_data_share_cable:\n if switchboard:\n switchboard.add_log_note(\n f\"comm_power.off() called on {self._device_name} set communication \"\n f\"port {self.port_number} to charge as device has a single USB \"\n \"cable for data and power.\")\n switchboard.close_all_transports()\n self._hub.switch_power.power_on(self.port_number, data_sync=False)\n if self.secondary_port_number is not None:\n self._hub.switch_power.power_on(\n self.secondary_port_number, data_sync=False)\n else:\n if switchboard:\n switchboard.close_all_transports()\n self._hub.switch_power.power_off(self.port_number)\n if self.secondary_port_number is not None:\n self._hub.switch_power.power_off(self.secondary_port_number)\n if include_ethernet:\n self.ethernet_off()", "def turnLightingSystemOff():\n dislin.light('OFF')" ]
[ "0.8434804", "0.7971575", "0.6231786", "0.62125003", "0.61418617", "0.5927485", "0.5882054", "0.58764935", "0.5856638", "0.5834958", "0.58010286", "0.57616067", "0.5751494", "0.57407105", "0.5730733", "0.5707406", "0.5672204", "0.56031424", "0.5601723", "0.55898315", "0.5540033", "0.5536645", "0.5536562", "0.5526846", "0.5518861", "0.55161625", "0.5507313", "0.55008096", "0.54824203", "0.54824066" ]
0.8259269
1