Dataset columns:

  query            string  (length 9 to 9.05k)
  document         string  (length 10 to 222k)
  metadata         dict
  negatives        list    (length 30)
  negative_scores  list    (length 30)
  document_score   string  (length 4 to 10)
  document_rank    string  (2 distinct values)
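Each record below follows the column order above. A minimal sketch for iterating a JSON Lines export of these records; the file name is hypothetical and only the column names are taken from the schema:

import json

# Hypothetical file name; each line is assumed to hold one record with the columns listed above.
with open("code_search_triplets.jsonl", encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        assert len(row["negatives"]) == len(row["negative_scores"]) == 30
        print(row["query"], row["document_score"], row["document_rank"])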
query: compute idf and objectfeatures matrix for training set
document:

def idf_object_features_set(set_id):
    # idf for calc features of new docs
    # object-features for learning model
    # doc_index links doc_id and row index in object-features
    # lemma_index links lemmas and column index in object-features
    # get lemmas of all docs in set
    docs = db.get_lemmas_freq(set_id)
    # document frequency - number of documents with lemma
    doc_freq = {}
    # number (sum of weights) of lemmas in document
    doc_size = {}
    # index of lemma in overall list
    lemma_index = {}
    # lemma counter in overall list
    lemma_counter = 0
    # document index
    doc_index = {}
    # document counter in overall list
    doc_counter = 0
    for doc_id in docs:
        # initialize doc_size
        doc_size[doc_id] = 0
        # add document in overall list by giving index
        doc_index[doc_id] = doc_counter
        doc_counter += 1
        # count lemmas of doc
        for lemma in docs[doc_id]:
            # increase number of docs with lemma
            doc_freq[lemma] = doc_freq.get(lemma, 0) + 1
            # increase number of lemmas in document
            doc_size[doc_id] += docs[doc_id][lemma]
    # compute idf
    idf = {}
    for lemma in doc_freq:
        idf[lemma] = -math.log(doc_freq[lemma] / doc_counter)
    # and lemmas add in overall list by giving index
    for lemma in idf:
        if idf[lemma] != 0:
            lemma_index[lemma] = lemma_counter
            lemma_counter += 1
    # initialization objects-features matrix
    object_features = np.zeros((doc_counter, lemma_counter))
    # fill objects-features matrix
    for doc_id in docs:
        doc_lemmas = docs[doc_id]
        for lemma in doc_lemmas:
            if lemma_index.get(lemma, -1) != -1:
                object_features[doc_index[doc_id], lemma_index[lemma]] = \
                    doc_lemmas[lemma] / doc_size[doc_id] * idf[lemma]
    # check features with 0 for all documents
    feat_max = np.sum(object_features, axis=0)
    # print_lemmas(set_id, [k for k, v in enumerate(feat_max) if v == 0], lemma_index, idf)
    # check documents with 0 for all lemmas
    # print(np.min(np.sum(object_features, axis=1)))
    # save to db: idf, indexes and object_features
    db.put_training_set_params(set_id, idf, doc_index, lemma_index, object_features)
    # print(idf)
    # print(doc_index)
    # print(lemma_index)
    # print(object_features)
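The weight written into object_features above is the lemma's in-document count divided by the document size, multiplied by idf = -log(df / N). A self-contained toy check of that weighting; the counts here are made up, not taken from any training set:

import math

docs = {"d1": {"cat": 2, "dog": 1}, "d2": {"dog": 3}}   # lemma counts per doc
n_docs = len(docs)
df = {"cat": 1, "dog": 2}                               # docs containing each lemma
idf = {lemma: -math.log(df[lemma] / n_docs) for lemma in df}
# "dog" appears in every doc, so its idf is 0 and it would be dropped from lemma_index
weight_cat_d1 = docs["d1"]["cat"] / sum(docs["d1"].values()) * idf["cat"]
print(round(weight_cat_d1, 4))                          # 2/3 * ln(2), about 0.4621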
metadata: { "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_features(train_data, test_data):\n #train_wc_matrix, test_wc_matrix = get_word_count_features(train_data, test_data)\n train_idf_matrix, test_idf_matrix = get_idf_features(train_data, test_data)\n train_ngram_matrix, test_ngram_matrix = get_ngram_features(train_data, test_data)\n # train_liwc_matrix, test_liwc_matrix = get_liwc_features(train_data, test_data)\n return sparse.hstack([train_idf_matrix, train_ngram_matrix]), \\\n sparse.hstack([test_idf_matrix, test_ngram_matrix])", "def evaluate_hmdb51_fusion():\n vlen = 0\n ob_suffix = '-max.feat.npy.gz'\n fv_suffix = '_fv.npy.gz'\n ob_root = '/home/syq/research_final/data/features/ob_hmdb51_pooled_python/'\n fv_root = '/home/syq/research_final/data/dense-traj/fv_hmdb51_python/'\n hmdb_splits = 'testTrainMulti_7030_splits/'\n categories = os.listdir(fv_root)\n weight = 1.0\n weights = [i / 20.0 for i in range(21)]\n acc_to_weights = {}\n\n for weight in weights:\n print \"Weight: %.2f\" % weight\n accs = np.zeros(3)\n for splitnum in range(1,4):\n ts = time.time()\n trainfiles, testfiles = hmdb51_splits.loadsplit(categories,\n hmdb_splits,\n splitnum)\n print 'Have %d train files' % len(trainfiles)\n print 'Have %d test files' % len(testfiles)\n\n if not vlen:\n fp = gzip.open(os.path.join(ob_root,'%s%s'%(trainfiles[0][0][:-4],\n ob_suffix)),\"rb\")\n vlen_ob = len(np.load(fp))\n fp.close()\n print \"OB vector length is %d\" % vlen_ob\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(trainfiles[0][0][:-4],\n fv_suffix)),\"rb\")\n vlen_fv = len(np.load(fp))\n fp.close()\n print \"IDTFV vector length is %d\" % vlen_fv\n\n Dtrain_ob = np.zeros( (len(trainfiles),vlen_ob), np.float32 )\n Dtrain_fv = np.zeros( (len(trainfiles),vlen_fv), np.float32 )\n\n Ytrain = np.ones ( (len(trainfiles) )) * -1000\n\n for fi,f in enumerate(trainfiles):\n fp = gzip.open(os.path.join(ob_root,'%s%s'%(f[0][:-4],\n ob_suffix)),\"rb\")\n Dtrain_ob[fi][:] = np.load(fp)\n fp.close()\n Ytrain[fi] = f[1]\n\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(f[0][:-4],\n fv_suffix)),\"rb\")\n Dtrain_fv[fi][:] = np.load(fp)\n fp.close()\n\n Dtest_ob = np.zeros( (len(testfiles),vlen_ob), np.float32 )\n Dtest_fv = np.zeros( (len(testfiles),vlen_fv), np.float32 )\n\n Ytest = np.ones ( (len(testfiles) )) * -1000\n\n for fi,f in enumerate(testfiles):\n fp = gzip.open(os.path.join(ob_root,'%s%s'%(f[0][:-4],\n ob_suffix)),\"rb\")\n Dtest_ob[fi][:] = np.load(fp)\n fp.close()\n Ytest[fi] = f[1]\n\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(f[0][:-4],\n fv_suffix)),\"rb\")\n Dtest_fv[fi][:] = np.load(fp)\n fp.close()\n\n \"\"\"\n Early fusion\n Dtrain = np.hstack((Dtrain_ob, Dtrain_fv))\n Dtest = np.hstack((Dtest_ob, Dtest_fv))\n\n clf = OneVsRestClassifier(estimator=LinearSVC(C=100), n_jobs=8)\n acc = clf.fit(Dtrain, Ytrain).score(Dtest, Ytest)\n \"\"\"\n fv_clf = OneVsRestClassifier(estimator=LinearSVC(C=100), n_jobs=8)\n\n ob_clf = OneVsRestClassifier(estimator=SVC(C=10,\n cache_size=1000,\n kernel='linear',\n probability=True),\n n_jobs=-1)\n\n # Get probabilities for late fusion\n Dtrain_fv = fv_clf.fit(Dtrain_fv, Ytrain).decision_function(Dtrain_fv)\n Dtrain_ob = ob_clf.fit(Dtrain_ob, Ytrain).decision_function(Dtrain_ob)\n Dtest_fv = fv_clf.decision_function(Dtest_fv)\n Dtest_ob = ob_clf.decision_function(Dtest_ob)\n\n # Scale decision values b/w 0 and 1\n Dtrain_fv = preprocessing.normalize(Dtrain_fv)\n Dtrain_ob = preprocessing.normalize(Dtrain_ob)\n Dtest_fv = preprocessing.normalize(Dtest_fv)\n Dtest_ob = preprocessing.normalize(Dtest_ob)\n\n # Late fusion\n 
scores_train = (Dtrain_fv * weight) + (Dtrain_ob * (1 - weight))\n latefusion_clf = OneVsRestClassifier(estimator=LinearSVC(C=100), n_jobs=-1)\n latefusion_clf.fit(scores_train, Ytrain)\n\n scores_test = (Dtest_fv * weight) + (Dtest_ob * (1 - weight))\n acc = latefusion_clf.score(scores_test, Ytest)\n print 'Fold', splitnum, 'late fusion acc', acc\n print \"Train & testing time %.3f\" % (time.time() - ts)\n accs[splitnum-1] = acc\n acc_to_weights[weight] = accs\n\n print \"Mean accuracy: %.3f\" % accs.mean()\n with open(\"hmdb51_weight_gridsearch.txt\", \"w\") as f:\n for weight, accs in acc_to_weights.items():\n f.write(str(weight) + str(accs) + '\\n')\n return acc_to_weights\n\n \"\"\"\n with open('fv_hmdb51_accs.txt', 'w') as f:\n f.write(\"%s\\nMean:%.3f\" % (str(accs), np.mean(accs)))\n \"\"\"", "def model(features, test_features, encoding='ohe', n_folds=5):\n\n # Extract the ids\n train_ids = features['SK_ID_CURR']\n test_ids = test_features['SK_ID_CURR']\n\n # Extract the labels for training\n labels = features['TARGET']\n\n # Remove the ids and target\n features = features.drop(columns=['SK_ID_CURR', 'TARGET'])\n test_features = test_features.drop(columns=['SK_ID_CURR'])\n\n # One Hot Encoding\n if encoding == 'ohe':\n features = pd.get_dummies(features)\n test_features = pd.get_dummies(test_features)\n\n # Align the dataframes by the columns\n features, test_features = features.align(test_features, join='inner', axis=1)\n\n # No categorical indices to record\n cat_indices = 'auto'\n\n # Integer label encoding\n elif encoding == 'le':\n\n # Create a label encoder\n label_encoder = LabelEncoder()\n\n # List for storing categorical indices\n cat_indices = []\n\n # Iterate through each column\n for i, col in enumerate(features):\n if features[col].dtype == 'object':\n # Map the categorical features to integers\n features[col] = label_encoder.fit_transform(np.array(features[col].astype(str)).reshape((-1,)))\n test_features[col] = label_encoder.transform(np.array(test_features[col].astype(str)).reshape((-1,)))\n\n # Record the categorical indices\n cat_indices.append(i)\n\n # Catch error if label encoding scheme is not valid\n else:\n raise ValueError(\"Encoding must be either 'ohe' or 'le'\")\n\n print('Training Data Shape: ', features.shape)\n print('Testing Data Shape: ', test_features.shape)\n\n # Extract feature names\n feature_names = list(features.columns)\n\n # Convert to np arrays\n features = np.array(features)\n test_features = np.array(test_features)\n\n # Create the kfold object\n k_fold = KFold(n_splits=n_folds, shuffle=True, random_state=50)\n\n # Empty array for feature importances\n feature_importance_values = np.zeros(len(feature_names))\n\n # Empty array for test predictions\n test_predictions = np.zeros(test_features.shape[0])\n\n # Empty array for out of fold validation predictions\n out_of_fold = np.zeros(features.shape[0])\n\n # Lists for recording validation and training scores\n valid_scores = []\n train_scores = []\n\n # Iterate through each fold\n for train_indices, valid_indices in k_fold.split(features):\n # Training data for the fold\n train_features, train_labels = features[train_indices], labels[train_indices]\n # Validation data for the fold\n valid_features, valid_labels = features[valid_indices], labels[valid_indices]\n\n # Create the model\n model = lgb.LGBMClassifier(n_estimators=10000, objective='binary',\n class_weight='balanced', learning_rate=0.05,\n reg_alpha=0.1, reg_lambda=0.1,\n subsample=0.8, n_jobs=-1, random_state=50)\n\n # Train the 
model\n model.fit(train_features, train_labels, eval_metric='auc',\n eval_set=[(valid_features, valid_labels), (train_features, train_labels)],\n eval_names=['valid', 'train'], categorical_feature=cat_indices,\n early_stopping_rounds=100, verbose=200)\n\n # Record the best iteration\n best_iteration = model.best_iteration_\n\n # Record the feature importances\n feature_importance_values += model.feature_importances_ / k_fold.n_splits\n\n # Make predictions\n test_predictions += model.predict_proba(test_features, num_iteration=best_iteration)[:, 1] / k_fold.n_splits\n\n # Record the out of fold predictions\n out_of_fold[valid_indices] = model.predict_proba(valid_features, num_iteration=best_iteration)[:, 1]\n\n # Record the best score\n valid_score = model.best_score_['valid']['auc']\n train_score = model.best_score_['train']['auc']\n\n valid_scores.append(valid_score)\n train_scores.append(train_score)\n\n # Clean up memory\n gc.enable()\n del model, train_features, valid_features\n gc.collect()\n\n # Make the submission dataframe\n submission = pd.DataFrame({'SK_ID_CURR': test_ids, 'TARGET': test_predictions})\n\n # Make the feature importance dataframe\n feature_importances = pd.DataFrame({'feature': feature_names, 'importance': feature_importance_values})\n\n # Overall validation score\n valid_auc = roc_auc_score(labels, out_of_fold)\n\n # Add the overall scores to the metrics\n valid_scores.append(valid_auc)\n train_scores.append(np.mean(train_scores))\n\n # Needed for creating dataframe of validation scores\n fold_names = list(range(n_folds))\n fold_names.append('overall')\n\n # Dataframe of validation scores\n metrics = pd.DataFrame({'fold': fold_names,\n 'train': train_scores,\n 'valid': valid_scores})\n\n return submission, feature_importances, metrics", "def createFeatureMatrix(self,batch):\n \n feature_dim = self.__flags.no_inner_unit * self.__flags.no_outer_unit\n data = np.zeros((len(batch), self.__flags.embedding_dim, 2 * feature_dim), dtype=np.float32)\n\n count = 0\n for obj in batch:\n m1 = self.__object2Matrix(obj)\n m2 = self.__object2Matrix(obj)\n data[count, :self.__flags.embedding_dim, :feature_dim] = m1\n data[count, :self.__flags.embedding_dim, feature_dim:2 * feature_dim] = m2\n count += 1\n scores = np.zeros(len(batch), dtype=np.float32)\n\n return (data,scores)", "def __feature_set__(self):\r\n import numpy as np\r\n import datetime\r\n import time\r\n cols_norm = [col for col in self.columns]\r\n cols_lower = [col.lower() for col in self.columns]\r\n fields = []\r\n features = []\r\n date_fields = []\r\n _geom_types = {\r\n arcgis.geometry._types.Point : \"esriGeometryPoint\",\r\n arcgis.geometry._types.Polyline : \"esriGeometryPolyline\",\r\n arcgis.geometry._types.MultiPoint : \"esriGeometryMultipoint\",\r\n arcgis.geometry._types.Polygon : \"esriGeometryPolygon\"\r\n }\r\n if self.sr is None:\r\n sr = {'wkid' : 4326}\r\n else:\r\n sr = self.sr\r\n fs = {\r\n \"objectIdFieldName\" : \"\",\r\n \"globalIdFieldName\" : \"\",\r\n \"displayFieldName\" : \"\",\r\n \"geometryType\" : _geom_types[type(self.geometry[self.geometry.first_valid_index()])],\r\n \"spatialReference\" : sr,\r\n \"fields\" : [],\r\n \"features\" : []\r\n }\r\n if 'objectid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('objectid')]\r\n elif 'fid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('fid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('fid')]\r\n elif 
'oid' in cols_lower:\r\n fs['objectIdFieldName'] = cols_norm[cols_lower.index('oid')]\r\n fs['displayFieldName'] = cols_norm[cols_lower.index('oid')]\r\n else:\r\n self['OBJECTID'] = list(range(1, self.shape[0] + 1))\r\n res = self.__feature_set__\r\n del self['OBJECTID']\r\n return res\r\n if 'objectIdFieldName' in fs:\r\n fields.append({\r\n \"name\" : fs['objectIdFieldName'],\r\n \"type\" : \"esriFieldTypeOID\",\r\n \"alias\" : fs['objectIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['objectIdFieldName']))\r\n if 'globalIdFieldName' in fs and len(fs['globalIdFieldName']) > 0:\r\n fields.append({\r\n \"name\" : fs['globalIdFieldName'],\r\n \"type\" : \"esriFieldTypeGlobalID\",\r\n \"alias\" : fs['globalIdFieldName']\r\n })\r\n cols_norm.pop(cols_norm.index(fs['globalIdFieldName']))\r\n elif 'globalIdFieldName' in fs and \\\r\n len(fs['globalIdFieldName']) == 0:\r\n del fs['globalIdFieldName']\r\n if self._geometry_column_name in cols_norm:\r\n cols_norm.pop(cols_norm.index(self._geometry_column_name))\r\n for col in cols_norm:\r\n try:\r\n idx = self[col].first_valid_index()\r\n col_val = self[col].loc[idx]\r\n except:\r\n col_val = \"\"\r\n if isinstance(col_val, (str, np.str)):\r\n l = self[col].str.len().max()\r\n if str(l) == 'nan':\r\n l = 255\r\n\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeString\",\r\n \"length\" : int(l),\r\n \"alias\" : col\r\n })\r\n if fs['displayFieldName'] == \"\":\r\n fs['displayFieldName'] = col\r\n elif isinstance(col_val, (datetime.datetime,\r\n pd.Timestamp,\r\n np.datetime64,\r\n pd.datetime)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDate\",\r\n \"alias\" : col\r\n })\r\n date_fields.append(col)\r\n elif isinstance(col_val, (np.int32, np.int16, np.int8)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSmallInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (int, np.int, np.int64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeInteger\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (float, np.float64)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeDouble\",\r\n \"alias\" : col\r\n })\r\n elif isinstance(col_val, (np.float32)):\r\n fields.append({\r\n \"name\" : col,\r\n \"type\" : \"esriFieldTypeSingle\",\r\n \"alias\" : col\r\n })\r\n fs['fields'] = fields\r\n for row in self.to_dict('records'):\r\n geom = {}\r\n if self._geometry_column_name in row:\r\n geom = row[self._geometry_column_name]\r\n del row[self._geometry_column_name]\r\n for f in date_fields:\r\n try:\r\n row[f] = int(row[f].to_pydatetime().timestamp() * 1000)\r\n except:\r\n row[f] = None\r\n features.append(\r\n {\r\n \"geometry\" : dict(geom),\r\n \"attributes\" : row\r\n }\r\n )\r\n del row\r\n del geom\r\n fs['features'] = features\r\n return fs", "def generate_and_save_train_features(train_input, train_output, bag_of_words, tfidf):\n df_train = get_df(train_input)\n train_words = np.array(df_train.text.str.lower().values)\n\n bag_of_words.fit(train_words)\n\n train_words_binary_matrix = bag_of_words.transform(train_words)\n feature_names = bag_of_words.get_feature_names_out()\n\n tfidf.fit(train_words_binary_matrix)\n train_words_tfidf_matrix = tfidf.transform(train_words_binary_matrix)\n\n save_matrix(df_train, train_words_tfidf_matrix, feature_names, train_output)", "def __tf_idf_feature_extraction(self):\n print('=' * 80)\n print(\"TF-IDF Feature Extraction\")\n t0 = time()\n vectorizer = TfidfVectorizer()\n 
vec_train = vectorizer.fit_transform(self.train.text)\n vec_test = vectorizer.transform(self.test.text)\n duration = time() - t0\n print(\"DONE!!!!! total time: %fs\" % duration)\n print('=' * 80)\n return vec_train, vec_test", "def tfidf_train(newsgroups_train, n_features):\n # Extract Tfidf weights\n stop_words_list = nltk.corpus.stopwords.words('english')\n vectorizer_train = TfidfVectorizer(max_features=n_features,\n min_df=5, max_df=0.70,\n token_pattern = '[a-zA-Z]+',\n stop_words = stop_words_list)\n vectors_train = vectorizer_train.fit_transform(newsgroups_train)\n feature_names_train = vectorizer_train.get_feature_names() #features list\n dense_train = vectors_train.todense()\n\n denselist_train = np.array(dense_train).transpose() # tfidf matrix\n X_train = denselist_train.copy() # train data (tfidf)\n\n return vectorizer_train, feature_names_train, X_train", "def get_liwc_features(train_data, test_data):\n print(\"getting liwc features\")\n train_liwc_matrix = []\n test_liwc_matrix = []\n for phrase in train_data:\n liwc_scores = word_category_counter.score_text(phrase)\n feature_vector = []\n for key in liwc_categories:\n if key in liwc_scores.keys():\n # print(key)\n # print(liwc_scores[key])\n feature_vector.append(liwc_scores[key])\n else:\n feature_vector.append(0)\n # print(feature_vector)\n train_liwc_matrix.append(feature_vector)\n for phrase in test_data:\n liwc_scores = word_category_counter.score_text(phrase)\n feature_vector = []\n for key in liwc_categories:\n if key in liwc_scores.keys():\n # print(key)\n # print(liwc_scores[key])\n feature_vector.append(liwc_scores[key])\n else:\n feature_vector.append(0)\n test_liwc_matrix.append(feature_vector)\n # print(train_liwc_matrix)\n return sparse.csr_matrix(train_liwc_matrix), sparse.csr_matrix(test_liwc_matrix)", "def train_routine(training_file, output_folder):\n if output_folder[-1] != '/':\n output_folder += '/'\n\n svm_file = output_folder + 'svm.txt'\n centroid_file = output_folder + 'centroids.txt'\n ids_file = output_folder + 'ids.txt'\n\n surf = cv2.SURF(250, extended=False)\n categories = dict()\n ids = dict()\n id = 1\n features = list()\n\n print \"Extracting features\"\n for line in open(training_file):\n try:\n category, path = line.split(';')\n except:\n print \"Error: File not in proper format. 
Ensure: <category/class name>; <path to image of said category>\"\n sys.exit(0)\n path = path.strip()\n\n try:\n img = cv2.imread(path)\n #img = cv2.resize(img, (500, 500))\n except Exception as e:\n print e\n continue\n\n keypoints, descriptors = surf.detectAndCompute(img, None)\n\n if not category in categories:\n categories[category] = Category(label=category)\n ids[category] = id\n id += 1\n categories[category].add_feature(descriptors)\n\n #for category in categories:\n #f = categories[category].yield_features()\n ##features.extend(f)\n #for i in f:\n #features.extend(i)\n\n print \"Calculating centroids\"\n #np_features = numpy.array(features)\n #print \"Features: \", np_features.shape\n #centroids, labels = kmeans2(np_features, FEATURE_TYPES)\n centroids = helpers.loadObject(output_folder + 'centroids.txt')\n print centroids.shape\n\n print \"Forming bag of words\"\n X, Y = [], []\n for category in categories:\n categories[category].calc_bagofwords(centroids)\n for bow in categories[category].bagofwords:\n X.append(bow)\n Y.append(ids[category])\n print \"Fitting linear SVMs onto the bag of words\"\n lin_clf = svm.LinearSVC()\n lin_clf.fit(X, Y)\n\n helpers.saveObject(lin_clf, svm_file)\n helpers.saveObject(centroids, centroid_file)\n helpers.saveObject(ids, ids_file)", "def useTfidfVectorizer(self, data):\n if self.results:\n print()\n print(\"Extracting features from the training dataset using a sparse vectorizer\", end=\" - \")\n t0 = time()\n \n vectorizer = TfidfVectorizer(max_features=10000, stop_words='english',norm='l2',use_idf=True, sublinear_tf=False,encoding='utf-8')\n matrix = vectorizer.fit_transform(data)\n \n if self.results:\n print(\"done in %0.3fs\" % (time() - t0))\n print(\"n_samples: %0.3d, n_features: %d\" % matrix.shape)\n print()\n \n feature_names = vectorizer.get_feature_names()\n return matrix, feature_names", "def trainModel( self, featureTrain, classTrain):", "def extractFeatures(self, data, tf=False):\n tfidf_training_matrix, tfidf_terms = self.useTfidfVectorizer(data)\n \n if tf:\n tf_vectorizer = CountVectorizer(max_df=0.5, min_df=2, max_features=10000,\n stop_words='english')\n \n tf_training_matrix = tf_vectorizer.fit_transform(data)\n tf_terms = tf_vectorizer.get_feature_names()\n \n return tfidf_training_matrix, tfidf_terms, tf_training_matrix, tf_terms\n \n else:\n return tfidf_training_matrix, tfidf_terms", "def get_feature_matrix(images, all_features):\n timestamp = time()\n heigth = len(all_features)\n width = len(images)\n\n feature_matrix = zeros((heigth, width))\n for y, feature in enumerate(all_features):\n for x, image in enumerate(images):\n feature_matrix[y][x] = feature.calculate(image)\n\n stdout.write(\"\\rget feature matrix: {}\\r\".format(time() - timestamp))\n\n return feature_matrix", "def compute_features(self, X):\n F = self.feature_extractor(X)\n if self.with_dropout:\n F = self.dropout(F)\n F = F[:, None].expand(-1, self.n_primitives, -1)\n F = torch.cat([\n F,\n self.primitive_embedding[None].expand_as(F)\n ], dim=-1)\n\n B = F.shape[0]\n M = self.n_primitives\n D = 2*self.feature_extractor.feature_size\n\n assert F.shape == (B, M, D)\n return F", "def init_MF(train, num_features):\n num_user = train.shape[1]\n num_item = train.shape[0]\n user_features = np.random.rand(num_features,num_user) # user_features shape (20,943)\n item_features = np.random.rand(num_item, num_features) # item_features shape (1152,20)\n return user_features, item_features", "def preprocess_features(features):\r\n rowsum = 
np.array(features.sum(1),dtype='float')\r\n r_inv = np.power(rowsum, -1).flatten()\r\n r_inv[np.isinf(r_inv)] = 0.\r\n r_mat_inv = sp.diags(r_inv)\r\n features = r_mat_inv.dot(features)\r\n # return sparse_to_tuple(features)\r\n return features\r\n # print(features)\r\n # rowsum = np.array(features.sum(1),dtype='float')\r\n #\r\n # r_inv = np.power(rowsum, -1).flatten()\r\n # r_inv[np.isinf(r_inv)] = 0.\r\n # r_mat_inv = np.diag(r_inv)\r\n # features = r_mat_inv.dot(features)\r\n # # return sparse_to_tuple(features)\r\n # return features\r", "def evaluate_hmdb51():\n accs = np.zeros(3)\n fv_root = '/home/syq/research_final/data/dense-traj/fv_hmdb51_python/'\n fv_suffix = '_fv.npy.gz'\n hmdb_splits = 'testTrainMulti_7030_splits/'\n categories = os.listdir(fv_root)\n\n vlen = 0\n for splitnum in range(1, 4):\n ts = time.time()\n print 'Split', splitnum\n trainfiles, testfiles = hmdb51_splits.loadsplit(categories,\n hmdb_splits,\n splitnum)\n print 'Have %d train files' % len(trainfiles)\n print 'Have %d test files' % len(testfiles)\n\n if not vlen:\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(trainfiles[0][0][:-4],fv_suffix)),\"rb\")\n vlen= len(np.load(fp))\n fp.close()\n print \"Feature vector length is %d\" % vlen\n\n Dtrain = np.zeros( (len(trainfiles),vlen), np.float32 )\n Ytrain = np.ones ( (len(trainfiles) )) * -1000\n\n for fi,f in enumerate(trainfiles):\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(f[0][:-4],fv_suffix)),\"rb\")\n Dtrain[fi][:] = np.load(fp)\n fp.close()\n Ytrain[fi] = f[1]\n\n Dtest = np.zeros( (len(testfiles),vlen), np.float32 )\n Ytest = np.ones ( (len(testfiles) )) * -1000\n\n for fi,f in enumerate(testfiles):\n fp = gzip.open(os.path.join(fv_root,'%s%s'%(f[0][:-4],fv_suffix)),\"rb\")\n Dtest[fi][:] = np.load(fp)\n fp.close()\n Ytest[fi] = f[1]\n\n print Dtrain.shape, Ytrain.shape\n print Dtest.shape, Ytest.shape\n\n clf = OneVsRestClassifier(estimator=LinearSVC(C=100), n_jobs=8)\n acc = clf.fit(Dtrain, Ytrain).score(Dtest, Ytest)\n print 'Split %d accuracy: %.3f' % (splitnum, acc)\n print \"Train & testing time %.3f\" % (time.time() - ts)\n accs[splitnum -1] = acc\n\n print 'Mean accuracy is %f'%(accs.mean())\n with open('fv_hmdb51_accs.txt', 'w') as f:\n f.write(\"%s\\nMean:%.3f\" % (str(accs), np.mean(accs)))", "def _extract_features(self, ti, tf):\n makedir(self.featdir)\n\n # number of windows in feature request\n Nw = int(np.floor(((tf-ti)/self.dt)/(self.iw-self.io)))\n\n # features to compute\n cfp = ComprehensiveFCParameters()\n if self.compute_only_features:\n cfp = dict([(k, cfp[k]) for k in cfp.keys() if k in self.compute_only_features])\n else:\n # drop features if relevant\n _ = [cfp.pop(df) for df in self.drop_features if df in list(cfp.keys())]\n\n # check if feature matrix already exists and what it contains\n if os.path.isfile(self.featfile):\n t = pd.to_datetime(pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], usecols=['time'], infer_datetime_format=True).index.values)\n ti0,tf0 = t[0],t[-1]\n Nw0 = len(t)\n hds = pd.read_csv(self.featfile, index_col=0, nrows=1)\n hds = list(set([hd.split('__')[1] for hd in hds]))\n\n # option 1, expand rows\n pad_left = int((ti0-ti)/self.dto)# if ti < ti0 else 0\n pad_right = int(((ti+(Nw-1)*self.dto)-tf0)/self.dto)# if tf > tf0 else 0\n i0 = abs(pad_left) if pad_left<0 else 0\n i1 = Nw0 + max([pad_left,0]) + pad_right\n \n # option 2, expand columns\n existing_cols = set(hds) # these features already calculated, in file\n new_cols = set(cfp.keys()) - existing_cols # these features to be added\n 
more_cols = bool(new_cols)\n all_cols = existing_cols|new_cols\n cfp = ComprehensiveFCParameters()\n cfp = dict([(k, cfp[k]) for k in cfp.keys() if k in all_cols])\n\n # option 3, expand both\n if any([more_cols, pad_left > 0, pad_right > 0]) and self.update_feature_matrix:\n fm = pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], infer_datetime_format=True)\n if more_cols:\n # expand columns now\n df0, wd = self._construct_windows(Nw0, ti0)\n cfp0 = ComprehensiveFCParameters()\n cfp0 = dict([(k, cfp0[k]) for k in cfp0.keys() if k in new_cols])\n fm2 = extract_features(df0, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp0, impute_function=impute)\n fm2.index = pd.Series(wd)\n \n fm = pd.concat([fm,fm2], axis=1, sort=False)\n\n # check if updates required because training period expanded\n # expanded earlier\n if pad_left > 0:\n df, wd = self._construct_windows(Nw, ti, i1=pad_left)\n fm2 = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm2.index = pd.Series(wd)\n fm = pd.concat([fm2,fm], sort=False)\n # expanded later\n if pad_right > 0:\n df, wd = self._construct_windows(Nw, ti, i0=Nw - pad_right)\n fm2 = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm2.index = pd.Series(wd)\n fm = pd.concat([fm,fm2], sort=False)\n \n # write updated file output\n fm.to_csv(self.featfile, index=True, index_label='time')\n # trim output\n fm = fm.iloc[i0:i1] \n else:\n # read relevant part of matrix\n fm = pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], infer_datetime_format=True, header=0, skiprows=range(1,i0+1), nrows=i1-i0)\n else:\n # create feature matrix from scratch \n df, wd = self._construct_windows(Nw, ti)\n fm = extract_features(df, column_id='id', n_jobs=self.n_jobs, default_fc_parameters=cfp, impute_function=impute)\n fm.index = pd.Series(wd)\n fm.to_csv(self.featfile, index=True, index_label='time')\n \n ys = pd.DataFrame(self._get_label(fm.index.values), columns=['label'], index=fm.index)\n return fm, ys", "def _process_features(self, limit):\n\n if self.testMode:\n g = self.testgraph\n else:\n g = self.graph\n model = Model(g)\n raw = '/'.join((self.rawdir, 'feature'))\n logger.info(\"building labels for features\")\n\n line_counter = 0\n with open(raw, 'r') as f:\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n f.readline() # read the header row; skip\n for line in filereader:\n (feature_id, dbxref_id, organism_id, name, uniquename,\n residues, seqlen, md5checksum, type_id, is_analysis,\n timeaccessioned, timelastmodified) = line\n\n feature_key = feature_id\n if re.search(r'[\\|\\s\\[\\]\\{\\}\\\\<\\>]', uniquename):\n # some uniquenames have pipes or other nasty chars!\n # for example: FB||||FBrf0133242|Hugh-u1\n feature_id = self._makeInternalIdentifier(\n 'feature', feature_key)\n else:\n feature_id = 'FlyBase:'+uniquename\n self.idhash['feature'][feature_key] = feature_id\n self.feature_types[feature_key] = type_id\n self.label_hash[feature_id] = name\n\n if feature_key not in self.feature_to_organism_hash:\n self.feature_to_organism_hash[feature_key] = set()\n self.feature_to_organism_hash[feature_key].add(organism_id)\n\n # HACK - FBgn are genes, and therefore classes,\n # all else be individuals\n is_gene = False\n if re.search(r'(FBgn|FBog)', feature_id):\n self.idhash['gene'][feature_key] = feature_id\n is_gene = True\n elif re.search(r'FBa[lb]', feature_id):\n self.idhash['allele'][feature_key] = 
feature_id\n elif re.search(r'FBt[ip]', feature_id):\n self.idhash['feature'][feature_key] = feature_id\n\n if self.testMode and \\\n int(feature_key) not in self.test_keys['gene'] + \\\n self.test_keys['allele'] + self.test_keys['feature']:\n continue\n\n # now do something with it!\n # switch on type_id\n if name.strip() == '':\n name = uniquename\n\n type_key = type_id\n type_id = self.idhash['cvterm'][type_key]\n\n # skip some features by type\n types_to_skip = [\n 'SO:0000316', # CDS\n 'SO:0000696', # oligos\n 'SO:0000358', # polypeptide\n 'SO:0000234', # transcripts\n ]\n\n type_keys_to_skip = [\n 596, # pcr_product\n 57096, # mature peptide\n 57097, # signal_peptide\n 57270, # repeat masker\n 58210, # alignment\n 59643, # cDNA_clone\n 60006, # uncharacterized_change_in_nucleotide_sequence\n 61351, # oligo\n 61467, # polypeptide_domain\n 257, # exon\n 286, # intron\n ]\n\n organisms_to_skip = [\n 2 # computational result\n ]\n\n if type_id in types_to_skip \\\n or int(type_key) in type_keys_to_skip\\\n or int(organism_id) in organisms_to_skip:\n continue\n\n line_counter += 1\n\n if int(type_key) == 604: # RNAi_reagent\n # TODO add other reagents?\n self.idhash['reagent'][feature_key] = feature_id\n\n # deal with the taxonomy\n # only get taxa for features that are actually used in our set\n tax_internal_id = self._makeInternalIdentifier(\n 'organism', organism_id)\n if organism_id not in self.checked_organisms:\n # will get the NCBITax if necessary\n tax_id = self._get_organism_id(organism_id)\n self.checked_organisms.add(organism_id)\n else:\n tax_id = self.idhash['organism'][organism_id]\n\n tax_label = self.label_hash.get(tax_id)\n if not re.search(r'FBog', feature_id) \\\n and re.search(r'Drosophila', tax_label):\n # make only fly things leaders\n model.makeLeader(feature_id)\n\n if not self.testMode \\\n and limit is not None and line_counter > limit:\n pass\n else:\n if is_gene:\n model.addClassToGraph(\n feature_id, name, type_id)\n g.addTriple(\n feature_id, model.object_properties['in_taxon'],\n tax_id)\n else:\n if re.search('FBa[lb]', feature_id):\n type_id = Genotype.genoparts['allele']\n model.addIndividualToGraph(feature_id, name, type_id)\n\n # stop adding what we do not appreciate\n # if is_obsolete == 't':\n # if is_gene:\n # model.addDeprecatedClass(feature_id)\n # else:\n # model.addDeprecatedIndividual(feature_id)\n # self.deprecated_features.add(feature_key)\n\n model.addClassToGraph(tax_id)\n if tax_id != tax_internal_id:\n model.addEquivalentClass(tax_id, tax_internal_id)\n\n model.addComment(\n feature_id,\n self._makeInternalIdentifier('feature', feature_key))\n\n # TODO save checked_organisms fbid to ncbitax mapping to\n # a local file to speed up subsequent searches\n\n return", "def feature_calculator(args, graph):\n index_1 = [edge[0] for edge in graph.edges()]\n index_2 = [edge[1] for edge in graph.edges()]\n values = [1 for edge in graph.edges()]\n node_count = max(max(index_1)+1,max(index_2)+1)\n adjacency_matrix = sparse.coo_matrix((values, (index_1,index_2)),shape=(node_count,node_count),dtype=np.float32)\n degrees = adjacency_matrix.sum(axis=0)[0].tolist()\n degs = sparse.diags(degrees, [0])\n normalized_adjacency_matrix = degs.dot(adjacency_matrix)\n target_matrices = [normalized_adjacency_matrix.todense()]\n powered_A = normalized_adjacency_matrix\n if args.window_size > 1:\n for power in tqdm(range(args.window_size-1), desc = \"Adjacency matrix powers\"):\n powered_A = powered_A.dot(normalized_adjacency_matrix)\n to_add = powered_A.todense()\n 
target_matrices.append(to_add)\n target_matrices = np.array(target_matrices)\n return target_matrices", "def training_features(training_data: pd.DataFrame):\n return pd.get_dummies(\n training_data.drop(columns=[\"outstanding_balance\", \"status\", \"account_no\"])\n )", "def count_idf(self):\n idf = dict.fromkeys(range(self.instances.shape[1]), 0) # initialize for all features\n num_docs = self.instances.shape[0]\n feature_counts = self.count_document_frequency()\n for feature in feature_counts.keys():\n idf[feature] = math.log((num_docs / feature_counts[feature]), 10) if feature_counts[feature] > 0 else 0\n return idf", "def train_initial_classifier(zero_f=\"NIST/human_hcd_synthetic_oxidized.msp\",\n\t\t\t\t\t\t\t one_f=\"NIST/human_hcd_synthetic_native.msp\",\n\t\t\t\t\t\t\t selected_features_diff=[],\n\t\t\t\t\t\t\t top_mean = 1000,\n\t\t\t\t\t\t\t top_peaks = 100,\n\t\t\t\t\t\t\t max_distance = 275,\n\t\t\t\t\t\t\t distance_bins = 0.005,\n\t\t\t\t\t\t\t windowed_mode = False,\n\t\t\t\t\t\t\t out_dir=\"res/\"):\n\t#Check the file extension and parse to get features for class zero\n\tif zero_f.endswith(\".mgf\"): feats_zero,feat_bins,instance_names,count_zero = read_mgf(zero_f,sum_feats=False,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t feat_bins=selected_features_diff,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t max_dist=max_distance,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t step_size=distance_bins,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t top_peaks=top_peaks)\n\telif zero_f.endswith(\".msp\"): feats_zero,feat_bins,instance_names,count_zero = read_msp(zero_f,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsum_feats=False,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfeat_bins=selected_features_diff,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmax_dist=max_distance,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tstep_size=distance_bins,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttop_peaks=top_peaks)\n\telse: return(False) # TODO display error!\n\t\n\t#Check the file extension and parse to get features for class one\n\tif one_f.endswith(\".mgf\"): feats_one,feat_bins,instance_names,count_one = read_mgf(one_f,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sum_feats=False,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t feat_bins=selected_features_diff,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t max_dist=max_distance,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t step_size=distance_bins,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t top_peaks=top_peaks)\n\telif one_f.endswith(\".msp\"): feats_one,feat_bins,instance_names,count_one = read_msp(one_f,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sum_feats=False,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t feat_bins=selected_features_diff,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t max_dist=max_distance,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t step_size=distance_bins,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t top_peaks=top_peaks)\n\telse: return(False) # TODO display error!\n\n\t#Prepare labels equal to length class zero and one\n\ty = [0]*(count_zero)\n\ty.extend([1]*(count_one))\n\n\ty = np.array(y)\n\t\n\t#Stack the feature matrices of both classes\n\tX = scipy.sparse.vstack((feats_zero,feats_one))\n\t\n\t#Train optimizing the hyperparameters\n\txgb_model,random_search_res_xgb = train_xgb(X,y)\n\t#print(random_search_res_xgb.best_params_)\n\t#print(random_search_res_xgb.best_score_)\n\t\n\t#Train use selected 
hyperparameters\n\ttrain_xgb_lim(X,y,random_search_res_xgb.best_params_,out_dir=out_dir)\n\tplot_train_distr(xgb_model,X,y,out_dir=out_dir)\n\t\n\t#Flush to pickle\n\txgboost_to_wb(random_search_res_xgb,outfile=out_dir+\"model.pickle\")\n\t\n\trandom_search_res_xgb = pickle.load(open(out_dir+\"model.pickle\",\"rb\"))\n\t\n\t#Plot some of the feature importances and probs\n\tfscores = xgb_model.booster().get_fscore()\n\tfscores_list = sorted(list(fscores.items()),key=itemgetter(1),reverse=True)\n\tselected_features_indexes = map(int,[f.replace(\"f\",\"\") for f,n in fscores_list])\n\tselected_features_xgboost = [selected_features_diff[sfp] for sfp in selected_features_indexes]\n\tplot_feat_imp(selected_features_indexes,selected_features_diff,X,y,out_dir=out_dir)\n\t\n\treturn(random_search_res_xgb.best_params_,selected_features_xgboost)", "def featureize(F, observation_ids, all_tokens_dict, binary=False):\n (mrc_words_index,) = F\n\n n = len(mrc_words_index)\n m = len(observation_ids)\n\n # Observations\n X = np.zeros((m,n), dtype=np.float)\n\n for (i,ob_id) in enumerate(observation_ids, start=0):\n\n N = len(all_tokens_dict[ob_id])\n\n for token in all_tokens_dict[ob_id]:\n\n if token in mrc_words_index:\n\n if binary:\n X[i][mrc_words_index[token]] = 1\n else: \n X[i][mrc_words_index[token]] += 1.0\n\n if not binary:\n # Normalize by the number of tokens in each observation\n for j in range(0, N):\n X[i][j] /= float(N)\n\n return X", "def _reduceFeatures(self):\n # Adds up all profiles corresponding to each author,\n # then compiles into a matrix of these \"group\" profiles.\n group_profiles = {auth : zeros(len(self.alph)**self.N) for auth in set(self.train_data[1])}\n for i in range(len(self.train_data[1])):\n group_profiles[self.train_data[1][i]] += self.train_data[0][i]\n profile_matrix = array([group_profiles[auth] for auth in group_profiles])\n\n # Takes the variances for all features across the \"group\" profiles,\n # then extracts the indices of the features with the highest variances.\n vars = profile_matrix.var(axis=0)\n self.feature_indices = argsort(vars)[-self.features:]\n # Recompiles the training data.\n self.train_data[0] = array([prof[self.feature_indices] for prof in self.train_data[0]])", "def OTU_table_ML(OTU_table,metadata,obj_col):\n for ele in OTU_table.index:\n #print(ele)\n X.append(df.loc[ele])\n Y.append(metadata[obj_col][ele])\n precisions = []\n for train_time in range(100): \n X,Y = shuffle(X,Y)\n sample_num = len(X)\n sep_num = int(0.8*sample_num)\n train_set = [X[:sep_num],Y[:sep_num]]\n test_set = [X[sep_num:],Y[sep_num:]]\n clf = svm.SVC(gamma='scale')\n clf.fit(train_set[0], train_set[1]) \n predict_result = clf.predict(test_set[0])\n count = 0\n for i in range(len(predict_result)):\n if predict_result[i] == test_set[1][i]:\n count += 1\n else:\n pass\n precisions.append(1.0*count/len(predict_result))\n print(np.mean(precisions))", "def feature_matrix(df, user_id=None, item_id=None):\n print(\"get feature matrix\")\n df1 = df.drop_duplicates(subset=['user_id'], keep='first', inplace=False)\n user_x = None\n if user_id is not None:\n user_x = int(np.argwhere(df1['user_id'].values == user_id))\n user_features = df1[['average_stars']].values\n csr_user_features = sparse.csr_matrix(user_features)\n\n df2 = df.drop_duplicates(\n subset=['business_id'],\n keep='first',\n inplace=False)\n item_x = None\n if item_id is not None:\n item_x = int(np.argwhere(df2['business_id'].values == item_id))\n item_features = df2.iloc[:, 10:].values\n\n csr_item_features = 
sparse.csr_matrix(item_features)\n return csr_user_features, csr_item_features, user_x, item_x", "def featurize(movies):\n ###TODO \n movies['features'] = \"\" \n get_h = set() \n vocab_dict = {}\n df_dict_return = {}\n tup_list = []\n index_dict = {}\n index_dict_1 = {}\n movie_len = len(movies) \n #print(\"MovieLength::\",movie_len)\n #print(\"MOVIES:::\",movies)\n \n get_h = cal_unique_features(movies) # num_features\n\n vocab_dict = cal_unique_vocab(get_h) # vocab complete\n\n len_vocab = len(get_h)\n \n df_dict_return = cal_unique_docs(get_h,movies) # df(i)\n\n for token in get_h :\n #tup_list.clear()\n #print(\"token_GOTTTTT:::\",token)\n for index,row in movies.iterrows(): \n #print(\"row_got::\",row)\n gen_list = row['tokens']\n #print(\"gen_list::\",gen_list)\n #mov_id = row['movieId'] \n #print(\"mov_id::\",mov_id)\n token_count_1 = Counter(gen_list).most_common()[:1]\n tok = token_count_1[0]\n index_dict_1[index] = tok[1]\n token_count = gen_list.count(token)\n #print(\"token_count::\",token_count)\n tup = (index,token_count)\n #print(\"tuple::\",tup)\n tup_list.append(tup)\n #print(\"LIST_PRINT:::::::::::::\",tup_list)\n index_dict[token] = tup_list\n tup_list = []\n \n \n #print(\"INDEX_DICT:::\",index_dict) # tf(i,d)\n #print(\"INDEX_DICT_1:::\",index_dict_1) # max_k dict per docx\n \n \n for ind, row in movies.iterrows():\n data_list = []\n rows_list = []\n columns_list = []\n gen_list = row['tokens']\n #print(\"TOKENS GOTTT::\",gen_list) \n for gen in gen_list:\n tf = get_tf_value(index_dict,gen,ind)\n #print(\"TF GOTTT::\",tf) \n tf_weight = float( tf / index_dict_1[ind])\n #print(\"tf_weight::\",tf_weight)\n df_weight = float( math.log10( movie_len / df_dict_return[gen] ) )\n #print(\"df_weight::\",df_weight)\n final_tfidf = tf_weight * df_weight\n #print(\"final_tfidf::\",final_tfidf)\n data_list.append(final_tfidf)\n columns_list.append(vocab_dict[gen])\n rows_list.append(0) \n csr = csr_matrix((data_list, (rows_list,columns_list)), shape=(1,len_vocab))\n #print(\"TYPE of CSR GOTT::\",type(csr))\n #print(\"CSR GOTT:::\",csr) \n movies.set_value(ind, 'features', csr)\n \n #print(\"UPDATE movies::\",movies) \n\n return(movies,vocab_dict)\n \n\n pass", "def get_idf_features(train_data, test_data):\n tfidf = TfidfVectorizer(tokenizer = tokenize, ngram_range = (1, 2))\n tfidf.fit(train_data)\n return tfidf.transform(train_data), tfidf.transform(test_data)" ]
[ "0.6619801", "0.64366347", "0.6372779", "0.63321036", "0.63102317", "0.6256432", "0.6228051", "0.6178748", "0.60933167", "0.60557526", "0.6024493", "0.5988039", "0.5978126", "0.5965549", "0.59529054", "0.5933511", "0.5928089", "0.5916273", "0.5896251", "0.58895934", "0.5878962", "0.58715504", "0.58607686", "0.5850681", "0.58392733", "0.58366966", "0.5822072", "0.58165836", "0.5808949", "0.5808773" ]
document_score: 0.7179811
document_rank: 0
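The metadata declares a triplet objective over (query, document, negatives). A minimal sketch of expanding one record into anchor/positive/negative triplets; the helper name and the per-negative pairing are illustrative assumptions, not part of the dataset:

def to_triplets(row):
    # one (anchor, positive, negative) triplet per negative snippet
    return [(row["query"], row["document"], neg) for neg in row["negatives"]]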
query: sigmoid for every value of array x
document:

def sigmoid_array(x):
    for l in range(len(x)):
        x[l] = 1 / (1 + math.exp(-x[l]))
    return x
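The loop above updates the array element by element in place; a vectorized NumPy variant, the form most of the negative snippets below use, returns a new array instead:

import numpy as np

def sigmoid_array_np(x):
    # element-wise sigmoid over the whole array at once
    return 1.0 / (1.0 + np.exp(-np.asarray(x, dtype=float)))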
metadata: { "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sigmoid(x: np.ndarray \n ) -> np.ndarray:\n return 1/(1+np.exp(-x))", "def sigmoid(x):\r\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n S = np.ones((np.size(x),))\n for i in range(len(x)):\n S[i] = 1/(1+np.exp(-x[i]))\n return S", "def sigmoid(X):\n if isinstance(X,(list,tuple)):\n X=np.array(X)\n return 1/(1+np.exp(-X))\n #return np.exp(X)/(1+np.exp(X))", "def sigmoid(x):\r\n\r\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\n return 1 / (1 * np.exp(-x))", "def sigmoid(X):\n return 1 / (1 + np.exp(-X))", "def sigmoid(X):\n return 1 / (1 + np.exp(-X))", "def get_sigmoid(x): \n output = np.zeros(x.shape)\n ind1 = (x >= 0)\n ind2 = (x < 0)\n output[ind1] = 1 / (1 + np.exp(-x[ind1]))\n output[ind2] = np.divide(np.exp(x[ind2]), (1 + np.exp(x[ind2])))\n\n return output", "def sigmoid(x):\n return 1. / (1. + np.exp(-x))", "def sigmoid(x):\n return 1/(1+np.exp(-1*x))", "def sigmoid(x):\n return 1.0/(1 + np.exp(-x))", "def sigmoid(x):\n return 1.0 / (1.0 + np.exp(-x))", "def _sigmoid(x):\n return 1 / (1 + np.exp(-x))", "def sigmoid(x):\r\n #pred_x = (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))\r\n pred_x = 1.0 / (1.0 + np.exp(-x))\r\n return pred_x\r\n pass", "def sigmoid(x):\n pos_mask = (x >= 0)\n neg_mask = (x < 0)\n z = np.zeros_like(x)\n z[pos_mask] = np.exp(-x[pos_mask])\n z[neg_mask] = np.exp(x[neg_mask])\n top = np.ones_like(x)\n top[neg_mask] = z[neg_mask]\n return top / (1 + z)", "def _sigmoid_m(self, X):\n result = np.zeros((X.shape[0], X.shape[1]), dtype='float32')\n for i in range(X.shape[0]):\n for j in range(X.shape[1]):\n result[i, j] = 1.0 / (1.0 + np.exp(-X[i, j]))\n return result", "def sigmoid(x):\n\n s = 1 / (1 + np.exp(-x))\n\n return s", "def sigmoid(X):\n g = 1/(1 + np.exp(-X))\n return g", "def sigmoid(x):\n s = 1 / (1 + np.exp(-x))\n return s", "def sigmoid(x):\n return 1 / (1 + exp(-x))", "def sigmoid(x):\n return 1 / (1 + exp(-x))", "def sigmoid(self, x):\n return 1 / (1 + np.exp(-4.9 * x))", "def sigmoid(x):\n\treturn 1 / (1 + m.exp(-x))" ]
[ "0.855918", "0.83445925", "0.8308991", "0.8300278", "0.82980376", "0.8267586", "0.8267586", "0.8267586", "0.8267586", "0.8267586", "0.8267586", "0.8263312", "0.8179828", "0.8179828", "0.8171647", "0.8170802", "0.81627613", "0.8140558", "0.8127495", "0.8123879", "0.8111907", "0.80550957", "0.8040526", "0.7983784", "0.7951478", "0.79472625", "0.79455376", "0.79455376", "0.7925393", "0.7901421" ]
document_score: 0.84037894
document_rank: 1
query: compute entropy criteria for feature and answers
document:

def entropy_difference(feature, answers, num_lemma):
    f_max = np.max(feature)
    f_min = np.min(feature)
    # check is it unsound feature
    if f_max == f_min:
        # print('lemma 0: ', num_lemma)
        return 10000
    step = (f_max - f_min) / 1000
    p = [[0, 0] for _ in range(1000)]
    sum_p = len(feature)
    for j in range(len(feature)):
        index = math.trunc((feature[j] - f_min) / step)
        if index == 1000:
            index = 999
        p[index][answers[j]] += 1
    # difference between entropy feature+answers and just feature
    result = 0
    for i in range(1000):
        if (p[i][0] != 0) & (p[i][1] != 0):
            result += math.log2((p[i][0] + p[i][1]) / sum_p) * (p[i][0] + p[i][1]) / sum_p - \
                      math.log2(p[i][0] / sum_p) * (p[i][0]) / sum_p - \
                      math.log2(p[i][1] / sum_p) * (p[i][1]) / sum_p
    # entropy answers
    all_answers = len(answers)
    positive_answers = sum(answers) / all_answers
    negative_answers = 1 - positive_answers
    if (positive_answers == 0) or (negative_answers == 0):
        entropy_answers = 0
    else:
        entropy_answers = - positive_answers * math.log2(positive_answers) - \
                          negative_answers * math.log2(negative_answers)
    # difference between (feature entropy + answers entropy) and (feature + answers) entropy
    if entropy_answers - result < 0:
        print('negative information', num_lemma, entropy_answers - result)
    return - (entropy_answers - result)
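In information-theoretic terms, `result` above is the conditional entropy H(answers | feature bin) and `entropy_answers` is H(answers), so the function returns the negative mutual information between the binned feature and the answers (more negative means more informative). A tiny check, assuming the function above and its numpy/math imports are in scope:

# A perfectly informative binary feature: the feature value determines the
# answer, so the mutual information is 1 bit and the function returns -1.0.
feature = [0.0, 0.0, 1.0, 1.0]
answers = [0, 0, 1, 1]
print(entropy_difference(feature, answers, num_lemma=0))   # -1.0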
metadata: { "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def information_gain(features, attribute_index, targets):\r\n\r\n possible_feature_values = [0,1]\r\n \r\n possible_classifications = [0,1]\r\n \r\n feature = features[:,attribute_index]\r\n \r\n \r\n number_of_samples = len(feature)\r\n \r\n import math\r\n \r\n \r\n #current_entropy = np.sum([-(len(targets[targets==possible_classification])/number_of_samples)*math.log(len(targets[targets==possible_classification])/number_of_samples, 2) for possible_classification in possible_classifications])\r\n \r\n terms_to_be_summed_for_current_entropy = []\r\n \r\n for classification in possible_classifications:\r\n \r\n number_of_elements_with_this_classification = len(targets[targets==classification])\r\n \r\n p_for_this_classification = number_of_elements_with_this_classification/len(targets)\r\n \r\n if p_for_this_classification != 0:\r\n terms_to_be_summed_for_current_entropy.append(-p_for_this_classification*math.log(p_for_this_classification,2))\r\n else:\r\n terms_to_be_summed_for_current_entropy.append(0)\r\n \r\n current_entropy = np.sum(terms_to_be_summed_for_current_entropy)\r\n \r\n \r\n \r\n terms_to_be_summed_for_weighted_entropy = []\r\n \r\n for possible_value in possible_feature_values:\r\n \r\n targets_split_by_feature_value = targets[feature.flatten() == possible_value]\r\n \r\n if len(targets_split_by_feature_value) != 0:\r\n \r\n \r\n weight_of_feature_value = len(targets_split_by_feature_value)/len(targets)\r\n \r\n terms_for_entropy_within_subset = []\r\n \r\n for classification in possible_classifications:\r\n \r\n number_of_subset_elements_with_this_classification = len(targets_split_by_feature_value[targets_split_by_feature_value==classification])\r\n \r\n p_in_subset_for_this_classification = number_of_subset_elements_with_this_classification/len(targets_split_by_feature_value)\r\n \r\n if p_in_subset_for_this_classification != 0:\r\n terms_for_entropy_within_subset.append(-p_in_subset_for_this_classification*math.log(p_in_subset_for_this_classification,2))\r\n else:\r\n terms_for_entropy_within_subset.append(0)\r\n \r\n entropy_within_subset = np.sum(terms_for_entropy_within_subset)\r\n \r\n terms_to_be_summed_for_weighted_entropy.append(weight_of_feature_value*entropy_within_subset)\r\n \r\n weighted_entropy = np.sum(terms_to_be_summed_for_weighted_entropy)\r\n \r\n \r\n #current_entropy = np.sum(terms_to_be_summed_for_current_entropy)\r\n \r\n #weighted_entropy = np.sum([(len(feature[feature==possible_value])/number_of_samples)*(len(targets[feature==possible_value][targets[feature==possible_value]==possible_classification])/len(targets[feature==possible_value]))*math.log((len(targets[feature==possible_value][targets[feature==possible_value]==possible_classification])/len(targets[feature==possible_value])), 2) for possible_classification in possible_classifications for possible_value in possible_feature_values])\r\n\r\n information_gain = current_entropy - weighted_entropy \r\n \r\n return information_gain", "def _entropy(self, feature, node):\n entropy = 0\n categories = np.unique(feature)\n num_point = len(feature)\n for category in categories:\n # for each category in that feature\n num_category = len(feature[feature == category])\n for c in self.num_class:\n # count the number of each class\n num_category_class = len(feature[np.logical_and(feature == category, node.y == c)])\n if num_category_class == 0:\n continue\n # compute entropy/information gain or classification error\n entropy += num_category / num_point * (\n -num_category_class / num_category * 
log2(num_category_class / num_category))\n return entropy", "def entropy(y):\n EPS = 0.0005\n\n # YOUR CODE HERE\n if len(y) == 0:\n return 0.\n \n pk = np.mean(y, axis=0)\n \n return - np.sum(pk * np.log(pk + EPS))", "def calc_conditional_entropy(map,data_stat,attribute):\n #acquire the data info of the attribute stored in data_stat\n data_info = data_stat[attribute]\n #acquire the label info\n # label_col = len(data_stat)-1\n label_col = data_stat.keys()[-1]\n # print(data_stat.keys())\n label_info = data_stat[label_col]\n #acquire the data \n data = map[attribute]\n labels = map[label_col]\n conditional_entropy =0\n for data_type in data_info:\n specific_entropy = 0\n for label_type in label_info: \n #attribute data indices where all data entries are equal to a speicifc value\n data_with_spec_val_idx = data_info[data_type]\n #label indices where all labels are of same value\n spec_label_idx = label_info[label_type]\n #the intersection of the two indices above\n intersect_idx = np.intersect1d(data_with_spec_val_idx,spec_label_idx)\n #conditional probability of label being of specific value given speicific data value\n temp_prob = len(intersect_idx)/float(len(data_with_spec_val_idx))\n if temp_prob!=0:\n specific_entropy += temp_prob*math.log(temp_prob,2)\n specific_entropy = -specific_entropy\n prob = len(data_with_spec_val_idx)/float(len(data))\n conditional_entropy += prob * specific_entropy\n return conditional_entropy", "def entropy(self, **kwargs) -> TensorType:", "def entropy(self, **kwargs) -> TensorType:", "def conditional_entropy(self) -> float:\n pass", "def __entropy(self, data_set, target_feature):\n frequencies = self.__calculate_frequency(data_set, target_feature)\n feature_entropy = 0.0\n number_of_values = len(data_set)\n\n # Add entropy for each value in frequencies.\n for frequency in frequencies:\n probability = frequencies[frequency] / number_of_values\n feature_entropy += (probability * math.log(probability, 2))\n\n return feature_entropy * -1", "def entropy(data):\n\n freqs = {}\n suma = len(data)\n\n for i in range(0, len(data)):\n freqs[data[i]] = 1.0 + freqs.get(data[i], 0)\n\n res = 0.0\n for i in freqs:\n res += (freqs[i] / suma) * log((freqs[i] / suma), 2)\n return -res", "def get_entropy(*labels):\n entropies = [] #list of entropy values from each subset\n total = 0 #total number of datapoints\n for subset in labels:\n n = len(subset)\n total += n\n counts = np.unique(subset, return_counts=True)[1] #frequency of unique values\n entropy = np.sum([-(i/n) * np.log2(i/n) for i in counts]) #subset entropy calcuation\n entropies.append((entropy, n))\n return np.sum([(n/total) * ent for n, ent in iter(entropies)])", "def entropy(self):\n raise NotImplementedError", "def entropy(Y):\n\n temp = np.unique(Y, return_counts=True)\n uniq_Y = list(temp[0])\n Y_count = list(temp[1])\n \n total = sum(Y_count)\n\n ent = 0\n for elem in uniq_Y:\n prob = Y_count[uniq_Y.index(elem)] / total\n # print(\"prob:\", prob)\n ent -= (prob * (math.log2(prob)))\n # print(\"ent:\",ent)\n\n return ent", "def entropy(x):\n nz = np.nonzero(x)[0]\n return -np.sum(x[nz]*np.log2(x[nz]))", "def find_entropy(less_than_threshold,more_than_threshold):\n\n ''' Storing total number of records '''\n total_records = len(less_than_threshold) + len(more_than_threshold)\n\n ''' Calculating the probability '''\n less_than_probability = len(less_than_threshold) / total_records\n more_than_probability = len(more_than_threshold) / total_records\n\n ''' Converting the dataframe to numpy arrays '''\n 
less_than_threshold_values = less_than_threshold.values\n more_than_threshold_values = more_than_threshold.values\n\n ''' Storing the target attribute values (Muffin or Cupcake) for threshold values '''\n target_for_less_than = less_than_threshold_values[:, -1]\n target_for_more_than = more_than_threshold_values[:, -1]\n\n ''' Finding the counts of muffin and cupcake for values lower than and greater than threshold value '''\n recipe_type, less_than_cupcake_muffin_count = np.unique(target_for_less_than, return_counts=True)\n recipe_type, more_than_cupcake_muffin_count = np.unique(target_for_more_than, return_counts=True)\n\n # print(recipe_type, more_than_cupcake_muffin_count, len(more_than_cupcake_muffin_count))\n ''' To ensure there are at least 5 records in each node '''\n if less_than_cupcake_muffin_count.sum() < 5 or more_than_cupcake_muffin_count.sum() < 5:\n ''' Return horrible badness '''\n return math.inf\n else:\n ''' Find the entropies for less than threshold values and more than threshold values '''\n less_than_entropy = sum((less_than_cupcake_muffin_count / less_than_cupcake_muffin_count.sum()) * - np.log2(\n less_than_cupcake_muffin_count / less_than_cupcake_muffin_count.sum()))\n more_than_entropy = sum((more_than_cupcake_muffin_count / more_than_cupcake_muffin_count.sum()) * - np.log2(\n more_than_cupcake_muffin_count / more_than_cupcake_muffin_count.sum()))\n\n ''' Calculate the total weighted entropy '''\n total_weighted_entropy = less_than_probability * less_than_entropy + more_than_probability * more_than_entropy\n\n return total_weighted_entropy", "def _conditional_entropy_compute(confmat: Tensor) ->Tensor:\n confmat = _drop_empty_rows_and_cols(confmat)\n total_occurrences = confmat.sum()\n p_xy_m = confmat / total_occurrences\n p_y = confmat.sum(1) / total_occurrences\n p_y_m = p_y.unsqueeze(1).repeat(1, p_xy_m.shape[1])\n return torch.nansum(p_xy_m * torch.log(p_y_m / p_xy_m))", "def condentropy(truelabels, labels):\n labels=array(labels)\n truelabels=array(truelabels)\n \n condent=0.\n for l in xrange(min(labels),max(labels)+1):\n sublabels = truelabels[ labels==l ]\n condent += len(sublabels)*chl_entropy( sublabels )\n return condent/float(len(labels))", "def entropy(y):\n return -1 * sum(\n [\n pipe(np.sum(y == value) / len(y), lambda ratio: ratio * np.log(ratio))\n for value in set(y)\n ]\n )", "def entropy(temp,pres):\n g_t = liq_g(1,0,temp,pres)\n s = -g_t\n return s", "def conditional_entropy_hyper(self) -> float:\n pass", "def entropy(data):\n e = 0\n\n counter = collections.Counter(data)\n l = len(data)\n for count in counter.values():\n p_x = count / l\n e += - p_x * math.log2(p_x)\n\n return e", "def get_entropy(self, rows):\n label_count = defaultdict(int)\n total_count = 0\n for row in rows:\n label = row[self.target_attribute]\n label_count[label] += 1\n total_count += 1\n return sum([-(float(label_count[label]) /\n total_count) * np.log2(float(label_count[label]) / total_count)\n for label in label_count.keys()])", "def entropy(y):\r\n\r\n # INSERT YOUR CODE HERE\r\n value, count = np.unique(y,return_counts = True)\r\n Hy = 0.0\r\n prob = count.astype(float)/len(y)\r\n for p in prob:\r\n Hy += -(p)*(np.log2(p))\r\n return Hy\r\n raise Exception('Function not yet implemented!')", "def entropy(dist):\n #dist = array([max(d,1e-100) for d in dist])\n dist = dist + 1e-20\n return dot(dist,(log(1.0/dist) * (1.0/log(2.0))).T)", "def entropy(self, dataset, target_attr):\n freq = {} #A dictionary to counts how many samples for each target classification \n 
data_entropy = 0.0\n samplenumbers = len(dataset) #Total number of samplers in data set\n \n #Calculate the frequency of each of the values in the target attribute\n for record in dataset:\n if (record[target_attr] in freq):\n freq[record[target_attr]] += 1.0\n else:\n freq[record[target_attr]] = 1.0\n \n # Calculate the entropy of the data for the target attribute\n for freq in list(freq.values()):\n data_entropy += (-freq/samplenumbers) * math.log(freq/samplenumbers, 2) \n \n return data_entropy", "def calc_entropy(data_set):\n size = len(data_set)\n label_counts = {}\n for feat_vector in data_set:\n label = feat_vector[-1]\n label_counts.setdefault(label, 0)\n label_counts[label] += 1\n\n entropy = 0.0\n for key, count in label_counts.iteritems():\n prob = float(count) / size\n entropy -= prob * log(prob, 2)\n\n return entropy", "def get_entropy_feature(self, feature, df=None):\n if df is None:\n df = self.df\n target = self.target\n\n target_variables = df[target].unique()\n variables = df[feature].unique()\n entropy = 0\n\n # Aggregate entropy for each unique value in 'feature' feature on each unique value in target feature\n for variable in variables:\n entropy_inner = 0\n for target_variable in target_variables:\n # Number of values of 'variable' in 'feature' feature that matches current target value\n num = len(df[feature][df[feature] == variable][df[target] == target_variable])\n # Number of values of 'variable' in 'feature' feature\n den = len(df[feature][df[feature] == variable])\n # Machine epsilon\n eps = np.finfo(np.float).eps\n fraction_inner = num/(den+eps)\n entropy_inner += -fraction_inner*np.log(fraction_inner+eps)\n fraction = den/len(df)\n entropy += -fraction*entropy_inner\n\n return abs(entropy)", "def entropy(y):\n total = y.size\n value_counts = np.bincount(y).astype(\"float\")\n proportions = value_counts / y.size\n\n return sum(-i * np.log(i) for i in proportions if i)", "def entropy(y):\n p = _proba(y)\n return (-p * np.log2(p)).sum()", "def entropy(d, total, word_count):\n\t# Entropie je - Sum_morf p(morf) * log_2 p(morf)\n\t# p(morf) = c(morf) / c(all)\n\te = 0\n\tfor count in d.values():\n\t\tp = count/total\n\t\ttype_e = - p * log2(p)\n\t\te += type_e * count\n\treturn e / word_count", "def entropy(data, idxList):\n df = data.loc[idxList]\n counts = df.value_counts().to_numpy()\n counts = counts.reshape(1, -1).astype(np.float32)\n counts /= np.sum(counts)\n log_sum = counts @ np.log2(counts.T)\n return -log_sum[0, 0]" ]
[ "0.72258025", "0.71321565", "0.70042986", "0.6983623", "0.69810224", "0.69810224", "0.68815655", "0.6839398", "0.67989826", "0.6790044", "0.6771465", "0.6763687", "0.67623544", "0.6737052", "0.6696969", "0.66588163", "0.664294", "0.66203666", "0.66119146", "0.65870285", "0.6566494", "0.65466386", "0.65118814", "0.6511373", "0.6510083", "0.6494139", "0.6476051", "0.6466142", "0.644806", "0.6435459" ]
0.7332619
0
wrap for spot_doc_rubrics with local session
def spot_doc_rubrics2(doc_id, rubrics): db.doc_apply(doc_id, spot_doc_rubrics, rubrics)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spot_doc_rubrics(doc, rubrics, session=None, commit_session=True):\n # get lemmas by doc_id\n lemmas = doc.lemmas\n # compute document size\n doc_size = 0\n for lemma in lemmas:\n doc_size += lemmas[lemma]\n # models for rubrics\n models = {}\n\n # correct_answers = {}\n\n # fill set_id in rubrics and data in models\n for rubric_id in rubrics:\n # correct_answers[rubric_id] = db.get_rubric_answer_doc(doc_id, rubric_id)\n if rubrics[rubric_id] is None:\n rubrics[rubric_id] = db.get_set_id_by_rubric_id(rubric_id, session)\n models[rubric_id] = db.get_model(rubric_id, rubrics[rubric_id], session)\n # get dict with idf and lemma_index for each set_id\n # sets[...] is dict: {'idf':..., 'lemma_index': ...}\n sets = db.get_idf_lemma_index_by_set_id(rubrics.values(), session)\n for set_id in sets:\n # compute idf for doc_id (lemmas) and set_id\n idf_doc = {}\n for lemma in lemmas:\n idf_doc[lemma] = lemmas[lemma] * sets[set_id]['idf'].get(lemma, 0) / doc_size\n sets[set_id]['idf_doc'] = idf_doc\n # for each rubric\n answers = []\n result = []\n for rubric_id in rubrics:\n set_id = rubrics[rubric_id]\n mif_number = models[rubric_id]['features_num']\n lemma_index = sets[set_id]['lemma_index']\n features_array = np.zeros(len(lemma_index), dtype=float)\n # form features row with size and order like in object_features of training set\n for lemma in lemmas:\n # lemma index in lemmas of set\n ind_lemma = lemma_index.get(lemma, -1)\n # if lemma from doc is in lemmas for training set\n if ind_lemma > -1:\n features_array[ind_lemma] = sets[set_id]['idf_doc'][lemma]\n # take most important features of model\n mif = features_array[models[rubric_id]['features']]\n # add 1 for coefficient b in model\n # mif[mif_number] = 1\n mif.resize(mif_number + 1)\n mif[mif_number] = 1\n probability = sigmoid(np.dot(mif, models[rubric_id]['model']))\n if probability > 0.5:\n answers.append(rubric_id)\n result.append(\n {'rubric_id': rubric_id, 'result': round(probability), 'model_id': models[rubric_id]['model_id'],\n 'doc_id': doc.doc_id, 'probability': probability})\n\n db.put_rubrics(result, session, commit_session)\n doc.rubric_ids = answers", "def main(rc):\n with store_client(rc) as sclient:\n for doc in rc.documents:\n sclient.copydoc(doc)", "def __init__(self, temboo_session):\n super(DownloadDocument, self).__init__(temboo_session, '/Library/Zoho/Writer/DownloadDocument')", "def __init__(self, temboo_session):\n super(SearchByReviewer, self).__init__(temboo_session, '/Library/NYTimes/MovieReviews/SearchByReviewer')", "def __call__(self, doc):\n return doc", "def document(self):\n ...", "def edit_document():", "def session(self):", "def __call__(self, doc: Doc) -> Doc:\n return doc", "def __call__(self, doc: Doc) -> Doc:\n return doc", "def __init__(self, blip_data, context):\n super(OpBasedDocument, self).__init__(blip_data)\n self.__context = context", "def documento():\r\n\tpass", "def docs():", "def recreate_client_from_session():\n token = session.get(\"spotify_token\")\n return recreate_client(token)", "def fini_doc(self):\n raise NotImplementedError()", "def __init__(self, run_id: str, doc: Dict, service: APIFactory):\n self.run_id = run_id\n self.file_id = doc['id']\n self.name = doc['name']\n self.title = doc.get('title', self.name)\n self.caption = doc.get('caption')\n self.format = doc.get('format', {})\n self.service = service", "def generate_docs(root_dir, session):\n ...", "def __init__(self, temboo_session):\n super(EntityOverview, self).__init__(temboo_session, 
'/Library/InfluenceExplorer/EntityOverview')", "def call(self) -> global___Snippet.ClientCall:", "def call(self) -> global___Snippet.ClientCall:", "def svn_client_open_ra_session(svn_ra_session_t_session, char_url, svn_client_ctx_t_ctx, apr_pool_t_pool): # real signature unknown; restored from __doc__\n pass", "def dummy(doc):\r\n return doc", "def run_local_doc():\n\tcfg = settings.LocalConfig()\n\tapp = make_app(blueprints.developer_portal, settings.LocalConfig)\n\tapp.run(host = cfg.SERVERNAME, port = cfg.DOC_PORT, debug = True)", "def __init__(self):\r\n # create a session id\r\n self.session = ViSession()", "def __init__(self, api_entrypoint, server_url, session_token, content_format='markdown'):\r\n self._api_entrypoint = api_entrypoint\r\n self._server_url = server_url\r\n self._session_token = session_token\r\n self._content_format = content_format", "def get_doc_context(self, docname, body, metatags):\n\n # find out relations\n prev = next = None\n parents = []\n rellinks = self.globalcontext['rellinks'][:]\n related = self.relations.get(docname)\n # Populate titles with the list of longtitles from the env instead of titles\n # titles = self.env.titles\n titles = self.env.longtitles\n if related and related[2]:\n try:\n next = {\n 'link': self.get_relative_uri(docname, related[2]),\n 'title': self.render_partial(titles[related[2]])['title']\n }\n rellinks.append((related[2], next['title'], 'N', _('next')))\n except KeyError:\n next = None\n if related and related[1]:\n try:\n prev = {\n 'link': self.get_relative_uri(docname, related[1]),\n 'title': self.render_partial(titles[related[1]])['title']\n }\n rellinks.append((related[1], prev['title'], 'P', _('previous')))\n except KeyError:\n # the relation is (somehow) not in the TOC tree, handle\n # that gracefully\n prev = None\n while related and related[0]:\n try:\n parents.append(\n {'link': self.get_relative_uri(docname, related[0]),\n 'title': self.render_partial(titles[related[0]])['title']})\n except KeyError:\n pass\n related = self.relations.get(related[0])\n if parents:\n parents.pop() # remove link to the master file; we have a generic\n # \"back to index\" link already\n parents.reverse()\n\n # title rendered as HTML\n title = self.env.longtitles.get(docname)\n title = title and self.render_partial(title)['title'] or ''\n # the name for the copied source\n sourcename = self.config.html_copy_source and docname + '.txt' or ''\n\n # metadata for the document\n meta = self.env.metadata.get(docname)\n\n # local TOC and global TOC tree\n self_toc = self.env.get_toc_for(docname, self)\n toc = self.render_partial(self_toc)['fragment']\n\n return dict(\n parents = parents,\n prev = prev,\n next = next,\n title = title,\n meta = meta,\n body = body,\n metatags = metatags,\n rellinks = rellinks,\n sourcename = sourcename,\n toc = toc,\n # only display a TOC if there's more than one item to show\n display_toc = (self.env.toc_num_entries[docname] > 1),\n )", "def client_streaming(self) -> global___Snippet.ClientStreaming:", "def __init__(self, temboo_session):\n super(Image, self).__init__(temboo_session, '/Library/Freebase/Image')", "def __init__(self, temboo_session):\n super(GetTokenDetails, self).__init__(temboo_session, '/Library/Utilities/TokenStorage/GetTokenDetails')", "def __init__(self, project):\n super(ReadTheDocsHelper, self).__init__()\n self._project = project\n self._url_lib_helper = url_lib.URLLibHelper()" ]
[ "0.5741224", "0.5666571", "0.52995265", "0.5269158", "0.51747894", "0.5086328", "0.5001454", "0.49858966", "0.49211386", "0.49211386", "0.4910525", "0.47497374", "0.4746444", "0.4739008", "0.47383398", "0.46627825", "0.4653349", "0.4652473", "0.46367034", "0.46367034", "0.46310946", "0.46148232", "0.4610563", "0.46003935", "0.45470992", "0.45423713", "0.4515358", "0.45120162", "0.45117748", "0.45099753" ]
0.61031824
0
spot rubrics for document
def spot_doc_rubrics(doc, rubrics, session=None, commit_session=True): # get lemmas by doc_id lemmas = doc.lemmas # compute document size doc_size = 0 for lemma in lemmas: doc_size += lemmas[lemma] # models for rubrics models = {} # correct_answers = {} # fill set_id in rubrics and data in models for rubric_id in rubrics: # correct_answers[rubric_id] = db.get_rubric_answer_doc(doc_id, rubric_id) if rubrics[rubric_id] is None: rubrics[rubric_id] = db.get_set_id_by_rubric_id(rubric_id, session) models[rubric_id] = db.get_model(rubric_id, rubrics[rubric_id], session) # get dict with idf and lemma_index for each set_id # sets[...] is dict: {'idf':..., 'lemma_index': ...} sets = db.get_idf_lemma_index_by_set_id(rubrics.values(), session) for set_id in sets: # compute idf for doc_id (lemmas) and set_id idf_doc = {} for lemma in lemmas: idf_doc[lemma] = lemmas[lemma] * sets[set_id]['idf'].get(lemma, 0) / doc_size sets[set_id]['idf_doc'] = idf_doc # for each rubric answers = [] result = [] for rubric_id in rubrics: set_id = rubrics[rubric_id] mif_number = models[rubric_id]['features_num'] lemma_index = sets[set_id]['lemma_index'] features_array = np.zeros(len(lemma_index), dtype=float) # form features row with size and order like in object_features of training set for lemma in lemmas: # lemma index in lemmas of set ind_lemma = lemma_index.get(lemma, -1) # if lemma from doc is in lemmas for training set if ind_lemma > -1: features_array[ind_lemma] = sets[set_id]['idf_doc'][lemma] # take most important features of model mif = features_array[models[rubric_id]['features']] # add 1 for coefficient b in model # mif[mif_number] = 1 mif.resize(mif_number + 1) mif[mif_number] = 1 probability = sigmoid(np.dot(mif, models[rubric_id]['model'])) if probability > 0.5: answers.append(rubric_id) result.append( {'rubric_id': rubric_id, 'result': round(probability), 'model_id': models[rubric_id]['model_id'], 'doc_id': doc.doc_id, 'probability': probability}) db.put_rubrics(result, session, commit_session) doc.rubric_ids = answers
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spot_doc_rubrics2(doc_id, rubrics):\n db.doc_apply(doc_id, spot_doc_rubrics, rubrics)", "def main(rc):\n with store_client(rc) as sclient:\n for doc in rc.documents:\n sclient.copydoc(doc)", "def spot_test_set_rubric(test_set_id, rubric_id):\n # get lemmas\n docs = db.get_lemmas_freq(test_set_id)\n docs_size = {}\n\n # compute document size\n for doc_id in docs:\n lemmas = docs[doc_id]\n docs_size[doc_id] = 0\n for lemma in lemmas:\n docs_size[doc_id] += lemmas[lemma]\n\n # models for rubrics\n training_set_id = db.get_set_id_by_rubric_id(rubric_id)\n model = db.get_model(rubric_id, training_set_id)\n mif_number = model['features_num']\n idf_lemma_index = db.get_idf_lemma_index_by_set_id([training_set_id])[training_set_id]\n lemma_index = idf_lemma_index['lemma_index']\n training_idf = idf_lemma_index['idf']\n\n answers = []\n for doc_id in docs:\n if docs_size[doc_id]:\n features_array = np.zeros(len(lemma_index), dtype=float)\n lemmas = docs[doc_id]\n for lemma in lemmas:\n # lemma index in lemmas of training set\n ind_lemma = lemma_index.get(lemma, -1)\n # if lemma from doc is in lemmas for training set\n if ind_lemma > -1:\n features_array[ind_lemma] = lemmas[lemma] * training_idf[lemma] / docs_size[doc_id]\n mif = features_array[model['features']]\n mif.resize(mif_number + 1)\n mif[mif_number] = 1\n probability = sigmoid(np.dot(mif, model['model']))\n answers.append({'result': round(probability), 'model_id': model['model_id'],\n 'rubric_id': rubric_id, 'doc_id': doc_id, 'probability': probability})\n else:\n answers.append({'result': 0, 'model_id': model['model_id'],\n 'rubric_id': rubric_id, 'doc_id': doc_id, 'probability': 0})\n\n db.put_rubrics(answers)", "def replace_document_slugs(doc):\n\n logger.debug('> replace_document_slugs')\n\n handlebar_slugs = {\n r'{{AREA}}': str(mission.business_area),\n r'{{MISSION}}': str(mission.mission_name),\n r'{{GENERATION_DATE}}': now().strftime('%x'),\n r'{{TOTAL_TESTS}}': str(total_reportable_tests),\n r'{{TESTS_WITH_FINDINGS}}': str(total_tests_with_findings),\n r'{{TESTS_WITHOUT_FINDINGS}}': str(total_tests_without_findings),\n }\n\n for p in doc.paragraphs:\n for r in p.runs:\n for pattern in handlebar_slugs.keys():\n if re.search(pattern, r.text):\n logger.debug('>> Replaced: {old} With: {new}'.format(\n old=r.text.encode('utf-8'),\n new=handlebar_slugs[pattern].encode('utf-8')\n )\n )\n r.text = re.sub(pattern, handlebar_slugs[pattern], r.text)", "def process(self, doc):\n raise multisearch.errors.FeatureNotAvailableError", "def find_document(self):\n pass", "def __init__(self):\n self.s_sect = []", "def cparenthood(representation, flush=False):\n global _DOC_CACHE\n\n if flush:\n _DOC_CACHE = dict()\n\n def find_parent(document):\n \"\"\"Find and cache\"\"\"\n parent_id = document[\"parent\"]\n if parent_id not in _DOC_CACHE:\n parent = io.find_one({\"_id\": parent_id})\n _DOC_CACHE[parent_id] = parent\n else:\n parent = _DOC_CACHE[parent_id]\n\n return parent\n\n version = find_parent(representation)\n subset = find_parent(version)\n asset = find_parent(subset)\n project = find_parent(asset)\n\n return [version, subset, asset, project]", "def _cursor_collection(self, cursor_doc):\n ...", "def document(self, **kw):\r\n \r\n for p in self.documents(**kw):\r\n return p", "def snippetList(requeset, format = None):", "def presenetCar():", "def get_document_bounds(image_file, feature):\n # client = vision.ImageAnnotatorClient()\n\n bounds = []\n\n\n# No need for this .... 
\n # with io.open(image_file, 'rb') as image_file:\n # content = image_file.read()\n\n # image = types.Image(content=content)\n\n # response = client.document_text_detection(image=image)\n # document = response.full_text_annotation\n\n # with open('8130processed.json', 'w') as outfile:\n # outfile.write(MessageToJson(response))\n\n\n# We already have the document bounds of the image inside of 8130processed.json no\n\n f = open ('processed_8130-1output-1-to-1.json', \"r\")\n data = json.load(f)\n datas=json.dumps(data)\n # print(data)\n\n response = json_format.Parse(datas, vision.types.AnnotateFileResponse())\n # print(response)\n document = response.inputConfig\n # Collect specified feature bounds by enumerating all document features\n for page in document.pages:\n for block in page.blocks:\n for paragraph in block.paragraphs:\n for word in paragraph.words:\n for symbol in word.symbols:\n if (feature == FeatureType.SYMBOL):\n bounds.append(symbol.bounding_box)\n\n if (feature == FeatureType.WORD):\n bounds.append(word.bounding_box)\n\n if (feature == FeatureType.PARA):\n bounds.append(paragraph.bounding_box)\n\n if (feature == FeatureType.BLOCK):\n bounds.append(block.bounding_box)\n\n # The list `bounds` contains the coordinates of the bounding boxes.\n return bounds", "def detect_document(path):\n from google.cloud import vision\n client = vision.ImageAnnotatorClient()\n\n with io.open(path, 'rb') as image_file:\n content = image_file.read()\n\n image = vision.types.Image(content=content)\n\n response = client.document_text_detection(image=image)\n\n for page in response.full_text_annotation.pages:\n for block in page.blocks:\n #print('\\nBlock confidence: {}\\n'.format(block.confidence))\n for paragraph in block.paragraphs:\n for word in paragraph.words:\n word_text = ''.join([symbol.text for symbol in word.symbols])\n text.append(word_text.encode('utf-8'))\n #print(word_text)", "def extract_doc(self, doc):\n if doc.pod_path.endswith('.yaml'):\n raw_data = yaml.load(\n doc.pod.read_file(doc.pod_path), Loader=yaml_utils.PlainTextYamlLoader)\n else:\n raw_data = doc.format.front_matter.raw_data\n\n if not raw_data:\n print('No raw data found for document: {}'.format(doc.pod_path))\n return\n\n data = collections.OrderedDict()\n\n tagged_keys = tuple(['{}@'.format(key)\n for key in COLLECTION_META_KEYS])\n\n for key, value in raw_data.items():\n if key in COLLECTION_META_KEYS or key.startswith(tagged_keys):\n normal_key = key.lstrip('$')\n if 'path' in key:\n # Use `__NONE` as sentinel to support equating a `value` of None.\n collection_path = self.blueprint.get(\n key, self.blueprint.get(normal_key, '__NONE'))\n # Skip the paths that are the same as the collection.\n if collection_path == value:\n continue\n data[normal_key] = value\n\n if data:\n self.paths[doc.collection_sub_path[1:]] = data", "def detect_objects(snap):\n client = vision.ImageAnnotatorClient()\n print(snap)\n\n with open(snap, 'rb') as im_file:\n content = im_file.read()\n image = vision.Image(content=content)\n\n objects = client.object_localization(image=image).localized_object_annotations\n\n print(f\"Found {len(objects)} objects\")\n [print(f\"{objet.name} : {round(objet.score*100,2)}\") for objet in objects]\n \n return objects", "def document(self):\n ...", "def __init__(self):\n self.c_sect = []", "def footnote_spot(tree: nodes.document) -> tuple[Element, int]:\n # The code uses the following heuristic:\n # a) place them after the last existing footnote\n # b) place them after an (empty) Footnotes rubric\n # c) 
create an empty Footnotes rubric at the end of the document\n fns = list(tree.findall(nodes.footnote))\n if fns:\n fn = fns[-1]\n return fn.parent, fn.parent.index(fn) + 1\n for node in tree.findall(nodes.rubric):\n if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME:\n return node.parent, node.parent.index(node) + 1\n doc = next(tree.findall(nodes.document))\n rub = nodes.rubric()\n rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME))\n doc.append(rub)\n return doc, doc.index(rub) + 1", "def setRectangles(self, word): \n self.rectangles = [visual.Rect(self.win, size = (120,120),fillColor='grey', lineColor='grey',\n units=\"pix\") for i in range(len(self.capList))]\n self.colors = []\n for cap in self.capList:\n if cap not in word:\n self.colors.append('grey')\n elif cap in word:\n if cap in clusters:\n self.colors[-1] = 'black'\n self.colors.append('black') \n\n [rectangle.setPos(location) for rectangle,location in zip(self.rectangles,self.locations)]\n [rectangle.setFillColor(color) for rectangle,color in zip(self.rectangles,self.colors)]\n [rectangle.setLineColor(color) for rectangle,color in zip(self.rectangles,self.colors)]", "def divy_keys(self,spot):\n\n\t\tgroup_counts = [sum([i[0]=='subpattern' \n\t\t\tfor i in re.sre_parse.parse(self.spots[spot][key])]) \n\t\t\t#---apply naming convention\n\t\t\tfor key in ['top','step','part']]\n\t\tcursor = ([0]+[sum(group_counts[:i+1]) for i in range(len(group_counts))])\n\t\tslices = [slice(cursor[i],cursor[i+1]) for i in range(len(cursor)-1)]\n\t\tdivy = lambda x: [y[0] if len(y)==1 else y for y in [x[s] for s in slices]]\n\t\treturn divy", "def merge_docs(self):", "def keyfinder(self,spot=None):\n\n\t\tspot = self.cursor if not spot else spot\n\t\tdef keys_to_filename(*args,**kwargs):\n\n\t\t\t\"\"\"\n\t\t\tAfter decomposing a list of files into keys that match the regexes in paths.yaml we often \n\t\t\tneed to reconstitute the original filename.\n\t\t\t\"\"\"\n\n\t\t\tstrict = kwargs.get('strict',True)\n\t\t\tif not spot in self.toc: raise Exception('need a spotname to look up keys')\n\t\t\t#---! 
it may be worth storing this as a function a la divy_keys\n\t\t\t#---follow the top,step,part naming convention\n\t\t\ttry:\n\t\t\t\tbackwards = [''.join(['%s' if i[0]=='subpattern' else chr(i[1]) \n\t\t\t\t\tfor i in re.sre_parse.parse(regex)]) \n\t\t\t\t\tfor regex in [self.spots[spot][key] for key in ['top','step','part']]]\n\t\t\t\tfn = os.path.join(\n\t\t\t\t\tself.spots[spot]['rootdir'],\n\t\t\t\t\t'/'.join([backwards[ii]%i for ii,i in enumerate(args)]))\n\t\t\texcept Exception as e: \n\t\t\t\ttracer(e)\n\t\t\t\t#---previously: raise Exception('error making keys: %s,%s'%(str(spotname),str(args)))\n\t\t\t\timport pdb;pdb.set_trace() #---legit\n\t\t\tif strict: assert os.path.isfile(fn)\n\t\t\treturn fn\n\n\t\treturn keys_to_filename", "def parSearch(self, mode=False):\r\n # research\r\n profprint()\r\n w = slicer.modules.NeedleFinderWidget\r\n l = w.logic\r\n path = [ 0 for i in range(100)]\r\n \r\n if 0:\r\n path[24] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 24 NRRD/Manual/2013-02-25-Scene-without-CtrPt.mrml'\r\n path[29] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 29 NRRD/Manual/2013-02-26-Scene-without-CtrPts.mrml'\r\n path[30] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 30 NRRD/Manual/2013-02-26-Scene-without-CtrPt.mrml'\r\n path[31] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 31 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n path[34] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 34 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n path[35] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 35 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n path[37] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 37 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n path[38] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 38 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n path[40] = '/Users/guillaume/Dropbox/AMIGO Gyn Data NRRD/Case 40 NRRD/Manual/2013-02-27-Scene-without-CtrPts.mrml'\r\n\r\n #Andre's file system (case copies from AMIGO share)\r\n # stripped OTHER cases\r\n if 0: path[33] = '/home/mastmeyer/Dropbox/GYN Cases/Case 033/NRRD/Auto-Eval-LB/2013-02-27-Scene.mrml'\r\n if 0:\r\n path[ 8] = '/home/mastmeyer/Dropbox/GYN Cases/Case 008/NRRD/Auto-Eval-LB/2013-05-07-Scene.mrml'\r\n path[12] = '/home/mastmeyer/Dropbox/GYN Cases/Case 012/NRRD/Auto-Eval-LB/2013-04-22-Scene.mrml'\r\n path[16] = '/home/mastmeyer/Dropbox/GYN Cases/Case 016/NRRD/Auto-Eval-LB/2013-04-21-Scene.mrml'\r\n path[21] = '/home/mastmeyer/Dropbox/GYN Cases/Case 021/NRRD/Auto-Eval-LB/2013-04-21-Scene.mrml'\r\n path[22] = '/home/mastmeyer/Dropbox/GYN Cases/Case 022/NRRD/Auto-Eval-LB/2013-04-21-Scene.mrml'\r\n path[25] = '/home/mastmeyer/Dropbox/GYN Cases/Case 025/NRRD/Auto-Eval-LB/2013-04-21-Scene.mrml'\r\n path[26] = '/home/mastmeyer/Dropbox/GYN Cases/Case 026/NRRD/Auto-Eval-LB/2013-04-17-Scene.mrml'\r\n path[27] = '/home/mastmeyer/Dropbox/GYN Cases/Case 027/NRRD/Auto-Eval-LB/2013-04-17-Scene.mrml'\r\n #stripped MICCAI13 cases (just manual seg. 
by LB/AM)\r\n if 1:\r\n path[24] = '/home/mastmeyer/Dropbox/GYN Cases/Case 024/NRRD/Auto-Eval-LB/2013-02-28-Scene.mrml'\r\n path[28] = '/home/mastmeyer/Dropbox/GYN Cases/Case 028/NRRD/Auto-Eval-LB/2013-02-28-Scene.mrml'\r\n path[29] = '/home/mastmeyer/Dropbox/GYN Cases/Case 029/NRRD/Auto-Eval-LB/2013-02-26-Scene.mrml'\r\n path[30] = '/home/mastmeyer/Dropbox/GYN Cases/Case 030/NRRD/Auto-Eval-LB/2013-02-26-Scene.mrml'\r\n path[31] = '/home/mastmeyer/Dropbox/GYN Cases/Case 031/NRRD/Auto-Eval-LB/2013-02-27-Scene.mrml'\r\n path[33] = '/home/mastmeyer/Dropbox/GYN Cases/Case 033/NRRD/Auto-Eval-LB/2013-02-27-Scene.mrml'\r\n path[34] = '/home/mastmeyer/Dropbox/GYN Cases/Case 034/NRRD/Auto-Eval-LB/2013-02-27-Scene.mrml'\r\n path[37] = '/home/mastmeyer/Dropbox/GYN Cases/Case 037/NRRD/Manual Alireza/2013-02-27-Scene.mrml'\r\n path[38] = '/home/mastmeyer/Dropbox/GYN Cases/Case 038/NRRD/Manual Alireza/2013-02-27-Scene.mrml'\r\n path[40] = '/home/mastmeyer/Dropbox/GYN Cases/Case 040/NRRD/Manual Alireza/2013-02-27-Scene.mrml'\r\n #show a directory selector for saving the results\r\n self.dirDialog = qt.QFileDialog(w.parent)\r\n self.dirDialog.setDirectory('/tmp')\r\n self.dirDialog.options = self.dirDialog.ShowDirsOnly\r\n self.dirDialog.acceptMode = self.dirDialog.AcceptSave\r\n #self.dirDialog.show()\r\n dir=self.dirDialog.getExistingDirectory()\r\n w.logDir=dir\r\n print \"saving results to \", dir\r\n try: shutil.copyfile('/home/amast/WualaDrive/mastmeyer/Homes/NeedleFinder/NeedleFinder/NeedleFinder.py',dir+'/NeedleFinder_ref.py')\r\n except: breakbox(\"/!\\ reference source NeedleFinder.py not found!\")\r\n if mode == 0:\r\n #save a copy of the source file as reference\r\n # simple run with current parameters/algo over several patients\r\n self.writeTableHeader(dir+'/AP-All_stats.csv')\r\n filLog=open(dir+'/allog.tsv', 'w')\r\n #filLog.write(\"case\\tman.-seg_\\tiStep\\tcrit\\treject\\tvalue\\tlimit\\n\")\r\n filLog.close()\r\n nUsers=1 #CONST\r\n for user in range(nUsers): \r\n w.userNr=user\r\n print \"simulated user (offset): \",user\r\n for id in range(100): #<o> range(100)\r\n if path[id]:\r\n w.caseNr=id\r\n print \"processing \", path[id]\r\n self.writeTableHeader(dir+'/User-'+str(user)+'_AP-' + str(id) + '.csv', 1)\r\n slicer.mrmlScene.Clear(0)\r\n slicer.util.loadScene(path[id])\r\n #TODO implement random tips in a sphere (d=2mm) from tube center \r\n l.startValidation(script=True, offset=user*50/nUsers)\r\n results, outliers = l.evaluate(script=True) # calculate HD distances\r\n for result in results:\r\n result[0:0]=[user,id]\r\n l.exportEvaluation(results, dir+'/User-'+str(user)+'_AP-' + str(id) + '.csv')\r\n #slicer.util.saveScene(dir+'/AP-' + str(id) + '.mrb') # may use lots of disk space\r\n # stats\r\n HD = np.array(results)\r\n # HD.shape = (int(len(results)/float(3)),3)\r\n maxTipHD = HD[:, 2].max()\r\n maxHD = HD[:, 3].max()\r\n avgHD = HD[:, 3].mean()\r\n stdHD = HD[:, 3].std()\r\n sl = np.sort(HD[:, 3])\r\n medHD = sl[sl.size / 2]\r\n resultsEval = [user,id,maxTipHD, maxHD, avgHD, stdHD, medHD]+[len(results)]+[len(outliers)] +[str(outliers)]+ l.valuesExperience + [id]\r\n l.exportEvaluation(resultsEval, dir+'/AP-All_stats.csv')\r\n #pause()\r\n msgbox(\"parSearch mode 0 done, results in \"+dir)\r\n elif mode == 1:\r\n id = 'Current'\r\n # simple brute force search in the dimensions (Guillaumes parameterSearch.py)\r\n self.writeTableHeader(dir+'/BF-' + str(id) + '.csv', 1)\r\n self.writeTableHeader(dir+'/BF-' + str(id) + '_stats.csv')\r\n for i in range(3, 12):\r\n # 
l.resetNeedleDetection(script=True) # ??? this resets the parameters to default\r\n w.numberOfPointsPerNeedle.setValue(i) # change parameter control points\r\n l.startValidation(script=True)\r\n results, outliers = l.evaluate(script=True) # calculate HD distances\r\n for result in results:\r\n result[0:0]=[user,id]\r\n l.exportEvaluation(results, dir+'/BF-' + str(id) + '.csv')\r\n slicer.util.saveScene(dir+'/BF-' + str(id) + '.mrb') # may use lots of disk space\r\n # stats\r\n HD = np.array(results)\r\n # HD.shape = (int(len(results)/float(3)),3)\r\n maxTipHD = HD[:, 2].max()\r\n maxHD = HD[:, 3].max()\r\n avgHD = HD[:, 3].mean()\r\n stdHD = HD[:, 3].std()\r\n sl = np.sort(HD[:, 3])\r\n medHD = sl[sl.size / 2]\r\n resultsEval = [user,id,maxTipHD,maxHD, avgHD, stdHD, medHD] +[len(results)]+[len(outliers)] +[str(outliers)]+ l.valuesExperience + [id]\r\n l.exportEvaluation(resultsEval, dir+'/BF-' + str(id) + '_stats.csv')\r\n #pause()\r\n msgbox(\"parSearch mode 1 done, results in \"+dir)\r\n elif mode == 2:\r\n # code piece from Guillaumes (bruteForce.py) multi patient mode search\r\n for id in range(100):\r\n if path[id]:\r\n w.caseNr=id\r\n print \"processing \", path[id]\r\n slicer.mrmlScene.Clear(0)\r\n slicer.util.loadScene(path[id])\r\n self.writeTableHeader(dir+'/RS-' + str(id) + '.csv', 1)\r\n self.writeTableHeader(dir+'/RS-' + str(id) + '_stats.csv')\r\n for i in range(1, 10000):\r\n # l.resetNeedleDetection(script=True) # ??? this resets the parameters to default\r\n w.radiusNeedleParameter.setValue(np.random.randint(1, 6))\r\n w.stepsize.setValue(np.random.randint(1, 40))\r\n w.sigmaValue.setValue(np.random.randint(1, 40)) # change parameter sigma\r\n w.gradientPonderation.setValue(np.random.randint(1, 20))\r\n w.exponent.setValue(np.random.randint(1, 20))\r\n w.numberOfPointsPerNeedle.setValue(np.random.randint(3, 11))\r\n l.startValidation(script=True)\r\n results, outliers = l.evaluate(script=True) # calculate HD distances\r\n for result in results:\r\n result[0:0]=[user,id]\r\n l.exportEvaluation(results, dir+'/RS-' + str(id) + '.csv')\r\n slicer.util.saveScene(dir+'/RS-' + str(id) + '.mrb') # may use lots of disk space\r\n # stats\r\n HD = np.array(results)\r\n maxTipHD = HD[:, 2].max()\r\n maxHD = HD[:, 3].max()\r\n avgHD = HD[:, 3].mean()\r\n stdHD = HD[:, 3].std()\r\n sl = np.sort(HD[:, 3])\r\n medHD = sl[sl.size / 2]\r\n resultsEval = [user,id,maxTipHD,maxHD, avgHD, stdHD, medHD] +[len(results)]+[len(outliers)] +[str(outliers)]+ l.valuesExperience + [id]\r\n l.exportEvaluation(resultsEval, dir+'/RS-' + str(id) + '_stats.csv')\r\n # end = time.time()\r\n # print 'processing time: ', end-start\r\n # start = time.time()\r\n #pause()\r\n msgbox(\"parSearch mode 2 done, results in \"+dir)\r\n #rof id\r\n #file mode 2\r\n slicer.mrmlScene.Clear(0) #clean up to save memory\r", "def tags():", "def collate_sections(self,paper_text,section_list:List[Section],split_upto=0.2,split_bins=10):\n current_text_split = []\n prev_section = None\n curr_text = str(paper_text)\n unfound_sections = []\n some_section_not_found = False\n for index,s in enumerate(section_list):\n curr_text,section_status = self.split_and_find_section(curr_text,s.name,prev_section,split_upto=split_upto,split_bins=split_bins)\n if not section_status: # If couldn't match section add it here. 
\n some_section_not_found = True\n # print('\\n\\t'+s.name) \n prev_section = s \n for ss in s.subsections:\n curr_text,section_status = self.split_and_find_section(curr_text,ss.name,prev_section,split_upto=split_upto,split_bins=split_bins)\n if not section_status:\n some_section_not_found = True\n # print(\"Cannot Match For :\",ss.name)\n prev_section = ss\n # print('\\n\\t\\t'+ss.name)\n if index == len(section_list)-1:\n s.text = curr_text\n return section_list,some_section_not_found", "def edit_document():", "def __init__(self, files, folder, storage_method=\"raw\", force_shorten=True,\n data_dir=\"./data_og_consecutive\", tokenizer_path=\"./\"):\n Doc.__init__(self, storage_method, force_shorten, data_dir, tokenizer_path)\n self.all_docs = []\n\n for f in tqdm(files):\n doc = {}\n with open(os.path.join(folder, f)) as fp:\n tos = json.load(fp)\n for section in tos:\n # Transform dict into X/y sample\n text = section[\"Text\"]\n label = section[\"Section\"]\n doc = self.add_to_section(text, label, doc)\n\n self.all_docs.append(doc)", "def image_reference(self, image_id):\n info = self.image_info[image_id]\n if info[\"source\"] == \"craters\":\n return info[\"shapes\"]\n else:\n super(self.__class__).image_reference(self, image_id)", "def __init__(self):\n self.g_sect = []" ]
[ "0.69208777", "0.5370122", "0.51581216", "0.49854934", "0.4783225", "0.47799012", "0.47690967", "0.47205272", "0.47074753", "0.4682977", "0.46623933", "0.46576574", "0.4648557", "0.46445367", "0.46335304", "0.46164656", "0.46158558", "0.45981637", "0.45951906", "0.4593771", "0.4592903", "0.45859215", "0.45839608", "0.45362592", "0.45325413", "0.4516759", "0.4513911", "0.45079428", "0.45076853", "0.45043844" ]
0.6318465
1
spot rubrics for all documents from test_set
def spot_test_set_rubric(test_set_id, rubric_id): # get lemmas docs = db.get_lemmas_freq(test_set_id) docs_size = {} # compute document size for doc_id in docs: lemmas = docs[doc_id] docs_size[doc_id] = 0 for lemma in lemmas: docs_size[doc_id] += lemmas[lemma] # models for rubrics training_set_id = db.get_set_id_by_rubric_id(rubric_id) model = db.get_model(rubric_id, training_set_id) mif_number = model['features_num'] idf_lemma_index = db.get_idf_lemma_index_by_set_id([training_set_id])[training_set_id] lemma_index = idf_lemma_index['lemma_index'] training_idf = idf_lemma_index['idf'] answers = [] for doc_id in docs: if docs_size[doc_id]: features_array = np.zeros(len(lemma_index), dtype=float) lemmas = docs[doc_id] for lemma in lemmas: # lemma index in lemmas of training set ind_lemma = lemma_index.get(lemma, -1) # if lemma from doc is in lemmas for training set if ind_lemma > -1: features_array[ind_lemma] = lemmas[lemma] * training_idf[lemma] / docs_size[doc_id] mif = features_array[model['features']] mif.resize(mif_number + 1) mif[mif_number] = 1 probability = sigmoid(np.dot(mif, model['model'])) answers.append({'result': round(probability), 'model_id': model['model_id'], 'rubric_id': rubric_id, 'doc_id': doc_id, 'probability': probability}) else: answers.append({'result': 0, 'model_id': model['model_id'], 'rubric_id': rubric_id, 'doc_id': doc_id, 'probability': 0}) db.put_rubrics(answers)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spot_doc_rubrics(doc, rubrics, session=None, commit_session=True):\n # get lemmas by doc_id\n lemmas = doc.lemmas\n # compute document size\n doc_size = 0\n for lemma in lemmas:\n doc_size += lemmas[lemma]\n # models for rubrics\n models = {}\n\n # correct_answers = {}\n\n # fill set_id in rubrics and data in models\n for rubric_id in rubrics:\n # correct_answers[rubric_id] = db.get_rubric_answer_doc(doc_id, rubric_id)\n if rubrics[rubric_id] is None:\n rubrics[rubric_id] = db.get_set_id_by_rubric_id(rubric_id, session)\n models[rubric_id] = db.get_model(rubric_id, rubrics[rubric_id], session)\n # get dict with idf and lemma_index for each set_id\n # sets[...] is dict: {'idf':..., 'lemma_index': ...}\n sets = db.get_idf_lemma_index_by_set_id(rubrics.values(), session)\n for set_id in sets:\n # compute idf for doc_id (lemmas) and set_id\n idf_doc = {}\n for lemma in lemmas:\n idf_doc[lemma] = lemmas[lemma] * sets[set_id]['idf'].get(lemma, 0) / doc_size\n sets[set_id]['idf_doc'] = idf_doc\n # for each rubric\n answers = []\n result = []\n for rubric_id in rubrics:\n set_id = rubrics[rubric_id]\n mif_number = models[rubric_id]['features_num']\n lemma_index = sets[set_id]['lemma_index']\n features_array = np.zeros(len(lemma_index), dtype=float)\n # form features row with size and order like in object_features of training set\n for lemma in lemmas:\n # lemma index in lemmas of set\n ind_lemma = lemma_index.get(lemma, -1)\n # if lemma from doc is in lemmas for training set\n if ind_lemma > -1:\n features_array[ind_lemma] = sets[set_id]['idf_doc'][lemma]\n # take most important features of model\n mif = features_array[models[rubric_id]['features']]\n # add 1 for coefficient b in model\n # mif[mif_number] = 1\n mif.resize(mif_number + 1)\n mif[mif_number] = 1\n probability = sigmoid(np.dot(mif, models[rubric_id]['model']))\n if probability > 0.5:\n answers.append(rubric_id)\n result.append(\n {'rubric_id': rubric_id, 'result': round(probability), 'model_id': models[rubric_id]['model_id'],\n 'doc_id': doc.doc_id, 'probability': probability})\n\n db.put_rubrics(result, session, commit_session)\n doc.rubric_ids = answers", "def spot_doc_rubrics2(doc_id, rubrics):\n db.doc_apply(doc_id, spot_doc_rubrics, rubrics)", "def test_all_documents(self):", "def main(rc):\n with store_client(rc) as sclient:\n for doc in rc.documents:\n sclient.copydoc(doc)", "def test_set_official(self, stop_words=True):\n subset = self.__sub_set_from_linescsv(csv_path=self.csv_path_test)\n if not stop_words:\n with open(self.path_stop_words, 'r') as f_sw:\n stop_words = []\n for line in f_sw:\n stop_words += line.split(',')\n subset = subset.exclude_words(stop_words)\n return subset", "def Test(self, test_set):\n test_set_retagged = []\n # This loop will call Viterbi method and pass each sentence (word, POS) in \"test_set\" one by one,\n # and save the returned retagged pos in \"test_set_retagged\"\n a = 0\n for sent in test_set:\n test_set_retagged.append(self.Viterbi(sent))\n return test_set_retagged", "def test_search_recipes(self):\n pass", "def __init__(self, files, folder, storage_method=\"raw\", force_shorten=True,\n data_dir=\"./data_og_consecutive\", tokenizer_path=\"./\"):\n Doc.__init__(self, storage_method, force_shorten, data_dir, tokenizer_path)\n self.all_docs = []\n\n for f in tqdm(files):\n doc = {}\n with open(os.path.join(folder, f)) as fp:\n tos = json.load(fp)\n for section in tos:\n # Transform dict into X/y sample\n text = section[\"Text\"]\n label = section[\"Section\"]\n doc = 
self.add_to_section(text, label, doc)\n\n self.all_docs.append(doc)", "def test_list_occurrences(self):\n pass", "def run_train_test_split():\n # Load all documents\n conn = sq.connect(config.DB_FILE)\n documents = pd.read_sql_query('select pubmed_id, review_id, included, title, abstract from article ', conn)\n\n # Identify unique review IDs\n review_ids = documents['review_id'].unique()\n\n # Set seed for random sampling\n np.random.seed(2)\n\n # List of Reviews in the partial data set and full data set\n partial_set = list(np.random.choice(review_ids, 10, replace=False))\n full_set = list(review_ids.copy())\n\n # Load array (X) and labels (Y) of all documents\n with (open(config.DOC_TERM_MATRIX, \"rb\")) as openfile:\n X = pickle.load(openfile)\n\n y = documents['included']\n\n # Train-test split of the partial dataset\n train_test_split(X, y, partial_set, 'min_max', 'partial', review_ids)\n train_test_split(X, y, partial_set, 'tf_idf', 'partial', review_ids)\n\n # Train-test split of the full dataset\n train_test_split(X, y, full_set, 'min_max', 'full', review_ids)\n train_test_split(X, y, full_set, 'tf_idf', 'full', review_ids)", "def test_subsample_taxonomy(self):\n basic_test_runner(self, 'taxonomy', nrows=6, niter=3, normalize='subsample')", "def _collection_samples(collection_query, limit, config):\n just_cid = lambda obj : obj.get('meta', {}).get('concept-id')\n found_collections = scom.search_by_page(\"collections\",\n query=collection_query,\n filters=just_cid,\n page_state=scom.create_page_state(limit=limit),\n config=config)\n return found_collections[:limit]", "def snippetList(requeset, format = None):", "def test_get_collection(self):\n pass", "def get_subset_rt(self, rt):\n regions = self.boxes_rt.at(rt)\n it = BoxHolder()\n for r in regions:\n box = r.data\n it.add_box(box)\n return it", "def test_document_retrieval(self):", "def test_set_scored_recommendations_post(self):\n pass", "def train_test_official(self):\n return self.sub_set(self.train_idcs), self.sub_set(self.test_idcs)", "def test_search_recipes_by_nutrients(self):\n pass", "def test_get_collections(self):\n pass", "def testdocs(nfls, pfls):\n neg_tpl = scramble(nfls)\n pos_tpl = scramble(pfls)\n\n neg_results = []\n pos_results = []\n\n # begin the 3-fold testing\n for i in range(3):\n\n # set empty word dictionary for testing files\n word_dict = {\"pos\": {}, \"neg\": {}}\n\n\n # change the testing and training files each iteration\n test_neg_fls = neg_tpl[i%3]\n test_pos_fls = pos_tpl[i%3]\n train_neg_fls = neg_tpl[(i+1)%3] + neg_tpl[(i-1)%3]\n train_pos_fls = pos_tpl[(i+1)%3] + pos_tpl[(i-1)%3]\n num_neg = len(train_neg_fls)\n num_pos = len(train_pos_fls)\n\n # populate the word dictionary with words from\n # reviews tagged with neg\n for f in train_neg_fls:\n\n neg_wl = getWords(getText(f))\n word_dict = clsWordCounts(word_dict, neg_wl, \"neg\")\n\n # add the words from reviews tagged with pos\n for f in train_pos_fls:\n\n pos_wl = getWords(getText(f))\n word_dict = clsWordCounts(word_dict, pos_wl, \"pos\")\n\n # get the number of correctly identified docs\n correct_neg = classify(test_neg_fls, word_dict, \"neg\", num_neg, num_pos)\n correct_pos = classify(test_pos_fls, word_dict, \"pos\", num_neg, num_pos)\n\n # for each iteration add to the two results lists a list containing the\n # number of test files, the number of training files, and the number of\n # correctly identified docs\n neg_results.append([len(test_neg_fls), num_neg, correct_neg])\n pos_results.append([len(test_pos_fls), num_pos, 
correct_pos])\n\n # print the results\n get_results(neg_results, pos_results)", "def load_recipes_from_test_set(cls, args):\n cls._recipes = Dataset().load_test(\n use_full_test_set=args.use_full_test_set,\n use_english=args.use_english,\n use_english_intelligible=args.use_english_intelligible,\n use_gold=args.use_gold)\n cls._add_indices_to_recipes()\n cls._initialize_recipes_status()\n logging.info(\"Recipes loaded.\")", "def inspect_doc(self, file, terms):\n # populate with studies in loop\n reports = []\n\n # extract text and do the search\n for i, chunk in enumerate(file.content_chunks()):\n\n study = self.inspect_text(chunk, terms)\n\n for entry in study:\n # only populate if there is a match\n # otherwise it will be a bunch of\n # empty studies\n\n # supplement with page\n entry['page'] = i + 1\n\n reports.append(entry)\n\n return reports", "def test_get_vocabulary(self):\n\n for m in self.models:\n vocab = m.vocabulary\n self.assertTrue(isinstance(vocab, turicreate.SArray))\n self.assertEqual(len(vocab), 25)", "def test_get_scored_recommendations_post(self):\n pass", "def train_test_official(self):\n return self.sub_set(self.idcs_train), self.sub_set(self.idcs_test)", "def get_all_documents(self, type: Type) -> List[DocumentReference]:\n runners = []\n collection = self.client.collection(type.value).list_documents()\n for document in collection:\n runners.append(document)\n\n return runners", "def test_text_classifier_get_details_all(self):\n pass", "def test_get_recipe_information_bulk(self):\n pass", "def test_batch_create_occurrences(self):\n pass" ]
[ "0.63508373", "0.59862226", "0.56437767", "0.5264481", "0.5137441", "0.5057981", "0.50532794", "0.50263816", "0.50247127", "0.50228095", "0.49819055", "0.49736837", "0.49623054", "0.49571514", "0.49557984", "0.49491504", "0.49417806", "0.49294844", "0.49229982", "0.49190468", "0.49148756", "0.4905848", "0.4895249", "0.48940942", "0.48929474", "0.48920962", "0.4879912", "0.48793316", "0.4875155", "0.48746797" ]
0.64342356
0
compute average probability for true and false answers
def probabilities_score(model_id, test_set_id, rubric_id): result = {'true_average_probability': 0, 'false_average_probability': 0} # right answers answers = db.get_rubric_answers(test_set_id, rubric_id) # rubrication results rubrication_result = db.get_rubrication_probability(model_id, test_set_id, rubric_id) true_number = 0 true_probability = 0 false_number = 0 false_probability = 0 for key in rubrication_result: if answers[key]: true_number += 1 true_probability += rubrication_result[key] else: false_number +=1 false_probability += rubrication_result[key] if true_number: result['true_average_probability'] = true_probability / true_number if false_number: result['false_average_probability'] = false_probability / false_number return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mape(true, predictions):\n true = np.array(true)\n predictions = np.array(predictions) \n return np.mean(np.abs((true - predictions)) / true) * 100", "def precision_score(y_true, y_pred):\n return ((y_true == 1) * (y_pred == 1)).sum() / (y_pred == 1).sum()", "def calc_accuracy(true, predicted):\n return sum([t==p for t,p in zip(true, predicted)]) / float(len(true))", "def calcProbability(self):\n for attribute in self.attributes:\n index = self.F2I[attribute]\n features = set([self.train[i][0][index] for i in range(len(self.train))])\n for feature in features:\n #all the true and false\n result_t = list(filter(lambda x: x[1]== True, self.train))\n total_t = len(result_t)\n result_f = list(filter(lambda x: x[1]== False, self.train))\n total_f= len(result_f)\n #the probability for the feature if its true or false\n t = len(list(filter(lambda x: x[0][index] == feature, result_t)))\n f = len(list(filter(lambda x: x[0][index] == feature, result_f)))\n prob_yes= t/total_t\n prob_no = f/total_f\n #assign the probabilities to the dictionaries\n self.probs_yes[(index,feature)] = prob_yes\n self.probs_no[(index,feature)] = prob_no", "def evaluate(inputs, labels):\n _, probs = forward(inputs)\n preds = predict(probs)\n trues = np.argmax(labels, axis=0)\n return np.mean(preds == trues)", "def eval(y_pred, y_true):\n return torch.mean((torch.argmax(y_pred, dim=1) == y_true).float())", "def naiveBayes(self):\n acc = 0\n #for each example in the test-set\n for d in self.dev:\n pred_good = self.prob_True\n pred_bad = self.prob_False\n #calc the probability for yes and no\n for index in range(len(d[0])):\n pred_good *= self.probs_yes[(index,d[0][index])]\n pred_bad *=(self.probs_no[(index,d[0][index])])\n pred = False\n if pred_good >= pred_bad:\n pred = True\n if pred == d[1]:\n acc +=1\n return acc/len(self.dev)", "def binary_accuracy(predictions, truth):\n\tcorrect = 0.\n\tfor i in range(len(predictions)):\n\t\tif predictions[i] >= 0.5 and truth[i] == 1:\n\t\t\tcorrect += 1\n\t\telif predictions[i] < 0.5 and truth[i] == 0:\n\t\t\tcorrect += 1\n\tacc = correct/len(predictions)\n\treturn acc", "def evaluate(inputs, labels):\n # Your code here.\n _, probs = forward(inputs)\n preds = predict(probs)\n trues = np.argmax(labels, axis=1)\n return np.mean(preds == trues)", "def batch_accuracy(predicted, true):\n _, predicted_index = predicted.max(dim=1, keepdim=True)\n agreeing = true.gather(dim=1, index=predicted_index)\n '''\n Acc needs to be averaged over all 10 choose 9 subsets of human answers.\n While we could just use a loop, surely this can be done more efficiently (and indeed, it can).\n There are two cases for the 1 chosen answer to be discarded:\n (1) the discarded answer is not the predicted answer => acc stays the same\n (2) the discarded answer is the predicted answer => we have to subtract 1 from the number of agreeing answers\n \n There are (10 - num_agreeing_answers) of case 1 and num_agreeing_answers of case 2, thus\n acc = ((10 - agreeing) * min( agreeing / 3, 1)\n + agreeing * min((agreeing - 1) / 3, 1)) / 10\n \n Let's do some more simplification:\n if num_agreeing_answers == 0:\n acc = 0 since the case 1 min term becomes 0 and case 2 weighting term is 0\n if num_agreeing_answers >= 4:\n acc = 1 since the min term in both cases is always 1\n The only cases left are for 1, 2, and 3 agreeing answers.\n In all of those cases, (agreeing - 1) / 3 < agreeing / 3 <= 1, so we can get rid of all the mins.\n By moving num_agreeing_answers from both cases outside the sum we get:\n acc = agreeing 
* ((10 - agreeing) + (agreeing - 1)) / 3 / 10\n which we can simplify to:\n acc = agreeing * 0.3\n Finally, we can combine all cases together with:\n min(agreeing * 0.3, 1)\n '''\n return (agreeing * 0.3).clamp(max=1)", "def compare(predictions, truth):\n comp = predictions - truth\n return 1 - (np.count_nonzero(comp) / len(predictions))", "def smape(true, predictions):\n \n true = np.array(true)\n predictions = np.array(predictions)\n \n return np.mean(np.abs(true - predictions) * 2/ (np.abs(true) + np.abs(predictions))) * 100", "def get_percentage_false_class_for_resultset(results):\n count_success = 0\n count_correct_prediction = 0\n for result in results:\n if result[\"image_target\"] == result[\"prediction_image\"] and result[\"std_noise\"] != 0:\n count_correct_prediction += 1\n if result[\"success\"] == True:\n count_success += 1\n\n error = stats.proportion.proportion_confint(count_success, count_correct_prediction, 0.05)\n\n return np.array([count_success/count_correct_prediction, error])", "def get_percentage_false_class(arr_of_results):\n\n count_success = np.zeros_like(arr_of_results[:,0], dtype=float)\n count_correct_prediction = 0\n\n for i in range(len(arr_of_results[0])):\n use = True\n for result in arr_of_results[:,i]:\n if result[\"image_target\"] != result[\"prediction_image\"] or result[\"std_noise\"] == 0:\n use = False\n if use:\n count_correct_prediction += 1\n i2 = 0\n for result in arr_of_results[:,i]:\n if result[\"success\"]:\n count_success[i2] += 1\n i2 += 1\n\n\n errors = proportion_confint(count_success, count_correct_prediction)\n count_success = count_success/count_correct_prediction\n errors = np.array(errors)\n\n errors[0] = np.abs(count_success - errors[0])\n errors[1] = np.abs(count_success - errors[1])\n\n return count_success, errors", "def evaluate(labels, predictions):\n\n truePositiveCounter = 0\n trueNegativeCounter = 0\n truePositiveCorrect = 0\n trueNegativeCorrect = 0\n \n sensitivity = 0\n specificity = 0\n\n for i in range(len(labels)):\n if labels[i] == 1:\n truePositiveCounter += 1\n if(labels[i] == predictions[i]):\n truePositiveCorrect += 1\n elif labels[i] == 0:\n trueNegativeCounter += 1\n if(labels[i] == predictions[i]):\n trueNegativeCorrect += 1\n\n sensitivity = truePositiveCorrect / truePositiveCounter\n specificity = trueNegativeCorrect / trueNegativeCounter\n\n return sensitivity, specificity", "def calculateResults(predictions, answers):\r\n t = 0\r\n f = 0\r\n for i in range(len(answers)):\r\n if predictions[i] == answers[i]:\r\n t += 1\r\n else:\r\n f += 1\r\n\r\n print(\"The Percent of Correct Predictions is {t}%\".format(t=round((t * 100 / len(answers)), 1)))\r\n print(\"The Percent of Incorrect Predictions is {f}%\\n\".format(f=round((f * 100 / len(answers)), 1)))", "def score_method(pairs_true, pairs_test):\n \n set_true = {tuple(e) for e in pairs_true}\n set_test = {tuple(e) for e in pairs_test}\n true_pos, false_pos, false_neg = confusion_stats(set_true, set_test)\n \n total = true_pos + false_pos + false_neg\n true_pos_rate = true_pos / total\n false_pos_rate = false_pos / total\n false_neg_rate = false_neg / total\n \n return true_pos_rate, false_pos_rate, false_neg_rate", "def true_positive_rate(y_true, y_pred):\n p = np.sum(y_true == 1)\n tp = np.sum((y_pred == y_true) & (y_true == 1))\n return tp / p", "def test_preds_average():\n pred_1 = np.array([[0.1, 0.3, 0.1, 0.5], [0.9, 0.05, 0.025, 0.025]])\n pred_2 = np.array([[0.6, 0.1, 0.2, 0.1], [0.8, 0.1, 0.05, 0.05]])\n av = preds_average([pred_1, pred_2], [0.9, 
0.1])\n assert (av == np.array([3, 0])).all()", "def evaluate(labels, predictions):\n\n true_positives = 0\n label_positives = 0\n\n true_negatives = 0\n label_negatives = 0\n\n for i in range(len(predictions)):\n if labels[i] == predictions[i] == 1:\n true_positives += 1\n if labels[i] == 1:\n label_positives += 1\n\n if labels[i] == predictions[i] == 0:\n true_negatives += 1\n if labels[i] == 0:\n label_negatives += 1\n\n return true_positives / label_positives, true_negatives / label_negatives\n\n # raise NotImplementedError", "def successes(predictions,truth):\n\ttotal = len(predictions)\n\tcorrect = 0.0\n\tfor p in predictions:\n\t\tif p == truth:\n\t\t\tcorrect += 1\n\t\telse:\n\t\t\tprint truth,\"\\t\",p\n\treturn correct", "def binary_stats(y_true, y_pred, normalize=True, sample_weight=None):\n hamming_list = []\n precision_list = []\n recall_list = []\n f1_list = []\n for i in range(y_true.shape[0]):\n set_true = set( np.where(y_true[i])[0] )\n set_pred = set( np.where(y_pred[i])[0] )\n intersection = len(set_true.intersection(set_pred))\n if len(set_true) == 0 and len(set_pred) == 0:\n hamming = 1\n precision = 1\n recall = 1\n f1 = 1\n elif len(set_pred) == 0 or len(set_pred) == 0:\n hamming = intersection/float( len(set_true.union(set_pred)) )\n precision = 0.0\n recall = 0.0\n f1 = 0.0\n else:\n hamming = intersection/float( len(set_true.union(set_pred)) )\n precision = intersection/float(len(set_pred))\n recall = intersection/float(len(set_true))\n if precision + recall == 0.0:\n f1 = 0.0\n else:\n f1 = 2.0*(precision*recall) / (precision + recall)\n\n hamming_list.append(hamming)\n precision_list.append(precision)\n recall_list.append(recall)\n f1_list.append(f1)\n\n return np.mean(hamming_list), np.mean(precision_list), np.mean(recall_list), np.mean(f1_list)", "def evaluate(golds, preds):\n correct_words = 0\n correct_sentences = 0\n\n words_total = 0.0\n sentences_total = 0.0\n\n for gold, pred in zip(golds, preds):\n # check whether entire tag sequence was correct\n sentences_total += 1\n if pred == gold:\n correct_sentences += 1\n\n # check individual tags for correctness\n for predicted_tag, gold_tag in zip(pred, gold):\n words_total += 1\n if predicted_tag == gold_tag:\n correct_words += 1\n\n return (correct_sentences/sentences_total, correct_words/words_total)", "def accuracy(gt, pred):\n \n return np.mean(gt == pred)", "def evaluate(labels, predictions):\n actual_positive = 0\n actual_negative = 0\n predicted_positive = 0\n predicted_negative = 0\n for i, j in zip(labels, predictions):\n if i == 1:\n actual_positive += i\n predicted_positive += j\n else:\n actual_negative += 1\n if j == 0:\n predicted_negative += 1\n return predicted_positive/actual_positive, predicted_negative/actual_negative", "def calculate_probability(self):\n return 0", "def evaluate(data, theta, beta):\n pred = []\n for i, q in enumerate(data[\"question_id\"]):\n u = data[\"user_id\"][i]\n x = (theta[u] - beta[q]).sum()\n p_a = sigmoid(x)\n pred.append(p_a >= 0.5)\n return np.sum((data[\"is_correct\"] == np.array(pred))) \\\n / len(data[\"is_correct\"])", "def percent_accuracy(self, true_values, predicted_values):\n\n correct = 0\n size = len(true_values)\n for i in range(len(true_values)):\n true_labels = true_values[i]\n predicted_labels = predicted_values[i]\n predicted_index = np.argmax(predicted_labels)\n\n if true_labels[predicted_index] == 1:\n correct += 1", "def result(self):\n tp = self.true_positive\n fp = self.false_positive\n return math_ops.div_no_nan(tp, tp + fp)", "def 
calculate_avg_score(state_score,state_count):\n\tfor state in state_score.keys():\n\t\tstate_score[state] = 1.*state_score[state]/state_count[state]\n\treturn state_score" ]
[ "0.68416166", "0.66683954", "0.6613903", "0.6596483", "0.6539802", "0.64255875", "0.6424991", "0.6420462", "0.6365455", "0.63586", "0.6343499", "0.63240093", "0.62925273", "0.627914", "0.62533486", "0.62258637", "0.6179806", "0.6166214", "0.6155228", "0.61428577", "0.6140479", "0.61383486", "0.61182266", "0.6090036", "0.6087077", "0.60843575", "0.60729253", "0.6069677", "0.60464245", "0.6044456" ]
0.683993
1
This function is returning the weight of the protein sequence given based on average weight given for each amino acid in given protein
def calc_weight(sequence): return len(sequence) * AVG_WEIGHT
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculate_molecular_weight(fasta_filename):\n \n sequence_weights={}\n aminoacid_mw = {'A': 89.09, 'C': 121.16, 'E': 147.13, 'D': 133.1, 'G': 75.07, 'F': 165.19, 'I': 131.18, 'H': 155.16, 'K': 146.19, 'M': 149.21, 'L': 131.18, 'N': 132.12, 'Q': 146.15, 'P': 115.13, 'S': 105.09, 'R': 174.2, 'T': 119.12, 'W': 204.23, 'V': 117.15, 'Y': 181.19}\n sequences=FASTA_iterator(fasta_filename)\n for element in sequences:\n weight=0\n for letter in element[1]:\n #if the letter of the sequence doesn't match an aminoacid, it is treated as an \"error\" and no weight is added\n try:\n weight+=aminoacid_mw[letter]\n except:\n pass\n sequence_weights[element[0]]=weight\n return sequence_weights", "def weighted_average(array, weights):\n assert len(array) == len(weights)\n return sum([x * w for x, w in zip(array, weights)]) / sum(weights)", "def weighted_average(items, weights):\n assert len(items) > 0\n assert len(items) == len(weights)\n # declare total as the return value which is a decimal\n total = 0.0\n # for all pairs from two lists\n for i in range(len(items)):\n \t# we increment the total for the product of both value\n \ttotal += items[i] * weights[i]\n # we return the total divided by sum of weights\n return total / sum(weights)", "def GetAPseudoAAC1(ProteinSequence,lamda=30,weight=0.5, AAP = [_Hydrophobicity,_hydrophilicity]):\n\trightpart=0.0\n\tfor i in range(lamda):\n\t\trightpart=rightpart+sum(GetSequenceOrderCorrelationFactorForAPAAC(ProteinSequence,k=i+1, AAP=AAP))\n\tAAC=GetAAComposition(ProteinSequence)\n\t\n\tresult={}\n\ttemp=1+weight*rightpart\n\tfor index,i in enumerate(AALetter):\n\t\tresult['APAAC'+str(index+1)]=round(AAC[i]/temp,3)\n\t\n\treturn result", "def weighted_average(value_weight_list): \n numerator = sum([v * w for v,w in value_weight_list])\n denominator = sum([w for v,w in value_weight_list])\n if(denominator != 0):\n return(float(numerator) / float(denominator))\n else:\n return None", "def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)", "def _avg(value1, value2, weight):\r\n if value1 is None:\r\n return value2\r\n if value2 is None:\r\n return value1\r\n return value2 * weight + value1 * (1 - weight)", "def weighted_mean(input, weight):\n weight = paddle.cast(weight, input.dtype)\n # paddle.Tensor.size is different with torch.size() and has been overrided in s2t.__init__\n broadcast_ratio = input.numel() / weight.numel()\n return paddle.sum(input * weight) / (paddle.sum(weight) * broadcast_ratio)", "def weighted_average(items, weights):\r\n \r\n assert len(items) > 0\r\n assert len(items) == len(weights)\r\n\r\n a = items #Assign the items to a variable\r\n b = weights #Assign the weights to a variable\r\n x = list(items) #Transform the items to a list\r\n y = list(weights) #Transform the weights to a list\r\n sum1 = sum(weights) #Sum up all of the weights\r\n z = [a*b for a,b in zip(x, y)] #Multiply both lists by matching up the elements from both lists\r\n sum2 = sum(list(z)) #Take the sum of all of the products\r\n \r\n return float(sum2/sum1) #Divide the sum of the products by the sum of the weights to get the weighted average\r", "def GetPseudoAAC1(ProteinSequence,lamda=30,weight=0.05,AAP=[_Hydrophobicity,_hydrophilicity]):\n\trightpart=0.0\n\tfor i in range(lamda):\n\t\trightpart=rightpart+GetSequenceOrderCorrelationFactor(ProteinSequence,i+1,AAP)\n\tAAC=GetAAComposition(ProteinSequence)\n\t\n\tresult={}\n\ttemp=1+weight*rightpart\n\tfor 
index,i in enumerate(AALetter):\n\t\tresult['PAAC'+str(index+1)]=round(AAC[i]/temp,3)\n\t\n\treturn result", "def weighted_prob(self, f, cat, prf, weight=1.0, ap=0.5):\n basicprob = prf(f, cat) # Calculate current probability\n # Count the number of times this feature has appeared in all cats\n totals = sum([self.feature_count(f, c) for c in self.categories()])\n # Calculate the weighted average\n bp = ((weight*ap)+(totals*basicprob))/(weight+totals)\n return bp", "def weighted_average(value_weight_list):\n numerator = sum([v * w for v, w in value_weight_list])\n denominator = sum([w for v, w in value_weight_list])\n if(denominator != 0):\n return(float(numerator) / float(denominator))\n else:\n return None", "def weighted_avg(x, weights):\n return weights.unsqueeze(1).bmm(x).squeeze(1)", "def GetAPseudoAAC2(ProteinSequence,lamda=30,weight=0.5, AAP = [_Hydrophobicity,_hydrophilicity]):\n\trightpart=[]\n\tfor i in range(lamda):\n\t\ttemp=GetSequenceOrderCorrelationFactorForAPAAC(ProteinSequence,k=i+1, AAP=AAP)\n\t\trightpart.append(temp[0])\n\t\trightpart.append(temp[1])\n\t\t\n\t\n\tresult={}\n\ttemp=1+weight*sum(rightpart)\n\tfor index in range(20,20+2*lamda):\n\t\tresult['PAAC'+str(index+1)]=round(weight*rightpart[index-20]/temp*100,3)\n\t\n\treturn result", "def weight3(ab, ac, bc):\r\n\r\n return((ab+ac+bc)/2) # note that (a+b)+(a+c)+(b+c)=2(a+b+c)\r", "def weight_from_meantsys(self, in_arr):\n return 1./(numpy.mean(in_arr)**2)", "def weighted_prob(self, f, category, weight=1.0, ap=0.5):\r\n basic_prob = self.feature_prob(f, category)\r\n totals = sum([self.get_feature_count(f, category) for category in self.category_count.keys()])\r\n w_prob = ((weight*ap) + (totals * basic_prob)) / (weight + totals)\r\n print \"Weighted Probability %s: %s | self.category_count.keys(): %s | totals: %s\" % (f, w_prob, self.category_count.keys(), totals)\r\n return w_prob", "def weighted_average(listofvalues):\n total = 0\n weights = 0\n for [w, v] in listofvalues:\n total += w*v\n weights += w\n return total/weights", "def GetPseudoAAC2(ProteinSequence,lamda=30,weight=0.05,AAP=[_Hydrophobicity,_hydrophilicity]):\n\trightpart=[]\n\tfor i in range(lamda):\n\t\trightpart.append(GetSequenceOrderCorrelationFactor(ProteinSequence,i+1,AAP))\n\t\n\tresult={}\n\ttemp=1+weight*sum(rightpart)\n\tfor index in range(20,20+lamda):\n\t\tresult['PAAC'+str(index+1)]=round(weight*rightpart[index-20]/temp*100,3)\n\t\n\treturn result", "def weighted_mean(x, sample_weight=None):\n if sample_weight is None:\n return x.mean()\n else:\n assert x.shape == sample_weight.shape\n return (x * sample_weight).sum() / sample_weight.sum()", "def totalWeighting(distance, count, data, n):\n\n weighting = (data)*(distance)*count\n weighting = weighting/(np.sum(np.sum(weighting))) \n return weighting", "def _weightedAverage(list_):\n\n\t\taccum = [0, 0]\n\n\t\tfor point, weight in list_:\n\n\t\t\taccum[0] += point[0] * weight\n\t\t\taccum[1] += point[1] * weight\n\n\t\ttotalWeight = sum([weight for point, weight in list_])\n\n\n\t\tif totalWeight == 0:\n\t\t\t\n\t\t\treturn (0, 0)\n\n\n\t\taccum[0] /= float(totalWeight)\n\t\taccum[1] /= float(totalWeight)\n\n\t\treturn (accum[0], accum[1])", "def _weight(self, user_a, user_i):\n a_votes = self._votes[user_a]\n i_votes = self._votes[user_i]\n\n a_mean = self._averages[user_a]\n i_mean = self._averages[user_i]\n\n item_ndx = np.logical_and(np.isfinite(a_votes),\n np.isfinite(i_votes))\n\n a_deviations = a_mean - a_votes[item_ndx]\n i_deviations = i_mean - i_votes[item_ndx]\n\n numerator = 
np.sum(a_deviations * i_deviations)\n denominator = (np.sum(np.power(a_deviations, 2)) *\n np.sum(np.power(i_deviations, 2)))\n\n if denominator < _EPSILON:\n return 0\n return numerator / np.sqrt(denominator)", "def average_weights(self):\n for feat, weight in self.weights.items():\n total = self._totals[feat]\n total += (self.i - self._tstamps[feat]) * weight\n averaged = total / float(self.i)\n self.weights[feat] = averaged\n return None", "def average_error_to_weight(error):\n return error ** (-2)", "def average_error_to_weight(error):\r\n return error ** (-2)", "def _GetPseudoAAC1(ProteinSequence,lamda=10,weight=0.05):\n\trightpart=0.0\n\tfor i in range(lamda):\n\t\trightpart=rightpart+_GetSequenceOrderCorrelationFactor(ProteinSequence,k=i+1)\n\tAAC=GetAAComposition(ProteinSequence)\n\t\n\tresult={}\n\ttemp=1+weight*rightpart\n\tfor index,i in enumerate(AALetter):\n\t\tresult['PAAC'+str(index+1)]=round(AAC[i]/temp,3)\n\t\n\treturn result", "def calculate_average(precisions, weights):\n tmp_res = 1\n for id, item in enumerate(precisions):\n tmp_res = tmp_res*np.power(item, weights[id])\n tmp_res = np.power(tmp_res, np.sum(weights))\n return tmp_res", "def weighted_average(model_df,cols):\n #wa = model_df[cols].apply(lambda x: (x[0]*x[1]).sum()/x[0].sum())\n wa = (model_df[cols[0]]*model_df[cols[1]]).sum()/model_df[cols[0]].sum()\n return wa", "def calculate_average(precisions, weights):\r\n tmp_res = 1\r\n for id, item in enumerate(precisions):\r\n tmp_res = tmp_res*np.power(item, weights[id])\r\n tmp_res = np.power(tmp_res, np.sum(weights))\r\n return tmp_res" ]
[ "0.6750212", "0.6603459", "0.6503535", "0.6424174", "0.63299036", "0.63141996", "0.63141996", "0.6302852", "0.6218239", "0.6184637", "0.6173586", "0.61510265", "0.6140726", "0.61221576", "0.6102423", "0.61018354", "0.6076003", "0.60408807", "0.60073984", "0.60028505", "0.5948507", "0.5926794", "0.59119564", "0.5896593", "0.5894997", "0.58868927", "0.5879591", "0.5856292", "0.585579", "0.5817974" ]
0.7381839
0
Marks the image as having been seen on the given blog returns Bool as indicator of new seen
def mark_seen(self, blog_url, get_Blog, seen_at=None):

        # get the blog where we just saw this image
        blog = get_Blog().get_or_create(url=blog_url)

        # update the blog new blog obj's short hash
        if not blog.short_hash:
            blog.short_hash = short_hash(blog_url)
            blog.save()

        # check if the image already has the blog's reference
        new_blog = False
        self.blogs = self.blogs or {}
        if blog.short_hash not in self.blogs:

            # it's new !
            new_blog = True

            # if they didn't specify when they saw it, assume now
            seen_at = seen_at or datetime.now()

            # add the blog in as being seen
            self.blogs[blog.short_hash] = seen_at

            # add the blog's categories as the images
            blog.categories = blog.categories or []
            for category in blog.categories:
                if category not in self.categories:
                    self.categories.append(category.lower())

        # return our timestamp
        return new_blog
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_new_based_on_imgs(soup):\n\n \n \n prev_hashes = get_prev_img_hashes()\n temp_hashes = get_temp_img_hashes(soup)\n\n if len(temp_hashes.difference(prev_hashes))>0:\n print(\"new, based on images\")\n return True\n else:\n return False", "def mark_seen(self):\r\n self.seen_at = now()\r\n return self", "def post_liker(a, args):\n if args.likes.filter(user=a).exists():\n exists = True\n else:\n exists = False\n return exists", "def check_if_original(article):\n num_img = len(article.find_all(\"img\"))\n return num_img < 2", "def mark_comment_url_seen(url):\n comment = SeenComment(github_comment_url=url)\n session.add(comment)\n session.commit()", "def is_unseen(self):\r\n unseen = self.unseen\r\n if unseen:\r\n self.unseen = False\r\n self.save()\r\n return unseen", "def mark_seen(self):\n mark_seen = {\n \"recipient\": {\"id\": self.user_id},\n \"sender_action\": \"mark_seen\"\n }\n return requests.post(self.mark_seen_uri, json=mark_seen).json()", "def mark_seen(stream: Stream, object_id: UUID, activity_type=None):\n unread = stream.activities.filter(Activity.object_id==object_id, Activity.seen_at==None)\n\n if activity_type:\n unread.filter(Activity.activity_type==activity_type)\n\n unread.update(values=dict(seen_at=now()))", "def has_already_cover(record):\n cover_metadata = record.get(\"cover_metadata\", {})\n return cover_metadata.get(\"ISBN\", False) or cover_metadata.get(\n \"ISSN\", False\n )", "def favorited(self: Article, request: Request):\n if not request.user:\n return False\n\n if self in request.user.favorites:\n return True\n\n return False", "def reportBallSeen(self):\r\n self.lastTimeSeen = time.time()", "def _flagged(self):\n from hive.indexer.notify import NotifyType\n sql = \"\"\"SELECT 1 FROM hive_notifs\n WHERE community_id = :community_id\n AND post_id = :post_id\n AND type_id = :type_id\n AND src_id = :src_id\"\"\"\n return bool(DB.query_one(sql,\n community_id=self.community_id,\n post_id=self.post_id,\n type_id=NotifyType['flag_post'],\n src_id=self.actor_id))", "def _set_tell_seen(conn, tell):\n if tell[4] == '1':\n return\n\n cur = conn.cursor()\n cur.execute(\"UPDATE tells SET seen='1', seen_time=? WHERE msg=? 
AND time=?\", (str(int(time.time())), tell[2], tell[3]))\n del cur\n conn.commit()\n db.ccache()", "def alreadySeenState(self):\n if self.recentHistory.count(self.getState())>1:\n if self.getState() == self.recentHistory[-2]:\n return (True,True)\n return (True,False)\n return (False,False)", "def mark_recently_accessed(self, server_name: Optional[str], media_id: str) -> None:\n if server_name:\n self.recently_accessed_remotes.add((server_name, media_id))\n else:\n self.recently_accessed_locals.add(media_id)", "def brain_has_lead_image(self, brain=None):", "def setFlag(self):\n if self.inPlay and not self.shown:\n self.flag = not(self.flag)\n image_index = 11 if self.flag else 10\n self.configure(image = Tile.images[image_index])\n return 1 if self.flag else -1\n return 0", "def see_reels(self, reels):\n if not isinstance(reels, list):\n # In case of only one reel as input\n reels = [reels]\n\n story_seen = {}\n now = int(time.time())\n for i, story in enumerate(sorted(reels, key=lambda m: m['taken_at'], reverse=True)):\n story_seen_at = now - min(i + 1 + random.randint(0, 2), max(0, now - story['taken_at']))\n story_seen[\n '{0!s}_{1!s}'.format(story['id'], story['user']['pk'])\n ] = [\n '{0!s}_{1!s}'.format(story['taken_at'], story_seen_at)\n ]\n\n data = self.json_data({\n 'reels': story_seen,\n '_csrftoken': self.token,\n '_uuid': self.uuid,\n '_uid': self.user_id\n })\n data = self.generate_signature(data)\n return self.session.post('https://i.instagram.com/api/v2/' + 'media/seen/', data=data).ok", "def is_article_duplicate(cls, article):\n return cls.db.hkeys(\"article_map\").count(article.link) != 0", "def check_movie_seen(self, id_movie):\n\n if id_movie in self.seen:\n return True\n\n return False", "def is_visited(self):\n return self._tag == 'visited'", "def check_if_actuall(self) -> bool:\n\n return self.last_date >= self.get_last_image_date()", "def mark_as_viewed(self):\n url = \"https://api.imgur.com/3/notification/{0}\".format(self.id)\n return self._imgur._send_request(url, method='POST')", "def sees_post(self, post, context_home_or_profile):\n if owns_post(self, post):\n return True\n if context_home_or_profile and post.author not in self.followings:\n return False\n if post.is_public:\n return True\n else:\n for circle in post.circles:\n circle = get_in_circle_cache(circle.id)\n if check_member(circle, self):\n return True\n return False", "def is_liked_by(self, user):\n return user.liked_articles.filter(pk=self.pk).exists()", "def is_viewed(self):\n return self.has_label(VIEWED_LABEL)", "def _should_update_entry(self,\n cache_entry: dict,\n image_meta: dict) -> bool:\n image_updated_utc = (image_meta['updated_at']\n .astimezone(timezone('UTC')))\n cache_updated_utc = (cache_entry['image_updated_at']\n .replace(tzinfo=timezone('UTC')))\n\n LOG.debug('Image-volume cache entry image_update_at = %(entry_utc)s, '\n 'requested image updated_at = %(image_utc)s.',\n {'entry_utc': str(cache_updated_utc),\n 'image_utc': str(image_updated_utc)})\n\n return image_updated_utc != cache_updated_utc", "def moderate(self, comment, content_object, request):\n if not AKISMET_COMMENT:\n return False\n\n try:\n from akismet import Akismet\n from akismet import APIKeyError\n except ImportError:\n return False\n\n akismet = Akismet(key=AKISMET_API_KEY,\n blog_url='http://%s/' % Site.objects.get_current().domain)\n if akismet.verify_key():\n akismet_data = {\n 'user_ip': request.META.get('REMOTE_ADDR', ''),\n 'user_agent': request.META.get('HTTP_USER_AGENT', ''),\n 'referrer': 
request.META.get('HTTP_REFERER', 'unknown'),\n 'permalink': content_object.get_absolute_url(),\n 'comment_type': 'comment',\n 'comment_author': smart_str(comment.userinfo.get('name', '')),\n 'comment_author_email': smart_str(comment.userinfo.get('email', '')),\n 'comment_author_url': smart_str(comment.userinfo.get('url', '')),\n }\n is_spam = akismet.comment_check(smart_str(comment.comment),\n data=akismet_data,\n build_data=True)\n if is_spam:\n comment.save()\n user = comment.content_object.authors.all()[0]\n comment.flags.create(user=user, flag='spam')\n return is_spam\n raise APIKeyError(\"Your Akismet API key is invalid.\")", "def _seen(node):\n\t\tcheck = linked_list\n\t\twhile check != node:\n\t\t\tif check.value == node.value:\n\t\t\t\treturn True\n\t\t\tcheck = check.next\n\t\treturn False", "def seen(user, item):\n print(\"---\", item.seen_by(user))\n return item.seen_by(user)" ]
[ "0.6048454", "0.5702105", "0.5617216", "0.55378336", "0.5485685", "0.54344535", "0.5427995", "0.53722227", "0.5333881", "0.53132117", "0.5305507", "0.52907443", "0.5284631", "0.52825695", "0.5263754", "0.52575016", "0.5235767", "0.52142894", "0.5206761", "0.520546", "0.5200404", "0.5189263", "0.51215845", "0.51095945", "0.5091978", "0.50522375", "0.5009607", "0.49980944", "0.49843645", "0.4966389" ]
0.7645362
0
stores the given data returns the data's store key if uploaded returns False if data already existed
def set_data(self, data, short_hash, upload_image, get_Blog, get_Image, compute_vhash, blog_url=None):

        # get the short hash for the data
        self.short_hash = short_hash(data)

        # TODO: compute other attrs: type, vhash, dimensions

        # save the data to the cloud
        storage_key = upload_image(data)

        # upload our downloaded flag
        # even if the storage key is false we are still updating
        # this flag, since it is downloaded, just already was
        self.downloaded = True

        # when did we get it's data ?
        if storage_key is not False:
            self.last_data_set = datetime.now()

        # save the storage key
        if storage_key is not False:
            self.storage_key = storage_key

        # cheat, we know the storage key is the short_hash,
        # so lets set that
        elif not self.storage_key:
            self.storage_key = self.short_hash

        # if they told us the blog, make sure we stored it
        if blog_url:
            self.mark_seen(blog_url, get_Blog)

        # if we haven't set the vhash, do so
        if not self.vhash:
            try:
                self.vhash = str(compute_vhash(data))
            except IOError:
                # can't generate vhash, PIL can't handle image
                pass

        # uploaded is the name of the key if we uploaded
        # if it already existed, we get False
        return storage_key
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def store_data(self, store_data):\n self._store_data = store_data", "def StoreFile(self, file_data):\r\n ### ###########################\r\n\r\n now = datetime.datetime.now();\r\n root_dir = os.path.join(settings.MEDIA_ROOT, now.strftime(\"%Y-%m-%d\"));\r\n fname = os.path.join(root_dir, now.strftime(\"%H%M%S_{}\".format(file_data.name)));\r\n\r\n try:\r\n os.makedirs(root_dir, exist_ok=True);\r\n dest = open(fname, 'wb+');\r\n for c in file_data.chunks():\r\n dest.write(c);\r\n dest.close();\r\n except Exception as e:\r\n os.remove(fname);\r\n return False, \"Failed to write uploaded file.\";\r\n\r\n if not os.path.exists(fname):\r\n return False, \"Failed to write upload file.\";\r\n\r\n self.file_path = fname;\r\n\r\n return True, None;", "def _put(self, key, data):\n path = self._get_key_path(key)\n with open(path, \"wb\") as pickle_file:\n pickle.dump(data, pickle_file)", "def store(self, data: Union[str, bytes, int, float]) -> str:\n key = str(uuid.uuid4())\n self._redis.set(key, data)\n return key", "def add_data(self, new_data_name: str, new_data):\n\n if new_data_name in self.__data.keys():\n print('Key {0} is already existed in database'.format(new_data_name))\n else:\n self.__data[new_data_name] = new_data", "def store(self, data: Union[str, bytes, int, float]) -> str:\n k = str(uuid.uuid4())\n self._redis[k] = data\n return k", "def put(data):", "def append(self, name, data):\n if self._filename:\n if name not in self._dict:\n self._dict[name] = data\n self.save()\n else:\n raise PoseError(\"Key %s already exists\" % name)", "def store_data(self, data):\n self.data.append(data)", "def store_data(self, data):\n self.data = data\n # HERE\n the_main_dict = {**self.user_data(), **self.entities_data(), **self.extract_relevant(), **self.locate(),\n **self.calculate_days(), **self.clean_user_desc()}\n # The below is the reason that the table creation must be written in alphabetical order. 
This is simpler than\n # writing the complex joins that would otherwise be needed.\n my_keys_list = sorted(the_main_dict.keys())\n my_items = list(map(lambda x: str(the_main_dict[x]).replace(\"'\", ''), my_keys_list))\n try:\n # Unpacks the items into an insert statement for the SQLite table\n self.conn.execute(\"INSERT INTO {0} VALUES('{1}','{2}','{3}','{4}','{5}','{6}','{7}','{8}','{9}',\"\n \"'{10}','{11}','{12}','{13}','{14}','{15}','{16}','{17}','{18}','{19}','{20}',\"\n \"'{21}','{22}','{23}','{24}','{25}','{26}','{27}','{28}')\".format(self.table, *my_items))\n self.limiting += 1\n return 0\n except sqlite3.IntegrityError:\n return 1", "def store(self, key, a):\n if key in self.SMGData.keys():\n self.SMGData[key] = a\n else:\n raise Exception('Key does not exist in the data structure')", "def save_data(db, dict_key, url, data_to_store):\n if dict_key not in db:\n db[dict_key] = []\n data = db[dict_key]\n data.append({\n 'url': url,\n 'data': data_to_store,\n })\n db[dict_key] = data", "def shelf_persist(data: dict, table_key: str, SHELVE_FILEPATH: str = SHELVE_FILEPATH):\n\n \n try:\n with shelve.open(SHELVE_FILEPATH, writeback=False) as db:\n\n flag = table_key in db\n\n # TODO: autoremove *key* if the len(data) < len(db[table_key])\n if flag:\n # extract a copy\n _holder = db[table_key]\n # mutate the copy\n _holder.update(data)\n # stores the copy right back, to persist it\n db[table_key] = _holder \n else:\n db[table_key] = data\n except Exception as msg:\n log.error(f'persist_data_in_shelve_error: {msg}')\n return False\n else:\n return True", "def store_data(self):\n return self._store_data", "def insert_data_into_storage(name, image_dict):\n\n # if S3Connection supports __enter__, and __exit__ then we can use with.\n conn = S3Connection(aws_access_key_id, aws_secret_access_key)\n bucket = conn.get_bucket('hyperionstorm')\n\n k_lrg = Key(bucket)\n k_lrg.key = \"data/%s_lrg.jpg\" % name\n\n k_dft = Key(bucket)\n k_dft.key = \"data/%s.jpg\" % name\n\n k_tiny = Key(bucket)\n k_tiny.key = \"data/%s_tiny.jpg\" % name\n\n try:\n k_lrg.set_contents_from_string(image_dict[\"large\"])\n k_dft.set_contents_from_string(image_dict[\"default\"])\n k_tiny.set_contents_from_string(image_dict[\"tiny\"])\n except Exception, exp:\n conn.close()\n return False\n\n conn.close()\n return True", "def test_upload_existing_file(self):\n ps = PersistenceStore(s3_client=S3ExistingUpload())\n\n try:\n new_data = {\n 'maven': {\n 'pck1, pck2, pck3': 7,\n 'pck30, pck6': 20,\n 'pck2, pck4, pck7': 12\n },\n 'npm': {\n 'pck1, pck2, pck3': 45,\n 'pck77': 23,\n 'pck2, pck4, pck7': 99\n },\n 'pypi': {\n 'pck3, pck56': 65,\n 'pck2, pck4, pck7': 110\n }\n }\n ps.update(new_data, 'filename.json')\n except Exception:\n assert False, 'Exception raised'", "def store_data(self, data):\n if not self.light.hasAttr(self.custom_data_storage_attr_name):\n pm.addAttr(\n self.light,\n ln=self.custom_data_storage_attr_name,\n dt='string'\n )\n\n self.light.setAttr(self.custom_data_storage_attr_name, data)", "def put(self,data):\n\n \n try:\n\n db = getDatabase()\n connection = db.connect()\n \n connection.put(self,data)\n except Exception as e:\n raise e\n finally:\n db.dispose()", "def store_data(self, data, info_dict):\n\n size_in_MB = np.prod(data.shape) * data.dtype.itemsize / 1024 / 1024\n\n with mongo_connection(self.cfg_mongo) as mongo:\n client, coll = mongo\n if self.datastore == \"gridfs\":\n with mongo_storage_gridfs(client.get_database()) as fs:\n tmp = Binary(pickle.dumps(data))\n tic_io = time.perf_counter()\n 
fid = fs.put(tmp)\n toc_io = time.perf_counter()\n info_dict.update({\"result_gridfs\": fid})\n\n elif self.datastore == \"numpy\":\n with mongo_storage_numpy(self.cfg_mongo) as fname:\n tic_io = time.perf_counter()\n np.savez(fname, data=data)\n toc_io = time.perf_counter()\n info_dict.update({\"unique_filename\": fname})\n\n elif self.datastore == \"adios2\":\n # Use adios2's context manager\n datadir = join(self.cfg_mongo[\"datadir\"], self.cfg_mongo[\"run_id\"])\n if (isdir(datadir) == False):\n try:\n mkdir(datadir)\n except:\n self.logger.error(f\"Could not access path {datadir}\")\n raise ValueError(f\"Could not access path {datadir}\")\n\n fname = join(datadir, uuid.uuid1().__str__() + \".bp\")\n info_dict.update({\"unique_filename\": fname})\n\n # with open(fname, \"w\") as df:\n # tic_io = time.perf_counter()\n # df.write(info_dict[\"analysis_name\"])\n with adios2.open(fname, \"w\") as fh:\n tic_io = time.perf_counter()\n fh.write(info_dict[\"analysis_name\"], data, data.shape, [0] * data.ndim, data.shape)\n toc_io = time.perf_counter()\n \n\n # Calculate performance metric\n MB_per_sec = size_in_MB / (toc_io - tic_io)\n info_dict.update({\"Performance\": MB_per_sec})\n\n info_dict.update({\"timestamp\": datetime.datetime.utcnow().strftime(\"%Y-%m-%d %H:%M:%S\")})\n info_dict.update({\"description\": \"analysis results\"})\n\n try:\n inserted_id = coll.insert_one(info_dict)\n except:\n self.logger.error(\"Unexpected error:\", sys.exc_info()[0])\n\n return None", "def store_insert(request, store_name, initial_data):\n storedb = redis.Redis(host=HOST, db=STOREDB)\n # Creating next docname\n index_num = len(storedb.lrange(store_name + \":docs\", 0, -1))\n docname = store_name + \":\" + str(index_num)\n # Getting data\n\n data = request.POST.get(\"data\", None)\n if not data:\n return json_response(status=\"ERROR\", status_code=400, error=\"No data received.\")\n\n\n storedb.set(docname, data)\n storedb.rpush(store_name + \":docs\", docname)\n\n return str(index_num)", "def save_ticker(connection, ticker_data=None): \n #evita operaciones si no existe informacion.\n if not ticker_data:\n return False\n\n ticker_hash = get_ticker_hash(ticker_data)\n\n if check_if_exists(connection, ticker_hash):\n return False\n \n #ticker_data['ticker_hash'] = get_ticker_hash(ticker_data)\n ticker_data['ticker_hash'] = ticker_hash\n\n # Almacena el documento en la BD de Mongo por medio de insertOne()\n connection.tickers.insert_one(ticker_data)\n return True", "def store(self, key, filepath):\n if not key or not filepath:\n return False\n # check that file actually exists and handle errors\n if not os.path.isfile(filepath):\n logging.error(\"File '%s' does not exists, skipping\", filepath)\n return False\n else:\n obj = self.resource.Object(self.bucketname, key)\n obj.put(Body=open(filepath, 'rb'))\n obj.Acl().put(ACL='public-read')\n return True", "def isExist(data):\n return True/False", "def StoreOrUpdateInCache(self, filename, data):\n try:\n if not memcache.add('%s%s' % (self.CACHE_PREFIX, filename), data):\n memcache.replace('%s%s' % (self.CACHE_PREFIX, filename), data)\n except (ValueError), err:\n logging.warning('Data size too large to cache\\n%s' % err)", "def StoreOrUpdateInCache(self, filename, data):\n try:\n if not memcache.add('%s%s' % (self.CACHE_PREFIX, filename), data):\n memcache.replace('%s%s' % (self.CACHE_PREFIX, filename), data)\n except (ValueError), err:\n logging.warning('Data size too large to cache\\n%s' % err)", "def put_data(data):\n at_write = 
airtable.Airtable(app.config['AIRTABLE_BASE'],\n app.config['AIRTABLE_WRITE_KEY'])\n return at_write.create(app.config['AIRTABLE_TABLE'] , data)", "def save_data(self):\n data = self.data\n if data is not None:\n data = base64.encodestring(pickle.dumps(data))\n connection = self._open_db()\n cursor = connection.cursor()\n cursor.execute('UPDATE sessions SET data = ? WHERE id = ?;',\n (data, self.sid))\n cursor.close()\n connection.commit()\n connection.close()", "def save(self, data):\n data['id'] = self.id\n\n self.db.append(data)", "def store(self,key,start,end,data):\n\n pass", "def handle_store(event):\n # Decode the C-STORE request's *Data Set* parameter to a pydicom Dataset\n ds = event.dataset\n\n # Add the File Meta Information\n ds.file_meta = event.file_meta\n\n # Save the dataset using the SOP Instance UID as the filename\n ds.save_as(ds.SOPInstanceUID, write_like_original=False)\n\n # Return a 'Success' status\n return 0x0000" ]
[ "0.63676924", "0.63348836", "0.6326494", "0.6223537", "0.6184256", "0.6106908", "0.6085891", "0.60779446", "0.6066866", "0.6061777", "0.60587716", "0.60249597", "0.5990663", "0.5988788", "0.59879524", "0.5976819", "0.59597427", "0.5940061", "0.5934865", "0.5929676", "0.58912885", "0.58687896", "0.58522886", "0.5842573", "0.5842573", "0.58337003", "0.581172", "0.57897407", "0.57876056", "0.5749857" ]
0.6479089
0
returns a public url for this image
def get_public_url(self, get_image_public_url):

        # get that url
        return get_image_public_url(self.short_hash)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_url_image(self, obj):\n return settings.SERVER_HOST + obj.image.url", "def image_url(self) -> str:\n return pulumi.get(self, \"image_url\")", "def get_url_image(self, obj):\n return settings.IMAGE_HOST + obj.image.url", "def get_url_image(self, obj):\n return settings.IMAGE_HOST + obj.image.url", "def get_url_image(self, obj):\n return settings.IMAGE_HOST + obj.image.url", "def get_image_url():", "def image_url(self) -> str:\n return self._image_url", "def get_public_url(self,project,filename):\n pass", "def image_url(self):\n return self.photo_url or GENERIC_IMAGE", "def image_url(self, name):\r\n s3_key = self._generate_s3_key(name)\r\n return s3_key.generate_url(self.IMAGE_LINK_DURATION)", "def getReferenceImageUrl(self, name):\n bucket = self.productSearch.bucket\n blobName = self._getReferenceImageBlobName(name)\n return bucket.blob(blobName).public_url", "def img_url_display(self):\n url = '%s=s%s' % (self.img_url, self.DISPLAY_SIZE_PX)\n if self.img_rot in Plaque.ALLOWED_ROTATIONS:\n url = \"%s-r%s\" % (url, self.img_rot)\n return url", "def image_url(self):\n return \"{}/mjpeg_read.php\".format(self.base_url)", "def url(self):\n return self.storage.url(self.name)", "def get_thumbnail_url():", "def get_url(self):\n raise NotImplementedError(\"This asset does not have a URL\")", "def get_image_url(self, size=None):\n return images.get_serving_url(self.image_blob_key, size=size)", "def get_url(self):\n return staticfiles_storage.url(self._name)", "def thumbnail_url(self):\n return None", "def image_url(self):\n context = aq_inner(self.context)\n obj_url = context.absolute_url()\n if hasattr(context, 'getField'):\n field = self.context.getField('image')\n if not field:\n field = context.getField(IMAGE_FIELD_NAME)\n\n if field and field.get_size(context) > 0:\n return u'%s/%s_%s' % (obj_url, field.getName(), 'thumb')\n\n return u\"%s/isaw_logo.png\" % self.portal.absolute_url()", "def get_url(self, image_id):\n key = image_id if image_id else self.default_image\n if key:\n return u'{bucket_url}{key}'.format(\n bucket_url=self.connection.bucket_url,\n key=self.id_to_key(key))\n else:\n return None", "def media_image_url(self):\n return self._imageUrl", "def media_image_url(self):\n return self._media_image_url", "def media_image_url(self):\n return self._media_image_url", "def get_url(self):\n return self.resource.url", "def get_thumbnail_url(self):\n return self.thumbnail_url", "def thumbnail(self):\n return self.get_thumbnail_url()", "def build_image_path(self, src):\r\n o = urlparse(src)\r\n # we have a full url\r\n if o.hostname:\r\n return o.geturl()\r\n # we have a relative url\r\n return urljoin(self.target_url, src)", "def _get_image_url(self, image_filepath):\n return self.IMAGE_URL + image_filepath", "def get_image_url(self):\n return self.get_extract_image_urls(is_first=True)" ]
[ "0.8159343", "0.81349707", "0.8098181", "0.8098181", "0.8098181", "0.8068925", "0.8035841", "0.7448655", "0.7433158", "0.74328434", "0.73438674", "0.7327816", "0.73267376", "0.723886", "0.7232513", "0.72067153", "0.71977", "0.71773213", "0.71523744", "0.7151588", "0.7013372", "0.7009706", "0.6958401", "0.6958401", "0.6926574", "0.69197214", "0.68888927", "0.68881017", "0.6869495", "0.6869143" ]
0.8463321
0
Rounds up a datetime interval to one of the closest time interval from the scope. Returns suggested start and end timestamps, e.g.
def round_datetime_interval(start, end):
    if not start or not end:
        return 1800000, None, None

    # Approximate number of bars.
    bars = 50

    supported_intervals = [  # In seconds.
        60,        # 1 minute
        120,       # 2 minutes
        300,       # 5 minutes
        600,       # 10 minutes
        1800,      # 30 minutes
        3600,      # 1 hour
        7200,      # 2 hour
        21600,     # 6 hour
        43200,     # 12 hour
        86400,     # 1 day. Use 00:00 of each 1 day.
        259200,    # 3 days. Use 00:00 of each 3 day.
        604800,    # 1 week. Use 00:00 of each Monday.
        1209600,   # 2 weeks. Use 00:00 of Monday of each 2 weeks.
        2592000,   # 1 month. Use 00:00 of Monday of each 2 weeks.
        7776000,   # 3 months. Use XX/01 00:00 of each month.
        15552000,  # 6 months. Use XX/01 00:00 of each 6 month.
        31104000   # 1 year. Use 01/01/XXXX 00:00 of each year.
    ]

    # Convert dates.
    start = datetime_to_timestamp(start)
    end = datetime_to_timestamp(end)

    # Get difference between two dates.
    delta = (end - start) / 1000  # In secs.

    # Current interval for default bars count.
    base_interval = delta / bars

    # Choose closest interval to current delta.
    interval_index = 0
    delta_interval_difference = None
    for index, interval in enumerate(supported_intervals):
        diff = abs(base_interval - interval)
        if not delta_interval_difference:
            delta_interval_difference = diff
        else:
            if diff < delta_interval_difference:
                interval_index = index
    interval = supported_intervals[interval_index] * 1000  # In ms.

    # Round down start datetime.
    suggested_start = start // interval * interval

    # Round down end datetime.
    if end % interval > 0:
        suggested_end = (end // interval + 1) * interval
    else:
        suggested_end = end

    return interval, suggested_start, suggested_end
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def round_time(\n dt: datetime,\n delta: str | timedelta | relativedelta,\n start_date: datetime = timezone.make_aware(datetime.min),\n):\n if isinstance(delta, str):\n # It's cron based, so it's easy\n time_zone = start_date.tzinfo\n start_date = timezone.make_naive(start_date, time_zone)\n cron = croniter(delta, start_date)\n prev = cron.get_prev(datetime)\n if prev == start_date:\n return timezone.make_aware(start_date, time_zone)\n else:\n return timezone.make_aware(prev, time_zone)\n\n # Ignore the microseconds of dt\n dt -= timedelta(microseconds=dt.microsecond)\n\n # We are looking for a datetime in the form start_date + i * delta\n # which is as close as possible to dt. Since delta could be a relative\n # delta we don't know its exact length in seconds so we cannot rely on\n # division to find i. Instead we employ a binary search algorithm, first\n # finding an upper and lower limit and then dissecting the interval until\n # we have found the closest match.\n\n # We first search an upper limit for i for which start_date + upper * delta\n # exceeds dt.\n upper = 1\n while start_date + upper * delta < dt:\n # To speed up finding an upper limit we grow this exponentially by a\n # factor of 2\n upper *= 2\n\n # Since upper is the first value for which start_date + upper * delta\n # exceeds dt, upper // 2 is below dt and therefore forms a lower limited\n # for the i we are looking for\n lower = upper // 2\n\n # We now continue to intersect the interval between\n # start_date + lower * delta and start_date + upper * delta\n # until we find the closest value\n while True:\n # Invariant: start + lower * delta < dt <= start + upper * delta\n # If start_date + (lower + 1)*delta exceeds dt, then either lower or\n # lower+1 has to be the solution we are searching for\n if start_date + (lower + 1) * delta >= dt:\n # Check if start_date + (lower + 1)*delta or\n # start_date + lower*delta is closer to dt and return the solution\n if (start_date + (lower + 1) * delta) - dt <= dt - (start_date + lower * delta):\n return start_date + (lower + 1) * delta\n else:\n return start_date + lower * delta\n\n # We intersect the interval and either replace the lower or upper\n # limit with the candidate\n candidate = lower + (upper - lower) // 2\n if start_date + candidate * delta >= dt:\n upper = candidate\n else:\n lower = candidate\n\n # in the special case when start_date > dt the search for upper will\n # immediately stop for upper == 1 which results in lower = upper // 2 = 0\n # and this function returns start_date.", "def interval(start, end):\n return seconds_since_midnight(end) - seconds_since_midnight(start)", "def computeRangeAsymmetric():\r\n global timeComputedRangeTS\r\n round1 = DW1000.wrapTimestamp(timePollAckReceivedTS - timePollSentTS)\r\n reply1 = DW1000.wrapTimestamp(timePollAckSentTS - timePollReceivedTS)\r\n round2 = DW1000.wrapTimestamp(timeRangeReceivedTS - timePollAckSentTS)\r\n reply2 = DW1000.wrapTimestamp(timeRangeSentTS - timePollAckReceivedTS)\r\n timeComputedRangeTS = (round1 * round2 - reply1 * reply2) / (round1 + round2 + reply1 + reply2)", "def computeRangeAsymmetric():\n global timeComputedRangeTS\n round1 = DW1000.wrapTimestamp(timePollAckReceivedTS - timePollSentTS)\n reply1 = DW1000.wrapTimestamp(timePollAckSentTS - timePollReceivedTS)\n round2 = DW1000.wrapTimestamp(timeRangeReceivedTS - timePollAckSentTS)\n reply2 = DW1000.wrapTimestamp(timeRangeSentTS - timePollAckReceivedTS)\n timeComputedRangeTS = (round1 * round2 - reply1 * reply2) / (round1 + round2 + reply1 + 
reply2)", "def find_start_index():\n def recursive_find_index(lower_bound, upper_bound):\n if upper_bound - lower_bound <= 1:\n if intervals[upper_bound][0] <= start_dt:\n return upper_bound\n return lower_bound\n index = (upper_bound + lower_bound) // 2\n if intervals[index][0] <= start_dt:\n return recursive_find_index(index, upper_bound)\n else:\n return recursive_find_index(lower_bound, index)\n\n if start_dt <= intervals[0][0] - tolerance:\n return -1\n if end_dt >= intervals[-1][1] + tolerance:\n return -1\n return recursive_find_index(0, len(intervals) - 1)", "def timedInterval(self, start, end=False):\n\n assert type(start) == float\n interval1 = min(\n enumerate(self.intervals), key=lambda x: abs(x[1].xmin - start))\n\n if end:\n assert type(end) == float\n interval2 = self.timedInterval(end)\n else:\n interval2 = interval1\n\n return (interval1[0], interval2[0] + 1)", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n \n return ((float(val-input_interval_start) * (output_interval_end-output_interval_start)) / (input_interval_end-input_interval_start)) + output_interval_start", "def adjust_interval(time1, time2, interval):\n\n time_elapsed = round(time2 - time1, 4)\n\n return interval - time_elapsed", "def zonedetails_nearest_range(zone_code, start_datetime=None):\n if start_datetime:\n timenow = start_datetime.time()\n day = start_datetime.weekday()\n else:\n # datetime.time(15, 29, 22, 129537)\n timenow = datetime.now().time()\n # Return the day of the week as an integer, where Monday is 0 and Sunday is 6\n day = datetime.today().weekday()\n\n qset_zonedetails_today = zone_details_for_day(zone_code,day)\n\n if not qset_zonedetails_today:\n raise Exception(\"No hay detalles cargado para hoy, revisar\")\n\n # Traverse elements to guess in which we are.If we are at the end of a day\n # we obtain the first interval of the next day.\n # For example, a \"graph\" of what would be the range of what is it charged\n # when parking in the day\n # 00hs 8hs 13hs 14hs 20hs 23.59hs\n # ----------|||||||||||-------|||||||||||||||-----------\n zonedetails = None\n for item in qset_zonedetails_today:\n end = item.hour_end\n # See if we are before or after or inside range of current elem.\n\n # As they are ordered by time, we check if we are before the end of the period\n # as if we are before it starts, we will pick that as wll.\n # (and as they are ordered, if one element has passed, it means that it is\n # after the one that has not alreay passed the condition.\n if timenow < end:\n zonedetails = item\n break;\n\n # This means that now, is after the last interval where the user hasto pay and the end of today\n # So we pick the first of next day\n if zonedetails is None:\n day += 1\n qset_zonedetails_tomorrow = zone_details_for_day(zone_code,day)\n zonedetails = qset_zonedetails_tomorrow.first()\n\n return zonedetails", "def _nearest_datetime(self, datetime_list, target_datetime):\n if not datetime_list:\n raise errors.ParserError(\n \"Input parameter datetime_list length is zero. Required\"\n \" parameters: [datetime.datetime], datetime.datetime\")\n work_list = [entry for entry in datetime_list if entry < target_datetime]\n if not work_list:\n raise errors.ParserError(\n \"work_list length is zero. 
Entries in datetime_list\"\n \" {} are not < target_datetime {}\".format(datetime_list,\n target_datetime))\n return min(\n work_list,\n key=lambda datetime_entry: abs(datetime_entry - target_datetime))", "def get_start_end_datetimes():\n current_datetime = datetime.now()\n start_datetime = (current_datetime - timedelta(days=90)\n ).replace(hour=0, minute=0, second=0, microsecond=0)\n end_datetime = current_datetime.replace(\n hour=23, minute=59, second=59, microsecond=0)\n return (start_datetime, end_datetime)", "def _adjustRange(self, start, end):\n adjusted_start = start\n if self._start:\n if end < self._start:\n return None\n adjusted_start = max(self._start, start)\n \n adjusted_end = end\n if self._end:\n if self._end < start:\n return None\n adjusted_end = min(self._end, end)\n \n return (adjusted_start, adjusted_end)", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n return (val - input_interval_start)/float(input_interval_end - input_interval_start)*(\n output_interval_end - output_interval_start) + output_interval_start", "def _makespan(sched_mapping_list):\n start = reduce(min, [x.start_time for x in sched_mapping_list], 0.)\n end = reduce(max, [x.end_time for x in sched_mapping_list], 0.)\n return end - start", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n # your code goes here", "def remap_interval(val, in_start, in_end, out_start, out_end):\n in_range = in_end-in_start\n out_range = out_end-out_start\n return (val-in_start)/in_range*out_range+out_start", "def window_index_time(t,windowsize,overlap):\r\n \r\n try:\r\n t=t.tolist()\r\n except:\r\n t=t\r\n \r\n t1=t[0]\r\n t2=t1 + timedelta(seconds=windowsize)\r\n pt1=[0]\r\n pt2=[othertime.findNearest(t2,t)]\r\n while t2 < t[-1]:\r\n t1 = t2 - timedelta(seconds=overlap)\r\n t2 = t1 + timedelta(seconds=windowsize)\r\n\r\n pt1.append(othertime.findNearest(t1,t))\r\n pt2.append(othertime.findNearest(t2,t))\r\n \r\n return pt1, pt2", "def get_snapped_datetime(start, period):\n\n if period == \"1hour\":\n # The same day, but withe verything else as 0\n floored = datetime.datetime(year=start.year, month=start.month, day=start.day)\n\n # List of hourly datetimes\n times = list(rrule(HOURLY, interval=1, dtstart=floored, count=25))\n elif period == \"6hour\":\n floored = datetime.datetime(year=start.year, month=start.month, day=start.day)\n # List of 6-hourly datetimes\n times = list(rrule(HOURLY, interval=6, dtstart=floored, count=5))\n elif period == \"1day\":\n floored = datetime.datetime(year=start.year, month=start.month, day=1)\n # List of daily datetimes\n # 32?\n times = list(rrule(DAILY, interval=1, dtstart=floored, count=32))\n elif period == \"1week\":\n floored = datetime.datetime(year=start.year, month=start.month, day=1)\n # List of daily datetimes\n times = list(rrule(WEEKLY, interval=1, dtstart=floored, count=5))\n elif period == \"1month\":\n floored = datetime.datetime(year=start.year, month=1, day=1)\n # List of monthly datetimes\n times = list(rrule(MONTHLY, interval=1, dtstart=floored, count=13))\n\n return times[bisect.bisect_left(times, start) - 1]", "def round_time(dt=None, date_delta=datetime.timedelta(minutes=1), to='closest'):\n round_to = date_delta.total_seconds()\n\n if dt is None:\n dt = datetime.datetime.utcnow()\n\n seconds = (dt - dt.min).seconds\n\n if to == 'up':\n rounding = (seconds + round_to) // round_to * round_to\n elif to == 'down':\n rounding = seconds // 
round_to * round_to\n elif to == 'closest':\n rounding = (seconds + round_to / 2) // round_to * round_to\n else:\n raise ValueError(\n 'Expected `to` to be one of: up, down, closest')\n\n return dt + datetime.timedelta(\n seconds=rounding - seconds,\n microseconds=-dt.microsecond)", "def time_interval( self ):\n begin = self.begin; end = self.end\n if end - begin < 600*self.hour_switch:\n return 600\n if end - begin < 86400*self.day_switch:\n return 3600\n elif end - begin < 86400*7*self.week_switch:\n return 86400\n else:\n return 86400*7", "def intervals(start, end, delta):\n intervals = []\n current = copy.deepcopy(start)\n while current < end:\n intervals.append((unix_to_iso(current.strftime('%s')),\n unix_to_iso((current + delta).strftime('%s'))))\n current += delta\n return intervals", "def get_interval(self, start_time):\n end_time = start_time + self.interval\n return start_time, end_time", "def get_interval(self, start_time):\n end_time = start_time + self.interval\n return start_time, end_time", "def range(self):\n\n return time_stat(self, stat=\"range\")", "def closest_half(iso_datetime):\n d_time = datetime.fromisoformat(iso_datetime)\n approx = round(d_time.minute / 30.0) * 30\n d_time = d_time.replace(minute=0)\n d_time += timedelta(seconds=approx * 60)\n d_time = d_time.replace(second=0)\n return d_time.isoformat()", "def _interval(cls,best,lo,hi):\n return ugali.utils.stats.interval(best,lo,hi)", "def gen_start_end_times(start_time=[6, 0, 0], end_time=[23, 0, 0]):\n\n now = datetime.now()\n year = now.year\n month = now.month\n day = now.day\n\n start_time = datetime(\n year, month, day, start_time[0], start_time[1], start_time[2], 0\n )\n\n end_time = datetime(year, month, day, end_time[0], end_time[1], end_time[2], 0)\n\n if end_time < now:\n end_time += timedelta(days=1)\n start_time += timedelta(days=1)\n\n return start_time, end_time", "def range():\n\n # Time this functions.\n timer = coils.Timer()\n\n # Parse the URL parameter \"amount\".\n errors = list()\n try:\n amount = flask.request.args.get('amount')\n amount = float(amount)\n except:\n errors.append('Failed to parse \"amount\" parameter.')\n\n # Bail on any errors.\n if errors:\n return flask.jsonify(errors=errors)\n\n\n latest_tstring = db.session.query(mapping.Datum).\\\n filter(mapping.Datum.name=='latest_tstamp')[0].value\n latest_time = coils.string2time(latest_tstring)\n start_time = latest_time - dt.timedelta(seconds=amount)\n start_tstring = getNearestTime(start_time)\n \n return flask.jsonify(\n begin_time=start_tstring,\n end_time=latest_tstring,\n )", "def srt_segment_to_range(item):\n start_segment = item.start.hours * 60 * 60 + item.start.minutes * \\\n 60 + item.start.seconds + item.start.milliseconds / 1000.0\n end_segment = item.end.hours * 60 * 60 + item.end.minutes * \\\n 60 + item.end.seconds + item.end.milliseconds / 1000.0\n return start_segment, end_segment", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n convert = float (val - input_interval_start) / float ( input_interval_end - input_interval_start)\n output1 = output_interval_start + convert* (output_interval_end - output_interval_start)\n return output1" ]
[ "0.6122795", "0.58377707", "0.5655673", "0.5626476", "0.5443188", "0.5438394", "0.5426317", "0.5354083", "0.5349516", "0.53376406", "0.53324294", "0.5328962", "0.5262824", "0.52342826", "0.52287924", "0.5211007", "0.52071995", "0.5177311", "0.5176398", "0.51655763", "0.51580006", "0.5131383", "0.5131383", "0.5130096", "0.5123526", "0.5094994", "0.5090203", "0.50871694", "0.5084897", "0.50707436" ]
0.6247458
0
Helper to determine whether to use es6 or require imports based on file context
def detect_import(self):
        if self.contains_match(CONTAINS_IMPORT):
            self.es6import = True
        elif self.contains_match(CONTAINS_REQUIRE):
            self.es6import = False
        else:
            self.es6import = self.get_project_pref('detect_prefer_imports')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_Chep_2_Conditionalized_Import_Behavior_InlineImport():\n template = '''\n #def funky(s)\n #try\n #import os.path\n #except ImportError\n #pass\n #end try\n #return os.path.join('foo', $s)\n #end def\n '''\n template = compile_to_class(\n template, settings={'useLegacyImportMode': False},\n )\n template = template()\n rc = template.funky('abcdef')\n assert rc == 'foo/abcdef'", "def supports_ordinary_make_module_imports(self):\n return True", "def test_imports():\n from .context import readersender # noqa: F401", "def _import_module(self, name):\r\n try:\r\n __import__(name)\r\n return True\r\n except ImportError:\r\n return False", "def can_import(name):\n try:\n __import__(name)\n return True\n except ImportError:\n return False", "def consider_env(self): \n for spec in self._envlist(\"PYLIB\"):\n self.import_module(spec)", "def is_import(self):\n return self.sh_info is None and (self.binding == 'STB_GLOBAL' or \\\n self.binding == 'STB_WEAK' or \\\n self.binding == 'STT_FUNC')", "def _c_optimizations_available(module_name):\n import importlib\n catch = () if _c_optimizations_required() else (ImportError,)\n try:\n return importlib.import_module('BTrees._' + module_name)\n except catch: # pragma: no cover\n return False", "def imported(module):\n try:\n if module not in sys.modules:\n __import__(module)\n return 'enabled'\n except:\n return '-'", "def is_imported():\n return len(inspect.stack()) > 3", "def importer(name) -> ContextType:\n try:\n # try importing as a module (using importlib from standard import mechanism)\n return __import__(name, globals=globals(), locals=locals())\n except:\n route_steps = name.split(\".\")\n route_steps = route_steps[1:] if not route_steps[0] else route_steps\n is_name_module, is_name_package = is_module(name), is_package(name)\n assert is_name_module or is_name_package\n file_path = os.path.join(*route_steps)\n if is_name_module:\n file_path = f\"{file_path}.py\"\n else: # name is definitely a package (because of the assertion)\n file_path = os.path.join(file_path, \"__init__.py\")\n spec = importlib.util.spec_from_file_location(name, file_path)\n foo = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(foo)\n return foo", "def _redefines_import(node: nodes.AssignName) -> bool:\n current = node\n while current and not isinstance(current.parent, nodes.ExceptHandler):\n current = current.parent\n if not current or not utils.error_of_type(current.parent, ImportError):\n return False\n try_block = current.parent.parent\n for import_node in try_block.nodes_of_class((nodes.ImportFrom, nodes.Import)):\n for name, alias in import_node.names:\n if alias:\n if alias == node.name:\n return True\n elif name == node.name:\n return True\n return False", "def is_builtins(self) -> bool:\n return self.source.startswith(self.builtins_import_string)", "def test_imports():\n import sys\n import src\n assert 'sklearn.feature_extraction' not in sys.modules.keys()", "def imports(self):\n line = self.line.strip()\n if line.startswith('im'):\n if line.startswith('import') is False:\n return True\n elif line == '':\n return True", "def is_import_completion(self):\n current_line = self.get_current_line()\n\n # Seperate cases! More difficult than I thought\n match = re.match(r\"(import)|(from)\", current_line)\n if match:\n word_before = self.get_word_before()\n if word_before == \"from\" or word_before == \"import\":\n # Need to check for multiple imports! 
(TODO)\n return True\n\n return False", "def module_imported(module_name):\n return sys.modules.get(module_name) is not None", "def test_xchemOT_imported():\n assert \"xchemOT\" in sys.modules", "def require():", "def available_importer(**kwargs):\n return LazyImportTester(\"site\", **kwargs)", "def get_import_context(node):\n name = node.name\n seen = set()\n while node.parent and node.parent.parent:\n node = node.parent\n if node in seen or (node.parent and node.parent.name == name):\n break\n seen.add(node)\n\n # Should never fail because we take the full_path of the parent. And as the parent imports this child\n # there should at least be one number in the array\n return node.is_imported_from[node.parent.full_path][0]", "def is_third_party(self) -> bool:\n for third_party_import_string in self.third_party_import_strings:\n if self.source.startswith(third_party_import_string):\n return True\n\n return False", "def is_import_from_completion(self):\n\n current_line = self.get_current_line()\n\n match = re.match(r\"from .* import\", current_line)\n if match and self.get_word() != \"import\":\n return True\n\n return False", "def test_imports_on_global_namespace_without_path(Script):\n completions = Script(\"import operator\").completions()\n assert [c.name for c in completions] == ['operator']\n completions = Script(\"import operator\", path='example.py').completions()\n assert [c.name for c in completions] == ['operator']\n\n # the first one has a path the second doesn't\n completions = Script(\"import keyword\", path='example.py').completions()\n assert [c.name for c in completions] == ['keyword']\n completions = Script(\"import keyword\").completions()\n assert [c.name for c in completions] == ['keyword']", "def resolve_import(self, item):\n name = item.name\n # The last part in `from a.b.c import d` might be a symbol rather than a\n # module, so we try a.b.c and a.b.c.d as names.\n short_name = None\n if item.is_from and not item.is_star:\n if '.' 
in name.lstrip('.'):\n # The name is something like `a.b.c`, so strip off `.c`.\n rindex = name.rfind('.')\n else:\n # The name is something like `..c`, so strip off just `c`.\n rindex = name.rfind('.') + 1\n short_name = name[:rindex]\n\n if import_finder.is_builtin(name):\n filename = name + '.so'\n return Builtin(filename, name)\n\n filename, level = convert_to_path(name)\n if level:\n # This is a relative import; we need to resolve the filename\n # relative to the importing file path.\n filename = os.path.normpath(\n os.path.join(self.current_directory, filename))\n\n if not short_name:\n try_filename = True\n try_short_filename = False\n elif item.source:\n # If the import has a source path, we can use it to eliminate\n # filenames that don't match.\n source_filename, _ = os.path.splitext(item.source)\n dirname, basename = os.path.split(source_filename)\n if basename == \"__init__\":\n source_filename = dirname\n try_filename = source_filename.endswith(filename)\n try_short_filename = not try_filename\n else:\n try_filename = try_short_filename = True\n\n files = []\n if try_filename:\n files.append((name, filename))\n if try_short_filename:\n short_filename = os.path.dirname(filename)\n files.append((short_name, short_filename))\n\n for module_name, path in files:\n for fs in self.fs_path:\n f = self._find_file(fs, path)\n if not f or f == self.current_module.path:\n # We cannot import a file from itself.\n continue\n if item.is_relative():\n package_name = self.current_module.package_name\n if package_name is None:\n # Relative import in non-package\n raise ImportException(name)\n module_name = get_absolute_name(package_name, module_name)\n if isinstance(self.current_module, System):\n return System(f, module_name)\n return Local(f, module_name, fs)\n\n # If the module isn't found in the explicit pythonpath, see if python\n # itself resolved it.\n if item.source:\n prefix, ext = os.path.splitext(item.source)\n mod_name = name\n # We need to check for importing a symbol here too.\n if short_name:\n mod = prefix.replace(os.path.sep, '.')\n mod = utils.strip_suffix(mod, '.__init__')\n if not mod.endswith(name) and mod.endswith(short_name):\n mod_name = short_name\n\n if ext == '.pyc':\n pyfile = prefix + '.py'\n if os.path.exists(pyfile):\n return System(pyfile, mod_name)\n elif not ext:\n pyfile = os.path.join(prefix, \"__init__.py\")\n if os.path.exists(pyfile):\n return System(pyfile, mod_name)\n return System(item.source, mod_name)\n\n raise ImportException(name)", "def test_CL13_import(): \n\tdef test(): \n\t\ttry: \n\t\t\tfrom .. 
import CL13 \n\t\texcept: \n\t\t\treturn False \n\t\treturn True \n\treturn [\"vice.yields.ccsne.CL13\", test]", "def try_import(import_str, default=None):\r\n try:\r\n return import_module(import_str)\r\n except ImportError:\r\n return default", "def test_taskfile_import(monkeypatch, modpath):\n monkeypatch.setattr(loadlimit.importhook, 'lstaskfiles', fake_lstaskfiles)\n monkeypatch.setattr(loadlimit.importhook, 'SourceFileLoader',\n FakeSourceFileLoader)\n\n taskfiles = ['a_{}.py'.format(i) for i in range(10)]\n names = [splitext(n)[0] for n in taskfiles]\n pypath = ['{}.{}'.format(modpath, n) for n in names]\n randpath = choice(pypath)\n\n assert modpath not in sys.modules\n assert all(not p.startswith(modpath) for p in sys.modules)\n\n sys.meta_path.append(TaskImporter(*taskfiles))\n taskfile = import_module(randpath)\n\n expected = set(pypath) | set([modpath])\n result = set(p for p in sys.modules if p.startswith(modpath))\n\n assert modpath in sys.modules\n assert result == expected\n assert taskfile.TEST == randpath", "def check_module(name):\n return importlib.util.find_spec(name) is not None", "def test_molecool_imported():\n assert \"molecool\" in sys.modules" ]
[ "0.6167472", "0.6017514", "0.56490624", "0.5636177", "0.55596316", "0.54430085", "0.541212", "0.5303518", "0.5297958", "0.52604955", "0.52583086", "0.5240708", "0.5227387", "0.51878864", "0.51798165", "0.5090028", "0.50892895", "0.5072239", "0.50425524", "0.5027445", "0.49965057", "0.49941045", "0.49823168", "0.4975951", "0.49718788", "0.4967388", "0.49309298", "0.49046454", "0.4873793", "0.48710656" ]
0.6984186
0
Helper to determine whether a regexp is contained in the current file in sublime 3 or sublime 2
def contains_match(self, regexp):
        # If the regexp is not found, find will return a tuple (-1, -1) in Sublime 3 or None in Sublime 2
        # https://github.com/SublimeTextIssues/Core/issues/534
        contains_import = self.view.find(regexp, 0)
        return contains_import.size() > 0 if float(sublime.version()) >= 3000 else contains_import is not None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_cpp(filename: Path) -> bool:\n from fnmatch import fnmatch\n\n return any(fnmatch(os.path.basename(filename), p) for p in CPP_PATTERNS)", "def found(self, command, regex):\n result = self.sys(command)\n for line in result:\n found = re.search(regex,line)\n if found:\n return True\n return False", "def found(self, command, regex):\n result = self.sys(command)\n for line in result:\n found = re.search(regex,line)\n if found:\n return True\n return False", "def file_contains_pattern(file, pattern):\r\n if not os.path.isfile(file):\r\n raise NameError('file %s does not exist' % file)\r\n return not utils.system('egrep -q \"' + pattern + '\" ' + file,\r\n ignore_status=True)", "def _check_regex_match(file_path, search_regex):\n with file_path.open(\"rb\") as file_obj:\n file_bytes = file_obj.read()\n content = None\n for encoding in TREE_ENCODINGS:\n try:\n content = file_bytes.decode(encoding)\n break\n except UnicodeDecodeError:\n continue\n if not search_regex.search(content) is None:\n return True\n return False", "def match(self, regexp):\n try:\n self.rematch = regexp.match(self.matchstring)\n except AttributeError:\n self.rematch = re.match(regexp, self.matchstring)\n return bool(self.rematch)", "def is_js_file(fname):\r\n return REJS.search(fname) and \\\r\n TEST_INDICATOR not in fname", "def regexp(expr, item):\n reg = re.compile(expr)\n return reg.search(item) is not None", "def contains_code(notebook, regex_list):\n source = code_cells(notebook)\n for cell_source in source:\n for line in cell_source:\n # Ignore comments\n if line.startswith('#'):\n continue\n # if the line contains any of the regexes, return True\n for regex in regex_list:\n if re.search(regex, line, re.IGNORECASE):\n return True\n return False", "def file_check(pattern, file_to_check):\n if file_to_check.name.__contains__(pattern):\n yield True", "def test_match_regexp_including_start():\r\n runmatch(lcode)", "def search(self, regexp):\n try:\n self.rematch = regexp.search(self.matchstring)\n except AttributeError:\n self.rematch = re.search(regexp, self.matchstring)\n return bool(self.rematch)", "def _match_incl_regexp(self, rel_path):\n\n for neg_regexp in self.include_regexps:\n if neg_regexp.search(rel_path) is not None:\n self.logger.debug(\"The same path %s matches the include\"\n \" regexp %s.\" % (rel_path,\n neg_regexp.pattern))\n return True\n\n return False", "def _is_regex_match(s, pat):\n\n pat = pat.rstrip()\n m = re.search(Settings._REPAT, pat)\n if m:\n flags_combined = 0\n if m.group('flag'):\n char_to_flag = {\n 'A':re.A, 'I':re.I, 'L':re.L, 'M':re.M, 'S':re.S, 'X':re.X}\n for flag in list(m.group('flag')):\n flags_combined |= char_to_flag[flag]\n return bool(re.search(m.group('pat'), s, flags_combined))\n raise InvalidRegexError(pat)", "def _check_regex():\n try:\n import regex\n\n return regex\n except ImportError:\n raise ImportError(\n '\"regex\" is not installed. 
Please install \"regex\" -> \"pip install regex\"'\n )", "def match_file(patterns, file):\n\tmatched = False\n\tfor pattern in patterns:\n\t\tif pattern.include is not None:\n\t\t\tif file in pattern.match((file,)):\n\t\t\t\tmatched = pattern.include\n\treturn matched", "def isrst(filename):\n return filename[-4:] == '.rst'", "def _gitline_to_regexp(self, line):\n negation = False # if True, inverse the pattern\n\n # Remove the dirty characters like spaces at the beginning\n # or at the end, carriage returns, etc.\n line = line.strip()\n\n # A blank line matches no files, so it can serve as a\n # separator for readability.\n if line == '':\n return\n\n # A line starting with # serves as a comment.\n if line.startswith('#'):\n return\n\n # An optional prefix ! which negates the pattern; any\n # matching file excluded by a previous pattern will become\n # included again. If a negated pattern matches, this will\n # override\n if line.startswith('!'):\n line = line[1:]\n negation = True\n\n # If the pattern does not contain a slash /, git treats it\n # as a shell glob pattern and checks for a match against\n # the pathname relative to the location of the .gitignore\n # file (relative to the toplevel of the work tree if not\n # from a .gitignore file).\n\n # Otherwise, git treats the pattern as a shell glob\n # suitable for consumption by fnmatch(3) with the\n # FNM_PATHNAME flag: wildcards in the pattern will not\n # match a / in the pathname. For example,\n # \"Documentation/*.html\" matches \"Documentation/git.html\"\n # but not \"Documentation/ppc/ppc.html\" or\n # \"tools/perf/Documentation/perf.html\".\n regex = fnmatch.translate(line)\n regex = regex.replace('\\\\Z(?ms)', '')\n\n if not negation:\n regex = '.*%s.*' % regex\n\n return regex", "def _gitline_to_regexp(self, line):\n negation = False # if True, inverse the pattern\n\n # Remove the dirty characters like spaces at the beginning\n # or at the end, carriage returns, etc.\n line = line.strip()\n\n # A blank line matches no files, so it can serve as a\n # separator for readability.\n if line == '':\n return\n\n # A line starting with # serves as a comment.\n if line.startswith('#'):\n return\n\n # An optional prefix ! which negates the pattern; any\n # matching file excluded by a previous pattern will become\n # included again. If a negated pattern matches, this will\n # override\n if line.startswith('!'):\n line = line[1:]\n negation = True\n\n # If the pattern does not contain a slash /, git treats it\n # as a shell glob pattern and checks for a match against\n # the pathname relative to the location of the .gitignore\n # file (relative to the toplevel of the work tree if not\n # from a .gitignore file).\n\n # Otherwise, git treats the pattern as a shell glob\n # suitable for consumption by fnmatch(3) with the\n # FNM_PATHNAME flag: wildcards in the pattern will not\n # match a / in the pathname. 
For example,\n # \"Documentation/*.html\" matches \"Documentation/git.html\"\n # but not \"Documentation/ppc/ppc.html\" or\n # \"tools/perf/Documentation/perf.html\".\n regex = fnmatch.translate(line)\n regex = regex.replace('\\\\Z(?ms)', '')\n\n if not negation:\n regex = '.*%s.*' % regex\n\n return regex", "def can_make(path: str) -> bool:\n for (regexp, _) in Step.by_regexp:\n if re.fullmatch(regexp, path):\n return True\n\n return False", "def should_format(\n filename: Path, include_patterns: Iterable[str], exclude_patterns: Iterable[str]\n) -> Tuple[bool, str]:\n from fnmatch import fnmatch\n\n if any(fnmatch(os.path.abspath(filename), pattern) for pattern in exclude_patterns):\n return False, \"Excluded file\"\n\n filename_no_ext, ext = os.path.splitext(filename)\n # ignore .py file that has a jupytext configured notebook with the same base name\n ipynb_filename = filename_no_ext + \".ipynb\"\n if ext == \".py\" and os.path.isfile(ipynb_filename):\n with open(ipynb_filename, \"rb\") as f:\n if b\"jupytext\" not in f.read():\n return True, \"\"\n with open(filename, \"rb\") as f:\n if b\"jupytext:\" not in f.read():\n return True, \"\"\n return False, \"Jupytext generated file\"\n\n if any(fnmatch(os.path.basename(filename), pattern) for pattern in include_patterns):\n return True, \"\"\n\n return False, \"Unknown file type\"", "def is_min(filename):\r\n return re.search(\"min.js$\", filename)", "def validate_string_search(self, pattern, file):\r\n try:\r\n file_open = open(file, 'r')\r\n except:\r\n logging.info(\"file not found\")\r\n return -1\r\n file_data = file_open.read()\r\n ret_out = re.search(pattern, file_data)\r\n if ret_out:\r\n return True, ret_out\r\n else:\r\n return False, ret_out", "def isRegexPossible(self):\n if self._lastToken is None:\n # No token has been produced yet: at the start of the input,\n # no division is possible, so a regex literal _is_ possible.\n return True\n\n if self._lastToken.type == ECMAScriptLexer.Identifier or \\\n self._lastToken.type == ECMAScriptLexer.NullLiteral or \\\n self._lastToken.type == ECMAScriptLexer.BooleanLiteral or \\\n self._lastToken.type == ECMAScriptLexer.This or \\\n self._lastToken.type == ECMAScriptLexer.CloseBracket or \\\n self._lastToken.type == ECMAScriptLexer.CloseParen or \\\n self._lastToken.type == ECMAScriptLexer.OctalIntegerLiteral or \\\n self._lastToken.type == ECMAScriptLexer.DecimalLiteral or \\\n self._lastToken.type == ECMAScriptLexer.HexIntegerLiteral or \\\n self._lastToken.type == ECMAScriptLexer.StringLiteral or \\\n self._lastToken.type == ECMAScriptLexer.PlusPlus or \\\n self._lastToken.type == ECMAScriptLexer.MinusMinus:\n # After any of the tokens above, no regex literal can follow.\n return False\n else:\n # In all other cases, a regex literal _is_ possible.\n return True", "def is_input(line):\n #tex_input_re = r\"\"\"^\\s*\\\\input{[^}]*}\"\"\" # input only\n tex_input_re = r\"\"\"(^[^\\%]*\\\\input{[^}]*})|(^[^\\%]*\\\\include{[^}]*})\"\"\" # input or include\n return re.search(tex_input_re, line)", "def validate_string_match(self, pattern, file):\r\n try:\r\n file_open = open(file, 'r')\r\n except:\r\n logging.info(\"file not found\")\r\n return -1\r\n file_data = file_open.read()\r\n ret_out = re.match(pattern, file_data)\r\n if ret_out:\r\n return True, ret_out\r\n else:\r\n return False, ret_out", "def stdout_is_regular_file() -> bool:\n mode = os.fstat(sys.stdout.buffer.fileno()).st_mode\n return stat.S_ISREG(mode)", "def looks_like_a_filename(kernel_source):\n result = False\n if 
isinstance(kernel_source, str):\n result = True\n #test if not too long\n if len(kernel_source) > 250:\n result = False\n #test if not contains special characters\n for c in \"();{}\\\\\":\n if c in kernel_source:\n result = False\n #just a safeguard for stuff that looks like code\n for s in [\"__global__ \", \"__kernel \", \"void \", \"float \"]:\n if s in kernel_source:\n result = False\n #string must contain substring \".c\", \".opencl\", or \".F\"\n result = result and any([s in kernel_source for s in (\".c\", \".opencl\", \".F\")])\n return result", "def __check_pattern(node):\n if node.tag != \"discover_datasets\":\n return False\n if \"from_tool_provided_metadata\" in node.attrib and string_as_bool(\n node.attrib.get(\"from_tool_provided_metadata\", \"false\")\n ):\n return True\n if \"pattern\" not in node.attrib:\n return False\n pattern = node.attrib[\"pattern\"]\n regex_pattern = NAMED_PATTERNS.get(pattern, pattern)\n # TODO error on wrong pattern or non-regexp\n if \"(?P<ext>\" in regex_pattern:\n return True", "def fnmatch(pattern, filename) -> bool:\n return _fnmatch(filename, pattern)" ]
[ "0.6236844", "0.6035598", "0.6035598", "0.59960926", "0.5810656", "0.5790262", "0.57603216", "0.57455856", "0.5740435", "0.5727228", "0.5653124", "0.5651493", "0.5648111", "0.5636862", "0.5629547", "0.5627501", "0.56141937", "0.56075996", "0.56075996", "0.55625325", "0.5550482", "0.55402696", "0.5517392", "0.54846823", "0.54699934", "0.5453762", "0.5445301", "0.5409859", "0.5400009", "0.5385532" ]
0.77203137
0
Return arguments for insert snippet command.
def get_args(self): return { 'contents': self.get_formatted_code() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def help_insert(self):\n print(INSERT)", "def args(self) -> typing.Tuple[str, typing.List[str]]:\n func = inspect.stack()[1][3]\n command = func[len(self.CMD_PREFIX):]\n return ('{} {}'.format(sys.argv[0], command),\n sys.argv[2:])", "def get_args(self) -> List[str]:\n return self.content.split()[1:]", "def do_insert(self,args):\n if len(args) != 0:\n for w in args.split():\n sl.insertList(int(w.rstrip()))", "def get_start_cmd_args(self):\r\n return self.get_args(OSPL.start)", "def args(self):\n return self.cmd_args", "def inputs_create(self):\n inputs = []\n for macro in self.my_xml.tool_data[self.shell_dict['short_name']]['pre_params']:\n inputs.append(self.my_xml.xml_tmpl.substitute(macro_name=macro))\n for macro in self.my_xml.tool_data[self.shell_dict['short_name']]['post_params']:\n inputs.append(self.my_xml.xml_tmpl.substitute(macro_name=macro))\n\n return '\\n'.join(inputs)", "def getPositionalArgs():", "def _get_run_script_args(self):\n raise NotImplementedError", "def add_arguments(self, parser):", "def get_cli_arguments(self):\n pass", "def signature(self) -> global___SnippetSignature:", "def args(self):\n if not self.__args_updated:\n for inc in self.include_templates:\n self.__args.update(inc.args)\n self.__args_updated = True\n return self.__args", "def args_str(self):", "def add_arguments(cls):\n return [\n (('--yes',), dict(action='store_true', help='clean .git repo')),\n (('--variable', '-s'),\n dict(nargs='+', help='set extra variable,format is name:value')),\n (('--skip-builtin',),\n dict(action='store_true', help='skip replace builtin variable')),\n\n (('--dir',), dict(nargs='?', default=os.getcwd(),\n help='set working directory')),\n (('--debug',), dict(action='store_true', help='open debug mode')),\n (('--dry-run',), dict(action='store_true',\n help='print command instead execute it')),\n (('--verbose', '-v'), dict(action='count')),\n ]", "def parse_command_line_args(self) -> None:\n self.parser.add_argument(\n \"-i\",\n \"--input\",\n help=\"(str) [default: .] The relative folder path with the csv files\",\n default=getcwd()\n )\n self.parser.add_argument(\n \"-o\",\n \"--output\",\n help=\"(str) [default: .] 
The folder path for saving the *.alfredsnippets files\",\n default=getcwd()\n )\n self.parser.add_argument(\n \"-f\",\n \"--fieldorder\",\n help=\"(str) [default: 'abbreviation, content, name'] A comma separated list for the order of the fields \"\n \"of the csv files\",\n default=\"abbreviation, content, name\"\n )\n self.parser.add_argument(\n \"-d\",\n \"--deletefolders\",\n help=\"(bool) [default=False] Delete the folders that contains the json files\",\n type=self.str2bool,\n nargs='?',\n const=True,\n default=False\n )\n self.parser.add_argument(\n \"-l\",\n \"--lplaceholder\",\n help=\"(str) [default: %] The left side placeholder for the embedded snippets.\",\n default=\"%\"\n )\n self.parser.add_argument(\n \"-r\",\n \"--rplaceholder\",\n help=\"(str) [default: %] The right side placeholder for the embedded snippets.\",\n default=\"%\"\n )\n\n self.parser.add_argument(\n \"-c\",\n \"--changeplaceholders\",\n help=\"(bool) [default=True] Set to false if the placeholder shouldn't get changed at all\",\n type=self.str2bool,\n nargs='?',\n const=True,\n default=True\n )\n\n self.args = self.parser.parse_args()", "def insert_statement() -> str:\n pass", "def help_args():\n pass", "def args(self) -> List[str]:\n return self.__args", "def parse_args():\n\n parser = argparse.ArgumentParser(prog='pyim-split')\n\n parser.add_argument('--insertions', type=Path, required=True)\n parser.add_argument('--output_dir', type=Path, required=True)\n\n parser.add_argument('--samples', nargs='+', required=False, default=None)\n parser.add_argument('--remove_prefix', default=False, action='store_true')\n\n return parser.parse_args()", "def Args(parser):", "def get_command_args(self, skip_serialized_namedtuple: bool = False) -> Sequence[str]:\n return (\n _get_entry_point(self.job_origin)\n + [\"api\", \"execute_step\"]\n + (\n [\"--compressed-input-json\", self._get_compressed_args()]\n if not skip_serialized_namedtuple\n else []\n )\n )", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")", "def args(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"args\")" ]
[ "0.5862157", "0.5763", "0.5747862", "0.5689465", "0.55466807", "0.5543572", "0.5507813", "0.54482096", "0.5418385", "0.54103833", "0.5397669", "0.5368619", "0.53491396", "0.532232", "0.53106725", "0.52695787", "0.5247165", "0.5244827", "0.5234803", "0.52134436", "0.51947635", "0.51690984", "0.5151727", "0.5151727", "0.5151727", "0.5151727", "0.5151727", "0.5151727", "0.5151727", "0.5151727" ]
0.6094225
0
Get type of quotes to use.
def get_quotes(self): # However ignore the 'true' autodetection setting. jscs_quotes = self.jscs_options.get('validateQuoteMarks') if isinstance(jscs_quotes, dict): jscs_quotes = jscs_quotes.get('mark') if jscs_quotes and jscs_quotes is not True: return jscs_quotes # Use whatever quote type is set in preferences return get_quotes()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_type(self):\n if self.data[\"is_script\"]:\n return self.data[\"script\"]\n elif self.data[\"is_qt\"]:\n return \"qt\"\n else:\n return \"normal\"", "def get_type(self) -> str:\n # Note: this name conflicts with existing python builtins\n return self[\"Type\"]", "def get_type(self) -> str:\n return self.row_dict['type']", "def get_magic_quotes_runtime():\n raise NotImplementedError()", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")", "def type(self) -> str:\n return pulumi.get(self, \"type\")" ]
[ "0.6076279", "0.60122496", "0.5968294", "0.59570783", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422", "0.5889422" ]
0.68239087
0
Parses the disallowSpace{After,Before}BinaryOperators jscs options and checks if spaces are not allowed before or after an `=` so we know if we should strip those from the var statement.
def should_strip_setter_whitespace(self): def parse_jscs_option(val): if type(val) == bool: return val if isinstance(val, list) and '=' in val: return True return False return dict( before=parse_jscs_option( self.jscs_options.get('disallowSpaceBeforeBinaryOperators')), after=parse_jscs_option( self.jscs_options.get('disallowSpaceAfterBinaryOperators')) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _ExpectSpaceBeforeOperator(self, token):\n if token.string == ',' or token.metadata.IsUnaryPostOperator():\n return False\n\n # Colons should appear in labels, object literals, the case of a switch\n # statement, and ternary operator. Only want a space in the case of the\n # ternary operator.\n if (token.string == ':' and\n token.metadata.context.type in (Context.LITERAL_ELEMENT,\n Context.CASE_BLOCK,\n Context.STATEMENT)):\n return False\n\n if token.metadata.IsUnaryOperator() and token.IsFirstInLine():\n return False\n\n return True", "def test_sqpp_oddly_capped_operators(self):\n self.assertEqual(self.parser.parse_query('foo oR bar'),\n ['+', 'foo', '|', 'bar'])", "def preprocessConditions(conditions):\n conditions = re.sub(r'&+', ' and ', conditions)\n conditions = re.sub(r'\\|+', ' or ', conditions)\n conditions = re.sub(r'==+', '=', conditions)\n conditions = re.sub(r'(?<![<>])=', '==', conditions)\n conditions = \"lambda x, y, z=0: any([ \" + conditions + \" ])\"\n return conditions", "def replace_operators(self, instr):\n # change ++, -- to add(1), sub(1)\n instr = re.sub(r\"\\+\\+\", \".add(1)\", instr)\n instr = re.sub(r\"--\", \".sub(1)\", instr)\n\n m1 = re.search(r\"[+\\-*/]=\", instr)\n result = \"\"\n if m1:\n # handle the string with +=, -=, *=. /=\n v = instr[: m1.start()].rstrip(\" \")\n v1 = v.strip(\" \")\n expressions = [v1, m1.group()[: 1], \"(\", instr[m1.end():].strip().strip(\";\"), \");\"]\n instr = v + \"= \" + \" \".join(expressions)\n\n # split by !, &&, ||\n equations = re.split(r\"(!|&&|\\|\\||)\", instr)\n for equation in equations:\n # split by <=, >=, ==, !=, =\n expressions = re.split(r\"([<>=!]*=)\", equation)\n if len(expressions) == 1:\n result += equation\n else:\n for expression in expressions:\n if re.search(r\"[+\\-*/]\", expression):\n # with math operators\n # 0.exclude ;\n rc = \"\"\n pos = expression.find(';')\n if pos != -1:\n rc = expression[pos:]\n expression = expression[:pos]\n\n # 1.exclude independent ( or )\n lbc = expression.count(\"(\")\n rbc = expression.count(\")\")\n lc = \"\"\n if lbc > rbc:\n # ( is more than )\n pos = expression.replace('(', 'X', lbc - rbc - 1).find('(')\n lc = expression[: pos + 1]\n expression = expression[pos + 1:]\n else:\n if lbc < rbc:\n # ( is less than )\n pos = 'X'.join(expression.rsplit(')', rbc - lbc - 1)).rfind(')')\n rc = expression[pos:] + rc\n expression = expression[:pos]\n\n # 2.change normal notation to RPN, in order to change math operators to SafeMath operators\n # 3.change RPN to normal notation\n result += lc + self.rpn_to_nn(self.nn_to_rpn(expression)) + rc\n else:\n result += expression\n\n return result", "def remove_code_punc(code):\n sec = code\n together = set([\"==\", \"&&\", \"<>\", \"||\"])\n spacing = set([\"+\", \"-\", \"*\", \"/\", \"!\", \"^\"])\n exclude = set([\"=\", \"|\", \"&\", \"[\", \"]\", \"\\r\", \"\\n\", \"(\", \")\", \"{\", \"}\", \":\", \",\", \";\", \".\", '\"', \"'\", \">\", \"<\", \"#\", \"%\", \"$\", \"~\", \"\\\\\", \"?\"])\n new_sec = \"\"\n i = 0\n while i < len(sec):\n try:\n if sec[i:i + 1] in together:\n new_sec += \" \" + sec[i:i+1] + \" \"\n i += 2\n continue\n except:\n print \"last\"\n if sec[i] in exclude:\n new_sec += \" \"\n elif sec[i] in spacing:\n new_sec += \" \" + sec[i] + \" \"\n else:\n new_sec += sec[i]\n i += 1\n new_sec = new_sec.replace(\" \", \" \")\n return new_sec", "def test_whitespaceStripFlagsAndParameters(self):\n # We test this by making sure aflag and it's help string are on the\n # same line.\n lines = [s for s in 
str(self.nice).splitlines() if s.find(\"aflag\")>=0]\n self.failUnless(len(lines) > 0)\n self.failUnless(lines[0].find(\"flagallicious\") >= 0)", "def test_preserved_whitespace_in_pre_and_textarea(self):\n self.assertSoupEquals(\"<pre> </pre>\")\n self.assertSoupEquals(\"<textarea> woo </textarea>\")", "def validateOperator(self, tokens):\n return tokens", "def skipWhiteSpace(self):\n pass", "def test_sqpp_paren_expr1_not_expr2_or_quoted_string_not_expr3_or_expr4WORDS(self):\n self.assertEqual(self.parser.parse_query('(expr1) not expr2 | \"expressions not in and quotes | (are) not - parsed \" - (expr3) or expr4'),\n ['+', 'expr1', '-', 'expr2', '|', '\"expressions not in and quotes | (are) not - parsed \"', '-', 'expr3', '|', 'expr4'])\n #['+', '+ \"expressions not in and quotes | (are) not - parsed \" | expr1 | expr4',\n # '+', '- expr3 | expr1 | expr4',\n # '+', '+ \"expressions not in and quotes | (are) not - parsed \" - expr2 | expr4',\n # '+', '- expr3 - expr2 | expr4'])", "def check_no_whitespace(args):\n for arg in args:\n for char in arg:\n if char in string.whitespace:\n raise RuntimeError(\"No whitespace characters are currently allowed in input arguments. Replace spaces in file and folder names with underscores ('_').\")\n return", "def test_str_not_equal_str(self):\n # compact version\n assert_that(Condition.is_valid(\n 'not \"{{ env.BRANCH_NAME }}\" == \"master\"'), equal_to(True))\n # more spaces around are allowed\n assert_that(Condition.is_valid(\n ' not \"{{ env.BRANCH_NAME }}\" == \"master\" '), equal_to(True))\n # compact version\n assert_that(Condition.is_valid(\n 'not \"{{ variables.BRANCH_NAME }}\" == \"master\"'), equal_to(True))\n # more spaces around are allowed\n assert_that(Condition.is_valid(\n ' not \"{{ variables.BRANCH_NAME }}\" == \"master\" '), equal_to(True))", "def __sub_comparison_ops(file_contents: str) -> str:\n\n return re.sub(r'(?:IS\\s+)?EQUALS?(?:\\s+TO)?', '=', file_contents)", "def test_str_eq_str(self):\n # compact version (env variables)\n assert_that(Condition.is_valid(\n '\"{{ env.BRANCH_NAME }}\" == \"master\"'), equal_to(True))\n # more spaces around are allowed (env variables)\n assert_that(Condition.is_valid(\n ' \"{{ env.BRANCH_NAME }}\" == \"master\" '), equal_to(True))\n # compact version (tasks variables)\n assert_that(Condition.is_valid(\n '\"{{ variables.cpu_count }}\" == \"6\"'), equal_to(True))\n # more spaces around are allowed (tasks variables)\n assert_that(Condition.is_valid(\n ' \"{{ variables.cpu_count }}\" == \"6\" '), equal_to(True))", "def test_sqpp_paren_expr1_not_expr2_and_paren_expr3_or_expr4_WORDS(self):\n self.assertEqual(self.parser.parse_query('(expr1) not expr2 and (expr3) or expr4'),\n ['+', 'expr1', '-', 'expr2', '+', 'expr3', '|', 'expr4'])\n #['+', '+ expr1 | expr4', '+', '- expr2 | expr4', '+', '+ expr3 | expr4'])", "def Remove_constants(self):\r\n\r\n i = 0\r\n while i < len(self.Code_Lines):\r\n if len(self.Code_Lines[i]) > 2:\r\n if self.Code_Lines[i][1] == \"equ\":\r\n if not self.Check_is_valid(self.Code_Lines[i][0]):\r\n return False\r\n else:\r\n for j in range(0, len(self.Code_Lines)):\r\n if j != i:\r\n if self.Code_Lines[i][0] in self.Code_Lines[j]:\r\n Index = self.Code_Lines[j].index(self.Code_Lines[i][0])\r\n for k in range(0, len(self.Code_Lines[i]) - 2):\r\n self.Code_Lines[j].insert(Index + k, self.Code_Lines[i][k + 2])\r\n self.Code_Lines[j].remove(self.Code_Lines[i][0])\r\n\r\n self.Code_Lines.remove(self.Code_Lines[i])\r\n continue\r\n i = i + 1\r\n return True", "def IgnoreLine(self, 
str):\n if not str.strip(): return True\n else: return str.startswith('==') or str.startswith('**')", "def test_disable_pyparsing_arity_trimming():\n import pyparsing\n import dice.utilities\n assert pyparsing._trim_arity is dice.utilities._trim_arity", "def _jsmin(self):\r\n self.theA = '\\n'\r\n self._action(3)\r\n\r\n while self.theA != '\\000':\r\n if self.theA == ' ':\r\n if isAlphanum(self.theB):\r\n self._action(1)\r\n else:\r\n self._action(2)\r\n elif self.theA == '\\n':\r\n if self.theB in ['{', '[', '(', '+', '-']:\r\n self._action(1)\r\n elif self.theB == ' ':\r\n self._action(3)\r\n else:\r\n if isAlphanum(self.theB):\r\n self._action(1)\r\n else:\r\n self._action(2)\r\n else:\r\n if self.theB == ' ':\r\n if isAlphanum(self.theA):\r\n self._action(1)\r\n else:\r\n self._action(3)\r\n elif self.theB == '\\n':\r\n if self.theA in ['}', ']', ')', '+', '-', '\"', '\\'']:\r\n self._action(1)\r\n else:\r\n if isAlphanum(self.theA):\r\n self._action(1)\r\n else:\r\n self._action(3)\r\n else:\r\n self._action(1)", "def op(self) -> Literal[\"==\"] | Literal[\"<=\"] | Literal[\">=\"]:\n ...", "def test_whitespace(self):\n self.assertRaises(ParseException, self.flag.parseString, ' ')", "def determine_if_whitespace(self):\n value = self.current.value\n\n if value == \"\\n\":\n self.is_space = True\n else:\n self.is_space = False\n if value == \"\" or regexes[\"whitespace\"].match(value):\n self.is_space = True", "def test_remove_assignment_rule(self):\n pass", "def operators_with_no_words_in_between(input_string):\n op_re1 = r'\\&|\\||AND|OR|BUT\\sNOT|NOT|\\~|\\,|NEAR\\d{1,3}|WITHIN\\d{1,3}'\n regex = re.compile('(%s)\\s*(%s)' % (op_re1, op_re1))\n if re.search(regex, input_string) is None:\n return True\n else:\n return False", "def test_remove_multiple_spaces():\n questions_parser = QuestionsParser()\n assert questions_parser.remove_multiple_spaces('Sentence with multiple spaces') == 'Sentence with multiple spaces'", "def postparsing_precmd(self, statement):\n stop = False\n return stop, statement", "def postparsing_precmd(self, statement):\n stop = False\n return stop, statement", "def _despace(statement):\n return re.sub(r' +', ' ', statement)", "def bypass_conds(self):\n for block in self.get_basic_blocks_followed_by_branches():\n constants = collect_constant_assigns(block.statements)\n branch = block.outgoing_edge[0]\n cond = deepcopy(branch.cond)\n cond = specialize_constants(cond, constants)\n try:\n if eval(astor.to_source(cond), silica.operators):\n # FIXME: Interface violation, need a remove method from blocks\n branch.true_edge.incoming_edges.add((block, \"\"))\n block.outgoing_edges = {(branch.true_edge, \"\")}\n else:\n branch.false_edge.incoming_edges.add((block, \"\"))\n block.outgoing_edges = {(branch.false_edge, \"\")}\n branch.incoming_edges.remove((block, \"\"))\n except NameError as e:\n # print(e)\n pass", "def test_blank_value(self):\n assert yaenv.core.EnvVar('key=').value == ''\n assert yaenv.core.EnvVar('key=\"\"').value == ''\n assert yaenv.core.EnvVar(\"key=''\").value == ''\n assert yaenv.core.EnvVar('key= ').value == ''" ]
[ "0.5571867", "0.52223045", "0.5181726", "0.51077217", "0.50890666", "0.50884646", "0.50165695", "0.49252373", "0.4916879", "0.48278362", "0.48272413", "0.48212582", "0.48195723", "0.47902355", "0.47791567", "0.4775543", "0.47721535", "0.47455353", "0.47295842", "0.46985298", "0.46972668", "0.46837795", "0.46488047", "0.46401834", "0.46223006", "0.461961", "0.461961", "0.46097335", "0.4597468", "0.4583585" ]
0.72638625
0
Finds the sensel device, if none is detected, return None. This None should throw an error in subsequent functions.
def openSensel(): handle = None (error, device_list) = sensel.getDeviceList() if device_list.num_devices != 0: (error, handle) = sensel.openDeviceByID(device_list.devices[0].idx) return handle
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _find_device(self):\n for bus in usb.busses():\n for dev in bus.devices:\n if dev.idVendor == self.vendor_id and dev.idProduct == self.product_id:\n if self.device_id is None or dev.filename == self.device_id:\n log.info('found station on USB bus=%s device=%s' % (bus.dirname, dev.filename))\n return dev\n return None", "def finddevice():\n\n return next((device for device in [\"xpu\"] if hasattr(torch, device) and getattr(torch, device).is_available()), None)", "def find_device():\n device = usb.core.find(\n idVendor=LuxaforFlag.DEVICE_VENDOR_ID,\n idProduct=LuxaforFlag.DEVICE_PRODUCT_ID\n )\n return device", "def discover_device(devCtrl, setupPayload):\n log.info(\"Attempting to find device on network\")\n longDiscriminator = int(setupPayload.attributes['Long discriminator'])\n try:\n res = devCtrl.DiscoverCommissionableNodes(\n discovery.FilterType.LONG_DISCRIMINATOR, longDiscriminator, stopOnFirst=True, timeoutSecond=5)\n except exceptions.ChipStackError as ex:\n log.error(\"DiscoverCommissionableNodes failed {}\".format(str(ex)))\n return None\n if not res:\n log.info(\"Device not found\")\n return None\n return res[0]", "def find_scsi_device(self, path_types, nexus):\n\n for path_type in path_types:\n if path_type.get('SCSI Device'):\n if path_type['SCSI Nexus'] == nexus:\n return path_type['SCSI Device']\n return None", "def get_device(l):\n if not l.device:\n l.device = find_device()\n setup_device(l.device)\n return l.device", "def _find_dev(vendor_id, product_id, device_id):\n for bus in usb.busses():\n for dev in bus.devices:\n if dev.idVendor == vendor_id and dev.idProduct == product_id:\n if device_id is None or dev.filename == device_id:\n loginf('Found device on USB bus=%s device=%s' % (bus.dirname, dev.filename))\n return dev\n return None", "def find_stick():\n out = subprocess.check_output(\n \"gdbus introspect --system --dest org.freedesktop.UDisks \"\n \"--object-path /org/freedesktop/UDisks/devices --recurse \"\n \"--only-properties\".split())\n devs = zip(*((re.match(r\".* = '?(.*?)'?;\", x).group(1)\n for x in out.splitlines()\n if \"DriveConnectionInterface =\" in x\n or \"DeviceIsPartition =\" in x\n or \"DeviceFile = \" in x),)*3)\n try:\n return next(dev[2] for dev in devs if dev[0] == 'usb'\n and dev[1] == 'true')\n except StopIteration:\n return None", "def any_soco():\n\n cls = config.SOCO_CLASS\n # pylint: disable=no-member, protected-access\n try:\n # Try to get the first pre-existing soco instance we know about,\n # as long as it is visible (i.e. not a bridge etc). Otherwise,\n # perform discovery (again, excluding invisibles) and return one of\n # those\n device = next(\n d for d in cls._instances[cls._class_group].values() if d.is_visible\n )\n except (KeyError, StopIteration):\n devices = discover()\n return None if devices is None else devices.pop()\n\n return device", "def getDevice(driver):\n devices = list(listDevices(driver))\n if not devices:\n print('No devices found. Ensure your camera is connected.')\n elif len(devices) != 1:\n print('Too many devices found. 
Only one camera is supported')\n else:\n return devices[0]", "def _find_device(self):\n found_device = False\n nearby_devices = None\n try:\n nearby_devices = self._adapter.scan()\n except Exception:\n pass\n\n if nearby_devices is not None:\n for device in nearby_devices:\n name = device['name']\n if name is not None and name.startswith(self._search_name):\n self._address = device['address']\n print(f'Found device named: {name} at {self._address}')\n found_device = True\n break\n\n return found_device", "def get_scanner(hass, config):\n try:\n return DdWrtDeviceScanner(config[DOMAIN])\n except ConnectionError:\n return None", "def get_device_or_None(id):\n try:\n d = Device.objects.get(id=id)\n return d\n except Device.DoesNotExist:\n return None", "def find(ctx, name):\n conf = settings.devices.get(name, dict())\n if conf.get('type') == 'command':\n return conf, name, name\n\n uuids = ctx.obj['uuids']\n context = Context()\n for dev in iter(context.list_devices()):\n if 'ID_FS_TYPE' in dev:\n if name == uuids.get(dev.get('ID_FS_UUID')):\n return (settings.devices[name], dev['DEVNAME'],\n settings.devices[name].get('label',\n dev.get('ID_FS_LABEL')))\n\n print('Device \"%s\" not found.' % name)\n sys.exit(1)", "def device(self):\n hw = self.hw()\n if hw: return hw.device()", "def vkb_device_by_guid(guid: str) -> typing.Optional[VKBDevice]:\n guid = guid.lower()\n for dev in find_all_vkb():\n if dev.guid.lower().startswith(guid):\n return dev\n return None", "def GetDevice(self, arg):\n\n if not arg: return None\n\n deviceSpec = DeviceId(arg)\n\n for device in self.YieldAllDevices():\n if deviceSpec.Matches(device): return device", "def get_device(ui, name):\n if not name in soc_db:\n assert False, 'unknown SoC name %s' % name\n return None\n info = soc_db[name]\n svd_file = './vendor/silabs/svd/%s.svd.gz' % info.svd\n ui.put('%s: compiling %s\\n' % (name, svd_file))\n device = soc.build_device(svd_file)\n for f in info.fixups:\n f(device)\n return device", "def findDeviceDescriptor(self, string: str) -> cern.japc.core.DeviceDescriptor:\n ...", "def get_device(arn=None):\n pass", "def device_class(self):\n if self._type in SENSOR_TYPES:\n return self._type\n return None", "def query_device_handle(runtime, query_str):\r\n devices_manager = runtime.devices_manager\r\n dname, sname = query_str.split('.')\r\n\r\n dev = devices_manager.find_devices(dname)\r\n if dev is None:\r\n print(f'[Debug] Query {dname} from DevicesManager and got None.', file=sys.stderr)\r\n raise ValueError(f'Device {dname} not in database.')\r\n\r\n ret = dev.get_status_value(sname)\r\n if ret is None:\r\n print(f'[Debug] Query {dname}.{sname} from DevicesManager and got None.', file=sys.stderr)\r\n raise ValueError(f'Status {dname}.{sname} not in database.')\r\n\r\n return ret", "def find_device(data):\n if isinstance(data, Mapping):\n for obj in data.values():\n device = find_device(obj)\n if device is not None:\n return device\n elif isinstance(data, (tuple, list)):\n for obj in data:\n device = find_device(obj)\n if device is not None:\n return device\n elif isinstance(data, torch.Tensor):\n return data.device", "def device_class(self):\n return SENSOR_TYPES[self._type][3] if self._type in SENSOR_TYPES else None", "def get_device_by_mac_or_None(mac):\n try:\n d = Device.objects.get(mac=mac)\n return d\n except Device.DoesNotExist:\n return None", "def get_device(self):\n raise NotImplementedError()", "def detect():\n id = None\n\n if lsb_release:\n id = lsb_release.get_distro_information()['ID']\n else:\n try:\n 
lsb_cmd = subprocess.Popen(['lsb_release', '--id', '-s'],\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL)\n output = lsb_cmd.communicate()[0]\n if not lsb_cmd.returncode:\n id = output.decode().split('\\n')[0].strip()\n except OSError:\n # id is None in this case\n pass\n\n if id == whatmaps.debiandistro.DebianDistro.id:\n return whatmaps.debiandistro.DebianDistro\n elif id == whatmaps.redhatdistro.FedoraDistro.id:\n return whatmaps.redhatdistro.FedoraDistro\n else:\n if os.path.exists('/usr/bin/dpkg'):\n logging.warning(\"Unknown distro but dpkg found, assuming Debian\")\n return whatmaps.debiandistro.DebianDistro\n elif os.path.exists('/bin/rpm'):\n logging.warning(\"Unknown distro but rpm found, assuming Fedora\")\n return whatmaps.debiandistro.FedoraDistro\n else:\n return None", "def test_get_device_unknown():\n device = get_device(SERIAL, CREDENTIAL, \"unknown\")\n assert device is None", "def get_devices_lsscsi(self):\n\n try:\n message = \"Find SCSI Devices\"\n if self._include_enclosures:\n command = \"lsscsi --generic --transport | egrep 'disk|0x14|enclo'\"\n else:\n command = \"lsscsi --generic --transport | fgrep 'disk|0x14'\"\n pdata = self._run_command(command=command, message=message, logger=self._logger, shell=True)\n #\n # Format:\n # $ lsscsi --generic --transport\n # [0] [1] [2] [3] [4]\n # [0:0:0:0] disk sas:0x5000cca25103b471 /dev/sda /dev/sg0 \n # [0:0:1:0] disk sas:0x5000cca251029301 /dev/sdb /dev/sg1 \n # ...\n # [0:0:14:0] enclosu sas:0x5001636001caa0bd - /dev/sg14\n # [7:0:0:0] cd/dvd usb: 1-1.3:1.2 /dev/sr0 /dev/sg15\n #\n # Special Case:\n # Handle lines without a transport (spaces only). (screen scrapping danger)\n # [0:0:10:0] enclosu sas:0x50030480091d71fd - /dev/sg10\n # [1:0:0:0] disk <spaces> /dev/sdk /dev/sg11 <- INTEL disk!\n #\n # Another SNAFU! (and why I hate screen scrapping!!!)\n # [15:0:53597:0]disk sas:0x5000cca23b359649 /dev/sdg /dev/sg6 \n # [15:0:53598:0]disk sas:0x5000cca23b0c0a99 /dev/sdh /dev/sg7 \n # [15:0:53599:0]disk sas:0x5000cca23b0b7531 /dev/sdi /dev/sg8 \n # ...\n # [15:0:53686:0]enclosu sas:0x5000ccab040001bc - /dev/sg165\n # [15:0:53766:0]enclosu sas:0x5000ccab040001fc - /dev/sg144\n #\n # Evidently, the author of lsscsi did not think of consistent output! 
;(\n #\n for line in pdata['stdout'].splitlines():\n dinfo = line.split()\n device = dict()\n if len(dinfo) < 5:\n m = re.search('(?P<device>disk|\\(0x14\\)|enclosu)', dinfo[0])\n if m:\n device['Device Type'] = m.group('device')\n sas_index = 1\n dev_index = 2\n sg_index = 3\n else:\n continue\n else:\n device['Device Type'] = dinfo[1]\n sas_index = 2\n dev_index = 3\n sg_index = 4\n\n # lsscsi does not understand 'Host Managed' device type.\n if '0x14' in device['Device Type']:\n device['Device Type'] = 'disk'\n\n # Parse remaining information.\n if 'sas:' in dinfo[sas_index]:\n device['SAS Address'] = dinfo[sas_index][4:]\n self._sas_addresses += 1\n else:\n device['SAS Address'] = \"\"\n\n # Note: Enclosure has no driver, so reports '-' for name.\n if '/dev/' in dinfo[dev_index]:\n if self._drives and not dinfo[dev_index] in self._drives:\n continue\n if self._exclude and dinfo[dev_index] in self._exclude:\n continue\n device['Linux Device Name'] = dinfo[dev_index]\n else:\n device['Linux Device Name'] = \"\"\n if '/dev/sg' in dinfo[sg_index]:\n device['SCSI Device Name'] = dinfo[sg_index]\n else:\n device['SCSI Device Name'] = \"\"\n\n self._devices.append(device)\n\n except RuntimeError as exc:\n self._logger.error(\"Failed to acquire SCSI devices: {0}\".format(exc))\n raise exc", "def device():\n return G.DEVICE" ]
[ "0.7095207", "0.66211075", "0.63678074", "0.633608", "0.6279986", "0.62618655", "0.62455034", "0.62315935", "0.6200235", "0.6090791", "0.60804033", "0.60033995", "0.5996161", "0.5954078", "0.5831486", "0.581433", "0.57635665", "0.57375133", "0.57314014", "0.570372", "0.5673131", "0.5667884", "0.56671417", "0.5640382", "0.56399155", "0.5631045", "0.56306034", "0.56200206", "0.55821055", "0.5571608" ]
0.6641357
1
Initializes the sensel to capture all contacts. Returns the initial frame.
def initFrame(): error = sensel.setFrameContent(handle, sensel.FRAME_CONTENT_CONTACTS_MASK) (error, frame) = sensel.allocateFrameData(handle) error = sensel.startScanning(handle) return frame
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initializeGL(self):\n\n self.gl = mg.create_context()\n\n self.recompile()\n\n self.to_capture = False\n self.capture_texture = self.gl.texture((capture_width, capture_height), 4, dtype=\"f4\")\n capture_framebuffer = self.gl.framebuffer([self.capture_texture])\n self.capture_scope = self.gl.scope(capture_framebuffer)\n\n self.to_record = False\n self.record_texture = self.gl.texture((record_width, record_height), 4, dtype=\"f4\")\n record_framebuffer = self.gl.framebuffer([self.record_texture])\n self.record_scope = self.gl.scope(record_framebuffer)\n self.recording = None\n\n self.to_capture_buffer_in = False\n self.to_capture_buffer_out = False", "def initialize(self):\n self.logger.debug('Initializing Basler Camera')\n tl_factory = pylon.TlFactory.GetInstance()\n devices = tl_factory.EnumerateDevices()\n if len(devices) == 0:\n #print('No camera found')\n self.logger.warning('No camera found')\n\n self._driver = None\n for device in devices:\n if self.cam_num in device.GetFriendlyName():\n self._driver = pylon.InstantCamera()\n self._driver.Attach(tl_factory.CreateDevice(device))\n self._driver.Open()\n self.friendly_name = device.GetFriendlyName()\n print(device.GetFriendlyName())\n\n if not self._driver:\n msg = f'Basler {self.cam_num} not found. Please check if the camera is connected'\n self.logger.error(msg)\n return\n\n # self.logger.info(f'Loaded camera {self._driver.GetDeviceInfo().GetModelName()}')\n\n # self._driver.RegisterConfiguration(pylon.SoftwareTriggerConfiguration(), pylon.RegistrationMode_ReplaceAll,\n # pylon.Cleanup_Delete)\n\n #self.config.fetch_all()", "def initialize_first_frame(self):\r\n img = self.cam0_curr_img_msg.image\r\n grid_height, grid_width = self.get_grid_size(img)\r\n\r\n # Detect new features on the frist image.\r\n new_features = self.detector.detect(img)\r\n\r\n # Find the stereo matched points for the newly detected features.\r\n cam0_points = [kp.pt for kp in new_features]\r\n cam1_points, inlier_markers = self.stereo_match(cam0_points)\r\n\r\n cam0_inliers, cam1_inliers = [], []\r\n response_inliers = []\r\n for i, inlier in enumerate(inlier_markers):\r\n if not inlier:\r\n continue\r\n cam0_inliers.append(cam0_points[i])\r\n cam1_inliers.append(cam1_points[i])\r\n response_inliers.append(new_features[i].response)\r\n # len(cam0_inliers) < max(5, 0.1 * len(new_features))\r\n\r\n # Group the features into grids\r\n grid_new_features = [[] for _ in range(self.config.grid_num)]\r\n\r\n for i in range(len(cam0_inliers)):\r\n cam0_point = cam0_inliers[i]\r\n cam1_point = cam1_inliers[i]\r\n response = response_inliers[i]\r\n\r\n row = int(cam0_point[1] / grid_height)\r\n col = int(cam0_point[0] / grid_width)\r\n code = row*self.config.grid_col + col\r\n\r\n new_feature = FeatureMetaData()\r\n new_feature.response = response\r\n new_feature.cam0_point = cam0_point\r\n new_feature.cam1_point = cam1_point\r\n grid_new_features[code].append(new_feature)\r\n\r\n # Sort the new features in each grid based on its response.\r\n # And collect new features within each grid with high response.\r\n for i, new_features in enumerate(grid_new_features):\r\n for feature in sorted(new_features, key=lambda x:x.response, \r\n reverse=True)[:self.config.grid_min_feature_num]:\r\n self.curr_features[i].append(feature)\r\n self.curr_features[i][-1].id = self.next_feature_id\r\n self.curr_features[i][-1].lifetime = 1\r\n self.next_feature_id += 1", "def init_recording(self):\n self.statusBar().showMessage('Initialising...')\n self.streams = resolve_stream('type', 
'EEG')\n self.inlet = StreamInlet(self.streams[0])\n self.timeObj = []\n self.sampleObj = []", "def initialize(self):\n self.candidate_disease_list = []\n self.candidate_symptom_list = []\n self.agent_action = {\n \"turn\":None,\n \"action\":None,\n \"request_slots\":{},\n \"inform_slots\":{},\n \"explicit_inform_slots\":{},\n \"implicit_inform_slots\":{},\n \"speaker\":\"agent\"\n }", "def init_lens(self):\n\n response = self.send_lens_cmd(['00'], fast_mode=False)\n response = self.send_lens_cmd(['0A', '00'], fast_mode=False)\n\n if response['MISO'][1] != 'AA':\n print(response['return_str'])\n raise RuntimeError('Lens initialisation failed')\n\n response = self.send_lens_cmd(['0A', '00'], fast_mode=True)\n\n cmd = ['80', '0A']\n for n in range(10):\n cmd.append('00')\n\n response = self.send_lens_cmd(cmd, fast_mode=True)\n\n self._min_FL = int('0x' + response['MISO'][4], 16)\n self._max_FL = int('0x' + response['MISO'][6], 16)\n\n if self.min_FL == self.max_FL:\n self.lens_desc = '{} mm prime lens'.format(self.min_FL)\n else:\n self.lens_desc = '{}-{} mm tele lens'.format(self.min_FL, self.max_FL)\n\n print('initialised {}'.format(self.lens_desc))", "def _initialize(self):\n if not self._is_initialized:\n self.connect(retries=Camera.CONNECTION_RETRIES)\n self.cam.resolution = (self.resolution['x'], self.resolution['y'])\n self.cam.start_preview()\n time.sleep(2)\n self._is_initialized = True", "def __init__(self, camera_idx):\n self.stream = cv2.VideoCapture(camera_idx)\n self.grabbed, self.frame = self.stream.read()\n self.boundary_lines = findBookBoundaries(self.frame)\n self.stopped = False", "def init(self):\n self.focus_modes = []\n for focus_mode in self['focusModes']:\n self.focus_modes.append(\\\n {'modeName': focus_mode.modeName,\n 'lensCombination': eval(focus_mode.lensCombination),\n 'lensModes': eval(focus_mode.lensModes),\n 'size': eval(focus_mode.size),\n 'message': eval(focus_mode.message),\n 'diverg': eval(focus_mode.divergence)})\n self.focus_motors_dict = {}\n\n focus_motors = []\n focus_motors = eval(self.getProperty('focusMotors'))\n\n for focus_motor in focus_motors:\n self.focus_motors_dict[focus_motor] = []\n\n #TODO\n self.motors_groups = [self.getObjectByRole(\"P14ExpTbl\"),\n self.getObjectByRole(\"P14KB\"),\n self.getObjectByRole(\"P14DetTrans\"),\n self.getObjectByRole(\"P14BCU\"),\n self.getObjectByRole(\"slitsMotors\")]\n \n\n if len(self.motors_groups) > 0:\n for motors_group in self.motors_groups:\n self.connect(motors_group,\n 'mGroupFocModeChanged',\n self.motor_group_focus_mode_changed)\n else:\n logging.getLogger(\"HWR\").debug('BeamFocusing: No motors defined')\n self.active_focus_mode = self.focus_modes[0]['modeName']\n self.size = self.focus_modes[0]['size']\n self.update_values()\n\n self.cmd_set_calibration_name = self.getCommandObject(\\\n 'cmdSetCallibrationName')\n try:\n self.cmd_set_phase = eval(self.getProperty('setPhaseCmd'))\n except:\n pass", "def printFrame(frame, info):\n global analysis_frame\n if frame.n_contacts > 0:\n if analysis_frame == None:\n analysis_frame = frame\n # print(\"analysis_frame: \", analysis_frame.__dir__(),\"\\n\")\n # #print(\"n_contact: \", analysis_frame.n_contacts.contents.__dir__(),\"\\n\")\n # print(\"contacts: \", analysis_frame.contacts.contents.__dir__(),\"\\n\")\n # print(\"accel_data: \", analysis_frame.accel_data.contents.__dir__(),\"\\n\")\n # # print(\"n_contact: \", analysis_frame.n_contacts.contents,\"\\n\")\n # print(\"contacts: \", 
analysis_frame.contacts.contents.content_bit_mask.__dir__(),\"\\n\")\n # print(\"accel_data: \", analysis_frame.accel_data.contents.__dir__(),\"\\n\")\n\n #print(\"\\nNum Contacts: \", frame.n_contacts)\n for n in range(frame.n_contacts):\n c = frame.contacts[n]\n #print(\"Contact ID: \", c.id)\n #print(\"Contact x, y position: \", c.x_pos, c.y_pos)\n #print(\"Contact Total Force: \", c.total_force)\n\n # send osc\n client.send_message(\"/this/is/a/different/channel\", [c.id, c.x_pos, c.y_pos, c.total_force])\n client.send_message(\"/this/is/a/channel\", [c.id, c.x_pos, c.y_pos, c.total_force])\n\n if c.state == sensel.CONTACT_START:\n sensel.setLEDBrightness(handle, c.id, 100)\n elif c.state == sensel.CONTACT_END:\n sensel.setLEDBrightness(handle, c.id, 0)", "def __init__(self, frame):\n self.frame = frame\n self._configure()", "def start(self):\n self.show_greeting()\n self.read_frame()", "def initial_sequences_pipeline(self):\n\n input_calcs = gen_ga_input_calcs(self.params)\n\n # Creates networks of interacting residues from input dataframe\n if self.barrel_or_sandwich == '2.60':\n sheet_ids = list(set(self.input_df['sheet_number'].tolist()))\n if len(sheet_ids) != 2:\n raise Exception(\n 'Incorrect number of sheets in input beta-sandwich structure'\n )\n initial_network = input_calcs.generate_networks()\n\n # Adds side-chains in order to generate a population of starting\n # sequences to be fed into the genetic algorithm\n initial_sequences_dict = OrderedDict()\n print('Generating initial sequence population for backbone model')\n if self.method_initial_side_chains == 'random':\n initial_sequences_dict = input_calcs.add_random_initial_side_chains(initial_network)\n elif self.method_initial_side_chains in ['rawpropensity', 'rankpropensity']:\n raw_or_rank = self.method_initial_side_chains.replace('propensity', '')\n initial_sequences_dict = input_calcs.add_initial_side_chains_from_propensities(\n initial_network, raw_or_rank, False, ''\n )\n\n return initial_network, initial_sequences_dict", "def initializeGL(self):\n self._graphicsInitialized = True\n if self._context:\n self._createSceneviewer()\n self.graphicsInitialized.emit()\n # initializeGL end", "def init_all_frames(self) -> bool:\n raise NotImplementedError", "def initialize(self):\n self.ros.enable()\n self.phone_link.enable()", "def new_frame(self):\n self.eyes_frame = numpy.zeros(self.shape, numpy.uint8)", "def initialize(self):\n return", "def startup(self):\n self.prev_gray = None\n self.frame_idx = 1\n self.tracks = []\n self.fps = []\n self.vid_info = None\n self.track_new_points_count = 0", "def __init__(self, frames):\n self._frames = frames", "def __init__(self, frames):\n self._frames = frames", "def __init__(self, frames):\n self._frames = frames", "def __init__(self, frames):\n self._frames = frames", "def initialize(self):\n self.Update()\n ViewportManager.updateAll()\n self.wxStep()\n ViewportManager.initializeAll()\n # Position the camera\n if base.trackball is not None:\n base.trackball.node().setPos(0, 30, 0)\n base.trackball.node().setHpr(0, 15, 0)\n\n # to make persp view as default\n self.perspViewMenuItem.Check()\n self.onViewChange(None, 3)\n\n # initializing direct\n if self.fStartDirect:\n base.startDirect(fWantTk = 0, fWantWx = 0)\n\n base.direct.disableMouseEvents()\n newMouseEvents = [\"_le_per_%s\"%x for x in base.direct.mouseEvents] +\\\n [\"_le_fro_%s\"%x for x in base.direct.mouseEvents] +\\\n [\"_le_lef_%s\"%x for x in base.direct.mouseEvents] +\\\n [\"_le_top_%s\"%x for x in 
base.direct.mouseEvents]\n base.direct.mouseEvents = newMouseEvents\n base.direct.enableMouseEvents()\n\n base.direct.disableKeyEvents()\n keyEvents = [\"_le_per_%s\"%x for x in base.direct.keyEvents] +\\\n [\"_le_fro_%s\"%x for x in base.direct.keyEvents] +\\\n [\"_le_lef_%s\"%x for x in base.direct.keyEvents] +\\\n [\"_le_top_%s\"%x for x in base.direct.keyEvents]\n base.direct.keyEvents = keyEvents\n base.direct.enableKeyEvents()\n\n base.direct.disableModifierEvents()\n modifierEvents = [\"_le_per_%s\"%x for x in base.direct.modifierEvents] +\\\n [\"_le_fro_%s\"%x for x in base.direct.modifierEvents] +\\\n [\"_le_lef_%s\"%x for x in base.direct.modifierEvents] +\\\n [\"_le_top_%s\"%x for x in base.direct.modifierEvents]\n base.direct.modifierEvents = modifierEvents\n base.direct.enableModifierEvents()\n\n base.direct.cameraControl.lockRoll = True\n base.direct.setFScaleWidgetByCam(1)\n\n unpickables = [\n \"z-guide\",\n \"y-guide\",\n \"x-guide\",\n \"x-disc-geom\",\n \"x-ring-line\",\n \"x-post-line\",\n \"y-disc-geom\",\n \"y-ring-line\",\n \"y-post-line\",\n \"z-disc-geom\",\n \"z-ring-line\",\n \"z-post-line\",\n \"centerLines\",\n \"majorLines\",\n \"minorLines\",\n \"Sphere\",]\n\n for unpickable in unpickables:\n base.direct.addUnpickable(unpickable)\n\n base.direct.manipulationControl.optionalSkipFlags |= SKIP_UNPICKABLE\n base.direct.manipulationControl.fAllowMarquee = 1\n base.direct.manipulationControl.supportMultiView()\n base.direct.cameraControl.useMayaCamControls = 1\n base.direct.cameraControl.perspCollPlane = self.perspView.collPlane\n base.direct.cameraControl.perspCollPlane2 = self.perspView.collPlane2\n\n for widget in base.direct.manipulationControl.widgetList:\n widget.setBin('gui-popup', 0)\n widget.setDepthTest(0)\n\n # [gjeon] to intercept messages here\n base.direct.ignore('DIRECT-delete')\n base.direct.ignore('DIRECT-select')\n base.direct.ignore('DIRECT-preDeselectAll')\n base.direct.ignore('DIRECT-toggleWidgetVis')\n base.direct.fIgnoreDirectOnlyKeyMap = 1\n\n # [gjeon] do not use the old way of finding current DR\n base.direct.drList.tryToGetCurrentDr = False\n\n else:\n base.direct=None\n #base.closeWindow(base.win)\n base.win = base.winList[3]", "def initialize(self):\r\n pass", "def initialize(self):\r\n pass", "def __init__(self, frame):\n self.frame = frame", "def initialize(self):\n pass", "def initialize(self):\n pass", "def initialize(self):\n pass" ]
[ "0.5816397", "0.574967", "0.55902994", "0.5495237", "0.54497045", "0.5447377", "0.5339393", "0.52684057", "0.5208814", "0.51819396", "0.5139388", "0.51059765", "0.50550157", "0.5049098", "0.50385374", "0.5037612", "0.5022273", "0.5020498", "0.50094044", "0.49824604", "0.49824604", "0.49824604", "0.49824604", "0.49739182", "0.4973472", "0.4973472", "0.49718398", "0.49667177", "0.49667177", "0.49667177" ]
0.7684333
0
Test module university.py by downloading university.csv and testing shape of extracted data has 62 rows and 17 columns
def test_university(): test_path = tempfile.mkdtemp() x_train, metadata = university(test_path) try: assert x_train.shape == (62, 17) except: shutil.rmtree(test_path) raise()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n\n #get the csv file into a data-frame\n universities_df = pd.read_csv('universities_data.csv', encoding = 'utf-8-sig')\n universities_names_list = universities_df['name'].tolist()\n\n #get list of university objects\n url = 'http://universities.hipolabs.com/search?country=Israel'\n api_universities = Get_universities(url)\n list_of_universities = api_universities.get_universities_info()\n\n #to see if we got new entities or not for exporting to csv later..\n is_new_entities = False\n\n for university in list_of_universities:\n if university.name not in universities_names_list:\n is_new_entities = True\n universities_df= universities_df.append(pd.DataFrame({\n 'alpha_two_code': [university.alpha_two_code], \n 'country': [university.country],\n 'web_pages': [str(university.web_pages)],\n 'domains': [str(university.domains)],\n 'name': [university.name],\n 'state_province':[str(university.state_province)]}) , ignore_index = True)\n\n #export back to csv if true\n if is_new_entities: \n print('we got new entities!') \n universities_df.to_csv('universities_data.csv', encoding = 'utf-8-sig', index = False)\n else:print('no new universities for now!')", "def main():\n national_university_table()", "def load_utlization(path):\n df = pd.read_csv(f\"{raw_data}\\\\{path}\", parse_dates=[\"AdmissionDate\"])\n\n df.rename(\n columns={\"MemberID\": \"member_id\", \"LOSDays\": \"los\", \"FacilityName\": \"facility\"},\n inplace=True,\n )\n\n df.columns = clean_table_columns(df.columns)\n\n facility_col = [col for col in df.columns if \"facility\" in col][0]\n\n df = cognify_facility_changes(df, facility_col)\n\n df = df[df.member_id != 1003]\n return df", "def main():\n df_titanic = pd.read_csv('train.csv', header=None)\n print df_titanic.describe()", "def test_read_in_file(self):\r\n filename = \"CrimeDataSmall.csv\"\r\n\r\n lst = cds.read_in_file(filename)\r\n\r\n self.assertIsInstance(lst, list, \"Returned datatype should be a list\")\r\n self.assertEqual(len(lst), 4, \"There should be 4 rows returned from CrimeDataSmall 1 header and 3 data rows\")\r\n self.assertEqual(len(lst[0]), 23, \"Each row should have 23 columns\")\r\n self.assertEqual(lst[0][1], \"Reported_Date\", \"Column 1 was incorrect header\")\r\n self.assertEqual(lst[0][7], \"Offense\", \"Column 7 was incorrect header\")\r\n self.assertEqual(lst[0][13], \"Zip Code\", \"Column 13 header was incorrect\")\r\n self.assertEqual(lst[1][1], \"03/19/2019\", \"Column 1 was incorrect in first data row\")\r\n self.assertEqual(lst[1][7], \"Vehicular – Non-Injury\", \"Column 7 was incorrect in first data row\")\r\n self.assertEqual(lst[1][13], \"64161\", \"Column 13 in first data row was incorrect\")\r\n self.assertEqual(lst[3][1], \"03/27/2019\", \"Column 1 was incorrect in 3rd data row\")\r\n self.assertEqual(lst[3][7], \"Embezzlement\", \"Column 7 was incorrect 3rd data row\")\r\n self.assertEqual(lst[3][13], \"64112\", \"Column 13 3rd data row was incorrect\")\r\n self.assertEqual(lst[3][11], \"4600, S WORNALL RD\", \"Column 11 3rd data row was incorrect. 
Use csv module to read \")", "def load_data():\n data = pd.read_csv(\"https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data\", header=None)\n\n # utiliza somente as duas primeiras classes\n data = data[:100]\n # transforma as classes em 0 e 1\n data[4] = np.where(data.iloc[:, -1] == 'Iris-setosa', 0, 1)\n data = np.asmatrix(data, dtype='float64')\n return data", "def GetUsCities():\n return GetDataFromCsvFile('us_cities.csv')", "def test_load_UCR_UEA_dataset():\n X, y = load_UCR_UEA_dataset(name=\"UnitTest\")\n assert isinstance(X, pd.DataFrame) and isinstance(y, np.ndarray)\n assert X.shape == (42, 1) and y.shape == (42,)", "def main():\n filename = \"data/exercise.csv\"\n analyze(filename)", "def load_data(self):\n with open('data/fordTrain.csv') as f:\n data = csv.reader(f, delimiter=',')\n train = [x for i, x in enumerate(data) if i > 0] \n # Extract features and target variable separately\n trainx = [x[3:] for x in train]\n trainy = [x[2] for x in train]\n\n with open('data/fordTest.csv') as f:\n data = csv.reader(f, delimiter=',')\n testx = [x[3:] for i, x in enumerate(data) if i > 0] \n\n with open('data/Solution.csv') as f:\n data = csv.reader(f, delimiter=',')\n testy = [x[2] for i, x in enumerate(data) if i > 0] \n\n # Extract features and target variable, convert to numpy array\n trainx = np.asarray(trainx, dtype=np.float32)\n trainy = np.asarray(trainy, dtype=np.int8)\n testx = np.asarray(testx, dtype=np.float32)\n testy = np.asarray(testy, dtype=np.int8)\n\n # Return training and test sets\n trainSet = Dataset(trainx, trainy)\n testSet = Dataset(testx, testy)\n return trainSet, testSet", "def test_number_of_rows_with_header(self):\n url=\"http://archive.ics.uci.edu/ml/machine-learning-databases/forest-fires/forestfires.csv\"\n\n reader=requester.url_to_df(url)\n rows,columns=reader.shape\n self.assertEqual(rows,517)", "def load_occupancy_dataset(trainsize=500, testsize=1000):\n filename = 'datasets/numericsequence.csv'\n dataset = loadcsv(filename)\n trainset, testset = splitdataset(dataset, trainsize, testsize)\n return trainset, testset", "def read_data(city='Chicago'):\n df = pd.read_csv('train.csv')\n df = df[df.City==city]\n\n # I will be using this dictionary to convert direction to degrees\n degrees = {'N':0, 'NE':45, 'E':90, 'SE':135, 'S':180, 'SW':225, 'W':270, 'NW':315}\n\n df[\"EntryHeading_deg\"] = df.EntryHeading.apply(lambda x:degrees[x])\n df[\"ExitHeading_deg\"] = df.ExitHeading.apply(lambda x:degrees[x])\n df[\"TurnDegree\"] = (df.EntryHeading_deg-df.ExitHeading_deg).apply(lambda x: x if abs(x) <=180 else (x+360 if x<0 else x-360))\n df[\"TurnDegree\"] = df.TurnDegree.apply(lambda x: x if x != -180 else x*-1)\n\n # Lets assign a number(StreetId) to each street\n all_streets = np.concatenate([df.ExitStreetName.reindex().values, df.EntryStreetName.reindex().values])\n # there are some nan values so lets just replace them with Unknown\n street_name_list = ['Unknown' if type(x)==type(0.0) else x for x in all_streets]\n street_names = {name: num for num, name in enumerate(street_name_list)}\n df[\"EntryStreetId\"] = np.array([street_names[x] if x in street_names else -999 for x in df.EntryStreetName])\n df[\"ExitStreetId\"] = np.array([street_names[x] if x in street_names else -999 for x in df.ExitStreetName])\n\n # we also want to categorize the street by its type (road, boulevard, ...)\n street_types = {n: i for i, n in enumerate(np.unique([x.split()[-1] for x in street_names.keys()]))}\n street_name_to_type = {}\n for name in 
street_names.keys():\n typ = name.split()[-1]\n street_name_to_type[name] = street_types[typ]\n df[\"EntryStreetType\"] = np.array([street_name_to_type[x] if x in street_names else -999 for x in df.EntryStreetName])\n df[\"ExitStreetType\"] = np.array([street_name_to_type[x] if x in street_names else -999 for x in df.ExitStreetName])\n\n df[\"EnterHighway\"] = np.array([1 if type(x)==type('') and x.split()[-1] in ['Broadway', 'Parkway', 'Expressway', 'Highway'] else 0 for x in df.EntryStreetName])\n df[\"ExitHighway\"] = np.array([1 if type(x)==type('') and x.split()[-1] in ['Broadway', 'Parkway', 'Expressway', 'Highway'] else 0 for x in df.ExitStreetName])\n df['Season'] = np.array([1 if month in (12,1,2) else 2 if month in (6,7,8) else 3 for month in df.Month.reindex().values])\n df['RushHour'] = np.array([1 if hour in (7,8,9) else 2 if hour in (16,17,18) else 3 if hour>=10 and hour<=15 else 4 for hour in df.Hour])\n return df", "def test_csv_reader_data_contents(process_data):\n data = process_data(file_name_or_type='clean_map.csv')\n\n # Check row types\n for row in data:\n assert(isinstance(row['Country'], str))\n assert(isinstance(row['City'], str))\n assert(isinstance(row['State_Or_Province'], str))\n assert(isinstance(row['Lat'], float))\n assert(isinstance(row['Long'], float))\n assert(isinstance(row['Altitude'], float))\n\n # Basic data checks\n assert len(data) == 180 # We have collected 180 rows\n assert data[0]['Country'] == 'Andorra'\n assert data[106]['Country'] == 'Japan'", "def load_unicef_data():\n fname = 'SOWC_combined_simple.csv'\n \n \n # Uses pandas to help with string-NaN-numeric data.\n data = pd.read_csv(fname, na_values='_', encoding='latin1')\n \n \n # Strip countries title from feature names.\n features = data.axes[1][1:]\n # Separate country names from feature values.\n countries = data.values[:,0]\n values = data.values[:,1:]\n # Convert to numpy matrix for real.\n values = np.asmatrix(values,dtype='float64')\n # Modify NaN values (missing values).\n mean_vals = nanmean(values, axis=0)\n inds = np.where(np.isnan(values))\n values[inds] = np.take(mean_vals, inds[1])\n return (countries, features, values)", "def read_data():\n data = pd.read_csv('input_data/Preply_tutor_views_datasaet.csv')\n return data", "def read_data(self):\n fpath = './data/surveys.csv'\n self.data = pd.read_csv(fpath, header=0, low_memory=False)\n #print(self.data.head(n=5))\n print(self.data.shape)", "def get_national_university_data(univ_url):\n f_name = 'national_university_html.json'\n base_url = 'https://www.usnews.com'\n html_cache = load_cache(f_name)\n\n if univ_url not in html_cache:\n resp = requests.get(base_url + univ_url, headers=agent)\n html_cache[univ_url] = resp.text\n save_cache(html_cache, f_name)\n\n soup = BeautifulSoup(html_cache[univ_url], 'html.parser')\n\n map_chunk = soup.find('section', attrs={'class': 'hero-stats-widget-map'})\n address = map_chunk.find('p').find('strong').text.strip()\n info_list = soup.find_all('div', attrs={'class': 'block-looser'})[1].find_all('ul')\n stats_list = soup.find('section', attrs={'class': 'hero-stats-widget-stats'}).find('ul').find_all('strong')\n salary_chunk = soup.find_all('div', attrs={'class': 'block-looser'})[4].find('span', attrs={'class': 'text-strong'})\n\n if univ_url + '/student-life' not in html_cache:\n life_resp = requests.get(base_url + univ_url + '/student-life', headers=agent)\n html_cache[univ_url + '/student-life'] = life_resp.text\n save_cache(html_cache, f_name)\n\n life_soup = 
BeautifulSoup(html_cache[univ_url + '/student-life'], 'html.parser')\n life_chunk = life_soup.find('div', attrs={'id': 'StudentBody'})\n gender_chunk = life_chunk.find('span', attrs={'data-test-id': 'v_percent'})\n\n if univ_url + '/academics' not in html_cache:\n academic_resp = requests.get(base_url + univ_url + '/academics', headers=agent)\n html_cache[univ_url + '/academics'] = academic_resp.text\n save_cache(html_cache, f_name)\n\n academic_soup = BeautifulSoup(html_cache[univ_url + '/academics'], 'html.parser')\n faculty_chunk = academic_soup.find('div', attrs={'data-field-id': 'vStudentFacultyRatio'})\n\n found_year = info_list[1].find('span', attrs={'class': 'heading-small'}).text\n if found_year == 'N/A':\n found_year = None\n else:\n found_year = int(found_year)\n\n endowment = info_list[5].find('span', attrs={'class': 'heading-small'}).text\n endowment = endowment.replace('$', '').replace(' +', '').strip()\n if endowment == 'N/A':\n endowment = None\n else:\n endowment_list = endowment.split()\n if len(endowment_list) == 1:\n endowment = float(endowment.replace(',', '')) / 1000\n elif endowment_list[1] == 'billion':\n endowment = float(endowment_list[0]) * 1000\n else:\n endowment = float(endowment_list[0])\n\n median_salary = salary_chunk.text.replace('*', '').strip() if salary_chunk is not None else None\n if median_salary is not None:\n median_salary = int(median_salary.replace('$', '').replace(',', ''))\n\n student_faculty = faculty_chunk.find('p').find('span', attrs={'class': 'text-strong'}).text.strip()\n if student_faculty == 'N/A':\n student_faculty = None\n else:\n student_faculty = int(student_faculty.split(':')[0])\n\n tuition_in_state = stats_list[0].text.split()[0]\n if tuition_in_state == 'N/A':\n tuition_in_state = None\n else:\n tuition_in_state = int(tuition_in_state.replace('$', '').replace(',', ''))\n\n female = gender_chunk.text if gender_chunk is not None else None\n if female is not None:\n female = float(female.replace('%', '')) / 100\n\n univ_dict = dict(name=soup.find('h1', attrs={'class': 'hero-heading'}).text.strip().replace('1', ''),\n ranking=soup.find('strong').text.strip().split()[0].replace(\"#\", \"\").replace('-', ' - '),\n state=address.rsplit(', ', 1)[1],\n city=address.rsplit(', ', 1)[0],\n type=info_list[0].find('span', attrs={'class': 'heading-small'}).text.split(', ')[0],\n found_year=found_year,\n endowment=endowment,\n median_salary=median_salary,\n student_faculty=student_faculty,\n female=female,\n tuition_in_state=tuition_in_state)\n\n if univ_dict['type'] == 'Public':\n tuition_out_state = stats_list[1].text.split()[0]\n enrollment = stats_list[3].text\n else:\n tuition_out_state = stats_list[0].text.split()[0]\n enrollment = stats_list[2].text\n\n if tuition_out_state == 'N/A':\n tuition_out_state = None\n else:\n tuition_out_state = int(tuition_out_state.replace('$', '').replace(',', ''))\n\n if enrollment == 'N/A':\n enrollment = None\n else:\n enrollment = int(enrollment.replace(',', ''))\n\n univ_dict.update(dict(tuition_out_state=tuition_out_state,\n enrollment=enrollment))\n\n return univ_dict", "def unicef_data():\n workbook = xlrd.open_workbook('unicef_oct_2014.xlsx')\n sheet = workbook.sheets()[0]\n\n title_rows = zip(sheet.row_values(4), sheet.row_values(5))\n titles = [t[0] + ' ' + t[1] for t in title_rows]\n titles = [t.strip() for t in titles]\n\n country_rows = [sheet.row_values(r) for r in range(6, 114)]\n cleaned_rows = []\n\n for row in country_rows:\n cleaned_row = [remove_bad_chars(rv) for rv in row]\n 
cleaned_rows.append(cleaned_row)\n\n example_row = sheet.row(6)\n types = get_types(example_row)\n\n table = agate.Table(cleaned_rows, titles, types)\n ranked = table.compute([('Total Child Labor Rank',\n agate.Rank('Total (%)', reverse=True)), ])\n\n return ranked", "def load_data():\n df = pd.read_csv(\"https://raw.githubusercontent.com/Andrea-Giuliani/Python-Project/master/data/final_dataset.csv\",sep=',') \n return df", "def absentee(path):\n import pandas as pd\n path = os.path.expanduser(path)\n filename = 'absentee.csv'\n if not os.path.exists(os.path.join(path, filename)):\n url = 'http://dustintran.com/data/r/pscl/absentee.csv'\n maybe_download_and_extract(path, url,\n save_file_name='absentee.csv',\n resume=False)\n\n data = pd.read_csv(os.path.join(path, filename), index_col=0,\n parse_dates=True)\n x_train = data.values\n metadata = {'columns': data.columns}\n return x_train, metadata", "def load_data():\n data = pd.read_csv('datasets/housing.csv')\n prices = data['MEDV']\n features = data.drop(['MEDV'], axis=1) # remove it from data as we need to predict it\n print(data.head()) # prints top columns 5 for ex\n return [features, prices]", "def read_education() -> pd.DataFrame:\n\n school_df = pd.read_csv(\"data/Expected years of schooling (years).csv\", header=2, usecols=[1, 32], names=[\"Country\", \"Education\"])\n\n index = school_df[school_df[\"Country\"]==\"Iran (Islamic Republic of)\"].index.values[0]\n school_df.loc[index, \"Country\"] = \"Iran\"\n index = school_df[school_df[\"Country\"] == \"United States\"].index.values[0]\n school_df.loc[index, \"Country\"] = \"US\"\n index = school_df[school_df[\"Country\"] == \"Russian Federation\"].index.values[0]\n school_df.loc[index, \"Country\"] = \"Russia\"\n\n school_df = school_df.dropna()\n\n return school_df", "def test_univariate(self):\n data = load_dataset(data_format=\"numpy\", standardize=True)\n assert data.shape[0] > 131, \"Imported time series collection has the wrong shape\"\n assert data.shape[-1] == 1000, \"Imported time series collection has the wrong shape\"", "def __loaddata(filename, datatype='flightcsv', minprob=0.001, maxprob=0.20):\n if datatype is 'flightcsv':\n return extract_flight_csv(filename, minprob=minprob, maxprob=maxprob)\n else:\n raise Exception('unknown datatype %s' % datatype)", "def read_csv():", "def get_test_data():\n\n # test set\n test = pd.read_csv(\"test.csv\")\n\n return test", "def extractdata(classification, testfile):\n train = pd.read_table(classification, header=None, engine='python')\n test = pd.read_table(testfile, header=None, engine='python')\n X_train, X_test = train.values[:,0:2], test.values[:,0:2]\n y_train, y_test = train.values[:,2], test.values[:,2]\n return X_train, y_train, X_test, y_test", "def test_education():\n test_path = tempfile.mkdtemp()\n x_train, metadata = education(test_path)\n try:\n assert x_train.shape == (50, 6)\n except:\n shutil.rmtree(test_path)\n raise()", "def main():\n raw_data = pd.read_csv('data/raw_hospital_data.csv')\n\n fe_data = new_features(raw_data)\n fe_data = compressing_admission_type(data)\n fe_data = age_to_cat(fe_data)\n fe_data = compressing_careunit(fe_data)\n fe_data = compressing_curr_serv(fe_data)\n fe_data = compressing_ethnicity(fe_data)\n fe_data = compressing_marital_status(fe_data)\n fe_data = compressing_religion(fe_data)\n fe_data = compressing_admit_location(fe_data)\n fe_data = compress_icd9_codes(fe_data)\n\n fe_data.to_csv('data/feature_engineering_data.csv')" ]
[ "0.6574307", "0.6403017", "0.5885163", "0.5802315", "0.5745275", "0.5678409", "0.566421", "0.56549424", "0.56502306", "0.56443703", "0.56308216", "0.56158274", "0.5613451", "0.55993706", "0.559208", "0.5591343", "0.55832076", "0.55831206", "0.55750996", "0.55711734", "0.5567999", "0.55679137", "0.5565138", "0.55582434", "0.5557576", "0.5554288", "0.55453026", "0.5537685", "0.55176544", "0.54906744" ]
0.7281142
0
Load a numpy file containing embeddings of video frames and uniformly select one frame out of every hp.sample_frames frames. The sampling rate is reduced if the input length is less than the target length (due to CTC loss).
def load_file(filename,label_len):
    embedding = np.load(filename)
    # sample_frames = hp.sample_frames
    # while(1):
    #     n_frames = (embedding.shape[0]-1)/sample_frames + 1
    #     if (embedding.shape[0]-1)%sample_frames!=0:
    #         n_frames += 1
    #     if n_frames>=label_len:
    #         break
    #     sample_frames-=1
    arr = []
    for i in range(0,embedding.shape[0],hp.sample_frames):
        arr.append(embedding[i])
    #always include ending frame
    if (embedding.shape[0]-1)%hp.sample_frames!=0:
        arr.append(embedding[-1])
    length = len(arr)
    arr = np.array(arr)
    return arr, length
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_train_video(opt, frame_path, Total_frames):\n clip = []\n i = 0\n loop = 0\n\n # choosing a random frame\n if Total_frames <= opt.sample_duration: \n loop = 1\n start_frame = 0\n else:\n start_frame = np.random.randint(0, Total_frames - opt.sample_duration)\n \n if opt.modality == 'RGB': \n while len(clip) < opt.sample_duration:\n try:\n im = Image.open(os.path.join(frame_path, '%05d.jpg'%(start_frame+i+1)))\n clip.append(im.copy())\n im.close()\n except:\n print('ERROR no such image {}'.format(os.path.join(frame_path, '%05d.jpg'%(i+1))))\n i += 1\n \n if loop==1 and i == Total_frames:\n i = 0\n\n elif opt.modality == 'Flow': \n while len(clip) < 2*opt.sample_duration:\n try:\n im_x = Image.open(os.path.join(frame_path, 'TVL1jpg_x_%05d.jpg'%(start_frame+i+1)))\n im_y = Image.open(os.path.join(frame_path, 'TVL1jpg_y_%05d.jpg'%(start_frame+i+1)))\n clip.append(im_x.copy())\n clip.append(im_y.copy())\n im_x.close()\n im_y.close()\n except:\n pass\n i += 1\n \n if loop==1 and i == Total_frames:\n i = 0\n \n elif opt.modality == 'RGB_Flow':\n while len(clip) < 3*opt.sample_duration:\n try:\n im = Image.open(os.path.join(frame_path, '%05d.jpg'%(start_frame+i+1)))\n im_x = Image.open(os.path.join(frame_path, 'TVL1jpg_x_%05d.jpg'%(start_frame+i+1)))\n im_y = Image.open(os.path.join(frame_path, 'TVL1jpg_y_%05d.jpg'%(start_frame+i+1)))\n clip.append(im.copy())\n clip.append(im_x.copy())\n clip.append(im_y.copy())\n im.close()\n im_x.close()\n im_y.close()\n except:\n pass\n i += 1\n \n if loop==1 and i == Total_frames:\n i = 0\n return clip", "def preprocess_sample(file, params):\n\n videoFile = file + \".mp4\"\n audioFile = file + \".wav\"\n roiFile = file + \".png\"\n visualFeaturesFile = file + \".npy\"\n\n roiSize = params[\"roiSize\"]\n normMean = params[\"normMean\"]\n normStd = params[\"normStd\"]\n vf = params[\"vf\"]\n device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n\n #Extract the audio from the video file using the FFmpeg utility and save it to a wav file.\n v2aCommand = \"ffmpeg -y -v quiet -i \" + videoFile + \" -ac 1 -ar 16000 -vn \" + audioFile\n os.system(v2aCommand)\n\n\n #for each frame, resize to 224x224 and crop the central 112x112 region\n captureObj = cv.VideoCapture(videoFile)\n roiSequence = list()\n while (captureObj.isOpened()):\n ret, frame = captureObj.read()\n if ret == True:\n grayed = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)\n grayed = grayed/255\n grayed = cv.resize(grayed, (224,224))\n roi = grayed[int(112-(roiSize/2)):int(112+(roiSize/2)), int(112-(roiSize/2)):int(112+(roiSize/2))]\n roiSequence.append(roi)\n else:\n break\n captureObj.release()\n cv.imwrite(roiFile, np.floor(255*np.concatenate(roiSequence, axis=1)).astype(np.int))\n\n\n #normalise the frames and extract features for each frame using the visual frontend\n #save the visual features to a .npy file\n inp = np.stack(roiSequence, axis=0)\n inp = np.expand_dims(inp, axis=[1,2])\n inp = (inp - normMean)/normStd\n inputBatch = torch.from_numpy(inp)\n inputBatch = (inputBatch.float()).to(device)\n vf.eval()\n with torch.no_grad():\n outputBatch = vf(inputBatch)\n out = torch.squeeze(outputBatch, dim=1)\n out = out.cpu().numpy()\n np.save(visualFeaturesFile, out)\n return", "def sample_frames(frame_dir, fps, visualize_sample_rate):\n visualize_every_x_frames = visualize_sample_rate * int(fps)\n sampled_frames = np.empty((0, 3, IMG_DIM, IMG_DIM), dtype=np.float32) # B, C, H, W\n i = 0\n for file in sorted(os.listdir(frame_dir)):\n if i % visualize_every_x_frames == 0:\n img 
= skimage.img_as_float(skimage.io.imread(os.path.join(frame_dir, file))).astype(np.float32)\n img = skimage.transform.resize(img, (IMG_DIM, IMG_DIM)) # H, W, C\n img = img.swapaxes(1, 2).swapaxes(0, 1) # C, H, W\n sampled_frames = np.append(sampled_frames, np.array([img]), axis=0)\n i += 1\n logger.debug(\"total number of frames: {}\".format(i))\n return sampled_frames", "def sample_generator(self, data, index):\r\n out = []\r\n frames = data[\"video\"]\r\n for speed_idx, speed in enumerate(self.speed_set):\r\n # generate all the samples according to the speed set\r\n num_input_frames, h, w, c = frames.shape\r\n frame_idx = random.randint(0, num_input_frames-1)\r\n selected_frame = frames[frame_idx] # H, W, C\r\n\r\n # standardize the frame size\r\n if self.cfg.PRETRAIN.FRAME_SIZE_STANDARDIZE_ENABLE: \r\n selected_frame = self.frame_size_standardize(selected_frame)\r\n \r\n # generate the sample index \r\n h, w, c = selected_frame.shape\r\n speed_x, speed_y = speed\r\n start_x, end_x = self.get_crop_params(speed_x/(self.num_speeds//2), w)\r\n start_y, end_y = self.get_crop_params(speed_y/(self.num_speeds//2), h)\r\n intermediate_x = (torch.linspace(start_x, end_x, self.num_frames).long()).clamp_(0, w-self.crop_size)\r\n intermediate_y = (torch.linspace(start_y, end_y, self.num_frames).long()).clamp_(0, h-self.crop_size)\r\n \r\n frames_out = torch.empty(\r\n self.num_frames, self.crop_size, self.crop_size, c, device=frames.device, dtype=frames.dtype\r\n )\r\n\r\n for t in range(self.num_frames):\r\n frames_out[t] = selected_frame[\r\n intermediate_y[t]:intermediate_y[t]+self.crop_size, intermediate_x[t]:intermediate_x[t]+self.crop_size, :\r\n ]\r\n\r\n # performs augmentation on the generated image sequence\r\n if self.transform is not None:\r\n frames_out = self.transform(frames_out)\r\n \r\n # applies static mask\r\n if self.static_mask_enable:\r\n frames_out = self.static_mask(frames_out)\r\n out.append(frames_out)\r\n out = torch.stack(out)\r\n data[\"video\"] = out\r\n return data", "def _subsample_frames(self, video_clip_frames):\n subsampled_frames = []\n current_ix = 0\n step_size = len(video_clip_frames) / float(config.RGB_N_FRAMES)\n for _ in range(config.RGB_N_FRAMES):\n frame = video_clip_frames[int(current_ix)]\n subsampled_frames.append(frame)\n current_ix += step_size\n\n return np.array(subsampled_frames)", "def sample(self, input, h, c, max_length):\n sampled_ids = []\n input = self.embed(input) \n for i in range(max_length): \n hiddens, (h, c) = self.decoder(input, (h, c)) # (batch_size, 1, hidden_size), \n outputs = self.linear(hiddens.squeeze(1)) # (batch_size, vocab_size)\n _, predicted = outputs.max(1)\n sampled_ids.append(predicted)\n input = self.embed(predicted.unsqueeze(1)) # (batch_size, 1, embed_size)\n sampled_ids = torch.stack(sampled_ids, dim=1) # (batch_size, 120)\n return sampled_ids", "def __init__(self, data_path, batch_size, video_size, mode=\"first80\"):\n self._batch_size = batch_size\n self._video_size = video_size\n\n\n # KTH video splits \n splits = [[11, 12, 13, 14, 15, 16, 17, 18], # train\n [19, 20, 21, 23, 24, 25, 1, 4], # validation\n [22, 2, 3, 5, 6, 7, 8, 9, 10]] # test\n \n label_mapping = {\"boxing\":0,\n \"handclapping\":1, \n \"handwaving\":2,\n \"jogging\":3,\n \"running\":4,\n \"walking\":5}\n self._num_classes = len(label_mapping)\n\n # file containing KTH video frame clip intervals\n sequence_list = os.path.join(data_path, \"00sequences.txt\")\n sequences = self._read_sequence_list(sequence_list)\n \n \n # clip and labels for each 
split, will be converted into [np.arrays()] format\n self._clips = [[] for _ in range(3)] # resized videos\n self._labels = [[] for _ in range(3)] # labels\n self._fns = [[] for _ in range(3)] # file names\n # read video into np array and create label according to splits \n for video_file in glob.glob(os.path.join(data_path, \"*.avi\")):\n fn = os.path.basename(video_file)\n fn = fn[0:len(fn) - 4]\n \n video = load_video(video_file, self._video_size)\n person_index = int(fn.split(\"_\")[0][-2:len(fn.split(\"_\")[0])])\n split = [i for i, j in enumerate(splits) if person_index in j][0]\n label = label_mapping[fn.split(\"_\")[1]]\n\n # obtain clips from video\n video_key_in_sequences = \"_\".join(fn.split(\"_\")[0:len(fn.split(\"_\")) - 1])\n print video_key_in_sequences\n\n if mode == \"episodes\":\n for clip_index, clip_range in enumerate(sequences[video_key_in_sequences]):\n self._labels[split].append(np.eye(len(label_mapping))[label]) \n self._clips[split].append(video[clip_range[0] - 1:clip_range[1] - 1, :, :, :])\n self._fns[split].append(fn + \"_\" + str(clip_index))\n elif mode == \"first80\":\n self._labels[split].append(np.eye(len(label_mapping))[label]) \n self._clips[split].append(video[0:80, :, :, :])\n self._fns[split].append(fn) \n else:\n raise NotImplementedError(\"Unknown preprocess mode.\")\n\n # maximum length for all clips, limit for padding\n self._clip_length = np.array(\\\n reduce(lambda a, b: a + [elem.shape[0] for elem in b], \n self._clips, [])).max() \n\n for split in range(3):\n for clip_index, (clip, label) in \\\n enumerate(zip(self._clips[split], self._labels[split])):\n self._clips[split][clip_index] = np.pad(clip, \\\n ((0, self._clip_length - clip.shape[0]), (0, 0), (0, 0), (0, 0)),\\\n mode=\"constant\", constant_values=0)\n # shuffling\n shuffle_index = range(len(self._clips[split]))\n random.shuffle(shuffle_index)\n self._clips[split] = [self._clips[split][i] for i in shuffle_index]\n self._labels[split] = [self._labels[split][i] for i in shuffle_index]\n self._fns[split] = [self._fns[split][i] for i in shuffle_index]\n \n self._clips[split] = np.concatenate(\\\n [np.expand_dims(i, axis=0) for i in self._clips[split]]) \n self._labels[split] = np.concatenate(\\\n [np.expand_dims(i, axis=0) for i in self._labels[split]])\n\n print self._clips[0].shape\n print self._labels[0].shape\n self._batch_index = [0 for _ in range(3)]", "def test_read_video_in_range_from_memory(self, test_video, config, num_frames):\n full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video)\n # video related\n width, height, min_dimension, max_dimension = 0, 0, 0, 0\n video_start_pts, video_end_pts = 0, -1\n video_timebase_num, video_timebase_den = 0, 1\n # audio related\n samples, channels = 0, 0\n audio_start_pts, audio_end_pts = 0, -1\n audio_timebase_num, audio_timebase_den = 0, 1\n # pass 1: decode all frames using new decoder\n tv_result = torch.ops.video_reader.read_video_from_memory(\n video_tensor,\n SEEK_FRAME_MARGIN,\n 0, # getPtsOnly\n 1, # readVideoStream\n width,\n height,\n min_dimension,\n max_dimension,\n video_start_pts,\n video_end_pts,\n video_timebase_num,\n video_timebase_den,\n 1, # readAudioStream\n samples,\n channels,\n audio_start_pts,\n audio_end_pts,\n audio_timebase_num,\n audio_timebase_den,\n )\n (\n vframes,\n vframe_pts,\n vtimebase,\n vfps,\n vduration,\n aframes,\n aframe_pts,\n atimebase,\n asample_rate,\n aduration,\n ) = tv_result\n assert abs(config.video_fps - vfps.item()) < 0.01\n\n start_pts_ind_max = vframe_pts.size(0) - 
num_frames\n if start_pts_ind_max <= 0:\n return\n # randomly pick start pts\n start_pts_ind = randint(0, start_pts_ind_max)\n end_pts_ind = start_pts_ind + num_frames - 1\n video_start_pts = vframe_pts[start_pts_ind]\n video_end_pts = vframe_pts[end_pts_ind]\n\n video_timebase_num, video_timebase_den = vtimebase[0], vtimebase[1]\n if len(atimebase) > 0:\n # when audio stream is available\n audio_timebase_num, audio_timebase_den = atimebase[0], atimebase[1]\n audio_start_pts = _pts_convert(\n video_start_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(audio_timebase_num.item(), audio_timebase_den.item()),\n math.floor,\n )\n audio_end_pts = _pts_convert(\n video_end_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(audio_timebase_num.item(), audio_timebase_den.item()),\n math.ceil,\n )\n\n # pass 2: decode frames in the randomly generated range\n tv_result = torch.ops.video_reader.read_video_from_memory(\n video_tensor,\n SEEK_FRAME_MARGIN,\n 0, # getPtsOnly\n 1, # readVideoStream\n width,\n height,\n min_dimension,\n max_dimension,\n video_start_pts,\n video_end_pts,\n video_timebase_num,\n video_timebase_den,\n 1, # readAudioStream\n samples,\n channels,\n audio_start_pts,\n audio_end_pts,\n audio_timebase_num,\n audio_timebase_den,\n )\n\n # pass 3: decode frames in range using PyAv\n video_timebase_av, audio_timebase_av = _get_timebase_by_av_module(full_path)\n\n video_start_pts_av = _pts_convert(\n video_start_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(video_timebase_av.numerator, video_timebase_av.denominator),\n math.floor,\n )\n video_end_pts_av = _pts_convert(\n video_end_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(video_timebase_av.numerator, video_timebase_av.denominator),\n math.ceil,\n )\n if audio_timebase_av:\n audio_start_pts = _pts_convert(\n video_start_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator),\n math.floor,\n )\n audio_end_pts = _pts_convert(\n video_end_pts.item(),\n Fraction(video_timebase_num.item(), video_timebase_den.item()),\n Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator),\n math.ceil,\n )\n\n pyav_result = _decode_frames_by_av_module(\n full_path,\n video_start_pts_av,\n video_end_pts_av,\n audio_start_pts,\n audio_end_pts,\n )\n\n assert tv_result[0].size(0) == num_frames\n if pyav_result.vframes.size(0) == num_frames:\n # if PyAv decodes a different number of video frames, skip\n # comparing the decoding results between Torchvision video reader\n # and PyAv\n self.compare_decoding_result(tv_result, pyav_result, config)", "def load_data(data_path, sequence_length, no_sequences=None):\n frames_available = 0\n # load everything in the folder\n all_data = False\n for root, dirs, files in os.walk(data_path):\n for one_file in files:\n if one_file.split(\".\")[-1] != 'npy':\n write_to_summary(f\"Skipping {root}/{one_file}\", print_red=True)\n continue\n write_to_summary(f\"Loading from:{root}/{one_file}\")\n file_path = os.path.join(root, one_file)\n if all_data is False:\n all_data = load_blob(file_path)\n frames_available += all_data.shape[0]\n else:\n more_data = load_blob(file_path)\n all_data = np.concatenate((all_data, more_data), axis=0)\n frames_available += more_data.shape[0]\n # Add 10 sequences in case some are discarded for damaged frames\n if no_sequences is 
not None and frames_available // sequence_length > no_sequences + 10:\n break\n if all_data is False:\n return (False,False)\n # First we check the average, and see if there are any frames to discard\n average_pixel_count = [np.sum(frame) for frame in all_data]\n average_pixel_count = np.mean(average_pixel_count)\n write_to_summary(f\"Average pixel count:{average_pixel_count}\")\n skip_limit = DISCARD_CONSTANT * average_pixel_count\n write_to_summary(f\"Pixel count skip limit:{skip_limit}\")\n skip_indexes = []\n index_counter = 0\n for frame in all_data:\n if np.sum(frame) > skip_limit:\n skip_indexes.append(index_counter)\n index_counter += 1\n write_to_summary(f\"{len(skip_indexes)} frames have a pixel count exceding the threshold:\")\n write_to_summary(skip_indexes)\n\n # Generate all indicies that will produce data\n # and use that to filter out the ones with damaged frame in them \n indicies_pairs = []\n frame_counter = 0\n while frame_counter + sequence_length + 1 < frames_available:\n all_valid_frames = True\n for damaged_frames in skip_indexes:\n if damaged_frames >= frame_counter and damaged_frames <= frame_counter + sequence_length + 1:\n all_valid_frames = False\n break\n pair = (frame_counter, frame_counter + sequence_length + 1)\n if not all_valid_frames:\n write_to_summary(f\"{pair} skipped because of damaged frame\", print_red=True)\n else:\n indicies_pairs.append(pair)\n frame_counter += sequence_length + 1\n write_to_summary(f\"{len(indicies_pairs)} valid sequences available, target is {no_sequences}\")\n\n # Check how many sequences we will get\n # final_no_sequences = frames_available // sequence_length\n final_no_sequences = len(indicies_pairs)\n if no_sequences is not None and final_no_sequences > no_sequences:\n final_no_sequences = no_sequences\n # Discard the ones we dont need\n indicies_pairs = indicies_pairs[:final_no_sequences]\n img_width = all_data.shape[1]\n img_height = all_data.shape[2]\n # -1 in sequence_length becasue the final frame is in the ground truth, no wait skip that\n # better to use sequence_length + 1 for y_train, makes more sense\n x_train = np.zeros((final_no_sequences, sequence_length, img_width, img_height, 1))\n y_train = np.zeros((final_no_sequences, 1, img_width, img_height, 1))\n current_sequence = 0\n for start_frame, end_frame in indicies_pairs:\n training_frames = all_data[start_frame: start_frame + sequence_length]\n truth_frame = all_data[start_frame + sequence_length: end_frame]\n x_train[current_sequence] = np.expand_dims(training_frames, axis=3)\n y_train[current_sequence] = np.expand_dims(truth_frame, axis=3)\n current_sequence += 1\n # No validation for now\n write_to_summary(f\"Loaded {len(x_train)} sequences of length {sequence_length}!\")\n return (x_train, y_train)", "def split_into_frames(filename_raw, thr_var_per_event=5e-4, downsampling_factor=2, disable_display=False,\n filename_output_video=None):\n\n assert downsampling_factor == int(downsampling_factor), \"Error: downsampling_factor must be an integer\"\n assert downsampling_factor >= 0, \"Error: downsampling_factor must be >= 0\"\n\n mv_adaptive_rate_iterator = AdaptiveRateEventsIterator(input_path=filename_raw,\n thr_var_per_event=thr_var_per_event,\n downsampling_factor=downsampling_factor)\n\n height, width = mv_adaptive_rate_iterator.get_size()\n\n if filename_output_video == None:\n video_process = None\n else:\n assert not os.path.exists(filename_output_video)\n video_process = FFmpegWriter(filename_output_video)\n\n if video_process or not disable_display:\n 
img_bgr = np.zeros((height, width, 3), dtype=np.uint8)\n\n cv2.namedWindow(\"img\", cv2.WINDOW_NORMAL)\n\n for events in mv_adaptive_rate_iterator:\n assert events.size > 0\n start_ts = events[0][\"t\"]\n end_ts = events[-1][\"t\"]\n print(\"frame: {} -> {} delta_t: {} fps: {} nb_ev: {}\".format(start_ts, end_ts,\n end_ts - start_ts,\n 1e6 / (end_ts - start_ts),\n events.size))\n if video_process or not disable_display:\n img = events_to_diff_image(events, sensor_size=(height, width))\n img_bgr[...] = 0\n img_bgr[img < 0, 0] = 255\n img_bgr[img > 0, 1] = 255\n\n chunk_start_ts = events[0][\"t\"]\n chunk_end_ts = events[-1][\"t\"]\n delta_t_frame = chunk_end_ts - chunk_start_ts + 1\n frame_txt = \"ts: {} -> {} delta_t: {} fps: {} (nb_ev): {}\".format(chunk_start_ts, chunk_end_ts,\n delta_t_frame,\n int(1.e6/delta_t_frame),\n events.size)\n img_bgr[20:45, ...] = 0\n cv2.putText(img_bgr,\n frame_txt,\n (int(0.05 * width), 40),\n cv2.FONT_HERSHEY_PLAIN, 1.0, (200, 200, 100))\n\n if video_process:\n video_process.writeFrame(img_bgr.astype(np.uint8)[..., ::-1])\n if not disable_display:\n cv2.imshow(\"img\", img_bgr)\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\n if video_process:\n video_process.close()\n if not disable_display:\n cv2.destroyAllWindows()", "def load_data(path, rng, epoch, batch_size, x_,y_):\n #global x_,t_,y_,\n #global first_report2 \n #first_report2 = True\n start_time = time()\n v,p,skeleton_feature,l = load_gzip(path)\n v = v[:,:,:res_shape[2]]\n res_shape[0] = v.shape[0]\n v_new = empty(res_shape,dtype=\"uint8\")\n\n for i in xrange(v.shape[0]): #batch\n if p[i] < 10: p[i] = 100\n ofs = p[i]*ratio\n mid = v.shape[-1]/2.\n sli = None\n if ofs < mid:\n start = int(round(mid-ofs))\n end = int(round(mid+ofs))\n sli = slice(start,end)\n\n for j in xrange(v.shape[2]): #maps\n for k in xrange(v.shape[3]): #frames\n #body\n img = v[i,0,j,k]\n img = cut_img(img,5)\n img = misc.imresize(img,(h,h))\n # if j==0: img = 255-misc.imfilter(img,\"contour\")\n v_new[i,0,j,k] = img\n\n #hand\n img = v[i,1,j,k]\n img = img[sli,sli]\n img = misc.imresize(img,(h,h))\n v_new[i,1,j,k] = img\n\n vid, lbl = v_new,l\n\n #if epoch==0: print \"get in\",str(time()-start_time)[:3]+\"s\",\n # shuffle data\n ind = rng.permutation(l.shape[0])\n ind = ind[:batch_size]\n vid = vid[:,:,:,:4,:,:]\n vid, skeleton_feature, lbl = vid[ind].astype(floatX), skeleton_feature[ind].astype(floatX),lbl[ind].astype(floatX)\n #vid, skeleton_feature, lbl = vid.astype(floatX), skeleton_feature.astype(floatX),lbl.astype(floatX)\n\n # vid = vid/(255./(scaler*2.))-scaler\n #traj = traj/(255./(scaler_traj*2.))-scaler_traj\n # traj = traj/(255./5.)\n\n # Wudi already made labels start from 0\n #lbl -= 1 \n\n #if first_report2:\n # print \"data range:\",vid.min(),vid.max()\n # print \"traj range:\",skeleton_feature.min(),skeleton_feature.max()\n # print \"lbl range:\",lbl.min(),lbl.max()\n # first_report2 = False\n\n # set value\n x_.set_value(vid, borrow=True)\n #t_.set_value(skeleton_feature, borrow=True)\n y_.set_value(lbl, borrow=True)", "def load_data_from_flow():\n # use the load_snippet_pths_test in data writer to get frames and labels\n print('Loading frames and labels...')\n dataset_writer = dataset_factory.get_writer(FLAGS.datasetname)\n writer = dataset_writer()\n\n # retrieve list of test videos\n vid_lst = writer.generate_data_lst_from_split(FLAGS.split_fn)\n if _DEBUG_:\n vid_lst = vid_lst[:3]\n\n # for each video, collect fnames and labels with downsampling\n frames, labels = [], []\n print('Found {:d} 
videos'.format(len(vid_lst)))\n for vid in vid_lst:\n print(' Loading {}...'.format(vid))\n fname_pths_per_vid, labels_per_vid = writer.load_snippet_pths_test(\n FLAGS.datadir, [vid], FLAGS.labels_fname, FLAGS.bg_lbl,\n FLAGS.ext, FLAGS.frameskip)\n fname_pths_per_vid = [x[0] for x in fname_pths_per_vid]\n\n if _DEBUG_:\n fname_pths_per_vid = fname_pths_per_vid[:200]\n labels_per_vid = labels_per_vid[:200]\n\n frames.append(_load_flows(fname_pths_per_vid))\n labels.append(np.array(labels_per_vid))\n return frames, labels", "def filter(\n data,\n max_length=10240,\n min_length=10,\n token_max_length=200,\n token_min_length=1,\n min_output_input_ratio=0.0005,\n max_output_input_ratio=1,\n):\n for sample in data:\n assert \"sample_rate\" in sample\n assert \"wav\" in sample\n assert \"label\" in sample\n # sample['wav'] is torch.Tensor, we have 100 frames every second\n num_frames = sample[\"wav\"].size(1) / sample[\"sample_rate\"] * 100\n if num_frames < min_length:\n continue\n if num_frames > max_length:\n continue\n if len(sample[\"label\"]) < token_min_length:\n continue\n if len(sample[\"label\"]) > token_max_length:\n continue\n if num_frames != 0:\n if len(sample[\"label\"]) / num_frames < min_output_input_ratio:\n continue\n if len(sample[\"label\"]) / num_frames > max_output_input_ratio:\n continue\n yield sample", "def sample_for_inception(model, encoder, batch_size, dataloader, device):\n\n captions = []\n gen_imgs = []\n # get sample captions\n done = False\n while not done:\n for (_, labels_batch, captions_batch) in dataloader:\n captions += captions_batch\n conditional_embeddings = encoder(labels_batch.to(device), captions)\n imgs = model.sample(conditional_embeddings).cpu()\n gen_imgs.append(imgs)\n\n if len(captions) > batch_size:\n done = True\n break\n\n gen_imgs = torch.cat(gen_imgs).numpy()\n gen_imgs = np.clip(gen_imgs, 0, 1)\n return(gen_imgs)", "def load_wavfile(total_frame, wav_file):\n wav_data, sr = sf.load(wav_file, sr=audio_params.SAMPLE_RATE, dtype='float32')\n assert sf.get_duration(wav_data, sr) > 1\n \n features = waveform_to_feature(wav_data, sr)\n features = np.resize(features, (int(total_frame), features.shape[1], features.shape[2]))\n\n return features", "def next(self):\n\n if self.i_sample < self.n_sample:\n df_batch = self.grouped[self.i_sample:min(self.n_sample, self.i_sample + self.batch_size)]\n # at end of epoch, number of sample remains may be smaller than batch size\n if len(df_batch) < self.batch_size:\n df_sample = random.sample(self.grouped, self.batch_size-len(df_batch))\n df_batch = df_batch + df_sample\n try:\n assert len(df_batch) == self.batch_size\n except AssertionError:\n print(self.i_sample, df_sample, df_batch)\n\n # get random frame_idxs\n if self.train:\n flips = np.random.choice(a=[False, True], size=(self.batch_size,), p=[0.5, 0.5])\n else:\n flips = np.zeros(self.batch_size, dtype=bool)\n\n\n video = sample_clips(df_batch, flips, self.batch_size, self.n_frame,\n self.scale_w, self.scale_h, self.sample_half_time, self.train)\n\n bboxes = np.zeros((self.batch_size, self.n_frame // self.temporal_scale, self.n_bbox, 5))\n labels = np.zeros((self.batch_size, self.n_bbox, self.num_class))\n for i in range(len(df_batch)):\n tmp_bbox, tmp_label = self.get_bbox_and_label(df_batch[i], flips[i], i, self.scale_w, self.scale_h)\n bboxes[i] = tmp_bbox\n labels[i] = tmp_label\n\n if self.debug_dataloader:\n with open('dataset/AVA_v2.1/ava_action_list_v2.1.pbtxt') as fd:\n lines = fd.readlines()\n\n labels_info = []\n for i in range(80):\n 
name_line = lines[i * 5 + 1]\n label_id_line = lines[i * 5 + 2]\n label_type_line = lines[i * 5 + 3]\n\n name = name_line[name_line.find('\"') + 1:name_line.rfind('\"')]\n label_id = int(label_id_line.strip().split(':')[1].strip())\n label_type = label_type_line.strip().split(':')[1].strip()\n\n assert label_id == i + 1\n labels_info.append({\n 'name': name,\n 'label_type': label_type\n })\n\n for bidx in range(self.batch_size):\n s_video = video[bidx, ...]\n s_bboxes = bboxes[bidx, ...]\n s_labels = labels[bidx, ...]\n\n window_name = 'batch_idx_'+str(bidx)\n if self.train:\n window_name += '_train'\n else:\n window_name += '_val'\n\n\n bbox = s_bboxes[0, 0, 1:].astype(np.int32)\n label_indices = np.where(s_labels[0, :])[0]\n\n for fidx in range(self.n_frame):\n # print('fidx', fidx)\n save_name = window_name + '_' + str(fidx)\n tmp_img = (s_video[:, fidx, :, :].transpose((1,2,0))).astype(np.uint8).copy()\n\n cv2.rectangle(tmp_img, (bbox[0], bbox[1]), (bbox[2], bbox[3]), color=(0,0,255), thickness=2)\n for en_idx, label_index in enumerate(label_indices):\n # print('label_index', label_index, 'len', len(labels_info))\n cv2.putText(tmp_img, labels_info[label_index]['name'], (bbox[0], bbox[1] + en_idx * 10), cv2.FONT_HERSHEY_SIMPLEX, 0.3, color=(0, 255, 0), thickness=1)\n\n cv2.imwrite(save_name+'.jpg', tmp_img)\n\n\n #print(video.shape, bboxes.shape, labels.shape)\n ret = mx.io.DataBatch(data=[mx.nd.array(video), mx.nd.array(bboxes)],\n label=[mx.nd.array(labels),],\n provide_data=self.provide_data,\n provide_label=self.provide_label)\n\n self.i_sample += self.batch_size\n return ret\n else:\n raise StopIteration", "def main(input_file_name,output_file_name,video_length):\n length_of_video = video_length\n with open(input_file_name + '.pkl', 'rb') as fin:\n frames = pickle.load(fin)\n\n sorted_frames = list(list(x[1]) for x in itertools.groupby(frames, operator.itemgetter(1)))\n final_dict = dict()\n for element in sorted_frames:\n for f in element:\n name = f[0]\n video_name = name[name.rindex(\"/\")+1:name.rindex(\"frame\")-1]\n if video_name not in final_dict:\n final_dict[video_name] = []\n\n final_dict[video_name].append(f)\n\n new_frames = []\n\n for key in final_dict:\n #elements = takespread(final_dict[key],length_of_video)\n new_frames.extend(final_dict[key])\n\n print(\"size:\", len(new_frames))\n predictions = predict_on_frames(new_frames)\n\n with open(output_file_name + '.pkl', 'wb') as fout:\n pickle.dump(predictions, fout)", "def load_data():\n # use the load_snippet_pths_test in data writer to get frames and labels\n print('Loading frames and labels...')\n dataset_writer = dataset_factory.get_writer(FLAGS.datasetname)\n writer = dataset_writer()\n\n # retrieve list of test videos\n vid_lst = writer.generate_data_lst_from_split(FLAGS.split_fn)\n if _DEBUG_:\n vid_lst = vid_lst[:3]\n\n # for each video, collect fnames and labels with downsampling\n frames, labels = [], []\n print('Found {:d} videos'.format(len(vid_lst)))\n for vid in vid_lst:\n print(' Loading {}...'.format(vid))\n fname_pths_per_vid, labels_per_vid = writer.load_snippet_pths_test(\n FLAGS.datadir, [vid], FLAGS.labels_fname, FLAGS.bg_lbl,\n FLAGS.ext, FLAGS.frameskip)\n fname_pths_per_vid = [x[0] for x in fname_pths_per_vid]\n\n if _DEBUG_:\n fname_pths_per_vid = fname_pths_per_vid[:200]\n labels_per_vid = labels_per_vid[:200]\n\n frames.append(_load_images(fname_pths_per_vid))\n labels.append(np.array(labels_per_vid))\n return frames, labels", "def prepData(sample, file, train = True):\r\n y = 
get_y_sample(sample)\r\n if y == None:\r\n return [], np.array([])\r\n Y = np.array([y])\r\n \r\n try:\r\n f = open(\"./phase3_data/\" + sample + \"/\" + file, 'rb')\r\n except:\r\n print(\"[Error]: Could not access file:\", file, \"for sample:\", sample)\r\n return [], np.array([])\r\n \r\n decoding = [(1, 0, 0, 0),\r\n (0, 1, 0, 0),\r\n (0, 0, 1, 0),\r\n (0, 0, 0, 1)]\r\n raw = list(f.read())\r\n f.close()\r\n ohvs = []\r\n if train:\r\n limit = 200\r\n else:\r\n limit = 400\r\n for t in range(len(raw)):\r\n if t > limit:\r\n break\r\n base = (int)(raw[t] / 4**3)\r\n ohvs += [decoding[base]]\r\n raw[t] %= (4**3)\r\n base = (int)(raw[t] / 4**2)\r\n ohvs += [decoding[base]]\r\n raw[t] %= (4**2)\r\n base = (int)(raw[t] / 4**1)\r\n ohvs += [decoding[base]]\r\n raw[t] %= (4**1)\r\n base = (int)(raw[t])\r\n ohvs += [decoding[base]]\r\n\r\n print(sample, file, \"\\n\\tY =\", Y)\r\n return ohvs, Y", "def process_video(input_file, output_file):\n with open('all-features-rbf-svm.p', 'rb') as svm_fd:\n clf = pickle.load(svm_fd)\n with open('all-features-scaler.p', 'rb') as scaler_fd:\n hog_scaler = pickle.load(scaler_fd)\n hog_parameters = HogParameters(orientations=18, pixels_per_cell=8, cells_per_block=2)\n clip = VideoFileClip(input_file)\n test_clip = clip.fl_image(\n lambda frame: process_frame(frame, clf=clf, norm_scaler=hog_scaler, hog_parameters=hog_parameters, spatial_size=(16, 16), hist_bins=32))\n test_clip.write_videofile(output_file, audio=False)", "def test_read_video_from_file_audio_resampling(self, test_video, samples):\n # video related\n width, height, min_dimension, max_dimension = 0, 0, 0, 0\n video_start_pts, video_end_pts = 0, -1\n video_timebase_num, video_timebase_den = 0, 1\n # audio related\n channels = 0\n audio_start_pts, audio_end_pts = 0, -1\n audio_timebase_num, audio_timebase_den = 0, 1\n\n full_path = os.path.join(VIDEO_DIR, test_video)\n\n tv_result = torch.ops.video_reader.read_video_from_file(\n full_path,\n SEEK_FRAME_MARGIN,\n 0, # getPtsOnly\n 1, # readVideoStream\n width,\n height,\n min_dimension,\n max_dimension,\n video_start_pts,\n video_end_pts,\n video_timebase_num,\n video_timebase_den,\n 1, # readAudioStream\n samples,\n channels,\n audio_start_pts,\n audio_end_pts,\n audio_timebase_num,\n audio_timebase_den,\n )\n (\n vframes,\n vframe_pts,\n vtimebase,\n vfps,\n vduration,\n aframes,\n aframe_pts,\n atimebase,\n asample_rate,\n aduration,\n ) = tv_result\n if aframes.numel() > 0:\n assert samples == asample_rate.item()\n assert 1 == aframes.size(1)\n # when audio stream is found\n duration = float(aframe_pts[-1]) * float(atimebase[0]) / float(atimebase[1])\n assert aframes.size(0) == approx(int(duration * asample_rate.item()), abs=0.1 * asample_rate.item())", "def make_frames(signal, sampling_rate, frame_size=0.025, frame_overlap=0.015):\n frame_length = int(round(frame_size * sampling_rate)) #seconds to samples\n frame_step = int(round((frame_size - frame_overlap) * sampling_rate)) #seconds to samples\n #signal_length = len(emphasized_signal)\n\n nf = abs(len(signal) - frame_length)/float(frame_step)\n num_frames = 0\n if int(nf) < 1:\n num_frames = 1 # Make sure that we have at least 1 frame\n else:\n num_frames = int(np.ceil(nf))\n\n padding = np.zeros((num_frames * frame_step) + frame_length - len(signal)) #padding to be added at the end of the signal\n# padded_signal = np.concatenate((signal, padding), axis = None)\n padded_signal = np.zeros((len(padding)+len(signal)))\n np.put(padded_signal, list(range(len(signal))), signal) #put original 
signal in the front\n np.put(padded_signal, list(range(len(signal), len(padded_signal))), padding) #put padding at the back after signal\n\n indices = np.tile(np.array(range(0, frame_length)), (num_frames, 1)) + np.tile(np.array(range(0, num_frames * frame_step, frame_step)), (frame_length, 1)).T\n frames = padded_signal[indices.astype(np.int32, copy=False)]\n\n #Windowing\n frames = frames * hamming(frame_length)\n return frames", "def load_embed(file_name, vocab_size):\n\n with tf.io.gfile.Open(file_name, 'r') as embed_file:\n vocab = []\n embeds = []\n depth = -1\n for index, line in enumerate(embed_file):\n if vocab_size > 0 and index >= vocab_size:\n break\n line = line.strip()\n tokens = line.strip().split(' ')\n word = tokens[0]\n vocab.append(word)\n if depth == -1:\n embed = [float(token) for token in tokens[1:]]\n else:\n embed = [float(token) for token in tokens[-depth:]]\n d = len(embed)\n if depth == -1:\n depth = d\n if d != depth:\n raise ValueError('Inconsistent embedding sizes')\n embeds.append(embed)\n\n embeds = np.stack(embeds)\n\n return vocab, embeds, depth", "def early_gen_test_clip(self, list_files, clip_id, stride=1):\n ground_truth = list_files[0][0]\n start = 0\n end = len(list_files) - self.time_step\n while True:\n labels = []\n features = [np.zeros((1, self.time_step, self.feature_num)).astype('float'),\n np.zeros((1, self.time_step, 1024)).astype('float')]\n for index, elem in enumerate(list_files[start:start + self.time_step]):\n _, frame_path, audio_path = elem\n frame_feature = np.load(frame_path)\n features[0][0][index] = np.array(from_arff_to_feture(audio_path)).reshape(self.feature_num, )\n features[1][0][index] = frame_feature.reshape(1024, )\n labels.append(ground_truth)\n start += self.time_step // stride\n if start >= end:\n break\n labels = self.lb.transform(np.array(labels)).reshape((1, 7))\n yield features, labels", "def frames_to_samples(frames, hop_length=512, n_fft=None):\n\n offset = 0\n if n_fft is not None:\n offset = int(n_fft // 2)\n\n return (np.asanyarray(frames) * hop_length + offset).astype(int)", "def frame_size_standardize(self, frame):\r\n h,w,_ = frame.shape\r\n standard_size = self.cfg.PRETRAIN.STANDARD_SIZE\r\n if isinstance(standard_size, list):\r\n assert len(standard_size) == 3\r\n size_s, size_l, crop_size = standard_size\r\n reshape_size = random.randint(int(size_s), int(size_l))\r\n else:\r\n crop_size = standard_size\r\n reshape_size = standard_size\r\n\r\n # resize the short side to standard size\r\n dtype = frame.dtype\r\n frame = frame.permute(2, 0, 1).to(torch.float) # C, H, W\r\n aspect_ratio = random.uniform(self.aspect_ratio[0], self.aspect_ratio[1])\r\n if h <= w:\r\n new_h = reshape_size\r\n new_w = int(new_h / h * w)\r\n # resize\r\n frame = F.resize(frame.unsqueeze(0), (new_h, new_w), \"bilinear\").squeeze(0) \r\n elif h > w:\r\n new_w = reshape_size\r\n new_h = int(new_w / w * h)\r\n # resize\r\n frame = F.resize(frame.unsqueeze(0), (new_h, new_w), \"bilinear\").squeeze(0) \r\n \r\n # crop\r\n if aspect_ratio >= 1: \r\n crop_h = int(crop_size / aspect_ratio)\r\n crop_w = crop_size\r\n else:\r\n crop_h = crop_size\r\n crop_w = int(crop_size * aspect_ratio)\r\n start_h = random.randint(0, new_h - crop_h)\r\n start_w = random.randint(0, new_w - crop_w)\r\n return frame[:, start_h:start_h+crop_h, start_w:start_w+crop_w].to(dtype).permute(1, 2, 0) # H, W, C\r", "def enframe(samples, winlen, winshift):\n\n # check if i+winlen > len(samples):\n\n result = []\n for i in range(0,len(samples),winshift):\n if(i+winlen > 
len(samples)): break\n result.append(samples[i:i+winlen])\n return np.array(result)\n # return np.array([samples[i:i+winlen] for i in range(0,len(samples),winshift)])", "def generate_video_from_frames(path_to_frames, title):\r\n mean_height = 0\r\n mean_width = 0\r\n num_of_images = load_one_setting(settings_filename, 'MAX_CYCLES')\r\n os.chdir(path_to_frames)\r\n '''Loading all frames'''\r\n for file in os.listdir('.'):\r\n if file.endswith(\".jpg\") or file.endswith(\".jpeg\") or file.endswith(\"png\") or file.endswith(\"JPEG\"):\r\n im = Image.open(file)\r\n width, height = im.size\r\n mean_width += width\r\n mean_height += height\r\n\r\n mean_width = int(mean_width / num_of_images)\r\n mean_height = int(mean_height / num_of_images)\r\n\r\n for file in os.listdir('.'):\r\n if file.endswith(\".jpg\") or file.endswith(\".jpeg\") or file.endswith(\"png\") or file.endswith(\"JPEG\"):\r\n im = Image.open(file)\r\n imResize = im.resize((mean_width, mean_height), Image.ANTIALIAS)\r\n imResize.save(file, 'JPEG', quality=95)\r\n release_video(title)\r\n os.chdir(r'../..')", "def get_hpc_data(filename='./data/hpc/50000_scanned_voxels.Bfloat', sample_size=None):\n\tarr = to_voxels(read_float(filename))\n\tif sample_size is not None:\n\t\tnp.random.shuffle(arr)\n\t\treturn arr[0:sample_size, :]\n\treturn arr", "def dynamic_batch(data, max_frames_in_batch=12000):\n buf = []\n longest_frames = 0\n for sample in data:\n assert \"feat\" in sample\n assert isinstance(sample[\"feat\"], torch.Tensor)\n new_sample_frames = sample[\"feat\"].size(0)\n longest_frames = max(longest_frames, new_sample_frames)\n frames_after_padding = longest_frames * (len(buf) + 1)\n if frames_after_padding > max_frames_in_batch:\n yield buf\n buf = [sample]\n longest_frames = new_sample_frames\n else:\n buf.append(sample)\n if len(buf) > 0:\n yield buf" ]
[ "0.61556715", "0.6012853", "0.5795365", "0.5787265", "0.5753409", "0.5703127", "0.55909455", "0.55065256", "0.55059505", "0.54967993", "0.54833144", "0.54575807", "0.5453226", "0.5434739", "0.5393761", "0.5387126", "0.53686005", "0.53385323", "0.5338431", "0.53017545", "0.5298802", "0.52912825", "0.5288527", "0.5287484", "0.52775615", "0.5269938", "0.5244712", "0.5164779", "0.51522857", "0.51462597" ]
0.67151964
0
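A minimal, runnable sketch of how the load_file document in the record above might be used. The hp namespace, its sample_frames value, and the demo .npy file are assumptions added for illustration; they are not part of the dataset record.

import numpy as np

class hp:                      # assumed stand-in for the hyperparameter module referenced above
    sample_frames = 4          # keep every 4th frame (illustrative value)

def load_file(filename, label_len):
    embedding = np.load(filename)
    arr = []
    for i in range(0, embedding.shape[0], hp.sample_frames):
        arr.append(embedding[i])
    # always include the ending frame
    if (embedding.shape[0] - 1) % hp.sample_frames != 0:
        arr.append(embedding[-1])
    arr = np.array(arr)
    return arr, len(arr)

np.save("demo_frames.npy", np.random.rand(10, 512))        # 10 frames of 512-dim embeddings (toy data)
frames, length = load_file("demo_frames.npy", label_len=3)
print(frames.shape, length)                                # (4, 512) 4 -> frames 0, 4, 8 plus final frame 9

With 10 input frames and a stride of 4, frames 0, 4 and 8 are kept, and frame 9 is appended because the last index is not a multiple of the stride.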
Builds dataset with multiple items per sample. Returns (x, y).
def build_train_dataset(x: List[List[NpArray]], y: NpArray, indices: List[int]) -> \
        Tuple[NpArray, NpArray]:
    res_x, res_y = [], []
    for idx in indices:
        for sample in x[idx]:
            res_x.append(sample)
            res_y.append(y[idx])
    return np.array(res_x), np.array(res_y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_test_dataset(x: List[List[NpArray]]) -> Tuple[NpArray, List[int]]:\n x_test, clips_per_sample = [], []\n\n for sample in x:\n clips_per_sample.append(len(sample))\n\n for clip in sample:\n x_test.append(clip)\n\n return np.array(x_test), clips_per_sample", "def sample_data(self, x: list, y: list) -> Tuple[list, list, list, list]:\n y = [self.mapping[i] for i in y]\n\n x_train = list()\n x_test = list()\n y_test = list()\n y_train = list()\n\n min_label = min([y.count(i) for i in list(set(y))])\n nbr_labels = len(set(y))\n data_size = min_label * len(set(y))\n\n train_size = int(0.66 * data_size)\n test_size = data_size - train_size\n\n for elem_x, elem_y in zip(x, y):\n if y_train.count(elem_y) < int(train_size / nbr_labels):\n x_train.append([elem_x])\n y_train.append(elem_y)\n\n elif y_test.count(elem_y) < int(test_size / nbr_labels):\n x_test.append([elem_x])\n y_test.append(elem_y)\n\n return x_train, y_train, x_test, y_test", "def build_data(samples, labels):\n num_samples = len(samples)\n indexes = list(range(num_samples))\n np.random.shuffle(indexes)\n num_train = int(train_ratio * num_samples)\n # Get the indexes of train data and test data.\n train_indexes = indexes[0:num_train]\n test_indexes = indexes[num_train:num_samples]\n\n # Build the train data and test data.\n train_data = samples[train_indexes]\n train_labels = labels[train_indexes]\n test_data = samples[test_indexes]\n test_labels = labels[test_indexes]\n\n return train_data, test_data, \\\n train_labels, test_labels, \\\n train_indexes, test_indexes", "def create_dataset(name, grid=None, samples=1000, seed=0):\n if grid == None:\n np.random.seed(seed)\n points = 1 - 2 * np.random.rand(samples, 2)\n else:\n x = np.linspace(-1, 1, grid)\n points = np.array(list(product(x, x)))\n creator = globals()[f\"_{name}\"]\n\n x, y = creator(points)\n return x, y", "def DatasetToTuple(sample):\n \n X_elem = []\n Y_elem = []\n for x,y in sample:\n X_elem.append(x if x.dim() > 0 else x.item())\n Y_elem.append(y if y.dim() > 0 else y.item()) \n return (torch.stack(X_elem),torch.stack(Y_elem))", "def DatasetToTuple(sample):\n \n X_elem = []\n Y_elem = []\n for x,y in sample:\n X_elem.append(x if x.dim() > 0 else x.item())\n Y_elem.append(y if y.dim() > 0 else y.item()) \n return (torch.stack(X_elem),torch.stack(Y_elem))", "def generate_dataset(output_dim=14, num_examples=10000):\n def int2vec(x, dim=output_dim):\n out = np.zeros(dim)\n binrep = np.array(list(np.binary_repr(x))).astype('int')\n out[-len(binrep):] = binrep\n return out\n\n x_left_int = (np.random.rand(num_examples) * 2 ** (output_dim - 1)).astype('int')\n x_right_int = (np.random.rand(num_examples) * 2 ** (output_dim - 1)).astype('int')\n y_int = x_left_int + x_right_int\n\n x = list()\n for i in range(len(x_left_int)):\n x.append(np.concatenate((int2vec(x_left_int[i]), int2vec(x_right_int[i]))))\n\n y = list()\n for i in range(len(y_int)):\n y.append(int2vec(y_int[i]))\n\n x = np.array(x)\n y = np.array(y)\n return x, y", "def __data_generation(self, list_ids_temp):\n # X : (n_samples, *dim, n_channels)\n # Initialization\n x = np.empty((self.batch_size, *self.dim, self.n_channels))\n y = np.empty(self.batch_size, dtype=int)\n\n # Generate data\n for i, image_path in enumerate(list_ids_temp):\n # Store sample\n x[i,] = self.__preprocess_inputs(image_path)\n\n # Store class\n if self.tar is None:\n y[i] = self.labels[Path(image_path)._parts[-2]]\n else:\n y[i] = self.labels[Path(image_path.name)._parts[-2]]\n\n return x, tf.keras.utils.to_categorical(y, 
num_classes=self.n_classes)", "def __data_generation(self, list_IDs_temp):\n\n # Initialization\n X = np.empty((self.batch_size, self.grid_dim, self.grid_dim, self.grid_dim, self.n_channels))\n y = np.empty(self.batch_size, dtype=int)\n\n # Generate data\n for i, ID in enumerate(list_IDs_temp):\n # Store sample\n X[i], y[i] = pro_lig_reader_sample(ID[0], ID[1], self.grid_size, self.n_channels, self.grid_resolution)\n return X, keras.utils.to_categorical(y, num_classes=self.n_classes)", "def _get_data_for_tests():\n X = np.random.randn(100, input_dim)\n Y = np.random.randn(100, output_dim)\n X_new = np.random.randn(100, input_dim)\n return X, X_new, Y", "def get_data(generator, random, bench_id):\n x_train, y_train, x_test, y_test = generator(random, bench_id)\n x_train = np.c_[np.ones(len(x_train)), x_train]\n x_test = np.c_[np.ones(len(x_test)), x_test]\n return x_train, y_train, x_test, y_test", "def create_dataset(dataset,time_step=1):\n dataX,dataY=[],[]\n for i in range(len(dataset)-time_step):\n a=dataset[i:i+time_step]\n dataX.append(a)\n dataY.append(dataset[i+time_step])\n return np.asarray(dataX),np.asarray(dataY)", "def get_dataset(self):\n return self._X, self._y", "def create_dataset(self, x, t, n_samples, noise, random=True, normalize=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 1) & (t.shape[1] == 1)), 'x and t should have shape (n_samples x 1)'\n u = self.generate_solution(x, t)\n\n X = np.concatenate([t, x], axis=1)\n if random_state is None:\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n else:\n y = u + noise * np.std(u, axis=0) * np.random.RandomState(seed=random_state).normal(size=u.shape)\n \n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n if random_state is None:\n rand_idx = np.random.permutation(y.shape[0])[:N]\n else:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N]\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n \n # Normalizing\n if normalize:\n if (self.scaling_factor is None):\n self.scaling_factor = (-(np.max(X, axis=0) + np.min(X, axis=0))/2, (np.max(X, axis=0) - np.min(X, axis=0))/2) # only calculate the first time\n X = (X + self.scaling_factor[0]) / self.scaling_factor[1] \n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], dtype=torch.float32)\n\n if return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def _create_dataset(self, *data):\n # Make sure data is a tuple of dense tensors\n data = [self._to_torch(x, dtype=torch.FloatTensor) for x in data]\n return TensorDataset(*data)", "def create_dataset(self, x, t, n_samples, noise, random=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 2) & (t.shape[1] == 1)), 'x and t should have shape (n_samples x 1)'\n u = self.generate_solution(x, t)\n\n X = np.concatenate([t, x], axis=1)\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N] # so we can get similar splits for different noise levels\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], requires_grad=True, dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], requires_grad=True, dtype=torch.float32)\n \n if 
return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def generate_dataset(self) -> Tuple[np.array, np.array]:\n np.random.seed(self.seed)\n sim_data_x = []\n sim_data_y = []\n num_sims = math.ceil(self.size / self.samples_per_sim)\n for sim in tqdm.tqdm(range(num_sims)):\n x_times = np.linspace(0, self.time_delta, self.samples_per_sim, endpoint=False)\n y_times = x_times + self.time_delta\n sim_results = self.simulator.run_simulation(\n np.random.lognormal(), np.random.lognormal(),\n eval_times=np.concatenate((x_times, y_times)))\n sim_data_x.append(sim_results[:self.samples_per_sim])\n sim_data_y.append(sim_results[self.samples_per_sim:])\n\n sim_data_x, sim_data_y = map(\n lambda l: np.concatenate(l, axis=0)[:self.size], (sim_data_x, sim_data_y))\n return sim_data_x, sim_data_y", "def CreateDataset(all_arrays):\n dataset = Dataset()\n\n dataset._addData(all_arrays[0])\n dataset._addData(all_arrays[1])\n dataset._addData(all_arrays[3])\n dataset._addData(all_arrays[5])\n dataset._addData(all_arrays[6])\n dataset._addData(all_arrays[9])\n dataset._addData(all_arrays[8])\n dataset._addData(all_arrays[4])\n\n return dataset", "def __data_generation(self, indexes):\n\n # Initialization\n x = np.zeros((self.batch_size, self.dim_1,self.dim_2,1))\n y = np.zeros((self.batch_size, 2))\n\n # Generate data\n\n for i, ID in enumerate(indexes):\n\n x[i,:,:,0] = self.file_source[ID]\n y[i,:] = self.label_source[ID]\n\n return x, y", "def __data_generation(self, list_IDs_temp):\n X = np.empty((self.batch_size, self.dim))\n Y = np.empty((self.batch_size, self.word_length, self.hot_enc_len))\n\n # Generate data\n for i, ID in enumerate(list_IDs_temp): # The enumerate() function adds a counter to an iterable.\n word = self.labels.index2word[ID]\n # Store sample\n X[i, ] = self.labels[word]\n # Store class\n char_hot_enc_pad = self.word_2_seq_hot_enc_sample(word)\n Y[i] = char_hot_enc_pad\n return X.reshape(self.batch_size, self.dim), Y", "def dataset(self, timestep, data):\n dataX, dataY = [], []\n for i in range(len(data) - timestep):\n a = data[i:i+timestep]\n dataX.append(a)\n dataY.append(data[i + timestep])\n return np.array(dataX), np.array(dataY)", "def __create_sample_data__(npts = 20):\n\t#data function\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\t#make grid\n\txs = np.linspace(0, 2*20, 2*npts + 1)\n\tys = np.linspace(0, 20, npts + 1)\n\t(xgrid, ygrid) = np.meshgrid(xs, ys)\n\tzgrid = wavy(xgrid, ygrid)\n\t\n\treturn (xgrid, ygrid, zgrid)", "def getSampleDataSet():\n ds = [{\"name\": \"Denise\", \"sex\": \"F\"},\n \t{\"name\": \"Paul\", \"sex\": \"M\"}]\n return ds", "def _split_data(self, x, y):\n\tindices = range(self.N)\n\tnp.random.shuffle(indices)\n\ttrain_idx, test_idx = indices[:self.TRAIN_SIZE], indices[self.TRAIN_SIZE:]\n\treturn (x[train_idx,:], y[train_idx,:], x[test_idx,:], y[test_idx,:])", "def create_dataset(raw_vectors):\n dataset = []\n for k in range(len(raw_vectors)):\n for n,v in enumerate(raw_vectors[k]):\n if k == 0:\n dataset.append(Datapoint(n))\n dataset[n].addNumber(v)\n return dataset", "def create_dataset():\n nb_subjects_per_category = 100\n\n # Generate random data using numpy\n # Two values are: Concentration of red blood cell and concentration of white blood cell\n # Generates two values and add the corresponding value with -2. 
Sick people get score lower than 0\n sick = np.random.randn( nb_subjects_per_category, 2) + np.array([-2,-2])\n # Generates two values and add the corresponding value with 2. Healthy people get score higher than 0\n healthy = np.random.randn( nb_subjects_per_category, 2) + np.array([2, 2])\n\n # combines the two arrays\n full_data = np.vstack([sick, healthy])\n\n # means that those sick people get a value of zero, and those healthy get a value of 1.\n # this gives an array of 10 composed of 5 0s followed by 5 1s.\n targets = np.concatenate((np.zeros(nb_subjects_per_category), np.zeros(nb_subjects_per_category) + 1))\n\n # Plot points. This is the data set being shown in a graph.\n # features[:, 0] means that we are slicing our 2D features of shape 100,2 and taking only the first column of all data\n # features[:, 1] means that we are slicing our array by taking only the second column of our data points\n # s: is marker size (draws bigger points)\n # c: describes the possible colors. Because our targets are 0s and 1s, then there is only two colors. Also, targets\n # array shows how to color the different elements in full_data depending on the index of targets. So I know the 50\n # last elements in full_data will have their own color because the last 50 items in targets all hold same value.\n plt.scatter(full_data[:, 0], full_data[:, 1], s=40, c=targets, cmap=plt.cm.Spectral)\n # save picture of data points drawn.\n plt.savefig(\"DataPoints.png\")\n\n # can return multiple parameters at once\n return full_data, targets", "def create_data(num_sample=None):\n I = np.eye(3, dtype=np.float32)\n\n\n if (num_sample == None):\n num_sample = 100\n\n # Generate first class\n m1 = np.asarray([0.5, 0.5], dtype=np.float32)\n cov1 = np.asarray([[0.1, 0],\n [0, 0.1]], dtype=np.float32)\n data1 = rng.multivariate_normal(m1, cov1, num_sample)\n label1 = np.ones((num_sample), dtype=np.uint16) - 1\n label1 = I[label1,:]\n\n # Generate second class\n m2 = np.asarray([0.3,0.3], dtype=np.float32)\n cov2 = np.asarray([[0.5, 0], [0, 0.5]], dtype=np.float32)\n data2 = rng.multivariate_normal(m2, cov2, num_sample)\n label2 = np.ones((num_sample), dtype=np.uint16)\n label2 = I[label2, :]\n\n\n return (data1, label1, data2, label2)", "def dataset(self):\n if self.X is not None and self.y is not None:\n return self.X, self.y\n\n self.X, self.y = self.get_BOW_from_file(self.labels[0])\n for label in self.labels[1:]:\n X_temp, y_temp = self.get_BOW_from_file(label)\n self.X = np.concatenate((self.X, X_temp))\n self.y = np.concatenate((self.y, y_temp))\n\n return self.X, self.y", "def data_set_maker():\n\n # crate a folder in your code directory and name it: \"files\". put the .npy files iside that folder\n path = os.getcwd() # reads the current path\n x_train = np.load(path + '/files/tinyX.npy', 'r') # reads the input file\n y_train = np.load(path + '/files/tinyY.npy', 'r') # reads the input file\n x_test = np.load(path + '/files/tinyX_test.npy', 'r') # reads the input file\n x_train, y_train = shuffle(x_train, y_train)\n\n return x_train, y_train, x_test", "def build_dataset(self, X, y=None):\n X = np.array(X)\n self.input_dim = X.shape[1]\n X = torch.FloatTensor(X)\n if y is None:\n dataset = torch.utils.data.TensorDataset(X)\n else:\n self.classes_ = sorted(set(y))\n self.n_classes_ = len(self.classes_)\n class2index = dict(zip(self.classes_, range(self.n_classes_)))\n y = [class2index[label] for label in y]\n y = torch.tensor(y)\n dataset = torch.utils.data.TensorDataset(X, y)\n return dataset" ]
[ "0.71700114", "0.6876828", "0.67699337", "0.675121", "0.66405386", "0.66405386", "0.65027136", "0.6290405", "0.6290306", "0.626962", "0.6263836", "0.62594765", "0.62546146", "0.6243724", "0.6191763", "0.61844975", "0.61676997", "0.61600125", "0.6146306", "0.61357695", "0.61095244", "0.61068624", "0.6095173", "0.60777175", "0.6065923", "0.6063546", "0.6038876", "0.60269886", "0.5999795", "0.5994263" ]
0.71009916
1
Builds test dataset with multiple clips per sample, which will be joined later somehow (maximum, geom. mean, etc).
def build_test_dataset(x: List[List[NpArray]]) -> Tuple[NpArray, List[int]]:
    x_test, clips_per_sample = [], []

    for sample in x:
        clips_per_sample.append(len(sample))

        for clip in sample:
            x_test.append(clip)

    return np.array(x_test), clips_per_sample
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_test_set(data, pts): \n test_set = np.asarray(random.sample(data, pts))\n \n return test_set", "def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")", "def create_data_bunch(data, test=False):\r\n examples = [t[0] for t in data if t[0]]\r\n target = np.zeros((len(data),), dtype=np.int64)\r\n if not test:\r\n for i, tweet in enumerate(data):\r\n if tweet[1] == 'OFF':\r\n target[i] = 1\r\n elif tweet[1] == 'NOT':\r\n target[i] = 0\r\n dataset = Bunch(data=examples, target=target, target_names=['NOT', 'OFF'])\r\n return dataset", "def make_nshot_dataset(\n samples: List[Dict[str, Any]],\n shots: int,\n rng: np.random.RandomState,\n repeat_samples: int = 1,\n separator: str = \" \",\n max_examples: Optional[int] = None,\n) -> List[Dict[str, Any]]:\n samples = copy.deepcopy(samples)\n if len(samples) < shots + 1:\n raise ValueError(\"Do not have enough examples for the number of shots\")\n\n if repeat_samples != 1:\n samples = samples * repeat_samples\n\n fewshot_samples = []\n\n if max_examples is not None:\n query_samples = rng.choice(samples, max_examples, replace=False)\n else:\n query_samples = samples\n\n for sample in query_samples:\n validation_example = copy.deepcopy(sample)\n valid_samples = [x for x in samples if x != sample]\n shot_examples = list(rng.choice(valid_samples, shots, replace=False))\n\n context = separator.join(\n [\n example[\"input\"] + rng.choice(example[\"target\"])\n for example in shot_examples\n ]\n )\n validation_example[\"input\"] = context + separator + validation_example[\"input\"]\n\n fewshot_samples.append(validation_example)\n\n return fewshot_samples", "def generate_test():\n o = []\n pos = [384, 288]\n note_group_size = GAN_PARAMS[\"note_group_size\"]\n generate_set(begin=3 * note_group_size, start_pos=pos,\n length_multiplier=dist_multiplier, group_id=3, plot_map=True)", "def prepare_test_data(args):\n image_dir = args.test_image_dir\n\n files = os.listdir(image_dir)\n files = [f for f in files if f.lower().endswith('.png')]\n\n img_ids = list(range(len(files)))\n img_files = []\n img_heights = []\n img_widths = []\n \n for f in files:\n img_path = os.path.join(image_dir, f)\n img_files.append(img_path)\n img = cv2.imread(img_path)\n img_heights.append(img.shape[0]) \n img_widths.append(img.shape[1]) \n\n print(\"Building the testing dataset...\")\n dataset = DataSet(img_ids, img_files, img_heights, img_widths)\n print(\"Dataset built.\")\n return dataset", "def refresh_test_dataset(self):\n inputs_id, inputs_starts, inputs_paths, inputs_ends, inputs_label = self.build_data(self.reader, self.test_items, self.option.max_path_length)\n self.test_dataset = CodeDataset(inputs_id, inputs_starts, inputs_paths, inputs_ends, inputs_label)", "def create_data_generators(shuffle=True, novelty_type='normal', item_to_include='None',\n scale_level=1):\n\n total_noi_i = 10 # Number of processed images from 
one environemnt i\n noe = 1 # Numer of environments\n n_p = 32 # Patch size, patch --> n_p x n_p\n\n novelty = novelty_type\n datasets = []\n\n for i in range(noe):\n\n # Load only images of the environment which includes images of the stated novel item.\n if item_to_include is not None and novelty == 'novel_item':\n dataset_env_i = PolycraftDatasetWithSpecificItem(\n nov_type=novelty, noi=total_noi_i, env_idx=i, p_size=n_p, scale_factor=scale_level,\n item_name=item_to_include)\n datasets.append(dataset_env_i)\n # We only process the one environment with the item (maybe change this\n # if we have more than one environement per novel_item!?)\n break\n\n # No specific item given which should be included.\n else:\n dataset_env_i = PolycraftDatasetNoSpecificItem(\n nov_type=novelty, noi=total_noi_i, env_idx=i, p_size=n_p, scale_factor=scale_level)\n datasets.append(dataset_env_i)\n\n final_dataset = ConcatDataset(datasets)\n\n total_noi = len(final_dataset) # Total number of processed images from all datasets\n\n if(total_noi < 7):\n print('Number of samples too small for splitting dataset in training-/valid-/test set.')\n\n train_noi = int(0.7 * total_noi) # Number of images used for training (70 %)\n valid_noi = int(0.15 * total_noi) # Number of images used for validation (15 %)\n test_noi = total_noi - train_noi - valid_noi # Number of images used for testing (15 %)\n train_dataset, valid_dataset, test_dataset = torch.utils.data.random_split(\n final_dataset, [train_noi, valid_noi, test_noi])\n\n train_loader = DataLoader(train_dataset, batch_size=1, shuffle=True)\n valid_loader = DataLoader(valid_dataset, batch_size=1, shuffle=True)\n test_loader = DataLoader(test_dataset, batch_size=1, shuffle=True)\n\n return train_loader, valid_loader, test_loader", "def make_dataset_for_scatter():\n condition1 = max_corr['bin_width'] == select_bin_width.value\n condition2 = select_n_samples.value[0] <= max_corr['n_points']\n condition3 = max_corr['n_points'] <= select_n_samples.value[1]\n by_bin = max_corr[condition1 & condition2 & condition3]\n return ColumnDataSource(by_bin)", "def construct_test_set(tweet_test):\n tweet_clean = clean_data(tweet_test.values)\n np.reshape(tweet_clean, (10000,))\n tweet_TE = tweet_clean.flatten()\n return tweet_TE", "def make_test_data(self):\r\n\r\n \r\n\r\n print (\"Creating Test Sample:\")\r\n\r\n print (' Period, rate, reps, phases: ', self.period, self.framerate, self.nrepetitions, self.nPhases)\r\n\r\n nframes = int(self.period * self.framerate * self.nrepetitions)\r\n\r\n print (' nframes: ', nframes)\r\n\r\n if self.bkgdNoise > 0.:\r\n\r\n d = np.random.normal(size=(nframes,self.imageSize[0],self.imageSize[1]),\r\n\r\n loc=self.bkgdIntensity, scale=self.bkgdNoise).astype('float32')\r\n\r\n else:\r\n\r\n d = self.bkgdIntensity*np.ones((nframes,self.imageSize[0],self.imageSize[1])).astype('float32')\r\n\r\n \r\n\r\n ds = d.shape\r\n\r\n print (' data shape: ', ds)\r\n\r\n dx = int(ds[2]/4)\r\n\r\n xc = int(ds[2]/2)\r\n\r\n xo = [xc-dx, xc+dx]\r\n\r\n ywidth = int(ds[2]/(self.nPhases+2))\r\n\r\n framedelay = 4\r\n\r\n\r\n\r\n if not self.mode:\r\n\r\n self.phasex = []\r\n\r\n self.phasey = []\r\n\r\n for i in range(0,self.nPhases):\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # each phase is assigned to a region\r\n\r\n self.resp = np.zeros((nframes,))\r\n\r\n self.resp = np.cos(\r\n\r\n np.linspace(0, 2.0*np.pi*nframes/(self.period*self.framerate), nframes-framedelay)+i*np.pi/8 - np.pi/2.0)\r\n\r\n self.resp = np.concatenate((np.zeros(framedelay), 
self.resp))\r\n\r\n d[:, xo[0]:xo[1], dy:dy+ywidth ] += self.resp[:, np.newaxis, np.newaxis]\r\n\r\n self.phasey.append( (2+(dy+int(ds[2]/self.nPhases))/2))\r\n\r\n self.phasex.append((6+int(ds[1]/2)/2)) # make the signal equivalent of digitized one (baseline 3000, signal at 1e-4 of baseline)\r\n\r\n else:\r\n\r\n self.nPhases = 4\r\n\r\n self.spotsize = 16\r\n\r\n nrpts = 20\r\n\r\n nsites = 4\r\n\r\n one_rep = int(self.period*self.framerate)\r\n\r\n isi = int(self.period*self.framerate/self.nPhases)\r\n\r\n print('period, isi: ', self.period, isi)\r\n\r\n r = np.arange(0, nrpts, 1.)\r\n\r\n alpha = 4.\r\n\r\n A = r/alpha *np.exp(-(r-alpha)/alpha) # scaled alpha function\r\n\r\n self.spot= self.gauss_spot(self.spotsize, 3.) # the 2d spot\r\n\r\n sigsize = np.random.normal(size=self.nPhases, loc=self.signal_size, scale=self.signal_size*2)\r\n\r\n sigsize = [np.abs(s) for s in sigsize] # restrict to positive amplitudes\r\n\r\n print ('sigsize: ', sigsize)\r\n\r\n for j in range(self.nrepetitions):\r\n\r\n for i in range(self.nPhases):\r\n\r\n self.resp = np.zeros((nrpts, self.spot.shape[0], self.spot.shape[1]))\r\n\r\n for k in range(nrpts):\r\n\r\n self.resp[k,:,:] += sigsize[i]*A[k] * self.spot # make response an alpha time course of gaussian spot\r\n\r\n start = j*one_rep + i*isi + framedelay\r\n\r\n stop = start + nrpts\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # location for phase\r\n\r\n #dy = dy + 2*z\r\n\r\n# print ('start, stop: ', start, stop)\r\n\r\n for z in range(nsites):\r\n\r\n #self.resp = np.concatenate((np.zeros(framedelay), self.resp))\r\n\r\n xp = xo[0] + i*10 - 10*z\r\n\r\n yp = dy - i*10 + 10*z\r\n\r\n d[start:stop, xp:xp+self.spotsize, yp:yp+self.spotsize ] += self.resp\r\n\r\n self.imageData = d # reduce to a 16-bit map to match camera data type\r\n\r\n self.nFrames = self.imageData.shape[0]\r\n\r\n self.times = np.arange(0, nframes/self.framerate, 1.0/self.framerate)\r\n\r\n print( \" Test Image Created\")\r\n\r\n # imv = pg.ImageView()\r\n\r\n # imv.show()\r\n\r\n # imv.setImage(self.imageData)\r\n\r\n\r\n\r\n if self.layout is not None:\r\n\r\n self.layout.addWidget(imv, 0, 0)\r\n\r\n\r\n\r\n avgImage = np.mean(self.imageData, axis=0)\r\n\r\n ima = pg.ImageView()\r\n\r\n ima.setImage(avgImage)\r\n\r\n self.layout.addWidget(ima, 0, 1)\r\n\r\n self.adjust_image_data()\r\n\r\n self.avgimg = np.mean(self.imageData, axis=0) # get mean image for reference later: average across all time\r\n\r\n print (' Test file, original Image Info: ')\r\n\r\n self.print_image_info()\r\n\r\n self.rebin_image()\r\n\r\n #self.clean_windowerrors()\r\n\r\n # pg.image(self.imageData)\r\n\r\n # pg.show()\r\n\r\n # mpl.figure(1)\r\n\r\n # mpl.show()\r\n\r\n if not self.mode: # FFT analysis\r\n\r\n self.analysis_fourier_map(target=1, mode=0)\r\n\r\n self.plot_maps(mode=2, gfilter=self.gfilter)\r\n\r\n else:\r\n\r\n self.analysis_dFF_map()\r\n\r\n mpl.show()", "def get_training_and_testing_sets(data, Y):\r\n data = pd.concat([data, Y], axis=1)\r\n x,y=data.shape\r\n train_X_sub1=data[0:x//6]\r\n dev_X_sub1 = data[x//6:x//6 + x//12]\r\n test_X_sub1 = data[x//6 + x//12:x//3]\r\n\r\n train_X_sub2 = data[x//3:x//3+x//6]\r\n dev_X_sub2 = data[x//6 + x//3:x//3 + x//6 + x//12]\r\n test_X_sub2 = data[x//3 + x//6 + x//12:2*x//3]\r\n\r\n train_X_sub3 = data[2*x//3:(2*x//3) +x//6]\r\n dev_X_sub3 = data[x//6 + 2*x//3: (2*x//3) + x//6 + x//12]\r\n test_X_sub3 = data[2*x//3 + x//6 + x//12:x]\r\n\r\n train_X=train_X_sub1.append(train_X_sub2,ignore_index = True)\r\n train_X 
=train_X.append(train_X_sub3,ignore_index = True)\r\n dev_X= dev_X_sub1.append(dev_X_sub2,ignore_index = True)\r\n dev_X = dev_X.append(dev_X_sub3,ignore_index = True)\r\n test_X = test_X_sub1.append(test_X_sub2,ignore_index = True)\r\n test_X = test_X.append(test_X_sub3,ignore_index = True)\r\n\r\n\r\n train_X = util.shuffle(train_X)\r\n train_X = train_X.reset_index(drop=True)\r\n\r\n dev_X = util.shuffle(dev_X)\r\n dev_X = dev_X.reset_index(drop=True)\r\n\r\n test_X = util.shuffle(test_X)\r\n test_X = test_X.reset_index(drop=True)\r\n\r\n train_X_final=train_X\r\n dev_X_final = dev_X\r\n test_X_final = test_X\r\n x, y = train_X_final.shape\r\n train_X = train_X_final.iloc[:, 0:y - 1]\r\n train_Y = train_X_final.iloc[:, y - 1]\r\n\r\n x, y = test_X_final.shape\r\n test_X = test_X_final.iloc[:, 0:y - 1]\r\n test_Y = test_X_final.iloc[:, y - 1]\r\n\r\n x, y = dev_X_final.shape\r\n dev_X = dev_X_final.iloc[:, 0:y - 1]\r\n dev_Y = dev_X_final.iloc[:, y - 1]\r\n\r\n return train_X, train_Y, dev_X,dev_Y,test_X, test_Y", "def get_uniform_dist_data(debug, dataset, resamplefactor, raw, analysis):\n\n project_wd, project_data, project_sink = get_paths(debug, dataset)\n\n demographics, imgs, dataframe = get_data(project_data, dataset,\n debug, project_wd,\n resamplefactor,\n raw=raw,\n analysis=analysis)\n\n # transform age into ints\n demographics['age_int'] = demographics['age'].astype('int32', copy=False)\n\n # Select 14 subjects for all ages that have 14 representatives.\n age_range = np.arange(demographics['age'].min(), demographics['age'].max())\n # remove entry where you don't have 14 subjects\n max_n = 14\n age_to_remove = [35, 36, 39, 42, 78, 79, 80, 81, 82, 83, 85, 89]\n age_range = np.setdiff1d(age_range, age_to_remove)\n # iterate over the dataframe and select 14 subjects for each age range\n ids_to_use = []\n for age in age_range:\n ids_to_use.append(demographics.index[demographics['age_int'] ==\n age].tolist()[:max_n])\n\n # flatten ids_to_use\n ids_to_use = [item for sublist in ids_to_use for item in sublist]\n # Filter the demographics dataframe\n demographics = demographics[demographics.index.isin(ids_to_use)]\n # set subject's id as index\n # filter dataset using index of the subjects\n dataframe = dataframe.loc[demographics['id']]\n\n # Print some diagnosis\n print('Shape of the new demographics:')\n print(demographics.shape)\n print('Oldest %d and youngest %d subject' %(demographics['age_int'].max(),\n demographics['age_int'].min()))\n print('Number of age bins %d' %len(demographics['age_int'].unique()))\n return demographics, dataframe", "def create_samples(self):\n for s_id in range(len(self.data[\"sample\"])):\n self.samples.add(Sample(s_id, [self.data[key][s_id] for key in self.data.keys() if key not in WRONG_KEYS],\n self.data[\"label\"][s_id]))", "def create_simple_data_set(\n n_training_points,\n n_testing_points,\n low=0,\n high=3,\n mode=training_testing_split.SEPERATE,\n kernel=kernel_matern,\n shuffle=True,\n):\n gp = gaussian_process(kernel=kernel, verbose=True)\n\n mid = (low + high) / 2\n\n if mode == training_testing_split.SEPERATE_LONG:\n x_training, x_testing = __seperate_long(\n n_training_points, n_testing_points, low, high\n )\n elif mode == training_testing_split.SEPERATE:\n x_training, x_testing = __seperate(\n n_training_points, n_testing_points, low, high\n )\n elif mode == training_testing_split.INTERSPREAD:\n x_training, x_testing = __interspread(\n n_training_points, n_testing_points, low, high\n )\n elif mode == training_testing_split.RANDOM:\n 
x_training, x_testing = __random(n_training_points, n_testing_points, low, high)\n elif mode == training_testing_split.MIXED:\n\n def r(z):\n dist = np.random.randint(low=1, high=100, size=4)\n λ = lambda x: x / dist.sum()\n vfunc = np.vectorize(λ)\n dist = vfunc(dist)\n return (z * dist).round().astype(int)\n\n training_dist = r(n_training_points)\n testing_dist = r(n_testing_points)\n x1, x2 = __random(training_dist[0], testing_dist[0], low, high)\n x11, x22 = __interspread(training_dist[1], testing_dist[1], low, high)\n x111, x222 = __interspread(training_dist[2], testing_dist[2], low, high)\n x1111, x2222 = __seperate(training_dist[3], testing_dist[3], low, high)\n x_training = np.vstack([x1, x11, x111, x1111])\n x_testing = np.vstack([x2, x22, x222, x222])\n\n y_samples = gp.sample(np.vstack([x_training, x_testing]), 1).squeeze()\n y_training = y_samples[: len(x_training)].reshape(-1, 1)\n y_testing = y_samples[len(x_training) :].reshape(-1, 1)\n training_data_set = data_loader.DataSet(X=x_training, Y=y_training)\n testing_data_set = data_loader.DataSet(X=x_testing, Y=y_testing)\n\n if shuffle:\n training_data_set.shuffle()\n testing_data_set.shuffle()\n\n return training_data_set, testing_data_set", "def test_data():\n batch_size = 10\n input_dim = 28\n test_data = np.random.rand(batch_size, input_dim)\n\n return test_data", "def make_sub_data_test(data, config):\n sub_input_sequence = []\n sub_label_sequence = []\t\t \n\n for i in range(len(data)):\n\tinput_, label_, = preprocess_test(data[i], config.c_dim) # do bicbuic\n\tinput_ = input_[:,:,0]\n\tlabel_ = label_[:,:,0]\n\n\tif len(input_.shape) == 3: # is color\n\t h, w, c = input_.shape\n\telse:\n\t h, w = input_.shape # is grayscale\n\n\t#checkimage(input_)\t\n\t\n\tnx, ny = 0, 0\n\tfor x in range(0, h - config.image_size + 1, config.stride):\n\t nx += 1; ny = 0\n\t for y in range(0, w - config.image_size + 1, config.stride):\n\t\tny += 1\n\n\t\tsub_input = input_[x: x + config.image_size, y: y + config.image_size] # 41 * 41\n\t\tsub_label = label_[x: x + config.label_size, y: y + config.label_size] # 41 * 41\n\n\n\t\t# Reshape the subinput and sublabel\n\t\tsub_input = sub_input.reshape([config.image_size, config.image_size, 1])\n\t\tsub_label = sub_label.reshape([config.label_size, config.label_size, 1])\n\n\t\t# Normialize\n\t\tsub_input = sub_input / 255.0\n\t\tsub_label = sub_label / 255.0\n\t\t\n\t\t#cv2.imshow(\"im1\",sub_input)\n\t\t#cv2.imshow(\"im2\",sub_label)\n\t\t#cv2.imshow(\"residual\",sub_input - sub_label)\n\t\t#cv2.waitKey(10)\t\n\n\t\t# Add to sequence\n\t\tsub_input_sequence.append(sub_input)\n\t\tsub_label_sequence.append(sub_label)\n\t'''\n\tnx, ny = 1, 1\n\n\t# Normialize\n\tsub_input = input_ / 255.0\n\tsub_label = label_ / 255.0 \n \n \tsub_input_sequence.append(sub_input)\n\tsub_label_sequence.append(sub_label) \n\t''' \n # NOTE: The nx, ny can be ignore in train\n return sub_input_sequence, sub_label_sequence, nx, ny", "def create_train_test_sets(self,x,y,lenTest):\n \n nbInd = x.shape[0]\n shuffler = np.random.permutation(nbInd)\n x_train = x[shuffler][0:(nbInd-lenTest),]\n y_train = y[shuffler][0:(nbInd-lenTest),]\n\n x_test = x[shuffler][(nbInd-lenTest):nbInd,]\n y_test = y[shuffler][(nbInd-lenTest):nbInd,]\n\n return x_train,y_train,x_test,y_test", "def generate_negative_samples(self, data, sampled_data, zeros=[], validation=False):\n negative_sampled_data = []\n negative_sampled_indices = []\n for sample in sampled_data:\n i = data['pos'].index(sample) ## index of a particular move in a demo\n 
all_num = 0\n for which, num in enumerate(data['leng_pos']):\n all_num += num\n if all_num > i:\n which_demo = which ## index of a demo the move with index i comes from\n break\n\n sum_neg_lengths = sum(data['leng_neg'][:which_demo])\n\n key = sum_neg_lengths-1 \n value = sum_neg_lengths + data['leng_neg'][which_demo]\n demo_negative_data = data['neg'][key : value]\n state, action = sample\n for demo_state, demo_action in demo_negative_data:\n if demo_state == state:\n negative_sampled_data.extend([(demo_state, demo_action)])\n demo_index = data['neg'].index((demo_state, demo_action))\n negative_sampled_indices.append(demo_index)\n\n if not validation:\n num_pos = sum(self.pipeline_y == 1)\n num_neg = len(negative_sampled_data)\n pos_sample = self.pipeline_X[:num_pos, :]\n neg_sample = self.pipeline_X[num_pos + negative_sampled_indices, :]\n y_vector = [1] * num_pos + [0] * num_neg\n ######################### Mouselab ad-hc #########################\n ########################## Removing 0's ##########################\n non_zero = [self.pipeline_X[i, :] for i in range(num_pos)\n if i not in zeros]\n pos_sample = vstack(non_zero) if non_zero != [] else self.pipeline_X[0,:]\n num_pos = pos_sample.shape[0]\n y_vector = [1] * num_pos + [0] * num_neg\n ##################################################################\n\n self.pipeline_X = vstack((pos_sample, neg_sample))\n self.pipeline_y = np.array(y_vector, dtype='uint8')\n \n return negative_sampled_data", "def autogen_dataset_ratios_with_test():\n return TabularDataset.autogen('tests/data/dummy_tabular/train.csv',\n test_path='tests/data/dummy_tabular_test/test.csv',\n seed=42,\n sep=',',\n test_ratio=0.5, # no effect\n val_ratio=0.5)", "def create_train_test_sets(conform_shape=True, indi_proportion=0.50, incl_group_imgs=True):\r\n X_train_indi, y_train_indi = build_dataframe('Individual_Training_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n X_test_indi, y_test_indi = build_dataframe('Individual_Test_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n \r\n X_train_group, y_train_group = build_dataframe('Group_Training_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n X_test_group, y_test_group = build_dataframe('Group_Test_Images',\r\n img_input_shape, conform_shape=conform_shape)\r\n \r\n X_train_indi, y_train_indi = subsample_dataframe(X_train_indi, y_train_indi,indi_proportion)\r\n \r\n if incl_group_imgs:\r\n X_train = np.concatenate([X_train_indi,X_train_group])\r\n y_train = np.concatenate([y_train_indi,y_train_group])\r\n else: \r\n X_train = X_train_indi.copy()\r\n y_train = y_train_indi.copy()\r\n\r\n return X_train, y_train, X_test_indi, y_test_indi, X_test_group, y_test_group", "def __create_sample_data__(npts = 20):\n\t#data function\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\t#make grid\n\txs = np.linspace(0, 2*20, 2*npts + 1)\n\tys = np.linspace(0, 20, npts + 1)\n\t(xgrid, ygrid) = np.meshgrid(xs, ys)\n\tzgrid = wavy(xgrid, ygrid)\n\t\n\treturn (xgrid, ygrid, zgrid)", "def subsampleData(self, count):\n size = 0\n for block in self.blocks: size += len(block[1])\n subset = numpy.random.permutation(size)[:count]\n subset.sort()\n\n pos = 0\n index = 0\n ret = Dataset()\n for block in self.blocks:\n while subset[index]<(pos+len(block[1])):\n loc = subset[index] - pos\n ret.add(block[0][loc,:], block[1][loc])\n index += 1\n if index==subset.shape[0]: return ret\n pos += len(block[1])\n \n return ret", "def split_training_testing(X, Y, gnd, negative=10000, 
per=0.05):\n df_x = pd.DataFrame(X)\n df_x['y'] = Y\n df_x['gnd'] = gnd\n df_x.sort_values(by=['y'], inplace=True, ascending=False)\n frac_positive = (df_x[df_x['y'] == 1].shape[0])/float(df_x.shape[0])\n split = int(frac_positive * per * df_x.shape[0])\n df_x.reset_index(drop=True, inplace=True)\n fraud = df_x[df_x['y'] == 1]\n # Shuffle inplace\n fraud = fraud.sample(frac=1, random_state=0).reset_index(drop=True)\n test = fraud.iloc[:split]\n train_ = fraud.iloc[split:]\n train = pd.concat([train_, df_x.iloc[fraud.shape[0]:].sample(n = negative, random_state=0)], ignore_index=True)\n # Shuffle inplace\n train = train.sample(frac=1, random_state=0).reset_index(drop=True)\n #train = randomSample(train, negative)\n y_train = train['y'].as_matrix()\n y_train_gnd = train['gnd'].as_matrix()\n train = train.drop(['y'], axis=1)\n train = train.drop(['gnd'], axis=1)\n \n y_test = test['y'].as_matrix()\n y_test_gnd = test['gnd'].as_matrix()\n test = test.drop(['y'], axis=1)\n test = test.drop(['gnd'], axis=1)\n return train.as_matrix(), y_train, y_train_gnd, test.as_matrix(), y_test, y_test_gnd", "def split_train_test(ratings):\r\n ratings = ratings.sample(frac=1).reset_index(drop=True)\r\n train_user_list = []\r\n train_item_list = []\r\n train_rating_list = []\r\n test_user_list = []\r\n test_item_list = []\r\n test_rating_list = []\r\n user_pool = set(ratings['userId'].unique())\r\n for idx in user_pool:\r\n flag = 0\r\n items = ratings[ratings['userId']==idx][['itemId','rating']]\r\n for i, row in items.iterrows():\r\n if flag == 0:\r\n test_user_list.append(int(idx))\r\n test_item_list.append(int(row['itemId']))\r\n test_rating_list.append(row['rating'])\r\n flag = 1\r\n else:\r\n train_user_list.append(int(idx))\r\n train_item_list.append(int(row['itemId']))\r\n train_rating_list.append(row['rating'])\r\n\r\n train = pd.DataFrame({'userId': train_user_list, 'itemId': train_item_list, 'rating': train_rating_list}, columns=['userId', 'itemId', 'rating'])\r\n test = pd.DataFrame({'userId': test_user_list, 'itemId': test_item_list, 'rating': test_rating_list}, columns=['userId', 'itemId', 'rating'])\r\n return [train, test]\r\n \r\n\r\n \r\n #train, test = train_test_split(ratings, test_size=0.1, shuffle=True)\r\n #return [train, test]\r", "def make_test_collection():\n\n pc = cloud.Cloud(os.path.join(data_dir, \"test.las\"))\n\n # Sample to only 1000 points for speed\n pc.data.points = pc.data.points.sample(1000, random_state=12)\n\n tr = pc.data.points[\n (pc.data.points[\"x\"] > 405100) & (pc.data.points[\"y\"] > 3276400)\n ]\n tl = pc.data.points[\n (pc.data.points[\"x\"] < 405100) & (pc.data.points[\"y\"] > 3276400)\n ]\n br = pc.data.points[\n (pc.data.points[\"x\"] > 405100) & (pc.data.points[\"y\"] < 3276400)\n ]\n bl = pc.data.points[\n (pc.data.points[\"x\"] < 405100) & (pc.data.points[\"y\"] < 3276400)\n ]\n\n all = [tr, tl, br, bl]\n\n for i, points in enumerate(all):\n out = cloud.LASData(points, pc.data.header)\n out.write(os.path.join(data_dir, \"mock_collection\", \"{}.las\".format(i)))\n\n pc.data.header.reader.close()", "def build_toy_dataset(N):\n y_data = np.random.uniform(-10.5, 10.5, N)\n r_data = np.random.normal(size=N) # random noise\n x_data = np.sin(0.75 * y_data) * 7.0 + y_data * 0.5 + r_data * 1.0\n x_data = x_data.reshape((N, 1))\n return train_test_split(x_data, y_data, random_state=42)", "def create_dataset():\n nb_subjects_per_category = 100\n\n # Generate random data using numpy\n # Two values are: Concentration of red blood cell and concentration of white blood 
cell\n # Generates two values and add the corresponding value with -2. Sick people get score lower than 0\n sick = np.random.randn( nb_subjects_per_category, 2) + np.array([-2,-2])\n # Generates two values and add the corresponding value with 2. Healthy people get score higher than 0\n healthy = np.random.randn( nb_subjects_per_category, 2) + np.array([2, 2])\n\n # combines the two arrays\n full_data = np.vstack([sick, healthy])\n\n # means that those sick people get a value of zero, and those healthy get a value of 1.\n # this gives an array of 10 composed of 5 0s followed by 5 1s.\n targets = np.concatenate((np.zeros(nb_subjects_per_category), np.zeros(nb_subjects_per_category) + 1))\n\n # Plot points. This is the data set being shown in a graph.\n # features[:, 0] means that we are slicing our 2D features of shape 100,2 and taking only the first column of all data\n # features[:, 1] means that we are slicing our array by taking only the second column of our data points\n # s: is marker size (draws bigger points)\n # c: describes the possible colors. Because our targets are 0s and 1s, then there is only two colors. Also, targets\n # array shows how to color the different elements in full_data depending on the index of targets. So I know the 50\n # last elements in full_data will have their own color because the last 50 items in targets all hold same value.\n plt.scatter(full_data[:, 0], full_data[:, 1], s=40, c=targets, cmap=plt.cm.Spectral)\n # save picture of data points drawn.\n plt.savefig(\"DataPoints.png\")\n\n # can return multiple parameters at once\n return full_data, targets", "def build_data(samples, labels):\n num_samples = len(samples)\n indexes = list(range(num_samples))\n np.random.shuffle(indexes)\n num_train = int(train_ratio * num_samples)\n # Get the indexes of train data and test data.\n train_indexes = indexes[0:num_train]\n test_indexes = indexes[num_train:num_samples]\n\n # Build the train data and test data.\n train_data = samples[train_indexes]\n train_labels = labels[train_indexes]\n test_data = samples[test_indexes]\n test_labels = labels[test_indexes]\n\n return train_data, test_data, \\\n train_labels, test_labels, \\\n train_indexes, test_indexes", "def __init__(self, train_x, train_y, test_x, test_y):\n\n self.train_x = train_x\n self.train_y = train_y\n self.test_x = test_x\n self.test_y = test_y\n\n self.group_col = []\n self.numeric_col = []\n\n self.min = []\n self.max = []\n self.mean = []\n\n self.learned = []\n\n self.intervals = []\n\n self.data_sum_win = None\n self.data_sum_lose = None\n\n self.data_win = None\n self.data_lose = None\n\n self.survival_sum = [0, 0] # [1 (survived), 0 (died)]" ]
[ "0.59920436", "0.58828664", "0.5873379", "0.5842184", "0.5801387", "0.5799118", "0.575984", "0.57430387", "0.5738082", "0.57155204", "0.5697254", "0.56378454", "0.56356657", "0.56329775", "0.5632461", "0.55968785", "0.5581852", "0.55785733", "0.55710536", "0.5516363", "0.54883283", "0.54787076", "0.54734325", "0.5454353", "0.5441909", "0.5441853", "0.5438284", "0.5435374", "0.53956354", "0.5393234" ]
0.7107193
0
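As an aside on the record above: the positive document simply flattens a nested list of per-sample clips into one array while remembering how many clips each sample contributed. The following minimal sketch exercises that logic with toy NumPy arrays; the NpArray alias and the example shapes are assumptions added for illustration and are not part of the dataset record.

import numpy as np
from typing import List, Tuple

NpArray = np.ndarray  # assumed alias; the record does not define it

def build_test_dataset(x: List[List[NpArray]]) -> Tuple[NpArray, List[int]]:
    # Same flattening as the record's document: collect every clip and
    # record the clip count per sample so predictions can be regrouped later.
    x_test, clips_per_sample = [], []
    for sample in x:
        clips_per_sample.append(len(sample))
        for clip in sample:
            x_test.append(clip)
    return np.array(x_test), clips_per_sample

# Two samples with 2 and 1 clips of length 4 each (toy data).
x_test, clips_per_sample = build_test_dataset([[np.zeros(4), np.ones(4)], [np.full(4, 2.0)]])
print(x_test.shape)        # (3, 4)
print(clips_per_sample)    # [2, 1]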
Return negative log likelihood graph for gaussian constraints on a list of parameters.
def nll_gaussian(params: ztyping.ParamTypeInput, observation: ztyping.NumericalScalarType,
                 uncertainty: ztyping.NumericalScalarType) -> tf.Tensor:
    return GaussianConstraint(params=params, observation=observation, uncertainty=uncertainty)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def grad_neg_log_like(params):\n gp.set_parameter_vector(params)\n return -gp.grad_log_likelihood(y, quiet=True)", "def gaussian_likelihood(x, mu, log_std):\n prob = -0.5 * (((x - mu) / (tf.exp(log_std) + EPS)) ** 2 + 2 * log_std + np.log(2 * np.pi))\n return tf.reduce_sum(prob, axis=1)", "def gaussian(x, *parameters):\n position, sigma, amplitude, background = parameters\n return amplitude * np.exp(-(x - position)**2 / (2.0 * sigma**2)) + background", "def gaussian_log_p(params_out, x_target, dim):\n mean_x, cov_x = params_out\n x_diff = x_target - mean_x\n x_square = tf.reduce_sum((x_diff / cov_x) * x_diff, axis=[1])\n log_x_exp = -0.5 * x_square\n log_cov_x_det = tf.reduce_sum(tf.log(cov_x), axis=[1])\n log_x_norm = -0.5 * (dim * tf.log(2 * np.pi) + log_cov_x_det)\n log_p = log_x_norm + log_x_exp\n return log_p, log_x_norm, log_x_exp, tf.abs(x_diff)", "def funcG(p, x):\n A, mu, sigma, zerolev = p\n return( A * numpy.exp(-(x-mu)*(x-mu)/(2*sigma*sigma)) + zerolev )", "def gaussian_likelihood(x, mu, log_std):\n std = tf.exp(log_std)\n pre_sum = tf.square((x - mu)/std) + 2*log_std + np.log(2*np.pi)\n return -0.5 * tf.reduce_sum(pre_sum, axis=1)", "def GaussianGaussianLoss(mu_kl, log_var_kl, const_var=None) :\n if const_var is None : # Heteroscedastic\n def ggl(x, mu_log_var) :\n N = K.int_shape(mu_log_var)[1]\n mu = mu_log_var[:,:,0]\n log_var = mu_log_var[:,:,1]\n mu = mu[:,:,np.newaxis]\n log_var = log_var[:,:,np.newaxis]\n recon = -1.*K.sum(-0.5*log_var - 0.5*K.exp(-1.*log_var)*K.square(x - mu), axis=1)\n dkl = -0.5 * K.sum(-K.exp(log_var_kl) - K.square(mu_kl) + 1. + log_var_kl, axis=-1)\n return dkl\n return ggl\n else : # Homoscedastic\n const_var = float(const_var)\n def ggl(x, mu) :\n recon = -1.*K.sum(-0.5*const_var - 0.5*K.exp(-1.*const_var)*K.square(x - mu), axis=1)\n dkl = -0.5 * K.sum(-K.exp(log_var_kl) - K.square(mu_kl) + 1. 
+ log_var_kl, axis=-1)\n return dkl + recon\n return ggl", "def gaussian_likelihood(input_, mu_, log_std):\n pre_sum = -0.5 * (((input_ - mu_) / (\n tf.exp(log_std) + EPS)) ** 2 + 2 * log_std + np.log(\n 2 * np.pi))\n return tf.reduce_sum(pre_sum, axis=1)", "def LoG(sigma=(1,1), angle=0, nstd=3, normalize=True):\n\tassert (sigma[0]>0)&(sigma[1]>0), \"sigma cannot be equal to zero\"\n\tx,y = generate_filter_support(sigma,angle,nstd)\n\tx_theta,y_theta = rotate(x,y,-angle)\n\tLambda = sigma[0] / sigma[1]\n\tg = ( x_theta*x_theta + Lambda**2 * y_theta*y_theta - sigma[0]**2*(1+Lambda**2) ) / sigma[0]**4\n\tg *= gauss2d(sigma,order=(0,0),angle=angle,nstd=nstd)\n\tg[x.shape[0]//2,x.shape[1]//2] -= np.sum(g)\n\t# return [ [[0,0,1,0,0],[0,1,2,1,0],[1,2,-16,2,1],[0,1,2,1,0],[0,0,1,0,0]] ]\n\tif normalize:\n\t\treturn [g / np.linalg.norm(g,1)]\n\telse:\n\t\treturn [g]", "def prior_phi_q_gaussian(kwargs_list, prior_list):\n \n logL = 0\n \n if not kwargs_list: \n pass #So nothing crashes if there is not lens light or source light model\n else:\n for i in range(len(prior_list)):\n index, param_name, value, sigma = prior_list[i]\n\n if (('e1' in kwargs_list[index]) and ('e2' in kwargs_list[index])): \n model_e1 = kwargs_list[index]['e1']\n model_e2 = kwargs_list[index]['e2']\n\n model_vals = {}\n model_vals['phi'], model_vals['q'] = ellipticity2phi_q(model_e1,model_e2)\n\n dist = (model_vals[param_name] - value) ** 2 / sigma ** 2 / 2 \n logL -= np.sum(dist)\n \n# print('prior: {} \\n model value: {} \\n mean value: {} \\n sigma: {}'.format(param_name,\n# model_vals[param_name],\n# value,sigma))\n else: \n pass\n \n return logL", "def log_gaussian_likelihood(x, mu, log_std):\n log_gaussian_prob = -0.5 * (((x - mu) / (tf.exp(log_std) + EPS)) ** 2 - log_std - 0.5 * np.log(2 * np.pi))\n return tf.reduce_sum(log_gaussian_prob, axis=1)", "def gaussian(pars, x):\n A, b, mu, sigma = pars\n # return b + A/(np.sqrt(2*np.pi)*sigma**2) \\\n return b + A \\\n * np.exp(-.5*(x - mu)**2/sigma**2)", "def _gaussian_loglikelihood(\n self,\n parameters: NDArray,\n resids: NDArray,\n backcast: Union[float, NDArray],\n var_bounds: NDArray,\n ) -> float:\n sigma2 = np.zeros_like(resids)\n self.compute_variance(parameters, resids, sigma2, backcast, var_bounds)\n return float(self._normal.loglikelihood([], resids, sigma2))", "def soft_constraint ( self , var , value , name = '' , title = '' ) :\n \n assert isinstance ( var , ROOT.RooAbsReal ) ,\\\n \"Invalid ``v'': %s/%s\" % ( var , type ( var ) ) \n assert isinstance ( value , VE ),\\\n \"Invalid ``value'': %s/%s\" % ( value , type ( value ) )\n\n assert 0 < value.cov2() , 'Invalid error for %s' % value\n \n name = name if name else 'Gauss_%s_%s' % ( var.GetName() , self.name ) \n title = title if title else 'Gaussian Constraint(%s,%s) at %s' % ( var.GetName() , self.name , value )\n \n # value & error as RooFit objects: \n val = ROOT.RooFit.RooConst ( value.value () )\n err = ROOT.RooFit.RooConst ( value.error () )\n \n # Gaussian constrains \n gauss = ROOT.RooGaussian ( self.var_name ( name ) , title , var , val , err )\n \n # keep all the created technical stuff \n self.aux_keep.append ( val )\n self.aux_keep.append ( err )\n self.aux_keep.append ( gauss )\n\n self.info ('Constraint is created %s=%s' % ( var.name , value ) )\n return gauss", "def gaussian_weight_matrix(dispersion, L):\n return np.exp(-0.5*(dispersion[:,None]-dispersion[None,:])**2/L**2)", "def _log_prior_gradients(self):\n x = self._get_params()\n ret = np.zeros(x.size)\n [np.put(ret,i,p.lnpdf_grad(xx)) 
for i,(p,xx) in enumerate(zip(self.priors,x)) if not p is None]\n return ret", "def gaussian_likelihood(x, mu, log_std):\n\n # this expression just calculates the log of the pdf of the gaussian for a single\n # vector index, as described in the function docstring.\n # note that since we are taking the *log* of the pdf, we add terms together\n # which are multiplied together in the pdf\n # also note that rather than dividing by the std_dev, like we do in the regular pdf,\n # we divide by (std_dev + EPS), where EPS (epsilon) is a tiny number we include\n # to ensure that we don't divide by zero if std_dev = 0.\n pre_sum = -0.5 * (((x - mu) / (tf.exp(log_std) + EPS))**2 + 2*log_std + np.log(2 * np.pi))\n \n # return the sum of the items in the pre_sum vector\n # since each item is the log of the pdf for a specific index,\n # when we sum these, we get the log of the product of each\n # individual pdf -- ie. the log of the pdf evaluated\n # at this vector as a whole\n return tf.reduce_sum(pre_sum, axis=1)", "def gmmloglik(log_emlik, weights):", "def gmmloglik(log_emlik, weights):", "def PoissonGaussianLoss(mu_kl, log_var_kl) :\n def pgl(x, lambdas) :\n N = K.int_shape(lambdas)[1]\n recon = -1.*K.sum(x*lambdas - K.exp(lambdas), axis=1)\n dkl = -0.5 * K.sum(-K.exp(log_var_kl) - K.square(mu_kl) + 1. + log_var_kl, axis=-1)\n return recon + dkl\n return pgl", "def log_prior_grad(self, inputs):", "def gaussian_kl_np(mu0, log_std0, mu1, log_std1):\n var0, var1 = np.exp(2 * log_std0), np.exp(2 * log_std1)\n pre_sum = 0.5*(((mu1- mu0)**2 + var0)/(var1+EPS) - 1) + log_std1 - log_std0\n all_kls = pre_sum\n #all_kls = np.mean(all_kls)\n all_kls = np.clip(all_kls, 0, 1/EPS) ### for stability\n return all_kls", "def G(self, (k,t), (j,x), **params):\n d = len(x)/2\n q,dq = x[:d],x[d:]\n J = (j == True)\n _J = np.logical_not(J)\n # number of constraints\n n = len(J) \n # number of active constraints\n m = np.sum(J) # = n - len(a)\n a = self.a( (k,t), (_J,q), **params)\n lambda_ = self.lambda_( (k,t), (J,q,dq), **params)\n # unilateral constraint forces\n lambda_ = lambda_[:m] \n g = np.nan*np.zeros(n)\n g[_J] = a\n g[J] = lambda_\n return g", "def _log_prior_gradients(self):\n if self.priors.size == 0:\n return 0.\n x = self.param_array\n ret = np.zeros(x.size)\n #compute derivate of prior density\n [np.put(ret, ind, p.lnpdf_grad(x[ind])) for p, ind in self.priors.items()]\n #add in jacobian derivatives if transformed\n priored_indexes = np.hstack([i for p, i in self.priors.items()])\n for c,j in self.constraints.items():\n if not isinstance(c, Transformation):continue\n for jj in j:\n if jj in priored_indexes:\n ret[jj] += c.log_jacobian_grad(x[jj])\n return ret", "def log_gaussian_density(x, mu, L):\n\n D = x.shape[-1]\n # print(\"x shape:\", x.shape)\n # print(\"mu shape:\", mu.shape)\n # print(\"L shape:\", L.shape)\n\n a = np.linalg.solve(L, x - mu) # (..., K)-array\n\n logp = - 0.5 * D * np.log(2.0 * np.pi) - np.sum(np.log(np.diagonal(L))) \\\n - 0.5 * np.sum(a**2.0, axis=-1) # (...)-array; sums only the dimension of the Gaussian vector\n\n return logp", "def correlated_gaussian_loglikelihood(xs, means, cov):\n lu,piv=sl.lu_factor(cov)\n\n lambdas=np.diag(lu)\n\n ndim=xs.shape[0]\n \n ds=(xs-means)*sl.lu_solve((lu,piv), xs-means)/2.0\n\n return -np.log(2.0*np.pi)*(ndim/2.0)-0.5*np.sum(np.log(lambdas))-np.sum(ds)", "def logp_grad(self, xs, ys, fs, **kwargs):", "def neg_log_likelihood_all_bp(param, bp_events, end_time, block_pair_size, cache):\n alpha = param[0]\n beta = param[1]\n mu = param[2]\n\n # If 
the bounds for minimize are violated, return 0, the largest value possible.\n if mu <= 0 or alpha < 0 or beta <= 0:\n return 0.\n\n if cache['prev_beta'] == beta:\n cache['beta_changed'] = False\n else:\n cache['prev_beta'] = beta\n cache['beta_changed'] = True\n\n return -block_pair_conditional_log_likelihood(bp_events, mu, alpha, beta,\n end_time, block_pair_size, cache=cache)", "def estimate_gaussian_params(X):\n mu = X.mean(axis=0)\n var = X.std(axis=0)**2.0\n return mu,var", "def src_gauss(l, m, sigma_lm, A=1., i=0., pa=0., l0=0., m0=0.):\n l = np.atleast_1d(l)\n m = np.atleast_1d(m)\n sigma_x = sigma_lm\n sigma_y = sigma_lm * np.cos(i)\n a = 0.5 * ((np.cos(pa) / sigma_x)**2. + (np.sin(pa) / sigma_y)**2.)\n b = 0.5 * np.sin(2. * pa) * (sigma_x**-2. - sigma_y**-2.)\n c = 0.5 * ((np.sin(pa) / sigma_x)**2. + (np.cos(pa) / sigma_y)**2.)\n p = a * (l - l0)**2. + b * (l - l0) * (m - m0) + c * (m - m0)**2.\n I = A * np.exp(-p) / (2. * pi * sigma_x * sigma_y)\n return I" ]
[ "0.64535975", "0.5904606", "0.5901875", "0.5888618", "0.58798784", "0.5872534", "0.5868868", "0.5861738", "0.5828152", "0.58248097", "0.57563585", "0.5716952", "0.57024264", "0.5620952", "0.5620016", "0.5609774", "0.56095904", "0.5586873", "0.5586873", "0.55866486", "0.5584652", "0.55563205", "0.5499539", "0.5488597", "0.54883754", "0.5488366", "0.5477223", "0.5472099", "0.54671925", "0.54653955" ]
0.6091901
1
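For orientation, the nll_gaussian record above returns a Gaussian constraint term, i.e. the summed negative log likelihood of the observed values under Gaussian uncertainties around the constrained parameters. The sketch below reproduces that quantity with plain NumPy for made-up numbers; it illustrates the underlying math only and is not the wrapped library's API.

import numpy as np

def gaussian_constraint_nll(params, observation, uncertainty):
    # Sum of -log N(observation | param, uncertainty) over the constrained
    # parameters, keeping the constant 0.5*log(2*pi) term for completeness.
    params = np.asarray(params, dtype=float)
    observation = np.asarray(observation, dtype=float)
    uncertainty = np.asarray(uncertainty, dtype=float)
    return float(np.sum(
        0.5 * ((params - observation) / uncertainty) ** 2
        + np.log(uncertainty)
        + 0.5 * np.log(2.0 * np.pi)
    ))

# Toy values: two parameters constrained to measurements 1.0 +/- 0.1 and -0.5 +/- 0.2.
print(gaussian_constraint_nll(params=[1.2, -0.4], observation=[1.0, -0.5], uncertainty=[0.1, 0.2]))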
Validates the shape of the result tables.
def _validate_result_shape(self, mean, output):
    self.assertTrue(isinstance(mean, ITableWorkspace))
    self.assertEqual(mean.columnCount(), 6)
    self.assertEqual(mean.rowCount(), 8)

    self.assertTrue(isinstance(output, WorkspaceGroup))
    self.assertEqual(len(output), 3)

    for idx in range(len(output)):
        wks = output.getItem(idx)
        self.assertTrue(isinstance(wks, ITableWorkspace))
        self.assertEqual(wks.columnCount(), 3)
        self.assertEqual(wks.rowCount(), 196)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _validate(self, obj):\n assert (self._confidence in obj.columns and self._predicted in obj.columns\n and self._groundtruth in obj.columns), \\\n \"Must at least have '%s', '%s' and '%s' columns.\" \\\n % (self._confidence, self._predicted, self._groundtruth)\n assert len(obj['groundtruth']) == len(obj['predicted']) == len(obj['confidence']), \\\n \"Dataframe columns are inconsistent \"\n\n if len(obj.index) < 2:\n self._logger.fatal(\"Stored procedure returned empty dataframe\")\n raise RuntimeError(\"Stored procedure returned empty dataframe\")\n\n self._logger.debug(obj.head)", "def validate(self):\n self._validate_time_index()\n self._validate_num_profiles()\n self._validate_merge_col_exists()\n self._validate_unique_merge_col()\n self._validate_merge_col_overlaps()", "def test_validation_function(self):\n\n for data in ('tbldata', 'dihedraldata', 'rdcdata', 'danidata'):\n v = self.web.query_nodes(key=data)\n\n if not v.empty():\n self.assertTrue(validate_tbl(v.value, pcs=False))", "def _validate_table(self, table_name, table_meta, table_data=None, errors=None):\n errors = [] if errors is None else errors\n dtypes = self.get_dtypes(table_name, ids=True, errors=errors)\n\n # Primary key field exists and its type is 'id'\n primary_key = table_meta.get('primary_key')\n if primary_key:\n pk_field = table_meta['fields'].get(primary_key)\n\n if not pk_field:\n errors.append(\n f'Invalid primary key: \"{primary_key}\" not found in table \"{table_name}\"')\n elif pk_field['type'] != 'id':\n errors.append(\n f'Primary key \"{primary_key}\" of table \"{table_name}\" not of type \"id\"')\n\n if table_data is not None:\n for column in table_data:\n try:\n dtype = dtypes.pop(column)\n table_data[column].dropna().astype(dtype)\n except KeyError:\n message = 'Unexpected column in table `{}`: `{}`'.format(table_name, column)\n errors.append(message)\n except ValueError as ve:\n message = 'Invalid values found in column `{}` of table `{}`: `{}`'.format(\n column, table_name, ve)\n errors.append(message)\n\n # assert all dtypes are in data\n if dtypes:\n errors.append(\n 'Missing columns on table {}: {}.'.format(table_name, list(dtypes.keys()))\n )", "def check_table(self):\n self.missing()\n return self._table(self._data_list)", "def test_row_from_columns_has_errors_table(self):\n errors_on_separate_row = True\n field_setup = None\n error_names = ['first', 'billing_address_1', 'billing_country_area']\n for as_type in ('p', 'ul', 'fieldset'):\n setup = self.setup_row_from_columns(as_type, field_setup, error_names, errors_on_separate_row)\n has_no_errors = setup[-1]\n for row in setup:\n if row == has_no_errors:\n self.assertEqual(len(row['expected']), 1)\n self.assertEqual(len(row['actual']), 1)\n else:\n self.assertGreater(len(row['expected']), 1)\n self.assertGreater(len(row['actual']), 1)\n self.assertEqual(row['expected'], row['actual'])", "def test_row_from_columns_no_errors_table(self):\n errors_on_separate_row = True\n field_setup = None\n error_names = ['non-field_name', 'not_a_field']\n for as_type in ('p', 'ul', 'fieldset'):\n setup = self.setup_row_from_columns(as_type, field_setup, error_names, errors_on_separate_row)\n for row in setup:\n self.assertEqual(len(row['expected']), 1)\n self.assertEqual(len(row['actual']), 1)\n self.assertEqual(row['expected'], row['actual'])", "def validate(self) -> None:\n\n if self.field not in self.model.table_fields:\n raise ValueError(f\"Value field {self.field} not present in {self.model.table}\")\n\n if self.pivot:\n if self.pivot not in 
self.model.table_fields:\n raise ValueError(\n f\"Pivot field {self.pivot} not present in {self.model.table}\"\n )\n\n if self.connector:\n if self.connector not in self.model.table_fields:\n raise ValueError(\n f\"Connector field {self.connector} not present in {self.model.table}\"\n )\n\n for field in self.selectors:\n if field not in self.model.table_fields:\n raise ValueError(f\"Selector field {field} not present in {self.model.table}\")", "def _validate_values(self, values):\n prev_len = -1\n i = j = -1\n if values is None or len(values) == 0:\n self.shape = 0, 0\n return\n for i, row in enumerate(values):\n if prev_len == -1:\n prev_len = len(row)\n if prev_len != len(row):\n raise ValueError(f\"Row {i} differs in length: {prev_len} != {len(row)}\")\n for j, val in enumerate(row):\n if type(val) not in (int, float, complex):\n raise ValueError(f\"[{i}, {j}]: {val} is of bad type ({type(val)})\")\n if val == 0:\n self.empty_loc = (i, j)\n if i == -1:\n self.shape = 0, 0\n else:\n self.shape = i + 1, j + 1", "def validate_dataset(self):\n pass", "def validate(self):\n validated = True \n # Check that all parameters exist in the self.parameters dictionary\n for param_name in self._SCALAR_PARAMETERS:\n if param_name not in self.parameters:\n LOG.critical('%s not found in %s', param_name, self.filename)\n validated = False \n \n for param_name in self._TABLE_PARAMETERS:\n if not all([elem for elem in self.parameters[param_name]]):\n LOG.critical('%s not found in %s', param_name, self.filename)\n validated = False\n \n return validated", "def validate_input(self):\n self._validate_limits_cols_prefixed()\n self._validate_fillna_cols_prefixed()\n self._validate_ratio_input()", "def _further_validate_and_setup(self) -> None:\n\n # Make sure parameters make sense/are valid\n if len(self.validated['learners']) != len(self.validated['param_grids']):\n raise SchemaError(autos=None,\n errors='The lists of of learners and parameter '\n 'grids must be the same size.')\n if (self.validated['hashed_features'] is not None\n and self.validated['hashed_features'] == 0):\n self.validated['hashed_features'] = self._n_features_feature_hashing\n if self.validated['lognormal'] and self.validated['power_transform']:\n raise SchemaError(autos=None,\n errors='Both \"lognormal\" and \"power_transform\" '\n 'were set simultaneously.')\n if len(self.validated['learners']) != len(self.validated['param_grids']):\n raise SchemaError(autos=None,\n errors='The \"learners\" and \"param_grids\" '\n 'parameters were both set and the '\n 'lengths of the lists are unequal.')", "def test_row_from_columns_not_own_error_row_table(self):\n errors_on_separate_row = False\n field_setup = None\n error_names = None\n for as_type in ('p', 'ul', 'fieldset'):\n setup = self.setup_row_from_columns(as_type, field_setup, error_names, errors_on_separate_row)\n for row in setup:\n self.assertEqual(len(row['expected']), 1)\n self.assertEqual(len(row['actual']), 1)\n self.assertEqual(row['expected'], row['actual'])", "def _valid_result(res):\n (HEADER, RESULTS) = [0,1]\n ok = (isinstance(res, (tuple, list)) and \n len(res) == 2 and\n isinstance(res[HEADER], (tuple, list)) and\n isinstance(res[RESULTS], (tuple, list)))\n if not ok:\n return False\n\n n = len(res[HEADER])\n def _valid_row(row):\n return isinstance(row, (tuple, list)) and len(row) == n\n return reduce(and_, (_valid_row(x) for x in res[RESULTS]), True)", "def test_row_from_columns_has_errors(self):\n errors_on_separate_row = True\n field_setup = None\n error_names = ['first', 
'billing_address_1', 'billing_country_area']\n for as_type in ('p', 'ul', 'fieldset'):\n setup = self.setup_row_from_columns(as_type, field_setup, error_names, errors_on_separate_row)\n has_no_errors = setup[-1]\n for row in setup:\n if row == has_no_errors:\n self.assertEqual(len(row['expected']), 1)\n self.assertEqual(len(row['actual']), 1)\n else:\n self.assertGreater(len(row['expected']), 1)\n self.assertGreater(len(row['actual']), 1)\n self.assertEqual(row['expected'], row['actual'])", "def _validate_data(self, table_definition, data):\n if len(data) == 0:\n # Length zero columns get converted on write.\n return\n\n columns_checked = set()\n\n for column_name, column_definition in table_definition.c.items():\n if column_name in data:\n expected_type = self._expected_type(column_definition)\n is_nullable_numeric = (column_definition.nullable and\n expected_type in [int, float])\n if is_nullable_numeric:\n data[column_name] = data[column_name].fillna(value=np.nan)\n actual_type = data[column_name].dtype\n is_pandas_extension = isinstance(actual_type, ExtensionDtype)\n if expected_type is int:\n self._check_int_type(actual_type, column_name,\n is_pandas_extension, table_definition)\n elif expected_type is float:\n self._check_float_type(actual_type, column_name,\n table_definition)\n elif expected_type is str:\n self._check_str_type(actual_type, column_name, data,\n table_definition)\n else:\n raise RuntimeError(f\"Unexpected type from column \"\n f\"definitions: {expected_type}.\")\n elif not (column_definition.primary_key or\n column_definition.nullable):\n raise DismodFileError(f\"Missing column in data for table \"\n f\"'{table_definition.name}': \"\n f\"'{column_name}'\")\n columns_checked.add(column_name)\n\n extra_columns = set(data.columns).difference(table_definition.c.keys())\n if extra_columns:\n raise DismodFileError(f\"extra columns in data for table \"\n f\"'{table_definition.name}': {extra_columns}\"\n )", "def validateInputTable(self, evaluator):\n da = self._result\n if isinstance(da, xr.DataArray):\n has_changed = False\n for dim in da.dims:\n node_index = self.model.getNode(dim)\n # values of index has changed\n if list(node_index.result.values) != list(da.coords[dim].values):\n has_changed = True\n _input_properties = None\n if \"_input_properties\" in self.definition:\n _input_properties = self.model.evaluate(self._definition.split(\"# values\")[\n 0] + \"result = _input_properties\")\n else:\n _input_properties = {\"defaultValue\": 0.}\n da = da.reindex({dim: list(node_index.result.values)})\n da = da.fillna(_input_properties[\"defaultValue\"])\n\n if has_changed:\n self._result = da\n self._definition = evaluator.generateNodeDefinition(\n self.model.nodeDic, self.identifier)", "def _validate_QLr_shape(self, result, probability, group):\n\n # Test size/shape of result\n self.assertTrue(isinstance(result, MatrixWorkspace))\n self.assertEquals(result.getNumberHistograms(), 21)\n self.assertEquals(result.blocksize(), self._num_hists)\n self.assertEquals(result.getAxis(0).getUnit().unitID(), 'MomentumTransfer')\n\n # Test size/shape of probability\n self.assertTrue(isinstance(probability, MatrixWorkspace))\n self.assertEquals(probability.getNumberHistograms(), 3)\n self.assertEquals(probability.blocksize(), self._num_hists)\n self.assertEquals(result.getAxis(0).getUnit().unitID(), 'MomentumTransfer')\n\n # Test size/shape of group fitting workspaces\n self.assertTrue(isinstance(group, WorkspaceGroup))\n self.assertEquals(group.getNumberOfEntries(), 
self._sample_ws.getNumberHistograms())\n\n # Test sub workspaces\n for i in range (group.getNumberOfEntries()):\n sub_ws = group.getItem(i)\n self.assertTrue(isinstance(sub_ws, MatrixWorkspace))\n self.assertEqual(sub_ws.getNumberHistograms(), 5)\n self.assertEquals(sub_ws.getAxis(0).getUnit().unitID(), 'DeltaE')", "def _validate_plaincolumns(self):\n\n # assert tuples for plaincolumns and plaincolumns to be PlainColumn\n if not isinstance(self.plaincolumns, tuple):\n raise ValueError(\"PlainFrame was instantiated incorrectly. \"\n \"`plaincolumns` needs to be of type `tuple`. \"\n \"However, {} was encountered. Please use \"\n \"`PlainFrame.from_plain` instead for convenient \"\n \"instantiation and proper type casts.\"\n .format(type(self.plaincolumns)))\n\n not_plaincolumn = [type(column)\n for column in self.plaincolumns\n if not isinstance(column, PlainColumn)]\n\n if not_plaincolumn:\n raise ValueError(\"PlainFrame was instantiated incorrectly. \"\n \"Elements of `plaincolumns` needs to be of type \"\n \"`PlainColumn`. However, {} was encountered. \"\n \"Please use `PlainFrame.from_plain` instead for \"\n \"convenient instantiation and proper type casts.\"\n .format(not_plaincolumn))\n\n # assert equal number of values per column\n row_lenghts = {len(column.values) for column in self.plaincolumns}\n if len(row_lenghts) > 1:\n raise ValueError(\"Input data has varying number of values per \"\n \"column. Please check provided input data.\")\n\n # assert unique column names\n duplicates = {x for x in self.columns if self.columns.count(x) > 1}\n if duplicates:\n raise ValueError(\"Duplicated column names encountered: {}. \"\n \"Please use unique column names.\"\n .format(duplicates))", "def validate():", "def clean_table(self):\n return False", "def test_get_error_data_table_all_col_errors(self):\n field_setup = None\n error_names = None\n prepared_info = self.setup_error_data(field_setup, error_names, True)\n for row in prepared_info:\n self.assertEqual(row['expected'], row['actual'])", "def _validate_from_plain(data: Sequence[Sequence],\n columns: Sequence[str],\n dtypes: Sequence[str],\n row_wise: bool):\n\n if row_wise:\n # assert equal number of elements across rows\n row_lenghts = {len(row) for row in data}\n if len(row_lenghts) > 1:\n raise ValueError(\"Input data has varying number of values per \"\n \"row. Please check provided input data\")\n\n # assert equal number of columns and elements per row\n row_lenghts.add(len(columns))\n if len(row_lenghts) > 1:\n raise ValueError(\n \"Number of columns has to equal the number of \"\n \"values per row. Please check column names and \"\n \"provided input data.\")\n\n # assert equal number of dtypes and elements per row\n row_lenghts.add(len(dtypes))\n if len(row_lenghts) > 1:\n raise ValueError(\"Number of dtypes has to equal the number of \"\n \"values per row. Please check dtypes and \"\n \"provided input data.\")\n\n else:\n # assert equal number of elements across columns\n col_lengths = {len(col) for col in data}\n if len(col_lengths) > 1:\n raise ValueError(\"Input data has varying number of values per \"\n \"columns. Please check provided input data\")\n\n # assert equal number of columns in data, column names and dtypes\n col_count = len(columns)\n if col_count != len(data):\n raise ValueError(\"Input data and column names have different \"\n \"amount of columns. Please check provided \"\n \"input data\")\n\n if col_count != len(dtypes):\n raise ValueError(\"Input data and dtypes have different \"\n \"amount of columns. 
Please check provided \"\n \"input data\")", "def _verify_integrity(self):\n if len(self.data.shape) != 1:\n raise ValueError(\n \"Data array must be one dimensional \"\n \"(is {})\".format(len(self.data.shape))\n )\n\n if len(self.shape.shape) != 2:\n raise ValueError(\n \"Shape array must be two dimensional \"\n \"(is {})\".format(len(self.shape.shape))\n )\n\n shape_size, data_size = self._cumsum[-1], self.data.size\n\n if not shape_size == data_size:\n raise ValueError(\n \"Size of data ({data_size}) does not match that \"\n \"of the given shapes ({shape_size}).\".format(\n data_size=data_size, shape_size=shape_size\n )\n )", "def validate_full_schema(self):\n #self.check_duplicate_labels()\n for record in self.extension_schema['schema']['@graph']:\n #self.check_whether_atid_and_label_match(record)\n if record['@type'] == \"rdfs:Class\":\n self.validate_class_schema(record)\n #self.validate_class_label(record[\"@id\"])\n self.validate_validation_field(record)\n elif record['@type'] == \"rdf:Property\":\n self.validate_property_schema(record)\n #self.validate_property_label(record[\"@id\"])\n #self.validate_domainIncludes_field(record[\"http://schema.org/domainIncludes\"])\n #self.validate_rangeIncludes_field(record[\"http://schema.org/rangeIncludes\"])\n #else:\n # raise ValueError('wrong @type value found: {}'.format(record))", "def validate(self):\n super().validate()\n frame = getattr(self, 'frame', None)\n if frame is None:\n raise ValueError('Missing columns %s since no frame' % ', '.join(\n self.required_cols))\n cols = set(list(self.frame))\n missing = sorted(self.required_cols - cols)\n if missing:\n raise ValueError('Missing columns: [%s]' % ', '.join(missing))", "def verify_table(self):\n metadata = MetaData()\n metadata.reflect(bind = StatusSource.engine)\n mine = str(self.table.columns)\n verified = str(metadata.tables[self.tablename].columns)\n if mine != verified:\n raise DbException(\"Table '%s' in the database has schema %s whereas the query's schema is %s\" % (self.tablename, verified, mine))", "def _check_shape(placeholder_shape, data_shape):\n\n return True", "def validate_result(self):\n raise NotImplementedError" ]
[ "0.65462273", "0.6470221", "0.64296114", "0.64213187", "0.62917054", "0.6224976", "0.6170748", "0.6142788", "0.61046374", "0.607582", "0.6027592", "0.6017014", "0.6013583", "0.6006808", "0.5963135", "0.59552646", "0.59322536", "0.59297746", "0.59133005", "0.59023815", "0.58904946", "0.58771855", "0.587635", "0.58741534", "0.5856266", "0.58501047", "0.5842429", "0.5840887", "0.58262056", "0.58210653" ]
0.69808173
0
Test use_cache with a missing cache file. Should generate error.
def test_use_cache_missing_file():
    # Generate cached files
    cmd_list = [NETMIKO_GREP] + ['interface', 'all']
    _, full_dir = find_netmiko_dir()
    remove_file = 'bad_device.txt'
    remove_file_full = "{}/{}".format(full_dir, remove_file)
    if os.path.exists(remove_file_full) and os.path.isfile(remove_file_full):
        os.remove(remove_file_full)
    cmd_list = [NETMIKO_GREP] + ['--use-cache', '--display-runtime', 'interface', 'all']
    (output, std_err) = subprocess_handler(cmd_list)
    assert "Some cache files are missing: unable to use --use-cache option." in std_err
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test__cache_notfound(self):\n # Access to a protected member _cache of a client class\n # pylint: disable=W0212\n treadmill.zkutils.get.side_effect = \\\n kazoo.exceptions.NoNodeError\n\n zkclient = kazoo.client.KazooClient()\n self.evmgr._cache(zkclient, 'foo#001')\n\n appcache = os.path.join(self.cache, 'foo#001')\n self.assertFalse(os.path.exists(appcache))", "def test_local_cache():", "def test_not_in_cache(self):\n cache = LRUCache(5)\n cache.put(1, 'aaa')\n with pytest.raises(KeyError):\n cache.get(2)", "def test_cache_create(self):\n self.assertTrue(self.host_updater.refresh_cache())\n self.assertTrue(os.path.exists(self.host_updater.cache_file))", "def test_no_func_name_raises_exception(self, _):\n with self.assertRaises(CacheHelperFunctionError):\n @cached(60*5)\n def test_function():\n pass", "def test_no_func_type_raises_exception(self, _):\n with self.assertRaises(CacheHelperFunctionError):\n @cached(60*5)\n def test_function():\n pass", "def test_cache_get_non_existent_item(self):\n self.assertEqual(self.cache.get('ghost'), None)\n self.assertEqual(self.cache.get('ghost', 'never exists'), 'never exists')", "def test__cache(self):\n # Access to a protected member _cache of a client class\n # pylint: disable=W0212\n treadmill.zkutils.get.return_value = {}\n\n zkclient = kazoo.client.KazooClient()\n self.evmgr._cache(zkclient, 'foo#001')\n\n appcache = os.path.join(self.cache, 'foo#001')\n self.assertTrue(os.path.exists(appcache))", "def test_file_list_cache():\n from nose.tools import raises\n\n tmp = FileListCache()\n\n @raises(TypeError)\n def test_tmp():\n \"\"\" nost test \"\"\"\n tmp.cache_file_list_dict = 0\n\n test_tmp()", "def test_clear_cache_silent_fail():\n shutil.rmtree(yvs.cache.LOCAL_CACHE_DIR_PATH)\n yvs.main()\n case.assertFalse(\n os.path.exists(yvs.cache.LOCAL_CACHE_DIR_PATH),\n 'local cache directory exists')", "def test_ignore_cache(self):\r\n settings = get_settings(filenames={})\r\n settings['CACHE_PATH'] = self.temp_cache\r\n settings['READERS'] = {'asc': None}\r\n\r\n generator = ArticlesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n self.assertTrue(hasattr(generator, '_cache_open'))\r\n orig_call_count = generator.readers.read_file.call_count\r\n\r\n settings['LOAD_CONTENT_CACHE'] = False\r\n generator = ArticlesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n generator.readers.read_file.assert_called_count == orig_call_count", "def test_use_cache():\n # Generate cached files\n cmd_list = [NETMIKO_GREP] + ['interface', 'all']\n subprocess_handler(cmd_list)\n cmd_list = [NETMIKO_GREP] + ['--use-cache', '--display-runtime', 'interface', 'all']\n (output, std_err) = subprocess_handler(cmd_list)\n match = re.search(r\"Total time: (0:.*)\", output)\n time = match.group(1)\n _, _, seconds = time.split(\":\")\n seconds = float(seconds)\n assert seconds <= 1\n assert 'pynet_rtr1.txt:interface FastEthernet0' in output", "def use_cached_files(self, cache_key):\r\n pass", "def test_clear_cache():\n yvs.main()\n case.assertFalse(\n os.path.exists(yvs.cache.LOCAL_CACHE_DIR_PATH),\n 'local cache directory exists')", "def test_ignore_cache(self):\r\n settings = get_settings(filenames={})\r\n settings['CACHE_PATH'] = self.temp_cache\r\n 
settings['READERS'] = {'asc': None}\r\n\r\n generator = PagesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n self.assertTrue(hasattr(generator, '_cache_open'))\r\n orig_call_count = generator.readers.read_file.call_count\r\n\r\n settings['LOAD_CONTENT_CACHE'] = False\r\n generator = PagesGenerator(\r\n context=settings.copy(), settings=settings,\r\n path=CONTENT_DIR, theme=settings['THEME'], output_path=None)\r\n generator.readers.read_file = MagicMock()\r\n generator.generate_context()\r\n generator.readers.read_file.assert_called_count == orig_call_count", "def test_cache_pollution(self):\n with self._test_checksum_setup(self.tempdir.name) as setupdata:\n filename, data, expected_checksum = setupdata\n\n # corrupt the file\n with open(os.path.join(self.tempdir.name, filename), \"r+b\") as fh:\n fh.seek(0)\n real_first_byte = fh.read(1).decode(\"latin-1\")\n fh.seek(0)\n fh.write(chr(ord(real_first_byte) ^ 0xff).encode(\"latin-1\"))\n\n with self.assertRaises(ChecksumValidationError):\n with self.caching_backend.read_contextmanager(filename, expected_checksum) as cm:\n self.assertEqual(cm.read(), data)\n\n # un-corrupt the file\n with open(os.path.join(self.tempdir.name, filename), \"r+b\") as fh:\n fh.seek(0)\n real_first_byte = fh.read(1).decode(\"latin-1\")\n fh.seek(0)\n fh.write(chr(ord(real_first_byte) ^ 0xff).encode(\"latin-1\"))\n\n with self.caching_backend.read_contextmanager(filename, expected_checksum) as cm:\n self.assertEqual(cm.read(), data)", "def test_get_datasource_cache_miss(in_memory_runtime_context) -> None:\n context = in_memory_runtime_context\n\n name = \"my_fake_datasource_name\"\n\n # Initial GET will miss the cache, necessitating store retrieval\n with mock.patch(\n \"great_expectations.core.datasource_dict.DatasourceDict.__getitem__\"\n ) as mock_get:\n context.get_datasource(name)\n\n assert mock_get.called\n\n # Subsequent GET will retrieve from the cache\n with mock.patch(\n \"great_expectations.data_context.store.DatasourceStore.get\"\n ) as mock_get:\n context.get_datasource(name)\n\n assert not mock_get.called", "def test_refresh_error_create_cache(self):\n mock_method_path = ('dbtobindzone.updaters.host_updater'\n '.HostUpdater.cache_file')\n patch = mock.patch(mock_method_path, new_callable=mock.PropertyMock)\n with patch as mock_method:\n mock_method.return_value = '/TMP/DIR/NOT/EXISTS'\n result = self.host_updater.refresh_cache()\n self.assertFalse(result)", "def test_filecache_directory_already_exists(self, tmpdir, sess):\r\n url = self.url + ''.join(sample(string.ascii_lowercase, randint(2, 4)))\r\n\r\n # Make sure our cache dir DOES exist\r\n tmp_cache = tmpdir.join('missing', 'folder', 'name').strpath\r\n os.makedirs(tmp_cache, self.cache.dirmode)\r\n\r\n assert os.path.exists(tmp_cache)\r\n\r\n self.cache.directory = tmp_cache\r\n\r\n # trigger a cache save\r\n sess.get(url)\r\n\r\n assert True # b/c no exceptions were raised\r", "def testExistsMissing(self):\n ref = cache.CacheReference(self.cache, 'key')\n self.cache._KeyExists.return_value = True\n self.assertTrue(ref.Exists())", "def test_ref_cache_with_tempfile(self):\n # update cache file from db\n self.host_updater.refresh_cache()\n # create temp_cache_file to test it doesnt broke system\n with open(self.host_updater.temp_cache_file, 'a'):\n pass\n self.host_updater.refresh_cache()\n 
self.assertFalse(os.path.exists(self.host_updater.temp_cache_file))", "def test_no_io_on_url():\n file = get_image_cache_file()\n file.url\n assert not file.storage.exists.called\n assert not file.storage.open.called", "def test_non_existent_key(self):\n ttl = self.cache.ttl('does_not_exist')\n self.assertEqual(ttl, 0)", "def _check_cache(self):\n return os.path.exists(self._cache_key)", "def test_add_filelist_to_cache():\n from nose.tools import raises\n\n tmp = FileListCache()\n assert tmp.add_filelist_to_cache() is False\n\n @raises(TypeError)\n def test_tmp():\n \"\"\" nost test \"\"\"\n tmp.add_filelist_to_cache(file_list=1)\n\n test_tmp()", "def test_db_got_error_without_cache(self):\n mock_method_path = ('dbtobindzone.fetcher.host_data_fetcher'\n '.HostDataFetcher.is_fetch_success')\n with mock.patch(mock_method_path) as mock_method:\n mock_method.return_value = False\n self.host_updater.refresh_cache()\n self.assertEqual(self.host_updater.data, [])", "def testExists(self):\n ref = cache.CacheReference(self.cache, 'key')\n self.cache._KeyExists.return_value = False\n self.assertFalse(ref.Exists())", "def test_cachefile_timestamp(self):\n data = EngineTest.testdata['test_cachefile']\n filenames = { 'layout': 'layout.pyhtml',\n 'page': 'account_create.pyhtml',\n 'form': 'account_form.pyhtml',\n }\n expected = data['expected']\n context = { 'params': { } }\n cache_filenames = ['account_create.pyhtml.cache', 'account_form.pyhtml.cache']\n try:\n for key, filename in filenames.items():\n write_file(filename, data[key])\n props = { 'prefix': 'account_', 'postfix':'.pyhtml', 'layout':'layout.pyhtml', 'cache':True }\n ## create cache files and check them\n time.sleep(1)\n curr_time = time.time()\n engine = tenjin.Engine(**props)\n output = engine.render(':create', context)\n for fname in filenames.values():\n self.assertExists(fname) # file created?\n self.assertTrue(engine.get_template(fname).timestamp < curr_time)\n self.assertEquals(os.path.getmtime(fname), engine.get_template(fname).timestamp)\n ## save current cached object\n cached = {}\n for fname in filenames.values():\n cached[fname] = engine.get_template(fname)\n ## confirm that get_template() returns the same object\n for fname in filenames.values():\n self.assertEquals(id(engine.get_template(fname)), id(cached[fname]))\n ## change timestamp of templates to be old\n for fname in filenames.values():\n atime = mtime = os.path.getmtime(fname) - 10\n os.utime(fname, (atime, mtime))\n ## check whether new caches are created\n for fname in filenames.values():\n t = engine.get_template(fname)\n self.assertNotEqual(id(t), id(cached[fname]))\n self.assertEquals(os.path.getmtime(fname), t.timestamp)\n finally:\n _remove_files(filenames.values())", "def test_cache_results(self):\n kbase_sdk.init_context.cache_clear()\n config_path = os.path.join(self.test_app_dir, 'kbase.yaml')\n # Move kbase.yaml to kbase.yaml.bak\n context1 = kbase_sdk.init_context(self.test_app_dir)\n shutil.move(config_path, config_path + '.bak')\n context2 = kbase_sdk.init_context(self.test_app_dir)\n # If it's not caching, then MissingPath would be raised\n self.assertEqual(context1, context2)\n shutil.move(config_path + '.bak', config_path)", "def test_load_from_cache(self, mock_config_file):\n config_instance, working_dir = mock_config_file\n configuration = Configuration()\n\n assert configuration.config['core']['import_demo_on_first_login'] is False\n assert configuration.config['environment']['repo_url'] == \\\n 
[\"https://github.com/gigantum/base-images-testing.git\"]\n assert configuration.config['git']['working_directory'] == configuration.app_workdir" ]
[ "0.73853165", "0.73476803", "0.7076356", "0.68939716", "0.684688", "0.68456286", "0.6829679", "0.67922646", "0.67754066", "0.6688267", "0.6683732", "0.6657779", "0.6619422", "0.6537959", "0.65214545", "0.6512296", "0.65074056", "0.64840156", "0.6457274", "0.64564806", "0.64249337", "0.64222884", "0.6332393", "0.63280225", "0.6315099", "0.63129187", "0.62610793", "0.62255186", "0.61997455", "0.61794627" ]
0.7576966
0
Verify failed devices are showing
def test_display_failed():
    cmd_list = [NETMIKO_GREP] + ['interface', 'all']
    (output, std_err) = subprocess_handler(cmd_list)
    assert "Failed devices" in output
    failed_devices = output.split("Failed devices:")[1]
    failed_devices = failed_devices.strip().split("\n")
    failed_devices = [x.strip() for x in failed_devices]
    assert len(failed_devices) == 2
    assert "bad_device" in failed_devices
    assert "bad_port" in failed_devices
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_verify_state_of_a_device():", "def test_hide_failed():\n cmd_list = [NETMIKO_GREP] + ['--hide-failed', 'interface', 'all']\n (output, std_err) = subprocess_handler(cmd_list)\n assert \"Failed devices\" not in output", "def test_verify_list_of_devices_in_my_network():", "def test_verify_connection_to_a_device():", "def test_verify_state_of_a_device_when_disconnected_from_the_device():", "def verify_diagnostics_and_usage_screen_name():\r\n msg = \"\"\r\n try:\r\n sleep(10)\r\n if g.platform == 'android':\r\n\r\n 'Verify screen name of diagnostics and usage page'\r\n sleep(3)\r\n \r\n flag1,msg = element_textvalidation('Diagnostics_usage_lbl','Diagnostics and Usage')\r\n \r\n \r\n flag = False if not (flag1) else True\r\n \r\n else:\r\n\r\n 'Verify screen name of diagnostics and usage label'\r\n \r\n flag1,msg = label_textvalidation('Diagnostics_usage_lbl','Diagnostics and Usage')\r\n \r\n\r\n flag = False if not (flag1) else True\r\n if not flag:\r\n return False, msg\r\n else:\r\n print \"License agreement screen name is displayed properly\"\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return False, msg\r\n return True, msg", "def test_launch_failures_hw(self):\n self.test_launch_failures()", "def check_expected_devices():\n\n res = devices()\n error = extract_error_from(res)\n if error:\n if log.isEnabledFor(logging.DEBUG):\n log.debug(\"Check expected devices got error result: {}\".format(res))\n return\n\n expected_usb_devices = __opts__.get(\"expected_usb_devices\", [])\n vendors_products = [ \"{}:{}\".format(dev[\"vendor\"], dev[\"product\"]) for dev in res[\"values\"] ]\n\n for dev in expected_usb_devices:\n if dev not in vendors_products:\n vendor, product = dev.split(\":\")\n tag = \"system/usb/{}/{}/not_connected\".format(vendor, product)\n __salt__[\"minionutil.trigger_event\"](tag)", "def check_device_state(self):", "def verify_diagnostics_and_usage_text():\r\n msg, status = \"\", True\r\n try:\r\n sleep(10)\r\n if g.platform == 'android':\r\n sleep(3)\r\n 'verify end user license agreement label'\r\n flag1,msg = element_textvalidation('Diagnostics_usage_lbl','Diagnostics and Usage')\r\n sleep(4) \r\n 'Read verification input data'\r\n text_to_verify = util.read_file(g.demomode_Diagnostcs_nd_usage_txt)\r\n 'verify end user license agreement label'\r\n flag2,msg = element_textvalidation('Diagnostics_usage_txt',text_to_verify)\r\n \r\n flag = False if not (flag1 and flag2) else True\r\n else:\r\n\r\n \r\n 'Verify diagnostics usage text'\r\n flag1 = ui_controls.ui_element(get_obj_identifier('Diagnostics_usage_lbl'))\r\n 'Verify diagnostics usage text'\r\n flag2 = ui_controls.ui_element(get_obj_identifier('Diagnostics_usage_txt'))\r\n \r\n sleep(4) \r\n \r\n flag = False if not (flag1 and flag2) else True\r\n if not flag:\r\n return False, msg\r\n else:\r\n print \"License agreement screen name is displayed properly\"\r\n \r\n \r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return False, msg\r\n return True, msg", "def test_verification_failed(self):\n pass", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def loading_failures():\n\n import simtk.openmm as mm\n print(mm.Platform.getPluginLoadFailures())", "def 
test_system_fail_result(self):\r\n data = {\"EdX-ID\": self.receipt_id,\r\n \"Result\": 'SYSTEM FAIL',\r\n \"Reason\": 'Memory overflow',\r\n \"MessageType\": 'You must retry the verification.'}\r\n json_data = json.dumps(data)\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'),\r\n data=json_data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',\r\n HTTP_DATE='testdate'\r\n )\r\n attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)\r\n self.assertEqual(attempt.status, u'must_retry')\r\n self.assertEqual(attempt.error_code, u'You must retry the verification.')\r\n self.assertEqual(attempt.error_msg, u'\"Memory overflow\"')\r\n self.assertEquals(response.content, 'OK!')", "def check_errors(self) -> None:", "def test_device_status(self):\n #071031031E3067\n self.ms.add_response({'\\x14071031031E3067\\x0D': 'PA\\x0D'})\n # Network / Device ID\n response = self.upb.status((49, 3))\n self.assertTrue(response)", "def devicesSatusVerification(windowname):\n try:\n AppButtons = getAppButtons(windowname)\n DeviceStatus = AppButtons[10:14]\n DeviceStatus_Descriptions = []\n for device in DeviceStatus:\n Descriptionsofsettings = getApplicatontitle(device)\n DeviceStatus_Descriptions.append(Descriptionsofsettings)\n except Exception as er:\n return False\n return DeviceStatus_Descriptions", "def details_not_matching():\n print(\"login details don't match.\")", "def accept_diagnostics_and_usage():\r\n msg, status = \"\", True\r\n\r\n try:\r\n sleep(5)\r\n if g.platform == 'android':\r\n sleep(3)\r\n 'Click on diagnostics and usage agree button'\r\n flag1 = ui_controls.button(get_obj_identifier('diagnostics_accept_btn'))\r\n\r\n status = False if not (flag1) else True\r\n else:\r\n \r\n 'Click on Agree button in EULA page for IOS'\r\n flag = ui_controls.button(get_obj_identifier('diagnostics_accept_btn'))\r\n status = flag\r\n\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n status = False\r\n\r\n return status, msg", "def test_get_devices(self):\n pass", "def test_get_devices(self):\n pass", "def verify_no_cable_errors(self):\n i = 0\n for dpid in self.dpids:\n i += 1\n labels = {'dp_id': '0x%x' % int(dpid), 'dp_name': 'faucet-%u' % i}\n self.assertEqual(\n 0, self.scrape_prometheus_var(\n var='stack_cabling_errors_total', labels=labels, default=None))\n self.assertGreater(\n self.scrape_prometheus_var(\n var='stack_probes_received_total', labels=labels), 0)", "def checkstatus(self):\n # define cross-platform /dev/null\n devnull = open(os.devnull, 'w')\n\n # if the OS is windows\n if os.name == 'nt':\n ping = ['ping', '-n', '1', self.device]\n\n # if the OS is posix\n else:\n ping = ['ping', '-c', '1', self.device]\n\n print(self.device + ' Checking for device availability', end='', flush=True)\n time.sleep(5)\n count = 0\n while count < 2:\n print('.', end='', flush=True)\n ping_call = subprocess.Popen(ping, stdout=devnull)\n returncode = ping_call.wait()\n if returncode == 0:\n break\n time.sleep(1)\n count = count + 1\n\n print('')\n if count == 2:\n print(self.device + ' Device is not up')\n print(self.device + ' Exiting...')\n return 'FAIL'\n else:\n print(self.device + ' Device is Online')\n print(self.device + ' Please wait for script initialization')\n time.sleep(5)", "async def test_failed_samples(self):\n self.set_source_parameter(\"test_result\", [\"failed\"])\n response = await self.collect(get_request_json_return_value=self.JMETER_JSON)\n 
self.assert_measurement(response, value=\"6\", entities=[])", "def test_successful_verification(self):\n for i in (-2, -1, 0, 1, 2):\n\n description = \"TOTP not verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertTrue(confirmed, description)\n\n self.relate.confirm = False", "def verify():", "def _check(self,err):\r\n if err < 0:\r\n buf_size = 128\r\n buf = create_string_buffer('\\000' * buf_size)\r\n self.nidaq.DAQmxGetErrorString(err,byref(buf),buf_size)\r\n raise RuntimeError('NI-DAQ call failed with error %d: %s'%(err,repr(buf.value)))", "def _check(self):\n\t\tif not self._raven:\n\t\t\traise NoDeviceFoundException", "def verify_failure():\n try:\n failure_alert = driver.find_element_by_class_name('css-1f1jd2h')\n if (failure_alert.is_displayed()):\n return \"Success\"\n else:\n return \"No Failure Alert\"\n except Exception as e:\n return \"No Failure Alert\"", "def chkDevForErrors(self,devPart):\n\t self.host.chk_if_dpp_errors(devPart)\n\t self.host.vgcproc.chk_if_ue_errors(devPart)\n \n\t return 1" ]
[ "0.69230753", "0.68009025", "0.6730258", "0.6647889", "0.6592971", "0.65443087", "0.6511433", "0.6507385", "0.6504286", "0.6377974", "0.636535", "0.6360225", "0.6213847", "0.62052417", "0.61863875", "0.6157639", "0.61221045", "0.6117112", "0.60790807", "0.59957546", "0.59957546", "0.5981816", "0.5981709", "0.59799004", "0.5957767", "0.5946974", "0.5929699", "0.59249514", "0.59092", "0.5901253" ]
0.73280716
0
Test function that finds netmiko_dir.
def test_find_netmiko_dir():
    base_dir_check = '/home/gituser/.netmiko'
    full_dir_check = '/home/gituser/.netmiko/tmp'
    base_dir, full_dir = find_netmiko_dir()
    assert base_dir == base_dir_check and full_dir == full_dir_check
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_obtain_netmiko_filename():\n create_dir_test = True\n file_name_test = '/home/gituser/.netmiko/tmp/test_device.txt'\n file_name = obtain_netmiko_filename('test_device')\n assert file_name == file_name_test\n\n if create_dir_test:\n uuid_str = str(uuid.uuid1())\n junk_dir_base = '/home/gituser/JUNK/netmiko'\n junk_dir = '{}/{}'.format(junk_dir_base, uuid_str)\n base_dir, full_dir = find_netmiko_dir()\n print(base_dir)\n\n # Move base_dir and recreate it\n if os.path.isdir(base_dir) and os.path.isdir(junk_dir_base):\n shutil.move(src=base_dir, dst=junk_dir)\n assert os.path.exists(base_dir) == False\n assert os.path.exists(full_dir) == False\n file_name = obtain_netmiko_filename('test_device')\n ensure_dir_exists(base_dir)\n ensure_dir_exists(full_dir)\n assert os.path.exists(base_dir) == True\n assert os.path.exists(full_dir) == True", "def test_find(self):\r\n input_dir = None\r\n try:\r\n input_dir = tempfile.mkdtemp()\r\n input_file = 'conpot/tests/data/VOGON-POEM-MIB.mib'\r\n shutil.copy(input_file, input_dir)\r\n available_mibs = find_mibs([input_dir])\r\n self.assertIn('VOGON-POEM-MIB', available_mibs)\r\n finally:\r\n shutil.rmtree(input_dir)", "def test_infodir(self):\n self.chck_triple('infodir')", "def initDirs():\n #--Bash Ini\n mashIni = None\n if GPath('mash.ini').exists():\n mashIni = ConfigParser.ConfigParser()\n mashIni.read('mash.ini')\n dirs['app'] = GPath(settings['mwDir'])\n dirs['mods'] = dirs['app'].join('Data Files')\n #--Installers\n if mashIni and mashIni.has_option('General','sInstallersDir'):\n installers = GPath(mashIni.get('General','sInstallersDir').strip())\n else:\n installers = GPath('Installers')\n if installers.isabs():\n dirs['installers'] = installers\n else:\n dirs['installers'] = dirs['app'].join(installers)\n #-# D.C.-G.\n # dirs['installers'].makedirs()\n\n # prevHead = \"\"\n # head = dirs['installers'].s\n # print sys.platform\n # print \"prevHead\", prevHead, \"head\", head\n # while prevHead != head:\n # prevHead = head\n # head, tail = os.path.split(prevHead)\n # print \"head\", head, \"tail\", tail\n # detecting Windows\n if sys.platform.lower().startswith(\"win\") == True:\n drv, pth = os.path.splitdrive(dirs['installers'].s)\n if os.access(drv, os.R_OK):\n #-# Testing the directories\n # class Dummy: chk = None\n\n # def testDir(a, d, ds):\n # if d in dirs['installers'].s:\n # Dummy.chk = os.access(d, a)\n\n # os.path.walk(dirs['installers'].s, testDir, os.F_OK)\n # print \"chk\", Dummy.chk\n #-#\n # print \"Installers directory found.\"\n dirs['installers'].makedirs()\n #-#", "def test_find_system_config_directory():\n dir = find.system_config_directory()\n nt.ok_(os.path.exists(dir))\n\n project_name = 'test'\n dir = find.system_config_directory(project_name)\n nt.ok_(dir.endswith(project_name))", "def test_manndir(self):\n self.chck_triple('manndir')", "def test_lispdir(self):\n self.chck_triple('lispdir')", "def test_man5dir(self):\n self.chck_triple('man5dir')", "def test_get_result_directory(self):\n pass", "def test_get_qiime_scripts_dir(self):\r\n obs = get_qiime_scripts_dir()\r\n\r\n # We can't do much testing of the observed value, but let's at least\r\n # check that the directory exists.\r\n self.assertTrue(isdir(obs))", "def test_llvm_bin_dir(self):\n self.assertEqual(\n self.ndk.llvm_bin_dir,\n f\"/opt/android/android-ndk/toolchains/llvm/prebuilt/{self.ndk.host_tag}/bin\",\n )", "def test_sysconfdir(self):\n self.chck_triple('sysconfdir')", "def test_blastmat_dir(self):\r\n\r\n 
test_qiime_config_variable(\"blastmat_dir\", self.config, self)", "def test_man7dir(self):\n self.chck_triple('man7dir')", "def testnodes_path() -> str:\n return os.path.abspath(\n os.path.join(os.path.dirname(__file__), \"..\", \"test\", \"testnodes\")\n )", "def test_ls_no_shareddir():\n\n with bad_fixture() as root:\n assert next(pipeline.ls(root=root), None) is None", "def test_find_in_current_path(self):\n directory = os.path.dirname(os.path.realpath(__file__))\n result = steptest.find_project_directory(directory)\n self.assertEqual(directory, result)", "def test_get_pyrin_root_path():\n\n root_path = os.path.abspath('.')\n assert application_services.get_pyrin_root_path() == root_path", "def test_find_no_share(self):\n with pytest.raises(EnvironmentError):\n with patch(\"os.getenv\", Mock(return_value=False)):\n services.find_config_path(FILES)", "def test_find_path_bi():\n assert True", "def test_get_denoiser_data_dir(self):\r\n\r\n obs = get_denoiser_data_dir()\r\n\r\n self.assertTrue(exists(obs))\r\n self.assertTrue(exists(obs + 'FLX_error_profile.dat'))", "def test_get_result_top_dir(self):\n pass", "def find_config():\n print(\"in find_config()\")\n print(os.getcwd())\n print(os.listdir(os.getcwd()))\n print(os.path.expanduser(\"~/.pylcmodel\"))\n if os.path.isfile(os.path.join(os.getcwd(), \".pylcmodel\")):\n return os.path.join(os.getcwd(), \".pylcmodel\")\n elif os.path.isfile(os.path.expanduser(\"~/.pylcmodel\")):\n return os.path.expanduser(\"~/.pylcmodel\")\n else:\n raise FileNotFoundError(\"No .pylcmodel config file found.\")", "def test_get_result_directories(self):\n pass", "def create_tester_paths():\n config.config_tester()\n _create_paths(vmcheckerpaths.tester_paths())", "def test_psdir(self):\n self.chck_triple('psdir')", "def test_get_node_drives(self):\n pass", "def test_supply_directory(self):\n supplied_value = '/tmp'\n returned_value = generic.check_path(supplied_value)\n\n self.assertEqual(supplied_value, returned_value)", "def test_man8dir(self):\n self.chck_triple('man8dir')", "def test_base_dir(self):\n old_base_dir = self.path_translator.BASE_REAL_DIR\n self.path_translator.BASE_REAL_DIR = \"/tmp/study\"\n import os\n stat_f = lambda x: FakeStat(33188, 16398844, 65024L, 1, 1049, 1049, 0,\n 1409046988, 1409046988, 1409046988)\n os.stat = stat_f\n os.lstat = stat_f\n expected = [\"subdir1\", \"subdir2\"]\n result = [r[0] for r in self.path_translator.list_directory(\n \"/{0}/search1\".format(self.search.instance))]\n self.assertEqual(result, expected)\n self.path_translator.BASE_REAL_DIR = old_base_dir" ]
[ "0.72621965", "0.6246462", "0.6027246", "0.5804337", "0.5803069", "0.5800377", "0.5759109", "0.5746789", "0.56879157", "0.5673341", "0.5659619", "0.5653931", "0.5647432", "0.5622848", "0.56032187", "0.5596177", "0.557815", "0.55779433", "0.5561922", "0.5549885", "0.5549646", "0.5534084", "0.55323267", "0.552607", "0.5495851", "0.54862374", "0.54524696", "0.5450148", "0.54468226", "0.5434339" ]
0.84443676
0
The set of arguments for constructing an IdentityPoolProviderPrincipalTag resource.
def __init__(__self__, *,
             identity_pool_id: pulumi.Input[str],
             identity_provider_name: pulumi.Input[str],
             principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
             use_defaults: Optional[pulumi.Input[bool]] = None):
    pulumi.set(__self__, "identity_pool_id", identity_pool_id)
    pulumi.set(__self__, "identity_provider_name", identity_provider_name)
    if principal_tags is not None:
        pulumi.set(__self__, "principal_tags", principal_tags)
    if use_defaults is not None:
        pulumi.set(__self__, "use_defaults", use_defaults)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(__self__,\n resource_name: str,\n args: IdentityPoolProviderPrincipalTagArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n identity_pool_id: Optional[pulumi.Input[str]] = None,\n identity_provider_name: Optional[pulumi.Input[str]] = None,\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n identity_pool_id: Optional[pulumi.Input[str]] = None,\n identity_provider_name: Optional[pulumi.Input[str]] = None,\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None):\n if identity_pool_id is not None:\n pulumi.set(__self__, \"identity_pool_id\", identity_pool_id)\n if identity_provider_name is not None:\n pulumi.set(__self__, \"identity_provider_name\", identity_provider_name)\n if principal_tags is not None:\n pulumi.set(__self__, \"principal_tags\", principal_tags)\n if use_defaults is not None:\n pulumi.set(__self__, \"use_defaults\", use_defaults)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n identity_pool_id: Optional[pulumi.Input[str]] = None,\n identity_provider_name: Optional[pulumi.Input[str]] = None,\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None) -> 'IdentityPoolProviderPrincipalTag':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _IdentityPoolProviderPrincipalTagState.__new__(_IdentityPoolProviderPrincipalTagState)\n\n __props__.__dict__[\"identity_pool_id\"] = identity_pool_id\n __props__.__dict__[\"identity_provider_name\"] = identity_provider_name\n __props__.__dict__[\"principal_tags\"] = principal_tags\n __props__.__dict__[\"use_defaults\"] = use_defaults\n return IdentityPoolProviderPrincipalTag(resource_name, opts=opts, __props__=__props__)", "def __init__(__self__,\n resource_name: str,\n args: OpenIdConnectProviderArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: TagArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n domain_id: Optional[pulumi.Input[str]] = None,\n group_id: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n role_id: Optional[pulumi.Input[str]] = None,\n user_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[TargetPoolArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ProvisioningTemplateArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: BasicEndpointGroupArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[PolicyPasswordDefaultArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ApplicationFederatedIdentityCredentialArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n 
resource_name: str,\n args: EndpointAclPolicyArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def Args(parser):\n\n parser.add_argument(\n '--service',\n help='The service to which the principal is to be added.',\n required=True)\n parser.add_argument(\n '--label',\n help=('Optionally, the visibility label to which the principal is '\n 'to be added.'))\n parser.add_argument(\n 'type',\n help=('The type of principal to add to the access policy entity. '\n 'Choose from {0}.').format(\n ', '.join(sorted(Add._PRINCIPAL_TYPES))),\n type=lambda x: str(x).lower(),\n choices=sorted(Add._PRINCIPAL_TYPES))\n parser.add_argument(\n 'principal',\n help='The principal to add to the access policy entity.')", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n account_id: Optional[pulumi.Input[str]] = None,\n group: Optional[pulumi.Input[str]] = None,\n image_id: Optional[pulumi.Input[str]] = None,\n organization_arn: Optional[pulumi.Input[str]] = None,\n organizational_unit_arn: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: GroupArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n consumer_group_name: Optional[pulumi.Input[str]] = None,\n consumer_group_password: Optional[pulumi.Input[str]] = None,\n consumer_group_user_name: Optional[pulumi.Input[str]] = None,\n dts_instance_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: VpcIpamPoolArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n application_name: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n resource_group_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ProtectionContainerMappingArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AuthServerClaimDefaultArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n authentication_type: Optional[pulumi.Input[str]] = None,\n connection_string: Optional[pulumi.Input[str]] = None,\n endpoint_uri: Optional[pulumi.Input[str]] = None,\n entity_path: Optional[pulumi.Input[str]] = None,\n identity_id: Optional[pulumi.Input[str]] = None,\n iothub_id: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[DomainArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: MetaTagArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__, *,\n 
identity_namespace: Optional[pulumi.Input[str]] = None,\n identity_provider: Optional[pulumi.Input[str]] = None,\n workload_pool: Optional[pulumi.Input[str]] = None):\n if identity_namespace is not None:\n pulumi.set(__self__, \"identity_namespace\", identity_namespace)\n if identity_provider is not None:\n pulumi.set(__self__, \"identity_provider\", identity_provider)\n if workload_pool is not None:\n pulumi.set(__self__, \"workload_pool\", workload_pool)", "def __init__(__self__,\n resource_name: str,\n args: ManagedNetworkGroupArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: RegistryAgentPoolArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: MembershipArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ..." ]
[ "0.7856163", "0.64542943", "0.6376276", "0.5968333", "0.5625083", "0.5487669", "0.5449511", "0.5414559", "0.54109836", "0.5339899", "0.53381735", "0.53368294", "0.5325394", "0.53140837", "0.52848655", "0.52398705", "0.52252465", "0.52150136", "0.51871485", "0.5153368", "0.5144707", "0.51442814", "0.51425695", "0.51422894", "0.51377934", "0.51348966", "0.5132293", "0.5125864", "0.5125008", "0.51103526" ]
0.6546987
1
An identity pool ID.
def identity_pool_id(self) -> pulumi.Input[str]:
    return pulumi.get(self, "identity_pool_id")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identity_pool_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"identity_pool_id\")", "def identity_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_pool_id\")", "def pool_id ( self ):\n return self._pool_id", "def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")", "def __getNewIPpoolID(self):\n return db_main.getHandle().seqNextVal(\"ippool_id_seq\")", "def _pool_id(self, queue, project=None):\n return self._catalogue_ctrl.get(project, queue)['pool']", "def elastic_pool_id(self) -> Optional[str]:\n return pulumi.get(self, \"elastic_pool_id\")", "def _get_id(self) -> int:\n if len(self._id_pool) == 0:\n raise ArchonError(\"No ids reamining in the pool!\")\n return self._id_pool.pop()", "def identity(self):\n return self.id", "def source_ipam_pool_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def identity_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_id\")", "def get_identity(self):\n return self.query_serial('*IDN?')", "def identity_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_id\")", "def identity_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_id\")", "def fixup_pool_id ( self ):\n self._pool_id = len ( self._poolstack ) - 1", "def source_ipam_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def source_ipam_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def get_pool_id(pool_name, host=None):\n cmd = utils.XMS_CLI_HEADER + \"-f json pool list\"\n print cmd\n ret = utils.execute_cmd_in_host(cmd, host)\n if ret[2] != 0 or isinstance(ret[0], dict):\n print \"[Error] Failed to get pool info. 
Error message: [{err}]\".format(err=ret[1])\n return -1\n try:\n pool_info = json.loads(ret[0])\n pools = pool_info[\"pools\"]\n for p in pools:\n if pool_name == p[\"name\"]:\n return p[\"id\"]\n except Exception as e:\n print \"[Error] error message is: \" + e.message\n return -1", "def AllocId(self, pool='default'):\n\n if self.__free_ids:\n idrange = self.__free_ids.pop()\n result = idrange.start\n if idrange.start < idrange.stop:\n self.__free_ids.append(self.IdRange(idrange.start+1, idrange.stop))\n else:\n result = self.__idcounter\n self.__idcounter += 1\n allocated_ranges = self.__idpools.get(pool)\n if allocated_ranges is None:\n allocated_ranges = []\n self.__idpools[pool] = allocated_ranges\n for index, idrange in enumerate(allocated_ranges):\n if result == idrange.start-1:\n idrange = self.IdRange(result, idrange.stop)\n allocated_ranges[index] = idrange\n break\n elif result == idrange.stop+1:\n idrange = self.IdRange(idrange.start, result)\n allocated_ranges[index] = idrange\n break\n else:\n allocated_ranges.append(self.IdRange(result, result))\n return result", "def getID():", "def getIdent (self) :\n return self.id", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def pool_ids(self) -> Sequence[str]:\n return pulumi.get(self, \"pool_ids\")", "def _id(self):\n pass", "def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id", "def id(self) -> str:\n\n return self._inst.query('*IDN?')", "def get_identity():\n identity = multiprocessing.current_process()._identity\n identity = 0 if not identity else identity[0]\n\n identity = (identity, threading.current_thread().ident)\n return identity" ]
[ "0.86603266", "0.838689", "0.82100475", "0.7408374", "0.73865277", "0.7111695", "0.70824254", "0.6979489", "0.6917806", "0.6748776", "0.6693632", "0.6658713", "0.65843666", "0.65843666", "0.6578973", "0.6505788", "0.6505788", "0.65010935", "0.64529204", "0.64043105", "0.6381404", "0.63649356", "0.63649356", "0.63649356", "0.63649356", "0.6364885", "0.6357003", "0.63148725", "0.62898546", "0.62725073" ]
0.86181694
1
The name of the identity provider.
def identity_provider_name(self) -> pulumi.Input[str]:
    return pulumi.get(self, "identity_provider_name")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identity_provider_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"identity_provider_name\")", "def identity_provider_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_name\")", "def provider_name(self):\n raise NotImplementedError", "def provider_display_name(self) -> str:\n return self._provider_display_name", "def provider_name(cls) -> str:\n return cls.__name__", "def provider_display_name(self) -> Optional[str]:\n return pulumi.get(self, \"provider_display_name\")", "def provider_name(self):\n return self.resource_class.name", "def provider_name(self):\n return self.resource_class.name", "def identity_provider(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider\")", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def service_provider_display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_provider_display_name\")", "def provider(self) -> str:\n return self._provider", "def identity_provider_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def provider_id(self):\n raise NotImplementedError", "def multi_factor_auth_provider(self) -> str:\n return pulumi.get(self, \"multi_factor_auth_provider\")", "def __str__(self) -> str:\n return self.provider.lower()", "def provider_id(self):\n return self.get('_id')", "def provider_display_name(self, provider_display_name: str):\n\n self._provider_display_name = provider_display_name", "def get_provider(self):\n return self.provider", "def get_identity_name(identity_kind: str = GLOBAL_APPLICATION_CONFIGURATION) -> str:\n identity_name = os.environ.get(identity_kind)\n if identity_name:\n return identity_name\n # TODO: Add discovery here? This can probably be inferred.\n # Need to be careful because not all users may have IAM privileges.\n # -kmp 31-Aug-2022\n context = \"\"\n account_number = os.environ.get('ACCOUNT_NUMBER')\n if account_number:\n context = f\" in account {account_number}\"\n raise ValueError(f\"There is no default identity name available for {identity_kind}{context}.\")", "def healthcare_provider_id(self):\n return self._healthcare_provider_id", "def get_local_name(self) -> str:\n if self.username:\n return self.username\n if self.email:\n return self.email\n return self.identifier", "def service_provider_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_provider_id\")", "def providerKey(cls):\n return \"qdmtk_provider\"", "def name(self) -> str:\n return getattr(\n self.auth_accounts[-1], \"name\" # pylint: disable=unsubscriptable-object\n )", "def name(self):\n if self.user_provided_name is not None:\n return self.user_provided_name\n else:\n return super().name", "def name(self) -> str:\n return self.user.name" ]
[ "0.9126659", "0.8877346", "0.8368294", "0.8306505", "0.8110782", "0.80312", "0.7812182", "0.7812182", "0.7771749", "0.754476", "0.754476", "0.75150007", "0.7478888", "0.74466497", "0.73160523", "0.73160523", "0.7292682", "0.7079154", "0.6827711", "0.6733854", "0.66272384", "0.66187304", "0.66109836", "0.65812933", "0.65800357", "0.6573259", "0.6562437", "0.6500616", "0.64945275", "0.64579815" ]
0.8995868
1
Get an existing IdentityPoolProviderPrincipalTag resource's state with the given name, id, and optional extra properties used to qualify the lookup.
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        identity_pool_id: Optional[pulumi.Input[str]] = None,
        identity_provider_name: Optional[pulumi.Input[str]] = None,
        principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        use_defaults: Optional[pulumi.Input[bool]] = None) -> 'IdentityPoolProviderPrincipalTag':
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = _IdentityPoolProviderPrincipalTagState.__new__(_IdentityPoolProviderPrincipalTagState)

    __props__.__dict__["identity_pool_id"] = identity_pool_id
    __props__.__dict__["identity_provider_name"] = identity_provider_name
    __props__.__dict__["principal_tags"] = principal_tags
    __props__.__dict__["use_defaults"] = use_defaults
    return IdentityPoolProviderPrincipalTag(resource_name, opts=opts, __props__=__props__)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(__self__,\n resource_name: str,\n args: IdentityPoolProviderPrincipalTagArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n api_management_id: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None) -> 'Tag':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _TagState.__new__(_TagState)\n\n __props__.__dict__[\"api_management_id\"] = api_management_id\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"name\"] = name\n return Tag(resource_name, opts=opts, __props__=__props__)", "def get_principal(self, principal_id, info=True):", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'SqlPoolSensitivityLabel':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = SqlPoolSensitivityLabelArgs.__new__(SqlPoolSensitivityLabelArgs)\n\n __props__.__dict__[\"column_name\"] = None\n __props__.__dict__[\"information_type\"] = None\n __props__.__dict__[\"information_type_id\"] = None\n __props__.__dict__[\"is_disabled\"] = None\n __props__.__dict__[\"label_id\"] = None\n __props__.__dict__[\"label_name\"] = None\n __props__.__dict__[\"managed_by\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"rank\"] = None\n __props__.__dict__[\"schema_name\"] = None\n __props__.__dict__[\"table_name\"] = None\n __props__.__dict__[\"type\"] = None\n return SqlPoolSensitivityLabel(resource_name, opts=opts, __props__=__props__)", "def normalize_idp(idp):\n if idp is None:\n return None\n\n _idp = idp.to_dict()\n _idp['enabled'] = idp['is_enabled']\n _idp['name'] = idp['id']\n return _idp", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n address_family: Optional[pulumi.Input[str]] = None,\n allocation_default_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_max_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_min_netmask_length: Optional[pulumi.Input[int]] = None,\n allocation_resource_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auto_import: Optional[pulumi.Input[bool]] = None,\n aws_service: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n ipam_scope_id: Optional[pulumi.Input[str]] = None,\n ipam_scope_type: Optional[pulumi.Input[str]] = None,\n locale: Optional[pulumi.Input[str]] = None,\n pool_depth: Optional[pulumi.Input[int]] = None,\n public_ip_source: Optional[pulumi.Input[str]] = None,\n publicly_advertisable: Optional[pulumi.Input[bool]] = None,\n source_ipam_pool_id: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'VpcIpamPool':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _VpcIpamPoolState.__new__(_VpcIpamPoolState)\n\n __props__.__dict__[\"address_family\"] = address_family\n __props__.__dict__[\"allocation_default_netmask_length\"] = allocation_default_netmask_length\n __props__.__dict__[\"allocation_max_netmask_length\"] = allocation_max_netmask_length\n 
__props__.__dict__[\"allocation_min_netmask_length\"] = allocation_min_netmask_length\n __props__.__dict__[\"allocation_resource_tags\"] = allocation_resource_tags\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auto_import\"] = auto_import\n __props__.__dict__[\"aws_service\"] = aws_service\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"ipam_scope_id\"] = ipam_scope_id\n __props__.__dict__[\"ipam_scope_type\"] = ipam_scope_type\n __props__.__dict__[\"locale\"] = locale\n __props__.__dict__[\"pool_depth\"] = pool_depth\n __props__.__dict__[\"public_ip_source\"] = public_ip_source\n __props__.__dict__[\"publicly_advertisable\"] = publicly_advertisable\n __props__.__dict__[\"source_ipam_pool_id\"] = source_ipam_pool_id\n __props__.__dict__[\"state\"] = state\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n return VpcIpamPool(resource_name, opts=opts, __props__=__props__)", "def get_raw_principal(self, principal_id):", "def read_tag(\n *,\n db: Session = Depends(get_db),\n id: int,\n current_user: DBUser = Depends(get_current_active_user),\n):\n tag = crud.tag.get(db_session=db, id=id)\n if not tag:\n raise HTTPException(status_code=404, detail=\"Tag not found\")\n if not crud.user.is_superuser(current_user) and (tag.owner_id != current_user.id):\n raise HTTPException(status_code=400, detail=\"Not enough permissions\")\n return tag", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n access_string: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n authentication_mode: Optional[pulumi.Input[pulumi.InputType['UserAuthenticationModeArgs']]] = None,\n engine: Optional[pulumi.Input[str]] = None,\n no_password_required: Optional[pulumi.Input[bool]] = None,\n passwords: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_id: Optional[pulumi.Input[str]] = None,\n user_name: Optional[pulumi.Input[str]] = None) -> 'User':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _UserState.__new__(_UserState)\n\n __props__.__dict__[\"access_string\"] = access_string\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"authentication_mode\"] = authentication_mode\n __props__.__dict__[\"engine\"] = engine\n __props__.__dict__[\"no_password_required\"] = no_password_required\n __props__.__dict__[\"passwords\"] = passwords\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_id\"] = user_id\n __props__.__dict__[\"user_name\"] = user_name\n return User(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n allocation_policy: Optional[pulumi.Input[str]] = None,\n data_residency_enabled: Optional[pulumi.Input[bool]] = None,\n device_provisioning_host_name: Optional[pulumi.Input[str]] = None,\n id_scope: Optional[pulumi.Input[str]] = None,\n ip_filter_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IotHubDpsIpFilterRuleArgs']]]]] = None,\n linked_hubs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IotHubDpsLinkedHubArgs']]]]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n public_network_access_enabled: 
Optional[pulumi.Input[bool]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n service_operations_host_name: Optional[pulumi.Input[str]] = None,\n sku: Optional[pulumi.Input[pulumi.InputType['IotHubDpsSkuArgs']]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'IotHubDps':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _IotHubDpsState.__new__(_IotHubDpsState)\n\n __props__.__dict__[\"allocation_policy\"] = allocation_policy\n __props__.__dict__[\"data_residency_enabled\"] = data_residency_enabled\n __props__.__dict__[\"device_provisioning_host_name\"] = device_provisioning_host_name\n __props__.__dict__[\"id_scope\"] = id_scope\n __props__.__dict__[\"ip_filter_rules\"] = ip_filter_rules\n __props__.__dict__[\"linked_hubs\"] = linked_hubs\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"public_network_access_enabled\"] = public_network_access_enabled\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"service_operations_host_name\"] = service_operations_host_name\n __props__.__dict__[\"sku\"] = sku\n __props__.__dict__[\"tags\"] = tags\n return IotHubDps(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n add_on: Optional[pulumi.Input[pulumi.InputType['InstanceAddOnArgs']]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n availability_zone: Optional[pulumi.Input[str]] = None,\n blueprint_id: Optional[pulumi.Input[str]] = None,\n bundle_id: Optional[pulumi.Input[str]] = None,\n cpu_count: Optional[pulumi.Input[int]] = None,\n created_at: Optional[pulumi.Input[str]] = None,\n ip_address_type: Optional[pulumi.Input[str]] = None,\n ipv6_address: Optional[pulumi.Input[str]] = None,\n ipv6_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n is_static_ip: Optional[pulumi.Input[bool]] = None,\n key_pair_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_ip_address: Optional[pulumi.Input[str]] = None,\n public_ip_address: Optional[pulumi.Input[str]] = None,\n ram_size: Optional[pulumi.Input[float]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_data: Optional[pulumi.Input[str]] = None,\n username: Optional[pulumi.Input[str]] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceState.__new__(_InstanceState)\n\n __props__.__dict__[\"add_on\"] = add_on\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"availability_zone\"] = availability_zone\n __props__.__dict__[\"blueprint_id\"] = blueprint_id\n __props__.__dict__[\"bundle_id\"] = bundle_id\n __props__.__dict__[\"cpu_count\"] = cpu_count\n __props__.__dict__[\"created_at\"] = created_at\n __props__.__dict__[\"ip_address_type\"] = ip_address_type\n __props__.__dict__[\"ipv6_address\"] = ipv6_address\n __props__.__dict__[\"ipv6_addresses\"] = ipv6_addresses\n __props__.__dict__[\"is_static_ip\"] = is_static_ip\n __props__.__dict__[\"key_pair_name\"] = key_pair_name\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"private_ip_address\"] = private_ip_address\n __props__.__dict__[\"public_ip_address\"] = public_ip_address\n __props__.__dict__[\"ram_size\"] = ram_size\n 
__props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_data\"] = user_data\n __props__.__dict__[\"username\"] = username\n return Instance(resource_name, opts=opts, __props__=__props__)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n identity_pool_id: Optional[pulumi.Input[str]] = None,\n identity_provider_name: Optional[pulumi.Input[str]] = None,\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None,\n __props__=None):\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'OpenIdConnectProvider':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = OpenIdConnectProviderArgs.__new__(OpenIdConnectProviderArgs)\n\n __props__.__dict__[\"client_id\"] = None\n __props__.__dict__[\"client_secret\"] = None\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"display_name\"] = None\n __props__.__dict__[\"metadata_endpoint\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"use_in_api_documentation\"] = None\n __props__.__dict__[\"use_in_test_console\"] = None\n return OpenIdConnectProvider(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n accept_language: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n created_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n distributor: Optional[pulumi.Input[str]] = None,\n has_default_path: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n owner: Optional[pulumi.Input[str]] = None,\n provisioning_artifact_parameters: Optional[pulumi.Input[pulumi.InputType['ProductProvisioningArtifactParametersArgs']]] = None,\n status: Optional[pulumi.Input[str]] = None,\n support_description: Optional[pulumi.Input[str]] = None,\n support_email: Optional[pulumi.Input[str]] = None,\n support_url: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n type: Optional[pulumi.Input[str]] = None) -> 'Product':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ProductState.__new__(_ProductState)\n\n __props__.__dict__[\"accept_language\"] = accept_language\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"created_time\"] = created_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"distributor\"] = distributor\n __props__.__dict__[\"has_default_path\"] = has_default_path\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"owner\"] = owner\n __props__.__dict__[\"provisioning_artifact_parameters\"] = provisioning_artifact_parameters\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"support_description\"] = support_description\n __props__.__dict__[\"support_email\"] = support_email\n __props__.__dict__[\"support_url\"] = support_url\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"type\"] = type\n return Product(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: 
Optional[pulumi.ResourceOptions] = None,\n minimal_action: Optional[pulumi.Input[str]] = None,\n most_disruptive_allowed_action: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n preserved_state: Optional[pulumi.Input[pulumi.InputType['RegionPerInstanceConfigPreservedStateArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n region_instance_group_manager: Optional[pulumi.Input[str]] = None,\n remove_instance_state_on_destroy: Optional[pulumi.Input[bool]] = None) -> 'RegionPerInstanceConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RegionPerInstanceConfigState.__new__(_RegionPerInstanceConfigState)\n\n __props__.__dict__[\"minimal_action\"] = minimal_action\n __props__.__dict__[\"most_disruptive_allowed_action\"] = most_disruptive_allowed_action\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"preserved_state\"] = preserved_state\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"region_instance_group_manager\"] = region_instance_group_manager\n __props__.__dict__[\"remove_instance_state_on_destroy\"] = remove_instance_state_on_destroy\n return RegionPerInstanceConfig(resource_name, opts=opts, __props__=__props__)", "def _get_resource_provider(self, uuid):\n resp = self.get(\"/resource_providers/%s\" % uuid)\n if resp.status_code == 200:\n data = resp.json()\n return objects.ResourceProvider(\n uuid=uuid,\n name=data['name'],\n generation=data['generation'],\n )\n elif resp.status_code == 404:\n return None\n else:\n msg = _LE(\"Failed to retrieve resource provider record from \"\n \"placement API for UUID %(uuid)s. \"\n \"Got %(status_code)d: %(err_text)s.\")\n args = {\n 'uuid': uuid,\n 'status_code': resp.status_code,\n 'err_text': resp.text,\n }\n LOG.error(msg, args)", "def _fetch(resource):\n found = []\n for x in getattr(ec2, definitions[resource].fetch).all():\n if x.tags:\n for t in x.tags:\n if t['Key'] == args.tag and t['Value'] == args.role:\n found.append(x)\n\n if len(found) > 1:\n raise Exception('More than 1 {r} tagged {k}:{v} found, this is an issue.'.format(\n r=resource,\n k=args.tag,\n v=args.role\n ))\n elif len(found) == 1:\n return found[0]\n else:\n return None", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n inter_region_traffic_qos_policy_description: Optional[pulumi.Input[str]] = None,\n inter_region_traffic_qos_policy_name: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None,\n transit_router_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_router_id: Optional[pulumi.Input[str]] = None) -> 'InterRegionTrafficQosPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InterRegionTrafficQosPolicyState.__new__(_InterRegionTrafficQosPolicyState)\n\n __props__.__dict__[\"inter_region_traffic_qos_policy_description\"] = inter_region_traffic_qos_policy_description\n __props__.__dict__[\"inter_region_traffic_qos_policy_name\"] = inter_region_traffic_qos_policy_name\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"transit_router_attachment_id\"] = transit_router_attachment_id\n __props__.__dict__[\"transit_router_id\"] = transit_router_id\n return InterRegionTrafficQosPolicy(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: 
Optional[pulumi.ResourceOptions] = None) -> 'Membership':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = MembershipArgs.__new__(MembershipArgs)\n\n __props__.__dict__[\"arn\"] = None\n __props__.__dict__[\"collaboration_arn\"] = None\n __props__.__dict__[\"collaboration_creator_account_id\"] = None\n __props__.__dict__[\"collaboration_identifier\"] = None\n __props__.__dict__[\"membership_identifier\"] = None\n __props__.__dict__[\"query_log_status\"] = None\n __props__.__dict__[\"tags\"] = None\n return Membership(resource_name, opts=opts, __props__=__props__)", "def _extract_resource(resource: Optional[dict],\n allowed_vals: tuple[tuple[str, ...]],\n exc: Type[exception.CinderException],\n resource_name: str,\n props: tuple[str] = ('status',)) -> Optional[str]:\n\n resource_id = None\n if resource:\n for prop, allowed_states in zip(props, allowed_vals):\n if resource[prop] not in allowed_states:\n msg = _(\"Originating %(res)s %(prop)s must be one of \"\n \"'%(vals)s' values\")\n msg = msg % {'res': resource_name,\n 'prop': prop,\n 'vals': ', '.join(allowed_states)}\n # TODO(harlowja): what happens if the status changes after\n # this initial resource status check occurs??? Seems like\n # someone could delete the resource after this check passes\n # but before the volume is officially created?\n raise exc(reason=msg)\n resource_id = resource['id']\n return resource_id", "def find_by_id(self, tag, params={}, **options):\n path = \"/tags/%s\" % (tag)\n return self.client.get(path, params, **options)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n auto_devops_enabled: Optional[pulumi.Input[bool]] = None,\n avatar: Optional[pulumi.Input[str]] = None,\n avatar_hash: Optional[pulumi.Input[str]] = None,\n avatar_url: Optional[pulumi.Input[str]] = None,\n default_branch_protection: Optional[pulumi.Input[int]] = None,\n description: Optional[pulumi.Input[str]] = None,\n emails_disabled: Optional[pulumi.Input[bool]] = None,\n extra_shared_runners_minutes_limit: Optional[pulumi.Input[int]] = None,\n full_name: Optional[pulumi.Input[str]] = None,\n full_path: Optional[pulumi.Input[str]] = None,\n ip_restriction_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n lfs_enabled: Optional[pulumi.Input[bool]] = None,\n membership_lock: Optional[pulumi.Input[bool]] = None,\n mentions_disabled: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n parent_id: Optional[pulumi.Input[int]] = None,\n path: Optional[pulumi.Input[str]] = None,\n prevent_forking_outside_group: Optional[pulumi.Input[bool]] = None,\n project_creation_level: Optional[pulumi.Input[str]] = None,\n request_access_enabled: Optional[pulumi.Input[bool]] = None,\n require_two_factor_authentication: Optional[pulumi.Input[bool]] = None,\n runners_token: Optional[pulumi.Input[str]] = None,\n share_with_group_lock: Optional[pulumi.Input[bool]] = None,\n shared_runners_minutes_limit: Optional[pulumi.Input[int]] = None,\n subgroup_creation_level: Optional[pulumi.Input[str]] = None,\n two_factor_grace_period: Optional[pulumi.Input[int]] = None,\n visibility_level: Optional[pulumi.Input[str]] = None,\n web_url: Optional[pulumi.Input[str]] = None) -> 'Group':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _GroupState.__new__(_GroupState)\n\n __props__.__dict__[\"auto_devops_enabled\"] = auto_devops_enabled\n __props__.__dict__[\"avatar\"] = avatar\n 
__props__.__dict__[\"avatar_hash\"] = avatar_hash\n __props__.__dict__[\"avatar_url\"] = avatar_url\n __props__.__dict__[\"default_branch_protection\"] = default_branch_protection\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"emails_disabled\"] = emails_disabled\n __props__.__dict__[\"extra_shared_runners_minutes_limit\"] = extra_shared_runners_minutes_limit\n __props__.__dict__[\"full_name\"] = full_name\n __props__.__dict__[\"full_path\"] = full_path\n __props__.__dict__[\"ip_restriction_ranges\"] = ip_restriction_ranges\n __props__.__dict__[\"lfs_enabled\"] = lfs_enabled\n __props__.__dict__[\"membership_lock\"] = membership_lock\n __props__.__dict__[\"mentions_disabled\"] = mentions_disabled\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"parent_id\"] = parent_id\n __props__.__dict__[\"path\"] = path\n __props__.__dict__[\"prevent_forking_outside_group\"] = prevent_forking_outside_group\n __props__.__dict__[\"project_creation_level\"] = project_creation_level\n __props__.__dict__[\"request_access_enabled\"] = request_access_enabled\n __props__.__dict__[\"require_two_factor_authentication\"] = require_two_factor_authentication\n __props__.__dict__[\"runners_token\"] = runners_token\n __props__.__dict__[\"share_with_group_lock\"] = share_with_group_lock\n __props__.__dict__[\"shared_runners_minutes_limit\"] = shared_runners_minutes_limit\n __props__.__dict__[\"subgroup_creation_level\"] = subgroup_creation_level\n __props__.__dict__[\"two_factor_grace_period\"] = two_factor_grace_period\n __props__.__dict__[\"visibility_level\"] = visibility_level\n __props__.__dict__[\"web_url\"] = web_url\n return Group(resource_name, opts=opts, __props__=__props__)", "def get_by_id(cls, name):\n\t\treturn super(Locality, cls).get_by_id(cls.normalized_name(name))", "def get_tagname(tags, tagid):\n for tag in tags:\n if tag['id'] == tagid:\n return tag['name']", "def get(self, identifier, **kwargs):\n\n all_data = self._load()\n # if matches\n for feature in all_data['features']:\n if str(feature.get('id')) == identifier:\n return feature\n # default, no match\n err = f'item {identifier} not found'\n LOGGER.error(err)\n raise ProviderItemNotFoundError(err)", "def __init__(__self__, *,\n identity_pool_id: pulumi.Input[str],\n identity_provider_name: pulumi.Input[str],\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None):\n pulumi.set(__self__, \"identity_pool_id\", identity_pool_id)\n pulumi.set(__self__, \"identity_provider_name\", identity_provider_name)\n if principal_tags is not None:\n pulumi.set(__self__, \"principal_tags\", principal_tags)\n if use_defaults is not None:\n pulumi.set(__self__, \"use_defaults\", use_defaults)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n config: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SyntheticsPrivateLocationMetadataArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'SyntheticsPrivateLocation':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SyntheticsPrivateLocationState.__new__(_SyntheticsPrivateLocationState)\n\n __props__.__dict__[\"config\"] = config\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"metadata\"] = metadata\n 
__props__.__dict__[\"name\"] = name\n __props__.__dict__[\"tags\"] = tags\n return SyntheticsPrivateLocation(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n data_factory_id: Optional[pulumi.Input[str]] = None,\n fqdns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n subresource_name: Optional[pulumi.Input[str]] = None,\n target_resource_id: Optional[pulumi.Input[str]] = None) -> 'ManagedPrivateEndpoint':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ManagedPrivateEndpointState.__new__(_ManagedPrivateEndpointState)\n\n __props__.__dict__[\"data_factory_id\"] = data_factory_id\n __props__.__dict__[\"fqdns\"] = fqdns\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"subresource_name\"] = subresource_name\n __props__.__dict__[\"target_resource_id\"] = target_resource_id\n return ManagedPrivateEndpoint(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n container_registry_name: Optional[pulumi.Input[str]] = None,\n instance_count: Optional[pulumi.Input[int]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tier: Optional[pulumi.Input[str]] = None,\n virtual_network_subnet_id: Optional[pulumi.Input[str]] = None) -> 'RegistryAgentPool':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RegistryAgentPoolState.__new__(_RegistryAgentPoolState)\n\n __props__.__dict__[\"container_registry_name\"] = container_registry_name\n __props__.__dict__[\"instance_count\"] = instance_count\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tier\"] = tier\n __props__.__dict__[\"virtual_network_subnet_id\"] = virtual_network_subnet_id\n return RegistryAgentPool(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'ProvisioningTemplate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ProvisioningTemplateArgs.__new__(ProvisioningTemplateArgs)\n\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"enabled\"] = None\n __props__.__dict__[\"pre_provisioning_hook\"] = None\n __props__.__dict__[\"provisioning_role_arn\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"template_arn\"] = None\n __props__.__dict__[\"template_body\"] = None\n __props__.__dict__[\"template_name\"] = None\n __props__.__dict__[\"template_type\"] = None\n return ProvisioningTemplate(resource_name, opts=opts, __props__=__props__)" ]
[ "0.5595048", "0.53635854", "0.49380523", "0.48628393", "0.4816035", "0.47020298", "0.4629302", "0.45980203", "0.45842114", "0.45643133", "0.45463476", "0.4532976", "0.4522428", "0.4495576", "0.4438125", "0.44314432", "0.44203177", "0.4390814", "0.43893582", "0.4349263", "0.43206725", "0.43204203", "0.4307856", "0.43070388", "0.43055066", "0.4304557", "0.4295388", "0.42931825", "0.4288698", "0.42834044" ]
0.74135715
0
An identity pool ID.
def identity_pool_id(self) -> pulumi.Output[str]:
    return pulumi.get(self, "identity_pool_id")
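Editor's note: because this getter returns a pulumi.Output, the value is only known after deployment. A minimal consumption sketch follows; the `principal_tag` resource variable is an assumption for illustration, not part of this record.
# assuming `principal_tag` is an already-declared resource exposing this property
pulumi.export("identity_pool_id", principal_tag.identity_pool_id)   # surface the ID as a stack output
pool_ref = principal_tag.identity_pool_id.apply(lambda pool_id: "cognito pool " + pool_id)  # transform once resolved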
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identity_pool_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"identity_pool_id\")", "def identity_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_pool_id\")", "def pool_id ( self ):\n return self._pool_id", "def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")", "def __getNewIPpoolID(self):\n return db_main.getHandle().seqNextVal(\"ippool_id_seq\")", "def _pool_id(self, queue, project=None):\n return self._catalogue_ctrl.get(project, queue)['pool']", "def elastic_pool_id(self) -> Optional[str]:\n return pulumi.get(self, \"elastic_pool_id\")", "def _get_id(self) -> int:\n if len(self._id_pool) == 0:\n raise ArchonError(\"No ids reamining in the pool!\")\n return self._id_pool.pop()", "def identity(self):\n return self.id", "def source_ipam_pool_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def identity_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_id\")", "def get_identity(self):\n return self.query_serial('*IDN?')", "def identity_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_id\")", "def identity_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_id\")", "def fixup_pool_id ( self ):\n self._pool_id = len ( self._poolstack ) - 1", "def source_ipam_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def source_ipam_pool_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"source_ipam_pool_id\")", "def get_pool_id(pool_name, host=None):\n cmd = utils.XMS_CLI_HEADER + \"-f json pool list\"\n print cmd\n ret = utils.execute_cmd_in_host(cmd, host)\n if ret[2] != 0 or isinstance(ret[0], dict):\n print \"[Error] Failed to get pool info. 
Error message: [{err}]\".format(err=ret[1])\n return -1\n try:\n pool_info = json.loads(ret[0])\n pools = pool_info[\"pools\"]\n for p in pools:\n if pool_name == p[\"name\"]:\n return p[\"id\"]\n except Exception as e:\n print \"[Error] error message is: \" + e.message\n return -1", "def AllocId(self, pool='default'):\n\n if self.__free_ids:\n idrange = self.__free_ids.pop()\n result = idrange.start\n if idrange.start < idrange.stop:\n self.__free_ids.append(self.IdRange(idrange.start+1, idrange.stop))\n else:\n result = self.__idcounter\n self.__idcounter += 1\n allocated_ranges = self.__idpools.get(pool)\n if allocated_ranges is None:\n allocated_ranges = []\n self.__idpools[pool] = allocated_ranges\n for index, idrange in enumerate(allocated_ranges):\n if result == idrange.start-1:\n idrange = self.IdRange(result, idrange.stop)\n allocated_ranges[index] = idrange\n break\n elif result == idrange.stop+1:\n idrange = self.IdRange(idrange.start, result)\n allocated_ranges[index] = idrange\n break\n else:\n allocated_ranges.append(self.IdRange(result, result))\n return result", "def getID():", "def getIdent (self) :\n return self.id", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def pool_ids(self) -> Sequence[str]:\n return pulumi.get(self, \"pool_ids\")", "def _id(self):\n pass", "def establish_id(self):\n if self.config.node_id is None:\n self.config.node_id = str(uuid4()).replace('-', '')\n return self.config.node_id", "def id(self) -> str:\n\n return self._inst.query('*IDN?')", "def get_identity():\n identity = multiprocessing.current_process()._identity\n identity = 0 if not identity else identity[0]\n\n identity = (identity, threading.current_thread().ident)\n return identity" ]
[ "0.86181694", "0.838689", "0.82100475", "0.7408374", "0.73865277", "0.7111695", "0.70824254", "0.6979489", "0.6917806", "0.6748776", "0.6693632", "0.6658713", "0.65843666", "0.65843666", "0.6578973", "0.6505788", "0.6505788", "0.65010935", "0.64529204", "0.64043105", "0.6381404", "0.63649356", "0.63649356", "0.63649356", "0.63649356", "0.6364885", "0.6357003", "0.63148725", "0.62898546", "0.62725073" ]
0.86603266
0
The name of the identity provider.
def identity_provider_name(self) -> pulumi.Output[str]:
    return pulumi.get(self, "identity_provider_name")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identity_provider_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"identity_provider_name\")", "def identity_provider_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_name\")", "def provider_name(self):\n raise NotImplementedError", "def provider_display_name(self) -> str:\n return self._provider_display_name", "def provider_name(cls) -> str:\n return cls.__name__", "def provider_display_name(self) -> Optional[str]:\n return pulumi.get(self, \"provider_display_name\")", "def provider_name(self):\n return self.resource_class.name", "def provider_name(self):\n return self.resource_class.name", "def identity_provider(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider\")", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def provider(self) -> str:\n return pulumi.get(self, \"provider\")", "def service_provider_display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_provider_display_name\")", "def provider(self) -> str:\n return self._provider", "def identity_provider_type(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def identity_provider_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"identity_provider_type\")", "def provider_id(self):\n raise NotImplementedError", "def multi_factor_auth_provider(self) -> str:\n return pulumi.get(self, \"multi_factor_auth_provider\")", "def __str__(self) -> str:\n return self.provider.lower()", "def provider_id(self):\n return self.get('_id')", "def provider_display_name(self, provider_display_name: str):\n\n self._provider_display_name = provider_display_name", "def get_provider(self):\n return self.provider", "def get_identity_name(identity_kind: str = GLOBAL_APPLICATION_CONFIGURATION) -> str:\n identity_name = os.environ.get(identity_kind)\n if identity_name:\n return identity_name\n # TODO: Add discovery here? This can probably be inferred.\n # Need to be careful because not all users may have IAM privileges.\n # -kmp 31-Aug-2022\n context = \"\"\n account_number = os.environ.get('ACCOUNT_NUMBER')\n if account_number:\n context = f\" in account {account_number}\"\n raise ValueError(f\"There is no default identity name available for {identity_kind}{context}.\")", "def healthcare_provider_id(self):\n return self._healthcare_provider_id", "def get_local_name(self) -> str:\n if self.username:\n return self.username\n if self.email:\n return self.email\n return self.identifier", "def service_provider_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_provider_id\")", "def providerKey(cls):\n return \"qdmtk_provider\"", "def name(self) -> str:\n return getattr(\n self.auth_accounts[-1], \"name\" # pylint: disable=unsubscriptable-object\n )", "def name(self):\n if self.user_provided_name is not None:\n return self.user_provided_name\n else:\n return super().name", "def name(self) -> str:\n return self.user.name" ]
[ "0.8995868", "0.8877346", "0.8368294", "0.8306505", "0.8110782", "0.80312", "0.7812182", "0.7812182", "0.7771749", "0.754476", "0.754476", "0.75150007", "0.7478888", "0.74466497", "0.73160523", "0.73160523", "0.7292682", "0.7079154", "0.6827711", "0.6733854", "0.66272384", "0.66187304", "0.66109836", "0.65812933", "0.65800357", "0.6573259", "0.6562437", "0.6500616", "0.64945275", "0.64579815" ]
0.9126659
0
Breaks down a complicated filename and returns a 2-element list consisting of the filename component and the SHA component. If mode == 'learn', given a string of the form "s1__s2__s3.c", it returns ['s1/s2/s3.c', -1]. If mode == 'old', given a string of the form "s1__s2__s3__SHA.c", it returns ['s1/s2/s3.c', SHA].
def dismemberFilename(myname, mode):
    if mode == 'learn':
        return [pathLeaf(myname).replace('__', '/'), -1]
    elif mode == 'old':
        filename_parts = myname.split('__')                # ['s1', 's2', 's3', 'SHA.c']
        SHA_and_extension = filename_parts[-1].split('.')  # ['SHA', 'c']
        return ['/'.join(filename_parts[:-1]) + '.' + SHA_and_extension[1], SHA_and_extension[0]]
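Editor's note: a quick illustration of the two modes; the filenames below are made up, not taken from the corpus.
# mode 'learn': no SHA component, so the second element is the sentinel -1
dismemberFilename('src__net__http__server.c', 'learn')
#   -> ['src/net/http/server.c', -1]
# mode 'old': the last '__' component carries the SHA before the extension
dismemberFilename('src__net__http__server__a1b2c3d.c', 'old')
#   -> ['src/net/http/server.c', 'a1b2c3d']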
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def splitFilename(filename):\n\n if filename[-4:] == '.rpm':\n filename = filename[:-4]\n \n archIndex = filename.rfind('.')\n arch = filename[archIndex+1:]\n\n relIndex = filename[:archIndex].rfind('-')\n rel = filename[relIndex+1:archIndex]\n\n verIndex = filename[:relIndex].rfind('-')\n ver = filename[verIndex+1:relIndex]\n\n epochIndex = filename.find(':')\n if epochIndex == -1:\n epoch = ''\n else:\n epoch = filename[:epochIndex]\n \n name = filename[epochIndex + 1:verIndex]\n return name, ver, rel, epoch, arch", "def file_key(filename):\n prio = 4\n if filename == 'install.rdf':\n prio = 1\n elif filename in [\"chrome.manifest\", \"icon.png\", \"icon64.png\"]:\n prio = 2\n elif filename in LICENSE_FILENAMES:\n prio = 5\n return (prio, os.path.split(filename.lower()))", "def used_mods(ffile):\n import re\n import codecs\n\n # Go through line by line,\n # remove comments and strings because the latter can include ';'.\n # Then split at at ';', if given.\n # The stripped line should start with 'use '.\n # After use should be the \"module_name\", ', intrinsic :: module_name', or\n # ', non_intrinsic :: module_name'. We allow also to use \":: module_name\"\n # After module name should only be ', only: ...' or ', a ==> b'\n olist = list()\n of = codecs.open(ffile, 'r', encoding='ascii', errors='ignore')\n for line in of:\n ll = line.rstrip().lower() # everything lower case\n ll = re.sub('!.*$', '', ll) # remove F90 comment\n ll = re.sub('^c.*$', '', ll) # remove F77 comments\n ll = re.sub('\".*?\"', '', ll) # remove \"string\"\n ll = re.sub(\"'.*?'\", '', ll) # remove 'string'\n # check if several commands are on one line\n if ';' in ll:\n lll = ll.split(';')\n else:\n lll = [ll]\n for il in lll:\n iil = il.strip()\n # line should start with 'use '\n if iil.startswith('use '):\n iil = iil[4:].strip() # remove 'use '\n # skip intrinsic modules\n if 'intrinsic' in iil:\n if 'non_intrinsic' in iil:\n iil = re.sub(', *non_intrinsic', '', iil)\n iil = iil.strip()\n else:\n continue # skip to next in lll\n if iil.startswith('::'):\n iil = iil[2:].strip() # remove ':: '\n # remove after ',' if rename-list or only-list\n iil = re.sub(',.*$', '', iil)\n olist.append(iil.strip())\n of.close()\n\n return olist", "def processed_file_names(self):\n # For 'trainval', we use files from 'train' and 'val' to save\n # memory\n if self.stage == 'trainval' and self.val_mixed_in_train:\n return [\n osp.join('train', self.pre_transform_hash, f'{w}.h5')\n for s in ('train', 'val')\n for w in self.all_cloud_ids[s]]\n if self.stage == 'trainval':\n return [\n osp.join(s, self.pre_transform_hash, f'{w}.h5')\n for s in ('train', 'val')\n for w in self.all_cloud_ids[s]]\n return [\n osp.join(self.stage, self.pre_transform_hash, f'{w}.h5')\n for w in self.cloud_ids]", "def ListMatchingComponents(self, policy_type):\n base_name = self.GetBaseFilename(policy_type)\n files = glob.glob('%s_*.*' % base_name)\n len_base_name = len(base_name) + 1\n return [ file[len_base_name:file.rfind('.')] for file in files ]", "def _get_file_info(filename):\n filename = os.path.split(filename)[-1]\n filename = filename[:str.rfind(filename, '.jsonl.gz')]\n _, mode, idx = filename.split('_')\n return mode, idx", "def filemode(mode):\n perm = []\n for table in _filemode_table:\n for bit, char in table:\n if mode & bit == bit:\n perm.append(char)\n break\n else:\n perm.append(\"-\")\n return \"\".join(perm)", "def filename_to_condition(fname):\n fname = os.path.splitext(os.path.basename(fname))[0]\n elements = fname.split('_')\n result = 
{}\n for term in elements:\n try:\n index = term.index('-')\n key = term[:index]\n result[key] = term[index + 1:].split('-')\n if len(result[key]) == 1:\n result[key] = result[key][0]\n except ValueError:\n continue\n return result", "def get_strs_from_input(file_name,mode):\n foo = open(file_name,mode) \n seq = foo.read().split(\">\") \n strs = []\n for s in seq:\n if(s!=\"\"):\n strings=s.split()\n s_1=strings[0]\n s_2=''.join(strings[1:])\n strs.append(s_2) \n return strs", "def split3 (filename):\n directory, basename = os.path.split (filename)\n basename, extension = os.path.splitext (basename)\n return directory, basename, extension", "def getnames(fname):\n ok = 1\n src = fname\n flist = fname.split('.')\n if (len(flist) > 2\n and flist[-2] == 'v'\n and flist[-1].isdigit()\n and len(flist[-1]) >= 3):\n dst = '.'.join(flist[0 : -2])\n else:\n version = -1\n prefix = '%s.v.' % fname\n prefix_len = len(prefix)\n for f in glob.glob('%s*' % prefix):\n suffix = f[prefix_len:]\n if suffix.isdigit() and len(suffix) >= 3:\n v = int(suffix)\n if v > version:\n version = v\n if version == -1:\n ok = 0\n src = '%s%03d' % (prefix, version)\n dst = fname\n return (ok, src, dst)", "def split_file_name(file, dataset_type='ycb'):\n dirname, filename = osp.split(file)\n filename_without_ext, ext = osp.splitext(filename)\n\n if dataset_type == 'ObjectNet3D':\n category_name = dirname.split(\"/\")[-2]\n idx = dirname.split(\"/\")[-1]\n else: # ycb\n category_name = dirname.split(\"/\")[-1]\n idx = None\n return dirname, filename, category_name, idx", "def get_file_flag(self):\n flag_list = os.listdir(self.path)\n temp_flag_list = []\n for flag in flag_list[:5]:\n result = re.match('^(\\w{2}\\d{6}\\_)(\\d{8})', flag)\n if result:\n temp_flag_list.append(result[2])\n self.flag_list = list(set(temp_flag_list))", "def parse_rarefaction_fname(name_string):\r\n\r\n root, ext = os.path.splitext(name_string)\r\n root_list = root.split(\"_\")\r\n iters = int(root_list.pop())\r\n seqs_per_sam = int(root_list.pop())\r\n base_name = \"_\".join(root_list)\r\n return base_name, seqs_per_sam, iters, ext", "def name_version_fn(fn):\n if fn.endswith('.egg'):\n fn = fn[:-4]\n if '-' in fn:\n return tuple(fn.split('-', 1))\n else:\n return fn, ''", "def get_file_info(fname) -> Tuple[str, bool]:\n fname = fname.lower()\n is_compressed = False\n if fname.endswith((\".tgz\", \".tar.gz\")):\n is_compressed = True\n fname = re.sub(r\"\\.(tgz|tar\\.gz)$\", \"\", fname)\n elif fname.endswith(\".gz\"):\n is_compressed = True\n fname = fname[:-3]\n elif fname.endswith(\".zip\"):\n is_compressed = True\n fname = fname[:-4]\n split = os.path.splitext(fname)\n return split[1], is_compressed", "def get_url_components(self, url):\n if 'http://' not in url and 'https://' not in url:\n print(\"Protocol not found, skipping: \" + url)\n return False\n if url[:7] == 'http://':\n protocol = url[:7]\n file_path = url[7:]\n elif url[:8] == 'https://':\n protocol = url[:8]\n file_path = url[8:]\n else:\n print(\"Error when parsing protocol. 
Skipping: \" + url)\n return False\n # Split the string from the last '/'.\n # To do this, we reverse the string, split from the first '/' and\n # then reverse them both back.\n filename, root_and_directory = [x[::-1] for x in file_path[::-1].split('/', 1)]\n # Replace the lost '/'\n root_and_directory = root_and_directory + '/'\n root, directory = root_and_directory.split('/', 1)\n directory = '/' + directory\n return [protocol, root, directory, filename]", "def read_str_name(path):\r\n name = []\r\n name_stru = {}\r\n with open(path, \"r+\") as f:\r\n line = f.readlines()\r\n \r\n # to load the name to list. files\r\n for i in range(len(line)):\r\n \r\n if line[i][:-1] != '':\r\n \r\n name.append(line[i][:-1])\r\n else:\r\n \r\n name.append(line[i-1][:-1] + str())\r\n \r\n line[i] = line[i-1]\r\n \r\n # to remark the structure name\r\n for s in name:\r\n \r\n name_stru[s] = (name.count(s),name.index(s))\r\n \r\n for key,values in name_stru.items():\r\n \r\n if values[0] != 1:\r\n for i in range(values[0]):\r\n name[values[1]+i] = name[values[1]+i] + str(i+1)\r\n \r\n return name", "def handle_filenames(filenames):\n suffixes = [\".mod\", \".dat\", \".run\"]\n if len(filenames) == 1:\n return (filenames[0].with_suffix(suffix) for suffix in suffixes)\n else:\n try:\n return sorted(filenames, key=lambda x: suffixes.index(x.suffix))\n except ValueError:\n click.echo(click.style(f\"Invalid filename.\", fg=\"red\", bold=True))", "def split_path(s):\n dirname, filename = os.path.split(s)\n fname_noext, ext = os.path.splitext(filename)\n levels = dirname.strip('/').split(os.path.sep)[2:][-2:]\n return PATH_SPLIT.split(' '.join(levels + [fname_noext]))", "def decompose_newstyle_name(filename):\n path, parts, ext = _get_fields(filename)\n observatory = parts[0]\n serial = list_get(parts, 3, \"\")\n\n if ext == \".pmap\":\n assert len(parts) in [1,2], \"Invalid .pmap filename \" + repr(filename)\n instrument, filekind = \"\", \"\"\n serial = list_get(parts, 1, \"\")\n elif ext == \".imap\":\n assert len(parts) in [2,3], \"Invalid .imap filename \" + repr(filename)\n instrument = parts[1]\n filekind = \"\"\n serial = list_get(parts, 2, \"\")\n else:\n assert len(parts) in [3,4], \"Invalid filename \" + repr(filename)\n instrument = parts[1]\n filekind = parts[2]\n serial = list_get(parts, 3, \"\")\n\n # Don't include filename in these or it messes up crds.certify unique error tracking.\n\n assert instrument in INSTRUMENTS+[\"\"], \"Invalid instrument \" + repr(instrument)\n assert filekind in FILEKINDS+[\"\"], \"Invalid filekind \" + repr(filekind)\n assert re.match(r\"\\d*\", serial), \"Invalid id field \" + repr(id)\n # extension may vary for upload temporary files.\n\n return path, observatory, instrument, filekind, serial, ext", "def get_filenames(mode, data_dir):\n if mode == 'train':\n return [os.path.join(data_dir, 'encoder.train.input'), os.path.join(data_dir, 'encoder.train.target'),\n os.path.join(data_dir, 'decoder.train.target')]\n else:\n return [os.path.join(data_dir, 'encoder.test.input'), os.path.join(data_dir, 'encoder.test.target'),\n os.path.join(data_dir, 'decoder.test.target')]", "def separate_mode_type(mode):\n m = stat.S_IMODE(mode)\n t = stat.S_IFMT(mode)\n return m, mode_to_unix(t)", "def getfiletype(self):\n d = magic.from_file(os.path.join(self.path,self.name))\n d = re.sub(', ',',',d)\n e = d.split(',')\n filetype = e[0]\n array = [False,False]\n if filetype == 'data':\n array = ['ARM','BIN']\n elif filetype == 'HIT archive data':\n array = ['MSP430', 'BIN']\n elif 
re.search('ELF',filetype):\n arch = e[1]\n if arch == 'ARM':\n array = ['ARM','ELF']\n elif arch == 'TI msp430':\n array = ['MSP430','ELF']\n else:\n pass\n else:\n pass\n\n return array", "def split(self, f):\n x = os.path.split(f)\n subjectid = os.path.split(x[-2])[-1]\n imagefile = x[-1]\n return (subjectid, imagefile)", "def GetFileOperations():\n values = __get_current_values()\n to_return = []\n for i in range(int(len(values) / 2)):\n to_return.append((values[2*i].replace(\"\\\\??\\\\\", \"\"), values[2*i+1].replace(\"\\\\??\\\\\", \"\")))\n return to_return", "def _getbyspec(self, spec: str) -> list[str]:\n res = []\n parts = self.strpath.split(self.sep)\n\n args = filter(None, spec.split(\",\"))\n for name in args:\n if name == \"drive\":\n res.append(parts[0])\n elif name == \"dirname\":\n res.append(self.sep.join(parts[:-1]))\n else:\n basename = parts[-1]\n if name == \"basename\":\n res.append(basename)\n else:\n i = basename.rfind(\".\")\n if i == -1:\n purebasename, ext = basename, \"\"\n else:\n purebasename, ext = basename[:i], basename[i:]\n if name == \"purebasename\":\n res.append(purebasename)\n elif name == \"ext\":\n res.append(ext)\n else:\n raise ValueError(\"invalid part specification %r\" % name)\n return res", "def split_file(document: str):\n class_name, sep, assignment_name = document.partition(\"-\")\n try:\n assignment_name = assignment_name.split('.')[0].split('_')[0]\n except TypeError:\n pass\n return class_name, assignment_name", "def get_tool_version_files():\n similar_files = defaultdict(list)\n for path in Runtime_Datasets.RAW_FILE_PATHS:\n filename = get_file_name(path)\n filename = filename.rsplit('_', 1)[0]\n similar_files[filename].append(path)\n\n Runtime_Datasets.RAW_FILE_PATHS = similar_files", "def parse_filename(f):\n problem = f[2:5]\n extension = f.split('.')[-1]\n if extension not in langs.keys():\n # if the extension isn't in our list we don't care about the file\n return (None, None)\n return (problem, extension)" ]
[ "0.5483648", "0.540052", "0.5353368", "0.5318837", "0.5288031", "0.5252998", "0.5236029", "0.5149559", "0.5129162", "0.5072963", "0.50405633", "0.4918019", "0.48877585", "0.48875466", "0.4871731", "0.48545542", "0.47975442", "0.47791", "0.47737268", "0.47444177", "0.4743137", "0.47430375", "0.47227854", "0.47178978", "0.47032413", "0.46997046", "0.46904126", "0.46590397", "0.46587422", "0.46528196" ]
0.640244
0
Returns the basename of the file/directory path in an _extremely_ robust way. For example, pathLeaf('/home/saheel/git_repos/szz/abc.c/') will return 'abc.c'.
def pathLeaf(path):
    head, tail = ntpath.split(path)
    return tail or ntpath.basename(head)
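Editor's note: the `or` fallback is what makes the trailing-slash case work, as this small sketch shows (paths are illustrative).
pathLeaf('/home/saheel/git_repos/szz/abc.c')    # -> 'abc.c'  (tail is non-empty)
pathLeaf('/home/saheel/git_repos/szz/abc.c/')   # -> 'abc.c'  (tail is '', so ntpath.basename(head) is used)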
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def path_leaf(path):\n head, tail = ntpath.split(path)\n return tail or ntpath.basename(head)", "def path_leaf(path):\n\thead, tail = ntpath.split(path)\n\treturn tail or ntpath.basename(head)", "def get_leafname(path):\n\n\tpos = string.rfind(path, os.sep)\n\tif pos != -1:\n\t\treturn path[pos+1:]\n\telse:\n\t\treturn path", "def basename(path):\n\n return path.rpartition(\"/\")[2]", "def basename(path):\r\n return path.replace(\"\\\\\", \"/\").split(\"/\")[-1]", "def basename(path):\r\n return split(path)[1]", "def basename(path: str) -> str:\n pass", "def get_filename(path):\n return path.split('/')[-1]", "def get_basename(absolute_file_path):\r\n return absolute_file_path.split('/')[-1]", "def path_name(self, path):\r\n ind = path.rfind(\"/\") + 1\r\n return (path[:ind], path[ind:])", "def get_name(path):\n return path.rsplit('/',1)[1]", "def getfilename(path):\r\n return path.split('\\\\').pop().split('/').pop().rsplit('.', 1)[0]", "def path_leaf(path):\n return re.sub('[^A-Za-z0-9]+', '_', path)", "def name_from_path(path):\n return path[0:-3]", "def get_last_part_of_path(path: str) -> str:\n multi_os_path = path.replace(\"\\\\\", \"/\")\n return re.search(\"(?:[^/](?!/))+$\", multi_os_path).group(0)", "def clean_file_path(path):\r\n\r\n return path.split(\"/\")[-1]", "def basename(file_path):\n return os.path.basename(file_path)", "def get_file_name(path):\n return os.path.basename(path)", "def file_name(path):\n return os.path.basename(path).split('.')[0]", "def getFilename(path):\n\tfrom os.path import split\n\tpath = normalizePath(path)\n\treturn split(path)[1]", "def basename_sans(path):\n return os.path.splitext(os.path.basename(path))[0]", "def get_base_name(path):\n return os.path.basename(path).split('.')[0]", "def strip_path(path):\n name_re = re.compile(\"[^/]*\\.([a-z]+)$\")\n return name_re.search(path).group(0)", "def _get_leaf_node_path_suffix(p):\n return _LeafNodePath(_as_root_node_tensor(p.middle[0]), p.middle[1:], p.tail)", "def just_the_name(path):\n return os.path.splitext(os.path.basename(path))[0]", "def base_name(path):\n return os.path.basename(path)", "def get_filename(file_path):\n\n # Get rid of directories and etc\n just_file = os.path.basename(file_path)\n\n # Now we return just the base name\n return os.path.splitext(just_file)[0]", "def _get_leaf(leaf, d, pattern):\n xleaf = d.rsplit('/', 1)[-1].strip()\n check_pattern = re.match('\\*(\\.[a-zA-Z0-9]+)$', pattern)\n if check_pattern:\n xten = check_pattern.groups()[0]\n if xleaf[-len(xten):] == xten:\n xleaf = xleaf[:-len(xten)].strip()\n if xleaf.find(ROOT_LEAF_PREFIX) == 0:\n return leaf\n elif leaf.strip():\n return '{0}.{1}'.format(leaf, xleaf)\n else:\n return xleaf", "def filename_from_path(filepath: str) -> str:\n return filepath.split(\"/\")[-1]", "def extract_file_name(file_path):\n # ファイルパスからファイル名(拡張子含む)を取り出す\n file_name = file_path.split('/')[-1]\n # 拡張子を取り除く\n return file_name.split('.')[0]" ]
[ "0.8202049", "0.81125677", "0.77742285", "0.75503045", "0.7526012", "0.73413885", "0.7088789", "0.6984967", "0.6955897", "0.68906826", "0.68379354", "0.6827969", "0.6780256", "0.67511636", "0.6678975", "0.6597504", "0.65930575", "0.65369546", "0.65035325", "0.6499314", "0.6480182", "0.6458834", "0.64580214", "0.6451188", "0.6439688", "0.6431518", "0.64296794", "0.63589656", "0.6319264", "0.6314489" ]
0.84374326
0
Computes the SHAs where all the fix-inducing lines were introduced (along the lines of SZZ) and records the precise location of each such line in a CSV file in the `project_corpus_path` directory. TODO Document your algo!! Args
def szz(project_corpus_path, project_snapshots_path, bugfix_SHAs_filename, \
        num_of_cores = '4', ps_bug_report_times_filename = ''):
    if ps_bug_report_times_filename == '' or os.path.isfile(ps_bug_report_times_filename):
        if not os.path.isdir(project_corpus_path) or not os.path.isdir(project_snapshots_path) or not os.path.isfile(bugfix_SHAs_filename):
            sys.stderr.write(printUsage.__doc__)
            raise IOError("""\nGiven paths are not as expected.\n `project_snapshots_path` and `project_corpus_path` should be valid directories.\n `bugfix_SHAs_filename` should be a valid file.""")
    elif not os.path.isfile(ps_bug_report_times_filename):
        sys.stderr.write(printUsage.__doc__)
        raise IOError("\nGiven paths are not as expected.\n`ps_bug_report_times_filename` should be a valid file.")

    print("Working with these input arguments:")
    print(project_corpus_path, project_snapshots_path, bugfix_SHAs_filename, ps_bug_report_times_filename)

    # # TODO code up the case for post-release time bugs
    # # bugfix_SHAs maps a bugfix SHA to its bug report time; in the dev-time bugs case, the bug report time is '' for each SHA
    # bug_report_times = ['']*len(bugfix_SHAs)
    # if ps_bug_report_times_filename != '':
    #     bug_report_times = [date for date in open(ps_bug_report_times_filename).read().splitlines()]
    # if bugfix_SHAs == [] or len(bugfix_SHAs) != len(bug_report_times):
    #     raise ValueError("\nEither the `bugfix_SHAs_filename` file is empty or doesn't match with the `ps_bug_report_times_filename` file")

    project_corpus_path += '/'
    project_snapshots_path += '/'
    project_name = pathLeaf(project_snapshots_path)

    # Only select snapshots that have `corpus` directories
    ss_names = [ss_name for ss_name in os.listdir(project_snapshots_path) \
                if os.path.isdir(project_snapshots_path + '/' + ss_name) \
                and os.path.isdir(project_corpus_path + ss_name)]
    if ss_names == []:
        raise ValueError("\nNo valid snapshots found in `project_corpus_path`")
    ss_names.sort()
    ss_paths = [project_snapshots_path + '/' + ss_name + '/' for ss_name in ss_names]
    ss_corpus_paths = [project_corpus_path + ss_name + '/' for ss_name in ss_names]

    # Get the list of commits onto which we want to map the buggy lines
    # Important Note: this implies that our resultant bugdata will be restricted to these commits
    old_file_SHAs_forall_ss = set()
    for ss_corpus_path in ss_corpus_paths:
        old_files_path = ss_corpus_path + 'test/old/'
        old_file_fullnames = [filename for filename in os.listdir(old_files_path) if filename.endswith(('c', 'cpp', 'cc', 'java'))]
        for old_file_fullname in old_file_fullnames:
            name_SHA_pair = dismemberFilename(old_file_fullname, 'old')
            old_file_SHAs_forall_ss.add(name_SHA_pair[1])
    with open(project_corpus_path + 'mapped_commits.txt', 'wb') as outfile:
        outfile.write('\n'.join(old_file_SHAs_forall_ss) + '\n')

    # Wait for processes to complete
    pool = Pool(int(num_of_cores))
    processes = []
    cmds = []
    for ss_index, ss_corpus_path in enumerate(ss_corpus_paths):
        process_ss_cmd = "python src/szz_all_commits/szz_process_ss.py " + ss_names[ss_index] + " " + ss_paths[ss_index] + " " \
                         + ss_corpus_path + " " + bugfix_SHAs_filename
        cmds.append(shlex.split(process_ss_cmd))
    # TODO check the return codes for errors
    return_codes = pool.map(call, cmds)

    # Concatenate bugdata for all snapshots into a single CSV file in `data/corpus`
    ss_bugdatas = []
    for index, ss_corpus_path in enumerate(ss_corpus_paths):
        ss_bugdata_filename = ss_corpus_path + '/ss_mappedOntoSSOnly.bugdata'
        if os.path.isfile(ss_bugdata_filename):
            data = pandas.read_csv(ss_bugdata_filename, index_col=False)
            data.insert(0, 'project', project_name)
            ss_bugdatas.append(data)
        else:
            print('\nBugdata not found for ' + ss_names[index] + '. Skipping this ss...')
    bugdata = pandas.concat(ss_bugdatas)
    bugdata_csv_filename = project_corpus_path + '/ss_bugdata.csv'
    col_names = ['project', 'sha', 'file_name', 'line_num', 'bi_sha', 'bi_file_name', 'bi_line_num', 'is_bug', 'bf_ss', 'bf_sha', 'bf_file_name', 'bf_line_num']
    bugdata.to_csv(bugdata_csv_filename, columns=col_names, index=False)
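Editor's note: for orientation only, a hypothetical invocation; all paths below are placeholders, and `num_of_cores` stays a string because the function applies `int()` to it itself.
szz('data/corpus/myproject', 'data/snapshots/myproject',
    'data/myproject_bugfix_SHAs.txt', num_of_cores='8')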
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def TranscriptionFind(genome, gene_start_stop_dict,\n gene_first_exon_dict, gene_scaff_dict,\n gene_direction, gene_set, gene_gff_line,\n bam, stand_dev_threshold, walk, min_value,\n interation_value, out_file, logger, TITLE,\n keep_gene_depth,\n default_cutoff,\n test_mode):\n logger.info(\"RESULTS in outfile: %s\", out_file)\n genome_index = index_genome_file(genome, logger)\n depth_set = set([])\n # open outfile:\n out_file_gff = out_file.split(\".\")[0] + \"based_on_min_value.gff\"\n out_file_gff2 = out_file.split(\".\")[0] + \"based_on_SD_threshold.gff\"\n logger.info(\"gff info will be in %s\", out_file_gff)\n gff_out = open(out_file_gff, \"w\")\n gff_sd_out = open(out_file_gff2, \"w\")\n gene_failed_count = 0\n gene_results_printed_count = 0\n fall_off_contig_count = 0\n default_cutoff = float(default_cutoff)\n logger.info(\"any problem and default cutoff is used. Which is %.1f\",\n default_cutoff)\n\n with open(out_file, 'w') as file_out:\n file_out.write(TITLE)\n for gene in gene_set:\n gene = gene.rstrip()\n start_stop = gene_start_stop_dict[gene]\n start, stop = start_stop.split(\"\\t\")\n start =int(start)\n stop = int(stop)\n scaffold = gene_scaff_dict[gene]\n scaffold = scaffold.rstrip()\n direction = gene_direction[gene]\n if gene in gene_first_exon_dict:\n exon_start_exon_stop = gene_first_exon_dict[gene]\n exon_start, exon_stop = exon_start_exon_stop.split(\"\\t\")\n exon_start =int(exon_start)\n exon_stop = int(exon_stop)\n else:\n exon_start =int(start)\n exon_stop = int(stop)\n # call samtools to get the depth per posititon for\n # the transcript of interest\n depth_filename = os.path.join(\"temp_reads_per_base\",\n gene + \"_depth.tmp\")\n #exon_depth_file = os.path.join(\"temp_reads_per_base\",\n #gene + \"_exon_depth.tmp\")\n scaffold_depth_file = os.path.join(\"temp_reads_per_base\",\n scaffold + \"_depth.tmp\")\n scaffold_start_stop = \"%s:%s-%s\" %(scaffold, start, stop)\n # call the func to run\n if scaffold_depth_file not in depth_set:\n depth_set.add(scaffold_depth_file)\n # print(\"not seen %s\" % scaffold)\n pipe = run_samtools_depth(scaffold, bam_file,\n scaffold_depth_file, logger)\n # call the depth for the gene specifically\n pipe = run_samtools_depth(scaffold_start_stop, bam_file,\n depth_filename, logger)\n if \"Y\" not in keep_gene_depth.upper():\n # can keep the gene depth file, or not\n os.remove(depth_filename)\n\n # assign zeros to all positions of the transcript,\n # as samtool does no report zeros\n seq_record = genome_index[scaffold]\n if \"Y\" in test_mode.upper():\n outstr = \" \".join([\"scaff = %s\" % scaffold,\n \"len scaffold = %d \" % (len(seq_record.seq)),\n \"gene = %s \" % gene,\n \"depth scaff out = \",\n scaffold_depth_file])\n logger.info(outstr)\n\n all_coverage = [0] * len(seq_record.seq)\n if \"Y\" in test_mode.upper():\n outstr = \"length all_cov = %d\" % len(all_coverage)\n logger.info(outstr)\n all_coverage = fill_in_zero_cov(all_coverage,\n scaffold_depth_file)\n # print(\"seq = \", len(seq_record.seq))\n # print(exon_all_coverage)\n # get the mean and std reads per base for exon 1\n exon_mean, exon_stdDev = avg_std_dev(all_coverage\n [exon_start:exon_stop])\n # get the mean and std reads per base for exon 1\n gene_mean, gene_stdDev = avg_std_dev(all_coverage\n [start:stop])\n if exon_mean == 0:\n warn = \"No RNAseq expression for gene exon 1 %s\" % gene\n logger.warning(\"%s: gene failed\", warn)\n gene_failed_count = gene_failed_count + 1\n continue\n out_str = \"\\t\".join([gene + \":\",\n \"Cov min: %i\" % 
min(all_coverage),\n \"max: %i\" % max(all_coverage),\n \"gene mean %0.2f:\" % gene_mean,\n \"gene std %0.2f:\" % gene_stdDev,\n \"Sliced section:\",\n \"exon mean %0.2f\" % exon_mean,\n \"exon std: %0.2f\" % exon_stdDev,\n direction])\n # logger.info(out_str)\n cut_off = exon_mean - (int(stand_dev_threshold) * exon_stdDev)\n position_mean_cov = mean(all_coverage[exon_start:exon_stop])\n # walk in 3 bases to find the position where coverage sig drops\n current_end = stop\n current_start = start\n position_mean_cov = 10000000000000\n if cut_off < default_cutoff:\n logger.warning(\"%s gene cut off set to %.1f\", gene, default_cutoff)\n cut_off = default_cutoff\n write = \"yes\"\n while position_mean_cov >= cut_off:\n current_start, current_end = walk_away_from_end(current_start,\n current_end,\n direction, walk)\n current_start, current_end = add_one_direct_aware(current_start,\n current_end,\n interation_value,\n direction)\n if current_start < 1:\n logger.warning(\"%s has fallen off start scaffold %s\",\n gene,\n scaffold)\n position_mean_cov = 0\n write = \"no\"\n fall_off_contig_count = fall_off_contig_count + 1\n break\n if current_end >= len(seq_record.seq):\n logger.warning(\"%s has fallen off end scaffold %s\",\n gene,\n scaffold)\n position_mean_cov = 0\n write = \"no\"\n fall_off_contig_count = fall_off_contig_count + 1\n break\n position_mean_cov = mean(all_coverage\n [current_start:current_end])\n if position_mean_cov == False:\n position_mean_cov = 0\n #print(\"setting position_mean_cov to: \", position_mean_cov)\n # run the while loop again to find the position where the expression\n # is less than the option min value\n current_end1 = stop\n current_start1 = start\n position_mean_cov = 10000000000\n write_min_value = \"ok\"\n while position_mean_cov >= int(min_value):\n current_start1, current_end1 = walk_away_from_end(current_start1,\n current_end1,\n direction, walk)\n current_start1, current_end1 = add_one_direct_aware(current_start1,\n current_end1,\n interation_value,\n direction)\n if current_start < 1:\n logger.warning(\"%s has fallen off start scaffold %s\", gene, scaffold)\n position_mean_cov = 0\n write_min_value = \"not_ok\"\n break\n if current_end >= len(seq_record.seq):\n logger.warning(\"%s has fallen off end scaffold %s\", gene, scaffold)\n position_mean_cov = 0\n write_min_value = \"not_ok\"\n break\n # print(\"bases = \", all_coverage[current_start1:current_end1], \"\\n\")\n position_mean_cov = mean(all_coverage[current_start1:current_end1])\n if position_mean_cov == False:\n position_mean_cov = 0\n break\n\n out_str = \"\\t\".join([gene,\n str(current_start),\n str(current_end),\n str(seq_record.seq[current_start:current_end]),\n str(current_start1),\n str(current_end1),\n str(seq_record.seq[current_start1:current_end1]),\n \"%s\" % start,\n \"%s\" % stop,\n \"%0.2f\" % gene_mean,\n \"%0.2f\" % gene_stdDev,\n \"%0.2f\" % exon_mean,\n \"%0.2f\" % exon_stdDev,\n direction,\n \"\\n\"])\n if current_start1 > 0 and current_end1 > 0 and current_start > 0 and current_end > 0:\n if write == \"yes\" and write_min_value == \"ok\":\n # print(\"writing: \", out_str)\n file_out.write(out_str)\n GENE_gff = gene_gff_line[gene]\n # for the min value approach\n\n new_gff_line1, UTR_start, UTR_stop = create_gff_line(GENE_gff, gene,\n current_start1,\n current_end1)\n Min_val_Hits_geneic_or_not = iterate_coordinate_dict(gene_gff_line,\n gene,\n scaffold,\n UTR_start,\n UTR_stop,\n logger)\n if Min_val_Hits_geneic_or_not == \"HITS genic region\":\n gene_failed_count = 
gene_failed_count + 1\n continue\n if Min_val_Hits_geneic_or_not == \"OK\":\n gff_out.write(new_gff_line1)\n # for the standard dev approach\n new2_gff_line, UTR_start, UTR_stop = create_gff_line(GENE_gff, gene,\n current_start,\n current_end)\n # Check to see if this hits a gene or not\n sd_geneic_or_not = iterate_coordinate_dict(gene_gff_line,\n gene,\n scaffold,\n UTR_start,\n UTR_stop,\n logger)\n if sd_geneic_or_not == \"HITS genic region\":\n gene_failed_count = gene_failed_count + 1\n continue\n if sd_geneic_or_not == \"OK\":\n gff_sd_out.write(new2_gff_line)\n gene_results_printed_count = gene_results_printed_count + 1\n else:\n gene_failed_count = gene_failed_count + 1\n \n logger.info(\"deleting scaffold depth files\")\n for depthfile in depth_set:\n os.remove(depthfile) \n logger.info(\"main function finished. %d gene failed\", gene_failed_count)\n logger.info(\"Results generated for . %d gene\", gene_results_printed_count)\n logger.info(\"fall_off_contig_count = %d\", fall_off_contig_count)\n gff_out.close()", "def read_snfit_results(self, snfit_res_path='../sugar_analysis_data/results/results_snfit.txt'):\n\n snfit_res = open (snfit_res_path)\n \n self.sn_name = []\n self.z = []\n self.x0 =[]\n self.x0_err = []\n self.x1 =[]\n self.x1_err = []\n self.c = []\n self.c_err = []\n self.mb = []\n self.mb_err = []\n self.cov_x0_x1 = [] \n self.cov_x0_c = []\n self.cov_x1_c = []\n self.cov_mb_x1 = []\n self.cov_mb_c = []\n self.snfit_chi2 = []\n snfit_lines = snfit_res.readlines()\n \n line_name = 0\n \n for line in range(len(snfit_lines)):\n \n snfit_lines[line] = snfit_lines[line].strip()\n snfit_lines[line] = snfit_lines[line].split(' ')\n snfit_lines[line]=[x.replace('\\n','') for x in snfit_lines[line]]\n snfit_lines[line]=[x.replace(\"'\",'') for x in snfit_lines[line]]\n \n if snfit_lines[line][0] == 'sn_name': \n self.sn_name.append(snfit_lines[line][1])\n line_name = line\n \n if snfit_lines[line][0] == 'Redshift' and line == line_name + 35 : \n self.z.append(float(snfit_lines[line][1])) \n \n if snfit_lines[line][0] == 'X0' and line == line_name + 37: \n self.x0.append(float(snfit_lines[line][1])) \n self.x0_err.append(float(snfit_lines[line][2]))\n \n if snfit_lines[line][0] == 'X1' and line == line_name + 38 : \n self.x1.append(float(snfit_lines[line][1])) \n self.x1_err.append(float(snfit_lines[line][2]))\n \n if snfit_lines[line][0] == 'Color' and line == line_name + 36: \n self.c.append(float(snfit_lines[line][1]))\n self.c_err.append(float(snfit_lines[line][2]))\n \n if snfit_lines[line][0] == 'RestFrameMag_0_B' and line == line_name + 39 : \n self.mb.append(float(snfit_lines[line][1])) \n self.mb_err.append(float(snfit_lines[line][2]))\n \n if snfit_lines[line][0] == 'CovX0X1' and line == line_name + 52: \n self.cov_x0_x1.append(float(snfit_lines[line][1]))\n \n if snfit_lines[line][0] == 'CovColorX0' and line == line_name + 43: \n self.cov_x0_c.append(float(snfit_lines[line][1])) \n \n \n if snfit_lines[line][0] == 'CovColorX1' and line == line_name + 44: \n self.cov_x1_c.append(float(snfit_lines[line][1]))\n \n if snfit_lines[line][0] == 'CovColorRestFrameMag_0_B' and line == line_name + 42: \n self.cov_mb_c.append(float(snfit_lines[line][1]))\n \n if snfit_lines[line][0] == 'CovRestFrameMag_0_BX1' and line == line_name + 50: \n self.cov_mb_x1.append(float(snfit_lines[line][1])) \n\n if snfit_lines[line][0] == '@CHI2_LC' and line == line_name + 56: \n self.snfit_chi2.append(float(snfit_lines[line][1])) \n \n self.sn_name = np.array(self.sn_name,str)\n self.z = 
np.array(self.z)\n self.x0 = np.array(self.x0)\n self.x0_err = np.array(self.x0_err)\n self.x1 = np.array(self.x1)\n self.x1_err = np.array(self.x1_err) \n self.c = np.array(self.c)\n self.c_err = np.array(self.c_err)\n self.mb = np.array(self.mb)\n self.mb_err = np.array(self.mb_err)\n self.cov_x0_x1 = np.array(self.cov_x0_x1)\n self.cov_x0_c = np.array(self.cov_x0_c)\n self.cov_x1_c = np.array(self.cov_x1_c)\n self.cov_mb_x1 = np.array(self.cov_mb_x1)\n self.cov_mb_c = np.array(self.cov_mb_c) \n self.snfit_chi2 = np.array(self.snfit_chi2)", "def wtrie_data(lines, suffix, pre_train):\n if pre_train:\n file_path = str(save_path) + '/news_{}'.format(str(suffix))\n if file_path.split('/')[-1] in os.listdir(save_path):\n _error('{} exists'.format(file_path))\n raise FileExistsError\n\n _info('Save {} \\n'.format(file_path))\n with codecs.open(file_path, 'w', 'utf-8') as file:\n if pre_train:\n for line in lines:\n # if TPU available, no need to cut the sentences with long length,\n # However, Do you think we could use TPU for training ?\n if len(line) <= 50:\n line = list(map(_to_str, line))\n file.write(' '.join(line) + '\\n')\n file.flush()\n else:\n if type(lines) is not zip:\n _error('for fine tune, the data type should be zip', head='TYPE ERROR')\n raise TypeError\n file_path = 'data/chat_idx.txt'\n with codecs.open(file_path, 'w', 'utf-8') as file:\n for que, ans in lines:\n que = list(map(_to_str, que)) # IMPORTANT\n ans = list(map(_to_str, ans))\n if (len(que) != 0) and (len(ans) != 0):\n line = ' '.join(que) + '=' + ' '.join(ans)\n file.write(line + '\\n')\n file.flush()\n else:\n continue", "def main():\n tl = TwoLocus(in_path='/csbiodata/public/www.csbio.unc.edu/htdocs/sgreens/pairwise_origins/')\n # tl = TwoLocus()\n # tl.preprocess(glob.glob('OR_ss_origins/*.hap'))\n print len(tl.list_available_strains())\n exit()\n # print len(tl.list_available_strains())\n # tl.preprocess(['cc_origins.csv'])\n # tl.preprocess(['ccv_origins.csv'])\n classical = [s for s in\n [\"129P1/ReJ\", # \"129P3/J\", \"129S1SvlmJ\", \"129S6\", \"129T2/SvEmsJ\", \"129X1/SvJ\", \"A/J\", \"A/WySnJ\",\n \"AEJ/GnLeJ\", \"AEJ/GnRk\", \"AKR/J\", \"ALR/LtJ\", \"ALS/LtJ\", \"BALB/cByJ\", \"BALB/cJ\", \"BDP/J\", \"BPH/2J\",\n # \"BPL/1J\", \"BPN/3J\", \"BTBR T<+>tf/J\", \"BUB/BnJ\", \"BXSB/MpJ\", \"C3H/HeJ\", \"C3HeB/FeJ\", \"C57BL/10J\",\n # \"C57BL/10ScNJ\", \"C57BL/10SAAAJ\", \"C57BL/6CR\", \"C57BL/6J\", \"C57BL/6NCI\", \"C57BL/6Tc\", \"C57BLKS/J\",\n # \"C57BR/cdJ\", \"C57L/J\", \"C58/J\", \"CBA/CaJ\", \"CBA/J\", \"CE/J\", \"CHMU/LeJ\", \"DBA/1J\", \"DBA/1LacJ\",\n # \"DBA/2DeJ\", \"DBA/2HaSmnJ\", \"DBA/2J\", \"DDK/Pas\", \"DDY/JclSidSeyFrkJ\", \"DLS/LeJ\", \"EL/SuzSeyFrkJ\",\n # \"FVB/NJ\", \"HPG/BmJ\", \"I/LnJ\", \"IBWSP2\", \"IBWSR2\", \"ICOLD2\", \"IHOT1\", \"IHOT2\", \"ILS\", \"ISS\", \"JE/LeJ\",\n # \"KK/HlJ\", \"LG/J\", \"LP/J\", \"LT/SvEiJ\", \"MRL/MpJ\", \"NOD/ShiLtJ\", \"NON/ShiLtJ\", \"NONcNZO10/LtJ\",\n # \"NONcNZO5/LtJ\", \"NOR/LtJ\", \"NU/J\", \"NZB/BlNJ\", \"NZL/LtJ\", \"NZM2410/J\", \"NZO/HlLtJ\", \"NZW/LacJ\", \"P/J\",\n # \"PL/J\", \"PN/nBSwUmabJ\", \"RF/J\", \"RHJ/LeJ\", \"RIIIS/J\", \"RSV/LeJ\", \"SB/LeJ\", \"SEA/GnJ\", \"SEC/1GnLeJ\",\n # \"SEC/1ReJ\", \"SH1/LeJ\", \"SI/Col Tyrp1 Dnahc11/J\", \"SJL/Bm\", \"SJL/J\", \"SM/J\", \"SSL/LeJ\", \"ST/bJ\",\n \"STX/Le\", ] # \"SWR/J\", \"TALLYHO/JngJ\", \"TKDU/DnJ\", \"TSJ/LeJ\", \"YBR/EiJ\", \"ZRDCT Rax<+>ChUmdJ\"]\n if tl.is_available(s)]\n wild_derived = [s for s in\n ['22MO',\n # 'BIK/g', 'BULS', 'BUSNA', 'BZO', 'CALB/RkJ', 'CASA/RkJ', 'CAST/EiJ', 
'CIM', 'CKN', 'CKS',\n 'CZECHI/EiJ', 'CZECHII/EiJ', 'DCA', 'DCP', 'DDO', 'DEB', 'DGA', 'DIK', 'DJO', 'DKN', 'DMZ', 'DOT',\n # 'IS/CamRkJ', 'JF1/Ms', 'LEWES/EiJ', 'MBK', 'MBS', 'MCZ', 'MDG', 'MDGI', 'MDH', 'MGA', 'MH',\n # 'MOLD/RkJ', 'MOLF/EiJ', 'MOLG/DnJ', 'MOR/RkJ', 'MPB', 'MSM/Ms', 'PERA/EiJ', 'PERC/EiJ', 'POHN/Deh',\n # 'PWD/PhJ', 'PWK/PhJ', 'RBA/DnJ', 'RBB/DnJ', 'RBF/DnJ', 'SF/CamEiJ', 'SKIVE/EiJ', 'SOD1/EiJ',\n # 'STLT', 'STRA', 'STRB', 'STUF', 'STUP', 'STUS', 'TIRANO/EiJ', 'WLA', 'WMP', 'WSB/EiJ',\n 'ZALENDE/EiJ'] if tl.is_available(s)]\n tl.contingency_table(classical, wild_derived, '/csbiohome01/sgreens/Projects/intervals/contingency.csv')\n exit()\n x = TwoLocus(chrom_sizes=[20e6, 20e6])\n x.preprocess([\"test2.csv\"])\n x.unique_combos(['A', 'B', 'D'], ['C', 'E'])\n x.sources_at_point_pair('1', 1, '1', 10000000, ['A'])\n # x.interlocus_dependence([chr(c) for c in xrange(ord('A'), ord('J')+1)])\n # exit()\n\n x = TwoLocus(chrom_sizes=[20 * 10 ** 6, 20 * 10 ** 6])\n x.preprocess([\"test.csv\"])\n rez = x.pairwise_frequencies([\"A\"])\n\n areas = x.calculate_genomic_area(rez[0], rez[1])\n total = 0.0\n\n for combo in subspecies.iter_combos():\n print \"\\t{:15s}({:4d}):{:1.5f}\".format(subspecies.to_string(combo), combo,\n areas[str(subspecies.to_string(combo))])\n total += areas[str(subspecies.to_string(combo))]\n print \"\\t{:21s}:{:1.5f}\".format(\"Total\", total)\n\n sys.exit(1)\n # for code, combo in combos.iteritems():\n # print \"\\n\", rez[1]\n # print \"\\t{} ({}):\\n{}\".format(combo, code, rez[0][code])", "def convert_bismark_add_strand_and_seq(indf, outfn):\n logger.debug(f'Start add strand and seq to bismark cov file, total len={len(indf)}')\n\n outf = gzip.open(outfn, 'wt')\n\n for index, row in tqdm(indf.iterrows(), total=len(indf), desc='Bismark_cov'):\n # if report_num and index % report_num == 0:\n # logger.debug(f'processed index={index}')\n chr = row['chr']\n start = int(row['start']) # Keep raw 1-based format of bismark results\n ret = get_dna_base_from_reference(chr, start - 1, ref_fasta=ref_fasta)\n if ret[5] == 'C': # strand is +\n strand = '+'\n elif ret[5] == 'G':\n strand = '-'\n else:\n raise Exception(f'We can not identify this bg-truth file with non-CG results, such as row={row}')\n\n outstr = '\\t'.join([chr, str(start), strand, str(row['mcount']), str(row['ccount']), ret[4:7]])\n outf.write(f'{outstr}\\n')\n outf.close()\n logger.info(f'save to {outfn}')\n\n logger.debug(f'Finish add strand info task')", "def generateSolution(self, cont):\n solnf = self.outdir + \"/tracks_soln.csv\"\n old = os.dup(1)\n sys.stdout.flush()\n os.close(1)\n os.open(solnf, os.O_WRONLY | os.O_CREAT)\n cont.printallSolutions(yetkin=self.yetkin)\n sys.stdout.flush()\n os.close(1)\n os.dup(old)\n os.close(old)", "def main():\n\n\t# Script arguments... 
\n\t\"\"\" If running as standalone, hardcode theWorkspace and inFile \"\"\"\n\ttheWorkspace = arcpy.GetParameterAsText(0)\n\tif not theWorkspace:\n\t\ttheWorkspace = r\"d:\\_dataTest\"\n\ttheWorkspace = r\"d:\\_dataTest\"\n\tarcpy.env.workspace = theWorkspace\n\tarcpy.env.overwriteOutput = True\n\toutWorkspace = os.path.join(theWorkspace, \"_repair\")\n\n\tinFile = arcpy.GetParameterAsText(1)\n\tif not inFile:\n\t\tinFile = \"updateMultipleSourcePaths.csv\"\n\t#inFile = \"FixSource4.csv\"\n\t#inFile = os.path.join(theWorkspace, inFile) + \".csv\"\n\t# opens the infile.csv, read only; then creates tuple of inFile\n\t#f = open(inFile, \"r\") \n\t#update_list = [tuple(line.strip().split(\",\") for line in f)]\n\n\n\tmxd = None\n\toutMXDName = \"none\"\n\tnewPath = []\n\t# makes sure the .csv file exists\n\tif arcpy.Exists(inFile):\n\t\tmyMsgs (\"Repair source list: \" + inFile)\n\t\t# walks thru the workspace to create list of files \n\t\tfor root, dirs, files in os.walk(theWorkspace): \n\t\t\tif root == outWorkspace:\n\t\t\t\tprint(\"heh now\")\n\t\t\t\tpass\n\t\t\t# creates list of .mxd's and works thru them\n\t\t\tmxdList = arcpy.ListFiles(\"*.mxd\")\n\t\t\tfor fileName in mxdList:\n\t\t\t\tfullPath = os.path.join(root, fileName) \n\t\t\t\tmxd = arcpy.mapping.MapDocument(fullPath)\n\t\t\t\tmyMsgs (\"*** Processing mxd: \" + fullPath)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"v:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\gisshare\\\\\", validate=False)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"t:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\GISStaff\\\\\", validate=False)\n\t\t\t\t#mxd.findAndReplaceWorkspacePaths(\"u:\\\\\", \"\\\\\\\\dfg.alaska.local\\\\gis\\\\Anchorage\\\\GISStaff\\\\\", validate=False)\n\t\t\t\t# New output mxd....\n\t\t\t\tbasename, extension = os.path.splitext(fileName)\n\t\t\t\toutMXDName = os.path.join(outWorkspace, (str(basename) + \"_fix.mxd\"))\n\t\t\t\t# create list of the tables since they are handle differently\n\t\t\t\ttheTables = arcpy.mapping.ListTableViews(mxd)\n\t\t\t\t# Loops thru layers, checks for broken links and tries to repai\n\t\t\t\tlyrList = arcpy.mapping.ListLayers(mxd)\n\t\t\t\tfor lyr in lyrList:\n\t\t\t\t\tif lyr.isBroken:\n\t\t\t\t\t\tif lyr.isGroupLayer or (\"Events\" in lyr.name):\n\t\t\t\t\t\t\tprint(\"...skipping group or event\")\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t#print(lyr.isServiceLayer)\n\t\t\t\t\t\tif lyr.isServiceLayer:\n\t\t\t\t\t\t\tif lyr.supports(\"SERVICEPROPERTIES\"):\n\t\t\t\t\t\t\t\tcnt = 0\n\t\t\t\t\t\t\t\tfor i, j in lyr.serviceProperties.iteritems():\n\t\t\t\t\t\t\t\t\tif cnt == 2:\n\t\t\t\t\t\t\t\t\t\tdataSource = str(j)\n\t\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\tcnt += 1 \n\t\t\t\t\t\t\t\tprint(\"sees this as service....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\tnewPath = findUpdatePath(inFile, dataSource)\n\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tprint(\"--> a service layer but no SERVICE PROPOERTIES\")\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tprint(lyr.dataSource)\n\t\t\t\t\t\t\tnewPath = findUpdatePath(inFile, lyr.dataSource)\n\t\t\t\t\t\t\tnewDSPath, newDSName = os.path.split(newPath[0])\n\t\t\t\t\t\t\tprint(\"..newDSPAth \" + newDSPath)\n\t\t\t\t\t\t\tprint(\"..newDSName \" + newDSName)\n\t\t\t\t\t\t\tsameType = newPath[1]\n\t\t\t\t\t\t\tprint(\" same type? 
\" + str(sameType))\n\t\t\t\t\t\t\tcvrList = [r\"\\arc\", r\"\\polygon\", r\"\\region\", r\"\\point\", r\"\\tic\" ]\n\t\t\t\t\t\t\t#print newDSPath\n\t\t\t\t\t\t\tif newPath == \"no match\":\n\t\t\t\t\t\t\t\tprint(\"...no match to: \" + lyr.dataSource)\n\t\t\t\t\t\t\t\tnewPath[0] = \"not found\"\n\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\telif lyr.supports(\"dataSource\") and lyr.supports(\"datasetName\"):\n\t\t\t\t\t\t\t\tif lyr in theTables:\n\t\t\t\t\t\t\t\t\tprint(\"thinks its a table....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False) \n\t\t\t\t\t\t\t\telif lyr.isRasterLayer:\n\t\t\t\t\t\t\t\t\tprint(\"thinks its a raster....using findAndReplsWorkspacePath\")\n\t\t\t\t\t\t\t\t\t#lyr.replaceDataSource(newPath, \"RASTER_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\tlyr.findAndReplaceWorkspacePath(lyr.dataSource, newPath, False)\n\t\t\t\t\t\t\t\telif lyr.supports(\"dataSource\") and lyr.supports(\"datasetName\"):\n\t\t\t\t\t\t\t\t\tif not sameType and newPath[1] == \"gdb\":\n\t\t\t\t\t\t\t\t\t\tprint(\"..................moving to fgdb\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", newDSName, False) \n\t\t\t\t\t\t\t\t\telif r\".shp\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a shape\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"SHAPEFILE_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".sde\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a sde\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"SDE_Workspace\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".mdb\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a pgdb\")\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"ACCESS_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif r\".gdb\" in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\tprint(\"thinks its a fgdb\")\n\n\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", lyr.datasetName, False)\n\t\t\t\t\t\t\t\t\telif sameType:\n\t\t\t\t\t\t\t\t\t\tfor cvr in cvrList:\n\t\t\t\t\t\t\t\t\t\t\tif cvr in lyr.dataSource:\n\t\t\t\t\t\t\t\t\t\t\t\tprint(\"to WS sametype is True\")\n\t\t\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"ARCINFO_WORKSPACE\", newDSName, False)\n\t\t\t\t\t\t\t\t\telif not sameType:\n\t\t\t\t\t\t\t\t\t\tfor cvr in cvrList:\n\n\t\t\t\t\t\t\t\t\t\t\tlyr.replaceDataSource(newDSPath, \"FILEGDB_WORKSPACE\", newDSName, False)\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\"\"\"else:\n newPath[0] = \"not found\" \"\"\"\n\t\t\t\t\t\t\tprint(\" **** the new data source: \" + newPath[0])\n\t\t\t\t\t\t\tprint(\"\")\n\n\t\t\t\tprint(outMXDName)\n\t\t\t\t#mxd.saveACopy(outMXDName, '10.1')\n\t\t\tif arcpy.Exists(outMXDName):\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\toutMXDName.save()\n\t\t\t\telse:\n mxd.saveACopy(outMXDName, '10.1')\n\t\t\t\tdel mxd\n\telse:\n\t\tmyMsgs (\"Repair source list: \" + inFile + \" does not exit.\")\n\n\tmyMsgs('!!! Success !!! 
')", "def postprocess_cga(lines, outfile):\n pattern = re.compile(\"^\\s*([0-9,]+)\\s+\\([ 0-9.]+%\\)\\s+Source/(\\S+):(\\S+)\\(.*\\).*$\")\n\n totalCost = 0.0\n functionTable = []\n functionMap = {}\n\n for line in lines:\n line = line.strip()\n match = pattern.match(line)\n if not match:\n continue\n\n cost = float(match.group(1).replace(\",\", \"\"))\n sourceFile = match.group(2)\n function = match.group(3)\n\n # Filter out library code we don't want to change\n if function.startswith(\"stbi__\"):\n continue\n\n totalCost += cost\n\n # Accumulate the scores from functions in multiple call chains\n if function in functionMap:\n index = functionMap[function]\n functionTable[index][1] += cost\n functionTable[index][2] += cost\n # Else add new functions to the end of the table\n else:\n functionMap[function] = len(functionTable)\n functionTable.append([function, cost, cost])\n\n # Sort the table by accumulated cost\n functionTable.sort(key=lambda x: 101.0 - x[2])\n\n for function in functionTable:\n function[2] /= totalCost\n function[2] *= 100.0\n\n with open(outfile, \"w\") as fileHandle:\n\n totals = 0.0\n for function in functionTable:\n # Omit entries less than 1% load\n if function[2] < 1:\n break\n\n totals += function[2]\n fileHandle.write(\"%5.2f%% %s\\n\" % (function[2], function[0]))\n\n fileHandle.write(\"======\\n\")\n fileHandle.write(f\"{totals:5.2f}%\\n\")", "def create_positions_filestep(vcf_filenames):\n\n filter2_only_snp_position_files_array = []\n for file in vcf_filenames:\n with open(file, 'rU') as csv_file:\n file_name = temp_dir + \"/\" + os.path.basename(file) + \"_positions\"\n addpositionfilenametoarray = file_name\n filter2_only_snp_position_files_array.append(addpositionfilenametoarray)\n f1 = open(file_name, 'w+')\n csv_reader = csv.reader(csv_file, delimiter='\\t')\n for row in csv_reader:\n position = row[0]\n if not position.startswith('#'):\n p_string = row[1] + \"\\n\"\n f1.write(p_string)\n f1.close()\n csv_file.close()\n print \"End of creating '_positions' file step\\n\"\n\n \"\"\" Create position array containing unique positiones from positions file \"\"\"\n position_array = []\n for filess in filter2_only_snp_position_files_array:\n f = open(filess, 'r+')\n for line in f:\n line = line.strip()\n position_array.append(line)\n f.close()\n position_array_unique = set(position_array)\n position_array_sort = sorted(position_array_unique)\n print \"\\nThe number of unique variant positions:\\n\" + str(len(position_array_sort)) + \"\\n\"\n unique_position_file = \"%s/unique_positions_file\" % args.filter2_only_snp_vcf_dir\n f=open(unique_position_file, 'w+')\n for i in position_array_sort:\n f.write(i + \"\\n\")\n f.close()\n if len(position_array_sort) == 0:\n print \"ERROR: No unique positions found. 
Check if vcf files are empty?\"\n exit()\n\n\n\n # \"\"\" Create position array containing all the final SNP positions from all the final vcf files\"\"\"\n # position_array = []\n # for file in vcf_filenames:\n # with open(file, 'rU') as csv_file:\n # csv_reader = csv.reader(csv_file, delimiter='\\t')\n # for row in csv_reader:\n # position = row[0]\n # if not position.startswith('#'):\n # if row[1] not in position_array:\n # position_array(row[1])\n # csv_file.close()\n #\n #\n # position_array_unique = set(position_array)\n # position_array_sort = sorted(position_array_unique)\n # print \"\\nThe number of unique variant positions:\\n\" + str(len(position_array_sort)) + \"\\n\"\n # unique_position_file = \"%s/temp/unique_positions_file\" % args.filter2_only_snp_vcf_dir\n # f=open(unique_position_file, 'w+')\n # for i in position_array_sort:\n # f.write(i + \"\\n\")\n # f.close()", "def oq_read_uhs_classical_psha(scen_info, event_info, dir_info):\n import glob\n import random\n # number of scenario\n num_scen = scen_info['Number']\n if num_scen > 1:\n print('FetchOpenQuake: currently only supporting a single scenario for PHSA')\n num_scen = 1\n # number of realizations per site\n num_rlz = event_info['NumberPerSite']\n # directory of the UHS\n res_dir = dir_info['Output']\n # mean UHS\n cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-mean_*.csv'))[0]\n print(cur_uhs_file)\n # read csv\n tmp = pd.read_csv(cur_uhs_file,skiprows=1)\n # number of stations\n num_stn = len(tmp.index)\n # number of IMs\n num_IMs = len(tmp.columns) - 2\n # IM list\n list_IMs = tmp.columns.tolist()[2:]\n im_list = [x.split('~')[1] for x in list_IMs]\n ln_psa_mr = []\n mag_maf = []\n for i in range(num_scen):\n # initialization\n ln_psa = np.zeros((num_stn, num_IMs, num_rlz))\n # collecting UHS\n if num_rlz == 1:\n ln_psa[:, :, 0] = np.log(tmp.iloc[:, 2:])\n else:\n num_r1 = np.min([len(glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))), num_rlz])\n for i in range(num_r1):\n cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))[i]\n tmp = pd.read_csv(cur_uhs_file,skiprows=1)\n ln_psa[:, :, i] = np.log(tmp.iloc[:, 2:])\n if num_rlz > num_r1:\n # randomly resampling available spectra\n for i in range(num_rlz-num_r1):\n rnd_tag = random.randrange(num_r1)\n print(int(rnd_tag))\n cur_uhs_file = glob.glob(os.path.join(res_dir,'hazard_uhs-rlz-*.csv'))[int(rnd_tag)]\n tmp = pd.read_csv(cur_uhs_file,skiprows=1)\n ln_psa[:, :, i] = np.log(tmp.iloc[:, 2:])\n ln_psa_mr.append(ln_psa)\n mag_maf.append([0.0,float(list_IMs[0].split('~')[0]),0.0])\n \n # return\n return ln_psa_mr, mag_maf, im_list", "def write_usearch61_log(log_fp,\r\n input_seqs_fp,\r\n output_dir,\r\n reference_seqs_fp,\r\n suppress_usearch61_intermediates,\r\n suppress_usearch61_ref,\r\n suppress_usearch61_denovo,\r\n split_by_sampleid,\r\n non_chimeras_retention,\r\n usearch61_minh,\r\n usearch61_xn,\r\n usearch61_dn,\r\n usearch61_mindiffs,\r\n usearch61_mindiv,\r\n usearch61_abundance_skew,\r\n percent_id_usearch61,\r\n minlen,\r\n word_length,\r\n max_accepts,\r\n max_rejects,\r\n HALT_EXEC,\r\n log_lines):\r\n\r\n out_f = open(log_fp, \"w\")\r\n\r\n param_names = [\"input_seqs_fp\", \"output_dir\",\r\n \"reference_seqs_fp\", \"suppress_usearch61_intermediates\",\r\n \"suppress_usearch61_ref\", \"suppress_usearch61_denovo\",\r\n \"split_by_sampleid\", \"non_chimeras_retention\", \"usearch61_minh\",\r\n \"usearch61_xn\", \"usearch61_dn\", \"usearch61_mindiffs\", \"usearch61_mindiv\",\r\n \"usearch61_abundance_skew\", 
\"percent_id_usearch61\", \"minlen\",\r\n \"word_length\", \"max_accepts\", \"max_rejects\", \"HALT_EXEC\"]\r\n\r\n param_values = [input_seqs_fp, output_dir,\r\n reference_seqs_fp, suppress_usearch61_intermediates,\r\n suppress_usearch61_ref, suppress_usearch61_denovo,\r\n split_by_sampleid, non_chimeras_retention, usearch61_minh,\r\n usearch61_xn, usearch61_dn, usearch61_mindiffs, usearch61_mindiv,\r\n usearch61_abundance_skew, percent_id_usearch61, minlen,\r\n word_length, max_accepts, max_rejects, HALT_EXEC]\r\n\r\n for curr_param in range(len(param_names)):\r\n out_f.write(\"%s\\t%s\\n\" % (param_names[curr_param],\r\n param_values[curr_param]))\r\n\r\n out_f.write(\"\\n\")\r\n\r\n for curr_line in log_lines.keys():\r\n out_f.write(\"%s\\t%s\\n\" % (curr_line, log_lines[curr_line]))\r\n\r\n out_f.close()\r\n\r\n return", "def main():\n\n preprocessed_file = preprocess_clinical_trials()\n\n preprocessed_file.to_csv(PREPROCESSED_CLINICAL_TRIALS_FILE_PATH, index=False)", "def main():\n\n # Script arguments... \n \"\"\" If running as standalone, hardcode theWorkspace and inFile \"\"\"\n theWorkspace = arcpy.GetParameterAsText(0)\n if not theWorkspace:\n theWorkspace = r\"d:\\_dataTest\"\n arcpy.env.workspace = theWorkspace\n arcpy.env.overwriteOutput = True\t\n\n inFile = arcpy.GetParameterAsText(1)\n if not inFile:\n inFile = \"updateMultipleSourcePaths.csv\"\n inFile = r\"\\\\dfg.alaska.local\\gis\\Anchorage\\GISStaff\\___gisStaffConnections\\RepairBrokenSrcAug242015.csv\"\n\n outWorkspace = arcpy.GetParameterAsText(2)\n if not outWorkspace:\n outWorkspace = os.path.join(theWorkspace, \"_repaired\")\n '''if not os.path.isdir(outWorkspace): \n os.makedirs(outWorkspace)\n myMsgs(\"created new directory {0} \\n\".format(outWorkspace))'''\n\n # Create .txt Report of what it thinks was fixed, tagged with YYYYMMDD_HHMM\n outFile = \"FixedReport\"\n fileDateTime = curFileDateTime()\n currentDate = curDate()\n outfileTXT = os.path.join(theWorkspace, outFile) + fileDateTime + \".txt\" \n myMsgs (outFile)\n reportFile = open(outfileTXT, 'w')\n myMsgs( \"File {0} is open? 
{1}\".format(outfileTXT, str(not reportFile.closed)))\n outText = \"Report for what it THINKS it repaired in {0}, on {1} \\n \".format(theWorkspace, currentDate)\n outText += \" Includes coverages (pts, poly, arc, anno), shapes, and FGDB data.\" + '\\n'\n outText += \"-----------------------------------------------------\" + '\\n' \n reportFile.write(outText)\t\n\n mxd = None\n outMXDName = \"none\"\n updatePath = []\n cvrList = [r\"\\arc\", r\"\\polygon\", r\"\\region\", r\"\\point\", r\"\\tic\" ]\n lstExtDatatype = [[\".shp\", \"SHAPEFILE_WORKSPACE\" ], [\".sde\",\"SDE_WORKSPACE\"], \n [\".mdb\", \"ACCESS_WORKSPACE\" ], [\".gdb\", \"FILEGDB_WORKSPACE\"], \n [\"cover\", \"ARCINFO_WORKSPACE\"]]\t\n cntMXD = 0\n cntFixed = 0\n cntTotalFixed = 0\n\n # makes sure the .csv file exists\n if arcpy.Exists(inFile):\n myMsgs (\"->Using {0} to repair paths.\\n==============================\".format(inFile))\n # walks thru the workspace to create list of files \n for root, dirs, files in os.walk(theWorkspace): \t\t\n for fileName in files:\n if root == outWorkspace: # don't process mxd's in the target directory\n pass\n else:\n fullPath = os.path.join(root, fileName)\n basename, extension = os.path.splitext(fileName)\n # Only process .mxd files\n if extension == \".mxd\":\n myMsgs(\"\\nReviewing MXD: {0}\".format(fullPath))\n reportFile.write(\"\\nReviewing MXD: {0}\".format(fullPath))\n mxd = arcpy.mapping.MapDocument(fullPath)\n dfs = arcpy.mapping.ListDataFrames(mxd)\n cntMXD += 1\n cntFixed = 0\n basename, extension = os.path.splitext(fileName)\n # New output mxd name....\n outMXDName = os.path.join(outWorkspace, (str(basename) + \".mxd\")) #\"_fix.mxd\"))\n # create list of the tables since they are handle differently\n theTables = arcpy.mapping.ListTableViews(mxd)\n # Loops thru dataframes so adding and deleting Services will work.\n for df in dfs:\n # Loops thru layers, checks for broken links and tries to repair\n lyrList = arcpy.mapping.ListLayers(mxd, \"\", df)\n for lyr in lyrList:\n if lyr.isBroken:\n if not lyr.supports(\"DATASOURCE\") and not lyr.isServiceLayer:\n myMsgs(\" ->Skipping {0} not a Service layer, and does not support DATASOURCE\".format(lyr.name))\n pass #continue\n elif not lyr.supports(\"DATASOURCE\") and lyr.isServiceLayer:\n myMsgs(\" -Broken Service: {0}\".format(lyr.name))\n else:\n myMsgs(\" -Broken: {0}\".format(lyr.dataSource))\n #myMsgs(\"layer is Group {0} or ServiceLayer {1}\".format(lyr.isGroupLayer, lyr.isServiceLayer))\n if (lyr.isGroupLayer or (\"Events\" in lyr.name)) and (not lyr.isServiceLayer): # Groups and Event FC skipped\n myMsgs(\" ...skipping group or event: {0}\".format(lyr.name))\n reportFile.write(\"\\n *skipping group or event: {0} \\n\".format(lyr.name))\n pass #break\n elif lyr.isServiceLayer: # services might have to be handle differently\n if lyr.supports(\"SERVICEPROPERTIES\"):\n for spType, spName in lyr.serviceProperties.iteritems():\n myMsgs(\" Service Properties: {0}: {1}\".format(spType, spName ))\n if spType == \"URL\": \n dataSource = str(spName)\n lyrType = (\"service_{}\".format(lyr.name))\n break\n myMsgs(\" ->this ia a service....using add and remove layer\")\n updatePath = findUpdatePath(inFile, dataSource, lyrType.strip().lower())\n newDSPath, newDSName = os.path.split(updatePath[0])\n if (\"service\" in updatePath[3]) and (\"service\" in updatePath[1]):\n insertLayer = arcpy.mapping.Layer(updatePath[0])\n print(\"dataframe: {0}\".format(df))\n arcpy.mapping.InsertLayer(df, lyr, insertLayer, \"AFTER\")\n 
arcpy.mapping.RemoveLayer(df, lyr)\n reportFile.write(\"\\n ->sees this as service....{0} \\n\".format(dataSource))\n # will still look at deleted version after insert, not the new version..\n # isBroken will give false info even if fixed, so \n # don't use myMsgs(\"Still broken? {0}\".format(lyr.isBroken)) \n else:\n myMsgs(\" --> a service layer but no SERVICE PROPERTIES\")\n elif lyr.supports(\"DATASOURCE\") and lyr.supports(\"DATASETNAME\"): \n # not a group, event or what it thinks is a service\n updatePath = findUpdatePath(inFile, lyr.dataSource, \"\")\n newDSPath, newDSName = os.path.split(updatePath[0])\n sameType = updatePath[2] \n for cvr in cvrList: #checks to see if the source layer is a coverage...must handle different\n if cvr in lyr.dataSource:\n sourceIsCoverage = True\n break\n else:\n sourceIsCoverage = False\n # updatePath[1] is False if there wasn't a match\n # so \"not update[1]\" means no match was found, and moves to next layer\t\t\t\t\t\t\t\t\n if not updatePath[1]: # if no match was found\n myMsgs(\" !! no match to: {0} \".format(lyr.dataSource))\n updateStatus = \"no match, not changed\" # used for message only\n pass\n elif updatePath[1].strip().lower() == \"drive\":\n myMsgs(\" skipping drive-letter matches for now: {0}\".format(lyr.dataSource))\n updateStatus = \"can only find drive match...look into it)\"\n pass\n elif updatePath[1].strip().lower() == \"_review\":\n myMsgs(\" no new source assigned yet for: {0}\".format(lyr.dataSource))\n updateStatus = (\"review and update {0}\".format(inFile))\n pass\n else: #if lyr.supports(\"DATASOURCE\") and lyr.supports(\"DATASETNAME\"):\n updateStatus = str(updatePath[0]) # used for message only\n if lyr in theTables:\n #myMsgs(\" thinks its a table....using findAndReplsWorkspacePath\")\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2} \\n\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n lyr.findAndReplaceWorkspacePath(lyr.dataSource, updatePath, False) \n elif lyr.isRasterLayer:\n #myMsgs(\" thinks its a raster....using findAndReplsWorkspacePath\")\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2} \\n\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n newType = \"RASTER_WORKSPACE\"\n for extType in lstExtDatatype:\n if extType[0] in updatePath[0]:\n newType = extType[1] \n if extType[0] == '.gdb':\n newDSPath = newDSPath.split('.gdb', 1)[0] + '.gdb'\n #newType = extType[1]\n elif extType[0] == '.sde':\n newDSPath = newDSPath.split('.sde', 1)[0] + '.sde'\n break \n lyr.replaceDataSource(newDSPath, newType, newDSName, False)\n if not sameType:\n testOldTOC = updatePath[4].strip('\\\\')\n if lyr.name == testOldTOC:\n lyr.name = lyr.datasetName\n else:\n newType = updatePath[1] \n if sourceIsCoverage and sameType:\n newDSPath = os.path.split(newDSPath)[0]\n newType = \"ARCINFO_WORKSPACE\"\n for extType in lstExtDatatype:\n if extType[0] in updatePath[0]:\n newType = extType[1]\n if extType[0] == '.gdb':\n newDSPath = newDSPath.split('.gdb', 1)[0] + '.gdb'\n #newType = extType[1]\n elif extType[0] == '.sde':\n newDSPath = newDSPath.split('.sde', 1)[0] + '.sde'\n\n break\n print(\"line ~281 newType is: {0}\".format(newType))\n myMsgs(\" *Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, updatePath[0]))\n reportFile.write(\"\\n Moving {0}: {1} to new: {2}\".format(updatePath[3], lyr.dataSource, 
updatePath[0]))\n lyr.replaceDataSource(newDSPath, newType, newDSName, False)\n #myMsgs(\" new datasource: {0}\".format(lyr.dataSource))\n myMsgs(\" **the new data source: {0}\".format(updateStatus))\n cntFixed += 1\n myMsgs(\" Still broken? {0}\".format(lyr.isBroken))\n else:\n myMsgs(\"not sure what it is, but can't process {0}\".format(lyr.name))\n \n else:\n myMsgs(\" -Not Broken: {0}\".format(str(lyr)))\n\n myMsgs(\" Number of links fixed processed: {0}\".format(cntFixed))\n myMsgs(\" -{0} Review complete.\".format(fullPath))\n reportFile.write(\" -Number of links fixed processed: {0} \\n\".format(cntFixed))\t\t\t\t\t\t\n reportFile.write(\" -{0} Review complete. \\n\\n\".format(fullPath))\n\n if cntFixed > 0:\n mxd.save()\n myMsgs(\"saved to {0}\".format(fullPath))\n reportFile.write(\"saved to {0}\".format(fullPath))\n cntTotalFixed += cntFixed\n cntFixed = 0\n \"\"\"if cntFixed > 0:\n\t\t\t\t\t\t\tmxd.saveACopy(outMXDName, '10.1')\n\t\t\t\t\t\t\tmyMsgs(\"saved to {0}\".format(outMXDName))\n\t\t\t\t\t\t\tcntFixed = 0\"\"\"\n '''if arcpy.Exists(outMXDName):\n outMXDName.()\n myMsgs(\"saved 1\")\n else:\n mxd.saveACopy(outMXDName, '10.1')\n myMsgs(\"saved 2\")'''\n del mxd\n cntFixed = 0\n else:\n myMsgs (\"ERROR: Required repair source list: [0] does not exit. \\n\".format(inFile))\n outText = (\"\\n\\n ==========================================\")\n outText += (\"\\n Number of MXD's processed: {0} \\n\".format(cntMXD))\n outText += (\" Total Number of links it fixed, all mxds: {0} \\n\".format(cntTotalFixed) )\n\n myMsgs(\" {0}\".format(outText))\n\n reportFile.write(outText)\n # close the .txt file, \n reportFile.close()\n myMsgs( \"File {0} is closed? {1}\".format(outfileTXT, str(reportFile.closed)))\t\n\n myMsgs('!!! Success !!! ')", "def ana_fixations_spatial_distribution(data_dict):\n counter = collections.Counter()\n for search in data_dict['searches'].viewvalues():\n for fixation in search['path'][1:]: # Exclude first fixation. 
It is always (0,0).\n counter[tuple(fixation)] += 1\n frequency_map = np.zeros((data_dict['senzory_map'].shape[0],1))\n for i in xrange(len(frequency_map)):\n frequency_map[i] = counter[tuple(data_dict['senzory_map'][i])]\n return np.hstack((data_dict['senzory_map'],frequency_map))", "def runcircos(self):\n pd.read_csv(self.cns, sep=\"\\t\")[\n [\"chromosome\", \"start\", \"end\", \"tcn\"]\n ].rename({\"chromosome\": \"chrm\", \"tcn\": \"cns\"}, axis=1).to_csv(\n self.segs, index=None\n )\n\n passed_svs = [\n sv\n for sv in self.svs.values()\n ]\n circos_sv_file = os.path.join(\n self.out_dir, \"circos_svs.tsv\"\n )\n circos_df = pd.DataFrame(\n [\n (\"chr\" + sv.chr1, sv.pos1, sv.pos1, \"chr\" + sv.chr2, sv.pos2, sv.pos2)\n for sv in passed_svs\n ],\n columns=[\n \"Chromosome\",\n \"chromStart\",\n \"chromEnd\",\n \"Chromosome.1\",\n \"chromStart.1\",\n \"chromEnd.1\",\n ],\n )\n circos_df.to_csv(circos_sv_file, index=None)", "def regenerate_gas_strat_lut_files():\n for record in gas_records.values():\n record(force_strat_calculation=True, save_strat=True)", "def shard(xrec=None):\n\n xrec = conf.get_current()\n\n print \"FIX_DAT: shard()\", xrec\n zip_ver = xrec['zip_dir']\n\n xindex = []\n\n regEx = re.compile(\"[A-Z]{5}\")\n\n inputfile = conf.work_dir(\"/xplane_unzipped/%s/earth_fix.dat\" % (xrec['zip_dir']))\n c = 0\n print inputfile\n\n\n with open(inputfile) as readnav:\n\n for line in readnav:\n c += 1\n\n # Skip first three lines, hope Robin Peel will never change this behaviour ;-)\n if c < 4:\n pass\n else:\n\n if not line.startswith(\"99\"):\n\n lst = line.strip().split()\n fix_ident = str(lst[2])\n\n fixblob = None\n\n if fix_ident == \"NPOLE\":\n pass\n\n else:\n\n ## Write shard\n blob_path = conf.raw_fix_path( xrec, fix_ident)\n #print file_path, xrec\n\n f = open(blob_path + \".txt\", \"w\")\n f.write(line)\n f.close()\n\n ## make dic\n maj = True if regEx.match(fix_ident) else False\n data = dict(\n ident=fix_ident, major=maj, src=line,\n lat=str(lst[0]), lon=str(lst[1])\n )\n json_path = blob_path + \".json\"\n conf.write_json(json_path, data)\n\n\n\n xindex.append(fix_ident)\n\n\n if c % 5000 == 0:\n print \" > fix: %s - %s of %s\" % (fix_ident, c, MAX_LINES_GUESS)\n #sys.exit(0)\n #db.Con.commit()\n\n ## commit any outstanding after rows at end of loop\n #db.Con.commit()", "def extract_syllable_features_from_txt():\n input_files = sys.argv[1]\n csv_name = sys.argv[2]\n syllable_stats = pd.DataFrame(columns=SYLLABLE_COLUMNS)\n re_word = re.compile(r'[\\w-]+')\n i = 0\n for filename in os.listdir(input_files):\n if filename != '.DS_Store':\n print(filename, i)\n syllable_count = 0\n for line in open(input_files+filename):\n for word in re_word.findall(line):\n syllable_count += estimate(word)\n syllable_stats = syllable_stats.append({\n TRANSCRIPT_ID: filename[:-4],\n MEMORY_SYLLABLE_COUNT: syllable_count,\n }, ignore_index=True)\n i += 1\n syllable_stats = syllable_stats.set_index(TRANSCRIPT_ID)\n syllable_stats.to_csv(csv_name+'.csv')", "def process_ssearch36_df(name, ssearch_df, fasta_inputs, out_dir):\n\n fasta_in = [x for x in fasta_inputs if name.split(\".fasta.ss\")[0] == re.sub(\".fasta$\",\"\",os.path.basename(x))][0]\n fasta_file = pyfasta.Fasta(fasta_in)\n print(name) \n try:\n os.mkdir(out_dir)\n except:\n pass\n with open(os.path.join(out_dir, os.path.basename(fasta_in)),\"w\") as out_f:\n for gene in (ssearch_df[\"gene\"].unique()):\n ssearch_tmp = ssearch_df[ssearch_df[\"gene\"] == gene]\n gene_match = 
(ssearch_tmp.sort_values(by=\"11\",ascending=False).head(n=1)[\"strain\"])\n if (any(gene_match.isin([gene]))):\n out_f.write(\">\" + gene +\"\\n\")\n out_f.write(str(fasta_file[gene]) + \"\\n\")", "def inout_creator(df = pd.DataFrame(), features='datosrahm.csv'):\r\n df = df\r\n \r\n start=time.time()\r\n \r\n datos=pd.read_csv(features)\r\n datos=datos.fillna(-1)\r\n\r\n dicc=dict(datos[['Symbol','Z']].values)\r\n\r\n dicc['D']=1\r\n dicc['Bk']=97\r\n dicc['Cf']=98\r\n dicc['Es']=99\r\n dicc['Fm']=100\r\n dicc['Md']=101\r\n dicc['No']=102\r\n dicc['Lr']=103\r\n \r\n max_sitios = max(df['sitios'].values)\r\n \r\n X=np.zeros((len(df),max_sitios,104))\r\n\r\n mult=np.zeros((len(df),max_sitios))\r\n wyckmul=np.load('support/WyckoffSG_dict.npy').item()['wyckmul']\r\n \r\n todelete = list()\r\n \r\n for row in range(len(df)):\r\n item=df['WyckOcc'][row]\r\n sitios=list(item.values()) \r\n sitocc=np.zeros((len(sitios),104)) \r\n spacegroup = str(df['sgnum'][row]).zfill(3)\r\n \r\n try:\r\n \r\n s=[int(wyckmul[spacegroup][i]) for j in [list(item.keys()) for item in \\\r\n sitios] for i in j]\r\n \r\n except:\r\n print(row)\r\n print('There exists an error concerning with the space group of CIF ', df['cif'][row],'\\n')\r\n print('Please check in www.crystallography.net to provide the correct space group number of that CIF',\r\n '\\n','\\n')\r\n spacegroup=input('Give me the correct spacegroup:'+'\\n'+'\\n')\r\n s=[int(wyckmul[spacegroup][i]) for j in [list(item.keys()) for item in \\\r\n list(df['WyckOcc'][row].values())] for i in j]\r\n \r\n occs=[]\r\n for i in range(len(sitios)):\r\n\r\n for j in list(sitios[i].values()):\r\n \r\n ocupacion=np.array(list(j.values()))\r\n llaves=[llave.replace('+','').replace('-','').replace('1',\r\n '').replace('2','').replace('3','').replace('4',\r\n '') for llave in np.array(list(j.keys()))]\r\n llaves=[llave.replace('.','') for llave in llaves]\r\n llaves=[llave.replace('5','').replace('6','').replace('7',\r\n '').replace('8','').replace('9','').replace('0',\r\n '') for llave in llaves]\r\n vector=np.zeros((1,104))\r\n occs=[sum(ocupacion)]+occs\r\n \r\n try:\r\n \r\n idx=[dicc[k] for k in llaves]\r\n \r\n except:\r\n print('The compound with the cif ', df['cif'][row], ' will be deleted')\r\n print('The database will be updated')\r\n todelete += [row]\r\n \r\n for k in idx:\r\n vector[0][k-1] = ocupacion[idx.index(k)]\r\n \r\n sitocc[i]=vector\r\n \r\n while sitocc.shape[0] != max_sitios:\r\n sitocc=np.concatenate((np.zeros((1,104)),sitocc))\r\n s=[0]+s\r\n \r\n X[row,:,:]=sitocc\r\n mult[row]=s\r\n \r\n features=datos.iloc[:,2:].values\r\n x=X[:,:,:96]\r\n \r\n fracsum = np.expand_dims(np.sum(x,axis=2), axis=2)\r\n \r\n x=np.dot(x,features) \r\n \r\n x = np.delete(x, todelete,axis=0)\r\n df = df.drop(df.index[todelete]).reset_index(drop=True)\r\n \r\n print('inout_creator lasted ',round(time.time()-start,2),' s') \r\n return x, fracsum, df", "def main(args):\n fn = open(args.filename,\"r+\")\n for i, line in enumerate(fn, start = 1):\n f = open(\"string_examples_%i.txt\" %i,'w+')\n check = letter_check(line.rstrip())\n if check == 0:\n print('Sequence:', line.rstrip(), ' includes letters other than A,C,T or G, please revise this sequence')\n else:\n panda = create_panda(line.rstrip())\n LingC = calculate_LC(line.rstrip())\n f.write(line)\n f.write(str(LingC))\n f.close()\n panda.to_csv('data%i.csv' %i)", "def pre_process_salesrank(input_path, output_path):\n files = [pos_json for pos_json in os.listdir(input_path) if pos_json.endswith('.json')]\n # files = 
os.listdir(input_path)\n asins = list(map(lambda each:each.strip(\"_com_norm.json\"), files))\n\n if not os.path.exists(output_path):\n os.mkdir(output_path)\n\n seq = [i for i in range(40, 66760+1, 40)]\n \n big_df = pd.DataFrame(columns=['asin', 'rank'])\n \n for asin, filename in zip(asins, files):\n try:\n df = pd.read_json(os.path.join(input_path, filename), typ='series', convert_axes=False)\n df = df.to_frame(name='rank')\n df = df.assign(asin=asin)\n big_df = big_df.append(df, sort=False)\n print(asins.index(asin))\n except:\n print(f\"failed to process {filename}\")\n continue\n if asins.index(asin) in seq:\n big_df.index.name = 'ts'\n big_df.to_csv(f\"{output_path}/{asins.index(asin)}.csv\")\n print(asins.index(asin))\n big_df = pd.DataFrame(columns=['asin', 'rank'])", "def sasa(grofile,trajfile,**kwargs):\n\n\t#---unpack\n\tsn = kwargs['sn']\n\twork = kwargs['workspace']\n\n\t#---compute SASA\n\tdssp_in = get_sasa(grofile,trajfile)\n\n\t#---pack\n\tattrs,results = {},{}\n\n\tfor chain in dssp_in.keys():\n\t\tresids = array(sorted(dssp_in[chain].keys()))\n\t\tchain_ids = array([chain]*len(dssp_in[chain].keys()))\n\t\tresnames = array([dssp_in[chain][k]['resname'] for k in resids])\n\t\tdssp = array([dssp_in[chain][k]['dssp'] for k in resids])\n\t\trel_sasa = array([dssp_in[chain][k]['rel_sasa'] for k in resids])\n\t\tabs_sasa = array([dssp_in[chain][k]['abs_sasa'] for k in resids])\n\t\tphi = array([dssp_in[chain][k]['phi'] for k in resids])\n\t\tpsi = array([dssp_in[chain][k]['psi'] for k in resids])\n\t\ttime = array([dssp_in[chain][k]['time'] for k in resids])\n\n\t\tif 'resid' in results.keys() and 'dssp' in results.keys():\n\t\t\tresults['dssp'] = append(results['dssp'],dssp,axis=0)\n\t\t\tresults['rel_sasa'] = append(results['rel_sasa'],rel_sasa,axis=0)\n\t\t\tresults['abs_sasa'] = append(results['abs_sasa'],abs_sasa,axis=0)\n\t\t\tresults['phi'] = append(results['phi'],phi,axis=0)\n\t\t\tresults['psi'] = append(results['psi'],psi,axis=0)\n\t\t\tresults['resid'] = append(results['resid'],resids,axis=0)\n\t\t\tresults['chain_id'] = append(results['chain_id'],chain_ids,axis=0)\n\t\t\tresults['resname'] = append(results['resname'],resnames,axis=0)\n\t\t\tresults['time'] = append(results['time'],resnames,axis=0)\n\t\telse:\n\t\t\tresults['dssp'] = dssp\n\t\t\tresults['rel_sasa'] = rel_sasa\n\t\t\tresults['abs_sasa'] = abs_sasa\n\t\t\tresults['phi'] = phi\n\t\t\tresults['psi'] = psi\n\t\t\tresults['resid'] = resids\n\t\t\tresults['chain_id'] = chain_ids\n\t\t\tresults['resname'] = resnames\n\t\t\tresults['time'] = time\n\treturn results,attrs", "def generatePositivePHASLoci(options,whole_mapped_data,phase,cycle):\n out_filename=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\".positive_phase_loci\"\n fhw=open(out_filename,\"w\")\n for chromosome in sorted(whole_mapped_data):\n filename=options.output_directory_per_run+\"/\"+options.input_filename+\"_\"+str(phase)+\"_\"+str(cycle)+\"_\"+chromosome+\".regionsOfInterest.concentrated\"\n try:\n fhr=open(filename,\"r\")\n except FileNotFoundError:\n continue\n flag_reg=1000\n window_start,window_end=0,0\n for line in fhr:\n \"\"\"pvalue=float(line.strip().split()[-1])\n if pvalue>=options.pvalue_cutoff:continue\"\"\"\n register,start,end=map(int,line.strip().split()[:3])\n if register==flag_reg:\n if window_end>start:\n window_end=end\n else:\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n window_start=start\n window_end=end\n else:\n if 
flag_reg!=1000:\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n window_start=start\n window_end=end\n flag_reg=register\n fhr.close()\n fhw.write(chromosome+\"\\t\"+str(window_start)+\"\\t\"+str(window_end)+\"\\n\")\n fhw.close()", "def set_words(data_path):\n w_df = pd.read_csv(data_path, names=['es','gn','syn1','syn2'], encoding='iso-8859-1') # file -i\n gn_df = w_df[['gn','syn1','syn2']].drop_duplicates()\n gn_lst = gn_df['gn'].tolist()+gn_df['syn1'].tolist()+gn_df['syn2'].tolist()\n cleanedList = [x for x in gn_lst if str(x) != 'nan' and len(x)>=3]\n gn_set = set(cleanedList)\n \n print(len(gn_set))\n \n f = open(data_path[:-4]+\".txt\", 'w')\n for w in gn_set:\n f.write('{}\\n'.format(w))\n f.close()\n \n return list(gn_set)", "def read_infsao_file(saofile, atoms, natm, saos):\n \n #\n # Read in the infsao file\n #\n with open(saofile, 'r') as infsao:\n infsao_lines = infsao.readlines()\n\n #\n # (1) Initialise the SAO objects\n #\n n=-1\n for line in infsao_lines:\n if 'representation' in line:\n # Update the number of irreps\n aobas.nirrep+=1\n n+=1\n # Keywords on the line\n keywords=line.split()\n # Append the irrep label to the list\n aobas.irrep_labels.append(keywords[1])\n # Create an SAO class for the irrep\n saos.append(aobas.SAO()) \n # Set the irrep label and number of SAOs for the irrep\n saos[n].set_label(keywords[1])\n saos[n].set_nsao(int(keywords[5]))\n\n #\n # (2) Read in the AO-to-SAO transformations\n #\n # Initialise the irrep and line counters\n irrep = -1\n i = -1\n\n # Loop over lines in infsao\n while True:\n\n # Increment the line counter\n i += 1\n \n # Are we at the next irrep?\n if 'SAO-index ' in infsao_lines[i]:\n # Increment te irrep counter\n irrep += 1\n\n # Initialise the SAO counter\n nsao = -1\n\n # Loop over SAOs\n while True:\n\n # Increment the line counter\n i += 1\n\n # Split the line into keywords\n keywords=infsao_lines[i].split()\n \n # Are we at the next SAO?\n if len(keywords) == 7:\n # Increment the SAO counter\n nsao += 1\n\n # Initialise the current column in the\n # AO-to-SAO transformation matrix\n column = np.zeros(aobas.nao_tot)\n \n # Read in the SAO coefficients\n atnum = int(keywords[2])-1\n aolbl = str(keywords[4])+' '+str(keywords[5])\n aonum = atoms[atnum].offset+atoms[atnum].aolbls.index(aolbl)\n column[aonum]=float(keywords[6])\n while True:\n i += 1\n keywords1=infsao_lines[i].split()\n if len(keywords1) == 5:\n atnum = int(keywords1[0])-1\n aolbl = str(keywords1[2])+' '+str(keywords1[3])\n aonum = atoms[atnum].offset+atoms[atnum].aolbls.index(aolbl)\n column[aonum]=float(keywords1[4])\n else:\n # Break out of the SAO coeff. 
loop\n i -= 1\n break\n\n # Add the current current column in the\n # AO-to-SAO transformation matrix\n saos[irrep].add_column(column)\n \n # Break out of the SAO loop if we have passed the\n # last SAO for the current irrep\n if nsao+1 == saos[irrep].nsao:\n break\n \n # Break out of the main loop of we have parsed the entry for\n # the last irrep\n if irrep+1 == aobas.nirrep:\n break", "def save_segmentation_samples(self, dest=\"./Datasets/IsophonicsSegmentation.seg\", song_indices=[0, 10, 20, 30, 40, 50, 60, 70], hop_length=512, norm_to_C=False, spectrogram_generator=log_mel_spectrogram, n_frames=500):\n data = []\n chords = []\n gold_targets = []\n # Iterate over all song indices on the input\n for song_ind in song_indices:\n # Prprocess audio\n preprocessed_audio = IsophonicsDataset.preprocess_audio(\n waveform=self.DATA[song_ind].WAVEFORM,\n sample_rate=self.DATA[song_ind].SAMPLE_RATE,\n spectrogram_generator=spectrogram_generator,\n nfft=self.NFFT, hop_length=hop_length,\n norm_to_C=norm_to_C, key=self.KEYS[song_ind].get_first_key()\n ).swapaxes(0,1)\n\n num_samples, _ = preprocessed_audio.shape\n\n # Convert data and chord targets to sequences\n data_in_seqs, targets_in_seqs = Dataset.songs_to_sequences(\n FEATURESs=[preprocessed_audio],\n CHORDs=[self.CHORDS[song_ind]],\n TIME_BINSs=[[float(i)/(float(self.SAMPLE_RATE) / float(hop_length)) for i in range(num_samples)]],\n KEYs=self.KEYS[song_ind].get_first_key(),\n n_frames=n_frames,\n norm_to_C=norm_to_C\n )\n\n # Add song's sequences to lists as a new element\n data.append(data_in_seqs)\n chords.append(targets_in_seqs)\n gold_targets.append(SegmentationCRNN.labels2changes(targets = chords[-1]))\n\n # Save all three np arrays generated in this function .. data, chords, gold_targets aka chord changes\n with lzma.open(dest, \"wb\") as dataset_file:\n pickle.dump((data, chords, gold_targets), dataset_file)\n\n print(\"[INFO] The Isophonics segmentation samples was saved successfully.\")", "def get_sasa(topology, trajectory, dssp_loc=master_dssp_location,skip=None):\n\n\tdssp_loc = dssp_loc\n\tDSSP={'A':{}}\n\tuniverse = MDAnalysis.Universe(topology, trajectory)\n\n\t#set the chain name here. this will only work for MDAnalysis 0.16\n\tchain_name=universe.add_Segment(segid='A')\n\tuniverse.residues[...].segments=chain_name\n\n\tprotein=universe.select_atoms(\"protein\")\n\tdiff_res=[]\n\t#this attempt to identify chain breaks will only work if the resids\n\t#... 
in the chains are not numbered consecutively\n\tfor i in range(len(protein.resnums)):\n\t\tif protein.resnums[i]-protein.resnums[i-1]<0 and i!=0:\n\t\t\tdiff_res.append(i)\n\tif len(diff_res)>=1:\n\t\tchain_sep=diff_res.pop(0)\n\t\tchain_end=len(protein.resnums)\n\t\tbchain=protein[chain_sep:chain_end]\n\t\tbchain.set_segids('B')\n\t\tDSSP['B']={}\n\n\tfor ts in universe.trajectory:\n\t\tif skip:\n\t\t\tuniverse.trajectory.skip=skip\n\t\tsys.stdout.flush()\n\t\tsys.stdout.write('\\rsasa [step {0}] '.format(\n\t\t\tuniverse.trajectory.frame))\n\t\twriter=MDAnalysis.Writer(\"tmp.pdb\")\n\t\twriter.write(protein)\n\t\twriter.close()\n\t\tparser=bp.PDBParser()\n\t\tstructure=parser.get_structure('tmp','tmp.pdb')\n\t\tdssp=bp.DSSP(structure[0],'tmp.pdb',dssp_loc)\n\t\tfor key in dssp.keys():\n\t\t\tif 0:\n\t\t\t\tresobj=dssp[key][0]\n\t\t\t\tresname=dssp[key][0].resname\n\t\t\t\tresidx=resobj.id[1]\n\t\t\t\tchain=key[0]\n\t\t\t\tsecondary_structure=resobj.xtra['SS_DSSP']\n\t\t\t\trel_sasa=resobj.xtra['EXP_DSSP_RASA']\n\t\t\t\tabs_sasa=resobj.xtra['EXP_DSSP_ASA']\n\t\t\t\tphi=resobj.xtra['PHI_DSSP']\n\t\t\t\tpsi=resobj.xtra['PSI_DSSP']\n\t\t\tresobj=dssp[key]\n\t\t\tresname=residue_codes_reverse[resobj[1]]\n\t\t\tresidx=key[1][1]\n\t\t\tchain=key[0]\n\t\t\tsecondary_structure=resobj[2]\n\t\t\trel_sasa=resobj[3]\n\t\t\tabs_sasa=resobj[3]*dssp.residue_max_acc[resname]\n\t\t\tphi=resobj[4]\n\t\t\tpsi=resobj[5]\n\t\t\tif residx in DSSP[chain] and DSSP[chain][residx]['resname']==resname:\n\t\t\t\tDSSP[chain][residx]['dssp'].append(secondary_structure)\n\t\t\t\tDSSP[chain][residx]['rel_sasa'].append(rel_sasa)\n\t\t\t\tDSSP[chain][residx]['abs_sasa'].append(abs_sasa)\n\t\t\t\tDSSP[chain][residx]['phi'].append(phi)\n\t\t\t\tDSSP[chain][residx]['psi'].append(psi)\n\t\t\t\tDSSP[chain][residx]['time'].append(ts.time)\n\t\t\telse:\n\t\t\t\tDSSP[chain][residx]={'dssp':[secondary_structure],'phi':[phi],'time':[ts.time],\n\t\t\t\t\t\t\t\t\t 'psi':[psi],'rel_sasa':[rel_sasa],'chain':chain,\n\t\t\t\t\t\t\t\t\t 'abs_sasa':[abs_sasa],'resname':resname}\n\treturn DSSP", "def postIdealizedAnalysis(inpath, outpath, member,\n refpath='/lustre/research/bancell/aucolema/HWT2016runs/2016050800/wrfoutREF'):\n # SENSvals file naming conventions\n sensval_varstrings = [\"GPH_300\", \"GPH_500\", \"GPH_700\", \"GPH_850\", \"SKIP\",\n \"T_300\", \"T_500\", \"T_700\", \"T_850\", \"T_925\",\n \"U_300\", \"U_500\", \"U_700\", \"U_850\", \"U_925\",\n \"V_300\", \"V_500\", \"V_700\", \"V_850\", \"V_925\",\n \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\",\n \"SKIP\", \"SKIP\", \"Q_850\", \"SKIP\", \"SLP\", \"T2\",\n \"TD2\", \"U10\", \"V10\"]\n # Post-processed new file naming conventions\n sensstringslist = [\"300 hPa GPH\",\"500 hPa GPH\",\"700 hPa GPH\",\n \"850 hPa GPH\",\"925 hPa GPH\",\"300 hPa T\",\"500 hPa T\",\n \"700 hPa T\",\"850 hPa T\",\"925 hPa T\",\"300 hPa U-Wind\",\n \"500 hPa U-Wind\",\"700 hPa U-Wind\",\"850 hPa U-Wind\",\n \"925 hPa U-Wind\",\"300 hPa V-Wind\",\"500 hPa V-Wind\",\n \"700 hPa V-Wind\",\"850 hPa V-Wind\",\"925 hPa V-Wind\",\n \"300 hPa Dewpt\", \"500 hPa Dewpt\", \"700 hPa Dewpt\",\n \"850 hPa Dewpt\", \"925 hPa Dewpt\", \"300 hPa Q\",\n \"500 hPa Q\", \"700 hPa Q\", \"850 hPa Q\", \"925 hPa Q\",\n \"SLP\",\"2m Temp\",\"2m Dewpt\",\n \"10m U-Wind\",\"10m V-Wind\"]\n\n # Get more dimensions/geographical info\n wrf_d1 = Dataset(refpath)\n lons, lats = wrf_d1.variables['XLONG'][0], wrf_d1.variables['XLAT'][0]\n wrf_idim = len(lons[0,:])\n wrf_jdim = len(lats[:,0])\n\n # Write interpolated 
variables to netCDF\n new_analysis = Dataset(outpath, \"w\", format=\"NETCDF4\")\n new_analysis.createDimension('lat', wrf_jdim)\n new_analysis.createDimension('lon', wrf_idim)\n new_analysis.createDimension('time', None)\n xlat = new_analysis.createVariable(\"XLAT\", float, dimensions=('lat','lon'))\n xlat[:,:] = lats\n xlon = new_analysis.createVariable(\"XLONG\", float, dimensions=('lat','lon'))\n xlon[:,:] = lons\n\n # Open dataset and start pulling member fields\n member_fields = np.zeros((len(sensval_varstrings), wrf_jdim, wrf_idim))\n sensvar_dat = Dataset(inpath)\n for ind, var in enumerate(sensval_varstrings):\n # print(\"SENSvals variable:\", var, \"New variable string\", sensstringslist[ind])\n if var != \"SKIP\":\n member_fields[ind] = sensvar_dat[var][member-1][:]\n newvar = new_analysis.createVariable(\n sensstringslist[ind].replace(\" \",\"_\"),\n member_fields[ind].dtype,\n dimensions=('lat','lon'))\n newvar[:,:] = member_fields[ind]\n new_analysis.close()\n return", "def postIdealizedAnalysis(inpath, outpath, member,\n refpath='/lustre/research/bancell/aucolema/HWT2016runs/2016050800/wrfoutREF'):\n # SENSvals file naming conventions\n sensval_varstrings = [\"GPH_300\", \"GPH_500\", \"GPH_700\", \"GPH_850\", \"SKIP\",\n \"T_300\", \"T_500\", \"T_700\", \"T_850\", \"T_925\",\n \"U_300\", \"U_500\", \"U_700\", \"U_850\", \"U_925\",\n \"V_300\", \"V_500\", \"V_700\", \"V_850\", \"V_925\",\n \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\", \"SKIP\",\n \"SKIP\", \"SKIP\", \"Q_850\", \"SKIP\", \"SLP\", \"T2\",\n \"TD2\", \"U10\", \"V10\"]\n # Post-processed new file naming conventions\n sensstringslist = [\"300 hPa GPH\",\"500 hPa GPH\",\"700 hPa GPH\",\n \"850 hPa GPH\",\"925 hPa GPH\",\"300 hPa T\",\"500 hPa T\",\n \"700 hPa T\",\"850 hPa T\",\"925 hPa T\",\"300 hPa U-Wind\",\n \"500 hPa U-Wind\",\"700 hPa U-Wind\",\"850 hPa U-Wind\",\n \"925 hPa U-Wind\",\"300 hPa V-Wind\",\"500 hPa V-Wind\",\n \"700 hPa V-Wind\",\"850 hPa V-Wind\",\"925 hPa V-Wind\",\n \"300 hPa Dewpt\", \"500 hPa Dewpt\", \"700 hPa Dewpt\",\n \"850 hPa Dewpt\", \"925 hPa Dewpt\", \"300 hPa Q\",\n \"500 hPa Q\", \"700 hPa Q\", \"850 hPa Q\", \"925 hPa Q\",\n \"SLP\",\"2m Temp\",\"2m Dewpt\",\n \"10m U-Wind\",\"10m V-Wind\"]\n\n # Get more dimensions/geographical info\n wrf_d1 = Dataset(refpath)\n lons, lats = wrf_d1.variables['XLONG'][0], wrf_d1.variables['XLAT'][0]\n wrf_idim = len(lons[0,:])\n wrf_jdim = len(lats[:,0])\n\n # Write interpolated variables to netCDF\n new_analysis = Dataset(outpath, \"w\", format=\"NETCDF4\")\n new_analysis.createDimension('lat', wrf_jdim)\n new_analysis.createDimension('lon', wrf_idim)\n new_analysis.createDimension('time', None)\n xlat = new_analysis.createVariable(\"XLAT\", float, dimensions=('lat','lon'))\n xlat[:,:] = lats\n xlon = new_analysis.createVariable(\"XLONG\", float, dimensions=('lat','lon'))\n xlon[:,:] = lons\n\n # Open dataset and start pulling member fields\n member_fields = np.zeros((len(sensval_varstrings), wrf_jdim, wrf_idim))\n sensvar_dat = Dataset(inpath)\n for ind, var in enumerate(sensval_varstrings):\n # print(\"SENSvals variable:\", var, \"New variable string\", sensstringslist[ind])\n if var != \"SKIP\":\n member_fields[ind] = sensvar_dat[var][member-1][:]\n newvar = new_analysis.createVariable(\n sensstringslist[ind].replace(\" \",\"_\"),\n member_fields[ind].dtype,\n dimensions=('lat','lon'))\n newvar[:,:] = member_fields[ind]\n new_analysis.close()\n return" ]
[ "0.53746367", "0.5096252", "0.50762796", "0.50170225", "0.4978774", "0.4946678", "0.4932188", "0.4927958", "0.48932934", "0.48801583", "0.48791954", "0.48785833", "0.4873219", "0.48639885", "0.4859189", "0.48531154", "0.48424622", "0.481692", "0.48109287", "0.4809388", "0.48079917", "0.4805607", "0.47992572", "0.47991416", "0.4785583", "0.47796175", "0.47787705", "0.47740126", "0.47561568", "0.47561568" ]
0.5598505
0
Create a ZoneRecord resource with the given unique name, props, and options.
def __init__(__self__, resource_name: str, args: ZoneRecordArgs, opts: Optional[pulumi.ResourceOptions] = None): ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_zone(self, zone, serial=None):\r\n return self.service.createObject({\r\n 'name': zone,\r\n 'serial': serial or strftime('%Y%m%d01'),\r\n \"resourceRecords\": {}})", "def create_record(self, zone_id, record, record_type, data, ttl=60):\r\n self.record.createObject({\r\n 'domainId': zone_id,\r\n 'ttl': ttl,\r\n 'host': record,\r\n 'type': record_type,\r\n 'data': data})", "def create(self):\n\n record = {\n 'type': self.type,\n 'ttl': self.ttl,\n 'priority': self.priority,\n 'rdata': self.rdata,\n }\n\n if self.call(method='addZoneRecord', args=[self.domainname, self.subdomain, record]):\n return self", "def create_record(self, name, zone, type, data, extra=None):\n params = {\"type\": self.RECORD_TYPE_MAP[type], \"name\": name, \"data\": data}\n if extra:\n try:\n params[\"priority\"] = extra[\"priority\"]\n except KeyError:\n params[\"priority\"] = None\n try:\n params[\"port\"] = extra[\"port\"]\n except KeyError:\n params[\"port\"] = None\n try:\n params[\"weight\"] = extra[\"weight\"]\n except KeyError:\n params[\"weight\"] = None\n\n if \"ttl\" in extra:\n params[\"ttl\"] = extra[\"ttl\"]\n\n res = self.connection.request(\n \"/v2/domains/%s/records\" % zone.id, data=json.dumps(params), method=\"POST\"\n )\n\n return Record(\n id=res.object[\"domain_record\"][\"id\"],\n name=res.object[\"domain_record\"][\"name\"],\n type=type,\n data=data,\n zone=zone,\n ttl=res.object[\"domain_record\"].get(\"ttl\", None),\n driver=self,\n extra=extra,\n )", "def __init__(__self__,\n resource_name: str,\n args: ZoneArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[ZoneArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_record(self, name, zone, type, data, extra=None):\n id = \"id-%s\" % (name)\n\n zone = self.get_zone(zone_id=zone.id)\n\n if id in self._zones[zone.id][\"records\"]:\n raise RecordAlreadyExistsError(record_id=id, value=None, driver=self)\n\n record = Record(id=id, name=name, type=type, data=data, extra=extra, zone=zone, driver=self)\n self._zones[zone.id][\"records\"][id] = record\n return record", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[str]] = None,\n qualified_name: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None,\n zone_name: Optional[pulumi.Input[str]] = None) -> 'ZoneRecord':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneRecordState.__new__(_ZoneRecordState)\n\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"qualified_name\"] = qualified_name\n __props__.__dict__[\"ttl\"] = ttl\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value\"] = value\n __props__.__dict__[\"zone_id\"] = zone_id\n __props__.__dict__[\"zone_name\"] = zone_name\n return ZoneRecord(resource_name, opts=opts, __props__=__props__)", "def test_add_record_to_zone(self):\n zone = Zone('test.example.com')\n record = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n zone.add_record(record)\n self.assertEqual(zone.records.get('test-record'), record)", "def create_record(self, name, zone, type, data, extra=None):\n if (extra is None) or (\"entry\" not in extra):\n # If no 
entry is specified, we look for an available one. If all\n # are full, raise error.\n record_id = self._get_available_record_entry(zone)\n if not record_id:\n raise WorldWideDNSError(value=\"All record entries are full\", driver=zone.driver)\n else:\n record_id = extra.get(\"entry\")\n if name == \"\":\n name = \"@\"\n if type not in self.RECORD_TYPE_MAP:\n raise RecordError(\n value=\"Record type is not allowed\",\n driver=zone.driver,\n record_id=record_id,\n )\n extra = {\n \"S%s\" % record_id: name,\n \"T%s\" % record_id: type,\n \"D%s\" % record_id: data,\n }\n zone = self.update_zone(zone, zone.domain, extra=extra)\n record = self.get_record(zone.id, record_id)\n return record", "def create_record(self, context, record):\n record = self.dns_manager.create_record(context, record)\n return record", "def __init__(__self__, __name__, __opts__=None, domain=None, flags=None, name=None, port=None, priority=None, tag=None, ttl=None, type=None, value=None, weight=None):\n if not __name__:\n raise TypeError('Missing resource name argument (for URN creation)')\n if not isinstance(__name__, basestring):\n raise TypeError('Expected resource name to be a string')\n if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n\n __props__ = dict()\n\n if not domain:\n raise TypeError('Missing required property domain')\n elif not isinstance(domain, basestring):\n raise TypeError('Expected property domain to be a basestring')\n __self__.domain = domain\n \"\"\"\n The domain to add the record to\n \"\"\"\n __props__['domain'] = domain\n\n if flags and not isinstance(flags, int):\n raise TypeError('Expected property flags to be a int')\n __self__.flags = flags\n \"\"\"\n The flags of the record (integer between 0-255), for CAA records.\n \"\"\"\n __props__['flags'] = flags\n\n if name and not isinstance(name, basestring):\n raise TypeError('Expected property name to be a basestring')\n __self__.name = name\n \"\"\"\n The name of the record\n \"\"\"\n __props__['name'] = name\n\n if port and not isinstance(port, int):\n raise TypeError('Expected property port to be a int')\n __self__.port = port\n \"\"\"\n The port of the record, for SRV records.\n \"\"\"\n __props__['port'] = port\n\n if priority and not isinstance(priority, int):\n raise TypeError('Expected property priority to be a int')\n __self__.priority = priority\n \"\"\"\n The priority of the record, for MX and SRV\n records.\n \"\"\"\n __props__['priority'] = priority\n\n if tag and not isinstance(tag, basestring):\n raise TypeError('Expected property tag to be a basestring')\n __self__.tag = tag\n \"\"\"\n The tag of the record (one of `issue`, `wildissue`, or `iodef`), for CAA records.\n \"\"\"\n __props__['tag'] = tag\n\n if ttl and not isinstance(ttl, int):\n raise TypeError('Expected property ttl to be a int')\n __self__.ttl = ttl\n \"\"\"\n The time to live for the record, in seconds.\n \"\"\"\n __props__['ttl'] = ttl\n\n if not type:\n raise TypeError('Missing required property type')\n elif not isinstance(type, basestring):\n raise TypeError('Expected property type to be a basestring')\n __self__.type = type\n \"\"\"\n The type of record\n \"\"\"\n __props__['type'] = type\n\n if not value:\n raise TypeError('Missing required property value')\n elif not isinstance(value, basestring):\n raise TypeError('Expected property value to be a basestring')\n __self__.value = value\n \"\"\"\n The value of the record\n \"\"\"\n __props__['value'] = value\n\n if 
weight and not isinstance(weight, int):\n raise TypeError('Expected property weight to be a int')\n __self__.weight = weight\n \"\"\"\n The weight of the record, for SRV records.\n \"\"\"\n __props__['weight'] = weight\n\n __self__.fqdn = pulumi.runtime.UNKNOWN\n \"\"\"\n The FQDN of the record\n \"\"\"\n\n super(DNSRecord, __self__).__init__(\n 'do:core/dNSRecord:DNSRecord',\n __name__,\n __props__,\n __opts__)", "def create_record(self, context, payload):\n access_token = util.get_access_token(context[\"headers\"])\n record = ZohorecruitRecord(**payload)\n endpoint = f\"{record.module}\"\n record_data = self.retrieve_record_body(record)\n response = util.rest(\"POST\",endpoint,access_token,record_data)\n return json.loads(response.text)", "def Run(self, args):\n project = properties.VALUES.core.project.Get(required=True)\n zone = {}\n zone['dnsName'] = args.dns_name\n zone['name'] = args.zone\n zone['description'] = args.description\n\n really = console_io.PromptContinue('Creating %s in %s' % (zone, project))\n if not really:\n return\n\n dns = self.context['dns']\n request = dns.managedZones().create(project=project, body=zone)\n try:\n result = request.execute()\n return result\n except errors.HttpError as error:\n raise exceptions.HttpException(util.GetError(error))\n except errors.Error as error:\n raise exceptions.ToolException(error)", "def test_create_domain_with_a_record(self):\n fake_dns_instance = FakeDnsInstance()\n t = template_format.parse(domain_only_template)\n a_record = [{\n \"type\": \"A\",\n \"name\": \"ftp.example.com\",\n \"data\": \"192.0.2.8\",\n \"ttl\": 3600\n }]\n t['Resources']['domain']['Properties']['records'] = a_record\n instance = self._setup_test_cloud_dns_instance('dnsinstance_create', t)\n create_args = self._get_create_args_with_comments(a_record)\n self._stubout_create(instance, fake_dns_instance, **create_args)\n scheduler.TaskRunner(instance.create)()\n self.assertEqual((instance.CREATE, instance.COMPLETE), instance.state)\n self.m.VerifyAll()", "def pre_virtual_DNS_record_create(self, resource_dict):\n pass", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value: Optional[pulumi.Input[str]] = None,\n zone_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_spec: Optional[pulumi.Input[pulumi.InputType['ZoneDiscoverySpecArgs']]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n lake: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n resource_spec: Optional[pulumi.Input[pulumi.InputType['ZoneResourceSpecArgs']]] = None,\n type: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n disable_status_check: Optional[pulumi.Input[bool]] = None,\n email: Optional[pulumi.Input[str]] = None,\n masters: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"attributes\"] = attributes\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"disable_status_check\"] = disable_status_check\n __props__.__dict__[\"email\"] = email\n __props__.__dict__[\"masters\"] = masters\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"ttl\"] = ttl\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value_specs\"] = value_specs\n return Zone(resource_name, opts=opts, __props__=__props__)", "def post_virtual_DNS_record_create(self, resource_dict):\n pass", "def AddZoneResourceArg(parser, verb, positional=True):\n name = 'zone' if positional else '--zone'\n return concept_parsers.ConceptParser.ForResource(\n name,\n GetZoneResourceSpec(),\n 'The Zone {}'.format(verb),\n required=True).AddToParser(parser)", "def createRecord(self):\n self.dto.getRecord().append(self.controller.createNewObj())\n print(\"Record added.\")", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n vpc_region: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def test_record_name(self):\n zone = Zone('test.example.com')\n record = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n self.assertEqual(record.name, 'test-record')", "def create(cls, dump, model, pid_provider, legacy_id_key=\"legacy_recid\"):\n record = cls.create_record(\n dump, model, pid_provider, legacy_id_key=legacy_id_key\n )\n return record", "def __init__(self,\n az_account: 'account.AZAccount',\n resource_id: str,\n name: str,\n region: str,\n zones: Optional[List[str]] = None) -> None:\n super().__init__(az_account,\n resource_id,\n name,\n region,\n zones=zones)", "def __init__(self,\n az_account: 'account.AZAccount',\n resource_id: str,\n name: str,\n region: str,\n zones: Optional[List[str]] = None) -> None:\n super().__init__(az_account,\n resource_id,\n name,\n region,\n zones=zones)", "def create(self, validated_data):\n\n data = self.context.get('data')\n created = data.get('created')\n called = data.get('called')\n now = timezone.now()\n\n if created is None or called is None:\n created = now\n else:\n timedelta = parse_datetime(called) - parse_datetime(created)\n created = now - timedelta\n\n self.instance = AirQualityLocation.objects.create(\n name=validated_data.get('name'),\n geometry=validated_data.get('geometry'),\n creator=self.context.get('user'),\n created=created,\n properties=validated_data.get('properties')\n )\n\n return self.instance", "def __init__(__self__, *,\n name: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[str]] = None,\n qualified_name: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None,\n zone_name: 
Optional[pulumi.Input[str]] = None):\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if priority is not None:\n pulumi.set(__self__, \"priority\", priority)\n if qualified_name is not None:\n pulumi.set(__self__, \"qualified_name\", qualified_name)\n if ttl is not None:\n pulumi.set(__self__, \"ttl\", ttl)\n if type is not None:\n pulumi.set(__self__, \"type\", type)\n if value is not None:\n pulumi.set(__self__, \"value\", value)\n if zone_id is not None:\n pulumi.set(__self__, \"zone_id\", zone_id)\n if zone_name is not None:\n pulumi.set(__self__, \"zone_name\", zone_name)", "def MakeRecordsFromZone(self):\n return self.core_helper_instance.AddFormattedRecords(\n self.zone_name, self.zone_file_string, self.view)" ]
[ "0.67830896", "0.66456556", "0.65979064", "0.63441277", "0.6284031", "0.62557477", "0.6229536", "0.6211032", "0.60351735", "0.59962225", "0.5970569", "0.5852241", "0.57941115", "0.5788343", "0.5786571", "0.5690906", "0.5651166", "0.5644698", "0.56435764", "0.56245065", "0.553065", "0.5498713", "0.544719", "0.5439446", "0.54159385", "0.53960747", "0.53960747", "0.53856736", "0.53422344", "0.53359866" ]
0.7262241
0
Get an existing ZoneRecord resource's state with the given name, id, and optional extra properties used to qualify the lookup.
def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, name: Optional[pulumi.Input[str]] = None, priority: Optional[pulumi.Input[str]] = None, qualified_name: Optional[pulumi.Input[str]] = None, ttl: Optional[pulumi.Input[str]] = None, type: Optional[pulumi.Input[str]] = None, value: Optional[pulumi.Input[str]] = None, zone_id: Optional[pulumi.Input[str]] = None, zone_name: Optional[pulumi.Input[str]] = None) -> 'ZoneRecord': opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _ZoneRecordState.__new__(_ZoneRecordState) __props__.__dict__["name"] = name __props__.__dict__["priority"] = priority __props__.__dict__["qualified_name"] = qualified_name __props__.__dict__["ttl"] = ttl __props__.__dict__["type"] = type __props__.__dict__["value"] = value __props__.__dict__["zone_id"] = zone_id __props__.__dict__["zone_name"] = zone_name return ZoneRecord(resource_name, opts=opts, __props__=__props__)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n asset_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ZoneAssetStatusArgs']]]]] = None,\n create_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_spec: Optional[pulumi.Input[pulumi.InputType['ZoneDiscoverySpecArgs']]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n lake: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n resource_spec: Optional[pulumi.Input[pulumi.InputType['ZoneResourceSpecArgs']]] = None,\n state: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n uid: Optional[pulumi.Input[str]] = None,\n update_time: Optional[pulumi.Input[str]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"asset_statuses\"] = asset_statuses\n __props__.__dict__[\"create_time\"] = create_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"discovery_spec\"] = discovery_spec\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"labels\"] = labels\n __props__.__dict__[\"lake\"] = lake\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"resource_spec\"] = resource_spec\n __props__.__dict__[\"state\"] = state\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"uid\"] = uid\n __props__.__dict__[\"update_time\"] = update_time\n return Zone(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n disable_status_check: Optional[pulumi.Input[bool]] = None,\n email: Optional[pulumi.Input[str]] = None,\n masters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"attributes\"] = attributes\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"disable_status_check\"] = disable_status_check\n __props__.__dict__[\"email\"] = email\n __props__.__dict__[\"masters\"] = masters\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"ttl\"] = ttl\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value_specs\"] = value_specs\n return Zone(resource_name, opts=opts, __props__=__props__)", "def get_zone(self, zone_id, records=True):\r\n mask = None\r\n if records:\r\n mask = 'resourceRecords'\r\n return self.service.getObject(id=zone_id, mask=mask)", "def get_zone(name: Optional[str] = None,\n private_zone: Optional[bool] = None,\n 
resource_record_set_count: Optional[int] = None,\n tags: Optional[Mapping[str, str]] = None,\n vpc_id: Optional[str] = None,\n zone_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetZoneResult:\n __args__ = dict()\n __args__['name'] = name\n __args__['privateZone'] = private_zone\n __args__['resourceRecordSetCount'] = resource_record_set_count\n __args__['tags'] = tags\n __args__['vpcId'] = vpc_id\n __args__['zoneId'] = zone_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('aws:route53/getZone:getZone', __args__, opts=opts, typ=GetZoneResult).value\n\n return AwaitableGetZoneResult(\n arn=pulumi.get(__ret__, 'arn'),\n caller_reference=pulumi.get(__ret__, 'caller_reference'),\n comment=pulumi.get(__ret__, 'comment'),\n id=pulumi.get(__ret__, 'id'),\n linked_service_description=pulumi.get(__ret__, 'linked_service_description'),\n linked_service_principal=pulumi.get(__ret__, 'linked_service_principal'),\n name=pulumi.get(__ret__, 'name'),\n name_servers=pulumi.get(__ret__, 'name_servers'),\n primary_name_server=pulumi.get(__ret__, 'primary_name_server'),\n private_zone=pulumi.get(__ret__, 'private_zone'),\n resource_record_set_count=pulumi.get(__ret__, 'resource_record_set_count'),\n tags=pulumi.get(__ret__, 'tags'),\n vpc_id=pulumi.get(__ret__, 'vpc_id'),\n zone_id=pulumi.get(__ret__, 'zone_id'))", "def get_record(self, zone_id, record_id):\n\n self.get_zone(zone_id=zone_id)\n zone_records = self._zones[zone_id][\"records\"]\n\n if record_id not in zone_records:\n raise RecordDoesNotExistError(record_id=record_id, value=None, driver=self)\n\n return zone_records[record_id]", "def get_state_by_id(state_id):\n for key, value in storage.all(\"State\").items():\n if state_id == value.id:\n return jsonify(value.to_dict())\n abort(404)", "def get_record(self, zone):\n to_return = None\n try:\n to_return = zone.loadRecord(self.module.params.get('name'),\n self.module.params.get('type').upper())\n except ResourceException as re:\n if re.response.code != 404:\n self.module.fail_json(\n msg=\"error code %s - %s \" % (re.response.code, re.message)\n )\n to_return = None\n return to_return", "def state_by_id(state_id):\n states_values = storage.all(\"State\").values()\n for obj in states_values:\n if obj.id == state_id:\n return jsonify(obj.to_dict())\n abort(404)", "def get_record(self, zone_id, record_id):\n zone = self.get_zone(zone_id)\n try:\n if int(record_id) not in range(1, MAX_RECORD_ENTRIES + 1):\n raise RecordDoesNotExistError(\n value=\"Record doesn't exists\",\n driver=zone.driver,\n record_id=record_id,\n )\n except ValueError:\n raise WorldWideDNSError(value=\"Record id should be a string number\", driver=self)\n subdomain = zone.extra.get(\"S%s\" % record_id)\n type = zone.extra.get(\"T%s\" % record_id)\n data = zone.extra.get(\"D%s\" % record_id)\n record = self._to_record(record_id, subdomain, type, data, zone)\n return record", "def get_state_by_id(state_id):\n my_state = storage.get('State', state_id)\n if my_state is None:\n abort(404)\n return jsonify(my_state.to_dict())", "def find_availability_zone(self, name_or_id, ignore_missing=False):\n return self._find(_availability_zone.AvailabilityZone, name_or_id,\n ignore_missing=ignore_missing)", "def show(self, req, id):\n zone_id = int(id)\n zone = api.zone_get(req.environ['nova.context'], zone_id)\n return dict(zone=_scrub_zone(zone))", "def get_region_db_detail(self, context, id):\n zone_obj = 
self.dns_manager.get_region_db_detail(context, id)\n return zone_obj", "def get_zone(zone_id: int, allow_unloaded_zones: bool=False) -> Zone:\n return services.get_zone(zone_id, allow_uninstantiated_zones=allow_unloaded_zones)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n cidr: Optional[pulumi.Input[str]] = None,\n commissioning_enabled: Optional[pulumi.Input[bool]] = None,\n internet_advertising_disabled: Optional[pulumi.Input[bool]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n parent_custom_ip_prefix_id: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n roa_validity_end_date: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n wan_validation_signed_message: Optional[pulumi.Input[str]] = None,\n zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Prefix':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _PrefixState.__new__(_PrefixState)\n\n __props__.__dict__[\"cidr\"] = cidr\n __props__.__dict__[\"commissioning_enabled\"] = commissioning_enabled\n __props__.__dict__[\"internet_advertising_disabled\"] = internet_advertising_disabled\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"parent_custom_ip_prefix_id\"] = parent_custom_ip_prefix_id\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"roa_validity_end_date\"] = roa_validity_end_date\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"wan_validation_signed_message\"] = wan_validation_signed_message\n __props__.__dict__[\"zones\"] = zones\n return Prefix(resource_name, opts=opts, __props__=__props__)", "def get_state(state_id):\n try:\n ''' Check that state_id exists '''\n query = State.select().where(State.id == state_id)\n if not query.exists():\n raise LookupError('state_id')\n\n state = State.get(State.id == state_id)\n return state.to_dict(), 200\n except LookupError as e:\n abort(404)\n except Exception as e:\n abort(500)", "def get_state_by_id(state_id):\n state = storage.get(State, state_id)\n if not state:\n abort(404)\n return jsonify(state.to_dict()), 200", "def state_by_id(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n return jsonify(state.to_dict())", "def a_state(id):\n state = storage.get(State, id)\n if state is not None:\n return jsonify(state.to_dict())\n abort(404)", "def __init__(__self__,\n resource_name: str,\n args: ZoneRecordArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def get_state_by_id(state_id):\r\n response = Response(json.dumps(json_error(ResponsesREST.INVALID_INPUT.value)),\r\n status=ResponsesREST.INVALID_INPUT.value, mimetype=\"application/json\")\r\n if validator_id.is_valid({\"id\": state_id}):\r\n state_get = State()\r\n state_get.id_state = state_id\r\n result = state_get.get_state()\r\n if result in (ResponsesREST.NOT_FOUND.value, ResponsesREST.SERVER_ERROR.value):\r\n response = Response(json.dumps(json_error(result)),\r\n status=result, mimetype=\"application/json\")\r\n else:\r\n response = Response(json.dumps(result.json_state()),\r\n status=ResponsesREST.SUCCESSFUL.value,\r\n mimetype=\"application/json\")\r\n return response", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n minimal_action: 
Optional[pulumi.Input[str]] = None,\n most_disruptive_allowed_action: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n preserved_state: Optional[pulumi.Input[pulumi.InputType['RegionPerInstanceConfigPreservedStateArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n region_instance_group_manager: Optional[pulumi.Input[str]] = None,\n remove_instance_state_on_destroy: Optional[pulumi.Input[bool]] = None) -> 'RegionPerInstanceConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RegionPerInstanceConfigState.__new__(_RegionPerInstanceConfigState)\n\n __props__.__dict__[\"minimal_action\"] = minimal_action\n __props__.__dict__[\"most_disruptive_allowed_action\"] = most_disruptive_allowed_action\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"preserved_state\"] = preserved_state\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"region_instance_group_manager\"] = region_instance_group_manager\n __props__.__dict__[\"remove_instance_state_on_destroy\"] = remove_instance_state_on_destroy\n return RegionPerInstanceConfig(resource_name, opts=opts, __props__=__props__)", "def get_record(self, zone_id, record_id):\n data = self.connection.request(\n \"/v2/domains/{}/records/{}\".format(zone_id, record_id)\n ).object[\"domain_record\"]\n\n # TODO: Any way of not using get_zone which polls the API again\n # without breaking the DNSDriver.get_record parameters?\n return self._to_record(data, self.get_zone(zone_id))", "def get_zone_db_details(self, context, id):\n zone_obj = self.dns_manager.get_zone_db_details(context, id)\n return zone_obj", "def a_states_id(state_id):\n i = storage.get(\"State\", state_id)\n if i:\n return jsonify(i.to_dict())\n else:\n return (jsonify({\"error\": \"Not found\"}), 404)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auth_mode: Optional[pulumi.Input[str]] = None,\n default_s3_location: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n engine_security_group_id: Optional[pulumi.Input[str]] = None,\n idp_auth_url: Optional[pulumi.Input[str]] = None,\n idp_relay_state_parameter_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n service_role: Optional[pulumi.Input[str]] = None,\n subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n url: Optional[pulumi.Input[str]] = None,\n user_role: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n workspace_security_group_id: Optional[pulumi.Input[str]] = None) -> 'Studio':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _StudioState.__new__(_StudioState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auth_mode\"] = auth_mode\n __props__.__dict__[\"default_s3_location\"] = default_s3_location\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"engine_security_group_id\"] = engine_security_group_id\n __props__.__dict__[\"idp_auth_url\"] = idp_auth_url\n __props__.__dict__[\"idp_relay_state_parameter_name\"] = idp_relay_state_parameter_name\n __props__.__dict__[\"name\"] = name\n 
__props__.__dict__[\"service_role\"] = service_role\n __props__.__dict__[\"subnet_ids\"] = subnet_ids\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"url\"] = url\n __props__.__dict__[\"user_role\"] = user_role\n __props__.__dict__[\"vpc_id\"] = vpc_id\n __props__.__dict__[\"workspace_security_group_id\"] = workspace_security_group_id\n return Studio(resource_name, opts=opts, __props__=__props__)", "def get_by_id(cls, name):\n\t\treturn super(Locality, cls).get_by_id(cls.normalized_name(name))", "def get(self, request, state_id, format=None):\n try:\n state = State.objects.get(id=state_id)\n except ObjectDoesNotExist:\n raise NotFound(detail=\"State not found\")\n\n return Response(StateSerializer(state).data)", "def get_record_from_db(self, context, record_id):\n record = self.dns_manager.get_record_from_db(context, record_id)\n return record", "def get_zone(self):\n to_return = None\n try:\n to_return = self.ns1.loadZone(self.module.params.get('zone'))\n except ResourceException as re:\n if re.response.code == 404:\n if (\n self.module.params.get('ignore_missing_zone')\n and self.module.params.get('state') == \"absent\"\n ):\n # zone not found but we are in the absent state\n # and the user doesn't care that the zone doesn't exist\n # nothing to do and no change\n self.module.exit_json(changed=False)\n else:\n # generic error or user cares about missing zone\n self.module.fail_json(\n msg=\"error code %s - %s \" % (re.response.code, re.message)\n )\n return to_return" ]
[ "0.6687499", "0.6661502", "0.6029663", "0.5514889", "0.54734415", "0.54335076", "0.54033715", "0.53986657", "0.5330955", "0.53139925", "0.5313418", "0.5290533", "0.52262396", "0.5205333", "0.5184923", "0.5163674", "0.5150722", "0.5121249", "0.5112581", "0.50963205", "0.50445664", "0.50171053", "0.5015748", "0.5008808", "0.4981065", "0.49478814", "0.49350908", "0.48965535", "0.48643982", "0.48587555" ]
0.74212474
0
It generates and returns the realizations of the process. The realizations are hosted in an array matrix S whose row i represents the value of the process at time i for all the states of the world, and whose column j represents the path of the process for the simulation (or state of the world) j. Returns array The matrix hosting the realizations of the process.
def generateRealizations(self): #maybe, in this case, it is better dealing with arrays instead of lists. #If realizations are hosted in an array, it's easier to perform #operations with them: for example, remember that when you sum or #multiply two lists, the operation is not executed component-wise. realizations = np.full((self.numberOfTimes,self.numberOfSimulations),math.nan) # first the initial values. Look at how we can fill a vector with a single # value in Python. realizations[0] = [self.initialValue] * self.numberOfSimulations #realizations[0] = np.full((self.numberOfSimulations),self.initialValue) upsAndDowns = self.getUpsAndDowns() for timeIndex in range(1,self.numberOfTimes): #S[i+1,j] = upsAndDowns[i,j]S[i,j] realizations[timeIndex] = realizations[timeIndex - 1] * upsAndDowns[timeIndex - 1] return realizations
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sys(self):\r\n return System(np.dot(self.psys.matrix, self.rsys.matrix))", "def run_simulation(self):\n self._data = msprime.sim_ancestry(\n recombination_rate=self.recombination_rate,\n sequence_length=self.len,\n num_replicates=self.num_replicates,\n demography=self.demographic_events,\n model=self.model,\n random_seed=self.random_seed,\n samples=self.sample_size)\n return self._data", "def generate_trajectories():\n\n setup_timestamp_logging()\n\n logger = logging.getLogger()\n\n substance = Substance.from_components('C(C(C(C(C(F)(F)Br)(F)F)(F)F)(F)F)(C(C(C(F)(F)F)(F)F)(F)F)(F)F')\n\n logger.info('Building system.')\n\n build_system = BuildSmirnoffSystem('build_system')\n build_system.coordinate_file_path = 'coords.pdb'\n build_system.substance = substance\n build_system.force_field_path = 'smirnoff99Frosst-1.1.0.offxml'\n build_system.execute('', None)\n\n logger.info('System built.')\n\n production_simulation = RunOpenMMSimulation(f'production_simulation')\n production_simulation.steps_per_iteration = 500\n production_simulation.output_frequency = 1\n production_simulation.timestep = 2.0 * unit.femtosecond\n production_simulation.thermodynamic_state = ThermodynamicState(temperature=298.15*unit.kelvin,\n pressure=1.0*unit.atmosphere)\n production_simulation.input_coordinate_file = 'coords.pdb'\n production_simulation.system_path = 'system.xml'\n\n compute_resources = ComputeResources(number_of_threads=4)\n\n logger.info(f'Simulation started.')\n production_simulation_schema = production_simulation.schema\n production_simulation.execute('', compute_resources)\n production_simulation.schema = production_simulation_schema\n logger.info(f'Simulation finished.')", "def runSim(self):\n if self.verbose:\n print(\"Running Simulation, This may take a while\")\n self.makeXData(float(self.pretime))\n pool = Pool(processes=len(self.powers))\n jobs = []\n self.gem_pair = []\n self.electron = []\n self.hole = []\n self.filled = []\n self.signal = []\n self.gsignal = []\n self.ehsignal = []\n self.gloss = []\n self.tloss = []\n self.qk = []\n for power, pulse in zip(self.powers, self.pulses):\n inputs = [power, pulse, self.steps, self.trap, self.tolerance,\n self.EHdecay, self.Etrap, self.FHloss, self.Gdecay,\n self.G2decay, self.G3decay, self.GHdecay, self.Gescape,\n self.Gform, self.G3loss, self.Keq, self.trackQ,\n self.verbose]\n jobs.append(pool.apply_async(powerRun, inputs))\n for job in jobs:\n gem_pair, electron, hole, filled, signal, gsignal, ehsignal, gloss, tloss, qk = job.get()\n self.signal.append(signal * self.scalar / self.step)\n self.gsignal.append(gsignal * self.scalar / self.step)\n self.ehsignal.append(ehsignal * self.scalar / self.step)\n self.gloss.append(gloss * self.scalar / self.step)\n self.tloss.append(tloss * self.scalar / self.step)\n self.gem_pair.append(gem_pair)\n self.electron.append(electron)\n self.hole.append(hole)\n self.filled.append(filled)\n self.qk.append(qk)\n pool.close()", "def results(self):\n\n #: Instatiate the muscle results container\n self.sys.muscle_sys.muscle_1.instantiate_result_from_state(\n self.time)\n self.sys.muscle_sys.muscle_2.instantiate_result_from_state(\n self.time)\n\n angle = self.res[:, 1]\n muscle_1_state = self.res[:, 2:4]\n muscle_2_state = self.res[:, 4:6]\n\n muscle_1 = self.sys.muscle_sys.muscle_1\n muscle_2 = self.sys.muscle_sys.muscle_2\n\n for idx, _time in enumerate(self.time):\n #: Compute muscle lengths from angle\n muscle_lengths = self.sys.muscle_sys.update(\n angle[idx], muscle_1_state[idx][0], 
muscle_2_state[idx][0]\n )\n muscle_1.generate_result_from_state(\n idx, _time, self.sys.muscle_sys.muscle_1_length, muscle_1_state[idx][:])\n muscle_2.generate_result_from_state(\n idx, _time, self.sys.muscle_sys.muscle_2_length, muscle_2_state[idx][:])\n\n return np.concatenate(\n (np.expand_dims(self.time, axis=1), self.res), axis=1)", "def _calculate_system(self) -> None:\n self.y = solve_ode(\n derivative,\n self.y0,\n self.t,\n self.g,\n self.pendulum1,\n self.pendulum2\n )\n\n # Calculate individual pendulum paths\n self.pendulum1.calculate_path(\n theta=self.y[:, 0],\n dtheta=self.y[:, 1]\n )\n self.pendulum2.calculate_path(\n theta=self.y[:, 2],\n dtheta=self.y[:, 3],\n x0=self.pendulum1.x,\n y0=self.pendulum1.y\n )\n\n self.w = self.y[:, 1]\n self.df = pd.DataFrame(\n self.y,\n columns=[\"theta1\", \"dtheta1\", \"theta2\", \"dtheta2\"]\n )", "def runSimulation(self, R=5000, N=1,s=1000, method='RL'):\n global n_ec\n import numpy as np\n import matplotlib.pyplot as plt\n import matplotlib\n matplotlib.use('Agg')\n matplotlib.pyplot.switch_backend('agg')\n\n plt.rcParams.update({'font.size':20})\n plt.rc('xtick', labelsize=20)\n plt.rc('ytick', labelsize=20)\n # step = 2 fs\n # each round is 2 fs * 1000 = 2 ps\n\n init = 'ala2_1stFrame.pdb' #pdb name\n inits = init\n n_ec = 2 # angles\n count = 1\n newPoints_name = 'start_r_'+str(count)+'.pdb'\n \n #W_0 = [1/n_ec for i in range(n_ec)] # no direction\n #W_0 = [[0.25, 0.25], [0.25, 0.25]]\n W_0 = [[1/(2*n_ec), 1/(2*n_ec)] for i in range(n_ec)] # directional\n print(W_0)\n\n Ws = []\n Ws.append(W_0)\n \n trj1 = self.run(production_steps = s, start=inits, production='trj_R_0.pdb') # return mdtraj object\n comb_trj1 = trj1 # single trajectory\n trjs = comb_trj1\n trj1_theta = self.map_angles(trj1) # changed for angles to display\n print('trj1_theta', len(trj1_theta), len(trj1_theta[0]))\n trj1_Ps_theta, index = self.PreSamp(trj1_theta, myn_clusters = 10) # pre analysis (least count)\n trj1_Ps_w_theta, index_w = self.PreSamp(trj1_theta, myn_clusters = 100) # for updating the weights\n print('trj1_Ps_theta', len(trj1_Ps_theta), len(trj1_Ps_theta[0]))\n\n newPoints_index_orig = self.findStarting(trj1_Ps_theta, index, W_0, starting_n = N , method = 'RL') #need change\n newPoints = trj1[newPoints_index_orig[0]]\n newPoints.save_pdb(newPoints_name)\n \n \n print('trj1_theta[0]',trj1_theta[0])\n plt.scatter(trj1_theta[0], trj1_theta[1], color='dodgerblue', s=5, alpha=0.2)\n plt.xlim([-180, 180])\n plt.ylim([-180, 180])\n newPoints_theta_x = trj1_theta[0][newPoints_index_orig[0]]\n newPoints_theta_y = trj1_theta[1][newPoints_index_orig[0]]\n plt.scatter(newPoints_theta_x, newPoints_theta_y, color='red', s=50)\n plt.xlabel(r'$\\phi$')\n plt.ylabel(r'$\\psi$')\n plt.savefig('fig_'+str(count))\n plt.close()\n trjs_theta = trj1_theta\n trjs_Ps_theta = trj1_Ps_theta\n trjs_Ps_w_theta = trj1_Ps_w_theta \n for round in range(R):\n self.updateStat(trjs_theta) # based on all trajectories\n #W_1 = self.updateW(trjs_Ps_theta, W_0) \n W_1 = self.updateW(trjs_Ps_w_theta, W_0) \n W_0 = W_1\n W_1 = W_0\n Ws.append(W_0)\n s = 1000\n trj1 = self.run(production_steps = s, start=newPoints_name, production='trj_R_'+str(count)+'.pdb') # return mdtraj object\n com_trjs = trjs.join(trj1) \n trjs = com_trjs\n trjs_theta = np.array(self.map_angles(trjs)) \n trjs_Ps_theta, index = self.PreSamp(trjs_theta, myn_clusters = 100)\n myn_clusters1 = 100 #int(10*(round)+1)\n trjs_Ps_w_theta = trjs_Ps_theta\n #trjs_Ps_w_theta, index_w = self.PreSamp(trjs_theta, myn_clusters = 
myn_clusters1)\n newPoints_index_orig = self.findStarting(trjs_Ps_theta, index, W_1, starting_n = N , method = 'RL')\n newPoints = trjs[newPoints_index_orig[0]] \n \n count = count + 1\n newPoints_name = 'start_r_'+str(count)+'.pdb'\n newPoints.save_pdb(newPoints_name)\n\n print( myn_clusters1, W_1, self.theta_mean)\n plt.scatter(trjs_theta[0], trjs_theta[1], color='dodgerblue', s=5, alpha=0.2)\n plt.xlim([-np.pi, np.pi])\n plt.ylim([-np.pi, np.pi])\n newPoints_theta_x = trjs_theta[0][newPoints_index_orig[0]]\n newPoints_theta_y = trjs_theta[1][newPoints_index_orig[0]]\n plt.scatter(newPoints_theta_x, newPoints_theta_y, color='red', s=50)\n plt.scatter(trjs_Ps_w_theta[0], trjs_Ps_w_theta[1], color='green', s=5)\n plt.xlabel(r'$\\phi$')\n plt.ylabel(r'$\\psi$')\n plt.savefig('fig_'+str(count))\n plt.close()\n \n np.save('w_'+'r'+str(int(R))+'N'+str(N)+'s'+str(s), Ws)\n np.save('trjs_theta', trjs_theta)\n return", "def run(self):\n results = self._optimization.run()\n self._optimization.sim.fdtd.close()\n \n # plot optimization recap figure\n plt.show()\n \n return [results[0], np.array(results[1])]", "def createSystemSims(self):\n # create systems\n import anwp.sims\n self.systemSims = []\n for systemID, systemDict in self.game.allSystems.iteritems():\n empireDict = self.game.allEmpires[systemDict['myEmpireID']]\n imageFileName = '%s%s.png' % (self.game.app.simImagePath, systemDict['imageFile']) \n \n # create sim\n sim = SystemEntity(self, anwp.sims.categories.ClickableCategory(imageFileName,'system'), systemDict, empireDict)\n \n # add sim to world\n self.systemSims.append(sim)\n x = systemDict['x']\n y = systemDict['y']\n facing = 0\n speed = 0\n sim.turnRate = 0\n self.world.addToWorld(sim, x, y, facing, speed)", "def _compute_(self):\n dic = \"data/sim/{dn}/{rad}/\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), rad=self.rad)\n fbgc = \"data/sim/{dn}/{rad}/exp.bgc.bm({bm}).elv(<elv>).csv\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), \n rad=self.rad, bm=self.bmnum)\n fflare = \"data/sim/{dn}/{rad}/exp.flare.bm({bm}).elv(<elv>).csv\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"),\n rad=self.rad, bm=self.bmnum)\n cmd = \"export DIR_MODELS_REF_DAT=/home/shibaji/Collaboration_NCAR/code_rt_sd/pharlap/pharlap_4.1.3/dat;\\\n cd pharlap;\\\n matlab -nodisplay -nodesktop -nosplash -nojvm -r \\\"UT=[{ut}];rad='{rad}';dic='{dic}';fbgc='{fbgc}';bm={bm};\\\n fflare='{fflare}';rt_1D_sim;exit;\\\"\".format(ut=self.event.strftime(\"%Y %m %d %H %S\"), rad=self.rad,\n dic=dic, bm=self.bmnum, fbgc=fbgc, fflare=fflare)\n os.system(cmd)\n return", "def _exe_(self):\n print(\"\\n Start simulation (using Pharlap) ...\")\n dic = \"data/sim/{dn}/{rad}/\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), rad=self.rad)\n self._estimate_edens_()\n self._compute_()\n plotlib.plot_exp_rays(dic, self.event, self.bmnum, \"bgc\")\n plotlib.plot_exp_rays(dic, self.event, self.bmnum, \"flare\")\n if self.verbose: print(\"\\n Processing Doppler.\")\n self._compute_doppler_()\n rec = self._compute_velocity_()\n return rec", "def simulation():\n\n return {\n \"type\": \"class\",\n \"base\": \"iso.process_step\",\n \"is_abstract\": False,\n \"is_document\": True,\n \"pstr\": (\"({}/{}/{})\", (\"used\", \"ran_for_experiments\", \"ensemble_id\")),\n \"properties\": [\n (\n \"part_of_project\",\n \"linked_to(designing.project)\",\n \"1.N\",\n \"Project or projects for which simulation was run\",\n ),\n (\n \"ran_for_experiments\",\n \"linked_to(designing.numerical_experiment)\",\n \"1.N\",\n \"One or more 
experiments with which the simulation is \"\n \"associated\",\n ),\n (\n \"sub_experiment\",\n \"linked_to(designing.numerical_experiment)\",\n \"0.1\",\n \"For start-date ensembles, this will indicate the beginning \"\n \"year; for offline models driven by output from another \"\n \"model, this will provide the source_id and variant_label \"\n \"for the 'driving' model.\",\n ),\n (\n \"used\",\n \"linked_to(science.model)\",\n \"1.1\",\n \"The model used to run the simulation\",\n ),\n (\n \"primary_ensemble\",\n \"linked_to(activity.ensemble)\",\n \"0.1\",\n \"Primary Ensemble (ensemble for which this simulation was \"\n \"first run).\",\n ),\n (\n \"institution\",\n \"linked_to(shared.party)\",\n \"0.1\",\n \"institution which carried out the simulation\",\n ),\n (\n \"parent_of\",\n \"linked_to(activity.child_simulation)\",\n \"0.N\",\n \"If appropriate, links to simulations which branched from \"\n \"this one\",\n ),\n (\n \"produced\",\n \"linked_to(data.dataset)\",\n \"0.N\",\n \"Products of the simulation\",\n ),\n (\n \"had_performance\",\n \"linked_to(platform.performance)\",\n \"0.1\",\n \"Performance of the simulation.\",\n ),\n (\n \"ran_on\",\n \"linked_to(platform.machine)\",\n \"0.1\",\n \"The machine on which the simulation was run.\",\n ),\n (\n \"errata\",\n \"shared.online_resource\",\n \"0.1\",\n \"Link to errata associated with this simulation.\",\n ),\n (\n \"ensemble_id\",\n \"activity.axis_member\",\n \"0.N\",\n \"Identification within ensemble axes via axis member. \"\n \"(Multiple axis members within a simulation cannot share the \"\n \"same ensemble_axis.) (There must be an axis_member instance \"\n \"for each ensemble axis in a parent ensemble.)\",\n ),\n # Time\n (\n \"start_time\",\n \"time.date_time\",\n \"0.1\",\n \"The start date-time of the simulation. e.g. \"\n \"2012-04-01 00:00:00\",\n ),\n (\n \"end_time\",\n \"time.date_time\",\n \"0.1\",\n \"The end date-time of the simulation. e.g. 
\"\n \"2087-11-30 12:00:00\",\n ),\n (\n \"calendar\",\n \"time.calendar\",\n \"0.1\",\n \"The calendar used in the simulation\",\n ),\n # Further Info URL\n (\n \"documentation\",\n \"shared.online_resource\",\n \"0.1\",\n \"On-line location of additional documentation\",\n ),\n # Extra attributes\n (\n \"extra_attributes\",\n \"shared.extra_attribute\",\n \"0.N\",\n \"Additional attributes provided with simulation.\",\n ),\n ],\n \"constraints\": [\n (\"cardinality\", \"rationale\", \"0.0\"),\n ],\n }", "def run_metropolis(self):\n\n # Initialize the posistions for each new Monte Carlo run\n positions = np.random.rand(self.num_p, self.num_d)\n # Initialize the distance matrix\n self.s.positions_distances(positions)\n # check if the wave function is zero\n while True:\n test_wavefunction = self.w.wavefunction(positions)\n if test_wavefunction**2 <= 1e-14:\n # Initialize the posistions for each new Monte Carlo run\n positions = np.random.rand(self.num_p, self.num_d)\n # Initialize the distance matrix\n self.s.positions_distances(positions)\n else:\n break\n\n # Initialize sampler method for each new Monte Carlo run\n self.sam.initialize()\n\n for i in range(self.mc_cycles):\n new_positions = self.metropolis_step(positions)\n positions = new_positions\n self.sam.sample_values(positions)\n\n self.sam.average_values(self.mc_cycles)\n energy = self.sam.local_energy\n d_El = self.sam.derivative_energy\n var = self.sam.variance\n self.print_averages()\n return d_El, energy, var", "def _exe_(self):\n print(\"\\n Start simulation (using Pharlap) ...\")\n dic = \"data/sim/{dn}/{rad}/\".format(dn=self.event.strftime(\"%Y.%m.%d.%H.%M\"), rad=self.rad)\n self._copy_ne_()\n [self._compute_(case) for case in [\"bgc\", \"flare\"]]\n plotlib.plot_exp_rays(dic, self.event, self.bmnum, \"bgc\")\n plotlib.plot_exp_rays(dic, self.event, self.bmnum, \"flare\")\n self._compute_doppler_()\n rec = self._compute_velocity_()\n return rec", "def compute_matrix(self, process=False):\n # calculate matrix\n for i, text in enumerate(self.texts):\n vec = self.stem2vec(text)\n self.matrix.append(vec)\n if process:\n progress_bar(\n i+1, len(self.texts), prefix=\"Indexing\", length=40\n )\n\n self.matrix = np.array(self.matrix)", "def read_simulation_files(self):\n\n # Check if simulation files exist in current directory, if not kill process\n if not os.path.isfile('{}.xyz'.format(self.prefix)):\n print('Cannot find simulation file \"{}.xyz\"'.format(self.prefix))\n sys.exit()\n if not os.path.isfile('{}_vis2d.dat'.format(self.prefix)):\n print('Cannot find simulation file \"{}_vis2d.dat\"'.format(self.prefix))\n sys.exit()\n if not os.path.isfile('{}_dia.dat'.format(self.prefix)):\n print('Cannot find simulation file \"{}_dia.dat\"'.format(self.prefix))\n sys.exit()\n\n # Read coordinate file\n print('Reading simulation xyz file')\n with open('{}.xyz'.format(self.prefix),'r') as f:\n self.n = int(f.readline().split()[0])\n self.crds = np.zeros((self.n,2))\n f.seek(0,0)\n for i in range(self.frame):\n for j in range(2+self.n):\n f.readline()\n f.readline()\n f.readline()\n for j in range(self.n):\n self.crds[j,:] = np.array([float(c) for c in f.readline().split()[1:3]])\n\n # Read rings file\n print('Reading simulation ring file')\n with open('{}_vis2d.dat'.format(self.prefix),'r') as f:\n self.rings = []\n if self.vis_vortype != 0:\n while True:\n frame = int(f.readline().split()[0])\n vor_type = int(f.readline().split()[0])\n self.param = float(f.readline().split()[0])\n self.m = int(f.readline().split()[0])\n if 
frame==self.frame and vor_type==self.vis_vortype:\n for i in range(self.m):\n ring = np.array([float(c) for c in f.readline().split()])\n self.rings.append(ring.reshape(ring.shape[0]//2,2))\n break\n else:\n for i in range(self.m):\n f.readline()\n\n # Read diameter file\n print('Reading simulation radii and weights file')\n data = np.genfromtxt('{}_dia.dat'.format(self.prefix)).astype(float)\n self.radii = data[:self.n]/2\n self.weights = data[self.n:]", "def reflect(real_seqs):\n reflectX = np.random.choice([-1, 1])\n reflectY = np.random.choice([-1, 1])\n reflected = real_seqs * np.array([reflectX, reflectY, 1])\n return reflected", "def simulate_memories(simulation_length):\n \n \n pass", "def exe(self, func_mox):\n ## combine several dict of parameters\n cond = dict(self.cond_ex, **self.cond_cal, **self.const_model, **self.funclist_cea, **self.plot_param)\n ## set several constant, function and variables before calculation\n N = self.cond_ex[\"N\"]\n func_cstr = cond[\"func_CSTAR\"]\n cond[\"time\"], cond[\"x\"], r_tmp, rdot_tmp, rdotn_tmp = mod_shape.initialize_calvalue(**cond)\n self.x = cond[\"x\"]\n val = {}\n ## Following iteration part is the main sectioin of this simulation program.\n for t in tqdm(cond[\"time\"]):\n ## update each value at the follwoing lines\n self.t_history = np.append(self.t_history, t)\n mox = func_mox(t)\n self.mox_history = np.append(self.mox_history, mox)\n if t == 0:\n Pc = cond[\"Pci\"]\n else:\n Pc = Pc_new\n val[\"Pc\"] = Pc\n self.Pc_history = np.append(self.Pc_history, Pc)\n Vox = mod_shape.func_Vox(mox, Pc, **cond)\n val[\"Vox\"] = Vox\n self.Vox_history = np.append(self.Vox_history, Vox)\n Vf = mod_shape.func_Vf(Vox, Pc, **cond)\n self.Vf_history = np.append(self.Vf_history, Vf)\n if t != 0:\n r_tmp = r_new_tmp\n rdot_tmp = rdot_new_tmp\n rdotn_tmp = rdotn_new_tmp\n ## reshape and eliminate the unneccesary part of regression shape.\n r, rdot, rdotn = mod_shape.func_rcut(r_tmp, rdot_tmp, rdotn_tmp, self.t_history, self.Vf_history, **cond)\n self.r_history = np.vstack((self.r_history, r))\n self.rdot_history = np.vstack((self.rdot_history, rdot))\n self.rdotn_history = np.vstack((self.rdotn_history, rdotn))\n ## calculate the others parameter at the following lines\n if cond[\"Vf_mode\"]:\n mf = N *mod_shape.func_mf(r[~np.isnan(r)].size-1, r[~np.isnan(r)], rdot[~np.isnan(rdot)], Vf=Vf, **cond)\n else:\n mf = N *mod_shape.func_mf(r[~np.isnan(r)].size-1, r[~np.isnan(r)], rdot[~np.isnan(rdot)], Vf=Vf, **cond)\n self.mf_history = np.append(self.mf_history, mf)\n if mf<=0.0:\n of = np.nan\n cstr_ex = Pc*np.pi*np.power(cond[\"Dt\"], 2)/(4*mox)\n else:\n of = mox/mf\n cstr_ex = cond[\"eta\"]*func_cstr(of, Pc)\n self.of_history = np.append(self.of_history, of)\n self.cstr_history = np.append(self.cstr_history, cstr_ex)\n ## calculate the next time step values at the following lines\n val[\"r\"] = r_tmp\n val[\"rdot\"] = rdot_tmp\n val[\"rdotn\"] = rdotn_tmp\n Pc_new = mod_response.exe_EULER(t, mf, Pc, func_mox, self.t_history, self.Vf_history, **cond)\n r_new_tmp, rdot_new_tmp, rdotn_new_tmp = mod_shape.exe(val, **cond)\n ## CFL [-] Courant number, which must be less than unity \n self.cond_cal[\"CFL\"] = np.abs(self.Vf_history.max()*self.cond_cal[\"dt\"]/self.cond_cal[\"dx\"])", "def make_simulations(self):\n pass", "def data_generate_process():\n\n a = 0.8\n b = 0.4\n c = 0.1\n d = 0.3\n e = 0.7\n y_0 = 0.0\n a_0 = 0.2\n sigma_0 = 0.35\n\n data_len = 10000\n y_series = pd.Series([np.nan] * data_len)\n a_series = pd.Series([np.nan] * data_len)\n 
sigma_series = pd.Series([np.nan] * data_len)\n\n epsilon_normal = np.random.normal(loc=0.0, scale=1.0, size=data_len)\n\n y_series[0] = y_0\n a_series[0] = a_0\n sigma_series[0] = sigma_0\n\n for idx in range(1, data_len):\n epsilon_t = epsilon_normal[idx]\n sigma_t = np.sqrt(c + d * a_series[idx - 1] ** 2 + e * sigma_series[idx - 1] ** 2)\n a_t = epsilon_t * sigma_t\n y_series[idx] = a * y_series[idx - 1] + b + a_t\n a_series[idx] = a_t\n sigma_series[idx] = sigma_t\n\n return y_series, a_series, sigma_series", "def run(self):\n\n # If this was a tanh model or some such thing, we're already done.\n if self.is_phenom:\n return\n if self.is_complete:\n print(\"Already ran simulation!\")\n return\n\n # Need to generate radiation backgrounds first.\n if self.pf['radiative_transfer']:\n self.medium.field.run()\n self._f_Jc = self.medium.field._f_Jc\n self._f_Ji = self.medium.field._f_Ji\n self._f_Jlw = self.medium.field._f_Jlw\n else:\n self._f_Jc = lambda z: 0.0\n self._f_Ji = lambda z: 0.0\n self._f_Jlw = lambda z: 0.0\n\n # Start timer\n t1 = time.time()\n\n tf = self.medium.tf\n self.medium._insert_inits()\n\n pb = self.pb = ProgressBar(tf, use=self.pf['progress_bar'],\n name='gs-21cm')\n\n # Lists for data in general\n self.all_t, self.all_z, self.all_data_igm, self.all_data_cgm, \\\n self.all_RC_igm, self.all_RC_cgm = \\\n self.medium.all_t, self.medium.all_z, self.medium.all_data_igm, \\\n self.medium.all_data_cgm, self.medium.all_RCs_igm, self.medium.all_RCs_cgm\n\n # Add zeros for Ja\n for element in self.all_data_igm:\n element['Ja'] = np.zeros(self.grid.dims)\n element['Jc'] = np.zeros(self.grid.dims)\n element['Ji'] = np.zeros(self.grid.dims)\n element['Jlw'] = np.zeros(self.grid.dims)\n\n # List for extrema-finding\n self.all_dTb = self._init_dTb()\n for t, z, data_igm, data_cgm, rc_igm, rc_cgm in self.step():\n\n # Occasionally the progress bar breaks if we're not careful\n if z < self.pf['final_redshift']:\n break\n if z < self.pf['kill_redshift']:\n break\n\n # Delaying the initialization prevents progressbar from being\n # interrupted by, e.g., PrintInfo calls\n if not pb.has_pb:\n pb.start()\n\n pb.update(t)\n\n # Save data\n self.all_z.append(z)\n self.all_t.append(t)\n self.all_dTb.append(data_igm['dTb'][0])\n self.all_data_igm.append(data_igm.copy())\n self.all_RC_igm.append(rc_igm.copy())\n\n if self.pf['include_cgm']:\n self.all_data_cgm.append(data_cgm.copy())\n self.all_RC_cgm.append(rc_cgm.copy())\n\n # Automatically find turning points\n if self.pf['track_extrema']:\n if self.track.is_stopping_point(self.all_z, self.all_dTb):\n break\n\n pb.finish()\n\n self.history_igm = _sort_history(self.all_data_igm, prefix='igm_',\n squeeze=True)\n\n if self.pf['include_cgm']:\n self.history_cgm = _sort_history(self.all_data_cgm, prefix='cgm_',\n squeeze=True)\n else:\n self.history_cgm = {}\n\n self.history = self.history_igm.copy()\n self.history.update(self.history_cgm)\n\n ##\n # In the future, could do this better by only calculating Ja at\n # the end, since it a passive quantity (unless we included its\n # very small heating).\n ##\n #if self.pf['secondary_lya']:\n # xe = lambda zz: np.interp(zz, self.history['z'][-1::-1],\n # self.history['igm_e'][-1::-1])\n # self.medium.field.run(xe=xe)\n # self._f_Ja = self.medium.field._f_Ja\n # #self._f_Jlw = self.medium.field._f_Jlw\n #\n # # Fix Ja in history\n\n self.history['dTb'] = self.history['igm_dTb']\n #self.history['dTb_bulk'] = self.history['igm_dTb_bulk']\n\n self.history['Ts'] = self.history['igm_Ts']\n 
self.history['Jc'] = self.history['igm_Jc']\n self.history['Ji'] = self.history['igm_Ji']\n self.history['Ja'] = self.history['igm_Jc'] + self.history['igm_Ji']\n self.history['Jlw'] = self.history['igm_Jlw']\n\n # Save rate coefficients [optional]\n if self.pf['save_rate_coefficients']:\n self.rates_igm = \\\n _sort_history(self.all_RC_igm, prefix='igm_', squeeze=True)\n self.rates_cgm = \\\n _sort_history(self.all_RC_cgm, prefix='cgm_', squeeze=True)\n\n self.history.update(self.rates_igm)\n self.history.update(self.rates_cgm)\n\n self.history['t'] = np.array(self.all_t)\n self.history['z'] = np.array(self.all_z)\n\n ##\n # Optional extra radio background\n ##\n Tr = np.zeros_like(self.history['z'])\n for popid, pop in enumerate(self.pops):\n if not pop.is_src_radio:\n continue\n\n z, E, flux = self.field.get_history(popid, flatten=True)\n\n E21cm = h_p * nu_0_mhz * 1e6 / erg_per_ev\n f21 = interp1d(E, flux, axis=1, bounds_error=False,\n fill_value=0.0, force_scipy=True)\n flux_21cm = f21(E21cm)\n\n Tr += np.interp(self.history['z'], z, flux_21cm) \\\n * E21cm * erg_per_ev * c**2 / k_B / 2. / (nu_0_mhz * 1e6)**2\n\n if not np.all(Tr == 0):\n assert self.medium.parcel_igm.grid.hydr.Tbg is None\n elif self.medium.parcel_igm.grid.hydr.Tbg is not None:\n Tr = self.medium.parcel_igm.grid.hydr.Tbg(self.history['z'])\n\n self.history['Tr'] = Tr\n\n # Correct the brightness temperature if there are non-CMB backgrounds\n if not np.all(Tr == 0):\n zall = self.history['z']\n n_H = self.medium.parcel_igm.grid.cosm.nH(zall)\n Ts = self.medium.parcel_igm.grid.hydr.Ts(zall,\n self.history['igm_Tk'], self.history['Ja'],\n self.history['igm_h_2'], self.history['igm_e'] * n_H, Tr)\n\n if self.pf['floor_Ts']:\n Ts = max(Ts, self.medium.parcel_igm.grid.hydr.Ts_floor(z=zall))\n\n # Compute volume-averaged ionized fraction\n xavg = self.history['cgm_h_2'] \\\n + (1. 
- self.history['cgm_h_2']) * self.history['igm_h_2']\n\n # Derive brightness temperature\n dTb = self.medium.parcel_igm.grid.hydr.get_21cm_dTb(zall, Ts,\n xavg=xavg, Tr=Tr)\n\n self.history['dTb_no_radio'] = self.history['dTb'].copy()\n self.history['dTb'] = dTb\n\n #self.history['dTb_bulk'] = \\\n # self.medium.parcel_igm.grid.hydr.dTb(zall, 0.0, Ts, Tr)\n\n t2 = time.time()\n\n self.timer = t2 - t1\n\n self.is_complete = True", "def execute(self):\n for n in range(0, self.N-1):\n # Subtract 1 from i to start from time-step 0 (t_0), instead of time-step 1 (t_1).\n self.u_n = self.one_dim_reaction_diffusion_step(self.u_n, n)\n self.u_storage[n+1, :] = np.copy(self.u_n)\n\n return self.u_storage", "def to_ri(self,system):\n # rotating wave approx\n for i in system:\n for j in i:\n j.RWA()\n #change system to number\n for i in range(self.N):\n for j in range(self.N):\n system[i][j] = system[i][j].mag()\n# print self.system\n #transform conjugate to real and imag\n for i in range(self.n):\n for j in range(i,self.n):\n id1 = self.index(i,j) \n if i == j:\n for k in range(self.n):\n for l in range(k,self.n):\n if k != l:\n id2 = self.index(k,l)\n tmp1 = (system[id1][id2]+system[id1][id2+1]).real\n tmp2 = (system[id1][id2]-system[id1][id2+1]).imag \n system[id1][id2],system[id1][id2+1] = tmp1,-1*tmp2\n else:\n for k in range(self.n):\n for l in range(k,self.n):\n id2 = self.index(k,l)\n if k==l:\n tmp = system[id1][id2]\n system[id1][id2],system[id1+1][id2]=tmp.real,tmp.imag\n else:\n tmp1 = system[id1][id2]+system[id1][id2+1]\n tmp2 = system[id1][id2]-system[id1][id2+1]\n system[id1][id2],system[id1][id2+1] = tmp1.real,-1*tmp2.imag\n system[id1+1][id2],system[id1+1][id2+1] = tmp1.imag,tmp2.real\n return system", "def run_delayed_ssa(system):\n \n #vars used in the simulation\n time = 0 #unitless\n end_time = system['sim-time']\n species = system['participants']\n parameters = system['parameters']\n events = system['events']\n prop_funcs = {}\n exec_funcs = {}\n props = {}\n delays = {}\n last_exec_time = {}\n \n #return values\n time_array = []\n species_array = []\n \n #populate results array\n time_array = [time]\n row = [0]*len(species)\n species_names = [''] * len(species)\n \n #create species vars so that rate code can be executed\n i = 0\n for name in species:\n species_names[i] = name\n exec( name + '=' + str(species[name]) )\n row[i] = species[name]\n i += 1\n species_array.append(row)\n \n #create parameter vars so that rate code can be executed\n for name in parameters:\n exec( name + '=' + str(parameters[name]) )\n\n #create (compile) functions from input strings for rates and events\n for name in events:\n if events[name].get('delay'):\n delays[name] = events[name]['delay']\n else:\n delays[name] = 0.0\n last_exec_time[name] = -1\n props[name] = 0.0\n prop_funcs[name] = compile(\"props['\" + name + \"'] = \" + str(events[name]['propensity']), 'prop_funcs_'+name, 'exec')\n exec_funcs[name] = compile(events[name]['consequence'], 'exec_funcs_'+name, 'exec')\n \n #MAIN LOOP\n while time < end_time:\n \n #calculate propensities\n for name in props:\n exec(prop_funcs[name])\n if delays[name] > 0 and delays[name] + last_exec_time[name] < time:\n print(name)\n props[name] = 0.0\n \n #calculate total of all propensities\n total_prop = 0\n for name in props:\n total_prop += props[name]\n \n \n u = random.uniform(0,total_prop)\n usum = 0\n lucky = None\n for name in props:\n usum += props[name]\n if usum > u:\n lucky = name\n break\n\n #fire that reaction\n if lucky:\n last_exec_time[lucky] = 
time\n exec(exec_funcs[lucky])\n \n \n row = [0]*len(species)\n i = 0\n for name in species:\n row[i] = eval(name)\n i += 1\n time_array.append(time)\n species_array.append(row)\n \n #update next time using exp distrib\n if total_prop == 0.0: #jump to next delay\n lowest_delay = inf\n for name in props:\n if delays[name] > 0 and delays[name] < lowest_delay:\n lowest_delay = delays[name]\n time += lowest_delay\n else:\n dt = random.exponential(1.0/total_prop)\n time += dt\n\n #END MAIN LOOP\n\n result = {'time':time_array, 'participants':species_array, 'headers': species_names}\n return result", "def simulate(self):\r\n \r\n # Worker ID\r\n self.RawData[:, 0] = np.arange(start=1, stop=self.NumInd+1, step=1, dtype=np.int8)\r\n \r\n # IID draws for observables from standard normal distribution\r\n self.RawData[:, 1] = np.random.randn(self.NumInd) # Draw for X\r\n self.RawData[:, 2] = np.random.randn(self.NumInd) # Draw for Z\r\n \r\n # IID draws for unobservables from normal distribution with variance 0.04\r\n self.RawData[:, 3] = 0.2 * np.random.randn(self.NumInd) # Draw for epsilon\r\n self.RawData[:, 4] = 0.2 * np.random.randn(self.NumInd) # Draw for u\r\n self.RawData[:, 5] = 0.2 * np.random.randn(self.NumInd) # Draw for xi\r\n \r\n # DGP for log W := eta * X + Z + u\r\n self.RawData[:, 6] = self.η * self.RawData[:, 1] + self.RawData[:, 2] + self.RawData[:, 4]\r\n \r\n # DGP for log R := delta_0 + log W + delta * Z + xi\r\n self.RawData[:, 7] = self.δ0 + self.RawData[:, 6] + self.δ * self.RawData[:, 2] + self.RawData[:, 5]\r\n \r\n # DGP for log beta := nu * X + a * xi + eps\r\n self.RawData[:, 8] = self.ν * self.RawData[:, 1] + self.RawData[:, 3] + self.a * self.RawData[:, 5]\r\n \r\n # Determination of labor force participation (LFP)\r\n self.RawData[:, 9] = (np.log(self.ρ) + self.RawData[:, 6] - self.RawData[:, 7] >= 0)\r\n \r\n # Computing optimal number of hours worked\r\n self.RawData[:, 10] = np.power(np.divide(self.ρ * np.exp(self.RawData[:, 6]),\r\n np.exp(self.RawData[:, 8])), \r\n np.divide(1., self.γ)) * self.RawData[:, 9] \r\n \r\n # Hiding wage details for non-participants in the labor market\r\n self.RawData[:, 6] = self.RawData[:, 6] * self.RawData[:, 9] \r\n \r\n \r\n # Converstion to Pandas Dataframe\r\n panel_from_sim = pd.DataFrame(data=self.RawData, columns=self.DataColumns)\r\n\r\n # Establishing data types for each column of the Pandas DataFrame.\r\n panel_from_sim = panel_from_sim.astype({'ID': np.uint, 'X': np.float, 'Z': np.float,\r\n 'ϵ': np.float, 'u': np.float, 'ξ': np.float, \r\n 'W': np.float, 'R': np.float, 'β': np.float,\r\n 'LFP': np.uint, 'H': np.float})\r\n \r\n # panel_from_sim[['W', 'R', 'β']].applymap(np.exp)\r\n \r\n # Store RawData as a Pandas dataframe\r\n self.RawData = panel_from_sim.copy()", "def simulate():\n # Simulation set-up\n end_time = 50\n ts = numpy.linspace(0, end_time, end_time*10)\n dt = ts[1]\n dt_control = 1\n assert dt <= dt_control\n\n bioreactor, lin_model, K, _ = sim_base.get_parts(dt_control=dt_control)\n\n # Initial values\n us = [numpy.array([0.06, 0.2])]\n xs = [bioreactor.X.copy()]\n ys = [bioreactor.outputs(us[-1])]\n\n biass = []\n\n t_next = 0\n for t in tqdm.tqdm(ts[1:]):\n if t > t_next:\n U_temp = us[-1].copy()\n if K.y_predicted is not None:\n biass.append(lin_model.yn2d(ys[-1]) - K.y_predicted)\n\n u = K.step(lin_model.xn2d(xs[-1]), lin_model.un2d(us[-1]), lin_model.yn2d(ys[-1]))\n U_temp[lin_model.inputs] = lin_model.ud2n(u)\n us.append(U_temp.copy())\n t_next += dt_control\n else:\n us.append(us[-1])\n\n 
bioreactor.step(dt, us[-1])\n outputs = bioreactor.outputs(us[-1])\n ys.append(outputs.copy())\n xs.append(bioreactor.X.copy())\n\n ys = numpy.array(ys)\n us = numpy.array(us)\n biass = numpy.array(biass)\n\n print('Performance: ', sim_base.performance(ys[:, lin_model.outputs], lin_model.yd2n(K.ysp), ts))\n\n return ts, ys, lin_model, K, us, dt_control, biass, end_time", "def simulate(self):\n n_samples = self.n_samples\n n_features = self.n_features\n nb_active_features = self.nb_active_features\n K = self.K\n pi_0 = self.pi_0\n gap = self.gap\n p0 = self.p0\n p1 = self.p1\n r_c = self.r_c\n r_cf = self.r_cf\n rho = self.rho\n\n coeffs = np.zeros(n_features)\n coeffs[0:nb_active_features] = K\n\n features = features_normal_cov_toeplitz(n_samples, n_features, rho)\n\n # Add class relative information on the design matrix \n A = np.random.choice(range(n_samples), size=int((1 - pi_0) * n_samples),\n replace=False)\n A_ = np.delete(range(n_samples), A)\n\n index_plus_gap = nb_active_features + int(\n (n_features - nb_active_features) * r_cf)\n features[A, :index_plus_gap] += gap\n features[A_, :index_plus_gap] -= gap\n\n self.features = features\n xc = features.dot(coeffs)\n\n # Simulation of latent variables\n pi = self.logistic_grad(-xc)\n u = np.random.rand(n_samples)\n Z = (u <= 1 - pi)\n self.Z = Z\n\n # Simulation of true times\n n_samples_class_1 = np.sum(Z)\n n_samples_class_0 = n_samples - n_samples_class_1\n T = np.empty(n_samples)\n pi_0_est = 1 - Z.mean()\n T[Z == 0] = np.random.geometric(p0, size=n_samples_class_0)\n\n # Compute p_c to obtain censoring rate r_c\n r_c_ = 1 - r_c\n p0_ = 1 - p0\n p1_ = 1 - p1\n pi_0_ = 1 - pi_0_est\n a = r_c_ * p0_ * p1_\n b = p0 * pi_0_est * p1_ + p1 * pi_0_ * p0_ - r_c_ * (p1_ + p0_)\n c = r_c_ - p0 * pi_0_est - p1 * pi_0_\n res = self.poldeg2_solver(a=a, b=b, c=c)\n if isinstance(res, list):\n if res[0] > 0:\n pc = 1 - res[0]\n else:\n pc = 1 - res[1]\n else:\n pc = 1 - res\n T[Z == 1] = np.random.geometric(p1, size=n_samples_class_1)\n\n # Simulation of the censoring\n C = np.random.geometric(pc, size=n_samples)\n\n # Censoring indicator: 1 if it is a time of failure, 0 if it's \n # censoring.\n delta = (T <= C).astype(int)\n\n # Observed time\n Y = np.minimum(T, C).astype(int)\n if np.sum(Y == 0) > 0:\n Y += 1\n self.delta = delta\n self.Y = Y\n return features, Y, delta", "def generate_continuum_realizations_periodic(grid_path, save_path, perm_path, dp_x, dp_y, n_images, print_every=50):\n # loading the grid\n with open(grid_path, 'rb') as input:\n grid = pickle.load(input)\n n_cell = grid.m\n n_face = grid.nr_t\n # initialize perm matrix and pressure solution (n_cell x n_cell x n_perm_fields)\n X, Y = [np.zeros((n_images, n_cell, n_cell, 1)) for i in range(2)]\n # initialize arrays for saving the faces velocities\n U_face = np.zeros((n_images, n_face))\n # initialize the array for saving the face operator and bias\n face_operator_list = []\n face_bias_array = np.zeros((n_images, n_face))\n # load the permeability dataframe, each column is one realization\n # this is the file saved by SGEMS (Geostats software)\n perm_frame = pd.read_csv(perm_path, usecols=range(n_images))\n # initialize a linear system for the pressure fluctuations for the grid\n LS = LinearSystemStandard(grid)\n # initialize the perturbation system object\n PI = PeriodicPerturbations(grid, dp_x, dp_y)\n # for the number of specified realizations run particle tracking and save the results\n for i in range(n_images):\n if not i%print_every:\n print('realization number 
'+str(i))\n logperm = perm_frame.ix[:, i]\n perm = np.exp(logperm)\n grid.set_transmissibility(perm)\n # solve for fluctuations around mean pressure gradient\n # setting the left hand side of the equation\n LS.fill_matrix(grid.transmissibility)\n # for each cell add (dp_x/lx)*(T_down - T_up)_x + (dp_y/ly)*(T_down - T_up)_y\n # to the rhs\n rhs_vec = PI.periodic_rhs_vec(grid.transmissibility)\n LS.rhs.set_neumann_pores_distributed(range(grid.nr_p), rhs_vec)\n # set a dirichlet cell: no fluctuation for cell 0\n LS.set_dirichlet_pores([0], 0.0)\n LS.solve()\n # copy the pressure solution and the permeability field to the X and Y\n X[i, :, :, 0] = np.reshape(logperm, (n_cell, n_cell))\n Y[i, :, :, 0] = np.copy(np.reshape(LS.sol, (n_cell, n_cell)))\n grid.pressure = LS.sol\n # get the operators to calculate face velocity\n U_face_operator, U_face_fixed = PI.face_velocity_operator(grid.transmissibility)\n # save face_velocity\n U_face[i,:] = U_face_operator.dot(LS.sol) + U_face_fixed\n # save the face operator\n face_operator_list.append(U_face_operator)\n face_bias_array[i,:] = U_face_fixed\n # save X, Y, U_face, operators\n np.savez(save_path, X=X, Y=Y, U_face=U_face, U_face_operator=face_operator_list, U_face_fixed=face_bias_array)", "def simulation(self):\r\n \r\n self.Real.Pbat, self.Real.Pbs, self.Real.soc, self.Real.soc0, self.Real.Pbs0 = batmod_ac(\r\n self.d, self.dt, self.Real.soc0, self.Real.soc, self.Real.Pr, self.Real.Pbs0, self.Real.Pbs, self.Real.Pbat)\r\n \r\n self.Ideal.Pbs, self.Ideal.Pbat, self.Ideal.soc0, self.Ideal.soc = batmod_ac_ideal(\r\n self.d, self.dt, self.Ideal.soc0, self.Ideal.soc, self.Ideal.Pr, self.Ideal.Pbat)" ]
[ "0.5854377", "0.57707894", "0.56632286", "0.5630439", "0.5529519", "0.5455803", "0.5446149", "0.5443754", "0.5397323", "0.53795385", "0.53627515", "0.5360387", "0.53161407", "0.52479273", "0.5228732", "0.5227275", "0.5217589", "0.518999", "0.5186102", "0.51725996", "0.5159401", "0.515666", "0.51434094", "0.51326025", "0.51248413", "0.51169074", "0.5110498", "0.5108435", "0.50966305", "0.5081126" ]
0.64867127
0
It returns the realizations of the process at time timeIndex
def getRealizationsAtGivenTime(self, timeIndex): realizations = self.realizations; return realizations[timeIndex]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def time_list(self):\n return (self.N_T * (np.arange(self.N_itr) + 1) /\n self.N_itr * 1000 * self.DT)", "def getTimes():", "def getTimes():", "def getTimes():", "def get_timing(pidx):\n pgconn = get_dbconn(\"mesosite\")\n cursor = pgconn.cursor()\n cursor.execute(\n \"SELECT avg(timing) from autoplot_timing where appid = %s \"\n \"and valid > (now() - '7 days'::interval)\",\n (pidx,),\n )\n timing = cursor.fetchone()[0]\n return timing if timing is not None else -1", "def time(self):\r\n return self._idx", "def time_callback(from_index,to_index): #\n # Convert from routing variable Index to NodeIndex in time\n from_node = manager.IndexToNode(from_index)\n to_node = manager.IndexToNode(to_index)\n return time_matrix[from_node][to_node]", "def timings(self):\r\n return self._timings", "def time_callback(from_index, to_index):\n # Convert from routing variable Index to time matrix NodeIndex.\n from_node = manager.IndexToNode(from_index)\n to_node = manager.IndexToNode(to_index)\n return self.data['time_matrix'][from_node][to_node]", "def evaluate(self, time) -> float:\n ...", "def time_callback(self, from_index, to_index):\n # Convert from routing variable Index to time matrix NodeIndex.\n from_node = self.manager.IndexToNode(from_index)\n to_node = self.manager.IndexToNode(to_index)\n return self.data['time_matrix'][from_node][to_node]", "def time_callback(from_index, to_index):\r\n # Convert from routing variable Index to time matrix NodeIndex.\r\n from_node = manager.IndexToNode(from_index)\r\n to_node = manager.IndexToNode(to_index)\r\n return data['time_matrix'][from_node][to_node]", "def __get_times(self):\n data = self.simulate_file.readlines()\n data = list(map(str.strip, data))\n data = list(map(float, data))\n start = data[0]\n times = data[1:]\n return (start, times)", "def time(self):\n return self.time_array", "def getScaleKeyTime(self, index, view) -> float:\n ...", "def computation_times(self) -> List[float]:\r\n return self._computation_times", "def get_times(self):\n times = []\n for i in range(1, len(self.events)):\n times.append(self.events[i-1].elapsed_time(self.events[i]))\n return times", "def CalculateSpeedIndex(self):\n time_completeness_list = self.GetTimeCompletenessList()\n prev_completeness = 0.0\n speed_index = 0.0\n prev_time = time_completeness_list[0][0]\n for time, completeness in time_completeness_list:\n # Add the incemental value for the interval just before this event.\n elapsed_time = time - prev_time\n incompleteness = (1.0 - prev_completeness)\n speed_index += elapsed_time * incompleteness\n\n # Update variables for next iteration.\n prev_completeness = completeness\n prev_time = time\n return speed_index", "def real_time(self):\n try:\n # TODO: Update for resuming runs\n with open(path.join(self.run_dir, \"TIMINGS\", \"timings.001\"), \"r\") as f:\n text = f.read()\n r = re.match(r\" Total time for loop was(?: *)(.*?)(?: *)seconds\", text, re.DOTALL + re.MULTILINE)\n if not r:\n logger.warning(\"Bad format in timings file. 
The real time could not be read.\")\n return float(\"nan\")\n else:\n return float(r.group(1))\n except FileNotFoundError:\n return float(\"nan\")", "def get_timings(log):\n log.info('Doing get_timings')\n timingfile = os.path.join(os.environ['decor'], 'decorcode',\n 'stim_timing_info', 'Timing_layout.txt')\n timingf = open(timingfile, 'r')\n run = []\n clip = []\n trs = []\n for line in timingf:\n i, j, k = line.split()\n run.append(i)\n clip.append(j)\n trs.append(k)\n return (run, clip, trs)", "def ensemble_times(self):\n return self['validtime'].values", "def timingColumns(self, results):\n \n pass", "def time_index(self):\n if self._time_index is None and self.cf_file is not None:\n with Outputs(self.cf_file) as cfh:\n if 'time_index' in cfh.datasets:\n self._time_index = cfh.time_index\n\n return self._time_index", "def calculate(self):\n #runs = [ai\n # for ei in self.experiment_queues\n # for ai in ei.cleaned_automated_runs]\n #\n #ni = len(runs)\n #self.nruns = ni\n # for ei in self.experiment_queues:\n # dur=ei.stats.calculate_duration(ei.cleaned_automated_runs)\n # if\n\n\n tt = sum([ei.stats.calculate_duration(ei.cleaned_automated_runs)\n for ei in self.experiment_queues])\n self._total_time = tt\n offset = 0\n if self._start_time:\n offset = time.time() - self._start_time\n\n self.etf = self.format_duration(tt - offset)", "def realtime(self):", "def indexTime(self):\n productSet = self.productSearch.productClient.get_product_set(name=self.productSetPath)\n return productSet.index_time", "def get_time_step_values(self):\n return DiscretizeMeshReader.get_time_step_values(self)", "def get_timings(self):\n exp=lib.is_Exposure_d8(self.hcam,7)*1E-3\n frame_rate=lib.is_SetFrameRate(self.hcam,0x8000)\n return self.AcqTimes(exp,1./frame_rate)", "def getTransformKeyTime(self, index, view) -> float:\n ...", "def getWeightTime(self, index):\r\n\t\treturn None" ]
[ "0.61247146", "0.6043734", "0.6043734", "0.6043734", "0.57777035", "0.57670903", "0.5563724", "0.5549501", "0.55488217", "0.55278444", "0.5521647", "0.5457292", "0.54337156", "0.5431724", "0.5423495", "0.54172146", "0.53973734", "0.5394623", "0.5386949", "0.53651875", "0.53540343", "0.5348444", "0.5344045", "0.5341939", "0.53413576", "0.53404194", "0.5322253", "0.53201973", "0.52934617", "0.5289016" ]
0.68299645
0
It returns the entire path of the process for a given simulation index
def getPath(self, simulationIndex): realizations = self.realizations; return realizations[:,simulationIndex]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index_path(self):\n\t\treturn os.path.normpath(self.output + \"/\" + self.resultset_index)", "def printPath(self, simulationIndex):\n path = self.getPath(simulationIndex);\n\n print(\"The path for the\", simulationIndex, \"-th simulation is the following:\")\n print()\n print('\\n'.join('{:.3}'.format(realization) for realization in path))\n print()", "def solution_path(self) -> list[State]:", "def findPath(enviroment: Environment, position: tuple) -> list:", "def findPath(enviroment: Environment, position: tuple) -> list:", "def image_path_from_index(self, index):\n image_path = os.path.join(self._data_path,'query',\n index)\n assert os.path.exists(image_path), \\\n 'Path does not exist: {}'.format(image_path)\n return image_path", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_from_index(self, index):\n raise NotImplementedError", "def frame_paths(self, indx):\n if isinstance(indx, (int,np.integer)):\n return os.path.join(self['directory'][indx], self['filename'][indx])\n return [os.path.join(d,f) for d,f in zip(self['directory'][indx], self['filename'][indx])]", "def image_path_at(self, i):\n return self.image_path_from_index(self.image_index[i])", "def gen_path(self, index):\n if self.immutable_bounds:\n if not self.check_bounds(index):\n raise DataFileSequenceError(\n \"Index %d out of bounds [%d, %d]\"\n % (index, self.lower_bound, self.upper_bound)\n )\n elif index < 0:\n raise DataFileSequenceError(\"Indices must be nonnegative\")\n elif index == self.lower_bound - 1:\n self._lower_bound = index\n elif index == self.upper_bound + 1:\n self._upper_bound = index\n elif not self.check_bounds(index):\n raise DataFileSequenceError(\n \"Index %d out of bounds [%d, %d]; mutable sequences can be \"\n \"extended at most one index above/below.\"\n % (index, self.lower_bound, self.upper_bound)\n )\n\n return self.sequence % index", "def ascii_graph_index():\n return MandelbrotController.invoke(OUTPUT_DIRECTORY)", "def paths(self, return_indices=False):\n paths = []\n for tree in self.components():\n paths += self._single_tree_paths(tree, return_indices=return_indices)\n return paths", "def image_path_from_index(self, index):\n image_path = os.path.join(self._data_path, index)\n assert os.path.exists(image_path), 'path does not exist: {}'.format(image_path)\n return image_path", "def mp_findDrivingDistance(index):\n keys = ['Home Block', 'Home Lat', 'Home Lon',\n 'Work Block', 'Work Lat', 'Work Lon',\n 'Distance [mi]']\n commutes = {keys[0]:[], keys[1]:[], keys[2]:[],\n keys[3]:[], keys[4]:[], keys[5]:[],\n keys[6]:[]}\n missedBlocks = {keys[0]:[], keys[3]:[]}\n worker = mp.current_process()\n wid = worker.name\n\n data = index[2]\n datakeys = data.keys()\n locs = index[3]\n name = index[4]\n\n for i in range(index[0],index[1]):\n # getting the FIPS code for home and work blocks\n work = data[datakeys[0]][i]\n home = data[datakeys[1]][i]\n\n # retrieving state FIPS code\n wState = work[:2]\n hState = home[:2]\n\n # gathering column names from locs DataFrame\n lockeys = locs[wState].keys()\n geo = lockeys[0]\n lat = lockeys[1]\n lon = lockeys[2]\n\n # get lat,lon from current county\n wLatLon = locs[wState][work == locs[wState][geo]].reset_index(drop=True)\n hLatLon = locs[hState][home == locs[hState][geo]].reset_index(drop=True)\n\n # place retrieved lat,lon into url for server\n # NOTE: server takes location in lon,lat format\n try:\n # 
parsing lat,lon values\n homeLat = hLatLon[lat][0]\n homeLon = hLatLon[lon][0]\n workLat = wLatLon[lat][0]\n workLon = wLatLon[lon][0]\n\n # generating url to call server\n url = ('http://127.0.0.1:5000/route/v1/driving/{0},{1};{2},{3}?steps=true'\n .format(homeLon,\n homeLat,\n workLon,\n workLat))\n\n # calling server\n response = requests.get(url).json()\n # retrieving distance and converting to miles\n distance = response['routes'][0]['distance']\n distance = meters_to_miles(distance)\n\n # appending all necessary values to dictionary\n commutes[keys[0]].append(home)\n commutes[keys[1]].append(homeLat)\n commutes[keys[2]].append(homeLon)\n commutes[keys[3]].append(work)\n commutes[keys[4]].append(workLat)\n commutes[keys[5]].append(workLon)\n commutes[keys[6]].append(distance)\n except:\n # any missed values get appened to other dictionary\n missedBlocks[keys[0]].append(home)\n missedBlocks[keys[3]].append(work)\n\n lg.info('index ' + str(index[0]) + ','+ str(index[1]) + ' done processing: ' + str(wid))\n\n # time stamp for file name\n now = dt.datetime.now().strftime(\"%Y%m%d-%H%M\")\n # saving files\n title = now + '-' + name + '-' + wid + '.csv'\n miss = pd.DataFrame(missedBlocks).to_csv('missed/' + title, index=False)\n hits = pd.DataFrame(commutes).to_csv('results/'+ title, index=False)", "def plotPaths(self, simulationIndex, numberOfPaths):\n for k in range(numberOfPaths):\n path = self.getPath(simulationIndex + k);\n plt.plot(path)\n plt.xlabel('Time')\n plt.ylabel('Realizations of the process')\n plt.show()", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_at(self, i):\n return self.image_path_from_index(self._image_index[i])", "def image_path_from_index(self, index):\n for ext in self._image_ext:\n image_path = os.path.join(self._data_path, 'Images',\n index + ext)\n if os.path.exists(image_path):\n break\n assert os.path.exists(image_path), \\\n 'Path does not exist: {}'.format(image_path)\n\treturn image_path", "def generateFullPath(nSeq, nstates, prng, weights, bt, isNormalized):\n rateMtxQ = ReversibleRateMtx.ReversibleRateMtx(nstates, weights)\n if isNormalized:\n Q = rateMtxQ.getNormalizedRateMtx()\n else:\n Q = rateMtxQ.getRateMtx()\n \n stationary = rateMtxQ.getStationaryDist()\n ## sample the initial states for each sequence\n initialStateSeq = prng.choice(nstates, nSeq, replace=True, p=stationary)\n ## given the initial state, we sample the full path and save each sequence in a list\n seqList = []\n simulator = SimuSeq.ForwardSimulation(bt, Q)\n for i in range(0, nSeq):\n seq = simulator.sampleStateTimeSeq(prng, initialStateSeq[i])\n seqList.append(seq)\n ## get the keys for each sequence seqList[0].keys() \n return seqList", "def image_path_from_index(self, index):\n image_path = os.path.join(self._data_path, 'Images', index + self._image_ext)\n assert os.path.exists(image_path), 'Path does not exist: {}'.format(image_path)\n return image_path", "def simulation_dir(self):\n try:\n return (self.output_directory / self.sim_id).expand()\n except AttributeError:\n return Path()", "def search(self):\n self.updateStatus(\"Executing simulation...\")\n nextstep = self.cfg[\"START\"]\n\n # keep generating next steps as long as goal not in goal & enough 
time\n while not self.cfg[\"GOAL\"] == nextstep and self.timeremaining:\n try:\n # Don't set signal for infinite time\n if self.timeout < float('inf'):\n with Timeout(self.timeout): # call under SIGNAL\n nextstep = self._get_coordinate(self.gen.next())\n else:\n nextstep = self._get_coordinate(self.gen.next()) # call with no SIGNAL\n except Timeout.Timeout:\n self.timeremaining = 0\n self.updateStatus(\"Timed Out!\")\n except:\n self.updateStatus(\"Agent returned \" + str(nextstep))\n logging.error(\"Trace-back: \\n {}\".format(traceback.format_exc()))\n raise SystemExit()\n break\n return self.hdlStop() # (totalcost, pathsteps, timeremaining, pathtime)", "def _image_path_from_index(self, index):\n # Example image path for index=119993:\n # images/train2014/COCO_train2014_000000119993.jpg\n file_name = (str(index).zfill(12) + '.jpg')\n image_path = os.path.join(self._root_dir, self._data_name, file_name)\n assert os.path.exists(image_path), 'Path does not exist: {}'.format(image_path)\n return image_path", "def get_one_path(commands):\n path = []\n last_position = (0, 0)\n for command in commands:\n path += list(apply_one_command(last_position, command))\n last_position = path[-1]\n return path", "def print_path(path, index):\r\n\r\n print(\"Printing trace for puzzle no. {0}\".format(index))\r\n print_puzzle(path[0][0])\r\n for i in range(1, len(path)):\r\n movement = get_move(path[i-1][1], path[i][1])\r\n\r\n moved_tile = get_value(path[i-1][0], path[i][1])\r\n print(i, \": move \", moved_tile, \" \", movement, sep=\"\")\r\n print_puzzle(path[i][0])\r\n print('')" ]
[ "0.6194272", "0.5910049", "0.57058936", "0.56998456", "0.56998456", "0.5684137", "0.5624882", "0.5624882", "0.5623874", "0.5590808", "0.5588383", "0.5558976", "0.5552057", "0.55445606", "0.5506157", "0.5492166", "0.5461353", "0.54532665", "0.54532665", "0.54532665", "0.54532665", "0.54532665", "0.54249483", "0.54240584", "0.54203695", "0.54184884", "0.53792965", "0.53549457", "0.5343239", "0.5337632" ]
0.72079676
0
It prints the entire path of the process for a given simulation index
def printPath(self, simulationIndex): path = self.getPath(simulationIndex); print("The path for the", simulationIndex, "-th simulation is the following:") print() print('\n'.join('{:.3}'.format(realization) for realization in path)) print()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_path(path, index):\r\n\r\n print(\"Printing trace for puzzle no. {0}\".format(index))\r\n print_puzzle(path[0][0])\r\n for i in range(1, len(path)):\r\n movement = get_move(path[i-1][1], path[i][1])\r\n\r\n moved_tile = get_value(path[i-1][0], path[i][1])\r\n print(i, \": move \", moved_tile, \" \", movement, sep=\"\")\r\n print_puzzle(path[i][0])\r\n print('')", "def plotPaths(self, simulationIndex, numberOfPaths):\n for k in range(numberOfPaths):\n path = self.getPath(simulationIndex + k);\n plt.plot(path)\n plt.xlabel('Time')\n plt.ylabel('Realizations of the process')\n plt.show()", "def print_path(self):\n temp_board = self._untraversed_board\n step_numb = 0\n prev_row = -1\n prev_col = -1\n # Iterate through each board configuration.\n for loc in self._path:\n # Depending on if this is the initial setup or not\n # Process the next print out.\n if (step_numb == 0):\n print \"Initial:\"\n else:\n temp_board[prev_row][prev_col] = \".\"\n temp_board[loc.get_row()][loc.get_column()] = \"@\"\n # Print the step number\n print \"\"\n print \"Step {0}:\".format(step_numb)\n\n # Print the board\n for board_row in temp_board:\n # Build the string from the array then print it\n board_str = \"\".join(board_row)\n print board_str\n\n # Store the previous location for next time through the loop.\n prev_row = loc.get_row()\n prev_col = loc.get_column()\n step_numb += 1\n\n # Check if the target is reached\n final_loc = self._path[len(self._path) - 1]\n if (final_loc == self._goal_loc):\n print \"Problem Solved! I had some noodles!\"", "def ascii_graph_index():\n return MandelbrotController.invoke(OUTPUT_DIRECTORY)", "def print_path_amount_and_details(m, n):\n print(path_number(m, n))\n mat = []\n # [i][j]\n for i in range(m):\n line = []\n for j in range(n):\n line.append((j,i))\n mat.append(line)\n\n printallpaths(mat, m, n)", "def show_paths(self):\r\n print(\"------------------------\")\r\n print(\"######### ALL PATHS #########\")\r\n\r\n if self.size == 0:\r\n print(\"Empty tree!\")\r\n else:\r\n for i in range(1, self.root.size_tree + 1):\r\n node = self.select(i)\r\n if node.size_tree == 1:\r\n print(\"|\" + self.str_single_path(node))\r\n\r\n print(\"------------------------\")", "def displayPathtoPrincess(n, grid):\n solution = SavePrincess(grid)\n print \"{0}\".format(solution)", "def displayPathToPrincess(n, grid):\n for i, row in enumerate(grid):\n if 'p' in row:\n princess = [i, row.index('p')]\n if 'm' in row:\n me = [i, row.index('m')]\n\n r, c = find_path(princess, me)\n path = [r]\n path.append(c)\n return '\\n'.join(path)", "def output(index: int = 0) -> str:\n return outputs()[index]", "def print_path(self):\n\n grid = tg.Graph.grid_graph(self.graph.rows,self.graph.cols)\n #tg.draw_grid(self.draw_edges_alt,self.graph.rows,self.graph.cols,grid)\n tg.draw_grid(self.edges,self.graph.rows,self.graph.cols,grid)", "def __printThePath(self, tile):\n print()\n print(\"Path is found. 
Initial tile: \" + str(self.startingPoint) + \", Goal tile: \" + str(self.goalPoint))\n print(\"Here is the path cost: \" + str(tile.cost) + \" and path is:\")\n print(tile.pathToTile[::-1])\n print()", "def index_path(self):\n\t\treturn os.path.normpath(self.output + \"/\" + self.resultset_index)", "def show_path(self):\n\n node = self.goal\n\n while node.parent:\n node.parent.value = 1\n node = node.parent", "def print_paths(self):\n for path_key, path_value in self.paths.items():\n # Handler for request in path\n self.current_path = path_key\n for request_key, request_value in path_value.items():\n if request_key == 'parameters':\n continue\n self.get_main_title(path_key, request_key)\n self.get_description(request_value)\n self.get_status_code_and_schema_rst(request_value['responses'])\n self.get_params(path_value['parameters'], 'param')\n self.get_params(request_value['parameters'], 'query')", "def print_solution(self, solution_path):\n print(\"---SOLUTION---: \")\n for node in solution_path:\n node.state.plot_cube(\n \"SOLUTION: Node [\" + str(node.id) + \"] at depth \" + str(node.node_depth)\n )\n if node.last_action != None:\n print(\"Next action: \", node.last_action)\n print(\"[\" + str(node.id) + \"] \" + str(node.state.create_md5()))\n\n print(\"\\n TOTAL COST: \", solution_path[len(solution_path) - 1].cost)", "def paths_print(atree):\n\n l = atree.pathFromHere_explore('/')\n for d in l:\n print(d)", "def getPath(self, simulationIndex):\n realizations = self.realizations;\n return realizations[:,simulationIndex]", "def display_path(self, path):\n graph = path.graph\n if not graph:\n return\n for v in sorted(graph.vertices()):\n p = graph.get_vertex_attribute(v, 'xy')\n x, y = to_geometry(p[0]), to_geometry(p[1])\n print('define v{} ellipse 2 2 c_vertex {} {}'.format(v, x, y))\n #print('define v{0}t text {0} 14 white {1} {2}'.format(v, x, y))\n for u, v in graph.edges():\n print('define - link v{} v{} 1 c_edge'.format(u, v))\n # NOTE: this code assumes paths will not move indefinitely\n print('fix /./')", "def path_show(args):\n print(header(\"$PATH Components\"))\n loop_fmt = \"{pad}{color}{path}\"\n pad = 4\n\n cnt = 0\n for part in os.environ[\"PATH\"].split(\":\"):\n color = u\"\"\n if args.color:\n color = CODES[cnt]\n cnt = (cnt + 1) % len(CODES)\n\n print(loop_fmt.format(pad=pad * \" \", color=color, path=part))\n if args.nowarn:\n continue\n\n for warn in check_path_folder(part):\n print(\"{}X {}\".format(pad * 2 * \" \", warn))", "def browse(self):\n res = \"PID[\" + str(PID) + \"] \"\n for (start, offset) in \\\n self.__global_index[self.__start_index: self.__start_index + self.__nb_segs]:\n seg = Segment(self.__content[start:start + offset])\n res = res + \"\\n \" + str(seg)\n return res", "def print_seq(self):\n names, values = [], []\n for each in self.minions:\n names.append(each.name)\n values.append(f'{each.atk}/{each.dfs}')\n t = PrettyTable()\n t.add_row(names)\n t.add_row(values)\n print(t)", "def printInfo():\n print('\\t' * 6 + 'Combinational Circuit Paths')\n\n print('-' * 75)\n\n print('Input: Verilog file with Gate Level Modelling')\n print('Output: All paths from input to output of the circuit described by the Verilog file')\n print('(Optional: Graph of the circuit can also be exported)')\n\n print('-' * 75, end='\\n\\n')", "def print_index(self):\n\n # Process event handlers\n pygame.event.pump()\n\n # Buttons\n for button in range(0, self.num_buttons):\n value = self.js.get_button(button)\n if value:\n print \"Button {} on\".format(button)\n \n # 
Axes\n for axis in range(0, self.num_axes):\n value = self.js.get_axis(axis)\n if value > 0:\n print \"Axis {} positive\".format(axis)\n elif value < 0:\n print \"Axis {} negative\".format(axis)\n \n # Hats\n for hat in range(0, self.num_hats):\n value = self.js.get_hat(hat)\n if any(value) != 0:\n print \"Hat {}: {}\".format(hat, value)", "def print_path(self, d, parent, s, t):\n idxs = [t]\n while idxs[-1]!=s:\n idxs.append(parent[idxs[-1]])\n idxs.reverse()\n print('[{:g}]'.format(d[t])+' '+'-->'.join([str(self.vertices[i]) for i in idxs]))", "def do_print_routes(self, line=''):\n self.fibbing.print_routes()", "def _printFromIndex(self, index):\n ret = str(self.arr[index])\n iterator = index + 1\n while iterator != index:\n ret += ' {}'.format(self.arr[iterator])\n iterator = iterator + 1\n iterator = iterator % self.size\n return ret", "def printPaths(graph, data):\n\n # Printing data related to the circuit\n print(f'Module name: {data[\"module_name\"]}')\n print('Input: ', end='')\n print(*data['input'], sep=', ')\n print('Output: ', end='')\n print(*data['output'], sep=', ')\n print('Wire: ', end='')\n print(*data['wire'], sep=', ', end='\\n\\n')\n\n # Printing the paths in the graphical version of the circuit\n print('All paths from input to output')\n for io in [[i, o] for i in data['input'] for o in data['output']]:\n for path in nx.all_simple_paths(graph, source=io[0], target=io[1]):\n print(*path, sep=' --> ')", "def print_progression(self, n):\n print(\" \".join(str(next(self)) for i in range(n)))", "def view_command(path, verbose):\n job = ReadOnlyJob(path)\n print(job.summary(verbose=verbose))", "def run():\n\n for d in hiv_drms.drms:\n print \"\\\"%s\\\",\" % unicode(d.locus_str())" ]
[ "0.7143878", "0.62840337", "0.62348145", "0.6183162", "0.5965672", "0.59566885", "0.59365845", "0.5935982", "0.58508074", "0.58452827", "0.57137233", "0.5694292", "0.56709874", "0.56071806", "0.55772626", "0.5566466", "0.55639225", "0.5555327", "0.55487084", "0.5505415", "0.54637057", "0.5448724", "0.540948", "0.54032755", "0.53661156", "0.5350397", "0.5340006", "0.53370476", "0.53272784", "0.53184426" ]
0.7499326
0
It plots the paths of the process from simulationIndex to simulationIndex + numberOfPaths
def plotPaths(self, simulationIndex, numberOfPaths): for k in range(numberOfPaths): path = self.getPath(simulationIndex + k); plt.plot(path) plt.xlabel('Time') plt.ylabel('Realizations of the process') plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_path_statistics( self, path_file=\"tse_ensemble.json\" ):\n from matplotlib import pyplot as plt\n with open(path_file,'r') as infile:\n data = json.load(infile)\n try:\n paths = data[\"transition_paths\"]\n except KeyError:\n paths = [data]\n total_product_indicator = np.zeros(len(paths[0][\"symbols\"]))\n total_reactant_indicator = np.zeros(len(paths[0][\"symbols\"]))\n self.nuc_mc.min_size_product = paths[0][\"min_size_product\"]\n self.nuc_mc.max_size_reactant = paths[0][\"max_size_reactant\"]\n for path in paths:\n product_indicator = []\n reactant_indicator = []\n reactant_indicator, product_indicator = self.get_basin_indicators(path)\n\n total_product_indicator += np.cumsum(product_indicator)/float( len(product_indicator) )\n total_reactant_indicator += np.cumsum(reactant_indicator)/float( len(reactant_indicator) )\n\n total_reactant_indicator /= float( len(paths) )\n total_product_indicator /= float( len(paths) )\n\n fig = plt.figure()\n ax = fig.add_subplot(1,1,1)\n ax.plot( total_product_indicator, label=\"Product\" )\n ax.plot( total_reactant_indicator, label=\"Reactant\" )\n ax.set_xlabel( \"MC sweeps\" )\n ax.set_ylabel( \"Indicator\" )\n ax.spines[\"right\"].set_visible(False)\n ax.spines[\"top\"].set_visible(False)\n ax.legend( frameon=False, loc=\"best\" )\n return fig", "def represent_paths(all_x, all_y, paths, n): \n \n # Number of Hamiltonian paths \n m = len(paths)\n \n for i in range(m):\n # for each Hamiltonian path\n path = paths[i]\n \n # define the figure on which we will plot the path \n fig, ax = plt.subplots()\n \n # draw the vertices of the initial graph \n for x in all_x :\n \n y = all_y[x]\n represent_vertex(ax, x, y)\n \n # draw the edges\n # take the first vertex (random vertex x)\n vertex_start = path[0]\n \n for j in range(1,n) :\n # for each vertex following x\n vertex = path[j]\n \n # draw an array between the former vertex and the new one\n # coordinate of the former vertex\n x = vertex_start\n y = all_y[x]\n # coordinate of the vertex\n x_neighbour = vertex\n y_neighbour = all_y[x_neighbour]\n # link them by an array\n represent_link(ax, x, y, x_neighbour, y_neighbour)\n vertex_start = vertex\n \n # Define the window\n plt.xlim(0,n)\n plt.ylim(0,n)\n plt.title('Hamiltonian path number ' + str(i+1))\n \n # Save the result in a png file\n plt.savefig(\"Hamiltonian_path_\"+str(i+1)+\".png\")\n \n #Show all paths\n plt.show()", "def plot_path(path):\n s = np.linspace(0, path.total_length, 1000, endpoint=False)\n twists = np.array(list(path.target_state(si) for si in s))\n print(twists.shape)\n plt.plot(twists[:,0], twists[:,1])\n plt.show()", "def printPath(self, simulationIndex):\n path = self.getPath(simulationIndex);\n\n print(\"The path for the\", simulationIndex, \"-th simulation is the following:\")\n print()\n print('\\n'.join('{:.3}'.format(realization) for realization in path))\n print()", "def path_plot(robot_path, regions, obs):\n\n for robot, path in robot_path.items():\n # prefix path\n if len(path) == 1:\n continue\n x_pre = np.asarray([point[0] + 0.5 for point in path])\n y_pre = np.asarray([point[1] + 0.5 for point in path])\n plt.quiver(x_pre[:-1], y_pre[:-1], x_pre[1:] - x_pre[:-1], y_pre[1:] - y_pre[:-1],\n color=\"#\" + ''.join([random.choice('0123456789ABCDEF') for j in range(6)]),\n scale_units='xy', angles='xy', scale=1, label='prefix path')\n\n plt.savefig('img/path.png', bbox_inches='tight', dpi=600)", "def show_grid(self):\n\n if not os.path.exists(self.path_to_results):\n os.mkdir(self.path_to_results)\n\n fig = 
plt.figure()\n\n if self.show_points == 1:\n plt.scatter(self.x_list_grid, self.y_list_grid, c='blue')\n\n plt.plot(self.x_list_main, self.y_list_main,\n 'green', label='straight path')\n plt.plot(self.x_list, self.y_list, 'red', label='first path')\n plt.plot(self.x_list_filtered, self.y_list_filtered,\n 'blue', label='filtered path')\n plt.title('Paths')\n plt.ylabel('Latitude')\n plt.xlabel('Longitude')\n # plt.legend()\n\n fig.savefig(os.path.join(self.path_to_results, 'Paths.png'))", "def run_path_visualisation(paths, config, modulesConfig):\n all_targets = [os.path.basename(config[s][\"target\"]) for s in config.sections]\n all_target_tasks = {os.path.basename(config[s][\"target\"]):s for s in config.sections}\n \n added_tasks = []\n prepared_paths = []\n for path in paths:\n prepared_tasks = []\n for idx, task in enumerate(list(reversed(path))):\n s_module, s_name, *identifier = task.split(\" \")\n\n # Special Rule For Join Module To Have A Connection To Another Module\n special_connection = False\n if s_module == \"processing_join\":\n args = config[task]\n con_module, con_name, *identifier = all_target_tasks.get(os.path.basename(args[\"joinwith\"]), s_module+\"_SPECIAL \"+s_name+\"_SPECIAL\").split(\" \")\n special_connection = {\n \"connection_to_module\" : con_module,\n \"connection_to_name\" : con_name,\n \"will_be_created\" : (os.path.basename(args[\"joinwith\"]) in all_targets)\n }\n\n prepared_tasks.append({\n 'module':s_module,\n 'name':s_name,\n 'display': (task not in added_tasks),\n 'specialConnection': special_connection,\n 'last': (idx == len(path) - 1),\n 'attributes': config[task]\n })\n added_tasks.append(task)\n prepared_paths.append(prepared_tasks)\n logger.debug(\"Path prepared for visualization!\")\n render_path_visualisation(config['projectRoot'], config['projectName'], prepared_paths)", "def print_path(self):\n\n grid = tg.Graph.grid_graph(self.graph.rows,self.graph.cols)\n #tg.draw_grid(self.draw_edges_alt,self.graph.rows,self.graph.cols,grid)\n tg.draw_grid(self.edges,self.graph.rows,self.graph.cols,grid)", "def plot_graph():\n name = request.args.get('instance')\n name = str(name)\n distance = request.args.get('distance')\n path = request.args.get('path')\n if name == 'Custom':\n coords = request.args.get('coords')\n coords = str(coords)\n nodes = custom_nodes(coords)\n else:\n nodes = create_nodes(name)\n fig = Figure()\n axis = fig.add_subplot(1, 1, 1)\n\n axis.set_title(name + \" - Distance: \"+ str(distance))\n path = str(path).split(',')\n path = [int(i) for i in path]\n for i in range(len(path) - 1):\n\n start_node = nodes[path[i]]\n x1, y1 = start_node.x, start_node.y\n axis.scatter(x1, y1, c = 'b', label = str(path[i]))\n axis.text(x1,y1, str(path[i]))\n end_node = nodes[path[i+1]]\n x2, y2 = end_node.x, end_node.y\n axis.plot([x1,x2], [y1, y2])\n\n last_node = nodes[path[len(path)-1]]\n x1, y1 = last_node.x, last_node.y\n axis.text(x1,y1, str(path[len(path)-1]))\n\n begin_node = nodes[path[0]]\n x2, y2 = begin_node.x, begin_node.y\n axis.scatter(x1, y1, c = 'b', label = str(path[len(path)-1]))\n axis.plot([x1,x2], [y1, y2])\n\n output = io.BytesIO()\n FigureCanvas(fig).print_png(output)\n return Response(output.getvalue(), mimetype=\"image/png\")", "def plot_graph(self) -> None:", "def draw_path(self):\r\n if len(self.path) > 1:\r\n for i in range(1, len(self.path)):\r\n pg.draw.line(self.screen, (0, 150, 0),\r\n self.path[i - 1], self.path[i], 1)\r\n elif len(self.path) == 1:\r\n pg.draw.circle(self.screen, (0, 150, 0),\r\n (int(self.path[0].x), 
int(self.path[0].y)), 1)", "def draw_path(self):\n\n # using current data row number\n # switch to appropriate row in file\n self.switch_row(self.data_row_num)\n path = self.list2FloatPairs(self.row)\n\n for pt in path:\n x = int(pt[0])\n y = int(pt[1])\n pts = x,y\n pygame.draw.circle(self.screen,self.BLACK,pts,2)", "def showPlot2():\n interested_in = list(range(1,10))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(item, 1.0, 25, 25, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on number of robots')\n xlabel('number of robots (tiles)')\n ylabel('mean time (clocks)')\n show()", "def plot_path(self, current_path):\n full_path = current_path.copy()\n full_path.insert(0, self.root)\n\n path = Marker()\n id = 1\n\n # edge between nodes\n path = Marker()\n path.header.frame_id = \"map\"\n path.header.stamp = rospy.get_rostime()\n path.ns = \"markers\"\n path.id = 1\n path.type = path.LINE_STRIP\n path.action = path.ADD\n path.scale.x = self.rviz_tuning_plt\n path.color.a = 1.0\n path.color.r = 0.0\n path.color.g = 1.0\n path.color.b = 0.0\n\n path.lifetime = rospy.Duration()\n path.pose.orientation.w = 1.0\n\n for node in full_path:\n p1 = Point()\n p1.x = node.x\n p1.y = node.y\n p1.z = 0.03\n path.points.append(p1)\n\n self.pub_path.publish(path)", "def plot_paths(gdf_dict: Dict) -> None:\n fig, ax = plt.subplots(1, 1)\n\n # TODO change so that get all models plotted\n for model, gdf in gdf_dict.items():\n gdf.plot(ax=ax, legend=True)\n\n plt.show()\n return None", "def plot_parcles_run(k, v):\n\n L.info(f'Received Input: {v}')\n\n L.info(f\"Plotting results from: {v['filepath']}\")\n\n plotpath = Path(__file__).parent.parent / 'plots'\n filename = v['id'] + '.png'\n plotfile = str(plotpath / filename) \n\n plotTrajectoriesFile(v['filepath'], mode='2d', show_plt=False)\n f = plt.gcf()\n f.savefig(plotfile)\n\n L.info(f'Saved plot to: {plotfile}')", "def plot(self):\n\t\tplot_chain(self.database_path, self.temp_folder)\n\t\tplot_density(self.database_path, self.temp_folder, self.cal_params)", "def plot_graph(self) -> None:\n\n nodes_on_graph = self.dw_graph.get_all_v()\n for k, v in nodes_on_graph.items():\n if v.position is None:\n x_rand = random.uniform(0.5, self.dw_graph.v_size())\n y_rand = random.uniform(0.5, self.dw_graph.v_size())\n v.position = (x_rand, y_rand)\n x_vals = []\n y_vals = []\n n = list(nodes_on_graph.keys())\n for k, v in nodes_on_graph.items(): # draw nodes\n x_vals.append(v.position[0])\n y_vals.append(v.position[1])\n\n fig, ax = plt.subplots()\n plt.plot(x_vals, y_vals, 'ro', markersize=5, data=\"d\")\n\n for p, txt in enumerate(n):\n ax.annotate(n[p], (x_vals[p]+0.00003, y_vals[p]), color='g')\n\n for n in nodes_on_graph:\n n1 = self.dw_graph.get_nodes(n)\n x = n1.position[0]\n y = n1.position[1]\n for r in self.dw_graph.all_out_edges_of_node(n):\n dx = self.dw_graph.get_nodes(r).position[0]\n dy = self.dw_graph.get_nodes(r).position[1]\n ax.quiver(x, y, dx-x, dy-y, angles='xy', scale_units='xy', scale=1)\n #plt.arrow(x, y, dx - x, dy - y, head_width=0.0009, width=0.00005, length_includes_head=True)\n\n\n plt.xlabel(\"x axis \")\n plt.ylabel(\"y axis \")\n plt.title(\"The title of the graph\")\n plt.show()", "def show_paths(self):\r\n print(\"------------------------\")\r\n print(\"######### ALL PATHS #########\")\r\n\r\n if self.size == 
0:\r\n print(\"Empty tree!\")\r\n else:\r\n for i in range(1, self.root.size_tree + 1):\r\n node = self.select(i)\r\n if node.size_tree == 1:\r\n print(\"|\" + self.str_single_path(node))\r\n\r\n print(\"------------------------\")", "def plot_sequence(sequence: Sequence, **kwargs: Any) -> mpl.figure.Figure:\n return plot_paths(sequence.prediction.get_path(), sequence.ground_truth.get_path(), **kwargs)", "def showPlot1():\n\n interested_in = list(range(5,30,5))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(1, 1.0, item, item, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on room size')\n xlabel('area of the room (tiles)')\n ylabel('mean time (clocks)')\n show()", "def print_path(path, index):\r\n\r\n print(\"Printing trace for puzzle no. {0}\".format(index))\r\n print_puzzle(path[0][0])\r\n for i in range(1, len(path)):\r\n movement = get_move(path[i-1][1], path[i][1])\r\n\r\n moved_tile = get_value(path[i-1][0], path[i][1])\r\n print(i, \": move \", moved_tile, \" \", movement, sep=\"\")\r\n print_puzzle(path[i][0])\r\n print('')", "def plot_single_sharing_configuration(gval, index=0):\n routing_probs = gval[index][1]['test/routing_probs'][-1]\n modules = np.argmax(routing_probs, axis=-1)\n num_pos = modules.shape[0]\n num_tasks = modules.shape[1]\n num_modules = np.max(modules)\n\n cmap = mpl.cm.viridis\n norm = mpl.colors.Normalize(vmin=0, vmax=num_modules)\n\n fig, ax = plt.subplots()\n ax.set_xlabel(\"Position in network\")\n ax.set_ylabel(\"Task\")\n ax.set_xticks(range(1, num_pos + 1))\n ax.set_yticks(range(1, num_tasks + 1))\n ax.set_xlim(0.5, num_pos + 0.5)\n ax.set_ylim(0.5, num_tasks + 0.5)\n\n radius_outer = 0.25\n radius_inner = 0.15\n\n # Draw modules\n for pos in range(num_pos):\n for task in range(num_tasks):\n mod = modules[pos, task]\n ax.text(pos + 1, task + 1, f'{pos + 1}.{mod}', va='center', ha='center')\n circle1 = plt.Circle((pos + 1, task + 1), radius_outer, color=cmap(norm(mod)))\n circle2 = plt.Circle((pos + 1, task + 1), radius_inner, color='w')\n ax.add_artist(circle1)\n ax.add_artist(circle2)\n\n # Draw arrows between modules\n for pos in range(num_pos - 1):\n for task in range(num_tasks):\n ax.arrow(pos + 1 + radius_outer, task + 1, 1 - 2 * radius_outer - 0.1, 0,\n head_width=0.05, head_length=0.1, fc='k', ec='k')", "def animate(i):\n plot_viz_x = []\n plot_viz_y = []\n for node in graph:\n node_x, node_y = node.position\n agent_count = node.agent_count\n\n for _ in range(0, agent_count):\n plot_viz_x.append(self._get_visual_position(node_x))\n plot_viz_y.append(self._get_visual_position(node_y))\n\n self.plots[0].set_data(plot_viz_x, plot_viz_y)\n self.plots[1].set_data(leader.visual[0], leader.visual[1])\n return self.plots", "def visualize(self):\n\n # Tools that will be displayed on the plots\n tools = \"pan,wheel_zoom,reset,save\"\n\n # Plot displaying the optimized path\n result_plot = figure(\n plot_width=1000,\n plot_height=500,\n tools=tools,\n active_scroll='wheel_zoom')\n result_plot.title.text = \"Optimized Path\"\n\n # Plot displaying the non optimized path\n initial_plot = figure(\n plot_width=1000,\n plot_height=500,\n tools=tools,\n active_scroll='wheel_zoom')\n initial_plot.title.text = \"Initial Path\"\n\n # Add the data to the result plot\n result_plot = self.populate_plot(result_plot, self.result)\n 
result_plot.legend.location = \"bottom_right\"\n\n # Add the data to the initial plot\n initial_plot = self.populate_plot(initial_plot, self.initial)\n initial_plot.legend.location = \"bottom_right\"\n\n # Add cutting tool to plots\n # Generate the points on which the triangle should move on\n result_lines_x, result_lines_y = self.generate_tool_path(self.result, 1)\n initial_lines_x, initial_lines_y = self.generate_tool_path(self.initial, 1)\n\n # Add cutting tool triangle to optimized path\n result_triangle_position = ColumnDataSource(\n data=dict(\n x=[result_lines_x[0]],\n y=[result_lines_y[0]]\n ))\n result_triangle = Triangle(\n x='x', y='y', line_color=Category10_4[3], line_width=3,\n size=20, fill_alpha=0\n )\n result_plot.add_glyph(result_triangle_position, result_triangle)\n\n # Add cutting tool triangle to initial path\n initial_triangle_position = ColumnDataSource(\n data=dict(\n x=[initial_lines_x[0]],\n y=[initial_lines_y[0]]\n ))\n initial_triangle = Triangle(\n x='x', y='y', line_color=Category10_4[3], line_width=3,\n size=20, fill_alpha=0\n )\n initial_plot.add_glyph(initial_triangle_position, initial_triangle)\n\n # Add button to start moving the triangle\n button = Button(label='Start')\n result_num_steps = result_lines_x.shape[0]\n initial_num_steps = initial_lines_x.shape[0]\n num_steps = max(result_num_steps, initial_num_steps)\n\n # JavaScript callback which will be called once the button is pressed\n callback = CustomJS(args=dict(\n result_triangle_position=result_triangle_position,\n result_lines_x=result_lines_x,\n result_lines_y=result_lines_y,\n result_num_steps=result_num_steps,\n initial_triangle_position=initial_triangle_position,\n initial_lines_x=initial_lines_x,\n initial_lines_y=initial_lines_y,\n initial_num_steps=initial_num_steps,\n num_steps=num_steps\n ),\n code=\"\"\"\n // Animate optimal path plot\n for(let i = 0; i < num_steps; i += 50) {\n setTimeout(function() {\n if (i < result_num_steps) {\n result_triangle_position.data['x'][0] = result_lines_x[i]\n result_triangle_position.data['y'][0] = result_lines_y[i]\n }\n\n if (i < initial_num_steps) {\n initial_triangle_position.data['x'][0] = initial_lines_x[i]\n initial_triangle_position.data['y'][0] = initial_lines_y[i]\n }\n\n result_triangle_position.change.emit()\n initial_triangle_position.change.emit()\n\n }, i)\n }\n \"\"\")\n # Add callback function to button, which starts the whole animation\n button.js_on_click(callback)\n\n # Save the plot\n result_plot = row([result_plot, button])\n plot = column([result_plot, initial_plot])\n output_file(\"visualization.html\", title=\"CNC Path Optimization\")\n save(plot)", "def visualize_routes(self):\n visualize_tsp.plotTSP([self.best_solution], self.coords)", "def path_plot(self, path=[90, 90], num_points=100, normalization=1.0, latex=False):\n from matplotlib import pyplot as plt\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n x_values = []\n tick_label = []\n\n # Plot the first path\n end = path[0]*np.pi/180.0\n theta = np.linspace(0.0, end, num_points).tolist()\n phi = 0.0\n counter = 0\n gamma = []\n angles = []\n tick_label.append(0)\n for t in theta:\n x_values.append(counter)\n gamma.append(self.eval(t, phi))\n angles.append((t, phi))\n counter += 1\n\n # Plot second path\n theta = end\n end = path[1]*np.pi/180.0\n phi = np.linspace(0.0, end, num_points).tolist()\n counter -= 1\n tick_label.append(counter)\n for p in phi:\n x_values.append(counter)\n gamma.append(self.eval(theta, p))\n angles.append((theta, p))\n counter += 1\n\n # Plot 
third path (back to origin)\n theta = np.linspace(0.0, theta, num_points)[::-1]\n theta = list(theta)\n phi = end\n counter -= 1\n tick_label.append(counter)\n for t in theta:\n x_values.append(counter)\n gamma.append(self.eval(t, phi))\n counter += 1\n angles.append((t, phi))\n tick_label.append(counter-1)\n\n gamma = np.array(gamma)*normalization\n ax.plot(x_values, gamma)\n if latex:\n ax.set_ylabel(r\"Surface tension (mJ/\\$m^2\\$\")\n else:\n ax.set_ylabel(\"Surface tension (mJ/$m^2$\")\n ax.set_xticklabels([])\n ax.spines[\"right\"].set_visible(False)\n ax.spines[\"top\"].set_visible(False)\n ax.set_xticks(tick_label)\n ax.set_xticklabels([(0, 0), (path[0], 0), (path[0], path[1]), (0, path[1])])\n return fig", "def plot_decision_paths(\n self, distributions=True, heatmap=True, thr_pvalue=1, num_cols=6, save=None\n ):\n # drop insignificant values\n data_clustering_ranked = self._data_clustering_ranked.copy()\n for column in data_clustering_ranked.columns:\n if self.p_value_of_features[column] > thr_pvalue:\n data_clustering_ranked.drop(column, axis=1, inplace=True)\n\n if heatmap:\n plotting._plot_heatmap(\n data_clustering_ranked, thr_pvalue, self.model_type, save\n )\n\n if distributions:\n plotting._plot_distributions(\n data_clustering_ranked, thr_pvalue, num_cols, save\n )", "def draw_loop():\n global G\n\n plt.ion()\n\n # mng = plt.get_current_fig_manager()\n # mng.resize(*mng.window.maxsize())\n plt.draw()\n\n for line in fileinput.input():\n if output(line):\n plt.clf()\n nx.draw(G)\n plt.draw()", "def draw_graph(self, fpath):\n import networkx as nx\n G = self.to_networkx()\n A = nx.nx_agraph.to_agraph(G)\n\n for proc in self.procs.values():\n nbunch = [proc.name]\n nbunch += [iport.absname() for iport in proc.iports.values()]\n nbunch += [oport.absname() for oport in proc.oports.values()]\n A.add_subgraph(\n nbunch, name='cluster_' + proc.name,\n color='lightgray', style='filled', fillcolor='lightgray')\n # color=lightgray;style=filled;fillcolor=lightgray;\n A.layout(prog='dot')\n A.draw(fpath)" ]
[ "0.6657115", "0.6575424", "0.6300528", "0.6178793", "0.61458", "0.610633", "0.6047569", "0.5977867", "0.59663767", "0.5956206", "0.58128816", "0.5809306", "0.58068746", "0.5751905", "0.57391244", "0.5727227", "0.57147604", "0.5711319", "0.57035184", "0.56797856", "0.5677725", "0.5659866", "0.56391716", "0.5637766", "0.56340706", "0.56334347", "0.56167877", "0.5611103", "0.5611048", "0.56058234" ]
0.8709885
0
It returns the maximum of the realizations of the process at time timeIndex
def getMaximumAtGivenTime(self, timeIndex):
    realizationsAtTimeIndex = self.getRealizationsAtGivenTime(timeIndex)
    return np.max(realizationsAtTimeIndex)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getEvolutionMax(self):\n \n return [self.getMaximumAtGivenTime(timeIndex) for timeIndex in range(self.numberOfTimes - 1)]", "def max_time(self):\n return self.time[np.argmax(self.flux)]", "def max(self):\n\n return time_stat(self, stat=\"max\")", "def max_time(self) -> float:\r\n if(len(self.operations_by_name) == 0):\r\n return -1\r\n return max(map(lambda x: x[\"time_step\"], self.operations_by_name.values()))", "def _get_max_t(self):\n\n return max([\n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n ])", "def _get_max_t(self):\n \"\"\"\n if hasattr(self,'k_of_t'):\n return max([ \n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n self.k_of_t[-1][0],\n ])\n else:\n return max([ \n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n ])\n \"\"\"\n return self.t_max", "def get_longest_process_time_first_solution(self):\n return self._generate_solution_w_processing_time_criteria(lpt=True)", "def peak_time(self):\n return np.array([self.wftime[ch][self.waveform[ch].argmax()] for ch in range(self.nchannels)])", "def max_time(self):\n #{{{ function to return time of last sample\n\n if self.maxtime == -1:\n return stock.now()\n\n return self.maxtime", "def maxKeyTime(analyzer):\n return om.maxKey(analyzer['timeIndex'])", "def max(self, i):\n x=self.val(i,0)\n lm=len(self)\n t=1\n while t<lm:\n y=self.val(i,t)\n if x<y:\n x=y\n t+=1\n return x", "def get_max_temp(self):\n self.max_temp = self.domain[1] * 2", "def max_intensity(self, time):\n ti = np.where(time == self.times)[0][0]\n return self.timesteps[ti].max()", "def get_tmax(data):\n return data[np.argmax(data[:, 1])][0]", "def endTime(self) -> float:\n try: return self.times[-1]\n except IndexError: return 0.0", "def storage_upper_bound(index):\n i = index[0]\n return storage_para[i].pmax", "def find_max(trajectory):\n x = trajectory.s\n y = trajectory.i\n yt = np.abs(y - max(y))\n yt = yt < 1e-5\n max_idx = np.where(yt == True)[0]\n max_idx = max(max_idx)\n return [x[max_idx], y[max_idx]]", "def search_for_maximum(self):\n return self.maximise_aquisition(self.expected_improvement)", "def find_tim(self):\n start_max = 0\n finish_max = 0\n op_mode = self.op_number + ',' + self.mode_number\n for resource in self.resources:\n end_time = resource.usage[op_mode][\"start_time\"] + resource.usage[op_mode][\"duration\"]\n if end_time > finish_max:\n finish_max = end_time\n start_max = resource.usage[op_mode][\"start_time\"]\n self.tim = finish_max\n self.sim = start_max", "def max_time(self):\n return self._ll_tree_sequence.get_max_time()", "def get_max_time_vals(train_results):\n t_tr, t_te, t_lo, t_re = 0, 0, 0, 0\n for tres in train_results:\n t_tr += tres.time_train\n t_te += tres.time_test\n t_lo += tres.time_load\n t_re += tres.time_reduce\n n = len(train_results)\n return t_tr/n, t_te/n, t_lo/n, t_re/n", "def get_timing(pidx):\n pgconn = get_dbconn(\"mesosite\")\n cursor = pgconn.cursor()\n cursor.execute(\n \"SELECT avg(timing) from autoplot_timing where appid = %s \"\n \"and valid > (now() - '7 days'::interval)\",\n (pidx,),\n )\n timing = cursor.fetchone()[0]\n return timing if timing is not None else -1", "def max(ev):\n profData = getProfilingData(ev)\n if profData is not None:\n return profData.Tmax()\n return \"\"", "def getMaxSimTime(self):\n return self.max_simsecs_value", "def max_time(self) -> 'ImageCollection':\n\n process_id = 'max_time'\n\n args = {\n 'imagery': self.graph\n }\n\n return self.graph_add_process(process_id, args)", "def solve_bruteforce(self):\n max_value = 
-1\n for z in range(0, self.k):\n max_value = -1\n max_index = -1\n for i, v in enumerate(self.numbers):\n if v > max_value:\n max_index = i\n max_value = v\n del self.numbers[max_index]\n\n return max_value", "def get_current_simulated_time(self):\n\n query = \"SELECT MAX(time) FROM patient_signal_values\"\n\n return self.mysql_obj.fetch_value(query)", "def max(self):\n maxs = self.client.map(_call_max, self.vecDask, pure=False)\n max_val = - np.inf\n for future, result in daskD.as_completed(maxs, with_results=True):\n if result > max_val:\n max_val = result\n return max_val", "def get_max(self):\n return self.max[-1]", "def find_index_of_max(array):\n\n max_value = -abs(np.amax(array))\n\n max_index_hour = 0\n\n for k in range(len(array)):\n if array[k] > max_value:\n max_value = array[k]\n max_index_hour = k\n\n return max_index_hour" ]
[ "0.7338751", "0.71580356", "0.6704653", "0.6664134", "0.6621409", "0.6446359", "0.63972676", "0.6210386", "0.608417", "0.6026619", "0.60082227", "0.5994343", "0.5993893", "0.5967766", "0.5956552", "0.5921046", "0.5900468", "0.58589935", "0.5795851", "0.5795324", "0.5782808", "0.57821447", "0.5781393", "0.57780707", "0.5771714", "0.5745783", "0.5730141", "0.57289094", "0.5678312", "0.5643285" ]
0.76386476
0
It returns the evolution of the maximum of the realizations of the process at given times
def getEvolutionMax(self):
    return [self.getMaximumAtGivenTime(timeIndex) for timeIndex in range(self.numberOfTimes - 1)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def max_time(self):\n return self.time[np.argmax(self.flux)]", "def M_D_1(arrival_time,max_time,service_time=1/90):\n #conversion in seconds\n max_seconds = max_time*60*60\n sim_time = 0.0 # simulation time\n t_1 = 0.0 # time for next event (arrival)\n t_2 = max_seconds # time for next event (departure)\n t_n = 0.0 #last event time--> tempo dell'ultimo avvenimento generico\n t_b = 0.0 # last start of busy time--> tempo in cui la queue inizia ad essere non vuota per l'ultima volta\n c = 0 # numero di servizi completati\n queue_aircraft = [] # number of aircraft in the queue\n aircraft = 0\n arrival = [] # time of arrival\n attesa = [] # attesa per gli aerei-->NON SICURO CHE SI CALCOLI COSI'\n # simulation loop\n while(sim_time < max_seconds):\n if(t_1<t_2): #event1:arrival\n sim_time = t_1\n arrival.append(t_1)\n aircraft += 1\n queue_aircraft.append(aircraft)\n t_n = sim_time\n t_1 = sim_time + rm.expovariate(arrival_time)\n if(aircraft==1):\n t_b = sim_time\n t_2 = sim_time + 1/service_time\n else:\n sim_time = t_2\n aircraft = aircraft -1\n queue_aircraft.append(aircraft)\n t_n = sim_time\n attesa.append( t_2 - arrival[c])\n c+=1\n if(aircraft>0):\n t_2=sim_time + 1/service_time\n else:\n t_2 = max_seconds\n\n\n\n\n return queue_aircraft,arrival,attesa", "def getMaximumAtGivenTime(self, timeIndex):\n realizationsAtTimeIndex = self.getRealizationsAtGivenTime(timeIndex)\n return np.max(realizationsAtTimeIndex)", "def _get_max_t(self):\n\n return max([\n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n ])", "def findMaximumDeviationLoop(junctions, wires, resistances, voltages, currents):\n raise NotImplementedError", "def max_intensity(self, time):\n ti = np.where(time == self.times)[0][0]\n return self.timesteps[ti].max()", "def max_time(self) -> float:\r\n if(len(self.operations_by_name) == 0):\r\n return -1\r\n return max(map(lambda x: x[\"time_step\"], self.operations_by_name.values()))", "def find_min_max(model, n_times=200):\n rand_point = model.any(model)\n max_e = rand_point.energy\n for _ in xrange(n_times):\n rand_point = model.any(model)\n if rand_point.energy > max_e: max_e = rand_point.energy\n return max_e", "def test_compute_c_max_output():\n # build\n T = np.array([600, 500])\n E_ion = np.array([20, 10])\n E_atom = np.array([30, 40])\n angles_ion = np.array([60, 60])\n angles_atom = np.array([60, 60])\n ion_flux = np.array([1e21, 1e20])\n atom_flux = np.array([2e21, 2e20])\n\n # run\n output = divHretention.compute_c_max(\n T, E_ion, E_atom, angles_ion, angles_atom,\n ion_flux, atom_flux, full_export=True)\n\n # test\n assert len(output) == 3\n\n # run\n output = divHretention.compute_c_max(\n T, E_ion, E_atom, angles_ion, angles_atom,\n ion_flux, atom_flux, full_export=False)\n\n # test\n assert len(output) == 2", "def _get_max_t(self):\n \"\"\"\n if hasattr(self,'k_of_t'):\n return max([ \n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n self.k_of_t[-1][0],\n ])\n else:\n return max([ \n self.s_of_t[-1][0],\n self.i_of_t[-1][0],\n self.r_of_t[-1][0],\n ])\n \"\"\"\n return self.t_max", "def max():\n valid=result_alpha.F>0\n src_data.F[valid]=np.maximum( src_data.F[valid],result_data.F[valid] )", "def compute_vmax(particle, fieldset, time):\n if particle.active == 1:\n particle.vmax = fieldset.vscale*(particle.SCL**fieldset.d)", "def max(self):\n\n return time_stat(self, stat=\"max\")", "def MAXED(N, sigma2, R, f_def, params):\n\n # pull out algorithm-specific parameters\n Omega = params['Omega']\n\n # create the function that we will 
maximize, Z\n def Z(lam, N, sigma2, R, f_def, Omega):\n \"\"\"A function, the maximization of which is equivalent to the\n maximization of \"\"\"\n\n A = - np.sum(f_def * np.exp(- np.sum((lam * R.T).T, axis=0)))\n B = - (Omega * np.sum(lam**2 * sigma2))**(0.5)\n C = - np.sum(N * lam)\n\n # negate because it's a minimization\n return - (A + B + C)\n\n # create a lambda\n lam = np.ones(len(N))\n\n # apply the simulated annealing to the Z\n mk = {'args': (N, sigma2, R, f_def, Omega)}\n lam = basinhopping(Z, lam, minimizer_kwargs=mk).x\n\n # back out the spectrum values from the lam\n return f_def * np.exp(-np.sum((lam * R.T).T, axis=0))", "def max_power_existing_solar_rule(_m, g, y, s, t):\r\n\r\n return m.p[g, y, s, t] - (m.Q_S[g, y, s, t] * m.P_MAX[g] * (1 - m.F[g, y])) <= 0", "def get_max_dwell_mvals(model, state_data):\n dwell_results = []\n for ind in range(len(state_data)):\n ind_dwell = (model.times >= state_data['tstart'][ind]) & (model.times <= state_data['tstop'][ind])\n if np.any(ind_dwell):\n dwell_results.append(np.max(model.mvals[ind_dwell]))\n else:\n dwell_results.append(-1.0e6)\n return tuple(dwell_results)", "def _psp_max_time(rise, decay, rise_power):\n return rise * np.log(1 + (decay * rise_power / rise))", "def times_and_values_maxima(time, values, start_time=0.0, end_time=10000000):\n end_index = np.where(time <= end_time)[0][-1]\n start_index = np.where(start_time <= time)[0][0]\n time = time[start_index:end_index+1]\n values = values[start_index:end_index+1]\n\n # time, values = np.where(start_time <= time <= end_time, time, values)\n\n # these are the maxima of the graphs, will be used for fitting to look at trend\n max_index = (np.diff(np.sign(np.diff(values))) < 0).nonzero()[0] + 1\n time_max = time[max_index]\n value_max = values[max_index]\n return time_max, value_max", "def EvolveSystem(initial_conditions, tMax, n, energy, sigma):\n p1 = zeros([n, 2])\n v1 = zeros_like(p1)\n p2 = zeros([n, 2])\n v2 = zeros_like(p2)\n dt = tMax/n\n p1[0], v1[0], p2[0], v2[0] = initial_conditions\n for i in range(n-1):\n p1[i+1], v1[i+1], p2[i+1], v2[i+1] = Leapfrog2D(p1[i], v1[i], p2[i], v2[i], dt, energy, sigma)\n return p1, v1, p2, v2", "def _pred_mag(self,params: ndarray, times: ndarray) -> ndarray:\n tE = np.exp(params[0])\n A0 = np.exp(params[1])\n deltaT = np.exp(params[2])\n fbl = params[3]\n mb = params[4]\n\n u0 = np.sqrt((2*A0/np.sqrt(A0**2-1))-2)\n u = np.sqrt(u0**2+((times-deltaT-self.alert_time)/tE)**2)\n Amp = (u**2+2) / (u*np.sqrt(u**2+4))\n pred_mag = mb - 2.5*np.log10(fbl*(Amp-1)+1)\n\n return pred_mag", "def find_max_x(self, Ns=50):\n with self.fix_evaluator():\n x0 = brute(lambda x: -self(x[0])[0], [[0, np.pi]], Ns=Ns,\n finish=None)\n res = minimize_scalar(\n lambda x: -self(x)[0],\n bracket=(x0, np.pi/Ns), bounds=(0, np.pi), method='bounded',\n options=dict(xatol=1e-12)\n )\n return res.x", "def max_power_candidate_solar_rule(_m, g, y, s, t):\r\n\r\n return m.p[g, y, s, t] - (m.Q_S[g, y, s, t] * sum(m.x_c[g, j] for j in m.Y if j <= y)) <= 0", "def max_power_existing_thermal_rule(_m, g, y, s, t):\r\n\r\n return m.p[g, y, s, t] - (m.P_MAX[g] * (1 - m.F[g, y])) <= 0", "def max_evidence(self):\n self.A = np.linalg.inv(self.Sn)\n A_eigval = np.linalg.eigvals(self.A)\n gamma = 0\n for i in range(len(A_eigval)):\n gamma += A_eigval[i]/(self.alpha + A_eigval[i])\n new_alpha = gamma/([email protected])\n\n sum = 0\n for i in range(self.n):\n sum +=(self.t[i][email protected]_matrix[i])**2\n new_beta = 1/((1/(self.n-gamma))*sum)\n\n return new_alpha, new_beta", "def 
peak_time(self):\n return np.array([self.wftime[ch][self.waveform[ch].argmax()] for ch in range(self.nchannels)])", "def alturamax(gravedad, veli):\r\n #se realiza varias operacione para encontrar la altura maxima \r\n maxima=(veli/2)*(veli/gravedad)\r\n #se regresa el valor de maxima\r\n return maxima", "def evaluate(self, time) -> float:\n ...", "def rmax(env, gamma, m, R_max, epsilon, num_episodes, max_step = 6):\n\n Q = np.ones((env.nS, env.nA)) * R_max / (1 - gamma)\n R = np.zeros((env.nS, env.nA))\n nSA = np.zeros((env.nS, env.nA))\n nSASP = np.zeros((env.nS, env.nA, env.nS))\n ########################################################\n # YOUR CODE HERE #\n ########################################################\n\n # Generate episodes\n average_scores = []\n accum = 0.0\n term = int(np.log(1 / (epsilon * (1 - gamma))) / (1 - gamma))\n for i in xrange(num_episodes):\n S = env.reset()\n done = False\n episode_reward = 0.0\n n_steps = 0\n\n while not done:\n\n if n_steps >= max_step:\n break\n\n A = np.argmax([Q[S,a] for a in range(env.nA)])\n\n # Make an action\n nextS, reward, done, _ = env.step(A)\n episode_reward += reward\n\n # R-Max\n if nSA[S, A] < m:\n nSA[S, A] += 1\n R[S, A] += reward\n nSASP[S, A, nextS] += 1\n\n if nSA[S, A] == m:\n for j in range(term):\n for S_bar in range(env.nS):\n for A_bar in range(env.nA):\n if nSA[S_bar, A_bar] >= m:\n N = float(nSA[S_bar, A_bar])\n T_hat = nSASP[S_bar, A_bar, :] / N\n R_hat = R[S_bar, A_bar] / N\n Q[S_bar, A_bar] = R_hat\n Q[S_bar, A_bar] += gamma * np.sum(T_hat * np.max(Q, axis=1))\n\n\n # Update Q-value\n S = nextS\n n_steps += 1\n\n accum += episode_reward\n average_scores.append(accum/(i+1))\n\n plt.plot(average_scores[:10000], label=\"m=%d\"%(m))\n\n ########################################################\n # END YOUR CODE #\n ########################################################\n return Q", "def auxmaxf2(x):\n # Sum over data points\n f = 0.0\n for m_ind in range(cfg.ntrain):\n f += auxmaxrho1(x,m_ind) + auxmaxrho2(x,m_ind) \n \n return f", "def FigA7(case):\n \n #set the parameter, arrays\n \n n_array=np.array([1,2,3])\n\n #set the result arrays\n if case==0:\n class_number=5\n elif case==1:\n class_number=6\n fate=np.zeros([class_number])#number of evolutionary fate\n fate_matrix=np.zeros([np.size(n_array),np.size(fate)])\n \n time=np.linspace(0,100000, 1000000)\n loop=10**6\n \"\"\"\n 0 Co and/or Ch cannot survive in mono-culture\n 1 Co cannot invade\n 2 Only equilibrium of exclusion is stable\n 3 Only equilibrium of coexistence is stable\n 4 Two equilibria are UNstable\n 5 two Equilibrium are stable (which may occur only when sCO vs rCh)\n \"\"\"\n for tri in range(np.size(n_array)):\n counter=0\n n=n_array[tri]\n print(str(\"Hill coefficient is %d\" %(n)))\n fate=np.zeros([class_number])#number of evolutionary fate should be reset\n if case==0 or case==1:\n fname=str('parameter-sweep-MC-n%d-case%d' %(n, case))\n else:\n print(\"Error in case\")\n return 1\n \n for i in range(loop):\n if(i+1)%10000==0:\n print(i+1)\n Ks,cd,T0, alpha,=np.random.uniform(0,1,4)\n Kr,cr=np.random.uniform([Ks,0],[1,1],2)#Kr>Ks and cr.cd\n #check whether r is positive or not\n if case==0:\n r1=rmax*(1-cr-cd)#rCO\n r2=rmax#sCH\n W0Co=r1-dmax*T0**n/(T0**n+Kr**n)-alpha#initial growth of Cooperator\n W0Ch=r2-dmax*T0**n/(T0**n+Ks**n)-alpha#initial growth of Cheater\n elif case==1:\n r1=rmax*(1-cd)#sCo\n r2=rmax*(1-cr)#rCh\n W0Co=r1-dmax*T0**n/(T0**n+Ks**n)-alpha\n W0Ch=r2-dmax*T0**n/(T0**n+Kr**n)-alpha\n stab_e=0#initialize the falgs of 
stability\n stab_c=0\n if W0Co<0 or W0Ch<0:\n fate[0]+=1\n res=0\n else:\n #succeed in mono-culture \n init=np.array([T0,10**(-6)])\n if case==0: \n solCo=odeint(DyCoop, init, time, args=(T0, r1, Kr, alpha, n))\n Ts=solCo[-1,0]\n #x1s=solCo[-1,1]\n solCh=odeint(DyCheat, init, time, args=(T0, r2, Ks, alpha, n))\n x2s=solCh[-1,1]\n else:\n solCo=odeint(DyCoop, init, time, args=(T0, r1, Ks, alpha, n))\n Ts=solCo[-1,0]\n #x1s=solCo[-1,1]\n solCh=odeint(DyCheat, init, time, args=(T0, r2, Kr, alpha, n))\n x2s=solCh[-1,1]\n \n #Evolutionary dynamics \n if case==0:\n K=Kr\n else:\n K=Ks\n if r1*(1-x2s)-dmax*T0**n/(T0**n+K**n)<alpha:\n #Co cannot invade\n fate[1]+=1\n res=1\n else:\n #Co can invade\n #calculate Tdagger Td and check whether coexist or exclude\n if case==0:\n #rCo vs sCh\n #in this case, at most one equilbrium is stable\n tau=Quad(case,alpha,cr+cd,0,Kr, Ks, n)\n Td=tau**(1/n)\n if Td<Ts:\n #Co exclude Ch\n fate[2]+=1\n res=2\n else:\n x1d=alpha*Kd*(T0-Td)/(fmax*Td-alpha*(T0-Td))\n x2d=1-x1d-(dmax*Td**n/(Td**n+K**n)+alpha)/r1\n #check the stability condition\n stab=Stab_cond(alpha, T0, Td,x1d,x2d, r1,r2,n, K)\n if stab==0:\n #stable coexistence\n fate[3]+=1\n res=3\n else:\n #unstable coexistence nor exclusion\n fate[4]+=1\n res=4\n print(Td, x1d, x2d)\n else:\n #sCo vs rCh\n # in this case two equilibria can be stable at the same time\n [tau_p,tau_m]=Quad(case,alpha,cd,cr,Ks, Kr, n)\n if tau_m>Ts**n or tau_p<Ts**n:\n # cexclusion is stable\n stab_e=1\n # stability in coexistence \n if tau_p<0:\n stab_c=0\n else:\n Td=tau_p**(1/n)\n x1d=alpha*Kd*(T0-Td)/(fmax*Td-alpha*(T0-Td))\n x2d=1-x1d-(dmax*Td**n/(Td**n+K**n)+alpha)/r1\n #check the stability condition\n stab=Stab_cond(alpha, T0, Td,x1d,x2d, r1,r2,n, K)\n if stab==0:\n #stable coexistence\n stab_c=1\n #classify\n if stab_e==1 and stab_c==1:\n # two stable equilbria\n fate[5]+=1\n res=5\n elif stab_e==1 and stab_c==0:\n #only stable cexclusion\n fate[2]+=1\n res=2\n elif stab_e==0 and stab_c==1:\n #stable coexistence\n fate[3]+=1\n res=3\n else:\n #both unstable\n fate[4]+=1\n res=4\n \n #save the results\n if counter==0:\n result=np.array([[Ks, Kr, cr, cd, alpha, T0,res]])\n #save the result with parameter values\n \n else:\n #add array of results\n R=np.array([[Ks, Kr, cr, cd, alpha, T0,res]])\n result=np.concatenate((result, R), axis=0)\n counter+=1\n \n #save csv file and graph\n np.savetxt(fname+'.csv',result, delimiter=',', header='Ks, Kr, cr, cd, alpha, T0, class', fmt='%.6f') \n print(fate)\n fate_matrix[tri,:]=fate \n if case==0: \n np.savetxt('parameter_sweep_MC_total_case0.csv',fate_matrix, delimiter=',', header='cl0,l1,cl2,cl3,cl4', fmt='%d')\n else:\n np.savetxt('parameter_sweep_MC_total_case1.csv',fate_matrix, delimiter=',', header='cl0,l1,cl2,cl3,cl4,cl5', fmt='%d')\n Plot(case)" ]
[ "0.6125524", "0.5842367", "0.5835992", "0.5787089", "0.5771827", "0.5767159", "0.5761559", "0.57614523", "0.5683583", "0.56713635", "0.56365997", "0.5625341", "0.56223917", "0.5598241", "0.5570882", "0.55639154", "0.55524063", "0.5544458", "0.55353314", "0.55329156", "0.5531393", "0.5526634", "0.5521958", "0.5520476", "0.5504573", "0.5502676", "0.54970765", "0.5488066", "0.5485663", "0.54641354" ]
0.7328528
0
It prints the evolution of the maximum of the realizations of the process at given times. Returns None.
def printEvolutionMaximum(self):
    evolutionMaximum = self.getEvolutionMax()
    print(evolutionMaximum)
    print("The path of the maximum evolution is the following:")
    print()
    print('\n'.join('{:.3}'.format(max) for max in evolutionMaximum))
    print()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getEvolutionMax(self):\n \n return [self.getMaximumAtGivenTime(timeIndex) for timeIndex in range(self.numberOfTimes - 1)]", "def plotEvolutionMaximum(self):\n evolutionMaximum = self.getEvolutionMax();\n plt.plot(evolutionMaximum)\n plt.xlabel('Time')\n plt.ylabel('Maximum realizations')\n plt.show()", "def main():\n\n\t# eesAmplitudes = range(200,321,10)\n\teesAmplitudes = [\"%\"+\"%.2f_0_0\"%(i) for i in np.arange(0,1.01,.05)]\n\t# eesFrequencies = range(10,1001,20)\n\teesFrequencies = np.logspace(1,3,50)\n\t# nrnStructureFile = \"fsSFrFfMnArtMod.txt\"\n\t# nrnStructureFile = \"fsSFrFfMnArtModHuman.txt\"\n\tnrnStructureFile = \"fsMnArtModHuman.txt\"\n\t# name = \"FreqAmpModHuman_0367S\"\n\tname = \"FreqAmpModHuman_ArtmodHuman_10msBurst\"\n\n\tnSim = len(eesFrequencies)*len(eesAmplitudes)\n\tcount=0.\n\tpercLastPrint=0.\n\tprintPeriod = 0.05\n\t# simTime = 250\n\tsimTime = 15\n\tspecies = \"human\"\n\n\tfor eesAmplitude in eesAmplitudes:\n\t\tfor eesFrequency in eesFrequencies:\n\t\t\tfilName = name+\"_amp_\"+str(eesAmplitude)+\"_freq_\"+str(eesFrequency)\n\t\t\tresultFile = gt.find(\"*\"+filName+\".p\",pathToResults)\n\t\t\tif not resultFile:\n\t\t\t\treturnCode = None\n\t\t\t\twhile not returnCode==0:\n\t\t\t\t\tprogram = ['python','scripts/computeAfferentsEfferentsModulation.py',\n\t\t\t\t\t\tstr(eesFrequency),str(eesAmplitude),species,nrnStructureFile,name,\"--simTime\",str(simTime)]\n\t\t\t\t\tprint \" \".join(program)\n\t\t\t\t\tforwardSimulation = subprocess.Popen(program, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\t\t\t\t\treturnCode = None\n\t\t\t\t\twhile returnCode is None:\n\t\t\t\t\t\tmessage = forwardSimulation.stdout.readline().rstrip(\"\\n\").split()\n\t\t\t\t\t\tif message != None:print \"\\t\\t\"+\" \".join(message)+\"\\t\\t\"\n\t\t\t\t\t\treturnCode = forwardSimulation.poll()\n\t\t\t\t\tif returnCode != 0: print \"\\t\\t\\t\\t Error n: \",forwardSimulation.poll(),\" resetting simulation...\"\n\t\t\tcount+=1\n\t\t\tif count/nSim-percLastPrint>=printPeriod:\n\t\t\t\tpercLastPrint=count/nSim\n\t\t\t\tprint str(round(count/nSim*100))+\"% of simulations performed...\"\n\tplot_stats(eesAmplitudes,eesFrequencies,simTime,name)", "def run(self, times=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,13, 17, 21, 25, 29, 33, 37, 41, 45, 49, 52, 53, 54]):\n self.mse_per_step = np.array([])\n self.all_alphas = np.array([])\n start_time = time.time()\n for t in times:\n mse, alphas = self.one_step(t)\n self.mse_per_step = np.append(self.mse_per_step, mse)\n self.all_alphas = np.append(self.all_alphas, alphas)\n np.savetxt('alphas_per_T_new2.txt', self.all_alphas)\n\n\n print(\"Done in\", time.time() - start_time, \"seconds.\")\n print(\"T's\", times)\n print(\"Scores:\", self.mse_per_step)", "def energies():\n # Hardcoded initial values\n numsteps = 10000\n time_max = 1\n # Running the calculation in the solver class using the velocity verlet method\n # for better accuracy.\n verlet = solver(input_matrix, 'verlet', time_max, numsteps)\n output_matrix, KE, PE, AM = verlet.main()\n # Creating a simple time axis for plotting\n x = np.linspace(0, 1, numsteps+1)\n\n # Plotting kinetic energy over time\n plt.figure(1, figsize=(10, 10))\n plt.plot(x, KE)\n plt.suptitle('Total kinetic energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE'])\n\n # Plotting potential energy over time\n plt.figure(2, figsize=(10, 10))\n plt.plot(x, PE)\n plt.suptitle('Total potential 
energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE'])\n\n # Plotting total energy against time\n plt.figure(3, figsize=(10, 10))\n plt.plot(x, PE+KE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE+PE'])\n\n # Plotting angular momentum against time. print the amplitude to terminal\n amplitude = max(AM)-min(AM)\n print('Amplitude of angular momentum during 1 year: %g[AU²/yr²]' %(amplitude))\n plt.figure(4, figsize=(10, 10))\n plt.plot(x, AM)\n plt.suptitle('Total angular momentum in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²/yr²]', fontsize=16)\n plt.legend(['AM'])\n\n # Plotting the kinetic, potential and total energy against time to see\n # how great the variations are\n plt.figure(5, figsize=(10, 10))\n plt.plot(x, PE, x, KE, x, KE+PE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE', 'KE', 'KE+PE'])\n plt.show()", "def maxtimes(self):\n vas = []\n file = self.read1()\n for line in file:\n line = line.strip()\n string = re.sub(\"[^0-9a-zA-Z]\", \" \", line).split(\" \")\n for s_i in string:\n vas.append(s_i)\n count = 0\n num = vas[0]\n for i in vas:\n freq = vas.count(i)\n if (freq > count): #pylint: disable = superfluous-parens\n count = freq\n num = i\n vas1 = []\n vas1.append(num)\n self.print(vas1)\n self.write(vas1)\n logging.debug(\"Starting with to\")\n return vas1", "def update_results_display(self, max_a=-1.0):\n self.outer_time_line_edit.setText(str(self.qr_polytraj.data_track.outer_opt_time))\n self.comp_time_line_edit.setText(str(self.qr_polytraj.data_track.optimise_time))\n self.traj_time_line_edit.setText(str(np.sum(self.qr_polytraj.times)))\n self.traj_accel_line_edit.setText(str(max_a))\n self.iteration_line_edit.setText(str(self.qr_polytraj.data_track.iterations))\n # print(\"Astro cost is: {}\".format(self.qr_polytraj.data_track.cost[-1]))", "def video_times():\n p = parse_cmdline(get_parser=get_parser_times)\n log.setup_main_handler(\n mods=(\"fogtools\", \"typhon\", \"fogpy\", \"sattools\", \"fcitools\", \"satpy\",\n \"pyresample\"),\n level=logging.DEBUG)\n vis.show_video_abi_glm_times(\n start_date=p.start_time,\n end_date=p.end_time,\n img_out=p.filename_pattern_image,\n vid_out=p.filename_pattern_video,\n out_dir=p.outdir,\n sector=p.sector,\n area=p.area)\n print(\"Files written to:\", p.outdir)", "def print_fun_facts(num_hours, num_minutes):\n\n # If the number of hours are less than 1, there are no real analytics that\n # can be given to the user, so the program exits\n if num_hours < 1:\n os._exit(1)\n\n print(\"\\nIn the time you spent on league, here's some things you\", \n \"could have done:\")\n\n # Get the total number of minutes that the user spent playing league in the\n # last week\n total_mins = num_hours * 60 + num_minutes\n\n # Number of hours it takes to fly coast to coast\n hours_to_fly_from_la_to_nyc = 5\n\n # Find how far or how many times the user could have flown coast to coast\n flying_data = time_to_perform_task(total_mins, hours_to_fly_from_la_to_nyc)\n\n # Check if the data returned is not a whole number, but a percentage\n # This will occur if hte user hasn't played enough league to complete more\n # than 1 flight from coast to 
coast\n if flying_data[0]:\n print(\"- Flown \", flying_data[1],\"% of the way from LA to NYC\", sep='')\n else:\n print(\"- Flown from LA to NYC\", flying_data[1], \"times\")\n\n # Repeating the same process, but with the Great Gatsby\n hours_to_read_great_gatsby = 2.62\n gatsby_data = time_to_perform_task(total_mins, hours_to_read_great_gatsby)\n if gatsby_data[0]:\n print(\"- Read \", gatsby_data[1],\"% of The Great Gatsby\", sep='')\n else:\n print(\"- Read The Great Gatsby \", gatsby_data[1], \" times\", sep='')\n \n # Again repeating the same process to print analytics about Avengers: Endgame\n hours_to_watch_endgame = 3.2\n endgame_data = time_to_perform_task(total_mins, hours_to_watch_endgame)\n if endgame_data[0]:\n print(\"- Watched \", endgame_data[1],\"% of Avengers: Endgame\", sep='')\n else:\n print(\"- Watched Avengers: Endgame \", endgame_data[1], \" times\", sep='')", "def compare_hard_noprint():\n\n\n cores = [1, 2, 4, 8, 16]\n s_times = []\n\n for n_cores in cores:\n print('analyzing', '-'.join(['data/bench', 's', 'np', str(n_cores)]) + '.out')\n data = split_output_file('-'.join(['data/bench', 's', 'np', str(n_cores)]) + '.out')\n s_times.append(data['FOOTER']['total_time'])\n\n # speedup plot\n fig, axs = plt.subplots()\n\n axs.plot(cores, [s_times[0] / x / k for x, k in zip(s_times, cores)], label='sequential')\n axs.set_xticks(cores)\n axs.ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs.set_ylabel('Relative speedup')\n axs.set_xlabel('Number of cores')\n axs.set_ylim(0, 1.1)\n\n fig.set_size_inches(4, 2, forward=True)\n plt.tight_layout()\n\n plt.show()", "def exercise_gen(ret_val, times):", "def _disp(self, t_elapsed):\n disp_str = \"Epoch: %4d/%4d | Duration: %6.2f secs\" % \\\n (self.iteration, self.Nepochs, t_elapsed) \n disp_str += \" | Objective: %4e\" % self.of_list[-1]\n if self.disp_p:\n disp_str += \" | Parameters: %s\" % self.params\n print(disp_str)", "def max_time(self) -> float:\r\n if(len(self.operations_by_name) == 0):\r\n return -1\r\n return max(map(lambda x: x[\"time_step\"], self.operations_by_name.values()))", "def value_printer():#todo: add all wanted items\n print(\"Max ascent speed = \"+ max_ascent_speed() + \" m/s\")\n print(\"Max ascent acceleration = \" + ascent_acc() + \" m/s^2\")\n print(\"Max ascent acceleration = \" + descent_acc() + \" m/s^2\")\n print(\"Max acceleration = \" + acc() + \" m/s^2\")", "def run():\n\n # Build list of stations\n stations = build_station_list()\n list_of_rivers_numbers=rivers_by_station_number(stations, 9)\n print(\"Rivers with greatest number of stations: {}\".format(list_of_rivers_numbers))", "def Log(self, times):\n\n print '--'\n print times.PrettyPrintLog()\n\n return", "def main(num, li1, list2):\n li1 = [[float(input()), float(input())] for i in range(num)]\n list2 = [li1[i][1]/li1[i][0] for i in range(num)]\n li1.sort(key=lambda x: x[0])\n for i in range(num):\n if li1[i][1]/li1[i][0] == max(list2):\n return print(\"%.2f %.2f\"%(li1[i][0], li1[i][1]))", "def max(ev):\n profData = getProfilingData(ev)\n if profData is not None:\n return profData.Tmax()\n return \"\"", "def updatefig(*args):\n p1.set_array(turn(grid))\n p2.set_data(tally['time'], tally['sickos'])\n p3.set_data(tally['time'], tally['immune'])\n p4.set_data(tally['time'], tally['dead'])\n ax2.set_xlim(0, max(tally['time']))\n # ax2.set_ylim(0, max(max(sickos), max(immune)))\n # End sim if the disease is gone\n if tally['sickos'][-1] == 0:\n ani.event_source.stop()\n end_time = time.process_time()\n show_summary()\n 
print(\"Process time:\", end_time - start_time)\n return p1, p2, p3, p4,", "def compare_hard():\n\n\n cores = [1, 2, 4, 8, 16]\n s_times = [[], [], []]\n g_times = [[], [], []]\n\n for simulator in ['g', 's']:\n for n_cores in cores:\n for i, size in enumerate([1, 10, 100]):\n ss = str(size) + 'k'\n\n print('analyzing', '-'.join(['data/bench', str(simulator), ss, str(n_cores)]) + '.out')\n data = split_output_file('-'.join(['data/bench', str(simulator), ss, str(n_cores)]) + '.out')\n if simulator == 'g':\n g_times[i].append(data['FOOTER']['total_time'])\n if simulator == 's':\n s_times[i].append(data['FOOTER']['total_time'])\n\n # absolute time plot\n fig, axs = plt.subplots(3)\n\n for i in range(3):\n axs[i].plot(cores, s_times[i], label='sequential')\n axs[i].plot(cores, g_times[i], label='GPU')\n # axs[i].set_yticks([])\n axs[i].set_xticks(cores)\n axs[i].set_title(str([1, 10, 100][i]) + 'k population size')\n axs[i].ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs[0].legend()\n axs[1].set_ylabel('Total simulation time [ms]')\n axs[2].set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 6, forward=True)\n plt.tight_layout()\n\n plt.show()\n\n # speedup plot\n fig, axs = plt.subplots(3)\n\n for i in range(3):\n axs[i].plot(cores, [s_times[i][0] / x / k for x, k in zip(s_times[i], cores)], label='sequential')\n axs[i].plot(cores, [g_times[i][0] / x / k for x, k in zip(g_times[i], cores)], label='GPU')\n # axs[i].plot([0, 16], [0, 16], label='theoretical')\n # axs[i].set_yticks([])\n axs[i].set_xticks(cores)\n axs[i].set_title(str([1.4, 14, 140][i]) + 'k population size')\n axs[i].ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs[0].legend()\n axs[1].set_ylabel('Relative speedup')\n axs[2].set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 6, forward=True)\n plt.tight_layout()\n\n plt.show()\n\n # scaling plot\n fig, axs = plt.subplots(1)\n\n axs.plot([1400, 14000, 140000], [s_times[i][0] for i in range(3)], label='seqential')\n axs.plot([1400, 14000, 140000], [g_times[i][0] for i in range(3)], label='GPU')\n axs.set_xticks(cores)\n axs.set_title(str([1.4, 14, 140][i]) + 'k population size')\n axs.ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs.legend()\n axs.set_ylabel('Relative speedup')\n axs.set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 2, forward=True)\n plt.tight_layout()\n\n plt.show()", "def runTimingTests(c, startNx, endNx, stepNx, displayResults = False):\n timesArray = []\n nxs = np.empty(shape=[0])\n iteration = 0\n\n for currNx in range(startNx, endNx, stepNx):\n nx = currNx\n nt = nx\n nxs = np.append(nxs, nx)\n _, timesSmooth, _, _ = main(nx, nt, c, displayResults = False)\n timesArray = np.append(timesArray, timesSmooth)\n iteration = iteration+1\n \n timesArray = timesArray.reshape(iteration, len(timesSmooth)) \n timesArray = np.matrix.transpose(timesArray)\n logNxs = np.log10(nxs)\n logTimes = np.log10(timesArray)\n methods = [\"FTBS\", \"CTCS\", \"CNCS\", \"LaxWendroff\"]\n if(display):\n for i in range (0, 4):\n plt.plot(logNxs, logTimes[i], label=methods[i])\n coeff = np.polyfit(logNxs,logTimes[i],1)\n print(\"Estimated order of magnitude time vs nx \"\\\n +methods[i]+\": \"+str(coeff[0]))\n plt.title(\"Log-log plot time of execution in s vs nx\\nc=\"+str(c))\n plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)\n plt.show()", "def M_D_1(arrival_time,max_time,service_time=1/90):\n #conversion in seconds\n max_seconds = max_time*60*60\n sim_time = 0.0 # simulation time\n t_1 = 0.0 # time for next event 
(arrival)\n t_2 = max_seconds # time for next event (departure)\n t_n = 0.0 #last event time--> tempo dell'ultimo avvenimento generico\n t_b = 0.0 # last start of busy time--> tempo in cui la queue inizia ad essere non vuota per l'ultima volta\n c = 0 # numero di servizi completati\n queue_aircraft = [] # number of aircraft in the queue\n aircraft = 0\n arrival = [] # time of arrival\n attesa = [] # attesa per gli aerei-->NON SICURO CHE SI CALCOLI COSI'\n # simulation loop\n while(sim_time < max_seconds):\n if(t_1<t_2): #event1:arrival\n sim_time = t_1\n arrival.append(t_1)\n aircraft += 1\n queue_aircraft.append(aircraft)\n t_n = sim_time\n t_1 = sim_time + rm.expovariate(arrival_time)\n if(aircraft==1):\n t_b = sim_time\n t_2 = sim_time + 1/service_time\n else:\n sim_time = t_2\n aircraft = aircraft -1\n queue_aircraft.append(aircraft)\n t_n = sim_time\n attesa.append( t_2 - arrival[c])\n c+=1\n if(aircraft>0):\n t_2=sim_time + 1/service_time\n else:\n t_2 = max_seconds\n\n\n\n\n return queue_aircraft,arrival,attesa", "def run_delayed_ssa(system):\n \n #vars used in the simulation\n time = 0 #unitless\n end_time = system['sim-time']\n species = system['participants']\n parameters = system['parameters']\n events = system['events']\n prop_funcs = {}\n exec_funcs = {}\n props = {}\n delays = {}\n last_exec_time = {}\n \n #return values\n time_array = []\n species_array = []\n \n #populate results array\n time_array = [time]\n row = [0]*len(species)\n species_names = [''] * len(species)\n \n #create species vars so that rate code can be executed\n i = 0\n for name in species:\n species_names[i] = name\n exec( name + '=' + str(species[name]) )\n row[i] = species[name]\n i += 1\n species_array.append(row)\n \n #create parameter vars so that rate code can be executed\n for name in parameters:\n exec( name + '=' + str(parameters[name]) )\n\n #create (compile) functions from input strings for rates and events\n for name in events:\n if events[name].get('delay'):\n delays[name] = events[name]['delay']\n else:\n delays[name] = 0.0\n last_exec_time[name] = -1\n props[name] = 0.0\n prop_funcs[name] = compile(\"props['\" + name + \"'] = \" + str(events[name]['propensity']), 'prop_funcs_'+name, 'exec')\n exec_funcs[name] = compile(events[name]['consequence'], 'exec_funcs_'+name, 'exec')\n \n #MAIN LOOP\n while time < end_time:\n \n #calculate propensities\n for name in props:\n exec(prop_funcs[name])\n if delays[name] > 0 and delays[name] + last_exec_time[name] < time:\n print(name)\n props[name] = 0.0\n \n #calculate total of all propensities\n total_prop = 0\n for name in props:\n total_prop += props[name]\n \n \n u = random.uniform(0,total_prop)\n usum = 0\n lucky = None\n for name in props:\n usum += props[name]\n if usum > u:\n lucky = name\n break\n\n #fire that reaction\n if lucky:\n last_exec_time[lucky] = time\n exec(exec_funcs[lucky])\n \n \n row = [0]*len(species)\n i = 0\n for name in species:\n row[i] = eval(name)\n i += 1\n time_array.append(time)\n species_array.append(row)\n \n #update next time using exp distrib\n if total_prop == 0.0: #jump to next delay\n lowest_delay = inf\n for name in props:\n if delays[name] > 0 and delays[name] < lowest_delay:\n lowest_delay = delays[name]\n time += lowest_delay\n else:\n dt = random.exponential(1.0/total_prop)\n time += dt\n\n #END MAIN LOOP\n\n result = {'time':time_array, 'participants':species_array, 'headers': species_names}\n return result", "def E_vs_length(Emax, Emin, wmax=90, wmin=10, Lmax=1000, Lmin=102.4, p=75,\r\n fmax=5.7e9, 
p1=database['K+'], p2=database['pi+'],\r\n p3=database['p+'], delta_p=1.6e-2, nE=10, nw=10, nL=10, ng=50,\r\n nl=50, nf=20, L_resolution=0.01, w_resolution=0.01, plot=True,\r\n set_freq=5.7e9, count_L=False, count_w=True, count_E=True):\r\n E_range = np.logspace(np.log10(Emin), np.log10(Emax), int(nE))\r\n plot_E, w, length, intensity = [], [], [], []\r\n for E in E_range:\r\n if count_E == True:\r\n if count_w == True:\r\n print(f'E = {round(E, -3)} MV/m')\r\n else:\r\n print(E)\r\n output = efficiency_vs_w(wmax, wmin, Lmax, Lmin, p, fmax, p1, p2, p3,\r\n E, delta_p, nw, nL, ng, nl, nf, L_resolution,\r\n w_resolution, False, set_freq, count_L, count_w)\r\n if output != None:\r\n plot_E.append(E*1e-6)\r\n w.append(output[0])\r\n length.append(output[1])\r\n intensity.append(output[2])\r\n file = open(f'Save_Data_{Emin}_{Emax}.txt','a')\r\n file.write(f'{[plot_E[-1], w[-1], length[-1], intensity[-1]]}\\n')\r\n file.close()\r\n if plot == True:\r\n fig = plt.figure(figsize=[10, 5])\r\n ax = fig.add_subplot(1, 1, 1)\r\n fig.subplots_adjust(right=0.75)\r\n line1, = ax.plot(plot_E, length, 'r', lw=2, label='Target Distance')\r\n ax2 = ax.twinx()\r\n line2, = ax2.plot(plot_E, intensity, 'g', lw=2, label='Intensity Required')\r\n ax3 = ax.twinx()\r\n ax3.spines['right'].set_position(('axes', 1.2))\r\n make_patch_spines_invisible(ax3)\r\n ax3.spines['right'].set_visible(True)\r\n line3, = ax3.plot(plot_E, w, 'b', lw=2, label='Collimator Width')\r\n ax.set_xlabel(r'Electric Field Strength / MVm$^{-1}$', fontsize=20)\r\n ax.set_xlim(np.min(plot_E), np.max(plot_E))\r\n ax.set_ylabel('Target Distance / m', fontsize=20, color=line1.get_color())\r\n ax2.set_ylabel(r'Intensity / I$_0$', fontsize=20, color=line2.get_color())\r\n ax3.set_ylabel('Collimator Width / mm', fontsize=20, color=line3.get_color())\r\n ax.tick_params(axis='y', colors=line1.get_color())\r\n ax2.tick_params(axis='y', colors=line2.get_color())\r\n ax3.tick_params(axis='y', colors=line3.get_color())\r\n lines = [line1, line2, line3]\r\n ax.legend(lines, [l.get_label() for l in lines], loc='upper center', fontsize=15)\r\n ax.set_xscale('log')\r\n ax.minorticks_on()\r\n ax2.minorticks_on()\r\n ax3.minorticks_on()\r\n ax.grid()\r\n plt.show()\r\n return [w, length, intensity]", "def test(iterations, max_escape_time):\n\ttest = iterations - np.ones((iterations.shape[0],iterations.shape[1])) * max_escape_time == 0.0\n\tif(test.all() == 0.0):\n\t\tmsg = \"all numbers in mandelbrot set\"\n\t\tprint (msg)\n\ttest = iterations - np.ones((iterations.shape[0],iterations.shape[1])) * max_escape_time == float(-max_escape_time)\n\tif(test.all()):\n\t\tmsg = \"all numbers outside mandelbrot set\"\n\t\tprint (msg)", "def expected_yields(A, lumi, masses):\n fmt = 'Expected numbers of H=>Zg=>eeg events for sqrt(s)=14TeV and lumi={0}/fb:'\n print(fmt.format(lumi))\n print(' ' + ' '.join('{0:6.1f}'.format(x) for x in masses))\n for proc in A['procs']:\n print(' {0:4s}'.format(proc + ':'), end='')\n\n for mass in masses:\n m = '{0:.1f}'.format(mass)\n nev_exp = A['xs'][(proc, m)] * lumi * 1000 * A['br'][m] * 0.033658\n\n print(' {0:6.1f}'.format(nev_exp), end='')\n\n print()", "def maxQ(self,feat):\r\n \r\n maxQ = float('-inf')\r\n maxA = 0\r\n for a in self.actions:\r\n q = self.Q(feat,a)\r\n print(q,a)\r\n if q > maxQ:\r\n maxQ = q\r\n maxA = a\r\n return(maxQ,maxA)", "def print_result(*Volumes):\n max_phi = -1/ureg.hr\n for volume in Volumes:\n if volume.phi > max_phi:\n max_volume = volume\n line_1 = '# Fatality rate for {} is {:.1e} # 
'.format(max_volume, volume.phi)\n pad = len(line_1)\n line_2 = '# Recommended ODH class {}'.format(max_volume.odh_class()).ljust(pad-1)+'#'\n print('#'*pad)\n print(line_1)\n print(line_2)\n print('#'*pad)", "def report(self):\n np.set_printoptions(formatter={'float': '{:.2e}'.format})\n print('---Differences of breakpoints---')\n all_max = np.max(self.max, axis=0, keepdims=False)\n print('Maximum:\\n', all_max)\n all_mean = np.mean(self.mean, axis=0, keepdims=False)\n print('Mean:\\n', all_mean)", "def printLatestMeasurement(self): \n data = self.tristimulus[len(self.tristimulus)-1]\n x = data[0]\n y = data[1]\n L = data[2]\n print\"(x,y) = ({0:.4f}, {1:.4f}), L = {2:.4f} cd/m2 ({3:.4f} fL)\".format( x, y, L, 0.291863508*L)" ]
[ "0.601353", "0.5673783", "0.5593407", "0.55719185", "0.5419212", "0.5312438", "0.5301795", "0.5280384", "0.52526265", "0.521304", "0.52068067", "0.52050275", "0.5189907", "0.5148449", "0.5141974", "0.5139584", "0.5132588", "0.5115675", "0.5103251", "0.5102717", "0.51026523", "0.5094036", "0.509224", "0.508876", "0.50820553", "0.50595313", "0.50573343", "0.503966", "0.5032527", "0.5024714" ]
0.6115887
0
It plots the evolution of the maximum of the realizations of the process at given times. Returns None.
def plotEvolutionMaximum(self):
    evolutionMaximum = self.getEvolutionMax()
    plt.plot(evolutionMaximum)
    plt.xlabel('Time')
    plt.ylabel('Maximum realizations')
    plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updatefig(*args):\n p1.set_array(turn(grid))\n p2.set_data(tally['time'], tally['sickos'])\n p3.set_data(tally['time'], tally['immune'])\n p4.set_data(tally['time'], tally['dead'])\n ax2.set_xlim(0, max(tally['time']))\n # ax2.set_ylim(0, max(max(sickos), max(immune)))\n # End sim if the disease is gone\n if tally['sickos'][-1] == 0:\n ani.event_source.stop()\n end_time = time.process_time()\n show_summary()\n print(\"Process time:\", end_time - start_time)\n return p1, p2, p3, p4,", "def plot_vanHove_dt(comp,conn,start,step_size,steps):\n \n (fin,) = conn.execute(\"select fout from comps where comp_key = ?\",comp).fetchone()\n (max_step,) = conn.execute(\"select max_step from vanHove_prams where comp_key = ?\",comp).fetchone()\n Fin = h5py.File(fin,'r')\n g = Fin[fd('vanHove',comp[0])]\n\n temp = g.attrs['temperature']\n dtime = g.attrs['dtime']\n\n\n # istatus = plots.non_i_plot_start()\n \n fig = mplt.figure()\n fig.suptitle(r'van Hove dist temp: %.2f dtime: %d'% (temp,dtime))\n dims = figure_out_grid(steps)\n \n plt_count = 1\n outs = []\n tmps = []\n for j in range(start,start+step_size*steps, step_size):\n (edges,count,x_lim) = _extract_vanHove(g,j+1,1,5)\n if len(count) < 50:\n plt_count += 1\n continue\n #count = count/np.sum(count)\n \n sp_arg = dims +(plt_count,)\n ax = fig.add_subplot(*sp_arg)\n ax.grid(True)\n\n \n alpha = _alpha2(edges,count)\n \n ax.set_ylabel(r'$\\log{P(N)}$')\n ax.step(edges,np.log((count/np.sum(count))),lw=2)\n ax.set_title(r'$\\alpha_2 = %.2f$'%alpha + ' j:%d '%j )\n ax.set_xlim(x_lim)\n plt_count += 1\n\n mplt.draw()\n\n # plots.non_i_plot_start(istatus)\n\n del g\n Fin.close()\n del Fin", "def showPlot2():\n interested_in = list(range(1,10))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(item, 1.0, 25, 25, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on number of robots')\n xlabel('number of robots (tiles)')\n ylabel('mean time (clocks)')\n show()", "def comp_time_plot(p1=database['K+'], p2=database['pi+'], pmax=80, plot=True):\r\n dt = []\r\n p_range = np.linspace(10, pmax, 1000)\r\n m1 = p1.mass\r\n m2 = p2.mass\r\n for p in p_range:\r\n t1_per_m = 76.273/(beta(p, m1)*gamma(p, m1)*c)\r\n t2_per_m = 76.273/(beta(p, m2)*gamma(p, m2)*c)\r\n dt.append(abs(t1_per_m - t2_per_m)*1e12)\r\n dt_12_5 = dt[np.argmin(abs(p_range-12.5))]\r\n dt_75 = dt[np.argmin(abs(p_range-75))]\r\n ratio = dt_12_5/dt_75\r\n if plot==True:\r\n fig = plt.figure(figsize=[10, 5])\r\n ax = fig.add_subplot(1, 1, 1)\r\n ax.plot(p_range, dt, 'b', label=r'$\\Delta t$')\r\n ax.axvline(12.5, color='r', label='p=12.5 GeV')\r\n ax.axvline(75, color='g', label='p=75 GeV')\r\n ax.set_xlim(10, pmax)\r\n ax.set_ylim(0)\r\n ax.set_xlabel('p / GeV', fontsize=20)\r\n# ax.set_yscale('log')\r\n ax.set_ylabel(r'$\\Delta t$ / ps', fontsize=20)\r\n title = f'{p1.name} to {p2.name} '\r\n title += r'$\\Delta t$ dependancy on particle momenta'\r\n ax.set_title(title, fontsize=20)\r\n ax.legend(fontsize=20)\r\n text = 'dt(12.5) = {0:.2f} ps, '.format(dt_12_5)\r\n text += 'dt(75) = {0:.2f} ps, '.format(dt_75)\r\n text += 'ratio = {0:.3f}'.format(ratio)\r\n plt.show()\r\n print(text)\r\n return [dt_12_5, dt_75, ratio]", "def plot_spectrum(sims, noise=False, maxtime=240):\n logging.log(15, \"starte plotting\")\n #ein Spektrum mit max 30 Chroms, gemeinsame Zeitenliste erstellen\n if 
len(sims) < 30:\n spectrum = [0,maxtime]\n #evtl Rauschen hinzufuegen\n if noise:\n for i in range(int(sims[0].number*len(sims)/10)):\n spectrum.append(random.uniform(0, maxtime))\n for sim in sims:\n for t in sim.times:\n if sim.pd[0] < 250:\n spectrum.append(t)\n hist, bins = np.histogram(spectrum, bins= maxtime, normed = True)\n offset = bins[1:]-bins[:-1]\n plt.plot(bins[:-1]+offset, hist, \"k\")\n #plt.ylim((0, 0.3))\n plt.xlim((0, maxtime))\n plt.xlabel(\"Retentionszeit/s\")\n plt.ylabel(\"Intensität\")\n title = \"Spektrum\"\n if noise:\n title += \" mit Rauschen\"\n plt.suptitle(title)\n plt.show()", "def showPlot1():\n\n interested_in = list(range(5,30,5))\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(1, 1.0, item, item, 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot(interested_in, proc_sim_data)\n title('Dependence of cleaning time on room size')\n xlabel('area of the room (tiles)')\n ylabel('mean time (clocks)')\n show()", "def plot(self, *args, **kwargs):\n\n n = len(args)\n\n self.fig, ax = plt.subplots(n,1)\n if 'title' in kwargs:\n self.fig.canvas.set_window_title(kwargs['title'])\n self.fig.suptitle(kwargs['title'], fontsize=11, fontweight='bold')\n if n == 1:\n ax.plot(self.vecs['time'], self.vecs[args[0]])\n ax.set_title('Time vs. ' + args[0].title())\n\n ax.set_ylabel(args[0].title())\n ax.set_xlim([self.vecs['time'][0], self.vecs['time'][-1]])\n\n else:\n for i in range(n):\n ax[i].plot(self.vecs['time'], self.vecs[args[i]])\n ax[i].set_title('Time vs. ' + args[i].title())\n ax[i].set_ylabel(args[i].title())\n ax[i].set_xlim([self.vecs['time'][0], self.vecs['time'][-1]])\n if i != (n-1):\n plt.setp(ax[i].get_xticklabels(), visible=False)\n else:\n ax[i].set_xlabel('Time')\n\n plt.tight_layout(h_pad=0.2)\n plt.subplots_adjust(top=0.85)\n plt.show()", "def results_plot_fuel_reactor(self):\n \n import matplotlib.pyplot as plt \n\n # Total pressure profile\n P = []\n for z in self.MB_fuel.z:\n P.append(value(self.MB_fuel.P[z]))\n fig_P = plt.figure(1)\n plt.plot(self.MB_fuel.z, P)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total Pressure [bar]\") \n\n # Temperature profile\n Tg = []\n Ts = []\n# Tw = []\n for z in self.MB_fuel.z:\n Tg.append(value(self.MB_fuel.Tg[z] - 273.15))\n Ts.append(value(self.MB_fuel.Ts[z] - 273.15))\n# Tw.append(value(self.MB_fuel.Tw[z]))\n fig_T = plt.figure(2)\n plt.plot(self.MB_fuel.z, Tg, label='Tg')\n plt.plot(self.MB_fuel.z, Ts, label='Ts')\n# plt.plot(self.MB_fuel.z, Tw, label='Tw')\n plt.legend(loc=0,ncol=2)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Temperature [C]\") \n \n # Superficial gas velocity and minimum fluidization velocity\n vg = []\n umf = []\n for z in self.MB_fuel.z:\n vg.append(value(self.MB_fuel.vg[z]))\n umf.append(value(self.MB_fuel.umf[z]))\n fig_vg = plt.figure(3)\n plt.plot(self.MB_fuel.z, vg, label='vg')\n plt.plot(self.MB_fuel.z, umf, label='umf')\n plt.legend(loc=0,ncol=2)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Superficial gas velocity [m/s]\")\n \n # Gas components molar flow rate\n for j in self.MB_fuel.GasList:\n F = []\n for z in self.MB_fuel.z:\n F.append(value(self.MB_fuel.F[z,j]))\n fig_F = plt.figure(4)\n plt.plot(self.MB_fuel.z, F, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Gas component molar flow rate, F [mol/s]\") 
\n \n # Bulk gas phase total molar flow rate\n Ftotal = []\n for z in self.MB_fuel.z:\n Ftotal.append(value(self.MB_fuel.Ftotal[z]))\n fig_Ftotal = plt.figure(5)\n plt.plot(self.MB_fuel.z, Ftotal)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total molar gas flow rate [mol/s]\") \n\n # Solid components mass flow rate\n for j in self.MB_fuel.SolidList:\n M = []\n for z in self.MB_fuel.z:\n M.append(value(self.MB_fuel.Solid_M[z,j]))\n fig_M = plt.figure(6)\n plt.plot(self.MB_fuel.z, M, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.SolidList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Solid components mass flow rate [kg/s]\")\n \n # Bulk solid phase total molar flow rate\n Mtotal = []\n for z in self.MB_fuel.z:\n Mtotal.append(value(self.MB_fuel.Solid_M_total[z]))\n fig_Mtotal = plt.figure(7)\n plt.plot(self.MB_fuel.z, Mtotal)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Solid total mass flow rate [kg/s]\") \n \n # Gas phase concentrations\n for j in self.MB_fuel.GasList:\n Cg = []\n for z in self.MB_fuel.z:\n Cg.append(value(self.MB_fuel.Cg[z,j]))\n fig_Cg = plt.figure(8)\n plt.plot(self.MB_fuel.z, Cg, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Concentration [mol/m3]\") \n \n # Gas phase mole fractions\n for j in self.MB_fuel.GasList:\n y = []\n for z in self.MB_fuel.z:\n y.append(value(self.MB_fuel.y[z,j]))\n fig_y = plt.figure(9)\n plt.plot(self.MB_fuel.z, y, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.GasList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"y [-]\") \n \n # Solid phase mass fractions\n for j in self.MB_fuel.SolidList:\n x = []\n for z in self.MB_fuel.z:\n x.append(value(self.MB_fuel.x[z,j]))\n fig_x = plt.figure(10)\n plt.plot(self.MB_fuel.z, x, label=j)\n plt.legend(loc=0,ncol=len(self.MB_fuel.SolidList))\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"x [-]\") \n\n # Total mass fraction\n xtot = []\n for z in self.MB_fuel.z:\n xtot.append(value(self.MB_fuel.xtot[z]))\n fig_xtot = plt.figure(11)\n plt.plot(self.MB_fuel.z, xtot)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Total mass fraction [-]\") \n \n # # Gas mix density\n # rhog = []\n # for z in self.MB_fuel.z:\n # rhog.append(value(self.MB_fuel.rho_vap[z]))\n # fig_rhog = plt.figure(23)\n # plt.plot(self.MB_fuel.z, rhog)\n # plt.grid()\n # plt.xlabel(\"Bed height [-]\")\n # plt.ylabel(\"Gas mix density [kg/m3]\") \n \n # Fe conversion\n X_Fe = []\n for z in self.MB_fuel.z:\n X_Fe.append(value(self.MB_fuel.X[z])*100)\n fig_X_Fe = plt.figure(13)\n plt.plot(self.MB_fuel.z, X_Fe)\n plt.grid()\n plt.xlabel(\"Bed height [-]\")\n plt.ylabel(\"Fraction of metal oxide converted [%]\")", "def plot_maxdisp_time(pointsh5, xscale=1e3, yscale=1e-2, tscale=3.1536e7,\n adjustRadial=False):\n coords,data,number,times = pu.load_h5_visco(pointsh5)\n x = coords[:,0]\n ur = np.hypot(data[:,:,0], data[:,:,1])\n uz = data[:,:,2]\n\n # Convert units & extract maximums for each timestep\n x = x / xscale\n ur = np.max(ur,1) / yscale\n uz = np.max(uz,1) / yscale #cm\n #times = times / 8.64e4 #days\n #times = times / 31536000 #years\n times = times / tscale\n\n plt.figure()\n line, = plt.plot(times, uz, 'b.-', lw=2, label='Uz')\n plt.plot(times, ur, ls='dashed', lw=2, marker='.', color=line.get_color(), label='Ur')\n plt.title('Maximum displacements')\n plt.ylabel('Displacement [{}]'.format(get_unit(yscale)))\n plt.xlabel('Time [{}]'.format(get_unit(tscale)))\n 
plt.show()\n plt.legend(loc='best')\n plt.grid()", "def energies():\n # Hardcoded initial values\n numsteps = 10000\n time_max = 1\n # Running the calculation in the solver class using the velocity verlet method\n # for better accuracy.\n verlet = solver(input_matrix, 'verlet', time_max, numsteps)\n output_matrix, KE, PE, AM = verlet.main()\n # Creating a simple time axis for plotting\n x = np.linspace(0, 1, numsteps+1)\n\n # Plotting kinetic energy over time\n plt.figure(1, figsize=(10, 10))\n plt.plot(x, KE)\n plt.suptitle('Total kinetic energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE'])\n\n # Plotting potential energy over time\n plt.figure(2, figsize=(10, 10))\n plt.plot(x, PE)\n plt.suptitle('Total potential energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE'])\n\n # Plotting total energy against time\n plt.figure(3, figsize=(10, 10))\n plt.plot(x, PE+KE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['KE+PE'])\n\n # Plotting angular momentum against time. print the amplitude to terminal\n amplitude = max(AM)-min(AM)\n print('Amplitude of angular momentum during 1 year: %g[AU²/yr²]' %(amplitude))\n plt.figure(4, figsize=(10, 10))\n plt.plot(x, AM)\n plt.suptitle('Total angular momentum in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²/yr²]', fontsize=16)\n plt.legend(['AM'])\n\n # Plotting the kinetic, potential and total energy against time to see\n # how great the variations are\n plt.figure(5, figsize=(10, 10))\n plt.plot(x, PE, x, KE, x, KE+PE)\n plt.suptitle('Total energy in the Earth-Sun system.', fontsize=24)\n plt.xlabel('time [yr]', fontsize=16)\n plt.ylabel('energy [AU²*kg/yr²]', fontsize=16)\n plt.legend(['PE', 'KE', 'KE+PE'])\n plt.show()", "def plotTime(self):\n plt.figure()\n t = [i for i in range(len(self.nodes_infected))]\n print(t)\n plt.title('Nodos infectados vs Tiempo')\n plt.xlabel('Instantes de tiempo')\n plt.ylabel('# de nodos infectados')\n plt.plot(t, self.nodes_infected)\n plt.grid(True)\n plt.show()", "def plot_bp_exptimes(self, plot_spectrum = True, title = None, ylims = (1.0, 1e7),\n cc = [\"C0\", \"C2\", \"C3\"], iremove = []):\n\n # Reshape exposure times\n tmp = self.tpbpcs_rect.T\n\n # Calculate clean spectrum\n output = self.complete_spectrum_time()\n spectrum = output[2]\n\n fig, ax2 = plt.subplots(figsize = (16,5))\n\n if title is not None:\n ax2.set_title(title)\n\n icount = 0\n for ichan in range(len(CHANNELS)):\n\n data = []\n positions = []\n widths = []\n\n for j in range(len(self.bp_names[self.bp_chan == ichan])):\n\n nanmask = np.isfinite(tmp[icount,:])\n\n data.append(tmp[icount,nanmask])\n positions.append(np.mean(spectrum[0][icount]))\n widths.append(spectrum[0][icount][-1] - spectrum[0][icount][0] + np.mean(spectrum[1][icount][:]))\n color1 = cc[ichan]\n\n comp_str = \"$%i \\%%$\" %(100.*self.frac_bias_bp[icount])\n comp_str2 = \"$\\mathbf{%i {\\%%}}$\" %(100.*self.frac_bias_bp[icount])\n comp_str3 = \"$\\mathbf{%i}$\" %(100.*self.frac_bias_bp[icount])\n #ax2.text(positions[j], np.median(tmp[icount,:]) + 5.*np.std(tmp[icount,:]), comp_str2,\n # ha = \"center\", va = \"top\", fontsize = 12, color = \"w\")\n q_l, q_50, q_h, q_m, q_p = 
nsig_intervals(tmp[icount,nanmask], intvls=[0.25, 0.5, 0.75])\n #ax2.text(positions[j], ylims[1], comp_str2,\n # ha = \"center\", va = \"top\", color = color1, fontsize = 12)\n ax2.text(positions[j], q_50 + q_p, comp_str3,\n ha = \"center\", va = \"bottom\", color = color1)\n\n #ax2.plot(self.bandpasses[icount], [q_50, q_50], color = color1, zorder = 120, ls = \"dashed\")\n\n icount += 1\n\n positions = np.array(positions)\n widths = np.array(widths)\n bp1 = ax2.boxplot(data, sym = '', widths = widths, showfliers = False,\n boxprops = {\"color\" : color1, \"alpha\" : 0.5},\n whiskerprops = {\"color\" : color1, \"linewidth\" : 2.0},\n capprops = {\"color\" : color1, \"linewidth\" : 0.0},\n medianprops = {\"color\" : \"w\", \"linewidth\" : 2.0},\n patch_artist=True, positions = positions, whis = [5, 95]);\n\n for patch in bp1['boxes']:\n patch.set_facecolor(color1)\n\n if plot_spectrum:\n\n ax = ax2.twinx()\n ax2.set_zorder(100)\n ax2.patch.set_visible(False)\n\n ax.set_xlabel(\"Wavelength [$\\mu$m]\")\n ax.set_ylabel(r\"Planet-Star Flux Ratio ($\\times 10^{-10}$)\", rotation = 270, labelpad = 25)\n for i in range(len(self.bp_names)):\n if i not in iremove:\n pass\n #ax.plot(spectrum[0][i], 1e10*spectrum[3][i], \"o\", ms = 4.0, alpha = 0.65, color = \"w\", zorder = 80)\n #ax.errorbar(spectrum[0][i], 1e10*spectrum[3][i], yerr=1e10*spectrum[4][i], fmt = \"o\", ms = 2.0, alpha = 0.65, color = \"k\", zorder = 80)\n #ax.axvspan(drmA.bandpasses[i][0], drmA.bandpasses[i][1], alpha = 0.2, color = cc[drmA.bp_chan[i]])\n\n self.cn.telescope.lammin = 0.2\n self.cn.telescope.lammax = 2.0\n self.cn.telescope.resolution = 140.\n # Re-do count rate calcs for true Earth spectrum\n self.cn.run_count_rates(self.AHR, self.LAMHR, self.FSTAR)\n l1, = ax.plot(self.cn.lam, 1e10*self.cn.Cratio, color = \"purple\", zorder = 0, lw = 4.0, alpha = 1.)\n l2, = ax.plot(self.cn.lam, 1e10*self.cn.Cratio, color = \"w\", zorder = 0, lw = 2.0, alpha = 0.65)\n ax.set_ylim(bottom=0.0)\n ax.legend([(l1, l2)], [(\"Modern Earth\")], framealpha = 0.0)\n\n # Label Molecules\n ax.text(0.27, 1.55, \"O$_3$\", ha = \"center\", va = \"center\")\n ax.text(0.6, 1.25, \"O$_3$\", ha = \"center\", va = \"center\")\n ax.text(0.68, 1.35, \"O$_2$\", ha = \"center\", va = \"center\")\n ax.text(0.76, 1.45, \"O$_2$\", ha = \"center\", va = \"center\")\n ax.text(0.96, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.15, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.4, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.9, 1.45, \"H$_2$O\", ha = \"center\", va = \"center\")\n ax.text(1.6, 1.25, \"CO$_2$\", ha = \"center\", va = \"center\")\n\n ax2.set_ylabel(\"Science Time [hrs]\")\n #ax2.set_title(r\"All %i targets (S/N$\\approx$%i)\" %(Ndraw, wantSNR))\n ax2.set_yscale(\"log\")\n\n ax2.set_xlabel(\"Wavelength [$\\mu$m]\")\n ax2.set_ylim(bottom = ylims[0], top = ylims[1])\n\n ax2.set_xticks([0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0])\n ax2.set_xticklabels([\"$0.2$\", \"$0.4$\", \"$0.6$\", \"$0.8$\", \"$1.0$\", \"$1.2$\", \"$1.4$\", \"$1.6$\", \"$1.8$\", \"$2.0$\"])\n ax2.set_xlim(0.1, 2.1)\n #ax2.set_xlim(0.4, 1.0)\n\n #fig.savefig(\"/Users/Jake/Dropbox/Astronomy/UW/Astrobio/Research Rotation/LUVOIR/figures/drm_bp10_science_time_%s.pdf\" %drm.architecture, bbox_inches = \"tight\")\n\n return fig", "def plot_limit(bolo_name, list_mass, analysis_type, exposure, detector_mass = 0.6):\n\n d_graph = {}\n list_color = [kOrange-8, kGreen+2, kBlue-7, kRed, kBlack, kMagenta, kAzure+10, kGreen-3, 
kOrange-9]\n\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n d_graph[heat_fraction] = get_simulated_event_limit(bolo_name, list_mass, analysis_type, \"_\" + heat_fraction, exposure, detector_mass = 0.6)\n d_graph[heat_fraction].SetName(heat_fraction)\n PyRPl.process_TGraph(d_graph[heat_fraction], color = list_color[index])\n\n gr_edw_poisson = get_limit_graph(\"./Text_files/edw3_ana_1.5_0_5_poisson.txt\", 2, kBlack)\n gr_edw_low = get_limit_graph(\"./Text_files/Published_limits/edw_lowmass_2012.txt\", 2, kRed)\n gr_edw_low.SetLineStyle(7)\n gr_cdms = get_limit_graph(\"./Text_files/Published_limits/cdms_limit.txt\", 2, kBlue)\n\n h = TH1F(\"h\", \"\", 100, 3,25)\n PyRPl.process_TH1(h, X_title = \"Mass (GeV)\", Y_title = \"#sigma (pb)\", X_title_size = .06, Y_title_size = .06, X_title_offset = .98, Y_title_offset = .95)\n\n\n gr_edw_low.SetName(\"gr_edw_low\")\n gr_edw_poisson.SetName(\"gr_edw_poisson\")\n gr_cdms.SetName(\"gr_cdms\")\n\n cc = TCanvas(\"cc\", \"cc\")\n gPad.SetLogy()\n gPad.SetLogx()\n h.SetMaximum(1E-1)\n h.SetMinimum(4E-8)\n h.Draw()\n\n gr_cdms.Draw(\"sameC\")\n gr_edw_poisson.Draw(\"sameC\")\n gr_edw_low.Draw(\"sameC\")\n\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n d_graph[heat_fraction].Draw(\"sameC\")\n\n leg =TLegend(0.564,0.584,0.83,0.857)\n leg.AddEntry(\"gr_cdms\", \"SCDMS\" , \"l\")\n leg.AddEntry(\"gr_edw_low\", \"EDW II\" , \"l\")\n leg.AddEntry(\"gr_edw_poisson\", \"EDW III Poisson\" , \"l\")\n for index, heat_fraction in enumerate([\"0.3\",\"0.4\",\"0.5\",\"0.8\",\"1\"]):\n leg.AddEntry( d_graph[heat_fraction].GetName(), heat_fraction , \"l\")\n\n leg.SetFillColor(kWhite)\n leg.SetLineColor(kWhite)\n leg.Draw()\n raw_input()", "def main(values, is_animation=False):\n\n def on_clicked(event):\n \"\"\"Direct the program when a key is pressed.\"\"\"\n\n if event.key == \"x\":\n # Use this os._exit(0) to close whole window, even when playing\n os._exit(0)\n\n if event.key == \"s\":\n # Get time to define image's name\n now = datetime.now()\n current_time = now.strftime(\"%H-%M-%S\")\n plot_name = \"Plot\" + \"-\" + current_time\n\n # Remove left title, then save image\n pyplot.title(\"\", loc=\"left\", pad=20)\n fig.savefig(\n \"%s%s%s\"\n % (\n CONS[\"OUTPUT_PHOTO_DIRECTORY\"],\n plot_name,\n CONS[\"PHOTO_TYPE\"],\n ),\n transparent=False,\n dpi=300,\n )\n\n # Use this exit(0) to prevent exiting when playing the plot\n # but allow closing when plotting finishes\n exit(0)\n\n def draw(values):\n \"\"\"Plot the grid, the line graphs and the titles.\"\"\"\n\n # Turn on grid with dashed style\n subplot.yaxis.grid(True, linestyle=\"dashed\")\n\n # Get list of new higher values\n new_values = get_new_values(values)\n\n # Plot 2 lines\n subplot.plot(range(len(values)), values)\n subplot.plot(range(len(new_values)), new_values, linewidth=2)\n\n # Print left plot title\n pyplot.title(\n \"Press X to exit\\nPress S to save\",\n loc=\"left\",\n fontsize=14,\n color=\"#1F76B4\",\n style=\"italic\",\n pad=20,\n )\n\n # Print right plot title\n pyplot.title(\n f\"{'Max objective:':>25}{max(values):>10.2E}\\n\"\n f\"{'Generation:':>25}{values.index(max(values)):>10}\",\n loc=\"right\",\n fontfamily=\"Lucida Sans Typewriter\",\n fontsize=12,\n color=\"#FF7E0E\",\n pad=20,\n )\n\n # The following code configures some elements of the plot window\n\n # Disable toolbar\n maplot.rcParams[\"toolbar\"] = \"None\"\n\n # Set font\n maplot.rcParams[\"font.family\"] = \"Candara\"\n maplot.rcParams[\"font.size\"] = 
12\n maplot.rcParams[\"font.weight\"] = 500\n\n # Set window title\n fig = pyplot.figure(figsize=(10, 5))\n fig.canvas.set_window_title(\"Prosthetic Foot Design by Genetic Algorithm\")\n\n # Set icon\n manager = pyplot.get_current_fig_manager()\n manager.window.wm_iconbitmap(CONS[\"ICON_FILE\"])\n\n # Disable some borders\n subplot = fig.add_subplot(111, frameon=True)\n subplot.spines[\"right\"].set_visible(False)\n subplot.spines[\"left\"].set_visible(False)\n subplot.spines[\"top\"].set_visible(False)\n\n # Push verticle axis to the right\n subplot.yaxis.tick_right()\n\n # Padding axis label from plot area, maybe unnecessary\n subplot.tick_params(axis=\"y\", which=\"major\", pad=5)\n subplot.tick_params(axis=\"x\", which=\"major\", pad=5)\n\n # Adjust subplot size based on window size\n pyplot.subplots_adjust(left=0.03, right=0.94, top=0.82, bottom=0.1)\n\n # Reconize key pressed\n pyplot.connect(\"key_press_event\", on_clicked)\n\n if is_animation:\n for index in range(1, len(values) + 1):\n subplot.clear()\n draw(values[:index])\n pyplot.pause(0.0001)\n else:\n draw(values)\n\n # Hold window\n pyplot.show()", "def plot(self):\n\t\tself.plotOfLoopVoltage()", "def plot_multiLyapunov(systems, mode=2, savefig=True, figname=None):\n if mode == 2:\n print(systems)\n# divnorm = colors.DivergingNorm(vmin=max([np.nanmin(np.nanmax(np.nanmax(system.lyapunov_2, axis=0), axis=0)) for system in systems]), vcenter=0, vmax=max[np.nanmax(system.lyapunov_2) for system in systems])\n if figname == None:\n figname = 'sum_of_first_2_lyapunov'\n \n fig, ax = plt.subplots()\n for system in systems:\n\n lyapunov_2 = system.lyapunov_2\n x = system.x\n y = system.y\n l = system.l\n a = system.a\n\n\n\n plt.contourf(a[0,0,:,:],l[0,0,:,:],np.nanmax(np.nanmax(lyapunov_2, axis=0), axis=0), levels = 100, cmap = 'RdBu_r')\n# , norm=divnorm)\n for i in range(lyapunov_2.shape[0]):\n for j in range(lyapunov_2.shape[1]):\n plt.contour(a[0,0,:,:],l[0,0,:,:],lyapunov_2[i,j], levels = [0,], colors=('k',),alpha=0.1)\n lyap_sum = plt.contour(a[0,0,:,:],l[0,0,:,:],lyapunov_2.max(axis=0).max(axis=0), levels = [0,], colors=('blue',),alpha=1)\n\n# cbar = plt.colorbar()\n plt.plot(wild_chaos[:,0],wild_chaos[:,1],'--r',lw=3)\n plt.title('Sum of the first 2 Lyapunov exponents ')\n plt.ylabel('$\\lambda$')\n plt.xlabel('a')\n# cbar.ax.set_ylabel('Sum of the first 2 Lyapunov exponents')\n\n ax.set_ylim([l.min(),l.max()])\n ax.set_xlim([a.min(),a.max()])\n if savefig:\n plt.savefig(f'images/{figname}.pdf')\n plt.show()", "def showPlot3():\n interested_in = [(20,20),(25,16),(40,10),(50,8),(80,5),(100,4)]\n proc_sim_data = []\n for item in interested_in:\n len_sim_data = []\n raw_sim_data = runSimulation(2, 1.0, item[0], item[1], 0.75, 100, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n plot([1,1.56,4,6.25,16,25], proc_sim_data)\n title('Dependence of cleaning time on room shape')\n xlabel('ratio of width to height')\n ylabel('mean time (clocks)')\n show()", "def plot_time(self, X, x0, t):\n\n Pressure = [Solution(self, (x-x0)/t).pressure for x in X]\n Velocity = [Solution(self, (x-x0)/t).velocity for x in X]\n Density = [Solution(self, (x-x0)/t).rho for x in X]\n\n fig, axs = plt.subplots(3, sharex=True)\n fig.suptitle(\"Solution of the Riemann problem\\nat t = {}s\".format(t))\n axs[0].plot(X, Density)\n axs[1].plot(X, Velocity)\n axs[2].plot(X, Pressure)\n\n axs[0].grid()\n axs[0].set(ylabel = \"Density\")\n axs[1].grid()\n axs[1].set(ylabel = 
\"Velocity\")\n axs[2].grid()\n axs[2].set(ylabel = \"Pressure\")\n\n plt.xlabel(\"Location x\")", "def showPlot4():\n overall_data = []\n per_to_clean = [round(x * 0.1,1) for x in range(0,10)]\n number_of_robots = list(range(1,6))\n for per in per_to_clean:\n proc_sim_data = []\n for item in number_of_robots:\n len_sim_data = []\n raw_sim_data = runSimulation(item, 1.0, 25, 25, per, 10, Robot, False)\n for mes in raw_sim_data:\n len_sim_data.append(len(mes))\n proc_sim_data.append(sum(len_sim_data)/len(len_sim_data))\n overall_data.append(proc_sim_data)\n plot(per_to_clean, overall_data)\n title('cleaning time vs. percentage cleaned')\n xlabel('percentage clean')\n ylabel('mean time (clocks)')\n show()", "def main():\n # Benutzerfuehrung\n print __doc__\n # Benoetigte Parameter (einige global, da event-funktion):\n global T_plot, hist, norm, erwartung, varianz,erwartung_werte, x_abs, x_th\n global varianz_werte, norm_werte, bins, orte_theorie, orte_theorie_abs\n global teilchen\n # Zeitparameter\n T0 = 0\n T_max = 40\n dt = 0.01\n N = (T_max - T0)/dt +1\n T = np.linspace(T0, T_max, N)\n plot_intervall = 100\n T_plot = T[::plot_intervall]\n\n # Parameter der Teilchen\n x0 = 0\n v_drift = 0.15\n diff_const = 1.5\n x_abs = 15\n R = 10**4\n # Histogrammparameter\n bins = 20\n \n \n # Berechnung fuer mehrere Realisierungen und Benutzerinformation\n print \"Nach der Berechnung besteht die Moeglichkeit einer dyn. Simulation!\"\n print \"Starte Berechnung von\", R, \"Teilchen:\"\n # Array zur Speicherung der Werte mit und ohne abs. Rand:\n teilchen = np.empty(shape=(R,((len(T)/plot_intervall)+1)))\n \n for i in range(R):\n teilchen[i,:] = orte_plot(T, x0, v_drift,\n diff_const, x_abs, plot_intervall)\n if (i+1) % 1000 == 0:\n print i+1, \"Teilchen berechnet.\"\n\n # Statistische Auswertung der simulierten Teilchen \n erwartung_werte, varianz_werte, norm_werte = statistik(teilchen, x_abs)\n\n # Theoretische Vorhersagen bestimmen\n x_th = np.linspace(-20,20, 1000)\n orte_theorie, orte_theorie_abs = verteilung_theorie(x_th, T_plot, x0,\n v_drift, diff_const, x_abs)\n \n print \"Berechnungen beendet!\"\n print \"Sie koennen jetzt die Simulation mit Klick in das Diagramm oben \\\n links starten\"\n\n \n # Plotbereiche anlegen\n figure = plt.figure(0, figsize=(14,9))\n figure.subplots_adjust(wspace=.3, hspace=.4)\n \n hist = plt.subplot(221)\n hist.set_title(\"Ortsverteilung der Teilchen\")\n hist.set_xlabel(r\"$x$\")\n hist.set_ylabel(\"Wahrscheinlichkeit\")\n hist.set_xlim(-20,20)\n hist.set_ylim(0, .1)\n\n norm = plt.subplot(222)\n norm.set_title(r\"Normierungsfaktor R(t_n)/R\")\n norm.set_xlabel(r\"$t$\")\n norm.set_ylabel(\"Norm\")\n norm.set_xlim(T0, T_max)\n norm.set_ylim(np.min(norm_werte), 1.1)\n\n erwartung = plt.subplot(223)\n erwartung.set_title(\"Erwartungswert\")\n erwartung.set_xlabel(r\"$t$\")\n erwartung.set_ylabel(\"Erwartungswert\")\n erwartung.set_xlim(T0, T_max)\n erwartung.set_ylim(0, np.max(erwartung_werte))\n\n varianz = plt.subplot(224)\n varianz.set_title(\"Varianz\")\n varianz.set_xlabel(r\"$t$\")\n varianz.set_ylabel(\"Varianz\")\n varianz.set_xlim(T0, T_max)\n varianz.set_ylim(0, np.max(varianz_werte))\n\n # Theoretische Werte ausgeben\n norm.axhline(1, T0, T_max, label = \"theoretisch, ohne abs.\",\n color = \"black\")\n norm.legend(loc=\"lower left\")\n erwartung.plot(T_plot, x0 + v_drift * T_plot,\n label = \"theoretisch, ohne abs.\", color = \"black\")\n erwartung.legend(loc=\"lower right\")\n varianz.plot(T_plot, 2 * diff_const * T_plot,\n label = \"theoretisch, ohne 
abs.\", color = \"black\")\n varianz.legend(loc=\"lower right\")\n \n plt.connect('button_press_event', plot_animation)\n plt.show()", "def draw_spectrum(msm):\n # eigenvalues of T sorted by the size\n length = min(len(msm.eigenv), 10) \n a = msm.eigenv[0:length]\n #a = sorted(W, reverse=True, key=lambda x: abs(x))[0:length]\n time = msm.timescales[0:length]\n x = np.arange(1.0,11.0,1.0)[0:length]\n\n # Define limits of the graph\n xmin = 0.7\n xmax = 10.3\n ymin = -0.1\n ymax = 1.1\n\n # Plot the ten biggest eigenvalues:\n ax1 = plt.subplot(111)\n plt.plot(x,a, 'ro', alpha=0.7, ms=8)\n plt.vlines(x,0,a)\n plt.xlabel('Index i', fontsize=12)\n ax1.set_ylabel(r'Eigenvalue $\\lambda_i$', fontsize=12, color='r')\n for tl in ax1.get_yticklabels(): #set tick label color\n tl.set_color('r')\n ax1.xaxis.tick_bottom()\n ax1.yaxis.tick_left()\n plt.axis([xmin, xmax, ymin, ymax])\n\n # add horizontal lines for orientation\n plt.axhline(linewidth=1, color='k')\n plt.axhline(y=1, linewidth=1, color='y')\n\n # plot timescales on the right y-axis:\n ax2 = plt.twinx()\n ax2.plot(x, time, 'bs', alpha=0.6, ms=6)\n #ax2.set_ylim([ymin, ymax])\n #ax2.set_yticks(time)\n #ax2.set_yticklabels([\"{0:0.2}\".format(timescale) for timescale in time])\n ax2.set_ylabel(r'Implied timescale $t_i$', fontsize=12, color='b')\n for tl in ax2.get_yticklabels():\n tl.set_color('b')\n ax2.yaxis.tick_right()\n\n plt.title('Eigenvalues and Implied Timescales', fontsize=16)\n\n plt.axis([xmin, xmax, 0., 1.05*time[1]])\n plt.show()", "def figure6():\n\n plot_settings = {'y_limits': [-100, 30],\n 'x_limits': None,\n 'y_ticks': [-80, -60, -40, -20, 0, 20],\n 'locator_size': 10,\n 'y_label': 'Voltage (mV)',\n 'x_ticks': [],\n 'scale_size': 50,\n 'x_label': \"\",\n 'scale_loc': 3,\n 'figure_name': 'figure_6',\n 'legend': None,\n 'legend_size': 8,\n 'y_on': True}\n\n marker = ['o', 's', '^']\n line_styles = ['-', 'dotted', '--']\n\n plt.figure(figsize=(5, 3), dpi=96)\n\n plt.subplot(2, 1, 1) # Generate subplot 1 (top)\n t, y = solver(240, i_bias_on=2, g_t_bar=0.1 / 10, duration=250)\n plt.plot(t, y[:, 0], 'k-')\n alter_figure(plot_settings)\n\n plt.subplot(2, 1, 2) # Generate subplot 2 (bottom)\n for ix, i_bias_on in enumerate([2, 1.5, 1]):\n t, y = solver(240, i_bias_on=i_bias_on, g_t_bar=0.1 / 10, duration=250)\n t_spike, f = spike_times(t, y[:, 0])\n plt.plot(t_spike[0:-1], f, c='k', linestyle=line_styles[ix], marker=marker[ix], fillstyle='none')\n\n plot_settings['y_limits'] = [0, 200]\n plot_settings['y_ticks'] = [0, 50, 100, 150, 200]\n plot_settings['locator_size'] = 25\n plot_settings['y_label'] = 'Frequency (Hz)'\n plot_settings['legend'] = ['2.0 nA', '1.5 nA', '1.0 nA']\n plot_settings['scale_size'] = 0\n alter_figure(plot_settings, close=True) # Alter figure for publication", "def ex1_plot(pace=\"\",delta=\"\",a_range=[.5,1,5]):\n\t# safety\n\tpace = str(pace)\n\tdelta = str(delta)\n\t\n\t# parameters\n\t#a_range = [0.5,2,5] # different values of alpha,beta\n\t#a_range = [x/5 for x in range(1,4)]\n\tb_range = sorted([1.5/a for a in a_range]) # different values of alpha,beta\n\tb_range = [.5,1,1.5]\n\tpace = 10\n\tl = len(a_range)\n\tc = [ ['#FFA13D', '#7DD85F', '#8EBFFF'],\n\t\t ['#FF1C1C', '#0EA03C', '#0B6DDD'],\n\t\t ['#960019', '#155B00', '#0A0AA8']]\n\tX = [i for i in range(T+1)]\n\t\t \n\tfig,axes = plt.subplots(l,1, sharex=True, sharey=True, figsize=(10,15))\n\t\n\tplt.xlabel('Time')\n\tplt.ylabel('Energy')\n\tplt.ylim(0,0.6)\n\t\n\tthreads=[]\n\t# create the data\n\tstep = 0\n\tfor i in range(l):\n\t\talpha = 
a_range[i]\n\t\tfor j in range(l):\n\t\t\tbeta = 1.5*b_range[j]/alpha\n\t\t\tdelta = beta*pace/T\n\t\t\tthreads+=[mp.Process(target=ex1_create, args=(alpha,beta,pace,delta))]\n\t\t\tthreads[-1].start()\n\t\t\tif(len(threads)>=3):\n\t\t\t\tfor t in threads:\n\t\t\t\t\tplot_avancement(step, l*l)\n\t\t\t\t\tstep+=1\n\t\t\t\t\tt.join()\n\t\t\t\tthreads = []\n\t\n\tfor t in threads:\n\t\tplot_avancement(step, l*l)\n\t\tstep+=1\n\t\tt.join()\n\t\t\n\t# get the data\n\tfor i in range(l):\n\t\talpha = a_range[i]\n\t\tfor j in range(l):\n\t\t\tbeta = 1.5*b_range[j]/alpha\n\t\t\tdelta = beta*pace/T\n\t\t\tY = ex1_get(alpha,beta,pace,delta)\n\t\t\taxes[i].plot(X,Y,label='beta='+str(beta)[:4],color=c[j][0])\n\t\t\t#axes[j,1].plot(X,Y,label='alpha='+str(alpha)[:4],color=c[i][j])\n\t\t\t\n\t\t\t#if i==l-1:\n\t\t\t#\taxes[j,1].set_title('Energy evolution for beta='+str(beta)[:4])\n\t\t\t#\taxes[j,1].legend() \n\n\t\taxes[i].set_title('Energy evolution with simulated annealing for alpha='+str(alpha)[:4])\n\t\taxes[i].legend()\n\t\t\n\t\n\tdest_file = res_path+'ex1_sim_'+seed+'.png'\n\tfig.savefig(dest_file)\n\tprint('\\nEnergy evolution plots saved in '+dest_file)", "def sysPLQF(mirror, blkFlag=True):\n import matplotlib.pyplot as plt\n import numpy as np # to ndarray.flatten ax\n\n mir = mirror\n xend = max(mir.r_t)\n\n fig, ax = plt.subplots(nrows=2, ncols=2,)\n ax = np.ndarray.flatten(ax)\n ax[0].set_title('Real Power Generated')\n for mach in mir.Machines:\n ax[0].plot(mir.r_t, mach.r_Pe, \n marker = 10,\n fillstyle='none',\n #linestyle = ':',\n label = 'Pe Gen '+ mach.Busnam)\n ax[0].set_xlabel('Time [sec]')\n ax[0].set_ylabel('MW')\n\n ax[2].set_title('Reactive Power Generated')\n for mach in mir.Machines:\n ax[2].plot(mir.r_t, mach.r_Q, \n marker = 10,\n fillstyle='none',\n #linestyle = ':',\n label = 'Q Gen '+ mach.Busnam)\n ax[2].set_xlabel('Time [sec]')\n ax[2].set_ylabel('MVAR')\n\n ax[1].set_title('Total System P Loading')\n ax[1].plot(mir.r_t, mir.r_ss_Pload, \n marker = 11,\n #fillstyle='none',\n #linestyle = ':',\n label = 'Pload')\n ax[1].set_xlabel('Time [sec]')\n ax[1].set_ylabel('MW')\n\n ax[3].set_title('System Mean Frequency')\n ax[3].plot(mir.r_t, mir.r_f,\n marker = '.',\n #linestyle = ':',\n label = r'System Frequency')\n ax[3].set_xlabel('Time [sec]')\n ax[3].set_ylabel('Frequency [PU]')\n\n # Global Plot settings\n for x in np.ndarray.flatten(ax):\n x.set_xlim(0,xend)\n x.legend()\n x.grid(True)\n\n fig.tight_layout()\n\n plt.show(block = blkFlag)", "def H_perform_plot(performance, hurricane):\n fig = plt.figure(figsize = (15, 10))\n for i in range(len(performance)):\n temp1 = performance[i]\n temp2 = hurricane[i]\n plt.plot(np.arange(0, len(temp1), 1), temp1, color = temp2.c, label = temp2.name)\n plt.xlabel('Time Step')\n plt.xticks(np.arange(0, len(temp1), 30))\n plt.ylabel('Performance')\n plt.legend(bbox_to_anchor=(1, 1), loc='upper left', ncol=1, frameon = 0)\n plt.grid(True)", "def plotEvaluation(results,nbr_file : int,path : str, labels = [\"\"],target_name = \"\"):\n fig,ax = plt.subplots(1,1)\n ax.set_yscale(\"log\")\n for res in range(len(results)):\n data = [np.array(p[1]) for p in results[res]]\n number_of_simulation = len(data)\n\n average_values = np.zeros(len(data[0]))\n for d in data:\n average_values =average_values + d\n average_values = np.array(average_values) / number_of_simulation\n \n error_values = [0 for i in range(len(average_values))]\n for j in range(len(error_values)):\n if j%int(len(error_values)/50)==0 :\n for i in range(len(data)):\n 
error_values[j] += (data[i][j] - average_values[j])**2\n error_values[j] = np.sqrt(error_values[j]/number_of_simulation)\n opt = 0\n tfile = ''\n if path == 'data/B/b':\n opt = B_opts[nbr_file-1]\n tfile = 'b'\n else:\n opt = C_opts[nbr_file-1]\n tfile = 'c'\n \n \n ax.errorbar(range(len(average_values)),average_values,yerr = error_values, ecolor = \"black\", linewidth = 1, elinewidth = 1, label = labels[res])\n \n \n #ax.ylim((opt-5,max(opt*2,average_values[-1]+10)))\n plt.title(f'{target_name} : The evolution of the best evaluation (in average) \\nfor graph {tfile}{nbr_file}.stp for {number_of_simulation} simulations')\n plt.xlabel(\"steps\")\n plt.ylabel(\"evaluation\")\n ax.legend()\n ax.axhline(opt, color='red', label = \"Optimal solution\")\n plt.savefig(f'best_{tfile}{nbr_file}_evaluation_{target_name}.png')\n plt.show()", "def figure9():\n\n g_h_bars = np.linspace(0.005, 0.05, 10)\n plot_settings = {'y_limits': [70, 115],\n 'x_limits': [0.005, 0.05],\n 'y_ticks': [70, 80, 90, 100, 110],\n 'locator_size': 5,\n 'y_label': 'mAHP Duration (ms)',\n 'x_ticks': g_h_bars[0::2],\n 'scale_size': 0,\n 'x_label': '$\\\\bar{g_H }$ ($\\mu S$)',\n 'scale_loc': 4,\n 'figure_name': 'figure_9',\n 'legend': None,\n 'legend_size': 8,\n 'y_on': True,\n 'x_on': True}\n\n plt.figure(figsize=(5, 3), dpi=96)\n\n t_start = 3000 # long to allow V to stabilize to new rest location\n length_after = np.zeros((len(g_h_bars),))\n\n for ix, g_h_bar in enumerate(g_h_bars):\n t, y = solver(5000, t_start=t_start, g_h_bar=g_h_bar)\n v = y[:, 0]\n\n pk = np.where(v == np.max(v))[0][0]\n v_clipped = v[pk:]\n v_rest = v[np.where(t <= t_start)[0][-1]] # v_rest is v immediately before the stimulus turns on\n v_clipped -= v_rest\n crossings = np.where((v_clipped[:-1] * v_clipped[1:]) < 0)[0]\n ix_after = np.where(t[crossings] < 200)[0][-1]\n length_after[ix] = t[crossings[ix_after] - crossings[ix_after - 1]]\n plt.plot(g_h_bars, length_after, c='k', marker='o', fillstyle='none')\n \"\"\"\n x is digitized from figure 9 in the original manuscript\n \"\"\"\n # x = [108.44444444444443, 97.03703703703704, 89.7037037037037, 85.55555555555556, 82.22222222222223,\n # 80.2962962962963, 78.22222222222223, 77.18518518518519, 76.81481481481481, 74.07407407407408]\n # plt.plot(g_h_bars, x)\n\n \"\"\"\n Annotate plot\n \"\"\"\n ellipse = patches.Ellipse(xy=(0.017, 105), width=0.01, height=4, angle=0)\n plt.gca().add_artist(ellipse)\n ellipse.set_facecolor((1, 1, 1))\n plt.gca().annotate(\"Neonatal\", (0.017, 105), fontsize=8, ha=\"center\", va=\"center\")\n\n ellipse = patches.Ellipse(xy=(0.04, 72), width=0.005, height=4, angle=0)\n plt.gca().add_artist(ellipse)\n ellipse.set_facecolor((1, 1, 1))\n plt.gca().annotate(\"Adult\", (0.04, 72), fontsize=8, ha=\"center\", va=\"center\")\n alter_figure(plot_settings, close=True)", "def Traffic_Perform_Plot(performance, hurricanes):\n fig = plt.figure(figsize = (8, 6))\n for i in range(len(performance)):\n temp1 = performance[i]\n temp2 = hurricanes[i]\n perform = sf.Normalize(temp1, Type = 'max')\n plt.plot(np.arange(0, len(perform), 1), perform, color = temp2.c, label = temp2.name, marker = 'o')\n plt.xlabel('Time Step')\n plt.xticks(np.arange(0, len(perform), 1))\n plt.ylabel('Performance')\n plt.legend(bbox_to_anchor=(1, 1), loc='upper left', ncol=1, frameon = 0)\n plt.grid(True)", "def plot(self):\n \n \n x_ibs=[] \n x_gss=[]\n y_ibs=[] \n y_gss=[]\n x_pso=[]\n x_bgd=[]\n y_bgd=[]\n y_pso=[]\n x_gd=[]\n y_gd=[]\n \n i=0.0000001\n \n # for k in range(1,51):\n # i= random.uniform(0.00000001, 
1)\n # t_avg_ibs=[]\n # t_avg_gss=[]\n # for j in range(1,51):\n #L=random.randint(-100, 0)\n #U=random.randint(0, 100)\n max_iter=self.Max_iter \n L=self.Lower_bound\n U=self.Upper_bound\n \n minima=self.gss(L,U,i,1000)\n #print(\"minima at X = \",minima[1])\n x_ibs.append(self.I_bisection(L,U,minima[1],max_iter)[0])\n x_gss.append(self.gss(L,U,i,max_iter)[0])\n x_pso.append(self.particle_Swarm(self.func, L, U, 2, max_iter)[0])\n x_gd.append(self.gradient_descent(X=U ,eta=0.01, tol=minima[1],iter= max_iter)[0])\n x_bgd.append(self.b_gradient_descent(LB=L,UB=U ,eta=0.01, tol=minima[1],iter=max_iter)[0])\n #print(x_pso)\n for i in x_ibs[0]:\n #print(self.Func(i)) \n y_ibs.append(self.Func(i))\n for i in x_gss[0]:\n y_gss.append(self.Func(i)) \n for i in x_pso[0]:\n y_pso.append(self.Func(i)) \n for i in x_gd[0]:\n y_gd.append(self.Func(i)) \n for i in x_bgd[0]:\n y_bgd.append(self.Func(i)) \n #print(y_gss)\n\n plt.plot(x_ibs[0], y_ibs, 'r.')\n plt.plot(x_gss[0], y_gss, '.')\n plt.plot(x_pso[0], y_pso, 'y.')\n #plt.plot(x_gd[0], y_gd, 'y.')\n #plt.plot(x_bgd[0], y_bgd, 'k.')\n plt.xlabel('x')\n plt.ylabel('y')\n \n plt.suptitle('Interval Bisection Search (Red) vs Golden Section Search (Blue) vs Particle swarm optimization (Green)')\n #plt.axis([0, 100, 0.00000001, 1]) \n plt.show()\n plt.plot(x_gd[0], y_gd, 'r.')\n plt.plot(x_bgd[0], y_bgd, 'k.')\n plt.xlabel('x')\n plt.ylabel('y') \n plt.suptitle('Gradient Descent (Red) vs Batch Gradient Descent (Black) ')\n \n plt.show()\n \n start_time = timeit.default_timer()\n ibs=self.I_bisection(L,U,minima[1],max_iter)\n print(\" Execution time for Interval bisection Method is\", timeit.default_timer() - start_time,\"s\")\n start_time = timeit.default_timer()\n gss=self.gss(L,U,i,max_iter)\n print(\" Execution time for Golden Section Search is\", timeit.default_timer() - start_time,\"s\")\n start_time = timeit.default_timer()\n pso=self.particle_Swarm(self.func, L, U, 2, max_iter)\n print(\" Execution time for Particle swarm optimization is\", timeit.default_timer() - start_time,\"s\")\n start_time = timeit.default_timer()\n gd=self.gradient_descent(X=U ,eta=0.01, tol=minima[1],iter= max_iter)\n print(\" Execution time for Gradient Descent is\", timeit.default_timer() - start_time,\"s\")\n start_time = timeit.default_timer()\n bgd=self.b_gradient_descent(LB=L,UB=U ,eta=0.01, tol=minima[1],iter=max_iter)\n print(\" Execution time for Batch Gradient Descent is\", timeit.default_timer() - start_time,\"s\")\n plt.plot(ibs[1], ibs[2], 'r.')\n plt.text(ibs[1], ibs[2],\"IB\")\n plt.plot(gss[1], gss[2], '.')\n plt.text(gss[1], gss[2],\" GSS\")\n plt.plot(pso[1], pso[2], 'y.')\n plt.text(pso[1], pso[2],\" PSO\")\n plt.plot(gd[1], gd[2], 'g.')\n plt.text(gd[1], gd[2],\" GD \")\n plt.plot(bgd[1],bgd[2], 'k.')\n plt.text(bgd[1], bgd[2],\" Batch_GD\")\n \n plt.xlabel('Value of X')\n plt.ylabel('NUmber of iteration') \n plt.suptitle('Number of iterations vs minimum value of x')\n \n plt.show()", "def plot_running_time():\n global counter\n counter += 1\n running_time_targeted = []\n running_time_fast_targeted = []\n \n for node_number in range(10, 1000, 10):\n synthetic_undirected_graph = make_synthetic_undirected_graph(node_number, 5)\n\n start_time = time.time()\n attack_order = targeted_order(synthetic_undirected_graph)\n stop_time = time.time()\n running_time_targeted.append(stop_time - start_time)\n \n start_time = time.time()\n attack_order = fast_targeted_order(synthetic_undirected_graph)\n stop_time = time.time()\n 
running_time_fast_targeted.append(stop_time - start_time)\n \n plt.plot(range(10, 1000, 10), running_time_targeted, '-b', label = 'targeted_order')\n plt.plot(range(10, 1000, 10), running_time_fast_targeted, '-r', label = 'fast_targeted_order')\n \n plt.legend(loc='upper right')\n\n\n plt.title(\" plot of running time of desktop Python\")\n plt.xlabel(\"the number of nodes\")\n plt.ylabel(\"running times\")\n plt.savefig(\"running_time_\"+str(counter)+\".png\", dpi = 72)\n plt.gcf().clear() # hose-keeping" ]
[ "0.627695", "0.6128777", "0.609871", "0.6001754", "0.5972552", "0.59707105", "0.59184533", "0.5913484", "0.59092885", "0.5882151", "0.58819574", "0.5839702", "0.583627", "0.58102316", "0.5798293", "0.5763779", "0.5756616", "0.57515097", "0.574167", "0.5741197", "0.5739625", "0.5717637", "0.5715399", "0.5684476", "0.5681501", "0.5656179", "0.56447", "0.56411195", "0.56244695", "0.56131685" ]
0.6852981
0
Test case for create_project
def test_create_project(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_project_request(self):\n pass", "def test_add_project(self):\n pass", "def test_project_creation(self):\n title = 'Project title'\n code = 'SCW-12345'\n project = self.create_project(\n title=title,\n code=code,\n institution=self.institution,\n tech_lead=self.project_owner,\n category=self.category,\n funding_source=self.funding_source,\n )\n self.assertTrue(isinstance(project, Project))\n self.assertEqual(project.__str__(), code + ' - ' + title)\n self.assertEqual(project.status, Project.AWAITING_APPROVAL)\n self.assertEqual(project.title, title)\n self.assertEqual(project.code, code)\n self.assertTrue(project.awaiting_approval())", "def test_create_project(self):\n yield self.nodes[0].overlay.create_project(\"test\", \"specpointer\", \"01-02-03\", 300, \"EUR\", 5)\n yield self.deliver_messages()\n\n # Node 2 should know about this app request now\n projects = self.nodes[1].overlay.persistence.get_projects()\n self.assertTrue(projects)\n self.assertEqual(projects[0]['id'], 1)", "def test_create_project_from_template(self):\n project_new = self.project_template.take_template()\n\n self.assertTrue(project_new)", "def test_create_project_root(self):\n self.assertEqual(Project.objects.count(), 2)\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': None,\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': str(self.user.sodar_uuid),\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 400)\n self.assertEqual(Project.objects.count(), 2)", "def test_create_project(client, session, tokens):\n response = client.post(\n \"/projects\",\n json={\n \"name\": \"New Project\",\n \"organizations\": [],\n \"teams\": [],\n \"users\": [],\n },\n headers={\"Authorization\": f\"Bearer {tokens['write']}\"},\n )\n assert response.status_code == 201\n project_id = response.json[\"id\"]\n assert Project.query.filter(Project.id == project_id).count() == 1", "def project_create(project):\n client.project.create(project)", "def test_get_project(self):\n pass", "def _create_project(self):\n request = {\n \"project\": {\n \"description\": \"description\",\n \"enabled\": True,\n \"name\": uuid.uuid4().hex,\n \"domain_id\": \"default\",\n }\n }\n response = self.client.post(PROJECT_PATH, data=json.dumps(request),\n headers=HEADERS)\n\n if response.status_code == 201:\n return response.json()\n else:\n raise SystemExit(\"Failed to create project.\")", "def testSessionCreate(self):\n success = False\n project = None\n\n try:\n project = self.session.create_project()\n\n success = True\n except Exception:\n pass\n\n self.failUnless(success)\n self.failIf(project is None)", "def test_create_project_target_enabled(self):\n self.assertEqual(Project.objects.count(), 2)\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': str(self.user.sodar_uuid),\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 201, msg=response.content)\n self.assertEqual(Project.objects.count(), 3)", "def test_new_project_existing(self):\n\n project = fake_clients.FakeProject(name=\"test_project\")\n\n setup_identity_cache(projects=[project])\n\n url = 
\"/v1/actions/CreateProjectAndUser\"\n data = {\"project_name\": \"test_project\", \"email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"admin,member\",\n \"username\": \"[email protected]\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n new_task = Task.objects.all()[0]\n url = \"/v1/tasks/\" + new_task.uuid\n response = self.client.post(\n url, {\"approved\": True}, format=\"json\", headers=headers\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.json(), {\"errors\": [\"actions invalid\"]})", "def test_new_project(self):\n\n setup_identity_cache()\n\n url = \"/v1/actions/CreateProjectAndUser\"\n data = {\"project_name\": \"test_project\", \"email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"admin,member\",\n \"username\": \"[email protected]\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n new_task = Task.objects.all()[0]\n url = \"/v1/tasks/\" + new_task.uuid\n response = self.client.post(\n url, {\"approved\": True}, format=\"json\", headers=headers\n )\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n self.assertEqual(response.json(), {\"notes\": [\"created token\"]})\n\n new_project = fake_clients.identity_cache[\"new_projects\"][0]\n self.assertEqual(new_project.name, \"test_project\")\n\n new_token = Token.objects.all()[0]\n url = \"/v1/tokens/\" + new_token.token\n data = {\"password\": \"testpassword\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_create_project(self):\n self.assertEqual(Project.objects.count(), 2)\n\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': str(self.user.sodar_uuid),\n }\n response = self.request_knox(url, method='POST', data=post_data)\n\n self.assertEqual(response.status_code, 201, msg=response.content)\n self.assertEqual(Project.objects.count(), 3)\n new_project = Project.objects.get(title=NEW_PROJECT_TITLE)\n model_dict = model_to_dict(new_project)\n model_dict['readme'] = model_dict['readme'].raw\n expected = {\n 'id': new_project.pk,\n 'title': new_project.title,\n 'type': new_project.type,\n 'parent': self.category.pk,\n 'description': new_project.description,\n 'readme': new_project.readme.raw,\n 'public_guest_access': False,\n 'archive': False,\n 'full_title': self.category.title + ' / ' + new_project.title,\n 'has_public_children': False,\n 'sodar_uuid': new_project.sodar_uuid,\n }\n self.assertEqual(model_dict, expected)\n self.assertEqual(\n RoleAssignment.objects.filter(\n project=new_project, user=self.user, role=self.role_owner\n ).count(),\n 1,\n )\n expected = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': new_project.description,\n 'readme': new_project.readme.raw,\n 'public_guest_access': False,\n 'sodar_uuid': 
str(new_project.sodar_uuid),\n }\n self.assertEqual(json.loads(response.content), expected)", "def test_create_account_project(self, create):\n row = {'PROJ_NAME1': 'Some Proj', 'PROJ_NO': '121-212',\n 'SECTOR': 'IT'}\n sync.create_account(row, None)\n self.assertTrue(create.called)\n account, row, issue_map = create.call_args[0]\n self.assertEqual(account.name, 'Some Proj')\n self.assertEqual(account.code, '121-212')\n self.assertEqual(account.category, Account.PROJECT)\n self.assertEqual(0, len(Account.objects.filter(pk=account.pk)))", "def test_user_can_create_a_project(self):\n self.assertEqual(project_model.Project.objects.get(user=self.test_user).pk, self.test_project.pk)", "def _create_dummy_project(self,projectname=\"testproject\"):\n # Create three types of users that exist: Root, can do anything, \n # projectadmin, cam do things to a project he or she owns. And logged in\n # user \n \n #created in _create_main_project_and_root.\n root = self.root\n # non-root users are created as if they signed up through the project, \n # to maximize test coverage. \n \n # A user who has created a project\n projectadmin = self._create_random_user(\"projectadmin_\")\n \n testproject = self._create_comicsite_in_admin(projectadmin,projectname)\n create_page_in_admin(testproject,\"testpage1\")\n create_page_in_admin(testproject,\"testpage2\")\n \n # a user who explicitly signed up to testproject\n participant = self._create_random_user(\"participant_\")\n self._register(participant,testproject)\n \n # a user who only signed up but did not register to any project\n registered_user = self._create_random_user(\"comicregistered_\")\n \n #TODO: How to do this gracefully? \n return [testproject,root,projectadmin,participant,registered_user]", "def _create_project(org, project_name):\n project = Project(\n org=org,\n name=project_name\n )\n project.save()\n return project", "def create_project(self, **kwargs):\n save = kwargs.get('save', True) \n if kwargs.has_key('save'):\n del(kwargs['save'])\n\n index = self.object_index()\n defaults = dict(slug = \"test-project-%s\" % index,\n basecamp_url = \"https://foo.basecamphq.com/projects/%s/log\" % index)\n defaults.update(kwargs)\n p = Project(**defaults)\n\n if save:\n p.save()\n self.assert_(p.id)\n return p", "def test_empty_project_create(self):\n\n responses.add(\n responses.POST,\n self.host + \"/manager\",\n json={'message': \"Project name cannot be empty.\", 'status':\"error\"},\n status=200\n )\n\n with self.assertRaises(CreateError):\n self.azk.create('', 'description')", "def run(opts, args):\n create_new_project()", "def create_project():\n client = RequestManager()\n project_name = \"\".join(choices(string.ascii_letters + string.digits, k=10))\n client.set_method(\"POST\")\n client.set_endpoint(\"/projects\")\n body = {\"name\": project_name}\n client.set_body(json.dumps(body))\n response = client.execute_request()\n STORED_ID['project_id'] = response.json()['id']", "def CreateProject(projectName='project'):\r\n projectName = input('''The project's name: ''')\r\n if not os.path.exists(projectName):\r\n os.mkdir(projectName)\r\n else:\r\n print('There is a file with the same name.')\r\n\r\n for dir in ['OPT', 'SCF', 'PHO']:\r\n if not os.path.exists(projectName + os.sep + dir):\r\n os.mkdir(projectName + os.sep + dir)", "def setup_project(client, project_template, do_auth=True):\n client = deepcopy(client)\n email = \"[email protected]\"\n password = \"test\"\n urls = URLS()\n project_config = project_template()\n\n # we work in empty database, so 
let's create business user and login\n user = User.objects.create(email=email)\n user.set_password(password) # set password without hash\n\n create_business(user)\n org = Organization.create_organization(created_by=user, title=user.first_name)\n user.active_organization = org\n user.save()\n\n if do_auth:\n\n assert signin(client, email, password).status_code == 302\n # create project\n with requests_mock.Mocker() as m:\n m.register_uri('POST', re.compile(r'ml\\.heartex\\.net/\\d+/validate'), text=json.dumps({'status': 'ok'}))\n m.register_uri('GET', re.compile(r'ml\\.heartex\\.net/\\d+/health'), text=json.dumps({'status': 'UP'}))\n r = client.post(urls.project_create, data=project_config)\n print('Project create with status code:', r.status_code)\n assert r.status_code == 201, f'Create project result should be redirect to the next page'\n\n # get project id and prepare url\n project = Project.objects.filter(title=project_config['title']).first()\n urls.set_project(project.pk)\n print('Project id:', project.id)\n\n client.project = project\n\n client.user = user\n client.urls = urls\n client.project_config = project_config\n client.org = org\n return client", "def test_create_project_from_template(svc_client_templates_creation):\n from git import Repo\n\n from renku.service.serializers.headers import RenkuHeaders\n from renku.service.utils import CACHE_PROJECTS_PATH\n\n svc_client, headers, payload, rm_remote = svc_client_templates_creation\n\n # NOTE: fail: remote authentication\n anonymous_headers = deepcopy(headers)\n anonymous_headers[\"Authorization\"] = \"Bearer None\"\n response = svc_client.post(\"/templates.create_project\", data=json.dumps(payload), headers=anonymous_headers)\n\n assert response\n assert response.json[\"error\"]\n assert \"Cannot push changes\" in response.json[\"error\"][\"reason\"]\n\n # NOTE: fail: missing parameters\n if len(payload[\"parameters\"]) > 0:\n payload_without_parameters = deepcopy(payload)\n payload_without_parameters[\"parameters\"] = []\n response = svc_client.post(\n \"/templates.create_project\", data=json.dumps(payload_without_parameters), headers=headers\n )\n assert response\n assert response.json[\"error\"]\n assert RENKU_EXCEPTION_ERROR_CODE == response.json[\"error\"][\"code\"]\n assert \"missing parameter\" in response.json[\"error\"][\"reason\"]\n\n # NOTE: successfully push with proper authentication\n response = svc_client.post(\"/templates.create_project\", data=json.dumps(payload), headers=headers)\n\n assert response\n assert {\"result\"} == set(response.json.keys())\n stripped_name = normalize_to_ascii(payload[\"project_name\"])\n assert stripped_name == response.json[\"result\"][\"slug\"]\n expected_url = \"{0}/{1}/{2}\".format(payload[\"project_repository\"], payload[\"project_namespace\"], stripped_name)\n assert expected_url == response.json[\"result\"][\"url\"]\n\n # NOTE: assert correct git user is set on new project\n user_data = RenkuHeaders.decode_user(headers[\"Renku-User\"])\n project_path = (\n CACHE_PROJECTS_PATH\n / user_data[\"user_id\"]\n / response.json[\"result\"][\"project_id\"]\n / payload[\"project_namespace\"]\n / stripped_name\n )\n repo = Repo(project_path)\n reader = repo.config_reader()\n assert reader.get_value(\"user\", \"email\") == user_data[\"email\"]\n assert reader.get_value(\"user\", \"name\") == user_data[\"name\"]\n\n # NOTE: successfully re-use old name after cleanup\n assert rm_remote() is True\n sleep(1) # NOTE: sleep to make sure remote isn't locked\n response = 
svc_client.post(\"/templates.create_project\", data=json.dumps(payload), headers=headers)\n assert response\n assert {\"result\"} == set(response.json.keys())\n assert expected_url == response.json[\"result\"][\"url\"]\n assert rm_remote() is True", "def test_create_project_unknown_user(self):\n self.assertEqual(Project.objects.count(), 2)\n url = reverse('projectroles:api_project_create')\n post_data = {\n 'title': NEW_PROJECT_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': 'description',\n 'readme': 'readme',\n 'public_guest_access': False,\n 'owner': INVALID_UUID,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n self.assertEqual(response.status_code, 400)\n self.assertEqual(Project.objects.count(), 2)", "def test_read_project(self):\n pass", "def test_read_project(self):\n pass", "def test_project_generation(cookies, context, context_combination):\n result = cookies.bake(extra_context={**context, **context_combination})\n assert result.exit_code == 0\n assert result.exception is None\n assert result.project.basename == context[\"project_slug\"]\n assert result.project.isdir()\n\n paths = build_files_list(str(result.project))\n assert paths\n check_paths(paths)" ]
[ "0.87766117", "0.8437002", "0.841835", "0.80077946", "0.7994402", "0.7775029", "0.77226585", "0.76863635", "0.7676147", "0.76569086", "0.7646558", "0.7646026", "0.76438665", "0.7638706", "0.76039904", "0.74945164", "0.7460312", "0.7429228", "0.74266595", "0.7354817", "0.7349462", "0.7287748", "0.7284829", "0.7261214", "0.72552454", "0.7198857", "0.71860194", "0.7158664", "0.7158664", "0.7150631" ]
0.94393003
0
Test case for list_projects
def test_list_projects(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_list_project(self):\n pass", "def test_get_projects(self):\n pass", "def test_list_project_request(self):\n pass", "def test_get_project_list_with_projects(self):\n # Add two test projects.\n projects = [\n add_project(title='1', description='1'),\n add_project(title='2', description='2'),\n ]\n\n result = get_project_list()\n result_projects = result['projects'].object_list\n\n # Make sure two test projects are retrieved.\n for project in projects:\n self.assertTrue(project in result_projects)\n self.assertEqual(len(result_projects), len(projects))\n self.assertIsNone(result['tag'])\n self.assertFalse(result['filtered'])", "def test_get_projects(self):\n for project in ['TEST', 'NEWTEST', 'MYPROJECT']:\n self.db.insert_single_result(generate_mock_result(project=project))\n projects = self.db.get_projects()\n self.assertItemsEqual(['MYPROJECT', 'NEWTEST', 'TEST'], projects)", "def test_project_list_with_projects(self):\n # Add test projects.\n first_project = add_project(title='Title 1', description='Description 1')\n second_project = add_project(title='Title 2', description='Description 2')\n\n # Check that project list contains test projects.\n response = self.client.get(reverse('portfolio:project_list'))\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, first_project.title)\n self.assertContains(response, first_project.description)\n self.assertContains(response, second_project.title)\n self.assertContains(response, second_project.description)", "def test_get_projects_returns_projects(fc: fetcher.Fetcher):\n projects = fc.get_projects()\n assert isinstance(projects, list)\n assert isinstance(projects[0], models.Project)", "def test_get_projects_filters(fc: fetcher.Fetcher, test_project_name):\n projects = fc.get_projects(test_project_name)\n assert isinstance(projects, list)\n assert len(projects) == 1\n assert projects[0].name == test_project_name", "def test_project_list(self):\n rv = self.app.get(\"/\")\n self.assertIn(\"Assignment0\", rv.data)\n self.assertIn(\"Assignment1.0\", rv.data)\n self.assertIn(\"Assignment2.0\", rv.data)", "def test_projects_get(self):\n response = self.client.open('/project-tracker/projects',\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def test_get_projects_expanded(self):\n pass", "def test_get_project_list_with_no_projects(self):\n result = get_project_list()\n self.assertQuerysetEqual(result['projects'].object_list, [])\n self.assertIsNone(result['tag'])\n self.assertFalse(result['filtered'])", "def test_get_project(self):\n pass", "def test_show_project_list(self):\n fake_project = FakeResource(1)\n\n # This mocks is faking keystone retrieving a defined list of\n # projects\n patch('identity.views.Keystone.project_list',\n Mock(return_value=[fake_project])).start()\n\n render_mock = patch(\n 'identity.views.ListProjectView.render_to_response').start()\n\n response = self.view(self.request)\n\n render_args = render_mock.call_args[0][0]\n computed = render_args['projects'][0]\n\n self.assertEqual(computed, fake_project.to_dict())", "def test_list_projects():\n\n inventory.save(\n name=self._project[\"name\"],\n config=self._config,\n inventory=self._inventory\n )\n\n return_code = subprocess.call(\n [\n sys.executable,\n \"-u\",\n \"-m\",\n \"avalon.inventory\",\n \"--ls\"\n ],\n cwd=tempfile.mkdtemp(dir=self._tempdir)\n )\n\n assert 0 == return_code", "def _ExpectListProjects(self, projects):\n self.mock_projects_client.projects.List.Expect(\n 
self.projects_messages.CloudresourcemanagerProjectsListRequest(\n filter='lifecycleState:ACTIVE'),\n self.projects_messages.ListProjectsResponse(\n projects=[\n self.projects_messages.Project(\n projectId=p, name='name') for p in projects]))", "def list_projects(arn=None, nextToken=None):\n pass", "def test_get_project_list_with_page_filter(self):\n # Add test projects.\n projects = [\n add_project(title=str(i), description=str(i)) for i in range(10)\n ]\n pages = {\n 1: projects[5:],\n 2: projects[:5],\n }\n\n # Check first page results.\n result = get_project_list(page=1)\n first_page_results = result['projects'].object_list\n for first_page_project in pages[1]:\n self.assertTrue(first_page_project in first_page_results)\n self.assertFalse(\n any(project in first_page_results for project in pages[2]))\n\n # Check second page results.\n result = get_project_list(page=2)\n second_page_results = result['projects'].object_list\n self.assertFalse(\n any(project in second_page_results for project in pages[1]))\n for second_page_project in pages[2]:\n self.assertTrue(second_page_project in second_page_results)", "def test_get_projects(client, session, models, tokens):\n response = client.get(\n \"/projects\", headers={\"Authorization\": f\"Bearer {tokens['read']}\"}\n )\n assert response.status_code == 200\n assert len(response.json) > 0", "def list_projects():\n with BMI(_username, _password, constants.BMI_ADMIN_PROJECT) as bmi:\n ret = bmi.list_projects()\n if ret[constants.STATUS_CODE_KEY] == 200:\n table = PrettyTable(\n field_names=[\"Id\", \"Name\", \"Provision Network\"])\n projects = ret[constants.RETURN_VALUE_KEY]\n for project in projects:\n table.add_row(project)\n click.echo(table.get_string())\n else:\n click.echo(ret[constants.MESSAGE_KEY])", "def test_project_list_tags(self):\n # Add test project with tags.\n tags = ['tag1', 'tag2', 'tag3']\n add_project(title='1', description='1', tags=tags)\n\n # Check that project list contains each tag.\n response = self.client.get(reverse('portfolio:project_list'))\n self.assertEqual(response.status_code, 200)\n for tag in tags:\n self.assertContains(response, tag)", "def do_project_list(cs, args):\n _, projects = cs.projects.list()\n fields = [\n 'project_id',\n 'name',\n 'owner_id',\n 'current_user_role_id',\n 'repo_count',\n 'creation_time',\n 'public',\n ]\n utils.print_list(projects, fields, formatters={}, sortby=args.sortby)", "def list_projects(ctx):\n pprint(ctx.obj.groups.get().data)", "def test_project_list_pagination(self):\n # Add enough projects so that pagination is required.\n # project_list should show 5 projects per page, so 15\n # projects will be split up over 3 pages.\n for i in range(15):\n add_project(title='{0}'.format(i), description='{0}'.format(i))\n\n url = reverse('portfolio:project_list')\n\n # Check buttons on first page.\n response = self.client.get(url)\n self.assertNotContains(response, 'Previous')\n self.assertContains(response, 'Next')\n\n # Check buttons on second page.\n response = self.client.get('{url}?page=2'.format(url=url))\n self.assertContains(response, 'Previous')\n self.assertContains(response, 'Next')\n\n # Check buttons on third page.\n response = self.client.get('{url}?page=3'.format(url=url))\n self.assertContains(response, 'Previous')\n self.assertNotContains(response, 'Next')", "def list_projects():\n if '.wcscanner' not in os.listdir(context.__BASE_PATH__):\n return []\n return os.listdir(context.__PROJECTS_PATH__)", "def get_project_list():\n return parse_list_output(Popen(\n 'openstack 
project list'.split(), stdout=STDOUT, stderr=STDERR\n ).communicate()[0])", "def test_read_project(self):\n pass", "def test_read_project(self):\n pass", "def test_projects_endpoint(self):\n with open('demo/tests/mock_results.json', 'r') as result_file:\n data = result_file.read()\n expected_response = json.loads(data)[\"test_project_calls\"]\n\n responses.add(\n responses.GET,\n f'{os.environ[\"AIVEN_API_URL\"]}/v1/project',\n json=expected_response,\n status=200\n )\n resp = requests.get(f'{os.environ[\"AIVEN_API_URL\"]}/v1/project')\n\n assert resp.status_code == 200\n assert resp.json() == expected_response\n assert len(responses.calls) == 1\n assert responses.calls[0].request.url == f'{os.environ[\"AIVEN_API_URL\"]}/v1/project'\n assert \"MY-PROJECT-NAME\" in responses.calls[0].response.text\n assert responses.calls[0].response.json() == expected_response", "def get_projects(self):\n projects = []\n page = 1\n while not len(projects) % 100:\n projects += self._get('/projects?{0}'.format(urllib.urlencode({'per_page': 100, 'page': page})))\n if not projects:\n break\n page += 1\n return projects" ]
[ "0.9042425", "0.8778745", "0.87161314", "0.8480545", "0.8213874", "0.79792327", "0.7973112", "0.79351175", "0.7776914", "0.7674936", "0.7659318", "0.7621019", "0.7614639", "0.7605095", "0.75506294", "0.750285", "0.73882", "0.73737574", "0.72951984", "0.72737426", "0.72640103", "0.72562426", "0.7246814", "0.7234167", "0.7178262", "0.7173774", "0.7161938", "0.7161938", "0.7129362", "0.712571" ]
0.92746294
0
Test case for read_project
def test_read_project(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_project(self):\n pass", "def test_get_projects(self):\n pass", "def test_list_project(self):\n pass", "def project():", "def project():", "def project():", "def test_project_detail(self):\n rv = self.app.get(\"/Assignment0\")\n self.assertIn(\"Assignment0\", rv.data)\n self.assertIn(\"2015-02-04 21:57:12.156363\", rv.data)\n self.assertIn(\"221\", rv.data)\n self.assertIn(\"commit assignment0\", rv.data)\n\n self.assertIn(\"Assignment0/Procfile\", rv.data)\n self.assertIn(\"Assignment0/README.md\", rv.data)", "def test_list_projects(self):\n pass", "def test_list_projects(self):\n pass", "def test_get_project(self):\n url = reverse(\n 'projectroles:api_project_retrieve',\n kwargs={'project': self.project.sodar_uuid},\n )\n response = self.request_knox(url)\n\n self.assertEqual(response.status_code, 200)\n response_data = json.loads(response.content)\n expected = {\n 'title': self.project.title,\n 'type': self.project.type,\n 'parent': str(self.category.sodar_uuid),\n 'description': self.project.description,\n 'readme': '',\n 'public_guest_access': False,\n 'archive': False,\n 'roles': {\n str(self.owner_as_cat.sodar_uuid): {\n 'user': self.get_serialized_user(self.user_owner_cat),\n 'role': PROJECT_ROLE_OWNER,\n 'inherited': True,\n 'sodar_uuid': str(self.owner_as_cat.sodar_uuid),\n },\n str(self.owner_as.sodar_uuid): {\n 'user': self.get_serialized_user(self.user_owner),\n 'role': PROJECT_ROLE_OWNER,\n 'inherited': False,\n 'sodar_uuid': str(self.owner_as.sodar_uuid),\n },\n },\n 'sodar_uuid': str(self.project.sodar_uuid),\n }\n self.assertEqual(response_data, expected)", "def test_list_project_request(self):\n pass", "def test_demo_project_call(self):\n resp = DemoAivenStorage(os.environ[\"AIVEN_API_URL\"],\n os.environ[\"AIVEN_TOKEN\"]).get_project_names()\n assert isinstance(resp, list)\n assert len(resp) == 1\n assert 'romainducarrouge-31f2' in resp", "def test_search_project(self):\n title = Project.search_project(\"dee\")\n self.assertTrue(len(title) > 0)", "def test_project_reader(project):\n if is_server_administrator():\n return True\n if is_project_administrator(project):\n return True\n if is_project_writer(project):\n return True\n if is_project_reader(project):\n return True\n return False", "def test_get_project(self):\n self.assertEqual(self.remote_project.get_project(), self.project)", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def test_projects_get(self):\n response = self.client.open('/project-tracker/projects',\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def test_set_project_itar_information(self):\n pass", "def get_project(arn=None):\n pass", "def test_create_project_request(self):\n pass", "def test_project(self, doi_dataset, doi_bib, orcid, metadata_parser):\n apply_mock(doi_dataset, doi_bib, orcid, metadata_parser)\n os.chdir(\"input/\")\n _set_args(\"-i\",\"yamls/project.yaml\",\"-o\",\"../out\")\n with HiddenPrints():\n ya2ro.main()\n\n data = yaml.load(\"yamls/project.yaml\", Loader=SafeLoader)\n with open(\"../out/project/index-en.html\") as f:\n web = f.read()\n\n self.assert_data_in_web(data, web)", "def test_get_projects(self):\n for project in ['TEST', 'NEWTEST', 'MYPROJECT']:\n self.db.insert_single_result(generate_mock_result(project=project))\n projects = self.db.get_projects()\n self.assertItemsEqual(['MYPROJECT', 'NEWTEST', 'TEST'], projects)", "def test_get_projects_expanded(self):\n pass", "def 
test_find_project(self):\n result = Project.objects.find(\n ['test'], project_type=PROJECT_TYPE_PROJECT\n )\n self.assertEqual(len(result), 1)\n self.assertEqual(result[0], self.project)", "def test_project_list(self):\n rv = self.app.get(\"/\")\n self.assertIn(\"Assignment0\", rv.data)\n self.assertIn(\"Assignment1.0\", rv.data)\n self.assertIn(\"Assignment2.0\", rv.data)", "def test_add_project(self):\n pass", "def test_get_projects_filters(fc: fetcher.Fetcher, test_project_name):\n projects = fc.get_projects(test_project_name)\n assert isinstance(projects, list)\n assert len(projects) == 1\n assert projects[0].name == test_project_name", "def read_project(filename):\n sep ='='\n \n tags = {\n 'description': ['BEGIN DESCRIPTION:', 'END DESCRIPTION:']\n }\n\n fixed = {\n 'units': ['SI Units' 'English Units']\n }\n\n keys = {\n 'Proj Title': '',\n 'Default Exp/Contr': '=0.3,0.1',\n 'Current Plan': '=p03',\n 'Geom File': '=g01',\n 'Flow File': '=f01',\n 'Plan File': '=p01',\n 'Y Axis Title=Elevation': '',\n 'X Axis Title(PF)': '=Main Channel Distance',\n 'X Axis Title(XS)': '=Station',\n 'DSS Start Date': '=',\n 'DSS Start Time': '=',\n 'DSS End Date': '=',\n 'DSS End Time': '=',\n 'DSS Export Filename': '=',\n 'DSS Export Rating Curves': '= 0',\n 'DSS Export Rating Curve Sorted': '= 0',\n 'DSS Export Volume Flow Curves': '= 0',\n 'DXF Filename': '=',\n 'DXF OffsetX': '= 0',\n 'DXF OffsetY': '= 0',\n 'DXF ScaleX': '= 1',\n 'DXF ScaleY': '= 10',\n 'GIS Export Profiles': '= 0'\n }" ]
[ "0.828083", "0.7624312", "0.73511124", "0.70184237", "0.70184237", "0.70184237", "0.6992255", "0.69816697", "0.69816697", "0.6962875", "0.6943265", "0.69107896", "0.68491393", "0.6812562", "0.6809167", "0.6785107", "0.6785107", "0.6785107", "0.6781834", "0.67258483", "0.6630345", "0.6629928", "0.65947866", "0.65929943", "0.6576285", "0.65605", "0.6538033", "0.6501766", "0.6412429", "0.6410664" ]
0.92293584
0
Test case for update_project
def test_update_project(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_patch_project(self):\n pass", "def test_replace_project(self):\n pass", "def test_patch_project(self):\n self.assertEqual(Project.objects.count(), 2)\n\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {\n 'title': UPDATED_TITLE,\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n }\n response = self.request_knox(url, method='PATCH', data=patch_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.assertEqual(Project.objects.count(), 2)\n\n self.project.refresh_from_db()\n model_dict = model_to_dict(self.project)\n model_dict['readme'] = model_dict['readme'].raw\n expected = {\n 'id': self.project.pk,\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': self.category.pk,\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'full_title': self.category.title + ' / ' + UPDATED_TITLE,\n 'has_public_children': False,\n 'sodar_uuid': self.project.sodar_uuid,\n }\n self.assertEqual(model_dict, expected)\n self.assertEqual(self.project.get_owner().user, self.user_owner)\n\n expected = {\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'roles': {\n str(self.category.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner_cat),\n 'inherited': True,\n 'sodar_uuid': str(self.category.get_owner().sodar_uuid),\n },\n str(self.project.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner),\n 'inherited': False,\n 'sodar_uuid': str(self.project.get_owner().sodar_uuid),\n },\n },\n 'sodar_uuid': str(self.project.sodar_uuid),\n }\n self.assertEqual(json.loads(response.content), expected)", "def test_add_project(self):\n pass", "def test_put_project(self):\n self.assertEqual(Project.objects.count(), 2)\n\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n put_data = {\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n }\n response = self.request_knox(url, method='PUT', data=put_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.assertEqual(Project.objects.count(), 2)\n\n self.project.refresh_from_db()\n model_dict = model_to_dict(self.project)\n model_dict['readme'] = model_dict['readme'].raw\n expected = {\n 'id': self.project.pk,\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': self.category.pk,\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'full_title': self.category.title + ' / ' + UPDATED_TITLE,\n 'has_public_children': False,\n 'sodar_uuid': self.project.sodar_uuid,\n }\n self.assertEqual(model_dict, expected)\n\n expected = {\n 'title': UPDATED_TITLE,\n 'type': PROJECT_TYPE_PROJECT,\n 'parent': str(self.category.sodar_uuid),\n 'description': UPDATED_DESC,\n 'readme': UPDATED_README,\n 'public_guest_access': True,\n 'archive': False,\n 'roles': {\n str(self.category.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner_cat),\n 'inherited': True,\n 
'sodar_uuid': str(self.category.get_owner().sodar_uuid),\n },\n str(self.project.get_owner().sodar_uuid): {\n 'role': PROJECT_ROLE_OWNER,\n 'user': self.get_serialized_user(self.user_owner),\n 'inherited': False,\n 'sodar_uuid': str(self.project.get_owner().sodar_uuid),\n },\n },\n 'sodar_uuid': str(self.project.sodar_uuid),\n }\n self.assertEqual(json.loads(response.content), expected)", "def updateProjects(request):\n\n updater = ProjectUpdater()\n updater.run()\n return http.HttpResponse(\"Ok\")", "def test_projects_patch(self):\n project = Project()\n response = self.client.open('/project-tracker/projects',\n method='PATCH',\n data=json.dumps(project),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def do_project_update(cs, args):\n raise NotImplementedError", "def test_projects_put(self):\n project = Project()\n response = self.client.open('/project-tracker/projects',\n method='PUT',\n data=json.dumps(project),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def test_create_project_request(self):\n pass", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def test_create_project(self):\n pass", "def test_remove_project(self):\n pass", "def test_updateVersion(self):\n project = self.makeProject(Version(\"bar\", 2, 1, 0))\n newVersion = Version(\"bar\", 3, 2, 9)\n project.updateVersion(newVersion)\n self.assertEquals(project.getVersion(), newVersion)\n self.assertEquals(\n project.directory.child(\"topfiles\").child(\"README\").getContent(),\n \"3.2.9\")", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update_case(self):\n pass", "def test_get_project(self):\n pass", "def update_project_info(data):\n\tif 'pk' in data:\n\t\tif data['pk'] is not None:\n\t\t\tproject = get_or_none(ProjectInfo, pk=data['pk'])\n\t\t\tif project:\n\t\t\t\tproject.name = data['name']\n\t\t\t\tproject.description = data['description']\n\t\t\t\tproject.start_date = data['start_date']\n\t\t\t\tproject.end_date = data['end_date']\n\t\t\t\tproject.save()\n\t\t\t\tprint ('Updated')\n\t\t\t\treturn True\n\t\t\telse:\n\t\t\t\treturn False\n\t\telse:\n\t\t\treturn False\n\n\telse:\n\t\tprint (\"please provide pk for updating\")\n\t\treturn False", "def test_set_project_itar_information(self):\n pass", "def test_update_team(self):\n pass", "def test_delete_project(self):\n pass", "def test_delete_project(self):\n pass", "def test_update(app):\n\n assert False", "def test_update_goal(self):\n pass", "def test_update9(self):\n pass", "def test_patch_project_move(self):\n self.assertEqual(\n self.project.full_title,\n self.category.title + ' / ' + self.project.title,\n )\n\n new_category = self.make_project(\n 'NewCategory', PROJECT_TYPE_CATEGORY, None\n )\n self.make_assignment(new_category, self.user_owner_cat, self.role_owner)\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {'parent': str(new_category.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.project.refresh_from_db()\n model_dict = model_to_dict(self.project)\n self.assertEqual(model_dict['parent'], new_category.pk)\n owners = [a.user for a in self.project.get_owners()]\n self.assertIn(self.user_owner_cat, owners)\n 
self.assertIn(self.user_owner, owners)\n\n # Assert child project full title update\n self.assertEqual(\n self.project.full_title,\n new_category.title + ' / ' + self.project.title,\n )\n self.assertEqual(\n json.loads(response.content)['parent'], str(new_category.sodar_uuid)\n )", "def test_projects_id_put(self):\n project = Project()\n response = self.client.open('/project-tracker/projects/{id}'.format(id=3.4),\n method='PUT',\n data=json.dumps(project),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))" ]
[ "0.82712364", "0.7873975", "0.7442448", "0.74199355", "0.7384629", "0.7369599", "0.73155415", "0.7272609", "0.7195377", "0.7130262", "0.7127845", "0.7127845", "0.7127845", "0.7080842", "0.7078068", "0.7027116", "0.7027116", "0.7027116", "0.7019853", "0.7015513", "0.700853", "0.6979212", "0.697569", "0.6932203", "0.6932203", "0.69294965", "0.67687577", "0.6764707", "0.6743658", "0.67169243" ]
0.94864285
0
Create a pretty image of a molecule, with a colored frame around it
def molecule_to_image(mol: Molecule, frame_color: PilColor) -> PilImage: mol = Chem.MolFromSmiles(mol.smiles) img = Draw.MolToImage(mol) cropped_img = crop_image(img) return draw_rounded_rectangle(cropped_img, frame_color)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def molecule_to_image(\n mol: Molecule, frame_color: PilColor, size: int = 300\n) -> PilImage:\n mol = Chem.MolFromSmiles(mol.smiles)\n img = Draw.MolToImage(mol, size=(size, size))\n cropped_img = crop_image(img)\n return draw_rounded_rectangle(cropped_img, frame_color)", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def _repr_png_(self):\n mol = self.owner.mol\n keku = IPythonConsole.kekulizeStructures\n size = IPythonConsole.molSize\n opts = IPythonConsole.drawOptions\n return Draw._moltoimg(\n mol, size, self.aix, \"\", returnPNG=True, drawOptions=opts,\n kekulize=keku, highlightBonds=self.bix\n )", "def print_image(indiv,name):\n routine = gp.compile(indiv,pset)\n output = gen_beat_output(routine)\n bits = np.array(map(bitlist,output)[0:24000]).transpose()\n plt.style.use('classic')\n plt.imshow(bits,interpolation='nearest',aspect='auto',cmap=plt.get_cmap('Greys'))\n plt.savefig(name+\".png\",dpi=150)", "def make_graphviz_image(\n molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]],\n reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]],\n edges: Sequence[Tuple[Any, Any]],\n frame_colors: Sequence[PilColor],\n) -> PilImage:\n\n def _create_image(use_splines):\n txt = template.render(\n molecules=mol_spec,\n reactions=reactions,\n edges=edges,\n use_splines=use_splines,\n )\n _, input_name = tempfile.mkstemp(suffix=\".dot\")\n with open(input_name, \"w\") as this_fileobj:\n this_fileobj.write(txt)\n\n _, output_img2 = tempfile.mkstemp(suffix=\".png\")\n ext = \".bat\" if sys.platform.startswith(\"win\") else \"\"\n subprocess.call([f\"dot{ext}\", \"-T\", \"png\", f\"-o{output_img2}\", input_name])\n if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0:\n raise FileNotFoundError(\n \"Could not produce graph with layout - check that 'dot' command is in path\"\n )\n return output_img2\n\n mol_spec = save_molecule_images(molecules, frame_colors)\n\n template_filepath = os.path.join(data_path(), \"templates\", \"reaction_tree.dot\")\n with open(template_filepath, \"r\") as fileobj:\n template = Template(fileobj.read())\n template.globals[\"id\"] = id # type: ignore\n\n try:\n output_img = _create_image(use_splines=True)\n except FileNotFoundError:\n output_img = _create_image(use_splines=False)\n\n return Image.open(output_img)", "def show(self):\n data = []\n for row in self.grid:\n mid, bottom = [], []\n for node in row:\n \tmid += [0, int(node.right)]\n \tbottom += [int(node.down), 1]\n data += mid + [0] + bottom + [0] \n data[self.width*2+1] = 1\n data[-1] = 1\n data += (self.width*2) * [0]\n im = Image.new('1', (self.width*2+1, self.height*2+1))\n im.putdata(data)\n im.save('maze.png')\n im.show()", "def make_graphviz_image(\n molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]],\n reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]],\n edges: Sequence[Tuple[Any, Any]],\n frame_colors: Sequence[PilColor],\n reaction_shapes: Sequence[str] = None,\n use_splines: bool = True,\n) -> PilImage:\n\n def _create_image(use_splines):\n txt = template.render(\n molecules=mol_spec,\n reactions=rxn_spec,\n edges=edges,\n use_splines=use_splines,\n )\n _, input_name = tempfile.mkstemp(suffix=\".dot\")\n with open(input_name, \"w\") as this_fileobj:\n this_fileobj.write(txt)\n\n 
_, output_img2 = tempfile.mkstemp(suffix=\".png\")\n ext = \".bat\" if sys.platform.startswith(\"win\") else \"\"\n subprocess.call([f\"dot{ext}\", \"-T\", \"png\", f\"-o{output_img2}\", input_name])\n if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0:\n raise FileNotFoundError(\n \"Could not produce graph with layout - check that 'dot' command is in path\"\n )\n return output_img2\n\n mol_spec = save_molecule_images(molecules, frame_colors)\n reaction_shapes = reaction_shapes or [\"circle\"] * len(reactions)\n rxn_spec = zip(reactions, reaction_shapes)\n\n template_filepath = os.path.join(data_path(), \"templates\", \"reaction_tree.dot\")\n with open(template_filepath, \"r\") as fileobj:\n template = Template(fileobj.read())\n template.globals[\"id\"] = id # type: ignore\n\n if not use_splines:\n output_img = _create_image(use_splines=False)\n return Image.open(output_img)\n\n try:\n output_img = _create_image(use_splines=True)\n except FileNotFoundError:\n output_img = _create_image(use_splines=False)\n\n return Image.open(output_img)", "def build_schematic(self, bg=None):", "def __init__(\n self,\n win,\n outer_diam_pix,\n inner_diam_pix,\n bg_colour=(-1, -1, -1),\n line_colour=(+1, +1, +1),\n spot_colour=(-1, -1, -1),\n circle_edges=128,\n ):\n\n self._win = win\n self._outer_diam_pix = outer_diam_pix\n self._inner_diam_pix = inner_diam_pix\n self._circle_edges = circle_edges\n self._stim = {}\n\n self._stim[\"aperture\"] = psychopy.visual.Aperture(\n win=win,\n size=self._outer_diam_pix,\n nVert=self._circle_edges,\n shape=\"circle\",\n units=\"pix\",\n autoLog=False,\n )\n\n self._stim[\"aperture\"].enabled = False\n\n self._stim[\"bg\"] = psychopy.visual.Circle(\n win=self._win,\n radius=self._outer_diam_pix / 2.0,\n units=\"pix\",\n lineColor=None,\n fillColor=bg_colour,\n edges=self._circle_edges,\n autoLog=False,\n )\n\n self._stim[\"line\"] = psychopy.visual.Rect(\n win=self._win,\n size=(self._outer_diam_pix * 2, self._inner_diam_pix),\n units=\"pix\",\n lineWidth=0,\n lineColor=None,\n fillColor=line_colour,\n autoLog=False,\n )\n\n self._stim[\"spot\"] = psychopy.visual.Circle(\n win=self._win,\n radius=self._inner_diam_pix / 2.0,\n units=\"pix\",\n fillColor=spot_colour,\n edges=self._circle_edges,\n lineWidth=0,\n autoLog=False,\n )\n\n self.bg_colour = bg_colour\n self.line_colour = line_colour\n self.spot_colour = spot_colour", "def bprint(self):\n\t\tpcolor = [\n\t\t\t(0, 0, 255, 255),\n\t\t\t(255, 0, 0, 255),\n\t\t\t(0, 255, 0, 255),\n\t\t\t(255, 255, 0, 255),\n\t\t\t(0, 255, 255, 255),\n\t\t\t(255, 140, 0, 255),\n\t\t\t(140, 0, 255, 255),\n\t\t\t(255, 0, 255, 255)\n\t\t]\n\t\timg = Image.open(bundled_data_path(self.cog) / 'img.png')\n\t\td = ImageDraw.Draw(img)\n\t\t#OWNEDBY\n\t\tfor t in range(40):\n\t\t\tif self.ownedby[t] > -1:\n\t\t\t\tif 0 < t < 10:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(650-(t*50))-39,702,(650-(t*50))-10,735],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(650-(t*50))-37,702,(650-(t*50))-12,733],\n\t\t\t\t\t\tfill=pcolor[self.ownedby[t]]\n\t\t\t\t\t)\n\t\t\t\telif 10 < t < 20:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[16,(650-((t-10)*50))-39,50,(650-((t-10)*50))-10],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[18,(650-((t-10)*50))-37,50,(650-((t-10)*50))-12],\n\t\t\t\t\t\tfill=pcolor[self.ownedby[t]]\n\t\t\t\t\t)\n\t\t\t\telif 20 < t < 
30:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(100+((t-20)*50))+11,16,(100+((t-20)*50))+41,50],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(100+((t-20)*50))+13,18,(100+((t-20)*50))+39,50],\n\t\t\t\t\t\tfill=pcolor[self.ownedby[t]]\n\t\t\t\t\t)\n\t\t\t\telif 30 < t < 40:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[702,(100+((t-30)*50))+11,736,(100+((t-30)*50))+41],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[702,(100+((t-30)*50))+13,734,(100+((t-30)*50))+39],\n\t\t\t\t\t\tfill=pcolor[self.ownedby[t]]\n\t\t\t\t\t)\n\t\t#TILE\n\t\t#Because the player int used to be 1 indexed, the players would be in the wrong\n\t\t#position without 1 indexing and subtracting 1 from t when calling self.tile[t]\n\t\t#and pcolor[t]. I could fix this by changing the hard coded values, but this is\n\t\t#easier in the short term.\n\t\tfor t in range(1, self.num + 1):\n\t\t\tif not self.isalive[t-1]:\n\t\t\t\tcontinue\n\t\t\tif self.tile[t-1] == 0:\n\t\t\t\td.rectangle(\n\t\t\t\t\t[(12*(t-1))+604,636,(12*(t-1))+614,646], fill=(0,0,0,255)\n\t\t\t\t)\n\t\t\t\td.rectangle(\n\t\t\t\t\t[(12*(t-1))+605,637,(12*(t-1))+613,645], fill=pcolor[t-1]\n\t\t\t\t)\n\t\t\telif 0 < self.tile[t-1] < 10:\n\t\t\t\tif t < 5:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((650-(self.tile[t-1]*50))-47)+(12*(t-1)),636,((650-(self.tile[t-1]*50))-37)+(12*(t-1)),646],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((650-(self.tile[t-1]*50))-46)+(12*(t-1)),637,((650-(self.tile[t-1]*50))-38)+(12*(t-1)),645],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\t\telse:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((650-(self.tile[t-1]*50))-47)+(12*(t-5)),648,((650-(self.tile[t-1]*50))-37)+(12*(t-5)),658],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((650-(self.tile[t-1]*50))-46)+(12*(t-5)),649,((650-(self.tile[t-1]*50))-38)+(12*(t-5)),657],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\telif self.tile[t-1] == 10:\n\t\t\t\td.rectangle(\n\t\t\t\t\t[106,(12*(t-1))+604,116,(12*(t-1))+614],\n\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t)\n\t\t\t\td.rectangle(\n\t\t\t\t\t[107,(12*(t-1))+605,115,(12*(t-1))+613],\n\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t)\n\t\t\telif 10 < self.tile[t-1] < 20:\n\t\t\t\tif t < 5:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[106,((650-((self.tile[t-1]-10)*50))-47)+(12*(t-1)),116,((650-((self.tile[t-1]-10)*50))-37)+(12*(t-1))],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[107,((650-((self.tile[t-1]-10)*50))-46)+(12*(t-1)),115,((650-((self.tile[t-1]-10)*50))-38)+(12*(t-1))],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\t\telse:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[94,((650-((self.tile[t-1]-10)*50))-47)+(12*(t-5)),104,((650-((self.tile[t-1]-10)*50))-37)+(12*(t-5))],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[95,((650-((self.tile[t-1]-10)*50))-46)+(12*(t-5)),103,((650-((self.tile[t-1]-10)*50))-38)+(12*(t-5))],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\telif self.tile[t-1] == 20:\n\t\t\t\td.rectangle(\n\t\t\t\t\t[138-(12*(t-1)),106,148-(12*(t-1)),116],\n\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t)\n\t\t\t\td.rectangle(\n\t\t\t\t\t[139-(12*(t-1)),107,147-(12*(t-1)),115],\n\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t)\n\t\t\telif 20 < self.tile[t-1] < 30:\n\t\t\t\tif t < 
5:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((100+((self.tile[t-1]-20)*50))+39)-(12*(t-1)),106,((100+((self.tile[t-1]-20)*50))+49)-(12*(t-1)),116],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((100+((self.tile[t-1]-20)*50))+40)-(12*(t-1)),107,((100+((self.tile[t-1]-20)*50))+48)-(12*(t-1)),115],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\t\telse:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((100+((self.tile[t-1]-20)*50))+39)-(12*(t-5)),94,((100+((self.tile[t-1]-20)*50))+49)-(12*(t-5)),104],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[((100+((self.tile[t-1]-20)*50))+40)-(12*(t-5)),95,((100+((self.tile[t-1]-20)*50))+48)-(12*(t-5)),103],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\telif self.tile[t-1] == 30:\n\t\t\t\td.rectangle(\n\t\t\t\t\t[636,138-(12*(t-1)),646,148-(12*(t-1))],\n\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t)\n\t\t\t\td.rectangle(\n\t\t\t\t\t[637,139-(12*(t-1)),645,147-(12*(t-1))],\n\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t)\n\t\t\telif 30 < self.tile[t-1] < 40:\n\t\t\t\tif t < 5:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[636,((100+((self.tile[t-1]-30)*50))+39)-(12*(t-1)),646,((100+((self.tile[t-1]-30)*50))+49)-(12*(t-1))],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[637,((100+((self.tile[t-1]-30)*50))+40)-(12*(t-1)),645,((100+((self.tile[t-1]-30)*50))+48)-(12*(t-1))],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t\t\telse:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[648,((100+((self.tile[t-1]-30)*50))+39)-(12*(t-5)),658,((100+((self.tile[t-1]-30)*50))+49)-(12*(t-5))],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[649,((100+((self.tile[t-1]-30)*50))+40)-(12*(t-5)),657,((100+((self.tile[t-1]-30)*50))+48)-(12*(t-5))],\n\t\t\t\t\t\tfill=pcolor[t-1]\n\t\t\t\t\t)\n\t\t#NUMHOUSE\n\t\tfor t in range(40):\n\t\t\tif self.numhouse[t] == 5:\n\t\t\t\tif 0 < t < 10:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(650-(t*50))-33,606,(650-(t*50))-15,614],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(650-(t*50))-32,607,(650-(t*50))-16,613],\n\t\t\t\t\t\tfill=(255,0,0,255)\n\t\t\t\t\t)\n\t\t\t\telif 10 < t < 20:\t\t\t\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[138,(650-((t-10)*50))-33,146,(650-((t-10)*50))-17],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[139,(650-((t-10)*50))-32,145,(650-((t-10)*50))-18],\n\t\t\t\t\t\tfill=(255,0,0,255)\n\t\t\t\t\t)\n\t\t\t\telif 20 < t < 30:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(100+((t-20)*50))+17,138,(100+((t-20)*50))+35,146],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[(100+((t-20)*50))+18,139,(100+((t-20)*50))+34,145],\n\t\t\t\t\t\tfill=(255,0,0,255)\n\t\t\t\t\t)\n\t\t\t\telif 30 < t < 40:\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[606,(100+((t-30)*50))+17,614,(100+((t-30)*50))+35],\n\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t)\n\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t[607,(100+((t-30)*50))+18,613,(100+((t-30)*50))+34],\n\t\t\t\t\t\tfill=(255,0,0,255)\n\t\t\t\t\t)\n\t\t\telif self.numhouse[t] > 0:\n\t\t\t\tfor tt in range(self.numhouse[t]):\n\t\t\t\t\tif 0 < t < 10:\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[((650-(t*50))-47)+(tt*12),606,((650-(t*50))-37)+(tt*12),614],\n\t\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[((650-(t*50))-46)+(tt*12),607,((650-(t*50))-38)+(tt*12),613],\n\t\t\t\t\t\t\tfill=(0,255,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\telif 10 < t < 
20:\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[138,((650-((t-10)*50))-47)+(tt*12),146,((650-((t-10)*50))-37)+(tt*12)],\n\t\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[139,((650-((t-10)*50))-46)+(tt*12),145,((650-((t-10)*50))-38)+(tt*12)],\n\t\t\t\t\t\t\tfill=(0,255,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\telif 20 < t < 30:\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[((100+((t-20)*50))+39)-(tt*12),138,((100+((t-20)*50))+49)-(tt*12),146],\n\t\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[((100+((t-20)*50))+40)-(tt*12),139,((100+((t-20)*50))+48)-(tt*12),145],\n\t\t\t\t\t\t\tfill=(0,255,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\telif 30 < t < 40:\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[606,((100+((t-30)*50))+39)-(tt*12),614,((100+((t-30)*50))+49)-(tt*12)],\n\t\t\t\t\t\t\tfill=(0,0,0,255)\n\t\t\t\t\t\t)\n\t\t\t\t\t\td.rectangle(\n\t\t\t\t\t\t\t[607,((100+((t-30)*50))+40)-(tt*12),613,((100+((t-30)*50))+48)-(tt*12)],\n\t\t\t\t\t\t\tfill=(0,255,0,255)\n\t\t\t\t\t\t)\n\t\t#END\n\t\ttemp = BytesIO()\n\t\ttemp.name = 'board.png'\n\t\timg.save(temp)\n\t\ttemp.seek(0)\n\t\treturn temp", "def imageframe(sf, identifier, loc=\".\"):\n from matplotlib.patches import Circle\n\n # Computing center and radius\n blurred = blur(sf.data)\n bin_img = threshold(blurred, sf.calcthresh(blurred))\n cont_found, big_cont = find_largest_contour(bin_img, blurred)\n if cont_found:\n _, radius = cv2.minEnclosingCircle(big_cont)\n center = (sf.c, sf.r)\n sig_r = RADIUS_SIGNAL\n bg_r1, bg_r2 = RADIUS_BGINNER, RADIUS_BGOUTER\n # -----------------------\n # Temporary and just for display and testing\n fig, ax = plt.subplots()\n ax.imshow(\n sf.data,\n cmap=\"gray\",\n extent=[\n sf.c - sf.radius,\n sf.c + sf.radius,\n sf.r + sf.radius,\n sf.r - sf.radius,\n ],\n )\n p = Circle(center, radius + sig_r, fill=False, ec=\"green\", lw=2)\n p2 = Circle(center, radius + bg_r1, fill=False, ec=\"cyan\", lw=2)\n p3 = Circle(center, radius + bg_r2, fill=False, ec=\"cyan\", lw=2)\n ax.add_patch(p)\n ax.add_patch(p2)\n ax.add_patch(p3)\n fig.suptitle(f\"Frame {identifier}\", size=12, weight=\"bold\")\n ax.set_title(sf.time.strftime(\"%Y%m%d %H:%M:%S.%f %Z\"))\n # plt.imshow(signal | background) # Shows masks\n if os.path.exists(loc):\n fig.savefig(f\"{loc}/Frame{identifier}.png\")\n else:\n os.makedirs(loc)\n fig.savefig(f\"{loc}/Frame{identifier}.png\")\n #plt.close(fig)\n # -----------------------", "def setup():\n img = Image.new('RGB', (10, 20))\n img.putpixel((5, 10), (0, 255, 0))\n img.save('green-dot.tif')\n img.save('green-dot.jpg')\n img.save('green-dot.png')", "def render_molecule(\n smiles: str,\n path: str,\n width: int = 320,\n height: int = 240,\n file_format: str = \"svg\",\n clearbackground: bool = False,\n force_regenerate: bool = False,\n) -> None:\n # Import the openeye toolkit\n from openeye import oechem, oedepict\n\n output_name = get_image_filename(smiles)\n output_path = os.path.join(path, os.extsep.join([output_name, file_format]))\n\n if not force_regenerate and os.path.exists(output_path):\n logging.info(\"Skipping already-rendered molecule: %s\", smiles)\n return\n\n # Generate OpenEye OEMol object from SMILES\n # see https://docs.eyesopen.com/toolkits/python/oechemtk/molctordtor.html?highlight=smiles#construction-from-smiles\n mol = oechem.OEGraphMol()\n\n if not oechem.OESmilesToMol(mol, smiles):\n raise ValueError(f\"Failed to convert SMILES string to molecule: {smiles}\")\n\n # Configure options (lots more are available)\n # see 
https://docs.eyesopen.com/toolkits/python/depicttk/OEDepictClasses/OE2DMolDisplayOptions.html\n opts = oedepict.OE2DMolDisplayOptions()\n opts.SetWidth(width)\n opts.SetHeight(height)\n\n # Render image\n oedepict.OEPrepareDepiction(mol)\n disp = oedepict.OE2DMolDisplay(mol, opts)\n oedepict.OERenderMolecule(output_path, disp, clearbackground)", "def GrayCodePattern_create(width, height):\n pass", "def show_image(image):\n print('-' * (len(image) + 4))\n for line in image:\n print('| ', end='')\n for ch in line:\n char = '#' if ch is True else '.'\n print(char, end='')\n print(' |')\n print('-' * (len(image) + 4))", "def part1(width, height, size, color):\n pass", "def draw_mol(mol, highlightAtoms, highlightColors):\n drawer = rdMolDraw2D.MolDraw2DSVG(400, 200)\n drawer.DrawMolecule(mol, highlightAtoms=highlightAtoms, highlightAtomColors=highlightColors)\n drawer.FinishDrawing()\n\n # TODO: return or save image, for inclusion in a PDF report or similar\n\n # To display in a notebook:\n # svg = drawer.GetDrawingText().replace('svg:', '')\n # display(SVG(svg))", "def clumpy_graph(self,img):\n \n id = self._getGraphId()\n root = 'S_%s' % (id,)\n pngname = root + '.png' ; epsname = root + '.eps'\n jpgname = root + '.jpg'\n doStamp(img.copy(),pngname,format='PNG')\n Convert(pngname,jpgname)\n \n Painted = Paint(jpgname)\n try : Painted.load()\n except IOError : stop()\n text = 'S=%5.2f' % (self['M_S'])\n # Painted.Graffiti(text,commtextpos)\n Painted.save(jpgname) \n Painted.release()\n \n Convert(jpgname,epsname)\n os.system('rm %s %s' % (pngname,jpgname))\n self['figures']['S'] = epsname\n self['figcomms']['S'] = text", "def setup():\n size(SPACE['w'], SPACE['h'])\n colorMode(RGB, 1)", "def __generate_image(self):\n\t\tself.img = np.ones((self.size*self.width+self.border,self.size*self.width+self.border,1), np.uint8)*255\n\t\tfor i in range(len(self.matrix)):\n\t\t\tfor j in range(len(self.matrix)):\n\t\t\t\tif self.matrix[j][i] == 1:\n\t\t\t\t\tself.img = cv2.rectangle(self.img,(i*self.width+int(self.border/2),j*self.width+int(self.border/2))\n\t\t\t\t\t\t,(i*self.width+self.width+int(self.border/2),j*self.width+self.width+int(self.border/2)),(0,0,0),-1)\n\t\tif '.' 
in self.name:\n\t\t\tcv2.imwrite(self.name,self.img)\n\t\telse:\n\t\t\tcv2.imwrite(self.name+'.jpg',self.img)\n\t\tcv2.imshow(\"Image\",self.img)\n\t\tcv2.waitKey(0)\n\t\tcv2.destroyAllWindows()", "def showAssembled(self):\n im = np.zeros(self.puzzleImage.shape);\n r,c,d = self.puzzleImage.shape;\n r = r/len(self.puzzlePieces); # assume square matrix\n c = c/len(self.puzzlePieces);\n \n for i in range (len(self.puzzlePieces)):\n for j in range (len(self.puzzlePieces)):\n im[i*r:(i+1)*r, j*c:(j+1)*c] = self.puzzlePieces[i,j];\n \n plt.imshow(im);\n plt.show();", "def pretty_print(image_example):\n print numpy.array_str(image_example, precision=1, max_line_width=142)", "def generate_frame(offset: int = 0, color: str = \"yellow\"):\n # Setup the canvas\n c = COLORS[color]\n canvas = Image.new(\"RGBA\", (768, 768), c[0])\n draw = ImageDraw.Draw(canvas)\n n = 5\n q = 360 / (2 * n)\n\n # Render each 'beam' of the sunbeam effect\n for i in range(n):\n startang = offset + 2 * (i - 1) * q\n endang = offset + (2 * (i - 1) + 1) * q\n draw.pieslice((0, 0, 768, 768), startang, endang, fill=c[1])\n\n # Crop to the center 512x\n return canvas.crop((128, 128, 640, 640))", "def show(self):\n import Helpers\n for p in self.parts:\n color = (p[1][0]*255, p[1][1]*255, p[1][2]*255, 0)\n Helpers.show(p[0], color)", "def draw(self):\n self.draw_occupied_cells()\n self.draw_open_cells()\n self.draw_edges()\n plt.xlabel(\"Red\")\n plt.ylabel(\"Black\")\n plt.title('Hex')\n self.camera.snap()", "def render_frame(self, image):\n arr = np.array(image.resize(self.curses_shape))\n characters = self.character_transformer.map_pixels_to_characters(arr)\n colors = self.color_transformer.nearest_neighbors(arr)\n return CursesFrame(characters, colors)", "def dem_jpeg(dem_file):\n out_file = dem_file+'.jpeg'\n rsc_file = out_file+'.rsc'\n shutil.copy2(dem_file+'.rsc', rsc_file)\n # read data\n dem = readfile.read(dem_file)[0]\n print('dem.shape:',dem.shape)\n # figure size\n ds_shape = tuple(reversed(dem.shape))\n fig_dpi = 300\n fig_size = [i / fig_dpi for i in ds_shape]\n print('fig_size:',fig_size)\n # color range\n disp_min = np.nanmin(dem) - 4000\n disp_max = np.nanmax(dem) + 2000\n # prepare shaded relief\n ls = LightSource(azdeg=315, altdeg=45)\n dem_shade = ls.shade(dem, vert_exag=0.3, cmap=plt.get_cmap('gray'), vmin=disp_min, vmax=disp_max)\n dem_shade[np.isnan(dem_shade[:, :, 0])] = np.nan\n print('dem_shade.shape:',dem_shade.shape)\n # plot\n fig, ax = plt.subplots(figsize=fig_size)\n ax.imshow(dem_shade, interpolation='spline16', origin='upper')\n # get rid of whitespace on the side\n ax.axis('off')\n ax.get_xaxis().set_ticks([])\n ax.get_yaxis().set_ticks([])\n fig.subplots_adjust(left=0,right=1,bottom=0,top=1)\n # output\n print('save figure to file {}'.format(out_file))\n plt.savefig(out_file, transparent=True, dpi=300, pad_inches=0.0)\n \n #resize to desired size(FA 8/19, unclear why size is wrong)\n im = Image.open(out_file)\n im_out = im.resize(dem.shape, Image.NEAREST)\n im_out.save(out_file)\n \n #plt.show()", "def __init__(self, mapfile, xpos, zpos, emap, width=10.0, depth=10.0, height=10.0, name=\"building\", draw_details=None, yoff=0.0, scheme=None):\r\n self.xpos = xpos\r\n self.zpos = zpos\r\n self.width = width\r\n self.depth = depth\r\n self.height = height\r\n self.name = name\r\n self.ceilingthickness = 1.0\r\n self.walls = []\r\n\r\n if scheme == None:\r\n self.scheme = Building.baseScheme\r\n else:\r\n self.scheme = scheme\r\n\r\n # We don't have to be rigorous here, this should only be a 
draw_details or an iterable of draw_details.\r\n if hasattr(draw_details, \"__getitem__\") or hasattr(draw_details, \"__iter__\"):\r\n assert (len(draw_details) == self.scheme[\"#models\"])\r\n self.details = draw_details\r\n else:\r\n self.details = [draw_details for x in range(self.scheme[\"#models\"])]\r\n # having a method like this allows draw details to be set later\r\n\r\n self.yoff = yoff\r\n\r\n self.model = [MergeShape(name=name+\".\"+str(x)) for x in range(self.scheme[\"#models\"])]\r\n\r\n if mapfile[0] != '/':\r\n mapfile = sys.path[0] + '/' + mapfile\r\n print(\"Loading building map ...\", mapfile)\r\n\r\n im = Image.open(mapfile)\r\n im = ImageOps.invert(im)\r\n ix,iy = im.size\r\n\r\n print(\"image size\", ix, \",\", iy)\r\n\r\n startx = xpos - ix / 2 * width\r\n starty = zpos - ix / 2 * depth\r\n\r\n yoff += emap.calcHeight(-xpos,-zpos)\r\n\r\n if not im.mode == \"P\":\r\n im = im.convert('P', palette=Image.ADAPTIVE)\r\n im = im.transpose(Image.FLIP_TOP_BOTTOM)\r\n im = im.transpose(Image.FLIP_LEFT_RIGHT)\r\n pixels = im.load()\r\n\r\n for y in range(1,iy-1):\r\n print(\".\", end='')\r\n for x in range(1,ix-1):\r\n colour = pixels[x,y]\r\n\r\n if x == 1:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x-1,y], \"edge\"), wallfunc=self.west_wall, ceilingedgefunc=self.west_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x-1,y]), wallfunc=self.west_wall, ceilingedgefunc=self.west_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if x == ix-2:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x+1,y], \"edge\"), wallfunc=self.east_wall, ceilingedgefunc=self.east_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x+1,y]), wallfunc=self.east_wall, ceilingedgefunc=self.east_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if y == 1:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y-1], \"edge\"), wallfunc=self.south_wall, ceilingedgefunc=self.south_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y-1]), wallfunc=self.south_wall, ceilingedgefunc=self.south_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n if y == iy-2:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x, y+1], \"edge\"), wallfunc=self.north_wall, ceilingedgefunc=self.north_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n else:\r\n self._executeScheme(x, y, startx, starty, (colour, pixels[x,y+1]), wallfunc=self.north_wall, ceilingedgefunc=self.north_edge, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n self._executeScheme(x, y, startx, starty, (colour, None), wallfunc=None, ceilingedgefunc=None, ceilingfunc=self.ceiling, rooffunc=self.roof)\r\n\r\n self.set_draw_details(self.details) # after models created otherwise\r\n # details lost by merging\r", "def __repr__(self):\n \n s = '#cpt palette generated by gmt_interface.py\\n'\n s += '#COLOR_MODEL = %s\\n' %self.color_model\n s += '#\\n'\n \n for seg in self.segments:\n\n #print x, seg\n xmin = seg.lower_bound\n xmax = seg.upper_bound\n\n rgb_min = seg.rgb_min\n rgb_max = rgb_min + seg.rgb_dif\n \n # Determine number of decimal points\n xmax-xmin\n \n fmin = format_string(xmin) \n fmax = format_string(xmax)\n\n s += fmin %xmin\n s += ' %d %d %d ' %(rgb_min[0], rgb_min[1], rgb_min[2]) \n s += fmax %xmax\n s += ' %d %d %d ' %(rgb_max[0], rgb_max[1], 
rgb_max[2])\n s += ' %s' %seg.color_segment_boundary\n s += '\\n'\n \n return s", "def make_image(self, save=False):\n\n # image_grid = np.full((self.size_x, self.size_y), '#888888', dtype=str)\n image_grid = np.full((self.size_x, self.size_y, 3), 0, dtype=np.uint8)\n\n # self.grid = np.flip(self.grid, 1)\n\n # self.grid = np.swapaxes(self.grid, 0, 0)\n \"\"\"\n image_grid[self.grid == 0] = 'FFFFFF'\n image_grid[self.grid == 1] = '000000'\n image_grid[self.grid == 2] = '00FF00'\n image_grid[self.grid == 3] = '0000FF'\n image_grid[self.grid == 4] = 'FFFF00'\n image_grid[self.grid == 5] = '00FFFF'\n image_grid[self.grid == 6] = 'FF00FF'\n \"\"\"\n image_grid[self.grid == 0] = (1, 1, 1)\n image_grid[self.grid == 1] = (0, 0, 0)\n image_grid[self.grid == 2] = (1, 0, 1)\n image_grid[self.grid == 3] = (0, 1, 0)\n image_grid[self.grid == 4] = (0, 0, 1)\n image_grid[self.grid == 5] = (0, 1, 1)\n image_grid[self.grid == 6] = (1, 1, 0)\n\n #for ant in self.ants:\n # image_grid[ant.x, ant.y] = (1, 0, 0)\n\n # image_grid = image_grid.swapaxes(0, 1)\n # self.grid = self.grid.swapaxes(0, 1)\n\n\n\n DPI = 100\n width, height = 1000, 1000\n fig = plt.figure(figsize=(width / DPI, height / DPI), dpi=DPI, facecolor='k')\n ax = fig.add_subplot()\n\n plt.axis('equal')\n plt.subplots_adjust(top=1, bottom=0, right=1, left=0, hspace=0, wspace=0)\n\n for y in range(self.size_x):\n for x in range(self.size_y):\n if self.grid[x, y] != 0:\n # Only plot a hexagon if its state is not zero.\n plot_hex(ax, x, y, image_grid[x, y])\n\n ax.set_xlim(0, self.size_x)\n ax.set_ylim(0, self.size_y)\n\n plt.show()\n\n logging.info(\"Finished Image Processing\")" ]
[ "0.66217947", "0.62990767", "0.62990767", "0.58950996", "0.5866949", "0.5851361", "0.5850181", "0.5778608", "0.575116", "0.5668649", "0.5657043", "0.56522775", "0.56274706", "0.5611535", "0.56026417", "0.55969375", "0.55717343", "0.55542105", "0.55526847", "0.5550516", "0.55152977", "0.5489614", "0.548346", "0.54646397", "0.5458101", "0.5438773", "0.54106987", "0.54036564", "0.54009753", "0.5396738" ]
0.6512195
1
Draw a rounded rectangle around an image
def draw_rounded_rectangle( img: PilImage, color: PilColor, arc_size: int = 20 ) -> PilImage: x0, y0, x1, y1 = img.getbbox() x1 -= 1 y1 -= 1 copy = img.copy() draw = ImageDraw.Draw(copy) arc_size_half = arc_size // 2 draw.arc((x0, y0, arc_size, arc_size), start=180, end=270, fill=color) draw.arc((x1 - arc_size, y0, x1, arc_size), start=270, end=0, fill=color) draw.arc((x1 - arc_size, y1 - arc_size, x1, y1), start=0, end=90, fill=color) draw.arc((x0, y1 - arc_size, arc_size, y1), start=90, end=180, fill=color) draw.line((x0 + arc_size_half, y0, x1 - arc_size_half, y0), fill=color) draw.line((x1, arc_size_half, x1, y1 - arc_size_half), fill=color) draw.line((arc_size_half, y1, x1 - arc_size_half, y1), fill=color) draw.line((x0, arc_size_half, x0, y1 - arc_size_half), fill=color) return copy
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_rounded_rectangle(\n img: PilImage, color: PilColor, arc_size: int = 20\n) -> PilImage:\n # pylint: disable=invalid-name\n x0, y0, x1, y1 = img.getbbox()\n x1 -= 1\n y1 -= 1\n copy = img.copy()\n draw = ImageDraw.Draw(copy)\n arc_size_half = arc_size // 2\n draw.arc((x0, y0, arc_size, arc_size), start=180, end=270, fill=color)\n draw.arc((x1 - arc_size, y0, x1, arc_size), start=270, end=0, fill=color)\n draw.arc((x1 - arc_size, y1 - arc_size, x1, y1), start=0, end=90, fill=color)\n draw.arc((x0, y1 - arc_size, arc_size, y1), start=90, end=180, fill=color)\n draw.line((x0 + arc_size_half, y0, x1 - arc_size_half, y0), fill=color)\n draw.line((x1, arc_size_half, x1, y1 - arc_size_half), fill=color)\n draw.line((arc_size_half, y1, x1 - arc_size_half, y1), fill=color)\n draw.line((x0, arc_size_half, x0, y1 - arc_size_half), fill=color)\n return copy", "def drawRectangle(img, top_left, bottom_right, color = (0,0,255), thickness = 3):\n\tcv2.rectangle(img, top_left, bottom_right, color, thickness)", "def rounded_rectangle(src: np.array, top_left: tuple, bottom_right: tuple, cornerRadius: int = cornerRadius, color: tuple = (255,255,255), thickness: int = 1, lineType: int=cv2.LINE_AA) -> Any:\r\n # corners:\r\n # p1 - p2\r\n # | |\r\n # p4 - p3\r\n\r\n p1 = Point(top_left[0], top_left[1])\r\n p2 = Point(bottom_right[0], top_left[1])\r\n p3 = Point(bottom_right[0], bottom_right[1])\r\n p4 = Point(top_left[0], bottom_right[1])\r\n\r\n # Fill\r\n if thickness < 0:\r\n main_rect = [Point(p1.x + cornerRadius, p1.y), Point(p3.x - cornerRadius, p3.y)]\r\n left_rect = [Point(p1.x + cornerRadius, p1.y + cornerRadius), Point(p4.x, p4.y - cornerRadius)]\r\n right_rect = [Point(p2.x - cornerRadius, p2.y + cornerRadius), Point(p3.x, p3.y - cornerRadius)]\r\n\r\n [cv2.rectangle(src, rect[0].toTuple(), rect[1].toTuple(), color, thickness) for rect in [main_rect, left_rect, right_rect]]\r\n\r\n # Outline\r\n cv2.line(src, (p1.x+cornerRadius,p1.y), (p2.x-cornerRadius,p2.y), color, abs(thickness), lineType);\r\n cv2.line(src, (p2.x,p2.y+cornerRadius), (p3.x,p3.y-cornerRadius), color, abs(thickness), lineType);\r\n cv2.line(src, (p4.x+cornerRadius,p4.y), (p3.x-cornerRadius,p3.y), color, abs(thickness), lineType);\r\n cv2.line(src, (p1.x,p1.y+cornerRadius), (p4.x,p4.y-cornerRadius), color, abs(thickness), lineType);\r\n\r\n # Arc\r\n cv2.ellipse(src, (p1+Point(cornerRadius, cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 180.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p2+Point(-cornerRadius, cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 270.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p3+Point(-cornerRadius, -cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 0.0, 0, 90, color, thickness, lineType);\r\n cv2.ellipse(src, (p4+Point(cornerRadius, -cornerRadius)).toTuple(), (cornerRadius, cornerRadius), 90.0, 0, 90, color, thickness, lineType);", "def round_rect(self, surface, rect, color, rad=20, border=0, inside=(0,0,0,0)):\n rect = pygame.Rect(rect)\n zeroed_rect = rect.copy()\n zeroed_rect.topleft = 0,0\n image = pygame.Surface(rect.size).convert_alpha()\n image.fill((0,0,0,0))\n self.render_region(image, zeroed_rect, color, rad)\n if border:\n zeroed_rect.inflate_ip(-2*border, -2*border)\n self.render_region(image, zeroed_rect, inside, rad)\n surface.blit(image, rect)", "def round_rect(x, y, w, h, i):\n X, Y, W, H = int(x + 10), int(y + 10), int(w - 20), int(h - 20)\n\n pygame.draw.rect(gameDisplay, i, (x, Y, w, H))\n pygame.draw.rect(gameDisplay, i, 
(X, y, W, h))\n\n pygame.draw.circle(gameDisplay, i, (X, Y), 10)\n pygame.draw.circle(gameDisplay, i, (X + W, Y), 10)\n pygame.draw.circle(gameDisplay, i, (X, Y + H), 10)\n pygame.draw.circle(gameDisplay, i, (X + W, Y + H), 10)\n\n pygame.draw.rect(gameDisplay, i, (X, Y, W, H))", "def draw_rect(self, center_x, center_y, radius, thickness):\n\n center_x = int(center_x)\n center_y = int(center_y)\n radius = int(radius)\n thickness = int(thickness)\n\n edge_length = int(radius * 0.3)\n\n x_ranges = list(range(center_x - radius - thickness, center_x - edge_length)) + list(range(center_x + edge_length, center_x + radius + thickness))\n y_ranges = list(range(center_y - radius - thickness, center_y - radius)) + list(range(center_y + radius, center_y + radius + thickness))\n\n for x in x_ranges:\n for y in y_ranges:\n\n if self.image_width > x >= 0 and self.image_height > y >= 0: # for the frames' limit protection.\n [b, g, r] = self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, 1, 0])\n\n if g <= 100:\n if g == 0:\n g = 1\n self.image[y, x] = [0, 0, 1]\n greenness_rate = (255 / g) / 0.12\n self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, greenness_rate, 0])\n\n y_ranges = list(range(center_y - radius - thickness, center_y - edge_length)) + list(range(center_y + edge_length, center_y + radius + thickness))\n x_ranges = list(range(center_x - radius - thickness, center_x - radius)) + list(range(center_x + radius, center_x + radius + thickness))\n\n for y in y_ranges:\n for x in x_ranges:\n\n if self.image_width > x >= 0 and self.image_height > y >= 0: # for the frames' limit protection.\n [b, g, r] = self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, 1, 0])\n\n if g <= 100:\n if g == 0:\n g = 1\n self.image[y, x] = [0, 0, 1]\n greenness_rate = (255 / g) / 0.12\n self.image[y, x] = numpy.array(self.image[y, x]) * numpy.array([0, greenness_rate, 0])\n\n x_ranges = list(range(int(center_x - radius * 1.5), int(center_x - edge_length))) + list(range(int(center_x + edge_length), int(center_x + radius * 1.5)))\n\n for x in x_ranges:\n if self.image_width > x >= 0: # for the frames' limit protection.\n self.image[center_y, x] = numpy.array(self.image[center_y, x]) * numpy.array([0, 2, 0])\n\n y_ranges = list(range(int(center_y - radius * 1.5), int(center_y - edge_length))) + list(range(int(center_y + edge_length), int(center_y + radius * 1.5)))\n\n for y in y_ranges:\n if self.image_height > y >= 0: # for the frames' limit protection.\n self.image[y, center_x] = numpy.array(self.image[y, center_x]) * numpy.array([0, 2, 0])", "def round_corner(self,radius, fill):\r\n corner = Image.new('RGBA', (radius, radius), (0, 0, 0, 0))\r\n draw = ImageDraw.Draw(corner)\r\n draw.pieslice((0, 0, radius * 2, radius * 2), 180, 270, fill=fill)\r\n return corner", "def draw_round_rect(self, x, y, w, h, r, color=None, aa=False):\n self._draw_fast_hline(x + r, y, w - 2 * r, color, aa) # Top\n self._draw_fast_hline(x + r, y + h - 1, w - 2 * r, color, aa) # Bottom\n self._draw_fast_vline(x, y + r, h - 2 * r, color, aa) # Left\n self._draw_fast_vline(x + w - 1, y + r, h - 2 * r, color, aa) # Right\n # draw four corners\n self._draw_circle_helper(x + r, y + r, r, 1, color)\n self._draw_circle_helper(x + w - r - 1, y + r, r, 2, color)\n self._draw_circle_helper(x + w - r - 1, y + h - r - 1, r, 4, color)\n self._draw_circle_helper(x + r, y + h - r - 1, r, 8, color)", "def draw_rectangle(image, rect, color=(0,255,255)):\n x,y,w,h = rect\n cv2.rectangle(image, (x,y), (x+w,y+h), color, 
3)\n return image", "def draw_rounded_rect(self, context, x, y, width, height, radius, lineWidth):\n from math import pi\n degrees = pi / 180\n\n context.set_line_width(lineWidth)\n context.set_source_rgba(0.5, 0.0, 0.0, 1.0) # Red\n\n # cr.new_sub_path()\n context.arc(x + width - radius, y + radius, radius, -90 * degrees, 0 * degrees)\n context.arc(x + width - radius, y + height - radius, radius, 0 * degrees, 90 * degrees)\n context.arc(x + radius, y + height - radius, radius, 90 * degrees, 180 * degrees)\n context.arc(x + radius, y + radius, radius, 180 * degrees, 270 * degrees)\n context.close_path()\n context.stroke_preserve()\n context.set_source_rgba(0.0, 0.5, 0.5, 1.0)\n # and use it to fill the path (that we had kept)\n context.fill()\n context.stroke()", "def round_rect(x,y,w,h, i):\n X,Y,W,H=int(x+10),int(y+10),int(w-20),int(h-20)\n\n pygame.draw.rect(gameDisplay, i, (x,Y, w, H))\n pygame.draw.rect(gameDisplay, i, (X,y, W, h))\n\n pygame.draw.circle(gameDisplay, i, (X,Y), 10)\n pygame.draw.circle(gameDisplay, i, (X+W,Y), 10)\n pygame.draw.circle(gameDisplay, i, (X,Y+H), 10)\n pygame.draw.circle(gameDisplay, i, (X+W,Y+H), 10)\n\n pygame.draw.rect(gameDisplay, i, (X,Y,W,H))", "def DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.DC_DrawRoundedRectangle(*args, **kwargs)", "def round_corners(self, im, rad):\n circle = Image.new('L', (rad * 2, rad * 2), 0)\n draw = ImageDraw.Draw(circle)\n draw.ellipse((0, 0, rad * 2, rad * 2), fill=255)\n alpha = Image.new('L', im.size, 255)\n w, h = im.size\n alpha.paste(circle.crop((0, 0, rad, rad)), (0, 0))\n alpha.paste(circle.crop((0, rad, rad, rad * 2)), (0, h - rad))\n alpha.paste(circle.crop((rad, 0, rad * 2, rad)), (w - rad, 0))\n alpha.paste(circle.crop((rad, rad, rad * 2, rad * 2)), (w - rad, h - rad))\n im.putalpha(alpha)\n return im", "def DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.GraphicsContext_DrawRoundedRectangle(*args, **kwargs)", "def aa_round_rect(surface, rect, color, rad=20, border=0, inside=(0, 0, 0)):\n rect = pg.Rect(rect)\n _aa_render_region(surface, rect, color, rad)\n if border:\n rect.inflate_ip(-2 * border, -2 * border)\n _aa_render_region(surface, rect, inside, rad)", "def DrawRoundedRectangle(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRoundedRectangle(*args, **kwargs)", "def DrawRoundedRectangleRect(*args, **kwargs):\n return _gdi_.DC_DrawRoundedRectangleRect(*args, **kwargs)", "def DrawRoundedRectangleRect(*args, **kwargs):\n return _gdi_.PseudoDC_DrawRoundedRectangleRect(*args, **kwargs)", "def GetRoundBitmap(w, h, r):\n maskColor = wx.Colour(0, 0, 0)\n shownColor = wx.Colour(5, 5, 5)\n b = wx.Bitmap(w, h)\n dc = wx.MemoryDC(b)\n dc.SetBrush(wx.Brush(maskColor))\n dc.DrawRectangle(0, 0, w, h)\n dc.SetBrush(wx.Brush(shownColor))\n dc.SetPen(wx.Pen(shownColor))\n dc.DrawRoundedRectangle(0, 0, w, h, r)\n dc.SelectObject(wx.NullBitmap)\n b.SetMaskColour(maskColor)\n return b", "def drawCircleAroundRect(img, rect,color=(0,255,0)):\n drawCircle(img, (rect[0],rect[1]),color)\n drawCircle(img, (rect[0]+rect[2],rect[1]), color)\n drawCircle(img, (rect[0],rect[1]+rect[3]), color)\n drawCircle(img, (rect[0]+rect[2],rect[1]+rect[3]), color)", "def draw_bounding_box_on_image(image, ymin, xmin, ymax, xmax, color=(255, 0, 0), thickness=5):\n\n image_width = image.shape[1]\n image_height = image.shape[0]\n cv2.rectangle(image, (int(xmin), int(ymin)), (int(xmax), int(ymax)), color, thickness)", "def highlight_rect(image, rect, color=(125, 125, 25), thickness=1):\n return cv2.rectangle(image, (rect[0], rect[1]), 
(rect[0] + rect[2], rect[1] + rect[3]), color, thickness)", "def _get_round_edges_bitmap(width: int, height: int, radius: int):\n mask_color = opts['gui']['attrs']['mask_color']\n background_color = opts['gui']['attrs']['background_color']\n bitmap = wx.Bitmap(width, height)\n dc = wx.MemoryDC(bitmap)\n dc.SetBrush(wx.Brush(mask_color))\n dc.DrawRectangle(0, 0, width, height)\n dc.SetBrush(wx.Brush(background_color))\n dc.SetPen(wx.Pen(background_color))\n dc.DrawRoundedRectangle(0, 0, width, height, radius)\n bitmap.SetMaskColour(mask_color)\n return bitmap", "def render_region(self, image, rect, color, rad):\n corners = rect.inflate(-2*rad, -2*rad)\n for attribute in (\"topleft\", \"topright\", \"bottomleft\", \"bottomright\"):\n pygame.draw.circle(image, color, getattr(corners,attribute), rad)\n image.fill(color, rect.inflate(-2*rad,0))\n image.fill(color, rect.inflate(0,-2*rad))", "def draw_rectangle(self, roi, color, thickness=2):\n top_left = self._format_point(Point(roi[0], roi[1]))\n bottom_right = self._format_point(Point(roi[2], roi[3]))\n opencv.rectangle(self.img, top_left.tuple(), bottom_right.tuple(), color.bgra(), thickness=thickness)", "def addRoundRect(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads\r\n pass", "def imBox(self, width, height):\n img = Image.new(\"1\", (width, height))\n draw = ImageDraw.Draw(img)\n bgColor=255\n draw.rectangle((0,0) + img.size,fill=bgColor)\n return img", "def drawRectangle(x,y,width,height,rounding=0,ucoords=1):\n if ucoords:\n dislin.rlrnd(x,y,width,height,rounding)\n else:\n dislin.rndrec(x,y,width,height,rounding)", "def drawRectangle(img, rect, color):\n \n if len(rect) is not 4:\n # TODO throw error\n return;\n rect = rect * DISPLAY_SCALE;\n x1, y1, x2, y2 = rect.astype(numpy.int32);\n cv2.rectangle(img, (x1, y1), (x2, y2), color, 2);", "def draw_round_rect_filled(self, x, y, w, h, r, color=None, aa=False):\n self.draw_rect_filled(x + r, y, w - 2 * r, h, color, aa)\n self._draw_circle_filled_helper(x + w - r - 1, y + r, r,\n 1, h - 2 * r - 1, color)\n self._draw_circle_filled_helper(x + r, y + r, r, 2, h - 2 * r - 1, color)" ]
[ "0.7510413", "0.72969645", "0.7181461", "0.6953991", "0.6858651", "0.68553674", "0.6835767", "0.68216497", "0.6786222", "0.67735654", "0.6771525", "0.6707417", "0.66786045", "0.6653492", "0.66526777", "0.66202426", "0.6594753", "0.6554674", "0.6513422", "0.6496861", "0.64685637", "0.64378285", "0.64334244", "0.64303213", "0.641181", "0.6311513", "0.6281569", "0.6246603", "0.62209237", "0.62085694" ]
0.77442485
0
Create images of a list of molecules and save them to disk in a globally managed folder.
def save_molecule_images( molecules: Sequence[Molecule], frame_colors: Sequence[PilColor] ) -> Dict[Molecule, str]: global IMAGE_FOLDER spec = {} for molecule, frame_color in zip(molecules, frame_colors): image_filepath = os.path.join( IMAGE_FOLDER, f"{molecule.inchi_key}_{frame_color}.png" ) if not os.path.exists(image_filepath): image_obj = molecule_to_image(molecule, frame_color) image_obj.save(image_filepath) spec[molecule] = image_filepath return spec
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __save_to_dir(self, imagelist, prefix, PATH):\n for pair in imagelist:\n directory = os.path.join(PATH, pair[1])\n if not os.path.exists(directory):\n os.mkdir(directory)\n filename = prefix + pair[2]\n pair[0].save(os.path.join(directory, filename))\n print(\"Saved \" + os.path.join(directory, filename))", "def save_molecule_images(\n molecules: Sequence[Molecule], frame_colors: Sequence[PilColor], size: int = 300\n) -> Dict[Molecule, str]:\n global IMAGE_FOLDER\n\n try:\n images = molecules_to_images(molecules, frame_colors, size)\n # pylint: disable=broad-except\n except Exception: # noqa\n images = [\n molecule_to_image(molecule, frame_color, size)\n for molecule, frame_color in zip(molecules, frame_colors)\n ]\n\n spec = {}\n for molecule, image_obj in zip(molecules, images):\n image_filepath = os.path.join(IMAGE_FOLDER, f\"{molecule.inchi_key}.png\")\n image_obj.save(image_filepath)\n spec[molecule] = image_filepath\n return spec", "def createImages(self, geneticInstances):\n genomes = []\n for geneticInstance in geneticInstances:\n genomes.append(geneticInstance.toGenomeRepresentation())\n generatePlantImages(genomes)\n # We now have the output pictures. We'll get to them using the database instances' filenames", "def createAllImageFiles(poly, name) :\n \n for i in range(len(poly.getPaths())):\n fileName = name + \"_\" + str(i) + \".dot\"\n imgName = name + \"_\" + str(i) + \".jpg\"\n \n Command = \"neato -Tjpeg \" + fileName + \" -o \" + imgName\n run(Command, shell=True)", "def save_imgs(self, epoch):\n row, column = 5, 5\n\n # Generates r*c images from the model, saves them individually and as a gallery\n images_generated = self.generate_images(row * column)\n\n # ???\n images_generated = 0.5 * images_generated + 0.5\n\n for index, np_array_image in enumerate(images_generated):\n path = f\"{self.output_directory}/generated_{self.img_size[0]}x{self.img_size[1]}\"\n if not os.path.exists(path):\n os.makedirs(path)\n imsave(path + f\"/{unique_name()}_{epoch}_{index}.png\", np_array_image)\n\n # 4D array:\n nindex, height, width, intensity = images_generated.shape\n\n nrows = nindex // column\n\n assert nindex == nrows * column\n\n # Form the gallery by combining the data at pixel levels (may not be the best approach)\n # want result.shape = (height*n-rows, width*n-cols, intensity)\n gallery = (\n images_generated.reshape(nrows, column, height, width, intensity)\n .swapaxes(1, 2)\n .reshape(height * nrows, width * column, intensity)\n )\n\n path = f\"{self.output_directory}/gallery_generated_{self.img_size[0]}x{self.img_size[1]}\"\n if not os.path.exists(path):\n os.makedirs(path)\n imsave(path + f\"/{unique_name()}_{epoch}.png\", gallery)", "def create_list(self):\n for _ in range(self.count):\n id_ = random.randint(10000, 99999)\n self.ids.append(id_)\n self.img_paths.append(f\"{self.save_path}{self.name}/images/{id_}.png\")\n if hasattr(self, \"masks\"):\n self.masks.append(f\"{self.save_path}{self.name}/masks/{id_}.png\")", "def save_imgs(self):\n print(\"Saving the images with required categories ...\")\n os.makedirs(self.imgs_dir, exist_ok=True)\n # Save the images into a local folder\n for im in tqdm(self.images):\n img_data = requests.get(im['coco_url']).content\n with open(os.path.join(self.imgs_dir, im['file_name']), 'wb') as handler:\n handler.write(img_data)", "def storeAllOnDisk(self, path):\n # fetch meta data\n urls = list()\n y_data = self.data_dict.labels\n ids = self.data_dict.unique_ids\n urls = self.data_dict.paths\n\n # save in chunks of 1000 images\n cuts 
= [x for x in range(0, self.n_observations, 1000)]\n if cuts[-1] < self.n_observations:\n cuts.append(self.n_observations)\n\n # convert batch sizes to integers\n cuts = [int(x) for x in cuts]\n\n for i in range(0, (len(cuts) - 1)):\n\n idx = [x for x in range(cuts[i], cuts[i+1])]\n\n current_ids = [ids[z] for z in idx]\n current_urls = [urls[z] for z in idx]\n current_y = [y_data[z] for z in idx]\n\n # invoke asynchronous read\n binary_images = self.imageLoader.getImages(current_urls)\n\n # store on disk\n img_id = 0\n for c_id, c_y in zip(current_ids, current_y):\n # check directory\n if not os.path.isdir(path + str(c_y)):\n os.mkdir(path + str(c_y))\n # define path\n path_img = path + str(c_y) + \"/\" + \\\n str(c_id) + \".jpeg\"\n img = binary_images[img_id]\n img = img.resize(self.image_size)\n img.save(path_img)\n img_id += 1\n return None", "def appendpics(pathofimg, w_sub, h_sub, step):\n num = 0\n dirlist = []\n images = [] # images in each folder\n for root, dirs, fileswer in os.walk(pathofimg):\n if len(dirs)!= 0:\n for dir in dirs:\n dirlist.append(dir)\n for rooert, dirwerwes, files in os.walk(pathofimg+'/'+dir):\n for file in files:\n if(file.endswith('.png')):\n images.append(Image.open(pathofimg+'/'+dir+'/'+file))\n if(len(images)==81):\n break\n target = montage(images, w_sub, h_sub, step)\n target.save(pathofimg +'/'+ dir + '.png', quality=100)\n else:\n dir = 'Generated'\n for file in fileswer:\n if (file.endswith('.png')):\n images.append(Image.open(pathofimg +'/'+ file))\n target1 = montage(images, w_sub, h_sub, step)\n savepath = pathofimg +'/'+ 'generated'\n os.makedirs(savepath)\n target1.save(savepath +'/'+ dir + '.png', quality=100)", "def make_images(self):\n self._images = [tree.to_image() for tree in self.reaction_trees]\n self._update_route_dict(self._images, \"image\")", "def saveImages(image_list, name_list, path):\n\ti = 0\n\tfor image in image_list:\n\t\tname = name_list[i]\n\t\tio.imsave(path + \"/\" + name + \".jpg\", image)\n\t\ti += 1", "def molecules_to_images(\n mols: Sequence[Molecule],\n frame_colors: Sequence[PilColor],\n size: int = 300,\n) -> List[PilImage]:\n # Make sanitized copies of all molecules\n mol_copies = [mol.make_unique() for mol in mols]\n for mol in mol_copies:\n mol.sanitize()\n\n all_mols = Draw.MolsToGridImage(\n [mol.rd_mol for mol in mol_copies],\n molsPerRow=len(mols),\n subImgSize=(size, size),\n )\n if not hasattr(all_mols, \"crop\"): # Is not a PIL image\n fileobj = io.BytesIO(all_mols.data)\n all_mols = Image.open(fileobj)\n\n images = []\n for idx, frame_color in enumerate(frame_colors):\n image_obj = all_mols.crop((size * idx, 0, size * (idx + 1), size))\n image_obj = crop_image(image_obj)\n images.append(draw_rounded_rectangle(image_obj, frame_color))\n return images", "def insert_suppl_folders(path_mode):\n global REL_PATH_IMAGES\n os.mkdir(path_mode + '/allegati')\n if len(images_to_add) > 0:\n path_ex_images = path_mode + 'img'\n os.mkdir(path_ex_images)\n for img in images_to_add:\n path_img_src = os.getcwd() + '/' + REL_PATH_IMAGES + img\n shutil.copy2(path_img_src, path_ex_images)\n os.chmod(path_ex_images+\"/\"+img, S_IREAD)", "def create(path=\"cubes\",pathIm=\"cubes/img\"):\r\n\tobj_name=createNames(pathImg=pathIm)\r\n\tfor i in obj_name:\r\n\t\tfor j in 
obj_name[i]:\r\n\t\t\tdest=path+chr(47)+str(i)+\"_\"+str(j)\r\n\t\t\tcreate_mtl(dest+\".mtl\",\"img\"+chr(47)+str(i)+chr(47)+str(j)+\".png\")\r\n\t\t\tcreate_obj(dest+\".obj\",str(i)+\"_\"+str(j)+\".mtl\")\r\n\t\t\tcreate_urdf(dest+\".urdf\",dest+\".obj\")\r\n\treturn obj_name", "def save_images(all_patients, contour_type='i_contour',\n main_dir='final_data/images/'):\n\n # create folder for contour_type\n dirname = main_dir + f'{contour_type}/'\n os.makedirs(dirname, exist_ok=True)\n\n for patient in all_patients:\n # create patient folders for saving\n dirname = main_dir + f'{contour_type}/{patient.dicom_id}/'\n os.makedirs(dirname, exist_ok=True)\n\n # create numpy arrays for the patient\n patient.create_numpy_arrays()\n\n # loop over slices in numpy array dict\n for slice_no in patient.all_numpy_dict:\n slice_dict = patient.all_numpy_dict[slice_no]\n\n # only show image for given contour type\n if slice_dict[f'{contour_type}_array'] is not None:\n\n img_array = slice_dict['dicom_array']\n msk_array = slice_dict[f'{contour_type}_array']\n\n show_img_msk_fromarray(img_array,\n msk_array,\n cmap='Wistia',\n sz=10, alpha=0.7,\n save_path=dirname +f'slice_{slice_no}.png')", "def saveImages(saveImagePath,dataForSaving,enumeratedList):\n \n for i in range(len(dataForSaving[0])):\n singleChar = dataForSaving[0][i]\n singleImage = dataForSaving[1][i]\n \n if singleChar not in enumeratedList:\n enumeratedList.append(singleChar)\n \n dimension = int(singleImage.shape[0]**0.5)\n singleImage = Image.fromarray(np.resize(singleImage,(dimension,dimension)), 'L')\n \n copyVal = 0\n while os.path.exists('{}\\\\{}_copy{}.png'.format(saveImagePath,\\\n enumeratedList.index(singleChar),copyVal)):\n copyVal += 1\n \n singleImage.save('{}\\\\{}_copy{}.png'.format(saveImagePath,\\\n enumeratedList.index(singleChar),copyVal))", "def save_test_images(images):\n for description, img in images.items():\n save_to_image(img, description)\n save_to_netcdf(img, description)", "def writeXml(self):\n curdir = os.getcwd()\n os.chdir(self.Imagedir)\n allImageLists = [self.sciImlist, self.ctxImlist, self.wgtImlist, self.rmsImlist]\n \n for imlist in allImageLists:\n for im in imlist:\n file = xmlUtil.markupImage(im,dataset=self.obsName)\n \n # Don't write these images as output of this module, which\n # really doesn't have any.\n \n #if file not in self.outputList.keys():\n # self.outputList[file] = [im]\n \n os.chdir(curdir)\n return", "def save_step_1(imgs, output_path='./output/step1'):\n # ... 
your code here ...\n i=0\n for each in imgs:\n i+=1\n cv2.imwrite(output_path+\"/output\"+str(i)+\".jpg\", each)", "def move_images_and_list(path, final_path):\n #Lists all created folders\n directories = os.listdir(path)\n #Array that stores the path to each image\n lists = []\n #This variable will be used to give a unique name to each image\n tot_images = 0\n #Creates the path where will be stored all files\n if not os.path.exists(final_path):\n os.mkdir(final_path)\n #Iterates over each folder\n for ph in directories:\n #Iterates over each line of the generated file images.lst\n for img in open(os.path.join(path, ph, \"images.lst\")).readlines():\n \"\"\"Images are stored with a name, how many objects have and\n where it is, like this '01_0252_0067_0139_0222.jpg 1 252 67 139 222'\n so these five lines under changes the first part before '_', because\n in some cases, the command opencv_createsamples creates a same name\n to different positive images, this ensures a different name to each\n image\"\"\"\n split_space = img.split()\n split_underscore = split_space[0].split(\"_\")\n split_underscore[0] = str(tot_images)\n join_underscore = \"_\".join(split_underscore)\n join_space = \" \".join([join_underscore, *split_space[1:]])\n #Appends the new image's name to the list\n lists.append(join_space)\n #Moves each image in the folder to the final path, with a new name\n move(os.path.join(path, ph, split_space[0]),\n os.path.join(final_path, join_space.split()[0]))\n tot_images += 1\n #Writes a file withe the name of all images in the folder\n with open(os.path.join(final_path, \"images.lst\"), \"w+\") as f:\n for i in lists:\n f.write(\"\".join([i, '\\n']))\n #Removes the temporary path\n rmtree(os.path.abspath(path))\n #Name of the created file\n return \"images.lst\"", "def make_processed_directories(zone, region, zoom_level = 19, image_size = 256):\n os.system(f'mkdir ../../data/processed/images-{image_size}-{region}-{zone}-{zoom_level}')\n os.system(f'mkdir ../../data/processed/masks-{image_size}-{region}-{zone}-{zoom_level}')\n img_path = f'../../data/processed/images-{image_size}-{region}-{zone}-{zoom_level}'\n mask_path = f'../../data/processed/masks-{image_size}-{region}-{zone}-{zoom_level}'\n return img_path, mask_path", "def create_preset_images(self):\n for f in sorted(self.get_files_from_data()):\n photoInstances = {}\n for preset in self.generator.settings[\"GALLERY_PRESETS\"]:\n preset_dir = \"%s%s%s\" % (self.absolute_output_path,\n os.sep, \n preset[\"name\"])\n photoInstances[preset[\"name\"]] = Photo(self, f, preset_dir, preset)\n \n self.photos.append(photoInstances)", "def save(images, output):\n for image, frame in images:\n image.save(output(frame))", "def save_reconstructions(reconstructions, out_dir):\n if (not (os.path.exists(out_dir))):\n os.mkdir(out_dir)\n out_dir.mkdir(exist_ok=True)\n print('Saved directory is',out_dir)\n for fname, recons in reconstructions.items():\n with h5py.File(out_dir / fname, 'w') as f:\n f.create_dataset('reconstruction', data=recons)", "def save_images_in_folder(folder, images, size = (224, 224), start_index = 0):\n\n # Loop over the images\n for i, image in enumerate(images):\n # Resize image to target size\n image = cv2.resize(image, dsize = size)\n \n # Convert image back to BGR color space\n image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n \n # Create the path where the image will be save, images will be indexed\n path = os.path.join(folder, str(start_index + i) + '.png')\n \n # Write / Save image \n cv2.imwrite(path, image)", "def 
generate_and_save_images(model, seed, output_path, title):\n\n predictions = model(tf.Variable(seed, trainable=False))\n\n fig = plt.figure(figsize=(4,4))\n\n for i in range(predictions.shape[0]):\n plt.subplot(4, 4, i+1)\n plt.imshow(denormalize_generate_image(predictions[i, :, :, 0]), cmap='gray')\n plt.axis('off')\n if not os.path.exists(output_path):\n os.mkdir(output_path)\n\n plt.savefig(os.path.join(output_path, '{}.png'.format(title)))\n plt.close()", "def generate_images(\n network_pkl,\n seeds,\n truncation_psi,\n noise_mode,\n outdir \n):\n\n print('Loading networks from \"%s\"...' % network_pkl)\n # device = torch.device('cuda')\n device = torch.device('cpu')\n with dnnlib.util.open_url(network_pkl) as f:\n G = legacy.load_network_pkl(f)['G_ema'].to(device) # type: ignore\n\n os.makedirs(outdir, exist_ok=True)\n\n \n\n # if seeds is None:\n # ctx.fail('--seeds option is required when not using --projected-w')\n\n # Labels.\n label = torch.zeros([1, G.c_dim], device=device)\n\n # Generate images.\n file_list = []\n for seed_idx, seed in enumerate(seeds):\n print('Generating image for seed %d (%d/%d) ...' % (seed, seed_idx, len(seeds)))\n z = torch.from_numpy(np.random.RandomState(seed).randn(1, G.z_dim)).to(device)\n # img = G(z, label, truncation_psi=truncation_psi, noise_mode=noise_mode)\n img = G(z, label, truncation_psi=truncation_psi, noise_mode=noise_mode, force_fp32=True)\n img = (img.permute(0, 2, 3, 1) * 127.5 + 128).clamp(0, 255).to(torch.uint8)\n filename = f'{outdir}/seed{seed:04d}_{str(truncation_psi)}.png'\n PIL.Image.fromarray(img[0].cpu().numpy(), 'RGB').save(filename)\n file_list.append(filename)\n return file_list", "def save_images(self):\n for q in range(self.N_itr):\n plt.clf()\n self.plot_EM_estimate(q)\n plt.savefig('img%d.png' % (100 + q))", "def save_to_images(self):\n \n logging.debug(\"save_to_images called\")\n # return None\n notify(\"Saving to images\")\n # first, create the images\n image_map = {}\n for machine in self.machines:\n logging.info(\"Creating image for %s\" % machine)\n notify(\"Creating image for %s\" % machine)\n m = self.machines[machine]\n img_id = m.create_image()\n logging.debug(\"machine: %s, img_id: %s\" % (str(machine), str(img_id) ))\n\n old_img_id = self.images.get(m.machine_name, None)\n if old_img_id:\n logging.info(\"machine %s old image added to old_images %s \" % ( str(machine), str(old_img_id) ))\n self.old_images.append(old_img_id)\n image_map[m.machine_name] = img_id\n \n # print image_map\n # FIXME: this needs to be updating the cloudfiles\n # savefile = open(self.savefile, 'w')\n # yaml.dump(image_map, savefile)\n # savefile.close()\n # print self.images\n # print image_map\n notify(\"Saving config\")\n self.images = image_map\n self.save()", "def create_paths(manager, parentpath=\"extractor_test_results/HoG/\"):\n \n paths_to_create = [\"data/features_all\", \"data/features_filled\",\n \"data/pair/both\", \"hog_images\", \"hog_plots\",\n \"orig_frames\", \"processed_frames\", \"evaluation\"]\n \n for path in paths_to_create:\n manager.make_folder(parentpath + path)" ]
[ "0.6778279", "0.6587836", "0.6533606", "0.6498707", "0.64328074", "0.6420537", "0.62873924", "0.62810856", "0.6238603", "0.6238188", "0.62225974", "0.61654925", "0.61612916", "0.60948086", "0.6073354", "0.60659975", "0.6053131", "0.60344714", "0.5995892", "0.59935164", "0.5988198", "0.59871304", "0.595117", "0.5950829", "0.59448075", "0.5941152", "0.5928216", "0.58972055", "0.5878435", "0.58775973" ]
0.68451995
0
Create an image of a bipartite graph of molecules and reactions using the dot program of graphviz
def make_graphviz_image( molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]], reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]], edges: Sequence[Tuple[Any, Any]], frame_colors: Sequence[PilColor], ) -> PilImage: def _create_image(use_splines): txt = template.render( molecules=mol_spec, reactions=reactions, edges=edges, use_splines=use_splines, ) _, input_name = tempfile.mkstemp(suffix=".dot") with open(input_name, "w") as this_fileobj: this_fileobj.write(txt) _, output_img2 = tempfile.mkstemp(suffix=".png") ext = ".bat" if sys.platform.startswith("win") else "" subprocess.call([f"dot{ext}", "-T", "png", f"-o{output_img2}", input_name]) if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0: raise FileNotFoundError( "Could not produce graph with layout - check that 'dot' command is in path" ) return output_img2 mol_spec = save_molecule_images(molecules, frame_colors) template_filepath = os.path.join(data_path(), "templates", "reaction_tree.dot") with open(template_filepath, "r") as fileobj: template = Template(fileobj.read()) template.globals["id"] = id # type: ignore try: output_img = _create_image(use_splines=True) except FileNotFoundError: output_img = _create_image(use_splines=False) return Image.open(output_img)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_graphviz_image(\n molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]],\n reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]],\n edges: Sequence[Tuple[Any, Any]],\n frame_colors: Sequence[PilColor],\n reaction_shapes: Sequence[str] = None,\n use_splines: bool = True,\n) -> PilImage:\n\n def _create_image(use_splines):\n txt = template.render(\n molecules=mol_spec,\n reactions=rxn_spec,\n edges=edges,\n use_splines=use_splines,\n )\n _, input_name = tempfile.mkstemp(suffix=\".dot\")\n with open(input_name, \"w\") as this_fileobj:\n this_fileobj.write(txt)\n\n _, output_img2 = tempfile.mkstemp(suffix=\".png\")\n ext = \".bat\" if sys.platform.startswith(\"win\") else \"\"\n subprocess.call([f\"dot{ext}\", \"-T\", \"png\", f\"-o{output_img2}\", input_name])\n if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0:\n raise FileNotFoundError(\n \"Could not produce graph with layout - check that 'dot' command is in path\"\n )\n return output_img2\n\n mol_spec = save_molecule_images(molecules, frame_colors)\n reaction_shapes = reaction_shapes or [\"circle\"] * len(reactions)\n rxn_spec = zip(reactions, reaction_shapes)\n\n template_filepath = os.path.join(data_path(), \"templates\", \"reaction_tree.dot\")\n with open(template_filepath, \"r\") as fileobj:\n template = Template(fileobj.read())\n template.globals[\"id\"] = id # type: ignore\n\n if not use_splines:\n output_img = _create_image(use_splines=False)\n return Image.open(output_img)\n\n try:\n output_img = _create_image(use_splines=True)\n except FileNotFoundError:\n output_img = _create_image(use_splines=False)\n\n return Image.open(output_img)", "def create_graph(dot, filename=\"network\"):\n proc = subprocess.Popen('dot -Tpng > %s.png' % filename,\n shell=True,\n stdin=subprocess.PIPE\n )\n proc.communicate(dot.encode('utf_8'))\n execvp('open', ['open', '%s.png'%filename,])", "def create_dot(nodes, assocs, hierarchy):\n def field_names(fields):\n return ' | '.join(sorted(fields))\n out = StringIO()\n print >> out, \"digraph phemi_class_diagram {\"\n print >> out, \" node[shape=record];\"\n for clazz, fields in nodes.iteritems():\n print >> out, ' \"%s\" [label=\"{%s | %s}\"];' % (\n fullname(clazz), clazz.__name__, field_names(fields)\n )\n for edgemap in [assocs, hierarchy]:\n for clazz, edges in edgemap.iteritems():\n for edge in edges:\n print >> out, ' \"%s\" -> \"%s\" %s' % (\n fullname(clazz), fullname(edge.dst), edge.style\n )\n print >> out, \"}\"\n return out.getvalue()", "def plot_dag(\n self,\n filename,\n traverser,\n node_size=500,\n label_font_size=12,\n text_angle=0,\n image_width=16,\n image_height=12,\n ):\n # map nodes to a color for their operation type\n # https://stackoverflow.com/questions/27030473/how-to-set-colors-for-nodes-in-networkx-python\n color_map = []\n colors = [\"#fbb4ae\", \"#b3cde3\", \"#ccebc5\", \"#decbe4\", \"#fed9a6\"]\n for node in self.G2:\n if self.node_map[node] == OperationType.reader.value:\n color_map.append(colors[0])\n elif self.node_map[node] == OperationType.pipeline.value:\n color_map.append(colors[1])\n elif self.node_map[node] == OperationType.model.value:\n color_map.append(colors[2])\n elif self.node_map[node] == OperationType.writer.value:\n color_map.append(colors[3])\n else:\n color_map.append(colors[4])\n\n fig = plt.figure(figsize=(image_width, image_height))\n ax = plt.subplot(111)\n ax.set_title(filename, fontsize=10)\n\n try:\n import pydot\n from networkx.drawing.nx_pydot import graphviz_layout\n except 
ImportError: # pragma: no cover\n raise ImportError(\n \"This example needs Graphviz and pydot.\"\n \"Please refer to the Plotting requirements in the README\"\n )\n\n # pos = nx.spring_layout(G)\n # pos = nx.circular_layout(G)\n # pos = nx.kamada_kawai_layout(G)\n # pos = nx.shell_layout(G)\n # pos = nx.spectral_layout(G)\n pos = graphviz_layout(self.G2, prog=\"dot\") # , prog='twopi', args='')\n\n nx.draw(\n self.G2,\n pos,\n node_size=node_size,\n node_color=color_map,\n edge_color=\"#939393\",\n font_size=8,\n font_weight=\"bold\",\n )\n # nx.draw_networkx_nodes(G, pos, node_color='b', node_size=500, alpha=0.8)\n\n if len(self.conditional_nodes) > 0:\n cnodes = nx.draw_networkx_nodes(\n self.G2,\n pos,\n node_color=\"#e6b655\",\n node_size=1.5 * node_size,\n alpha=0.8,\n node_shape=\"D\",\n nodelist=list(self.conditional_nodes),\n )\n cnodes.set_edgecolor(\"red\")\n\n # nx.draw_networkx_labels(self.G2,pos, font_size=9)\n\n text = nx.draw_networkx_labels(\n self.G2, pos, font_size=label_font_size\n )\n\n if traverser:\n # map node name to sequence number\n sequence = traverser.traversal_list()\n idx = list(range(1, len(sequence) + 1))\n d = dict(zip(sequence, idx))\n\n # let's plot the sequence numner above the node. How far above it?\n ys = [t._y for _, t in text.items()]\n ysrange = max(ys) - min(ys)\n offset = 0.02 * abs(ysrange)\n\n for _, t in text.items():\n t.set_rotation(text_angle)\n\n if traverser:\n plt.text(t._x, t._y + offset, d[t._text], fontsize=24, color=\"red\")\n\n plt.axis(\"off\")\n plt.tight_layout()\n plt.savefig(filename, format=\"PNG\")\n logging.info(\"Graph written to %s\" % filename)", "def generateGraph(mids, chaptersField, labelsField):\n output = \"digraph G { \\n\"\n # On ne traite que les chapitres qui ont actives le graphe\n chapts = chapters.graphChapters()\n # le dico nodes contient une liste pour chaque chapitre. 
Chaque liste\n # contient tous les neuds (un par note) presents dans ce chapitre, et\n # representes par des tuples (noteId, label)\n nodes = {}\n for mid in mids:\n chapterField = chaptersField[mid]\n labelField = labelsField[mid]\n for id, flds in mw.col.db.execute(\"\"\"\n SELECT id, flds FROM notes WHERE mid=%d\n \"\"\" % mid):\n fields = splitFields(flds)\n chapter = fields[chapterField]\n if not chapter in chapts:\n continue\n label = fields[labelField]\n if(not chapter in nodes):\n nodes[chapter] = []\n nodes[chapter].append((id, label))\n # On genere les noeuds, dans des clusters (un par chapitre)\n notes = []\n for chap in nodes:\n output += \"\"\"subgraph cluster_%d {\n node [style=filled];\n label = \"%s\";\n color=blue;\n \"\"\" % (chapts[chap], chap)\n for n in nodes[chap]:\n output += \"\"\"n%d [label=\"%s\", URL=\"%d\"];\\n\"\"\" % (n[0], n[1], n[0])\n notes.append(n)\n output += \"\"\"\n }\\n\"\"\"\n # Puis on ajoute tous les liens ..\n for n in notes:\n for nid in mw.col.db.execute(\"\"\"SELECT N.noteId FROM `PATH.links` AS L\n JOIN `PATH.match` AS M ON M.id = L.matchId\n JOIN `PATH.nodes` AS N ON M.nodeId = N.id\n WHERE L.noteId = %d\"\"\" % (n[0])):\n output += \"\"\"n%d -> n%d;\\n\"\"\" % (nid[0], n[0])\n output += \"}\"\n generateGraphImage(output)", "def build_graphviz(input_dim, output_dim, num_intermediate, \n connections, activations, activation_labels):\n \n if not is_valid_adjacency_matrix(connections, num_intermediate, input_dim, output_dim):\n raise ValueError(\"Connectivity matrix is invalid\")\n num_emitting = num_intermediate + input_dim\n num_receiving = num_intermediate + output_dim\n size = num_emitting + output_dim\n dag = graphviz.Digraph()\n #add nodes labeled by activation functions\n for i in range(size):\n node=str(i)\n if i < input_dim:\n label = \"input %d\" % i\n attrs = {}\n else:\n act_index = activations[i-input_dim].item()\n act_label = activation_labels[act_index]\n attrs = {\n 'activation_index': str(act_index),\n 'activation_label': str(act_label)\n } \n if i >= num_emitting:\n label = f\"output {i-num_emitting}\"\n else:\n label = None\n\n dag.node(node, label=label, **attrs)\n #add edges\n edgelist = []\n for i in range(num_receiving):\n rec_index = i + input_dim\n for emitting_index in range(min(rec_index, num_emitting)):\n if connections[i, emitting_index] > 0:\n edgelist.append((str(emitting_index), str(rec_index)))\n dag.edges(edgelist)\n act_mapping = {str(i) : activation_labels[i] for i in range(len(activation_labels))}\n dag.attr(**act_mapping)\n return dag", "def render(self): # pragma: no cover\n from graphviz import Digraph\n dot = Digraph(name=\"top\")\n for block in self.blocks:\n if isinstance(block, Branch):\n label = \"if \" + astor.to_source(block.cond)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"invhouse\"})\n elif isinstance(block, Yield):\n label = astor.to_source(block.value)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"oval\"})\n elif isinstance(block, BasicBlock):\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), 
{\"shape\": \"box\"})\n elif isinstance(block, HeadBlock):\n label = \"Initial\"\n dot.node(str(id(block)) + \"_start\", label.rstrip(), {\"shape\": \"doublecircle\"})\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.initial_statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"box\"})\n dot.edge(str(id(block)) + \"_start\", str(id(block)))\n else:\n raise NotImplementedError(type(block))\n # for source, sink, label in self.edges:\n for sink, label in block.outgoing_edges:\n dot.edge(str(id(block)), str(id(sink)), label)\n\n\n file_name = tempfile.mktemp(\"gv\")\n dot.render(file_name, view=True)\n # with open(\"cfg.dot\", \"w\") as file:\n # file.write(dot.source)\n # exit()", "def graphviz_prettify(self, network):\n graph_settings = {\n 'rankdir': 'LR',\n 'dpi': 60,\n }\n network.graph.update(graph_settings)\n\n for n in network.nodes():\n if isinstance(n, Variable):\n network.nodes[n]['label'] = n.name\n elif isinstance(n, Equation):\n network.nodes[n]['shape'] = 'diamond'", "def graphviz_dot(sentence, font=\"Arial\", colors=BLUE):\n s = 'digraph sentence {\\n'\n s += '\\tranksep=0.75;\\n'\n s += '\\tnodesep=0.15;\\n'\n s += '\\tnode [penwidth=1, fontname=\"%s\", shape=record, margin=0.1, height=0.35];\\n' % font\n s += '\\tedge [penwidth=1];\\n'\n s += '\\t{ rank=same;\\n'\n # Create node groups for words, chunks and PNP chunks.\n for w in sentence.words:\n s += '\\t\\tword%s [label=\"<f0>%s|<f1>%s\"%s];\\n' % (w.index, w.string, w.type, _colorize(w, colors))\n for w in sentence.words[:-1]:\n # Invisible edges forces the words into the right order:\n s += '\\t\\tword%s -> word%s [color=none];\\n' % (w.index, w.index+1)\n s += '\\t}\\n'\n s += '\\t{ rank=same;\\n' \n for i, ch in enumerate(sentence.chunks):\n s += '\\t\\tchunk%s [label=\"<f0>%s\"%s];\\n' % (i+1, \"-\".join([x for x in (\n ch.type, ch.role, str(ch.relation or '')) if x]) or '-', _colorize(ch, colors))\n for i, ch in enumerate(sentence.chunks[:-1]):\n # Invisible edges forces the chunks into the right order:\n s += '\\t\\tchunk%s -> chunk%s [color=none];\\n' % (i+1, i+2)\n s += '}\\n'\n s += '\\t{ rank=same;\\n'\n for i, ch in enumerate(sentence.pnp):\n s += '\\t\\tpnp%s [label=\"<f0>PNP\"%s];\\n' % (i+1, _colorize(ch, colors))\n s += '\\t}\\n'\n s += '\\t{ rank=same;\\n S [shape=circle, margin=0.25, penwidth=2]; }\\n'\n # Connect words to chunks.\n # Connect chunks to PNP or S.\n for i, ch in enumerate(sentence.chunks):\n for w in ch:\n s += '\\tword%s -> chunk%s;\\n' % (w.index, i+1)\n if ch.pnp:\n s += '\\tchunk%s -> pnp%s;\\n' % (i+1, sentence.pnp.index(ch.pnp)+1)\n else:\n s += '\\tchunk%s -> S;\\n' % (i+1)\n if ch.type == 'VP':\n # Indicate related chunks with a dotted\n for r in ch.related:\n s += '\\tchunk%s -> chunk%s [style=dotted, arrowhead=none];\\n' % (\n i+1, sentence.chunks.index(r)+1)\n # Connect PNP to anchor chunk or S.\n for i, ch in enumerate(sentence.pnp):\n if ch.anchor:\n s += '\\tpnp%s -> chunk%s;\\n' % (i+1, sentence.chunks.index(ch.anchor)+1)\n s += '\\tpnp%s -> S [color=none];\\n' % (i+1)\n else:\n s += '\\tpnp%s -> S;\\n' % (i+1)\n s += \"}\"\n return s", "def visualize(self):\n dot = Graph()\n \n for k, v in self.vs.items():\n if v.observed:\n dot.node(v.word, style=\"filled\")\n else:\n dot.node(v.word)\n\n for i, (k, v) in enumerate(self.fs.items()):\n 
dot.node(str(i), shape=\"square\", style=\"bold\")\n s, t = k[1], k[3]\n dot.edge(s, str(i))\n dot.edge(t, str(i))\n \n print dot.source\n #src.render('test-output/holy-grenade.gv', view=True)", "def generate(self, diagram):", "def write_dot(cs, path):\n assert len(path)!=0, \"Filename can't be empty\"\n output_file = open(path, \"w\")\n output_file.write(\"digraph L{\")\n output_file.write(\"\\n\")\n output_file.write(\"node[shape=circle,style=filled,label=\\\"\\\"];\")\n output_file.write(\"\\n\")\n output_file.write(\"edge[dir=\\\"none\\\",minlen=2];\")\n output_file.write(\"\\n\")\n\n own_objects = find_own_objects(cs)\n own_attributes = find_own_attributes(cs)\n for i in xrange(len(cs)):\n output_file.write(\"c%i [width=0.25]\\n\" % i)\n if len(own_objects[cs[i]])!=0:\n output_file.write(\"c%i -> c%i\" % (i, i))\n # TODO:\n if len(own_objects[cs[i]]) >= 5:\n own_objects[cs[i]] = [str(len(own_objects[cs[i]]))]\n output_file.write(\"[headlabel=\\\"%s\\\", \" %\\\n \"; \".join(own_objects[cs[i]]))\n output_file.write(\n \"labeldistance=1,labelangle=270,color=transparent]\\n\")\n if len(own_attributes[cs[i]])!=0:\n output_file.write(\"c%i -> c%i\" % (i, i))\n # TODO:\n if len(own_attributes[cs[i]]) >= 5:\n own_attributes[cs[i]] = [str(len(own_attributes[cs[i]]))]\n output_file.write(\"[taillabel=\\\"%s\\\", \" %\\\n \"; \".join(own_attributes[cs[i]]))\n output_file.write(\n \"labeldistance=2,labelangle=90,color=transparent]\\n\")\n\n parents = fca.compute_covering_relation(cs)\n for i in xrange(len(cs)):\n for p in parents[cs[i]]:\n output_file.write(\"c%i -> c%i\\n\" % (cs.index(p), i))\n output_file.write(\"}\")\n\n output_file.close()", "def fig5b():\n # fmt: off\n tpm = np.array([\n [1, 0, 0],\n [1, 1, 1],\n [1, 0, 1],\n [1, 1, 1],\n [1, 1, 0],\n [1, 1, 1],\n [1, 1, 1],\n [1, 1, 1],\n ])\n cm = np.array([\n [0, 1, 1],\n [0, 0, 1],\n [0, 1, 0],\n ])\n # fmt: on\n return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])", "def graphviz(A,B):\r\n N = len(A) \r\n sc = 1.0 / len(B)\r\n \r\n print \"digraph AdjacencyMatrix {\"\r\n print \"\\tgraph[label=\\\"Graph representing the weight of the edge and adjacent\\\",labelloc =t];\"\r\n for i in range(N):\r\n #p = colorsys.hsv_to_rgb(sc*i,1.0,1.0)\r\n p = colorsys.hsv_to_rgb(sc*B[i],1.0,1.0)\r\n print \"\\t%s [style = filled, color=\\\"#000000\\\" fillcolor = \\\"#%s%s%s\\\"];\" \\\r\n % (i+1, \"00\" if p[0] == 0.0 else hex(int(p[0]*255)).replace(\"0x\",\"\"), \\\r\n \"00\" if p[1] == 0.0 else hex(int(p[1]*255)).replace(\"0x\",\"\"), \\\r\n \"00\" if p[2] == 0.0 else hex(int(p[2]*255)).replace(\"0x\",\"\") )\r\n for j in range(N): \r\n if i != j and A[i,j] != 0.0:\r\n print \"\\t%s->%s\\t[label=\\\"%s\\\",color=\\\"%s\\\"];\" \\\r\n % (j+1, i+1, A[i,j], \"red\" if A[i,j] < 0.0 else \"blue\")\r\n print \"}\"", "def plot_pipeline_diagrams(phenotype):\n # Read data\n n = 10\n replication_results = pd.read_table(f'{phenotype}_replication_scores.txt', sep=\"\\t\").head(n)\n\n # Wider ranksep for the more convoluted result\n if phenotype == \"p_cd4difw48w4\":\n ranksep = 4\n else:\n ranksep = 2\n\n # Set up graph\n graph = pydot.Dot(graph_type='digraph', rankdir=\"LR\", ranksep=ranksep, nodesep=0.02,\n label=f\"{VARIABLE_DESCRIPTIONS[phenotype]}\", labelloc=\"t\", fontsize=TITLE_FONTSIZE)\n\n # Set up clusters\n cluster_fss = pydot.Cluster('fss', label='Feature Set Selector', rank=\"same\", penwidth=0)\n cluster_transformer = pydot.Cluster('transformer', label='Transformer', rank=\"same\", penwidth=0)\n cluster_regressor = 
pydot.Cluster('regressor', label='Regressor', rank=\"same\", penwidth=0)\n cluster_score = pydot.Cluster('score', label='R^2 Score', rank=\"same\", penwidth=0)\n\n # Add clusters\n graph.add_subgraph(cluster_fss)\n graph.add_subgraph(cluster_transformer)\n graph.add_subgraph(cluster_regressor)\n graph.add_subgraph(cluster_score)\n\n # Setup representative nodes and add them to their clusters\n cluster_fss_node = pydot.Node('cluster_fss', style='invis', shape='point')\n cluster_fss.add_node(cluster_fss_node)\n cluster_transformer_node = pydot.Node('cluster_transformer', style='invis', shape='point')\n cluster_transformer.add_node(cluster_transformer_node)\n cluster_regressor_node = pydot.Node('cluster_regressor', style='invis', shape='point')\n cluster_regressor.add_node(cluster_regressor_node)\n cluster_score_node = pydot.Node('cluster_score', style='invis', shape='point')\n cluster_score.add_node(cluster_score_node)\n\n # Link Clusters via their representative nodes\n graph.add_edge(pydot.Edge(cluster_fss_node, cluster_transformer_node, style=\"invisible\", arrowhead=\"none\", weight=1000))\n graph.add_edge(pydot.Edge(cluster_transformer_node, cluster_regressor_node, style=\"invisible\", arrowhead=\"none\", weight=1000))\n graph.add_edge(pydot.Edge(cluster_regressor_node, cluster_score_node, style=\"invisible\", arrowhead=\"none\", weight=1000))\n\n # Create Nodes\n fss_nodes = []\n for fss in replication_results['FSS Name'].unique():\n node = pydot.Node(fss, label=format_fss_name(fss), shape='box', style='rounded', fontsize=TEXT_FONTSIZE)\n cluster_fss.add_node(node)\n fss_nodes.append(node)\n transformer_nodes = []\n for transformer in replication_results['Transformer'].unique():\n node = pydot.Node(transformer, fontsize=TEXT_FONTSIZE)\n cluster_transformer.add_node(node)\n transformer_nodes.append(node)\n regressor_nodes = []\n for regressor in replication_results['Regressor'].unique():\n node = pydot.Node(regressor, fontsize=TEXT_FONTSIZE)\n cluster_regressor.add_node(node)\n regressor_nodes.append(node)\n\n # Create score nodes from min score to max score, marking every 0.001\n max_score = math.ceil(replication_results['R^2 Score'].max() * 100) / 100\n min_score = math.floor(replication_results['R^2 Score'].min() * 100) / 100\n last = None\n\n # Iterate through a range of scores using integers\n i = max_score * 1000\n while i >= (min_score * 1000): \n score = format_score(i/1000)\n if i % 10 == 0:\n node = pydot.Node(score, shape=\"plain\", label=score, fontsize=TEXT_FONTSIZE)\n else:\n node = pydot.Node(score, shape=\"point\")\n cluster_score.add_node(node)\n # Decrement\n i -= 1\n # Add edge\n if last is not None:\n cluster_score.add_edge(pydot.Edge(last, node, penwidth=0.5, constraint=\"false\", arrowhead=\"none\", len=0.01))\n last = node\n\n # Add each pipeline\n for idx, row in replication_results.iterrows():\n fss = row['FSS Name']\n transformer = row['Transformer']\n regressor = row['Regressor']\n score = format_score(row['R^2 Score'])\n color = COLORS[idx]\n penwidth = PATH_WIDTH[idx]\n graph.add_edge(pydot.Edge(fss, transformer, color=color, label=str(idx+1), penwidth=penwidth, constraint=\"false\", fontsize=TEXT_FONTSIZE))\n graph.add_edge(pydot.Edge(transformer, regressor, color=color, label=str(idx+1), penwidth=penwidth, constraint=\"false\", fontsize=TEXT_FONTSIZE))\n graph.add_edge(pydot.Edge(regressor, score, color=color, label=str(idx+1), penwidth=penwidth, constraint=\"false\", fontsize=TEXT_FONTSIZE))\n\n 
graph.write_png(f\"plots/{phenotype}_pipeline_diagram.png\")\n graph.write_svg(f\"plots/{phenotype}_pipeline_diagram.svg\")", "def draw_bipartite_graph(G):\n\n\ttop, bot = nx.bipartite.sets(G)\n\tpos = nx.bipartite_layout(G, top)\n\tnx.draw_networkx(G,pos=pos)\n\tplt.show()\n\n\treturn", "def print_graph(dag, image_path, graph_path):\n for node in dag.nodes():\n dag.node[node]['label'] = node.label\n nx.write_graphml(dag, graph_path)\n pos = nx.random_layout(dag)\n nx.draw_networkx(dag, ax=None, width=3, pos=pos)\n p.savefig(image_path)", "def OutputDot(self, output):\n sorted_nodes = [n for n in self._graph.Nodes(sort=True)]\n self._global_start = min([n.StartTime() for n in sorted_nodes])\n visited_nodes = set([n for n in sorted_nodes])\n\n output.write(\"\"\"digraph dependencies {\n rankdir = LR;\n \"\"\")\n\n orphans = set()\n for n in sorted_nodes:\n for s in itertools.chain(n.Node().Successors(),\n n.Node().Predecessors()):\n if s in visited_nodes:\n break\n else:\n orphans.add(n)\n if orphans:\n output.write(\"\"\"subgraph cluster_orphans {\n color=black;\n label=\"Orphans\";\n \"\"\")\n for n in orphans:\n # Ignore synthetic nodes for orphan display.\n if not self._graph.NodeInfo(n).Request():\n continue\n output.write(self.DotNode(n))\n output.write('}\\n')\n\n output.write(\"\"\"subgraph cluster_nodes {\n color=invis;\n \"\"\")\n\n for n in sorted_nodes:\n if n in orphans:\n continue\n output.write(self.DotNode(n))\n\n for n in visited_nodes:\n for s in n.Node().Successors():\n if s not in visited_nodes:\n continue\n style = 'color = orange'\n label = '%.02f' % self._graph.EdgeCost(n, s)\n annotations = self._graph.EdgeAnnotations(n, s)\n edge_kind = annotations.get(\n loading_model.ResourceGraph.EDGE_KIND_KEY, None)\n assert ((edge_kind is None)\n or (edge_kind in loading_model.ResourceGraph.EDGE_KINDS))\n style = 'color = %s' % self._EDGE_KIND_TO_COLOR[edge_kind]\n if edge_kind == 'timing':\n style += '; style=dashed'\n if self._graph.EdgeCost(n, s) > self._LONG_EDGE_THRESHOLD_MS:\n style += '; penwidth=5; weight=2'\n\n label = '%.02f' % self._graph.EdgeCost(n, s)\n if 'activity' in annotations:\n activity = annotations['activity']\n separator = ' - '\n for activity_type, activity_label in self._ACTIVITY_TYPE_LABEL:\n label += '%s%s:%.02f ' % (\n separator, activity_label, activity[activity_type])\n separator = ' '\n arrow = '[%s; label=\"%s\"]' % (style, label)\n output.write('%d -> %d %s;\\n' % (n.Index(), s.Index(), arrow))\n output.write('}\\n')\n\n output.write('}\\n')", "def transitions_flow_diagram(self):\n\n # check if dot present in path\n result = subprocess.getoutput(\"dot -V\")\n if \"graphviz\" not in result:\n QMessageBox.critical(self, programName, (\"The GraphViz package is not installed.<br>\"\n \"The <b>dot</b> program was not found in the path.<br><br>\"\n \"\"\"Go to <a href=\"http://www.graphviz.org\">\"\"\"\n \"\"\"http://www.graphviz.org</a> for information\"\"\"))\n return\n\n fn = QFileDialog(self).getOpenFileNames(self, \"Select one or more transitions matrix files\", \"\",\n \"Transitions matrix files (*.txt *.tsv);;All files (*)\")\n fileNames = fn[0] if type(fn) is tuple else fn\n\n out = \"\"\n for fileName in fileNames:\n with open(fileName, \"r\") as infile:\n try:\n gv = transitions.create_transitions_gv_from_matrix(infile.read(),\n cutoff_all=0,\n cutoff_behavior=0,\n edge_label=\"percent_node\")\n\n with open(tempfile.gettempdir() + os.sep + os.path.basename(fileName) + \".tmp.gv\", \"w\") as f:\n f.write(gv)\n result = 
subprocess.getoutput(\"\"\"dot -Tpng -o \"{0}.png\" \"{1}\" \"\"\".format(fileName,\n tempfile.gettempdir() +\n os.sep + os.path.basename(\n fileName) +\n \".tmp.gv\"))\n if not result:\n out += \"<b>{}</b> created<br>\".format(fileName + \".png\")\n else:\n out += \"Problem with <b>{}</b><br>\".format(fileName)\n except:\n QMessageBox.information(self, programName,\n \"Error during flow diagram creation.\\n{}\".format(str(sys.exc_info()[0])))\n\n if out:\n QMessageBox.information(self, programName, out)", "def network_graph(net_dict=None):\n if net_dict == None:\n net_dict = {}\n else:\n G = nx.from_dict_of_lists(net_dict)\n plt.figure(num=None, figsize=(30, 30), dpi=80, facecolor='w', edgecolor='c')\n nx.draw_networkx(G, with_labels=True, alpha=0.5, edge_color='c', cmap=plt.cm.GnBu)\n plt.savefig(\"metabolism_5years.png\", bbox_inches='tight')", "def init_dot_format():\n ifaint.add_format(_load_dot, None, \"GraphViz dot\", \"dot\")", "def generate_dot_file(self):\n dot_text = \"digraph blockchain {\"\n frontier = [self.root]\n while frontier != []:\n parent = frontier.pop(0)\n children = parent.children\n for child in children:\n frontier.append(child)\n dot_text += \"\\n\\t{c} -> {p};\".format(p='<' + str(parent.block) + '>',\n c='<' + str(child.block) + '>'\n )\n dot_text += \"\\n}\"\n with open(\"blockchain.gv\", \"w\") as writeFile:\n writeFile.write(dot_text)", "def disp_graph(graph, output_filename):\n dot = Graph(name=\"Graph\", format=\"png\") # instantiate a graph object\n for node in graph.keys(): # add nodes to the graph\n dot.node(str(node))\n for node in graph.keys(): # for every node in the input graph\n # for every other node in the input graph that the first node is connected to\n for other_node in graph[node].keys():\n dot.edge(str(node), str(other_node)) # create the edge\n dot.render(output_filename, view=True) # visualize the graph and save it", "def render_dot_file(dot_string, save_file, image_format='png'):\r\n if type(dot_string).__name__ != 'str':\r\n raise TypeError('visualize() requires a string representation of a decision tree.\\nUse tree.export_graphviz()'\r\n 'for decision trees produced by scikit-learn and to_graphviz() for decision trees produced by'\r\n 'your code.\\n')\r\n\r\n # Set path to your GraphViz executable here\r\n os.environ[\"PATH\"] += os.pathsep + 'C:/Program Files (x86)/Graphviz2.38/bin/'\r\n graph = graphviz.Source(dot_string)\r\n graph.format = image_format\r\n graph.render(save_file, view=True)", "def write_dot(graph: Graph, f: IO[str], directed=False):\n if directed:\n f.write('digraph G {\\n')\n else:\n f.write('graph G {\\n')\n\n name = {}\n next_name = 0\n for v in graph:\n name[v] = next_name\n next_name += 1\n options = 'penwidth=3,'\n if hasattr(v, 'label'):\n options += 'label=\"' + str(v.label) + '\",'\n if hasattr(v, 'colortext'):\n options += 'color=\"' + v.colortext + '\",'\n elif hasattr(v, 'colornum'):\n options += 'color=' + str(v.colornum % NUM_COLORS + 1) + ', colorscheme=' + DEFAULT_COLOR_SCHEME + ','\n if v.colornum >= NUM_COLORS:\n options += 'style=filled,fillcolor=' + str((v.colornum // NUM_COLORS) % NUM_COLORS + 1) + ','\n if len(options) > 0:\n f.write(' ' + str(name[v]) + ' [' + options[:-1] + ']\\n')\n else:\n f.write(' ' + str(name[v]) + '\\n')\n f.write('\\n')\n\n for e in graph.edges:\n options = 'penwidth=2,'\n if hasattr(e, 'weight'):\n options += 'label=\"' + str(e.weight) + '\",'\n if hasattr(e, 'colortext'):\n options += 'color=\"' + e.colortext + '\",'\n elif hasattr(e, 'colornum'):\n options += 
'color=' + str(e.colornum % NUM_COLORS + 1) + ', colorscheme=' + DEFAULT_COLOR_SCHEME + ','\n if len(options) > 0:\n options = ' [' + options[:-1] + ']'\n if directed:\n f.write(' ' + str(name[e.tail]) + ' -> ' + str(name[e.head]) + options + '\\n')\n else:\n f.write(' ' + str(name[e.tail]) + '--' + str(name[e.head]) + options + '\\n')\n\n f.write('}')", "def print(self):\n dot = \"digraph G {\\nrankdir = UD\\n\"\n\n for i in range(len(self.allNodes)):\n if self.allNodes[i].left is not None:\n dot += str(self.allNodes[i].key) + \" -> \" + str(self.allNodes[i].left.key) + \"\\n\"\n if self.allNodes[i].right is not None:\n dot += str(self.allNodes[i].key) + \" -> \" + str(self.allNodes[i].right.key) + \"\\n\"\n\n dot += \"}\"\n\n file = open(\"outputfiles/BinTree.dot\", \"w\")\n file.write(dot)\n file.close()\n\n os.system(\"dot outputfiles/BinTree.dot -Tpng -o outputfiles/BinTree.png\")", "def plot_network(genome):\n g = genome.n\n # width = g.graph[\"size\"]\n # height = g.graph[\"size\"]\n\n # fig = plt.figure(figsize=(width,height))\n fig = plt.figure()\n fig.patch.set_facecolor('white')\n ax = fig.add_subplot(111, aspect='equal')\n # ax.set_axis_off()\n\n # collision_coords = find_collisions(genome)\n # das_coords = find_das_extended(genome)\n # slp_coords = find_slp(genome)\n slp_nodes = find_attacker_path(genome.n)\n\n # Plot the parent-child tree\n for n in g.nodes_iter():\n if g.node[n][\"parent\"] is not None:\n _line(g.node[n][\"coord\"], g.node[g.node[n][\"parent\"]][\"coord\"], zorder=0, color='k')\n\n for n in g.nodes_iter():\n coord = g.node[n][\"coord\"]\n shape = _circles\n colour = 'b'\n s = 0.4\n if n in slp_nodes:\n shape = _hexagons\n colour = 'y'\n s = 0.45\n if n == g.graph[\"source\"]:\n shape = _squares\n colour = 'g'\n if n == g.graph[\"sink\"]:\n shape = _octogons\n colour = 'k'\n s = 0.45\n shape(coord[0], coord[1], s, fc=\"white\", ec=colour)\n if(len(str(g.node[n][\"slot\"])) == 1):\n ax.text(coord[0]-0.15, coord[1]+0.15, str(g.node[n][\"slot\"]))\n elif(len(str(g.node[n][\"slot\"])) == 2):\n ax.text(coord[0]-0.25, coord[1]+0.15, str(g.node[n][\"slot\"]))\n elif(len(str(g.node[n][\"slot\"])) == 3):\n ax.text(coord[0]-0.4, coord[1]+0.15, str(g.node[n][\"slot\"]))\n else:\n ax.text(coord[0]-0.5, coord[1]+0.15, str(g.node[n][\"slot\"]))\n\n\n plt.gca().invert_yaxis()\n fig.show()", "def build_dot_str(self) -> Text:\n s = []\n s.append(\"digraph {\")\n for node in self.nodes:\n label = str(node)\n if node in self.start_nodes:\n label += \"S\"\n if node in self.accept_nodes:\n label += \"A\"\n s.append(f' \"{node}\" [label=\"{label}\"];')\n s.append(\"\")\n for from_node, transitions in self.nodes.items():\n for transition, to_nodes in transitions.items():\n if not transition:\n transition = \"&epsilon;\"\n for to_node in to_nodes:\n s.append(f' \"{from_node}\" -> \"{to_node}\" [label=\"{transition}\"];')\n s.append(\"}\")\n return \"\\n\".join(s)", "def fig4():\n # fmt: off\n tpm = np.array([\n [0, 0, 0],\n [0, 0, 1],\n [1, 0, 1],\n [1, 0, 0],\n [1, 0, 0],\n [1, 1, 1],\n [1, 0, 1],\n [1, 1, 0],\n ])\n cm = np.array([\n [0, 1, 1],\n [1, 0, 1],\n [1, 1, 0],\n ])\n # fmt: on\n return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])", "def make_architecture_pydot_graph(layers, output_shape=True, fullinfo=True):\n import pydot\n node_dict = {}\n edge_list = []\n\n REMOVE_BATCH_SIZE = True\n\n alias_map = {\n 'Conv2DCCLayer': 'Conv',\n 'MaxPool2DCCLayer': 'MaxPool',\n 'LeakyRectify': 'LRU',\n 'InputLayer': 'Input',\n 'DropoutLayer': 'Dropout',\n 'FlattenLayer': 
'Flatten',\n }\n\n def get_hex_color(layer_type):\n if 'Input' in layer_type:\n return '#A2CECE'\n if 'Conv' in layer_type:\n return '#7C9ABB'\n if 'Dense' in layer_type:\n return '#6CCF8D'\n if 'Pool' in layer_type:\n return '#9D9DD2'\n else:\n return '#{0:x}'.format(hash(layer_type + 'salt') % 2 ** 24)\n\n for i, layer in enumerate(layers):\n lines = []\n layer_type = '{0}'.format(layer.__class__.__name__)\n layer_type = alias_map.get(layer_type, layer_type)\n key = repr(layer)\n color = get_hex_color(layer_type)\n # Make label\n lines.append(layer_type)\n if fullinfo:\n attr = 'name'\n val = getattr(layer, attr, None)\n if val is not None:\n if len(val) < 3:\n lines[-1] += ' ({0})'.format(val)\n else:\n if val.lower() != layer_type.lower():\n # add name if it is relevant\n lines.append('{0}: {1}'.format(attr, val))\n\n for attr in ['num_filters', 'num_units', 'ds', 'axis'\n 'filter_shape', 'stride', 'strides', 'p']:\n val = getattr(layer, attr, None)\n if val is not None:\n lines.append('{0}: {1}'.format(attr, val))\n\n attr = 'shape'\n if hasattr(layer, attr):\n val = getattr(layer, attr)\n shape = val[1:] if REMOVE_BATCH_SIZE else val\n lines.append('{0}: {1}'.format(attr, shape))\n\n if hasattr(layer, 'nonlinearity'):\n try:\n val = layer.nonlinearity.__name__\n except AttributeError:\n val = layer.nonlinearity.__class__.__name__\n val = alias_map.get(val, val)\n lines.append('nonlinearity:\\n{0}'.format(val))\n\n if output_shape:\n outshape = layer.output_shape\n if REMOVE_BATCH_SIZE:\n outshape = outshape[1:]\n lines.append('Output shape:\\n{0}'.format(outshape))\n\n label = '\\n'.join(lines)\n # append node\n\n node_dict[key] = dict(name=key, label=label, shape='record',\n fillcolor=color, style='filled',)\n\n if hasattr(layer, 'input_layers'):\n for input_layer in layer.input_layers:\n edge_list.append([repr(input_layer), key])\n\n if hasattr(layer, 'input_layer'):\n edge_list.append([repr(layer.input_layer), key])\n\n #ut.embed()\n if ut.get_argflag('--nx-cnn-hack'):\n import networkx as nx\n import plottool as pt\n from matplotlib import offsetbox\n #import TextArea, AnnotationBbox\n #import matplotlib.offsetbox # NOQA\n import matplotlib as mpl\n\n #from pylab import rcParams\n #rcParams['figure.figsize'] = 20, 2\n\n #fig = pt.figure(figsize=(10, 2))\n #pt.plt.figure(figsize=(20, 2))\n\n mpl.offsetbox = offsetbox\n nx = nx\n G = netx_graph = nx.DiGraph()\n netx_nodes = [(key_, ut.delete_keys(node.copy(), ['name']))\n for key_, node in node_dict.items()]\n\n netx_edges = [(key1, key2, {}) for key1, key2 in edge_list]\n netx_graph.add_nodes_from(netx_nodes)\n netx_graph.add_edges_from(netx_edges)\n\n #netx_graph.graph.setdefault('graph', {})['rankdir'] = 'LR'\n netx_graph.graph.setdefault('graph', {})['rankdir'] = 'TB'\n #netx_graph.graph.setdefault('graph', {})['prog'] = 'dot'\n netx_graph.graph.setdefault('graph', {})['prog'] = 'dot'\n\n pos_dict = nx.nx_pydot.pydot_layout(netx_graph, prog='dot')\n # hack to expand sizes\n #pos_dict = {key: (val[0] * 40, val[1] * 40) for key, val in pos_dict.items()}\n node_key_list = ut.get_list_column(netx_nodes, 0)\n pos_list = ut.dict_take(pos_dict, node_key_list)\n\n artist_list = []\n offset_box_list = []\n\n ax = pt.gca()\n ax.cla()\n nx.draw(netx_graph, pos=pos_dict, ax=ax)\n for pos_, node in zip(pos_list, netx_nodes):\n x, y = pos_\n node_attr = node[1]\n textprops = {\n 'horizontalalignment': 'center',\n }\n offset_box = mpl.offsetbox.TextArea(node_attr['label'], textprops)\n artist = mpl.offsetbox.AnnotationBbox(\n offset_box, (x, 
y), xybox=(-0., 0.),\n xycoords='data', boxcoords=\"offset points\",\n pad=0.25, framewidth=True, bboxprops=dict(fc=node_attr['fillcolor']),\n #pad=0.1,\n #framewidth=False,\n )\n offset_box_list.append(offset_box)\n artist_list.append(artist)\n\n for artist in artist_list:\n ax.add_artist(artist)\n\n xmin, ymin = np.array(pos_list).min(axis=0)\n xmax, ymax = np.array(pos_list).max(axis=0)\n ax.set_xlim((xmin, xmax))\n\n fig = pt.gcf()\n fig.canvas.draw()\n #fig.set_size_inches(10, 3)\n\n #pt.update()\n\n # Superhack for centered text\n # Fix bug in\n # /usr/local/lib/python2.7/dist-packages/matplotlib/offsetbox.py\n # /usr/local/lib/python2.7/dist-packages/matplotlib/text.py\n for offset_box in offset_box_list:\n offset_box.set_offset\n #offset_box.get_offset\n #self = offset_box\n z = offset_box._text.get_window_extent()\n (z.x1 - z.x0) / 2\n offset_box._text\n T = offset_box._text.get_transform()\n A = mpl.transforms.Affine2D()\n A.clear()\n A.translate((z.x1 - z.x0) / 2, 0)\n offset_box._text.set_transform(T + A)\n\n #pt.update()\n #pt.draw()\n\n # MEGA HACK\n ut.show_if_requested()\n\n #nx.draw(netx_graph, pos=pos_dict, ax=ax, with_labels=True)\n #nx.draw_networkx(netx_graph, pos=pos_dict, ax=ax, node_shape='box')\n #pos_dict = nx.nx_agraph.graphviz_layout(netx_graph)\n # http://stackoverflow.com/questions/20885986/how-to-add-dots-graph-attribute-into-final-dot-output\n #for key, node in netx_nodes:\n # #node['labels'] = {'lbl': node['label']}\n # node['color'] = node['fillcolor']\n\n #from matplotlib.offsetbox import OffsetImage, AnnotationBbox\n if False:\n nx.get_node_attributes(netx_graph, key_)\n\n A = nx.to_pydot(G)\n #A.draw('color.png')\n print(A.to_string())\n #rankdir\n\n Z = nx.from_pydot(A)\n\n #nx.draw(Z)\n Zpos = nx.nx_pydot.pydot_layout(Z, prog='dot')\n nx.draw_networkx(Z, pos=Zpos)\n\n else:\n\n #pydot_graph = pydot.Dot('Network', graph_type='digraph')\n pydot_graph = pydot.Dot('Network', graph_type='digraph', rankdir='LR')\n\n pydot_node_dict = dict([\n (node['name'], pydot.Node(**node))\n for node in node_dict.values()\n ])\n for pydot_node in pydot_node_dict.values():\n pydot_graph.add_node(pydot_node)\n\n for edge in edge_list:\n pydot_graph.add_edge(\n pydot.Edge(pydot_node_dict[edge[0]], pydot_node_dict[edge[1]]))\n return pydot_graph" ]
[ "0.7629829", "0.69144285", "0.6787022", "0.67797905", "0.6778502", "0.67635626", "0.67353326", "0.6653782", "0.6613842", "0.6556271", "0.6535844", "0.6419457", "0.6395842", "0.63492775", "0.6347129", "0.6342978", "0.63408387", "0.63107", "0.63010144", "0.6286106", "0.6280245", "0.6262618", "0.6260367", "0.62543064", "0.624517", "0.61734146", "0.6148353", "0.6146951", "0.6146186", "0.61407906" ]
0.77664346
0
Create HTML code of a bipartite graph of molecules and reactions using the vis.js network library. Package the created HTML page and all images as a tarball.
def make_visjs_page( filename: str, molecules: Sequence[Molecule], reactions: Sequence[FixedRetroReaction], edges: Union[Sequence[Tuple[Any, Any]], nx.digraph.OutEdgeView], frame_colors: Sequence[PilColor], hierarchical: bool = False, ) -> None: mol_spec = save_molecule_images(molecules, frame_colors) template_filepath = os.path.join(data_path(), "templates", "reaction_tree.thtml") with open(template_filepath, "r") as fileobj: template = Template(fileobj.read()) template.globals["id"] = id # type: ignore tmpdir = tempfile.mkdtemp() for image_filepath in mol_spec.values(): shutil.copy(image_filepath, tmpdir) mol_spec = {molecule: os.path.basename(path) for molecule, path in mol_spec.items()} input_name = os.path.join(tmpdir, "route.html") with open(input_name, "w") as fileobj: fileobj.write( template.render( molecules=mol_spec, reactions=reactions, edges=edges, hierarchical=hierarchical, ) ) basename, _ = os.path.splitext(filename) shutil.make_archive(basename, "tar", root_dir=tmpdir)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _repr_html_(self):\n import io\n import base64\n from PIL import Image\n\n library_name = \"vedo.assembly.Assembly\"\n help_url = \"https://vedo.embl.es/docs/vedo/assembly.html\"\n\n arr = self.thumbnail(zoom=1.1, elevation=-60)\n\n im = Image.fromarray(arr)\n buffered = io.BytesIO()\n im.save(buffered, format=\"PNG\", quality=100)\n encoded = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n url = \"data:image/png;base64,\" + encoded\n image = f\"<img src='{url}'></img>\"\n\n # statisitics\n bounds = \"<br/>\".join(\n [\n vedo.utils.precision(min_x, 4) + \" ... \" + vedo.utils.precision(max_x, 4)\n for min_x, max_x in zip(self.bounds()[::2], self.bounds()[1::2])\n ]\n )\n\n help_text = \"\"\n if self.name:\n help_text += f\"<b> {self.name}: &nbsp&nbsp</b>\"\n help_text += '<b><a href=\"' + help_url + '\" target=\"_blank\">' + library_name + \"</a></b>\"\n if self.filename:\n dots = \"\"\n if len(self.filename) > 30:\n dots = \"...\"\n help_text += f\"<br/><code><i>({dots}{self.filename[-30:]})</i></code>\"\n\n allt = [\n \"<table>\",\n \"<tr>\",\n \"<td>\",\n image,\n \"</td>\",\n \"<td style='text-align: center; vertical-align: center;'><br/>\",\n help_text,\n \"<table>\",\n \"<tr><td><b> nr. of objects </b></td><td>\"\n + str(self.GetNumberOfPaths())\n + \"</td></tr>\",\n \"<tr><td><b> position </b></td><td>\" + str(self.GetPosition()) + \"</td></tr>\",\n \"<tr><td><b> diagonal size </b></td><td>\"\n + vedo.utils.precision(self.diagonal_size(), 5)\n + \"</td></tr>\",\n \"<tr><td><b> bounds </b> <br/> (x/y/z) </td><td>\" + str(bounds) + \"</td></tr>\",\n \"</table>\",\n \"</table>\",\n ]\n return \"\\n\".join(allt)", "def make_graphviz_image(\n molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]],\n reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]],\n edges: Sequence[Tuple[Any, Any]],\n frame_colors: Sequence[PilColor],\n) -> PilImage:\n\n def _create_image(use_splines):\n txt = template.render(\n molecules=mol_spec,\n reactions=reactions,\n edges=edges,\n use_splines=use_splines,\n )\n _, input_name = tempfile.mkstemp(suffix=\".dot\")\n with open(input_name, \"w\") as this_fileobj:\n this_fileobj.write(txt)\n\n _, output_img2 = tempfile.mkstemp(suffix=\".png\")\n ext = \".bat\" if sys.platform.startswith(\"win\") else \"\"\n subprocess.call([f\"dot{ext}\", \"-T\", \"png\", f\"-o{output_img2}\", input_name])\n if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0:\n raise FileNotFoundError(\n \"Could not produce graph with layout - check that 'dot' command is in path\"\n )\n return output_img2\n\n mol_spec = save_molecule_images(molecules, frame_colors)\n\n template_filepath = os.path.join(data_path(), \"templates\", \"reaction_tree.dot\")\n with open(template_filepath, \"r\") as fileobj:\n template = Template(fileobj.read())\n template.globals[\"id\"] = id # type: ignore\n\n try:\n output_img = _create_image(use_splines=True)\n except FileNotFoundError:\n output_img = _create_image(use_splines=False)\n\n return Image.open(output_img)", "def make_graphviz_image(\n molecules: Union[Sequence[Molecule], Sequence[UniqueMolecule]],\n reactions: Union[Sequence[RetroReaction], Sequence[FixedRetroReaction]],\n edges: Sequence[Tuple[Any, Any]],\n frame_colors: Sequence[PilColor],\n reaction_shapes: Sequence[str] = None,\n use_splines: bool = True,\n) -> PilImage:\n\n def _create_image(use_splines):\n txt = template.render(\n molecules=mol_spec,\n reactions=rxn_spec,\n edges=edges,\n use_splines=use_splines,\n )\n _, input_name = 
tempfile.mkstemp(suffix=\".dot\")\n with open(input_name, \"w\") as this_fileobj:\n this_fileobj.write(txt)\n\n _, output_img2 = tempfile.mkstemp(suffix=\".png\")\n ext = \".bat\" if sys.platform.startswith(\"win\") else \"\"\n subprocess.call([f\"dot{ext}\", \"-T\", \"png\", f\"-o{output_img2}\", input_name])\n if not os.path.exists(output_img2) or os.path.getsize(output_img2) == 0:\n raise FileNotFoundError(\n \"Could not produce graph with layout - check that 'dot' command is in path\"\n )\n return output_img2\n\n mol_spec = save_molecule_images(molecules, frame_colors)\n reaction_shapes = reaction_shapes or [\"circle\"] * len(reactions)\n rxn_spec = zip(reactions, reaction_shapes)\n\n template_filepath = os.path.join(data_path(), \"templates\", \"reaction_tree.dot\")\n with open(template_filepath, \"r\") as fileobj:\n template = Template(fileobj.read())\n template.globals[\"id\"] = id # type: ignore\n\n if not use_splines:\n output_img = _create_image(use_splines=False)\n return Image.open(output_img)\n\n try:\n output_img = _create_image(use_splines=True)\n except FileNotFoundError:\n output_img = _create_image(use_splines=False)\n\n return Image.open(output_img)", "def generateGraph(mids, chaptersField, labelsField):\n output = \"digraph G { \\n\"\n # On ne traite que les chapitres qui ont actives le graphe\n chapts = chapters.graphChapters()\n # le dico nodes contient une liste pour chaque chapitre. Chaque liste\n # contient tous les neuds (un par note) presents dans ce chapitre, et\n # representes par des tuples (noteId, label)\n nodes = {}\n for mid in mids:\n chapterField = chaptersField[mid]\n labelField = labelsField[mid]\n for id, flds in mw.col.db.execute(\"\"\"\n SELECT id, flds FROM notes WHERE mid=%d\n \"\"\" % mid):\n fields = splitFields(flds)\n chapter = fields[chapterField]\n if not chapter in chapts:\n continue\n label = fields[labelField]\n if(not chapter in nodes):\n nodes[chapter] = []\n nodes[chapter].append((id, label))\n # On genere les noeuds, dans des clusters (un par chapitre)\n notes = []\n for chap in nodes:\n output += \"\"\"subgraph cluster_%d {\n node [style=filled];\n label = \"%s\";\n color=blue;\n \"\"\" % (chapts[chap], chap)\n for n in nodes[chap]:\n output += \"\"\"n%d [label=\"%s\", URL=\"%d\"];\\n\"\"\" % (n[0], n[1], n[0])\n notes.append(n)\n output += \"\"\"\n }\\n\"\"\"\n # Puis on ajoute tous les liens ..\n for n in notes:\n for nid in mw.col.db.execute(\"\"\"SELECT N.noteId FROM `PATH.links` AS L\n JOIN `PATH.match` AS M ON M.id = L.matchId\n JOIN `PATH.nodes` AS N ON M.nodeId = N.id\n WHERE L.noteId = %d\"\"\" % (n[0])):\n output += \"\"\"n%d -> n%d;\\n\"\"\" % (nid[0], n[0])\n output += \"}\"\n generateGraphImage(output)", "def fig5b():\n # fmt: off\n tpm = np.array([\n [1, 0, 0],\n [1, 1, 1],\n [1, 0, 1],\n [1, 1, 1],\n [1, 1, 0],\n [1, 1, 1],\n [1, 1, 1],\n [1, 1, 1],\n ])\n cm = np.array([\n [0, 1, 1],\n [0, 0, 1],\n [0, 1, 0],\n ])\n # fmt: on\n return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])", "def graphing2():\n return render_template('graph2.html')", "def show(data, types=(\"inflated\", ), recache=False, cmap='RdBu_r', layout=None,\n autoclose=None, open_browser=None, port=None, pickerfun=None,\n template=\"mixer.html\", overlays_available=None,\n overlays_visible=('rois', 'sulci'), labels_visible=('rois', ),\n overlay_file=None, title='Brain', **kwargs):\n\n # populate default webshow args\n if autoclose is None:\n autoclose = options.config.get('webshow', 'autoclose', fallback='true') == 'true'\n if open_browser is 
None:\n open_browser = options.config.get('webshow', 'open_browser', fallback='true') == 'true'\n\n data = dataset.normalize(data)\n if not isinstance(data, dataset.Dataset):\n data = dataset.Dataset(data=data)\n\n html = FallbackLoader([os.path.split(os.path.abspath(template))[0], serve.cwd]).load(template)\n db.auxfile = data\n\n #Extract the list of stimuli, for special-casing\n stims = dict()\n for name, view in data:\n if 'stim' in view.attrs and os.path.exists(view.attrs['stim']):\n sname = os.path.split(view.attrs['stim'])[1]\n stims[sname] = view.attrs['stim']\n\n package = Package(data)\n metadata = json.dumps(package.metadata())\n images = package.images\n subjects = list(package.subjects)\n\n ctmargs = dict(method='mg2', level=9, recache=recache,\n external_svg=overlay_file, overlays_available=overlays_available)\n ctms = dict((subj, utils.get_ctmpack(subj, types, **ctmargs))\n for subj in subjects)\n package.reorder(ctms)\n\n subjectjs = json.dumps(dict((subj, \"ctm/%s/\"%subj) for subj in subjects))\n db.auxfile = None\n\n if layout is None:\n layout = [None, (1, 1), (2, 1), (3, 1), (2, 2), (3, 2), (3, 2), (3, 3), (3, 3), (3, 3)][len(subjects)]\n\n linear = lambda x, y, m: (1.-m)*x + m*y\n mixes = dict(\n linear=linear,\n smoothstep=(lambda x, y, m: linear(x, y, 3*m**2 - 2*m**3)),\n smootherstep=(lambda x, y, m: linear(x, y, 6*m**5 - 15*m**4 + 10*m**3))\n )\n\n post_name = Queue()\n\n # Put together all view options\n my_viewopts = dict(options.config.items('webgl_viewopts'))\n my_viewopts['overlays_visible'] = overlays_visible\n my_viewopts['labels_visible'] = labels_visible\n my_viewopts['brightness'] = options.config.get('curvature', 'brightness')\n my_viewopts['smoothness'] = options.config.get('curvature', 'webgl_smooth')\n my_viewopts['contrast'] = options.config.get('curvature', 'contrast')\n\n for sec in options.config.sections():\n if 'paths' in sec or 'labels' in sec:\n my_viewopts[sec] = dict(options.config.items(sec))\n\n if pickerfun is None:\n pickerfun = lambda a, b: None\n\n class CTMHandler(web.RequestHandler):\n def get(self, path):\n subj, path = path.split('/')\n if path == '':\n self.set_header(\"Content-Type\", \"application/json\")\n self.write(open(ctms[subj]).read())\n else:\n fpath = os.path.split(ctms[subj])[0]\n mtype = mimetypes.guess_type(os.path.join(fpath, path))[0]\n if mtype is None:\n mtype = \"application/octet-stream\"\n self.set_header(\"Content-Type\", mtype)\n self.write(open(os.path.join(fpath, path), 'rb').read())\n\n class DataHandler(web.RequestHandler):\n def get(self, path):\n path = path.strip(\"/\")\n try:\n dataname, frame = path.split('/')\n except ValueError:\n dataname = path\n frame = 0\n\n if dataname in images:\n dataimg = images[dataname][int(frame)]\n if dataimg[1:6] == \"NUMPY\":\n self.set_header(\"Content-Type\", \"application/octet-stream\")\n else:\n self.set_header(\"Content-Type\", \"image/png\")\n\n if 'Range' in self.request.headers:\n self.set_status(206)\n rangestr = self.request.headers['Range'].split('=')[1]\n start, end = [ int(i) if len(i) > 0 else None for i in rangestr.split('-') ]\n\n clenheader = 'bytes %s-%s/%s' % (start, end or len(dataimg), len(dataimg) )\n self.set_header('Content-Range', clenheader)\n self.set_header('Content-Length', end-start+1)\n self.write(dataimg[start:end+1])\n else:\n self.write(dataimg)\n else:\n self.set_status(404)\n self.write_error(404)\n\n class StimHandler(web.StaticFileHandler):\n def initialize(self):\n pass\n\n def get(self, path):\n if path not in stims:\n 
self.set_status(404)\n self.write_error(404)\n else:\n self.root, fname = os.path.split(stims[path])\n super(StimHandler, self).get(fname)\n\n class StaticHandler(web.StaticFileHandler):\n def initialize(self):\n self.root = ''\n\n class MixerHandler(web.RequestHandler):\n def get(self):\n self.set_header(\"Content-Type\", \"text/html\")\n generated = html.generate(data=metadata,\n colormaps=colormaps,\n default_cmap=cmap,\n python_interface=True,\n leapmotion=True,\n layout=layout,\n subjects=subjectjs,\n viewopts=json.dumps(my_viewopts),\n title=title,\n **kwargs)\n #overlays_visible=json.dumps(overlays_visible),\n #labels_visible=json.dumps(labels_visible),\n #**viewopts)\n self.write(generated)\n\n def post(self):\n data = self.get_argument(\"svg\", default=None)\n png = self.get_argument(\"png\", default=None)\n with open(post_name.get(), \"wb\") as svgfile:\n if png is not None:\n data = png[22:].strip()\n try:\n data = binascii.a2b_base64(data)\n except:\n print(\"Error writing image!\")\n data = png\n svgfile.write(data)\n\n class JSMixer(serve.JSProxy):\n @property\n def view_props(self):\n \"\"\"An enumerated list of settable properties for views. \n There may be a way to get this from the javascript object, \n but I (ML) don't know how.\n\n There may be additional properties we want to set in views\n and animations; those must be added here.\n\n Old property list that used to be settable before webgl refactor:\n view_props = ['altitude', 'azimuth', 'target', 'mix', 'radius', 'pivot',\n 'visL', 'visR', 'alpha', 'rotationR', 'rotationL', 'projection',\n 'volume_vis', 'frame', 'slices']\n \"\"\"\n camera = getattr(self.ui, \"camera\")\n _camera_props = ['camera.%s' % k for k in camera._controls.attrs.keys()]\n surface = getattr(self.ui, \"surface\")\n _subject = list(surface._folders.attrs.keys())[0]\n _surface = getattr(surface, _subject)\n _surface_props = ['surface.{subject}.%s'%k for k in _surface._controls.attrs.keys()]\n _curvature_props = ['surface.{subject}.curvature.brightness',\n 'surface.{subject}.curvature.contrast',\n 'surface.{subject}.curvature.smoothness']\n return _camera_props + _surface_props + _curvature_props\n\n def _set_view(self, **kwargs):\n \"\"\"Low-level command: sets view parameters in the current viewer\n\n Sets each the state of each keyword argument provided. View parameters\n that can be set include all parameters in the data.gui in the html view.\n\n \"\"\"\n # Set unfolding level first, as it interacts with other arguments\n surface = getattr(self.ui, \"surface\")\n subject_list = surface._folders.attrs.keys()\n # Better to only self.view_props once; it interacts with javascript, \n # don't want to do that too often, it leads to glitches.\n vw_props = copy.copy(self.view_props)\n for subject in subject_list:\n if 'surface.{subject}.unfold' in kwargs:\n unfold = kwargs.pop('surface.{subject}.unfold')\n self.ui.set('surface.{subject}.unfold'.format(subject=subject), unfold)\n for k, v in kwargs.items():\n if not k in vw_props:\n print('Unknown parameter %s!'%k)\n continue\n else:\n self.ui.set(k.format(subject=subject) if '{subject}' in k else k, v)\n # Wait for webgl. Wait for it. .... 
WAAAAAIIIT.\n time.sleep(0.03)\n\n def _capture_view(self, frame_time=None):\n \"\"\"Low-level command: returns a dict of current view parameters\n\n Retrieves the following view parameters from current viewer:\n\n altitude, azimuth, target, mix, radius, visL, visR, alpha,\n rotationR, rotationL, projection, pivot\n\n Parameters\n ----------\n frame_time : scalar\n time (in seconds) to specify for this frame.\n \n Notes\n -----\n If multiple subjects are present, only retrieves view for first subject.\n \"\"\"\n view = {}\n subject = list(self.ui.surface._folders.attrs.keys())[0]\n for p in self.view_props:\n try:\n view[p] = self.ui.get(p.format(subject=subject) if '{subject}' in p else p)[0]\n # Wait for webgl.\n time.sleep(0.03)\n except Exception as err:\n # TO DO: Fix this hack with an error class in serve.py & catch it here\n print(err) #msg = \"Cannot read property 'undefined'\"\n #if err.message[:len(msg)] != msg:\n # raise err\n if frame_time is not None:\n view['time'] = frame_time\n return view\n\n def save_view(self, subject, name, is_overwrite=False):\n \"\"\"Saves current view parameters to pycortex database\n\n Parameters\n ----------\n subject : string\n pycortex subject id\n name : string\n name for view to store\n is_overwrite: bool\n whether to overwrite an extant view (default : False)\n\n Notes\n -----\n Equivalent to call to cortex.db.save_view(subject, vw, name)\n For a list of the view parameters saved, see viewer._capture_view\n \"\"\"\n db.save_view(self, subject, name, is_overwrite)\n\n def get_view(self, subject, name):\n \"\"\"Get saved view from pycortex database.\n\n Retrieves named view from pycortex database and sets current\n viewer parameters to retrieved values.\n\n Parameters\n ----------\n subject : string\n pycortex subject ID\n name : string\n name of saved view to re-load\n\n Notes\n -----\n Equivalent to call to cortex.db.get_view(subject, vw, name)\n For a list of the view parameters set, see viewer._capture_view\n \"\"\"\n view = db.get_view(self, subject, name)\n\n def addData(self, **kwargs):\n Proxy = serve.JSProxy(self.send, \"window.viewers.addData\")\n new_meta, new_ims = _convert_dataset(Dataset(**kwargs), path='/data/', fmt='%s_%d.png')\n metadata.update(new_meta)\n images.update(new_ims)\n return Proxy(metadata)\n\n def getImage(self, filename, size=(1920, 1080)):\n \"\"\"Saves currently displayed view to a .png image file\n\n Parameters\n ----------\n filename : string\n duh.\n size : tuple (x, y)\n size (in pixels) of image to save.\n \"\"\"\n post_name.put(filename)\n Proxy = serve.JSProxy(self.send, \"window.viewer.getImage\")\n return Proxy(size[0], size[1], \"mixer.html\")\n\n def makeMovie(self, animation, filename=\"brainmovie%07d.png\", offset=0,\n fps=30, size=(1920, 1080), interpolation=\"linear\"):\n \"\"\"Renders movie frames for animation of mesh movement\n\n Makes an animation (for example, a transition between inflated and\n flattened brain or a rotating brain) of a cortical surface. Takes a\n list of dictionaries (`animation`) as input, and uses the values in\n the dictionaries as keyframes for the animation.\n\n Mesh display parameters that can be animated include 'elevation',\n 'azimuth', 'mix', 'radius', 'target' (more?)\n\n\n Parameters\n ----------\n animation : list of dicts\n Each dict should have keys `idx`, `state`, and `value`.\n `idx` is the time (in seconds) at which you want to set `state` to `value`\n `state` is the parameter to animate (e.g. 
'altitude', 'azimuth')\n `value` is the value to set for `state`\n filename : string path name\n Must contain '%d' (or some variant thereof) to account for frame\n number, e.g. '/some/directory/brainmovie%07d.png'\n offset : int\n Frame number for first frame rendered. Useful for concatenating\n animations.\n fps : int\n Frame rate of resultant movie\n size : tuple (x, y)\n Size (in pixels) of resulting movie\n interpolation : {\"linear\", \"smoothstep\", \"smootherstep\"}\n Interpolation method for values between keyframes.\n\n Example\n -------\n # Called after a call of the form: js_handle = cortex.webgl.show(DataViewObject)\n # Start with left hemisphere view\n js_handle._setView(azimuth=[90], altitude=[90.5], mix=[0])\n # Initialize list\n animation = []\n # Append 5 key frames for a simple rotation\n for az, idx in zip([90, 180, 270, 360, 450], [0, .5, 1.0, 1.5, 2.0]):\n animation.append({'state':'azimuth', 'idx':idx, 'value':[az]})\n # Animate! (use default settings)\n js_handle.makeMovie(animation)\n \"\"\"\n # build up two variables: State and Anim.\n # state is a dict of all values being modified at any time\n state = dict()\n # anim is a list of transitions between keyframes\n anim = []\n setfunc = self.ui.set\n for f in sorted(animation, key=lambda x:x['idx']):\n if f['idx'] == 0:\n setfunc(f['state'], f['value'])\n state[f['state']] = dict(idx=f['idx'], val=f['value'])\n else:\n if f['state'] not in state:\n state[f['state']] = dict(idx=0, val=self.getState(f['state'])[0])\n start = dict(idx=state[f['state']]['idx'],\n state=f['state'],\n value=state[f['state']]['val'])\n end = dict(idx=f['idx'], state=f['state'], value=f['value'])\n state[f['state']]['idx'] = f['idx']\n state[f['state']]['val'] = f['value']\n if start['value'] != end['value']:\n anim.append((start, end))\n\n for i, sec in enumerate(np.arange(0, anim[-1][1]['idx']+1./fps, 1./fps)):\n for start, end in anim:\n if start['idx'] < sec <= end['idx']:\n idx = (sec - start['idx']) / float(end['idx'] - start['idx'])\n if start['state'] == 'frame':\n func = mixes['linear']\n else:\n func = mixes[interpolation]\n\n val = func(np.array(start['value']), np.array(end['value']), idx)\n if isinstance(val, np.ndarray):\n setfunc(start['state'], val.ravel().tolist())\n else:\n setfunc(start['state'], val)\n self.getImage(filename%(i+offset), size=size)\n\n def _get_anim_seq(self, keyframes, fps=30, interpolation='linear'):\n \"\"\"Convert a list of keyframes to a list of EVERY frame in an animation.\n\n Utility function called by make_movie; separated out so that individual\n frames of an animation can be re-rendered, or for more control over the\n animation process in general.\n\n \"\"\"\n # Misc. 
setup\n fr = 0\n a = np.array\n func = mixes[interpolation]\n #skip_props = ['surface.{subject}.right', 'surface.{subject}.left', ] #'projection',\n # Get keyframes\n keyframes = sorted(keyframes, key=lambda x:x['time'])\n # Normalize all time to frame rate\n fs = 1./fps\n for k in range(len(keyframes)):\n t = keyframes[k]['time']\n t = np.round(t/fs)*fs\n keyframes[k]['time'] = t\n allframes = []\n for start, end in zip(keyframes[:-1], keyframes[1:]):\n t0 = start['time']\n t1 = end['time']\n tdif = float(t1-t0)\n # Check whether to continue frame sequence to endpoint\n use_endpoint = keyframes[-1]==end\n nvalues = np.round(tdif/fs).astype(int)\n if use_endpoint:\n nvalues += 1\n fr_time = np.linspace(0, 1, nvalues, endpoint=use_endpoint)\n # Interpolate between values\n for t in fr_time:\n frame = {}\n for prop in start.keys():\n if prop=='time':\n continue\n if (start[prop] is None) or (start[prop] == end[prop]) or isinstance(start[prop], (bool, str)):\n frame[prop] = start[prop]\n continue\n val = func(a(start[prop]), a(end[prop]), t)\n if isinstance(val, np.ndarray):\n frame[prop] = val.tolist()\n else:\n frame[prop] = val\n allframes.append(frame)\n return allframes\n\n def make_movie_views(self, animation, filename=\"brainmovie%07d.png\", \n offset=0, fps=30, size=(1920, 1080), alpha=1, frame_sleep=0.05,\n frame_start=0, interpolation=\"linear\"):\n \"\"\"Renders movie frames for animation of mesh movement\n\n Makes an animation (for example, a transition between inflated and\n flattened brain or a rotating brain) of a cortical surface. Takes a\n list of dictionaries (`animation`) as input, and uses the values in\n the dictionaries as keyframes for the animation.\n\n Mesh display parameters that can be animated include 'elevation',\n 'azimuth', 'mix', 'radius', 'target' (more?)\n\n\n Parameters\n ----------\n animation : list of dicts\n This is a list of keyframes for the animation. Each keyframe should be\n a dict in the form captured by the ._capture_view method. NOTE: every\n view must include all view parameters. Additionally, there should be\n one extra key/value pair for \"time\". The value for time should be\n in seconds. The list of keyframes is sorted by time before applying,\n so they need not be in order in the input.\n filename : string path name\n Must contain '%d' (or some variant thereof) to account for frame\n number, e.g. '/some/directory/brainmovie%07d.png'\n offset : int\n Frame number for first frame rendered. Useful for concatenating\n animations.\n fps : int\n Frame rate of resultant movie\n size : tuple (x, y)\n Size (in pixels) of resulting movie\n interpolation : {\"linear\", \"smoothstep\", \"smootherstep\"}\n Interpolation method for values between keyframes.\n\n Notes\n -----\n Make sure that all values that will be modified over the course\n of the animation are initialized (have some starting value) in the first\n frame.\n\n Example\n -------\n # Called after a call of the form: js_handle = cortex.webgl.show(DataViewObject)\n # Start with left hemisphere view\n js_handle._setView(azimuth=[90], altitude=[90.5], mix=[0])\n # Initialize list\n animation = []\n # Append 5 key frames for a simple rotation\n for az, t in zip([90, 180, 270, 360, 450], [0, .5, 1.0, 1.5, 2.0]):\n animation.append({'time':t, 'azimuth':[az]})\n # Animate! 
(use default settings)\n js_handle.make_movie(animation)\n \"\"\"\n allframes = self._get_anim_seq(animation, fps, interpolation)\n for fr, frame in enumerate(allframes[frame_start:], frame_start):\n self._set_view(**frame)\n time.sleep(frame_sleep)\n self.getImage(filename%(fr+offset+1), size=size)\n time.sleep(frame_sleep)\n\n class PickerHandler(web.RequestHandler):\n def get(self):\n pickerfun(int(self.get_argument(\"voxel\")), int(self.get_argument(\"vertex\")))\n\n class WebApp(serve.WebApp):\n disconnect_on_close = autoclose\n def get_client(self):\n self.connect.wait()\n self.connect.clear()\n return JSMixer(self.send, \"window.viewer\")\n\n def get_local_client(self):\n return JSMixer(self.srvsend, \"window.viewer\")\n\n if port is None:\n port = random.randint(1024, 65536)\n\n server = WebApp([(r'/ctm/(.*)', CTMHandler),\n (r'/data/(.*)', DataHandler),\n (r'/stim/(.*)', StimHandler),\n (r'/mixer.html', MixerHandler),\n (r'/picker', PickerHandler),\n (r'/', MixerHandler),\n (r'/static/(.*)', StaticHandler)],\n port)\n\n server.start()\n print(\"Started server on port %d\"%server.port)\n url = \"http://%s%s:%d/mixer.html\"%(serve.hostname, domain_name, server.port)\n if open_browser:\n webbrowser.open(url)\n client = server.get_client()\n client.server = server\n return client\n else:\n try:\n from IPython.display import display, HTML\n display(HTML('Open viewer: <a href=\"{0}\" target=\"_blank\">{0}</a>'.format(url)))\n except:\n pass\n return server", "def graph():\n port_to_csv()\n\n source = ''\n if request.form.get('GraphType', '') == '':\n source = url_for('static', filename='frog no graph.png')\n else:\n source = s_modular(request.form.get('GraphType', ''), '')\n\n return render_template(\n 'tmpGraph.jade',\n title=\"Graph\",\n year=datetime.now().year,\n src=source\n )", "def make_static(outpath, data, types=(\"inflated\",), recache=False, cmap=\"RdBu_r\",\n template=\"static.html\", layout=None, anonymize=False, overlays_available=None,\n html_embed=True, overlays_visible=('rois', 'sulci'), labels_visible=('rois', ),\n overlay_file=None, copy_ctmfiles=True, title='Brain', **kwargs):\n\n outpath = os.path.abspath(os.path.expanduser(outpath)) # To handle ~ expansion\n if not os.path.exists(outpath):\n os.makedirs(outpath)\n if not os.path.exists(os.path.join(outpath, 'data')):\n # Don't lump together w/ outpath, because of edge cases\n # for which outpath exists but not sub-folder `data`\n os.makedirs(os.path.join(outpath, \"data\"))\n\n data = dataset.normalize(data)\n if not isinstance(data, dataset.Dataset):\n data = dataset.Dataset(data=data)\n\n db.auxfile = data\n\n package = Package(data)\n subjects = list(package.subjects)\n\n ctmargs = dict(method='mg2', level=9, recache=recache, external_svg=overlay_file,\n overlays_available=overlays_available)\n ctms = dict((subj, utils.get_ctmpack(subj, types, **ctmargs))\n for subj in subjects)\n package.reorder(ctms)\n\n db.auxfile = None\n if layout is None:\n layout = [None, (1, 1), (2, 1), (3, 1), (2, 2), (3, 2), (3, 2), (3, 3), (3, 3), (3, 3)][len(subjects)]\n\n ## Rename files to anonymize\n submap = dict()\n for i, (subj, ctmfile) in enumerate(ctms.items()):\n oldpath, fname = os.path.split(ctmfile)\n fname, ext = os.path.splitext(fname)\n if anonymize:\n newfname = \"S%d\"%i\n submap[subj] = newfname\n else:\n newfname = fname\n ctms[subj] = newfname+\".json\"\n\n for ext in ['json', 'ctm', 'svg']:\n srcfile = os.path.join(oldpath, \"%s.%s\"%(fname, ext))\n newfile = os.path.join(outpath, \"%s.%s\"%(newfname, ext))\n if 
os.path.exists(newfile):\n os.unlink(newfile)\n\n if os.path.exists(srcfile) and copy_ctmfiles:\n shutil.copy2(srcfile, newfile)\n\n if ext == \"json\" and anonymize:\n ## change filenames in json\n nfh = open(newfile)\n jsoncontents = nfh.read()\n nfh.close()\n\n ofh = open(newfile, \"w\")\n ofh.write(jsoncontents.replace(fname, newfname))\n ofh.close()\n if anonymize:\n old_subjects = sorted(list(ctms.keys()))\n ctms = dict(('S%d'%i, ctms[k]) for i, k in enumerate(old_subjects))\n if len(submap) == 0:\n submap = None\n\n #Process the data\n metadata = package.metadata(fmt=\"data/{name}_{frame}.png\", submap=submap)\n images = package.images\n #Write out the PNGs\n for name, imgs in images.items():\n impath = os.path.join(outpath, \"data\", \"{name}_{frame}.png\")\n for i, img in enumerate(imgs):\n with open(impath.format(name=name, frame=i), \"wb\") as binfile:\n binfile.write(img)\n\n #Copy any stimulus files\n stimpath = os.path.join(outpath, \"stim\")\n for name, view in data:\n if 'stim' in view.attrs and os.path.exists(view.attrs['stim']):\n if not os.path.exists(stimpath):\n os.makedirs(stimpath)\n shutil.copy2(view.attrs['stim'], stimpath)\n\n #Parse the html file and paste all the js and css files directly into the html\n from . import htmlembed\n if os.path.exists(template):\n ## Load locally\n templatedir, templatefile = os.path.split(os.path.abspath(template))\n rootdirs = [templatedir, serve.cwd]\n else:\n ## Load system templates\n templatefile = template\n rootdirs = [serve.cwd]\n loader = FallbackLoader(rootdirs)\n tpl = loader.load(templatefile)\n\n # Put together all view options\n my_viewopts = dict(options.config.items('webgl_viewopts'))\n my_viewopts['overlays_visible'] = overlays_visible\n my_viewopts['labels_visible'] = labels_visible\n my_viewopts['brightness'] = options.config.get('curvature', 'brightness')\n my_viewopts['smoothness'] = options.config.get('curvature', 'webgl_smooth')\n my_viewopts['contrast'] = options.config.get('curvature', 'contrast')\n\n for sec in options.config.sections():\n if 'paths' in sec or 'labels' in sec:\n my_viewopts[sec] = dict(options.config.items(sec))\n\n html = tpl.generate(data=json.dumps(metadata),\n colormaps=colormaps,\n default_cmap=cmap,\n python_interface=False,\n leapmotion=True,\n layout=layout,\n subjects=json.dumps(ctms),\n viewopts=json.dumps(my_viewopts),\n title=title,\n **kwargs)\n desthtml = os.path.join(outpath, \"index.html\")\n if html_embed:\n htmlembed.embed(html, desthtml, rootdirs)\n else:\n with open(desthtml, \"w\") as htmlfile:\n htmlfile.write(html)", "def create_network_graph(df_graph_tree):\n net = Network(height='750px', width='100%', directed=True, bgcolor='#222222', font_color='white')\n net.force_atlas_2based(gravity=-75)\n for index, row in df_graph_tree.iterrows():\n src = row['Source']\n dst = row['Target']\n label = row['Label']\n title = \"File fullname : {} <br> Type : {}\".format(row['Source'], row['File Type'])\n color = color_of_extension[row['File Type'].lower()] if row['File Type'].lower() in color_of_extension.keys() else 'grey'\n if row['File Type'] == 'folder':\n net.add_node(src, shape='text', label=label, color = color, title = title)\n else:\n net.add_node(src, shape='dot', label=label, color = color, title = title)\n if dst != '':\n #net.add_node(dst, label=label, title=title)\n net.add_edge(src, dst, value=1, color = '#6c6c6c')\n return net", "def graph_with_images(image_path: str):\n\n def createNode(graph, image_path, ID):\n graph.node(ID, image=f'{image_path}')\n\n def 
makeTable(title, image_path):\n return f\"\"\"<TABLE BORDER=\"2\" CELLPADDING=\"0\" CELLSPACING=\"0\">\n <TR>\n <TD CELLPADDING=\"10\" CELLSPACING=\"0\" ALIGN=\"TEXT\"><FONT face=\"Helvetica\" point-size=\"30\">{title}<BR ALIGN=\"LEFT\"/></FONT></TD>\n </TR>\n <TR>\n <TD><IMG SRC=\"{image_path}\"/></TD>\n </TR>\n </TABLE>\"\"\"\n\n def createFancyNode(graph, title, image_path, ID):\n graph.node(ID, label=f'<{makeTable(title, image_path)}>')\n\n graph = Digraph(\"graph_with_images\", format='svg', node_attr={\n 'label': \"\", 'width': \"8\", 'height': \"8\", \n 'fixedsize': \"true\", 'imagescale': \"true\", 'penwidth': \"0\"\n })\n graph.graph_attr['rankdir'] = 'LR'\n\n createNode(graph, image_path, \"A\")\n createNode(graph, image_path, \"B\")\n createNode(graph, image_path, \"C\")\n createNode(graph, image_path, \"D\")\n createNode(graph, image_path, \"E\")\n createFancyNode(graph, 'Dummy Tittle', image_path, \"F\")\n\n graph.edges(['AB', 'BC', 'AD', 'DE', 'CF', 'EF'])\n\n graph.render('grah.gv', view=True)", "def make_flow_diagram_png(self, png):\n\n try:\n from graphviz import Digraph\n except:\n print(\"Cannot make flow-chart as graphviz was not loaded\")\n return\n\n styles = {\n 'graph': {\n 'fontsize': '14',\n 'fontname': 'Helvetica',\n 'pad': '0.2',\n },\n 'nodes': {\n 'fontname': 'Helvetica',\n 'shape': 'box',\n 'style': 'filled, rounded',\n 'fillcolor': '#DDEEFF',\n 'color': '#AABBDD'\n },\n 'edges': {\n 'style': 'dotted',\n 'arrowhead': 'open',\n 'fontname': 'Helvetica',\n 'fontsize': '8',\n }\n }\n\n def apply_styles(graph, styles):\n graph.graph_attr.update(\n ('graph' in styles and styles['graph']) or {})\n graph.node_attr.update(\n ('nodes' in styles and styles['nodes']) or {})\n graph.edge_attr.update(\n ('edges' in styles and styles['edges']) or {})\n return graph\n\n def num_str(f):\n\n return '%.3g' % f\n\n self.graph = Digraph(format='png')\n for label in self.labels:\n self.graph.node(label)\n if len(self.infection_death_rate_flows) > 0:\n self.graph.node('infection_death')\n for from_label, to_label, var_label in self.var_transfer_rate_flows:\n self.graph.edge(from_label, to_label, label=var_label)\n for from_label, to_label, rate in self.fixed_transfer_rate_flows:\n self.graph.edge(from_label, to_label, label=num_str(rate))\n if len(self.infection_death_rate_flows) > 0:\n for label, rate in self.infection_death_rate_flows:\n self.graph.edge(label, 'infection_death', label=num_str(rate))\n base, ext = os.path.splitext(png)\n if ext.lower() != '.png':\n base = png\n\n self.graph = apply_styles(self.graph, styles)\n\n try:\n self.graph.render(base)\n except:\n print(\n \"Error running graphviz: probably not installed on your system\"\n )", "def graphing1():\n return render_template('graph1.html')", "def transitions_flow_diagram(self):\n\n # check if dot present in path\n result = subprocess.getoutput(\"dot -V\")\n if \"graphviz\" not in result:\n QMessageBox.critical(self, programName, (\"The GraphViz package is not installed.<br>\"\n \"The <b>dot</b> program was not found in the path.<br><br>\"\n \"\"\"Go to <a href=\"http://www.graphviz.org\">\"\"\"\n \"\"\"http://www.graphviz.org</a> for information\"\"\"))\n return\n\n fn = QFileDialog(self).getOpenFileNames(self, \"Select one or more transitions matrix files\", \"\",\n \"Transitions matrix files (*.txt *.tsv);;All files (*)\")\n fileNames = fn[0] if type(fn) is tuple else fn\n\n out = \"\"\n for fileName in fileNames:\n with open(fileName, \"r\") as infile:\n try:\n gv = 
transitions.create_transitions_gv_from_matrix(infile.read(),\n cutoff_all=0,\n cutoff_behavior=0,\n edge_label=\"percent_node\")\n\n with open(tempfile.gettempdir() + os.sep + os.path.basename(fileName) + \".tmp.gv\", \"w\") as f:\n f.write(gv)\n result = subprocess.getoutput(\"\"\"dot -Tpng -o \"{0}.png\" \"{1}\" \"\"\".format(fileName,\n tempfile.gettempdir() +\n os.sep + os.path.basename(\n fileName) +\n \".tmp.gv\"))\n if not result:\n out += \"<b>{}</b> created<br>\".format(fileName + \".png\")\n else:\n out += \"Problem with <b>{}</b><br>\".format(fileName)\n except:\n QMessageBox.information(self, programName,\n \"Error during flow diagram creation.\\n{}\".format(str(sys.exc_info()[0])))\n\n if out:\n QMessageBox.information(self, programName, out)", "def generate(self, diagram):", "def plot_network(path, saveas=None, **kwargs):\n if saveas is None:\n saveas = \"_srcnetwork.html\"\n fn = FileNetwork(path, **kwargs)\n nt = Network(\"1500px\", \"1500px\")\n nt.toggle_physics(True)\n nt.from_nx(fn.network)\n nt.set_options(get_pyvis_options())\n nt.show(f\"{saveas}\")\n return", "def create_visual_graph(self):\n if self.predict_new and self.prediction_without_covid_case:\n self.predict_co2_emission_future()\n self.save_prediction_df()\n else:\n self.restore_prediction_df()\n if not self.analysis_plot:\n self.predict_co2_emission_future()\n self.save_prediction_df()\n\n self.do_plot()\n self.output_graph_file = OUTPUT_GRAPH_PATH\n return self.output_graph_file", "def to_html(self, result_dir):\n png_path = self.png_path(result_dir)\n data_table = self.html_data_table()\n return \"XXX figure html\"", "def get_graphs(self):\n\n try:\n from keras.utils import plot_model\n from keras.utils.vis_utils import model_to_dot\n\n # from IPython.display import SVG\n\n plot_model(self.model, to_file=\"model.png\")\n plot_model(\n self.latent_to_states_model, to_file=\"latent_to_states_model.png\"\n )\n plot_model(self.batch_model, to_file=\"batch_model.png\")\n if self.mol_to_latent_model is not None:\n plot_model(self.mol_to_latent_model, to_file=\"mol_to_latent_model.png\")\n\n print(\"Models exported to png files.\")\n\n except:\n print(\"Check pydot and graphviz installation.\")", "def show_graph(self, output_fmt='pdf', direction = 'BT'):\n from PsyNeuLink.Components.Mechanisms.ProcessingMechanisms.ObjectiveMechanism import ObjectiveMechanism\n from PsyNeuLink.Components.Mechanisms.AdaptiveMechanisms.LearningMechanisms.LearningMechanism import LearningMechanism\n \n import graphviz as gv\n\n system_graph = self.graph\n learning_graph=self.learningGraph\n \n # build graph and configure visualisation settings\n G = gv.Digraph(engine = \"dot\", \n node_attr = {'fontsize':'12', 'fontname': 'arial', 'shape':'oval'}, \n edge_attr = {'arrowhead':'halfopen', 'fontsize': '10', 'fontname': 'arial'},\n graph_attr = {\"rankdir\" : direction} )\n \n # work with system graph\n rcvrs = list(system_graph.keys())\n # loop through receivers\n for rcvr in rcvrs:\n if isinstance(rcvr[0], ObjectiveMechanism) or isinstance(rcvr[0], LearningMechanism):\n continue\n rcvr_name = rcvr[0].name\n rcvr_shape = rcvr[0].variable.shape[1]\n rcvr_label = \" {} ({}) \".format(rcvr_name, rcvr_shape)\n \n # loop through senders\n sndrs = system_graph[rcvr]\n for sndr in sndrs:\n sndr_name = sndr[0].name\n sndr_shape = sndr[0].variable.shape[1]\n sndr_label = \" {} ({}) \".format(sndr_name, sndr_shape)\n \n # find edge name\n projs = sndr[0].outputState.sendsToProjections\n for proj in projs:\n if proj.receiver.owner == rcvr[0]:\n 
edge_name = proj.name\n edge_shape = proj.matrix.shape\n edge_label = \" {} {} \".format(edge_name, edge_shape)\n G.edge(sndr_label, rcvr_label, label = edge_label)\n \n if output_fmt == 'pdf':\n G.view(self.name.replace(\" \", \"-\"), cleanup=True)\n elif output_fmt == 'jupyter':\n return G", "def render(self): # pragma: no cover\n from graphviz import Digraph\n dot = Digraph(name=\"top\")\n for block in self.blocks:\n if isinstance(block, Branch):\n label = \"if \" + astor.to_source(block.cond)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"invhouse\"})\n elif isinstance(block, Yield):\n label = astor.to_source(block.value)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"oval\"})\n elif isinstance(block, BasicBlock):\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"box\"})\n elif isinstance(block, HeadBlock):\n label = \"Initial\"\n dot.node(str(id(block)) + \"_start\", label.rstrip(), {\"shape\": \"doublecircle\"})\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.initial_statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"box\"})\n dot.edge(str(id(block)) + \"_start\", str(id(block)))\n else:\n raise NotImplementedError(type(block))\n # for source, sink, label in self.edges:\n for sink, label in block.outgoing_edges:\n dot.edge(str(id(block)), str(id(sink)), label)\n\n\n file_name = tempfile.mktemp(\"gv\")\n dot.render(file_name, view=True)\n # with open(\"cfg.dot\", \"w\") as file:\n # file.write(dot.source)\n # exit()", "def main():\n data_visualisation()\n write_hyper_params()\n write_result_tables()\n write_box_plots()", "def output_div(self, output_method):\n instance = self.instance\n G = myGraph(instance.view_num)\n for i in range(instance.view_num):\n view = instance.tables[instance.views[i].table_pos].views[instance.views[i].view_pos]\n G.addNode(view)\n G.getSim()\n result = G.getTopK(instance.view_num)\n order = 1\n export_list = []\n if output_method == 'list':\n for item in result:\n export_list.append(G.nodes[item].output(order))\n order += 1\n return export_list\n elif output_method == 'print':\n for item in result:\n pprint (G.nodes[item].output(order))\n order += 1\n return\n elif output_method == 'single_json' or output_method == 'multiple_jsons':\n path2 = os.getcwd() + '/json/'\n if not os.path.exists(path2):\n os.mkdir(path2)\n if output_method == 'single_json':\n f = open(path2 + self.table_name + '.json','w')\n for item in result:\n f.write(G.nodes[item].output(order) + '\\n')\n order += 1\n f.close() # Notice that f.close() is out of the loop to create only one file\n else: #if output_method == 'multiple_jsons'\n for item in result:\n f = open(path2 + self.table_name + str(order)+'.json','w')\n f.write(G.nodes[item].output(order))\n order += 1\n f.close() # Notice that f.close() is in the loop to create multiple files\n 
return\n elif output_method == 'single_html' or output_method == 'multiple_htmls':\n path2 = os.getcwd() + '/html/'\n if not os.path.exists(path2):\n os.mkdir(path2)\n page = Page()\n if output_method == 'single_html':\n self.page = Page()\n for item in result:\n view = G.nodes[item]\n self.html_output(order, view, 'single')\n order += 1\n self.page.render('./html/' + self.table_name + '_all' + '.html')\n else: # if output_method == 'multiple_htmls'\n path3 = os.getcwd() + '/html/' + self.table_name\n if not os.path.exists(path3):\n os.mkdir(path3)\n for item in result:\n view = G.nodes[item]\n self.html_output(order, view, 'multiple')\n order += 1\n return", "def visualize(model: Model, structural_part=True, measurement_part=False,\n view=True, filename=None, title=''):\n g = gv.Digraph(format='jpg', graph_attr={'label': title})\n if structural_part:\n g.node_attr.update(color='red', shape='box')\n for i, j in model.parameters['Beta']:\n lval, rval = model.beta_names[0][i], model.beta_names[0][j]\n g.edge(rval, lval)\n if measurement_part:\n g.node_attr.update(color='black', shape='circle')\n for i, j in model.parameters['Lambda']:\n lval, rval = model.lambda_names[0][i], model.lambda_names[0][j]\n g.edge(lval, rval)\n g.render(filename, view=view)", "def graphs():\n return render_template(\"graphs.html\")", "def generateHtml(self):\n # only the master processor needs to do this\n if not self.master: return\n\n for page in self.layout.pages:\n \n # build the metric dictionary\n metrics = {}\n page.models = []\n for fname in glob.glob(os.path.join(self.output_path,\"*.nc\")):\n with Dataset(fname) as dataset:\n mname = dataset.getncattr(\"name\")\n if mname != \"Benchmark\": page.models.append(mname)\n if not dataset.groups.has_key(page.name): continue\n group = dataset.groups[page.name]\n\n # if the dataset opens, we need to add the model (table row)\n metrics[mname] = {}\n \n # each model will need to have all regions\n for region in self.regions: metrics[mname][region] = {}\n \n # columns in the table will be in the scalars group\n if not group.groups.has_key(\"scalars\"): continue\n \n # we add scalars to the model/region based on the region\n # name being in the variable name. 
If no region is found,\n # we assume it is the global region.\n grp = group.groups[\"scalars\"]\n for vname in grp.variables.keys():\n found = False\n for region in self.regions:\n if region in vname: \n found = True\n var = grp.variables[vname]\n name = vname.replace(region,\"\")\n metrics[mname][region][name] = Variable(name = name,\n unit = var.units,\n data = var[...])\n if not found:\n var = grp.variables[vname]\n metrics[mname][\"global\"][vname] = Variable(name = vname,\n unit = var.units,\n data = var[...])\n page.setMetrics(metrics)\n \n # write the HTML page\n f = file(os.path.join(self.output_path,\"%s.html\" % (self.name)),\"w\")\n f.write(str(self.layout))\n f.close()", "def print_graph(dag, image_path, graph_path):\n for node in dag.nodes():\n dag.node[node]['label'] = node.label\n nx.write_graphml(dag, graph_path)\n pos = nx.random_layout(dag)\n nx.draw_networkx(dag, ax=None, width=3, pos=pos)\n p.savefig(image_path)", "def graphviz_prettify(self, network):\n graph_settings = {\n 'rankdir': 'LR',\n 'dpi': 60,\n }\n network.graph.update(graph_settings)\n\n for n in network.nodes():\n if isinstance(n, Variable):\n network.nodes[n]['label'] = n.name\n elif isinstance(n, Equation):\n network.nodes[n]['shape'] = 'diamond'", "def network_view(request, simulation):\n # If the network is large, the display method is different.\n links = get_query('link', simulation)\n large_network = links.count() > NETWORK_THRESHOLD\n # File where the data for the network are stored.\n output_file = (\n '{0}/website_files/network_output/network_{1!s}.json'\n .format(settings.BASE_DIR, simulation.id)\n )\n if simulation.has_changed or not os.path.isfile(output_file):\n # Generate a new output file.\n output = network_output(simulation, large_network)\n with open(output_file, 'w') as f:\n json.dump(output, f)\n # Do not generate a new output file the next time (unless the\n # simulation changes).\n simulation.has_changed = False\n simulation.save()\n else:\n # Use data from the existing output file.\n with open(output_file, 'r') as f:\n output = json.load(f)\n context = {\n 'simulation': simulation,\n 'output': output,\n 'large_network': large_network,\n }\n return render(request, 'metro_app/network.html', context)", "def task_render():\n target = 'analysis.html'\n dep = 'analysis.ipynb'\n return {\n 'file_dep': [dep],\n 'targets': [target],\n 'actions': [\n f\"jupyter nbconvert --execute --to html {dep}\"\n ],\n 'clean': True\n }" ]
[ "0.65", "0.6355544", "0.62730944", "0.5997142", "0.58431", "0.58331263", "0.58309436", "0.57839453", "0.57279736", "0.57146037", "0.5705368", "0.5702328", "0.5696547", "0.56960404", "0.5689114", "0.5658812", "0.56489384", "0.5632636", "0.5624643", "0.5612612", "0.5604968", "0.5574917", "0.554443", "0.5531554", "0.55250835", "0.5514382", "0.5513073", "0.55059284", "0.5504308", "0.54933965" ]
0.69609904
1
Get policy state actions if they exist
def get_policy_actions(policy_lookup, state, player):
    if (state, player) not in policy_lookup:
        policy_lookup[(state, player)] = dict()
    return policy_lookup[(state, player)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_available_actions(self, state):\n pass", "def getActions(self, state): \n util.raiseNotDefined()", "def getLegalActions(self,state):\n return self.actionFn(state)", "def getLegalActions(self, state):\n return self.actionFn(state)", "def step(self, state):\n mcts_action = self.mcts_search(state)\n policy = [(action, (1.0 if action == mcts_action else 0.0))\n for action in state.legal_actions(self.player_id())]\n\n return policy, mcts_action", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n if len(legalActions) == 0:\n return None\n elif util.flipCoin(self.epsilon):\n return random.choice(legalActions)\n else:\n return self.getPolicy(state)", "def get_actions(self, state: TState = None) -> Sequence[TAction]:\n pass", "def actions(self) -> list:\n if self.debug: print(f\"AState.actions()\")\n if not self._examined:\n if self.debug: print(f\"\\tExamining...\")\n self._actions = self._generate_actions()\n self._examined = True\n return self._actions", "def getStateActionFeatures(self,state,action):\n return [state, self.actions[action]]", "def get_available_actions(self):\n return self.actions", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n \"*** YOUR CODE HERE ***\"\n # util.raiseNotDefined()\n if random.random() < self.epsilon:\n action = random.choice(legalActions)\n else:\n action = self.getPolicy(state)\n return action", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n \"*** YOUR CODE HERE ***\"\n length_legalActions = len(legalActions) #Find length of allowed actions\n\n if length_legalActions == 0: #To check if no legal action is possible, that is incase of terminal state\n action = None #set action as none and return from here\n return action\n\n epsilon = self.epsilon #to get the epsilon value\n\n if util.flipCoin(epsilon): #util.flipcoin returns binary variable with probability p of success by using util.flipCoin(p), which returns True with probability p and False with probability 1-p.\n action = random.choice(legalActions) #Choosing randomly from list of allowed actions\n return action\n\n action = self.getPolicy(state) #Without probability epsilon we should take best policy action. 
getPolicy function calls the computeActionFromQValues function which gives us the best action to take in a state\n\n #util.raiseNotDefined()\n\n return action", "def get_actions(self):\n return []", "def get_list_of_actions(self):\n return self.actions", "def action(self, gstate, actions):\n self.log.debug(\"Picking among actions %s\" % actions)\n return actions[0]", "def get_action(self, state):\n time.sleep(2.0)\n return random.choice(state.get_legal_actions(self.index))", "def getAc(self, state):\n\n # Pick Action\n flip = util.flipCoin(self.epsilon)\n\n if flip:\n\t\t\treturn random.choice(self.actions)\n\n return self.getPolicy(state)", "def _state_actions(self) -> dict:\n return {}", "def actions(self, agent_state):\n raise NotImplementedError(\"Don't know what actions are available\")", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n # OUR CODE HERE\n possibleActions = self.mdp.getPossibleActions(state)\n #checking for terminal state (no possible actions)\n if len(possibleActions) is 0: \n return None\n \n #attempt at using the Counter\n eValsActions = util.Counter()\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n return eValsActions.argMax()\n \n #fail attempt using lists :(\n \"\"\"\n #list to hold the expected value of the actions\n eValsActions = []\n #iterate through all actions and their transtion states\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n #expected value of reward with discount * the value of the transitions\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n #now iterate through and find the action with the best value\n #(that will be the best action)\n maxVal = -float(\"inf\")\n bestAction = None\n for action in possibleActions:\n if eValsActions[action] > maxVal:\n maxVal = eValsAction[action]\n bestAction = action\n \"\"\"\n return action\n # END OUR CODE", "def get_state_actions_mapping(self):\n return None", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n\n \"\"\"Description:\n Use util.flipCoin, if return true then randomly choice from legalAction\n if flase, then sue getPolicy to get best policy action\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n if len(legalActions) == 0:\n return action # None\n \n if util.flipCoin(self.epsilon):\n ''' exploration function (not work well)''' \n# posPol = util.Counter()\n# for a in legalActions:\n# if self.getQValue(state,a) >= 0:\n# posPol[a] = -1*self.getQValue(state, a) + (1000/(self.vitCount[(state,a)]+0.0001))\n# #print \"posPol[\", a, \"]= \",posPol[a]\n# #posPol[a] = (self.getQValue(state, a) * self.epsilon** self.vitCount[(state,a)]) + ( self.epsilon/(self.vitCount[(state,a)]+0.1) )\n# if len(posPol) == 0:\n# action = random.choice(legalActions)\n# else:\n# action = posPol.argMax() # random.choice(posPol.keys())\n ''' Random exploration '''\n action = random.choice(legalActions)\n else:\n action = self.getPolicy(state)\n \n \"\"\" END CODE \"\"\"\n\n return action", "def actions(self, state):\n myActionList= (1,2);\n return myActionList", "def get_actions(self):\n return self.agent.get_actions()", "def get_actions(self):\n\n if self.description == 
exceptions.NotAvailableError:\n raise exceptions.NotAvailableError('Can\\'t get actions because a description for this service is'\n ' not available.')\n return list(self.actions.values())", "def get_possible_actions(self, state):\n return [LEFT, DOWN, RIGHT, UP]", "def get_possible_actions(self, state):\n return tuple(self._transition_probs.get(state, {}).keys())" ]
[ "0.7593261", "0.7569062", "0.7208601", "0.71995384", "0.7015666", "0.69273573", "0.69273573", "0.69273573", "0.6888104", "0.68572384", "0.6758826", "0.67579144", "0.67430145", "0.67199033", "0.6602976", "0.6591265", "0.65893406", "0.65571266", "0.65557694", "0.65148705", "0.6508952", "0.64988494", "0.64890355", "0.647564", "0.64743775", "0.64733607", "0.64676046", "0.64476883", "0.6416945", "0.6402229" ]
0.7607066
0
Get policy action value if it exists
def get_policy_value(policy_lookup, state, player, action):
    if action not in get_policy_actions(policy_lookup, state, player):
        policy_lookup[(state, player)][action] = 0
    return policy_lookup[(state, player)][action]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n if len(legalActions) == 0:\n return None\n elif util.flipCoin(self.epsilon):\n return random.choice(legalActions)\n else:\n return self.getPolicy(state)", "def get_action(self, obs):\n obs = torch.FloatTensor(obs).to(self.device)\n value_int, value_ext = self.ppo.critic(obs)\n action, policy = self.ppo.explore(obs)\n return action, policy, value_ext.data.cpu().numpy(), value_int.data.cpu().numpy()", "def _get_action(self):\n return self.__action", "def action(self) -> Optional[str]:\n return pulumi.get(self, \"action\")", "def action(self) -> Optional[str]:\n return pulumi.get(self, \"action\")", "def action(self) -> Optional[str]:\n return pulumi.get(self, \"action\")", "def action(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"action\")", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n \"*** YOUR CODE HERE ***\"\n # util.raiseNotDefined()\n if random.random() < self.epsilon:\n action = random.choice(legalActions)\n else:\n action = self.getPolicy(state)\n return action", "def _get_action_from_dict(self, action_dict, **kwargs):\n return action_dict.get(self.channel, self.null_value)", "def get_state_action_value(self, state, action):\n raise NotImplementedError", "def action_value(self, state, action):\n return self.value_function[to_table_index(state, action)]", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n \"*** YOUR CODE HERE ***\"\n length_legalActions = len(legalActions) #Find length of allowed actions\n\n if length_legalActions == 0: #To check if no legal action is possible, that is incase of terminal state\n action = None #set action as none and return from here\n return action\n\n epsilon = self.epsilon #to get the epsilon value\n\n if util.flipCoin(epsilon): #util.flipcoin returns binary variable with probability p of success by using util.flipCoin(p), which returns True with probability p and False with probability 1-p.\n action = random.choice(legalActions) #Choosing randomly from list of allowed actions\n return action\n\n action = self.getPolicy(state) #Without probability epsilon we should take best policy action. 
getPolicy function calls the computeActionFromQValues function which gives us the best action to take in a state\n\n #util.raiseNotDefined()\n\n return action", "def obtain_action(self):\r\n\t\treturn", "def get_action(action_name):\n action = justrok.Globals.action_collection.action(action_name)\n if action is None:\n justrok.logger.error('action %r not found', action_name)\n return lambda: None\n else:\n return action.trigger", "def get_action(self):\n return self.__action", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n # OUR CODE HERE\n possibleActions = self.mdp.getPossibleActions(state)\n #checking for terminal state (no possible actions)\n if len(possibleActions) is 0: \n return None\n \n #attempt at using the Counter\n eValsActions = util.Counter()\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n return eValsActions.argMax()\n \n #fail attempt using lists :(\n \"\"\"\n #list to hold the expected value of the actions\n eValsActions = []\n #iterate through all actions and their transtion states\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n #expected value of reward with discount * the value of the transitions\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n #now iterate through and find the action with the best value\n #(that will be the best action)\n maxVal = -float(\"inf\")\n bestAction = None\n for action in possibleActions:\n if eValsActions[action] > maxVal:\n maxVal = eValsAction[action]\n bestAction = action\n \"\"\"\n return action\n # END OUR CODE", "def getAction(trainer):\n\tpolicy = trainer.get_policy()\n\tstate = randState()\n\tprint(\"state:\")\n\tprint(state)\n\taction = policy.compute_single_action(state)\n\treturn action", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n\n \"\"\"Description:\n Use util.flipCoin, if return true then randomly choice from legalAction\n if flase, then sue getPolicy to get best policy action\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n if len(legalActions) == 0:\n return action # None\n \n if util.flipCoin(self.epsilon):\n ''' exploration function (not work well)''' \n# posPol = util.Counter()\n# for a in legalActions:\n# if self.getQValue(state,a) >= 0:\n# posPol[a] = -1*self.getQValue(state, a) + (1000/(self.vitCount[(state,a)]+0.0001))\n# #print \"posPol[\", a, \"]= \",posPol[a]\n# #posPol[a] = (self.getQValue(state, a) * self.epsilon** self.vitCount[(state,a)]) + ( self.epsilon/(self.vitCount[(state,a)]+0.1) )\n# if len(posPol) == 0:\n# action = random.choice(legalActions)\n# else:\n# action = posPol.argMax() # random.choice(posPol.keys())\n ''' Random exploration '''\n action = random.choice(legalActions)\n else:\n action = self.getPolicy(state)\n \n \"\"\" END CODE \"\"\"\n\n return action", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"policy\")", "def policy_to_action(self, policy, obs):\n\n if self.env_info['policy_type'] == 'binary':\n # A policy defines an 
action for every state vector\n # An action is chosen according to a simple hyperplane\n if np.dot(policy[:obs.size], obs) + policy[-1] > 0:\n ind = 0\n else:\n ind = 1\n return self.env_info['action_space'][ind]\n\n elif self.env_info['policy_type'] == 'grid':\n # A policy defines an action for every point in the grid\n # obs (observation) is a position index in the grid\n return policy[obs]\n\n elif self.env_info['policy_type'] == 'function':\n # A policy is a function that maps states to actions\n # Every state gets assigned an action (e.g. through softmax)\n raise NotImplementedError('This state type is not yet supported.')", "def get_policy(self):\n return self.agent.get_policy()", "def get(self, action: Action) -> Qval:\n return NotImplemented", "def getQValue(self, state, action):\n return self.q.get((tuple(state), action), 0)", "def getQValue(self, state, action):\n return self.qValues[(state, action)]", "def getAction(self, state):\n # Pick Action\n legalActions = self.getLegalActions(state)\n action = None\n\n if len(legalActions) == 0:\n return None\n useRandomAction = util.flipCoin(self.epsilon)\n if useRandomAction:\n action = random.choice(legalActions)\n else:\n action = self.computeActionFromQValues(state)\n\n return action", "def rule_action(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule_action\")", "def get_action(self):\n return self.current_action", "def probe_action(self) -> Optional[pulumi.Input[Union['ExecActionArgs', 'HTTPGetActionArgs', 'TCPSocketActionArgs']]]:\n return pulumi.get(self, \"probe_action\")" ]
[ "0.6932814", "0.6800669", "0.6750365", "0.6698205", "0.6698205", "0.6698205", "0.66241604", "0.6580595", "0.6480911", "0.64533645", "0.64310986", "0.64271003", "0.6360185", "0.6328164", "0.6317932", "0.63010204", "0.6281348", "0.6266339", "0.625199", "0.625199", "0.625199", "0.62506354", "0.623897", "0.6238025", "0.6209715", "0.6182059", "0.6169178", "0.61649746", "0.6157247", "0.61299914" ]
0.74600756
0
Return minimum policy action value for a state
def min_value(policy_lookup, state, player):
    action_values = list(get_policy_actions(policy_lookup, state, player).values())
    if action_values:
        return np.min(action_values)
    return 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n possibleActions = self.mdp.getPossibleActions(state)\n if len(possibleActions) == 0: return None\n results = []\n for action in possibleActions:\n total = 0\n for (nextState, prob) in self.mdp.getTransitionStatesAndProbs(state,action):\n total += (prob * self.values[nextState])\n results.append(total)\n maxIndex = max(enumerate(results), key=lambda x: x[1])[0]\n #print(\"here\")\n return possibleActions[maxIndex]", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n # OUR CODE HERE\n possibleActions = self.mdp.getPossibleActions(state)\n #checking for terminal state (no possible actions)\n if len(possibleActions) is 0: \n return None\n \n #attempt at using the Counter\n eValsActions = util.Counter()\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n return eValsActions.argMax()\n \n #fail attempt using lists :(\n \"\"\"\n #list to hold the expected value of the actions\n eValsActions = []\n #iterate through all actions and their transtion states\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n #expected value of reward with discount * the value of the transitions\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n #now iterate through and find the action with the best value\n #(that will be the best action)\n maxVal = -float(\"inf\")\n bestAction = None\n for action in possibleActions:\n if eValsActions[action] > maxVal:\n maxVal = eValsAction[action]\n bestAction = action\n \"\"\"\n return action\n # END OUR CODE", "def get_value(self, state):\n epsilon = self.epsilon\n possible_actions = self.get_legal_actions(state)\n\n #If there are no legal actions, return 0.0\n if len(possible_actions) == 0:\n return 0.0\n\n optimal_action = possible_actions[\n np.argmax([self.get_qvalue(state, action) for action in possible_actions])\n ]\n state_value = 0\n for action in possible_actions:\n if action == optimal_action:\n state_value += (1 - epsilon) * self.get_qvalue(state, action)\n state_value += (epsilon / len(possible_actions)) * self.get_qvalue(state, action)\n return state_value", "def _best_action(self, state):\n actions_rewards = list(self.Q[state].items())\n return max(actions_rewards, key=lambda x: x[1])[0]", "def policy(self, state):\n maskState = self.discretizeState(state)\n\n qValues = self._q[maskState]\n qAction0 = qValues[0]\n qAction1 = qValues[1]\n\n if qAction0 == qAction1:\n return random.randint(0, 1)\n if qAction0 > qAction1:\n return 0\n return 1", "def computeActionFromValues(self, state):\n \"*** YOUR CODE HERE ***\"\n maxvalue = -100000000\n bestaction = None\n for action in self.mdp.getPossibleActions(state):\n valueforthisaction = self.getQValue(state, action) # is this right? 
\n if valueforthisaction > maxvalue:\n bestaction = action\n maxvalue = valueforthisaction\n return bestaction", "def bestAction(self, state):\n action = self.q_network.chooseBestAction(state)\n V = max(self.q_network.qValues(state))\n return action, V", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n if not self.getLegalActions(state): return None\n\n best_action = None;\n best_value = float('-inf')\n for action in self.getLegalActions(state):\n if self.getQValue(state, action) > best_value:\n best_value = self.getQValue(state, action)\n best_action = action\n return best_action", "def state_min(self) -> float:\n raise NotImplementedError", "def getBestAction(self, state):\n best_action = 0\n max_Q = -9999\n\n for action in self.getLegalActions(state):\n Q = self.getQValue(state, action)\n if Q > max_Q:\n best_action = action\n max_Q = Q\n \n return best_action", "def get_highest_value_action(self, state):\n a = self.sess.run(self.network.maxOutputNode, feed_dict={self.network.inputs: [state]})\n return a[0]", "def get_value(self, state):\n possible_actions = self.get_legal_actions(state)\n\n # If there are no legal actions, return 0.0\n if len(possible_actions) == 0:\n return 0.0\n\n #\n # INSERT CODE HERE to get maximum possible value for a given state\n #\n\n max_value = self.get_qvalue(state, possible_actions[0])\n for action in possible_actions[1:]:\n qvalue = self.get_qvalue(state, action)\n if qvalue > max_value:\n max_value = qvalue\n\n return max_value", "def __call__(self, state, action):\n s = float(state.__hash__()) # pybrain secretly casts state to float when we do rl\n a = int(action)\n qvalues = self.module.getActionValues(s)\n maxq = max(qvalues)\n if qvalues[a] == maxq:\n n_max = sum([1 if q == maxq else 0 for q in qvalues])\n return 1.0 / n_max\n return 0", "def getAction(self, gameState):\n result = float(\"-inf\")\n action = 1\n for agentState in gameState.getLegalActions(0):\n valorminimax = self.miniMaxDecision(1, 0, gameState.generateSuccessor(0, agentState))\n if valorminimax > result:\n result = valorminimax\n action = agentState\n return action", "def min_value(gameState):\n if terminal_test(gameState): return 1\n return min( max_value(gameState.forecast_move(move)) for move in gameState.get_legal_moves() )", "def best_action(self, state):\n return random.choice(self.possible_actions)", "def get_action(self, state):\n state = torch.from_numpy(state).float().to(self.training_device)\n action_dist, _ = self.old_policy.forward(state)\n action = action_dist.sample()\n\n return action", "def get_action(self, state):\n\n best_action = None\n best_value = -np.inf\n actions = [0, 1, 2, 3] # left, down, right, up\n for a in actions:\n row = state // self.edge\n col = state % self.edge\n # print (row, col)\n if a == 0:\n col = max(col-1, 0)\n elif a == 1:\n row = min(row+1, self.edge-1)\n elif a == 2:\n col = min(col+1, self.edge-1)\n elif a == 3:\n row = max(row-1, 0)\n # print (row, col)\n\n new_state = row * self.edge + col\n # print (new_state)\n if (self.values[new_state] > best_value or new_state == self.num_states-1): #goal\n best_value = 1.0 if new_state == self.num_states-1 else self.values[new_state]\n best_action = a\n return best_action", "def getPolicy(self, state):\n \"\"\"Description:\n Find all of q-values of current state, and choose the action \n with the hight q-value as optimal policy\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n legalActions = self.getLegalActions(state)\n action = None\n policy = util.Counter() # use counter to 
store action and its q-value\n \n if len(legalActions) == 0:\n return action\n \n for a in legalActions:\n policy[a] = self.getQValue(state, a)\n action = policy.argMax()\n return action\n\n \"\"\" END CODE \"\"\"", "def choose_action(self, state):\n if random.random() < self.epsilon:\n self.epsilon -= self.epsilon_annealing_rate\n return random.choice(self.valid_actions)\n \n #initialize search variables\n opt_action = self.valid_actions[0]\n opt_value = 0\n\n #performs a search across all valid actions for highest q-value.\n for action in self.valid_actions:\n cur_value = self.q_value(state, action)\n if cur_value > opt_value:\n opt_action = action\n opt_value = cur_value\n elif cur_value == opt_value:\n opt_action = random.choice([opt_action, action])\n return opt_action", "def computeActionFromValues(self, state):\n \"*** YOUR CODE HERE ***\"\n actions = self.mdp.getPossibleActions(state)\n optimalAction = None\n maxValue = float('-inf')\n for a in actions:\n qValue = self.computeQValueFromValues(state, a)\n if qValue > maxValue:\n maxValue = qValue\n optimalAction = a\n return optimalAction", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n actions = self.getLegalActions(state)\n if len(actions) == 0:\n return None\n values = [self.getQValue(state, action) for action in actions]\n LoT = zip(values, actions)\n (bestValue, bestAction) = max(LoT)\n return bestAction", "def get_state_value(self, state):\n values = self.get_all_state_action_values(state)\n policy = self.target_policy(values)\n return (values @ policy.probs.t()).item()", "def __call__(self, state):\n if random.random() > self._epsilon:\n return self._max_policy(state)\n return random.choice(np.arange(self._action_size))", "def best_Q_action(self, state):\n state_Q = {}\n\n for action in self.actions:\n if (state, action) not in self.Q:\n return False\n else:\n state_Q[(state, action)] = self.Q[(state, action)]\n\n return max(state_Q.iteritems(), key=operator.itemgetter(1))[0][1]", "def get_optimal_action(self, state):\n # check if there are multiple equivalent optimal actions\n if sum(self.Q_values[state] == np.amax(self.Q_values[state])) > 1:\n # select one of the optimal actions randomly\n idxs = np.where(self.Q_values[state] == np.amax(self.Q_values[state]))[0]\n return idxs[np.random.randint(0, idxs.size)]\n else:\n # return the unique optimal action\n return np.argmax(self.Q_values[state])", "def get_action(self,state):\n \n q_values = self.__network.predict(state[None])[0]\n \n ###YOUR CODE\n if np.random.rand()<self.epsilon:\n return np.random.choice(self.n_actions)\n return np.argmax(q_values)", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n\n legal_actions = self.getLegalActions(state)\n\n if len(legal_actions) == 0:\n return None\n\n max_value = self.computeValueFromQValues(state)\n\n actions = [action for action in legal_actions if self.values[(str(state), action)] == max_value]\n\n return random.choice(actions)", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n\n max_qvalue = None\n for action in self.legalActions:\n qvalue = self.getQValue(state, action)\n if max_qvalue is None or max_qvalue < qvalue:\n max_qvalue = qvalue\n\n if max_qvalue is None:\n return None\n\n actions = []\n for action in self.legalActions:\n qvalue = self.getQValue(state, action)\n if qvalue == max_qvalue:\n actions.append(action)\n\n if max_qvalue is not None and len(actions) == 0:\n return self.legalActions[0]\n if len(actions) > 1:\n return 
Const.DO_NOTHING\n return random.choice(actions)", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n\n max_qvalue = None\n for action in self.legalActions:\n qvalue = self.getQValue(state, action)\n if max_qvalue is None or max_qvalue < qvalue:\n max_qvalue = qvalue\n\n if max_qvalue is None:\n return None\n\n actions = []\n for action in self.legalActions:\n qvalue = self.getQValue(state, action)\n if qvalue == max_qvalue:\n actions.append(action)\n\n if max_qvalue is not None and len(actions) == 0:\n return self.legalActions[0]\n if len(actions) > 1:\n return Const.DO_NOTHING\n return random.choice(actions)" ]
[ "0.7423119", "0.741661", "0.737838", "0.72380596", "0.72140557", "0.7211273", "0.71713763", "0.71495", "0.7147739", "0.7118717", "0.70819044", "0.7045204", "0.70225585", "0.7010276", "0.7000682", "0.6985005", "0.69514984", "0.69384867", "0.69344324", "0.68877256", "0.6883074", "0.6866663", "0.68474764", "0.6840173", "0.6833897", "0.6818375", "0.68119854", "0.68075097", "0.6803928", "0.6803928" ]
0.8106444
0
Return maximum policy action value for a state
def max_value(policy_lookup, state, player):
    action_values = list(get_policy_actions(policy_lookup, state, player).values())
    if action_values:
        return np.max(action_values)
    return 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_highest_value_action(self, state):\n a = self.sess.run(self.network.maxOutputNode, feed_dict={self.network.inputs: [state]})\n return a[0]", "def get_value(self, state):\n possible_actions = self.get_legal_actions(state)\n\n # If there are no legal actions, return 0.0\n if len(possible_actions) == 0:\n return 0.0\n\n #\n # INSERT CODE HERE to get maximum possible value for a given state\n #\n\n max_value = self.get_qvalue(state, possible_actions[0])\n for action in possible_actions[1:]:\n qvalue = self.get_qvalue(state, action)\n if qvalue > max_value:\n max_value = qvalue\n\n return max_value", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n # OUR CODE HERE\n possibleActions = self.mdp.getPossibleActions(state)\n #checking for terminal state (no possible actions)\n if len(possibleActions) is 0: \n return None\n \n #attempt at using the Counter\n eValsActions = util.Counter()\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n return eValsActions.argMax()\n \n #fail attempt using lists :(\n \"\"\"\n #list to hold the expected value of the actions\n eValsActions = []\n #iterate through all actions and their transtion states\n for action in possibleActions:\n for transitionState, probability in self.mdp.getTransitionStatesAndProbs(state, action):\n #expected value of reward with discount * the value of the transitions\n eValsActions[action] += probability * (self.mdp.getReward( state, action, transitionState) + self.discount * self.values[transitionState])\n \n #now iterate through and find the action with the best value\n #(that will be the best action)\n maxVal = -float(\"inf\")\n bestAction = None\n for action in possibleActions:\n if eValsActions[action] > maxVal:\n maxVal = eValsAction[action]\n bestAction = action\n \"\"\"\n return action\n # END OUR CODE", "def _best_action(self, state):\n actions_rewards = list(self.Q[state].items())\n return max(actions_rewards, key=lambda x: x[1])[0]", "def getPolicy(self, state):\n \"*** YOUR CODE HERE ***\"\n possibleActions = self.mdp.getPossibleActions(state)\n if len(possibleActions) == 0: return None\n results = []\n for action in possibleActions:\n total = 0\n for (nextState, prob) in self.mdp.getTransitionStatesAndProbs(state,action):\n total += (prob * self.values[nextState])\n results.append(total)\n maxIndex = max(enumerate(results), key=lambda x: x[1])[0]\n #print(\"here\")\n return possibleActions[maxIndex]", "def computeActionFromValues(self, state):\n \"*** YOUR CODE HERE ***\"\n maxvalue = -100000000\n bestaction = None\n for action in self.mdp.getPossibleActions(state):\n valueforthisaction = self.getQValue(state, action) # is this right? 
\n if valueforthisaction > maxvalue:\n bestaction = action\n maxvalue = valueforthisaction\n return bestaction", "def computeActionFromValues(self, state):\n \n State_actions = self.mdp.getPossibleActions(state)\n max_Action=util.Counter()\n for k in State_actions:\n max_Action[k] = self.getQValue(state,k)\n return max_Action.argMax()\n \n util.raiseNotDefined()", "def bestAction(self, state):\n action = self.q_network.chooseBestAction(state)\n V = max(self.q_network.qValues(state))\n return action, V", "def max_q_value(self, state):\n max_value = None\n for action in self.valid_actions:\n cur_value = self.q_value(state, action)\n if max_value is None or cur_value > max_value:\n max_value = cur_value\n return max_value", "def max_value (self, new_state):\n \n ##create a list to save reward information\n return_list = []\n \n ##get each values from Q based on the new_state and its possible actions\n for s, a in self.Q.keys():\n if s == new_state:\n return_list.append(self.Q[s,a])\n \n ##return the maximum value based on new_state\n return max(return_list)", "def getBestAction(self, state):\n best_action = 0\n max_Q = -9999\n\n for action in self.getLegalActions(state):\n Q = self.getQValue(state, action)\n if Q > max_Q:\n best_action = action\n max_Q = Q\n \n return best_action", "def chooseAction(self, gameState):\n probabilities = self.assignProbablities(gameState)\n #print probabilities\n prob, bestProbabilityAction = max(probabilities)\n return bestProbabilityAction", "def select_action(self, state):\n return np.argmax(self.Q[state])", "def getPolicy(self, state):\n \"\"\"Description:\n Find all of q-values of current state, and choose the action \n with the hight q-value as optimal policy\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n legalActions = self.getLegalActions(state)\n action = None\n policy = util.Counter() # use counter to store action and its q-value\n \n if len(legalActions) == 0:\n return action\n \n for a in legalActions:\n policy[a] = self.getQValue(state, a)\n action = policy.argMax()\n return action\n\n \"\"\" END CODE \"\"\"", "def best_action(q_table: np.ndarray, state: int) -> int:\n return int(np.argmax(q_table[state]))", "def get_max_action(self, s):\r\n return 0.0", "def getValue(self, state):\n \"\"\"Description:\n first get legal actions of current state and find the max q-value among all legalaction. 
\n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n legalActions = self.getLegalActions(state)\n if len(legalActions) == 0:\n return 0.0\n maxValues = max([ self.getQValue(state, a) for a in legalActions])\n return maxValues\n \n \"\"\" END CODE \"\"\"", "def computeActionFromValues(self, state):\n \"*** YOUR CODE HERE ***\"\n actions = self.mdp.getPossibleActions(state)\n # Initialize max_value as - infinity\n # Initialize best action as None, choose max_value action\n max_value = float(\"-inf\")\n computed_action = None\n\n for action in actions:\n # Find q value of specified action\n q_value = self.computeQValueFromValues(state, action)\n # Update action if it's the best so far\n if q_value > max_value:\n max_value = q_value\n computed_action = action\n return computed_action", "def computeActionFromValues(self, state):\n \"*** YOUR CODE HERE ***\"\n actions = self.mdp.getPossibleActions(state)\n optimalAction = None\n maxValue = float('-inf')\n for a in actions:\n qValue = self.computeQValueFromValues(state, a)\n if qValue > maxValue:\n maxValue = qValue\n optimalAction = a\n return optimalAction", "def max_value(gameState):\n if terminal_test(gameState): return -1", "def __call__(self, state, action):\n s = float(state.__hash__()) # pybrain secretly casts state to float when we do rl\n a = int(action)\n qvalues = self.module.getActionValues(s)\n maxq = max(qvalues)\n if qvalues[a] == maxq:\n n_max = sum([1 if q == maxq else 0 for q in qvalues])\n return 1.0 / n_max\n return 0", "def getAction(self, gameState):\n result = float(\"-inf\")\n action = 1\n for agentState in gameState.getLegalActions(0):\n valorminimax = self.expectiMaxDecision(1, 0, gameState.generateSuccessor(0, agentState))\n if valorminimax > result:\n result = valorminimax\n action = agentState\n return action", "def get_value(self, state):\n epsilon = self.epsilon\n possible_actions = self.get_legal_actions(state)\n\n #If there are no legal actions, return 0.0\n if len(possible_actions) == 0:\n return 0.0\n\n optimal_action = possible_actions[\n np.argmax([self.get_qvalue(state, action) for action in possible_actions])\n ]\n state_value = 0\n for action in possible_actions:\n if action == optimal_action:\n state_value += (1 - epsilon) * self.get_qvalue(state, action)\n state_value += (epsilon / len(possible_actions)) * self.get_qvalue(state, action)\n return state_value", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n actions = self.getLegalActions(state)\n if len(actions) == 0:\n return None\n values = [self.getQValue(state, action) for action in actions]\n LoT = zip(values, actions)\n (bestValue, bestAction) = max(LoT)\n return bestAction", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n\n legal_actions = self.getLegalActions(state)\n\n if len(legal_actions) == 0:\n return None\n\n max_value = self.computeValueFromQValues(state)\n\n actions = [action for action in legal_actions if self.values[(str(state), action)] == max_value]\n\n return random.choice(actions)", "def getAction(self, gameState):\n result = float(\"-inf\")\n action = 1\n for agentState in gameState.getLegalActions(0):\n valorminimax = self.miniMaxDecision(1, 0, gameState.generateSuccessor(0, agentState))\n if valorminimax > result:\n result = valorminimax\n action = agentState\n return action", "def computeValueFromQValues(self, state):\n \treturn max([self.getQValue(state, action) for action in self.actions])", "def get_action(self,state):\n \n q_values = self.__network.predict(state[None])[0]\n \n 
###YOUR CODE\n if np.random.rand()<self.epsilon:\n return np.random.choice(self.n_actions)\n return np.argmax(q_values)", "def computeValueFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n if len(self.getLegalActions(state)) == 0:\n return 0.0\n max_value = -float('inf')\n for action in self.getLegalActions(state):\n max_value = max(max_value, self.getQValue(state, action))\n return max_value", "def computeActionFromQValues(self, state):\n \"*** YOUR CODE HERE ***\"\n legal_actions = self.getLegalActions(state)\n if len(legal_actions) == 0: return None\n values = [self.getQValue(state, action) for action in legal_actions]\n max_value = max(values)\n best_indices = [index for index in range(len(values)) if values[index] == max_value]\n return legal_actions[random.choice(best_indices)]" ]
[ "0.8555896", "0.8210642", "0.81356865", "0.7956355", "0.7945223", "0.78740704", "0.7716465", "0.76843894", "0.767578", "0.7544931", "0.7542873", "0.7514069", "0.7505697", "0.7458506", "0.74196005", "0.7408121", "0.7404285", "0.7384199", "0.73444754", "0.7327485", "0.7325625", "0.7321644", "0.7287363", "0.7285249", "0.7226769", "0.7223767", "0.722337", "0.71980125", "0.71942955", "0.7192279" ]
0.82146424
1
Display policy values of a board state
def visualize_policy(policy_lookup, board, player):
    state = board.flatten()
    # initialize board policy values to None
    board_policy_values = np.zeros((3, 3))
    for i in range(3):
        for j in range(3):
            board_policy_values[i][j] = None
    # lookup policy value for each action
    for action in board.get_open():
        board_policy_values[action[0], action[1]] = get_policy_value(
            policy_lookup, state, player, action)
    return board_policy_values
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def printState(self,board):\n self.printBoard(board.getBoard())\n self.printScore(board,board.getScore())", "def policy_eval():\r\n \r\n action_prob = [0.125, 0.625, 0.125, 0.125]# actions with probabilities\r\n data = grid_world()\r\n state_axis = np.zeros((9, 9))#initialize states\r\n threshold = .1\r\n prior_state = np.ones((9, 9))\r\n \r\n while np.abs(state_axis - prior_state).max() > threshold:\r\n for x, y in product(range(9), repeat=2):\r\n prior_state = state_axis.copy()\r\n if data.array[x, y] == 'X':\r\n continue\r\n updated_values = [data.next_direction(np.array([x, y]), next_move)\r\n for next_move in data.directions]#Updating states with directions\r\n Sum_Expectation = np.dot(action_prob,\r\n [points_val + 0.9 * state_axis[position[0], position[1]]\r\n for position, points_val in updated_values])\r\n state_axis[x, y] = Sum_Expectation\r\n print(\"\\nExercise 3.1 Shows Value functions for the policy\\n\")\r\n print(state_axis)\r\n build_grid(state_axis, \"Shows Value functions for the policy\")", "def display_state(self):\r\n\r\n print('\\n')\r\n print('>>CURRENT STATE')\r\n ct = 0\r\n for i in self.state:\r\n for j in i:\r\n if j == -1:\r\n val = 'X'\r\n else:\r\n val = str(ct)\r\n if len(val) == 1:\r\n print(' ' + val + ' ', end='')\r\n else:\r\n print(val + ' ', end='')\r\n ct += 1\r\n print('\\n')", "def show_board(self):\n board_vis = f\"\\n{'*' * 22}Board state{'*' * 23}\\n\"\n str_p2_store=\" \"+str(self.p2_store()) if self.p2_store()<10 else str(self.p2_store())\n board_vis += (f\" {str_p2_store} - | \" +\n \" || \".join(\n [i if len(i) == 2 else ' ' + i for i in list(map(str, self.p2_pits()[::-1]))]) + \" | \\n\")\n board_vis += f\"{'-------' * (self.M + 2)}\\n\"\n board_vis += (\" | \" + \" || \".join(\n [i if len(i) == 2 else ' ' + i for i in list(map(str, self.p1_pits()))]) +\n f\" | - {self.p1_store()}\\n\")\n board_vis += f\"{'*' * 56}\\n\"\n print(board_vis)", "def policy_value_fn(board):\n # return uniform probabilities and 0 score for pure MCTS", "def policy_value_fn(board):\n # return uniform probabilities and 0 score for pure MCTS\n action_probs = np.ones(len(board.availables))/len(board.availables)\n return zip(board.availables, action_probs), 0", "def showState(self):\n for i in self.state[0]:\n for j in self.state[1]:\n print(self.table[i][j], end=\"\")\n print(\"\")", "def print_game_state(board):\r\n print(board)\r\n illegal_moves = [(0, 0), (2, 0), (0, 4), (2, 4)]\r\n for i in range(board.shape[0]):\r\n buffer = ''\r\n for j in range(board.shape[1]):\r\n if board[i][j] == 1:\r\n buffer += 'X\\t'\r\n elif board[i][j] == 2:\r\n buffer += '0\\t'\r\n elif (i, j) in illegal_moves:\r\n buffer += ' \\t'\r\n else:\r\n buffer += '-\\t'\r\n print (buffer)", "def show_grid_policy(policy, states):\n\n actions = np.array([policy(s) for s in states])\n states = np.array(states)\n plt.quiver(states[:, 0], states[:, 1], actions[:, 0], actions[:, 1])\n plt.axis((min(states)[0], max(states)[0],\n min(states, key=lambda s: s[1])[1], max(states, key=lambda s: s[1])[1]))\n plt.show()", "def policy_value_fn(board):\n # return uniform probabilities and 0 score for pure MCTS\n moves, true_moves = board.get_avaiable_moves()\n action_probs = np.ones(len(moves)) / len(moves)\n return zip(true_moves, action_probs), 0", "def render(self, policy=None, value=None):\n\n print('FrozenLake:')\n world = self.world.copy()\n if self.state < self.absorbing_state:\n world[index_to_position(self.state, self.columns)] = '@'\n print(world)\n\n if policy is not None:\n actions = ['↑', '↓', 
'←', '→']\n\n print('Policy:')\n policy = np.array([actions[a] for a in policy[:-1]])\n print(policy.reshape(self.world.shape))\n\n print('Value:')\n with self._printoptions(precision=3, suppress=True):\n print(value[:-1].reshape(self.world.shape))", "def __display(self,state: dict):\n width = 1+max(len(state[s]) for s in self.__boxes)\n line = '+'.join(['-'*(width*3)]*3)\n for r in self.__rows:\n print(''.join( state[r+c].center(width)+ ('|' if c in '36' else '')\n for c in self.__cols))\n if r in 'CF': print(line)", "def get_board_state_pretty(self):\n\n board_state = ''\n for i in range(0, 3):\n board_state += ' | '.join([self.board['{}{}'.format(i, j)] for j in range(0, 3)])\n board_state += '\\n'\n return board_state", "def print_state(self):\n grid = [[\".\" for _ in range(self.width)] for _ in range(self.height)]\n #icons = [\"^\", \"/\", \">\", \"\\\\\", \"|\", \"/\", \"<\", \"\\\\\"] # NON-UNICODE, uncomment if problems\n icons = [chr(0x2191), chr(0x2197), chr(0x2192), chr(0x2198), \\\n chr(0x2193), chr(0x2199), chr(0x2190), chr(0x2196)]\n for robot in self.robots:\n grid[robot[1]][robot[0]] = icons[(robot[2]+robot[3]) % 8]\n for item in self.items:\n if item[2] == 1:\n grid[item[1]][item[0]] = \"O\"\n elif item[2] == 2:\n grid[item[1]][item[0]] = \"*\"\n print(\"-\"*(self.width+2))\n for i in range(self.height):\n print(\"|\", end=\"\")\n for j in range(self.width):\n print(grid[i][j], end=\"\")\n print(\"|\")\n print(\"-\"*(self.width+2))", "def show_board(self):\n print(self.capacity_list)", "def printBoard(self):\n\t\tkey = [' ', 'X', 'O']\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[0][0]] + ' | ' + key[self.state[0][1]] + ' | ' + key[self.state[0][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[1][0]] + ' | ' + key[self.state[1][1]] + ' | ' + key[self.state[1][2]])\n\t\tprint(' | |')\n\t\tprint('-----------')\n\t\tprint(' | |')\n\t\tprint(' ' + key[self.state[2][0]] + ' | ' + key[self.state[2][1]] + ' | ' + key[self.state[2][2]])\n\t\tprint(' | |')", "def display_state_values(state):\n\n st.write(\"Ticker Symbols:\", state.stocks)\n st.write(\"Tickers List:\", state.stocks_list)\n st.write(\"Time Period:\", state.period)\n st.write(\"Export Checkbox state:\", state.export_checkbox)\n st.write(\"Export/Append file name:\", state.file_name)\n\n if st.button(\"Clear state\"): # resets the state\n state.clear()", "def printboard(state):\n cells = []\n for i in range(3):\n for j in range(3):\n cells.append(NAMES[state[i][j]].center(6))\n print(cells)\n print(*cells)\n print(BOARD_FORMAT.format(*cells))", "def __repr__(self):\n string = \"Current state: \\n\"\n if self.state[0] == 0: # We're on the left side\n string += \"M: \"\n string += str(self.state[1]).ljust(10)\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"\\n\"\n\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: left\\n\"\n else: # We're on the right side\n string += \"M: \"\n string += str(TOTAL_NO_MISSIONARIES - self.state[1]).ljust(10)\n string += \"M: \"\n string += str(self.state[1])\n string += \"\\n\"\n\n string += \"C: \"\n string += str(TOTAL_NO_CANNIBALS - self.state[2]).ljust(10)\n string += \"C: \"\n string += str(self.state[2]).ljust(10)\n string += \"\\n\"\n\n string += \"Boat position: right\\n\"\n string += \"\\n\"\n return string", "def getPolicy(self, 
state):\n \"*** YOUR CODE HERE ***\"\n possibleActions = self.mdp.getPossibleActions(state)\n if len(possibleActions) == 0: return None\n results = []\n for action in possibleActions:\n total = 0\n for (nextState, prob) in self.mdp.getTransitionStatesAndProbs(state,action):\n total += (prob * self.values[nextState])\n results.append(total)\n maxIndex = max(enumerate(results), key=lambda x: x[1])[0]\n #print(\"here\")\n return possibleActions[maxIndex]", "def print_state(X):\n out = ''\n for coord in range(18):\n out += \"{0}\".format(STATE_VARS[coord])\n val = float(X[coord])\n out += \" {0: 2.4e}\\n\".format(val)\n\n print out", "def state_print_do(cfg, app, win, events):", "def __repr__(self, state):\n print ' ',\n for w in range(len(state)+2):\n print \"___\",\n print '\\n'\n for x in state:\n print \"| \", x, \" |\"\n print ' ',\n for y in range(len(state)+2):\n print \"___\",\n print '\\n'\n return state", "def draw_board(board_state):\n print(\" {} | {} | {} \".format(board_state[6], board_state[7], board_state[8]))\n print(\"-----------\")\n print(\" {} | {} | {} \".format(board_state[3], board_state[4], board_state[5]))\n print(\"-----------\")\n print(\" {} | {} | {} \".format(board_state[0], board_state[1], board_state[2]))", "def _display_board(state: game.GameState) -> None:\n for row in range(state.get_rows()):\n rowString = \"|\"\n for col in range(state.get_columns()):\n cellValue = state.get_cell_contents(row, col)\n cellState = state.get_cell_state(row, col)\n if cellState == game.EMPTY_CELL:\n rowString += ' '\n elif cellState == game.OCCUPIED_CELL:\n rowString += (' ' + cellValue + ' ')\n elif cellState == game.FALLER_MOVING_CELL:\n rowString += ('[' + cellValue + ']')\n elif cellState == game.FALLER_STOPPED_CELL:\n rowString += ('|' + cellValue + '|')\n elif cellState == game.MATCHED_CELL:\n rowString += ('*' + cellValue + '*')\n rowString += '|'\n print(rowString)\n finalLine = ' '\n for col in range(state.get_columns()):\n finalLine += '---'\n finalLine += ' '\n print(finalLine)", "def policy_repr(self, policy):\n return policy.__repr__()", "def display(self):\n for value, prob in self.items():\n print(value, prob)", "def display(self):\n for r in range(1, self.size+1):\n print(\"+\" + (\"-+\"*self.size))\n print(\"|\", end=\"\")\n for c in range(1, self.size+1):\n print(self.gameState[r,c], end=\"\")\n print(\"|\",end=\"\")\n print()\n print(\"+\" + (\"-+\"*self.size))", "def print_state(self):\n\t\tprint self.time, len(self.state['s']), len(self.state['p']), len(self.state['c'])", "def print_q_values(self):\n values = deepcopy(self.q_values)\n for (state, action) in values:\n cells = []\n for i in range(3):\n for j in range(3):\n if state[i][j] == VALUES.EMPTY:\n state[i][j] = self.side\n cells.append(str(self.q_values[self.represent_state(state), action]).center(3))\n state[i][j] = VALUES.EMPTY\n else:\n cells.append(NAMES[state[i][j]].center(3))\n self.logger.info(BOARD.format(*cells))" ]
[ "0.6994765", "0.6739332", "0.6570411", "0.6556608", "0.644996", "0.6426292", "0.6337836", "0.6275739", "0.62564236", "0.62422144", "0.6233658", "0.6207903", "0.6100384", "0.60993165", "0.6043009", "0.6030694", "0.6024969", "0.6012984", "0.60074884", "0.5963703", "0.59048307", "0.58952904", "0.58895385", "0.5878359", "0.58461297", "0.5844413", "0.58382696", "0.5833339", "0.5825214", "0.5805991" ]
0.737854
0
Returns the sum of the numbers from lower through upper.
def summationLoop(lower, upper):
    sum = 0
    for i in range(lower, upper + 1):
        sum += i
    return sum
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sum_range(self, lower, upper):\n if upper>self.upper:\n upper=self.upper\n if lower<self.lower:\n lower = self.lower\n\n i_l = int(np.floor((lower-self.lower)/self._dx))\n i_u = int(np.floor((upper-self.lower)/self._dx))\n total = 0.0\n for i in range(i_l,i_u):\n total+= self.y[i]\n return total", "def summationReduce(lower, upper):\r\n if lower > upper:\r\n return 0\r\n else:\r\n return reduce(lambda x, y: x + y, range(lower, upper + 1))", "def sum_range(lower, upper):\n\n def copies(pmin, pmax):\n if lower <= pmin and pmax <= upper:\n return True\n elif pmax > upper:\n return False\n return copies(pmin + 50, pmax + 60) or copies(pmin + 130, pmax + 140)\n\n return copies(0, 0)", "def sum_range(lower, upper):\n\n def copies(pmin, pmax):\n if lower <= pmin and pmax <= upper:\n return True\n elif pmax > upper:\n return False\n return copies(pmin+50, pmax+60)\n\n return copies(0, 0)", "def summationRecursion(lower, upper):\r\n if lower > upper:\r\n return 0\r\n else:\r\n return lower + summationRecursion(lower + 1, upper)", "def sum_of_numbers(numbers):\r\n return sum(numbers)", "def range_sum(self, left: int, right: int) -> int:\n if right < left:\n raise ValueError(\"Right needs to be >= left\")\n return self._prefix_sum(right) - self._prefix_sum(left - 1)", "def integrate_range(self, lower, upper):\n if upper>self.upper:\n upper=self.upper\n if lower<self.lower:\n lower = self.lower\n\n i_l = int(np.floor((lower-self.lower)/self._dx))\n i_u = int(np.floor((upper-self.lower)/self._dx))\n #print \"i_l \",i_l,\" i_u \",i_u\n total = 0.0\n for i in range(i_l,i_u):\n total+= self.y[i]*self._dx\n return total", "def sum_range(nums, start=0, end=None):\n n = len(nums)\n if end == None:\n nz = n\n elif end > n:\n nz = n\n else:\n nz = end\n\n return sum(nums[start:nz + 1])", "def divisors_sum(upper=10**5):\n nums = [0] * (upper + 1)\n for i in range(1, upper + 1):\n for j in range(i, upper + 1, i):\n nums[j] += i\n return nums", "def rangeSumBST(self, root: TreeNode, lo: int, hi: int) -> int:\n\n def visit(node: TreeNode) -> int:\n if not node:\n return 0\n if node.val < lo:\n return visit(node.right)\n elif hi < node.val:\n return visit(node.left)\n else:\n return node.val + visit(node.left) + visit(node.right)\n return visit(root)", "def sumRange(self, i, j):\n if not self.nums: return 0 # edge case\n return self.sum(j+1)-self.sum(i)", "def sum_numbers(numbers):\n sum = 0\n for number in numbers:\n sum += number\n\n return sum", "def find_sum():\n term_1 = 1\n term_2 = 2\n total = 2\n while True:\n new_term = term_1 + term_2\n\n # Break if passing upper bound\n if new_term > UPPER_BOUND:\n break\n\n if new_term % 2:\n total += new_term\n\n term_1 = term_2\n term_2 = new_term\n\n print \"Sum: {0}\".format(total)", "def sumRange(self, i, j):\n print self.sumArr[i], i\n if i <1:\n return self.sumArr[j]\n return self.sumArr[j] - self.sumArr[i-1]", "def sumRange(self, i, j):\n return self.sums[j]-self.sums[i-1] if i>0 else self.sums[j]", "def sumRange(self, i, j):\r\n # Sum of the range can be obtained by subtracting dp[j] - dp[i-1]\r\n return self.dp[j] if i == 0 else self.dp[j] - self.dp[i-1]", "def sum_items(numbers):\n total = 0\n for item in numbers:\n total += item\n return total", "def sum(values):\n total = 0\n for i in values:\n total += i\n return total", "def sum(self, start=0, end=None):\n return super().reduce(start, end)", "def sum(self, start: int = 0, end: Optional[Any] = None) -> Any:\n return self.reduce(start, end)", "def sum_numbers(numbers=None):\n if numbers is None:\n 
return sum(range(1, 101))\n else:\n return sum(numbers)", "def sum_values(values):\n return (sum(values))", "def sum(*args):\n result = 0\n for i in args:\n result += i\n return result", "def summation(self):\n return sum(self.read_ints())", "def get_sum(a,b):\n return", "def add(numbers):\n sum1 = 0\n for i in numbers:\n sum1 = sum1+i\n\n return sum1", "def sum_range_multiples_3_5(min, max):\n\ttotal = 0\n\tfor i in range(min,max):\n\t\tif (i % 3 == 0) or (i % 5 == 0):\n\t\t\ttotal += i\n\treturn total", "def segment_sum(self, left, right):\n if self.empty():\n return 0\n less, _ = self.root.split_sums(left)\n _, greater = self.root.split_sums(right)\n return self.sum - less - greater", "def sum_numbers_one_to_ten():\n sum=0\n for num in range(1,11):\n sum=sum+num\n return sum\n pass" ]
[ "0.8400423", "0.8182569", "0.74909437", "0.7432203", "0.7408973", "0.70399815", "0.6944452", "0.6944229", "0.68275565", "0.6716233", "0.66702175", "0.6658637", "0.66237795", "0.6615981", "0.6517438", "0.647821", "0.6446492", "0.6336825", "0.6324186", "0.6315787", "0.62997735", "0.6295226", "0.6232232", "0.6230672", "0.62054616", "0.6198568", "0.6189656", "0.61618984", "0.6142011", "0.6138326" ]
0.82650006
1
generate a random filename
def random_filename():
    return ''.join(random.choices(string.ascii_uppercase + string.digits, k=5))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_random_file_name():\n\n def random_file_name_factory():\n length = random.randint(10, 15)\n chars = string.ascii_letters + string.digits + \"-_\"\n return f\"{''.join(random.choice(chars) for _ in range(length))}.jpg\"\n\n return random_file_name_factory", "def generate_filename(filename: str) -> str:\n return f\"{str(uuid.uuid4())}.{get_extension(filename)}\"", "def get_random_filename(dirname, ext):\n import random\n\n # assure a first go in the while loop\n found = 1\n\n # do until you find a unique name\n while found:\n\n # get a random int number\n str_num = str(random.randint(10000, 99999))\n\n # compose a random name\n fname = dirname + 'tmp' + str_num + ext\n\n # check whether the file exists\n if not os.path.isfile(fname):\n found = 0\n\n return fname", "async def filename_generator(self):\n chars = list(string.ascii_letters+string.digits)\n name = ''\n for i in range(random.randint(9, 25)):\n name += random.choice(chars)\n\n if name not in self.player['audio_files']:\n return name\n\n return await self.filename_generator()", "def generate_random_name(filename):\n ext = filename.split('.')[-1]\n rns = [random.randint(0, len(LETTER_SET) - 1) for _ in range(3)]\n name = ''.join([LETTER_SET[rn] for rn in rns])\n return \"{new_fn}.{ext}\".format(new_fn=name, ext=ext)", "def _make_random_file(self, dir, num_chars=10000):\n filename = os.path.join(dir, \"f-%d\" % random.randint(1, 2**63 - 1))\n content = \"\".join([random.choice(\"0123456789abcdefghijklmnopqrstuvwxyz\\n\") for _ in range(num_chars)])\n with open(filename, \"w\") as f:\n f.writelines(content)\n return filename", "def filename_generate(image_class, size=12, chars=string.ascii_uppercase + string.ascii_lowercase + string.digits):\n\tnew_filename = time.strftime(\"%d-%m-%Y_\")\n\tnew_filename = new_filename + ''.join(random.choice(chars) for _ in range(size))\n\tnew_filename = new_filename + \"_P\" + str(image_class)\n\treturn new_filename", "def giverandomfilename(self,user,postfix=\"\"):\n return \"%s_%s_%s\" % (user.username.encode(\"ascii\",\"ignore\"),\n str(randint(10000,99999)),\n \"testfile%s.txt\" % postfix)", "def random_filename_upload_to(path):\n\n def f(instance, filename):\n ext = filename.split('.')[-1]\n filename = '{0}.{1}'.format(uuid.uuid4().hex, ext)\n return os.path.join(path, filename)\n\n return f", "def generate_rand_name() -> str:\n suf = \"\".join(random.choices(string.ascii_uppercase + string.digits, k=6))\n return f\"exporters_{suf}\"", "def GenerateRandomName():\n buf = cStringIO.StringIO()\n buf.write(random.choice(_BEGIN_ALPHABET))\n for _ in xrange(_LENGTH - 1):\n buf.write(random.choice(_ALPHABET))\n return buf.getvalue()", "def generate_random_media_filepath(extension: str):\n\tfilename = f'{_generate_random_string(30)}{extension}'\n\treturn os.path.join(get_media_directory(), filename)", "def generate_file(name, size):\n print('=> Generating %s file' % name)\n with open(DATASET_DIR+name+DATASET_EXTENSION, 'wb+') as fout:\n fout.write(os.urandom(size))", "def generate_filename(player_name):\n name = player_name.split()\n filename = '_'.join(name).lower()\n return filename", "def unique_filename(data):\n file = data\n get_ext = file.filename.split(\".\")[-1]\n new_name = \"%s.%s\" % (uuid.uuid4().hex, get_ext)\n return new_name", "def generate_temp_filename(self):\n prefix = self.generate_filename_prefix()\n now = datetime.now()\n # Ok that might not be the best timestamp system, but it's\n # enough for our needs.\n timestamp = '-'.join([\n ''.join([str(x) for x in 
now.timetuple()]),\n str(now.microsecond),\n str(randint(10000, 99999))])\n\n filename = prefix + timestamp\n return find_filename(self.tempdir,\n filename)", "def _generate_filename(instance, filename, prefix):\n md5 = hashlib.md5()\n md5.update(struct.pack('f', time.time()))\n for chunk in instance.file.chunks():\n md5.update(chunk)\n extension = os.path.splitext(filename)[1]\n return os.path.join(prefix, md5.hexdigest() + extension)", "def _generate_name(name):\n return 'test-%s-%s-%s' % (time.strftime('%Y%m%d%H%M%S'),\n random.randint(0, 999), name)", "def generate_image_filename():\n now = datetime.now().strftime('%a-%w-%b-%H:%M:%S')\n return 'CCTV_{0}.jpg'.format(now)", "def new_filename(fname=None,ndigits=3):\n if fname is None:\n ext = (\"%%.%ii\" % ndigits) % 1\n fname = \"%s.%s\" % (random_string(6), ext)\n \n if os.path.exists(fname): \n fname = increment_filename(fname,ndigits=ndigits)\n\n return fname", "def append_random_number_to_filename(self, local_img_file):\n date = datetime.datetime.now()\n date_string = date.strftime(\"%m-%d-%Y\")\n return \"%s-glitched.%s\" % (local_img_file.split(\".\")[0], local_img_file.split(\".\")[1])", "def get_random_file():\n\n return random.choice(File.get_files())", "def gen_file_name(filename, path=UPLOAD_FOLDER):\n\n i = 1\n while os.path.exists(os.path.join(path, filename)):\n name, extension = os.path.splitext(filename)\n filename = '%s_%s%s' % (name, str(i), extension)\n i += 1\n\n return filename", "def generate_filename(ext,sha512base16_hash=None):\n## # Timestamp filename\n## timestamp = str(get_current_unix_time())\n## filename = timestamp+\".\"+ext\n # Base16 hash filename\n filename = sha512base16_hash+\".\"+ext\n return filename", "def make_img_name(file_ext='.png'):\r\n fn = []\r\n # format seqs and write out to temp file\r\n for i in range(0, 30):\r\n fn.append(choice(ALPHABET))\r\n return ''.join(fn) + file_ext", "def generate_filename(extension, with_path=True, base_folder=None):\n name = get_md5(str(uuid4()))\n # if not extension:\n # extension = get_file_extension()\n if base_folder is not None:\n base_folder = \"%s/\" % base_folder.rstrip(\"/\")\n else:\n base_folder = \"\"\n\n if with_path:\n return \"%s%s/%s/%s/%s.%s\" % (base_folder, name[0], name[1], name[2], name, extension)\n else:\n return \"%s%s.%s\" % (base_folder, name, extension)", "def gettempfilename(suffix):\n if '_' in os.environ:\n # tempfile.mktemp() crashes on some Wine versions (the one of Ubuntu 12.04 particularly)\n if os.environ['_'].find('wine') >= 0:\n tmpdir = '.'\n if 'TMP' in os.environ:\n tmpdir = os.environ['TMP']\n import time\n import random\n random.seed(time.time())\n random_part = 'file%d' % random.randint(0, 1000000000)\n return os.path.join(tmpdir, random_part + suffix)\n\n return tempfile.mktemp(suffix)", "def construct_name_file(size_sample, randomness, pos_equal_neg, kernel):\n if randomness:\n randomness = \"rand\"\n else:\n randomness = \"nrand\"\n\n if pos_equal_neg:\n pos_equal_neg = \"pos-neg-eq\"\n else:\n pos_equal_neg = \"pos-neg-neq\"\n\n return \"{}_{}_{}_{}.json\".format(size_sample, randomness, pos_equal_neg, kernel)", "def _get_random_name(self, length=10):\n return base64.b64encode(os.urandom(10)).translate(None, '=+/')", "def generate_file_filename(instance, filename):\n return _generate_filename(instance, filename, 'photos')" ]
[ "0.85415816", "0.80759776", "0.7940292", "0.7855972", "0.78502667", "0.7841351", "0.77415335", "0.77123237", "0.7674128", "0.74968827", "0.7459299", "0.7438276", "0.7380692", "0.7274986", "0.7238404", "0.7179149", "0.714998", "0.71346515", "0.71212363", "0.71189535", "0.710063", "0.70617604", "0.70410436", "0.70117766", "0.6982614", "0.6976885", "0.69685596", "0.69350344", "0.6924679", "0.69069296" ]
0.8476896
1
Tests the creation of an AmazonS3-specific data storer.
def testCreateDataStorer(self):
    self.assertTrue(isinstance(self._factory.createDataStorer("identifier"), DataS3Adapter))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_amazon_s3_store_data(self):\n config = Config()\n metadata_bucket = config.config.get(\"metadata\", \"bucket\")\n data_bucket = config.config.get(\"data\", \"bucket\")\n metadata_provider = amazon.S3(config, metadata_bucket).connect()\n provider = amazon.S3(config, data_bucket).connect()\n\n datas = dict()\n metadatas = dict()\n\n for data, metadata in ((\"Data 1\", \"Metadata 1\"),\n (\"Data 2\", \"Metadata 2\")):\n key = checksum_data(data)\n metadata_provider.store(key, metadata)\n provider.store(key, data)\n new_metadata = metadata_provider.retrieve(key)\n new_data = provider.retrieve(key)\n self.assertEqual(new_data, data)\n self.assertEqual(new_metadata, metadata)\n datas[key] = data\n metadatas[key] = metadata\n for key, metadata in metadata_provider.list().items():\n self.assertEqual(metadata, metadatas[key])\n for key, data in provider.list().items():\n self.assertEqual(data, datas[key])\n for key, metadata in metadatas.items():\n metadata_provider.delete(key)\n for key, data in datas.items():\n provider.delete(key)\n metadata_provider.disconnect()\n provider.disconnect()", "def test_create_bucket(self):\n pass", "def test_amazon_s3_store_filename(self):\n config = Config()\n metadata_bucket = config.config.get(\"metadata\", \"bucket\")\n data_bucket = config.config.get(\"data\", \"bucket\")\n metadata_provider = amazon.S3(config, metadata_bucket).connect()\n provider = amazon.S3(config, data_bucket).connect()\n key = checksum_file(\"LICENSE\")\n metadata_provider.store(key, \"LICENSE METADATA\")\n provider.store_from_filename(key, \"LICENSE\")\n t = tempfile.NamedTemporaryFile()\n metadata = metadata_provider.retrieve(key)\n provider.retrieve_to_filename(key, t.name)\n self.assertEqual(file(\"LICENSE\").read(), file(t.name).read())\n self.assertEqual(\"LICENSE METADATA\", metadata)\n metadata_provider.delete(key)\n provider.delete(key)\n metadata_provider.disconnect()\n provider.disconnect()", "def test_init(self, _s3):\n ps = PersistenceStore(s3_client=None)\n assert ps.s3_client is not None", "def test_s3_executor_create_object(sdc_builder, sdc_executor, aws):\n # setup test static\n s3_bucket = aws.s3_bucket_name\n s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string(string.ascii_letters, 10)}'\n raw_str = f'{{\"bucket\": \"{s3_bucket}\", \"company\": \"StreamSets Inc.\"}}'\n\n # Build the pipeline\n builder = sdc_builder.get_pipeline_builder()\n\n dev_raw_data_source = builder.add_stage('Dev Raw Data Source').set_attributes(data_format='JSON',\n raw_data=raw_str)\n\n record_deduplicator = builder.add_stage('Record Deduplicator')\n to_error = builder.add_stage('To Error')\n\n s3_executor = builder.add_stage('Amazon S3', type='executor')\n s3_executor.set_attributes(bucket='${record:value(\"/bucket\")}',\n task='CREATE_NEW_OBJECT',\n object=s3_key,\n content='${record:value(\"/company\")}')\n\n dev_raw_data_source >> record_deduplicator >> s3_executor\n record_deduplicator >> to_error\n\n s3_exec_pipeline = builder.build(title='Amazon S3 executor pipeline').configure_for_environment(aws)\n sdc_executor.add_pipeline(s3_exec_pipeline)\n\n client = aws.s3\n try:\n # start pipeline and capture pipeline messages to assert\n sdc_executor.start_pipeline(s3_exec_pipeline).wait_for_pipeline_output_records_count(1)\n sdc_executor.stop_pipeline(s3_exec_pipeline)\n\n # assert record count to S3 the size of the objects put\n list_s3_objs = client.list_objects_v2(Bucket=s3_bucket, Prefix=s3_key)\n assert len(list_s3_objs['Contents']) == 1\n\n # read data from S3 to assert it is what 
got ingested into the pipeline\n s3_contents = [client.get_object(Bucket=s3_bucket, Key=s3_content['Key'])['Body'].read().decode().strip()\n for s3_content in list_s3_objs['Contents']]\n\n assert s3_contents[0] == 'StreamSets Inc.'\n finally:\n delete_keys = {'Objects': [{'Key': k['Key']}\n for k in client.list_objects_v2(Bucket=s3_bucket, Prefix=s3_key)['Contents']]}\n client.delete_objects(Bucket=s3_bucket, Delete=delete_keys)", "def s3(ctx, bucket_name, data_file, region):\n ctx.obj['BUCKET_NAME'] = bucket_name\n ctx.obj['DATA_FILE'] = data_file\n ctx.obj['TYPE'] = 's3'\n ctx.obj['REGION'] = region", "def test_upload_new_file(self):\n ps = PersistenceStore(s3_client=S3NewUpload())\n\n try:\n ps.update({}, 'filename.json')\n except Exception:\n assert False, 'Exception raised'", "def setUp(self):\r\n \r\n factory.FileSystem._getConnectionPool = SimpleMock(SimpleMock(SimpleMock()))\r\n self._factory = factory.FileSystem(BaseConfiguration(\"http://s3.amazonaws.de/bucketname/keyname\"))", "def aws_s3_upload(self):\n\n # absolute path to the csv file to create\n csv_file = os.path.join(self.csvfile, \"DLTINS_20210117_01of01.csv\")\n\n # Test for correct data\n self.assertTrue(\n aws_s3_upload(\n csv_file,\n self.region_name,\n self.aws_access_key_id,\n self.aws_secret_access_key,\n self.bucket_name,\n )\n )\n\n # Test for non existent bucket\n self.assertFalse(\n aws_s3_upload(\n csv_file,\n \"useast\",\n self.aws_access_key_id,\n self.aws_secret_access_key,\n self.bucket_name,\n )\n )\n\n # Test for non existent region\n self.assertFalse(\n aws_s3_upload(\n csv_file,\n self.region_name,\n self.aws_access_key_id,\n self.aws_secret_access_key,\n \"nonexistentbucketname\",\n )\n )\n\n # Test for incorrect keys\n self.assertFalse(\n aws_s3_upload(\n csv_file,\n self.region_name,\n \"xjvachiahvlchabo;jvbo\",\n \"khkc vah haaih aih ika\",\n self.bucket_name,\n )\n )", "def setUp(self):\n self.inited = False\n self.skip_tests = True\n\n if self.inited:\n return\n\n if os.path.exists(TestS3.CONFIG_FILE_PATH):\n cp = ConfigParser.RawConfigParser()\n try:\n cp.read(TestS3.CONFIG_FILE_PATH)\n defaults = cp.defaults()\n for key, value in defaults.items():\n self.__dict__[key] = value\n except ConfigParser.ParsingError, e:\n print (\"Failed to read test_s3.conf config file. \"\n \"Got error: %s\" % e)\n super(TestS3, self).setUp()\n self.inited = True\n return\n\n from boto.s3.connection import S3Connection\n from boto.exception import S3ResponseError\n\n try:\n s3_host = self.s3_store_host\n access_key = self.s3_store_access_key\n secret_key = self.s3_store_secret_key\n bucket_name = self.s3_store_bucket\n except AttributeError, e:\n print (\"Failed to find required configuration options for \"\n \"S3 store. Got error: %s\" % e)\n self.inited = True\n super(TestS3, self).setUp()\n return\n\n s3_conn = S3Connection(access_key, secret_key, host=s3_host)\n\n self.bucket = None\n try:\n buckets = s3_conn.get_all_buckets()\n for bucket in buckets:\n if bucket.name == bucket_name:\n self.bucket = bucket\n except S3ResponseError, e:\n print (\"Failed to connect to S3 with credentials,\"\n \"to find bucket. Got error: %s\" % e)\n self.inited = True\n super(TestS3, self).setUp()\n return\n except TypeError, e:\n # This hack is necessary because of a bug in boto 1.9b:\n # http://code.google.com/p/boto/issues/detail?id=540\n print (\"Failed to connect to S3 with credentials. 
\"\n \"Got error: %s\" % e)\n self.inited = True\n super(TestS3, self).setUp()\n return\n\n self.s3_conn = s3_conn\n\n if not self.bucket:\n try:\n self.bucket = s3_conn.create_bucket(bucket_name)\n except boto.exception.S3ResponseError, e:\n print (\"Failed to create bucket. Got error: %s\" % e)\n self.inited = True\n super(TestS3, self).setUp()\n return\n else:\n self.clear_bucket()\n\n self.skip_tests = False\n self.inited = True\n self.default_store = 's3'\n\n super(TestS3, self).setUp()", "def test_sts_external_storage_location(self):\n bucket_name, _ = get_aws_env()\n _, folder, storage_location_id = self._configure_storage_location(\n sts_enabled=True\n )\n\n sts_read_creds = self.syn.get_sts_storage_token(\n folder[\"id\"], \"read_only\", output_format=\"boto\"\n )\n sts_write_creds = self.syn.get_sts_storage_token(\n folder[\"id\"], \"read_write\", output_format=\"boto\"\n )\n\n s3_read_client = boto3.client(\"s3\", **sts_read_creds)\n s3_write_client = boto3.client(\"s3\", **sts_write_creds)\n\n # put an object directly using our sts creds\n file_contents = \"saved using sts\"\n temp_file = self._make_temp_file(contents=file_contents, suffix=\".txt\")\n\n remote_key = f\"{folder.name}/sts_saved\"\n\n # verify that the read credentials are in fact read only\n with pytest.raises(Exception) as ex_cm:\n s3_read_client.upload_file(\n Filename=temp_file.name,\n Bucket=bucket_name,\n Key=remote_key,\n )\n assert \"Access Denied\" in str(ex_cm.value)\n\n # now create a file directly in s3 using our STS creds\n s3_write_client.upload_file(\n Filename=temp_file.name,\n Bucket=bucket_name,\n Key=remote_key,\n ExtraArgs={\"ACL\": \"bucket-owner-full-control\"},\n )\n\n # now read the file using our read credentials\n # S3 is not ACID so we add a retry here to try to ensure our\n # object will be available before we try to create the handle\n with_retry(\n lambda: s3_read_client.get_object(Bucket=bucket_name, Key=remote_key),\n retry_exceptions=[s3_read_client.exceptions.NoSuchKey],\n )\n\n # create an external file handle so we can read it via synapse\n file_handle = self.syn.create_external_s3_file_handle(\n bucket_name,\n remote_key,\n temp_file.name,\n storage_location_id=storage_location_id,\n )\n file = File(parentId=folder[\"id\"], dataFileHandleId=file_handle[\"id\"])\n file_entity = self.syn.store(file)\n\n # now should be able to retrieve the file via synapse\n retrieved_file_entity = self.syn.get(file_entity[\"id\"])\n with open(retrieved_file_entity.path, \"r\") as f:\n assert file_contents == f.read()", "def test_MissedS3folder(self):\n\t\tdashboardPage = DashboardPage(self.driver)\n\t\tdashboardPage.goToOnboard()\n\t\tdashboardPage.createCustomer(USER_NAME, \"\")\n\n\t\terrMes = dashboardPage.getErrorNoS3()\n\t\tself.assertEqual(errMes.text, ERR_S3)", "def test_creating_a_bucket(self):\n with self.client:\n self.create_bucket(self.get_user_token())", "def mock_s3_fixture():\n with mock_s3():\n yield", "def setUpClass(cls):\n\n cls.s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))\n cls.direct_expression = cls._read_expression_direct()\n cls.direct_cell = cls._read_cell_direct()", "def test_put_file(self):\n self.prepare_uploads()\n backend = BackendS3(**self.config)\n uploads = self.upload_path\n src = os.path.join(uploads, 'demo-test.tar.gz')\n id = utils.generate_id('demo-test.tar.gz')\n backend.put(src, id)\n path = '/'.join(backend.id_to_path(id)) + '/demo-test.tar.gz'\n self.assertTrue(backend.exists(path))", "def test_bucket_availability(self):\n s3 = 
boto3.resource('s3')\n bucket = s3.Bucket(app.config['S3_PHOTO_BUCKET'])\n exists = True\n try:\n s3.meta.client.head_bucket(Bucket=app.config['S3_PHOTO_BUCKET'])\n self.assertEqual(exists, True)\n except botocore.exceptions.ClientError as e:\n # If a client error is thrown, then check that it was a 404 error.\n # If it was a 404 error, then the bucket does not exist.\n error_code = e.response['Error']['Code']\n if error_code == '404':\n exists = False\n self.assertEqual(exists, True, msg='Bucket is not exist!')", "def test_create_bucket(self, boto_mock):\n conn = boto_mock.s3.connect_to_region()\n boto_mock.exception.S3ResponseError = boto.exception.S3ResponseError\n\n def raise_not_found(*_, **__):\n \"\"\" Raise a 'bucket not found' exception \"\"\"\n e = boto.exception.S3ResponseError(400, 'missing')\n e.error_code = 'NoSuchBucket'\n raise e\n conn.get_bucket = raise_not_found\n settings = {\n 'storage.bucket': 'new_bucket',\n 'storage.region': 'us-east-1',\n }\n S3Storage.configure(settings)\n conn.create_bucket.assert_called_with('new_bucket',\n location='us-east-1')", "def test_s3_get_bucket_info(self, mock_class):\n\n mock_class().list_objects.return_value = list(s3_fake_objects[\"bucket1\"].keys())\n mock_class().stat_object.side_effect = list(s3_fake_objects[\"bucket1\"].values())\n return_value = servicex_storage.s3_storage_manager.BucketInfo(name=\"bucket1\",\n size=60,\n last_modified=datetime.datetime(\n year=2021, month=10,\n day=1, hour=10,\n minute=10, second=10))\n test_obj = servicex_storage.s3_storage_manager.S3Store(s3_endpoint=\"abc\",\n access_key=\"abc\",\n secret_key=\"abc\")\n bucket_info = test_obj.get_bucket_info(\"bucket1\")\n self.assertEqual(bucket_info, return_value)", "def _s3_stash(self):\n s3_url = 's3://{}/{}'.format(BUCKET, self.atom_file)\n bucketpath = BUCKET.strip(\"/\")\n bucketbase = BUCKET.split(\"/\")[0]\n parts = urlparse.urlsplit(s3_url)\n mimetype = 'application/xml' \n \n conn = boto.connect_s3()\n\n try:\n bucket = conn.get_bucket(bucketbase)\n except boto.exception.S3ResponseError:\n bucket = conn.create_bucket(bucketbase)\n self.logger.info(\"Created S3 bucket {}\".format(bucketbase))\n\n if not(bucket.get_key(parts.path)):\n key = bucket.new_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n key.set_contents_from_filename(self.atom_file)\n msg = \"created {0}\".format(s3_url)\n self.logger.info(msg)\n else:\n key = bucket.get_key(parts.path)\n key.set_metadata(\"Content-Type\", mimetype)\n key.set_contents_from_filename(self.atom_file)\n msg = \"re-uploaded {}\".format(s3_url)\n self.logger.info(msg)", "def test_put_raises_on_overwriting(self):\n self.prepare_uploads()\n backend = BackendS3(**self.config)\n uploads = self.upload_path\n src1 = os.path.join(uploads, 'demo-test.tar.gz')\n src2 = os.path.join(uploads, 'test.jpg')\n id = utils.generate_id('demo-test.tar.gz')\n backend.put_variant(src1, id, 'demo-test.tar.gz')\n with assert_raises(x.FileExists):\n backend.put_variant(src2, id, 'demo-test.tar.gz')", "def test_exists_cache() -> None:\n s3_client = boto3.client(\"s3\", region_name=\"us-east-1\")\n s3_client.create_bucket(Bucket=\"example-bucket\")\n\n # Object should not exist.\n assert not File(\"s3://example-bucket/a\").exists()\n assert File(\"s3://example-bucket/a\").get_hash() == \"cb7880ecc11723b8b8cad37f6b5160251d7a765e\"\n\n # Update object outside of s3fs.\n s3_client.put_object(Body=b\"hello\", Bucket=\"example-bucket\", Key=\"a\")\n\n # Using the normal s3fs exists(), the existance check would be cached 
and\n # would now return an incorrect result.\n\n # However, File.exists() avoids using the s3fs cache and gives the correct result.\n # The hash should update as well.\n assert File(\"s3://example-bucket/a\").exists()\n assert File(\"s3://example-bucket/a\").get_hash() == \"ea438dc20234f0226736d407d7caba13f7e3c49e\"\n\n # Directory should not exist.\n assert not Dir(\"s3://example-bucket/dir/\").exists()\n\n # Update object outside of s3fs.\n s3_client.put_object(Body=b\"hello\", Bucket=\"example-bucket\", Key=\"dir/a\")\n\n # Directory should now exist.\n assert Dir(\"s3://example-bucket/dir/\").exists()", "def test_s3_table_functions(started_cluster):\n node.query(\n \"\"\"\n INSERT INTO FUNCTION s3\n (\n nc_s3,\n filename = 'test_file.tsv.gz',\n format = 'TSV',\n structure = 'number UInt64',\n compression_method = 'gz'\n )\n SELECT * FROM numbers(1000000)\n \"\"\",\n settings=settings,\n )\n\n assert (\n node.query(\n \"\"\"\n SELECT count(*) FROM s3\n (\n nc_s3,\n filename = 'test_file.tsv.gz',\n format = 'TSV',\n structure = 'number UInt64',\n compression_method = 'gz'\n );\n \"\"\"\n )\n == \"1000000\\n\"\n )", "def test_connect_to_aws_s3(self):\n conn = boto3.resource('s3', region_name='us-east-1')\n # We need to create the bucket since this is all in Moto's 'virtual' AWS account\n conn.create_bucket(Bucket='foobucket')\n\n s3_connector = S3Connector()\n\n try:\n s3_connector.connect(\"default\")\n except:\n self.fail(\"Could not connect to aws using mock aws s3\")", "def test_validate_bookstore_bucket():\n expected = {\n \"bookstore_valid\": True,\n \"publish_valid\": True,\n \"archive_valid\": True,\n \"clone_valid\": True,\n }\n settings = BookstoreSettings(s3_bucket=\"A_bucket\")\n assert validate_bookstore(settings) == expected", "def test_init_auto_create_bucket(self):\n access_key = 'fake_key'\n secret_key = 'fake_secret'\n bucket_name = 'fake_bucket'\n location = 'anywhere'\n\n with patch('boto.connect_s3') as mock_connect_s3:\n mock_error = S3ResponseError(999, \"Failed\")\n mock_error.error_code = \"NoSuchBucket\"\n mock_conn = Mock()\n mock_conn.get_bucket.side_effect = mock_error\n\n mock_connect_s3.return_value = mock_conn\n\n S3Backend(access_key=access_key, secret_key=secret_key,\n bucket_name=bucket_name, s3_location=location)\n\n mock_connect_s3.assert_called_once_with(access_key, secret_key)\n mock_conn.get_bucket.assert_called_once_with(bucket_name)\n\n mock_conn.create_bucket.assert_called_once_with(bucket_name, location=location)", "def main():\n t0 = time.time()\n parser = argparse.ArgumentParser()\n parser.add_argument('-e', '--env', default='LOCAL', help='Enter one of DOCKER, LOCAL or S3')\n parser.add_argument('--bucket-name', help='Enter S3 bucket')\n parser.add_argument('--aws-access-key-id', help='Enter AWS access key id')\n parser.add_argument('--aws-secret-access-key', help='Enter AWS secrest access key')\n parser.add_argument('--aws-region', default='us-west-2', help='Enter AWS region')\n # subparser = parser.add_subparsers(dest='subcommand', help='Can choose bucket name if S3 is chosen')\n # parser_bucket = subparser.add_parser('S3')\n # parser_bucket.add_argument('bucket', help='S3 bucket name')\n args = vars(parser.parse_args())\n args['env'] = args['env'].upper()\n if args['env'] != 'S3' and args['bucket_name']:\n parser.error('Can specify a bucket name with only S3...')\n if args['env'] == 'S3' and not (args['bucket_name'] and \n args['aws_access_key_id'] and\n args['aws_secret_access_key']):\n parser.error('Specify a bucket, access key and 
secret access key...')\n # print(args)\n # print(args['env'])\n # print(args['subcommand'])\n\n if args['env'] == 'S3' and args['aws_region'] != '':\n s3_client = create_client(\n \"s3\",\n region=args['aws_region'],\n access_key_id=args['aws_access_key_id'],\n secret_access_key=args['aws_secret_access_key']\n )\n os.environ['AWS_ACCESS_KEY_ID'] = args['aws_access_key_id'].strip()\n os.environ['AWS_SECRET_ACCESS_KEY'] = args['aws_secret_access_key'].strip()\n logger.info('Check to see whether s3 bucket exits...')\n try:\n s3.meta.client.head_bucket(Bucket=args['bucket_name'])\n logger.info(f\"S3 bucket {args['bucket_name']} exits...\")\n except Exception as e:\n logger.warn(f\"Bucket {args['bucket_name']} doesn't exist...\")\n logger.info('Creating bucket...')\n create_s3_bucket(s3_client, args['bucket_name'], args['aws_region'])\n\n\n config = configparser.ConfigParser()\n if args['env'] == 'DOCKER':\n CFG_FILE = r'/usr/local/airflow/config/etl_config.cfg'\n try:\n config.read(CFG_FILE)\n except Exception as e:\n print('Configuration file is missing or cannot be read...')\n raise\n elif args['env'] == 'S3':\n obj = s3_client.get_object(Bucket=args['bucket_name'], Key='config/etl_config.cfg')\n try:\n config.read_string(obj['Body'].read().decode())\n except Exception as e:\n print('Configuration file is missing or cannot be read...')\n raise\n else:\n CFG_FILE = r'/Users/home/Documents/dend/Data-Engineering-ND/Capstone/config/etl_config.cfg'\n try:\n config.read(CFG_FILE)\n except Exception as e:\n print('Configuration file is missing or cannot be read...')\n raise\n\n sas_jar_ver = config['APP']['sas_jar_ver']\n os.environ['SAS_JAR'] = \".\".join(sas_jar_ver.split('.')[:-1])\n\n if args['env'] == 'DOCKER':\n base_dir = config['DOCKER']['base_dir']\n data_dir = config['DOCKER']['data_dir']\n path = config['DOCKER']['sas_data_dir']\n sas_file_path = os.path.join(base_dir, data_dir, path)\n dict_dir = config['DOCKER']['dict_dir']\n files = json.loads(config['DOCKER']['input_files'])\n airport_file = os.path.join(base_dir, data_dir, config['DOCKER']['airports_file'])\n demographic_file = os.path.join(base_dir, data_dir, config['DOCKER']['us_demographics_file'])\n dictionary_file = os.path.join(base_dir, dict_dir, config['DOCKER']['dictionary_file'])\n output_dir = os.path.join(base_dir, config['DOCKER']['output_dir'])\n log_dir = os.path.join(base_dir, config['LOCAL']['log_dir'])\n log_file = config['LOCAL']['log_file']\n elif args['env'] == 'S3':\n bucket = args['bucket_name']\n path = config['S3']['s3_sas_key']\n dict_dir = config['S3']['s3_dict_key']\n csv_dir = config['S3']['s3_csv_key']\n sas_file_path = os.path.join(\"s3a://\", bucket, csv_dir, path)\n files = json.loads(config['S3']['input_files'])\n airport_file = os.path.join(\"s3a://\", bucket, csv_dir, config['S3']['airports_file'])\n demographic_file = os.path.join(\"s3a://\", bucket, csv_dir, config['S3']['us_demographics_file'])\n dictionary_file = os.path.join(\"s3a://\", bucket, config['S3']['dictionary_file'])\n output_dir = os.path.join(\"s3a://\", bucket, config['S3']['output_dir'])\n else:\n base_dir = config['LOCAL']['base_dir']\n data_dir = config['LOCAL']['data_dir']\n path = config['LOCAL']['sas_data_dir']\n sas_file_path = os.path.join(base_dir, data_dir, path)\n dict_dir = config['LOCAL']['dict_dir']\n files = json.loads(config['LOCAL']['input_files'])\n airport_file = os.path.join(base_dir, data_dir, config['LOCAL']['airports_file'])\n demographic_file = os.path.join(base_dir, data_dir, 
config['LOCAL']['us_demographics_file'])\n dictionary_file = os.path.join(base_dir, dict_dir, config['LOCAL']['dictionary_file'])\n output_dir = os.path.join(base_dir, config['LOCAL']['output_dir'])\n log_dir = os.path.join(base_dir, config['LOCAL']['log_dir'])\n log_file = config['LOCAL']['log_file']\n \n try:\n # Log file written to Hadoop EMR env\n base_dir = config['HADOOP']['base_dir']\n log_dir = os.path.join(base_dir, config['HADOOP']['log_dir'])\n log_file = config['HADOOP']['log_file']\n pathlib.Path(log_dir).mkdir(exist_ok=True)\n file_handler = enable_logging(log_dir, log_file)\n logger.addHandler(file_handler)\n print(\"Create log dir if it doesn't exist...\")\n except:\n base_dir = config['LOCAL']['base_dir']\n log_dir = os.path.join(base_dir, config['LOCAL']['log_dir'])\n log_file = config['LOCAL']['log_file']\n pathlib.Path(log_dir).mkdir(exist_ok=True)\n file_handler = enable_logging(log_dir, log_file)\n logger.addHandler(file_handler)\n print(\"Create log dir if it doesn't exist...\")\n\n\n logger.info('ETL parsing has started...')\n logger.info(\"Create output dir if it doesn't exist...\")\n if args['env'] != 'S3':\n pathlib.Path(output_dir).mkdir(exist_ok=True)\n else:\n # config.set('S3', 's3_bucket_name', args['bucket_name'])\n # s3_client.put_object(Bucket=args['bucket_name'], Key=config['S3']['config_dir'], Body=)\n s3_client.put_object(Bucket=args['bucket_name'], Key=config['S3']['output_dir'])\n logger.info('Created S3 bucket...')\n \n spark = create_spark_session()\n logger.info('Pyspark session created...')\n logger.info('Register UDFs...')\n \n spark.udf.register('SASDateConverter', sas_date_converter, Date())\n logger.info('Register sas_date_converter UDF...')\n\n # change_date_format_1 = F.udf(lambda x: datetime.strptime(x.strip(), '%Y%m%d'), Date())\n # change_date_format_2 = F.udf(lambda x: datetime.strptime(x.strip(), '%m%d%Y'), Date())\n dt = F.udf(change_date_format, Date())\n\n logger.info('Read and concatenate the raw SAS files...')\n dfs = []\n for file in files:\n try:\n df = spark.read.format('com.github.saurfang.sas.spark')\\\n .load(os.path.join(sas_file_path, file))\n dfs.append(df)\n except Exception as e:\n logger.info(f'File {file} is not available. 
Skipping...')\n logger.info(f'Read {len(files)} files successfully...')\n df = []\n if len(dfs) > 0:\n df = concat_df(*dfs)\n logger.info(f'Successfully concatenated {len(files)}...')\n if not isinstance(df, list):\n # SAS raw data table creation begins here\n cols = ['cicid', 'i94yr', 'i94mon', 'i94port', 'i94mode', 'visapost', \n 'entdepa', 'entdepd', 'entdepu', 'matflag', \n 'dtadfile', 'dtaddto']\n parquet_tables = ['i94_immigrations', 'i94_trips', 'i94_visitors', 'i94_flights']\n f_transforms = [i94_immigrations, i94_trips, i94_visitors, i94_flights]\n res_df = None\n for table, f_transform in zip(parquet_tables, f_transforms):\n if table == 'i94_immigrations':\n # only table not using spark sql\n res_df = create_and_write_df(df, table, f_transform, \n output_dir,\n spark=None, cols=cols,\n udf=dt, fmt='parquet',\n is_partition=True,\n is_overwrite=True,\n crate_date_df=False)\n elif table == 'i94_flights':\n res_df = create_and_write_df(df, table, f_transform, \n output_dir,\n spark=spark, cols=None,\n udf=None, fmt='csv',\n is_partition=False,\n is_overwrite=True,\n crate_date_df=False)\n else:\n res_df = create_and_write_df(df, table, f_transform, \n output_dir,\n spark=spark, cols=None,\n udf=None, fmt='parquet',\n is_partition=True,\n is_overwrite=True,\n crate_date_df=False)\n\n if table == 'i94_trips':\n table = 'i94_dates'\n create_and_write_df(res_df, table, i94_dates, \n output_dir,\n spark=spark, cols=None,\n udf=None, fmt='parquet',\n is_partition=True,\n is_overwrite=True,\n crate_date_df=False)\n\n # Reference data for airports and us city demographics begins here\n airport_df = spark.createDataFrame([], R([]))\n demographic_df = spark.createDataFrame([], R([]))\n logger.info('Read the airports reference file...')\n try:\n airport_df = spark.read.option('header', True) \\\n .csv(airport_file)\n except Exception as e:\n logger.error(f'File {airport_file} is not available. Skipping...')\n\n logger.info('Read the US demographics reference file...')\n try:\n demographic_df = spark.read.options(header='True', delimiter=';') \\\n .csv(demographic_file) \n except Exception as e:\n logger.error(f'File {demographic_file} is not available. 
Skipping...')\n if airport_df.count() > 0 and demographic_df.count() > 0: \n csv_tables = ['i94_airports', 'i94_us_states_demographic', \n 'i94_us_cities_demographic']\n f_transforms = [i94_airports, i94_us_states_demographic, i94_us_cities_demographic]\n csv_dfs = [airport_df, demographic_df, demographic_df]\n for table, f_transform, df in zip(csv_tables, f_transforms, csv_dfs):\n res_df = create_and_write_df(df, table, f_transform, \n output_dir,\n spark=spark, cols=None,\n udf=dt, fmt='csv',\n is_partition=False,\n is_overwrite=True)\n\n # SAS reference data creation begins here\n ref_csv_tables = ['i94_countries', 'i94_port_state_mapping', 'i94_travel_mode', \n 'i94_state_mapping', 'i94_visa']\n table_pos_dict = {\n 'i94_countries': [2, 3, 'country', 'country_id'],\n 'i94_port_state_mapping': [3, 4, 'city', 'i94_port'],\n 'i94_travel_mode': [4, 5, 'mode', 'mode_id'],\n 'i94_state_mapping': [5, 6, 'state', 'state_id'],\n 'i94_visa': [6, 7, 'visa_purpose', 'visa_id']\n }\n logger.info('Read the SAS data dictionary reference file...') \n for table in ref_csv_tables:\n create_and_write_ref_df(dictionary_file, table, output_dir, spark, \n fmt='csv', start_pos=table_pos_dict[table][0], \n end_pos=table_pos_dict[table][1],\n col_name=table_pos_dict[table][2], \n index_name=table_pos_dict[table][3],\n is_partition=False,\n is_overwrite=True)\n\n logger.info('ETL parsing has completed...')\n logger.info('Time taken to complete job {} minutes'.format((time.time() - t0) / 60))", "def test_buckets(self):\n objectstore.bucket.Bucket.create('new_bucket', self.context)\n bucket = objectstore.bucket.Bucket('new_bucket')\n\n # creator is authorized to use bucket\n self.assert_(bucket.is_authorized(self.context))\n\n # another user is not authorized\n context2 = context.RequestContext('user2', 'proj2')\n self.assertFalse(bucket.is_authorized(context2))\n\n # admin is authorized to use bucket\n admin_context = context.RequestContext('admin_user', None)\n self.assertTrue(bucket.is_authorized(admin_context))\n\n # new buckets are empty\n self.assertTrue(bucket.list_keys()['Contents'] == [])\n\n # storing keys works\n bucket['foo'] = \"bar\"\n\n self.assertEquals(len(bucket.list_keys()['Contents']), 1)\n\n self.assertEquals(bucket['foo'].read(), 'bar')\n\n # md5 of key works\n self.assertEquals(bucket['foo'].md5, hashlib.md5('bar').hexdigest())\n\n # deleting non-empty bucket should throw a NotEmpty exception\n self.assertRaises(NotEmpty, bucket.delete)\n\n # deleting key\n del bucket['foo']\n\n # deleting empty bucket\n bucket.delete()\n\n # accessing deleted bucket throws exception\n self.assertRaises(NotFound, objectstore.bucket.Bucket, 'new_bucket')", "def test_get_bucket(self):\n pass", "def test_s3_executor_non_existing_object(sdc_builder, sdc_executor, aws):\n # setup test static\n s3_bucket = aws.s3_bucket_name\n s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string(string.ascii_letters, 10)}'\n raw_str = f'{{\"bucket\": \"{s3_bucket}\", \"key\": \"{s3_key}\"}}'\n\n # Build the pipeline\n builder = sdc_builder.get_pipeline_builder()\n\n dev_raw_data_source = builder.add_stage('Dev Raw Data Source').set_attributes(data_format='JSON',\n raw_data=raw_str)\n\n s3_executor = builder.add_stage('Amazon S3', type='executor')\n s3_executor.set_attributes(bucket='${record:value(\"/bucket\")}',\n task='CHANGE_EXISTING_OBJECT',\n object='${record:value(\"/key\")}',\n tags=Configuration(property_key='key', company='${record:value(\"/company\")}'))\n\n dev_raw_data_source >> s3_executor\n\n s3_exec_pipeline = 
builder.build(title='Amazon S3 executor pipeline').configure_for_environment(aws)\n sdc_executor.add_pipeline(s3_exec_pipeline)\n\n # Read snapshot of the pipeline\n snapshot = sdc_executor.capture_snapshot(s3_exec_pipeline, start_pipeline=True).snapshot\n sdc_executor.stop_pipeline(s3_exec_pipeline)\n\n # All records should go to error stream.\n input_records = snapshot[dev_raw_data_source.instance_name].output\n stage = snapshot[s3_executor.instance_name]\n assert len(stage.error_records) == len(input_records)" ]
[ "0.7115106", "0.69014126", "0.67061657", "0.6605487", "0.65491045", "0.6284498", "0.6261592", "0.6202564", "0.61872977", "0.6168269", "0.6154194", "0.6150717", "0.6142914", "0.6121535", "0.60793626", "0.6062802", "0.60626155", "0.6047116", "0.60463965", "0.60335803", "0.6031361", "0.60310966", "0.60062826", "0.5984072", "0.5982252", "0.5970054", "0.5955365", "0.5948528", "0.59483904", "0.594682" ]
0.8074689
0
Tests to update the credentials
def testUpdateCredentials(self): credentials = dict() credentials["username"] = "" credentials["password"] = "" self._factory.updateCredentials(credentials)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update_user_profile(self):\n\n new_credentials = {'name': 'New Name', 'password': 'NewTestpass12'}\n response = self.client.patch(URL_ME, new_credentials)\n\n # Refresh the details of the user from the database.\n self.user.refresh_from_db()\n\n # Check that the update is successful.\n self.assertEqual(self.user.name, new_credentials['name'])\n self.assertTrue(self.user.check_password(new_credentials['password']))\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_save_creds(self):\n self.new_credentials.save_creds()\n self.assertEqual(len(Credentials.credential_list),1)", "def set_credentials():", "def test_update_password(self):\n\n sync = SyncUserAndGroups(\n tsurl=TS_URL,\n username=TS_USER,\n password=TS_PASSWORD,\n disable_ssl=True,\n )\n auag = UsersAndGroups()\n auag.add_user(\n User(name=\"userx\", mail=\"[email protected]\", display_name=\"User X\", password=\"password1\")\n )\n # sync updates\n sync.sync_users_and_groups(users_and_groups=auag)\n sync.update_user_password(\n userid=\"userx\", currentpassword=TS_PASSWORD, password=\"password2\"\n )", "def test_valid_update_user_password(self):\n\n data = {\n 'password': 'pedro123456',\n 'new_password': 'pedro123456789',\n 'confirm_password': 'pedro123456789'\n }\n response = self.client.put(self.url, data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def setUp(self):\n self.new_credentials = Credentials(\"gmail\", \"Zephon Makale\", \"1234xyz\")", "def setUp(self):\n self.new_credentials = Credentials(\"Facebook\",\"Josphato\",\"jose!!otieno@45\")", "def test_update_user(self):\n pass", "def test_update(sqlite_db):\n updated_pass = \"TheUpdatedPassword\"\n site = \"www.example.com\"\n response = smm.update_passwd(site, updated_pass)\n assert response\n assert smm.read_passwd(site) == updated_pass\n bad_response = smm.update_passwd(\"NotASite\", updated_pass)\n assert not bad_response", "def test_update_profile_valid_put(self):\n update_user = {\n 'email': '[email protected]',\n 'password': 'NewPassword!',\n }\n res = self.client.put(ME_URL, update_user)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n self.user.refresh_from_db()\n\n self.assertTrue(self.user.check_password(update_user['password']))\n self.assertEquals(self.user.email, update_user['email'])\n self.assertTrue(self.user.name)", "def setUp(self):\n self.new_credentials = Credentials(\"Instagram\",\"bensongathu\",\"vcxz4321\")", "def test_init(self):\n self.assertEqual(self.new_credentials.account,\"Instagram\")\n self.assertEqual(self.new_credentials.username,\"bensongathu\")\n self.assertEqual(self.new_credentials.password,\"vcxz4321\")", "def test_find_credentials(self):\n self.new_credentials.save_credentials()\n new_account= Credentials(\"Twitter\",\"josephat_otieno\", \"joseotis45\")\n new_account.save_credentials()\n\n found_credential= Credentials.find_credentials(\"Twitter\")\n\n self.assertEqual(found_credential.account_name,new_account.account_name)", "def test_update_profile_success(self):\n payload = {\n 'email': '[email protected]',\n 'password': 'newpassword'\n }\n res = self.client.patch(ME_URL, payload)\n\n # Refresh the user object with latest values from db\n self.user.refresh_from_db()\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(self.user.email, payload['email'])\n self.assertTrue(self.user.check_password(payload['password']))", "def setUp(self):\n self.new_cred = Credentials('github','Lugaga', 'tangodown!')", "def test_update_user_profile(self):\n 
payload = {'name': 'Test name', 'password': 'new_password'}\n res = self.client.patch(ME_URL, payload)\n\n self.user.refresh_from_db()\n self.assertEqual(self.user.name, payload['name'])\n self.assertTrue(self.user.check_password(payload['password']))\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def setUp(self):\n self.credentials = {\n \"username\": \"BobRobert\",\n \"first_name\": \"Bob\",\n \"last_name\": \"Robert\",\n \"email\": \"[email protected]\",\n \"password\": \"fglZfYmr%?,\",\n }", "def test_init(self):\n self.assertEqual(self.new_credential.app_name, \"MySpace\")\n self.assertEqual(self.new_credential.account_name, \"Ghostke99\")\n self.assertEqual(self.new_credential.account_password, \"daimaMkenya001\")", "def test_update_user_profile(self):\n payload = {'name': 'new name', 'password': 'newpassword123'}\n\n res = self.client.patch(ME_URL, payload)\n\n self.user.refresh_from_db()\n self.assertEqual(self.user.name, payload['name'])\n self.assertTrue(self.user.check_password(payload['password']))\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def test_update_user_profile(self):\n payload = {'name': 'new name', 'password': 'newpassword123'}\n\n response = self.client.patch(ME_URL, payload)\n\n self.user.refresh_from_db()\n self.assertEqual(self.user.name, payload['name'])\n self.assertTrue(self.user.check_password(payload['password']))\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_update(self):\n user = self.custodian_1_user\n user_client = self.custodian_1_client\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n new_first_name = \"New First Name\"\n data = {\n \"first_name\": new_first_name,\n }\n access = {\n \"forbidden\": [self.anonymous_client, self.readonly_client, self.custodian_2_client],\n \"allowed\": [self.admin_client, user_client]\n }\n\n for client in access['forbidden']:\n for url in urls:\n self.assertIn(\n client.patch(url, data, format='json').status_code,\n [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]\n )\n\n for client in access['allowed']:\n for url in urls:\n new_first_name += '1'\n data['first_name'] = new_first_name\n self.assertEqual(\n client.patch(url, data, format='json').status_code,\n status.HTTP_200_OK\n )\n user.refresh_from_db()\n self.assertEqual(user.first_name, new_first_name)", "def test_update_user_profile(self):\n payload = {\"name\": \"Lucifer\", 'password': \"12346987\"}\n res = self.client.patch(ME_URL, payload)\n\n self.user.refresh_from_db()\n\n self.assertEqual(self.user.name, payload['name'])\n self.assertTrue(self.user.check_password(payload['password']))\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def test_update_profile_valid_patch(self):\n update_fields = {\n 'password': 'NewPassword!',\n 'name': 'Mona Lisa'\n }\n res = self.client.patch(ME_URL, update_fields)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n self.user.refresh_from_db()\n\n self.assertTrue(self.user.check_password(update_fields['password']))\n self.assertEquals(self.user.name, update_fields['name'])", "def test_update_user_profile(self):\r\n payload = {\r\n 'name': 'new_name',\r\n 'password': 'password123'\r\n }\r\n\r\n res = self.client.patch(ME_URL, payload)\r\n\r\n self.user.refresh_from_db()\r\n\r\n self.assertEqual(self.user.name, payload['name'])\r\n self.assertTrue(self.user.check_password(payload['password']))\r\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def test_update_self_fail(self):\n new_user = self.create_user('1')\n url = 
'/0/chefs/' + str(new_user.pk)\n\n headers = self.login()\n resp = self.client.put(url, **headers)\n self.assertInvalidCredentials(resp)", "def test_update_account_user(self):\n self._require_login()\n\n response = self.client.put('/v1/users/' +str(self.user.id)+'/',\n {\"username\": 'toni@malucao', \"password\": 'cidadeeee'},\n format='json')\n\n self.assertEqual(response.status_code, 200,\n 'Expected Response Code 200, received {0} instead.'.format(response.status_code))", "def test_validate_credentials(self):\n pass", "def test_account_update(self):\r\n params = {\r\n 'name': u'Test Admin'\r\n }\r\n res = self.testapp.post(\r\n str(u\"/api/v1/admin/account?api_key=\" + str(API_KEY)),\r\n content_type='application/json',\r\n params=json.dumps(params),\r\n status=200)\r\n\r\n # make sure we can decode the body\r\n user = json.loads(res.body)\r\n\r\n self.assertEqual(\r\n user['username'], 'admin',\r\n \"Should have a username of admin {0}\".format(user))\r\n self.assertEqual(\r\n user['name'], 'Test Admin',\r\n \"Should have a new name of Test Admin {0}\".format(user))\r\n\r\n self.assertTrue(\r\n 'password' not in user,\r\n \"Should not have a field password {0}\".format(user))\r\n self.assertTrue(\r\n '_password' not in user,\r\n \"Should not have a field password {0}\".format(user))\r\n self.assertTrue(\r\n 'api_key' not in user,\r\n \"Should not have a field password {0}\".format(user))\r\n self._check_cors_headers(res)", "def test_update(self):\n\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.update(TOOLNAME,username,userpass)", "def test_password_change_provided(self):\n token = str((jwt.encode(\n {\"email\": \"[email protected]\"}, \n settings.SECRET_KEY)).decode('utf-8')\n )\n self.client.post(self.registration_url, valid_user, format='json')\n response = self.client.patch(\n self.change_password_url+\"?token=\"+token, {\"pwd\": \"bagenda1234\"},\n format='json'\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data['errors']\n [0], \"Password field is required.\")" ]
[ "0.7746443", "0.7330875", "0.7330343", "0.7273652", "0.726221", "0.7201267", "0.7166417", "0.7116094", "0.71063346", "0.70503443", "0.7034849", "0.70041025", "0.6960353", "0.69588923", "0.69543844", "0.69487727", "0.6941252", "0.6916871", "0.69129294", "0.69068056", "0.6888339", "0.6883874", "0.6869598", "0.68637484", "0.6859414", "0.68585813", "0.6852854", "0.6849769", "0.6812748", "0.6803697" ]
0.81626886
0
This function is called periodically during autonomous
def AutonomousPeriodic(self): Scheduler.GetInstance().Run()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def autonomousPeriodic(self):\n self.teleopPeriodic()", "def autonomousInit(self):\n #self.timer.reset()\n #self.timer.start()\n pass", "def realtime(self):", "def autonomousPeriodic(self):\n '''\n for i in self.dataSet:\n if i[4][0] < self.timer.get() and self.timer.get() <= i[4][1]:\n self.drive.arcadeDrive(i[0],i[1])\n self.SV1.set(i[3])\n self.sd.putValue(\"Camera\",i[5])\n else:\n self.drive.arcadeDrive(0,0)\n '''\n\n #self.auto = self.sd.getNumber(\"auto\",0)\n #test\n #if(self.auto != 1):\n if self.auto == 6:\n if self.autoState == 0:\n if self.gyro.getAngle() >= -41 and self.gyro.getAngle() <= 0:\n self.drive.arcadeDrive(0.5,-0.4)\n else:\n self.autoState = 1\n self.EC1.reset()\n if self.autoState == 1:\n if self.EC1.getDistance() <= 282 and self.EC1.getDistance() >= 0:\n self.drive.arcadeDrive(0.6,0)\n else:\n self.autoState = 2\n if self.autoState == 2:\n if self.gyro.getAngle() >= -41 and self.gyro.getAngle() <= 0:\n self.drive.arcadeDrive(0.5,0.4)\n else:\n self.autoState = 3\n self.EC1.reset()\n if self.autoState == 3:\n if self.EC1.getDistance() <= 120 and self.EC1.getDistance() >= 0:\n self.drive.arcadeDrive(0,6,0)\n else:\n self.autoState = 4\n if self.autoState == 4:\n if self.EC2.getDistance() <= 831 and self.EC2.getDistance() >= 0: #shoulder\n self.S1.set(-0.25)\n self.S2.set(-0.25)\n else:\n self.autoState = 5\n if self.autoState == 5:\n if self.EC2.getDistance() >= 831 and self.EC2.getDistance() <= 887:\n self.goldenArrowhead.set(False)\n self.S1.set(-0.25)\n self.S2.set(-0.25)\n else:\n self.autoState = 6", "def autonomousInit(self):\n self.globalInit()\n self.autonomous.start()", "def __periodic_maintenance__(self):\n pass", "def run():\r\n autostartup()", "def TeleopPeriodic(self):\n Scheduler.GetInstance().Run()\n LiveWindow.Run()", "def robotPeriodic(self):\n\n wpilib.SmartDashboard.putNumber(\"YawAngle\", self.m_imu.getAngle())\n wpilib.SmartDashboard.putNumber(\n \"XCompAngle\", self.m_imu.getXComplementaryAngle()\n )\n wpilib.SmartDashboard.putNumber(\n \"YCompAngle\", self.m_imu.getYComplementaryAngle()\n )\n self.m_runCal = wpilib.SmartDashboard.getBoolean(\"RunCal\", False)\n self.m_configCal = wpilib.SmartDashboard.getBoolean(\"ConfigCal\", False)\n self.m_reset = wpilib.SmartDashboard.getBoolean(\"Reset\", False)\n self.m_setYawAxis = wpilib.SmartDashboard.getBoolean(\"SetYawAxis\", False)\n self.m_yawSelected = self.m_yawChooser.getSelected()\n\n # Set IMU settings\n if self.m_configCal:\n self.m_imu.configCalTime(ADIS16470CalibrationTime._8s)\n wpilib.SmartDashboard.putBoolean(\"ConfigCal\", False)\n self.m_configCal = False\n\n if self.m_reset:\n self.m_imu.Reset()\n wpilib.SmartDashboard.putBoolean(\"Reset\", False)\n self.m_reset = False\n\n if self.m_runCal:\n self.m_imu.Calibrate()\n wpilib.SmartDashboard.putBoolean(\"RunCal\", False)\n self.m_runCal = False\n\n # Read the desired yaw axis from the dashboard\n if self.m_yawSelected == \"X-Axis\":\n self.m_yawActiveAxis = ADIS16470_IMU.IMUAxis.kX\n elif self.m_yawSelected == \"Y-Axis\":\n self.m_yawActiveAxis = ADIS16470_IMU.IMUAxis.kY\n else:\n self.m_yawActiveAxis = ADIS16470_IMU.IMUAxis.kZ\n\n # Set the desired yaw axis from the dashboard\n if self.m_setYawAxis:\n self.m_imu.SetYawAxis(self.m_yawActiveAxis)\n wpilib.SmartDashboard.putBoolean(\"SetYawAxis\", False)\n self.m_setYawAxis = False", "def everytime(self):\n return True", "def _loop(self):\n while True:\n if GameLoop.getInstance()._cancelation_token==True:\n break\n self._update_signal.notify_all()\n sleep(1/60)", "def 
teleopPeriodic(self):\n self.drive.arcadeDrive(1, 0)\n self.brushless.set(1)\n self.spark.set(self.joystick.getY())", "def on_tick(self, time):\n pass", "def on_start(self):", "def on_start(self):", "def run(self):\r\n while True:\r\n if self.camera_device.is_detecting():\r\n self.alarm_device.switch_alarm()", "def autonomousInit(self):\n '''\n self.cumulativeTime=0\n self.totalTime=0\n self.dataSet=[[-0.5,0,1,-1.0],[0.3,0.4,1,1.0],[-0.5,0,1,-1.0]]\n for i in self.dataSet:\n self.totalTime+=i[2]\n self.intervals = 0\n self.currentTime = 0\n for i in range(0,len(self.dataSet)):\n self.dataSet[i].append([self.currentTime,self.currentTime+self.dataSet[i][2]])\n self.currentTime+=self.dataSet[i][2]\n for i in self.dataSet:\n if i[3]==1.0:\n i.append(\"Forward\")\n if i[3]==-1.0:\n i.append(\"Backward\")\n \n self.timer.reset()\n self.timer.start()\n '''\n self.timer.reset()\n self.timer.start()\n\n #self.auto = self.chooser.getSelected()\n self.auto = 6\n self.autoState = 0\n #self.auto = 1\n\n self.EC1.reset()\n \n\n #self.auto = self.chooser.getSelected()", "def globalInit(self):\n self.updateodemetry.start()", "def periodicUpdate(self):\n try:\n logging.info(f'{self.cn} periodicUpdate = Start')\n isHaz = JsonSettings.parseJson('settings.json','isHazelcast')\n if self.db.isDb():\n self.insertStats()\n self.insertPorts()\n if isHaz:\n self.insertHaz() \n else:\n self.db.initDb()\n self.insertSys()\n self.insertStats()\n self.insertPorts()\n if isHaz:\n self.insertHaz() \n except Exception as e:\n logging.critical(f'{self.cn} Exception: {e}')\n logging.critical(f'{self.cn} StackTrace: \\n', exc_info=1)\n finally:\n logging.info(f'{self.cn} periodicUpdate = End')", "def sync_start(self):", "def on_timer(self):\n self.read_serial_data()\n # self.update_monitor()", "def on_run(self):\r\n\r\n\t\tpass", "def service( self ):\n\n self.alive = time.time()", "def __auto_mode(self):\n while True:\n # establish connection\n while True:\n if self.android_api.is_connect():\n break\n self.android_api.init_bluetooth()\n time.sleep(0.05)\n\n\n if self.android_api.is_map_empty():\n if self.production:\n # self.print_msg(\"Waiting for map update\")\n time.sleep(0.05)\n continue\n else:\n self.__test_run_pipeline_style()\n else:\n self.print_msg(\"Updating map\")\n self.android_api.map_pop_n_exe()\n time.sleep(0.05)", "def monitor(self):\n if self.startup():\n time.sleep(0.250)\n self.run()", "def testPeriodic(self):\n wpilib.LiveWindow.run()", "def loop(self):\n pass", "def _idle(self):\n # self._purge_timedout()\n # ...", "def refresh(self):\n\n self._refreshed_on = time.time() * 1000", "def TestPeriodic(self):\n LiveWindow.Run()" ]
[ "0.76939803", "0.7333295", "0.7000954", "0.6831967", "0.6753689", "0.67142034", "0.6691596", "0.6546741", "0.65020126", "0.6491816", "0.6403982", "0.63931847", "0.63259196", "0.631717", "0.631717", "0.63160294", "0.6300988", "0.62993395", "0.62885785", "0.6281241", "0.6273936", "0.62721556", "0.62640697", "0.62419796", "0.6238892", "0.6232729", "0.6222074", "0.6194677", "0.61881393", "0.61715096" ]
0.78060895
0
This function is called periodically during test mode
def TestPeriodic(self): LiveWindow.Run()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testRefresh(self):\n \n pass", "def test_run_started(self):", "def testPeriodic(self):\n wpilib.LiveWindow.run()", "def test_heartbeat(self):\n pass", "def trial(self):\n pass", "def startTestRun(self):", "def test_run_ended(self):", "def everytime(self):\n return True", "def runtest(self):", "def on_test_begin(self, logs=None):", "def test(self):\n return test_throttle_method()", "def run(self):\n self.speed_test.start()", "def startTest(self, test):\n self._timer = time()", "def test_sleep():\n time.sleep(3600 * 24)", "def test_issue_reset_time(self):\n pass", "def _test(self):\n pass", "def _test(self):\n pass", "def _test(self):\n pass", "def runTest(self):\n return True", "def startTestHook(self):", "def runTests(self):\n \n pass", "def __init__(self):\n sleep(10)", "def timer_setup(self):\n pass", "def postRun(self):\n pass", "def monitor(self):", "def on_test_end(self, logs=None):", "def test_time(self):\r\n pass", "def work(self):\n time.sleep(random.randint(0, 200) / 100)\n pass", "def setUp(self):\n self.t = Timew()", "def setUp(self):\n self.t = Timew()" ]
[ "0.75196457", "0.7250264", "0.7207904", "0.7137617", "0.7080581", "0.6928592", "0.6900251", "0.6891117", "0.68818337", "0.67345786", "0.66311336", "0.66012233", "0.65951467", "0.6491059", "0.647534", "0.6475096", "0.6475096", "0.6475096", "0.647335", "0.64714676", "0.6459485", "0.64253366", "0.6424397", "0.6404042", "0.6382268", "0.6367219", "0.63668734", "0.63466614", "0.6320069", "0.6320069" ]
0.7506525
1
Run the Opsy server.
def run(script_info): app = script_info.load_app() server = create_server(app) try: host = app.config.opsy['server']['host'] port = app.config.opsy['server']['port'] proto = 'https' if server.ssl_adapter else 'http' app.logger.info(f'Starting Opsy server at {proto}://{host}:{port}/...') app.logger.info(f'API docs available at {proto}://{host}:{port}/docs/') server.start() except KeyboardInterrupt: app.logger.info('Stopping Opsy server...') finally: server.stop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self):\n\t\t\n\t\tself.connect(self.config[\"server\"])", "def main():\n return run_server(**parse_server_args())", "def run(self):\n self._server = self._get_server()\n self._server.serve_forever()", "def main():\n docopt = docoptinit(__doc__)\n logging.basicConfig(level=logging.INFO,\n format='[%(asctime)s] [%(levelname)s] [ %(filename)s:%(lineno)s - %(name)s ] %(message)s ')\n logging.info('basic config')\n # qb.set_logger(__file__, debug=docopt['--debug'])\n host = docopt['--host']\n port = int(docopt['--port'])\n if not (1 <= port <= 65535):\n raise Exception('port must be 1-65535')\n\n global verbose\n verbose = int(docopt['--verbose'])\n loop = asyncio.get_event_loop()\n try:\n loop.run_until_complete(start_warp_server(host, port))\n loop.run_forever()\n except OSError:\n pass\n except KeyboardInterrupt:\n print('bye')\n finally:\n loop.close()", "def run_server(self, _):\n if not ENABLE_SERVER:\n logger.info('server not enabled, exit')\n return\n app.run(host=API_HOST, port=API_PORT, threaded=API_THREADED)", "def run(self):\n self.__server.serve_forever()", "def main():\n\n apps = [\n 'fires', 'hw6',\n 'imageapp',\n 'quixote_demo',\n 'quotes',\n 'chat',\n 'cookie'\n ]\n parser = argparse.ArgumentParser(\n description='A WSGI Server implemented for CSE491-001.',\n epilog='Please check the non-existent documentation for more info.',\n formatter_class=argparse.RawTextHelpFormatter\n )\n # Add the '-?' alias for '--help', which I prefer to use:\n parser.add_argument('-?',\n action='help',\n help='Alias for --help')\n # Add the application argument:\n parser.add_argument('--app',\n nargs='?',\n dest='app',\n default='fires',\n choices=apps,\n help='\\n'.join([\n 'Which WSGI application to run.',\n '(default: \"%(default)s\" - my homework 6)',\n 'Alias: -A'\n ]))\n parser.add_argument('-A',\n nargs='?',\n dest='app',\n default='fires',\n choices=apps,\n help=argparse.SUPPRESS)\n # Add the port argument:\n parser.add_argument('--port',\n nargs='?',\n default=random.randint(8000, 9999),\n type=int,\n help='\\n'.join([\n 'Which port to start the server on.',\n '(default: random integer between 8000 and 9999)',\n 'Alias: -p'\n ]))\n # After that, parse the command-line arguments.\n args = parser.parse_args()\n\n # Create a socket object\n sock = socket.socket()\n # Get local machine name\n host = socket.getfqdn()\n\n if host in ('magrathea', 'Thoth'):\n # For testing, I don't want to have to change my url all the damn time.\n port = 8080\n else:\n port = args.port\n # Bind to the port\n # TODO figure out how to immediately unbind when I'm done\n sock.bind((host, port))\n print 'Starting server at http://%s:%d/' % (host, port)\n # Now wait for client connection.\n sock.listen(5)\n\n # get this from commandline\n app_to_run = args.app\n if app_to_run == 'quixote_demo':\n # quixote stuff for testing with that\n p = create_publisher()\n # p.is_thread_safe = True # hack...\n wsgi_app = quixote.get_wsgi_app()\n elif app_to_run == 'imageapp':\n imageapp.setup()\n p = imageapp.create_publisher()\n wsgi_app = quixote.get_wsgi_app()\n elif app_to_run == 'quotes':\n wsgi_app = QuotesApp('./quotes/quotes.txt', './quotes/html')\n elif app_to_run == 'chat':\n wsgi_app = ChatApp('./chat/html')\n elif app_to_run == 'cookie':\n wsgi_app = cookieapp.wsgi_app\n else: #if app_to_run == 'fires': # default\n wsgi_app = app.make_app()\n\n\n print 'Entering infinite loop; hit CTRL-C to exit'\n try:\n while True:\n # Establish connection with client.\n conn, (client_host, client_port) = sock.accept()\n 
print 'Got connection from', client_host, client_port\n handle_connection(conn, wsgi_app)\n finally:\n # teardown stuffs\n if app_to_run == 'imageapp':\n imageapp.teardown()\n sock.shutdown(2)\n sock.close()", "def main():\n s = start_server()\n accept_connection(s)", "def run():\n import argparse\n\n parser = argparse.ArgumentParser(description='Phovea Server')\n parser.add_argument('--use_reloader', action='store_true', help='whether to automatically reload the server')\n parser.add_argument('--env', default=cc.get('env'), help='environment mode (dev or prod)')\n\n # parse before to enable correct plugin discovery\n args = parser.parse_known_args()[0]\n if args.env.startswith('dev'):\n enable_dev_mode()\n else:\n enable_prod_mode()\n\n # resolve the default command to decide which application to launch\n default_command = _resolve_commands(parser)\n if default_command is not None:\n # set a default subparse to extract the defined arguments from the instance to the main arguments (?)\n set_default_subparser(parser, default_command)\n\n args = parser.parse_args()\n\n _set_runtime_infos(args)\n\n main = args.launcher(args) # execute the launcher function, which returns another function\n\n if args.use_reloader:\n _log.info('start application using reloader...')\n run_with_reloader(main, extra_files=_config_files())\n else:\n _log.info('start application...')\n main()", "def run_server():\n app = init_app()\n app.run(host=app.config['HOST'], port=app.config['PORT'])", "def server_cli():\n\n import argparse\n\n parser = argparse.ArgumentParser(\n description=\"The free and open-source paste bin and trash can \"\n \"for your stuff.\")\n parser.add_argument('--host', help='Host to listen on')\n parser.add_argument('--port', type=int, help='Port to listen on')\n parser.add_argument('--debug', help='Activate debug mode',\n action='store_true')\n args = parser.parse_args()\n\n app = create_app()\n\n print \" * Starting bepasty server...\"\n app.run(\n host=args.host,\n port=args.port,\n debug=args.debug\n )", "def main(self):\n self.parse_option()\n self.set_option()\n\n r = Bootscripts()\n reactor.listenTCP(8009, server.Site(r))\n reactor.run()", "def run():\r\n log.debug('Starter::run()')\r\n try:\r\n # check specified port\r\n if not conf.port:\r\n raise Exception(\"Please specify port number! (use --port)\")\r\n Server(conf.port).run()\r\n except Exception as E:\r\n log.critical(E)", "def main():\n parser = create_arg_parser()\n\n # If script run without arguments, print syntax\n if len(sys.argv) == 1:\n parser.print_help()\n sys.exit(1)\n\n # Parse arguments\n args = parser.parse_args()\n host = args.h\n mode = args.m\n port = args.p\n debug_mode = args.debug\n\n # Run server with user-given arguments\n run_server(host, port, mode, debug_mode)", "def start_server(self):\n app.run(host=str(self.__constants.host),\n port=int(self.__constants.port),\n debug=bool(self.__constants.runindebug))", "def main():\n print(\"Starting python server...\")\n\n # Set address to localhost\n address = \"tcp://127.0.0.1:\" + parse_port()\n\n # Start server with class API as \n server = zerorpc.Server(API.API())\n server.bind(address)\n\n print(\"Server started running on {}\".format(address))\n\n # Blocking command. 
Keeps server running\n server.run()", "def runserver():\n\tapp.run(host = '0.0.0.0', port = 5000)", "def main():\n lgs = LifeGenServer()\n lgs.listening()", "def main():\n tornado.options.parse_command_line()\n ioloop = tornado.ioloop.IOLoop.instance()\n http_server = tornado.httpserver.HTTPServer(App())\n http_server.listen(options.port)\n tornado.autoreload.start()\n ioloop.start()", "def __run_server(self):\n os.chdir(os.path.dirname(self.server_path))\n self.server_process = subprocess.Popen([self.server_path, \\\n \"{}:{}\".format(self.args.ip, self.args.port)])", "def run():\n app = Application()\n #app.sentry_client = AsyncSentryClient(app.settings['sentry_url'])\n http_server = HTTPServer(app, xheaders=True)\n http_server.listen(options.port)\n print('Running on port %d' % options.port)", "def Run(self):\n self.BuildWebAppSite()\n\n self.BuildRPCSite(self.env.umpire_cli_port, self.methods_for_cli, '0.0.0.0')\n self.BuildRPCSite(self.env.umpire_rpc_port, self.methods_for_dut)\n\n # Start services.\n reactor.callWhenRunning(self.OnStart)\n # And start reactor loop.\n reactor.run()", "def runserver():\n app.run(host=config.HOST, port=config.PORT, debug=config.DEBUG, threaded=config.THREADED)", "def main() -> None:\n try:\n # ServerManager expects cwd to be the server dir (containing\n # dist/, config.yaml, etc.)\n # Let's change our working directory to the location of this file\n # so we can run this script from anywhere and it'll work.\n os.chdir(os.path.abspath(os.path.dirname(__file__)))\n\n ServerManagerApp().run_interactive()\n except CleanError as exc:\n # For clean errors, do a simple print and fail; no tracebacks/etc.\n exc.pretty_print()\n sys.exit(1)", "def start(self):\n self.serve_forever()", "def start(self):\n self.serve_forever()", "def server():", "def server():", "def run(self):\n self.__rpc_server.run()", "def run():\n\n # Construct a server.\n server = wsgiref.simple_server.make_server(\n _config[ 'address' ],\n _config[ 'port' ],\n application\n )\n\n # Run the server.\n server.serve_forever()\n\n # Return result.\n return 0" ]
[ "0.6982762", "0.6922084", "0.68232703", "0.67431873", "0.6727341", "0.6725", "0.67038316", "0.667355", "0.66723275", "0.66492367", "0.6638128", "0.6635666", "0.66108435", "0.6598311", "0.65349036", "0.65341073", "0.6523984", "0.6489315", "0.6486966", "0.64636195", "0.64170146", "0.6416951", "0.64005536", "0.63736963", "0.6370866", "0.6370866", "0.63674515", "0.63674515", "0.63476384", "0.63420844" ]
0.7912345
0
List all permissions the app is aware of.
def permission_list(**kwargs): print(AppPermissionSchema(many=True).dumps( get_protected_routes(ignored_methods=["HEAD", "OPTIONS"]), indent=4))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_permissions(self):\n # type: () -> List[Permission]\n headers = Headers({\"accept\": \"application/json\"})\n return self.connection.api_call(\n \"GET\", [\"resources\", self.id, \"permissions\"], model=Permission, headers=headers,\n )", "def get_all_permissions(self, obj=None):", "def permissions(self):\n return list(self._permissions)", "def permissions(self):\n return self.get_permissions()", "def permissions(self) -> 'outputs.PermissionsResponse':\n return pulumi.get(self, \"permissions\")", "def get_all_permissions(self):\n\t\turl = f'{self.root.url}/api/v1/sessions/permissions'\n\t\treturn self.root.r('GET', url, body=None, headers=None, verify=self.root.verify)", "def permissions(self):\n return self._permissions", "def get_permissions(self):\n return self.settings[\"permissions\"]", "def get_all_permissions(self, obj=None):\n return self.get_group_permissions(obj)", "def permissions(self):\n return self.proto.details.appDetails.permission", "def get_permissions(self):\n if not hasattr(self, '_permissions'):\n self._permissions = self.permissions.all()\n return self._permissions", "def get_permissions(self):\n permissions = [IsAdminUser]\n return [permission() for permission in permissions]", "def all_perms(self, id, **kwargs):\r\n p = self.db.auth_permission\r\n if self.all_permissions:\r\n ret = self.sql(\r\n (p.record_id == id) & (p.table_name == self.table._tablename) & p.name.belongs(self.all_permissions),\r\n p.name, p.group_id,\r\n orderby=p.group_id)\r\n else:\r\n ret = []\r\n current.response.text = ret\r\n return ret", "def get_permissions(self):\n permissions = [IsAuthenticated]\n return [permission() for permission in permissions]", "def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n return [permission() for permission in permissions]", "def get_permissions(self):\n return [permission() for permission in self.permission_classes]", "def permissions(self):\n return [DSSWorkspacePermissionItem(permission) for permission in self.settings['permissions']]", "def get_permissions(self):\n if self.action in ['signup', 'login']:\n permissions = [AllowAny]\n elif self.action in ['retrieve']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [AllowAny]\n return [p() for p in permissions]", "def permissions(self):\n return None", "def getAllPerms(self,request):\n request.needAuthType(request.ADMIN)\n request.getAuthNameObj().canDo(\"CHANGE ADMIN PERMISSIONS\")\n all_perms_dic=perm_loader.getLoader().getAllPerms()\n if request.has_key(\"category\"):\n category=request[\"category\"]\n else:\n category=\"all\"\n all_perms_list=self.__getPermsListFromPerms(all_perms_dic,category)\n sorted=SortedList(all_perms_list)\n sorted.sortByPostText('[\"name\"]',0)\n return sorted.getList()", "def getPermissions(self, scope):\n\n return [permissions.api_enum_for_permission(p)\n for p in permissions.get_permissions(scope)]", "def get_permissions(self):\n if self.action in ['list', 'retrieve']:\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]", "def get_permissions(self):\n if self.action == 'list':\n permission_classes = [IsAuthenticated]\n else:\n permission_classes = [IsAdminUser]\n return [permission() for permission in permission_classes]", "def permission_resources(self):\n return self._permission_resources", "def permission_resources(self):\n return self._permission_resources", "def get_permissions(self):\n if 
self.action in ['signup', 'login', 'verify']:\n permissions = [AllowAny]\n elif self.action in ['retrieve', 'update', 'partial_update', 'destroy', 'u', 'p']:\n permissions = [IsAuthenticated, IsAccountOwner]\n else:\n permissions = [IsAuthenticated]\n return [p() for p in permissions]", "def permissions(self) -> str:\n return pulumi.get(self, \"permissions\")", "def get_all_permissions(self) -> set[tuple[str, str]]:\n return set(\n self.appbuilder.get_session.execute(\n select(self.action_model.name, self.resource_model.name)\n .join(self.permission_model.action)\n .join(self.permission_model.resource)\n )\n )", "def get_permissions(self):\n if self.action in ['list', 'create']:\n permission_classes = [IsStaffOrReadOnly]\n else:\n permission_classes = [IsAuthorOrReadOnly, IsStaffOrReadOnly]\n return [permission() for permission in permission_classes]", "def get_permissions(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetPermissions', self.handle)" ]
[ "0.82634306", "0.787807", "0.7769037", "0.76802576", "0.76248133", "0.75797546", "0.73929065", "0.7326292", "0.7316874", "0.7312243", "0.72730917", "0.7266721", "0.7251582", "0.72510326", "0.7225519", "0.72052515", "0.72032905", "0.7176814", "0.714984", "0.71372634", "0.7125943", "0.71183753", "0.708654", "0.7076597", "0.7076597", "0.70489633", "0.7029838", "0.70232534", "0.70084465", "0.7008013" ]
0.826862
0
Write a value into the buffer for the given length. This is more efficient then creating and writing an array of a single value.
def write_value(self, value, length, error=True, move_start=True): if not error and length > self.maxsize: length = self.maxsize idxs = self.get_indexes(self._end, length, self.maxsize) self.move_end(length, error, move_start) self._data[idxs] = value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write(self, value: int):\n self.data[self.pointer] = value", "def write(self, value: int, /) -> None:", "def Write(self, offset, value):\r\n addr = offset * 4 \r\n self.write(addr, value)", "def write(self, address: int, value: bytearray):\n for i, val in enumerate(value):\n self.mem[address+i] = value", "def write(self, addr, value, index=None):\n\n addr_idx = self.index_addr(addr, index)\n self.mem.write(addr_idx, value)", "def write(writer: BitStreamWriter, value: int) -> None:\n\n writer.writeVarSize(value)", "def write_binary(self,value):\n self.write_uint32(len(value))\n self.data.extend(value)", "def write(self, b):\n if not self._writable:\n raise UnsupportedOperation(\"write\")\n\n size = len(b)\n b_view = memoryview(b)\n size_left = size\n buffer_size = self._buffer_size\n max_buffers = self._max_buffers\n\n with self._seek_lock:\n end = self._buffer_seek\n buffer_view = memoryview(self._write_buffer)\n\n while size_left > 0:\n start = end\n end = start + size_left\n\n if end > buffer_size:\n end = buffer_size\n flush = True\n else:\n flush = False\n\n buffer_range = end - start\n\n b_start = size - size_left\n size_left -= buffer_range\n\n buffer_view[start:end] = b_view[b_start : b_start + buffer_range]\n\n if flush:\n self._buffer_seek = end\n self._seek += 1\n\n if max_buffers:\n futures = self._write_futures\n flush_wait = self._FLUSH_WAIT\n while (\n sum(1 for future in futures if not future.done())\n >= max_buffers\n ):\n sleep(flush_wait)\n\n with handle_os_exceptions():\n self._flush()\n\n self._write_buffer = bytearray(buffer_size)\n buffer_view = memoryview(self._write_buffer)\n end = 0\n\n self._buffer_seek = end\n return size", "def _write(self, data, length, error, move_start=True):\n idxs = self.get_indexes(self._end, length, self.maxsize)\n self.move_end(length, error, move_start)\n self._data[idxs] = data", "def write(value):\n return value", "def writeString(self, value: str):\n length = len(value)\n\n self._pack('i', length)\n self._pack('{:d}s'.format(length), value)", "def push(self, value):\n # Add value, then increment pointer (and size if necessary)\n self.buf[self.ptr] = np.array(value)\n self.ptr = (self.ptr + 1) % self.length\n if self._size < self.length:\n self._size += 1", "def write_zeros(self, length, error=True, move_start=True):\n self.write_value(0, length, error=error, move_start=move_start)", "def write(self, val: int, idx: Optional[int] = None) -> None:\n if not idx:\n idx = self.ip\n\n self.memory[idx] = val", "def write(self, data):\n self.buffer.write(data)\n self.offset += len(data)", "def write(self, value):\r\n self.__output__.write(value)", "def write_value(self, value):\n raise NotImplementedError", "def _writeByte(self, val):\n self.__writeValue(self.byteFormat, val)", "def __writeValue(self, valFormat, val):\n self._messageBuf.extend(pack(valFormat, val))", "def write(writer: BitStreamWriter, value: BitBuffer) -> None:\n\n writer.writeBitBuffer(value)", "def write(writer: BitStreamWriter, value: int) -> None:\n\n writer.writeVarUInt64(value)", "def write(writer: BitStreamWriter, value: int) -> None:\n\n writer.writeVarInt64(value)", "def write(writer: BitStreamWriter, value: typing.Any) -> None:\n\n value.write(writer)", "def write( self, value ): # uint_8\n\t\tif (type(value) is bytearray) or (type(value) is bytes):\n\t\t\tfor data in value:\n\t\t\t\tself.send( data, LCD_RS )\n\t\telse:\n\t\t\tself.send(value, LCD_RS )", "def write(self, value):\n return value", "def write(self, value):\n return value", "def write(self, 
value):\n return value", "def write(self, new_value):\n self.write_value = new_value", "def _write(self, v, w):\n if self.overwrite_mode:\n if w > 0.5:\n self.memory[self.head_pos] = np.copy(v)\n if self.history is not None:\n self.history[\"adds\"][-1] = self._read()\n else:\n if self.history is not None:\n self.history[\"adds\"][-1] = (w * (v - self._read()))\n self.memory[self.head_pos] = (1 - w) * self._read() + v * w", "def write(writer: BitStreamWriter, value: int) -> None:\n\n writer.writeVarUInt(value)" ]
[ "0.65248704", "0.64803195", "0.6296469", "0.62879", "0.61605716", "0.609862", "0.60918474", "0.60880065", "0.6039514", "0.6036492", "0.60341746", "0.60159934", "0.59341705", "0.59053075", "0.584951", "0.58398277", "0.581167", "0.5808708", "0.58013177", "0.57925296", "0.57789797", "0.5758296", "0.5756961", "0.5728029", "0.5723022", "0.5723022", "0.5723022", "0.5716369", "0.56526554", "0.5651065" ]
0.74076855
0
Write zeros into the buffer for the specified length.
def write_zeros(self, length, error=True, move_start=True): self.write_value(0, length, error=error, move_start=move_start)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_nul_bytes(self, n):\n self.write(b'\\x00' * n)", "def prepend_zeros(data: bytes, length: int):\n print(\"prepend \" + str(length))\n return length * b\"0\" + data", "def append_zeros(input_signal, length=None):\n if length is None:\n length = 2 ** int(math.ceil(math.log(len(input_signal), 2)))\n zeros = length - len(input_signal)\n result = sumpf.Signal(channels=tuple([c + (0.0,) * zeros for c in input_signal.GetChannels()]),\n samplingrate=input_signal.GetSamplingRate(),\n labels=input_signal.GetLabels())\n return result", "def reset(self):\n self._buffer.fill(0)", "def zero_pad(data):\n N = len(data)\n pow_2 = np.ceil(np.log2(N))\n return np.pad(data,(0,int((2**pow_2)-N)),'constant')", "def clear_buffer(self):\n for i, value in enumerate(self.buffer):\n self.buffer[i] = 0", "def write(self, len, buf):\n ret = libxml2mod.xmlOutputBufferWrite(self._o, len, buf)\n return ret", "def write(self, data):\n self.buffer.write(data)\n self.offset += len(data)", "def flush(self) -> None:\n super().put(self.buffer)\n self.buffer = np.ndarray((0, 1), dtype=np.int16)", "def zeros(shape, dtype=None):\n raise NotImplementedError", "def zeros(shape, dtype=None):\n\n return full(shape, 0, dtype)", "def clear(self):\n self.length = 0", "def _write(self, data, length, error, move_start=True):\n idxs = self.get_indexes(self._end, length, self.maxsize)\n self.move_end(length, error, move_start)\n self._data[idxs] = data", "def write(self, data):\n return 0", "def get_zero_buffer(for_data, datatype):\n global zero_data_offset\n global bss_buffer\n global buffer_index\n\n RADIX_MAX = 5 # TODO: Make parameterized\n\n width = for_data.shape[2]\n channels = for_data.shape[0]\n pad = RADIX_MAX // 2 * (width + 1) * (channels) * dtype_size(datatype)\n buffer_size = len(for_data.flatten()) * dtype_size(datatype) + 2 * pad\n\n (buffer_size, for_data) = align(buffer_size, for_data, 64)\n\n bss_buffer.append(for_data)\n zero_data_offset += buffer_size\n buffer_index += 1\n if zero_data_offset - buffer_size + pad + buffer_index > 100 * 1024 * 1024:\n throw_error(ErrorTable.NoResources)\n\n return zero_data_offset - buffer_size + pad, buffer_index", "def clear(self):\n self.buf = np.zeros((self.length, self.dim))\n self.ptr = 0\n self._size = 0", "def _no_length_test(self, factory):\n output_buffer = factory(5)\n\n server, client = loopback()\n server.send(b\"xy\")\n\n assert client.recv_into(output_buffer) == 2\n assert output_buffer == bytearray(b\"xy\\x00\\x00\\x00\")", "def zeros(shape: any,\n dtype: any = float,\n order: {'C', 'F'} = 'C',\n *,\n alignment: int = 16,\n **kwargs):\n return empty(shape=shape,\n dtype=dtype,\n order=order,\n alignment=alignment,\n __gen__=np.zeros)", "def test_zero_pad():\r\n # Freely assume that time is the last dimension:\r\n ts1 = np.empty((64, 64, 35, 32))\r\n NFFT = 64 \r\n zp1 = utils.zero_pad(ts1, NFFT)\r\n npt.assert_equal(zp1.shape[-1], NFFT)\r\n\r\n # Try this with something with only 1 dimension:\r\n ts2 = np.empty(64)\r\n zp2 = utils.zero_pad(ts2, NFFT)\r\n npt.assert_equal(zp2.shape[-1], NFFT)", "def test_memoryview_no_length(self):\n self._no_length_test(_make_memoryview)", "def zero(*_, **__) -> None:\n return", "def flush(self):\n data = self._wbuf.getvalue()\n if data:\n self._write(data)\n self._len = 0\n self._wbuf = BytesIO()", "def write(self, buf: bytes, /) -> Optional[int]:", "def write(self, buf: bytes, /) -> Optional[int]:", "def write(self, buf: bytes, /) -> Optional[int]:", "def length(self, length):\n\n self._length = length", "def write(self, buf: 
AnyReadableBuf, /) -> int:", "def set_length(self, length):\n if length < 0:\n raise AttributeError('length should be positive')\n self.progress_char_length = length", "def on_write_needed(self, nbytes, underflow):", "def test_op_zero_int(self):\n\n device = pymic.devices[0]\n stream = device.get_default_stream()\n a = numpy.arange(1, 4711 * 1024, dtype=int)\n offl_a = stream.bind(a)\n offl_a.zero()\n offl_a.update_host()\n stream.sync()\n self.assertEqual(sum(a), 0,\n \"Array should be all zeros.\")" ]
[ "0.6283033", "0.62015116", "0.5955452", "0.5634417", "0.5614875", "0.56004006", "0.5534813", "0.55236316", "0.5512742", "0.5460759", "0.5379353", "0.5364001", "0.52845514", "0.52615494", "0.52562517", "0.51941323", "0.517467", "0.51620394", "0.5158084", "0.51518273", "0.5140381", "0.5123512", "0.5112596", "0.5112596", "0.5112596", "0.5105404", "0.5100962", "0.50828034", "0.50803965", "0.5075414" ]
0.7804039
0
Read the data and move the start/read pointer, so that data is not read again. This method reads empty if the amount specified is greater than the amount in the buffer.
def read(self, amount=None): if amount is None: amount = self._length # Check available read size if amount == 0 or amount > self._length: return self._data[0:0].copy() idxs = self.get_indexes(self._start, amount, self.maxsize) self.move_start(amount) return self._data[idxs].copy()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_remaining(self, amount=None):\n if amount is None or amount > self._length:\n amount = self._length\n\n # Check available read size\n if amount == 0:\n return self._data[0:0].copy()\n\n idxs = self.get_indexes(self._start, amount, self.maxsize)\n self.move_start(amount)\n return self._data[idxs].copy()", "def _readData(self):\n # Debug. This fn should be called only after checking canRead()\n if not self._canRead():\n raise Exception(\"Trying to read more data than there is.\")\n\n data = self.buffer[:self._expectedByteCount]\n self.buffer = self.buffer[self._expectedByteCount:]\n\n return data", "def move_start(self, amount, error=True, limit_amount=True):\n if amount == 0:\n return\n elif amount > self._length:\n if error:\n raise UnderflowError(\"Not enough data in the buffer \" + repr(self))\n\n if limit_amount:\n # You cannot read more than what you have\n amount = self._length\n # end error\n\n stop = self._start + amount\n try:\n self._start = stop % self.maxsize\n except ZeroDivisionError:\n self._start = stop\n\n self.sync_length(False or amount < 0) # Length grows if amount was negative.", "def read_until_size(self, size):\n if not size:\n do_return(b'')\n with self.reading:\n while len(self.read_buffer) < size:\n self.read_buffer.enqueue((yield self.base.read(self.bufsize)))\n do_return(self.read_buffer.dequeue(size))", "def _read_bytes(self, start, count): # type: (int) -> bytes\n bytes_data = self._buffer[start:start + count]\n\n if len(bytes_data) != count:\n raise ASN1WantMore('Premature end of input.')\n\n return bytes_data", "def read(self, size: int = -1) -> bytes:\n if self.size_read >= self.chunksize:\n return b''\n if size < 0:\n size = self.chunksize - self.size_read\n if size > self.chunksize - self.size_read:\n size = self.chunksize - self.size_read\n data = self.file.read(size)\n self.size_read = self.size_read + len(data)\n if self.size_read == self.chunksize and (self.chunksize & 1):\n dummy = self.file.read(1)\n self.size_read = self.size_read + len(dummy)\n return data", "def get(self, amount, offset=0):\n min_buff_size = offset + amount\n\n while len(self.buff) < min_buff_size:\n\n if len(self.chunkbuffer) <= 0:\n return None\n\n chunk = self.chunkbuffer.pop()\n\n self.buff.extend(chunk)\n\n data = self.buff[offset:offset + amount]\n\n self.autocommit_amount = offset + amount\n\n return data", "def read(self, size=-1):\n\n if size < 0:\n raise NotImplementedError(\"Don't be greedy, that could be massive!\")\n elif size == 0:\n if self._text:\n return \"\"\n else:\n return b\"\"\n elif self._within_block_offset + size <= len(self._buffer):\n # This may leave us right at the end of a block\n # (lazy loading, don't load the next block unless we have too)\n data = self._buffer[self._within_block_offset:self._within_block_offset + size]\n self._within_block_offset += size\n assert data # Must be at least 1 byte\n return data\n else:\n # if read data overflows to next block\n # pull in rest of data in current block\n data = self._buffer[self._within_block_offset:]\n\n # decrement size so that we only pull the rest of the data\n # from next block\n size -= len(data)\n self._load_block() # will reset offsets\n\n if not self._buffer:\n return data # EOF\n\n # if there is still more to read\n elif size:\n # pull rest of data from next block\n return data + self.read(size)\n else:\n # Only needed the end of the last block\n return data", "def read(self, size=1):\n assert self.count < len(self.data)\n data = self.data[self.count]\n self.count += 1\n return 
data", "def _get_data(self):\n while not (self.closed or self.ended):\n block = self.stream.read(8192)\n with self.lock:\n if len(block) == 0:\n self.ended = True\n else:\n self.data.append(block)\n self.available.set()", "def read(self, size=-1):\n if not self._buf:\n self._buf.append(next(self._iter, b''))\n if len(self._buf[0]) < size or size < 0:\n return self._buf.pop(0)\n block = self._buf.pop(0)\n self._buf.insert(0, block[size:])\n return block[:size]", "def read_bytes(self, number_of_bytes):\n\n self.index = -1\n data = self.buf[self.offset:self.offset + number_of_bytes]\n self.offset += number_of_bytes\n\n return data", "def read(self, size=-1):\n ...", "def read(self, *args, **kwargs):\r\n buf = io.BufferedReader.read(self, *args, **kwargs)\r\n self.increment(len(buf))\r\n return buf", "def _buffer_to(self, amount):\n if amount > self.lookahead:\n raise Exception(\n 'Cannot extend buffer to {}: '\n 'beyond buffer lookahead {}'.format(\n amount, self.lookahead\n )\n )\n while len(self.buffer) < amount:\n try:\n self.buffer.appendleft(next(self.stream))\n except StopIteration:\n break", "def _read_amt(self, byte_count):\n full_msg = bytearray()\n while len(full_msg) < byte_count:\n block = self.request.recv(byte_count - len(full_msg))\n full_msg.extend(block)\n return full_msg", "def read(self, num_bytes_to_read):\n pass", "def get_raw_data(self, ptr, unbuffered=False):\n if unbuffered:\n self._data_pos = None\n # round down ptr to a 'block boundary'\n idx = ptr - (ptr % 0x20)\n ptr -= idx\n count = reading_len[self.data_format]\n if self._data_pos == idx:\n # cache contains useful data\n result = self._data_block[ptr:ptr + count]\n if len(result) >= count:\n return result\n else:\n result = list()\n if ptr + count > 0x20:\n # need part of next block, which may be in cache\n if self._data_pos != idx + 0x20:\n self._data_pos = idx + 0x20\n self._data_block = self._read_block(self._data_pos)\n result += self._data_block[0:ptr + count - 0x20]\n if len(result) >= count:\n return result\n # read current block\n self._data_pos = idx\n self._data_block = self._read_block(self._data_pos)\n result = self._data_block[ptr:ptr + count] + result\n return result", "def __reader(self):\n empty = bytes()\n\n try:\n while not self._wantExit:\n # logging.debug(\"reading character\")\n b = self._readBytes(1)\n # logging.debug(\"In reader loop\")\n if len(b) > 0:\n # logging.debug(f\"read returned {b}\")\n c = b[0]\n ptr = len(self._rxBuf)\n\n # Assume we want to append this byte, fixme use bytearray instead\n self._rxBuf = self._rxBuf + b\n\n if ptr == 0: # looking for START1\n if c != START1:\n self._rxBuf = empty # failed to find start\n if self.debugOut != None:\n try:\n self.debugOut.write(b.decode(\"utf-8\"))\n except:\n self.debugOut.write('?')\n\n elif ptr == 1: # looking for START2\n if c != START2:\n self._rxBuf = empty # failed to find start2\n elif ptr >= HEADER_LEN: # we've at least got a header\n # big endian length follos header\n packetlen = (self._rxBuf[2] << 8) + self._rxBuf[3]\n\n if ptr == HEADER_LEN: # we _just_ finished reading the header, validate length\n if packetlen > MAX_TO_FROM_RADIO_SIZE:\n self._rxBuf = empty # length ws out out bounds, restart\n\n if len(self._rxBuf) != 0 and ptr + 1 == packetlen + HEADER_LEN:\n try:\n self._handleFromRadio(self._rxBuf[HEADER_LEN:])\n except Exception as ex:\n logging.error(\n f\"Error while handling message from radio {ex}\")\n traceback.print_exc()\n self._rxBuf = empty\n else:\n # logging.debug(f\"timeout\")\n pass\n except 
serial.SerialException as ex:\n if not self._wantExit: # We might intentionally get an exception during shutdown\n logging.warn(f\"Meshtastic serial port disconnected, disconnecting... {ex}\")\n except OSError as ex:\n if not self._wantExit: # We might intentionally get an exception during shutdown\n logging.error(f\"Unexpected OSError, terminating meshtastic reader... {ex}\") \n except Exception as ex:\n logging.error(f\"Unexpected exception, terminating meshtastic reader... {ex}\")\n finally:\n logging.debug(\"reader is exiting\")\n self._disconnected()", "def skip(self) -> None:\n n = self.chunksize - self.size_read\n # maybe fix alignment\n if self.chunksize & 1:\n n = n + 1\n try:\n self.file.seek(n, 1)\n except (AttributeError, OSError): # Cannot seek, manually read.\n while self.size_read < self.chunksize:\n n = min(8192, self.chunksize - self.size_read)\n skipped = self.read(n)\n if not skipped:\n raise EOFError from None\n else:\n self.size_read = self.size_read + n", "def readBuffer(self, start, buffer_size):\r\n\r\n start, stop = self.getReadParameters(start, buffer_size)\r\n try:\r\n # data_source is a tables.Table or a tables.XArray\r\n # but data is a numpy array\r\n # Warning: in a EArray with shape (2,3,3) and extdim attribute\r\n # being 1, the read method will have 3 rows. However, the numpy\r\n # array returned by EArray.read() will have only 2 rows\r\n data = self.data_source.read(start, stop)\r\n except tables.HDF5ExtError:\r\n print(translate('Buffer',\r\n \"\"\"\\nError: problems reading records. The dataset maybe \"\"\"\r\n \"\"\"corrupted.\"\"\",\r\n 'A dataset readability error'))\r\n except:\r\n vitables.utils.formatExceptionInfo()\r\n else:\r\n # Update the buffer contents and its start position\r\n self.chunk = data\r\n self.start = start", "def _read_range(self, start=0, end=None):\n max_read_size = self.channel.connection.negotiate_response.max_read_size\n offset = start\n response_buffers = []\n while end is None or offset < end:\n if end is not None:\n max_read_size = min(end - offset, max_read_size)\n available = min(\n self.channel.connection.credits * smb2.BYTES_PER_CREDIT,\n max_read_size,\n )\n try:\n read_resp = self.channel.read(self, available, offset)\n response_buffers.append(read_resp)\n offset += len(read_resp)\n except ResponseError as re:\n if re.response.status == ntstatus.STATUS_END_OF_FILE:\n break\n raise\n read_buffer = b\"\".join(rb.tobytes() for rb in response_buffers)\n if read_buffer:\n self._offset = start + len(read_buffer)\n # update the EOF marker if we read past it\n self._end_of_file = max(self.end_of_file, self._offset)\n return read_buffer", "def _read(self, file_service):\n self.is_reading_lock.acquire()\n while self.is_reading:\n # is_reading_lock guards is_reading which is condition of the while loop.\n # We want to allow is_reading to be changed during the body of the while\n # so we release it in the beginning of the loop.\n self.is_reading_lock.release()\n self.data += self.stream.read(self.chunk_size)\n self.bytes_read = len(self.data)\n\n if self._is_last_chunk(self.bytes_read, self.previous_read):\n file_service.store_bytes(\n self.stream_id, self.data[:self.bytes_read], datetime.now())\n self.data = self.data[self.bytes_read:]\n\n elif self.bytes_read >= self.chunk_size:\n file_service.store_bytes(\n self.stream_id, self.data[:self.chunk_size], datetime.now())\n self.data = self.data[self.chunk_size:]\n\n self.previous_read = self.bytes_read\n\n # Acquire the lock before checking is_reading again,\n # so we acquire 
it at the end of the loop.\n self.is_reading_lock.acquire()\n\n # Store any leftover data\n if len(self.data) != 0:\n file_service.store_bytes(self.stream_id, self.data, datetime.now())\n\n self.is_reading_lock.release()", "def read(self, size=-1):\n chunk_index, prefix_size = self._index_pos(self._pos)\n prefixed_buffer = []\n try:\n if size < 0:\n while True:\n prefixed_buffer.append(self._readchunk(chunk_index))\n chunk_index += 1\n else:\n need = prefix_size + size\n while need > 0:\n chunk_data = self._readchunk(chunk_index)\n prefixed_buffer.append(chunk_data[:need])\n need -= len(chunk_data)\n chunk_index += 1\n\n except EOFError:\n # PR#16/18 - support identifying EOF\n # use a read() as a sync from desired position to actual position\n # read(0) can be used as a synchronization call\n dec_eof_position = self._members[-1].start_pos + self._members[-1].isize\n self._pos = dec_eof_position\n if prefixed_buffer:\n # subtracting the data in the EOF Case so the normal path will add it back\n # before the function return to avoid changing the path\n # adding up lengths rather than concatenating here to avoid creating new buffers\n self._pos -= sum([len(x) for x in prefixed_buffer]) - prefix_size\n prefixed_buffer = b\"\".join(prefixed_buffer)\n result = prefixed_buffer[prefix_size:]\n self._pos += len(result)\n return result", "def read_data(self, size, attempts = 1):\n data = Array('B')\n # do we have all of the data in the read buffer?\n if size <= len(self.rdbuf) - self.rdofs:\n data = self.rdbuf[self.rdofs : self.rdofs + size]\n self.rdofs += size\n return data\n # do we have some of the data in the read buffer?\n if len(self.rdbuf) - self.rdofs > 0:\n data = self.rdbuf[self.rdofs:]\n # do a usb read to get the rest...\n # read from the usb device\n try:\n bytes_to_rd = size - len(data)\n while bytes_to_rd > 0:\n # read from the usb device\n while True:\n self.rdbuf = self._read()\n self.rdofs = 0\n if len(self.rdbuf) > 0:\n break\n else:\n # no data received\n attempts -= 1\n if attempts > 0:\n # try again\n continue\n # return what we have\n return data\n # copy the read buffer into the returned data\n n = len(self.rdbuf)\n if n >= bytes_to_rd:\n # copy a partial read buffer\n data += self.rdbuf[:bytes_to_rd]\n self.rdofs = bytes_to_rd\n return data\n else:\n # copy all of the read buffer\n data += self.rdbuf\n bytes_to_rd -= n\n # read more data...\n except usb.core.USBError as e:\n raise usbdev_error(str(e))\n # never reached\n raise usbdev_error(\"internal error\")", "def consume(self):\n rest = self._buf.read()\n self._buf.seek(0, 0)\n self._buf.truncate(0)\n self._len = 0\n self.append(rest)", "def read(self, size=-1):\n buf = self._fd.read(size)\n self._progress.update(len(buf))\n return buf", "def read(self, size=1):\n \n data = self.fp.read(1)\n if data == '':\n self.fp.seek(0)\n data = self.fp.read(1)\n \n return data", "def read(self, size=-1):\n _complain_ifclosed(self._closed)\n buf = self._buf\n while size < 0 or len(buf) < size:\n try:\n buf = buf + next(self._generator)\n except StopIteration:\n break\n\n returned = b\"\"\n if size >= 1:\n self._buf = buf[size:]\n returned = buf[:size]\n else:\n self._buf = b\"\"\n returned = buf\n\n self._position = self._position + len(returned)\n return returned", "def __read_block(self, size):\n buf = b\"\"\n if len(self.__read_buffer):\n limit = (\n size if size <= len(self.__read_buffer) else\n len(self.__read_buffer)\n )\n buf = self.__read_buffer[:limit]\n self.__read_buffer = self.__read_buffer[limit:]\n size -= limit\n if 
not size:\n return buf\n try:\n buf += self.sock.recv(size)\n except (socket.timeout, ssl.SSLError):\n raise Error(\"Failed to read %d bytes from the server\" % size)\n self.__dprint(buf)\n return buf" ]
[ "0.7004888", "0.68289405", "0.6578622", "0.6386881", "0.63642263", "0.6346135", "0.63439137", "0.632571", "0.62540615", "0.6246668", "0.6226175", "0.6209409", "0.6203951", "0.61999094", "0.61687976", "0.616352", "0.6146556", "0.6113841", "0.60972565", "0.60848075", "0.6070838", "0.6068955", "0.606675", "0.60435086", "0.6042586", "0.60034645", "0.59915143", "0.59868634", "0.5968768", "0.59675956" ]
0.68437034
1
Read the data and move the start/read pointer so that the data is not read again. If the requested amount is greater than what is currently in the buffer, only the remaining data is read.
def read_remaining(self, amount=None):
    if amount is None or amount > self._length:
        amount = self._length

    # Check available read size
    if amount == 0:
        return self._data[0:0].copy()

    idxs = self.get_indexes(self._start, amount, self.maxsize)
    self.move_start(amount)
    return self._data[idxs].copy()
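As a rough illustration of the wrap-around indexing this read relies on, here is a minimal, self-contained sketch of how get_indexes presumably maps a start position and an amount onto buffer positions; the modulo arithmetic is an assumption made for illustration, not the class's actual implementation.

import numpy as np

# Assumed behavior of get_indexes: wrap (start, start + amount) around maxsize.
start, amount, maxsize = 6, 4, 8
idxs = (start + np.arange(amount)) % maxsize
print(idxs)  # [6 7 0 1] -> the read crosses the physical end and wraps to the front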
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _readData(self):\n # Debug. This fn should be called only after checking canRead()\n if not self._canRead():\n raise Exception(\"Trying to read more data than there is.\")\n\n data = self.buffer[:self._expectedByteCount]\n self.buffer = self.buffer[self._expectedByteCount:]\n\n return data", "def read(self, amount=None):\n if amount is None:\n amount = self._length\n\n # Check available read size\n if amount == 0 or amount > self._length:\n return self._data[0:0].copy()\n\n idxs = self.get_indexes(self._start, amount, self.maxsize)\n self.move_start(amount)\n return self._data[idxs].copy()", "def move_start(self, amount, error=True, limit_amount=True):\n if amount == 0:\n return\n elif amount > self._length:\n if error:\n raise UnderflowError(\"Not enough data in the buffer \" + repr(self))\n\n if limit_amount:\n # You cannot read more than what you have\n amount = self._length\n # end error\n\n stop = self._start + amount\n try:\n self._start = stop % self.maxsize\n except ZeroDivisionError:\n self._start = stop\n\n self.sync_length(False or amount < 0) # Length grows if amount was negative.", "def _read_bytes(self, start, count): # type: (int) -> bytes\n bytes_data = self._buffer[start:start + count]\n\n if len(bytes_data) != count:\n raise ASN1WantMore('Premature end of input.')\n\n return bytes_data", "def read_bytes(self, number_of_bytes):\n\n self.index = -1\n data = self.buf[self.offset:self.offset + number_of_bytes]\n self.offset += number_of_bytes\n\n return data", "def read(self, *args, **kwargs):\r\n buf = io.BufferedReader.read(self, *args, **kwargs)\r\n self.increment(len(buf))\r\n return buf", "def _buffer_to(self, amount):\n if amount > self.lookahead:\n raise Exception(\n 'Cannot extend buffer to {}: '\n 'beyond buffer lookahead {}'.format(\n amount, self.lookahead\n )\n )\n while len(self.buffer) < amount:\n try:\n self.buffer.appendleft(next(self.stream))\n except StopIteration:\n break", "def _get_data(self):\n while not (self.closed or self.ended):\n block = self.stream.read(8192)\n with self.lock:\n if len(block) == 0:\n self.ended = True\n else:\n self.data.append(block)\n self.available.set()", "def _read(self, file_service):\n self.is_reading_lock.acquire()\n while self.is_reading:\n # is_reading_lock guards is_reading which is condition of the while loop.\n # We want to allow is_reading to be changed during the body of the while\n # so we release it in the beginning of the loop.\n self.is_reading_lock.release()\n self.data += self.stream.read(self.chunk_size)\n self.bytes_read = len(self.data)\n\n if self._is_last_chunk(self.bytes_read, self.previous_read):\n file_service.store_bytes(\n self.stream_id, self.data[:self.bytes_read], datetime.now())\n self.data = self.data[self.bytes_read:]\n\n elif self.bytes_read >= self.chunk_size:\n file_service.store_bytes(\n self.stream_id, self.data[:self.chunk_size], datetime.now())\n self.data = self.data[self.chunk_size:]\n\n self.previous_read = self.bytes_read\n\n # Acquire the lock before checking is_reading again,\n # so we acquire it at the end of the loop.\n self.is_reading_lock.acquire()\n\n # Store any leftover data\n if len(self.data) != 0:\n file_service.store_bytes(self.stream_id, self.data, datetime.now())\n\n self.is_reading_lock.release()", "def skip(self) -> None:\n n = self.chunksize - self.size_read\n # maybe fix alignment\n if self.chunksize & 1:\n n = n + 1\n try:\n self.file.seek(n, 1)\n except (AttributeError, OSError): # Cannot seek, manually read.\n while self.size_read < self.chunksize:\n n = 
min(8192, self.chunksize - self.size_read)\n skipped = self.read(n)\n if not skipped:\n raise EOFError from None\n else:\n self.size_read = self.size_read + n", "def _read_amt(self, byte_count):\n full_msg = bytearray()\n while len(full_msg) < byte_count:\n block = self.request.recv(byte_count - len(full_msg))\n full_msg.extend(block)\n return full_msg", "def read_until_size(self, size):\n if not size:\n do_return(b'')\n with self.reading:\n while len(self.read_buffer) < size:\n self.read_buffer.enqueue((yield self.base.read(self.bufsize)))\n do_return(self.read_buffer.dequeue(size))", "def read(self, num_bytes_to_read):\n pass", "def get(self, amount, offset=0):\n min_buff_size = offset + amount\n\n while len(self.buff) < min_buff_size:\n\n if len(self.chunkbuffer) <= 0:\n return None\n\n chunk = self.chunkbuffer.pop()\n\n self.buff.extend(chunk)\n\n data = self.buff[offset:offset + amount]\n\n self.autocommit_amount = offset + amount\n\n return data", "def read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n\n pos = self.buff.find(data)\n rval = self.buff[: pos + len(data)]\n self.buff = self.buff[pos + len(data) :]\n\n return rval", "def _read_range(self, start=0, end=None):\n max_read_size = self.channel.connection.negotiate_response.max_read_size\n offset = start\n response_buffers = []\n while end is None or offset < end:\n if end is not None:\n max_read_size = min(end - offset, max_read_size)\n available = min(\n self.channel.connection.credits * smb2.BYTES_PER_CREDIT,\n max_read_size,\n )\n try:\n read_resp = self.channel.read(self, available, offset)\n response_buffers.append(read_resp)\n offset += len(read_resp)\n except ResponseError as re:\n if re.response.status == ntstatus.STATUS_END_OF_FILE:\n break\n raise\n read_buffer = b\"\".join(rb.tobytes() for rb in response_buffers)\n if read_buffer:\n self._offset = start + len(read_buffer)\n # update the EOF marker if we read past it\n self._end_of_file = max(self.end_of_file, self._offset)\n return read_buffer", "def _read_data_into_packet(self, p):\n\n length = p.length * self.disc.audio_format.bytes_per_frame\n\n if p.file_pos is None:\n # Silence, so send on null bytes to player\n p.data = '\\0' * length\n\n else:\n file_pos = p.file_pos * self.disc.audio_format.bytes_per_frame\n self.audio_file.seek(file_pos)\n\n p.data = self.audio_file.read(length)\n length -= len(p.data)\n file_pos += len(p.data)\n\n # If we didn't get all data, iterate with a timeout until\n # it's all been read or the ripping process has stopped.\n # This is not very efficient, and there's a small race\n # condition at the end of the disc, but this should be\n # very rare so keep it unoptimised for now.\n\n while length > 0 and self.is_ripping and self.is_ripping.is_set():\n time.sleep(1)\n\n self.audio_file.seek(file_pos)\n d = self.audio_file.read(length)\n\n length -= len(d)\n file_pos += len(d)\n\n p.data += d\n\n # Still didn't get all data, treat it as an exception\n if length > 0:\n raise SourceError('unexpected end of file, expected at least {0} bytes'\n .format(length))", "def dataReceived(self, data):\n if len(self._buffer) + len(data) > self.MAX_BUFFER_SIZE:\n raise ASN1TooMuch(\n 'Call read() or flush() before piping more data.')\n\n self._buffer += data", "def read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n \n pos = self.buff.find(data)\n rval = self.buff[:pos + len(data)]\n self.buff = self.buff[pos + len(data):]\n \n return rval", "def 
read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n \n pos = self.buff.find(data)\n rval = self.buff[:pos + len(data)]\n self.buff = self.buff[pos + len(data):]\n \n return rval", "def get_raw_data(self, ptr, unbuffered=False):\n if unbuffered:\n self._data_pos = None\n # round down ptr to a 'block boundary'\n idx = ptr - (ptr % 0x20)\n ptr -= idx\n count = reading_len[self.data_format]\n if self._data_pos == idx:\n # cache contains useful data\n result = self._data_block[ptr:ptr + count]\n if len(result) >= count:\n return result\n else:\n result = list()\n if ptr + count > 0x20:\n # need part of next block, which may be in cache\n if self._data_pos != idx + 0x20:\n self._data_pos = idx + 0x20\n self._data_block = self._read_block(self._data_pos)\n result += self._data_block[0:ptr + count - 0x20]\n if len(result) >= count:\n return result\n # read current block\n self._data_pos = idx\n self._data_block = self._read_block(self._data_pos)\n result = self._data_block[ptr:ptr + count] + result\n return result", "def consume(self):\n rest = self._buf.read()\n self._buf.seek(0, 0)\n self._buf.truncate(0)\n self._len = 0\n self.append(rest)", "def read(self, size=-1):\n\n if size < 0:\n raise NotImplementedError(\"Don't be greedy, that could be massive!\")\n elif size == 0:\n if self._text:\n return \"\"\n else:\n return b\"\"\n elif self._within_block_offset + size <= len(self._buffer):\n # This may leave us right at the end of a block\n # (lazy loading, don't load the next block unless we have too)\n data = self._buffer[self._within_block_offset:self._within_block_offset + size]\n self._within_block_offset += size\n assert data # Must be at least 1 byte\n return data\n else:\n # if read data overflows to next block\n # pull in rest of data in current block\n data = self._buffer[self._within_block_offset:]\n\n # decrement size so that we only pull the rest of the data\n # from next block\n size -= len(data)\n self._load_block() # will reset offsets\n\n if not self._buffer:\n return data # EOF\n\n # if there is still more to read\n elif size:\n # pull rest of data from next block\n return data + self.read(size)\n else:\n # Only needed the end of the last block\n return data", "def read(self, size: int = -1) -> bytes:\n if self.size_read >= self.chunksize:\n return b''\n if size < 0:\n size = self.chunksize - self.size_read\n if size > self.chunksize - self.size_read:\n size = self.chunksize - self.size_read\n data = self.file.read(size)\n self.size_read = self.size_read + len(data)\n if self.size_read == self.chunksize and (self.chunksize & 1):\n dummy = self.file.read(1)\n self.size_read = self.size_read + len(dummy)\n return data", "def readBuffer(self, start, buffer_size):\r\n\r\n start, stop = self.getReadParameters(start, buffer_size)\r\n try:\r\n # data_source is a tables.Table or a tables.XArray\r\n # but data is a numpy array\r\n # Warning: in a EArray with shape (2,3,3) and extdim attribute\r\n # being 1, the read method will have 3 rows. However, the numpy\r\n # array returned by EArray.read() will have only 2 rows\r\n data = self.data_source.read(start, stop)\r\n except tables.HDF5ExtError:\r\n print(translate('Buffer',\r\n \"\"\"\\nError: problems reading records. 
The dataset maybe \"\"\"\r\n \"\"\"corrupted.\"\"\",\r\n 'A dataset readability error'))\r\n except:\r\n vitables.utils.formatExceptionInfo()\r\n else:\r\n # Update the buffer contents and its start position\r\n self.chunk = data\r\n self.start = start", "def read_until(\n self,\n min_num_bytes: int,\n ending: bytes,\n timeout: float = 10.0,\n data_consumer=None,\n ):\n\n data = b''\n\n # If a miniumum number of bytes is given, wait till at least\n # that number of bytes are received. If the value is 0, then\n # continue, and rely on the terminator and timeout values.\n if min_num_bytes:\n data = self.con.read(min_num_bytes)\n # debug(f'read {data=}')\n if data_consumer:\n data_consumer(data)\n\n timeout_count = 0\n while True:\n if ending and data.endswith(ending):\n break\n else:\n # debug(f\"{ending=} was not found\")\n pass\n\n if self.con.inWaiting() > 0:\n new_data = self.con.read(1)\n # debug(f'read {new_data=}')\n data = data + new_data\n # if len(data) > 80:\n # debug(f'data: len={len(data)} {data[-80:]=}')\n # else:\n # debug(f'data: len={len(data)} {data=}')\n if data_consumer:\n data_consumer(new_data)\n # timeout_count = 0\n else:\n timeout_count += 1\n # debug(f'{timeout_count=}')\n if timeout is not None and timeout_count >= 100 * timeout:\n if not data:\n debug(f\"TIMEOUT - No data received within {timeout} seconds\")\n else:\n debug(f\"TIMEOUT - data {data} did not end with {ending}\")\n break\n time.sleep(0.01)\n debug(f\"read_until returns {data=}\")\n return data", "def read_data(self, size, attempts = 1):\n data = Array('B')\n # do we have all of the data in the read buffer?\n if size <= len(self.rdbuf) - self.rdofs:\n data = self.rdbuf[self.rdofs : self.rdofs + size]\n self.rdofs += size\n return data\n # do we have some of the data in the read buffer?\n if len(self.rdbuf) - self.rdofs > 0:\n data = self.rdbuf[self.rdofs:]\n # do a usb read to get the rest...\n # read from the usb device\n try:\n bytes_to_rd = size - len(data)\n while bytes_to_rd > 0:\n # read from the usb device\n while True:\n self.rdbuf = self._read()\n self.rdofs = 0\n if len(self.rdbuf) > 0:\n break\n else:\n # no data received\n attempts -= 1\n if attempts > 0:\n # try again\n continue\n # return what we have\n return data\n # copy the read buffer into the returned data\n n = len(self.rdbuf)\n if n >= bytes_to_rd:\n # copy a partial read buffer\n data += self.rdbuf[:bytes_to_rd]\n self.rdofs = bytes_to_rd\n return data\n else:\n # copy all of the read buffer\n data += self.rdbuf\n bytes_to_rd -= n\n # read more data...\n except usb.core.USBError as e:\n raise usbdev_error(str(e))\n # never reached\n raise usbdev_error(\"internal error\")", "def read( self, bytes=1024 ):\n count = len( self.readbuf )\n if count < bytes:\n data = os.read( self.stdout.fileno(), bytes - count )\n self.readbuf += data\n if bytes >= len( self.readbuf ):\n result = self.readbuf\n self.readbuf = ''\n else:\n result = self.readbuf[ :bytes ]\n self.readbuf = self.readbuf[ bytes: ]\n return result", "def read(self, size=1):\n assert self.count < len(self.data)\n data = self.data[self.count]\n self.count += 1\n return data", "def data_received(self, data):\n self.buffered += data\n while True:\n if self.have_length:\n if len(self.buffered) < self.message_length:\n break\n self._decode_message(self.buffered[:self.message_length])\n self.have_length = False\n self.buffered = self.buffered[self.message_length:]\n self.message_length = 0\n else:\n if len(self.buffered) < 4:\n break\n (self.message_length,) = 
struct.unpack_from(\">I\", self.buffered)\n self.buffered = self.buffered[4:]\n self.have_length = True" ]
[ "0.6858934", "0.65308416", "0.65099293", "0.63198656", "0.6250639", "0.6179618", "0.6173773", "0.61646664", "0.61498034", "0.6144139", "0.61436623", "0.61382663", "0.6129541", "0.6122185", "0.6102098", "0.6098493", "0.60864174", "0.6056444", "0.60450536", "0.60450536", "0.6026147", "0.6014328", "0.6012285", "0.59991163", "0.5994045", "0.59930587", "0.5963851", "0.59259415", "0.5915458", "0.5892578" ]
0.71143496
0
Read the last amount of data and move the start/read pointer. This is a specialized method intended for FFT calculations: it reads the newest data and advances the start pointer by the given update_rate. The returned skips value is the number of update_rate steps the start pointer advanced.
def read_last(self, amount=None, update_rate=None):
    if amount is None:
        amount = self._length
    if update_rate is None:
        update_rate = amount

    # Check available read size
    if amount == 0 or amount > self._length:
        return None, 0

    skips = (self._length - amount) // update_rate
    if skips > 0:
        self.move_start(update_rate * skips)

    idxs = self.get_indexes(self._start, amount, self.maxsize)
    self.move_start(update_rate)
    return self._data[idxs].copy(), skips + 1
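A small worked example of the bookkeeping above, using illustrative numbers to show how the returned skips value relates to update_rate; no buffer instance is needed for this arithmetic.

# Illustrative values: 1000 samples buffered, a 256-sample FFT window, 128-sample hop.
length, amount, update_rate = 1000, 256, 128

skips = (length - amount) // update_rate   # whole hops discarded before reading
total_advance = update_rate * (skips + 1)  # total movement of the start pointer
print(skips + 1, total_advance)            # prints: 6 768, so read_last returns (data, 6)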
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self) -> np.ndarray:\r\n # Read chunk from array\r\n signal_slice: np.ndarray = np.array(\r\n self._signal_data[self._pointer : self._pointer + self._chunk]\r\n )\r\n # Move index over\r\n self._pointer += self._chunk\r\n if self._pointer > len(self._signal_data):\r\n # Go back to beginning\r\n self._pointer = 0\r\n self._restart_flag = True\r\n print(\"Restarting stream...\")\r\n\r\n return signal_slice", "def update(self) -> np.ndarray:\r\n # Read chunk from array\r\n signal_slice: np.ndarray = np.array(\r\n self._signal_data[self._pointer : self._pointer + self._chunk]\r\n )\r\n # Move index over\r\n self._pointer += self._chunk\r\n if self._pointer > len(self._signal_data):\r\n # Go back to beginning\r\n self._pointer = 0\r\n self._restart_flag = True\r\n print(\"Restarting stream...\")\r\n\r\n return signal_slice", "def update_rate(self):\n self._rate = (\n (self._received - self._samples[0]) / float(self.sample_size)\n )\n self._samples.append(self._received)", "def scan_shift(scan_speeds, idx, movement_speed, info=ProcessInfo()):\n # Need to think about when scan speed == 0, when we don't have a line of data. I think I should just remove these lines from the array, and shift everything up.\n # This divide by zero is what is ruining the data\n incr = info.INSTRUMENT_DIRECTION # Get increment from class (either +1 or -1 depending on instrument orientation)\n\n time_taken = info.LIDAR_LSP_DIST_X / movement_speed\n\n num_scans = len(scan_speeds)\n scan_time = 0\n while scan_time < time_taken:\n # Return if we have reached the first point of data and still haven't got to the time required\n if idx < 0 or idx >= num_scans:\n print('Lidar offset extends beyond the bounds of LSP data')\n return None\n elif scan_speeds[idx] == 0:\n print('Scan speed=0, either all blank LSP lines have not been removed, or we have reached the end of the data')\n return None\n\n # Loop backwards through scan speeds, summing the time of each line until we reach the time taken\n scan_time += (1/scan_speeds[idx])\n\n idx = idx - incr\n\n # Determine whether this final scan, or the previous scan were closest to the time_taken\n prev_scan_time = scan_time - (1/scan_speeds[idx+incr]) # Subtract final scan time to get previous cumulative time\n difference = [abs(time_taken-scan_time), abs(time_taken-prev_scan_time)]\n closest_scan = np.argmin(difference)\n\n if closest_scan == 1:\n idx = idx + (2*incr) # Correct index if we want to use previous scan as final answer\n elif closest_scan == 0:\n idx = idx + incr # Return back to final index if we want to use that as final answer\n else:\n print('Error determining scan position')\n return None\n return idx", "def endNum(lastFile,sampleRate,dateStop):\n eb, ej, ee = fileTimeRange(lastFile)\n #print ' start: %s\\n dateStop: %s\\n stop: %s' % ( unixTimeToString(eb), dateStop, unixTimeToString(ee) )\n practicalStop = min(stringTimeToUnix(dateStop), ee)\n dateStopOffset = practicalStop - eb # time to skip in last pad file\n stopNumRecords = int(dateStopOffset * sampleRate + 0.5)\n actualStop = eb + stopNumRecords/float(sampleRate)\n #print ' STOP: numRecords: %d, sec: %f' % ( stopNumRecords, stopNumRecords/float(sampleRate) )\n return stopNumRecords,actualStop", "def inc_ptr(self, ptr):\n result = ptr + reading_len[self.data_format]\n if result >= 0x10000:\n result = data_start\n return result", "def readData(self, duration_s, toV=True, maxAI=1024, maxV=5.0,\n updateFunc=None, nPntsUpdate=1, trigger=[]):\n isDone = False\n nLines = 0\n empty = numpy.array([])\n 
\n if self.rate_ms >= 0:\n nPnts = round(duration_s *1000.0 /self.rate_ms)\n print(\"{0:.3f} s duration = {1} samples\".format(duration_s, nPnts))\n else: \n nPnts = 100\n print(\"Sample rate invalid, {0} samples will be recorded\".format(nPnts))\n\n if not(self.isOpen): \n print(\"ERROR: Link not open\")\n return (-1, empty, empty, empty)\n \n # Create empty arrays for the data\n #\n np_data_0 = numpy.zeros([nPnts], dtype=float)\n np_data_1 = numpy.zeros([nPnts], dtype=float)\n np_data_t = numpy.zeros([nPnts], dtype=float)\n np_dt_ms = numpy.zeros([nPnts], dtype=float)\n \n # Attempt to read data\n #\n while not(isDone):\n # Read a line\n #\n (errC, parts) = self.__readLine()\n if not(errC == 0):\n return (errC, empty, empty, empty)\n \n else: \n np_data_0[nLines] = float(parts[2])\n np_data_1[nLines] = float(parts[3])\n np_data_t[nLines] = float(parts[0])/1000.0\n np_dt_ms[nLines] = float(parts[1]) /1000.0\n \"\"\"\n print(\"{0:.3f} {1} {2}\".format(int(parts[0])/1000.0, \n int(parts[2]), int(parts[3])))\n \"\"\"\n # Update plot of data, if requested\n #\n if updateFunc and ((nLines % nPntsUpdate) == 0):\n updateFunc(nLines, np_data_t[nLines], np_data_0[nLines], \n np_data_1[nLines])\n \n if nLines < (nPnts-1):\n sys.stdout.write(\"\\r{0:.0f}% {1:.3f} s: {2:.3f} {3:.3f} ...\"\n .format(nLines/float(nPnts) *100, \n np_data_t[nLines]/1000.0,\n np_data_0[nLines], np_data_1[nLines]))\n else: \n sys.stdout.write(\"\\r100% done\" +\" \"*40 +\"\\n\")\n \n nLines += 1\n isDone = (nLines == nPnts)\n \n print(\"SUCCESS\") \n print(\"{0} data points recorded\".format(nLines))\n print(\"Rate = {0:.3f} +/- {1:.3f} ms\".format(numpy.mean(np_dt_ms), \n numpy.std(np_dt_ms)))\n\n if toV: \n np_data_0 = np_data_0 /float(maxAI -1) *maxV\n np_data_1 = np_data_1 /float(maxAI -1) *maxV\n \n return (0, np_data_t, np_data_0, np_data_1)", "def most_recent_read(self):\n self.read_pos = (self.write_pos - 1) % self.log_len\n return", "def read(self, index_delta):\n assert(not self.is_almost_finished(index_delta))\n assert(index_delta > 0)\n\n # Conduct the sensor read\n self.currIndex += index_delta\n self.currValue = self.data[self.currIndex]", "def update():\n\n # Get last new x value as last x value + 1\n x_n0 = data_source.data['x'][-1]\n x_n1 = x_n0 + 0.1\n\n # Assign a new y value\n y_n1 = param_source.data['amp_sine'][0] * np.sin(x_n1) +\\\n param_source.data['amp_rand'][0] * np.random.rand(1)\n\n # Get old last average and use to calculate new average\n avg_n1 = _get_new_avg(data_source,\n y_n1,\n param_source.data['rollover'][0])\n\n # Make a dict of data to add on to the end of the source\n additional_data = dict(x=[x_n1], y=[y_n1], avg=[avg_n1])\n\n # Stream the new data with a rollover value of 10\n data_source.stream(additional_data,\n rollover=param_source.data['rollover'][0])\n\n # logger.debug(param_source.data['update_delay'][0])", "def dataset_read(self):\n # while self.running:\n # grab current data_list and own it locally per cycle\n # to avoid mid-parse changes\n self.local_data_list = self.data_list\n\n # set a random duration for reading from random line\n # before choosing another from current set\n dataset_read_dur = (random.randrange(3000, 13000) / 1000) * self.glob_speed\n\n # prepare start line to read\n starting_line = self.line_to_read()\n\n # sorts out durations\n if self.debug_choose:\n print('B1 dataset line read duration = ', dataset_read_dur)\n end_time = self.end_time_calc(dataset_read_dur)\n\n # determine if read is to be looped or sequential\n looped = self.is_loop()\n\n while 
time.time() < end_time:\n # calc baudrate and cycle clock for speed of line read\n baudrate = self.baudrate()\n\n # if looped\n if looped > 0:\n loop_end = time.time() + looped\n\n # reset the start read point\n line_to_read = starting_line\n\n # for each loop\n while time.time() < loop_end:\n active_line = self.local_data_list[line_to_read]\n self.parse_active_line(active_line)\n line_to_read += 1\n if self.debug_read:\n print(f'******** line to read LOOPING {line_to_read}')\n # print(f'config data = {config.x_ds}, {config.y_ds}, {config.z_ds}')\n\n # pause for 10th of baudrate, while parse_active_line slides\n time.sleep(baudrate/10)\n else:\n # if no loop\n active_line = self.local_data_list[starting_line]\n self.parse_active_line(active_line)\n starting_line += 1\n if self.debug_read:\n print(f'******** line to read NO LOOP {starting_line}')\n # print(f'config data = {config.x_ds}, {config.y_ds}, {config.z_ds}')\n\n # pause for 10th of baudrate, while parse_active_line slides\n time.sleep(baudrate/10)", "def bufferCnt():\n if(reset == 1):\n bufferCounter.next = 0\n else:\n if(decimationRatio > 0):\n if(bufferCounter == (decimationRatio-1)):\n bufferCounter.next = 0\n else:\n bufferCounter.next = bufferCounter + 1", "def digest_next_sensor_reading(\n start_time: float,\n data_queue: Queue,\n current_readings: AtomicBuffer,\n gps_value,\n altimeter_value,\n magnetometer_accelerometer_value,\n) -> float:\n now = time.time()\n info = (\n now - start_time,\n *altimeter_value,\n *magnetometer_accelerometer_value,\n *gps_value,\n )\n if not data_queue.full():\n data_queue.put(info)\n current_readings.put(info)\n return now", "def current_pos(self):\n new_ptr = _decode(self._read_fixed_block(0x0020),\n lo_fix_format['current_pos'])\n if new_ptr is None:\n raise ObservationError('current_pos is None')\n if new_ptr == self._current_ptr:\n return self._current_ptr\n if self._current_ptr and new_ptr != self.inc_ptr(self._current_ptr):\n for k in reading_len:\n if (new_ptr - self._current_ptr) == reading_len[k]:\n log.error('changing data format from %s to %s' % (self.data_format, k))\n self.data_format = k\n break\n self._current_ptr = new_ptr\n return self._current_ptr", "def start_write_loop(self):\n\n global stop\n stop.value = 0\n\n count = 0\n\tsumXZ=0\n\tsumYZ=0\n\n while True:\n # This obviously is a very naive implementation of the consumer (while loop instead of lock).\n # However, this already achieves the maximum sampling rate because the I2C communication with\n # the sensors is the bottleneck.\n if not self.__buffer.empty():\n data_point = self.__buffer.get()\n\t\tcount +=1\n\t\tsumXZ += data_point.XZ\n\t\tsumYZ += data_point.YZ\n\t\tpromXZ = (sumYZ/count)\n\t\tpromYZ = (sumXZ/count)\n\t\tif count % self.nth_sample == 0:\n\t \t #self._write_sample(promXZ,promYZ)\n self._write_sample(data_point.XZ,data_point.YZ)\n #print(\"raw-\"+str(data_point))\n\t\t #print(\"prom\"+str(promXZ)+\" \"+str(promYZ)+\"--->\"+str(sumXZ)+\"/\"+str(count)+\"--->\"+str(sumYZ)+\"/\"+str(count))\n sumXZ=0\n sumYZ=0\n promXZ=0\n promYZ=0\n count=1", "def step_update(self, num_updates):\n cycle = math.floor(num_updates / (2 * self.stepsize))\n\n lr_shrink = self.lr_shrink ** cycle\n max_lr = self.max_lr * lr_shrink\n if self.shrink_min:\n min_lr = self.min_lr * lr_shrink\n else:\n min_lr = self.min_lr\n\n x = abs(num_updates / self.stepsize - 2 * (cycle + 1) + 1)\n self.lr = min_lr + (max_lr - min_lr) * max(0, (1 - x))\n\n self.optimizer.set_lr(self.lr)\n return self.lr", "def _load_next_file(self):\n\n gains = 
super()._load_next_file()\n self._time_ptr = 0\n\n return gains", "def read_line(self):\n self.read_calibrated()\n\n avg = 0\n summ = 0\n online = False\n\n for i in range(0, self.NUM_SENSORS):\n val = self.sensorValues[i]\n if val > 500: online = True\n if val > 50:\n multiplier = i * 1000\n avg += val * multiplier\n summ += val\n\n if online == False:\n if self.lastValue < (self.NUM_SENSORS-1)*1000/2:\n return 0\n else:\n return (self.NUM_SENSORS-1)*1000\n\n self.lastValue = avg/summ\n return self.lastValue", "def get_next_sample(self):", "def set_reader_position(self):\n recorded_position = self.process_application.get_recorded_position(\n self.upstream_name\n )\n self.reader.seek(recorded_position)", "def read(self, start, size, resize_if_less=False):\n \n # number of zeros to add to start and end of the buffer\n add_to_start = 0\n add_to_end = 0\n \n if start < 0:\n # the first FFT window starts centered around zero\n if size + start <= 0:\n return numpy.zeros(size) if resize_if_less else numpy.array([])\n else:\n self.audio_file.seek(0)\n \n add_to_start = -start # remember: start is negative!\n to_read = size + start\n \n if to_read > self.frames:\n add_to_end = to_read - self.frames\n to_read = self.frames\n else:\n self.audio_file.seek(start)\n \n to_read = size\n if start + to_read >= self.frames:\n to_read = self.frames - start\n add_to_end = size - to_read\n \n try:\n samples = self.audio_file.read_frames(to_read)\n except IOError:\n # this can happen for wave files with broken headers...\n return numpy.zeros(size) if resize_if_less else numpy.zeros(2)\n \n # select which channel to draw\n if self.channels > 1:\n if self.channel==1:\n samples = samples[:,0]\n if self.channel==2:\n samples = samples[:,1]\n \n if resize_if_less and (add_to_start > 0 or add_to_end > 0):\n if add_to_start > 0:\n samples = numpy.concatenate((numpy.zeros(add_to_start), samples), axis=1)\n \n if add_to_end > 0:\n samples = numpy.resize(samples, size)\n samples[size - add_to_end:] = 0\n \n return samples", "def advance(self, amount=1):\n self._current += amount\n if self._current - self._updateRate >= self._lastUpdated:\n self.redraw()\n # go to nearest multiple of updateRate less than current\n self._lastUpdated = (self._current // self._updateRate)*self._updateRate", "def max_skip_interval(self):\n if not self.frame_skipping_allowed:\n return 0\n skip = (self.frame_rate_ctrl_sup >> 1) & 0x7\n if skip == 0:\n return -1\n return skip * 0.5", "def get_datawriting_indices_update_ctr(self, new_data,\n update: bool=True):\n\n # This is the case if the detector returns a simple float or int\n if len(np.shape(new_data)) == 0:\n xlen = 1\n # This is the case for a 1D hard detector or an N-D soft detector\n elif len(np.shape(new_data)) == 1:\n # Soft detector (returns values 1 by 1)\n if len(self.detector_function.value_names) == np.shape(new_data)[0]:\n xlen = 1\n else: # 1D Hard detector (returns values in chunks)\n xlen = len(new_data)\n else:\n if self.detector_function.detector_control == 'soft':\n # FIXME: this is an inconsistency that should not be there.\n xlen = np.shape(new_data)[1]\n else:\n # in case of an N-D Hard detector dataset\n xlen = np.shape(new_data)[0]\n\n start_idx = self.get_datawriting_start_idx()\n stop_idx = start_idx + xlen\n\n if update:\n # Sometimes one wants to know the start/stop idx without\n self.total_nr_acquired_values += xlen\n\n return start_idx, stop_idx", "def process_download_other_old(self, data, meta_file_name):\n block_size = 1024\n # content-length in bytes\n 
self.data_len = float(data.info().get('Content-length', None))\n config_pytomo.LOG.debug('Content-length: %s' % self.data_len)\n #meta_file = open(meta_file_name, 'ab')\n #meta_file = open(meta_file_name, 'ab+')\n tries = 0\n accumulated_playback = 0\n buff_state_tracker = False\n accumulated_buffer = 0.0\n initial_data = 0\n initial_rate = 0\n byte_counter = 0\n self.state = INITIAL_BUFFERING_STATE\n start = time.time()\n while True:\n # Download and write\n before = time.time()\n if not ((before - start) > self.download_time):\n # read in bytes\n data_block = data.read(block_size)\n else:\n break\n if (not self.encoding_rate\n and tries <= config_pytomo.MAX_NB_TRIES_ENCODING):\n self.compute_encoding_rate(meta_file_name)\n tries += 1\n data_block_len = len(data_block)\n if data_block_len == 0:\n break\n after = time.time()\n self.compute_interruptions(data_block_len, after)\n if self.state == PLAYING_STATE:\n accumulated_playback += (after - before)\n if not buff_state_tracker:\n initial_duration = accumulated_buffer\n try:\n initial_rate = (initial_data * 8 / initial_duration /\n 1000)\n except ZeroDivisionError:\n initial_rate = 0\n buff_state_tracker = True\n elif self.state == BUFFERING_STATE:\n accumulated_buffer += (after - before)\n if not buff_state_tracker:\n initial_data += data_block_len\n else:\n config_pytomo.LOG.error(\"Unexpected state case\")\n break\n byte_counter += data_block_len\n block_size = self.best_block_size(after - before, data_block_len)\n instant_thp = (8e-3 * data_block_len / (after - before)\n if (after - before) != 0 else None)\n self.max_instant_thp = max(self.max_instant_thp, instant_thp)\n if config_pytomo.LOG_LEVEL == config_pytomo.DEBUG:\n # Progress message\n progress_stats = {\n 'percent_str': self.calc_percent(self._total_bytes,\n self.data_len),\n 'data_len_str': self.format_bytes(self.data_len),\n 'eta_str': self.calc_eta(start, time.time(), self.data_len,\n self._total_bytes),\n 'speed_str': self.calc_speed(start, time.time(),\n self._total_bytes),\n # in order to avoid None convertion to float in\n # report_progress and still have information\n 'instant_thp': str(instant_thp),\n 'byte_counter': self._total_bytes,\n 'current_buffer': self.current_buffer,\n }\n self.report_progress(progress_stats)\n self.set_total_bytes(byte_counter)\n self.accumulated_playback = accumulated_playback\n self.accumulated_buffer = accumulated_buffer\n self.initial_data = initial_data\n self.initial_rate = initial_rate\n return after - start", "def read(self, buf, n):\n l = min(len(self.prev), n)\n buf[:l] = self.prev[:l]\n self.prev = self.prev[l:] # pitfall self.prev = []\n\n idx = l # the next reading\n while idx < n:\n buf4 = [\"\" for _ in xrange(4)]\n r = read4(buf4)\n if idx+r < n:\n buf[idx:idx+r] = buf4[:r]\n idx += r\n if r < 4: return idx\n else:\n buf[idx:n] = buf4[:n-idx]\n self.prev = buf4[n-idx:r] # pitfall buf4[n-idx:]\n idx = n\n\n return idx", "def get_next(self) -> float:\n return self._current + self._offset", "def scanNext(self):\n if self._iPix < self.SI.nPix:\n # Compute next position and move there\n x,y = self.SI.xyPath[self._iPix]\n self.moveTo((x,y), dt_ms=SERVO_MOVE_MS)\n\n # Measure spectrum and 3D position and store it\n self.SP.read()\n self.SI.storePixel((x,y), 0,0,0, self.SP.spectrum)\n\n self._iPix += 1\n return True\n else:\n # Close file, if needed and move back to origin\n self.SI.finalize()\n self.moveTo()\n return False", "def step_update(self, num_updates):\n if self.args['optimization']['warmup_updates'] > 0 and \\\n 
num_updates <= self.args['optimization']['warmup_updates']:\n self.warmup_factor = num_updates / float(self.args['optimization']['warmup_updates'])\n lr = self.warmup_factor * self.lr\n elif num_updates >= self.total_num_update:\n lr = self.end_learning_rate\n else:\n warmup = self.args['optimization']['warmup_updates']\n lr_range = self.lr - self.end_learning_rate\n pct_remaining = 1 - (num_updates - warmup) / (self.total_num_update - warmup)\n lr = lr_range * pct_remaining ** (self.power) + self.end_learning_rate\n self.optimizer.set_lr(lr)\n return self.optimizer.get_lr()", "def update(self):\n if self._position:\n self._frequency = len(self._position)\n for i in range(len(self._position)):\n # convert from text file\n self._position[i] = float(self._position[i])\n self._recency = [self._position[0]]\n for i in range(1, self._frequency):\n self._recency.append(self._position[i] - self._position[i - 1])\n self._recency.append(1 - self._position[self._frequency - 1])\n self._isUpdated = True" ]
[ "0.53829694", "0.53829694", "0.5337989", "0.53274786", "0.51686627", "0.5128614", "0.5126201", "0.511141", "0.5061084", "0.50162256", "0.50062734", "0.5001223", "0.49057913", "0.48822874", "0.48516887", "0.4841836", "0.48409307", "0.4828322", "0.48135075", "0.47934806", "0.47862032", "0.47816035", "0.47791654", "0.47752917", "0.4756885", "0.47568378", "0.47443846", "0.47404063", "0.473379", "0.47298798" ]
0.6848442
0
Sync the length with the start and end pointers.
def sync_length(self, should_grow=True):
    try:
        self._length = (self._end - self._start) % self.maxsize
    except ZeroDivisionError:
        self._length = 0

    if self._length == 0 and should_grow:
        self._length = self.maxsize
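The modulo in sync_length is what keeps the length correct once the end pointer wraps past the physical end of the buffer; a quick worked example with illustrative values:

# Wrapped pointers in an 8-slot buffer: end has wrapped around past start.
maxsize, start, end = 8, 6, 2
length = (end - start) % maxsize
print(length)  # prints 4: slots 6, 7, 0 and 1 hold unread data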
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def twist(self, length):\r\n\r\n segment = []\r\n\r\n #grab all the items in the list from\r\n #our current location until the end of length\r\n mod_start = self.current_index % self.size\r\n mod_end = (self.current_index + length) % self.size\r\n\r\n #if we wrapped around to the beginning\r\n if mod_end <= mod_start:\r\n segment += self.list[mod_start:]\r\n segment += self.list[:mod_end]\r\n else:\r\n segment += self.list[mod_start:mod_end]\r\n\r\n segment = list(reversed(segment))\r\n\r\n #replace the items in our buffer\r\n for i in range(length):\r\n\r\n mod_index = self.current_index + i\r\n self.list[mod_index % self.size] = segment[i]\r\n\r\n self.current_index += length + self.skip_size\r\n self.skip_size += 1", "def sync(self, other):\n pass # TODO", "def set_last_segment_length(self, length):\n prior_length = self.segments[-1].get_length()\n if prior_length != -1:\n self.end_time -= prior_length\n\n self.segments[-1].set_length(length)\n self.end_time += length", "def _sync_ranges(self):\n with self.sphere.sphere_lock:\n self._sync_range(\n self.sphere.bai_1d_args, 'radial_range', 'numpoints',\n self.radialRange1D\n )\n self._sync_range(\n self.sphere.bai_2d_args, 'radial_range', 'npt_rad',\n self.radialRange2D\n )\n self._sync_range(\n self.sphere.bai_2d_args, 'azimuth_range', 'npt_azim',\n self.azimuthalRange2D\n )", "def extend(self, ext_len, chrom_len):\n mid = find_midpoint(self.start, self.end)\n self.start = max(0, mid - ext_len / 2)\n self.end = self.start + ext_len\n if chrom_len and self.end > chrom_len:\n self.end = chrom_len\n self.start = self.end - ext_len", "def _sync_range(self, args, rkey, pkey, rwidget):\n rwidget.blockSignals(True)\n try:\n self._sync_range_hilow(args, rkey, rwidget)\n self._sync_range_points(args, pkey, rwidget)\n self._sync_unit(args, rwidget)\n finally:\n rwidget.blockSignals(False)", "def sync() -> None:", "def extend_pos(self, start: int, end: int) -> None:", "def __init__(self, start: long, end: long):\n ...", "def __len__(self):\n diff = (self.end - self.start) % len(self.buffer)\n if diff == 0:\n if self.buffer[self.start]:\n return len(self.buffer)\n else:\n return 0\n else:\n return diff", "def edit_pointers_in_range(self, rng, diff, allow_double_edits=False):\n # Reinserters with poiner reassignments need double edits enabled (Appareden).\n #print(\"Called edit_pointers_in_range\", self.filename, rng, diff)\n start, stop = rng\n\n if diff != 0:\n print(\"Editing pointers in range %s %s with diff %s\" % (hex(start), hex(stop), hex(diff)))\n #print(self.pointers)\n #for p in self.pointers:\n #print(hex(p))\n\n # Need to move pointers if there are any in this range\n\n if self.blocks:\n #print([hex(p) for p in range(start+1, stop+1)])\n #print([hex(p) for p in self.pointer_locations])\n for offset in [p for p in range(start+1, stop+1) if p in self.pointer_locations]:\n print(hex(offset), \"needs to be moved\")\n for p in self.pointers:\n for loc in self.pointers[p]:\n if loc.original_location == offset:\n print(\"moving %s -> %s\" % (hex(loc.location), hex(loc.location + diff)))\n loc.move_pointer_location(diff)\n #print(loc)\n self.pointer_locations.remove(offset)\n self.pointer_locations.append(offset + diff)\n #for p in self.pointers:\n # print(hex(p), self.pointers[p])\n else:\n # Don't need to move pointers if there's no block for them to be in\n pass\n\n\n for offset in [p for p in range(start+1, stop+1) if p in self.pointers]:\n print(offset, self.pointers[offset])\n for ptr in self.pointers[offset]:\n print(\"editing %s 
(originally %s)\" % (ptr, hex(ptr.original_location)))\n #print(hex(ptr.text_location), hex(ptr.original_text_location))\n if allow_double_edits:\n ptr.edit(diff)\n else:\n if start+1 <= ptr.original_text_location <= stop+1:\n if self.blocks:\n block_found = False\n for b in self.blocks:\n if b.start <= ptr.original_location <= b.stop:\n block_found = True\n ptr.edit(diff, block=b)\n if not block_found:\n ptr.edit(diff)\n else:\n ptr.edit(diff)\n else:\n print(\"Skipping this one to avoid double-edit\")", "def update_position(self):\n self.back = self.pos % self.road_len\n self.front = (self.pos + self.length) % self.road_len", "def _move_range_mirror(self, range_len):\n #start1 = randint(range_len, len(self.state) - range_len)\n start = randint(0, len(self.state) - range_len)\n #range_list = choice([[start1, start1 - range_len], [start2, start2 + range_len]])\n end = start + range_len\n\n copy_state = self.state[start:end]\n copy_state.reverse()\n self.state[start:end] = copy_state\n\n for wizard in self.state[start:end]:\n self.wiz_to_pos[wizard] = self.state.index(wizard)", "def flip(self):\n self._start, self._end = self._end, self._start", "def update(self, initial, follows):", "def _sync_range_points(self, args, pkey, rwidget):\n if pkey in args:\n rwidget.ui.points.setValue(args[pkey])\n else:\n args[pkey] = rwidget.ui.points.value()", "def end(self):\n return self.start + self.size", "def length(self, length: Union[int, float]):\n self._length = length\n self._update_length()\n self.events.length()\n\n self.refresh()", "def get_data_range(self, start_position, length):\n pass", "def strip_to_start_end(self):\n # Update the length of the playlist\n if not self.pl_start or self.pl_start < 1:\n self.pl_start = 1\n if not self.pl_end or self.pl_end < 1:\n self.pl_end = len(self.list_content_tuple)\n\n # reset marker to make sure it's in the order given by start/end\n start = min(self.pl_start, self.pl_end)\n end = max(self.pl_start, self.pl_end)\n step = 1 if (self.pl_start <= self.pl_end) else -1\n if self._is_valid(start, end):\n self.list_content_tuple = self.list_content_tuple[start - 1 : end]\n if step == -1:\n self.list_content_tuple = self.list_content_tuple[::-1]", "def __len__(self):\n return self.end - self.begin", "def do_sync(self):\n raise NotImplementedError() # pragma: no cover", "def __len__(self):\n return self.start.dist(self.end)", "def sync(self):\n pass", "def sync(self):\n pass", "def inc_size(self):\r\n self.__length += 1", "def sync(self, sync):\n self._sync = sync", "def reset(self):\n\n self._begin = 0\n self._end = 0\n self._size = 0", "def adjust_references_of_iat(self, start, end, gap_size):\n self.pe_manager.adjust_data_in_range(start, end, gap_size)", "def update_pointer(self):\n pointer_length = -self.pointer_frac * self.radius\n # Add pi/2 to the angle because we consider 0 radians to be pi/2 in standard position.\n x = pointer_length * math.cos(self._radians + math.pi / 2)\n y = pointer_length * math.sin(self._radians + math.pi / 2)\n self.coords(self.pointer, 0, 0, x, y)" ]
[ "0.5956281", "0.5576492", "0.5475356", "0.54602545", "0.54277325", "0.5397421", "0.5393387", "0.52974296", "0.5243593", "0.52093387", "0.5208252", "0.519974", "0.5197779", "0.51680917", "0.516339", "0.5162192", "0.51388997", "0.5125775", "0.5125302", "0.5114871", "0.5105606", "0.50761", "0.5074012", "0.50635076", "0.50635076", "0.50590724", "0.50538874", "0.5033322", "0.50270206", "0.50262773" ]
0.6733816
0
Return the available space.
def get_available_space(self):
    return self.maxsize - len(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def available_space(self):\n # From http://stackoverflow.com/a/787832/732596\n s = os.statvfs(self.path)\n return (s.f_bavail * s.f_frsize) / 1024**2", "def get_space_used():\n fs.get_space_used()", "def mem_avail():\n return psutil.virtual_memory().available", "def available_space(self):\r\n space = dict()\r\n for path in self._mounts.keys():\r\n space.update({path:self.available_space_for_path(path)})\r\n return space", "def _free_space() -> int:\n return disk_usage(realpath('/')).free", "def get_free_space(dirname):\n return psutil.disk_usage(dirname).free", "def _available_space( self, pool_name ):\n\t\ttry:\n\t\t\treturn self.storage_pools[pool_name].available\n\t\texcept KeyError:\n\t\t\treturn -1", "def get_available_memory():\n if platform == 'linux' or platform == 'linux2':\n return _get_available_memory_linux()\n elif platform == 'darwin':\n return _get_available_memory_darwin()\n else:\n raise Exception('Platform not supported')", "def get_available_space(device, directory='', output=None):\n try:\n dir_output = device.parse('dir {}'.format(directory), output=output)\n except Exception as e:\n log.error(\"Failed to parse the directory listing due to: {}\".format(str(e)))\n return None\n\n free_space = dir_output.get('disk_free_space')\n if free_space:\n return int(free_space)\n else:\n log.error(\"Failed to get available space for {}\".format(directory))", "def freespace(self):\n self.log.info(\"freespace\")\n freebytes = shutil.disk_usage(self.s3_dir).free\n self.log.info(\"returning:\" + str(freebytes))\n return freebytes", "def get_disk_space():\n try:\n return shutil.disk_usage('/')\n except FileNotFoundError:\n logging.error(\n 'Failed to locate OS partition. Could not determine disk size.')", "def fs_total_reserved_space(self):\n return self._fs_total_reserved_space", "def get_capacity():\n fs.get_capacity()", "def get_free_ram_size(self):\n\t\treturn call_sdk_function('PrlStat_GetFreeRamSize', self.handle)", "def reserved_disk_space_in_bytes(self):\n try:\n return int(environment.get(\"ReservedDiskSpaceInBytes\"))\n except KeyError:\n return maxsize", "def available_capacity(self):\r\n return self.capacity - len(self.passengers)", "def get_free_space(config, task):\n if 'host' in config:\n import paramiko\n\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n try:\n ssh.connect(\n config.get('host'),\n config.get('port', 22),\n config.get('user'),\n config.get('password', None),\n config.get('pkey', None),\n config.get('ssh_key_filepath'),\n timeout=5000,\n )\n except Exception as e:\n logger.error(\"Issue connecting to remote host. 
{}\", e)\n task.abort('Error with remote host.')\n if config['allotment'] != -1:\n stdin, stdout, stderr = ssh.exec_command(f\"du -s {config['path']} | cut -f 1\")\n else:\n stdin, stdout, stderr = ssh.exec_command(\n f\"df -k {config['path']} | tail -1 | tr -s ' ' | cut -d' ' -f4\"\n )\n outlines = stdout.readlines()\n resp = ''.join(outlines)\n ssh.close()\n try:\n if config['allotment'] != -1:\n free = int(config['allotment']) - ((int(resp.strip()) * 1024) / 1000000)\n else:\n free = int(resp.strip()) / 1000\n except ValueError:\n logger.error('Non-integer was returned when calculating disk usage.')\n task.abort('Error with remote host.')\n return free\n elif os.name == 'nt':\n import ctypes\n\n free_bytes = ctypes.c_ulonglong(0)\n ctypes.windll.kernel32.GetDiskFreeSpaceExW(\n ctypes.c_wchar_p(config['path']), None, None, ctypes.pointer(free_bytes)\n )\n return free_bytes.value / (1024 * 1024)\n else:\n stats = os.statvfs(config['path'])\n return (stats.f_bavail * stats.f_frsize) / (1024 * 1024)", "def fs_size_available(self):\n return self._fs_size_available", "def used_ram():\n return total_ram() - free_ram()", "def get_free_gb():\n mem_info = get_mem_info()\n free_gb = float(mem_info['MemAvailable'].value) / 10**6\n return free_gb", "def get_available_size(path):\n if not os.path.exists(path):\n raise ValueError(\"%s is a non-existent path\" % path)\n f = os.statvfs(path)\n free = long(f[statvfs.F_BAVAIL] * f[statvfs.F_FRSIZE])\n \n return free", "def usedspace(self):\n self.log.info(\"freespace\")\n nbytes = 0\n keys = list(self.downloads.keys())\n keys.sort()\n for key in keys:\n download = self.downloads[key]\n nbytes += download['size']\n self.log.info(\"returning:\" + str(nbytes))\n return nbytes", "def memory(self):\n # Run 'free -m' command and make a list from output.\n mem_data = self.execCMD('free', '-m').split()\n total_mem = int(mem_data[7]) / 1024.\n used_mem = int(mem_data[15]) / 1024.\n # Caculate percentage\n used_mem_percent = int(used_mem / (total_mem / 100))\n\n # Results are in kilobyte.\n return total_mem, used_mem, used_mem_percent", "def free_ram():\n return int(convert.bytetomb(psutil.virtual_memory().available))", "def check_available_memory(self,unit='B'):\n free = psutil.virtual_memory().available\n\n if unit == 'MB':\n\n return free/10**6\n\n elif unit == 'GB':\n\n return free/10**9\n\n else:\n\n return free", "def getSpaceUsage(path):\n st = os.statvfs(path)\n \n flash = { \"free\" : st.f_bavail * st.f_frsize, \"used\":(st.f_blocks - st.f_bfree) * st.f_frsize }\n \n #free = st.f_bavail * st.f_frsize\n #total = st.f_blocks * st.f_frsize\n #used = (st.f_blocks - st.f_bfree) * st.f_frsize\n return flash", "def get_free_disk_space(p):\n s = os.statvfs(p)\n return s.f_frsize * s.f_bavail", "def get_space_committed():\n reserved = jobtracker.query(\"SELECT SUM(size) FROM files \" \\\n \"WHERE status IN ('downloading', 'new', \" \\\n \"'retrying', 'failed')\", \\\n fetchone=True)\n if reserved is None:\n reserved = 0\n return reserved", "def freespace(p):\n s = os.statvfs(p)\n return (s.f_bsize * s.f_bavail) /1024", "def total_ram(self):\n return sum([self.size_to_gb(slot[\"Size\"]) for slot in self.get(\"Memory Device\")])" ]
[ "0.8008372", "0.7613703", "0.75092167", "0.74836314", "0.7220819", "0.7144782", "0.70907354", "0.7068847", "0.7026799", "0.7015171", "0.69986105", "0.6964338", "0.6933497", "0.68823874", "0.68521756", "0.6828981", "0.6789977", "0.674724", "0.66928", "0.6689496", "0.66668266", "0.6634435", "0.6633646", "0.65965486", "0.65865034", "0.65665734", "0.6520369", "0.64660627", "0.64517725", "0.641151" ]
0.7887875
1
Return the dtype of the data.
def dtype(self): return self._data.dtype
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dtype(self):\n return self.data.dtype", "def dtype(self) -> DtypeLike:\n\n return self.data.dtype", "def dtype(self):\n return self.dataset.dtype", "def dtype(self):\n return self.array.dtype", "def dtype(self):\n return self._dtype", "def dtype(self):\n return self._dtype", "def dtype(self):\n return self._dtype", "def dtype(self):\n return self.__dtype", "def dtype(self) -> np.dtype:\n return self._dtype", "def dtype(self) -> DataType:\n return self._dtype", "def dtype(a):\n return a.dtype", "def dtype(self):\n return self._dtype", "def dtype(self):\n return self._fl.raw.dtype", "def dtype(self) -> np.dtype:\n ...", "async def get_dtype(self):\r\n pass", "def dtype(self):\n return self.config[\"dtype\"]", "def dtype(self):\n return self._vars[0].dtype", "def dtype(self) -> str:\n return self._dtype", "def type(self) -> np.dtype:\n return self._tensorInfo.dtype", "def dtype(self):\n return self.initial_value.dtype", "def dtype(self):\n return self._channel.datatype", "def get_data_type(self, idx):\n return(self.data[idx].dtype)", "def dtype(self) -> np.dtype:\n return self._channel_arrays[0].dtype", "def dtype():\n return RaggedDtype()", "def dtype(self) -> Type[DTypeFloat]:\n\n return self._dtype", "def dtype(self) -> tf.dtypes.DType:", "def dtype(self):\n # type: () -> ExtensionDtype\n return self._dtype", "def infer_dtype(self):\n raise NotImplementedError", "def data_type(self):\r\n return self._data_type", "def data_type(self):\n return self._data_type" ]
[ "0.87301797", "0.8446338", "0.84148496", "0.83855313", "0.83289075", "0.83289075", "0.83289075", "0.82657504", "0.8225879", "0.82118034", "0.82092184", "0.8098053", "0.80584", "0.8049524", "0.7933737", "0.7931455", "0.7907679", "0.7857508", "0.78264457", "0.78102624", "0.78011686", "0.7766503", "0.76889676", "0.7570876", "0.7566263", "0.7532714", "0.7520908", "0.74144423", "0.7412528", "0.7398269" ]
0.87331676
0
Creates a new certificate. If this is the first version, the certificate resource is created. This operation requires the certificates/create permission. The poller requires the certificates/get permission, otherwise raises an
async def create_certificate( self, certificate_name: str, policy: CertificatePolicy, **kwargs ) -> Union[KeyVaultCertificate, CertificateOperation]: if not (policy.san_emails or policy.san_user_principal_names or policy.san_dns_names or policy.subject): raise ValueError(NO_SAN_OR_SUBJECT) polling_interval = kwargs.pop("_polling_interval", None) if polling_interval is None: polling_interval = 5 enabled = kwargs.pop("enabled", None) if enabled is not None: attributes = self._models.CertificateAttributes(enabled=enabled) else: attributes = None parameters = self._models.CertificateCreateParameters( certificate_policy=policy._to_certificate_policy_bundle(), certificate_attributes=attributes, tags=kwargs.pop("tags", None), ) cert_bundle = await self._client.create_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, parameters=parameters, **kwargs ) create_certificate_operation = CertificateOperation._from_certificate_operation_bundle(cert_bundle) command = partial(self.get_certificate_operation, certificate_name=certificate_name, **kwargs) get_certificate_command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs) create_certificate_polling = CreateCertificatePollerAsync( get_certificate_command=get_certificate_command, interval=polling_interval ) def no_op(*_, **__) -> Any: # The deserialization callback is ignored based on polling implementation pass return await async_poller(command, create_certificate_operation, no_op, create_certificate_polling)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_add_certificate(self):\n response = self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate added successfully')\n assert response.status_code == 201", "def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def new_cert(self, commonname, extensions=None):\n\n serial = self._get_serial()\n pkey = self._create_pkey(commonname, serial)\n self._create_cert(pkey, commonname, serial, extensions)", "def create_certificate(self, subscription_id, management_host, hackathon_name):\n\n # make sure certificate dir exists\n if not os.path.isdir(self.CERT_BASE):\n self.log.debug('certificate dir not exists')\n os.mkdir(self.CERT_BASE)\n\n base_url = '%s/%s' % (self.CERT_BASE, subscription_id)\n\n pem_url = base_url + '.pem'\n # avoid duplicate pem generation\n if not os.path.isfile(pem_url):\n pem_command = 'openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout %s -out %s -batch' % \\\n (pem_url, pem_url)\n commands.getstatusoutput(pem_command)\n else:\n self.log.debug('%s exists' % pem_url)\n\n cert_url = base_url + '.cer'\n # avoid duplicate cert generation\n if not os.path.isfile(cert_url):\n cert_command = 'openssl x509 -inform pem -in %s -outform der -out %s' % (pem_url, cert_url)\n commands.getstatusoutput(cert_command)\n else:\n self.log.debug('%s exists' % cert_url)\n\n azure_key = self.db.find_first_object_by(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n # avoid duplicate azure key\n if azure_key is None:\n azure_key = self.db.add_object_kwargs(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n self.db.commit()\n else:\n self.log.debug('azure key exists')\n\n hackathon_id = self.db.find_first_object_by(Hackathon, name=hackathon_name).id\n hackathon_azure_key = self.db.find_first_object_by(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n # avoid duplicate hackathon azure key\n if hackathon_azure_key is None:\n self.db.add_object_kwargs(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n self.db.commit()\n else:\n self.log.debug('hackathon azure key exists')\n\n azure_cert_url = self.file_service.upload_file_to_azure_from_path(cert_url, self.CONTAINER_NAME,\n subscription_id + '.cer')\n azure_key.cert_url = azure_cert_url\n self.db.commit()\n return azure_cert_url", "def fusion_api_create_certificate_request(self, body, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/certificaterequest'\n return self.ic.post(uri=uri, body=body, api=api, headers=headers, param=param)", "def create_or_update(\n self, resource_group_name, provisioning_service_name, certificate_name, if_match=None, certificate=None, custom_headers=None, raw=False, **operation_config):\n certificate_description = models.CertificateBodyDescription(certificate=certificate)\n\n # Construct URL\n url = 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}'\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'provisioningServiceName': self._serialize.url(\"provisioning_service_name\", provisioning_service_name, 'str'),\n 'certificateName': self._serialize.url(\"certificate_name\", certificate_name, 'str', max_length=256)\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n\n # Construct headers\n header_parameters = {}\n header_parameters['Content-Type'] = 'application/json; charset=utf-8'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n header_parameters.update(custom_headers)\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct body\n body_content = self._serialize.body(certificate_description, 'CertificateBodyDescription')\n\n # Construct and send request\n request = self._client.put(url, query_parameters)\n response = self._client.send(\n request, header_parameters, body_content, stream=False, **operation_config)\n\n if response.status_code not in [200]:\n raise models.ErrorDetailsException(self._deserialize, response)\n\n deserialized = None\n\n if response.status_code == 200:\n deserialized = self._deserialize('CertificateResponse', response)\n\n if raw:\n client_raw_response = ClientRawResponse(deserialized, response)\n return client_raw_response\n\n return deserialized", "def create_selfsigned_certificates(name):\n pass", "def fusion_api_create_appliance_selfsigned_certificate(self, body, api=None, headers=None):\n return self.appliance_certificate.put(body, api, headers)", "def add_certificate(self, certificate):\r\n return self.ssl.createObject(certificate)", "def put_certificate(self, target, who, args, _files, _user_path):\n name = self.arg_get(args, 'name', str)\n if not commonl.verify_str_safe(name, do_raise = False):\n raise ValueError(\n f\"{name}: invalid certificate name, only [-_a-zA-Z0-9] allowed\")\n\n with target.target_owned_and_locked(who):\n target.timestamp()\n\n cert_path = os.path.join(target.state_dir, \"certificates\")\n cert_client_path = os.path.join(target.state_dir, \"certificates_client\")\n self._setup_maybe(target, cert_path, cert_client_path)\n\n client_key_path = os.path.join(cert_client_path, name + \".key\")\n client_req_path = os.path.join(cert_client_path, name + \".req\")\n client_cert_path = os.path.join(cert_client_path, name + \".cert\")\n\n if os.path.isfile(client_key_path) \\\n and os.path.isfile(client_cert_path):\t# already made?\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": False,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })\n\n try:\n subprocess.run(\n f\"openssl genrsa -out {client_key_path} {self.key_size}\".split(),\n stdin = None, timeout = 
5,\n capture_output = True, cwd = cert_path, check = True)\n allocid = target.fsdb.get(\"_alloc.id\", \"UNKNOWN\")\n subprocess.run(\n f\"openssl req -new -key {client_key_path} -out {client_req_path}\"\n f\" -subj /C=LC/ST=Local/L=Local/O=TCF-Signing-Authority-{target.id}-{allocid}/CN=TCF-{name}\".split(),\n check = True, cwd = cert_path,\n stdout = subprocess.PIPE, stderr = subprocess.STDOUT)\n target.log.debug(f\"{name}: created client's certificate\")\n\n # Issue the client certificate using the cert request and the CA cert/key.\n # note we run in the cert_path directory, so the ca.*\n # files are there\n subprocess.run(\n f\"openssl x509 -req -in {client_req_path} -CA ca.cert\"\n \" -CAkey ca.key -set_serial 101 -extensions client\"\n f\" -days 365 -outform PEM -out {client_cert_path}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n except subprocess.CalledProcessError as e:\n target.log.error(f\"command {' '.join(e.cmd)} failed: {e.output}\")\n self._client_wipe(name, cert_client_path)\t# don't leave things half there\n raise\n\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": True,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })", "def add_cert(self, student, course_id, course=None, forced_grade=None, template_file=None, title='None'):\r\n\r\n VALID_STATUSES = [status.generating,\r\n status.unavailable,\r\n status.deleted,\r\n status.error,\r\n status.notpassing]\r\n\r\n cert_status = certificate_status_for_student(student, course_id)['status']\r\n\r\n new_status = cert_status\r\n\r\n if cert_status in VALID_STATUSES:\r\n # grade the student\r\n\r\n # re-use the course passed in optionally so we don't have to re-fetch everything\r\n # for every student\r\n if course is None:\r\n course = courses.get_course_by_id(course_id)\r\n profile = UserProfile.objects.get(user=student)\r\n profile_name = profile.name\r\n\r\n # Needed\r\n self.request.user = student\r\n self.request.session = {}\r\n\r\n course_name = course.display_name or course_id.to_deprecated_string()\r\n is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()\r\n grade = grades.grade(student, self.request, course)\r\n enrollment_mode = CourseEnrollment.enrollment_mode_for_user(student, course_id)\r\n mode_is_verified = (enrollment_mode == GeneratedCertificate.MODES.verified)\r\n user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)\r\n user_is_reverified = SoftwareSecurePhotoVerification.user_is_reverified_for_all(course_id, student)\r\n cert_mode = enrollment_mode\r\n if (mode_is_verified and user_is_verified and user_is_reverified):\r\n template_pdf = \"certificate-template-{id.org}-{id.course}-verified.pdf\".format(id=course_id)\r\n elif (mode_is_verified and not (user_is_verified and user_is_reverified)):\r\n template_pdf = \"certificate-template-{id.org}-{id.course}.pdf\".format(id=course_id)\r\n cert_mode = GeneratedCertificate.MODES.honor\r\n else:\r\n # honor code and audit students\r\n template_pdf = \"certificate-template-{id.org}-{id.course}.pdf\".format(id=course_id)\r\n if forced_grade:\r\n grade['grade'] = forced_grade\r\n\r\n cert, __ = GeneratedCertificate.objects.get_or_create(user=student, course_id=course_id)\r\n\r\n cert.mode = cert_mode\r\n cert.user = student\r\n cert.grade = grade['percent']\r\n cert.course_id = course_id\r\n cert.name = profile_name\r\n # Strip HTML from grade range label\r\n grade_contents 
= grade.get('grade', None)\r\n try:\r\n grade_contents = lxml.html.fromstring(grade_contents).text_content()\r\n except (TypeError, XMLSyntaxError, ParserError) as e:\r\n # Despite blowing up the xml parser, bad values here are fine\r\n grade_contents = None\r\n\r\n if is_whitelisted or grade_contents is not None:\r\n\r\n # check to see whether the student is on the\r\n # the embargoed country restricted list\r\n # otherwise, put a new certificate request\r\n # on the queue\r\n\r\n if self.restricted.filter(user=student).exists():\r\n new_status = status.restricted\r\n cert.status = new_status\r\n cert.save()\r\n else:\r\n key = make_hashkey(random.random())\r\n cert.key = key\r\n contents = {\r\n 'action': 'create',\r\n 'username': student.username,\r\n 'course_id': course_id.to_deprecated_string(),\r\n 'course_name': course_name,\r\n 'name': profile_name,\r\n 'grade': grade_contents,\r\n 'template_pdf': template_pdf,\r\n }\r\n if template_file:\r\n contents['template_pdf'] = template_file\r\n new_status = status.generating\r\n cert.status = new_status\r\n cert.save()\r\n self._send_to_xqueue(contents, key)\r\n else:\r\n cert_status = status.notpassing\r\n cert.status = cert_status\r\n cert.save()\r\n\r\n return new_status", "def add_cert(session, node_id, cert_id, cert_name,\n cert_location, cert_expiration, username='system_user'):\n session = validate_session(session)\n try:\n add_cert = SslInfo(node_id, cert_id, cert_name,\n cert_location, cert_expiration)\n session.add(add_cert)\n session.commit()\n return add_cert\n except Exception as e:\n session.rollback()", "def create_cert(commonname, ca_dir):\n sca = SimpleCA(ca_dir)\n sca.new_cert(commonname)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None) -> 'SSLCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SSLCertificateState.__new__(_SSLCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"creation_timestamp\"] = creation_timestamp\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expire_time\"] = expire_time\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"self_link\"] = self_link\n return SSLCertificate(resource_name, opts=opts, __props__=__props__)", "def test_add_certificate_keys(self):\n response = self.client.post(\n '/api/v1/certificates', data=json.dumps(certificate_keys),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate_name key')\n assert response.status_code == 400", "def create_cert(self, cert_file, key_file):\n if os.path.isfile(cert_file) and os.path.isfile(key_file):\n return cert_file, key_file\n\n k = 
crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 2048)\n cert = crypto.X509()\n cert.get_subject().C = \"US\"\n cert.get_subject().ST = \"CO\"\n cert.get_subject().L = \"Denver\"\n cert.get_subject().CN = gethostname()\n cert.get_subject().O = \"Metropolitan State University of Denver\"\n cert.get_subject().OU = \"Computer Science\"\n cert.set_serial_number(6)\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n open(join(cert_file), 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n open(join(key_file), \"w\").write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n return cert_file, key_file", "def regen_cert(self, student, course_id, course=None, forced_grade=None, template_file=None):\r\n # TODO: when del_cert is implemented and plumbed through certificates\r\n # repo also, do a deletion followed by a creation r/t a simple\r\n # recreation. XXX: this leaves orphan cert files laying around in\r\n # AWS. See note in the docstring too.\r\n try:\r\n certificate = GeneratedCertificate.objects.get(user=student, course_id=course_id)\r\n certificate.status = status.unavailable\r\n certificate.save()\r\n except GeneratedCertificate.DoesNotExist:\r\n pass\r\n\r\n return self.add_cert(student, course_id, course, forced_grade, template_file)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None) -> 'Certificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _CertificateState.__new__(_CertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"certificate_name\"] = certificate_name\n __props__.__dict__[\"domain\"] = domain\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"private_key\"] = private_key\n return Certificate(resource_name, opts=opts, __props__=__props__)", "def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server 
Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]", "def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def fusion_api_create_rabbitmq_client_certificate(self, body, uri=None, api=None, headers=None, param=''):\n return self.rabmq.post(body, uri=uri, api=api, headers=headers, param=param)", "def CreateStarCert(filename, log = logging):\n temp1 = tempfile.mkstemp(prefix = 'ssl_proxy')\n temp2 = tempfile.mkstemp(prefix = 'ssl_proxy')\n\n cert_fields = { \"C\": \"US\", \"ST\": \"**INSECURE CONNECTION**\",\n \"L\": \"**INSECURE CONNECTION**\",\n \"O\": \"**INSECURE CONNECTION**\",\n \"OU\": \"**INSECURE CONNECTION**\",\n \"CN\": \"*\" }\n\n cert_valid_days = 1\n\n cert_string = '/C=%(C)s/ST=%(ST)s/L=%(L)s/O=%(O)s/OU=%(OU)s/CN=%(CN)s' % \\\n cert_fields\n\n openssl_command = 'openssl req -newkey rsa:1024 -keyout \"%s\" -nodes ' \\\n '-x509 -days 365 -out \"%s\" -subj \"%s\" -set_serial 0 -days %s ' \\\n '-batch' % (temp1[1], temp2[1], cert_string, cert_valid_days)\n\n find_openssl = os.system('which openssl > /dev/null')\n\n if not find_openssl == 0:\n log.error('Could not find openssl. 
(Used \"which openssl\" to search)')\n raise OSError, 'Command \"which openssl\" returned: %s' % find_openssl\n\n log.info('Running command: %s' % openssl_command)\n openssl_status = os.system(openssl_command)\n if not openssl_status == 0:\n raise OSError, 'Attempt to run openssl returned: %s' % openssl_status\n\n # Extract the keys into strings.\n key = os.read(temp1[0], 2048)\n cert = os.read(temp2[0], 2048)\n\n os.close(temp1[0])\n os.close(temp2[0])\n\n os.unlink(temp1[1])\n os.unlink(temp2[1])\n\n new_cert = open(filename, 'wb')\n new_cert.write('%s\\n%s' % (key, cert))\n\n new_cert.close()\n\n log.info('Successfully created %s' % filename)\n return True", "def request_cert():\n\n api_request = shallow_copy(props)\n\n for key in ['ServiceToken', 'Region', 'Tags', 'Route53RoleArn']:\n api_request.pop(key, None)\n\n if 'ValidationMethod' in props:\n if props['ValidationMethod'] == 'DNS':\n\n # Check that we have all the hosted zone information we need to validate\n # before we create the certificate\n for name in set([props['DomainName']] + props.get('SubjectAlternativeNames', [])):\n get_zone_for(name)\n\n del api_request['DomainValidationOptions']\n\n e['PhysicalResourceId'] = acm.request_certificate(\n IdempotencyToken=i_token,\n **api_request\n )['CertificateArn']\n add_tags()", "def __init__(__self__, *,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None):\n if certificate is not None:\n pulumi.set(__self__, \"certificate\", certificate)\n if certificate_id is not None:\n pulumi.set(__self__, \"certificate_id\", certificate_id)\n if certificate_name is not None:\n pulumi.set(__self__, \"certificate_name\", certificate_name)\n if domain is not None:\n pulumi.set(__self__, \"domain\", domain)\n if instance_id is not None:\n pulumi.set(__self__, \"instance_id\", instance_id)\n if private_key is not None:\n pulumi.set(__self__, \"private_key\", private_key)", "def test_update_non_existing_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/10', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate not found')\n assert response.status_code == 404", "def CreateRequests(self, args):\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, self.resources)\n certificate = file_utils.ReadFile(args.certificate, 'certificate')\n private_key = file_utils.ReadFile(args.private_key, 'private key')\n\n request = self.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=self.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=self.project)\n\n return [request]", "def assign(id, type, appid, specialid):\n try:\n client().certificates.assign(id, type, appid, specialid)\n logger.info(\n 'ctl:cert:assign', 'Assigned {0} to {0}'.format(id, appid)\n )\n except Exception as e:\n raise CLIException(str(e))", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: 
Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n certificate_body: Optional[pulumi.Input[str]] = None,\n certificate_chain: Optional[pulumi.Input[str]] = None,\n expiration: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n path: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n upload_date: Optional[pulumi.Input[str]] = None) -> 'ServerCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ServerCertificateState.__new__(_ServerCertificateState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"certificate_body\"] = certificate_body\n __props__.__dict__[\"certificate_chain\"] = certificate_chain\n __props__.__dict__[\"expiration\"] = expiration\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"path\"] = path\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"upload_date\"] = upload_date\n return ServerCertificate(resource_name, opts=opts, __props__=__props__)", "def test_update_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate updated successfully')\n assert response.status_code == 200", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')" ]
[ "0.6489642", "0.64813066", "0.63798654", "0.6109609", "0.59978354", "0.5797838", "0.57907546", "0.57467526", "0.5694181", "0.55973345", "0.55772674", "0.5559559", "0.55436045", "0.55427647", "0.55206823", "0.5512409", "0.5498758", "0.5490636", "0.5467333", "0.54404825", "0.5436675", "0.5432874", "0.54270506", "0.5413606", "0.5393902", "0.53906566", "0.5339951", "0.5325069", "0.5317284", "0.5296046" ]
0.69468284
0
Gets a specific version of a certificate without returning its management policy. Requires certificates/get permission. To get the latest version of the certificate, or to get the certificate's
async def get_certificate_version( self, certificate_name: str, version: str, **kwargs ) -> KeyVaultCertificate: bundle = await self._client.get_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, certificate_version=version, **kwargs ) return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response", "def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results", "def get_ssl_certificate():", "def get_server_certificate(latest=None,name=None,name_prefix=None,path_prefix=None,opts=None):\n __args__ = dict()\n\n __args__['latest'] = latest\n __args__['name'] = name\n __args__['namePrefix'] = name_prefix\n __args__['pathPrefix'] = path_prefix\n if opts is None:\n opts = pulumi.InvokeOptions()\n if opts.version is None:\n opts.version = utilities.get_version()\n __ret__ = pulumi.runtime.invoke('aws:iam/getServerCertificate:getServerCertificate', __args__, opts=opts).value\n\n return AwaitableGetServerCertificateResult(\n arn=__ret__.get('arn'),\n certificate_body=__ret__.get('certificateBody'),\n certificate_chain=__ret__.get('certificateChain'),\n expiration_date=__ret__.get('expirationDate'),\n latest=__ret__.get('latest'),\n name=__ret__.get('name'),\n name_prefix=__ret__.get('namePrefix'),\n path=__ret__.get('path'),\n path_prefix=__ret__.get('pathPrefix'),\n upload_date=__ret__.get('uploadDate'),\n id=__ret__.get('id'))", "def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def cert_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert_version\")", "def get_certificate(self, cert_id):\r\n return self.ssl.getObject(id=cert_id)", "def get_ssl_certificate() :", "async def get_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.get_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=\"\",\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def fusion_api_get_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.get(aliasname, api, headers)", "def get_certificate_policy_request(self, 
vault_name: str, certificate_name: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}/policy'\n response = self.http_request(\n 'GET', full_url=url, resource=self.get_vault_resource())\n\n return response", "def get_certificate_policy_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n certificate_name = args['certificate_name']\n response = client.get_certificate_policy_request(\n vault_name, certificate_name)\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['CertificateName'] = certificate_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Policy Information',\n outputs,\n ['id', 'key_props', 'secret_props',\n 'x509_props', 'issuer', 'attributes'],\n removeNull=True, headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.CertificatePolicy',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output\n )\n\n return command_results", "def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def view_certificate(self, request, queryset):\n if len(queryset) > 1:\n self.message_user(\n request,\n 'You can only choose one certificate.',\n level=messages.ERROR)\n return None\n response = HttpResponse(content_type=\"text/plain\")\n cert = queryset.first()\n response.write(crypto.dump_certificate(\n crypto.FILETYPE_TEXT, cert.get_certificate()))\n return response", "def get_server_certificate(self, cert_name):\r\n params = {'ServerCertificateName' : cert_name}\r\n return self.get_response('GetServerCertificate', params)", "def test_get_certificate(self):\n chain = _create_certificate_chain()\n [(cakey, cacert), (ikey, icert), (skey, scert)] = chain\n\n context = Context(SSLv23_METHOD)\n context.use_certificate(scert)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is not None\n assert \"Server Certificate\" == cert.get_subject().CN", "def get(self, cache_id):\n return self.certificates.get(cache_id)", "def fusion_api_get_client_certificate(self, ip, api=None, headers=None):\n return self.client_certificate.get(ip, api, headers)", "def test_get_certificate_none(self):\n context = Context(SSLv23_METHOD)\n client = Connection(context, None)\n cert = client.get_certificate()\n assert cert is None", "def get_certificate(self, cert_name, callback=None):\n # TODO: get certificate from DHT (alternative to getting from disk).\n# _log.debug(\"get_certificate:\\n\\tmy_node_name={}\\n\\tcert_name={}\\n\\tcallback={}\".format(self.node_name, cert_name, callback))\n try:\n cert = self.get_certificate_locally(cert_name)\n if cert and callback:\n callback(certstring=cert)\n elif cert:\n return cert\n else:\n try:\n self.node.storage.get_index(['certificate',cert_name],\n cb=CalvinCB(self._get_certificate_from_storage_cb,\n callback=callback))\n except Exception as err:\n _log.debug(\"Certificate could not be found in storage, err={}\".format(err))\n raise\n except Exception as err:\n _log.debug(\"Failed searching for certificate locally, cert_name={}, 
err={}\".format(cert_name, err))", "def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.appliance_certificate.get(api, headers)", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def cert(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"cert\")", "def get(resource, **kwargs):\n\t#print(_endpoint(resource, 'GET'))\n\tresp = requests.get(\n\t\t_endpoint(resource, 'GET'),\n\t\tparams=_jsonify_dict_values(kwargs),\n\t\tverify=SERVER_CERT\n\t)\n\tresp.raise_for_status()\n\treturn resp.json()", "def GetVersion(self):\n return self._SendRequest(HTTP_GET, \"/version\", None, None)", "def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")", "def fusion_api_get_appliance_certificate(self, api=None, headers=None):\n return self.wsc.get(api=api, headers=headers)" ]
[ "0.6604066", "0.62116116", "0.6183379", "0.6155492", "0.61449355", "0.61368275", "0.60539097", "0.5980792", "0.59279466", "0.5823024", "0.58037525", "0.57737714", "0.57406354", "0.5730415", "0.5730415", "0.5730415", "0.57215726", "0.5670612", "0.5638924", "0.5626855", "0.5620727", "0.56074005", "0.5576151", "0.55656374", "0.5521216", "0.5521216", "0.5515047", "0.55028045", "0.54988116", "0.54979265" ]
0.7093787
0
Get a deleted certificate. Possible only in a vault with softdelete enabled. Requires certificates/get permission. Retrieves the deleted certificate information plus its attributes, such as retention interval, scheduled permanent deletion, and the current deletion recovery level.
async def get_deleted_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate: bundle = await self._client.get_deleted_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs ) return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def recover_deleted_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n recovered_cert_bundle = await self._client.recover_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n recovered_certificate = KeyVaultCertificate._from_certificate_bundle(recovered_cert_bundle)\n\n command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n polling_method = AsyncDeleteRecoverPollingMethod(\n command=command, final_resource=recovered_certificate, finished=False, interval=polling_interval\n )\n await polling_method.run()\n\n return polling_method.resource()", "async def delete_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n deleted_cert_bundle = await self._client.delete_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n deleted_certificate = DeletedCertificate._from_deleted_certificate_bundle(deleted_cert_bundle)\n\n polling_method = AsyncDeleteRecoverPollingMethod(\n # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished\n finished=deleted_certificate.recovery_id is None,\n command=partial(self.get_deleted_certificate, certificate_name=certificate_name, **kwargs),\n final_resource=deleted_certificate,\n interval=polling_interval,\n )\n await polling_method.run()\n\n return polling_method.resource()", "def list_deleted_certificates(\n self, *, include_pending: Optional[bool] = None, **kwargs\n ) -> AsyncItemPaged[DeletedCertificate]:\n max_page_size = kwargs.pop(\"max_page_size\", None)\n\n if self.api_version == \"2016-10-01\":\n if include_pending is not None:\n raise NotImplementedError(\n \"The 'include_pending' parameter to `list_deleted_certificates` \"\n \"is only available for API versions v7.0 and up\"\n )\n else:\n kwargs.update({\"include_pending\": include_pending})\n\n return self._client.get_deleted_certificates(\n vault_base_url=self._vault_url,\n maxresults=max_page_size,\n cls=lambda objs: [DeletedCertificate._from_deleted_certificate_item(x) for x in objs],\n **kwargs\n )", "def Deleted(self, default=None):\n return self.data.get('metadata', {}).get('deleted', default)", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "def DeleteCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def deleted(self, deleted_since=None, filters=None, params=None):\n return self.tc_requests.deleted(\n self.api_type,\n self.api_branch,\n deleted_since=deleted_since,\n owner=self.owner,\n filters=filters,\n params=params,\n )", "async def purge_deleted_certificate(self, certificate_name: str, **kwargs) -> None:\n await self._client.purge_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )", "def vault_delete(self):\n return self._vault_delete", "def get_certificate(self, 
cert_id):\r\n return self.ssl.getObject(id=cert_id)", "def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200", "def delete(self):\n if \"delete\" in self._prop_dict:\n return self._prop_dict[\"delete\"]\n else:\n return None", "def delete(resource):\n\tresp = requests.delete(\n\t\t_endpoint(resource, 'DELETE'),\n\t\theaders=PAYLOAD_HEADERS,\n\t\tverify=SERVER_CERT\n\t)\n\tresp.raise_for_status()\n\treturn resp.json()", "def deletion_policy(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"deletion_policy\")", "def delete(self, **ctx_options):\n return self.delete_async(**ctx_options).get_result()", "def get_delete_account(self):\n return self.client.get(self.delete_account_url)", "def getDeletion(self, *args):\n return _libsbml.Submodel_getDeletion(self, *args)", "def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):\n return self.client_certificate.delete(aliasname, api, headers)", "def delete_signing_cert(self, cert_id, user_name=None):\r\n params = {'CertificateId' : cert_id}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('DeleteSigningCertificate', params)", "def deleted(self) -> bool:\n return pulumi.get(self, \"deleted\")", "def delete(self, id, timeout=None):\n req = SecretStoreDeleteRequest()\n\n req.id = (id)\n tries = 0\n plumbing_response = None\n while True:\n try:\n plumbing_response = self.stub.Delete(\n req,\n metadata=self.parent.get_metadata('SecretStores.Delete',\n req),\n timeout=timeout)\n except Exception as e:\n if self.parent.shouldRetry(tries, e):\n tries += 1\n self.parent.jitterSleep(tries)\n continue\n raise plumbing.convert_error_to_porcelain(e) from e\n break\n\n resp = models.SecretStoreDeleteResponse()\n resp.meta = plumbing.convert_delete_response_metadata_to_porcelain(\n plumbing_response.meta)\n resp.rate_limit = plumbing.convert_rate_limit_metadata_to_porcelain(\n plumbing_response.rate_limit)\n return resp", "async def get_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.get_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=\"\",\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def delete(self):\n return self.get_data()", "def volume_deleted(self, block=False, poll_interval=POLL_INTERVAL):\n try:\n return self._volume_status_check(\n STATUS_DELETED, block, VOLUME_DELETE_TIMEOUT, poll_interval)\n except NotFound as e:\n logger.debug(\n \"Volume not found when querying status for %s with message \"\n \"%s\", STATUS_DELETED, e)\n return True", "async def delete_certificate_operation(self, certificate_name: str, **kwargs) -> CertificateOperation:\n bundle = await self._client.delete_certificate_operation(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)", "def DeletedRecords(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('deleted_records', default)\n return 
[HEP.JSONReferenceObject(i) for i in tmp]", "def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response", "def delete(self, endpoint, content=None, params=None):\n\t\treturn self._call(\"DELETE\", endpoint, content, params)", "def describe_cluster_deleting_response():\n return {\n \"cluster\": {\n \"status\": \"DELETING\",\n \"endpoint\": \"https://endpoint.amazonaws.com\",\n \"name\": EXAMPLE_NAME,\n \"certificateAuthority\": {\n \"data\": \"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tDQpWR1Z6ZEdsdVp5QkVZWFJoRFFwVVpYTjBhVzVuSUVSaGRHRU5DbFJsYzNScGJtY2dSR0YwWVEwS2EzVmlaWEp1WlhSbGN6QWVGdzBLVkdWemRHbHVaeUJFWVhSaERRcFVaWE4wYVc1bklFUmhkR0ZWQkFNVERRcHJkV0psY201bGRHVnpNQUVpTUEwS1ZHVnpkR2x1WnlCRVlYUmhEUXBVWlhOMGFXNW5JRVJoZEdFTkNsUmxjM1JwYm1jZ1JHRjBZY3UvR1FnbmFTcDNZaHBDTWhGVVpYTjBhVzVuSUVSaGRHRXl3clZqeEpWNjNwNFVHRmpZdHdGR1drUldJVkV1VkdWemRHbHVaeUJFWVhSaGJzT0MxSVJiTDhPd0lpMVhiWGg2VkdWemRHbHVaeUJFWVhSaFpXVndTTk9VVUZKNmN5QWJaaFpnWVNkTUV3MEtGMVJsYzNScGJtY2dSR0YwWVFZRFZSMFBBUUVFQkFNQ0FsUmxjM1JwYm1jZ1JHRjBZUUV3RFFvR0NTcElEUXBVWlhOMGFXNW5JRVJoZEdGcEgxc1pPRTNMa3lrMU9DWUNHUloyTEZjM3paOCtHell3WEZSbGMzUnBibWNnUkdGMFlYMUR5NjFNMVlGV1AxWVRIMVJsYzNScGJtY2dSR0YwWVd0aE5oMVphM2dWUDBGaGNSWjdKaW9oZVc4N1JsUmxjM1JwYm1jZ1JHRjBZUVpIVHd4NE9IdzZmZz09DQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0t\"\n },\n \"roleArn\": \"arn:aws:iam::111222333444/eksRole\",\n \"resourcesVpcConfig\": {\n \"subnetIds\": [\n \"subnet-00000000000000000\",\n \"subnet-00000000000000001\",\n \"subnet-00000000000000002\"\n ],\n \"vpcId\": \"vpc-00000000000000000\",\n \"securityGroupIds\": [\n \"sg-00000000000000000\"\n ]\n },\n \"version\": \"1.10\",\n \"arn\": \"arn:aws:eks:region:111222333444:cluster/\" + EXAMPLE_NAME,\n \"createdAt\": 1500000000.000\n }\n }", "def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results" ]
[ "0.6923022", "0.63999605", "0.60577244", "0.57940936", "0.55470073", "0.5532237", "0.54090583", "0.53719103", "0.5317824", "0.5283685", "0.5267442", "0.5235222", "0.5188259", "0.5171617", "0.5094433", "0.5058892", "0.49639174", "0.49404588", "0.48969808", "0.486709", "0.4836004", "0.48348966", "0.48288143", "0.48258254", "0.47985634", "0.47980458", "0.47899362", "0.47790247", "0.47769266", "0.47747838" ]
0.7624662
0
Permanently deletes a deleted certificate. Possible only in vaults with softdelete enabled. Requires certificates/purge permission. Performs an irreversible deletion of the specified certificate, without possibility for recovery. The operation is not available if the
async def purge_deleted_certificate(self, certificate_name: str, **kwargs) -> None: await self._client.purge_deleted_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def delete_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n deleted_cert_bundle = await self._client.delete_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n deleted_certificate = DeletedCertificate._from_deleted_certificate_bundle(deleted_cert_bundle)\n\n polling_method = AsyncDeleteRecoverPollingMethod(\n # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished\n finished=deleted_certificate.recovery_id is None,\n command=partial(self.get_deleted_certificate, certificate_name=certificate_name, **kwargs),\n final_resource=deleted_certificate,\n interval=polling_interval,\n )\n await polling_method.run()\n\n return polling_method.resource()", "async def recover_deleted_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n recovered_cert_bundle = await self._client.recover_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n recovered_certificate = KeyVaultCertificate._from_certificate_bundle(recovered_cert_bundle)\n\n command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n polling_method = AsyncDeleteRecoverPollingMethod(\n command=command, final_resource=recovered_certificate, finished=False, interval=polling_interval\n )\n await polling_method.run()\n\n return polling_method.resource()", "def DeleteCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def delete_signing_cert(self, cert_id, user_name=None):\r\n params = {'CertificateId' : cert_id}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('DeleteSigningCertificate', params)", "def delete_certificate(a): # delete_certificate(arn, /)\n\n while True:\n\n try:\n acm.delete_certificate(**{'CertificateArn': a})\n return\n except ClientError as exception:\n log_exception('')\n\n err_code = exception.response['Error']['Code']\n\n if err_code == 'ResourceInUseException':\n if get_remaining_time_in_millis() / 1000 < 30:\n raise\n\n sleep(5)\n continue\n\n if err_code in ['ResourceNotFoundException', 'ValidationException']:\n # If the arn is invalid, it didn't exist anyway.\n return\n\n raise\n\n except ParamValidationError:\n # invalid arn\n return", "async def delete_certificate_operation(self, certificate_name: str, **kwargs) -> CertificateOperation:\n bundle = await self._client.delete_certificate_operation(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)", "def vault_delete(self, vault_delete):\n self._vault_delete = vault_delete", "async def get_deleted_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n bundle = await self._client.get_deleted_certificate(\n vault_base_url=self.vault_url, 
certificate_name=certificate_name, **kwargs\n )\n return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200", "def _acme_revoke(self, cert):\n # XXX | pylint: disable=unused-variable\n\n # pylint: disable=protected-access\n certificate = jose_util.ComparableX509(cert._cert)\n try:\n with open(cert.backup_key_path, \"rU\") as backup_key_file:\n key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, backup_key_file.read())\n # If the key file doesn't exist... or is corrupted\n except OpenSSL.crypto.Error as error:\n logger.debug(error, exc_info=True)\n raise errors.RevokerError(\n \"Corrupted backup key file: %s\" % cert.backup_key_path)\n\n return self.acme.revoke(cert=None) # XXX", "def delete(\n self, resource_group_name, if_match, provisioning_service_name, certificate_name, certificatename=None, certificateraw_bytes=None, certificateis_verified=None, certificatepurpose=None, certificatecreated=None, certificatelast_updated=None, certificatehas_private_key=None, certificatenonce=None, custom_headers=None, raw=False, **operation_config):\n # Construct URL\n url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}'\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'provisioningServiceName': self._serialize.url(\"provisioning_service_name\", provisioning_service_name, 'str'),\n 'certificateName': self._serialize.url(\"certificate_name\", certificate_name, 'str')\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n if certificatename is not None:\n query_parameters['certificate.name'] = self._serialize.query(\"certificatename\", certificatename, 'str')\n if certificateraw_bytes is not None:\n query_parameters['certificate.rawBytes'] = self._serialize.query(\"certificateraw_bytes\", certificateraw_bytes, 'bytearray')\n if certificateis_verified is not None:\n query_parameters['certificate.isVerified'] = self._serialize.query(\"certificateis_verified\", certificateis_verified, 'bool')\n if certificatepurpose is not None:\n query_parameters['certificate.purpose'] = self._serialize.query(\"certificatepurpose\", certificatepurpose, 'str')\n if certificatecreated is not None:\n query_parameters['certificate.created'] = self._serialize.query(\"certificatecreated\", certificatecreated, 'iso-8601')\n if certificatelast_updated is not None:\n query_parameters['certificate.lastUpdated'] = self._serialize.query(\"certificatelast_updated\", certificatelast_updated, 'iso-8601')\n if certificatehas_private_key is not None:\n query_parameters['certificate.hasPrivateKey'] = self._serialize.query(\"certificatehas_private_key\", 
certificatehas_private_key, 'bool')\n if certificatenonce is not None:\n query_parameters['certificate.nonce'] = self._serialize.query(\"certificatenonce\", certificatenonce, 'str')\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n\n # Construct headers\n header_parameters = {}\n header_parameters['Content-Type'] = 'application/json; charset=utf-8'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n header_parameters.update(custom_headers)\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct and send request\n request = self._client.delete(url, query_parameters)\n response = self._client.send(request, header_parameters, stream=False, **operation_config)\n\n if response.status_code not in [200, 204]:\n raise models.ErrorDetailsException(self._deserialize, response)\n\n if raw:\n client_raw_response = ClientRawResponse(None, response)\n return client_raw_response", "def revoke_certificate(\n project_id: str,\n location: str,\n ca_pool_name: str,\n certificate_name: str,\n) -> None:\n\n caServiceClient = privateca_v1.CertificateAuthorityServiceClient()\n\n # Create Certificate Path.\n certificate_path = caServiceClient.certificate_path(\n project_id, location, ca_pool_name, certificate_name\n )\n\n # Create Revoke Certificate Request and specify the appropriate revocation reason.\n request = privateca_v1.RevokeCertificateRequest(\n name=certificate_path, reason=privateca_v1.RevocationReason.PRIVILEGE_WITHDRAWN\n )\n result = caServiceClient.revoke_certificate(request=request)\n\n print(\"Certificate revoke result:\", result)", "def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):\n return self.client_certificate.delete(aliasname, api, headers)", "def delete(ctx, **_):\n # Delete the resource\n azure_config = ctx.node.properties.get('azure_config')\n if not azure_config.get(\"subscription_id\"):\n azure_config = ctx.node.properties.get('client_config')\n else:\n ctx.logger.warn(\"azure_config is deprecated please use client_config, \"\n \"in later version it will be removed\")\n resource_group_name = utils.get_resource_group(ctx)\n vm_name = ctx.instance.runtime_properties.get('virtual_machine')\n name = ctx.instance.runtime_properties.get('name')\n api_version = \\\n ctx.node.properties.get('api_version', constants.API_VER_COMPUTE)\n vm_extension = VirtualMachineExtension(azure_config, ctx.logger,\n api_version)\n utils.handle_delete(ctx, vm_extension, resource_group_name, name, vm_name)", "def delete(resource):\n\tresp = requests.delete(\n\t\t_endpoint(resource, 'DELETE'),\n\t\theaders=PAYLOAD_HEADERS,\n\t\tverify=SERVER_CERT\n\t)\n\tresp.raise_for_status()\n\treturn resp.json()", "def run(self):\n certificate = self.admin_barbican.create_certificate()\n self.admin_barbican.orders_delete(certificate.order_ref)", "def storage_delete(context, storage_id):\n delete_info = {'deleted': True, 'deleted_at': timeutils.utcnow()}\n _storage_get_query(context).filter_by(id=storage_id).update(delete_info)", "def delete(ctx):\n click.echo('deleting')\n ctx.delete()\n click.echo('done')", "async def delete(self, delete: TPayload) -> None:", "def delete(self, **ctx_options):\n return 
self.delete_async(**ctx_options).get_result()", "def do_charge_purchase_delete(cs, args):\n cs.charge_purchases.delete(args.charge_purchase_id)", "def fusion_api_delete_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.delete(aliasname, api, headers)", "def delete_trust(TrustId=None, DeleteAssociatedConditionalForwarder=None):\n pass", "def delete_server_cert(self, cert_name):\r\n params = {'ServerCertificateName' : cert_name}\r\n return self.get_response('DeleteServerCertificate', params)", "def test_delete_non_existing_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/10', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate not found')\n assert response.status_code == 404", "def run(self):\n certificate = self.admin_barbican.create_asymmetric()\n self.admin_barbican.orders_delete(certificate.order_ref)", "def delete_policy(policystore_url, policy_credentials, verbose):\n\n if verbose:\n logging.info('Deleting policy')\n pprint.pprint(policy_credentials)\n\n delete_url = policystore_url + POLICYSTORE_PREFIX + 'DeleteEntitlementPolicy'\n\n r = requests.post(delete_url, headers=headers(), json=policy_credentials)\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n sys.exit('Failed to delete policy')\n\n logging.info('SUCCESS: Deleted policy')", "def do_delete_configured_volume(self, arg):\n args = self.parse_arguments(arg)\n if len(args) == 0:\n self.perror(\"No storage specified.\")\n return\n self.do_coroutine(self._localStorageRoutines.delete_configured_volume_routine(args[0]))", "def vault_delete(self):\n return self._vault_delete" ]
[ "0.67054504", "0.6211404", "0.6034061", "0.5930464", "0.5928886", "0.5907658", "0.588775", "0.5874071", "0.5767075", "0.57497597", "0.57255626", "0.5650153", "0.5645817", "0.56201583", "0.56109685", "0.55468774", "0.5518235", "0.54834414", "0.5473937", "0.54476756", "0.541691", "0.53540075", "0.53434926", "0.534091", "0.5326787", "0.5294844", "0.5251366", "0.5248059", "0.5228921", "0.52209026" ]
0.75400406
0
Recover a deleted certificate to its latest version. Possible only in a vault with soft-delete enabled. Requires certificates/recover permission. If the vault does not have soft-delete enabled,
async def recover_deleted_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:
    polling_interval = kwargs.pop("_polling_interval", None)
    if polling_interval is None:
        polling_interval = 2
    recovered_cert_bundle = await self._client.recover_deleted_certificate(
        vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs
    )
    recovered_certificate = KeyVaultCertificate._from_certificate_bundle(recovered_cert_bundle)

    command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)
    polling_method = AsyncDeleteRecoverPollingMethod(
        command=command, final_resource=recovered_certificate, finished=False, interval=polling_interval
    )
    await polling_method.run()

    return polling_method.resource()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _acme_revoke(self, cert):\n # XXX | pylint: disable=unused-variable\n\n # pylint: disable=protected-access\n certificate = jose_util.ComparableX509(cert._cert)\n try:\n with open(cert.backup_key_path, \"rU\") as backup_key_file:\n key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, backup_key_file.read())\n # If the key file doesn't exist... or is corrupted\n except OpenSSL.crypto.Error as error:\n logger.debug(error, exc_info=True)\n raise errors.RevokerError(\n \"Corrupted backup key file: %s\" % cert.backup_key_path)\n\n return self.acme.revoke(cert=None) # XXX", "def svn_fs_recover(*args):\r\n return _fs.svn_fs_recover(*args)", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "async def get_deleted_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n bundle = await self._client.get_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)", "def svn_fs_berkeley_recover(*args):\r\n return _fs.svn_fs_berkeley_recover(*args)", "async def delete_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n deleted_cert_bundle = await self._client.delete_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n deleted_certificate = DeletedCertificate._from_deleted_certificate_bundle(deleted_cert_bundle)\n\n polling_method = AsyncDeleteRecoverPollingMethod(\n # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished\n finished=deleted_certificate.recovery_id is None,\n command=partial(self.get_deleted_certificate, certificate_name=certificate_name, **kwargs),\n final_resource=deleted_certificate,\n interval=polling_interval,\n )\n await polling_method.run()\n\n return polling_method.resource()", "async def restore_certificate_backup(self, backup: bytes, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.restore_certificate(\n vault_base_url=self.vault_url,\n parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup),\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def do_recover(self):\n res = self.entity.do_recover(self.context, self)\n if res:\n return self.RES_OK, 'Node recovered successfully.'\n else:\n return self.RES_ERROR, 'Node recover failed.'", "def test_revoked_cert(self):\n\n # Initially should be able to to operations like open a session\n self._open_session()\n HttpAgentRpc().remove_host(self.host.fqdn)\n\n # After revokation any access should be bounced\n response = self._post([])\n self.assertEqual(response.status_code, 403)\n response = self._get()\n self.assertEqual(response.status_code, 403)", "def revoke_from_menu(self):\n\n csha1_vhlist = self._get_installed_locations()\n certs = self._populate_saved_certs(csha1_vhlist)\n\n while True:\n if certs:\n code, selection = revocation.display_certs(certs)\n\n if code == display_util.OK:\n revoked_certs = self._safe_revoke([certs[selection]])\n # Since we are currently only revoking one cert at a time...\n if revoked_certs:\n del certs[selection]\n elif code == display_util.HELP:\n revocation.more_info_cert(certs[selection])\n else:\n return\n else:\n logger.info(\n \"There are not any trusted Let's Encrypt \"\n 
\"certificates for this server.\")\n return", "def vault_delete(self, vault_delete):\n self._vault_delete = vault_delete", "def recover(self):\n self.deleted = False\n self.save()\n self.history.create(user_id=self.pk, action=user_history.RECOVERY)", "def renew_certificate(self, kwargs):\n return self.__query(\"certificateRenew\", kwargs)", "def revoke_from_cert(self, cert_path):\n # Locate the correct certificate (do not rely on filename)\n cert_to_revoke = Cert(cert_path)\n\n with open(self.list_path, \"rb\") as csvfile:\n csvreader = csv.reader(csvfile)\n for row in csvreader:\n cert = Cert.fromrow(row, self.config.cert_key_backup)\n\n if cert.get_der() == cert_to_revoke.get_der():\n self._safe_revoke([cert])\n return\n\n logger.info(\"Associated ACME certificate was not found.\")", "async def backup_certificate(self, certificate_name: str, **kwargs) -> bytes:\n backup_result = await self._client.backup_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return backup_result.value", "def check_deletion():\n\n if newrev == zero:\n ERROR(\"[POLICY] Refusing to delete this ref\")\n sys.exit(1)", "def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200", "async def purge_deleted_certificate(self, certificate_name: str, **kwargs) -> None:\n await self._client.purge_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )", "def vault_delete(self):\n return self._vault_delete", "def on_delete(self, req, resp):\n try:\n days_to_retain = int(req.params[\"days\"])\n except Exception:\n days_to_retain = 90\n\n try:\n retention_status = self.state_manager.task_retention(\n retain_days=str(days_to_retain))\n if not retention_status:\n resp.status = falcon.HTTP_404\n return\n resp.text = \"Tables purged successfully.\"\n except Exception as e:\n self.error(req.context, \"Unknown error: %s\" % (str(e)))\n resp.text = \"Unexpected error.\"\n resp.status = falcon.HTTP_500\n return\n resp.status = falcon.HTTP_200", "def certificate_revocation_check(cache: dict, session, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:\n acm = session.client(\"acm\")\n iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()\n for carn in list_certificates(cache, session):\n # Get ACM Cert Details\n cert = acm.describe_certificate(CertificateArn=carn)[\"Certificate\"]\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(cert,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson)\n cDomainName = str(cert['DomainName'])\n cIssuer = str(cert['Issuer'])\n cSerial = str(cert['Serial'])\n cStatus = str(cert['Status'])\n cKeyAlgo = str(cert['KeyAlgorithm'])\n try:\n # this is a failing check\n revokeReason = str(cert['RevocationReason'])\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-revoke-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and 
Configuration Checks/AWS Security Best Practices\",\n \"Effects/Denial of Service\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"CRITICAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.1] ACM Certificates should be monitored for revocation\",\n \"Description\": \"ACM Certificate \"\n + carn\n + \" is currently revoked due to \"\n + revokeReason\n + \". If the Certificate was in use by any applications they are likely unavailable or returning certificate revocation and invalidity warnings to end-users who are attempting to browse to your applications. You should immediately generate new certificates and distribute them to your applications (CloudFront, ALB Listeners, self-managed web applicaitons) and communicate with clients and other end-users. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on revocation of certificates, review the ACM FAQ on the topic of 'Revoke'\",\n \"Url\": \"https://aws.amazon.com/certificate-manager/faqs/\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 
4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n except Exception as e:\n if str(e) == \"'RevocationReason'\":\n # this is a passing check\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": carn + \"/acm-cert-revoke-check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": carn,\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\n \"Software and Configuration Checks/AWS Security Best Practices\",\n \"Effects/Denial of Service\"\n ],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[ACM.1] ACM Certificates should be monitored for revocation\",\n \"Description\": \"ACM Certificate \"\n + carn\n + \" is not currently revoked.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information on revocation of certificates, review the ACM FAQ on the topic of 'Revoke'\",\n \"Url\": \"https://aws.amazon.com/certificate-manager/faqs/\"\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"AWS\",\n \"ProviderType\": \"CSP\",\n \"ProviderAccountId\": awsAccountId,\n \"AssetRegion\": awsRegion,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Security Services\",\n \"AssetService\": \"Amazon Certificate Manager\",\n \"AssetComponent\": \"Certificate\"\n },\n \"Resources\": [\n {\n \"Type\": \"AwsCertificateManagerCertificate\",\n \"Id\": carn,\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"AwsCertificateManagerCertificate\": {\n \"DomainName\": cDomainName,\n \"Issuer\": cIssuer,\n \"Serial\": cSerial,\n \"KeyAlgorithm\": cKeyAlgo,\n \"Status\": cStatus\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.MA-1\",\n \"NIST SP 800-53 Rev. 4 MA-2\",\n \"NIST SP 800-53 Rev. 4 MA-3\",\n \"NIST SP 800-53 Rev. 4 MA-5\",\n \"NIST SP 800-53 Rev. 4 MA-6\",\n \"AICPA TSC CC8.1\",\n \"ISO 27001:2013 A.11.1.2\",\n \"ISO 27001:2013 A.11.2.4\",\n \"ISO 27001:2013 A.11.2.5\",\n \"ISO 27001:2013 A.11.2.6\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding\n else:\n print(e)", "def unseal(self):\n\n client = self.connect()\n if not client.sys.is_sealed():\n print(\"Vault is already unsealed\")\n return 0\n\n key_file = self.path(VAULT_KEY)\n keys = []\n for f in glob.glob(key_file + \"*\"):\n with open(f, \"r\") as fh:\n keys.append(fh.read())\n\n if len(keys) == 0:\n raise VaultError(\"Could not locate any key files, not unsealing\")\n\n res = client.sys.submit_unseal_keys(keys)\n if res['sealed']:\n p = res['progress']\n t = res['t']\n print(\"Vault partly unsealed, {} of {} needed keys entered\".format(p,t))\n print(\"Enter {} more keys to finish unsealing the vault\". 
format(t-p))\n return (t-p)\n else:\n print(\"Vault unsealed\")\n return 0", "def refund_cert_callback(sender, course_enrollment=None, **kwargs):\r\n\r\n # Only refund verified cert unenrollments that are within bounds of the expiration date\r\n if not course_enrollment.refundable():\r\n return\r\n\r\n target_certs = CertificateItem.objects.filter(course_id=course_enrollment.course_id, user_id=course_enrollment.user, status='purchased', mode='verified')\r\n try:\r\n target_cert = target_certs[0]\r\n except IndexError:\r\n log.error(\"Matching CertificateItem not found while trying to refund. User %s, Course %s\", course_enrollment.user, course_enrollment.course_id)\r\n return\r\n target_cert.status = 'refunded'\r\n target_cert.refund_requested_time = datetime.now(pytz.utc)\r\n target_cert.save()\r\n target_cert.order.status = 'refunded'\r\n target_cert.order.save()\r\n\r\n order_number = target_cert.order_id\r\n # send billing an email so they can handle refunding\r\n subject = _(\"[Refund] User-Requested Refund\")\r\n message = \"User {user} ({user_email}) has requested a refund on Order #{order_number}.\".format(user=course_enrollment.user,\r\n user_email=course_enrollment.user.email,\r\n order_number=order_number)\r\n to_email = [settings.PAYMENT_SUPPORT_EMAIL]\r\n from_email = microsite.get_value('payment_support_email', settings.PAYMENT_SUPPORT_EMAIL)\r\n try:\r\n send_mail(subject, message, from_email, to_email, fail_silently=False)\r\n except Exception as exception: # pylint: disable=broad-except\r\n err_str = ('Failed sending email to billing to request a refund for verified certificate'\r\n ' (User {user}, Course {course}, CourseEnrollmentID {ce_id}, Order #{order})\\n{exception}')\r\n log.error(err_str.format(\r\n user=course_enrollment.user,\r\n course=course_enrollment.course_id,\r\n ce_id=course_enrollment.id,\r\n order=order_number,\r\n exception=exception,\r\n ))\r\n\r\n return target_cert", "def revert(self, record, new_password):\n if self.old_key_deleted:\n if self.aws_sync_profile:\n if self.sync_with_creds_file():\n logging.info(\n f'New key id \"{self.new_key_id}\" was updated in profile \"{self.aws_sync_profile}\"'\n ' of AWS credentials file, but failed to update in Keeper record.'\n )\n else:\n logging.info(\n f'New key id {self.new_key_id} failed to update in profile \"{self.aws_sync_profile}\"'\n ' of AWS credentials file, and also failed to update in Keeper record.'\n )\n return False\n else:\n self.delete_key(new_key=True)", "def recover_alarm(alarm_id, auth, url):\n f_url = url + \"/imcrs/fault/alarm/recover/\" + str(alarm_id)\n response = requests.put(f_url, auth=auth, headers=HEADERS)\n try:\n return response.status_code\n except requests.exceptions.RequestException as error:\n return \"Error:\\n\" + str(error) + 'recover_alarm: An Error has occured'", "def _safe_revoke(self, certs):\n success_list = []\n try:\n for cert in certs:\n if self.no_confirm or revocation.confirm_revocation(cert):\n try:\n self._acme_revoke(cert)\n except errors.Error:\n # TODO: Improve error handling when networking is set...\n logger.error(\n \"Unable to revoke cert:%s%s\", os.linesep, str(cert))\n success_list.append(cert)\n revocation.success_revocation(cert)\n finally:\n if success_list:\n self._remove_certs_keys(success_list)\n\n return success_list", "def replace_certificate(self):\n return self.__query(\"certificateReplace\", data)", "def test_delete_non_existing_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n 
content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/10', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate not found')\n assert response.status_code == 404", "def edit_certificate(self, certificate):\r\n return self.ssl.editObject(certificate, id=certificate['id'])", "def robustified_delete_backup(self, backup):\n # do some validation,\n target_ref = backup.target_reference\n\n if backup.state == State.SUCCEEDED and not target_ref:\n raise BackupSweepError(\"Cannot delete backup '%s'. \"\n \"Backup never uploaded\" % backup.id)\n\n logger.info(\"Deleting target references for backup '%s'.\" % backup.id)\n\n\n\n logger.info(\"Deleting primary target reference for backup '%s'.\" %\n backup.id)\n # target ref can be None for CANCELED backups\n if target_ref:\n self.do_delete_target_ref(backup, backup.target, target_ref)\n\n # delete log file\n if backup.log_target_reference:\n logger.info(\"Deleting log target reference for backup '%s'.\" %\n backup.id)\n self.do_delete_target_ref(backup, backup.target, backup.log_target_reference)\n\n if backup.secondary_target_references:\n logger.info(\"Deleting secondary target references for backup '%s'.\" %\n backup.id)\n sec_targets = backup.secondary_targets\n sec_target_refs = backup.secondary_target_references\n for (sec_target, sec_tgt_ref) in zip(sec_targets, sec_target_refs):\n logger.info(\"Deleting secondary target reference %s for backup \"\n \"'%s'.\" % (sec_tgt_ref, backup.id))\n self.do_delete_target_ref(backup, sec_target, sec_tgt_ref)\n\n # set deleted date\n backup.deleted_date = date_now()\n update_props = [\"deletedDate\", \"targetReference\",\n \"secondaryTargetReferences\"]\n persistence.update_backup(backup, properties=update_props,\n event_name=\"DELETING\",\n message=\"Deleting target references\")\n\n logger.info(\"Backup %s target references deleted successfully!\" %\n backup.id)" ]
[ "0.5909231", "0.5637491", "0.5616782", "0.55714905", "0.5538955", "0.549184", "0.5395828", "0.52587295", "0.5119891", "0.5034296", "0.5010087", "0.500666", "0.49803507", "0.49446768", "0.4943516", "0.49153277", "0.49063587", "0.49057066", "0.49054945", "0.48861593", "0.4849271", "0.4819668", "0.4817717", "0.4731088", "0.4713413", "0.46508756", "0.46290717", "0.46225208", "0.46054044", "0.4601864" ]
0.71048915
0
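As an illustrative aside (not part of the dataset row above): a minimal caller-side sketch of the recover operation that this row's document implements, using the public azure-keyvault-certificates async client. The vault URL and certificate name are placeholders, and azure-identity is assumed for authentication.

# Usage sketch only; not dataset content. The vault URL, certificate name, and
# DefaultAzureCredential authentication path are assumptions/placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        # Recovers a soft-deleted certificate back to its latest version.
        recovered = await client.recover_deleted_certificate("my-cert")
        print(recovered.name, recovered.properties.version)


asyncio.run(main())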
Import a certificate created externally. Requires certificates/import permission. Imports an existing valid certificate, containing a private key, into Azure Key Vault. The certificate to be imported can be in either PFX or PEM format. If the certificate is in PEM format, the PEM file must contain the key as well as x509 certificates, and you must provide a ``policy`` with
async def import_certificate(
    self, certificate_name: str, certificate_bytes: bytes, **kwargs
) -> KeyVaultCertificate:
    enabled = kwargs.pop("enabled", None)
    policy = kwargs.pop("policy", None)

    if enabled is not None:
        attributes = self._models.CertificateAttributes(enabled=enabled)
    else:
        attributes = None

    base64_encoded_certificate = base64.b64encode(certificate_bytes).decode("utf-8")

    parameters = self._models.CertificateImportParameters(
        base64_encoded_certificate=base64_encoded_certificate,
        password=kwargs.pop("password", None),
        certificate_policy=policy._to_certificate_policy_bundle() if policy else None,
        certificate_attributes=attributes,
        tags=kwargs.pop("tags", None),
    )

    bundle = await self._client.import_certificate(
        vault_base_url=self.vault_url, certificate_name=certificate_name, parameters=parameters, **kwargs
    )
    return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _store_certificate(fullchain, key, domain=None, tag_prefix=None,\n region_name=None, acm_client=None, dry_run=False):\n #pylint:disable=unused-argument\n result = _check_certificate(fullchain, key, domain=domain)\n if not domain:\n domain = result['ssl_certificate']['common_name']\n cert, chain = _split_fullchain(fullchain)\n if not acm_client:\n acm_client = boto3.client('acm', region_name=region_name)\n kwargs = {}\n resp = acm_client.list_certificates()\n for acm_cert in resp['CertificateSummaryList']:\n if acm_cert['DomainName'] == domain:\n LOGGER.info(\"A certificate for domain %s has already been\"\\\n \" imported as %s - replacing\",\n domain, acm_cert['CertificateArn'])\n kwargs['CertificateArn'] = acm_cert['CertificateArn']\n break\n if not dry_run:\n resp = acm_client.import_certificate(\n Certificate=cert.encode('ascii'),\n PrivateKey=key.encode('ascii'),\n CertificateChain=chain.encode('ascii'),\n **kwargs)\n LOGGER.info(\"%s (re-)imported TLS certificate %s as %s\",\n tag_prefix, result['ssl_certificate'], resp['CertificateArn'])\n result.update({'CertificateArn': resp['CertificateArn']})\n return result", "def fusion_api_import_client_certificate(self, body, api=None, headers=None):\n return self.client_certificate.post(body, api, headers)", "def fusion_api_import_appliance_certificate(self, body, api=None, headers=None, param=''):\n return self.wsc.put(body, api=api, headers=headers, param=param)", "def import_public_key_from_cert_file(filename):\n with open(filename, \"rb\") as key_file:\n cert = x509.load_pem_x509_certificate(key_file.read(), backend=default_backend())\n return cert.public_key()", "def test_azure_import(self):\n\n uri, pubkey = AzureSigner.import_(\"fsn-vault-1\", \"ec-key-1\")\n\n self.assertEqual(pubkey, self.azure_pubkey)\n self.assertEqual(uri, self.azure_id)", "def importprivkey(self, privkey, acct='', rescan=True):\n return self.proxy.importprivkey(privkey, acct, rescan)", "def import_public_key_from_pem_file(filename):\n with open(filename, \"rb\") as key_file:\n public_key = serialization.load_pem_public_key(key_file.read(), backend=default_backend())\n return public_key", "def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...", "async def create_certificate(\n self, certificate_name: str, policy: CertificatePolicy, **kwargs\n ) -> Union[KeyVaultCertificate, CertificateOperation]:\n if not (policy.san_emails or policy.san_user_principal_names or policy.san_dns_names or policy.subject):\n raise ValueError(NO_SAN_OR_SUBJECT)\n\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 5\n enabled = kwargs.pop(\"enabled\", None)\n\n if enabled is not None:\n attributes = self._models.CertificateAttributes(enabled=enabled)\n else:\n attributes = None\n\n parameters = self._models.CertificateCreateParameters(\n certificate_policy=policy._to_certificate_policy_bundle(),\n certificate_attributes=attributes,\n tags=kwargs.pop(\"tags\", None),\n )\n\n cert_bundle = await self._client.create_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n parameters=parameters,\n **kwargs\n )\n\n create_certificate_operation = CertificateOperation._from_certificate_operation_bundle(cert_bundle)\n\n command = partial(self.get_certificate_operation, certificate_name=certificate_name, **kwargs)\n\n get_certificate_command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n\n create_certificate_polling = CreateCertificatePollerAsync(\n 
get_certificate_command=get_certificate_command, interval=polling_interval\n )\n def no_op(*_, **__) -> Any: # The deserialization callback is ignored based on polling implementation\n pass\n return await async_poller(command, create_certificate_operation, no_op, create_certificate_polling)", "def load_policy(self, source, templatefile, key):\n self._logger.info(\n f\"Loading policy model from {source} and templates from {templatefile} to {key}\"\n )\n model = self._load_model(source)\n templates = pd.read_hdf(templatefile, \"table\")\n self._policies[key] = {\"model\": model, \"templates\": templates}", "def import_private_key_from_pem_file(filename, passphrase=None):\n with open(filename, \"rb\") as key_file:\n private_key = serialization.load_pem_private_key(\n key_file.read(), password=passphrase, backend=default_backend()\n )\n return private_key", "def add_certificate(self, certificate):\r\n return self.ssl.createObject(certificate)", "def import_policy_model_from_h5(\n self,\n import_file: str,\n policy_id: PolicyID = DEFAULT_POLICY_ID,\n ) -> None:\n self.get_policy(policy_id).import_model_from_h5(import_file)\n # Sync new weights to remote workers.\n self._sync_weights_to_workers(worker_set=self.workers)", "def import_proof(self, blocks=None, filename=None):\n if blocks is None and filename is None:\n raise NameError(\n \"You need to provide number of blocks or filename for proof\"\n )\n if filename is None:\n filename = self.make_proof_filename(blocks)\n pickle_in = open(filename, \"rb\")\n proof = pickle.load(pickle_in)\n print(\"Proof loaded from \" + filename)\n return proof", "def edit_certificate(self, certificate):\r\n return self.ssl.editObject(certificate, id=certificate['id'])", "def _sign_cert(self, cert):\n with open(self._get_key_link(self.commonname), 'r') as private_file:\n data = private_file.read()\n pkey = crypto.load_privatekey(crypto.FILETYPE_PEM,\n data)\n cert.sign(pkey, 'sha256')", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def __init__(__self__, *,\n key_vault_cert_name: pulumi.Input[str],\n type: pulumi.Input[str],\n vault_uri: pulumi.Input[str],\n cert_version: Optional[pulumi.Input[str]] = None,\n exclude_private_key: Optional[pulumi.Input[bool]] = None):\n pulumi.set(__self__, \"key_vault_cert_name\", key_vault_cert_name)\n pulumi.set(__self__, \"type\", 'KeyVaultCertificate')\n 
pulumi.set(__self__, \"vault_uri\", vault_uri)\n if cert_version is not None:\n pulumi.set(__self__, \"cert_version\", cert_version)\n if exclude_private_key is None:\n exclude_private_key = False\n if exclude_private_key is not None:\n pulumi.set(__self__, \"exclude_private_key\", exclude_private_key)", "def import_publickey_cert_pem(self, cert_pemstring, privkey_pemstring=None):\n ## TODO: This method is not tested. It may have bugs.\n if isinstance(cert_pemstring, str):\n cert_pemstring = cert_pemstring.encode('utf-8')\n cert = x509.load_pem_x509_certificate(cert_pemstring, default_backend())\n fingerprint = cert.fingerprint(hashes.SHA256())\n\n if privkey_pemstring is not None:\n self.mk_keyobj_from_private_key_pem(privkey_pemstring)\n sig = self.private_key_obj.sign(fingerprint, ec.ECDSA(utils.Prehashed(hashes.SHA256())))\n public_key = cert.public_key()\n result = public_key.verify(sig, fingerprint, ec.ECDSA(hashes.SHA256()))\n if not result:\n return False\n self.private_key_obj = public_key\n self._get_naive_private_key_bytes()\n\n self._get_naive_public_key_bytes()\n return True", "def fusion_api_import_server_certificate(self, body, api=None, headers=None):\n return self.server_certificate.post(body, api, headers)", "def _Run(args, holder, ssl_certificate_ref):\n client = holder.client\n\n certificate_type, self_managed, managed = _ParseCertificateArguments(\n client, args)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n type=certificate_type,\n name=ssl_certificate_ref.Name(),\n selfManaged=self_managed,\n managed=managed,\n description=args.description),\n project=ssl_certificate_ref.project)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n collection = client.apitools_client.regionSslCertificates\n else:\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def do_import(args):\n base64str = b''\n for infile_name in args.infile_names:\n if args.png:\n chunk = subprocess.check_output(['zbarimg', '--raw', infile_name])\n base64str += chunk\n elif args.base64:\n with open(infile_name, 'rb') as infile:\n chunk = infile.read()\n base64str += chunk\n\n raw = base64.b64decode(base64str)\n paperkey = subprocess.Popen(['paperkey', '--pubring', args.pubkey],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n (paperkey_stdout, _) = paperkey.communicate(raw)\n gpg = subprocess.Popen(['gpg', '--import'], stdin=subprocess.PIPE)\n gpg.communicate(paperkey_stdout)", "def handle_pem_extension(oid, _input):\r\n try:\r\n cert = objects.X509(oid)\r\n cert.pem = _input.read()\r\n except (ValueError, TypeError, OSError) as failed_to_init:\r\n raise click.BadParameter(\r\n '[{0}]: File Content can\\'t be parsed or written.\\n {1}'.format(_input.name, _input.read())\r\n ) from failed_to_init", "def import_private_key(self, raw_key):\n error = vscf_error_t()\n result = self._lib_vscf_ecc.vscf_ecc_import_private_key(self.ctx, raw_key.ctx, error)\n VscfStatus.handle_status(error.status)\n instance = 
VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))\n return instance", "def load_pem_x509_certificate(data):\n return _x509.load_pem_x509_certificate(data, _backends.default_backend())", "def SecurityPolicyFromFile(input_file, messages, file_format):\n\n if file_format == 'yaml':\n parsed_security_policy = yaml.load(input_file)\n else:\n try:\n parsed_security_policy = json.load(input_file)\n except ValueError as e:\n raise exceptions.BadFileException('Error parsing JSON: {0}'.format(\n six.text_type(e)))\n\n security_policy = messages.SecurityPolicy()\n if 'description' in parsed_security_policy:\n security_policy.description = parsed_security_policy['description']\n if 'fingerprint' in parsed_security_policy:\n security_policy.fingerprint = base64.urlsafe_b64decode(\n parsed_security_policy['fingerprint'].encode('ascii'))\n if 'type' in parsed_security_policy:\n security_policy.type = (\n messages.SecurityPolicy.TypeValueValuesEnum(\n parsed_security_policy['type']))\n if 'cloudArmorConfig' in parsed_security_policy:\n security_policy.cloudArmorConfig = messages.SecurityPolicyCloudArmorConfig(\n enableMl=parsed_security_policy['cloudArmorConfig']['enableMl'])\n if 'adaptiveProtectionConfig' in parsed_security_policy:\n security_policy.adaptiveProtectionConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfig(\n layer7DdosDefenseConfig=messages\n .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(\n enable=parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['enable']),))\n if 'autoDeployConfig' in parsed_security_policy['adaptiveProtectionConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfigAutoDeployConfig())\n if 'loadThreshold' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.loadThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['loadThreshold'])\n if 'confidenceThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.confidenceThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['confidenceThreshold'])\n if 'impactedBaselineThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.impactedBaselineThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['impactedBaselineThreshold'])\n if 'expirationSec' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.expirationSec = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['expirationSec'])\n if 'ruleVisibility' in parsed_security_policy['adaptiveProtectionConfig'][\n 'layer7DdosDefenseConfig']:\n security_policy.adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility = (\n messages.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig\n .RuleVisibilityValueValuesEnum(\n parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['ruleVisibility']))\n if 'advancedOptionsConfig' in parsed_security_policy:\n advanced_options_config = parsed_security_policy['advancedOptionsConfig']\n security_policy.advancedOptionsConfig = (\n 
messages.SecurityPolicyAdvancedOptionsConfig())\n if 'jsonParsing' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonParsing = (\n messages.SecurityPolicyAdvancedOptionsConfig\n .JsonParsingValueValuesEnum(\n advanced_options_config['jsonParsing']))\n if 'jsonCustomConfig' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonCustomConfig = (\n messages.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(\n contentTypes=advanced_options_config\n ['jsonCustomConfig'].get('contentTypes', [])))\n if 'logLevel' in advanced_options_config:\n security_policy.advancedOptionsConfig.logLevel = (\n messages.SecurityPolicyAdvancedOptionsConfig.LogLevelValueValuesEnum(\n advanced_options_config['logLevel']))\n if 'userIpRequestHeaders' in advanced_options_config:\n security_policy.advancedOptionsConfig.userIpRequestHeaders = (\n advanced_options_config['userIpRequestHeaders'])\n if 'ddosProtectionConfig' in parsed_security_policy:\n security_policy.ddosProtectionConfig = (\n messages.SecurityPolicyDdosProtectionConfig(\n ddosProtection=messages.SecurityPolicyDdosProtectionConfig\n .DdosProtectionValueValuesEnum(\n parsed_security_policy['ddosProtectionConfig']\n ['ddosProtection'])))\n if 'recaptchaOptionsConfig' in parsed_security_policy:\n security_policy.recaptchaOptionsConfig = (\n messages.SecurityPolicyRecaptchaOptionsConfig())\n if 'redirectSiteKey' in parsed_security_policy['recaptchaOptionsConfig']:\n security_policy.recaptchaOptionsConfig.redirectSiteKey = (\n parsed_security_policy['recaptchaOptionsConfig']['redirectSiteKey'])\n\n if 'userDefinedFields' in parsed_security_policy:\n user_defined_fields = []\n for udf in parsed_security_policy['userDefinedFields']:\n user_defined_field = messages.SecurityPolicyUserDefinedField()\n user_defined_field.name = udf['name']\n user_defined_field.base = (\n messages.SecurityPolicyUserDefinedField.BaseValueValuesEnum(\n udf['base']\n )\n )\n user_defined_field.offset = udf['offset']\n user_defined_field.size = udf['size']\n if 'mask' in udf:\n user_defined_field.mask = udf['mask']\n user_defined_fields.append(user_defined_field)\n security_policy.userDefinedFields = user_defined_fields\n\n rules = []\n for rule in parsed_security_policy['rules']:\n security_policy_rule = messages.SecurityPolicyRule()\n security_policy_rule.action = rule['action']\n if 'description' in rule:\n security_policy_rule.description = rule['description']\n if 'match' in rule:\n match = messages.SecurityPolicyRuleMatcher()\n if 'versionedExpr' in rule['match']:\n match.versionedExpr = ConvertToEnum(\n rule['match']['versionedExpr'], messages\n )\n if 'expr' in rule['match']:\n match.expr = messages.Expr(\n expression=rule['match']['expr']['expression']\n )\n if 'exprOptions' in rule['match']:\n expr_options = messages.SecurityPolicyRuleMatcherExprOptions()\n if 'recaptchaOptions' in rule['match']['exprOptions']:\n expr_options.recaptchaOptions = (\n messages.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions(\n actionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('actionTokenSiteKeys', []),\n sessionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('sessionTokenSiteKeys', []),\n )\n )\n match.exprOptions = expr_options\n if 'config' in rule['match']:\n if 'srcIpRanges' in rule['match']['config']:\n match.config = messages.SecurityPolicyRuleMatcherConfig(\n srcIpRanges=rule['match']['config']['srcIpRanges']\n )\n security_policy_rule.match = match\n if 'networkMatch' in rule:\n 
network_match = messages.SecurityPolicyRuleNetworkMatcher()\n if 'userDefinedFields' in rule['networkMatch']:\n user_defined_fields = []\n for udf in rule['networkMatch']['userDefinedFields']:\n user_defined_field_match = (\n messages.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch()\n )\n user_defined_field_match.name = udf['name']\n user_defined_field_match.values = udf['values']\n user_defined_fields.append(user_defined_field_match)\n network_match.userDefinedFields = user_defined_fields\n if 'srcIpRanges' in rule['networkMatch']:\n network_match.srcIpRanges = rule['networkMatch']['srcIpRanges']\n if 'destIpRanges' in rule['networkMatch']:\n network_match.destIpRanges = rule['networkMatch']['destIpRanges']\n if 'ipProtocols' in rule['networkMatch']:\n network_match.ipProtocols = rule['networkMatch']['ipProtocols']\n if 'srcPorts' in rule['networkMatch']:\n network_match.srcPorts = rule['networkMatch']['srcPorts']\n if 'destPorts' in rule['networkMatch']:\n network_match.destPorts = rule['networkMatch']['destPorts']\n if 'srcRegionCodes' in rule['networkMatch']:\n network_match.srcRegionCodes = rule['networkMatch']['srcRegionCodes']\n if 'srcAsns' in rule['networkMatch']:\n network_match.srcAsns = rule['networkMatch']['srcAsns']\n security_policy_rule.networkMatch = network_match\n security_policy_rule.priority = int(rule['priority'])\n if 'preview' in rule:\n security_policy_rule.preview = rule['preview']\n rules.append(security_policy_rule)\n if 'redirectTarget' in rule:\n security_policy_rule.redirectTarget = rule['redirectTarget']\n if 'ruleNumber' in rule:\n security_policy_rule.ruleNumber = int(rule['ruleNumber'])\n if 'redirectOptions' in rule:\n redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rule['redirectOptions']:\n redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rule['redirectOptions']['type']))\n if 'target' in rule['redirectOptions']:\n redirect_options.target = rule['redirectOptions']['target']\n security_policy_rule.redirectOptions = redirect_options\n if 'headerAction' in rule:\n header_action = messages.SecurityPolicyRuleHttpHeaderAction()\n headers_in_rule = rule['headerAction'].get('requestHeadersToAdds', [])\n headers_to_add = []\n for header_to_add in headers_in_rule:\n headers_to_add.append(\n messages.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(\n headerName=header_to_add['headerName'],\n headerValue=header_to_add['headerValue']))\n if headers_to_add:\n header_action.requestHeadersToAdds = headers_to_add\n security_policy_rule.headerAction = header_action\n if 'rateLimitOptions' in rule:\n rate_limit_options = rule['rateLimitOptions']\n security_policy_rule.rateLimitOptions = (\n messages.SecurityPolicyRuleRateLimitOptions(\n rateLimitThreshold=messages\n .SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['rateLimitThreshold']['count'],\n intervalSec=rate_limit_options['rateLimitThreshold']\n ['intervalSec']),\n conformAction=rate_limit_options['conformAction'],\n exceedAction=rate_limit_options['exceedAction']))\n if 'exceedActionRpcStatus' in rate_limit_options:\n exceed_action_rpc_status = (\n messages.SecurityPolicyRuleRateLimitOptionsRpcStatus()\n )\n if 'code' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.code = rate_limit_options[\n 'exceedActionRpcStatus']['code']\n if 'message' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.message = rate_limit_options[\n 
'exceedActionRpcStatus']['message']\n security_policy_rule.rateLimitOptions.exceedActionRpcStatus = (\n exceed_action_rpc_status\n )\n if 'exceedRedirectOptions' in rate_limit_options:\n exceed_redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rate_limit_options['exceedRedirectOptions']['type']))\n if 'target' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.target = rate_limit_options[\n 'exceedRedirectOptions']['target']\n security_policy_rule.rateLimitOptions.exceedRedirectOptions = (\n exceed_redirect_options)\n if 'banThreshold' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banThreshold = (\n messages.SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['banThreshold']['count'],\n intervalSec=rate_limit_options['banThreshold']['intervalSec']))\n if 'banDurationSec' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banDurationSec = (\n rate_limit_options['banDurationSec'])\n if 'enforceOnKey' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKey = (\n messages.SecurityPolicyRuleRateLimitOptions\n .EnforceOnKeyValueValuesEnum(rate_limit_options['enforceOnKey']))\n if 'enforceOnKeyName' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKeyName = (\n rate_limit_options['enforceOnKeyName'])\n if 'preconfiguredWafConfig' in rule:\n preconfig_waf_config = messages.SecurityPolicyRulePreconfiguredWafConfig()\n for exclusion in rule['preconfiguredWafConfig'].get('exclusions', []):\n exclusion_to_add = (\n messages.SecurityPolicyRulePreconfiguredWafConfigExclusion())\n if 'targetRuleSet' in exclusion:\n exclusion_to_add.targetRuleSet = exclusion['targetRuleSet']\n for target_rule_id in exclusion.get('targetRuleIds', []):\n exclusion_to_add.targetRuleIds.append(target_rule_id)\n for request_header in exclusion.get('requestHeadersToExclude', []):\n exclusion_to_add.requestHeadersToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_header,\n messages))\n for request_cookie in exclusion.get('requestCookiesToExclude', []):\n exclusion_to_add.requestCookiesToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_cookie,\n messages))\n for request_query_param in exclusion.get('requestQueryParamsToExclude',\n []):\n exclusion_to_add.requestQueryParamsToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_query_param,\n messages))\n for request_uri in exclusion.get('requestUrisToExclude', []):\n exclusion_to_add.requestUrisToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_uri, messages))\n preconfig_waf_config.exclusions.append(exclusion_to_add)\n security_policy_rule.preconfiguredWafConfig = preconfig_waf_config\n\n security_policy.rules = rules\n\n return security_policy", "def create_policy(policystore_url, create_policy_request, verbose):\n\n if verbose:\n logging.info('Creating policy')\n pprint.pprint(create_policy_request)\n\n create_url = policystore_url + POLICYSTORE_PREFIX + 'CreateEntitlementPolicy'\n\n r = requests.post(\n create_url, headers=headers(), json=create_policy_request)\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n\n sys.exit('Failed to create policy')\n\n resp = r.json()\n\n logging.info(\n f'SUCCESS: Created policy - ID: {resp[\"policy_id\"]}, Token: 
{resp[\"token\"]}'\n )\n\n return resp", "def import_model(self, import_file: str):\n # Check for existence.\n if not os.path.exists(import_file):\n raise FileNotFoundError(\n \"`import_file` '{}' does not exist! Can't import Model.\".format(\n import_file\n )\n )\n # Get the format of the given file.\n import_format = \"h5\" # TODO(sven): Support checkpoint loading.\n\n ExportFormat.validate([import_format])\n if import_format != ExportFormat.H5:\n raise NotImplementedError\n else:\n return self.import_policy_model_from_h5(import_file)", "def test_aws_service_api_keypair_import_post(self):\n pass", "def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n certificate: pulumi.Input[str],\n client_id: pulumi.Input[str],\n principal_id: pulumi.Input[str]):\n pulumi.set(__self__, \"auth_type\", 'servicePrincipalCertificate')\n pulumi.set(__self__, \"certificate\", certificate)\n pulumi.set(__self__, \"client_id\", client_id)\n pulumi.set(__self__, \"principal_id\", principal_id)" ]
[ "0.555027", "0.53187644", "0.52335936", "0.51478225", "0.5022696", "0.49920884", "0.49599612", "0.49253953", "0.4920437", "0.48891997", "0.48853323", "0.48505723", "0.478446", "0.47836658", "0.47507176", "0.47453344", "0.47313616", "0.47218686", "0.47094363", "0.46670148", "0.46227217", "0.46138683", "0.46137914", "0.46094483", "0.45680937", "0.45457038", "0.45333445", "0.45238152", "0.44960096", "0.4487615" ]
0.68919945
0
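As an illustrative aside (not part of the dataset row above): a minimal caller-side sketch of the import operation that this row's document implements. The PFX path, password, vault URL, and certificate name are placeholders; azure-identity is assumed for authentication.

# Usage sketch only; not dataset content. File path, password, vault URL, and
# certificate name are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        with open("certificate.pfx", "rb") as f:
            pfx_bytes = f.read()
        # Imports an externally created certificate; the PFX bytes include the private key.
        imported = await client.import_certificate(
            certificate_name="imported-cert",
            certificate_bytes=pfx_bytes,
            password="pfx-password",  # omit if the PFX has no password
        )
        print(imported.name)


asyncio.run(main())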
Gets the policy for a certificate. Requires certificates/get permission. Returns the specified certificate policy resources in the key vault.
async def get_certificate_policy(self, certificate_name: str, **kwargs) -> CertificatePolicy:
    bundle = await self._client.get_certificate_policy(
        vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs
    )
    return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_certificate_policy_request(self, vault_name: str, certificate_name: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}/policy'\n response = self.http_request(\n 'GET', full_url=url, resource=self.get_vault_resource())\n\n return response", "def get_certificate_policy_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n certificate_name = args['certificate_name']\n response = client.get_certificate_policy_request(\n vault_name, certificate_name)\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['CertificateName'] = certificate_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Policy Information',\n outputs,\n ['id', 'key_props', 'secret_props',\n 'x509_props', 'issuer', 'attributes'],\n removeNull=True, headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.CertificatePolicy',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output\n )\n\n return command_results", "def get_certificate_request(self, vault_name: str,\n certificate_name: str,\n certificate_version: str) -> dict[str, Any]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates/{certificate_name}'\n if certificate_version:\n url = url + f'/{certificate_version}'\n response = self.http_request(\n 'GET', full_url=url,\n resource=self.get_vault_resource())\n\n return response", "def get_policies():\r\n policy = policies.values()\r\n return policy", "def GetPolicies(self):\n policy = {}\n if json is None:\n logging.error('No JSON module, cannot parse policy information')\n else :\n try:\n policy = json.loads(open(self.policy_path).read(), strict=False)\n except IOError:\n logging.error('Failed to load policies from %s' % self.policy_path)\n return policy", "def get_sp_policy(self, context, id):\n # handling policy method in RPC\n response = self.dns_manager.get_sp_policy(context, id)\n return response", "async def get_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.get_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_version=\"\",\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def get_certificates_by_pcc(conn: dict, id: str) -> dict:\n return get(conn, f\"{S3PCCS}/{id}/certificates\")", "def storage_policies(self, **kwargs):\n self.logger.debug(f\"Get storage policies data\")\n url_path = 'storage/policies'\n body = self._make_body(kwargs)\n return self._common_get(request_path=url_path, parameters=body)", "def get_certificate(self, cert_id):\r\n return self.ssl.getObject(id=cert_id)", "def read(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.debug(\"Reading the policy: %s\", address)\n response = self.vault.requests_request(\n \"GET\", address, headers=self.vault.token_header\n )\n policy_details = response.json()[\"data\"][\"policy\"]\n return policy_details", "def GetPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('GetPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def list_policies(self):\n client = self.connect(VAULT_TOKEN)\n return 
client.list_policies()", "def get(self, cache_id):\n return self.certificates.get(cache_id)", "def list_policies(policystore_url, verbose):\n\n if verbose:\n logging.info('Listing policies')\n\n list_url = policystore_url + POLICYSTORE_PREFIX + 'ListEntitlementPolicies'\n\n r = requests.post(list_url, headers=headers(), json={})\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n sys.exit('Failed to list policies')\n\n logging.info('SUCCESS: Listed policies')\n\n resp = r.json()\n\n if verbose:\n logging.info('Policies retrieved')\n pprint.pprint(resp)\n\n return resp", "def get_policy(client, policy_name):\n response = client.describe_firewall_policy(\n FirewallPolicyName=policy_name,\n )\n return response", "def get_policy_by_id(self, id):\n for service, policy_list in self.remote_store.get_policy_list().items():\n for policy in policy_list:\n if policy.id == id:\n return policy", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None) -> 'Certificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _CertificateState.__new__(_CertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"certificate_name\"] = certificate_name\n __props__.__dict__[\"domain\"] = domain\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"private_key\"] = private_key\n return Certificate(resource_name, opts=opts, __props__=__props__)", "def rbac_policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_rbac_policy(\n policy_id, **kwargs).get('rbac_policy')\n return RBACPolicy(policy)", "def policy(self) -> pulumi.Output['outputs.ServicePolicy']:\n return pulumi.get(self, \"policy\")", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None) -> 'SSLCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SSLCertificateState.__new__(_SSLCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"creation_timestamp\"] = creation_timestamp\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expire_time\"] = expire_time\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"self_link\"] = self_link\n return SSLCertificate(resource_name, opts=opts, 
__props__=__props__)", "def policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_qos_policy(\n policy_id, **kwargs).get('policy')\n return QoSPolicy(policy)", "def list_policies(profile=None, api_key=None):\n return salt.utils.pagerduty.list_items(\n \"escalation_policies\",\n \"id\",\n __salt__[\"config.option\"](profile),\n api_key,\n opts=__opts__,\n )", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> pulumi.Output[Optional[Sequence['outputs.ServiceCertificate']]]:\n return pulumi.get(self, \"certificates\")", "def get_key_ring_policy(project_id, location_id, key_ring_id):\n\n # Creates an API client for the KMS API.\n kms_client = googleapiclient.discovery.build('cloudkms', 'v1')\n\n # The resource name of the KeyRing.\n parent = 'projects/{}/locations/{}/keyRings/{}'.format(\n project_id, location_id, key_ring_id)\n\n # Get the current IAM policy.\n request = kms_client.projects().locations().keyRings().getIamPolicy(\n resource=parent)\n response = request.execute()\n\n if 'bindings' in response.keys():\n print('Printing IAM policy for resource {}:'.format(parent))\n for binding in response['bindings']:\n print('')\n print('Role: {}'.format(binding['role']))\n print('Members:')\n for member in binding['members']:\n print(member)\n print('')\n else:\n print('No roles found for resource {}.'.format(parent))", "def list_policy(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_policy\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/policies'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1PolicyList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_certificate_from_arn(self, certificate_arn):\n with stats.timer('get_certificate_from_arn'):\n 
client = confidant.clients.get_boto_client('acm-pca')\n # When a certificate is issued, it may take a while before it's\n # available via get_certificate. We need to keep retrying until it's\n # fully issued.\n i = 0\n while True:\n try:\n response = client.get_certificate(\n CertificateAuthorityArn=self.settings['arn'],\n CertificateArn=certificate_arn,\n )\n break\n except client.exceptions.RequestInProgressException:\n # Sleep for a maximum of 10 seconds\n if i >= 50:\n raise\n logger.debug(\n 'Sleeping in get_certificate_from_arn for {}'.format(\n certificate_arn,\n )\n )\n time.sleep(.200)\n i = i + 1\n return {\n 'certificate': response['Certificate'],\n 'certificate_chain': response['CertificateChain'],\n }", "async def update_certificate_policy(\n self, certificate_name: str, policy: CertificatePolicy, **kwargs\n ) -> CertificatePolicy:\n bundle = await self._client.update_certificate_policy(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n certificate_policy=policy._to_certificate_policy_bundle(),\n **kwargs\n )\n return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)" ]
[ "0.77671975", "0.6953911", "0.5839124", "0.5742078", "0.5690433", "0.5651099", "0.5582334", "0.5481366", "0.5472438", "0.5401795", "0.53769606", "0.53623295", "0.53264284", "0.53199345", "0.5318054", "0.5287591", "0.5262056", "0.52577", "0.5184184", "0.5177227", "0.5166596", "0.51545274", "0.5151201", "0.5137365", "0.5137365", "0.5133115", "0.5124548", "0.51232946", "0.5113483", "0.51027244" ]
0.7358483
1
Updates the policy for a certificate. Requires certificates/update permission. Set specified members in the certificate policy. Leaves others as null.
async def update_certificate_policy( self, certificate_name: str, policy: CertificatePolicy, **kwargs ) -> CertificatePolicy: bundle = await self._client.update_certificate_policy( vault_base_url=self.vault_url, certificate_name=certificate_name, certificate_policy=policy._to_certificate_policy_bundle(), **kwargs ) return CertificatePolicy._from_certificate_policy_bundle(certificate_policy_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_policy(self, *args, **kwargs):\r\n pass", "def device_update_policy(self, device_ids, policy_id):\n return self._device_action(device_ids, \"UPDATE_POLICY\", {\"policy_id\": policy_id})", "def UpdatePolicy(self, request, global_params=None):\n config = self.GetMethodConfig('UpdatePolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def update_policy(self):\n pass", "def update_service_access_policies(DomainName=None, AccessPolicies=None):\n pass", "def Update(self,\n fp_id=None,\n only_generate_request=False,\n firewall_policy=None,\n batch_mode=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Patch(\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)[2])\n return self.WaitOperation(\n op_res, message='Updating the organization firewall policy.')", "def device_update_policy(self, device_update_policy):\n\n self._device_update_policy = device_update_policy", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def update_policy(self, policy, inverse_policy=None):\n self.make_T_policy_matrix(policy)\n self.inverse_dynamics_by_time = dict()\n self.policy = policy\n self.inverse_policy = inverse_policy", "def update_policy(self, policy_id, update_policy_details, **kwargs):\n resource_path = \"/policies/{policyId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_policy got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"policyId\": policy_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_policy_details,\n response_type=\"Policy\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_policy_details,\n response_type=\"Policy\")", "def policies(self, policies):\n\n self._policies = policies", "def update_Policy(self,inputpolicy):\n \n policyob = self.SD_Map.retrieve_ob(inputpolicy)\n policyob.values[-1] = self.PolicyDicts[inputpolicy][self.translate(self.policy_option_vars[inputpolicy].get(),\n input_language = self.language,\n output_language = 'english')]", "def update_apic(self):\n return self.client.policy.update(policyList=self.policy_list.response)", "def 
test_update_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate updated successfully')\n assert response.status_code == 200", "def test_update_ipsecpolicy(self):\r\n resource = 'ipsecpolicy'\r\n cmd = ipsecpolicy.UpdateIPsecPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def test_update_certificate_name_restrictions(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1',\n data=json.dumps(update_certificate_name_restrictions),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate name')\n assert response.status_code == 400", "def update_authentication_policy(self, compartment_id, update_authentication_policy_details, **kwargs):\n resource_path = \"/authenticationPolicies/{compartmentId}\"\n method = \"PUT\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",\n \"if_match\"\n ]\n extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]\n if extra_kwargs:\n raise ValueError(\n \"update_authentication_policy got unknown kwargs: {!r}\".format(extra_kwargs))\n\n path_params = {\n \"compartmentId\": compartment_id\n }\n\n path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}\n\n for (k, v) in six.iteritems(path_params):\n if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):\n raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))\n\n header_params = {\n \"accept\": \"application/json\",\n \"content-type\": \"application/json\",\n \"if-match\": kwargs.get(\"if_match\", missing)\n }\n header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}\n\n retry_strategy = self.retry_strategy\n if kwargs.get('retry_strategy'):\n retry_strategy = kwargs.get('retry_strategy')\n\n if retry_strategy:\n return retry_strategy.make_retrying_call(\n self.base_client.call_api,\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_authentication_policy_details,\n response_type=\"AuthenticationPolicy\")\n else:\n return self.base_client.call_api(\n resource_path=resource_path,\n method=method,\n path_params=path_params,\n header_params=header_params,\n body=update_authentication_policy_details,\n response_type=\"AuthenticationPolicy\")", "def update_policy(ranger_url, policy_id, policy_data, admin_username_password):\n\n url = format(\"{ranger_url}/service/public/v2/api/policy/{policy_id}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url, json.dumps(policy_data))\n request.get_method = lambda: 'PUT'\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n 
request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n if response_code == 200:\n Logger.info(format(\"Successfully updated policy in Ranger Admin\"))\n return response_code\n else:\n Logger.error(format(\"Unable to update policy in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while updating policy Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while updating policy. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while updating policy\")\n except Exception as err:\n raise Fail(format(\"Error while updating policy. Reason = {err}\"))", "def update_access_policy_request(self, subscription_id: str, resource_group_name: str,\n vault_name: str, operation_kind: str, object_id: str,\n keys: list[str], secrets: list[str], certificates: list[str],\n storage: list[str]) -> dict[str, Any]:\n permissions = self.config_vault_permission(\n keys, secrets, certificates, storage)\n data = {\"properties\": {\"accessPolicies\": [\n {\"objectId\": object_id, \"permissions\": permissions, \"tenantId\": self.ms_client.tenant_id}]}}\n full_url = urljoin(self.azure_cloud.endpoints.resource_manager, f'subscriptions/{subscription_id}/resourceGroups/'\n f'{resource_group_name}/providers/Microsoft.KeyVault/vaults/'\n f'{vault_name}/accessPolicies/{operation_kind}')\n\n return self.http_request('PUT', full_url=full_url, data=data, ok_codes=[200, 201])", "def policy_update_fn(self, data: Dict[str, Any], result: Dict[str, Any]) -> None:", "def set_policy (self, policy = None, args = (), policy_cleanup = None):\n if policy == self.policy:\n # same policy; might want to change args/cleanup function, though\n self._policy_args = args\n if policy is not None and not isinstance(policy, basestring):\n self._policy_cleanup = policy_cleanup\n return\n # perform cleanup for current policy, if any\n if isinstance(self.policy, basestring):\n # built-in\n try:\n POLICY_CLEANUP[self.policy](self)\n except AttributeError:\n pass\n elif self.policy is not None and self._policy_cleanup is not None:\n # custom\n self._policy_cleanup(self)\n del self._policy_cleanup\n # set new policy\n self.policy = policy\n if policy is None:\n # if disabling scrolling, clean up some attributes we won't need\n try:\n del self._scroll_fn, self._policy_args\n except AttributeError:\n pass\n else:\n self._policy_args = args if args else ()\n if isinstance(policy, basestring):\n # built-in\n self._scroll_fn = POLICY_SCROLL[policy]\n else:\n # custom\n self._scroll_fn = policy\n self._policy_cleanup = policy_cleanup", "def rbac_policy_update(request, policy_id, **kwargs):\n body = {'rbac_policy': kwargs}\n rbac_policy = neutronclient(request).update_rbac_policy(\n policy_id, body=body).get('rbac_policy')\n return RBACPolicy(rbac_policy)", "def test_update_certificate_keys(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate_keys),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Invalid certificate_name key')\n assert response.status_code == 400", "def ModifyCertificate(self, request):\n try:\n params = request._serialize()\n 
headers = request.headers\n body = self.call(\"ModifyCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def test_update_firewall_policy(self):\r\n resource = 'firewall_policy'\r\n cmd = firewallpolicy.UpdateFirewallPolicy(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_s3_resources(memberAccountId=None, s3ResourcesUpdate=None):\n pass", "def edit_certificate(self, certificate):\r\n return self.ssl.editObject(certificate, id=certificate['id'])", "def _modify_schedule_policy_properties(self):\n request_json = {\n 'taskInfo':\n {\n 'taskOperation': 1,\n 'associations': self._associations,\n 'task': self._task_json,\n \"appGroup\":\n {\n \"appGroups\": self._app_groups if self._app_groups else [],\n },\n 'subTasks': self._subtasks\n }\n }\n\n flag, response = self._commcell_object._cvpysdk_object.make_request(\n 'PUT', self._MODIFY_SCHEDULE_POLICY, request_json\n )\n output = self._process_schedule_policy_update_response(flag, response)\n self.refresh()\n\n if output[0]:\n return\n\n o_str = 'Failed to update properties of Schedule Policy\\nError: \"{0}\"'\n raise SDKException('Schedules', '102', o_str.format(output[2]))", "def test_update_ikepolicy(self):\r\n resource = 'ikepolicy'\r\n cmd = ikepolicy.UpdateIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def Update(self,\n priority=None,\n firewall_policy=None,\n firewall_policy_rule=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.PatchRule(\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)[2])\n return self.WaitOperation(\n op_res, message='Updating a rule in the organization firewall policy.')" ]
[ "0.6495223", "0.6122082", "0.6114352", "0.6102968", "0.59703684", "0.5800063", "0.5687958", "0.56578624", "0.5580727", "0.54616994", "0.54585326", "0.5441063", "0.5394291", "0.535095", "0.53178614", "0.52896583", "0.5280764", "0.52681", "0.52563095", "0.52250296", "0.5208998", "0.5203099", "0.5188732", "0.51757646", "0.5175695", "0.5173492", "0.51228315", "0.50419635", "0.5002615", "0.49904612" ]
0.64556706
1
Back up a certificate in a protected form useable only by Azure Key Vault. Requires certificates/backup permission. This is intended to allow copying a certificate from one vault to another. Both vaults must be owned by the same Azure subscription. Also, backup / restore cannot be performed across geopolitical boundaries. For example, a backup from a vault in a USA region cannot be restored to a vault in an EU region.
async def backup_certificate(self, certificate_name: str, **kwargs) -> bytes: backup_result = await self._client.backup_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs ) return backup_result.value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def restore_certificate_backup(self, backup: bytes, **kwargs) -> KeyVaultCertificate:\n bundle = await self._client.restore_certificate(\n vault_base_url=self.vault_url,\n parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup),\n **kwargs\n )\n return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)", "def fullBackup(backupName, verify, doTheBackup = True):\n backup(backupName, full = True, verify = verify, verifyIncrementally = False, doTheBackup = doTheBackup)", "def test_backup_restore_with_credentials_env(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n output, error = self.backup_cluster()\n if output and not self._check_output(\"Backup completed successfully\", output):\n self.fail(\"Failed to run with password env %s \" % output)\n self.backup_cluster_validate(skip_backup=True)\n self.backup_list()\n self.backup_restore_validate()", "def backup(backupName, full, verify, verifyIncrementally = False, doTheBackup = True):\n testRestoreDir = localenv.backups.testRestoreDir\n backupDetails = localenv.backups.backups[backupName]\n backupMap = getBackupMap(backupName)\n BackupOperations.doBackup (backupDetails.source, backupMap, testRestoreDir, full = full, \n verify = verify, verifyIncrementally = verifyIncrementally, \n doTheBackup = doTheBackup, \n recordTrigger = localenv.backups.recordTrigger)", "def verify_election_partial_key_backup(\n verifier_id: GUARDIAN_ID,\n backup: ElectionPartialKeyBackup,\n auxiliary_key_pair: AuxiliaryKeyPair,\n decrypt: AuxiliaryDecrypt = rsa_decrypt,\n) -> ElectionPartialKeyVerification:\n\n decrypted_value = decrypt(backup.encrypted_value, auxiliary_key_pair.secret_key)\n if decrypted_value is None:\n return ElectionPartialKeyVerification(\n backup.owner_id, backup.designated_id, verifier_id, False\n )\n value = get_optional(hex_to_q(decrypted_value))\n return ElectionPartialKeyVerification(\n backup.owner_id,\n backup.designated_id,\n verifier_id,\n verify_polynomial_coordinate(\n value, backup.designated_sequence_order, backup.coefficient_commitments\n ),\n )", "def backupwallet(self, destination):\n return self.proxy.backupwallet(destination)", "def create_or_update_key_vault_request(self, subscription_id: str, resource_group_name: str,\n vault_name: str, object_id: str, location: str,\n sku_name: str,\n keys_permissions: list[str], secrets_permissions: list[str],\n certificates_permissions: list[str], storage_accounts: list[str],\n enabled_for_deployment: bool,\n enabled_for_disk_encryption: bool,\n enabled_for_template_deployment: bool,\n default_action: str, bypass: str, vnet_subnet_id: str,\n ignore_missing_vnet_service_endpoint: bool,\n ip_rules: list[str]) -> dict[str, Any]:\n # permissions property\n\n permissions = self.config_vault_permission(\n keys_permissions, secrets_permissions, certificates_permissions, storage_accounts)\n\n # network property\n network_acl = self.config_vault_network_acls(default_action, bypass, vnet_subnet_id,\n ignore_missing_vnet_service_endpoint, ip_rules)\n # private end point connection property\n\n properties = self.config_vault_properties(object_id, self.ms_client.tenant_id, enabled_for_deployment,\n enabled_for_disk_encryption,\n enabled_for_template_deployment, sku_name, permissions, network_acl)\n\n data = {\"location\": location, \"properties\": properties}\n\n full_url = 
urljoin(self.azure_cloud.endpoints.resource_manager, f'subscriptions/{subscription_id}/resourceGroups/'\n f'{resource_group_name}/providers/Microsoft.KeyVault/vaults/{vault_name}')\n\n return self.http_request('PUT', full_url=full_url, data=data, ok_codes=[200, 201])", "def EnsureBackup(temp_file, save_file):\n temp_file = temp_file.lower()\n save_file = save_file.lower()\n bakFile = save_file + '.bak'\n sameFileData = filecmp.cmp(temp_file, save_file, shallow=0)\n if not sameFileData:\n shutil.copyfile(save_file, bakFile)\n shutil.copyfile(temp_file, save_file)\n os.remove(temp_file)", "def test_restore_from_compacted_backup(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self.backup_compact()\n self.backup_restore_validate()", "def create_encrypted_backup(self, source, destination):\n # Wipe an existing backup (if any).\n if os.path.isdir(destination):\n execute('rm', '--recursive', destination, sudo=True)\n # Create a new backup.\n exit_code, output = run_cli(\n '--crypto=%s' % CRYPTO_NAME,\n '--mount=%s' % MOUNT_POINT,\n '--disable-notifications',\n # We skip snapshot creation and rotation to minimize the number\n # of commands required in /etc/sudoers.d/rsync-system-backup.\n '--backup',\n source, destination,\n )\n assert exit_code == 0", "def backup(zoneFile, reverseZoneFile):\n # Backup the forward zone file if it exists\n if zoneFile is not None:\n with open(zoneFile, \"r\") as fd:\n zoneBackup = fd.readlines()\n\n with open(\"./zoneFile.bak\", \"w\") as fd:\n fd.writelines(zoneBackup)\n\n # Backup the reverse zone file if it exists\n if reverseZoneFile is not None:\n with open(reverseZoneFile, \"r\") as fd:\n reverseZoneBackup = fd.readlines()\n\n with open(\"./reverseFile.bak\", \"w\") as fd:\n fd.writelines(reverseZoneBackup)", "def create_or_update_key_vault_command(client: KeyVaultClient, args: dict[str, Any],\n params: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n object_id = args['object_id']\n\n location = args.get('location', 'westus')\n sku_name = args.get('sku_name', 'standard')\n\n # access policy arguments\n keys_permissions = argToList(args.get('keys', ['get', 'list', 'update', 'create', 'import',\n 'delete', 'recover', 'backup', 'restore']))\n\n secrets_permissions = argToList(args.get('secrets', ['get', 'list', 'set', 'delete', 'recover',\n 'backup', 'restore']))\n certificates_permissions = argToList(\n args.get('certificates', ['get', 'list', 'update', 'create', 'import', 'delete', 'recover',\n 'backup', 'restore',\n 'managecontacts', 'manageissuers', 'getissuers', 'listissuers',\n 'setissuers', 'deleteissuers']))\n\n storage_accounts_permissions = argToList(\n args.get('storage', ['get', 'list', 'delete', 'set',\n 'update', 'regeneratekey',\n 'getsas', 'listsas']))\n\n enabled_for_deployment = argToBoolean(\n args.get('enabled_for_deployment', True))\n enabled_for_disk_encryption = argToBoolean(\n args.get('enabled_for_disk_encryption', True))\n enabled_for_template_deployment = argToBoolean(args.get(\n 'enabled_for_template_deployment', True))\n\n # network acl arguments\n default_action = args.get('default_action', '')\n bypass = args.get('bypass', '')\n vnet_subnet_id = args.get('vnet_subnet_id', '')\n ignore_missing_vnet_service_endpoint = argToBoolean(\n args.get('ignore_missing_vnet_service_endpoint', True))\n ip_rules = argToList(args.get('ip_rules'))\n # 
subscription_id and resource_group_name arguments can be passed as command arguments or as configuration parameters,\n # if both are passed as arguments, the command arguments will be used.\n subscription_id = get_from_args_or_params(params=params, args=args, key='subscription_id')\n resource_group_name = get_from_args_or_params(params=params, args=args, key='resource_group_name')\n\n response = client.create_or_update_key_vault_request(subscription_id, resource_group_name,\n vault_name, object_id, location, sku_name, keys_permissions,\n secrets_permissions, certificates_permissions,\n storage_accounts_permissions, enabled_for_deployment,\n enabled_for_disk_encryption, enabled_for_template_deployment,\n default_action, bypass, vnet_subnet_id,\n ignore_missing_vnet_service_endpoint, ip_rules)\n\n readable_output = tableToMarkdown(f'{vault_name} Information',\n response,\n ['id', 'name', 'type', 'location'], removeNull=True,\n headerTransform=string_to_table_header)\n\n return CommandResults(\n outputs_prefix='AzureKeyVault.KeyVault',\n outputs_key_field='id',\n outputs=response,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )", "def test_backup_restore_with_optional_flags(self):\n self.log.info(\"Load 1st batch docs\")\n create_gen1 = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size,\n end=self.num_items)\n self._load_all_buckets(self.master, create_gen1, \"create\", 0)\n self.backup_create()\n verify_data = True\n output, error = self.backup_cluster()\n if self.backupset.secure_conn:\n if self.backupset.bk_no_cert:\n if self._check_output(\"Backup completed successfully\", output):\n self.fail(\"Taking cluster backup failed.\")\n elif self._check_output(\"Error\", output):\n verify_data = False\n else:\n if not self._check_output(\"Backup completed successfully\", output):\n self.fail(\"Taking cluster backup failed.\")\n\n if verify_data:\n self.validate_backup_data(self.backupset.backup_host,\n self.servers[:self.nodes_init],\n \"ent-backup\", False, False, \"memory\",\n self.num_items, None)\n if self.do_restore:\n self.log.info(\"Restore with secure connection\")\n self.backup_restore()", "def test_backup_compact(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self.backup_compact_validate()", "def backup(isamAppliance, check_mode=False, force=False):\n if check_mode is True:\n return isamAppliance.create_return_object(changed=True)\n else:\n return isamAppliance.invoke_put(\"Creating a backup of the active partition\",\n \"/firmware_settings/kickoff_backup\", {}, requires_model=requires_model)", "def backup_cloudformation_temlates(self, template, region='eu-west-2'):\n regions = {\n 'eu-west-1': 'euw1',\n 'eu-west-2': 'euw2'\n }\n backup_bucket = 'cft-bucket-{}'.format(regions[region])\n bucket_name = self.get_bucket_from_cloudformation_template(template)\n key = '{}/{}.json'.format(region, bucket_name)\n\n if template != {}:\n response = self.client.put_object(\n Body=json.dumps(template),\n Bucket=backup_bucket,\n Key=key\n )\n return response", "def _unlock_singlepass_encrypted_disk_fallback(source_vm, resource_group_name, repair_vm_name, repair_group_name, copy_disk_name, is_linux):\n\n # Installs the extension on repair VM and mounts the disk after unlocking.\n encryption_type, key_vault, kekurl, secreturl = _fetch_encryption_settings(source_vm)\n if is_linux:\n 
volume_type = 'DATA'\n else:\n volume_type = 'ALL'\n\n try:\n if encryption_type is Encryption.SINGLE_WITH_KEK:\n install_ade_extension_command = 'az vm encryption enable --disk-encryption-keyvault {vault} --name {repair} --resource-group {g} --key-encryption-key {kek_url} --volume-type {volume}' \\\n .format(g=repair_group_name, repair=repair_vm_name, vault=key_vault, kek_url=kekurl, volume=volume_type)\n elif encryption_type is Encryption.SINGLE_WITHOUT_KEK:\n install_ade_extension_command = 'az vm encryption enable --disk-encryption-keyvault {vault} --name {repair} --resource-group {g} --volume-type {volume}' \\\n .format(g=repair_group_name, repair=repair_vm_name, vault=key_vault, volume=volume_type)\n # Add format-all flag for linux vms\n if is_linux:\n install_ade_extension_command += \" --encrypt-format-all\"\n logger.info('Unlocking attached copied disk...')\n _call_az_command(install_ade_extension_command)\n # Linux VM encryption extension has a bug and we need to manually unlock and mount its disk\n if is_linux:\n # Validating secret tag and setting original tag if it got changed\n _secret_tag_check(resource_group_name, copy_disk_name, secreturl)\n logger.debug(\"Manually unlocking and mounting disk for Linux VMs.\")\n _unlock_mount_linux_encrypted_disk(repair_vm_name, repair_group_name)\n except AzCommandError as azCommandError:\n error_message = str(azCommandError)\n # Linux VM encryption extension bug where it fails and then continue to mount disk manually\n if is_linux and \"Failed to encrypt data volumes with error\" in error_message:\n logger.debug(\"Expected bug for linux VMs. Ignoring error.\")\n # Validating secret tag and setting original tag if it got changed\n _secret_tag_check(resource_group_name, copy_disk_name, secreturl)\n _unlock_mount_linux_encrypted_disk(repair_vm_name, repair_group_name)\n else:\n raise", "def _acme_revoke(self, cert):\n # XXX | pylint: disable=unused-variable\n\n # pylint: disable=protected-access\n certificate = jose_util.ComparableX509(cert._cert)\n try:\n with open(cert.backup_key_path, \"rU\") as backup_key_file:\n key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, backup_key_file.read())\n # If the key file doesn't exist... 
or is corrupted\n except OpenSSL.crypto.Error as error:\n logger.debug(error, exc_info=True)\n raise errors.RevokerError(\n \"Corrupted backup key file: %s\" % cert.backup_key_path)\n\n return self.acme.revoke(cert=None) # XXX", "def test_backup_only(self):\n # Check that by default a backup is performed and a snapshot is created.\n with TemporaryDirectory() as temporary_directory:\n source = os.path.join(temporary_directory, 'source')\n destination = os.path.join(temporary_directory, 'destination')\n latest_directory = os.path.join(destination, 'latest')\n # Create a source for testing.\n self.create_source(source)\n # Run the program through the command line interface.\n exit_code, output = run_cli(\n '--backup', '--no-sudo',\n '--disable-notifications',\n source, latest_directory,\n )\n assert exit_code == 0\n # Make sure the backup was created.\n self.verify_destination(latest_directory)\n # Make sure no snapshot was created.\n assert len(find_snapshots(destination)) == 0", "def test_csi_volumesnapshot_restore_existing_backup(set_random_backupstore, # NOQA\n client, # NOQA\n core_api, # NOQA\n volume_name, # NOQA\n csi_pv, # NOQA\n pvc, # NOQA\n pod_make, # NOQA\n volumesnapshotclass, # NOQA\n volumesnapshotcontent,\n volumesnapshot, # NOQA\n volsnapshotclass_delete_policy, # NOQA\n backup_is_deleted): # NOQA\n csisnapclass = \\\n volumesnapshotclass(name=\"snapshotclass\",\n deletepolicy=volsnapshotclass_delete_policy)\n\n pod_name, pv_name, pvc_name, md5sum = \\\n prepare_pod_with_data_in_mb(client, core_api,\n csi_pv, pvc, pod_make,\n volume_name,\n data_path=\"/data/test\")\n\n volume = client.by_id_volume(volume_name)\n snap = create_snapshot(client, volume_name)\n volume.snapshotBackup(name=snap.name)\n wait_for_backup_completion(client, volume_name, snap.name)\n bv, b = find_backup(client, volume_name, snap.name)\n\n csivolsnap_name = volume_name + \"-volumesnapshot\"\n csivolsnap_namespace = \"default\"\n\n volsnapcontent = \\\n volumesnapshotcontent(\"volsnapcontent\",\n csisnapclass[\"metadata\"][\"name\"],\n \"Delete\",\n \"bs://\" + volume_name + \"/\" + b.name,\n csivolsnap_name,\n csivolsnap_namespace)\n\n csivolsnap = volumesnapshot(csivolsnap_name,\n csivolsnap_namespace,\n csisnapclass[\"metadata\"][\"name\"],\n \"volumeSnapshotContentName\",\n volsnapcontent[\"metadata\"][\"name\"])\n\n restore_pvc_name = pvc[\"metadata\"][\"name\"] + \"-restore\"\n restore_pvc_size = pvc[\"spec\"][\"resources\"][\"requests\"][\"storage\"]\n\n restore_csi_volume_snapshot(core_api,\n client,\n csivolsnap,\n restore_pvc_name,\n restore_pvc_size)\n\n restore_pod = pod_make()\n restore_pod_name = restore_pod[\"metadata\"][\"name\"]\n restore_pod['spec']['volumes'] = [create_pvc_spec(restore_pvc_name)]\n\n create_and_wait_pod(core_api, restore_pod)\n restore_md5sum = \\\n get_pod_data_md5sum(core_api, restore_pod_name, path=\"/data/test\")\n\n assert restore_md5sum == md5sum\n\n # Delete volumeSnapshot test\n delete_volumesnapshot(csivolsnap[\"metadata\"][\"name\"], \"default\")\n\n if backup_is_deleted is False:\n find_backup(client, volume_name, b[\"snapshotName\"])\n else:\n wait_for_backup_delete(client, volume_name, b[\"name\"])", "def makeBackup(self):\n #--File Path\n original = self.path\n #--Backup\n backup = self.path+'.bak'\n shutil.copy(original,backup)\n #--First backup\n firstBackup = self.path+'.baf'\n if not os.path.exists(firstBackup):\n shutil.copy(original,firstBackup)", "def _copy_files(cls, backup_dir, idx, cert_path, key_path):\n shutil.copy2(cert_path, 
cls._get_backup(backup_dir, idx, cert_path))\n shutil.copy2(key_path, cls._get_backup(backup_dir, idx, key_path))", "def backup():\n backup_shift(os, config.utils.tasks.backup_depth)\n if config.utils.tasks.secret_key is None:\n shutil.copyfile(config.core.database_name, config.core.database_name+'.1')\n else:\n data = get_encrypted_database()\n with open(config.core.database_name+'.1', 'wb') as f:\n f.write(data)", "def test_backup_restore_with_deletes(self):\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self._load_all_buckets(self.master, gen, \"delete\", 0)\n self.backup_restore_validate(compare_uuid=False, seqno_compare_function=\"<=\")", "def test_restore_with_rbac(self):\n all_buckets = self.input.param(\"all_buckets\", False)\n self.log.info(\"Copy backup dataset to tmp dir\")\n shell = RemoteMachineShellConnection(self.backupset.backup_host)\n\n # Since we are just wiping out the archive here, we can just run the object store teardown\n if self.objstore_provider:\n self.objstore_provider.teardown(shell.extract_remote_info().type.lower(), shell)\n else:\n shell.execute_command(\"rm -rf {0} \".format(self.backupset.directory))\n shell.execute_command(\"rm -rf {0} \".format(self.backupset.directory.split(\"_\")[0]))\n\n backup_file = ENT_BKRS\n backup_dir_found = False\n backup_dir = \"entbackup_{0}\".format(self.master.ip)\n output, error = shell.execute_command(\"ls | grep entbackup\")\n self.log.info(\"check if %s dir exists on this server \" % backup_dir)\n if output:\n for x in output:\n if x == backup_dir:\n backup_dir_found = True\n if not backup_dir_found:\n self.log.info(\"%s dir does not exist on this server. Downloading.. 
\"\n % backup_dir)\n shell.execute_command(\"{0} -q {1} --no-check-certificate -O {2}.tgz \"\n .format(self.wget, backup_file, backup_dir))\n shell.execute_command(\"tar -zxvf {0}.tgz \".format(backup_dir))\n shell.execute_command(\"mv {0} {1}\".format(backup_dir.split(\"_\")[0], backup_dir))\n if \"-\" in self.cluster_new_role:\n self.cluster_new_role = self.cluster_new_role.replace(\"-\", \",\")\n if self.objstore_provider and self.objstore_provider.schema_prefix() == \"s3://\":\n command = \"\"\n if self.backupset.objstore_region or self.backupset.objstore_access_key_id or self.backupset.objstore_secret_access_key:\n command += \"env\"\n if self.backupset.objstore_region:\n command += f\" AWS_REGION={self.backupset.objstore_region}\"\n if self.backupset.objstore_access_key_id:\n command += f\" AWS_ACCESS_KEY_ID={self.backupset.objstore_access_key_id}\"\n if self.backupset.objstore_secret_access_key:\n command += f\" AWS_SECRET_ACCESS_KEY={self.backupset.objstore_secret_access_key}\"\n\n command += \" aws\"\n\n if self.backupset.objstore_endpoint:\n command += f\" --endpoint={self.backupset.objstore_endpoint}\"\n\n command += f\" s3 sync entbackup_{self.master.ip} s3://{self.backupset.objstore_bucket}/{self.backupset.directory}\"\n\n _, error = shell.execute_command(command, debug=False) # Contains senstive info so don't log\n if error:\n self.fail(f\"Failed to sync backup to S3: {error}\")\n else:\n shell.execute_command(\"cp -r entbackup_{0}/ {1}/entbackup_{0}\"\\\n .format(self.master.ip, self.tmp_path))\n status, _, message = self.backup_list()\n if not status:\n self.fail(message)\n\n self.log.info(\"Restore data from backup files\")\n\n if all_buckets:\n if \"bucket_full_access\" in self.cluster_new_role and \\\n \"bucket_full_access[*]\" not in self.cluster_new_role:\n self.cluster_new_role = self.cluster_new_role.replace(\"bucket_full_access\",\n \"bucket_full_access[*]\")\n else:\n self.cluster_new_role = self.cluster_new_role + \"[*]\"\n if \"data_backup\" in self.cluster_new_role and \\\n \"data_backup[*]\" not in self.cluster_new_role:\n self.cluster_new_role = self.cluster_new_role.replace(\"data_backup\",\n \"data_backup[*]\")\n if \"fts_admin\" in self.cluster_new_role and \\\n \"fts_admin[*]\" not in self.cluster_new_role:\n self.cluster_new_role = self.cluster_new_role.replace(\"fts_admin\",\n \"fts_admin[*]\")\n admin_roles = [\"cluster_admin\", \"eventing_admin\"]\n for role in admin_roles:\n if role in self.cluster_new_role:\n self.cluster_new_role = self.cluster_new_role.replace(role + \"[*]\", role)\n\n self.log.info(\"\\n***** Create new user: %s with role: %s to do backup *****\"\n % (self.cluster_new_user, self.cluster_new_role))\n testuser = [{\"id\": \"%s\" % self.cluster_new_user,\n \"name\": \"%s\" % self.cluster_new_user,\n \"password\": \"password\"}]\n rolelist = [{\"id\": \"%s\" % self.cluster_new_user,\n \"name\": \"%s\" % self.cluster_new_user,\n \"roles\": \"%s\" % self.cluster_new_role}]\n try:\n status = self.add_built_in_server_user(testuser, rolelist)\n if not status:\n self.fail(\"Fail to add user: %s with role: %s \" \\\n % (self.cluster_new_user,\n self.cluster_new_role))\n\n users_can_restore_all = [\"admin\", \"bucket_full_access[*]\",\n \"data_backup[*]\", \"eventing_admin\"]\n users_can_not_restore_all = [\"views_admin[*]\", \"ro_admin\",\n \"replication_admin\", \"data_monitoring[*]\",\n \"data_writer[*]\", \"data_reader[*]\",\n \"data_dcp_reader[*]\", \"fts_searcher[*]\",\n \"fts_admin[*]\", \"query_manage_index[*]\",\n 
\"replication_target[*]\", \"cluster_admin\",\n \"bucket_admin[*]\"]\n if self.cluster_new_role in users_can_not_restore_all:\n self.should_fail = True\n output, error = self.backup_restore()\n rest_rs = RestConnection(self.backupset.restore_cluster_host)\n eventing_service_in = False\n rs_cluster_services = list(rest_rs.get_nodes_services().values())\n for srv in rs_cluster_services:\n if \"eventing\" in srv:\n eventing_service_in = True\n eventing_err = \"User needs one of the following permissions: cluster.eventing\"\n if eventing_service_in and self._check_output(eventing_err, output) and \\\n (\"admin\" not in self.cluster_new_role or \\\n \"eventing_admin\" not in self.cluster_new_role):\n self.log.info(\"Only admin role could backup eventing service\")\n return\n success_msg = 'Restore completed successfully'\n fail_msg = \"Error restoring cluster:\"\n\n failed_persisted_bucket = []\n ready = RebalanceHelper.wait_for_stats_on_all(self.backupset.cluster_host,\n \"default\", 'ep_queue_size',\n 0, timeout_in_seconds=120)\n if not ready:\n failed_persisted_bucket.append(\"default\")\n if failed_persisted_bucket:\n self.fail(\"Buckets %s did not persisted.\" % failed_persisted_bucket)\n\n self.sleep(3)\n rest = RestConnection(self.master)\n actual_keys = rest.get_active_key_count(\"default\")\n print((\"\\nActual keys in default bucket: %s \\n\" % actual_keys))\n if self.cluster_new_role in users_can_restore_all:\n if not self._check_output(success_msg, output):\n self.fail(\"User with roles: %s failed to restore data.\\n\"\n \"Here is the output %s \" % \\\n (self.cluster_new_role, output))\n\n roles = []\n if \",\" in self.cluster_new_role:\n roles = self.cluster_new_role.split(\",\")\n if set(roles) & set(users_can_not_restore_all) and \\\n set(roles) & set(users_can_restore_all):\n if not self._check_output(success_msg, output):\n self.fail(\"User: %s failed to restore data with roles: %s. \" \\\n \"Here is the output %s \" % \\\n (self.cluster_new_user, roles, output))\n if int(actual_keys) != 10000:\n self.fail(\"User: %s failed to restore data with roles: %s. 
\" \\\n \"Here is the actual docs in bucket %s \" % \\\n (self.cluster_new_user, roles, actual_keys))\n elif self.cluster_new_role in users_can_not_restore_all:\n if int(actual_keys) == 1000:\n self.fail(\"User: %s with role: %s should not allow to restore data\" \\\n % (self.cluster_new_user,\n self.cluster_new_role))\n if not self._check_output(fail_msg, output):\n self.fail(\"cbbackupmgr failed to block user to restore\")\n finally:\n self.log.info(\"Delete new create user: %s \" % self.cluster_new_user)\n shell = RemoteMachineShellConnection(self.backupset.backup_host)\n curl_path = \"\"\n if self.os_name == \"windows\":\n curl_path = self.cli_command_location\n cmd = \"%scurl%s -g -X %s -u %s:%s http://%s:8091/settings/rbac/users/local/%s\" \\\n % (curl_path,\n self.cmd_ext,\n \"DELETE\",\n self.master.rest_username,\n self.master.rest_password,\n self.backupset.cluster_host.ip,\n self.cluster_new_user)\n output, error = shell.execute_command(cmd)\n shell.disconnect()", "def createBackup(self, filename):\n if (not os.path.isfile(filename + '.bak')) and os.path.isfile(filename):\n with open(filename + '.bak', 'wb') as bakf:\n with open(filename, 'rb') as oldf:\n bakf.write(oldf.read())\n print(filename + \" backed up\")", "def generate_election_partial_key_backup(\n owner_id: GUARDIAN_ID,\n polynomial: ElectionPolynomial,\n auxiliary_public_key: AuxiliaryPublicKey,\n encrypt: AuxiliaryEncrypt = rsa_encrypt,\n) -> Optional[ElectionPartialKeyBackup]:\n value = compute_polynomial_coordinate(\n auxiliary_public_key.sequence_order, polynomial\n )\n encrypted_value = encrypt(value.to_hex(), auxiliary_public_key.key)\n if encrypted_value is None:\n return None\n return ElectionPartialKeyBackup(\n owner_id,\n auxiliary_public_key.owner_id,\n auxiliary_public_key.sequence_order,\n encrypted_value,\n polynomial.coefficient_commitments,\n polynomial.coefficient_proofs,\n )", "def create_volume_backup(self, volume, name=None, description=None,\n container=None, check=True):\n cmd = 'cinder backup-create'\n if name:\n cmd += ' --name ' + name\n if description is not None:\n cmd += ' --description ' + moves.shlex_quote(description)\n if container:\n cmd += ' --container ' + container\n\n cmd += ' ' + volume.id\n\n exit_code, stdout, stderr = self.execute_command(\n cmd, timeout=config.BACKUP_AVAILABLE_TIMEOUT, check=check)\n\n backup_table = output_parser.table(stdout)\n backup = {key: value for key, value in backup_table['values']}\n\n return backup", "def create_certificate(self, subscription_id, management_host, hackathon_name):\n\n # make sure certificate dir exists\n if not os.path.isdir(self.CERT_BASE):\n self.log.debug('certificate dir not exists')\n os.mkdir(self.CERT_BASE)\n\n base_url = '%s/%s' % (self.CERT_BASE, subscription_id)\n\n pem_url = base_url + '.pem'\n # avoid duplicate pem generation\n if not os.path.isfile(pem_url):\n pem_command = 'openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout %s -out %s -batch' % \\\n (pem_url, pem_url)\n commands.getstatusoutput(pem_command)\n else:\n self.log.debug('%s exists' % pem_url)\n\n cert_url = base_url + '.cer'\n # avoid duplicate cert generation\n if not os.path.isfile(cert_url):\n cert_command = 'openssl x509 -inform pem -in %s -outform der -out %s' % (pem_url, cert_url)\n commands.getstatusoutput(cert_command)\n else:\n self.log.debug('%s exists' % cert_url)\n\n azure_key = self.db.find_first_object_by(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n # avoid 
duplicate azure key\n if azure_key is None:\n azure_key = self.db.add_object_kwargs(AzureKey,\n cert_url=cert_url,\n pem_url=pem_url,\n subscription_id=subscription_id,\n management_host=management_host)\n self.db.commit()\n else:\n self.log.debug('azure key exists')\n\n hackathon_id = self.db.find_first_object_by(Hackathon, name=hackathon_name).id\n hackathon_azure_key = self.db.find_first_object_by(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n # avoid duplicate hackathon azure key\n if hackathon_azure_key is None:\n self.db.add_object_kwargs(HackathonAzureKey,\n hackathon_id=hackathon_id,\n azure_key_id=azure_key.id)\n self.db.commit()\n else:\n self.log.debug('hackathon azure key exists')\n\n azure_cert_url = self.file_service.upload_file_to_azure_from_path(cert_url, self.CONTAINER_NAME,\n subscription_id + '.cer')\n azure_key.cert_url = azure_cert_url\n self.db.commit()\n return azure_cert_url", "def backup(self, backup, volume_file, backup_metadata=True):\n if self.chunk_size_bytes % self.sha_block_size_bytes:\n err = _('Chunk size is not multiple of '\n 'block size for creating hash.')\n raise exception.InvalidBackup(reason=err)\n\n # Read the shafile of the parent backup if backup['parent_id']\n # is given.\n parent_backup_shafile = None\n parent_backup = None\n if backup['parent_id']:\n parent_backup = self.db.backup_get(self.context,\n backup['parent_id'])\n parent_backup_shafile = self._read_sha256file(parent_backup)\n parent_backup_shalist = parent_backup_shafile['sha256s']\n if (parent_backup_shafile['chunk_size'] !=\n self.sha_block_size_bytes):\n err = (_('Hash block size has changed since the last '\n 'backup. New hash block size: %(new)s. Old hash '\n 'block size: %(old)s. Do a full backup.')\n % {'old': parent_backup_shafile['chunk_size'],\n 'new': self.sha_block_size_bytes})\n raise exception.InvalidBackup(reason=err)\n # If the volume size increased since the last backup, fail\n # the incremental backup and ask user to do a full backup.\n if backup['size'] > parent_backup['size']:\n err = _('Volume size increased since the last '\n 'backup. Do a full backup.')\n raise exception.InvalidBackup(reason=err)\n\n (object_meta, object_sha256, extra_metadata, container,\n volume_size_bytes) = self._prepare_backup(backup)\n\n counter = 0\n total_block_sent_num = 0\n\n # There are two mechanisms to send the progress notification.\n # 1. The notifications are periodically sent in a certain interval.\n # 2. 
The notifications are sent after a certain number of chunks.\n # Both of them are working simultaneously during the volume backup,\n # when swift is taken as the backup backend.\n def _notify_progress():\n self._send_progress_notification(self.context, backup,\n object_meta,\n total_block_sent_num,\n volume_size_bytes)\n timer = loopingcall.FixedIntervalLoopingCall(\n _notify_progress)\n if self.enable_progress_timer:\n timer.start(interval=self.backup_timer_interval)\n\n sha256_list = object_sha256['sha256s']\n shaindex = 0\n length_bytes = 0\n write_length_bytes = 0\n index = 1\n while True:\n data_offset = volume_file.tell()\n data = volume_file.read(self.chunk_size_bytes)\n if data == '':\n break\n\n # Calculate new shas with the datablock.\n shalist = []\n off = 0\n datalen = len(data)\n while off < datalen:\n chunk_start = off\n chunk_end = chunk_start + self.sha_block_size_bytes\n if chunk_end > datalen:\n chunk_end = datalen\n chunk = data[chunk_start:chunk_end]\n sha = hashlib.sha256(chunk).hexdigest()\n shalist.append(sha)\n off += self.sha_block_size_bytes\n sha256_list.extend(shalist)\n\n # If parent_backup is not None, that means an incremental\n # backup will be performed.\n if parent_backup:\n # Find the extent that needs to be backed up.\n extent_off = -1\n for idx, sha in enumerate(shalist):\n if sha != parent_backup_shalist[shaindex]:\n if extent_off == -1:\n # Start of new extent.\n extent_off = idx * self.sha_block_size_bytes\n else:\n if extent_off != -1:\n # We've reached the end of extent.\n extent_end = idx * self.sha_block_size_bytes\n segment = data[extent_off:extent_end]\n backup_bytes = self._backup_chunk(\n backup, container, segment,\n data_offset + extent_off,\n object_meta, extra_metadata)\n length_bytes += len(segment)\n write_length_bytes += backup_bytes\n extent_off = -1\n shaindex += 1\n\n # The last extent extends to the end of data buffer.\n if extent_off != -1:\n extent_end = datalen\n segment = data[extent_off:extent_end]\n backup_bytes = self._backup_chunk(\n backup, container, segment,\n data_offset + extent_off,\n object_meta, extra_metadata)\n length_bytes += len(segment)\n write_length_bytes += backup_bytes\n extent_off = -1\n else: # Do a full backup.\n backup_bytes = self._backup_chunk(\n backup, container, data, data_offset,\n object_meta, extra_metadata)\n length_bytes += len(data)\n write_length_bytes += backup_bytes\n\n # Notifications\n total_block_sent_num += self.data_block_num\n counter += 1\n if counter == self.data_block_num:\n # Send the notification to Ceilometer when the chunk\n # number reaches the data_block_num. 
The backup percentage\n # is put in the metadata as the extra information.\n self._send_progress_notification(self.context, backup,\n object_meta,\n total_block_sent_num,\n volume_size_bytes)\n # Reset the counter\n counter = 0\n\n LOG.debug(('Backup volume, '\n 'backup id: %(bk_id)s, volume id: %(vol_id)s, '\n 'chunk index: %(index)s, '\n 'total write size before '\n 'compression: %(length_bytes)s bytes, '\n 'total write size actually: %(w_length)s bytes.'),\n {'bk_id': backup['id'],\n 'vol_id': backup['volume_id'],\n 'index': index,\n 'length_bytes': length_bytes,\n 'w_length': write_length_bytes})\n index += 1\n\n # Stop the timer.\n timer.stop()\n # All the data have been sent, the backup_percent reaches 100.\n self._send_progress_end(self.context, backup, object_meta)\n\n object_sha256['sha256s'] = sha256_list\n if backup_metadata:\n try:\n self._backup_metadata(backup, object_meta)\n # Whatever goes wrong, we want to log, cleanup, and re-raise.\n except Exception as err:\n with excutils.save_and_reraise_exception():\n LOG.exception(_LE(\"Backup volume metadata failed: %s.\"),\n err)\n self.delete(backup)\n\n length_mb = length_bytes / units.Mi\n write_length_mb = (write_length_bytes + units.Mi - 1) / units.Mi\n\n self._finalize_backup(backup, container, object_meta, object_sha256)\n LOG.info(_LI('Backup volume, '\n 'backup id:%(bk_id)s, volume id:%(vol_id)s, '\n 'write size before compression: %(length)s MB, '\n 'write size actually: %(w_length)s MB.'),\n {'bk_id': backup['id'],\n 'vol_id': backup['volume_id'],\n 'length': length_mb,\n 'w_length': write_length_mb})\n return {'size': write_length_mb, 'container': container}" ]
[ "0.61585116", "0.5432173", "0.5318716", "0.5244657", "0.5137772", "0.50160456", "0.49782538", "0.4914477", "0.48900342", "0.4872436", "0.48656285", "0.48302194", "0.48298165", "0.48226115", "0.48034236", "0.4759593", "0.47491443", "0.4740311", "0.47066763", "0.46644107", "0.46530557", "0.46048656", "0.45764866", "0.45744735", "0.45737317", "0.4571531", "0.45652264", "0.45545322", "0.4543664", "0.45337513" ]
0.6646567
0
Restore a certificate backup to the vault. Requires certificates/restore permission. This restores all versions of the certificate, with its name, attributes, and access control policies. If the certificate's name is already in use, restoring it will fail. Also, the target vault must be owned by the same Microsoft Azure subscription as the source vault.
async def restore_certificate_backup(self, backup: bytes, **kwargs) -> KeyVaultCertificate:
    bundle = await self._client.restore_certificate(
        vault_base_url=self.vault_url,
        parameters=self._models.CertificateRestoreParameters(certificate_bundle_backup=backup),
        **kwargs
    )
    return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def backup_certificate(self, certificate_name: str, **kwargs) -> bytes:\n backup_result = await self._client.backup_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return backup_result.value", "def restore_backup(self, backup, name, flavor, volume):\n return self._manager.restore_backup(backup, name, flavor, volume)", "def restore_backup(self, backup_id, volume_id=None):\n aname = \"cinder_v%s.restore_backup\" % self.version\n with atomic.ActionTimer(self, aname):\n restore = self._get_client().restores.restore(backup_id, volume_id)\n restored_volume = self._get_client().volumes.get(restore.volume_id)\n return self._wait_available_volume(restored_volume)", "def full_vm_restore_out_of_place(self,\n vm_to_restore=None,\n cloud_service=None,\n storage_account=None,\n proxy_client=None,\n restore_new_name=None,\n overwrite=False,\n power_on=False,\n copy_precedence=0,\n restore_option=None):\n\n\n # restore options\n if restore_option is None:\n restore_option = {}\n\n # check input parameters are correct\n if bool(restore_option):\n if not (isinstance(overwrite, bool) and\n isinstance(power_on, bool)):\n raise SDKException('Subclient', '101') \n \n self._set_restore_inputs(\n restore_option,\n vm_to_restore=self._set_vm_to_restore(vm_to_restore),\n unconditional_overwrite=overwrite,\n power_on=power_on,\n copy_precedence=copy_precedence,\n volume_level_restore=1,\n esx_host=cloud_service,\n datastore=storage_account,\n client_name=proxy_client,\n out_place=True,\n restore_new_name=restore_new_name\n )\n\n # set attr for all the option in restore xml from user inputs\n\n request_json = self._prepare_fullvm_restore_json(restore_option)\n return self._process_restore_response(request_json)", "def RestoreFromBackup(self, request, global_params=None):\n config = self.GetMethodConfig('RestoreFromBackup')\n return self._RunMethod(\n config, request, global_params=global_params)", "def restore(self, oid, serial, data, version, prev_txn, transaction):\n assert not version\n self._check_trans(transaction, 'restore')\n self._async('restorea', oid, serial, data, prev_txn, id(transaction))", "async def recover_deleted_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n recovered_cert_bundle = await self._client.recover_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n recovered_certificate = KeyVaultCertificate._from_certificate_bundle(recovered_cert_bundle)\n\n command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n polling_method = AsyncDeleteRecoverPollingMethod(\n command=command, final_resource=recovered_certificate, finished=False, interval=polling_interval\n )\n await polling_method.run()\n\n return polling_method.resource()", "def restore(ctx, destination, filesystem, backup_time):\n config_path = ctx.obj['config_path']\n\n config = Config(config_path)\n job = config.jobs.get(filesystem)\n\n if job is None:\n print('Filesystem does not exist.')\n sys.exit(1)\n\n job.restore(backup_time, destination)\n\n print('Restore successful.')", "def RestoreVolumeSnapshot(self, request, global_params=None):\n config = self.GetMethodConfig('RestoreVolumeSnapshot')\n return self._RunMethod(\n config, request, global_params=global_params)", "def restore(\n self,\n name: str,\n version: Optional[str] = None,\n label: Optional[str] = 
None,\n **kwargs, # pylint:disable=unused-argument\n ) -> None:\n name = _preprocess_environment_name(name)\n _archive_or_restore(\n asset_operations=self,\n version_operation=self._version_operations,\n container_operation=self._containers_operations,\n is_archived=False,\n name=name,\n version=version,\n label=label,\n )", "def restore_from_snapshot(SnapshotId=None):\n pass", "def restore_object(Bucket=None, Key=None, VersionId=None, RestoreRequest=None, RequestPayer=None):\n pass", "def full_vm_restore_in_place(self,\n vm_to_restore=None,\n overwrite=True,\n power_on=True,\n copy_precedence=0):\n restore_option = {}\n # check mandatory input parameters are correct\n if not (isinstance(overwrite, bool) and\n isinstance(power_on, bool)):\n raise SDKException('Subclient', '101')\n # set attr for all the option in restore xml from user inputs\n self._set_restore_inputs(\n restore_option,\n vm_to_restore=self._set_vm_to_restore(vm_to_restore),\n unconditional_overwrite=overwrite,\n power_on=power_on,\n copy_preceedence=copy_precedence,\n volume_level_restore=1,\n out_place=False\n )\n request_json = self._prepare_fullvm_restore_json(restore_option)\n return self._process_restore_response(request_json)", "def restore_from_snapshot(self, volume_id, snapshot_id):\r\n self.iscsi_svc.restoreFromSnapshot(snapshot_id, id=volume_id)", "def restore_backup(self):\n print \"Restoring backup for database: %s\" % self.database['NAME']\n # Fetch the latest backup if filepath not specified\n if not self.filepath:\n print \" Finding latest backup\"\n filepaths = self.storage.list_directory()\n filepaths = self.dbcommands.filter_filepaths(filepaths, self.servername)\n if not filepaths:\n raise CommandError(\"No backup files found in: %s\" % self.storage.backup_dir())\n self.filepath = filepaths[-1]\n # Restore the specified filepath backup\n print \" Restoring: %s\" % self.filepath\n backupfile = self.storage.read_file(self.filepath)\n print \" Restore tempfile created: %s\" % utils.handle_size(backupfile)\n self.dbcommands.run_restore_commands(backupfile)", "def restore(self, backup_id):\n request = Request(\n method='post',\n endpoint='/_admin/backup/restore',\n data={'id': backup_id}\n )\n\n def response_handler(resp):\n if resp.is_success:\n return format_backup_restore(resp.body['result'])\n raise BackupRestoreError(resp, request)\n\n return self._execute(request, response_handler)", "def restore_vm_backup(self, sVmUuid, sBackupUuid, sTargetHost, nTargetPort, sTargetSessionId, sTargetVmHomePath = '', sTargetVmName = '', restore_flags = consts.PVMSL_LOW_SECURITY, reserved_flags = 0, force_operation = True):\n\t\treturn Job(SDK.PrlSrv_RestoreVmBackup(self.handle, sVmUuid, sBackupUuid, sTargetHost, nTargetPort, sTargetSessionId, sTargetVmHomePath, sTargetVmName, restore_flags, reserved_flags, force_operation)[0])", "def restore(self, restore):\n self._restore = restore", "def guest_file_restore(self, *args, **kwargs):\r\n if args and isinstance(args[0], dict):\r\n options = args[0]\r\n else:\r\n options = kwargs\r\n vm_name=options.get('vm_name', None)\r\n folder_to_restore=options.get('folder_to_restore',None)\r\n destination_client=options.get('destination_client',None)\r\n destination_path=options.get('destination_path',None)\r\n copy_precedence=options.get('copy_precedence',0)\r\n preserve_level=options.get('preserve_level',1)\r\n unconditional_overwrite=options.get('unconditional_overwrite',False)\r\n v2_indexing=options.get('v2_indexing',False)\r\n restore_ACL=options.get('restore_ACL',True)\r\n 
from_date=options.get('from_date',0)\r\n to_date=options.get('to_date',0)\r\n show_deleted_files=options.get('show_deleted_files',False)\r\n fbr_ma=options.get('fbr_ma',None)\r\n browse_ma=options.get('browse_ma',\"\")\r\n agentless=options.get('agentless',\"\")\r\n\r\n _vm_names, _vm_ids = self._get_vm_ids_and_names_dict_from_browse()\r\n _file_restore_option = {}\r\n\r\n # check if inputs are correct\r\n if not(isinstance(destination_path, basestring) and\r\n (isinstance(vm_name, basestring))):\r\n raise SDKException('Subclient', '105')\r\n\r\n if vm_name not in _vm_names:\r\n raise SDKException('Subclient', '111')\r\n\r\n # check if client name is correct\r\n if destination_client is None:\r\n destination_client = self._backupset_object._instance_object.co_ordinator\r\n\r\n if fbr_ma:\r\n _file_restore_option[\"proxy_client\"] = fbr_ma\r\n\r\n _file_restore_option[\"client\"] = destination_client\r\n _file_restore_option[\"destination_path\"] = destination_path\r\n\r\n # process the folder to restore for browse\r\n if isinstance(folder_to_restore, list):\r\n _folder_to_restore_list = folder_to_restore\r\n\r\n elif isinstance(folder_to_restore, basestring):\r\n _folder_to_restore_list = []\r\n _folder_to_restore_list.append(folder_to_restore)\r\n else:\r\n raise SDKException('Subclient', '105')\r\n\r\n _file_restore_option[\"paths\"] = []\r\n for _each_folder in _folder_to_restore_list:\r\n _file_restore_option[\"paths\"].append(\r\n self._check_folder_in_browse(_vm_ids[vm_name],\r\n \"%s\" % _each_folder,\r\n from_date,\r\n to_date,\r\n copy_precedence,\r\n media_agent=browse_ma))\r\n\r\n # set the browse options\r\n _file_restore_option[\"disk_browse\"] = False\r\n _file_restore_option[\"file_browse\"] = True\r\n\r\n # set the common file level restore options\r\n _file_restore_option[\"striplevel_type\"] = \"PRESERVE_LEVEL\"\r\n _file_restore_option[\"preserve_level\"] = preserve_level\r\n _file_restore_option[\"unconditional_overwrite\"] = unconditional_overwrite\r\n _file_restore_option[\"restore_ACL\"] = restore_ACL\r\n\r\n # set the browse option\r\n _file_restore_option[\"copy_precedence_applicable\"] = True\r\n _file_restore_option[\"copy_precedence\"] = copy_precedence\r\n _file_restore_option[\"media_agent\"] = browse_ma\r\n\r\n # set agentless options\r\n if agentless:\r\n _file_restore_option[\"server_name\"] = agentless['vserver']\r\n _file_restore_option[\"vm_guid\"] = agentless['vm_guid']\r\n _file_restore_option[\"vm_name\"] = agentless['vm_name']\r\n _file_restore_option[\"user_name\"] = agentless['vm_user']\r\n _file_restore_option[\"password\"] = agentless['vm_pass']\r\n _file_restore_option[\"agentless\"] = True\r\n\r\n # prepare and execute the Json\r\n request_json = self._prepare_filelevel_restore_json(_file_restore_option)\r\n\r\n if v2_indexing:\r\n\r\n _vmclient_obj = self._commcell_object.clients.get(vm_name)\r\n _vmagent_obj = _vmclient_obj.agents.get(self._agent_object._agent_name)\r\n _vminstance_obj = _vmagent_obj.instances.get('VMInstance')\r\n _vmbackupset_obj = _vminstance_obj.backupsets.get(\r\n self._backupset_object._backupset_name)\r\n _vmsub_obj = _vmbackupset_obj.subclients.get('default')\r\n\r\n request_json['taskInfo']['associations'][0]['clientName'] = vm_name\r\n request_json['taskInfo']['associations'][0]['clientId'] = \\\r\n _vmsub_obj._subClientEntity['clientId']\r\n request_json['taskInfo']['associations'][0]['instanceName'] = 'VMInstance'\r\n request_json['taskInfo']['associations'][0]['backupsetId'] = \\\r\n 
_vmsub_obj._subClientEntity['backupsetId']\r\n request_json['taskInfo']['associations'][0]['instanceId'] = \\\r\n _vmsub_obj._subClientEntity['instanceId']\r\n request_json['taskInfo']['associations'][0]['subclientGUID'] = \\\r\n subclientGUID = _vmsub_obj._subClientEntity['subclientGUID']\r\n request_json['taskInfo']['associations'][0]['subclientName'] = 'default'\r\n request_json['taskInfo']['associations'][0]['subclientId'] = \\\r\n _vmsub_obj._subClientEntity['subclientId']\r\n\r\n return self._process_restore_response(request_json)", "def restoreBackup(self, filename, warnMissing = True):\n if (os.path.isfile(filename + '.bak')):\n with open(filename, 'wb') as oldf:\n with open(filename + '.bak', 'rb') as bakf:\n oldf.write(bakf.read())\n print(filename + \" reverted\")\n else:\n if ((not self.useDCX) and (not \"FRPG_SfxBnd\" in filename) and warnMissing):\n print(\"Failed to restore \" + filename + \", \" + filename + \".bak not found.\")", "def restore_backup(self, backup, name, flavor, volume):\n flavor_ref = self.api._get_flavor_ref(flavor)\n body = {\"instance\": {\n \"name\": name,\n \"flavorRef\": flavor_ref,\n \"volume\": {\"size\": volume},\n \"restorePoint\": {\"backupRef\": utils.get_id(backup)},\n }}\n uri = \"/%s\" % self.uri_base\n resp, resp_body = self.api.method_post(uri, body=body)\n return CloudDatabaseInstance(self, resp_body.get(\"instance\", {}))", "def restore(self, file_prefix, options=None):\n options = options or checkpoint_options.CheckpointOptions()\n tensor_names = []\n tensor_dtypes = []\n slice_specs = []\n\n for checkpoint_key, tensor_slices in self._tensor_slice_dict.items():\n for slice_spec, tensor in tensor_slices.items():\n tensor_dtypes.append(tensor.dtype)\n if isinstance(tensor, saveable_object.SaveSpec):\n slice_specs.append(tensor.slice_spec)\n tensor_names.append(tensor.name)\n else:\n slice_specs.append(slice_spec)\n tensor_names.append(checkpoint_key)\n\n restore_device = options.experimental_io_device or \"cpu:0\"\n with ops.device(restore_device):\n restored_tensors = io_ops.restore_v2(\n file_prefix, tensor_names, slice_specs, tensor_dtypes)\n\n restored_tensor_dict = {}\n for checkpoint_key, tensor_slices in self._tensor_slice_dict.items():\n for slice_spec in tensor_slices:\n restored_tensor = restored_tensors.pop(0)\n restored_tensor_dict.setdefault(checkpoint_key, {})[slice_spec] = (\n restored_tensor)\n return restored_tensor_dict", "def test_csi_volumesnapshot_restore_existing_backup(set_random_backupstore, # NOQA\n client, # NOQA\n core_api, # NOQA\n volume_name, # NOQA\n csi_pv, # NOQA\n pvc, # NOQA\n pod_make, # NOQA\n volumesnapshotclass, # NOQA\n volumesnapshotcontent,\n volumesnapshot, # NOQA\n volsnapshotclass_delete_policy, # NOQA\n backup_is_deleted): # NOQA\n csisnapclass = \\\n volumesnapshotclass(name=\"snapshotclass\",\n deletepolicy=volsnapshotclass_delete_policy)\n\n pod_name, pv_name, pvc_name, md5sum = \\\n prepare_pod_with_data_in_mb(client, core_api,\n csi_pv, pvc, pod_make,\n volume_name,\n data_path=\"/data/test\")\n\n volume = client.by_id_volume(volume_name)\n snap = create_snapshot(client, volume_name)\n volume.snapshotBackup(name=snap.name)\n wait_for_backup_completion(client, volume_name, snap.name)\n bv, b = find_backup(client, volume_name, snap.name)\n\n csivolsnap_name = volume_name + \"-volumesnapshot\"\n csivolsnap_namespace = \"default\"\n\n volsnapcontent = \\\n volumesnapshotcontent(\"volsnapcontent\",\n csisnapclass[\"metadata\"][\"name\"],\n \"Delete\",\n \"bs://\" + volume_name + \"/\" + 
b.name,\n csivolsnap_name,\n csivolsnap_namespace)\n\n csivolsnap = volumesnapshot(csivolsnap_name,\n csivolsnap_namespace,\n csisnapclass[\"metadata\"][\"name\"],\n \"volumeSnapshotContentName\",\n volsnapcontent[\"metadata\"][\"name\"])\n\n restore_pvc_name = pvc[\"metadata\"][\"name\"] + \"-restore\"\n restore_pvc_size = pvc[\"spec\"][\"resources\"][\"requests\"][\"storage\"]\n\n restore_csi_volume_snapshot(core_api,\n client,\n csivolsnap,\n restore_pvc_name,\n restore_pvc_size)\n\n restore_pod = pod_make()\n restore_pod_name = restore_pod[\"metadata\"][\"name\"]\n restore_pod['spec']['volumes'] = [create_pvc_spec(restore_pvc_name)]\n\n create_and_wait_pod(core_api, restore_pod)\n restore_md5sum = \\\n get_pod_data_md5sum(core_api, restore_pod_name, path=\"/data/test\")\n\n assert restore_md5sum == md5sum\n\n # Delete volumeSnapshot test\n delete_volumesnapshot(csivolsnap[\"metadata\"][\"name\"], \"default\")\n\n if backup_is_deleted is False:\n find_backup(client, volume_name, b[\"snapshotName\"])\n else:\n wait_for_backup_delete(client, volume_name, b[\"name\"])", "def restore(self, archive):\n logger.info(\"Restoring an old archive run from {}\".format(archive))\n if os.path.isabs(archive):\n restorefile = archive\n else:\n restorefile = os.path.join(self.containerpath, const.ARCHIVEDIR, archive)\n with ignored(OSError):\n shutil.rmtree(os.path.join(self.rundir))\n with tarfile.open(restorefile, \"r:gz\") as f:\n def is_within_directory(directory, target):\n \n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n \n prefix = os.path.commonprefix([abs_directory, abs_target])\n \n return prefix == abs_directory\n \n def safe_extract(tar, path=\".\", members=None, *, numeric_owner=False):\n \n for member in tar.getmembers():\n member_path = os.path.join(path, member.name)\n if not is_within_directory(path, member_path):\n raise Exception(\"Attempted Path Traversal in Tar File\")\n \n tar.extractall(path, members, numeric_owner=numeric_owner) \n \n \n safe_extract(f, self.rundir)\n self._refreshconfig()", "def restore(self, checkpoint):\n raise NotImplementedError", "def _acme_revoke(self, cert):\n # XXX | pylint: disable=unused-variable\n\n # pylint: disable=protected-access\n certificate = jose_util.ComparableX509(cert._cert)\n try:\n with open(cert.backup_key_path, \"rU\") as backup_key_file:\n key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, backup_key_file.read())\n # If the key file doesn't exist... 
or is corrupted\n except OpenSSL.crypto.Error as error:\n logger.debug(error, exc_info=True)\n raise errors.RevokerError(\n \"Corrupted backup key file: %s\" % cert.backup_key_path)\n\n return self.acme.revoke(cert=None) # XXX", "def test_resume_restore(self):\n if not self.backupset.resume:\n self.fail(\"Resume must be True for this test\")\n gen = BlobGenerator(\"ent-backup\", \"ent-backup-\", self.value_size, end=self.num_items)\n self._load_all_buckets(self.master, gen, \"create\", 0)\n self.backup_create()\n self.backup_cluster_validate()\n self.log.info(\"Start to flush bucket\")\n self._all_buckets_flush()\n restore_result = self.cluster.async_restore_cluster(backupset=self.backupset,\n objstore_provider=self.objstore_provider,\n no_progress_bar=self.no_progress_bar,\n cli_command_location=self.cli_command_location,\n cb_version=self.cb_version,\n force_updates=self.backupset.force_updates,\n no_resume=True)\n state = \"\"\n while state not in (\"FINISHED\", \"EXECUTING\"):\n state = restore_result.state\n self._kill_cbbackupmgr()\n self.assertFalse(self._check_output(\"success\", restore_result.result()))\n self.backup_restore_validate(compare_uuid=False, seqno_compare_function=\">=\")", "def restore_server(BackupId=None, ServerName=None, InstanceType=None, KeyPair=None):\n pass", "def restore(owner_id=None, photo_id=None):\n params = {\n 'owner_id': owner_id,\n 'photo_id': photo_id\n }\n result = call('photos.restore', **params)\n return parse_response(result)", "def restore_aws_kms_store(session, region, kmskeyid, access_id, secret, return_type=None, **kwargs):\n body_values = {'region': region, 'kmskeyid': kmskeyid,\n 'access_id': access_id, 'secret': secret}\n\n path = '/api/settings/restore_encryption_aws.json'\n\n return session.post_api(path=path, body=body_values, return_type=return_type, **kwargs)" ]
[ "0.6206176", "0.578941", "0.57810414", "0.5518122", "0.5459229", "0.545646", "0.5449263", "0.5411994", "0.5381392", "0.5350106", "0.5296097", "0.528997", "0.52898425", "0.5289533", "0.52668786", "0.52342594", "0.5188092", "0.5162289", "0.51536995", "0.5060998", "0.5055808", "0.5034262", "0.5032658", "0.50094706", "0.5006943", "0.50006306", "0.49992588", "0.49753073", "0.49701515", "0.4969092" ]
0.775596
0
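The document above appears to be the restore method of an Azure Key Vault certificates async client. A minimal usage sketch for that operation, assuming the azure-keyvault-certificates and azure-identity packages; the vault URL and the backup file name are placeholders, not values from the record.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        # Bytes previously produced by backup_certificate(); the file name is illustrative.
        with open("cert-backup.bin", "rb") as backup_file:
            backup_bytes = backup_file.read()
        # Restoring fails if a certificate with the same name already exists in the target vault.
        restored = await client.restore_certificate_backup(backup_bytes)
        print(restored.name, restored.properties.version)

asyncio.run(main())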
Lists the currently-recoverable deleted certificates. Possible only if the vault is soft-delete enabled. Requires certificates/get/list permission. Retrieves the certificates in the current vault which are in a deleted state and ready for recovery or purging. This operation includes deletion-specific information.
def list_deleted_certificates(
    self, *, include_pending: Optional[bool] = None, **kwargs
) -> AsyncItemPaged[DeletedCertificate]:
    max_page_size = kwargs.pop("max_page_size", None)
    if self.api_version == "2016-10-01":
        if include_pending is not None:
            raise NotImplementedError(
                "The 'include_pending' parameter to `list_deleted_certificates` "
                "is only available for API versions v7.0 and up"
            )
    else:
        kwargs.update({"include_pending": include_pending})
    return self._client.get_deleted_certificates(
        vault_base_url=self._vault_url,
        maxresults=max_page_size,
        cls=lambda objs: [DeletedCertificate._from_deleted_certificate_item(x) for x in objs],
        **kwargs
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def recover_deleted_certificate(self, certificate_name: str, **kwargs) -> KeyVaultCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n recovered_cert_bundle = await self._client.recover_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n recovered_certificate = KeyVaultCertificate._from_certificate_bundle(recovered_cert_bundle)\n\n command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n polling_method = AsyncDeleteRecoverPollingMethod(\n command=command, final_resource=recovered_certificate, finished=False, interval=polling_interval\n )\n await polling_method.run()\n\n return polling_method.resource()", "async def get_deleted_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n bundle = await self._client.get_deleted_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n return DeletedCertificate._from_deleted_certificate_bundle(deleted_certificate_bundle=bundle)", "async def delete_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.delete_certificate_contacts(\n vault_base_url=self.vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def certificates(self):\n if self.user.is_superuser:\n return Certificate.objects.all()\n else:\n return Certificate.objects.filter(licensee__in=self.licensees.all())", "def credential_list():\n rows = safeisland.list_certificates()\n certs = []\n for row in rows:\n# certs.append(row[\"cert\"])\n certs.append({\"uuid\": row[\"uuid\"], \"cert\": row[\"cert\"]})\n\n return {\"payload\": certs}", "def list_certificates_request(self, vault_name: str, limit: int, offset: int) -> list[dict]:\n url = f'https://{vault_name}{self.azure_cloud.suffixes.keyvault_dns}/certificates'\n\n response = self.http_request(\n 'GET', full_url=url, resource=self.get_vault_resource())\n\n return self.get_entities_independent_of_pages(response, limit, offset, self.get_vault_resource())", "def certificates(self) -> pulumi.Output[Optional[Sequence['outputs.ServiceCertificate']]]:\n return pulumi.get(self, \"certificates\")", "def deleted_messages(self):\n return self._get_messages_from_folder_name('DeletedItems')", "def list_certificates_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n limit = arg_to_number(args.get('limit')) or DEFAULT_LIMIT\n offset = arg_to_number(args.get('offset')) or DEFAULT_OFFSET\n\n response = client.list_certificates_request(vault_name, limit, offset)\n outputs = copy.deepcopy(response)\n\n readable_response = []\n for certificate in outputs:\n readable_response.append({\n 'certificate_id': certificate.get('id'),\n **convert_attributes_to_readable(certificate.get('attributes', {}).copy())\n })\n certificate[VAULT_NAME_CONTEXT_FIELD] = vault_name\n certificate['attributes'] = convert_time_attributes_to_iso(certificate['attributes'])\n\n readable_output = tableToMarkdown(\n f'{vault_name} Certificates List',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n 
ignore_auto_extract=True\n )\n\n return command_results", "def DeletedRecords(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('deleted_records', default)\n return [HEP.JSONReferenceObject(i) for i in tmp]", "def _safe_revoke(self, certs):\n success_list = []\n try:\n for cert in certs:\n if self.no_confirm or revocation.confirm_revocation(cert):\n try:\n self._acme_revoke(cert)\n except errors.Error:\n # TODO: Improve error handling when networking is set...\n logger.error(\n \"Unable to revoke cert:%s%s\", os.linesep, str(cert))\n success_list.append(cert)\n revocation.success_revocation(cert)\n finally:\n if success_list:\n self._remove_certs_keys(success_list)\n\n return success_list", "async def delete_certificate(self, certificate_name: str, **kwargs) -> DeletedCertificate:\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 2\n deleted_cert_bundle = await self._client.delete_certificate(\n vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs\n )\n deleted_certificate = DeletedCertificate._from_deleted_certificate_bundle(deleted_cert_bundle)\n\n polling_method = AsyncDeleteRecoverPollingMethod(\n # no recovery ID means soft-delete is disabled, in which case we initialize the poller as finished\n finished=deleted_certificate.recovery_id is None,\n command=partial(self.get_deleted_certificate, certificate_name=certificate_name, **kwargs),\n final_resource=deleted_certificate,\n interval=polling_interval,\n )\n await polling_method.run()\n\n return polling_method.resource()", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "def delete_all(self):\n return self.context.delete(\"/ckks/all\", None,\n \"CKKS:: failed deleting all the CKKS data\"\n )", "def get_deleted_users(self):\n return self.ldap_connection.search_s(\"ou=DeletedUsers,dc=redhat,dc=com\",\n ldap.SCOPE_SUBTREE, \"uid=*\", [\"uid\"])", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CertificateReferenceArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def deleted(self, deleted_since=None, filters=None, params=None):\n return self.tc_requests.deleted(\n self.api_type,\n self.api_branch,\n deleted_since=deleted_since,\n owner=self.owner,\n filters=filters,\n params=params,\n )", "def get_certificates_by_pcc(conn: dict, id: str) -> dict:\n return get(conn, f\"{S3PCCS}/{id}/certificates\")", "def show_all_certifications():\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n \n certs = Cert.query.all()\n ## all possible certs...\n \n return render_template(\"certs_display.html\", certs = certs)", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceCertificateArgs']]]]:\n return pulumi.get(self, \"certificates\")", "def vault_delete(self):\n return self._vault_delete", "def mesh_certificates(self) -> 'outputs.MeshCertificatesResponse':\n return pulumi.get(self, \"mesh_certificates\")", "def revoke_from_menu(self):\n\n csha1_vhlist = self._get_installed_locations()\n certs = self._populate_saved_certs(csha1_vhlist)\n\n while True:\n if certs:\n code, selection = revocation.display_certs(certs)\n\n if code == display_util.OK:\n revoked_certs = 
self._safe_revoke([certs[selection]])\n # Since we are currently only revoking one cert at a time...\n if revoked_certs:\n del certs[selection]\n elif code == display_util.HELP:\n revocation.more_info_cert(certs[selection])\n else:\n return\n else:\n logger.info(\n \"There are not any trusted Let's Encrypt \"\n \"certificates for this server.\")\n return", "def cavium_certs(self) -> Sequence[str]:\n return pulumi.get(self, \"cavium_certs\")", "def test_get_all_certificates(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.get(\n '/api/v1/certificates', content_type='application/json',\n headers=self.get_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificates retrieved successfully')\n assert response.status_code == 200", "def get_deleted_nodes(self):\n\n return self._deleted_nodes", "def GetCertificates(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def get_all_server_certs(self, path_prefix='/',\r\n marker=None, max_items=None):\r\n params = {}\r\n if path_prefix:\r\n params['PathPrefix'] = path_prefix\r\n if marker:\r\n params['Marker'] = marker\r\n if max_items:\r\n params['MaxItems'] = max_items\r\n return self.get_response('ListServerCertificates',\r\n params,\r\n list_marker='ServerCertificateMetadataList')", "def get_deleted_objects(self, objs, opts, request, using):\n\n from django.contrib.admin.util import NestedObjects\n collector = NestedObjects(using=using)\n collector.collect(objs)\n perms_needed = set()\n \n def format_callback(obj):\n has_admin = obj.__class__ in self.admin_site._registry\n opts = obj._meta\n\n from django.utils.html import escape\n from django.utils.safestring import mark_safe\n from django.utils.text import capfirst\n from django.core.urlresolvers import reverse\n \n if has_admin:\n from django.contrib.admin.util import quote\n admin_url = reverse('%s:%s_%s_change'\n % (self.admin_site.name,\n opts.app_label,\n opts.object_name.lower()),\n None, (quote(obj._get_pk_val()),))\n p = '%s.%s' % (opts.app_label,\n opts.get_delete_permission())\n \n if isinstance(obj, self.model):\n if not self.has_delete_permission(request, obj):\n perms_needed.add(opts.verbose_name)\n elif not request.user.has_perm(p):\n perms_needed.add(opts.verbose_name)\n # Display a link to the admin page.\n\n return mark_safe(u'%s: <a href=\"%s\">%s</a>' %\n (escape(capfirst(opts.verbose_name)),\n admin_url,\n escape(obj)))\n else:\n # Don't display link to edit, because it either has no\n # admin or is edited inline.\n from django.utils.encoding import force_unicode\n return u'%s: %s' % (capfirst(opts.verbose_name),\n force_unicode(obj))\n \n to_delete = collector.nested(format_callback)\n \n protected = [format_callback(obj) for obj in collector.protected]\n \n return to_delete, perms_needed, protected" ]
[ "0.5930504", "0.5810624", "0.5445564", "0.5431113", "0.5354896", "0.5310337", "0.5298006", "0.5263039", "0.52389675", "0.5166186", "0.51401037", "0.5125574", "0.5110472", "0.5071451", "0.5021978", "0.50173384", "0.49939674", "0.49534753", "0.49424502", "0.4934416", "0.4934416", "0.49194884", "0.49166113", "0.48052007", "0.4805147", "0.47834665", "0.47827324", "0.47742006", "0.47595826", "0.47529846" ]
0.6773191
0
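A short sketch of paging through deleted certificates with the async client method shown above, assuming the same packages as the earlier sketch and a soft-delete enabled vault; the vault URL is a placeholder.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        # Each item is a DeletedCertificate carrying deletion-specific fields.
        async for deleted in client.list_deleted_certificates():
            print(deleted.name, deleted.deleted_on, deleted.scheduled_purge_date)

asyncio.run(main())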
List the identifiers and properties of a certificate's versions. Requires certificates/list permission.
def list_properties_of_certificate_versions(
    self, certificate_name: str, **kwargs
) -> AsyncItemPaged[CertificateProperties]:
    max_page_size = kwargs.pop("max_page_size", None)
    return self._client.get_certificate_versions(
        vault_base_url=self._vault_url,
        certificate_name=certificate_name,
        maxresults=max_page_size,
        cls=lambda objs: [CertificateProperties._from_certificate_item(x) for x in objs],
        **kwargs
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def certifiVersions():\n log = logger.new(function='certifiVersions')\n r = yield treq.get('https://pypi.python.org/pypi/certifi/json', timeout=5)\n log.msg(\"got certifi versions!\")\n data = yield r.json()\n\n # Note: this takes advantage of the fact that certifi's releases have the\n # same version number sort order as lexicographical. If that changes,\n # this will break.\n releases = sorted(data[u'releases'].keys())\n\n first_release = releases.index('14.05.14')\n target_versions = releases[first_release:]\n\n result = []\n for version in target_versions:\n files = data[u'releases'][version]\n\n # Find the .tar.gz release.\n for file in files:\n if file[u'filename'].endswith(u'.tar.gz'):\n break\n else:\n continue\n\n log.msg(\"new release located\", version=version, tarball=file[u'url'])\n result.append((version, file[u'url']))\n\n returnValue(result)", "def list_versions(quartus_versions):\n for key in quartus_versions.keys():\n print(key)", "def versions(self) -> Dict[str, str]:\n self.__logger.debug('Eva.versions called')\n return self.__http_client.api_versions()", "def get_component_versions(session):\n # type: (Session) -> Dict[str, Any]\n return _get_dict(session, \"/version\")", "def get_versions():\n ret_obj = {'versions': picard_versions(current_app)}\n return make_response(jsonify(ret_obj), 200)", "def list_versions(self):\n version_url = self._get_base_version_url()\n\n resp, body = self.raw_request(version_url, 'GET')\n # NOTE: We need a raw_request() here instead of request() call because\n # \"list API versions\" API doesn't require an authentication and we can\n # skip it with raw_request() call.\n self._error_checker(resp, body)\n\n body = json.loads(body)\n self.validate_response(schema.list_versions, resp, body)\n return rest_client.ResponseBody(resp, body)", "def versions(self):\n return self._versions", "def get_all_versions(self, headers=None, **params):\r\n return self._get_all([('Version', self.key_class),\r\n ('CommonPrefixes', Prefix),\r\n ('DeleteMarker', DeleteMarker)],\r\n 'versions', headers, **params)", "def get_versions(self):\n raise NotImplementedError", "def ListVersions(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def list_versions(self):\n if not USE_GCLOUD:\n return self.run_appcfg(['list_versions'])\n data = self.run_gcloud(['app', 'versions', 'list'])\n per_module = collections.defaultdict(list)\n for deployment in data:\n service = deployment['service'].encode('utf-8')\n version_id = deployment['id'].encode('utf-8')\n per_module[service].append(version_id)\n return dict(per_module)", "def credential_list():\n rows = safeisland.list_certificates()\n certs = []\n for row in rows:\n# certs.append(row[\"cert\"])\n certs.append({\"uuid\": row[\"uuid\"], \"cert\": row[\"cert\"]})\n\n return {\"payload\": certs}", "def listNoteVersions(self, authenticationToken, noteGuid):\r\n pass", "def versions(self):\n raise Exception(\"mcapi.Datafile.versions is not implemented\")", "def get_versions(self, async = False):\n\n\t\tself._send_message(\"VERSION\", \"\\x00\")\n\n\t\tif not async:\n\t\t\treturn EndpointSync(self, \"VERSION\").get_data()", "def getVersions(self):\n logger.debug(\"Func: getVersions\")\n\n try:\n return self._currentSceneInfo[\"Versions\"]\n except:\n return []", "def all(self):\r\n if self._versions is None or \\\r\n len(self._versions) == 0:\r\n url = \"%s/versions\" % self._url\r\n params = {'f':'json'}\r\n res = self._con.get(url, params)\r\n self._versions = []\r\n if 'versions' in res:\r\n 
for v in res['versions']:\r\n guid = v['versionGuid'][1:-1]\r\n vurl = \"%s/versions/%s\" % (self._url, guid)\r\n self._versions.append(Version(url=vurl,\r\n flc=self._flc,\r\n gis=self._gis))\r\n return self._versions\r\n return self._versions", "def listNoteVersions(self, authenticationToken, noteGuid):\r\n self.send_listNoteVersions(authenticationToken, noteGuid)\r\n return self.recv_listNoteVersions()", "def ListVersions(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def select_versions(self):\n return []", "def api(self):\n res = self.client.call('/', 'GET', data='')\n self.logger.debug('Get openstack identity api versions: %s' % truncate(res))\n return res[0]", "def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")", "def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")", "def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")", "def api_versions(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"api_versions\")", "def available_versions(url, session=None, **kwargs):\n if not session:\n session = client_session.Session._construct(kwargs)\n\n return _discover.get_version_data(session, url)", "def ListVersions(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()", "def list_versions(self, service_id):\n return [self.fastly_cache[service_id]['service_details']]", "def list_authorities():\n try:\n certs = client().certificates.get_authorities()\n if not certs:\n logger.info(\n 'ctl:cert:authorities', 'No certificate authorities found'\n )\n return\n llen = len(sorted(certs, key=lambda x: len(x[\"id\"]))[-1][\"id\"])\n for x in sorted(certs, key=lambda x: x[\"id\"]):\n click.echo(\n click.style(\n '{name: <{fill}}'.format(name=x[\"id\"], fill=llen + 3),\n fg=\"white\", bold=True) + \"Expires \" +\n click.style(x[\"expiry\"].strftime(\"%c\"), fg=\"yellow\")\n )\n except Exception as e:\n raise CLIException(str(e))", "def versions(self) -> List['RadsProjectVersion']:\n logger.debug(f\"retrieve versions of {self}\")\n listing = self.storage.request_text(f\"{self.path}/releaselisting\")\n return [RadsProjectVersion(self, RadsVersion(l)) for l in listing.splitlines()]" ]
[ "0.6490166", "0.6426438", "0.63160443", "0.63044", "0.6249581", "0.61390465", "0.6003774", "0.60021186", "0.5911201", "0.5854424", "0.5776592", "0.57742536", "0.5766685", "0.5750346", "0.57278854", "0.56947684", "0.56905156", "0.5607363", "0.5604912", "0.5604374", "0.5584045", "0.5551337", "0.5551337", "0.5551337", "0.5551337", "0.55458003", "0.552268", "0.5510268", "0.5510248", "0.5497352" ]
0.65740174
0
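An illustrative sketch of listing a certificate's versions with the method above; the vault URL and the certificate name "my-cert" are assumptions for the example.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        # Yields CertificateProperties only, without policy or secret material.
        async for version in client.list_properties_of_certificate_versions("my-cert"):
            print(version.version, version.enabled, version.updated_on)

asyncio.run(main())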
Sets the certificate contacts for the key vault. Requires certificates/managecontacts permission.
async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:
    new_contacts = await self._client.set_certificate_contacts(
        vault_base_url=self.vault_url,
        contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),
        **kwargs
    )
    return [
        CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_contacts(self, contacts):\n\n\t\tif contacts is not None and not isinstance(contacts, list):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: contacts EXPECTED TYPE: list', None, None)\n\t\t\n\t\tself.__contacts = contacts\n\t\tself.__key_modified['Contacts'] = 1", "def contacts(self, contacts):\n\n self._contacts = contacts", "def contacts(self, contacts):\n\n self._contacts = contacts", "async def get_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.get_certificate_contacts(\n vault_base_url=self._vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def contact_list(self, contact_list):\n \n self._contact_list = contact_list", "async def delete_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.delete_certificate_contacts(\n vault_base_url=self.vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def support_contacts(self, support_contacts):\n self._support_contacts = support_contacts", "def update_contacts(self, contacts):\n\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n if contacts.channel.size != self.contacts.channel.size:\n self.new_contact_set(contacts)\n return # Prevent calling update_contacts recursively\n self.contacts = contacts\n contacts = np.array(contacts)\n\n for i, actor in enumerate(self.contacts_actors):\n # mapper = actors.GetNextActor().GetMapper()\n mapper = actor.GetMapper()\n self.contacts_actors[i].GetProperty().SetColor(self.contacts_color)\n self.contacts_actors[i].GetProperty().SetOpacity(self.contacts_opacity)\n source = vtkSphereSource()\n source.SetCenter(contacts[0:3, i])\n source.SetRadius(self.contacts_size)\n mapper.SetInputConnection(source.GetOutputPort())", "def contact_points(self, contact_points: object):\n\n self._contact_points = contact_points", "def new_contact_set(self, contacts):\n if contacts.time.size != 1:\n raise IndexError(\"Contacts should be from one frame only\")\n self.contacts = contacts\n\n # Remove previous actors from the scene\n for actor in self.contacts_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.contacts_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtk.vtkPoints()\n for i in range(contacts.channel.size):\n # Create a mapper\n mapper = vtkPolyDataMapper()\n\n # Create an actor\n self.contacts_actors.append(vtkActor())\n self.contacts_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.contacts_actors[i])\n\n # Update marker position\n self.update_contacts(self.contacts)", "def update_contacts(self):\n self.contacts = self.db.list_contacts()\n return self.list_contacts()", "def contacts(self):\n from hubspot3.contacts import ContactsClient\n\n return ContactsClient(**self.auth, **self.options)", "def contacts(self):\n service_root = self._get_webservice_url(\"contacts\")\n return ContactsService(service_root, self.session, self.params)", "def list_contact(self, key, value):\n self.db.list_contact(\n key,\n value,\n )", "def set_contact_mechanisms(cls, records, name, value=None):\n Party = Pool().get('party.party')\n\n for record in records:\n Party.write([record.party], {'contact_mechanisms': value})", "def new_soft_contacts_set(self, soft_contacts):\n if soft_contacts.time.size != 1:\n raise IndexError(\"soft_contacts should be from one frame 
only\")\n self.soft_contacts = soft_contacts\n\n # Remove previous actors from the scene\n for actor in self.soft_contacts_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.soft_contacts_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtk.vtkPoints()\n for i in range(soft_contacts.channel.size):\n # Create a mapper\n mapper = vtkPolyDataMapper()\n\n # Create an actor\n self.soft_contacts_actors.append(vtkActor())\n self.soft_contacts_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.soft_contacts_actors[i])\n # Update marker position\n self.update_soft_contacts(self.soft_contacts)", "def list_contacts(self):\n return self.contacts", "def get_contacts(self):\n\n\t\treturn self.__contacts", "def test_set_one_ca_list(self):\n cacert = load_certificate(FILETYPE_PEM, root_cert_pem)\n cadesc = cacert.get_subject()\n\n def single_ca(ctx):\n ctx.set_client_ca_list([cadesc])\n return [cadesc]\n\n self._check_client_ca_list(single_ca)", "def getcontacts():\n contacts = {}\n\n try:\n #get list of contact ids\n contactids = r.smembers(\"contacts\")\n\n #for each contact id get data\n for contactid in contactids:\n contacts.update(_getcontact(str(contactid)))\n return contacts\n except:\n print \"Unexpected error:\", sys.exc_info()[0]\n raise", "def contacts(self):\n return ContactCollection(self.request)", "def block_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='block'))", "def update_soft_contacts(self, soft_contacts):\n\n if soft_contacts.time.size != 1:\n raise IndexError(\"soft_contacts should be from one frame only\")\n if soft_contacts.channel.size != self.soft_contacts.channel.size:\n self.new_soft_contacts_set(soft_contacts)\n return # Prevent calling update_soft_contacts recursively\n self.soft_contacts = soft_contacts\n soft_contacts = np.array(soft_contacts)\n\n for i, actor in enumerate(self.soft_contacts_actors):\n # mapper = actors.GetNextActor().GetMapper()\n mapper = actor.GetMapper()\n self.soft_contacts_actors[i].GetProperty().SetColor(self.soft_contacts_color)\n self.soft_contacts_actors[i].GetProperty().SetOpacity(self.soft_contacts_opacity)\n source = vtkSphereSource()\n source.SetCenter(soft_contacts[0:3, i])\n source.SetRadius(self.soft_contacts_size[i])\n mapper.SetInputConnection(source.GetOutputPort())", "def _get_contacts(self, tgt):\n with open(tgt, mode='r', encoding='utf-8') as f:\n str_contents = f.read()\n self.contacts = json.loads(str_contents)\n return", "def contact(self, contact):\n\n self.logger.debug(\"In 'contact' setter.\")\n\n self._contact = contact", "def contact(self, contact):\n\n self._contact = contact", "def contact(self, contact):\n\n self._contact = contact", "def _setcontact(id, name=None, address=None, phone=None, email=None):\n try:\n if name is not None:\n r.set(\"uid:\" + id + \":name\", name)\n if address is not None: \n r.set(\"uid:\" + id + \":address\", address)\n if phone is not None: \n r.set(\"uid:\" + id + \":phone\", phone)\n if email is not None: \n r.set(\"uid:\" + id + \":email\", email)\n\n return True\n except:\n print \"Unexpected error:\", sys.exc_info()[0]\n raise", "def contacts(self):\r\n return contacts.Contacts(self)", "def __init__(self, contacts_client):\n self.contacts_client = contacts_client" ]
[ "0.7523106", "0.71805346", "0.71805346", "0.6156637", "0.6142041", "0.5884162", "0.58729553", "0.58603793", "0.57227623", "0.5721069", "0.57132614", "0.5505021", "0.5488495", "0.54683405", "0.5432297", "0.5311218", "0.5282868", "0.528062", "0.5279917", "0.52583146", "0.5252208", "0.52108693", "0.5183219", "0.51700103", "0.5158638", "0.51283365", "0.51283365", "0.5091125", "0.5086827", "0.50619227" ]
0.78716165
0
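A hedged sketch of registering certificate contacts through the set_contacts coroutine above; the e-mail addresses, names, and vault URL are made up for illustration.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates import CertificateContact
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        contacts = [
            CertificateContact(email="[email protected]", name="PKI Admin", phone="555-0100"),
            CertificateContact(email="[email protected]"),
        ]
        # The vault's contact list is replaced as a whole, not merged with existing entries.
        saved = await client.set_contacts(contacts)
        for contact in saved:
            print(contact.name, contact.email)

asyncio.run(main())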
Gets the certificate contacts for the key vault. Requires the certificates/managecontacts permission.
async def get_contacts(self, **kwargs) -> List[CertificateContact]:
    contacts = await self._client.get_certificate_contacts(
        vault_base_url=self._vault_url, **kwargs
    )
    return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_contacts(self):\n return self.contacts", "def get_contacts(self):\n\n\t\treturn self.__contacts", "def getcontacts():\n contacts = {}\n\n try:\n #get list of contact ids\n contactids = r.smembers(\"contacts\")\n\n #for each contact id get data\n for contactid in contactids:\n contacts.update(_getcontact(str(contactid)))\n return contacts\n except:\n print \"Unexpected error:\", sys.exc_info()[0]\n raise", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "async def delete_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.delete_certificate_contacts(\n vault_base_url=self.vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def fetch_contacts(owner_account_id):\n resp = oauth.tapkey.get(f\"Owners/{owner_account_id}/Contacts?$select=id,identifier\")\n contacts = resp.json()\n return contacts", "def GetContactList(self):\n\t\tfeeds = []\n\t\tfeed = self.client.GetContacts()\n\t\tfeeds.append(feed)\n\t\tnext = feed.GetNextLink()\n\t\twhile next:\n\t\t\tfeed = self.client.GetContacts(uri=next.href)\n\t\t\tfeeds.append(feed)\n\t\t\tnext = feed.GetNextLink()\n\t\t\n\t\tcontacts = []\n\t\tfor feed in feeds:\n\t\t\tif not feed.entry:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tfor i, entry in enumerate(feed.entry):\n\t\t\t\t\tcontacts.append(entry)\n\t\treturn contacts", "def contacts(self):\n return ContactCollection(self.request)", "def get_contacts():\n return jsonify(g.driver.get_contacts())", "def contacts(self):\n from hubspot3.contacts import ContactsClient\n\n return ContactsClient(**self.auth, **self.options)", "def contacts(self):\n service_root = self._get_webservice_url(\"contacts\")\n return ContactsService(service_root, self.session, self.params)", "def getAccountContacts(self,accountId, startIndex = None, pageSize = None, sortBy = None, filter = None, responseFields = None):\r\n\r\n\t\turl = MozuUrl(\"/api/commerce/customer/accounts/{accountId}/contacts?startIndex={startIndex}&pageSize={pageSize}&sortBy={sortBy}&filter={filter}&responseFields={responseFields}\", \"GET\", UrlLocation.TenantPod, False);\r\n\t\turl.formatUrl(\"accountId\", accountId);\r\n\t\turl.formatUrl(\"filter\", filter);\r\n\t\turl.formatUrl(\"pageSize\", pageSize);\r\n\t\turl.formatUrl(\"responseFields\", responseFields);\r\n\t\turl.formatUrl(\"sortBy\", sortBy);\r\n\t\turl.formatUrl(\"startIndex\", startIndex);\r\n\t\tself.client.withResourceUrl(url).execute();\r\n\t\treturn self.client.result();", "def contacts(self):\r\n return contacts.Contacts(self)", "def get_contacts(self):\n contacts = Membership.objects.filter(entity = self, key_contact = True).order_by('importance_to_entity')\n return contacts", "def ListAllContacts(self):\n feed = self.gd_client.GetContacts()\n self.contacts = self.CleanPhoneNumbers(self.GetContactsInfo(feed))\n return self.contacts", "def get_all_contacts(self,\n hook,\n resource,\n data=None,\n headers=None,\n extra_options=None):\n all_pages = []\n total_contacts = -1\n next_token = None\n\n while len(all_pages) != total_contacts:\n if not next_token:\n result = 
hook.run('{}/contacts'.format(resource),\n data,\n headers,\n extra_options).json()\n else:\n result = hook.run('{}/contacts/{}'.format(resource, next_token),\n data,\n headers,\n extra_options).json()\n\n all_pages += result.get('contacts', None)\n\n total_contacts = result.get('total_contacts', None)\n\n if 'bookmark' in result:\n next_token = result.get('bookmark', None)\n\n return all_pages", "def get_contacts():\n # Parse command line options\n try:\n opts, args = getopt.getopt(sys.argv[1:], '', ['user=', 'password='])\n except getopt.error, msg:\n print 'python contacts_example.py --user [username] --password [password]'\n sys.exit(2)\n user = ''\n password = ''\n # Process options\n for option, arg in opts:\n if option == '--user':\n user = arg\n elif option == '--password':\n password = arg\n\n while not user:\n print 'NOTE: Please run these tests only with a test account.'\n user = raw_input('Please enter your username: ')\n while not password:\n password = getpass.getpass()\n if not password:\n print 'Password cannot be blank.'\n try:\n contacts = GoogleContacts(user, password)\n except gdata.client.BadAuthentication:\n print 'Invalid user credentials given.'\n exit(1)\n contacts_list = contacts.Run()\n return contacts_list", "def get_all(self):\n total_contacts = []\n get_count = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': 'RECORDNO'\n },\n 'pagesize': '1'\n }\n }\n\n response = self.format_and_send_request(get_count)\n count = int(response['data']['@totalcount'])\n pagesize = 2000\n offset = 0\n for i in range(0, count, pagesize):\n data = {\n 'query': {\n 'object': 'CONTACT',\n 'select': {\n 'field': [\n 'RECORDNO',\n 'CONTACTNAME',\n 'COMPANYNAME',\n 'FIRSTNAME',\n 'LASTNAME',\n 'INITIAL',\n 'PRINTAS',\n 'TAXABLE',\n 'MAILADDRESS.ADDRESS1'\n ]\n },\n 'pagesize': pagesize,\n 'offset': offset\n }\n }\n contacts = self.format_and_send_request(data)['data']['CONTACT']\n total_contacts = total_contacts + contacts\n offset = offset + pagesize\n return total_contacts", "def contacts(self):\n if \"contacts\" in self._prop_dict:\n return ContactsCollectionPage(self._prop_dict[\"contacts\"])\n else:\n return None", "def contact_list(self):\n return self._contact_list", "def get_contacts(self, uuid=None, urn=None, group=None, deleted=None, before=None, after=None):\n params = self._build_params(uuid=uuid, urn=urn, group=group, deleted=deleted, before=before, after=after)\n return self._get_query('contacts', params, Contact)", "def get_contacts(self, uuids=None, urns=None, groups=None, before=None, after=None, deleted=None, pager=None):\n params = self._build_params(uuid=uuids, urns=urns, group_uuids=groups, before=before, after=after,\n deleted=deleted)\n return Contact.deserialize_list(self._get_multiple('contacts', params, pager))", "def get_all_contacts(self):\n self.init_db(self._testing)\n\n query = \"SELECT {} FROM {} ORDER BY id;\".format(\", \".join(Contact.columns_with_uid), Contact.table_name)\n\n data = self.db.conn.execute(query)\n\n return [Contact(*item) for item in data]", "def get_queryset(self):\n return self.request.user.contacts.all()", "def list_contact(self, key, value):\n self.db.list_contact(\n key,\n value,\n )", "def get_contacts_list(self):\n return [(id + 1, contact) for id, contact in enumerate(self.contact_list)]", "def getContactsData(service, groupResourceName, maxMembers):\n # get the ids of the contacts inside the specified group\n contactsIDs = service.contactGroups().get(\n resourceName=groupResourceName, \n 
maxMembers=maxMembers).execute()[\"memberResourceNames\"]\n\n # get data of the contacts that correspond to the ids obtained\n contactsData = service.people().getBatchGet(\n resourceNames=contactsIDs,\n personFields='names,emailAddresses').execute()[\"responses\"]\n\n # extract the names and the emailAddresses of the contacts\n namessList = [] \n mailsList = []\n for contact in contactsData:\n try:\n namessList.append(contact[\"person\"][\"names\"][0][\"displayName\"])\n except:\n raise Exception(\"All contacts must have a name associated\")\n mailsList.append(contact[\"person\"][\"emailAddresses\"][0][\"value\"])\n return namessList, mailsList", "def get_contacts_by_company(self, company_id):\n\n contacts = self._request('getContactsByCompany', {'company_id': company_id})\n for contact in contacts:\n yield contact", "def update_contacts(self):\n self.contacts = self.db.list_contacts()\n return self.list_contacts()", "def getallcontacts(self):\n feed_url = self.contacts_client.GetFeedUri(projection='full')\n total_read = 0\n while True:\n print('Retrieving contacts... (%d retrieved so far)' % total_read)\n feed = self.contacts_client.get_feed(uri=feed_url,\n auth_token=None,\n desired_class=gdata.contacts.data.ContactsFeed)\n total_read += len(feed.entry)\n for entry in feed.entry:\n yield entry\n next_link = feed.GetNextLink()\n if next_link is None:\n print('All contacts retrieved: %d total' % total_read)\n break\n feed_url = next_link.href" ]
[ "0.734212", "0.7277253", "0.7175655", "0.6969443", "0.6927339", "0.6741601", "0.669288", "0.6679771", "0.6647958", "0.66372335", "0.6624503", "0.6612645", "0.6528251", "0.64787", "0.6432151", "0.63611555", "0.6351894", "0.6325086", "0.62903816", "0.6270996", "0.62341815", "0.6046413", "0.6009918", "0.5997268", "0.59686524", "0.59275556", "0.5921013", "0.5877889", "0.58665556", "0.5846721" ]
0.83755594
0
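A minimal sketch of reading the registered contacts back with get_contacts, under the same assumptions as the sketches above (placeholder vault URL, azure-identity credential).

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates.aio import CertificateClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url="https://<my-vault>.vault.azure.net", credential=credential)
    async with client, credential:
        # Requires the certificates/managecontacts permission on the vault.
        contacts = await client.get_contacts()
        for contact in contacts:
            print(contact.name, contact.email, contact.phone)

asyncio.run(main())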
Deletes the certificate contacts for the key vault. Requires the certificates/managecontacts permission.
async def delete_contacts(self, **kwargs) -> List[CertificateContact]:
    contacts = await self._client.delete_certificate_contacts(
        vault_base_url=self.vault_url, **kwargs
    )
    return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_contacts(self):\n self.db.delete_all_contacts()\n return self.update_contacts()", "def delete_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='delete'))", "def delete(self):\n self.skype.conn(\"DELETE\", \"{0}/users/{1}/contacts/8:{2}\"\n .format(SkypeConnection.API_CONTACTS, self.skype.userId, self.id),\n auth=SkypeConnection.Auth.SkypeToken)\n self.skype.conn(\"DELETE\", \"{0}/users/ME/contacts/8:{1}\".format(self.skype.conn.msgsHost, self.id),\n auth=SkypeConnection.Auth.RegToken)", "async def set_contacts(self, contacts: List[CertificateContact], **kwargs) -> List[CertificateContact]:\n new_contacts = await self._client.set_certificate_contacts(\n vault_base_url=self.vault_url,\n contacts=self._models.Contacts(contact_list=[c._to_certificate_contacts_item() for c in contacts]),\n **kwargs\n )\n return [\n CertificateContact._from_certificate_contacts_item(contact_item=item) for item in new_contacts.contact_list\n ]", "def test_projects_id_contacts_delete(self):\n project = Contact()\n response = self.client.open('/project-tracker/projects/{id}/contacts'.format(id=56),\n method='DELETE',\n data=json.dumps(project),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def delcontact(id):\n delid = str(id)\n\n try:\n r.srem(\"contacts\", delid, 1)\n\n r.delete(\"uid:\" + delid + \":name\")\n r.delete(\"uid:\" + delid + \":address\")\n r.delete(\"uid:\" + delid + \":phone\")\n r.delete(\"uid:\" + delid + \":email\")\n\n return {}\n except:\n print \"Unexpected error:\", sys.exc_info()[0]\n raise", "def delete_contact(self, contact):\n self._delete('contacts', self._build_params(uuid=contact))", "def clearContactsFromPhone():\n\tprint \"Deleting any contacts from phone...\"\n\tcmd =r\"adb shell pm clear com.android.providers.contacts\"\n\tos.system(cmd)\n\tprint \"Finished deleting contacts from phone.\"", "def del_contact(contact):\n db = get_db()\n \n if contact.get_hash_name() in db:\n db.pop(contact.get_hash_name())\n write_db(db)\n sys.exit(logger.ok('success: contact ' + '\"%s\"' % contact.get_name() + ' deleted'))\n else:\n sys.exit(logger.fail('fatal: contact does not exist'))", "def RemoveAll(self):\n\t\tcontacts = self.GetContactList()\n\t\t\n\t\tfor contact in contacts:\n\t\t\tself.BatchEnqueue('delete', contact)\n\t\tself.ExecuteBatchQueue()", "async def delete_contact(dbcon: DBConnection, contact_id: int) -> None:\n if not await contact_exists(dbcon, contact_id):\n raise errors.InvalidArguments('contact does not exist')\n q = \"\"\"delete from contacts where id=%s\"\"\"\n await dbcon.operation(q, (contact_id,))", "def rm_contact_from_addressbook(database, name, surname, database_counter,\n database_ids):\n\n from addressbook.verify_contact import check_if_contact_exists\n\n if check_if_contact_exists(database, name, surname, database_counter,\n database_ids)[0] == 'Yes':\n print('The following contact will be removed:')\n id = check_if_contact_exists(database, name, surname, database_counter,\n database_ids)[1]\n print(str(id), '|', database[f'{id}']['first name'], '|',\n database[f'{id}']['last name'],\n '|', database[f'{id}']['address'], '|',\n database[f'{id}']['mobile phone'])\n del database[f'{id}']\n print('\\n')\n return id\n else:\n print('There is no such contact for deletion!')\n print('\\n')\n return 0", "def remove_contacts(self, contacts, group=None, group_uuid=None):\n payload = self._build_params(contacts=contacts, 
action='remove', group=group, group_uuid=group_uuid)\n self._post('contact_actions', None, payload)", "def delete_contact_from_personal_addressbook(self, contact_id, give_json=False):\n\n url = Constants.BASE_URL + 'users/addressbooks/personal'\n response = requests.delete(url=url, params={'key': self.user_access_token, 'contact_id': contact_id})\n\n if give_json:\n return response.json()\n else:\n return response.text", "def test_delete_contact_list(self):\n contact_list = ContactList.objects.first()\n\n url, parsed = self.prepare_urls('v1:contact_list-detail', subdomain=self.company.subdomain, kwargs={'pk':contact_list.id})\n \n response = self.client.delete(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.delete(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n \n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n content = json.loads(response.content)\n self.assertEqual(self.contact_lists_count-1, len(content))", "def delete_contact(self):\n delete_first_name = input(\"Enter first name that you want to delete\\n\")\n for contact in self.contact_list:\n if contact.first_name == delete_first_name:\n #print(str(contact))\n self.contact_list.remove(contact)\n else:\n print(f\"No contact is present with first name {delete_first_name} \")", "def expire_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='expire'))", "def del_contact_all(self):\n\n send_key(KEY_MENU)\n delstr = contact.get_value('contact_delete')\n if search_text(delstr):\n click_textview_by_text(delstr)\n click_checkbox_by_id('select_all_check')\n click_button_by_id('btn_ok')\n click_button_by_index(1)\n else:\n goback()\n\n sleep(2) #take a rest to wait view ...", "async def get_contacts(self, **kwargs) -> List[CertificateContact]:\n contacts = await self._client.get_certificate_contacts(\n vault_base_url=self._vault_url, **kwargs\n )\n return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]", "def deleteAccountContact(self,accountId, contactId):\r\n\r\n\t\turl = MozuUrl(\"/api/commerce/customer/accounts/{accountId}/contacts/{contactId}\", \"DELETE\", UrlLocation.TenantPod, False);\r\n\t\turl.formatUrl(\"accountId\", accountId);\r\n\t\turl.formatUrl(\"contactId\", contactId);\r\n\t\tself.client.withResourceUrl(url).execute();", "def unblock_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='unblock'))", "def delete(self, id):\n return Contacts().delete_one(id)", "def RemoveContact(self, contact):\n\t\tself.client.Delete(contact)", "async def delete_contact_from_active_monitor(dbcon: DBConnection, contact_id: int, monitor_id: int) -> None:\n q = \"\"\"delete from active_monitor_contacts where active_monitor_id=%s and contact_id=%s\"\"\"\n q_args = (monitor_id, contact_id)\n await dbcon.operation(q, q_args)", "def test_delete_contact(self):\n self.new_contact.save_contact()\n # new contact\n test_contact = Contact(\"Test\", \"user\", \"0745639300\", \"[email protected]\")\n # new contact saved\n test_contact.save_contact()\n # For deleting the new contact\n 
self.new_contact.delete_contact()\n self.assertEqual(len(Contact.contact_list), 1)", "def test_delete_contact(self):\n self.new_contact.save_contact()\n test_contact = Contact(\"Test\", \"User\", 254712345678, \"[email protected]\") # new contact\n test_contact.save_contact()\n self.new_contact.delete_contact() # delete a contact object\n self.assertEqual(len(Contact.contact_list), 1)", "async def delete_contact_from_contact_group(dbcon: DBConnection, contact_group_id: int, contact_id: int) -> None:\n q = \"\"\"delete from contact_group_contacts where contact_group_id=%s and contact_id=%s\"\"\"\n q_args = (contact_group_id, contact_id)\n await dbcon.operation(q, q_args)", "def delete(max_iterations):\n persons = get_persons()\n count = 0\n for person in persons:\n if count > max_iterations:\n return\n count += 1\n if choice([0, 1]):\n params = {\"event\": \"contact.delete\",\n \"data\": {\"id\": person['id']}}\n request(params)", "def removeContact(self, LibraryID, ListID, RecipientID, **kwargs):\n if not self.request(\"removeContact\",\n Product='TA',\n LibraryID=LibraryID,\n ListID=ListID,\n RecipientID=RecipientID,\n **kwargs):\n print(self.last_error_message)\n return None\n return self.json_response", "def contacts(self, contacts):\n\n self._contacts = contacts" ]
[ "0.7537571", "0.7379823", "0.63402975", "0.6199307", "0.615296", "0.6111035", "0.6089803", "0.60673594", "0.5951288", "0.5945933", "0.58693403", "0.58645344", "0.58433", "0.5814317", "0.5788029", "0.5750317", "0.5739216", "0.5732476", "0.5683771", "0.56610817", "0.5631036", "0.55701464", "0.5548464", "0.55424905", "0.552256", "0.54957217", "0.54680324", "0.54410386", "0.541677", "0.5409133" ]
0.8016096
0
Gets the creation operation of a certificate. Requires the certificates/get permission.
async def get_certificate_operation(self, certificate_name: str, **kwargs) -> CertificateOperation: bundle = await self._client.get_certificate_operation( vault_base_url=self.vault_url, certificate_name=certificate_name, **kwargs ) return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def CreateCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "async def create_certificate(\n self, certificate_name: str, policy: CertificatePolicy, **kwargs\n ) -> Union[KeyVaultCertificate, CertificateOperation]:\n if not (policy.san_emails or policy.san_user_principal_names or policy.san_dns_names or policy.subject):\n raise ValueError(NO_SAN_OR_SUBJECT)\n\n polling_interval = kwargs.pop(\"_polling_interval\", None)\n if polling_interval is None:\n polling_interval = 5\n enabled = kwargs.pop(\"enabled\", None)\n\n if enabled is not None:\n attributes = self._models.CertificateAttributes(enabled=enabled)\n else:\n attributes = None\n\n parameters = self._models.CertificateCreateParameters(\n certificate_policy=policy._to_certificate_policy_bundle(),\n certificate_attributes=attributes,\n tags=kwargs.pop(\"tags\", None),\n )\n\n cert_bundle = await self._client.create_certificate(\n vault_base_url=self.vault_url,\n certificate_name=certificate_name,\n parameters=parameters,\n **kwargs\n )\n\n create_certificate_operation = CertificateOperation._from_certificate_operation_bundle(cert_bundle)\n\n command = partial(self.get_certificate_operation, certificate_name=certificate_name, **kwargs)\n\n get_certificate_command = partial(self.get_certificate, certificate_name=certificate_name, **kwargs)\n\n create_certificate_polling = CreateCertificatePollerAsync(\n get_certificate_command=get_certificate_command, interval=polling_interval\n )\n def no_op(*_, **__) -> Any: # The deserialization callback is ignored based on polling implementation\n pass\n return await async_poller(command, create_certificate_operation, no_op, create_certificate_polling)", "def get_certificate_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args.get('vault_name', '')\n certificate_name = args.get('certificate_name', '')\n certificate_version = args.get('certificate_version', '')\n response = client.get_certificate_request(\n vault_name, certificate_name, certificate_version)\n\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['policy']['attributes'] = convert_time_attributes_to_iso(outputs['policy']['attributes'])\n\n readable_response = {'certificate_id': response.get(\n 'id'), **convert_attributes_to_readable(response.get('attributes', {}).copy())}\n outputs[VAULT_NAME_CONTEXT_FIELD] = vault_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Information',\n readable_response,\n ['certificate_id', 'enabled', 'create_time', 'update_time', 'expiry_time'],\n removeNull=True,\n headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.Certificate',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output,\n ignore_auto_extract=True\n )\n\n return command_results", "def creation_data(self) -> 'outputs.CreationDataResponse':\n return pulumi.get(self, \"creation_data\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n 
return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"certificate\")", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def get_operation(\n self,\n ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"get_operation\" not in self._stubs:\n self._stubs[\"get_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/GetOperation\",\n request_serializer=operations_pb2.GetOperationRequest.SerializeToString,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"get_operation\"]", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"certificate\")", "def getCertificate(self, req):\n return dumpCert(createCert(parseCertReqStr(req), self._cert,\n self._key))", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[str]] = None,\n certificate_name: Optional[pulumi.Input[str]] = None,\n domain: Optional[pulumi.Input[str]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None) -> 'Certificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _CertificateState.__new__(_CertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"certificate_name\"] = certificate_name\n __props__.__dict__[\"domain\"] = domain\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"private_key\"] = private_key\n return Certificate(resource_name, opts=opts, __props__=__props__)", "def get_server_certificate(latest=None,name=None,name_prefix=None,path_prefix=None,opts=None):\n __args__ = dict()\n\n __args__['latest'] = 
latest\n __args__['name'] = name\n __args__['namePrefix'] = name_prefix\n __args__['pathPrefix'] = path_prefix\n if opts is None:\n opts = pulumi.InvokeOptions()\n if opts.version is None:\n opts.version = utilities.get_version()\n __ret__ = pulumi.runtime.invoke('aws:iam/getServerCertificate:getServerCertificate', __args__, opts=opts).value\n\n return AwaitableGetServerCertificateResult(\n arn=__ret__.get('arn'),\n certificate_body=__ret__.get('certificateBody'),\n certificate_chain=__ret__.get('certificateChain'),\n expiration_date=__ret__.get('expirationDate'),\n latest=__ret__.get('latest'),\n name=__ret__.get('name'),\n name_prefix=__ret__.get('namePrefix'),\n path=__ret__.get('path'),\n path_prefix=__ret__.get('pathPrefix'),\n upload_date=__ret__.get('uploadDate'),\n id=__ret__.get('id'))", "def GetOperation(name):\n client = GetClientInstance()\n messages = client.MESSAGES_MODULE\n request = messages.ApikeysOperationsGetRequest(name=name)\n try:\n return client.operations.Get(request)\n except (apitools_exceptions.HttpForbiddenError,\n apitools_exceptions.HttpNotFoundError) as e:\n exceptions.ReraiseError(e, exceptions.OperationErrorException)", "def pickup_certificate(self):\n return self.__query(\"certificatePickup\", data)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n certificate_id: Optional[pulumi.Input[int]] = None,\n creation_timestamp: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n expire_time: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None) -> 'SSLCertificate':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SSLCertificateState.__new__(_SSLCertificateState)\n\n __props__.__dict__[\"certificate\"] = certificate\n __props__.__dict__[\"certificate_id\"] = certificate_id\n __props__.__dict__[\"creation_timestamp\"] = creation_timestamp\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"expire_time\"] = expire_time\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"private_key\"] = private_key\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"self_link\"] = self_link\n return SSLCertificate(resource_name, opts=opts, __props__=__props__)", "def get(resource_name, id, opts=None, arn=None, certificate=None, certificate_authority_configuration=None, certificate_chain=None, certificate_signing_request=None, enabled=None, not_after=None, not_before=None, permanent_deletion_time_in_days=None, revocation_configuration=None, serial=None, status=None, tags=None, type=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n __props__[\"arn\"] = arn\n __props__[\"certificate\"] = certificate\n __props__[\"certificate_authority_configuration\"] = certificate_authority_configuration\n __props__[\"certificate_chain\"] = certificate_chain\n __props__[\"certificate_signing_request\"] = certificate_signing_request\n __props__[\"enabled\"] = enabled\n __props__[\"not_after\"] = not_after\n __props__[\"not_before\"] = not_before\n __props__[\"permanent_deletion_time_in_days\"] = permanent_deletion_time_in_days\n 
__props__[\"revocation_configuration\"] = revocation_configuration\n __props__[\"serial\"] = serial\n __props__[\"status\"] = status\n __props__[\"tags\"] = tags\n __props__[\"type\"] = type\n return CertificateAuthority(resource_name, opts=opts, __props__=__props__)", "def certificate(self) -> str:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def certificate(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"certificate\")", "def get_certificate_policy_command(client: KeyVaultClient, args: dict[str, Any]) -> CommandResults:\n vault_name = args['vault_name']\n certificate_name = args['certificate_name']\n response = client.get_certificate_policy_request(\n vault_name, certificate_name)\n outputs = copy.deepcopy(response)\n outputs['attributes'] = convert_time_attributes_to_iso(outputs['attributes'])\n outputs['CertificateName'] = certificate_name\n\n readable_output = tableToMarkdown(f'{certificate_name} Policy Information',\n outputs,\n ['id', 'key_props', 'secret_props',\n 'x509_props', 'issuer', 'attributes'],\n removeNull=True, headerTransform=string_to_table_header)\n command_results = CommandResults(\n outputs_prefix='AzureKeyVault.CertificatePolicy',\n outputs_key_field='id',\n outputs=outputs,\n raw_response=response,\n readable_output=readable_output\n )\n\n return command_results", "def create(self, request, action, *args, **kwargs):\n\t\tallowed_actions = ['get_token']\n\t\tif action in allowed_actions:\n\t\t\treturn getattr(self, '_create_' + action)(request, *args, **kwargs)\n\t\treturn rc(rcs.BAD_REQUEST)", "def create_ops(self):\n return self._create_ops" ]
[ "0.5323745", "0.5290432", "0.5155442", "0.5148315", "0.5133307", "0.5133307", "0.5133307", "0.5118752", "0.5118752", "0.5118752", "0.51068866", "0.51068866", "0.5080824", "0.50439566", "0.50420284", "0.49880618", "0.49764156", "0.4970792", "0.4961988", "0.49596292", "0.49279034", "0.492387", "0.492387", "0.492387", "0.492387", "0.492387", "0.492387", "0.49055514", "0.48833454", "0.48737" ]
0.60303086
0
Cancels an in-progress certificate operation. Requires the certificates/update permission.
async def cancel_certificate_operation(self, certificate_name: str, **kwargs) -> CertificateOperation: bundle = await self._client.update_certificate_operation( vault_base_url=self.vault_url, certificate_name=certificate_name, certificate_operation=self._models.CertificateOperationUpdateParameter(cancellation_requested=True), **kwargs ) return CertificateOperation._from_certificate_operation_bundle(certificate_operation_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _cancel_pending_change(self, cert_obj):\n try:\n change_url = (\n cert_obj.cert_details[\"Akamai\"]\n [\"extra_info\"][\"change_url\"]\n )\n except KeyError:\n status = \"Maybe the domain {} already removed? \" \\\n \"Delete domain operation failed\"\\\n .format(cert_obj.domain_name)\n LOG.info(status)\n\n return self.responder.ssl_certificate_deleted(\n cert_obj.domain_name,\n {\n 'status': status,\n }\n )\n\n headers = {\n 'Accept': 'application/vnd.akamai.cps.change-id.v1+json'\n }\n cps_cancel_url = self.driver.akamai_conf.policy_api_base_url + \\\n change_url[1:]\n cancel_cps = self.cps_api_client.delete(cps_cancel_url,\n headers=headers)\n if cancel_cps.ok:\n status = \"Successfully cancelled the CPS change {0}. \" \\\n .format(change_url)\n LOG.info(status)\n return self.responder.ssl_certificate_deleted(\n cert_obj.domain_name,\n {\n 'status': status\n }\n )\n else:\n status = \"CPS request failed to cancel the CPS change {0}.\" \\\n \"Delete domain operation will be attempted \" \\\n \"again through retry logic\".format(change_url)\n LOG.info(status)\n return self.responder.failed(status)", "def cancel(bot, update):\n bot.sendMessage(chat_id=update.message.chat_id, text=\"As you wish, the operation has been cancelled! 😊\")\n return ConversationHandler.END", "def do_cancel(self):\r\n self.write({'cancelled': True})", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def cancel(self):\n self.succeeded = False\n self.reject()", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "def on_cancel(self):\n self.state = CANCELED\n self._reject()", "async def cancel(self, ctx):\n\n return", "def action_cancel(self):\n self.state = 'canceled'", "def cancel(self):\n self._log.debug(\"About to cancel job {0}\".format(self.id))\n resp = self._api.cancel(self.id)\n\n if resp.success:\n self.update()\n return True\n\n if resp.result.type is None:\n # Call was successful but job was unable to be cancelled.\n return False\n\n else:\n raise resp.result", "def cancel(self):\n self.__canceled = True", "def cancel():", "def cancel(self):\n return self.RES_OK", "def cancel(self):\n self.cancelled = True", "def cancel(self):\n self.cancelled = True", "def cancel(self):\r\n self.require_item()\r\n\r\n url = '{0}/cancel'.format(self.get_url())\r\n request = http.Request('PUT', url)\r\n request.use_xml = False\r\n\r\n return request, parsers.parse_empty", "def cancelJob(_id):\n job = mongo.db.jobs.find_one({'_id': _id})\n tasks = mongo.db.tasks.find({'job': _id})\n for each in tasks:\n _t = ca.AsyncResult(each.get('ctid'))\n _t.revoke()\n job['status'] = 'cancelled'\n \"\"\"Set status of job to cancelled\"\"\"\n mongo.db.jobs.update({'_id': _id}, job)\n \"\"\"Bulk update tasks\"\"\"\n bulk = mongo.db.tasks.initialize_unordered_bulk_op()\n bulk.find({'job': _id, 'status': {'$ne': 'completed'}}).update({\n '$set': {\n 'status': \"cancelled\",\n 'cancelled_on': now(),\n 'slave': None,\n }})\n bulk.execute()\n\n return {'info': 'success'}", "def cancel_retry(self):\n if self._cancel_retry is not None:\n self._cancel_retry.cancel()\n self._cancel_retry = None", "def cancel_retry(self):\n if self._cancel_retry is not None:\n self._cancel_retry.cancel()\n self._cancel_retry = None", "def ctxAbort(*args, **kwargs)->None:\n pass", "def RevokeCertificates(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n 
context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def cancel(update: Update, context: CallbackContext) -> int:\n update.message.reply_text(\n 'Bye! I hope we can talk again some day.', reply_markup=ReplyKeyboardRemove()\n )\n\n logger.info(\"User [%s] canceled BBT conversation using command [/stop].\",\n update.message.from_user.first_name)\n return ConversationHandler.END", "def cancel(self):\n import googleapiclient\n\n # projects.locations.operations/cancel\n operations = self._api.projects().locations().operations()\n\n for job in self.active_jobs:\n request = operations.cancel(name=job.jobname)\n logger.debug(\"Cancelling operation {}\".format(job.jobid))\n try:\n self._retry_request(request)\n except (Exception, BaseException, googleapiclient.errors.HttpError):\n continue\n\n self.shutdown()", "def tpc_abort(self, transaction):\n raise NotImplementedError", "def fusion_api_revoke_certificate(self, name=None, api=None, headers=None):\n return self.ca.revoke(name=name, api=api, headers=headers)", "def cancel(update: Update, context: CallbackContext) -> int:\n context.user_data.clear()\n update.message.reply_text(\"취소 되었습니다.\")\n return ConversationHandler.END", "def cancel(self):\n pass", "def cancel(self):" ]
[ "0.67305464", "0.60831314", "0.6037295", "0.6024934", "0.6024934", "0.6024934", "0.6024934", "0.5966845", "0.5931395", "0.57804435", "0.57093364", "0.5697872", "0.5600302", "0.55759084", "0.55702156", "0.5547477", "0.5547477", "0.55273867", "0.5523492", "0.5518105", "0.5518105", "0.54938006", "0.5491352", "0.548555", "0.54771507", "0.54697645", "0.54610527", "0.5443541", "0.5438673", "0.5421023" ]
0.6692325
1
Merges a certificate or a certificate chain with a key pair existing on the server. Requires the certificates/create permission. Performs the merging of a certificate or certificate chain with a key pair currently available in the service. Make sure when creating the certificate to merge using
async def merge_certificate( self, certificate_name: str, x509_certificates: List[bytes], **kwargs ) -> KeyVaultCertificate: enabled = kwargs.pop("enabled", None) if enabled is not None: attributes = self._models.CertificateAttributes(enabled=enabled) else: attributes = None parameters = self._models.CertificateMergeParameters( x509_certificates=x509_certificates, certificate_attributes=attributes, tags=kwargs.pop("tags", None) ) bundle = await self._client.merge_certificate( vault_base_url=self.vault_url, certificate_name=certificate_name, parameters=parameters, **kwargs ) return KeyVaultCertificate._from_certificate_bundle(certificate_bundle=bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_certificate_chain():\n caext = X509Extension(b\"basicConstraints\", False, b\"CA:true\")\n not_after_date = datetime.date.today() + datetime.timedelta(days=365)\n not_after = not_after_date.strftime(\"%Y%m%d%H%M%SZ\").encode(\"ascii\")\n\n # Step 1\n cakey = PKey()\n cakey.generate_key(TYPE_RSA, 2048)\n cacert = X509()\n cacert.set_version(2)\n cacert.get_subject().commonName = \"Authority Certificate\"\n cacert.set_issuer(cacert.get_subject())\n cacert.set_pubkey(cakey)\n cacert.set_notBefore(b\"20000101000000Z\")\n cacert.set_notAfter(not_after)\n cacert.add_extensions([caext])\n cacert.set_serial_number(0)\n cacert.sign(cakey, \"sha256\")\n\n # Step 2\n ikey = PKey()\n ikey.generate_key(TYPE_RSA, 2048)\n icert = X509()\n icert.set_version(2)\n icert.get_subject().commonName = \"Intermediate Certificate\"\n icert.set_issuer(cacert.get_subject())\n icert.set_pubkey(ikey)\n icert.set_notBefore(b\"20000101000000Z\")\n icert.set_notAfter(not_after)\n icert.add_extensions([caext])\n icert.set_serial_number(0)\n icert.sign(cakey, \"sha256\")\n\n # Step 3\n skey = PKey()\n skey.generate_key(TYPE_RSA, 2048)\n scert = X509()\n scert.set_version(2)\n scert.get_subject().commonName = \"Server Certificate\"\n scert.set_issuer(icert.get_subject())\n scert.set_pubkey(skey)\n scert.set_notBefore(b\"20000101000000Z\")\n scert.set_notAfter(not_after)\n scert.add_extensions(\n [X509Extension(b\"basicConstraints\", True, b\"CA:false\")]\n )\n scert.set_serial_number(0)\n scert.sign(ikey, \"sha256\")\n\n return [(cakey, cacert), (ikey, icert), (skey, scert)]", "def _store_certificate(fullchain, key, domain=None, tag_prefix=None,\n region_name=None, acm_client=None, dry_run=False):\n #pylint:disable=unused-argument\n result = _check_certificate(fullchain, key, domain=domain)\n if not domain:\n domain = result['ssl_certificate']['common_name']\n cert, chain = _split_fullchain(fullchain)\n if not acm_client:\n acm_client = boto3.client('acm', region_name=region_name)\n kwargs = {}\n resp = acm_client.list_certificates()\n for acm_cert in resp['CertificateSummaryList']:\n if acm_cert['DomainName'] == domain:\n LOGGER.info(\"A certificate for domain %s has already been\"\\\n \" imported as %s - replacing\",\n domain, acm_cert['CertificateArn'])\n kwargs['CertificateArn'] = acm_cert['CertificateArn']\n break\n if not dry_run:\n resp = acm_client.import_certificate(\n Certificate=cert.encode('ascii'),\n PrivateKey=key.encode('ascii'),\n CertificateChain=chain.encode('ascii'),\n **kwargs)\n LOGGER.info(\"%s (re-)imported TLS certificate %s as %s\",\n tag_prefix, result['ssl_certificate'], resp['CertificateArn'])\n result.update({'CertificateArn': resp['CertificateArn']})\n return result", "def add_cert_and_key(priv_key, cert_list, alias):\n logger.info(\"Writing certificate and private key to filesystem\")\n\n # Determine which directory to store certs in\n if command_exists(\"update-ca-trust\"):\n ca_dir = \"/etc/pki/tls\"\n elif command_exists(\"update-ca-certificates\"):\n ca_dir = \"/etc/ssl\"\n else:\n logger.error(\"Cannot determine certs directory\")\n raise OSError(\n \"OS is missing a required command for CA trust. 
Either update-ca-trust or \"\n \"update-ca-certificates is required.\"\n )\n\n logger.info(\"Using cert directory:\" + ca_dir)\n\n with open(ca_dir + \"/private/\" + alias + \".key\", \"a\") as f:\n f.write(str(priv_key))\n\n for cert in cert_list:\n with open(ca_dir + \"/certs/\" + alias + \".crt\", \"a\") as f:\n f.write(cert)", "def create_server_certs():\n global server_key_files, server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if not Path(server_key_files[\"key\"]).is_file() or not Path(server_key_files[\"crt\"]).is_file():\n print(\"create new encryption cert\\n\")\n create_server_certs_enc()\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key\", \"crt\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()\n\n server_keystore[\"key-sign\"] = server_keystore[\"key\"]\n server_keystore[\"crt-sign\"] = server_keystore[\"crt\"]\n\n if not Path(server_key_files[\"key-sign\"]).is_file() or not Path(server_key_files[\"crt-sign\"]).is_file():\n print(\"create new signing cert\\n\")\n if not same_enc_sign_cert:\n create_server_certs_sign()\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"w\") as f:\n f.write(server_keystore[f_item])\n f.close()\n else:\n for f_item in [\"key-sign\", \"crt-sign\"]:\n with open(server_key_files[f_item], \"r\") as f:\n server_keystore[f_item] = f.read()\n f.close()", "def put_certificate(self, target, who, args, _files, _user_path):\n name = self.arg_get(args, 'name', str)\n if not commonl.verify_str_safe(name, do_raise = False):\n raise ValueError(\n f\"{name}: invalid certificate name, only [-_a-zA-Z0-9] allowed\")\n\n with target.target_owned_and_locked(who):\n target.timestamp()\n\n cert_path = os.path.join(target.state_dir, \"certificates\")\n cert_client_path = os.path.join(target.state_dir, \"certificates_client\")\n self._setup_maybe(target, cert_path, cert_client_path)\n\n client_key_path = os.path.join(cert_client_path, name + \".key\")\n client_req_path = os.path.join(cert_client_path, name + \".req\")\n client_cert_path = os.path.join(cert_client_path, name + \".cert\")\n\n if os.path.isfile(client_key_path) \\\n and os.path.isfile(client_cert_path):\t# already made?\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": False,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })\n\n try:\n subprocess.run(\n f\"openssl genrsa -out {client_key_path} {self.key_size}\".split(),\n stdin = None, timeout = 5,\n capture_output = True, cwd = cert_path, check = True)\n allocid = target.fsdb.get(\"_alloc.id\", \"UNKNOWN\")\n subprocess.run(\n f\"openssl req -new -key {client_key_path} -out {client_req_path}\"\n f\" -subj /C=LC/ST=Local/L=Local/O=TCF-Signing-Authority-{target.id}-{allocid}/CN=TCF-{name}\".split(),\n check = True, cwd = cert_path,\n stdout = subprocess.PIPE, stderr = subprocess.STDOUT)\n target.log.debug(f\"{name}: created client's certificate\")\n\n # Issue the client certificate using the cert request and the CA cert/key.\n # note we run in the cert_path directory, so the ca.*\n # files are there\n subprocess.run(\n f\"openssl x509 -req -in {client_req_path} -CA ca.cert\"\n \" -CAkey ca.key -set_serial 101 -extensions client\"\n f\" -days 365 -outform PEM -out {client_cert_path}\".split(),\n stdin = None, timeout = 5,\n 
capture_output = True, cwd = cert_path, check = True)\n except subprocess.CalledProcessError as e:\n target.log.error(f\"command {' '.join(e.cmd)} failed: {e.output}\")\n self._client_wipe(name, cert_client_path)\t# don't leave things half there\n raise\n\n with open(client_key_path) as keyf, \\\n open(client_cert_path) as certf:\n return dict({\n \"name\": name,\n \"created\": True,\n \"key\": keyf.read(),\n \"cert\": certf.read(),\n })", "def upload(name, certfile, keyfile, chainfile):\n try:\n cmd = client().certificates.upload\n job, data = cmd(name, certfile, keyfile, chainfile)\n handle_job(job)\n except Exception as e:\n raise CLIException(str(e))", "def new_cert(self, commonname, extensions=None):\n\n serial = self._get_serial()\n pkey = self._create_pkey(commonname, serial)\n self._create_cert(pkey, commonname, serial, extensions)", "def update_cert(c, stack_name, domain_name, profile, create=False):\n action = 'create' if create else 'update'\n\n with chdir(WORKING_DIR):\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}')\n # Cert also needs adding to us-east-1 to be used by CloudFront\n aws('cloudformation', f'{action}-stack',\n '--stack-name', f'{stack_name}-cert',\n '--template-body', f'file://cert.yaml',\n '--parameters',\n f'ParameterKey=DomainName,ParameterValue={domain_name}',\n f'--profile', f'{profile}',\n '--region', 'us-east-1')", "def push_ssl_crt():\n logger.info(u\"Pushing SSl Certificates\")\n key = '%(config_folder)s/%(ssl_key)s' % env\n crt = '%(config_folder)s/%(ssl_crt)s' % env\n bundle = '%(config_folder)s/rapidssl_ca_bundle.pem' % env\n logger.info(u\"Using SSL keys and certs at %s and %s\" % (key, crt))\n\n # Putting to /tmp and moving for permission purposes\n put(key, '/tmp/_.policystat.com.key')\n sudo('mv /tmp/_.policystat.com.key /etc/ssl/private/_.policystat.com.key')\n sudo('chmod 640 /etc/ssl/private/_.policystat.com.key')\n sudo('chown root:ssl-cert /etc/ssl/private/_.policystat.com.key')\n\n put(crt, '/tmp/_.policystat.com.crt')\n put(bundle, '/tmp/rapidssl_ca_bundle.pem')\n # Combine the crt with the rapidssl intermediate bundle\n sudo('cat /tmp/_.policystat.com.crt /tmp/rapidssl_ca_bundle.pem > \\\n /tmp/_.policystat.com.crt.bundled')\n sudo(\n 'mv /tmp/_.policystat.com.crt.bundled '\n '/etc/ssl/certs/_.policystat.com.crt'\n )\n sudo('chmod 777 /etc/ssl/certs/_.policystat.com.crt')", "def cmd_cert_clone(hostname, port, keyfile, certfile, copy_extensions, expired, verbose):\n\n context = ssl.create_default_context()\n\n with socket.create_connection((hostname, port), timeout=3) as sock:\n with context.wrap_socket(sock, server_hostname=hostname) as ssock:\n original = ssock.getpeercert(binary_form=True)\n\n key, cert = certclone(original, copy_extensions=copy_extensions, expired=expired)\n\n keyfile.write(key)\n certfile.write(cert)", "def create_cert(self, cert_file, key_file):\n if os.path.isfile(cert_file) and os.path.isfile(key_file):\n return cert_file, key_file\n\n k = crypto.PKey()\n k.generate_key(crypto.TYPE_RSA, 2048)\n cert = crypto.X509()\n cert.get_subject().C = \"US\"\n cert.get_subject().ST = \"CO\"\n cert.get_subject().L = \"Denver\"\n cert.get_subject().CN = gethostname()\n cert.get_subject().O = \"Metropolitan State University of Denver\"\n cert.get_subject().OU = \"Computer Science\"\n cert.set_serial_number(6)\n cert.gmtime_adj_notBefore(0)\n 
cert.gmtime_adj_notAfter(365*24*60*60)\n cert.set_issuer(cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(k, 'sha1')\n\n open(join(cert_file), 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n open(join(key_file), \"w\").write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))\n return cert_file, key_file", "def sign(self, keypair, certs, crls = None, no_certs = False):\n\n if isinstance(certs, X509):\n cert = certs\n certs = ()\n else:\n cert = certs[0]\n certs = certs[1:]\n\n if crls is None:\n crls = ()\n elif isinstance(crls, CRL):\n crls = (crls,)\n\n if self.debug_cms_certs:\n logger.debug(\"Signing with cert issuer %s subject %s SKI %s\",\n cert.getIssuer(), cert.getSubject(), cert.hSKI())\n for i, c in enumerate(certs):\n logger.debug(\"Additional cert %d issuer %s subject %s SKI %s\",\n i, c.getIssuer(), c.getSubject(), c.hSKI())\n\n self._sign(cert.get_POW(),\n keypair.get_POW(),\n [x.get_POW() for x in certs],\n [c.get_POW() for c in crls],\n rpki.POW.CMS_NOCERTS if no_certs else 0)", "def load_cert_chain(self, certfile, keyfile: Optional[Any] = ...):\n ...", "def _generate_certificates(certfile_path: str, keyfile_path: str,\n common_name: str) -> None:\n ca_key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n _CA_KEY)\n ca_cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,\n _CA_CERT)\n\n k = OpenSSL.crypto.PKey()\n k.generate_key(OpenSSL.crypto.TYPE_RSA, 4096)\n\n cert = OpenSSL.crypto.X509()\n cert.get_subject().C = 'US'\n cert.get_subject().CN = common_name\n cert.set_serial_number(random.randint(0, 2**64))\n cert.gmtime_adj_notBefore(0)\n cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)\n cert.set_issuer(ca_cert.get_subject())\n cert.set_pubkey(k)\n cert.sign(ca_key, 'sha512')\n with open(certfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,\n cert).decode(\"utf-8\"))\n f.write(_CA_CERT)\n with open(keyfile_path, \"w\") as f:\n f.write(\n OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,\n k).decode(\"utf-8\"))", "def synchronize_ca(fatal=False):\n paths_to_sync = []\n peer_service_actions = {'restart': []}\n peer_actions = []\n\n if bool_from_string(config('https-service-endpoints')):\n log(\"Syncing all endpoint certs since https-service-endpoints=True\",\n level=DEBUG)\n paths_to_sync.append(SSL_DIR)\n paths_to_sync.append(CA_CERT_PATH)\n # We need to restart peer apache services to ensure they have picked up\n # new ssl keys.\n peer_service_actions['restart'].append('apache2')\n peer_actions.append('update-ca-certificates')\n\n if bool_from_string(config('use-https')):\n log(\"Syncing keystone-endpoint certs since use-https=True\",\n level=DEBUG)\n paths_to_sync.append(SSL_DIR)\n paths_to_sync.append(APACHE_SSL_DIR)\n paths_to_sync.append(CA_CERT_PATH)\n # We need to restart peer apache services to ensure they have picked up\n # new ssl keys.\n peer_service_actions['restart'].append('apache2')\n peer_actions.append('update-ca-certificates')\n\n # NOTE: certs needed for token signing e.g. 
pki and revocation list query.\n log(\"Syncing token certs\", level=DEBUG)\n paths_to_sync.append(PKI_CERTS_DIR)\n peer_actions.append('ensure-pki-permissions')\n\n if not paths_to_sync:\n log(\"Nothing to sync - skipping\", level=DEBUG)\n return {}\n\n if not os.path.isdir(SYNC_FLAGS_DIR):\n mkdir(SYNC_FLAGS_DIR, SSH_USER, 'keystone', 0o775)\n\n for action, services in peer_service_actions.iteritems():\n create_peer_service_actions(action, set(services))\n\n create_peer_actions(peer_actions)\n\n paths_to_sync = list(set(paths_to_sync))\n stage_paths_for_sync(paths_to_sync)\n\n hash1 = hashlib.sha256()\n for path in paths_to_sync:\n update_hash_from_path(hash1, path)\n\n cluster_rel_settings = {'ssl-cert-available-updates': SSL_SYNC_ARCHIVE,\n 'sync-hash': hash1.hexdigest()}\n\n synced_units = unison_sync([SSL_SYNC_ARCHIVE, SYNC_FLAGS_DIR])\n if synced_units:\n # Format here needs to match that used when peers request sync\n synced_units = [u.replace('/', '-') for u in synced_units]\n cluster_rel_settings['ssl-synced-units'] = \\\n json.dumps(synced_units)\n\n trigger = str(uuid.uuid4())\n log(\"Sending restart-services-trigger=%s to all peers\" % (trigger),\n level=DEBUG)\n cluster_rel_settings['restart-services-trigger'] = trigger\n\n log(\"Sync complete\", level=DEBUG)\n return cluster_rel_settings", "def Run(self, args):\n holder = base_classes.ComputeApiHolder(self.ReleaseTrack())\n client = holder.client\n\n ssl_certificate_ref = self.SSL_CERTIFICATE_ARG.ResolveAsResource(\n args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)\n\n certificate = files.ReadFileContents(args.certificate)\n private_key = files.ReadFileContents(args.private_key)\n\n if ssl_certificates_utils.IsRegionalSslCertificatesRef(ssl_certificate_ref):\n request = client.messages.ComputeRegionSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n region=ssl_certificate_ref.region,\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.regionSslCertificates\n else:\n request = client.messages.ComputeSslCertificatesInsertRequest(\n sslCertificate=client.messages.SslCertificate(\n name=ssl_certificate_ref.Name(),\n certificate=certificate,\n privateKey=private_key,\n description=args.description),\n project=ssl_certificate_ref.project)\n collection = client.apitools_client.sslCertificates\n\n return client.MakeRequests([(collection, 'Insert', request)])", "def create_certs(application_name, ip, issuer_name, signing_key):\n logging.info(\"Creating cert for {}\".format(application_name))\n # The IP is used as the CN for backward compatability and as an\n # alternative_name for forward comapability.\n (key, cert) = zaza.openstack.utilities.cert.generate_cert(\n ip,\n issuer_name=ISSUER_NAME,\n alternative_names=[ip],\n signing_key=signing_key)\n APP_CERT_DIR = os.path.join(CERT_DIR, application_name)\n if not os.path.exists(APP_CERT_DIR):\n os.makedirs(APP_CERT_DIR)\n write_cert(APP_CERT_DIR, 'cert.pem', cert)\n write_cert(APP_CERT_DIR, 'cert.key', key)", "def create_or_update(\n self, resource_group_name, provisioning_service_name, certificate_name, if_match=None, certificate=None, custom_headers=None, raw=False, **operation_config):\n certificate_description = models.CertificateBodyDescription(certificate=certificate)\n\n # Construct URL\n url = 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}'\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self.config.subscription_id\", self.config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'provisioningServiceName': self._serialize.url(\"provisioning_service_name\", provisioning_service_name, 'str'),\n 'certificateName': self._serialize.url(\"certificate_name\", certificate_name, 'str', max_length=256)\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {}\n query_parameters['api-version'] = self._serialize.query(\"self.api_version\", self.api_version, 'str')\n\n # Construct headers\n header_parameters = {}\n header_parameters['Content-Type'] = 'application/json; charset=utf-8'\n if self.config.generate_client_request_id:\n header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())\n if custom_headers:\n header_parameters.update(custom_headers)\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n if self.config.accept_language is not None:\n header_parameters['accept-language'] = self._serialize.header(\"self.config.accept_language\", self.config.accept_language, 'str')\n\n # Construct body\n body_content = self._serialize.body(certificate_description, 'CertificateBodyDescription')\n\n # Construct and send request\n request = self._client.put(url, query_parameters)\n response = self._client.send(\n request, header_parameters, body_content, stream=False, **operation_config)\n\n if response.status_code not in [200]:\n raise models.ErrorDetailsException(self._deserialize, response)\n\n deserialized = None\n\n if response.status_code == 200:\n deserialized = self._deserialize('CertificateResponse', response)\n\n if raw:\n client_raw_response = ClientRawResponse(deserialized, response)\n return client_raw_response\n\n return deserialized", "def CreateStarCert(filename, log = logging):\n temp1 = tempfile.mkstemp(prefix = 'ssl_proxy')\n temp2 = tempfile.mkstemp(prefix = 'ssl_proxy')\n\n cert_fields = { \"C\": \"US\", \"ST\": \"**INSECURE CONNECTION**\",\n \"L\": \"**INSECURE CONNECTION**\",\n \"O\": \"**INSECURE CONNECTION**\",\n \"OU\": \"**INSECURE CONNECTION**\",\n \"CN\": \"*\" }\n\n cert_valid_days = 1\n\n cert_string = '/C=%(C)s/ST=%(ST)s/L=%(L)s/O=%(O)s/OU=%(OU)s/CN=%(CN)s' % \\\n cert_fields\n\n openssl_command = 'openssl req -newkey rsa:1024 -keyout \"%s\" -nodes ' \\\n '-x509 -days 365 -out \"%s\" -subj \"%s\" -set_serial 0 -days %s ' \\\n '-batch' % (temp1[1], temp2[1], cert_string, cert_valid_days)\n\n find_openssl = os.system('which openssl > /dev/null')\n\n if not find_openssl == 0:\n log.error('Could not find openssl. 
(Used \"which openssl\" to search)')\n raise OSError, 'Command \"which openssl\" returned: %s' % find_openssl\n\n log.info('Running command: %s' % openssl_command)\n openssl_status = os.system(openssl_command)\n if not openssl_status == 0:\n raise OSError, 'Attempt to run openssl returned: %s' % openssl_status\n\n # Extract the keys into strings.\n key = os.read(temp1[0], 2048)\n cert = os.read(temp2[0], 2048)\n\n os.close(temp1[0])\n os.close(temp2[0])\n\n os.unlink(temp1[1])\n os.unlink(temp2[1])\n\n new_cert = open(filename, 'wb')\n new_cert.write('%s\\n%s' % (key, cert))\n\n new_cert.close()\n\n log.info('Successfully created %s' % filename)\n return True", "def upload_server_cert(self, cert_name, cert_body, private_key,\r\n cert_chain=None, path=None):\r\n params = {'ServerCertificateName' : cert_name,\r\n 'CertificateBody' : cert_body,\r\n 'PrivateKey' : private_key}\r\n if cert_chain:\r\n params['CertificateChain'] = cert_chain\r\n if path:\r\n params['Path'] = path\r\n return self.get_response('UploadServerCertificate', params,\r\n verb='POST')", "def create_server_certs_sign():\n global server_keystore\n\n dn_sign = \"/CN=server certificate sign RSA-PSS\"\n key_pair_rsa_sign = create_csr_pss(dn_sign)\n server_keystore[\"key-sign\"] = key_pair_rsa_sign[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt-sign\"] = sign_csr(key_pair_rsa_sign[\"pub\"], dn_sign, san)", "def create_server_certs_enc():\n global server_keystore, config\n\n same_enc_sign_cert = config[\"config\"][\"same_enc_sign_cert\"]\n if same_enc_sign_cert:\n dn = \"/CN=server certificate RSA\"\n else:\n dn = \"/CN=server certificate encryption RSA\"\n key_pair_rsa = create_csr(dn)\n server_keystore[\"key\"] = key_pair_rsa[\"key\"]\n san = [f'URI.1 = {uuid.uuid4().urn}']\n server_keystore[\"crt\"] = sign_csr(key_pair_rsa[\"pub\"], dn, san)", "def deploy_cert(self, domain, cert_path, key_path, chain_path, fullchain_path):\n if self.config.rsa_key_size > 2048:\n print(\n \"The maximum public key size allowed for Cloudfront is 2048 (\"\n \"https://docs.aws.amazon.com/AmazonCloudFront/latest\"\n \"/DeveloperGuide/cnames-and-https-requirements.html)\\n\"\n \"Please, use --rsa_key_size 2048 or edit your cli.ini\")\n sys.exit(1)\n client = boto3.client('iam')\n cf_client = boto3.client('cloudfront')\n\n name = \"le-%s\" % domain\n body = open(cert_path).read()\n key = open(key_path).read()\n chain = open(chain_path).read()\n\n suffix = \"-%i\" % int(os.path.getmtime(cert_path))\n\n # Check if certificate already exists\n certificates = client.list_server_certificates(\n PathPrefix=\"/cloudfront/letsencrypt/\"\n )\n cert_id = None\n for cert in certificates['ServerCertificateMetadataList']:\n if cert['ServerCertificateName'] == (name + suffix):\n cert_id = cert['ServerCertificateId']\n\n # If certificate doesn't already exists, upload cert to IAM\n if not cert_id:\n response = client.upload_server_certificate(\n Path=\"/cloudfront/letsencrypt/\",\n ServerCertificateName=name + suffix,\n CertificateBody=body,\n PrivateKey=key,\n CertificateChain=chain\n )\n cert_id = response['ServerCertificateMetadata']['ServerCertificateId']\n\n # Update CloudFront config to use the new one\n cf_cfg = cf_client.get_distribution_config(Id=self.conf('cf-distribution-id'))\n cf_cfg['DistributionConfig']['ViewerCertificate']['IAMCertificateId'] = cert_id\n cf_cfg['DistributionConfig']['ViewerCertificate']['Certificate'] = cert_id\n cf_cfg['DistributionConfig']['ViewerCertificate']['CertificateSource'] = 'iam'\n\n # Set the 
default mode to SNI-only to avoid surprise charges\n if 'SSLSupportMethod' not in cf_cfg['DistributionConfig']['ViewerCertificate']:\n cf_cfg['DistributionConfig']['ViewerCertificate']['SSLSupportMethod'] = 'sni-only'\n cf_cfg['DistributionConfig']['ViewerCertificate']['MinimumProtocolVersion'] = 'TLSv1'\n\n try:\n cf_cfg['DistributionConfig']['ViewerCertificate'].pop('CloudFrontDefaultCertificate')\n except KeyError:\n pass\n try:\n cf_cfg['DistributionConfig']['ViewerCertificate'].pop('ACMCertificateArn')\n except KeyError:\n pass\n response = cf_client.update_distribution(DistributionConfig=cf_cfg['DistributionConfig'],\n Id=self.conf('cf-distribution-id'),\n IfMatch=cf_cfg['ETag'])\n\n # Delete old certs\n certificates = client.list_server_certificates(\n PathPrefix=\"/cloudfront/letsencrypt/\"\n )\n for cert in certificates['ServerCertificateMetadataList']:\n if (cert['ServerCertificateName'].startswith(name) and\n cert['ServerCertificateName'] != name + suffix):\n try:\n client.delete_server_certificate(\n ServerCertificateName=cert['ServerCertificateName']\n )\n except botocore.exceptions.ClientError as e:\n logger.error(e)", "def create_x509_cert(privkey, pubkey, subject_info, issuer_info, days):\n if not isinstance(subject_info, CertInfo):\n info = CertInfo()\n info.load_from_existing(subject_info)\n subject_info = info\n if not isinstance(issuer_info, CertInfo):\n info = CertInfo()\n info.load_from_existing(issuer_info)\n issuer_info = info\n\n dt_now = datetime.utcnow()\n dt_start = dt_now - timedelta(hours=1)\n dt_end = dt_now + timedelta(days=days)\n\n builder = (x509.CertificateBuilder()\n .subject_name(subject_info.get_name())\n .issuer_name(issuer_info.get_name())\n .not_valid_before(dt_start)\n .not_valid_after(dt_end)\n .serial_number(int(uuid.uuid4()))\n .public_key(pubkey))\n\n builder = subject_info.install_extensions(builder)\n\n # SubjectKeyIdentifier\n ext = x509.SubjectKeyIdentifier.from_public_key(pubkey)\n builder = builder.add_extension(ext, critical=False)\n\n # AuthorityKeyIdentifier\n ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(privkey.public_key())\n builder = builder.add_extension(ext, critical=False)\n\n # IssuerAlternativeName\n if issuer_info.san:\n ext = x509.IssuerAlternativeName(issuer_info.get_san_gnames())\n builder = builder.add_extension(ext, critical=False)\n\n # final cert\n cert = builder.sign(private_key=privkey, algorithm=SHA256(), backend=get_backend())\n return cert", "def create_pki():\n os.mkdir(pki_dir)\n os.mkdir(f'{pki_dir}/newcerts')\n Path(f'{pki_dir}/index.txt').touch()\n with open(f'{pki_dir}/serial', 'w') as serial_file:\n serial_file.write('00000000')\n serial_file.close()\n create_CA('/CN=My cool CA/O=Honest Achmed/OU=Used Cars/C=EU')", "def save(self, cert_path: Union[Path, str], key_path: Union[Path, str]):\n cert_path, key_path = Path(cert_path), Path(key_path)\n\n cert_path.parent.mkdir(parents=True, exist_ok=True)\n with cert_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, self.cert))\n\n key_path.parent.mkdir(parents=True, exist_ok=True)\n with key_path.open(\"wb\") as file:\n file.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.key))", "def create_ssl_cert_request ( ssl_hostnames ) :\n first_hostname = ssl_hostnames[ 0 ]\n csr_filename = get_ssl_csr_filename( first_hostname )\n key_filename = get_ssl_key_filename( first_hostname )\n openssl_cnf = \"\"\"\n[req]\ndistinguished_name = req_distinguished_name\nreq_extensions = 
san_ext\n\n[req_distinguished_name]\ncountryName_default = US\nstateOrProvinceName_default = New York\nlocalityName_default = New York\norganizationalUnitName_default = Home Box Office, Inc\ncommonName_default = \"\"\" + first_hostname + \"\"\"\n\n[san_ext]\nbasicConstraints = CA:FALSE\nkeyUsage = nonRepudiation, digitalSignature, keyEncipherment\nsubjectAltName = @sans\n\n[sans]\n\"\"\"\n counter = 0\n for hostname in ssl_hostnames :\n counter += 1\n openssl_cnf += 'DNS.' + str( counter ) + ' = ' + hostname + '\\n'\n\n with open( first_hostname, 'w' ) as f :\n f.write( openssl_cnf )\n cmd = 'openssl req -new -newkey rsa:2048 -nodes -out ' + csr_filename + ' -keyout ' + key_filename\n cmd += ' -config ' + first_hostname + ' -subj \"/C=US/ST=New York/L=New York/O=Home Box Office Inc/CN=' + first_hostname + '\"'\n keygen = subprocess.call( cmd, shell = True )\n os.remove( first_hostname )\n if keygen != 0 :\n print \"Generation of SSL request failed!\"\n return None\n\n return { 'csr-filename' : csr_filename, 'key-filename' : key_filename }", "def _bpki_certify(cls, keypair, issuer_name, subject_name, subject_key,\n serial, now, notAfter, pathLenConstraint, is_ca):\n\n if now is None:\n now = rpki.sundial.now()\n\n issuer_key = keypair.get_public()\n\n assert (issuer_key == subject_key) == (issuer_name == subject_name)\n assert is_ca or issuer_name != subject_name\n assert is_ca or pathLenConstraint is None\n assert pathLenConstraint is None or (isinstance(pathLenConstraint, (int, long)) and\n pathLenConstraint >= 0)\n\n cert = rpki.POW.X509()\n cert.setVersion(2)\n cert.setSerial(serial)\n cert.setIssuer(issuer_name.get_POW())\n cert.setSubject(subject_name.get_POW())\n cert.setNotBefore(now)\n cert.setNotAfter(notAfter)\n cert.setPublicKey(subject_key.get_POW())\n cert.setSKI(subject_key.get_POW().calculateSKI())\n if issuer_key != subject_key:\n cert.setAKI(issuer_key.get_POW().calculateSKI())\n if is_ca:\n cert.setBasicConstraints(True, pathLenConstraint)\n cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST)\n return cls(POW = cert)", "def _sign(self, cert, keypair, certs, crls, flags):\n\n # pylint: disable=W0201\n cms = self.POW_class()\n cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags)\n self.POW = cms", "def fusion_api_generate_certificate_signing_request(self, body, api=None, headers=None):\n return self.wsc.post(body, api=api, headers=headers)" ]
[ "0.5912793", "0.5785164", "0.5719512", "0.5630169", "0.5488952", "0.54233533", "0.52684283", "0.5234029", "0.5136958", "0.5136601", "0.51317996", "0.5124178", "0.5102918", "0.5094773", "0.5064548", "0.50599504", "0.5055451", "0.49640405", "0.4957639", "0.49432704", "0.48989725", "0.48694596", "0.48628944", "0.48533514", "0.48483327", "0.48461372", "0.48131984", "0.47943997", "0.47891483", "0.47797883" ]
0.6241665
0
Updates the specified certificate issuer. Requires certificates/setissuers permission.
async def update_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer: enabled = kwargs.pop("enabled", None) account_id = kwargs.pop("account_id", None) password = kwargs.pop("password", None) organization_id = kwargs.pop("organization_id", None) admin_contacts = kwargs.pop("admin_contacts", None) if account_id or password: issuer_credentials = self._models.IssuerCredentials(account_id=account_id, password=password) else: issuer_credentials = None if admin_contacts: admin_details: Optional[List[Any]] = list( self._models.AdministratorDetails( first_name=contact.first_name, last_name=contact.last_name, email_address=contact.email, phone=contact.phone, ) for contact in admin_contacts ) else: admin_details = None if organization_id or admin_details: organization_details = self._models.OrganizationDetails(id=organization_id, admin_details=admin_details) else: organization_details = None if enabled is not None: issuer_attributes = self._models.IssuerAttributes(enabled=enabled) else: issuer_attributes = None parameters = self._models.CertificateIssuerUpdateParameters( provider=kwargs.pop("provider", None), credentials=issuer_credentials, organization_details=organization_details, attributes=issuer_attributes, ) issuer_bundle = await self._client.update_certificate_issuer( vault_base_url=self.vault_url, issuer_name=issuer_name, parameter=parameters, **kwargs ) return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def issuer(self, issuer: str):\n\n self._issuer = issuer", "def certificate_issuer_id(self, certificate_issuer_id):\n\n self._certificate_issuer_id = certificate_issuer_id", "def issuer(self, value):\n\n is_oscrypto = isinstance(value, asymmetric.Certificate)\n if not isinstance(value, x509.Certificate) and not is_oscrypto:\n raise TypeError(_pretty_message(\n '''\n issuer must be an instance of asn1crypto.x509.Certificate or\n oscrypto.asymmetric.Certificate, not %s\n ''',\n _type_name(value)\n ))\n\n if is_oscrypto:\n value = value.asn1\n\n self._issuer = value.subject\n\n self._key_identifier = self._subject_public_key.sha1\n self._authority_key_identifier = x509.AuthorityKeyIdentifier({\n 'key_identifier': value.public_key.sha1\n })", "def set_issuer(self, claim=ISSUER):\n if api_settings.ISSUER is not None:\n self.payload[claim] = api_settings.ISSUER", "def update_oidc_issuer_profile(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n oidc_issuer_profile = self.context.get_oidc_issuer_profile()\n if oidc_issuer_profile is not None:\n mc.oidc_issuer_profile = oidc_issuer_profile\n\n return mc", "def certificate_issuer_id(self):\n return self._certificate_issuer_id", "def certificate_issuer_value(self):\n\n if self._processed_extensions is False:\n self._set_extensions()\n return self._certificate_issuer_value", "def add_issuer_arguments(parser):\n group = parser.add_argument_group(\"Issuer Information\")\n group.add_argument(\n \"-ik\", \"--issuer_key\",\n help='Key used to certificate the key',\n )\n group.add_argument(\n \"-ic\", \"--issuer_cert\",\n help=\"Certificate used to certificate the key\",\n )\n return group", "async def create_issuer(self, issuer_name: str, provider: str, **kwargs) -> CertificateIssuer:\n\n enabled = kwargs.pop(\"enabled\", None)\n account_id = kwargs.pop(\"account_id\", None)\n password = kwargs.pop(\"password\", None)\n organization_id = kwargs.pop(\"organization_id\", None)\n admin_contacts = kwargs.pop(\"admin_contacts\", None)\n\n if account_id or password:\n issuer_credentials = self._models.IssuerCredentials(account_id=account_id, password=password)\n else:\n issuer_credentials = None\n if admin_contacts:\n admin_details: Optional[List[Any]] = [\n self._models.AdministratorDetails(\n first_name=contact.first_name,\n last_name=contact.last_name,\n email_address=contact.email,\n phone=contact.phone,\n )\n for contact in admin_contacts\n ]\n else:\n admin_details = None\n if organization_id or admin_details:\n organization_details = self._models.OrganizationDetails(id=organization_id, admin_details=admin_details)\n else:\n organization_details = None\n if enabled is not None:\n issuer_attributes = self._models.IssuerAttributes(enabled=enabled)\n else:\n issuer_attributes = None\n\n parameters = self._models.CertificateIssuerSetParameters(\n provider=provider,\n credentials=issuer_credentials,\n organization_details=organization_details,\n attributes=issuer_attributes,\n )\n\n issuer_bundle = await self._client.set_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, parameter=parameters, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def issuer(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"issuer\")", "def issuer(self) -> str:\n return self._issuer", "def set_up_oidc_issuer_profile(self, mc: ManagedCluster) -> ManagedCluster:\n self._ensure_mc(mc)\n oidc_issuer_profile = self.context.get_oidc_issuer_profile()\n if oidc_issuer_profile is not None:\n 
mc.oidc_issuer_profile = oidc_issuer_profile\n\n return mc", "def issuer(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer\")", "def issuer_did(self) -> str:\n return self._issuer_did", "def issuer(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"issuer\")", "def xforwardedforclientcert_issuerdnalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcert_issuerdnalias\")", "def is_trusted_issuer(self, hostname, issuer):\n secondaries = json.dumps(self.trusted_secondaries)\n return self.runner.is_trusted_issuer(hostname=hostname,\n issuer=issuer,\n trusted_secondaries=secondaries)", "def do_sign(subject_csr, issuer_obj, issuer_key, days, path_length, reqInfo, reset_info=None):\n # Certificate duration\n if days is None:\n die(\"Need --days\")\n if days <= 0:\n die(\"Invalid --days\")\n\n # Load CA info\n issuer_info = CertInfo(load=issuer_obj)\n\n # Load certificate request\n subject_info = CertInfo(load=subject_csr)\n if reset_info:\n subject_info = reset_info\n\n # Check CA parameters\n if not same_pubkey(subject_csr, issuer_obj):\n if not issuer_info.ca:\n die(\"Issuer must be CA.\")\n if 'key_cert_sign' not in issuer_info.usage:\n die(\"Issuer CA is not allowed to sign certs.\")\n if subject_info.ca:\n if not same_pubkey(subject_csr, issuer_obj):\n # not self-signing, check depth\n if issuer_info.path_length == 0:\n die(\"Issuer cannot sign sub-CAs\")\n if issuer_info.path_length - 1 < path_length:\n die(\"--path-length not allowed by issuer\")\n\n # Load subject's public key, check sanity\n pkey = subject_csr.public_key()\n if isinstance(pkey, ec.EllipticCurvePublicKey):\n pkeyinfo = 'ec:' + str(pkey.curve.name)\n if pkey.curve.name not in EC_CURVES:\n die(\"Curve not allowed: %s\", pkey.curve.name)\n elif isinstance(pkey, rsa.RSAPublicKey):\n pkeyinfo = 'rsa:' + str(pkey.key_size)\n if pkey.key_size < MIN_RSA_BITS or pkey.key_size > MAX_RSA_BITS:\n die(\"RSA size not allowed: %s\", pkey.key_size)\n else:\n die(\"Unsupported public key: %s\", str(pkey))\n\n # Report\n if subject_info.ca:\n msg('Signing CA cert [%s] - %s', pkeyinfo, reqInfo)\n else:\n msg('Signing end-entity cert [%s] - %s', pkeyinfo, reqInfo)\n msg('Issuer name: %s', render_name(issuer_info.subject))\n msg('Subject:')\n subject_info.show(msg_show)\n\n # Load CA private key\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Stamp request\n cert = create_x509_cert(issuer_key, subject_csr.public_key(), subject_info, issuer_info, days=days)\n return cert", "def fusion_api_update_server_certificate(self, aliasname, body, api=None, headers=None):\n return self.server_certificate.put(aliasname, body, api, headers)", "async def delete_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer:\n issuer_bundle = await self._client.delete_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def issuer_uri(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer_uri\")", "async def get_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer:\n issuer_bundle = await self._client.get_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def _issuer(self, entityid=None):\n if entityid:\n if isinstance(entityid, Issuer):\n return entityid\n else:\n return Issuer(text=entityid, 
format=NAMEID_FORMAT_ENTITY)\n else:\n return Issuer(text=self.config.entityid,\n format=NAMEID_FORMAT_ENTITY)", "def issuer_ref(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer_ref\")", "def issuer_ref(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"issuer_ref\")", "def issuer_ref(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"issuer_ref\")", "def test_update_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.put(\n '/api/v1/certificates/1', data=json.dumps(update_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate updated successfully')\n assert response.status_code == 200", "def verify(self, issuer: CscaCertificate):\n verify_sig(issuer, self['tbs_cert_list'].dump(), self['signature'], self['signature_algorithm'])", "def fusion_api_update_client_certificate(self, aliasname, body, api=None, headers=None):\n return self.client_certificate.put(aliasname, body, api, headers)", "def platform_issuer_identifier(self, platform_issuer_identifier):\n if platform_issuer_identifier is None:\n raise ValueError(\"Invalid value for `platform_issuer_identifier`, must not be `None`\") # noqa: E501\n\n self._platform_issuer_identifier = platform_issuer_identifier" ]
[ "0.6972535", "0.6511538", "0.64975655", "0.617353", "0.60718924", "0.58061564", "0.56294805", "0.55558544", "0.5507725", "0.53951555", "0.53897464", "0.5285362", "0.52114314", "0.5205502", "0.51751256", "0.5158138", "0.515145", "0.5130808", "0.51226896", "0.50910485", "0.5083164", "0.50509596", "0.502237", "0.4989626", "0.4989626", "0.49668598", "0.4937651", "0.4903257", "0.4853267", "0.47390616" ]
0.76455516
0
Deletes the specified certificate issuer. Requires certificates/manageissuers/deleteissuers permission.
async def delete_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer: issuer_bundle = await self._client.delete_certificate_issuer( vault_base_url=self.vault_url, issuer_name=issuer_name, **kwargs ) return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def certificate_issuer_id(self):\n return self._certificate_issuer_id", "def certificate_issuer_id(self, certificate_issuer_id):\n\n self._certificate_issuer_id = certificate_issuer_id", "def issuer(self, value):\n\n is_oscrypto = isinstance(value, asymmetric.Certificate)\n if not isinstance(value, x509.Certificate) and not is_oscrypto:\n raise TypeError(_pretty_message(\n '''\n issuer must be an instance of asn1crypto.x509.Certificate or\n oscrypto.asymmetric.Certificate, not %s\n ''',\n _type_name(value)\n ))\n\n if is_oscrypto:\n value = value.asn1\n\n self._issuer = value.subject\n\n self._key_identifier = self._subject_public_key.sha1\n self._authority_key_identifier = x509.AuthorityKeyIdentifier({\n 'key_identifier': value.public_key.sha1\n })", "def issuer(self, issuer: str):\n\n self._issuer = issuer", "def verify(self, issuer: CscaCertificate):\n verify_sig(issuer, self['tbs_cert_list'].dump(), self['signature'], self['signature_algorithm'])", "async def update_issuer(self, issuer_name: str, **kwargs) -> CertificateIssuer:\n\n enabled = kwargs.pop(\"enabled\", None)\n account_id = kwargs.pop(\"account_id\", None)\n password = kwargs.pop(\"password\", None)\n organization_id = kwargs.pop(\"organization_id\", None)\n admin_contacts = kwargs.pop(\"admin_contacts\", None)\n\n if account_id or password:\n issuer_credentials = self._models.IssuerCredentials(account_id=account_id, password=password)\n else:\n issuer_credentials = None\n if admin_contacts:\n admin_details: Optional[List[Any]] = list(\n self._models.AdministratorDetails(\n first_name=contact.first_name,\n last_name=contact.last_name,\n email_address=contact.email,\n phone=contact.phone,\n )\n for contact in admin_contacts\n )\n else:\n admin_details = None\n if organization_id or admin_details:\n organization_details = self._models.OrganizationDetails(id=organization_id, admin_details=admin_details)\n else:\n organization_details = None\n if enabled is not None:\n issuer_attributes = self._models.IssuerAttributes(enabled=enabled)\n else:\n issuer_attributes = None\n\n parameters = self._models.CertificateIssuerUpdateParameters(\n provider=kwargs.pop(\"provider\", None),\n credentials=issuer_credentials,\n organization_details=organization_details,\n attributes=issuer_attributes,\n )\n\n issuer_bundle = await self._client.update_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, parameter=parameters, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def do_sign(subject_csr, issuer_obj, issuer_key, days, path_length, reqInfo, reset_info=None):\n # Certificate duration\n if days is None:\n die(\"Need --days\")\n if days <= 0:\n die(\"Invalid --days\")\n\n # Load CA info\n issuer_info = CertInfo(load=issuer_obj)\n\n # Load certificate request\n subject_info = CertInfo(load=subject_csr)\n if reset_info:\n subject_info = reset_info\n\n # Check CA parameters\n if not same_pubkey(subject_csr, issuer_obj):\n if not issuer_info.ca:\n die(\"Issuer must be CA.\")\n if 'key_cert_sign' not in issuer_info.usage:\n die(\"Issuer CA is not allowed to sign certs.\")\n if subject_info.ca:\n if not same_pubkey(subject_csr, issuer_obj):\n # not self-signing, check depth\n if issuer_info.path_length == 0:\n die(\"Issuer cannot sign sub-CAs\")\n if issuer_info.path_length - 1 < path_length:\n die(\"--path-length not allowed by issuer\")\n\n # Load subject's public key, check sanity\n pkey = subject_csr.public_key()\n if isinstance(pkey, ec.EllipticCurvePublicKey):\n pkeyinfo 
= 'ec:' + str(pkey.curve.name)\n if pkey.curve.name not in EC_CURVES:\n die(\"Curve not allowed: %s\", pkey.curve.name)\n elif isinstance(pkey, rsa.RSAPublicKey):\n pkeyinfo = 'rsa:' + str(pkey.key_size)\n if pkey.key_size < MIN_RSA_BITS or pkey.key_size > MAX_RSA_BITS:\n die(\"RSA size not allowed: %s\", pkey.key_size)\n else:\n die(\"Unsupported public key: %s\", str(pkey))\n\n # Report\n if subject_info.ca:\n msg('Signing CA cert [%s] - %s', pkeyinfo, reqInfo)\n else:\n msg('Signing end-entity cert [%s] - %s', pkeyinfo, reqInfo)\n msg('Issuer name: %s', render_name(issuer_info.subject))\n msg('Subject:')\n subject_info.show(msg_show)\n\n # Load CA private key\n if not same_pubkey(issuer_key, issuer_obj):\n die(\"--ca-private-key does not match --ca-info data\")\n\n # Stamp request\n cert = create_x509_cert(issuer_key, subject_csr.public_key(), subject_info, issuer_info, days=days)\n return cert", "def test_delete_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/1', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate deleted successfully')\n assert response.status_code == 200", "def fusion_api_delete_server_certificate(self, aliasname, api=None, headers=None):\n return self.server_certificate.delete(aliasname, api, headers)", "def issuer_did(self) -> str:\n return self._issuer_did", "def delete_certificate(a): # delete_certificate(arn, /)\n\n while True:\n\n try:\n acm.delete_certificate(**{'CertificateArn': a})\n return\n except ClientError as exception:\n log_exception('')\n\n err_code = exception.response['Error']['Code']\n\n if err_code == 'ResourceInUseException':\n if get_remaining_time_in_millis() / 1000 < 30:\n raise\n\n sleep(5)\n continue\n\n if err_code in ['ResourceNotFoundException', 'ValidationException']:\n # If the arn is invalid, it didn't exist anyway.\n return\n\n raise\n\n except ParamValidationError:\n # invalid arn\n return", "def issuer(self) -> str:\n return self._issuer", "def delete_server_cert(self, cert_name):\r\n params = {'ServerCertificateName' : cert_name}\r\n return self.get_response('DeleteServerCertificate', params)", "def catalog_alias_delete(self, args):\n try:\n alias = self.server.connect_ermrest_alias(args.id)\n alias.delete_ermrest_alias(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n else:\n raise e", "def is_trusted_issuer(self, hostname, issuer):\n secondaries = json.dumps(self.trusted_secondaries)\n return self.runner.is_trusted_issuer(hostname=hostname,\n issuer=issuer,\n trusted_secondaries=secondaries)", "def fusion_api_delete_client_certificate(self, aliasname, api=None, headers=None):\n return self.client_certificate.delete(aliasname, api, headers)", "def add_issuer_arguments(parser):\n group = parser.add_argument_group(\"Issuer Information\")\n group.add_argument(\n \"-ik\", \"--issuer_key\",\n help='Key used to certificate the key',\n )\n group.add_argument(\n \"-ic\", \"--issuer_cert\",\n help=\"Certificate used to certificate the key\",\n )\n return group", "def delete(nitro, csvserver_responderpolicy_binding):\n __csvserver_responderpolicy_binding = NSCSVServerResponderPolicyBinding()\n 
__csvserver_responderpolicy_binding.set_name(csvserver_responderpolicy_binding.get_name())\n __csvserver_responderpolicy_binding.set_policyname(csvserver_responderpolicy_binding.get_policyname())\n __csvserver_responderpolicy_binding.set_priority(csvserver_responderpolicy_binding.get_priority())\n __csvserver_responderpolicy_binding.set_bindpoint(csvserver_responderpolicy_binding.get_bindpoint())\n nsresponse = __csvserver_responderpolicy_binding.delete_resource(nitro)\n return nsresponse", "def xforwardedforclientcert_issuerdnalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcert_issuerdnalias\")", "def delete_signing_cert(self, cert_id, user_name=None):\r\n params = {'CertificateId' : cert_id}\r\n if user_name:\r\n params['UserName'] = user_name\r\n return self.get_response('DeleteSigningCertificate', params)", "def certificate_issuer_value(self):\n\n if self._processed_extensions is False:\n self._set_extensions()\n return self._certificate_issuer_value", "def test_delete_non_existing_certificate(self):\n self.client.post(\n '/api/v1/certificates', data=json.dumps(new_certificate),\n content_type='application/json',\n headers=self.get_registrar_token())\n response = self.client.delete(\n '/api/v1/certificates/10', content_type='application/json',\n headers=self.get_registrar_token())\n result = json.loads(response.data.decode())\n self.assertEqual(result['message'],\n 'Certificate not found')\n assert response.status_code == 404", "def revoke_certificate(self):\n return self.__query(\"certificateRevoke\", kwargs)", "async def create_issuer(self, issuer_name: str, provider: str, **kwargs) -> CertificateIssuer:\n\n enabled = kwargs.pop(\"enabled\", None)\n account_id = kwargs.pop(\"account_id\", None)\n password = kwargs.pop(\"password\", None)\n organization_id = kwargs.pop(\"organization_id\", None)\n admin_contacts = kwargs.pop(\"admin_contacts\", None)\n\n if account_id or password:\n issuer_credentials = self._models.IssuerCredentials(account_id=account_id, password=password)\n else:\n issuer_credentials = None\n if admin_contacts:\n admin_details: Optional[List[Any]] = [\n self._models.AdministratorDetails(\n first_name=contact.first_name,\n last_name=contact.last_name,\n email_address=contact.email,\n phone=contact.phone,\n )\n for contact in admin_contacts\n ]\n else:\n admin_details = None\n if organization_id or admin_details:\n organization_details = self._models.OrganizationDetails(id=organization_id, admin_details=admin_details)\n else:\n organization_details = None\n if enabled is not None:\n issuer_attributes = self._models.IssuerAttributes(enabled=enabled)\n else:\n issuer_attributes = None\n\n parameters = self._models.CertificateIssuerSetParameters(\n provider=provider,\n credentials=issuer_credentials,\n organization_details=organization_details,\n attributes=issuer_attributes,\n )\n\n issuer_bundle = await self._client.set_certificate_issuer(\n vault_base_url=self.vault_url, issuer_name=issuer_name, parameter=parameters, **kwargs\n )\n return CertificateIssuer._from_issuer_bundle(issuer_bundle=issuer_bundle)", "def delete(self, urns, client_cert, credentials, best_effort): ### FIX the response\n result = []\n slice_urn = urns[0]\n # try:\n for urn in urns:\n if self._verify_users:\n logger.debug(\"delete: authenticate the user for %s\" % (urn))\n client_urn, client_uuid, client_email =\\\n self.auth(client_cert, credentials, urn, (\"deletesliver\",))\n logger.info(\"Client urn=%s, uuid=%s, email=%s\" % (\n client_urn, client_uuid, client_email,))\n\n 
try:\n links_db, nodes, links = self.SESlices.get_link_db(urn)\n except Exception as e:\n raise geni_ex.GENIv3GeneralError(\"Slice does not exist.\")\n\n reservation_ports = self.SESlices._allocate_ports_in_slice(nodes)[\"ports\"]\n\n portsVlansPairs = getPortsVlansPairs(links_db)\n\n try:\n for portVlanItem in portsVlansPairs:\n (in_port, out_port, in_vlan, out_vlan) = portVlanItem\n se_provision.deleteSwitchingRule(in_port, out_port, in_vlan, out_vlan)\n logger.debug(\"unprovision SE-Slice-Urn=%s, in_port=%s , out_port=%s, in_vlan=%s, out_port=%s\" % (urn,in_port, out_port, in_vlan, out_vlan))\n except:\n logger.warning(\"Problem in communication with SE\")\n\n # expires_date = datetime.strptime(links_db['geni_expires'], RFC3339_FORMAT_STRING)\n expires_date = links_db['geni_expires']\n\n\n for sliver in links_db[\"geni_sliver_urn\"]:\n result.append( \n { \n \"geni_sliver_urn\": sliver,\n \"geni_expires\": expires_date,\n \"geni_allocation_status\": \"geni_unallocated\",\n \"geni_operational_status\" : \"geni_notready\"\n }\n )\n\n # Mark resources as free\n self.SEResources.free_resource_reservation(reservation_ports)\n\n # Remove reservation\n self.SESlices.remove_link_db(urn)\n \n logger.info(\"delete successfully completed: %s\", slice_urn)\n \n return result\n\n # except:\n\n # raise geni_ex.GENIv3GeneralError(\"Delete Failed. Requested resources are not available.\")", "def subject_destroy(context, subject_id):\n session = get_session()\n with session.begin():\n subject_ref = _subject_get(context, subject_id, session=session)\n\n # Perform authorization check\n _check_mutate_authorization(context, subject_ref)\n\n subject_ref.delete(session=session)\n delete_time = subject_ref.deleted_at\n\n _subject_locations_delete_all(context, subject_id, delete_time, session)\n\n _subject_property_delete_all(context, subject_id, delete_time, session)\n\n _subject_member_delete_all(context, subject_id, delete_time, session)\n\n _subject_tag_delete_all(context, subject_id, delete_time, session)\n\n return _normalize_locations(context, subject_ref)", "def issuer(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"issuer\")", "def DeleteCertificate(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteCertificate\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteCertificateResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def _acme_revoke(self, cert):\n # XXX | pylint: disable=unused-variable\n\n # pylint: disable=protected-access\n certificate = jose_util.ComparableX509(cert._cert)\n try:\n with open(cert.backup_key_path, \"rU\") as backup_key_file:\n key = OpenSSL.crypto.load_privatekey(\n OpenSSL.crypto.FILETYPE_PEM, backup_key_file.read())\n # If the key file doesn't exist... or is corrupted\n except OpenSSL.crypto.Error as error:\n logger.debug(error, exc_info=True)\n raise errors.RevokerError(\n \"Corrupted backup key file: %s\" % cert.backup_key_path)\n\n return self.acme.revoke(cert=None) # XXX", "def catalog_delete(self, args):\n headers = DEFAULT_HEADERS.copy()\n headers.update(args.headers)\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete(args.path, headers)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e" ]
[ "0.54405326", "0.53633106", "0.5308739", "0.5292008", "0.51837564", "0.5093947", "0.5015717", "0.48553553", "0.48267108", "0.47868848", "0.47505197", "0.4654417", "0.46471775", "0.46171558", "0.4612264", "0.46041763", "0.46008196", "0.45570296", "0.45004335", "0.4493562", "0.44899055", "0.4458608", "0.4439185", "0.44239676", "0.44162676", "0.4411061", "0.44095892", "0.4360557", "0.43395066", "0.43044123" ]
0.73457456
0
Lists properties of the certificate issuers for the key vault. Requires the certificates/manageissuers/getissuers permission.
def list_properties_of_issuers(self, **kwargs) -> AsyncItemPaged[IssuerProperties]: max_page_size = kwargs.pop("max_page_size", None) return self._client.get_certificate_issuers( vault_base_url=self.vault_url, maxresults=max_page_size, cls=lambda objs: [IssuerProperties._from_issuer_item(x) for x in objs], **kwargs )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_trusted_issuers():\n\n # Query the blockchain and manage exceptions\n try:\n trusted_issuers = tf.dump_trusted_identities()\n except Exception as e:\n detail=str(e)\n log.error(detail)\n raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=detail)\n\n return {\"payload\": trusted_issuers}", "def list_authorities():\n try:\n certs = client().certificates.get_authorities()\n if not certs:\n logger.info(\n 'ctl:cert:authorities', 'No certificate authorities found'\n )\n return\n llen = len(sorted(certs, key=lambda x: len(x[\"id\"]))[-1][\"id\"])\n for x in sorted(certs, key=lambda x: x[\"id\"]):\n click.echo(\n click.style(\n '{name: <{fill}}'.format(name=x[\"id\"], fill=llen + 3),\n fg=\"white\", bold=True) + \"Expires \" +\n click.style(x[\"expiry\"].strftime(\"%c\"), fg=\"yellow\")\n )\n except Exception as e:\n raise CLIException(str(e))", "def getScopusIssns(publisherName):\n journalFname = pubConf.journalTable\n if not isfile(journalFname):\n logging.warn('%s does not exist, cannot use ISSNs to assign crawler' % journalFname)\n return ({}, [])\n issns = set()\n for row in maxCommon.iterTsvRows(journalFname):\n if row.source != 'SCOPUS':\n continue\n if row.correctPublisher != publisherName:\n continue\n if row.pIssn != '':\n issns.add(row.pIssn.strip())\n if row.eIssn != '':\n issns.add(row.eIssn.strip())\n\n logging.debug('Read %d issns from %s' % (len(issns), journalFname))\n return issns", "def get_csr_san(self, csr):\n dns_names = []\n try:\n san = csr.extensions.get_extension_for_class(\n x509.SubjectAlternativeName\n )\n except ExtensionNotFound:\n san = None\n if san:\n for dns_name in san.value:\n dns_names.append(dns_name.value)\n return dns_names", "def getCertifications(self):\n return [c for c in self.objectValues('InstrumentCertification') if c]", "def security_entries(self):\n return self._security_entries", "def security_identities(self):\n return self._security_identities", "def certificate_issuer_id(self):\n return self._certificate_issuer_id", "def iss(self) -> t.Optional[int]:\n return self.claims.get(\"iss\")", "def security_list_ids(self):\n return self._security_list_ids", "def trusted_server_certificates(self):\n return self._trusted_server_certificates", "def signins(self):\n return self.properties.get('signIns',\n EntityCollection(self.context, SignIn, ResourcePath(\"signIns\", self.resource_path)))", "def validate_iss(self):\n self._validate_claim_value('iss')", "def request_issuance(self, csr):\n action = LOG_ACME_REQUEST_CERTIFICATE()\n with action.context():\n return (\n DeferredContext(\n self._client.post(\n self.directory[csr], csr,\n content_type=DER_CONTENT_TYPE,\n headers=Headers({b'Accept': [DER_CONTENT_TYPE]})))\n .addCallback(self._expect_response, http.CREATED)\n .addCallback(self._parse_certificate)\n .addActionFinish())", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"certificates\")", "def certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"certificates\")", "def show_all_certifications():\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n \n certs = Cert.query.all()\n ## all possible certs...\n \n return render_template(\"certs_display.html\", certs = certs)", "def get_cert_IPAddresses(cert):\n try:\n ext = 
cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)\n addresses = ext.value.get_values_for_type(x509.IPAddress)\n except Exception:\n raise exception.SysinvException(_(\n \"Failed to get certificate SAN's IPAddresses.\"))\n return [format(ips) for ips in addresses]", "def subjects(self):\n return self.cache.subjects()", "def assigns():\n click.echo(\"Apps/Sites that can use certificates:\")\n try:\n assigns = client().certificates.get_possible_assigns()\n for x in assigns:\n imsg = click.style(\"(\" + x[\"type\"].capitalize() + \")\", fg=\"green\")\n click.echo(\n click.style(x[\"name\"], fg=\"white\", bold=True) + \" \" + imsg\n )\n except Exception as e:\n raise CLIException(str(e))", "def list(self):\n invites = []\n from arcgis.gis import GIS\n isinstance(self._gis, GIS)\n url = self._url\n params = {\n 'f' : 'json',\n 'num':100,\n 'start':1\n }\n res = self._gis._con.get(url, params)\n invites = res[\"invitations\"]\n while res['nextStart'] > -1:\n params['start'] += 100\n res = self._gis._con.get(url, params)\n invites.extend(res[\"invitations\"])\n return invites", "def certificates(self):\n if self.user.is_superuser:\n return Certificate.objects.all()\n else:\n return Certificate.objects.filter(licensee__in=self.licensees.all())", "def list_incidents_command():\n cursor = COLLECTION.find({}, {'_id': False})\n incidents = []\n results: list = []\n for incident in cursor:\n for name in incident:\n incidents.append(name)\n for i in incidents:\n if i not in results:\n results.append(i)\n human_readable = tableToMarkdown(f'List of incidents in collecion {COLLECTION_NAME}', results,\n headers=['Incidents'])\n return human_readable, {}, {}", "def xforwardedforclientcert_issuerdnalias(self) -> str:\n return pulumi.get(self, \"xforwardedforclientcert_issuerdnalias\")", "def GetValidHostsForCert(cert):\r\n if 'subjectAltName' in cert:\r\n return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns']\r\n else:\r\n return [x[0][1] for x in cert['subject']\r\n if x[0][0].lower() == 'commonname']", "def get_own_public_certificates():\n attempt = 0\n while True:\n attempt += 1\n try:\n certs = app_identity.get_public_certificates(deadline=1.5)\n break\n except apiproxy_errors.DeadlineExceededError as e:\n logging.warning('%s', e)\n if attempt == 3:\n raise\n return {\n 'certificates': [\n {\n 'key_name': cert.key_name,\n 'x509_certificate_pem': cert.x509_certificate_pem,\n }\n for cert in certs\n ],\n 'timestamp': utils.datetime_to_timestamp(utils.utcnow()),\n }", "def list_cas():\n cas = []\n for ca in settings.ACM_PRIVATE_CA_SETTINGS:\n _ca = get_ca(ca)\n cas.append(_ca.get_certificate_authority_certificate())\n return cas", "def list_trusts(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n results = self.engine.query(self.engine.TRUSTS_INFO_FILTER())\n\n ATTRIBUTE_TRANSLATION = {\n \"trustDirection\": {\n 0x00000003: \"bidirectional\",\n 0x00000002: \"outbound\",\n 0x00000001: \"inbound\",\n 0x00000000: \"disabled\",\n },\n \"trustType\": {\n 0x00000001: \"Non running Windows domain\",\n 0x00000002: \"Windows domain running Active Directory\",\n 0x00000003: \"Non Windows domain\",\n },\n }\n\n trusts = []\n for result in results:\n for key in ATTRIBUTE_TRANSLATION:\n if key in result:\n result[key] = ATTRIBUTE_TRANSLATION[key][int(result[key])]\n trusts.append(result)\n\n if verbose:\n self.display(results, verbose)\n return\n\n FIELDS_TO_PRINT = [\n \"dn\",\n \"cn\",\n \"securityIdentifier\",\n \"name\",\n \"trustDirection\",\n 
\"trustPartner\",\n \"trustType\",\n \"trustAttributes\",\n \"flatName\"\n ]\n\n for result in trusts:\n for field in FIELDS_TO_PRINT:\n if field in result:\n val = result[field]\n print(\"{field}: {val}\".format(field=field, val=val))\n print(\"\")", "def cavium_certs(self) -> Sequence[str]:\n return pulumi.get(self, \"cavium_certs\")", "def getHosterIssns(publisherName):\n global publisherIssns\n global publisherUrls\n if publisherIssns is None:\n journalFname = pubConf.journalTable\n if not isfile(journalFname):\n logging.warn('%s does not exist, cannot ISSN-assign highwire crawler' % journalFname)\n return ({}, set([]))\n publisherIssns = defaultdict(dict)\n publisherUrls = defaultdict(set)\n logging.log(5, 'Parsing %s to get highwire ISSNs' % journalFname)\n for row in maxCommon.iterTsvRows(journalFname):\n if row.source in ('HIGHWIRE', 'WILEY'):\n hoster = row.source\n journalUrl = 'http://' + row.urls.strip().replace('http://', '')\n issn = row.pIssn.strip()\n eIssn = row.eIssn.strip()\n publisherIssns[hoster][issn] = journalUrl\n publisherIssns[hoster][eIssn] = journalUrl\n if journalUrl != '':\n publisherUrls[hoster].add(journalUrl)\n\n return (publisherIssns[publisherName], publisherUrls[publisherName])" ]
[ "0.62516004", "0.5375079", "0.5267441", "0.523019", "0.51898634", "0.5173039", "0.5050751", "0.497449", "0.49561843", "0.4937976", "0.49265328", "0.48140576", "0.47978282", "0.47966686", "0.47876447", "0.47876447", "0.4781479", "0.4780105", "0.47510105", "0.47499838", "0.4743856", "0.47359428", "0.4732779", "0.47257102", "0.47170436", "0.47083205", "0.46992382", "0.46903148", "0.46694392", "0.46508452" ]
0.732406
0
Returns the list of all geometries in given region
def get_geometry(self, region=None) -> List[Geometry2D]: return self.geometry_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def allGeometry(self):\n\n # TODO: This needs to handle pagination once that is implemented in the web\n # service.\n req = urllib2.Request(self.baseUri + 'geometry')\n r = urllib2.urlopen(req)\n\n data = json.load(r)\n\n # This ignore the \"lastEditTime\" and just works on the data.\n for item in data.get('geometry', []):\n yield item\n\n r.close()", "def inside(self,region):\n fs = FeatureSet()\n for f in self:\n if(f.isContainedWithin(region)):\n fs.append(f)\n return fs", "def get_region(geom):\n polygons = []\n coordinates = geom.get('coordinates')\n polygons.append(ee.Geometry.Polygon(coordinates))\n return ee.FeatureCollection(polygons)", "def getRegions(self, polygon: Polygon, epsg: int) -> list:\n self.output_epsg = epsg\n polygon_df = gpd.GeoDataFrame([polygon], columns=['geometry'])\n\n polygon_df.set_crs(epsg=self.output_epsg, inplace=True)\n polygon_df['geometry'] = polygon_df['geometry'].to_crs(epsg=self.input_epsg)\n minx, miny, maxx, maxy = polygon_df['geometry'][0].bounds\n\n cond_xmin = self.metadata.xmin <= minx\n cond_xmax = self.metadata.xmax >= maxx\n cond_ymin = self.metadata.ymin <= miny\n cond_ymax = self.metadata.ymax >= maxy\n\n df = self.metadata[cond_xmin & cond_xmax & cond_ymin & cond_ymax]\n sort_df = df.sort_values(by=['year'])\n regions = sort_df['filename'].to_list()\n return regions", "def RegionList(self):\n command = \"\"\"\n IPython.notebook.kernel.execute(\"RegionList=\" + JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})));\n \"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def region(self):\n return [node.region for node in self]", "def get_regions(self):\n return self._regions", "def find_features_geojson(self, geojson_tagset):\n kreis_region_bund_list = []\n only_regs_set = set()\n for feature in geojson_tagset:\n bundesl = feature.properties.get('NAME_1')\n region = feature.properties.get('NAME_2')\n kreis = feature.properties.get('NAME_3')\n\n kreis_region_bund_list.append((kreis, region, bundesl))\n #Check: does \"Göttingen\" appear in this list as a region? 
Why does Goettingen need to be a region?)\n return kreis_region_bund_list", "def geom_as_list(geometry):\n if geometry.geom_type == \"Polygon\":\n return [geometry]\n elif geometry.geom_type == \"MultiPolygon\":\n return geometry.geoms", "def api_get_regions():\n db_session = DBSession()\n\n rows = []\n criteria = '%'\n if request.args and request.args.get('q'):\n criteria += request.args.get('q') + '%'\n else:\n criteria += '%'\n\n regions = db_session.query(Region).filter(Region.name.like(criteria)).order_by(Region.name.asc()).all()\n if len(regions) > 0:\n if request.args.get('show_all'):\n rows.append({'id': 0, 'text': 'ALL'})\n for region in regions:\n rows.append({'id': region.id, 'text': region.name})\n\n return jsonify(**{'data': rows})", "def filter_regions(self):\n return self.filter_nodes('/DistrictBuilder/Regions/Region')", "def regions(self):\n return self._regions", "def overlaps(self,region):\n fs = FeatureSet()\n for f in self:\n if( f.overlaps(region) ):\n fs.append(f)\n return fs", "def GetRegionVertices(self, *float, **kwargs):\n ...", "def read_feature(region='Adriatic_Sea'):\n if 'GEOMETRIC_DATA_DIR' in os.environ:\n cache_location = os.environ['GEOMETRIC_DATA_DIR']\n else:\n cache_location = './geometric_data'\n\n filename = f'{cache_location}/ocean/region/{region}/region.geojson'\n\n fc = read_feature_collection(filename)\n return fc", "def obtain_geometries(self):\n\n assert isinstance(self.ts, TS)\n\n \n symbol_dict = {\n 17: \"Cl\",\n 9: \"F\",\n 8: \"O\",\n 7: \"N\",\n 6: \"C\",\n 1: \"H\",\n }\n atoms = []\n\n parser = ccread(self.log_file, loglevel=logging.ERROR)\n\n for atom_num, coords in zip(parser.atomnos, parser.atomcoords[-1]):\n atoms.append(Atom(symbol=symbol_dict[atom_num], position=coords))\n \n self.ts._ase_molecule = Atoms(atoms)\n self.ts.update_coords_from(\"ase\")\n\n self.pre_geometry = self.ts.ase_molecule.copy()\n self.post_geometry = self.ts.ase_molecule.copy()\n\n for vib, displacements in self.vibrations:\n if vib < 0: # Finding the imaginary frequency\n self.post_geometry.arrays[\"positions\"] -= displacements\n\n return self.pre_geometry, self.post_geometry", "def get_instances(self, region):\n try:\n conn = ec2.connect_to_region(region, **self.credentials)\n region_instances = []\n reservations = conn.get_all_reservations()\n for reservation in reservations:\n for instance in reservation.instances:\n region_instances.append(instance)\n except boto.exception.EC2ResponseError:\n return []\n return region_instances", "def regions(self):\n\n class RegionIter(object):\n def __init__(self, region_based):\n self._region_based = region_based\n\n def __len__(self):\n return self._region_based._region_len()\n\n def __iter__(self):\n return self()\n\n def _fix_chromosome(self, regions):\n for r in regions:\n r.fix_chromosome(copy=True)\n\n def __call__(self, key=None, *args, **kwargs):\n fix_chromosome = kwargs.pop('fix_chromosome', False)\n\n if key is None:\n iterator = self._region_based._region_iter(*args, **kwargs)\n else:\n if isinstance(key, string_types) or isinstance(key, GenomicRegion):\n iterator = self._region_based.region_subset(key, *args, **kwargs)\n else:\n iterator = self._region_based._get_regions(key, *args, **kwargs)\n\n if fix_chromosome:\n return self._fix_chromosome(iterator)\n else:\n return iterator\n\n def __getitem__(self, item):\n if isinstance(item, string_types) or isinstance(item, GenomicRegion):\n return self._region_based.region_subset(item)\n return self._region_based._get_regions(item)\n\n return 
RegionIter(self)", "def scope(self) -> List[Region]:\n return [self]", "def _get_available_regions():\n session = boto3.session.Session()\n\n return session.get_available_regions(service_name='s3')", "def getRegions(self, clearCache=False):\n if clearCache:\n self._regionCache = None\n if self._regionCache is not None:\n return self._regionCache\n\n self.lock.acquire()\n\n regions = []\n self._regionsByName = {}\n\n # Iterate over all descriptors (even numbered regions)\n for index in range(0, MAX_REGIONS, 2):\n def storeDescriptor(descriptor, index=index):\n size = struct.unpack(\"<I\", descriptor[:4])[0]\n name = descriptor[4:].split('\\x00')[0]\n if name:\n region = Region(index + 1, size, name)\n regions.append(region)\n self._regionsByName[name] = region\n\n # Send the command the low-level way, since we already have the lock.\n self.recv.queue.put((MAX_DESCRIPTOR_LEN, storeDescriptor))\n self.send.queue.put(opSetRegion(index) + opReadLongs(MAX_DESCRIPTOR_LEN))\n\n self.recv.queue.join()\n self._regionCache = regions\n\n self.lock.release()\n return regions", "def selected(self):\n\t\treturn [self.regions[int(i)]\n\t\t\tfor i in self.regionListing.hlist.info_selection()]", "def ListRegions(self):\n project = properties.VALUES.core.project.GetOrFail()\n request = self.messages.CloudfunctionsProjectsLocationsListRequest(\n name='projects/' + project\n )\n return list_pager.YieldFromList(\n service=self.client.projects_locations,\n request=request,\n field='locations',\n batch_size_attribute='pageSize',\n )", "def get_snapshots(self, region):\n try:\n conn = ec2.connect_to_region(region, **self.credentials)\n region_snapshots = conn.get_all_snapshots(owner='self')\n except boto.exception.EC2ResponseError:\n return []\n return region_snapshots", "def get_regions(**kwargs):\n\n instance = Ceic._get_instance()\n\n get_dictionaries_method = instance._dictionary_facade.get_regions\n result = instance._make_request(get_dictionaries_method, **kwargs)\n\n return result", "def query_geometries(self, geometry_params):\n username, password, api_key, max_items_to_return = SettingsOps.get_settings()\n geometry_runnable = GeometryRunnable(username, password, api_key, geometry_params)\n geometry_runnable.geometry_object.task_complete.connect(self.on_new_geometries)\n self.init_progress_bar()\n self.search_thread_pool.start(geometry_runnable)", "def get_region_services(self,format=None):\n clients = HWIOS.pb_server.get_clients()\n region_services = []\n for client in clients:\n region_services.extend(client.region_services)\n #for django forms\n if format == 'tuple':\n tuple_list = []\n for region_service in region_services:\n tuple_list.append((region_service['uuid'],region_service['name']))\n return tuple_list\n return region_services", "def get_regions():\n\n # Also known as the 'climbing directory'\n route_guide = urlopen('https://www.mountainproject.com/route-guide',\n context=ctx)\n # Opens HTML\n region_html = route_guide.read()\n # Parses HTML with BS package\n region_soup = BeautifulSoup(region_html, 'html.parser')\n # Finds regions area of the page\n regions = region_soup.find('div', id='route-guide')\\\n .find_all('div', class_='mb-half')\n\n for region in regions:\n # Link to region area guide\n url = region.find('a')['href']\n # English name of region\n region_name = region.find('a').get_text()\n # Writes region name and url to Areas DB. 
This gives the region a\n # unique id automatically\n cursor.execute('''\n INSERT INTO Areas(url, name)\n VALUES ('%s', '%s')\n ON CONFLICT DO NOTHING\n ''' % (url, region_name))\n # Commits to DB\n conn.commit()", "def get_geometries ( self, object_class_table, spatial_column, select_column, select_id ) :\n stmt = 'select sdo_util.to_wktgeometry(' + str(spatial_column) + ') from ' + str(object_class_table) + ' where ' + str(select_column) + ' = ' + str(select_id)\n self.oracle_cursor.execute( stmt )\n resultset = self.oracle_cursor.fetchall()\n return resultset", "def getMultiGeometry(geometry):\n geom = arcpy.Array()\n for feature in geometry:\n array = arcpy.Array()\n for point in feature:\n point = arcpy.Point(float(point[0]), float(point[1]))\n array.add(point)\n geom.add(array)\n return geom" ]
[ "0.67121756", "0.64286244", "0.6395986", "0.63092536", "0.6299374", "0.6102714", "0.60804313", "0.60661674", "0.60644054", "0.5955644", "0.5845528", "0.5827808", "0.57959247", "0.57683283", "0.5693263", "0.5645893", "0.5640617", "0.5637714", "0.56285036", "0.5622284", "0.56084436", "0.55942494", "0.5582162", "0.5574251", "0.55595666", "0.5553779", "0.55516845", "0.554346", "0.5541786", "0.5538701" ]
0.7447639
0