query — string (lengths 9 to 9.05k characters)
document — string (lengths 10 to 222k characters)
metadata — dict
negatives — list (30 items per row)
negative_scores — list (30 items per row)
document_score — string (lengths 4 to 10 characters)
document_rank — string (2 distinct values)
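The columns above follow the Hugging Face `datasets` preview layout. Assuming the rows come from such a repository (the repository id and split name below are placeholders, not given in this preview), they could be loaded like this:

from datasets import load_dataset

# "org/dataset-name" and "train" are placeholders; substitute the real repository id and split.
ds = load_dataset("org/dataset-name", split="train")

row = ds[0]
print(row["query"])              # natural-language description of the code
print(len(row["negatives"]))     # 30 hard-negative documents per row
print(row["document_score"], row["document_rank"])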
Sets all log arrays to a current value.
def reset_logs(self):
    # reset log arrays
    try:
        bc = self.petra.BeamCurrent
    except:
        bc = numpy.nan
    try:
        pac = self.tserver.read_attribute('PosAndAvgCurr').value
    except:
        pac = numpy.array([numpy.nan, numpy.nan, numpy.nan])
    server_query = numpy.append(pac, bc)
    for log_group, log_arrays in self.log_names.items():
        omit_group = ['log_sens']
        if log_group not in omit_group:
            for n, log_array in enumerate(log_arrays):
                self.log_arrays[log_array] = numpy.full(self.log_length, server_query[n])
    # reset sensitivity log
    for log_array in self.log_names['log_sens']:
        self.log_arrays[log_array] = numpy.full(self.log_length, numpy.nan)
    # reset time array
    length = self.log_time.size
    t0 = self.timestamp() - self.backlog
    t1 = self.timestamp()
    self.log_time = numpy.linspace(t0, t1, length)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear_logging_arr(self):\n self.log_arr = []", "def log_elements(self, log_elements):\n\n self._log_elements = log_elements", "def _wipe_log(self):\n self.temp_log.clear()\n self.temp_log = [[], []] # init a 2D array", "def reset_logger():\n for log in ALL_LEVELS:\n setattr(vlog.Vlog, _LOG_MAPPING[log].__name__, _LOG_MAPPING[log])", "def updateArrays(self):\n for channelNumber in range(0, 8):\n self.channels[channelNumber][self.currentPosition]=self._voltage_get(channelNumber)#update next element in each array\n self.currentPosition+=1\n if self.currentPosition>=self.numberOfPoints:#reset position to beginning when we hit max number of points (like rolling oscilloscope)\n self.currentPosition=0\n self.cursorXS = self.getCurrentPositionArray()\n #could also set the next points to NaN's to make a gap!", "def reset_values(self):\n\n self.values = np.array([])", "def set_log_from_main(self, *args):\n if self.logarithmic:\n self.scale.props.adjustment.props.value = \\\n self.smart_log(self.adjustment.props.value)", "def _setVals(self, *args, **kwargs):\n pass", "def _plot_update(self):\n omit_log = ['sens_log']\n for log_group, log_arrays in self.qbpm.log_names.items():\n for log_array in log_arrays:\n if log_array not in omit_log:\n self.curves[log_array].setData(self.qbpm.log_time, self.qbpm.log_arrays[log_array],clear=True)\n # self.fill.setCurves(self.curves['posz_sens_low_log'], self.curves['posz_sens_high_log'])", "def clear_log(self):\n\n self.__logs = []", "def flush(self):\n for k, l in self.logs.items():\n self.full_logs[k].extend(l)\n self.logs = dict()", "def set_values(self,x):\n for i in range(len(self)):\n self[i].set_value(x[i])", "def change_log_length(self, log_length):\n len_diff = abs(self.log_length - log_length)\n if log_length > self.log_length:\n for log_group in self.log_names.values():\n for log_array in log_group:\n tmparr = numpy.full(log_length, self.log_arrays[log_array][0]) # generate tmparr with first value from array\n tmparr[-self.log_arrays[log_array].size:] = self.log_arrays[log_array] # fill end with current array\n self.log_arrays[log_array] = tmparr\n tmparr = numpy.zeros(log_length)\n tmparr[:len_diff] = numpy.linspace(self.log_time[0] - len_diff/self.frequency,\n self.log_time[0], len_diff)\n tmparr[-self.log_time.size:] = self.log_time\n self.log_time = tmparr\n else:\n for log_group in self.log_names.values():\n for log_array in log_group:\n tmparr = numpy.zeros(log_length)\n tmparr[:] = self.log_arrays[log_array][-log_length:]\n self.log_arrays[log_array] = tmparr\n tmparr = numpy.zeros(log_length)\n tmparr[:] = self.log_time[-log_length:]\n self.log_time = tmparr\n self.log_length = log_length", "def __call__(self, **kwargs: float):\n for k, v in kwargs.items():\n if k not in self.logs:\n self.logs[k] = []\n self.logs[k].append(v)", "def log_paths(self, value):\n self._log_paths = value", "def log_all(self):\n self.save_raw()\n self.log()", "def on_train_begin(self, logs={}):\n self._beta = []", "def array(self, src) -> None:\n self.set_array(src)", "def _set_array(self, name, value, index=None):\n util.set_array_if_not_same(self._arrays[name], value, index)", "def __convert_to_log(self):\n for i in range(self.nStates):\n if self.pi[i]>0:\n self.pi[i]=log(self.pi[i])\n else:\n self.pi[i]=float('-inf')\n for j in range(self.nStates):\n if self.t[i][j]>0:\n self.t[i][j]=log(self.t[i][j])\n else:\n self.t[i][j]=float('-inf')\n for j in range(self.nObs):\n if self.e[i][j]>0:\n self.e[i][j]=log(self.e[i][j])\n else:\n self.e[i][j]=float('-inf')\n 
self.logdomain=True", "def set_values(self, value):\n for i in range(len(self)):\n self._elements[i] = value", "def updateArrayPlotData(self):\n self.arrayPlotData.set_data(\"channel0\",self.array0)\n self.arrayPlotData.set_data(\"channel1\",self.array1)\n self.arrayPlotData.set_data(\"channel2\",self.array2)\n self.arrayPlotData.set_data(\"channel3\",self.array3)\n self.arrayPlotData.set_data(\"channel4\",self.array4)\n self.arrayPlotData.set_data(\"channel5\",self.array5)\n self.arrayPlotData.set_data(\"channel6\",self.array6)\n self.arrayPlotData.set_data(\"channel7\",self.array7)\n self.arrayPlotData.set_data(\"cursorXS\",self.cursorXS)\n #self.arrayPlotData.set_data(\"cursorVertical\",self.cursorVertical)", "def reset_data_recorder(self):\n\n self.t_values = []\n self.x_values = []\n self.tau_values = []", "def on_train_begin(self, logs={}):\n self.val_kappas = []", "def logdir(self, logdir) -> None:\n self._logdir = logdir\n self._update_logdir()\n for child_metric_real, child_metric_fake in self.children_real_fake:\n child_metric_real.logdir, child_metric_fake.logdir = logdir, logdir", "def setAppendLog(self,value):\n self.PDFreactorConfiguration.in1[\"appendLog\"] = value", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass", "def setValues(self):\n pass" ]
[ "0.6752328", "0.64241666", "0.6055851", "0.5979368", "0.5926505", "0.5659023", "0.5599223", "0.55467343", "0.5533408", "0.5506867", "0.5502828", "0.54892516", "0.5484629", "0.54787904", "0.54655725", "0.5448269", "0.5432319", "0.54243636", "0.5422054", "0.5410817", "0.54024714", "0.5401402", "0.5400872", "0.5391085", "0.53906584", "0.53810924", "0.53780264", "0.53780264", "0.53780264", "0.53780264" ]
0.706452
0
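Each row's metadata declares a triplet objective over (query, document, negatives). Assuming the usual reading of that field, a minimal sketch of how one row expands into (anchor, positive, negative) training triplets; the function name is illustrative, not part of the dataset:

def row_to_triplets(row):
    # The query acts as the anchor, the document as the positive,
    # and each of the 30 negatives yields one triplet.
    anchor, positive = row["query"], row["document"]
    return [(anchor, positive, negative) for negative in row["negatives"]]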
Get list of requirements required for installation.
def install_requires():
    return reqs('requirements.txt')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_requires():\n return reqs(\"requirements.txt\")", "def get_install_requires() -> List[str]:\n return [\n \n ]", "def requires():\n install_reqs = parse_requirements(join(CWD, 'requirements', 'base.txt'),\n session=False)\n return [str(ir.req) for ir in install_reqs]", "def install_requires():\n skip_install_requires = environ.get('SKIP_INSTALL_REQUIRES')\n if not skip_install_requires:\n with open('requirements.pip') as r:\n return r.readlines()\n return []", "def get_requirements():\n name = 'pypeit/requirements.txt'\n\n requirements_file = os.path.join(os.path.dirname(__file__), name)\n install_requires = [line.strip().replace('==', '>=') for line in open(requirements_file)\n if not line.strip().startswith('#') and line.strip() != '']\n return install_requires", "def get_requirements():\n requirements_list = []\n\n if not os.path.isfile(REQUIREMENTS_FILE):\n # Check if requirements file did not exist.\n return requirements_list\n\n with open(REQUIREMENTS_FILE) as reqs:\n for install in reqs:\n requirements_list.append(install.strip())\n\n return requirements_list", "def requirements(self):\n requirements = []\n return requirements", "def get_install_requires():\n requirements = []\n for line in open('requirements.txt').readlines():\n # skip to next iteration if comment or empty line\n if line.startswith('#') or line == '' or line.startswith('http') or line.startswith('git'):\n continue\n # add line to requirements\n requirements.append(line)\n return requirements", "def get_requirements(req):\n\n install_requires = []\n with open(req) as f:\n for line in f:\n if not line.startswith(\"#\"):\n install_requires.append(line.strip())\n return install_requires", "def get_requirements():\n raw_requirements = read(\"requirements.txt\")\n requirements = []\n dependencies = []\n\n for req in raw_requirements.splitlines():\n req = req.strip()\n if not req:\n continue\n\n if req.startswith(\"#\"):\n continue\n\n if \"+\" in req:\n dependencies.append(req)\n else:\n requirements.append(req)\n\n return requirements, dependencies", "def requires(self):\n return []", "def python_requirements(self):\n try:\n dist = self.requirement.pip_requirement.get_dist()\n extras = self.requirement.pip_requirement.extras\n requirements = list(dist.requires(extras))\n except Exception:\n logger.warning(\"Failed to determine installation requirements of %s \"\n \"using pkg-resources, falling back to old implementation.\",\n self, exc_info=True)\n requirements = self.python_requirements_fallback\n logger.debug(\"Python requirements of %s: %r\", self, requirements)\n return requirements", "def get_fsleyes_deps():\n\n # The dependency list is stored in requirements.txt\n with open(op.join(basedir, 'requirements.txt'), 'rt') as f:\n install_requires = f.readlines()\n\n return [i.strip() for i in install_requires]", "def learn_requirements():\n req_file = \"requirements.txt\"\n reqs = []\n\n import os\n\n path = os.path.dirname(__file__)\n req_file = os.path.join(path, \"..\", req_file)\n if not os.path.exists(req_file):\n # not needed with installed package\n return reqs\n\n excludes = \"versioneer coveralls coverage\".split()\n with open(req_file, \"r\") as fp:\n buf = fp.read().strip().splitlines()\n for req in buf:\n req = req.strip()\n if (\n req != \"\"\n and not req.startswith(\"#\")\n and req not in excludes\n ):\n reqs.append(req)\n return reqs", "def get_requirement_strings(self):\n opts = self.get_options()\n return (\n opts.requirements,\n opts.timeout_requirements,\n opts.cov_requirements,\n 
opts.unittest2_requirements,\n )", "def get_requirements():\n command = ['pip', 'list']\n result = run(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)\n assert not result.stderr, \"stderr not empty\"\n return result.stdout", "def get_setup_requires(dist):\n reqs = dist.command_options.get('metadata', {}).get('setup_requires')\n if reqs:\n return pkg_resources.parse_requirements([i.strip()\n for i in reqs[1].split('\\n')\n if i.strip()])\n return []", "def get_setup_requires(dist):\n reqs = dist.command_options.get('metadata', {}).get('setup_requires')\n if reqs:\n return pkg_resources.parse_requirements([i.strip()\n for i in reqs[1].split('\\n')\n if i.strip()])\n return []", "def findRequirements():\n return [\n line.strip()\n for line in open(\"requirements.txt\").readlines()\n if not line.startswith(\"#\")\n ]", "def required_packages(cls) -> List[Text]:\n return []", "def install_requires():\n return [\n \"SQLAlchemy~=1.3\",\n \"bibtexparser~=0.6.2\",\n \"click~=6.7\",\n \"nltk~=3.4\",\n \"numpy~=1.17\",\n \"langdetect\",\n \"langcodes\",\n \"PyPDF2~=1.26\",\n \"tabulate~=0.7\",\n \"tqdm~=4.11.2\",\n \"requests>2,<3\",\n ]", "def requirements(self, context):\n\n requirements = []\n\n # Get all the tasks and the lists (so the .fill on lists are also\n # considered.)\n all_tasks = list(self.tasks) + list(flatten(self.tasks, context))\n for task in all_tasks:\n task_details = getattr(task, '__garcon__', None)\n if task_details:\n requirements += task_details.get('requirements', [])\n else:\n raise NoRunnerRequirementsFound()\n return set(requirements)", "def get_requirements():\n\n with open('requirements.txt', 'r') as f:\n requirements = f.readlines()\n requires = []\n for require in requirements:\n if require.startswith(\"#\") or require.startswith(\"\\n\"):\n continue\n else:\n requires.append(require.replace(\"\\n\", \"\"))\n return requires", "def find_requirements():\n with open(\"requirements.txt\", 'r') as f:\n return f.read().splitlines()", "def get_requirement_info():\n links, requirements = [], []\n info = {'dependency_links': links, 'install_requires': requirements}\n requirements_path = 'requirements.txt'\n\n if not os.path.isfile(requirements_path):\n print('requirements.txt not found. 
Did you forget it?')\n return info\n\n reqs = filter(None, map(str.strip, open(requirements_path)))\n for line in reqs:\n if is_http(line):\n i = line.find('#egg=')\n if i == -1:\n raise SetupError('Missing \\'#egg=\\' in requirement link.')\n links.append(line[:i])\n requirements.append(line[i+5:])\n else:\n requirements.append(line)\n return info", "def read_requirements():\r\n reqs_path = os.path.join('.', 'requirements.txt')\r\n with open(reqs_path, 'r') as f:\r\n requirements = [line.rstrip() for line in f]\r\n return requirements", "def read_requirements():\n with open('requirements.txt') as f:\n requirements = f.readlines()\n return [element.strip() for element in requirements]", "def system_requirements(self):\n return self._system_requirements", "def get_requirements():\n with open('requirements.txt') as fd:\n lines = fd.read().splitlines()\n requires, links = [], []\n for line in lines:\n if line.startswith('git+'):\n links.append(line)\n elif line:\n requires.append(line)\n return requires, links", "def read_requirements():\n reqs_path = os.path.join(__location__, 'requirements.txt')\n with open(reqs_path, encoding='utf8') as f:\n reqs = [line.strip() for line in f if not line.strip().startswith('#')]\n\n names = []\n links = []\n for req in reqs:\n if '://' in req:\n links.append(req)\n else:\n names.append(req)\n return {'install_requires': names, 'dependency_links': links}" ]
[ "0.8409988", "0.8359472", "0.828312", "0.8219742", "0.81854343", "0.81252146", "0.8071705", "0.80322313", "0.7970809", "0.7774683", "0.7724761", "0.76847386", "0.7667431", "0.76324123", "0.76069945", "0.76004237", "0.75916094", "0.75916094", "0.74921674", "0.7445241", "0.743117", "0.74096406", "0.73980314", "0.7300958", "0.7213318", "0.71520126", "0.714515", "0.7109481", "0.7095788", "0.7085242" ]
0.84131104
0
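In every row shown here the document_score exceeds all 30 negative_scores and document_rank is 0. Assuming the scores are query-similarity values, the rank can be recomputed from them as follows (a sketch, not part of the dataset):

def rank_of_document(document_score, negative_scores):
    # Rank 0 means the gold document outscores every negative,
    # which matches the document_rank values in this preview.
    return sum(1 for score in negative_scores if score > document_score)

rank_of_document(0.84131104, [0.8409988, 0.8359472, 0.828312])  # -> 0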
Return k closest points to the origin >>> kclosestpoints([(1,1), (0,0), (2,2), (1,2), (3,2)], 2) [((0, 0), 0), ((1, 1), 2)]
def kclosestpoints(points, k):
    dist = {p: 0 for p in points}
    for point in points:
        dist[point] = point[0] ** 2 + point[1] ** 2
    dist = sorted(dist.items(), key=lambda x: x[1], reverse=False)
    return dist[:k]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_k_closest_points(point, data, k, distance_metric):\n points_and_scores = []\n k_closest_points = []\n for item in data:\n item_score = distance_metric(point, item)\n points_and_scores.append([item, item_score])\n points_and_scores = sorted(points_and_scores, key = lambda item:(item[1], item[0].coords))\n for i in range(k):\n k_closest_points.append(points_and_scores[i][0])\n return k_closest_points", "def find_nearest_neighbors(p, points, k):\n\timport numpy as np\n\tdistances = np.zeros(points.shape[0])\n\tfor i in range(len(distances)):\n\t\tdistances[i] = distance(p,points[i])\n\tind = np.argsort(distances)\n\treturn ind[0:k]", "def find_k_closest(self, a, k):\r\n if not a:\r\n return []\r\n\r\n # Sort input array by Euclidean distance from origin\r\n a.sort(key = lambda x: x[0] ** 2 + x[1] ** 2)\r\n\r\n # Return the \"k\" smallest elements\r\n return a[:k]", "def find_nearest_neighbors(p, points, k=5):\n dist = np.zeros(points.shape[0])\n for i in range(len(dist)):\n dist[i] = distance(p, points[i])\n ind = np.argsort(dist)\n return ind[0:k]", "def get_nearest(src_points, candidates, k_neighbors=1):\n\n # Create tree from the candidate points\n tree = BallTree(candidates, leaf_size=15, metric='haversine')\n distances, indices = tree.query(src_points, k=k_neighbors)\n\n # Transpose to get distances and indices into arrays\n distances = distances.transpose()\n indices = indices.transpose()\n\n # Get closest indices and distances (i.e. array at index 0)\n # note: for the second closest points, you would take index 1, etc.\n closest = indices[0]\n closest_dist = distances[0]\n\n # Return indices and distances\n return closest, closest_dist", "def get_nearest(src_points, candidates, k_neighbors=1):\r\n\r\n # Create tree from the candidate points. leaf-size só muda o processamento, e a métrica é a forma de cálculo, que no caso é a Great Circle Distances\r\n tree = BallTree(candidates, leaf_size=15, metric='haversine')\r\n\r\n # Find closest points and distances. K é a quantidade de pontos que queremos a dis^tanica e SRC points são os pontos\r\n distances, indices = tree.query(src_points, k=k_neighbors)\r\n\r\n # Transpose to get distances and indices into arrays\r\n distances = distances.transpose()\r\n indices = indices.transpose()\r\n\r\n # Get closest indices and distances (i.e. 
array at index 0)\r\n # note: for the second closest points, you would take index 1, etc.\r\n closest = indices[0]\r\n closest_dist = distances[0]\r\n\r\n # Return indices and distances\r\n return (closest, closest_dist)", "def k_nearest(self, pt, k):\n if k < 1:\n raise ValueError('k should be at least 1')\n result = []\n visit_ct = k_nearest(self.root, pt, k, result)\n logging.debug('Visited {0} leaf nodes'.format(visit_ct))\n return [(math.sqrt(d), item) for (d, item) in result]", "def test_k_nearest(self):\n L = range(100)\n L = [(i, i, i, i) for i in L]\n tree = KdTree(L)\n # remove distance, only keep points from the result\n items = lambda items: [x for (d, x) in items] \n assert items(tree.k_nearest((-1, -1), 1)) == [(0, 0, 0, 0)]\n assert items(tree.k_nearest((100, 100), 1)) == [(99, 99, 99, 99)]\n assert items(tree.k_nearest((50, 50), 1)) == [(50, 50, 50, 50)]\n assert items(tree.k_nearest((-1, -1), 2)) == [(0, 0, 0, 0),\n (1, 1, 1, 1)]", "def find_closest_points(points):\n closest_dist = float(\"inf\")\n closest_points = None, None\n for y, point_one in enumerate(points):\n for x, point_two in enumerate(points):\n if x > y:\n dist= distance_between(point_one.points,point_two.points)\n if dist < closest_dist:\n closest_dist = dist\n closest_points= point_one, point_two\n\n return closest_points", "def closest_point(point, points):\n return points[cdist([point], points).argmin()]", "def nearest_input_pts(\n in_latlons: ndarray, out_latlons: ndarray, k: int\n) -> Tuple[ndarray, ndarray]:\n # Convert input latitude and longitude to XYZ coordinates, then create KDtree\n in_x, in_y, in_z = ecef_coords(in_latlons[:, 0].flat, in_latlons[:, 1].flat)\n in_coords = np.c_[in_x, in_y, in_z]\n in_kdtree = KDTree(in_coords)\n # Convert output to XYZ and query the KDtree for nearby input points\n out_x, out_y, out_z = ecef_coords(out_latlons[:, 0].flat, out_latlons[:, 1].flat)\n out_coords = np.c_[out_x, out_y, out_z]\n distances, indexes = in_kdtree.query(out_coords, k)\n # Avoid single dimension output for k=1 case\n if distances.ndim == 1:\n distances = np.expand_dims(distances, axis=1)\n if indexes.ndim == 1:\n indexes = np.expand_dims(indexes, axis=1)\n return distances, indexes", "def brute_closest_pair(points):\n n = len(points)\n min_distance = float(\"inf\")\n last_pair = None\n for i in range(n):\n for j in range(i+1, n):\n result = distance(points[i], points[j])\n if result < min_distance:\n min_distance = result\n last_pair = [points[i], points[j]]\n return last_pair", "def closest(point, points):\n pts = [(Point.distance(point, p), p) for p in points]\n pts.sort()\n return pts[0][1]", "def closest_points(point, points, nn=1):\n\n eu_dsts = point - points\n eu_dsts = np.sqrt((eu_dsts * eu_dsts).sum(axis=1))\n n_ids = np.argsort(eu_dsts)\n out_points = np.zeros(shape=(nn, 3))\n for i in range(nn):\n out_points[i] = points[n_ids[i], :]\n return out_points", "def k_closest_contexts(self, context_vec: np.ndarray, candidates: List[NumberContext], k: int = 5)\\\n -> List[Tuple[NumberContext, float]]:\n sim_candidates = list(map(lambda cand: (embeddings_sim(context_vec, cand[1], self.model.dim()), cand),\n candidates))\n results = sorted(sim_candidates, key=lambda p: p[0], reverse=True)[:k]\n return list(map(lambda a: (a[1], a[0]), results))", "def find_k(self, kpt):\n kpt = np.array(kpt)\n ns = np.linalg.norm(self.kpts - kpt[None, :], axis=1)\n ik = np.argmin(ns)\n return ik", "def get_k_neighbors(self, point):\n nn = []\n nnl = []\n for p,l in 
zip(self.train_features,self.train_labels):\n d = self.distance_function(p,point)\n dl_pair = (d,l)\n nn.append(dl_pair)\n nn = sorted(nn, key = lambda x: x[0])\n for i in range(0,self.k):\n nnl.append(nn[i][1])\n return nnl\n raise NotImplementedError", "def k_nearest_neighbors(x_test, df_training, k):\n\n return np.argpartition(distance_to_each_training_point(x_test,\n df_training), k-1)[:,0:k]", "def findK_centroids_closest(self, features, clusters):\n\n class InnerFeatures:\n def __init__(self, kps, des, pos):\n self.kps = kps\n self.des = des\n self.pos = pos\n\n kmeans = KMeans(n_clusters=clusters)\n\n pts = np.array(features.pos)\n kps = np.array(features.kps)\n des = np.array(features.des)\n\n kmeans.fit(pts)\n m_clusters = kmeans.labels_.tolist()\n centers = np.array(kmeans.cluster_centers_)\n\n closest, _ = pairwise_distances_argmin_min(kmeans.cluster_centers_, pts)\n\n assert len(set(closest)) == clusters\n\n result = InnerFeatures(kps[closest], des[closest], pts[closest])\n return result", "def closest_points(self, points, maxdist=None):\n return [self.closest_point(point, maxdist) for point in points]", "def bruteClosest(list_points):\n\n minimum = 0\n p1 = 0\n p2 = 0\n for i in list_points:\n for k in list_points:\n \n d = dist(i,k)\n if (d < minimum and d != 0) or minimum == 0:\n p1 = i\n p2 = k\n minimum = d\n return [p1, p2, minimum]", "def closest(centroids,coordinates):\n tup = [(cen[0], haversine(coordinates,cen[1])) for cen in centroids]\n distance = min(tup, key = lambda x:x[1])\n return (distance[0],coordinates)", "def visit_k_nearest(node, pt, k, result):\n # rather brute force but because cut off and k expected to be rather small\n # not further optimized\n # (result could instead of list be a bin heap with at most k items)\n for active, item in zip(node.active, node.items):\n # check active items\n if active:\n d = distance2(pt, item)\n result.append( (d, item) )\n # sort on distance\n result.sort(key=lambda x: x[0])\n # keep max k items\n while len(result) > k:\n result.pop()", "def nearest_point(pt):\n nearest_point = None\n min_dist = float(\"inf\")\n for p in cur_points:\n dist = euclidean_dist(pt, p.to_tuple())\n if dist < min_dist:\n min_dist, nearest_point = dist, p\n\n return nearest_point.to_tuple()", "def closest_point(p1: Vector3, p2: Vector3, p3: Vector3) -> Vector3:\n k = ((p2.y - p1.y) * (p3.x - p1.x) - (p2.x - p1.x) * (p3.y - p1.y)) / ((p2.y - p1.y) ** 2 + (p2.x - p1.x) ** 2)\n x4 = p3.x - k * (p2.y - p1.y)\n y4 = p3.y + k * (p2.x - p1.x)\n\n return Vector3(x4, y4, 0)", "def k_nearest(node, pt, k, result):\n if node.items:\n visit_k_nearest(node, pt, k, result)\n return 1\n else:\n dx = pt[node.cutdim] - node.cutval\n if dx <= 0:\n near = node.left\n far = node.right\n else:\n near = node.right\n far = node.left\n ct_near = k_nearest(near, pt, k, result)\n # check if we found results, \n # if we have sufficient results and the closest of these\n # is closer than the split line, we do not have to search further\n if result and len(result) >= k and pow(dx, 2) >= result[0][0]:\n return ct_near \n ct_far = k_nearest(far, pt, k, result)\n return ct_near + ct_far", "def getClosestK( centerVal, pixel ):\n distanceDict = {}\n minDistance = 99999999\n minDistanceIndex = 0\n for i in range(len(centerVal)):\n distanceDict[i] = getDistance( centerVal[i], pixel )\n if minDistance > distanceDict[i]:\n minDistance = distanceDict[i]\n minDistanceIndex = i\n closestK = centerVal[minDistanceIndex]\n return closestK", "def knn0(pnts, p, k):\r\n p = np.asarray(p)\r\n 
pnts = np.asarray(pnts)\r\n diff = pnts - p[np.newaxis, :]\r\n d = np.einsum('ij,ij->i', diff, diff)\r\n idx = np.argsort(d)[:k]\r\n# s = [i.tolist() for i in pnts[idx]]\r\n return pnts[idx].tolist()", "def closestCentroids(self, points , centroids ):\n dists = scipy.spatial.distance.cdist(points,centroids)\n # 1 is dimension\n minIds = numpy.argmin(dists, 1)\n return minIds", "def nearest_neighbors(self):\n neighbor_distances_and_indices = []\n for idx, data_point in enumerate(self.data):\n distance = self.euclidean_dis(data_point[:-1], self.query) # Calculate the distance between the query\n # example and the current example from the data.\n\n neighbor_distances_and_indices.append((distance, idx)) # Add the distance and the index of the example\n # to an ordered collection\n\n sorted_neighbor_distances_and_indices = sorted(neighbor_distances_and_indices, key=lambda x: x[0]) #\n # Sort the ordered collection of distances and indices from smallest to largest (in ascending order) by\n # the distances\n\n k_nearest_distances_and_indices = sorted_neighbor_distances_and_indices[:self.k] # Pick the first K\n # entries from the sorted collection\n\n k_nearest_labels = [self.data[i][1] for distance, i in k_nearest_distances_and_indices] # Get the labels of\n # the selected K entries\n\n return k_nearest_labels, self.mode(k_nearest_labels)" ]
[ "0.79715997", "0.7098614", "0.70486695", "0.70407385", "0.6972852", "0.6917117", "0.68956125", "0.6822356", "0.6779336", "0.67348033", "0.67085296", "0.6700983", "0.661611", "0.64570594", "0.64557004", "0.6420059", "0.6397378", "0.63768524", "0.6335915", "0.6302754", "0.62684274", "0.6256114", "0.62516713", "0.6212001", "0.6198276", "0.6195975", "0.6177495", "0.6092671", "0.60451585", "0.60318595" ]
0.8524627
0
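As a design note, the document above sorts every point by squared distance; the same selection can be done with a partial sort via heapq. A sketch that reproduces the doctest in the query:

import heapq

def kclosestpoints_heap(points, k):
    # Partial selection of the k nearest points without sorting the full list.
    return heapq.nsmallest(k, ((p, p[0] ** 2 + p[1] ** 2) for p in points),
                           key=lambda item: item[1])

kclosestpoints_heap([(1, 1), (0, 0), (2, 2), (1, 2), (3, 2)], 2)
# -> [((0, 0), 0), ((1, 1), 2)]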
Test Scenario for following sequence.
1. Put metric alarm (period 60, evaluation_periods 1)
2. Put metric data which is over the threshold so that the alarm state would be 'ALARM'
3. Wait 2 minutes so that alarm state could be 'INSUFFICIENT_DATA'
4. Put metric data which is under the threshold so that the alarm state would be 'OK'
5. Wait 3 minutes so that alarm state could be 'INSUFFICIENT_DATA'
6. Describe alarm history and check if it has been changed as we are expected
def test_eval_alarm(self):
    def get_state_update_value(h):
        """ """
        oldstate = h.data['oldState']['stateValue']
        newstate = h.data['newState']['stateValue']
        querydate = h.data['newState']['stateReasonData']['queryDate']
        querydate = utils.parse_strtime(querydate)
        return oldstate, newstate, querydate

    test_uuid = str(uuid.uuid4())
    alarmname = "TestEvalAlarm_" + test_uuid
    metricname = "TestEvalMetric_" + test_uuid
    namespace = "unittest"
    unit = "Percent"
    dimensions = {"test_id": test_uuid}
    threshold = 2.0

    # create metric alarm
    alarm = MetricAlarm(name=alarmname, metric=metricname,
                        namespace=namespace, statistic="Average",
                        comparison=">", threshold=threshold, period=60,
                        evaluation_periods=1, unit=unit,
                        dimensions=dimensions)
    self.synaps.put_metric_alarm(alarm)
    # due to put_metric_alarm is asynchronous
    time.sleep(ASYNC_WAIT)

    alarm_time = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    self.synaps.put_metric_data(namespace=namespace, name=metricname,
                                value=threshold + 1, timestamp=alarm_time,
                                unit=unit, dimensions=dimensions)
    time.sleep(60 * 5)

    ok_time = datetime.datetime.utcnow().replace(second=0, microsecond=0)
    self.synaps.put_metric_data(namespace=namespace, name=metricname,
                                value=threshold - 2, timestamp=ok_time,
                                unit=unit, dimensions=dimensions)
    time.sleep(60 * 5)

    histories = self.synaps.describe_alarm_history(alarm_name=alarmname,
                                                   history_item_type="StateUpdate")
    histories.sort(cmp=lambda a, b: cmp(a.timestamp, b.timestamp))

    result = map(get_state_update_value, histories)
    expected = (('INSUFFICIENT_DATA', 'ALARM', alarm_time),
                ('ALARM', 'INSUFFICIENT_DATA', None),
                ('INSUFFICIENT_DATA', 'OK', ok_time),
                ('OK', 'INSUFFICIENT_DATA', None))
    failmsg = "expected: %s real: %s" % (expected, result)

    self.assertEqual(len(result), len(expected), msg=failmsg)

    for ((r_new, r_old, r_time), (e_new, e_old, e_time)) in zip(result, expected):
        self.assertEqual(r_new, e_new, msg=failmsg)
        self.assertEqual(r_old, e_old, msg=failmsg)
        if e_time:
            self.assertTrue((r_time - e_time) < timedelta(seconds=300),
                            msg=failmsg)

    self.synaps.delete_alarms(alarms=[alarmname])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def alarm_in_setup_change():\n setup_write(\"!M1 meas interval\", \"00:01:00\")\n setup_write(\"!M2 meas interval\", \"00:01:00\")\n setup_write(\"!TX3 scheduled interval\", \"00:05:00\")", "def test_alert_high(fd, formatter, event):\n ao = AlertObserver(3, fd=fd, formatter=formatter, event=event)\n ao.start()\n event.run_until_wait()\n for _ in range(360):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm triggered'\n event.stop()\n assert fd.getvalue() == 'alarm triggered\\n'", "def testAlternateMetrics(self):\n\n def result2(t, rew):\n return TrainingResult(time_total_s=t, neg_mean_loss=rew)\n\n sched = HyperBandScheduler(\n time_attr='time_total_s', reward_attr='neg_mean_loss')\n stats = self.default_statistics()\n\n for i in range(stats[\"max_trials\"]):\n t = Trial(\"__fake\")\n sched.on_trial_add(None, t)\n runner = _MockTrialRunner(sched)\n\n big_bracket = sched._hyperbands[0][-1]\n\n for trl in big_bracket.current_trials():\n runner._launch_trial(trl)\n current_length = len(big_bracket.current_trials())\n\n # Provides results from 0 to 8 in order, keeping the last one running\n for i, trl in enumerate(big_bracket.current_trials()):\n action = sched.on_trial_result(runner, trl, result2(1, i))\n runner.process_action(trl, action)\n\n new_length = len(big_bracket.current_trials())\n self.assertEqual(action, TrialScheduler.CONTINUE)\n self.assertEqual(new_length, self.downscale(current_length, sched))", "def test_alert_low(fd, formatter, event):\n ao = AlertObserver(3, fd=fd, formatter=formatter, event=event)\n ao.start()\n event.run_until_wait()\n for _ in range(360):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm triggered'\n for _ in range(359):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm recovered'\n event.stop()\n assert fd.getvalue() == 'alarm triggered\\nalarm recovered\\n'", "def test_check_alarm(self):\n\n fail_msg = \"Creation instance failed\"\n\n create_kwargs = {}\n if 'neutron' in self.config.network.network_provider:\n network = [net.id for net in\n self.compute_client.networks.list()\n if net.label == self.private_net]\n\n create_kwargs = {'nics': [{'net-id': network[0]}]}\n\n image = nmanager.get_image_from_name()\n name = rand_name('ost1_test-instance-alarm_actions')\n self.instance = self.verify(600, self.compute_client.servers.create, 1,\n fail_msg,\n \"server creation\",\n name=name,\n flavor=self.flavor,\n image=image,\n **create_kwargs)\n self.set_resource(self.instance.id, self.instance)\n\n self.verify(200, self._wait_for_instance_metrics, 2,\n \"instance is not available\",\n \"instance becoming 'available'\",\n self.instance, 'ACTIVE')\n\n fail_msg = \"Creation metrics failed.\"\n\n statistic_meter_resp = self.verify(600, self.wait_for_instance_metrics, 3,\n fail_msg,\n \"metrics created\",\n self.meter_name)\n\n fail_msg = \"Creation alarm failed.\"\n threshold = statistic_meter_resp[0].avg - 1\n create_alarm_resp = self.verify(5, self.create_alarm,\n 4, fail_msg, \"alarm_create\",\n meter_name=self.meter_name,\n threshold=threshold,\n name=self.name,\n period=self.period,\n statistic=self.statistic,\n comparison_operator=self.comparison_operator)\n\n fail_msg = \"Alarm verify state failed.\"\n\n self.verify(1000, self.wait_for_alarm_status, 5,\n fail_msg,\n \"alarm status becoming 'alarm'\",\n create_alarm_resp.alarm_id)", "def testGetPowerHighAlarm(self):\n self.ports.get_power_high_alarm(file_name = 'get_power_high_alarm.xml', port_ids = portsDict['port_ids'], power_high_alarms = 
portsDict['power_high_alarm'])", "def test_update_goal_metric(self):\n pass", "def test_alert_high_doesnt_fire_twice(fd, formatter, event):\n ao = AlertObserver(3, fd=fd, formatter=formatter, event=event)\n ao.start()\n event.run_until_wait()\n for _ in range(360):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm triggered'\n for _ in range(360):\n ao.update(DummyLogRecord())\n event.run_until_wait() # no print\n event.stop()\n assert fd.getvalue() == 'alarm triggered\\n'", "def testGetPowerAlarmHyteresis(self):\n self.ports.get_power_alarm_hysteresis(file_name = 'get_port_label.xml', port_ids = portsDict['port_ids'], power_alarm_hysteresis = portsDict['power_alarm_hysteresis'])", "def event_m10_29_x35():\r\n \"\"\"State 0,2: Timer start judgment\"\"\"\r\n CompareAreaTimer(0, 0, 0, 0)\r\n CompareAreaTimer(1, 0, 0, 2)\r\n if ConditionGroup(0):\r\n \"\"\"State 3: Start area timer\"\"\"\r\n StartAreaTimer(0)\r\n elif ConditionGroup(1):\r\n \"\"\"State 4: Restart area timer\"\"\"\r\n RestartAreaTimer(0)\r\n \"\"\"State 1: Has the waiting time been exceeded?\"\"\"\r\n CompareAreaTimer(0, 0, 300, 2)\r\n assert ConditionGroup(0)\r\n \"\"\"State 5: Stop area timer\"\"\"\r\n PauseAreaTimer(0)\r\n \"\"\"State 6: End state\"\"\"\r\n return 0", "def test_alert_schedule(cinq_test_service):\n\n setup_info = setup_test_aws(cinq_test_service)\n account = setup_info['account']\n\n prep_s3_testing(cinq_test_service)\n\n # Add resources\n client = aws_get_client('s3')\n bucket_name = dbconfig.get('test_bucket_name', NS_CINQ_TEST, default='testbucket')\n client.create_bucket(Bucket=bucket_name)\n\n # Collect resources\n collect_resources(account=account, resource_types=['s3'])\n\n # Initialize auditor\n auditor = MockRequiredTagsAuditor()\n\n # Test 1 --- The auditor should not alert again as we are not at the next scheduled alert time\n auditor.run()\n assert auditor._cinq_test_notices\n auditor.run()\n assert not auditor._cinq_test_notices", "def test_alert_low_doesnt_fire_twice(fd, formatter, event):\n ao = AlertObserver(3, fd=fd, formatter=formatter, event=event)\n ao.start()\n event.run_until_wait()\n for _ in range(360):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm triggered'\n for _ in range(359):\n ao.update(DummyLogRecord())\n event.run_until_wait() # 'alarm recovered'\n for _ in range(359):\n ao.update(DummyLogRecord())\n event.run_until_wait() # no print\n event.stop()\n assert fd.getvalue() == 'alarm triggered\\nalarm recovered\\n'", "def wait_for_metric_alarm_state(session: Session, alarm_name: str, expected_alarm_state: str, time_to_wait: int):\n start_time = time.time()\n elapsed_time = time.time() - start_time\n alarm_state = get_metric_alarm_state(session, alarm_name)\n\n # Wait for execution step to resolve in waiting or one of terminating statuses\n while alarm_state.name != expected_alarm_state:\n if elapsed_time > time_to_wait:\n raise Exception(f'Waiting for alarm {alarm_name} to be in step {expected_alarm_state} timed out')\n time.sleep(constants.sleep_time_secs)\n alarm_state = get_metric_alarm_state(session, alarm_name)\n elapsed_time = time.time() - start_time\n return True", "def alarm_out_setup_change():\n setup_write(\"!M1 meas interval\", \"00:10:00\")\n setup_write(\"!M2 meas interval\", \"00:10:00\")\n setup_write(\"!TX3 scheduled interval\", \"01:00:00\")", "def testGetConfigPowerHighAlarm(self):\n self.ports.getconfig_power_high_alarm(file_name = 'get_power_high_alarm.xml', port_ids = portsDict['port_ids'], power_high_alarms = 
portsDict['power_high_alarm'])", "def testEditConfigCreatePowerHighAlarm(self):\n self.ports.editconfig_create_power_high_alarm(file_name = 'editconfig_create_port_label.xml', port_ids = portsDict['port_ids'], power_high_alarms = portsDict['power_high_alarm'])", "def test_add_old(self):\n TestStorage.set_timeout(10)\n TestStorage.set_time(100)\n store = RatedStatisticStorage()\n store._RatedStatisticStorage__add_single_outcome(\n \"n!node1\", \"cpu\", Outcome.HIGH, rospy.Time(90))\n self.assertEqual(\n store.get_outcome(\"n!node1\", \"cpu\"), Outcome.UNKNOWN)\n\n \"\"\"Test if an statistic thats just not to old will be saved.\"\"\"\n TestStorage.set_timeout(10)\n TestStorage.set_time(100)\n store = RatedStatisticStorage()\n store._RatedStatisticStorage__add_single_outcome(\n \"n!node2\", \"cpu\", Outcome.HIGH, rospy.Time(91))\n self.assertEqual(\n store.get_outcome(\"n!node2\", \"cpu\"), Outcome.HIGH)", "def testGetPowerAlarmStatus(self):\n self.ports.get_power_alarm_status(file_name = 'get_power_alarm_status.xml', port_ids = portsDict['port_ids'], power_alarm_status = portsDict['power_alarm_status'])", "def common_alarm_func_add(asg_name, metricname, namespace, arn_scalein, arn_scaleout, alarmname, desc, Unit):\n d1=desc+ \" High\"\n a1=alarmname + '-high'\n try:\n cloudwatch.put_metric_alarm(AlarmName=a1, AlarmDescription=d1,\n AlarmActions=[arn_scaleout],\n ActionsEnabled=True, MetricName=metricname, EvaluationPeriods=1,\n Threshold=float(ScaleUpThreshold), Statistic=\"Average\", Namespace=namespace,\n ComparisonOperator=\"GreaterThanThreshold\", Period=ScalingPeriod, Unit=Unit)\n except Exception as e:\n logger.error('Failed to add High Alarm: ' + desc + ' for ASG: ' + asg_name)\n logger.error(\"[Alarm High Add]: {}\".format(e))\n return False\n\n a1=alarmname + '-low'\n d1=desc+ \" Low\"\n try:\n cloudwatch.put_metric_alarm(AlarmName=a1, AlarmDescription=d1,\n AlarmActions=[arn_scalein],\n ActionsEnabled=True, MetricName=metricname, EvaluationPeriods=1,\n Threshold=float(ScaleDownThreshold), Statistic=\"Average\", Namespace=namespace,\n ComparisonOperator=\"LessThanThreshold\", Period=ScalingPeriod,\n Unit=Unit)\n except Exception as e:\n logger.error('Failed to add Low Alarm: ' + desc + ' for ASG: ' + asg_name)\n logger.error(\"[Alarm Low Add]: {}\".format(e))\n return False\n\n return True", "def test_days_weeks_activity():\n assert analytics.activity('daily', yoga_trackings(), 1) == 17\n assert analytics.activity('weekly', run_trackings(), 1) == 4\n assert analytics.activity('daily', read_trackings(), 1) == 18\n assert analytics.activity('daily', meditation_trackings(), 1) == 15\n assert analytics.activity('weekly', french_trackings(), 1) == 5", "async def test_age_limit_expiry(hass: HomeAssistant) -> None:\n now = dt_util.utcnow()\n current_time = datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC)\n\n with freeze_time(current_time) as freezer:\n assert await async_setup_component(\n hass,\n \"sensor\",\n {\n \"sensor\": [\n {\n \"platform\": \"statistics\",\n \"name\": \"test\",\n \"entity_id\": \"sensor.test_monitored\",\n \"state_characteristic\": \"mean\",\n \"sampling_size\": 20,\n \"max_age\": {\"minutes\": 4},\n },\n ]\n },\n )\n await hass.async_block_till_done()\n\n for value in VALUES_NUMERIC:\n current_time += timedelta(minutes=1)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n hass.states.async_set(\n \"sensor.test_monitored\",\n str(value),\n {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS},\n )\n await 
hass.async_block_till_done()\n\n # After adding all values, we should only see 5 values in memory\n\n state = hass.states.get(\"sensor.test\")\n new_mean = round(sum(VALUES_NUMERIC[-5:]) / len(VALUES_NUMERIC[-5:]), 2)\n assert state is not None\n assert state.state == str(new_mean)\n assert state.attributes.get(\"buffer_usage_ratio\") == round(5 / 20, 2)\n assert state.attributes.get(\"age_coverage_ratio\") == 1.0\n\n # Values expire over time. Only two are left\n\n current_time += timedelta(minutes=3)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n await hass.async_block_till_done()\n\n state = hass.states.get(\"sensor.test\")\n new_mean = round(sum(VALUES_NUMERIC[-2:]) / len(VALUES_NUMERIC[-2:]), 2)\n assert state is not None\n assert state.state == str(new_mean)\n assert state.attributes.get(\"buffer_usage_ratio\") == round(2 / 20, 2)\n assert state.attributes.get(\"age_coverage_ratio\") == 1 / 4\n\n # Values expire over time. Only one is left\n\n current_time += timedelta(minutes=1)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n await hass.async_block_till_done()\n\n state = hass.states.get(\"sensor.test\")\n new_mean = float(VALUES_NUMERIC[-1])\n assert state is not None\n assert state.state == str(new_mean)\n assert state.attributes.get(\"buffer_usage_ratio\") == round(1 / 20, 2)\n assert state.attributes.get(\"age_coverage_ratio\") == 0\n\n # Values expire over time. Buffer is empty\n\n current_time += timedelta(minutes=1)\n freezer.move_to(current_time)\n async_fire_time_changed(hass, current_time)\n await hass.async_block_till_done()\n\n state = hass.states.get(\"sensor.test\")\n assert state is not None\n assert state.state == STATE_UNKNOWN\n assert state.attributes.get(\"buffer_usage_ratio\") == round(0 / 20, 2)\n assert state.attributes.get(\"age_coverage_ratio\") is None", "def testGetConfigPowerAlarmHyteresis(self):\n self.ports.getconfig_power_alarm_hysteresis(file_name = 'get_port_label.xml', port_ids = portsDict['port_ids'], power_alarm_hysteresis = portsDict['power_alarm_hysteresis'])", "def set_alarms(alarm:dict, s):\r\n time = alarm['title'][:10:] + \" \" + alarm['title'][11::]\r\n alarm_time = datetime.datetime.strptime(time, \"%Y-%m-%d %H:%M\")\r\n delay = (alarm_time - datetime.datetime.strptime(str(datetime.datetime.now()).rpartition(':')[0], \"%Y-%m-%d %H:%M\")).total_seconds()\r\n if alarm['news'] and alarm['weather']:\r\n message = alarm['content'] + \" - Top news stories - One - \" + (get_news()[-1])['name'] + \" - two - \" + (get_news()[-2])['name'] + \" - three - \" + (get_news()[-3])['name'] + \" - \" + get_weather() + \" - Covid-19 update - \" + get_covid()\r\n elif alarm['news']:\r\n message = alarm['content'] + \" - Top news stories - One - \" + (get_news()[-1])['name'] + \" - two - \" + (get_news()[-2])['name'] + \" - three - \" + (get_news()[-3])['name'] + \" - Covid-19 update - \" + get_covid()\r\n elif alarm['weather']:\r\n message = alarm['content'] + \" - \" + get_weather() + \" - Covid-19 update - \" + get_covid()\r\n else:\r\n message = alarm['content'] + \" - Covid-19 update - \" + get_covid()\r\n s.enter(int(delay),1,set_off_alarm,(message,))\r\n logging.info(\"Alarm set in set_alarms(): \" + message)", "def test_api_livesession_read_attendances_well_computed_running_between_info_attendance_delay(\n self,\n ):\n # set the start at current time minus 10 seconds\n started = int(to_timestamp(timezone.now())) - 10\n\n video = VideoFactory(\n live_state=RUNNING,\n 
live_info={\"started_at\": str(started)},\n live_type=JITSI,\n )\n\n livesession = LiveSessionFactory(\n consumer_site=video.playlist.consumer_site,\n email=\"[email protected]\",\n live_attendance={\n started: {\"muted\": 1},\n started + 3: {\"onStage\": 0},\n started + 6: {\"muted\": 0},\n },\n lti_id=str(video.playlist.lti_id),\n lti_user_id=\"56255f3807599c377bf0e5bf072359fd\",\n video=video,\n )\n\n # token with right context_id and lti_user_id\n jwt_token = InstructorOrAdminLtiTokenFactory(\n playlist=video.playlist,\n consumer_site=str(video.playlist.consumer_site.id),\n context_id=str(video.playlist.lti_id),\n )\n livesession.refresh_from_db()\n with self.assertNumQueries(3):\n response = self.client.get(\n self._get_url(video),\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n )\n\n self.assertEqual(response.status_code, 200)\n # we expect 3 attendance points for 30 seconds, so one every 15\n response_json = {\n \"count\": 1,\n \"next\": None,\n \"previous\": None,\n \"results\": [\n {\n \"id\": str(livesession.id),\n \"display_name\": \"[email protected]\",\n \"is_registered\": False,\n \"live_attendance\": {\n str(started): {\"muted\": 1},\n # ATTENDANCE_PUSH_DELAY = 1, started+3 exists but not started+4,\n # so the user doesn't seem connected anymore at started+ 5\n str(started + 5): {\n \"connectedInBetween\": True,\n \"lastConnected\": started + 3,\n },\n # ATTENDANCE_PUSH_DELAY = 1, started+6 exists but not started+9,\n # so the user doesn't seem connected anymore at started+ 10\n str(started + 10): {\n \"connectedInBetween\": True,\n \"lastConnected\": started + 6,\n },\n },\n }\n ],\n }\n\n self.assertEqual(response.json(), response_json)\n\n # we call again the same request,\n # results are identical as it is cached, no queries are executed\n with self.assertNumQueries(0):\n response = self.client.get(\n self._get_url(video),\n HTTP_AUTHORIZATION=f\"Bearer {jwt_token}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json(), response_json)", "def test_is_historical(self):\n\n class Node:\n my_metric = Metric(Int64)\n\n exp_value = 42\n\n node = Node()\n node.my_metric = Historical(exp_value)\n my_metric = get_metric_object(node, 'my_metric')\n tahu_metric = my_metric.tahu_metric(node)\n self.assertTrue(tahu_metric.is_historical)\n self.assertEqual(tahu_metric.long_value, 42)", "def alarmoff() :\n s.alarm(False, \"\")", "def test_autosample(self):\n \n # Start data subscribers.\n #self._start_data_subscribers(6)\n #self.addCleanup(self._stop_data_subscribers) \n \n # Set up a subscriber to collect error events.\n #self._start_event_subscriber('ResourceAgentResourceStateEvent', 7)\n #self.addCleanup(self._stop_event_subscriber) \n \n state = self._ia_client.get_agent_state()\n self.assertEqual(state, ResourceAgentState.UNINITIALIZED)\n \n cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)\n retval = self._ia_client.execute_agent(cmd)\n state = self._ia_client.get_agent_state()\n self.assertEqual(state, ResourceAgentState.INACTIVE)\n\n cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)\n retval = self._ia_client.execute_agent(cmd)\n state = self._ia_client.get_agent_state()\n self.assertEqual(state, ResourceAgentState.IDLE)\n\n cmd = AgentCommand(command=ResourceAgentEvent.RUN)\n retval = self._ia_client.execute_agent(cmd)\n state = self._ia_client.get_agent_state()\n self.assertEqual(state, ResourceAgentState.COMMAND)\n\n cmd = AgentCommand(command=SBE37ProtocolEvent.START_AUTOSAMPLE)\n retval = 
self._ia_client.execute_resource(cmd)\n \n gevent.sleep(15)\n \n cmd = AgentCommand(command=SBE37ProtocolEvent.STOP_AUTOSAMPLE)\n retval = self._ia_client.execute_resource(cmd)\n \n cmd = AgentCommand(command=ResourceAgentEvent.RESET)\n retval = self._ia_client.execute_agent(cmd)\n state = self._ia_client.get_agent_state()\n self.assertEqual(state, ResourceAgentState.UNINITIALIZED)\n\n #self._async_event_result.get(timeout=CFG.endpoint.receive.timeout)\n #self.assertGreaterEqual(len(self._events_received), 6)\n\n #self._async_sample_result.get(timeout=CFG.endpoint.receive.timeout)\n #self.assertGreaterEqual(len(self._samples_received), 6)", "def polling_experiment_multiple(dataset_id, times, **kwargs):\n total_yeahs = 0\n total_noes = 0\n for i in range(0, times):\n yeah, noes = polling_experiment(dataset_id, **kwargs)\n total_yeahs += yeah\n total_noes += noes\n logging.info(\"After \"+str(times)+\" \"+ str(dataset_id) + \" experiments:\")\n logging.info(\"\"+ str(total_yeahs) + \" correct predictions\")\n logging.info(\"\"+ str(total_noes) + \" wrong predictions\")\n logging.info(\"\"+ str(total_yeahs/(total_yeahs + total_noes)) + \" accuracy\")", "def test_some_meet(self, initial_placement_fixture):\n assert len(ctx.cluster.influx_db.aggregate_performance()) == 0, \\\n \"Test should run on the basic model\"\n self.generic_function(above_objective=2)", "def test_basic(self):\n data = get()\n metrics = [verif.metric.Within(),\n verif.metric.A(), # Hit\n verif.metric.B(), # FA\n verif.metric.C(), # Miss\n verif.metric.D(), # Correct rejection\n verif.metric.Hit(),\n verif.metric.Threat(),\n verif.metric.Conditional(),\n verif.metric.XConditional(func=np.median),\n ]\n intervals = [verif.interval.Interval(-np.inf, 0, True, True), # [-inf, 0]\n verif.interval.Interval(-np.inf, 1, True, True),\n verif.interval.Interval(-np.inf, 2, True, True),\n ]\n obs = [0, 1.5, 2]\n fcst = [3.1, 1.1, -2.1]\n N = len(obs)*1.0\n\n # Each line is one metric (one number for each threshold)\n expected = [[0/N, 100/N, 100/N], # Within\n [0/N, 0/N, 2/N], # Hit\n [1/N, 1/N, 0/N], # FA\n [1/N, 1/N, 1/N], # Miss\n [1/N, 1/N, 0/N], # Correct rejection\n [0, 0, 2.0/3], # Hit rate\n [0, 0, 2.0/3], # Threat score\n [3.1, 3.1, 0.7], # Average fcst given obs in interval\n [0, 0, 1.5], # Average obs given obs in interval\n ]\n\n for m in range(len(metrics)):\n metric = metrics[m]\n for i in range(len(intervals)):\n value = metric.compute_from_obs_fcst(np.array(obs), np.array(fcst), intervals[i])\n ex = expected[m][i] * 1.0\n if np.isnan(value):\n self.assertTrue(np.isnan(ex))\n else:\n self.assertAlmostEqual(ex, value)" ]
[ "0.6144483", "0.613362", "0.61275727", "0.60468686", "0.59062594", "0.57918984", "0.57610506", "0.5745387", "0.5712116", "0.57005745", "0.5697942", "0.56855434", "0.5667826", "0.5645718", "0.5632293", "0.5605659", "0.5582332", "0.5570504", "0.55398816", "0.55327576", "0.5522071", "0.5514678", "0.5497661", "0.54957485", "0.5444977", "0.5412593", "0.5407091", "0.53957623", "0.53782976", "0.5369705" ]
0.80237985
0
determine the tuning which is symmetrically similar to target that can be reached from current, within the minimum number of half step modifications
def compute_optimal_tuning(target, current):
    target = target.split(' ')
    current = current.split(' ')
    initial = [calculate_note_distance(current[i], target[i])
               for i in range(min(len(current), len(target)))]
    total_modifications = reduce(lambda x, y: abs(x) + abs(y), initial)
    winner = []
    min_sum = total_modifications
    for i in initial:
        mods = abs(i)
        tmp = [abs(i - mods) if i > 0 else abs(i + mods) for i in initial]
        tsum = sum(tmp)
        if tsum < min_sum:
            min_sum = tsum
            winner = tmp
    # transpose the current tuning into the optimal one
    ret = [transpose(note=current[i], distance=winner[i])
           for i in range(min(len(current), len(target)))]
    return " ".join(ret)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def best_allowed(self, base):\n x = base.copy()\n var_opt = None\n dim = x.shape[0]\n for i in range(dim):\n # Plus increment\n x[i] += self.step\n curr_obj = self.obj_wrap(x)\n # Check update feasible, obj improved\n # new point in STM, before accepting\n if (curr_obj and \n not np.isclose(x.T, self.STM).all(axis=1).any()):\n if var_opt is None:\n var_opt = (i, self.step, curr_obj)\n elif var_opt[2] > curr_obj:\n var_opt = (i, self.step, curr_obj)\n\n \n # Minus increment\n x[i] -= 2 * self.step\n curr_obj = self.obj_wrap(x)\n # Check update feasible, obj improved\n # new point in STM, before accepting\n if (curr_obj and \n not np.isclose(x.T, self.STM).all(axis=1).any()):\n if var_opt is None:\n var_opt = (i, -self.step, curr_obj)\n elif var_opt[2] > curr_obj:\n var_opt = (i, -self.step, curr_obj)\n \n # Restore to original value\n x[i] += self.step\n \n if var_opt:\n x[var_opt[0]] += var_opt[1]\n return x, var_opt[2]\n else:\n return None", "def calculate_appropriate_target(self):\n pass", "def calculate_appropriate_target(self):\n pass", "def best_step(self):\r\n return self._best_value_step", "def _SD_optimal(t):", "def heuristic_cost_estimate(self, current):\n relevants = 0\n accurate_relevants = 0\n for i in range(len(self.sample)):\n if is_relevant(self.sample.iloc[i], current.anchor):\n relevants += 1\n if self.pred_sample.iloc[i] == self.pred_example:\n accurate_relevants += 1\n accuracy = accurate_relevants/relevants\n if self.threshold-accuracy <= 0:\n x = 5\n return max(0, self.threshold - accuracy)", "def get_winners(self):\n\n if self.optimal is not None:\n return self.optimal\n clean_proposals = self.cleaner.create_scenarios(self.proposals)\n self.optimal = self.optimizer.optimize(clean_proposals)\n return self.optimal", "def gain_opt(machine, T):\n res = (np.arange(T)+1)\n return res * np.amax(machine)", "def minimum_spanning_arborescence(sol):", "def _tune(acc_rate, proposed, step):\n if step.tune_scaling:\n # a and b after Muto & Beck 2008.\n a = 1 / 9\n b = 8 / 9\n step.scaling = (a + b * acc_rate) ** 2\n if step.tune_steps:\n acc_rate = max(1.0 / proposed, acc_rate)\n step.n_steps = min(step.max_steps, 1 + int(np.log(step.p_acc_rate) / np.log(1 - acc_rate)))", "def adaptive_step(self):\n\n s = self\n \n # adaptive line search parameters\n down_adjustment_frequency = 1\n up_adjustment_frequency = 50\n alpha=0.3 # acceptable fraction of predicted decrease\n beta=0.8 # how much to shrink when violation\n gamma=1.05 # how much to grow when too conservative\n report_frequency = 1\n \n\n s.model.advance_batch()\n s.update_stats() # update cov matrices and svds\n\n s.grad.update() # gradient (ptodo, already updated in stats)\n s.grad2.update() # gradient from synth labels (don't need?)\n s.grad_new.update() # corrected gradient\n\n # TODO: decide on s vs s.\n s.run(s.param_save_op) # TODO: insert lr somewhere\n lr0, loss0 = s.run(s.lr, s.model.loss)\n\n s.run(s.param_update_op)\n loss1 = s.run(s.model.loss)\n \n target_slope = -s.run(s.grad_dot_grad_new_op)\n target_delta = lr0*target_slope\n actual_delta = loss1 - loss0\n actual_slope = actual_delta/lr0\n slope_ratio = actual_slope/target_slope # between 0 and 1.01\n \n s.record('loss', loss0)\n s.record('step_length', lr0)\n s.record('grad_norm', s.run(s.grad_norm_op))\n s.record('grad_new_norm', s.run(s.grad_new_norm_op))\n s.record('target_delta', target_delta)\n\n if step_rejection and actual_delta > 0:\n print('Observed increase in loss %.2f, rejecting step'%(actual_delta,))\n s.run(s.param_restore_op)\n\n if 
s.step_counter % report_frequency == 0:\n print('NStep %d loss %.2f, target decrease %.3f, actual decrease, %.3f ratio %.2f'%(self.step_counter, loss0, target_delta, actual_delta, slope_ratio))\n\n if (adaptive_step and s.step_counter % down_adjustment_frequency == 0 and\n slope_ratio < alpha and abs(target_delta)>eps):\n print('%.2f %.2f %.2f'%(loss0, loss1, slope_ratio))\n print('Slope optimality %.2f, shrinking learning rate to %.2f'%(slope_ratio, lr0*beta,))\n s.lr.set(lr0*beta)\n elif (adaptive_step and s.step_counter % up_adjustment_frequency == 0 and\n slope_ratio>0.90):\n print('%.2f %.2f %.2f'%(loss0, loss1, slope_ratio))\n print('Growing learning rate to %.2f'%(lr0*gamma))\n s.lr.set(lr0*gamma)\n \n s.step_counter+=1", "def create_tuning_functions(self):\r\n\t\tmotion_tuning = np.zeros((par['num_motion_tuned'], par['num_receptive_fields'], par['num_motion_dirs']), dtype=np.float32)\r\n\t\tfix_tuning = np.zeros((par['num_fix_tuned'], par['num_receptive_fields']), dtype=np.float32)\r\n\t\trule_tuning = np.zeros((par['num_rule_tuned'], par['num_rules']), dtype=np.float32)\r\n\r\n\t\t# generate list of prefered directions\r\n\t\t# dividing neurons by 2 since two equal groups representing two modalities\r\n\t\tpref_dirs = np.arange(0,360,360/(par['num_motion_tuned']//par['num_receptive_fields'])).astype(np.float32)\r\n\r\n\t\t# generate list of possible stimulus directions\r\n\t\tstim_dirs = np.arange(0,360,360/par['num_motion_dirs']).astype(np.float32)\r\n\r\n\t\tfor n in range(par['num_motion_tuned']//par['num_receptive_fields']):\r\n\t\t\tfor i in range(len(stim_dirs)):\r\n\t\t\t\tfor r in range(par['num_receptive_fields']):\r\n\t\t\t\t\td = np.cos((stim_dirs[i] - pref_dirs[n])/180*np.pi)\r\n\t\t\t\t\tn_ind = n+r*par['num_motion_tuned']//par['num_receptive_fields']\r\n\t\t\t\t\tmotion_tuning[n_ind,r,i] = par['tuning_height']*np.exp(par['kappa']*d)/np.exp(par['kappa'])\r\n\r\n\t\tfor n in range(par['num_fix_tuned']):\r\n\t\t\tfor i in range(par['num_receptive_fields']):\r\n\t\t\t\tif n%par['num_receptive_fields'] == i:\r\n\t\t\t\t\tfix_tuning[n,i] = par['tuning_height']\r\n\r\n\t\tneurons_per_rule = par['num_rule_tuned']//par['num_rules']\r\n\t\tfor n in range(par['num_rule_tuned']):\r\n\t\t\tfor i in range(par['num_rules']):\r\n\t\t\t\tif n in range(i*neurons_per_rule, (i+1)*neurons_per_rule):\r\n\t\t\t\t\trule_tuning[n,i] = par['tuning_height']\r\n\r\n\r\n\t\treturn motion_tuning, fix_tuning, rule_tuning", "def get_optimal_step(self, num_min):\r\n if self.pmax <= self.pmin:\r\n return None\r\n stepex = float(self.pmax - self.pmin) / num_min\r\n step1 = math.pow(10, math.floor(math.log(stepex, 10)))\r\n step2 = step1 * 2\r\n step5 = step1 * 5\r\n if step5 <= stepex:\r\n return step5\r\n if step2 <= stepex:\r\n return step2\r\n return step1", "def trainAndTune(self, trainingData, trainingLabels, validationData, validationLabels, Cgrid):\n \"*** YOUR CODE HERE ***\"\n\n #if predicted label is not equal to actual label\n num_errors = 0 \n \n #weights will be changed when checking if labels are equal to each other\n \n\n \n #traversing across the Cgrid to train each set across each value of c in Cgrid \n for c in Cgrid:\n updatedWeights = self.weights.copy()\n for iteration in range(self.max_iterations):\n \n print(\"Starting iteration \", iteration, \"..\")\n if iteration > 0:\n num_errors = 0\n\n for i in range(len(trainingData)):\n trainingUnit = trainingData[i].copy() #trainingUnit is one instance of training data at i\n #predLabel = self.classify(trainingUnit) #classifies data in 
order list of predicted label values\n #predictedLabel = predLabel[0] #extract predicted label where max is at first index\n realLabel = trainingLabels[i] #extract real label from training label in order to compare\n\n\n\n predY = 0\n predictedLabel = -1;\n for label in self.legalLabels:\n predLabel = trainingUnit * updatedWeights[label]\n if predictedLabel < predLabel or predictedLabel == -1:\n predictedLabel = predLabel\n predY = label\n\n tau = 0 \n \n #if predicted label is not equal to real label\n if predY != realLabel: \n feature = trainingUnit.copy() #extract feature of current training unit\n num_errors += 1 \n #t = ((wpred - wactual)*feature + 1.0)/(2 * feature * feature) = num/div \n num = updatedWeights[predY] - updatedWeights[realLabel]\n num = num * feature\n num += 1.0 \n \n\n div = (feature*feature)\n \n div += 2.0\n t = num/div\n \n tau = min(c,t)\n \n \n \n #for j in range(feature):\n for j in range(len(trainingData[i])):\n feature[j] = feature[j] * tau\n updatedWeights[realLabel] = updatedWeights[realLabel] + feature #wactual = wactual + tau*feature\n updatedWeights[predY] = updatedWeights[predY] - feature #wpred = wpred + tau*feature\n \n\n print(\"finished updating weights\")\n\n #determine guesses by classifying validation data\n guesses = self.classify(validationData)\n correct = 0\n bestAccuracy = None #no best accuracy rate yet\n\n #traverse over guesses, determine how many \n #answers were correct \n for i in range(len(guesses)):\n if guesses[i] == validationLabels[i]: #guess matches validation label\n correct += 1\n\n accuracy = correct / len(guesses) #determine percentage\n if(accuracy > bestAccuracy):\n bestAccuracy = accuracy\n\n self.weights = updatedWeights", "def update_policy(self, minibatch_size):\n \n steps = self.rewards.shape[0]\n batch_size = self.rewards.shape[0] * self.rewards.shape[1]\n #steps = 500\n #batch_size = 500\n #print(steps)\n #print(batch_size)\n \n # Compute advantages\n '''\n with torch.no_grad():\n if self.gae:\n advantages = torch.zeros_like(self.rewards).to(self.training_device)\n lastgaelam = 0\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n nextvalues = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t + 1]\n nextvalues = self.state_values[t + 1]\n delta = self.rewards[t] + self.gamma * nextvalues * nextnonterminal - self.state_values[t]\n advantages[t] = lastgaelam = delta + self.gamma * self.gae_lambda * nextnonterminal * lastgaelam\n returns = advantages + self.state_values\n else:\n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n ''' \n returns = torch.zeros_like(self.rewards).to(self.training_device)\n for t in reversed(range(steps)):\n if t == steps - 1:\n nextnonterminal = 1.0 - self.dones[t]\n next_return = self.state_values[t]\n else:\n nextnonterminal = 1.0 - self.dones[t+1]\n next_return = returns[t+1]\n returns[t] = self.rewards[t] + self.gamma * nextnonterminal * next_return\n advantages = returns - self.state_values\n \n\n # flatten the batch\n #b_obs = self.states.reshape((-1,) + self.state_space)\n #print(self.states.shape)\n b_obs = self.states.reshape((-1,4)).detach()\n b_logprobs = 
self.action_probs.reshape(-1,1).detach()\n b_actions = self.actions.reshape((-1,)).detach()\n b_advantages = advantages.reshape(-1,1)\n b_returns = returns.reshape(-1,1)\n b_values = self.state_values.reshape(-1,1)\n \n # Optimize policy and value network for K epochs, run optimization in minibatches\n \n inds = np.arange(batch_size)\n for i_epoch_pi in range(self.epochs):\n np.random.shuffle(inds)\n for start in range(0, batch_size, minibatch_size):\n end = start + minibatch_size\n minibatch_ind = inds[start:end]\n mb_advantages = b_advantages[minibatch_ind]\n if self.norm_adv:\n mb_advantages = (mb_advantages - mb_advantages.mean()) / (mb_advantages.std() + 1e-8)\n \n #_, newlogproba, entropy = self.get_action(b_obs[minibatch_ind], b_actions[minibatch_ind])\n newlogproba, entropy = self.evaluate(b_obs[minibatch_ind], b_actions[minibatch_ind])\n #ratio = (newlogproba - b_logprobs[minibatch_ind]).exp()\n ratio = torch.exp((newlogproba - b_logprobs[minibatch_ind].detach()))\n \n # Stats\n approx_kl = (b_logprobs[minibatch_ind] - newlogproba).mean()\n\n # Policy loss\n pg_loss1 = -mb_advantages * ratio\n pg_loss2 = -mb_advantages * torch.clamp(ratio, 1 - self.clip_epsilon, 1 + self.clip_epsilon)\n pg_loss = torch.max(pg_loss1, pg_loss2).mean()\n entropy_loss = entropy.mean()\n\n # Value loss\n _, new_values = self.policy.forward(b_obs[minibatch_ind])\n if self.clip_vloss:\n \n v_loss_unclipped = self.MseLoss(new_values,b_returns[minibatch_ind])\n #v_loss_unclipped = ((new_values - b_returns[minibatch_ind]) ** 2)\n v_clipped = b_values[minibatch_ind] + torch.clamp(new_values - b_values[minibatch_ind],\n -self.clip_epsilon, self.clip_epsilon)\n #v_loss_clipped = (v_clipped - b_returns[minibatch_ind]) ** 2\n v_loss_clipped = self.MseLoss(v_clipped,b_returns[minibatch_ind])\n v_loss_max = torch.max(v_loss_unclipped, v_loss_clipped)\n #v_loss = 0.5 * v_loss_max.mean()\n v_loss = 0.5 * v_loss_max\n else:\n #v_loss = 0.5 * ((new_values - b_returns[minibatch_ind]) ** 2).mean()\n v_loss = self.MseLoss(new_values,b_returns[minibatch_ind])\n\n loss = pg_loss + v_loss * self.vf_coeff - self.ent_coeff * entropy_loss\n\n self.optimizer.zero_grad()\n loss.backward()\n torch.nn.utils.clip_grad_norm_(self.policy.parameters(), self.max_grad_norm)\n self.optimizer.step()\n # Copy new weights into old policy:\n self.old_policy.load_state_dict(self.policy.state_dict())", "def control_opt(self):\n\n\n if self.run_opt['refine']:\n self.run_opt['relaunch']=1\n \n #check value for 'madweight_main'\n for i in range(3,9)+[-1,-3]:\n if self.run_opt[num_to_tag[i]]==1:\n self.run_opt['madweight_main']=1\n break\n\n if self.run_opt['relaunch']==1:\n self.run_opt['control']=1", "def overall_sensitivity(self):\n if self.mod1:\n s = torch.max(torch.max(self.weight, -1)[0], -1)[0].item()\n else:\n s = torch.max(torch.sqrt(torch.sum(self.weight * self.weight, -1)))[0].item()\n s *= np.sqrt(2. 
/ np.e)\n return s", "def step_utility(curr_dr, req_dr):\n if curr_dr >= req_dr:\n return MAX_UTILITY\n return MIN_UTILITY", "def get_target_per_score(self):\n pass", "def find_opt_size(instance, maxtime):\n if maxtime is None:\n maxtime = -1\n print(\"Searching for minimum-sized set of weights, timeout set at {}\"\n \"\".format(maxtime))\n try:\n with timeout(seconds=maxtime):\n while True:\n print(\"# \\tCall guess_weight with k = {}\".format(instance.k))\n solutions = solve(instance, silent=True)\n if bool(solutions):\n break\n instance.try_larger_k()\n elapsed = time.time() - start\n print(\"# Weights computation took {:.2f} seconds\".format(elapsed))\n print(\"# Solution:\", solutions)\n return solutions, elapsed\n except TimeoutError:\n print(\"Timed out after {} seconds\".format(maxtime))\n return set(), maxtime", "def optimization_step(self):\n \n if \"CSS\" in self.algorithm:\n \n input_dict = {self.x: self.train_inputs[self.minibatch_set,:]}\n \n var_list = [self.x_tilda, self.minibatch_set]\n \n if (self.num_samples > 0) and (not self.mixture):\n \n if ((self.mf_steps > 0) and self.alpha >0) or\\\n self.gibbs_steps > 0: \n \n var_list.append(self.sampler_theta)\n \n elif \"CD\" in self.algorithm:\n \n input_dict = {self.x : self.train_inputs[self.minibatch_set,:]} \n \n var_list = [self.minibatch_set]\n \n var_list.append(self.learning_rate)\n \n if self.use_momentum:\n \n var_list.append(self.momentum)\n \n output_vars = [self.pseudo_cost]\n \n if self.report_p_tilda:\n \n output_vars.append(self.p_tilda)\n \n else:\n \n output_vars.append(theano.shared(0))\n \n opt_step = theano.function(inputs = var_list,\n outputs = output_vars,\n updates = self.updates,\n givens = input_dict,\n on_unused_input='warn')\n \n return opt_step", "def get_learning_completion(self):\n return min(1.0, self.step / Parameters.FINAL_EXPLORATION)", "def learning_rate_range():\n # Lower and upper bounds\n #######\n lower_bound = 0.1 \n upper_bound = 1e-6\n #######\n return lower_bound, upper_bound", "def step(self, delta_l11, delta_l12, delta_l13, delta_l21, delta_l22, delta_l23):\n self.l11 += delta_l11; self.l12 += delta_l12; self.l13 += delta_l13\n self.l21 += delta_l11; self.l22 += delta_l12; self.l23 += delta_l13\n self.l21 += delta_l21; self.l22 += delta_l22; self.l23 += delta_l23\n # check that all tendon lenghts are within limit\n self.l11 = self.l1min if self.l11 < self.l1min else self.l11\n self.l12 = self.l1min if self.l12 < self.l1min else self.l12\n self.l13 = self.l1min if self.l13 < self.l1min else self.l13\n self.l11 = self.l1max if self.l11 > self.l1max else self.l11\n self.l12 = self.l1max if self.l12 > self.l1max else self.l12\n self.l13 = self.l1max if self.l13 > self.l1max else self.l13\n self.l21 = self.l2min if self.l21 < self.l2min else self.l21\n self.l22 = self.l2min if self.l22 < self.l2min else self.l22\n self.l23 = self.l2min if self.l23 < self.l2min else self.l23\n self.l21 = self.l2max if self.l21 > self.l2max else self.l21\n self.l22 = self.l2max if self.l22 > self.l2max else self.l22\n self.l23 = self.l2max if self.l23 > self.l2max else self.l23\n old_tip_vec = self.tip_vec2 # used for potential reward\n self.update_variables()\n new_tip_vec = self.tip_vec2 # used for potential reward\n reward = self.r_static\n return reward", "def getNextOptimal(self):\n\t\tnodes=self.optNodes\n\t\texceeds=self.m.exceedsAngleLim\n\t\tif self.optNode is len(nodes)-1: #last node\n\t\t\tself.noMoreSpots=True\n\t\t\treturn self.pos\n\t\telif len(nodes) is 0 or (self.otherDevice is not None 
and exceeds(self,nodes[self.optNode+1],self.otherDevice)):\n\t\t\treturn self.pos #could not go to next ideal, other arm is blocking.\n\t\telse:\n\t\t\t#get the next optimal in list and iterate until it is \"forward\" angularly.\n\t\t\tself.optNode+=1\n\t\t\tif '2a' in self.m.type:\n\t\t\t\twhile self.m.getCylindrical(nodes[self.optNode])[1] > self.posCyl[1] and self.optNode<len(nodes)-1 and not exceeds(self,nodes[self.optNode+1],self.otherDevice): \n\t\t\t\t\tself.optNode+=1\n\t\treturn nodes[self.optNode]", "def better_action(tip_speed):\n possible_actions_in_state = Q[get_state(tip_speed)]\n action_of_choice = np.argmax(possible_actions_in_state)\n return action_of_choice", "def _advance_settings(self, sol):\r\n if self.cond == True:\r\n # Save last solution...\r\n self.lst_tmp = sol\r\n # Check if all timesteps are complete.\r\n self.current_T += self.d_T\r\n self.step += 1\r\n if self.current_T > self.max_T:\r\n return False\r\n # Set to not be conduction any more\r\n self.cond = False\r\n if len(self.fq_list) > 0:\r\n self.rad = 0\r\n else:\r\n # There are radiation steps to do.\r\n self.cond = True\r\n return True\r\n\r\n # If we're here, we're either not done anything yet or have\r\n # just done a radiation step.\r\n if self.rad != None:\r\n # Save last solution\r\n self.lst_rad[self.rad] = sol\r\n # Advance to next radiation stage if one exists. Else cond.\r\n if self.rad + 1 != len(self.fq_list):\r\n self.rad += 1\r\n else:\r\n self.rad = None\r\n self.cond = True\r\n return True\r\n\r\n # If we've made it to here, we must just setting the simulation\r\n # going.\r\n assert (len(self.fq_list) == len(self.lst_rad))\r\n if len(self.lst_rad) > 0:\r\n assert (len(self.fq_list) == len(self.absorb_coeffs))\r\n assert (self.refr_idx_vol >= 0.0)\r\n # Could set to zero, but that might limit restarts. Just check\r\n # Validity....\r\n assert (self.step != None)\r\n assert (self.d_T > 0.0)\r\n assert (self.current_T != None)\r\n assert (self.max_T != None)\r\n assert (self.max_T > self.current_T)\r\n assert (self.diff_scale >= 0.0)\r\n assert (self.diff_scale <= 1.0)\r\n assert (self.thermal_conductivity > 0.0)\r\n assert (self.alpha >= 0.0)\r\n assert (self.refr_idx_background >= 0.0)\r\n # Set the ball rolling:\r\n if len(self.fq_list) > 0:\r\n # We can set solver for frequencies first...\r\n self.rad = 0\r\n else:\r\n self.cond = True\r\n return True", "def tune_model(self):\n acc = 0\n ### Early Stop Mechanism\n loss = previous_loss = float(\"inf\")\n patience_left = self.config.patience\n ### Early Stop Mechanism\n\n self.generator = Generator(self.model.config, training_strategy=self.training_strategy)\n self.evaluator = Evaluator(model=self.model,data_type=self.teston, debug=self.debug, tuning=True)\n \n for cur_epoch_idx in range(self.config.epochs):\n loss = self.train_model_epoch(cur_epoch_idx, tuning=True)\n ### Early Stop Mechanism\n ### start to check if the loss is still decreasing after an interval. \n ### Example, if early_stop_epoch == 50, the trainer will check loss every 50 epoche.\n ### TODO: change to support different metrics.\n if ((cur_epoch_idx + 1) % self.config.early_stop_epoch) == 0: \n if patience_left > 0 and previous_loss <= loss:\n patience_left -= 1\n print('%s more chances before the trainer stops the training. 
(prev_loss, curr_loss): (%.f, %.f)' % \\\n (patience_left, previous_loss, loss))\n\n elif patience_left == 0 and previous_loss <= loss:\n self.evaluator.result_queue.put(Evaluator.TEST_BATCH_EARLY_STOP)\n break\n else:\n patience_left = self.config.patience\n\n previous_loss = loss\n\n self.generator.stop()\n self.evaluator.test(cur_epoch_idx)\n acc = self.evaluator.output_queue.get()\n self.evaluator.stop()\n\n return acc", "def calibrate_threshold(test_graphs):\r\n best_threshold = None\r\n best_result = None\r\n for threhold in range(1, 50):\r\n cur_res = evaluate_argument_mention(test_graphs, threhold)\r\n if (best_result is None) or (cur_res > best_result):\r\n best_result = cur_res\r\n best_threshold = threhold\r\n return (best_threshold, best_result)", "def initial_estimator(f, x, step,k):\n\n fx = f(x)\n\n if decide(fx > 0):\n sign1 = 1\n else:\n sign1 = -1\n k_step = k\n h = fx / derivative(f, x)\n\n for k1 in range(1, 50000):\n step = step + 1\n x_new = x - k_step * h\n k_step = k_step * 2 # make the k double in each iteration\n fx_new = f(x_new)\n if decide(fx_new > 0):\n sign2 = 1\n else:\n sign2 = -1\n\n if not (sign1 == sign2):\n return x_new, step\n\n print(\"limit need to Increase\")" ]
[ "0.6273489", "0.61885136", "0.61885136", "0.6086417", "0.59578407", "0.585661", "0.5825039", "0.5813893", "0.5754839", "0.5698001", "0.56716925", "0.56596774", "0.5644322", "0.56128955", "0.56061", "0.55671877", "0.5557258", "0.55553675", "0.55381364", "0.55143857", "0.55056536", "0.55011743", "0.5494779", "0.5488813", "0.5453158", "0.5451588", "0.542855", "0.5428309", "0.5419388", "0.5417666" ]
0.6830949
0
Join several querysets by a UNION clause. Returns the SQL string and the list of parameters.
def union(self, querysets): # union() is "New in Django 1.11." (docs site) # but buggy in 2.0, with a backport in 1.11.8 ; my ticket 29229, fixed in 1.11.12 & 2.0.4. # For simplicity, let's even ignore the usable 1.11.0-7 frame. # Ticket 29286 reintroduced a bug in 1.11.13 & 2.0.5, by considering only the annotate() case and not the extra(). # Ticket 29694 fixed the missing extra() case, but is only effective as of 2.1.1, # because extra() is destined to be deprecated. # So the final solution here was to replace all extra() by annotate() in this app. if VERSION < (1, 11, 12) or (2, 0) <= VERSION < (2, 0, 4): result_sql, result_params = [], [] for qs in querysets: sql, params = qs.query.sql_with_params() result_sql.append(sql) result_params.extend(params) return ' UNION '.join(result_sql), tuple(result_params) else: qs = querysets[0].union(*querysets[1:]) return qs.query.sql_with_params()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def union(self, list_or_stmt):\n if not isinstance(list_or_stmt, basestring) and not isinstance(list_or_stmt, mysqlstmt.Select):\n for c in list_or_stmt:\n self.union(c)\n else:\n self._selects.append(list_or_stmt)\n return self", "def sql_merge(sqls=[],clauseTables=[]):\n\tsql = ''\n\tfor statement in sqls:\n\t\tsql += statement + \" AND \"\n\tsql = sql[0:-5]\n\ttabs = set(clauseTables)\n\treturn dict(sql=sql,clauseTables=tabs)", "def query_join(*query_list):\n return \"&\".join(query_list)", "def sql(self):\n if not self._selects:\n raise ValueError('No SELECT statements are specified')\n\n sql = []\n param_values = []\n\n # MySQL SELECT syntax as of 5.7:\n #\n # SELECT ...\n # UNION [ALL | DISTINCT] SELECT ...\n # [UNION [ALL | DISTINCT] SELECT ...]\n\n if self.query_options:\n sql.extend(self.query_options)\n\n for stmt in self._selects:\n if isinstance(stmt, mysqlstmt.Select):\n select_sql, select_params = stmt.sql()\n stmtsql = select_sql\n if select_params is not None:\n param_values.extend(select_params)\n else:\n stmtsql = stmt\n\n if sql:\n if self._distinct is False:\n sql.append('UNION ALL')\n else:\n sql.append('UNION')\n\n sql.append(u'({0})'.format(stmtsql))\n\n if self._orderby_conds:\n sql.append('ORDER BY')\n sql.append(', '.join(self._orderby_conds))\n\n if self._limit is not None:\n row_count, offset = self._limit\n if offset > 0:\n sql.append('LIMIT {0},{1}'.format(offset, row_count))\n else:\n sql.append('LIMIT {0}'.format(row_count))\n\n if self.placeholder:\n return ' '.join(sql), param_values if param_values else None\n assert not param_values\n return ' '.join(sql)", "def _eval_rewrite_as_Union(self, *sets, **kwargs):\n\n dj_union = S.EmptySet\n index = 0\n for set_i in sets:\n if isinstance(set_i, Set):\n cross = ProductSet(set_i, FiniteSet(index))\n dj_union = Union(dj_union, cross)\n index = index + 1\n return dj_union", "def test_union_with_different_models(self):\n queryset = QuerySetModel.objects.values_list(\"pk\")\n other_queryset = OtherModel.objects.values_list(\"pk\")\n self.assertEqual(\n queryset.union(other_queryset).count(),\n 1\n )\n self.assertEqual(\n other_queryset.union(queryset).count(),\n 1\n )\n queryset = QuerySetModel.all_objects.values_list(\"pk\")\n self.assertEqual(\n queryset.union(other_queryset, all=True).count(),\n 2\n )\n self.assertEqual(\n other_queryset.union(queryset, all=True).count(),\n 2\n )", "def make_union(self, *args, **kwargs): # real signature unknown\n pass", "def union(set1, set2):", "def union(self, *lists):\n if self.is_a(set):\n return _(self._.union(*lists))\n return _(_union(self._, *lists))", "def test_union(self):\n queryset = QuerySetModel.objects.all()\n self.assertEqual(\n queryset.union(queryset).count(),\n 0\n )\n\n queryset = QuerySetModel.all_objects.all()\n self.assertEqual(\n queryset.union(queryset, all=False).count(),\n 1\n )\n self.assertEqual(\n queryset.union(queryset, all=True).count(),\n 2\n )", "def union_all(self, query):\n return self.union(query, True)", "def _array_union(cls, queries):\n clean_queries = [q for q in queries if q is not None]\n if len(clean_queries) == 0:\n return db.session.query(Relationship.source_id).filter(sql.false())\n\n query = clean_queries.pop()\n for q in clean_queries:\n query = query.union(q)\n return query", "def union(A, B, *C):\n return setutils(\"union\", A, B, *C)", "def union(first, second):\n # Put your code here.", "def union(arguments, flatten=True):\n return Component(\n \"Union\",\n arguments=arguments,\n options={\n 'flatten': flatten\n },\n 
constraints=None)", "def union(cls, forms):\n \"\"\"This function must be recursive.\"\"\"\n if len(forms) == 2:\n return cls.unionTwoForms(forms[0], forms[1])\n else:\n pass\n\n result = forms[0]\n for form in forms[1:]:\n result.extend(cls.union(form, result))\n return result", "def concat_all_dataframes(*args):\n return reduce(DataFrame.unionAll, args)", "def sqlors(left, lst):\n if isinstance(lst, iters):\n lst = list(lst)\n ln = len(lst)\n if ln == 0:\n return SQLQuery(\"1=2\")\n if ln == 1:\n lst = lst[0]\n\n if isinstance(lst, iters):\n return SQLQuery(['('] + \n sum([[left, sqlparam(x), ' OR '] for x in lst], []) +\n ['1=2)']\n )\n else:\n return left + sqlparam(lst)", "def makeQueries(baseQuery, joiningChar):\n results = []\n searchQueries = sys.argv[2:]\n for query in searchQueries: # for every individual query\n queryList = query.split() # split individual terms in a query\n # join them back with the joining char between them\n formatedQuery = joiningChar.join(queryList)\n # append the structured query to the result\n results.append(baseQuery + formatedQuery)\n return results", "def _control_union(self, entities_1: List[str], entities_2: List[str]):\n return list(set(entities_1).union(set(entities_2)))", "def union(self, *others):\r\n return self.r.sunion(self.r_key, *[o.r_key for o in others])", "def union(self, p, q):\n pass", "def _union(cls, s1, s2):\n return s1.union(s2)", "def union(self, iterable):\n pass", "def build_query_clauses(\n where: str = \"\", order: str = \"\", limit: int = 0, offset: int = 0\n ) -> str:\n return SqliteQueryBuilder.build_query_clauses(where, order, limit, offset)", "def union(self, query, all=False):\n self.unions.append({\n 'query': query,\n 'all': all\n })\n\n return self.merge_bindings(query)", "def add_joins(self, model, queryset=None, **kwargs):\n if queryset is None:\n clone = self.get_queryset()\n else:\n clone = queryset._clone()\n\n alias = None\n\n for i, join in enumerate(self.get_joins(model, **kwargs)):\n alias = clone.query.join(**join)\n\n # this implies the join is redudant and occuring on the root model's\n # table\n if alias is None:\n alias = clone.query.get_initial_alias()\n\n return clone, alias", "def unionUse(expression, unpack=True, dump=False):\n\n initTechnique(PAYLOAD.TECHNIQUE.UNION)\n\n abortedFlag = False\n count = None\n origExpr = expression\n startLimit = 0\n stopLimit = None\n value = None\n\n width = getConsoleWidth()\n start = time.time()\n\n _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr)\n\n # Set kb.partRun in case the engine is called from the API\n kb.partRun = getPartRun(alias=False) if conf.api else None\n\n if Backend.isDbms(DBMS.MSSQL) and kb.dumpColumns:\n kb.rowXmlMode = True\n _ = \"(%s FOR XML RAW, BINARY BASE64)\" % expression\n output = _oneShotUnionUse(_, False)\n value = parseUnionPage(output)\n kb.rowXmlMode = False\n\n if expressionFieldsList and len(expressionFieldsList) > 1 and \"ORDER BY\" in expression.upper():\n # Removed ORDER BY clause because UNION does not play well with it\n expression = re.sub(r\"(?i)\\s*ORDER BY\\s+[\\w,]+\", \"\", expression)\n debugMsg = \"stripping ORDER BY clause from statement because \"\n debugMsg += \"it does not play well with UNION query SQL injection\"\n singleTimeDebugMessage(debugMsg)\n\n # We have to check if the SQL query might return multiple entries\n # if the technique is partial UNION query and in such case forge the\n # SQL limiting the query output one entry at a time\n # NOTE: we assume that only queries 
that get data from a table can\n # return multiple entries\n if value is None and (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or kb.forcePartialUnion or (dump and (conf.limitStart or conf.limitStop)) or \"LIMIT \" in expression.upper()) and \" FROM \" in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I):\n expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump)\n\n if limitCond:\n # Count the number of SQL query entries output\n countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1)\n\n if \" ORDER BY \" in countedExpression.upper():\n _ = countedExpression.upper().rindex(\" ORDER BY \")\n countedExpression = countedExpression[:_]\n\n output = _oneShotUnionUse(countedExpression, unpack)\n count = unArrayizeValue(parseUnionPage(output))\n\n if isNumPosStrValue(count):\n if isinstance(stopLimit, int) and stopLimit > 0:\n stopLimit = min(int(count), int(stopLimit))\n else:\n stopLimit = int(count)\n\n infoMsg = \"used SQL query returns \"\n infoMsg += \"%d %s\" % (stopLimit, \"entries\" if stopLimit > 1 else \"entry\")\n logger.info(infoMsg)\n\n elif count and (not isinstance(count, basestring) or not count.isdigit()):\n warnMsg = \"it was not possible to count the number \"\n warnMsg += \"of entries for the SQL query provided. \"\n warnMsg += \"sqlmap will assume that it returns only \"\n warnMsg += \"one entry\"\n logger.warn(warnMsg)\n\n stopLimit = 1\n\n elif (not count or int(count) == 0):\n if not count:\n warnMsg = \"the SQL query provided does not \"\n warnMsg += \"return any output\"\n logger.warn(warnMsg)\n else:\n value = [] # for empty tables\n return value\n\n if isNumPosStrValue(count) and int(count) > 1:\n threadData = getCurrentThreadData()\n\n try:\n threadData.shared.limits = iter(xrange(startLimit, stopLimit))\n except OverflowError:\n errMsg = \"boundary limits (%d,%d) are too large. Please rerun \" % (startLimit, stopLimit)\n errMsg += \"with switch '--fresh-queries'\"\n raise SqlmapDataException(errMsg)\n\n numThreads = min(conf.threads, (stopLimit - startLimit))\n threadData.shared.value = BigArray()\n threadData.shared.buffered = []\n threadData.shared.counter = 0\n threadData.shared.lastFlushed = startLimit - 1\n threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1\n\n if threadData.shared.showEta:\n threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit))\n\n if stopLimit > TURN_OFF_RESUME_INFO_LIMIT:\n kb.suppressResumeInfo = True\n debugMsg = \"suppressing possible resume console info because of \"\n debugMsg += \"large number of rows. 
It might take too long\"\n logger.debug(debugMsg)\n\n try:\n def unionThread():\n threadData = getCurrentThreadData()\n\n while kb.threadContinue:\n with kb.locks.limit:\n try:\n threadData.shared.counter += 1\n num = next(threadData.shared.limits)\n except StopIteration:\n break\n\n if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE):\n field = expressionFieldsList[0]\n elif Backend.isDbms(DBMS.ORACLE):\n field = expressionFieldsList\n else:\n field = None\n\n limitedExpr = agent.limitQuery(num, expression, field)\n output = _oneShotUnionUse(limitedExpr, unpack, True)\n\n if not kb.threadContinue:\n break\n\n if output:\n with kb.locks.value:\n if all(_ in output for _ in (kb.chars.start, kb.chars.stop)):\n items = parseUnionPage(output)\n\n if threadData.shared.showEta:\n threadData.shared.progress.progress(threadData.shared.counter)\n if isListLike(items):\n # in case that we requested N columns and we get M!=N then we have to filter a bit\n if len(items) > 1 and len(expressionFieldsList) > 1:\n items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)]\n items = [_ for _ in flattenValue(items)]\n if len(items) > len(expressionFieldsList):\n filtered = OrderedDict()\n for item in items:\n key = re.sub(r\"[^A-Za-z0-9]\", \"\", item).lower()\n if key not in filtered or re.search(r\"[^A-Za-z0-9]\", item):\n filtered[key] = item\n items = filtered.values()\n items = [items]\n index = None\n for index in xrange(1 + len(threadData.shared.buffered)):\n if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:\n break\n threadData.shared.buffered.insert(index or 0, (num, items))\n else:\n index = None\n if threadData.shared.showEta:\n threadData.shared.progress.progress(threadData.shared.counter)\n for index in xrange(1 + len(threadData.shared.buffered)):\n if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num:\n break\n threadData.shared.buffered.insert(index or 0, (num, None))\n\n items = output.replace(kb.chars.start, \"\").replace(kb.chars.stop, \"\").split(kb.chars.delimiter)\n\n while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH):\n threadData.shared.lastFlushed, _ = threadData.shared.buffered[0]\n if not isNoneValue(_):\n threadData.shared.value.extend(arrayizeValue(_))\n del threadData.shared.buffered[0]\n\n if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta:\n _ = ','.join(\"'%s'\" % _ for _ in (flattenValue(arrayizeValue(items)) if not isinstance(items, basestring) else [items]))\n status = \"[%s] [INFO] %s: %s\" % (time.strftime(\"%X\"), \"resumed\" if threadData.resumed else \"retrieved\", _ if kb.safeCharEncode else safecharencode(_))\n\n if len(status) > width:\n status = \"%s...\" % status[:width - 3]\n\n dataToStdout(\"%s\\n\" % status)\n\n runThreads(numThreads, unionThread)\n\n if conf.verbose == 1:\n clearConsoleLine(True)\n\n except KeyboardInterrupt:\n abortedFlag = True\n\n warnMsg = \"user aborted during enumeration. 
sqlmap \"\n warnMsg += \"will display partial output\"\n logger.warn(warnMsg)\n\n finally:\n for _ in sorted(threadData.shared.buffered):\n if not isNoneValue(_[1]):\n threadData.shared.value.extend(arrayizeValue(_[1]))\n value = threadData.shared.value\n kb.suppressResumeInfo = False\n\n if not value and not abortedFlag:\n output = _oneShotUnionUse(expression, unpack)\n value = parseUnionPage(output)\n\n duration = calculateDeltaSeconds(start)\n\n if not kb.bruteMode:\n debugMsg = \"performed %d queries in %.2f seconds\" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration)\n logger.debug(debugMsg)\n\n return value", "def _sql_where(cur, tables, andalso, orelse, prefix=None, aggregate=False):\n disjunctions = []\n andsql = _cond_where_sql(cur, andalso, tables, prefix=prefix,\n aggregate=aggregate)\n andsql = ' AND '.join(andsql)\n\n if len(andsql) > 0:\n andsql = '(%s)' % andsql\n disjunctions.append(andsql)\n disjunctions += _cond_where_sql(cur, orelse, tables, prefix=prefix,\n aggregate=aggregate)\n\n if len(disjunctions) == 0:\n return ''\n return '(%s)' % (' OR '.join(disjunctions))", "def union(seq, *seqs):\n yield from unionby(None, seq, *seqs)" ]
[ "0.64259", "0.6321942", "0.6130795", "0.61098236", "0.60906136", "0.5835884", "0.58204424", "0.5803518", "0.57851565", "0.56865054", "0.5662633", "0.5622609", "0.5616906", "0.55898374", "0.55030537", "0.54707825", "0.5422362", "0.5398607", "0.53946865", "0.534702", "0.52942663", "0.52885765", "0.52879506", "0.5265135", "0.5264869", "0.5263987", "0.52146477", "0.5212533", "0.52104145", "0.5183532" ]
0.7916193
0
Configure Heatzy API using Home Assistant configuration and fetch all Heatzy devices.
async def async_setup_platform(hass, config, add_devices, discovery_info=None): # retrieve platform config username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) session = aiohttp_client.async_get_clientsession(hass) store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY) authenticator = HeatzyAuthenticator(session, store, username, password) api = HeatzyAPI(session, authenticator) # fetch configured Heatzy devices devices = await api.async_get_devices() # add all Heatzy devices with HA implementation to home assistant add_devices(filter(None.__ne__, map(setup_heatzy_device(api), devices))) return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup(hass, base_config):\n from pyhusmow import API as HUSMOW_API\n\n config = base_config.get(DOMAIN)\n\n if hass.data.get(DOMAIN) is None:\n hass.data[DOMAIN] = { 'devices': [] }\n\n api = HUSMOW_API()\n api.login(config.get(CONF_USERNAME), config.get(CONF_PASSWORD))\n\n robots = api.list_robots()\n\n if not robots:\n return False\n\n for robot in robots:\n hass.data[DOMAIN]['devices'].append(AutomowerDevice(robot, api))\n\n for component in AUTOMOWER_COMPONENTS:\n discovery.load_platform(hass, component, DOMAIN, {}, base_config)\n\n return True", "async def async_setup(self):\n self._unsub_stop = self.hass.bus.async_listen(\n EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop\n )\n dev_reg = await device_registry.async_get_registry(self.hass)\n model_type = self.device.settings[\"device\"][\"type\"]\n dev_reg.async_get_or_create(\n config_entry_id=self.entry.entry_id,\n name=self.name,\n connections={(device_registry.CONNECTION_NETWORK_MAC, self.mac)},\n # This is duplicate but otherwise via_device can't work\n identifiers={(DOMAIN, self.mac)},\n manufacturer=\"Shelly\",\n model=aioshelly.MODEL_NAMES.get(model_type, model_type),\n sw_version=self.device.settings[\"fw\"],\n )", "async def async_refresh_devices(hass: HomeAssistant, tern):\n _LOGGER.info(\"refresh devices now\")\n response = await tern.get_entities(\"device\", True)\n devices = response[\"rsp\"][\"entities\"]\n pdata = tern.hass_platform_data\n\n device_registry = await dr.async_get_registry(hass)\n device_registry.async_get_or_create(\n config_entry_id=pdata.hub_entry.entry_id,\n connections={(dr.CONNECTION_NETWORK_MAC, pdata.mac)},\n identifiers={(DOMAIN, pdata.hub_entry.entry_id)},\n manufacturer=TERNCY_MANU_NAME,\n name=pdata.hub_entry.title,\n model=\"TERNCY-GW01\",\n sw_version=1,\n )\n\n for dev in devices:\n await update_or_create_entity(dev, tern)", "def setup_platform(hass, config, add_entities, discovery_info=None):\n import jsonpath\n jsonpath = jsonpath.jsonpath\n global HEAT_PUMPS\n hub.update_overview()\n if int(hub.config.get(CONF_CLIMATE, 1)):\n HEAT_PUMPS = hub.get('$.heatPumps')\n if HEAT_PUMPS:\n for heat_pump in HEAT_PUMPS[0]:\n device_label = jsonpath(heat_pump, '$.deviceLabel')[0]\n add_entities([\n VerisureHeatPump(device_label)\n ])", "def setup_platform(hass, config, add_devices, discovery_info=None):\n name = config.get(CONF_NAME)\n ip_addr = config.get(CONF_HOST)\n mac_addr = binascii.unhexlify(config.get(CONF_MAC).encode().replace(b':', b''))\n target_temp_default = config.get(CONF_TARGET_TEMP)\n target_temp_step = config.get(CONF_TARGET_TEMP_STEP)\n operation_list = DEFAULT_OPERATION_LIST\n \n import broadlink\n \n broadlink_device = broadlink.hysen((ip_addr, 80), mac_addr, None)\n broadlink_device.timeout = config.get(CONF_TIMEOUT)\n\n try:\n broadlink_device.auth()\n add_devices([\n BroadlinkHysenClimate(hass, name, broadlink_device, target_temp_default, target_temp_step, operation_list)\n ])\n except socket.timeout:\n _LOGGER.error(\"Failed to connect to Broadlink Hysen Device IP:%s\",ip_addr)", "def get_entities():\n entities = []\n hc_api = hass.data[DOMAIN][config_entry.entry_id]\n for device_dict in hc_api.devices:\n entity_dicts = device_dict.get(CONF_ENTITIES, {}).get(\"switch\", [])\n entity_list = [HomeConnectProgramSwitch(**d) for d in entity_dicts]\n entity_list += [HomeConnectPowerSwitch(device_dict[CONF_DEVICE])]\n entities += entity_list\n return entities", "async def async_setup(self) -> None:\n await self.hass.async_add_executor_job(self._setup)\n\n # set already known 
devices to away instead of unavailable\n device_registry = dr.async_get(self.hass)\n devices = dr.async_entries_for_config_entry(device_registry, self.entry_id)\n for device_entry in devices:\n if device_entry.via_device_id is None:\n continue # do not add the router itself\n\n device_mac = dict(device_entry.connections).get(dr.CONNECTION_NETWORK_MAC)\n self.devices[device_mac] = {\n \"mac\": device_mac,\n \"name\": device_entry.name,\n \"active\": False,\n \"last_seen\": dt_util.utcnow() - timedelta(days=365),\n \"device_model\": None,\n \"device_type\": None,\n \"type\": None,\n \"link_rate\": None,\n \"signal\": None,\n \"ip\": None,\n }\n\n await self.async_update_device_trackers()\n self.entry.async_on_unload(\n async_track_time_interval(\n self.hass, self.async_update_device_trackers, SCAN_INTERVAL\n )\n )\n\n async_dispatcher_send(self.hass, self.signal_device_new)", "def setup(self):\n try:\n self.homedata = pyatmo.HomeData(self.auth)\n self.home_id = self.homedata.gethomeId(self.home)\n except TypeError:\n _LOGGER.error(\"Error when getting home data\")\n except AttributeError:\n _LOGGER.error(\"No default_home in HomeData\")\n except pyatmo.NoDevice:\n _LOGGER.debug(\"No thermostat devices available\")\n except pyatmo.InvalidHome:\n _LOGGER.debug(\"Invalid home %s\", self.home)", "def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901\n\n hass.data[DOMAIN] = {}\n\n # Parse configuration into a dict of device name to physical address\n # represented as a list of four elements.\n device_aliases = {}\n devices = base_config[DOMAIN].get(CONF_DEVICES, {})\n _LOGGER.debug(\"Parsing config %s\", devices)\n device_aliases.update(parse_mapping(devices))\n _LOGGER.debug(\"Parsed devices: %s\", device_aliases)\n\n platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)\n\n loop = (\n # Create own thread if more than 1 CPU\n hass.loop\n if multiprocessing.cpu_count() < 2\n else None\n )\n host = base_config[DOMAIN].get(CONF_HOST)\n display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)\n if host:\n adapter = TcpAdapter(host, name=display_name, activate_source=False)\n else:\n adapter = CecAdapter(name=display_name[:12], activate_source=False)\n hdmi_network = HDMINetwork(adapter, loop=loop)\n\n def _adapter_watchdog(now=None):\n _LOGGER.debug(\"Reached _adapter_watchdog\")\n event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n if not adapter.initialized:\n _LOGGER.info(\"Adapter not initialized; Trying to restart\")\n hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)\n adapter.init()\n\n _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)\n\n @callback\n def _async_initialized_callback(*_: Any):\n \"\"\"Add watchdog on initialization.\"\"\"\n return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)\n\n hdmi_network.set_initialized_callback(_async_initialized_callback)\n\n def _volume(call: ServiceCall) -> None:\n \"\"\"Increase/decrease volume and mute/unmute system.\"\"\"\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown 
command %s\", cmd)\n\n def _process_volume(cmd, att):\n if isinstance(att, (str,)):\n att = att.strip()\n if att == CMD_PRESS:\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n elif att == CMD_RELEASE:\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n else:\n att = 1 if att == \"\" else int(att)\n for _ in range(0, att):\n hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n\n def _tx(call: ServiceCall) -> None:\n \"\"\"Send CEC command.\"\"\"\n data = call.data\n if ATTR_RAW in data:\n command = CecCommand(data[ATTR_RAW])\n else:\n src = data.get(ATTR_SRC, ADDR_UNREGISTERED)\n dst = data.get(ATTR_DST, ADDR_BROADCAST)\n if ATTR_CMD in data:\n cmd = data[ATTR_CMD]\n else:\n _LOGGER.error(\"Attribute 'cmd' is missing\")\n return\n if ATTR_ATT in data:\n if isinstance(data[ATTR_ATT], (list,)):\n att = data[ATTR_ATT]\n else:\n att = reduce(lambda x, y: f\"{x}:{y:x}\", data[ATTR_ATT])\n else:\n att = \"\"\n command = CecCommand(cmd, dst, src, att)\n hdmi_network.send_command(command)\n\n def _standby(call: ServiceCall) -> None:\n hdmi_network.standby()\n\n def _power_on(call: ServiceCall) -> None:\n hdmi_network.power_on()\n\n def _select_device(call: ServiceCall) -> None:\n \"\"\"Select the active device.\"\"\"\n if not (addr := call.data[ATTR_DEVICE]):\n _LOGGER.error(\"Device not found: %s\", call.data[ATTR_DEVICE])\n return\n if addr in device_aliases:\n addr = device_aliases[addr]\n else:\n entity = hass.states.get(addr)\n _LOGGER.debug(\"Selecting entity %s\", entity)\n if entity is not None:\n addr = entity.attributes[\"physical_address\"]\n _LOGGER.debug(\"Address acquired: %s\", addr)\n if addr is None:\n _LOGGER.error(\n \"Device %s has not physical address\", call.data[ATTR_DEVICE]\n )\n return\n if not isinstance(addr, (PhysicalAddress,)):\n addr = PhysicalAddress(addr)\n hdmi_network.active_source(addr)\n _LOGGER.info(\"Selected %s (%s)\", call.data[ATTR_DEVICE], addr)\n\n def _update(call: ServiceCall) -> None:\n \"\"\"Update if device update is needed.\n\n Called by service, requests CEC network to update data.\n \"\"\"\n hdmi_network.scan()\n\n def _new_device(device):\n \"\"\"Handle new devices which are detected by HDMI network.\"\"\"\n key = f\"{DOMAIN}.{device.name}\"\n hass.data[DOMAIN][key] = device\n ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)\n discovery.load_platform(\n hass,\n ent_platform,\n DOMAIN,\n discovered={ATTR_NEW: [key]},\n hass_config=base_config,\n )\n\n def _shutdown(call):\n hdmi_network.stop()\n\n def _start_cec(callback_event):\n \"\"\"Register services and start HDMI network to watch for devices.\"\"\"\n hass.services.register(\n DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA\n )\n hass.services.register(\n DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA\n )\n hass.services.register(\n DOMAIN,\n SERVICE_UPDATE_DEVICES,\n _update,\n schema=SERVICE_UPDATE_DEVICES_SCHEMA,\n )\n hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)\n hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)\n hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)\n\n hdmi_network.set_new_device_callback(_new_device)\n hdmi_network.start()\n\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)\n return True", "async def get_device_list(self):\n self.logger.debug(\"Retrieving device list information.\")\n 
#url = 'https://{}/api/user/device'.format(self.apiHost) #suddenly stopped worrking, so use\n '''\n #full version\n url = 'https://{}/api/user/device?lang=en&apiKey={}&getTags=1&version={}&ts={}&nonce={}&appid={}&imei={}&os={}&model={}&romVersion={}&appVersion={}'.format(self.apiHost,\n self.apikey,\n self.timestamp,\n self._version,\n self._nonce,\n self._appid,\n self._imei,\n self._os,\n self._model,\n self._romVersion,\n self._appVersion)\n '''\n url = 'https://{}/api/user/device?version={}&appid={}'.format(self.apiHost, self._version, self._appid)\n headers = {\n 'Authorization': 'Bearer %s' % self.authenticationToken,\n }\n self.logger.debug('url: %s, headers: %s' % (url, headers))\n async with ClientSession() as session:\n async with session.get(url, headers=headers) as response:\n json_response = await response.json()\n \n self.logger.debug('received response status: %s' % response.status) \n self.logger.debug('received response: %s' % self.pprint(json_response))\n if response.status != 200:\n self.logger.error('error: %s received' % response.status)\n return\n \n if json_response.get(\"devicelist\"):\n self.logger.info('New response format found')\n json_response = json_response[\"devicelist\"]\n \n self.logger.debug('number of device(s) is: %d' % len(json_response))\n \n self._devices = json_response #list of devices and current configurations\n \n self._create_client_devices()\n \n '''\n Example Response:\n [\n {\n \"__v\": 0,\n \"_id\": \"5becffa6d2b4a3c34cb79b38\",\n \"apikey\": \"530303a6-cf2c-4246-894c-xxxxxxxxxxx\",\n \"brandName\": \"AUTOSLIDE\",\n \"createdAt\": \"2018-11-15T05:09:58.341Z\",\n \"deviceStatus\": \"\",\n \"deviceUrl\": \"\",\n \"deviceid\": \"100050xxxxx\",\n \"devicekey\": \"4123ec79-d2c3-4d32-930a-xxxxxxxxxxxxx\",\n \"extra\": {\n \"_id\": \"xxxxxxxxxxxxxxxx\",\n \"extra\": {\n \"apmac\": \"xx:xx:xx:xx:xx:xx\",\n \"brandId\": \"5a6fcf00f620073c67efc280\",\n \"description\": \"20180813001\",\n \"mac\": \"xx:xx:xx0:xx:xx:xx\",\n \"manufacturer\": \"\\u9752\\u5c9b\\u6fb3\\u601d\\u5fb7\\u667a\\u80fd\\u95e8\\u63a7\\u7cfb\\u7edf\\u6709\\u9650\\u516c\\u53f8\",\n \"model\": \"PSA-BTA-GL\",\n \"modelInfo\": \"5af3f5332c8642b001540dac\",\n \"ui\": \"\\u63a8\\u62c9\\u5ba0\\u7269\\u95e8\",\n \"uiid\": 54\n }\n },\n \"group\": \"\",\n \"groups\": [],\n \"ip\": \"xxx.xx.xx.xxx\",\n \"location\": \"\",\n \"name\": \"Patio Door\",\n \"offlineTime\": \"2018-12-31T07:23:31.018Z\",\n \"online\": true,\n \"onlineTime\": \"2018-12-31T12:19:33.216Z\",\n \"params\": {\n \"a\": \"3\",\n \"b\": \"3\",\n \"c\": \"1\",\n \"d\": \"1\",\n \"e\": \"1\",\n \"f\": \"1\",\n \"fwVersion\": \"2.0.2\",\n \"g\": \"0\",\n \"h\": \"1\",\n \"i\": \"0\",\n \"j\": \"00\",\n \"k\": \"0\",\n \"l\": \"1\",\n \"m\": \"2\",\n \"n\": \"0\",\n \"rssi\": -53,\n \"staMac\": \"xx:xx:xx:xx:xx:xx\"\n },\n \"productModel\": \"WFA-1\",\n \"settings\": {\n \"alarmNotify\": 1,\n \"opsHistory\": 1,\n \"opsNotify\": 0\n },\n \"sharedTo\": [\n {\n \"note\": \"\",\n \"permit\": 15,\n \"phoneNumber\": \"[email protected]\",\n \"shareTime\": 1542259546087\n }\n ],\n \"showBrand\": true,\n \"type\": \"10\",\n \"uiid\": 54\n }\n ]\n \n or New format:\n {\n \"devicelist\": [\n {\n \"__v\": 0,\n \"_id\": \"5c3665d012d28ae6ba4943c8\",\n \"apikey\": \"530303a6-cf2c-4246-894c-50855b00e6d8\",\n \"brandLogoUrl\": \"https://us-ota.coolkit.cc/logo/KRZ54OifuGmjoEMxT1YYM3Ybu2fj5K2C.png\",\n \"brandName\": \"Sonoff\",\n \"createdAt\": \"2019-01-09T21:21:20.402Z\",\n \"devConfig\": {},\n \"devGroups\": [],\n \"deviceStatus\": 
\"\",\n ... as before\n '''", "async def async_setup_platform(hass, config, async_add_entities,\n discovery_info=None):\n from pyhs3 import HASS_SENSORS, DEVICE_ZWAVE_BATTERY\n\n sensor_devices = []\n homeseer = hass.data[DOMAIN]\n\n for device in homeseer.devices:\n if device.device_type_string in HASS_SENSORS:\n if device.device_type_string == DEVICE_ZWAVE_BATTERY:\n dev = HSBattery(device, homeseer)\n else:\n dev = HSSensor(device, homeseer)\n sensor_devices.append(dev)\n _LOGGER.info('Added HomeSeer sensor-type device: {}'.format(dev.name))\n\n async_add_entities(sensor_devices)", "async def async_setup(hass, config):\n\n conf = config[DOMAIN]\n port = conf.get(CONF_PORT)\n host = conf.get(CONF_HOST)\n ip_port = conf.get(CONF_IP_PORT)\n username = conf.get(CONF_HUB_USERNAME)\n password = conf.get(CONF_HUB_PASSWORD)\n hub_version = conf.get(CONF_HUB_VERSION)\n\n if host:\n _LOGGER.info(\"Connecting to Insteon Hub on %s:%d\", host, ip_port)\n else:\n _LOGGER.info(\"Connecting to Insteon PLM on %s\", port)\n\n try:\n await async_connect(\n device=port,\n host=host,\n port=ip_port,\n username=username,\n password=password,\n hub_version=hub_version,\n )\n except ConnectionError:\n _LOGGER.error(\"Could not connect to Insteon modem\")\n return False\n _LOGGER.info(\"Connection to Insteon modem successful\")\n\n hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_insteon_connection)\n conf = config[DOMAIN]\n overrides = conf.get(CONF_OVERRIDE, [])\n x10_devices = conf.get(CONF_X10, [])\n\n await devices.async_load(\n workdir=hass.config.config_dir, id_devices=0, load_modem_aldb=0\n )\n\n for device_override in overrides:\n # Override the device default capabilities for a specific address\n address = device_override.get(\"address\")\n if not devices.get(address):\n cat = device_override[CONF_CAT]\n subcat = device_override[CONF_SUBCAT]\n firmware = device_override.get(CONF_FIRMWARE)\n if firmware is None:\n firmware = device_override.get(CONF_PRODUCT_KEY, 0)\n devices.set_id(address, cat, subcat, firmware)\n\n for device in x10_devices:\n housecode = device.get(CONF_HOUSECODE)\n unitcode = device.get(CONF_UNITCODE)\n x10_type = \"on_off\"\n steps = device.get(CONF_DIM_STEPS, 22)\n if device.get(CONF_PLATFORM) == \"light\":\n x10_type = \"dimmable\"\n elif device.get(CONF_PLATFORM) == \"binary_sensor\":\n x10_type = \"sensor\"\n _LOGGER.debug(\n \"Adding X10 device to Insteon: %s %d %s\", housecode, unitcode, x10_type\n )\n device = devices.add_x10_device(housecode, unitcode, x10_type, steps)\n\n asyncio.create_task(async_setup_platforms(hass, config))\n return True", "def setup(hass, config):\n _LOGGER.debug(\"Heatmeter init.py: config = %s\", config[DOMAIN])\n\n hass.data[DOMAIN] = {}\n hass.data[DOMAIN][CONF_HOST] = config[DOMAIN][CONF_HOST]\n hass.data[DOMAIN][CONF_PORT] = config[DOMAIN][CONF_PORT]\n hass.data[DOMAIN][CONF_USERNAME] = config[DOMAIN][CONF_USERNAME]\n hass.data[DOMAIN][CONF_PASSWORD] = config[DOMAIN][CONF_PASSWORD]\n\n _LOGGER.debug(\"Heatmeter init.py: hass.data = %s\", hass.data[DOMAIN])\n\n\n def handle_setpoint(call):\n \"\"\"Handle the service call.\"\"\"\n _LOGGER.debug(\"Heatmeter init.py: call = %s\", call)\n \n temp = call.data.get(TEMPERATURE_NAME, TEMPERATURE_DEFAULT)\n _LOGGER.debug(\"Heatmeter init.py: temp = %s\", temp)\n\n\n try:\n data = {'username':hass.data[DOMAIN][CONF_USERNAME], \n 'password':hass.data[DOMAIN][CONF_PASSWORD]}\n\n _LOGGER.debug(\"Heatmeter handle_setpoint: data = %s\", data)\n\n url = ADMIN_URL.format(\n 
hass.data[DOMAIN][CONF_HOST], hass.data[DOMAIN][CONF_PORT]\n )\n _LOGGER.debug(\"Heatmeter handle_setpoint: ADMIN_URL = %s\", url)\n \n r = requests.post(url, data = data)\n if r.status_code == 200:\n _LOGGER.debug(\"Heatmeter handle_setpoint Status: %s\" % (r.text))\n _LOGGER.debug(\"Heatmeter handle_setpoint headers: %s\" % (r.headers))\n \n tokens = r.headers['set-cookie'].split(';')\n headers = {'Cookie': tokens[0] +';'}\n \n url = SET_URL.format(\n hass.data[DOMAIN][CONF_HOST], hass.data[DOMAIN][CONF_PORT], tokens[2] , temp\n )\n _LOGGER.debug(\"Heatmeter handle_setpoint: SET_URL = %s\", url)\n #url = 'http://smoker.lan/luci/;'+ tokens[2] + '/admin/lm/set?sp=' + temp\n r = requests.get(url, headers=headers)\n if r.status_code == 200:\n _LOGGER.info(\"Heatmeter handle_setpoint Setpoint updated: %s\" % (temp))\n\n except requests.exceptions.RequestException as e: # This is the correct syntax\n _LOGGER.error(\"Heatmeter handle_setpoint Post Connection error %s\" % (e))\n\n hass.services.register(DOMAIN, 'set_temperature', handle_setpoint)\n\n hass.helpers.discovery.load_platform('sensor', DOMAIN, {}, config)\n\n # Return boolean to indicate that initialization was successfully.\n return True", "async def async_discovery(hass: HomeAssistant) -> list[dict[str, Any]]:\n LOGGER.debug(\"Starting ONVIF discovery\")\n services = await hass.async_add_executor_job(wsdiscovery)\n\n devices = []\n for service in services:\n url = urlparse(service.getXAddrs()[0])\n device = {\n CONF_DEVICE_ID: None,\n CONF_NAME: service.getEPR(),\n CONF_HOST: url.hostname,\n CONF_PORT: url.port or 80,\n CONF_HARDWARE: None,\n }\n for scope in service.getScopes():\n scope_str = scope.getValue()\n if scope_str.lower().startswith(\"onvif://www.onvif.org/name\"):\n device[CONF_NAME] = scope_str.split(\"/\")[-1]\n if scope_str.lower().startswith(\"onvif://www.onvif.org/hardware\"):\n device[CONF_HARDWARE] = scope_str.split(\"/\")[-1]\n if scope_str.lower().startswith(\"onvif://www.onvif.org/mac\"):\n device[CONF_DEVICE_ID] = scope_str.split(\"/\")[-1]\n devices.append(device)\n\n return devices", "async def async_setup_platform(\n hass, config, async_add_entities, discovery_info=None):\n\n for device_config in config[CONF_DEVICES]:\n host = device_config[CONF_HOST]\n username = device_config[CONF_USERNAME]\n password = device_config[CONF_PASSWORD]\n interval = device_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)\n\n session = async_create_clientsession(hass, cookie_jar=aiohttp.CookieJar(unsafe=True))\n\n poe_data = ZyxelPoeData(host, username, password, interval, session)\n\n await poe_data.async_update()\n\n switches = list()\n for port, data in poe_data.ports.items():\n switches.append(ZyxelPoeSwitch(poe_data, host, port))\n\n async_add_entities(switches, False)", "def setup_platform(\n hass: HomeAssistant,\n config: Dict,\n add_devices: Callable,\n discovery_info: Optional[Dict] = None,\n) -> None:\n havdalah = config[HAVDALAH_MINUTES]\n candle_light = config[CANDLE_LIGHT_MINUTES]\n cities = config[GEONAMES]\n cities_list = cities.split(\",\")\n\n add_devices(\n [\n ShabbatTimes(\n hass,\n city,\n \"Shabbat Times {}\".format(city.replace(\"-\", \"_\")),\n havdalah,\n candle_light,\n )\n for city in cities_list\n ]\n )", "def setup_method(self):\n self.hass = get_test_home_assistant()\n\n self.config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\", \"name\": \"test local\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n 
mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n self.endpoint_url = f\"https://westus.{mf.FACE_API_URL}\"", "async def main():\n data_file = open(\"data_file_nefit2.txt\", \"r\")\n data = data_file.read().splitlines()\n loop = asyncio.get_event_loop()\n BoschGateway = bosch.gateway_chooser(device_type=NEFIT)\n gateway = BoschGateway(session=loop,\n session_type=XMPP,\n host=data[0],\n access_token=data[1],\n password=data[2],\n nefit_connector=NefitConnector2)\n # gateway = BoschGateway(session=loop,\n # session_type=\"xmpp\",\n # host=data[0],\n # access_key=data[1],\n # password=data[2])\n print(await gateway.custom_test())\n # await gateway.initialize()\n # return\n # print(f\"UUID {await gateway.check_connection()}\")\n\n # small = await gateway.smallscan(DHW_CIRCUITS)\n# myjson = json.loads(small)\n # print(small)\n # return\n sensors = gateway.initialize_sensors()\n for sensor in sensors:\n await sensor.update()\n for sensor in sensors:\n print(f\"{sensor.name} : {sensor.state}\")\n await gateway.get_capabilities()\n for hc in gateway.heating_circuits:\n await hc.update()\n print(\"hvac mode\", hc.ha_mode)\n print(\"target temp ->\", hc.target_temperature)\n return\n \n# await hc.set_ha_mode(\"auto\") #MEANS AUTO\n# await hc.update()\n # time.sleep(4)\n await dhw.set_temperature(53.0)\n # return\n # return\n # await dhw.set_ha_mode(\"performance\") #MEANS MANUAL\n return\n # print(\"target in manual\", hc.target_temperature)\n # print(\"ha mode in manual\", hc.ha_mode)\n # await hc.update()\n # print(\"target after update\", hc.target_temperature)\n # print(\"ha mode\", hc.ha_mode)\n\n # await hc.set_ha_mode(\"auto\") #MEANS AUTO\n # print(\"target after auto without update\", hc.target_temperature)\n # print(\"ha mode\", hc.ha_mode)\n\n # return\n # print(await hc.set_temperature(10.0))\n # print(\"ustawiona!\")\n dhws = gateway.dhw_circuits\n dhw = dhws[0]\n await dhw.update()\n print(\"START1\")\n print(dhw.target_temperature)\n print(\"START2\")\n print(dhw.current_mode)\n print(dhw.target_temperature)\n \n return\n print(\"START3\")\n print(dhw.target_temperature)\n return\n # print(hc.schedule)\n print(gateway.get_info(DATE))\n # print(await gateway.rawscan())\n #print(hc.schedule.get_temp_for_date(gateway.get_info(DATE)))\n return\n aa=0\n while aa < 10:\n time.sleep(1)\n await hc.update()\n print(hc.target_temperature)\n aa = aa+1\n \n await hc.set_operation_mode(\"auto\")\n\n aa=0\n while aa < 10:\n time.sleep(1)\n await hc.update()\n print(hc.target_temperature)\n aa = aa+1\n\n # print(gateway.get_property(TYPE_INFO, UUID))\n await loop.close()", "async def async_setup(hass: HomeAssistant, config):\n if DOMAIN not in config:\n return True\n\n conf_adapters = config[DOMAIN].get(CONF_ADAPTERS)\n if not conf_adapters:\n return False\n\n hass.data.setdefault(DOMAIN, {}).update({CONF_INDOOR_UNITS: {}})\n\n for conf_adapter in conf_adapters:\n conf_adapter_name = conf_adapter[CONF_ADAPTER_NAME]\n conf_adapter_host = conf_adapter[CONF_ADAPTER_HOST]\n conf_adapter_port = conf_adapter[CONF_ADAPTER_PORT]\n conf_adapter_slave = conf_adapter[CONF_ADAPTER_SLAVE]\n adapter : DaikinAPI = await hass.async_add_executor_job(create_adapter, conf_adapter_host, conf_adapter_port, conf_adapter_slave)\n\n for indoor_unit in adapter.indoor_units.values():\n indoor_unit_global_name = \"daikin_dta116a621_\" + conf_adapter_name + \"_\" + str(indoor_unit.indoor_unit_id).replace(\"-\",\"_\")\n hass.data[DOMAIN][CONF_INDOOR_UNITS][indoor_unit_global_name] = indoor_unit\n 
hass.helpers.discovery.load_platform('climate', DOMAIN, {}, config)\n #discovery.async_load_platform(hass, DOMAIN, 'climate', {}, config)\n return True", "async def setup_homeassistant(hass: HomeAssistant):\n await async_setup_component(hass, \"homeassistant\", {})", "def _get_data(self):\n devices = []\n try:\n if not self.router_client.login():\n self.hass.states.set(f\"{DOMAIN}.statusmsg\", self.router_client.statusmsg)\n _LOGGER.warning(\"Login failed: {0}:{1}@{2}\".format(self.router_client.username, self.router_client.password,self.router_client.host))\n self.router_client.logout()\n return devices\n\n devices_json = self.router_client.get_devices_response()\n finally:\n self.router_client.logout()\n\n self.hass.states.set(f\"{DOMAIN}.scanning\", devices_json != False)\n\n if devices_json != False:\n for device in devices_json:\n # _LOGGER.debug(\"Device: {0}\".format(device))\n dev = Device(\n device['HostName'].replace('未知设备', 'Unknown'),\n device['IPAddress'],\n device['MACAddress'],\n device['Active'],\n ICONS.get(device['IconType'])\n )\n # _LOGGER.debug(\"Device: {0}\".format(dev))\n devices.append(dev)\n return devices\n else:\n return []", "async def async_setup_platform(hass, hass_config, async_add_entities,\n discovery_info=None):\n client = hass.data[DOMAIN]['client']\n\n entities = [GeniusWaterHeater(client, z)\n for z in client.hub.zone_objs if z.type in GH_HEATERS]\n\n async_add_entities(entities)", "async def initialize(self):\n client_session = aiohttp_client.async_get_clientsession(self.hass)\n self.easee = Easee(self.username, self.password, client_session)\n\n try:\n with timeout(TIMEOUT):\n await self.easee.connect()\n except asyncio.TimeoutError as err:\n _LOGGER.debug(\"Connection to easee login timed out\")\n raise ConfigEntryNotReady from err\n except ServerFailureException as err:\n _LOGGER.debug(\"Easee server failure\")\n raise ConfigEntryNotReady from err\n except TooManyRequestsException as err:\n _LOGGER.debug(\"Easee server too many requests\")\n raise ConfigEntryNotReady from err\n except AuthorizationFailedException as err:\n _LOGGER.error(\"Authorization failed to easee\")\n raise Unauthorized from err\n except Exception: # pylint: disable=broad-except\n _LOGGER.error(\"Unexpected error creating device\")\n return None\n\n self.sites: List[Site] = await self.easee.get_sites()\n\n self.monitored_sites = self.config.options.get(\n CONF_MONITORED_SITES, [site[\"name\"] for site in self.sites]\n )\n\n for site in self.sites:\n if not site[\"name\"] in self.monitored_sites:\n _LOGGER.debug(\"Found site (unmonitored): %s %s\", site.id, site[\"name\"])\n else:\n _LOGGER.debug(\"Found site (monitored): %s %s\", site.id, site[\"name\"])\n for equalizer in site.get_equalizers():\n _LOGGER.debug(\n \"Found equalizer: %s %s\", equalizer.id, equalizer[\"name\"]\n )\n self.equalizers.append(equalizer)\n equalizer_data = EqualizerData(equalizer, site)\n self.equalizers_data.append(equalizer_data)\n for circuit in site.get_circuits():\n _LOGGER.debug(\n \"Found circuit: %s %s\", circuit.id, circuit[\"panelName\"]\n )\n self.circuits.append(circuit)\n for charger in circuit.get_chargers():\n _LOGGER.debug(\"Found charger: %s %s\", charger.id, charger.name)\n self.chargers.append(charger)\n charger_data = ChargerData(charger, circuit, site)\n self.chargers_data.append(charger_data)\n\n self._create_entitites()", "def setup_platform(hass, config, add_devices, discovery_info=None):\n from evohomeclient import EvohomeClient\n\n username = config.get(CONF_USERNAME)\n 
password = config.get(CONF_PASSWORD)\n\n if username is None or password is None:\n _LOGGER.error(\"Missing required configuration items %s or %s\",\n CONF_USERNAME, CONF_PASSWORD)\n return False\n\n evo_api = EvohomeClient(username, password)\n try:\n add_devices([RoundThermostat(evo_api)])\n except socket.error:\n _LOGGER.error(\n \"Connection error logging into the honeywell evohome web service\"\n )\n return False", "def setup_platform(hass, config, add_entities, discovery_info=None):\n ham_data = hass.data.get(DATA_HAM)\n\n if not ham_data:\n return\n\n sensors = []\n data_provider = {\n ATTR_WEEKDAY: {\n ATTR_STATE: ham_data.get_weekday,\n ATTR_ATTRIBUTES: None\n },\n ATTR_DAY_PART: {\n ATTR_STATE: ham_data.get_day_part,\n ATTR_ATTRIBUTES: None\n },\n ATTR_CURRENT_PROFILE: {\n ATTR_STATE: ham_data.get_current_profile,\n ATTR_ATTRIBUTES: ham_data.get_current_profile_data_parts\n },\n ATTR_CURRENT_SCENE: {\n ATTR_STATE: ham_data.get_current_scene,\n ATTR_ATTRIBUTES: None\n }\n }\n\n for sensor_type in SENSOR_TYPES:\n sensor_type_data = SENSOR_TYPES[sensor_type]\n sensor_name = sensor_type_data[0]\n sensor_icon = sensor_type_data[len(sensor_type_data) - 1]\n \n sensor = HomeAutomationManagerSensor(sensor_name, sensor_type, sensor_icon, hass, data_provider)\n \n sensors.append(sensor)\n \n add_entities(sensors, True)", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n bt_device_id: int = config[CONF_BT_DEVICE_ID]\n\n beacons: dict[str, dict[str, str]] = config[CONF_BEACONS]\n devices: list[EddystoneTemp] = []\n\n for dev_name, properties in beacons.items():\n namespace = get_from_conf(properties, CONF_NAMESPACE, 20)\n instance = get_from_conf(properties, CONF_INSTANCE, 12)\n name = properties.get(CONF_NAME, dev_name)\n\n if instance is None or namespace is None:\n _LOGGER.error(\"Skipping %s\", dev_name)\n continue\n\n devices.append(EddystoneTemp(name, namespace, instance))\n\n if devices:\n mon = Monitor(hass, devices, bt_device_id)\n\n def monitor_stop(event: Event) -> None:\n \"\"\"Stop the monitor thread.\"\"\"\n _LOGGER.info(\"Stopping scanner for Eddystone beacons\")\n mon.stop()\n\n def monitor_start(event: Event) -> None:\n \"\"\"Start the monitor thread.\"\"\"\n _LOGGER.info(\"Starting scanner for Eddystone beacons\")\n mon.start()\n\n add_entities(devices)\n mon.start()\n hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)\n hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start)\n else:\n _LOGGER.warning(\"No devices were added\")", "def device_discovery(endless):\r\n click.echo(\"start device discovery ...\")\r\n _device_discovery(endless)", "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n lights = []\n for channel, device_config in config[CONF_DEVICES].items():\n device = {}\n device[\"name\"] = device_config[CONF_NAME]\n device[\"dimmable\"] = device_config[\"dimmable\"]\n device[\"channel\"] = channel\n device[\"driver\"] = config[CONF_DRIVER]\n device[\"host\"] = config[CONF_HOST]\n device[\"port\"] = config[CONF_PORT]\n lights.append(FutureNowLight(device))\n\n add_entities(lights, True)", "def device_config(i):\n apipath = \"/targets/devices\"\n url = SERVER + apipath\n params = {\n 'q': '(deviceType:ASA)',\n 'resolve': '[targets/devices.{name,deviceConfig}]',\n 'sort': 'name:desc',\n 'limit': 
NUM_DEVICES_TO_RETRIEVE_PER_QUERY,\n 'offset': i}\n headers = {\n 'Accept': \"application/json\",\n 'Content-Type': \"application/json\",\n 'Authorization': \"bearer {}\".format(token)}\n response = requests.get(url, verify=False, stream=True, headers=headers, params=params)\n getstatuscode = response.status_code\n getresponse = response.json()\n if getstatuscode == 200:\n return getresponse\n else:\n response.raise_for_status()", "def setup_platform(hass, config: ConfigType,\n add_devices: Callable[[list], None], discovery_info=[]):\n elk = hass.data['PyElk']['connection']\n elk_config = hass.data['PyElk']['config']\n discovered_devices = hass.data['PyElk']['discovered_devices']\n if elk is None:\n _LOGGER.error('Elk is None')\n return False\n if not elk.connected:\n _LOGGER.error('A connection has not been made to the Elk panel.')\n return False\n devices = []\n from PyElk.Thermostat import Thermostat as ElkThermostat\n # If no discovery info was passed in, discover automatically\n if len(discovery_info) == 0:\n # Gather areas\n for node in elk.THERMOSTATS:\n if node:\n if node.included is True and node.enabled is True:\n discovery_info.append(node)\n # If discovery info was passed in, check if we want to include it\n else:\n for node in discovery_info:\n if node.included is True and node.enabled is True:\n continue\n else:\n discovery_info.remove(node)\n # Add discovered devices\n for node in discovery_info:\n if isinstance(node, ElkThermostat):\n node_name = 'climate.' + 'elk_thermostat_' + format(node.number, '02')\n else:\n continue\n if node_name not in discovered_devices:\n _LOGGER.debug('Loading Elk %s: %s', node.classname, node.description_pretty())\n device = ElkClimateDevice(node)\n discovered_devices[node_name] = device\n devices.append(device)\n else:\n _LOGGER.debug('Skipping already loaded Elk %s: %s', node.classname, node.description_pretty())\n\n add_devices(devices, True)\n return True" ]
[ "0.62153625", "0.61709046", "0.61675733", "0.6046003", "0.6033546", "0.6017969", "0.60002464", "0.5992273", "0.5990323", "0.5985458", "0.5971451", "0.596242", "0.59595805", "0.5925293", "0.59210336", "0.5850367", "0.5831101", "0.582566", "0.5823615", "0.58009064", "0.5798444", "0.5797993", "0.57907015", "0.5790522", "0.5760821", "0.575075", "0.57176477", "0.57152605", "0.57040715", "0.5703769" ]
0.68286645
0
Find the Home Assistant implementation for the Heatzy device. The implementation lookup is based on the device 'product_key'. If no implementation is found, None is returned.
def find_heatzy_device_implementation(device): DeviceImplementation = PRODUCT_KEY_TO_DEVICE_IMPLEMENTATION.get( device.get('product_key')) if DeviceImplementation is None: _LOGGER.warn('Device %s with product key %s is not supported', device.get('did'), device.get('product_key')) return None return DeviceImplementation(api, device)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def finddevice():\n\n return next((device for device in [\"xpu\"] if hasattr(torch, device) and getattr(torch, device).is_available()), None)", "def version_homeassistant(self):\n return self._data.get(ATTR_HOMEASSISTANT)", "def find_hardware(self, device_info=None):\n if os.name is not 'nt': # If not on a Windows system, just set up soundcard\n self.setup_soundcard()\n self.hardware.append('Soundcard')\n self.out_samplefreq = 44100\n else:\n if 'NIDAQ' in self.required_hardware and self.setup_nidaq(device_info):\n self.hardware.append('NIDAQ')\n if 'RP21' in self.required_hardware and self.setup_RP21('c:\\pystartle\\startle.rco'):\n self.hardware.append('RP21')\n if 'PA5' in self.required_hardware and self.setup_PA5():\n self.hardware.append('PA5')\n if 'RZ5D' in self.required_hardware and self.setup_RZ5D():\n self.hardware.append('RZ5D')", "def find_device(devices, *, usage_page, usage):\n if hasattr(devices, \"send_report\"):\n devices = [devices]\n for device in devices:\n if (\n device.usage_page == usage_page\n and device.usage == usage\n and hasattr(device, \"send_report\")\n ):\n return device\n raise ValueError(\"Could not find matching HID device.\")", "async def get_hubitat_device(\n hass: HomeAssistant, device_id: str\n) -> Tuple[Optional[Device], Optional[Hub]]:\n device = await get_device(hass, device_id)\n if device is None:\n return None, None\n\n hubitat_id = None\n for identifier in device.identifiers:\n if identifier[0] == DOMAIN:\n hubitat_id = identifier[1]\n break\n\n if hubitat_id is None:\n _LOGGER.debug(\"Couldn't find Hubitat ID for device %s\", device_id)\n return None, None\n\n for entry_id in device.config_entries:\n hub = get_hub(hass, entry_id)\n if hubitat_id in hub.devices:\n return hub.devices[hubitat_id], hub\n\n _LOGGER.debug(\"Couldn't find Hubitat device for ID %s\", hubitat_id)\n return None, None", "def homeassistant(self):\r\n return self.manifest.get(\"homeassistant\")", "async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix):\n assert await async_setup_component(\n hass,\n DOMAIN,\n {\n DOMAIN: {\n CONF_DISCOVERY: False,\n CONF_STATIC: [f\"{MOCK_HOST}:{MOCK_PORT}\"],\n },\n },\n )\n await hass.async_block_till_done()\n\n entity_registry = er.async_get(hass)\n for entry in entity_registry.entities.values():\n if entry.entity_id.endswith(wemo_entity_suffix or pywemo_device.name.lower()):\n return entry\n\n return None", "async def get_device(hass: HomeAssistant, device_id: str) -> Optional[DeviceEntry]:\n device_registry = await hass.helpers.device_registry.async_get_registry()\n return device_registry.async_get(device_id)", "def evaluate_hardware_support(self):\n return hardware.HardwareSupport.SERVICE_PROVIDER", "def _get_product(self):\n try:\n return self.activities[industry.MANUFACTURING].products[0].typeID\n except (KeyError, IndexError):\n return None", "def get_implementation(self):\n return self.__capabilities[\"IMPLEMENTATION\"]", "def _find_device(self):\n for bus in usb.busses():\n for dev in bus.devices:\n if dev.idVendor == self.vendor_id and dev.idProduct == self.product_id:\n if self.device_id is None or dev.filename == self.device_id:\n log.info('found station on USB bus=%s device=%s' % (bus.dirname, dev.filename))\n return dev\n return None", "def get_kwh(self):\n\n svc = \"urn:micasaverde-com:serviceId:EnergyMetering1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"KWH\")", "def find_device():\n device = usb.core.find(\n 
idVendor=LuxaforFlag.DEVICE_VENDOR_ID,\n idProduct=LuxaforFlag.DEVICE_PRODUCT_ID\n )\n return device", "def find_device(device):\n return usb.core.find(idVendor=device['idVendor'], idProduct=device['idProduct'])", "def _find_device(self):\n found_device = False\n nearby_devices = None\n try:\n nearby_devices = self._adapter.scan()\n except Exception:\n pass\n\n if nearby_devices is not None:\n for device in nearby_devices:\n name = device['name']\n if name is not None and name.startswith(self._search_name):\n self._address = device['address']\n print(f'Found device named: {name} at {self._address}')\n found_device = True\n break\n\n return found_device", "def get_scanner(hass, config):\n scanner = HuaweiHG659DeviceScanner(config[DOMAIN])\n\n return scanner", "def get_sensor_entity(device, connection):\n if device.device_type_string == DEVICE_ZWAVE_BATTERY:\n return HomeSeerBatterySensor(device, connection)\n elif device.device_type_string == DEVICE_ZWAVE_RELATIVE_HUMIDITY:\n return HomeSeerHumiditySensor(device, connection)\n elif device.device_type_string == DEVICE_ZWAVE_FAN_STATE:\n return HomeSeerFanStateSensor(device, connection)\n elif device.device_type_string == DEVICE_ZWAVE_OPERATING_STATE:\n return HomeSeerOperatingStateSensor(device, connection)\n elif device.device_type_string == DEVICE_ZWAVE_DOOR_LOCK_LOGGING:\n return HomeSeerDoorLockLoggingSensor(device, connection)\n elif device.device_type_string in GENERIC_VALUE_SENSOR_TYPES:\n return HomeSeerValueSensor(device, connection)\n return HomeSeerStatusSensor(device, connection)", "def get_device_details(device):\n ret = device.wait_for_output(\"SetupQRCode\")\n if ret is None or len(ret) < 2:\n return None\n\n qr_code = re.sub(\n r\"[\\[\\]]\", \"\", ret[-1].partition(\"SetupQRCode:\")[2]).strip()\n try:\n device_details = dict(SetupPayload().ParseQrCode(\n \"VP:vendorpayload%{}\".format(qr_code)).attributes)\n except exceptions.ChipStackError as ex:\n log.error(ex.msg)\n return None\n\n return device_details", "async def device_fixture(hass: HomeAssistant, ufp: MockUFPFixture):\n\n await init_entry(hass, ufp, [])\n\n device_registry = dr.async_get(hass)\n\n return list(device_registry.devices.values())[0]", "def get_chassis_type(device):\n\n try:\n out = device.parse('show version')\n except SubCommandFailure:\n log.info('Could not get device version information')\n return None\n\n return out.q.get_values('chassis', 0)", "def get_host_info(hass: HomeAssistant) -> dict[str, Any] | None:\n return hass.data.get(DATA_HOST_INFO)", "def get_hypixel_key(self):\n key = self.bot_data_file[\"apiKeys\"][\"hypixel\"]\n if self.check_empty_key(key):\n return key\n else:\n print(\"ERROR GETTING THE HYPIXEL KEY (get yours from https://api.hypixel.net/) - ABORTING\")\n quit(1)", "def get_vendor(mac):\r\n return p.get_manuf(mac) or 'None'", "def run(self):\n \n # shortcut for self\n s = self\n \n # shortcut to existing heating fuel\n fuel = s.exist_fuel\n\n # holds summary measures for the heat pump project (e.g. seasonal COP,\n # internal rate of return). 
Fill out first item: secondary fuel info.\n s.summary = {'fuel_unit': fuel.unit, 'fuel_desc': fuel.desc}\n \n # Create the home energy simulation object\n sim = HomeHeatModel(\n city_id=s.city_id,\n hp_model_id=s.hp_model_id,\n exist_heat_fuel_id=s.exist_heat_fuel_id,\n exist_heat_effic=s.exist_heat_effic,\n exist_kwh_per_mmbtu=s.exist_kwh_per_mmbtu, \n co2_lbs_per_kwh=s.co2_lbs_per_kwh,\n low_temp_cutoff=s.low_temp_cutoff,\n off_months=s.off_months_chks,\n garage_stall_count=s.garage_stall_count,\n garage_heated_by_hp=s.garage_heated_by_hp,\n bldg_floor_area=s.bldg_floor_area,\n indoor_heat_setpoint=s.indoor_heat_setpoint,\n insul_level=s.insul_level,\n pct_exposed_to_hp=s.pct_exposed_to_hp,\n doors_open_to_adjacent=s.doors_open_to_adjacent,\n bedroom_temp_tolerance=s.bedroom_temp_tolerance, \n )\n\n # If other end uses use the heating fuel, make an estimate of their annual\n # consumption of that fuel. This figure is expressed in the physical unit\n # for the fuel type, e.g. gallons of oil. Save this as an object attribute\n # so it is accessible in other routines. See Evernote notes on values (AkWarm\n # for DHW and Michael Bluejay for Drying and Cooking).\n is_electric = (s.exist_heat_fuel_id == constants.ELECTRIC_ID) # True if Electric\n s.fuel_other_uses = s.includes_dhw * 4.23e6 / fuel.dhw_effic\n s.fuel_other_uses += s.includes_dryer * (0.86e6 if is_electric else 2.15e6)\n s.fuel_other_uses += s.includes_cooking * (0.64e6 if is_electric else 0.8e6)\n s.fuel_other_uses *= s.occupant_count / fuel.btus\n\n # For elecric heat we also need to account for lights and other applicances not\n # itemized above.\n if is_electric:\n # Use the AkWarm Medium Lights/Appliances formula but take 25% off\n # due to efficiency improvements since then.\n s.lights_other_elec = 2086. + 1.20 * s.bldg_floor_area # kWh in the year\n else:\n s.lights_other_elec = 0.0\n \n # Match the existing space heating use if it is provided. 
Do so by using\n # the UA true up factor.\n if not is_null(s.exist_fuel_use):\n \n # Remove the energy use from the other end uses that use the fuel, unless\n # this is electric heat and the user indicated that the entered value is\n # just space heating.\n if is_electric and s.elec_uses=='space':\n # user explicitly indicated that the entered annual usage value is\n # just space heating.\n space_fuel_use = s.exist_fuel_use\n else:\n space_fuel_use = s.exist_fuel_use - s.fuel_other_uses - s.lights_other_elec\n\n sim.no_heat_pump_use = True\n sim.calculate()\n if is_electric:\n # For electric heat, electric use for space heat is in secondary_kwh\n fuel_use1 = sim.annual_results().secondary_kwh\n else:\n fuel_use1 = sim.annual_results().secondary_fuel_units\n \n # scale the UA linearly to attempt to match the target fuel use\n ua_true_up = space_fuel_use / fuel_use1\n sim.ua_true_up = ua_true_up\n sim.calculate()\n\n if is_electric:\n # For electric heat, electric use for space heat is in secondary_kwh\n fuel_use2 = sim.annual_results().secondary_kwh\n else:\n fuel_use2 = sim.annual_results().secondary_fuel_units\n \n # In case it wasn't linear, inter/extrapolate to the final ua_true_up\n slope = (fuel_use2 - fuel_use1)/(ua_true_up - 1.0)\n # print(space_fuel_use, fuel_use1, fuel_use2, ua_true_up)\n ua_true_up = 1.0 + (space_fuel_use - fuel_use1) / slope\n # print(ua_true_up)\n\n else:\n ua_true_up = 1.0\n \n # Set the UA true up value into the model and also save it as\n # an attribute of this object so it can be observed.\n sim.ua_true_up = ua_true_up\n s.ua_true_up = ua_true_up\n \n # Run the base case with no heat pump and record energy results.\n # This model only models the space heating end use.\n sim.no_heat_pump_use = True\n sim.calculate()\n s.df_mo_en_base = sim.monthly_results()\n s.ann_en_base = sim.annual_results()\n # print(s.ann_en_base.secondary_kwh)\n \n # Run the model with the heat pump and record energy results\n sim.no_heat_pump_use = False\n sim.calculate()\n s.df_mo_en_hp = sim.monthly_results()\n s.ann_en_hp = sim.annual_results()\n s.df_hourly = sim.df_hourly\n\n # record design heat load\n s.summary['design_heat_load'], s.summary['design_heat_temp'] = sim.design_heat_load()\n \n # Calculate some summary measures\n s.summary['cop'] = s.ann_en_hp.cop\n s.summary['hp_max_capacity_5F'] = sim.hp_max_capacity_5F()\n s.summary['max_hp_reached'] = sim.max_hp_reached\n \n # CO2 savings\n s.summary['co2_lbs_saved'] = s.ann_en_base.co2_lbs - s.ann_en_hp.co2_lbs\n s.summary['co2_driving_miles_saved'] = convert_co2_to_miles_driven(s.summary['co2_lbs_saved'])\n s.summary['hp_load_frac'] = s.ann_en_hp.hp_load_mmbtu / (s.ann_en_hp.hp_load_mmbtu + s.ann_en_hp.secondary_load_mmbtu)\n \n # Create DataFrames that hold monthly energy cost amounts\n # Results are stored as object attributes.\n self.calc_monthly_cash()\n \n # Create a multi-year Cash Flow DataFrame and summary economic measures.\n # Results are stored as object attributes.\n self.calc_cash_flow()\n\n # Save a gzipped pickle of this object using Unix time as the file name.\n # make a directory to hold the files\n save_dir = 'hpcalc_runs'\n Path(save_dir).mkdir(exist_ok=True)\n fname = f'{time.time():.2f}.pkl.gz'\n s.file_name = fname\n pickle.dump(self, gzip.open(f'{save_dir}/{fname}', 'wb'))", "def primaryHDU(self):\n if len(self.hdus) > 0:\n for hdu in self.hdus:\n if isinstance(hdu, pyfits.PrimaryHDU):\n return hdu\n else:\n return None", "async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> 
bool:\n\n # 0.104 introduced config entry unique id, this makes upgrading possible\n if entry.unique_id is None:\n new_data = dict(entry.data)\n\n hass.config_entries.async_update_entry(\n entry, unique_id=new_data[HMIPC_HAPID], data=new_data\n )\n\n hap = HomematicipHAP(hass, entry)\n hass.data[DOMAIN][entry.unique_id] = hap\n\n if not await hap.async_setup():\n return False\n\n await async_setup_services(hass)\n _async_remove_obsolete_entities(hass, entry, hap)\n\n # Register on HA stop event to gracefully shutdown HomematicIP Cloud connection\n hap.reset_connection_listener = hass.bus.async_listen_once(\n EVENT_HOMEASSISTANT_STOP, hap.shutdown\n )\n\n # Register hap as device in registry.\n device_registry = dr.async_get(hass)\n\n home = hap.home\n hapname = home.label if home.label != entry.unique_id else f\"Home-{home.label}\"\n\n device_registry.async_get_or_create(\n config_entry_id=entry.entry_id,\n identifiers={(DOMAIN, home.id)},\n manufacturer=\"eQ-3\",\n # Add the name from config entry.\n name=hapname,\n )\n return True", "async def async_setup_platform(hass, config, add_devices, discovery_info=None):\n # retrieve platform config\n username = config.get(CONF_USERNAME)\n password = config.get(CONF_PASSWORD)\n\n session = aiohttp_client.async_get_clientsession(hass)\n store = storage.Store(hass, STORAGE_VERSION, STORAGE_KEY)\n\n authenticator = HeatzyAuthenticator(session, store, username, password)\n api = HeatzyAPI(session, authenticator)\n\n # fetch configured Heatzy devices\n devices = await api.async_get_devices()\n # add all Heatzy devices with HA implementation to home assistant\n add_devices(filter(None.__ne__, map(setup_heatzy_device(api), devices)))\n return True", "def get_device_info(platform_path: str):\n device_name = os.path.basename(platform_path)\n try:\n platform_file = next(\n glob.iglob(os.path.join(glob.escape(platform_path), 'hw', f'*.[xd]sa')))\n except StopIteration as e:\n raise ValueError('cannot find platform file for %s' % device_name) from e\n with zipfile.ZipFile(platform_file) as platform:\n # platform_file must end with .xsa or .dsa, thus [:-4]\n with platform.open(os.path.basename(platform_file)[:-4] +\n '.hpfm') as metadata:\n platform_info = ET.parse(metadata).find('./xd:component/xd:platformInfo',\n XILINX_XML_NS)\n if platform_info is None:\n raise ValueError('cannot parse platform')\n clock_period = platform_info.find(\n \"./xd:systemClocks/xd:clock/[@xd:id='0']\", XILINX_XML_NS)\n if clock_period is None:\n raise ValueError('cannot find clock period in platform')\n part_num = platform_info.find('xd:deviceInfo', XILINX_XML_NS)\n if part_num is None:\n raise ValueError('cannot find part number in platform')\n return {\n 'clock_period':\n clock_period.attrib['{{{xd}}}period'.format(**XILINX_XML_NS)],\n 'part_num':\n part_num.attrib['{{{xd}}}name'.format(**XILINX_XML_NS)]\n }", "def find_stick():\n out = subprocess.check_output(\n \"gdbus introspect --system --dest org.freedesktop.UDisks \"\n \"--object-path /org/freedesktop/UDisks/devices --recurse \"\n \"--only-properties\".split())\n devs = zip(*((re.match(r\".* = '?(.*?)'?;\", x).group(1)\n for x in out.splitlines()\n if \"DriveConnectionInterface =\" in x\n or \"DeviceIsPartition =\" in x\n or \"DeviceFile = \" in x),)*3)\n try:\n return next(dev[2] for dev in devs if dev[0] == 'usb'\n and dev[1] == 'true')\n except StopIteration:\n return None" ]
[ "0.56770015", "0.54674935", "0.5386558", "0.5367639", "0.5356104", "0.5310794", "0.5292249", "0.528761", "0.5282866", "0.52739716", "0.5228391", "0.5215351", "0.51800424", "0.514778", "0.51102084", "0.5108489", "0.50883085", "0.50771827", "0.5076043", "0.5057647", "0.50539875", "0.50247294", "0.5022293", "0.5013659", "0.5011706", "0.50081813", "0.50074774", "0.5005181", "0.5000432", "0.49798203" ]
0.76875865
0
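A brief usage note for the record above: `find_heatzy_device_implementation` dispatches on the device's `product_key` through the `PRODUCT_KEY_TO_DEVICE_IMPLEMENTATION` mapping and returns None for unsupported devices, which the platform setup then filters out (as in the `add_devices(filter(None.__ne__, ...))` call among the negatives). The sketch below is hypothetical: the class name, the product_key value, and the stand-in `api` object are placeholders; only the lookup-and-filter pattern is taken from the record.

```python
# Hypothetical sketch of the surrounding wiring: a product_key -> class
# mapping plus the None-filtering used when collecting entities.
class HeatzyPiloteThermostat:  # placeholder implementation class
    def __init__(self, api, device):
        self._api = api
        self._device = device


PRODUCT_KEY_TO_DEVICE_IMPLEMENTATION = {
    "example_product_key": HeatzyPiloteThermostat,  # assumed value
}

api = object()  # stands in for the Heatzy API client
devices = [
    {"did": "dev-1", "product_key": "example_product_key"},
    {"did": "dev-2", "product_key": "unknown"},  # unsupported -> skipped
]
entities = [
    impl(api, d)
    for d in devices
    if (impl := PRODUCT_KEY_TO_DEVICE_IMPLEMENTATION.get(d.get("product_key")))
]
# Only dev-1 yields an entity, matching the "returns None if not found" behaviour.
```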
Parse a signed transaction document, check its validity, verify the signature, and add it to the local blockchain. Broadcast to the same endpoint on the other network nodes if required.
def submit_transaction(): data = request.get_json() # Create candidate transaction object try: tx = Transaction.from_dict(data['transaction']) except (KeyError, TypeError): response = dict(message='Improper transaction json provided.') status_code = 400 return jsonify(response), status_code statuses = [] # Broadcast if needed and turn off broadcasting for other nodes if request.args.get('broadcast', type=int, default=0): for node_ in node.network: if not node_['id'] == node.node_id: response = requests.post( node_['ip'] + '/transactions/submit?broadcast=0', json=dict( transaction=data['transaction'], signature=data['signature'] ) ) statuses.append(response.status_code) if not response.status_code == 200: response = dict(message='Transaction rejected by the network.') return jsonify(response), 202 # Validate transaction as-is val_result = validate_transaction_document(tx) if isinstance(val_result, str): response = dict(message=val_result) status_code = 400 return jsonify(response), status_code # Verify signature # defined in backend/utils sign_result = verify_signature(tx, data['signature']) if isinstance(sign_result, str): response = dict(message=sign_result) status_code = 400 return jsonify(response), status_code # Add transaction to local blockchain node.blkchain.add_transaction(tx) myurl = node.network[node.node_id]['ip'] url = myurl + '/blockchain/mine_block' mine_resp = requests.get(url=url) if mine_resp.status_code == 200: block_dict = mine_resp.json() add_resp = requests.post(url=myurl + '/blockchain/add_block?\ broadcast=1', json=block_dict) # run consensus requests.get(url=myurl+'/blockchain/consensus') response = dict(message='Transaction added.') return jsonify(response), 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_transaction_signature(sender_address, signature, transaction):\n public_key = RSA.importKey(binascii.unhexlify(sender_address))\n verifier = PKCS1_v1_5.new(public_key)\n h = SHA.new(str(transaction).encode('utf8'))\n return verifier.verify(h, binascii.unhexlify(signature))", "def sign_transaction(self, transaction):\n try:\n address = transaction.from_address\n private_key = self.addresses[address]['private_key']\n transaction.sign_transaction(private_key)\n except Exception as ex:\n print(\"Error signing transaction from address: \" + address + \" \" + str(ex))", "def verify_signature(self, sender_address: str, signature, transaction: dict) -> bool:\n try:\n public_key = serialization.load_pem_public_key(\n binascii.unhexlify(sender_address.encode('utf8')),\n backend=default_backend()\n )\n public_key.verify(\n signature,\n str(transaction).encode('utf8'),\n padding.PSS(\n mgf=padding.MGF1(hashes.SHA256()),\n salt_length=padding.PSS.MAX_LENGTH\n ),\n hashes.SHA256()\n )\n except:\n return False\n return True", "def handleTransaction(self, trans_str):\n # pharse the trans_str \n trans = Transaction()\n trans.parseJson(trans_str)\n new_block = self.nextBlock(trans)\n \n # Do not generate Block\n if not new_block:\n return True\n \n self.boradBlock(new_block)\n\n return True # Temporae", "def verify_signature(self, payload, signature, timestamp, public_key=None):\n timestamped_payload = timestamp + payload\n decoded_signature = Signature.fromBase64(signature)\n\n key = public_key or self.public_key\n return Ecdsa.verify(timestamped_payload, decoded_signature, key)", "def deserialize(cls, raw_transaction: bytes) -> Transaction:\n return cls.from_solders(SoldersTx.from_bytes(raw_transaction))", "def verify(blocknumber, trx, use_api):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n b = Blockchain(morphene_instance=stm)\n i = 0\n if not blocknumber:\n blocknumber = b.get_current_block_num()\n try:\n int(blocknumber)\n block = Block(blocknumber, morphene_instance=stm)\n if trx is not None:\n i = int(trx)\n trxs = [block.json_transactions[int(trx)]]\n else:\n trxs = block.json_transactions\n except Exception:\n trxs = [b.get_transaction(blocknumber)]\n blocknumber = trxs[0][\"block_num\"]\n wallet = Wallet(morphene_instance=stm)\n t = PrettyTable([\"trx\", \"Signer key\", \"Account\"])\n t.align = \"l\"\n if not use_api:\n from morphenepythonbase.signedtransactions import Signed_Transaction\n for trx in trxs:\n if not use_api:\n # trx is now identical to the output of get_transaction\n # This is just for testing porpuse\n if True:\n signed_tx = Signed_Transaction(trx.copy())\n else:\n tx = b.get_transaction(trx[\"transaction_id\"])\n signed_tx = Signed_Transaction(tx)\n public_keys = []\n for key in signed_tx.verify(chain=mph.chain_params, recover_parameter=True):\n public_keys.append(format(Base58(key, prefix=mph.prefix), mph.prefix))\n else:\n tx = TransactionBuilder(tx=trx, morphene_instance=stm)\n public_keys = tx.get_potential_signatures()\n accounts = []\n empty_public_keys = []\n for key in public_keys:\n account = wallet.getAccountFromPublicKey(key)\n if account is None:\n empty_public_keys.append(key)\n else:\n accounts.append(account)\n new_public_keys = []\n for key in public_keys:\n if key not in empty_public_keys or use_api:\n new_public_keys.append(key)\n if isinstance(new_public_keys, list) and len(new_public_keys) == 1:\n new_public_keys = new_public_keys[0]\n else:\n new_public_keys = json.dumps(new_public_keys, indent=4)\n if 
isinstance(accounts, list) and len(accounts) == 1:\n accounts = accounts[0]\n else:\n accounts = json.dumps(accounts, indent=4)\n t.add_row([\"%d\" % i, new_public_keys, accounts])\n i += 1\n print(t)", "def sign_raw_transaction(hexstring):\n try:\n stdout = subprocess.check_output([\"litecoin-cli\", \"signrawtransaction\", hexstring])\n signed_tx = json.loads(stdout.decode())\n except:\n sys.exit(1)\n\n return signed_tx", "def submit_and_store_transaction(self, signed_transaction_data):\n return self._call_account_method(\n 'submitAndStoreTransaction', {\n 'signedTransactionData': signed_transaction_data\n }\n )", "def run(send_to_bank=False):\n\n # Signed request\n sk = read_signing_key_file(os.path.join(SIGNING_KEY_DIR, 'cv_nid'))\n signed_request = generate_signed_request(\n data={\n 'end': '2020-07-09T22:10:25Z',\n 'start': '2020-08-09T22:10:25Z'\n },\n nid_signing_key=sk\n )\n\n if send_to_bank:\n send_request_to_bank(signed_request)\n\n write_json(\n os.path.join(SIGNED_REQUESTS_DIR, 'signed-validator-confirmation-services-request.json'),\n signed_request\n )", "def sign_transaction():\n data = request.get_json()\n\n try:\n tx = Transaction.from_dict(data)\n except TypeError:\n response = dict(message='Improper transaction json provided.')\n status_code = 400\n return jsonify(response), status_code\n\n signature = tx.sign(node.wallet.private_key_rsa)\n response = dict(signature=signature)\n return jsonify(response), 200", "def validate_transaction(self, tx, throw_exception=False):\n\n # 1. Validate signature\n isValid = signature.verify(tx.from_pk, tx.to_string_for_hashing(), tx.signature)\n if not isValid:\n error_msg = \"Signature not valid!\"\n if throw_exception:\n print(error_msg)\n raise Exception(error_msg)\n else:\n print(error_msg)\n return False\n\n # 2. 
Validate sender balance\n balance = get_balance(tx.from_pk, self.blocks)\n if tx.amount > balance:\n error_msg = \"Insufficient funds for this transaction!\"\n if throw_exception:\n print(error_msg)\n raise Exception(error_msg)\n else:\n print(error_msg)\n return False\n return True", "def verify(self, signature, body, external_aad, public_key):", "async def process_deposit(cls, transaction, server, locks):\n logger.debug(f\"processing transaction {transaction.id}\")\n if await cls.requires_trustline(transaction, server, locks):\n logger.debug(f\"transaction {transaction.id} requires trustline\")\n return\n\n logger.debug(f\"transaction {transaction.id} is not pending trust\")\n try:\n requires_multisig = await cls.requires_multisig(transaction)\n except NotFoundError:\n await sync_to_async(cls.handle_error)(\n transaction,\n f\"{transaction.asset.code} distribution account \"\n f\"{transaction.asset.distribution_account} does not exist\",\n )\n await maybe_make_callback_async(transaction)\n return\n except ConnectionError:\n await sync_to_async(cls.handle_error)(\n transaction,\n f\"Unable to connect to horizon to fetch {transaction.asset.code} \"\n \"distribution account signers\",\n )\n await maybe_make_callback_async(transaction)\n return\n if requires_multisig:\n await cls.save_as_pending_signatures(transaction, server)\n return\n logger.debug(f\"transaction {transaction.id} does not require multisig\")\n\n await cls.handle_submit(transaction, server, locks)", "def fundrawtransaction(self, given_transaction, *args, **kwargs):\n # just use any txid here\n vintxid = lx(\"99264749804159db1e342a0c8aa3279f6ef4031872051a1e52fb302e51061bef\")\n\n if isinstance(given_transaction, str):\n given_bytes = x(given_transaction)\n elif isinstance(given_transaction, CMutableTransaction):\n given_bytes = given_transaction.serialize()\n else:\n raise FakeBitcoinProxyException(\"Wrong type passed to fundrawtransaction.\")\n\n # this is also a clever way to not cause a side-effect in this function\n transaction = CMutableTransaction.deserialize(given_bytes)\n\n for vout_counter in range(0, self._num_fundrawtransaction_inputs):\n txin = CMutableTxIn(COutPoint(vintxid, vout_counter))\n transaction.vin.append(txin)\n\n # also allocate a single output (for change)\n txout = make_txout()\n transaction.vout.append(txout)\n\n transaction_hex = b2x(transaction.serialize())\n\n return {\"hex\": transaction_hex, \"fee\": 5000000}", "def add_transaction(self, recipient, sender, signature, amount=1.0, is_reciving=False):\n\n # if self.public_key == None:\n # return False\n transaction = Transaction(sender, recipient, signature, amount)\n if Verification.verify_transaction(transaction, self.get_balence):\n self.open_transactions.append(transaction)\n self.save_data()\n if not is_reciving:\n for node in self.peer_nodes:\n url = 'http://{}/broadcast-transaction'.format(node)\n try:\n response = requests.post(url, json={\n \"sender\": sender, \"recipient\": recipient, \"amount\": amount, \"signature\": signature})\n if response.status_code == 400 or response.status_code == 500:\n print(\"transaction declined, need resolving\")\n return False\n if response.status_code == 409:\n self.resolve_conflits = True\n except requests.exceptions.ConnectionError:\n continue\n return True\n return False", "def add_transaction(self, transaction, signature, client_public_key):\r\n # Check If transaction is already in the transaciton_pool\r\n if transaction not in self.transaction_pool:\r\n # Verify With All Other Nodes\r\n if 
self.verify_transaction(transaction, signature, client_public_key):\r\n # Encrypt the transaction\r\n client_public_key = load_pem_public_key(client_public_key, default_backend())\r\n encrypted_transaction = client_public_key.encrypt(\r\n json.dumps(transaction).encode(),\r\n padding.OAEP(\r\n mgf = padding.MGF1(algorithm=hashes.SHA256()),\r\n algorithm = hashes.SHA256(),\r\n label = None\r\n )\r\n )\r\n\r\n self.transaction_pool.append(str(encrypted_transaction))\r\n\r\n else: return False, self.transaction_pool # Return False if Verification fails\r\n\r\n # Return True if transaction was already in transaction_pool or if verification was successful and new transaction was added\r\n return True, self.transaction_pool", "async def verify_signature(self, message: BasePendingMessage) -> bool:\n\n if message.signature is None:\n LOGGER.warning(\"'%s': missing signature.\", message.item_hash)\n return False\n\n try:\n signature = json.loads(message.signature)\n sigdata = base58.b58decode(signature[\"signature\"])\n public_key = base58.b58decode(signature[\"publicKey\"])\n except ValueError:\n LOGGER.warning(\"Solana signature deserialization error\")\n return False\n\n if signature.get(\"version\", 1) != 1:\n LOGGER.warning(\n \"Unsupported signature version %s\" % signature.get(\"version\")\n )\n return False\n\n if message.sender != signature[\"publicKey\"]:\n LOGGER.warning(\"Solana signature source error\")\n return False\n\n try:\n verify_key = VerifyKey(public_key)\n verification_buffer = get_verification_buffer(message)\n verif = verify_key.verify(verification_buffer, signature=sigdata)\n result = verif == verification_buffer\n except BadSignatureError:\n result = False\n except Exception:\n LOGGER.exception(\"Solana Signature verification error\")\n result = False\n\n return result", "def isValid(self, public_key):\n if self.sentFrom is None:\n return True\n\n if not self.signature or not len(str(self.signature)):\n raise Exception(\"Transaction Not Signed\")\n\n valid = public_key.verify(self.signature, self.txHash.encode())\n return valid", "def Verify(self, signed_bytes, signature_b64):\r\n # Generate the PKCS1-v1_5 compatible message, which includes\r\n # magic ASN.1 bytes and padding:\r\n emsa_msg = self._MakeEmsaMessageSha256(signed_bytes,\r\n self.keypair.size())\r\n\r\n # Get putative signature:\r\n putative_signature = base64.urlsafe_b64decode(signature_b64.encode('utf-8'))\r\n putative_signature = number.bytes_to_long(putative_signature)\r\n\r\n # Verify signature given public key:\r\n return self.keypair.verify(emsa_msg, (putative_signature,))", "def check(self):\n if self.is_signed():\n data = self._document.read()\n hash_value = data[-self._append_size+1:-1]\n data = data[:-self._append_size]\n\n encrypted = self._encryptor.encrypt_cbc(data, self._init_vector)\n current_hash_value = encrypted[-16:]\n\n if current_hash_value != hash_value:\n print(\"Hash values did not matched!\")\n else:\n print(\"Hash values matched!\")\n else:\n print(\"The document is not signed!\")", "def sign (self, node, tx):\n\n signed = node.signrawtransactionwithwallet (tx[\"hex\"])\n\n res = node.decoderawtransaction (signed[\"hex\"])\n res.update (signed)\n\n return res", "def test_modify_transaction_after_signing(mocker):\n transaction_original = Transaction(\n chain=0,\n nonce=4_294_967_295,\n fee=57000,\n value=5_000_000,\n to_address=\"1H7NtUENrEbwSVm52fHePzBnu4W3bCqimP\",\n )\n\n transaction = transaction_original.sign(PRIVATE_KEY_1)\n transaction.value = 10_000_000\n\n assert 
transaction.validate() == False\n with pytest.raises(\n TransactionNotValid, match=errors.TRANSACTION_INVALID_SIGNATURE\n ):\n transaction.validate(raise_exception=True)", "def add_transaction(self, recipient, sender, amount, signature):\n\n if self.hosting_node == None:\n return None\n transaction = Transaction(sender, recipient, amount, signature)\n if Verification.verify_transaction(transaction, self.get_balance):\n self.__open_transactions.append(transaction)\n self.save_data()\n return True\n return False", "def verify(self):\n if not self.public_key:\n self.fetch_public_key()\n data = self.doc.find(\".//{http://salmon-protocol.org/ns/magic-env}data\").text\n sig = self.doc.find(\".//{http://salmon-protocol.org/ns/magic-env}sig\").text\n sig_contents = '.'.join([\n data,\n b64encode(b\"application/xml\").decode(\"ascii\"),\n b64encode(b\"base64url\").decode(\"ascii\"),\n b64encode(b\"RSA-SHA256\").decode(\"ascii\")\n ])\n sig_hash = SHA256.new(sig_contents.encode(\"ascii\"))\n cipher = PKCS1_v1_5.new(RSA.importKey(self.public_key))\n if not cipher.verify(sig_hash, urlsafe_b64decode(sig)):\n raise SignatureVerificationError(\"Signature cannot be verified using the given public key\")", "def who_signed_tx(tx, tx_in_idx, netcode='BTC'):\n tx_in = tx.txs_in[tx_in_idx]\n parent_tx_out_idx = tx_in.previous_index\n parent_tx_out_script = tx.unspents[tx_in_idx].script\n script_obj = script_obj_from_script(parent_tx_out_script)\n signed_by = []\n\n if type(script_obj) not in (ScriptPayToAddress, ScriptPayToPublicKey, ScriptMultisig):\n raise NoAddressesForScriptTypeError(\n 'unable to determine signing addresses for script type of parent tx {}[{}]'\n .format(b2h_rev(tx_in.previous_hash), parent_tx_out_idx))\n\n script = tx_in.script\n pc = 0\n while pc < len(script):\n opcode, data, pc = get_opcode(script, pc)\n if data is None:\n continue\n try:\n sig_pair, sig_type = parse_signature_blob(data)\n except (ValueError, TypeError, binascii.Error, UnexpectedDER):\n continue\n\n sig_hash = tx.signature_hash(parent_tx_out_script, parent_tx_out_idx, sig_type)\n\n for sec_key in script_obj.sec_keys:\n public_pair = sec_to_public_pair(sec_key)\n\n if secp256k1_generator.verify(public_pair, sig_hash, sig_pair):\n addr_pfx = address_prefix_for_netcode(netcode)\n addr = public_pair_to_bitcoin_address(public_pair, address_prefix=addr_pfx)\n signed_by.append((addr, sig_type))\n return signed_by", "def submit_transaction(self, sender_address, recipient_address, stock, quanitity, signature):\n print(\"self.transactions=\", len(self.transactions))\n\n transaction = OrderedDict({\n 'sender_address': sender_address,\n 'recipient_address': recipient_address,\n 'stock': stock,\n 'quantity': quanitity\n })\n\n verified = self.verify_signature(sender_address, signature, transaction)\n if verified:\n self.transactions.append(transaction)\n print('Added tranasaction successfully (len={})'.format(len(self.transactions)))\n self.mine()\n return len(self.chain) + 1\n else:\n raise Exception(\"Failed to add transaction to blockchain\")", "def add_transaction(self, recipient, sender, signature, amount=1.0):\n if not self.hosting_node:\n return False\n\n transaction = Transaction(sender, recipient, signature, amount)\n\n if not Verification.verify_transaction(transaction, self.get_balance):\n return False\n\n self.__open_transactions.append(transaction)\n self.save_data()\n return True", "def transaction(self, transaction):\n # Allow for a list of blocks..\n transaction = utils.request_type(transaction)\n\n res = 
r.get(self.url + self.tx_info + str(transaction))\n return self.execute(res)", "def verify_signature(self):\n if self.get_contact_key:\n sender_key = self.get_contact_key(self.sender_handle)\n else:\n sender_key = fetch_public_key(self.sender_handle)\n if not sender_key:\n raise NoSenderKeyFoundError(\"Could not find a sender contact to retrieve key\")\n MagicEnvelope(doc=self.doc, public_key=sender_key, verify=True)" ]
[ "0.5823398", "0.5507605", "0.5499924", "0.54724085", "0.543781", "0.5409771", "0.5338388", "0.5267575", "0.5263092", "0.5254972", "0.52403116", "0.52362233", "0.52072877", "0.5187738", "0.5178956", "0.51556087", "0.512451", "0.5109286", "0.51000565", "0.50987065", "0.50754654", "0.50459856", "0.5036543", "0.5028854", "0.49785432", "0.49505028", "0.49267557", "0.49242368", "0.49213523", "0.48961687" ]
0.5765218
1
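A short client-side sketch for the handler above: the caller obtains a signature for the transaction (for example from the node's signing helper shown in the negatives, or from a local wallet) and POSTs both pieces to the submit endpoint with broadcasting enabled. The host, port, and transaction fields below are assumptions; the payload keys ("transaction", "signature") and the `?broadcast=1` flag follow the handler itself.

```python
# Hypothetical client call for the submit_transaction handler above.
import requests

tx = {
    "sender_address": "<sender public key>",
    "recipient_address": "<recipient public key>",
    "amount": 10,
}
# Signature over the transaction, e.g. produced by the node's signing helper.
signature = "<signature string>"

resp = requests.post(
    "http://127.0.0.1:5000/transactions/submit?broadcast=1",
    json={"transaction": tx, "signature": signature},
)
print(resp.status_code, resp.json())
# Expected: 200 {"message": "Transaction added."} on success,
# 400 for an invalid transaction or signature, 202 if the network rejects it.
```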
Return pyarrow.Array view of a numpy ndarray. In floating arrays, all nan values are interpreted as nulls. In complex arrays, if real or imaginary part of an array item value is nan, the value is interpreted as null.
def pyarrow_array(arr, nan_to_null=False): import numpy as np import pyarrow as pa if nan_to_null and issubclass(arr.dtype.type, (np.floating, np.complexfloating)): isnan = np.isnan(arr) if isnan.any(): pa_nul = pa.py_buffer(get_bitmap(isnan)) return pa.Array.from_buffers(pa.from_numpy_dtype(arr.dtype), arr.size, [pa_nul, pa.py_buffer(arr)]) return pa.Array.from_buffers(pa.from_numpy_dtype(arr.dtype), arr.size, [None, pa.py_buffer(arr)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_array(self, fill_value: Optional[Any] = None) -> np.ndarray:\n if fill_value is None:\n fill_value = infer_nan(self.dtype)\n\n tmp = self.astype(float) if is_float(fill_value) else self\n return tmp.to_masked().filled(fill_value=fill_value)", "def astype(self, dtype):\n return NoneArray", "def _asarray(v):\n try:\n return np.asarray(v)\n except ValueError:\n return np.asarray(v, dtype=object)", "def xnd_xnd(arr, nan_to_null=False):\n import numpy as np\n import xnd\n xd = xnd.xnd.from_buffer(arr)\n if nan_to_null and issubclass(arr.dtype.type,\n (np.floating, np.complexfloating)):\n isnan = np.isnan(arr)\n if isnan.any():\n raise NotImplementedError('xnd view of numpy ndarray with nans')\n return xd", "def get_fixed_array():\n return np.array([[[np.nan, np.nan],\n [np.nan, np.nan],\n [np.nan, np.nan]],\n\n [[3., 7.],\n [2., 4.],\n [1., 16.]],\n\n [[17., 5.],\n [10., 0.],\n [14., 12.]],\n\n [[8., 6.],\n [13., 11.],\n [9., 15.]]])", "def asanyarray(a, dtype=None, order='C'):\n\n if not use_origin_backend(a):\n # if it is already dpnp.ndarray then same object should be returned\n if isinstance(a, dpnp.ndarray):\n return a\n\n if order != 'C':\n checker_throw_value_error(\"asanyarray\", \"order\", order, 'C')\n\n return array(a, dtype=dtype, order=order)\n\n return call_origin(numpy.asanyarray, a, dtype, order)", "def test_dtype_None(self):\n array = np.array([[0, 1, 2], [2, 1, 0]]).T\n self.assertTrue(to_ndarray(array, None, safe=True).flags.contiguous,\n msg='to_ndarray: Non contiguous arrays are not being consolidated when dtype is None')", "def from_array(\n cls, arr: np.ndarray, masked_value: Optional[Any] = None\n ) -> JaggedArray:\n if masked_value is None:\n masked_value = infer_nan(arr.dtype)\n\n if masked_value == np.nan:\n mask = np.isnan(arr)\n else:\n mask = np.equal(arr, masked_value)\n return cls._from_arr_and_mask(arr[~mask], mask)", "def scalararray(inp) -> np.ndarray:\n return np.array([None, inp], dtype=object)[[1]].reshape([])", "def nonans(array):\n return array[~np.isnan(array)]", "def __array__(self, dtype=None) -> np.ndarray:\n return self.values", "def matrix_to_array(x, nodata=None):\n\n s = np.shape(x)\n if nodata is None: # Nan\n ix = np.where(np.isfinite(x))\n else:\n ix = np.where(x != nodata)\n y = x[ix].copy()\n return y, ix, s", "def nans(shape, dtype=float):\n a = np.empty(shape, dtype)\n a.fill(np.nan)\n return a", "def tonumpy(self):\n import numpy\n from numpy import ma\n\n # initialize the return\n narray = None\n\n if None in self._data:\n\n # define a lambda function\n # to create the mask array\n make_mask = lambda x: x == None\n\n # create the numpy array,\n # making on the fly the mask\n narray = numpy.ma.array(self._data, mask=list(map(make_mask, self._data)))\n\n else:\n # convert the list to a numpy object\n narray = numpy.array(self._data)\n\n # return the numpy object\n return narray", "def _to_numpy_ndarray(cls, data):\n if isinstance(data, np.ndarray):\n return data\n arr = np.array(data, dtype=np.float)\n if len(arr.shape) == 1:\n arr = np.reshape(arr, newshape=(1, arr.shape[0]))\n return arr", "def __array__(self):\n return np.zeros(self.shape, self.dtype)", "def asarray(val, dtype=np.float64):\n # val is a list, tuple etc\n if not np.isscalar(val) and np.ndim(val) > 0:\n np_val = np.asarray(val, dtype=dtype)\n else:\n # val is a scalar number\n np_val = np.asarray([val], dtype=dtype)\n\n return np_val", "def __array_wrap__(self, out_arr, context=None): #pylint: disable=no-self-use, unused-argument\n if out_arr.shape != (3,):\n 
out_arr = out_arr.view(np.ndarray)\n return out_arr", "def pandas_series(arr, nan_to_null=False):\n import pandas as pd\n return pd.Series(arr, copy=False)", "def check_array(arr: Arrayable) -> np.ndarray:\n if isinstance(arr, np.ndarray):\n return arr\n return np.array(arr)", "def isna(self):\n # type: () -> np.ndarray\n return extract_isnull_bytemap(self.data)", "def isna(self):\n # type: () -> np.ndarray\n return extract_isnull_bytemap(self.data)", "def __new__(\n cls,\n input_array: Union[np.ndarray, Sequence],\n units: Union[Unit, str, None] = None,\n ) -> Any:\n\n arr = np.array(input_array, copy=True).view(cls)\n\n if isinstance(input_array, ValueArray) and units is None:\n arr.units = input_array.units\n else:\n arr.units = _units_init(cls, units)\n\n return arr", "def nanvarc(array_data, axis=0):\n\n mdatreal = np.ma.masked_array(array_data.real, np.isnan(array_data.real));\n varreal = np.var(mdatreal, axis=axis);\n mdatimag = np.ma.masked_array(array_data.imag, np.isnan(array_data.imag));\n varimag = np.var(mdatimag, axis=axis);\n retval = np.array( (varreal + 1j*varimag) );\n \n return retval;", "def _asfarray(x):\n if hasattr(x, \"dtype\") and x.dtype.char in numpy.typecodes[\"AllFloat\"]:\n # 'dtype' attribute does not ensure that the\n # object is an ndarray (e.g. Series class\n # from the pandas library)\n if x.dtype == numpy.half:\n # no half-precision routines, so convert to single precision\n return numpy.asarray(x, dtype=numpy.float32)\n return numpy.asarray(x, dtype=x.dtype)\n else:\n # We cannot use asfarray directly because it converts sequences of\n # complex to sequence of real\n ret = numpy.asarray(x)\n if ret.dtype == numpy.half:\n return numpy.asarray(ret, dtype=numpy.float32)\n elif ret.dtype.char not in numpy.typecodes[\"AllFloat\"]:\n return numpy.asfarray(x)\n return ret", "def _to_arraylike(data):\n _load_objects()\n if data is None:\n raise ValueError('Cannot convert None data.')\n return None\n if not isinstance(data, (ndarray, DataArray, DataFrame, Series, Index)):\n data = np.asarray(data)\n if not np.iterable(data):\n data = np.atleast_1d(data)\n return data", "def _asarray1d(arr, copy=False):\n if copy:\n return asarray(arr).flatten()\n else:\n return asarray(arr).ravel()", "def strip_array_wrappers(arry):\n curr = arry\n if curr.ndim == 0:\n if isinstance(curr[...], np.ndarray):\n return strip_array_wrappers(curr[...])\n return curr\n\n # there is a possibility for infinite looping\n # e.g. [np.ndarray, str, dict] would stay object array\n # impossible if homogeneous (implied by 1-element wrappers)\n while isinstance(curr[0], np.ndarray):\n if curr.shape[0] == 1:\n curr = curr[0]\n else:\n curr = np.array(tuple(curr))\n\n return curr", "def _align_np_datatype_for_array(array):\n return np.asarray(array, _numpy_datatype_from_nd4j_context())", "def _convert_to_np_array(inputs: Union[float, Tuple[float], np.ndarray], dim):\n outputs = None\n if isinstance(inputs, (tuple, np.ndarray)):\n outputs = np.array(inputs)\n else:\n outputs = np.full(dim, inputs)\n\n if len(outputs) != dim:\n raise ValueError(\"The inputs array has a different dimension {}\"\n \" than provided, which is {}.\".format(len(outputs), dim))\n\n return outputs" ]
[ "0.62539476", "0.6237079", "0.62334555", "0.61480075", "0.6110793", "0.61007154", "0.5992015", "0.5985517", "0.59757584", "0.5964086", "0.5952105", "0.5907571", "0.5905403", "0.58591914", "0.58547294", "0.584407", "0.58131367", "0.5769717", "0.5739238", "0.57200414", "0.56952465", "0.56952465", "0.56783426", "0.56588525", "0.56531376", "0.5647338", "0.564571", "0.563483", "0.56344956", "0.5632334" ]
0.83253616
0
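Quick usage sketch for `pyarrow_array` above: with `nan_to_null=True`, NaN slots in a floating-point array are exposed as nulls in the zero-copy Arrow view. This assumes pyarrow and numpy are installed and that `get_bitmap`, defined elsewhere in the same source, yields the Arrow validity bitmap for the NaN mask.

```python
# Usage sketch; the printed values are the intended results under the
# assumptions stated above.
import numpy as np

arr = np.array([1.0, np.nan, 3.0])

view = pyarrow_array(arr)                          # NaN kept as a float value
view_nulls = pyarrow_array(arr, nan_to_null=True)  # NaN exposed as null

print(view.null_count)         # expected: 0
print(view_nulls.null_count)   # expected: 1
print(view_nulls.to_pylist())  # expected: [1.0, None, 3.0]
```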
Return pandas.Series view of a numpy ndarray.
def pandas_series(arr, nan_to_null=False): import pandas as pd return pd.Series(arr, copy=False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def as_series(self, arraylike: Iterable) -> pd.Series:\n return pd.Series(arraylike, index=self.data.index)", "def to_series(self) -> pd.Series:\n df = self.to_dataframe(\"* values *\")\n dims = self.dims_list\n if len(dims) == 1:\n dims = dims[0]\n return df.set_index(dims)[\"* values *\"]", "def _get_column_as_pandas_series(self, key):\n result = self.getitem_array([key]).to_pandas().squeeze(axis=1)\n if not isinstance(result, pandas.Series):\n raise RuntimeError(\n f\"Expected getting column {key} to give \"\n + f\"pandas.Series, but instead got {type(result)}\"\n )\n return result", "def _view_(a):\r\n return a.view((a.dtype[0], len(a.dtype.names)))", "def series_view(self, **kwargs): # noqa: PR02\n return SeriesDefault.register(pandas.Series.view)(self, **kwargs)", "def as_series(self) -> \"pd.Series\":\n import pandas as pd\n\n data = {\"_row_id\": self.id, \"_row_num\": self.num, **self.as_dict()}\n series = pd.Series(data)\n return series", "def __array__(self, copy=None):\n return self.data.to_pandas().values", "def at(self, x):\n x = array(x)\n if x.ndim == 1:\n return self._method(x)\n elif x.ndim == 2:\n return Series(\n index=MultiIndex.from_arrays(\n arrays=x.T,\n names=[f'x{num}' for num in range(1, self._num_dims + 1)]\n ), data=self._method(x), name=f'{self._name}({self._parent})'\n )", "def as_row_vector(array):\n if array.ndim != 1:\n raise ValueError(\"Array must be 1D\")\n idx = _new_attribute_label('idx', array)\n ds = array.datashape.copy()\n ds.dim_low = [0] + list(ds.dim_low)\n ds.dim_high = [0] + list(ds.dim_high)\n ds.chunk_size = list(ds.chunk_size) * 2\n ds.chunk_overlap = list(ds.chunk_overlap) * 2\n ds.dim_names = [idx] + list(ds.dim_names)\n return array.redimension(ds.schema)", "def to_real_series(self, data: pd.Series) -> pd.Series:\n ...", "def __array__(self, dtype=None) -> np.ndarray:\n return self.values", "def row_to_array(r):\n a = np.ma.array([i for i in r.as_void()])\n return a", "def asarray(self):\n from numpy import asarray\n return asarray(self)", "def values(self):\n arr = self.view(np.ndarray).copy()\n return arr", "def __array__(self):\n return pa.column(\"dummy\", self.data).to_pandas().values", "def _to_ndarray(data):\n return np.atleast_1d(getattr(data, 'values', data))", "def to_array_or_spmatrix(x):\n if is_SparseDataFrame(x):\n x = x.to_coo()\n elif is_sparse_dataframe(x) or is_sparse_series(x):\n x = x.sparse.to_coo()\n elif isinstance(\n x, (sparse.spmatrix, np.ndarray, numbers.Number)\n ) and not isinstance(x, np.matrix):\n pass\n elif isinstance(x, list):\n x_out = []\n for xi in x:\n try:\n xi = to_array_or_spmatrix(xi)\n except TypeError:\n # recursed too far\n pass\n x_out.append(xi)\n # convert x_out from list to array\n x = np.array(x_out, dtype=_check_numpy_dtype(x_out))\n else:\n x = toarray(x)\n return x", "def __array__(self):\n return np.asarray(self.data)", "def _convert_df_to_series(df):\n if isinstance(df, pd.DataFrame) and df.shape[1] == 1:\n return df.iloc[:, 0]\n elif isinstance(df, pd.DataFrame) and df.shape[1] > 1:\n raise TypeError('DataFrame cannot be converted to a Series as it contains more than 1 column.')\n return df", "def get_array(obj, col=None):\n if isinstance(obj, Series) and (col is None or obj.name == col):\n arr = obj._values\n else:\n assert col is not None\n icol = obj.columns.get_loc(col)\n assert isinstance(icol, int)\n arr = obj._get_column_array(icol)\n if isinstance(arr, BaseMaskedArray):\n return arr._data\n return arr", "def transform_series(obj):\n vals = obj.values\n return 
transform_array(vals)", "def getSeries(self) -> Series:\n\n return self.__series", "def as_numpy(a):\n if isinstance(a, mx.nd.NDArray):\n a = a.asnumpy()\n return a", "def convert_dataframe_to_array(df_or_series):\n if isinstance(df_or_series, pd.DataFrame) or isinstance(\n df_or_series, pd.Series\n ):\n dat = np.array(df_or_series)\n if len(dat.shape) == 1:\n return dat[:, np.newaxis]\n else:\n return dat\n if isinstance(df_or_series, np.ndarray):\n return df_or_series\n else:\n raise TypeError(\n f\"InputData error:\\n\"\n f\"type should be of np.ndarray and is currently type: {type(df_or_series)}\"\n )", "def as_Ser(self,):\n return pd.Series(self.v, index=self.features)", "def _sin2view(X):\n X = np.asarray([np.sin(x) for x in X])\n return X", "def object_values_series() -> pd.Series:\n series = pd.Series(data=list(string.ascii_uppercase), index=range(101,127))\n return series", "def df_to_array(datasample):\r\n return np.array(datasample)", "def sdc_reindex_series_overload(arr, index, name, by_index):\n\n range_indexes = isinstance(index, RangeIndexType) and isinstance(by_index, RangeIndexType)\n int64_indexes = isinstance(index, Int64IndexType) and isinstance(by_index, Int64IndexType)\n data_dtype, index_dtype = arr.dtype, index.dtype\n data_is_str_arr = isinstance(arr.dtype, types.UnicodeType)\n\n def sdc_reindex_series_impl(arr, index, name, by_index):\n\n # no reindexing is needed if indexes are equal\n if range_indexes == True: # noqa\n equal_indexes = numpy_like.array_equal(index, by_index)\n elif int64_indexes == True: # noqa\n equal_indexes = numpy_like.array_equal(index, by_index)\n else:\n equal_indexes = False\n if (index is by_index or equal_indexes):\n return pandas.Series(data=arr, index=by_index, name=name)\n\n if data_is_str_arr == True: # noqa\n _res_data = [''] * len(by_index)\n res_data_nan_mask = numpy.zeros(len(by_index), dtype=types.bool_)\n else:\n _res_data = numpy.empty(len(by_index), dtype=data_dtype)\n\n # build a dict of self.index values to their positions:\n map_index_to_position = Dict.empty(\n key_type=index_dtype,\n value_type=types.int32\n )\n\n for i, value in enumerate(index):\n if value in map_index_to_position:\n raise ValueError(\"cannot reindex from a duplicate axis\")\n else:\n map_index_to_position[value] = i\n\n index_mismatch = 0\n for i in numba.prange(len(by_index)):\n val = by_index[i]\n if val in map_index_to_position:\n pos_in_self = map_index_to_position[val]\n _res_data[i] = arr[pos_in_self]\n if data_is_str_arr == True: # noqa\n res_data_nan_mask[i] = isna(arr, i)\n else:\n index_mismatch += 1\n if index_mismatch:\n msg = \"Unalignable boolean Series provided as indexer \" + \\\n \"(index of the boolean Series and of the indexed object do not match).\"\n raise IndexingError(msg)\n\n if data_is_str_arr == True: # noqa\n res_data = create_str_arr_from_list(_res_data)\n str_arr_set_na_by_mask(res_data, res_data_nan_mask)\n else:\n res_data = _res_data\n\n return pandas.Series(data=res_data, index=by_index, name=name)\n\n return sdc_reindex_series_impl", "def to_numpy(x):\r\n return x.squeeze().detach().cpu().numpy()" ]
[ "0.75008994", "0.6564477", "0.6380644", "0.63323957", "0.6261605", "0.6144519", "0.61209697", "0.60778224", "0.59853053", "0.5960021", "0.57288134", "0.5726076", "0.56944275", "0.56930923", "0.5679621", "0.56723833", "0.5619051", "0.560027", "0.55883527", "0.5580834", "0.5561013", "0.5541724", "0.55306506", "0.55244935", "0.5506983", "0.5492964", "0.5491586", "0.54877764", "0.54757714", "0.5460889" ]
0.7094817
1
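Usage sketch for `pandas_series` above: with `copy=False` the Series normally wraps the ndarray's buffer directly, so in-place edits to the array are visible through the Series. Exact behaviour can vary with pandas copy-on-write settings, so treat the printed value as the typical outcome rather than a guarantee.

```python
# Usage sketch; view semantics may differ under pandas copy-on-write.
import numpy as np

arr = np.arange(5, dtype=float)
s = pandas_series(arr)

arr[0] = 42.0
print(s.iloc[0])  # typically 42.0 — the Series shares the ndarray's buffer
```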
Return xnd.xnd view of a numpy ndarray.
def xnd_xnd(arr, nan_to_null=False): import numpy as np import xnd xd = xnd.xnd.from_buffer(arr) if nan_to_null and issubclass(arr.dtype.type, (np.floating, np.complexfloating)): isnan = np.isnan(arr) if isnan.any(): raise NotImplementedError('xnd view of numpy ndarray with nans') return xd
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_x(self):\n return self.x[:self.nump, :]", "def xarray(shape, yx=[0, 0], dtype=int):\n\n y, x = np.indices(shape, dtype)[-2:]\n y -= yx[0]\n x -= yx[1]\n\n return x", "def xvec(self):\n return np.array([self.x, self.y])", "def get_X():\n metadata = get_dataset_metadata(['shape_X', 'type_X'])\n return get_ndarray(name='X_original',\n arr_shape=metadata['shape_X'],\n arr_type=metadata['type_X'])", "def createX(self):\n X = np.vstack((np.ones(self.N), self.x))\n return X.transpose()", "def xarray(shape, yx=[0, 0], rot=0, dtype=int):\n\n from ..util import rotmat\n\n y, x = np.indices(shape, dtype)[-2:]\n y = y - yx[0]\n x = x - yx[1]\n\n if rot != 0:\n R = rotmat(rot)\n x = x * R[0, 0] + y * R[0, 1]\n\n return x", "def to_numpy(x):\r\n return x.squeeze().detach().cpu().numpy()", "def __call__(self, xvalues: np.ndarray) -> np.ndarray:\r\n return self.model_data_1d_via_xvalues_from(xvalues=xvalues)", "def __call__(self, xvalues: np.ndarray) -> np.ndarray:\r\n return self.model_data_1d_via_xvalues_from(xvalues=xvalues)", "def xndarray_like(c, data=None):\n if data is None:\n data = np.zeros_like(c.data)\n return xndarray(data, c.axes_names, c.axes_domains.copy(),\n c.value_label, c.meta_data)", "def x(self) -> np.ndarray:\n return self.array[:, 1] if self.scalar_vector else self.array[:, 0]", "def to_xarray(self):\n\n xarray_object = xr.Dataset()\n for var, data in self.items(deep=True):\n xarray_object[var] = data.to_xarray()\n\n xarray_object.attrs.update(**self.attrs)\n\n return xarray_object", "def xvec(self):\n return self._xvec", "def state_as_x(cls,\n state: float) -> np.array:\n xs = np.array([state])\n return xs.reshape([1, xs.shape[0]])", "def x(self):\n return self[:, 0]", "def x_nondim(self, x):\n x[0:4] /= self.r_scale\n return x", "def _indarray(np_array):\n return skil_client.INDArray(\n ordering='c',\n shape=list(np_array.shape),\n data=np_array.reshape(-1).tolist()\n )", "def x(self):\n return np.array([f.x for f in self])", "def tensor2np(x):\n return x.cpu().numpy()", "def to_2dnp_array(X):\r\n if isinstance(X, np.ndarray):\r\n if X.ndim == 1:\r\n return X.reshape((-1, 1))\r\n if X.ndim == 2:\r\n return X\r\n if isinstance(X, Number):\r\n X = [X]\r\n X = np.array(X)\r\n X = X.reshape([-1, np.prod(X.shape) // X.shape[0]])\r\n return X", "def to_xarray(self, **kwargs):\n if self._AccessPoint not in self.valid_access_points:\n raise InvalidFetcherAccessPoint(\n \" Initialize an access point (%s) first.\"\n % \",\".join(self.Fetchers.keys())\n )\n return self.load().index.to_xarray(**kwargs)", "def tilda_x(X):\n return np.c_[np.ones(X.shape[0]), X]", "def xcoords(self) -> xr.IndexVariable:\n xcoords = self._obj[self.x_dim]\n if self.x_dim not in self._obj.coords:\n for key in list(self._obj.coords.keys()):\n if key.startswith(self.x_dim):\n xcoords = self._obj.coords[key]\n break\n if xcoords.ndim == 2 and list(xcoords.dims).index(self.x_dim) != 1:\n raise ValueError(\n \"Invalid raster: dimension order wrong. 
Fix using\"\n f'\".transpose(..., {self.y_dim}, {self.x_dim})\"'\n )\n if xcoords.size < 2 or (xcoords.ndim == 2 and xcoords.shape[1] < 2):\n raise ValueError(f\"Invalid raster: less than 2 cells in x_dim {self.x_dim}\")\n return xcoords", "def np(self, subset=None, dtype=None):\n\n def set_0(n, nn):\n if n is None:\n return 0\n if n < 0:\n nr = nn + n\n else:\n nr = n\n if nr < 0 or nr >= nn:\n raise VoxException(_t(\"Invalid start ({}) for axis dimension ({})\").format(n, nn))\n return nr\n\n def set_d(o, n, nn):\n if n is None:\n return nn - o\n return n\n\n if subset:\n start, dimension = subset\n else:\n start = (0, 0, 0)\n dimension = None\n\n # start\n if start is None:\n x0 = y0 = z0 = 0\n else:\n x0, y0, z0 = start\n x0 = set_0(x0, self.nx)\n y0 = set_0(y0, self.ny)\n z0 = set_0(z0, self.nz)\n\n # dimensions\n if dimension is None:\n nx = self.nx - x0\n ny = self.ny - y0\n nz = self.nz - z0\n else:\n nx, ny, nz = dimension\n nx = set_d(x0, nx, self.nx)\n ny = set_d(y0, ny, self.ny)\n nz = set_d(z0, nz, self.nz)\n\n gxpg = self.gxpg\n if dtype is None:\n dtype = self._dtype\n if self.is_vectorvox:\n shape = (nz, ny, nx, 3)\n dim = 3\n else:\n shape = (nz, ny, nx)\n dim = 1\n npv = np.empty(shape, dtype=dtype)\n vv = gxvv.GXvv(dtype=dtype, dim=dim)\n vv.length = nx\n\n if self.is_depth:\n z0 = self.nz - (z0 + nz)\n i = 1\n for iz in range(z0, z0 + nz):\n for iy in range(y0, y0 + ny):\n gxpg.read_row_3d(iz, iy, x0, nx, vv.gxvv)\n npv[nz - i, iy - y0, :] = vv.np\n i += 1\n\n else:\n for iz in range(z0, z0 + nz):\n for iy in range(y0, y0 + ny):\n gxpg.read_row_3d(iz, iy, x0, nx, vv.gxvv)\n npv[iz - z0, iy - y0, :] = vv.np\n\n return npv", "def as_row_vector(array):\n if array.ndim != 1:\n raise ValueError(\"Array must be 1D\")\n idx = _new_attribute_label('idx', array)\n ds = array.datashape.copy()\n ds.dim_low = [0] + list(ds.dim_low)\n ds.dim_high = [0] + list(ds.dim_high)\n ds.chunk_size = list(ds.chunk_size) * 2\n ds.chunk_overlap = list(ds.chunk_overlap) * 2\n ds.dim_names = [idx] + list(ds.dim_names)\n return array.redimension(ds.schema)", "def data(self):\n return getXarray(self.__mdsnode__,strict=self.__strict__)", "def get_xrange(self) -> np.array:\n # todo: ensure this functions work as well for y_values\n lower, upper = self.get_xrange_indices()\n return self.x[lower, upper + 1]", "def dask_data_to_xarray(self, df, var=None):\n\n lazy_values = [dask.delayed(df[dim].unique()) for dim in self.DIMS]\n dims_values = [future for future in dask.compute(*lazy_values)]\n shape = tuple([len(x) for x in dims_values])\n\n var_array = df[var].values\n var_array.compute_chunk_sizes()\n var_array_reshape = var_array.reshape(shape)\n tuple_data = (self.DIMS, var_array_reshape)\n\n coords_dict = dict(zip(self.DIMS, dims_values))\n #values_dicts = dict(zip(extract_vars, values_arrays))\n\n xarr = xr.DataArray(var_array_reshape, \n coords=dims_values,\n dims=self.DIMS)\n\n return xarr.sortby(['lat', 'lon'])", "def _view_(a):\r\n return a.view((a.dtype[0], len(a.dtype.names)))", "def dfdb(x: np.array) -> np.array:\n return x" ]
[ "0.64285505", "0.6316943", "0.60496306", "0.60137516", "0.5971376", "0.5949019", "0.59254515", "0.5917849", "0.5917849", "0.58760124", "0.5871833", "0.5835483", "0.5809457", "0.57799095", "0.5735253", "0.5714293", "0.5669488", "0.562255", "0.5617021", "0.5584969", "0.5580597", "0.55753416", "0.5571868", "0.549703", "0.5474697", "0.54672176", "0.54486215", "0.5441745", "0.54186445", "0.5408126" ]
0.7452788
0
Creates a new gene randomly inheriting attributes from its parents.
def crossover(self, gene2):
    assert self.key == gene2.key
    new_gene = self.__class__(self.key)
    for a in self._gene_attributes:
        if random() > 0.5:
            setattr(new_gene, a.name, getattr(self, a.name))
        else:
            setattr(new_gene, a.name, getattr(gene2, a.name))
    return new_gene
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def procreate(cls, *geneSeeds):\n assert len(geneSeeds) > 1, \"Specify at least 2 seeds\"\n # None gene leads to averaging; it occurs less often with many seeds to keep diversity\n genePool = geneSeeds + (None,)\n child = cls()\n for name, prop in properties(MushroomProps).items():\n gene = random.choice(genePool)\n if gene is None:\n if prop[\"type\"] is bpy.props.FloatVectorProperty:\n val = [0]*prop['size']\n for i in range(prop['size']):\n val[i] = sum(optionalKey(p, name, prop['default'])[i] for p in geneSeeds)/len(genePool)\n child.__setattr__(name, val)\n else: # TODO other prop types will need special treatment too\n child.__setattr__(name, sum(optionalKey(p, name, prop['default']) for p in geneSeeds)/len(genePool))\n else:\n child.__setattr__(name, optionalKey(gene, name, prop['default']))\n return child", "def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)", "def _init_inherit_physical_attributes(self):\n config = self.person.cosmos.config\n mother, father = self.person.biological_mother, self.person.biological_father\n parents = (mother.body, father.body)\n # Handedness\n if random.random() < config.heritability_of_handedness:\n takes_after = random.choice(parents)\n self.left_handed = Feature(value=takes_after.left_handed, inherited_from=takes_after)\n self.right_handed = Feature(value=takes_after.right_handed, inherited_from=takes_after)\n # Hustle\n if random.random() < config.heritability_of_hustle:\n takes_after = random.choice(parents)\n inherited_hustle = takes_after.hustle\n mutated_hustle = normal(inherited_hustle, config.hustle_mutation_sd)\n self.hustle = Feature(value=mutated_hustle, inherited_from=takes_after)\n else:\n pass # TODO SET UP GENERATING FROM NOTHING", "def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)\n self.personal_best = self.genes.copy", "def newGeneration(self):\n for i in range(0, len(self.population)):\n [ind1, ind2] = self.randomSelection()\n child = self.crossover(ind1, ind2)\n self.population[i].setGene(child)\n self.mutation(self.population[i])", "def createGene(self):\n # Beginning and end of the alphabet for random gene generation\n Astart = 97\n Zend = 122\n return \"\".join(map(lambda i: chr(random.randint(Astart, Zend)), range(random.randint(4, 8)))).upper()", "def random_gene(self):\n size = random.randint(1,50)\n gene = \"\"\n for i in range(0,size,1):\n gene+=random.choice(self.instructions)\n return gene", "def createRandom(protein):\n\n protein.occupied = []\n protein.aminoList = []\n\n for id in range(protein.proteinLength):\n protein.aminoList.append(Amino(id, protein.proteinString[id]))\n\n # Place the first and second amino acid\n if id in {0, 1}:\n thisCoordinate = [0, id]\n if protein.plane == \"3D\":\n thisCoordinate.append(0)\n protein.aminoList[id].coordinate = thisCoordinate\n protein.occupied.append(thisCoordinate)\n else:\n prevCo = protein.aminoList[(id - 1)].coordinate\n posCo = protein.getSurroundCo(prevCo, occupied=False)\n\n # If there are no surrounding coordinates available stop the folding\n if not posCo:\n protein.stability = 0\n return False\n\n coordinate = random.choice(posCo)\n protein.aminoList[id].coordinate = coordinate\n protein.occupied.append(coordinate)\n\n protein.stabilityUpdate(protein.aminoList[id])\n\n return True", "def generate_new_node(self, parent, rand_node):\n dist = np.linalg.norm(parent.state - rand_node.state)\n if dist < self.Delta: # In case rand_node is very close to parent\n new_state = rand_node.state\n else:\n 
new_state = parent.state + (rand_node.state - parent.state) / dist * self.Delta\n new_node = Node(new_state)\n return new_node", "def create_next_gen(self, parents_sreprs_couple):\n child0, child1 = self.recombine(parents_sreprs_couple[0], parents_sreprs_couple[1])\n if random.random() < self.mutate_prob:\n child0 = self.mutate(child0)\n if random.random() < self.mutate_prob:\n child1 = self.mutate(child1)\n\n return child0, child1", "def mutate_random(self, n=1):\n mutated_dna = self._dna\n for i in range(n):\n mutated_dna = mutate(mutated_dna)\n return Gene(mutated_dna, self._exon_regions)", "def random_selection(self, fitness, num_parents):\n\n if self.gene_type_single == True:\n parents = numpy.empty((num_parents, self.population.shape[1]), dtype=self.gene_type[0])\n else:\n parents = numpy.empty((num_parents, self.population.shape[1]), dtype=object)\n\n rand_indices = numpy.random.randint(low=0.0, high=fitness.shape[0], size=num_parents)\n\n for parent_num in range(num_parents):\n parents[parent_num, :] = self.population[rand_indices[parent_num], :].copy()\n\n return parents, rand_indices", "def _create_random_ca(cls):\n random_ca = entity.CustomAttribute()\n random_ca.ca_type = random.choice(AttributesTypes.ALL_TYPES)\n random_ca.title = cls._generate_title(random_ca.ca_type)\n random_ca.definition_type = random.choice(objects.all_objects)\n return random_ca", "def create_initial_graph(self):\n # Initialise weights\n for link in self.gene_links:\n link.weight = random.uniform(weight_init_min, weight_init_max)\n # Initialise biases\n for node in self.gene_nodes:\n node.bias = random.uniform(bias_init_min, bias_init_max)\n if node.can_modify:\n node.act_func = self.act_set.get_random_activation_func()\n if node.act_func in [activations.gaussian, activations.sin]:\n if node.act_func.__name__[0] == \"g\":\n node.freq += random.uniform(-guass_freq_adjust, guass_freq_adjust)\n elif node.act_func.__name__[0] == \"s\":\n node.freq += random.uniform(-sin_freq_adjust, sin_freq_adjust)\n node.amp += random.uniform(-func_amp_adjust, func_amp_adjust)\n node.vshift += random.uniform(-func_vshift_adjust, func_vshift_adjust)", "def _endx(self, parents):\n ALPHA = (1.-2*0.35**2)**0.5/2.\n BETA = 0.35/(self.n_gene-1)**0.5\n\n child = np.empty(self.n_gene+1)\n\n t1 = (parents[1, :self.n_gene]-parents[0, :self.n_gene]) / 2.\n t2 = np.random.normal(scale=ALPHA) * (\n parents[1, :self.n_gene] - parents[0, :self.n_gene]\n )\n t3 = np.sum(\n np.random.normal(scale=BETA, size=self.n_gene)[:, np.newaxis]\n * (\n parents[2:, :self.n_gene] - (\n np.sum(parents[2:, :self.n_gene], axis=0) / self.n_gene\n )\n ), axis=0\n )\n child[:self.n_gene] = t1 + t2 + t3\n\n return child", "def copy(self):\n new_genome = Genome(self.pop)\n #new_genome.node_genes = [gene.copy() for gene in self.node_genes]\n new_genome.node_genes = [n for n in self.node_genes]\n new_genome.link_genes = [gene.copy() for gene in self.link_genes]\n new_genome.fitness = self.fitness\n new_genome.adj_fitness = self.adj_fitness\n new_genome.species_hint = self.species_hint\n return new_genome", "def init_gene():\n gene_details=dict(\n id = '', \n anno_id = [],\n confgenes_id = [],\n name = '',\n source = '',\n gene_info = {},\n alias = '',\n name2 = [],\n strand = '',\n chr = '',\n chr_num = [],\n paralogs = [],\n start = '',\n stop = '',\n transcripts = [],\n transcript_info = [],\n transcript_status = [],\n transcript_valid = [],\n exons = [],\n exons_confirmed = [],\n cds_exons = [],\n utr5_exons = [],\n utr3_exons = [],\n tis = [],\n tis_conf = 
[],\n tis_info = [],\n cdsStop = [],\n cdsStop_conf = [],\n cdsStop_info = [],\n tss = [],\n tss_info = [],\n tss_conf = [],\n cleave = [],\n cleave_info = [],\n cleave_conf = [],\n polya = [],\n polya_info = [],\n polya_conf = [],\n is_alt = [],\n is_alt_spliced = 0,\n is_valid = [],\n transcript_complete = [],\n is_complete = [],\n is_correctly_gff3_referenced = '',\n splicegraph = []\n )\n return gene_details", "def procreate(self, parents: List[Chromosome]) -> List[Chromosome]:\r\n super(UniformCrossoverProcreator, self).procreate(parents)\r\n # TODO: cleanup to a single clean block within the 80 margins\r\n # generate the left index as a series of 1s and 0s with the 1s\r\n # distributed with probability P = probability\r\n left_index = random.choice(2, p=[self.probability, 1 - self.probability],\r\n size=len(parents[0].genes))\r\n # the right index is the inverse (probablity) of the left index\r\n right_index = 1 - left_index\r\n # multiplying the indecies 0s out the removed genes from either side\r\n # then adding these two vectors gives the child\r\n return [parents[0].copy(genes=(parents[0].genes * left_index) + (parents[1].genes * right_index))]", "def generate_random_individual():\n genotype = []\n ### Your code here\n return {'genotype': genotype, 'fitness': None }", "def initialize_dna(self):\n return np.random.rand(1, self.n_genes) * 2 - 1", "def generate_children(count, parent, life_min, life_max):\n\n types = parent.__subclasses__()\n for i in range(count):\n yield { \n \"class_name\": random.choice(types).__name__, \n \"lifespan\": random.randint(life_min, life_max)\n }", "def generate_child(self, parent1, parent2):\n if np.random.random() < self.crossover_prob:\n # crossover\n x, y = np.sort(np.random.randint(len(parent1), size=2))\n return np.vstack((parent1[:x], parent2[x:y], parent1[y:]))\n else:\n # mutation\n return self.generate_conformations()[0]", "def _undx(self, parents):\n child = np.empty(self.n_gene+1)\n ALPHA = 0.5\n BETA = 0.35 / (self.n_gene**0.5)\n\n d1 = np.linalg.norm(\n parents[1, :self.n_gene] - parents[0, :self.n_gene]\n )\n d2 = np.linalg.norm(\n (\n parents[2, :self.n_gene] - parents[0, :self.n_gene]\n ) - (\n np.dot(\n (\n parents[2, :self.n_gene]-parents[0, :self.n_gene]\n ), (\n parents[1, :self.n_gene]-parents[0, :self.n_gene]\n )\n ) / (\n d1 ** 2\n )\n ) * (\n parents[1, :self.n_gene]-parents[0, :self.n_gene]\n )\n )\n e1 = parents[0, :self.n_gene] / d1\n\n t = np.random.normal(scale=BETA, size=self.n_gene) * d2\n t = t - np.dot(t, e1) * e1\n t = t + np.random.normal(scale=ALPHA) * d1 * e1\n\n child[:self.n_gene] = t + \\\n (parents[0, :self.n_gene] + parents[1, :self.n_gene]) / 2.\n\n return child", "def random_plush_gene(self):\n atom = random.choice(list(self.atom_generators))\n return self.atom_to_plush_gene(atom)", "def mutate(genome):\n mutated_genome = copy.deepcopy(genome) # make a copy of the DNA to mutate\n seed = random.randint(0,3)\n if len(mutated_genome) == 0: seed = 0\n if seed == 0:\n mutate_chromosome(mutated_genome)\n elif seed == 1:\n mutate_point(mutated_genome)\n elif seed == 2:\n mutate_color(mutated_genome)\n else: #seed ==3:\n mutate_opacity(mutated_genome)\n return mutated_genome", "def createGeneticInstance(self, context):\n genetic_instance = GeneticInstance(ParametricLSystem(self.seed))\n fromBlenderToGeneticInstance(self,genetic_instance)\n return genetic_instance", "def blend_crossover(parent_1, parent_2):\n\talpha = 0.5 # ref Eshelmann & Schafer\n\n\tchild_genotype = np.zeros((parent_1.num_genes,))\n\tfor i in 
range(parent_1.num_genes):\n\t\tdifference = abs(parent_1.genotype[i] - parent_2.genotype[i])\n\t\tbound_1 = min(parent_1.genotype[i], parent_2.genotype[i]) - alpha * difference\n\t\tbound_2 = max(parent_1.genotype[i], parent_2.genotype[i]) + alpha * difference\n\t\tchild_genotype[i] = np.random.uniform(bound_1, bound_2)\n\n\treturn child_genotype", "def generate_genotype(self):\n genes = []\n for i in range(self.n_genes):\n genes.append(self.Gene(n_bases=self.n_bases))\n self.genes = genes", "def mutate(self, child):\n for i in range(0, self.chromosome_length):\n if random.randint(1, 100) <= self.mutation_chance:\n child[i] = self.random_gene()\n return child", "def _make_random_genome(evo_config):\n\n # create random genome by creating chromosomes for box size and movement\n return _make_size_dict(evo_config), _make_move_pattern(_make_limb_dict(), evo_config)" ]
[ "0.6713792", "0.66021925", "0.6351296", "0.6316457", "0.62280685", "0.61375886", "0.60927796", "0.6087185", "0.5995881", "0.5975843", "0.5962058", "0.5929928", "0.59042895", "0.5847486", "0.5844697", "0.57921326", "0.5765903", "0.5764469", "0.576404", "0.5721603", "0.5670374", "0.56677324", "0.5660242", "0.5655611", "0.5630574", "0.55549365", "0.5542951", "0.5523887", "0.55111146", "0.550265" ]
0.6677358
1
Converts an SDK generator object to a list of dictionaries.
def _sdk_object_to_list(object):
    result_list = []
    for item in object:
        result_list.append(_get_sdk_object_dict(item))
    return result_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def feed_dict_generator(self):\n pass", "def feed_dict_generator(self):\n pass", "def generate(self) -> Dict[str, Any]:\n raise NotImplementedError", "def __init__( self, generator):\n DictObject.__init__( self, generator.generate_dict())", "def meta(self):\n return list(self.generator.meta)", "def codebook_json_data_factory() -> List[Dict[str, Any]]:\n codebook_data = [\n {\n Features.CODEWORD: [\n {Indices.ROUND.value: 0, Indices.CH.value: 0, Features.CODE_VALUE: 1},\n {Indices.ROUND.value: 1, Indices.CH.value: 1, Features.CODE_VALUE: 1}\n ],\n Features.TARGET: \"GENE_A\"\n },\n {\n Features.CODEWORD: [\n {Indices.ROUND.value: 0, Indices.CH.value: 2, Features.CODE_VALUE: 1},\n {Indices.ROUND.value: 1, Indices.CH.value: 1, Features.CODE_VALUE: 1}\n ],\n Features.TARGET: \"GENE_B\"\n },\n ]\n return codebook_data", "def get_dicts(self, clean=False):\n return list(self.iter_dicts(clean=clean))", "def serialize(self, object: VocabularyHandle) -> Tuple[Dict, List]:\n return object.to_dict(), list(object.values)", "def _objects(self):\n for d in self._dicts_with_ids():\n yield d['id'], tuple(d[k] for k in self.fields)", "def as_list(gen):\n return list(gen())", "def list_items(self) -> List[Dict[str, Any]]:\n return [c.to_dict() for c in self._objects.values()]", "def asdict():\n pass", "def generate_object_specs(self):\n return [[] for _ in xrange(self.batch_size)]", "def _dict_items(typingctx, d):\n resty = types.DictItemsIterableType(d)\n sig = resty(d)\n codegen = _iterator_codegen(resty)\n return sig, codegen", "def getGenerators(self) -> list:\n return self.state[GENERATORS]", "def call(self) -> List[Dict]:", "def generate(self):\n return []", "def to_dict(self) -> List[Dict[str, Any]]:\n return [x.to_dict() for x in self.inputs]", "def get_objects_data(self):\n return dict(result=self.objects)", "def to_obj(self) -> Dict[str, Union[List[str],\n List[Dict[str, str]],\n List[HintRowObjType]]]:\n\n return {\n \"sources\": self.sources,\n \"source_parameters\": [\n sp.to_obj()\n for sp\n in self.source_parameters\n ],\n \"hints\": [h.to_obj() for h in self.hints],\n }", "def get_dict(self):\n return", "def _define_generators(self):\n\t\treturn {\n\t\t \"transaction_id\" : Mgcp._generate_uint32,\n\t\t \"connection_id\" : Mgcp._generate_uint32,\n\t\t \"request_id\" : Mgcp._generate_uint32,\n\t\t \"timestamp\" : Mgcp._generate_timestamp\n\t\t}", "def serialise(self):\n return {\n 'id': self.id,\n 'name': self.name,\n 'items': [i.serialise for i in self.items]\n }", "def getDict(cls, obj):\n\t\treturn obj.__dict__", "def _dict_values(typingctx, d):\n resty = types.DictValuesIterableType(d)\n sig = resty(d)\n codegen = _iterator_codegen(resty)\n return sig, codegen", "def dict(self):\n return objToDict(self)", "def __iter__(self):\n return dict(self.parameters)", "def package_dict_items():\n for idx, (key, py_subobj) in enumerate(py_obj.items()):\n # Obtain the raw string representation of this key\n key_base_type = key.__class__.__name__.encode(\"utf8\")\n if isinstance(key,str):\n if not _str_slashes.search(key):\n yield r'\"{}\"'.format(key),py_subobj,{'key_idx':idx,'key_base_type':key_base_type},kwargs\n continue\n elif isinstance(key,bytes):\n if not _byte_slashes.search(key):\n try:\n h_key = key.decode(\"utf8\")\n except UnicodeError: # pragma no cover\n pass\n else:\n yield r'b\"{}\"'.format(h_key),py_subobj,{'key_idx':idx,'key_base_type':key_base_type},kwargs\n continue\n elif key_base_type in dict_key_types_dict:\n h_key = \"{!r}\".format(key)\n if not _str_slashes.search(h_key):\n 
yield h_key,py_subobj,{'key_idx':idx,'key_base_type':key_base_type},kwargs\n continue\n sub_node_name = key_value_pair_name.format(idx)\n yield sub_node_name,(key,py_subobj),{'key_idx':idx,'key_base_type':b'key_value'},kwargs", "def to_dict(self):\n rtn_dict = {}\n for key in self:\n rtn_dict[key] = getattr(self._cpp_obj, self._getter)(key)\n return rtn_dict", "def list(cls) -> t.Iterable[SDict]:\n endpoint: t.Optional[str] = cls.endpoint\n\n def process_field(v):\n if isinstance(v, dict):\n return json.dumps(v, indent=True)\n return v\n\n while endpoint:\n r = Resource(endpoint=endpoint)\n items = r.get()\n # filter the items, ordering as needed\n for x in items.results:\n yield {k: process_field(x[k]) for k in cls.list_fields if k in x}\n endpoint = items.next if items.next else None" ]
[ "0.5913769", "0.5913769", "0.5789118", "0.5718109", "0.5672597", "0.565475", "0.5629378", "0.5629203", "0.55589277", "0.55574167", "0.5539112", "0.55310833", "0.5509818", "0.54935837", "0.5449725", "0.54130375", "0.5365224", "0.5355766", "0.5316992", "0.52876484", "0.52859634", "0.52640826", "0.52633125", "0.52619034", "0.52508634", "0.5249172", "0.52342176", "0.5213992", "0.5212049", "0.521064" ]
0.6143716
0
Converts an SDK object to a dictionary. Fixes any SDK imposed object oddities.
def _get_sdk_object_dict(object):
    item_dict = object.to_dict()
    if 'is_admin_state_up' in item_dict:
        item_dict['admin_state_up'] = item_dict['is_admin_state_up']
    return item_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def asdict():\n pass", "def to_dict(cls) -> dict:\n raise NotImplementedError()", "def cast_to_dict(self, obj):\n if type(obj) is dict:\n return obj\n elif type(obj) is tuple or type(obj) is list:\n # convert to dictionary\n return dict(zip(obj[0::2], obj[1::2]))\n else:\n print ('Invalid class (%s) for object. Trying to convert to'\n ' dict. Should be either dict, list or tuple.') % type(obj)\n print \"object is:\"\n pp.pprint(obj)\n traceback.print_stack()\n sys.exit(1)", "def _to_dict(self) -> dict:\n pass", "def realized(self):\n result = {\"Object\": True}\n for key in self.__dict__:\n if isinstance(self.__dict__[key], Object):\n result[key] = self.__dict__[key].asdict()\n else:\n result[key] = self.__dict__[key]\n return result", "def to_obj(self):\n return dict()", "def getDict(cls, obj):\n\t\treturn obj.__dict__", "def todict(obj):\n if isinstance(obj, str):\n return obj\n elif isinstance(obj, enum.Enum):\n return str(obj)\n elif isinstance(obj, dict):\n return dict((key, todict(val)) for key, val in obj.items())\n elif isinstance(obj, collections.Iterable):\n return [todict(val) for val in obj]\n elif hasattr(obj, \"__slots__\"):\n return todict(\n dict((name, getattr(obj, name)) for name in getattr(obj, \"__slots__\"))\n )\n elif hasattr(obj, \"__dict__\"):\n keys = vars(obj)\n if \"_sa_instance_state\" in keys:\n del keys[\"_sa_instance_state\"]\n return todict(vars(obj))\n return obj", "def to_dict(self) -> dict:", "def object_to_dict(obj, strip):\n t = type(obj)\n d = copy.copy(obj.__dict__)\n for member in strip:\n if member in d:\n del d[member]\n d['_class'] = t.__name__\n d['_module'] = t.__module__\n return d", "def convert_to_dict(self):\n # Populate the dictionary with object meta data\n obj_dict = {\"__class__\": self.__class__.__name__, \"__module__\": self.__module__}\n # Populate the dictionary with object properties\n obj_dict.update(self.__dict__)\n if self.n_to is not None:\n obj_dict['n_to'] = self.n_to\n if self.s_to is not None:\n obj_dict['s_to'] = self.s_to\n if self.e_to is not None:\n obj_dict['e_to'] = self.e_to\n if self.w_to is not None:\n obj_dict['w_to'] = self.w_to\n if self.u_to is not None:\n obj_dict['u_to'] = self.u_to\n if self.d_to is not None:\n obj_dict['d_to'] = self.d_to\n return obj_dict", "def to_dict(cls, obj):\n\n if isinstance(obj, iotbx_pdbh.model):\n labs = ('model')\n info = cls._format_mo(obj)\n elif isinstance(obj, iotbx_pdbh.chain):\n labs = ('model','chain')\n info = cls._format_ch(obj)\n elif isinstance(obj, iotbx_pdbh.residue_group):\n labs = ('model','chain','resseq','icode')\n info = cls._format_rg(obj)\n elif isinstance(obj, iotbx_pdbh.atom_group):\n labs = ('model','chain','resseq','icode','resname','altloc')\n info = cls._format_ag(obj)\n elif isinstance(obj, iotbx_pdbh.conformer):\n labs = ('model','chain','altloc')\n info = cls._format_co(obj)\n elif isinstance(obj, iotbx_pdbh.residue):\n labs = ('model','chain','altloc','resname','resseq','icode')\n info = cls._format_re(obj)\n elif isinstance(obj, iotbx_pdbh.atom):\n raise Exception('Not implemented')\n labs = ('model','chain','resseq','icode','resname','altloc','name')\n if hasattr(obj, 'chain_id'): info = cls._format_al(obj)\n else: info = cls._format_at(obj)\n elif isinstance(obj, iotbx_pdbh.atom_with_labels):\n labs = ('model','chain','resseq','icode','resname','altloc','name')\n info = cls._format_al(obj)\n else:\n raise Exception('Invalid object type provided: {}'.format(type(obj)))\n\n assert len(labs) == len(info)\n return dict(zip(labs, info))", "def 
to_dict(self):\n result = {}\n\n for attr, _ in six.iteritems(self.swagger_types):\n if hasattr(self, attr):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n if issubclass(ObjectStoreAccessPolicy, dict):\n for key, value in self.items():\n result[key] = value\n\n return result", "def to_dict(self):\n dict_obj = self.__dict__.copy()\n if dict_obj['_sa_instance_state']:\n del dict_obj['_sa_instance_state']\n return dict_obj", "def to_dict(self) -> Dict[str, Union[dict, str, int]]:\n d = {}\n\n for k, v in self.__dict__.items():\n if k in self._exempt or v is None:\n continue\n if '__dataclass_fields__' in dir(v):\n d[k] = asdict(v)\n elif inspect.isclass(v) and issubclass(v, Base):\n d[k] = v.to_dict()\n elif hasattr(v, 'value'): # we assume this is an Enum value.\n d[k] = v.value\n else:\n d[k] = v\n return d", "def convert_object_to_dictionary(obj):\n # type: (Any) -> Dict[str, str]\n # hydra ConfigStore special case:\n if obj.__class__.__module__.startswith(\"hydra.core\") and hasattr(obj, \"repo\"):\n return obj.repo\n\n dic = {}\n for attr in dir(obj):\n if attr.startswith(\"__\") or attr.startswith(\"to_\"):\n continue\n value = getattr(obj, attr)\n if callable(value):\n continue\n try:\n dic[attr] = str(value)\n except Exception:\n pass\n return dic", "def _asdict(self) -> Dict[Text, Any]:\n return self.as_base_types()", "def to_dict(self):\r\n raise NotImplementedError", "def to_dict(self) -> Dict[str, Any]:\n data = asdict(self)\n # Convert Enums to their values\n for k in data.keys():\n if isinstance(data[k], Enum):\n data[k] = data[k].value\n # Extract the _id and revision\n obj_id = data.pop('obj_id')\n revision = data.pop('revision')\n return {\n '_id': obj_id,\n 'revision': revision,\n 'credential': data,\n }", "def to_dict(self) -> Dict:\n _dict = {}\n if hasattr(self, 'type') and self.type is not None:\n _dict['type'] = self.type\n if hasattr(self, 'updates') and self.updates is not None:\n _dict['updates'] = self.updates\n return _dict", "def classToDict(obj=None):\n\tif obj == None:\n\t\treturn {}\n\n\t_obj = {}\n\t_obj.update(obj.__dict__)\n\n\treturn _obj", "def as_dict(self):\n return dict(self.as_OD())", "def dict(self):\n return objToDict(self)", "def convert_to_dict(obj):\r\n \r\n obj_dict = {}\r\n \r\n # Populate the dictionary with object properties\r\n obj_dict.update(obj.__dict__)\r\n \r\n return obj_dict", "def to_dict(self):\n result = {}\n\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n if issubclass(EncryptionKeyManagerInfo, dict):\n for key, value in self.items():\n result[key] = value\n\n return result", "def as_dict(self) -> dict:\n tmp_dict = self.__dict__\n return tmp_dict", "def as_dict(self):\n return dict((\n (k, getattr(self, k)) for k in 
self.get_flat_type_info(self)\n if getattr(self, k) is not None\n ))", "def derive_error_dict(self, error_obj):\n tdict = dict(error_obj.__dict__)\n tdict.pop(\"_sa_instance_state\", None)\n return tdict", "def _dict_clean(self, obj: list[tuple[str, Any]]) -> dict[str, Any]:\n\n result = {}\n for k, v in obj:\n if v is None and k in ['revenue', 'value', 'tags', 'decisions']:\n continue\n else:\n result[k] = v\n return result", "def to_dict(self):\n raise NotImplementedError" ]
[ "0.64806324", "0.62011474", "0.6196252", "0.61945736", "0.6145522", "0.6134164", "0.6110898", "0.61091757", "0.6108938", "0.6048648", "0.5996235", "0.5911936", "0.58756256", "0.5848238", "0.5840572", "0.5840459", "0.5820397", "0.5816648", "0.58105606", "0.58056337", "0.58008724", "0.57990676", "0.5793177", "0.579063", "0.5789064", "0.5759752", "0.5740618", "0.5737214", "0.5735781", "0.5717708" ]
0.68956816
0
Poll for the status of the load balancer. Polls for the status of the load balancer and calls a function when the status changes to a specified state.
def poll_loadbalancer_status(request, loadbalancer_id, callback,
                             from_state='PENDING_UPDATE', to_state='ACTIVE',
                             callback_kwargs=None):
    interval = conf.HORIZON_CONFIG['ajax_poll_interval'] / 1000.0
    status = from_state
    while status == from_state:
        time.sleep(interval)
        conn = get_sdk_connection(request)
        lb = conn.load_balancer.get_load_balancer(loadbalancer_id)
        status = lb.provisioning_status
    if status == to_state:
        kwargs = {'loadbalancer_id': loadbalancer_id}
        if callback_kwargs:
            kwargs.update(callback_kwargs)
        callback(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def poll_for_active_status(self, server_id, req_status=\"ACTIVE\"):\n status = \"BUILDING\"\n iteration = 30\n while status.upper() != req_status.upper() \\\n or status.upper() != \"ERROR\":\n server_info = self.show_server(server_id)\n if not isinstance(server_info, dict):\n return\n status = server_info['status']\n LOG_OBJ.debug(\"Server status : %s\" % status)\n if status.upper() in [req_status.upper(), 'ERROR']:\n break\n LOG_OBJ.debug(\"Waiting till server goes to %s state...\"\n % req_status)\n time.sleep(20)\n iteration -= 1\n if not iteration:\n err_msg = \"The server:%s is NOT in %s state\" \\\n \"within 10 minutes\" % (server_id, status)\n LOG_OBJ.error(err_msg)\n return \"POLL_TIME_EXCEEDED\"\n\n LOG_OBJ.debug(\"Server becomes %s\" % status)\n\n return status", "def poll(self):\n self.poll_function(self.connection)", "def wait_for_load_balancers_status(self, load_balancer_id,\n provisioning_status='ACTIVE',\n operating_status='ONLINE',\n is_delete_op=False):\n\n interval_time = self.build_interval\n timeout = self.build_timeout\n end_time = time.time() + timeout\n lb = None\n while time.time() < end_time:\n try:\n lb = self.show_load_balancer(load_balancer_id)\n if not lb:\n if is_delete_op:\n break\n else:\n raise Exception(\n LB_NOTFOUND.format(lb_id=load_balancer_id))\n lb = lb.get(self.resource, lb)\n if (lb.get('provisioning_status') == provisioning_status and\n lb.get('operating_status') == operating_status):\n break\n time.sleep(interval_time)\n except exceptions.NotFound as e:\n if is_delete_op:\n break\n else:\n raise e\n else:\n if is_delete_op:\n raise exceptions.TimeoutException(\n _(\"Waited for load balancer {lb_id} to be deleted for \"\n \"{timeout} seconds but can still observe that it \"\n \"exists.\").format(\n lb_id=load_balancer_id,\n timeout=timeout))\n else:\n raise exceptions.TimeoutException(\n _(\"Wait for load balancer ran for {timeout} seconds and \"\n \"did not observe {lb_id} reach {provisioning_status} \"\n \"provisioning status and {operating_status} \"\n \"operating status.\").format(\n timeout=timeout,\n lb_id=load_balancer_id,\n provisioning_status=provisioning_status,\n operating_status=operating_status))\n return lb", "def _update_status(self, new_status):\r\n old_status = self._status\r\n self._status = new_status\r\n for listener in self._listeners:\r\n # Calling user-defined callback.\r\n self._thread_pool.submit(\r\n listener.on_status_change(\r\n self, new_status.value, old_status.value))", "def refresh_status() -> None:\n ...", "def wait_for_status(self, status):\n code = self.instance.state['Code']\n while code != status:\n time.sleep(3)\n self.instance.reload()\n code = self.instance.state['Code']", "def wait_for_lb_resource(octavia_show_func, resource_id,\n provisioning_status=None, operating_status=None):\n provisioning_status = provisioning_status or 'ACTIVE'\n resp = octavia_show_func(resource_id)\n logging.info(resp['provisioning_status'])\n assert resp['provisioning_status'] == provisioning_status, (\n 'load balancer resource has not reached '\n 'expected provisioning status: {}'\n .format(resp))\n if operating_status:\n logging.info(resp['operating_status'])\n assert resp['operating_status'] == operating_status, (\n 'load balancer resource has not reached '\n 'expected operating status: {}'.format(resp))\n\n return resp", "def poll(self):\n\n query = f\"sacct -j {self.jobid} -o State -n -X -P\"\n if self.cluster:\n query += f\" --clusters={self.cluster}\"\n\n cmd = BuildTestCommand(query)\n cmd.execute()\n\n 
logger.debug(f\"Querying JobID: '{self.jobid}' Job State by running: '{query}'\")\n job_state = cmd.get_output()\n self._state = \"\".join(job_state).rstrip()\n logger.debug(f\"JobID: '{self.jobid}' job state:{self._state}\")", "def update_status(self, callback_function_param=False):\n self.send_message(\n {MESSAGE_TYPE: TYPE_GET_STATUS}, callback_function=callback_function_param\n )", "def _get_loadbalancer_statuses(self, lb_id):\n resource_path = \"%s/%s/%s/statuses\" % (RESOURCE_PREFIX,\n LBS_RESOURCE,\n lb_id)\n try:\n statuses = self.client.retrieve_resource(\n \"GLOBAL\", resource_path)[1]['dict']\n except ncc_client.NCCException as e:\n if e.is_not_found_exception():\n return {\"lb_statuses\": None}\n else:\n return None\n statuses = statuses[\"statuses\"]\n return {\"lb_statuses\": statuses}", "def process_status_poll(self, status):\n self.log.debug('process-status-poll', status=status)\n\n if self._admin_state != AdminState.ENABLED:\n return\n\n # Get new/missing from the discovered ONU leaf. Stale ONUs from previous\n # configs are now cleaned up during h/w re-sync/reflow.\n\n new, rediscovered_onus = self._process_status_onu_discovered_list(status.discovered_onu)\n\n # Process newly discovered ONU list and rediscovered ONUs\n\n for serial_number in new | rediscovered_onus:\n reactor.callLater(0, self.add_onu, serial_number, status)\n\n # Process LOS list\n self._process_los_alarms(frozenset(status.ont_los))\n\n # Process ONU info. Note that newly added ONUs will not be processed\n # until the next pass\n\n self._update_onu_status(status.onus)", "def poll_health():\n global timesCalled\n\n # Poll /health\n session = requests.Session()\n retry = Retry(connect=3, backoff_factor=0.5)\n adapter = HTTPAdapter(max_retries=retry)\n session.mount('http://', adapter)\n response = session.get(health_url)\n\n # Check HTTP status code\n status_code = response.status_code\n if status_code != status_ok:\n exit(1)\n\n # Get metrics values\n metrics = response.json()['metrics']\n requestLatencyValues.append(metrics['requestLatency'])\n dbLatencyValues.append(metrics['dbLatency'])\n cacheLatencyValues.append(metrics['cacheLatency'])\n\n # If 60 seconds has passed, send data to STDOUT\n timesCalled += 1\n if timesCalled == 6:\n output_data()\n\n timesCalled = 0\n requestLatencyValues.clear()\n dbLatencyValues.clear()\n cacheLatencyValues.clear()", "def test_lbheartbeat(self):\n pass", "def on_lz_status_update(self, func):\n self._set_event_handler(\"lz\")\n self._events.on_lz_status_update(func)", "def polling_call(self) -> global___Snippet.ClientCall:", "def monitor_behavior_status(self):\n self._flexbe_status_subscriber = rospy.Subscriber('/flexbe/status', BEStatus, self.callback_flexbe_status)", "def refresh_status(self):\n\n pass", "async def run(self):\n current_status = \"Init\"\n while self.expected_status != current_status:\n await asyncio.sleep(1)\n async with aiohttp.ClientSession() as session:\n async with session.get(self.url) as response:\n api_call_result = await response.json()\n current_status = api_call_result[\"status\"]\n \n # Send our single event and then we're done\n yield TriggerEvent(api_call_result)", "def updateStatusCallback(self, cb):\n self.statusCallback = cb", "def poll(self):\n while self.running and reactor._started and not reactor._stopped:\n self.check_response_queue()\n sleep(0.5)", "def track_job_to_completion(ip_address, headers, job_id, state):\n\tjob_status_map = {\n\t\t\"2020\": \"Scheduled\",\n\t\t\"2030\": \"Queued\",\n\t\t\"2040\": 
\"Starting\",\n\t\t\"2050\": \"Running\",\n\t\t\"2060\": \"Completed\",\n\t\t\"2070\": \"Failed\",\n\t\t\"2090\": \"Warning\",\n\t\t\"2080\": \"New\",\n\t\t\"2100\": \"Aborted\",\n\t\t\"2101\": \"Paused\",\n\t\t\"2102\": \"Stopped\",\n\t\t\"2103\": \"Canceled\"\n\t}\n\tstatus_mapping = {\n\t\t\"On\": \"Powered On\",\n\t\t\"Off\": \"Powered Off\",\n\t\t\"Cold Boot\": \"Power Cycle\",\n\t\t\"Warm Boot\": \"Reset\",\n\t\t\"ShutDown\": \"Shutdown\"\n\t}\n\n\tmax_retries = 20\n\tsleep_interval = 30\n\tfailed_job_status = [2070, 2090, 2100, 2101, 2102, 2103]\n\tjob_url = 'https://%s/api/JobService/Jobs(%s)' % (ip_address, job_id)\n\tloop_ctr = 0\n\tjob_incomplete = True\n\tprint(\"Polling %s to completion ...\" % job_id)\n\twhile loop_ctr < max_retries:\n\t\tloop_ctr += 1\n\t\ttime.sleep(sleep_interval)\n\t\tjob_resp = requests.get(job_url, headers=headers, verify=False)\n\t\tif job_resp.status_code == 200:\n\t\t\tjob_status = str((job_resp.json())['LastRunStatus']['Id'])\n\t\t\tjob_status_str = job_status_map[job_status]\n\t\t\tprint(\"Iteration %s: Status of %s is %s\" %\n\t\t\t (loop_ctr, job_id, job_status_str))\n\t\t\tif int(job_status) == 2060:\n\t\t\t\tjob_incomplete = False\n\t\t\t\tprint(\"%s operation successful\" %status_mapping[state])\n\t\t\t\tbreak\n\t\t\telif int(job_status) in failed_job_status:\n\t\t\t\tjob_incomplete = False\n\t\t\t\tif job_status_str == \"Warning\":\n\t\t\t\t\tprint(\"Completed with errors\")\n\t\t\t\telse:\n\t\t\t\t\tprint(\"%s operation failed\" %status_mapping[state])\n\t\t\t\tjob_hist_url = str(job_url) + \"/ExecutionHistories\"\n\t\t\t\tjob_hist_resp = requests.get(job_hist_url, headers=headers, verify=False)\n\t\t\t\tif job_hist_resp.status_code == 200:\n\t\t\t\t\tget_execution_detail(job_hist_resp, headers, job_hist_url)\n\t\t\t\tbreak\n\t\telse:\n\t\t\tprint(\"Unable to poll status of %s - Iteration %s \" % (job_id, loop_ctr))\n\tif job_incomplete:\n\t\tprint(\"Job %s incomplete after polling %s times...Check status\" %\n\t\t (job_id, max_retries))", "def toggle_polling(self):\n self.polling = not self.polling\n if not self.polling:\n # print('In toggle polling')\n self._stop_loop_feedback()\n self._start_loop_poll() if self.polling else self._stop_loop_poll()", "def status_callback():\n if args['retire_idle']:\n return False\n\n return True", "def _StatusUpdateThreadMain(self):\n while self._status_update_active:\n self._UpdateStatus()\n time.sleep(self._status_update_interval)", "def status_callback(self, data):\n\n print \"arm status callback\", data.data\n if data.data == \"busy\" or data.data == \"error\":\n self.status = 0\n elif data.data == \"free\":\n self.status = 1", "async def get_status():", "def wait_for_status(name, status, read_method, resource_type=None, **kwargs):\n sleep_time = CONF.kubernetes.status_poll_interval\n retries_total = CONF.kubernetes.status_total_retries\n\n commonutils.interruptable_sleep(CONF.kubernetes.start_prepoll_delay)\n\n i = 0\n while i < retries_total:\n resp = read_method(name=name, **kwargs)\n resp_id = resp.metadata.uid\n current_status = resp.status.phase\n if resp.status.phase != status:\n i += 1\n commonutils.interruptable_sleep(sleep_time)\n else:\n return\n if i == retries_total:\n raise exceptions.TimeoutException(\n desired_status=status,\n resource_name=name,\n resource_type=resource_type,\n resource_id=resp_id or \"<no id>\",\n resource_status=current_status,\n timeout=(retries_total * sleep_time))", "def get_status(self, state):\n raise NotImplementedError", "async def _watch_status(self, job_id, 
job_paths):\n status_path = job_paths['status.json']\n\n watcher = aionotify.Watcher()\n watcher.watch(status_path, aionotify.Flags.CLOSE_WRITE)\n await watcher.setup(self.loop)\n try:\n while True:\n try:\n await self._read_status(job_id, job_paths)\n await watcher.get_event()\n self.logger.debug(f'Detected status change for job {job_id}')\n except concurrent.futures.CancelledError:\n # Break loop (likely normal exit through task cancellation)\n break\n except Exception: # pylint: disable=broad-except\n self.logger.exception(f'Exception while watching status of job {job_id}')\n finally:\n watcher.unwatch(status_path)\n watcher.close()", "def wait_state_change(required_state):\n def cb_decorator(f):\n def cb_func(*args):\n self = args[0]\n f(*args)\n while (self.state != required_state):\n pa_mainloop_prepare(self._main_loop, PULSEAUDIO_TIMEOUT)\n pa_mainloop_poll(self._main_loop)\n if (pa_mainloop_dispatch(self._main_loop) <= 0):\n raise Exception('State Change Timed Out')\n return cb_func\n return cb_decorator" ]
[ "0.62133944", "0.6175639", "0.6137733", "0.6020755", "0.5786416", "0.5756918", "0.57512337", "0.56928927", "0.568958", "0.567798", "0.5551411", "0.5530037", "0.5486613", "0.5446558", "0.54423386", "0.5410045", "0.5408748", "0.5402765", "0.53912175", "0.5387205", "0.5353605", "0.535157", "0.53397554", "0.533454", "0.5327991", "0.53267807", "0.5323874", "0.5308468", "0.5306412", "0.52809685" ]
0.8122314
0
Create a new l7 policy.
def create_l7_policy(request, **kwargs):
    data = request.DATA
    conn = get_sdk_connection(request)
    l7_policy = conn.load_balancer.create_l7_policy(
        action=data['l7policy']['action'],
        admin_state_up=data['l7policy'].get('admin_state_up'),
        description=data['l7policy'].get('description'),
        listener_id=kwargs['listener_id'],
        name=data['l7policy'].get('name'),
        position=data['l7policy'].get('position'),
        redirect_pool_id=data['l7policy'].get('redirect_pool_id'),
        redirect_url=data['l7policy'].get('redirect_url'),
    )
    return _get_sdk_object_dict(l7_policy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_l7_rule(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.create_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def post(self, request):\n kwargs = {'listener_id': request.DATA.get('parentResourceId')}\n return create_l7_policy(request, **kwargs)", "def post(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n return create_l7_rule(request, **kwargs)", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def create_ltl_policy_goal(ltl_formula):\n return ExecutePolicyExtendedGoal(spec = create_ltl_task_spec(ltl_formula))", "def create_firewall_policy(self, body=None):\r\n return self.post(self.firewall_policies_path, body=body)", "def create_ikepolicy(self, body=None):\r\n return self.post(self.ikepolicies_path, body=body)", "def policy_create(request, **kwargs):\n body = {'policy': kwargs}\n policy = neutronclient(request).create_qos_policy(body=body).get('policy')\n return QoSPolicy(policy)", "def create_policy(self, fn_inputs):\n\n # determine if the policy is already in place\n response, err_msg = self._get_policy_by_sha256(fn_inputs.get('reaqta_sha256'))\n if err_msg:\n return {}, err_msg\n\n policy_info = response.json()\n if policy_info.get('result'):\n return {}, 'A policy already exists for this file hash: {0}. 
<a href=\"{1}\" target=\"blank\">{1}</a>'.format(\n fn_inputs.get('reaqta_sha256'),\n self.make_linkback_url(policy_info['result'][0]['id'], POLICY_DETAILS))\n\n params = {\n \"sha256\": fn_inputs.get('reaqta_sha256'),\n \"title\": fn_inputs.get('reaqta_policy_title', ''),\n \"description\": fn_inputs.get('reaqta_policy_description', ''),\n \"disable\": not fn_inputs.get('reaqta_policy_enabled', True),\n \"block\": fn_inputs.get('reaqta_policy_block', False),\n \"enabledGroups\": [],\n \"disabledGroups\": []\n }\n\n # collect all the group names and find the groupIds\n if fn_inputs.get('reaqta_policy_included_groups'):\n group_name_list = [ group.strip() for group in fn_inputs.get('reaqta_policy_included_groups', \"\").split(',') ]\n group_id_list = self.get_group_ids(group_name_list)\n if group_id_list:\n params['enabledGroups'] = group_id_list\n\n if fn_inputs.get('reaqta_policy_excluded_groups'):\n group_name_list = [ group.strip() for group in fn_inputs.get('reaqta_policy_excluded_groups', \"\").split(',') ]\n group_id_list = self.get_group_ids(group_name_list)\n if group_id_list:\n params['disabledGroups'] = group_id_list\n\n LOG.debug(\"create_policy: %s\", params)\n url = urljoin(POLICY_URI, \"trigger-on-process-hash\")\n return self.api_call(\"POST\", url, params)", "def _build_policy_from_labels(labels: LabelContainer) -> ClusterUpgradePolicyV1:\n policy_labelset = build_labelset(labels, ClusterUpgradePolicyLabelSet)\n return ClusterUpgradePolicyV1(\n workloads=policy_labelset.workloads,\n schedule=policy_labelset.schedule,\n conditions=ClusterUpgradePolicyConditionsV1(\n soakDays=policy_labelset.soak_days,\n mutexes=policy_labelset.mutexes,\n sector=policy_labelset.sector,\n blockedVersions=policy_labelset.blocked_versions,\n ),\n )", "def create_policy_request():\n return {\n 'public_key':\n r'BBLewg4VqLR38b38daE7Fj\\/uhr543uGrEpyoPFgmFZK6EZ9g2XdK\\/i65RrSJ6sJ96aXD3DJHY3Me2GJQO9\\/ifjE=',\n 'label':\n 'Integration Test Policy',\n 'operations': [{\n 'sensor_id': 10,\n 'action': 'SHARE',\n }, {\n 'sensor_id': 53,\n 'action': 'BIN',\n 'bins': [30.0, 60.0, 90.0]\n }, {\n 'sensor_id': 55,\n 'action': 'MOVING_AVG',\n 'interval': 300\n }]\n }", "def __init__(self, eid: str, name: str, weekly_salary: int):\n pay.SalaryPolicy.__init__(self, weekly_salary)\n super().__init__(eid, name)", "def create_link_level_policy(parent_mo, link_level_policy, **args):\n args = args['optional_args'] if 'optional_args' in args.keys() else args\n # Create mo\n\n if is_valid_key(args, 'atuo_negotiation'):\n if args['atuo_negotiation'] or args['atuo_negotiation'] == 'on':\n args['atuo_negotiation'] = 'on'\n elif not args['atuo_negotiation'] or args['atuo_negotiation'] == 'off':\n args['atuo_negotiation'] = 'off'\n\n fabric_hifpol = HIfPol(parent_mo, link_level_policy,\n autoNeg=get_value(args, 'atuo_negotiation', DEFAULT_AUTO_NEGOTIATION),\n speed=get_value(args, 'speed', DEFAULT_SPEED),\n linkDebounce=get_value(args, 'link_debounce_interval', DEFAULT_LINK_DEBOUNCE_INTERVAL),\n )\n return fabric_hifpol", "def gen_network_policy(project, entries):\n pol = NetworkPolicy(name='default',\n parent_obj=project,\n network_policy_entries=entries)\n return pol", "def create_policy(env, policy_type, policy_weights_file=None):\n input_size = env.observation_space.shape[0]\n output_size = env.action_space.shape[0]\n action_low = env.action_space.low\n action_high = env.action_space.high\n policy = policy_type(input_size=input_size,\n output_size=output_size,\n action_high=action_high,\n action_low=action_low)\n if 
policy_weights_file:\n policy.load_model(policy_weights_file)\n return policy", "def create(self, params):\n return self.make_client_call('create_load_balancer_policy', params)", "def adapter_policy_create(handle, name, descr=\"\", parent_dn=\"org-root\"):\n\n from ucsmsdk.mometa.adaptor.AdaptorHostEthIfProfile import \\\n AdaptorHostEthIfProfile\n\n obj = handle.query_dn(parent_dn)\n if not obj:\n raise ValueError(\"org '%s' does not exist\" % parent_dn)\n\n mo = AdaptorHostEthIfProfile(parent_mo_or_dn=obj, name=name, descr=descr)\n handle.add_mo(mo, modify_present=True)\n handle.commit()\n return mo", "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def create_lb_health_check_policy(self, lbruleid, description, \n healthythreshold=None, intervaltime=None,\n pingpath=None, responsetimeout=None,\n unhealthythreshold=None): \n params = {'command':'createLBHealthCheckPolicy',\n 'lbruleid':lbruleid,\n 'description':description}\n\n if healthythreshold:\n params['healthythreshold'] = healthythreshold \n if intervaltime:\n params['intervaltime'] = intervaltime \n if pingpath:\n params['pingpath'] = pingpath \n if responsetimeout:\n params['responsetimeout'] = responsetimeout \n if unhealthythreshold:\n params['unhealthythreshold'] = unhealthythreshold\n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['createlbhealthcheckpolicyresponse']['jobid']\n self.logger.debug('Start job - createLBHealthCheckPolicy: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def test_create_hyperflex_ext_fc_storage_policy(self):\n pass", "def __init__(self):\n self.id = None\n \"\"\"\"the description of the healthcheck policy\"\"\"\n self.description = None\n \"\"\"\"is policy for display to the regular user\"\"\"\n self.fordisplay = None\n \"\"\"\"Amount of time between health checks\"\"\"\n self.healthcheckinterval = None\n \"\"\"\"Number of consecutive health check success before declaring an instance healthy\"\"\"\n self.healthcheckthresshold = None\n \"\"\"\"the pingpath of the healthcheck policy\"\"\"\n self.pingpath = None\n \"\"\"\"Time to wait when receiving a response from the health check\"\"\"\n self.responsetime = None\n \"\"\"\"the state of the policy\"\"\"\n self.state = None\n \"\"\"\"Number of consecutive health check failures before declaring an instance unhealthy.\"\"\"\n self.unhealthcheckthresshold = None", "def build(self):\n if ((self.allowMethods is None or len(self.allowMethods) == 0) and\n (self.denyMethods is None or len(self.denyMethods) == 0)):\n raise NameError(\"No statements defined for the policy\")\n\n policy = {\n 'principalId': self.principalId,\n 'policyDocument': {\n 'Version': self.version,\n 'Statement': []\n }\n }\n\n policy['policyDocument']['Statement'].extend(\n self._getStatementForEffect(\"Allow\", 
self.allowMethods))\n policy['policyDocument']['Statement'].extend(\n self._getStatementForEffect(\"Deny\", self.denyMethods))\n\n return policy", "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.find_l7_policy(l7_policy_id)\n l7_policy = _get_sdk_object_dict(l7_policy)\n\n if request.GET.get('includeChildResources'):\n resources = {}\n\n if l7_policy.get('rules'):\n l7_rules_list = _sdk_object_to_list(\n conn.load_balancer.l7_rules(l7_policy_id))\n l7_policy['rules'] = l7_rules_list\n\n resources['l7policy'] = l7_policy\n\n return resources\n else:\n return l7_policy", "def __init__(self, eid: str, name: str, hours_worked: int, hours_rate: int):\n pay.HourlyPolicy.__init__(self, hours_worked, hours_rate)\n super().__init__(eid, name)", "def __create_policy_def(self):\n\n self.logger.info(f\"Creating policy definition {self.policy_id}\")\n policy_definition_res = self.interactor.put_policy_definition(\n self.policy_id, self.policy_json\n )\n\n # definition was not created, report and abort\n if policy_definition_res.status_code != 201:\n self.output_res[\"result\"][\"status\"] = \"ERROR\"\n self.output_res[\"result\"][\n \"message\"\n ] = f\"Policy definition {self.policy_id} could not be created - {policy_definition_res.status_code}: {policy_definition_res.text}\"\n\n self.running_evaluations[self.eval_id] = self.output_res\n return False\n\n return True", "def gen_virtual_network_policy():\n sequence = SequenceType(major=0, minor=0)\n pol = VirtualNetworkPolicyType(sequence=sequence)\n return pol", "def minimum_packet_rate_rule_create(request, policy_id, **kwargs):\n body = {'minimum_packet_rate_rule': kwargs}\n if 'tenant_id' not in kwargs:\n kwargs['tenant_id'] = request.user.project_id\n body = {'minimum_packet_rate_rule': kwargs}\n rule = 'minimum_packet_rate_rule'\n minimum_packet_rate_rule = neutronclient(request)\\\n .create_minimum_packet_rate_rule(policy_id, body).get(rule)\n return MinimumPacketRateRule(minimum_packet_rate_rule)", "def create_policy(policystore_url, create_policy_request, verbose):\n\n if verbose:\n logging.info('Creating policy')\n pprint.pprint(create_policy_request)\n\n create_url = policystore_url + POLICYSTORE_PREFIX + 'CreateEntitlementPolicy'\n\n r = requests.post(\n create_url, headers=headers(), json=create_policy_request)\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n\n sys.exit('Failed to create policy')\n\n resp = r.json()\n\n logging.info(\n f'SUCCESS: Created policy - ID: {resp[\"policy_id\"]}, Token: {resp[\"token\"]}'\n )\n\n return resp", "def add(self, policy_name, data):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.info(\"Adding the policy: %s\", address)\n payload = json.dumps({\"policy\": data})\n response = self.vault.requests_request(\n \"POST\", address, headers=self.vault.token_header, data=payload\n )" ]
[ "0.71313787", "0.6708961", "0.66281354", "0.6562187", "0.5976769", "0.59415835", "0.58462644", "0.5843046", "0.5654051", "0.5594792", "0.5545354", "0.55239606", "0.55046284", "0.5478598", "0.54642725", "0.5444684", "0.542711", "0.53933513", "0.53799", "0.53317475", "0.5290894", "0.52608794", "0.5247717", "0.52270836", "0.52181375", "0.52081835", "0.51866335", "0.5167934", "0.51376474", "0.5136815" ]
0.7716427
0
Create a new l7 rule.
def create_l7_rule(request, **kwargs): data = request.DATA conn = get_sdk_connection(request) l7_rule = conn.load_balancer.create_l7_rule( admin_state_up=data['l7rule'].get('admin_state_up'), compare_type=data['l7rule']['compare_type'], invert=data['l7rule'].get('invert'), key=data['l7rule'].get('key'), l7_policy=kwargs['l7_policy_id'], type=data['l7rule']['type'], rule_value=data['l7rule']['rule_value'], ) return _get_sdk_object_dict(l7_rule)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n return create_l7_rule(request, **kwargs)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def create_l7_policy(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.create_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n listener_id=kwargs['listener_id'],\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def EM7(Type=\"DFA\"):\n R41, R42, R43, R44, R45 = state('R41'), state('R42'), state('R43'), state('R44'), state('R45')\n for i in range(1, 7):\n R41.transit[str(i)] = R41\n R42.transit[str(i)] = R42\n R43.transit[str(i)] = R43\n R44.transit[str(i)] = R44\n R45.transit[str(i)] = R45\n R41.transit['4'] = R42\n R42.transit['4'] = R43\n R43.transit['4'] = R44\n R44.transit['4'] = R45\n R45.transit['4'] = R42\n if Type == \"pDFA\":\n R4 = pDFA('R4', list('123456'), [R41, R42, R43, R44, R45], R41, [R45])\n else:\n R4 = DFA('R4', list('123456'), [R41, R42, R43, R44, R45], R41, [R45])\n if (SIZEOF):\n EM_size[\"EM7\"] = asizeof.asizeof(R4)\n return R4", "def ParseLrules(raw_lrule_str, new_lsys):\r\n lrules = StrToLrules(raw_lrule_str)\r\n lrule_priority = 0\r\n for a_rule in lrules:\r\n Lrule.objects.create(lsys = new_lsys,\r\n str_in=a_rule[0],\r\n str_out=a_rule[1],\r\n rule_priority=lrule_priority)\r\n lrule_priority += 1", "def GachaCraftNodeExcelAddGP1107(builder, GP1107):\n return AddGP1107(builder, GP1107)", "def _addrule(self, nonterm, program, params, info):\n rule = Rule(nonterm, program, params, info)\n\n if not nonterm in self.rules:\n self.rules[nonterm] = []\n \n self.rules[nonterm].append(rule)", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def lift(self, la):\n HLP = self._kBoundedRing.ambient().hall_littlewood(t=self.t).P()\n return HLP._from_dict(dict(self(la)))", "def __init__(self, *args):\n this = _libsbml.new_Rule(*args)\n try: self.this.append(this)\n except: self.this = this", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = 
rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = Optional(OneOrMore(rule))", "def create_firewall_rule(project):\n listed_rules = subprocess.check_output(\n ['gcloud', 'compute', 'firewall-rules', 'list',\n '--format', 'value(name)',\n '--filter', 'name=%s' % LEO_FIREWALL_RULE,\n '--project', project])\n if LEO_FIREWALL_RULE in listed_rules:\n return\n Print.GN('Creating firewall rule for Leonardo VM.')\n subprocess.check_call(\n ['gcloud', 'compute', 'firewall-rules', 'create',\n LEO_FIREWALL_RULE,\n '--allow', 'tcp:80,tcp:443',\n '--priority', '900',\n '--target-tags', LEO_FIREWALL_RULE,\n '--project', project])", "def create_firewall_rule(self, body=None):\r\n return self.post(self.firewall_rules_path, body=body)", "def __init__(self, *args):\n this = _libsbml.new_AlgebraicRule(*args)\n try: self.this.append(this)\n except: self.this = this", "def _rules_to_trxf_dnf_ruleset(self, rules, label):\n conjunctions = list()\n for rule in rules:\n conjunction = self._rule_to_trxf_conjunction(rule)\n conjunctions.append(conjunction)\n dnf_ruleset = DnfRuleSet(conjunctions, label)\n return dnf_ruleset", "def build():\n\n n7 = ListNode(7)\n n2 = ListNode(2)\n n4 = ListNode(4)\n n3 = ListNode(3)\n n5 = ListNode(5)\n n7.next = n2\n n2.next = n4\n n4.next = n3\n n3.next = n5\n\n return n7", "def addAssignmentRule(self, var, math):\n\n r = self.model.createAssignmentRule()\n self.check(r, \"create assignment rule r\")\n self.check(r.setVariable(var), \"set assignment rule variable\")\n math_ast = libsbml.parseL3Formula(math)\n self.check(r.setMath(math_ast), \"set assignment rule equation\")\n return r", "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule_list = _sdk_object_to_list(conn.load_balancer.l7_rules(\n l7_policy_id))\n return {'items': l7_rule_list}", "def create(self, query_goal_path, all_knowledge):\n plan_filepath = self._create_plan( query_goal_path, all_knowledge )\n skolemized_plan_filepath = self.output_folder + \"partially_skolemized_plan.n3\"\n skolemize_lemmas( plan_filepath, skolemized_plan_filepath)\n \n rdf_graph = Graph()\n rdf_graph.parse(skolemized_plan_filepath, format=\"n3\")\n parser = LemmasParser(rdf_graph)\n lemmas = parser.parse()\n \n #lemma_graph.to_image( output_file = self.output_folder + \"lemma_precedences.png\" )\n #lemma_graph.to_gml( output_file = self.output_folder + \"lemma_precedences.gml\" )\n return Plan( lemmas )", "def make_rule(name, seq_id, action, protocol, src_ip, src_mask, dst_ip,\n dst_mask, sport_operator, sport_low, sport_high,\n dport_operator, dport_low, dport_high, count, log, dscp):\n xml_tring = template.IP_ACL_RULE.format()\n the_config = etree.fromstring(xml_tring)\n remove_unused_tags(the_config, name, action, protocol, src_ip, dst_ip,\n sport_operator, (sport_low, sport_high), dport_operator,\n (dport_low, dport_high), count, log, dscp)\n\n for elt in the_config.iterdescendants():\n if elt.tag == ('seq-id'):\n add_text_to_ele(elt, seq_id)\n elif elt.tag == ('action'):\n add_text_to_ele(elt, action)\n elif elt.tag == ('protocol-type'):\n add_text_to_ele(elt, protocol)\n elif elt.tag == ('src-host-any-sip'):\n add_text_to_ele(elt, src_ip)\n elif elt.tag == ('src-mask'):\n add_text_to_ele(elt, src_mask)\n elif elt.tag == ('dst-host-any-dip'):\n add_text_to_ele(elt, dst_ip)\n elif elt.tag == ('dst-mask'):\n add_text_to_ele(elt, dst_mask)\n 
elif elt.tag == ('sport'):\n add_text_to_ele(elt, sport_operator)\n elif \"sport-number-eq-neq\" in elt.tag:\n add_text_to_ele(elt, sport_low)\n elif \"sport-number-range-lower\" in elt.tag:\n add_text_to_ele(elt, sport_low)\n elif \"sport-number-range-higher\" in elt.tag:\n add_text_to_ele(elt, sport_high)\n elif elt.tag == ('dport'):\n add_text_to_ele(elt, dport_operator)\n elif \"dport-number-eq-neq\" in elt.tag:\n add_text_to_ele(elt, dport_low)\n elif \"dport-number-range-lower\" in elt.tag:\n add_text_to_ele(elt, dport_low)\n elif \"dport-number-range-higher\" in elt.tag:\n add_text_to_ele(elt, dport_high)\n elif \"dscp\" in elt.tag:\n add_text_to_ele(elt, dscp)\n\n xml_request = etree.tostring(the_config, pretty_print=True)\n return xml_request", "def addRule(self, ruleLine):\n cols = ruleLine.split(' ')\n positionNumber = int(cols[0])\n self._rules[positionNumber] = {}\n for i in range(1, len(cols)):\n self._rules[positionNumber][cols[i].upper()] = 1", "def rule_add(self, rulename, rule, commentline):\n\n if '->' in rule:\n zeroes = '|'.join(self.zerosymbols)\n rule = '[~$[' + zeroes + '] .o. [' + rule + ']]/[' + zeroes + ']'\n\n FST.define(rule, rulename)\n myrule = FST(rule)\n self.rules[rulename] = myrule\n self.comments[rulename] = commentline", "def createRule(self):\n res = True\n\n try:\n PATH = os.path.dirname(os.path.realpath(__file__))\n DATABASE = os.path.join(PATH, '..', 'db', 'store.db')\n conn = sqlite3.connect(DATABASE)\n c = conn.cursor()\n c.execute('INSERT OR IGNORE INTO PRICING_RULES VALUES (?, ?, ?, ?, ?, ?, ?)',\n (self.description, self.itemCode, self.minUnits, self.divisor, self.multiplier, self.discountPerc, self.extraData))\n conn.commit()\n except sqlite3.Error as e:\n print(\"An error occurred while creating rule <\" + self.description + \"> for <\" + self.itemCode + \">: \", e.args[0])\n res = False\n finally:\n c.close()\n conn.close()\n return res", "def rewrite_lp(f_lp, statement):\n f_lp.write(statement.logic_program_form())", "def __init__(self, left_rule, right_rule):\n Rule.__init__(self)\n self.__subrules = [left_rule, right_rule]" ]
[ "0.6551737", "0.6281853", "0.6256196", "0.5533587", "0.5317884", "0.51790553", "0.5176982", "0.51300806", "0.5054435", "0.50302255", "0.50243175", "0.50243175", "0.50243175", "0.50243175", "0.50243175", "0.5020988", "0.5017123", "0.49702477", "0.49285138", "0.49261418", "0.4924092", "0.48976773", "0.48806834", "0.48571157", "0.48459816", "0.48241165", "0.47801265", "0.47770363", "0.4755052", "0.47365782" ]
0.7878783
0
Create a new health monitor for a pool.
def create_health_monitor(request, **kwargs): data = request.DATA conn = get_sdk_connection(request) health_mon = conn.load_balancer.create_health_monitor( type=data['monitor']['type'], delay=data['monitor']['delay'], timeout=data['monitor']['timeout'], max_retries=data['monitor']['max_retries'], max_retries_down=data['monitor']['max_retries_down'], pool_id=kwargs['pool_id'], http_method=data['monitor'].get('http_method'), url_path=data['monitor'].get('url_path'), expected_codes=data['monitor'].get('expected_codes'), admin_state_up=data['monitor'].get('admin_state_up'), name=data['monitor'].get('name') ) return _get_sdk_object_dict(health_mon)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_healthmonitor(self, context, healthmonitor):\n LOG.info(\"Received request 'Create Pool Health Monitor' for\"\n \"Health monitor:%(hm)s\",\n {'hm': healthmonitor['id']})\n arg_dict = {'context': context,\n lb_const.HEALTHMONITOR: healthmonitor\n }\n self._send_event(lb_const.EVENT_CREATE_HEALTH_MONITOR_V2,\n arg_dict, serialize=True,\n binding_key=healthmonitor[lb_const.POOL][\n 'loadbalancer_id'],\n key=healthmonitor['id'])", "def associate_health_monitor(self, pool, body):\r\n return self.post(self.associate_pool_health_monitors_path % (pool),\r\n body=body)", "def post(self, request):\n kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id'),\n 'pool_id': request.DATA.get('parentResourceId')}\n return create_health_monitor(request, **kwargs)", "def create_health_monitor(self, body=None):\r\n return self.post(self.health_monitors_path, body=body)", "def create_pool(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n pool = conn.load_balancer.create_pool(\n protocol=data['pool']['protocol'],\n lb_algorithm=data['pool']['lb_algorithm'],\n session_persistence=data['pool'].get('session_persistence'),\n listener_id=kwargs['listener_id'],\n loadbalancer_id=kwargs['loadbalancer_id'],\n name=data['pool'].get('name'),\n description=data['pool'].get('description'),\n admin_state_up=data['pool'].get('admin_state_up'),\n tls_enabled=data['pool'].get('tls_enabled'),\n # Replace empty string by None (uses default tls cipher string)\n tls_ciphers=data['pool'].get('tls_ciphers') or None,\n )\n\n if data.get('members'):\n args = (request, kwargs['loadbalancer_id'], add_member)\n kwargs = {'callback_kwargs': {'pool_id': pool.id,\n 'index': 0}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n elif data.get('monitor'):\n args = (request, kwargs['loadbalancer_id'], create_health_monitor)\n kwargs = {'callback_kwargs': {'pool_id': pool.id}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n\n return _get_sdk_object_dict(pool)", "def pre_loadbalancer_healthmonitor_create(self, resource_dict):\n pass", "def health_check(name, target='TCP:22', healthy_threashold=2, unhealthy_threashold=3, interval=30, timeout=3):\n hc = HealthCheck(title=name + 'healthcheck')\n hc.HealthyThreshold = healthy_threashold\n hc.UnhealthyThreshold = unhealthy_threashold\n hc.Interval = interval\n hc.Target = target\n hc.Timeout = timeout\n return hc", "def post_loadbalancer_healthmonitor_create(self, resource_dict):\n pass", "def get(self, request):\n pool_id = request.GET.get('poolId')\n conn = get_sdk_connection(request)\n health_monitor_list = _sdk_object_to_list(\n conn.load_balancer.health_monitors(\n project_id=request.user.project_id\n )\n )\n\n if pool_id:\n health_monitor_list = self._filter_health_monitors(\n health_monitor_list,\n pool_id)\n return {'items': health_monitor_list}", "def test_create_healthmonitor_with_all_params(self):\r\n resource = 'health_monitor'\r\n cmd = healthmonitor.CreateHealthMonitor(test_cli20.MyApp(sys.stdout),\r\n None)\r\n admin_state_up = False\r\n delay = '60'\r\n expected_codes = '200-202,204'\r\n http_method = 'HEAD'\r\n max_retries = '2'\r\n timeout = '10'\r\n type = 'TCP'\r\n tenant_id = 'my-tenant'\r\n url_path = '/health'\r\n my_id = 'my-id'\r\n args = ['--admin-state-down',\r\n '--delay', delay,\r\n '--expected-codes', expected_codes,\r\n '--http-method', http_method,\r\n '--max-retries', max_retries,\r\n '--timeout', timeout,\r\n '--type', type,\r\n '--tenant-id', tenant_id,\r\n '--url-path', 
url_path]\r\n position_names = ['admin_state_up', 'delay',\r\n 'expected_codes', 'http_method',\r\n 'max_retries', 'timeout',\r\n 'type', 'tenant_id', 'url_path']\r\n position_values = [admin_state_up, delay,\r\n expected_codes, http_method,\r\n max_retries, timeout,\r\n type, tenant_id, url_path]\r\n self._test_create_resource(resource, cmd, '', my_id, args,\r\n position_names, position_values)", "def create_pool(self, context, pool):\n LOG.info(\"Received request 'Create Pool' for Pool:%(pool_id)s \",\n {'pool_id': pool['id']})\n arg_dict = {'context': context,\n lb_const.POOL: pool\n }\n # REVISIT(jiahao) M:N pool is not yet implemented.\n self._send_event(lb_const.EVENT_CREATE_POOL_V2, arg_dict,\n serialize=True,\n binding_key=pool['loadbalancer_id'],\n key=pool['id'])", "def add_health_monitor(self, loadbalancer, type, delay=10, timeout=10,\n attemptsBeforeDeactivation=3, path=\"/\", statusRegex=None,\n bodyRegex=None, hostHeader=None):\n uri = \"/loadbalancers/%s/healthmonitor\" % utils.get_id(loadbalancer)\n req_body = {\"healthMonitor\": {\n \"type\": type,\n \"delay\": delay,\n \"timeout\": timeout,\n \"attemptsBeforeDeactivation\": attemptsBeforeDeactivation,\n }}\n uptype = type.upper()\n if uptype.startswith(\"HTTP\"):\n lb = self._get_lb(loadbalancer)\n if uptype != lb.protocol:\n raise exc.ProtocolMismatch(\"Cannot set the Health Monitor type \"\n \"to '%s' when the Load Balancer's protocol is '%s'.\" %\n (type, lb.protocol))\n if not all((path, statusRegex, bodyRegex)):\n raise exc.MissingHealthMonitorSettings(\"When creating an HTTP(S) \"\n \"monitor, you must provide the 'path', 'statusRegex' and \"\n \"'bodyRegex' parameters.\")\n body_hm = req_body[\"healthMonitor\"]\n body_hm[\"path\"] = path\n body_hm[\"statusRegex\"] = statusRegex\n body_hm[\"bodyRegex\"] = bodyRegex\n if hostHeader:\n body_hm[\"hostHeader\"] = hostHeader\n resp, body = self.api.method_put(uri, body=req_body)\n return body", "def from_dict(cls, _dict: Dict) -> 'Monitor':\n args = {}\n if 'id' in _dict:\n args['id'] = _dict.get('id')\n if 'name' in _dict:\n args['name'] = _dict.get('name')\n if 'description' in _dict:\n args['description'] = _dict.get('description')\n if 'type' in _dict:\n args['type'] = _dict.get('type')\n if 'port' in _dict:\n args['port'] = _dict.get('port')\n if 'interval' in _dict:\n args['interval'] = _dict.get('interval')\n if 'retries' in _dict:\n args['retries'] = _dict.get('retries')\n if 'timeout' in _dict:\n args['timeout'] = _dict.get('timeout')\n if 'method' in _dict:\n args['method'] = _dict.get('method')\n if 'path' in _dict:\n args['path'] = _dict.get('path')\n if 'headers' in _dict:\n args['headers_'] = [HealthcheckHeader.from_dict(x) for x in _dict.get('headers')]\n if 'allow_insecure' in _dict:\n args['allow_insecure'] = _dict.get('allow_insecure')\n if 'expected_codes' in _dict:\n args['expected_codes'] = _dict.get('expected_codes')\n if 'expected_body' in _dict:\n args['expected_body'] = _dict.get('expected_body')\n if 'created_on' in _dict:\n args['created_on'] = _dict.get('created_on')\n if 'modified_on' in _dict:\n args['modified_on'] = _dict.get('modified_on')\n return cls(**args)", "def add_health_monitor(self, loadbalancer, type, delay=10, timeout=10,\n attemptsBeforeDeactivation=3, path=\"/\", statusRegex=None,\n bodyRegex=None, hostHeader=None):\n abd = attemptsBeforeDeactivation\n return loadbalancer.add_health_monitor(type=type, delay=delay,\n timeout=timeout, attemptsBeforeDeactivation=abd, path=path,\n statusRegex=statusRegex, bodyRegex=bodyRegex,\n 
hostHeader=hostHeader)", "def test_create_healthmonitor_with_mandatory_params(self):\r\n resource = 'health_monitor'\r\n cmd = healthmonitor.CreateHealthMonitor(test_cli20.MyApp(sys.stdout),\r\n None)\r\n admin_state_up = False\r\n delay = '60'\r\n max_retries = '2'\r\n timeout = '10'\r\n type = 'TCP'\r\n tenant_id = 'my-tenant'\r\n my_id = 'my-id'\r\n args = ['--admin-state-down',\r\n '--delay', delay,\r\n '--max-retries', max_retries,\r\n '--timeout', timeout,\r\n '--type', type,\r\n '--tenant-id', tenant_id]\r\n position_names = ['admin_state_up', 'delay', 'max_retries', 'timeout',\r\n 'type', 'tenant_id']\r\n position_values = [admin_state_up, delay, max_retries, timeout, type,\r\n tenant_id]\r\n self._test_create_resource(resource, cmd, '', my_id, args,\r\n position_names, position_values)", "def _create_volume_pool(self, pool_name):\n osd_map = self._rados_command('osd dump', {})\n\n existing_id = self._get_pool_id(osd_map, pool_name)\n if existing_id is not None:\n log.info(\"Pool {0} already exists\".format(pool_name))\n return existing_id\n\n osd_count = len(osd_map['osds'])\n\n # We can't query the actual cluster config remotely, but since this is\n # just a heuristic we'll assume that the ceph.conf we have locally reflects\n # that in use in the rest of the cluster.\n pg_warn_max_per_osd = int(self.rados.conf_get('mon_max_pg_per_osd'))\n\n other_pgs = 0\n for pool in osd_map['pools']:\n if not pool['pool_name'].startswith(self.POOL_PREFIX):\n other_pgs += pool['pg_num']\n\n # A basic heuristic for picking pg_num: work out the max number of\n # PGs we can have without tripping a warning, then subtract the number\n # of PGs already created by non-manila pools, then divide by ten. That'll\n # give you a reasonable result on a system where you have \"a few\" manila\n # shares.\n pg_num = ((pg_warn_max_per_osd * osd_count) - other_pgs) // 10\n # TODO Alternatively, respect an override set by the user.\n\n self._rados_command(\n 'osd pool create',\n {\n 'pool': pool_name,\n 'pg_num': int(pg_num),\n }\n )\n\n osd_map = self._rados_command('osd dump', {})\n pool_id = self._get_pool_id(osd_map, pool_name)\n\n if pool_id is None:\n # If the pool isn't there, that's either a ceph bug, or it's some outside influence\n # removing it right after we created it.\n log.error(\"OSD map doesn't contain expected pool '{0}':\\n{1}\".format(\n pool_name, json.dumps(osd_map, indent=2)\n ))\n raise RuntimeError(\"Pool '{0}' not present in map after creation\".format(pool_name))\n else:\n return pool_id", "def create_load_balancer_monitor(\n self, profile_name=\"LB_Test_Monitor\", resource_type=\"LBHttpMonitorProfile\",\n monitor_port=\"80\", request_url=\"/\", response_codes=None, interval=5, timeout=5,\n rise_count=3, fall_count=3):\n LB_MONITOR_PROFILE = \"https://{ip}/policy/api/v1/infra/lb-monitor-profiles/{profile_name}\"\n url = LB_MONITOR_PROFILE.format(ip=self.nsxt_ip, profile_name=profile_name)\n print('Starting PUT call to create Monitor Profile : %s' % url)\n put_status = None\n response_codes = [200] if not response_codes else response_codes\n json_payload = {\n \"request_url\": request_url, \"response_status_codes\": response_codes,\n \"resource_type\": resource_type, \"monitor_port\": monitor_port,\n \"interval\": interval, \"timeout\": timeout, \"rise_count\": rise_count,\n \"fall_count\": fall_count}\n monitor_id = None\n monitor_path = None\n try:\n response = self.rest.put(url, json_payload, self.headers, 200, auth=(\n self.nsxt_user, self.nsxt_pwd), is_json=True)\n put_status = 
response.status_code\n root = json.loads(response.text)\n monitor_id = root[\"id\"]\n monitor_path = root[\"path\"]\n print(\"monitor_id:%s | monitor_path:%s\" % (\n monitor_id, monitor_path))\n except Exception as e:\n print(traceback.format_exc())\n print('Exception in creating monitor profile %s' % e)\n return monitor_id, monitor_path", "def addpool(miner: Miner, pool):\n api = MinerApi(host=miner.ipaddress, port=int(miner.port))\n jaddpool = api.addpool(\"{0},{1},{2}\".format(pool.url, pool.user, \"x\"))\n return jaddpool[\"STATUS\"][0][\"Msg\"]", "def get_health_monitor(self, loadbalancer):\n return loadbalancer.get_health_monitor()", "def l7pool_add(env, identifier, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n pool_main = {\n 'name': args.get('name'),\n 'loadBalancingAlgorithm': args.get('method'),\n 'protocol': args.get('protocol')\n }\n\n pool_members = list(args.get('server'))\n\n pool_health = {\n 'interval': args.get('healthinterval'),\n 'timeout': args.get('healthtimeout'),\n 'maxRetries': args.get('healthretry'),\n 'urlPath': args.get('healthpath')\n }\n\n pool_sticky = {\n 'type': args.get('sticky')\n }\n\n try:\n mgr.add_lb_l7_pool(uuid, pool_main, pool_members, pool_health, pool_sticky)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def add_health_monitor(self, type, delay=10, timeout=10,\n attemptsBeforeDeactivation=3, path=\"/\", statusRegex=None,\n bodyRegex=None, hostHeader=None):\n abd = attemptsBeforeDeactivation\n return self.manager.add_health_monitor(self, type=type, delay=delay,\n timeout=timeout, attemptsBeforeDeactivation=abd,\n path=path, statusRegex=statusRegex, bodyRegex=bodyRegex,\n hostHeader=hostHeader)", "def _create_pool_vm(args):\n # check storage pool name unicity\n conn = libvirt.open(None)\n _sps = list()\n if conn:\n _sps = [sp for sp in conn.listAllStoragePools() if sp.name() == args.name]\n conn.close()\n else:\n print('Cannot contact hypervisor', file=sys.stderr)\n return 1\n\n if len(_sps) != 0:\n print(\"Storage pool with name [%s] already exists\" % args.name, file=sys.stderr)\n return 1\n\n if args.disk and args.netfshost:\n print(\"--disk and --host option are exclusive\", file=sys.stderr)\n return 1\n\n if not args.disk and not args.netfshost:\n print(\"Either --disk or --host must be specified.\", file=sys.stderr)\n return 1\n\n if args.netfshost and not args.path:\n print(\"Must specify the remote resource path with the --path option\", file=sys.stderr)\n return 1\n\n _pool_name = args.name\n if args.disk:\n return oci_utils.kvm.virt.create_fs_pool(args.disk, _pool_name)\n if args.netfshost:\n return oci_utils.kvm.virt.create_netfs_pool(args.netfshost, args.path, _pool_name)", "def post(self, request):\n kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id'),\n 'listener_id': request.DATA.get('parentResourceId')}\n return create_pool(request, **kwargs)", "def create_health(ai_settings, screen, stats, health):\n\tif (stats.alien_kills % ai_settings.health_drop_point == 0):\n\t\tif (len(health) < 1):\n\t\t\tnew_health = Health(ai_settings, screen)\n\t\t\thealth.add(new_health)", "def create_lb_unhealthy_alarm ( cloudwatch_conn,\n base_name,\n lb_name,\n min_healthy_hosts,\n topic_arn,\n threshold = 5) :\n alarm = boto.ec2.cloudwatch.MetricAlarm( name = base_name + '-' + lb_name + '-UNHEALTHY-Alarm',\n description = 'Alarm for when ' + lb_name + ' does not have enough 
healthy hosts',\n metric = 'HealthyHostCount',\n namespace = 'AWS/ELB',\n statistic = 'Average',\n comparison = '<',\n threshold = min_healthy_hosts,\n period = 60,\n evaluation_periods = threshold,\n dimensions = { 'LoadBalancerName': lb_name },\n alarm_actions = topic_arn )\n cloudwatch_conn.create_alarm( alarm )\n return alarm", "def get_lbaas_agent_hosting_pool(self, pool, **_params):\r\n return self.get((self.pool_path + self.LOADBALANCER_AGENT) % pool,\r\n params=_params)", "def pool_create(self, pool_name):\n self.core.api.os.shell.cmd('{0} add apppool /name:\"{1}\"'.format(\n self.APP_CMD, pool_name\n ))", "def addCollector(self, id, sourceId, hubId, poolId):\n facade = self._getFacade()\n try:\n monitor = IInfo(facade.addMonitor(\n id, sourceId=sourceId, hubId=hubId, poolId=poolId\n ))\n except ControlCenterError as e:\n log.error(\"Control Center error: %s\", e.message)\n return DirectResponse.fail(e.message)\n return DirectResponse.succeed(data=Zuul.marshal(monitor))", "def createRewardPool(self, name, data, warnThreshold=None):\n pools = self.listRewardPools()\n if name in [x['name'] for x in pools]:\n for pool in pools:\n if pool['name'] == name:\n _id = pool['id']\n else:\n param = {'name': name}\n if warnThreshold:\n param['warnThreshold'] = warnThreshold\n resp = self.post_json('/rewardPool', param)\n _id = resp['id']\n self.addEntries(_id, data)\n return _id", "def get_health_monitor(self):\n return self.manager.get_health_monitor(self)" ]
[ "0.7401438", "0.7051553", "0.69609606", "0.66524464", "0.62678564", "0.6037645", "0.5910251", "0.5820596", "0.5788904", "0.5740553", "0.5709624", "0.5578223", "0.5542494", "0.5517016", "0.55118424", "0.54961306", "0.5465203", "0.54008526", "0.5371732", "0.53213763", "0.52980614", "0.5291099", "0.5240125", "0.52308506", "0.5223342", "0.5214958", "0.5198499", "0.51911706", "0.51782376", "0.5167297" ]
0.79059434
0
Create a new flavor profile.
def create_flavor_profile(request, **kwargs): data = request.DATA conn = get_sdk_connection(request) flavor_profile = conn.load_balancer.create_flavor( name=data['flavor_profile']['name'], provider_name=data['flavor_profile']['provider_name'], flavor_data=data['flavor_profile']['flavor_data'], ) return _get_sdk_object_dict(flavor_profile)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self, request):\n kwargs = {\n 'flavor_profile': request.DATA.get('flavor_profile')\n }\n return create_flavor_profile(request, **kwargs)", "def _create_flavor(self, context, flavor):\n flavor_dict = flavor.__dict__\n name = self.prefix + flavor.name\n flavorid = self.prefix + flavor.id\n memory = flavor.ram\n vcpus = flavor.vcpus\n root_gb = flavor.disk\n ephemeral_gb = flavor_dict.get('OS-FLV-EXT-DATA:ephemeral', 0)\n u_swap = flavor_dict.get('swap', 0)\n rxtx_factor = flavor_dict.get('rxtx_factor', 1.0)\n is_public = flavor_dict.get('os-flavor-access:is_public', True)\n if u_swap == \"\":\n swap = 0\n else:\n swap = int(u_swap)\n\n try:\n return flavors.create(name, memory, vcpus, root_gb,\n ephemeral_gb=ephemeral_gb,\n flavorid=flavorid, swap=swap,\n rxtx_factor=rxtx_factor,\n is_public=is_public)\n except exception.InstanceExists as err:\n raise err", "def create_flavor(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n flavor = conn.load_balancer.create_flavor(\n name=data['flavor']['name'],\n flavor_profile_id=data['flavor']['flavor_profile_id'],\n description=data['flavor'].get('description'),\n enabled=data['flavor'].get('enabled'),\n )\n\n return _get_sdk_object_dict(flavor)", "def create(*args, **kwargs):\n\n factory = V2ProfileFactory()\n output = factory.create(export_json=True)\n click.echo(output)", "def create_flavor(cls, values):\n return cls.dbdriver.create_flavor(values)", "def test_create_flavor(self):\n # Create Flavor\n flavor_settings = FlavorConfig(\n name=self.flavor_name, ram=1, disk=1, vcpus=1)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))", "def createProfile(self):\n if self.profile:\n return\n from soc.modules.gsoc.models.profile import GSoCProfile\n user = self.createUser()\n properties = {'link_id': user.link_id, 'student_info': None, 'user': user,\n 'parent': user, 'scope': self.program, 'status': 'active'}\n self.profile = seeder_logic.seed(GSoCProfile, properties)", "def create(profile, name):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = name\n return client.create_instance_profile(**params)", "def create_flavor(self):\n logger.debug(\"Creating VM Flavor\")\n rc, flavor_id = self.cal.create_flavor(self.account, self.flavor)\n assert rc == RwTypes.RwStatus.SUCCESS\n\n return flavor_id", "def create(\n name: str,\n from_name: str = typer.Option(None, \"--from\", help=\"Copy an existing profile.\"),\n):\n\n profiles = prefect.settings.load_profiles()\n if name in profiles:\n app.console.print(\n textwrap.dedent(\n f\"\"\"\n [red]Profile {name!r} already exists.[/red]\n To create a new profile, remove the existing profile first:\n\n prefect profile delete {name!r}\n \"\"\"\n ).strip()\n )\n raise typer.Exit(1)\n\n if from_name:\n if from_name not in profiles:\n exit_with_error(f\"Profile {from_name!r} not found.\")\n\n # Create a copy of the profile with a new name and add to the collection\n profiles.add_profile(profiles[from_name].copy(update={\"name\": name}))\n else:\n profiles.add_profile(prefect.settings.Profile(name=name, settings={}))\n\n prefect.settings.save_profiles(profiles)\n\n app.console.print(\n textwrap.dedent(\n f\"\"\"\n Created profile with properties:\n name - {name!r}\n from name - {from_name or None}\n\n Use created profile for future, subsequent commands:\n prefect profile use {name!r}\n\n Use created profile 
temporarily for a single command:\n prefect -p {name!r} config view\n \"\"\"\n )\n )", "def test_create_flavor_existing(self):\n # Create Flavor\n flavor_settings = FlavorConfig(\n name=self.flavor_name, ram=1, disk=1, vcpus=1)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))\n\n flavor_creator_2 = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor2 = flavor_creator_2.create()\n\n self.assertEqual(flavor.id, flavor2.id)", "def create_profile(sender, **kwargs):\n user = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n user_profile = Profile(user=user)\n user_profile.save()", "def create_profile(sender, **kw):\n user = kw['instance']\n if kw['created']:\n profile = UserProfile(user=user)\n profile.save()", "def create_user_profile(instance, created, **_):\n if created:\n Profile.objects.create(user=instance)", "def update_flavor_profile(request, **kwargs):\n data = request.DATA\n flavor_profile_id = data['flavor_profile']['id']\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.update_flavor_profile(\n flavor_profile_id,\n name=data['flavor_profile'].get('name'),\n provider_name=data['flavor_profile'].get('provider_name'),\n flavor_data=data['flavor_profile'].get('flavor_data'),\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def fusion_api_create_server_profile(self, body, api=None, headers=None, param=''):\n return self.profile.create(body, api, headers, param=param)", "async def test_create(self):\n expected = {\n 'id': 'id'\n }\n profile = {\n 'name': 'name',\n 'version': 4,\n }\n rsps = respx.post(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles') \\\n .mock(return_value=Response(200, json=expected))\n id = await provisioning_client.create_provisioning_profile(profile)\n assert rsps.calls[0].request.url == f'{PROVISIONING_API_URL}/users/current/provisioning-profiles'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert rsps.calls[0].request.content == json.dumps(profile).encode('utf-8')\n assert id == expected", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n # create new Stellar account\n stellar.api.create_account(user=instance)", "def post(self, request):\n kwargs = {\n 'flavor': request.DATA.get('flavor')\n }\n return create_flavor(request, **kwargs)", "def create_profile(sender, **kwargs):\n\n # I import profile here cause i can't import it right in the top.\n from .profiles import Profile\n\n user = kwargs['instance']\n\n Profile.objects.get_or_create(user=user)", "def create_profile(sender, instance, created, **kwargs):\n if created:\n profile, created = UserProfile.objects.get_or_create(user=instance)", "def create(profile, name):\n # Make sure it doesn't exist already.\n if exists(profile, name):\n msg = \"Instance profile '\" + str(name) + \"' already exists.\"\n raise ResourceAlreadyExists(msg)\n\n # Now we can create it.\n params = {}\n params[\"profile\"] = profile\n params[\"name\"] = name\n response = utils.do_request(instanceprofile, \"create\", params)\n\n # Check that it exists.\n instance_profile_data = polling_fetch(profile, name)\n if not instance_profile_data:\n msg = \"Instance profile '\" + str(name) + \"' not created.\"\n raise ResourceNotCreated(msg)\n\n # Send back the instance profile's info.\n return instance_profile_data", "def create_pootle_profile(sender, instance, **kwargs):\n try:\n profile = 
instance.get_profile()\n except PootleProfile.DoesNotExist:\n profile = PootleProfile(user=instance)\n profile.save()", "def test_create_flavor_all_settings(self):\n # Create Flavor\n if self.flavor_metadata:\n self.flavor_metadata.update(create_flavor.MEM_PAGE_SIZE_ANY)\n flavor_settings = openstack_tests.get_flavor_config(\n name=self.flavor_name, ram=1, disk=1, vcpus=1, ephemeral=2, swap=3,\n rxtx_factor=2.2, is_public=False,\n metadata=self.flavor_metadata)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))\n\n # Delete Flavor\n nova_utils.delete_flavor(self.nova, flavor)\n self.assertIsNone(\n nova_utils.get_flavor_by_name(self.nova, flavor_settings.name))\n\n # Attempt to cleanup\n self.flavor_creator.clean()\n\n self.assertIsNone(self.flavor_creator.get_flavor())", "def create_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def make_profile_for_user(sender, instance, **kwargs):\n if kwargs['created']:\n new_profile = ImagerProfile(user=instance)\n new_profile.save()", "def create_profile_for_new_users(sender, instance, created, **kwargs):\n if not created:\n return\n\n profile = Profile.objects.filter(user=instance).first()\n if profile is None:\n profile = Profile(user=instance)\n profile.save()", "def create_profile(self, user, *args, **kwargs):\n salt = hashlib.sha1(str(random.random())).hexdigest()[:5]\n activation_key = hashlib.sha1(salt + user.username).hexdigest()\n return self.create(user=user, activation_key=activation_key, **kwargs)", "def create_profile_for_new_user(sender, created, instance, **kwargs):\n if created:\n profile = self.get_model('profile')(user=instance)\n profile.save()", "def create_profile(sender, instance, created, **kwargs):\n if created: \n profile, new = UserProfile.objects.get_or_create(user=instance)" ]
[ "0.7588814", "0.7113851", "0.7016141", "0.6691254", "0.6609089", "0.6587327", "0.6524055", "0.65014654", "0.64622223", "0.6389659", "0.638112", "0.63053924", "0.62853515", "0.623517", "0.6190537", "0.6173143", "0.61469984", "0.61188585", "0.61023474", "0.6086395", "0.606686", "0.60383403", "0.60198927", "0.6017551", "0.6005865", "0.59947366", "0.5945505", "0.5944223", "0.59394026", "0.5932278" ]
0.81453323
0
Add a member to a pool.
def add_member(request, **kwargs): data = request.DATA members = data.get('members') pool_id = kwargs.get('pool_id') if kwargs.get('members_to_add'): members_to_add = kwargs['members_to_add'] index = [members.index(member) for member in members if member['id'] == members_to_add[0]][0] loadbalancer_id = data.get('loadbalancer_id') else: index = kwargs.get('index') loadbalancer_id = kwargs.get('loadbalancer_id') member = members[index] conn = get_sdk_connection(request) monitor_address = member.get('monitor_address') member = conn.load_balancer.create_member( pool_id, address=member['address'], protocol_port=member['protocol_port'], subnet_id=member['subnet_id'], weight=member.get('weight'), monitor_address=monitor_address if monitor_address else None, monitor_port=member.get('monitor_port'), admin_state_up=member.get('admin_state_up'), backup=member.get('backup', False), name=member.get('name'), ) index += 1 if kwargs.get('members_to_add'): args = (request, loadbalancer_id, update_member_list) members_to_add = kwargs['members_to_add'] members_to_add.pop(0) kwargs = {'callback_kwargs': { 'existing_members': kwargs.get('existing_members'), 'members_to_add': members_to_add, 'members_to_delete': kwargs.get('members_to_delete'), 'pool_id': pool_id}} thread.start_new_thread(poll_loadbalancer_status, args, kwargs) elif len(members) > index: args = (request, loadbalancer_id, add_member) kwargs = {'callback_kwargs': {'pool_id': pool_id, 'index': index}} thread.start_new_thread(poll_loadbalancer_status, args, kwargs) elif data.get('monitor'): args = (request, loadbalancer_id, create_health_monitor) kwargs = {'callback_kwargs': {'pool_id': pool_id}} thread.start_new_thread(poll_loadbalancer_status, args, kwargs) return _get_sdk_object_dict(member)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_pool ( self, pool ):\n self._pool_id += 1\n try:\n self._poolstack.append ( pool )\n except:\n self._pool_id -= 1\n raise\n\n self._update_resolver()", "def create_member(self, context, member):\n LOG.info(\"Received request 'Create Member' for Pool:%(pool_id)s \",\n {'pool_id': member['pool_id']})\n arg_dict = {'context': context,\n lb_const.MEMBER: member,\n }\n self._send_event(lb_const.EVENT_CREATE_MEMBER_V2, arg_dict,\n serialize=True,\n binding_key=member[lb_const.POOL]['loadbalancer_id'],\n key=member['id'])", "def add_member():\n client = RequestManager()\n client.set_method(\"POST\")\n client.set_endpoint(\"/accounts/{0}/memberships\".format(CONFIG_DATA['account_id']))\n body = {\"person_id\": CONFIG_DATA['member_id']}\n client.set_body(json.dumps(body))\n client.execute_request()", "def add_node(self, node):\n self._execution_pool[node.name] = node", "def put(self, request, pool_id):\n # Assemble the lists of member id's to add and remove, if any exist\n request_member_data = request.DATA.get('members', [])\n\n conn = get_sdk_connection(request)\n existing_members = _sdk_object_to_list(\n conn.load_balancer.members(pool_id))\n\n (members_to_add, members_to_delete) = get_members_to_add_remove(\n request_member_data, existing_members)\n\n if members_to_add or members_to_delete:\n kwargs = {'existing_members': existing_members,\n 'members_to_add': members_to_add,\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}\n update_member_list(request, **kwargs)", "def addMember(self, member_name):\n connection = self.sock\n message = \"member_add\".encode()\n connection.send(message)\n status_code = connection.recv(2)\n\n if status_code != FAILURE:\n print(\"Error\")\n return False\n\n message = member_name.encode()\n connection.send(message)\n result = connection.recv(2)\n if result == SUCCESS:\n return True\n else:\n return False", "def add_member(self, member):\n self.members[member.name] = member\n self.relationships[member.name] = []", "def add_member(self, member, neighbors):\n self.members.append(member)\n \n # self.neighbors[member] = neighbors\n for n in neighbors:\n self.neighbors.append(n)\n self.calculate_a()", "def add_to_pool(self, data: str):\n self.pool.append(data)", "def add_member(self, persona):\n if persona not in self.members:\n self.members.append(persona)", "def add_member(group_name, member):\n query=\"INSERT INTO groupmembers(group_id, member)\\\n VALUES('{}','{}') RETURNING *;\".format(\n group_name, member\n )\n cur.execute(query)\n return cur.fetchone()", "def addpool(miner: Miner, pool):\n api = MinerApi(host=miner.ipaddress, port=int(miner.port))\n jaddpool = api.addpool(\"{0},{1},{2}\".format(pool.url, pool.user, \"x\"))\n return jaddpool[\"STATUS\"][0][\"Msg\"]", "def put(self, request, member_id, pool_id):\n data = request.DATA\n conn = get_sdk_connection(request)\n monitor_address = data.get('monitor_address')\n member = conn.load_balancer.update_member(\n member_id, pool_id, weight=data.get('weight'),\n monitor_address=monitor_address if monitor_address else None,\n monitor_port=data.get('monitor_port'),\n admin_state_up=data.get('admin_state_up'),\n backup=data.get('backup', False),\n name=data.get('name'),\n )\n return _get_sdk_object_dict(member)", "def addMember(self, *args):\n return _libsbml.Group_addMember(self, *args)", "def addMember(self, member):\n\t\tself.membersWithErrors.append([member, 0])", "def add_member(self, member_to_add):\r\n membtype = self.is_member_valid(member_to_add)\r\n if membtype > 0:\r\n 
self._members.append(member_to_add)\r\n if membtype == 1: # Numeric (int/float)\r\n self._length += 4\r\n elif membtype == 2: # String\r\n self._length += len(member_to_add) + 1 # Null-terminated\r\n else:\r\n raise TypeError(\"Tried to add an invalid piece of data!\")", "def add_member(self, member, dn=False):\n\n if dn:\n if self.check_member(member, dn=True):\n return\n mod = (ldap.MOD_ADD, 'member', member.encode('ascii'))\n else:\n if self.check_member(member):\n return\n mod = (ldap.MOD_ADD, 'member', member.get_dn().encode('ascii'))\n\n if self.__lib__.__batch_mods__:\n self.__lib__.enqueue_mod(self.__dn__, mod)\n elif not self.__lib__.__ro__:\n mod_attrs = [mod]\n self.__con__.modify_s(self.__dn__, mod_attrs)\n else:\n print(\"ADD VALUE member = {} FOR {}\".format(mod[2], self.__dn__))", "def add_to_pool(self):\n if self.check_pool():\n for func in self.getter._func:\n proxies = self.getter.get_proxies(func)\n for proxy in proxies:\n self.conn.push_to_right(proxy)\n else:\n print('Pool reached max capacity')", "def add(self, member, clone=True):\n if type(member)==Member:\n if clone:\n member = dict([(x,y) for x,y in member.getMember().items()])\n else:\n if self.head:\n self.tail.setLink(member)\n self.tail = member\n else:\n self.tail = member\n self.head = member \n if type(member)==dict:\n temp = Member()\n temp.setMember(member)\n member = temp\n if self.head:\n self.tail.setLink(member)\n self.tail = member\n else:\n self.tail = member\n self.head = member", "def add_peer(self, writer):\r\n address = self.get_address_string(writer)\r\n self.connection_pool[address] = writer\r\n logger.info(\"Added new peer to pool\", address=address)", "def addMember(self, *args):\n return _libsbml.ListOfMembers_addMember(self, *args)", "def register_member(self, address, uuid):\n\n final_address = self.parse_address(address)\n\n self.members.append({'uuid': uuid, 'address': final_address})", "def add(self, name, neighbours=None, weight=None):\n\n if type(neighbours) != list:\n if neighbours:\n raise TypeError(\"Wrong datatype for neighbours, input needs to be a list.\")\n if type(name) == int or type(name) == str or type(name) == float:\n pass\n else:\n raise TypeError(\"Wrong datatype for name. 
Only int, float and string accepted.\")\n\n if self._get(name):\n raise NameError(\"Member already exists!\")\n\n add_member = _Node(name, [], weight)\n self._members.append(add_member)\n self._size += 1\n # Makes sure to add the new member in the neighbours_list of each neighbour\n if neighbours:\n for neigh in neighbours:\n # Check if neighbour is in network first\n node_neigh = self._get(neigh)\n if node_neigh:\n add_member.neighbours.append(node_neigh) # neighbour node needs to be added to add_member\n try:\n node_neigh.neighbours.append(add_member) # add_member needs to be added to neighbour node\n # Possibly double of neigh after recursion\n except AttributeError: # Handles the case when neighbour exists, but it doesn't have neighbours\n node_neigh.neighbours = [add_member]\n elif node_neigh is None:\n self.add(neigh, [add_member.alias])\n self._updated = True", "def add_member(self, member_to_add):\r\n # Only add a member if it is a CHUNK or a FORM\r\n if self.is_member_valid(member_to_add):\r\n self._members.append(member_to_add)\r\n else:\r\n raise TypeError", "def add_member(self, peer_urls):\n return self._request_call(\n '/v2/members',\n method='post',\n json={\n 'peerURLs': peer_urls\n }\n )", "async def _add(self, ctx, points: int, *, name=None):\n server = ctx.message.server\n author = ctx.message.author\n names = None\n namesp = None\n if not self.permcheck(ctx):\n return\n if name is None:\n name = author\n elif \",\" in str(name):\n if \", \" in name:\n names = name.split(\", \")\n elif \",\" in name:\n names = name.split(\",\")\n namesp = names.copy()\n for i in range(len(names)):\n names[i] = discord.utils.find(\n lambda m: m.display_name == names[i], server.members)\n if names[i] is None:\n names[i] = discord.utils.find(\n lambda m: m.name == names[i], server.members)\n name = None\n else:\n namea = name[:]\n name = discord.utils.find(\n lambda m: m.display_name == name, server.members)\n if name is None:\n name = discord.utils.find(\n lambda m: m.name == name, server.members)\n if name is None:\n await self.bot.say(\"{} was not found, please check the spelling and also make \"\n \"sure that the member name being entered is a member in your Discord and \"\n \"that its the same as their Discord name / nickname.\".format(namea))\n return\n if server.id not in self.db:\n self.db[server.id] = {}\n if not name:\n counter = -1\n for x in names:\n counter += 1\n if x is None:\n await self.bot.say(\"{} was not found, please check the spelling and also make \"\n \"sure that the member name being entered is a member in your Discord and \"\n \"that its the same as their Discord name / nickname.\".format(namesp[counter]))\n await asyncio.sleep(1)\n continue\n elif x.id not in self.db[server.id]:\n await self.bot.say(\"{} was not found. 
Please add them first using points member add\"\n \" <discord name or Nickname>\".format(x.display_name))\n else:\n self.db[server.id][x.id][\"Lifetime Gain\"] += points\n self.db[server.id][x.id][\"Balance\"] += points\n await self.bot.say(\"{} points added for {}\".format(points, x.name))\n await asyncio.sleep(1)\n else:\n if name.id not in self.db[server.id]:\n await self.bot.say(\"{} is not in the list, please register first using points member add\"\n \" <Discord name or nickname>\".format(namea))\n return\n self.db[server.id][name.id][\"Lifetime Gain\"] += points\n self.db[server.id][name.id][\"Balance\"] += points\n await self.bot.say(\"{} points added for {}\".format(points, name.name))\n self.save_db()", "def add_member(self, user):\n if user is self.owner:\n raise ValidationError('A trip owner cannot also be a member.')\n # check the user is not already a member\n if self.members.filter(pk=user.pk).exists():\n return\n self.members.add(user)", "def l7pool_add(env, identifier, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n pool_main = {\n 'name': args.get('name'),\n 'loadBalancingAlgorithm': args.get('method'),\n 'protocol': args.get('protocol')\n }\n\n pool_members = list(args.get('server'))\n\n pool_health = {\n 'interval': args.get('healthinterval'),\n 'timeout': args.get('healthtimeout'),\n 'maxRetries': args.get('healthretry'),\n 'urlPath': args.get('healthpath')\n }\n\n pool_sticky = {\n 'type': args.get('sticky')\n }\n\n try:\n mgr.add_lb_l7_pool(uuid, pool_main, pool_members, pool_health, pool_sticky)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def add_member(self, request, pk):\n farm = self.get_object()\n user = request.data.get('user')\n farm.add_member(user)\n return Response({}, status=status.HTTP_202_ACCEPTED)", "def add_pool(name, **kwargs):\n _CONNECTIONS[name] = redis.StrictRedis(**kwargs)" ]
[ "0.7209677", "0.6895673", "0.689446", "0.67602986", "0.660132", "0.65374035", "0.648779", "0.64865744", "0.6474529", "0.64278823", "0.63709176", "0.635849", "0.6333273", "0.6296607", "0.6255828", "0.6253365", "0.62174135", "0.6187497", "0.61844695", "0.61722165", "0.61623985", "0.6119614", "0.60965884", "0.6094031", "0.6083802", "0.6010697", "0.5985343", "0.5964582", "0.59069055", "0.5877794" ]
0.73543555
0
Update a load balancer.
def update_loadbalancer(request, **kwargs): data = request.DATA loadbalancer_id = kwargs.get('loadbalancer_id') conn = get_sdk_connection(request) loadbalancer = conn.load_balancer.update_load_balancer( loadbalancer_id, name=data['loadbalancer'].get('name'), description=data['loadbalancer'].get('description'), admin_state_up=data['loadbalancer'].get('admin_state_up')) return _get_sdk_object_dict(loadbalancer)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, request, loadbalancer_id):\n kwargs = {'loadbalancer_id': loadbalancer_id}\n update_loadbalancer(request, **kwargs)", "def update_loadbalancer(self, context, lb, old):\n LOG.debug(\"\\nupdate_loadbalancer({}): called\".format(lb.id))\n hostnames = self._get_hostname(lb)\n # Update the TrafficIP group\n vapv = self._get_vapv(hostnames)\n # Update allowed_address_pairs\n if not old or lb.vip_address != old.vip_address:\n for hostname in hostnames:\n port_ids = self.openstack_connector.get_server_port_ids(\n hostname\n )\n self.openstack_connector.add_ip_to_ports(\n lb.vip_address, port_ids\n )\n # Update bandwidth allocation\n if old is not None and old.bandwidth != lb.bandwidth:\n self._update_instance_bandwidth(hostnames, lb.bandwidth)", "def update_loadbalancer(self, context, old_loadbalancer, loadbalancer):\n old_val, new_val = self.get_diff_of_dict(\n old_loadbalancer, loadbalancer)\n arg_dict = {'context': context,\n lb_const.OLD_LOADBALANCER: old_loadbalancer,\n lb_const.LOADBALANCER: loadbalancer,\n }\n LOG.info(\"Received request 'Update Loadbalancer' for LB:%(lb)s \"\n \"with new Param:%(new_val)s and old Param:%(old_val)s\",\n {'lb': loadbalancer['id'],\n 'new_val': new_val,\n 'old_val': old_val})\n self._send_event(lb_const.EVENT_UPDATE_LOADBALANCER_V2, arg_dict,\n serialize=True, binding_key=loadbalancer['id'],\n key=loadbalancer['id'])", "def update(self, name=None, labels=None):\n # type: (Optional[str], Optional[Dict[str, str]]) -> BoundLoadBalancer\n return self._client.update(self, name, labels)", "def _v1_0_11111_loadbalancers_3132(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"name\": \"new_lb_name\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3132\n response_body[\"loadBalancer\"][\"name\"] = \"new_lb_name\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def load_balancer_id(self, load_balancer_id):\n self._load_balancer_id = load_balancer_id", "def update_load_balancer(self,\n instance_id: str,\n dnszone_id: str,\n lb_id: str,\n *,\n name: str = None,\n description: str = None,\n enabled: bool = None,\n ttl: int = None,\n fallback_pool: str = None,\n default_pools: List[str] = None,\n az_pools: List['LoadBalancerAzPoolsItem'] = None,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if dnszone_id is None:\n raise ValueError('dnszone_id must be provided')\n if lb_id is None:\n raise ValueError('lb_id must be provided')\n if az_pools is not None:\n az_pools = [convert_model(x) for x in az_pools]\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='update_load_balancer')\n headers.update(sdk_headers)\n\n data = {\n 'name': name,\n 'description': description,\n 'enabled': enabled,\n 'ttl': ttl,\n 'fallback_pool': fallback_pool,\n 'default_pools': default_pools,\n 'az_pools': az_pools\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n 
headers['Accept'] = 'application/json'\n\n url = '/instances/{0}/dnszones/{1}/load_balancers/{2}'.format(\n *self.encode_path_vars(instance_id, dnszone_id, lb_id))\n request = self.prepare_request(method='PUT',\n url=url,\n headers=headers,\n data=data)\n\n response = self.send(request)\n return response", "def post_loadbalancer_member_update(self, resource_id, resource_dict):\n pass", "def _v1_0_11111_loadbalancers_3133(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"algorithm\": \"ROUND_ROBIN\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3133\n response_body[\"loadBalancer\"][\"algorithm\"] = \"ROUND_ROBIN\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def _v1_0_11111_loadbalancers_3137(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv4\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3137\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv4\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def update_metadata(self, loadbalancer, metadata):\n return loadbalancer.update_metadata(metadata)", "def pre_loadbalancer_member_update(self, resource_id, resource_dict):\n pass", "def _v1_0_11111_loadbalancers_3130(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"HTTPS\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3130\n response_body[\"loadBalancer\"][\"protocol\"] = \"HTTPS\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def _v1_0_11111_loadbalancers_3135(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv2\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3135\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv2\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def _v1_0_11111_loadbalancers_3136(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv3\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3136\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv3\"\n return (\n httplib.OK,\n 
json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def network_load_balancer_update(event, context):\n print(\"NLB update Time remaining (MS):\", context.get_remaining_time_in_millis()) \n logger.info('Running network load balancer update')\n fwcontext = lib.get_ssl_context()\n total_fw_az = len(fw_azs)\n\n\n #Search for COMMIT in firewall table\n try:\n response = lib.firewall_table_get_all_in_state(stackname, region, 'COMMIT')\n for fw in response['Items']:\n nlb_port_mask = []\n for i in range (0, (num_nlb_port)/64):\n nlb_port_mask.append(0)\n\n # Get firewall Availabilty Zone index\n fw_az_index = fw_azs.index(fw['AvailZone'])\n set_nat = True\n # Find all the nlb in commit state\n nlb_response=lib.nlb_table_get_all_in_state(stackname, region, 'COMMIT')\n \n for nlb in nlb_response['Items']:\n nlb_port = nlb['TCPPort']\n nlb_ip = nlb['NLBIp']\n rule_mask_index = int((nlb_port-start_nlb_port)/64)\n nlb_bit = int((nlb_port-start_nlb_port)%64)\n nlb_port_mask[rule_mask_index] |= 1<<nlb_bit\n fw_rule_mask = long(fw['NLBRuleMask'+str(rule_mask_index)], 0)\n \n # Skip if it's configured on firewall\n if fw_rule_mask & (1 << nlb_bit) != 0:\n continue\n nlb_az_index = nlb['AZIndex']\n total_nlb_azs = nlb['TotalAZ']\n # Skip if NLB and firewall availabilty zone doesn't match\n if nlb_az_index%total_fw_az != fw_az_index:\n continue\n\n if lib.config_firewall_add_nat_rule(fwcontext, fw['MgmtIP'], KeyPANWFirewall, fw['UntrustIP'], nlb_port, nlb_ip, True, trust_def_gw[fw_az_index], False) == False:\n logger.error('Config firewall NAT rule failed for instance %s, IP %s, NLB-Port %d', fw['InstanceID'], fw['MgmtIP'], nlb_port)\n set_nat = False\n break\n \n if set_nat == True:\n # Find all the nlb deleted\n for rule_mask_index,item in enumerate(nlb_port_mask):\n fw_rule_mask = long(fw['NLBRuleMask'+str(rule_mask_index)], 0)\n if item & fw_rule_mask != fw_rule_mask:\n #Found NLB entry has been deleted\n for bit in range(0,64):\n if (fw_rule_mask & 1<<bit) != 0 and (item & 1<<bit) == 0:\n nlb_port = rule_mask_index*64+bit+start_nlb_port\n if lib.config_firewall_delete_nat_rule(fwcontext, fw['MgmtIP'], KeyPANWFirewall, nlb_port, True, True) == False:\n logger.error('Delete firewall NAT rule failed for instance %s, IP %s, NLB-Port %d', fw['InstanceID'], fw['MgmtIP'], nlb_port)\n set_nat = False\n if lib.config_firewall_commit(fwcontext, fw['MgmtIP'], KeyPANWFirewall) == False:\n logger.error('Commit firewall configuration failed for instance %s, IP %s', fw['InstanceID'], fw['MgmtIP'])\n else:\n for mask in nlb_port_mask:\n print('port mask committed in COMMIT: {}'.format(mask))\n lib.firewall_table_update_rule_mask(stackname, region, fw['InstanceID'], nlb_port_mask)\n lib.firewall_table_update_state(stackname, region, fw['InstanceID'], 'READY')\n except Exception as e:\n logger.exception(\"Exception occurred while processing firewalls in commit: {}\".format(e))\n\n #Retrieve message from NLB queue\n pre_port = -1\n fw_update = False\n for read in xrange(0, 10):\n try:\n logger.info('Calling to retrieve message from NLB queue..: {}'.format(NetworkLoadBalancerQueue))\n message_data_str, ts, rh = lib.get_from_nlb_queue(NetworkLoadBalancerQueue, 10, 0)\n if not message_data_str:\n logger.info('No message to retrieve from NLB queue.')\n break \n else:\n #Delete message from NLB queue\n lib.delete_message_from_queue(NetworkLoadBalancerQueue, rh)\n message_data = json.loads(message_data_str)\n logger.info(\"Data from sqs: {}\".format(message_data_str))\n if 
'MSG-TYPE' not in message_data or 'DNS-NAME' not in message_data:\n logger.error(\"Found invalid message in NetworkLoadBalancerQueue: {}\".format(message_data_str))\n continue\n nlb_type = message_data['MSG-TYPE']\n dns_name = message_data['DNS-NAME']\n if nlb_type == 'ADD-NLB':\n nlb_vpc = message_data['VPC-ID']\n nlb_name = message_data['NLB-NAME']\n dns_name = message_data['DNS-NAME']\n nlb_azs = message_data['AVAIL-ZONES']\n total_nlb_az = len(nlb_azs)\n nlb_port = lib.nlb_table_get_next_avail_port(stackname, region)\n for wait in xrange(0, 20):\n if pre_port == nlb_port and pre_port != 0:\n time.sleep(0.05)\n else:\n pre_port = nlb_port\n break\n if wait == 20:\n logger.error(\"Get next available port returns the same port %d, skip adding nlb %s\", nlb_port, nlb_name)\n continue\n else:\n logger.info(\"Wait for syncing dynamodb sleep count %d\", wait)\n \n if nlb_port == 0:\n logger.error(\"All ports number(%d-%d) has been used. Please deleting old network load balancer before adding more, skip adding nlb %s\", \n start_nlb_port, num_nlb_port+start_nlb_port-1, nlb_name)\n continue\n if total_nlb_az >= total_fw_az:\n for index,item in enumerate(nlb_azs):\n if 'NLB-IP' in item:\n nlb_ip = item['NLB-IP']\n else:\n logger.error(\"NLB IP is missing in ADD-NLB msg, ignore this entry\")\n continue\n nlb_subnet_id = item['SUBNET-ID']\n nlb_zone_name = item['ZONE-NAME']\n #Push NAT rules to all firewall in the same az \n if index > total_fw_az:\n continue\n \n response=lib.firewall_table_get_all_in_az_state(stackname, region, 'READY', fw_azs[index])\n for fw in response['Items']:\n fw_update = True\n if lib.config_firewall_add_nat_rule(fwcontext, fw['MgmtIP'], KeyPANWFirewall, fw['UntrustIP'], nlb_port, nlb_ip, True, trust_def_gw[index], False) == False:\n logger.error('Config firewall NAT rule failed for instance %s, ip %s, NLB-port %d', fw['InstanceID'], fw['MgmtIP'], nlb_port)\n lib.firewall_table_update_state(stackname, region, fw['InstanceID'], 'COMMIT')\n \n logger.info(\"Add NLB entry IP %s, Port %d in COMMIT state\", nlb_ip, nlb_port) \n lib.nlb_table_add_entry(stackname, region, nlb_ip, nlb_port, 'COMMIT', nlb_zone_name, nlb_subnet_id, total_nlb_az, index, dns_name, nlb_name)\n else:\n for index,item in enumerate(fw_azs):\n response=lib.firewall_table_get_all_in_az_state(stackname, region, 'READY', item)\n nlb_index = int(index%total_nlb_az)\n az = nlb_azs[nlb_index]\n nlb_ip = az['NLB-IP']\n nlb_subnet_id = az['SUBNET-ID']\n nlb_zone_name = az['ZONE-NAME']\n \n for fw in response['Items']:\n fw_update = True\n if lib.config_firewall_add_nat_rule(fwcontext, fw['MgmtIP'], KeyPANWFirewall, fw['UntrustIP'], nlb_port, nlb_ip, True, trust_def_gw[index], False) == False:\n logger.error('Config firewall NAT rule failed for instance %s, ip %s, NLB-port %d', fw['InstanceID'], fw['MgmtIP'], nlb_port)\n lib.firewall_table_update_state(stackname, region, fw['InstanceID'], 'COMMIT')\n \n if index < total_nlb_az:\n lib.nlb_table_add_entry(stackname, region, nlb_ip, nlb_port, 'COMMIT', nlb_zone_name, nlb_subnet_id, total_nlb_az, index, dns_name, nlb_name)\n elif nlb_type == 'DEL-NLB':\n #Deleting all entries belong to same DNSName\n\n print('Receive DEL-NLB msg from nlb queue')\n response = lib.nlb_table_get_entry_by_dnsname(stackname, region, dns_name)\n #Not found the NLB IP in the NLB table\n if response['Count'] == 0:\n logger.error(\"Receive NLB msg to delete non-existing NLB. 
DNS Name: %s\", dns_name)\n continue\n for nlb in response['Items']:\n nlb_port = nlb['TCPPort']\n nlb_ip = nlb['NLBIp']\n fw_response = lib.firewall_table_get_all_in_state(stackname, region, 'READY')\n \n for fw in fw_response['Items']:\n fw_az_index=fw_azs.index(fw['AvailZone'])\n nlb_az_index = fw_az_index%nlb['TotalAZ']\n # if NLB az index doens't mach firewall az index, skip\n if nlb['AZIndex'] != nlb_az_index:\n continue \n\n fw_update = True\n if lib.config_firewall_delete_nat_rule(fwcontext, fw['MgmtIP'], KeyPANWFirewall, nlb_port, True, False) == False:\n logger.error('Delete firewall NAT rule failed for instance %s, IP %s, NLB-Port %d', fw['InstanceID'], fw['MgmtIP'], nlb_port)\n lib.firewall_table_update_state(stackname, region, fw['InstanceID'], 'COMMIT')\n \n lib.nlb_table_delete_entry_by_dnsname(stackname, region, dns_name)\n \n else:\n logger.error('Receive invalid NLB message type for Network load balancer queue')\n\n except Exception as e:\n logger.exception(\"Exception occurred while retrieving data from NLB queue: {}\".format(e))\n \n # Perform commit once for all firewalls in READY state\n if fw_update == True:\n try:\n nlb_port_mask = []\n for i in range (0, (num_nlb_port)/64):\n nlb_port_mask.append(0)\n\n # Find all the nlb in commit state\n nlb_response=lib.nlb_table_get_all_in_state(stackname, region, 'COMMIT')\n print('nlb_response count: {}'.format(nlb_response['Count']))\n\n for nlb in nlb_response['Items']:\n nlb_port = nlb['TCPPort']\n nlb_ip = nlb['NLBIp']\n rule_mask_index = int((nlb_port-start_nlb_port)/64)\n nlb_bit = int((nlb_port-start_nlb_port)%64)\n nlb_port_mask[rule_mask_index] |= 1<<nlb_bit\n\n response=lib.firewall_table_get_all_in_state(stackname, region, 'READY')\n for fw in response['Items']:\n if lib.config_firewall_commit(fwcontext, fw['MgmtIP'], KeyPANWFirewall) == False:\n logger.error('Commit firewall configuration failed for instance %s, IP %s', fw['InstanceID'], fw['MgmtIP'])\n lib.firewall_table_update_state(stackname, region, fw['InstanceID'], 'COMMIT')\n else:\n for mask in nlb_port_mask:\n print('port mask commited in READY: {}'.format(mask))\n\n lib.firewall_table_update_rule_mask(stackname, region, fw['InstanceID'], nlb_port_mask)\n except Exception as e:\n logger.exception(\"Exception occurred while updating firewall rules: {}\".format(e))\n\n \n print(\"Time remaining return network_load_balancer_update (MS):\", context.get_remaining_time_in_millis())", "def _v1_0_11111_loadbalancers_3131(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"port\": 1337})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3131\n response_body[\"loadBalancer\"][\"port\"] = 1337\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def pre_loadbalancer_pool_update(self, resource_id, resource_dict):\n pass", "def update(self):\n\n self._state = get_balance(self.addresses)", "def process_load_balancer ( vpc_conn,\n ec2_conn,\n elb_conn,\n cloudwatch_conn,\n r53_conn,\n iam_conn,\n vpc,\n base_name,\n base_topicarn,\n app_name,\n params,\n aws_account_type,\n app_visibility = None,\n public_dns_cname = None,\n public_tcp_ports = [],\n app_tcp_ports = [],\n use_ssl = False,\n ssl_hostname = None\n ) :\n\n if not app_name :\n 
app_name = params[ 'app-name' ]\n\n if app_visibility == 'PUBLIC' :\n subnet_type = 'PRIVATE' # Public apps have app LB's that sit private. The PROXY LB is public.\n elif app_visibility == 'HBO' :\n subnet_type = 'PUBLIC' # HBO apps have app LB's that site public.\n elif app_visibility == 'PRIVATE' :\n subnet_type = 'PRIVATE'\n else :\n subnet_type = params[ 'subnet-type' ]\n\n if not public_dns_cname :\n public_dns_cname = params.get( 'public-dns-alias' )\n\n create = params.get( 'create', 'NO' )\n if create == 'YES':\n print \"Creating load balancer security group.\"\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n if not lb_secgrp :\n lb_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, app_name ),\n 'Controls access to the ' + app_name + ' LB' )\n remove_all_rules( ec2_conn, [ lb_secgrp ] , deep=True, base_name=base_name)\n ## reload the security group after removing the rules\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n \n health_check_port = params.get( 'health-check-port', 8080 )\n health_check_url = params.get( 'health-check-url' )\n if not health_check_url :\n health_check_url = '/' + app_name + '/ping.html'\n\n ## Figure out if we need to find the SSL cert.\n ssl_cert_arn = None\n if use_ssl :\n cert = get_aws_ssl_certificate( iam_conn, ssl_cert_name )\n if cert :\n ssl_cert_arn = cert.arn\n else :\n print \"ERROR: Use SSL was specified, but could not find certificate matching host: \" + ssl_cert_name\n sys.exit( 5 )\n\n ## Generate the correct listener rules\n listeners = [ ( 80, 8080, 'http' ) ] # Default listener\n if params.get( 'listener-rules' ) :\n listeners = []\n for listener_rule in params[ 'listener-rules' ] :\n if params[ 'protocol' ] == 'https' :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ],\n ssl_cert_arn) )\n else :\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ] ) )\n ##\n ## FIX: There is a bug here where the public ports are supposed to be set on the proxy if\n ## app_visibility is PUBLIC. 
Don't have time to fix/regression test now...\n ##\n elif len( public_tcp_ports ) == len( app_tcp_ports ) and len( public_tcp_ports ) > 0 :\n listeners = []\n for public_port, app_port in zip( public_tcp_ports, app_tcp_ports ) :\n if public_port == 443 :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( public_port, app_port, 'https', ssl_cert_arn ) )\n else :\n listeners.append( ( public_port, app_port, 'http' ) )\n\n\n print \"Creating load balancer.\"\n elb = create_elb( elb_conn,\n get_elb_name( base_name, app_name ),\n get_vpc_subnets( vpc_conn, vpc, subnet_type ),\n listeners,\n lb_secgrp,\n health_check_port,\n health_check_url,\n subnet_type == 'PUBLIC' )\n \n elb = find_elb(elb_conn, elb.name)\n \n if params.get( 'monitors' ) :\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, params[ 'monitors' ] )\n\n if subnet_type == 'PUBLIC' :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, elb.dns_name )\n else :\n dns_alias = create_internal_elb_dns_name( base_name, app_name )\n print \"Configuring DNS name for load balancer: \" + dns_alias\n set_dns_cname( r53_conn, dns_alias, elb.dns_name )\n\n if app_visibility == 'HBO' :\n for port in public_tcp_ports :\n lb_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = hbo_cidr_list ) \n\n elif app_visibility == 'PUBLIC' :\n print \"Creating proxy load balancer.\"\n proxy_type = app_name + '-PX'\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n if not proxy_secgrp :\n proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' servers.' )\n \n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n \n if not lb_proxy_secgrp :\n lb_proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' load balancer.' 
)\n\n remove_all_rules( ec2_conn, [ lb_proxy_secgrp, proxy_secgrp ], deep=True, base_name=base_name) \n ## reload the security group after removing the rules\n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n\n \n ##\n ## FIX: In reality, we need to set the group rules between lb_proxy and proxy to match\n ## the listener ports that were passed in/configured.\n ##\n grant_ssh_access( ec2_conn, [ proxy_secgrp ], find_group( ec2_conn, base_name, 'NAT' ) )\n \n \n ## proxy server port is always 80\n ## updated by yliu, 2014/6/13\n ##if use_ssl :\n ## proxy_port = 443\n ##else :\n ## proxy_port = 80\n proxy_port = 80\n\n ## backend elb port that the proxy server passes request to \n if use_ssl :\n proxy_to_elb_port = 443\n else :\n proxy_to_elb_port = 80\n\n grant_grp_access( ec2_conn, [ lb_proxy_secgrp ], proxy_secgrp, proxy_port )\n grant_grp_access( ec2_conn, [ proxy_secgrp ], lb_secgrp, proxy_to_elb_port )\n for port in public_tcp_ports :\n lb_proxy_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = all_ip_cidr ) \n\n proxy_listeners = [ ( 80, 80, 'http' ) ]\n if use_ssl :\n proxy_listeners = [ ( 443, proxy_port, 'https', ssl_cert_arn ) ]\n\n proxy_elb = create_elb( elb_conn,\n get_elb_name( base_name, proxy_type ),\n get_vpc_subnets( vpc_conn, vpc, 'PUBLIC' ),\n proxy_listeners,\n lb_proxy_secgrp,\n proxy_port,\n '/robots.txt',\n True )\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, proxy_lb_monitor_rules )\n\n if public_dns_cname :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, proxy_elb.dns_name )\n else :\n public_dns_cname = ''\n\n print \"Creating proxy instances.\"\n proxy_ami = get_ami_by_name( ec2_conn, proxy_ami_name )\n subnets = get_vpc_subnets( vpc_conn, vpc, 'PRIVATE' )\n\n ## direct proxy server to access backend elb over given protocol\n ## added by yliu, 2014/6/13\n if use_ssl :\n app_elb_protocol = 'https'\n else :\n app_elb_protocol = 'http'\n \n proxy_userdata = get_proxy_userdata( public_dns_cname, elb.dns_name, app_elb_protocol, app_name )\n proxy_instances = []\n \n proxy_keypair = get_keypair_name( aws_account_type, vpc.region.name, \"APACHE\" )\n \n for subnet in subnets : \n instance = launch_instance_vpc( ec2_conn,\n proxy_ami,\n base_name = base_name,\n instance_type = proxy_type,\n keypair = proxy_keypair,\n machine_type = 'm3.xlarge',\n security_group_id = proxy_secgrp.id ,\n subnet_id = subnet.id,\n user_data = proxy_userdata,\n public_ip = False )\n proxy_instances.append( instance )\n\n print \"Setting alarms on the proxy\"\n add_monitors_to_instance( cloudwatch_conn, base_name, instance.id, 'PROXY', base_topicarn, proxy_monitor_rules )\n \n proxy_instance_ids = [ i.id for i in proxy_instances ]\n\n print \"Waiting for proxy instances to be ready\"\n aws_waits( ec2_conn.get_only_instances, proxy_instance_ids )\n\n print \"Adding the new proxy instances into the load balancer.\"\n \n status = swap_elb_instances( elb_conn = elb_conn,\n elb = proxy_elb,\n new_instance_ids = proxy_instance_ids,\n terminate_old_instances = True,\n ec2_conn = ec2_conn,\n cloudwatch_conn = cloudwatch_conn,\n swap_smoothly = False )\n\n else :\n elb = find_elb( elb_conn, get_elb_name( base_name, app_name ) )\n print \"Processing load-balancer actions.\"\n for action_param in params.get( 'actions', [] ) :\n if action_param[ 'type' ] == 
'RESTART_INSTANCES' :\n restart_elb_instances( ec2_conn, elb_conn, elb, params.get( 'restart-smoothly', 'YES' ) == 'YES' )\n\n lb_secgrp = find_group( ec2_conn, base_name, get_lb_secgrp_type( app_name ) )\n dns_alias = None\n\n return ( elb, lb_secgrp, dns_alias )", "def post_loadbalancer_pool_update(self, resource_id, resource_dict):\n pass", "def edit(env, identifier, listener, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n new_listener = {\n 'listenerUuid': listener\n }\n\n arg_to_option = {\n 'frontprotocol': 'frontendProtocol',\n 'backprotocol': 'backendProtocol',\n 'frontport': 'frontendPort',\n 'backport': 'backendPort',\n 'method': 'loadBalancingMethod',\n 'connections': 'maxConn',\n 'sticky': 'sessionType',\n 'clienttimeout': 'clientTimeout',\n 'sslcert': 'tlsCertificateId'\n }\n\n for key, value in args.items():\n if value:\n new_listener[arg_to_option[key]] = value\n\n try:\n mgr.add_lb_listener(uuid, new_listener)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def get_update_load_balancer_flow(self, topology):\n\n update_LB_flow = linear_flow.Flow(constants.UPDATE_LOADBALANCER_FLOW)\n update_LB_flow.add(lifecycle_tasks.LoadBalancerToErrorOnRevertTask(\n requires=constants.LOADBALANCER))\n update_LB_flow.add(vthunder_tasks.VthunderInstanceBusy(\n requires=a10constants.COMPUTE_BUSY))\n update_LB_flow.add(a10_database_tasks.GetVThunderByLoadBalancer(\n requires=constants.LOADBALANCER,\n provides=a10constants.VTHUNDER))\n if topology == constants.TOPOLOGY_ACTIVE_STANDBY:\n update_LB_flow.add(vthunder_tasks.GetMasterVThunder(\n name=a10constants.GET_MASTER_VTHUNDER,\n requires=a10constants.VTHUNDER,\n provides=a10constants.VTHUNDER))\n update_LB_flow.add(a10_database_tasks.MarkVThunderStatusInDB(\n requires=a10constants.VTHUNDER,\n inject={\"status\": constants.PENDING_UPDATE}))\n update_LB_flow.add(vthunder_tasks.SetupDeviceNetworkMap(\n requires=a10constants.VTHUNDER,\n provides=a10constants.VTHUNDER))\n update_LB_flow.add(network_tasks.ApplyQos(\n requires=(constants.LOADBALANCER, constants.UPDATE_DICT)))\n # update_LB_flow.add(amphora_driver_tasks.ListenersUpdate(\n # requires=[constants.LOADBALANCER, constants.LISTENERS]))\n # post_create_lb_flow.add(handle_vrid_for_loadbalancer_subflow())\n if CONF.a10_global.handle_vrid:\n update_LB_flow.add(self.handle_vrid_for_loadbalancer_subflow())\n update_LB_flow.add(database_tasks.GetAmphoraeFromLoadbalancer(\n requires=constants.LOADBALANCER_ID,\n provides=constants.AMPHORA))\n update_LB_flow.add(a10_database_tasks.GetLoadbalancersInProjectBySubnet(\n requires=[constants.SUBNET, a10constants.PARTITION_PROJECT_LIST],\n provides=a10constants.LOADBALANCERS_LIST))\n update_LB_flow.add(a10_database_tasks.CheckForL2DSRFlavor(\n rebind={a10constants.LB_RESOURCE: a10constants.LOADBALANCERS_LIST},\n provides=a10constants.L2DSR_FLAVOR))\n update_LB_flow.add(a10_database_tasks.CountMembersInProjectBySubnet(\n requires=[constants.SUBNET, a10constants.PARTITION_PROJECT_LIST],\n provides=a10constants.MEMBER_COUNT))\n update_LB_flow.add(vthunder_tasks.UpdateL2DSR(\n requires=(constants.SUBNET, constants.AMPHORA,\n a10constants.MEMBER_COUNT, a10constants.L2DSR_FLAVOR)))\n update_LB_flow.add(a10_database_tasks.GetFlavorData(\n rebind={a10constants.LB_RESOURCE: constants.LOADBALANCER},\n provides=constants.FLAVOR_DATA))\n update_LB_flow.add(virtual_server_tasks.UpdateVirtualServerTask(\n 
requires=(constants.LOADBALANCER, a10constants.VTHUNDER,\n constants.FLAVOR_DATA, constants.UPDATE_DICT)))\n update_LB_flow.add(database_tasks.UpdateLoadbalancerInDB(\n requires=[constants.LOADBALANCER, constants.UPDATE_DICT]))\n update_LB_flow.add(database_tasks.MarkLBActiveInDB(\n requires=constants.LOADBALANCER))\n update_LB_flow.add(vthunder_tasks.WriteMemory(\n requires=a10constants.VTHUNDER))\n update_LB_flow.add(a10_database_tasks.MarkVThunderStatusInDB(\n name=\"pending_update_to_active\",\n requires=a10constants.VTHUNDER,\n inject={\"status\": constants.ACTIVE}))\n update_LB_flow.add(a10_database_tasks.SetThunderUpdatedAt(\n requires=a10constants.VTHUNDER))\n return update_LB_flow", "def post_loadbalancer_healthmonitor_update(self, resource_id, resource_dict):\n pass", "def delete_balancer(ctx):\n if self.balancer_exists():\n self.delete_balancer()\n ctx.info('Successfully deleted load balancer {}:'.format(self.get_balancer_name()))\n else:\n ctx.info('Load balancer {} does not exist, nothing to delete.'.format(\n self.get_balancer_name()\n ))", "def get_update_rack_load_balancer_flow(self, vthunder_conf, device_dict, topology):\n\n update_LB_flow = linear_flow.Flow(constants.UPDATE_LOADBALANCER_FLOW)\n update_LB_flow.add(lifecycle_tasks.LoadBalancerToErrorOnRevertTask(\n requires=constants.LOADBALANCER))\n\n # device-name flavor support\n update_LB_flow.add(a10_database_tasks.GetFlavorData(\n rebind={a10constants.LB_RESOURCE: constants.LOADBALANCER},\n provides=constants.FLAVOR_DATA))\n update_LB_flow.add(vthunder_tasks.GetVthunderConfByFlavor(\n inject={a10constants.VTHUNDER_CONFIG: vthunder_conf,\n a10constants.DEVICE_CONFIG_DICT: device_dict},\n requires=(constants.LOADBALANCER, a10constants.VTHUNDER_CONFIG,\n a10constants.DEVICE_CONFIG_DICT, constants.FLAVOR_DATA),\n provides=(a10constants.VTHUNDER_CONFIG, a10constants.USE_DEVICE_FLAVOR)))\n\n update_LB_flow.add(a10_database_tasks.GetVThunderByLoadBalancer(\n requires=constants.LOADBALANCER,\n provides=a10constants.VTHUNDER))\n if topology == constants.TOPOLOGY_ACTIVE_STANDBY:\n update_LB_flow.add(vthunder_tasks.GetMasterVThunder(\n name=a10constants.GET_MASTER_VTHUNDER,\n requires=a10constants.VTHUNDER,\n provides=a10constants.VTHUNDER))\n update_LB_flow.add(a10_database_tasks.MarkVThunderStatusInDB(\n requires=a10constants.VTHUNDER,\n inject={\"status\": constants.PENDING_UPDATE}))\n update_LB_flow.add(vthunder_tasks.SetupDeviceNetworkMap(\n requires=a10constants.VTHUNDER,\n provides=a10constants.VTHUNDER))\n update_LB_flow.add(network_tasks.ApplyQos(\n requires=(constants.LOADBALANCER, constants.UPDATE_DICT)))\n if CONF.a10_global.handle_vrid:\n update_LB_flow.add(self.handle_vrid_for_loadbalancer_subflow())\n\n update_LB_flow.add(virtual_server_tasks.UpdateVirtualServerTask(\n requires=(constants.LOADBALANCER, a10constants.VTHUNDER,\n constants.FLAVOR_DATA, constants.UPDATE_DICT)))\n update_LB_flow.add(database_tasks.UpdateLoadbalancerInDB(\n requires=[constants.LOADBALANCER, constants.UPDATE_DICT]))\n if CONF.a10_global.network_type == 'vlan':\n update_LB_flow.add(vthunder_tasks.TagInterfaceForLB(\n requires=[constants.LOADBALANCER,\n a10constants.VTHUNDER]))\n update_LB_flow.add(database_tasks.MarkLBActiveInDB(\n requires=constants.LOADBALANCER))\n update_LB_flow.add(vthunder_tasks.WriteMemory(\n requires=a10constants.VTHUNDER))\n update_LB_flow.add(a10_database_tasks.MarkVThunderStatusInDB(\n name=\"pending_update_to_active\",\n requires=a10constants.VTHUNDER,\n inject={\"status\": constants.ACTIVE}))\n 
update_LB_flow.add(a10_database_tasks.SetThunderUpdatedAt(\n requires=a10constants.VTHUNDER))\n return update_LB_flow", "def change_type(self, load_balancer_type):\n # type: (Union[LoadBalancerType,BoundLoadBalancerType]) -> BoundAction\n return self._client.change_type(self, load_balancer_type)", "def put(self, request, member_id, pool_id):\n data = request.DATA\n conn = get_sdk_connection(request)\n monitor_address = data.get('monitor_address')\n member = conn.load_balancer.update_member(\n member_id, pool_id, weight=data.get('weight'),\n monitor_address=monitor_address if monitor_address else None,\n monitor_port=data.get('monitor_port'),\n admin_state_up=data.get('admin_state_up'),\n backup=data.get('backup', False),\n name=data.get('name'),\n )\n return _get_sdk_object_dict(member)", "def update(cls, client, resource) :\n\t\ttry :\n\t\t\tif type(resource) is not list :\n\t\t\t\tupdateresource = lbprofile()\n\t\t\t\tupdateresource.lbprofilename = resource.lbprofilename\n\t\t\t\tupdateresource.dbslb = resource.dbslb\n\t\t\t\tupdateresource.processlocal = resource.processlocal\n\t\t\t\tupdateresource.httponlycookieflag = resource.httponlycookieflag\n\t\t\t\tupdateresource.cookiepassphrase = resource.cookiepassphrase\n\t\t\t\tupdateresource.usesecuredpersistencecookie = resource.usesecuredpersistencecookie\n\t\t\t\tupdateresource.useencryptedpersistencecookie = resource.useencryptedpersistencecookie\n\t\t\t\treturn updateresource.update_resource(client)\n\t\t\telse :\n\t\t\t\tif (resource and len(resource) > 0) :\n\t\t\t\t\tupdateresources = [ lbprofile() for _ in range(len(resource))]\n\t\t\t\t\tfor i in range(len(resource)) :\n\t\t\t\t\t\tupdateresources[i].lbprofilename = resource[i].lbprofilename\n\t\t\t\t\t\tupdateresources[i].dbslb = resource[i].dbslb\n\t\t\t\t\t\tupdateresources[i].processlocal = resource[i].processlocal\n\t\t\t\t\t\tupdateresources[i].httponlycookieflag = resource[i].httponlycookieflag\n\t\t\t\t\t\tupdateresources[i].cookiepassphrase = resource[i].cookiepassphrase\n\t\t\t\t\t\tupdateresources[i].usesecuredpersistencecookie = resource[i].usesecuredpersistencecookie\n\t\t\t\t\t\tupdateresources[i].useencryptedpersistencecookie = resource[i].useencryptedpersistencecookie\n\t\t\t\tresult = cls.update_bulk_request(client, updateresources)\n\t\t\treturn result\n\t\texcept Exception as e :\n\t\t\traise e", "def balancer():\n pass" ]
[ "0.7603567", "0.75081575", "0.7132624", "0.6622582", "0.6482295", "0.5973013", "0.59142834", "0.5895011", "0.58677447", "0.58624214", "0.5861494", "0.58360505", "0.58234656", "0.5724865", "0.57089597", "0.5703134", "0.5689305", "0.56433725", "0.56244296", "0.5590786", "0.5572048", "0.5493524", "0.54502857", "0.5383067", "0.5370259", "0.53668994", "0.53437865", "0.53425485", "0.5332219", "0.53149384" ]
0.7929236
0
Update a l7 policy.
def update_l7_policy(request, **kwargs): data = request.DATA l7_policy_id = data['l7policy'].get('id') conn = get_sdk_connection(request) l7_policy = conn.load_balancer.update_l7_policy( action=data['l7policy']['action'], admin_state_up=data['l7policy'].get('admin_state_up'), description=data['l7policy'].get('description'), l7_policy=l7_policy_id, name=data['l7policy'].get('name'), position=data['l7policy'].get('position'), redirect_pool_id=data['l7policy'].get('redirect_pool_id'), redirect_url=data['l7policy'].get('redirect_url'), ) return _get_sdk_object_dict(l7_policy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def put(self, request, l7_rule_id, l7_policy_id):\n kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}\n update_l7_rule(request, **kwargs)", "def update_policy(self, *args, **kwargs):\r\n pass", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def update_policy(self):\n pass", "def UpdatePolicy(self, request, global_params=None):\n config = self.GetMethodConfig('UpdatePolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def update_Policy(self,inputpolicy):\n \n policyob = self.SD_Map.retrieve_ob(inputpolicy)\n policyob.values[-1] = self.PolicyDicts[inputpolicy][self.translate(self.policy_option_vars[inputpolicy].get(),\n input_language = self.language,\n output_language = 'english')]", "def policy_update_fn(self, data: Dict[str, Any], result: Dict[str, Any]) -> None:", "def update_firewall_policy(self, firewall_policy, body=None):\r\n return self.put(self.firewall_policy_path % (firewall_policy),\r\n body=body)", "def update_apic(self):\n return self.client.policy.update(policyList=self.policy_list.response)", "def test_update_firewall_policy(self):\r\n resource = 'firewall_policy'\r\n cmd = firewallpolicy.UpdateFirewallPolicy(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_policy(ranger_url, policy_id, policy_data, admin_username_password):\n\n url = format(\"{ranger_url}/service/public/v2/api/policy/{policy_id}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url, json.dumps(policy_data))\n request.get_method = lambda: 'PUT'\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n if response_code == 200:\n Logger.info(format(\"Successfully updated policy in Ranger Admin\"))\n return response_code\n else:\n Logger.error(format(\"Unable to update policy in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while updating policy Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while updating policy. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while updating policy\")\n except Exception as err:\n raise Fail(format(\"Error while updating policy. 
Reason = {err}\"))", "def update_policy(policy_id):\n old_policy = PolicyService.get_policy_by_id(policy_id)\n if old_policy is None:\n abort(404)\n new_policy = PolicyService.update_policy_by_id(policy_id, json_to_policy(request.json))\n if new_policy is None:\n abort(406)\n return new_policy.__dict__", "def test_update_ikepolicy(self):\r\n resource = 'ikepolicy'\r\n cmd = ikepolicy.UpdateIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def Update(self,\n fp_id=None,\n only_generate_request=False,\n firewall_policy=None,\n batch_mode=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Patch(\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)[2])\n return self.WaitOperation(\n op_res, message='Updating the organization firewall policy.')", "def rbac_policy_update(request, policy_id, **kwargs):\n body = {'rbac_policy': kwargs}\n rbac_policy = neutronclient(request).update_rbac_policy(\n policy_id, body=body).get('rbac_policy')\n return RBACPolicy(rbac_policy)", "def update(self,\n draft_id,\n policy_draft,\n ):\n return self._invoke('update',\n {\n 'draft_id': draft_id,\n 'policy_draft': policy_draft,\n })", "def device_update_policy(self, device_update_policy):\n\n self._device_update_policy = device_update_policy", "def update_policy(self):\n raise UnityTrainerException(\"The update_model method was not implemented.\")", "def Update(self,\n priority=None,\n firewall_policy=None,\n firewall_policy_rule=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.PatchRule(\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)[2])\n return self.WaitOperation(\n op_res, message='Updating a rule in the organization firewall policy.')", "def setPolicy(self, value):\n return self._set(policy=value)", "def update_policy(self, policy, inverse_policy=None):\n self.make_T_policy_matrix(policy)\n self.inverse_dynamics_by_time = dict()\n self.policy = policy\n self.inverse_policy = inverse_policy", "def device_update_policy(self, device_ids, policy_id):\n return self._device_action(device_ids, \"UPDATE_POLICY\", {\"policy_id\": policy_id})", "def update_policy_values(policy_lookup, board, state, player, action, next_state, reward):\n\t\n\t# compute total expected reward including future rewards\n\tif board.check_end():\n\t\texpected = reward\n\telse:\n\t\tif player == 1:\n\t\t\texpected = reward + discount * min_value(policy_lookup, next_state, 2)\n\t\telif player == 2:\n\t\t\texpected = reward + discount * max_value(policy_lookup, next_state, 1)\n\t# get current policy action value\n\tpolicy_value = get_policy_value(policy_lookup, state, player, action)\n\t# update policy action value\n\tpolicy_lookup[(state, player)][action] += learning_rate * (expected - 
policy_value)", "def update_policy(self):\n self._sess.run(self._hard_copy_to_target_op);", "def modify_audit_policy(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_audit_policy_with_options(request, runtime)", "def add_grading_policy(self, grading_policy):\r\n\r\n self.course.grading_policy = grading_policy\r\n store = editable_modulestore()\r\n store.update_item(self.course, '**replace_user**')\r\n self.refresh_course()", "def update(self,\n dns_forwarder_zone_id,\n policy_dns_forwarder_zone,\n ):\n return self._invoke('update',\n {\n 'dns_forwarder_zone_id': dns_forwarder_zone_id,\n 'policy_dns_forwarder_zone': policy_dns_forwarder_zone,\n })", "def test_update_hyperflex_ucsm_config_policy(self):\n pass" ]
[ "0.8157796", "0.7380047", "0.7321923", "0.72552663", "0.72434366", "0.6903672", "0.687366", "0.660481", "0.63835156", "0.6328471", "0.6318798", "0.6145343", "0.59935164", "0.5969864", "0.5870234", "0.5857865", "0.579704", "0.5761515", "0.57155144", "0.55789167", "0.5576889", "0.55711585", "0.55577207", "0.5552268", "0.55461323", "0.55092216", "0.543102", "0.54255897", "0.5423578", "0.54108787" ]
0.83306193
0
Update a l7 rule.
def update_l7_rule(request, **kwargs): data = request.DATA l7_rule_id = data['l7rule'].get('id') conn = get_sdk_connection(request) l7_rule = conn.load_balancer.update_l7_rule( admin_state_up=data['l7rule'].get('admin_state_up'), compare_type=data['l7rule']['compare_type'], invert=data['l7rule'].get('invert'), key=data['l7rule'].get('key'), l7_policy=kwargs['l7_policy_id'], l7rule=l7_rule_id, type=data['l7rule']['type'], rule_value=data['l7rule']['rule_value'], ) return _get_sdk_object_dict(l7_rule)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, request, l7_rule_id, l7_policy_id):\n kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}\n update_l7_rule(request, **kwargs)", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def update_rules():\n update_all_rules()\n return \"OK\"", "def create_l7_rule(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.create_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def update_firewall_rule(self, firewall_rule, body=None):\r\n return self.put(self.firewall_rule_path % (firewall_rule), body=body)", "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def test_update_firewall_rule(self):\r\n resource = 'firewall_rule'\r\n cmd = firewallrule.UpdateFirewallRule(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def post(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n return create_l7_rule(request, **kwargs)", "def apply_ruleset(self, ruleset):\n updates = [self._get_lexicon_update(ruleset['lexicon'])]\n updates += ruleset['rules']\n self.apply_updates(updates)", "def _UpdateAclRule(self, entry):\n\n print 'Update Acl rule: %s' % (entry.GetEditLink().href)\n roleValue = \"http://schemas.google.com/gCal/2005#%s\" % (\"read\")\n entry.role = gdata.acl.data.AclRole(value=roleValue)\n returned_rule = self.cal_client.Update(entry)", "def test_update_rule(self):\n pass", "def update_resolver_rule(ResolverRuleId=None, Config=None):\n pass", "def cloudflare_waf_firewall_rule_update_request(self, rule_id: str, filter_id: str, zone_id: str, action: str,\n description: str = None, products: List[str] = None, paused: bool = None,\n priority: int = None, ref: str = None) -> Dict[str, Any]:\n params = remove_empty_elements({\n 'id': rule_id,\n 'description': description,\n 'products': products,\n 'action': action,\n 'paused': paused,\n 'priority': priority,\n 'ref': ref,\n 'filter': {'id': filter_id}\n })\n\n return self._http_request(\n method='PUT',\n url_suffix=f'zones/{zone_id}/firewall/rules',\n json_data=[params])", "def set_rule(self, rule):\n self.rule.load_state_dict(rule, strict=True)", "def put(self, request, *args, **kwargs):\n try:\n new_rule = json.loads(request.body)\n except Exception as e:\n return error('unable to marshal json', str(e))\n try:\n validate_rule_json(new_rule)\n except RuleValidationException as e:\n return error('error validating json', str(e))\n rule = Rule()\n rule.populate(new_rule)\n rule.save()\n return success(rule.summary())", "def 
update_rules(self: object,\n body: dict,\n cs_username: str = None # pylint: disable=W0613 # cs_username is deprecated\n ) -> dict:\n # [PATCH] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/custom-ioa/update-rules\n return process_service_request(\n calling_object=self,\n endpoints=Endpoints,\n operation_id=\"update_rules\",\n body=body\n )", "def edit_rule(self, value, new=False):\n\n if value >= 0 or new:\n if new:\n name = None\n rule = {}\n else:\n name = self.keys[value]\n rule = self.rules[value]\n text = '\"\"\"\\nIf you don\\'t need a setting, just leave it as None.\\n'\n text += 'When the rule is parsed, the default will be used.\\n'\n text += 'Each variable is evaluated separately, so you cannot substitute variables '\n text += 'in other variables.\\n\"\"\"\\n'\n text += '\\n# name (str): Rule name. Required.\\n'\n text += self.format_string('name', name)\n text += '\\n# find (str): Regular expression pattern or literal string.\\n'\n text += '# Use (?i) for case insensitive. Use (?s) for dotall.\\n'\n text += '# See https://docs.python.org/3.4/library/re.html for more info on regex flags.\\n'\n text += '# Required unless \"scope\" is defined.\\n'\n text += self.format_regex_string('find', rule.get('find'))\n text += '\\n# replace (str - default=r\\'\\\\g<0>\\'): Replace pattern.\\n'\n text += self.format_regex_string('replace', rule.get('replace'))\n text += '\\n# literal (bool - default=False): Preform a non-regex, literal search and replace.\\n'\n text += self.format_bool('literal', rule.get('literal'))\n text += '\\n# literal_ignorecase (bool - default=False): Ignore case when \"literal\" is true.\\n'\n text += self.format_bool('literal_ignorecase', rule.get('literal_ignorecase'))\n text += '\\n# scope (str): Scope to search for and to apply optional regex to.\\n'\n text += '# Required unless \"find\" is defined.\\n'\n text += self.format_string('scope', rule.get('scope'))\n text += '\\n# scope_filter ([str] - default=[]): An array of scope qualifiers for the match.\\n'\n text += '# Only used when \"scope\" is not defined.\\n'\n text += '#\\n'\n text += '# - Any instance of scope qualifies match: scope.name\\n'\n text += '# - Entire match of scope qualifies match: !scope.name\\n'\n text += '# - Any instance of scope disqualifies match: -scope.name\\n'\n text += '# - Entire match of scope disqualifies match: -!scope.name\\n'\n text += self.format_array('scope_filter', rule.get('scope_filter'))\n text += '\\n# greedy (bool - default=True): Apply action to all instances (find all).\\n'\n text += '# Used when \"find\" is defined.\\n'\n text += self.format_bool('greedy', rule.get('greedy'))\n text += '\\n# greedy_scope (bool - default=True): Find all the scopes specified by \"scope.\"\\n'\n text += self.format_bool('greedy_scope', rule.get('greedy_scope'))\n text += '\\n# format_replace (bool - default=False): Use format string style replace templates.\\n'\n text += '# Works only for Regex (with and without Backrefs) and Re (with Backrefs).\\n'\n text += '# See https://facelessuser.github.io/backrefs/usage/#format-replacements for more info.\\n'\n text += self.format_bool('format_replace', rule.get('format_replace'))\n text += '\\n# selection_inputs (bool -default=False): Use selection for inputs into find pattern.\\n'\n text += '# Global setting \"selection_only\" must be disabled for this to work.\\n'\n text += self.format_bool('selection_inputs', rule.get('selection_inputs'))\n text += '\\n# multi_pass (bool - default=False): Perform multiple sweeps on the 
scope region to find\\n'\n text += '# and replace all instances of the regex when regex cannot be formatted to find\\n'\n text += '# all instances. Since a replace can change a scope, this can be useful.\\n'\n text += self.format_bool('multi_pass', rule.get('multi_pass'))\n text += '\\n# plugin (str): Define replace plugin for more advanced replace logic.\\n'\n text += self.format_string('plugin', rule.get('plugin'))\n text += '\\n# args (dict): Arguments for \\'plugin\\'.\\n'\n text += self.format_dict('args', rule.get('args'))\n text += '\\n# ----------------------------------------------------------------------------------------\\n'\n text += '# test: Here you can setup a test command. This is not saved and is just used for this session.\\n'\n text += '# - replacements ([str]): A list of regex rules to sequence together.\\n'\n text += '# - find_only (bool): Highlight current find results and prompt for action.\\n'\n text += '# - action (str): Apply the given action (fold|unfold|mark|unmark|select).\\n'\n text += '# This overrides the default replace action.\\n'\n text += '# - options (dict): optional parameters for actions (see documentation for more info).\\n'\n text += '# - key (str): Unique name for highlighted region.\\n'\n text += '# - scope (str - default=\"invalid\"): Scope name to use as the color.\\n'\n text += '# - style (str - default=\"outline\"): Highlight style (solid|underline|outline).\\n'\n text += '# - multi_pass (bool): Repeatedly sweep with sequence to find all instances.\\n'\n text += '# - no_selection (bool): Overrides the \"selection_only\" setting and forces no selections.\\n'\n text += '# - regex_full_file_with_selections (bool): Apply regex search to full file then apply\\n'\n text += '# action to results under selections.\\n'\n text += textwrap.dedent(\n \"\"\"\\\n test = {\n \"replacements\": [%s],\n \"find_only\": True,\n \"action\": None,\n \"options\": {},\n \"multi_pass\": False,\n \"no_selection\": False,\n \"regex_full_file_with_selections\": False\n }\n \"\"\" % (self.simple_format_string(name) if name is not None else '')\n )\n\n replace_view = self.window.create_output_panel('reg_replace')\n replace_view.run_command('reg_replace_panel_insert', {'text': text})\n for ext in ST_LANGUAGES:\n highlighter = sublime.load_settings(\n 'reg_replace.sublime-settings'\n ).get('python_highlighter', 'Python/Python')\n highlighter = 'Packages/' + highlighter + ext\n try:\n sublime.load_resource(highlighter)\n replace_view.set_syntax_file(highlighter)\n break\n except Exception:\n pass\n replace_view.settings().set('gutter', True)\n replace_view.settings().set('line_numbers', True)\n replace_view.settings().set('reg_replace.edit_view', True)\n replace_view.settings().set('bracket_highlighter.bracket_string_escape_mode', 'regex')\n replace_view.settings().set('regreplace.name', name)\n replace_view.sel().clear()\n replace_view.sel().add(sublime.Region(0, 0))\n self.window.run_command(\"show_panel\", {\"panel\": \"output.reg_replace\"})\n sublime.set_timeout(lambda w=self.window, v=replace_view: w.focus_view(v), 100)", "def cloudflare_waf_firewall_rule_update_command(client: Client, args: Dict[str, Any]) -> CommandResults:\n rule_id = args['id']\n zone_id = args.get('zone_id', client.zone_id)\n action = args.get('action')\n filter_id = args.get('filter_id')\n products = args.get('products')\n description = args.get('description')\n paused = arg_to_boolean(args.get('paused')) # type: ignore\n priority = arg_to_number(args.get('priority'))\n ref = args.get('ref')\n\n 
response = client.cloudflare_waf_firewall_rule_update_request(\n rule_id, filter_id, zone_id, action, description=description, # type: ignore\n products=products, paused=paused, priority=priority, ref=ref)\n\n output = response['result']\n\n return CommandResults(\n readable_output=f'Firewall rule {rule_id} was successfully updated.',\n outputs_prefix='CloudflareWAF.FirewallRule',\n outputs_key_field='id',\n outputs=output,\n raw_response=response\n )", "def edit_rules():\n my_rules = rules.get_all_rules()\n my_rules.append(DEFAULT_RULE)\n\n selected_rule_id = select(\n label=\"Existing rules\",\n options=[{\"label\": rule[\"name\"], \"value\": rule[\"id\"]} for rule in my_rules],\n )\n # Rules have unique IDs from the database:\n logging.info(f\"selected_rule: {selected_rule_id}\")\n use_rule = [r for r in my_rules if r[\"id\"] == int(selected_rule_id)][0]\n updated_rule = input_group(\n \"Rule editing\",\n [\n input(\n \"name\", type=TEXT, name=\"name\", value=use_rule[\"name\"], required=True\n ), # Need ttextarea(\n textarea(\n \"Rule names\",\n name=\"rule\",\n rows=10,\n code={\n \"mode\": \"python\", # code language\n \"theme\": \"darcula\", # Codemirror theme. Visit https://codemirror.net/demo/theme.html#cobalt to get more themes\n },\n value=f\"\"\"{use_rule['rule']}\\n\"\"\",\n ),\n actions(\n \"actions\",\n [\n # {\"label\": \"test\", \"value\": \"test\"},\n {\"label\": \"save\", \"value\": \"save\"},\n ],\n name=\"action\",\n help_text=\"Save\",\n ),\n ],\n )\n if updated_rule is not None:\n rl = dict(updated_rule)\n if rl[\"action\"] == \"save\":\n rule_info = rules.save_rule(\n rl[\"name\"], rl[\"rule\"], selected_rule_id\n )\n put_row(put_text(\"Rule\"))\n put_row(put_code(pprint.pformat(rule_info, indent=1)))\n # Use webhook_info's ID to add/update the extractor\n\n put_text(f\"The rule added is: {updated_rule}\")", "def addRule(self, ruleLine):\n cols = ruleLine.split(' ')\n positionNumber = int(cols[0])\n self._rules[positionNumber] = {}\n for i in range(1, len(cols)):\n self._rules[positionNumber][cols[i].upper()] = 1", "def ModifyRuleAttribute(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyRuleAttribute\", params, headers=headers)\n response = json.loads(body)\n model = models.ModifyRuleAttributeResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def Update(self,\n priority=None,\n firewall_policy=None,\n firewall_policy_rule=None,\n batch_mode=False,\n only_generate_request=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.PatchRule(\n self._MakeUpdateRuleRequestTuple(\n priority=priority,\n firewall_policy=firewall_policy,\n firewall_policy_rule=firewall_policy_rule)[2])\n return self.WaitOperation(\n op_res, message='Updating a rule in the organization firewall policy.')", "def set_rule_applied(self, rule_applied):\n self.rule_applied = set_rule_applied", "def edit_rule(self, rule_number, rule):\n\n\t\tif self._mode == Mode.PassThrough:\n\t\t\traise ValueError(\"Can't edit rules while in passthrough mode\")\n\n\t\tif self._mode == Mode.BlackList:\n\t\t\tif len(self._blacklist_rules) 
- 1 < rule_number:\n\t\t\t\traise ValueError('Rule not found in rules list')\n\t\t\told_rule = self._blacklist_rules.pop(rule_number)\n\t\t\tself._blacklist_rules.append(rule)\n\t\t\tself._log.info('Replaced rule from the blacklist rules set: \\n old: %s\\n new: %s' % (old_rule, rule))\n\n\t\tif self._mode == Mode.WhiteList:\n\t\t\tif len(self._whitelist_rules) - 1 < rule_number:\n\t\t\t\traise ValueError('Rule not found in rules list')\n\t\t\told_rule = self._whitelist_rules.pop(rule_number)\n\t\t\tself._whitelist_rules.append(rule)\n\t\t\tself._log.info('Replaced rule from the whitelist rules set: \\n old: %s\\n new: %s' % (old_rule, rule))\n\n\t\tself._dump_configuration()\n\t\tself._remove_all_flow_records()\n\t\treturn old_rule", "def edit_ongoing_rule():\n rules = request.json['rules']\n now = datetime.datetime.now()\n\n for rule in rules:\n rule['line_id'] = int(rule['line_id'])\n rule['time'] = convert_to_datetime(rule['time'])\n rule['intervals'] = int(rule['intervals'])\n rule['time_wait'] = int(rule['time_wait'])\n rule['repeat_value'] = int(rule['repeat_value'])\n rule['date_start'] = convert_to_datetime(rule['date_start'])\n rule['time_start'] = convert_to_datetime(rule['time_start'])\n rule['date_time_start'] = datetime.datetime.combine(\n rule['date_start'], rule['time_start'].time())\n rule['end_date'] = convert_to_datetime(rule['end_date'])\n rule['rule_id'] = rule['rule_id']\n rule['days'] = -1\n\n if rule['date_start'].date() == rule['end_date'].date():\n date_delta = rule['end_date'].date() - now.date()\n if date_delta.days == 0:\n rule['days'] = 0\n if date_delta.days == 1:\n rule['days'] = 1\n\n # \"UPDATE ongoing_rules\n # SET line_id = {0}, time = {1}, intervals = {2}, time_wait = {3}, repeat_value={4}, date_time_start='{5}'\"\n # end_date = '{6}' WHERE rule_id = '{7}'\"\n database.update(database.QUERY[mn() + '_ongoing'].format(\n rule['line_id'], rule['time'], rule['intervals'], rule['time_wait'],\n rule['repeat_value'], rule['date_time_start'],\n rule['end_date'], rule['rule_id']))\n\n # update rules;\n update_rules_from_ongoing_rules(rule)\n # update_all_rules()\n logging.info(\"Ongoing rule modified. 
{0}\".format(str(rule)))\n\n send_ongoing_rule_message('edit_ongoing_rule', rule)\n\n return json.dumps({'status': 'OK'})", "def add_rule(self, rule, on=None, off=None, strength=1.):\n\n self.x[on:off, :, get_rule_index(rule, self.config)] = strength", "def update(self,\n section_id,\n rule_id,\n service_insertion_rule,\n ):\n return self._invoke('update',\n {\n 'section_id': section_id,\n 'rule_id': rule_id,\n 'service_insertion_rule': service_insertion_rule,\n })", "def set(self, subrule):\n self.__rule = subrule", "def update(self, subverbify, old_short_name, short_name, description,\n kind=None):\n rules = self._cf.get(subverbify._id36)\n if old_short_name != short_name:\n old_rule = rules.get(old_short_name, None)\n self._remove(subverbify._id36, [old_short_name])\n else:\n old_rule = rules.get(short_name, None)\n if not old_rule:\n return False\n\n old_rule = json.loads(old_rule)\n if not old_rule.get(\"created_utc\"):\n old_rule[\"created_utc\"] = time.mktime(\n datetime.strptime(\n old_rule.pop(\"when\")[:-6], \"%Y-%m-%d %H:%M:%S.%f\"\n ).timetuple())\n\n blob = self.get_rule_blob(\n short_name=short_name,\n description=description,\n priority=old_rule[\"priority\"],\n kind=kind,\n created_utc=old_rule[\"created_utc\"],\n )\n self._set_values(subverbify._id36, blob)", "def add_rule(self, rule: interpreter.Rule) -> None:\n\n if rule.target not in self.rules:\n self.rules[rule.target] = rule\n else:\n self.rules[rule.target] |= rule" ]
[ "0.7299374", "0.6614859", "0.65223616", "0.58992857", "0.57806516", "0.57245296", "0.56335515", "0.56320626", "0.56009996", "0.5545236", "0.55088514", "0.54439205", "0.54374593", "0.54025745", "0.53321713", "0.5291471", "0.52768487", "0.5186999", "0.51278824", "0.51227224", "0.5089676", "0.5083065", "0.5065541", "0.5041", "0.50134814", "0.50032586", "0.4992372", "0.4974114", "0.49575883", "0.49408767" ]
0.8170401
0
Update a flavor profile.
def update_flavor_profile(request, **kwargs): data = request.DATA flavor_profile_id = data['flavor_profile']['id'] conn = get_sdk_connection(request) flavor_profile = conn.load_balancer.update_flavor_profile( flavor_profile_id, name=data['flavor_profile'].get('name'), provider_name=data['flavor_profile'].get('provider_name'), flavor_data=data['flavor_profile'].get('flavor_data'), ) return _get_sdk_object_dict(flavor_profile)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, request, flavor_profile_id):\n update_flavor_profile(request)", "def update_flavor(cls, flavor_uuid, values):\n return cls.dbdriver.update_flavor(flavor_uuid, values)", "def update(self, profile: Dict[datetime.time, float]) -> None:\n\n if self._profile is None:\n self._profile = profile\n else:\n self._profile.update(profile)", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def update_profile():\n logger.debug(\"entering function update_profile\")\n response = update_user_profile(request.json)\n logger.debug(\"exiting function update_profile\")\n return jsonify(response)", "def fusion_api_edit_server_profile(self, body, uri, api=None, headers=None, param=''):\n return self.profile.update(body, uri, api, headers, param=param)", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"[email protected]\",\n customer_profile_id=u\"122\"\n )", "def update_network_profile(self, profile, body=None):\r\n return self.put(self.network_profile_path % (profile), body=body)", "def update(self,\n ipfix_dfw_profile_id,\n i_pfixdfw_profile,\n ):\n return self._invoke('update',\n {\n 'ipfix_dfw_profile_id': ipfix_dfw_profile_id,\n 'i_pfixdfw_profile': i_pfixdfw_profile,\n })", "def putProfile(profileType,value):\n # PUT /profile/$profileType\n pass", "def fusion_api_update_hypervisor_host_profile(self, uri=None, body=None, api=None, headers=None):\n return self.host_profile.update(body, uri, api, headers)", "def fusion_api_edit_server_profile_template(self, body, uri, api=None, headers=None):\n return self.profile_template.update(body, uri, api, headers)", "async def test_update(self):\n rsps = respx.put(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id') \\\n .mock(return_value=Response(200))\n await provisioning_client.update_provisioning_profile('id', {'name': 'new name'})\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert rsps.calls[0].request.content == json.dumps({'name': 'new name'}).encode('utf-8')", "def post(self, request):\n kwargs = {\n 'flavor_profile': request.DATA.get('flavor_profile')\n }\n return create_flavor_profile(request, **kwargs)", "def update(self,\n ike_profile_id,\n ip_sec_vpn_ike_profile,\n ):\n return self._invoke('update',\n {\n 'ike_profile_id': ike_profile_id,\n 'ip_sec_vpn_ike_profile': ip_sec_vpn_ike_profile,\n })", "def update(self,\n dpd_profile_id,\n ip_sec_vpn_dpd_profile,\n ):\n return self._invoke('update',\n {\n 'dpd_profile_id': dpd_profile_id,\n 'ip_sec_vpn_dpd_profile': ip_sec_vpn_dpd_profile,\n })", "def update(self, **kwargs: Any):\n if not kwargs:\n return False\n for key, value in kwargs.items():\n if key.lower() == _PROFILE.lower():\n self._set_profile(value)\n else:\n try:\n self._config_parser.set(self.profile, key, str(value))\n except NoSectionError:\n # Create and set default profile if it does not exist in .bonsaiconfig\n self._set_profile(self.profile)\n self._config_parser.set(self.profile, key, str(value))\n\n if not self._write_dot_bonsaiconfig():\n return False\n\n self._parse_config(self.profile)\n\n return True", "def update(self,\n tunnel_profile_id,\n ip_sec_vpn_tunnel_profile,\n ):\n return self._invoke('update',\n {\n 'tunnel_profile_id': tunnel_profile_id,\n 'ip_sec_vpn_tunnel_profile': ip_sec_vpn_tunnel_profile,\n 
})", "def update_profile(self, channels=None): # pragma: no cover\n pass", "def fusion_api_patch_server_profile(self, body, uri, api=None, headers=None):\n return self.profile.patch(body, uri, api, headers)", "def update_server_profile_firmware(*profile_obj):\n logger._log_to_console_and_log_file(\"Update firmware for Server Profiles\")\n\n if isinstance(profile_obj, test_data.DataObj):\n profile_obj = [profile_obj]\n elif isinstance(profile_obj, tuple):\n profile_obj = list(profile_obj[0])\n\n for profile in profile_obj:\n selenium2lib = ui_lib.get_s2l()\n if not selenium2lib._is_element_present(FusionServerProfilesPage.ID_PAGE_LABEL):\n navigate()\n profile_list = [el.text for el in selenium2lib._element_find(FusionServerProfilesPage.ID_PROFILE_LIST_NAMES, False, False)]\n if profile.name not in profile_list:\n logger._warn(\"Profile '%s' does not exist\" % profile.name)\n continue\n # Select & Edit Server Profile\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_ELEMENT_PROFILE_NAME_BASE % profile.name)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MENU_MAIN_ACTION)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_MAIN_ACTION)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MENU_ACTION_EDIT)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MENU_ACTION_EDIT)\n\n # Adding firmware baseline\n if profile.has_property(\"manageFirmware\") and profile.manageFirmware == \"true\":\n logger._log_to_console_and_log_file(\"Selecting firmware baseline..\")\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_DROPDOWN_BTN_FIRMWARE_BASELINE)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_COMBO_FIRMWARE_BASELINE_LIST % profile.spp)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_COMBO_FIRMWARE_BASELINE_LIST % profile.spp)\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_DROPDOWN_FIRMWARE_BASELINE)\n selectedFW = selenium2lib.get_text(FusionServerProfilesPage.ID_DROPDOWN_FIRMWARE_BASELINE)\n logger._log_to_console_and_log_file(\"Selected firmware is %s \" % selectedFW)\n if not selectedFW == profile.spp:\n logger._warn(\"Failed to select preferred firmware bundle..'\" + profile.spp + \"' at the edit page\")\n continue\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_BTN_CONFIRM_UPDATE_FIRMWARE, PerfConstants.PROFILE_ACTIVITY)\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CONFIRM_UPDATE_FIRMWARE)\n if not ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_MAIN_PAGE, PerfConstants.PROFILE_ACTIVITY):\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_ERROR_POPUP, PerfConstants.DEFAULT_SYNC_TIME):\n ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_ERROR_MSG, PerfConstants.DEFAULT_SYNC_TIME)\n error_msg = selenium2lib.get_text(FusionServerProfilesPage.ID_ERROR_MSG)\n logger._warn(\"Selected Bay: '\" + profile.name + \"' has encountered an error with the message : '\" + error_msg + \"' , may be the hardware is being managed by another system\")\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CANCEL_UPDATE_FIRMWARE)\n logger._log_to_console_and_log_file(\"Firmware Update canceled\")\n continue\n if ui_lib.wait_for_element_visible(FusionServerProfilesPage.ID_STATUS_CHANGING, PerfConstants.PROFILE_ACTIVITY):\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_MAIN_PAGE)\n ui_lib.wait_for_element_visible(FusionDashboardPage.ID_LINK_ACTIVITY, PerfConstants.ACTIVITY)\n 
ui_lib.wait_for_element_and_click(FusionDashboardPage.ID_LINK_ACTIVITY)\n if ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_PROGRESS % profile.name, PerfConstants.FIRMWARE_VALIDATION):\n start_time = selenium2lib.get_text(FusionServerProfilesPage.ID_NEW_ACTIVITY_TIMESTAMP % profile.name)\n logger._log_to_console_and_log_file(start_time)\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s started......... \" % profile.name)\n if ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_SUCCESS % (profile.name, start_time), PerfConstants.FIRMWARE_FAIL_PASS_VALIDATION):\n logger._log_to_console_and_log_file(\"Updating Server Profile Firmware %s done successfully\" % profile.name)\n elif ui_lib.wait_for_element(FusionServerProfilesPage.ID_NEW_ACTIVITY_ERROR % (profile.name, start_time), PerfConstants.FIRMWARE_ERROR_VALIDATION):\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s done with errors\" % profile.name)\n else:\n logger._log_to_console_and_log_file(\"Update Server Profile Firmware %s done with warnings\" % profile.name)\n else:\n logger._log_to_console_and_log_file(\"Selected Bay: '\" + profile.name + \"' has already been updated with the firmware baseline : '\" + profile.spp + \"'\")\n continue\n else:\n ui_lib.wait_for_element_and_click(FusionServerProfilesPage.ID_BTN_CANCEL_UPDATE_FIRMWARE)\n logger._log_to_console_and_log_file(\"Firmware Update canceled\")", "def delete(self, request, flavor_profile_id):\n conn = get_sdk_connection(request)\n conn.load_balancer.delete_flavor_profile(flavor_profile_id,\n ignore_missing=True)", "def set_profile_version(context, profile_id, version):\n\n check_profile_id(profile_id)\n ps = getToolByName(context, 'portal_setup')\n\n ps.setLastVersionForProfile(profile_id, unicode(version))\n assert(ps.getLastVersionForProfile(profile_id) == (version, ))\n print \"Set version for '%s' to '%s'.\" % (profile_id, version)", "def profile(request):\n if request.method == 'POST':\n form = UpdateForm(request.POST, instance=request.user)\n \n if form.is_valid():\n form.save()\n messages.success(request, f'Your account has been updated!')\n return redirect('profile')\n else:\n form = UpdateForm(instance=request.user)\n\n context = {\n 'title': 'Profile',\n 'form': form,\n 'prices': get_pix_price(),\n 'colors_pack': Colors_pack.objects.all().prefetch_related('contains'),\n }\n return render(request, 'users/profile.html', context)", "def update_profile(profile_id):\n \n profile = mongo.db.profiles\n profile.find_one_and_update({'_id': ObjectId(profile_id)},\n {'$set': {'date': datetime.utcnow(),\n 'headline': request.form.get('headline'),\n 'bio': request.form.get('bio'),\n 'xp': request.form.get('xp'),\n 'interests': request.form.get('interests'),\n 'stack': request.form.get('stack'),\n 'languages': request.form.get('languages'),\n 'frameworks': request.form.get('frameworks'),\n 'github': request.form.get('github'),\n 'linkedin': request.form.get('linkedin')\n }\n }\n )\n return redirect(url_for('dashboard'))", "def edit_server_profile(profile_obj):\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n total = len(profile_obj)\n not_exists = 0\n edited = 0\n\n for n, profile in enumerate(profile_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n logger.info(\"editing a server profile with name '%s' ...\" % profile.name)\n # checking if the profile is not existing for editing\n if not 
VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=False):\n logger.warn(\"server profile '%s' does not exist\" % profile.name)\n not_exists += 1\n continue\n # - Prep the auto_power_off switch\n # - By default, this keyword will power off the server if it's powered on -- unless the attribute 'auto_power_off' is explicitly set to 'false'\n auto_power_off = False if getattr(profile, 'auto_power_off', '').lower() == 'false' else True\n # open Edit SP dialog and enter data ...\n CommonOperationServerProfile.click_server_profile(profile.name)\n # { below 3 lines were to avoid a failure caused by 2 CR that had been fixed. leave the 3 lines here as commented in case regression issue in future\n # will remove below once 2 CRs fixed\n # EditServerProfile.select_action_edit()\n # EditServerProfile.wait_edit_server_profile_dialog_shown()\n # EditServerProfile.click_cancel_button()\n # } here is a workaround for 1st time editing server profile (sp template as well) has defect that,\n # can't close dialog by OK/Cancel button, and SAN Storage's OS Type can't be read correctly,\n # so open dialog and use Cancel button to close, then everything goes well when 2nd time open Edit dialog\n\n EditServerProfile.select_action_edit()\n EditServerProfile.wait_edit_server_profile_dialog_shown()\n BuiltIn().sleep(2)\n EditServerProfile.input_name(profile.newName) if getattr(profile, 'newName', None) is not None else None\n EditServerProfile.input_description(profile.desc) if getattr(profile, 'desc', None) is not None else None\n\n sht_selected = EditServerProfile.get_selected_server_hardware_type(profile.server)\n # 20151021 Alex Ma - discussed with Tony/Alex C and get below agreed:\n # - if 'hardwareType' is defined in test data, then will firstly select/change 'Server hardware type' from UI,\n # then select/change 'Server hardware' if 'server' is defined in test data\n # - if 'hardwareType' is not defined in test data, then will only check 'server' attribute to decide if select/change 'Server hardware' from UI\n if getattr(profile, 'hardwareType', None) is not None:\n if profile.hardwareType not in sht_selected:\n logger.warn(\"server hardware type '%s' of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.server, profile.hardwareType))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_server_hardware_type(profile.hardwareType, timeout=5, fail_if_false=False)\n elif getattr(profile, 'ref_sht_server', None) is not None:\n hardware_type = FusionUIBase.APIMethods().get_server_hardware_type_by_server_hardware_name(profile.ref_sht_server)\n if hardware_type not in sht_selected:\n logger.warn(\"server hardware type '%s' of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.server, hardware_type))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_server_hardware_type(hardware_type, timeout=5, fail_if_false=False)\n\n eg_selected = EditServerProfile.get_selected_enclosure_group(profile.server)\n if getattr(profile, 'enclgroup', None) is not None:\n if profile.enclgroup not in eg_selected:\n logger.warn(\"enclosure group '%s' of server '%s' is NOT consistent with test data '%s'\" % (eg_selected, profile.server, profile.enclgroup))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_enclosure_group(profile.enclgroup, timeout=5, fail_if_false=False)\n\n # Input 'Server hardware'\n # - input server name,\n # - select option from the popped out drop-down list,\n # - power off the server if the 
it is powered on,\n # - verify the server hardware type of the selected one is refreshed to the type name displayed in the drop-down list\n # for selecting server hardware\n if not EditServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n logger.warn(\"server hardware '%s' is not selected for editing server profile, may be wrong name, or powered on but failed to power it off. \"\n \"test will skip this profile '%s' and continue to edit other server profiles\" % (profile.server, profile.name))\n continue\n msg = EditServerProfile.get_error_message_from_server_hardware()\n # if not CreateServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n # logger.warn(\"server hardware '%s' is not selected for creating server profile, may be wrong name, or powered on but failed to power it off. \"\n # \"test will skip this profile '%s' and continue to create other server profiles\" % (profile.server, profile.name))\n # continue\n # msg = CreateServerProfile.get_error_message_from_server_hardware()\n if msg is not None:\n logger.warn(\"error occurred, server profile can not be edited successfully: \\n<%s>\" % msg)\n ui_lib.fail_test(msg)\n\n if getattr(profile, 'Affinity', None) is not None:\n logger.info(\"test data for 'Affinity' is found: <%s>, start setting Affinity ...\" % profile.Affinity)\n EditServerProfile.select_affinity_by_text(profile.Affinity)\n\n if getattr(profile, 'Firmware', None) is not None:\n logger.info(\"test data for 'Firmware' is found: <%s>, start setting Firmware Baseline ...\" % profile.Firmware)\n logger.debug(\"test data for 'Firmware' is found: <%s>\" % profile.Firmware, also_console=False)\n # set Firmware Baseline and force-installation option\n CommonOperationServerProfile.Firmware.set(profile.Firmware)\n\n if getattr(profile, 'Connections', None) is not None:\n logger.debug(\"test data for 'Connections' is found: <%s>\" % profile.Connections, also_console=False)\n logger.info(\"test data for 'Connections' is found, start adding connections ...\")\n # add connections\n CommonOperationServerProfile.Connection.set(profile.Connections)\n\n if getattr(profile, 'LocalStorage', None) is not None:\n logger.debug(\"test data for 'Local Storage' is found: <%s>\" % profile.LocalStorage, also_console=False)\n logger.info(\"test data for 'Local Storage' is found, start setting local storage options ... 
\")\n CommonOperationServerProfile.LocalStorage.set(profile.LocalStorage)\n\n if getattr(profile, 'SANStorage', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'SAN Storage' is found:<%s>\" % profile.SANStorage, also_console=False)\n logger.info(\"test data for 'SAN Storage' is found, start setting SAN storage options and adding volumes ...\")\n # select \"Manage SAN Storage\" checkbox\n CommonOperationServerProfile.SANStorage.set(profile.SANStorage)\n\n if getattr(profile, 'BootSettings', None) is not None:\n logger.debug(\"test data for 'Boot Settings' is found: <%s>\" % profile.BootSettings, also_console=False)\n logger.info(\"test data for 'Boot Settings' is found, start setting its options ...\")\n CommonOperationServerProfile.BootSettings.set(profile, server_hardware_type=sht_selected)\n\n # 'BIOSSettings' part is ignored since BIOS setting is complicated to verify the result, therefor\n # might be better to use a dedicated tool to do this part automation separately\n if getattr(profile, 'BIOSSettings', None) is not None:\n logger.debug(\"test data for 'BIOS Settings' is found: <%s>\" % profile.BIOSSettings, also_console=False)\n logger.info(\"test data for 'BIOS Settings' is found, start setting its options ...\")\n CommonOperationServerProfile.BIOSSettings.set(profile.BIOSSettings)\n\n if getattr(profile, 'Advanced', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'Advanced' is found: <%s>\" % profile.Advanced, also_console=False)\n logger.info(\"test data for 'Advanced' is found, start setting its options ...\")\n # select \"MAC/WWN/Serial/Hide unused FlexNICs\" radio box\n EditServerProfile.Advanced.set(profile)\n\n EditServerProfile.click_ok_button()\n # logger.debug(\"sleeping for 8 seconds ...\")\n # BuiltIn().sleep(8)\n # if EditServerProfile.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfile.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being created. \"\n \"test will skip this profile '%s' and continue to create other server profiles\" % profile.name)\n continue\n\n BuiltIn().sleep(2)\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n if EditServerProfile.wait_edit_server_profile_dialog_disappear(timeout=300) is True:\n if getattr(profile, 'wait_complete', \"True\").lower() != \"false\":\n FusionUIBase.show_activity_sidebar()\n profile_name = profile.newName if getattr(profile, 'newName', None) is not None else profile.name\n timeout = int(getattr(profile, 'timeout', \"3600\"))\n FusionUIBase.wait_activity_action_ok(profile_name, 'Update', timeout=timeout, fail_if_false=True)\n FusionUIBase.show_activity_sidebar()\n fail_if_not_ok = not getattr(profile, 'IgnoreWaitForStatusOK', '').lower() == 'true'\n # control whether to stop the case when server profile status is not ok.\n CommonOperationServerProfile.wait_server_profile_status_ok(profile_name, timeout=500, fail_if_false=fail_if_not_ok)\n logger.info(\"edited server profile '%s' successfully\" % profile_name)\n edited += 1\n else:\n logger.info(\"edit server profile '%s' successfully but no need to wait for task complete\" % profile.name)\n edited += 1\n else:\n logger.warn(\"'wait_edit_server_profile_dialog_disappear' = FALSE, skip to next profile ... 
\")\n EditServerProfile.click_cancel_button()\n continue\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile to edit! all %s server profile(s) is NOT existing, test is considered FAILED\" % not_exists)\n return False\n else:\n if edited < total:\n logger.warn(\"not all of the server profile(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s not-existing server profile(s) is skipped being edited, test is considered FAILED\" % not_exists)\n return False\n else:\n ui_lib.fail_test(\"%s not-existing server profile(s) is skipped being edited, %s profile(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n\n logger.info(\"all of the server profile(s) is successfully edited - %s out of %s \" % (edited, total))\n return True", "def put(self, entity, schema):\n profile = entity.profiles.get_or_404(schema=schema)\n try:\n update_data = json.loads(request.data)\n except json.JSONDecodeError as e:\n raise APIBadRequest(str(e))\n\n if 'identity' in update_data:\n profile.identity = update_data['identity']\n if 'servers' in update_data:\n profile.servers = update_data['servers']\n\n profile.save()\n\n return jsonify(profile.to_json()), 200", "def update_my_profile(\n body: Optional[UserProfileUpdate] = None,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = UpdateMyProfile.create(\n body=body,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)", "def set_flavor(self, oid, flavor):\n data = {\n \"resize\": {\n \"flavorRef\": flavor\n }\n }\n path = '/servers/%s/action' % oid\n res = self.client.call(path, 'POST', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Pause openstack server: %s' % truncate(res))\n return res[0]", "def flavor(self, flavor):\n self._flavor = flavor" ]
[ "0.83874655", "0.6720268", "0.64170563", "0.6350232", "0.62681884", "0.62541854", "0.5983731", "0.5891509", "0.58709943", "0.58603674", "0.58362377", "0.5781399", "0.57701105", "0.57553375", "0.5744363", "0.5706912", "0.570035", "0.5665021", "0.5640496", "0.56272614", "0.5623066", "0.5559437", "0.554477", "0.5537434", "0.5527841", "0.55139667", "0.55088025", "0.55053896", "0.5484514", "0.5443125" ]
0.8229874
1
Update the list of members by adding or removing the necessary members.
def update_member_list(request, **kwargs): data = request.DATA loadbalancer_id = data.get('loadbalancer_id') pool_id = kwargs.get('pool_id') existing_members = kwargs.get('existing_members') members_to_add = kwargs.get('members_to_add') members_to_delete = kwargs.get('members_to_delete') if members_to_delete: kwargs = {'existing_members': existing_members, 'members_to_add': members_to_add, 'members_to_delete': members_to_delete, 'pool_id': pool_id} remove_member(request, **kwargs) elif members_to_add: kwargs = {'existing_members': existing_members, 'members_to_add': members_to_add, 'members_to_delete': members_to_delete, 'pool_id': pool_id} add_member(request, **kwargs) elif data.get('monitor'): args = (request, loadbalancer_id, update_monitor) thread.start_new_thread(poll_loadbalancer_status, args)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_members(self, members):\n self.__add_remove_members(members)", "def update_members(self, new_member_list):\n updated_members = 0\n request_list = list()\n\n # stale_members contains all old members at first, all current\n # members get then removed so that the remaining can get deleted\n stale_members = set(self.members)\n\n for member in new_member_list:\n m = Persona.query.get(member[\"id\"])\n\n if m is None:\n m = Persona(id=member[\"id\"], _stub=True)\n\n if m._stub is True:\n request_list.append(member[\"id\"])\n\n try:\n # Old and new member; remove from stale list\n stale_members.remove(m)\n except KeyError:\n # New member\n self.members.append(m)\n updated_members += 1\n\n # Remove old members that are not new members\n for member in stale_members:\n self.members.remove(member)\n\n app.logger.info(\"Updated {}'s members: {} added, {} removed, {} requested\".format(\n self.username, updated_members, len(stale_members), len(request_list)))\n\n return request_list", "def process_members(self, members):\n seq = (list, tuple, set)\n assert isinstance(members, seq), (f\"The members argument must be one\"\n f\"of '{seq}', found '{members}'.\")\n assert all([isinstance(member, dict) for member in members]), (\n f\"The members object must be a list of dicts, found {members}\")\n assert all([field in self.PROCESS_MEMBERS_FIELDS\n for member in members for field in member.keys()]), (\n f\"Invalid fields in dict, must have these keys \"\n f\"{self.PROCESS_MEMBERS_FIELDS}, members {members}\"\n )\n wanted_user_pks = [item['user'].pk for item in members]\n current_user_pks = [inst.user.pk for inst in self.memberships.all()]\n # Delete unwanted Membership objects.\n rem_user_pks = list(set(current_user_pks) - set(wanted_user_pks))\n self.memberships.select_related('user').filter(\n user__pk__in=rem_user_pks).delete()\n # Add new members.\n add_user_pks = list(set(wanted_user_pks) - set(current_user_pks))\n common_pks = list(set(wanted_user_pks) & set(current_user_pks))\n\n for item in members:\n if item['user'].pk in add_user_pks:\n # Create any new members.\n kwargs = {}\n kwargs['project'] = self\n kwargs['user'] = item['user']\n kwargs['role_text'] = item['role_text']\n obj = Membership(**kwargs)\n obj.save()\n elif item['user'].pk in common_pks:\n # Update any comment members.\n role = Membership.ROLE_MAP_REV[item['role_text']]\n self.memberships.filter(user=item['user']).update(role=role)", "def setHgMembers(self, membersToAdd):\n self.members = membersToAdd", "def members(self, members: \"List[str]\"):\n self._attrs[\"members\"] = members", "def members(self, members: \"List[str]\"):\n self._attrs[\"members\"] = members", "def members(self, members: \"List[str]\"):\n self._attrs[\"members\"] = members", "def members(self, members):\n\n self._members = members", "def _update(self):\n path = \"/members/%s\" % self._dict['member_id']\n data = self.extract()\n if self._dict['member_status_id'] in (\n MemberStatus.Active, MemberStatus.Error, MemberStatus.OptOut):\n data['status_to'] = self._dict['member_status_id']\n if not self.account.adapter.put(path, data):\n raise ex.MemberUpdateError()", "def update_members_by_id(self, user_ids):\n\n updated = set(int(id) for id in user_ids)\n\n logger = logging.getLogger(__name__)\n debug = logger.debug\n\n db = self['__store'].db\n users = zoom.users.Users(db)\n\n debug('updating members: %r', updated)\n\n cmd = 'select user_id from members where group_id=%s'\n existing = set(\n user_id for user_id, in\n db(cmd, self.group_id)\n )\n 
debug('existing members: %r', existing)\n\n if updated != existing:\n\n user_lookup = {\n user.user_id: user.username\n for user in users\n }\n\n to_remove = existing - updated\n if to_remove:\n debug('removing members: %r', to_remove)\n cmd = 'delete from members where group_id=%s and user_id in %s'\n db(cmd, self.group_id, to_remove)\n\n for user_id in to_remove:\n audit('remove member', self.name, \\\n user_lookup.get(user_id, 'unknown'))\n\n to_add = updated - existing\n if to_add:\n debug('adding members: %r', to_add)\n cmd = 'insert into members (group_id, user_id) values (%s, %s)'\n sequence = zip([self.group_id] * len(to_add), to_add)\n db.execute_many(cmd, sequence)\n\n for user_id in to_add:\n audit('add member', self.name, \\\n user_lookup.get(user_id, 'unknown'))\n\n else:\n debug('memberships unchanged')", "def members(self, members: object):\n\n self._members = members", "def member_list(self, member_list):\n\n self._member_list = member_list", "def update_guild_members(name, server):\n url = base_wow + guild+\"/\"+ server+\"/\"+ name+\"?\"+ method + locale + api\n r = requests.get(url)\n data = r.json()\n guilde = data['name']\n for member in data[\"members\"]:\n add_member(guilde, member['character']['name'], member['rank'], member['character']['level'])", "def _handle_member_chunk(self, members: list):\n if self._chunks_left >= 1:\n # We have a new chunk, so decrement the number left.\n self._chunks_left -= 1\n\n for member_data in members:\n id = int(member_data[\"user\"][\"id\"])\n if id in self._members:\n member_obj = self._members[id]\n else:\n member_obj = dt_member.Member(self._bot, **member_data)\n\n member_obj.nickname = member_data.get(\"nick\", member_obj.nickname)\n member_obj.guild_id = self.id\n\n self._members[member_obj.id] = member_obj", "def modify_membership(self, gfd_info):\n for member, status in gfd_info.items():\n if status:\n if member not in self.membership:\n self.membership.append(member)\n else:\n self.membership.remove(member)\n\n # Send change_replica_ips request to the client \n self.send_replica_IPs()\n\n # Elect a new primary if running on passive mode.\n if self.mode == 'passive':\n if member == self.primary:\n self.pick_primary()\n print(\"\\n The current membership is :\")\n print(self.membership)\n \n return", "def update_members_from_preferences(self, **parameters):\n super(Sequence, self).update_members_from_preferences(**parameters)\n\n for i, item in enumerate(self.items):\n para = parameters['item_{}'.format(i)]\n item.update_members_from_preferences(**para)", "def put(self, request, pool_id):\n # Assemble the lists of member id's to add and remove, if any exist\n request_member_data = request.DATA.get('members', [])\n\n conn = get_sdk_connection(request)\n existing_members = _sdk_object_to_list(\n conn.load_balancer.members(pool_id))\n\n (members_to_add, members_to_delete) = get_members_to_add_remove(\n request_member_data, existing_members)\n\n if members_to_add or members_to_delete:\n kwargs = {'existing_members': existing_members,\n 'members_to_add': members_to_add,\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}\n update_member_list(request, **kwargs)", "def add_members(id): # pylint: disable=I0011,W0622\n\n l = Legacy.query.get_or_404(id)\n\n if current_app.config.get('IGNORE_AUTH') is not True: # pragma: no cover\n if l.owner_id != g.user.id:\n raise Http403('Access denied')\n\n if not l.can_modify(g.user.id):\n raise Http403('Access denied')\n\n if request.json is None or 'members' not in request.json:\n 
raise NoData('\"members\" was not specified')\n\n member_list = request.json['members']\n\n if not isinstance(member_list, list):\n raise IncorrectData('\"members\" was not a valid list')\n\n for member in member_list:\n member = str(member)\n\n if not bool(re.match(r'^\\d+$', member)):\n member = Person.query.filter_by(email=member).first()\n\n if member is None:\n # TODO: Invite member\n # TODO: Queue a task to assign member on signup\n\n continue\n else:\n member = Person.query.get(member)\n\n if member is None:\n raise IncorrectData('Member not found')\n\n # TODO: Queue a task to assign member on acceptance\n\n # TODO: Remove following after member confirmation is done\n l.members.append(member)\n\n # TODO: Remove following after member confirmation is done\n l.save()\n\n return {}", "def memberize(self, accounts, members):\n accounts.connect()\n accounts.verify_connection() # Pre-flight test of member database\n members.check_sanity()\n members.decrypt_and_verify() # Check that the member change document is trustable.\n\n accounts_not_current_members = self.add_or_update_accounts(accounts, members)\n self.make_accounts_non_members(accounts, accounts_not_current_members)", "def clear_members(self):\r\n self._members = []\r\n self._length = 4", "def members(self, items):\n pass", "def clear_members(self):\r\n self._members = []\r\n self._length = 0", "def remove_members(id): # pylint: disable=I0011,W0622\n\n l = Legacy.query.get_or_404(id)\n\n if current_app.config.get('IGNORE_AUTH') is not True: # pragma: no cover\n if l.owner_id != g.user.id:\n raise Http403('Access denied')\n\n if not l.can_modify(g.user.id):\n raise Http403('Access denied')\n\n if request.json is None or 'members' not in request.json:\n raise NoData('\"members\" was not specified')\n\n member_list = request.json['members']\n\n if not isinstance(member_list, list):\n raise IncorrectData('\"members\" was not a valid list')\n\n for member in member_list:\n member = Person.query.get(member)\n\n if member is None:\n continue\n\n try:\n l.members.remove(member)\n except ValueError:\n pass\n\n l.save()\n\n return {}", "def on_group_members_change(self, new_members):\n log.info(\"Consumer group '%s' members changed.\", self.group_name)\n\n new_members = set(new_members)\n if new_members != self.members:\n self.members = new_members\n self.rebalance()\n\n self.members_collected.set()", "def entityUpdates(self, *args):\n\t\tfor entity in self.members.values():\n\t\t\tentity.update(*args)", "def refresh(self):\n self.active_member_count\n self.description\n self.lbmethod\n self.members\n self.minimum_active_member\n self.minimum_up_member\n self.slow_ramp_time\n self.statistics", "def user_list_update(self):\n\t\tclient_log.debug(f'Запрос списка известных пользователей {self.username}')\n\t\treq = {\n\t\t\tACTION: USERS_REQUEST,\n\t\t\tTIME: time.time(),\n\t\t\tACCOUNT_NAME: self.username\n\t\t}\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tif RESPONSE in ans and ans[RESPONSE] == 202:\n\t\t\tself.database.add_users(ans[LIST_INFO])\n\t\telse:\n\t\t\tclient_log.error('Не удалось обновить список известных пользователей.')", "def UpdateGroupMembership(self, newMembers):\r\n globals.groupMembers[newMembers.targetGuid] = True #remove the target Sticky\r\n\r\n for guid in newMembers.guids[0]:\r\n globals.groupMembers[guid]=True\r\n\r\n group = Group()\r\n globals._groupNumber = globals._groupNumber+1\r\n group.groupID = globals._groupName + str(globals._groupNumber)\r\n 
group.targetSticky[\"guid\"] = newMembers.targetGuid\r\n group.targetSticky[\"desc\"] = newMembers.targetDesc\r\n group.targetSticky[\"head\"] = newMembers.targetHead #lplp1313 new value\r\n\r\n guidSims = tuple(zip(newMembers.guids[0], newMembers.descriptions[0], newMembers.headers[0], list(newMembers.cos_sims[0]))) #lplp1313 new value \r\n for g, d, h, c in guidSims:\r\n gs = GroupSticky()\r\n gs.guid=g\r\n gs.desc=d\r\n gs.head=h #lplp1313 new value\r\n gs.cosineVal=c\r\n group.groupStickies.append(gs)\r\n\r\n globals._jsonReply._groups.append(group)", "def setHgMembers(self, members):\n self.huntGroup.setHgMembers(members)", "def update_group_members(self, group_id, members=None):\n add_member_url = self.groups_url + \"/%s/members\" % group_id\n data = json.dumps({\"members\": [members or Auth.get_project_id()]})\n\n return requests.put(add_member_url, data=data, headers=self.headers)" ]
[ "0.7322556", "0.7289237", "0.69442546", "0.68231875", "0.6818597", "0.6818597", "0.6818597", "0.6776591", "0.6742535", "0.67015535", "0.6662517", "0.65746605", "0.6528248", "0.6491567", "0.64232713", "0.6329506", "0.621608", "0.6207658", "0.6158595", "0.61513424", "0.61500156", "0.6103748", "0.6100407", "0.6040953", "0.6030286", "0.6019862", "0.5995587", "0.5988266", "0.59490776", "0.5939017" ]
0.73280567
0
Add floating IP address info to each load balancer.
def add_floating_ip_info(request, loadbalancers): floating_ips = neutron.tenant_floating_ip_list(request) for lb in loadbalancers: floating_ip = {} associated_ip = next((fip for fip in floating_ips if fip['port_id'] == lb['vip_port_id']), None) if associated_ip is not None: floating_ip['id'] = associated_ip['id'] floating_ip['ip'] = associated_ip['ip'] lb['floating_ip'] = floating_ip
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_host_floating_ips(self):\n\n admin_context = context.get_admin_context()\n try:\n floating_ips = objects.FloatingIPList.get_by_host(admin_context,\n self.host)\n except exception.NotFound:\n return\n\n for floating_ip in floating_ips:\n if floating_ip.fixed_ip_id:\n try:\n fixed_ip = floating_ip.fixed_ip\n except exception.FixedIpNotFound:\n LOG.debug('Fixed IP %s not found', floating_ip.fixed_ip_id)\n continue\n interface = CONF.public_interface or floating_ip.interface\n try:\n self.l3driver.add_floating_ip(floating_ip.address,\n fixed_ip.address,\n interface,\n fixed_ip.network)\n except processutils.ProcessExecutionError:\n LOG.debug('Interface %s not found', interface)\n raise exception.NoFloatingIpInterface(interface=interface)", "def set_ip_adresses(self):\n # unfold a config tree for the current suffix, if any\n for interface, details in self.interfaces.items():\n for k, v in details.items():\n if k == 'address':\n ip, prefix = address_to_ip_prefix(v)\n self.interfaces[interface]['ip_address'] = ip\n self.interfaces[interface]['ip_prefix'] = prefix\n break\n if interface == 'wan':\n self.ip_address = ip\n if interface == 'ha_sync':\n self.ha_sync_ip_address = ip", "def _update_ips(self):\n self.ip_others = []\n ips = self.mesh.ipaddr()\n self.rloc16 = self.mesh.rloc()\n for line in ips:\n if line.startswith('fd'):\n # Mesh-Local unicast IPv6\n try:\n addr = int(line.split(':')[-1], 16)\n except Exception:\n continue\n if addr == self.rloc16:\n # found RLOC\n # RLOC IPv6 has x:x:x:x:0:ff:fe00:RLOC16\n self.rloc = line\n elif ':0:ff:fe00:' not in line:\n # found Mesh-Local EID\n self.ip_eid = line\n elif line.startswith('fe80'):\n # Link-Local\n self.ip_link = line\n else:\n self.ip_others.append(line)", "def add_ips(self, ip_addresses: Iterable[str], **kwargs):\n _, ip_entities = _GEO_LITE.lookup_ip(ip_addr_list=ip_addresses)\n self.add_ip_cluster(ip_entities=ip_entities, **kwargs)", "def get_floating_ips(self):\n return self.router.get(l3_constants.FLOATINGIP_KEY, [])", "def get(self, request):\n conn = get_sdk_connection(request)\n lb_list = _sdk_object_to_list(conn.load_balancer.load_balancers(\n project_id=request.user.project_id))\n if request.GET.get('full') and neutron.floating_ip_supported(request):\n add_floating_ip_info(request, lb_list)\n return {'items': lb_list}", "def update_loadbalancer(self, context, lb, old):\n LOG.debug(\"\\nupdate_loadbalancer({}): called\".format(lb.id))\n hostnames = self._get_hostname(lb)\n # Update the TrafficIP group\n vapv = self._get_vapv(hostnames)\n # Update allowed_address_pairs\n if not old or lb.vip_address != old.vip_address:\n for hostname in hostnames:\n port_ids = self.openstack_connector.get_server_port_ids(\n hostname\n )\n self.openstack_connector.add_ip_to_ports(\n lb.vip_address, port_ids\n )\n # Update bandwidth allocation\n if old is not None and old.bandwidth != lb.bandwidth:\n self._update_instance_bandwidth(hostnames, lb.bandwidth)", "def add_ip_cores(self, scfg, ip_dir):\r\n\r\n return []", "def add_ip(self, inf, ip):\n self.interfaces[inf]['ip'] = ip", "def _init_ipaddress_ops(self):\n\n # retrieve local and external IPs\n all_ips_str = set(self.statistics.process_db_query(\"all(ipAddress)\", print_results=False))\n # external_ips_str = set(self.statistics.process_db_query(\"ipAddress(macAddress=%s)\" % self.get_probable_router_mac(), print_results=False)) # including router\n # local_ips_str = all_ips_str - external_ips_str\n external_ips = set()\n local_ips = set()\n all_ips = set()\n\n 
self.contains_priv_ips = False\n self.priv_ip_segment = None\n\n # convert IP strings to IPv4.IPAddress representation\n for ip in all_ips_str:\n if is_ipv4(ip):\n ip = IPAddress.parse(ip)\n # exclude local broadcast address and other special addresses\n if (not str(ip) == \"255.255.255.255\") and (not ip.is_localhost()) and (not ip.is_multicast()) and (\n not ip.is_reserved()) and (not ip.is_zero_conf()):\n all_ips.add(ip)\n\n for ip in all_ips:\n if ip.is_private():\n local_ips.add(ip)\n\n external_ips = all_ips - local_ips\n\n # save the certain unused local IPs of the network\n # to do that, divide the unused local Addressspace into chunks of (chunks_size) Addresses\n # initally only the first chunk will be used, but more chunks can be added to the pool of unused_local_ips if needed\n self.min_local_ip, self.max_local_ip = min(local_ips), max(local_ips)\n local_ip_range = (self.max_local_ip.to_int()) - (self.min_local_ip.to_int() + 1)\n if local_ip_range < 0:\n # for min,max pairs like (1,1), (1,2) there is no free address in between, but for (1,1) local_ip_range may be -1, because 1-(1+1)=-1\n local_ip_range = 0\n\n # chunk size can be adjusted if needed\n self.chunk_size = 200\n\n self.current_chunk = 1\n if local_ip_range < self.chunk_size:\n # there are not more than chunk_size unused IP Addresses to begin with\n self.chunks = 0\n self.chunk_remainder = local_ip_range\n else:\n # determine how many chunks of (chunk_size) Addresses there are and the save the remainder\n self.chunks = local_ip_range // self.chunk_size\n self.chunk_remainder = local_ip_range % self.chunk_size\n\n # add the first chunk of IP Addresses\n self.unused_local_ips = set()\n self.expand_unused_local_ips()\n\n # save the gathered information for efficient later use\n self.external_ips = frozenset(external_ips)\n self.remaining_external_ips = external_ips\n self.max_uncertain_local_ip = self.max_local_ip\n self.local_ips = frozenset(local_ips)\n # print(\"External IPS: \" + str(external_ips))\n # print(\"LOCAL IPS: \" + str(local_ips))\n self.remaining_local_ips = local_ips\n self.uncertain_local_ips = set()", "def _get_forwarded_for(self, request: Request) -> List[_BaseAddress]:\n forwarded_for_str = request.headers.getlist(\"X-Forwarded-For\")\n if not forwarded_for_str or len(forwarded_for_str) > 1:\n return []\n return [\n ip_address(addr)\n for addr in (a.strip() for a in forwarded_for_str[0].split(\",\"))\n if addr\n ]", "def process_floating_ip_addresses(self, interface_name):\n\n fip_statuses = {}\n if interface_name is None:\n LOG.debug('No Interface for floating IPs router: %s',\n self.router['id'])\n return fip_statuses\n\n device = ip_lib.IPDevice(interface_name, namespace=self.ns_name)\n existing_cidrs = self.get_router_cidrs(device)\n new_cidrs = set()\n\n floating_ips = self.get_floating_ips()\n # Loop once to ensure that floating ips are configured.\n for fip in floating_ips:\n fip_ip = fip['floating_ip_address']\n ip_cidr = common_utils.ip_to_cidr(fip_ip)\n new_cidrs.add(ip_cidr)\n fip_statuses[fip['id']] = l3_constants.FLOATINGIP_STATUS_ACTIVE\n if ip_cidr not in existing_cidrs:\n fip_statuses[fip['id']] = self.add_floating_ip(\n fip, interface_name, device)\n LOG.debug('Floating ip %(id)s added, status %(status)s',\n {'id': fip['id'],\n 'status': fip_statuses.get(fip['id'])})\n\n # mark the status as not changed. 
we can't remove it because\n # that's how the caller determines that it was removed\n if fip_statuses[fip['id']] == fip['status']:\n fip_statuses[fip['id']] = FLOATINGIP_STATUS_NOCHANGE\n fips_to_remove = (\n ip_cidr for ip_cidr in existing_cidrs - new_cidrs\n if common_utils.is_cidr_host(ip_cidr))\n for ip_cidr in fips_to_remove:\n self.remove_floating_ip(device, ip_cidr)\n\n return fip_statuses", "def add_floating_ip(self, floating_ip, fixed_ip, l3_interface_id,\n network=None):\n raise NotImplementedError()", "def add_ip(self, ip: IPWrapper):\n ip_name = ip.top_name\n self._ips[ip_name] = ip\n self._ips_by_internal_name[ip.ip_name] = ip\n # create a placeholder for Instance arguments to instantiate the ip\n setattr(self, ip_name, dict())", "def _add_fip_addr_to_device(self, fip, device):\n try:\n ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])\n device.addr.add(ip_cidr)\n return True\n except RuntimeError:\n # any exception occurred here should cause the floating IP\n # to be set in error state\n LOG.warn(_LW(\"Unable to configure IP address for \"\n \"floating IP: %s\"), fip['id'])", "def _get_ins_fips(self):\n fip_lst = list()\n for srv_grp in self.srv_grp_lst:\n grp_fip_lst = list()\n for srv in srv_grp:\n fip_pt_name = srv['name'] + '_%s' % self.fip_port\n fip = list(\n self.conn.network.ips(port_id=fip_pt_name.id))[0].floating_ip_address\n grp_fip_lst.append(fip)\n fip_lst.append(grp_fip_lst)\n return fip_lst", "def pre_floating_ip_create(self, resource_dict):\n pass", "def digest_ips(self):\n all_subnets = {}\n self.subnets = []\n self.single_ips = []\n # extract all subnets\n for ip in self.iplist:\n subnet = self.__get_sutnet(ip)\n if all_subnets.has_key(subnet):\n all_subnets[subnet].append(ip)\n else:\n new_list = [ip]\n all_subnets[subnet] = new_list\n\n for subnet, subnet_ips in all_subnets.items():\n if len(subnet_ips) > 1:\n self.subnets.append(subnet)\n else:\n self.single_ips.append(subnet_ips[0])\n\n self.subnets.sort()\n self.single_ips.sort()", "def __init__(self, do, token, url, agent):\n super(FloatingIP, self).__init__(token, agent)\n self.do = do\n self.uri = \"{}/floating_ips\".format(url)", "def get(self, request, loadbalancer_id):\n conn = get_sdk_connection(request)\n loadbalancer = conn.load_balancer.find_load_balancer(loadbalancer_id)\n loadbalancer_dict = _get_sdk_object_dict(loadbalancer)\n if request.GET.get('full') and neutron.floating_ip_supported(request):\n add_floating_ip_info(request, [loadbalancer_dict])\n return loadbalancer_dict", "def _associate_floating_ip(self, context, domain_id, extra, floating_ip_id, floating_ip, port_id):\n\n addresses = [{\n 'version': 4,\n 'address': floating_ip,\n }]\n try:\n names = self._create(context=context,\n addresses=addresses,\n name_format=cfg.CONF[self.name].format,\n extra=extra,\n domain_id=domain_id,\n managed_extra='portid:%s' % (port_id),\n resource_type='a:floatingip',\n resource_id=floating_ip_id)\n except (designate.exceptions.DuplicateRecord, CirrusRecordExists):\n LOG.warn('Could not create record for %s using default format, '\n 'trying fallback format' % (extra['instance_name']))\n names = self._create(context=context,\n addresses=addresses,\n name_format=cfg.CONF[self.name].format_fallback,\n extra=extra,\n domain_id=domain_id,\n managed_extra='portid:%s' % (port_id),\n resource_type='a:floatingip',\n resource_id=floating_ip_id)\n LOG.info(\"Created %s to point at %s\" % (','.join(names), floating_ip))", "def __init__(self):\n self.networks = [\n ipaddress.ip_network(address)\n for 
address in self.addresses\n ]", "def post_floating_ip_create(self, resource_dict):\n pass", "def process_floating_ip_nat_rules(self):\n # Clear out all iptables rules for floating ips\n self.iptables_manager.ipv4['nat'].clear_rules_by_tag('floating_ip')\n\n floating_ips = self.get_floating_ips()\n # Loop once to ensure that floating ips are configured.\n for fip in floating_ips:\n # Rebuild iptables rules for the floating ip.\n fixed = fip['fixed_ip_address']\n fip_ip = fip['floating_ip_address']\n for chain, rule in self.floating_forward_rules(fip_ip, fixed):\n self.iptables_manager.ipv4['nat'].add_rule(chain, rule,\n tag='floating_ip')\n\n self.iptables_manager.apply()", "def add_virtualip(self, loadbalancer, vip):\n return loadbalancer.add_virtualip(vip)", "def ip_addresses_list(self, ip_addresses_list):\n\n self._ip_addresses_list = ip_addresses_list", "def Multi_static_ips(self, vm_name, ip_addr, s_range, e_range, subnet_mask, gateway, dns_list):\n try:\n for i in range(s_range, e_range):\n new_name = vm_name + \"%s\" % i\n new_ip = ip_addr + \".%s\" % i\n self.assign_ip(new_name, new_ip, subnet_mask, gateway, dns_list)\n except Exception as error:\n print(error.message)\n raise error", "def _compute_adress(self):\r\n\t\tfor leads in self:\r\n\t\t\tleads.address = leads.street + \" \" + leads.street2", "def add_centralized_floatingip(self, fip, fip_cidr):\n if not self.get_ex_gw_port():\n return\n if not self._is_this_snat_host():\n return\n interface_name = self.get_snat_external_device_interface_name(\n self.get_ex_gw_port())\n try:\n ip_lib.add_ip_address(fip_cidr, interface_name,\n namespace=self.snat_namespace.name)\n except ip_lib.IpAddressAlreadyExists:\n pass\n except RuntimeError:\n LOG.warning(\"Unable to configure IP address for centralized \"\n \"floating IP: %s\", fip['id'])\n return lib_constants.FLOATINGIP_STATUS_ERROR\n self.process_floating_ip_nat_rules_for_centralized_floatingip()\n # Send a GARP message on the external interface for the\n # centralized floatingip configured.\n ip_lib.send_ip_addr_adv_notif(self.snat_namespace.name,\n interface_name,\n fip['floating_ip_address'])\n return lib_constants.FLOATINGIP_STATUS_ACTIVE", "def addRelIP(self, data):\n \n moduleCoordinator.ModuleCoordinator().addEvent(moduleCoordinator.RELIP_EVENT, data, self.hash)" ]
[ "0.61381817", "0.598348", "0.5813492", "0.57341975", "0.5732378", "0.57311445", "0.5620384", "0.56031364", "0.56030154", "0.55562806", "0.5500934", "0.54350865", "0.5413562", "0.53627616", "0.5329677", "0.5325784", "0.5312544", "0.52665913", "0.52376854", "0.52365977", "0.52324367", "0.52154446", "0.5208384", "0.52068627", "0.51740444", "0.5148984", "0.51473576", "0.5142189", "0.5132317", "0.51318806" ]
0.791662
0
Edit a load balancer.
def put(self, request, loadbalancer_id): kwargs = {'loadbalancer_id': loadbalancer_id} update_loadbalancer(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_loadbalancer(request, **kwargs):\n data = request.DATA\n loadbalancer_id = kwargs.get('loadbalancer_id')\n\n conn = get_sdk_connection(request)\n loadbalancer = conn.load_balancer.update_load_balancer(\n loadbalancer_id,\n name=data['loadbalancer'].get('name'),\n description=data['loadbalancer'].get('description'),\n admin_state_up=data['loadbalancer'].get('admin_state_up'))\n\n return _get_sdk_object_dict(loadbalancer)", "def edit(env, identifier, listener, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n new_listener = {\n 'listenerUuid': listener\n }\n\n arg_to_option = {\n 'frontprotocol': 'frontendProtocol',\n 'backprotocol': 'backendProtocol',\n 'frontport': 'frontendPort',\n 'backport': 'backendPort',\n 'method': 'loadBalancingMethod',\n 'connections': 'maxConn',\n 'sticky': 'sessionType',\n 'clienttimeout': 'clientTimeout',\n 'sslcert': 'tlsCertificateId'\n }\n\n for key, value in args.items():\n if value:\n new_listener[arg_to_option[key]] = value\n\n try:\n mgr.add_lb_listener(uuid, new_listener)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def update_loadbalancer(self, context, lb, old):\n LOG.debug(\"\\nupdate_loadbalancer({}): called\".format(lb.id))\n hostnames = self._get_hostname(lb)\n # Update the TrafficIP group\n vapv = self._get_vapv(hostnames)\n # Update allowed_address_pairs\n if not old or lb.vip_address != old.vip_address:\n for hostname in hostnames:\n port_ids = self.openstack_connector.get_server_port_ids(\n hostname\n )\n self.openstack_connector.add_ip_to_ports(\n lb.vip_address, port_ids\n )\n # Update bandwidth allocation\n if old is not None and old.bandwidth != lb.bandwidth:\n self._update_instance_bandwidth(hostnames, lb.bandwidth)", "def _v1_0_11111_loadbalancers_3132(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"name\": \"new_lb_name\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3132\n response_body[\"loadBalancer\"][\"name\"] = \"new_lb_name\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def edit_service(self, loadbal_id, service_id, ip_address_id=None,\r\n port=None, enabled=None, hc_type=None, weight=None):\r\n _filter = NestedDict({})\r\n _filter['virtualServers']['serviceGroups']['services']['id'] = \\\r\n query_filter(service_id)\r\n\r\n kwargs = NestedDict({})\r\n kwargs['filter'] = _filter.to_dict()\r\n kwargs['mask'] = ('mask[serviceGroups[services[groupReferences,'\r\n 'healthChecks]]]')\r\n\r\n virtual_servers = self.lb_svc.getVirtualServers(id=loadbal_id,\r\n **kwargs)\r\n for service in virtual_servers[0]['serviceGroups'][0]['services']:\r\n if service['id'] == service_id:\r\n if enabled is not None:\r\n service['enabled'] = int(enabled)\r\n if port is not None:\r\n service['port'] = int(port)\r\n if weight is not None:\r\n service['groupReferences'][0]['weight'] = int(weight)\r\n if hc_type is not None:\r\n service['healthChecks'][0]['healthCheckTypeId'] = \\\r\n int(hc_type)\r\n if ip_address_id is not None:\r\n service['ipAddressId'] = ip_address_id\r\n\r\n template = {'virtualServers': virtual_servers}\r\n\r\n 
load_balancer = self.lb_svc.editObject(template, id=loadbal_id)\r\n return load_balancer", "def update_loadbalancer(self, context, old_loadbalancer, loadbalancer):\n old_val, new_val = self.get_diff_of_dict(\n old_loadbalancer, loadbalancer)\n arg_dict = {'context': context,\n lb_const.OLD_LOADBALANCER: old_loadbalancer,\n lb_const.LOADBALANCER: loadbalancer,\n }\n LOG.info(\"Received request 'Update Loadbalancer' for LB:%(lb)s \"\n \"with new Param:%(new_val)s and old Param:%(old_val)s\",\n {'lb': loadbalancer['id'],\n 'new_val': new_val,\n 'old_val': old_val})\n self._send_event(lb_const.EVENT_UPDATE_LOADBALANCER_V2, arg_dict,\n serialize=True, binding_key=loadbalancer['id'],\n key=loadbalancer['id'])", "def load_balancer_id(self, load_balancer_id):\n self._load_balancer_id = load_balancer_id", "def change_type(self, load_balancer, load_balancer_type):\n # type: ([LoadBalancer, BoundLoadBalancer], [LoadBalancerType, BoundLoadBalancerType]) ->BoundAction\n data = {\n \"load_balancer_type\": load_balancer_type.id_or_name,\n }\n response = self._client.request(\n url=\"/load_balancers/{load_balancer_id}/actions/change_type\".format(load_balancer_id=load_balancer.id),\n method=\"POST\", json=data)\n return BoundAction(self._client.actions, response['action'])", "def change_type(self, load_balancer_type):\n # type: (Union[LoadBalancerType,BoundLoadBalancerType]) -> BoundAction\n return self._client.change_type(self, load_balancer_type)", "def _v1_0_11111_loadbalancers_3131(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"port\": 1337})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3131\n response_body[\"loadBalancer\"][\"port\"] = 1337\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def update(self, name=None, labels=None):\n # type: (Optional[str], Optional[Dict[str, str]]) -> BoundLoadBalancer\n return self._client.update(self, name, labels)", "def edit_address(self, new_label: str) -> None:\n self.address_form.label_input.fill(new_label)\n self.address_form.save_button.click()", "def delete(self, loadbalancer_id):\n response.status = 201", "def _v1_0_11111_loadbalancers_3130(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"HTTPS\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3130\n response_body[\"loadBalancer\"][\"protocol\"] = \"HTTPS\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def fusion_api_edit_ipv4_subnet(self, body, uri, api=None, headers=None):\n return self.ipv4subnet.update(body, uri, api, headers)", "def _v1_0_11111_loadbalancers_3137(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv4\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n 
response_body[\"loadBalancer\"][\"id\"] = 3137\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv4\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def edit_board(bid):\n form = BoardForm(request.form)\n brd = Board.query.get(bid)\n if current_user.is_admin and request.method == 'POST':\n if form.validate():\n if brd.name != form.name.data or brd.desc != form.desc.data:\n brd.name = form.name.data\n brd.desc = form.desc.data\n DB.session.commit()\n flash('Board ({}) successfully edited!'.format(form.name.data))\n else:\n flash(constants.DEFAULT_SUBMISSION_ERR)\n return redirect(request.referrer)", "def change_protection(self, load_balancer, delete=None):\n # type: (Union[LoadBalancer, BoundLoadBalancer], Optional[bool]) -> BoundAction\n data = {}\n if delete is not None:\n data.update({\"delete\": delete})\n\n response = self._client.request(\n url=\"/load_balancers/{load_balancer_id}/actions/change_protection\".format(\n load_balancer_id=load_balancer.id),\n method=\"POST\", json=data)\n return BoundAction(self._client.actions, response['action'])", "def get(self, load_balancer_id):\n response.status = 201\n return None", "def _v1_0_11111_loadbalancers_3133(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"algorithm\": \"ROUND_ROBIN\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3133\n response_body[\"loadBalancer\"][\"algorithm\"] = \"ROUND_ROBIN\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def _v1_0_11111_loadbalancers_3135(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv2\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3135\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv2\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def edit(self):\n\n pass", "def delete(self, load_balancer):\n # type: (LoadBalancer) -> BoundAction\n self._client.request(\n url=\"/load_balancers/{load_balancer_id}\".format(load_balancer_id=load_balancer.id), method=\"DELETE\"\n )\n return True", "def _v1_0_11111_loadbalancers_3136(self, method, url, body, headers):\n if method == \"PUT\":\n json_body = json.loads(body)\n self.assertDictEqual(json_body, {\"protocol\": \"IMAPv3\"})\n return (httplib.ACCEPTED, \"\", {}, httplib.responses[httplib.ACCEPTED])\n elif method == \"GET\":\n response_body = json.loads(self.fixtures.load(\"v1_slug_loadbalancers_3xxx.json\"))\n response_body[\"loadBalancer\"][\"id\"] = 3136\n response_body[\"loadBalancer\"][\"protocol\"] = \"IMAPv3\"\n return (\n httplib.OK,\n json.dumps(response_body),\n {},\n httplib.responses[httplib.OK],\n )\n raise NotImplementedError", "def put():\n json_data = request.get_json()\n\n # validate request\n try:\n schema, resolver = ConfigStore.load_json_schema('modify_mba_ctrl.json')\n jsonschema.validate(json_data, schema, resolver=resolver)\n except 
(jsonschema.ValidationError, OverflowError) as error:\n raise BadRequest(\"Request validation failed - %s\" % (str(error)))\n\n if not caps.mba_bw_supported():\n return {'message': \"MBA CTRL not supported!\"}, 409\n\n if common.CONFIG_STORE.is_any_pool_defined():\n return {'message': \"Please remove all Pools first!\"}, 409\n\n data = deepcopy(common.CONFIG_STORE.get_config())\n\n CapsMbaCtrl.set_mba_ctrl_enabled(data, json_data['enabled'])\n\n common.CONFIG_STORE.set_config(data)\n\n return {'message': \"MBA CTRL status changed.\"}, 200", "def edit(self, **kwargs):\n ...", "def edit_service_group(self, loadbal_id, group_id, allocation=None,\r\n port=None, routing_type=None, routing_method=None):\r\n kwargs = NestedDict({})\r\n kwargs['mask'] = ('mask[virtualServers[serviceGroups'\r\n '[services[groupReferences]]]]')\r\n\r\n load_balancer = self.lb_svc.getObject(id=loadbal_id, **kwargs)\r\n virtual_servers = load_balancer['virtualServers']\r\n for virtual_server in virtual_servers:\r\n if virtual_server['id'] == group_id:\r\n service_group = virtual_server['serviceGroups'][0]\r\n if allocation is not None:\r\n virtual_server['allocation'] = int(allocation)\r\n if port is not None:\r\n virtual_server['port'] = int(port)\r\n if routing_type is not None:\r\n service_group['routingTypeId'] = int(routing_type)\r\n if routing_method is not None:\r\n service_group['routingMethodId'] = int(routing_method)\r\n break\r\n return self.lb_svc.editObject(load_balancer, id=loadbal_id)", "def edit_deployment(request, deployment, **_kwargs):\n pass", "def update_load_balancer(self,\n instance_id: str,\n dnszone_id: str,\n lb_id: str,\n *,\n name: str = None,\n description: str = None,\n enabled: bool = None,\n ttl: int = None,\n fallback_pool: str = None,\n default_pools: List[str] = None,\n az_pools: List['LoadBalancerAzPoolsItem'] = None,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if dnszone_id is None:\n raise ValueError('dnszone_id must be provided')\n if lb_id is None:\n raise ValueError('lb_id must be provided')\n if az_pools is not None:\n az_pools = [convert_model(x) for x in az_pools]\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='update_load_balancer')\n headers.update(sdk_headers)\n\n data = {\n 'name': name,\n 'description': description,\n 'enabled': enabled,\n 'ttl': ttl,\n 'fallback_pool': fallback_pool,\n 'default_pools': default_pools,\n 'az_pools': az_pools\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n headers['Accept'] = 'application/json'\n\n url = '/instances/{0}/dnszones/{1}/load_balancers/{2}'.format(\n *self.encode_path_vars(instance_id, dnszone_id, lb_id))\n request = self.prepare_request(method='PUT',\n url=url,\n headers=headers,\n data=data)\n\n response = self.send(request)\n return response", "def process_load_balancer ( vpc_conn,\n ec2_conn,\n elb_conn,\n cloudwatch_conn,\n r53_conn,\n iam_conn,\n vpc,\n base_name,\n base_topicarn,\n app_name,\n params,\n aws_account_type,\n app_visibility = None,\n public_dns_cname = None,\n public_tcp_ports = [],\n app_tcp_ports = [],\n use_ssl = False,\n ssl_hostname = None\n ) :\n\n if not app_name :\n app_name = params[ 'app-name' ]\n\n if app_visibility 
== 'PUBLIC' :\n subnet_type = 'PRIVATE' # Public apps have app LB's that sit private. The PROXY LB is public.\n elif app_visibility == 'HBO' :\n subnet_type = 'PUBLIC' # HBO apps have app LB's that site public.\n elif app_visibility == 'PRIVATE' :\n subnet_type = 'PRIVATE'\n else :\n subnet_type = params[ 'subnet-type' ]\n\n if not public_dns_cname :\n public_dns_cname = params.get( 'public-dns-alias' )\n\n create = params.get( 'create', 'NO' )\n if create == 'YES':\n print \"Creating load balancer security group.\"\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n if not lb_secgrp :\n lb_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, app_name ),\n 'Controls access to the ' + app_name + ' LB' )\n remove_all_rules( ec2_conn, [ lb_secgrp ] , deep=True, base_name=base_name)\n ## reload the security group after removing the rules\n lb_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, app_name ))\n \n health_check_port = params.get( 'health-check-port', 8080 )\n health_check_url = params.get( 'health-check-url' )\n if not health_check_url :\n health_check_url = '/' + app_name + '/ping.html'\n\n ## Figure out if we need to find the SSL cert.\n ssl_cert_arn = None\n if use_ssl :\n cert = get_aws_ssl_certificate( iam_conn, ssl_cert_name )\n if cert :\n ssl_cert_arn = cert.arn\n else :\n print \"ERROR: Use SSL was specified, but could not find certificate matching host: \" + ssl_cert_name\n sys.exit( 5 )\n\n ## Generate the correct listener rules\n listeners = [ ( 80, 8080, 'http' ) ] # Default listener\n if params.get( 'listener-rules' ) :\n listeners = []\n for listener_rule in params[ 'listener-rules' ] :\n if params[ 'protocol' ] == 'https' :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ],\n ssl_cert_arn) )\n else :\n listeners.append( ( params[ 'incoming-port' ],\n params[ 'outgoing-port' ],\n params[ 'protocol' ] ) )\n ##\n ## FIX: There is a bug here where the public ports are supposed to be set on the proxy if\n ## app_visibility is PUBLIC. 
Don't have time to fix/regression test now...\n ##\n elif len( public_tcp_ports ) == len( app_tcp_ports ) and len( public_tcp_ports ) > 0 :\n listeners = []\n for public_port, app_port in zip( public_tcp_ports, app_tcp_ports ) :\n if public_port == 443 :\n if not ssl_cert_arn :\n print \"ERRROR: https protocol specified, but use_ssl was NOT specified.\"\n sys.exit( 5 )\n listeners.append( ( public_port, app_port, 'https', ssl_cert_arn ) )\n else :\n listeners.append( ( public_port, app_port, 'http' ) )\n\n\n print \"Creating load balancer.\"\n elb = create_elb( elb_conn,\n get_elb_name( base_name, app_name ),\n get_vpc_subnets( vpc_conn, vpc, subnet_type ),\n listeners,\n lb_secgrp,\n health_check_port,\n health_check_url,\n subnet_type == 'PUBLIC' )\n \n elb = find_elb(elb_conn, elb.name)\n \n if params.get( 'monitors' ) :\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, params[ 'monitors' ] )\n\n if subnet_type == 'PUBLIC' :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, elb.dns_name )\n else :\n dns_alias = create_internal_elb_dns_name( base_name, app_name )\n print \"Configuring DNS name for load balancer: \" + dns_alias\n set_dns_cname( r53_conn, dns_alias, elb.dns_name )\n\n if app_visibility == 'HBO' :\n for port in public_tcp_ports :\n lb_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = hbo_cidr_list ) \n\n elif app_visibility == 'PUBLIC' :\n print \"Creating proxy load balancer.\"\n proxy_type = app_name + '-PX'\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n if not proxy_secgrp :\n proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' servers.' )\n \n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n \n if not lb_proxy_secgrp :\n lb_proxy_secgrp = create_secgrp( ec2_conn,\n vpc,\n get_lb_secgrp_name( base_name, proxy_type ),\n 'Controls access to the ' + proxy_type + ' load balancer.' 
)\n\n remove_all_rules( ec2_conn, [ lb_proxy_secgrp, proxy_secgrp ], deep=True, base_name=base_name) \n ## reload the security group after removing the rules\n lb_proxy_secgrp = find_secgrp(ec2_conn, get_lb_secgrp_name( base_name, proxy_type ))\n proxy_secgrp = find_secgrp(ec2_conn, get_secgrp_name( base_name, proxy_type ))\n\n \n ##\n ## FIX: In reality, we need to set the group rules between lb_proxy and proxy to match\n ## the listener ports that were passed in/configured.\n ##\n grant_ssh_access( ec2_conn, [ proxy_secgrp ], find_group( ec2_conn, base_name, 'NAT' ) )\n \n \n ## proxy server port is always 80\n ## updated by yliu, 2014/6/13\n ##if use_ssl :\n ## proxy_port = 443\n ##else :\n ## proxy_port = 80\n proxy_port = 80\n\n ## backend elb port that the proxy server passes request to \n if use_ssl :\n proxy_to_elb_port = 443\n else :\n proxy_to_elb_port = 80\n\n grant_grp_access( ec2_conn, [ lb_proxy_secgrp ], proxy_secgrp, proxy_port )\n grant_grp_access( ec2_conn, [ proxy_secgrp ], lb_secgrp, proxy_to_elb_port )\n for port in public_tcp_ports :\n lb_proxy_secgrp.authorize( ip_protocol = \"tcp\",\n from_port = port,\n to_port = port,\n cidr_ip = all_ip_cidr ) \n\n proxy_listeners = [ ( 80, 80, 'http' ) ]\n if use_ssl :\n proxy_listeners = [ ( 443, proxy_port, 'https', ssl_cert_arn ) ]\n\n proxy_elb = create_elb( elb_conn,\n get_elb_name( base_name, proxy_type ),\n get_vpc_subnets( vpc_conn, vpc, 'PUBLIC' ),\n proxy_listeners,\n lb_proxy_secgrp,\n proxy_port,\n '/robots.txt',\n True )\n add_monitors_to_elb( cloudwatch_conn, base_name, app_name, base_topicarn, proxy_lb_monitor_rules )\n\n if public_dns_cname :\n print \"Setting public DNS alias for load balancer.\"\n set_dns_cname( r53_conn, public_dns_cname, proxy_elb.dns_name )\n else :\n public_dns_cname = ''\n\n print \"Creating proxy instances.\"\n proxy_ami = get_ami_by_name( ec2_conn, proxy_ami_name )\n subnets = get_vpc_subnets( vpc_conn, vpc, 'PRIVATE' )\n\n ## direct proxy server to access backend elb over given protocol\n ## added by yliu, 2014/6/13\n if use_ssl :\n app_elb_protocol = 'https'\n else :\n app_elb_protocol = 'http'\n \n proxy_userdata = get_proxy_userdata( public_dns_cname, elb.dns_name, app_elb_protocol, app_name )\n proxy_instances = []\n \n proxy_keypair = get_keypair_name( aws_account_type, vpc.region.name, \"APACHE\" )\n \n for subnet in subnets : \n instance = launch_instance_vpc( ec2_conn,\n proxy_ami,\n base_name = base_name,\n instance_type = proxy_type,\n keypair = proxy_keypair,\n machine_type = 'm3.xlarge',\n security_group_id = proxy_secgrp.id ,\n subnet_id = subnet.id,\n user_data = proxy_userdata,\n public_ip = False )\n proxy_instances.append( instance )\n\n print \"Setting alarms on the proxy\"\n add_monitors_to_instance( cloudwatch_conn, base_name, instance.id, 'PROXY', base_topicarn, proxy_monitor_rules )\n \n proxy_instance_ids = [ i.id for i in proxy_instances ]\n\n print \"Waiting for proxy instances to be ready\"\n aws_waits( ec2_conn.get_only_instances, proxy_instance_ids )\n\n print \"Adding the new proxy instances into the load balancer.\"\n \n status = swap_elb_instances( elb_conn = elb_conn,\n elb = proxy_elb,\n new_instance_ids = proxy_instance_ids,\n terminate_old_instances = True,\n ec2_conn = ec2_conn,\n cloudwatch_conn = cloudwatch_conn,\n swap_smoothly = False )\n\n else :\n elb = find_elb( elb_conn, get_elb_name( base_name, app_name ) )\n print \"Processing load-balancer actions.\"\n for action_param in params.get( 'actions', [] ) :\n if action_param[ 'type' ] == 
'RESTART_INSTANCES' :\n restart_elb_instances( ec2_conn, elb_conn, elb, params.get( 'restart-smoothly', 'YES' ) == 'YES' )\n\n lb_secgrp = find_group( ec2_conn, base_name, get_lb_secgrp_type( app_name ) )\n dns_alias = None\n\n return ( elb, lb_secgrp, dns_alias )" ]
[ "0.700744", "0.66535103", "0.6332974", "0.61218095", "0.6045121", "0.58444375", "0.57524234", "0.5527921", "0.54759747", "0.5452721", "0.54205173", "0.53978443", "0.53187937", "0.53139496", "0.5307594", "0.5301771", "0.5296107", "0.52546227", "0.5240936", "0.52304596", "0.52251965", "0.52217317", "0.5208918", "0.51688445", "0.5123806", "0.5110425", "0.5098374", "0.5074829", "0.50283927", "0.50247836" ]
0.74186695
0
Delete a specific load balancer.
def delete(self, request, loadbalancer_id):
        conn = get_sdk_connection(request)
        conn.load_balancer.delete_load_balancer(loadbalancer_id,
                                                ignore_missing=True,
                                                cascade=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_balancer(self):\n response = self.client.delete_load_balancer(\n LoadBalancerArn=self.get_balancer_arn()\n )\n assert response['ResponseMetadata']['HTTPStatusCode'] == 200", "def delete(self, load_balancer):\n # type: (LoadBalancer) -> BoundAction\n self._client.request(\n url=\"/load_balancers/{load_balancer_id}\".format(load_balancer_id=load_balancer.id), method=\"DELETE\"\n )\n return True", "def delete(self):\r\n return self.connection.delete_load_balancer(self.name)", "def delete_balancer(ctx):\n if self.balancer_exists():\n self.delete_balancer()\n ctx.info('Successfully deleted load balancer {}:'.format(self.get_balancer_name()))\n else:\n ctx.info('Load balancer {} does not exist, nothing to delete.'.format(\n self.get_balancer_name()\n ))", "def delete_loadbalancer(self, context, loadbalancer):\n LOG.info(\"Received request 'Delete Loadbalancer' for LB:%(lb)s \",\n {'lb': loadbalancer['id']})\n\n arg_dict = {'context': context,\n lb_const.LOADBALANCER: loadbalancer,\n }\n self._send_event(lb_const.EVENT_DELETE_LOADBALANCER_V2, arg_dict,\n serialize=True, binding_key=loadbalancer['id'],\n key=loadbalancer['id'])", "def delete(self, loadbalancer_id):\n response.status = 201", "def delete_loadbalancer(self, context, lb):\n deployment_model = self._get_setting(\n lb.tenant_id, \"lbaas_settings\", \"deployment_model\"\n )\n hostnames = self._get_hostname(lb)\n if deployment_model in [\"PER_TENANT\", \"PER_SUBNET\"]:\n vapv = self._get_vapv(hostnames)\n if not vapv.tip_group.list():\n self._destroy_vapv(hostnames, lb)\n elif deployment_model == \"PER_TENANT\":\n # Delete subnet ports if no longer required\n if self.openstack_connector.subnet_in_use(lb) is False:\n self._detach_subnet_port(vapv, hostnames, lb)\n for hostname in hostnames:\n port_ids = self.openstack_connector.get_server_port_ids(\n hostname\n )\n self.openstack_connector.delete_ip_from_ports(\n lb.vip_address, port_ids\n )\n elif deployment_model == \"PER_LOADBALANCER\":\n self._destroy_vapv(hostnames, lb)", "def delete(ctx):\n delete_listeners(ctx)\n delete_balancer(ctx)\n delete_target_groups(ctx)\n\n ctx.info('Load balancers deletion completed.')", "def delete(env, identifier, listener):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n try:\n mgr.remove_lb_listener(uuid, listener)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def delete(self, request, pool_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_pool,\n pool_id,\n load_balancer_getter=pool_get_load_balancer_id,\n resource_id=pool_id)", "def delete(self, params=None):\n self.logger.debug('Deleting %s with parameters: %s'\n % (self.type_name, params))\n return self.client.delete_load_balancer_policy(**params)", "def delete(self, request, listener_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_listener,\n listener_id, ignore_missing=True,\n load_balancer_getter=listener_get_load_balancer_id,\n resource_id=listener_id)", "def delete_load_balancer(self,\n instance_id: str,\n dnszone_id: str,\n lb_id: str,\n *,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if dnszone_id is None:\n raise ValueError('dnszone_id must be provided')\n if lb_id is None:\n raise ValueError('lb_id must be provided')\n headers = {\n 
'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='delete_load_balancer')\n headers.update(sdk_headers)\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n\n url = '/instances/{0}/dnszones/{1}/load_balancers/{2}'.format(\n *self.encode_path_vars(instance_id, dnszone_id, lb_id))\n request = self.prepare_request(method='DELETE',\n url=url,\n headers=headers)\n\n response = self.send(request)\n return response", "def delete(self, request, flavor_id):\n conn = get_sdk_connection(request)\n conn.load_balancer.delete_flavor(flavor_id,\n ignore_missing=True)", "def delete_service(self, load_balancer, service):\n # type: (Union[LoadBalancer, BoundLoadBalancer], LoadBalancerService) -> List[BoundAction]\n data = {\n \"listen_port\": service.listen_port,\n }\n\n response = self._client.request(\n url=\"/load_balancers/{load_balancer_id}/actions/delete_service\".format(load_balancer_id=load_balancer.id),\n method=\"POST\", json=data)\n return BoundAction(self._client.actions, response['action'])", "def delete_load_balancer_rule(self, load_balancer_rule_id): \n params = {'command':'deleteLoadBalancerRule',\n 'id':load_balancer_rule_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deleteloadbalancerruleresponse']['jobid']\n self.logger.debug('Start job - deleteLoadBalancerRule: %s' % res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)", "def delete(ctx, iface, resource_config, **_):\n\n # Create a copy of the resource config for clean manipulation.\n params = \\\n dict() if not resource_config else resource_config.copy()\n\n lb = params.get(LB_NAME) or ctx.instance.runtime_properties.get(LB_NAME)\n policy = \\\n params.get(RESOURCE_NAME) or \\\n ctx.instance.runtime_properties.get(RESOURCE_NAME)\n\n lb_delete_params = {\n LB_NAME: lb,\n RESOURCE_NAME: policy\n }\n\n try:\n iface.delete(lb_delete_params)\n except ClientError as e:\n if _.get('force'):\n raise OperationRetry('Retrying: {0}'.format(text_type(e)))\n pass", "def delete(self, request, member_id, pool_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_member,\n member_id, pool_id,\n load_balancer_getter=pool_get_load_balancer_id,\n resource_id=pool_id)", "def delete_node(self, loadbalancer, node):\n lb = node.parent\n if not lb:\n raise exc.UnattachedNode(\"No parent Load Balancer for this node \"\n \"could be determined.\")\n resp, body = self.api.method_delete(\"/loadbalancers/%s/nodes/%s\" %\n (lb.id, node.id))\n return resp, body", "def delete_listener(self, context, listener):\n LOG.info(\"Received request 'Delete Listener' for LB:%(lb)s \",\n {'lb': listener['loadbalancer_id']})\n arg_dict = {'context': context,\n lb_const.LISTENER: listener,\n }\n self._send_event(lb_const.EVENT_DELETE_LISTENER_V2, arg_dict,\n serialize=True,\n binding_key=listener['loadbalancer_id'],\n key=listener['id'])", "def delete(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_l7_policy,\n l7_policy_id,\n load_balancer_getter=l7_policy_get_load_balancer_id,\n resource_id=l7_policy_id)", "def post_loadbalancer_member_delete(self, resource_id, resource_dict):\n pass", "def pre_loadbalancer_member_delete(self, resource_id):\n pass", "def delete(self):\n 
self._lbcall('delete_pool', [self._name])", "def delete_entity(self, context, lb_obj):\n resource_path = \"%s/%s/%s\" % (RESOURCE_PREFIX, LBS_RESOURCE, lb_obj.id)\n msg = _(\"NetScaler driver lb_obj removal: %s\") % lb_obj.id\n LOG.debug(msg)\n self.client.remove_resource(context.tenant_id, resource_path)", "def delete_health_monitor(self, loadbalancer):\n uri = \"/loadbalancers/%s/healthmonitor\" % utils.get_id(loadbalancer)\n resp, body = self.api.method_delete(uri)", "def detach_elastic_load_balancer(ElasticLoadBalancerName=None, LayerId=None):\n pass", "def delete_health_monitor(self, loadbalancer):\n return loadbalancer.delete_health_monitor()", "def pre_loadbalancer_pool_delete(self, resource_id):\n pass", "def load_delete(id):\n load_key = client.key(\"load\", int(id))\n load = client.get(key=load_key)\n if not load:\n failed = {\"Error\": \"No load with this load_id exists\"}\n response = Response(\n response=json.dumps(failed),\n status=404,\n mimetype='application/json'\n )\n return response\n elif request.data:\n failed = {\"Error\": \"The request object does not follow specifications - see documentation.\"}\n response = Response(\n response=json.dumps(failed),\n status=400,\n mimetype='application/json'\n )\n return response\n client.delete(load_key)\n\n # Remove load from the boat if it was on one\n if load[\"carrier\"]:\n boat_key = client.key(\"boat\", load[\"carrier\"][\"id\"])\n boat = client.get(key=boat_key)\n\n # for load in boat[\"loads\"]:\n loads = [x for x in boat[\"loads\"] if x[\"id\"] != int(id)]\n boat.update({\"loads\": loads})\n client.put(boat)\n\n response = Response(\n status=204,\n mimetype='application/json'\n )\n return response" ]
[ "0.82724464", "0.82108885", "0.8187755", "0.8121004", "0.8085591", "0.7962943", "0.7837547", "0.7503282", "0.7202904", "0.703992", "0.6937303", "0.6915725", "0.6653849", "0.6465005", "0.6410816", "0.64020866", "0.639109", "0.63578004", "0.63545287", "0.62534803", "0.6184379", "0.6149524", "0.6118748", "0.61032206", "0.60931003", "0.60620403", "0.6028984", "0.6028097", "0.6019442", "0.6006143" ]
0.82243186
1
Get a specific listener. If the param 'includeChildResources' is passed in as a truthy value, the details of all resources that exist under the listener will be returned along with the listener details.
def get(self, request, listener_id):
        conn = get_sdk_connection(request)
        listener = conn.load_balancer.find_listener(listener_id)
        listener = _get_sdk_object_dict(listener)

        if request.GET.get('includeChildResources'):
            resources = {}
            resources['listener'] = listener

            if listener.get('default_pool_id'):
                pool_id = listener['default_pool_id']
                pool = conn.load_balancer.find_pool(pool_id)
                pool = _get_sdk_object_dict(pool)
                resources['pool'] = pool

                if pool.get('members'):
                    member_list = _sdk_object_to_list(
                        conn.load_balancer.members(pool_id))
                    resources['members'] = member_list

                if pool.get('health_monitor_id'):
                    monitor_id = pool['health_monitor_id']
                    monitor = conn.load_balancer.find_health_monitor(
                        monitor_id)
                    monitor = _get_sdk_object_dict(monitor)
                    resources['monitor'] = monitor

            return resources
        else:
            return listener
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(self, request):\n loadbalancer_id = request.GET.get('loadbalancerId')\n conn = get_sdk_connection(request)\n listener_list = _sdk_object_to_list(conn.load_balancer.listeners(\n project_id=request.user.project_id))\n\n if loadbalancer_id:\n listener_list = self._filter_listeners(listener_list,\n loadbalancer_id)\n return {'items': listener_list}", "def get_listener(self, service, bigip):\n vip = self.service_adapter.get_virtual_name(service)\n obj = self.vs_helper.load(bigip=bigip,\n name=vip[\"name\"],\n partition=vip[\"partition\"])\n return obj", "def listener_arn(self) -> Optional[str]:\n return pulumi.get(self, \"listener_arn\")", "def listener_id(self) -> str:\n return pulumi.get(self, \"listener_id\")", "def listener_id(self) -> str:\n return pulumi.get(self, \"listener_id\")", "def listener_id(self) -> str:\n return pulumi.get(self, \"listener_id\")", "def performance_listener(self) -> Instrument:\n return self._performance_listener", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n advanced_filter: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionAdvancedFilterArgs']]] = None,\n advanced_filtering_on_arrays_enabled: Optional[pulumi.Input[bool]] = None,\n azure_function_endpoint: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionAzureFunctionEndpointArgs']]] = None,\n dead_letter_identity: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionDeadLetterIdentityArgs']]] = None,\n delivery_identity: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionDeliveryIdentityArgs']]] = None,\n delivery_properties: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EventSubscriptionDeliveryPropertyArgs']]]]] = None,\n event_delivery_schema: Optional[pulumi.Input[str]] = None,\n eventhub_endpoint_id: Optional[pulumi.Input[str]] = None,\n expiration_time_utc: Optional[pulumi.Input[str]] = None,\n hybrid_connection_endpoint_id: Optional[pulumi.Input[str]] = None,\n included_event_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n labels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n retry_policy: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionRetryPolicyArgs']]] = None,\n scope: Optional[pulumi.Input[str]] = None,\n service_bus_queue_endpoint_id: Optional[pulumi.Input[str]] = None,\n service_bus_topic_endpoint_id: Optional[pulumi.Input[str]] = None,\n storage_blob_dead_letter_destination: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionStorageBlobDeadLetterDestinationArgs']]] = None,\n storage_queue_endpoint: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionStorageQueueEndpointArgs']]] = None,\n subject_filter: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionSubjectFilterArgs']]] = None,\n webhook_endpoint: Optional[pulumi.Input[pulumi.InputType['EventSubscriptionWebhookEndpointArgs']]] = None) -> 'EventSubscription':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _EventSubscriptionState.__new__(_EventSubscriptionState)\n\n __props__.__dict__[\"advanced_filter\"] = advanced_filter\n __props__.__dict__[\"advanced_filtering_on_arrays_enabled\"] = advanced_filtering_on_arrays_enabled\n __props__.__dict__[\"azure_function_endpoint\"] = azure_function_endpoint\n __props__.__dict__[\"dead_letter_identity\"] = dead_letter_identity\n __props__.__dict__[\"delivery_identity\"] = delivery_identity\n __props__.__dict__[\"delivery_properties\"] = 
delivery_properties\n __props__.__dict__[\"event_delivery_schema\"] = event_delivery_schema\n __props__.__dict__[\"eventhub_endpoint_id\"] = eventhub_endpoint_id\n __props__.__dict__[\"expiration_time_utc\"] = expiration_time_utc\n __props__.__dict__[\"hybrid_connection_endpoint_id\"] = hybrid_connection_endpoint_id\n __props__.__dict__[\"included_event_types\"] = included_event_types\n __props__.__dict__[\"labels\"] = labels\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"retry_policy\"] = retry_policy\n __props__.__dict__[\"scope\"] = scope\n __props__.__dict__[\"service_bus_queue_endpoint_id\"] = service_bus_queue_endpoint_id\n __props__.__dict__[\"service_bus_topic_endpoint_id\"] = service_bus_topic_endpoint_id\n __props__.__dict__[\"storage_blob_dead_letter_destination\"] = storage_blob_dead_letter_destination\n __props__.__dict__[\"storage_queue_endpoint\"] = storage_queue_endpoint\n __props__.__dict__[\"subject_filter\"] = subject_filter\n __props__.__dict__[\"webhook_endpoint\"] = webhook_endpoint\n return EventSubscription(resource_name, opts=opts, __props__=__props__)", "def listener(self, aaidee=0):\n return Listener(self._ptr, aaidee)", "def listener_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"listener_id\")", "def listener_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"listener_id\")", "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.find_l7_policy(l7_policy_id)\n l7_policy = _get_sdk_object_dict(l7_policy)\n\n if request.GET.get('includeChildResources'):\n resources = {}\n\n if l7_policy.get('rules'):\n l7_rules_list = _sdk_object_to_list(\n conn.load_balancer.l7_rules(l7_policy_id))\n l7_policy['rules'] = l7_rules_list\n\n resources['l7policy'] = l7_policy\n\n return resources\n else:\n return l7_policy", "def listener_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"listener_id\")", "def listener_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"listener_id\")", "def get_subresource(self, subresource, key_name='', headers=None,\r\n version_id=None):\r\n if not subresource:\r\n raise TypeError('get_subresource called with subresource=None')\r\n query_args = subresource\r\n if version_id:\r\n query_args += '&versionId=%s' % version_id\r\n response = self.connection.make_request('GET', self.name, key_name,\r\n query_args=query_args,\r\n headers=headers)\r\n body = response.read()\r\n if response.status != 200:\r\n raise self.connection.provider.storage_response_error(\r\n response.status, response.reason, body)\r\n return body", "def rp_listener(rp_service):\n return RPReportListener(rp_service)", "def listener_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"listener_id\")", "def get(self, request, pool_id):\n conn = get_sdk_connection(request)\n pool = conn.load_balancer.find_pool(pool_id)\n pool = _get_sdk_object_dict(pool)\n\n if request.GET.get('includeChildResources'):\n resources = {}\n resources['pool'] = pool\n\n if pool.get('members'):\n member_list = _sdk_object_to_list(\n conn.load_balancer.members(pool_id))\n resources['members'] = member_list\n\n if pool.get('health_monitor_id'):\n monitor_id = pool['health_monitor_id']\n monitor = conn.load_balancer.find_health_monitor(\n monitor_id)\n monitor = _get_sdk_object_dict(monitor)\n resources['monitor'] = monitor\n\n return resources\n else:\n return pool", "def describe_listeners(self):\n balancer_arn = self.get_balancer_arn()\n\n response = 
self.client.describe_listeners(\n LoadBalancerArn=balancer_arn,\n )\n assert response['ResponseMetadata']['HTTPStatusCode'] == 200\n\n return response['Listeners']", "def listener_description(self) -> str:\n return pulumi.get(self, \"listener_description\")", "def _get_extension_resource(api_root):\n # TODO: Cache this. We only use this resource as a link to sub-resources.\n return api_root.get_extension(\n extension_name='reviewbotext.extension.ReviewBotExtension')", "def getResource(self):\n return self.serviceClass.app.resource()", "def getResourceRecognition(self, authenticationToken, guid):\r\n self.send_getResourceRecognition(authenticationToken, guid)\r\n return self.recv_getResourceRecognition()", "def get(self, eventId, uid):\n raise NotImplementedError", "def create_listener(request, **kwargs):\n data = request.DATA\n\n try:\n default_tls_ref = data['certificates'][0]\n except (KeyError, IndexError):\n default_tls_ref = None\n\n conn = get_sdk_connection(request)\n # TODO(johnsom) Add SNI support\n # https://bugs.launchpad.net/octavia/+bug/1714294\n listener = conn.load_balancer.create_listener(\n protocol=data['listener']['protocol'],\n protocol_port=data['listener']['protocol_port'],\n load_balancer_id=kwargs['loadbalancer_id'],\n name=data['listener'].get('name'),\n description=data['listener'].get('description'),\n connection_limit=data['listener'].get('connection_limit'),\n default_tls_container_ref=default_tls_ref,\n sni_container_refs=None,\n admin_state_up=data['listener'].get('admin_state_up'),\n insert_headers=data['listener'].get('insert_headers'),\n timeout_client_data=data['listener'].get('timeout_client_data'),\n timeout_member_connect=data['listener'].get('timeout_member_connect'),\n timeout_member_data=data['listener'].get('timeout_member_data'),\n timeout_tcp_inspect=data['listener'].get('timeout_tcp_inspect'),\n allowed_cidrs=data['listener'].get('allowed_cidrs'),\n # Replace empty string by None (uses default tls cipher string)\n tls_ciphers=data['listener'].get('tls_ciphers') or None,\n )\n\n if data.get('pool'):\n args = (request, kwargs['loadbalancer_id'], create_pool)\n kwargs = {'callback_kwargs': {'listener_id': listener.id}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n\n return _get_sdk_object_dict(listener)", "def get(self, request):\n listener_id = request.GET.get('listenerId')\n conn = get_sdk_connection(request)\n l7_policy_list = _sdk_object_to_list(conn.load_balancer.l7_policies(\n listener_id=listener_id))\n return {'items': l7_policy_list}", "def get_resource(self, *args, **kwargs):\n target_uri = self._build_uri(**kwargs)\n resource_type = None\n if args:\n resource_type = args[2]\n elif not args and kwargs:\n resource_type = kwargs.get('resource_level')\n return self.get_request(\n target_uri, resource_type, kwargs.get('params'))", "def get(owner_name, resource_name):\n resource = get_node(owner_name, resource_name)\n return resource if isinstance(resource, Resource) else None", "def getResource(self, authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData):\r\n self.send_getResource(authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData)\r\n return self.recv_getResource()", "def get_service(listener, cert_manager, esd_repository):\n\n # Entities is a list of tuples, which each describe AS3 objects\n # which may reference each other but do not form a hierarchy.\n entities = []\n vip = listener.load_balancer.vip\n project_id = 
listener.load_balancer.project_id\n label = as3types.f5label(listener.name or listener.description)\n virtual_address = '{}'.format(vip.ip_address)\n service_args = {\n 'virtualPort': listener.protocol_port,\n 'persistenceMethods': [],\n 'iRules': [],\n 'policyEndpoint': [],\n 'label': label\n }\n\n def is_http2(listener):\n \"\"\"Check whether a listener counts as HTTP2.\n\n It would suffice to count a listener with alpn_protocols set to anything as HTTP2(-capable), because the\n HTTP2 profile acts like normal HTTP when HTTP2 isn't explicitely requested via ALPN. However,\n having a listener be able to speak HTTP2 without it being declared that way is unexpected behavior.\n \"\"\"\n return (hasattr(listener, 'alpn_protocols') and listener.alpn_protocols and\n lib_consts.ALPN_PROTOCOL_HTTP_2 in listener.alpn_protocols)\n\n # Custom virtual address settings\n if CONF.f5_agent.service_address_icmp_echo:\n service_address = as3.ServiceAddress(virtualAddress=virtual_address,\n icmpEcho=CONF.f5_agent.service_address_icmp_echo)\n entities.append((m_app.get_name(listener.load_balancer.id), service_address))\n service_args['virtualAddresses'] = [[as3.Pointer(m_app.get_name(listener.load_balancer.id)), virtual_address]]\n else:\n service_args['virtualAddresses'] = [virtual_address]\n\n # Determine service type\n # TCP\n if listener.protocol == lib_consts.PROTOCOL_TCP:\n service_args['_servicetype'] = CONF.f5_agent.tcp_service_type\n # UDP\n elif listener.protocol == lib_consts.PROTOCOL_UDP:\n service_args['_servicetype'] = f5_const.SERVICE_UDP\n # HTTP\n elif listener.protocol == lib_consts.PROTOCOL_HTTP:\n service_args['_servicetype'] = f5_const.SERVICE_HTTP\n # HTTPS (non-terminated, forward TCP traffic)\n elif listener.protocol == lib_consts.PROTOCOL_HTTPS:\n service_args['_servicetype'] = CONF.f5_agent.tcp_service_type\n # Terminated HTTPS\n elif listener.protocol == lib_consts.PROTOCOL_TERMINATED_HTTPS:\n service_args['_servicetype'] = f5_const.SERVICE_HTTPS\n service_args['serverTLS'] = m_tls.get_listener_name(listener.id)\n service_args['redirect80'] = False\n\n # Certificate Handling\n auth_name = None\n certificates = cert_manager.get_certificates(listener)\n\n # Client Side Certificates\n if listener.client_ca_tls_certificate_id and listener.client_authentication != 'NONE':\n try:\n auth_name, secret = cert_manager.load_secret(project_id, listener.client_ca_tls_certificate_id)\n entities.append((auth_name, m_cert.get_ca_bundle(secret, auth_name, auth_name)))\n except exceptions.CertificateRetrievalException as e:\n LOG.error(\"Error fetching certificate: %s\", e)\n\n # TLS renegotiation has to be turned off for HTTP2, in order to be compliant.\n allow_renegotiation = not is_http2(listener)\n\n entities.append((\n m_tls.get_listener_name(listener.id),\n m_tls.get_tls_server([cert['id'] for cert in certificates], listener, auth_name,\n allow_renegotiation)\n ))\n entities.extend([(cert['id'], cert['as3']) for cert in certificates])\n # Proxy\n elif listener.protocol == lib_consts.PROTOCOL_PROXY:\n service_args['_servicetype'] = f5_const.SERVICE_TCP\n name, irule = m_irule.get_proxy_irule()\n service_args['iRules'].append(name)\n entities.append((name, irule))\n\n # Set allowed cidrs\n if hasattr(listener, 'allowed_cidrs') and listener.allowed_cidrs:\n cidrs = [c.cidr for c in listener.allowed_cidrs]\n if '0.0.0.0/0' not in cidrs:\n # 0.0.0.0/0 - means all sources are allowed, no filtering needed\n entities.append((get_data_group_name(listener.id), as3.Data_Group(cidrs)))\n 
service_args['iRules'].append(as3.BigIP(CONF.f5_agent.irule_allowed_cidrs))\n\n # maximum number of connections\n if listener.connection_limit > 0:\n service_args['maxConnections'] = listener.connection_limit\n\n # Link default pool\n default_pool = None\n if listener.default_pool_id:\n default_pool = listener.default_pool\n\n if default_pool is not None and default_pool.provisioning_status != lib_consts.PENDING_DELETE:\n pool_name = m_pool.get_name(listener.default_pool_id)\n service_args['pool'] = pool_name\n\n # Add iRules only to Proxy Protocol pool, everything else is determined by listener type\n if default_pool.protocol == lib_consts.PROTOCOL_PROXY:\n name, irule = m_irule.get_proxy_irule()\n service_args['iRules'].append(name)\n entities.append((name, irule))\n\n # Pool member certificate handling (TLS backends)\n if default_pool.tls_enabled and listener.protocol in \\\n [lib_consts.PROTOCOL_PROXY, lib_consts.PROTOCOL_HTTP, lib_consts.PROTOCOL_TERMINATED_HTTPS]:\n client_cert = None\n trust_ca = None\n crl_file = None\n\n service_args['clientTLS'] = m_tls.get_pool_name(default_pool.id)\n certificates = cert_manager.get_certificates(default_pool)\n if len(certificates) == 1:\n cert = certificates.pop()\n entities.append((cert['id'], cert['as3']))\n client_cert = cert['id']\n\n if default_pool.ca_tls_certificate_id:\n trust_ca, secret = cert_manager.load_secret(\n project_id, default_pool.ca_tls_certificate_id)\n entities.append((trust_ca, m_cert.get_ca_bundle(\n secret, trust_ca, trust_ca)))\n\n if default_pool.crl_container_id:\n # TODO: CRL currently not supported\n pass\n\n entities.append((\n m_tls.get_pool_name(default_pool.id),\n m_tls.get_tls_client(\n default_pool,\n trust_ca=trust_ca,\n client_cert=client_cert,\n crl_file=crl_file\n )\n ))\n\n # Insert header iRules\n if service_args['_servicetype'] in f5_const.SERVICE_HTTP_TYPES:\n # HTTP profiles only\n for name, irule in m_irule.get_header_irules(listener.insert_headers):\n service_args['iRules'].append(name)\n entities.append((name, irule))\n\n # session persistence\n if listener.default_pool_id and listener.default_pool.session_persistence:\n persistence = listener.default_pool.session_persistence\n lb_algorithm = listener.default_pool.lb_algorithm\n\n if service_args['_servicetype'] in f5_const.SERVICE_HTTP_TYPES:\n # Add APP_COOKIE / HTTP_COOKIE persistence only in HTTP profiles\n if persistence.type == lib_consts.SESSION_PERSISTENCE_APP_COOKIE and persistence.cookie_name:\n # generate iRule for cookie_name\n escaped_cookie = persistence.cookie_name\n escaped_cookie.replace(\"\\\"\", \"\")\n irule_name, irule = m_irule.get_app_cookie_irule(escaped_cookie)\n entities.append((irule_name, irule))\n\n # add iRule to universal persistence profile\n name, obj_persist = m_persist.get_app_cookie(escaped_cookie)\n service_args['persistenceMethods'] = [as3.Pointer(name)]\n entities.append((name, obj_persist))\n if lb_algorithm == lib_consts.LB_ALGORITHM_SOURCE_IP:\n service_args['fallbackPersistenceMethod'] = 'source-address'\n\n elif persistence.type == lib_consts.SESSION_PERSISTENCE_HTTP_COOKIE:\n service_args['persistenceMethods'] = ['cookie']\n if lb_algorithm == lib_consts.LB_ALGORITHM_SOURCE_IP:\n service_args['fallbackPersistenceMethod'] = 'source-address'\n\n if persistence.type == lib_consts.LB_ALGORITHM_SOURCE_IP:\n if not persistence.persistence_timeout and not persistence.persistence_granularity:\n service_args['persistenceMethods'] = ['source-address']\n else:\n name, obj_persist = m_persist.get_source_ip(\n 
persistence.persistence_timeout,\n persistence.persistence_granularity\n )\n service_args['persistenceMethods'] = [as3.Pointer(name)]\n entities.append((name, obj_persist))\n\n # Map listener tags to ESDs\n for tag in listener.tags:\n\n # get ESD of same name\n esd = esd_repository.get_esd(tag)\n if esd is None:\n continue\n\n # enrich service with iRules and other things defined in ESD\n esd_entities = get_esd_entities(service_args['_servicetype'], esd)\n for entity_name in esd_entities:\n if entity_name == 'iRules':\n service_args['iRules'].extend(esd_entities['iRules'])\n else:\n service_args[entity_name] = esd_entities[entity_name]\n\n endpoint_policies = []\n # Map special L7policies to ESDs\n # TODO: Remove this as soon as all customers have migrated their scripts.\n # Triggering ESDs via L7policies is considered deprecated. Tags should be used instead. See the code above.\n for policy in listener.l7policies:\n # get ESD of same name\n esd = esd_repository.get_esd(policy.name)\n\n # Add ESD or regular endpoint policy\n if esd:\n # enrich service with iRules and other things defined in ESD\n esd_entities = get_esd_entities(service_args['_servicetype'], esd)\n for entity_name in esd_entities:\n if entity_name == 'iRules':\n service_args['iRules'].extend(esd_entities['iRules'])\n else:\n service_args[entity_name] = esd_entities[entity_name]\n elif policy.provisioning_status != lib_consts.PENDING_DELETE:\n endpoint_policies.append(policy)\n\n # UDP listener won't support policies\n if endpoint_policies and not service_args['_servicetype'] == f5_const.SERVICE_UDP:\n # add a regular endpoint policy\n policy_name = m_policy.get_wrapper_name(listener.id)\n\n # make endpoint policy object\n endpoint_policy = (policy_name, m_policy.get_endpoint_policy(endpoint_policies))\n entities.append(endpoint_policy)\n\n # reference endpoint policy object in service\n service_args['policyEndpoint'].append(policy_name)\n\n # Ensure no duplicate iRules\n service_args['iRules'] = list(set(service_args['iRules']))\n\n # fastL4 profile doesn't support iRules or custom TCP profiles,\n # fallback to TCP Service when iRules/Profiles detected\n if service_args['_servicetype'] == f5_const.SERVICE_L4 and (\n service_args['iRules'] or 'profileTCP' in service_args):\n service_args['_servicetype'] = f5_const.SERVICE_TCP\n\n # add default profiles to supported listeners\n if CONF.f5_agent.profile_http and service_args['_servicetype'] in f5_const.SERVICE_HTTP_TYPES:\n if 'profileHTTP' not in service_args:\n service_args['profileHTTP'] = as3.BigIP(CONF.f5_agent.profile_http)\n if CONF.f5_agent.profile_http2 and service_args['_servicetype'] in f5_const.SERVICE_HTTPS and is_http2(listener):\n if 'profileHTTP' not in service_args:\n LOG.error(\"Misconfiguration detected: listener %s should be configured with\"\n \" HTTP/2 profile but does not contain HTTP profile.\", listener.id)\n elif 'profileHTTP2' not in service_args:\n service_args['profileHTTP2'] = as3.BigIP(CONF.f5_agent.profile_http2)\n if CONF.f5_agent.profile_l4 and service_args['_servicetype'] == f5_const.SERVICE_L4:\n if 'profileL4' not in service_args:\n service_args['profileL4'] = as3.BigIP(CONF.f5_agent.profile_l4)\n if CONF.f5_agent.profile_tcp and service_args['_servicetype'] in f5_const.SERVICE_TCP_TYPES:\n if 'profileTCP' not in service_args:\n service_args['profileTCP'] = as3.BigIP(CONF.f5_agent.profile_tcp)\n if CONF.f5_agent.profile_udp and service_args['_servicetype'] == f5_const.SERVICE_UDP:\n if 'profileUDP' not in service_args:\n 
service_args['profileUDP'] = as3.BigIP(CONF.f5_agent.profile_udp)\n\n # Use the virtual-server address as SNAT address\n if f5_const.LISTENER_TAG_NO_SNAT in listener.tags:\n service_args['snat'] = 'none'\n elif CONF.f5_agent.snat_virtual:\n service_args['snat'] = 'self'\n\n # create service object and fill in additional fields\n service = as3.Service(**service_args)\n\n # add service to entities and return\n entities.append((get_name(listener.id), service))\n return entities" ]
[ "0.5707967", "0.5301975", "0.4999579", "0.49626032", "0.49626032", "0.49626032", "0.48981005", "0.48367912", "0.47430673", "0.47226316", "0.47226316", "0.47204754", "0.46784675", "0.46784675", "0.46152645", "0.45895073", "0.45876774", "0.4575969", "0.45758468", "0.45382637", "0.4520648", "0.45040134", "0.44585332", "0.443007", "0.4405849", "0.43879294", "0.43722618", "0.4348456", "0.4318365", "0.42498237" ]
0.75506186
0
Edit a listener as well as any resources below it.
def put(self, request, listener_id):
        kwargs = {'listener_id': listener_id}
        update_listener(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def edit(env, identifier, listener, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n new_listener = {\n 'listenerUuid': listener\n }\n\n arg_to_option = {\n 'frontprotocol': 'frontendProtocol',\n 'backprotocol': 'backendProtocol',\n 'frontport': 'frontendPort',\n 'backport': 'backendPort',\n 'method': 'loadBalancingMethod',\n 'connections': 'maxConn',\n 'sticky': 'sessionType',\n 'clienttimeout': 'clientTimeout',\n 'sslcert': 'tlsCertificateId'\n }\n\n for key, value in args.items():\n if value:\n new_listener[arg_to_option[key]] = value\n\n try:\n mgr.add_lb_listener(uuid, new_listener)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def AddListener(self, listener):\n pass", "def add_change_listener(self, listener: INotifiable):\n # Do nothing...", "def link_edit_callback(self):\n pass", "def on_edit(self, dataobj):", "def edit(self):\n\n pass", "def set_listener ( self, obj, name, remove ):\n if obj is not None:\n obj.on_trait_change( self._changed, name, remove = remove )\n if obj._is_list_trait( name ):\n obj.on_trait_change( self._changed, name + '_items',\n remove = remove )", "def set_listener(self, listener):\n\t\tth = current_thread()\n\n\t\t#print '>> SET listener on', th.name, listener\n\n\t\tth.listener = listener", "def process_IN_MODIFY(self, event):", "def _activated(self, listener):\n pass", "def update_listener(self, context, old_listener, listener):\n old_val, new_val = self.get_diff_of_dict(old_listener, listener)\n LOG.info(\"Received request 'Update Listener' for Listener:\"\n \"%(listener)s in LB:%(lb_id)s with new Param:\"\n \"%(new_val)s and old Param:%(old_val)s\",\n {'lb_id': listener['loadbalancer_id'],\n 'listener': listener['id'],\n 'old_val': old_val,\n 'new_val': new_val})\n arg_dict = {'context': context,\n lb_const.OLD_LISTENER: old_listener,\n lb_const.LISTENER: listener,\n }\n self._send_event(lb_const.EVENT_UPDATE_LISTENER_V2, arg_dict,\n serialize=True,\n binding_key=listener['loadbalancer_id'],\n key=listener['id'])", "def prep_listener(self):\n # Search for listener.\n listener = search.object_search(self.key+\"-listener\",\n typeclass=Listener)\n\n if listener:\n # Use an existing listener.\n listener = listener[0]\n listener.move_to(self.db.ev_location, quiet=True)\n self.db.listener = listener\n listener.db.bot = self\n else:\n # Create a new listener.\n listener = create.create_object(Listener, key=self.key+\"-listener\",\n location=self.db.ev_location)\n self.db.listener = listener\n listener.db.bot = self", "def edit(self, **kwargs):\n ...", "def edit_event_task(self):\n self.edit_event()", "def edit():", "def add_listener(self, listener):\r\n self.listeners.append(listener)", "def on_edit(self, event, text):\n return None", "async def _e_edit(self, ctx, name, *, text):\n event = self.database.get_guild_event(ctx.guild.id, name)\n if not event:\n await ctx.send(\"That event doesn't exist. 
Maybe you meant to `add` it instead?\")\n return\n event.name = name\n event.text = text\n self.database.save_item(event)\n await ctx.send(f\"Event {name} successfully edited\")", "def register_edit_view(self, blueprint):\n view = apply_decorators(self.edit_view, self.edit_decorators)\n blueprint.add_url_rule(\n self.edit_rule, self.edit_endpoint, view, methods=['GET', 'POST'])", "def register_on_edit(func, clazz):\n \n _on_edit[clazz] = func", "def addServiceListener(self, listener: ghidra.framework.plugintool.util.ServiceListener) -> None:\n ...", "def get_update_listener_flow(self):\n update_listener_flow = linear_flow.Flow(constants.UPDATE_LISTENER_FLOW)\n update_listener_flow.add(lifecycle_tasks.ListenersToErrorOnRevertTask(\n requires=[constants.LOADBALANCER, constants.LISTENERS]))\n #update_listener_flow.add(amphora_driver_tasks.ListenersUpdate(\n # requires=[constants.LOADBALANCER, constants.LISTENERS]))\n update_listener_flow.add(a10_database_tasks.GetVThunderByLoadBalancer(\n requires=constants.LOADBALANCER,\n provides=a10constants.VTHUNDER))\n update_listener_flow.add(handler_virtual_port.ListenersUpdate(\n requires=[constants.LOADBALANCER, constants.LISTENER, a10constants.VTHUNDER]))\n\n update_listener_flow.add(database_tasks.UpdateListenerInDB(\n requires=[constants.LISTENER, constants.UPDATE_DICT]))\n update_listener_flow.add(database_tasks.\n MarkLBAndListenersActiveInDB(\n requires=[constants.LOADBALANCER,\n constants.LISTENERS]))\n\n return update_listener_flow", "async def edit(self, *, name, roles: Optional[Any] = ..., reason: Optional[Any] = ...):\n ...", "def listenerModifiche(self):\n\n # solo se non è stata caricata una rete\n # ad ogni modifica effettuata \n if (self.ui.but_caricaRete.isEnabled() or self.__convalida == True) :\n # resetto la convalida della rete\n self.__convalida = False\n self.ui.but_convalida.setText(\"convalida\")\n # resetto la possibilità di effettuare il training\n self.__sequenza = False\n self.ui.but_salva.setEnabled(True)\n self.ui.but_salva.setText('salva')", "def listener(self, aaidee=0):\n return Listener(self._ptr, aaidee)", "def editability(self, editability):\n\n self._editability = editability", "def add_update_listener(self, listener: WorklistUpdateListener):\n self.__worklist_update_listeners.add(listener)", "def add_listener(cls, listener: ConfigUnitListener) -> None:\n cls.listener.append(listener)", "def add_listener(self, listener):\n self.listeners.append(listener)", "def register(self, listener):\n\t\t\t## This tests weather the listener has a 'refresh' method\t \n\t\tif not hasattr(listener,'refresh') or not hasattr(listener.refresh,'func_code'):\n\t\t\traise AttributeError(\"%s does not have a 'refresh' method.\\n Type help(Wiimote3DTracker.register) for more information on refresh methods.\"% listener.__class__)\n\t\t## Ok, so we have a refresh method (probably) but we need to make sure \n\t\t## it has the correct number of \targuments\n\t\telif listener.refresh.func_code.co_argcount == 3: \n\t\t\tif listener not in self.cartesianListeners:\n\t\t\t\tself.cartesianListeners += [listener]\n\t\telif listener.refresh.func_code.co_argcount == 5:\n\t\t\tif listener not in self.polarListeners:\n\t\t\t\tself.polarListeners += [listener]\n\t\telse:\n\t\t\tprint listener.refresh.func_code.co_argcount\n\t\t\traise AttributeError(\"%s does not have a valid 'refresh' method.\\n Type help(Wiimote3DTracker.register) for more information on refresh methods.\" % listener.__class__)" ]
[ "0.6823617", "0.600671", "0.577597", "0.5750334", "0.56320566", "0.5572041", "0.5471202", "0.5394818", "0.5374321", "0.5368463", "0.53638417", "0.53259104", "0.5251389", "0.522504", "0.5183015", "0.5153863", "0.51429737", "0.51391524", "0.5118935", "0.5095881", "0.50898105", "0.5065597", "0.5060478", "0.50471765", "0.50368315", "0.5011039", "0.49841738", "0.49745873", "0.49703857", "0.49539843" ]
0.62694657
1
Get a specific l7 policy. If the param 'includeChildResources' is passed in as a truthy value, the details of all resources that exist under the l7 policy will be returned along with the l7 policy details.
def get(self, request, l7_policy_id):
        conn = get_sdk_connection(request)
        l7_policy = conn.load_balancer.find_l7_policy(l7_policy_id)
        l7_policy = _get_sdk_object_dict(l7_policy)

        if request.GET.get('includeChildResources'):
            resources = {}

            if l7_policy.get('rules'):
                l7_rules_list = _sdk_object_to_list(
                    conn.load_balancer.l7_rules(l7_policy_id))
                l7_policy['rules'] = l7_rules_list

            resources['l7policy'] = l7_policy

            return resources
        else:
            return l7_policy
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(self, request, l7_rule_id, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.find_l7_rule(l7_rule_id, l7_policy_id)\n return _get_sdk_object_dict(l7_rule)", "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule_list = _sdk_object_to_list(conn.load_balancer.l7_rules(\n l7_policy_id))\n return {'items': l7_rule_list}", "def get(self, request):\n listener_id = request.GET.get('listenerId')\n conn = get_sdk_connection(request)\n l7_policy_list = _sdk_object_to_list(conn.load_balancer.l7_policies(\n listener_id=listener_id))\n return {'items': l7_policy_list}", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def get_policy(client, policy_name):\n response = client.describe_firewall_policy(\n FirewallPolicyName=policy_name,\n )\n return response", "def get_firewall_policy(expand: Optional[str] = None,\n firewall_policy_name: Optional[str] = None,\n resource_group_name: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetFirewallPolicyResult:\n __args__ = dict()\n __args__['expand'] = expand\n __args__['firewallPolicyName'] = firewall_policy_name\n __args__['resourceGroupName'] = resource_group_name\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('azure-native:network/v20200401:getFirewallPolicy', __args__, opts=opts, typ=GetFirewallPolicyResult).value\n\n return AwaitableGetFirewallPolicyResult(\n base_policy=pulumi.get(__ret__, 'base_policy'),\n child_policies=pulumi.get(__ret__, 'child_policies'),\n etag=pulumi.get(__ret__, 'etag'),\n firewalls=pulumi.get(__ret__, 'firewalls'),\n id=pulumi.get(__ret__, 'id'),\n location=pulumi.get(__ret__, 'location'),\n name=pulumi.get(__ret__, 'name'),\n provisioning_state=pulumi.get(__ret__, 'provisioning_state'),\n rule_groups=pulumi.get(__ret__, 'rule_groups'),\n tags=pulumi.get(__ret__, 'tags'),\n threat_intel_mode=pulumi.get(__ret__, 'threat_intel_mode'),\n threat_intel_whitelist=pulumi.get(__ret__, 'threat_intel_whitelist'),\n type=pulumi.get(__ret__, 'type'))", "def GetPolicy(self, request, global_params=None):\n config = self.GetMethodConfig('GetPolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def get_firewall_policy_output(expand: Optional[pulumi.Input[Optional[str]]] = None,\n firewall_policy_name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetFirewallPolicyResult]:\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n custom_block_response_body: Optional[pulumi.Input[str]] = None,\n custom_block_response_status_code: Optional[pulumi.Input[int]] = None,\n custom_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallPolicyCustomRuleArgs']]]]] = None,\n enabled: Optional[pulumi.Input[bool]] = 
None,\n frontend_endpoint_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n location: Optional[pulumi.Input[str]] = None,\n managed_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FirewallPolicyManagedRuleArgs']]]]] = None,\n mode: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n redirect_url: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'FirewallPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _FirewallPolicyState.__new__(_FirewallPolicyState)\n\n __props__.__dict__[\"custom_block_response_body\"] = custom_block_response_body\n __props__.__dict__[\"custom_block_response_status_code\"] = custom_block_response_status_code\n __props__.__dict__[\"custom_rules\"] = custom_rules\n __props__.__dict__[\"enabled\"] = enabled\n __props__.__dict__[\"frontend_endpoint_ids\"] = frontend_endpoint_ids\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"managed_rules\"] = managed_rules\n __props__.__dict__[\"mode\"] = mode\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"redirect_url\"] = redirect_url\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"tags\"] = tags\n return FirewallPolicy(resource_name, opts=opts, __props__=__props__)", "def get_policy(usage_id):\r\n return policy.get(policy_key(usage_id), {})", "def policy(self) -> HwPolicy:\n return self._policy", "def get_iam_policy(self, resource, fields=None, verb='getIamPolicy',\n include_body=True, resource_field='resource', **kwargs):\n arguments = {resource_field: resource,\n 'fields': fields}\n if include_body:\n arguments['body'] = {}\n if kwargs:\n arguments.update(kwargs)\n return self.execute_query(\n verb=verb,\n verb_arguments=arguments,\n )", "def get_policy(self, policy_id: PolicyID = DEFAULT_POLICY_ID) -> Policy:\n return self.workers.local_worker().get_policy(policy_id)", "def get_sp_policy(self, context, id):\n # handling policy method in RPC\n response = self.dns_manager.get_sp_policy(context, id)\n return response", "def read(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.debug(\"Reading the policy: %s\", address)\n response = self.vault.requests_request(\n \"GET\", address, headers=self.vault.token_header\n )\n policy_details = response.json()[\"data\"][\"policy\"]\n return policy_details", "def base_policy(self) -> Optional['outputs.SubResourceResponse']:\n return pulumi.get(self, \"base_policy\")", "def get_workload_policy(self, workload_policy_id):\n url = \"get_workload_policy/%s\" % workload_policy_id\n resp, body = self.get(url)\n self.expected_success(200, resp.status)\n body = json.loads(body)\n return service_client.ResponseBody(resp, body[\"workload_policy\"])", "def get_policy(self):\n return self.agent.get_policy()", "def get_hardware_specific_load_balancing_policy(cls, vendor_id, product_id):\n output = cls.execute([\"-s\", \"-t\"])\n hardware_id = cls._get_hardware_id(vendor_id, product_id)\n return cls._extract_hardware_specific_load_balacing_policy(output, hardware_id)", "def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy:\n return response", "def policy_get(request, policy_id, **kwargs):\n policy = neutronclient(request).show_qos_policy(\n policy_id, 
**kwargs).get('policy')\n return QoSPolicy(policy)", "def post(self, request):\n kwargs = {'listener_id': request.DATA.get('parentResourceId')}\n return create_l7_policy(request, **kwargs)", "def create_l7_policy(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.create_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n listener_id=kwargs['listener_id'],\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def policy(self) -> Optional[pulumi.Input['ServicePolicyArgs']]:\n return pulumi.get(self, \"policy\")", "def get_policy(self):\n\n return", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n categories: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n implementation_effort: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n remediation_description: Optional[pulumi.Input[str]] = None,\n severity: Optional[pulumi.Input[str]] = None,\n threats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n user_impact: Optional[pulumi.Input[str]] = None) -> 'AssessmentPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AssessmentPolicyState.__new__(_AssessmentPolicyState)\n\n __props__.__dict__[\"categories\"] = categories\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"implementation_effort\"] = implementation_effort\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"remediation_description\"] = remediation_description\n __props__.__dict__[\"severity\"] = severity\n __props__.__dict__[\"threats\"] = threats\n __props__.__dict__[\"user_impact\"] = user_impact\n return AssessmentPolicy(resource_name, opts=opts, __props__=__props__)", "def get_policy_by_id(self, id):\n for service, policy_list in self.remote_store.get_policy_list().items():\n for policy in policy_list:\n if policy.id == id:\n return policy", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n policy: Optional[pulumi.Input[str]] = None,\n resource_arn: Optional[pulumi.Input[str]] = None) -> 'Policy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _PolicyState.__new__(_PolicyState)\n\n __props__.__dict__[\"policy\"] = policy\n __props__.__dict__[\"resource_arn\"] = resource_arn\n return Policy(resource_name, opts=opts, __props__=__props__)", "def get_frontdoor_firewall_policy_output(name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetFrontdoorFirewallPolicyResult]:\n ..." ]
[ "0.6267911", "0.57215685", "0.56794816", "0.5608188", "0.5565996", "0.54223526", "0.53737026", "0.5277922", "0.52610373", "0.5238874", "0.5175136", "0.5146227", "0.5116967", "0.51168495", "0.5055844", "0.50002635", "0.4989549", "0.49149385", "0.490399", "0.48882803", "0.48582157", "0.48247", "0.47861603", "0.4784108", "0.4784108", "0.47804815", "0.4767793", "0.47144425", "0.4708096", "0.4677453" ]
0.8088022
0
Edit an l7 policy as well as any resources below it.
def put(self, request, l7_policy_id):
    kwargs = {'l7_policy_id': l7_policy_id}
    update_l7_policy(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def put(self, request, l7_rule_id, l7_policy_id):\n kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}\n update_l7_rule(request, **kwargs)", "def update_policy(self, *args, **kwargs):\r\n pass", "def update_policy(self):\n pass", "def update_Policy(self,inputpolicy):\n \n policyob = self.SD_Map.retrieve_ob(inputpolicy)\n policyob.values[-1] = self.PolicyDicts[inputpolicy][self.translate(self.policy_option_vars[inputpolicy].get(),\n input_language = self.language,\n output_language = 'english')]", "def put(self):\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"BODY\")\n name = views_helper.get_request_value(self.request, \"coll_policy_name\", \"BODY\")\n command = views_helper.get_request_value(self.request, \"command\", \"BODY\")\n desc = views_helper.get_request_value(self.request, \"desc\", \"BODY\")\n ostype = views_helper.get_request_value(self.request, \"ostype\", \"BODY\")\n coll_policy_update_data = {\n 'name': name,\n 'cli_command': command,\n 'desc': desc,\n 'ostype': ostype\n }\n if len(CollPolicy.objects.filter(~Q(coll_policy_id=coll_policy_id), name=name)):\n data = {\n 'data': '',\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.FALSE,\n constants.MSG_TYPE: 'NAME_DUPLICATE',\n constants.MESSAGE: constants.COLLECTION_POLICY_NAME_DUPLICATE\n }\n\n }\n return api_return(data=data)\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n serializer = CollPolicyEditSerializer(instance=obj, data=coll_policy_update_data)\n try:\n if serializer.is_valid():\n serializer.save()\n data = {\n 'data': serializer.data,\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n\n }\n return api_return(data=data)\n except Exception as e:\n if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def test_update_firewall_policy(self):\r\n resource = 'firewall_policy'\r\n cmd = firewallpolicy.UpdateFirewallPolicy(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_policy(ranger_url, policy_id, policy_data, admin_username_password):\n\n url = format(\"{ranger_url}/service/public/v2/api/policy/{policy_id}\")\n\n base_64_string = base64.encodestring(admin_username_password).replace('\\n', '')\n\n request = urllib2.Request(url, json.dumps(policy_data))\n request.get_method = lambda: 'PUT'\n request.add_header('Content-Type', 'application/json')\n request.add_header('Accept', 'application/json')\n request.add_header('Authorization', format('Basic {base_64_string}'))\n\n try:\n result = openurl(request, timeout=20)\n response_code = result.getcode()\n if response_code == 200:\n Logger.info(format(\"Successfully updated policy in Ranger Admin\"))\n return response_code\n else:\n 
Logger.error(format(\"Unable to update policy in Ranger Admin\"))\n return None\n except urllib2.HTTPError as e:\n raise Fail(\"HTTPError while updating policy Reason = \" + str(e.code))\n except urllib2.URLError as e:\n raise Fail(\"URLError while updating policy. Reason = \" + str(e.reason))\n except TimeoutError:\n raise Fail(\"Connection timeout error while updating policy\")\n except Exception as err:\n raise Fail(format(\"Error while updating policy. Reason = {err}\"))", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def test_update_ikepolicy(self):\r\n resource = 'ikepolicy'\r\n cmd = ikepolicy.UpdateIKEPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def UpdatePolicy(self, request, global_params=None):\n config = self.GetMethodConfig('UpdatePolicy')\n return self._RunMethod(\n config, request, global_params=global_params)", "def update_apic(self):\n return self.client.policy.update(policyList=self.policy_list.response)", "def modify_audit_policy(\n self,\n request: dds_20151201_models.ModifyAuditPolicyRequest,\n ) -> dds_20151201_models.ModifyAuditPolicyResponse:\n runtime = util_models.RuntimeOptions()\n return self.modify_audit_policy_with_options(request, runtime)", "def add_to_resource_policy(self, permission: aws_cdk.aws_iam.PolicyStatement) -> None:\n ...", "def policy_update_fn(self, data: Dict[str, Any], result: Dict[str, Any]) -> None:", "def rbac_policy_update(request, policy_id, **kwargs):\n body = {'rbac_policy': kwargs}\n rbac_policy = neutronclient(request).update_rbac_policy(\n policy_id, body=body).get('rbac_policy')\n return RBACPolicy(rbac_policy)", "def Update(self,\n fp_id=None,\n only_generate_request=False,\n firewall_policy=None,\n batch_mode=False):\n\n if batch_mode:\n requests = [\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)\n ]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Patch(\n self._MakeUpdateRequestTuple(\n fp_id=fp_id, firewall_policy=firewall_policy)[2])\n return self.WaitOperation(\n op_res, message='Updating the organization firewall policy.')", "def update_policy(policy_id):\n old_policy = PolicyService.get_policy_by_id(policy_id)\n if old_policy is None:\n abort(404)\n new_policy = PolicyService.update_policy_by_id(policy_id, json_to_policy(request.json))\n if new_policy is None:\n abort(406)\n return new_policy.__dict__", "def test_update_ipsecpolicy(self):\r\n resource = 'ipsecpolicy'\r\n cmd = ipsecpolicy.UpdateIPsecPolicy(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def update_firewall_policy(self, firewall_policy, body=None):\r\n return self.put(self.firewall_policy_path % (firewall_policy),\r\n body=body)", "def put(self, consumer_key, rid):\n policy = Policy.query.filter(\n Policy.consumer_key == consumer_key,\n 
Policy.rid == rid\n ).first_or_404()\n\n payload = json.loads(request.data)\n if \"actions\" not in payload:\n abort(400, \"Missing required field: actions\")\n\n policy.actions = set(payload[\"actions\"])\n policy.save()\n return self.jsonify(self._serialize(policy), status_code=200)", "def post_access_control_list_update(self, resource_id, resource_dict):\n pass", "def apply_policy(self, policy):\n tenant_name = policy['tenant_name']\n fw_id = policy['fw_id']\n fw_name = policy['fw_name']\n LOG.debug(\"asa_apply_policy: tenant=%s fw_id=%s fw_name=%s\",\n tenant_name, fw_id, fw_name)\n cmds = [\"conf t\", \"changeto context \" + tenant_name]\n\n rule_dict = policy['rules']\n for rule_id in rule_dict:\n rule = rule_dict[rule_id]\n protocol = rule['protocol']\n name = rule['name']\n enabled = rule['enabled']\n dst_port = rule['destination_port']\n src_port = rule['source_port']\n\n if (rule['source_ip_address'] is not None):\n src_ip = IPNetwork(rule['source_ip_address'])\n else:\n src_ip = IPNetwork('0.0.0.0/0')\n\n if (rule['destination_ip_address'] is not None):\n dst_ip = IPNetwork(rule['destination_ip_address'])\n else:\n dst_ip = IPNetwork('0.0.0.0/0')\n\n if rule['action'] == 'allow':\n action = 'permit'\n else:\n action = 'deny'\n\n LOG.debug(\"rule[%s]: name=%s enabled=%s prot=%s dport=%s sport=%s \\\n dip=%s %s sip=%s %s action=%s\",\n rule_id, name, enabled, protocol, dst_port, src_port,\n dst_ip.network, dst_ip.netmask,\n src_ip.network, src_ip.netmask, action)\n\n acl = \"access-list \"\n acl = (acl + tenant_name + \" extended \" + action + \" \" +\n protocol + \" \")\n if (rule['source_ip_address'] is None):\n acl = acl + \"any \"\n else:\n acl = acl + str(src_ip.network) + \" \" + (\n str(src_ip.netmask) + \" \")\n if (src_port is not None):\n if (':' in src_port):\n range = src_port.replace(':', ' ')\n acl = acl + \"range \" + range + \" \"\n else:\n acl = acl + \"eq \" + src_port + \" \"\n if (rule['destination_ip_address'] is None):\n acl = acl + \"any \"\n else:\n acl = acl + str(dst_ip.network) + \" \" + \\\n str(dst_ip.netmask) + \" \"\n if (dst_port is not None):\n if (':' in dst_port):\n range = dst_port.replace(':', ' ')\n acl = acl + \"range \" + range + \" \"\n else:\n acl = acl + \"eq \" + dst_port + \" \"\n if (enabled is False):\n acl = acl + 'inactive'\n\n # remove the old ace for this rule\n if (rule_id in self.rule_tbl):\n cmds.append('no ' + self.rule_tbl[rule_id])\n\n self.rule_tbl[rule_id] = acl\n if tenant_name in self.tenant_rule:\n if rule_id not in self.tenant_rule[tenant_name]['rule_lst']:\n self.tenant_rule[tenant_name]['rule_lst'].append(rule_id)\n cmds.append(acl)\n cmds.append(\"access-group \" + tenant_name + \" global\")\n cmds.append(\"write memory\")\n\n LOG.debug(cmds)\n data = {\"commands\": cmds}\n return self.rest_send_cli(data)", "def _modify_schedule_policy_properties(self):\n request_json = {\n 'taskInfo':\n {\n 'taskOperation': 1,\n 'associations': self._associations,\n 'task': self._task_json,\n \"appGroup\":\n {\n \"appGroups\": self._app_groups if self._app_groups else [],\n },\n 'subTasks': self._subtasks\n }\n }\n\n flag, response = self._commcell_object._cvpysdk_object.make_request(\n 'PUT', self._MODIFY_SCHEDULE_POLICY, request_json\n )\n output = self._process_schedule_policy_update_response(flag, response)\n self.refresh()\n\n if output[0]:\n return\n\n o_str = 'Failed to update properties of Schedule Policy\\nError: \"{0}\"'\n raise SDKException('Schedules', '102', o_str.format(output[2]))", "def set_policy(self, name, 
policy):\n client = self.connect(VAULT_TOKEN)\n client.set_policy(name, policy)", "def set_workflow_policy(obj):\n product = 'CMFPlacefulWorkflow'\n obj.manage_addProduct[product].manage_addWorkflowPolicyConfig()\n pc = getattr(obj, WorkflowPolicyConfig_id)\n pc.setPolicyIn(policy='one-state')\n logger.info('Workflow changed for element %s' % obj.getId())", "def __editProjectPWL(self):\n pwl = e5App().getObject(\"Project\").getProjectDictionaries()[0]\n self.__editSpellingDictionary(pwl)", "def add_grading_policy(self, grading_policy):\r\n\r\n self.course.grading_policy = grading_policy\r\n store = editable_modulestore()\r\n store.update_item(self.course, '**replace_user**')\r\n self.refresh_course()", "def setPolicy(self, value):\n return self._set(policy=value)", "def update_admin_permission(self) -> None:\n session = self.appbuilder.get_session\n dag_resources = session.scalars(\n select(Resource).where(Resource.name.like(f\"{permissions.RESOURCE_DAG_PREFIX}%\"))\n )\n resource_ids = [resource.id for resource in dag_resources]\n\n perms = session.scalars(select(Permission).where(~Permission.resource_id.in_(resource_ids)))\n perms = [p for p in perms if p.action and p.resource]\n\n admin = self.find_role(\"Admin\")\n admin.permissions = list(set(admin.permissions) | set(perms))\n\n session.commit()" ]
[ "0.7356099", "0.6932371", "0.6876846", "0.6712923", "0.6301364", "0.6158353", "0.6146165", "0.6069311", "0.60549647", "0.6009541", "0.6009057", "0.59801036", "0.58342427", "0.5770972", "0.56784624", "0.56772274", "0.5657728", "0.5577772", "0.5536992", "0.5461024", "0.54436207", "0.54007393", "0.53937346", "0.53096354", "0.5300538", "0.5277811", "0.5240096", "0.5237016", "0.5208631", "0.5198914" ]
0.76575214
0
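(Aside, not part of the dataset rows: the PUT handler in the row above delegates to update_l7_policy(), which in turn calls the openstacksdk load-balancer client. A minimal sketch of the equivalent direct SDK call follows; the cloud name, policy UUID, and field values are illustrative placeholders, not values taken from this dataset.)

import openstack

# Assumed clouds.yaml entry; replace with a real cloud configuration name.
conn = openstack.connect(cloud='mycloud')

# Update an existing L7 policy by ID (placeholder UUID), using the same
# keyword arguments the dashboard's update_l7_policy() helper passes through.
policy = conn.load_balancer.update_l7_policy(
    l7_policy='0f9d3f9a-0000-0000-0000-000000000000',
    action='REDIRECT_TO_URL',
    redirect_url='https://example.com/maintenance',
    name='maintenance-redirect',
)
print(policy.name)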
Delete a specific l7 policy.
def delete(self, request, l7_policy_id):
    conn = get_sdk_connection(request)
    retry_on_conflict(
        conn, conn.load_balancer.delete_l7_policy,
        l7_policy_id,
        load_balancer_getter=l7_policy_get_load_balancer_id,
        resource_id=l7_policy_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def policy_delete(request, policy_id):\n neutronclient(request).delete_qos_policy(policy_id)", "def delete(self, request, l7_rule_id, l7_policy_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_l7_rule,\n l7_rule_id, l7_policy_id,\n load_balancer_getter=l7_policy_get_load_balancer_id,\n resource_id=l7_policy_id)", "def delete_policy(self, policy_name):\r\n return self.connection.delete_lb_policy(self.name, policy_name)", "def delete(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n # Actually run vault\n logging.info(\"Deleting the policy: %s\", address)\n self.vault.requests_request(\"DELETE\", address, headers=self.vault.token_header)", "def delete_policy(policy_id):\n policy = PolicyService.get_policy_by_id(policy_id)\n if policy is None:\n abort(404)\n\n policy.delete()\n\n return {}", "def delete_firewall_policy(self, firewall_policy):\r\n return self.delete(self.firewall_policy_path % (firewall_policy))", "def delete_ikepolicy(self, ikepolicy):\r\n return self.delete(self.ikepolicy_path % (ikepolicy))", "def rbac_policy_delete(request, policy_id):\n neutronclient(request).delete_rbac_policy(policy_id)", "def delete_policy(self, policy_ref: str) -> None:\n self.batch_write(\n [self.batch_detach_policy(policy_ref, obj_ref) for obj_ref in self.list_policy_attachments(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n self.batch_write(\n [self.batch_detach_object(parent_ref, link_name) for parent_ref, link_name in self.list_object_parents(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n retry(**cd_read_retry_parameters)(cd_client.delete_object)(\n DirectoryArn=self._dir_arn,\n ObjectReference={'Selector': policy_ref})", "def qos_policy_group_delete(self, policy_group):\n return self.request( \"qos-policy-group-delete\", {\n 'policy_group': [ policy_group, 'policy-group', [ basestring, 'None' ], False ],\n }, {\n } )", "def policy_delete_all(session, domain, path=\"/\"):\n client = session.client('iam')\n resp = client.list_policies(Scope='Local', PathPrefix=path)\n\n prefix = domain.replace('.', '-')\n for policy in resp.get('Policies', []):\n if policy['PolicyName'].startswith(prefix):\n ARN = policy['Arn']\n if policy['AttachmentCount'] > 0:\n # cannot delete a policy if it is still in use\n attached = client.list_entities_for_policy(PolicyArn=ARN)\n for group in attached.get('PolicyGroups', []):\n client.detach_group_policy(GroupName=group['GroupName'], PolicyArn=ARN)\n for user in attached.get('PolicyUsers', []):\n client.detach_user_policy(UserName=user['UserName'], PolicyArn=ARN)\n for role in attached.get('PolicyRoles', []):\n client.detach_role_policy(RoleName=role['RoleName'], PolicyArn=ARN)\n client.delete_policy(PolicyArn=ARN)", "def test_delete_hyperflex_ext_fc_storage_policy(self):\n pass", "def test_delete_namespaced_policy(self):\n pass", "def delete(nitro, policypatset):\r\n __policypatset = NSPatset()\r\n __policypatset.set_name(policypatset.get_name())\r\n return __policypatset.delete_resource(nitro)", "def delete_metric_policy(ContainerName=None):\n pass", "def delete_lb_health_check_policy(self, policy_id): \n params = {'command':'deleteLBHealthCheckPolicy',\n 'id':policy_id} \n\n try:\n response = self.send_request(params)\n res = json.loads(response)\n clsk_job_id = res['deletelbhealthcheckpolicyresponse']['jobid']\n self.logger.debug('Start job - deleteLBHealthCheckPolicy: %s' 
% res)\n return clsk_job_id\n except KeyError as ex:\n raise ClskError('Error parsing json data: %s' % ex)\n except ApiError as ex:\n raise ClskError(ex)", "def delete_group_policy(self, group_name, policy_name):\r\n params = {'GroupName' : group_name,\r\n 'PolicyName' : policy_name}\r\n return self.get_response('DeleteGroupPolicy', params, verb='POST')", "def minimum_packet_rate_rule_delete(request, policy_id, rule_id):\n neutronclient(request).delete_minimum_packet_rate_rule(rule_id, policy_id)", "def remove_policy(self, sec, ptype, rule):\n line = self.convert_to_item(ptype, rule)\n\n _id = line['id']['S']\n\n self.dynamodb.delete_item(\n Key={\n 'id': {\n 'S': _id,\n }\n },\n TableName=self.table_name,\n )\n\n return True", "def delete(self, consumer_key, rid):\n policy = Policy.query.filter(\n Policy.consumer_key == consumer_key,\n Policy.rid == rid\n ).first_or_404()\n\n policy.remove()\n return '', 204", "def delete_policy(policystore_url, policy_credentials, verbose):\n\n if verbose:\n logging.info('Deleting policy')\n pprint.pprint(policy_credentials)\n\n delete_url = policystore_url + POLICYSTORE_PREFIX + 'DeleteEntitlementPolicy'\n\n r = requests.post(delete_url, headers=headers(), json=policy_credentials)\n if r.status_code != 200:\n logging.error(f'ERROR: Unexpected response: {r.status_code}')\n pprint.pprint(r.json())\n sys.exit('Failed to delete policy')\n\n logging.info('SUCCESS: Deleted policy')", "def post_network_policy_delete(self, resource_id, resource_dict):\n pass", "def Delete(self, fp_id=None, batch_mode=False, only_generate_request=False):\n\n if batch_mode:\n requests = [self._MakeDeleteRequestTuple(fp_id=fp_id)]\n if not only_generate_request:\n return self._compute_client.MakeRequests(requests)\n return requests\n\n op_res = self._service.Delete(self._MakeDeleteRequestTuple(fp_id=fp_id)[2])\n operation_poller = DeletePoller(self._service, self.ref)\n return self.WaitOperation(\n op_res,\n operation_poller=operation_poller,\n message='Deleting the organization firewall policy.')", "def test_delete_hyperflex_ucsm_config_policy(self):\n pass", "def test_delete_hyperflex_cluster_storage_policy(self):\n pass", "def test_delete_hyperflex_node_config_policy(self):\n pass", "def delete_user_policy(self, user_name, policy_name):\r\n params = {'UserName' : user_name,\r\n 'PolicyName' : policy_name}\r\n return self.get_response('DeleteUserPolicy', params, verb='POST')", "def test_delete_hyperflex_software_version_policy(self):\n pass", "def delete_bucket_policy(Bucket=None):\n pass", "def test_delete_hyperflex_proxy_setting_policy(self):\n pass" ]
[ "0.7155855", "0.7073564", "0.7066619", "0.69722295", "0.66296524", "0.659274", "0.6546766", "0.6512928", "0.64337", "0.62496746", "0.61917585", "0.61263865", "0.6108714", "0.60930246", "0.6092611", "0.60841596", "0.60771275", "0.6044096", "0.60357344", "0.60187685", "0.5967144", "0.5930788", "0.5904425", "0.5867378", "0.5855971", "0.58380675", "0.58272773", "0.58182204", "0.5817611", "0.5805786" ]
0.77358395
0
Create a new l7 rule. Creates a new l7 rule under the specified l7 policy.
def post(self, request, l7_policy_id):
    kwargs = {'l7_policy_id': l7_policy_id}
    return create_l7_rule(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_l7_rule(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.create_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def create_l7_policy(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.create_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n listener_id=kwargs['listener_id'],\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def create_firewall_rule(self, body=None):\r\n return self.post(self.firewall_rules_path, body=body)", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = Optional(OneOrMore(rule))", "def _addrule(self, nonterm, program, params, info):\n rule = Rule(nonterm, program, params, info)\n\n if not nonterm in self.rules:\n self.rules[nonterm] = []\n \n self.rules[nonterm].append(rule)", "def create_firewall_rule(project):\n listed_rules = subprocess.check_output(\n ['gcloud', 'compute', 'firewall-rules', 'list',\n '--format', 'value(name)',\n '--filter', 'name=%s' % LEO_FIREWALL_RULE,\n '--project', project])\n if LEO_FIREWALL_RULE in listed_rules:\n return\n Print.GN('Creating firewall rule for Leonardo VM.')\n subprocess.check_call(\n ['gcloud', 'compute', 'firewall-rules', 'create',\n LEO_FIREWALL_RULE,\n '--allow', 'tcp:80,tcp:443',\n '--priority', '900',\n '--target-tags', LEO_FIREWALL_RULE,\n '--project', project])", "def createRule(self):\n res = True\n\n try:\n PATH = os.path.dirname(os.path.realpath(__file__))\n DATABASE = os.path.join(PATH, '..', 'db', 'store.db')\n conn = sqlite3.connect(DATABASE)\n c = conn.cursor()\n c.execute('INSERT OR IGNORE INTO PRICING_RULES VALUES (?, ?, ?, ?, ?, ?, ?)',\n (self.description, self.itemCode, self.minUnits, self.divisor, self.multiplier, self.discountPerc, self.extraData))\n conn.commit()\n except sqlite3.Error as e:\n print(\"An error occurred while creating rule <\" + self.description + \"> for <\" + self.itemCode + \">: \", e.args[0])\n res = False\n finally:\n c.close()\n conn.close()\n return res", "def GachaCraftNodeExcelAddGP1107(builder, GP1107):\n return AddGP1107(builder, GP1107)", "def __init__(self, *args):\n this = _libsbml.new_Rule(*args)\n try: self.this.append(this)\n except: self.this = this", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n 
l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def CreateRule(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"CreateRule\", params, headers=headers)\n response = json.loads(body)\n model = models.CreateRuleResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def create_rule(self: object,\n body: dict,\n cs_username: str = None # pylint: disable=W0613 # cs_username is deprecated\n ) -> dict:\n # [POST] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/custom-ioa/create-rule\n return process_service_request(\n calling_object=self,\n endpoints=Endpoints,\n operation_id=\"create_rule\",\n body=body\n )", "def __init__(__self__,\n resource_name: str,\n args: RuleArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def create_snat_rule(self, **attrs):\n return self._create(_snat.Rule, **attrs)", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def __init__(self, rule):\n Rule.__init__(self)\n self.__rule = rule", "def test_create_rule(self):\n pass", "def ParseLrules(raw_lrule_str, new_lsys):\r\n lrules = StrToLrules(raw_lrule_str)\r\n lrule_priority = 0\r\n for a_rule in lrules:\r\n Lrule.objects.create(lsys = new_lsys,\r\n str_in=a_rule[0],\r\n str_out=a_rule[1],\r\n rule_priority=lrule_priority)\r\n lrule_priority += 1", "def get(self, request, l7_rule_id, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.find_l7_rule(l7_rule_id, l7_policy_id)\n return _get_sdk_object_dict(l7_rule)", "def EM7(Type=\"DFA\"):\n R41, R42, R43, R44, R45 = state('R41'), state('R42'), state('R43'), state('R44'), state('R45')\n for i in range(1, 7):\n R41.transit[str(i)] = R41\n R42.transit[str(i)] = R42\n R43.transit[str(i)] = R43\n R44.transit[str(i)] = R44\n R45.transit[str(i)] = R45\n R41.transit['4'] = R42\n R42.transit['4'] = R43\n R43.transit['4'] = R44\n R44.transit['4'] = R45\n R45.transit['4'] = R42\n if Type == \"pDFA\":\n R4 = pDFA('R4', list('123456'), [R41, R42, R43, R44, R45], R41, [R45])\n else:\n R4 = DFA('R4', list('123456'), [R41, R42, R43, R44, R45], R41, [R45])\n if (SIZEOF):\n EM_size[\"EM7\"] = asizeof.asizeof(R4)\n return R4", "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule_list = _sdk_object_to_list(conn.load_balancer.l7_rules(\n l7_policy_id))\n return {'items': l7_rule_list}", "def test_build_rule_book_from_local_yaml_file_works(self):\n rules_local_path = get_datafile_path(__file__,\n \t'fwd_test_rules_1.yaml')\n rules_engine = fre.ForwardingRuleRulesEngine(rules_file_path=rules_local_path)\n rules_engine.build_rule_book()\n self.assertEqual(4, len(rules_engine.rule_book.resource_rules_map))", "def new(ruletype, **kwargs):\n 
try:\n ruleclass = TYPE_MAP[ruletype]\n except KeyError:\n raise error.InvalidRule('Unrecognized rule type: %s' % ruletype)\n\n try:\n return ruleclass(**kwargs)\n except TypeError:\n log.error('BADNESS. ruletype: %s, data: %s', ruletype, kwargs)\n raise\n #raise error.InvalidRule(\n # '%s does not work that way.\\nDetails: %s.\\nData: %s' % (\n # ruletype, err, kwargs))", "def create_notification_rule(headers, user_id, payload):\n\n # Alter base_url's endpoint\n url = base_url + '/' + user_id + '/notification_rules'\n\n r = requests.post(url, headers=headers, data=json.dumps(payload))\n\n print 'Notification rule response code: ' + str(r.status_code)\n return", "def put(self, request, l7_rule_id, l7_policy_id):\n kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}\n update_l7_rule(request, **kwargs)", "def post(self, request):\n kwargs = {'listener_id': request.DATA.get('parentResourceId')}\n return create_l7_policy(request, **kwargs)" ]
[ "0.8147097", "0.63940793", "0.62494886", "0.56453776", "0.5420765", "0.5216677", "0.52078986", "0.51725924", "0.5129113", "0.50878996", "0.5026985", "0.5000985", "0.5000185", "0.49834046", "0.49765438", "0.49352294", "0.49352294", "0.49352294", "0.49352294", "0.49352294", "0.49257082", "0.49219874", "0.48673725", "0.48438427", "0.48338857", "0.4821885", "0.4817645", "0.48060852", "0.48029885", "0.47904572" ]
0.6611101
1
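(Aside, not dataset content: the POST handler in the row above forwards its payload to create_l7_rule() on the SDK connection. A minimal sketch under the assumption that the parent L7 policy already exists; the cloud name and UUID are placeholders.)

import openstack

conn = openstack.connect(cloud='mycloud')  # assumed cloud name

# Create a PATH-based rule under an existing L7 policy (placeholder UUID),
# mirroring the keyword arguments used by the handler's create_l7_rule() call.
rule = conn.load_balancer.create_l7_rule(
    l7_policy='0f9d3f9a-0000-0000-0000-000000000000',
    type='PATH',
    compare_type='STARTS_WITH',
    rule_value='/api/',
)
print(rule.id)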
Get a specific l7 rule.
def get(self, request, l7_rule_id, l7_policy_id):
    conn = get_sdk_connection(request)
    l7_rule = conn.load_balancer.find_l7_rule(l7_rule_id, l7_policy_id)
    return _get_sdk_object_dict(l7_rule)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n l7_rule_list = _sdk_object_to_list(conn.load_balancer.l7_rules(\n l7_policy_id))\n return {'items': l7_rule_list}", "def _get_rule(self, rule):\n for kbrule in self.rules:\n if rule == kbrule:\n return kbrule", "def _get_rule(self, rule):\n for kbrule in self.rules:\n if rule == kbrule:\n return kbrule", "def _get_rule(self, rule):\n for kbrule in self.rules:\n if rule == kbrule:\n return kbrule", "def create_l7_rule(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.create_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def getRule(self, *args):\n return _libsbml.Model_getRule(self, *args)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def find(self, rule_name):\n return self.rules[rule_name]", "def get_rule(self, rule_name):\n assert rule_name in self.rules.keys(), 'Rule name not in current set of rules'\n return self.rules[rule_name]", "def get_rule(self):\n\n return self.__rule_", "def get_snat_rule(self, snatrule):\n return self._get(_snat.Rule, snatrule)", "def get_rule(self, name):\n if not self._rules:\n raise NoRulesException()\n if not name in self._rules:\n raise UnknownRuleException(name)\n return self._rules[name]", "def get_rule(rule_id):\n\n rule = get_db().execute('SELECT i.*, c.name as category_name FROM ruleset i JOIN categories c ON i.category_id = c.id WHERE i.id = ?', (rule_id, )).fetchone()\n\n return rule", "def get_rule(self, name):\n\n return self._control_manager.get_rule(name)", "def getRuleByVariable(self, *args):\n return _libsbml.Model_getRuleByVariable(self, *args)", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"rule\")", "def post(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n return create_l7_rule(request, **kwargs)", "def rule(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"rule\")", "def rule(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"rule\")", "def rules_file_71(self):\n return self.system_path(self._rules_file_71)", "def find(self, rules):\n for rule in rules:\n if self == rule:\n 
return rule\n return None", "def get_rule(self):\n return self.rule.state_dict()", "def get_rule(self, subverbify, short_name):\n try:\n rules = self._cf.get(subverbify._id36)\n except tdb_cassandra.NotFoundException:\n return None\n rule = rules.get(short_name, None)\n if not rule:\n return None\n rule = json.loads(rule)\n rule[\"short_name\"] = short_name\n return rule" ]
[ "0.6342875", "0.6327162", "0.6327162", "0.6327162", "0.626276", "0.60649276", "0.6019014", "0.586598", "0.57333195", "0.5679398", "0.5677424", "0.5655824", "0.5578348", "0.54845756", "0.5439656", "0.54331386", "0.54331386", "0.54331386", "0.54331386", "0.54331386", "0.54331386", "0.54331386", "0.54146147", "0.5374574", "0.5374574", "0.5374574", "0.53402567", "0.53252095", "0.523149", "0.5230738" ]
0.6893091
0
Edit a specific l7 rule.
def put(self, request, l7_rule_id, l7_policy_id):
    kwargs = {'l7_rule_id': l7_rule_id, 'l7_policy_id': l7_policy_id}
    update_l7_rule(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def edit_rule(self, value, new=False):\n\n if value >= 0 or new:\n if new:\n name = None\n rule = {}\n else:\n name = self.keys[value]\n rule = self.rules[value]\n text = '\"\"\"\\nIf you don\\'t need a setting, just leave it as None.\\n'\n text += 'When the rule is parsed, the default will be used.\\n'\n text += 'Each variable is evaluated separately, so you cannot substitute variables '\n text += 'in other variables.\\n\"\"\"\\n'\n text += '\\n# name (str): Rule name. Required.\\n'\n text += self.format_string('name', name)\n text += '\\n# find (str): Regular expression pattern or literal string.\\n'\n text += '# Use (?i) for case insensitive. Use (?s) for dotall.\\n'\n text += '# See https://docs.python.org/3.4/library/re.html for more info on regex flags.\\n'\n text += '# Required unless \"scope\" is defined.\\n'\n text += self.format_regex_string('find', rule.get('find'))\n text += '\\n# replace (str - default=r\\'\\\\g<0>\\'): Replace pattern.\\n'\n text += self.format_regex_string('replace', rule.get('replace'))\n text += '\\n# literal (bool - default=False): Preform a non-regex, literal search and replace.\\n'\n text += self.format_bool('literal', rule.get('literal'))\n text += '\\n# literal_ignorecase (bool - default=False): Ignore case when \"literal\" is true.\\n'\n text += self.format_bool('literal_ignorecase', rule.get('literal_ignorecase'))\n text += '\\n# scope (str): Scope to search for and to apply optional regex to.\\n'\n text += '# Required unless \"find\" is defined.\\n'\n text += self.format_string('scope', rule.get('scope'))\n text += '\\n# scope_filter ([str] - default=[]): An array of scope qualifiers for the match.\\n'\n text += '# Only used when \"scope\" is not defined.\\n'\n text += '#\\n'\n text += '# - Any instance of scope qualifies match: scope.name\\n'\n text += '# - Entire match of scope qualifies match: !scope.name\\n'\n text += '# - Any instance of scope disqualifies match: -scope.name\\n'\n text += '# - Entire match of scope disqualifies match: -!scope.name\\n'\n text += self.format_array('scope_filter', rule.get('scope_filter'))\n text += '\\n# greedy (bool - default=True): Apply action to all instances (find all).\\n'\n text += '# Used when \"find\" is defined.\\n'\n text += self.format_bool('greedy', rule.get('greedy'))\n text += '\\n# greedy_scope (bool - default=True): Find all the scopes specified by \"scope.\"\\n'\n text += self.format_bool('greedy_scope', rule.get('greedy_scope'))\n text += '\\n# format_replace (bool - default=False): Use format string style replace templates.\\n'\n text += '# Works only for Regex (with and without Backrefs) and Re (with Backrefs).\\n'\n text += '# See https://facelessuser.github.io/backrefs/usage/#format-replacements for more info.\\n'\n text += self.format_bool('format_replace', rule.get('format_replace'))\n text += '\\n# selection_inputs (bool -default=False): Use selection for inputs into find pattern.\\n'\n text += '# Global setting \"selection_only\" must be disabled for 
this to work.\\n'\n text += self.format_bool('selection_inputs', rule.get('selection_inputs'))\n text += '\\n# multi_pass (bool - default=False): Perform multiple sweeps on the scope region to find\\n'\n text += '# and replace all instances of the regex when regex cannot be formatted to find\\n'\n text += '# all instances. Since a replace can change a scope, this can be useful.\\n'\n text += self.format_bool('multi_pass', rule.get('multi_pass'))\n text += '\\n# plugin (str): Define replace plugin for more advanced replace logic.\\n'\n text += self.format_string('plugin', rule.get('plugin'))\n text += '\\n# args (dict): Arguments for \\'plugin\\'.\\n'\n text += self.format_dict('args', rule.get('args'))\n text += '\\n# ----------------------------------------------------------------------------------------\\n'\n text += '# test: Here you can setup a test command. This is not saved and is just used for this session.\\n'\n text += '# - replacements ([str]): A list of regex rules to sequence together.\\n'\n text += '# - find_only (bool): Highlight current find results and prompt for action.\\n'\n text += '# - action (str): Apply the given action (fold|unfold|mark|unmark|select).\\n'\n text += '# This overrides the default replace action.\\n'\n text += '# - options (dict): optional parameters for actions (see documentation for more info).\\n'\n text += '# - key (str): Unique name for highlighted region.\\n'\n text += '# - scope (str - default=\"invalid\"): Scope name to use as the color.\\n'\n text += '# - style (str - default=\"outline\"): Highlight style (solid|underline|outline).\\n'\n text += '# - multi_pass (bool): Repeatedly sweep with sequence to find all instances.\\n'\n text += '# - no_selection (bool): Overrides the \"selection_only\" setting and forces no selections.\\n'\n text += '# - regex_full_file_with_selections (bool): Apply regex search to full file then apply\\n'\n text += '# action to results under selections.\\n'\n text += textwrap.dedent(\n \"\"\"\\\n test = {\n \"replacements\": [%s],\n \"find_only\": True,\n \"action\": None,\n \"options\": {},\n \"multi_pass\": False,\n \"no_selection\": False,\n \"regex_full_file_with_selections\": False\n }\n \"\"\" % (self.simple_format_string(name) if name is not None else '')\n )\n\n replace_view = self.window.create_output_panel('reg_replace')\n replace_view.run_command('reg_replace_panel_insert', {'text': text})\n for ext in ST_LANGUAGES:\n highlighter = sublime.load_settings(\n 'reg_replace.sublime-settings'\n ).get('python_highlighter', 'Python/Python')\n highlighter = 'Packages/' + highlighter + ext\n try:\n sublime.load_resource(highlighter)\n replace_view.set_syntax_file(highlighter)\n break\n except Exception:\n pass\n replace_view.settings().set('gutter', True)\n replace_view.settings().set('line_numbers', True)\n replace_view.settings().set('reg_replace.edit_view', True)\n replace_view.settings().set('bracket_highlighter.bracket_string_escape_mode', 'regex')\n replace_view.settings().set('regreplace.name', name)\n replace_view.sel().clear()\n replace_view.sel().add(sublime.Region(0, 0))\n self.window.run_command(\"show_panel\", {\"panel\": \"output.reg_replace\"})\n sublime.set_timeout(lambda w=self.window, v=replace_view: w.focus_view(v), 100)", "def edit_rules():\n my_rules = rules.get_all_rules()\n my_rules.append(DEFAULT_RULE)\n\n selected_rule_id = select(\n label=\"Existing rules\",\n options=[{\"label\": rule[\"name\"], \"value\": rule[\"id\"]} for rule in my_rules],\n )\n # Rules have unique IDs from the 
database:\n logging.info(f\"selected_rule: {selected_rule_id}\")\n use_rule = [r for r in my_rules if r[\"id\"] == int(selected_rule_id)][0]\n updated_rule = input_group(\n \"Rule editing\",\n [\n input(\n \"name\", type=TEXT, name=\"name\", value=use_rule[\"name\"], required=True\n ), # Need ttextarea(\n textarea(\n \"Rule names\",\n name=\"rule\",\n rows=10,\n code={\n \"mode\": \"python\", # code language\n \"theme\": \"darcula\", # Codemirror theme. Visit https://codemirror.net/demo/theme.html#cobalt to get more themes\n },\n value=f\"\"\"{use_rule['rule']}\\n\"\"\",\n ),\n actions(\n \"actions\",\n [\n # {\"label\": \"test\", \"value\": \"test\"},\n {\"label\": \"save\", \"value\": \"save\"},\n ],\n name=\"action\",\n help_text=\"Save\",\n ),\n ],\n )\n if updated_rule is not None:\n rl = dict(updated_rule)\n if rl[\"action\"] == \"save\":\n rule_info = rules.save_rule(\n rl[\"name\"], rl[\"rule\"], selected_rule_id\n )\n put_row(put_text(\"Rule\"))\n put_row(put_code(pprint.pformat(rule_info, indent=1)))\n # Use webhook_info's ID to add/update the extractor\n\n put_text(f\"The rule added is: {updated_rule}\")", "def update_l7_policy(request, **kwargs):\n data = request.DATA\n l7_policy_id = data['l7policy'].get('id')\n\n conn = get_sdk_connection(request)\n l7_policy = conn.load_balancer.update_l7_policy(\n action=data['l7policy']['action'],\n admin_state_up=data['l7policy'].get('admin_state_up'),\n description=data['l7policy'].get('description'),\n l7_policy=l7_policy_id,\n name=data['l7policy'].get('name'),\n position=data['l7policy'].get('position'),\n redirect_pool_id=data['l7policy'].get('redirect_pool_id'),\n redirect_url=data['l7policy'].get('redirect_url'),\n )\n\n return _get_sdk_object_dict(l7_policy)", "def update_rules():\n update_all_rules()\n return \"OK\"", "def test_edit_rule(self):\n pass", "def edit_rule(self, rule_number, rule):\n\n\t\tif self._mode == Mode.PassThrough:\n\t\t\traise ValueError(\"Can't edit rules while in passthrough mode\")\n\n\t\tif self._mode == Mode.BlackList:\n\t\t\tif len(self._blacklist_rules) - 1 < rule_number:\n\t\t\t\traise ValueError('Rule not found in rules list')\n\t\t\told_rule = self._blacklist_rules.pop(rule_number)\n\t\t\tself._blacklist_rules.append(rule)\n\t\t\tself._log.info('Replaced rule from the blacklist rules set: \\n old: %s\\n new: %s' % (old_rule, rule))\n\n\t\tif self._mode == Mode.WhiteList:\n\t\t\tif len(self._whitelist_rules) - 1 < rule_number:\n\t\t\t\traise ValueError('Rule not found in rules list')\n\t\t\told_rule = self._whitelist_rules.pop(rule_number)\n\t\t\tself._whitelist_rules.append(rule)\n\t\t\tself._log.info('Replaced rule from the whitelist rules set: \\n old: %s\\n new: %s' % (old_rule, rule))\n\n\t\tself._dump_configuration()\n\t\tself._remove_all_flow_records()\n\t\treturn old_rule", "def _UpdateAclRule(self, entry):\n\n print 'Update Acl rule: %s' % (entry.GetEditLink().href)\n roleValue = \"http://schemas.google.com/gCal/2005#%s\" % (\"read\")\n entry.role = gdata.acl.data.AclRole(value=roleValue)\n returned_rule = self.cal_client.Update(entry)", "def create_l7_rule(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.create_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return 
_get_sdk_object_dict(l7_rule)", "def put(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n update_l7_policy(request, **kwargs)", "def post(self, request, l7_policy_id):\n kwargs = {'l7_policy_id': l7_policy_id}\n return create_l7_rule(request, **kwargs)", "def edit_ruleset(command):\n namespace = app.main(command)\n assert namespace.command == 'er' or namespace.command == \"editruleset\"\n assert namespace.name == \"test\"\n assert namespace.action in ['a','d']", "def update_firewall_rule(self, firewall_rule, body=None):\r\n return self.put(self.firewall_rule_path % (firewall_rule), body=body)", "def rewrite_lp(f_lp, statement):\n f_lp.write(statement.logic_program_form())", "def rewrite_lp(f_lp, statement):\r\n f_lp.write(statement.logic_program_form())", "def test_update_firewall_rule(self):\r\n resource = 'firewall_rule'\r\n cmd = firewallrule.UpdateFirewallRule(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def on_set_rule(self) -> None:\r\n\r\n self.stop_animation()\r\n self.master.focus() # Move the cursor away from the rule entry\r\n rule_text = str(self.rule_entry.get())\r\n\r\n if not self.rule.try_set_rule(rule_text):\r\n messagebox.showinfo(message = self.INVALID_RULE_MESSAGE)\r\n return\r\n\r\n self.rule_name.configure(text = rule_text)\r\n\r\n self.board.birth_rule = self.rule.birth_rule\r\n self.board.remain_rule = self.rule.remain_rule\r\n self.anim_board.birth_rule = self.rule.birth_rule\r\n self.anim_board.remain_rule = self.rule.remain_rule", "def addRule(self, ruleLine):\n cols = ruleLine.split(' ')\n positionNumber = int(cols[0])\n self._rules[positionNumber] = {}\n for i in range(1, len(cols)):\n self._rules[positionNumber][cols[i].upper()] = 1", "def set_rule(self, rule):\n self.rule.load_state_dict(rule, strict=True)", "def edit_standard_fwl_rules(self, firewall_id, rules):\r\n rule_svc = self.client['Network_Firewall_Update_Request']\r\n template = {\r\n \"networkComponentFirewallId\": firewall_id,\r\n \"rules\": rules}\r\n\r\n return rule_svc.createObject(template)", "def test_update_rule(self):\n pass", "def set(self, subrule):\n self.__rule = subrule", "def __editProjectPWL(self):\n pwl = e5App().getObject(\"Project\").getProjectDictionaries()[0]\n self.__editSpellingDictionary(pwl)", "def put(self, request, *args, **kwargs):\n try:\n new_rule = json.loads(request.body)\n except Exception as e:\n return error('unable to marshal json', str(e))\n try:\n validate_rule_json(new_rule)\n except RuleValidationException as e:\n return error('error validating json', str(e))\n rule = Rule()\n rule.populate(new_rule)\n rule.save()\n return success(rule.summary())", "def add_rule(self, rule, on=None, off=None, strength=1.):\n\n self.x[on:off, :, get_rule_index(rule, self.config)] = strength", "def apply_ruleset(self, ruleset):\n updates = [self._get_lexicon_update(ruleset['lexicon'])]\n updates += ruleset['rules']\n self.apply_updates(updates)", "def edit():", "def __editProjectPEL(self):\n pel = e5App().getObject(\"Project\").getProjectDictionaries()[1]\n self.__editSpellingDictionary(pel)", "def add_rule(self, rule: interpreter.Rule) -> None:\n\n if rule.target not in self.rules:\n self.rules[rule.target] = rule\n else:\n self.rules[rule.target] |= rule", "def ModifyRuleAttribute(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"ModifyRuleAttribute\", params, headers=headers)\n response 
= json.loads(body)\n model = models.ModifyRuleAttributeResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))" ]
[ "0.726384", "0.6229128", "0.5973603", "0.59081304", "0.57255226", "0.5619726", "0.56079495", "0.54919493", "0.54785186", "0.5470105", "0.5403452", "0.53685534", "0.5257742", "0.5240305", "0.52315074", "0.52089244", "0.5174361", "0.51728857", "0.5141247", "0.51000416", "0.50493824", "0.49998832", "0.49992436", "0.49586034", "0.4930078", "0.49287677", "0.48695585", "0.48458517", "0.479378", "0.47295776" ]
0.6922995
1
Delete a specific l7 rule.
def delete(self, request, l7_rule_id, l7_policy_id):
    conn = get_sdk_connection(request)
    retry_on_conflict(
        conn, conn.load_balancer.delete_l7_rule,
        l7_rule_id, l7_policy_id,
        load_balancer_getter=l7_policy_get_load_balancer_id,
        resource_id=l7_policy_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_rule(self, index):\n del self.rules[index]", "def _delete_rule(cls, rule_suffix: str) -> None:\n delete_rule = cls._build_rule_string(IpTableCommandOption.DELETE, rule_suffix)\n log.info('Delete rule \"%s\"', delete_rule)\n utils.run_command(delete_rule, shell=True)", "def delete_rule(self, value):\n\n if value >= 0:\n if sublime.ok_cancel_dialog('Are you sure you want to delete the rule: \\'%s\\'?' % self.keys[value]):\n del self.regex_rules[self.keys[value]]\n sublime.load_settings('reg_replace_rules.sublime-settings').set('replacements', self.regex_rules)\n sublime.save_settings('reg_replace_rules.sublime-settings')", "def _DeleteAclRule(self, entry):\n\n self.cal_client.Delete(entry.GetEditLink().href)", "def delete_rule(rule, table=None):\n cmdline = [IPTABLES_PATH]\n if table:\n cmdline += [\"-t\", table]\n cmdline += [\"-D\"] + rule\n return call(cmdline)", "def delete(self, package=\"\", uid=\"\", params={}):\n return self.__post('delete-nat-rule', package, uid, params)", "def delete(self, db: Session) -> Optional[FidesopsBase]:\n _ = [rule.delete(db=db) for rule in self.rules]\n return super().delete(db=db)", "def DeleteRule(self, request):\n try:\n params = request._serialize()\n headers = request.headers\n body = self.call(\"DeleteRule\", params, headers=headers)\n response = json.loads(body)\n model = models.DeleteRuleResponse()\n model._deserialize(response[\"Response\"])\n return model\n except Exception as e:\n if isinstance(e, TencentCloudSDKException):\n raise\n else:\n raise TencentCloudSDKException(type(e).__name__, str(e))", "def delete_resolver_rule(ResolverRuleId=None):\n pass", "def delete(self, request, l7_policy_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_l7_policy,\n l7_policy_id,\n load_balancer_getter=l7_policy_get_load_balancer_id,\n resource_id=l7_policy_id)", "def delete_rule(self, ruleid):\n path = '%s/security-group-rules/%s' % (self.ver, ruleid)\n res = self.client.call(path, 'DELETE', data='', token=self.manager.identity.token)\n self.logger.debug('Delete openstack security group rule %s: %s' % \n (ruleid, truncate(res)))\n return res[0]", "def delete_metering_label_rule(self, rule):\r\n return self.delete(self.metering_label_rule_path % (rule))", "def delete_firewall_rule(self, firewall_rule):\r\n return self.delete(self.firewall_rule_path % (firewall_rule))", "def delete_rule(self, rule_name):\n assert rule_name in self.rules.keys(), 'Rule name not in current set of rules'\n\n del self.rules[rule_name]\n del self.rule_source[rule_name]\n del self.rule_str[rule_name]\n del self.rule_ft[rule_name]\n\n return True", "def test_delete_rule(self):\n pass", "def delete_snat_rule(self, rule, ignore_missing=True):\n return self._delete(_snat.Rule, rule, ignore_missing=ignore_missing)", "def delete_legislation(self, expr_uri):\n resp = self.session.delete(self.url + expr_uri, timeout=self.timeout)\n self.check_for_error(resp)", "def remove_ruleset(args, rulesengine_db):\n import os\n from src.praxxis.sqlite import sqlite_rulesengine\n from src.praxxis.rulesengine import rules\n\n if hasattr(args, \"name\"):\n name = args.name\n else:\n name = args\n\n name = rules.get_ruleset_by_ordinal(name, rulesengine_db)\n\n path = sqlite_rulesengine.get_ruleset_path(rulesengine_db, name)\n\n if os.path.isfile(path):\n os.remove(path)\n sqlite_rulesengine.remove_ruleset(rulesengine_db, name)\n else:\n from src.praxxis.util import error\n raise error.RulesetNotFoundError(name)\n\n return name", "def 
rule_delete(self, sgr_id):\n self.client.delete_security_group_rule(sgr_id)", "def delete_rule(uuid):\n with session_for_write() as session:\n stmt = (\n delete(\n model.RuleAction\n ).where(\n model.RuleAction.rule == uuid\n ).execution_options(synchronize_session=False)\n )\n session.execute(stmt)\n\n stmt = (\n delete(\n model.RuleCondition\n ).where(\n model.RuleCondition.rule == uuid\n ).execution_options(synchronize_session=False)\n )\n session.execute(stmt)\n\n stmt = (\n delete(\n model.Rule\n ).where(\n model.Rule.uuid == uuid\n ).execution_options(synchronize_session=False)\n )\n res = session.execute(stmt)\n if res.rowcount == 0:\n raise utils.RuleNotFoundError(uuid)", "def dscp_marking_rule_delete(request, policy_id, rule_id):\n\n neutronclient(request).delete_dscp_marking_rule(rule_id, policy_id)", "def removeRule(self, *args):\n return _libsbml.Model_removeRule(self, *args)", "def test_esg_firewall_rule_uninstall(self):\n self._common_uninstall_delete(\n 'esg_id|id', esg_firewall.delete,\n {'rule': {\n 'esg_id': 'esg_id'\n }},\n ['firewallRule'], {\n 'uri_parameters': {'edgeId': 'esg_id', 'ruleId': 'id'}\n },\n additional_params=['rule_id']\n )", "def delete_rule(self, id: str, rule_id: str) -> dict:\n r = requests.delete(self.url + '/{}/rules/{}'.format(id, rule_id), headers=self.headers)\n\n return r.json()", "def remove_rule(self, chain, rule, wrap=True, top=False):\n try:\n self.rules.remove(IptablesRule(chain, rule, wrap, top))\n if not wrap:\n self.remove_rules.append(IptablesRule(chain, rule, wrap, top))\n self.dirty = True\n except ValueError:\n pass", "def _remove_rule(cls, rule_suffix: str) -> None:\n if cls._does_rule_exist(rule_suffix):\n cls._delete_rule(rule_suffix)", "def delete(self,\n section_id,\n rule_id,\n ):\n return self._invoke('delete',\n {\n 'section_id': section_id,\n 'rule_id': rule_id,\n })", "def remove_random_rule(self):\n\n\t\ta = self.get_random_cell()\n\t\ta.remove_ProductRule(a.get_random_rule())", "def delete_acl_rule(self, sgr):\n self.security_group_driver.delete_acl_rule(sgr)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)" ]
[ "0.66611654", "0.6327563", "0.6271361", "0.6203873", "0.6158667", "0.6153702", "0.6061404", "0.6053456", "0.59814006", "0.59674776", "0.59412354", "0.5924343", "0.59201545", "0.5894565", "0.5891388", "0.58845353", "0.58808327", "0.58054215", "0.57890576", "0.5766243", "0.57537395", "0.57454777", "0.5741171", "0.5735523", "0.5721931", "0.57141745", "0.57120925", "0.5673118", "0.56703043", "0.56467795" ]
0.7085398
0
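(Aside, not dataset content: the pool row that follows wraps find_pool() and members() from the openstacksdk load-balancer client. A minimal sketch of calling those two SDK methods directly; the cloud name and pool UUID are placeholders.)

import openstack

conn = openstack.connect(cloud='mycloud')  # assumed cloud name

# Look up a pool by ID or name, then list its members, as the handler below
# does when 'includeChildResources' is requested.
pool = conn.load_balancer.find_pool('9a6d5f1c-0000-0000-0000-000000000000')
if pool:
    members = list(conn.load_balancer.members(pool.id))
    print(pool.name, len(members))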
Get a specific pool. If the param 'includeChildResources' is passed in as a truthy value, the details of all resources that exist under the pool will be returned along with the pool details.
def get(self, request, pool_id):
    conn = get_sdk_connection(request)
    pool = conn.load_balancer.find_pool(pool_id)
    pool = _get_sdk_object_dict(pool)

    if request.GET.get('includeChildResources'):
        resources = {}
        resources['pool'] = pool

        if pool.get('members'):
            member_list = _sdk_object_to_list(
                conn.load_balancer.members(pool_id))
            resources['members'] = member_list

        if pool.get('health_monitor_id'):
            monitor_id = pool['health_monitor_id']
            monitor = conn.load_balancer.find_health_monitor(
                monitor_id)
            monitor = _get_sdk_object_dict(monitor)
            resources['monitor'] = monitor

        return resources
    else:
        return pool
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_pool(self, name, dc, cluster):\n cluster_obj = self.get_cluster(cluster, dc)\n for rp in cluster_obj.resourcePool.resourcePool:\n if rp.name == name:\n return rp", "def get_pool(self, pool_name=None, pool_id=None):\n\n id_or_name = pool_id if pool_id else pool_name\n errormsg = \"Failed to get the pool {0} with error {1}\"\n\n try:\n obj_pool = self.unity_conn.get_pool(name=pool_name, _id=pool_id)\n\n if pool_id and obj_pool.existed:\n LOG.info(\"Successfully got the pool object %s\",\n obj_pool)\n return obj_pool\n if pool_name:\n LOG.info(\"Successfully got pool %s\", obj_pool)\n return obj_pool\n else:\n msg = \"Failed to get the pool with {0}\".format(\n id_or_name)\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n\n except Exception as e:\n msg = errormsg.format(id_or_name, str(e))\n LOG.error(msg)\n self.module.fail_json(msg=msg)", "def show_pool(self, pool, **_params):\r\n return self.get(self.pool_path % (pool), params=_params)", "def show_resource_pool(client, private_cloud, resource_pool, location):\n return client.get(location, private_cloud, resource_pool)", "def get_pool(self,\n instance_id: str,\n pool_id: str,\n *,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if pool_id is None:\n raise ValueError('pool_id must be provided')\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='get_pool')\n headers.update(sdk_headers)\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n headers['Accept'] = 'application/json'\n\n url = '/instances/{0}/pools/{1}'.format(\n *self.encode_path_vars(instance_id, pool_id))\n request = self.prepare_request(method='GET',\n url=url,\n headers=headers)\n\n response = self.send(request)\n return response", "def _get_pool(name=None, session=None):\n if session is None:\n session = _get_session()\n pools = session.xenapi.pool.get_all()\n for pool in pools:\n pool_record = session.xenapi.pool.get_record(pool)\n if name in pool_record.get(\"name_label\"):\n return pool\n return None", "def get_pool ( self ):\n if self._poolstack:\n return self._poolstack[-1]\n else:\n return self.get_new_pool ( force=True )", "def resource_pool(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_pool\")", "def _get_pool_by_name(self, pool_name):\n pool_manager = PoolManager(organization_name=self._organization_name,\n project_name=self._project_name, creds=self._creds)\n pools = pool_manager.list_pools()\n return next((pool for pool in pools.value if pool.name == pool_name), None)", "def storage_pool_get(context, storage_pool_id):\n return _storage_pool_get(context, storage_pool_id)", "def get_pool():\n app = get_app()\n return app['pool']", "def get_pool(name):\n if name not in _CONNECTIONS:\n add_pool(name)\n return _CONNECTIONS[name]", "def _determine_resource_pool(session, vm_):\n resource_pool = \"\"\n if \"resource_pool\" in vm_.keys():\n resource_pool = _get_pool(vm_[\"resource_pool\"], session)\n else:\n pool = session.xenapi.pool.get_all()\n if not pool:\n resource_pool = None\n else:\n first_pool = session.xenapi.pool.get_all()[0]\n resource_pool = first_pool\n pool_record = session.xenapi.pool.get_record(resource_pool)\n log.debug(\"resource pool: %s\", pool_record[\"name_label\"])\n return resource_pool", "def get_cluster_pool_output(cluster_pool_name: Optional[pulumi.Input[str]] = None,\n 
resource_group_name: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetClusterPoolResult]:\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n backup_pool: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n failover_ratio: Optional[pulumi.Input[float]] = None,\n health_checks: Optional[pulumi.Input[str]] = None,\n instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n self_link: Optional[pulumi.Input[str]] = None,\n session_affinity: Optional[pulumi.Input[str]] = None) -> 'TargetPool':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _TargetPoolState.__new__(_TargetPoolState)\n\n __props__.__dict__[\"backup_pool\"] = backup_pool\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"failover_ratio\"] = failover_ratio\n __props__.__dict__[\"health_checks\"] = health_checks\n __props__.__dict__[\"instances\"] = instances\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"self_link\"] = self_link\n __props__.__dict__[\"session_affinity\"] = session_affinity\n return TargetPool(resource_name, opts=opts, __props__=__props__)", "def fusion_api_get_pool(self, uri=None, api=None, headers=None):\n return self.idpool.get(uri=uri, api=api, headers=headers)", "def get_connection_pool(self, params):\r\n cp_params = dict(params)\r\n cp_params.update(self.pool_cls_kwargs)\r\n return self.pool_cls(**cp_params)", "def get_cluster_pool(cluster_pool_name: Optional[str] = None,\n resource_group_name: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetClusterPoolResult:\n __args__ = dict()\n __args__['clusterPoolName'] = cluster_pool_name\n __args__['resourceGroupName'] = resource_group_name\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('azure-native:hdinsight/v20230601preview:getClusterPool', __args__, opts=opts, typ=GetClusterPoolResult).value\n\n return AwaitableGetClusterPoolResult(\n aks_cluster_profile=pulumi.get(__ret__, 'aks_cluster_profile'),\n aks_managed_resource_group_name=pulumi.get(__ret__, 'aks_managed_resource_group_name'),\n cluster_pool_profile=pulumi.get(__ret__, 'cluster_pool_profile'),\n compute_profile=pulumi.get(__ret__, 'compute_profile'),\n deployment_id=pulumi.get(__ret__, 'deployment_id'),\n id=pulumi.get(__ret__, 'id'),\n location=pulumi.get(__ret__, 'location'),\n log_analytics_profile=pulumi.get(__ret__, 'log_analytics_profile'),\n managed_resource_group_name=pulumi.get(__ret__, 'managed_resource_group_name'),\n name=pulumi.get(__ret__, 'name'),\n network_profile=pulumi.get(__ret__, 'network_profile'),\n provisioning_state=pulumi.get(__ret__, 'provisioning_state'),\n status=pulumi.get(__ret__, 'status'),\n system_data=pulumi.get(__ret__, 'system_data'),\n tags=pulumi.get(__ret__, 'tags'),\n type=pulumi.get(__ret__, 'type'))", "def pool(self):\n return self._properties.get('pool')", "def pool(self) -> Pool:\n assert self._pool is not None\n return self._pool", "def _get_random_pool(pool_list):\n if not pool_list:\n return None\n if len(pool_list) == 1:\n return pool_list[0]\n\n last = len(pool_list) - 1\n index = 
random.randint(0, last)\n return pool_list[index]", "def resource_pool_id(self) -> str:\n return pulumi.get(self, \"resource_pool_id\")", "def get_pool(self):\n try:\n return self._pool\n except AttributeError:\n db_url = getattr(settings, self.name)\n self._pool = PostgresConnectionPool.for_url(db_url)\n return self._pool", "def get(self):\n\t\tif not self.available and not self.clean_one(destroy=False):\n\t\t\tif self.limit is not None and len(self.members) + self.creating >= self.limit:\n\t\t\t\traise PoolExhaustedException()\n\t\t\tself.create()\n\t\tassert self.available, \"Still no resources available after making one available\"\n\t\tresource = self.available[0]\n\t\tself.used.add(resource)\n\t\treturn self._wrap(resource)", "def get_default_resource_pool(self):\n try:\n return self.client.list_resource_pools()[0]['resource_pool']\n except VMwareError as e:\n raise VMwareBackendError(e)", "def get_by_url(self, url, pool_name=None):\n\t\tif not pool_name:\n\t\t\treturn self.pool[url]\n\t\treturn getattr(self, pool_name)[url]", "def get_device_pool(arn=None):\n pass", "def get_agent_pool_output(agent_pool_name: Optional[pulumi.Input[str]] = None,\n kubernetes_cluster_name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAgentPoolResult]:\n ...", "def get_agent_pool(agent_pool_name: Optional[str] = None,\n kubernetes_cluster_name: Optional[str] = None,\n resource_group_name: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAgentPoolResult:\n __args__ = dict()\n __args__['agentPoolName'] = agent_pool_name\n __args__['kubernetesClusterName'] = kubernetes_cluster_name\n __args__['resourceGroupName'] = resource_group_name\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('azure-native:networkcloud:getAgentPool', __args__, opts=opts, typ=GetAgentPoolResult).value\n\n return AwaitableGetAgentPoolResult(\n administrator_configuration=pulumi.get(__ret__, 'administrator_configuration'),\n agent_options=pulumi.get(__ret__, 'agent_options'),\n attached_network_configuration=pulumi.get(__ret__, 'attached_network_configuration'),\n availability_zones=pulumi.get(__ret__, 'availability_zones'),\n count=pulumi.get(__ret__, 'count'),\n detailed_status=pulumi.get(__ret__, 'detailed_status'),\n detailed_status_message=pulumi.get(__ret__, 'detailed_status_message'),\n extended_location=pulumi.get(__ret__, 'extended_location'),\n id=pulumi.get(__ret__, 'id'),\n kubernetes_version=pulumi.get(__ret__, 'kubernetes_version'),\n labels=pulumi.get(__ret__, 'labels'),\n location=pulumi.get(__ret__, 'location'),\n mode=pulumi.get(__ret__, 'mode'),\n name=pulumi.get(__ret__, 'name'),\n provisioning_state=pulumi.get(__ret__, 'provisioning_state'),\n system_data=pulumi.get(__ret__, 'system_data'),\n tags=pulumi.get(__ret__, 'tags'),\n taints=pulumi.get(__ret__, 'taints'),\n type=pulumi.get(__ret__, 'type'),\n upgrade_settings=pulumi.get(__ret__, 'upgrade_settings'),\n vm_sku_name=pulumi.get(__ret__, 'vm_sku_name'))", "def retrieve_pool_stats(self, pool, **_params):\r\n return self.get(self.pool_path_stats % (pool), params=_params)" ]
[ "0.6978908", "0.6633539", "0.6437803", "0.6430661", "0.636093", "0.6309993", "0.6266827", "0.60692585", "0.6041649", "0.60270584", "0.5997973", "0.59675074", "0.59316224", "0.58240104", "0.5701325", "0.56308126", "0.55841595", "0.5570789", "0.55605954", "0.55356663", "0.55098933", "0.54937935", "0.5470507", "0.545087", "0.54084647", "0.5332654", "0.531989", "0.527598", "0.5266754", "0.5241336" ]
0.7976712
0
Get a specific member belonging to a specific pool.
def get(self, request, member_id, pool_id): conn = get_sdk_connection(request) member = conn.load_balancer.find_member(member_id, pool_id) return _get_sdk_object_dict(member)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getMember(unique_name):", "def getMember(unique_name):", "def member(self, uid):\n try:\n member = self.search(uid=uid)[0]\n except IndexError:\n return None\n\n if self.objects:\n return member\n\n return member[1]", "def get_member(self, name):\n members = self.wls_board.get_members()\n for member in members:\n if name in member.full_name:\n return member\n return 'None'", "def getMember(self, *args):\n return _libsbml.Group_getMember(self, *args)", "def get_member(did):\n conn = create_connection(db_location)\n c = conn.cursor()\n c.execute(\"SELECT * FROM members WHERE member_uid = \" + did)\n member = dict((c.description[i][0], value) for i, value in enumerate(c.fetchone()))\n if __debug__:\n print(member)\n conn.commit()\n conn.close()\n return member", "def getMemberFromName(self, name):\n for member in self.playersAndRoles:\n if name in member.user.name:\n return member", "def get_member(self, member_id):\n\t\treturn Member(member_id, self.user_id, self.site_id)", "def find_member(message, nickname):\n for member in message.guild.members:\n if nickname in member.display_name:\n return member", "def _get(self,name):\n for node in self._members:\n if node.alias == name:\n return node", "def get_member(self, user):\n for player in self.members:\n if player.uuid == user.id:\n return player\n return None", "def getMember(self):\n pm = getToolByName(self, 'portal_membership', None)\n # stick to the CachingPolicyManager expression convention\n if not pm or pm.isAnonymousUser():\n return None \n else:\n return pm.getAuthenticatedMember()", "def get_vm_in_pool_by_name(self, name, dc, cluster, pool):\n vms = self.get_all_vms_in_pool(pool, dc, cluster)\n for vm in vms:\n if vm.name == name:\n return vm", "async def get_guild_member(guild_id, member_id):\n user_id = await token_check()\n await guild_check(user_id, guild_id)\n member = await app.storage.get_single_member(guild_id, member_id)\n return jsonify(member)", "def get_member(self, *args, **kwargs):\n return self.bot.get_chat_member(self.id, *args, **kwargs)", "def get_member_from_guild(guild_members, username):\n username = username.lower()\n if username == 'rand':\n return random.choice(guild_members)\n members = []\n for member in guild_members:\n lower_name = member.name.replace(' ', '').lower()\n if member.nick is not None:\n lower_nick = member.nick.replace(' ', '').lower()\n if username == lower_nick:\n return member\n if username in lower_nick:\n members.append(member)\n elif username == lower_name:\n return member\n elif username in lower_name:\n members.append(member)\n\n if not members:\n raise NameError(username)\n elif len(members) == 1:\n return members[0]\n else:\n raise AmbiguousInputError([member.name for member in members])", "def getMember(self, name):\r\n path = util.joinUri(self.path, name)\r\n\r\n return self.provider.getResourceInst(path, self.environ)", "def find_member(self, search_str: str) -> 'dt_member.Member':\n sp = search_str.rsplit(\"#\", 1)\n if len(sp) == 1:\n # Member name only :(\n predicate = lambda member: member.user.name == sp[0] or member.nickname == sp[0]\n else:\n # Discriminator too!\n # Don't check nicknames for this.\n predicate = lambda member: member.user.name == sp[0] \\\n and member.user.discriminator == sp[1]\n\n filtered = filter(predicate, self.members.values())\n return next(filtered, None)", "def _get_pool_by_name(self, pool_name):\n pool_manager = PoolManager(organization_name=self._organization_name,\n project_name=self._project_name, creds=self._creds)\n pools = 
pool_manager.list_pools()\n return next((pool for pool in pools.value if pool.name == pool_name), None)", "def get_pool(self, pool_name=None, pool_id=None):\n\n id_or_name = pool_id if pool_id else pool_name\n errormsg = \"Failed to get the pool {0} with error {1}\"\n\n try:\n obj_pool = self.unity_conn.get_pool(name=pool_name, _id=pool_id)\n\n if pool_id and obj_pool.existed:\n LOG.info(\"Successfully got the pool object %s\",\n obj_pool)\n return obj_pool\n if pool_name:\n LOG.info(\"Successfully got pool %s\", obj_pool)\n return obj_pool\n else:\n msg = \"Failed to get the pool with {0}\".format(\n id_or_name)\n LOG.error(msg)\n self.module.fail_json(msg=msg)\n\n except Exception as e:\n msg = errormsg.format(id_or_name, str(e))\n LOG.error(msg)\n self.module.fail_json(msg=msg)", "def _get_pool(name=None, session=None):\n if session is None:\n session = _get_session()\n pools = session.xenapi.pool.get_all()\n for pool in pools:\n pool_record = session.xenapi.pool.get_record(pool)\n if name in pool_record.get(\"name_label\"):\n return pool\n return None", "def member(self) -> object:\n return self._member", "def test_get_resource_group_member_by_moid(self):\n pass", "def get_member_from_database_id(self, database_id, cache=True):\n assert isinstance(database_id, (int, long)), type(database_id)\n assert isinstance(cache, bool), type(cache)\n if cache:\n try:\n return MemberFromDatabaseId(database_id)\n except LookupError:\n pass\n\n try:\n public_key, = next(self._database.execute(u\"SELECT public_key FROM member WHERE id = ?\", (database_id,)))\n except StopIteration:\n return None\n else:\n return MemberWithoutCheck(str(public_key))", "def member(self, user):\n return self.search(uid=user)[0][1]", "def show_member(self, member, **_params):\r\n return self.get(self.member_path % (member), params=_params)", "def get(user):\n if user:\n return Member.get_by_key_name(user.user_id())", "def get_member_from_group(member, group_name):\n query= \"SELECT * FROM groupmembers WHERE member='{}' AND group_id='{}'\".format(member, group_name)\n cur.execute(query)\n result = cur.fetchall()\n if len(result) > 1:\n return True\n return False", "def get_member(\n cls,\n value: str,\n ):\n\n if not value:\n return None\n\n members = [\n (member, member.value)\n for member in cls.__members__.values()\n ]\n for member, member_value in members:\n if member_value == value:\n return member\n\n return None", "def get_user(self, username: str) -> Optional[discord.Member]:\n for m in self.guild.members:\n if utils.istrcmp(m.display_name, username):\n return m\n return None" ]
[ "0.708236", "0.708236", "0.66537815", "0.6586724", "0.6484308", "0.64400035", "0.6400656", "0.6374163", "0.6358884", "0.6297418", "0.6275908", "0.6263685", "0.62490386", "0.6224761", "0.618702", "0.61346817", "0.6080308", "0.6012395", "0.59983313", "0.5996142", "0.59809816", "0.5974623", "0.59518033", "0.59371555", "0.5934343", "0.592597", "0.59259415", "0.5903227", "0.58459705", "0.5805694" ]
0.7321655
0
Delete a specific member belonging to a specific pool.
def delete(self, request, member_id, pool_id): conn = get_sdk_connection(request) retry_on_conflict( conn, conn.load_balancer.delete_member, member_id, pool_id, load_balancer_getter=pool_get_load_balancer_id, resource_id=pool_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_member(self, context, member):\n LOG.info(\"Received request 'Delete Member' for Pool:\"\n \"%(pool_id)s \",\n {'pool_id': member['pool_id']})\n arg_dict = {'context': context,\n lb_const.MEMBER: member,\n }\n self._send_event(lb_const.EVENT_DELETE_MEMBER_V2, arg_dict,\n serialize=True,\n binding_key=member[lb_const.POOL]['loadbalancer_id'],\n key=member['id'])", "def delete_member(self, member):\r\n return self.delete(self.member_path % (member))", "def delete_entity(self, context, member):\n parent_pool_id = member.pool.id\n resource_path = \"%s/%s/%s/%s/%s\" % (RESOURCE_PREFIX,\n POOLS_RESOURCE,\n parent_pool_id,\n MEMBERS_RESOURCE,\n member.id)\n msg = _(\"NetScaler driver member removal: %s\") % member.id\n LOG.debug(msg)\n self.client.remove_resource(context.tenant_id, resource_path)", "def remove(self, member):\n with self.lock:\n try:\n self.pool.remove(member)\n except KeyError:\n pass", "def delete_member(net_id):\n connection = get_connection()\n cursor = connection.cursor()\n sql_string = \"DELETE FROM Member WHERE netID='\"+net_id+\"'\"\n cursor.execute(sql_string)\n connection.commit()", "def delete_member():\n client = RequestManager()\n client.set_method(\"DELETE\")\n member_id = STORED_ID[\"member_id\"]\n client.set_endpoint(\"/accounts/{0}/memberships/{1}\".format(CONFIG_DATA['account_id'], member_id))\n client.execute_request()", "def delete_pool(self, pool):\r\n return self.delete(self.pool_path % (pool))", "def remove_member(request, **kwargs):\n data = request.DATA\n loadbalancer_id = data.get('loadbalancer_id')\n pool_id = kwargs.get('pool_id')\n\n if kwargs.get('members_to_delete'):\n members_to_delete = kwargs['members_to_delete']\n member_id = members_to_delete.pop(0)\n\n conn = get_sdk_connection(request)\n conn.load_balancer.delete_member(member_id, pool_id,\n ignore_missing=True)\n\n args = (request, loadbalancer_id, update_member_list)\n kwargs = {'callback_kwargs': {\n 'existing_members': kwargs.get('existing_members'),\n 'members_to_add': kwargs.get('members_to_add'),\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)", "async def _ad_remove(self, ctx, member):\n member_object = discord.utils.find(\n lambda x: x.name == member or str(x) == member or (member.isnumeric() and x.id == int(member)),\n ctx.guild.members\n )\n if member_object is not None:\n member = member_object.id\n elif member.isnumeric():\n member = int(member)\n\n admin = list(filter(lambda x: x.user_id == member, self.database.get_admins(ctx.guild.id)))\n if admin:\n self.database.remove_item(admin[0])\n if member_object:\n await ctx.send(f\"Removed admin from {member_object.name}\")\n else:\n await ctx.send(\"Removed admin from invalid user\")\n else:\n await ctx.send(\"That person isn't an admin!\")", "def delete(self):\n self._lbcall('delete_pool', [self._name])", "def delete_pool(self, context, pool):\n LOG.info(\"Received request 'Delete Pool' for Pool:%(pool_id)s \",\n {'pool_id': pool['id']})\n arg_dict = {'context': context,\n lb_const.POOL: pool,\n }\n self._send_event(lb_const.EVENT_DELETE_POOL_V2, arg_dict,\n serialize=True,\n binding_key=pool['loadbalancer_id'],\n key=pool['id'])", "def deletePool(self,ippool_name): \n self.__deletePoolCheckInput(ippool_name)\n ippool_obj=ippool_main.getLoader().getIPpoolByName(ippool_name)\n self.__deletePoolDB(ippool_obj.getIPpoolID())\n ippool_main.getLoader().unloadIPpoolByID(ippool_obj.getIPpoolID())", "def delete_pool(self, service, bigips):\n 
loadbalancer = service.get('loadbalancer')\n pool = self.service_adapter.get_pool(service)\n members = service.get('members', list())\n\n error = None\n for bigip in bigips:\n try:\n self.pool_helper.delete(bigip, name=pool[\"name\"],\n partition=pool[\"partition\"])\n except HTTPError as err:\n if err.response.status_code != 404:\n error = f5_ex.PoolDeleteException(err.message)\n LOG.error(\"Failed to remove pool %s from %s: %s\",\n pool['name'], bigip, error.message)\n except Exception as err:\n error = f5_ex.PoolDeleteException(err.message)\n LOG.error(\"Failed to remove pool %s from %s: %s\",\n pool['name'], bigip, error.message)\n\n for member in members:\n self._delete_member_node(loadbalancer, member, bigip)\n\n return error", "def delete_entity(self, context, pool):\n resource_path = \"%s/%s/%s\" % (RESOURCE_PREFIX, POOLS_RESOURCE,\n pool.id)\n msg = _(\"NetScaler driver pool removal: %s\") % pool.id\n LOG.debug(msg)\n self.client.remove_resource(context.tenant_id, resource_path)", "def remove_pool(ctx, pool_name):\n \n entryFound = False\n table = \"NAT_POOL\"\n key = pool_name\n\n if len(pool_name) > 32:\n ctx.fail(\"Invalid pool name. Maximum allowed pool name is 32 characters !!\")\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n data = config_db.get_entry(table, key)\n if not data:\n click.echo(\"Trying to delete pool, which is not present.\")\n entryFound = True\n\n binding_dict = config_db.get_table('NAT_BINDINGS')\n if binding_dict and entryFound == False: \n for binding_name, binding_values in binding_dict.items():\n if binding_values['nat_pool'] == pool_name:\n click.echo(\"Pool is not removed, as it is mapped to Binding {}, remove the pool binding first !!\".format(binding_name))\n entryFound = True\n break\n\n if entryFound == False:\n config_db.set_entry(table, key, None)", "def del_member(self, member, dn=False):\n\n if dn:\n if not self.check_member(member, dn=True):\n return\n mod = (ldap.MOD_DELETE, 'member', member.encode('ascii'))\n else:\n if not self.check_member(member):\n return\n mod = (ldap.MOD_DELETE, 'member', member.get_dn().encode('ascii'))\n\n if self.__lib__.__batch_mods__:\n self.__lib__.enqueue_mod(self.__dn__, mod)\n elif not self.__lib__.__ro__:\n mod_attrs = [mod]\n self.__con__.modify_s(self.__dn__, mod_attrs)\n else:\n print(\"DELETE VALUE member = {} FOR {}\".format(mod[2],\n self.__dn__))", "def subject_member_delete(context, memb_id, session=None):\n session = session or get_session()\n member_ref = _subject_member_get(context, memb_id, session)\n _subject_member_delete(context, member_ref, session)", "def remove_member(self, member_to_remove):\r\n self._members.remove(member_to_remove)", "async def on_member_remove(member):\r\n pass", "def csDeletePool(self,poolid,usedid):\n\n logger.debug(\"Attempting to delete pool.\")\n\n url = self.csurl + \"/polcentral/v1_0/pools/delete/\"+poolid\n payload = {\"requestorid\":usedid,\"disallowlostfound\":False}\n\n try:\n r = requests.delete(url, data=json.dumps(payload))\n except Exception:\n logger.error(\"Exception during api call to add pool.\")\n return 'Error'\n\n if r.status_code == 200:\n logger.debug(\"Pool with ID '\"+poolid+\"' was successfully deleted.\")\n return 'Success'\n else:\n logger.error(\"Pool with ID '\"+poolid+\"' was not deleted. 
Error code is \"+str(r.status_code)+\".\")\n return 'Error'", "def delete(self, request, pool_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_pool,\n pool_id,\n load_balancer_getter=pool_get_load_balancer_id,\n resource_id=pool_id)", "def mac_pool_remove(handle, name, parent_dn=\"org-root\"):\r\n dn = parent_dn + '/mac-pool-' + name\r\n mo = handle.query_dn(dn)\r\n if mo:\r\n handle.remove_mo(mo)\r\n handle.commit()\r\n else:\r\n raise ValueError(\"MAC Pool is not available\")", "def delete_pool(self, argu):\n\n if not argu:\n LOG.error(\"In delete_pool, it should not pass the None.\")\n\n # delete policy\n self._delete_policy(\n argu['listener_id'],\n argu['session_persistence_type'],\n argu['lb_algorithm']\n )\n\n cmd_apv_no_group = ADCDevice.no_group(argu['pool_id'])\n for base_rest_url in self.base_rest_urls:\n self.run_cli_extend(base_rest_url, cmd_apv_no_group)", "def remove_member(self, persona):\n if persona in self.members:\n self.members.remove(persona)", "def execute(self, pool, vthunder):\n try:\n axapi_version = acos_client.AXAPI_21 if vthunder.axapi_version == 21 else acos_client.AXAPI_30\n c = self.client_factory(vthunder)\n #need to put algorithm logic\n out = c.slb.service_group.delete(pool.id)\n LOG.info(\"Pool deleted successfully.\")\n except Exception as e:\n print(str(e))\n LOG.info(\"Error occurred\")", "def remove_member(self, request, pk):\n farm = self.get_object()\n user = request.data.get('user')\n farm.remove_member(user)\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def delete(self, *args, **kwargs):\n # Delete the User and UserProfile objects associated with the\n # Member.\n user_profile = self.userprofile\n user = user_profile.user\n user_profile.delete()\n user.delete()\n # Delete the member itself\n super(Member, self).delete(*args, **kwargs)", "async def on_member_remove(member: Member):\n await member_handler.member_left(member)", "def delete_group_member(self, group_id, member_id):\n url = self.groups_url + \"/%s/members/%s\" % (group_id, member_id)\n return requests.delete(url, headers=self.headers)", "def delete_user(group_id, member_id):\n query=\"DELETE FROM groupmembers WHERE group_id= '{}' AND member_id = {}\".format(group_id, member_id)\n cur.execute(query)\n return cur.fetchone()" ]
[ "0.8120982", "0.76730645", "0.7627825", "0.7410858", "0.7356614", "0.73075813", "0.71536845", "0.6852029", "0.68129563", "0.6670148", "0.6619707", "0.65564084", "0.65563416", "0.6519952", "0.645955", "0.64011204", "0.6315733", "0.62642276", "0.6250975", "0.62033415", "0.6201535", "0.61762", "0.61559355", "0.61541665", "0.6114055", "0.6041501", "0.603722", "0.5990337", "0.5988803", "0.5966294" ]
0.7731087
1
Edit a pool member.
def put(self, request, member_id, pool_id): data = request.DATA conn = get_sdk_connection(request) monitor_address = data.get('monitor_address') member = conn.load_balancer.update_member( member_id, pool_id, weight=data.get('weight'), monitor_address=monitor_address if monitor_address else None, monitor_port=data.get('monitor_port'), admin_state_up=data.get('admin_state_up'), backup=data.get('backup', False), name=data.get('name'), ) return _get_sdk_object_dict(member)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, request, pool_id):\n # Assemble the lists of member id's to add and remove, if any exist\n request_member_data = request.DATA.get('members', [])\n\n conn = get_sdk_connection(request)\n existing_members = _sdk_object_to_list(\n conn.load_balancer.members(pool_id))\n\n (members_to_add, members_to_delete) = get_members_to_add_remove(\n request_member_data, existing_members)\n\n if members_to_add or members_to_delete:\n kwargs = {'existing_members': existing_members,\n 'members_to_add': members_to_add,\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}\n update_member_list(request, **kwargs)", "def put(self, request, pool_id):\n kwargs = {'pool_id': pool_id}\n update_pool(request, **kwargs)", "def do_edit(self, args):\n member = None\n rowid = args.split(' ')[0]\n \n # loop till we get a rowid which matches a member in the database\n while True:\n rowid = self.validateRowid(rowid)\n if rowid is None:\n rowid = input('Enter member id: ')\n continue\n \n member = self.roster.get(rowid)\n if member is None:\n print(\"No member with id of %d\" % rowid)\n # rowid will get validated again, but it's the same value\n # which already passed validation\n continue\n \n break\n \n print('Editing %s %s' % (member.first, member.last))\n print('Type new value, hit enter to keep current value, or enter spaces to clear a value')\n member.first = self.getNewValue('First name', member.first)\n member.last = self.getNewValue('Last name', member.last)\n member.introducedDate = self.getNewValue('introduced date', member.introducedDate) \n \n self.roster.update(member)", "def update_member(self, member, body=None):\r\n return self.put(self.member_path % (member), body=body)", "def update_pool(self, pool, body=None):\r\n return self.put(self.pool_path % (pool), body=body)", "def edit():", "def test_update_member(self):\r\n resource = 'member'\r\n cmd = member.UpdateMember(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'myname',\r\n '--tags', 'a', 'b'],\r\n {'name': 'myname', 'tags': ['a', 'b'], })", "def edit(self, **kwargs):\n ...", "def test_update_pool(self):\r\n resource = 'pool'\r\n cmd = pool.UpdatePool(test_cli20.MyApp(sys.stdout), None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--name', 'newname'],\r\n {'name': 'newname', })", "def edit(self):\n\n pass", "def _update(self):\n path = \"/members/%s\" % self._dict['member_id']\n data = self.extract()\n if self._dict['member_status_id'] in (\n MemberStatus.Active, MemberStatus.Error, MemberStatus.OptOut):\n data['status_to'] = self._dict['member_status_id']\n if not self.account.adapter.put(path, data):\n raise ex.MemberUpdateError()", "async def edit(self, name=None):\n\t\tsanitized_name = utilities.sanitize_string(str(name))\n\n\t\tif sanitized_name in {'', None}:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tif sanitized_name == self.name:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tif not 0 < len(sanitized_name) < 32:\n\t\t\traise exceptions.ClientError('INVALID_NAME')\n\n\t\tself.name = sanitized_name\n\n\t\tif self.group != None:\n\t\t\tfor member in self.group.members:\n\t\t\t\tif member.name == sanitized_name and member.uid != self.uid:\n\t\t\t\t\traise exceptions.ClientError('TAKEN_NAME')\n\n\t\t\tawait self.group.update_user(self)", "async def edit(self, ctx, member: GeneralMember, index: int = None,\n action_type: str = None, *, reason: str = None):\n if not reason or not index or not action_type:\n 
await ctx.send(\n \"You need to supply the correct parameters <member, index (from 1), action_type, reason>, try again.\", # noqa\n delete_after=5)\n return\n action_type = action_type.upper()\n actions = [str(x).strip(\"Action.\") for x in Action]\n if action_type not in actions:\n await ctx.send(\n f'You need to supply the correct Action parameter. Must be within: {actions}', # noqa\n delete_after=5)\n return\n else:\n action_type = Action[action_type]\n if len(reason) > 500:\n await ctx.send(\n \"Reason must be shorter than 500 char\",\n delete_after=5\n )\n try:\n count = await self.bot.pg_utils.set_single_modaction(\n ctx.guild.id,\n member.id,\n ctx.author.id,\n reason,\n action_type,\n index,\n self.bot.logger\n )\n local_embed = embeds.ModEditEmbed(member, ctx.author, action_type, reason, count) # noqa\n await ctx.send(embed=local_embed)\n except Exception as e:\n await ctx.send(embed=embeds.InternalErrorEmbed())\n self.bot.logger.warning(f'Error trying edit modactions for user: {e}') # noqa", "def member(self, member: object):\n\n self._member = member", "def edit_person():\n # get person name from user\n responses = accept_inputs([\"Person's name\"])\n person_name = responses[\"Person's name\"]\n # check for existence\n results = query_with_results(\"select * from person where name = ?\", [person_name])\n if len(results) == 0:\n print(\"No person found with name '%s'.\" % person_name)\n return\n else:\n # get id of person\n id = query_with_results(\"select id from person where name = ?\", [person_name])[0][0]\n # the task exists, so ask the user for the new description\n responses = accept_inputs([\"New name\"])\n # update db\n query_no_results(\"update person set name = ? where id = ?\", [responses[\"New name\"], id])\n print(\"Person with old name '%s' changed to '%s'.\" % (person_name, responses[\"New name\"]))", "def edit_person(self, pk):", "def update_member(self, context, old_member, member):\n old_val, new_val = self.get_diff_of_dict(old_member, member)\n LOG.info(\"Received request 'Update Member' for Member:\"\n \"%(member_id)s in Pool:%(pool_id)s with new Param:\"\n \"%(new_val)s and old Param:%(old_val)s\",\n {'pool_id': member['pool_id'],\n 'member_id': member['id'],\n 'old_val': old_val,\n 'new_val': new_val})\n arg_dict = {'context': context,\n lb_const.OLD_MEMBER: old_member,\n lb_const.MEMBER: member,\n }\n self._send_event(lb_const.EVENT_UPDATE_MEMBER_V2, arg_dict,\n serialize=True,\n binding_key=member[lb_const.POOL]['loadbalancer_id'],\n key=member['id'])", "def testMemberCanEdit(self):\n self.client.login(username=\"admin\", password=\"test\")\n response = self.client.get(reverse(\"task_detail\", args=[1]))\n self.failUnlessEqual(response.status_code, 200)\n self.failUnless(response.content.find(\"<h2>Edit</h2>\") != -1,\n \"Authenticated users cannot edit tasks.\")\n self.client.logout()", "def fusion_api_edit_storage_pool(self, body, uri, api=None, headers=None):\n return self.pool.update(body, uri, api=api, headers=headers)", "def edit():\n database.ask(mode='single')\n F = database.check(single=True)\n if F and hasattr(F,'edit'):\n name = database[0]\n F.edit(name)", "def edit(self, proxy, *args, **kwargs):\n return proxy(self, *args, **kwargs)", "def EditLabel(self, item):\r\n \r\n self.Edit(item)", "def update_member_list(request, **kwargs):\n data = request.DATA\n loadbalancer_id = data.get('loadbalancer_id')\n pool_id = kwargs.get('pool_id')\n existing_members = kwargs.get('existing_members')\n members_to_add = kwargs.get('members_to_add')\n 
members_to_delete = kwargs.get('members_to_delete')\n\n if members_to_delete:\n kwargs = {'existing_members': existing_members,\n 'members_to_add': members_to_add,\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}\n remove_member(request, **kwargs)\n elif members_to_add:\n kwargs = {'existing_members': existing_members,\n 'members_to_add': members_to_add,\n 'members_to_delete': members_to_delete,\n 'pool_id': pool_id}\n add_member(request, **kwargs)\n elif data.get('monitor'):\n args = (request, loadbalancer_id, update_monitor)\n thread.start_new_thread(poll_loadbalancer_status, args)", "def set_name(net_id, name):\n connection = get_connection()\n cursor = connection.cursor()\n sql_string = \"UPDATE Member SET name='\"+name+\"' WHERE netID='\"+net_id+\"'\"\n cursor.execute(sql_string)\n connection.commit()", "async def modify_guild_member(guild_id, member_id):\n user_id = await token_check()\n await guild_owner_check(user_id, guild_id)\n\n j = validate(await request.get_json(), MEMBER_UPDATE)\n nick_flag = False\n\n if 'nick' in j:\n await guild_perm_check(user_id, guild_id, 'manage_nicknames')\n\n nick = j['nick'] or None\n\n await app.db.execute(\"\"\"\n UPDATE members\n SET nickname = $1\n WHERE user_id = $2 AND guild_id = $3\n \"\"\", nick, member_id, guild_id)\n\n nick_flag = True\n\n if 'mute' in j:\n await guild_perm_check(user_id, guild_id, 'mute_members')\n\n await app.db.execute(\"\"\"\n UPDATE members\n SET muted = $1\n WHERE user_id = $2 AND guild_id = $3\n \"\"\", j['mute'], member_id, guild_id)\n\n if 'deaf' in j:\n await guild_perm_check(user_id, guild_id, 'deafen_members')\n\n await app.db.execute(\"\"\"\n UPDATE members\n SET deafened = $1\n WHERE user_id = $2 AND guild_id = $3\n \"\"\", j['deaf'], member_id, guild_id)\n\n if 'channel_id' in j:\n # TODO: check MOVE_MEMBERS and CONNECT to the channel\n # TODO: change the member's voice channel\n pass\n\n if 'roles' in j:\n await guild_perm_check(user_id, guild_id, 'manage_roles')\n await _update_member_roles(guild_id, member_id, j['roles'])\n\n member = await app.storage.get_member_data_one(guild_id, member_id)\n member.pop('joined_at')\n\n # call pres_update for role and nick changes.\n partial = {\n 'roles': member['roles']\n }\n\n if nick_flag:\n partial['nick'] = j['nick']\n\n await app.dispatcher.dispatch(\n 'lazy_guild', guild_id, 'pres_update', user_id, partial)\n\n await app.dispatcher.dispatch_guild(guild_id, 'GUILD_MEMBER_UPDATE', {**{\n 'guild_id': str(guild_id)\n }, **member})\n\n return '', 204", "async def edit(self, *, name, roles: Optional[Any] = ..., reason: Optional[Any] = ...):\n ...", "def put(self):\n\n data=dict(request.form)\n if g.auth:\n return g.swarm_node.updateNode(**data)\n else:\n res = {\"msg\": \"Authentication failed, permission denied.\", \"code\": 403}\n logger.warn(res)\n return res, 403", "def edit_person(self, treeview):\n model, iter_ = treeview.get_selection().get_selected()\n if iter_:\n handle = model.get_value(iter_, 0)\n try:\n person = self.dbstate.db.get_person_from_handle(handle)\n EditPerson(self.dbstate, self.uistate, [], person)\n except WindowActiveError:\n pass", "def dummy():\n\t\t\tself.edit = True", "def delete_member(self, context, member):\n LOG.info(\"Received request 'Delete Member' for Pool:\"\n \"%(pool_id)s \",\n {'pool_id': member['pool_id']})\n arg_dict = {'context': context,\n lb_const.MEMBER: member,\n }\n self._send_event(lb_const.EVENT_DELETE_MEMBER_V2, arg_dict,\n serialize=True,\n binding_key=member[lb_const.POOL]['loadbalancer_id'],\n 
key=member['id'])" ]
[ "0.6425986", "0.63002586", "0.619265", "0.6118293", "0.608884", "0.5876162", "0.5841779", "0.58345354", "0.56993985", "0.5678758", "0.5619991", "0.5610326", "0.56079984", "0.5544971", "0.5526997", "0.5516116", "0.5471154", "0.54462516", "0.54318666", "0.5422469", "0.53969634", "0.53654534", "0.53405565", "0.52948225", "0.5261041", "0.52399856", "0.523767", "0.52366334", "0.5226904", "0.5200564" ]
0.6802564
0
Edit a health monitor.
def put(self, request, health_monitor_id): update_monitor(request)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_health_monitor(self, health_monitor, body=None):\r\n return self.put(self.health_monitor_path % (health_monitor), body=body)", "def test_update_health_monitor(self):\r\n resource = 'health_monitor'\r\n cmd = healthmonitor.UpdateHealthMonitor(test_cli20.MyApp(sys.stdout),\r\n None)\r\n self._test_update_resource(resource, cmd, 'myid',\r\n ['myid', '--timeout', '5'],\r\n {'timeout': '5', })", "def update_monitor(request, **kwargs):\n data = request.DATA\n monitor_id = data['monitor']['id']\n hm_type = data['monitor']['type']\n\n conn = get_sdk_connection(request)\n healthmonitor_kwargs = {\n 'delay': data['monitor'].get('delay'),\n 'timeout': data['monitor'].get('timeout'),\n 'max_retries': data['monitor'].get('max_retries'),\n 'max_retries_down': data['monitor'].get('max_retries_down'),\n 'admin_state_up': data['monitor'].get('admin_state_up'),\n 'name': data['monitor'].get('name')\n }\n if hm_type in ('HTTP', 'HTTPS'):\n healthmonitor_kwargs.update({\n 'http_method': data['monitor'].get('http_method'),\n 'url_path': data['monitor'].get('url_path'),\n 'expected_codes': data['monitor'].get('expected_codes')\n })\n\n healthmonitor = conn.load_balancer.update_health_monitor(\n monitor_id,\n **healthmonitor_kwargs\n )\n\n return _get_sdk_object_dict(healthmonitor)", "def update_healthmonitor(self, context, old_healthmonitor, healthmonitor):\n old_val, new_val = self.get_diff_of_dict(\n old_healthmonitor, healthmonitor)\n LOG.info(\"Received request 'Update Pool Health Monitor' for \"\n \"Health monitor:%(hm)s with new Param:%(new_val)s and \"\n \"old Param:%(old_val)s\",\n {'hm': healthmonitor['id'],\n 'old_val': old_val,\n 'new_val': new_val})\n arg_dict = {'context': context,\n lb_const.OLD_HEALTHMONITOR: old_healthmonitor,\n lb_const.HEALTHMONITOR: healthmonitor\n }\n self._send_event(lb_const.EVENT_UPDATE_HEALTH_MONITOR_V2,\n arg_dict, serialize=True,\n binding_key=healthmonitor[lb_const.POOL][\n 'loadbalancer_id'],\n key=healthmonitor['id'])", "def show_health_monitor(self, health_monitor, **_params):\r\n return self.get(self.health_monitor_path % (health_monitor),\r\n params=_params)", "def health(self, new_health: int) -> None:", "def create_health_monitor(self, body=None):\r\n return self.post(self.health_monitors_path, body=body)", "def set_health(self, health):\n self._health = health", "def set_health(self, health):\n self.__health = health", "def health_modifier(self, health):\n cmd = '{}testHealthModifier {}'.format(self.console, health)\n self.write_command(cmd)", "def test_create_healthmonitor_with_all_params(self):\r\n resource = 'health_monitor'\r\n cmd = healthmonitor.CreateHealthMonitor(test_cli20.MyApp(sys.stdout),\r\n None)\r\n admin_state_up = False\r\n delay = '60'\r\n expected_codes = '200-202,204'\r\n http_method = 'HEAD'\r\n max_retries = '2'\r\n timeout = '10'\r\n type = 'TCP'\r\n tenant_id = 'my-tenant'\r\n url_path = '/health'\r\n my_id = 'my-id'\r\n args = ['--admin-state-down',\r\n '--delay', delay,\r\n '--expected-codes', expected_codes,\r\n '--http-method', http_method,\r\n '--max-retries', max_retries,\r\n '--timeout', timeout,\r\n '--type', type,\r\n '--tenant-id', tenant_id,\r\n '--url-path', url_path]\r\n position_names = ['admin_state_up', 'delay',\r\n 'expected_codes', 'http_method',\r\n 'max_retries', 'timeout',\r\n 'type', 'tenant_id', 'url_path']\r\n position_values = [admin_state_up, delay,\r\n expected_codes, http_method,\r\n max_retries, timeout,\r\n type, tenant_id, url_path]\r\n self._test_create_resource(resource, cmd, '', my_id, 
args,\r\n position_names, position_values)", "def set_health(self, new_health):\n if new_health < 0:\n \"\"\"Health points can't be below zero.\"\"\"\n self.health = 0\n elif new_health > 100:\n \"\"\"Health points can't be above 100.\"\"\"\n self.health = 100\n else:\n self.health = new_health", "def set_humidity(self, humidity: int) -> None:\n success = self._client.set_hum_setpoint(humidity)\n\n if not success:\n _LOGGER.error(\"Failed to change the target humidity level\")\n self.schedule_update_ha_state()", "def update_monitor(self,\n instance_id: str,\n monitor_id: str,\n *,\n name: str = None,\n description: str = None,\n type: str = None,\n port: int = None,\n interval: int = None,\n retries: int = None,\n timeout: int = None,\n method: str = None,\n path: str = None,\n headers_: List['HealthcheckHeader'] = None,\n allow_insecure: bool = None,\n expected_codes: str = None,\n expected_body: str = None,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if monitor_id is None:\n raise ValueError('monitor_id must be provided')\n if headers_ is not None:\n headers_ = [convert_model(x) for x in headers_]\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='update_monitor')\n headers.update(sdk_headers)\n\n data = {\n 'name': name,\n 'description': description,\n 'type': type,\n 'port': port,\n 'interval': interval,\n 'retries': retries,\n 'timeout': timeout,\n 'method': method,\n 'path': path,\n 'headers': headers_,\n 'allow_insecure': allow_insecure,\n 'expected_codes': expected_codes,\n 'expected_body': expected_body\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n headers['Accept'] = 'application/json'\n\n url = '/instances/{0}/monitors/{1}'.format(\n *self.encode_path_vars(instance_id, monitor_id))\n request = self.prepare_request(method='PUT',\n url=url,\n headers=headers,\n data=data)\n\n response = self.send(request)\n return response", "def health(self, health):\n\n self._health = health", "def health(self, health):\n\n self._health = health", "def health(self, health):\n\n self._health = health", "def health(self, health):\n\n self._health = health", "def test_create_healthmonitor_with_mandatory_params(self):\r\n resource = 'health_monitor'\r\n cmd = healthmonitor.CreateHealthMonitor(test_cli20.MyApp(sys.stdout),\r\n None)\r\n admin_state_up = False\r\n delay = '60'\r\n max_retries = '2'\r\n timeout = '10'\r\n type = 'TCP'\r\n tenant_id = 'my-tenant'\r\n my_id = 'my-id'\r\n args = ['--admin-state-down',\r\n '--delay', delay,\r\n '--max-retries', max_retries,\r\n '--timeout', timeout,\r\n '--type', type,\r\n '--tenant-id', tenant_id]\r\n position_names = ['admin_state_up', 'delay', 'max_retries', 'timeout',\r\n 'type', 'tenant_id']\r\n position_values = [admin_state_up, delay, max_retries, timeout, type,\r\n tenant_id]\r\n self._test_create_resource(resource, cmd, '', my_id, args,\r\n position_names, position_values)", "def post_loadbalancer_healthmonitor_update(self, resource_id, resource_dict):\n pass", "def set_humidity(self, humidity):\n self.humidity = humidity", "def ion_health_info(self, ion_health_info):\n\n self._ion_health_info = ion_health_info", "def from_dict(cls, _dict: Dict) -> 'Monitor':\n args = 
{}\n if 'id' in _dict:\n args['id'] = _dict.get('id')\n if 'name' in _dict:\n args['name'] = _dict.get('name')\n if 'description' in _dict:\n args['description'] = _dict.get('description')\n if 'type' in _dict:\n args['type'] = _dict.get('type')\n if 'port' in _dict:\n args['port'] = _dict.get('port')\n if 'interval' in _dict:\n args['interval'] = _dict.get('interval')\n if 'retries' in _dict:\n args['retries'] = _dict.get('retries')\n if 'timeout' in _dict:\n args['timeout'] = _dict.get('timeout')\n if 'method' in _dict:\n args['method'] = _dict.get('method')\n if 'path' in _dict:\n args['path'] = _dict.get('path')\n if 'headers' in _dict:\n args['headers_'] = [HealthcheckHeader.from_dict(x) for x in _dict.get('headers')]\n if 'allow_insecure' in _dict:\n args['allow_insecure'] = _dict.get('allow_insecure')\n if 'expected_codes' in _dict:\n args['expected_codes'] = _dict.get('expected_codes')\n if 'expected_body' in _dict:\n args['expected_body'] = _dict.get('expected_body')\n if 'created_on' in _dict:\n args['created_on'] = _dict.get('created_on')\n if 'modified_on' in _dict:\n args['modified_on'] = _dict.get('modified_on')\n return cls(**args)", "def _ctrl_hum_set(self, osrs_h):\n data = osrs_h & 0x7\n self._bus.write_byte_data(self.addr, self.CTRL_HUM,\n data)", "def add_health_monitor(self, loadbalancer, type, delay=10, timeout=10,\n attemptsBeforeDeactivation=3, path=\"/\", statusRegex=None,\n bodyRegex=None, hostHeader=None):\n uri = \"/loadbalancers/%s/healthmonitor\" % utils.get_id(loadbalancer)\n req_body = {\"healthMonitor\": {\n \"type\": type,\n \"delay\": delay,\n \"timeout\": timeout,\n \"attemptsBeforeDeactivation\": attemptsBeforeDeactivation,\n }}\n uptype = type.upper()\n if uptype.startswith(\"HTTP\"):\n lb = self._get_lb(loadbalancer)\n if uptype != lb.protocol:\n raise exc.ProtocolMismatch(\"Cannot set the Health Monitor type \"\n \"to '%s' when the Load Balancer's protocol is '%s'.\" %\n (type, lb.protocol))\n if not all((path, statusRegex, bodyRegex)):\n raise exc.MissingHealthMonitorSettings(\"When creating an HTTP(S) \"\n \"monitor, you must provide the 'path', 'statusRegex' and \"\n \"'bodyRegex' parameters.\")\n body_hm = req_body[\"healthMonitor\"]\n body_hm[\"path\"] = path\n body_hm[\"statusRegex\"] = statusRegex\n body_hm[\"bodyRegex\"] = bodyRegex\n if hostHeader:\n body_hm[\"hostHeader\"] = hostHeader\n resp, body = self.api.method_put(uri, body=req_body)\n return body", "def edit(self, hardware_id, userdata=None, hostname=None, domain=None,\r\n notes=None):\r\n\r\n obj = {}\r\n if userdata:\r\n self.hardware.setUserMetadata([userdata], id=hardware_id)\r\n\r\n if hostname:\r\n obj['hostname'] = hostname\r\n\r\n if domain:\r\n obj['domain'] = domain\r\n\r\n if notes:\r\n obj['notes'] = notes\r\n\r\n if not obj:\r\n return True\r\n\r\n return self.hardware.editObject(obj, id=hardware_id)", "def delete_health_monitor(self, health_monitor):\r\n return self.delete(self.health_monitor_path % (health_monitor))", "def reset_health_meter(health_meter):\n health_meter['player'] = 40\n health_meter['enemy'] = 30", "def associate_health_monitor(self, pool, body):\r\n return self.post(self.associate_pool_health_monitors_path % (pool),\r\n body=body)", "def pre_loadbalancer_healthmonitor_update(self, resource_id, resource_dict):\n pass" ]
[ "0.7318301", "0.72835505", "0.7035468", "0.6422127", "0.6017512", "0.5552425", "0.5524849", "0.5474026", "0.54440564", "0.54111576", "0.5367544", "0.5276218", "0.5227774", "0.5200923", "0.5196495", "0.5196495", "0.5196495", "0.5196495", "0.5122279", "0.5118035", "0.5062808", "0.50406307", "0.50395876", "0.5001247", "0.49781883", "0.49685887", "0.49590328", "0.4933948", "0.4925923", "0.49258608" ]
0.7851766
0
List of flavor profiles for the current project. The listing result is an object with property "items".
def get(self, request): conn = get_sdk_connection(request) flavor_profile_list = _sdk_object_to_list( conn.load_balancer.flavor_profiles() ) return {'items': flavor_profile_list}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_profiles(self):\n return self._get(\"posture\", box=BoxList)", "def get_profiles(self):\n profiles = [['Profile name', 'GUID']]\n r = self.system_cursor.execute('{Call wtGetProfileList()}')\n for row in r.fetchall():\n profiles.append([row.PROFILE_NAME, row.PROFILE_GUID])\n return profiles", "def profiles_names(self):\n url = get_url('profiles')\n response = self._get(url)\n raise_on_error(response)\n return response.json()", "def list_profiles(self, params):\n return self.profiles", "def profiles(self):\n if not self._profiles:\n self.GetAllProfiles()\n return self._profiles", "def GetAllProfiles(self):\n profiles = []\n feed_uri = self._gd_client.GetFeedUri('profiles')\n while feed_uri:\n feed = self._gd_client.GetProfilesFeed(uri=feed_uri)\n profiles.extend(feed.entry)\n feed_uri = feed.FindNextLink()\n self._profiles = profiles", "def list_profiles(self) -> dict:\n wsc = self.read_ws_configuration()\n out = OrderedDict()\n for name, json in wsc.profiles.items():\n out[name] = Profile(name, self.ws_data_folder / name, json)\n # Try to find current profile\n try:\n out[self.current_profile_name].is_current = True\n except Exception:\n pass\n return out", "def available_profiles(cls) -> List[str]:\n return list(cfg.get(\"profiles\"))", "def list_profiles(request, pk=0):\n context = {'items': [], 'resource_type': 'Profile'}\n handle_expired_profiles()\n if pk == 0:\n context['h2'] = \"Configuration Profiles\"\n context['header_1'] = \"Type\"\n context['header_2'] = \"Last Modified\"\n profiles = ConfigurationProfile.objects.all().reverse()\n for profile in profiles:\n assignment_count = profile.pending_install.count()\n install_count = profile.installed.count()\n data = {'filename': str(profile), 'type': \"macOS\", 'meta': profile, 'assignment_count': assignment_count,\n 'install_count': install_count}\n context['items'].append(data)\n else:\n device = get_object_or_404(Laptop, pk=pk)\n context['h2'] = \"Profiles for {}\".format(device.name)\n context['header_1'] = \"Version\"\n context['header_2'] = \"Expires\"\n context['device_view'] = True\n context['device_id'] = pk\n profiles = ConfigurationProfile.objects.filter(pending_install__in=[device])\n profiles |= ConfigurationProfile.objects.filter(installed__in=[device])\n for profile in profiles:\n status = 'Not assigned'\n for entry in profile.installed.all():\n if entry == device:\n status = 'Installed'\n for entry in profile.pending_install.all():\n if entry == device:\n status = 'Assigned'\n record = InstallationRecord.objects.filter(profile=profile, device=device, active=True).first()\n expires_soon = False\n if record is not None and record.expires is not None:\n if timezone.now() < record.expires < timezone.now() + timezone.timedelta(days=30):\n expires_soon = True\n data = {'filename': str(profile), 'downloadable': False, 'install_record': record, 'meta': profile,\n 'status': status, 'expires_soon': expires_soon}\n context['items'].append(data)\n\n return render(request, 'mdm/resource_list.html', context)", "def list(self):\n # List is to be extended (directories should not have a trailing slash)\n paths_to_ignore = ['.DS_Store']\n\n profiles = []\n cache = ClientCache(self._conan_api.cache_folder)\n profiles_path = cache.profiles_path\n if os.path.exists(profiles_path):\n for current_directory, _, files in os.walk(profiles_path, followlinks=True):\n files = filter(lambda file: os.path.relpath(\n os.path.join(current_directory, file), profiles_path) not in paths_to_ignore, files)\n\n for filename in files:\n rel_path = 
os.path.relpath(os.path.join(current_directory, filename),\n profiles_path)\n profiles.append(rel_path)\n\n profiles.sort()\n return profiles", "def flavors(self, details=True):\n flv = _flavor.FlavorDetail if details else _flavor.Flavor\n return list(self._list(flv, paginated=True))", "def profiles(self):\n return self._profiles", "def profiles(self):\n return self._profiles", "def flavors(self, **query):\n return self._list(_flavor.Flavor, **query)", "def get_all_profiles(self) -> List[Profile]:\n return [self.model.parse_obj(profile) for profile in self.read_records(SyncMode.full_refresh)]", "def test_list_profiles(\n api_client, enable_premium_requirement, profile_factory, user_factory\n):\n password = \"password\"\n user = user_factory(has_premium=True, password=password)\n api_client.log_in(user.primary_email.email, password)\n\n profile = profile_factory(km_user__user=user)\n\n url = f\"/know-me/users/{user.km_user.pk}/profiles/\"\n response = api_client.get(url)\n\n assert response.status_code == status.HTTP_200_OK\n assert response.json() == [\n {\n \"id\": profile.pk,\n \"url\": api_client.build_full_url(\n f\"/know-me/profile/profiles/{profile.pk}/\"\n ),\n \"created_at\": serialized_time(profile.created_at),\n \"updated_at\": serialized_time(profile.updated_at),\n \"is_private\": profile.is_private,\n \"name\": profile.name,\n \"permissions\": {\"read\": True, \"write\": True},\n \"topics_url\": api_client.build_full_url(\n f\"/know-me/profile/profiles/{profile.pk}/topics/\"\n ),\n }\n ]", "def getProfiles(context):\n\n analytics_tool = getToolByName(getSite(), 'portal_analytics')\n # short circuit if user hasn't authorized yet\n if not analytics_tool.is_auth():\n return SimpleVocabulary([])\n\n try:\n profiles = analytics_tool.makeCachedRequest('profiles')\n except error.BadAuthenticationError:\n choices = [('Please authorize with Google in the Google Analytics \\\n control panel.', None)]\n return SimpleVocabulary.fromItems(choices)\n except error.RequestTimedOutError:\n choices = [('The request to Google Analytics timed out. 
Please try \\\n again later.', None)]\n return SimpleVocabulary.fromItems(choices)\n if profiles:\n unique_choices = {}\n for entry in profiles:\n title = entry.get('name')\n title = crop(title, 40)\n tableId = entry.get('id')\n unique_choices.update({title: tableId})\n choices = unique_choices.items()\n else:\n choices = [('No profiles available', None)]\n return SimpleVocabulary([SimpleTerm(c[1], c[1], c[0]) for c in choices])", "def get_flavors(self):\n url = '%s/flavors/detail' % self.catalog['compute']\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['flavors']\n else:\n LOG.error('Get flavors failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)", "def list_flavors(self, limit=None, marker=None):\n return self._flavor_manager.list(limit=limit, marker=marker)", "def profile():\n from flickrAPI import FlickrAPI\n #flickr = FlickrAPI(key=session['resource_owner_key'], secret=session['resource_owner_secret'])\n flickr = FlickrAPI(key=request.cookies.get('oauth_token'), secret=request.cookies.get('oauth_token_secret'))\n faves = flickr.favorites_getList(user_id=\"44124394781@N01\", page=1, per_page=5, extras='owner_name')\n return str(faves)", "def profiles(self):\n with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:\n return list(filter(lambda x: x is not None, executor.map(self.profile_details, self.profiles_names())))", "def show_flavors():\n return get_flavors()", "def get_profiles(self): #video profiles\n return self.camera_media.GetProfiles()", "def list_flavors(cls):\n return cls.dbdriver.list_flavors()", "def get_list(profiles_folder, logger):\n profile_list = []\n with scandir(profiles_folder) as it:\n for entry in it:\n if entry.is_file():\n filepath = profiles_folder + entry.name\n profile = json_from_file(filepath, logger)\n if profile is not None:\n try:\n profile_list.append({\"filepath\":filepath, \"name\":profile[\"name\"], \"description\":profile[\"description\"]})\n except AttributeError:\n logger.error(\"Missing attributes in \" + filepath)\n logger.error(str(profile))\n return profile_list", "def fetch_all(profile):\n params = {}\n params[\"profile\"] = profile\n response = utils.do_request(instanceprofile, \"get\", params)\n data = utils.get_data(\"InstanceProfiles\", response)\n return data", "def profiles():\n images = get_uploaded_images()\n records = db.session.query(UserProfile).all()\n return render_template('profiles.html', images=images, records =records)", "def list_network_profiles(self, **params):\r\n return self.get(self.network_profiles_path, params=params)", "def flavors(self, **kwargs):\n if kwargs is None:\n result = self.get_list(self.cloudman.compute.flavors(),\n kind=\"flavor\")\n if \"name\" in kwargs:\n result = self.flavor(name=kwargs['name'])\n\n else:\n result = self.get_list(self.cloudman.compute.flavors(**kwargs),\n kind=\"flavor\")\n\n return result", "def get(profile):\n client = boto3client.get(\"iam\", profile)\n return client.list_instance_profiles()" ]
[ "0.7007203", "0.6612687", "0.6586266", "0.65075374", "0.64529777", "0.63493913", "0.62659556", "0.6237638", "0.6213668", "0.6186253", "0.61652416", "0.6098566", "0.6098566", "0.5971148", "0.5952143", "0.58324105", "0.5748105", "0.57290924", "0.57185817", "0.569918", "0.5692532", "0.5672456", "0.56475437", "0.55676514", "0.5543078", "0.54943883", "0.5489095", "0.54890156", "0.5488875", "0.54798806" ]
0.68769735
1
Create a new flavor_profile.
def post(self, request): kwargs = { 'flavor_profile': request.DATA.get('flavor_profile') } return create_flavor_profile(request, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_flavor_profile(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.create_flavor(\n name=data['flavor_profile']['name'],\n provider_name=data['flavor_profile']['provider_name'],\n flavor_data=data['flavor_profile']['flavor_data'],\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def _create_flavor(self, context, flavor):\n flavor_dict = flavor.__dict__\n name = self.prefix + flavor.name\n flavorid = self.prefix + flavor.id\n memory = flavor.ram\n vcpus = flavor.vcpus\n root_gb = flavor.disk\n ephemeral_gb = flavor_dict.get('OS-FLV-EXT-DATA:ephemeral', 0)\n u_swap = flavor_dict.get('swap', 0)\n rxtx_factor = flavor_dict.get('rxtx_factor', 1.0)\n is_public = flavor_dict.get('os-flavor-access:is_public', True)\n if u_swap == \"\":\n swap = 0\n else:\n swap = int(u_swap)\n\n try:\n return flavors.create(name, memory, vcpus, root_gb,\n ephemeral_gb=ephemeral_gb,\n flavorid=flavorid, swap=swap,\n rxtx_factor=rxtx_factor,\n is_public=is_public)\n except exception.InstanceExists as err:\n raise err", "def create_flavor(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n flavor = conn.load_balancer.create_flavor(\n name=data['flavor']['name'],\n flavor_profile_id=data['flavor']['flavor_profile_id'],\n description=data['flavor'].get('description'),\n enabled=data['flavor'].get('enabled'),\n )\n\n return _get_sdk_object_dict(flavor)", "def create_flavor(cls, values):\n return cls.dbdriver.create_flavor(values)", "def create(*args, **kwargs):\n\n factory = V2ProfileFactory()\n output = factory.create(export_json=True)\n click.echo(output)", "def test_create_flavor(self):\n # Create Flavor\n flavor_settings = FlavorConfig(\n name=self.flavor_name, ram=1, disk=1, vcpus=1)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))", "def create(profile, name):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = name\n return client.create_instance_profile(**params)", "def test_create_flavor_existing(self):\n # Create Flavor\n flavor_settings = FlavorConfig(\n name=self.flavor_name, ram=1, disk=1, vcpus=1)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))\n\n flavor_creator_2 = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor2 = flavor_creator_2.create()\n\n self.assertEqual(flavor.id, flavor2.id)", "def createProfile(self):\n if self.profile:\n return\n from soc.modules.gsoc.models.profile import GSoCProfile\n user = self.createUser()\n properties = {'link_id': user.link_id, 'student_info': None, 'user': user,\n 'parent': user, 'scope': self.program, 'status': 'active'}\n self.profile = seeder_logic.seed(GSoCProfile, properties)", "def create_flavor(self):\n logger.debug(\"Creating VM Flavor\")\n rc, flavor_id = self.cal.create_flavor(self.account, self.flavor)\n assert rc == RwTypes.RwStatus.SUCCESS\n\n return flavor_id", "def create_profile(sender, **kwargs):\n user = kwargs[\"instance\"]\n if kwargs[\"created\"]:\n user_profile = Profile(user=user)\n user_profile.save()", "def create_user_profile(instance, created, **_):\n if created:\n Profile.objects.create(user=instance)", "def create_profile(sender, **kw):\n user = kw['instance']\n if kw['created']:\n profile = 
UserProfile(user=user)\n profile.save()", "def update_flavor_profile(request, **kwargs):\n data = request.DATA\n flavor_profile_id = data['flavor_profile']['id']\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.update_flavor_profile(\n flavor_profile_id,\n name=data['flavor_profile'].get('name'),\n provider_name=data['flavor_profile'].get('provider_name'),\n flavor_data=data['flavor_profile'].get('flavor_data'),\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def create_pootle_profile(sender, instance, **kwargs):\n try:\n profile = instance.get_profile()\n except PootleProfile.DoesNotExist:\n profile = PootleProfile(user=instance)\n profile.save()", "async def test_create(self):\n expected = {\n 'id': 'id'\n }\n profile = {\n 'name': 'name',\n 'version': 4,\n }\n rsps = respx.post(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles') \\\n .mock(return_value=Response(200, json=expected))\n id = await provisioning_client.create_provisioning_profile(profile)\n assert rsps.calls[0].request.url == f'{PROVISIONING_API_URL}/users/current/provisioning-profiles'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'\n assert rsps.calls[0].request.content == json.dumps(profile).encode('utf-8')\n assert id == expected", "def create_profile(sender, instance, created, **kwargs):\n if created:\n profile, created = UserProfile.objects.get_or_create(user=instance)", "def create_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)", "def fusion_api_create_server_profile(self, body, api=None, headers=None, param=''):\n return self.profile.create(body, api, headers, param=param)", "def create(\n name: str,\n from_name: str = typer.Option(None, \"--from\", help=\"Copy an existing profile.\"),\n):\n\n profiles = prefect.settings.load_profiles()\n if name in profiles:\n app.console.print(\n textwrap.dedent(\n f\"\"\"\n [red]Profile {name!r} already exists.[/red]\n To create a new profile, remove the existing profile first:\n\n prefect profile delete {name!r}\n \"\"\"\n ).strip()\n )\n raise typer.Exit(1)\n\n if from_name:\n if from_name not in profiles:\n exit_with_error(f\"Profile {from_name!r} not found.\")\n\n # Create a copy of the profile with a new name and add to the collection\n profiles.add_profile(profiles[from_name].copy(update={\"name\": name}))\n else:\n profiles.add_profile(prefect.settings.Profile(name=name, settings={}))\n\n prefect.settings.save_profiles(profiles)\n\n app.console.print(\n textwrap.dedent(\n f\"\"\"\n Created profile with properties:\n name - {name!r}\n from name - {from_name or None}\n\n Use created profile for future, subsequent commands:\n prefect profile use {name!r}\n\n Use created profile temporarily for a single command:\n prefect -p {name!r} config view\n \"\"\"\n )\n )", "def create_profile(sender, **kwargs):\n\n # I import profile here cause i can't import it right in the top.\n from .profiles import Profile\n\n user = kwargs['instance']\n\n Profile.objects.get_or_create(user=user)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def create(profile, name):\n # Make sure it doesn't exist already.\n if exists(profile, name):\n msg = \"Instance profile '\" + str(name) + \"' already exists.\"\n raise ResourceAlreadyExists(msg)\n\n # Now we can create it.\n params = {}\n params[\"profile\"] = profile\n params[\"name\"] = name\n response = utils.do_request(instanceprofile, 
\"create\", params)\n\n # Check that it exists.\n instance_profile_data = polling_fetch(profile, name)\n if not instance_profile_data:\n msg = \"Instance profile '\" + str(name) + \"' not created.\"\n raise ResourceNotCreated(msg)\n\n # Send back the instance profile's info.\n return instance_profile_data", "def post(self, request):\n kwargs = {\n 'flavor': request.DATA.get('flavor')\n }\n return create_flavor(request, **kwargs)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n # create new Stellar account\n stellar.api.create_account(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n\n if created:\n user_profile = UserProfile.objects.create(user=instance)", "def make_profile_for_user(sender, instance, **kwargs):\n if kwargs['created']:\n new_profile = ImagerProfile(user=instance)\n new_profile.save()", "def create_profile_for_new_user(sender, created, instance, **kwargs):\n if created:\n profile = self.get_model('profile')(user=instance)\n profile.save()", "def create_profile(sender, instance, created, **kwargs):\n if created: \n profile, new = UserProfile.objects.get_or_create(user=instance)", "def create_user_profile(sender, instance, created, **kwargs):\n if created:\n Profile.objects.create(user=instance)" ]
[ "0.82383496", "0.71872705", "0.70762885", "0.66092575", "0.6564234", "0.6562639", "0.6437814", "0.63737965", "0.63381845", "0.633701", "0.6322403", "0.62916726", "0.62406635", "0.6178241", "0.6152721", "0.6128486", "0.61260176", "0.6100888", "0.610016", "0.6091575", "0.60757184", "0.6035446", "0.6026803", "0.6019937", "0.60013837", "0.5995973", "0.59775406", "0.5973113", "0.59604", "0.59556866" ]
0.7608589
1
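The negatives above all revolve around creating an Octavia flavor profile through openstacksdk's load_balancer proxy. As a minimal usage sketch (the cloud name, profile name, and flavor_data values are illustrative assumptions, not taken from the dataset; only the field names name, provider_name and flavor_data mirror the snippets):

import json
import openstack

conn = openstack.connect(cloud='devstack')  # assumed cloud entry in clouds.yaml

# flavor_data is a JSON-encoded string of provider-specific settings (example values assumed).
flavor_profile = conn.load_balancer.create_flavor_profile(
    name='single-amphora',
    provider_name='amphora',
    flavor_data=json.dumps({'loadbalancer_topology': 'SINGLE'}),
)
print(flavor_profile.id)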
Get a specific flavor profile.
def get(self, request, flavor_profile_id):
    conn = get_sdk_connection(request)
    flavor_profile = conn.load_balancer.find_flavor_profile(
        flavor_profile_id)
    return _get_sdk_object_dict(flavor_profile)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_flavor(self, flavor):\n return self._get(_flavor.Flavor, flavor)", "def get_flavor(self, flavor):\n return self._get(_flavor.Flavor, flavor)", "def extract_profile(self, profile_name):\r\n for galaxy in self.galaxies:\r\n try:\r\n return galaxy.__dict__[profile_name]\r\n except KeyError:\r\n pass", "def get_flavor(self, flavor_id):\n return self._flavor_manager.get(flavor_id)", "def get_flavor(name):\r\n return nova.flavors.find(name=name)", "def get(profile):\n client = boto3client.get(\"iam\", profile)\n return client.list_instance_profiles()", "def get(self, request):\n conn = get_sdk_connection(request)\n flavor_profile_list = _sdk_object_to_list(\n conn.load_balancer.flavor_profiles()\n )\n\n return {'items': flavor_profile_list}", "def get_flavor(self, flavor_id):\n url = '%s/flavors/%s' % (self.catalog['compute'], flavor_id)\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['flavor']\n else:\n LOG.error('Get flavor failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)", "def get_profile(profile_id):\n profile = Profile.objects.get(id=profile_id)\n return profile", "def get_profile(self):\n endpoint = '/profile'\n return self.get_request(endpoint)", "def get_profile(self, profile_id: str):\n\n return self._get(f\"posture/{profile_id}\")", "def _get_profile(self):\n return self.sqlfluff_config.get_section(\n (self.templater_selector, self.name, \"profile\")\n )", "def getprofile(self, *args, **kwargs):\n return _image.image_getprofile(self, *args, **kwargs)", "def flavor(self, name=None):\n return self.find(self.flavors(), name=name)", "def get_flavor(self, request, tenant_id, flavor_id):\n response_data = get_flavor(flavor_id)\n request.setResponseCode(response_data[1])\n return json.dumps(response_data[0])", "def get_flavor_by_uuid(cls, flavor_uuid):\n return cls.dbdriver.get_flavor_by_uuid(flavor_uuid)", "def getProfile(self):\n # GET /profile\n debugMain('getProfile')\n return self._genericGet('/profile')", "def get_profile(request):\n p_obj = Profile.objects.filter(hashid=request.session.get('profile', '-'))\n if len(p_obj):\n return p_obj[0]\n else:\n return None", "def details(profile, instance_profile):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = instance_profile\n return client.get_instance_profile(**params)", "def get_profile():\n if environ['DB_INSTANCE'] in request.url_root:\n profile_id = request.form['id']\n profile = ndb.Key(Profile, profile_id).get()\n if profile is not None:\n activity_data = json.loads(profile.activity_data)\n items = activity_data.get('items', [])\n item = items[0]\n return json.dumps(item)\n \n # else (not DB_INSTANCE)\n return ''", "def getProfile(self, profile):\n for network in self.networks:\n if network.getProfileName() == profile:\n return network\n else:\n raise Exception('Network with profile name \"%s\" not found' % profile)", "def update_flavor_profile(request, **kwargs):\n data = request.DATA\n flavor_profile_id = data['flavor_profile']['id']\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.update_flavor_profile(\n flavor_profile_id,\n name=data['flavor_profile'].get('name'),\n provider_name=data['flavor_profile'].get('provider_name'),\n flavor_data=data['flavor_profile'].get('flavor_data'),\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def get_profile(profile_id, token):\n url = '{}{}/'.format(PROFILE_ENDPOINT, profile_id)\n res = requests.get(url, headers={\n 
'Content-Type': 'application/json',\n 'Authorization': 'Token {}'.format(token)\n })\n return res.json()", "def retrieve_profile(self, name):\n\n url = get_url('profile details', profile=name)\n response = self._get(url)\n raise_on_error(response)\n if response.status_code == 404:\n raise QarnotGenericException(response.json()['message'])\n return Profile(response.json())", "async def retrieve(self, profile_id):\n profile = await self.get(self.profile_load.format(profile_id))\n log(\"retrieved card for {}\".format(profile['title']))\n return profile", "def get_profile(tag, platform=\"pc\", region=\"eu\"):\n #\n try:\n context = ssl._create_unverified_context()\n profile = json.load(\n const.codec(urlopen(const.URL + platform + \"/\" + region + \"/\" + tag + \"/profile\", context=context)))\n #\n if \"error\" in profile:\n raise BattleTagNotFound(profile['error'])\n exit(1)\n #\n result = pr.Profile(profile['data']['username'],\n profile['data']['level'],\n profile['data']['games']['quick']['wins'],\n profile['data']['games']['competitive']['wins'],\n profile['data']['games']['competitive']['lost'],\n profile['data']['playtime']['quick'],\n profile['data']['playtime']['competitive'],\n profile['data']['avatar'],\n profile['data']['competitive']['rank'])\n return result\n except urllib.error.URLError as e:\n print(\"An error occurred when fetching stats\\n\" + e)\n exit(1)\n except Exception as e:\n print(\"An error occurred:\\n \" + str(e))\n exit(1)", "def get_current_profile() -> Optional[Profile]:\n return _PROFILE[-1] if _PROFILE else None", "def get_profile():\n logger.debug(\"entering function get_profile\")\n response = read_user_profile()\n logger.debug(\"exiting function get_profile\")\n return jsonify(response)", "def get(self, request, flavor_id):\n conn = get_sdk_connection(request)\n flavor = conn.load_balancer.find_flavor(flavor_id)\n return _get_sdk_object_dict(flavor)", "def flavor(self):\n return self._flavor" ]
[ "0.70800406", "0.70800406", "0.6608732", "0.65897906", "0.65892553", "0.6582866", "0.6535673", "0.6515353", "0.6513358", "0.650227", "0.6494548", "0.6455354", "0.64349127", "0.6364045", "0.6280376", "0.62767255", "0.61917675", "0.6031149", "0.6012957", "0.5986818", "0.5954201", "0.5945183", "0.59091777", "0.58322126", "0.5826786", "0.5819298", "0.5806897", "0.57653713", "0.5759469", "0.5724199" ]
0.73463917
0
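A short, hedged usage sketch of the lookup pattern in this record: find_flavor_profile takes a name or ID, returns None when nothing matches, and the view above flattens the resource into a dict. The cloud name and profile name below are placeholders.

import openstack

conn = openstack.connect(cloud='devstack')  # assumed cloud entry
profile = conn.load_balancer.find_flavor_profile('single-amphora')  # name or ID
if profile is not None:
    print(profile.to_dict())  # plain mapping, analogous to _get_sdk_object_dict above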
Delete a specific flavor profile.
def delete(self, request, flavor_profile_id):
    conn = get_sdk_connection(request)
    conn.load_balancer.delete_flavor_profile(flavor_profile_id,
                                             ignore_missing=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(profile, name):\n client = boto3client.get(\"iam\", profile)\n params = {}\n params[\"InstanceProfileName\"] = name\n return client.delete_instance_profile(**params)", "def delete_flavor(self, flavor='del_flvr'):\n try:\n self.novaclient.flavors.delete(\n self.get_flavor_id(flavor))\n except Exception as e:\n print \"Flavor %s failed to delete: %s\" % (flavor, repr(e))", "def delete(self, entity, schema):\n if schema == CoreProfile.__schema__:\n raise APIBadRequest('Cannot delete the core profile.')\n\n profile = entity.profiles.get_or_404(schema=schema)\n profile.delete()\n return '', 200", "def delete(name: str):\n profiles = prefect.settings.load_profiles()\n if name not in profiles:\n exit_with_error(f\"Profile {name!r} not found.\")\n\n current_profile = prefect.context.get_settings_context().profile\n if current_profile.name == name:\n exit_with_error(\n f\"Profile {name!r} is the active profile. You must switch profiles before\"\n \" it can be deleted.\"\n )\n\n profiles.remove_profile(name)\n\n verb = \"Removed\"\n if name == \"default\":\n verb = \"Reset\"\n\n prefect.settings.save_profiles(profiles)\n exit_with_success(f\"{verb} profile {name!r}.\")", "def test_delete_profile(self):\n self.cim.delete_profile(customer_profile_id=u\"123\")", "def delete_profile(subscription_key, profile_id):\r\n\r\n helper = VerificationServiceHttpClientHelper.VerificationServiceHttpClientHelper(subscription_key)\r\n\r\n helper.delete_profile(profile_id)\r\n\r\n print('Profile {0} has been successfully deleted.'.format(profile_id))", "def delete(profile, name):\n # Make sure the instance profile exists.\n if not exists(profile, name):\n msg = \"No instance profile '\" + str(name) + \"'.\"\n raise ResourceDoesNotExist(msg)\n\n # Now try to delete it.\n params = {}\n params[\"profile\"] = profile\n params[\"name\"] = name\n response = utils.do_request(instanceprofile, \"delete\", params)\n\n # Check that it was, in fact, deleted.\n if exists(profile, name):\n msg = \"The instance profile '\" + str(name) + \"' was not deleted.\"\n raise ResourceNotDeleted(msg)", "def delete_flavor(cls, flavor_uuid):\n cls.dbdriver.delete_flavor(flavor_uuid)", "def delete_network_profile(self, profile):\r\n return self.delete(self.network_profile_path % profile)", "def delete_profile(profile_id):\n \n profile = mongo.db.profiles\n profile.delete_one({'_id': ObjectId(profile_id)})\n flash('Your profile has been deleted.', 'success')\n return redirect(url_for('dashboard'))", "def remove_vpn_profile(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n display_name = kwargs['display_name']\n profile_type = kwargs['profile_type']\n\n match profile_type:\n case \"ike\":\n profile = \"ipsec-vpn-ike-profiles\"\n case \"ipsec\":\n profile = \"ipsec-vpn-tunnel-profiles\"\n case \"dpd\":\n profile = \"ipsec-vpn-dpd-profiles\"\n case other:\n print(\"Invalid profile type\")\n sys.exit(1)\n\n json_response_status_code = delete_vpn_profile(proxy, session_token, display_name, profile)\n if json_response_status_code == 200:\n sys.exit(f\"Tier-1 VPN service {display_name} was deleted successfully\")\n else:\n print(f\"There was an error deleting Tier1 VPN service {display_name}\")\n sys.exit(1)", "def delete(self,\n ipfix_dfw_profile_id,\n ):\n return self._invoke('delete',\n {\n 'ipfix_dfw_profile_id': ipfix_dfw_profile_id,\n })", "def fusion_api_delete_server_profile(self, name=None, uri=None, param='', api=None, headers=None):\n return self.profile.delete(name=name, uri=uri, param=param, 
api=api, headers=headers)", "def delete_profile(cls, id):\n return cls.objects.filter(id == id).delete()", "def delete(self,\n dpd_profile_id,\n ):\n return self._invoke('delete',\n {\n 'dpd_profile_id': dpd_profile_id,\n })", "def fusion_api_delete_server_profile_template(self, name=None, uri=None, api=None, headers=None):\n return self.profile_template.delete(name, uri, api, headers)", "async def test_delete(self):\n rsps = respx.delete(f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id') \\\n .mock(return_value=Response(200))\n await provisioning_client.delete_provisioning_profile('id')\n assert rsps.calls[0].request.url == \\\n f'{PROVISIONING_API_URL}/users/current/provisioning-profiles/id'\n assert rsps.calls[0].request.headers['auth-token'] == 'header.payload.sign'", "def delete(self,\n ike_profile_id,\n ):\n return self._invoke('delete',\n {\n 'ike_profile_id': ike_profile_id,\n })", "def delete(self,\n tunnel_profile_id,\n ):\n return self._invoke('delete',\n {\n 'tunnel_profile_id': tunnel_profile_id,\n })", "def delete(self, flavor_id):\n\n args = {\n 'flavor_id': flavor_id\n }\n self.session.execute(CQL_DELETE, args)", "def remove_saved_profile(self, profile):\n\n self.profiles.remove(profile)\n gamedata.GameData._delete_game_data_file(path.join(self.save_dir, profile.player_name + '.yaml'))", "def delprofile(variable, account):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n\n if not account:\n account = mph.config[\"default_account\"]\n if not unlock_wallet(stm):\n return\n acc = Account(account, morphene_instance=stm)\n json_metadata = Profile(acc[\"json_metadata\"])\n\n for var in variable:\n json_metadata.remove(var)\n\n tx = acc.update_account_profile(json_metadata)\n tx = json.dumps(tx, indent=4)\n print(tx)", "def sqdel_profile(self, profile_to_del):\r\n self.cursor.execute(\"DROP TABLE IF EXISTS \" + profile_to_del)", "def delete_network_profile(arn=None):\n pass", "def delete_user_profile(IamUserArn=None):\n pass", "def delete(self, request, flavor_id):\n conn = get_sdk_connection(request)\n conn.load_balancer.delete_flavor(flavor_id,\n ignore_missing=True)", "def test_delete_payment_profile(self):\n self.cim.delete_payment_profile(\n customer_profile_id=u\"123\",\n customer_payment_profile_id=u\"432\"\n )", "def delete_flavors(self, skip_list=None):\n skip_list = skip_list or []\n for account in self.accounts:\n rc, rsp = self.cal.get_flavor_list(account)\n\n for flavor in rsp.flavorinfo_list:\n if flavor.name in skip_list:\n continue\n if self.user not in flavor.name:\n continue\n logger.info(\"Deleting Flavor: {}\".format(flavor.name))\n if self.dry_run:\n continue\n self.cal.delete_flavor(account, flavor.id)", "def delete_profile_pic(sender, instance, **kwargs):\n if instance.profile_picture:\n if instance.profile_picture.name != \"default.png\":\n path = instance.profile_picture.path\n os.remove(path)", "def delete(self, user):\n q = \"DELETE FROM profiles WHERE user=?\"\n try:\n self._query(q, (user,), fetch='none')\n except Exception as e:\n raise e" ]
[ "0.71852785", "0.7161097", "0.7055296", "0.7039321", "0.69454604", "0.69354737", "0.69235194", "0.68888295", "0.6755909", "0.6731093", "0.6639045", "0.66346174", "0.66286516", "0.6616975", "0.6566091", "0.6523191", "0.65105796", "0.64986897", "0.6495144", "0.6456143", "0.64229137", "0.63857615", "0.6363191", "0.6358382", "0.6323715", "0.6309878", "0.6301459", "0.62203187", "0.6142058", "0.612366" ]
0.8111144
0
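For completeness, a minimal sketch of the delete call used by this record's document; the connection setup and the profile ID are assumed placeholders. ignore_missing=True keeps the call a no-op when the profile is already gone, as in the view above.

import openstack

conn = openstack.connect(cloud='devstack')  # assumed cloud entry
flavor_profile_id = 'REPLACE-WITH-PROFILE-ID'  # hypothetical identifier
conn.load_balancer.delete_flavor_profile(flavor_profile_id, ignore_missing=True)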
Edit a flavor profile.
def put(self, request, flavor_profile_id):
    update_flavor_profile(request)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_flavor_profile(request, **kwargs):\n data = request.DATA\n flavor_profile_id = data['flavor_profile']['id']\n\n conn = get_sdk_connection(request)\n flavor_profile = conn.load_balancer.update_flavor_profile(\n flavor_profile_id,\n name=data['flavor_profile'].get('name'),\n provider_name=data['flavor_profile'].get('provider_name'),\n flavor_data=data['flavor_profile'].get('flavor_data'),\n )\n\n return _get_sdk_object_dict(flavor_profile)", "def edit_profile(profile_id):\n # This check is in place to avoid users trying to edit a profile via the dashboard\n # when they have not created one. If not the option is not displayed\n user = mongo.db.user.find_one({'username': session['username']})\n chck = mongo.db.profiles.find_one_or_404({'user_id': user['_id']})\n if chck: \n profile = mongo.db.profiles.find_one(\n {'_id': ObjectId(profile_id)})\n \n form=ProfileForm()\n form.headline.data = profile['headline']\n form.bio.data = profile['bio']\n form.xp.data = profile['xp']\n form.interests.data = profile['interests']\n form.stack.data = profile['stack']\n form.languages.data = profile['languages']\n form.frameworks.data = profile['frameworks']\n form.github.data = profile['github']\n form.linkedin.data = profile['linkedin']\n \n return render_template('pages/editprofile.html', form=form, profile=profile, legend='Edit your Profile')", "def edit_profile(request):\n profile = request.user.profile\n form = forms.ProfileForm(instance=profile)\n\n if request.method == 'POST':\n if settings.SYSTEM_MAINTENANCE_NO_UPLOAD:\n # Allow submitting the form, but do not allow the photo to\n # be modified.\n if 'delete_photo' in request.POST or request.FILES:\n raise ServiceUnavailable()\n\n if 'edit_profile' in request.POST:\n # Update the profile and return to the same page. Place a message\n # at the top of the page: 'your profile has been updated'\n form = forms.ProfileForm(data=request.POST, files=request.FILES,\n instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Your profile has been updated.')\n elif 'delete_photo' in request.POST:\n profile.delete_photo()\n messages.success(request, 'Your profile photo has been deleted.')\n\n if not form.errors:\n form = forms.ProfileForm(instance=profile)\n\n return render(request, 'user/edit_profile.html', {'form':form})", "def edit_server_profile(profile_obj):\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n total = len(profile_obj)\n not_exists = 0\n edited = 0\n\n for n, profile in enumerate(profile_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n logger.info(\"editing a server profile with name '%s' ...\" % profile.name)\n # checking if the profile is not existing for editing\n if not VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=False):\n logger.warn(\"server profile '%s' does not exist\" % profile.name)\n not_exists += 1\n continue\n # - Prep the auto_power_off switch\n # - By default, this keyword will power off the server if it's powered on -- unless the attribute 'auto_power_off' is explicitly set to 'false'\n auto_power_off = False if getattr(profile, 'auto_power_off', '').lower() == 'false' else True\n # open Edit SP dialog and enter data ...\n CommonOperationServerProfile.click_server_profile(profile.name)\n # { below 3 lines were to avoid a failure caused by 2 CR that had been fixed. 
leave the 3 lines here as commented in case regression issue in future\n # will remove below once 2 CRs fixed\n # EditServerProfile.select_action_edit()\n # EditServerProfile.wait_edit_server_profile_dialog_shown()\n # EditServerProfile.click_cancel_button()\n # } here is a workaround for 1st time editing server profile (sp template as well) has defect that,\n # can't close dialog by OK/Cancel button, and SAN Storage's OS Type can't be read correctly,\n # so open dialog and use Cancel button to close, then everything goes well when 2nd time open Edit dialog\n\n EditServerProfile.select_action_edit()\n EditServerProfile.wait_edit_server_profile_dialog_shown()\n BuiltIn().sleep(2)\n EditServerProfile.input_name(profile.newName) if getattr(profile, 'newName', None) is not None else None\n EditServerProfile.input_description(profile.desc) if getattr(profile, 'desc', None) is not None else None\n\n sht_selected = EditServerProfile.get_selected_server_hardware_type(profile.server)\n # 20151021 Alex Ma - discussed with Tony/Alex C and get below agreed:\n # - if 'hardwareType' is defined in test data, then will firstly select/change 'Server hardware type' from UI,\n # then select/change 'Server hardware' if 'server' is defined in test data\n # - if 'hardwareType' is not defined in test data, then will only check 'server' attribute to decide if select/change 'Server hardware' from UI\n if getattr(profile, 'hardwareType', None) is not None:\n if profile.hardwareType not in sht_selected:\n logger.warn(\"server hardware type '%s' of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.server, profile.hardwareType))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_server_hardware_type(profile.hardwareType, timeout=5, fail_if_false=False)\n elif getattr(profile, 'ref_sht_server', None) is not None:\n hardware_type = FusionUIBase.APIMethods().get_server_hardware_type_by_server_hardware_name(profile.ref_sht_server)\n if hardware_type not in sht_selected:\n logger.warn(\"server hardware type '%s' of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.server, hardware_type))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_server_hardware_type(hardware_type, timeout=5, fail_if_false=False)\n\n eg_selected = EditServerProfile.get_selected_enclosure_group(profile.server)\n if getattr(profile, 'enclgroup', None) is not None:\n if profile.enclgroup not in eg_selected:\n logger.warn(\"enclosure group '%s' of server '%s' is NOT consistent with test data '%s'\" % (eg_selected, profile.server, profile.enclgroup))\n EditServerProfile.ChangeServerHardwareTypeAndEnclosureGroup.change_enclosure_group(profile.enclgroup, timeout=5, fail_if_false=False)\n\n # Input 'Server hardware'\n # - input server name,\n # - select option from the popped out drop-down list,\n # - power off the server if the it is powered on,\n # - verify the server hardware type of the selected one is refreshed to the type name displayed in the drop-down list\n # for selecting server hardware\n if not EditServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n logger.warn(\"server hardware '%s' is not selected for editing server profile, may be wrong name, or powered on but failed to power it off. 
\"\n \"test will skip this profile '%s' and continue to edit other server profiles\" % (profile.server, profile.name))\n continue\n msg = EditServerProfile.get_error_message_from_server_hardware()\n # if not CreateServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n # logger.warn(\"server hardware '%s' is not selected for creating server profile, may be wrong name, or powered on but failed to power it off. \"\n # \"test will skip this profile '%s' and continue to create other server profiles\" % (profile.server, profile.name))\n # continue\n # msg = CreateServerProfile.get_error_message_from_server_hardware()\n if msg is not None:\n logger.warn(\"error occurred, server profile can not be edited successfully: \\n<%s>\" % msg)\n ui_lib.fail_test(msg)\n\n if getattr(profile, 'Affinity', None) is not None:\n logger.info(\"test data for 'Affinity' is found: <%s>, start setting Affinity ...\" % profile.Affinity)\n EditServerProfile.select_affinity_by_text(profile.Affinity)\n\n if getattr(profile, 'Firmware', None) is not None:\n logger.info(\"test data for 'Firmware' is found: <%s>, start setting Firmware Baseline ...\" % profile.Firmware)\n logger.debug(\"test data for 'Firmware' is found: <%s>\" % profile.Firmware, also_console=False)\n # set Firmware Baseline and force-installation option\n CommonOperationServerProfile.Firmware.set(profile.Firmware)\n\n if getattr(profile, 'Connections', None) is not None:\n logger.debug(\"test data for 'Connections' is found: <%s>\" % profile.Connections, also_console=False)\n logger.info(\"test data for 'Connections' is found, start adding connections ...\")\n # add connections\n CommonOperationServerProfile.Connection.set(profile.Connections)\n\n if getattr(profile, 'LocalStorage', None) is not None:\n logger.debug(\"test data for 'Local Storage' is found: <%s>\" % profile.LocalStorage, also_console=False)\n logger.info(\"test data for 'Local Storage' is found, start setting local storage options ... 
\")\n CommonOperationServerProfile.LocalStorage.set(profile.LocalStorage)\n\n if getattr(profile, 'SANStorage', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'SAN Storage' is found:<%s>\" % profile.SANStorage, also_console=False)\n logger.info(\"test data for 'SAN Storage' is found, start setting SAN storage options and adding volumes ...\")\n # select \"Manage SAN Storage\" checkbox\n CommonOperationServerProfile.SANStorage.set(profile.SANStorage)\n\n if getattr(profile, 'BootSettings', None) is not None:\n logger.debug(\"test data for 'Boot Settings' is found: <%s>\" % profile.BootSettings, also_console=False)\n logger.info(\"test data for 'Boot Settings' is found, start setting its options ...\")\n CommonOperationServerProfile.BootSettings.set(profile, server_hardware_type=sht_selected)\n\n # 'BIOSSettings' part is ignored since BIOS setting is complicated to verify the result, therefor\n # might be better to use a dedicated tool to do this part automation separately\n if getattr(profile, 'BIOSSettings', None) is not None:\n logger.debug(\"test data for 'BIOS Settings' is found: <%s>\" % profile.BIOSSettings, also_console=False)\n logger.info(\"test data for 'BIOS Settings' is found, start setting its options ...\")\n CommonOperationServerProfile.BIOSSettings.set(profile.BIOSSettings)\n\n if getattr(profile, 'Advanced', None) is not None:\n BuiltIn().sleep(3)\n logger.debug(\"test data for 'Advanced' is found: <%s>\" % profile.Advanced, also_console=False)\n logger.info(\"test data for 'Advanced' is found, start setting its options ...\")\n # select \"MAC/WWN/Serial/Hide unused FlexNICs\" radio box\n EditServerProfile.Advanced.set(profile)\n\n EditServerProfile.click_ok_button()\n # logger.debug(\"sleeping for 8 seconds ...\")\n # BuiltIn().sleep(8)\n # if EditServerProfile.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfile.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being created. \"\n \"test will skip this profile '%s' and continue to create other server profiles\" % profile.name)\n continue\n\n BuiltIn().sleep(2)\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n if EditServerProfile.wait_edit_server_profile_dialog_disappear(timeout=300) is True:\n if getattr(profile, 'wait_complete', \"True\").lower() != \"false\":\n FusionUIBase.show_activity_sidebar()\n profile_name = profile.newName if getattr(profile, 'newName', None) is not None else profile.name\n timeout = int(getattr(profile, 'timeout', \"3600\"))\n FusionUIBase.wait_activity_action_ok(profile_name, 'Update', timeout=timeout, fail_if_false=True)\n FusionUIBase.show_activity_sidebar()\n fail_if_not_ok = not getattr(profile, 'IgnoreWaitForStatusOK', '').lower() == 'true'\n # control whether to stop the case when server profile status is not ok.\n CommonOperationServerProfile.wait_server_profile_status_ok(profile_name, timeout=500, fail_if_false=fail_if_not_ok)\n logger.info(\"edited server profile '%s' successfully\" % profile_name)\n edited += 1\n else:\n logger.info(\"edit server profile '%s' successfully but no need to wait for task complete\" % profile.name)\n edited += 1\n else:\n logger.warn(\"'wait_edit_server_profile_dialog_disappear' = FALSE, skip to next profile ... 
\")\n EditServerProfile.click_cancel_button()\n continue\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile to edit! all %s server profile(s) is NOT existing, test is considered FAILED\" % not_exists)\n return False\n else:\n if edited < total:\n logger.warn(\"not all of the server profile(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s not-existing server profile(s) is skipped being edited, test is considered FAILED\" % not_exists)\n return False\n else:\n ui_lib.fail_test(\"%s not-existing server profile(s) is skipped being edited, %s profile(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n\n logger.info(\"all of the server profile(s) is successfully edited - %s out of %s \" % (edited, total))\n return True", "def edit_profile(request):\n profile_to_edit = get_object_or_404(UserProfile, user=request.user)\n if request.method == \"POST\":\n form = UserProfileForm(request.POST, instance=profile_to_edit)\n if form.is_valid:\n form.save()\n messages.success(request, \"Profile updated succesfully\")\n return redirect('profile')\n else:\n messages.error(request, \"Updated failed. \\\n Please ensure the form is valid\")\n else:\n profile_form = UserProfileForm(instance=profile_to_edit)\n template = 'profiles/edit_profile.html'\n context = {\n 'form': profile_form,\n }\n return render(request, template, context)", "def fusion_api_edit_server_profile(self, body, uri, api=None, headers=None, param=''):\n return self.profile.update(body, uri, api, headers, param=param)", "def edit_flavor(thing, request, form_class=FlavorEditForm):\n\n form = form_class(csrf_context=request.session)\n form.summary.data = thing.summary\n form.description.data = thing.description\n form.notes.data = thing.notes\n form.edit_time.data = timestamp(thing.effect)\n\n if hasattr(form, 'energy'):\n form.energy.data = thing.energy\n\n return {'form': form, 'thing': thing}", "def putProfile(profileType,value):\n # PUT /profile/$profileType\n pass", "def edit_profile(request):\n form = ProfileForm(instance=request.user.profile)\n if request.method == \"POST\":\n form = ProfileForm(data=request.POST, files=request.FILES,\n instance=request.user.profile)\n if form.is_valid():\n form.save()\n return redirect('profile')\n return render(request, 'accounts/forms.html', {'form': form})", "def test_update_profile(self):\n self.cim.update_profile(\n customer_id=u\"222\",\n description=u\"Foo bar baz quz\",\n email=u\"[email protected]\",\n customer_profile_id=u\"122\"\n )", "def __profileChanged(self, inst, topic, value):\n\n old, new = value\n\n if new is orthoeditprofile.OrthoEditProfile:\n self.__addEditMenu()\n elif old is orthoeditprofile.OrthoEditProfile:\n self.__removeEditMenu()", "def edit_profile(request):\r\n\r\n user = request.user\r\n profile = Profile.objects.for_user(user)\r\n\r\n if request.method != 'POST':\r\n profile_form = ProfileForm(instance=profile)\r\n user_form = UserForm(instance=user)\r\n else:\r\n profile_form = ProfileForm(request.POST, instance=profile)\r\n user_form = UserForm(request.POST, instance=user)\r\n\r\n if profile_form.is_valid() and user_form.is_valid():\r\n profile_form.save()\r\n user_form.save()\r\n\r\n return HttpResponseRedirect(reverse('epic.core.views.view_profile', kwargs={}))\r\n\r\n return render_to_response(\r\n 'core/edit_profile.html',\r\n {'profile_form': profile_form, 'user_form': user_form,},\r\n 
context_instance=RequestContext(request))", "def edit_profile(request):\n if request.method == 'POST':\n form = EditProfileForm(request.POST, instance=request.user)\n\n if form.is_valid():\n form.save()\n messages.success(request, 'Profile updated successfully.')\n return redirect('profile')\n\n else:\n messages.error(request, 'Invalid entry, please try again.')\n return redirect('edit_profile')\n else:\n form = EditProfileForm(instance=request.user)\n return render(request, 'accounts/edit_profile.html', {'form': form})", "def update_flavor(cls, flavor_uuid, values):\n return cls.dbdriver.update_flavor(flavor_uuid, values)", "def post(self, request):\n kwargs = {\n 'flavor_profile': request.DATA.get('flavor_profile')\n }\n return create_flavor_profile(request, **kwargs)", "def show_profile(request, profile_uuid):\n context = context_assign_user(request.user)\n context['profile_form'] = ProfileForm(instance=context['current_user'])\n if request.method == 'POST':\n form = ProfileForm(request.POST)\n if form.is_valid():\n Profile.objects.filter(pk=context['current_user'].id).update(bio=request.POST['bio'], palette=request.POST['palette'], iex_api_key=request.POST['iex_api_key'])\n messages.success(request, 'Your settings have been saved.')\n return redirect('dash:dashboard')\n errors = form.errors\n form = ProfileForm(request, request.POST)\n messages.warning(request, f\"There's a problem with the form: {errors}\")\n return render(request, 'dash/users/show_profile.html', context)", "def fusion_api_edit_server_profile_template(self, body, uri, api=None, headers=None):\n return self.profile_template.update(body, uri, api, headers)", "def edit_profile(self, name, username, email):\n return self.app.post('/_editProfile', data = dict(\n name = name,\n username = username,\n email = email\n ), follow_redirects = True)", "def edit_profile(request, pk=None):\n profiledetails = UserProfile.objects.filter(user=request.user).first()\n if UserProfile.objects.filter(user=request.user or request.user.is_superuser):\n\n if request.method == \"POST\":\n profile_details_form = UserProfileForm(request.POST, request.FILES, instance=profiledetails)\n if profile_details_form.is_valid():\n profiledetails = profile_details_form.save()\n messages.success(request, 'Your profile has been updated!')\n return redirect(user_profile)\n else:\n profile_details_form = UserProfileForm(instance=profiledetails)\n else:\n return HttpResponseForbidden()\n \n return render(request, 'newprofiledetails.html', {'profile_details_form': profile_details_form})", "def test_editProfile(self):\n\t\turl = \"/profiles/1/\"\n\t\tdata = { \"name\" : \"testName\", \"surname\" : \"testSurname\" }\n\t\tresponse = self.client.patch(url, data, format='json')\n\t\tself.assertEqual(response.status_code, status.HTTP_200_OK)\n\t\tself.assertEqual(response.data[\"name\"], \"testName\")\n\t\tself.assertEqual(response.data[\"surname\"], \"testSurname\")", "def update_policy_profile(self, profile, body=None):\r\n return self.put(self.policy_profile_path % (profile), body=body)", "def flavor(self, flavor):\n self._flavor = flavor", "def update_profile():\n logger.debug(\"entering function update_profile\")\n response = update_user_profile(request.json)\n logger.debug(\"exiting function update_profile\")\n return jsonify(response)", "def edit_profile(request, userid):\n woofer_user = User.objects.get(id=userid)\n current_profile = Profile.objects.get(user=woofer_user)\n if woofer_user.id != request.user.id:\n return 
HttpResponseRedirect(reverse('view-profile', args=[userid]))\n\n if request.method == 'POST':\n form = ProfileForm(request.POST)\n if form.is_valid():\n new_profile = form.save(commit=False)\n # copy the ID of the User's current profile to the new profile so\n # Django performs an update when we call .save()\n new_profile.id = current_profile.id\n new_profile.user = woofer_user\n new_profile.save()\n return HttpResponseRedirect(reverse('view-profile', args=[userid]))\n else:\n form = ProfileForm(instance=current_profile)\n\n return render(request, 'woofer/show_form.html', {\n 'form' : form,\n 'message' : None,\n 'form_action' : reverse('edit-profile', args=[userid]),\n 'title' : \"Edit Profile\"\n })", "def set_flavor(self, oid, flavor):\n data = {\n \"resize\": {\n \"flavorRef\": flavor\n }\n }\n path = '/servers/%s/action' % oid\n res = self.client.call(path, 'POST', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Pause openstack server: %s' % truncate(res))\n return res[0]", "def edit_server_profile_for_dl(profile_obj):\n # This keyword is deprecated, please do not use.\n FusionUIBase.navigate_to_section(SectionType.SERVER_PROFILES, time_for_loading=5)\n\n total = len(profile_obj)\n not_exists = 0\n edited = 0\n\n for n, profile in enumerate(profile_obj):\n logger.info(\"{2} No: {0} --- Total: {1} {2}\".format((n + 1), total, '-' * 14))\n\n logger.info(\"editing a server profile with name '%s' ...\" % profile.name)\n if not VerifyServerProfile.verify_server_profile_exist(profile.name, fail_if_false=False):\n logger.warn(\"server profile '%s' does not exist\" % profile.name)\n not_exists += 1\n continue\n # - Prep the auto_power_off switch\n # - By default, this keyword will power off the server if it's powered on -- unless the attribute 'auto_power_off' is explicitly set to 'false'\n auto_power_off = False if getattr(profile, 'auto_power_off', '').lower() == 'false' else True\n # open Edit SP dialog and enter data ...\n CommonOperationServerProfile.click_server_profile(profile.name)\n EditServerProfile.select_action_edit()\n EditServerProfile.wait_edit_server_profile_dialog_shown()\n\n EditServerProfile.input_name(profile.newName)\n EditServerProfile.input_description(profile.desc)\n # Input 'Server hardware'\n # - input server name,\n # - select option from the popped out drop-down list,\n # - verify the server hardware is refreshed to the type name displayed in the drop-down list for selecting server hardware\n if not EditServerProfile.input_select_server_hardware(profile.server, auto_power_off=auto_power_off):\n logger.warn(\"server hardware '%s' is not selected for editing server profile, may be wrong name, or powered on but failed to power it off. 
\"\n \"test will skip this profile '%s' and continue to edit other server profiles\" % (profile.server, profile.name))\n continue\n msg = EditServerProfile.get_error_message_from_server_hardware()\n if msg is not None:\n logger.warn(\"error occurred, server profile can not be edited successfully\")\n ui_lib.fail_test(msg)\n sht_selected = EditServerProfile.get_selected_server_hardware_type(profile.server)\n if profile.hardwaretype not in sht_selected:\n logger.warn(\"the server hardware type of server '%s' is NOT consistent with test data '%s'\" % (sht_selected, profile.hardwaretype))\n # set boot mode if attribute 'manageBootMode' is true - only for Gen 9 (or later) server:\n FusionUIBase.select_view_by_name('Boot Settings')\n if 'gen9' in sht_selected.lower():\n logger.info(\"setting 'Boot mode' for Gen 9 specially ...\")\n if getattr(profile, 'manageBootMode', '').lower() == 'true':\n CommonOperationServerProfile.BootSettings.tick_manage_boot_mode()\n CommonOperationServerProfile.BootSettings.select_boot_mode_by_text(profile.bootMode) if hasattr(profile, 'bootMode') else None\n if getattr(profile, 'bootMode', '').lower() == 'legacy bios':\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n else:\n CommonOperationServerProfile.BootSettings.set_non_legacy_bios_mode_boot_order(profile, hardware_type=sht_selected)\n else:\n CommonOperationServerProfile.BootSettings.untick_manage_boot_mode()\n else:\n CommonOperationServerProfile.BootSettings.set_legacy_bios_mode_boot_order(profile)\n\n EditServerProfile.click_ok_button()\n # if EditServerProfile.get_error_message_from_boot_mode() is not None:\n if CommonOperationServerProfile.BootSettings.get_error_message_from_boot_mode() is not None:\n logger.warn(\"test data may be wrongly defined for 'Boot mode', which caused an error that blocks profile being edited. \"\n \"Test will skip this profile '%s' and continue to edit other server profiles\" % profile.name)\n continue\n\n status, msg = FusionUIBase.get_error_message_from_dialog(timeout=10)\n if status is True:\n logger.warn(\"unexpected error occurred: %s\" % msg)\n ui_lib.fail_test(msg)\n\n EditServerProfile.wait_edit_server_profile_dialog_disappear(timeout=180)\n FusionUIBase.show_activity_sidebar()\n FusionUIBase.wait_activity_action_ok(profile.newName, 'Update', timeout=300, fail_if_false=False)\n FusionUIBase.show_activity_sidebar()\n CommonOperationServerProfile.wait_server_profile_status_ok(profile.newName, timeout=180, fail_if_false=False)\n logger.info(\"edited server profile '%s' successfully\" % profile.newName)\n edited += 1\n\n logger.info(\"{0} == Summary == {0}\".format('-' * 14))\n if total - not_exists == 0:\n logger.warn(\"no server profile to edit! 
all %s server profile(s) is NOT existing, hence test is considered PASS\" % not_exists)\n return True\n else:\n if edited < total:\n logger.warn(\"not all of the server profile(s) is successfully edited - %s out of %s edited \" % (edited, total))\n if edited + not_exists == total:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, hence test is considered PASS\" % not_exists)\n return True\n else:\n logger.warn(\"%s non-existing server profile(s) is skipped being edited, but %s profile(s) left is failed being edited \" % (not_exists, total - edited - not_exists))\n return False\n\n logger.info(\"all of the server profile(s) is successfully edited - %s out of %s \" % (edited, total))\n return True", "def edit_user():\n if CURR_USER_KEY in session:\n user = g.user\n form = ProfileEditForm(obj=user)\n\n if form.validate_on_submit():\n user.first_name = form.first_name.data\n user.last_name = form.last_name.data\n user.description = form.description.data\n user.email = form.email.data\n user.image_url = form.image_url.data or \"/static/images/default-pic.png\"\n\n db.session.commit()\n\n flash(\"Profile edited.\")\n return redirect(\"/profile\")\n\n return render_template('/profile/edit-form.html', form=form)\n else:\n return redirect('/login')", "def editProfile():\n form = EditProfileForm(request.form)\n if request.method == \"GET\":\n return render_template(\"/pages/editprofile.html\", form=form)\n else:\n choose = True\n section = form.category.data\n return redirect(url_for(\"editProfileSection\", section=section))", "def viewprofile():\n user = current_user\n form = UserUpdateForm(obj=user)\n form.populate_obj(user)\n if form.validate_on_submit():\n form.populate_obj(user)\n\n db.session.commit()\n\n flash('You have successfully edited your profile!')\n return render_template('user/user.html', title=\"View Profile\",\n user=user, form=form, action='Edit')", "def edit():" ]
[ "0.74378663", "0.63604933", "0.6213224", "0.6136512", "0.612196", "0.611884", "0.60730594", "0.6056586", "0.6027391", "0.59156984", "0.58968705", "0.5880171", "0.58693844", "0.58434886", "0.58420974", "0.5836719", "0.5831102", "0.5829859", "0.5799817", "0.5710223", "0.56545454", "0.55468726", "0.55408126", "0.5535745", "0.5533902", "0.55201423", "0.5461727", "0.5413942", "0.54058707", "0.5404643" ]
0.8109677
0
List of availability zones for the current project. The listing result is an object with property "items".
def get(self, request):
    conn = get_sdk_connection(request)
    availability_zone_list = _sdk_object_to_list(
        conn.load_balancer.availability_zones()
    )
    return {'items': availability_zone_list}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_availability_zones(self, context, filters=None, fields=None,\n sorts=None, limit=None, marker=None,\n page_reverse=False):", "def availability_zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"availability_zones\")", "def compute_zones(self):\n path = '/os-availability-zone/detail'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack availability zone: %s' % truncate(res))\n return res[0]['availabilityZoneInfo']", "def availability_zones(self, details=False):\n if details:\n az = _availability_zone.AvailabilityZoneDetail\n else:\n az = _availability_zone.AvailabilityZone\n return list(self._list(az, paginated=False))", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")", "def get_zones(self, context):\n # handling zones method in RPC\n response = self.dns_manager.get_zones(context)\n return response", "def list_zones(self):\n\n return [zone[\"zone\"] for zone in list(self._zones.values())]", "def ex_list_availability_zones(self, only_available=True):\n params = {'Action': 'DescribeAvailabilityZones'}\n\n if only_available:\n params.update({'Filter.0.Name': 'state'})\n params.update({'Filter.0.Value.0': 'available'})\n\n params.update({'Filter.1.Name': 'region-name'})\n params.update({'Filter.1.Value.0': self.region_name})\n\n result = self.connection.request(self.path,\n params=params.copy()).object\n\n availability_zones = []\n for element in self._findall(result, 'availabilityZoneInfo/item'):\n name = self._findtext(element, 'zoneName')\n zone_state = self._findtext(element, 'zoneState')\n region_name = self._findtext(element, 'regionName')\n\n availability_zone = ExEC2AvailabilityZone(\n name=name,\n zone_state=zone_state,\n region_name=region_name\n )\n availability_zones.append(availability_zone)\n\n return availability_zones", "def list_zones(self):\n data = self._paginated_request(\"/v2/domains\", \"domains\")\n return list(map(self._to_zone, data))", "def describe_availability_zones(\n self,\n request: dds_20151201_models.DescribeAvailabilityZonesRequest,\n ) -> dds_20151201_models.DescribeAvailabilityZonesResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_availability_zones_with_options(request, runtime)", "def list_zones(self):\n action = \"/api_dns_list.asp\"\n if self.reseller_id is not None:\n action = \"/api_dns_list_reseller.asp\"\n zones = self.connection.request(action)\n if len(zones.body) == 0:\n return []\n else:\n return self._to_zones(zones.body)", "def list_zones(self, kwargs):\n verbose = kwargs.get(\"verbose\", False)\n\n if not verbose:\n attributes = [\"dc\", \"objectClass\"]\n else:\n attributes = ALL\n\n self.display(\n self.engine.query(\n self.engine.ZONES_FILTER(),\n attributes, 
base=','.join([\"CN=MicrosoftDNS,DC=DomainDNSZones\", self.engine.base_dn])\n ),\n verbose\n )", "def test_aws_service_api_availability_zones_get(self):\n pass", "async def get_zones(self) -> list[str] | None:\n data: list[dict[str, str]] | None = await self.api.get(self._endpoint())\n\n if data is None:\n return None\n\n return [zone[\"name\"] for zone in data]", "def get_all_zones():\n cf = CloudFlare.CloudFlare(raw=True)\n page_number = 0\n total_pages = 1\n all_zones = []\n while page_number < total_pages:\n page_number += 1\n raw_results = cf.zones.get(params={'per_page':100, 'page':page_number})\n zones = raw_results['result']\n all_zones += zones\n total_pages = raw_results['result_info']['total_pages']\n return all_zones", "def get_athlete_zones(self):\n pass", "def index(self, req):\n # Ask the ZoneManager in the Scheduler for most recent data,\n # or fall-back to the database ...\n items = api.get_zone_list(req.environ['nova.context'])\n items = common.limited(items, req)\n items = [_scrub_zone(item) for item in items]\n return dict(zones=items)", "def get_zones(self, latitude, longitude):\n result = self.__request(\n \"GET\",\n \"https://api.voiapp.io/v1/zones?lat={}&lng={}\".format(latitude, longitude),\n )\n if result and \"zones\" in result:\n return result[\"zones\"]", "def cloudflare_waf_zone_list_request(self, args: dict = None, page: int = None, page_size: int = None) -> Dict[str, Any]:\n if args is None:\n args = {}\n\n params = remove_empty_elements({\n 'match': args.get('match'),\n 'name': args.get('name'),\n 'account_name': args.get('account_name'),\n 'order': args.get('order'),\n 'status': args.get('status'),\n 'account_id': args.get('account_id'),\n 'direction': args.get('direction'),\n 'page': page,\n 'per_page': page_size\n })\n return self._http_request(\n method='GET',\n url_suffix='zones',\n params=params)", "def zones(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"zones\")", "def get_router_availability_zones(self, router):\n return [self._get_router_az_obj(router).name]", "def list_zones(pattern=None):\n zlist = []\n cmd = [CMD_ZONEADM, \"list\", \"-pc\"]\n proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)\n stdout, stderr = proc.communicate()\n ret = proc.returncode\n\n if ret:\n raise OSError(\"%s exited with exit code %d. stderr: '%s.'\" %\n (str(cmd), ret, stderr))\n\n def set_attr(zone, attr, line):\n \"\"\"just a helper function \"\"\"\n zone.set_attr(attr, line[attr])\n\n # line format:\n # zoneid:zonename:state:zonepath:uuid:brand:ip-type:r/w:file-mac-profile\n for line in str(stdout).split(\"\\n\"):\n if not line:\n continue\n line = line.split(\":\")\n\n if pattern and not(re.match(pattern, line[ZONE_ENTRY['ZNAME']])):\n continue # skip entries that does not pass regexp\n\n tmp_zone = Zone(line[ZONE_ENTRY['ZNAME']])\n for item in ZONE_ENTRY.values():\n set_attr(tmp_zone, item, line)\n\n zlist.append(tmp_zone)\n\n return zlist", "def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"zones\")" ]
[ "0.7386868", "0.70580536", "0.7010989", "0.6928745", "0.6770369", "0.6770369", "0.6770369", "0.6770369", "0.6770369", "0.6770369", "0.66694915", "0.6653675", "0.66517156", "0.6599054", "0.6586983", "0.65651035", "0.6550308", "0.6516705", "0.65091455", "0.6504418", "0.6469357", "0.64565176", "0.6378176", "0.620628", "0.61076456", "0.6065016", "0.6065016", "0.6038353", "0.59875256", "0.5985202" ]
0.7326057
1
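A small sketch of how the listing in this record's document materializes on the SDK side; the cloud name is an assumption, and the list comprehension stands in for the _sdk_object_to_list helper referenced above (the proxy call yields AvailabilityZone resources lazily).

import openstack

conn = openstack.connect(cloud='devstack')  # assumed cloud entry
items = [az.to_dict() for az in conn.load_balancer.availability_zones()]
print({'items': items})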
Unfreeze layers. Use something like "Optim([p for p in self.parameters() if p.requires_grad])" to be sure.
def unfreeze_layers(model: torch.nn.Module) -> None:
    for param in model.parameters():
        param.requires_grad = True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unfreeze(net):\n for p in net.parameters():\n p.requires_grad_(True)\n return net", "def unfreeze(self) -> None:\n self._set_requires_grad(True)", "def __freeze(self):\r\n features_layer = self._model._net\r\n for param in features_layer.parameters():\r\n param.requires_grad = False", "def unfreeeze_all_layers(self):\n # Unfreeeze\n logger.info('MODEL: Unfreeze all layers.')\n for i in range(len(self.model.layers)):\n self.model.layers[i].trainable = True\n \n # Compile model\n logger.info('MODEL: Compiling...')\n self.model.compile(optimizer = Adam(lr=1e-4),\n loss={'yolo_loss': lambda y_true, y_pred: y_pred})", "def freeze(self):\n # Freeze.\n self.frozen = True\n for param in self.parameters():\n param.requires_grad = False", "def unfreeze(self, exclude_range=None):\n # make all layers trainable\n for i, layer in enumerate(self.model.layers):\n layer.trainable = True\n if exclude_range:\n for i, layer in enumerate(self.model.layers[:exclude_range]):\n layer.trainable = False\n self._recompile()\n return", "def freeze_layers(model: torch.nn.Module) -> None:\n for param in model.parameters():\n param.requires_grad = False", "def freeze_model(self):\n # BN layers need to be freezed explicitly since they cannot be freezed via '.requires_grad=False'\n for module in self.modules():\n if isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)):\n module.eval()\n \n # freeze all parameters\n for param in self.parameters():\n param.requires_grad = False", "def _freeze_stages(self) -> None:\n if self.frozen_stages >= 0:\n if self.deep_stem:\n self.stem.eval()\n for param in self.stem.parameters():\n param.requires_grad = False\n else:\n self.norm1.eval()\n for m in [self.conv1, self.norm1]:\n for param in m.parameters():\n param.requires_grad = False\n\n for i in range(1, self.frozen_stages + 1):\n m = getattr(self, f'layer{i}')\n m.eval()\n for param in m.parameters():\n param.requires_grad = False", "def freeze(net):\n for p in net.parameters():\n p.requires_grad_(False)\n return net", "def freeze_model(model):\n for param in model.parameters():\n param.requires_grad = False", "def freeze(self) -> None:\n self._set_requires_grad(False)\n for param in self.model.fc.parameters():\n param.requires_grad = True", "def unfreeze_named_layers(model, keys: Tuple = ()):\n for key in keys:\n for name, param in model.named_parameters():\n if name.startswith(key):\n param.requires_grad = True", "def _set_freeze_layers(self):\n for layer in self.encoder.layers[:self.freeze_layers]:\n layer.trainable = False", "def unroll(self) -> None:\n\n for flat in self.params:\n if self.global_ref_rank != self.global_rank and self.gradients_based:\n # this rank is not the owner, release the grad\n flat.param.grad = None\n else:\n if self.gradients_based:\n # this rank is the owner, unroll the results\n assert flat.param.grad is not None\n\n flat.param.grad.data.copy_(\n self.buffer[flat.start : flat.stop].view_as(flat.param.data), non_blocking=True\n )\n else:\n flat.param.data.copy_(\n self.buffer[flat.start : flat.stop].view_as(flat.param.data), non_blocking=True\n )\n\n self.reset()", "def freeze_params(m):\r\n for p in m.parameters():\r\n p.requires_grad = False", "def freeze_params(m):\n for p in m.parameters():\n p.requires_grad = False", "def freeze(self):\n for p in self.parameters():\n p.requires_grad = False\n FrozenBatchNorm2d.convert_frozen_batchnorm(self)\n return self", "def freeze_img_branch_params(self):\n if self.with_img_bbox_head:\n for param in self.img_bbox_head.parameters():\n param.requires_grad = 
False\n if self.with_img_backbone:\n for param in self.img_backbone.parameters():\n param.requires_grad = False\n if self.with_img_neck:\n for param in self.img_neck.parameters():\n param.requires_grad = False\n if self.with_img_rpn:\n for param in self.img_rpn_head.parameters():\n param.requires_grad = False\n if self.with_img_roi_head:\n for param in self.img_roi_head.parameters():\n param.requires_grad = False", "def freeze_params(model: nn.Module):\n for par in model.parameters():\n par.requires_grad = False", "def freeze_parameters(module: nn.Module):\n for p in module.parameters():\n p.requires_grad = False", "def freeze_params(module: nn.Module):\n for _, p in module.named_parameters():\n p.requires_grad = False", "def freeze(self):\n self.collect_params().setattr('grad_req', 'null')", "def freeze_highperf(self):\n \n self.freeze_model()\n\n # defreeze low-perf-exclusive parameters and BNs\n for i in range(1,5):\n layer = getattr(self, \"layer\"+str(i))\n if self.block_type == 'Bottleneck':\n layer[0].conv3_skip.weight.requires_grad = True\n layer[0].bn3_skip.train()\n elif self.block_type == 'BasicBlock':\n layer[0].conv2_skip.weight.requires_grad = True\n layer[0].bn2_skip.train()\n else:\n print(\"[Error] Unknown block type\")", "def detach_(self):\n self.requires_grad = False\n return self", "def freeze_lowperf(self):\n \n self.freeze_model()\n\n # defreeze params of only being used by the high-performance model\n for i in range(1,5):\n layer = getattr(self, \"layer\"+str(i))\n if self.block_type == 'Bottleneck':\n layer[0].conv3.weight.requires_grad = True\n layer[0].bn3.train()\n elif self.block_type == 'BasicBlock':\n layer[0].conv2.weight.requires_grad = True\n layer[0].bn2.train()\n else:\n print(\"[Error] Unknown block type\")\n\n\n num_skip = len(layer)//2\n for j in range(1, num_skip+1):\n for param in layer[j].parameters():\n param.requires_grad = True\n layer[j].train()", "def _freeze_base_model(self, modality, freeze_mode):\n\n if freeze_mode == \"all\":\n print(\"Freezing the Base model.\")\n for param in getattr(self, \"Base_{}\".format(modality)).parameters():\n param.requires_grad = False\n elif freeze_mode == \"partialbn\" and self.base_model_name == \"bninception\":\n print(\n \"Freezing the batchnorms of Base Model {} except first or new layers.\".format(\n modality\n )\n )\n for mod_no, mod in enumerate(\n getattr(self, \"Base_{}\".format(modality)).children()\n ):\n if isinstance(mod, torch.nn.BatchNorm2d):\n if (modality == \"Audio\" and mod_no > 6) or mod_no > 1:\n mod.weight.requires_grad = False\n mod.bias.requires_grad = False", "def unfreeze_up_to(self, module_name: Text) -> List[Text]:\n return self.__up_to(module_name, requires_grad=True)", "def dfs_freeze(self, model):\n for name, child in model.named_children():\n for param in child.parameters():\n param.requires_grad = False if self.freeze_flag else True\n self.dfs_freeze(child)", "def _freeze_tgt_networks(self):\n q1 = zip(self.tgt_q1.parameters(), self.soft_q1.parameters())\n q2 = zip(self.tgt_q2.parameters(), self.soft_q2.parameters())\n\n # Copy parameters\n for target_param, param in q1:\n target_param.data.copy_(param.data)\n for target_param, param in q2:\n target_param.data.copy_(param.data)\n\n # Freeze gradients\n for param in self.tgt_q1.parameters():\n param.requires_grad = False\n for param in self.tgt_q2.parameters():\n param.requires_grad = False" ]
[ "0.8058317", "0.77148205", "0.75488114", "0.7465948", "0.7341107", "0.73213917", "0.71995574", "0.7171278", "0.7010951", "0.69333744", "0.67284983", "0.66955847", "0.66885394", "0.6636516", "0.65704834", "0.6559106", "0.65190375", "0.6514125", "0.6377716", "0.63764316", "0.6345365", "0.62732255", "0.6269841", "0.623551", "0.6156299", "0.6111602", "0.6105829", "0.60959095", "0.6068241", "0.60237736" ]
0.7979869
1
Log all images in a dict as images to TensorBoard.
def log_images(self, image_dict, iterations, step_in_epoch=None, cur_epoch=None, save_to_outputs=True, include_iter=False): if len(image_dict) != 0: thing1 = f'/{step_in_epoch}' if step_in_epoch is not None else "" thing2 = f'@Epoch {cur_epoch}' if cur_epoch is not None else "" self.log_stdout(f'Step {iterations}{thing1}{thing2} Images') for k, v in image_dict.items(): np_image, plt_figure = v if save_to_outputs: if include_iter: img_name = k.replace(':', '_') + f'_{iterations}.png' else: img_name = k.replace(':', '_') + '.png' self.log_image(plt_figure, img_name) k = k.replace(':', '/') # Group in TensorBoard. self.summary_writer.add_image(k, np_image, iterations, dataformats='HWC') self.sync_summary_writer()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_images(self, step, images):\n\n # Save\n with self.summary_writer.as_default():\n for name, batch in images.items():\n image = batch[0]\n image = tf.expand_dims(image, axis=0)\n tf.summary.image(name, image, step)", "def write_weights_images(self):\n for weight_name, weight in self._weights.items():\n self._write_weight_image_to_tensorboard(\n name=f\"{self._Sections.WEIGHTS}/{weight_name}\",\n weight=weight,\n step=self._epochs,\n )", "def _process_batch(tensor_dict, sess, batch_index, counters):\n try:\n result_dict = sess.run(tensor_dict)\n counters['success'] += 1\n except tf.errors.InvalidArgumentError:\n logging.info('Skipping image')\n counters['skipped'] += 1\n return {}\n global_step = tf.train.global_step(sess, tf.train.get_global_step())\n eval_util.save_values_matrix(sess, result_dict)\n if batch_index < eval_config.num_visualizations:\n tag = 'image-{}'.format(batch_index)\n eval_util.visualize_detection_results(\n\t sess,\n result_dict,\n tag,\n global_step,\n categories=categories,\n summary_dir=eval_dir,\n export_dir=eval_config.visualization_export_dir,\n show_groundtruth=eval_config.visualization_export_dir)\n# print (result_dict)\n return result_dict", "def log_tensorboard(self, value_dict, step):\n for key, value in value_dict.items():\n summary = tf.Summary(value=[tf.Summary.Value(tag=key, simple_value=value)])\n self.writer.add_summary(summary, step)", "def dump_to_tensorboard(self, log_path: str) -> None:\n LOGGER.info(f'Log evaluations in tensorboard.')\n writer = SummaryWriter(log_dir=log_path)\n for key, value in self.eval_dict:\n writer.add_scalar(key, value)", "def save_test_images(images):\n for description, img in images.items():\n save_to_image(img, description)\n save_to_netcdf(img, description)", "def _add_to_tfrecord(images, labels, tfrecord_writer):\n\n shape = (_IMAGE_SIZE, _IMAGE_SIZE, _NUM_CHANNELS)\n with tf.Graph().as_default():\n image = tf.placeholder(dtype=tf.uint8, shape=shape)\n encoded_png = tf.image.encode_png(image)\n\n with tf.Session('') as sess:\n num_images = len(images)\n for i in range(num_images):\n sys.stdout.write('\\r>> Converting image %d/%d' % (i + 1, num_images))\n sys.stdout.flush()\n\n png_string = sess.run(encoded_png, feed_dict={image: images[i]})\n\n example = dataset_utils.image_to_tfexample(\n png_string, 'png'.encode(), _IMAGE_SIZE, _IMAGE_SIZE, labels[i])\n tfrecord_writer.write(example.SerializeToString())", "def plot_data(self):\n # plot every log image\n for log_img in self.log_img_map.itervalues():\n log_img.plot()", "def visualize(**images):\n n_images = len(images)\n plt.figure(figsize=(20,8))\n for idx, (name, image) in enumerate(images.items()):\n plt.subplot(1, n_images, idx + 1)\n plt.xticks([]); \n plt.yticks([])\n # get title from the parameter names\n plt.title(name.replace('_',' ').title(), fontsize=20)\n plt.imshow(image)\n plt.savefig('sample_gt_pred_2_max.jpeg')\n plt.show()", "def write_images(deployment_key, image_data):\n\n for image_data_dict in image_data:\n\n print \"------------------>>> \" + image_data_dict['longitude']+\" \"+image_data_dict['latitude']\n\n #save the image\n image = Image(deployment_id=deployment_key,\n image_name=image_data_dict['image_name'],\n date_time=image_data_dict['date_time'],\n position=\"SRID=4326;POINT(\"+image_data_dict['longitude']+\" \"+image_data_dict['latitude']+\")\",\n #depth=image_data_dict['depth'],\n #depth_uncertainty=image_data_dict['depth_uncertainty'],\n )\n image.save()\n\n write_measurement(image, 'depth', 'm', image_data_dict['depth'])\n 
write_measurement(image, 'depth_uncertainty', 'm', image_data_dict['depth_uncertainty'])\n write_measurement(image, 'temperature', 'cel', image_data_dict['temperature'])\n write_measurement(image, 'salinity', 'psu', image_data_dict['salinity'])\n write_measurement(image, 'pitch', 'rad', image_data_dict['pitch'])\n write_measurement(image, 'roll', 'rad', image_data_dict['roll'])\n write_measurement(image, 'yaw', 'rad', image_data_dict['yaw'])\n write_measurement(image, 'altitude', 'm', image_data_dict['altitude'])\n\n #link the camera to the image\n camera_data_dict = read_camera_data(image_data_dict)\n camera = Camera(**camera_data_dict)\n camera.image = image\n camera.save()\n\n return None", "def save_processed_images(exp_dir, img_dict):\n # save them into a directory called \"processed\"\n img_fname = os.path.join(exp_dir, str(experiment) + '_processed.jpg')", "def tensorboard_log(log_dir, tag, data):\n # Create a file writer for TensorBoard logs\n file_writer = tf.summary.create_file_writer(log_dir)\n file_writer.set_as_default()\n\n # Send to TensorBoard both results\n for i in range(len(data)):\n tf.summary.scalar(tag, data=data[i], step=i)\n file_writer.flush()", "def view_image(train_dataloader):\n for (x, target) in train_dataloader:\n np.save(\"img.npy\", x)\n print(x.shape)\n exit(0)", "def tb_log_sample_images(\n self,\n images: Dict[str, torch.Tensor],\n z_plane: Optional[int] = None,\n group: str = 'sample'\n ) -> None:\n\n out_batch = images['out']\n if not self.model_has_softmax_outputs:\n out_batch = out_batch.softmax(1) # Apply softmax before plotting\n\n batch2img = self._get_batch2img_function(out_batch, z_plane)\n\n inp = batch2img(images['inp'])[0]\n target_batch_with_c = images['target'][:, None]\n target = batch2img(target_batch_with_c)[0]\n\n out = batch2img(out_batch)\n pred = out.argmax(0)\n self.tb.log_image(f'{group}/inp', inp, step=self.step, cmap='gray')\n self.tb.log_image(f'{group}/target', target, step=self.step, num_classes=self.num_classes)\n\n for c in range(out.shape[0]):\n self.tb.log_image(f'{group}/c{c}', out[c], step=self.step, cmap='gray')\n self.tb.log_image(f'{group}/pred', pred, step=self.step, num_classes=self.num_classes)\n\n inp01 = squash01(inp) # Squash to [0, 1] range for label2rgb and plotting\n target_ov = label2rgb(target, inp01, bg_label=0, alpha=self.overlay_alpha)\n pred_ov = label2rgb(pred, inp01, bg_label=0, alpha=self.overlay_alpha)\n self.tb.log_image(f'{group}/target_overlay', target_ov, step=self.step, colorbar=False)\n self.tb.log_image(f'{group}/pred_overlay', pred_ov, step=self.step, colorbar=False)\n # TODO: Synchronize overlay colors with pred- and target colors\n # TODO: What's up with the colorbar in overlay plots?\n # TODO: When plotting overlay images, they appear darker than they should.\n # This normalization issue gets worse with higher alpha values\n # (i.e. 
with more contribution of the overlayed label map).\n # Don't know how to fix this currently.", "def log_images(self, images, num_images, epoch, n_batch, num_batches,\n format='NCHW', normalize=True, title=None):\n\n if type(images) == np.ndarray:\n\n if len(images.shape) == 3:\n images = torch.from_numpy(images).permute(2, 0, 1)\n else:\n images = torch.from_numpy(images)\n\n if format == 'NHWC':\n images = images.transpose(1, 3)\n\n step = Recorder.step(epoch, n_batch, num_batches)\n img_name = '{}/images: *{}*'.format(self.comment, title)\n\n # Make horizontal grid from image tensor\n horizontal_grid = vutils.make_grid(images, normalize=normalize, scale_each=True)\n # Make vertical grid from image tensor\n nrows = int(np.sqrt(num_images))\n grid = vutils.make_grid(images, nrow=nrows, normalize=True, scale_each=True)\n\n # Add horizontal images to tensorboard\n self.writer.add_image(img_name, horizontal_grid, step)\n\n # Save plots\n self.save_torch_images(horizontal_grid, grid, epoch, n_batch)", "def do_stage(self, images):\n\n for i, image in enumerate(images):\n pass\n # logging_tags = logs.image_config_to_tags(image, self.group_by_keywords)", "def save_images(images, filenames, output_dir):\n for i, filename in enumerate(filenames):\n # Images for inception classifier are normalized to be in [-1, 1] interval,\n # so rescale them back to [0, 1].\n with tf.gfile.Open(os.path.join(output_dir, filename), 'w') as f:\n img = (((images[i, :, :, :] + 1.0) * 0.5) * 255.0).astype(np.uint8)\n Image.fromarray(img).save(f, format='PNG')", "def save_images(images, filenames, output_dir):\n for i, filename in enumerate(filenames):\n # Images for inception classifier are normalized to be in [-1, 1] interval,\n # so rescale them back to [0, 1].\n with tf.gfile.Open(os.path.join(output_dir, filename), 'w') as f:\n img = np.round(((images[i] + 1.0) * 0.5) * 255.0).astype(np.uint8)\n Image.fromarray(img).save(f, format='PNG')", "def setImages( self, event_key, images ):\n print \"event index\",event_key[0]\n self.run = event_key[1]\n self.subrun = event_key[2]\n self.event_num = event_key[3]\n print self.run,self.subrun,self.event_num\n self.images = images\n #print self.images.img_v\n #for img in self.images.img_v:\n # print img.shape\n self.labeltools.setImage( event_key[0], self.images )", "def logging_summaries(\n summary_writer: tf.contrib.summary.SummaryWriter, logged: Dict\n) -> None:\n\n with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():\n tf.contrib.summary.image(\"generated\", logged[\"generated_data\"])\n tf.contrib.summary.image(\"real\", logged[\"real_data\"])\n tf.contrib.summary.scalar(\"generator/loss\", logged[\"gen_loss\"])\n tf.contrib.summary.scalar(\"discriminator/loss\", logged[\"disc_loss\"])", "def log_tensorboard(self, callback, names, logs, batch_no):\n\n for name, value in zip(names, logs):\n summary = tf.Summary()\n summary_value = summary.value.add()\n summary_value.simple_value = value\n summary_value.tag = name\n callback.writer.add_summary(summary, batch_no)\n callback.writer.flush()", "def log_rgb(key, prefix, batch, i=0):\n rgb = batch[key] if is_dict(batch) else batch\n return prep_image(prefix, key,\n rgb[i])", "def convert_predictions_to_image_summaries(hook_args):\n decode_hparams = hook_args.decode_hparams\n if not decode_hparams.display_decoded_images:\n return []\n predictions = hook_args.predictions[0]\n\n # Display ten random inputs and outputs so that tensorboard does not hang.\n all_summaries = []\n rand_predictions = 
np.random.choice(predictions, size=10)\n for ind, prediction in enumerate(rand_predictions):\n output_summary = image_to_tf_summary_value(\n prediction[\"outputs\"], tag=\"%d_output\" % ind)\n input_summary = image_to_tf_summary_value(\n prediction[\"inputs\"], tag=\"%d_input\" % ind)\n all_summaries.append(input_summary)\n all_summaries.append(output_summary)\n return all_summaries", "def print_images_in_statistics(self):\n self._print_images_statistics(self._images_in_folder, self._pose_class_names)", "def pictures_to_tfrecord(filename, data_folder, nshards=1): \n from PIL import Image\n\n fnames = glob.glob(os.path.join(data_folder, '*.jpg'))\n npics = len(fnames)\n shard_width = int(npics/nshards)+1\n\n for i_shard in range(nshards):\n with tf.python_io.TFRecordWriter(filename+str(i_shard)+'.tfrecord') as _w:\n for ipic in range(i_shard*shard_width,(i_shard+1)*shard_width): \n if ipic%2000==0:\n print('{} iteration'.format(ipic), flush=True)\n if ipic>=npics:\n break\n with Image.open(fnames[ipic]) as im:\n im = im.resize((109,89), Image.ANTIALIAS)\n im = np.array(im).astype(np.float32)\n im = np.transpose(im, (2,0,1))\n Example = tf.train.Example(features=tf.train.Features(feature={\n #'pic': _float_feature(im.flatten().tolist()),\n 'pic': _bytes_feature(im.tostring()),\n }))\n _w.write(Example.SerializeToString())\n quit()", "def save_images(images, filenames, output_dir):\n for i, filename in enumerate(filenames):\n # Images for inception classifier are normalized to be in [-1, 1] interval,\n # so rescale them back to [0, 1].\n with tf.gfile.Open(os.path.join(output_dir, filename), 'w') as f:\n imsave(f, (images[i, :, :, :] + 1.0) * 0.5, format='png')", "def save_images(images, filenames, output_dir):\n for i, filename in enumerate(filenames):\n # Images for inception classifier are normalized to be in [-1, 1] interval,\n # so rescale them back to [0, 1].\n with tf.gfile.Open(os.path.join(output_dir, filename), 'w') as f:\n imsave(f, (images[i, :, :, :] + 1.0) * 0.5, format='png')", "def log_all_info(module: LightningModule, img: Tensor, target: Tensor, logist: Tensor, batch_idx: int,\n title: str, dice_score: float, threshold: float) -> None:\n brainSlice = BrainSlices(module, img, target, logist, threshold)\n brainSlice.log(batch_idx, title, dice_score)\n\n if not os.path.exists('./mp4'):\n os.mkdir('./mp4')\n\n brainSlice.animate_masks(fig_title=f\"epoch: {module.current_epoch}, batch: {batch_idx}, dice_score: {dice_score}\",\n outfile=Path(\n f\"./mp4/epoch={module.current_epoch}_batch={batch_idx}_dice_score={dice_score}.mp4\"))\n log_weights(module)", "def log_images(self, func, mode, batch, output,\n args, dataset, world_size, config):\n dataset_idx = 0 if len(args) == 1 else args[1]\n prefix = prepare_dataset_prefix(config, dataset_idx)\n interval = len(dataset[dataset_idx]) // world_size // config.num_logs\n if args[0] % interval == 0:\n prefix_idx = '{}-{}-{}'.format(mode, prefix, batch['idx'][0].item())\n func(prefix_idx, batch, output)", "def image_summary(self, tag, images, step):\n\n for i, img in enumerate(images):\n # Write the image to a string\n try:\n s = StringIO()\n except:\n s = BytesIO()\n scipy.misc.toimage(img).save(s, format=\"png\")\n\n # Create an Image object as a Summary value\n with self.writer.as_default():\n tf.summary.image(name='%s/%d' % (tag, i), data=s.getvalue(), step=step)\n\n # Create and write Summary\n self.writer.flush()" ]
[ "0.6842873", "0.62515694", "0.62474054", "0.6194678", "0.60632837", "0.59421414", "0.5922582", "0.5918516", "0.5903152", "0.58775586", "0.5871887", "0.58621484", "0.5861817", "0.5815416", "0.5804208", "0.57724893", "0.5751607", "0.57250875", "0.57229185", "0.57109416", "0.57008874", "0.5686224", "0.5672163", "0.5671085", "0.5662417", "0.5650151", "0.5632572", "0.5628357", "0.5616676", "0.56023514" ]
0.7405011
0
Log all text in a dict to TensorBoard.
def log_text(self, text_dict, iterations, step_in_epoch=None, cur_epoch=None, print_to_stdout=True): if len(text_dict) != 0: thing1 = f'/{step_in_epoch}' if step_in_epoch is not None else "" thing2 = f'@Epoch {cur_epoch}' if cur_epoch is not None else "" self.log_stdout(f'Step {iterations}{thing1}{thing2} Texts') for k, v in text_dict.items(): if print_to_stdout: temp_k = k.replace(':', '/') self.log_stdout(f'\t[{temp_k}: {v}]') k = k.replace(':', '/') # Group in TensorBoard. self.summary_writer.add_text(k, v, iterations) self.sync_summary_writer()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_tensorboard_text(self, key: str, input_dict: Dict[str, Any]) -> None:\n try:\n with tf.Session() as sess:\n s_op = tf.summary.text(\n key,\n tf.convert_to_tensor(\n ([[str(x), str(input_dict[x])] for x in input_dict])\n ),\n )\n s = sess.run(s_op)\n self.summary_writer.add_summary(s, self.get_step)\n except Exception:\n LOGGER.info(\n \"Cannot write text summary for Tensorboard. Tensorflow version must be r1.2 or above.\"\n )\n pass", "def log_tensorboard(self, value_dict, step):\n for key, value in value_dict.items():\n summary = tf.Summary(value=[tf.Summary.Value(tag=key, simple_value=value)])\n self.writer.add_summary(summary, step)", "def dump_to_tensorboard(self, log_path: str) -> None:\n LOGGER.info(f'Log evaluations in tensorboard.')\n writer = SummaryWriter(log_dir=log_path)\n for key, value in self.eval_dict:\n writer.add_scalar(key, value)", "def log_tensorboard(self, callback, names, logs, batch_no):\n\n for name, value in zip(names, logs):\n summary = tf.Summary()\n summary_value = summary.value.add()\n summary_value.simple_value = value\n summary_value.tag = name\n callback.writer.add_summary(summary, batch_no)\n callback.writer.flush()", "def tbx_logger(self, log_dict, training_i):\n for tag, value in log_dict.items():\n self.tbx_writer.add_scalar(tag, value, training_i)", "def _write_text_to_tensorboard(self, tag: str, text: str, step: int):\n pass", "def tensorboard_log(log_dir, tag, data):\n # Create a file writer for TensorBoard logs\n file_writer = tf.summary.create_file_writer(log_dir)\n file_writer.set_as_default()\n\n # Send to TensorBoard both results\n for i in range(len(data)):\n tf.summary.scalar(tag, data=data[i], step=i)\n file_writer.flush()", "def _write_args_to_tensorboard():\r\n args = get_args()\r\n writer = get_tensorboard_writer()\r\n if writer:\r\n for arg in vars(args):\r\n writer.add_text(arg, str(getattr(args, arg)))", "def log_trainable_variables(self):\n var_names = list(self.trainable_variables.keys())\n self.logger.log_trainable_variables(var_names)", "def args_to_tensorboard(writer, args):\n\n txt = \"\"\n for arg in vars(args):\n txt += arg + \": \" + str(getattr(args, arg)) + \"<br/>\"\n\n writer.add_text('command_line_parameters', txt, 0)", "def log(data):\n items = []\n for key, value in data.items():\n if value is None:\n items.append('[{}]'.format(key))\n else:\n items.append('[{} {}]'.format(key, value))\n print(' '.join(items))", "def to_tensorboard_tf(self, **kwargs) -> 'PlotLosses':\n self.outputs.append(outputs.TensorboardTFLogger(**kwargs))\n return self", "def _log_scalars(self, scalar_dict, print_to_stdout=True):\n for k, v in scalar_dict.items():\n if print_to_stdout:\n self.write('[{}: {:.3g}]'.format(k, v))\n k = k.replace('_', '/') # Group in TensorBoard by phase\n self.summary_writer.add_scalar(k, v, self.global_step)", "def log(self, key: str, val: Any, iteration: int = None) -> None:\n assert key is not None and val is not None, \"Please set key and val\"\n\n if self._tb_writer is not None:\n assert (\n iteration is not None\n ), \"Must specify iteration when logging to tensorboard\"\n self._tb_writer.add_scalar(key, val, iteration)\n if self._tqdm_bar is not None:\n # update tqdm bar\n self._tqdm_data[key] = val\n self._tqdm_bar.set_postfix(self._tqdm_data, refresh=True)", "def log_dialogue_input(log_dict, dynamodb_table):\n if not isinstance(log_dict, dict):\n raise NameError(f\"Logging information must be dictionary, not type {type(log_dict)}\")\n\n # Log in PST\n log_dict[\"time\"] = pst()\n if dynamodb_table 
is not None:\n try:\n dynamodb_table.put_item(Item=log_dict)\n app.logger.info(\"DB write successful\")\n except Exception as e:\n app.logger.info(f\"Could not write to database: {e}\")\n # If no db is specified, write logs to info\n app.logger.info(log_dict)", "def send_log():\n log.info(f\"UUID={UUID}\")\n log.info(f\"SPLIT={SPLIT}\")\n log.info(f\"BATCH_SIZE={BATCH_SIZE}\")\n log.info(f\"EPOCHS={EPOCHS}\")\n log.info(f\"PATIENCE={PATIENCE}\")\n log.info(f\"X_FREQ={X_FREQ}\")\n log.info(f\"LOOK_BACK={LOOK_BACK}\")\n log.info(f\"LOOK_AHEAD={LOOK_AHEAD}\")\n log.info(f\"KERNEL_SIZE={KERNEL_SIZE}\")\n log.info(f\"FILTERS={FILTERS}\")\n log.info(f\"L1L2={L1L2}\")\n log.info(f\"D1={D1}\")\n log.info(f\"D2={D2}\")\n log.info(f\"DOUT={DOUT}\")\n log.info(f\"PLOT={PLOT}\")\n log.info(f\"SHUFFLE={SHUFFLE}\")", "def to_tensorboard(self, **kwargs) -> 'PlotLosses':\n self.outputs.append(outputs.TensorboardLogger(**kwargs))\n return self", "def save(net, txt_dict, path):\n dict_m = net.state_dict()\n _ = txt_dict\n torch.save(dict_m,path)", "def log_dict(dict_to_print: dict, message: str = ''):\n for k1, v1 in dict_to_print.items():\n log.info(f'{message} {k1}: {v1} ')\n print(f'{message} {k1}: {v1} ')", "def logging_summaries(\n summary_writer: tf.contrib.summary.SummaryWriter, logged: Dict\n) -> None:\n\n with summary_writer.as_default(), tf.contrib.summary.always_record_summaries():\n tf.contrib.summary.image(\"generated\", logged[\"generated_data\"])\n tf.contrib.summary.image(\"real\", logged[\"real_data\"])\n tf.contrib.summary.scalar(\"generator/loss\", logged[\"gen_loss\"])\n tf.contrib.summary.scalar(\"discriminator/loss\", logged[\"disc_loss\"])", "def log(self, text, stdout=False, indent=''):\n\n if stdout:\n print(text)\n\n for line in text.split(\"\\n\"):\n while len(self._log) > self.log_lines:\n self._log.pop(0)\n self._log.append(urwid.Text(indent + line))\n\n self._log.set_focus(len(self._log)-1)\n\n #if self._mainloop is not None:\n #self._mainloop.draw_screen()", "def build_tensorboard(self):\n self.logger = Logger(self.log_dir)", "def _create_tensor_board(self):\n self.log_writer = tf.summary.FileWriter(\"logs/%s\" % self.model_dir, self.sess.graph)", "async def log_stats(self, stats_dict):\n\n cmd = \"PRAGMA table_info(trainer_stats)\"\n cur = self.sql.cur\n data = cur.execute(cmd).fetchall()\n valid_keys = []\n for entry in data:\n valid_keys.append(entry['name'])\n self.log.info(valid_keys)\n\n for key in stats_dict:\n if key not in valid_keys:\n raise ValueError()\n trainer_id = self.trainer_id\n for key in stats_dict:\n value = stats_dict[key]\n cmd = f\"\"\"UPDATE trainer_stats\n SET {key} = {key} + :value\n WHERE trainer_id = :trainer_id\"\"\"\n cur.execute(cmd, locals())\n await self.sql.commit(now=True)\n self.log.info(\"log completed\")", "def write(self, data: dict):\n self.logger.info(\"\\t\".join(str(x) for x in data.values()))", "def save(net,dic,path):\n dict_m = net.state_dict()\n dict_m[\"word_dic\"] = dic \n torch.save(dict_m,path)", "def on_log(self):\n monitors = self.monitors\n if self.monitors is None:\n monitors = self.trainer.metrics.keys()\n\n\n hparams = self.hparams\n if self.hparams is None:\n hparams = self.trainer.hparams.keys()\n\n metrics = {name: format_metric(self.trainer.metrics[name])\n for name in monitors\n if name in self.trainer.metrics}\n hparams = {name: format_metric(self.trainer.hparams[name])\n for name in hparams\n if name in self.trainer.hparams}\n\n\n step_bar = self.step_bars[-1]\n step_bar.set_description(\"Epoch 
{}\".format(self.trainer.epoch+1))\n step_bar.set_postfix(**metrics, **hparams)\n step_bar.update(self.trainer.steps_trained - self.last_step)\n self.last_step = self.trainer.steps_trained", "def report(LOGDIR, epoch, e_dict, saver, sess, fh_log):\n # print loss\n print (\"Epoch: %i; Loss: %f; KLd: %f; CE %f\" % (epoch, e_dict[\"loss\"][-1], e_dict[\"KLd\"][-1], e_dict[\"CE\"][-1]))\n fh_log.write(\"%i\\t%0.5e\\t%0.5e\\t%0.5e\\n\" % (epoch, e_dict[\"loss\"][-1], e_dict[\"KLd\"][-1], e_dict[\"CE\"][-1]))", "def save(net,dic,path):\n dict_m = net.state_dict()\n dict_m[\"word_dic\"] = dic\n torch.save(dict_m,path)", "def _save(self, data: MetricsDict) -> None:\n client = MlflowClient()\n try:\n run_id = self.run_id\n except DataSetError:\n # If run_id can't be found log_metric would create new run.\n run_id = None\n\n log_metric = (\n partial(client.log_metric, run_id)\n if run_id is not None\n else mlflow.log_metric\n )\n metrics = (\n self._build_args_list_from_metric_item(k, v) for k, v in data.items()\n )\n\n if self._logging_activated:\n for k, v, i in chain.from_iterable(metrics):\n log_metric(k, v, step=i)" ]
[ "0.7434491", "0.67299134", "0.6394467", "0.63222766", "0.61781824", "0.6059231", "0.5878002", "0.5674671", "0.56471145", "0.54971224", "0.5484033", "0.54648966", "0.54230785", "0.54153043", "0.54076815", "0.53755504", "0.5346531", "0.5332499", "0.5329605", "0.5307764", "0.52799976", "0.5272244", "0.52719146", "0.52318394", "0.52312464", "0.5226292", "0.5202436", "0.51970327", "0.51814806", "0.51569086" ]
0.69078577
1
Tests the packet `id`.
def test_id(): assert Packet40.id == 40
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_id():\n assert Packet106.id == 106", "def test_id():\n assert Packet20.id == 20", "def check_id(self, id):", "def test_pnc(id):\n if id < int('0x10ffffff', 16): # 285212671\n return 'player'\n elif id < int('0x40ffffff', 16): # 1090519039\n return 'creature'\n elif id < int('0x80000fff', 16): # 2147487743\n return 'npc'\n else:\n print 'creature.Id error', id\n return None", "def test_id():\r\n cmd = ShdlcCmdGetErrorState(clear=False)\r\n assert type(cmd.id) is int\r\n assert cmd.id == 0xD2", "def checkValidId(self, id, prep_id = False):\n new_id = unquote(id)\n if prep_id: new_id = self.prepId(id)\n try:\n globalCheckValidId(self, new_id)\n return True\n except Exception:\n return str(sys.exc_info()[1])", "def match_id(self, id):\n btest = re.compile(id, re.IGNORECASE)\n return 'ID' in self and btest.search(self['ID']) != None", "def test_process_packet_message(self):\n\n pkt = {'type': 'message',\n 'endpoint': '',\n 'data': 'woot'}\n data = self.ns.process_packet(pkt)\n self.assertEqual(data, pkt['data'])\n assert not self.environ['socketio'].error.called\n\n # processing a message packet with id and endpoint\n pkt = {'type': 'message',\n 'id': 5,\n 'ack': True,\n 'endpoint': '/tobi',\n 'data': ''}\n data = self.ns.process_packet(pkt)\n self.assertEqual(data, pkt['data'])\n assert not self.environ['socketio'].error.called", "def packetCheck(packet):\n info = [packet[i : i + 2] for i in range(0, len(packet), 2)]\n MagicNo = int.from_bytes(info[0], \"big\")\n PacketType = int.from_bytes(info[1], \"big\")\n RequestType = int.from_bytes(info[2], \"big\")\n if MagicNo != 0x497E:\n return False\n if PacketType != 0x0001:\n return False\n if RequestType != 0x0001 and RequestType != 0x0002:\n return False\n return True", "def test_write(self, packet_id, value):\n\t\tpacket = struct.pack('>B %s' % type_lookup[type(value)], data_id, value)\n\t\tprint('%s Testpacket: 0x%s' % (self.name, packet.hex().upper()))\n\t\tself.data_source.send(packet)", "def process_packet(self, packet, udp_dport=UDP_INT_DST_PORT):\n logger.info('INT Packet data - [%s]', extract_int_data(packet[Ether]))\n return False", "def identify_id(id: str) -> bool:\n return validate_handle(id)", "def test_id():\n with expected_protocol(\n DCXS,\n [(\"?\", \"DCXS750-4\"), ],\n ) as inst:\n assert inst.id == \"DCXS750-4\"", "def isValid(t_id):\n\tstr_id=str(t_id).strip()\n\treturn str_id.isdigit()", "def handle_packet(self, srcif, packet) -> bool:\n typeOfPacket = packet[\"type\"]\n if typeOfPacket == DATA:\n return self.forward(srcif, packet)\n elif typeOfPacket == DUMP:\n return self.dump(packet)\n elif typeOfPacket == UPDT:\n return self.update(srcif, packet)\n elif typeOfPacket == RVKE:\n return self.revoke(packet)\n else:\n return False", "def packet_check(argument, lineno):\n \n if argument not in symbol_table.keys() or symbol_table[argument] != 'PACKET':\n print_error(\"\\tError : undefined packet '\"+str(argument)+\"'\", str(lineno))", "def test_process_packet_event(self):\n pkt = {'type': 'event',\n 'name': 'woot',\n 'endpoint': '',\n 'args': []}\n self.ns.process_packet(pkt)\n assert not self.environ['socketio'].error.called\n\n # processing an event packet with message id and ack\n pkt = {'type': 'event',\n 'id': 1,\n 'ack': 'data',\n 'name': 'tobi',\n 'endpoint': '',\n 'args': []}\n self.ns.process_packet(pkt)\n assert not self.environ['socketio'].error.called", "def test_tamper_ip_ident(logger):\n\n packet = layers.packet.Packet(IP(src='127.0.0.1', dst='127.0.0.1')/TCP(sport=2222, dport=3333, seq=100, ack=100, 
flags=\"S\"))\n original = copy.deepcopy(packet)\n tamper = actions.tamper.TamperAction(None, field='id', tamper_type='replace', tamper_value=3333, tamper_proto=\"IP\")\n lpacket, rpacket = tamper.run(packet, logger)\n assert not rpacket, \"Tamper must not return right child\"\n assert lpacket, \"Tamper must give a left child\"\n assert id(lpacket) == id(packet), \"Tamper must edit in place\"\n\n # Confirm tamper replaced the field it was supposed to\n assert packet[IP].id == 3333, \"Tamper did not replace flags.\"\n\n # Confirm tamper didn't corrupt anything in the TCP header\n assert confirm_unchanged(packet, original, TCP, [])\n\n # Confirm tamper didn't corrupt anything else in the IP header\n assert confirm_unchanged(packet, original, IP, [\"id\"])", "def test_routerid(self):\n self.assertTrue(\n self.ospf.parse_state(\n pattern='routerid',\n cmd_key='sh_ospf_ints') == '192.168.45.1', 'OSPF Interface: router ID not found')", "def check_host_connectivity_by_id(self, src_id, dst_id):\n src_host, src_ip, _, src_vlan, src_bond, _ = self.host_information[src_id].values()\n dst_host, dst_ip, _, dst_vlan, dst_bond, _ = self.host_information[dst_id].values()\n connectivity = src_vlan == dst_vlan or self.is_routed_vlans(src_vlan, dst_vlan)\n if self.is_routed_vlans(src_vlan, dst_vlan):\n src_vip = self.faucet_vips[src_vlan]\n dst_vip = self.faucet_vips[dst_vlan]\n self.host_ping(src_host, src_vip.ip, src_bond) # pytype: disable=attribute-error\n self.host_ping(dst_host, dst_vip.ip, dst_bond) # pytype: disable=attribute-error\n if connectivity:\n self.host_ping(src_host, dst_ip.ip, src_bond) # pytype: disable=attribute-error\n self.host_ping(dst_host, src_ip.ip, dst_bond) # pytype: disable=attribute-error", "def test_data_type_id(self):\n self.assertTrue(self.tester.data_type(ret_id=True), 2)", "def _do_some_logic(self, packet):\n\n\n pass", "def test_id(self):\n node = Node()\n node.id = \"1234\"\n self.assertEqual(node.getId(), node.id)", "def spoof_packet(packet):", "def check_packet(self, header, string):\n\n string = string[0:11] + string[75:]\n gen_chksum = hashlib.sha256(string.encode()).hexdigest()\n try:\n if header[\"checksum\"] == gen_chksum:\n return True\n else:\n return False\n except KeyError:\n return False", "def _check_request_id(\n self,\n message: W24TechreadMessage\n ) -> None:\n self.assertEqual(type(message.request_id), UUID)", "def handle_packet(self, packet, ip_proto=None):\n logger.info('Packet data - [%s]', packet.summary())\n return False", "def test_check_query_response(self, node_id):\n\n print('\\n### Testing query node status RESPONSE ###')\n print('Remember that node_id must be the same 3 characters string that in test_query_node_id(node_id)')\n\n received_bytes = self.serport.readline()\n if received_bytes == b'E\\r\\n':\n print(\"You received Error Msg!\")\n print(f'Did not receive correct query status response from node {node_id}')\n print(f'Query again the node {node_id} if required')\n return False\n\n elif (len(received_bytes) == 13) and (received_bytes[0:8] == b'#B' + node_id.encode() + b'06V'):\n supply_voltage = received_bytes.decode()[8:13]\n print(f\"supply_voltage of {node_id} is {supply_voltage}\")\n print(\"response from the remote node SUCCESS\")\n return True\n else:\n print(f'Did not receive correct query status response from node {node_id}')\n print(f'Query again the node {node_id} if required')\n return False", "def check_id(id):\n id = id.strip()\n \n if id and id.isdigit(): # id must only be a number\n return id\n else:\n return None", "def 
verify_idcode(device, idcode, idcode_opcode):\n idcode_read = read_idcode_opcode(device, idcode_opcode)\n for i in range(len(idcode)):\n if idcode[i] == \"X\":\n continue # ignore 'don't cares'\n elif idcode_read[i] != idcode[i]:\n print(\"IDCODE read does not match real IDCODE from BSDL file\")\n print(f\"\\tidcode_read[{i}]: {idcode_read[i]}, idcode[{i}]: {idcode[i]}\")\n return False\n\n return True" ]
[ "0.78103393", "0.7715128", "0.7001969", "0.6294706", "0.6101701", "0.5967647", "0.5964849", "0.5934389", "0.5914396", "0.59053135", "0.5801665", "0.579754", "0.5779012", "0.57742375", "0.5641701", "0.562879", "0.5582393", "0.55690616", "0.55153644", "0.55010813", "0.547853", "0.5454692", "0.54278004", "0.5405974", "0.5388852", "0.5369423", "0.5369117", "0.53512913", "0.5344583", "0.5339192" ]
0.78331536
0
Tests the packet `size`.
def test_size(): assert Packet40.size == 2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_size():\n assert Packet106.size == 12", "def test_size():\n assert Packet20.size == 2", "def checkPacketLength(self):\n return self.packetLength == len(self) - PRIMARY_HEADER_BYTE_SIZE - 1", "def test_invalid_packet_size(self):\n p = (\n Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac)\n / IPv6(src=self.src_if.remote_ip6, dst=self.src_if.local_ip6)\n / UDP(sport=1234, dport=5678)\n / Raw()\n )\n self.extend_packet(p, 1000, self.padding)\n fragments = fragment_rfc8200(p, 1, 500)\n bad_fragment = fragments[1]\n bad_fragment[IPv6ExtHdrFragment].offset = 65500\n self.pg_enable_capture()\n self.src_if.add_stream([bad_fragment])\n self.pg_start()\n pkts = self.src_if.get_capture(expected_count=1)\n icmp = pkts[0]\n self.assertIn(ICMPv6ParamProblem, icmp)\n self.assert_equal(icmp[ICMPv6ParamProblem].code, 0, \"ICMP code\")", "def received_is_of_size(size):\n received = {}\n # Check that payout was generated and pool_size_query answered\n for handled_transfer in echo_node.seen_transfers:\n event = get_echoed_transfer(handled_transfer)\n if not event:\n continue\n received[event.identifier] = event\n if len(received) == size:\n return received", "def _is_frame_legal_size(data: bytes) -> bool:\n return len(data) < UDP_MAX_SIZE", "def _is_size_key (self, key):\n return key == '$size' or key == 'size'", "def checkSize(rsakey, messageSize):\n keySize = key.getKeyLength(rsakey)\n if(keySize >= messageSize):\n return True\n if(keySize < messageSize):\n return False", "def test_isc_server_stmt_edns_udp_size_failing(self):\n test_string = [\n 'edns-udp-size yes;',\n 'edns-udp-size -3;',\n ]\n result = optviewserver_stmt_edns_udp_size.runTests(test_string, failureTests=True)\n self.assertTrue(result[0])", "def test_isc_server_stmt_edns_udp_size_passing(self):\n test_string = [\n 'edns-udp-size 0;',\n 'edns-udp-size 1;',\n 'edns-udp-size 102;',\n 'edns-udp-size 255;',\n ]\n result = optviewserver_stmt_edns_udp_size.runTests(test_string, failureTests=False)\n self.assertTrue(result[0])", "def verify_size_content(self, re_size):\n to_alternate = 0\n if re_size['chunck'] < re_size['size']:\n to_alternate = re_size['chunck']\n re_size['chunck'] = re_size['size']\n re_size['size'] = to_alternate\n return re_size", "def _check_size(size):\r\n\r\n if not isinstance(size, (list, tuple)):\r\n raise ValueError(\"Size must be a tuple\")\r\n if len(size) != 2:\r\n raise ValueError(\"Size must be a tuple of length 2\")\r\n if size[0] < 0 or size[1] < 0:\r\n raise ValueError(\"Width and height must be >= 0\")\r\n\r\n return True", "def CheckForSize(collection, expected_size, equal_flag, unequal_flag,\n unexpectedly_empty_flag=None):\n\n if len(collection) == expected_size:\n return equal_flag\n elif collection or unexpectedly_empty_flag is None:\n return unequal_flag\n else:\n return unexpectedly_empty_flag", "def packetCheck(packet):\n info = [packet[i : i + 2] for i in range(0, len(packet), 2)]\n MagicNo = int.from_bytes(info[0], \"big\")\n PacketType = int.from_bytes(info[1], \"big\")\n RequestType = int.from_bytes(info[2], \"big\")\n if MagicNo != 0x497E:\n return False\n if PacketType != 0x0001:\n return False\n if RequestType != 0x0001 and RequestType != 0x0002:\n return False\n return True", "def test_size_returns_length(dq_3):\n assert dq_3.size() == 3", "def correct_size():\n check50.run(\"./inheritance_test\").stdout(\"size_true.*\").exit(0)", "def check_resize_size(size):\n if isinstance(size, int):\n check_value(size, (1, FLOAT_MAX_INTEGER))\n elif isinstance(size, (tuple, list)) and 
len(size) == 2:\n for i, value in enumerate(size):\n check_value(value, (1, INT32_MAX), \"size at dim {0}\".format(i))\n else:\n raise TypeError(\"Size should be a single integer or a list/tuple (h, w) of length 2.\")", "def check_pack_sizes():\n conn = sqlite3.connect(DBNAME)\n c = conn.cursor()\n for row in c.execute(\"SELECT lower(hex(sum)), size FROM packs\"):\n checksum, size = row\n resp = s3.head_object(Bucket=BUCKET, Key=f\"{checksum}.pack\")\n length = resp[\"ContentLength\"]\n if length != size:\n raise ValueError(f\"pack {checksum}: expected size {size} but actual size is {length}\")", "def is_char(self, size=None):\n return False", "def check_crop_size(size):\n type_check(size, (int, list, tuple), \"size\")\n if isinstance(size, int):\n check_value(size, (1, FLOAT_MAX_INTEGER))\n elif isinstance(size, (tuple, list)) and len(size) == 2:\n for value in size:\n check_value(value, (1, FLOAT_MAX_INTEGER))\n else:\n raise TypeError(\"Size should be a single integer or a list/tuple (h, w) of length 2.\")", "def testSize(self):\n v1 = Vector(1, 2, 3, size=6)\n assert v1 == [1, 2, 3, 0, 0, 0]\n failed = False\n try:\n Vector(1, 2, 3, size=2)\n except IndexError:\n failed = True\n assert failed\n\n v3 = Vector(size=7)\n assert v3 == Vector(0, 0, 0, 0, 0, 0, 0)\n assert v3 == (0, 0, 0, 0, 0, 0, 0)", "def isElementSize(self, timeout=20.0, commandId=None):\n TestAdapterLib.check_timeout(caller=TestAdapterLib.caller(), timeout=timeout)\n \n return self.isActionAccepted(timeout=timeout, commandName=Command.GET_ELEMENT_SIZE, \n commandId=commandId)", "def is_int(self, size=None):\n return False", "def testsize(self):\n for size in range(5):\n a = AmuletAbility('Skepticism', size=size+1)\n self.assert_(str(size+1) in str(a))\n self.assertEqual(a.size, size+1)\n self.assertTrue(isinstance(a.AC, int))\n self.assertTrue(isinstance(a.description(), str))", "def testSize (self):\r\n \r\n perpixel = bytes_per_pixel [self.bih_vals [bih_BitCount]]\r\n width = self.bih_vals [bih_Width]\r\n height = self.bih_vals [bih_Height]\r\n expected = self.bih_vals [bih_SizeImage]\r\n\r\n # Rows always have multiples of 4 bytes\r\n \r\n padding = 3 - ((perpixel * width + 3) % 4)\r\n size = (width * perpixel + padding) * height\r\n\r\n if not size == expected:\r\n print \"Calculated size = %d (<> %d)\" % (size, expected)\r\n print \"***** File size error *****\"", "def recv_size(s, size):\n print 'Receive data in fixed size mode'\n reply = s.recv(size)\n print reply", "def _assert_same_size(outputs: TensorStruct, output_size: OutputSize):\n flat_output_size = nest.flatten(output_size)\n flat_output = nest.flatten(outputs)\n for output, size in zip(flat_output, flat_output_size):\n if isinstance(size, torch.Size):\n if output[0].size() != size:\n raise ValueError('The output size does not matchthe required output_size')\n elif output[0].size()[-1] != size:\n raise ValueError('The output size does not match the required output_size')", "def check_size(self, last, size):\n #check if the size pass the constraint\n valid = self.get_range(self.size['valid'], last)\n alert = self.get_range(self.size['alert'], last) if 'alert' in self.size else (0, -1)\n if valid[0] <= size <= valid[1]:\n #valid\n delta = 0\n elif alert[0] <= size <= alert[1]:\n #alert\n if size < valid[0]:\n delta = -1\n print 'Warning: Size of \"%s\" decreased by %i!' % (filename, last - size)\n else:\n delta = 1\n print 'Warning: Size of \"%s\" increased by %i!' 
% (filename, size - last)\n pass #todo\n else:\n #failed\n if size < alert[0]:\n delta = -2\n print 'Error: Size of \"%s\" decreased by %i!' % (filename, last - size)\n else:\n delta = 2\n print 'Error: Size of \"%s\" increased by %i!' % (filename, size - last)\n pass #todo\n return delta", "def assert_queue_size(sizes):\n for queue in sizes:\n assert_that(count_messages(queue), is_(sizes[queue]))", "def size_valid(self, field_path):\n return self._collection.size_valid(field_path)" ]
[ "0.7385397", "0.73481655", "0.6485774", "0.6421407", "0.6403327", "0.634776", "0.6263186", "0.6178231", "0.61623096", "0.6134446", "0.6119156", "0.60726506", "0.59411585", "0.5907456", "0.5892667", "0.58187336", "0.57345223", "0.5726993", "0.57237065", "0.5696267", "0.5662857", "0.56556153", "0.56537247", "0.56382746", "0.5600407", "0.55925477", "0.5591664", "0.55853975", "0.5538442", "0.5527516" ]
0.7417761
0
Restore the variable stack.
def pop(self): self._variables = self._variable_stack.pop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restore(self,):\n self.pos, self.dataptr, = self.stack.pop()", "def pop(self):\n self.restore(self.stack.pop())", "def restore_context(self):\r\n self.current_context = self.context_stack.pop()", "def restore(self):\n self.nodes.restore()", "def restore(self):\n self.igate.restore()\n self.fgate.restore()\n self.ogate.restore()\n super(LSTM, self).restore()", "def _Restore(self) -> None:\n self._SetNodes(self._nodes)", "def restore():\r\n\tglobal mhp, php, men, pen\r\n\tmhp = 100\r\n\tphp = 100\r\n\tmen = 100\r\n\tpen = 100", "def popFrameVariables(self):\n del self.frame_variables_stack[-1]\n del self.frame_type_descriptions[-1]", "def restore(self):\r\n token, stream, line, col = self.pushes.pop()\r\n self.token = token\r\n self.stream = stream\r\n self.line = line\r\n self.column = col", "def restore(self):\n raise NotImplementedError", "def popclear():\n stack = currentframe().f_back.f_locals.setdefault(SN, [])\n result = stack.pop()\n stack[:] = []\n return result", "def pop(self):\n assert self.local_variables.parent is not None\n self.local_variables = self.local_variables.parent\n assert self.local_types.parent is not None\n self.local_types = self.local_types.parent", "def reload(self):\n self.restore()", "def restore(self, restore):\n self._restore = restore", "def restore(self):\n self.abstract_obj.restore()", "def pop(state):\n return state.env.stack.pop()", "def restore_last_undo_point(self):\n self.unload()", "def revert(self):\n\n if len(self.stack) == 0 or not self.revertable:\n return\n\n for s in self.stack:\n s[\"model\"].setPos(s[\"model\"].getPos() + Vec3(0,0,THING_REVERT_DISTANCE))\n\n state = self.stack.pop()\n\n #not sure if this helps, but it can't hurt\n self.model.detachNode()\n\n for x in self.toRevert:\n self.toRevert[x](state[x])", "def restore_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreState(state_ref)\n self.ale.deleteState(state_ref)", "def reset(self):\n self._varstate = None\n self.frozen = False", "def scope_pop(self) -> None:\n self.scope_stack.popleft()", "def stack():\n return currentframe().f_back.f_locals.setdefault(SN, [])", "def rollback(self):\n if len(self.__stack) == 0:\n raise EmptyStackException()\n self.__current_pos = self.__stack[-1][0]\n self.line = self.__stack[-1][1]\n self.linePos = self.__stack[-1][2]\n self.__stack = self.__stack[:-1]", "def restore_data(self):\n self.R = self._Ro\n del self._Ro", "def restore_full_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreSystemState(state_ref)\n self.ale.deleteState(state_ref)", "def trace_stack_pop(trace_stack_var: ContextVar) -> None:\n trace_stack = trace_stack_var.get()\n trace_stack.pop()", "def restore(self, memento):\n self.state = memento.state", "def undo(self) -> CompilerEnv:\n if not self.stack:\n return\n self.env.close()\n self.env = self.stack.pop()\n return self.env", "def restore_state(self, state: ale_py.ALEState):\n self.ale.restoreState(state)", "def restore(self):\n\n self.dispersion = self.raw_dispersion\n self.flux = self.raw_flux\n self.flux_err = self.raw_flux_err\n self.reset_mask()" ]
[ "0.8118927", "0.69744605", "0.69486845", "0.6639864", "0.6619403", "0.66180235", "0.6612719", "0.65348417", "0.65342045", "0.6508564", "0.6504924", "0.64735585", "0.6442041", "0.64363015", "0.64261246", "0.6354677", "0.6338502", "0.6303261", "0.6299454", "0.62333167", "0.6191393", "0.6178933", "0.61630726", "0.6150873", "0.6148135", "0.61391824", "0.6138276", "0.6126417", "0.6122608", "0.61000955" ]
0.7232237
1
Call a task object.
def calltask(self, name, **vars): if name in self._tasks: for entry in self._tasks[name]: entry.execute(vars) else: raise Error("No such task: {0}".format(name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def call(self, task, **options):\n pass", "def __call__(self, *args, **kw):\n return Task(self, **self.__options)(*args, **kw)", "def run_task(self) -> Task:", "def execute_task(self, task):\n t = threading.Thread(target=task)\n t.start()", "def run(self):\n task_func = getattr(self, self.task_data.get('task_type'))\n task_obj = task_func()\n return task_obj", "def do_t(self, arg):\n self.do_task(arg)", "def run_operation(task):\n return task.run()", "def __call__(self, *args, **kwargs):\n with self.task_context():\n return self._call(*args, **kwargs)", "def run_task(self, task_id):\n raise NotImplementedError", "def process_task(params):\n params['task'](params)", "def exec(cls, *args, **kwargs):\n task = cls(*args, **kwargs)\n task.run()\n return task", "def doTask(self, *args):\n taskId = self.task.get()\n document = self.document_uuid.get()\n visitor = self.visitor_uuid.get()\n self.output.set(str(self.taskEx.executeTask(visitor, document, taskId)))", "def execute_task(self):\n raise NotImplementedError(\"Execute Task method not implemented\")", "def run(self, *args, **kwargs):\n if self.task_loader is None:\n if 'task' not in kwargs:\n if len(args) == 0 or not isinstance(args[0], self.flow_class.task_class):\n raise FlowRuntimeError('Function {} should be called with task instance', self.name)\n return self.func(*args, **kwargs)\n else:\n task = self.task_loader(self, *args, **kwargs)\n return self.func(task, *args, **kwargs)", "def __call__(self, task):\n self.put(task)\n return self.get()", "def run(self, *args, **kwargs):\n raise NotImplementedError('Tasks must define the run method.')", "def call(self, task):\n call, args = task[0], task[1:]\n\n if call == codes.SCRAPE:\n return self.scrape(*args)\n if call == codes.ANALYZE:\n return self.analyze(*args)", "def task():\n pass", "def task():\n pass", "def run(self, task):\n\n self._setup()\n\n runnable = load_from_module(task.task)\n runnable(*task.get_args(), **task.get_kwargs())", "def run_task(self, cmd):\n # Not initialized here...must be overloaded from outside\n raise NotImplementedError()", "def run_task(self, cmd):\n # Not initialized here...must be overloaded from outside\n raise NotImplementedError()", "def octopus_task(self, msg, args):\r\n self.tasks.send_task_by_id(msg, args)", "def _execute_task(task, function, config):\n logging.debug('<Task-%s> started.' 
% task.get_id())\n start_time = time.time()\n try:\n function(task.get_data())\n logging.debug('<Task-%s> finished in %2.2f seconds with result: %s' % (task.get_id(),\n time.time() - start_time,\n task.get_data()))\n return {\n \"status\": True,\n \"task\": task\n }\n except Exception, error:\n logging.error(error)\n return {\n \"status\": False,\n \"task\": task\n }", "def __call__(self, message):\n if not hasattr(message, 'body'):\n logger.info('Got an invalid message format, skip.')\n message.ack()\n return None\n\n body = message.body\n\n reply_exchange = body['reply_exchange']\n reply_key = body['reply_key']\n\n logger.info('Got a new task call, uid: %s', body['id'])\n\n # Task is not in registry.\n if not body['name'] in self.tasks.keys():\n logger.error('<Task \"%s\"> Does not exist in registry, skip.', body['id'])\n self.consumer.reply_state_failure(\n reply_exchange=reply_exchange,\n reply_key=reply_key,\n id=body['id'],\n reason=\"ConsumerError('cant find task in registry')\"\n )\n message.ack()\n return None\n\n # Pass task to started state\n logger.info('<Task \"%s\"> Change state to STARTED', body['id'])\n if body['reply_states']:\n self.consumer.reply_state_started(\n reply_exchange=reply_exchange,\n reply_key=reply_key,\n id=body['id']\n )\n\n try:\n # Call the task.\n result = self.tasks[body['name']](*body['args'], **body['kwargs'])\n\n except Exception as reason:\n logging.error('<Task \"%s\"> Change state to FAILURE: %r', body['id'], reason)\n self.consumer.reply_state_failure(\n reply_exchange=reply_exchange,\n reply_key=reply_key,\n id=body['id'],\n reason=repr(reason)\n )\n\n else:\n logger.info('<Task \"%s\"> Change state to SUCCESS: %r', body['id'], result)\n self.consumer.reply_state_success(\n reply_exchange=reply_exchange,\n reply_key=reply_key,\n id=body['id'],\n result=result\n )\n\n finally:\n logger.debug('<Task \"%s\"> Acknowledge the message.', body['id'])\n message.ack()", "def do_tt(self, arg):\n self.do_tasks(arg)", "def do_task(self, arg):\n def _usage():\n self.do_help('task')\n args = arg.split()\n if not len(args):\n print(self.error_wrong_parameters)\n return\n commands = ['delete', 'update']\n first_arg = args[0].lower()\n if first_arg not in commands:\n # Display the task info\n self.display_task_info(first_arg.decode('utf-8'))\n return\n if len(args) == 1:\n print(\"*** Error: The task is not specified.\")\n return\n if first_arg == 'update':\n self.update_task(args[1].decode('utf-8'))\n self.set_prompt()\n elif first_arg == 'delete':\n self.delete_task(args[1].decode('utf-8'))", "def task():", "def task():\n\n\tprint('Example task executed.')", "def run(self):\n# log.trace(\" run task %s \", self.name)\n return self.target.send(self.params)" ]
[ "0.79994476", "0.73845327", "0.73509413", "0.7112236", "0.7083711", "0.7071007", "0.6963635", "0.6931252", "0.6852798", "0.68159956", "0.6761331", "0.6748111", "0.66828847", "0.6653501", "0.6650906", "0.6561055", "0.65478855", "0.6540533", "0.6540533", "0.6481358", "0.64507014", "0.64507014", "0.6436006", "0.6421812", "0.64157444", "0.64082164", "0.64071256", "0.63670313", "0.63255906", "0.6310702" ]
0.74947923
1
Call a registered function.
def callfunc(self, name, *args, **kwargs): if name in self._funcs: return self._funcs[name](*args, **kwargs) else: raise Error("No such function: {0}".format(name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def callFunction(cmdname, far_args, far_kwargs, fn):\n ret = None\n if fn not in registered_functions:\n print(\"%s is not a registered function!\" % fn)\n return None\n try:\n funct = registered_functions[fn]\n ret = funct(*far_args,**far_kwargs)\n except:\n print(\"Error ocurred executing %s\" % fn)\n print(traceback.format_exc())\n \n return ret", "def call(self):\n self.call() # Call a function", "def call(fn, arg):\n return fn(arg)", "def call(self, func):\n\t\targs = tuple(self.__dict__.values())\n\t\ttry:\n\t\t\treturn eval(\"func\" + str(args))\n\t\texcept Exception, e:\n\t\t\traise ValueError(\"Given Function is not valid for calling: %s\" % e)", "def _call_function(self, svcname, fcallstr):\n try:\n argv = json.loads(fcallstr)\n except Exception as e:\n raise SearpcError('bad call str: ' + str(e))\n\n service = self.services[svcname]\n\n fname = argv[0]\n fn = service.func_table.get(fname, None)\n if fn is None:\n raise SearpcError('No such funtion %s' % fname)\n\n ret = fn(*argv[1:])\n return ret", "def call_function( function_name: str ) -> None:\n log.debug( f'function_name, ```{function_name}```' )\n checker = OpenTextbookChecker()\n safe_dispatcher = { 'build_keys': build_keys, 'check_opentextbook': checker.check_opentextbook }\n try:\n safe_dispatcher[function_name]()\n except:\n raise Exception( 'invalid function' )\n return", "def __call__(fun_name):", "def call(self, function: str):\n assert self._call is None, f\"Can only call one function per test! Already called {self._call}\"\n self._call = function", "def run_function(function_id):\n\n language = sys.modules[__name__] # to be used by the getattr\n\n global funcs\n funcName = funcs[function_id][1] # get the function name from the global dictionary funcs\n getattr(language, funcName)() #execute the chosen function", "def call(self, **kwargs):\n return getattr(self.resource, self.function)(**kwargs)", "def __call__(self, *args, **kw):\n return self.callable(*args, **kw)", "def register_callback(func): \n \n VoiceService.add_callback(func.__name__, func) \n\n return func", "def call(self, data):\n\t\treturn self.fnc(data)", "def callFunc(self, functionName, *args):\n getattr(self.codec, functionName)(args[0])\n return self.codec.stream.getvalue()", "def invoke_function(func, func_args, func_kwargs):\n\n return get_component(CLIPackage.COMPONENT_NAME).invoke_function(func, func_args,\n func_kwargs)", "def exec_function(self, args):\n raise NotImplementedError()", "def func_call(self, t):\n func, params = t\n func_name = func.value\n func.value = \"({}({}))\".format(func_name, params)\n return func", "def _Call(self, t):\n # check calls but let attributes check in their own dispatcher\n funcs = self._device_functions + self.pythonbuiltins + [self._input_message_var] # message_input variable is a valid function name as certain message types have arguments on iterator\n if isinstance(t.func, ast.Name):\n if (t.func.id not in funcs):\n self.RaiseWarning(t, \"Function call is not a defined FLAME GPU device function or a supported python built in.\")\n # dispatch even if warning raised\n self.dispatch(t.func)\n elif isinstance(t.func, ast.Lambda):\n self.dispatch(t.func) # not supported\n else:\n # special handler for dispatching member function calls\n # This would otherwise be an attribute\n self.dispatchMemberFunction(t.func, t) \n self.write(\"(\")\n self._CallArguments(t)\n self.write(\")\")", "def func(self, token):\n\n func_name = token.lexeme\n try:\n func = self.func_registry[func_name]\n except KeyError:\n raise 
Exception(f\"$'{func_name}' function is not registered\")\n\n return func", "def add_call(self, func, path):\n if path not in self.called_at:\n self.called_at[path] = {func}\n else:\n self.called_at[path].add(func)", "def func(*args, **kwargs):\n return call(*args, **kwargs) # pylint: disable = E1102", "def register_function(self, *args):\n if len(args) == 1:\n function = args[0]\n try:\n name = function.fact_name\n except AttributeError:\n name = function.__name__\n if name is None:\n raise Exception(\"Function does not have a name\")\n else:\n name, function = args\n self.functions[name] = function", "def registry_functions(function, args):\n if function == \"get_service\":\n from registry.get_service import run as _get_service\n return _get_service(args)\n elif function == \"register_service\":\n from registry.register_service import run as _register_service\n return _register_service(args)\n else:\n from admin.handler import MissingFunctionError\n raise MissingFunctionError()", "def __call__(self, key):\n\n def wrapper(func):\n self._registry[key] = func\n\n return wrapper", "def call_function(self):\n try:\n arg_list = self.argument_list()\n function_dict = {}\n info = []\n for name_arg in arg_list:\n type_arg = self.arguments_type[name_arg]\n function_dict[name_arg] = utils.value_from_rpc(self.argument(name_arg)[1])\n info.append('{0}({1}): {2}'.format(name_arg, type_arg, function_dict[name_arg]))\n\n log.info('Execute command \\'{0}\\' with arguments [{1}] from device \\'{2}\\''\n .format(self.name(), '; '.join(info), self.device.id))\n self.function(self.device, **function_dict)\n\n except Exception as err:\n t = traceback.format_exc()\n log.error('Command \\'{0}\\' raise exception: {1}'.format(self.name(), decode_string(t)))", "def __call__(self, fn=None, *args, **kwargs):\n if callable(fn):\n self.fn = fn\n\n return self", "def test_runs_given_function(self):\n from furious.processors import _handle_results\n\n processor = Mock()\n\n _handle_results({'_process_results': processor})\n\n processor.assert_called_once_with()", "def register_function(self, function, name=None):\n if name is None:\n name = function.__name__\n self.funcs[name] = function", "def call_hook(self, hook, *args, **kwargs):\n for function in self.hooks[hook]:\n function.__call__(*args, **kwargs)", "def call(self, message: Message) -> None:\n self.fn(message)" ]
[ "0.7237642", "0.69940555", "0.69778097", "0.6803792", "0.6746035", "0.6448594", "0.64403576", "0.6399366", "0.6324769", "0.62205297", "0.6144951", "0.6141472", "0.60906637", "0.6085542", "0.6026323", "0.6018302", "0.6010114", "0.6005678", "0.5979251", "0.595394", "0.59400034", "0.59355915", "0.58881706", "0.58764786", "0.5864594", "0.58608425", "0.58506095", "0.5831212", "0.58203095", "0.58049315" ]
0.7133207
1
Call a filter with a value.
def callfilter(self, name, value):
    if name in self._filters:
        return self._filters[name](value)
    else:
        raise Error("No such filter: {0}".format(name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def call_filter(\n self,\n name: str,\n value: t.Any,\n args: t.Optional[t.Sequence[t.Any]] = None,\n kwargs: t.Optional[t.Mapping[str, t.Any]] = None,\n context: t.Optional[Context] = None,\n eval_ctx: t.Optional[EvalContext] = None,\n ) -> t.Any:\n return self._filter_test_common(\n name, value, args, kwargs, context, eval_ctx, True\n )", "def set_FilterValue(self, value):\n super(GetCallbackDataInputSet, self)._set_input('FilterValue', value)", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def add_filter(self, name: str, value: any):\n self.filters[name] = value", "def _filterfunc(self,*args,**kwargs):\n self._filterfunc = self.f\n return self.f(*args,**kwargs)", "def evaluate_filter(self, x):\n raise NotImplementedError", "def filters(self, value):\n if not isinstance(value, dict):\n raise TypeError(\"input must be a dictionary\")\n\n self._filters = value", "def filter(self, param, value, op=None):\n if op is None:\n self.params[param] = value\n elif op in ('le', 'lt', 'ge', 'gt', 'like', 'not_like', 'ne'):\n param_key = '{param}__{op}'.format(param=param, op=op.upper())\n self.params[param_key] = value\n else:\n raise TypeError('Invalid operator: %r' % op)\n return self", "def call_test(\n self,\n name: str,\n value: t.Any,\n args: t.Optional[t.Sequence[t.Any]] = None,\n kwargs: t.Optional[t.Mapping[str, t.Any]] = None,\n context: t.Optional[Context] = None,\n eval_ctx: t.Optional[EvalContext] = None,\n ) -> t.Any:\n return self._filter_test_common(\n name, value, args, kwargs, context, eval_ctx, False\n )", "def _filter(self, *args, **kwargs) -> FilterRunner:\n if not callable(self.filter_type):\n self.fail('{cls}.filter_type is not callable.'.format(\n cls=type(self).__name__,\n ))\n\n if not args:\n self.fail(\n 'First argument to {cls}._filter '\n 'must be the filtered value.'.format(\n cls=type(self).__name__,\n ),\n )\n\n return FilterRunner(\n starting_filter=self.filter_type(*args[1:], **kwargs),\n incoming_data=args[0],\n capture_exc_info=True,\n )", "def filterby(self, filterval, valueoffilter):\n if valueoffilter == '':\n fatal([\n 'Invalid flag \"value\"',\n 'value is required to flag \"filter\"'\n ])\n\n ok = self.validate_filterval(filterval)\n\n if ok is False:\n fatal([\n 'Invalid flag \"filter\"',\n 'The available filter values are:',\n 'description (name)|fulldescription (description)|completed',\n 'Use instead:',\n '$ tasks-app show --filter=description|fulldescription|completed --value={}'.format(valueoffilter)\n ])\n\n if filterval == 'completed':\n if valueoffilter != 'True' and valueoffilter != 'False':\n fatal([\n 'Invalid flag \"value\"',\n 'the available values for completed filter flag are:',\n 'True|False',\n 'Use instead:',\n '$ tasks-app show --filter={filterval} --value=True|False',\n ])\n\n if filterval == 'completed':\n if valueoffilter == 'True':\n valueoffilter = 1\n elif valueoffilter == 'False':\n valueoffilter = 0\n\n if not filterval == 'completed':\n sql = 'SELECT * FROM Tasks WHERE {} LIKE \"{}%\"'.format(filterval, valueoffilter)\n else:\n sql = 'SELECT * FROM Tasks WHERE {} LIKE \"{}\"'.format(filterval, valueoffilter)\n\n conn = sqlite3.connect(DATABASE['file'])\n cur = conn.cursor()\n cur.execute(sql)\n\n if not len(list(cur)) == 0:\n print('Tasks found')\n\n cur.execute(sql)\n\n for description, fulldescription, completed in cur:\n if completed == 0:\n completed = 'Incompleted'\n else:\n completed = 'Completed'\n\n print(' > {} - {} ({})'.format(description, 
fulldescription, completed))\n\n cur.execute(sql)\n\n if len(list(cur)) == 0:\n print('No tasks found with search {}={}'.format(filterval, valueoffilter))\n\n conn.close()", "def Input(self, value):\n print(\"Input: {}\".format(value))\n # Add the new value to the filter values.\n self.Values.append(value)\n # If the filter has reached its maximum Depth,\n # pop the last item from the filter values.\n if len(self.Values) > self.Depth:\n s = 0\n self.Values.pop(s)\n print(\"Filter ({}): {}\".format(len(self.Values), self.Values))", "def filterInput(val, deadZone=0.0, filterFactor=1.0, scale=0.0):\n\n sign = 1.0\n if val < 0.0:\n sign = -1.0\n\n val = math.fabs(val)\n deadZone = math.fabs(deadZone)\n\n if val < deadZone:\n val = 0.0\n else:\n val = val * ((val - deadZone) / (1 - deadZone))\n\n output = val * ((filterFactor * (val**scale)) + ((1 - filterFactor) * val))\n output *= sign\n return output\n #try using tanh with import numpy for a different scaling.", "def add_filter(self, name, value, comparator='equals',\n case_sensitive=False):\n self.filters.append({'name': name, 'value': value,\n 'comparator': comparator,\n 'case_sensitive': case_sensitive,\n 'type': 'filter'})", "def filter(self, value, model=None, context=None):\n\n # string filter: skip non-strings\n if type(value) is not str:\n return value\n\n linker = Linker(**self.linkify_params)\n return linker.linkify(value)", "def use_filter(filter_func, url, input):\n output = filter_func(url, input)\n\n if output is None:\n # If the filter does not return a value, it is\n # assumed that the input does not need filtering.\n # In this case, we simply return the input.\n return input\n\n return output", "def add_value(self, value):\n if len(self.hist) < 2:\n BaseFilter.add_value(self, value)\n else:\n filtered_value = self.hist[-1] * self.alpha + value * (1.0 - self.alpha)\n BaseFilter.add_value(self, filtered_value)", "def add_filter(self, value=''):\n # Create the filter\n filter_ = FilterWithPlaceholder(self, value=value)\n filter_.focus_force()\n filter_.bind('<Return>',\n lambda evt: self.event_generate('<<FiltersReady>>'))\n\n def _on_typing_out_event(evt):\n if filter_.get() == '':\n self._filters.remove(filter_)\n filter_.grid_forget()\n filter_.destroy()\n filter_.bind('<<TypingOut>>', _on_typing_out_event)\n\n # Push the filter in the list\n self._filters = self._filters[:-1] + [filter_] + [self._filters[-1]]\n\n # Refresh the grid\n for (i, curr) in enumerate(self._filters):\n curr.grid(row=0, column=i, sticky='EW')\n curr.lift()\n\n return filter_", "def filter(n='I'):\n if n=='':\n n = 'I'\n if type(n) == str:\n fid = filtid(n)\n fnum = filtnum(fid)\n opticalcoupler.SelectFilter(fnum)\n camera.status.filterid = fid\n camera.status.filter = fnum\n logger.info('Moved to filter '+`n`)\n else:\n if (n>=1) and (n<=8):\n opticalcoupler.SelectFilter(n)\n camera.status.filterid = filtid(filtname(n))\n camera.status.filter = n\n logger.info('Moved to filter '+`n`)\n else:\n logger.error(\"Error in filter value: \"+repr(n))", "def filter(self, param, container = None):\n\n\t\tif param[0] in self.filters:\n\t\t\ttry:\n\t\t\t\tfilter_method = getattr(self, \"filter_\" + param[0])\n\t\t\t\tself.__printer.debug(\"Command\", \"Executing filter \" + param[0])\n\t\t\t\treturn filter_method(param, container)\n\t\t\texcept AttributeError:\n\t\t\t\tself.__printer.warning(\"Command\", \"Filter \" + param[0] + \" not implemented passing\")\n\t\t\t\tpass\n\n\t\t\treturn param", "def filter(self, *args, **kwargs):", "def access_filter(f):\n 
return AccessFilter(f)", "def filter_by(self, key: str, *args, **kwargs):\n filter_ = self.filters.get(key)\n if not filter_:\n raise ValueError(key)\n return filter_(*args, **kwargs)", "def add_filter(self, f):\n raise NotImplementedError", "def terraform_output_filter(filter, payload):\n if filter in payload:\n return payload[filter]['value']\n else:\n return None", "def set_sensitive_to_filter(self, sensitive_name, sensitive_val):\n self.name += str(sensitive_val)\n self.sensitive_filter = sensitive_val\n self.sensitive_for_metric = sensitive_name", "def set_FilterName(self, value):\n super(GetCallbackDataInputSet, self)._set_input('FilterName', value)", "def filter(self, function):\n return FunctionalWrapper(filter(function, self.data))", "def setFilter(self, column, value) -> None:\n if not self.hasFilter(column):\n column_name = self._dataframe.columns[column]\n self._filters[column_name] = value\n self._applyFilters()", "def AddFilter(query, property_filter, value):\n p = property_filter.split()[0]\n # pylint: disable-msg=W0212\n assert p in query._model_class.properties()\n query.filter(property_filter, value)" ]
[ "0.76371175", "0.6945337", "0.66474724", "0.6641272", "0.6295771", "0.62570554", "0.6163652", "0.6126321", "0.6041286", "0.6038569", "0.5961211", "0.5926597", "0.5915547", "0.5905825", "0.58687717", "0.5861561", "0.58000666", "0.5730729", "0.5719642", "0.5706328", "0.56944907", "0.56803346", "0.5665278", "0.565565", "0.5605268", "0.55523944", "0.5518047", "0.55131555", "0.54756874", "0.54747885" ]
0.8607307
0
Find the task file.
def find_taskfile(self):
    filename = self.cmdline.file
    curdir = self.cmdline.dir

    if "load" in self.cmdline.verbose:
        self.env.errorln("Taskrun search directory: {0}".format(curdir))
        self.env.errorln("Taskrun search filename: {0}".format(filename))
        self.env.errorln("Taskrun walk path: {0}".format(str(self.cmdline.walk)))

    self.taskfile = None
    while True:
        taskfile = os.path.join(curdir, filename)
        if os.path.isfile(taskfile):
            if "load" in self.cmdline.verbose:
                self.env.errorln("Task file found: {0}".format(taskfile))
            self.taskfile = taskfile
            return

        if not self.cmdline.walk:
            return

        (head, _) = os.path.split(curdir)
        if head and head != curdir:
            curdir = head
        else:
            break
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_taskfile(self, refobj):\n tfid = cmds.getAttr(\"%s.taskfile_id\" % refobj)\n try:\n return djadapter.taskfiles.get(pk=tfid)\n except djadapter.models.TaskFile.DoesNotExist:\n raise djadapter.models.TaskFile.DoesNotExist(\"Could not find the taskfile that was set on the node %s. Id was %s\" % (refobj, tfid))", "def get_task(self, key_task):\n task = None\n scanned_tasks = []\n\n with open(self.path_to_task_file, 'r') as file:\n for line in file:\n current_task = Task()\n current_task.load(line)\n\n if current_task.key == key_task:\n task = current_task\n else:\n scanned_tasks.append(line)\n\n self.check_time(task)\n self.save_scanned_tasks(scanned_tasks) # return unsuccessful tasks in file\n return task", "def task_file(self) -> str:\n return self._task_file", "def _find_file(self, name, check_dir='c_files'):\n testdir = os.path.dirname(__file__)\n name = os.path.join(testdir, check_dir, name)\n return name", "def get(self, guid):\n results = j.sal.fs.find(self._root, '*_%s' % guid)\n if len(results) <= 0:\n raise TaskNotFoundError(\"task %s not found\" % guid)\n if len(results) > 1:\n raise RuntimeError(\"found 2 tasks with same guid, this should not happen\")\n return self._deserialize_task(j.sal.fs.readFile(results[0]))", "def _findfile(self, path):\n return DataSource._findfile(self, self._fullpath(path))", "def task_binary_location(cmd=\"task\"):\n return binary_location(cmd, TASK_USE_PATH)", "def find_task(self, task_str):\n task_str = task_str.replace('é', 'e').title()\n task_not_found = True\n custom_quest = False\n if \":\" in task_str:\n task_strs = task_str.split(\":\")\n task_str = task_strs[0]\n quest_str = task_strs[1]\n custom_quest = True\n while task_not_found:\n for task in self.tasks:\n if (task_str == task.reward.title()) or (task_str == task.quest.replace('é', 'e').title()) or (task_str in (reward.title() for reward in task.rewards)) or (task_str in (nickname.title() for nickname in task.nicknames)):\n out_task = task\n task_not_found = False\n if custom_quest:\n out_task = copy.copy(task)\n out_task.quest = quest_str.title()\n return out_task\n break\n if task_not_found:\n raise TaskNotFound()", "def search_string(command):\n try:\n my_file.search_string(command[1])\n except FileNotFoundError:\n print('No file has been read yet')", "def _find_config_file(self) -> str or None:\n import os\n\n for path in self.paths:\n path = os.path.expanduser(path)\n for extension in self.file_extensions:\n for file_name in self.file_names:\n file_path = os.path.join(path, \"{}.{}\".format(file_name, extension))\n if os.path.isfile(file_path):\n return file_path\n\n return None", "def _FindTemplateFile(self, topdir):\n if topdir.endswith('..'):\n topdir = '/'.join(topdir.split('/')[:-2])\n fnames = os.listdir(topdir)\n for fname in fnames:\n filename = '%s/%s' % (topdir, fname)\n if filename.endswith('.yaml') and not os.path.isdir(filename) and \\\n os.path.exists(filename):\n f = open(filename, 'r')\n magic_code = f.read(22)\n f.close()\n if '#!fmri_file_template' in magic_code:\n return filename\n return None", "def find(self, task_id):\n _structs = [\n self.stack,\n self.backlog,\n self.blocked,\n self.sleeping,\n ]\n for struct in _structs:\n try:\n task_obj = struct.find(task_id)\n return task_obj\n except LookupError:\n # not found; try next structure\n continue\n\n # the graveyard is just a list; search it\n for task_obj in self.graveyard:\n if task_obj.id.startswith(task_id):\n return task_obj\n\n raise LookupError(\"No such task: '{}'\".format(task_id))", "def 
find(self, task_id):\n for task_obj in self.stack:\n if task_obj.id.startswith(task_id):\n return task_obj\n\n raise LookupError(\"No such task in stack: '{}'\".format(task_id))", "def find(self, task_id):\n for task_obj in self.queue:\n if task_obj.id.startswith(task_id):\n return task_obj\n\n raise LookupError(\"No such task in queue: '{}'\".format(task_id))", "def find_file(path):\n return NotImplemented", "def find(self, task_id):\n for task_obj in self._queue:\n if task_obj.id.startswith(task_id):\n return task_obj\n\n raise LookupError(\"No such task in dorm: '{}'\".format(task_id))", "def test_task_finder(test_operator, task_name, task_type):\n found_task = test_operator.find_task(task_name, task_type=task_type)\n assert found_task", "def get_task_by_name(self, task_name):\n for task in self.tasks:\n if task.name == task_name:\n logger.debug(\"Returning task with name '%s': '%s'\", task_name, task.to_xml_string())\n return task\n raise ValueError(\"A step task with the name {} can not be found.\".format(task_name))", "def readtask(self,filename_): # 3\n res = self.__obj.readtask(filename_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def locate_file(self, filename):\n return locate_file(filename, self.observatory)", "def find_file_by_name(point,target):\n launcher_path = \"\"\n for (path, dir, files) in os.walk(point):\n for filename in files:\n # if target in filename:\n if filename == target:\n launcher_path = os.path.join(path, filename)\n return launcher_path", "def test_task_dir(self):\n return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tasks')", "def find(self, name):\n path = self.directory.joinpath(name).with_suffix('.yaml')\n if path.is_file():\n return self.from_path(path)\n raise LookupError(\"Job {} does not exist\".format(repr(name)))", "def find_file(file_name):\n if (pathlib.Path(file_name).resolve()):\n file_name = str(file_name)\n logging.info(f' found {file_name}.')\n return file_name\n else:\n logging.error(f' no file {file_name} found for processing.')\n sys.exit()", "def get_task(self, code: str) -> \"Task\": # noqa: F821\n if code not in self.tasks:\n raise PyDSTaskNoFoundException(\n \"Task with code %s can not found in process definition %\",\n (code, self.name),\n )\n return self.tasks[code]", "def __findFileName(self):\n self.ui.showFindFileByNameDialog()", "def _find_tif_file(self):\n name = self.results_file.name[:-12] + \".tif\"\n try:\n tif_file = next(self.results_file.parent.glob(name))\n return tif_file\n except StopIteration:\n print(f\"Tif not found for {name}\")\n return None", "def readtask(self,filename_):\n if isinstance(filename_,unicode):\n filename_ = filename_.encode(\"utf-8\",errors=\"replace\")\n res = __library__.MSK_XX_readtask(self.__nativep,filename_)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def search_summary(self, target, name=\"*summary.txt\"):\n summary_found = glob.glob(os.path.join(self.path, name))\n if summary_found:\n summary_name = summary_found[0]\n if os.path.exists(summary_name):\n with open(summary_name, \"r\") as summary_file:\n for line in summary_file:\n if target in line:\n return line[:-1]\n return None", "def find_task(self):\n date_input = raw_input(\"Enter Due Date of task year-mm-dd > \")\n try:\n if date_input.strip() != '':\n year, month, day = (int(i) for i in date_input.split('-'))\n date = datetime.date(year, month, day)\n else:\n date = None\n task_list = self.current_collection.get_task_list(date)\n 
except (ValueError, IndexError) as e:\n print raw_input(\"Error. Not a valid date: %s\\nPress Enter\" % e)\n return False\n\n # We have a valid date, print out the tasks\n self.clear_screen()\n for i, task in enumerate(task_list):\n print \"Task ID: {0}\".format(i)\n self.display_task(task)\n\n # Return a valid task\n index_input = raw_input(\"Enter task ID. > \")\n try:\n index = int(index_input)\n sel_task = task_list[index]\n except ValueError:\n raw_input(\"Not a valid index.\\nPress Enter.\")\n return False\n return sel_task" ]
[ "0.6496904", "0.64604664", "0.64055175", "0.62375754", "0.62346745", "0.6191733", "0.61508954", "0.61214113", "0.608487", "0.6081849", "0.60280293", "0.60252696", "0.6012723", "0.5995425", "0.5991104", "0.59663683", "0.5947834", "0.59477603", "0.5941024", "0.59160525", "0.59160316", "0.58777344", "0.58704865", "0.58593386", "0.58100116", "0.5781085", "0.57652724", "0.5750494", "0.5728155", "0.5709015" ]
0.820012
0
Return the tasks and parameters.
def get_tasks_params(self):
    params = {}
    tasks = []

    for cmdparam in self.cmdline.params:
        if ":" in cmdparam:
            # task:NAME=VALUE:NAME=VALUE:NAME=VALUE
            parts = cmdparam.split(":")
            taskparams = {}
            for taskparam in parts[1:]:
                if "=" in taskparam:
                    (name, value) = taskparam.split("=", 1)
                    if name[:1] == "_" or name[-1:] == "_":
                        raise Error("Setting special from command line not allowed")
                    taskparams[name] = value
            tasks.append((parts[0], taskparams))
        elif "=" in cmdparam:
            # NAME=VALUE
            (name, value) = cmdparam.split("=", 1)
            if name[:1] == "_" or name[-1:] == "_":
                raise Error("Setting special _VARIABLES_ from command line not allowed")
            params[name] = value
        else:
            # taskname
            tasks.append((cmdparam, {}))

    return (tasks, params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tasks():", "def get_tasks(self) -> Dict[str, Any]:\n\n ret = {}\n for k, id in self.required_tasks.items():\n ret[k] = self.storage_socket.get_procedures(id=id)[\"data\"][0]\n\n return ret", "def get_task_info(self):\n\n print()\n employee_name = self.task.get_employee_name()\n task_name = self.task.get_task_name()\n mins = self.task.get_time_spent()\n notes = self.task.get_notes()\n date = self.task.get_date()\n\n task = {\n 'employee_name': employee_name,\n 'task_name': task_name,\n 'mins': mins,\n 'notes': notes,\n 'date': date\n }\n\n return task", "def task_parameters(self):\n yield self.properties", "def get_params():\n\n parser = get_params_parser()\n args = parser.parse_args()\n\n tasks = [args.raw, args.enrich, args.identities_load, args.identities_merge, args.panels]\n\n if not any(tasks):\n print(\"No tasks enabled\")\n sys.exit(1)\n\n return args", "def get_tasks(self):\n return self.stn.get_tasks()", "def get_tasks(self):\n return self.tasks", "def get_task_list(self):\n raise NotImplementedError()", "async def list_tasks():", "def task(self):", "def task(self):", "def generate_tasks(self, task):", "def get_tasks():\n tasks = []\n example_dir = os.path.normpath(os.path.join(\n os.path.dirname(__file__), '../../openshift/ansiblegen/examples/')\n )\n yaml_names = os.listdir(example_dir)\n for yaml_name in yaml_names:\n _, api_version, resource = yaml_name.split('_', 2)\n resource = resource[0:-4]\n yaml_path = os.path.join(example_dir, yaml_name)\n\n with open(yaml_path, 'r') as f:\n data = yaml.load(f)\n\n tasks.append(((api_version, resource), data))\n return tasks", "def get_tasks(self):\n res = self.conn.cursor().execute(\"SELECT * FROM tasks\")\n return res.fetchall()", "def tasks(self):\n args = Namespace(rev=self.rev)\n data = run_query('push_results', args)['data']\n\n tasks = []\n for kwargs in data:\n # Do a bit of data sanitization.\n if any(a not in kwargs for a in ('label', 'duration', 'result', 'classification')):\n continue\n\n if kwargs['duration'] <= 0:\n continue\n\n tasks.append(Task(**kwargs))\n\n return tasks", "def get_map_task_params(self):\n return {}", "def task():", "def get_tasks(self, task_id=None):\n # Recover all config from OpenVAS\n if task_id:\n return self.make_xml_request('<get_tasks id=\"%s\"/>' % name, xml_result=True)\n else:\n return self.make_xml_request(\"<get_tasks />\", xml_result=True)", "def get(self, controller, data, *args, **kwargs): \n task_manager = controller.get_task_manager()\n res = task_manager.get_all_tasks(details=True)\n resp = {\n u'task-instances':res,\n u'count':len(res)\n } \n return resp", "def get(self):\n\n return task_service.get_tasks()", "def get_all_tasks(self):\n return [\n self.create_virtual_environment,\n self.doc,\n self.install,\n self.lint,\n self.make_distribution,\n self.reset,\n self.setup,\n self.test,\n ]", "def get_tasks(self):\n return [getattr(self, k).value() for k in self._node_dict.values()]", "def create_tasks(self):\n self.create_passport_task()\n\n self.create_visa_task()\n\n self.create_vaccines_task()\n self.create_malaria_task()\n\n self.create_weather_task()\n self.create_flight_needs_task()\n self.create_banking_task()\n\n self.create_insurance_task()\n\n self.create_systematic_tasks() # 3 tasks\n\n if self.trip.return_date_time is None or\\\n self.trip.return_date_time - self.trip.arrival_date_time > timedelta(days=14):\n\n self.create_long_travel_task()\n\n for task in self.tasks:\n task.auto = True\n\n return self.tasks", "def subtasks(self):\n return tuple(self._tasks)", 
"def Params(cls):\n p = super().Params()\n p.Define('train_task', None, 'Underlying task')\n p.Define('decode_task', None, 'Underlying task')\n p.Define('train_dataset_name', None, '')\n p.Define('decode_dataset_name', None, '')\n p.Define('train_steps_per_loop', 0, '')\n p.Define('decode_steps_per_loop', 0, '')\n return p", "def parse_settings(self, requested_kwargs):\n kwargs = {}\n task_list = []\n for qb in self.qubits:\n task = {}\n task_list_fields = requested_kwargs['task_list_fields']\n\n transition_name_v = task_list_fields.get('transition_name')\n tr_name = self.get_param_value('transition_name',\n qubit=qb.name,\n default=transition_name_v[1])\n task['transition_name'] = tr_name\n\n value_params = {'v_low': None, 'v_high': None, 'pts': None}\n # The information about the custom parameters above could be\n # Saved somewhere else to generalize all wrappers\n\n default = self.get_param_value(f'default_{tr_name}_amp180',\n qubit=qb.name)\n current = qb.parameters[f'{tr_name}_amp180']()\n max = self.get_param_value('max_drive_amp', qubit=qb.name)\n n = self.get_param_value('n', qubit=qb.name)\n\n for name, value in value_params.items():\n value = self.get_param_value(name, qubit=qb.name)\n if isinstance(value, str):\n value = eval(\n value.format(current=current,\n max=max,\n default=default,\n n=n))\n value_params[name] = value\n\n sweep_points_v = task_list_fields.get('sweep_points', None)\n if sweep_points_v is not None:\n # Get first dimension (there is only one)\n # TODO: support for more dimensions?\n sweep_points_kws = next(iter(\n self.kw_for_sweep_points.items()))[1]\n values = np.linspace(value_params['v_low'],\n value_params['v_high'],\n value_params['pts'])\n task['sweep_points'] = SweepPoints()\n task['sweep_points'].add_sweep_parameter(values=values,\n **sweep_points_kws)\n qb_v = task_list_fields.get('qb', None)\n if qb_v is not None:\n task['qb'] = qb.name\n\n for k, v in task_list_fields.items():\n if k not in task:\n task[k] = self.get_param_value(k,\n qubit=qb.name,\n default=v[1])\n\n task_list.append(task)\n\n kwargs['task_list'] = task_list\n\n kwargs_super = super().parse_settings(requested_kwargs)\n kwargs_super.update(kwargs)\n\n return kwargs_super", "def get(self):\n gid = self.get_query_argument('gid', None)\n\n if gid: # get a specified task\n self.write(update_fields(\n self._rpc.aria2.tellStatus(self._token, gid, TASK_FIELDS)))\n\n else: # get all tasks\n active_tasks = self._rpc.aria2.tellActive(self._token, TASK_FIELDS)\n waiting_tasks = self._rpc.aria2.tellWaiting(\n self._token, -1, 100, TASK_FIELDS)\n stopped_tasks = self._rpc.aria2.tellStopped(\n self._token, -1, 100, TASK_FIELDS)\n all_tasks = [\n update_fields(task) for task in\n itertools.chain(active_tasks, waiting_tasks, stopped_tasks)\n ]\n self.write({'tasks': all_tasks})", "def get_tasks(self):\n return self.tasks.all()", "def get_task_parameters_as_string(self):\n\t\treturn call_sdk_function('PrlRunningTask_GetTaskParametersAsString', self.handle)", "def get_running_task_dicts(tasks):\n running_task_dicts = []\n with database.engine.begin() as connection:\n for task in tasks:\n print(json.loads(task.meta))\n job = Job.fetch(task.id, connection=redis_conn)\n project = connection.execute(select([sqlalchemy.text(\n '*')]).select_from(models.projects).where(models.projects.c.project_id == task.project_id)).first()\n task_dict = dict(id=task.id, name=task.name, description=task.description,\n complete=task.complete, result=task.result, progress=task.get_progress(), 
project_id=task.project_id)\n task_dict['meta'] = json.loads(\n task.meta) if task.meta is not None else {}\n\n if job:\n task_dict['status'] = job.get_status()\n # task_dict['started_at'] = datetime.datetime.fromtimestamp(\n # task_dict['meta']['scheduled_at'])\n # print('scheduled_at: {}'.format(task_dict['started_at']))\n if project:\n task_dict['project_name'] = project['name']\n running_task_dicts.append(task_dict)\n return running_task_dicts" ]
[ "0.74223137", "0.7187272", "0.7059916", "0.6921149", "0.6918603", "0.69120604", "0.6889255", "0.6680344", "0.66059226", "0.6580175", "0.6580175", "0.65769786", "0.65203404", "0.6516762", "0.6511607", "0.64936393", "0.64494795", "0.6430884", "0.63634413", "0.63324594", "0.63172007", "0.63106894", "0.63011503", "0.6294913", "0.6287398", "0.6268014", "0.62608963", "0.6241105", "0.6234803", "0.6225613" ]
0.72699475
1
Construct an integer interval that includes both ends lb and ub.
def index_interval(lb: int, ub: int, nbits=None, graycode=False) -> List[int]:
    if graycode:
        assert nbits is not None
    else:
        assert lb <= ub

    window = []
    i = lb
    while True:
        window.append(i)
        if i == ub:
            break
        i = increment_index(i, 1, nbits, graycode)
    return window
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _bi_range(start, end):\n if start == end:\n return (start,)\n\n elif end < start:\n return reversed(range(end, start + 1))\n\n else:\n return range(start, end + 1)", "def rangify(v, lb, ub):\n if lb >= ub:\n lb, ub = ub, lb\n return max(min(v, ub), lb)", "def new_range(r):\n if isinstance(r, list) or isinstance(r, tuple) and len(r) == 2:\n lower = r[0]\n upper = r[1]\n else:\n lower = r\n upper = r\n lower = int(lower)\n upper = int(upper)\n return range(lower, upper + 1)", "def binrange(_min, _max, stepsize, include_upper=False):\n _min = _min - _min % stepsize\n _max = _max - _max % stepsize + stepsize * (1 + include_upper)\n return np.arange(_min, _max, stepsize)", "def remap_interval(val, in_start, in_end, out_start, out_end):\n in_range = in_end-in_start\n out_range = out_end-out_start\n return (val-in_start)/in_range*out_range+out_start", "def clean_int(\n i: int,\n ub: int,\n lb: int = 0,\n ) -> int: \n\n # Initialisations\n i_temp = i\n\n # Check if the integer is above the upper bound\n if i_temp > ub:\n\n # Set it to the upper bound\n i_temp = ub\n\n # Check if the integer is below or equal to the lower bound\n elif i_temp <= lb:\n\n # Set it to one above the lower bound\n i_temp = lb + 1\n\n return i_temp", "def break_at_integer(interval):\n unproven = []\n lower_ceil = math.ceil(interval.lower) if interval.lower < math.ceil(interval.lower) else interval.lower + 1\n while lower_ceil != math.ceil(interval.upper):\n unproven.append(\n FractionInterval.open_closed(interval.lower, lower_ceil)\n )\n interval = FractionInterval.open_closed(lower_ceil, interval.upper)\n lower_ceil += 1\n if interval:\n unproven.append(interval)\n\n return unproven", "def test_inclusive_intervals(self):\n dim = Integer(\"yolo\", \"uniform\", -3, 5.5)\n assert dim.interval() == (-3, 3)", "def interval(start, stop=None, step=1):\n if stop is None:\n start, stop = 0, start\n result = []\n i = start\n while i < stop:\n result.append(i)\n i += step\n return result", "def se2interval(a, b):\n\n Iab = (a,neg(b))\n return Iab", "def from_inclusive(a, b):\n c = int(b > a)*2-1\n return range(a, b+c, c)", "def _translate_range(self, len_, start, end):\n start = int(start)\n end = int(end)\n if start < 0:\n start += len_\n start = max(0, min(start, len_))\n if end < 0:\n end += len_\n end = max(-1, min(end, len_ - 1))\n return start, end", "def get_interval(interval):\n interval_list = interval.split(\"-\")\n if len(interval_list) == 1:\n return (int(interval_list[0]), int(interval_list[0])+1)\n else:\n return (int(interval_list[0]), int(interval_list[1])+1)", "def _builtin_between(low, high, value, **k):\n mode = check_mode((low, high, value), ['iii', 'iiv'], functor='between', **k)\n low_v = int(low)\n high_v = int(high)\n if mode == 0: # Check\n value_v = int(value)\n if low_v <= value_v <= high_v:\n return [(low, high, value)]\n else: # Enumerate\n results = []\n for value_v in range(low_v, high_v + 1):\n results.append((low, high, Constant(value_v)))\n return results", "def range_maker(low, hi, step, lst=None):\n return numpy.arange(low, hi, step)", "def ranged_int(A, B=FLOAT_INF):\n\n class ranged_int(int):\n \"\"\"Int type in [A; B] range.\"\"\"\n\n def __init__(self, value):\n assert A <= int(value) <= B, value\n super(ranged_int, self).__init__()\n\n return ranged_int", "def intervalle(bMin, bMax):\n\tfor i in range(bMin+1,bMax):\n\t\tyield i", "def between(minl:int, maxl:int) -> str:\n return f\"{{{minl},{maxl}}}\"", "def bed_to_interval(contig, bed_start, bed_end, name='', score='', strand='',\n 
block_ids='', superblock_ids=''):\n try:\n # assure positions to be integers\n # convert from 0,1-based to 1,1-based positions\n start = int(bed_start) + 1\n end = int(bed_end)\n except ValueError:\n raise ValueError(\"'start' and 'end' should be integers\")\n\n # perform sanity check to check for incorrect formatting\n assert (end - start) >= 0, (\"Not a valid BED interval.\"\n \"(bedEnd - bedStart) must be >= 0.\")\n\n # fallback to empty list for optional element ids\n ids = [element_ids.split(',') if element_ids else []\n for element_ids in (block_ids, superblock_ids)]\n\n return BaseInterval(contig, start, end, name, score, strand, *ids)", "def mergeIntervals(int1,int2):\n newint=interval('(-1,1)') \n if int1.minval>int2.minval or (int2.lrbd=='(' and int1.minval==int2.minval):\n int1,int2=int2,int1\n \n if isMergeable(int1,int2):\n newrtNum=max(int1.rtnum,int2.rtnum)\n if newrtNum==int2.rtnum:\n newint=interval(int1.lrbd+str(int1.lfnum)+','+str(newrtNum)+int2.upbd)\n else:\n newint=interval(int1.lrbd+str(int1.lfnum)+','+str(newrtNum)+int1.upbd)\n\n else:\n raise Cant_be_merged('Can\\'t be merged')\n \n return newint", "def range_inclusive(start, stop):\n return range(start, stop + 1)", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n return (val - input_interval_start)/float(input_interval_end - input_interval_start)*(\n output_interval_end - output_interval_start) + output_interval_start", "def get_dec_i_range(data_decs):\n data_dec_is = map_list(lambda x: (x - 1800)//10, data_decs)\n lower_range_i = data_dec_is[0]\n upper_range_i = data_dec_is[-1] + 1\n return (lower_range_i, upper_range_i)", "def intervals(start, end, delta):\n intervals = []\n current = copy.deepcopy(start)\n while current < end:\n intervals.append((unix_to_iso(current.strftime('%s')),\n unix_to_iso((current + delta).strftime('%s'))))\n current += delta\n return intervals", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n \n return ((float(val-input_interval_start) * (output_interval_end-output_interval_start)) / (input_interval_end-input_interval_start)) + output_interval_start", "def create_bound_for_scipy(lb, ub):\n lb = tuple(map(convert_inf_to_none, lb))\n ub = tuple(map(convert_inf_to_none, ub))\n return list((lb[i], ub[i]) for i in range(len(ub)))", "def interval(self):\n return Interval(self._ll_tree.get_left(), self._ll_tree.get_right())", "def remap_interval(val, input_interval_start, input_interval_end, output_interval_start, output_interval_end):\n convert = float (val - input_interval_start) / float ( input_interval_end - input_interval_start)\n output1 = output_interval_start + convert* (output_interval_end - output_interval_start)\n return output1", "def define_intervals(self):\n i = 5 # a step of increment\n interval_sum = self.min_step\n interval_list = [self.min_step]\n while interval_sum < self.max_step:\n interval_sum += i\n interval_list.append(interval_sum)\n # interval_list.append(self.max_step)\n # print(\"Intervals\", interval_list)\n return interval_list", "def findInterval(intervals,interval):\n low,ind = algorithms.binsearch(intervals,interval.start-1,lambda a,b: cmp(a.start,b))\n return (low,ind)" ]
[ "0.6615083", "0.65613925", "0.6474376", "0.642171", "0.6403943", "0.6369164", "0.6296812", "0.6278263", "0.626915", "0.6248147", "0.62247044", "0.6186229", "0.61436677", "0.6137699", "0.61253625", "0.6121021", "0.61116767", "0.60257053", "0.60177577", "0.5968685", "0.5955275", "0.5933669", "0.5919658", "0.59194154", "0.59171116", "0.59066755", "0.59052527", "0.5904727", "0.5852452", "0.58335686" ]
0.66095185
1
Increment a bitvector's value +1 or -1.
def increment_bv(bv, increment: int, graycode=False, saturate=False) -> BitVector:
    assert increment == 1 or increment == -1
    nbits = len(bv)
    if graycode:
        index = graytobin(bv2int(bv))
        index = (index+increment) % 2**nbits
        return int2bv(bintogray(index), nbits)
    else:
        if bv == tuple(True for i in range(nbits)) and increment > 0:
            if saturate:
                return bv
            raise ValueError("Bitvector overflow for nonperiodic domain.")
        if bv == tuple(False for i in range(nbits)) and increment < 0:
            if saturate:
                return bv
            raise ValueError("Bitvector overflow for nonperiodic domain.")
        return int2bv(bv2int(bv) + increment, nbits)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def increment(val):\n return coerce_to_int(val) + 1", "def INC(self, value):\n result = (value + 1) & 0xff\n self.reg.N = result >> 7\n self.reg.Z = result == 0\n return result", "def increment(self):\r\n return self.add(1)", "def update(self, idx, x):\n while idx < len(self.bit):\n self.bit[idx] += x\n idx |= idx + 1", "def inc(self):\n self._value += 1", "def __iadd__(self, increment):\n self.update(self.val + increment)\n return self", "def increment(b): \n if b == 11111111:\n return 00000000\n else:\n b = bin_to_dec(b)\n b = b + 1\n res = dec_to_bin (b)\n if len(res) == 8:\n return res\n else:\n c = 8 - len(res)\n return c*'0' + res", "def __add__(self, v):\n self.n += 1\n self.cnt[v] += 1\n tmp = self.cnt[v]\n if tmp > self.most:\n self.most = tmp\n self.mode = v\n return v", "def increment(self):\n self.data[self.pointer] += 1\n self.data[self.pointer] %= 256", "def update(self, idx, add):\n idx += 1\n while idx < len(self.array):\n self.array[idx] += add\n idx += idx & -idx #Adding the last bit", "def _add(self, i, k):\n while i < self._size:\n self._bit[i] += k\n i += lsb(i)", "def increment(cls, value):\r\n value.value += 1", "def inc(reg, registers):\n registers[reg] += 1\n return 1", "def increment(self, index, value):\n self._inrange(index)\n if value==0:\n return\n found,ii = self._find_index(index)\n if found:\n self.value[ii] += value\n if self.value[ii] == 0:\n del self.index[ii]\n del self.value[ii]\n else:\n self.index.insert(ii, index)\n self.value.insert(ii, value)", "def inc(self, params):\n reg = params[0]\n if self.reg_dct[reg] == (2 ** 32) - 1:\n self.reg_dct[reg] = 0\n else:\n self.reg_dct[reg] += 1", "def increment(self) -> global___Expression:", "def update_bit(num, i, v):\n return num & ~(1 << i) | (v << i)", "def increase_counter(self):\n self.values = self.values + 1", "def incrment_1(x):\n return(x + 1)", "def incr_operand(self):\n pass", "def increment2(cls, var):\r\n var += 1", "def inc(i):\n i += 1\n return i", "def inc(self):\n with self.mutex:\n self.value += 1\n return self.value", "def add(self, val):\n self[val] += 1", "def increment(x): # pylint: disable=invalid-name\n return x + 1", "def incr(self, x, term=1):\n self.d[x] = self.d.get(x, 0) + term", "def add(self, i: int, v: int) -> None:\n while i < self.size:\n self.tree[i] += v\n i += self._lsb(i)", "def increment(self, inc):\n self.done += inc", "def succ(x=0):\n return 1+x", "def add_one(x):\n return x + 1" ]
[ "0.70842975", "0.6988396", "0.69276345", "0.6912323", "0.6882834", "0.66443974", "0.6637201", "0.6629569", "0.660645", "0.65473056", "0.65456295", "0.6478258", "0.6445333", "0.64219457", "0.64066106", "0.6358836", "0.63567686", "0.6297039", "0.6286662", "0.62488574", "0.6218182", "0.6212331", "0.6199306", "0.6174166", "0.6164157", "0.6092118", "0.60803753", "0.60594124", "0.60128325", "0.59837246" ]
0.7331502
0
Converts bitvector (list or tuple) with the standard binary encoding into an integer.
def bv2int(bv: BitVector) -> int:
    nbits = len(bv)
    index = 0
    for i in range(nbits):
        if bv[i]:
            index += 2**(nbits - i - 1)
    return index
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bitlist_to_int(bitlist):\n return sum([int(b) for b in bitlist])", "def bin_to_int(bit_string):\r\n return int(''.join(bit_string), 2)", "def _convert_to_int(backing: List[int]) -> int:\n return int.from_bytes(backing, byteorder=\"little\", signed=True)", "def bitstring_to_int(bitstr):\n b_list = bitstr.tolist()\n mystring = ''\n for s in b_list:\n if s:\n mystring += '1'\n else:\n mystring += '0'\n b = int(mystring, 2)\n return b", "def vecToInt(m):\r\n return (m[0] << 12) + (m[2] << 8) + (m[1] << 4) + m[3]", "def byteslist_to_int(byteslist):\n holder = bytes()\n for v in byteslist:\n holder += v\n return int.from_bytes(holder, byteorder='big')", "def bytes_to_int(bs):\n v = 0\n p = 0\n for b in reversed(bs):\n v += b * (2 ** p)\n p += 8\n return v", "def from_bit_array(bin_list):\n print(bin_list)\n byte_list = [hex(int(x,2)) for x in bin_list]\n result = bytes([int(x,0) for x in byte_list])\n return result", "def bytes_to_int(obj):\n return functools.reduce(lambda x, y: x << 8 | y, obj)", "def bitstr_to_int(a):\n return int(a, 2)", "def combine_to_int(values):\n multibyte_value = 0\n for byte_id, byte in enumerate(values):\n multibyte_value += 2**(4 * byte_id) * byte\n return multibyte_value", "def bin2int(r: str) -> int:", "def _bitfields_to_ints(bit_state, vrs):\n int_state = dict()\n for var, dom in vrs.items():\n if dom == 'boolean':\n int_state[var] = bit_state[var]\n continue\n bitnames = ['{var}@{i}'.format(var=var, i=i)\n for i in range(dom[1].bit_length())]\n bitnames[0] = '{var}@0.{min}.{max}'.format(\n var=var, min=dom[0], max=dom[1])\n bitvalues = [bit_state[b] for b in bitnames]\n # little-endian\n val = int(''.join(str(b) for b in reversed(bitvalues)), 2)\n int_state[var] = val\n return int_state", "def b2i(bts):\n return int(binascii.hexlify(bts), 16)", "def _pack_bytes(byte_list):\n return int.from_bytes(byte_list, 'big', signed=False)", "def dummies2int(cBin):\n c = []\n for i in cBin.tolist():\n c.append(i.index(1) + 1)\n return c", "def list2int(lst):\n def foo(x, y):\n return (x << 1) + y\n return reduce(foo, reversed(lst), 0)", "def bits_to_int(jit_bits: ir_bits.Bits, signed: bool) -> int:\n assert isinstance(jit_bits, ir_bits.Bits), jit_bits\n bit_count = jit_bits.bit_count()\n bits_value = jit_bits.to_uint()\n\n return (bits_value if not signed else bit_helpers.from_twos_complement(\n bits_value, bit_count))", "def to_number(bool_list):\n return sum((n << j) for (j, n) in enumerate(bool_list))", "def get_sint(bytearray_, byte_index):\n data = bytearray_[byte_index]\n packed = struct.pack('B', data)\n value = struct.unpack('>b', packed)[0]\n return value", "def _binary_string_to_int(bitstring, big_endian=True):\n if not big_endian:\n bitstring = str(reversed(bitstring))\n val = 0\n nbits = len(bitstring)\n for (n, bit) in enumerate(bitstring):\n if bit == \"1\":\n val += 2**(nbits - n - 1)\n return val", "def test_from_binary_bits_style(self):\n self.assertResult('[0001]', b4('[0001]'))", "def binary_encoding(k: int, bit_number: int=10) -> List[int]:\n return [k>>i & 1 for i in range(bit_number)]", "def unmarshal_int(b):\n return int.from_bytes(b, byteorder='little', signed=True)", "def decode_i32(as_bytes: typing.List[int]) -> int:\n return le_bytes_to_int(as_bytes, True)", "def _pack_bytes_signed(byte_list):\n return int.from_bytes(byte_list, 'big', signed=True)", "def from_byte( clist, bytesize):\n assert (len(clist)>=0)\n B = (1<<bytesize) - 1\n n=0\n while 1:\n d = bin_to_dec(clist,bytesize)\n if (d == B):\n return n\n else:\n n = n*B + d\n pass\n 
pass\n pass", "def b2i(data, order='big'):\n return int.from_bytes(data, order)", "def binrep2num(binrep):\n return sum([2**int(i) if bit == '1' else 0 for i, bit in enumerate(\n reversed(binrep))])", "def convertBytesToInt(self, bytes):\r\n result = 0\r\n for idx in range(len(bytes)):\r\n if idx == 0:\r\n result = int(bytes[0])\r\n else:\r\n result = (result << 8) + bytes[idx]\r\n\r\n return result" ]
[ "0.7799725", "0.7070542", "0.70149434", "0.6703559", "0.6688739", "0.66736495", "0.6657458", "0.6641372", "0.6465512", "0.6444076", "0.640635", "0.6368258", "0.63485277", "0.6318946", "0.6285462", "0.62587655", "0.6253048", "0.6239362", "0.6194942", "0.61726755", "0.6165641", "0.61507905", "0.61317784", "0.610869", "0.6092923", "0.60921913", "0.6090924", "0.6057176", "0.60544837", "0.60502833" ]
0.71379066
1
Convert a window [left, right] inclusive into a list of variable precision bitvectors.
def bvwindow(left: int, right: int, nbits: int) -> List[BitVector]:
    assert left >= 0
    assert right >= 0
    assert right <= 2**nbits - 1

    bvs: List[BitVector] = []
    # Empty window
    if right < left:
        return bvs

    while(True):

        if nbits == 0:
            return [(True,), (False,)]

        if left == right:
            bvs += [int2bv(left, nbits)]
            break

        # Catch left edge
        if left % 2 == 1:
            bvs += [int2bv(left, nbits)]
            left += 1

        # Catch right edge
        if right % 2 == 0:
            bvs += [int2bv(right, nbits)]
            right -= 1

        if left > right:
            break

        # Reduce precision
        nbits = nbits-1
        left = left // 2
        right = right // 2

    return bvs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bvwindowgray(left: int, right: int, nbits: int) -> List[BitVector]:\n bvs: List[BitVector] = []\n\n assert right <= 2**nbits - 1\n assert left <= 2**nbits - 1\n\n # Split window into [0,right] and [left, 2**nbits-1]\n if left > right:\n return bvwindowgray(left, 2**nbits-1, nbits) + bvwindowgray(0, right, nbits)\n\n while(True):\n if nbits == 0:\n return [(True,), (False,)]\n\n if left == right:\n bvs += [int2bv(bintogray(left), nbits)]\n break\n\n # Catch left edge\n if left % 4 in (1, 3):\n bvs += [int2bv(bintogray(left), nbits)]\n left += 1\n\n # Catch right edge\n if right % 4 in (0, 2):\n bvs += [int2bv(bintogray(right), nbits)]\n right -= 1\n\n if left > right:\n break\n\n nbits = nbits - 1\n left = left // 2\n right = right // 2\n\n return bvs", "def index_interval(lb: int, ub: int, nbits=None, graycode=False) -> List[int]:\n if graycode:\n assert nbits is not None\n else:\n assert lb <= ub\n\n window = []\n i = lb\n while True:\n window.append(i)\n if i == ub:\n break\n i = increment_index(i, 1, nbits, graycode)\n return window", "def window_data(data: np.ndarray):\n\n w_len = 128\n stride = w_len // 2\n\n no_offset_windows = np.split(data, 10)\n offset_windows = np.split(data[stride:-stride], 9)\n windows = [0] * 19\n windows[::2] = no_offset_windows\n windows[1::2] = offset_windows\n windows = np.array(windows, dtype=np.float32)\n\n return windows", "def to_bit_list(val, width=16):\n return [(1 if val & (1<<n) else 0) for n in range(width)]", "def to_list(bits: int) -> list[Position]:\n positions = []\n for r in range(8):\n for c in range(8):\n mask = pos_mask(r, c)\n if bits & mask > 0:\n positions.append(Position(r, c))\n return positions", "def intToVec(n):\r\n return [n >> 12, (n >> 4) & 0xf, (n >> 8) & 0xf, n & 0xf]", "def set24_to_list(v):\n return [x for x in range(24) if v & (1 << x)]", "def hexgrid(self):\n n = self.n * 2\n vectors = []\n for u in range(-n, n+1):\n us = [u] * (2*n+1)\n if u < 0:\n vectors.extend(zip(us, range(-n-u, n+1), range(-n, n+u+1)))\n else:\n vectors.extend(zip(us, range(-n, n-u+1), range(-n+u, n+1)))\n return vectors", "def all_bits_list(vals, width=16):\n return flatten_list([to_bit_list(val, width) for val in vals])", "def windows(X, width, skip_last):\n ret = []\n n = X.shape[0]\n for i in range(n - width + 1 - skip_last):\n window = X[i:i + width, :]\n ret.append([tuple(x) for x in window[:]])\n return np.array(ret)", "def create_bin_values(self):\n values = [-float(\"inf\"), self.offset, float(\"inf\")]\n value = self.start\n while self.offset + value <= self.stop:\n values.insert(1, self.offset - value)\n values.insert(-1, self.offset + value)\n value *= self.step\n return values", "def vec_to_windows(x, wlen):\n n = len(x)\n # number of windows\n m = n // wlen\n # total samples to be kept\n s = m * wlen\n return jnp.reshape(x[:s], (m, wlen)).T", "def bits(self):\n return list(range(self.lsb, self.msb + 1))", "def to_bitvectors(self):\n if hasattr(self, \"ifp\"):\n df = self.to_dataframe()\n return to_bitvectors(df)\n raise AttributeError(\"Please use the `run` method before\")", "def train_sample_windowize(field, delta=1, n=20):\n padded = np.pad(field, delta, mode='constant', constant_values=-1)\n X = np.zeros((n * n, (1 + delta * 2) ** 2))\n for i in range(n):\n for j in range(n):\n X[i * n + j] = padded[i:i + 2 * delta + 1, j:j + 2 * delta + 1].ravel()\n return X", "def bitlist(n):\n return [n >> i & 1 for i in range(7,-1,-1)]", "def _sliding_window(self, image, mask, window_radius=3):\n height, width = image.shape[:2]\n features 
= []\n for yy in range(window_radius, height - window_radius):\n for xx in range(window_radius, width - window_radius):\n features.append(image[yy - window_radius: yy + window_radius + 1, xx - window_radius: xx + window_radius + 1].ravel())\n labels = mask[window_radius: -1 * window_radius, window_radius: -1 * window_radius].ravel()\n return np.array(features), labels", "def verticalLogBinning(v, p):\n logBin = {}\n for node in v.keys():\n logBin[node] = []\n # for each feature, the p fraction with the lowest values are assigned\n # bin number 0, next p fraction are assigned bin number 1, etc.\n numFeatures = len(v.values()[0])\n for i in range(numFeatures):\n f = getIthFeature(v, i)\n sortedIdx = sorted(range(len(f)), key=lambda k: f[k])\n assignBinValue(logBin, sortedIdx, v.keys(), p)\n return logBin", "def extract_window_data(df, window_len=30, zero_base=True):\n window_data = []\n for idx in range(len(df) - window_len):\n tmp = df[idx: (idx + window_len)].copy()\n if zero_base:\n tmp = normalise_min_max(tmp)\n window_data.append(tmp.values)\n return np.array(window_data)\n #return window_data", "def feats_array_4_window(window: np.ndarray):\n\n outvec = np.zeros((len(funclist), window.shape[1]))\n\n for i in range(len(funclist)):\n for j in range(window.shape[1]):\n outvec[i, j] = funclist[i](window[:, j])\n\n outvec = outvec.reshape(-1)\n\n return outvec", "def extract_window_data(df, window_len=10, zero_base=True):\n window_data = []\n for idx in range(len(df) - window_len):\n tmp = df[idx: (idx + window_len)].copy()\n if zero_base:\n tmp = normalise_zero_base(tmp)\n window_data.append(tmp.values)\n return np.array(window_data)", "def bits_to_verts(n):\n return [v for v in range(8) if 2**v & n > 0]", "def vector_convert(self, pos) :\n delta = AUTO_width2//3\n bright_list = []\n for i in range(-1,2):\n for j in range(-1,2) :\n b = 0\n count = 0\n for x in range(max(0, pos[0] + i*delta), min(self.m_x, pos[0] + (i+1)*delta)):\n for y in range(max(0, pos[1] + j*delta), min(self.m_y, pos[1] + (j+1)*delta)):\n b += self.current_array[x][y]\n count += 1\n if count == 0 :\n bright_list.append(0)\n else :\n if b == 0 : #prevent 0 divde\n b = 1\n bright_list.append(b/count)\n bright_list = np.array(bright_list)\n m = np.max(bright_list)/self.current_total_avg\n bright_list = bright_list/np.min(bright_list) -1\n bright_list = np.append(bright_list, m)\n return bright_list", "def game_to_bin(game : List[int]) -> List[List[bool]]:\n return list(map(int_to_bin_three_bit, game))\n # equivalent to\n # converted_game = []\n # for line in game:\n # converted_game.append(int_to_bin_three_bit(line))\n # return converted_game", "def sliding_window(frame_length, step, Xsampleslist, ysampleslist):\n Xsamples = []\n ysamples = []\n for j in range(len(Xsampleslist)):\n X = Xsampleslist[j]\n ybinary = ysampleslist[j]\n for i in range(0, X.shape[0] - frame_length, step):\n xsub = X[i:i + frame_length, :]\n ysub = ybinary\n Xsamples.append(xsub)\n ysamples.append(ysub)\n return Xsamples, ysamples", "def binary_list(dec_number, width):\n bin_str = bin(dec_number)[2:].zfill(width)\n return [int(x) for x in bin_str]", "def pyramid_polynomial_set_vector(\n domain_dim: int, range_dim: int, order: int, variables: AxisVariablesNotSingle = x\n) -> typing.List[VectorFunction]:\n set1d = pyramid_polynomial_set_1d(domain_dim, order, variables)\n return [\n VectorFunction([p if i == j else 0 for j in range(range_dim)])\n for p in set1d\n for i in range(range_dim)\n ]", "def generate_list(self):\n\n array = [False] * 25\n 
bits = self.generate_bits()\n\n for column in range(2, -1, -1):\n for row in range(0, 5):\n bit = next(bits)\n\n array[column + (row * 5)] = bit\n array[(4 - column) + (row * 5)] = bit\n\n return array", "def generate_binned_values( lower_lim, upper_lim, chr_length, snps_per_chr, indels_per_chr, resolution ):\n\t\n\tsnp_data = []\n\tindel_data = []\n\twhile True:\n\t\tif upper_lim >= chr_length:\n\t\t\tbreak\n\t\telse:\n\t\t\tsnp_tmp = []\n\t\t\tindel_tmp = []\n\t\t\tfor SNP in snps_per_chr:\n\t\t\t\tif SNP <= upper_lim and SNP > lower_lim:\n\t\t\t\t\tsnp_tmp.append( 'X' )\n\t\t\tfor indel in indels_per_chr:\n\t\t\t\tif indel <= upper_lim and indel > lower_lim:\n\t\t\t\t\tindel_tmp.append( 'X' )\n\t\t\tsnp_data.append( len( snp_tmp ) )\n\t\t\tindel_data.append( len( indel_tmp ) )\n\t\tupper_lim += resolution\n\t\tlower_lim += resolution\n\treturn max( snp_data ), max( indel_data ), snp_data, indel_data", "def create_lut_list(df):\n lut_to_save = df[::-1]\n lut_list = []\n\n for i in range(len(lut_to_save)):\n tau_bin_num = i\n weta_bin_min = list(lut_to_save[i]).index(1)\n weta_bin_max = len(lut_to_save[i]) - list(lut_to_save[i][::-1]).index(1) - 1\n \n lut_list.append([tau_bin_num, weta_bin_min, weta_bin_max])\n\n return lut_list" ]
[ "0.69015485", "0.5978056", "0.5879691", "0.5801877", "0.57723594", "0.5686582", "0.5606028", "0.54982144", "0.54739505", "0.5461037", "0.5431221", "0.53940713", "0.53511935", "0.531862", "0.5312564", "0.53044176", "0.5283519", "0.5200857", "0.51954573", "0.5177205", "0.5174521", "0.5171993", "0.5168145", "0.51504976", "0.51322114", "0.51189315", "0.5117617", "0.51090163", "0.5089576", "0.50879866" ]
0.7558267
0
Convert a window [left, right] inclusive into a list of variable precision bitvectors.
def bvwindowgray(left: int, right: int, nbits: int) -> List[BitVector]:
    bvs: List[BitVector] = []

    assert right <= 2**nbits - 1
    assert left <= 2**nbits - 1

    # Split window into [0,right] and [left, 2**nbits-1]
    if left > right:
        return bvwindowgray(left, 2**nbits-1, nbits) + bvwindowgray(0, right, nbits)

    while(True):
        if nbits == 0:
            return [(True,), (False,)]

        if left == right:
            bvs += [int2bv(bintogray(left), nbits)]
            break

        # Catch left edge
        if left % 4 in (1, 3):
            bvs += [int2bv(bintogray(left), nbits)]
            left += 1

        # Catch right edge
        if right % 4 in (0, 2):
            bvs += [int2bv(bintogray(right), nbits)]
            right -= 1

        if left > right:
            break

        nbits = nbits - 1
        left = left // 2
        right = right // 2

    return bvs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bvwindow(left: int, right: int, nbits: int) -> List[BitVector]:\n assert left >= 0\n assert right >= 0\n assert right <= 2**nbits - 1\n\n bvs: List[BitVector] = []\n # Empty window\n if right < left:\n return bvs\n\n while(True):\n\n if nbits == 0:\n return [(True,), (False,)]\n\n if left == right:\n bvs += [int2bv(left, nbits)]\n break\n\n # Catch left edge\n if left % 2 == 1:\n bvs += [int2bv(left, nbits)]\n left += 1\n\n # Catch right edge\n if right % 2 == 0:\n bvs += [int2bv(right, nbits)]\n right -= 1\n\n if left > right:\n break\n\n # Reduce precision\n nbits = nbits-1\n left = left // 2\n right = right // 2\n\n return bvs", "def index_interval(lb: int, ub: int, nbits=None, graycode=False) -> List[int]:\n if graycode:\n assert nbits is not None\n else:\n assert lb <= ub\n\n window = []\n i = lb\n while True:\n window.append(i)\n if i == ub:\n break\n i = increment_index(i, 1, nbits, graycode)\n return window", "def window_data(data: np.ndarray):\n\n w_len = 128\n stride = w_len // 2\n\n no_offset_windows = np.split(data, 10)\n offset_windows = np.split(data[stride:-stride], 9)\n windows = [0] * 19\n windows[::2] = no_offset_windows\n windows[1::2] = offset_windows\n windows = np.array(windows, dtype=np.float32)\n\n return windows", "def to_bit_list(val, width=16):\n return [(1 if val & (1<<n) else 0) for n in range(width)]", "def to_list(bits: int) -> list[Position]:\n positions = []\n for r in range(8):\n for c in range(8):\n mask = pos_mask(r, c)\n if bits & mask > 0:\n positions.append(Position(r, c))\n return positions", "def intToVec(n):\r\n return [n >> 12, (n >> 4) & 0xf, (n >> 8) & 0xf, n & 0xf]", "def set24_to_list(v):\n return [x for x in range(24) if v & (1 << x)]", "def hexgrid(self):\n n = self.n * 2\n vectors = []\n for u in range(-n, n+1):\n us = [u] * (2*n+1)\n if u < 0:\n vectors.extend(zip(us, range(-n-u, n+1), range(-n, n+u+1)))\n else:\n vectors.extend(zip(us, range(-n, n-u+1), range(-n+u, n+1)))\n return vectors", "def all_bits_list(vals, width=16):\n return flatten_list([to_bit_list(val, width) for val in vals])", "def windows(X, width, skip_last):\n ret = []\n n = X.shape[0]\n for i in range(n - width + 1 - skip_last):\n window = X[i:i + width, :]\n ret.append([tuple(x) for x in window[:]])\n return np.array(ret)", "def create_bin_values(self):\n values = [-float(\"inf\"), self.offset, float(\"inf\")]\n value = self.start\n while self.offset + value <= self.stop:\n values.insert(1, self.offset - value)\n values.insert(-1, self.offset + value)\n value *= self.step\n return values", "def vec_to_windows(x, wlen):\n n = len(x)\n # number of windows\n m = n // wlen\n # total samples to be kept\n s = m * wlen\n return jnp.reshape(x[:s], (m, wlen)).T", "def bits(self):\n return list(range(self.lsb, self.msb + 1))", "def to_bitvectors(self):\n if hasattr(self, \"ifp\"):\n df = self.to_dataframe()\n return to_bitvectors(df)\n raise AttributeError(\"Please use the `run` method before\")", "def train_sample_windowize(field, delta=1, n=20):\n padded = np.pad(field, delta, mode='constant', constant_values=-1)\n X = np.zeros((n * n, (1 + delta * 2) ** 2))\n for i in range(n):\n for j in range(n):\n X[i * n + j] = padded[i:i + 2 * delta + 1, j:j + 2 * delta + 1].ravel()\n return X", "def bitlist(n):\n return [n >> i & 1 for i in range(7,-1,-1)]", "def _sliding_window(self, image, mask, window_radius=3):\n height, width = image.shape[:2]\n features = []\n for yy in range(window_radius, height - window_radius):\n for xx in range(window_radius, width - window_radius):\n 
features.append(image[yy - window_radius: yy + window_radius + 1, xx - window_radius: xx + window_radius + 1].ravel())\n labels = mask[window_radius: -1 * window_radius, window_radius: -1 * window_radius].ravel()\n return np.array(features), labels", "def verticalLogBinning(v, p):\n logBin = {}\n for node in v.keys():\n logBin[node] = []\n # for each feature, the p fraction with the lowest values are assigned\n # bin number 0, next p fraction are assigned bin number 1, etc.\n numFeatures = len(v.values()[0])\n for i in range(numFeatures):\n f = getIthFeature(v, i)\n sortedIdx = sorted(range(len(f)), key=lambda k: f[k])\n assignBinValue(logBin, sortedIdx, v.keys(), p)\n return logBin", "def extract_window_data(df, window_len=30, zero_base=True):\n window_data = []\n for idx in range(len(df) - window_len):\n tmp = df[idx: (idx + window_len)].copy()\n if zero_base:\n tmp = normalise_min_max(tmp)\n window_data.append(tmp.values)\n return np.array(window_data)\n #return window_data", "def feats_array_4_window(window: np.ndarray):\n\n outvec = np.zeros((len(funclist), window.shape[1]))\n\n for i in range(len(funclist)):\n for j in range(window.shape[1]):\n outvec[i, j] = funclist[i](window[:, j])\n\n outvec = outvec.reshape(-1)\n\n return outvec", "def extract_window_data(df, window_len=10, zero_base=True):\n window_data = []\n for idx in range(len(df) - window_len):\n tmp = df[idx: (idx + window_len)].copy()\n if zero_base:\n tmp = normalise_zero_base(tmp)\n window_data.append(tmp.values)\n return np.array(window_data)", "def bits_to_verts(n):\n return [v for v in range(8) if 2**v & n > 0]", "def vector_convert(self, pos) :\n delta = AUTO_width2//3\n bright_list = []\n for i in range(-1,2):\n for j in range(-1,2) :\n b = 0\n count = 0\n for x in range(max(0, pos[0] + i*delta), min(self.m_x, pos[0] + (i+1)*delta)):\n for y in range(max(0, pos[1] + j*delta), min(self.m_y, pos[1] + (j+1)*delta)):\n b += self.current_array[x][y]\n count += 1\n if count == 0 :\n bright_list.append(0)\n else :\n if b == 0 : #prevent 0 divde\n b = 1\n bright_list.append(b/count)\n bright_list = np.array(bright_list)\n m = np.max(bright_list)/self.current_total_avg\n bright_list = bright_list/np.min(bright_list) -1\n bright_list = np.append(bright_list, m)\n return bright_list", "def game_to_bin(game : List[int]) -> List[List[bool]]:\n return list(map(int_to_bin_three_bit, game))\n # equivalent to\n # converted_game = []\n # for line in game:\n # converted_game.append(int_to_bin_three_bit(line))\n # return converted_game", "def sliding_window(frame_length, step, Xsampleslist, ysampleslist):\n Xsamples = []\n ysamples = []\n for j in range(len(Xsampleslist)):\n X = Xsampleslist[j]\n ybinary = ysampleslist[j]\n for i in range(0, X.shape[0] - frame_length, step):\n xsub = X[i:i + frame_length, :]\n ysub = ybinary\n Xsamples.append(xsub)\n ysamples.append(ysub)\n return Xsamples, ysamples", "def pyramid_polynomial_set_vector(\n domain_dim: int, range_dim: int, order: int, variables: AxisVariablesNotSingle = x\n) -> typing.List[VectorFunction]:\n set1d = pyramid_polynomial_set_1d(domain_dim, order, variables)\n return [\n VectorFunction([p if i == j else 0 for j in range(range_dim)])\n for p in set1d\n for i in range(range_dim)\n ]", "def binary_list(dec_number, width):\n bin_str = bin(dec_number)[2:].zfill(width)\n return [int(x) for x in bin_str]", "def generate_list(self):\n\n array = [False] * 25\n bits = self.generate_bits()\n\n for column in range(2, -1, -1):\n for row in range(0, 5):\n bit = next(bits)\n\n 
array[column + (row * 5)] = bit\n array[(4 - column) + (row * 5)] = bit\n\n return array", "def generate_binned_values( lower_lim, upper_lim, chr_length, snps_per_chr, indels_per_chr, resolution ):\n\t\n\tsnp_data = []\n\tindel_data = []\n\twhile True:\n\t\tif upper_lim >= chr_length:\n\t\t\tbreak\n\t\telse:\n\t\t\tsnp_tmp = []\n\t\t\tindel_tmp = []\n\t\t\tfor SNP in snps_per_chr:\n\t\t\t\tif SNP <= upper_lim and SNP > lower_lim:\n\t\t\t\t\tsnp_tmp.append( 'X' )\n\t\t\tfor indel in indels_per_chr:\n\t\t\t\tif indel <= upper_lim and indel > lower_lim:\n\t\t\t\t\tindel_tmp.append( 'X' )\n\t\t\tsnp_data.append( len( snp_tmp ) )\n\t\t\tindel_data.append( len( indel_tmp ) )\n\t\tupper_lim += resolution\n\t\tlower_lim += resolution\n\treturn max( snp_data ), max( indel_data ), snp_data, indel_data", "def create_lut_list(df):\n lut_to_save = df[::-1]\n lut_list = []\n\n for i in range(len(lut_to_save)):\n tau_bin_num = i\n weta_bin_min = list(lut_to_save[i]).index(1)\n weta_bin_max = len(lut_to_save[i]) - list(lut_to_save[i][::-1]).index(1) - 1\n \n lut_list.append([tau_bin_num, weta_bin_min, weta_bin_max])\n\n return lut_list" ]
[ "0.7557525", "0.59776354", "0.58777934", "0.58019704", "0.57729864", "0.56873184", "0.5606834", "0.5500135", "0.54751575", "0.5459628", "0.54305446", "0.53928643", "0.5351535", "0.53192264", "0.5311145", "0.53046113", "0.5281819", "0.5200208", "0.5194822", "0.51761657", "0.5174164", "0.517249", "0.51687", "0.5151321", "0.513147", "0.5119772", "0.5119624", "0.5110632", "0.5090162", "0.5089259" ]
0.6901498
1
Receive the data from the HTML form, then save it to a disk file, then respond with a nice friendly message to the awaiting browser.
def save_data(): # python-name = html-name: the_first = request.form["first"] the_last = request.form["last"] the_dob = request.form["dob"] # So... now, use the python-names in your code: with open("suckers.txt", "a") as sf: print(f"{the_first}, {the_last}, {the_dob}", file=sf) return f"Thanks, {the_first}, we promise not to sell your data to the bad guys."
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_POST(self):\n self.send_response(200)\n self.send_header('Content-type','text/html')\n self.send_header('Access-Control-Allow-Origin','*')\n self.end_headers()\n # Send the html message\n self.wfile.write(\"Aivis Panovs, 161REB125\")\n return", "def do_POST(s):\n s.send_response(200)\n s.send_header(\"Content-type\", \"text/html\")\n s.end_headers()\n postdata = s.parse_POST()\n writeToDir('data', mapValues(postdata))\n s.wfile.write(\"<html><head><title>Thanks</title></head><body>Thank you</body></html>\")", "def respondToSubmit(formData):\n\tdata = header()\n\t# The command line expected\n\targs = [\"web\", formData[\"stationName\"], formData.get(\"day\", \"Now\"), formData[\"time\"]]\n\n\t# If no time was specified\n\tif not args[-1]:\n\t\t# Remove the last argument\n\t\targs = args[:-1]\n\t\t# If today is specified, then assume current time if no time is mentioned\n\t\tif args[-1] == \"Today\":\n\t\t\targs[-1] = \"Now\"\n\t# Process all the command line\n\tweather = stage2.process(args)\n\tif \"error\" not in weather:\n\t\t# Fill in the details from the forecast\n\t\tdata += '<p class=\"bg-success lead\">%s</p><div class=\"row\">&nbsp;</div>' % details(weather)\n\telse:\n\t\t# Fill in error message\n\t\tdata += '<p class=\"bg-danger lead\">%s</p>' % weather[\"error\"]\n\t# Complete the web page\n\tdata += footer()\n\n\treturn data", "def htmlReceiver(request, model=''):\n\tinput_str = ''\n\tinput_str += parsePOST(request)\n\tpacket = io.StringIO(input_str) # write to memory\n\tjid = MetabolizerCalc().gen_jid() # create timestamp\n\tresponse = HttpResponse(packet.getvalue(), content_type='application/html')\n\tresponse['Content-Disposition'] = 'attachment; filename=' + model + '_' + jid + '.html'\n\t# packet.truncate(0) # clear from memory?\n\tpacket.close()\n\treturn response", "def _handle_post_request(self):\n form = cgi.FieldStorage(\n fp=self.rfile,\n headers=self.headers,\n environ={'REQUEST_METHOD': 'POST'})\n\n if self.path == '/URLRequest':\n # First we check, whether the formular has been filled by\n # something behaving like a bot\n if form.has_key('URL'):\n self._send_homepage('<p class=\"warning\">Please check your input</p>')\n return\n else:\n url = form['real_URL'].value if form.has_key('real_URL') else None\n tmp = self._insert_url_to_db(url)\n if tmp:\n try:\n blocked = self._db.is_hash_blocked(tmp)\n if tmp < 0:\n self._send_database_problem()\n return\n elif blocked:\n self._send_blocked_page(blocked[3])\n return\n else:\n self._send_return_page(tmp)\n return\n except YuDatabaseError:\n self._send_database_problem()\n return\n else:\n # There was a general issue with URL\n self._send_homepage('''<p class=\"warning\">Please check your input.</p>''')\n return\n elif self.path == '/ContactUs':\n if form.has_key('URL'):\n # Here we might have a bot who likes to send the webmaster some spam\n # who most likely will be not amused about.\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail NOT sent',\n msg='There was an issue with your request. Are you a bot? '\n '<a href=\"/ContactUs\">Please try again</a>.')\n else:\n try:\n email = form['email'].value\n subj = form['subject'].value\n descr = form['request'].value\n if self._send_mail(subj, descr, email):\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail sent',\n msg=\"Your request has been sent. 
You will receive an answer soon.\")\n else:\n self._send_internal_server_error()\n return\n except KeyError:\n template_filename = self._get_config_template('contactUsResult')\n text = read_template(\n template_filename,\n title='',\n header='Mail NOT sent',\n msg='It appers you did not fill out all needed fields.\\\n <a href=\"/ContactUs\">Please try again</a>.')\n\n elif self.path == '/Show':\n short_url = form['ShortURL'].value if form.has_key('ShortURL') else None\n if short_url != None and short_url.find(\"yaturl.net\") > -1:\n tmp = short_url.rfind(\"/\")\n if tmp > -1 and short_url != \"\":\n tmp = tmp + 1\n short_url = short_url[tmp:]\n if short_url != None and short_url.isalnum():\n try:\n result = self._db.get_link_from_db(short_url)\n except YuDatabaseError:\n self._send_database_problem()\n return\n template_filename = self._get_config_template('showpage')\n if result:\n new_url = '<p><a href=\"%(result)s\">%(result)s</a></p>' % \\\n {'result': result}\n else:\n new_url = '<p class=\"warning\">No URL found for this string. Please double check your\\\n <a href=\"/ShowURL\">input and try again</a></p>'\n\n stats = self._db.get_statistics_for_hash(short_url)\n\n text = read_template(\n template_filename,\n title=SERVER_NAME,\n header=SERVER_NAME,\n msg=new_url,\n stat=stats,\n statspage=\"/stats/\" + short_url)\n else:\n self._send_404()\n return\n\n else:\n self._send_404()\n return\n\n self._send_response(text, 200)", "def handle(self): \n \n data = self.request[0].strip()\n self.socket = self.request[1]\n\n #split off first word of file, assume is filename\n filename,sep,data = data.partition(\" \")\n\n #assume is requesting file\n if not data:\n self.sendfile(filename)\n #assume we have to save the file since data was sent\n else:\n self.savefile(filename,data)\n\n return True", "def action_POST(self):\n\n # Use the content-length header, though being user-defined input it's not really trustworthy.\n try:\n l = int(self.headers.get('content-length', 0))\n if l < 0:\n # Parsed properly, but some joker put in a negative number.\n raise ValueError()\n except ValueError:\n return self.serve_content(\"Illegal Content-Length header value: %s\" % self.headers.get('content-length', 0), 400)\n\n m = args[TITLE_MAX_LENGTH]\n if m and l > m:\n return self.serve_content('Maximum length: %d' % m, code = 413)\n\n form = cgi.FieldStorage(\n fp=self.rfile,\n headers=self.headers,\n environ={\n 'REQUEST_METHOD':'POST',\n 'CONTENT_TYPE':self.headers['Content-Type'],\n }\n )\n\n if 'file' not in form:\n return self.serve_content('No file provided.', 400)\n\n filename = form['file'].filename\n if not filename:\n # No FileName provided\n return self.serve_content('No file name.', 400)\n elif not re.match(r'^[^/\\\\]+$', filename) or filename in ['.', '..']:\n # Validate filename\n return self.serve_content('Invalid file name.', 400)\n\n if not os.path.isdir(self.file_path):\n return self.send_error(404)\n\n path_save = os.path.join(self.file_path, filename)\n\n if os.path.exists(path_save) and not os.path.isfile(path_save):\n return self.serve_content('Destination exists as a non-file', code = 406)\n\n if args[TITLE_UPLOAD_NO_CLOBBER] and os.path.isfile(path_save):\n return self.serve_content('File already exists.', code = 302)\n\n try:\n with open(path_save, 'wb') as output_file:\n # TODO: How to handle a user lying in their Content-Length header?\n self.copyobj(form['file'].file, output_file, False)\n except IOError:\n if os.path.isfile(path_save):\n os.remove(path_save)\n return 
self.serve_content('Failed to save file.', code = 500)\n\n return self.serve_content(self.render_file_table(self.file_path), code = 200)", "def save():\n\n subject = subject_var.get()\n category = cat_var.get()\n private = private_var.get()\n message = message_inp.get('1.0', tk.END)\n datestamp_type = datestamp_var.get()\n\n extension = 'txt' if not private else 'secret'\n filename = f'{category} - {subject}.{extension}'\n\n # Apply optional datestamp in message\n if datestamp_type == 'Date':\n datestamp = datetime.today().strftime('%Y-%m-%d')\n elif datestamp_type == 'Date+Time':\n datestamp = datetime.today().strftime('%Y-%m-%d_%H-%M-%S')\n else:\n datestamp = ''\n if datestamp:\n message = f'{message}\\n\\n{datestamp}'\n\n if private:\n password = tksd.askstring(\n 'Enter password',\n 'Enter a password to encrypt the message.'\n )\n message = weaksauce_encrypt(message, password)\n\n with open(filename, 'w') as fh:\n fh.write(message)\n\n status_var.set(f'Message was saved to {filename}')\n tkmb.showinfo('Saved', f'Message was saved to {filename}')", "def post(self):\n postUrl = 'http://' + self.ws + ':80/cgi-bin/post.py'\n\n # Create the form with simple fields\n logform = MultiPartForm()\n logfilename = string.rsplit(self.fullLogFile, '/', 1)[1]\n logform.add_file('file', logfilename, open(self.fullLogFile))\n body = str(logform)\n\n # Build the request\n request = urllib2.Request(postUrl)\n request.add_header('Content-type', logform.get_content_type())\n request.add_header('Content-length', len(body))\n request.add_data(body)\n\n # print request.get_data()\n urllib2.urlopen(request).read()\n\n htmlFile = self.format_html()\n htmlform = MultiPartForm()\n htmlfilename = string.rsplit(htmlFile, '/', 1)[1]\n htmlform.add_file('file', htmlfilename, open(htmlFile))\n\n request = urllib2.Request(postUrl)\n body = str(htmlform)\n request.add_header('Content-type', htmlform.get_content_type())\n request.add_header('Content-length', len(body))\n request.add_data(body)\n # request.get_data()\n response = urllib2.urlopen(request)\n data = response.read()\n\n s = re.search(\"^file location: (.+)\", data, re.MULTILINE)\n location = s.group(1)\n\n print \"http://%s%s\\n\" % (self.ws, location)", "def do_POST(self):\n global pages, devices, settings\n try:\n ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))\n if ctype == 'application/x-www-form-urlencoded':\n length = int(self.headers.getheader('content-length'))\n postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)\n #if(self.path != '/simple/updateGPSCoordinates'):\n #print postvars\n #print self.path\n #now call the function that is meant to process this request\n if(self.path == '/simple/selectedHousehold'):\n #print 'need to get all cows in household #%s ' % postvars['household'][0]\n output = pages[postvars['page'][0]].selectedHousehold(postvars['household'][0], devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/selectedSite'):\n #print 'need to get all the households from the site #%s ' % postvars['sites'][0]\n output = pages[postvars['page'][0]].selectedSite(postvars['sites'][0], devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/nextAnimal'):\n #print 'we have finalized saving samples for one animal, now we need to go to the next animal'\n output = pages[postvars['page'][0]].nextAnimal(postvars, devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/sampleCow'):\n #print 'we sampling the cow'\n #we have the cow that we want to sample...now 
proceed with the sampling\n output = pages[postvars['page'][0]].collectSample(postvars, devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/saveSample'):\n #print 'we saving a new sample'\n #the user have entered a sample for an animal\n output = pages[postvars['page'][0]].saveSample(postvars, devices['gps'], settings['barcode_use'])\n self.wfile.write(output)\n elif(self.path == '/simple/updateGPSCoordinates'):\n #we want to get the current GPS position\n output = pages[postvars['page'][0]].curPosition(devices['gps']) #for the sake of consistence, we just using the passed 'page' variable\n self.wfile.write(output)\n elif(self.path == '/simple/deleteSample'):\n #print 'we need to delete the sample %s ' % postvars['sample'][0]\n #the user have entered a sample for an animal\n output = pages[postvars['page'][0]].deleteSample(postvars['sample'][0], devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/deleteAnimal'):\n #print postvars\n #print 'we need to delete the anial %s ' % postvars['curAnimalRead'][0]\n #the user have entered a sample for an animal\n output = pages[postvars['page'][0]].deleteAnimal(postvars['curAnimalRead'][0], devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/showAllSites'):\n #print postvars\n #print 'we either to show all sites or just the households within a certain radius'\n #the user have entered a sample for an animal\n output = pages[postvars['page'][0]].showSites(postvars, devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/refreshSampler'):\n #print 'I really dont know what to do here, so we shall evaluate it a case on case basis'\n output = pages[postvars['page'][0]].refreshSampler(postvars, devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/simple/updateHouseholds'):\n #print 'The radius of interest has changed...lets update the households'\n output = pages[postvars['page'][0]].updateSites(postvars, devices['gps'])\n self.wfile.write(output)\n elif(self.path == '/admin'):\n #print 'admin page'\n \n if ctype == 'multipart/form-data':\n self.send_response(301)\n form = cgi.parse_multipart(self.rfile, pdict)\n #print form\n pages[form['page'][0]].parse_form(form, info, devices)\n self.send_header('Location', 'http://localhost:%s/%s' % (settings['port'], form['page'][0]))\n self.end_headers()\n except IOError:\n self.send_error(501, 'Unsupported Method')", "def post(self):\n return write_msg(request.json)", "def do_POST(self, request, response):\n # Store data\n self.data = to_str(request.read_data())\n\n # Respond\n if self.error:\n response.send_content(404, 'Not active', 'text/plain')\n\n else:\n response.send_content(200, 'OK', 'text/plain')", "def complete_form_and_download(self, start: datetime, end: datetime) -> str:\n log.info(\"---------------\")\n self._click_range_button()\n self._enter_start_date(start)\n self._enter_end_date(end)\n self._submit_form()\n return self._export_data()", "def post(self):\n dic = escape.json_decode(self.request.body)\n saveEditor(dic)\n # useful code goes here\n self.write(json.dumps({'status': 'ok', 'sent': dic}))\n self.finish()", "def handler(request):\n\tglobal req\n\treq = request\n\n\t\"\"\"Handles a single request from the client, checking the form data for what to do.\n\tCalls functions to get data frm the DB and renders it as XHTML\"\"\"\n\n\tnow = time.time()\n\n\tform = util.FieldStorage(req, keep_blank_values=True)\n\n\treq.update_mtime(now)\n\treq.set_last_modified()\n\n\t\"\"\"Since anything coming from this script 
may change at any time,\n\twe tell the client no to cache it.\"\"\"\n\treq.headers_out[\"Date\"] = time.strftime(formatDateTime, time.gmtime(time.time()))\n\treq.headers_out[\"Expires\"] = time.strftime(formatDateTime, time.gmtime(time.time()))\n\n\t\"\"\"MS Internet Explorer doesn't understand application/xhtml+xml.\n\tIf the request came from MSIE and lie to it, using text/html instead\"\"\"\n\tagent = req.headers_in[\"User-Agent\"]\n\tif \"MSIE\" in agent:\n\t\treq.content_type = \"text/html; charset=utf-8\"\n\t\t#req.write(\"User-Agent is IE: %s\" % agent)\n\telse:\n\t\treq.content_type = \"application/xhtml+xml; charset=utf-8\"\n\t\t#req.write(\"User-Agent is not IE: %s\" % agent)\n\n\t\"\"\"Here we check the requested operation from the form, defaulting to select.\n\tEach operation funtion returns a boolean revealing if it was successful.\n\tIf it was, we're happy and we tell the browser so.\n\tOtherwise, we tell the browser something went whacky.\"\"\"\n\top = form.getfirst(\"op\", \"select\").lower()\n\tif op == \"select\":\n\t\tif display(): return apache.OK\n\telif op == \"insert\":\n\t\tif insert(form): return apache.OK\n\telif op == \"delete\":\n\t\tif delete(form): return apache.OK\n\telif op == \"update\":\n\t\tif update(form): return apache.OK\n\telif op == \"dump\":\n\t\tif dump(form): return apache.OK\n\t# nothing succeeded\n\treturn apache.HTTP_BAD_REQUEST", "def handle_request(self, given_request: Request):\n with open(request.output, mode=\"w\", encoding='utf-8') as file:\n file.write(request.result)\n return True", "def do_POST(self):\r\n content_length = int(self.headers['Content-Length'])\r\n body = self.rfile.read(content_length)\r\n\r\n response = BytesIO()\r\n try:\r\n res = webServer.handle_post_msg(body)\r\n print(res)\r\n self.send_response(200)\r\n except Exception as e:\r\n print(e)\r\n res = str(e)\r\n self.send_response(500)\r\n self.end_headers()\r\n response.write(res.encode())\r\n self.wfile.write(response.getvalue())", "def writerep_general(contact_link, i):\n\n b = browser.Browser()\n print \"In writerep_general, opening contact_link\", contact_link\n b.open(contact_link)\n\n def get_challenge():\n ''' find captchas'''\n labels = b.find_nodes('label', lambda x: x.get('for') == 'HIP_response')\n if labels: return labels[0].string\n \n def fill_inhofe_lgraham(f):\n \"\"\"special function to fill in forms for inhofe and lgraham\"\"\"\n if DEBUG: print \"Filling special inhofe or lgraham form\"\n f.fill_all(A01=i.prefix, B01=i.fname, C01=i.lname, D01=i.addr1, E01=i.addr2, F01=i.city,\n G01=i.state, H01=i.zip5, H02=i.phone, H03=i.phone, I01=i.email, J01=\"Communications\", K01=i.full_msg)\n f.fill(type='textarea', value=i.full_msg)\n if DEBUG: print \"f filled and ready to submit: \", f\n \n def fill_form(f):\n ''' f is a form '''\n\n f.fill_name(i.prefix, i.fname, i.lname)\n if DEBUG: print \"in fill_form, filling addr\"\n f.fill_address(i.addr1, i.addr2)\n if DEBUG: print \"in fill_form, filling phone\"\n f.fill_phone(i.phone)\n if DEBUG: print \"in fill_form, filling textarea\"\n textareacontrol = f.fill(type='textarea', value=i.full_msg)\n if DEBUG: print 'filled textareacontrol' , textareacontrol\n if DEBUG: print \"in fill_form, filling all\"\n\n if DEBUG: print \"Printing all controls\"\n for c in f.controls:\n if DEBUG: print \"control: \", c.name, \" type: \", c.type\n \n f.fill_all(city=i.city, zipcode=i.zip5, zip4=i.zip4, state=i.state.upper(),\n email=i.email,\n issue=['TECH', 'GEN', 'OTH'],\n subject=i.subject, reply='yes',\n Re='issue', #for 
billnelson\n newsletter='noAction', aff1='Unsubscribe',\n MessageType=\"Express an opinion or share your views with me\")\n\n # page has one required control that has no name. so we need to fill it in\n if (i.dist == 'SD-00' or 'coburn' in b.url):\n empty_controls = [c for c in f.controls if not c.value]\n for c in empty_controls:\n if DEBUG: print f.fill('OTH', control=c)\n\n \n\n\n # Solve captchas. I included this here because it was placed here by Aaron,\n # but I haven't found a captcha that it works on. -NKF\n challenge = get_challenge()\n if challenge:\n print \"Found challenge!\"\n try:\n solution = captchasolver.solve(challenge)\n except Exception, detail:\n print >> sys.stderr, 'Exception in CaptchaSolve', detail\n print >> sys.stderr, 'Could not solve:\"%s\"' % challenge,\n \n if DEBUG: print \"f filled and ready to submit to \", b.url, \"\\n\", f\n #return b.open(f.click())\n \n \n\n # max loops\n k = 6\n\n # needed this from some weird error that I forgot to document.\n # we only want to do the WYR form once,\n # so it's a flag so we don't choose this one again. \n completedWyrForm = False\n for cnt in range(1,k):\n # todo, place newurl into cache\n if DEBUG: print \"Loop \", cnt, \":\\n\", b.url, \"\\n\" #, b.page, \"\\n Done with page \", cnt, \"\\n\\n\"\n\n # check if this is a refresh page\n # to do: see if we can get javascript window.location refreshes\n # (would require some smart parsing or using a javascript interpreter module)\n if 'http-equiv=\"refresh\"' in b.page:\n if DEBUG: print \"Redirect to a new page:\"\n newurl = r_refresh.findall(b.page)[0]\n newurl = newurl.replace(' ', '%20')\n newurl = newurl.replace('&amp;', '&')\n if DEBUG: print \"\\nNewurl:\", newurl\n try:\n b.open(newurl)\n continue #next loop\n except:\n print \"Failed to open url \", newurl, \" error: \", traceback.print_exc()\n\n # some pages have multiple forms on them.\n # For example, there may be a search tool in the sidebar.\n # or there may be forms which are hidden by not displayed by the css.\n # try to see what we can grab out the page, then we'll decide which one's the best to try\n textareaform = get_form(b, lambda f: f.find_control_by_type('textarea'))\n zipform = get_form(b, lambda f: f.has(name='zip'))\n verificationform = get_form(b, lambda f: 'formproc' in f.action)\n nameform = get_form(b, lambda f: 'wrep_const' in f.action) #see AL-06 for an example, has zip form in page too\n wyrform = get_form(b, lambda f: f.find_control_by_id('state') and f.find_control_by_name('zip') and f.find_control_by_name('zip4')) #get_form(b, not_signup_or_search)\n indexform = get_form(b, lambda f: f.has(name='Re')) # see billnelson for example\n\n #choose which form we want to use\n form = None\n if textareaform:\n if DEBUG: print \"textareaform\"\n form = textareaform\n elif wyrform and not completedWyrForm:\n if DEBUG: print \"wyrform\"\n form = wyrform\n completedWyrForm = True\n elif nameform:\n if DEBUG: print \"step2 contact form with name\"\n form = nameform\n elif zipform:\n if DEBUG: print \"zipform\"\n form = zipform\n elif verificationform:\n if DEBUG: print \"verification form\"\n form = verificationform\n elif indexform:\n if DEBUG: print \"index form\"\n form = indexform\n\n #if no redirect and no form was found, just return. 
can go no further\n if not form:\n return b.page\n \n \n #to do, add back in captcha solver\n if form.find_control_by_name('captcha') or form.find_control_by_name('validation'):\n if DEBUG: print \"captcha found\"\n #raise Captcha\n return b.page\n else:\n if DEBUG: print \"no captcha found\"\n\n #try:\n if DEBUG: print \"going to fill_form from \", b.url, \" now \\n\", form, \"\\n End form\", cnt, \"\\n\"\n if \"inhofe\" in contact_link or \"lgraham\" in contact_link:\n fill_inhofe_lgraham(form)\n else:\n fill_form(form) #, aggressive=True)\n\n try:\n nextpage = b.open(form.click())\n except:\n print \"caught an http error\"\n print \"Failed to submit form for url \", b.url, \" error: \", traceback.print_exc()\n return \"Failed to submit form for url \"+ b.url+ \" error: \"+ traceback.format_exc()\n\n \n # Now, look for common errors or confirmations.\n foundError = False\n thanked = False\n if DEBUG: print \"Looking for errors in page \" #, b.page\n \n errorStr = getError(b.page)\n if errorStr:\n if DEBUG: print \"Found error: \", errorStr, \" done with \", contact_link\n foundError = True\n\n if DEBUG: print \"Looking for thank you in page: \"# , nextpage.lower()\n confirmations=[cstr for cstr in confirmationStrings if cstr in nextpage.lower()]\n\n if len(confirmations) > 0:\n print 'thanked, done with ', contact_link\n thanked = True\n\n successUrls = ['https://mulvaneyforms.house.gov/submit-contact.aspx']\n if b.url in successUrls:\n thanked = True\n\n if thanked or foundError:\n return nextpage\n\n if DEBUG: print \"Tried \", k, \"times, unsuccessfully, to fill form\"\n return b.page\n #raise UnsuccessfulAfter5Attempts(b.page) ", "def do_POST(self):\n response_body = get_response_body(\"index.html\")\n\n self.send_response(200)\n self.send_header('Content-type', 'text/xml; charset=UTF-8')\n self.send_header('Content-length', len(response_body))\n self.end_headers()\n self.wfile.write(response_body)\n logging.info('[Request method] POST')\n logging.info(\"[Request headers]\\n%s\", str(self.headers))\n\n content_len = int(self.headers.getheader('content-length', 0))\n post_body = self.rfile.read(content_len)\n logging.info('[Request doby]\\n%s', post_body)", "def download_file():\n data = c.recv(BUFFER)\n \n if data == b\"terminate\":\n print(\"DOWNLOADING FAILED !!!\")\n return\n\n file = open(FILE_NAME,\"wb\")\n while True:\n if data == b\"DONE\":\n break\n \n print(\"Receiving. . . \")\n file.write(data)\n data = c.recv(BUFFER)\n \n file.close()\n print(\"Successfully received!!!\")\n \n print(\"Webpage saved as {} at {}\".format(FILE_NAME, getcwd())) \n return None", "def index():\r\n # Generate the number the WB will use to come back to\r\n # their submission\r\n wb_number = randomizer.generate_tulip_receipt()\r\n\r\n # Perform a check to see if the client is using Tor\r\n anonymity = Anonymity.TorAccessCheck(request.client, request.env)\r\n\r\n # If a session has not been created yet, create one.\r\n if not session.wb_id:\r\n session.wb_id = randomizer.generate_wb_id()\r\n\r\n # -- follow a comment preserved since 'the age of the upload'\r\n #\r\n # Tor Browser Bundle has JS enabled by default!\r\n # Hurray! 
I love you all!!\r\n # Yeah, even *you* the anti-JS taliban hater!\r\n # As someone put it, if you think JS is evil remember\r\n # that the world is in technicolor and not in black and white.\r\n # Look up, the sun is shining, thanks to jQuery.\r\n\r\n # This is necessary because otherwise web2py will go crazy when\r\n # it sees {{ }}\r\n upload_template = jQueryHelper.upload_tmpl()\r\n\r\n download_template = jQueryHelper.download_tmpl()\r\n\r\n # Generate the material upload elements\r\n # JavaScript version\r\n material_js = TR('Material',\r\n DIV(_id='file-uploader'),\r\n _id='file-uploader-js')\r\n\r\n # .. and non JavaScript\r\n material_njs = DIV(DIV(LABEL(\"Material:\"),\r\n _class=\"w2p_fl\"),\r\n DIV(INPUT(_name='material', _type='file',\r\n _id='file-uploader-nonjs'),\r\n _class=\"w2p_fc\"),\r\n _id=\"file-uploader-nonjs\")\r\n\r\n # Use the web2py captcha setting to generate a Captcha\r\n # captcha = TR('Are you human?', auth.settings.captcha)\r\n\r\n # The default fields and labels\r\n form_fields = ['title', 'desc']\r\n form_labels = {'title': 'Title', 'desc': 'Description'}\r\n\r\n form_extras = []\r\n\r\n # Add to the fields to be displayed the ones inside of\r\n # the extrafields setting\r\n # for i in settings.extrafields.fields:\r\n # form_extras.append(str(i['name']))\r\n # form_fields.append(str(i['name']))\r\n # form_labels[str(i['name'])] = i['desc']\r\n\r\n if settings.extrafields.wizard:\r\n the_steps = settings.extrafields.gen_wizard()\r\n\r\n form = FormShaman(db.leak, steps=the_steps)\r\n # this is the only error handled at the moment, the fact that __init__\r\n # could return only None, maybe an issue when more errors might be managed\r\n if not hasattr(form, 'vars'):\r\n return dict(error='No receiver present in the default group', existing_files=[])\r\n\r\n else:\r\n form = SQLFORM(db.leak,\r\n fields=form_fields,\r\n labels=form_labels)\r\n\r\n # Check to see if some files have been loaded from a previous session\r\n existing_files = []\r\n if session.files:\r\n for f in session.files:\r\n existing_files.append(f)\r\n\r\n # Make the submission not spooled and set the timestamp\r\n form.vars.spooled = False\r\n form.vars.submission_timestamp = time.time()\r\n\r\n # Insert all the data into the db\r\n if form.accepts(request.vars):\r\n logger.debug(\"Submission %s\", request.vars)\r\n\r\n group_ids = [] # Will contain all the groups selected by the WB\r\n\r\n # XXX Since files are processed via AJAX, maybe this is unecessary?\r\n # if we want to keep it to allow legacy file upload, then the\r\n # file count should only be one.\r\n # File upload in a slightly smarter way\r\n # http://www.web2py.com/book/default/chapter/06#Manual-Uploads\r\n for var in request.vars:\r\n if var == \"material\":\r\n try:\r\n f = Storage()\r\n f.filename = request.vars.material.filename\r\n\r\n tmp_file = db.material.file.store(request.body, filename)\r\n logger.info(\"the tmp_file is [%s] with filename [%s]\",\r\n tmp_file, filename)\r\n\r\n f.ext = mutils.file_type(filename.split(\".\")[-1])\r\n\r\n tmp_fpath = os.path(os.path.join(request.folder,\r\n 'uploads',\r\n session.upload_dir,\r\n tmp_file + filename))\r\n\r\n f.size = os.path.getsize(tmp_fpath)\r\n files.append(f)\r\n\r\n dst_folder = os.path.join(request.folder,\r\n 'material',\r\n str(leak_id.id))\r\n if not os.path.isdir(dst_folder):\r\n os.mkdir(dst_folder)\r\n os.rename(os.path.join(request.folder,\r\n 'uploads',\r\n session.upload_dir,\r\n tmp_file),\r\n dst_folder + filename)\r\n # XXX define exception for this 
except\r\n except:\r\n logger.error(\"There was an error in processing the \"\r\n \"submission files.\")\r\n\r\n\r\n if var.startswith(\"target_\") and var.split(\"_\")[-1].isdigit():\r\n group_ids.append(var.split(\"_\")[-1])\r\n\r\n # The metadata associated with the file is stored inside\r\n # the session variable this should be safe to use this way.\r\n if not session.files:\r\n session.files = []\r\n\r\n # Add the default files\r\n default_material(form.vars)\r\n\r\n # XXX verify that this is safe\r\n pfile = json.dumps(session.files)\r\n\r\n # leak_id has been used in the previous code as this value,\r\n # I'm keeping to don't change the following lines\r\n leak_id = form.vars.id\r\n\r\n\r\n # XXX probably a better way to do this\r\n # Create a record in submission db associated with leak_id\r\n # used to keep track of sessions\r\n if not db(db.submission.session==session.wb_id).select():\r\n db.submission.insert(session=session.wb_id,\r\n leak_id=leak_id,\r\n dirname=session.dirname)\r\n\r\n # Instantiate the Leak object\r\n leak = Leak(leak_id)\r\n\r\n # Create the material entry for the submitted data\r\n leak.add_material(leak_id, None, \"localfs\", file=pfile)\r\n\r\n # Create the leak with the GlobaLeaks factory\r\n # (the data has actually already been added to db leak,\r\n # this just creates the tulips), the first is the whistleblower tulip\r\n gl.create_tulip(form.vars.id, 0, wb_number[1])\r\n\r\n # create the tulips for every receiver inside a basket\r\n\r\n # if len(group_ids):\r\n # fixme: we're not considering the selecred group, but *all*\r\n group_id = db().select(db.targetgroup.ALL).first().id\r\n leak.create_tulip_by_group(group_id)\r\n\r\n # Make the WB number be *** *** *****\r\n pretty_number = wb_number[0][:3] + \" \" + wb_number[0][3:6] + \\\r\n \" \" + wb_number[0][6:]\r\n\r\n session.wb_number = pretty_number\r\n # Clean up all sessions\r\n session.dirname = None\r\n session.wb_id = None\r\n session.files = None\r\n\r\n return dict(leak_id=leak_id, leaker_tulip=pretty_number, error=None,\r\n form=None, tulip_url=wb_number[1], jQuery_templates=None,\r\n existing_files=existing_files)\r\n\r\n elif form.errors:\r\n response.flash = 'form has errors'\r\n\r\n return dict(form=form,\r\n error=None,\r\n leak_id=None,\r\n tulip=None,\r\n tulips=None,\r\n anonymity=anonymity.result,\r\n jQuery_templates=(XML(upload_template),\r\n XML(download_template)),\r\n existing_files=existing_files)", "def writeFromHTTPPost(self, REQUEST, RESPONSE=None):\n # Retrieve the template text from the raw form data and call the\n # the PageTemplate write method with this text.\n try:\n REQUEST.stdin.seek(0)\n text = REQUEST.stdin.read()\n \n if len(text) > 0:\n self.write(text)\n \n # respond to the caller an XML stream indicating the result\n # of the call\n if RESPONSE:\n RESPONSE.setHeader('content-type', 'text/xml; charset=%s' % \n self.char_encoding)\n RESPONSE.write('<save status=\"ok\"/>')\n return 1\n except:\n if RESPONSE:\n RESPONSE.setHeader('context-type', 'text/xml; charset=%s' %\n self.char_encoding)\n RESPONSE.write('<save status=\"failed\"/>')\n return 0", "def handle(req: bytes) -> str:\n\n try:\n pass\n except:\n dirname = os.path.dirname(__file__)\n path = os.path.join(dirname, 'html', 'upload.html')\n\n with (open(path, 'r')) as file:\n html = file.read()\n\n return html", "def save_response(self):\r\n self.q(css='input.save-button').first.click()\r\n EmptyPromise(\r\n lambda: 'save' in self.alert_message.lower(),\r\n \"Status message saved\"\r\n ).fulfill()", "def 
send_final_request(self):\n with open(self.output_path, \"r\") as text_file:\n data = json.load(text_file)\n print self.request_handler.send(data)", "def save(self):\n self.click(\".action-save\")\n self.page.wait_for_ajax()", "def do_POST(self):\n\n # do this before sending any response as we may raise an exception\n content = self.__get_content(self.__get_post_body())\n\n # send the response back to the client\n self.send_response(200)\n self.send_header(\"Content-type\", \"text/json\")\n self.end_headers()\n self.wfile.write(content)", "def receive_email_view(request):\n save_inbound_email(request.POST, request.FILES)\n return HttpResponse(200)", "def process(self):\n\n form = cgi.FieldStorage()\n commit = self.read_commit(form)\n\n print(\"Content-Type: text/plain; charset='utf-8'\\r\")\n print(\"Cache-Control: max-age=60\\r\")\n if form.getfirst(\"download\", \"false\") == \"true\":\n print(\"Content-Disposition: attachment; filename=\\\"patch.txt\\\"\\r\")\n\n print(\"\\r\")\n\n print((\"#\" + json.dumps(PostsaiCommitViewer.format_commit_header(commit), default=convert_to_builtin_type)))\n sys.stdout.flush()\n PostsaiCommitViewer.dump_commit_diff(commit)", "def index():\n if request.method == \"POST\":\n write_to_file(\"data/players_names.txt\", request.form[\"player_name\"] + \"\\n\")\n write_to_file(\"data/online_players.txt\", request.form[\"player_name\"] + \"\\n\")\n return redirect(request.form[\"player_name\"])\n return render_template(\"index.html\")" ]
[ "0.623124", "0.6209262", "0.61119497", "0.60251135", "0.5977981", "0.591255", "0.5905254", "0.5802934", "0.5698746", "0.569686", "0.56939495", "0.5654349", "0.5610539", "0.5597084", "0.5577267", "0.5563417", "0.5550518", "0.5540938", "0.5534455", "0.55296224", "0.55229175", "0.549628", "0.54930043", "0.5461647", "0.54543453", "0.5449847", "0.5440029", "0.5433397", "0.5416065", "0.5397815" ]
0.6650055
0
Devuelve la lista de socios que participan en el torneo
def get_socios(self): return self.__socios
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def comitentes(self):\n return self.expedientepersona_set.filter(comitente=True)", "def getCubes():", "def todos(self):\n socios = session.query(Socio).all()\n return socios", "def get_tournament_list():\n database = TinyDB('db.json')\n tournament_list = database.table('tournaments').all()\n return tournament_list", "def get_player_list(tournament):\n database = TinyDB('db.json')\n players_table = database.table('players')\n # retrieving the list of identifiers of players following a tournament\n id_list = tournament['Liste indice Joueurs']\n player_list = []\n for player_id in id_list:\n # getting the players\n player = players_table.get(doc_id=player_id)\n player_list.append(player)\n return player_list", "def get_society_list():\n\n societies = ['pr_society']\n if settings.PUBLISHING_AGREEMENT_PUBLISHER_MR != Decimal(1):\n societies.append('mr_society')\n if settings.PUBLISHING_AGREEMENT_PUBLISHER_SR != Decimal(1):\n societies.append('sr_society')\n return societies", "def get_all_elections(self) -> list:", "def listeCandidate(self, phero, visited, n_candidats):\n pheromone = phero.copy()\n\n pheromone[list(visited)] = 0\n # rn.choices returns a list with the randomly selected element from the list.\n # weights to affect a probability for each element\n\n c = rn.choices(self.all_inds, weights=[p for p in pheromone], k=n_candidats)\n c = list(set(c))\n i = len(c)\n \"\"\"while i<n_candidats:\n n=rn.randint(0,self.n_objets-1)\n if n not in visited:\n c.append(n)\n i+=1\"\"\"\n nb_candidats = len(c)\n\n return c, pheromone", "def get_matches_list(tournament):\n database = TinyDB('db.json')\n matches_table = database.table('match')\n # recuperation de la liste des match d'un un tournoi\n matches_id_list = tournament['Liste des indices des matches']\n matches_list = []\n for id_matches in matches_id_list:\n # recuperation des joueurs\n match = matches_table.get(doc_id=id_matches)\n matches_list.append(match)\n return matches_list", "def getSlaveVocabComuni(master2):\n results=[]\n lista=[]\n tupla=()\n for a in mapDisplayList(EV.allProvince()):\n if master2 == a[1]:\n results = [a for a in EV.comuni4provincia(a[0])]\n for result in results:\n tupla = (result.comune)\n lista.append(tupla)\n return lista", "def obtenerCuentas(self, unaSeccion):\n #return self.cuentasManager.searchBy(Cuenta.seccion, unaSeccion.nombre)\n return [obj for obj in self.cuentasManager.almacen.find(Cuenta, Cuenta.seccion_id == unaSeccion.ide)]", "def continents(g):\n continents = []\n for key in g.city_dict:\n if(g.city_dict[key].get_continent() not in continents):\n continents.append(g.city_dict[key].get_continent())\n \n for continent in continents:\n print(\"{}: \").format(continent)\n for key in g.city_dict:\n if(g.city_dict[key].get_continent() == continent):\n print(\" {}\").format(g.city_dict[key].get_name())", "def list_people():\n\n person_list = []\n for person in person_database:\n person_list.append(person)\n return person_list", "def relationships(self):", "def get_communities(browser: RoboBrowser, desired_communities: list):\n browser.open(URL_BASE + '/info/profil/meinetipprunden')\n content = get_kicktipp_content(browser)\n links = content.find_all('a')\n def gethreftext(link): return link.get('href').replace(\"/\", \"\")\n\n def is_community(link):\n hreftext = gethreftext(link)\n if hreftext == link.get_text():\n return True\n else:\n linkdiv = link.find('div', {'class': \"menu-title-mit-tippglocke\"})\n return linkdiv and linkdiv.get_text() == hreftext\n community_list = [gethreftext(link)\n for 
link in links if is_community(link)]\n if len(desired_communities) > 0:\n return intersection(community_list, desired_communities)\n return community_list", "def objets_uniques(self):\n objets = []\n for membre in self.membres:\n for objet in membre.equipe:\n if objet.unique:\n objets.append(objet)\n objets.extend(objet.prototype.objets_contenus(objet))\n if membre.tenu and membre.tenu.unique:\n objet = membre.tenu\n objets.append(objet)\n objets.extend(objet.prototype.objets_contenus(objet))\n\n return objets", "def tournament(self):\n pass", "def display_tournament_list():\r\n for tournament in tournaments_table:\r\n print(tournament['Nom'])", "def adjacence(self) -> List[List[Poids]]:\n resultat = list()\n for depart in self.sommets:\n ligne = list()\n for arrivee in self.sommets:\n if arrivee in self._voisinage[depart]:\n ligne.append(self._voisinage[depart][arrivee])\n else:\n ligne.append(0)\n resultat.append(ligne)\n return resultat", "def continents_and_cities(self):\r\n list_all = col.defaultdict(list)\r\n for code, node in self.vertices.items():\r\n list_all[node.continent].append(node.name)\r\n return list_all", "def getResponsibleUsers():", "def obtenir_joueurs(self, tournoi):\n i = 1\n for x in range(0, NOMBRE_JOUEURS):\n # collect player via view\n infos = self.vue.demander_info_joueur(i)\n joueur = Joueur(infos)\n # add to the list of players\n tournoi.enregistrer_joueur(joueur)\n i += 1", "async def sponsors(self, ctx):\n resp = await self.bot.session.get(\n \"https://raw.githubusercontent.com/kyb3r/modmail/master/SPONSORS.json\"\n )\n data = loads(await resp.text())\n\n embeds = []\n\n for elem in data:\n embed = Embed.from_dict(elem[\"embed\"])\n embeds.append(embed)\n\n random.shuffle(embeds)\n\n session = EmbedPaginatorSession(ctx, *embeds)\n await session.run()", "def list_user_contributors(n):\n\n tx = cypher_transaction()\n query = \"\"\"\n MATCH (p:project)-[:OWNED_BY]->(owner),\n (u:user)-[:CONTRIBUTES_TO]->(p)\n WHERE owner.username={username}\n RETURN DISTINCT u, p\n \"\"\"\n tx.append(query, parameters={'username': n['username']})\n results = _first(tx.commit())\n contributors = set()\n for record in results:\n user, project = record.values\n contributors.add(user)\n print(\"* {0} -> {1}\".format(user['name'], project['name']))\n return contributors", "def get_all_candidates(self) -> list:", "def participants(self):\r\n return Participants(self)", "def get_participants(reactome_id):\n react_url = 'http://www.reactome.org/ContentService/data/event/' \\\n + reactome_id + '/participatingReferenceEntities'\n headers = {'Accept': 'application/json'}\n res = requests.get(react_url, headers=headers)\n if not res.status_code == 200:\n return []\n json = res.json()\n up_ids = []\n for res in json:\n if not res.get('databaseName') == 'UniProt':\n continue\n up_id = res.get('identifier')\n if up_id is not None:\n up_ids.append(up_id)\n return up_ids", "def all_cited_dois( corpus ) :\n cites = list(set(chain.from_iterable((cited_dois(x) for x in corpus))))\n return cites", "def R1(self, i):\n results = []\n for peer in self.router:\n remotes_peers = self.get(peer)\n for friend_of_a_friend in remotes_peers:\n if friend_of_a_friend['node'] == i.threeple and friend_of_a_friend['transactions']:\n results.append(peer)\n log(\"R1: %s %s\" % (i, str(results)))\n return results", "def candidates(self):\n return self.related_users.all()" ]
[ "0.58589816", "0.57218426", "0.5660919", "0.56461746", "0.5478608", "0.5461285", "0.5444874", "0.5443482", "0.543834", "0.5438139", "0.5418964", "0.539169", "0.5383644", "0.5374273", "0.53655237", "0.53620005", "0.5355373", "0.53489214", "0.53374285", "0.52937156", "0.5281925", "0.5268706", "0.5267725", "0.5260367", "0.5255097", "0.5238736", "0.5232498", "0.5216288", "0.5215559", "0.5200921" ]
0.5800939
1
Devuelve los resultados del torneo
def get_resultados(self): return self.__resultados
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getResults():", "def results(self):\n pass", "def results(self):\r\n pass", "def _make_result_list(self,res):\n res_list = []\n for r in res:\n res_list.append(r)\n\n return res_list", "def generarConsultasConexion(self):\n for parRecursos in self.CombiConsultaLibre:\n parRecursosL0=self.limpiaRecursos(parRecursos[0])\n parRecursosL1=self.limpiaRecursos(parRecursos[1])\n \n if self.nivel_profundidad>=1:\n consultasparql = self.busConex1 % (parRecursosL0,parRecursosL1,self.limit_BC)\n print consultasparql;\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n triple = parRecursos[0]+\"-|\"+parRecursos[1]+\"-|\"+resul['p1']['value']\n self.ResultConsultasConexion.append(triple) \n \n if self.nivel_profundidad>=2:\n consultasparql = self.busConex2 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o1+\"*-|\"+resul['p2']['value']\n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n \n if self.nivel_profundidad>=3:\n consultasparql = self.busConex3_1 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n o2=resul['o2']['value']\n o2=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o2+\"*-|\"+resul['p2']['value']\n triple3 = o1+\"*-|\"+o2+\"*-|\"+resul['p3']['value'] \n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n self.ResultConsultasConexion.append(triple3) \n\n consultasparql = self.busConex3_2 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n o2=resul['o2']['value']\n o2=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o2+\"*-|\"+resul['p2']['value']\n triple3 = o2+\"*-|\"+o1+\"*-|\"+resul['p3']['value'] \n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n self.ResultConsultasConexion.append(triple3)", "def query3() :", "def get_all_ssresults(self):\n\n all_results = ()\n self._logger.debug(\"Getting all ss results\")\n\n try:\n self.check_if_db_connected()\n cursor = self._db_conn.cursor()\n cursor.execute(\"SELECT result_id, ss_ind_first, ss_first_char, ss_ind_second, ss_second_char, ss_ind_third, \\\nss_third_char, ss_ind_fourth, ss_fourth_char, ss_ind_fifth, ss_fifth_char, ss_ind_sixth, ss_sixth_char, ss_ind_seventh, ss_seventh_char, \\\nss_ind_eighth, ss_eighth_char, time FROM ss_ind_result ORDER BY time DESC\")\n results = cursor.fetchall()\n\n for result_id, first_id, first_char, second_id, second_char, third_id, third_char, fourth_id, fourth_char, fifth_id, fifth_char, sixth_id, sixth_char, seventh_id, seventh_char, eighth_id, eighth_char, timestamp in results:\n intermediate_results = ()\n\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(first_id))\n first = 
cursor.fetchall()\n first_name_first, last_name_first, \\\n nickname_first = first[0]\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(second_id))\n second = cursor.fetchall()\n first_name_second, last_name_second, \\\n nickname_second = second[0]\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(third_id))\n third = cursor.fetchall()\n first_name_third, last_name_third, \\\n nickname_third = third[0]\n except MySQLdb.OperationalError:\n first_name_third = ''\n last_name_third = ''\n nickname_third = ''\n third_char = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(fourth_id))\n fourth = cursor.fetchall()\n first_name_fourth, last_name_fourth, \\\n nickname_fourth = fourth[0]\n except MySQLdb.OperationalError:\n first_name_fourth = ''\n last_name_fourth = ''\n nickname_fourth = ''\n fourth_char = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(fifth_id))\n fifth = cursor.fetchall()\n first_name_fifth, last_name_fifth, \\\n nickname_fifth = fifth[0]\n except MySQLdb.OperationalError:\n first_name_fifth = ''\n last_name_fifth = ''\n nickname_fifth = ''\n fifth_char = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(sixth_id))\n sixth = cursor.fetchall()\n first_name_sixth, last_name_sixth, \\\n nickname_sixth = sixth[0]\n except MySQLdb.OperationalError:\n first_name_sixth = ''\n last_name_sixth = ''\n nickname_sixth = ''\n sixth_char = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(seventh_id))\n seventh = cursor.fetchall()\n first_name_seventh, last_name_seventh, \\\n nickname_seventh = seventh[0]\n except MySQLdb.OperationalError:\n first_name_seventh = ''\n last_name_seventh = ''\n nickname_seventh = ''\n seventh_char = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(eighth_id))\n eighth = cursor.fetchall()\n first_name_eighth, last_name_eighth, \\\n nickname_eighth = eighth[0]\n except MySQLdb.OperationalError:\n first_name_eighth = ''\n last_name_eighth = ''\n nickname_eighth = ''\n eighth_char = ''\n\n intermediate_results = intermediate_results + \\\n (result_id, first_name_first, last_name_first,\n nickname_first, first_char, first_name_second, last_name_second,\n nickname_second, second_char, first_name_third,\n last_name_third, nickname_third, third_char, first_name_fourth,\n last_name_fourth, nickname_fourth, fourth_char, first_name_fifth,\n last_name_fifth, nickname_fifth, fifth_char, first_name_sixth,\n last_name_sixth, nickname_sixth, sixth_char, first_name_seventh,\n last_name_seventh, nickname_seventh, seventh_char, first_name_eighth,\n last_name_eighth, nickname_eighth, eighth_char,\n timestamp.strftime('%Y-%m-%d'))\n\n all_results = all_results + (intermediate_results,)\n del intermediate_results\n\n except MySQLdb.OperationalError:\n self._logger.error(\"MySQL operational error occured\")\n traceback.print_exc()\n raise exceptions.DBConnectionError(\"Cannot connect to MySQL server\")\n\n except MySQLdb.ProgrammingError:\n self._logger.error(\"MySQL programming error\")\n traceback.print_exc()\n raise exceptions.DBSyntaxError(\"MySQL syntax error\")\n\n else:\n return all_results", "def getResultAll(i=None):", "def 
get_all_ppresults(self):\n\n all_results = ()\n self._logger.debug(\"Getting all ping pong results\")\n\n try:\n self.check_if_db_connected()\n cursor = self._db_conn.cursor()\n cursor.execute(\"SELECT result_id, pp_winner, pp_loser, time FROM pp_result ORDER BY time DESC\")\n results = cursor.fetchall()\n\n for result_id, winner_id, loser_id, timestamp in results:\n intermediate_results = ()\n\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(winner_id))\n winner = cursor.fetchall()\n first_name_winner, last_name_winner, \\\n nickname_winner = winner[0]\n\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(loser_id))\n loser = cursor.fetchall()\n first_name_loser, last_name_loser, \\\n nickname_loser = loser[0]\n\n intermediate_results = intermediate_results + \\\n (result_id, first_name_winner, last_name_winner,\n nickname_winner, first_name_loser,\n last_name_loser, nickname_loser,\n timestamp.strftime('%Y-%m-%d'))\n\n all_results = all_results + (intermediate_results,)\n del intermediate_results\n\n except MySQLdb.OperationalError:\n self._logger.error(\"MySQL operational error occured\")\n traceback.print_exc()\n raise exceptions.DBConnectionError(\"Cannot connect to MySQL server\")\n\n except MySQLdb.ProgrammingError:\n self._logger.error(\"MySQL programming error\")\n traceback.print_exc()\n raise exceptions.DBSyntaxError(\"MySQL syntax error\")\n\n else:\n return all_results", "def get_all_fbresults(self):\n\n all_results = ()\n self._logger.debug(\"Getting all foosball results\")\n\n try:\n self.check_if_db_connected()\n cursor = self._db_conn.cursor()\n cursor.execute(\"SELECT result_id, offense_winner, defense_winner, offense_loser, \\\ndefense_loser, time FROM fb_result ORDER BY time DESC\")\n results = cursor.fetchall()\n\n for result_id, offense_winner_id, defense_winner_id, offense_loser_id, defense_loser_id, timestamp in results:\n intermediate_results = ()\n\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(offense_winner_id))\n offense_winner = cursor.fetchall()\n first_name_offense_winner, last_name_offense_winner, \\\n nickname_offense_winner = offense_winner[0]\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(defense_winner_id))\n defense_winner = cursor.fetchall()\n first_name_defense_winner, last_name_defense_winner, \\\n nickname_defense_winner = defense_winner[0]\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(offense_loser_id))\n offense_loser = cursor.fetchall()\n first_name_offense_loser, last_name_offense_loser, \\\n nickname_offense_loser = offense_loser[0]\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(defense_loser_id))\n defense_loser = cursor.fetchall()\n first_name_defense_loser, last_name_defense_loser, \\\n nickname_defense_loser = defense_loser[0]\n\n intermediate_results = intermediate_results + \\\n (result_id, first_name_offense_winner, last_name_offense_winner,\n nickname_offense_winner, first_name_defense_winner, last_name_defense_winner,\n nickname_defense_winner, first_name_offense_loser,\n last_name_offense_loser, nickname_offense_loser, first_name_defense_loser,\n last_name_defense_loser, nickname_defense_loser,\n timestamp.strftime('%Y-%m-%d'))\n\n all_results = all_results + (intermediate_results,)\n del 
intermediate_results\n\n except MySQLdb.OperationalError:\n self._logger.error(\"MySQL operational error occured\")\n traceback.print_exc()\n raise exceptions.DBConnectionError(\"Cannot connect to MySQL server\")\n\n except MySQLdb.ProgrammingError:\n self._logger.error(\"MySQL programming error\")\n traceback.print_exc()\n raise exceptions.DBSyntaxError(\"MySQL syntax error\")\n\n else:\n return all_results", "def returnAll(self):\n try:\n # self.checkValName()\n self.conn.execute(self.query, self.val)\n self.results = self.cursor.fetchall()\n except Exception as e:\n print \"Query failed: %s \" % e\n raise", "def query(self):", "def get_all_mkresults(self):\n\n all_results = ()\n self._logger.debug(\"Getting all mk results\")\n\n try:\n self.check_if_db_connected()\n cursor = self._db_conn.cursor()\n cursor.execute(\"SELECT result_id, mk_ind_first, mk_ind_second, mk_ind_third, \\\nmk_ind_fourth, course, time FROM mk_ind_result ORDER BY time DESC\")\n results = cursor.fetchall()\n\n for result_id, first_id, second_id, third_id, fourth_id, course, timestamp in results:\n intermediate_results = ()\n\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(first_id))\n first = cursor.fetchall()\n first_name_first, last_name_first, \\\n nickname_first = first[0]\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\nplayer WHERE player_id = {0}\".format(second_id))\n second = cursor.fetchall()\n first_name_second, last_name_second, \\\n nickname_second = second[0]\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(third_id))\n third = cursor.fetchall()\n first_name_third, last_name_third, \\\n nickname_third = third[0]\n except MySQLdb.OperationalError:\n first_name_third = ''\n last_name_third = ''\n nickname_third = ''\n try:\n cursor.execute(\"SELECT first_name, last_name, nickname FROM \\\n player WHERE player_id = {0}\".format(fourth_id))\n fourth = cursor.fetchall()\n first_name_fourth, last_name_fourth, \\\n nickname_fourth = fourth[0]\n except MySQLdb.OperationalError:\n first_name_fourth = ''\n last_name_fourth = ''\n nickname_fourth = ''\n\n intermediate_results = intermediate_results + \\\n (result_id, first_name_first, last_name_first,\n nickname_first, first_name_second, last_name_second,\n nickname_second, first_name_third,\n last_name_third, nickname_third, first_name_fourth,\n last_name_fourth, nickname_fourth, course,\n timestamp.strftime('%Y-%m-%d'))\n\n all_results = all_results + (intermediate_results,)\n del intermediate_results\n\n except MySQLdb.OperationalError:\n self._logger.error(\"MySQL operational error occured\")\n traceback.print_exc()\n raise exceptions.DBConnectionError(\"Cannot connect to MySQL server\")\n\n except MySQLdb.ProgrammingError:\n self._logger.error(\"MySQL programming error\")\n traceback.print_exc()\n raise exceptions.DBSyntaxError(\"MySQL syntax error\")\n\n else:\n return all_results", "def cargar_obras(self):\n self.cargarObjetos(self.tableObra,\n ObraSocialModel.buscarTodos(\"razon_social\", self.sesion).all(),\n (\"razon_social\", \"cuit\", \"direccion\")\n )", "def scrap_results(self):\n # Find the table\n table = self.driver.find_element_by_xpath(results_table_path)\n\n found_links = []\n # For each row the table hase\n for row in table.find_elements_by_xpath(\".//tr\"):\n elements = row.find_elements_by_xpath(\".//td\")\n # If this row is not empty\n if len(elements) != 0:\n # Extract the link\n entity_link = 
elements[0].find_element_by_xpath(\".//a\").get_attribute(\"href\")\n found_links.append(entity_link)\n\n # Randomize the list of links so each time the order is different.\n shuffle(found_links)\n\n generic_data_found = []\n activity_data_found = []\n components_data_found = []\n components_alt_data_found = []\n historical_name_data_found = []\n historical_social_capital_data_found = []\n count = 0\n # For each link found\n for link in found_links:\n # Scrap the data from this entity\n gd, act, comp, hist_name, hist_c_s = self._scrap_single_entity(link)\n\n # Update the found data variables with the new data\n generic_data_found.append(gd)\n activity_data_found += act\n if len(comp) > 0 and \"total_miembros_patronado\" in comp[0]:\n components_alt_data_found += comp\n else:\n components_data_found += comp\n historical_name_data_found += hist_name\n historical_social_capital_data_found += hist_c_s\n\n # TODO: Remove this\n if count == 2:\n pass\n\n\n count += 1\n\n # Add data to the centralized search_result variable\n self.search_results.add_generic_data(generic_data_found)\n self.search_results.add_activity_data(activity_data_found)\n self.search_results.add_components_data(components_data_found)\n self.search_results.add_components_alt_data(components_alt_data_found)\n self.search_results.add_historical_names_data(historical_name_data_found)\n self.search_results.add_historical_social_capital_data(historical_social_capital_data_found)", "def cargarObras(self):\n self.cargarObjetos(self.tableOs,\n ObraSocialModel.buscarTodos(\"razon_social\", self.sesion).all(),\n (\"razon_social\", \"cuit\", \"direccion\")\n )", "def result(self):", "def result(self):", "def resultadosDiarios(self):\r\n self.checkingConnection()\r\n self.model = QSqlQueryModel()\r\n self.model.setQuery('''SELECT date1, ingresos, compras, gastos,\r\n (ingresos - compras - gastos) AS Saldo FROM (SELECT date1,\r\n ingresos, compras, gastos FROM ((SELECT Clients.date AS date1,\r\n SUM(Clients.value) AS ingresos FROM Clients GROUP BY Clients.date)\r\n JOIN (SELECT Compras.date AS date2, SUM(Compras.value) AS compras\r\n FROM Compras GROUP BY Compras.date) JOIN (SELECT Gastos.date AS date3,\r\n SUM(Gastos.value) AS gastos FROM Gastos GROUP BY Gastos.date)\r\n ON date1 = date2 AND date2 = date3))''', self.db)\r\n self.setModel(self.model)", "def Results(self):\n return self.data", "def Results(self):\n return self.data", "def get_results(self):\n\n super().get_results()", "def returnAll(self):\n try:\n # self.checkValName()\n self.dbc.execute(self.query, self.val)\n self.results = self.dbc.fetchall()\n except MySQLdb.Error as e:\n print \"Query failed: %s \" % e\n raise", "def Lluiteu(self) -> IResultList:\n\n if len(self._Lluitadors) != 2:\n print(\"ERROR. 
Falten lluitadors\")\n exit\n\n elQuePica = randint(0, 1)\n\n while self._Lluitadors[0].es_Ko() == False and self._Lluitadors[1].es_Ko() == False:\n elQueRep = (elQuePica+1) % 2\n proteccio = self._Lluitadors[elQueRep].get_Lluitador().Protegeix()\n pica = self._Lluitadors[elQuePica].get_Lluitador().Pica()\n\n if pica in proteccio:\n self._Lluitadors[elQueRep].treu_vida()\n print(\n f'{self._Lluitadors[elQueRep].get_nom()} rep un cop al {pica.name} de {self._Lluitadors[elQuePica].get_nom()}')\n else:\n print(\n f'{self._Lluitadors[elQueRep].get_nom()} atura el cop al {pica.name} de {self._Lluitadors[elQuePica].get_nom()}')\n elQuePica = elQueRep\n\n guanyador = next(x for x in self._Lluitadors if x.es_Ko() == False)\n perdedor = next(i for i in self._Lluitadors if i.es_Ko() == True)\n\n comentariLocutor = \"\"\n\n if (guanyador.get_vida() - perdedor.get_vida()) > 5:\n comentariLocutor = \"Quina pallissa!!\"\n\n print(f\"{perdedor.get_nom()} cau a terra!\")\n print(f\"VICTÒRIA DE {guanyador.get_nom()}!!! {comentariLocutor}\")\n\n return self._Lluitadors", "def resultadosAnuales(self):\r\n self.checkingConnection()\r\n self.model = QSqlQueryModel()\r\n self.model.setQuery('''SELECT years, ingresos, compras, gastos, \r\n (ingresos - compras - gastos) AS Total FROM (\r\n\t\t\tSELECT years, \r\n ingresos, compras, gastos FROM ((SELECT Clients.year AS years, \r\n SUM(Clients.value) AS ingresos FROM Clients GROUP BY Clients.year) \r\n JOIN (SELECT Compras.year AS year2, SUM(Compras.value) AS compras \r\n FROM Compras GROUP BY Compras.year) JOIN (SELECT Gastos.year AS year3, \r\n SUM(Gastos.value) AS gastos FROM Gastos GROUP BY Gastos.year) \r\n ON years = year2 AND year2 = year3)\r\n\t\t\t) ''', self.db)\r\n # Getting the table values\r\n self.years = []\r\n self.ingresos = []\r\n self.compras = []\r\n self.gastos = []\r\n self.total = []\r\n # Save the Query values in each list\r\n for i in range(self.model.rowCount()):\r\n # record is the row and value the column\r\n self.years.append(self.model.record(i).value(\"years\"))\r\n self.ingresos.append(self.model.record(i).value(\"ingresos\"))\r\n self.compras.append(self.model.record(i).value(\"compras\"))\r\n self.gastos.append(self.model.record(i).value(\"gastos\"))\r\n self.total.append(self.model.record(i).value(\"Total\"))\r\n self.setModel(self.model)\r\n # Creating the Bar Graph\r\n self.grafica(self.years)", "def results(self):\n return self._result_list", "def _get_results(self, res):\n self.async_res = res\n self.full_res = res.wait() # pragma: no cover\n self.trained = True # pragma: no cover\n self.mod_id = self.full_res['model_id'] # pragma: no cover\n self.data_id = self.full_res['data_id'] # pragma: no cover\n self.params_dump = self.full_res['params_dump'] # pragma: no cover\n if self.verbose > 0: # pragma: no cover\n print(\"Result {} | {} ready\".format(\n self.mod_id, self.data_id)) # pragma: no cover", "def get_list_of_results(self):\n return self.__result_list", "def consultVen(dataNoBs):\n for fila in dataNoBs:\n for elemento in fila:\n print(elemento + \"\\t\")", "def results(self) -> list:\n return self.__results" ]
[ "0.70406973", "0.6500303", "0.64375544", "0.6325507", "0.6182129", "0.61422825", "0.61179465", "0.610537", "0.6095539", "0.60648805", "0.60397285", "0.5990536", "0.59894574", "0.598081", "0.59751135", "0.595", "0.5937014", "0.5937014", "0.5925529", "0.5901702", "0.5901702", "0.58935356", "0.5890619", "0.58805645", "0.5863614", "0.57892334", "0.57877105", "0.5780195", "0.5774578", "0.57454616" ]
0.70208097
1
Establece un resultado en un partido
def set_resultado(self, partido, dni): self.__resultados[partido] = dni
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cargarProductosSinObra(self):\n\n self.limpiarTabla(self.tableProductos)\n\n ##Cnsulta para obtener todos los productos del sistema, con su correspondiente\n ##codigo de barra, monodroga, descuento, importe\n query=self.sesion.query(ProductoModel.codigo_barra,ProductoModel.id_medicamento,ProductoModel.id_presentacion,MonodrogaModel.nombre,ProductoModel.importe).\\\n join(MedicamentoModel).filter(ProductoModel.id_medicamento==MedicamentoModel.nombre_comercial).\\\n join(MonodrogaModel).filter(MedicamentoModel.id_monodroga==MonodrogaModel.nombre).\\\n filter(ProductoModel.baja==False).order_by(ProductoModel.codigo_barra)\n\n ##Se cargan los datos obtenidos en la tabla de Producto\n for n, obj in enumerate(query):\n self.tableProductos.insertRow(n)\n self.tableProductos.setItem(n, 0, QtGui.QTableWidgetItem(str(obj[0])))\n self.tableProductos.setItem(n, 1, QtGui.QTableWidgetItem(str(obj[1])))\n self.tableProductos.setItem(n, 2, QtGui.QTableWidgetItem(str(obj[2])))\n self.tableProductos.setItem(n, 3, QtGui.QTableWidgetItem(str(obj[3])))\n self.tableProductos.setItem(n, 4, QtGui.QTableWidgetItem(str(0)))\n self.tableProductos.setItem(n, 5, QtGui.QTableWidgetItem(str(obj[4])))\n\n ##Se carga la cantidad de cada producto en la tabla\n for row,producto in enumerate(ProductoModel.buscarTodos(ProductoModel.codigo_barra,self.sesion)):\n self.tableProductos.setItem(row,6,QtGui.QTableWidgetItem(str(producto.getCantidad(self.sesion))))", "def custo(EstadoRestaUm, resultante):\n return 1", "def buscarFactura(self):\n\n if not self.lineNumero.isEnabled() and self.facturaSeleccionada != None:\n QtGui.QMessageBox.information(self,\"Aviso\",\"Ya se ha seleccionado una factura\")\n elif not self.lineNumero.isEnabled():\n self.lineNumero.setEnabled(True)\n self.lineNumero.clear()\n self.limpiarTabla(self.tableFactura)\n else:\n self.numeroFacturaActual=str(self.lineNumero.text())\n if len(self.numeroFacturaActual)==0:\n QtGui.QMessageBox.information(self,\"Aviso\",QtCore.QString.fromUtf8(\"No se ha ingresado número de factura\"))\n else:\n self.facturaSeleccionada=FacturaModel.existeFactura(int(self.numeroFacturaActual),self.sesion)\n if self.facturaSeleccionada==None:\n QtGui.QMessageBox.warning(self,\"Aviso\",\"La factura seleccionada no existe\")\n elif self.facturaSeleccionada.getNC()!=None:\n QtGui.QMessageBox.information(self,\"Aviso\",QtCore.QString.fromUtf8(\"La factura ya ha posee una Nota de Crédito\"))\n self.facturaSeleccionada = None\n elif self.facturaSeleccionada.getFechaEmision()+timedelta(days=int(self.plazo))<date.today():\n QtGui.QMessageBox.information(self,\"Aviso\",QtCore.QString.fromUtf8(\"El tiempo permitido para la devolución ha expirado\"))\n elif self.facturaSeleccionada.estaLiquidada(self.sesion):\n print self.facturaSeleccionada.estaLiquidada(self.sesion)\n QtGui.QMessageBox.information(self,\"Aviso\",\"La factura se encuentra liquidada a la Obra Social\")\n else:\n self.lineNumero.setEnabled(False)\n self.cargarObjetos(self.tableFactura,self.facturaSeleccionada.getDetalles(self.sesion),\n [\"nro_linea\",\"producto\",\"cantidad\",\"importe\"])", "def generarConsultasConexion(self):\n for parRecursos in self.CombiConsultaLibre:\n parRecursosL0=self.limpiaRecursos(parRecursos[0])\n parRecursosL1=self.limpiaRecursos(parRecursos[1])\n \n if self.nivel_profundidad>=1:\n consultasparql = self.busConex1 % (parRecursosL0,parRecursosL1,self.limit_BC)\n print consultasparql;\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n triple = 
parRecursos[0]+\"-|\"+parRecursos[1]+\"-|\"+resul['p1']['value']\n self.ResultConsultasConexion.append(triple) \n \n if self.nivel_profundidad>=2:\n consultasparql = self.busConex2 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o1+\"*-|\"+resul['p2']['value']\n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n \n if self.nivel_profundidad>=3:\n consultasparql = self.busConex3_1 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n o2=resul['o2']['value']\n o2=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o2+\"*-|\"+resul['p2']['value']\n triple3 = o1+\"*-|\"+o2+\"*-|\"+resul['p3']['value'] \n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n self.ResultConsultasConexion.append(triple3) \n\n consultasparql = self.busConex3_2 % (parRecursosL0,parRecursosL1,self.limit_BC)\n resultoCC=self.consulta(consultasparql)\n for resul in resultoCC['results']['bindings']:\n o1=resul['o1']['value']\n o1=o1.replace('http://dbpedia.org/resource/','')\n o2=resul['o2']['value']\n o2=o1.replace('http://dbpedia.org/resource/','')\n triple1 = parRecursos[0]+\"-|\"+o1+\"*-|\"+resul['p1']['value']\n triple2 = parRecursos[1]+\"-|\"+o2+\"*-|\"+resul['p2']['value']\n triple3 = o2+\"*-|\"+o1+\"*-|\"+resul['p3']['value'] \n self.ResultConsultasConexion.append(triple1) \n self.ResultConsultasConexion.append(triple2) \n self.ResultConsultasConexion.append(triple3)", "def cargarObra(self):\n rowActual=self.tableObra.currentItem().row()\n self.lineObra.setText(str(self.tableObra.item(rowActual,0).text()))\n self.lineCuit.setText(str(self.tableObra.item(rowActual,1).text()))\n self.tableObra.hide()\n self.lineObra.setEnabled(False)\n self.lineCuit.setEnabled(False)\n self.obraSocialSeleccionada = str(self.lineObra.text())\n self.cargar_productos(self.obraSocialSeleccionada)\n self.gbProducto.setVisible(True)", "def resultat(self, concordance_mf, concordance_pf, liste_F, liste_M, liste_P):\n resultat = {\"Marqueur\": [], \"Conclusion\": [], \"Concordance Mere/Foetus\": [], \"Détails M/F\": [],\n \"Concordance Pere/Foetus\": [], \"Détails P/F\": []}\n marqueurs_conta = 0\n marqueurs_non_conta = 0\n somme_conta = 0\n if liste_F[0].allele[1] == 0.0:\n self.set_sexe(\"F\")\n else:\n self.set_sexe(\"M\")\n if concordance_mf != 16 and concordance_pf != 16 and concordance_pf != None:\n self.set_concordance_mere_foet(\"NON\")\n self.set_concordance_pere_foet(\"NON\")\n del resultat[\"Conclusion\"]\n for nbres in range(1, len(liste_F)):\n resultat[\"Marqueur\"].append(str(liste_F[nbres].marqueur))\n resultat[\"Concordance Mere/Foetus\"].append(liste_F[nbres].concordance_mere_foetus)\n resultat[\"Concordance Pere/Foetus\"].append(liste_P[nbres].concordance_pere_foetus)\n if liste_F[nbres].concordance_mere_foetus == \"NON\" and liste_P[nbres].concordance_pere_foetus == \"NON\":\n resultat[\"Détails M/F\"].append(\n \"M : \" + str(liste_M[nbres].normalisation(liste_M[nbres].allele)) + \" F: \" + str(\n 
liste_F[nbres].normalisation(liste_F[nbres].allele)))\n resultat[\"Détails P/F\"].append(\n \"P : \" + str(liste_P[nbres].normalisation(liste_P[nbres].allele)) + \" F : \" + str(\n liste_F[nbres].normalisation(liste_F[nbres].allele)))\n elif liste_F[nbres].concordance_mere_foetus == \"NON\":\n resultat[\"Détails M/F\"].append(\n \"M: \" + str(liste_M[nbres].normalisation(liste_M[nbres].allele)) + \" F : \" + str(\n liste_F[nbres].normalisation(liste_F[nbres].allele)))\n resultat[\"Détails P/F\"].append(\"\")\n elif liste_P[nbres].concordance_pere_foetus == \"NON\":\n resultat[\"Détails P/F\"].append(\n \"P: \" + str(liste_P[nbres].normalisation(liste_P[nbres].allele)) + \" F: \" + str(\n liste_F[nbres].normalisation(liste_F[nbres].allele)))\n resultat[\"Détails M/F\"].append(\"\")\n else:\n resultat[\"Détails M/F\"].append(\"\")\n resultat[\"Détails P/F\"].append(\"\")\n conclusion = pd.DataFrame({\"1\": [\"Non calculé\", \"Non calculé\", \"Non calculé\", self.get_date()]},\n index=[\"Nombre de marqueurs informatifs non contaminés\",\n \"Nombre de marqueurs informatifs contaminés\",\n \"Moyenne du pourcentage de contamination\", \"Date\"])\n resultats = pd.DataFrame(resultat, columns=[\"Marqueur\", \"Concordance Mere/Foetus\", \"Détails M/F\",\n \"Concordance Pere/Foetus\", \"Détails P/F\"])\n return resultats, conclusion\n elif concordance_mf != len(liste_F) and concordance_pf == len(liste_F) or concordance_mf != len(\n liste_F) and concordance_pf == None:\n self.set_concordance_mere_foet(\"NON\")\n self.set_concordance_pere_foet(\"OUI\")\n if concordance_pf == None:\n self.set_concordance_pere_foet(\"ABS\")\n del resultat[\"Conclusion\"]\n del resultat[\"Concordance Pere/Foetus\"]\n del resultat[\"Détails P/F\"]\n for nbres in range(1, len(liste_F)):\n resultat[\"Marqueur\"].append(str(liste_F[nbres].marqueur))\n resultat[\"Concordance Mere/Foetus\"].append(liste_F[nbres].concordance_mere_foetus)\n if liste_F[nbres].concordance_mere_foetus == \"NON\":\n resultat[\"Détails M/F\"].append(\n \"M: \" + str(liste_M[nbres].normalisation(liste_M[nbres].allele)) + \" F: \" + str(\n liste_F[nbres].normalisation(liste_F[nbres].allele)))\n else:\n resultat[\"Détails M/F\"].append(\"\")\n conclusion = pd.DataFrame({\"1\": [\"Non calculé\", \"Non calculé\", \"Non calculé\", self.get_date()]},\n index=[\"Nombre de marqueurs informatifs non contaminés\",\n \"Nombre de marqueurs informatifs contaminés\",\n \"Moyenne du pourcentage de contamination\", \"Date\"])\n resultats = pd.DataFrame(resultat, columns=[\"Marqueur\", \"Concordance Mere/Foetus\", \"Détails M/F\"])\n return resultats, conclusion\n elif concordance_mf == len(liste_F) and concordance_pf == len(liste_F) or concordance_mf == len(\n liste_F) and concordance_pf == None:\n self.set_concordance_mere_foet(\"OUI\")\n self.set_concordance_pere_foet(\"OUI\")\n if concordance_pf == None:\n self.set_concordance_pere_foet(\"ABS\")\n del resultat[\"Concordance Mere/Foetus\"]\n del resultat[\"Concordance Pere/Foetus\"]\n del resultat[\"Détails P/F\"]\n for nbres in range(1, len(liste_F)):\n resultat[\"Marqueur\"].append(str(liste_F[nbres].marqueur))\n if liste_F[nbres].informatif == 0:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Mère homozygote\")\n elif liste_F[nbres].informatif == 1:\n if liste_F[nbres].contamination == 0:\n marqueurs_non_conta += 1\n resultat[\"Conclusion\"].append(\"Non contaminé\")\n resultat[\"Détails M/F\"].append(\"\")\n elif liste_F[nbres].contamination == 1:\n marqueurs_conta += 
1\n somme_conta = somme_conta + liste_F[nbres].taux\n resultat[\"Conclusion\"].append(\"Contaminé\")\n resultat[\"Détails M/F\"].append(\"Taux contamination : \" + str(liste_F[nbres].taux) + \"%\")\n else:\n marqueurs_conta += 1\n somme_conta = somme_conta + liste_F[nbres].taux\n resultat[\"Conclusion\"].append(\"Contaminé\")\n resultat[\"Détails M/F\"].append(\"Taux contamination : \" + str(liste_F[nbres].taux) + \"%\")\n elif liste_F[nbres].informatif == 2:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Allèles semblables\")\n else:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Echo\")\n resultats = pd.DataFrame(resultat, columns=[\"Marqueur\", \"Conclusion\", \"Détails M/F\"])\n try:\n moyenne_conta = somme_conta / marqueurs_conta\n except ZeroDivisionError:\n moyenne_conta = 0\n conclusion = pd.DataFrame(\n {\"1\": [int(marqueurs_non_conta), int(marqueurs_conta), round(moyenne_conta, 2), self.get_date()]},\n index=[\"Nombre de marqueurs informatifs non contaminés\", \"Nombre de marqueurs informatifs contaminés\",\n \"Moyenne du pourcentage de contamination\", \"Date\"])\n return resultats, conclusion\n elif concordance_mf == len(liste_F) and concordance_pf != len(liste_F):\n self.set_concordance_mere_foet(\"OUI\")\n self.set_concordance_pere_foet(\"NON\")\n del resultat[\"Concordance Mere/Foetus\"]\n for nbres in range(1, len(liste_F)):\n resultat[\"Concordance Pere/Foetus\"].append(liste_P[nbres].concordance_pere_foetus)\n if liste_P[nbres].concordance_pere_foetus == \"NON\":\n resultat[\"Détails P/F\"].append(\n \"P: \" + str(liste_P[nbres].normalisation(liste_P[nbres].allele)) + \" F: \" + str(liste_P[nbres].normalisation(liste_P[nbres].allele)))\n else:\n resultat[\"Détails P/F\"].append(\"\")\n for nbres in range(1, len(liste_F)):\n resultat[\"Marqueur\"].append(str(liste_F[nbres].marqueur))\n if liste_F[nbres].informatif == 0:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Mère homozygote\")\n elif liste_F[nbres].informatif == 1:\n if liste_F[nbres].contamination == 0:\n marqueurs_non_conta += 1\n resultat[\"Conclusion\"].append(\"Non contaminé\")\n resultat[\"Détails M/F\"].append(\"\")\n elif liste_F[nbres].contamination == 1:\n marqueurs_conta += 1\n somme_conta = somme_conta + liste_F[nbres].taux\n resultat[\"Conclusion\"].append(\"Contaminé\")\n resultat[\"Détails M/F\"].append(\"Taux contamination : \" + str(liste_F[nbres].taux) + \"%\")\n else:\n marqueurs_conta += 1\n somme_conta = somme_conta + liste_F[nbres].taux\n resultat[\"Conclusion\"].append(\"Contaminé\")\n resultat[\"Détails M/F\"].append(\"Taux contamination : \" + str(liste_F[nbres].taux) + \"%\")\n elif liste_F[nbres].informatif == 2:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Allèles semblables\")\n else:\n resultat[\"Conclusion\"].append(\"Non informatif\")\n resultat[\"Détails M/F\"].append(\"Echo\")\n resultats = pd.DataFrame(resultat,\n columns=[\"Marqueur\", \"Conclusion\", \"Détails M/F\", \"Concordance Pere/Foetus\",\n \"Détails P/F\"])\n try:\n moyenne_conta = somme_conta / marqueurs_conta\n except ZeroDivisionError:\n moyenne_conta = 0\n conclusion = pd.DataFrame(\n {\"1\": [int(marqueurs_non_conta), int(marqueurs_conta), round(moyenne_conta, 2), self.get_date()]},\n index=[\"Nombre de marqueurs informatifs non contaminés\", \"Nombre de marqueurs informatifs contaminés\",\n \"Moyenne du pourcentage de contamination\", 
\"Date\"])\n return resultats, conclusion", "def query1(request):\n with connection.cursor() as cursor:\n cursor.execute('SELECT * FROM (\\\n example_usuario as u \\\n JOIN\\\n example_pessoa as p \\\n ON u.cpf_pessoa_id = p.cpf)')\n result = named_tuple_fetchall(cursor)\n\n template = loader.get_template('example/query1.html')\n context = {'query1_result_list': result, }\n\n return HttpResponse(template.render(context, request))", "def query2(request):\n with connection.cursor() as cursor:\n\n query = 'SELECT p.nome as nome_pessoa, perf.tipo, \\\n serv.nome FROM (example_usuario as u JOIN \\\n example_usuario_possui_perfil as possui ON \\\n u.id_usuario = possui.usuario_id JOIN \\\n example_perfil as perf ON possui.perfil_id = \\\n perf.id_perfil JOIN example_pessoa as p ON \\\n u.cpf_pessoa_id = p.cpf JOIN example_pertence \\\n as pertence ON pertence.perfil_id = \\\n perf.id_perfil JOIN example_servico as serv ON \\\n pertence.servicos_id = serv.id_servico)'\n cursor.execute(query)\n result = named_tuple_fetchall(cursor)\n\n results_dict = {} # key: Dict_key - val: str\n Dict_key = namedtuple('Key', 'nome perfil')\n for r in result:\n key = Dict_key(r[0], r[1])\n val = results_dict.get(key)\n if val is None:\n results_dict[key] = r[-1]\n else:\n results_dict[key] = ','.join([val, r[-1]])\n\n template = loader.get_template('example/query2.html')\n context = {'query2_result_dict': results_dict, }\n\n return HttpResponse(template.render(context, request))", "def lista_ventas(self,tipo,lista,filtro):\n self.lista=self.builder.get_object(lista)\n self.lista.clear()#Limpia la lista\n busqueda = \"\"\n\n if tipo==\"\":\n print(\"Llego a buscar ventas en BD\")\n #result=self.db.execute('SELECT * FROM Venta')\n busqueda = self.db.execute('SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID')\n elif tipo == \"Cliente\":\n print(\"Busco venta por nombre del cliente\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND C.nombre LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Viaje\":\n print(\"Busco venta por nombre del paquete\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND P.nombre LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Fecha de inicio\":\n print(\"Busco venta por fecha de inicio\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND fechaInicio LIKE '%\"+filtro+\"%'\")\n elif tipo == \"Fecha de fin\":\n print(\"Busco venta por fecha de fin\")\n busqueda = self.db.execute(\"SELECT ventaID ,fechaVenta, fechaInicio, fechaFin, C.nombre, P.nombre FROM Cliente C, Paquete P, Venta V WHERE V.IdCli = C.clienteID AND V.IdPaq = P.paqueteID AND fechaFin LIKE '%\"+filtro+\"%'\")\n \n for row in busqueda: \n #Empieza por la [1] porque el ID es la [0]\n # self.lista.append([row[4],row[5],row[1],row[2],row[3]])\n self.lista.append([row[1],row[2],row[3],row[4],row[5],row[0]])\n print(\"Listo ventas en tabla\")", "def cargar_productos(self, obraSocial):\n self.limpiarTabla(self.tableProductos)\n\n 
query=self.sesion.query(ProductoModel.codigo_barra,ProductoModel.id_medicamento,ProductoModel.id_presentacion,MonodrogaModel.nombre,DescuentoModel.descuento,ProductoModel.importe).\\\n join(MedicamentoModel).filter(ProductoModel.id_medicamento==MedicamentoModel.nombre_comercial).\\\n join(MonodrogaModel).filter(MedicamentoModel.id_monodroga==MonodrogaModel.nombre).\\\n join(DescuentoModel).filter(DescuentoModel.producto==ProductoModel.codigo_barra).\\\n filter(DescuentoModel.obra_social==obraSocial,ProductoModel.baja==False).order_by(ProductoModel.codigo_barra)\n\n for n, obj in enumerate(query):\n self.tableProductos.insertRow(n)\n for m, campo in enumerate(obj):\n self.tableProductos.setItem(n, m, QtGui.QTableWidgetItem(str(campo)))\n\n for row,producto in enumerate(ProductoModel.buscarTodos(ProductoModel.codigo_barra,self.sesion)):\n self.tableProductos.setItem(row,6,QtGui.QTableWidgetItem(str(producto.getCantidad(self.sesion))))", "def buscarFactura(self):\n if not self.lineNumeroFac.isEnabled() and self.tableNC.rowCount() != 0:\n QtGui.QMessageBox.information(self,\"Aviso\",\"Ya se ha seleccionado una factura\")\n elif not self.lineNumeroFac.isEnabled():\n self.lineNumeroFac.setEnabled(True)\n self.lineNumeroFac.clear()\n self.limpiarTabla(self.tableFactura)\n else:\n self.numeroFacturaActual=str(self.lineNumeroFac.text())\n if len(self.numeroFacturaActual)==0:\n self.showMsjEstado(\"No se ha ingresado numero de factura\")\n else:\n self.facturaSeleccionada=FacturaModel.existeFactura(int(self.numeroFacturaActual),self.sesion)\n if self.facturaSeleccionada==None:\n QtGui.QMessageBox.information(self,\"Aviso\",\"La factura seleccionada no existe\")\n elif self.facturaSeleccionada.getObra() != None and self.facturaSeleccionada.getObra() != self.obraSocial:\n QtGui.QMessageBox.information(self,\"Aviso\",\"La Obra Social seleccionada no corresponde con la factura\")\n elif self.facturaSeleccionada.getFechaEmision()+timedelta(days=int(self.plazo))<date.today():\n QtGui.QMessageBox.information(self,\"Aviso\",\"El tiempo permitido para el reintegro ha expirado\")\n elif self.facturaSeleccionada.estaLiquidada(self.sesion):\n QtGui.QMessageBox.information(self,\"Aviso\",\"La factura se encuentra liquidada a la Obra Social\")\n elif self.facturaSeleccionada.getNC()!=None:\n QtGui.QMessageBox.information(self,\"Aviso\",\"La factura ya posee una Nota de Crédito\")\n else:\n self.lineNumeroFac.setEnabled(False)\n if self.facturaSeleccionada.getObra() == None:\n self.cargarObjetos(self.tableFactura,self.facturaSeleccionada.getDetalles(self.obraSocial, self.sesion),\n [\"producto\",\"cantidad\",\"importe\"])\n else:\n self.cargarObjetos(self.tableFactura,self.facturaSeleccionada.getDetallesSinDescuento(self.sesion),\n [\"producto\",\"cantidad\",\"importe\"])", "def result(self):", "def result(self):", "def getVentasPendientes(self, idVenta=\"\", idCliente=\"\", idAbono=\"\",fecha=\"\"):\n if idVenta == \"\" and idCliente == \"\" and idAbono == \"\" and fecha == \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente'\")\n elif idVenta != \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and \\\n v.estado='Pendiente' and v.id=%s\"%(idVenta))\n elif idCliente 
!= \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and \\\n v.estado='Pendiente' and v.id_cliente='%s'\"%(idCliente))\n\telif idAbono != \"\":\n\t return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c, Abonos a\\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente'\\\n\t and a.id_venta=v.id and a.id=%s\"%(idAbono))\n\telif fecha != \"\":\n\t return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente' and\\\n\t v.fecha between '%s' and '%s'\"%(fecha[0],fecha[1]))", "def resultadosDiarios(self):\r\n self.checkingConnection()\r\n self.model = QSqlQueryModel()\r\n self.model.setQuery('''SELECT date1, ingresos, compras, gastos,\r\n (ingresos - compras - gastos) AS Saldo FROM (SELECT date1,\r\n ingresos, compras, gastos FROM ((SELECT Clients.date AS date1,\r\n SUM(Clients.value) AS ingresos FROM Clients GROUP BY Clients.date)\r\n JOIN (SELECT Compras.date AS date2, SUM(Compras.value) AS compras\r\n FROM Compras GROUP BY Compras.date) JOIN (SELECT Gastos.date AS date3,\r\n SUM(Gastos.value) AS gastos FROM Gastos GROUP BY Gastos.date)\r\n ON date1 = date2 AND date2 = date3))''', self.db)\r\n self.setModel(self.model)", "def cobroEfectivo(self):\n if self.total_a_pagar == 0:\n QtGui.QMessageBox.information(self,\"Aviso\",\"El saldo restante a pagar es cero\")\n else:\n self.rbtnEfectivo.setChecked(True)\n monto_a_pagar, ok = QtGui.QInputDialog.getDouble(self,\"Cobro Efectivo\",\"Ingrese monto a pagar\",0,0,2000,2)\n\n if ok:\n if monto_a_pagar >= self.total_a_pagar:\n QtGui.QMessageBox.information(self,\"Cobro Efectivo\",\"Su vuelto es:%.2f\" % (monto_a_pagar - self.total_a_pagar))\n temp = [\"Efectivo\",monto_a_pagar]\n self.detalles_cobro[self.tablePagos.rowCount()] = temp\n self.total_a_pagar = 0\n elif monto_a_pagar == 0:\n QtGui.QMessageBox.information(self,\"Aviso\",\"El monto ingresado no puede ser cero\")\n else:\n temp = [\"Efectivo\",monto_a_pagar]\n self.detalles_cobro[self.tablePagos.rowCount()] = temp\n self.total_a_pagar -= monto_a_pagar\n\n self.actualizar_total()\n self.actualizar_tabla()", "def make_more(self,result,con):", "def traer_enfermedad(request):\n tipo_motivos = MotivosConsultas.objects.filter(id=request.GET['pk'])\n data = serializers.serialize('json', tipo_motivos, fields={'descripcion'})\n return HttpResponse(data, content_type='application/json')", "def get_resultados(self):\n return self.__resultados", "def query3(request):\n query = \"SELECT pessoa_tutorada.nome as tutorado, pessoa_tutora.nome as tutor \\\n FROM example_usuario as usuario_tutelado \\\n JOIN \\\n example_tutelamento as tutelamento \\\n ON usuario_tutelado.id_usuario = tutelamento.id_usuario_tutelado_id \\\n JOIN \\\n example_usuario as usuario_tutor \\\n ON usuario_tutor.id_usuario = tutelamento.id_tutor_id \\\n JOIN example_pessoa as pessoa_tutorada\\\n ON pessoa_tutorada.cpf = usuario_tutelado.cpf_pessoa_id \\\n JOIN \\\n example_pessoa as pessoa_tutora \\\n ON pessoa_tutora.cpf = usuario_tutor.cpf_pessoa_id\"\n\n with connection.cursor() as cursor:\n cursor.execute(query)\n result = named_tuple_fetchall(cursor)\n\n # remove duplicates\n 
result = list(set(result))\n\n template = loader.get_template('example/query3.html')\n context = {'query3_result_list': result, }\n\n return HttpResponse(template.render(context, request))", "def escribir_resultados(res, archivo):\n n = len(res)\n for i in range(n):\n if i == 0:\n archivo.write(\"{:.4E}\".format(Decimal(res[i])))\n else:\n archivo.write(\"|\" + \"{:.4E}\".format(Decimal(res[i])))\n archivo.write(\"\\n\")", "def getResults():", "def almacenar_resultado_hilo(host, puerto, lista_puertos: list, mutex):\r\n if esta_abierto_puerto(host, puerto):\r\n mutex.acquire() # cierra el candado\r\n lista_puertos.append(puerto)\r\n mutex.release() # libera el candado\r", "def generarCombinaciones(self):\n combi = [list(x) for x in itertools.combinations(self.ResultConsultaLibre, 2)]\n self.CombiConsultaLibre=combi\n #print(self.CombiConsultaLibre)", "def envia_resposta(self,conexao,dados):\n conexao.sendall(dados)", "def _prepare_result(self, request):\n # TODO: get all params from request (via self)\n self._request = request\n return super(FullTextSearchModel, self)._prepare_result(self.__query_params)", "def buscarOs(self):\n\n if self.lineRazon.isEnabled():\n self.filtrarObra()\n\n elif not self.lineRazon.isEnabled() and (self.tableNC.rowCount() != 0 or self.tableFactura.rowCount() != 0):\n QtGui.QMessageBox.information(self,\"Aviso\",\"Imposible cambiar de Obra Social. Ya se ha seleccionado\\\n una\")\n else:\n self.gbNotaCredito.setEnabled(False)\n self.gbFactura.setEnabled(False)\n self.lineRazon.clear()\n self.lineRazon.setEnabled(True)\n self.lineCuit.clear()\n self.lineCuit.setEnabled(True)\n self.tableOs.setEnabled(True)", "def result_filter(self, result, **kwargs):\n return result", "def test_and_filtros(self): \n response = self.client.get('/apirest/expedientes/?tipo=PROYECTO&firm_persona_fisica_id=1566')\n self.assertEqual(response.data[\"count\"],self.CANT_EXPEDIENTES_X_FIRMANTE)\n self.assertEqual(response.data[\"results\"][0][\"tipo\"], self.TIPO_PROYECTO)\n self.assertEqual(response.data[\"results\"][0][\"periodo\"], self.PERIODO)", "def ingresarVenta(self, total, tipoPago, idCliente, usuarioColaborador, listaProductos):\n self._lockIngresarVenta.acquire()\n try:\n # agrupar totales vendidos de cada cod_barras\n numVendidosXcod = dict()\n # calcular subtotal, totalIVA basado en IVA y valorTotal de cada producto en lista. Subtotal=total-totalIVA\n totalIVA = 0\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n totalIVA += float(valorTotal) * float(IVA) / 100\n if numVendidosXcod.has_key(cod_barras):\n numVendidosXcod[cod_barras] += float(cantidad)\n else:\n numVendidosXcod[cod_barras] = float(cantidad)\n # almacena subtotal en variable\n subtotal = total - totalIVA \n # revisar si existen las cantidades disponibles de cada codigo\n for cod, cant in numVendidosXcod.iteritems():\n cantidadDisponible = self.getInfoProducto(cod)[0][2]\n if cant > cantidadDisponible:\n return (False,\"Cantidades insuficientes del producto %s. 
Cantidad disponible: %s y Cantidad en Venta: %s\"%(cod,cantidadDisponible,cant),-1)\n # obtener id_TipoPago dependiendo del tipo de pago\n id_TipoPago = self.getIdTipoPago(tipoPago)\n # obtener estado de la venta dependiendo del tipo de pago\n estado = self.estadoVentaDadoTipoPago(id_TipoPago)\n # ingresar la venta en Ventas \n if estado == __PAGADA__:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago,fechaPagoTotal,horaPagoTotal) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s,DATE('now','localtime'),TIME('now','localtime'))\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n else:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s)\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n # obtener id de última venta\n id_Venta = self.getIdUltimaVenta()\n # ingresar todos los productos de listaProductos en ProductosXVentas\n numItem = 1\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n self.conexion.ejecutarSQL(\"insert into ProductosXVentas (numero_item,codigo_producto,id_venta,usuario_Colaborador,cantidad,valor_unitario,IVA,valor_total) values (%s,'%s',%s,'%s',%s,%s,%s,%s)\"%(numItem,cod_barras,id_Venta,usuarioColaborador,cantidad,valorUnitario,IVA,valorTotal))\n\n\t\t#Por cada producto vendido, se debe actualizar la tabla kardex.\n\t\tcantidadKardex = cantidad\n\t\tidProductoKardex = cod_barras\n\t\tsaldos = self.conexion.ejecutarSQL(\"\"\"select saldo_cantidad, saldo_valor, costo_unitario from kardex\n where codigo_Producto='%s'\n order by fecha and hora\"\"\"%(idProductoKardex))\n\t\tif len(saldos) == 0:\n\t\t v_unitarioKardex = valorUnitario\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = cantidadKardex\n\t\t saldo_valorKardex = valor_TotalKardex\n\t\telse:\n\t\t v_unitarioKardex = saldos[len(saldos)-1][2]\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = saldos[len(saldos)-1][0]-float(cantidadKardex)\n\t\t saldo_valorKardex = saldos[len(saldos)-1][1]-valor_TotalKardex\n\t\tif float(saldo_cantidadKardex) != 0:\n costo_unitarioKardex = saldo_valorKardex/float(saldo_cantidadKardex)\n else:\n costo_unitarioKardex = saldo_valorKardex\n\t\ttry:\n\t\t self.conexion.ejecutarSQL(\"\"\"insert into Kardex (codigo_Producto, fecha, hora, detalle,cantidad, valor_total,saldo_cantidad, saldo_valor, valor_unitario,costo_unitario)\n values ('%s',DATE('now','localtime'),TIME('now','localtime'),'Venta',%s,%s,%s,%s,%s,%s )\"\"\"\n %(idProductoKardex, cantidadKardex, valor_TotalKardex, saldo_cantidadKardex,\n saldo_valorKardex, v_unitarioKardex, costo_unitarioKardex))\n\t\texcept Exception, e:\n\t\t print \"Kardex Venta: \", e\n\t\t self.conexion.rollback()\n\t\t return (False,str(e),-1)\n\t \n numItem += 1 \n # comprometer\n\t self.conexion.commit()\n return (True,\"\",id_Venta)\n except Exception, e:\n print \"ingresarVenta excepcion: \", e\n self.conexion.rollback()\n return (False,str(e),-1)\n finally:\n self._lockIngresarVenta.release()" ]
[ "0.6009778", "0.59987825", "0.5782047", "0.5738906", "0.5730106", "0.5718839", "0.5662571", "0.5583611", "0.55691737", "0.5564165", "0.5527081", "0.5526736", "0.5526736", "0.55092806", "0.54842013", "0.5460418", "0.5451924", "0.5433622", "0.5415844", "0.54150385", "0.5374498", "0.537408", "0.53738135", "0.53716856", "0.5359381", "0.53497684", "0.5311578", "0.53077495", "0.53047764", "0.53034264" ]
0.6492181
0
Updates the item (requisition or aliquot) to indicate that it is packed.
def _update_item(self, item, user): item.user_modified = user try: item.panel = item.panel item.item_priority = item.priority except AttributeError: pass item.is_packed = True item.save() return item
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def item_starred(self, item):\n self.update_item(item)", "def put(self, item):\n if self.closed:\n print \"Knapsack closed!\"\n else:\n Backpack.put(self, item)", "def updateItem(self, object):\n pass", "def carry(self, item):\r\n\r\n # If you can add with the tier,\r\n # you have to check that its viable to carry\r\n if self.add is True:\r\n\r\n # This takes the new item and makes it your current item\r\n if item.size is True:\r\n self.item = item", "def update_quantity(item: dict, new_qty):\n qty = item.get('quantity')\n if isinstance(qty, dict):\n item['quantity']['value'] = new_qty\n else:\n item['quantity'] = new_qty", "def purchase(self, item_type):", "def item_read(self, item):\n self.update_item(item)", "def queue_my_modif(self, uid, new_qty):\n \n message = self.OrdTuple(ordtype=\"modif\",\n uid=uid,\n is_buy=np.nan,\n qty=new_qty,\n price=np.nan, \n timestamp=self._arrival_time()) \n self.my_queue.append(message)", "def setContents(self, item):\n if item == None:\n self.pot.a(None, 0)\n else:\n self.pot.a(CraftMagicNumbers.getItem(item.getItemType()), item.getData())\n # PAIL: rename", "def put_req(self, item):\n self.export.put_req(item)", "def update_item(self, table, item):", "def add_package(self, package_item):\r\n self.RequestedShipment.RequestedPackageLineItems.append(package_item)\r\n package_weight = package_item.Weight.Value\r\n self.RequestedShipment.TotalWeight.Value += package_weight\r\n self.RequestedShipment.PackageCount += 1", "def add_package(self, package_item):\r\n self.RequestedShipment.RequestedPackageLineItems.append(package_item)\r\n package_weight = package_item.Weight.Value\r\n self.RequestedShipment.TotalWeight.Value += package_weight\r\n self.RequestedShipment.PackageCount += 1", "def req_item_in_shipment( shipment_item,\r\n shipment_type,\r\n req_items,\r\n ): \r\n \r\n shipment_item_table = \"logs_%s_item\" % shipment_type\r\n try:\r\n item_id = shipment_item[shipment_item_table].item_id\r\n except:\r\n item_id = shipment_item.inventory_store_item.item_id\r\n \r\n #Check for req_items\r\n if item_id in req_items: \r\n quantity_req_type = \"quantity_%s\" % shipment_to_req_type[shipment_type]\r\n \r\n #This item has been requested by this store\r\n req_item = req_items[item_id]\r\n req_item_id = req_item.id\r\n \r\n #Update the quantity_fulfil \r\n #convert the shipment items quantity into the req_tem.quantity_fulfil (according to packet)\r\n quantity = req_item[quantity_req_type] + \\\r\n (shipment_item[shipment_item_table].packet_quantity / \\\r\n req_item.packet_quantity) * \\\r\n shipment_item[shipment_item_table].quantity \r\n quantity = min(quantity, req_item.quantity) #Cap at req. 
quantity \r\n db.logs_req_item[req_item_id] = {quantity_req_type: quantity} \r\n \r\n #link the shipment_item to the req_item \r\n db[shipment_item_table][shipment_item[shipment_item_table].id] = dict(logs_req_item_id = req_item_id)\r\n \r\n #Flag req record to update status_fulfil \r\n return req_item.logs_req_id\r\n else:\r\n return None", "def update_after_pick(self, item_id):\n request_name = \"get_shop_info\"\n items = self.make_request(request_name, url_id=item_id)\n update_dict = dict()\n for key in items[0]:\n if str(key)[0] == \"_\":\n continue\n try:\n update_dict[key.encode('utf-8')] = items[0][key].encode('utf-8')\n except AttributeError:\n update_dict[key.encode('utf-8')] = items[0][key]\n\n update_dict['quantity'] -= 1\n resp = self.make_request('set_shop', url_id=item_id, arguments=update_dict)", "def __update_package(item):\n\n conn = sqlite3.connect(DTF_DB)\n cur = conn.cursor()\n\n # Remove the line first.\n sql = ('DELETE FROM packages '\n \"WHERE name='%s'\" % item.name)\n\n cur.execute(sql)\n\n entry = [(item.name, item.version, item.author,\n item.install_name)]\n\n # Update a Package Entry\n sql = ('INSERT INTO packages (name, version, '\n 'author, install_name)'\n 'VALUES (?, ?, ?, ?)')\n\n cur.executemany(sql, entry)\n conn.commit()\n\n return cur.rowcount", "def add_item(self, item):\n\n if item.descriptor in self.__slots:\n slot = self.__slots[item.descriptor]\n slot.quantity += 1\n else:\n self.__slots[item.descriptor] = Slot(item)", "def add_item(self,itm,qty=1):\n inv = self.get_inventory()\n s = str(itm)\n inv[s] = inv.get(s, 0) + qty\n self.put_inventory(inv)", "def put_req(self, item):\n self.req_q.put(item)", "def item_shared(self, item):\n self.update_item(item)", "def update(self, request: HttpRequest) -> None:\n from .modifiers import basket_modifiers_pool\n\n self.extra_rows = OrderedDict()\n self.unit_price = Decimal(self.product.get_price(request))\n self.subtotal = self.unit_price * self.quantity\n self.total = self.subtotal\n for modifier in basket_modifiers_pool.get_modifiers():\n modifier.process_item(self, request)", "def _apply_item(self, item: Item) -> bool:\n if self.locked:\n self.__locked = item.item_type != self.__key\n return not self.locked", "def stocks(self, value):\n self._modified = True\n self.quantity = value + self.reserved", "def lock(self, item_type):", "def give_item(self,item):\n self.inv[item.alias] = item.desc", "def put_in_quiet(self, item):\n try:\n self.bag_of_holding.append(item)\n except:\n print('Error in Inventory method: put_in')", "def getitem(self):\n self.inventory += 1", "def _put(self, item, queue):", "def item_info(self, item_info):\n\n self._item_info = item_info", "def update(self, request: HttpRequest) -> None:\n from .modifiers import basket_modifiers_pool\n\n items = self.get_items()\n self.extra_rows = OrderedDict()\n self.subtotal = 0\n for item in items:\n item.update(request)\n self.subtotal += item.total\n self.total = self.subtotal\n for modifier in basket_modifiers_pool.get_modifiers():\n modifier.process_basket(self, request)\n self._cached_items = items" ]
[ "0.612828", "0.6083184", "0.56750643", "0.56587964", "0.5611489", "0.5564478", "0.5563317", "0.5547801", "0.554539", "0.5531961", "0.5513932", "0.5505557", "0.5505557", "0.5501193", "0.5470519", "0.541952", "0.540039", "0.5392359", "0.5391287", "0.53460723", "0.5334588", "0.5327968", "0.5320482", "0.53202915", "0.5309808", "0.53072584", "0.53062505", "0.5274545", "0.5268998", "0.52688825" ]
0.6141786
0
Creates or updates the packing list item for this "item".
def _create_or_update_packinglistitem(self, item_identifier, item, user, optional_attrs={}): try: packing_list_item = self.packing_list.packing_list_item_model.objects.get( packing_list=self.packing_list, item_reference=item_identifier) except self.packing_list.packing_list_item_model.DoesNotExist: try: optional_description = item.optional_description or '' except AttributeError: optional_description = None options = { 'requisition': item._meta.verbose_name, 'item_description': '{subject_identifier} ({initials}) VISIT:{visit} DOB:{dob} {optional}'.format( subject_identifier=item.registered_subject.subject_identifier, initials=item.registered_subject.initials, visit=item.visit_code, dob=item.registered_subject.dob, optional=optional_description, ), 'user_created': user, } options.update(**optional_attrs) packing_list_item = self.packing_list.packing_list_item_model.objects.create( packing_list=self.packing_list, item_reference=item_identifier, **options) return packing_list_item
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self):\n # convert the text list of item identifiers into a list of parsed identifiers\n item_identifiers = filter(None, self.packing_list.list_items.replace('\\r', '').split('\\n'))\n # loop through list of parsed identifiers\n for item_identifier in item_identifiers:\n # 1. get the 'item' instance for this identifier and update it (e.g. SubjectRequisition, Aliquot)\n # 2. create a 'packing_list_item' instance related to this packing_list\n for item_model in self.packing_list.item_models:\n try:\n try:\n item = item_model.objects.get(specimen_identifier=item_identifier)\n optional_attrs = {'panel': item.panel, 'item_priority': item.priority}\n except FieldError:\n item = item_model.objects.get(aliquot_identifier=item_identifier)\n optional_attrs = {}\n user = self.user or item.user_modified\n self._update_item(item, user)\n self._create_or_update_packinglistitem(\n item_identifier,\n item,\n user,\n optional_attrs=optional_attrs)\n except item_model.DoesNotExist:\n pass", "def put(self, item): \n if len(self.contents) < self.max_size:\n self.contents.append(item)\n elif len(self.contents) >= self.max_size:\n print \"Backpack Full.\"", "def _update_item(self, item, user):\n item.user_modified = user\n try:\n item.panel = item.panel\n item.item_priority = item.priority\n except AttributeError:\n pass\n item.is_packed = True\n item.save()\n return item", "def put(self, item):\n if self.closed:\n print \"Knapsack closed!\"\n else:\n Backpack.put(self, item)", "def push(self, Item):\n self.data_container.insert(0, Item)", "def hfp_firmware_pack_item_add(handle, org_dn, hfp_name, hw_vendor, hw_model,\r\n type, version):\r\n\r\n from ucsmsdk.mometa.firmware.FirmwarePackItem import FirmwarePackItem\r\n\r\n dn = org_dn + \"/fw-host-pack-\" + hfp_name\r\n obj = handle.query_dn(dn)\r\n if obj is None:\r\n raise ValueError(\"HFP '%s' does not exist\" % dn)\r\n\r\n mo = FirmwarePackItem(hw_vendor=hw_vendor,\r\n hw_model=hw_model,\r\n type=type,\r\n version=version)\r\n handle.add_mo(mo)\r\n handle.commit()\r\n\r\n return mo", "def push(self, new_item):\n self.items.append(new_item)", "def add_item ( self, offset ):\n list, index = self.get_info()\n index += offset \n item_trait = self.factory.trait_handler.item_trait\n value = item_trait.default_value()[1]\n self.value = list[:index] + [ value ] + list[index:]", "def add_to_group(self,item):\n self.items.append(item)\n self.n += 1", "def add(self, item):\n pb = self._field.add()\n new_item = self._factory.make(item)\n for field in self._factory.PB_CLASS.DESCRIPTOR.fields_by_name.keys():\n if hasattr(new_item, field):\n if isinstance(\n self._factory.PB_CLASS.DESCRIPTOR.fields_by_name[\n field\n ].message_type,\n Descriptor,\n ):\n getattr(pb, field).CopyFrom(getattr(new_item, field))\n else:\n setattr(pb, field, getattr(new_item, field))\n new_item._pb = pb\n self._items.append(new_item)\n return new_item", "def pickUpItem(self, app, newItem: Stack):\n\n if newItem.isEmpty(): return\n\n # Prioritize existing stacks of the item first\n for (i, slot) in enumerate(self.inventory):\n stack = slot.stack\n if stack.isInfinite() and stack.item == newItem.item:\n # It just stacks into an infinite slot, so no change\n return\n elif newItem.isInfinite() and stack.item == newItem.item:\n # ditto\n return \n elif stack.amount > 0 and stack.item == newItem.item:\n self.inventory[i].stack.amount += newItem.amount\n return\n\n # If that fails, then just add the item to the next open space\n for (i, slot) in enumerate(self.inventory):\n if 
slot.isEmpty():\n self.inventory[i].stack = newItem\n return\n \n # TODO: Full inventory??\n 1 / 0", "def updateItem(self, object):\n pass", "def put_in(self, item):\n try:\n self.bag_of_holding.append(item)\n print(\"You have added {} to your inventory.\".format(item))\n except:\n print('Error in Inventory method: put_in')", "def push(self, item):\n super().add_item_to_front(item)", "def add_item(self):\n item = LibGen.create_item()\n if not self.item_exists(item.call_number):\n self.item_list[item.call_number] = item\n print(f\"Item({item.call_number}) bas been added.\")\n else:\n print(\"This item already exists.\")", "def update_or_create_delivery(self, orderitem_data):", "def push(self,item):\n self.items.append(item)", "def push(self, item):\n\t\tself.items.append(item)", "def create(self):\n item = BasketItem.create()\n self._baskets[item.code] = item\n return item.code", "def create_work_item(self):", "def add(self, item):", "def add_item(self, item):\n\n if item.descriptor in self.__slots:\n slot = self.__slots[item.descriptor]\n slot.quantity += 1\n else:\n self.__slots[item.descriptor] = Slot(item)", "def _insert(self, item):\n if item.room is not None:\n item.room.remove(item)\n\n item.player = self\n self._inventory.append(item)\n\n # if the item is a container, add to inventory its contents\n if item.container:\n for con_item in item.items:\n self._insert(con_item)", "def add(self, item):\n # make sure there's enough space to fit all items\n if self.container.capacity(Item.MIN_SIDE_SIZE) < len(self.items) + 1:\n raise LayoutError(\"container too small to fit all items\")\n\n self.items.append(item)\n coords = self.item_coordinates(len(self.items))\n\n self.arrange(coords)\n\n if self.items_intersect():\n raise LayoutError(\"overlapping items\")", "def push(self, item):\n self.items.append(item)", "def push(self, item):\n self.items.append(item)", "def push(self, item):\n self.items.append(item)", "def push(self, item):\n self.items.append(item)", "def push(self, item):\n self.items.append(item)", "def push(self, item):\n pass" ]
[ "0.69560045", "0.637883", "0.6123722", "0.60804576", "0.58273536", "0.5745075", "0.56938714", "0.56452906", "0.56450784", "0.5631005", "0.55990785", "0.5588541", "0.5587934", "0.55638516", "0.5562925", "0.5557327", "0.55541205", "0.5547544", "0.5545593", "0.5533263", "0.55158037", "0.5514943", "0.55065244", "0.5504586", "0.5499649", "0.5499649", "0.5499649", "0.5499649", "0.5499649", "0.5491347" ]
0.7180476
0
Return the default Inline code example directory path
def get_example(): ex_dir = os.path.join(app.config['TEMPLATE_DIR'], "inline_code") app.logger.debug("Example directory is {}".format(ex_dir)) return ex_dir
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def path_notebooks():\n return os.path.abspath(\n os.path.join(os.path.dirname(__file__), os.path.pardir, \"examples\")\n )", "def get_example_filepath(filename):\n # File is relative to calling file?\n callingfn = os.path.abspath(inspect.stack()[1].filename)\n return os.path.join(\n os.path.dirname(callingfn), \"..\", \"..\", \"examples\", filename\n )", "def get_example(name):\n this = os.path.abspath(os.path.dirname(__file__))\n full = os.path.join(this, name)\n if not os.path.exists(full):\n raise FileNotFoundError(\"Unable to find example '{0}'\".format(name))\n return full", "def example(*paths):\n\n return normpath(join(dirname(__file__), '..', 'examples', *paths))", "def __default_pptx_path(self):\n thisdir = os.path.split(__file__)[0]\n return os.path.join(thisdir, 'templates', 'default.pptx')", "def _get_default_path(self):\n return os.path.join(cfg.DATA_DIR, 'visual_genome')", "def example_data_path(filename=None):\n if filename is None:\n filename = ''\n return os.path.join(os.path.abspath(os.path.dirname(__file__)),\n 'example_data', filename)", "def _get_default_path(self):\n # return os.path.join(datasets.ROOT_DIR, 'data', 'MSRC21')\n # set local path\n return u'/Users/danilonunes/workspace/datasets/msrc21/'", "def data_dir():\n #data_path = os.path.dirname(intervene.__file__)\n #data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'example_data')\n #print(data_path)\n return os.path.join(os.path.dirname(__file__), 'example_data')", "def _get_default_path(self):\n return os.path.join(cfg.ROOT_DIR, 'data', 'KITTI')", "def default_module_dir(self):\n return os.path.dirname(self._modules['default'].path)", "def menpowidgets_src_dir_path():\n # to avoid cluttering the menpowidgets.base namespace\n from pathlib import Path\n import os.path\n\n return Path(os.path.abspath(__file__)).parent", "def config_path():\n dir_ = os.path.dirname(__file__)\n demo_dir = os.path.join(dir_, '../..')\n return os.path.join(demo_dir, 'mike_dev.ini')", "def GetSrc():\n return os.path.abspath(os.path.join(_THIS_DIR, os.pardir, os.pardir,\n os.pardir))", "def base_dir(context):\n return '{}'.format(os.getcwd())", "def demo_paths(self):\n base_path = os.path.join(self.module.__path__[0], 'demo')\n paths = []\n if os.path.isdir(base_path):\n for item in os.listdir(base_path):\n # TODO: support examples which is not auto-loaded\n if not os.path.isdir(os.path.join(base_path, 'examples')):\n paths.append(os.path.join(base_path, item))\n return paths", "def scriptpath(self, code) -> str:\n return ''", "def get_golem_path():\r\n return os.path.abspath(os.path.join(os.path.dirname(__file__), \"../\"))", "def source_root_dir():\n return os.path.abspath(os.path.dirname(__file__))", "def get_sample_data_dir():\n \n return resource_filename('cdat_lite.test.test_cdms', 'sample_data')", "def DefaultPath(self) -> str:\n return self.m_def_path", "def scriptpath(self, code):\n return '' if code == 'en' else ('/' + code)", "def samples_path():\n dir = os.path.dirname(os.path.abspath(__file__))\n samples = os.path.join(dir, 'samples')\n return samples", "def path(self) -> str:\n return self.src + \"/\"", "def defaultOutputFilepath(self):\n return self.outputFilepath('TulipOutput.txt')", "def default_output_path():\n\n documents = os.path.join(os.path.expanduser('~'))\n try:\n documents = _xdg_documents_path()\n except: pass\n if platform.system() == 'Windows':\n try:\n documents = _win_documents_path()\n except: pass\n\n return os.path.join(documents, 'Topographica')", "def 
_get_default_path(self):\n return os.path.join(cfg.DATA_DIR, 'VOCdevkit' + self._year)", "def _get_default_path(self):\n return os.path.join(cfg.DATA_DIR, 'VOCdevkit' + self._year)", "def experiment_dir(experiment_name: str) -> Path: # pragma: no cover\n return EXPERIMENTS_DIR / experiment_name", "def getDefaultDataSearchPath():\n return FileSearchPath(os.path.dirname(__file__))" ]
[ "0.6818397", "0.6614962", "0.6478994", "0.64300853", "0.63773614", "0.6267805", "0.6079151", "0.6002472", "0.5951702", "0.5929952", "0.5910089", "0.590533", "0.5868618", "0.5858667", "0.5810373", "0.57956874", "0.57696295", "0.57598346", "0.57552767", "0.5723426", "0.572342", "0.56989723", "0.56794435", "0.56636745", "0.56608933", "0.5659796", "0.5658323", "0.5658323", "0.565074", "0.5650069" ]
0.8290392
0
gets all dcds in a root directory
def find_dcds(src):
    dcd_paths = []
    for root, dirs, files in os.walk(src):
        for filename in files:
            if filename.endswith(".dcd"):
                dcd_paths.append(os.path.join(root, filename))
    return dcd_paths
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_all_dicom_files(root_path):\n dicoms = set()\n\n try:\n for fpath in get_all_files(root_path):\n if is_dicom_file(fpath):\n dicoms.add(fpath)\n except IOError as ioe:\n raise IOError('Error reading file {0}.'.format(fpath)) from ioe\n\n return dicoms", "def getDataFiles(directoryName):\r\n \r\n return listdir(directoryName)", "def get_dfs(d):\n dfs, nombres = [], []\n for folder in tqdm(os.listdir(d), desc=\"GETTING DFS\"):\n try:\n nombre = [\n f\n for f in os.listdir(f\"{d}/{folder}/\".replace(\".zip\", \"\"))\n if \".shp\" in f\n ][0]\n dfs.append(\n gpd.read_file(\n f\"{d}/{folder}/{nombre}\".replace(\".zip\", \"\"), encoding=\"latin1\"\n )\n )\n nombres.append(nombre)\n except Exception as e:\n print(e)\n return dfs, nombres", "def getAllCWDs():\n return [\"%01d%01d%s\" % (card, pair, dom)\n for card in range(MAXCARDS)\n for pair in range(MAXPAIRS)\n for dom in DOMLABELS]", "def find_data(self):\n data_list = []\n for root, dirs, files in os.walk(pathfinder.data_path()):\n for name in files:\n data_list.append(os.path.join(root, name))\n return data_list", "def fetch_dset_dirs(dset_name=None):\n assert (dset_name is None) or (dset_name in DATASET_DIRS), \"invalid name\"\n\n dset_name = \"default\" if dset_name is None else dset_name\n\n home = os.path.expanduser(\"~\")\n\n return list(os.path.join(home, d) for d in DATASET_DIRS[dset_name])", "def _get_docs_in_dir(dir_id):\n querystring = 'select docname from {} where dir_id = %s;'.format(TABLES[3])\n result = execute_query(querystring, (dir_id,))\n if result:\n return [x[0] for x in result]\n return []", "def get_all_files(cwd):\n return os.listdir(cwd)", "def list_all():\n if os.path.exists(DATA_DIR):\n return os.listdir(DATA_DIR)\n return []", "def get_all_cycle_data(parser: ArgumentParser) -> None:\n parser.add_argument(\n \"--input-dir\", type=Path, help=\"Directory containing database files\"\n )", "def finddirs(root):\n retval = []\n for root, dirs, files in os.walk(root):\n for d in dirs:\n retval.append(os.path.join(root, d))\n return retval", "def getImmediateSubdirectories(dir):", "def getFileListDAS(dataset,blacklist=[ ]):\n dataset = dataset.replace('__','/')\n if dataset[0]!='/':\n dataset = '/'+dataset\n instance = 'prod/global'\n if 'USER' in dataset:\n instance = 'prod/phys03'\n #cmd='das_client --limit=0 --query=\"file dataset=%s instance=%s\"'%(dataset,instance)\n cmd = 'das_client --limit=0 --query=\"file dataset=%s instance=%s status=*\"'%(dataset,instance)\n if args.verbose:\n print \"Executing \",cmd\n cmd_out = getoutput( cmd )\n tmpList = cmd_out.split(os.linesep)\n filelist = [ ]\n for line in tmpList:\n if '.root' in line and line not in blacklist:\n #files.append(\"root://cms-xrd-global.cern.ch/\"+line) # global\n filelist.append(\"root://xrootd-cms.infn.it/\"+line) # Eurasia\n filelist.sort()\n return filelist", "def scandir(url: str) -> Iterable[DirEntry]:\n authenticated = credentials.authenticate(url)\n return SCANNER_REGISTRY.get_handler(authenticated.scheme).scandir(authenticated)", "def listdir(self, subdir=''):\n\n try:\n subdir = subdir.decode()\n except AttributeError:\n pass\n subdir = subdir.rstrip('\\\\')\n # cmd = '\"%s\" \"%s\" 0 ' % (self.ndc_path, self.filename)\n cmd = [\n self.ndc_path,\n self.filename,\n '0'\n ]\n if subdir:\n cmd.append(subdir)\n # cmd += '\"%s\"' % subdir\n\n logging.info(cmd)\n try:\n result = check_output(cmd)\n except CalledProcessError:\n raise FileNotFoundError('Subdirectory not found in disk', [])\n\n result = [r.split(b'\\t') for r in 
result.split(b'\\r\\n')]\n result = list(filter(lambda x: len(x) == 4, result))\n\n filenames = []\n subdirs = []\n for r in result:\n try:\n decoded = r[0].decode('shift_jis')\n if r[2] != b'<DIR>':\n filenames.append(decoded)\n elif r[2] == b'<DIR>' and len(r[0].strip(b'.')) > 0:\n subdirs.append(decoded)\n except UnicodeDecodeError:\n logging.info(\"Couldn't decode one of the strings in the folder: %s\" % subdir)\n continue\n\n return filenames, subdirs", "def get_all():\n if not SERVICE_DIR:\n raise CommandExecutionError(\"Could not find service directory.\")\n # - List all daemontools services in\n return sorted(os.listdir(SERVICE_DIR))", "def get_directory_list(self):\r\n lines = []\r\n self.ftp.retrlines('LIST', lines.append)\r\n return lines", "def list_dir(self, path):", "def segment_paths(root):\n directories = []\n history = history_path(root)\n for d in os.listdir(history):\n path = os.path.join(history, d)\n if os.path.isdir(path):\n directories.append(path)\n return sorted(directories)", "def get_data_files(dirname):\r\n flist = []\r\n for dirpath, _dirnames, filenames in os.walk(dirname):\r\n for fname in filenames:\r\n flist.append(osp.join(dirpath, fname))\r\n return flist", "def get_all_disks():\n return DISKS_API.get(abs_link=False)", "def list_directory(self, path):\n dirent = self.lookup(path)\n if dirent and dirent.is_directory():\n best_fit = self.retrieve_catalog_for_path(path)\n return best_fit.list_directory(path)", "def get_sub_dir_dates(main_folder):\n # remove leading \"hdfs://nameservice1\"\n if main_folder.startswith('hdfs://nameservice1'):\n main_folder = main_folder[19:]\n ls_com = 'hdfs dfs -ls ' + main_folder\n dates =set()\n # Filter collect folders to delete\n d = '=([0-9]{4}-[0-9]{2}-[0-9]{2})'\n for x in os.popen(ls_com):\n x = x.strip()\n m = re.search(d,x)\n if bool(m):\n dt = m.group(1)\n dates.add(dt)\n return dates", "def get_final_dirs(self, root=\"\"):\n _updated = int(self.stats()[\"db_update\"])\n _hash = uhash(root)\n return self._get_final_dirs(_updated=_updated, _hash=_hash, root=root)", "def _list_datasets_from_dir(path: github_api.GithubPath) -> List[str]:\n if not path.exists():\n # Should be fault-tolerant in the future\n raise FileNotFoundError(f'Could not find datasets at {path}')\n return sorted([ds for ds in path.iterdir() if _is_dataset_path(ds)])", "def list_dirs(site_name, doctype=''):\n siteid = _get_site_id(site_name)\n if siteid is None:\n raise FileNotFoundError('no_site')\n\n ## probable inefficient approach\n ## for dir_id in dirids:\n ## docs = _get_docs_in_dir(dir_id)\n ## for docname in docs:\n ## docids = _get_doc_ids(dir_id, docname)\n ## if docids[1] is not None:\n ## diridlist.append(dir_id)\n querystring = 'select id, dirname from {} where site_id = %s;'\n result = execute_query(querystring.format(TABLES[2]), (siteid,))\n dirmap = {row['id']: row['dirname'] for row in result}\n dirids = [x for x in dirmap]\n if doctype in ('', 'src'):\n pass\n elif doctype == 'dest':\n querystring = 'select dir_id, target_docid from {} where dir_id = any(%s);'\n result = execute_query(querystring.format(TABLES[3]), (dirids,))\n dirids = set()\n for row in result:\n if row['target_docid'] is not None:\n dirids.add(row['dir_id'])\n else:\n raise RuntimeError('wrong doctype for list_dirs')\n dirlist = []\n for id in dirids:\n test = dirmap[id]\n if test != '/':\n dirlist.append(test)\n return dirlist # returns all dirs that have documents of the given type", "def get_dirs(self, path):\n ds = []\n try:\n for d in os.listdir(path):\n if 
os.path.isdir(os.path.join(path, d)):\n ds.append(d)\n except OSError:\n pass\n ds.sort()\n return ds", "def get_all_paths(dmt, directory_path=''):\n # Base case.\n if not dmt.children:\n return set()\n \n filesystem_items = set()\n for item in dmt.children.keys():\n filesystem_items.add(directory_path+item)\n # Also get the paths of subdirectory contents.\n if item[-1] == '/':\n subdir_name = item\n subdir_path = directory_path + subdir_name\n \n filesystem_items.add(subdir_path)\n filesystem_items.update(get_all_paths(dmt.children[subdir_name], subdir_path))\n \n return filesystem_items", "def fetch_data(data_dir):\n check_dir(data_dir)\n\n data = list()\n for data_file in os.listdir(data_dir):\n data_file_path = normalize_path([data_dir, data_file])\n data.append(read_data_file(data_file_path))\n\n data = pd.concat(data, sort=True)\n data.sort_values(by=\"datetime\", inplace=True)\n return data.reset_index(drop=True)", "def get_all_datasets_conf_ds():\n listOfDatasetDSConfig = []\n sqlObj = _DS_config_DS_SQL()\n results = sqlObj.select_all_DDI_DB()\n for element in results:\n listOfDatasetDSConfig.append(Dataset_conf_ds(element[0], element[1], element[2], element[3]))\n return listOfDatasetDSConfig" ]
[ "0.6467605", "0.6281241", "0.6223496", "0.6196441", "0.6150646", "0.6143475", "0.60883945", "0.602333", "0.6016977", "0.5946605", "0.59250385", "0.58954626", "0.58720845", "0.5869424", "0.58076346", "0.5761363", "0.57583165", "0.57532775", "0.57412195", "0.57371", "0.5716051", "0.57081", "0.5687848", "0.5683538", "0.5669276", "0.5613576", "0.5593032", "0.55924076", "0.55727917", "0.55714756" ]
0.69657594
0
Creates a list of Document tuples from all the lines of a file.
def create_document_list(lines_of_file):
    document = []
    documents = []
    for line in lines_of_file:
        document.append(line.rstrip())
        # Either a newline of the last line
        if line == '\n' or line == lines_of_file[-1]:
            documents.append(create_document(document))
            document = []
    return documents
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_file(filename):\n\n all_documents = []\n document = []\n with tf.gfile.GFile(filename, \"r\") as reader:\n for line in reader:\n line = line.strip()\n line = tokenization.convert_to_unicode(line)\n line = line.replace(u\"\\u2018\", \"'\").replace(u\"\\u2019\", \"'\")\n sents = split_line_by_sentences(line)\n for sent_line in sents:\n if not sent_line or len(sent_line) < 4: # Arbitrary min length for line\n continue\n if sent_line.lower()[:7] == \"chapter\":\n if document:\n all_documents.append(document)\n document = []\n else:\n document.append(sent_line)\n if len(document) == FLAGS.max_para_length:\n all_documents.append(document)\n document = []\n if document:\n all_documents.append(document)\n\n # Remove small documents\n all_documents = [x for x in all_documents if len(x) >= 8]\n\n return all_documents", "def get_lines_from_source(self):\n extension = self.get_doc_file_extension()\n if extension in ('txt', ''):\n return tuple(line.decode('utf-8') for line in self.doc_file.readlines())\n elif extension == 'docx':\n docx_document = Docx(BytesIO(self.doc_file.read()))\n return tuple(paragrah.text for paragrah in docx_document.paragraphs)\n elif extension == 'pdf':\n raise NotImplementedError()\n else:\n raise ValueError(\"file_format not supported\")", "def read_docs(file_path, tokenizer):\n # working structure used to store each document\n all_docs = []\n doc, end_of_doc = [], False\n\n line_cnt = 0\n tf.logging.info(\"Start processing %s\", file_path)\n for line in tf.io.gfile.GFile(file_path):\n if line_cnt % 100000 == 0:\n tf.logging.info(\"Loading line %d\", line_cnt)\n\n if not line.strip():\n # encounter an empty line (end of a document)\n end_of_doc = True\n cur_sent = []\n else:\n cur_sent = tokenizer.convert_text_to_ids(line.strip())\n\n if cur_sent:\n line_cnt += 1\n doc.append(np.array(cur_sent))\n\n # form a doc\n if end_of_doc or sum(map(len, doc)) >= FLAGS.max_doc_len:\n # only retain docs longer than `min_doc_len`\n doc_len = sum(map(len, doc))\n if doc_len >= max(FLAGS.min_doc_len, 1):\n all_docs.append(doc)\n\n # refresh working structs\n doc, end_of_doc = [], False\n\n # deal with the leafover if any\n if doc:\n # only retain docs longer than `min_doc_len`\n doc_len = sum(map(len, doc))\n if doc_len >= max(FLAGS.min_doc_len, 1):\n all_docs.append(doc)\n\n tf.logging.info(\"Finish %s with %d docs from %d lines.\", file_path,\n len(all_docs), line_cnt)\n\n return all_docs", "def parse_document(file):\n lines = file.read_text(encoding='utf-8').split('\\n')\n # If the \"#\" character is present, it means the line contains the\n # document original link. 
So, if the # is not present,\n # we have a normal paragraph to append to the list.\n return [line for line in lines if line != '' and '#' not in line]", "def read_file(filename):\n\n all_documents = []\n document = []\n with tf.gfile.GFile(filename, \"r\") as reader:\n for line in reader:\n line = line.strip()\n if not line:\n continue\n if line.lower()[:7] == \"chapter\":\n if document:\n all_documents.append(document)\n document = []\n else:\n document.append(line)\n if document:\n all_documents.append(document)\n\n return all_documents", "def read_lines_from_file(fname):\n return []", "def get_file_lines(filename):\n\n with open(filename, \"r\") as lines:\n lines = lines.readlines() # Saves list of each poem line in lines\n\n for _ in range(len(lines)):\n lines[_] = lines[_].rstrip() # Removes newline char from right-side end of each poem line\n\n return lines", "def listfromfilelines(file):\r\n with open(file, 'r') as f:\r\n list = [line.strip().decode('utf-8') for line in f]\r\n return list", "def read_file(filename) -> List[Todo]:\n with pathlib.Path(filename).expanduser().open('r') as fp:\n return [Todo(_id, line) for _id, line in enumerate(fp)]", "def read_data(cls, input_file):\n with tf.gfile.Open(input_file, \"r\") as f:\n lines = []\n for line in f:\n line = line.strip()\n if line.startswith('-DOCSTART-'):\n continue\n else:\n word_labels = line.split('-seq-')\n assert len(word_labels) == 2\n\n words = word_labels[0]\n labels = word_labels[1]\n lines.append([words, labels])\n\n return lines", "def _read(self, file_path: str) -> Iterator[Instance]:\n with open(file_path) as f:\n for line in f:\n pairs = line.split()\n words, tags = zip(*(pair.split(\"###\") for pair in pairs))\n yield self.text_to_instance([Token(word) for word in words], tags)", "def _fileLinesToList(filename) :\n o = []\n with open(filename, \"r\") as fi :\n for l in fi :\n if l.strip() != \"\" :\n o.append(l.strip())\n return o", "def file_to_list(path):\n fd = open(path)\n t = list()\n for line in fd:\n t += process_line(line)\n\n return t", "def get_doc(filename :str) -> List[List[str]]:\n\tdata = []\n\ttry:\n\t\twith open(filename, 'r', encoding='utf-8') as f:\n\t\t\tcontent = f.read()\n\t\t\t# print(content)\n\t\t\tpattern = re.compile(r\"<doc.*?>(.*?)</doc>\",re.S)\n\t\t\ttexts = re.findall(pattern, content)\n\t\t\t# print(data)\n\n\t\t\tfor text in texts:\n\t\t\t\t# print(text)\n\t\t\t\ttemp = process_doc(text)\n\t\t\t\tdata.extend(temp)\n\t\t\t\t# print(len(temp))\n\n\t\t\treturn data\n\n\texcept IOError as e:\n\t\tprint(\"the file {} cannot open\".format(filename))\n\t\tprint(e)\n\t\traise IOError", "def make_documents(f, index: str) -> typing.Iterator[dict]:\n\n while True:\n line = f.readline()\n if not line:\n break\n idx = int(line.strip())\n line = f.readline()\n doc = {\n '_index': index,\n '_type': \"_doc\",\n '_source': line.strip(),\n '_id': idx,\n }\n yield doc", "def _lines(filename):\n \n handle = gzip.open(filename, 'rt') if _gz(filename) else open(filename)\n for line in handle:\n if not line.startswith('#'):\n yield line.strip().split('\\t')", "def read(self, content: str):\n documents = []\n # 1. Split the text in documents using string '-DOCSTART- -X- O O' and loop over it\n content = content.split('-DOCSTART- -X- O O')\n for doc in content:\n if doc != '':\n words = []\n sentences = []\n labels = []\n start = 0\n # 2. Split lines and loop over\n str_sentences = doc.split('\\n\\n')\n # 3. 
Make vectors of tokens and labels (colunn 4) and at the '\\n\\n' make a sentence\n for sentence in str_sentences:\n if sentence != '':\n tokens = sentence.split('\\n')\n for token in tokens:\n if ' ' in token :\n cols = token.split(' ')\n words.append(cols[0])\n labels.append(cols[1])\n sentences.append(Sentence(doc, start, start+len(tokens)))\n start += len(tokens)\n # 4. Create a Document object\n documents.append(Document.create_from_vectors(words, sentences, labels))\n\n return documents", "def get_lines_from_file(fname, context=None):\n content = []\n if context and context.ddboost:\n contents = get_lines_from_dd_file(fname, context.ddboost_storage_unit)\n return contents\n else:\n with open(fname) as fd:\n for line in fd:\n content.append(line.strip('\\n'))\n return content", "def parse_file(self, file_path) -> list:\n data = []\n with open(file_path, 'rb') as f:\n lines = pickle.load(f)\n for line in lines:\n input, output = line\n if input.strip() == \"\" or output.strip() == \"\":\n continue\n input_len = len(input.split())\n output_len = len(output.split())\n if input_len > 50 or output_len > 50:\n continue\n data_item = Text2TextDataItem(input_text=input, output_text=output, tokenizer=self.tokenizer,\n share_vocab=self.share_vocab)\n data.append(data_item)\n return data", "def load_lines(filename):\r\n lines = []\r\n f = open(filename)\r\n for line in f.readlines():\r\n line = line.strip()\r\n lines.append(line)\r\n return lines", "def parse(filename):\n with open(filename) as file:\n lines = [line.strip() for line in file]\n return lines", "def FileToList(FilePath):\r\n List = []\r\n with open(FilePath) as f:\r\n for line in f:\r\n List.append(line.rstrip())\r\n return List", "def read_file_lines(afile):\n with open(afile, 'r') as f:\n lines = f.read()\n return lines.splitlines()", "def from_lines(\n cls,\n lines: Iterable[str],\n encoding: str = DEFAULT_ENCODING,\n newline: str = DEFAULT_NEWLINE,\n mtime: str = \"\",\n ) -> \"TextDocument\":\n return cls(None, lines, encoding=encoding, newline=newline, mtime=mtime)", "def read_data(input_file):\n\n def process_line(labels, words):\n l = ' '.join([label for label in labels if len(label) > 0])\n w = ' '.join([word for word in words if len(word) > 0])\n lines.append((l, w))\n words = []\n labels = []\n return words, labels, lines\n\n rf = open(input_file, 'r')\n lines = [];\n words = [];\n labels = []\n for line in rf:\n word = line.strip().split(' ')[0]\n label = line.strip().split(' ')[-1]\n # here we dont do \"DOCSTART\" check\n\n if len(line.strip()) == 0: # and words[-1] == '.'\n words, labels, lines = process_line(labels, words)\n words.append(word)\n labels.append(label)\n rf.close()\n return lines", "def read_corpus(corpus_path):\n idx = 0\n data = []\n with open(corpus_path, encoding='utf-8') as fr:\n lines = fr.readlines()\n sent_, tag_, pos_, ner_ = [], [], [], []\n\n for line in lines:\n idx += 1\n if line.find(\"DOC-ID\") < 0 and line != '\\n':\n try:\n [char, label, pos_tag, ner_tag] = line.strip().split()\n sent_.append(char)\n tag_.append(label)\n pos_.append(pos_tag)\n ner_.append(ner_tag)\n except:\n print(line)\n else:\n # print(line)\n if idx > 1:\n data.append((sent_, tag_, pos_, ner_))\n sent_, tag_, pos_, ner_ = [], [], [], []\n\n return data", "def split_docx_file(filename: str) -> List[str]:\n if not filename.endswith('.docx'):\n raise ValueError('File extension must be .docx')\n\n file_text: str = docx2txt.process(filename).replace('\\xa0', '')\n return drop_empty_lines(file_text.split('\\n'))", "def 
read_sentences(f):\n with open(f, 'r') as conll_file:\n s = [ROOT]\n for line in conll_file:\n if line.strip() and not line.startswith('#'):\n s.append(read_token(line))\n elif len(s) != 1:\n yield s\n s = [ROOT]\n if len(s) != 1: # file ended without a new line at the end\n yield s", "def readlines(self):\n return [line for line in self]", "def read_file_into_list(filename):\n with open(filename) as file:\n return file.readlines()" ]
[ "0.67407095", "0.6698694", "0.6641464", "0.6508077", "0.6484047", "0.6472933", "0.6400455", "0.63404316", "0.6309761", "0.62904716", "0.6286486", "0.62785584", "0.6264917", "0.6254483", "0.6235207", "0.6192921", "0.6180794", "0.6166221", "0.6134856", "0.6134638", "0.613246", "0.6081402", "0.607291", "0.6051576", "0.6035715", "0.6003766", "0.59832764", "0.5966286", "0.5964349", "0.59620506" ]
0.765414
0
Annotates and prints the sentences in CQP format
def print_cwb(document, tag='<s>'):
    doc = NLP(document)
    for sentence in doc.sents:
        print(tag)
        sent = NLP(sentence.text)
        for token in sent:
            print('{word}\t{pos}\t{lemma}'.format(
                word=token.text, pos=token.pos_, lemma=token.lemma_))
        print(tag.replace('<', '</'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(args):\n corpus = read_corpus(args, verbose=True)\n for k in sorted(corpus, key=educe.stac.id_to_path):\n doc = corpus[k]\n print(\"========== %s ============\" % k)\n print()\n if args.edges:\n dialogues = sorted_first_widest(filter(is_dialogue, doc.units))\n if dialogues:\n d_first = dialogues[0]\n print(annotate_doc(doc, span=d_first.text_span()))\n if len(dialogues) > 1:\n d_last = dialogues[-1]\n txt = annotate_doc(doc, span=d_last.text_span())\n print(\"...\\n\")\n print(txt.encode('utf-8'))\n else:\n print(annotate_doc(doc).encode('utf-8'))\n print()", "def printSentenceContent(self):\n i = 0\n while i < len(self.sentence):\n print(self.sentence[i].getWord() + \" \")\n i += 1\n print(\"\\n\")", "def print_sentence_voice(self, final_subj, final_obj, verb, v_aux, v_tense, subj_tag, subj_word, final_mod2=None, final_root=None):\n new_verb = ''\n s_sentence1 = s_sentence2 = ''\n\n new_verb = gl.verb_conjugate(verb, v_aux, v_tense) + \" \"\n\n if new_verb.strip() == \"\":\n new_verb = gl.verb_conjugate(verb, v_aux, \"VMIS3S0\") + \" \"\n\n\n for k in sorted(final_subj.keys()):\n s_sentence1 += final_subj[k] + \" \"\n\n for k in sorted(final_obj.keys()):\n s_sentence2 += final_obj[k] + \" \"\n\n if final_mod2 != None:\n for k in sorted(final_mod2.keys()):\n s_sentence2 += final_mod2[k] + \" \"\n if final_root != None:\n for k in sorted(final_root.keys()):\n s_sentence2 += final_root[k] + \" \"\n\n\n #removing errors in punctuation\n s_sentence1 = s_sentence1.replace(\", .\", \".\").replace(\"; .\", \".\").replace(\": .\", \".\").replace(\", ?\", \"?\").replace(\"; ?\", \"?\").replace(\": ?\", \"?\").replace(\", !\", \"!\").replace(\"; !\", \"!\").replace(\": !\", \"!\").replace(\". .\", \".\")\n s_sentence2 = s_sentence2.replace(\", .\", \".\").replace(\"; .\", \".\").replace(\": .\", \".\").replace(\", ?\", \"?\").replace(\"; ?\", \"?\").replace(\": ?\", \"?\").replace(\", !\", \"!\").replace(\"; !\", \"!\").replace(\": !\", \"!\").replace(\". 
.\", \".\")\n\n\n return self.runTrueCaser(s_sentence1 + new_verb + s_sentence2)", "def print_output(data,alignments,file):\n print(\"######################################################################\")\n print(\"Task 1 : IBM model 1 and EM algorithm implementation ,with corpus @\",file)\n print(\"######################################################################\")\n\n for i in range(len(data)):\n print(\"English Sentence : \",data[i][\"en\"])\n print(\"Foreign Sentence : \",data[i][\"fr\"])\n print(\"Alignment : \",alignments[i])\n print(\"----------------------------------------------------------------------\")", "def display_cli(conversations, alt_speaker, human_speaker):\n for speaker, speech in conversations:\n if speaker == END_OF_CONVO:\n print(\"-\" * 20 + \"END OF CONVERSATION\" + \"-\" * 20)\n elif speaker == alt_speaker:\n print(\"%-15s: %s\" % (speaker[:15], speech))\n else:\n prBlueBG(\"%-15s: %s\" % (speaker[:15], speech))", "def DumpDetails(self, sentences, label=\"N.A.\"):\n AdjR = 0.0\n adjAll = []\n for sentence in sentences:\n # if sentence[\"Text\"].startswith(\"Joanie is not helpful\"):\n # x = 1\n adjectives, dependencies = self.ExtractSentDetails(sentence)\n adjAll.extend(adjectives)\n allAdjectives = adjectives | Angel.GlobalAdjList\n AdjS = 0.0\n words = wordpunct_tokenize(sentence[\"Text\"])\n if len(words) <= 3:\n allAdjectives |= set([x.lower() for x in words])\n for i in range(len(words)):\n word = words[i].lower()\n if word in {\"but\", \"if\"}:\n AdjS = 0.0\n print words[i],\n elif word in allAdjectives and word in self.lexicon:\n multiplier = self.PredictMultiplier(word, dependencies[word], words, i)\n score = float(self.lexicon[word]) * multiplier\n if multiplier < 1:\n colortext = colored(words[i] + \" (\" + '{:.3}'.format(score) + \")\", 'red',None,['underline'])\n elif multiplier > 1:\n colortext = colored(words[i] + \" (\" + '{:.3}'.format(score) + \")\", 'red',None,['bold'])\n else:\n colortext = colored(words[i] + \" (\" + '{:.3}'.format(score) + \")\", 'red')\n AdjS += score\n print colortext,\n else:\n print words[i],\n print\n colortext = colored(\"Adjectives: \" + '{:.3}'.format(AdjS),'red')\n print colortext\n AdjR += AdjS\n print\n print \"Label:\", label\n base = self.PredictBase(adjAll)\n colortext = colored(\"Adjectives: \" + str(AdjR) + \"*\" + str(base) + \" = \" + str(AdjR*base),'red')\n print colortext", "def print_part_of_speech(part_of_speech):\n\n print(part_of_speech)", "def print(self):\r\n self.print_avec_separateur()", "def print_chars(self):\n for v in voc.split('\\n'):\n pair = v.split(',')\n print(pair[0], pair[1], '\\t', self.epi.xsampa_list(pair[0]))", "def generate_new_book(text):\n\n for paragraph in text:\n for sentence in paragraph:\n for word in sentence:\n print(word, end=' ')\n print()\n print()", "def getannotationstrings(cann):\n cdesc = ''\n if cann['description']:\n cdesc += cann['description'] + ' ('\n if cann['annotationtype'] == 'diffexp':\n chigh = []\n clow = []\n call = []\n for cdet in cann['details']:\n if cdet[0] == 'all':\n call.append(cdet[1])\n continue\n if cdet[0] == 'low':\n clow.append(cdet[1])\n continue\n if cdet[0] == 'high':\n chigh.append(cdet[1])\n continue\n cdesc += ' high in '\n for cval in chigh:\n cdesc += cval + ' '\n cdesc += ' compared to '\n for cval in clow:\n cdesc += cval + ' '\n cdesc += ' in '\n for cval in call:\n cdesc += cval + ' '\n elif cann['annotationtype'] == 'isa':\n cdesc += ' is a '\n for cdet in cann['details']:\n cdesc += 'cdet,'\n elif 
cann['annotationtype'] == 'contamination':\n cdesc += 'contamination'\n else:\n cdesc += cann['annotationtype'] + ' '\n for cdet in cann['details']:\n cdesc = cdesc + ' ' + cdet[1] + ','\n return cdesc", "def get_sentence(self):", "def anlText(self, inputFile):\n strBuf = \"\"\n splitter = re.compile(self._stcSeps)\n for rawLine in inputFile:\n line = rawLine.replace(\"\\n\", \"\")\n if (not splitter.search(line)): # Don't have a full sentence yet\n strBuf += \" \" + line\n else: # Found a sentence end. Get and process the full sentence.\n tempText = strBuf + line\n while splitter.search(tempText):\n stcList = splitter.split(tempText, 1)\n self.anlSentence(stcList[0])\n tempText = stcList[1] # Store what's left for the next\n strBuf = tempText\n if len(strBuf): # Process whatever is left at the end.\n self.anlSentence(strBuf)", "def phrase_output(self, phrase):\n Phrase(self.selected_phrase, self.player_guess, False, False)\n print('{}'.format(''.join(self.consol_output)))", "def test_get_sentence_sentiments():\n long_comment = [\"This was a really sucky movie. I will probably never go see this movie ever again. I am going to \"\n \"tell my whole family never to watch this movie. I very much enjoyed the special cameo in it \"\n \"though. I loved the plot line.\"]\n\n sentence_score_list = get_sentence_sentiments(long_comment[0])\n print(long_comment[0])\n print('per sentence sentiment:', sentence_score_list)\n print()", "def display_text(index_to_token, gt, pr):\n index_to_token[0] = '|' # remove actual line breaks\n\n display_len = 3 * time_steps\n\n # sample 3 sentences and their start and end time steps\n (s1_s, s1_e) = (0, time_steps)\n (s2_s, s2_e) = (time_steps, 2*time_steps)\n (s3_s, s3_e) = (2*time_steps, 3*time_steps)\n\n gt_string = \"\".join([index_to_token[gt[k]] for k in range(display_len)])\n pr_string = \"\".join([index_to_token[pr[k]] for k in range(display_len)])\n\n match = np.where([gt_string[k] == pr_string[k] for k in range(display_len)])\n\n di_string = \"\".join([gt_string[k] if k in match[0] else '.'\n for k in range(display_len)])\n\n neon_logger.display('GT: [' + gt_string[s1_s:s1_e] + '] '\n '[' + gt_string[s2_s:s2_e] + '] '\n '[' + gt_string[s3_s:s3_e] + '] ')\n\n neon_logger.display('Pred: [' + pr_string[s1_s:s1_e] + '] '\n '[' + pr_string[s2_s:s2_e] + '] '\n '[' + pr_string[s3_s:s3_e] + '] ')\n\n neon_logger.display('Difference indicated by .')\n neon_logger.display('Diff: [' + di_string[s1_s:s1_e] + '] '\n '[' + di_string[s2_s:s2_e] + '] '\n '[' + di_string[s3_s:s3_e] + '] ')", "def print_sentence_appos(self, final_root, final_appos, final_subj, v_tense, n_num, subj_word):\n s_sentence = ''\n s_sentence2 = ''\n for k in sorted(final_root.keys()):\n s_sentence += final_root[k] + \" \"\n\n for k in sorted(final_subj.keys()):\n s_sentence2 += final_subj[k] + \" \"\n\n\n if \"3S\" in v_tense:\n s_sentence2 += \"é \"\n \n elif \"3P\" in v_tense:\n s_sentence2 += \"son \"\n\n\n for k in sorted(final_appos.keys()):\n s_sentence2 += final_appos[k] + \" \"\n\n #including final punctuation\n if final_root[sorted(final_root.keys())[-1]] not in (\".\", \"?\", \"!\"):\n s_sentence+= \". \"\n\n if final_subj[sorted(final_subj.keys())[-1]] not in (\".\", \"?\", \"!\"):\n s_sentence2+= \". 
\"\n \n #return self.runTrueCaser(s_sentence), self.runTrueCaser(s_sentence2)\n return self.runTrueCaser(s_sentence), self.runTrueCaser(s_sentence2)", "def description_ques(analysis):\n if analysis.sv[0].vrb_tense.startswith('present'):\n analysis.sv[0].vrb_tense = 'present progressive'\n if analysis.sv[0].vrb_tense.startswith('past'):\n analysis.sv[0].vrb_tense = 'present progressive'\n sentence = y_o_question(analysis)\n for i in sentence:\n if i == 'liking':\n sentence[sentence.index(i)] = 'like'\n return ['what'] + sentence", "def annotate(m, ss_seq): # -> None:\n ...", "def answer_question5():\n\n return inspect.cleandoc(\"\"\"\\\n The pre-trained POS tagger can be used to tag words that are not covered\n by the hand-crafted lexicon. In this way, any input sentence will be fully\n tagged and thus the hand-crafted grammar can parse it (e.g. Suppose originally\n we have a simple sentence with tags 'NOUN', 'unknown', 'NOUN', the POS tagger\n can fill the gap by replacing 'unknown' with 'VERB'). This approach is\n expected to be do better because the original parser cannot parse sentences\n with missing tags.\"\"\")[0:500]", "def test_display():\n builder = DocumentBuilder(\"test\")\n t0 = Token(\"foo\", 0)\n t1 = Token(\"bar\", 1)\n t2 = Token(\"baz\", 2)\n s1 = builder.create_sentence([t0, t1, t2])\n m0 = Mention.create(s1, [t0], NAME, MISC)\n m1 = Mention.create(s1, [t0, t1], NAME, ORG)\n m2 = Mention.create(s1, [t1, t2], NAME, MISC)\n m3 = Mention.create(s1, [t0, t1, t2], NAME, MISC)\n builder.add_mentions([m0, m1, m2, m3])\n system_doc = builder.build()\n\n builder = DocumentBuilder(\"test\")\n t0 = Token(\"foo\", 0)\n t1 = Token(\"bar\", 1)\n t2 = Token(\"baz\", 2)\n s1 = builder.create_sentence([t0, t1, t2])\n m0 = Mention.create(s1, [t0], NAME, MISC)\n m1 = Mention.create(s1, [t0, t1], NAME, ORG)\n m2 = Mention.create(s1, [t1, t2], NAME, MISC)\n m3 = Mention.create(s1, [t0, t1, t2], NAME, MISC)\n builder.add_mentions([m0, m1, m2, m3])\n gold_doc = builder.build()\n\n res = score_prf([gold_doc], [system_doc])\n res.print()", "def getannotationstrings2(cann):\n cdesc = ''\n if cann['description']:\n cdesc += cann['description'] + ' ('\n if cann['annotationtype'] == 'diffexp':\n chigh = []\n clow = []\n call = []\n for cdet in cann['details']:\n if cdet[0] == 'all':\n call.append(cdet[1])\n continue\n if cdet[0] == 'low':\n clow.append(cdet[1])\n continue\n if cdet[0] == 'high':\n chigh.append(cdet[1])\n continue\n cdesc += ' high in '\n for cval in chigh:\n cdesc += cval + ' '\n cdesc += ' compared to '\n for cval in clow:\n cdesc += cval + ' '\n cdesc += ' in '\n for cval in call:\n cdesc += cval + ' '\n elif cann['annotationtype'] == 'isa':\n cdesc += ' is a '\n for cdet in cann['details']:\n cdesc += 'cdet,'\n elif cann['annotationtype'] == 'contamination':\n cdesc += 'contamination'\n else:\n cdesc += cann['annotationtype'] + ' '\n for cdet in cann['details']:\n cdesc = cdesc + ' ' + cdet[1] + ','\n\n if len(cdesc) >= 1 and cdesc[-1] == ',':\n cdesc = cdesc[:-1]\n return cdesc", "def Synopsis(self, line):\n nest = 0 # [...] nesting level.\n no_split = 0 # buf[no_split:i] should not be split across lines.\n # String append on buf used below because of no_split lookbehind.\n buf = ' ' * self._indent[0]\n n = len(buf) + 1\n i = 0\n while i < len(line):\n c = line[i]\n if c == self._csi_char:\n control_len = self._attr.GetControlSequenceLen(line[i:])\n if control_len:\n j = i\n i += control_len\n buf += line[j:i]\n continue\n if c == '[':\n # [...] 
nesting.\n nest += 1\n if nest == 1:\n # A new [...] group - don't split until the end of the group.\n no_split = len(buf)\n elif c in [']', ' ']:\n if c == ']':\n nest -= 1\n if not nest:\n # Outside [...]. OK to split at this point if needed.\n if n >= self._width:\n # Split the line up to no_split, eliminate trailing space and write\n # the line up to no_split.\n n = no_split\n while n > 0 and buf[n - 1] == ' ':\n n -= 1\n self._out.write(buf[:n] + '\\n')\n # Reset indentation for the next line which will start at no_split.\n buf = ' ' * self._indent[0] * 2 + buf[no_split:]\n n = len(buf) + 1\n elif c == ' ':\n # Space outside [...]. Set a new split point.\n no_split = len(buf)\n if c == ' ' and buf and buf[-1] == ' ':\n # Collapse adjacent spaces to one space.\n i += 1\n continue\n buf += c\n n += 1\n i += 1\n self._out.write(buf + '\\n\\n')", "def print_sentence(self, final1, final2, root_tag=None, mark=None, mark_pos=None, modal=None): \n s_sentence = ''\n s_sentence2 = ''\n \n ## control the markers that should be added into the simplified sentences\n if mark in self.addition:\n s_sentence2 += 'And '\n if mark in self.condition:\n s_sentence += 'Suppose '\n if final2[sorted(final2.keys())[0]].lower() != 'then' and 'then' not in final2.values():\n s_sentence2 += 'Then '\n elif mark in self.concession:\n s_sentence2 += 'But '\n elif mark in self.time:\n if mark_pos > 1:\n if root_tag == 'VBP' or root_tag == 'VBZ' or root_tag == 'VB':\n s_sentence2 += 'This is ' + mark + \" \"\n else:\n s_sentence2 += 'This was ' + mark + \" \"\n else:\n\n if root_tag == 'VBP' or root_tag == 'VBZ':\n s_sentence2 += 'This happens ' + mark + \" \"\n elif root_tag == 'VB' and modal != None: \n s_sentence2 += 'This ' + modal + ' happen ' + mark + \" \"\n else:\n s_sentence2 += 'This happened ' + mark + \" \"\n\n elif mark in self.justify:\n s_sentence2 += 'So '\n elif mark in self.condition2:\n s_sentence2 += 'Alternatively '\n \n c = 0\n\n ## build first sentence\n for k in sorted(final1.keys()):\n if c == 0 and final1[k] in (\".\", \",\", \"?\", \":\", \";\", \"!\"):\n c+=1\n else:\n s_sentence += final1[k] + \" \"\n c+=1\n\n c = 0\n ## build second sentence\n for k in sorted(final2.keys()):\n if c ==0 and final2[k] in (\".\", \",\", \"?\", \":\", \";\", \"!\"):\n c+=1\n else:\n s_sentence2 += final2[k] + \" \"\n c+=1\n \n s_sentence = s_sentence.replace(\"either \", \"\")\n s_sentence2 = s_sentence2.replace(\"either \", \"\")\n s_sentence = s_sentence.replace(\"Either \", \"\")\n s_sentence2 = s_sentence2.replace(\"Either \", \"\")\n\n\n s_sentence = s_sentence[0].capitalize() + s_sentence[1:]\n s_sentence2 = s_sentence2[0].capitalize() + s_sentence2[1:]\n \n #including final punctuation\n if final1[sorted(final1.keys())[-1]] not in (\".\", \"?\", \"!\"):\n s_sentence+= \". \"\n\n if final2[sorted(final2.keys())[-1]] not in (\".\", \"?\", \"!\"):\n s_sentence2+= \". 
\"\n\n \n #removing errors in punctuation\n s_sentence = s_sentence.replace(\", .\", \".\").replace(\"; .\", \".\").replace(\": .\", \".\").replace(\", ?\", \"?\").replace(\"; ?\", \"?\").replace(\": ?\", \"?\").replace(\", !\", \"!\").replace(\"; !\", \"!\").replace(\": !\", \"!\")\n s_sentence2 = s_sentence2.replace(\", .\", \".\").replace(\"; .\", \".\").replace(\": .\", \".\").replace(\", ?\", \"?\").replace(\"; ?\", \"?\").replace(\": ?\", \"?\").replace(\", !\", \"!\").replace(\"; !\", \"!\").replace(\": !\", \"!\")\n \n return self.runTrueCaser(s_sentence), self.runTrueCaser(s_sentence2)", "def print_translations(sentences: List[Tuple[List[int], List[int]]], model: Seq2SeqAttentionModel,\n source_vocab: Dict[int, str], target_vocab: Dict[int, str], beam_width):\n total_log_lhood = 0\n for (source_sentence, target_sentence) in sentences:\n if beam_width > 0:\n translation, log_lhood = translate_beam_search(source_sentence, model, beam_width)\n total_log_lhood += log_lhood\n else:\n translation, _ = translate_greedy_search(source_sentence, model)\n\n print(\"source sentence:\" + \" \".join([source_vocab[word] for word in source_sentence]))\n print(\"target sentence:\" + \" \".join([target_vocab[word] for word in target_sentence]))\n print(\"translation:\\t\" + \" \".join([target_vocab[word] for word in translation]))\n print(\"\")\n print(\"Avg log Likelihood = \", total_log_lhood/len(sentences))", "def possession_ques(analysis):\n\n #processing as statement\n phrase = statement(analysis)\n\n #We have to know if it is plural or singular\n if other_functions.plural_noun(analysis.sn) == 1:\n return ['whose'] + phrase[:len(phrase) - 1] + ['these'] + ['?']\n else:\n return ['whose'] + phrase[1:len(phrase) - 1] + ['this'] + ['?']", "def print_output_task3(ef_prob_dict,dataset):\n print(\"######################################################################\")\n print(\"Task 3 : Phrase Based translation(using NLTK) on dataset\",dataset)\n print(\"######################################################################\")\n display_phrasewise_list(ef_prob_dict)", "def print_config(C):\n print(\"Corpus Preprocessing would be done for these Configuations:\")\n if(C.STEMMING == True):\n print(\"Corpus tokens would be Stemmed\")\n else:\n print(\"NO STEMMING on corpus\")\n if(C.LEMMATIZATION == True):\n print(\"Corpus tokens would be Lemmatized\")\n else:\n print(\"NO LEMMATIZATION on corpus\")\n print(\"Term Frequency list would be stored in \", C.TF_LIST)\n print(\"Inverse Document Frequency would be stored in \", C.IDF_DICT)\n print(\"Inverse Mapping would be stored in \", C.INVERSE_MAPPING)\n print(\"Extracted Document title list would be stored in \", C.DOC_TITLE_LIST)\n print(\"\")", "def main():\n args = get_args()\n str_arg = args.arg\n int_arg = args.int\n flag_arg = args.flag\n #pos_arg = args.positional\n\n #read and open the annotations file\n intpro_dict = {}\n with open('InterPro_entry_list.tsv') as csvfile:\n reader = csv.DictReader(csvfile, delimiter='\\t')\n for row in reader:\n intpro_dict[row['ENTRY_AC']] = row['ENTRY_NAME']\n\n with open('model_topics.txt', 'r') as file:\n model_topics = file.read().replace('\\n', '')\n\n model_topics = re.sub(\"'\", \"\", model_topics)\n model_topics = re.sub(\"\\[\", \"\", model_topics)\n model_topics = re.sub(\"\\]\", \"\", model_topics)\n\n mtl = model_topics.split('), ')\n\n with open('output_topics.tsv' ,'w') as f:\n print('Topic\\tModel_coefficient\\tInterpro_ID\\tInterPro_ENTRY_NAME', file=f)\n for list in mtl:\n topic = list[1]\n 
split_list = list.split()\n id_re = re.compile('IPR\\d{3}')\n c_words = []\n for w in split_list:\n match = id_re.search(w)\n if match:\n c_words.append(w)\n c_words = [re.sub('\"', '', i) for i in c_words]\n for w in c_words:\n re.sub('\\)', '', w)\n coef, intpro = w.split('*')\n intpro = intpro[:9]\n\n if intpro in intpro_dict.keys():\n label = intpro_dict[intpro]\n else:\n label = ''\n\n print('{}\\t{}\\t{}\\t{}'.format(topic,coef,intpro,label), file=f)", "def process(self, doc):\n # don't try to process null notes\n if not doc[1]:\n if self.verbose:\n print(\"Error segmenting doc\",doc[0])\n return []\n # odd notes may throw an error. Just continue rather than stopping the entire process\n try:\n sentences = self.sentence_tokenizer.segToSentenceSpans(doc[1])\n except KeyError:\n if self.verbose:\n print(\"Error segmenting doc\",doc[0])\n return []\n\n #context_doc = pyConTextGraph.ConTextDocument() # ConTextDoc not needed for simple usage\n\n doc_annots = list()\n\n for sentence in sentences:\n # run sentence tokenizer on input text, return the spans\n sentence_text = doc[1][sentence.begin:sentence.end]\n # process every sentence by adding markup\n markup = pyConTextGraph.ConTextMarkup()\n markup.setRawText(sentence_text)\n markup.cleanText()\n # apply targets and modifiers\n markup.markItems(self.targets, mode=\"target\")\n markup.markItems(self.modifiers, mode=\"modifier\")\n # address scope of modifiers to targets, remove inactive modifiers and self-modifying relationships\n markup.pruneMarks()\n markup.applyModifiers()\n markup.pruneSelfModifyingRelationships()\n markup.dropInactiveModifiers()\n\n marked_targets = markup.getMarkedTargets()\n for marked_target in marked_targets:\n modifiers = markup.getModifiers(marked_target)\n if not modifiers:\n span = (sentence.begin+marked_target.getSpan()[0],sentence.begin+marked_target.getSpan()[1])\n if self.mode == 'combined':\n annot = (doc[0], marked_target.getPhrase(), span[0], span[1], marked_target.getCategory()[0]+'_unspecified', marked_target.getCode())\n elif self.mode == 'separate':\n annot = (doc[0], marked_target.getPhrase(), span[0], span[1], marked_target.getCategory()[0], 'unspecified', marked_target.getCode())\n if annot not in doc_annots:\n doc_annots.append(annot)\n else:\n for modifier in modifiers:\n if marked_target.getSpan()[0] < modifier.getSpan()[0]:\n span = (sentence.begin+marked_target.getSpan()[0],sentence.begin+modifier.getSpan()[1])\n else:\n span = (sentence.begin+modifier.getSpan()[0],sentence.begin+marked_target.getSpan()[1])\n if self.mode == 'combined':\n annot = (doc[0], doc[1][span[0]:span[1]], span[0], span[1], marked_target.getCategory()[0]+'_'+modifier.getCategory()[0], marked_target.getCode())\n elif self.mode == 'separate':\n annot = (doc[0], doc[1][span[0]:span[1]], span[0], span[1], marked_target.getCategory()[0], modifier.getCategory()[0], marked_target.getCode())\n if annot not in doc_annots:\n doc_annots.append(annot)\n\n #context_doc.addMarkup(markup)\n\n return doc_annots" ]
[ "0.6000715", "0.56178945", "0.55667776", "0.5481114", "0.54786646", "0.54709184", "0.54087317", "0.5388897", "0.53651917", "0.5351183", "0.53268975", "0.53230166", "0.53183377", "0.5314303", "0.53096277", "0.5309273", "0.5307908", "0.5294618", "0.52743316", "0.5270597", "0.5262986", "0.5258033", "0.5254137", "0.5252314", "0.52443135", "0.5233867", "0.5229551", "0.52281815", "0.5194382", "0.5189792" ]
0.5949153
1
Prints end of document. Just for symmetry.
def print_footer():
    print('</text>')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def end_print(outfile: TextIO) -> None:\n outfile.write(\" </body>\\n\")\n outfile.write(\"</html>\\n\")", "def doc_end(fdoc):\n fdoc.write('\\\\end{document}\\n')\n fdoc.close()", "def endDocument(self):\n pass", "def endDocument(self):\n pass", "def latex_footer():\n print(\" \\end{tikzpicture}\")\n print(\" }\")\n print(\" \\end{frame}\")\n print(\"\\end{document}\")", "def depart_document(self, node):\n self.body.append(r\"\\end{frame}\")", "def close(self):\n sys.stdout.write('\\n')", "def print_page_end(my_file):\n\t\n\tlogger.debug(\"Outputting end of page...\")\n\tprint >> my_file, \"\"\"</TABLE>\"\"\"", "def display_end(self, nsep):\n\n if self.opt['Verbose'] and self.opt['StatusHeader']:\n print(\"-\" * nsep)", "def do_eof(self, line):\n print \"\"\n return True", "def end_page_division(outfile: TextIO) -> None:\n outfile.write(\" </div>\\n\")", "def close(self):\n if self.print_msg:\n print(' ')", "def do_EOF(self, _: str) -> None:\n print()\n exit(0)", "def writeFooter(self):\n pass", "def print_out():\n pass", "def footer(self):\n file = open(\"imdb_output.html\", \"a\")\n file.write(\"\\t\\t</table>\\n\\t</body>\\n</html>\\n\")", "def flush(self):\n\n # generate bitmaps of formulae\n if self.out.tell() > 0:\n self.out.write(\"\\\\end{document}\\n\")\n self.__LaTeX2Dvi2Gif()\n self.out.close()\n self.out = io.StringIO()\n #end if\n\n #reset counter\n self.counter = 1\n self.nodelist = []", "def end(self):\n while self.position < len(self.document.characters\n ) and self.document.characters[\n self.position].character != '\\n':\n self.position += 1", "def print_endinfo(text: str):\n templateName = \"{:s}\"\n print(bcolors.BOLD + templateName.format(text) + bcolors.ENDC, flush=True)", "def do_EOF(self, line):\n print(\"\")\n return True", "def WriteFooter(self):\n self.WriteText('}')", "def WriteFooter(self):\n return", "def close(self):\n self._document.endDocument()\n self._output.close()\n return", "def close(self):\n self._document.endDocument()\n self._output.close()\n return", "def _endCountPrinting(self, count):\n if not self._quiet:\n sys.stdout.write('\\n')\n sys.stdout.flush()\n logging.info(\"Generated {} tokens.\".format(count))", "def Finish(self):\n self._Flush()\n self.Font(out=self._out)", "def double_line():\n print (\"=============================================================\")", "def finish_displayhook(self):\n IPython.utils.io.Term.cout.write(self.output_sep2)\n IPython.utils.io.Term.cout.flush()", "def output(*args):\n print(*args, end='', file=file)", "def footer(self):\n # close the svg tag\n yield '</svg>'\n # if this is a stand-alone document\n if self.standalone:\n # render a blank line\n yield ''\n # and the document footer\n yield from super().footer()\n # all done\n return" ]
[ "0.73971003", "0.6935334", "0.6862355", "0.6862355", "0.6664187", "0.6625865", "0.6573202", "0.647194", "0.6466574", "0.6457852", "0.6382236", "0.6350358", "0.6328606", "0.63048065", "0.6239536", "0.62013793", "0.6198921", "0.6173183", "0.61691064", "0.6136601", "0.6112222", "0.6111886", "0.6109088", "0.6109088", "0.6103752", "0.6097949", "0.6083835", "0.6063756", "0.60541207", "0.6051723" ]
0.6966425
1
Return True if a windows system
def win():
    if platform.system() in WINDOWS:
        return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_windows():\n if os.name == \"nt\":\n return True\n return False", "def os_is_windows():\n return platform.system() == \"Windows\"", "def is_windows():\n return os.name == \"nt\"", "def _on_windows() -> bool:\n return os.name == \"nt\"", "def is_windows() -> bool:\n return sys.platform == \"win32\"", "def is_windows() -> bool:\n\n return sys.platform == 'win32'", "def is_windows():\n return sys.platform == \"win32\"", "def is_windows():\r\n return sys.platform == \"win32\"", "def is_win():\n return sys.platform[:3] == \"win\"", "def on_windows ():\n if bjam.variable(\"NT\"):\n return True\n\n elif bjam.variable(\"UNIX\"):\n\n uname = bjam.variable(\"JAMUNAME\")\n if uname and uname[0].startswith(\"CYGWIN\"):\n return True\n\n return False", "def isWindows(cls):\n return WIN", "def is_system(self) -> bool:", "def test_system_platform():\n accepted_values = ['windows', 'linux']\n output = sh.system_platform()\n assert output in accepted_values", "def is_system(self):\n\t\treturn self.__is_system", "def is_system(self) -> undefined.UndefinedOr[bool]:", "def test_os_system(self):\n self.assertEqual(self.settings.OS_SYSTEM, platform.system())", "def _in_wsl():\n return \"microsoft-standard\" in uname().release", "def platform_supported(self):\n return platform.system().lower() in self.platforms if self.platforms else False", "def getWindowingSystem():\n global g_winSys\n \n if not g_winSys:\n tkWdg = _getTkWdg()\n try:\n g_winSys = tkWdg.tk.call(\"tk\", \"windowingsystem\")\n except tkinter.TclError:\n # windowingsystem not supported; take a best guess\n if RO.OS.PlatformName == \"win\":\n g_winSys = \"win32\"\n else:\n g_winSys = \"x11\"\n\n return g_winSys", "def is_64_windows(self):\n return 'PROGRAMFILES(X86)' in os.environ", "def check_windows_firewall():\n if \"ON\" in str(subprocess.check_output('netsh advfirewall '\n 'show all state')):\n return True\n else:\n return False", "def is_64_windows():\n return 'PROGRAMFILES(X86)' in os.environ", "def is_sys(self):\n if self.mountpoint is not None and self.mountpoint in ['/', '/boot']:\n return True\n return False", "def is_sys(self):\n if self.mountpoint is not None and self.mountpoint in ['/', '/boot']:\n return True\n return False", "def osname_is_linux():\n return (\"Linux\" == g_osname)", "def system():\n return uname().system", "def system():\n return uname().system", "def IsLinux():\n return os.name == 'posix' and os.uname()[0] == 'Linux'", "def check_os():\n if sys.platform == \"win32\":\n print(\"WARNING:\")\n print(\"This program use Scapy. Scapy is primarily being developed for Unix-like systems and works best on those platforms.\")\n print(\"You should to change your OS, because some Scapy functions may not be available.\")\n time.sleep(5)", "def is_linux():\n (sysname, nodename, release, version, machine) = os.uname()\n return sysname == 'Linux'" ]
[ "0.88025236", "0.8778063", "0.8678572", "0.8537766", "0.85206264", "0.84892493", "0.84858257", "0.8479225", "0.8096643", "0.7979381", "0.7928705", "0.7763643", "0.7125087", "0.7080894", "0.6978872", "0.6966665", "0.69211066", "0.68185973", "0.6789719", "0.6747324", "0.6657324", "0.66426164", "0.66369355", "0.66369355", "0.6615155", "0.66073006", "0.66073006", "0.6604707", "0.6584267", "0.6542691" ]
0.88894486
0
When was the position of the current playing media valid.
def media_position_updated_at(self) -> datetime | None:
    if self._device.movie.play_status in KALEIDESCAPE_PLAYING_STATES:
        return utcnow()
    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def media_position_updated_at(self):\n if not self._playing_liveinput and self._state == STATE_PLAYING:\n return self._position_updated_at\n else:\n return None", "def media_position_updated_at(self):\n return self._table.active_track_remaining_time_as_of", "def media_position(self):\n # The lovelace app loops media to prevent timing out, don't show that\n if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:\n return None\n media_status = self._media_status()[0]\n if media_status is None or not (\n media_status.player_is_playing\n or media_status.player_is_paused\n or media_status.player_is_idle\n ):\n return None\n return media_status.current_time", "def media_position(self):\n if self._media_playback_trackable():\n self._media_position_updated_at = utcnow()\n return self.coordinator.data.nowplaying[self.zone.SourceID].CurrProgress\n return None", "def media_position(self):\n return (\n self._table.active_track_total_time\n - self._table.active_track_remaining_time\n ).total_seconds()", "def media_position_updated_at(self):\n if self._media_playback_trackable():\n return self._media_position_updated_at\n\n return None", "def media_position_updated_at(self):\n if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:\n return None\n return self._media_status()[1]", "def media_position(self):\n if (self._playing_localfile or self._playing_spotify or self._slave_mode or self._playing_mediabrowser or self._playing_mass) and self._state != STATE_UNAVAILABLE:\n return self._playhead_position\n else:\n return None", "def media_position(self):\n return self._media_position", "def media_position_updated_at(self):\n return self._media_position_updated_at", "def media_seek_position(self):\n return self._state.get(\"seek\", None)", "def _media_playback_trackable(self) -> bool:\n if (\n self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.Duration\n is None\n ):\n return False\n\n return (\n self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.Duration > 0\n )", "def played(self):\n # type: () -> int\n return self._played", "def get_pos(self):\n if self.player is not None and self.state == \"play\":\n return self.player.get_position() * self._length\n return 0", "def current_position(self):\n\n # It is an error to call playlist_current_pos when there are\n # no entries in the playlist.\n r = self.x.playlist_current_pos()\n r.wait()\n if r.iserror():\n print r.get_error()\n return None\n else:\n return r.get_dict()['position']", "def get_position(self):\n if self.is_running():\n try:\n micros = self.__iface_props.Position()\n return True,micros\n except dbus.exceptions.DBusException:\n return False,-1 \n else:\n return False,-1", "def media_position(self) -> int | None:\n if self._device.movie.title_location:\n return self._device.movie.title_location\n return None", "def position(self, video_display_name=None):\r\n selector = self.get_element_selector(video_display_name, CSS_CLASS_NAMES['video_time'])\r\n current_seek_position = self.q(css=selector).text[0]\r\n return current_seek_position.split('/')[0].strip()", "def isInPlay(self):\n return self.inPlay", "def is_playing(self):\n raise NotImplementedError", "def isPlaying(self) :\n raise NotImplementedError(\"isPlaying not implemented\")", "def tell(self):\n return self._upload_position", "def _acting_player_position(self):\n return self._env.acting_player_position", "def check(self):\n #\n # *****************\n # *****************\n # TODO: Check really if video is valid\n # *****************\n # *****************\n return True", 
"def is_playing(self):\n return self.status == \"PLAYING\"", "def __handle(self):\n\n if not self.progressbar:\n return True\n\n valor1 = None\n valor2 = None\n pos = None\n duracion = None\n\n try:\n valor1, bool1 = self.query_duration(gst.FORMAT_TIME)\n valor2, bool2 = self.query_position(gst.FORMAT_TIME)\n\n except:\n print \"ERROR en HANDLER\"\n return True\n\n if valor1 != None:\n duracion = valor1 / 1000000000\n\n if valor2 != None:\n posicion = valor2 / 1000000000\n\n if duracion == 0 or duracion == None:\n return True\n\n pos = int(posicion * 100 / duracion)\n\n if pos < 0 or pos > self.duracion:\n return True\n\n if self.duracion != duracion:\n self.duracion = duracion\n\n if pos != self.posicion:\n self.posicion = pos\n self.emit(\"newposicion\", self.posicion)\n\n return True", "def present_position(self):\n return self._read(MX_PRESENT_POSITION)", "def isCompletedAt(self, location):\n return location is not None and location.isSurface()", "def media_track(self):\n return self.coordinator.data.nowplaying[self.zone.SourceID].QueueSongIndex", "def seeked(self):\n # type: () -> int\n return self._seeked" ]
[ "0.6990031", "0.6919123", "0.6813298", "0.6809467", "0.6739905", "0.6636911", "0.65889794", "0.6545176", "0.63453555", "0.6325207", "0.62560403", "0.6117559", "0.604486", "0.6029453", "0.5994394", "0.59789467", "0.5937526", "0.586864", "0.57936734", "0.57936233", "0.57310325", "0.57267344", "0.5723838", "0.571358", "0.56394094", "0.56203955", "0.5614417", "0.5596438", "0.55897", "0.55889374" ]
0.70499295
0
Dump one symbol table showing, for each symbol, the result of the informational methods is_global() etc., and when the symbol table is for a function scope, the (nonempty) tuples of parameters, locals, frees, and globals.
def show_symbol_table(st):
    print(st)
    # Dump the name lists get_*()
    if isinstance(st, symtable.Function):
        for nlist in _NAME_LISTS:
            names = getattr(st, "get_"+nlist)()
            if names:
                print(' {} : {!r}'.format(nlist, names))
    # Dump the properties as short names is_global -> global, etc..
    for s in st.get_symbols():
        scope = to_scope_name(s._Symbol__scope)
        props = [scope]
        for p in _NAME_PROPS:
            if getattr(s, "is_"+p)():
                props.append(p)
        print(' "{}" : {}'.format(s.get_name(), ', '.join(props)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSymbolTable(self) -> ghidra.app.util.bin.format.pe.debug.DebugCodeViewSymbolTable:\n ...", "def build_gdb_symbol_table():\n\n tab = Symtab()\n n = gdb.parse_and_eval (\"symtab->nodes\")\n while (long(n)):\n if symtab_node_is_function (n):\n current_symbol = GdbFunction(tab, n)\n tab.all_functions.append (current_symbol)\n elif symtab_node_is_variable (n):\n current_symbol = GdbVariable(tab, n)\n tab.all_variables.append (current_symbol)\n else:\n raise gdb.GdbError (\"Encountered an unknown symbol table node\");\n\n tab.order_to_sym[current_symbol.order] = current_symbol\n tab.all_symbols.append (current_symbol)\n\n n = n[\"next\"]\n pass\n\n tab.fixup()\n return tab", "def print_symbols():\n\n global program\n if program is None:\n print \"no program is loaded\"\n return\n for(s, a) in program.symbols():\n print \"0x{:x} : {}\".format(a, s)", "def _LoadGlobalSymbolsFromDump(dump_obj):\n symbols = set()\n for key in (\"elf_functions\", \"elf_objects\"):\n symbols.update(\n symbol.get(\"name\", \"\") for symbol in dump_obj.get(key, []) if\n symbol.get(\"binding\", \"global\") == \"global\")\n return symbols", "def create_symbol_table(root):\n\n set_depth(root, 0)\n #Initialize the stack, with the AST root\n stack = Stack(root)\n\n #the symbol table maps the name to the scope.\n #Any node can belong to multiple scopes, therefore this\n #is a list of scope\n symbol_table = STable()\n \n #this represents objects imported from\n #other modules\n other_modules = {}\n\n for node, children, ntype in stack:\n\n if ntype == \"Import\":\n #Import object has names prop which\n #is an array of names\n for name in node.names:\n #name can be the name or an alias \n name_val = name.asname or name.name\n #insert in symbol_table \n symbol_table[name_val] = ()\n\n elif ntype == \"ImportFrom\":\n if node.names[0].name == '*':\n try:\n imp_mod = importlib.import_module(node.module)\n #Add all names in imported module, except those\n #starting with '_'\n for name in dir(imp_mod):\n if name[0] != '_':\n symbol_table[name] = stack_top(scopes)\n\n except ImportError:\n print \"Error: local system does not have {}. 
Skipping!\".format(node.module)\n pass\n else:\n #TODO: store node.module\n for name in node.names:\n #TODO: store name.name even if name.asname defined \n name_val = name.asname or name.name\n symbol_table[name_val] = stack.get_scopes(src_module=node.module)\n\n elif ntype == \"ClassDef\" or ntype == \"FunctionDef\": \n symbol_table[node.name] = stack.get_scopes()\n \n #NOTE: if a name is being loaded then it already exists and doesn't need\n #to be added to symbol_table\n elif ntype == \"Name\" and not is_load(children) and not has_global(stack.scope_tail(), node.id): \n symbol_table[node.id] = stack.get_scopes()\n\n elif ntype == \"arguments\":\n if node.vararg: \n symbol_table[node.vararg] = stack.get_scopes()\n if node.kwarg:\n symbol_table[node.kwarg] = stack.get_scopes()\n\n elif ntype == \"Global\":\n #add a list global vars on node on the top of \n #the stack\n #nonlocal could be handled in similar way\n set_globals(scopes[-1], node.names)\n\n #set lineno property of children nodes\n set_lineno(node, children)\n\n for child in children[::-1]:\n #set depth of child\n set_depth(child, node.depth + 1)\n #Add children to stack\n stack.append(child)\n\n #Add any new scopes\n #Need to do it here since scoping_nodes are defined in their parent scope\n stack.check_and_push_scope()\n\n print \"Symbol table is \"\n print symbol_table\n return symbol_table", "def __str__(self):\n dictt = self.getFullDict()\n return \"SymbolTable(\\n{}\\n)\".format(pprint.pformat(dictt))", "def __init__(self, DEBUG=False):\n self.DEBUG = DEBUG\n\n self.classTable = {}\n self.subroutineTable = {}\n\n self.counts = {}\n self.counts[\"STATIC\"] = 0\n self.counts[\"FIELD\"] = 0\n self.counts[\"ARG\"] = 0\n self.counts[\"VAR\"] = 0\n\n if self.DEBUG:\n print(\"DEBUG(SymbolTable): INITIALIZED SYMBOL TABLES\")", "def DumpSymbols(lib_path, dump_path):\n elf_parser = ExternalModules.elf_parser\n parser = None\n try:\n parser = elf_parser.ElfParser(lib_path)\n symbols = parser.ListGlobalDynamicSymbols()\n finally:\n if parser:\n parser.Close()\n if not symbols:\n return \"No symbols\"\n symbols.sort()\n with open(dump_path, \"w\") as dump_file:\n dump_file.write(\"\\n\".join(symbols) + \"\\n\")\n return \"Output: \" + dump_path", "def symbol_table(self) -> str:\n return self._symbol_table", "def dump_proc_self_maps():\n return", "def get_symbols(self, type_name):\n return self._symtab[type_name].get_symbols()", "def list_symbol_tables(mst):\n stlist = []\n def append_st(st):\n #print(st)\n stlist.append(st)\n for s in st.get_symbols():\n for ns in s.get_namespaces():\n append_st(ns)\n if not isinstance(mst, symtable.SymbolTable):\n # Assume it is text of a program to compile\n mst = symtable.symtable(mst, '<string>', 'exec')\n append_st(mst)\n return stlist", "def dump(co):\n for attr in [\"name\", \"argcount\", \"posonlyargcount\",\n \"kwonlyargcount\", \"names\", \"varnames\",\n \"cellvars\", \"freevars\", \"nlocals\", \"flags\"]:\n print(\"%s: %s\" % (attr, getattr(co, \"co_\" + attr)))\n print(\"consts:\", tuple(consts(co.co_consts)))", "def symbols(self) -> Dict[str, Variable]:\n symbols = {}\n symbols.update(self.args)\n symbols.update(self.locals)\n return symbols", "def testSymbolHash(self):\n gScope = pykd.diaLoadPdb( str(target.module.pdb()) )\n symSet = set([ gScope[\"g_structTest\"], gScope[\"EnumWindowsProc1\"], gScope[\"g_structTest\"] ])\n self.assertEqual( 2, len(symSet) )\n self.assertTrue( gScope[\"g_structTest\"] in symSet )\n self.assertFalse( gScope[\"EnumWindowsProc2\"] in symSet )", "def 
symbols_details(self):\n pass", "def info(dump_alloc_table: bytes, /) -> None:", "def seperate_symbols(func):\n params = []\n vars = []\n for symbol in func.free_symbols:\n if not str(symbol).isidentifier():\n continue # E.g. Indexed objects might print to A[i, j]\n if isinstance(symbol, Parameter):\n params.append(symbol)\n elif isinstance(symbol, Idx):\n # Idx objects are not seen as parameters or vars.\n pass\n elif isinstance(symbol, (MatrixExpr, Expr)):\n vars.append(symbol)\n else:\n raise TypeError('model contains an unknown symbol type, {}'.format(type(symbol)))\n\n for der in func.atoms(sympy.Derivative):\n # Used by jacobians and hessians, where derivatives are treated as\n # Variables. This way of writing it is purposefully discriminatory\n # against derivatives wrt variables, since such derivatives should be\n # performed explicitly in the case of jacs/hess, and are treated\n # differently in the case of ODEModels.\n if der.expr in vars and all(isinstance(s, Parameter) for s in der.variables):\n vars.append(der)\n\n params.sort(key=lambda symbol: symbol.name)\n vars.sort(key=lambda symbol: symbol.name)\n return vars, params", "def do_showglobals(self, line):\n if(Rsp.state != STOPPED):\n self.output = \"Command only possible during STOPPED-state.\"\n return\n for var in globalmap:\n self.output += \"%s:%s\\n\"%(var, typemap[globalmap[var].typenr].name)", "def do_showlocals(self, line):\n if(Rsp.state != STOPPED):\n self.output = \"Command only possible during STOPPED-state.\"\n return\n curfunc = get_func(Rsp.pc) \n self.output = \"Funktion:%s\"%curfunc\n stackmap = funcmap[curfunc].stacklocals\n regmap = funcmap[curfunc].reglocals\n for var in stackmap:\n self.output += \"%s:%s\\n\"%(var, typemap[stackmap[var].type].name) \n for var in regmap:\n self.output += \"%s:%s\\n\"%(var, typemap[regmap[var].type].name)", "def dump(self):\r\n for (name, value) in self.__table__.items():\r\n print (name)\r\n print (value)", "def cur_symbols(self):\n return self.symbols[-1].keys() + self.global_symbols.keys()", "def print_current_mappings(self):\n for mapped_name in self.__mapped_names:\n func = getattr(self, mapped_name)\n name = f'{func.__module__}.{func.__name__}'\n print(f'* {mapped_name} -- {name}')", "def all_globals_dict(self):\n return self.module_node.used_vars", "def execute(self, symbol_table, test_mode=False):", "def execute(self, symbol_table, test_mode=False):", "def _DiffElfSymbols(self, dump_obj, parser):\n dump_symbols = self._LoadGlobalSymbolsFromDump(dump_obj)\n lib_symbols = parser.ListGlobalDynamicSymbols(include_weak=True)\n return sorted(dump_symbols.difference(lib_symbols))", "def __str__(self) -> str:\n\n return self._format_symbol_table_content(\"Symbol table\", self._symbols.items())", "def generate_symbol_struct(mode, symbols, definition):\n if \"vanilla\" == mode:\n return \"\"\n definitions = []\n hashes = []\n symbol_table_content = \"\"\n for ii in symbols:\n definitions += [\" %s;\" % (ii.generate_definition())]\n hashes += [\" %s%s,\" % (ii.generate_prototype(), ii.get_hash())]\n if \"dlfcn\" != mode:\n symbol_table_content = \" =\\n{\\n%s\\n}\" % (\"\\n\".join(hashes))\n return template_symbol_table % (definition, \"\\n\".join(definitions), symbol_table_content)", "def global_symbols_size(self):\n size = 0\n for s in self.global_symbols:\n if self.global_symbols[s].type == 'procedure': continue\n size += self.global_symbols[s].size\n return size" ]
[ "0.66534007", "0.647754", "0.63893914", "0.627166", "0.6266483", "0.6264775", "0.6150325", "0.61372966", "0.58902705", "0.5874507", "0.5810592", "0.5732486", "0.56901515", "0.5665829", "0.566502", "0.56435275", "0.56126845", "0.55317366", "0.5471905", "0.5426554", "0.54175854", "0.5362631", "0.53525764", "0.5347722", "0.5316078", "0.5316078", "0.5307026", "0.5263213", "0.5262525", "0.5254799" ]
0.696746
0
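Not part of the dataset row above: a minimal, self-contained sketch of the behaviour the query describes, using only Python's standard symtable module. The record's own helpers (_NAME_LISTS, _NAME_PROPS, to_scope_name) are assumed to be defined elsewhere in its source, so this sketch does not rely on them; names and output comments are illustrative.

import symtable

SOURCE = """
def add(a, b):
    total = a + b
    return total
"""

# Build the module-level table, then drill into the function scope.
mod_table = symtable.symtable(SOURCE, "<example>", "exec")
func_table = mod_table.lookup("add").get_namespaces()[0]

print(func_table.get_parameters())   # ('a', 'b')
print(func_table.get_locals())       # includes 'a', 'b' and 'total'

# For each symbol, report which informational predicates hold.
for sym in func_table.get_symbols():
    props = [p for p in ("referenced", "parameter", "local", "free", "global")
             if getattr(sym, "is_" + p)()]
    print(sym.get_name(), "->", ", ".join(props))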
Gather all the symbol tables of a module by a depth-first exploration of its symbol table tree.
def list_symbol_tables(mst): stlist = [] def append_st(st): #print(st) stlist.append(st) for s in st.get_symbols(): for ns in s.get_namespaces(): append_st(ns) if not isinstance(mst, symtable.SymbolTable): # Assume it is text of a program to compile mst = symtable.symtable(mst, '<string>', 'exec') append_st(mst) return stlist
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_symbol_table(root):\n\n set_depth(root, 0)\n #Initialize the stack, with the AST root\n stack = Stack(root)\n\n #the symbol table maps the name to the scope.\n #Any node can belong to multiple scopes, therefore this\n #is a list of scope\n symbol_table = STable()\n \n #this represents objects imported from\n #other modules\n other_modules = {}\n\n for node, children, ntype in stack:\n\n if ntype == \"Import\":\n #Import object has names prop which\n #is an array of names\n for name in node.names:\n #name can be the name or an alias \n name_val = name.asname or name.name\n #insert in symbol_table \n symbol_table[name_val] = ()\n\n elif ntype == \"ImportFrom\":\n if node.names[0].name == '*':\n try:\n imp_mod = importlib.import_module(node.module)\n #Add all names in imported module, except those\n #starting with '_'\n for name in dir(imp_mod):\n if name[0] != '_':\n symbol_table[name] = stack_top(scopes)\n\n except ImportError:\n print \"Error: local system does not have {}. Skipping!\".format(node.module)\n pass\n else:\n #TODO: store node.module\n for name in node.names:\n #TODO: store name.name even if name.asname defined \n name_val = name.asname or name.name\n symbol_table[name_val] = stack.get_scopes(src_module=node.module)\n\n elif ntype == \"ClassDef\" or ntype == \"FunctionDef\": \n symbol_table[node.name] = stack.get_scopes()\n \n #NOTE: if a name is being loaded then it already exists and doesn't need\n #to be added to symbol_table\n elif ntype == \"Name\" and not is_load(children) and not has_global(stack.scope_tail(), node.id): \n symbol_table[node.id] = stack.get_scopes()\n\n elif ntype == \"arguments\":\n if node.vararg: \n symbol_table[node.vararg] = stack.get_scopes()\n if node.kwarg:\n symbol_table[node.kwarg] = stack.get_scopes()\n\n elif ntype == \"Global\":\n #add a list global vars on node on the top of \n #the stack\n #nonlocal could be handled in similar way\n set_globals(scopes[-1], node.names)\n\n #set lineno property of children nodes\n set_lineno(node, children)\n\n for child in children[::-1]:\n #set depth of child\n set_depth(child, node.depth + 1)\n #Add children to stack\n stack.append(child)\n\n #Add any new scopes\n #Need to do it here since scoping_nodes are defined in their parent scope\n stack.check_and_push_scope()\n\n print \"Symbol table is \"\n print symbol_table\n return symbol_table", "def build_gdb_symbol_table():\n\n tab = Symtab()\n n = gdb.parse_and_eval (\"symtab->nodes\")\n while (long(n)):\n if symtab_node_is_function (n):\n current_symbol = GdbFunction(tab, n)\n tab.all_functions.append (current_symbol)\n elif symtab_node_is_variable (n):\n current_symbol = GdbVariable(tab, n)\n tab.all_variables.append (current_symbol)\n else:\n raise gdb.GdbError (\"Encountered an unknown symbol table node\");\n\n tab.order_to_sym[current_symbol.order] = current_symbol\n tab.all_symbols.append (current_symbol)\n\n n = n[\"next\"]\n pass\n\n tab.fixup()\n return tab", "def _get_module_table(\n module: module_lib.Module,\n depth: Optional[int],\n show_repeated: bool,\n) -> Callable[..., Table]:\n\n def _get_table_fn(*args, **kwargs):\n with module_lib._tabulate_context():\n\n def _get_variables():\n return module.init(*args, **kwargs)\n\n variables = jax.eval_shape(_get_variables)\n calls = module_lib._context.call_info_stack[-1].calls\n calls.sort(key=lambda c: c.index)\n\n collections: Set[str] = set(variables.keys())\n rows = []\n all_paths: Set[Tuple[str, ...]] = set(call.path for call in calls)\n visited_paths: Set[Tuple[str, ...]] = 
set()\n\n for c in calls:\n call_depth = len(c.path)\n inputs = _process_inputs(c.args, c.kwargs)\n\n if c.path in visited_paths:\n if not show_repeated:\n continue\n module_vars = {}\n counted_vars = {}\n elif depth is not None:\n if call_depth > depth:\n continue\n module_vars, _ = _get_module_variables(c.path, variables, all_paths)\n if call_depth == depth:\n counted_vars = _get_path_variables(c.path, variables)\n else:\n counted_vars = module_vars\n else:\n module_vars, _ = _get_module_variables(c.path, variables, all_paths)\n counted_vars = module_vars\n\n visited_paths.add(c.path)\n rows.append(\n Row(\n c.path,\n c.module_type,\n c.method,\n inputs,\n c.outputs,\n module_vars,\n counted_vars,\n )\n )\n\n return Table(module, tuple(collections), rows)\n\n return _get_table_fn", "def getSymbolTable(self) -> ghidra.app.util.bin.format.pe.debug.DebugCodeViewSymbolTable:\n ...", "def parse_symbol_table(data, sections, elf_header):\n if is64bit(elf_header):\n symbol_entry_str = symbol_64_entry_str\n symbol_entry_spec = symbol_64_entry_spec\n else:\n symbol_entry_str = symbol_32_entry_str\n symbol_entry_spec = symbol_32_entry_spec\n entry_len = struct.calcsize(symbol_entry_str)\n \n st_offset = None\n if \".symtab\" in sections:\n section = \".symtab\"\n if \".strtab\" in sections:\n st_offset = sections[\".strtab\"][\"offset\"]\n else:\n st_offset = sections[section][\"offset\"]\n \n elif \".dynsym\" in sections:\n section = \".dynsym\"\n if \".dynstr\" in sections:\n st_offset = sections[\".dynstr\"][\"offset\"]\n else:\n st_offset = sections[section][\"offset\"]\n \n \n if section not in sections:\n return {}, {} \n \n symbols = {}\n imports = {}\n offset = sections[section][\"offset\"]\n size = sections[section][\"size\"]\n index = offset\n while index < offset + size:\n vals = {}\n if len(data) < index+entry_len: \n break\n \n val_data = struct.unpack(symbol_entry_str, data[index:index+entry_len])\n for i, elem in enumerate(symbol_entry_spec):\n vals[elem[0]] = val_data[i]\n \n if st_offset is None:\n symbols[vals[\"name\"]] = vals\n else:\n func_name = get_name_from_string_table(data, st_offset, vals[\"name\"])\n if func_name:\n vals.pop(\"name\")\n vals[\"info\"] = get_symbol_info(vals[\"info\"])\n vals[\"shndx\"] = get_symbol_shndx(vals[\"shndx\"])\n \n if vals[\"info\"] == \"UNDEFINED\" and vals[\"value\"] == 0:\n tmp_name = func_name\n import_name = \"Unknown\"\n if \"@@\" in func_name:\n i = tmp_name.find(\"@@\")\n func_name = tmp_name[:i]\n import_name = tmp_name[i:].strip(\"@@\") \n if import_name not in imports:\n imports[import_name] = {}\n imports[import_name][func_name] = vals\n symbols[func_name] = vals\n \n index += entry_len \n \n return symbols, imports", "def analyze(module_path):\n #view the module as a AST node object\n module = get_module(module_path) \n\n nodes = []\n NodeVisitor().visit(module, nodes)\n \n #Modify main module node to give it a name attr\n if not hasattr(nodes[0][0], \"name\"):\n nodes[0][0].name = name_from_path(module_path)\n\n #symbolic_pretty_print(nodes)\n #pretty_print(nodes)\n\n #create_symbol_table(nodes[0])\n find_dependencies(nodes[0])", "def show_symbol_table(st):\n print(st)\n # Dump the name lists get_*()\n if isinstance(st, symtable.Function):\n for nlist in _NAME_LISTS:\n names = getattr(st, \"get_\"+nlist)()\n if names:\n print(' {} : {!r}'.format(nlist, names))\n # Dump the properties as short names is_global -> global, etc..\n for s in st.get_symbols():\n scope = to_scope_name(s._Symbol__scope)\n props = [scope]\n for p in 
_NAME_PROPS:\n if getattr(s, \"is_\"+p)():\n props.append(p)\n print(' \"{}\" : {}'.format(s.get_name(), ', '.join(props)))", "def test_syntax_error_nested_symbol_table():\n reader = get_reader(\n \"\"\"\nmodule my_mod\ncontains\nFUNCTION dot_v_mod_2d( )\n REAL :: dot_v_mod_2d\n REAL, DIMENSION(:,:), POINTER, CONTIOUS :: z_msk_i\n dot_v_mod_2d = 0.0_wp\nEND FUNCTION dot_v_mod_2d\nend module my_mod\n\"\"\"\n )\n result = F2003.Module.match(reader)\n # There should be no match and, as a result, there should be no\n # symbol-table entries.\n assert result is None\n assert SYMBOL_TABLES._symbol_tables == {}", "def show_module(prog, name=\"<module>\"):\n if isinstance(prog, symtable.SymbolTable):\n # Already compiled\n mst = prog\n else:\n mst = symtable.symtable(prog, name, 'exec')\n stlist = list_symbol_tables(mst)\n for st in stlist:\n show_symbol_table(st)", "def __init__(self, DEBUG=False):\n self.DEBUG = DEBUG\n\n self.classTable = {}\n self.subroutineTable = {}\n\n self.counts = {}\n self.counts[\"STATIC\"] = 0\n self.counts[\"FIELD\"] = 0\n self.counts[\"ARG\"] = 0\n self.counts[\"VAR\"] = 0\n\n if self.DEBUG:\n print(\"DEBUG(SymbolTable): INITIALIZED SYMBOL TABLES\")", "def walk_modules(module, name=\"\", path=()):\n if not name:\n name = module.__class__.__name__\n named_children = list(module.named_children())\n path = path + (name,)\n yield Trace(path, len(named_children) == 0, module)\n # recursively walk into all submodules\n for name, child_module in named_children:\n yield from walk_modules(child_module, name=name, path=path)", "def find_dependencies(root):\n \n symbol_table = create_symbol_table(root)\n\n names = []\n #Set the depth of the root node\n set_depth(root, 0)\n #Stack of nodes to visit\n stack = Stack(root)\n \n #List of (src, dest) of dependencies\n dependency_table = DTable(symbol_table=symbol_table)\n\n for node, children, ntype in stack:\n \n stack.check_and_push_scope()\n\n #A Name is being loaded, therefore \n if ntype == \"Name\" and is_load(children):\n \"\"\"\n \"\"\"\n dependency_table.append( (stack.scopes, node))\n \n elif ntype == \"Assign\":\n #TODO need to add assignments and then revoke them\n #for child in children:\n #print children\n pass\n\n \n elif ntype == \"Attribute\":\n #TODO: attribute chains can be arbitrarily long\n #dep_dest = \"{}.{}\".format(node.value.id, node.attr)\n #print \"{} => {}\".format(scopes_to_str(scopes), dep_dest)\n\n #TODO: Can't just do dependency_table.append( (scopes, node))\n #since the unique_id function won't match the create the dep string like \n #{node.value.id}.{node.attr}.\n #Either generalize unique_id or something else.\n \n #Don't add children\n continue\n \n set_lineno(node, children)\n #Add children to stack\n #This musn't always be performed\n for child in children[::-1]:\n set_depth(child, node.depth + 1)\n stack.append(child)\n\n print \"dependency table is \"\n print dependency_table", "def get_module_name_parts(module):\n def scope_name_parts(scope):\n for s in scope.subscopes:\n # Yield the name parts, not names.\n yield s.name\n for need_yield_from in scope_name_parts(s):\n yield need_yield_from\n\n statements_or_imports = set(chain(*module.used_names.values()))\n name_parts = set(scope_name_parts(module))\n for stmt_or_import in statements_or_imports:\n if isinstance(stmt_or_import, pr.Import):\n for name in stmt_or_import.get_all_import_names():\n name_parts.add(name)\n else:\n # Running this ensures that all the expression lists are generated\n # and the parents are all set. 
(Important for Lambdas) Howeer, this\n # is only necessary because of the weird fault-tolerant structure\n # of the parser. I hope to get rid of such behavior in the future.\n stmt_or_import.expression_list()\n # For now this is ok, but this could change if we don't have a\n # token_list anymore, but for now this is the easiest way to get\n # all the name_parts.\n for tok in stmt_or_import._token_list:\n if isinstance(tok, pr.Name):\n name_parts.add(tok)\n\n return name_parts", "def visit_with_print(self, file_object):\n\n module = ast.parse(file_object.read())\n # TODO remove prefixes such as C:\\Users\\...\n # For now, just assume they are relative paths.\n## module_name = \".\".join(file_object.name.split(\"\\\\\"))\n## module_name.rstrip(\".py\")\n module_name = file_object.name.rstrip(\".py\")\n module_name = module_name.replace(\"/\", \".\")\n\n try: sys.path.insert(0, sys._MEIPASS)\n except: sys.path.insert(0, sys.argv[0])\n\n exec(\"import %s\"%module_name)\n self._module = eval(module_name)\n\n # Define table column headings.\n self.found = {\"vars\":[(\"name\", \"lineno\", \"value\", \"namespace\")],\n \"classes\":[(\"name\", \"lineno\", \"namespace\")],\n \"funcs\":[(\"name\", \"lineno\", \"namespace\")]}\n self.found_classes = set()\n\n self._explorer(self, [module_name]).visit(module)\n\n # Print findings in tables.\n FancyPrinter.multi_dict_table_from_dict(self.found)\n\n # Remove table column headings for easy access.\n for table in self.found:\n self.found[table].pop(0)", "def listOfTTHalfModules():\n hm = TTModulesMap_instance.dictOfHalfModules\n listOfHalfModules = []\n for ul in hm.keys():\n for reg in hm[ul].keys():\n for module in hm[ul][reg]:\n for halfmod in hm[ul][reg][module]:\n listOfHalfModules.append(halfmod.id)\n return listOfHalfModules", "def get_symbols(self, type_name):\n return self._symtab[type_name].get_symbols()", "def find_dependent_modules():\n tree = {}\n for module in sys.modules.values():\n if module is None:\n continue\n tree[module] = set()\n for attr_name in dir(module):\n attr = getattr(module, attr_name)\n if isinstance(attr, ModuleType):\n tree[module].add(attr)\n elif type(attr) in (FunctionType, type):\n tree[module].add(attr.__module__)\n return tree", "def install_ast_funcs(self, ast_ctx):\n sym_table = {}\n for name, func in self.ast_functions.items():\n sym_table[name] = func(ast_ctx)\n ast_ctx.set_local_sym_table(sym_table)", "def _inspect_module(module):\n module_list = getmembers(module, predicate=ismodule)\n classes = getmembers(module, predicate=isclass)\n for (name, cls) in classes:\n if issubclass(cls, db.Model) and not issubclass(cls, Taxonomy):\n if cls is not db.Model:\n _data_classes[name] = cls\n return [mod[1] for mod in module_list]", "def print_symbols():\n\n global program\n if program is None:\n print \"no program is loaded\"\n return\n for(s, a) in program.symbols():\n print \"0x{:x} : {}\".format(a, s)", "def itersymbols(self):\n for syms in self._symbols.itervalues():\n for sym in syms:\n yield sym", "def execute(self, symbol_table, test_mode=False):", "def execute(self, symbol_table, test_mode=False):", "def visit_Module(self, node):\n self.generic_visit(node)\n return self.functions", "def deep_iter_modules(name):\r\n mod = import_dotted_name(name)\r\n yield name\r\n if not hasattr(mod, '__path__'):\r\n return\r\n for _, name, _ in iter_modules(mod.__path__, name + '.'):\r\n for name in deep_iter_modules(name):\r\n yield name", "def find_mm_symbols(node):\n symbols = {}\n if len(node) == 0:\n return symbols\n 
else:\n for child in node:\n if is_menumux(child):\n try:\n # grab the number of sets of target-values defined\n num_sets = child.find('num_sets')\n for set_idx in range(0, int(num_sets.text)):\n\n target = child.find(\"target%d\" % set_idx)\n values = child.find(\"values%d\" % set_idx)\n\n if LOC_PREFIX in target.text:\n log.info(\"Skipping already updated target %s\", target.text)\n else:\n first_value = values.find('s')\n symbols[target.text] = first_value.text\n\n # Update the target text to be a (private) locPV\n target.text = create_loc_pv(target.text)\n except AttributeError as e:\n log.warn(\"Error parsing MenuMux: %s\", e)\n else:\n symbols.update(find_mm_symbols(child))\n\n return symbols", "def link_all_refs(self):\n # grab global declarations\n self.symbol_tables.append({})\n map(self.add_dec, self.tree)\n\n # link in all functions\n for dec in self.tree:\n if dec.kind == PTN.FUN_DEC:\n self.link_function(dec)\n\n # pop the global symbol table\n self.symbol_tables.pop()", "def __init__(self):\r\n self.s_table = SymbolTable.preSymbols", "def get_module_map(module, module_path):\n if not module_is_public(module):\n return {}\n m = {}\n for symbol_name in dir(module):\n if symbol_name.startswith(\"_\"):\n continue\n symbol = getattr(module, symbol_name)\n symbol_path = \"%s.%s\" % (module_path, symbol_name)\n m[symbol] = symbol_path\n if inspect.ismodule(symbol):\n m.update(get_module_map(symbol, symbol_path))\n return m", "def _LoadGlobalSymbolsFromDump(dump_obj):\n symbols = set()\n for key in (\"elf_functions\", \"elf_objects\"):\n symbols.update(\n symbol.get(\"name\", \"\") for symbol in dump_obj.get(key, []) if\n symbol.get(\"binding\", \"global\") == \"global\")\n return symbols" ]
[ "0.71760356", "0.6239357", "0.61453974", "0.5924159", "0.5663551", "0.5595732", "0.55535376", "0.5506536", "0.54608417", "0.5396278", "0.53505415", "0.53426313", "0.53191054", "0.5296071", "0.5238716", "0.5165061", "0.51395684", "0.5098256", "0.5091754", "0.50009084", "0.5000206", "0.49795815", "0.49795815", "0.49551025", "0.4954076", "0.4936001", "0.49128303", "0.4901279", "0.4887163", "0.48792157" ]
0.6286266
1
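Again not part of the record: a sketch of the same depth-first gathering idea, built on the documented SymbolTable.get_children() method rather than the record's get_symbols()/get_namespaces() route. The source string and the output comment are illustrative.

import symtable

def walk_tables(table):
    # Depth-first walk over a symbol table and every nested scope.
    yield table
    for child in table.get_children():
        yield from walk_tables(child)

src = "def outer(x):\n    def inner(y):\n        return x + y\n    return inner\n"
top = symtable.symtable(src, "<example>", "exec")
for t in walk_tables(top):
    print(t.get_type(), t.get_name())
# module top / function outer / function inner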
Apply a function to all values in work_list in parallel.
def ApplyInParallel(function, work_list, on_failure=None): if not work_list: return try: # Note that this is speculatively halved as an attempt to fix # crbug.com/953365. cpu_count = multiprocessing.cpu_count() // 2 if sys.platform == 'win32': # TODO(crbug.com/1190269) - we can't use more than 56 # cores on Windows or Python3 may hang. cpu_count = min(cpu_count, 56) except NotImplementedError: # Some platforms can raise a NotImplementedError from cpu_count() logging.warning('cpu_count() not implemented.') cpu_count = 4 pool = ThreadPool(min(cpu_count, len(work_list))) def function_with_try(arg): try: function(arg) except Exception: # pylint: disable=broad-except # logging exception here is the only way to get a stack trace since # multiprocessing's pool implementation does not save that data. See # crbug.com/953365. logging.exception('Exception while running %s' % function.__name__) if on_failure: on_failure(arg) try: pool.imap_unordered(function_with_try, work_list) pool.close() pool.join() finally: pool.terminate()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_same_job(self, func, input_list):\n task_submitted = []\n for data in input_list:\n task_submitted.append(self.executor.submit(func, *data))\n\n return [t.result() for t in task_submitted]", "def split_calculation_to_threads(iterable, func, args):\n args_list = []\n batches = list(split_iterable_to_batches(iterable))\n for batch in batches:\n temp = list(args)\n temp.insert(0, batch)\n args_list.append(tuple(temp))\n with Pool(NUM_THREADS) as p:\n results = p.starmap(func, args_list)\n return results", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def map(self, func, args_list):\n for args in args_list:\n self.add_task(func, args)", "def parallelize_task(task_list, func_name, func_args, workers_count=10, workload=1):\n indices_list = np.arange(0, len(task_list), workload)\n results = []\n with concurrent.futures.ThreadPoolExecutor(max_workers=workers_count) as executor:\n fetched_rows = {executor.submit(\n func_name,\n task_list[fromindex: fromindex + workload],\n **func_args\n ): fromindex for fromindex in indices_list}\n for future in concurrent.futures.as_completed(fetched_rows):\n row_data = fetched_rows[future]\n #try:\n data = future.result()\n results.extend(data)\n #except Exception as ex:\n # print(\"exception running parallel task \",func_name,\" for \", row_data, \"...\", ex)\n return results", "def apply(L, f):\n\n result = []\n for i in L:\n result.append(f(i))\n\n return result", "def pool_job(self, func, inputs):\n\n if self.flag_use_mp:\n output = zip(*self._pool.map(func, inputs))\n self._consolidate_mp_logs()\n else:\n logger.info(\"Performing task serially\")\n output = self.serial_job(func, inputs)\n\n return output", "def applyToEach(L,f):\n for i in range(len(L)):\n L[i] = f(L[i])", "def basic_parallel_loop(func, *args, parallel=True):\n if parallel is True:\n results = Parallel(n_jobs=cpu_count())(delayed(func)(*a) for a in args[0])\n else:\n results = []\n for a in args[0]:\n results.append(func(*a))\n\n return results", "def parfor(func, in_list, out_shape=None, n_jobs=-1, engine=\"joblib\",\n backend=\"threading\", func_args=[], func_kwargs={}):\n if n_jobs == -1:\n n_jobs = multiprocessing.cpu_count()\n n_jobs = n_jobs - 1\n\n if engine == \"joblib\":\n p = joblib.Parallel(n_jobs=n_jobs, backend=backend)\n d = joblib.delayed(func)\n d_l = []\n for in_element in in_list:\n d_l.append(d(in_element, *func_args, **func_kwargs))\n results = p(d_l)\n\n elif engine == \"dask\":\n def partial(func, *args, **keywords):\n def newfunc(in_arg):\n return func(in_arg, *args, **keywords)\n return newfunc\n p = partial(func, *func_args, **func_kwargs)\n d = [dask.delayed(p)(i) for i in in_list]\n if backend == \"multiprocessing\":\n results = dask.compute(*d, get=dask.multiprocessing.get,\n workers=n_jobs)\n elif backend == \"threading\":\n results = dask.compute(*d, get=dask.threaded.get,\n workers=n_jobs)\n else:\n raise ValueError(\"%s is not a backend for dask\" % backend)\n\n elif engine == \"serial\":\n results = []\n for in_element in in_list:\n results.append(func(in_element, *func_args, **func_kwargs))\n\n if out_shape is not None:\n return np.array(results).reshape(out_shape)\n else:\n return results", "def parallel_control(target_function, list2process, 
fixed_args=None, return_results=True, num_threads=None):\n if num_threads is None:\n num_threads = mp.cpu_count()\n num_threads = min(num_threads, len(list2process))\n\n if fixed_args is None:\n fixed_args = ()\n\n # Start the Queue, this could be also a list, dict or a shared array.\n if return_results:\n mp_manager = mp.Manager()\n output_queue = mp_manager.Queue()\n else:\n output_queue = None\n\n processes = []\n for rank, batch in enumerate(batchify(list2process, num_threads)):\n p = mp.Process(target=paralll_worker,\n args=(rank, num_threads),\n kwargs=dict(target_function=target_function,\n batch=batch,\n fixed_args=fixed_args,\n output_queue=output_queue)\n )\n processes.append(p)\n\n # Run processes\n for p in processes:\n p.start()\n\n # Exit completed processes\n for p in processes:\n p.join()\n\n # Extract results\n if return_results:\n results = []\n while (not output_queue.empty()):\n results.append(output_queue.get())\n else:\n results = None\n\n return results", "def parallel(self, func, args_dict=None):\n try:\n self.parallel_safe(func, args_dict)\n except Exception:\n pass", "def thread_map(f, args_list, n_threads=None):\n if n_threads is None:\n n_threads = int(multiprocessing.cpu_count() / 2)\n pool = multiprocessing.pool.ThreadPool(processes=n_threads)\n return pool.map(f, args_list)", "def paramap(func, in_list, out_shape=None, n_jobs=-1, engine=\"joblib\",\n backend=None, func_args=None, func_kwargs=None,\n **kwargs):\n\n func_args = func_args or []\n func_kwargs = func_kwargs or {}\n\n if engine == \"joblib\":\n if not has_joblib:\n raise joblib()\n if backend is None:\n backend = \"loky\"\n pp = joblib.Parallel(\n n_jobs=n_jobs, backend=backend,\n **kwargs)\n dd = joblib.delayed(func)\n d_l = [dd(ii, *func_args, **func_kwargs) for ii in in_list]\n results = pp(tqdm(d_l))\n\n elif engine == \"dask\":\n if not has_dask:\n raise dask()\n if backend is None:\n backend = \"threading\"\n\n if n_jobs == -1:\n n_jobs = multiprocessing.cpu_count()\n n_jobs = n_jobs - 1\n\n def partial(func, *args, **keywords):\n def newfunc(in_arg):\n return func(in_arg, *args, **keywords)\n return newfunc\n pp = partial(func, *func_args, **func_kwargs)\n dd = [dask.delayed(pp)(ii) for ii in in_list]\n if backend == \"multiprocessing\":\n results = dask.compute(*dd, scheduler=\"processes\",\n workers=n_jobs, **kwargs)\n elif backend == \"threading\":\n results = dask.compute(*dd, scheduler=\"threads\",\n workers=n_jobs, **kwargs)\n else:\n raise ValueError(\"%s is not a backend for dask\" % backend)\n\n if engine == \"ray\":\n if not has_ray:\n raise ray()\n\n func = ray.remote(func)\n results = ray.get([func.remote(ii, *func_args, **func_kwargs)\n for ii in in_list])\n\n elif engine == \"serial\":\n results = []\n for in_element in in_list:\n results.append(func(in_element, *func_args, **func_kwargs))\n\n if out_shape is not None:\n return np.array(results).reshape(out_shape)\n else:\n return results", "def easy_parallize(f, sequence):\n pool = Pool(processes=NPROCESSORS) # depends on available cores\n result = pool.map(f, sequence) # for i in sequence: result[i] = f(i)\n cleaned = [x for x in result if not x is []] # getting results\n pool.close() # not optimal! 
but easy\n pool.join()\n return cleaned", "def multiprocess_map(func, iterable, *worker_args, n_cores=None, mode=\"map\", **pool_kwargs):\n results = []\n\n with mp.Manager() as manager:\n shared_args_proxy = None\n if worker_args is not None:\n shared_args_proxy = manager.list(worker_args)\n\n with mp.Pool(processes=n_cores, initializer=init_worker,\n initargs=shared_args_proxy, **pool_kwargs) as pool:\n if mode == \"map\":\n results = pool.map(func, iterable)\n elif mode == \"starmap\":\n results = pool.starmap(func, iterable)\n elif mode == \"imap\":\n for result in pool.imap(func, iterable):\n results.append(result)\n\n return results", "def compute(args, fun, max_workers=6):\n print(\"\\nProcessing symbols in parallel\")\n ex = futures.ThreadPoolExecutor(max_workers=max_workers)\n ex.map(fun, args)", "def _maplist_vm(vm, f, xs):\n def f_(*args):\n return vm.call(f, args)\n return list(map(f_, xs))", "def m_proc(dfs, func):\n pool = Pool(processes=cpu_count())\n results = [pool.apply_async(func, args=(df,)) for df in dfs]\n output = [p.get() for p in results]\n return output", "def apply(func, path, proc=1, only=None):\n peps = get_items(path, only=only)\n total = len(peps)\n if proc < 1:\n proc = os.cpu_count()\n proc = min(total, proc)\n with mp.Pool(proc) as pool:\n return pool.starmap(partial(_apply, func, total), enumerate(peps, 1))", "def pool_process(func, iterable, process_name='Pool processing', cpus=cpu_count()):\n with Timer('\\t{0} ({1}) completed in'.format(process_name, str(func))):\n pool = Pool(cpus)\n vals = pool.map(func, iterable)\n pool.close()\n return vals", "def _process_data(f, work_queue, results_queue):\n for element in iter(work_queue.get, FINISHED):\n try:\n results_queue.put(f(element))\n except Exception, work_error:\n LOG.critical('parallel_pc Error: {0}\\n\\n\\tconfig settings {1}\\n'.format(work_error, element))\n results_queue.put(FINISHED)", "def compute_sequential(self, inputs_list, communicator_list):\n raise NotImplementedError()", "def _apply_parallel(grouped_df, func, neg_compound, compound, f_cols, n_jobs,\n method):\n n_cpu = multiprocessing.cpu_count()\n output = Parallel(n_jobs=n_jobs)(delayed(func)(\n group, neg_compound, compound, f_cols, method) for _, group in grouped_df)\n return pd.concat(output)", "def apply(self, func, *args):\n import ray\n done_ids, undone_ids = ray.wait([shard.apply.remote(func, *args)\n for shard in self.shard_list],\n num_returns=len(self.shard_list))\n assert len(undone_ids) == 0\n return self", "def apply_parallel(df, func, **kwargs):\n num_workers = cpu_count()\n\n if (df.shape[0] == 1) or (num_workers == 1):\n return apply_df((df, func, kwargs))\n\n retLst = Parallel(n_jobs=num_workers)(delayed(apply_df)(\n input_args=(d, func, kwargs)) for d in np.array_split(df, num_workers))\n return pd.concat(retLst)", "def call_functions_parallel(*worker_defs):\n # TODO(amotoki): Needs to figure out what max_workers can be specified.\n # According to e0ne, the apache default configuration in devstack allows\n # only 10 threads. 
What happens if max_worker=11 is specified?\n max_workers = len(worker_defs)\n # Prepare a list with enough length.\n futures = [None] * len(worker_defs)\n with futurist.ThreadPoolExecutor(max_workers=max_workers) as e:\n for index, func_def in enumerate(worker_defs):\n if callable(func_def):\n func_def = [func_def]\n args = func_def[1] if len(func_def) > 1 else []\n kwargs = func_def[2] if len(func_def) > 2 else {}\n func = functools.partial(func_def[0], *args, **kwargs)\n futures[index] = e.submit(fn=func)\n\n return tuple(f.result() for f in futures)" ]
[ "0.72197735", "0.6717606", "0.66602266", "0.66602266", "0.66602266", "0.66602266", "0.66602266", "0.6630425", "0.66126955", "0.6591178", "0.6580101", "0.65612894", "0.6519424", "0.6383438", "0.6361531", "0.63088477", "0.6260933", "0.624545", "0.62317514", "0.62278426", "0.62238276", "0.6187428", "0.6179001", "0.6171523", "0.61686206", "0.6141743", "0.6122691", "0.61125034", "0.6112332", "0.60960674" ]
0.7662204
0
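A hypothetical, simplified analogue of the ApplyInParallel record above (fixed worker count, none of the platform-specific CPU logic), shown only to illustrate the ThreadPool pattern it uses; the function and variable names here are invented.

import logging
from multiprocessing.pool import ThreadPool

def apply_in_parallel(function, work_list, on_failure=None, max_workers=4):
    if not work_list:
        return
    pool = ThreadPool(min(max_workers, len(work_list)))

    def safe_call(item):
        try:
            function(item)
        except Exception:  # keep one bad item from killing the whole batch
            logging.exception('worker failed for %r', item)
            if on_failure:
                on_failure(item)

    try:
        list(pool.imap_unordered(safe_call, work_list))  # drain the iterator
        pool.close()
        pool.join()
    finally:
        pool.terminate()

failed = []
apply_in_parallel(lambda n: print(n * n), [1, 2, 3], on_failure=failed.append)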
Split a test path into test suite name and test case name. Telemetry and Gtest have slightly different test path formats. Telemetry uses '{benchmark_name}/{story_name}', e.g.
def SplitTestPath(test_result, test_path_format): if test_path_format == TELEMETRY_TEST_PATH_FORMAT: separator = '/' elif test_path_format == GTEST_TEST_PATH_FORMAT: separator = '.' else: raise ValueError('Unknown test path format: %s' % test_path_format) test_path = test_result['testPath'] if separator not in test_path: raise ValueError('Invalid test path: %s' % test_path) return test_path.split(separator, 1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_test_name(base_path):\n name = p.basename(base_path)\n if name == \"test.py\":\n name = \"\"\n elif name.startswith(\"test_\") and name.endswith(\".py\"):\n name = name[len(\"test_\") : (len(name) - len(\".py\"))]\n return name", "def split(test_name):\n recipe, simple_test_name = test_name.split('.', 1)\n return recipe, simple_test_name", "def GetTestSuiteName(normalized_test_name, step_ui_name):\n # For Webkit layout tests, the suite name is the immediate directory.\n if 'webkit_layout_tests' in step_ui_name:\n index = normalized_test_name.rfind('/')\n if index > 0:\n return normalized_test_name[:index]\n return None\n\n # For gtests, the suite name is the class name.\n gtest_match = GTEST_REGEX.match(normalized_test_name)\n if gtest_match:\n return gtest_match.group(1)\n\n # For Java tests, the suite name is the class name.\n java_match = _JAVA_TEST_REGEX.match(normalized_test_name)\n if java_match:\n return java_match.group(1)\n\n return None", "def extract_suite_name(file_path, project_name):\n\n suite_name = str(project_name) + \".\"\n suite_name = suite_name + os.path.splitext(str(file_path).replace(os_sep, \".\"))[0]\n return suite_name", "def _GetTestFilePath(self, path_segments):\n # Note that we need to pass the individual path segments to os.path.join\n # and not a list.\n return os.path.join(self._TEST_DATA_PATH, *path_segments)", "def get_testdata_path(category=None):\n # path.replace('tests/categories', 'test_data')\n path = os.path.dirname(os.path.abspath(__file__)).replace('/tests', '/test')\n if category:\n path = f\"{path}/{category}\"\n\n return path", "def source_test_file_name():\n return 'feature'", "def get_video_parts(video_path):\n parts = video_path.split(os.path.sep)\n filename = parts[1]\n filename_no_ext = filename.split('.')[0]\n train_or_test = parts[0]\n\n return train_or_test, filename_no_ext, filename", "def format_into_test_path(self, testitem):\n test_string = testitem.split(\"(\")\n test_path = test_string[1].split(\")\")[0]\n \"\"\"\n Test item has different path with different python version\n example:\n on 3.8 set_psu_cmd (tests.fuji.test_psu.Psu1Test)\n on 3.11 set_psu_cmd (tests.fuji.test_psu.Psu1Test.set_psu_cmd)\n \"\"\"\n if test_path.split(\".\")[-1].strip() != test_string[0].strip():\n test_path = test_path + \".\" + test_string[0]\n\n return test_path.strip()", "def named(path):\n return re.findall(r'.*(test\\d+)\\.out', path)[0]", "def _testcase_name(testcase):\n name = os.path.splitext(os.path.basename(testcase))[0]\n name = name.replace('-', '_')\n name = 'test_{name}'.format(name=name)\n\n assert name.isidentifier()\n\n return name", "def convert_testcase_path(testcase_abs_path: Text) -> Tuple[Text, Text]:\n testcase_new_path = ensure_file_abs_path_valid(testcase_abs_path)\n\n dir_path = os.path.dirname(testcase_new_path)\n file_name, _ = os.path.splitext(os.path.basename(testcase_new_path))\n testcase_python_abs_path = os.path.join(dir_path, f\"{file_name}_test.py\")\n\n # convert title case, e.g. 
request_with_variables => RequestWithVariables\n name_in_title_case = file_name.title().replace(\"_\", \"\")\n\n return testcase_python_abs_path, name_in_title_case", "def _GetTestFromPath(self, test_id, path):\n\n raise NotImplementedError", "def get_video_parts(video_path):\n parts = video_path.split(os.path.sep)\n print(\"parts: \", parts)\n filename = parts[7]\n filename_no_ext = filename.split('.')[0]\n classname = parts[6]\n train_or_test = parts[5]\n\n return train_or_test, classname, filename_no_ext, filename", "def flat_test_name(_id):\n return \"-\".join(_id.split(\".\")[1:])", "def get_test_fname(fname):\n path = get_test_path()\n full_path = os.path.join(path, fname)\n return full_path", "def get_train_test_split(path=\"./data/train_test_split.json\"):\n with open(path) as f:\n train_test_split = json.load(f)\n return train_test_split", "def get_test_path(bname, test_dir=TEST_CASE_DIR):\n return abspath(join(test_dir, bname))", "def _generate_test_name(source):\n out = source.replace(' ', '_').replace(':', '').replace(',', '').lower()\n return \"test_%s\" % out", "def target_test_file_name():\n return 'test'", "def testGetFromSplits(self):\n artifacts = [standard_artifacts.Examples()]\n artifacts[0].uri = '/tmp'\n artifacts[0].split_names = artifact_utils.encode_split_names(\n ['train', 'eval'])\n\n self.assertEqual(artifacts[0].split_names, '[\"train\", \"eval\"]')\n\n self.assertIs(artifact_utils.get_single_instance(artifacts), artifacts[0])\n self.assertEqual('/tmp', artifact_utils.get_single_uri(artifacts))\n self.assertEqual('/tmp/train',\n artifact_utils.get_split_uri(artifacts, 'train'))\n self.assertEqual('/tmp/eval',\n artifact_utils.get_split_uri(artifacts, 'eval'))", "def create_test_file_name(test_file):\n 'test.{}'.format(test_file.replace('.py', ''))", "def split_datastore_path(datastore_path):\n spl = datastore_path.split('[', 1)[1].split(']', 1)\n path = \"\"\n if len(spl) == 1:\n datastore_name = spl[0]\n else:\n datastore_name, path = spl\n return datastore_name, path.strip()", "def get_test_path():\n path, name = os.path.split(__file__)\n return os.path.join(path,\"..\", 'test-data')", "def test_name(self):\r\n parts = []\r\n if self.test.__module__ != '__main__':\r\n parts.append(self.test.__module__)\r\n if hasattr(self.test, 'im_class'):\r\n parts.append(self.test.im_class.__name__)\r\n parts.append(self.test.__name__)\r\n return '.'.join(parts)", "def test_path_basename():\n mock_path = \"E:\\\\Repos\\\\pc-setup\\\\powershell\\\\provision_python.ps1\"\n output = sh.path_basename(mock_path)\n assert output == \"provision_python.ps1\"", "def get_name(path):\n return path.rsplit('/',1)[1]", "def test_suite_name(self) -> str:\n return pulumi.get(self, \"test_suite_name\")", "def path_name(self, path):\r\n ind = path.rfind(\"/\") + 1\r\n return (path[:ind], path[ind:])", "def name_from_path(path):\n return path[0:-3]" ]
[ "0.68393505", "0.6673254", "0.6028965", "0.5967455", "0.59407866", "0.58373785", "0.5818909", "0.58015156", "0.5758415", "0.5726683", "0.5719242", "0.5698514", "0.5678177", "0.5632192", "0.56196624", "0.55810404", "0.55484384", "0.553336", "0.551327", "0.5507131", "0.5493983", "0.54408735", "0.5433362", "0.5423412", "0.541405", "0.5413106", "0.5257273", "0.52363867", "0.5232113", "0.5229766" ]
0.7001524
0
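For illustration only (the test paths below are made up, not taken from the dataset): the two separator conventions the SplitTestPath record distinguishes, reduced to the core split.

def split_test_path(test_result, separator):
    test_path = test_result['testPath']
    if separator not in test_path:
        raise ValueError('Invalid test path: %s' % test_path)
    return test_path.split(separator, 1)

telemetry_result = {'testPath': 'rendering.desktop/story_name'}
gtest_result = {'testPath': 'SuiteName.TestCase'}

print(split_test_path(telemetry_result, '/'))  # ['rendering.desktop', 'story_name']
print(split_test_path(gtest_result, '.'))      # ['SuiteName', 'TestCase']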
Update fields of a test result in the case of a processing failure.
def SetUnexpectedFailure(test_result): test_result['status'] = 'FAIL' test_result['expected'] = False logging.error('Processing failed for test %s', test_result['testPath'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def failure(self, result):\r\n raise NotImplementedError", "def set_test_failed(self):\n self.set_result(Status.FAILED)", "def addFailure(self, result):\n result.addFailure(self, (Exception, Exception(), None))\n # Since TAP will not provide assertion data, clean up the assertion\n # section so it is not so spaced out.\n test, err = result.failures[-1]\n result.failures[-1] = (test, \"\")", "def _failed(self, msg):\n self.log(msg)\n self.result.passed = False\n self.result.add_error(msg)\n self.log(u\"Failed\")", "def test_state_after_failure(self):\n pass", "def update_failed_test_info_dict(failed_testname, failed_test_path):\n global g_failed_tests_info_dict\n\n if failed_testname in g_failed_tests_info_dict[\"TestName\"]: # existing test\n g_failed_tests_info_dict[\"TestInfo\"][g_failed_tests_info_dict[\"TestName\"].index(failed_testname)] = \\\n init_update_each_failed_test_dict(\n g_failed_tests_info_dict[\"TestInfo\"][g_failed_tests_info_dict[\"TestName\"].index(failed_testname)],\n failed_test_path, failed_testname, False)\n else: # next test\n g_failed_tests_info_dict[\"TestName\"].append(failed_testname)\n g_failed_tests_info_dict[\"TestInfo\"].append(init_update_each_failed_test_dict(dict(), failed_test_path,\n failed_testname, True))", "def addFailure(self, test, err):\r\n self.failures.append((test, self._exc_info_to_string(err, test)))\r\n self._mirrorOutput = True", "def addFailure(self, test, err):\n\n super(ForceBalanceTestResult, self).addFailure(test,err)\n self.logger.warning(\"\\r\\x1b[31;1m\" + \"FAIL\" + \"\\x1b[0m \" + test.shortDescription() + \"\\n\")\n\n errorMessage = self.buildErrorMessage(test, err)\n\n for line in errorMessage.splitlines():\n self.logger.warning(\"\\t >\\t\" + line + \"\\n\")", "def mark_failed(self):\n self.status = self.FAILED\n self.traceback = self._format_traceback()\n self.save(update_fields={'status', 'traceback', 'updated_at'})", "def test_original_failure(self):\n try:\n 1 / 0\n except ZeroDivisionError:\n f = Failure()\n dr = EventualResult(fail(f), None)\n self.assertIdentical(dr.original_failure(), f)", "def update_results(failures, errors, case_):\n for check in case_.checks:\n if check.result == FAILURE:\n failures.append(check)\n elif check.result == ERROR:\n errors.append(check)", "def test_results_errors(self, affiliate_items):\n updater = mock.Mock(side_effect=ValueError())\n batch_job = BatchJob(affiliate_items, updater)\n\n error_count = 0\n for result in batch_job.run():\n error_count += int(result.is_error)\n\n assert error_count == 4", "def failed(self, id, err=''):\n\n records = self.db.get_table()\n index = -1\n\n for i in range(0, len(records)):\n if str(records[i][\"id\"]) == str(id):\n index = i\n \n if index == -1:\n return None\n\n records[index][\"status\"] = \"failed\"\n if 'end-time' in records[index]:\n records[index][\"end-time\"] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n if 'comments' in records[index]:\n records[index][\"comments\"] += \" failed{ \" + err + \" };\"\n\n self.db.update_row(index, records[index])\n\n _log.info('Test %s marked as failed with message %s.' 
% (str(id), str(err)))\n \n return records[index]", "def test_api_object_failed_property(self, api_object):\n api_object.status = 'FAILED'\n assert api_object.failed\n assert not api_object.creating", "def addFailure(self, test, err):\n test.status = \"failed\"\n self._addError(test, err)", "def mark_failed(self, test):\n if not test:\n LOGGER.warn('Empty or None test name passed to standard_json_util')\n return\n\n if test in self.tests:\n self.tests[test]['actual'] = self.tests[test]['actual'] + \" FAIL\"\n self.tests[test]['is_unexpected'] = True\n else:\n self.tests[test] = {\n 'expected': 'PASS',\n 'actual': 'FAIL',\n 'is_unexpected': True\n }", "def failed(self):\n\t\tpass", "def update_fail(self, data, new_data, message):\n self.add_success(data)\n return self.edit_fail(data[self.id_field], new_data, message)", "def failed(self) -> None:\n self.failure_count += 1", "def extraction_failure(self, extraction_failure):\n self._extraction_failure = extraction_failure", "def init_update_each_failed_test_dict(one_test_info, failed_test_path, testName, newTest):\n if newTest:\n one_test_info = dict()\n one_test_info[\"JenkinsJobName\"] = []\n one_test_info[\"BuildID\"] = []\n one_test_info[\"Timestamp\"] = []\n one_test_info[\"GitHash\"] = []\n one_test_info[\"TestCategory\"] = [] # would be JUnit, PyUnit, RUnit or HadoopPyUnit, HadoopRUnit\n one_test_info[\"NodeName\"] = []\n one_test_info[\"FailureMessages\"] = [] # contains failure messages for the test\n one_test_info[\"FailureCount\"] = 0\n one_test_info[\"TestName\"] = testName\n\n# if g_timestamp not in one_test_info[\"Timestamp\"]:\n one_test_info[\"JenkinsJobName\"].append(g_job_name)\n one_test_info[\"BuildID\"].append(g_build_id)\n one_test_info[\"Timestamp\"].append(g_timestamp)\n one_test_info[\"GitHash\"].append(g_git_hash)\n one_test_info[\"TestCategory\"].append(g_unit_test_type) # would be JUnit, PyUnit, RUnit or HadoopPyUnit, HadoopRUnit\n one_test_info[\"NodeName\"].append(g_node_name)\n one_test_info[\"FailureCount\"] += 1\n\n error_url = '/'.join([g_resource_url, 'testReport', failed_test_path])\n get_console_out(error_url) # store failure message in temp file\n\n if os.path.isfile(g_temp_filename):\n with open(g_temp_filename, 'r') as error_file:\n one_test_info[\"FailureMessages\"].append(error_file.read())\n else:\n one_test_info[\"FailureMessages\"].append(\"\") # append empty error message if file not found\n return one_test_info", "def test_insert_batch_result_with_a_single_update(self):\n incomplete = generate_mock_result(status='IN_PROGRESS', success=False, run_id=1)\n self.db.insert_result_batch(results=[incomplete, generate_mock_result(run_id=2)])\n self.assertEqual(2, len(self.db.get_results_for_project('TEST')))\n self.assertEqual(1, len(self.db.get_failed_results_for_project('TEST')))\n incomplete.update({'status': 'SUCCESS', 'success': True})\n self.db.insert_result_batch(results=[incomplete, generate_mock_result(run_id=3)])\n self.assertEqual(3, len(self.db.get_results_for_project('TEST')))\n self.assertEqual(0, len(self.db.get_failed_results_for_project('TEST')))", "def addFailure(self, test, err):\n self.failure_count += 1\n self.total_count += 1\n unittest.TestResult.addFailure(self, test, err)\n _, _exc_str = self.failures[-1]\n output = self.complete_output()\n self.result.append((self.__class__.FAIL, test, output, _exc_str))\n if self.verbosity > 1:\n sys.stderr.write('F ')\n sys.stderr.write(str(test))\n sys.stderr.write('\\n')\n else:\n sys.stderr.write('F')", "def 
_actionTestResultFailureEditPost(self):\n from testmanager.core.testresultfailures import TestResultFailureLogic, TestResultFailureData;\n from testmanager.webui.wuitestresultfailure import WuiTestResultFailure;\n return self._actionGenericFormEditPost(TestResultFailureData, TestResultFailureLogic,\n WuiTestResultFailure, self.ksActionResultsUnGrouped);", "def testFailed(self):\r\n failedExprKeys = list(self.__testFailedExpressions.keys())\r\n for i in range(len(failedExprKeys)):\r\n for expr in self.__testFailedExpressions[failedExprKeys[i]]:\r\n self.__Calculator.setExpression(expr)\r\n self.__Calculator.calculateResult()\r\n self.assertEqual(self.__testErrors[failedExprKeys[i]], self.__Calculator.getError())", "def update_failure(self, talk_id, failure):\r\n QtSql.QtSqlQuery('''UPDATE failures SET Comments=\"%s\", Indicator=\"%s\", Release=\"%d\" WHERE Id=\"%s\"''' %\r\n (failure.comment,\r\n failure.indicator,\r\n failure.release,\r\n failure.talkId))\r\n log.info(\"Failure updated: %s %s\" % (failure.talkId, failure.comment))", "def indicate_failure(self):\n pass", "def _process_error(self, result):\n self.error = result\n if result['errorCode'] == 901:\n raise Exceptions.APIKeyInvalid\n elif result['errorCode'] == 902:\n raise Exceptions.APISecretInvalid\n elif result['errorCode'] == 903:\n raise Exceptions.InvalidRequestToken\n elif result['errorCode'] == 904:\n raise Exceptions.RequestTokenExpired\n elif result['errorCode'] == 905:\n raise Exceptions.InvalidAccessToken\n elif result['errorCode'] == 906:\n raise Exceptions.TokenExpired(self.access.expire)\n elif result['errorCode'] == 907:\n raise Exceptions.ParameterMissing\n elif result['errorCode'] == 908:\n raise Exceptions.ParameterNotFormatted\n elif result['errorCode'] == 909:\n raise Exceptions.FeatureNotSupported\n elif result['errorCode'] == 910:\n raise Exceptions.EndPointNotSupported\n else:\n raise Exceptions.UnknownJsonError(result)", "def test_failed_processing(self):\n # setup\n ledger_api_dialogue, fipa_dialogue = self._setup_fipa_ledger_api_dialogues(self)\n\n self.transaction_behaviour.timedout.add(ledger_api_dialogue.dialogue_label)\n\n # operation\n with patch.object(self.logger, \"log\") as mock_logger:\n self.transaction_behaviour.failed_processing(ledger_api_dialogue)\n\n # after\n self.assert_quantity_in_outbox(0)\n\n # finish_processing\n assert self.transaction_behaviour.timedout == set()\n\n mock_logger.assert_any_call(\n logging.DEBUG,\n f\"Timeout dialogue in transaction processing: {ledger_api_dialogue}\",\n )\n\n # failed_processing\n assert fipa_dialogue in self.transaction_behaviour.waiting", "def _update_crash_result(testcase, crash_result, command):\n min_state = crash_result.get_symbolized_data()\n min_unsymbolized_crash_stacktrace = crash_result.get_stacktrace(\n symbolized=False)\n min_crash_stacktrace = utils.get_crash_stacktrace_output(\n command, min_state.crash_stacktrace, min_unsymbolized_crash_stacktrace)\n testcase.crash_type = min_state.crash_type\n testcase.crash_address = min_state.crash_address\n testcase.crash_state = min_state.crash_state\n testcase.crash_stacktrace = data_handler.filter_stacktrace(\n min_crash_stacktrace)" ]
[ "0.66159004", "0.65798354", "0.6501041", "0.6283665", "0.61487025", "0.61080235", "0.6103907", "0.60715514", "0.604803", "0.602537", "0.6021293", "0.60169196", "0.6007215", "0.59884024", "0.5984656", "0.59792066", "0.5974906", "0.59517866", "0.5938297", "0.5911561", "0.5904968", "0.59039706", "0.586925", "0.5839257", "0.58343875", "0.58336556", "0.5833049", "0.58125013", "0.5808562", "0.57973915" ]
0.72470486
0
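A before/after usage sketch for the SetUnexpectedFailure record (field names copied from the record; the test path value is invented):

import logging

def set_unexpected_failure(test_result):
    # Same shape as the record above: mark the result failed and unexpected.
    test_result['status'] = 'FAIL'
    test_result['expected'] = False
    logging.error('Processing failed for test %s', test_result['testPath'])

result = {'testPath': 'benchmark/story', 'status': 'PASS', 'expected': True}
set_unexpected_failure(result)
print(result)  # {'testPath': 'benchmark/story', 'status': 'FAIL', 'expected': False}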
Extract features from a document and return a dictionary of these features keyed by their abbreviation and document label.
def extract_features(self, doc): features = dict() bow = self.vectorize_doc_simple(doc) charcount = self.char_count(doc) wordcount = self.word_count(doc) sentencecount = self.sentence_count(doc) paragraphcount = self.paragraph_count(doc) # extract characters features features['characters per word'] = charcount / wordcount features['characters per sentence'] = charcount / sentencecount features['characters per paragraph'] = charcount / paragraphcount features['characters per document'] = charcount features['word characters length variance'] = numpy.std( self.word_char_length_variance(doc)) features['sentence characters length variance'] = numpy.std( self.sentence_char_length_variance(doc)) # extract words features features['words per sentence'] = wordcount / sentencecount features['words per paragraph'] = wordcount / paragraphcount features['words per document'] = wordcount features['sentence words length variance'] = numpy.std( self.sentence_words_length_variance(doc)) # extract sentences features features['sentences per paragraph'] = sentencecount / paragraphcount features['sentences per document'] = sentencecount # extract paragraphs features features['paragraphs per document'] = paragraphcount # extract syllables features syllablecount = 0 for word, count in bow.iteritems(): syllablecount += self.num_of_syllables(word) * count features['syllables per word'] = syllablecount / wordcount features['syllables per sentence'] = syllablecount / sentencecount features['syllables per paragraph'] = syllablecount / paragraphcount # extract part of speech features tokens = self.pos_tag_doc(doc) pos_counts = self.vectorize_pos_tags(tokens) poswordcount = sum(pos_counts.values()) for i in xrange(82, 101): features['%d per word' % i] = pos_counts[i] / poswordcount sorted_pos_counts = sorted(pos_counts, key=pos_counts.get, reverse=True) features['1st top tag'] = str(sorted_pos_counts[0]) features['2nd top tag'] = str(sorted_pos_counts[1]) features['3rd top tag'] = str(sorted_pos_counts[2]) features['4th top tag'] = str(sorted_pos_counts[3]) features['5th top tag'] = str(sorted_pos_counts[4]) # extract vocab features vocabsize = len(self.vectorize_doc_simple(doc)) features['vocab size'] = vocabsize features['words per vocab size'] = wordcount / vocabsize return features
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_features(document):\n document_words = set(document)\n features = {}\n global word_features\t\n for word in word_features:\n features['contains(%s)' % word] = (word in document_words)\n return features", "def extract(self, document):\n f_num = len(self.feature_list)\n feature_vector = np.zeros((f_num,))\n words = document.split()\n for i in xrange(len(words)):\n for n in self.ns:\n ngram = self.try_get_ngram(words, n, i)\n if ngram and ngram in self.ngrams:\n self.add_ngram(feature_vector, ngram)\n return feature_vector", "def features_extract(document, wordset):\n words_doc = nltk.FreqDist(document)\n features = []\n for word in wordset:\n features.append(words_doc[word])\n return features", "def extract(self, documents):\n\n # Feature vector to return\n features = np.zeros((len(documents), len(self.idx_to_word)))\n\n # Raise an exception if 'extract' is called before 'preprocess'\n if len(self.word_to_idx) == 0 or len(self.idx_to_word) == 0:\n raise Exception(\"Dictionary not initialised.\")\n\n # Iterate over all documents\n for idx, doc in enumerate(documents):\n # Split the doc into a list of words\n words = extract_words(doc)\n\n # For each word\n for w in words:\n # Calculate it's frequency, however, keep in mind\n # that this word may not have been in the training\n # corpus. In that case, ignore the word.\n ''' YOUR CODE HERE '''\n try:\n features[idx][self.word_to_idx[w]] = words.count(w)\n except KeyError:\n pass\n\n ''' END CODE FOR THIS LOOP '''\n\n # Divide the vector by the total number of words in the document to\n # normalize the frequencies.\n ''' YOUR CODE HERE '''\n features[idx] = features[idx]/len(words)\n ''' END CODE FOR THIS LOOP '''\n\n return features", "def index_feats_dict(self):\n doc_features_dict = {}\n\n for index, doc in zip(self.index, self.series):\n # Sets for a doc and feature words\n doc_set = set(doc.split())\n feat_set = set(self.features)\n\n # Shared words between the two sets\n interset_words = doc_set.intersection(feat_set)\n\n # Append to doc_features_dict\n doc_features_dict[index] = list(interset_words)\n\n return doc_features_dict", "def extract_features(data, stopwords=STOPWORDS):\n tags = set()\n docs = []\n for document in data:\n doc_data = dict()\n doc_data['pmid'] = document['sourceid']\n text = document['text']\n\n # Insert PubTator annotations inside abstract\n denotations = document['denotations']\n sorted_denotations = []\n for denotation in denotations:\n begin = denotation['span']['begin']\n end = denotation['span']['end']\n obj = denotation['obj']\n for c in punctuation:\n obj = obj.replace(c, '')\n tags.add(obj)\n doc_data[obj] = doc_data.get(obj,0)+1\n sorted_denotations.append([begin,end,obj])\n sorted_denotations.sort()\n sorted_denotations.reverse()\n for begin, end, obj in sorted_denotations:\n text = text[:begin] + obj + ' ' + text[end:]\n\n doc_data['text'] = clean_text(text, stopwords)\n docs.append(doc_data)\n\n return docs", "def _extract_features(self):\n # print(os.getpid())\n return {n:self._extract_feature(f) for (n,f) in self.features.items()}", "def doc2features(self,sent):\n return [self.word2features(sent['tokens'], i) for i in range(len(sent['tokens']))]", "def extract(self, documents):\n\n # Placeholder for return value.\n features = None\n\n # Create a numpy array of all zeros for storing frequencies.\n tf = np.zeros((len(documents), len(self.idx_to_word)))\n\n # Raise an exception if 'extract' is called before 'preprocess'\n if len(self.word_to_idx) == 0 or len(self.idx_to_word) == 0:\n 
raise Exception(\"Extractor not initialised.\")\n\n # For each document\n for idx, doc in enumerate(documents):\n # Split strig into a list of words\n words = extract_words(doc)\n\n # Calculate it's frequency, however, keep in mind\n # that this word may not have been in the training\n # corpus. In that case, ignore the word.\n for w in words:\n ''' YOUR CODE HERE '''\n try:\n tf[idx][self.word_to_idx[w]] = words.count(w)\n except KeyError:\n pass\n\n ''' END CODE FOR THIS LOOP '''\n\n # Divide the frequencies by the number of words in document.\n ''' YOUR CODE HERE '''\n tf[idx] = tf[idx]/len(words)\n ''' END CODE FOR THIS LOOP '''\n\n # Calculate the Tf-Idf features.\n features = tf * self.idf\n\n return features", "def get_text_features(text, word_features):\n words = word_tokenize(text)\n features = {}\n for w in word_features:\n features[w] = (w in words)\n\n return features", "def _doc2features(tokens: List[Tuple[str, str]], index: int) -> Dict:\n word, pos = tokens[index]\n f = {\n \"word\": word,\n \"word_is_stopword\": _is_stopword(word),\n \"pos\": pos,\n }\n if index > 0 and index > 1:\n prevprevword, prevprevpos = tokens[index - 2]\n f[\"prev-prev-word\"] = prevprevword\n f[\"prev-prevz-word_is_stopword\"] = _is_stopword(prevprevword)\n f[\"prev-prevz-pos\"] = prevprevpos\n if index > 0:\n prevword, prevpos = tokens[index - 1]\n f[\"prev-word\"] = prevword\n f[\"prev-word_is_stopword\"] = _is_stopword(prevword)\n f[\"prev-pos\"] = prevpos\n else:\n f[\"BOS\"] = True\n if index < len(tokens) - 2:\n nextnextword, nextnextpos = tokens[index + 2]\n f[\"nextnext-word\"] = nextnextword\n f[\"nextnext-word_is_stopword\"] = _is_stopword(nextnextword)\n f[\"nextnext-pos\"] = nextnextpos\n if index < len(tokens) - 1:\n nextword, nextpos = tokens[index + 1]\n f[\"next-word\"] = nextword\n f[\"next-word_is_stopword\"] = _is_stopword(nextword)\n f[\"next-pos\"] = nextpos\n else:\n f[\"EOS\"] = True\n\n return f", "def document_to_lda_features(lda_model, document):\n topic_importances = lda_model.get_document_topics(document, minimum_probability=0)\n topic_importances = numpy.array(topic_importances)\n return topic_importances[:,1]", "def feat_dict(pos_feat,text):\n dict = {}\n bigrams = ngrams(word_tokenize(text),2)\n trigrams = ngrams(word_tokenize(text),3)\n \n for feat in pos_feat:\n dict[feat]=features(feat,text,bigrams,[],[])\n return dict", "def _extract_features(self, a_rel, a_parses):\n feats = {}\n doc_id = a_rel[DOC_ID]\n toks_pos1 = self._get_toks_pos(a_parses[doc_id][SENTENCES],\n a_rel, ARG1)\n toks_pos2 = self._get_toks_pos(a_parses[doc_id][SENTENCES],\n a_rel, ARG2)\n self._get_product_rules(feats, doc_id, a_rel, a_parses)\n self._get_dep_rules(feats, doc_id, a_rel, a_parses)\n self._get_first_last_toks(feats, toks_pos1, toks_pos2)\n self._get_modality(feats, toks_pos1, toks_pos2)\n self._get_vb_class(feats, toks_pos1, toks_pos2)\n self._get_brown_clusters(feats, toks_pos1, toks_pos2)\n self._get_inquirer(feats, toks_pos1, toks_pos2)\n self._get_MPQA(feats, toks_pos1, toks_pos2)\n return feats", "def _parse_features(cls, node: OMNode) -> Dict[str, Dict[str, bool]]:\n features = {}\n for sectname in node:\n section = node[sectname]\n if not isinstance(section, dict) or '_types' not in section:\n continue\n features[sectname] = {}\n for opt in section:\n if not opt.startswith('has'):\n continue\n value = section[opt]\n if not isinstance(value, bool):\n continue\n option = opt[3:]\n features[sectname][option] = value\n return features", "def extract_feats(word, nlp):\n feat_dict 
= {}\n feat_string = ''\n doc = nlp(word).to_dict()[0][0]\n if 'feats' in doc:\n for pair in doc['feats'].split('|'):\n feat, val = pair.split('=')\n feat_dict[feat] = val\n feat_string += feat + ': ' + val + ', '\n if feat_string:\n feat_string = ' (' + feat_string[:-2] + ')'\n return feat_dict, feat_string", "def to_features(self):\n to_return = dict()\n\n to_return['bias'] = 1.0\n to_return['user:' + self.user] = 1.0\n to_return['format:' + self.format] = 1.0\n to_return['token:' + self.token.lower()] = 1.0\n\n to_return['part_of_speech:' + self.part_of_speech] = 1.0\n for morphological_feature in self.morphological_features:\n to_return['morphological_feature:' + morphological_feature] = 1.0\n to_return['dependency_label:' + self.dependency_label] = 1.0\n\n return to_return", "def docExtract(self):\n\n self.fv = []\n for doc in self.documents:\n self.fv.append(self.featureSet.extract(doc))\n\n # Convert to a numpy matrix.\n return np.array(np.asmatrix(self.fv))\n # return self.fv", "def extract_features(docs_train, docs_test, perform_dimensionality_reduction):\n word_ngram_range = (1, 4)\n char_ngram_range = (2, 5)\n\n '''\n Build an n grams vectorizer with word_n_gram_range and char_n_gram_range\n '''\n\n ngrams_vectorizer = create_n_grams_vectorizer(\n word_ngram_range, char_ngram_range)\n\n # use the n_gram vectorizer to form the train and test dataset\n # it will take a lot of time... i think\n X_train = ngrams_vectorizer.fit_transform(docs_train)\n X_test = ngrams_vectorizer.transform(docs_test)\n print(\"Performed fitting of data\")\n\n ############ dimensionality reduction ################\n\n if(perform_dimensionality_reduction == True):\n X_train, X_test = perform_dimensionality_reduction(X_train, X_test)\n\n # print(docs_train[0])\n return X_train, X_test", "def extractWordFeatures(x):\n # BEGIN_YOUR_CODE (around 5 lines of code expected)\n a = Counter(x.split())\n return dict(a)\n # END_YOUR_CODE", "def get_word_list_features(word_list, word_features):\n document = ' '.join(word_list)\n words = word_tokenize(document)\n features = {}\n for w in word_features:\n features[w] = (w in words)\n\n return features", "def getFeatureDicts(self):\n return [self.data.getWordTagDict(), self.data.tags_trigrams, self.data.tags_bigrams]", "def extract(self, document):\n raise NotImplementedError('FeatureExtractorBase:extract(self, text) is not defined')", "def get_features(words):\n features = {}\n for word in [i for i in words.split() if i not in stopwords.words('english')]:\n features['contains_%s' % word.lower()] = True\n return features", "def features(self, sentence, tags, index):\n return{\n 'word': sentence[ index ],\n 'prevWord': '' if index == 0 else sentence[ index - 1 ],\n 'nextWord': '' if index == len( sentence ) -1 else sentence[ index + 1 ],\n 'isFirst': index == 0,\n 'isLast': index == len( sentence ) - 1,\n 'isCapitalized': sentence[index][0].upper() == sentence[ index ][ 0],\n 'isAllCaps': sentence[ index ].upper() == sentence[ index ],\n 'isAllLowers': sentence[ index ].lower() == sentence[ index ],\n 'prefix-1': sentence[ index ][ 0 ],\n 'prefix-2': '' if ( len(sentence) < 2 ) else sentence[ index ][:2],\n 'prefix-3': '' if ( len(sentence) < 3 ) else sentence[ index ][:3],\n 'prefix-4': '' if ( len(sentence) < 4 ) else sentence[ index ][:4],\n 'suffix-1': sentence[ index ][ -1 ],\n 'suffix-2': '' if ( len(sentence) < 2 ) else sentence[ index ][-2:],\n 'suffix-3': '' if ( len(sentence) < 3 ) else sentence[ index ][-3:],\n 'suffix-4': '' if ( len(sentence) < 4 ) else 
sentence[ index ][-4:],\n 'tag-1': '' if index == 0 else tags[ index - 1 ],\n 'tag-2': '' if index < 2 else tags[ index - 2 ]\n }", "def get_document_tags(self, docid):\n return [(key, json.loads(value))\n for key, value\n in self.sql_session.query(Feature)\n .filter(Feature.document == docid)\n .values(Feature.key, Feature.value)]", "def extract_features(self):\n self.extract_features_static()\n self.extract_features_dynamic()", "def extract_features(tlc):\n text = clean_text(tlc['body'])\n fields = dict()\n # add features here #\n fields['Top_comment_word_count'] = len(text.split(' '))\n fields['Top_comment_text'] = text\n\n # Extract time-based features\n def get_day_of_week(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').weekday() + 1\n\n def get_day_of_month(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').day\n\n def get_time_of_day(text):\n return datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S').hour\n time_local = time.localtime(tlc['created_utc'])\n time_local = time.strftime(\"%Y-%m-%d %H:%M:%S\", time_local)\n fields['Top_comment_day'] = get_day_of_month(time_local)\n fields['Top_comment_day_of_week'] = get_day_of_week(time_local)\n fields['Top_comment_hour'] = get_time_of_day(time_local)\n\n # Extract gender value\n gp = GenderPerformr()\n probs, _ = gp.predict(tlc['author'])\n # Rescale it from [0,1] to [-1,1]\n fields['Top_comment_author_gender_value'] = 2 * probs - 1\n\n # Extract percentage of mispellings\n check = SpellChecker(\"en_US\")\n tokenizer = get_tokenizer(\"en_US\")\n # Prevent the denominator from 0\n def weird_division(n, d):\n return n / d if d else 0\n\n def get_mispellings_percentage(text):\n mispelling_count = 0\n total_count = 0\n if text == 'nan':\n return total_count\n else:\n check.set_text(text)\n for err in check:\n mispelling_count = mispelling_count + 1\n for w in tokenizer(text):\n total_count = total_count + 1\n value = weird_division(mispelling_count, total_count)\n return value\n fields['Top_comment_mispellings'] = get_mispellings_percentage(text)\n\n # Get politeness, agreement, support scores, and rescale them from [1,5] to [-1,1]\n ar = Agreementr()\n pr = Politenessr()\n sr = Supportr()\n fields['Top_comment_agreement_value'] = 0.5*float(ar.predict([text]))-1.5\n fields['Top_comment_politeness_value'] = 0.5*float(pr.predict([text]))-1.5\n fields['Top_comment_support_value'] = 0.5*float(sr.predict([text]))-1.5\n\n # Get toxicity scores\n KEY = \"yourkey.txt\" # os.getenv(\"GOOGLE_API_KEY\")\n service = discovery.build('commentanalyzer', 'v1alpha1', developerKey=KEY)\n\n def get_results(request_id, response, exception):\n toxicity_scores.append((request_id, response))\n\n toxicity_scores = []\n count = 0\n batch = service.new_batch_http_request(callback=get_results)\n analyze_request = {\n 'comment': {'text': text},\n \"requestedAttributes\": {\n \"TOXICITY\": {},\n \"SEVERE_TOXICITY\": {},\n \"ATTACK_ON_COMMENTER\": {}\n }\n }\n batch.add(service.comments().analyze(body=analyze_request), request_id=str(count))\n batch.execute()\n toxic_score = toxicity_scores[0][1]['attributeScores']['TOXICITY']['summaryScore']['value']\n attack_score = toxicity_scores[0][1]['attributeScores']['ATTACK_ON_COMMENTER']['summaryScore']['value']\n if toxic_score > 0.5:\n fields['Top_comment_untuned_toxicity'] = 1\n else:\n fields['Top_comment_untuned_toxicity'] = 0\n if toxic_score > 0.8 and attack_score > 0.5:\n fields['Top_comment_tuned_toxicity'] = 1\n else:\n fields['Top_comment_tuned_toxicity'] = 0\n # end of 
feature extractions #\n return fields", "def word2features(sent, i):\n features = []\n\n # the [-1,+1] window of words around the token\n for o in [-1,0,1]:\n if i+o >= 0 and i+o < len(sent):\n word_tuple = sent[i+o]\n word_window = get_words_in_window(word_tuple, o)\n features.extend(word_window)\n\n # # part of speech\n # pos = ('pos', sent[i][1])\n # features.append(pos)\n\n # prop = ('prop', is_proper_case(sent[i][0]))\n # features.append(prop)\n\n return dict(features)", "def to_feature_dict(self):\n return {feature:self.get_feature(feature) for feature in self._FEATURES}" ]
[ "0.7853346", "0.6725468", "0.65909344", "0.6505441", "0.64868176", "0.647813", "0.64510643", "0.6434799", "0.6289131", "0.62880605", "0.624365", "0.6175479", "0.61572194", "0.6059686", "0.60497266", "0.60189444", "0.60106367", "0.59182346", "0.58369625", "0.5827985", "0.57658345", "0.5749896", "0.57268333", "0.57094556", "0.56720555", "0.56671536", "0.5660884", "0.5658679", "0.5652102", "0.56214714" ]
0.7537927
1
Returns the number of characters in a document.
def char_count(self, doc): return len(doc)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wordCount(document):\n return float(len(document.split(None)))", "def document_count(self):\n raise NotImplementedError", "def n_chars(doc_or_tokens: types.DocOrTokens) -> int:\n # docs are hashable, so we can leverage the lru cache as-is\n if isinstance(doc_or_tokens, Doc):\n ncpw = n_chars_per_word(doc_or_tokens)\n # otherwise, let's get an iterable of words but cast it to a hashable tuple\n # so we can leverage the lru cache on this and related calls in, say, n_long_words\n else:\n words = utils.get_words(doc_or_tokens)\n ncpw = n_chars_per_word(tuple(words))\n return sum(ncpw)", "def countCharacters(file_name, start, end):\r\n\r\n with open(file_name, \"r\") as file:\r\n counter_chars = 0\r\n\r\n for line in islice(file, start, end):\r\n counter_chars += len(line)\r\n\r\n return counter_chars", "def num_chars(word):\n return len(word)", "def corpus_size():\n return ix.doc_count()", "def document_count(self):\n return self.client.scard(self.dbprefix + 'docs')", "def document_count(self):\n return self._json['coredata'].get('document-count', '0')", "def get_number_of_letters(self):\n filename = f'{self.path}/{self.filename}'\n file = open(filename, 'r', encoding='utf-8')\n \"\"\"Count number of lettes without digits, non letter characters, without xml tags\"\"\"\n data = file.read()\n data = re.sub('<.*?binary.*?>*<.*?binary.*?>',' ', data)\n data = re.sub('\\\\s\\\\s*', '', (re.sub('\\\\W|\\\\d', ' ', re.sub('<.*?>', '', data))))\n let_count = len(data)\n sqlite_for_ht.CreateTable.update_table(f_1, self.filename, 'number_of_letters', let_count)\n print(datetime.now(), '-', 'number_of_letters for', self.filename, 'calculated =', let_count)\n return None", "def count_word(doc):\n count = count = 0\n for w in document.split(\" \"):\n count = count + 1\n return count", "def word_count(self):\n return len(self.text)", "def count(text):\n return len(text)", "def paragraph_count(self, doc):\n\n paragraphs = doc.split(\"\\n\\n\")\n # remove the empty string\n return len([paragraph for paragraph in paragraphs if paragraph])", "def __len__(self):\n return self.document_count", "def doc_count(self):\n\t\treturn self.index.collection.count()", "def get_total_number_of_documents(self):\n return self.total_number_of_documents", "def letters(self, path, filemoving, parser):\n root = parser.parsing_xml(path, filemoving)\n root_tag = root.tag[0:(root.tag.find('}') + 1)]\n number_of_letters = 0\n for i in root.iter(root_tag+'p'):\n if str(type(i.text)) == \"<class 'str'>\":\n number_of_letters = number_of_letters + len([letter for letter in i.text if letter.isalnum()])\n return number_of_letters", "def test_character_count(self):\n\t\tself.assertEqual(analyse_text(self.filename)[1], 133)", "def size(self):\n return len(self.chars)", "def test_character_count(self):\n self.assertEqual(analyze_text(self.filename)[1], 132)", "def char_size(self):\n return len(self.id2char)", "def get_length_of_article(self):\n return len(self.blob.words)", "def get_number_of_words_in_document(self, document):\n for _set in self.sets:\n if document in _set:\n return self.sets[_set][document]['number_of_words']", "def getTokenSizeOfReviews(self):\n res = 0\n with open(self.word_to_docs_path, 'rb') as bin:\n while bin.tell() != os.fstat(bin.fileno()).st_size:\n # get wordid:\n int.from_bytes(bin.read(4), 'big')\n # get frequency:\n frequency = int.from_bytes(bin.read(4), 'big')\n res += frequency\n # skip documents:\n int.from_bytes(bin.read(4 * frequency), 'big')\n return res", "def test_character_count(self):\n 
self.assertEqual(analyze_text(self.filename)[1], 970)", "def word_count(self, doc):\n\n return len(self.tokenize_doc_simple(doc))", "def count(self):\n return len(self.find())", "def _get_num_words(doc: Doc):\n filtered_words = [word for word in doc if not word.is_punct and \"'\" not in word.text and not word.is_space]\n return len(filtered_words)", "def n_chars_per_word(doc_or_tokens: types.DocOrTokens) -> tuple[int, ...]:\n words = utils.get_words(doc_or_tokens)\n return tuple(len(word) for word in words)", "def length(self):\n return len(self.text)" ]
[ "0.75257236", "0.6937436", "0.6925974", "0.6911785", "0.690693", "0.68599904", "0.684804", "0.683036", "0.68129593", "0.67984515", "0.6772167", "0.6742232", "0.6719053", "0.6689916", "0.6672709", "0.6668264", "0.6640832", "0.6592402", "0.6561841", "0.6555701", "0.65335816", "0.65265346", "0.6508637", "0.65071255", "0.6488639", "0.6487768", "0.6469959", "0.6459579", "0.64413697", "0.64388716" ]
0.8615984
0
Returns the number of words in a document as defined by tokenize_doc_simple.
def word_count(self, doc): return len(self.tokenize_doc_simple(doc))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wordCount(document):\n return float(len(document.split(None)))", "def count_word(doc):\n count = count = 0\n for w in document.split(\" \"):\n count = count + 1\n return count", "def n_words(doc_or_tokens: types.DocOrTokens) -> int:\n words = utils.get_words(doc_or_tokens)\n return itertoolz.count(words)", "def _get_num_words(doc: Doc):\n filtered_words = [word for word in doc if not word.is_punct and \"'\" not in word.text and not word.is_space]\n return len(filtered_words)", "def get_number_of_words_in_document(self, document):\n for _set in self.sets:\n if document in _set:\n return self.sets[_set][document]['number_of_words']", "def count_words_per_sentence(doc):\n s = 0\n for sentence in document.split(\".\"):\n s = s + 1\n w = count_word(doc) \n return w/s", "def n_unique_words(doc_or_tokens: types.DocOrTokens) -> int:\n words = utils.get_words(doc_or_tokens)\n # NOTE: this stdlib solution is slower than itertoolz for docs with ~250+ words\n # so let's take a small hit on short docs for the sake of big wins on long docs\n # return len({word.lower for word in words})\n return itertoolz.count(itertoolz.unique(word.lower for word in words))", "def freq(word, document):\n return document.split(None).count(word)", "def word_count(self):\n return len(self.text)", "def n_chars_per_word(doc_or_tokens: types.DocOrTokens) -> tuple[int, ...]:\n words = utils.get_words(doc_or_tokens)\n return tuple(len(word) for word in words)", "def count_word_type(doc,word_type):\n count = 0\n for w in document.split(\" \"):\n if w.lower() in word_type:\n count = count + 1\n return count", "def get_number_of_document_word_occurs_in(self, word):\n return len(self.dictionary[word]['docs'])", "def num_of_words(line, context):\n return [('num_of_word', len(line.txt.split()))]", "def sentence_count(self, doc):\n\n return len(sent_tokenize(doc))", "def word_count(self):\n return self._word_count", "def count_words(sent):\n words = word_tokenize(sent)\n return len(words)", "def get_number_of_words(self):\n filename = f'{self.path}/{self.filename}'\n # word_counter = {}\n # w_cnt = 0\n # x = 0\n file = open(filename, 'r', encoding='utf-8')\n data = file.read()\n head, sep, tail = data.partition('<binary')\n head = re.sub('\\\\s\\\\s*', ' ', (re.sub('\\\\W|\\\\d', ' ', re.sub('<.*?>', '', head))))\n word_list = head.split()\n # for word in word_list:\n # w_cnt += 1\n # if word not in word_counter:\n # word_counter[word] = 1\n # else:\n # word_counter[word] = word_counter[word] + 1\n\n # for word in word_list:\n # x += 1\n # print(word, word.isalpha(), x)\n\n w_cnt = sum([a[0].isalpha() for a in word_list])\n sqlite_for_ht.CreateTable.update_table(f_1, self.filename, 'number_of_words', w_cnt)\n print(datetime.now(), '-', 'number_of_words for', self.filename, 'calculated =', w_cnt)\n return None", "def get_total_number_of_words(self, tokens):\n return {\"total_number_of_words\": len(tokens)}", "def count_words_in_file(file_name):\n\n\treturn len(get_words_in_file(file_name))", "def count_words(self, clean_func=clean_up):\n return (\n len(clean_func(self.transcript_file.text()).split())\n if self.validate()\n else 0\n )", "def wordcount(self):\n return int(self._fetch_element('user_wordcount'))", "def getTokenSizeOfReviews(self):\n res = 0\n with open(self.word_to_docs_path, 'rb') as bin:\n while bin.tell() != os.fstat(bin.fileno()).st_size:\n # get wordid:\n int.from_bytes(bin.read(4), 'big')\n # get frequency:\n frequency = int.from_bytes(bin.read(4), 'big')\n res += frequency\n # skip documents:\n 
int.from_bytes(bin.read(4 * frequency), 'big')\n return res", "def word_count(self):\n print(self.words())\n return len(self.words())\n #count = 0\n #for lines in self.lines:\n # line = lines.strip(os.linesep)\n # wordslst = line.split()\n # count += len(wordslst)\n #return count\n #joined_string = ''.join(self.lines)\n #for word in joined_string:\n # if word != ' ' and word != '\\n' and word != '\\t':\n # count += 1\n #print('READ ME ––––––––––', self.lines)\n #print(joined_string)\n #print(line)\n #print(wordslst)\n #print(count)", "def word_count(self):\n from collections import Counter\n counts = Counter(self._replace_non_alnum().split())\n return counts", "def count_words_sents(self, doc_array):\n total_num_sents = []\n total_num_words = []\n for doc in doc_array:\n sents = sent_tokenize(doc)\n total_num_sents.append(len(sents))\n temp_num_words = []\n for sent in sents:\n num_words = word_tokenize(sent)\n temp_num_words.append(len(num_words))\n total_num_words.append(temp_num_words)\n return np.array(total_num_sents), np.array(total_num_words)", "def computeWordsFrequencies(self):\n token_stream = self._tokenize(self.readable)\n token_map = self._countTokens(token_stream)\n # print token_map.items()\n return sorted(token_map.items(), key = lambda x : x[1], reverse = True)", "def get_doc(corpus):\n doc_info = []\n\n for idx, text in enumerate(corpus):\n count = len(word_tokenize(text))\n doc_info.append({'doc_length': count})\n\n return doc_info", "def total_words(self):\n return len(strip_tags('%s %s' % (self.lead, self.content)).split())", "def total_words(target_text):\n\n splited_text = target_text.split()\n nbwords = len(splited_text)\n return nbwords", "def calculate_word_counts(text : Text)->Counter:\n return Counter(tokenized_text(text))" ]
[ "0.8413657", "0.820246", "0.81366116", "0.7983112", "0.7650385", "0.7589769", "0.72936594", "0.72887063", "0.7262047", "0.7251532", "0.72070163", "0.7186378", "0.7142346", "0.70556736", "0.6989334", "0.6900884", "0.68984294", "0.6864594", "0.6833796", "0.6822127", "0.67934525", "0.67700934", "0.67651707", "0.6760939", "0.6732672", "0.6732603", "0.67124665", "0.6707552", "0.67022765", "0.6688901" ]
0.87191725
0
Returns the number of sentences in a document.
def sentence_count(self, doc): return len(sent_tokenize(doc))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wordCount(document):\n return float(len(document.split(None)))", "def n_sents(doc: Doc) -> int:\n if not doc.has_annotation(\"SENT_START\"):\n LOGGER.warning(\n \"`doc` has not been segmented into sentences; applying spaCy's rule-based, \"\n \"`Sentencizer` pipeline component to `doc` before counting...\"\n )\n doc = _SENTENCIZER(doc)\n return itertoolz.count(doc.sents)", "def _get_num_sentences(doc: Doc, min_sen_length=5):\n return len([sent for sent in list(doc.sents) if len(sent.text.strip())>min_sen_length])", "def sentence_count(self):\n count = 0\n for line in self.lines:\n if '.' in line:\n count += 1\n if count == 0:\n count = 1\n return count\n #return line.count('.')\n #else:\n #return 1", "def count_words_per_sentence(doc):\n s = 0\n for sentence in document.split(\".\"):\n s = s + 1\n w = count_word(doc) \n return w/s", "def getNumod_byerSentences(self): # !!! Need to rewrite this to match graph\n return len(self.__document)", "def count_sentences(text):\n\n import re\n\n # Make a list of sentences (separated by either '.', '!' or '?')\n sentence_list = re.split(r'[.!?]', text)\n # Find the size of the list\n count = len(sentence_list)\n\n return count", "def paragraph_count(self, doc):\n\n paragraphs = doc.split(\"\\n\\n\")\n # remove the empty string\n return len([paragraph for paragraph in paragraphs if paragraph])", "def get_number_of_words_in_document(self, document):\n for _set in self.sets:\n if document in _set:\n return self.sets[_set][document]['number_of_words']", "def word_count(self, doc):\n\n return len(self.tokenize_doc_simple(doc))", "def document_count(self):\n raise NotImplementedError", "def corpus_size():\n return ix.doc_count()", "def word_count(self):\n return len(self.text)", "def word_count(excerpt):\n # Validate that we are actually give something to work with\n assert excerpt, \"excerpt cannot be blank\"\n return Counter(excerpt.split())", "def get_num_docs(self):\n return len(self.vocab)", "def count_word(doc):\n count = count = 0\n for w in document.split(\" \"):\n count = count + 1\n return count", "def count_sentences_that_meet_criteria(blob, criteria_function):\n count = len([s for s in blob.sentences if criteria_function(s)])\n return count", "def size(self):\n return len(self.sentence)", "def char_count(self, doc):\n\n return len(doc)", "def document_count(self):\n return self._json['coredata'].get('document-count', '0')", "def getScore(self, sentence):\r\n \r\n score = 0\r\n \r\n for word in sentence.words:\r\n score += len(word)\r\n \r\n return score", "def count_sentences(text):\n count = 0\n terminals = '.;?!'\n for character in text:\n \n if character in terminals:\n count += 1\n\n return count", "def get_total_number_of_documents(self):\n return self.total_number_of_documents", "def document_count(self):\n return self.client.scard(self.dbprefix + 'docs')", "def __sent_len(self, title, text):\n total = 0\n text_sent = nltk.sent_tokenize(text)\n for sent in text_sent:\n total += len(nltk.word_tokenize(sent))\n return (len(nltk.word_tokenize(title)), total / len(text_sent))", "def get_number_of_document_word_occurs_in(self, word):\n return len(self.dictionary[word]['docs'])", "def testSentences(self):\n\n textractor = Textractor(sentences=True)\n\n # Extract text as sentences\n sentences = textractor(Utils.PATH + \"/article.pdf\")\n\n # Check number of sentences is as expected\n self.assertEqual(len(sentences), 17)", "def lix(self, doc):\n num_words = _get_num_words(doc)\n num_sentences = _get_num_sentences(doc)\n num_long_words = 
_get_num_long_words(doc, min_characters=7)\n return num_words / num_sentences + 100 * num_long_words / num_words", "def freq(word, document):\n return document.split(None).count(word)", "def count_words_sents(self, doc_array):\n total_num_sents = []\n total_num_words = []\n for doc in doc_array:\n sents = sent_tokenize(doc)\n total_num_sents.append(len(sents))\n temp_num_words = []\n for sent in sents:\n num_words = word_tokenize(sent)\n temp_num_words.append(len(num_words))\n total_num_words.append(temp_num_words)\n return np.array(total_num_sents), np.array(total_num_words)" ]
[ "0.78057086", "0.77854717", "0.7744078", "0.7731899", "0.768384", "0.75315386", "0.7376466", "0.72666544", "0.71705437", "0.69727427", "0.6744118", "0.67347836", "0.67204547", "0.67012525", "0.6697781", "0.66801274", "0.6676519", "0.6667536", "0.6627134", "0.6607277", "0.659157", "0.6564007", "0.65435773", "0.65213627", "0.6501077", "0.649407", "0.648173", "0.647211", "0.6469542", "0.64661956" ]
0.8264526
0
Returns the number of paragraphs in a document.
def paragraph_count(self, doc): paragraphs = doc.split("\n\n") # remove the empty string return len([paragraph for paragraph in paragraphs if paragraph])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def paragraphs(self, path, filemoving, parser):\n root = parser.parsing_xml(path, filemoving)\n root_tag = root.tag[0:(root.tag.find('}')+1)]\n number_of_paragraphs = len(list(root.iter(root_tag + 'p')))\n return number_of_paragraphs", "def get_number_of_paragraph(self):\n file_to_read = f'{self.path}/{self.filename}'\n file = open(file_to_read, 'r', encoding='utf-8')\n string_to_match = '<p>'\n count = 0\n for line in file:\n if string_to_match in line:\n count += 1\n sqlite_for_ht.CreateTable.update_table(f_1, self.filename, 'number_of_paragraph', count)\n print(datetime.now(), '-', 'number_of_paragraph for', self.filename, 'calculated =', count)\n return None", "def wordCount(document):\n return float(len(document.split(None)))", "def count_paragraphs(all_articles):\n total_paragraphs = 0\n for title in all_articles:\n total_paragraphs += all_articles[title]['content'].count('\\n')\n print(f\"There are {total_paragraphs} paragraphs written.\")", "def testParagraphs(self):\n\n textractor = Textractor(paragraphs=True)\n\n # Extract text as sentences\n paragraphs = textractor(Utils.PATH + \"/article.pdf\")\n\n # Check number of paragraphs is as expected\n self.assertEqual(len(paragraphs), 13)", "def document_count(self):\n raise NotImplementedError", "def find_page_count(document_html):\n search_result = re.search(PAGE_COUNT_REGEX, document_html)\n if search_result:\n return int(search_result.group('page_count')) - 1", "def get_total_number_of_documents(self):\n return self.total_number_of_documents", "def document_count(self):\n return self._json['coredata'].get('document-count', '0')", "def get_page_count(pdf: PDFQuery) -> int:\n\n return resolve1(pdf.doc.catalog['Pages'])['Count']", "def get_number_of_words_in_document(self, document):\n for _set in self.sets:\n if document in _set:\n return self.sets[_set][document]['number_of_words']", "def get_page_count(pdf_file_path):\n pdf = PdfFileReader(file(pdf_file_path, \"rb\"))\n return pdf.getNumPages()", "def char_count(self, doc):\n\n return len(doc)", "def getNumberPages(self, pdf_path: str) -> int:\n f = open(pdf_path, 'rb')\n parser = PDFParser(f)\n document = PDFDocument(parser)\n numPages = resolve1(document.catalog['Pages'])['Count']\n f.close()\n return numPages", "def document_count(self):\n return self.client.scard(self.dbprefix + 'docs')", "def sentence_count(self, doc):\n\n return len(sent_tokenize(doc))", "def getNumod_byerSentences(self): # !!! Need to rewrite this to match graph\n return len(self.__document)", "def _count_pages_pdf(self, bin_pdf):\n pages = 0\n for match in re.compile(r\"/Count\\s+(\\d+)\").finditer(bin_pdf):\n pages = int(match.group(1))\n return pages", "def word_count(self, doc):\n\n return len(self.tokenize_doc_simple(doc))", "def get_num_docs(self):\n return len(self.vocab)", "def pdf_pages(filename):\n try:\n infile = io.open(filename, \"rb\")\n except IOError:\n return 0\n for line in infile:\n m = re.match(br'\\] /Count ([0-9]+)',line)\n if m:\n return int(m.group(1))\n return 0", "def sentence_count(self):\n count = 0\n for line in self.lines:\n if '.' 
in line:\n count += 1\n if count == 0:\n count = 1\n return count\n #return line.count('.')\n #else:\n #return 1", "def count_pages(paper_id):\n url = \"https://{}/iiif/2/biorxiv:{}.pdf/full/500,/0/default.jpg?page=1000\"\n url = url.format(IIIF_HOST, paper_id)\n page = req(url)\n count = re_pg.findall(page)[0]\n return int(count)", "def get_num_of_pages(self):", "def pages_count(self):\n return self._pages_count", "def word_count(self):\n return len(self.text)", "def count_word(doc):\n count = count = 0\n for w in document.split(\" \"):\n count = count + 1\n return count", "def count_documents(path):\n return len(get_document_ids(path))", "def _get_num_proposals(self):\n total_props = self._df['nprops'].sum()\n return total_props", "def doc_count(self):\n\t\treturn self.index.collection.count()" ]
[ "0.75929385", "0.7505203", "0.7096997", "0.7019801", "0.6537894", "0.64200836", "0.64090353", "0.64065015", "0.6323344", "0.62945384", "0.6270998", "0.62400544", "0.6229263", "0.6203288", "0.61751676", "0.61185807", "0.609336", "0.60698295", "0.6019978", "0.60193443", "0.6005438", "0.5978817", "0.5973092", "0.59686655", "0.59628356", "0.59488875", "0.5941347", "0.58916897", "0.5854302", "0.58507127" ]
0.8642088
0
Returns the number of syllables in a word.
def num_of_syllables(self, word): if word.lower() in self.cmu_dict: return len([phoneme for phoneme in self.cmu_dict[word.lower()][0] if phoneme[-1].isdigit()]) # If word is unknown, assume 1 syllable/3 letters (average for English) else: return len(word)//3
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def num_syllables(self, word):\n\n return 1", "def count_syllables(words):\n\n\n count = 0\n\n for word in words:\n word_count = count_syllables_in_word(word)\n count = count + word_count\n return count", "def num_syllables(self, word):\n # TODO: provide an implementation!\n word = word.lower()\n D = self._pronunciations\n #D = nltk.corpus.cmudict.dict()\n if(word not in D.keys()):\n #print word not in CMUDictionary\n return 1\n\n #count stores no of syllables for each pronunciation of the word\n count = []\n\n #for each pronunciation\n for x in D[word]:\n n = 0\n #for each syllable\n for y in x:\n #if vowel sound\n if y[-1].isdigit():\n n = n + 1\n count.append(n)\n # return the pronunciation having least syllables\n return min(count)\n #return min([len([y for y in x if y[-1].isdigit()]) for x in D[word.lower()]])", "def syllable_counter(word):\n letters = [c for c in list(word.lower()) if c.isalpha()]\n\n if len(letters) == 0:\n return 0\n\n if len(letters) in [1, 2]:\n return 1\n\n num_syllables = 0\n last_syllable_pos = 0\n for i, letter in enumerate(letters):\n if letter not in VOWELS:\n if i and letters[i - 1] in VOWELS:\n num_syllables += 1\n last_syllable_pos = i\n syllable = ''\n elif i == len(letters) - 1:\n if letter != 'e':\n num_syllables += 1\n elif i - last_syllable_pos >= 2:\n num_syllables += 1\n\n return num_syllables or 1", "def countsyllables_en(word):\r\n\tif not word:\r\n\t\treturn 0\r\n\r\n\t# Remove final silent 'e'\r\n\tif word[-1] == \"e\":\r\n\t\tword = word[:-1]\r\n\r\n\t# Check for a cached syllable count\r\n\tif word in fallback_cache:\r\n\t\treturn fallback_cache[word]\r\n\r\n\t# Count vowel groups\r\n\tresult = 0\r\n\tprev_was_vowel = False\r\n\tfor char in word:\r\n\t\tis_vowel = char in VOWELS or char == 'y'\r\n\t\tif is_vowel and not prev_was_vowel:\r\n\t\t\tresult += 1\r\n\t\tprev_was_vowel = is_vowel\r\n\r\n\t# Add & subtract syllables\r\n\tfor r in fallback_addsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult += 1\r\n\tfor r in fallback_subsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult -= 1\r\n\r\n\t# Cache the syllable count\r\n\tfallback_cache[word] = result\r\n\r\n\treturn result", "def _get_num_syllables(doc: Doc, min_syllables: int = 1):\n text = (word for word in doc if not word.is_punct and \"'\" not in word.text)\n syllables_per_word = tuple(syllapy.count(word.text) for word in text)\n return sum(c for c in syllables_per_word if c >= min_syllables)", "def syllable_count(word):\n # Count the vowels in the word\n # Subtract one vowel from every dipthong\n count = len(re.findall(r'([aeiouyAEIOUY]+)', word))\n # Subtract any silent vowels\n if len(word) > 2:\n if word[-1] == 'e' and \\\n not is_vowel(word[-2]) and \\\n is_vowel(word[-3]):\n count = count - 1\n return count", "def count_syllables(text):\n\n import re\n\n # Make a list of vowel sounds presenting in the text (converted to lower-case letters)\n syllable_list = re.findall(r'[aiouy]+e*|e(?!d\\b|ly)[aiouye]?|[td]ed|le\\b', text.lower())\n # Find the size of the list\n count = len(syllable_list)\n\n return count", "def total_syllables(target_text):\n\n splited_text = target_text.split()\n count = 0\n for word in splited_text:\n count = count + word_syllables(word)\n return count", "def word_syllables(word):\n\n count = 0\n endings = '!@#$%^&*()_+[]{}:;,.eE\"'+\"'\"\n\n while word[-1] in endings:\n word = word[: -1]\n\n if len(word) <= 3:\n return 1\n\n vows = 'aeiouAEIOU'\n prev_char_vow = False\n for char in word:\n if char in vows:\n if not prev_char_vow:\n count = count + 1\n 
prev_char_vow = True\n else:\n prev_char_vow = False\n\n if word[-1] in 'Yy':\n count = count + 1\n\n return count", "def count_syllables_in_word(word):\n\n count = 0\n\n endings = '!,;.?:'\n last_char = word[-1]\n\n if last_char in endings:\n processed_word = word[0:-1]\n else:\n processed_word = word\n\n\n if len(processed_word) <= 3:\n return 1\n if processed_word[-1] in 'Ee':\n processed_word = processed_word[0:-1]\n\n vowels = 'aeiouAEIOU'\n prev_char_was_vowel = False\n\n for char in processed_word:\n if char in vowels:\n if not prev_char_was_vowel:\n count += 1\n prev_char_was_vowel = True\n\n else:\n prev_char_was_vowel = False\n\n if processed_word[-1] in 'yY':\n count += 1\n \n\n return count", "def num_syllables(self, word):\n \"\"\"\n using the logic of vowel counting, count all vowels in the pronunciations\n \"\"\"\n dictionary = self._pronunciations;\n # check if word is present in the CMU dictionary\n if word in dictionary :\n word_pronunciations = dictionary[word.lower()]\n else :\n return 1\n \n vowels = ['A', 'E', 'I', 'O', 'U']\n \n ## find the shorter pronunciation for word\n shorter_arr = [];\n for pronunciation in word_pronunciations :\n if len(pronunciation) > len(shorter_arr) : shorter_arr = pronunciation\n \n num_length = 0\n \n for phoneme in shorter_arr :\n if phoneme[:1] in vowels : num_length += 1\n \n return num_length", "def estimate(word):\n parts = re.split(r'[^aeiouy]+', word)\n valid_parts = []\n\n for part in parts:\n if part != '':\n valid_parts.append(part)\n\n syllables = 0\n\n for p in re_subsyllables:\n if p.match(word):\n syllables -= 1\n\n for p in re_addsyllables:\n if p.match(word):\n syllables += 1\n\n syllables += len(valid_parts)\n\n if syllables <= 0:\n syllables = 1\n\n return syllables", "def get_syllables(word):\n\tif word not in syllable_dict:\n\t\ttry: syllables = wordApi.getHyphenation(word)\n\t\texcept UnicodeEncodeError:\n\t\t\tsyllable_dict[word] = np.NaN\n\t\tif not syllables:\n\t\t\tsyllables = wordApi.getHyphenation(word.lower())\n\t\t\tif not syllables:\n\t\t\t\tsyllables = wordApi.getHyphenation(word.capitalize())\n\t\t\t\tif not syllables:\n\t\t\t\t\tsyllable_dict[word] = np.NaN\n\t\t\t\t\treturn syllable_dict[word]\n\t\tsyllable_dict[word] = len(syllables)\n\treturn syllable_dict[word]", "def count_syllables(book):\n d = dict(cmudict.entries())\n with open(book, 'r') as myfile:\n booky = myfile.read().lower()\n tokenized_book = nltk.word_tokenize(booky)\n\n count = 0\n for word in tokenized_book:\n count += ( nsly(word, d))\n\n return count", "def update_syllable_count(word, syll_count):\n\n syllables = word.split('-')\n for i in range(1, 4):\n for j in range(len(syllables) - i + 1):\n gram = '-'.join(syllables[j: j + i])\n count = syll_count.setdefault(gram, 0)\n syll_count[gram] = count + 1", "def count_syllables(word):\n vowels = \"aeiouy\"\n count = 0\n last_was_vowel = False\n for letter in word:\n found_vowel = False\n for v in vowels:\n if v == letter:\n if not last_was_vowel: count += 1 # don't count diphthongs\n found_vowel = last_was_vowel = True\n break\n if not found_vowel: # If full cycle and no vowel found, set last_was_vowel to false\n last_was_vowel = False\n\n\n if len(word) > 2 and word[-2:] == \"es\" and count > 1: # Remove es - it's \"usually\" silent (?)\n count -= 1\n\n if len(word) > 4 and word[-1:] == \"e\": # remove silent e\n count -= 1\n\n if len(word) > 1 and word[-2:] == \"ee\": # adds 1 for na\n count += 1\n\n if len(word) > 1 and word[-2:] == \"na\": # adds 1 for na\n count += 1\n\n # Check for 
special case words\n special_case = ['eloise','i']\n if word in special_case:\n count += 1\n\n return count", "def countsyllables_nlde(word):\r\n\tresult = 0\r\n\tprev_was_vowel = word[0] in VOWELS\r\n\tfor char in word[1:]:\r\n\t\tis_vowel = char in VOWELS\r\n\t\tif prev_was_vowel and not is_vowel:\r\n\t\t\tresult += 1\r\n\t\tprev_was_vowel = is_vowel\r\n\r\n\tif (len(word) > 1 and word[0] in VOWELS\r\n\t\t\tand word.endswith('e') and not word[-2] in VOWELS):\r\n\t\tresult += 1\r\n\treturn result or 1", "def syllable_counter(string):\n\ti = 0 # index of while loop \n\tcounter = 0 # counter of syllables\n\tvowels = ['a','e','i','o','u','y','e '] # what are vowels\n\tdiphthongs = ['ee', 'ei', 'ea', 'oo', 'oi', 'oy', 'ou', 'ai', 'ie', 'ey', 'ay'] #what are diphthongs\n\tindex = 0 \n\n\twhile string[index] != ' ': # break at space\n\t\tchar = string[index] # look at each letter in string\n\t\tnext_char = string[index+1] # and the letter following\n\t\tif char.isalpha():\n\t\t\tif char in vowels: \n\t\t\t\tif (char + next_char in diphthongs): \n\t\t\t\t\tcounter = counter + 1 # count\n\t\t\t\t\tindex = index + 1 # skips second letter in diphthong\n\t\t\t\telif (char == 'e' and next_char == ' '): # assume if e at end of word, is not syllable\n\t\t\t\t\tpass # don't count\n\t\t\t\telse: \n\t\t\t\t\tcounter = counter + 1 # if it's a solitary vowel, add one to counter\n\t\tindex = index + 1\n\n\treturn counter", "def n_syllables_per_word(\n doc_or_tokens: types.DocOrTokens, *, lang: Optional[str] = None\n) -> tuple[int, ...]:\n if lang is None:\n if isinstance(doc_or_tokens, Doc):\n lang = doc_or_tokens.lang_\n else:\n raise ValueError(\n \"`lang` must be specified when computing n syllables per word \"\n \"from an iterable of tokens\"\n )\n hyphenator = utils.load_hyphenator(lang=lang)\n words = utils.get_words(doc_or_tokens)\n return tuple(len(hyphenator.positions(word.lower_)) + 1 for word in words)", "def number_syllables(self):\n return len(self.array_form)", "def n_syllables(doc_or_tokens: types.DocOrTokens, *, lang: Optional[str] = None) -> int:\n # docs are hashable, so we can leverage the lru cache as-is\n if isinstance(doc_or_tokens, Doc):\n nspw = n_syllables_per_word(doc_or_tokens, lang=lang)\n # otherwise, let's get an iterable of words but cast it to a hashable tuple\n # so we can leverage the lru cache on this and related calls in, say, n_long_words\n else:\n words = utils.get_words(doc_or_tokens)\n nspw = n_syllables_per_word(tuple(words), lang=lang)\n return sum(nspw)", "def n_polysyllable_words(\n doc_or_tokens: types.DocOrTokens,\n *,\n lang: Optional[str] = None,\n min_n_syllables: int = 3,\n) -> int:\n # docs are hashable, so we can leverage the lru cache as-is\n if isinstance(doc_or_tokens, Doc):\n nspw = n_syllables_per_word(doc_or_tokens, lang=lang)\n # otherwise, let's get an iterable of words but cast it to a hashable tuple\n # so we can leverage the lru cache on this and related calls in, say, n_long_words\n else:\n words = utils.get_words(doc_or_tokens)\n nspw = n_syllables_per_word(tuple(words), lang=lang)\n return itertoolz.count(ns for ns in nspw if ns >= min_n_syllables)", "def n_monosyllable_words(\n doc_or_tokens: types.DocOrTokens, *, lang: Optional[str] = None\n) -> int:\n # docs are hashable, so we can leverage the lru cache as-is\n if isinstance(doc_or_tokens, Doc):\n nspw = n_syllables_per_word(doc_or_tokens, lang=lang)\n # otherwise, let's get an iterable of words but cast it to a hashable tuple\n # so we can leverage the lru cache on this and related calls 
in, say, n_long_words\n else:\n words = utils.get_words(doc_or_tokens)\n nspw = n_syllables_per_word(tuple(words), lang=lang)\n return itertoolz.count(ns for ns in nspw if ns == 1)", "def num_syls(syls):\n\treturn len([c for c in syls if c in ['0','1','2']])", "def count(word):\n\n return len(word)", "def syll_over_text(data_word):\n\n step = 200\n y = []\n temp_syll = []\n\n for count, word in enumerate(data_word, 1):\n\n temp_syll.append(textstat.syllable_count(word))\n\n if count >= step:\n y.append(sum(temp_syll)/len(temp_syll))\n temp_syll = temp_syll[1:]\n\n x = range(step,len(y)+step)\n return x,y", "def getWordCharCount(w):\r\n rus = len(re.findall(r\"[а-я]\",w))\r\n eng = len(re.findall(r\"[a-z]\",w))\r\n c = len(w) \r\n return c, rus, eng", "def count(self, word):\n pass", "def word_count(self):\n from collections import Counter\n counts = Counter(self._replace_non_alnum().split())\n return counts" ]
[ "0.8795304", "0.8443985", "0.83697945", "0.81841797", "0.8142234", "0.80679905", "0.8024609", "0.7985044", "0.79726994", "0.7822673", "0.7780589", "0.77729857", "0.77016985", "0.75951684", "0.755582", "0.7286977", "0.71906304", "0.71053594", "0.7016183", "0.698643", "0.691093", "0.69043154", "0.673425", "0.6482714", "0.64272726", "0.6386275", "0.63249236", "0.6275682", "0.6152689", "0.6119472" ]
0.87104744
1
Finds the maximum value of y in a given range of x
def max_in_range(self, x, y, low, high): data = np.vstack((x,y)) y_values = data[1][np.logical_and(low < data[0], data[0] < high)] x_values = data[0][np.logical_and(low < data[0], data[0] < high)] index_max_y = y_values.argmax() max_y = y_values[index_max_y] max_x = x_values[index_max_y] return max_x, max_y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def maxx(x, y):\n if x >= y:\n return x\n else:\n return y", "def maximum(x, y):\r\n # see decorator for function body\r", "def d_max(x, y):\n axis = np.argmax(x.shape)\n return np.max(np.array([x, y]), axis=axis)", "def getMaxima(x, y):\n# mx_x = (np.abs(np.min(x)) + np.max(x)) / 2\n# mx_y = (np.abs(np.min(y)) + np.max(y)) / 2\n# \n mx_x = np.max(x)\n mx_y = np.max(y)\n return mx_x, mx_y", "def max(x, y):\n x[:] = np.maximum(x[:], y[:])\n return x", "def find_max_numb(x,y):\n if x > y:\n print(x, \" - is max number.\")\n return x \n else:\n print(y, \" - is max number.\")\n return y", "def _bound(x, min_value, max_value):\n return np.maximum(min_value, np.minimum(x, max_value))", "def maximum(x):\n return np.maximum(x, 0)", "def find_max_f():\n fmax = fmin(g, 2)\n return fmax[0]", "def max_y_arg(self):\n return max((self(0).y,0), (self(1).y,1))[1]", "def maximum ( self , xmin = None , xmax = None , x0 = None ) :\n if xmin is None : xmin = self.xminmax()[0]\n if xmax is None : xmax = self.xminmax()[1]\n if self.xminmax() :\n xmin = max ( xmin , self.xminmax()[0] )\n xmax = min ( xmax , self.xminmax()[1] )\n\n if ymin is None : ymin = self.yminmax()[0]\n if ymax is None : ymax = self.yminmax()[1]\n if self.yminmax() :\n ymin = max ( ymin , self.yminmax()[0] )\n ymax = min ( ymax , self.yminmax()[1] )\n \n if not x0 : x0 = 0.5 * ( xmin + xmax ) , 0.5 * ( ymin + ymax )\n\n if not xmin <= x0[0] <= xmax :\n self.error(\"Wrong xmin/x0[0]/xmax: %s/%s/%s\" % ( xmin , x0[0] , xmax ) )\n\n if not ymin <= x0[1] <= ymax : \n self.error(\"Wrong ymin/x0[1]/ymax: %s/%s/%s\" % ( ymin , x0[1] , ymax ) )\n\n from ostap.math.minimize import sp_maximum_2D\n return sp_maximum_2D ( self ,\n xmin , xmax ,\n ymin , ymax , x0 )", "def y_max(self) -> ir.FloatingValue:\n return ops.GeoYMax(self).to_expr()", "def max_step(x, y):\n\n alpha = 1.0\n index = 0\n \n for i, (xi, yi) in enumerate(zip(x, y)):\n if xi == yi: continue\n value = -xi / (yi - xi)\n if alpha > value > 0:\n alpha = value\n index = i\n\n return index, alpha\n \n # func = lambda (ix, x), (iy, y): (iy, y) if (y > 0 and y < x) else (ix, x)\n # return reduce(func, enumerate(crossing(x, y)), (0, 1.0) )", "def y_max(self):\n return self.get_max_value(self.Y_INDEX)", "def max(x):\n pass", "def max_point(self):\n x = self.max(0).idxmax()\n y = self.loc[:, x].idxmax()\n return x, y", "def Find_Max_nointerp(x,y,minval=0.5):\n index = np.arange(x.shape[0])\n yy = y.copy()\n yy[yy>minval] = np.NAN\n #yy = 1-yy \n yy = np.diff(yy) # on derive\n yy = np.sign(yy) # on recupp le signe de la derivee\n idx = np.diff(yy)<0 # on reccup les point pour lesquels la derivee change de signe\n index = index[idx]\n index = index+1 # la diffrentiation décale d'un point vers la droite\n pics_pos,pics_value = x[index],y[index] # longueur d'onde des pics\n return pics_pos,pics_value", "def getYmax(self):\n return max(self.p1.y, self.p2.y)", "def _max_in_bounds(self, max):\n if max >= self.valmax:\n if not self.closedmax:\n return self.val[1]\n max = self.valmax\n\n if max <= self.val[0]:\n max = self.val[0]\n return self._stepped_value(max)", "def Find_Max_nointerp2(x,y,minval=0, maxval=1):\n index = np.arange(x.shape[0]-2)\n yy = y.copy()\n yy[yy<minval] = np.NAN\n yy[yy>maxval] = np.NAN\n #yy = 1-yy \n yy = np.diff(yy) # on derive\n yy = np.sign(yy) # on recupp le signe de la derivee\n idx = np.diff(yy)<0 # on reccup les point pour lesquels la derivee change de signe\n index = index[idx]\n index = index+1 # la diffrentiation décale d'un point vers la droite\n 
pics_pos,pics_value = x[index],y[index] # longueur d'onde des pics\n return pics_pos,pics_value", "def argmax(x):\n def op(a, b):\n comp = (a[1] > b[1])\n return comp.if_else(a[0], b[0]), comp.if_else(a[1], b[1])\n return tree_reduce(op, enumerate(x))[0]", "def argmax(x):\n def op(a, b):\n comp = (a[1] > b[1])\n return comp.if_else(a[0], b[0]), comp.if_else(a[1], b[1])\n return tree_reduce(op, enumerate(x))[0]", "def find_max(self):\n\n max_x = -10\n max_y = -10\n k = len(self.__col_lista)\n for i in range(k):\n x, y = self.__col_lista[i]\n if x > max_x:\n max_x = x\n if y > max_y:\n max_y = y\n return max_x, max_y", "def find_max_bin(self):\n x = self.local['clip']\n midrange = x[int(len(x)*0.2):int(len(x)*.2 + int(len(x)*.5))]\n self.max_bin = max(midrange)", "def x_y_coor_min_max(x_y_coor):\n\tx_range = [np.min(x_y_coor[\"X\"]),np.max(x_y_coor[\"X\"])]\n\ty_range = [np.min(x_y_coor[\"Y\"]),np.max(x_y_coor[\"Y\"])]\n\treturn x_range, y_range", "def argmax(module, x, axes=None):\n return module.argmax(x, axes)", "def find_max(trajectory):\n x = trajectory.s\n y = trajectory.i\n yt = np.abs(y - max(y))\n yt = yt < 1e-5\n max_idx = np.where(yt == True)[0]\n max_idx = max(max_idx)\n return [x[max_idx], y[max_idx]]", "def __get_max_peak(x_value, raw_values):\n\n\n raw_values_index = raw_values[raw_values[\"m/z\"] == x_value].index[0]\n\n value, index = float(raw_values.loc[raw_values_index - 5, \"intensity_normalized\"]), raw_values_index - 5\n\n for z in range(-5, 15):\n if float(raw_values.loc[raw_values_index + z, \"intensity_normalized\"]) > value:\n value, index = float(raw_values.loc[raw_values_index + z, \"intensity_normalized\"]), raw_values_index + z\n return value", "def maxx(self):\n return self.__maxx", "def fitMax(x,y,n=2):\n #-- position of max\n kmax = y.argmax()\n\n #-- 5 points around max\n k = range(kmax-n, kmax+n)\n k = np.array(k)\n #-- slide if to close to the edges\n if k.max()>(len(y)-2):\n k -= (len(y)-k.max())+10\n #print ' ->', k\n if k.min()<0:\n #print 'CORR:', k, '(', len(y), ')'\n k -= k.min()\n #print ' ->', k\n #-- fit poly #2\n c = np.polyfit(x[k], y[k], 2)\n xmax = np.clip(-c[1]/(2*c[0]), x.min(), x.max())\n return xmax" ]
[ "0.81750053", "0.7659551", "0.7405532", "0.73701066", "0.71251947", "0.70628524", "0.7052693", "0.7033928", "0.69642556", "0.6944658", "0.690641", "0.6903287", "0.68960166", "0.6878574", "0.6813649", "0.6787498", "0.6747394", "0.6733495", "0.6732106", "0.6720992", "0.67063314", "0.67063314", "0.6697194", "0.66956496", "0.66936845", "0.6683172", "0.6651438", "0.6646509", "0.6638271", "0.663543" ]
0.7968292
1
Returns a list of new 2theta values given a list of d_values and a wavelength via Bragg's law
def bragg_law(self, d_list, wavelength): new_twotheta = [] for d in d_list: new_twotheta.append(2*math.degrees(np.arcsin(wavelength/(2*d)))) return new_twotheta
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_phase_law(N, d, wavelength, phi):\r\n phase_law = []\r\n for n in range(N):\r\n phase_law.append(-2 * np.pi * n * d / wavelength * np.sin(phi))\r\n return phase_law", "def getVals(cdli):\n \n \n swh = calcOutProd(np.reshape(cdli.datain_h, (cdli.datain_h.shape[0],1)), np.reshape(cdli.wh, (1, cdli.wh.shape[0])))\n swl = calcOutProd(np.reshape(cdli.datain_l, (cdli.datain_l.shape[0],1)), np.reshape(cdli.wl, (1, cdli.wl.shape[0]))) \n phi_h = [cdli.wh[i]*cdli.wh[i] for i in range(0,len(cdli.wh))] \n phi_l = [cdli.wl[i]*cdli.wl[i] for i in range(0,len(cdli.wl))]\n \n return [[swh, swl], [phi_h, phi_l]]", "def two_thetas(self):\n return list(self.unit_cell.two_theta(self.miller.indices(),\n self.wavelength, deg=True))", "def res(y):\n phi_s, phi_b, lamb_1, lamb_2 = y\n\n\n mS = 3.0e-4 # Mass of sleeve [kg]\n JS = 5.0e-9 # Moment of inertia of the sleeve [kgm]\n mB = 4.5e-3 # Mass of bird [kg]\n masstotal=mS+mB # total mass\n JB = 7.0e-7 # Moment of inertia of bird [kgm]\n r0 = 2.5e-3 # Radius of the bar [m]\n rS = 3.1e-3 # Inner Radius of sleeve [m]\n hS = 5.8e-3 # 1/2 height of sleeve [m]\n lS = 1.0e-2 # verical distance sleeve origin to spring origin [m]\n lG = 1.5e-2 # vertical distance spring origin to bird origin [m]\n hB = 2.0e-2 # y coordinate beak (in bird coordinate system) [m]\n lB = 2.01e-2 # -x coordinate beak (in bird coordinate system) [m]\n cp = 5.6e-3 # rotational spring constant [N/rad]\n g = 9.81 # [m/s^2]\n\n return scipy.array([(mS + mB) * g + lamb_2,\n cp * (phi_b - phi_s) - mB * lS * g - hS * lamb_1 - rS * lamb_2,\n cp * (phi_s - phi_b) - mB * lG * g,\n rS - r0 + hS * phi_s])", "def yl_derivs(self,y,s):\n dx_dPhi = np.cos(y[2])\n dz_dPhi = np.sin(y[2])\n dPhi_ds = 2 - self.beta*y[1] - np.sin(y[2])/y[0]\n dS_ds = 1.\n dV_ds = np.abs(dx_dPhi * dx_dPhi) * (dz_dPhi) * np.pi\n return [dx_dPhi, dz_dPhi, dPhi_ds,dS_ds,dV_ds]", "def doppler(self, lmdas):\n lmda_0 = 656.3\n velocities = []\n for lmda in lmdas:\n velocities.append((lmda - lmda_0)*constants.c/lmda_0)\n return np.array(velocities)", "def grad(theta, X, y, lambda_):\n # ... 
dopolnite (naloga 1, naloga 2)\n\n l = []\n for i, e in enumerate(theta):\n l.append(1 / len(y) * sum([(h(x, theta) - yi) * x[i] for x, yi in zip(X, y)]) + 2 * lambda_ * e)\n\n return np.array(l)", "def sample_values(self, positions, gibbs):\n \"\"\"Sample important values\"\"\"\n\n if gibbs:\n self.local_energy = self.h.local_energy_gibbs(positions)\n self.accumulate_energy += self.h.local_energy_gibbs(positions)\n self.accumulate_energy_sq += self.local_energy*self.local_energy\n gradient_wf_a = 0.5*self.w.gradient_wavefunction_a(positions)\n gradient_wf_b = 0.5*self.w.gradient_wavefunction_b(positions)\n gradient_wf_W = 0.5*self.w.gradient_wavefunction_W(positions)\n else:\n self.local_energy = self.h.local_energy(positions)\n self.accumulate_energy += self.h.local_energy(positions)\n self.accumulate_energy_sq += self.local_energy*self.local_energy\n gradient_wf_a = self.w.gradient_wavefunction_a(positions)\n gradient_wf_b = self.w.gradient_wavefunction_b(positions)\n gradient_wf_W = self.w.gradient_wavefunction_W(positions)\n # self.local_energy = self.h.local_energy_numerical(positions)\n # self.accumulate_energy += self.h.local_energy_numerical(positions)\n # gradient_wf_a = np.zeros(self.w.M)\n # gradient_wf_b = np.zeros(self.w.N)\n # gradient_wf_W = np.zeros((self.w.M, self.w.N))\n\n self.accumulate_psi_term_a += gradient_wf_a\n self.accumulate_psi_term_b += gradient_wf_b\n self.accumulate_psi_term_W += gradient_wf_W\n self.accumulate_both_a += gradient_wf_a*self.local_energy\n self.accumulate_both_b += gradient_wf_b*self.local_energy\n self.accumulate_both_W += gradient_wf_W*self.local_energy", "def orbit(self, dart, list_of_alpha_value):\r\n result = []\r\n toprocess = [dart]\r\n\r\n\r\n while len(toprocess) >0:\r\n d = toprocess.pop()\r\n if not( d in result):\r\n result.append(d)\r\n for i in list_of_alpha_value:\r\n toprocess.append(self.alphas[i][d])\r\n return result", "def govardovskii2000_template(\n wavelengths: np.ndarray,\n alpha_max: Union[float, np.ndarray],\n A_alpha: Union[float, np.ndarray] = 69.7,\n a_alpha1: Union[float, np.ndarray] = 0.8795,\n a_alpha2: Union[float, np.ndarray] = 0.0459,\n a_alpha3: Union[float, np.ndarray] = 300.0,\n a_alpha4: Union[float, np.ndarray] = 11940.0,\n B_alpha: Union[float, np.ndarray] = 28.0,\n b_alpha: Union[float, np.ndarray] = 0.922,\n C_alpha: Union[float, np.ndarray] = -14.9,\n c_alpha: Union[float, np.ndarray] = 1.104,\n D_alpha: Union[float, np.ndarray] = 0.674,\n A_beta: Union[float, np.ndarray] = 0.26,\n beta_max1: Union[float, np.ndarray] = 189.0,\n beta_max2: Union[float, np.ndarray] = 0.315,\n d_beta1: Union[float, np.ndarray] = -40.5,\n d_beta2: Union[float, np.ndarray] = 0.195,\n) -> np.ndarray:\n x_alpha = (wavelengths / alpha_max) ** -1\n a_alpha = a_alpha1 + a_alpha2 * np.exp(-((alpha_max - a_alpha3) ** 2) / a_alpha4)\n\n alpha_band = (\n np.exp(A_alpha * (a_alpha - x_alpha))\n + np.exp(B_alpha * (b_alpha - x_alpha))\n + np.exp(C_alpha * (c_alpha - x_alpha))\n + D_alpha\n ) ** -1\n\n beta_max = beta_max1 + beta_max2 * alpha_max\n d_beta = d_beta1 + d_beta2 * alpha_max\n beta_band = np.exp(-(((wavelengths - beta_max) / d_beta) ** 2))\n\n return alpha_band + A_beta * beta_band", "def integrate(l_tr_val, ind_bs):\n\n phi1max = 360. # max phi1 angle (deg) for integration domain\n phimax = 90. # max phi angle (deg) for integration domain\n phi2max = 60. # max phi2 angle (deg) for integration domain\n inc = 3. 
# degree increment for euler angle generation\n l_tr_cal = 8 # truncation level in the l index for the GSH\n\n indxvec = gsh.gsh_basis_info()\n N_L_cal = np.sum(indxvec[:, 0] <= l_tr_cal)\n N_L_val = np.sum(indxvec[:, 0] <= l_tr_val)\n N_L = np.max([N_L_cal, N_L_val])\n\n euler, n_tot = euler_grid_center(inc, phi1max, phimax, phi2max)\n\n \"\"\"Generate X\"\"\"\n X = gsh.gsh_eval(euler, np.arange(N_L))/(2*indxvec[:N_L, 0]+1)\n\n \"\"\"Generate Y (test function)\"\"\"\n\n np.random.seed(141)\n\n normvec = (2*indxvec[:N_L, 0]+1)**3\n bval = (np.random.normal(scale=1.0, size=N_L_cal)**3)/normvec\n\n Y = np.zeros(n_tot, dtype='complex128')\n for ii in xrange(N_L_cal):\n Y += bval[ii]*X[:, ii]\n\n Y = Y.real\n\n \"\"\"Perform the integration for the GSH coefficients\"\"\"\n coef = np.zeros(N_L_val, dtype='complex128')\n\n # domain_eul_sz is the integration domain in radians\n domain_sz = phi1max*phimax*phi2max*(np.pi/180.)**3\n # full_eul_sz is the size of euler space in radians\n full_sz = (2*np.pi)*(np.pi)*(2*np.pi)\n eul_frac = domain_sz/full_sz\n fzsz = 1./(eul_frac*8.*np.pi**2)\n bsz = domain_sz/n_tot\n\n for ii in xrange(N_L_val):\n\n l = indxvec[ii, 0]\n tmp = (2*l+1)*np.sum(Y*X[:, ii].conj()*np.sin(euler[:, 1]))*bsz*fzsz\n coef[ii] = tmp\n\n \"\"\"check accuracy of GSH representation\"\"\"\n\n Y_gsh = np.zeros(n_tot, dtype='complex128')\n for ii in xrange(N_L_val):\n Y_gsh += coef[ii]*X[:, ii]\n\n Y_gsh = Y_gsh.real\n\n error_gsh = np.abs(Y_gsh - Y)\n\n print \"\\nGSH basis representation errors\"\n print \"mean error: %s\" % np.mean(error_gsh)\n print \"std of error: %s\" % np.std(error_gsh)\n print \"max error: %s\" % np.max(error_gsh)\n\n \"\"\"Perform the integration for the indicator basis coefficients\"\"\"\n n_p1_ind = phi1max/ind_bs\n n_P_ind = phimax/ind_bs\n n_p2_ind = phi2max/ind_bs\n\n ysum = np.zeros((n_p1_ind, n_P_ind, n_p2_ind))\n ycount = np.zeros((n_p1_ind, n_P_ind, n_p2_ind))\n\n N_ind = ysum.size\n\n ind_bs_r = ind_bs*np.pi/180.\n\n for ii in xrange(np.int64(n_tot)):\n eset = np.int64(np.floor(euler[ii, :]/ind_bs_r))\n ysum[eset[0], eset[1], eset[2]] += Y[ii]\n ycount[eset[0], eset[1], eset[2]] += 1\n\n y_integrate = ysum/ycount\n del ysum, ycount\n\n Y_ind = np.zeros(n_tot, dtype='float64')\n\n for ii in xrange(np.int64(n_tot)):\n eset = np.int64(np.floor(euler[ii, :]/ind_bs_r))\n Y_ind[ii] = y_integrate[eset[0], eset[1], eset[2]]\n del y_integrate\n\n \"\"\"check accuracy of indicator function representation\"\"\"\n\n error_ind = np.abs(Y_ind - Y)\n\n print \"\\nindicator basis representation errors\"\n print \"mean error: %s\" % np.mean(error_ind)\n print \"std of error: %s\" % np.std(error_ind)\n print \"max error: %s\" % np.max(error_ind)\n\n \"\"\" Plot the regression results \"\"\"\n\n phi2_u = np.unique(euler[:, 2])\n\n ang_sel = euler[:, 2] == phi2_u[np.int64(len(phi2_u)/2.)]\n\n plt.figure(figsize=[8, 8])\n\n vmin = np.min([Y, Y_gsh, Y_ind])\n vmax = np.max([Y, Y_gsh, Y_ind])\n\n x1, x2, z = pre_surf(euler[ang_sel, 0], euler[ang_sel, 1],\n Y[ang_sel], inc)\n\n plt.subplot(311)\n ax = plt.imshow(z.T, interpolation='none', cmap='magma',\n vmin=vmin, vmax=vmax,\n extent=[0, 2*np.pi, 0, np.pi/2.])\n plt.colorbar(ax)\n plt.title(\"Reference ODF slice\")\n plt.xlabel(\"$\\phi1$\")\n plt.ylabel(\"$\\Phi$\")\n\n x1, x2, z = pre_surf(euler[ang_sel, 0], euler[ang_sel, 1],\n Y_gsh[ang_sel], inc)\n\n plt.subplot(312)\n ax = plt.imshow(z.T, interpolation='none', cmap='magma',\n vmin=vmin, vmax=vmax,\n extent=[0, 2*np.pi, 0, np.pi/2.])\n plt.colorbar(ax)\n plt.title(\"ODF 
slice with %s GSH bases\" % N_L_val)\n plt.xlabel(\"$\\phi1$\")\n plt.ylabel(\"$\\Phi$\")\n\n x1, x2, z = pre_surf(euler[ang_sel, 0], euler[ang_sel, 1],\n Y_ind[ang_sel], inc)\n\n plt.subplot(313)\n ax = plt.imshow(z.T, interpolation='none', cmap='magma',\n vmin=vmin, vmax=vmax,\n extent=[0, 2*np.pi, 0, np.pi/2.])\n plt.colorbar(ax)\n plt.title(\"ODF slice with %s indicator bases\" % N_ind)\n plt.xlabel(\"$\\phi1$\")\n plt.ylabel(\"$\\Phi$\")\n\n plt.show()", "def derive_cardelli(wavelength, Rv):\n x = 1.0 / np.array(wavelength)\n\n # check for applicability\n if (np.min(x) < 0.3):\n print( 'wavelength is longer than applicable range for Cardelli law')\n return None\n\n if (np.max(x) > 8.0):\n print( 'wavelength is shorter than applicable range for Cardelli law')\n return None\n \n # Set up some arrays for coefficients that we will need\n a = np.zeros(len(x), dtype=float)\n b = np.zeros(len(x), dtype=float)\n\n y = x - 1.82\n\n # Calculate coefficients for long wavelengths (low wavenumber)\n # Wavenumger <= 1.1 (Eq. 2a, 2b)\n idx = np.where(x <= 1.1)[0]\n a[idx] = 0.574 * x[idx] ** 1.61\n b[idx] = -0.527 * x[idx] ** 1.61\n\n # Calculate coefficients for intermediate wavelengths\n # 1.1 < wavenumber <= 3.3 (Eq. 3a, 3b)\n idx = np.where((x > 1.1) & (x <= 3.3))[0]\n yy = y[idx]\n a[idx] = 1 + (0.17699 * yy) - (0.50447 * yy ** 2) - \\\n (0.02427 * yy ** 3) + (0.72085 * yy ** 4) + \\\n (0.01979 * yy ** 5) - (0.77530 * yy ** 6) + \\\n (0.32999 * yy ** 7)\n b[idx] = (1.41338 * yy) + (2.28305 * yy ** 2) + \\\n (1.07233 * yy ** 3) - (5.38434 * yy ** 4) - \\\n (0.62251 * yy ** 5) + (5.30260 * yy ** 6) - \\\n (2.09002 * yy ** 7)\n\n # Calculate the long wavelength\n # 3.3 < wavenumber < 5.9 (Eq. 4a, 4b)\n idx = np.where((x > 3.3) & (x < 5.9))[0]\n xx = x[idx]\n a[idx] = 1.752 - (0.316 * xx) - (0.104/((xx - 4.67) ** 2 + 0.341))\n b[idx] = -3.090 + (1.825 * xx) + (1.206/((xx - 4.62) ** 2 + 0.263))\n\n # Calculate the longest wavelength\n # 5.9 <= wavenumber (Eq. 4a, 4b)\n idx = np.where(x >= 5.9)[0]\n xx = x[idx]\n a[idx] = 1.752 - (0.316 * xx) - (0.104/((xx - 4.67) ** 2 + 0.341)) + \\\n (-0.04473 * (xx - 5.9) ** 2) - (0.009779 * (xx - 5.9) ** 3)\n b[idx] = -3.090 + (1.825 * xx) + (1.206/((xx - 4.62) ** 2 + 0.263)) + \\\n (0.2130 * (xx - 5.9) ** 2) + (0.1207 * (xx - 5.9) ** 3)\n\n # A(lam) / A(V), from Eq. 
1\n extinction = a + b/Rv\n\n # Now, want to produce A_lambda / AKs, to match other laws\n k_ind = np.where(abs(x-0.46) == min(abs(x-0.46)))\n Aks_Av = a[k_ind] + b[k_ind]/Rv # Aks / Av\n Av_Aks = 1.0 / Aks_Av # Av / Aks\n \n output = extinction * Av_Aks # (A(lamb) / Av) * (Av / Aks) = (A(lamb) / Aks)\n\n return output", "def spectral_laplace(x_values, dd_math_function, sigma, ua, ub):\n B = []\n for x in x_values:\n B += [-dd_math_function(x, sigma)]\n B[0] = ua\n B[len(x_values) - 1] = ub\n #B ferdig\n A=[]\n for i in range (len(x_values)):\n a = []\n for j in range (len(x_values)):\n if i == 0 or i == len(x_values) - 1:\n a.append(lagrange(x_values, j, x_values[i]))\n else:\n a.append(dd_lagrange(x_values, j, x_values[i]))\n A.append(a)\n #A ferdig\n return np.linalg.solve(A, B)", "def update_thetas(self, X, y, theta_list, bias, learning_rate):\n total_samples = len(X)\n theta_derivative = [0]*len(theta_list)\n bias_derivative = 0\n\n for i in range(total_samples): #update the thetas and bias by gradient descent\n hypothesis = 0\n\n hypothesis = hypothesis+bias\n\n hypothesis += np.matmul(X[i], np.array(theta_list).T)\n\n sigmoidhypothesis = 1./(1.+np.exp(-hypothesis))\n\n sigmoidhypothesis = sigmoidhypothesis - y[i]\n\n bias_derivative += sigmoidhypothesis\n\n\n feature_index=0\n for feature_index in range(len(theta_list)):\n theta_derivative[feature_index] += sigmoidhypothesis*X[i][feature_index]\n\n \n bias -= (bias_derivative/total_samples) * learning_rate\n\n for j in range(len(theta_list)):\n theta_list[j] -= (theta_derivative[j]/total_samples) * learning_rate\n\n return bias, theta_list", "def get_velocity_doublet(strength, xd, yd, X, Y):\n if isinstance(xd, list):\n u = [0.0] * len(xd)\n v = [0.0] * len(yd)\n for i in range(0, len(xd)):\n u[i] = - strength[i] / (2 * math.pi) * ((X - xd[i]) ** 2 - (Y - yd[i]) ** 2) / ((X - xd[i]) ** 2 + (\n Y - yd[i]) ** 2) ** 2\n v[i] = - strength[i] / (2 * math.pi) * 2 * (X - xd[i]) * (Y - yd[i]) / ((X - xd[i]) ** 2 + (\n Y - yd[i]) ** 2) ** 2\n u = sum(u)\n v = sum(v)\n else:\n u = - strength / (2 * math.pi) * ((X - xd) ** 2 - (Y - yd) ** 2) / ((X - xd) ** 2 + (Y - yd) ** 2) ** 2\n v = - strength / (2 * math.pi) * 2 * (X - xd) * (Y - yd) / ((X - xd) ** 2 + (Y - yd) ** 2) ** 2\n\n return u, v", "def f_doublet(x, c, i1, i2, sigma_gal, z, sigma_inst): \n dblt_mu = [3727.092, 3729.875] # the actual non-redshifted wavelengths\n l1 = dblt_mu[0] * (1+z)\n l2 = dblt_mu[1] * (1+z)\n\n sigma = np.sqrt(sigma_gal**2 + sigma_inst**2)\n\n norm = (sigma*np.sqrt(2*np.pi))\n term1 = ( i1 / norm ) * np.exp(-(x-l1)**2/(2*sigma**2))\n term2 = ( i2 / norm ) * np.exp(-(x-l2)**2/(2*sigma**2)) \n return (c*x + term1 + term2)", "def calc_muj_list(theta_list) :\n return np.sin(theta_list)**2 / (2*theta_list - np.sin(2*theta_list))", "def _grad(self, values):\n if np.min(values[0]) <= 0 or np.min(values[1]) <= 0:\n # Non-differentiable.\n return [None, None]\n else:\n div = values[0]/values[1]\n grad_vals = [np.log(div), 1 - div]\n grad_list = []\n for idx in range(len(values)):\n rows = self.args[idx].size\n cols = self.size\n grad_list += [kl_div.elemwise_grad_to_diag(grad_vals[idx],\n rows, cols)]\n return grad_list", "def vec_coords(label_coords, LAMBDA=1, spacing=1):\n\n #LAMBDA = 1\n #SPACING = 8\n SPACING = spacing\n \n coords_pial = np.array(label_coords[0])\n coords_gwb = np.array(label_coords[6]) #[::SPACING]\n\n\n ##### Normal Vector Pial\n #derivatives and velocity\n x_der = np.gradient(coords_pial[:,0])\n y_der = np.gradient(coords_pial[:,1]) #col slicing, 
R, np.array, [:,0]\n velo = np.array([[x_der[i], y_der[i]] for i in range(x_der.size)])\n\n #displacement, tangent\n displ = np.sqrt( x_der * x_der + y_der * y_der ) #speed, time\n tang = np.array([1/displ] *2 ).transpose() * velo\n\n #outward point surface normal, from tang flip, make first neg, opv\n pial_normal = [ [y*-1, x] for x, y in zip(tang[:,0], tang[:,1]) ]\n\n\n ##### Normal Vector GWB\n #derivatives and velocity\n x_der = np.gradient(coords_gwb[:,0])\n y_der = np.gradient(coords_gwb[:,1]) \n velo = np.array([[x_der[i], y_der[i]] for i in range(x_der.size)])\n\n #displacement, tangent\n displ = np.sqrt( x_der * x_der + y_der * y_der ) \n tang = np.array([1/displ] *2 ).transpose() * velo\n\n #outward point surface normal, owv\n gwb_normal = [ [y*-1, x] for x, y in zip(tang[:,0], tang[:,1]) ]\n\n\n\n plot_coords_lst = []\n used_energy_lst = []\n ##### FIND ENERGY\n # for each coord on the pial surface, x\n for x in range(len(coords_pial)):\n pial = coords_pial[x]\n \n #find vector pial to gwb, unit length, tv\n if x == 0:\n min_energy = []\n normal_term_lst = []\n vec_dist_lst = []\n parallel_term_lst = []\n vec_dist_lst = []\n for v in range(len(coords_gwb)):\n #find vector distance from pial to gwb\n gwb = coords_gwb[v]\n vec_pial_gwb = np.array(gwb) - np.array(pial)\n vec_mag = np.array(vec_pial_gwb[0]**2 + vec_pial_gwb[1]**2)\n unit_vec_dist = vec_pial_gwb/vec_mag\n vec_dist_lst.append(unit_vec_dist)\n\n #find dot product for tv and owhite, tv and opial\n dot_prod1 = np.dot(vec_dist_lst[v], gwb_normal[v])\n dot_prod2 = np.dot(vec_dist_lst[v], pial_normal[x])\n\n #normal term for each v\n normal_term_v = (1 - np.abs(dot_prod1)) + (1 - np.abs(dot_prod2))\n normal_term_lst.append(normal_term_v)\n\n #parallel term for each v \n # if x == 0:\n \n #find dot product, using self distance\n dot_prod3 = np.dot(vec_dist_lst[v], vec_dist_lst[v])\n parallel_term_v = (1 - np.abs(dot_prod3))\n parallel_term_lst.append(parallel_term_v)\n \n #energy, no summation\n ind_energy = list(enumerate(np.array([((1-LAMBDA)*n) + (LAMBDA*p) for n, p in \\\n zip(normal_term_lst, parallel_term_lst)]).T))\n \n #find local minima energy\n for i in range(len(ind_energy)):\n curr = ind_energy[i]\n fut = ind_energy[i+1]\n if fut[1] > curr[1]:\n min_energy.append(curr)\n used_energy_lst.append(curr)\n break\n\n # append coordinates to plot straight vector from pial to gwb, min energy\n gwb_idx = min_energy.pop()[0]\n # gwb_idx = min_energy[-1][0]\n plot_coords_lst.append([pial, list(coords_gwb[gwb_idx])])\n\n elif x > 0:\n min_energy = []\n normal_term_lst = []\n vec_dist_lst = []\n parallel_term_lst = []\n vec_dist_lst = []\n \n \n # used_start = int(used_energy_lst[-1][0])+20\n used_start = used_energy_lst[-1][0]\n\n for v in list( range(used_start, len(coords_gwb)-1) ):\n #find vector distance from pial to gwb\n gwb = coords_gwb[v]\n vec_pial_gwb = np.array(gwb) - np.array(pial)\n vec_mag = np.array(vec_pial_gwb[0]**2 + vec_pial_gwb[1]**2)\n unit_vec_dist = vec_pial_gwb/vec_mag\n vec_dist_lst.append(unit_vec_dist)\n\n #find dot product for tv and owhite, tv and opial\n dot_prod1 = np.dot(vec_dist_lst[-1], gwb_normal[v])\n dot_prod2 = np.dot(vec_dist_lst[-1], pial_normal[x])\n\n #normal term for each v\n normal_term_v = (1 - np.abs(dot_prod1)) + (1 - np.abs(dot_prod2))\n normal_term_lst.append(normal_term_v)\n\n #parallel term for each v \n #find dot product, using neighbour vector distance\n knear_vec_dist = np.array(plot_coords_lst[-1][1]) - np.array(plot_coords_lst[-1][0])\n dot_prod3 = 
np.dot(vec_dist_lst[-1], knear_vec_dist)\n parallel_term_v = (1 - np.abs(dot_prod3))\n parallel_term_lst.append(parallel_term_v) \n\n #energy, no summation\n ind_energy = list( enumerate(np.array([ ((1-LAMBDA)*n) + (LAMBDA*p) for n, p in \\\n zip(normal_term_lst, parallel_term_lst)]).T, used_energy_lst[-1][0])) #v\n\n #find local minima energy, and associated coordinate\n for i in range(len(ind_energy)):\n try:\n curr = ind_energy[i]\n fut = ind_energy[i+1]\n except(IndexError):\n continue\n \n if fut[1] > curr[1]:\n min_energy.append(curr)\n used_energy_lst.append(curr)\n # print(\"curr energy = \", curr)\n break\n\n try:\n gwb_idx = min_energy.pop()[0] #+ 20 #atleast deltaX apart\n plot_coords_lst.append([pial, list(coords_gwb[gwb_idx])])\n # print(\"energy coordinates = \", list( map(list, [pial, coords_gwb[gwb_idx]])) )\n except(IndexError):\n continue\n\n\n \"\"\"\n #encourage atleast one space between each end point coordinate\n energy_idx = [i[0] for i in used_energy_lst]\n new_energy_idx = []\n energy_idx_cp = energy_idx.copy()\n\n count = 0\n same_count = 0\n # loop to remove repeat indices, makes list two short\n while count < len(energy_idx):\n energy_concat = []\n i = count\n curr = energy_idx_cp[i]\n if energy_idx_cp[i] not in new_energy_idx:\n new_energy_idx.append(curr)\n same_count = 0\n else: \n energy_idx_cp = energy_idx_cp[:i] + list((np.array(energy_idx_cp[i:]) \\\n + same_count))\n\n same_count+=1\n \n count+=1\n \"\"\"\n\n\n #encourage even space between each end point coordinate\n energy_idx = [i[0] for i in used_energy_lst]\n new_energy_idx = list(map(math.floor , np.linspace(energy_idx[0] , \\\n len(coords_gwb[energy_idx[0]: len(coords_gwb)]), num=len(energy_idx)))) \n\n # new_plot_coords_lst = [[list(i[0]), list(coords_gwb[j])] for i, j in \\\n # zip(plot_coords_lst, new_energy_idx)]\n\n new_plot_coords_lst = []\n for i, j in zip(plot_coords_lst, new_energy_idx):\n try:\n pial_gwb_plot = [list(i[0]), list(coords_gwb[j])]\n new_plot_coords_lst.append(pial_gwb_plot) \n except(IndexError):\n continue\n\n #space vectors according to SPACING var\n new_plot_coords_lst = new_plot_coords_lst[::SPACING] \n\n return(new_plot_coords_lst)", "def simulate_l_b_coverage(Npoints,MW_exclusion=10,ra_range=(-180,180),dec_range=(-90,90),\n output_frame='galactic',radius=None):\n # ----------------------- #\n # -- -- #\n # ----------------------- #\n def _draw_radec_(Npoints_,ra_range_,dec_sin_range_):\n \"\"\"\n \"\"\"\n ra = np.random.random(Npoints_)*(ra_range_[1] - ra_range_[0]) + ra_range_[0]\n dec = np.arcsin(np.random.random(Npoints_)*(dec_sin_range_[1] - dec_sin_range_[0]) + dec_sin_range_[0]) / _d2r\n\n return ra,dec\n\n def _draw_without_MW_(Npoints_,ra_range_,dec_sin_range_,MW_exclusion_,radius_):\n \"\"\"\n \"\"\"\n \n l,b = np.array([]),np.array([])\n while( len(l) < Npoints_ ):\n ra,dec = _draw_radec_(Npoints_ - len(l),ra_range_,dec_sin_range_)\n l_,b_ = ct.radec2gcs(ra,dec)\n\n if radius is not None:\n as_mask = vt.ang_sep(radius[1], radius[2], l_, b_) < radius[0]\n else:\n as_mask = np.ones(len(l_), dtype=bool)\n\n mask = as_mask & (np.abs(b_)>MW_exclusion_)\n if output_frame == 'galactic':\n l = np.concatenate((l,l_[mask]))\n b = np.concatenate((b,b_[mask]))\n else:\n l = np.concatenate((l,ra[mask]))\n b = np.concatenate((b,dec[mask])) \n\n return l,b\n\n # ----------------------- #\n # -- -- #\n # ----------------------- #\n\n if output_frame not in ['galactic','j2000']:\n raise ValueError('output_frame must \"galactic\" or \"j2000\"')\n\n if ra_range[0] < -180 or 
ra_range[1] > 360 or ra_range[0] > ra_range[1]:\n raise ValueError('ra_range must be contained in [-180,360]')\n\n if dec_range[0] < -90 or dec_range[1] > 90 or dec_range[0] > dec_range[1]:\n raise ValueError('dec_range must be contained in [-90,90]')\n\n dec_sin_range = (np.sin(dec_range[0]*_d2r),np.sin(dec_range[1]*_d2r)) \n\n if MW_exclusion > 0. or radius is not None:\n return _draw_without_MW_(Npoints, ra_range, dec_sin_range,\n MW_exclusion, radius)\n else:\n ra,dec = _draw_radec_(Npoints, ra_range, dec_sin_range)\n if output_frame == 'galactic':\n return ct.radec2gcs(ra,dec)\n else:\n return ra,dec", "def two_theta(\n incident_beam: sc.Variable,\n scattered_beam: sc.Variable,\n wavelength: sc.Variable,\n gravity: sc.Variable,\n) -> sc.Variable:\n grav = sc.norm(gravity)\n L2 = sc.norm(scattered_beam)\n\n x_term = cylindrical_x(cyl_x_unit_vector(gravity, incident_beam), scattered_beam)\n x_term *= x_term\n\n y_term = sc.to_unit(wavelength, elem_unit(L2), copy=True)\n y_term *= y_term\n drop = L2**2\n drop *= grav * (m_n**2 / (2 * h**2))\n # Optimization when handling either the dense or the event coord of binned data:\n # - For the event coord, both operands have same dims, and we can multiply in place\n # - For the dense coord, we need to broadcast using non in-place operation\n if set(drop.dims).issubset(set(y_term.dims)):\n y_term *= drop\n else:\n y_term = drop * y_term\n y_term += cylindrical_y(cyl_y_unit_vector(gravity), scattered_beam)\n y_term *= y_term\n\n if set(x_term.dims).issubset(set(y_term.dims)):\n y_term += x_term\n else:\n y_term = y_term + x_term\n out = sc.sqrt(y_term, out=y_term)\n out /= L2\n out = sc.asin(out, out=out)\n return out", "def calc_nuj_list(theta_list) :\n return theta_list / np.sin(2*theta_list)", "def _get_f(couplelist, dpplist, bpm_name, value):\n lst = []\n x = []\n for dpp in dpplist:\n x.append(dpp)\n couplefile = couplelist[dpp]\n lst.append(getattr(couplefile, value)[couplefile.indx[bpm_name]])\n\n lreg = linreg(x, lst)\n\n return lreg[0], lreg[3]", "def _get_values(self) -> ty.List[float]:\r\n ...", "def galaxy():\n rot_ang = 1\n pol_ang = 1\n\n\n time_array = [datetime.datetime(2017, 5, 25, 2, 0),\n datetime.datetime(2017, 5, 26, 7, 0),\n #~ datetime.datetime(2017, 5, 28, 1, 0),\n #~ datetime.datetime(2017, 5, 30, 8, 0),\n datetime.datetime(2017, 6, 4, 2, 0)]\n\n lfdic = {1:{'name':'LI', 'lat':[26,33,19.676], 'long':[97,26,31.174], 't_offset':6.496132851851852},\n 2:{'name':'LII', 'lat':[34,4,43.497], 'long':[107,37,5.819], 't_offset':7.174552203703703},\n 3:{'name':'LIII', 'lat':[38,25,59.0], 'long':[79,50,23.0], 't_offset':5.322648148148148},\n 4:{'name':'LIV', 'lat':[34,12,3.0], 'long':[118,10,18.0], 't_offset':7.87811111111111}}\n lfs = lfdic[4]\n long_radians = (lfs['long'][0] + lfs['long'][1]/60.0 + lfs['long'][2]/3600.0)*np.pi/180.0\n\n LoFASM = station(lfs['name'],lfs['lat'],lfs['long'],FOV_color='b',\n time='',frequency=20.0,one_ring='inner',\n rot_angle=rot_ang,pol_angle=pol_ang)\n innerNS_FOV = 0.61975795698554226 #LoFASM.lofasm.Omega()\n inner_conversion_NS = np.divide((np.power(np.divide(3.0*1.0e8,45.0e6),2)),(innerNS_FOV))\n\n print('Stage 1/2 Done.')\n\n powe = np.multiply(LoFASM.calculate_gpowervslstarray(time_array),inner_conversion_NS)\n power = 10*np.log10(np.array(powe))\n print('Stage 2/2 Done.')\n\n return power", "def get_age_grad(self,renew=False):\n\t\ttry:\n\t\t\tdriv_lat = self['deriv_lat'].value\n\t\t\tdriv_lon = self['deriv_lon'].value\n\t\t\tdriv_msk = 
self['deriv_msk'].value\n\t\texcept:\n\t\t\tself._cal_age_grad()\n\t\tderiv_lat = self['deriv_lat'].value\n\t\tderiv_lon = self['deriv_lon'].value\n\t\tderiv_msk = self['deriv_msk'].value\n\t\tage_lon_Vec = self['age_lon_Vec'].value\n\t\tage_lat_Vec = self['age_lat_Vec'].value\n\t\txx, yy = np.meshgrid(age_lon_Vec, age_lat_Vec) # xx for longitude, yy for latitude\n\t\txx = xx.reshape(xx.size)\n\t\tyy = yy.reshape(yy.size)\n\t\tf_deriv_lat = NearestNDInterpolator(np.column_stack((xx,yy)),deriv_lat.reshape(deriv_lat.size),rescale=False)\n\t\tf_deriv_lon = NearestNDInterpolator(np.column_stack((xx,yy)),deriv_lon.reshape(deriv_lon.size),rescale=False)\n\t\tf_deriv_msk = NearestNDInterpolator(np.column_stack((xx,yy)),deriv_msk.reshape(deriv_msk.size),rescale=False)\n\t\tfor period in self.attrs['prd_arr']:\n\t\t\tgroup = self['%g_sec'%( period )]\n\t\t\tlons_orig = group['lonArr'].value\n\t\t\tlons = lons_orig.reshape(lons_orig.size)\n\t\t\tlats = group['latArr'].value.reshape(lons_orig.size)\n\t\t\tderiv_lat_Arr = f_deriv_lat(np.column_stack((lons,lats))).reshape(lons_orig.shape)\n\t\t\tderiv_lon_Arr = f_deriv_lon(np.column_stack((lons,lats))).reshape(lons_orig.shape)\n\t\t\tderiv_msk_Arr = f_deriv_msk(np.column_stack((lons,lats))).reshape(lons_orig.shape)\n\t\t\tif renew:\n\t\t\t\tdel group['age_deriv_lat_Arr']\n\t\t\t\tdel group['age_deriv_lon_Arr']\n\t\t\t\tdel group['age_deriv_msk_Arr']\n\t\t\tgroup.create_dataset(name='age_deriv_lat_Arr', data=deriv_lat_Arr)\n\t\t\tgroup.create_dataset(name='age_deriv_lon_Arr', data=deriv_lon_Arr)\n\t\t\tgroup.create_dataset(name='age_deriv_msk_Arr', data=deriv_msk_Arr)\n\t\tpass", "def value_at_wavelength(self, *wavelengths: float):\n self.__bounds_check(*wavelengths)\n for w in wavelengths:\n irradiance = float(self.interp(w))\n yield irradiance", "def sh( values ):\n # ECMWF normalizes the spherical harmonic coeffs differently than NCEP.\n # (m=0,n=0 is global mean, instead of sqrt(2)/2 times global mean)\n fld = 2.*values/np.sqrt(2.)\n \n #------SPLITTING IT UP IN AN IMAGARY AND REAL PART--------\n fldr = fld[ 0::2 ] #annenhver verdi fra 0\n fldi = fld[ 1::2 ] #annenhver verdi fra 1\n fldn = np.zeros( fldr.shape, 'F' ) #blir halvparten så stor som orginale fld\n fldn.real = fldr #legges da til i fldn vectoren\n fldn.imag = fldi\n #----------------------------------------------------------\n \n nlons = 360 #Have a feeling it probably is number of values like grid val\n nlats = 1280 #web sais it shourld be 180.. wellwell, seems to work\n s = spharm.Spharmt( nlons, nlats ) \n \n data = s.spectogrd( fldn ) #Hvis nlats = 180, så feiler denne delen pga hvordan formelen fungerer..\n \n lons = ( 360./nlons ) * np.arange( nlons )\n lats = 90.-( 180./( nlats - 1 ) ) * np.arange( nlats )\n lons, lats = np.meshgrid( lons, lats )\n \n #stack grids side-by-side (in longitiudinal direction), so\n # any range of longitudes (between -360 and 360) may be plotted on a world map.\n lons = np.concatenate(( lons - 360, lons ), 1 )\n lats = np.concatenate(( lats, lats ), 1 )\n data = np.concatenate(( data, data ), 1 )\n \n return lats, lons, data", "def d_func(x, y):\n return np.array((2.0 * (x - 1) - 400.0 * x * (y - x**2), 200.0 * (y - x**2)))", "def get_y_logl(self, y_list):" ]
[ "0.5911583", "0.55874676", "0.5475578", "0.5463497", "0.53700536", "0.53584486", "0.5323669", "0.5290384", "0.52637976", "0.52502567", "0.521147", "0.5211158", "0.5162651", "0.5139339", "0.5113516", "0.50975794", "0.5091019", "0.5085691", "0.5038993", "0.5032553", "0.50317675", "0.50234234", "0.5017245", "0.49976712", "0.49948746", "0.49845022", "0.4981448", "0.49778953", "0.4952438", "0.4937593" ]
0.7509082
0
Trigger a new analysis batch given a well-formatted shapefile. Upload the file to the 'file' key in a multipart form. Each polygon/multipolygon feature in the shapefile will have a neighborhood created for it if it doesn't exist, and the job for each neighborhood will immediately be submitted. Each feature in the shapefile should have a "city" and "state" attribute. The "city" attribute maps to Neighborhood.label and "state" maps to Neighborhood.state_abbrev. If the "city" and "state" of an uploaded feature match an existing neighborhood, the existing one will be used and its geom updated with the one in the upload.
def create(self, request, *args, **kwargs):
    file_obj = request.data['file']
    max_trip_distance = request.data.get('max_trip_distance')
    client = boto3.client('s3', config=BotocoreClientConfig(signature_version='s3v4'))
    organization = request.user.organization
    file_name = '{}.zip'.format(str(uuid4()))
    key = get_batch_shapefile_upload_path(organization.name, file_name).lstrip('/')
    response = client.upload_fileobj(file_obj, settings.AWS_STORAGE_BUCKET_NAME, key)
    print(response)
    url = client.generate_presigned_url(
        ClientMethod='get_object',
        Params={'Bucket': settings.AWS_STORAGE_BUCKET_NAME, 'Key': key}
    )
    async_task('pfb_analysis.tasks.create_batch_from_remote_shapefile', url,
               max_trip_distance=max_trip_distance,
               group='create_analysis_batch',
               ack_failure=True)
    return Response({
        'shapefile_url': url,
        'status': 'STARTED'
    }, status=status.HTTP_200_OK)
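A minimal client-side sketch of how this endpoint could be called, to make the multipart contract described in the query concrete. The route, token handling, and function name are assumptions for illustration only; the form field names ('file', 'max_trip_distance') and the {'shapefile_url', 'status'} response shape come from the view above.

import requests

def submit_analysis_batch(shapefile_zip_path, max_trip_distance=None,
                          base_url='https://example.com/api/analysis_batches/',
                          token='<api-token>'):
    # Hypothetical URL and auth scheme; only the form fields mirror the view above.
    data = {}
    if max_trip_distance is not None:
        data['max_trip_distance'] = max_trip_distance
    with open(shapefile_zip_path, 'rb') as fh:
        resp = requests.post(
            base_url,
            headers={'Authorization': 'Token {}'.format(token)},
            files={'file': fh},  # read server-side as request.data['file']
            data=data,           # read server-side via request.data.get('max_trip_distance')
        )
    resp.raise_for_status()
    return resp.json()  # expected: {'shapefile_url': <presigned S3 URL>, 'status': 'STARTED'}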
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self, request, *args, **kwargs):\n form_class = self.get_form_class()\n form = self.get_form(form_class)\n gw_location_file = request.FILES.get('gw_location_file')\n gw_level_file = request.FILES.get('gw_level_file')\n\n if form.is_valid():\n if gw_location_file:\n gw_location_file.seek(0)\n if str(gw_location_file).split('.')[-1] == \"xls\":\n sheet = xls_get(gw_location_file, column_limit=4)\n elif str(gw_location_file).split('.')[-1] == \"xlsx\":\n sheet = xlsx_get(gw_location_file, column_limit=4)\n sheetname = next(iter(sheet))\n records = sheet[sheetname]\n for record in records:\n if record[0].lower() == 'id well':\n continue\n\n point = Point(x=record[3], y=record[2], srid=4326)\n well = GWWell.objects.create(\n gwwellname=record[0],\n gwwelllocation=point,\n gwwelltotallength=record[1]\n )\n\n if gw_level_file:\n gw_level_file.seek(0)\n if str(gw_level_file).split('.')[-1] == \"xls\":\n sheet = xls_get(gw_level_file, column_limit=4)\n elif str(gw_level_file).split('.')[-1] == \"xlsx\":\n sheet = xlsx_get(gw_level_file, column_limit=4)\n sheetname = next(iter(sheet))\n records = sheet[sheetname]\n for record in records:\n if record[0].lower == 'time':\n continue\n\n try:\n well = GWWell.objects.get(gwwellname=record[3])\n time = dateparse.parse_datetime(record[0])\n well_level_log = GWGeologyLog.objects.create(\n phenomenonTime=time,\n resultTime=time,\n gw_level=record[2],\n reference=record[1]\n )\n well.gwwellgeology.add(well_level_log)\n except GWWell.DoesNotExist:\n pass\n pass\n return self.form_valid(form)\n\n else:\n return self.form_invalid(form)", "def upload(state, name, wid, file):\n client = state.api_client\n\n # Get the workspace details\n w_details = helpers.workspace.details(client, wid, name)\n if w_details is None:\n # Can only happen when the name is used and there are no results. Not\n # with the wid option because it would raise a 404 QuetzalAPIException\n raise click.ClickException(f'Workspace named \"{name}\" does not exist.')\n\n file_details = helpers.workspace.upload(client, w_details.id, file)\n click.secho(f'File {file.name} uploaded successfully. 
Its id is {file_details.id}.',\n fg='green')", "def execute(self, parameters, messages):\r\n\t\tin_wikiplace_IRI = parameters[0]\r\n\t\tin_stat_fields = parameters[1]\r\n\t\t# out_location = parameters[2]\r\n\t\t# out_points_name = parameters[3]\r\n\r\n\t\t\r\n\t\tif in_wikiplace_IRI.value:\r\n\t\t\tinputFeatureClassName = in_wikiplace_IRI.valueAsText\r\n\t\t\t# outLocation = out_location.valueAsText\r\n\t\t\t# outFeatureClassName = out_points_name.valueAsText\r\n\t\t\tfieldMergeRuleTest = in_stat_fields.valueAsText\r\n\r\n\t\t\t# messages.addErrorMessage(\"in_stat_fields.values: {0}\".format(in_stat_fields.values))\r\n\t\t\t# messages.addErrorMessage(\"MergeNoFunctionalProperty.relatedTableFieldList: {0}\".format(MergeNoFunctionalProperty.relatedTableFieldList))\r\n\r\n\t\t\t\r\n\r\n\t\t\t\r\n\t\t\t# fieldmappings = in_field_mapping.valueAsText\r\n\r\n\t\t\tlastIndexOFGDB = inputFeatureClassName.rfind(\"\\\\\")\r\n\t\t\tcurrentWorkspace = inputFeatureClassName[:lastIndexOFGDB]\r\n\r\n\t\t\tif currentWorkspace.endswith(\".gdb\") == False:\r\n\t\t\t\tmessages.addErrorMessage(\"Please enter a feature class in file geodatabase for the input feature class.\")\r\n\t\t\t\traise arcpy.ExecuteError\r\n\t\t\telse:\r\n\t\t\t\t# if in_related_table.value:\r\n\t\t\t\tarcpy.env.workspace = currentWorkspace\r\n\t\t\t\t# relatedTableList = UTIL.getRelatedTableFromFeatureClass(inputFeatureClassName)\r\n\t\t\t\t# fieldmappings = arcpy.FieldMappings()\r\n\t\t\t\t# fieldmappings.addTable(inputFeatureClassName)\r\n\t\t\t\t# for relatedTable in relatedTableList:\r\n\t\t\t\t# \tfieldmappings.addTable(relatedTable)\r\n\t\t\t\t# \tfieldList = arcpy.ListFields(relatedTable)\r\n\t\t\t\t# \tfieldName = fieldList[len(fieldList)-1].name\r\n\t\t\t\t# \tarcpy.AddMessage(\"fieldName: {0}\".format(fieldName))\r\n\r\n\r\n\t\t\t\t# fieldmappings.removeFieldMap(fieldmappings.findFieldMapIndex(\"wikiURL\"))\r\n\r\n\t\t\t\t# arcpy.AddMessage(\"fieldmappings: {0}\".format(fieldmappings))\r\n\t\t\t\t# if out_location.value and out_points_name.value:\r\n\t\t\t\t# \tarcpy.FeatureClassToFeatureClass_conversion(inputFeatureClassName, outLocation, outFeatureClassName, \"\", fieldmappings)\r\n\r\n\t\t\t\t# get the ValueTable(fieldName, merge rule, related table full path) \r\n\t\t\t\tfieldMergeRuleFileNameList = []\r\n\r\n\t\t\t\tif fieldMergeRuleTest:\r\n\t\t\t\t\tfieldSplitList = fieldMergeRuleTest.split(\";\")\r\n\t\t\t\t\tfor fieldSplitItem in fieldSplitList:\r\n\t\t\t\t\t\tfieldMergeList = fieldSplitItem.split(\" \", 2)\r\n\t\t\t\t\t\tfieldMergeRuleFileNameList.append(fieldMergeList)\r\n\r\n\t\t\t\tarcpy.AddMessage(\"fieldMergeRuleFileNameList: {0}\".format(fieldMergeRuleFileNameList))\r\n\r\n\t\t\t\tfor fieldMergeRuleFileNameItem in fieldMergeRuleFileNameList:\r\n\t\t\t\t\tappendFieldName = fieldMergeRuleFileNameItem[0]\r\n\t\t\t\t\tmergeRule = fieldMergeRuleFileNameItem[1]\r\n\t\t\t\t\trelatedTableName = fieldMergeRuleFileNameItem[2].replace(\"'\", \"\")\r\n\r\n\t\t\t\t\tnoFunctionalPropertyDict = UTIL.buildMultiValueDictFromNoFunctionalProperty(appendFieldName, relatedTableName)\r\n\t\t\t\t\tif noFunctionalPropertyDict != -1:\r\n\t\t\t\t\t\tUTIL.appendFieldInFeatureClassByMergeRule(inputFeatureClassName, noFunctionalPropertyDict, appendFieldName, relatedTableName, mergeRule)\r\n\r\n\t\t\t\t# UTIL.buildMultiValueDictFromNoFunctionalProperty(fieldName, tableName)\r\n\t\t\t\t# UTIL.appendFieldInFeatureClassByMergeRule(inputFeatureClassName, noFunctionalPropertyDict, appendFieldName, relatedTableName, 
mergeRule)\r\n\r\n\t\treturn", "def post(self, request):\n # GET REQUEST DATA\n fid = request.POST.get('fid', False)\n uuid = request.POST.get('uuid', False)\n title_text = request.POST.get('title', False)\n body = request.POST.get('body', False)\n photo = request.FILES.get('photo', False) # FOR STORAGE\n wfsxml = request.POST.get('wfsxml', False) # FOR GEOSERVER\n data = {\n 'uuid': uuid,\n 'title_text': title_text,\n 'body': body,\n 'wfsxml': wfsxml\n }\n # VALIDATE FORM\n form = GeoPostForm(data, request.FILES)\n logger.info(\"\\ninstantiate Geopost form\\n\")\n # IF FORM VALIDATION ERROR\n if not form.is_valid():\n return server_error(request.body)\n #context = self.getContext(form)\n #return render(request, 'geopost/entry.html', context)\n else:\n pass\n # GET CLEAN VALUES\n uuid = form.cleaned_data['uuid']\n wfsxml = form.cleaned_data['wfsxml']\n # UPLOAD PHOTO TO BUCKET\n # if editing existing entry, first delete existing photo\n if fid:\n delete_from_bucket(uuid, self.imageBucket)\n else:\n pass\n photo.open('rb')\n error = upload_to_bucket(\n photo, self.imageBucket, photo.content_type, uuid)\n photo.close()\n # IF ERROR UPLOADING IMAGE\n if error:\n return server_error(error)\n else:\n pass\n # MAKE GEOSERVER WFS TRANSACTION\n error = post_to_geoserver(wfsxml, self.wfsURL)\n # ALL GOOD\n if not error:\n return HttpResponseRedirect(reverse('geopost_home'))\n # IF WFS TRANSACTION ERROR\n else:\n delete_from_bucket(uuid, self.imageBucket)\n return server_error(error)", "def create(self, request):\n if len(request.FILES) == 0:\n return JsonResponse({\n 'success': False,\n 'message': 'Must pass file in as a Multipart/Form post'\n })\n\n the_file = request.data['file']\n file_type = BuildingFile.str_to_file_type(request.data.get('file_type', 'Unknown'))\n\n organization_id = self.get_organization(self.request)\n cycle = request.query_params.get('cycle_id', None)\n\n if not cycle:\n return JsonResponse({\n 'success': False,\n 'message': 'Cycle ID is not defined'\n })\n else:\n cycle = Cycle.objects.get(pk=cycle)\n\n # figure out if file is xml or zip\n the_filename = the_file._get_name()\n tmp_filename, file_extension = os.path.splitext(the_filename)\n # initialize\n p_status = True\n property_state = True\n messages = {'errors': [], 'warnings': []}\n\n if file_extension == '.zip':\n # ZIP FILE, extract and process files one by one\n # print(\"This file is a ZIP\")\n\n with zipfile.ZipFile(the_file, \"r\", zipfile.ZIP_STORED) as openzip:\n filelist = openzip.infolist()\n for f in filelist:\n # print(\"FILE: {}\".format(f.filename))\n # process xml files\n if '.xml' in f.filename and '__MACOSX' not in f.filename:\n # print(\"PROCESSING file: {}\".format(f.filename))\n with NamedTemporaryFile() as data_file:\n data_file.write(openzip.read(f))\n data_file.seek(0)\n size = os.path.getsize(data_file.name)\n content_type = 'text/xml'\n\n a_file = InMemoryUploadedFile(\n data_file, 'data_file', f.filename, content_type,\n size, charset=None)\n\n building_file = BuildingFile.objects.create(\n file=a_file,\n filename=f.filename,\n file_type=file_type,\n )\n\n p_status_tmp, property_state_tmp, property_view, messages_tmp = building_file.process(organization_id, cycle)\n\n # append errors to overall messages\n for i in messages_tmp['errors']:\n messages['errors'].append(f.filename + \": \" + i)\n for i in messages_tmp['warnings']:\n messages['warnings'].append(f.filename + \": \" + i)\n\n if not p_status_tmp:\n # capture error\n p_status = p_status_tmp\n else:\n # capture a real property_state 
(not None)\n property_state = property_state_tmp\n\n else:\n # just an XML\n building_file = BuildingFile.objects.create(\n file=the_file,\n filename=the_file.name,\n file_type=file_type,\n )\n\n p_status, property_state, property_view, messages = building_file.process(organization_id, cycle)\n\n if p_status and property_state:\n if len(messages['warnings']) > 0:\n return JsonResponse({\n 'success': True,\n 'status': 'success',\n 'message': {'warnings': messages['warnings']},\n 'data': {\n 'property_view': PropertyViewAsStateSerializer(property_view).data,\n # 'property_state': PropertyStateWritableSerializer(property_state).data,\n },\n })\n else:\n return JsonResponse({\n 'success': True,\n 'status': 'success',\n 'message': {'warnings': []},\n 'data': {\n 'property_view': PropertyViewAsStateSerializer(property_view).data,\n # 'property_state': PropertyStateWritableSerializer(property_state).data,\n },\n })\n else:\n return JsonResponse({\n 'success': False,\n 'status': 'error',\n 'message': messages\n }, status=status.HTTP_400_BAD_REQUEST)", "def post_file(self, file_, api=None):\n api = api or self.api\n url = utils.join_url(self.path)\n files = {'data': file_}\n new_attributes = api.post(url, {}, {}, files)\n # self.error = None\n self.merge(new_attributes)\n return self.success()", "def import_shapefile(self, shapefile, schema):\n logger.debug(\"Importing shapefile {}\".format(shapefile))\n layer = DataSource(shapefile)[0]\n for feature in layer:\n fields = schema.from_feature(feature)\n Region.objects.create(**fields)", "def _upload_to_gcs(self, file_to_upload):\n hook = GCSHook(\n gcp_conn_id=self.gcp_conn_id,\n impersonation_chain=self.impersonation_chain,\n )\n is_data_file = file_to_upload.get(\"file_name\") != self.schema_filename\n metadata = None\n if is_data_file and self.upload_metadata:\n metadata = {\"row_count\": file_to_upload[\"file_row_count\"]}\n\n object_name = file_to_upload.get(\"file_name\")\n if is_data_file and self.partition_columns:\n # Add partition column values to object_name\n partition_values = file_to_upload.get(\"partition_values\")\n head_path, tail_path = os.path.split(object_name)\n partition_subprefix = [\n f\"{col}={val}\" for col, val in zip(self.partition_columns, partition_values)\n ]\n object_name = os.path.join(head_path, *partition_subprefix, tail_path)\n\n hook.upload(\n self.bucket,\n object_name,\n file_to_upload.get(\"file_handle\").name,\n mime_type=file_to_upload.get(\"file_mime_type\"),\n gzip=self.gzip if is_data_file else False,\n metadata=metadata,\n )", "def upload_file(self, f):\n return self._telegraph.upload_file(f)", "def post(self):\n args = parser.parse(self.arg_schema_post, request, location='json_or_form')\n data_file = request.files['model_weights']\n if not data_file:\n raise FileError(\"Missing upload file.\")\n file_path = self.save_weight_file_locally(data_file, args['clinic_id'], args['severity'])\n model_id = self.save_model_file_path_to_db(file_path, args['clinic_id'], args['severity'], args['accuracy'], False)\n if 'make_in_use' in args and args['make_in_use']:\n Models().set_active_model(args['clinic_id'], model_id)", "def upload_shape(shapepath):\n\n conn = None\n cur = None\n\n try:\n # first create the sqlstring with inserts\n # call PGSQL2SHP with some parameters, -s 4326 to set lat/lon srid, -I to create a spatial index on the geometry column\n params = [settings.SHP2PGSQL, \"-s\", \"4326\", \"-I\", shapepath, settings.STATES_TABLE_NAME]\n sqlstring,info = utils.run_tool(params)\n if not sqlstring:\n raise 
Exception(\"cannot upload file to database\")\n\n #then use the sqlstring\n conn = utils.pgconnect(**settings.DEFAULT_CONNECTION)\n cur = conn.cursor()\n cur.execute(sqlstring)\n conn.commit()\n\n finally:\n if cur:\n cur.close()\n if conn:\n conn.close()", "def do_POST(self):\n if not self.path.endswith(\"/\"): self.path += \"/\"\n if self.path == \"/annotate/\":\n # Read message\n length = int(self.headers.get('content-length'))\n msg = self.rfile.read(length)\n\n # Do the annotation\n doc = Document()\n parseFromDelimitedString(doc, msg)\n self.annotator.annotate(doc)\n\n with io.BytesIO() as stream:\n writeToDelimitedString(doc, stream)\n msg = stream.getvalue()\n\n # write message\n self.send_response(HTTPStatus.OK)\n self.send_header(\"Content-Type\", \"application/x-protobuf\")\n self.send_header(\"Content-Length\", len(msg))\n self.end_headers()\n self.wfile.write(msg)\n\n else:\n self.send_response(HTTPStatus.BAD_REQUEST)\n self.end_headers()", "def upload_training_file(self):\n\n file_path = os.getcwd() + \"/\" + self.console_label.training_file_name\n\n with open(file_path, 'r') as f:\n r = requests.post(self.upload_url, files={'file': f})\n\n if r.status_code != requests.codes.ok:\n messagebox.showerror(\"Error\", \"The training file could not be uploaded!\")", "def submit_file(self, project_id, filename, chunk_size=30, row_offset=0, drop_props=['project_id']):\n # Read the file in as a pandas DataFrame\n f = os.path.basename(filename)\n if f.lower().endswith(\".csv\"):\n df = pd.read_csv(filename, header=0, sep=\",\", dtype=str).fillna(\"\")\n elif f.lower().endswith(\".xlsx\"):\n xl = pd.ExcelFile(filename, dtype=str) # load excel file\n sheet = xl.sheet_names[0] # sheetname\n df = xl.parse(sheet) # save sheet as dataframe\n converters = {\n col: str for col in list(df)\n } # make sure int isn't converted to float\n df = pd.read_excel(filename, converters=converters).fillna(\"\") # remove nan\n elif filename.lower().endswith((\".tsv\", \".txt\")):\n df = pd.read_csv(filename, header=0, sep=\"\\t\", dtype=str).fillna(\"\")\n else:\n raise Gen3Error(\"Please upload a file in CSV, TSV, or XLSX format.\")\n df.rename(\n columns={c: c.lstrip(\"*\") for c in df.columns}, inplace=True\n ) # remove any leading asterisks in the DataFrame column names\n\n # Check uniqueness of submitter_ids:\n if len(list(df.submitter_id)) != len(list(df.submitter_id.unique())):\n raise Gen3Error(\n \"Warning: file contains duplicate submitter_ids. 
\\nNote: submitter_ids must be unique within a node!\"\n )\n\n if drop_props is not None:\n if isinstance(drop_props,str):\n drop_props = [drop_props]\n elif isinstance(drop_props,list):\n for prop in drop_props:\n if prop in df:\n df.drop(columns=[prop],inplace=True)\n else:\n print(\"\\n\\n\\tSubmit drop_props argument as a list of properties, e.g.,: drop_props=['id'].\\n\\n\")\n\n # Chunk the file\n print(\"\\nSubmitting {} with {} records.\".format(filename, str(len(df))))\n program, project = project_id.split(\"-\", 1)\n api_url = \"{}/api/v0/submission/{}/{}\".format(self._endpoint, program, project)\n headers = {\"content-type\": \"text/tab-separated-values\"}\n\n start = row_offset\n end = row_offset + chunk_size\n chunk = df[start:end]\n\n count = 0\n\n results = {\n \"invalid\": {}, # these are invalid records\n \"other\": [], # any unhandled API responses\n \"details\": [], # entire API response details\n \"succeeded\": [], # list of submitter_ids that were successfully updated/created\n \"responses\": [], # list of API response codes\n }\n\n # Start the chunking loop:\n while (start + len(chunk)) <= len(df):\n\n timeout = False\n valid_but_failed = []\n invalid = []\n count += 1\n print(\n \"Chunk {} (chunk size: {}, submitted: {} of {})\".format(\n str(count),\n str(chunk_size),\n str(len(results[\"succeeded\"]) + len(results[\"invalid\"])),\n str(len(df)),\n )\n )\n\n try:\n response = requests.put(\n api_url,\n auth=self._auth_provider,\n data=chunk.to_csv(sep=\"\\t\", index=False),\n headers=headers,\n ).text\n except requests.exceptions.ConnectionError as e:\n results[\"details\"].append(e.message)\n\n # Handle the API response\n if (\n \"Request Timeout\" in response\n or \"413 Request Entity Too Large\" in response\n or \"Connection aborted.\" in response\n or \"service failure - try again later\" in response\n ): # time-out, response != valid JSON at the moment\n\n print(\"\\t Reducing Chunk Size: {}\".format(response))\n results[\"responses\"].append(\"Reducing Chunk Size: {}\".format(response))\n timeout = True\n\n else:\n try:\n json_res = json.loads(response)\n except JSONDecodeError as e:\n print(response)\n print(str(e))\n raise Gen3Error(\"Unable to parse API response as JSON!\")\n\n if \"message\" in json_res and \"code\" not in json_res:\n print(json_res) # trouble-shooting\n print(\n \"\\t No code in the API response for Chunk {}: {}\".format(\n str(count), json_res.get(\"message\")\n )\n )\n print(\"\\t {}\".format(str(json_res.get(\"transactional_errors\"))))\n results[\"responses\"].append(\n \"Error Chunk {}: {}\".format(str(count), json_res.get(\"message\"))\n )\n results[\"other\"].append(json_res.get(\"message\"))\n\n elif \"code\" not in json_res:\n print(\"\\t Unhandled API-response: {}\".format(response))\n results[\"responses\"].append(\n \"Unhandled API response: {}\".format(response)\n )\n\n elif json_res[\"code\"] == 200: # success\n\n entities = json_res.get(\"entities\", [])\n print(\"\\t Succeeded: {} entities.\".format(str(len(entities))))\n results[\"responses\"].append(\n \"Chunk {} Succeeded: {} entities.\".format(\n str(count), str(len(entities))\n )\n )\n\n for entity in entities:\n sid = entity[\"unique_keys\"][0][\"submitter_id\"]\n results[\"succeeded\"].append(sid)\n\n elif (\n json_res[\"code\"] == 400\n or json_res[\"code\"] == 403\n or json_res[\"code\"] == 404\n ): # failure\n\n entities = json_res.get(\"entities\", [])\n print(\"\\tChunk Failed: {} entities.\".format(str(len(entities))))\n results[\"responses\"].append(\n 
\"Chunk {} Failed: {} entities.\".format(\n str(count), str(len(entities))\n )\n )\n\n message = \"\"\n for entity in entities:\n sid = entity[\"unique_keys\"][0][\"submitter_id\"]\n if entity[\"valid\"]: # valid but failed\n valid_but_failed.append(sid)\n else: # invalid and failed\n message = str(entity[\"errors\"])\n results[\"invalid\"][sid] = message\n invalid.append(sid)\n print(\n \"\\tInvalid records in this chunk: {}, {}\".format(\n len(invalid), message\n )\n )\n\n elif json_res[\"code\"] == 500: # internal server error\n\n print(\"\\t Internal Server Error: {}\".format(response))\n results[\"responses\"].append(\n \"Internal Server Error: {}\".format(response)\n )\n\n if (\n len(valid_but_failed) > 0 and len(invalid) > 0\n ): # if valid entities failed bc grouped with invalid, retry submission\n chunk = chunk.loc[\n df[\"submitter_id\"].isin(valid_but_failed)\n ] # these are records that weren't successful because they were part of a chunk that failed, but are valid and can be resubmitted without changes\n print(\n \"Retrying submission of valid entities from failed chunk: {} valid entities.\".format(\n str(len(chunk))\n )\n )\n\n elif (\n len(valid_but_failed) > 0 and len(invalid) == 0\n ): # if all entities are valid but submission still failed, probably due to duplicate submitter_ids. Can remove this section once the API response is fixed: https://ctds-planx.atlassian.net/browse/PXP-3065\n # raise Gen3Error(\n # \"Please check your data for correct file encoding, special characters, or duplicate submitter_ids or ids.\"\n # )\n print(\"\\tUnhandled API response. Adding chunk to 'other' in results. Check for special characters or malformed links or property values.\")\n results[\"other\"].append(chunk)\n start += chunk_size\n end = start + chunk_size\n chunk = df[start:end]\n\n elif timeout == False: # get new chunk if didn't timeout\n start += chunk_size\n end = start + chunk_size\n chunk = df[start:end]\n\n else: # if timeout, reduce chunk size and retry smaller chunk\n if chunk_size >= 2:\n chunk_size = int(chunk_size / 2)\n end = start + chunk_size\n chunk = df[start:end]\n print(\n \"Retrying Chunk with reduced chunk_size: {}\".format(\n str(chunk_size)\n )\n )\n timeout = False\n else:\n raise Gen3SubmissionError(\n \"Submission is timing out. 
Please contact the Helpdesk.\"\n )\n\n print(\"Finished data submission.\")\n print(\"Successful records: {}\".format(str(len(set(results[\"succeeded\"])))))\n print(\"Failed invalid records: {}\".format(str(len(results[\"invalid\"]))))\n\n return results", "def post(self, request, work_batch_id):\n\n from sentry.models.workbatch import WorkBatch\n\n try:\n work_batch = WorkBatch.objects.get(pk=int(work_batch_id))\n except WorkBatch.DoesNotExist:\n raise ResourceDoesNotExist\n\n logger = logging.getLogger('clims.files')\n logger.info('workbatchfile.start')\n\n if 'file' not in request.data:\n return Response({'detail': 'Missing uploaded file'}, status=400)\n\n fileobj = request.data['file']\n\n full_name = request.data.get('name', fileobj.name)\n if not full_name or full_name == 'file':\n return Response({'detail': 'File name must be specified'}, status=400)\n\n name = full_name.rsplit('/', 1)[-1]\n\n if _filename_re.search(name):\n return Response(\n {\n 'detail': 'File name must not contain special whitespace characters'\n }, status=400\n )\n\n headers = {\n 'Content-Type': fileobj.content_type,\n }\n for headerval in request.data.getlist('header') or ():\n try:\n k, v = headerval.split(':', 1)\n except ValueError:\n return Response({'detail': 'header value was not formatted correctly'}, status=400)\n else:\n if _filename_re.search(v):\n return Response(\n {\n 'detail': 'header value must not contain special whitespace characters'\n },\n status=400\n )\n headers[k] = v.strip()\n\n file = File.objects.create(\n name=name,\n type='work_batch.file',\n headers=headers,\n )\n file.putfile(fileobj, logger=logger)\n\n try:\n with transaction.atomic():\n # TODO: Remove the organization id from the user task file\n work_batch_file = WorkBatchFile.objects.create(\n organization_id=work_batch.organization_id,\n file=file,\n name=full_name,\n work_batch_id=work_batch.id\n )\n except IOError:\n file.delete()\n return Response({'detail': ERR_FILE_EXISTS}, status=409)\n\n return Response(serialize(work_batch_file, request.user), status=201)", "def submit(request, session, **kwargs):\n\n from ..models import (\n FacilityTransaction,\n Allocation,\n FollowupRequest,\n Instrument,\n )\n\n instrument = (\n Instrument.query_records_accessible_by(request.requester)\n .join(Allocation)\n .join(FollowupRequest)\n .filter(FollowupRequest.id == request.id)\n .first()\n )\n\n name = request.obj.tns_name\n if name is None:\n request.status = 'No TNS name'\n else:\n try:\n lc = Table.read(\n f\"{lightcurve_url}/lc_{name}_cleaned\",\n format='ascii',\n header_start=1,\n )\n\n if 'BTJD' not in list(lc.columns):\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n else:\n IOLoop.current().run_in_executor(\n None,\n lambda: commit_photometry(\n lc, request.id, instrument.id, request.requester.id\n ),\n )\n\n except FileNotFoundError:\n request.status = f\"TESS alert {name} not found.\"\n except Exception:\n request.status = f\"TESS alert {name} could not be ingested: {lightcurve_url}/lc_{name}_cleaned\"\n\n transaction = FacilityTransaction(\n request=None,\n response=None,\n followup_request=request,\n initiator_id=request.last_modified_by_id,\n )\n\n session.add(transaction)", "def webhook_upload(user, application, complete_path, init_es, tool, scan_name, user_host, to_name,hook_log):\n hook_log = WebhookLog.objects.get(id=hook_log)\n hook_log.file_upload_event = True\n hook_log.file_upload_datetime = timezone.now()\n hook_log.save()\n process_files(user, application, 
complete_path, init_es, tool, scan_name, user_host, to_name,hook_log=hook_log)\n info_debug_log(event='Webhook upload',status='success')", "def send_job(file_location, url, username, password, extract):\n # Sample discovery job.\n data = {\"path\": file_location,\n \"action\": \"ADD\",\n \"entry\": {\"fields\": {\"__to_extract\":extract, \"name\": os.path.basename(file_location)}}}\n\n # Build the request and post.\n try:\n passman = urllib2.HTTPPasswordMgrWithDefaultRealm()\n passman.add_password(None, url, username, password)\n urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(passman)))\n request = urllib2.Request(url, json.dumps(data), headers={'Content-type': 'application/json'})\n response = urllib2.urlopen(request)\n if response.code == 200:\n print('Sent {0} for indexing...'.format(file_location))\n else:\n print ('Error sending {0}: {1}'.format(file_location, response.code))\n except urllib2.HTTPError as http_error:\n print(http_error)\n except urllib2.URLError as url_error:\n print(url_error)", "def upload_file(upload_url, upload_fields, filepath, callback=None):\n upload_fields = list(upload_fields.items())\n upload_fields.append(\n (\"file\", (os.path.basename(filepath), click.open_file(filepath, \"rb\")))\n )\n encoder = MultipartEncoder(upload_fields)\n monitor = MultipartEncoderMonitor(encoder, callback=callback)\n\n config = cloudsmith_api.Configuration()\n if config.proxy:\n proxies = {\"http\": config.proxy, \"https\": config.proxy}\n else:\n proxies = None\n\n headers = {\"content-type\": monitor.content_type}\n\n client = get_files_api()\n headers[\"user-agent\"] = client.api_client.user_agent\n\n session = create_requests_session()\n resp = session.post(upload_url, data=monitor, headers=headers, proxies=proxies)\n\n try:\n resp.raise_for_status()\n except requests.RequestException as exc:\n raise ApiException(\n resp.status_code, headers=exc.response.headers, body=exc.response.content\n )", "def upload():\n uploaded_file = request.files.get('file')\n\n if not uploaded_file:\n return 'No file uploaded.', 400\n\n # Create a Cloud Storage client.\n gcs = storage.Client()\n\n # Get the bucket that the file will be uploaded to.\n bucket = gcs.get_bucket('foodie_helper_bucket_1')\n #app.config['CLOUD_STORAGE_BUCKET']\n # Create a new blob and upload the file's content.\n blob = bucket.blob(uploaded_file.filename)\n\n blob.upload_from_string(\n uploaded_file.read(),\n content_type=uploaded_file.content_type\n )\n\n # The public URL can be used to directly access the uploaded file via HTTP.\n result = runImage(blob.public_url)\n machineResult = getConcept(result)\n return render_template('results.html', url=blob.public_url, machineResult=machineResult)\n #return render_template('results.html', url=\"https://www.foodiesfeed.com/wp-content/uploads/2019/02/pizza-ready-for-baking.jpg\", machineResult=\"Pizza\")", "def process_file():\n global distances_between_cities\n global number_of_cities\n global unvisited_cities\n\n text_file = open(sys.argv[1].strip('\\r'))\n distances_between_cities = [[int(i) for i in line.strip(\"\\r\\n\").split()[1:]] for line in text_file.readlines()[1:]]\n number_of_cities = len(distances_between_cities)\n\n # set the initial conditions of the problem (you have already visited madrid)\n unvisited_cities = range(number_of_cities)\n visit_city(0)", "def _push_one(self, f, **kwargs):\n\n # Copy the metadata for modifying and open the ann file\n meta = kwargs.copy()\n desc = read_InSar_annotation(f)\n\n # Expand the path for the 
geotiffs\n tiff_dir = abspath(expanduser(self.geotiff_dir))\n\n # form the pattern to look for and grab the tifs\n pattern = '.'.join(basename(f).split('.')[0:-1]) + '*.tif'\n rasters = glob.glob(join(tiff_dir, pattern))\n\n # Submit each geotif, modifying meta on the fly\n for r in rasters:\n # Grab information from the filename\n f_pieces = r.split('.')\n component = f_pieces[-2] # Real or imaginary component\n data_abbr = f_pieces[-3] # Key to the data name\n dname = self.dname_map[data_abbr] # Data type in db\n\n # For the data type\n meta['type'] = 'insar ' + dname.split(' ')[0]\n\n if dname == 'interferogram':\n meta['type'] += (' ' + component)\n\n # Assign the date for the respective flights\n if 'amplitude' in dname:\n meta['date'] = desc['start time of acquisition for pass {}'.format(\n dname.split(' ')[-1])]['value']\n\n # Derived products always receive the date of the last overpass\n else:\n meta['date'] = desc['start time of acquisition for pass 2']['value']\n\n # Assign only the date not the date and time\n meta['date'] = meta['date'].date()\n\n # Assign units\n meta['units'] = desc['{} units'.format(\n dname.split(' ')[0])]['value']\n\n # Flexibly form a comment for each of the products for dates\n comment = get_InSar_flight_comment(dname, desc)\n # add which dem was used which dictates the file name convert e.g.\n # ...VV_01.int.grd\n comment += ', DEM used = {}'.format(\n desc['dem used in processing']['value'])\n # Add the polarization to the the comments\n comment += ', Polarization = {}'.format(\n desc['polarization']['value'])\n meta['description'] = comment\n\n self.log.info('Uploading {} as {}...'.format(r, meta['type']))\n\n d = self.UploaderClass(r, **meta)\n\n # Submit the data to the database\n d.submit(self.session)\n\n # Uploaded set\n self.uploaded += 1", "def upload_file(file_name, file_id, model, workspace, header_put):\n filename = \"data_files/{}\".format(file_name)\n data_file = open(filename, \"r\")\n data = data_file.read()\n data_file.close()\n\n uri = (\"https://api.anaplan.com/1/3/workspaces/{}/models/{}/\"\n \"files/{}/chunks/0\").format(workspace, model, file_id)\n response = requests.put(uri, headers = header_put, data = data)\n return response.status_code", "def __init__(self, file):\n # default params\n self.lonlat = None\n self.name = None\n self.map_zoom_level = None\n \n self.tags = []\n self.buildings = []\n \n # osm-based geometry is based on explicit lat, lon coordinates,\n # while geometry rendered from 3D creation software(i.e. 
blender) is based on relative [x, y, z] coordinates\n # and baseline longitude, latutide coordiantes as scenario properties.\n \n if 'name' in file:\n self.name = file['name']\n \n if 'lon' in file and 'lat' in file:\n self.lonlat = [float(file['lon']), float(file['lat'])]\n\n if 'properties' in file:\n if 'mapZoomLevel' in file['properties']:\n self.map_zoom_level = int(file['properties']['mapZoomLevel'])\n \n if 'geometry' in file:\n if 'features' in file['geometry']:\n self.buildings = [Building(feature) for feature in file['geometry']['features']]", "def upload_population_by_race(url, gcs_bucket, filename):\n url_params = get_census_params_by_county(\n get_population_by_race_columns().keys())\n url_file_to_gcs(url, url_params, gcs_bucket, filename)", "def saveUploadedTopology(self, file):\r\n filename = str(file)\r\n with open(os.path.join(main.settings.TOPOLOGY_DIR, filename), 'wb+') as destination:\r\n for chunk in file.chunks():\r\n destination.write(chunk)", "def execute(self, parameters, messages):\n\n try:\n MapDirString = parameters[0].valueAsText\n BoundaryFeatureClass = parameters[1].value\n\n cursor = arcpy.da.SearchCursor(BoundaryFeatureClass, ['SHAPE@'])\n FilesWithinBoundary = []\n FilesOutsideBoundary = []\n\n\n for file in os.listdir(MapDirString): #Loop through each file in the folder\n if file.upper().endswith(\".TIF\"): #Only process *.tif files\n FileExtent = arcpy.Describe(MapDirString + \"\\\\\" + file).extent #Get the Extent object describing the extent of the tif file\n\n #Create an array with the corners of the footprint to be checked\n FootprintCorners = arcpy.Array([FileExtent.upperLeft, FileExtent.upperRight, FileExtent.lowerRight, FileExtent.lowerLeft])\n\n #Create the footprint polygon to be checked\n FootPrint = arcpy.Polygon(FootprintCorners)\n\n FootPrintOverlapsBoundary = True #Initialize the result variable\n for row in cursor: #Iterate each polygon in the Boundary Feature Class\n #Found a match if the features overlap, contains or is within eachother\n if row[0].overlaps(FootPrint) or row[0].contains(FootPrint) or row[0].within(FootPrint):\n break\n else:\n #The break never happened. 
The tif is outside the Boundary of the polygon.\n FootPrintOverlapsBoundary = False\n\n #Reset the cursor iterating Boundary features to the first record.\n cursor.reset()\n\n #Print the result\n if FootPrintOverlapsBoundary:\n FilesWithinBoundary.append(MapDirString + \"\\\\\" + file)\n else:\n FilesOutsideBoundary.append(MapDirString + \"\\\\\" + file)\n\n #Clean up.\n del cursor\n\n\n\n #Errorhandling\n #-------------------------\n except IOError as e:\n print(\"I/O error({0}): {1}\".format(e.errno, e.strerror))\n\n except:\n print(\"Unexpected error:\", sys.exc_info()[0])\n raise\n\n\n parameters[2].value = FilesWithinBoundary\n return", "async def create_upload_wrist(background_tasks: BackgroundTasks, file: UploadFile = File(...), db: Session = Depends(get_db)):\n background_tasks.add_task(process_single_wrist, file)\n return {\"status\": \"success\"}", "def _parse_and_store_geojson(filename):\n ds = DataSource(filename)\n _sanity_check_datasource(ds)\n\n logger.info('Data file %s was opened', ds.name)\n lm = LayerMapping(WegStuk, ds, MAPPING)\n\n with transaction.atomic():\n WegStuk.objects.all().delete()\n lm.save(strict=True, verbose=False)\n\n logger.info('Travel time dataset was updated.')", "def new_workflow(self, upload_file, name=\"\", description=\"\", submit=None):\n data = upload_file.file.read()\n if not name:\n name = upload_file.filename.replace(\".xml\", \"\")\n workflow = Workflow(name=name, description=description,\n data=data,\n created_by=identity.current.user.id)\n log.info(\"Saved new workflow %d\", workflow.id)\n raise redirect(\"/workflow/%d\" % workflow.id)" ]
[ "0.53217494", "0.5287444", "0.52646697", "0.5090431", "0.49605402", "0.49185672", "0.49150816", "0.48853952", "0.48199537", "0.48070318", "0.47955486", "0.4781198", "0.4763215", "0.47575787", "0.47164455", "0.47102332", "0.4707916", "0.47077137", "0.46862623", "0.46841776", "0.46682516", "0.46546477", "0.4653955", "0.46503818", "0.46495086", "0.46322343", "0.4629327", "0.46123812", "0.46023086", "0.46016565" ]
0.5521604
0
Return every valid neighbor of a current_node in the maze.
def neighbors(current_node, maze):
    UP, DOWN, LEFT, RIGHT = -1, 1, -1, 1
    neighbors = []
    pos = [(0, UP), (0, DOWN), (LEFT, 0), (RIGHT, 0)]
    diag = [(LEFT, UP), (RIGHT, DOWN), (LEFT, DOWN), (RIGHT, UP)]
    if not args.disable_diagonal:
        pos += diag
    for new_position in pos:
        node_position = (
            current_node.position[0] + new_position[0],
            current_node.position[1] + new_position[1],
        )
        # range check
        if (
            node_position[0] > (len(maze) - 1)
            or node_position[0] < 0
            or node_position[1] > (len(maze[node_position[0]]) - 1)
            or node_position[1] < 0
        ):
            continue
        # wall check
        if new_position in diag:
            if (
                maze[current_node.position[0]][current_node.position[1] + new_position[1]] == 0
                and maze[current_node.position[0] + new_position[0]][current_node.position[1]] == 0
            ):
                continue
        if maze[node_position[0]][node_position[1]] == 0:
            continue
        new_node = Node(node_position)
        # g is how the cost of the step
        if new_position[0] != 0 and new_position[1] != 0:
            new_node.g = current_node.g + 1.44
        else:
            new_node.g = current_node.g + 1
        new_node.parent = current_node
        neighbors.append(new_node)
    return neighbors
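A small, hypothetical harness for the function above, showing the pieces it implicitly depends on. The Node class, the module-level args namespace, and the 0 = wall / nonzero = open convention are reconstructed from how the document uses them, not taken from any original project.

import argparse

class Node:
    # Assumed shape of the node object: the document only requires .position, .g and .parent.
    def __init__(self, position):
        self.position = position  # (row, col)
        self.g = 0                # accumulated step cost
        self.parent = None

# The document reads a module-level `args`; a plain namespace stands in for argparse output here.
args = argparse.Namespace(disable_diagonal=False)

maze = [
    [1, 1, 0],
    [0, 1, 1],
    [1, 1, 1],
]

start = Node((0, 0))
for n in neighbors(start, maze):
    print(n.position, n.g)  # expected: (0, 1) with g=1, then the diagonal (1, 1) with g=1.44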
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_neighbors(current_row, current_col, grid_size):\n neighbors = []\n for row_offset, col_offset in [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1),\n (1, -1), (1, 0), (1, 1)]:\n new_row = current_row + row_offset\n new_col = current_col + col_offset\n if (new_row >= 0 and new_row < grid_size and new_col >= 0\n and new_col < grid_size):\n neighbors.append((new_row, new_col))\n return neighbors", "def get_neighbours(self, grid):\n\t\tfor diff in ((-1, 0), (1, 0), (0, -1), (0, 1)):\n\t\t\tres = Vector((self.row, self.col)) + diff\n\t\t\tif res[0] >= 0 and res[1] >= 0 and res[0] < len(grid) and res[1] < len(grid[0]):\n\t\t\t\tyield grid[res[0]][res[1]]", "def cell_neighbours(self, x, y):\n if self.maze_map[y][x]:\n return set()\n neighbours = set()\n for (direction, ((i, j), dummy)) in MazeGraph.DIRECTIONS.items():\n xi, yj = (x + i) % self.width, (y + j) % self.height\n if not self.maze_map[yj][xi]:\n neighbours.add((direction, (xi, yj)))\n return neighbours", "def get_neighbouring_nodes(node) :\r\n\r\n connected_nodes = [] #A list of the connected nodes\r\n\r\n #Checking if the node belongs to the 1st row\r\n if(node.coords[0] != 0) :\r\n connected_node = Node((node.coords[0] - 1, node.coords[1]), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the last row\r\n if(node.coords[0] != grid_dims[0] - 1) :\r\n connected_node = Node((node.coords[0] + 1, node.coords[1]), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != 0) :\r\n connected_node = Node((node.coords[0], node.coords[1] - 1), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != grid_dims[1] - 1) :\r\n connected_node = Node((node.coords[0], node.coords[1] + 1), goal_pos, node.gn_value - 1)\r\n #Checking if the node is an obstacle\r\n if(not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n return connected_nodes", "def get_neighbors(self, row, col):\n neighbors = set()\n for d in [-1,1]:\n if row+d >= 0 and row+d < self._height and \\\n (row+d,col) in self._empty_spaces:\n neighbors.add((row+d,col))\n if col+d >= 0 and col+d < self._width and \\\n (row,col+d) in self._empty_spaces:\n neighbors.add((row,col+d))\n return neighbors", "def get_neighbours(point, grid):\n # possible movements (diagonally is impossible)\n dy, dx = [-1, 0, 1, 0], [0, 1, 0, -1]\n\n neighbours = []\n for i in range(4):\n y, x = point[0] + dy[i], point[1] + dx[i]\n\n # skip if not within maze's bounds (NOT actually needed since there is a \"#\" barrier around the maze)\n # if not (0 <= x < len(grid) and 0 <= y < len(grid[0])):\n # continue\n\n point_type = grid[y][x]\n if point_type == \"#\": # skip if wall\n continue\n neighbours.append((y, x))\n\n return neighbours", "def neighbours(self):\n\n neighbours = []\n root = self.root\n if self == root:\n return neighbours\n\n ########################\n # IMMEDIATELY ADJACENT #\n sizes = [self.maxs[0] - self.mins[0], self.maxs[1] - self.mins[1]]\n coords = [(self.mins[0] + sizes[0] / 2, self.maxs[1] + sizes[1] / 2,),\n 
(self.maxs[0] + sizes[0] / 2, self.mins[1] + sizes[1] / 2,),\n (self.mins[0] + sizes[0] / 2, self.mins[1] - sizes[1] / 2,),\n (self.maxs[0] - sizes[0] / 2, self.mins[1] + sizes[1] / 2,),]\n # loop through top, right, bottom, left\n for i in range(4):\n x, y = coords[i]\n query_quad = root.query_xy(x, y)\n if query_quad is not None:\n same_size_idx = query_quad.location[: self.tree_depth]\n same_size_quad = root[same_size_idx]\n neighbours += list(self._get_border_children(same_size_quad, i))\n\n #############\n # DIAGONALS #\n root_sizes = [root.maxs[0] - root.mins[0], root.maxs[1] - root.mins[1]]\n xs, ys = (root_sizes / 2 ** root.max_tree_depth) / 2\n neighbours += [\n root.query_xy(self.mins[0] - xs, self.mins[1] - ys), # TL\n root.query_xy(self.maxs[0] + xs, self.mins[1] - ys), # TR\n root.query_xy(self.mins[0] - xs, self.maxs[1] + ys), # BL\n root.query_xy(self.maxs[0] + xs, self.maxs[1] + ys), # BR\n ]\n\n unique_neighbours = list(set(neighbours))\n try:\n unique_neighbours.remove(self)\n except ValueError:\n pass\n\n return unique_neighbours", "def check_neighbours(matrix, cur_pos, visited):\n visited[cur_pos[0]][cur_pos[1]] = True\n\n for i in range(num_of_neighbours):\n cur_neighbour = (cur_pos[0]+neighbours_positions[i][0], cur_pos[1]+neighbours_positions[i][1])\n if is_safe(matrix, cur_neighbour, visited):\n check_neighbours(matrix, cur_neighbour, visited)", "def _set_node_neighbours(self, node):\n all_neighbours = [self.BOARD[node.y + y][node.x + x] for x in reversed(range(-1, 2)) for y in\n reversed(range(-1, 2))\n if 0 <= node.x + x < self.len_x and 0 <= node.y + y < self.len_y]\n non_traversable_neighbours = []\n for neighbour in all_neighbours:\n if not neighbour.traversable:\n non_traversable_neighbours.append(neighbour)\n elif neighbour.x != node.x and neighbour.y != node.y:\n x_diff = neighbour.x - node.x\n y_diff = neighbour.y - node.y\n if not self.BOARD[node.y + y_diff][node.x].traversable and \\\n not self.BOARD[node.y][node.x + x_diff].traversable:\n non_traversable_neighbours.append(neighbour)\n node.neighbours = [neighbour for neighbour in all_neighbours if neighbour not in non_traversable_neighbours]", "def find_valid_neighbours(self, cell):\n\n delta = [('W', (-1, 0)),\n ('E', (1, 0)),\n ('S', (0, 1)),\n ('N', (0, -1))]\n neighbours = []\n for direction, (dx, dy) in delta:\n x2, y2 = cell.x + dx, cell.y + dy\n if (0 <= x2 < self.nx) and (0 <= y2 < self.ny):\n neighbour = self.cell_at(x2, y2)\n if neighbour.has_all_walls():\n neighbours.append((direction, neighbour))\n return neighbours", "def get_neighbors(self, node):\r\n neighbors = set()\r\n for neighbor in ORTHOGONAL_POSITIONS[(node.pos[0], node.pos[1])]:\r\n if self.board[neighbor[0]][neighbor[1]].color == node.color:\r\n neighbors.add(neighbor)\r\n else:\r\n continue\r\n return neighbors", "def neighbor_nodes(self,node):\n\n neighbors = []\n if node > self.cols:\n neighbors.append(node-self.cols)\n if node <= self.cols*(self.rows-1):\n neighbors.append(node+self.cols)\n if node % self.cols != 1:\n neighbors.append(node-1)\n if node % self.cols != 0:\n neighbors.append(node+1)\n\n return neighbors", "def neighbors(self, cell):\n x = cell.x\n y = cell.y\n for new_x, new_y in [(x, y - 1), (x, y + 1), (x - 1, y), (x + 1, y)]:\n neighbor = self[new_x, new_y]\n if neighbor is not None:\n yield neighbor", "def get_neighbours(self):\n return []", "def get_neighbours_and_directions(self, from_position):\n \n # Transform index into board matrix into index into index into neighbour matrix\n from_row_index = 
self.board_to_connection_index(from_position)\n row = self.connection_matrix[from_row_index]\n \n neighbours = []\n for col_num in range(0, len(row)): \n if row[col_num]:\n # Transform index into board index\n board_index = self.connection_to_board_index(col_num)\n if self.board[board_index[0]][board_index[1]].state != PegState.EMPTY:\n neighbours.append((board_index, row[col_num])) # Store board index and direction in neighbours\n return neighbours", "def neighbors(node, topology):\n return [n for n in topology[node]]", "def reachable(maze: list, start: tuple, goal: tuple):\n n = len(maze) # Get the dimension of the maze\n\n #========================================#\n # Some data checking statements\n\n if (not is_valid(start, n)):\n print(\"reachable: Start indices outside maze dimensions\")\n return False\n elif (not is_valid(goal, n)):\n print(\"reachable: Goal indices outside maze dimensions\")\n return False\n\n # End data checking statements\n #========================================#\n\n # We can use a copy of the maze to keep track of visited squares (Considered using a set here, thought that time efficiency was important)\n visited = copy.deepcopy(maze)\n # visited = list(map(list, maze)) # Alternative to using copy.deepcopy\n stack = [] # Define our stack of \"fringe\" squares\n stack.append(start) # Push the start square onto our stack\n visited[start[0]][start[1]] = 1 # Set our start to visited\n\n while (len(stack)): # While there exists items in the stack\n current = stack.pop() # Pop the last element\n\n if (current == goal):\n return True # If current is the goal, we found it!\n\n current_i, current_j = current # Unpack the current pair\n\n # Now we want to add all unvisited squares that are possible to get to from the current square\n for i in range(len(nearby_offsets)):\n offset_i, offset_j = nearby_offsets[i]\n possible = (current_i + offset_i, current_j + offset_j)\n # print(f\"Current possible: {possible_i} {possible_j}\") # DEBUG\n if (is_valid(possible, n)): # If the calculated square is within the maze matrix\n if (not visited[possible[0]][possible[1]]):\n stack.append(possible)\n visited[possible[0]][possible[1]] = 1\n return False # If the while loop goes out, and the stack is empty, then there is no possible path", "def _check_neighbors(self):\n for direction, dir_info in self.DIRECTIONS.items():\n pos = Point(\n self.position.x + dir_info[\"mask\"][0],\n self.position.y + dir_info[\"mask\"][1]\n )\n status = self.move(direction)\n self.grid[status].add(pos)\n if status in (1, 2):\n # moved\n self.move(dir_info[\"opposite\"])\n yield pos", "def neighbors(self):\n adjacency_matrix = self.polyhedron().vertex_adjacency_matrix()\n for x in self.polyhedron().Vrep_generator():\n if adjacency_matrix[self.index(), x.index()] == 1:\n yield x", "def neighbors(self):\n adjacency_matrix = self.polyhedron().vertex_adjacency_matrix()\n for x in self.polyhedron().Vrep_generator():\n if adjacency_matrix[self.index(), x.index()] == 1:\n yield x", "def neighbors(self):\n \n # find 0 - blank square\n \n x0 = None\n y0 = None\n \n for i in range(4):\n for j in range(4):\n if self.get_tile(i,j) == 0:\n y0 = i\n x0 = j\n\n if x0 == None or y0 == None:\n return []\n \n neighbor_list = []\n \n # move 0 to the right\n if x0 < 3:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0,x0+1)\n new_position.set_tile(y0,x0+1,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'r'\n neighbor_list.append(new_position)\n # move 0 to the left\n if x0 > 
0:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0,x0-1)\n new_position.set_tile(y0,x0-1,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'l'\n neighbor_list.append(new_position)\n # move 0 up\n if y0 > 0:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0-1,x0)\n new_position.set_tile(y0-1,x0,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'u'\n neighbor_list.append(new_position)\n # move 0 down\n if y0 < 3:\n new_position = Position(self.tiles)\n temp = new_position.get_tile(y0+1,x0)\n new_position.set_tile(y0+1,x0,0)\n new_position.set_tile(y0,x0,temp)\n new_position.directiontomoveto = 'd'\n neighbor_list.append(new_position)\n \n return neighbor_list", "def neighbours(self, node: list) -> list:\n dirs = [[1, 0], [0, 1], [-1, 0], [0, -1]]\n neighbours = []\n for direction in dirs:\n neighbour = (node[0] + direction[0], node[1] + direction[1])\n if neighbour in self.all_nodes:\n neighbours.append(neighbour)\n return neighbours", "def neighbours(self):\n seen = set()\n return [l.other(self) for l in self.dovetails \\\n if id(l) not in seen and not seen.add(id(l))]", "def neighbor_nodes(self, node):\n row = node[0]\n col = node[1]\n if row == -1 and col == -1:\n # The nodes that can be accessed from the start node\n # (i.e. all the nodes in the first column)\n for r in range(self.num_rows):\n yield (r, 0)\n else:\n if row < (self.num_rows - 1):\n # We can still go down\n yield (row + 1, col)\n if row > 0:\n # We can still go up\n yield (row - 1, col)\n if col < (self.num_cols - 1):\n # We can still go to the right\n yield (row, col + 1)", "def __get_neighbors(self, goal):\n neighbors = set()\n start = self.__get_position(0, self.puzzle)\n # start_x = start[0]\n # start_y = start[1]\n # Get the below neighbor.\n if(start[0] - 1 >= 0):\n temp = self.__swap(start[0], start[1], start[0] - 1, start[1])\n neighbors.add(State(temp, self.g + 1, 'D', goal))\n # Get the above neighbor\n if(start[0] + 1 <= len(self.puzzle) -1):\n temp = self.__swap(start[0], start[1], start[0] + 1, start[1])\n neighbors.add(State(temp, self.g + 1, 'U', goal))\n # Get the right neighbor\n if(start[1] - 1 >= 0):\n temp = self.__swap(start[0], start[1], start[0], start[1] - 1)\n neighbors.add(State(temp, self.g + 1, 'R', goal))\n # Get the left neighbor\n if(start[1] + 1 <= len(self.puzzle[0]) -1):\n temp = self.__swap(start[0], start[1], start[0], start[1] + 1)\n neighbors.add(State(temp, self.g + 1, 'L', goal))\n\n return neighbors", "def get_further_neighbours(self, cell):\n\t\tneighs = self.get_neighbours(cell)\n\t\ti, j = cell.find_id()\n\t\tneighbours = []\n\t\tfor neigh in neighs:\n\t\t\tx, y = neigh.find_id()\n\t\t\tif abs(x-i)+abs(y-j) > 1 or abs(x-i)+abs(y-j) == 0: \n\t\t\t\tneighbours.append(self.space[y,x])\n\t\treturn neighbours", "def possibleNeighbors(self, col, row):\n neighbors = [(col, row-1), (col-1, row), (col+1, row), (col, row+1)]\n valid_neighbors = []\n for (ncol, nrow) in neighbors:\n if ncol >= 0 and ncol < self.cols and \\\n nrow >= 0 and nrow < self.rows:\n valid_neighbors.append((ncol, nrow))\n return valid_neighbors", "def getNeighbors(self, current: MstarNode):\n neighbors = []\n options = []\n # Loop over all the agents\n for i in range(self.n_agents):\n node: Node = current.nodes[i]\n options_i = []\n if i in current.collision_set:\n # If the agent in the collision set we add the current node as well as all possible nodes\n options_i.append(node)\n (x, y) = node.position\n moves = {0: (x, y - 
1), 90: (x + 1, y), 180: (x, y + 1), 270: (x - 1, y)}\n options_i.append(Node(node.position, node, node.rotation + 90, node.h))\n options_i.append(Node(node.position, node, node.rotation - 90, node.h))\n if self.grid[moves[node.rotation][1]][moves[node.rotation][0]] == 0:\n options_i.append(Node(moves[node.rotation], node, node.rotation,\n self.heuristic(i, moves[node.rotation], node.rotation)))\n else:\n # If the agent is not in the collision set we add only the optimal following node\n try:\n if (node, self.goal.nodes[i]) in self.policy:\n nextPos = self.policy[(node, self.goal.nodes[i])]\n else:\n nextPos = Astar(self.grid, node, self.goal.nodes[i]).solve()\n self.policy[(node, self.goal.nodes[i])] = nextPos\n except ValueError:\n print(f\"start: {node}, goal: {self.goal.nodes[i]}\")\n raise RuntimeError()\n options_i.append(Node(nextPos[0], node, nextPos[1], self.heuristic(i, nextPos[0], nextPos[1])))\n options.append(options_i)\n # Take the cartesian product to get all options\n for element in itertools.product(*options):\n neighbors.append(list(element))\n return neighbors", "def get_connected_nodes(node, current_path_len) :\r\n\r\n connected_nodes = [] #A list of the connected nodes\r\n closed_list_coords = get_path_coordinates(closed_list)\r\n\r\n #Checking if the node belongs to the 1st row\r\n if(node.coords[0] != 0) :\r\n connected_node = Node((node.coords[0] - 1, node.coords[1]), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the last row\r\n if(node.coords[0] != grid_dims[0] - 1) :\r\n connected_node = Node((node.coords[0] + 1, node.coords[1]), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != 0) :\r\n connected_node = Node((node.coords[0], node.coords[1] - 1), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n #Checking if the node belongs to the 1st column\r\n if(node.coords[1] != grid_dims[1] - 1) :\r\n connected_node = Node((node.coords[0], node.coords[1] + 1), goal_pos, current_path_len)\r\n #Checking if the node has already been traversed or is it is an obstacle\r\n if(not connected_node.coords in closed_list_coords and not connected_node.coords in obstacle_coords) :\r\n connected_nodes.append(connected_node)\r\n\r\n return connected_nodes", "def dfs(maze, current_node):\n\n q = collections.deque()\n\n q.append(current_node)\n\n while len(q) > 0:\n current_node = q.pop()\n maze[current_node.row][current_node.cell] = 1\n yield maze\n\n for neighbour in get_neighbours(maze, current_node):\n if maze[neighbour.row][neighbour.cell] == 2:\n backtrack(maze, neighbour)\n yield maze\n return\n else:\n q.append(neighbour)\n maze[neighbour.row][neighbour.cell] = -2\n\n yield maze\n maze[current_node.row][current_node.cell] = -3" ]
[ "0.7035998", "0.6925164", "0.6904916", "0.6853418", "0.68220633", "0.6815321", "0.68057096", "0.67877156", "0.67551005", "0.6739377", "0.67376566", "0.6728888", "0.66979206", "0.66303045", "0.6626632", "0.6601675", "0.6586584", "0.654938", "0.6547735", "0.6547735", "0.65444225", "0.65406346", "0.6538829", "0.652471", "0.6517137", "0.651632", "0.65096813", "0.6449108", "0.6448771", "0.64424604" ]
0.81023574
0
Assert that array proxies return memory maps as expected
def check_mmap(hdr, offset, proxy_class, has_scaling=False, unscaled_is_view=True): shape = hdr.get_data_shape() arr = np.arange(np.prod(shape), dtype=hdr.get_data_dtype()).reshape(shape) fname = 'test.bin' # Whether unscaled array memory backed by memory map (regardless of what # numpy says). unscaled_really_mmap = unscaled_is_view # Whether scaled array memory backed by memory map (regardless of what # numpy says). scaled_really_mmap = unscaled_really_mmap and not has_scaling with InTemporaryDirectory(): with open(fname, 'wb') as fobj: fobj.write(b' ' * offset) fobj.write(arr.tostring(order='F')) for mmap, expected_mode in ( # mmap value, expected memmap mode # mmap=None -> no mmap value # expected mode=None -> no memmap returned (None, 'c'), (True, 'c'), ('c', 'c'), ('r', 'r'), (False, None)): kwargs = {} if mmap is not None: kwargs['mmap'] = mmap prox = proxy_class(fname, hdr, **kwargs) unscaled = prox.get_unscaled() back_data = np.asanyarray(prox) unscaled_is_mmap = isinstance(unscaled, np.memmap) back_is_mmap = isinstance(back_data, np.memmap) if expected_mode is None: assert_false(unscaled_is_mmap) assert_false(back_is_mmap) else: assert_equal(unscaled_is_mmap, VIRAL_MEMMAP or unscaled_really_mmap) assert_equal(back_is_mmap, VIRAL_MEMMAP or scaled_really_mmap) if scaled_really_mmap: assert_equal(back_data.mode, expected_mode) del prox, back_data # Check that mmap is keyword-only assert_raises(TypeError, proxy_class, fname, hdr, True) # Check invalid values raise error assert_raises(ValueError, proxy_class, fname, hdr, mmap='rw') assert_raises(ValueError, proxy_class, fname, hdr, mmap='r+')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_memmap():\n\n asflt = lambda x: as_float_array(x, copy=False)\n\n with NamedTemporaryFile(prefix='sklearn-test') as tmp:\n M = np.memmap(tmp, shape=100, dtype=np.float32)\n M[:] = 0\n\n for f in (array2d, np.asarray, asflt, safe_asarray):\n X = f(M)\n X[:] = 1\n assert_array_equal(X.ravel(), M)\n X[:] = 0", "def test_map_basics(self):\n self.assertDigitizerMapBasics(self.map, self.dgroup)", "def test_two_instmaps_equal(self):\n instmap1 = FakeAthens().defaults().instruction_schedule_map\n instmap2 = copy.deepcopy(instmap1)\n\n self.assertEqual(instmap1, instmap2)", "def test_transform_array_anonymize(self, mock_maps):\n # Setup\n data = np.array(['bar', 'foo', 'foo', 'tar'])\n\n # Run\n transformer = Mock()\n transformer.anonymize = 'email'\n transformer.intervals = [1, 2, 3]\n\n mock_maps[id(transformer)] = {\n 'bar': 'bar_x',\n 'foo': 'foo_x',\n 'tar': 'tar_x'\n }\n\n result = CategoricalTransformer.transform(transformer, data)\n\n # Asserts\n expect_result_len = 4\n\n self.assertEqual(\n len(result),\n expect_result_len,\n \"Unexpected length of transformed data\"\n )", "def test_int_list(self):\n \n self.assertEqual(False, \n maps.map_list([1, 2, 3]))", "def test_transform_array_no_anonymize(self, mock_maps):\n # Setup\n data = np.array(['bar', 'foo', 'foo', 'tar'])\n\n # Run\n transformer = Mock()\n transformer.anonymize = None\n transformer.intervals = [1, 2, 3]\n\n CategoricalTransformer.transform(transformer, data)\n\n # Asserts\n expect_maps_call_count = 0\n\n self.assertEqual(\n mock_maps.call_count,\n expect_maps_call_count,\n \"Dont call to the map encoder when not anonymize\"\n )", "def test_hash_memmap():\r\n filename = tempfile.mktemp(prefix='joblib_test_hash_memmap_')\r\n try:\r\n m = np.memmap(filename, shape=(10, 10), mode='w+')\r\n a = np.asarray(m)\r\n for coerce_mmap in (False, True):\r\n yield (nose.tools.assert_equal,\r\n hash(a, coerce_mmap=coerce_mmap)\r\n == hash(m, coerce_mmap=coerce_mmap),\r\n coerce_mmap)\r\n finally:\r\n if 'm' in locals():\r\n del m\r\n # Force a garbage-collection cycle, to be certain that the\r\n # object is delete, and we don't run in a problem under\r\n # Windows with a file handle still open.\r\n gc.collect()\r\n try:\r\n os.unlink(filename)\r\n except OSError as e:\r\n # Under windows, some files don't get erased.\r\n if not os.name == 'nt':\r\n raise e", "def test_check_map(self):\r\n\r\n header, mapping_data = check_map(self.valid_mapping_data_golay)\r\n\r\n expected_header =\\\r\n ['SampleID',\r\n 'BarcodeSequence',\r\n 'LinkerPrimerSequence',\r\n 'Description']\r\n expected_mapping_data =\\\r\n [['s1', 'AACTCGTCGATG', 'ATTCGATART', 's1_description'],\r\n ['s2', 'agcAGCACTTGT', 'ATTCGATART', 's2_description'],\r\n ['s3', 'ACCGCAGAGTCA', 'YATGCTGCCTCCCGTAGGAGT', 's3_description']]\r\n\r\n self.assertEquals(header, expected_header)\r\n self.assertEquals(mapping_data, expected_mapping_data)", "def test_getitem_array(self):\n random.seed(12345)\n\n nside_coverage = 32\n nside_map = 128\n\n full_map = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN\n full_map[0: 5000] = random.random(size=5000)\n\n sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)\n\n indices = np.array([1, 2, 100, 500, 10000])\n testing.assert_array_almost_equal(sparse_map[indices], full_map[indices])\n testing.assert_almost_equal(sparse_map[indices[0]], full_map[indices[0]])\n\n indices = np.array([1., 2, 100, 500, 10000])\n self.assertRaises(IndexError, sparse_map.__getitem__, indices)\n 
self.assertRaises(IndexError, sparse_map.__getitem__, indices[0])", "def test_map(self):\n\n test_cases = [\n Case(\n description=\"lists of objects\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[\"title\"],\n kwargs={},\n expect=[\"foo\", \"bar\", \"baz\"],\n ),\n Case(\n description=\"missing argument\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[],\n kwargs={},\n expect=FilterArgumentError,\n ),\n Case(\n description=\"too many arguments\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[\"title\", \"\"],\n kwargs={},\n expect=FilterArgumentError,\n ),\n Case(\n description=\"missing property\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"heading\": \"baz\"}],\n args=[\"title\"],\n kwargs={},\n expect=[\"foo\", \"bar\", None],\n ),\n Case(\n description=\"value not an array\",\n val=123,\n args=[\"title\"],\n kwargs={},\n expect=FilterValueError,\n ),\n Case(\n description=\"array contains non object\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, 5, []],\n args=[\"title\"],\n kwargs={},\n expect=FilterValueError,\n ),\n Case(\n description=\"undefined left value\",\n val=self.env.undefined(\"test\"),\n args=[\"title\"],\n kwargs={},\n expect=[],\n ),\n Case(\n description=\"undefined argument\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[self.env.undefined(\"test\")],\n kwargs={},\n expect=[None, None, None],\n ),\n ]\n\n self._test(Map, test_cases)", "def test_reference_to_array(self):\n arr = numpy.arange(0.0, 10.0, 0.1)\n arr = numpy.reshape(arr, (25, 4))\n vtk_arr = array_handler.array2vtk(arr)\n arr1 = array_handler.vtk2array(vtk_arr)\n # Now make sure these are using the same memory.\n arr[0][0] = 100.0\n self.assertEqual(arr[0][0], arr1[0][0])\n self.assertEqual(arr.shape, arr1.shape)", "def testExpectationBuilderMap(self):\n m = data_types.ExpectationBuilderMap()\n e = data_types.Expectation('test', ['tag'], 'Failure')\n with self.assertRaises(AssertionError):\n m[1] = data_types.BuilderStepMap()\n with self.assertRaises(AssertionError):\n m[e] = 2\n m[e] = data_types.BuilderStepMap()\n self.assertEqual(m, {e: data_types.BuilderStepMap()})", "def test_values(self):\n self.assertEqual([self.expected_described_model], list(self.mapped_model.values()))", "def test_inmemory(self):\n\n\t\tself.assertEqual(self.inmemory, self.tracker.inmemory)", "def test_eq(self):\r\n self.assertTrue(self.empty_map == MetadataMap({}, []))\r\n self.assertTrue(self.overview_map == MetadataMap(\r\n self.overview_map._metadata, self.overview_map.Comments))", "def test_custom_memory(self):\n\n @self.variant\n def episodic_memory_intrinsic_rewards(embeddings, memory, reward_scale):\n return exploration.episodic_memory_intrinsic_rewards(\n embeddings, self.num_neighbors, reward_scale,\n exploration.IntrinsicRewardState(memory=memory, next_memory_index=2),\n max_memory_size=4)\n\n embeddings = np.array([[2., 2.], [3., 3.], [4., 4.]])\n memory = np.array([[-1., -1.,], [1., 1.], [0., 0.], [0., 0.]])\n _, intrinsic_reward_state = episodic_memory_intrinsic_rewards(\n embeddings, memory, self.reward_scale)\n\n np.testing.assert_array_equal(\n intrinsic_reward_state.memory,\n # Embeddings should have been added in a ring buffer way.\n np.array([[4., 4.,], [1., 1.], [2., 2.], [3., 3.]]))", "def test_pool_with_memmap_array_view():\r\n assert_array_equal = np.testing.assert_array_equal\r\n\r\n # Fork the subprocess before allocating the objects to be 
passed\r\n pool_temp_folder = os.path.join(TEMP_FOLDER, 'pool')\r\n os.makedirs(pool_temp_folder)\r\n p = MemmapingPool(10, max_nbytes=2, temp_folder=pool_temp_folder)\r\n try:\r\n\r\n filename = os.path.join(TEMP_FOLDER, 'test.mmap')\r\n a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+')\r\n a.fill(1.0)\r\n\r\n # Create an ndarray view on the memmap instance\r\n a_view = np.asarray(a)\r\n assert_false(isinstance(a_view, np.memmap))\r\n assert_true(has_shareable_memory(a_view))\r\n\r\n p.map(double, [(a_view, (i, j), 1.0)\r\n for i in range(a.shape[0])\r\n for j in range(a.shape[1])])\r\n\r\n # Both a and the a_view have been updated\r\n assert_array_equal(a, 2 * np.ones(a.shape))\r\n assert_array_equal(a_view, 2 * np.ones(a.shape))\r\n\r\n # Passing memmap array view to the pool should not trigger the\r\n # creation of new files on the FS\r\n assert_equal(os.listdir(pool_temp_folder), [])\r\n\r\n finally:\r\n p.terminate()\r\n del p", "def test_memmap_based_array_reducing():\r\n assert_array_equal = np.testing.assert_array_equal\r\n filename = os.path.join(TEMP_FOLDER, 'test.mmap')\r\n\r\n # Create a file larger than what will be used by a\r\n buffer = np.memmap(filename, dtype=np.float64, shape=500, mode='w+')\r\n\r\n # Fill the original buffer with negative markers to detect over of\r\n # underflow in case of test failures\r\n buffer[:] = - 1.0 * np.arange(buffer.shape[0], dtype=buffer.dtype)\r\n buffer.flush()\r\n\r\n # Memmap a 2D fortran array on a offseted subsection of the previous\r\n # buffer\r\n a = np.memmap(filename, dtype=np.float64, shape=(3, 5, 4),\r\n mode='r+', order='F', offset=4)\r\n a[:] = np.arange(60).reshape(a.shape)\r\n\r\n # Build various views that share the buffer with the original memmap\r\n\r\n # b is an memmap sliced view on an memmap instance\r\n b = a[1:-1, 2:-1, 2:4]\r\n\r\n # c and d are array views\r\n c = np.asarray(b)\r\n d = c.T\r\n\r\n # Array reducer with auto dumping disabled\r\n reducer = ArrayMemmapReducer(None, TEMP_FOLDER, 'c')\r\n\r\n def reconstruct_array(x):\r\n cons, args = reducer(x)\r\n return cons(*args)\r\n\r\n def reconstruct_memmap(x):\r\n cons, args = reduce_memmap(x)\r\n return cons(*args)\r\n\r\n # Reconstruct original memmap\r\n a_reconstructed = reconstruct_memmap(a)\r\n assert_true(has_shareable_memory(a_reconstructed))\r\n assert_true(isinstance(a_reconstructed, np.memmap))\r\n assert_array_equal(a_reconstructed, a)\r\n\r\n # Reconstruct strided memmap view\r\n b_reconstructed = reconstruct_memmap(b)\r\n assert_true(has_shareable_memory(b_reconstructed))\r\n assert_array_equal(b_reconstructed, b)\r\n\r\n # Reconstruct arrays views on memmap base\r\n c_reconstructed = reconstruct_array(c)\r\n assert_false(isinstance(c_reconstructed, np.memmap))\r\n assert_true(has_shareable_memory(c_reconstructed))\r\n assert_array_equal(c_reconstructed, c)\r\n\r\n d_reconstructed = reconstruct_array(d)\r\n assert_false(isinstance(d_reconstructed, np.memmap))\r\n assert_true(has_shareable_memory(d_reconstructed))\r\n assert_array_equal(d_reconstructed, d)\r\n\r\n # Test graceful degradation on fake memmap instances with in-memory\r\n # buffers\r\n a3 = a * 3\r\n assert_false(has_shareable_memory(a3))\r\n a3_reconstructed = reconstruct_memmap(a3)\r\n assert_false(has_shareable_memory(a3_reconstructed))\r\n assert_false(isinstance(a3_reconstructed, np.memmap))\r\n assert_array_equal(a3_reconstructed, a * 3)\r\n\r\n # Test graceful degradation on arrays derived from fake memmap instances\r\n b3 = np.asarray(a3)\r\n 
assert_false(has_shareable_memory(b3))\r\n\r\n b3_reconstructed = reconstruct_array(b3)\r\n assert_true(isinstance(b3_reconstructed, np.ndarray))\r\n assert_false(has_shareable_memory(b3_reconstructed))\r\n assert_array_equal(b3_reconstructed, b3)", "def test_core_functionality(self):\n # Test typing\n self.run_map_collection(\n _map_collection=self.example_map\n )", "def test__chk_asarray(self):\r\n\r\n exp = (array([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]]), 0)\r\n obs = _chk_asarray([[1, 1, 1, 1], [2, 2, 2, 2], [3, 3, 3, 3]], 0)\r\n assert_almost_equal(obs[0], exp[0])\r\n self.assertEqual(obs[1], exp[1])", "def test_apply_scalar_map(self):\n super(TestObjDict, self).test_apply_scalar_map(_as_obj=True)", "def test_eq(self):\n self.assertTrue(self.empty_map == MetadataMap({}, []))\n self.assertTrue(self.overview_map == MetadataMap(\n self.overview_map._metadata, self.overview_map.Comments))", "def test_mapping_switch():\n\tassert nset != oset", "def test_sample_mapped_keys(self):\r\n\r\n # With num_coverage=1 only the keys will be sampled\r\n actual = sample_mapped_keys(self.test_map, 1)\r\n self.assertEqual(actual, {'1': ['1'], '2': ['2']})\r\n\r\n actual = sample_mapped_keys(self.test_map, 3)\r\n for key in actual.keys():\r\n # check number of sampled keys\r\n self.assertEqual(3, len(actual[key]))\r\n for x in actual[key]:\r\n # check that sampled key is in the full list\r\n correct = list(self.test_map[key])\r\n correct.append(key)\r\n self.assertTrue(x in correct)", "def test_MetadataMap_getter(self):\r\n self.assertEqual(self.cs_overview.MetadataMap, self.overview_map)", "def soft_assert_objects_are_mapped(\n selenium, soft_assert, src_obj, objs, *args, **kwargs\n):\n ui_service_cls = factory.get_cls_webui_service(\n objects.get_plural(objs[0].type))\n mapped_objs = (ui_service_cls(driver=selenium, *args, **kwargs).\n get_list_objs_from_tree_view(src_obj=src_obj))\n soft_assert.expect(\n [obj.tree_item_representation() for obj in objs] == mapped_objs,\n messages.AssertionMessages.OBJS_SHOULD_BE_MAPPED_TO_OBJ.format(\n mapped_objs_names=[obj.title for obj in objs],\n src_obj_name=src_obj.title))", "def test_dict(test_data):\n\n # Stupidly trivial map\n gpmap.read_dict({\"wildtype\":\"0\",\n \"data\":{\"genotype\":[\"0\"]}})\n\n # Make sure wildtype check is working\n with pytest.raises(ValueError):\n gpmap.read_dict({\"data\":{\"genotype\":[\"0\"]}})\n\n # Make sure wildtype length/genotype length check working\n with pytest.raises(ValueError):\n gpmap.read_dict({\"wildtype\":\"01\",\"data\":{\"genotype\":[\"0\"]}})\n\n for d in test_data:\n\n gpm = GenotypePhenotypeMap(wildtype=d[\"wildtype\"],\n genotype=d[\"genotype\"],\n phenotype=d[\"phenotype\"],\n uncertainty=d[\"uncertainty\"])\n\n # Write out as a dcitionary\n gpm_as_dict = gpm.to_dict()\n\n # Check wildtype meta data, mutations meta data\n assert gpm_as_dict[\"wildtype\"] == d[\"wildtype\"]\n for i in range(len(gpm_as_dict[\"mutations\"])):\n assert np.array_equal(gpm_as_dict[\"mutations\"][i],d[\"mutations\"][i])\n\n # This is a pandas data conversion. 
Don't check in detail, just make sure\n # the conversion dumped out a a dict.\n assert type(gpm_as_dict[\"data\"]) is dict\n\n # Read dictionary back in and make sure it's the same\n new_gpm = gpmap.read_dict(gpm_as_dict)\n conftest.compare_gpmap(gpm,new_gpm)", "def test__applyDataMap(t):\n t.assertEqual(t.adm._applyDataMap, t.adm.applyDataMap)", "def test_memmaping_pool_for_large_arrays_in_return():\r\n assert_array_equal = np.testing.assert_array_equal\r\n\r\n # Build an array reducers that automaticaly dump large array content\r\n # but check that the returned datastructure are regular arrays to avoid\r\n # passing a memmap array pointing to a pool controlled temp folder that\r\n # might be confusing to the user\r\n\r\n # The MemmapingPool user can always return numpy.memmap object explicitly\r\n # to avoid memory copy\r\n p = MemmapingPool(3, max_nbytes=10, temp_folder=TEMP_FOLDER)\r\n try:\r\n res = p.apply_async(np.ones, args=(1000,))\r\n large = res.get()\r\n assert_false(has_shareable_memory(large))\r\n assert_array_equal(large, np.ones(1000))\r\n finally:\r\n p.terminate()\r\n del p", "def test_values(self):\n obs = self.tester.values()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {Sample('1.SKB1.640202', self.tester),\n Sample('1.SKB2.640194', self.tester),\n Sample('1.SKB3.640195', self.tester),\n Sample('1.SKB4.640189', self.tester),\n Sample('1.SKB5.640181', self.tester),\n Sample('1.SKB6.640176', self.tester),\n Sample('1.SKB7.640196', self.tester),\n Sample('1.SKB8.640193', self.tester),\n Sample('1.SKB9.640200', self.tester),\n Sample('1.SKD1.640179', self.tester),\n Sample('1.SKD2.640178', self.tester),\n Sample('1.SKD3.640198', self.tester),\n Sample('1.SKD4.640185', self.tester),\n Sample('1.SKD5.640186', self.tester),\n Sample('1.SKD6.640190', self.tester),\n Sample('1.SKD7.640191', self.tester),\n Sample('1.SKD8.640184', self.tester),\n Sample('1.SKD9.640182', self.tester),\n Sample('1.SKM1.640183', self.tester),\n Sample('1.SKM2.640199', self.tester),\n Sample('1.SKM3.640197', self.tester),\n Sample('1.SKM4.640180', self.tester),\n Sample('1.SKM5.640177', self.tester),\n Sample('1.SKM6.640187', self.tester),\n Sample('1.SKM7.640188', self.tester),\n Sample('1.SKM8.640201', self.tester),\n Sample('1.SKM9.640192', self.tester)}\n # Creating a list and looping over it since unittest does not call\n # the __eq__ function on the objects\n for o, e in zip(sorted(list(obs), key=lambda x: x.id),\n sorted(exp, key=lambda x: x.id)):\n self.assertEqual(o, e)" ]
[ "0.61014163", "0.6063968", "0.5974768", "0.5952109", "0.5929823", "0.5922987", "0.58732295", "0.5863351", "0.58366203", "0.58331126", "0.5743954", "0.5739532", "0.5704142", "0.5667479", "0.5648909", "0.5646724", "0.564385", "0.56330293", "0.56127864", "0.56041855", "0.5600273", "0.5582346", "0.55821645", "0.55819374", "0.55730337", "0.5541402", "0.554032", "0.5520557", "0.5502746", "0.549907" ]
0.61041445
0
Mean of bins. This function takes two corresponding 2D arrays x and y, and calculates the mean of y for specific ranges of x
def mean_relationship_twoD(x, y, bins_values): sort_ind_x = np.argsort(x) x = x[sort_ind_x] y = y[:, sort_ind_x] hist, bin_edges = np.histogram(x, bins=bins_values) array_end = np.cumsum(hist) array_start = np.cumsum(hist) - hist y_x = np.zeros((len(y), len(array_start))) for i in np.arange(len(array_start)): y_x[:, i] = np.mean(y[:, array_start[i]:array_end[i]], axis=1) return y_x
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binnedAverage(x, y, bins=20):\n xbins, step = np.linspace(np.min(x), np.max(x), num=bins, retstep=True)\n xbins = (xbins + step/2)[:-1]\n emptyBins = []\n ymeans = []\n for xbi, xb in enumerate(xbins):\n ytotal = 0\n ycount = 0\n for y_i, y_ in enumerate(y):\n if xb - step/2 < x[y_i] < xb + step/2:\n ytotal += y_\n ycount += 1\n if ycount >= 1:\n ymeans.append(ytotal/ycount)\n else:\n emptyBins.append(xbi)\n xbins = np.delete(xbins, emptyBins)\n return xbins, np.array(ymeans)", "def mean_relationship(x, y, bins_values):\r\n sort_ind_x = np.argsort(x)\r\n x = x[sort_ind_x]\r\n y = y[sort_ind_x]\r\n hist, bin_edges = np.histogram(x, bins=bins_values)\r\n array_end = np.cumsum(hist)\r\n array_start = np.cumsum(hist) - hist\r\n y_x = np.zeros(len(array_start))\r\n y_x_std = np.zeros(len(array_start))\r\n for i in np.arange(len(array_start)):\r\n y_x[i] = np.mean(y[array_start[i]:array_end[i]])\r\n y_x_std[i] = np.std(y[array_start[i]:array_end[i]])\r\n return y_x, y_x_std", "def hbinavg(x,y,bins):\n\n binx = bins[:-1] + (bins[1:] - bins[:-1])/2.\n bsum = ( np.histogram(x,bins=bins,weights=y) )[0]\n bn = ( np.histogram(x,bins=bins) )[0]\n biny = bsum/bn\n\n return binx,biny", "def get_bin_means(X,Y,bin_edges=None,mean='median',error='sem',minimum_n=25):\n \n assert(X.shape == Y.shape)\n \n # Flatten if not vectors\n if X.ndim > 1:\n X = X.flatten()\n Y = Y.flatten()\n \n if (bin_edges == None).all():\n X_min = np.nanmin(X)\n X_max = np.nanmax(X)\n bin_edges = np.linspace(X_min,X_max,num=10)\n \n \n which_bin = np.digitize(X,bin_edges)\n Nbins = len(bin_edges)-1\n means = np.zeros(Nbins)\n stds = np.zeros(Nbins)\n bin_centers = np.zeros(Nbins)\n for b in range(Nbins):\n y = Y[which_bin == b+1]\n bin_centers[b] = (bin_edges[b] + bin_edges[b+1]) / 2\n # Suppress noisy bins\n if len(y) < minimum_n:\n means[b] = np.nan\n stds[b] = np.nan\n else:\n # Mean or median\n if mean == 'mean':\n means[b] = np.nanmean(y)\n elif mean == 'median':\n means[b] = np.nanmedian(y)\n \n if error == 'sem':\n stds[b] = np.nanstd(y) / np.sqrt(len(y))\n elif error == 'std':\n stds[b] = np.nanstd(y)\n \n return means", "def d_mean(x, y):\n return (x + y) / 2", "def bin_stats(x,y,xbins,stat='average'):\n nbins=len(xbins)\n if stat=='average' or stat=='mean': func=mean\n elif stat=='median': func=median\n elif stat=='rms' or stat=='std' : func=std\n elif stat=='std_robust' or stat=='rms_robust': func=std_robust\n elif stat=='mean_robust': func=mean_robust\n elif stat=='median_robust': func=median_robust\n elif stat=='sum': func=sum\n results=[]\n for i in range(nbins):\n if i<nbins-1:\n good=(greater_equal(x,xbins[i])\n *less(x,xbins[i+1]))\n else: good=(greater_equal(x,xbins[-1]))\n if sum(good)>1.: results.append(func(compress(good,y)))\n else:\n results.append(0.)\n print('Bin starting at xbins[%i] has %i points' % (i,sum(good)))\n return array(results)", "def avg(x, y):\n return (x + y)/2", "def averages(x,y):\n new_x = [x[0]]\n new_y = []\n\n cur_x = new_x[0]\n cur_ys = []\n for x_i, y_i in zip(x,y):\n if x_i == cur_x:\n cur_ys.append(y_i)\n else:\n new_y.append( sum(cur_ys)/float(len(cur_ys) ) )\n new_x.append( x_i )\n cur_ys = [y_i]\n cur_x = x_i\n new_y.append( sum(cur_ys)/float(len(cur_ys) ) )\n return new_x, new_y", "def mean_in_range(arr, args):\n mn = np.mean(arr)\n res = mn > args[0] and mn < args[1]\n return ct.Result(res, 'mean_in_range')", "def atmean(a,limits=None,inclusive=(1,1)):\r\n if a.dtype in [N.int_, N.short,N.ubyte]:\r\n a = a.astype(N.float_)\r\n if limits == None:\r\n return mean(a)\r\n 
assert type(limits) in [ListType,TupleType,N.ndarray], \"Wrong type for limits in atmean\"\r\n if inclusive[0]: lowerfcn = N.greater_equal\r\n else: lowerfcn = N.greater\r\n if inclusive[1]: upperfcn = N.less_equal\r\n else: upperfcn = N.less\r\n if limits[0] > N.maximum.reduce(N.ravel(a)) or limits[1] < N.minimum.reduce(N.ravel(a)):\r\n raise ValueError, \"No array values within given limits (atmean).\"\r\n elif limits[0]==None and limits[1]<>None:\r\n mask = upperfcn(a,limits[1])\r\n elif limits[0]<>None and limits[1]==None:\r\n mask = lowerfcn(a,limits[0])\r\n elif limits[0]<>None and limits[1]<>None:\r\n mask = lowerfcn(a,limits[0])*upperfcn(a,limits[1])\r\n s = float(N.add.reduce(N.ravel(a*mask)))\r\n n = float(N.add.reduce(N.ravel(mask)))\r\n return s/n", "def local_mean(x,y, n=10):\n\n xx, yy = (list(t) for t in zip(*sorted(zip(x, y)))) # sort x and y after x\n\n m = int(len(x)/n) # Number of data points in each group\n\n x_o, y_o = [], []\n x_sum, y_sum, v = 0, 0, 0\n j=1\n for i in range(len(x)):\n if v < m:\n x_sum += xx[i]\n y_sum += yy[i]\n v += 1\n else:\n x_o.append(x_sum/m)\n y_o.append(y_sum/m)\n x_sum, y_sum, v = 0, 0, 0\n j += 1\n\n return x_o, y_o", "def mape(x, y):\n return statistics.mean(ape(x, y))", "def profileX(xdata, ydata, nbins, xrange=None, yrange=None, drop_nan=True):\n xmin, xmax = (np.min(xdata), np.max(xdata)) if xrange is None else xrange\n ymin, ymax = (np.min(ydata), np.max(ydata)) if yrange is None else yrange\n\n x_out = np.linspace(xmin, xmax, nbins+1)\n y_out = np.empty(nbins)\n y_err = np.empty(nbins)\n dx = np.diff(x_out)[0]\n\n selection = in_range(xdata, xmin, xmax) & in_range(ydata, ymin, ymax)\n xdata, ydata = xdata[selection], ydata[selection]\n for i in range(nbins):\n bin_data = np.extract(in_range(xdata, x_out[i], x_out[i+1]), ydata)\n y_out[i] = np.mean(bin_data)\n y_err[i] = np.std(bin_data) / bin_data.size**0.5\n x_out += dx / 2.\n x_out = x_out[:-1]\n if drop_nan:\n selection = ~(np.isnan(y_out) | np.isnan(y_err))\n x_out = x_out[selection]\n y_out = y_out[selection]\n y_err = y_err[selection]\n return x_out, y_out, y_err", "def plot_bin_means(X,Y,bin_edges=None,mean='median',error='sem',color=None,\n style='errorbar',minimum_n=25,bin_style='equal'):\n \n assert(X.shape == Y.shape)\n \n X,Y = nonan_pairs(X,Y)\n \n # Flatten if not vectors\n if X.ndim > 1:\n X = X.flatten()\n Y = Y.flatten()\n \n if type(bin_edges) == int:\n if bin_style == 'equal':\n X_min = X.min()\n X_max = X.max() \n bin_edges = np.linspace(X_min,X_max,num=bin_edges)\n elif bin_style == 'percentile':\n bin_edges = np.percentile(nonans(X),np.linspace(0,100,num=bin_edges))\n print(bin_edges)\n else:\n raise ValueError\n \n which_bin = np.digitize(X,bin_edges)\n Nbins = len(bin_edges)-1\n means = np.zeros(Nbins)\n stds = np.zeros(Nbins)\n \n \n # bin_centers = np.zeros(Nbins)\n \n bin_centers = (bin_edges[:-1] + bin_edges[1:])/2\n \n for b in range(Nbins):\n y = Y[which_bin == b+1]\n # bin_centers[b] = (bin_edges[b] + bin_edges[b+1]) / 2\n # Suppress noisy bins\n if len(y) < minimum_n:\n means[b] = np.nan\n stds[b] = np.nan\n else:\n # Mean or median\n if mean == 'mean':\n means[b] = np.nanmean(y)\n elif mean == 'median':\n print(f'{y.shape}')\n means[b] = np.nanmedian(y)\n \n if error == 'sem':\n stds[b] = np.nanstd(y) / np.sqrt(len(y))\n elif error == 'std':\n stds[b] = y.std()\n\n # Plot\n if style == 'errorbar':\n plt.errorbar(bin_centers,means,stds,color=color)\n elif style == 'fill':\n plt.plot(bin_centers, means, color=color)\n plt.fill_between(bin_centers, 
means-stds, means+stds,\n color=color,alpha=0.5)\n \n return means", "def _mean_diff(x, y):\n return np.mean(x) - np.mean(y)", "def bivariate_mean(x, y, pdf):\n\n if pdf.shape[0] != x.shape[0] or pdf.shape[1] != y.shape[0]:\n print(\"Error, mesh size does not match x and y\")\n n_x = x.shape[0]\n n_y = y.shape[0]\n mean_int_x, mean_int_y = 0.0, 0.0\n p_of_x, p_of_y = np.zeros(n_x), np.zeros(n_y)\n for i in range(0, n_x):\n for j in range(1, n_y):\n delta_y = y[j] - y[j - 1]\n p_of_x[i] += delta_y / 2.0 * (pdf[i, j] + pdf[i, j - 1])\n if i > 0:\n delta_x = x[i] - x[i - 1]\n mean_int_x += delta_x / 2.0 * (x[i] * p_of_x[i] + x[i - 1] * p_of_x[i - 1])\n\n for j in range(0, n_y):\n for i in range(1, n_x):\n delta_x = x[i] - x[i - 1]\n p_of_y[j] += delta_x / 2.0 * (pdf[i, j] + pdf[i - 1, j])\n if j > 0:\n delta_y = y[j] - y[j - 1]\n mean_int_y += delta_y / 2.0 * (y[j] * p_of_y[j] + y[j - 1] * p_of_y[j - 1])\n\n return mean_int_x, mean_int_y", "def bin_spec_y(self, start, end):\n #print(self.spec_x.tolist())\n start_spec_x = closest_value_index(start, self.spec_x.tolist())\n i = 0\n bin_sum = 0\n while(start_spec_x + i < len(self.spec_x) and self.spec_x[start_spec_x + i] <= end):\n bin_sum += self.spec_y[start_spec_x + i]\n i += 1\n average = bin_sum / (i+1)\n return average", "def bin_data(data, num_bins):\n\tslices = np.linspace(0, 100, num_bins+1, True).astype(np.int)\n\tcounts = np.diff(slices)\n\n\tmean = np.add.reduceat(data, slices[:-1]) / counts\n\treturn mean", "def autobin_stats(x,y,n_bins=8,stat='average',n_points=None):\n \n if not ascend(x):\n ix=argsort(x)\n x=take(x,ix)\n y=take(y,ix)\n n=len(x)\n if n_points==None: \n #This throws out some points\n n_points=n/n_bins\n else: \n n_bins=n/n_points\n #if there are more that 2 points in the last bin, add another bin\n if n%n_points>2: n_bins=n_bins+1\n \n if n_points<=1:\n print('Only 1 or less points per bin, output will be sorted input vector with rms==y')\n return x,y\n xb,yb=[],[]\n \n #print 'stat', stat\n if stat=='average' or stat=='mean': func=mean\n elif stat=='median': func=median\n elif stat=='rms' or stat=='std' : func=std\n elif stat=='std_robust' or stat=='rms_robust': func=std_robust\n elif stat=='mean_robust': func=mean_robust\n elif stat=='median_robust': func=median_robust\n elif stat=='p2p': func=p2p # --DC\n elif stat=='min': func=min # --DC\n elif stat=='max': func=max # --DC\n \n for i in range(n_bins):\n xb.append(mean(x[i*n_points:(i+1)*n_points]))\n if func==std and n_points==2:\n print('n_points==2; too few points to determine rms')\n print('Returning abs(y1-y2)/2. 
in each bin as rms')\n yb.append(abs(y[i*n_points]-y[i*n_points+1])/2.)\n else:\n yb.append(func(y[i*n_points:(i+1)*n_points]))\n if i>2 and xb[-1]==xb[-2]: \n yb[-2]=(yb[-2]+yb[-1])/2.\n xb=xb[:-1]\n yb=yb[:-1]\n return array(xb),array(yb)", "def profileY(xdata, ydata, nbins, yrange=None, xrange=None, drop_nan=True):\n xmin, xmax = (np.min(xdata), np.max(xdata)) if xrange is None else xrange\n ymin, ymax = (np.min(ydata), np.max(ydata)) if yrange is None else yrange\n\n x_out = np.linspace(ymin, ymax, nbins+1)\n y_out = np.empty(nbins)\n y_err = np.empty(nbins)\n dx = np.diff(x_out)[0]\n\n selection = in_range(xdata, xmin, xmax) & in_range(ydata, ymin, ymax)\n xdata, ydata = xdata[selection], ydata[selection]\n for i in range(nbins):\n bin_data = np.extract(in_range(ydata, x_out[i], x_out[i+1]), xdata)\n y_out[i] = np.mean(bin_data)\n y_err[i] = np.std(bin_data) / bin_data.size**0.5\n x_out += dx / 2.\n x_out = x_out[:-1]\n if drop_nan:\n selection = ~(np.isnan(y_out) | np.isnan(y_err))\n x_out = x_out[selection]\n y_out = y_out[selection]\n y_err = y_err[selection]\n return x_out, y_out, y_err", "def hmean(x, y):\n if x == y:\n return float(x)\n elif x == 0.0 or y == 0.0:\n return 0.0\n else:\n return 2.0 * _div(x * y, x + y)", "def build_mean_vector(X, Y_range):\r\n C = Y_range.size -1\r\n M = np.zeros((X.shape[0], C))\r\n for c in xrange(C):\r\n Xc = get_block_col(X, c, Y_range)\r\n M[:, c] = np.mean(Xc, axis=1)\r\n return M", "def histogram2d(x, y, bins_x, bins_y):\n # x-range\n x_max, x_min = x.max(), x.min()\n delta_x = 1 / ((x_max - x_min) / bins_x)\n # y-range\n y_max, y_min = y.max(), y.min()\n delta_y = 1 / ((y_max - y_min) / bins_y)\n # compute histogram 2d\n xy_bin = np.zeros((np.int64(bins_x), np.int64(bins_y)), dtype=np.int64)\n for t in range(len(x)):\n i = (x[t] - x_min) * delta_x\n j = (y[t] - y_min) * delta_y\n if 0 <= i < bins_x and 0 <= j < bins_y:\n xy_bin[int(i), int(j)] += 1\n return xy_bin", "def binning(x, y, xmin=None, xmax=None, dx=1 / 12.,\r\n window=3 / 12., interp=False, median=False):\r\n if xmin is None:\r\n xmin = np.nanmin(x)\r\n if xmax is None:\r\n xmax = np.nanmax(x)\r\n\r\n steps = np.arange(xmin, xmax, dx) # time steps\r\n bins = [(ti, ti + window) for ti in steps] # bin limits\r\n\r\n N = len(bins)\r\n yb = np.full(N, np.nan)\r\n xb = np.full(N, np.nan)\r\n eb = np.full(N, np.nan)\r\n nb = np.full(N, np.nan)\r\n sb = np.full(N, np.nan)\r\n\r\n for i in range(N):\r\n\r\n t1, t2 = bins[i]\r\n idx, = np.where((x >= t1) & (x <= t2))\r\n\r\n if len(idx) == 0:\r\n xb[i] = 0.5 * (t1 + t2)\r\n continue\r\n\r\n ybv = y[idx]\r\n\r\n if median:\r\n yb[i] = np.nanmedian(ybv)\r\n else:\r\n yb[i] = np.nanmean(ybv)\r\n\r\n xb[i] = 0.5 * (t1 + t2)\r\n eb[i] = mad_std(ybv)\r\n nb[i] = np.sum(~np.isnan(ybv))\r\n sb[i] = np.sum(ybv)\r\n\r\n if interp:\r\n try:\r\n yb = np.interp(x, xb, yb)\r\n eb = np.interp(x, xb, eb)\r\n sb = np.interp(x, xb, sb)\r\n xb = x\r\n except:\r\n pass\r\n\r\n return xb, yb, eb, nb, sb", "def hist2d(x,y,nbins = 50 ,maskval = 0,saveloc = '',labels=[],slope = 1,sloperr = 0):\n\t# Remove NANs and masked values\n\tgood = where((isnan(x) == False) & (isnan(y) == False) & (x != maskval) & (y != maskval))\n\tx = x[good]\n\ty = y[good]\n\n\t# Create histogram\n\tH,xedges,yedges = histogram2d(x,y,bins=nbins)\n\t# Reorient appropriately\n\tH = rot90(H)\n\tH = flipud(H)\n\t# Mask zero value bins\n\tHmasked = ma.masked_where(H==0,H)\n\t# Find average values in y:\n\tyavgs = []\n\tystds = []\n\txposs = []\n\tfor j in range(len(xedges)-1):\n\t\ttoavg = 
where((x > xedges[j]) & (x < xedges[j+1]))\n\t\txpos = np.mean(x[toavg])\n\t\tyavg = np.median(y[toavg])\n\t\tystd = np.std(y[toavg])/len(y[toavg])\n\t\txposs.append(xpos)\n\t\tyavgs.append(yavg)\n\t\tystds.append(ystd)\n\t# Begin creating figure\n\tplt.figure(figsize=(12,10))\n\t# Make histogram pixels with logscale\n\tplt.pcolormesh(xedges,yedges,Hmasked,\n\t norm = LogNorm(vmin = Hmasked.min(),\n\t vmax = Hmasked.max()),\n\t\t \t cmap = plt.get_cmap('Spectral_r'))\n\t# Create fit line x-array\n\tuplim = nmax(x)+5\n\tdolim = nmin(x)-5\n\tx_range = arange(dolim,uplim)\n\t# Plot fit line\n\tplt.plot(x_range,slope*x_range,color = 'royalblue',linewidth = 3,label = 'Slope = {0}, Uncertainty = {1}'.format(slope,sloperr))\n\t# Plot average points\n\tplt.errorbar(xposs,yavgs,yerr = ystds,fmt = 'D',color='k',markersize = 5)\n\t# Set plot limits\n\tplt.xlim(dolim+5,uplim-5)\n\tplt.ylim(nmin(y),nmax(y))\n\t# Add colourbar\n\tcbar = plt.colorbar()\n\t# Add labels\n\tif labels != []:\n\t title,xlabel,ylabel,zlabel = labels\n\t plt.xlabel(xlabel)\n\t plt.ylabel(ylabel)\n\t plt.title(title)\n\t cbar.ax.set_ylabel(zlabel)\n\t plt.legend(loc = 'best',fontsize = 15)\n\t# Save plot\n\tif saveloc != '':\n\t\tplt.savefig(saveloc)\n\tplt.close()\n\t# Return histogram\n\treturn xedges,yedges,Hmasked", "def mean_constrained(arr, lo, hi):\n condlist = (np.logical_not(np.logical_or(arr<lo, arr>hi)),)\n arr1 = np.ones(arr.shape, dtype=np.int32)\n arr_of1 = np.select(condlist, (arr1,), 0)\n arr_ofv = np.select(condlist, (arr,), 0)\n ngood = arr_of1.sum()\n return arr_ofv.sum()/ngood if ngood else None", "def felix_binning(xs, ys, delta=1):\n \n #bins = np.arange(start, end, delta)\n #occurance = np.zeros(start, end, delta)\n BIN_STEP = delta\n BIN_START = xs.min()\n BIN_STOP = xs.max()\n\n indices = xs.argsort()\n datax = xs[indices]\n datay = ys[indices]\n\n print(\"In total we have: \", len(datax), ' data points.')\n #do the binning of the data\n bins = np.arange(BIN_START, BIN_STOP, BIN_STEP)\n print(\"Binning starts: \", BIN_START, ' with step: ', BIN_STEP, ' ENDS: ', BIN_STOP)\n\n bin_i = np.digitize(datax, bins)\n bin_a = np.zeros(len(bins)+1)\n bin_occ = np.zeros(len(bins)+1)\n\n for i in range(datay.size):\n bin_a[bin_i[i]] += datay[i]\n bin_occ[bin_i[i]] += 1\n\n binsx, data_binned = [], []\n for i in range(bin_occ.size-1):\n if bin_occ[i] > 0:\n binsx.append(bins[i]-BIN_STEP/2)\n data_binned.append(bin_a[i]/bin_occ[i])\n\n #non_zero_i = bin_occ > 0\n #binsx = bins[non_zero_i] - BIN_STEP/2\n #data_binned = bin_a[non_zero_i]/bin_occ[non_zero_i]\n\n return binsx, data_binned", "def average(x, y):\n #helper function for get_accuracy\n average = (x+y)/2 \n return average", "def mean(vals):", "def rebin(x, factor):\n return np.mean(x.reshape(-1, factor), axis=1)" ]
[ "0.7400173", "0.73563105", "0.72301424", "0.6754396", "0.6561397", "0.62552804", "0.62478745", "0.6227985", "0.6172358", "0.6170711", "0.6138506", "0.6102812", "0.6051796", "0.6045405", "0.59922403", "0.597935", "0.59535205", "0.5918458", "0.5881802", "0.58234376", "0.5800687", "0.57964957", "0.5785515", "0.57774186", "0.57551956", "0.56959844", "0.567807", "0.5658953", "0.56491673", "0.56375825" ]
0.7733718
0
check that configuration was successfully read
def test_successful_read(self): self.assertTrue(self._configuration_ is not None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_config(self):", "def loadSuccessful(self):\r\n\r\n return (self.config != None)", "def check_config(cfg):", "def check_config(config):\n pass", "def read(self):\n return_code = W.config_read(self._ptr)\n if return_code == W.WEECHAT_CONFIG_READ_OK:\n return True\n if return_code == W.WEECHAT_CONFIG_READ_MEMORY_ERROR:\n return False\n if return_code == W.WEECHAT_CONFIG_READ_FILE_NOT_FOUND:\n return True\n return False", "def __check_config(self):\n if not os.path.exists(self.__config_path):\n return False\n else:\n return True", "def check_config(self):\n try:\n config_metadata = self.dbc.get_metadata(\"config.txt\")\n except rest.ErrorResponse:\n print str(datetime.datetime.now()) \\\n + \": No config.txt in Dropbox directory. Exiting.\"\n sys.exit()\n if config_metadata[\"modified\"] != self.config_date:\n print str(datetime.datetime.now()) + \": Config changed\"\n self.config_date = config_metadata[\"modified\"]\n try:\n self.dbc.get_file(\"config.txt\")\n except rest.ErrorResponse as e:\n print str(datetime.datetime.now()) + e.reason\n return False\n self.config.reload(self.local_directory + \"/\" + \"config.txt\")\n return True\n return False", "def check_configs(self):\n\n pass", "def validate_config(self):\n pass", "def validate_config(self):\n pass", "def check_configuration(self):\n self.ensure_one()\n getattr(self, '%s_check_configuration' % self.provider, lambda: None)()", "def _validate_config(self):\n pass", "def check_config_file():\n # Locate and init config.\n default_config = \"config.json\"\n if len(sys.argv) == 2:\n # config from command line\n app_config = config_reader(sys.argv[1])\n else:\n # config should be in default\n app_config = config_reader(default_config)\n # fin\n if not app_config:\n print(\"Exiting due to invalid config file.\")\n return False\n # fin\n return app_config", "def check_configuration(self):\n\n return bool(os.path.isfile(self.config_path) and\n self.validate_configuration_file())", "def check_config():\n\n if not config_instance:\n LOG.error(\"Failed to load the config!\")\n sys.exit(9)\n\n if not hasattr(config_instance, \"CONFIG_VERSION\"):\n LOG.warning( \"The config file does not specify CONFIG_VERSION! I will \"\n \"try to continue anyway, but this field is recommended to allow \"\n \"some internal tests to work. I will assume the value '(1,0)'!\" )\n config_instance.CONFIG_VERSION = (1, 0)\n\n major, minor = config_instance.CONFIG_VERSION\n expected_major, expected_minor = EXPECTED_CONFIG_VERSION\n\n if major < expected_major:\n LOG.critical(\"The config system has undergone a major change! \"\n \"I cannot continue without an upgrade!\")\n sys.exit(9)\n\n if minor < expected_minor:\n LOG.warning(\"The config system has undergone a minor change! 
\"\n \"It should work, but you still should review the docs!\")\n\n if major == expected_major and minor == expected_minor:\n LOG.debug( \"Config version OK!\" )\n\n if not hasattr(config_instance, \"GENERATORS\"):\n LOG.critical(\"Variable 'GENERATORS' not found in config!\")\n sys.exit(9)\n\n if not hasattr(config_instance, \"TARGETS\"):\n LOG.critical(\"Variable 'TARGETS' not found in config!\")\n sys.exit(9)", "def check_configuration(self):\n try:\n self.config.commit_check()\n self.queue_message(\"log\", \"Configuration checked.\")\n except (self.pyez_exception.RpcError,\n self.pyez_exception.ConnectError) as ex:\n raise AnsibleError('Failure checking the configuraton: %s' %\n (str(ex)))", "def check_configuration(self, configuration):\n super(Hipchap, self).check_configuration(configuration)", "def test_read_valid_configs(self):\n args = argparse.Namespace(server=None, force=False)\n with open(self._config) as config_f:\n with open(self._auth) as auth_config_f:\n (config_data, auth_tuple) = imageroller.main.read_configs(\n args,\n config_f,\n auth_config_f)\n self.assertEqual(config_data.concurrent_workers,\n CONFIG_DATA[\"ConcurrentWorkers\"])\n self.assertEqual(len(config_data.server_data), 1)\n self.assertTupleEqual(auth_tuple, (AUTH_DATA[\"ApiUser\"],\n AUTH_DATA[\"ApiKey\"]))", "def checkconfig(self): \n validconfig = {\n 'loglevel': lambda s: s in self.loglevels,\n 'logfilelevel': lambda s: s in self.loglevels,\n 'nodes': lambda s: isinstance(s, list),\n 'pynodes': lambda s: isinstance(s, list)\n }\n alive = True\n for key in self.config: \n if (key in validconfig and \n not validconfig[key](self.config[key])):\n logging.critical(\"Invalid configuration option {}: {}\".format(\n key, self.config[key]))\n alive = False\n return alive", "def testConfigA(self):\n assert type(self.config) == dict, \"Read setting not returning a dictionary\"", "def __check_configuration__(self, parser):\n if not parser.has_section('core'):\n self.logger.error('The config file should contain a core section with at least the module_path specified')\n sys.exit(1)\n\n else:\n if parser.get('core', 'modules_path', fallback=None) is None:\n self.logger.error('The configuration file should contain at least the modules_path value in core section.')\n sys.exit(1)\n\n if not parser.has_section('mysql'):\n self.logger.error('The config file should contain a mysql section.')\n sys.exit(1)\n\n else:\n if parser.get('mysql', 'host', fallback=None) is None:\n self.logger.error('The config file should contain the host value in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'port', fallback=None) is None:\n self.logger.error('The config file should contain the port value in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'user', fallback=None) is None:\n self.logger.error('The config file should contain the user in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'password', fallback=None) is None:\n self.logger.error('The config file should contain the password of the user in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'server_id', fallback=None) is None:\n self.logger.error('The config file should contain the server_id in mysql section.')\n sys.exit(1)\n\n if parser.get('mysql', 'tables', fallback=None) is not None:\n tables = [table.strip() for table in parser.get('mysql', 'tables').split(',')]\n for table in tables:\n if not parser.has_section(table):\n self.logger.error('The config file should contain a section about the table : %s' % table)\n exit(1)\n if 
parser.get(table, 'index_label', fallback=None) is None :\n self.logger.error('The config file should contain a table section with a index_label value.')\n exit(1)\n else:\n self.logger.error('The config file should contain a tables value with all the tables to replicate.')\n exit(1)", "def config_in_use(self) -> bool:\n config = {}\n config_files = find_config_files([self.config_dir])\n for config_file in config_files:\n with open(config_file) as stream:\n config.update(yaml.safe_load(stream))\n\n if not config:\n return False\n\n print(config.get(CONFIG_LOCK))\n return config.get(CONFIG_LOCK, True)", "def antenny_config_check(self):\n return self.antenny_config.check()", "def check_configuration(self, configuration):\n super(Pixiv_bot, self).check_configuration(configuration)", "def read_config_file():\n\tsuccess = config_parser.read([config_file])\n\n\tif not success:\n\t\tprint \"Failed to parse config file '%s'\" % config_file\n\t\treturn False\n\n\treturn True", "def is_config_exist(self) -> bool:\n pass", "def is_config_exist(self) -> bool:\n return True", "def checkConfig(self):\n\n\t\ttry:\n\t\t\t# Try to get all the required keys\n\t\t\tself.config.get(\"db\",\"host\")\n\t\t\tself.config.get(\"db\",\"username\")\n\t\t\tself.config.get(\"db\",\"password\")\n\t\t\tself.config.get(\"db\",\"database\")\n\t\t\tself.config.get(\"db\",\"pingtime\")\n\n\t\t\tself.config.get(\"server\",\"server\")\n\t\t\tself.config.get(\"server\",\"host\")\n\t\t\tself.config.get(\"server\",\"port\")\n\t\t\tself.config.get(\"server\",\"localizeusers\")\n\t\t\tself.config.get(\"server\",\"outputpackets\")\n\t\t\tself.config.get(\"server\",\"outputrequesttime\")\n\t\t\tself.config.get(\"server\",\"timeouttime\")\n\t\t\tself.config.get(\"server\",\"timeoutlooptime\")\n\n\t\t\tif (self.config[\"server\"][\"server\"] == \"flask\"):\n\t\t\t\t# Flask only config\n\t\t\t\tself.config.get(\"flask\",\"threaded\")\n\t\t\t\tself.config.get(\"flask\",\"debug\")\n\t\t\t\tself.config.get(\"flask\",\"logger\")\n\n\t\t\tself.config.get(\"ci\",\"key\")\n\t\t\treturn True\n\t\texcept:\n\t\t\treturn False", "def validateConfig(self):\n ## (boolean with the result of the validation, eventual error message)\n return (True, '')", "def check_settings(self):\n pass" ]
[ "0.7841762", "0.7464513", "0.7274292", "0.72680944", "0.7267917", "0.7208509", "0.71730393", "0.7005125", "0.69394284", "0.69394284", "0.689093", "0.6861136", "0.68522793", "0.68479437", "0.680019", "0.67950684", "0.6774416", "0.67588484", "0.67401695", "0.6729598", "0.6724944", "0.6692394", "0.6678674", "0.6664847", "0.6639106", "0.6637877", "0.6615104", "0.6584472", "0.6551143", "0.65215915" ]
0.7949921
0
check resource value is as expected
def test_resource_value(self): self.assertTrue(self._configuration_.resources()["RemoveWordTaskRepeat"] == False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test(self, resource):\n return resource.meta.fields[self.name].present(resource)", "def test_is_valid_resource():\n mock_name = \"rg-001\"\n output = sh.is_valid_resource(mock_name)\n assert output is True", "def check_value(self, value):", "def test_check_resource(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n self.assertEqual(s1.check_resource(b1), False)\n s1.add_resource(b1)\n self.assertEqual(s1.check_resource(b1), True)", "def check_validity(self):", "def _resource_name_check(self, resource_name):\n return self._name_check(resource_name, 'resources')", "def test_col_resource_status_valid():\n ident = _id()\n status = proj.fetch('test', ident)\n status = proj.status('test', ident)\n assert status == 'complete'", "def test_custom_resource():\n data = {\n 'name': 'Wort wort',\n 'slug': 'sluggy',\n 'not_valid': 'nooo'\n }\n instance = PeopleResource(**data)\n # We should have this attribute\n assert hasattr(instance, 'name')\n # But this one is missing\n assert not hasattr(instance, 'another_thing')\n # and this one is not valid\n assert not hasattr(instance, 'not_valid')\n assert instance.__str__() == '<People | Wort wort>'\n # It should also have parent Meta attributes\n assert hasattr(instance.Meta, 'valid_status_codes')", "def __validateResourceStateEntry(self, resource: Dict[str, str]):\n if AZ_RESOURCE_ID not in resource:\n raise ValueError(\n '[%s] %s is not present in the armMapping.' % (\n self.fullName, AZ_RESOURCE_ID))\n if SID not in resource:\n raise ValueError(\n '[%s] %s is not present in the armMapping.' % (self.fullName, SID))\n if ARM_TYPE not in resource:\n raise ValueError(\n '[%s] %s is not present in the armMapping.' % (self.fullName, ARM_TYPE))", "def is_valid(self, value):\r\n pass", "def test_has_correct_value(self):\n self.assertEqual(self.node.value, 7)", "def assertResourceEqual(self, actual, expected, resource_type):\n return self.assertEqual(\n resource_type(**expected).to_dict(computed=False),\n actual.to_dict(computed=False),\n )", "def test_nonstandard_resource(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['resources']['scalar'].append({'name': 'chocolate', 'value': 1.0 })\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertEqual(len(results['warnings']), 1)\n self.assertEqual(results['warnings'][0]['name'], 'NONSTANDARD_RESOURCE')", "def _resource_match(chromo, resource):\n return all(resource[k] == v for (k, v) in _resource_fields(chromo).items())", "def _validate(self, value):\n return True", "def testGetStringResourceInfo(self):\n stringResource = self._createSampleResource(Tag.string)\n stringTagInfo = self.converter._getInfoFromResourceTag(stringResource)\n self.assertTrue(len(stringTagInfo) == 1)\n self.assertEqual(stringTagInfo[0][self.converter.TAG],\n stringResource.tag)\n self.assertEqual(stringTagInfo[0][self.converter.TEXT],\n stringResource.text)\n self.assertEqual(stringTagInfo[0][self.converter.NAME_FLAG],\n stringResource.attrib['name'])\n self.assertEqual(stringTagInfo[0][self.converter.TRANSLATABLE_FLAG],\n stringResource.attrib['translatable'])", "def 
validate(self, value):\n return True", "def _check_validity(self):\n pass", "def _extract_resource(resource: Optional[dict],\n allowed_vals: tuple[tuple[str, ...]],\n exc: Type[exception.CinderException],\n resource_name: str,\n props: tuple[str] = ('status',)) -> Optional[str]:\n\n resource_id = None\n if resource:\n for prop, allowed_states in zip(props, allowed_vals):\n if resource[prop] not in allowed_states:\n msg = _(\"Originating %(res)s %(prop)s must be one of \"\n \"'%(vals)s' values\")\n msg = msg % {'res': resource_name,\n 'prop': prop,\n 'vals': ', '.join(allowed_states)}\n # TODO(harlowja): what happens if the status changes after\n # this initial resource status check occurs??? Seems like\n # someone could delete the resource after this check passes\n # but before the volume is officially created?\n raise exc(reason=msg)\n resource_id = resource['id']\n return resource_id", "def validate(self, value):\r\n return value", "def inner_test(param: str):\n self.assertEqual(param, '256')", "def test_expected_output(self):\n # Assuming the auth/user content type is installed\n self.assertIn(\"auth/user\", str(self.form))", "def validate(self):\n # rVal = False\n # return rVal\n return True", "def _validate(self, instance, value):", "def test_validate(self):\n assert self.route.validate(str).route[\"validate\"] == str", "def test_resource_exists(self):\r\n\t\tself.assertTrue(self._configuration_.resources().has_key(\"AddWordTaskRepeat\") and self._configuration_.resources().has_key(\"RemoveWordTaskRepeat\"))", "def check():", "def validate(self, value, obj=None):\n return True", "def test_get_value_success(self):\r\n name = 'option1'\r\n option = self.config.options[name]\r\n value = self.config.values[name]\r\n\r\n self.assertEqual(self.config.get_value(name, option), value)", "def check(self, description: Description) -> bool:" ]
[ "0.69737214", "0.6735116", "0.6578639", "0.6456517", "0.6301914", "0.6208396", "0.6098021", "0.6084837", "0.60238314", "0.6014339", "0.5989801", "0.5965249", "0.5954599", "0.5902951", "0.58872235", "0.586236", "0.58403456", "0.5814619", "0.5813411", "0.5809546", "0.578816", "0.57676935", "0.5757586", "0.5746989", "0.5742798", "0.5742", "0.5737312", "0.5733451", "0.5728344", "0.57224745" ]
0.7122248
0
check specified component has correct class and module name
def test_component_class_and_module(self): self.assertTrue(self._configuration_["AddWordDefinitionTask"].class_name() == "AddWordDefinitionTask" and self._configuration_["AddWordDefinitionTask"].module_name() == "TestPlugins")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isClassName(module, name):\n # search in classes\n if name in module.classes or name in module.structs:\n return True\n # check if name consistent\n res = dotre.split(name)\n if len(res) > 1:\n moduleName = res[0]\n className = res[1]\n # search in modules\n if moduleName in module.modules:\n mod = module.modules[moduleName]\n # search in module classes\n if className in mod.classes or className in mod.structs:\n return True\n return False", "def check_module(name):\n return importlib.util.find_spec(name) is not None", "def test_component_resolution_different_file():\n\n assert snippet_eval(ComponentSnippet(modulea.ComponentResolutionViaModule())) == \"hi from module b\\n\"", "def isKnownComponentClass(self, *args):\n return _libSALOME_LifeCycleCORBA.SALOME_LifeCycleCORBA_isKnownComponentClass(self, *args)", "def has_component(self, component):\n return component.name in self.components", "def __enabled__(component):\n registry = context.app.component_registry\n return (component.__module__ in registry.modules)", "def __call__(self, component, module, **data):\n components = list(scan_for_classes(module, self.iface))\n if len(components) == 0:\n raise UnknownError(\n \"No module-level %s for %r, please use the '%s' \"\n \"directive.\"\n % (self.component_name, component, self.directive_name),\n component)\n elif len(components) == 1:\n return components[0]\n else:\n raise UnknownError(\n \"Multiple possible %ss for %r, please use the '%s' \"\n \"directive.\"\n % (self.component_name, component, self.directive_name),\n component)", "def test_get_component_with_invalid_name():\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('missing_component')", "def class_is_interesting(name: str):\n if name.startswith('org.chromium.'):\n return True\n return False", "def test_component_rename_ok(self):\n test_name = sys._getframe().f_code.co_name\n self._execute('component rename component1 changed_name')\n rv, output = self._execute('component list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def arg_validation(arg, cla):\n if is_subclass(cla, arg):\n return arg\n else:\n print(str(arg)+\" is not a valid \" + cla.__module__ + \" name.\")\n sys.exit(2)", "def __contains__(self, component):\n if issubclass(component, Component):\n try:\n my_component = self.type.components[component.interface]\n except KeyError:\n return False\n else:\n return issubclass(my_component, component)\n else:\n return component in self.type.components", "def _fuzzy_module_name_eq(self, module, package_name):\n return ((module.__name__ == package_name) or \n (module.__name__.replace('_pb2', '') == package_name) or \n (module.DESCRIPTOR.name == package_name) or \n (module.DESCRIPTOR.name.replace('.proto', '') == package_name) or\n (module.DESCRIPTOR.package == package_name))", "def check_component(comp_name: str, comp: defs.Component) -> None:\n if not RE_COMP_NAME.match(comp_name):\n res.append(f\"Invalid component name: {comp_name}\")\n\n for branch_name, branch in sorted(comp.branches.items()):\n check_branch(comp_name, branch_name, branch)", "def test_get_component_name(initialized_bmi):\n name = initialized_bmi.get_component_name()\n assert isinstance(name, str)\n\n return name", "def validate(self):\n Component.validate(self)\n kinds = (\"lib\", \"exe\")\n if self.kind not in kinds:\n raise Invalid(\"kind must be one of %s for component %s\" % (kinds,self.name))\n\n if self.kind == \"exe\" :\n if not self.exe_path:\n raise 
Invalid(\"exe_path must be defined for component %s\" % self.name)", "def IsComponent(*args):\n return _XCAFDoc.XCAFDoc_ShapeTool_IsComponent(*args)", "def isModule(self, name):\n return os.path.isfile(self.modulePath(name))", "def test_component_resolution_same_file():\n\n assert snippet_eval(ComponentSnippet(modulea.ComponentResolutionSameFile())) == \"hi\\n\"", "def comp(self, componentname):\n retv = self.components.lookup(componentname)\n if (retv == None):\n raise Exception(\"Component not found: '{0}'.\".format(componentname))\n return retv", "def name(self):\n return \"component_manager\"", "def has_classname(self):\n return self.unpack_word(0x4A) > 0", "def compServiceCheck():\n # global compileService\n return compileService.state.name", "def find_class(self, module, name):\n raise pickle.UnpicklingError(\"global '%s.%s' is forbidden\" %\n (module, name))", "def test_component_resolution_same_file_err():\n\n with pytest.raises(InterpStackTrace) as exc_info:\n snippet_eval(ComponentSnippet(modulea.ComponentResolutionSameFileErr()))\n assert 'DefinitelyNotExistingComponent' in str(exc_info.value)", "def XCAFDoc_ShapeTool_IsComponent(*args):\n return _XCAFDoc.XCAFDoc_ShapeTool_IsComponent(*args)", "def test_validate_sub_process_class_plugins(ctx, common_relax_workchain):\n from aiida_common_workflows.plugins import get_entry_point_name_from_class\n assert eos.validate_sub_process_class(get_entry_point_name_from_class(common_relax_workchain).name, ctx) is None", "def __init__(self, module):\n om.ExplicitComponent.__init__(self)\n self.module_name = module.name", "def __init__(self, module):\n om.ExplicitComponent.__init__(self)\n self.module_name = module.name", "def test_component_rename_error_bad_component(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('component rename bad_component changed_name')\n self.assertEqual(2, rv)\n self.assertEqual(self.expected_results[test_name], output)" ]
[ "0.6484528", "0.64189386", "0.6209598", "0.61298054", "0.6046658", "0.6008417", "0.5990348", "0.5964718", "0.58850896", "0.5874257", "0.5827612", "0.5805829", "0.5802875", "0.5790136", "0.5754115", "0.57177526", "0.5713081", "0.57058185", "0.57042754", "0.5684537", "0.5661433", "0.56493217", "0.56292737", "0.56106436", "0.55782276", "0.5570735", "0.55638283", "0.55442303", "0.55442303", "0.55440015" ]
0.70710343
0
check that specific component specification has correct lifetime declaration
def test_component_specification_lifetime_declaration(self): self.assertTrue(self._configuration_["ListWordDefinitionsTask"].lifetime() == "singleton")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_component_specification_lifetime_non_declaration(self):\r\n\t\tself.assertTrue(self._configuration_[\"RemoveWordDefinitionTask\"].lifetime() == \"\")", "def _check_validity(self):\n pass", "def needsResolution (self):\n return self.__unresolvedComponents is not None", "def validate(self):\n Component.validate(self)\n kinds = (\"lib\", \"exe\")\n if self.kind not in kinds:\n raise Invalid(\"kind must be one of %s for component %s\" % (kinds,self.name))\n\n if self.kind == \"exe\" :\n if not self.exe_path:\n raise Invalid(\"exe_path must be defined for component %s\" % self.name)", "def test_register_component_with_invalid_type():\n\n with pytest.raises(InvalidComponentTypeError):\n component = CoreObject()\n application_services.register_component(component)", "def test_get_component_with_invalid_name():\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('missing_component')", "def check_validity(self):", "def check_stability(self):", "def __init__(self, *args):\n this = _libSALOME_LifeCycleCORBA.new_IncompatibleComponent(*args)\n try: self.this.append(this)\n except: self.this = this", "def validate_dependencies(self, session, entry):", "def test_remove_component_invalid():\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('missing_component_to_remove')", "def _test_determine_alloc(lifetime: dace.AllocationLifetime, unused: bool = False) -> dace.SDFG:\r\n sdfg = dace.SDFG('lifetimetest')\r\n sdfg.add_array('A', [N], dace.float64)\r\n sdfg.add_array('B', [N], dace.float64)\r\n sdfg.add_transient('unused', [N], dace.float64, lifetime=lifetime)\r\n state = sdfg.add_state()\r\n me, mx = state.add_map('m', dict(i='0:N'))\r\n\r\n #########################################################################\r\n nsdfg = dace.SDFG('nested')\r\n nsdfg.add_array('A', [N], dace.float64)\r\n nsdfg.add_array('B', [N], dace.float64)\r\n nsdfg.add_transient('tmp', [N], dace.float64, dace.StorageType.GPU_Global, lifetime=lifetime)\r\n nsdfg.add_transient('tmp2', [1], dace.float64, dace.StorageType.Register, lifetime=lifetime)\r\n nstate = nsdfg.add_state()\r\n ime, imx = nstate.add_map('m2', dict(i='0:20'), schedule=dace.ScheduleType.GPU_Device)\r\n t1 = nstate.add_access('tmp')\r\n t2 = nstate.add_access('tmp2')\r\n nstate.add_nedge(t1, t2, dace.Memlet('tmp[0]'))\r\n nstate.add_memlet_path(nstate.add_read('A'), ime, t1, memlet=dace.Memlet('A[i]'))\r\n nstate.add_memlet_path(t2, imx, nstate.add_write('B'), memlet=dace.Memlet('B[0]', wcr='lambda a,b: a+b'))\r\n #########################################################################\r\n nsdfg_node = state.add_nested_sdfg(nsdfg, None, {'A'}, {'B'})\r\n state.add_memlet_path(state.add_read('A'), me, nsdfg_node, dst_conn='A', memlet=dace.Memlet('A[0:N]'))\r\n state.add_memlet_path(nsdfg_node, mx, state.add_write('B'), src_conn='B', memlet=dace.Memlet('B[0:N]'))\r\n\r\n # Set default storage/schedule types in SDFG\r\n infer_types.set_default_schedule_and_storage_types(sdfg, None)\r\n\r\n return sdfg, (sdfg, state, me, nsdfg, nstate, ime)", "def test_alloc_persistent_register():\r\n\r\n @dace.program\r\n def lifetimetest(input: dace.float64[N]):\r\n tmp = dace.ndarray([1], input.dtype)\r\n return tmp + 1\r\n\r\n sdfg: dace.SDFG = lifetimetest.to_sdfg()\r\n sdfg.arrays['tmp'].storage = dace.StorageType.Register\r\n sdfg.arrays['tmp'].lifetime = dace.AllocationLifetime.Persistent\r\n\r\n try:\r\n sdfg.validate()\r\n raise AssertionError('SDFG should not be valid')\r\n except 
dace.sdfg.InvalidSDFGError:\r\n print('Exception caught, test passed')", "def test_component_resolution_same_file_err():\n\n with pytest.raises(InterpStackTrace) as exc_info:\n snippet_eval(ComponentSnippet(modulea.ComponentResolutionSameFileErr()))\n assert 'DefinitelyNotExistingComponent' in str(exc_info.value)", "def validate_lifetime(self, for_policy, policy_info):\n units = policy_info['lifetime']['units']\n if units != 'seconds':\n raise CsrValidationFailure(resource=for_policy,\n key='lifetime:units',\n value=units)\n value = policy_info['lifetime']['value']\n if (value < LIFETIME_LIMITS[for_policy]['min'] or\n value > LIFETIME_LIMITS[for_policy]['max']):\n raise CsrValidationFailure(resource=for_policy,\n key='lifetime:value',\n value=value)", "def test_not_at_beginning(self):\n self.check_4_way('container', 'container or pod')", "def should_check_refcount(self):\n raise NotImplementedError()", "def test_alloc_persistent_register():\r\n @dace.program\r\n def lifetimetest(input: dace.float64[N]):\r\n tmp = dace.ndarray([1], input.dtype)\r\n return tmp + 1\r\n\r\n sdfg: dace.SDFG = lifetimetest.to_sdfg()\r\n sdfg.arrays['tmp'].storage = dace.StorageType.Register\r\n sdfg.arrays['tmp'].lifetime = dace.AllocationLifetime.Persistent\r\n\r\n try:\r\n sdfg.validate()\r\n raise AssertionError('SDFG should not be valid')\r\n except dace.sdfg.InvalidSDFGError:\r\n print('Exception caught, test passed')", "def sanity_check(self):\n pass", "def test_inner_deadlock_of_component(self):\n a = DummyProducingInputIncompleteBuild(scope=\"A\")\n try:\n test = ComponentTest(component=a, input_spaces=dict(input_=float))\n except RLGraphBuildError as e:\n print(\"Seeing expected RLGraphBuildError ({}). Test ok.\".format(e))\n else:\n raise RLGraphError(\"Not seeing expected RLGraphBuildError with input-incomplete model!\")", "def test_register_component_with_invalid_type_only_manager():\n\n with pytest.raises(InvalidComponentTypeError):\n component = OnlyManagerMock()\n application_services.register_component(component)", "def valid_dependency(self, dep):\r\n return True", "def test_resolves_instances():\n container = Container()\n container.register(MessageWriter, StdoutMessageWriter)\n\n mw1 = container.resolve(MessageWriter)\n mw2 = container.resolve(MessageWriter)\n expect(mw1).not_to(equal(mw2))", "def test_register_component_with_invalid_type_only_component():\n\n with pytest.raises(InvalidComponentTypeError):\n component = OnlyComponentMock('only_component')\n application_services.register_component(component)", "def sanity_check(self):\n return True", "def test_RefResolver_in_scope(self):\n\n resolver = validators.RefResolver.from_schema({})\n with self.assertWarns(DeprecationWarning) as w:\n with resolver.in_scope(\"foo\"):\n pass\n\n self.assertEqual(w.filename, __file__)\n self.assertTrue(\n str(w.warning).startswith(\n \"asdf._jsonschema.RefResolver.in_scope is deprecated \",\n ),\n )", "def test_allocation_scope_close(self):\n sequence_list = SequenceList()\n sequence_string = self.stabilise_accounting + \"\"\"\n LoginTestVifibCustomer\n CustomerRegisterNewComputer\n Tic\n Logout\n\n LoginDefaultUser\n SetComputerCoordinatesFromComputerTitle\n Logout\n\n LoginTestVifibCustomer\n ComputerSetAllocationScopeClose\n Tic\n Logout\n SetSequenceSlaXmlCurrentComputer\n\n SlapLoginCurrentComputer\n FormatComputer\n Tic\n SlapLogout\n\n LoginDefaultUser\n CheckComputerAllocationScopeClose\n CheckComputerTradeConditionDestinationSectionTestVifibCustomer\n Logout\n \"\"\" + 
self.prepare_published_software_release + \\\n self.request_and_install_software + \"\"\"\n # request as owner\n LoginTestVifibCustomer\n PersonRequestSoftwareInstance\n Tic\n Logout\n\n # fail to instantiate for owner\n LoginDefaultUser\n CallConfirmOrderedSaleOrderAlarm\n Tic\n CheckNoRelatedSalePackingListLineForSoftwareInstance\n Logout\n\n LoginERP5TypeTestCase\n CheckSiteConsistency\n Logout\n \"\"\"\n sequence_list.addSequenceString(sequence_string)\n sequence_list.play(self)", "def test_get_composition(self):\n pass", "def test_scaling_active_parameter_manager():\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(2)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n assert list(scaling_apm.constant_g_values[0]) == list(\n components_2[\"2\"].calculate_scales()\n )\n assert len(scaling_apm.constant_g_values) == 1\n assert scaling_apm.n_obs == [2]\n\n # Test that no constant_g_values if both components selected\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n assert scaling_apm.constant_g_values is None\n\n # Check that one can't initialise with an unequal number of reflections,\n # either within the selection or overall.\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n\n data_manager = mock_data_manager(components_2)\n pmg = ScalingParameterManagerGenerator(\n [data_manager], target=ScalingTarget(), mode=\"concurrent\"\n )\n assert isinstance(pmg.apm_type, type(scaling_active_parameter_manager))", "def test_type_checking_with_inconsistent_types(self):\n @component\n def a_op(field_m: {'GCSPath': {'path_type': 'file', 'file_type':'tsv'}}, field_o: 'Integer'):\n return ContainerOp(\n name = 'operator a',\n image = 'gcr.io/ml-pipeline/component-b',\n arguments = [\n '--field-l', field_m,\n '--field-o', field_o,\n ],\n )\n\n @pipeline(\n name='p1',\n description='description1'\n )\n def my_pipeline(a: {'GCSPath': {'path_type':'file', 'file_type': 'csv'}}='good', b: Integer()=12):\n a_op(field_m=a, field_o=b)\n\n test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')\n sys.path.append(test_data_dir)\n tmpdir = tempfile.mkdtemp()\n try:\n simple_package_path = os.path.join(tmpdir, 'simple.tar.gz')\n with self.assertRaises(InconsistentTypeException):\n compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=True)\n compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=False)\n\n finally:\n shutil.rmtree(tmpdir)" ]
[ "0.69758385", "0.5321172", "0.5216765", "0.51183474", "0.5083323", "0.5078964", "0.5065865", "0.5039631", "0.50140995", "0.4987057", "0.4973244", "0.4964374", "0.49469846", "0.49224192", "0.49184662", "0.49132386", "0.49099228", "0.49026817", "0.49012953", "0.48861352", "0.48803473", "0.48732278", "0.48547307", "0.4849212", "0.4830893", "0.47995567", "0.47850296", "0.4767401", "0.47494787", "0.47454005" ]
0.7212516
0
check that specific component specification has correct lifetime nondeclaration
def test_component_specification_lifetime_non_declaration(self): self.assertTrue(self._configuration_["RemoveWordDefinitionTask"].lifetime() == "")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_component_specification_lifetime_declaration(self):\r\n\t\tself.assertTrue(self._configuration_[\"ListWordDefinitionsTask\"].lifetime() == \"singleton\")", "def test_get_component_with_invalid_name():\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('missing_component')", "def needsResolution (self):\n return self.__unresolvedComponents is not None", "def _check_validity(self):\n pass", "def test_remove_component_invalid():\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('missing_component_to_remove')", "def test_component_resolution_same_file_err():\n\n with pytest.raises(InterpStackTrace) as exc_info:\n snippet_eval(ComponentSnippet(modulea.ComponentResolutionSameFileErr()))\n assert 'DefinitelyNotExistingComponent' in str(exc_info.value)", "def validate(self):\n Component.validate(self)\n kinds = (\"lib\", \"exe\")\n if self.kind not in kinds:\n raise Invalid(\"kind must be one of %s for component %s\" % (kinds,self.name))\n\n if self.kind == \"exe\" :\n if not self.exe_path:\n raise Invalid(\"exe_path must be defined for component %s\" % self.name)", "def check_validity(self):", "def test_inner_deadlock_of_component(self):\n a = DummyProducingInputIncompleteBuild(scope=\"A\")\n try:\n test = ComponentTest(component=a, input_spaces=dict(input_=float))\n except RLGraphBuildError as e:\n print(\"Seeing expected RLGraphBuildError ({}). Test ok.\".format(e))\n else:\n raise RLGraphError(\"Not seeing expected RLGraphBuildError with input-incomplete model!\")", "def check_stability(self):", "def test_register_component_with_invalid_type():\n\n with pytest.raises(InvalidComponentTypeError):\n component = CoreObject()\n application_services.register_component(component)", "def sanity_check(self):\n pass", "def test_not_at_beginning(self):\n self.check_4_way('container', 'container or pod')", "def test_component_without_owner_is_trac_error(self):\n # We create an instance of the panel so we can check existing values\n panel = ComponentAdminPanel(self.env)\n\n # Check the environment initially contains the default values.\n self.assertItemsEqual(panel.get_component_list(), self.default['component'])\n\n # create the section, option, and values in configuration\n self.env.config.set('ticket-field-config', 'component',\n ','.join(self.new['component']))\n\n # we purposely forget to add component_owner to config\n # and run the plugin expecting a TracError\n admin_command = TicketFieldConfigCommand(self.env)\n self.assertRaises(TracError,admin_command.set_fields_from_config)", "def test_register_component_with_invalid_type_only_component():\n\n with pytest.raises(InvalidComponentTypeError):\n component = OnlyComponentMock('only_component')\n application_services.register_component(component)", "def sanity_check(self):\n return True", "def should_check_refcount(self):\n raise NotImplementedError()", "def valid_dependency(self, dep):\r\n return True", "def test_component_specifications_exist(self):\r\n\t\tself.assertTrue(not (self._configuration_[\"AddWordDefinitionTask\"] is None\r\n\t\t or self._configuration_[\"ListWordDefinitionsTask\"] is None or\r\n\t\t self._configuration_[\"RemoveWordDefinitionTask\"] is None))", "def __init__(self, *args):\n this = _libSALOME_LifeCycleCORBA.new_IncompatibleComponent(*args)\n try: self.this.append(this)\n except: self.this = this", "def test_register_component_with_invalid_type_only_manager():\n\n with pytest.raises(InvalidComponentTypeError):\n component = 
OnlyManagerMock()\n application_services.register_component(component)", "def _allowed_components():\n pass", "def check_validity(self) -> None: # pylint: disable=no-self-use # pragma: nocover\n return None", "def test_init_decorate_error():\n with pytest.raises(\n TypeError,\n match=\"Component classes must not define a custom `__init__` method.\",\n ):\n\n @component\n class A:\n def __init__(self, a, b=5):\n self.a = a\n self.b = b", "def test_RefResolver_in_scope(self):\n\n resolver = validators.RefResolver.from_schema({})\n with self.assertWarns(DeprecationWarning) as w:\n with resolver.in_scope(\"foo\"):\n pass\n\n self.assertEqual(w.filename, __file__)\n self.assertTrue(\n str(w.warning).startswith(\n \"asdf._jsonschema.RefResolver.in_scope is deprecated \",\n ),\n )", "def test_remove_component():\n\n component = application_services.get_component('database.component')\n application_services.remove_component(component.get_id())\n\n with pytest.raises(ComponentAttributeError):\n application_services.get_component('database.component')\n\n application_services.register_component(component)", "def test_pm_Completeness(self):\n pass", "def test_register_component_with_invalid_name():\n\n with pytest.raises(InvalidComponentNameError):\n component = ComponentWithInvalidNameMock('')\n application_services.register_component(component)", "def test_type_checking_with_inconsistent_types(self):\n @component\n def a_op(field_m: {'GCSPath': {'path_type': 'file', 'file_type':'tsv'}}, field_o: 'Integer'):\n return ContainerOp(\n name = 'operator a',\n image = 'gcr.io/ml-pipeline/component-b',\n arguments = [\n '--field-l', field_m,\n '--field-o', field_o,\n ],\n )\n\n @pipeline(\n name='p1',\n description='description1'\n )\n def my_pipeline(a: {'GCSPath': {'path_type':'file', 'file_type': 'csv'}}='good', b: Integer()=12):\n a_op(field_m=a, field_o=b)\n\n test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata')\n sys.path.append(test_data_dir)\n tmpdir = tempfile.mkdtemp()\n try:\n simple_package_path = os.path.join(tmpdir, 'simple.tar.gz')\n with self.assertRaises(InconsistentTypeException):\n compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=True)\n compiler.Compiler().compile(my_pipeline, simple_package_path, type_check=False)\n\n finally:\n shutil.rmtree(tmpdir)", "def valid(self):\n pass" ]
[ "0.7102292", "0.5642984", "0.5629605", "0.56190115", "0.54798615", "0.5447566", "0.5431498", "0.53877485", "0.5380172", "0.53358746", "0.53217614", "0.5278221", "0.52557147", "0.52266574", "0.5219754", "0.52135813", "0.5204345", "0.5197856", "0.5118836", "0.5109223", "0.50938845", "0.5079533", "0.50743043", "0.506698", "0.5056269", "0.5045378", "0.5037392", "0.50301194", "0.5028919", "0.49986675" ]
0.73026043
0