column           type      lengths / values
query            string    9 to 9.05k
document         string    10 to 222k
metadata         dict
negatives        list      30 items
negative_scores  list      30 items
document_score   string    4 to 10
document_rank    string    2 classes
Return the first point of a `LINESTRING` geometry as a `POINT`. Return `NULL` if the input parameter is not a `LINESTRING`. Returns: PointValue, the start point.
def start_point(self) -> PointValue:
    return ops.GeoStartPoint(self).to_expr()
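A minimal usage sketch for the accessor above, assuming an ibis connection to a PostGIS-enabled database and a table "roads" whose "geom" column holds LINESTRINGs (all of these names are illustrative, not part of the dataset):

import ibis

# Hypothetical connection and table; any backend that implements GeoStartPoint works.
con = ibis.postgres.connect(database="gisdb")
roads = con.table("roads")  # assumed to have a LINESTRING column named "geom"

# First vertex of each linestring as a POINT (NULL for non-LINESTRING inputs)
first_points = roads.geom.start_point()
print(first_points.execute())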
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_point_from_linestring(geom_row, X=0, behaviour='last'):\n\n lat = None\n lng = None\n try:\n X = round(X)\n except Exception as e:\n raise TypeError(\"Please enter a number for the index of the point within the linestring (X)\")\n\n if behaviour in ['last', 'ignore']:\n pass\n else:\n behaviour = 'last'\n\n if type(geom_row) == shapely.geometry.multilinestring.MultiLineString:\n total_linestrings = len(geom_row)\n lengths = {}\n total_len = 0\n for line in range(total_linestrings):\n len_line = len(geom_row[line].xy[0])\n lengths[line] = len_line\n total_len += len_line\n if X > total_len and behaviour == 'ignore':\n return lng, lat\n elif X > total_len and behaviour == 'last' or X == -1:\n lat = geom_row[-1].xy[1][-1]\n lng = geom_row[-1].xy[0][-1]\n else:\n total = 0\n for key, val in lengths.items():\n # find the location of X within the dictionary by looking if its in a given key\n total += val\n if total >= X:\n ind_key = key\n dict_ind = (val - (total - X)) - 1 # minus 1 as Python has a base-0 index\n break\n lat = geom_row[ind_key].xy[1][dict_ind]\n lng = geom_row[ind_key].xy[0][dict_ind]\n\n elif type(geom_row) == shapely.geometry.linestring.LineString:\n len_line = len(geom_row.xy)\n lng = geom_row.xy[0][X]\n lat = geom_row.xy[1][X]\n\n return lng, lat", "def get_initial_point(self):\r\n if isinstance(self.pieces[0], LineSegment):\r\n return self.pieces[0].start", "def __getPointXY(self, raw_string):\n try:\n # print 'input:',str\n pointRE = re.compile('^\\((\\d*, *\\d*)\\)$')\n x, y = pointRE.search(raw_string.strip()).groups()[0].split(',')\n # print 'x: %s, y: %s' % (x,y)\n return self.__validatePoint((int(x), int(y.strip())))\n except AttributeError:\n raise ValueError('Failed to get point coordinates.')", "def parsePoint(line):\n parts = line.split(\",\")\n return LabeledPoint(parts[0], [parts[1], parts[2]])", "def _get_point_source_location(element):\n pos = element.find('%s/%s/%s' %\n (NRML04_POINT_GEOMETRY, gml.GML_POINT, gml.GML_POS))\n pos = pos.text.split()\n\n return float(pos[0]), float(pos[1])", "def __getPointXYs(self, raw_string):\n try:\n pointsRE = re.compile('^\\((\\d*\\D*, *\\D*\\d*)\\)\\D*\\((\\d*\\D*, *\\D*\\d*)\\)$')\n points = pointsRE.search(raw_string.strip()).groups()\n startPoint = (int(points[0].split(',')[0].strip()), int(points[0].split(',')[1].strip()))\n endPoint = (int(points[1].split(',')[0].strip()), int(points[1].split(',')[1].strip()))\n return self.__validatePoint(startPoint), self.__validatePoint(endPoint)\n except AttributeError:\n traceback.print_exc()\n raise ValueError('Failed to get point coordinates.')", "def LineMinDistanceTo(line, point_or_line):\n line = rhutil.coerceline(line, True)\n test = rhutil.coerceline(point_or_line)\n if test is None: test = rhutil.coerce3dpoint(point_or_line, True)\n return line.MinimumDistanceTo(test)", "def get_point(self, str_val, x, y):\n ret_val, x.value, y.value = self._get_point(str_val.encode(), x.value, y.value)\n return ret_val", "def parse_point(self, text_line):\n record_type = self.substr(text_line, sps21point['RECORD_ID'][0], sps21point['RECORD_ID'][1]).strip()\n if record_type not in (SRC_DATA_RECORD, RCV_DATA_RECORD):\n return\n self.set_definition(sps21point)\n return self.parse(text_line)", "def get_start_point(self):\n return self.first_point", "def line(points):\n return LineString(points)", "def line_locate_point(self, right: PointValue) -> ir.FloatingValue:\n return ops.GeoLineLocatePoint(self, right).to_expr()", "def get_linestring(value):\n \n line = value['line']\n 
coords = [(x['x'], x['y']) for x in line]\n return geojson.Feature(\n geometry=geojson.LineString(coords),\n properties=value\n )", "def getStart(self):\n return _libsbml.LineSegment_getStart(self)", "def get_start_point_marker(self) -> PositionMarker: # pragma: no cover\n assert self.pos_marker\n return self.pos_marker.start_point_marker()", "def begining_of_line():\r\n set_point(point().begining_of_line())", "def point(surface, string):\n match = re.match('(.*?) (.*?)(?: |$)', string)\n if match:\n x, y = match.group(1, 2)\n string = string[match.end():]\n return (size(surface, x, 'x'), size(surface, y, 'y'), string)\n else:\n raise PointError", "def point(self):\n return shapely.geometry.Point(self._x[0], self._x[1])", "def test_point_parse(logger):\n raw_bytes = b''\n point_data_fragment = [\n (struct.pack(lines.X.fmt, 12.341), 12.341),\n (struct.pack(lines.Y.fmt, 107.301), 107.301),\n (struct.pack(lines.Pressure.fmt, 0.351), 0.351),\n (struct.pack(lines.RotX.fmt, 0.03), 0.03),\n (struct.pack(lines.RotY.fmt, 0.216), 0.216),\n ]\n for data in point_data_fragment:\n raw_bytes += data[0]\n\n # Set up the generator with the raw bytes:\n position = recover(raw_bytes)\n data = next(position)\n assert data == ''\n\n result = lines.Point.load(position)\n assert round(result.x, 3) == 12.341\n assert round(result.y, 3) == 107.301\n assert round(result.pressure, 3) == 0.351\n assert round(result.rot_x, 3) == 0.03\n assert round(result.rot_y, 3) == 0.216", "def extract_coordinates(x):\n if not pd.isna(x):\n x = ast.literal_eval(x)\n bbox = Polygon(x['bounding_box']['coordinates'][0])\n centroid = bbox.centroid.coords\n return Point(centroid)\n else:\n return np.nan", "def compute_start_end_points(linestrings):\n starts = []\n stops = []\n for ls in linestrings:\n pt = Point(ls.coords[0])\n starts.append(round(CONUS[\"poly\"].exterior.project(pt), 2))\n pt = Point(ls.coords[-1])\n stops.append(round(CONUS[\"poly\"].exterior.project(pt), 2))\n return starts, stops", "def _parse_point_source(element):\n ID, name, tect_reg = _get_id_name_tect_reg(element)\n\n lon, lat = _get_point_source_location(element)\n\n mfd = _get_mfd(element)\n\n return PointSourceNRML04(lon, lat, mfd)", "def make_line_points(y1, y2, line):\n if line is None:\n return None\n\n slope, intercept = line\n\n # make sure everything is integer as cv2.line requires it\n x1 = int((y1 - intercept) / slope)\n x2 = int((y2 - intercept) / slope)\n y1 = int(y1)\n y2 = int(y2)\n\n return ((x1, y1), (x2, y2))", "def parse(text):\n parts = [int(part) for part in text.strip().split(',')]\n point = Point(*parts)\n actual = \"{},{},{},{}\".format(point.x, point.y, point.z, point.t)\n assert actual == text, diff(actual, text)\n return point", "def shortest_line_to_point(point_a, point_b, point_c): # where a and b are on spin axis, c is the point spinning round\n axis_vect = np.subtract(point_a, point_b)\n axis_mag = magnitude(point_a, point_b)\n unit_axis = np.divide(axis_vect, axis_mag) # unit of pp\n # pp' constants - p\n\n # pp dot u\n t = np.sum(np.dot(unit_axis, unit_axis))\n c = np.sum(np.dot(np.subtract(point_b, point_c), unit_axis))\n p = -c / t\n project_point_on_axis_add = (np.multiply(unit_axis, p))\n project_point_on_axis = project_point_on_axis_add + point_b\n distance = magnitude(point_c, project_point_on_axis)\n return distance, project_point_on_axis", "def parse_point(line):\n return json.loads(line)", "def getPoint(self):\n return Point(*self.position)", "def _nearest_point_on_line(begin, end, point):\n b2e = _vec_sub(end, 
begin)\n b2p = _vec_sub(point, begin)\n nom = _vec_dot(b2p, b2e)\n denom = _vec_dot(b2e, b2e)\n if denom == 0.0:\n return begin\n u = nom / denom\n if u <= 0.0:\n return begin\n elif u >= 1.0:\n return end\n else:\n return _vec_add(begin, _vec_scale(b2e, u))", "def point_to_lng_lat(point_geometry):\n\n # cast as str\n point = str(point_geometry)\n\n # parse\n point = point.split('(')[-1]\n point = point.replace(')', '')\n\n # split lat/lng\n point = point.strip()\n lng_lat = point.split(' ')\n if(len(lng_lat) != 2):\n raise Exception('Input point is invalid')\n\n # parse\n lng, lat = lng_lat\n lng = lng.strip()\n lat = lat.strip()\n lat = float(lat)\n lng = float(lng)\n\n return [lng, lat]", "def project_point_to_object(point, geometry):\n nearest_point = None\n min_dist = float(\"inf\")\n \n if isinstance(geometry, Polygon):\n for seg_start, seg_end in pairs(list(geometry.exterior.coords)):\n line_start = Point(seg_start)\n line_end = Point(seg_end)\n \n intersection_point = project_point_to_line(point, line_start, line_end)\n cur_dist = point.distance(intersection_point)\n \n if cur_dist < min_dist:\n min_dist = cur_dist\n nearest_point = intersection_point\n \n elif isinstance(geometry, LineString):\n for seg_start, seg_end in pairs(list(geometry.coords)):\n line_start = Point(seg_start)\n line_end = Point(seg_end)\n \n intersection_point = project_point_to_line(point, line_start, line_end)\n cur_dist = point.distance(intersection_point)\n \n if cur_dist < min_dist:\n min_dist = cur_dist\n nearest_point = intersection_point\n else:\n raise NotImplementedError(\"project_point_to_object not implemented for\"+\n \" geometry type '\" + geometry.type + \"'.\")\n return nearest_point" ]
[ "0.6900281", "0.6412776", "0.6308229", "0.6055818", "0.6031474", "0.5971331", "0.5921453", "0.5892484", "0.5889433", "0.58782244", "0.5713872", "0.5669867", "0.565784", "0.5622244", "0.56021214", "0.5563299", "0.55588895", "0.5468982", "0.5464776", "0.54572684", "0.5446547", "0.54155797", "0.5411374", "0.5395566", "0.53859216", "0.5377552", "0.5372346", "0.5363435", "0.53500175", "0.5348407" ]
0.66960007
1
Return the last point of a `LINESTRING` geometry as a `POINT`. Return `NULL` if the input parameter is not a `LINESTRING`. Returns: PointValue, the end point.
def end_point(self) -> PointValue:
    return ops.GeoEndPoint(self).to_expr()
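A sketch under the same assumptions (hypothetical PostGIS connection, table "roads" with a LINESTRING column "geom"): start_point and end_point pair naturally when both endpoints of each segment are needed.

import ibis

con = ibis.postgres.connect(database="gisdb")  # assumed geospatial backend
roads = con.table("roads")

# Both endpoints of every linestring, as named POINT columns
endpoints = roads.select(
    roads.geom.start_point().name("first_pt"),
    roads.geom.end_point().name("last_pt"),
)
print(endpoints.execute())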
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_point_from_linestring(geom_row, X=0, behaviour='last'):\n\n lat = None\n lng = None\n try:\n X = round(X)\n except Exception as e:\n raise TypeError(\"Please enter a number for the index of the point within the linestring (X)\")\n\n if behaviour in ['last', 'ignore']:\n pass\n else:\n behaviour = 'last'\n\n if type(geom_row) == shapely.geometry.multilinestring.MultiLineString:\n total_linestrings = len(geom_row)\n lengths = {}\n total_len = 0\n for line in range(total_linestrings):\n len_line = len(geom_row[line].xy[0])\n lengths[line] = len_line\n total_len += len_line\n if X > total_len and behaviour == 'ignore':\n return lng, lat\n elif X > total_len and behaviour == 'last' or X == -1:\n lat = geom_row[-1].xy[1][-1]\n lng = geom_row[-1].xy[0][-1]\n else:\n total = 0\n for key, val in lengths.items():\n # find the location of X within the dictionary by looking if its in a given key\n total += val\n if total >= X:\n ind_key = key\n dict_ind = (val - (total - X)) - 1 # minus 1 as Python has a base-0 index\n break\n lat = geom_row[ind_key].xy[1][dict_ind]\n lng = geom_row[ind_key].xy[0][dict_ind]\n\n elif type(geom_row) == shapely.geometry.linestring.LineString:\n len_line = len(geom_row.xy)\n lng = geom_row.xy[0][X]\n lat = geom_row.xy[1][X]\n\n return lng, lat", "def __getPointXY(self, raw_string):\n try:\n # print 'input:',str\n pointRE = re.compile('^\\((\\d*, *\\d*)\\)$')\n x, y = pointRE.search(raw_string.strip()).groups()[0].split(',')\n # print 'x: %s, y: %s' % (x,y)\n return self.__validatePoint((int(x), int(y.strip())))\n except AttributeError:\n raise ValueError('Failed to get point coordinates.')", "def get_line_end_pts(line_segment, y1, y2):\n if line_segment is None:\n return None\n\n slope, intercept = line_segment\n\n x1 = int((y1 - intercept) / slope)\n x2 = int((y2 - intercept) / slope)\n y1 = int(y1)\n y2 = int(y2)\n\n return x1, y1, x2, y2", "def line_locate_point(self, right: PointValue) -> ir.FloatingValue:\n return ops.GeoLineLocatePoint(self, right).to_expr()", "def get_point(self, str_val, x, y):\n ret_val, x.value, y.value = self._get_point(str_val.encode(), x.value, y.value)\n return ret_val", "def end_of_line():\r\n set_point(point().end_of_line())", "def get_initial_point(self):\r\n if isinstance(self.pieces[0], LineSegment):\r\n return self.pieces[0].start", "def __getPointXYs(self, raw_string):\n try:\n pointsRE = re.compile('^\\((\\d*\\D*, *\\D*\\d*)\\)\\D*\\((\\d*\\D*, *\\D*\\d*)\\)$')\n points = pointsRE.search(raw_string.strip()).groups()\n startPoint = (int(points[0].split(',')[0].strip()), int(points[0].split(',')[1].strip()))\n endPoint = (int(points[1].split(',')[0].strip()), int(points[1].split(',')[1].strip()))\n return self.__validatePoint(startPoint), self.__validatePoint(endPoint)\n except AttributeError:\n traceback.print_exc()\n raise ValueError('Failed to get point coordinates.')", "def GetPointForLabel(points):\n # TODO: find the last point at a minute boundary\n return points[-1]", "def getEnd(self):\n return _libsbml.LineSegment_getEnd(self)", "def get_latest(self):\n if len(self.points) == 0:\n return None\n return self.points[-1]", "def get_end_point_marker(self) -> PositionMarker:\n assert self.pos_marker\n return self.pos_marker.end_point_marker()", "def line(points):\n return LineString(points)", "def line_substring(\n self, start: ir.FloatingValue, end: ir.FloatingValue\n ) -> ir.LineStringValue:\n return ops.GeoLineSubstring(self, start, end).to_expr()", "def start_point(self) -> PointValue:\n return 
ops.GeoStartPoint(self).to_expr()", "def get_line(self, str_val, min_x, min_y, max_x, max_y):\n ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_line(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value)\n return ret_val", "def parse_point(self, text_line):\n record_type = self.substr(text_line, sps21point['RECORD_ID'][0], sps21point['RECORD_ID'][1]).strip()\n if record_type not in (SRC_DATA_RECORD, RCV_DATA_RECORD):\n return\n self.set_definition(sps21point)\n return self.parse(text_line)", "def point(surface, string):\n match = re.match('(.*?) (.*?)(?: |$)', string)\n if match:\n x, y = match.group(1, 2)\n string = string[match.end():]\n return (size(surface, x, 'x'), size(surface, y, 'y'), string)\n else:\n raise PointError", "def get_line_end(self):\n return self._line_end", "def _nearest_point_on_line(begin, end, point):\n b2e = _vec_sub(end, begin)\n b2p = _vec_sub(point, begin)\n nom = _vec_dot(b2p, b2e)\n denom = _vec_dot(b2e, b2e)\n if denom == 0.0:\n return begin\n u = nom / denom\n if u <= 0.0:\n return begin\n elif u >= 1.0:\n return end\n else:\n return _vec_add(begin, _vec_scale(b2e, u))", "def LineMaxDistanceTo(line, point_or_line):\n line = rhutil.coerceline(line, True)\n test = rhutil.coerceline(point_or_line)\n if test is None: test = rhutil.coerce3dpoint(point_or_line, True)\n return line.MaximumDistanceTo(test)", "def _get_point_source_location(element):\n pos = element.find('%s/%s/%s' %\n (NRML04_POINT_GEOMETRY, gml.GML_POINT, gml.GML_POS))\n pos = pos.text.split()\n\n return float(pos[0]), float(pos[1])", "def compute_start_end_points(linestrings):\n starts = []\n stops = []\n for ls in linestrings:\n pt = Point(ls.coords[0])\n starts.append(round(CONUS[\"poly\"].exterior.project(pt), 2))\n pt = Point(ls.coords[-1])\n stops.append(round(CONUS[\"poly\"].exterior.project(pt), 2))\n return starts, stops", "def end_point(self) -> Vec3:\n v = list(self.vertices([self.dxf.end_angle]))\n return v[0]", "def _afterpoint(string):\n if _isnumber(string) or _isnumber_with_thousands_separator(string):\n if _isint(string):\n return -1\n else:\n pos = string.rfind(\".\")\n pos = string.lower().rfind(\"e\") if pos < 0 else pos\n if pos >= 0:\n return len(string) - pos - 1\n else:\n return -1 # no point\n else:\n return -1 # not a number", "def get_endline(self):\n return self.get_attribute(\"endline\")", "def _distance_to_line(begin, end, point):\n return _vec_distance(point, _nearest_point_on_line(begin, end, point))", "def get_linestring(value):\n \n line = value['line']\n coords = [(x['x'], x['y']) for x in line]\n return geojson.Feature(\n geometry=geojson.LineString(coords),\n properties=value\n )", "def rl_get_point() -> int: # pragma: no cover\n if rl_type == RlType.GNU:\n return ctypes.c_int.in_dll(readline_lib, \"rl_point\").value\n\n elif rl_type == RlType.PYREADLINE:\n return int(readline.rl.mode.l_buffer.point)\n\n else:\n return 0", "def point(self):\n return shapely.geometry.Point(self._x[0], self._x[1])" ]
[ "0.6676908", "0.5809005", "0.5783823", "0.56184757", "0.55974036", "0.55868995", "0.55476713", "0.55083495", "0.54330933", "0.5421323", "0.5405748", "0.5357882", "0.5356936", "0.5310009", "0.53001106", "0.5296828", "0.52810085", "0.5279905", "0.5247025", "0.52342033", "0.52333224", "0.5171406", "0.51680666", "0.5161024", "0.5148548", "0.5137402", "0.5129273", "0.51195246", "0.50881946", "0.5049726" ]
0.6394568
1
Return the number of points in a geometry. Works for all geometries. Returns: IntegerValue, the number of points.
def n_points(self) -> ir.IntegerValue:
    return ops.GeoNPoints(self).to_expr()
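As a hedged illustration of how the vertex count is typically used, assuming the same hypothetical setup (PostGIS backend, table "roads", geometry column "geom"):

import ibis

con = ibis.postgres.connect(database="gisdb")
roads = con.table("roads")

# Keep only geometries with more than two vertices, then count them
dense = roads.filter(roads.geom.n_points() > 2)
print(dense.count().execute())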
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetNumberOfPoints(self):\n return self.GetNumberOfElements(ArrayAssociation.POINT)", "def nr_points(self):\n return len(self.x)", "def get_num_points(self):\n dimensions = self.data.shape\n return dimensions[0]", "def Points_Counting(self):\n return len(self.__traectory_list)", "def n_points(self):\n return self.points.shape[0]", "def num_quadrature_points(self) -> int:", "def get_vertices_count(self) -> int:\n # TODO: verify the following claim:\n raise NotImplementedError", "def return_num_vertices(self):\n return self.__size", "def get_points_number(self):\n ncontour = self.get_contours_number\n npoints = []\n for i in range(0, ncontour):\n npoints.append(len(self.x[i]))\n return npoints", "def numCoords(self):\n return self.nCoords", "def n_points(self):\n\n if self.data_reduced:\n return len(self.data_reduced[0])\n else:\n return 0", "def __len__(self):\n return len(self.geometries)", "def __len__(self):\n return self.num_points", "def num_points(self, f=None):\n if f is not None:\n return f(self.contexts.shape[0])\n return self.contexts.shape[0]", "def pointsize(self):\n\treturn self.m_pointsize", "def setNumberOfPoints(self):\n return self.numberOfPoints()", "def num_vertices(self):\n return len(self.vertices)", "def num_vertices(self):\n return len(self.vertices)", "def numpoints(self):\n return len(self.pars) + 1 # so dof is 1", "def count(self, qid):\n\n bbox = (\n self.to_frame()\n .query(f\"id == '{qid}'\")\n .geometry.bounds.values.flatten()\n .tolist()\n )\n\n # Get points that intersect the quadrant\n point_int = list(self.sindex.intersection(bbox))\n\n return len(point_int) if point_int else 0", "def num_vertices(self):\n return len(self)", "def feature_len(self):\n return len(self.coord)", "def __len__(self):\n return len(self._points)", "def n_integral_points(self):\n if self.is_empty():\n return tuple()\n box_min, box_max = self.bounding_box()\n from sage.geometry.integral_points import rectangular_box_points\n return rectangular_box_points(box_min, box_max, self, count_only=True)", "def npoints(self):\n return _property_op(arctern.ST_NPoints, self)", "def nspatials(self):\n return int(len(self)/2)", "def get_position_count(self):\n return self.positions.count()", "def get_num_features(self):\r\n \r\n return len(self[0]['x'])", "def __len__(self):\n return len(self.points)", "def __len__(self):\n return len(self.points)" ]
[ "0.7617105", "0.7232622", "0.69771457", "0.6758496", "0.67430544", "0.6689235", "0.6671096", "0.664159", "0.6603003", "0.6600424", "0.6575029", "0.6537295", "0.65371937", "0.65340865", "0.65138453", "0.65028435", "0.6462112", "0.6462112", "0.6436137", "0.64342684", "0.6429228", "0.63812524", "0.6364493", "0.6335977", "0.6329052", "0.6323973", "0.63000035", "0.6272063", "0.6269861", "0.6269861" ]
0.75139165
1
Return the spatial reference identifier for the ST_Geometry. Returns: IntegerValue, the SRID.
def srid(self) -> ir.IntegerValue:
    return ops.GeoSRID(self).to_expr()
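A short sketch showing one common use of the SRID accessor, again with assumed names (hypothetical PostGIS connection, table "roads", column "geom"):

import ibis

con = ibis.postgres.connect(database="gisdb")
roads = con.table("roads")

# How many rows are stored under each spatial reference identifier
print(roads.geom.srid().value_counts().execute())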
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SRID():\r\n return SurveyPointMixin._SRID", "def srs(self):\n if HAS_GDAL:\n if hasattr(self, '_srs'):\n # Returning a clone of the cached SpatialReference object.\n return self._srs.clone()\n else:\n # Attempting to cache a SpatialReference object.\n\n # Trying to get from WKT first.\n try:\n self._srs = SpatialReference(self.wkt)\n return self.srs\n except Exception, msg:\n pass\n \n raise Exception('Could not get OSR SpatialReference from WKT: %s\\nError:\\n%s' % (self.wkt, msg))\n else:\n raise Exception('GDAL is not installed.')", "def get_srid_info(srid):\n # SRID=-1 is a common convention for indicating the geometry has no\n # spatial reference information associated with it. Thus, we will\n # return all None values without raising an exception.\n if srid == -1: return None, None, None\n\n # Getting the spatial reference WKT associated with the SRID from the\n # `spatial_ref_sys` (or equivalent) spatial database table. This query\n # cannot be executed using the ORM because this information is needed\n # when the ORM cannot be used (e.g., during the initialization of \n # `GeometryField`).\n from django.db import connection\n cur = connection.cursor()\n qn = connection.ops.quote_name\n stmt = 'SELECT %(table)s.%(wkt_col)s FROM %(table)s WHERE (%(table)s.%(srid_col)s = %(srid)s)'\n stmt = stmt % {'table' : qn(SpatialRefSys._meta.db_table),\n 'wkt_col' : qn(SpatialRefSys.wkt_col()),\n 'srid_col' : qn('srid'),\n 'srid' : srid,\n }\n cur.execute(stmt)\n \n # Fetching the WKT from the cursor; if the query failed raise an Exception.\n fetched = cur.fetchone()\n if not fetched:\n raise ValueError('Failed to find spatial reference entry in \"%s\" corresponding to SRID=%s.' % \n (SpatialRefSys._meta.db_table, srid))\n srs_wkt = fetched[0]\n\n # Getting metadata associated with the spatial reference system identifier.\n # Specifically, getting the unit information and spheroid information \n # (both required for distance queries).\n unit, unit_name = SpatialRefSys.get_units(srs_wkt)\n spheroid = SpatialRefSys.get_spheroid(srs_wkt)\n return unit, unit_name, spheroid", "def _sr(self, sr):\r\n if isinstance(sr, _types.SpatialReference):\r\n return sr\r\n elif isinstance(sr, dict):\r\n return _types.SpatialReference(sr)\r\n elif isinstance(sr, integer_types):\r\n return _types.SpatialReference({'wkid' : sr})\r\n elif isinstance(sr, string_types):\r\n return _types.SpatialReference({'wkt' : sr})\r\n elif hasattr(sr, 'factoryCode'):\r\n return _types.SpatialReference({'wkid' : sr.factoryCode})\r\n elif hasattr(sr, 'exportToString'):\r\n return _types.SpatialReference({'wkt' : sr.exportToString()})\r\n elif not sr is None:\r\n raise ValueError(\"sr (spatial reference) must be a _types.SpatialReference object\")\r\n else:\r\n return None", "def srid(self):\n crs = self.crs\n if crs is not None:\n srid = crss.parseEPSGCode(crs,\n (crss.fromURL, crss.fromURN, crss.fromShortCode)\n )\n if srid is None and not crss.is_image_crs(crs):\n raise InvalidSubsettingCrsException(\n \"Could not parse EPSG code from URI '%s'\" % crs\n )\n return srid\n return None", "def geometry_type(self) -> ir.StringValue:\n return ops.GeoGeometryType(self).to_expr()", "def shape_id(self):\n return self._shape_id", "def getId(self):\n return _libsbml.SimpleSpeciesReference_getId(self)", "def sosid(self):\r\n return self.word2idx.get(SOS, 0)", "def geometry_type(number):\n try:\n return GDAL_GEOMETRY_TYPES[number]\n except KeyError:\n return", "def geometry_n(self, n: int | ir.IntegerValue) -> GeoSpatialValue:\n return 
ops.GeoGeometryN(self, n).to_expr()", "def set_srid(self, srid: ir.IntegerValue) -> GeoSpatialValue:\n return ops.GeoSetSRID(self, srid=srid).to_expr()", "def getSpeciesReferenceId(self):\n return _libsbml.SpeciesReferenceGlyph_getSpeciesReferenceId(self)", "def identifier(self):\n return location_id(self.__dict__)", "def region_id(self) -> str:\n return self._region_id", "def get_geometry_type(self):\n return self.geometry_type", "def geom_type(self):\n return _property_op(arctern.ST_GeometryType, self)", "def get_geometry_type(self):\n return self._geometry_type", "def region_id(self):\n return self._region_id", "def getIdRef(self):\n return _libsbml.SBaseRef_getIdRef(self)", "def getReferenceId(self):\n return _libsbml.ReferenceGlyph_getReferenceId(self)", "def getId(self):\n return _libsbml.SBase_getId(self)", "def getReferenceId(self):\n return _libsbml.GeneralGlyph_getReferenceId(self)", "def getId(self):\n return _libsbml.SpeciesFeature_getId(self)", "def entity_id(self):\n return 'sensor.{}_{}'.format(self._name, self.var_id)", "def get_spheroid(cls, wkt, string=True):\n if HAS_GDAL:\n srs = SpatialReference(wkt)\n sphere_params = srs.ellipsoid\n sphere_name = srs['spheroid']\n else:\n m = cls.spheroid_regex.match(wkt)\n if m: \n sphere_params = (float(m.group('major')), float(m.group('flattening')))\n sphere_name = m.group('name')\n else: \n return None\n \n if not string: \n return sphere_name, sphere_params\n else:\n # `string` parameter used to place in format acceptable by PostGIS\n if len(sphere_params) == 3:\n radius, flattening = sphere_params[0], sphere_params[2]\n else:\n radius, flattening = sphere_params\n return 'SPHEROID[\"%s\",%s,%s]' % (sphere_name, radius, flattening)", "def _epsg(self):\n info = self._info['coordinateSystem']['wkt'].rsplit('\"EPSG\",', 1)[-1]\n return int(re.findall(r\"\\d+\", info)[0])", "def grouping_crs(self):\n return self._get_srid_name(\n self._engine.execute(select([FOOTPRINT_SRID_EXPRESSION])).scalar()\n )", "def getId(self):\n return _libsbml.SpeciesFeatureType_getId(self)", "def getId(self):\n return _libsbml.Point_getId(self)" ]
[ "0.686706", "0.65671414", "0.62904024", "0.6243877", "0.6228892", "0.5941888", "0.58783215", "0.5868545", "0.58193594", "0.57678574", "0.57584834", "0.57102376", "0.5627976", "0.55917823", "0.55889064", "0.55837923", "0.5579493", "0.5547355", "0.55289334", "0.55194235", "0.55071855", "0.5467041", "0.545534", "0.54292804", "0.54121715", "0.54108286", "0.54046035", "0.53703094", "0.53473985", "0.53424793" ]
0.7563923
0
Returns the centroid of the geometry. Returns: PointValue, the centroid.
def centroid(self) -> PointValue:
    return ops.GeoCentroid(self).to_expr()
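Illustrative only, assuming a hypothetical "parcels" table with a polygon column "geom" on a PostGIS-backed connection:

import ibis

con = ibis.postgres.connect(database="gisdb")
parcels = con.table("parcels")  # assumed polygon column "geom"

# Centroid of each geometry as a POINT value
centers = parcels.geom.centroid()
print(centers.execute())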
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def centroid(self):\n return _property_geo(arctern.ST_Centroid, self)", "def centroid(self) -> Point[Scalar]:\n return self._context.multipoint_centroid(self)", "def centroid(self): # -> BaseGeometry:\n ...", "def centroid(self) -> Point:\n points = self.normalized_array\n centroids = [np.average(points[[0, i, i + 1], :-1], axis=0) for i in range(1, points.shape[0] - 1)]\n weights = [det(self._normalized_projection()[[0, i, i + 1]]) / 2 for i in range(1, points.shape[0] - 1)]\n return Point(*np.average(centroids, weights=weights, axis=0))", "def getCentroid(self):\n if len(self.points) == 0:\n # None\n return None\n elif len(self.points) == 1:\n # Same point\n return self.points[0]\n elif len(self.points) == 2:\n # Middle of a segment\n return Segment(*self.points).middle\n elif len(self.points) == 3:\n # Intersection point of 2 medians\n return Point.average(self.points)\n else:\n # Geometric decomposition to compute centroids (wikipedia)\n n = len(self.points) # n is the number of points\n # There are n-2 forms\n forms = [Form([self.points[0]] + self.points[i:i + 2]) for i in range(1, n - 1)]\n # So n-2 centroids and areas, except if some of the points are one upon another, no area is null\n centroids = [form.center for form in forms]\n areas = [form.area for form in forms]\n # we compute the average centroid weighted by the areas\n weighted_centroid = Point.sum([a * c for (c, a) in zip(centroids, areas)])\n centroid = weighted_centroid / sum(areas)\n return centroid", "def getCentroid(self):\r\n return self._centroid", "def getCentroid(self) -> Vec3:\n return self.centroid()", "def centroid(self):\n x, y = self.coordinates\n A = 0.5 * sum(x[i]*y[i+1] - x[i+1]*y[i] for i in range(-1, len(self)-1))\n cx = sum((x[i] + x[i+1]) * (x[i]*y[i+1] - x[i+1]*y[i])\n for i in range(-1, len(self)-1)) / (6*A)\n cy = sum((y[i] + y[i+1]) * (x[i]*y[i+1] - x[i+1]*y[i])\n for i in range(-1, len(self)-1)) / (6*A)\n return Point((cx, cy), properties=self.properties, crs=self.crs)", "def getCentroid(self):\n centroid = 0.0\n sumMagnitude = 0.0\n\n for i in range(0,self.nUniquePoints):\n freq,magnitude = self.fDomain[i]\n\n centroid += freq*magnitude\n sumMagnitude += magnitude\n \n centroid /= sumMagnitude\n return centroid", "def centroid(self):\n return self.contours_to_matrix().mean(axis=0)", "def calc_centroid(self):\n num = 0\n centroid = numpy.zeros(3, float)\n for atm in self:\n if atm.position is not None:\n centroid += atm.position\n num += 1\n return centroid / num", "def GetCentroid(self, p_float=..., p_float=..., p_float=...):\n ...", "def calc_centroid(self):\n sumX = 0.0\n sumY = 0.0\n dis = 0.0\n for p in self.points:\n sumX += p.x\n sumY += p.y\n d = p.distance(self.centroid)\n if dis < d: dis = d\n # radius is the longest distance within points\n self.radius = dis + 0.1\n size = len(self.points)\n if size:\n return Point(x=float(sumX)/size, y=float(sumY)/size)\n else:\n return self.centroid", "def calcCentroid(self):\n size = len(self.vectors)\n # zip all features together\n zipped = zip(*self.vectors)\n # Calculate the mean for each feature/column\n centroid = [math.fsum(column)/size for column in zipped]\n \n return centroid", "def Centroid(self):\n return Vector(self.centroid)", "def coordinates(self):\n # TODO: Add the feature where coordinates come from multiple sources.\n # Consider whether or not you'd want to output the categorical\n # variable indicating the source of the coordinate data or\n # make the user place coordinates a different property entirely.\n try:\n bounding_box = 
array(\n self.status.place\n [\"bounding_box\"]\n [\"coordinates\"]\n ).squeeze()\n centroid = bounding_box.mean(axis=0)\n return centroid\n except AttributeError:\n return zeros(2)", "def getCenter(self):\n return Point.average(self.points)", "def center(self):\n\n ca_atoms = self.ca_atoms\n ca_atom_vectors = ca_atoms[\"ca.atom\"].to_list()\n ca_atom_vectors = [i for i in ca_atom_vectors if i is not None]\n centroid = self.center_of_mass(ca_atom_vectors, geometric=False)\n centroid = Vector(centroid)\n\n return centroid", "def centroid(self) -> Point:\n # if the hydroline is defined, use the centroid of the hydroline\n if isinstance(self.geometry, Polyline):\n pt = Geometry({\n 'x': np.mean([self.putin.geometry.x, self.takeout.geometry.x]),\n 'y': np.mean([self.putin.geometry.y, self.takeout.geometry.y]),\n 'spatialReference': self.putin.geometry.spatial_reference\n })\n\n # if both accesses are defined, use the mean of the accesses\n elif isinstance(self.putin, ReachPoint) and isinstance(self.takeout, ReachPoint):\n\n # create a point geometry using the average coordinates\n pt = Geometry({\n 'x': np.mean([self.putin.geometry.x, self.takeout.geometry.x]),\n 'y': np.mean([self.putin.geometry.y, self.takeout.geometry.y]),\n 'spatialReference': self.putin.geometry.spatial_reference\n })\n\n # if only the putin is defined, use that\n elif isinstance(self.putin, ReachPoint):\n pt = self.putin.geometry\n\n # and if on the takeout is defined, likely the person digitizing was taking too many hits from the bong\n elif isinstance(self.takeout, ReachPoint):\n pt = self.takeout.geometry\n\n else:\n pt = None\n\n return pt", "def find_centroid_cell(self):\n\n x_min, y_min = self.find_min()\n x_max, y_max = self.find_max()\n x_centroid = int((x_max+x_min)/2)\n y_centroid = int((y_max+y_min)/2)\n centroide = x_centroid, y_centroid\n return centroide", "def centroid(self, coords):\r\n return np.mean(coords, axis=0)", "def element_centroid(self, element):\n return centroid_points(self.nodes_xyz(nodes=self.elements[element].nodes))", "def findCentroid(geom, preferredEpsg):\n (projTr, llTr) = makeTransformations(4326, preferredEpsg)\n \n geomProj = copyGeom(geom)\n geomProj.Transform(projTr)\n geomCentroid = geomProj.Centroid()\n geomCentroid.Transform(llTr)\n \n centroidDict = eval(geomCentroid.ExportToJson())\n centroidXY = centroidDict['coordinates']\n return centroidXY", "def center(self):\n if not hasattr(self, '_center'):\n self._center = np.unique(self.points, axis=0).mean(axis=0)\n return self._center", "def get_center(self):\n\n x = np.array(self.x)\n y = np.array(self.y)\n return np.mean(x), np.mean(y)", "def estimate_centroid(self):\r\n\t\tstrain = self.strain_distribution_compr(self.max_pure_compresive_strain,\\\r\n\t\t\tself.max_pure_compresive_strain)\r\n\t\tself.geometric_centrod = (self.depth/2) \r\n\t\tself.plastic_centroid = (self.depth/2)+\\\r\n\t\t\t(self.sectional_moment(strain, self.depth/2)/\\\r\n\t\t\tself.sectional_force(strain))", "def __CalculateCentroid(self, contour):\r\n moments = cv2.moments(contour)\r\n\r\n centroid = (-1, -1)\r\n if moments[\"m00\"] != 0:\r\n centroid = (int(round(moments[\"m10\"] / moments[\"m00\"])),\r\n int(round(moments[\"m01\"] / moments[\"m00\"])))\r\n\r\n return centroid", "def get_element_centroids(self):\n if self.centroids is None:\n self.centroids = np.vstack((\n np.mean(self.grid['x'], axis=1),\n np.mean(self.grid['z'], axis=1)\n )).T\n\n return self.centroids", "def get_centre(self):\n # just get the centroid\n # perhaps try something like:\n # 
https://github.com/mapbox/polylabel/blob/master/polylabel.js\n # in the future\n coords = np.array([(n.x, n.y) for n in self.nodes])\n centre_x = coords[:, 0].mean()\n centre_y = coords[:, 1].mean()\n return centre_x, centre_y", "def as_centroid_feature(self) -> Feature:\n return Feature(geometry=self.centroid, attributes=self._get_feature_attributes())" ]
[ "0.825092", "0.81068456", "0.8062672", "0.80389404", "0.7984312", "0.79386777", "0.7813523", "0.77430385", "0.75754064", "0.7557029", "0.7553213", "0.7401819", "0.7374369", "0.7330661", "0.73298866", "0.7308585", "0.7282034", "0.72087103", "0.7091601", "0.70364404", "0.7005892", "0.6983315", "0.6961536", "0.69398195", "0.6936664", "0.6912024", "0.68955797", "0.6832304", "0.6822147", "0.68017846" ]
0.8787785
0
Transform a geometry into a new SRID.
def transform(self, srid: ir.IntegerValue) -> GeoSpatialValue:
    return ops.GeoTransform(self, srid).to_expr()
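A hedged sketch of reprojecting a column with transform, using the same assumed PostGIS connection and table names as the earlier examples; EPSG:3857 (Web Mercator) is just an example target SRID:

import ibis

con = ibis.postgres.connect(database="gisdb")
roads = con.table("roads")

# Reproject every geometry from its stored SRID into EPSG:3857
web_mercator = roads.geom.transform(3857)
print(web_mercator.execute())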
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def warp_geometry(geom, src_crs, dst_crs):\n return shapely.geometry.shape(rasterio.warp.transform_geom(src_crs, dst_crs, shapely.geometry.mapping(geom)))", "def normalizeGeometry(geom):\n\t# Convert string GEOSGeometry object to python dict\n\tgeom = json.loads(geom)\n\n\t# Normalize longitude to range [-180, 180) using saw tooth function\n\tc = geom['coordinates'][0]\n\tgeom['coordinates'][0] = (c+180 - ( math.floor( (c+180)/360 ) )*360) - 180\n\n\t# Normalize latitude to range [-90, 90) using saw tooth function\n\tc = geom['coordinates'][1]\n\tgeom['coordinates'][1] = (c+90 - ( math.floor( (c+90)/180 ) )*180) - 90\n\n\t# Encode and return GEOSGeometry object\n\treturn GEOSGeometry(json.dumps(geom))", "def transform_geometry(geom, crs=wgs84, to_crs=wgs84):\n\n from_crs = check_crs(crs)\n to_crs = check_crs(to_crs)\n\n if isinstance(to_crs, pyproj.Proj) and isinstance(from_crs, pyproj.Proj):\n project = partial(transform_proj, from_crs, to_crs)\n elif isinstance(to_crs, Grid):\n project = partial(to_crs.transform, crs=from_crs)\n elif isinstance(from_crs, Grid):\n project = partial(from_crs.ij_to_crs, crs=to_crs)\n else:\n raise NotImplementedError()\n\n from shapely.ops import transform\n return transform(project, geom)", "def srid(self) -> ir.IntegerValue:\n return ops.GeoSRID(self).to_expr()", "def create_osr_transform(src_epsg: int, dst_epsg: int):\n src_srs = osr.SpatialReference()\n src_srs.ImportFromEPSG(src_epsg)\n dst_srs = osr.SpatialReference()\n dst_srs.ImportFromEPSG(dst_epsg)\n return osr.CoordinateTransformation(src_srs, dst_srs)", "def set_geometry(self, selection_name, geometry):", "def copyGeom(geom):\n geomJson = geom.ExportToJson()\n newGeom = ogr.CreateGeometryFromJson(geomJson)\n return newGeom", "def geometry(self, geometry):\n if not isinstance(geometry, Rhino.Geometry.Sphere):\n if isinstance(geometry, Rhino.Geometry.Brep):\n if geometry.Faces.Count != 1:\n raise ConversionError(\"Object brep cannot be converted to a sphere.\")\n face = geometry.Faces.Item[0]\n if not face.IsSphere():\n raise ConversionError(\"Object brep cannot be converted to a sphere.\")\n result, geometry = face.TryGetSphere()\n if not result:\n raise ConversionError(\"Object brep cannot be converted to a sphere.\")\n elif isinstance(geometry, Sphere):\n geometry = sphere_to_rhino(geometry)\n else:\n raise ConversionError(\"Geometry object cannot be converted to a sphere: {}\".format(geometry))\n self._geometry = geometry", "def geometry(self, objectId):\n\n objectId = GeometryReference(objectId, self)\n req = urllib2.Request(self.baseUri + 'geometry/%d' % objectId.id)\n r = urllib2.urlopen(req)\n\n data = json.load(r)\n r.close()\n return data", "def geometry():\n return Geometry()", "def transform_geometries(datasource, src_epsg, dst_epsg):\n # Part 1\n src_srs = osr.SpatialReference()\n src_srs.ImportFromEPSG(src_epsg)\n dst_srs = osr.SpatialReference()\n dst_srs.ImportFromEPSG(dst_epsg)\n transformation = osr.CoordinateTransformation(src_srs, dst_srs)\n layer = datasource.GetLayerByIndex(0)\n \n # Part 2\n geoms = []\n layer.ResetReading()\n for feature in layer:\n geom = feature.GetGeometryRef().Clone()\n geom.Transform(transformation)\n geoms.append(geom)\n return geoms", "def _prepare_with_copy(geometry):\n geometry = pygeos.apply(geometry, lambda x: x) # makes a copy\n pygeos.prepare(geometry)\n return geometry", "def to_crs(self, crs):\n if crs is None:\n raise ValueError(\"Can not transform with invalid crs\")\n if self.crs is None:\n raise ValueError(\"Can not transform 
geometries without crs. Set crs for this GeoSeries first.\")\n if self.crs == crs:\n return self\n return _unary_geo(arctern.ST_Transform, self, self.crs, crs, crs=crs)", "def any_geom2ogr_geom(geom, osr_sref):\n\n if isinstance(geom, (tuple, list)) and (not isinstance(geom[0], (tuple, list))) and \\\n (len(geom) == 4) and osr_sref:\n geom_ogr = geometry.bbox2polygon(geom, osr_sref)\n geom_ogr = swap_axis(geom_ogr) # ensure lon lat order\n elif isinstance(geom, (tuple, list)) and (isinstance(geom[0], (tuple, list))) and \\\n (len(geom) == 2) and osr_sref:\n edge = ogr.Geometry(ogr.wkbLinearRing)\n geom = [geom[0], (geom[0][0], geom[1][1]), geom[1], (geom[1][0], geom[0][1])]\n for point in geom:\n if len(point) == 2:\n edge.AddPoint(float(point[0]), float(point[1]))\n edge.CloseRings()\n geom_ogr = ogr.Geometry(ogr.wkbPolygon)\n geom_ogr.AddGeometry(edge)\n geom_ogr.AssignSpatialReference(osr_sref)\n geom_ogr = force_axis_mapping(geom_ogr)\n elif isinstance(geom, (tuple, list)) and isinstance(geom[0], (tuple, list)) and osr_sref:\n edge = ogr.Geometry(ogr.wkbLinearRing)\n for point in geom:\n if len(point) == 2:\n edge.AddPoint(float(point[0]), float(point[1]))\n edge.CloseRings()\n geom_ogr = ogr.Geometry(ogr.wkbPolygon)\n geom_ogr.AddGeometry(edge)\n geom_ogr.AssignSpatialReference(osr_sref)\n geom_ogr = force_axis_mapping(geom_ogr)\n elif isinstance(geom, shapely.geometry.Polygon):\n geom_ogr = ogr.CreateGeometryFromWkt(geom.wkt)\n geom_ogr.AssignSpatialReference(osr_sref)\n geom_ogr = swap_axis(geom_ogr) # ensure lon lat order\n elif isinstance(geom, ogr.Geometry):\n geom_sref = geom.GetSpatialReference()\n if geom_sref is None:\n geom.AssignSpatialReference(osr_sref)\n geom_ogr = geom\n geom_ogr = swap_axis(geom_ogr) # ensure lon lat order\n else:\n raise GeometryUnkown(geom)\n\n return geom_ogr", "def set_srid(self, srid: ir.IntegerValue) -> GeoSpatialValue:\n return ops.GeoSetSRID(self, srid=srid).to_expr()", "def handle(self, geometry, fields=None, tags=None, id=None):\n pass", "def _get_geometry(self, val):\n g = OGRGeometry(val)\n return json.loads(g.json)", "def create_osr_srs(in_srs: Union[osr.SpatialReference, int, str], tradicional=True) -> osr.SpatialReference:\n if isinstance(in_srs, osr.SpatialReference):\n srs = in_srs.Clone()\n elif isinstance(in_srs, int):\n srs = osr.SpatialReference()\n srs.ImportFromEPSG(in_srs)\n elif isinstance(in_srs, str):\n srs = osr.SpatialReference()\n srs.ImportFromWkt(in_srs)\n else:\n raise ValueError(\"Formato srs desconhecido.\")\n if int(__version__[0]) >= 3 and tradicional:\n srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n return srs", "def convertor(geometry, method=\"wgs2gcj\"):\n if geometry['type'] == 'Point':\n coords = geometry['coordinates']\n coords[0], coords[1] = methods[method](coords[0], coords[1])\n elif geometry['type'] == 'LineString' or geometry['type'] == 'MutliPoint':\n coordinates = geometry['coordinates']\n for coords in coordinates:\n coords[0], coords[1] = methods[method](coords[0], coords[1])\n elif geometry['type'] == 'Polygon' or geometry['type'] == 'MultiLineString':\n coordinates = geometry['coordinates']\n for rings in coordinates:\n for coords in rings:\n coords[0], coords[1] = methods[method](coords[0], coords[1])\n elif geometry['type'] == 'MultiPolygon':\n coordinates = geometry['coordinates']\n for rings in coordinates:\n for lines in rings:\n for coords in lines:\n coords[0], coords[1] = methods[method](coords[0], coords[1])\n return geometry", "def to_geometry(v):\n return v / 1000", "def 
convert_spatial_reference_system(coordinates, source_epsg, target_epsg):\n source_spatial_reference = osr.SpatialReference()\n source_spatial_reference.ImportFromEPSG(source_epsg)\n target_spatial_reference = osr.SpatialReference()\n target_spatial_reference.ImportFromEPSG(target_epsg)\n coordinate_transformation = osr.CoordinateTransformation(source_spatial_reference, target_spatial_reference)\n\n transformed_coordinates = []\n for coordinate in coordinates:\n point = ogr.Geometry(ogr.wkbPoint)\n point.AddPoint(coordinate[0], coordinate[1])\n point.Transform(coordinate_transformation)\n transformed_coordinates.append((point.GetX(), point.GetY()))\n return transformed_coordinates", "def decode_geometry(geom: str) -> BasePolygon:\n return shape(geobuf.decode(bytes.fromhex(geom))).buffer(0)", "def get_spheroid(cls, wkt, string=True):\n if HAS_GDAL:\n srs = SpatialReference(wkt)\n sphere_params = srs.ellipsoid\n sphere_name = srs['spheroid']\n else:\n m = cls.spheroid_regex.match(wkt)\n if m: \n sphere_params = (float(m.group('major')), float(m.group('flattening')))\n sphere_name = m.group('name')\n else: \n return None\n \n if not string: \n return sphere_name, sphere_params\n else:\n # `string` parameter used to place in format acceptable by PostGIS\n if len(sphere_params) == 3:\n radius, flattening = sphere_params[0], sphere_params[2]\n else:\n radius, flattening = sphere_params\n return 'SPHEROID[\"%s\",%s,%s]' % (sphere_name, radius, flattening)", "def simplify(self, tolerance):\n return _unary_geo(arctern.ST_SimplifyPreserveTopology, self, tolerance)", "def _multigeometry(self, ogr_geometry):\n\n geo_type = ogr_geometry.GetGeometryType()\n\n if geo_type == ogr.wkbPolygon:\n return ogr.ForceToMultiPolygon(ogr_geometry)\n elif geo_type == ogr.wkbPoint:\n return ogr.ForceToMultiPoint(ogr_geometry)\n elif geo_type in [ogr.wkbLineString, ogr.wkbLinearRing]:\n return ogr.ForceToMultiLineString(ogr_geometry)\n else:\n return ogr_geometry", "def simplify(self, tolerance, preserve_topology=...): # -> BaseGeometry:\n ...", "def encode_geometry(geom: BasePolygon) -> str:\n encoded_geom = geobuf.encode(mapping(geom)).hex()\n\n # if the geometry is so complex is still goes over the limit, incrementally attempting to simplify it\n if sys.getsizeof(encoded_geom) > LAMBDA_ASYNC_PAYLOAD_LIMIT_BYTES:\n encoded_geom = geobuf.encode(\n mapping(geom.simplify(0.005, preserve_topology=False))\n ).hex()\n\n if sys.getsizeof(encoded_geom) > LAMBDA_ASYNC_PAYLOAD_LIMIT_BYTES:\n encoded_geom = geobuf.encode(\n mapping(geom.simplify(0.01, preserve_topology=False))\n ).hex()\n\n return encoded_geom", "def geometry_n(self, n: int | ir.IntegerValue) -> GeoSpatialValue:\n return ops.GeoGeometryN(self, n).to_expr()", "def __init__(self, geom_input, srs=None):\n str_instance = isinstance(geom_input, str)\n\n # If HEX, unpack input to a binary buffer.\n if str_instance and hex_regex.match(geom_input):\n geom_input = memoryview(bytes.fromhex(geom_input))\n str_instance = False\n\n # Constructing the geometry,\n if str_instance:\n wkt_m = wkt_regex.match(geom_input)\n json_m = json_regex.match(geom_input)\n if wkt_m:\n if wkt_m[\"srid\"]:\n # If there's EWKT, set the SRS w/value of the SRID.\n srs = int(wkt_m[\"srid\"])\n if wkt_m[\"type\"].upper() == \"LINEARRING\":\n # OGR_G_CreateFromWkt doesn't work with LINEARRING WKT.\n # See https://trac.osgeo.org/gdal/ticket/1992.\n g = capi.create_geom(OGRGeomType(wkt_m[\"type\"]).num)\n capi.import_wkt(g, byref(c_char_p(wkt_m[\"wkt\"].encode())))\n else:\n g = capi.from_wkt(\n 
byref(c_char_p(wkt_m[\"wkt\"].encode())), None, byref(c_void_p())\n )\n elif json_m:\n g = self._from_json(geom_input.encode())\n else:\n # Seeing if the input is a valid short-hand string\n # (e.g., 'Point', 'POLYGON').\n OGRGeomType(geom_input)\n g = capi.create_geom(OGRGeomType(geom_input).num)\n elif isinstance(geom_input, memoryview):\n # WKB was passed in\n g = self._from_wkb(geom_input)\n elif isinstance(geom_input, OGRGeomType):\n # OGRGeomType was passed in, an empty geometry will be created.\n g = capi.create_geom(geom_input.num)\n elif isinstance(geom_input, self.ptr_type):\n # OGR pointer (c_void_p) was the input.\n g = geom_input\n else:\n raise GDALException(\n \"Invalid input type for OGR Geometry construction: %s\"\n % type(geom_input)\n )\n\n # Now checking the Geometry pointer before finishing initialization\n # by setting the pointer for the object.\n if not g:\n raise GDALException(\n \"Cannot create OGR Geometry from input: %s\" % geom_input\n )\n self.ptr = g\n\n # Assigning the SpatialReference object to the geometry, if valid.\n if srs:\n self.srs = srs\n\n # Setting the class depending upon the OGR Geometry Type\n self.__class__ = GEO_CLASSES[self.geom_type.num]", "def reproject_vector( path, epsg_from=None, epsg_to=None):\n\n if not epsg_to: raise Exception(\"please, specify the output EPSG codes\")\n\n inDataSet = None\n outDataSet = None\n inFeature = None\n outFeature = None\n outLayer = None\n\n try:\n\n driver = ogr.GetDriverByName('ESRI Shapefile')\n inDataSet = driver.Open(path, 0) # 0 means read-only\n\n # define input SpatialReference\n if not epsg_from:\n layer = inDataSet.GetLayer()\n inSpatialRef = layer.GetSpatialRef()\n else:\n inSpatialRef = osr.SpatialReference()\n inSpatialRef.ImportFromEPSG(epsg_from)\n\n # define output SpatialReference\n outSpatialRef = osr.SpatialReference()\n outSpatialRef.ImportFromEPSG(epsg_to)\n\n # create the CoordinateTransformation\n coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)\n\n # get the first input layer and the geometry type\n inLayer = inDataSet.GetLayer()\n geotype = inLayer.GetGeomType()\n lname = inLayer.GetName()\n\n drv = ogr.GetDriverByName(\"ESRI Shapefile\")\n outDataSet = drv.CreateDataSource(\"/vsimem/memory.shp\")\n\n outLayer = outDataSet.CreateLayer(lname, srs=outSpatialRef, geom_type=geotype)\n\n # add fields\n inLayerDefn = inLayer.GetLayerDefn()\n\n for i in range(0, inLayerDefn.GetFieldCount()):\n fieldDefn = inLayerDefn.GetFieldDefn(i)\n outLayer.CreateField(fieldDefn)\n\n # get the output layer\"s feature definition\n outLayerDefn = outLayer.GetLayerDefn()\n\n counter = 1\n\n # loop through the input features\n inFeature = inLayer.GetNextFeature()\n while inFeature:\n # get the input geometry\n geom = inFeature.GetGeometryRef()\n # reproject the geometry\n geom.Transform(coordTrans)\n # create a new feature\n outFeature = ogr.Feature(outLayerDefn)\n # set the geometry and attribute\n outFeature.SetGeometry(geom)\n for i in range(0, outLayerDefn.GetFieldCount()):\n outFeature.SetField(outLayerDefn.GetFieldDefn(i).GetNameRef(), inFeature.GetField(i))\n # add the feature to the shapefile\n outLayer.CreateFeature(outFeature)\n\n # destroy the features and get the next input feature\n if outFeature: outFeature = None\n inFeature = inLayer.GetNextFeature()\n\n counter += 1\n #print(counter)\n\n return outDataSet\n\n except RuntimeError as err:\n raise err\n except Exception as e:\n raise e\n\n finally:\n if inDataSet: outDataSet == None # give back control to C++\n if 
outDataSet: outDataSet == None\n if outLayer: outLayer == None\n if inFeature: inFeature == None\n if outFeature: outFeature = None" ]
[ "0.6257238", "0.60261506", "0.5838761", "0.5772709", "0.5628056", "0.55562454", "0.5554736", "0.5516171", "0.5461421", "0.5434449", "0.5407168", "0.5386802", "0.53795725", "0.5365964", "0.53004307", "0.5273985", "0.5265612", "0.52504003", "0.51611197", "0.5148603", "0.51126087", "0.5112559", "0.5074921", "0.5074886", "0.50730693", "0.50669616", "0.50629914", "0.50607705", "0.5035641", "0.5031923" ]
0.60514665
1
Clip a substring from a LineString. Returns a linestring that is a substring of the input one, starting and ending at the given fractions of its total 2D length. The second and third arguments are floating-point values between zero and one. This only works with linestrings.
def line_substring(
    self, start: ir.FloatingValue, end: ir.FloatingValue
) -> ir.LineStringValue:
    return ops.GeoLineSubstring(self, start, end).to_expr()
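One way this is typically called, sketched with the same hypothetical setup (PostGIS backend, table "roads", LINESTRING column "geom"); the fractions are plain Python floats between 0 and 1:

import ibis

con = ibis.postgres.connect(database="gisdb")
roads = con.table("roads")

# Middle half of each linestring: from 25% to 75% of its 2D length
middle = roads.geom.line_substring(0.25, 0.75)
print(middle.execute())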
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clip(st,length):\n if len(st) > length:\n return st[:length] + \"...\"\n else:\n return st", "def linestring_segment(linestring: LineString, dist: float, threshold_length: float):\n coord_1 = linestring.interpolate(dist).coords[0]\n coord_2 = linestring.interpolate(dist + threshold_length).coords[0]\n return coord_1, coord_2", "def cut_linestring(line, distance):\n\n\tpd = 0\n\n\tdistance = distance % line.length\n\n\tif distance == 0.0:\n\t\treturn [line, []]\n\n\n\tcoords = list(line.coords)\n\tfor i in range(1, len(coords)):\n\t\t\n\t\tpd = LineString(coords[:i+1]).length\n\n\t\tif pd == distance:\n\t\t\treturn [\n\t\t\t\tLineString(coords[:i+1]),\n\t\t\t\tLineString(coords[i:])]\n\n\t\tif pd > distance:\n\t\t\tcp = line.interpolate(distance)\n\t\t\treturn [\n\t\t\t\tLineString(coords[:i] + [(cp.x, cp.y)]),\n\t\t\t\tLineString([(cp.x, cp.y)] + coords[i:])]", "def segmentize_linestring(\n linestring: LineString, threshold_length: float\n) -> List[Tuple[Tuple[float, float], Tuple[float, float]]]:\n assert isinstance(linestring, LineString)\n segments: List[Tuple[Tuple[float, float], Tuple[float, float]]] = []\n for dist in np.arange(0.0, linestring.length, threshold_length):\n segments.append(linestring_segment(linestring, dist, threshold_length))\n\n return segments", "def segment_within_buffer(\n linestring: LineString,\n multilinestring: MultiLineString,\n snap_threshold: float,\n snap_threshold_error_multiplier: float,\n overlap_detection_multiplier: float,\n) -> bool:\n # Test for a single segment overlap\n if linestring.overlaps(multilinestring):\n return True\n\n buffered_linestring = safe_buffer(\n linestring, snap_threshold * snap_threshold_error_multiplier\n )\n assert isinstance(linestring, LineString)\n assert isinstance(buffered_linestring, Polygon)\n assert isinstance(multilinestring, MultiLineString)\n assert buffered_linestring.area > 0\n min_x, min_y, max_x, max_y = geom_bounds(buffered_linestring)\n\n # Crop MultiLineString near to the buffered_linestring\n cropped_mls = buffered_linestring.intersection(multilinestring)\n\n # Check for cases with no chance of stacking\n if cropped_mls.is_empty or (\n isinstance(cropped_mls, LineString)\n and cropped_mls.length < snap_threshold * overlap_detection_multiplier\n ):\n return False\n\n assert isinstance(cropped_mls, (MultiLineString, LineString))\n\n all_segments: List[Tuple[Tuple[float, float], Tuple[float, float]]] = []\n ls: LineString\n\n # Create list of LineStrings from within the crop\n mls_geoms: List[LineString] = (\n list(cropped_mls.geoms)\n if isinstance(cropped_mls, MultiLineString)\n else [cropped_mls]\n )\n\n # Iterate over list of LineStrings\n for ls in mls_geoms:\n all_segments.extend(\n segmentize_linestring(ls, snap_threshold * overlap_detection_multiplier)\n )\n for start, end in all_segments:\n if within_bounds(\n x=start[0], y=start[1], min_x=min_x, min_y=min_y, max_x=max_x, max_y=max_y\n ) and within_bounds(\n x=end[0],\n y=end[1],\n min_x=min_x,\n min_y=min_y,\n max_x=max_x,\n max_y=max_y\n # *end, min_x, min_y, max_x, max_y\n ):\n ls = LineString([start, end])\n if ls.length > snap_threshold * overlap_detection_multiplier and ls.within(\n buffered_linestring\n ):\n return True\n return False", "def ltrim1 (l,proportiontocut,tail='right'):\r\n if tail == 'right':\r\n lowercut = 0\r\n uppercut = len(l) - int(proportiontocut*len(l))\r\n elif tail == 'left':\r\n lowercut = int(proportiontocut*len(l))\r\n uppercut = len(l)\r\n return l[lowercut:uppercut]", "def delete_substr(self, y, x1, x2):\n 
self.lines[y] = self.lines[y][ : x1] + self.lines[y][x2 : ]", "def atrim1 (a,proportiontocut,tail='right'):\r\n if string.lower(tail) == 'right':\r\n lowercut = 0\r\n uppercut = len(a) - int(proportiontocut*len(a))\r\n elif string.lower(tail) == 'left':\r\n lowercut = int(proportiontocut*len(a))\r\n uppercut = len(a)\r\n return a[lowercut:uppercut]", "def clip_rect(string):\n match = RECT.search(normalize(string or ''))\n return match.group(1).split(' ') if match else []", "def normalise_slice(s):\n\n s = s - s.min()\n s = s / s.max()\n return s", "def truncate(string):", "def test_line_substring():\n for _x in range(100):\n l_str = random_str(50, 100)\n line = Line(l_str, random_str(10, 20), randint(1, 10000))\n # Try a single charater\n c_idx = randint(0, len(l_str)-1)\n sub_line = line[c_idx]\n assert sub_line == l_str[c_idx]\n assert isinstance(sub_line, Line)\n assert sub_line.file == line.file\n assert sub_line.number == line.number\n # Try a range\n s_idx = randint(0, (len(l_str) // 2) - 1)\n e_idx = randint(len(l_str) // 2, len(l_str) - 1)\n sub_line = line[s_idx:e_idx]\n assert sub_line == l_str[s_idx:e_idx]\n assert sub_line.file == line.file\n assert sub_line.number == line.number", "def cut_text(value, length): # Only one argument.\n return value[0:length]", "def subsettter(clipsegments, lengthtype):\n if lengthtype == 'twothirds':\n clipsegments.remove('AR8')\n clipsegments.remove('AF13')\n elif lengthtype == 'abouthalf':\n clipsegments.remove('AR8')\n clipsegments.remove('AF13')\n clipsegments.remove('AF7')\n return clipsegments", "def cut(self, piece):\n self.substrates = self.substrates.difference(piece)", "def ltrimboth (l,proportiontocut):\r\n lowercut = int(proportiontocut*len(l))\r\n uppercut = len(l) - lowercut\r\n return l[lowercut:uppercut]", "def slice(str):\n\tnew_string = ''\n\tfor i in reversed(str): #reading in str reversed\n \t\tif i != '/': #stopping building once we hit '/'\n \t\t\tnew_string += i\n \t\telse:\n \t\t\tnew_string = new_string[::-1] #re-reversing\n \t\t\tif new_string.endswith('.fastq.gz'):\n \t\t\t\tnew_string = new_string[:-9]\n \t\t\tif new_string.endswith('.fastq'): \n \t\t\t\tnew_string = new_string[:-6] #cutting out .fastq\n \t\t\treturn new_string", "def get_substr(self, y, x1, x2):\n return self.lines[y][x1 : x2]", "def part2(input_string):\n length = len(input_string[0])\n for i in range(length):\n modified_input = [line[:i] + line[i+1:] for line in input_string]\n for line in modified_input:\n if modified_input.count(line) == 2:\n return line", "def slice_by_pos(val: str, start: SourcePos, end: SourcePos) -> str:\n if \"\\n\" in val:\n lines = val.split(\"\\n\")\n if end.row > start.row:\n top = lines[start.row][start.col :]\n filling = lines[start.row + 1 : end.row]\n bottom = lines[end.row][: end.col]\n return \"\\n\".join(line for line in chain([top], filling, [bottom]))\n else:\n return lines[start.row][start.col : end.col]\n else:\n return val[start.col : end.col]", "def trim(self, start, end):", "def _get_clipping_object(strlist):\n if len(strlist) < 4:\n raise SyntaxError('Insufficient strings to constitute a clipping')\n # zeroth line is title and author\n title,author = _get_book(strlist[0])\n # next line is the clip metadata\n clip_type,page,location_range,datetime = _get_clip_meta(strlist[1])\n # clip metadata is followed by a line that seems to be always blank and is not part of clip text. 
\n # To be safe, if it does happen to be non-empty, preserve it in the clip_text.\n text_start = 3 \n if strlist[2].strip() != '':\n text_start = 2 # ensure this non-blank line becomes part of the clip_text\n clip_text = ''.join(strlist[text_start::])\n\n return Clipping(title, author, clip_type, page, location_range, datetime, clip_text)", "def _ProcessSubstring(self, substring):\n if not substring:\n return\n stripped_substring = StripStartParens(substring)\n stripped_remaining = StripStartParens(self.remaining_string)\n if not stripped_remaining.startswith(stripped_substring):\n raise BadlySpecifiedTemplateError(\n 'string \"{}\" should be in string \"{}\"'\n .format(stripped_substring, stripped_remaining))\n self.remaining_string = self.remaining_string.split(\n stripped_substring, 1)[1]", "def cut_line_at_point(line, point):\n\n distance = line.project(point)\n if distance <= 0.0 or distance >= line.length:\n return [LineString(line)]\n\n coords = list(line.coords)\n for i, p in enumerate(coords):\n pd = line.project(Point(p))\n if pd == distance:\n return [LineString(coords[: i + 1]), LineString(coords[i:])]\n if pd > distance:\n cp = line.interpolate(distance)\n return [\n LineString(coords[:i] + [(cp.x, cp.y)]),\n LineString([(cp.x, cp.y)] + coords[i:]),\n ]", "def splitLine(string, overflow=70):\n w=[]\n n=len(string)\n for i in range(0,n,overflow):\n w.append(string[i:i+overflow])\n return w", "def _slice(self, slc):\n char_indexes = self._char_indexes\n slice_indexes = char_indexes[slc]\n # If it's the end of the string, we need to append final color codes.\n if not slice_indexes:\n # if we find no characters it may be because we are just outside\n # of the interval, using an open-ended slice. We must replay all\n # of the escape characters until/after this point.\n if char_indexes:\n if slc.start is None and slc.stop is None:\n # a [:] slice of only escape characters\n return ANSIString(self._raw_string[slc])\n if slc.start is None:\n # this is a [:x] slice\n return ANSIString(self._raw_string[: char_indexes[0]])\n if slc.stop is None:\n # a [x:] slice\n return ANSIString(self._raw_string[char_indexes[-1] + 1 :])\n return ANSIString(\"\")\n try:\n string = self[slc.start or 0]._raw_string\n except IndexError:\n return ANSIString(\"\")\n last_mark = slice_indexes[0]\n # Check between the slice intervals for escape sequences.\n i = None\n for i in slice_indexes[1:]:\n for index in range(last_mark, i):\n if index in self._code_indexes:\n string += self._raw_string[index]\n last_mark = i\n try:\n string += self._raw_string[i]\n except IndexError:\n # raw_string not long enough\n pass\n if i is not None:\n append_tail = self._get_interleving(char_indexes.index(i) + 1)\n else:\n append_tail = \"\"\n return ANSIString(string + append_tail, decoded=True)", "def gicp(line):\n import pyperclip\n import shlex\n args = shlex.split(line)\n if len(args) == 0:\n num_lines_prior = 1\n else:\n num_lines_prior = int(args[1])\n pyperclip.copy(In[-1-num_lines_prior])", "def normalize_slice(s):\n start, stop, step = s.start, s.stop, s.step\n if start is None:\n start = 0\n if step is None:\n step = 1\n if start < 0 or step < 0 or stop is not None and stop < 0:\n raise NotImplementedError()\n return slice(start, stop, step)", "def cut_in_lines(self,line):\n limit_screen = 30 #caracteres que tiene de ancho la pantalla\n length = 0 #para comparar leineas\n res = ''\n\n for linea in line.split('\\n'):\n if length + len(linea) <= limit_screen:\n new_linea = linea\n length += len(new_linea)\n else:\n if 
len(linea) > limit_screen:\n linea = self.cut_in_words(linea)\n new_linea = '\\n' + linea\n length = len(new_linea) - 2 #-2 para no tener en cuenta el \\n\n res += new_linea\n return res", "def smooth_linestring(linestring, smooth_sigma):\n smooth_x = np.array(filters.gaussian_filter1d(\n linestring.xy[0],\n smooth_sigma)\n )\n smooth_y = np.array(filters.gaussian_filter1d(\n linestring.xy[1],\n smooth_sigma)\n )\n smoothed_coords = np.hstack((smooth_x, smooth_y))\n smoothed_coords = zip(smooth_x, smooth_y)\n linestring_smoothed = LineString(smoothed_coords)\n return linestring_smoothed" ]
[ "0.57219964", "0.55839944", "0.55459213", "0.5359945", "0.5296645", "0.52570975", "0.5250844", "0.51391494", "0.5098545", "0.5098532", "0.50885856", "0.5063027", "0.49769998", "0.49678054", "0.49621388", "0.48748156", "0.4868886", "0.48605305", "0.48212624", "0.4783714", "0.4764682", "0.47520906", "0.47469324", "0.47434607", "0.47057313", "0.4701463", "0.46740434", "0.46487898", "0.46448997", "0.46446294" ]
0.5694107
1
Aggregate a set of geometries into a union. This corresponds to the aggregate version of the PostGIS ST_Union. We give it a different name (following the corresponding method in GeoPandas) to avoid name conflicts with the nonaggregate version. Returns GeoSpatialScalar Union of geometries
def unary_union(self) -> ir.GeoSpatialScalar: return ops.GeoUnaryUnion(self).to_expr().name("union")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unary_union(self):\n return GeoSeries(arctern.ST_Union_Aggr(self))", "def union(self, other):\n return self._geomgen(capi.geom_union, other)", "def union(feature):\n\n mp = MultiPolygon([Polygon([tuple(z) for z in y]) for y in feature.coord])\n union = ops.unary_union(mp)\n \n coords = [] \n if union.geom_type == 'Polygon':\n coords.append(np.array(union.exterior.coords))\n if union.geom_type == 'MultiPolygon':\n for x in union.geoms:\n coords.append(np.array(x.exterior.coords))\n\n new_feature = Feature()\n new_feature.coord = coords\n return new_feature", "def mergeGeometries(self):\n self.geometry = reduce(lambda p1,p2 : p1.union(p2) ,map(lambda tax : tax.biomeGeometry,self.taxonomies))\n return self.geometry", "def Union(*args, **kwargs):\n return _gdi_.Region_Union(*args, **kwargs)", "def union(self, right: GeoSpatialValue) -> GeoSpatialValue:\n return ops.GeoUnion(self, right).to_expr()", "def aggreg(iterable, aggregfuncs, geomfunc=None):\n def lookup_geomfunc(agg):\n # handle aliases\n if agg == \"dissolve\":\n agg = \"union\"\n elif agg == \"unique\":\n agg = \"difference\"\n\n # detect\n if agg == \"intersection\":\n def _func(fs):\n gs = (f.get_shapely() for f in fs if f.geometry)\n cur = next(gs)\n for g in gs:\n if not g.is_empty:\n cur = cur.intersection(g)\n return cur.__geo_interface__\n \n elif agg == \"difference\":\n def _func(fs):\n gs = (f.get_shapely() for f in fs if f.geometry)\n cur = next(gs)\n for g in gs:\n if not g.is_empty:\n cur = cur.difference(g)\n return cur.__geo_interface__\n\n elif agg == \"union\":\n def _func(fs):\n gs = [f.get_shapely() for f in fs if f.geometry]\n if len(gs) > 1:\n print(gs)\n from shapely.ops import cascaded_union\n return cascaded_union(gs).__geo_interface__\n elif len(gs) == 1:\n return gs[0].__geo_interface__\n\n elif hasattr(agg, \"__call__\"):\n # agg is not a string but a custom function\n return agg\n\n else:\n raise Exception(\"geomfunc must be a callable function or a valid set geometry string name\")\n\n return _func\n \n def lookup_aggfunc(agg):\n # handle aliases\n if agg in (\"average\",\"avg\"):\n agg = \"mean\"\n\n # detect\n if agg == \"count\": return len\n elif agg == \"sum\": return sum\n elif agg == \"max\": return max\n elif agg == \"min\": return min\n elif agg == \"first\": return lambda seq: seq.__getitem__(0)\n elif agg == \"last\": return lambda seq: seq.__getitem__(-1)\n elif agg == \"majority\": return lambda seq: max(itertools.groupby(sorted(seq)), key=lambda gidgroup: len(list(gidgroup[1])))[0]\n elif agg == \"minority\": return lambda seq: min(itertools.groupby(sorted(seq)), key=lambda gidgroup: len(list(gidgroup[1])))[0]\n elif agg == \"mean\": return lambda seq: sum(seq)/float(len(seq))\n elif isinstance(agg, basestring) and agg.endswith(\"concat\"):\n delim = agg[:-6]\n return lambda seq: delim.join((str(v) for v in seq))\n elif hasattr(agg, \"__call__\"):\n # agg is not a string but a function\n return agg\n else:\n raise Exception(\"aggfunc must be a callable function or a valid statistics string name\")\n\n def check_valfunc(valfunc):\n if hasattr(valfunc,\"__call__\"):\n pass\n elif isinstance(valfunc,basestring):\n hashindex = valfunc\n valfunc = lambda f: f[hashindex]\n else:\n raise Exception(\"valfunc for field '%s' must be a callable function or a string of the hash index for retrieving the value\"%name)\n return valfunc\n \n aggregfuncs = [(name,check_valfunc(valfunc),aggname,lookup_aggfunc(aggname)) for name,valfunc,aggname in aggregfuncs]\n\n def make_number(value):\n try: return 
float(value)\n except: return None\n\n def is_missing(val):\n return val is None or (isinstance(val, float) and math.isnan(val))\n\n iterable = list(iterable)\n row = []\n for _,valfunc,aggname,aggfunc in aggregfuncs:\n values = (valfunc(item) for item in iterable)\n\n # missing values are not considered when calculating stats\n values = [val for val in values if not is_missing(val)] \n \n if aggname in (\"sum\",\"max\",\"min\",\"mean\"):\n # only consider number values if numeric stats\n values = [make_number(value) for value in values if make_number(value) != None]\n\n if values:\n aggval = aggfunc(values)\n else:\n aggval = \"\" # or best with None\n \n row.append(aggval)\n\n if geomfunc:\n geomfunc = lookup_geomfunc(geomfunc)\n geom = geomfunc(iterable)\n return row,geom\n\n else:\n return row", "def union(self, other): # -> BaseGeometry:\n ...", "def unionFeatureCollections(*collections):\n features = []\n for collection in collections:\n if collection[\"type\"] == \"FeatureCollection\":\n collectionFeatures = collection[\"features\"]\n features.extend(collectionFeatures)\n if collection[\"type\"] == \"Feature\":\n features.append(collection)\n return geojson.FeatureCollection(features)", "def union(self, *args):\n _ub = None\n for _obj in args:\n if _ub is None:\n _ub = self.bbox(_obj)\n else:\n _b = self.bbox(_obj)\n _x = np.sort(np.array([_b[:, 0], _ub[:, 0]]), axis=None)\n _y = np.sort(np.array([_b[:, 1], _ub[:, 1]]), axis=None)\n _ub = np.array([[_x[0], _y[0]], [_x[3], _y[3]]])\n return _ub", "def union(self, querysets):\n # union() is \"New in Django 1.11.\" (docs site)\n # but buggy in 2.0, with a backport in 1.11.8 ; my ticket 29229, fixed in 1.11.12 & 2.0.4.\n # For simplicity, let's even ignore the usable 1.11.0-7 frame.\n # Ticket 29286 reintroduced a bug in 1.11.13 & 2.0.5, by considering only the annotate() case and not the extra().\n # Ticket 29694 fixed the missing extra() case, but is only effective as of 2.1.1,\n # because extra() is destined to be deprecated.\n # So the final solution here was to replace all extra() by annotate() in this app.\n if VERSION < (1, 11, 12) or (2, 0) <= VERSION < (2, 0, 4):\n result_sql, result_params = [], []\n for qs in querysets:\n sql, params = qs.query.sql_with_params()\n result_sql.append(sql)\n result_params.extend(params)\n return ' UNION '.join(result_sql), tuple(result_params)\n else:\n qs = querysets[0].union(*querysets[1:])\n return qs.query.sql_with_params()", "def union(arguments, flatten=True):\n return Component(\n \"Union\",\n arguments=arguments,\n options={\n 'flatten': flatten\n },\n constraints=None)", "def union(self, other):\n from sage.misc.misc import deprecation\n deprecation('The function union is replaced by convex_hull.', 'Sage Version 4.4.4')\n return self.convex_hull(other)", "def union(self, other):\n\n return self.intersect(other, op=np.union1d)", "def union(self, StdVectorFst other):\n cdef StdVectorFst result = self.copy()\n result.set_union(other)\n return result", "def _eval_rewrite_as_Union(self, *sets, **kwargs):\n\n dj_union = S.EmptySet\n index = 0\n for set_i in sets:\n if isinstance(set_i, Set):\n cross = ProductSet(set_i, FiniteSet(index))\n dj_union = Union(dj_union, cross)\n index = index + 1\n return dj_union", "def make_union(self, *args, **kwargs): # real signature unknown\n pass", "def UnionRegion(*args, **kwargs):\n return _gdi_.Region_UnionRegion(*args, **kwargs)", "def union(self, *args):\n return self.phy2abs.union(*args)", "def unions(iterable):\n return reduce(union, iterable)", "def 
_union_polygons(polygons, precision = 1e-4, max_points = 4000):\n polygons = _merge_floating_point_errors(polygons, tol = precision/1000)\n unioned = gdspy.boolean(polygons, [], operation = 'or',\n precision = precision, max_points = max_points)\n return unioned", "def boundary_polygon_by_union(self):\n cell_geoms = [None]*self.Ncells()\n\n for i in self.valid_cell_iter():\n xy = self.nodes['x'][self.cell_to_nodes(i)]\n cell_geoms[i] = geometry.Polygon(xy)\n return ops.cascaded_union(cell_geoms)", "def find_union_mask(self, seg_tags, union_mask='MASK_UNION'):\n wcs = self.images['MUSE_WHITE'].wcs\n yc, xc = wcs.sky2pix((self.DEC, self.RA), unit=u.deg)[0]\n maps = {}\n for tag in seg_tags:\n if tag[:4] == 'SEG_':\n maps[tag[4:]] = self.images[tag].data.data\n else:\n maps[tag] = self.images[tag].data.data\n\n r = findCentralDetection(maps, yc, xc, tolerance=3)\n self.images[union_mask] = Image(wcs=wcs, dtype=np.uint8, copy=False,\n data=union(list(r['seg'].values())))", "def test_self_union():\n gdf = GeoDataFrame(\n {\n \"geometry\": GeoSeries(\n [\n Polygon([(0, 0), (0, 2), (2, 2), (2, 0)]),\n Polygon([(1, 1), (3, 1), (3, 3), (1, 3)]),\n Polygon([(1, 1), (1, 2), (2, 2), (2, 1)]),\n ]\n ),\n \"x\": [0, 1, 2],\n \"y\": [4.0, 8.0, 1.0],\n }\n )\n\n result_one = self_union(gdf)\n expected_one = GeoDataFrame(\n {\n \"geometry\": GeoSeries(\n [\n Polygon([(0, 0), (0, 2), (1, 2), (1, 1), (2, 1), (2, 0)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(2, 2), (1, 2), (1, 3), (3, 3), (3, 1), (2, 1)]),\n ],\n index=[(0,), (0, 1, 2), (0, 1, 2), (0, 1, 2), (1,)],\n ),\n \"x\": [0, 0, 1, 2, 1],\n \"y\": [4.0, 4.0, 8.0, 1.0, 8.0],\n }\n )\n assert_geodataframe_equal(result_one, expected_one)\n\n result_two = self_union(gdf, ratios=[\"y\"])\n expected_two = GeoDataFrame(\n {\n \"geometry\": GeoSeries(\n [\n Polygon([(0, 0), (0, 2), (1, 2), (1, 1), (2, 1), (2, 0)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(1, 2), (2, 2), (2, 1), (1, 1)]),\n Polygon([(2, 2), (1, 2), (1, 3), (3, 3), (3, 1), (2, 1)]),\n ],\n index=[(0,), (0, 1, 2), (0, 1, 2), (0, 1, 2), (1,)],\n ),\n \"x\": [0, 0, 1, 2, 1],\n \"y\": [3.0, 1.0, 2.0, 1.0, 6.0],\n }\n )\n assert_geodataframe_equal(result_two, expected_two)", "def union(A, B, *C):\n return setutils(\"union\", A, B, *C)", "def _union(cls, s1, s2):\n return s1.union(s2)", "def union(D, by_layer = False, precision = 1e-4, join_first = True,\n max_points = 4000, layer = 0):\n U = Device()\n\n if by_layer == True:\n all_polygons = D.get_polygons(by_spec = True)\n for layer, polygons in all_polygons.items():\n unioned_polygons = _union_polygons(polygons, precision = precision,\n max_points = max_points)\n U.add_polygon(unioned_polygons, layer = layer)\n else:\n all_polygons = D.get_polygons(by_spec = False)\n unioned_polygons = _union_polygons(all_polygons,\n precision = precision,\n max_points = max_points)\n U.add_polygon(unioned_polygons, layer = layer)\n return U", "def union(self, *lists):\n if self.is_a(set):\n return _(self._.union(*lists))\n return _(_union(self._, *lists))", "def union(self, rng_set: Union[Rangelike, Iterable[Rangelike]]) -> 'RangeSet':\n # convert to RangeSet\n rng_set = RangeSet._to_rangeset(rng_set)\n # simply merge lists\n return RangeSet(self._ranges + rng_set._ranges)", "def union_all(self, query):\n return self.union(query, True)" ]
[ "0.7116073", "0.680238", "0.6797811", "0.65559477", "0.6409281", "0.63266087", "0.62877876", "0.6236714", "0.60303354", "0.59023213", "0.587915", "0.5870682", "0.57423186", "0.5735903", "0.5733056", "0.57152426", "0.56981426", "0.567216", "0.5625306", "0.5607658", "0.55898374", "0.5586022", "0.5572578", "0.55486643", "0.55043495", "0.54946005", "0.54539496", "0.5452623", "0.5438249", "0.5425478" ]
0.74620515
0
we use Augmentor lib a pipeline of augment no params input
def augment(): print("augmenting......") path1 = '../trainp1/' path2 = '../trainp2/' # path of pair1 and pair2 similar to img & mask task for segmentation p = Augmentor.Pipeline(path1) # pair1 p.ground_truth(path2) # pair2 p.rotate(probability=0.3, max_left_rotation=3, max_right_rotation=3) p.flip_left_right(probability=0.2) p.random_distortion(0.5, 2, 2, 2) p.zoom(probability=0.5, min_factor=0.95, max_factor=1.05) p.process()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_augmenter():\n\n augmenter = iaa.Sequential([\n iaa.Fliplr(0.5), # horizontal flips\n iaa.Crop(percent=(0, 0.1)), # random crops\n # Small gaussian blur with random sigma between 0 and 0.5.\n # But we only blur about 50% of all images.\n iaa.Sometimes(\n 0.5,\n iaa.GaussianBlur(sigma=(0, 0.5))\n ),\n # Strengthen or weaken the contrast in each image.\n iaa.LinearContrast((0.75, 1.5)),\n # Add gaussian noise.\n # For 50% of all images, we sample the noise once per pixel.\n # For the other 50% of all images, we sample the noise per pixel AND\n # channel. This can change the color (not only brightness) of the\n # pixels.\n iaa.AdditiveGaussianNoise(loc=0, scale=(0.0, 0.05*255), per_channel=0.5),\n # Make some images brighter and some darker.\n # In 20% of all cases, we sample the multiplier once per channel,\n # which can end up changing the color of the images.\n iaa.Multiply((0.8, 1.2), per_channel=0.2),\n # Apply affine transformations to each image.\n # Scale/zoom them, translate/move them, rotate them and shear them.\n iaa.Affine(\n scale={\"x\": (0.80, 1.2), \"y\": (0.80, 1.2)},\n translate_percent={\"x\": (-0.2, 0.2), \"y\": (-0.2, 0.2)},\n rotate=(-25, 25),\n shear=(-6, 6)\n )\n], random_order=True) # apply augmenters in random order\n\n return augmenter", "def augment(self, *args, **kwargs):\n pass", "def augment(self, image):\n pass", "def get_augmentation_sequence():\n # Macro to apply something with 50% chance\n sometimes = lambda aug: iaa.Sometimes(0.5, aug) # 50%\n rarely = lambda aug: iaa.Sometimes(0.1, aug) # 10%\n\n # Augmentation applied to every image\n # Augmentors sampled one value per channel\n aug_sequence = iaa.Sequential(\n [\n # apply the following augmenters to most images\n iaa.Fliplr(0.5), # horizontally flip 50% of all images\n iaa.Flipud(0.5), # vertically flip 50% of all images\n\n # crop images by -0.25% to 0.25% of their height/width\n # positive values crop the image, negative pad\n sometimes(iaa.CropAndPad(\n percent=(-0.25, 0.25),\n pad_mode=['constant', 'edge'], # pad with constant value of the edge value\n pad_cval=(0, 0) # if mode is constant, use a cval between 0 and 0 to ensure mask background is preserved\n )),\n sometimes(iaa.Affine(\n scale={\"x\": (0.8, 1.2), \"y\": (0.8, 1.2)}, # scale images to 80-120% of their size, individually per axis\n translate_percent={\"x\": (-0.2, 0.2), \"y\": (-0.2, 0.2)}, # translate by -20 to +20 percent (per axis)\n rotate=(-45, 45), # rotate by -45 to +45 degrees\n shear=(-16, 16), # shear by -16 to +16 degrees\n order=[0, 1], # use nearest neighbour or bilinear interpolation (fast)\n cval=(0, 0), # if mode is constant, use a cval between 0 and 0 to ensure mask background is preserved\n mode='constant' # ia.ALL # use any of scikit-image's warping modes (see 2nd image from the top for examples)\n )),\n # rarely(iaa.Superpixels(p_replace=(0, 1.0), n_segments=(20, 200))),\n iaa.GaussianBlur((0, 3.0)),\n iaa.Add((-10, 10), per_channel=0.7), # change brightness of images (by -10 to 10 of original value)\n iaa.AddToHueAndSaturation((-20, 20)),\n # sometimes(iaa.PerspectiveTransform(scale=(0.01, 0.1)))\n ],\n random_order=True\n )\n\n return aug_sequence", "def _augment_pipeline_cfg(self):", "def set_augmentor():\n config = {'blur': {'values': ('gaussian', 0.7, 1.0), 'prob': 0.3},\n 'brightness': {'values': (0.6, 1.0), 'prob': 0.1},\n 'brightness1': {'values': (1.0, 1.5), 'prob': 0.1},\n 'flip': {'values': ('hor',), 'prob': 0.5},\n 'grid_mask': {'values': (0, 0.2, 0, 0.2, 0.01, 0.1, 0.01, 0.1, 0.1, 0.2, 0.1, 
0.2), 'prob': 0.4},\n 'illumination': {'values': ('blob_negative', 0.1, 0.2, 100, 150), 'prob': 0.2},\n 'noise': {'values': (2, 10), 'use_gray_noise': True, 'prob': 1},\n 'rotate': {'values': (-45, 45), 'prob': 0.4},\n 'translate': {'values': ('RANDOM', -0.2, 0.2), 'prob': 0.2, 'use_replication': True},\n 'zoom': {'values': (0.5, 1.5), 'prob': 0.9, 'use_replication': True}}\n\n augmentor = Augmentor(config, no_repetition=True)\n\n return augmentor", "def __call__(self, video_sequence):\n for aug_op in self.augmentations:\n video_sequence = aug_op(video_sequence)\n return video_sequence", "def augment(input):\n\treturn np.insert(input, 0, 1, axis = 1)\n\n\n\n\t#np.set_printoptions(suppress=True) ", "def _augment(img):\n return flip(img, axis=2)", "def _augment(img):\r\n return flip(img, axis=2)", "def xray_augmentationFactory(augmentation, height, width):\n downsample = (260,260)\n\n if augmentation == 'autoaugment':\n transform = [\n transforms.RandomCrop((height, width)),\n transforms.RandomHorizontalFlip(),\n AutoAugment(),\n Cutout()\n ]\n elif augmentation == 'original-cifar':\n transform = [\n transforms.Resize(downsample),\n transforms.RandomCrop(size=(height, width)),\n transforms.RandomHorizontalFlip(),\n ]\n elif augmentation == 'noaugment':\n transform = [\n transforms.Resize(downsample),\n transforms.CenterCrop((height, width)),\n ]\n\n elif augmentation == 'glico':\n NotImplemented(f\"augment parameter {augmentation} not implemented\")\n else: \n NotImplemented(f\"augment parameter {augmentation} not implemented\")\n\n normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],\n std=[0.229, 0.224, 0.225])\n\n #normalize = transforms.Normalize(mean=[0.5888, 0.5888, 0.5889],\n #std=[0.1882, 0.1882, 0.1882])\n\n return transforms.Compose(transform + [transforms.ToTensor(), normalize])", "def sample_custom_augmentations_constructor(num_features: int, window_radius: int) -> albumentations.Compose:\n max_kernel = int(round(0.1 * window_radius))\n max_hole_size = int(round(0.1 * window_radius))\n additional_targets = [ADDITIONAL_TARGETS_KEY.format(idx) for idx in range(1, num_features)]\n\n return albumentations.Compose(\n [\n # The augmentations assume an image is RGB between 0 and 1\n albumentations.ToFloat(max_value=255, always_apply=True, p=1.0),\n # These augmentations should be order independent, toss 'em up front\n albumentations.Flip(p=0.5),\n albumentations.Transpose(p=0.5),\n albumentations.Rotate(limit=90, p=0.5),\n # Fogging as it's quite similar to top-down cloud effects, seems reasonable to apply up front\n albumentations.RandomFog(fog_coef_lower=0.2, fog_coef_upper=0.8, alpha_coef=0.08, p=0.5),\n # Color modifications\n albumentations.OneOf(\n [\n albumentations.RandomBrightnessContrast(\n brightness_limit=0.2, contrast_limit=0.6, brightness_by_max=True, p=1.0\n ),\n albumentations.RGBShift(r_shift_limit=0.2, g_shift_limit=0.2, b_shift_limit=0.2, p=1.0),\n ],\n p=0.25,\n ),\n # Distortions\n albumentations.OneOf(\n [\n albumentations.ElasticTransform(alpha=1, sigma=50, alpha_affine=50, p=1.0),\n albumentations.GridDistortion(num_steps=5, distort_limit=0.4, p=1.0),\n albumentations.OpticalDistortion(distort_limit=0.1, shift_limit=0.1, p=1.0),\n ],\n p=0.25,\n ),\n albumentations.GaussianBlur(blur_limit=max_kernel, p=0.25),\n # Noise\n albumentations.OneOf(\n [\n albumentations.CoarseDropout(\n max_holes=8, max_height=max_hole_size, max_width=max_hole_size, fill_value=np.nan, p=1.0\n ),\n albumentations.GaussNoise(var_limit=0.05, mean=0, p=1.0),\n ],\n p=0.25,\n ),\n # 
Scaling, adding last so that other augmentations are applied at a consistent resolution\n albumentations.RandomScale(scale_limit=0.05, p=0.25),\n # Augmentations may not return images of the same size, images can be both smaller and larger than expected, so\n # these two augmentations are added to keep things consistent\n albumentations.PadIfNeeded(2 * window_radius, 2 * window_radius, always_apply=True, p=1.0),\n albumentations.CenterCrop(2 * window_radius, 2 * window_radius, always_apply=True, p=1.0),\n # Return the data to its original scale\n albumentations.FromFloat(max_value=255, always_apply=True, p=1.0),\n ],\n p=1.0,\n additional_targets={target: \"image\" for target in additional_targets},\n )", "def data_augmentation(image, aug):\n if (aug == \"random_crop\") and (random.randint(0,1)):\n image = random_crop(image) \n if (aug == \"random_rotation\") and (random.randint(0,1)): \n image = random_rotation(image) \n if (aug == \"random_flip\") and (random.randint(0,1)): \n image = random_flip(image)\n if (aug == \"affine_transformation\") and (random.randint(0,1)): \n image = affine_transformation(image)\n if (aug == \"random_gaussian_noise\") and (random.randint(0,1)): \n image = random_gaussian_noise(image)\n if (aug == \"random_erasing\") and (random.randint(0,1)): \n image = random_erasing(image) \n return image", "def build_augmentation_pipeline(aug_list):\n AUGMENTATIONS = {\n 'leadlag': LeadLag(),\n 'penoff': PenOff(),\n 'addtime': AddTime(),\n 'cumsum': CumulativeSum(),\n 'basepoint': Basepoint()\n }\n\n pipeline = Pipeline([\n (tfm_str, AUGMENTATIONS[tfm_str]) for tfm_str in aug_list\n ])\n\n return pipeline", "def shift_augmentation():\n shift = np.random.randint(-200, 201, size=2)\n return lambda image: shift_with_extension(image, shift)", "def preprocess(self):", "def add_augmenter(\n self, in_stream: FilterableStream, **kwargs\n ) -> Tuple[FilterableStream, Dict]:\n raise NotImplementedError(\"Implement add_augmenter method\")", "def augmented(self, aug):\n out = getcopy(self)\n out.augment(aug)\n return out", "def run_example_augmentations():\n parser = argparse.ArgumentParser(description='Visualise example augmentations')\n parser.add_argument('--dataDir', type=str, required=True,\n help='Directory containing training data stored in the expected format. 
See dataset_cvppp.py')\n parser.add_argument('--outputDir', type=str, required=True,\n help='Directory to save example images to')\n parser.add_argument('--numImages', type=int, default=30,\n help='How many images to save')\n parser.add_argument('--blurImages', dest='blurImages', action='store_true')\n parser.add_argument('--dontBlurImages', dest='blurImages', action='store_false')\n parser.set_defaults(blurImages=False)\n\n args = parser.parse_args()\n\n # Create output dir\n assert not os.path.isdir(args.outputDir), \"output dir already exists\"\n os.mkdir(args.outputDir)\n\n # # Init dataset\n train_dataset = dataset_cvppp.CVPPP_Dataset()\n \n train_dataset.load_cvppp(args.dataDir, 'train')\n train_dataset.prepare()\n\n # Init config\n configuration = config_cvppp.TrainConfig()\n\n # Init augmentation\n augmentation = get_augmentation_sequence()\n\n # Generate images\n for i in range(args.numImages):\n image, meta, class_ids, bbox, mask = model.load_image_gt(train_dataset, configuration, i, augmentation=augmentation)\n\n rgb_mask = mask_to_rgb(mask)\n\n im_path = os.path.join(args.outputDir, str(i) + '_image.png')\n mask_path = os.path.join(args.outputDir, str(i) + '_mask.png')\n io.imsave(im_path, image)\n io.imsave(mask_path, rgb_mask)\n\n print(\"Saved example\", i)", "def _apply_augment(self, img: Image, name: str, level: int) -> Image:\n assert 0 <= level < self.n_level\n augment_fn, low, high = self.transforms_info[name]\n return augment_fn(img.copy(), level * (high - low) / self.n_level + low)", "def parse_function_augment(example_proto):\r\n\r\n\t# Parse through features and extract byte string\r\n\tparsed_features = tf.parse_single_example(example_proto,features ={\r\n\t\t'image': tf.FixedLenFeature([],tf.string),\r\n\t\t'joint': tf.FixedLenFeature([],tf.string),\r\n\t\t'offset': tf.FixedLenFeature([],tf.string),\r\n\t\t'handScale': tf.FixedLenFeature([],tf.string)\r\n\t\t},name='features')\r\n\r\n\t# Decode content into correct types\r\n\timage_dec = tf.decode_raw(parsed_features['image'],tf.float32)\r\n\tjoint_dec = tf.decode_raw(parsed_features['joint'],tf.float32)\r\n\toffset_dec = tf.decode_raw(parsed_features['offset'],tf.float32)\r\n\thandScale_dec = tf.decode_raw(parsed_features['handScale'],tf.float32)\r\n\r\n\t# Reshape image to 176x176\r\n\timage_reshaped = tf.reshape(image_dec,[176,176,1])\r\n\r\n\t# Crop 128x128 image around COM\r\n\timage_com_cropped = tf.image.crop_to_bounding_box(image_reshaped,24,24,128,128)\r\n\r\n\t# Data Augmentation\r\n\timage_com_cropped, joint_dec, offset_dec, handScale_dec = tf.py_func(augmentation_cv,[image_com_cropped, joint_dec, offset_dec, handScale_dec],[tf.float32, tf.float32, tf.float32, tf.float32])\r\n\timage_com_cropped = tf.reshape(image_com_cropped,[128,128,1])\r\n\r\n\t# TF IMPLEMENTATION OF DATA AUGMENTATION: MIGHT BE SLOWER WHEN TF IS NOT COMPILED FROM SOURCE\r\n\t# image_reshaped, joint_dec, offset_dec, handScale_dec = augmentation(image_reshaped, joint_dec, offset_dec, handScale_dec)\r\n\r\n\treturn image_com_cropped, joint_dec, offset_dec, handScale_dec", "def pre_processing_function(label, filename: str, augmentor: Augmentor = None):\n image = imread(filename)\n if augmentor is not None:\n image = np.round(augmentor.run(image)).astype(np.uint8)\n\n return image, label", "def __call__(self, in_data):\n # There are five data augmentation steps\n # 1. Color augmentation\n # 2. Random expansion\n # 3. Random cropping\n # 4. Resizing with random interpolation\n # 5. 
Random horizontal flipping\n if self.count % 10 == 0 and self.count % self.batchsize == 0 and self.count != 0:\n self.i += 1\n i = self.i % len(self.dim)\n self.output_shape = (self.dim[i], self.dim[i])\n # print(self.count, self.i, self.output_shape)\n self.count += 1\n\n img, bbox, label = in_data\n\n # 1. Color augmentation\n img = random_distort(img, brightness_delta=32,\n contrast_low=0.5, contrast_high=1.5,\n saturation_low=0.5, saturation_high=1.5,\n hue_delta=25)\n\n # Normalize. range is [0, 1]\n img /= 255.0\n\n _, H, W = img.shape\n scale = np.random.uniform(0.25, 2)\n random_expand = np.random.uniform(0.8, 1.2, 2)\n net_h, net_w = self.output_shape\n out_h = net_h * scale # random_expand[0]\n out_w = net_w * scale # random_expand[1]\n if H > W:\n out_w = out_h * (float(W) / H) * np.random.uniform(0.8, 1.2)\n elif H < W:\n out_h = out_w * (float(H) / W) * np.random.uniform(0.8, 1.2)\n\n out_h = int(out_h)\n out_w = int(out_w)\n\n img = resize_with_random_interpolation(img, (out_h, out_w))\n bbox = transforms.resize_bbox(bbox, (H, W), (out_h, out_w))\n\n if out_h < net_h and out_w < net_w:\n img, param = expand(img, out_h=net_h, out_w=net_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n else:\n out_h = net_h if net_h > out_h else int(out_h * 1.05)\n out_w = net_w if net_w > out_w else int(out_w * 1.05)\n img, param = expand(img, out_h=out_h, out_w=out_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n\n img, param = crop_with_bbox_constraints(\n img, bbox, return_param=True,\n crop_height=net_h, crop_width=net_w)\n bbox, param = transforms.crop_bbox(\n bbox, y_slice=param['y_slice'], x_slice=param['x_slice'],\n allow_outside_center=False, return_param=True)\n label = label[param['index']]\n\n\n # 5. Random horizontal flipping # OK\n img, params = transforms.random_flip(\n img, x_random=True, return_param=True)\n bbox = transforms.flip_bbox(\n bbox, self.output_shape, x_flip=params['x_flip'])\n\n # Preparation for Yolov2 network\n bbox[:, ::2] /= self.output_shape[0] # y\n bbox[:, 1::2] /= self.output_shape[1] # x\n\n num_bbox = len(bbox)\n len_max = max(num_bbox, self.max_target)\n\n gmap = create_map_anchor_gt(bbox, self.anchors, self.output_shape,\n self.downscale, self.n_boxes, len_max)\n\n out_bbox = np.zeros((len_max, 4), dtype='f')\n out_bbox[:num_bbox] = bbox[:num_bbox]\n out_label = np.zeros((len_max), dtype='i')\n out_label[:num_bbox] = label\n\n gmap = gmap[:self.max_target]\n out_bbox = out_bbox[:self.max_target]\n out_label = out_label[:self.max_target]\n num_array = min(num_bbox, self.max_target)\n\n img = np.clip(img, 0, 1)\n return img, out_bbox, out_label, gmap, np.array([num_array], dtype='i')", "def reconstruct_input_ext(self, model_in):", "def build_train_augmentor(cfg: CfgNode, keep_uncropped: bool = False, keep_non_smoothed: bool = False):\n aug_list = []\n \n names = cfg.AUGMENTOR.ADDITIONAL_TARGETS_NAME\n types = cfg.AUGMENTOR.ADDITIONAL_TARGETS_TYPE\n if names is None:\n additional_targets = None\n else:\n assert len(names) == len(types)\n additional_targets = {}\n for i in range(len(names)):\n additional_targets[names[i]] = types[i]\n\n #1. rotate\n if cfg.AUGMENTOR.ROTATE.ENABLED:\n aug_list.append(\n Rotate(rot90=cfg.AUGMENTOR.ROTATE.ROT90,\n p=cfg.AUGMENTOR.ROTATE.P,\n additional_targets=additional_targets))\n\n #2. 
rescale\n if cfg.AUGMENTOR.RESCALE.ENABLED:\n aug_list.append(\n Rescale(p=cfg.AUGMENTOR.RESCALE.P,\n additional_targets=additional_targets))\n\n #3. flip\n if cfg.AUGMENTOR.FLIP.ENABLED:\n aug_list.append(\n Flip(do_ztrans=cfg.AUGMENTOR.FLIP.DO_ZTRANS,\n p=cfg.AUGMENTOR.FLIP.P, \n additional_targets=additional_targets))\n\n #4. elastic\n if cfg.AUGMENTOR.ELASTIC.ENABLED:\n aug_list.append(\n Elastic(alpha=cfg.AUGMENTOR.ELASTIC.ALPHA, \n sigma=cfg.AUGMENTOR.ELASTIC.SIGMA, \n p=cfg.AUGMENTOR.ELASTIC.P,\n additional_targets=additional_targets))\n\n #5. grayscale\n if cfg.AUGMENTOR.GRAYSCALE.ENABLED:\n aug_list.append(\n Grayscale(p=cfg.AUGMENTOR.GRAYSCALE.P,\n additional_targets=additional_targets))\n\n #6. missingparts\n if cfg.AUGMENTOR.MISSINGPARTS.ENABLED:\n aug_list.append(\n MissingParts(iterations=cfg.AUGMENTOR.MISSINGPARTS.ITER,\n p=cfg.AUGMENTOR.MISSINGPARTS.P,\n additional_targets=additional_targets))\n\n #7. missingsection\n if cfg.AUGMENTOR.MISSINGSECTION.ENABLED and not cfg.DATASET.DO_2D:\n aug_list.append(\n MissingSection(\n num_sections=cfg.AUGMENTOR.MISSINGSECTION.NUM_SECTION,\n p=cfg.AUGMENTOR.MISSINGSECTION.P, \n additional_targets=additional_targets))\n\n #8. misalignment\n if cfg.AUGMENTOR.MISALIGNMENT.ENABLED and not cfg.DATASET.DO_2D:\n aug_list.append(\n MisAlignment( \n displacement=cfg.AUGMENTOR.MISALIGNMENT.DISPLACEMENT,\n rotate_ratio=cfg.AUGMENTOR.MISALIGNMENT.ROTATE_RATIO,\n p=cfg.AUGMENTOR.MISALIGNMENT.P,\n additional_targets=additional_targets))\n\n #9. motion-blur\n if cfg.AUGMENTOR.MOTIONBLUR.ENABLED:\n aug_list.append(\n MotionBlur( \n sections=cfg.AUGMENTOR.MOTIONBLUR.SECTIONS, \n kernel_size=cfg.AUGMENTOR.MOTIONBLUR.KERNEL_SIZE,\n p=cfg.AUGMENTOR.MOTIONBLUR.P,\n additional_targets=additional_targets))\n\n #10. cut-blur\n if cfg.AUGMENTOR.CUTBLUR.ENABLED:\n aug_list.append(\n CutBlur(length_ratio=cfg.AUGMENTOR.CUTBLUR.LENGTH_RATIO, \n down_ratio_min=cfg.AUGMENTOR.CUTBLUR.DOWN_RATIO_MIN,\n down_ratio_max=cfg.AUGMENTOR.CUTBLUR.DOWN_RATIO_MAX,\n downsample_z=cfg.AUGMENTOR.CUTBLUR.DOWNSAMPLE_Z,\n p=cfg.AUGMENTOR.CUTBLUR.P,\n additional_targets=additional_targets))\n\n #11. cut-noise\n if cfg.AUGMENTOR.CUTNOISE.ENABLED:\n aug_list.append(\n CutNoise(length_ratio=cfg.AUGMENTOR.CUTNOISE.LENGTH_RATIO, \n scale=cfg.AUGMENTOR.CUTNOISE.SCALE,\n p=cfg.AUGMENTOR.CUTNOISE.P, \n additional_targets=additional_targets))\n\n # compose the list of transforms\n augmentor = Compose(transforms=aug_list, \n input_size=cfg.MODEL.INPUT_SIZE, \n smooth=cfg.AUGMENTOR.SMOOTH,\n keep_uncropped=keep_uncropped, \n keep_non_smoothed=keep_non_smoothed,\n additional_targets=additional_targets)\n\n return augmentor", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def pre_process(cls, *args, **kwargs):\n pass", "def apply_augmentation_list(data, aug_list):\n pipeline = build_augmentation_pipeline(aug_list)\n\n # Transform\n data_tfmd = pipeline.fit_transform(data)\n\n return data_tfmd" ]
[ "0.7002257", "0.69078076", "0.6759206", "0.6475627", "0.6423094", "0.63598675", "0.6343083", "0.6312717", "0.6299603", "0.6285816", "0.6077885", "0.6050642", "0.6006987", "0.5946956", "0.5934204", "0.59325135", "0.5847552", "0.5737056", "0.57130325", "0.5700653", "0.56733465", "0.56687504", "0.5633425", "0.5631022", "0.5625116", "0.5608952", "0.5608952", "0.5608952", "0.5608952", "0.55739516" ]
0.700342
0
Command line interface for the ``rsyncsystembackup`` program.
def main(): # Initialize logging to the terminal and system log. coloredlogs.install(syslog=True) # Parse the command line arguments. context_opts = dict() program_opts = dict() dest_opts = dict() try: options, arguments = getopt.gnu_getopt(sys.argv[1:], 'bsrm:c:t:i:unx:fvqhVQp', [ 'backup', 'snapshot', 'rotate', 'mount=', 'crypto=', 'tunnel=', 'ionice=', 'no-sudo', 'dry-run', 'exclude=', 'force', 'disable-notifications', 'verbose', 'quiet', 'help', 'multi-fs', 'rsync-verbose', 'rsync-quiet', 'rsync-progress' ]) for option, value in options: if option in ('-b', '--backup'): enable_explicit_action(program_opts, 'backup_enabled') elif option in ('-s', '--snapshot'): enable_explicit_action(program_opts, 'snapshot_enabled') elif option in ('-r', '--rotate'): enable_explicit_action(program_opts, 'rotate_enabled') elif option in ('-m', '--mount'): program_opts['mount_point'] = value elif option in ('-c', '--crypto'): program_opts['crypto_device'] = value elif option in ('-t', '--tunnel'): ssh_user, _, value = value.rpartition('@') ssh_alias, _, port_number = value.partition(':') tunnel_opts = dict( ssh_alias=ssh_alias, ssh_user=ssh_user, # The port number of the rsync daemon. remote_port=RSYNCD_PORT, ) if port_number: # The port number of the SSH server. tunnel_opts['port'] = int(port_number) dest_opts['ssh_tunnel'] = SecureTunnel(**tunnel_opts) elif option in ('-i', '--ionice'): value = value.lower().strip() validate_ionice_class(value) program_opts['ionice'] = value elif option in ('-u', '--no-sudo'): program_opts['sudo_enabled'] = False elif option in ('-n', '--dry-run'): logger.info("Performing a dry run (because of %s option) ..", option) program_opts['dry_run'] = True elif option in ('-f', '--force'): program_opts['force'] = True elif option in ('-x', '--exclude'): program_opts.setdefault('exclude_list', []) program_opts['exclude_list'].append(value) elif option == '--multi-fs': program_opts['multi_fs'] = True elif option == '--disable-notifications': program_opts['notifications_enabled'] = False elif option in ('-V', '--rsync-verbose'): if 'rsync_verbose_count' not in program_opts: program_opts['rsync_verbose_count'] = 1 else: program_opts['rsync_verbose_count'] = program_opts['rsync_verbose_count'] + 1 elif option in ('-Q', '--rsync-quiet'): if 'rsync_quiet_count' not in program_opts: program_opts['rsync_quiet_count'] = 1 else: program_opts['rsync_quiet_count'] = program_opts['rsync_quiet_count'] + 1 elif option in ('-v', '--verbose'): coloredlogs.increase_verbosity() elif option in ('-q', '--quiet'): coloredlogs.decrease_verbosity() elif option in ('-p', '--rsync-progress'): program_opts['rsync_show_progress'] = True elif option in ('-h', '--help'): usage(__doc__) return else: raise Exception("Unhandled option! (programming error)") if len(arguments) > 2: msg = "Expected one or two positional arguments! (got %i)" raise Exception(msg % len(arguments)) if len(arguments) == 2: # Get the source from the first of two arguments. program_opts['source'] = arguments.pop(0) if arguments: # Get the destination from the second (or only) argument. dest_opts['expression'] = arguments[0] program_opts['destination'] = Destination(**dest_opts) elif not os.environ.get('RSYNC_MODULE_PATH'): # Show a usage message when no destination is given. usage(__doc__) return except Exception as e: warning("Error: %s", e) sys.exit(1) try: # Inject the source context into the program options. 
program_opts['source_context'] = create_context(**context_opts) # Initialize the program with the command line # options and execute the requested action(s). RsyncSystemBackup(**program_opts).execute() except Exception as e: if isinstance(e, RsyncSystemBackupError): # Special handling when the backup disk isn't available. if isinstance(e, MissingBackupDiskError): # Check if we're connected to a terminal to decide whether the # error should be propagated or silenced, the idea being that # rsync-system-backup should keep quiet when it's being run # from cron and the backup disk isn't available. if not connected_to_terminal(): logger.info("Skipping backup: %s", e) sys.exit(0) # Known problems shouldn't produce # an intimidating traceback to users. logger.error("Aborting due to error: %s", e) else: # Unhandled exceptions do get a traceback, # because it may help fix programming errors. logger.exception("Aborting due to unhandled exception!") sys.exit(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n # Initialize logging to the terminal and system log.\n coloredlogs.install(syslog=True)\n # Parse the command line arguments.\n context_opts = dict()\n program_opts = dict()\n dest_opts = dict()\n try:\n options, arguments = getopt.gnu_getopt(sys.argv[1:], 'bsrm:c:t:i:unx:fvqh', [\n 'backup', 'snapshot', 'rotate', 'mount=', 'crypto=', 'tunnel=',\n 'ionice=', 'no-sudo', 'dry-run', 'multi-fs', 'exclude=', 'force',\n 'disable-notifications', 'verbose', 'quiet', 'help',\n ])\n for option, value in options:\n if option in ('-b', '--backup'):\n enable_explicit_action(program_opts, 'backup_enabled')\n elif option in ('-s', '--snapshot'):\n enable_explicit_action(program_opts, 'snapshot_enabled')\n elif option in ('-r', '--rotate'):\n enable_explicit_action(program_opts, 'rotate_enabled')\n elif option in ('-m', '--mount'):\n program_opts['mount_point'] = value\n elif option in ('-c', '--crypto'):\n program_opts['crypto_device'] = value\n elif option in ('-t', '--tunnel'):\n ssh_user, _, value = value.rpartition('@')\n ssh_alias, _, port_number = value.partition(':')\n tunnel_opts = dict(\n ssh_alias=ssh_alias,\n ssh_user=ssh_user,\n # The port number of the rsync daemon.\n remote_port=RSYNCD_PORT,\n )\n if port_number:\n # The port number of the SSH server.\n tunnel_opts['port'] = int(port_number)\n dest_opts['ssh_tunnel'] = SecureTunnel(**tunnel_opts)\n elif option in ('-i', '--ionice'):\n value = value.lower().strip()\n validate_ionice_class(value)\n program_opts['ionice'] = value\n elif option in ('-u', '--no-sudo'):\n program_opts['sudo_enabled'] = False\n elif option in ('-n', '--dry-run'):\n logger.info(\"Performing a dry run (because of %s option) ..\", option)\n program_opts['dry_run'] = True\n elif option in ('-f', '--force'):\n program_opts['force'] = True\n elif option in ('-x', '--exclude'):\n program_opts.setdefault('exclude_list', [])\n program_opts['exclude_list'].append(value)\n elif option == '--multi-fs':\n program_opts['multi_fs'] = True\n elif option == '--disable-notifications':\n program_opts['notifications_enabled'] = False\n elif option in ('-v', '--verbose'):\n coloredlogs.increase_verbosity()\n elif option in ('-q', '--quiet'):\n coloredlogs.decrease_verbosity()\n elif option in ('-h', '--help'):\n usage(__doc__)\n return\n else:\n raise Exception(\"Unhandled option! (programming error)\")\n if len(arguments) > 2:\n msg = \"Expected one or two positional arguments! 
(got %i)\"\n raise Exception(msg % len(arguments))\n if len(arguments) == 2:\n # Get the source from the first of two arguments.\n program_opts['source'] = arguments.pop(0)\n if arguments:\n # Get the destination from the second (or only) argument.\n dest_opts['expression'] = arguments[0]\n program_opts['destination'] = Destination(**dest_opts)\n elif not os.environ.get('RSYNC_MODULE_PATH'):\n # Show a usage message when no destination is given.\n usage(__doc__)\n return\n except Exception as e:\n warning(\"Error: %s\", e)\n sys.exit(1)\n try:\n # Inject the source context into the program options.\n program_opts['source_context'] = create_context(**context_opts)\n # Initialize the program with the command line\n # options and execute the requested action(s).\n RsyncSystemBackup(**program_opts).execute()\n except Exception as e:\n if isinstance(e, RsyncSystemBackupError):\n # Special handling when the backup disk isn't available.\n if isinstance(e, MissingBackupDiskError):\n # Check if we're connected to a terminal to decide whether the\n # error should be propagated or silenced, the idea being that\n # rsync-system-backup should keep quiet when it's being run\n # from cron and the backup disk isn't available.\n if not connected_to_terminal():\n logger.info(\"Skipping backup: %s\", e)\n sys.exit(0)\n # Known problems shouldn't produce\n # an intimidating traceback to users.\n logger.error(\"Aborting due to error: %s\", e)\n else:\n # Unhandled exceptions do get a traceback,\n # because it may help fix programming errors.\n logger.exception(\"Aborting due to unhandled exception!\")\n sys.exit(1)", "def test_backupmgr_with_short_option(self):\n cmd = \"%scbbackupmgr%s \" % (self.cli_command_location, self.cmd_ext)\n cmd += \"%s \" % self.input.param(\"command\", \"backup\")\n options = \" -%s %s \" % (self.input.param(\"repo\", \"-repo\"),\n self.backupset.name)\n options += \" -%s %s\" % (self.input.param(\"archive\", \"-archive\"),\n self.backupset.directory)\n if self.input.param(\"command\", \"backup\") != \"list\":\n options += \" -%s http://%s:%s\" % (self.input.param(\"cluster\", \"-cluster\"),\n self.backupset.cluster_host.ip,\n self.backupset.cluster_host.port)\n options += \" -%s Administrator\" % self.input.param(\"bkusername\", \"-username\")\n options += \" -%s password\" % self.input.param(\"bkpassword\", \"-password\")\n self.backup_create()\n shell = RemoteMachineShellConnection(self.backupset.backup_host)\n output, error = shell.execute_command(\"%s %s \" % (cmd, options))\n shell.log_command_output(output, error)\n shell.disconnect()\n if error:\n self.fail(\"There is a error in %s \" % error)", "def test_rsync(self):\n self.assertEqual(general.rsync('from','to').command_line,\n ['rsync','-av','from','to'])\n self.assertEqual(general.rsync('from','[email protected]:to').command_line,\n ['rsync','-av','-e','ssh','from','[email protected]:to'])", "def irsync_cmd():\n\n\t# Warn empty parameter and quit.\n\t#\n\tap = init_irsync_argparser()\n\tif len(sys.argv)==1:\n\t\tap.print_usage()\n\t\texit(1)\n\n\t#\n\t# First, scan for \"--rsync ...\" parameter from command line, and extract it.\n\t#\n\n\targv = sys.argv\n\trsync_extra_params = []\n\n\ttry:\n\t\tidx_rsync = argv.index('--rsync')\n\t\trsync_extra_params = argv[idx_rsync+1:] # copy all eles after the --rsync\n\t\targv[idx_rsync:] = [] # trim all eles from after --rsync\n\texcept ValueError:\n\t\tpass\n\n\t#\n\t# Second, use argparse API to parse remaining parameters\n\t#\n\n\tapargs = ap.parse_args(argv[1:])\n\tsucc = 
irsync_fetch_once(apargs, rsync_extra_params)\n\n\t# ret = irsync_fetch_once(args.rsync_url, args.local_store_dir, args.shelf,\n\t# \tdatetime_pattern=args.datetime_pattern,\n\t# old_days=args.old_days,\n\t# \told_hours=args.old_hours,\n\t# \told_minutes=args.old_minutes,\n\t# \tmax_retry=args.max_retry,\n\t# \tmax_run_seconds=args.max_run_seconds,\n\t# rsync_extra_params=rsync_extra_params)\n\n\treturn succ", "def printUsage():\r\n print \"usage: rsync.py [options] source target\"\r\n print \"\"\"\r\n -q, --quiet decrease verbosity\r\n -r, --recursive recurse into directories\r\n -R, --relative use relative path names\r\n -u, --update update only (don't overwrite newer files)\r\n -t, --times preserve times\r\n -n, --dry-run show what would have been transferred\r\n --existing only update files that already exist\r\n --delete delete files that don't exist on the sending side\r\n --delete-excluded also delete excluded files on the receiving side\r\n -I, --ignore-times don't exclude files that match length and time\r\n --size-only only use file size when determining if a file should\r\n be transferred\r\n --modify-window=NUM timestamp window (seconds) for file match (default=2)\r\n --existing only update existing target files or folders\r\n -C, --cvs-exclude auto ignore files in the same way CVS does\r\n --exclude=PATTERN exclude files matching PATTERN\r\n --exclude-from=FILE exclude patterns listed in FILE\r\n --include=PATTERN don't exclude files matching PATTERN\r\n --include-from=FILE don't exclude patterns listed in FILE\r\n --version print version number\r\n -h, --help show this help screen\r\n\r\nSee http://www.vdesmedt.com/~vds2212/rsync.html for informations and updates.\r\nSend an email to [email protected] for comments and bug reports.\"\"\"", "def main(opts):\n\n if arguments['--generate-pigz']:\n gen_pigz_thread_helper()\n sys.exit(0)\n\n if arguments['--full']:\n cmd, cmd_hide, backup_path, backup_base, top_backup_base = build_full(arguments)\n clean.clean_backups(top_backup_base, int(arguments['--keep']), False)\n check_space(top_backup_base)\n succ = run_backup(cmd, cmd_hide)\n print('Backup ended {0}'.format(('Error', 'Successfully')[succ]))\n if not succ: raise BackupErrorBackupFailed('Backup', backup_path)\n if succ and not opts['--no-prepare']:\n cmd = build_full_prepare(opts, backup_path)\n succ = run_backup(cmd, cmd_hide)\n print('Prepare ended {0}'.format(('Error', 'Successfully')[succ]))\n if not succ: raise BackupErrorBackupFailed('Prepare', backup_path)\n if succ and (opts['--compress'] or int(opts['--compress-threads'])>0):\n threads = check_pigz_treads(opts['--compress-threads'])\n tar_file = tar_dir(backup_path, threads, check=not opts['--no-check'])\n if opts['--enc']:\n encrypt(tar_file, config.pass_phrase)\n elif arguments['--inc']:\n build_inc(arguments)", "def cli() -> None:", "def cli() -> None:", "def cli():\n config, auth, execute_now = read_command_line_arguments()\n main(config, auth, execute_now)", "def backup (self, source, destination, archive = None, excludeList = None, debug = False):\n dateTime = time.strftime (\"%d%m%Y-%H%M%S\")\n if (archive is not None):\n thisArchive = os.path.join (archive, dateTime[4:8], dateTime[2:4], dateTime)\n else:\n thisArchive = None\n\n cmnd = \"%s --archive\" % self.rsync\n if (thisArchive is not None):\n cmnd = \"%s --backup --backup-dir=%s\" % (cmnd, thisArchive)\n cmnd = \"%s --delete\" % cmnd\n if (excludeList is not None):\n for exclude in excludeList:\n cmnd = '%s --exclude=\"%s\"' % (cmnd, exclude)\n cmnd = 
\"%s '%s' '%s'\" % (cmnd, source, destination)\n if (self.testRun):\n pass\n else:\n result = subprocess.getstatusoutput (cmnd)\n if (result[0] != 0):\n return 0\n self.logger.info(\"RSync Output:\\n {} \\n\".format(result[1]))\n return 1", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():" ]
[ "0.64647496", "0.62548107", "0.6175569", "0.61126995", "0.59780735", "0.5970921", "0.59538436", "0.59538436", "0.59447503", "0.589895", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152", "0.5860152" ]
0.6710343
0
Explicitly enable an action and disable other implicit actions.
def enable_explicit_action(options, explicit_action): options[explicit_action] = True for implicit_action in 'backup_enabled', 'snapshot_enabled', 'rotate_enabled': if implicit_action != explicit_action: options.setdefault(implicit_action, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def handle_action(**kwargs):\n name = kwargs[NAME]\n automation = automations[name]\n enabled = kwargs.get(ENABLED, not automation.enabled)\n if enabled:\n automation.enable()\n else:\n automation.disable()", "def _set_action_enabled(self, action, index):\n action.setEnabled(index.flags() & QtCore.Qt.ItemIsEnabled)", "def _set_action(self, action):\n raise NotImplementedError()", "def _set_action(self, action):\n raise NotImplementedError()", "def _set_action(self, action):\n raise NotImplementedError()", "def _set_action(self, action):\n raise NotImplementedError()", "def _set_action(self, action):\n raise NotImplementedError()", "def _set_action(self, action):\n raise NotImplementedError()", "def setDefaultActionsValues (self):\n self.closeAllTabAction.setEnabled(False)\n self.closeTabAction.setEnabled(False)\n self.saveAction.setEnabled(False)\n self.saveAsAction.setEnabled(False)\n self.exportAsAction.setEnabled(False)\n self.saveAllAction.setEnabled(False)\n self.printAction.setEnabled(False)\n self.cutAction.setEnabled(False)\n self.copyAction.setEnabled(False)\n self.pasteAction.setEnabled(False)\n self.selectAllAction.setEnabled(False)\n self.deleteAction.setEnabled(False)\n self.undoAction.setEnabled(False)\n self.redoAction.setEnabled(False)\n self.commentAction.setEnabled(False)\n self.uncommentAction.setEnabled(False)\n self.indentAction.setEnabled(False)\n self.unindentAction.setEnabled(False)\n self.findAction.setEnabled(False)\n \n self.checkSyntaxAction.setEnabled(False)\n self.checkDesignAction.setEnabled(False)\n self.updateTestAction.setEnabled(False)\n self.checkAction.setEnabled(False)\n self.runSchedAction.setEnabled(False)\n self.runAction.setEnabled(False)\n self.runStepByStepAction.setEnabled(False)\n self.runBreakpointAction.setEnabled(False)\n self.runDebugAction.setEnabled(False)\n self.runNowAction.setEnabled(False)\n self.runBackgroundAction.setEnabled(False)\n # self.runWithoutProbesAction.setEnabled(False)\n self.runWithoutNotifAction.setEnabled(False)\n \n self.codeWrappingAction.setEnabled(False)\n self.codefoldingAction.setEnabled(False)\n self.foldAllAction.setEnabled(False)\n self.whitespaceVisibilityAction.setEnabled(False)\n self.indentGuidesVisibilityAction.setEnabled(False)\n self.linesNumberingAction.setEnabled(False)", "def setEnabled(self, enabled):\n def do(toUpdateList):\n self.enabled = enabled\n self.actions.addAction(do)", "def _act(self, action):\n self._set_action(action)", "async def before_action(self, action, *args, **kwargs):\n return True", "def setUseAction(self, action):\n def do(toUpdateList):\n self.useAction = action\n self.actions.addAction(do)", "async def before_action(self, action: str, *args, **kwargs) -> bool:\n return True", "def _disable(self):\n self.enabled = False", "def disable():\n ret = _LIB.oled_click_disable()\n if ret < 0:\n raise Exception(\"oled click disable failed\")", "def disable(self) -> None:", "def operation_enabled(client, name):\n client.configuration.unstable_operations[snake_case(name)] = True", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def setEnabled(*args):", "def begin_not_undoable_action(self):\n self.not_undoable_action = True" ]
[ "0.68049055", "0.659966", "0.6064936", "0.6064936", "0.6064936", "0.6064936", "0.6064936", "0.6064936", "0.60206616", "0.6017531", "0.59906226", "0.5845024", "0.584325", "0.5837786", "0.58313227", "0.57796824", "0.57416296", "0.5723639", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.57153755", "0.5704441" ]
0.7606623
0
Return the Courses that the given user is an instructor for. Returns a QuerySet.
def faculty_courses_for_user(user): return Course.objects.filter(faculty_group__in=user.groups.all())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_enrollments_for_courses_in_program(user, program):\n course_keys = [\n CourseKey.from_string(key)\n for key in course_run_keys_for_program(program)\n ]\n return CourseEnrollment.objects.filter(\n user=user,\n course_id__in=course_keys,\n mode__in=[CourseMode.VERIFIED, CourseMode.MASTERS, CourseMode.EXECUTIVE_EDUCATION],\n is_active=True,\n )", "def see_teaching_courses(self, username: str, token: str) -> List[Dict[str, object]]:\n\n # Validate user first\n if not self.validate(username=username, token=token, check_privilege='instructor'):\n raise RuntimeError(\"User not verified!\")\n\n # Get UID from user's username\n uid = self.get_uid(username=username)\n\n # Query database for courses instructed by a user with this UID\n cursor = self._db_connection.cursor()\n cursor.execute(\n '''\n SELECT \n course_id,\n course_abbreviation,\n course_name, \n time,\n seats \n FROM \n courses\n WHERE \n instructor_id = ?\n ;\n ''', (uid,))\n\n db_results = cursor.fetchall()\n\n if db_results is None:\n print(\"No associated courses found!\")\n return []\n\n # Build information dicts for every course this user is instructing\n courses = []\n for result in db_results:\n # Get the number of students enrolled in this course already\n cursor.execute('''SELECT COUNT(*) FROM enrollment_records WHERE course_id = ?;''', (result[0],))\n students_enrolled = cursor.fetchone()[0]\n if students_enrolled is None:\n students_enrolled = 0\n\n # Build a course dict from the data\n courses.append({\n \"course_abbreviation\": result[1],\n \"course_name\": result[2],\n \"time\": result[3],\n \"students_enrolled\": students_enrolled,\n \"capacity\": result[4],\n })\n\n return courses", "def get_user_courses(user_id):\n user = _get_user_require_auth(user_id)\n\n courses = list(m.Course.objects(id__in=set(user.course_ids)))\n course_dicts, user_course_dicts, _ = (\n m.Course.get_course_and_user_course_dicts(courses, user))\n\n return api_util.jsonify({\n 'courses': course_dicts,\n 'user_courses': user_course_dicts,\n })", "def courses_with_role(self):\r\n return CourseAccessRole.objects.filter(role=self.role, user=self.user)", "def get_all_student_courses(cls, user):\n member_record = CourseMember.objects.filter(user=user)\n member_teacher = member_record.filter(type = 3)\n student_list = []\n\n for member in member_teacher:\n if member.course.pk not in student_list:\n student_list.append(member.course.pk)\n\n return student_list", "def get_user_enrollments(user):\n program_enrollments = (\n ProgramEnrollment.objects.select_related(\"program__programpage\")\n .prefetch_related(\"program__courses\")\n .select_related(\"user\", \"company\")\n .filter(user=user)\n .all()\n )\n program_courses = itertools.chain(\n *(\n program_enrollment.program.courses.all()\n for program_enrollment in program_enrollments\n )\n )\n program_course_ids = set(course.id for course in program_courses)\n course_run_enrollments = (\n CourseRunEnrollment.objects.select_related(\"run__course__coursepage\", \"company\")\n .filter(user=user)\n .order_by(\"run__start_date\")\n .all()\n )\n non_program_run_enrollments, program_run_enrollments = partition(\n course_run_enrollments,\n lambda course_run_enrollment: (\n course_run_enrollment.run.course_id in program_course_ids\n ),\n )\n program_enrollments, past_program_enrollments = partition(\n program_enrollments, lambda program_enrollment: program_enrollment.is_ended\n )\n non_program_run_enrollments, past_non_program_run_enrollments = partition(\n non_program_run_enrollments,\n lambda 
non_program_run_enrollment: non_program_run_enrollment.is_ended,\n )\n\n return UserEnrollments(\n programs=program_enrollments,\n past_programs=past_program_enrollments,\n program_runs=program_run_enrollments,\n non_program_runs=non_program_run_enrollments,\n past_non_program_runs=past_non_program_run_enrollments,\n )", "def get_courses(db: Session = Depends(get_db)): # , _: models.User = Depends(get_current_user))\n return crud.course.get_multi(db, skip=0, limit=100)", "def requested_courses(self, user_id: str) -> np.ndarray:\n self.retrieve_leads()\n\n return self.leads_df[self.leads_df['user_id'] == user_id]['course_id'].values", "def my_courses(self, signer):\n return list(chain(*[p.user_courses(signer=signer) for p in self.providers]))", "def has_instructor_access_for_class(user, course_id):\r\n\r\n course = get_course_with_access(user, 'staff', course_id, depth=None)\r\n return has_access(user, 'staff', course)", "def users_enrolled_in(cls, course_id):\r\n return User.objects.filter(\r\n courseenrollment__course_id=course_id,\r\n courseenrollment__is_active=True\r\n )", "def search_courses():\n current_user = view_helpers.get_current_user()\n courses, has_more = m.Course.search(flask.request.values, current_user)\n\n course_dicts, user_course_dicts, _ = (\n m.Course.get_course_and_user_course_dicts(courses, current_user))\n\n return api_util.jsonify({\n 'courses': course_dicts,\n 'user_courses': user_course_dicts,\n 'has_more': has_more,\n })", "def get_course_enrollment_pairs(user, course_org_filter, org_filter_out_set):\r\n for enrollment in CourseEnrollment.enrollments_for_user(user):\r\n course = course_from_id(enrollment.course_id)\r\n if course:\r\n\r\n # if we are in a Microsite, then filter out anything that is not\r\n # attributed (by ORG) to that Microsite\r\n if course_org_filter and course_org_filter != course.location.org:\r\n continue\r\n # Conversely, if we are not in a Microsite, then let's filter out any enrollments\r\n # with courses attributed (by ORG) to Microsites\r\n elif course.location.org in org_filter_out_set:\r\n continue\r\n\r\n yield (course, enrollment)\r\n else:\r\n log.error(\"User {0} enrolled in non-existent course {1}\"\r\n .format(user.username, enrollment.course_id))", "def satisfying_courses(self):\n return (\n Course.objects.all()\n .exclude(id__in=self.overrides.all())\n .filter(\n Q(department__in=self.departments.all(), semester=self.semester)\n | Q(id__in=self.courses.all())\n )\n )", "def getUserCoursesList(self, chat_id):\n\t\tcommand = \"SELECT ID, name, description FROM courses WHERE author_id=?;\"\n\t\tparams = (chat_id,)\n\n\t\tdata = self._run_command(command, params)\n\n\t\tif not data:\n\t\t\treturn None\n\n\t\tresult = []\n\t\tfor i in data:\n\t\t\tresult.append({\"ID\": i[0], \n\t\t\t\t\"name\": i[1],\n\t\t\t\t\"description\": i[2] if i[2] else \"\",\n\t\t\t\t})\n\n\t\treturn result", "def see_associated_classes(self, username: str, token: str) -> List[Dict[str, object]]:\n\n # Validate user first\n if not self.validate(username=username, token=token, check_privilege='student'):\n raise RuntimeError(\"User not verified!\")\n\n # Get UID from user's username\n uid = self.get_uid(username=username)\n\n # Query database for courses associated with this UID\n cursor = self._db_connection.cursor()\n cursor.execute(\n '''\n SELECT \n courses.course_id,\n courses.course_abbreviation,\n courses.course_name,\n courses.instructor_id, \n courses.time,\n courses.seats \n FROM \n courses \n INNER JOIN \n enrollment_records \n ON \n 
courses.course_id = enrollment_records.course_id \n WHERE \n enrollment_records.uid = ?\n ;\n ''', (uid,))\n\n db_results = cursor.fetchall()\n\n if db_results is None:\n print(\"No associated courses found!\")\n return []\n\n # Build information dicts for every course this user is enrolled in\n courses = []\n for result in db_results:\n # Get the instructor's username (we don't want to be giving UIDs)\n instructor_name = self.get_username(result[3])\n\n # Get the number of students enrolled in this course already\n cursor.execute('''SELECT COUNT(*) FROM enrollment_records WHERE course_id = ?;''', (result[0],))\n students_enrolled = cursor.fetchone()[0]\n if students_enrolled is None:\n students_enrolled = 0\n\n # Build a course dict from the data\n courses.append({\n \"course_abbreviation\": result[1],\n \"course_name\": result[2],\n \"instructor\": instructor_name,\n \"time\": result[4],\n \"students_enrolled\": students_enrolled,\n \"capacity\": result[5],\n })\n\n return courses", "def view_all_courses(request, username):\n if request.method == 'GET':\n\n # if user log in \n try:\n user = User.objects.get(username=username)\n if ensure_login(user) == False:\n return JsonResponse({'login': 'User must login'}, status=403) \n except:\n return JsonResponse({'login': 'User must login'}, status=403)\n\n if user.is_staff:\n courses = courseQuerySetSerializer(user.created_courses.all())\n else:\n courses = courseQuerySetSerializer(user.enrolled_courses.all())\n\n if courses is None:\n return JsonResponse({'error': 'No courses to view'}, status=404)\n \n return JsonResponse({'success': True, 'courses': courses}, status=200) # each course_code should be stored in data-course attribte inorder to grap it when perfoming actions on a speific course\n else:\n return JsonResponse({'error': 'Method not allowed'}, status=405)", "def all_instructors(self):\n \n with sqlite3.connect(self.db_path) as conn:\n # conn.row_factory = self.create_student\n conn.row_factory = lambda cursor, row: Instructor(\n row[1], row[2], row[6], row[6], row[5]\n )\n \n \n db_cursor = conn.cursor()\n\n db_cursor.execute(\"\"\"\n select i.Id,\n i.FirstName,\n i.LastName,\n i.SlackHandle,\n i.CohortId,\n i.Specialty,\n c.Name\n from Instructor i\n join Cohort c on i.CohortId = c.Id\n order by i.CohortId\n \"\"\")\n\n all_instructors = db_cursor.fetchall()\n\n # for student in all_students:\n # print(f'{student[1]} {student[2]} is in {student[5]}')\n\n # for student in all_students:\n # print(f'{student[1]} {student[2]} is in {student[5]}')\n\n for instructor in all_instructors:\n print(instructor)", "def get_all_allowed_enrollments(self):\n if self.is_superuser:\n return Enrollment.objects.all()\n\n # Enrollments belonging to students the user manages\n manages = Q(student__case_manager=self)\n # Enrollments belonging to sections the user teaches\n teaches = Q(section__teacher=self)\n\n # Filter all terms which the user teaches a class\n taught_terms = Term.objects.filter(section__teacher=self)\n\n # The teacher of another section in the same term in which the student is enrolled\n other_teacher = Q(pk__in=[])\n for term in taught_terms:\n overlapping_terms = term.get_overlapping_terms()\n # Get all sections from this term or its overlaps\n term_sections = Section.objects.filter(term__in=overlapping_terms)\n # Get all the enrollments in any section from this term\n term_enrollments = Enrollment.objects.filter(section__in=term_sections)\n # Get all the students taught by this user this term\n term_taught_students = 
Student.objects.filter(enrollment__in=term_enrollments.filter(section__teacher=self))\n # Get all the enrollments of those students for this term\n other_teacher = other_teacher | Q(student__in=term_taught_students, section__term__in=overlapping_terms)\n return Enrollment.objects.filter(teaches | manages | other_teacher).distinct()", "def get_courses(self):\r\n\r\n return self.def_ms.get_courses()", "def get(self, request):\r\n\r\n if not request.user.is_staff:\r\n raise Http404\r\n data = []\r\n\r\n for course in self.get_courses(): # pylint: disable=unused-variable\r\n datum = [course.display_name, course.id]\r\n datum += [CourseEnrollment.objects.filter(\r\n course_id=course.id).count()]\r\n datum += [CourseStaffRole(course.id).users_with_role().count()]\r\n datum += [','.join([x.username for x in CourseInstructorRole(\r\n course.id).users_with_role()])]\r\n data.append(datum)\r\n\r\n datatable = dict(header=[_('Course Name'), _('course_id'),\r\n _('# enrolled'), _('# staff'),\r\n _('instructors')],\r\n title=_('Enrollment information for all courses'),\r\n data=data)\r\n context = {\r\n 'datatable': datatable,\r\n 'msg': self.msg,\r\n 'djangopid': os.getpid(),\r\n 'modeflag': {'staffing': 'active-section'},\r\n 'edx_platform_version': getattr(settings, 'EDX_PLATFORM_VERSION_STRING', ''),\r\n }\r\n return render_to_response(self.template_name, context)", "def get_all_teacher_courses(cls, user):\n member_record = CourseMember.objects.filter(user=user)\n member_teacher = member_record.filter(Q(type = 0) | Q(type = 1) | Q(type = 2))\n teacher_list = []\n\n for member in member_teacher:\n if member.course.pk not in teacher_list:\n teacher_list.append(member.course.pk)\n\n return teacher_list", "def see_courses(self, username: str, token: str, spots_available: bool = False) -> List[Dict[str, object]]:\n\n # Validate user first\n if not self.validate(username=username, token=token, check_privilege='student'):\n raise RuntimeError(\"User not verified!\")\n\n # Query database for all courses\n cursor = self._db_connection.cursor()\n cursor.execute(\n '''\n SELECT \n course_id,\n course_abbreviation,\n course_name,\n instructor_id, \n time,\n seats \n FROM \n courses\n ;\n ''')\n db_results = cursor.fetchall()\n\n # If no courses are available\n if db_results is None:\n return []\n\n # Build information dicts for every course this user is enrolled in\n courses = []\n for result in db_results:\n # Get the instructor's username (we don't want to be giving UIDs)\n instructor_name = self.get_username(result[3])\n\n # Get the number of students enrolled in this course\n cursor.execute('''SELECT COUNT(*) FROM enrollment_records WHERE course_id = ?;''', (result[0],))\n students_enrolled = cursor.fetchone()[0]\n if students_enrolled is None:\n students_enrolled = 0\n\n # Don't add if the course is full (BUT ONLY if specified)\n if spots_available and students_enrolled >= result[5]:\n continue\n\n # Build a course dict from the data\n courses.append({\n \"course_abbreviation\": result[1],\n \"course_name\": result[2],\n \"instructor\": instructor_name,\n \"time\": result[4],\n \"students_enrolled\": students_enrolled,\n \"capacity\": result[5],\n })\n\n return courses", "def get_courses(self):\n if not self.is_course_based_activity():\n raise IllegalState()\n else:\n raise Unimplemented()", "def getCoursesList(self, pageSize=100):\n results = self.service.courses().list(pageSize=pageSize).execute()\n self.courses = results.get('courses', [])\n if not self.courses:\n return []\n return self.courses # 
Might not have to return self.courses, but it's useful for now", "def _accessible_courses_list(request):\r\n courses = modulestore('direct').get_courses()\r\n\r\n # filter out courses that we don't have access to\r\n def course_filter(course):\r\n \"\"\"\r\n Get courses to which this user has access\r\n \"\"\"\r\n if GlobalStaff().has_user(request.user):\r\n return course.location.course != 'templates'\r\n\r\n return (has_course_access(request.user, course.id)\r\n # pylint: disable=fixme\r\n # TODO remove this condition when templates purged from db\r\n and course.location.course != 'templates'\r\n )\r\n courses = filter(course_filter, courses)\r\n return courses", "def test_user_role_instructor(self):\r\n self.assertEqual(\r\n 'instructor',\r\n access.get_user_role(self.course_instructor, self.course_key)\r\n )\r\n # Masquerade instructor\r\n self.course_instructor.masquerade_as_student = True\r\n self.assertEqual(\r\n 'student',\r\n access.get_user_role(self.course_instructor, self.course_key)\r\n )", "def get_courses(self, depth=0):\r\n return self.courses.values()", "def _get_courses(self) -> None:\n\n courses_content: NavigableString = self.soup.find(\"div\", \n {\"class\": \"coursesContent\"})\n course_items: ResultSet = courses_content.find_all(\"div\", \n {\"class\": \"courseItem\"})\n\n for item in course_items:\n course_name: str = item.a[\"href\"].split(\"/\")[-2].lower()\n course_data: ParseType = self._parse(item)\n self._update(course_name, course_data)", "def get_courses():\n courses = []\n courses_recs = Course._file.read_db()\n for course in courses_recs[\"courses\"]:\n courses.append(Course(**course))\n return courses" ]
[ "0.6672383", "0.64765507", "0.63457346", "0.62248945", "0.5967342", "0.5896697", "0.58316165", "0.5775987", "0.5749575", "0.5679381", "0.5674233", "0.56352746", "0.5620008", "0.56027764", "0.55914146", "0.55296856", "0.5508865", "0.5476848", "0.5475611", "0.53797096", "0.5378224", "0.5376067", "0.5373743", "0.53694516", "0.53634024", "0.5359408", "0.5350509", "0.5325784", "0.5314705", "0.5300574" ]
0.7003235
0
Return True if the given user is a faculty member on any courses.
def is_faculty(user): return Affil.objects.filter(user=user).exists() or \ faculty_courses_for_user(user).exists()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_faculty():\n return _is_member('uw_faculty')", "def is_member(user: User) -> bool:\n if not user:\n raise TypeError('user should not be None')\n return user.name.startswith('L')", "def registered_for_course(course, user):\r\n if user is None:\r\n return False\r\n if user.is_authenticated():\r\n return CourseEnrollment.is_enrolled(user, course.id)\r\n else:\r\n return False", "def is_prime_member(user: User) -> bool:\n if not user:\n raise TypeError('user should not be None')\n return user.name.startswith('W')", "def check_course_membership(course, user=None):\n if user is None:\n user = g.user\n\n if isinstance(course, Course):\n course_id = course.id\n else:\n course_id = course\n\n if user.courses.filter(Course.id == course_id).count() == 0:\n abort(403)", "def has_authority(self, user):\n UserModel = get_user_model()\n ADMINISTRATOR = UserModel.ROLE_MAP[UserModel.ADMINISTRATOR]\n result = True\n\n if not (user.is_superuser or user.role == ADMINISTRATOR):\n try:\n self.memberships.get(user=user)\n except Membership.DoesNotExist:\n result = False\n\n return result", "def has_access(user, role):\r\n if not user.is_active:\r\n return False\r\n # do cheapest check first even tho it's not the direct one\r\n if GlobalStaff().has_user(user):\r\n return True\r\n # CourseCreator is odd b/c it can be disabled via config\r\n if isinstance(role, CourseCreatorRole):\r\n # completely shut down course creation setting\r\n if settings.FEATURES.get('DISABLE_COURSE_CREATION', False):\r\n return False\r\n # wide open course creation setting\r\n if not settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):\r\n return True\r\n\r\n if role.has_user(user):\r\n return True\r\n # if not, then check inferred permissions\r\n if (isinstance(role, (CourseStaffRole, CourseBetaTesterRole)) and\r\n CourseInstructorRole(role.course_key).has_user(user)):\r\n return True\r\n return False", "def load_user_groups(user):\n if not user.is_authenticated:\n return False\n \n user.is_faculty = len(user.groups.filter(name='faculty')) > 0\n user.is_student = not user.is_faculty\n\n return True", "def user_is_student(userobj):\n from .assignment_group import AssignmentGroup\n return AssignmentGroup.published_where_is_candidate(userobj).exists()", "def has_user(self, user):\r\n if not (user.is_authenticated() and user.is_active):\r\n return False\r\n\r\n # pylint: disable=protected-access\r\n if not hasattr(user, '_roles'):\r\n user._roles = set(\r\n CourseAccessRole.objects.filter(user=user).all()\r\n )\r\n\r\n role = CourseAccessRole(user=user, role=self._role_name, course_id=self.course_key, org=self.org)\r\n return role in user._roles", "def has_instructor_access_for_class(user, course_id):\r\n\r\n course = get_course_with_access(user, 'staff', course_id, depth=None)\r\n return has_access(user, 'staff', course)", "def faculty_courses_for_user(user):\n return Course.objects.filter(faculty_group__in=user.groups.all())", "def has_course_access(user, course_key, role=CourseStaffRole):\r\n if GlobalStaff().has_user(user):\r\n return True\r\n return auth.has_access(user, role(course_key))", "def have_permission(self, user, course_id):\n \"\"\"\n any([\n request.user.is_staff,\n CourseStaffRole(course_key).has_user(request.user),\n CourseInstructorRole(course_key).has_user(request.user)\n ])\n \"\"\"\n try:\n course_key = CourseKey.from_string(course_id)\n course = get_course_with_access(user, \"load\", course_key)\n data_researcher_access = user.has_perm(permissions.CAN_RESEARCH, course_key)\n return bool(has_access(user, 
'instructor', course)) or bool(has_access(user, 'staff', course)) or data_researcher_access\n except Exception:\n return False", "def user_has_access(self, user):\n if self.visibility == self.PUBLIC:\n return True\n elif self.visibility == self.PRIVATE and self.created_by == user:\n return True\n elif self.visibility in (self.ORG_ONLY, self.ORG_ONLY_NO_EXTERNAL):\n if user.external and self.visibility == self.ORG_ONLY_NO_EXTERNAL:\n return False\n elif self.organization.memberships.filter(user=user).count() >= 1:\n return True\n return False", "def has_user(self, user, allow_superusers=True):\n return self.has_student(user, allow_superusers) or self.has_ta(user, False) or self.has_instructor(user, False)", "def is_teacher_of(cls, user_id, course_id):\n return (cls.get_highest_course_privilege(user_id, course_id) < 2)", "def user_is_examiner(userobj):\n from .assignment_group import AssignmentGroup\n return AssignmentGroup.published_where_is_examiner(userobj).exists()", "def has_object_permission(self, request, view, obj):\n if request.user.is_manager or request.user.is_staff or request.user.is_superuser:\n return True\n try:\n return request.user in obj.course.instructors.all()\n except AttributeError:\n # activitylevel => has no course element\n return request.user.is_instructor", "def is_expert(self, user):\r\n return user.is_authenticated() and len(self.expert_set.filter(user=user))>0", "def has_user(self, user): # pylint: disable=unused-argument\r\n return False", "def user_is_article_course_staff(user, article):\r\n\r\n wiki_slug = article_course_wiki_root_slug(article)\r\n\r\n if wiki_slug is None:\r\n return False\r\n\r\n # The wiki expects article slugs to contain at least one non-digit so if\r\n # the course number is just a number the course wiki root slug is set to\r\n # be '<course_number>_'. 
This means slug '202_' can belong to either\r\n # course numbered '202_' or '202' and so we need to consider both.\r\n\r\n courses = modulestore.django.modulestore().get_courses_for_wiki(wiki_slug)\r\n if any(courseware.access.has_access(user, 'staff', course, course.course_key) for course in courses):\r\n return True\r\n\r\n if (wiki_slug.endswith('_') and slug_is_numerical(wiki_slug[:-1])):\r\n courses = modulestore.django.modulestore().get_courses_for_wiki(wiki_slug[:-1])\r\n if any(courseware.access.has_access(user, 'staff', course, course.course_key) for course in courses):\r\n return True\r\n\r\n return False", "def has_instructor(self, user, allow_superusers=True):\n return (user.is_superuser and allow_superusers) or len(self.instructors.filter(id=user.id)) > 0", "def has_access(self, user):\n if user.is_superuser:\n return True\n return self.user_objects(user).filter(id=self.id).exists()", "def is_member(self, username):\n usernames = [user.username for user in self.members]\n return True if username in usernames else False", "def has_student(self, user, allow_superusers=True):\n return (user.is_superuser and allow_superusers) or len(self.students.filter(id=user.id)) > 0", "def is_enrolled_by_partial(cls, user, course_id_partial):\r\n assert isinstance(course_id_partial, SlashSeparatedCourseKey)\r\n assert not course_id_partial.run # None or empty string\r\n course_key = SlashSeparatedCourseKey(course_id_partial.org, course_id_partial.course, '')\r\n querystring = unicode(course_key.to_deprecated_string())\r\n try:\r\n return CourseEnrollment.objects.filter(\r\n user=user,\r\n course_id__startswith=querystring,\r\n is_active=1\r\n ).exists()\r\n except cls.DoesNotExist:\r\n return False", "def is_user(self, user='') -> int:\n try:\n if user in self.users:\n return(1)\n else:\n return(0)\n except Exception as error:\n print(f\"Error: self.is_user({user}) -> {error}\")", "def is_collaborator(self, user):\r\n url = '{0}/{1}'.format(self.get_url(), user)\r\n\r\n return http.Request('GET', url), resource.parse_boolean", "def is_candidate(self, user):\n return self.candidates.filter(pk=user.pk).exists()" ]
[ "0.8120491", "0.68026584", "0.67830014", "0.672673", "0.66688967", "0.6633363", "0.65973413", "0.65743434", "0.65683174", "0.65278286", "0.65211505", "0.65073335", "0.6498223", "0.64772063", "0.6369451", "0.6323724", "0.6298432", "0.6294453", "0.627507", "0.62634844", "0.6256079", "0.620597", "0.61934125", "0.61333466", "0.6127976", "0.6081434", "0.6079353", "0.6070818", "0.6068573", "0.60446113" ]
0.8662211
0
Handles GET /caps/mba request.
def get(): mba_info = caps.mba_info() res = { 'clos_num': mba_info['clos_num'], 'mba_enabled': mba_info['enabled'], 'mba_bw_enabled': mba_info['ctrl_enabled'] } return res, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200", "def get_capsules(method=\"\"):\n return _get(\"capsules\", method)", "def get_capsule_parts(method=\"\", **query):\n return _get(\"parts/caps\", method, query)", "def subcmd_getbmc_main(args, parameter_info):\n \n from get_bmc_inventory import get_bmc_inventory\n result = get_bmc_inventory(parameter_info['ip'], parameter_info['user'], parameter_info['passwd'], parameter_info['sysid'])\n \n if result['ret'] is True:\n del result['ret']\n sys.stdout.write(json.dumps(result['entries'], sort_keys=True, indent=2))\n else:\n sys.stderr.write(result['msg'])", "def get():\n\n l2ca_info = caps.l2ca_info()\n\n res = {\n 'cache_size': l2ca_info['cache_size'],\n 'cw_size': l2ca_info['cache_way_size'],\n 'cw_num': l2ca_info['cache_ways_num'],\n 'clos_num': l2ca_info['clos_num'],\n 'cdp_supported': l2ca_info['cdp_supported'],\n 'cdp_enabled': l2ca_info['cdp_enabled']\n }\n return res, 200", "def test_abbeys_get(self):\n query_string = [('label', 'label_example'),\n ('page', 1),\n ('per_page', 100)]\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/abbeys',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def mba_supported():\n return common.MBA_CAP in SYSTEM_CAPS", "def fusion_api_get_metrics_capability(self, api=None, headers=None):\n return self.metrics.get(api=api, headers=headers, param='/capability')", "def put():\n json_data = request.get_json()\n\n # validate request\n try:\n schema, resolver = ConfigStore.load_json_schema('modify_mba_ctrl.json')\n jsonschema.validate(json_data, schema, resolver=resolver)\n except (jsonschema.ValidationError, OverflowError) as error:\n raise BadRequest(\"Request validation failed - %s\" % (str(error)))\n\n if not caps.mba_bw_supported():\n return {'message': \"MBA CTRL not supported!\"}, 409\n\n if common.CONFIG_STORE.is_any_pool_defined():\n return {'message': \"Please remove all Pools first!\"}, 409\n\n data = deepcopy(common.CONFIG_STORE.get_config())\n\n CapsMbaCtrl.set_mba_ctrl_enabled(data, json_data['enabled'])\n\n common.CONFIG_STORE.set_config(data)\n\n return {'message': \"MBA CTRL status changed.\"}, 200", "def get(self):\n query = Boat.query()\n results = query.fetch(limit = MAX_BOATS)\n boat_dicts = []\n for match in results:\n boat_dicts.append({'id': match.id, 'name': match.name, 'type': match.type,\n 'length': match.length, 'at_sea': match.at_sea })\n self.response.headers['Content-Type'] = 'application/json'\n self.response.write(json.dumps(boat_dicts))", "def test_get_model_breach(requests_mock):\n from DarktraceMBs import Client, get_model_breach_command\n\n # GIVEN an integration is configured to Darktrace\n mock_api_response = util_load_json('test_data/get_breach.json')\n requests_mock.get('https://mock.darktrace.com/modelbreaches?pbid=95',\n json=mock_api_response)\n\n client = Client(\n base_url='https://mock.darktrace.com',\n verify=False,\n auth=('examplepub', 'examplepri')\n )\n\n # WHEN the desired model breach has id 95\n args = {\n 'pbid': '95',\n }\n\n integration_response = get_model_breach_command(client, args)\n expected_response = util_load_json('test_data/formatted_get_breach.json')\n\n # THEN the response should be returned and formatted\n assert integration_response.outputs == expected_response\n assert 
integration_response.outputs_prefix == 'Darktrace.ModelBreach'", "def get_capabilities(http_conn):\n parsed, conn = http_conn\n headers = {'Accept-Encoding': 'gzip'}\n conn.request('GET', parsed.path, '', headers)\n resp = conn.getresponse()\n body = resp.read()\n http_log((parsed.geturl(), 'GET',), {'headers': headers}, resp, body)\n if resp.status < 200 or resp.status >= 300:\n raise ClientException.from_response(\n resp, 'Capabilities GET failed', body)\n resp_headers = resp_header_dict(resp)\n return parse_api_response(resp_headers, body)", "def amagama(request):\n log.debug(\"Get open source translations from amaGama service.\")\n\n try:\n text = request.GET['text']\n locale = request.GET['locale']\n except MultiValueDictKeyError as e:\n log.error(str(e))\n return HttpResponse(\"error\")\n\n try:\n text = urllib.quote(text.encode('utf-8'))\n except KeyError as e:\n log.error(str(e))\n return HttpResponse(\"error\")\n\n url = \"http://amagama.locamotion.org/tmserver\" \\\n \"/en/%s/unit/%s?max_candidates=%s\" \\\n % (locale, text, 5)\n\n try:\n r = requests.get(url)\n\n if r.text != '[]':\n translations = r.json()\n\n return HttpResponse(json.dumps({\n 'translations': translations\n }), content_type='application/json')\n\n else:\n return HttpResponse(\"no\")\n\n except Exception as e:\n log.error(e)\n return HttpResponse(\"error\")", "def get():\n\n l3ca_info = caps.l3ca_info()\n\n res = {\n 'cache_size': l3ca_info['cache_size'],\n 'cw_size': l3ca_info['cache_way_size'],\n 'cw_num': l3ca_info['cache_ways_num'],\n 'clos_num': l3ca_info['clos_num'],\n 'cdp_supported': l3ca_info['cdp_supported'],\n 'cdp_enabled': l3ca_info['cdp_enabled']\n }\n return res, 200", "def caps(self):\n return self._caps", "def handle_market_cap(request):\n ticker = request.get_slot_value(slot_name=\"stockTicker\").upper()\n\n # Query DB for stock data\n company_stats = Stock.get_stats(ticker)\n market_cap = company_stats.get('marketcap', None)\n company_name = company_stats.get('companyName', ticker)\n\n if type(market_cap) is NoneType:\n logger.error(f\"There was an error getting market capitalization for {company_name}\")\n message = strings.INTENT_MARKET_CAP_MSG_FAIL.format(ticker)\n response = ResponseBuilder.create_response(request, message=message)\n else:\n message = strings.INTENT_MARKET_CAP_MSG.format(company_name, market_cap)\n response = ResponseBuilder.create_response(request, message=message) \\\n .set_session('stockTicker', ticker)\n\n reprompt_message = strings.INTENT_GENERAL_REPROMPT\n\n return response.with_reprompt(reprompt_message)", "def list_caps():\n global _CAPABILITIES_MAP\n\n try:\n return tuple(sorted(_CAPABILITIES_MAP.keys()))\n\n except NameError:\n pass # We can remedy this.\n\n loop = get_loop()\n\n controller_connection = CioRoot(loop)\n\n _CAPABILITIES_MAP = {}\n\n for capability_id in controller_connection.init():\n _CAPABILITIES_MAP[capability_id] = {\n 'acquire': controller_connection.acquire,\n 'release': controller_connection.release,\n }\n\n return tuple(sorted(_CAPABILITIES_MAP.keys()))", "def get_mapemp(request, genome, chrom, start, end):\n logger.debug(\"annotation_server.get_mapemp called for genome: %s chromosome: %s:%s-%s\" % (genome, chrom, start, end)) \n \n if genome in SUPPORTED_GENOMES:\n current_table = eval(genome+ \"_MappabilityEmpirial\")\n curr_vals = current_table.objects.filter(\n Q(chrom__iexact=chrom),\n Q(chromStart__range=(start, end)) | Q(chromEnd__range=(start, end))\n ).values('chrom', 'chromStart', 'chromEnd' )\n data = 
ValuesQuerySetToDict(curr_vals)\n return HttpResponse(data, 'application/json')\n else:\n return HttpResponse(status=400)", "def test_get_capabilities():\n capabilties = (\n \"Capability Identity : Capa1\\r\\n State : Installed\\r\\n\"\n \"Capability Identity : Capa2\\r\\n State : Disabled\\r\\n\"\n )\n\n mock = MagicMock(return_value=capabilties)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n out = dism.get_capabilities()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Capabilities\"]\n )\n assert out == [\"Capa1\", \"Capa2\"]", "def bm_api(method, **kwargs):\n if \"url\" in kwargs:\n kwargs['url'] = BLAZEMETER_API_URL + kwargs['url']\n else:\n LOGGER.error(\"Must provide url to bm_api()\")\n return None\n\n try:\n LOGGER.debug(\"Making request with method = {method}, {kwargs}\")\n response = requests.request(method, **kwargs, auth=get_authentication())\n if response.json().get(\"error\"):\n LOGGER.error(\"Error making request, received response: %s\", response.json()['error'])\n return None\n return response.json()\n except ValueError as value_error:\n LOGGER.error(value_error)", "def requestBattery(self) -> None:\n self._protocol.write_line(CMD_BATTERY)", "def get_cmx_ap_mac(campus, building, floor, ap_name):\n\n url = CMX_URL + '/api/config/v1/maps/info/' + campus + '/' + building + '/' + floor\n header = {'content-type': 'application/json', 'accept': 'application/json'}\n response = requests.get(url, headers=header, auth=CMX_AUTH, verify=False)\n aps_list = response.json()['accessPoints']\n for ap in aps_list:\n if ap['name'] == ap_name:\n ap_mac = ap['radioMacAddress']\n return ap_mac", "def get(self):\n \n global tw_margarita\n success = tw_margarita.ProcessRequest(self.request)\n self.response.out.write('Success: %s' % success)", "def test_available_capabilities():\n capabilties = (\n \"Capability Identity : Capa1\\r\\n State : Installed\\r\\n\"\n \"Capability Identity : Capa2\\r\\n State : Not Present\\r\\n\"\n )\n\n mock = MagicMock(return_value=capabilties)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n out = dism.available_capabilities()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Capabilities\"]\n )\n assert out == [\"Capa2\"]", "def get(self):\n if not self.settings.get('kg_list_kernels'):\n raise tornado.web.HTTPError(403, 'Forbidden')\n self.set_header('Content-Type', 'application/json')\n self.set_status(200)\n self.finish(json.dumps(self.activity.get()))", "def getCapsules(self):\n return self.data.capsules", "def capability(self):\n code, data, capabilities = (\n self.__send_command(\"CAPABILITY\", withcontent=True))\n if code == \"OK\":\n return capabilities\n return None", "def test_00_app_get(self):\r\n # GET as Anonymous\r\n url = '/api/app'\r\n action = 'get'\r\n self.check_limit(url, action, 'app')", "def test_getattr_caps(self):\n\n import pdb;pdb.set_trace()\n if not isinstance(self.mount_a, FuseMount):\n raise SkipTest(\"Require FUSE client\")\n\n # Enable debug. 
Client will requests CEPH_CAP_XATTR_SHARED\n # on lookup/open\n self.mount_b.umount_wait()\n #Peng debug : since we set use_exsiting_cluster to true, so it will\n # not initiate the ctx.ceph in the ceph.py def cluster() function, so we\n # we temporarily comment this out .\n #self.set_conf('client', 'client debug getattr caps', 'true')\n self.mount_b.mount()\n self.mount_b.wait_until_mounted()\n\n # create a file and hold it open. MDS will issue CEPH_CAP_EXCL_*\n # to mount_a\n p = self.mount_a.open_background(\"testfile\")\n self.mount_b.wait_for_visible(\"testfile\")\n\n # this tiggers a lookup request and an open request. The debug\n # code will check if lookup/open reply contains xattrs\n self.mount_b.run_shell([\"cat\", \"testfile\"])\n\n self.mount_a.kill_background(p)", "def test_wmts_rest_get_capabilities(self):\n ref_hash = 'b49538ed143340f11230eac8b8f9ecca'\n req_url = r'http://localhost/reproject/test/wmts/1.0.0/WMTSCapabilities.xml'\n if DEBUG:\n print('\\nTesting WMTS (REST) GetCapablities')\n print('URL: ' + req_url)\n response = get_url(req_url)\n\n # Check if the response is valid XML\n try:\n XMLroot = ElementTree.XML(response.read())\n XMLdict = XmlDictConfig(XMLroot)\n xml_check = True\n except:\n xml_check = False\n self.assertTrue(xml_check, 'GetCapabilities response is not a valid XML file. URL: ' + req_url)\n\n refXMLtree = ElementTree.parse(os.path.join(os.getcwd(), 'mod_reproject_test_data/wmts_endpoint/1.0.0/WMTSCapabilities.xml'))\n refXMLroot = refXMLtree.getroot()\n refXMLdict = XmlDictConfig(refXMLroot)\n\n check_result = check_dicts(XMLdict, refXMLdict)\n self.assertTrue(check_result, 'WTMTS (REST) Get Capabilities Request does not match what\\'s expected. URL: ' + req_url)" ]
[ "0.6530153", "0.51816005", "0.5091409", "0.5058398", "0.5054771", "0.50249", "0.5006124", "0.50048274", "0.49493545", "0.49234304", "0.48639435", "0.48049432", "0.48024124", "0.4788288", "0.47755444", "0.4756501", "0.47273806", "0.4724545", "0.47114536", "0.47101882", "0.47085527", "0.46968162", "0.46873373", "0.46547133", "0.46342376", "0.46292222", "0.45930722", "0.45904204", "0.45821452", "0.45785484" ]
0.7371121
0
Handles HTTP /caps/mba_ctrl request. Retrieve MBA CTRL capability and current state details
def get(): mba_ctrl_info = caps.mba_ctrl_info() res = { 'supported': mba_ctrl_info['supported'], 'enabled': mba_ctrl_info['enabled'] } return res, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get():\n\n mba_info = caps.mba_info()\n\n res = {\n 'clos_num': mba_info['clos_num'],\n 'mba_enabled': mba_info['enabled'],\n 'mba_bw_enabled': mba_info['ctrl_enabled']\n }\n return res, 200", "def put():\n json_data = request.get_json()\n\n # validate request\n try:\n schema, resolver = ConfigStore.load_json_schema('modify_mba_ctrl.json')\n jsonschema.validate(json_data, schema, resolver=resolver)\n except (jsonschema.ValidationError, OverflowError) as error:\n raise BadRequest(\"Request validation failed - %s\" % (str(error)))\n\n if not caps.mba_bw_supported():\n return {'message': \"MBA CTRL not supported!\"}, 409\n\n if common.CONFIG_STORE.is_any_pool_defined():\n return {'message': \"Please remove all Pools first!\"}, 409\n\n data = deepcopy(common.CONFIG_STORE.get_config())\n\n CapsMbaCtrl.set_mba_ctrl_enabled(data, json_data['enabled'])\n\n common.CONFIG_STORE.set_config(data)\n\n return {'message': \"MBA CTRL status changed.\"}, 200", "def control_capabilities(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"control_capabilities\"), kwargs)", "def get_control(self):\n return self.o.read_register(self.dev_id, CONTROL)", "def capability(self):\n code, data, capabilities = (\n self.__send_command(\"CAPABILITY\", withcontent=True))\n if code == \"OK\":\n return capabilities\n return None", "def list_caps():\n global _CAPABILITIES_MAP\n\n try:\n return tuple(sorted(_CAPABILITIES_MAP.keys()))\n\n except NameError:\n pass # We can remedy this.\n\n loop = get_loop()\n\n controller_connection = CioRoot(loop)\n\n _CAPABILITIES_MAP = {}\n\n for capability_id in controller_connection.init():\n _CAPABILITIES_MAP[capability_id] = {\n 'acquire': controller_connection.acquire,\n 'release': controller_connection.release,\n }\n\n return tuple(sorted(_CAPABILITIES_MAP.keys()))", "def is_ctrl(self):\n return self.opcode.is_ctrl", "def is_ctrl(self):\n return self.opcode.is_ctrl", "def __get_capability(self):\n requests = self.__get_capability_request()\n exception = self.__get_capability_exception()\n layers = self.__get_capability_layer()\n \n capability = { \"requests\": requests,\n \"exception\" : exception,\n \"layers\" : layers}\n return capability", "def get_ctrlvars(chid):\n ftype = promote_type(chid, use_ctrl=True)\n dat = (1*dbr.Map[ftype])()\n\n ret = libca.ca_array_get(ftype, 1, chid, dat)\n PySEVCHK('get_ctrlvars', ret)\n poll()\n out = {}\n tmpv = dat[0]\n for attr in ('precision', 'units', 'severity', 'status',\n 'upper_disp_limit', 'lower_disp_limit',\n 'upper_alarm_limit', 'upper_warning_limit',\n 'lower_warning_limit','lower_alarm_limit',\n 'upper_ctrl_limit', 'lower_ctrl_limit'):\n if hasattr(tmpv, attr):\n out[attr] = getattr(tmpv, attr, None)\n if (hasattr(tmpv, 'strs') and hasattr(tmpv, 'no_str') and\n tmpv.no_str > 0):\n out['enum_strs'] = tuple([BYTES2STR(tmpv.strs[i].value)\n for i in range(tmpv.no_str)])\n return out", "def test_get_capabilities():\n capabilties = (\n \"Capability Identity : Capa1\\r\\n State : Installed\\r\\n\"\n \"Capability Identity : Capa2\\r\\n State : Disabled\\r\\n\"\n )\n\n mock = MagicMock(return_value=capabilties)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n out = dism.get_capabilities()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Capabilities\"]\n )\n assert out == [\"Capa1\", \"Capa2\"]", "def capability(self):\n return self.get(\"capabilityClass\")", "def capabilities(self):\n\n class 
Capabilities(ct.Structure):\n _fields_ = [(\"Size\", ct.c_ulong),\n (\"AcqModes\", ct.c_ulong),\n (\"ReadModes\", ct.c_ulong),\n (\"FTReadModes\", ct.c_ulong),\n (\"TriggerModes\", ct.c_ulong),\n (\"CameraType\", ct.c_ulong),\n (\"PixelModes\", ct.c_ulong),\n (\"SetFunctions\", ct.c_ulong),\n (\"GetFunctions\", ct.c_ulong),\n (\"Features\", ct.c_ulong),\n (\"PCICard\", ct.c_ulong),\n (\"EMGainCapability\", ct.c_ulong)]\n\n stru = Capabilities()\n stru.Size = ct.sizeof(stru)\n self.lib.GetCapabilities(ct.pointer(stru))\n\n return stru", "def cntrl(self):\n return self._cntrl", "def test_available_capabilities():\n capabilties = (\n \"Capability Identity : Capa1\\r\\n State : Installed\\r\\n\"\n \"Capability Identity : Capa2\\r\\n State : Not Present\\r\\n\"\n )\n\n mock = MagicMock(return_value=capabilties)\n with patch.dict(dism.__salt__, {\"cmd.run\": mock}):\n with patch.dict(dism.__grains__, {\"osversion\": 10}):\n out = dism.available_capabilities()\n mock.assert_called_once_with(\n [dism.bin_dism, \"/English\", \"/Online\", \"/Get-Capabilities\"]\n )\n assert out == [\"Capa2\"]", "def ctrlParameter(self):\n return self.ctrl.parameters()", "def _MocaCtlShowStatus(self):\n mc = subprocess.Popen([MOCACTL, 'show', '--status'], stdout=subprocess.PIPE)\n out, _ = mc.communicate(None)\n return out.splitlines()", "def get_capabilities(http_conn):\n parsed, conn = http_conn\n headers = {'Accept-Encoding': 'gzip'}\n conn.request('GET', parsed.path, '', headers)\n resp = conn.getresponse()\n body = resp.read()\n http_log((parsed.geturl(), 'GET',), {'headers': headers}, resp, body)\n if resp.status < 200 or resp.status >= 300:\n raise ClientException.from_response(\n resp, 'Capabilities GET failed', body)\n resp_headers = resp_header_dict(resp)\n return parse_api_response(resp_headers, body)", "def get_capabilities(self):\n\n service = self.__get_service()\n capability = self.__get_capability()\n contents = {\"service\" : service, \"capability\" : capability}\n return contents, self.params['format']", "async def get_capability_report(self):\n if self.query_reply_data.get(\n PrivateConstants.CAPABILITY_QUERY) is None:\n await self._send_sysex(PrivateConstants.CAPABILITY_QUERY, None)\n while self.query_reply_data.get(\n PrivateConstants.CAPABILITY_RESPONSE) is None:\n await asyncio.sleep(self.sleep_tune)\n return self.query_reply_data.get(PrivateConstants.CAPABILITY_RESPONSE)", "async def get_capability_report(self):\n if self.query_reply_data.get(\n PrivateConstants.CAPABILITY_QUERY) is None:\n await self._send_sysex(PrivateConstants.CAPABILITY_QUERY, None)\n while self.query_reply_data.get(\n PrivateConstants.CAPABILITY_RESPONSE) is None:\n await asyncio.sleep(self.sleep_tune)\n return self.query_reply_data.get(PrivateConstants.CAPABILITY_RESPONSE)", "def get_capabilities(self, method='get'):\n self.client.getcapabilities()\n\n self._has_capabilities = True", "def capabilities(self):\n pass", "async def _capability_response(self, data):\n self.query_reply_data[PrivateConstants.CAPABILITY_RESPONSE] = data[1:-1]", "async def _capability_response(self, data):\n self.query_reply_data[PrivateConstants.CAPABILITY_RESPONSE] = data[1:-1]", "def read_status(ctl):\n\tr = ctl.bus_read_struct_coherent(tm.status_addr, 'BBBBI')\n\treturn r", "def get_net_control(self) -> list:\n return self._get_json(self._URLS['GetNetControl'])", "def get_caps(self):\n return ObjectCapabilities.get_capabilities(self)", "def get_capabilities(params,defaults):\n cap = CapabilitiesController (params,defaults)\n return 
cap.get_capabilities()", "def detect_supported_caps():\n result = []\n # generate list of supported capabilities\n\n # Intel RDT L3 CAT\n if common.PQOS_API.is_l3_cat_supported():\n result.append(common.CAT_L3_CAP)\n\n # Intel RDT L2 CAT\n if common.PQOS_API.is_l2_cat_supported():\n result.append(common.CAT_L2_CAP)\n\n # Intel RDT MBA\n if common.PQOS_API.is_mba_supported():\n result.append(common.MBA_CAP)\n\n if sstbf.is_sstbf_enabled():\n result.append(common.SSTBF_CAP)\n\n if power.is_sstcp_enabled():\n result.append(common.POWER_CAP)\n\n return result" ]
[ "0.64376485", "0.62849987", "0.597143", "0.5640618", "0.5495004", "0.5431465", "0.5399061", "0.5399061", "0.538451", "0.53069574", "0.51636267", "0.5137286", "0.5093328", "0.50370055", "0.4995253", "0.49621525", "0.49619934", "0.49399295", "0.49174848", "0.4887582", "0.4887582", "0.48639783", "0.485076", "0.48347926", "0.48347926", "0.48227236", "0.48218787", "0.48087448", "0.48014772", "0.4794755" ]
0.752214
0
Handles PUT /caps/mba_ctrl request. Raises BadRequest, InternalError
def put(): json_data = request.get_json() # validate request try: schema, resolver = ConfigStore.load_json_schema('modify_mba_ctrl.json') jsonschema.validate(json_data, schema, resolver=resolver) except (jsonschema.ValidationError, OverflowError) as error: raise BadRequest("Request validation failed - %s" % (str(error))) if not caps.mba_bw_supported(): return {'message': "MBA CTRL not supported!"}, 409 if common.CONFIG_STORE.is_any_pool_defined(): return {'message': "Please remove all Pools first!"}, 409 data = deepcopy(common.CONFIG_STORE.get_config()) CapsMbaCtrl.set_mba_ctrl_enabled(data, json_data['enabled']) common.CONFIG_STORE.set_config(data) return {'message': "MBA CTRL status changed."}, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200", "def put():\n json_data = request.get_json()\n\n # validate request\n try:\n schema, resolver = ConfigStore.load_json_schema('modify_rdt_iface.json')\n jsonschema.validate(json_data, schema, resolver=resolver)\n except (jsonschema.ValidationError, OverflowError) as error:\n raise BadRequest(\"Request validation failed - %s\" % (str(error)))\n\n if not json_data['interface'] in common.PQOS_API.supported_iface():\n raise BadRequest(\"RDT interface '%s' not supported!\" % (json_data['interface']))\n\n if common.CONFIG_STORE.is_any_pool_defined():\n return {'message': \"Please remove all Pools first!\"}, 409\n\n data = deepcopy(common.CONFIG_STORE.get_config())\n\n if 'rdt_iface' not in data:\n data['rdt_iface'] = {}\n\n data['rdt_iface']['interface'] = json_data['interface']\n CapsMbaCtrl.set_mba_ctrl_enabled(data, False)\n\n common.CONFIG_STORE.set_config(data)\n\n res = {'message': \"RDT Interface modified\"}\n return res, 200", "def get():\n\n mba_info = caps.mba_info()\n\n res = {\n 'clos_num': mba_info['clos_num'],\n 'mba_enabled': mba_info['enabled'],\n 'mba_bw_enabled': mba_info['ctrl_enabled']\n }\n return res, 200", "async def set(self, ctrl_key, command, *, key=None, value=None, data=None, ctrl_path=None):\n await super().set(\n ctrl_key, command, key=key, value=value, data=data, ctrl_path=ctrl_path\n )\n if key is not None and self._status:\n self._status.update_status(key, value)", "def set_control(self, control):\n self.o.write_register(self.dev_id, CONTROL, control)", "def control_capabilities(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"control_capabilities\"), kwargs)", "def test_update_hyperflex_capability_info(self):\n pass", "def set_capacity(self, cap):\n return self.get_interaction().set_capacity(cap)", "def update_control(self):\n self._control_ctr += 0x01", "def put_controller(cls, args, config):\n logging.debug(\"MOLNSController.put_controller(args={0})\".format(args))\n controller_obj = cls._get_controllerobj(args, config)\n if controller_obj is None:\n return\n\n # Check if any instances are assigned to this controller\n instance_list = config.get_controller_instances(controller_id=controller_obj.id)\n\n # Check if they are running\n inst = None\n if len(instance_list) > 0:\n for i in instance_list:\n status = controller_obj.get_instance_status(i)\n logging.debug(\"instance={0} has status={1}\".format(i, status))\n if status == controller_obj.STATUS_RUNNING:\n inst = i\n if inst is None:\n raise MOLNSException(\"No active instance for this controller\")\n\n file_to_transfer = args[1]\n logging.debug(\"File to transfer: {0}\".format(file_to_transfer))\n\n remote_file_path = os.path.join(\"/home/ubuntu/shared\", os.path.basename(file_to_transfer))\n\n controller_obj.ssh.connect(inst, SSHDeploy.DEFAULT_SSH_PORT, \"ubuntu\", controller_obj.provider.sshkeyfilename())\n\n sftp = controller_obj.ssh.open_sftp()\n remote_fh = sftp.file(remote_file_path, \"w\")\n try:\n with open(file_to_transfer, \"r\") as fh:\n remote_fh.write(fh.read())\n finally:\n remote_fh.close()\n sftp.close()\n\n print \"Transferred {0} to {1}@{2}:{3}\".format(file_to_transfer, inst.ip_address, \"ubuntu\", remote_file_path)", "def _add_control_channel(self, attrs):\n _cable_data = {}\n _cable_data[\"crate\"] = self._crate\n _cable_data[\"module\"] = self._module\n _cable_data[\"channel\"] = 
int(attrs.get('number', \"\"))\n _cable_data[\"name\"] = str(attrs.get('name', \"\"))\n self._data.append(_cable_data)", "def control_transfer(self, bmRequestType, bRequest, wValue, wIndex, wLengthOrData):\n return self.device.ctrl_transfer(bmRequestType, bRequest, wValue, wIndex, wLengthOrData)", "def _finalize_ctrl(self):\n self._aim_ctrl()\n self._set_ctrl_color()\n\n if self.size != 1:\n for shape in self.ctrl.getShapes():\n pm.scale(shape.cv, self.size, self.size, self.size, r=1)\n pm.delete(self.ctrl, ch=1)\n\n self.ctrl_grp = xform_utils.zero(self.ctrl)", "def update_controller():\n update_items(inst, mikrotik_controller, async_add_entities, switches)", "def ControlDevice(self, p_device_obj, p_bridge_obj, p_control):\n pass", "def put(self):\n coll_policy_id = views_helper.get_request_value(self.request, \"coll_policy_id\", \"BODY\")\n name = views_helper.get_request_value(self.request, \"coll_policy_name\", \"BODY\")\n command = views_helper.get_request_value(self.request, \"command\", \"BODY\")\n desc = views_helper.get_request_value(self.request, \"desc\", \"BODY\")\n ostype = views_helper.get_request_value(self.request, \"ostype\", \"BODY\")\n coll_policy_update_data = {\n 'name': name,\n 'cli_command': command,\n 'desc': desc,\n 'ostype': ostype\n }\n if len(CollPolicy.objects.filter(~Q(coll_policy_id=coll_policy_id), name=name)):\n data = {\n 'data': '',\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.FALSE,\n constants.MSG_TYPE: 'NAME_DUPLICATE',\n constants.MESSAGE: constants.COLLECTION_POLICY_NAME_DUPLICATE\n }\n\n }\n return api_return(data=data)\n obj = CollPolicy.objects.get(coll_policy_id=coll_policy_id)\n serializer = CollPolicyEditSerializer(instance=obj, data=coll_policy_update_data)\n try:\n if serializer.is_valid():\n serializer.save()\n data = {\n 'data': serializer.data,\n 'new_token': self.new_token,\n constants.STATUS: {\n constants.STATUS: constants.TRUE,\n constants.MESSAGE: constants.SUCCESS\n }\n\n }\n return api_return(data=data)\n except Exception as e:\n if constants.DEBUG_FLAG:\n print traceback.format_exc(e)\n return exception_handler(e)", "def handle_update(self):\n try:\n for controller in self.controllers:\n self._handle_single_update(controller)\n except urllib3.exceptions.HTTPError as http_error:\n raise HttpError('Error talking to Kubernetes', http_error) from http_error", "def send_breath_controller(self, value=0, ch=None):\n self.send_control_change(BREATH_CONTROLLER, value, ch=ch)", "def ControlSsm(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def ControlSsm(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_caps(self, caps, source):\n return ObjectCapabilities.update_capabilities(self, caps, source)", "def test_update_hyperflex_ucsm_config_policy(self):\n pass", "def Update(self, controller):\n pass", "def putconbound(self,i_,bkc_,blc_,buc_):\n res = __library__.MSK_XX_putconbound(self.__nativep,i_,bkc_,blc_,buc_)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def bdev_nvme_attach_controller(client, name, trtype, traddr, adrfam=None, trsvcid=None,\n priority=None, subnqn=None, hostnqn=None, hostaddr=None,\n hostsvcid=None, prchk_reftag=None, prchk_guard=None,\n hdgst=None, ddgst=None, 
fabrics_timeout=None, multipath=None, num_io_queues=None,\n ctrlr_loss_timeout_sec=None, reconnect_delay_sec=None,\n fast_io_fail_timeout_sec=None, psk=None, max_bdevs=None):\n params = {'name': name,\n 'trtype': trtype,\n 'traddr': traddr}\n\n if hostnqn:\n params['hostnqn'] = hostnqn\n\n if hostaddr:\n params['hostaddr'] = hostaddr\n\n if hostsvcid:\n params['hostsvcid'] = hostsvcid\n\n if adrfam:\n params['adrfam'] = adrfam\n\n if trsvcid:\n params['trsvcid'] = trsvcid\n\n if priority:\n params['priority'] = priority\n\n if subnqn:\n params['subnqn'] = subnqn\n\n if prchk_reftag:\n params['prchk_reftag'] = prchk_reftag\n\n if prchk_guard:\n params['prchk_guard'] = prchk_guard\n\n if hdgst:\n params['hdgst'] = hdgst\n\n if ddgst:\n params['ddgst'] = ddgst\n\n if fabrics_timeout:\n params['fabrics_connect_timeout_us'] = fabrics_timeout\n\n if multipath:\n params['multipath'] = multipath\n\n if num_io_queues:\n params['num_io_queues'] = num_io_queues\n\n if ctrlr_loss_timeout_sec is not None:\n params['ctrlr_loss_timeout_sec'] = ctrlr_loss_timeout_sec\n\n if reconnect_delay_sec is not None:\n params['reconnect_delay_sec'] = reconnect_delay_sec\n\n if fast_io_fail_timeout_sec is not None:\n params['fast_io_fail_timeout_sec'] = fast_io_fail_timeout_sec\n\n if psk:\n params['psk'] = psk\n\n if max_bdevs is not None:\n params['max_bdevs'] = max_bdevs\n\n return client.call('bdev_nvme_attach_controller', params)", "def do_put(args):\n session = BMC(server=args.server, username=args.username, password=args.password)\n if session.put(\"{0}/attr/{1}\".format(args.path, args.attr), args.value):\n do_get(args)", "def bdev_nvme_enable_controller(client, name, cntlid):\n\n params = {'name': name}\n\n if cntlid is not None:\n params['cntlid'] = cntlid\n\n return client.call('bdev_nvme_enable_controller', params)", "def update_controller(self):", "def handle_io_event(self, data):\n getattr(\n self,\n 'control_{}'.format(self.model)\n )(data['action'])\n self.update_serverside_status({\n 'action': data['action'], 'event_id': data['event_id']\n })", "def put(self, registration):\n args = self.reqparse.parse_args()\n check_for_empty_fields(args)\n return Car.edit(registration, args['model'], args['capacity'])" ]
[ "0.58559453", "0.5305376", "0.49763685", "0.49655056", "0.48763028", "0.4748014", "0.4734404", "0.4720664", "0.4719354", "0.46167174", "0.46100092", "0.46091127", "0.45844567", "0.4547084", "0.45449957", "0.45313764", "0.45261696", "0.45176327", "0.44819385", "0.44819385", "0.44804242", "0.4471786", "0.4460978", "0.44450766", "0.44427183", "0.4437869", "0.4435629", "0.44322535", "0.44320974", "0.44304198" ]
0.8299291
0
Handles PUT /caps/rdt_iface request. Raises BadRequest, InternalError
def put(): json_data = request.get_json() # validate request try: schema, resolver = ConfigStore.load_json_schema('modify_rdt_iface.json') jsonschema.validate(json_data, schema, resolver=resolver) except (jsonschema.ValidationError, OverflowError) as error: raise BadRequest("Request validation failed - %s" % (str(error))) if not json_data['interface'] in common.PQOS_API.supported_iface(): raise BadRequest("RDT interface '%s' not supported!" % (json_data['interface'])) if common.CONFIG_STORE.is_any_pool_defined(): return {'message': "Please remove all Pools first!"}, 409 data = deepcopy(common.CONFIG_STORE.get_config()) if 'rdt_iface' not in data: data['rdt_iface'] = {} data['rdt_iface']['interface'] = json_data['interface'] CapsMbaCtrl.set_mba_ctrl_enabled(data, False) common.CONFIG_STORE.set_config(data) res = {'message': "RDT Interface modified"} return res, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put(self, id):\n context = request.environ.get('context')\n net_obj = dbapi.net_interfaces_update(context, id, request.json)\n return jsonutils.to_primitive(net_obj), 200, None", "def post_logical_interface_update(self, resource_id, resource_dict):\n pass", "def put(self, path, request):\n\n try:\n data = json_decode(request.body)\n self.interface_data.set(path, data)\n response = self.interface_data.get(path, False)\n status_code = 200\n except MetadataParameterError as e:\n response = {'error': str(e)}\n status_code = 400\n except (TypeError, ValueError) as e:\n response = {'error': 'Failed to decode PUT request body: {}'.format(str(e))}\n status_code = 400\n return ApiAdapterResponse(response, status_code=status_code)", "def post_physical_interface_update(self, resource_id, resource_dict):\n pass", "def post_interface_route_table_update(self, resource_id, resource_dict):\n pass", "def _plug_interface(self, context, tenant_id, net_id, port_id,\n remote_interface_id):\n LOG.debug(_(\"QuantumRestProxyV2: _plug_interface() called\"))\n\n # update attachment on network controller\n try:\n port = super(QuantumRestProxyV2, self).get_port(context, port_id)\n mac = port[\"mac_address\"]\n\n for ip in port[\"fixed_ips\"]:\n if ip.get(\"subnet_id\") is not None:\n subnet = super(QuantumRestProxyV2, self).get_subnet(\n context, ip[\"subnet_id\"])\n gateway = subnet.get(\"gateway_ip\")\n if gateway is not None:\n resource = NETWORKS_PATH % (tenant_id, net_id)\n data = {\"network\":\n {\"id\": net_id,\n \"gateway\": gateway,\n }\n }\n ret = self.servers.put(resource, data)\n if not self.servers.action_success(ret):\n raise RemoteRestError(ret[2])\n\n if mac is not None:\n resource = ATTACHMENT_PATH % (tenant_id, net_id, port_id)\n data = {\"attachment\":\n {\"id\": remote_interface_id,\n \"mac\": mac,\n }\n }\n ret = self.servers.put(resource, data)\n if not self.servers.action_success(ret):\n raise RemoteRestError(ret[2])\n except RemoteRestError as e:\n LOG.error(_(\"QuantumRestProxyV2:Unable to update remote network: \"\n \"%s\"), e.message)\n raise", "def put(self, id):\n context = request.environ.get('context')\n net_obj = dbapi.netdevices_update(context, id, request.json)\n return jsonutils.to_primitive(net_obj), 200, None", "def pre_logical_interface_update(self, resource_id, resource_dict):\n pass", "def pre_interface_route_table_update(self, resource_id, resource_dict):\n pass", "def post_virtual_machine_interface_update(self, resource_id, resource_dict):\n pass", "def modify_network_interface_attribute(\n name=None,\n network_interface_id=None,\n attr=None,\n value=None,\n region=None,\n key=None,\n keyid=None,\n profile=None,\n):\n if not (name or network_interface_id):\n raise SaltInvocationError(\n \"Either name or network_interface_id must be provided.\"\n )\n if attr is None and value is None:\n raise SaltInvocationError(\"attr and value must be provided.\")\n r = {}\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n result = _get_network_interface(conn, name, network_interface_id)\n if \"error\" in result:\n return result\n eni = result[\"result\"]\n info = _describe_network_interface(eni)\n network_interface_id = info[\"id\"]\n # munge attr into what the API requires\n if attr == \"groups\":\n _attr = \"groupSet\"\n elif attr == \"source_dest_check\":\n _attr = \"sourceDestCheck\"\n elif attr == \"delete_on_termination\":\n _attr = \"deleteOnTermination\"\n else:\n _attr = attr\n _value = value\n if info.get(\"vpc_id\") and _attr == \"groupSet\":\n 
_value = __salt__[\"boto_secgroup.convert_to_group_ids\"](\n value,\n vpc_id=info.get(\"vpc_id\"),\n region=region,\n key=key,\n keyid=keyid,\n profile=profile,\n )\n if not _value:\n r[\"error\"] = {\n \"message\": \"Security groups do not map to valid security group ids\"\n }\n return r\n _attachment_id = None\n if _attr == \"deleteOnTermination\":\n try:\n _attachment_id = info[\"attachment\"][\"id\"]\n except KeyError:\n r[\"error\"] = {\n \"message\": (\n \"No attachment id found for this ENI. The ENI must\"\n \" be attached before delete_on_termination can be\"\n \" modified\"\n )\n }\n return r\n try:\n r[\"result\"] = conn.modify_network_interface_attribute(\n network_interface_id, _attr, _value, attachment_id=_attachment_id\n )\n except boto.exception.EC2ResponseError as e:\n r[\"error\"] = __utils__[\"boto.get_error\"](e)\n return r", "def _conf_intf(self, conn, interface, mode, pvid, vlan_list):\n\n if not vlan_list:\n raise Exception('The interface should be in at least one vlan')\n\n if (mode == 'access') and (len(vlan_list) > 1):\n raise Exception('An access port cannot be in multiple vlans')\n\n if pvid not in vlan_list:\n raise Exception('The pvid should be in the list of vlans')\n\n req_js = {}\n req_js['if_name'] = interface\n req_js['bridgeport_mode'] = mode\n req_js['pvid'] = pvid\n req_js['vlans'] = vlan_list\n\n obj = self.VLAN_IFACE_REST_OBJ + quote(interface, safe='')\n resp = conn.put(obj, req_js)\n return resp", "def put(self, registration):\n args = self.reqparse.parse_args()\n check_for_empty_fields(args)\n return Car.edit(registration, args['model'], args['capacity'])", "def update(self):\n if_index = self._get_iface_index()\n if not if_index:\n raise InterfaceCreateError(self.interface_name)\n self.iface_index = if_index\n self.iface_exists = True", "def manage_vrf_interfaces(args):\n with IPDB() as ipdb:\n with ipdb.interfaces[args.vrf_name] as vrf:\n if args.action == \"add_interface\":\n vrf.add_port(ipdb.interfaces[args.interface].index)\n logger.info(f\"{args.interface} added to vrf {args.vrf_name}\")\n if args.action == \"remove_interface\":\n subprocess.run(f\"ip link set dev {args.interface} nomaster\", shell=True)\n logger.info(f\"{args.interface} removed from vrf {args.vrf_name}\")", "def test_lo_interface_tc4_replace(duthost):\n json_patch = [\n {\n \"op\": \"remove\",\n \"path\": \"/LOOPBACK_INTERFACE/Loopback0|FC00:1::32~1128\"\n },\n {\n \"op\": \"remove\",\n \"path\": \"/LOOPBACK_INTERFACE/Loopback0|10.1.0.32~132\"\n },\n {\n \"op\": \"add\",\n \"path\": \"/LOOPBACK_INTERFACE/Loopback0|10.1.0.33~132\",\n \"value\": {}\n },\n {\n \"op\": \"add\",\n \"path\": \"/LOOPBACK_INTERFACE/Loopback0|FC00:1::33~1128\",\n \"value\": {}\n }\n ]\n\n tmpfile = generate_tmpfile(duthost)\n logger.info(\"tmpfile {}\".format(tmpfile))\n\n try:\n output = apply_patch(duthost, json_data=json_patch, dest_file=tmpfile)\n expect_op_success(duthost, output)\n\n check_show_ip_intf(duthost, \"Loopback0\", [\"10.1.0.33/32\"], [\"10.1.0.32/32\"], is_ipv4=True)\n check_show_ip_intf(duthost, \"Loopback0\", [\"fc00:1::33/128\"], [\"fc00:1::32/128\"], is_ipv4=False)\n finally:\n delete_tmpfile(duthost, tmpfile)", "def set_interface(self, iface):\n\t\tf = os.path.join(self.config_dir, \"iface-%s\" % LibvirtFile.TEMPLATE_FILE)\n\t\tself.iface_xml = cziso.fill_template(f, iface=iface)", "def pre_physical_interface_update(self, resource_id, resource_dict):\n pass", "def put(self):\n dev = self.request.get('device')\n reg = self.request.get('registry')\n uploaded_file = 
self.request.POST.get('data')\n data = uploaded_file.file.read()\n\n self.response.headers['Content-Type'] = 'text/plain'\n if (not dev) and len(dev)==0:\n self.response.write('parameter device not found')\n elif (not reg) and len(reg)==0:\n self.response.write('parameter registry not found')\n elif (not data) and len(data)==0:\n self.response.write('invalid or no key file found')\n else:\n # Get user account\n ds = Datastore()\n user = ds.get_registry(reg)\n if len(user) == 0:\n self.response.write(\"Registry does not exist\")\n else:\n region = get_region_from_user(user)\n\n # Add Device on IOT Core\n iot = IOT()\n success, message = iot.create_device(dev, reg, data, region)\n if success:\n self.response.write('Device Added')\n else:\n self.response.write(message)", "def post_logical_interface_read(self, resource_id, resource_dict):\n pass", "def put(self, ip):\n data = request.json\n update_ue_sub(ip, data)\n return None, 204", "def put_cdmi(self, path, data):\n req_url = self.normalize_cdmi_url(path)\n headers = {\"user-agent\": self.u_agent, \"X-CDMI-Specification-Version\": \"1.1\"}\n if path.endswith(\"/\"):\n headers[\"Content-type\"] = CDMI_CONTAINER\n else:\n headers[\"Content-type\"] = CDMI_OBJECT\n res = requests.put(\n req_url, headers=headers, auth=self.auth, data=data, verify=False\n )\n if res.status_code in [400, 401, 403, 404, 406]:\n return Response(res.status_code, res)\n elif res.status_code == 409:\n return Response(res.status_code, \"A resource with this name already exists\")\n return Response(0, res)", "def test_lo_interface_tc6_vrf_change(duthost):\n setup_vrf_config(duthost)\n json_patch = [\n {\n \"op\": \"replace\",\n \"path\": \"/LOOPBACK_INTERFACE/Loopback0/vrf_name\",\n \"value\": \"Vrf_02\"\n }\n ]\n\n tmpfile = generate_tmpfile(duthost)\n logger.info(\"tmpfile {}\".format(tmpfile))\n\n try:\n output = apply_patch(duthost, json_data=json_patch, dest_file=tmpfile)\n expect_op_success(duthost, output)\n\n check_show_ip_intf(duthost, \"Loopback0\", [\"10.1.0.32/32\", \"Vrf_02\"], [], is_ipv4=True)\n check_show_ip_intf(duthost, \"Loopback0\", [\"fc00:1::32/128\", \"Vrf_02\"], [], is_ipv4=False)\n\n check_vrf_route_for_intf(duthost, \"Vrf_02\", \"Loopback0\", is_ipv4=True)\n check_vrf_route_for_intf(duthost, \"Vrf_02\", \"Loopback0\", is_ipv4=False)\n finally:\n delete_tmpfile(duthost, tmpfile)", "def post_interface_route_table_read(self, resource_id, resource_dict):\n pass", "def post_logical_interface_create(self, resource_dict):\n pass", "def post_logical_interface_delete(self, resource_id, resource_dict):\n pass", "def pre_virtual_machine_interface_update(self, resource_id, resource_dict):\n pass", "def post_service_appliance_update(self, resource_id, resource_dict):\n pass", "def do_nic_update(cc, args):\n\n patch = utils.args_array_to_patch(args.attributes[0])\n result = cc.nic.update(args.uuid, patch)\n cliutils.print_dict(result)", "def put(self, id):\n context = request.environ.get('context')\n obj = dbapi.netdevice_data_update(context, id, request.json)\n resp = {\"data\": jsonutils.to_primitive(obj.variables)}\n return resp, 200, None" ]
[ "0.56267", "0.5597621", "0.5405904", "0.5289383", "0.52798593", "0.5218039", "0.5103825", "0.5088581", "0.50078917", "0.49717864", "0.49103323", "0.48853874", "0.48777813", "0.48709217", "0.4857238", "0.48350698", "0.48349154", "0.48155966", "0.481546", "0.4790912", "0.47044525", "0.4699107", "0.4674309", "0.46695897", "0.46641085", "0.46467334", "0.46336442", "0.45778927", "0.45714438", "0.45297757" ]
0.77151966
0
Handles GET /caps/l3ca request.
def get(): l3ca_info = caps.l3ca_info() res = { 'cache_size': l3ca_info['cache_size'], 'cw_size': l3ca_info['cache_way_size'], 'cw_num': l3ca_info['cache_ways_num'], 'clos_num': l3ca_info['clos_num'], 'cdp_supported': l3ca_info['cdp_supported'], 'cdp_enabled': l3ca_info['cdp_enabled'] } return res, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get():\n\n l2ca_info = caps.l2ca_info()\n\n res = {\n 'cache_size': l2ca_info['cache_size'],\n 'cw_size': l2ca_info['cache_way_size'],\n 'cw_num': l2ca_info['cache_ways_num'],\n 'clos_num': l2ca_info['clos_num'],\n 'cdp_supported': l2ca_info['cdp_supported'],\n 'cdp_enabled': l2ca_info['cdp_enabled']\n }\n return res, 200", "def get3wareControllers():\n\n # All 3ware controllers should be c0, c1, c2 etc.\n #Ctl Model (V)Ports Drives Units NotOpt RRate VRate BBU\n #------------------------------------------------------------------------\n #c2 7506-4LP 4 4 1 0 2 - -\n raidData = systemCommand('tw_cli show')\n results = re.findall('(c[0-9*]) ', raidData)\n return results", "def cat_l3_supported():\n return common.CAT_L3_CAP in SYSTEM_CAPS", "def execute_req3(catalog, req_category):\n return controller.execute_req3(catalog, req_category)", "def readaccl(self):\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_X_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_X_H_A)\r\n\t\t\r\n\t\txAccl = data1 * 256 + data0\r\n\t\tif xAccl > 32767 :\r\n\t\t\txAccl -= 65536\r\n\t\t\r\n\t\t\"\"\"Read data back from LSM330_OUT_Y_L_M(0x2A), 2 bytes\r\n\t\tY-Axis Mag LSB, Y-Axis Mag MSB\"\"\"\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Y_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Y_H_A)\r\n\t\t\r\n\t\tyAccl = data1 * 256 + data0\r\n\t\tif yAccl > 32767 :\r\n\t\t\tyAccl -= 65536\r\n\t\t\r\n\t\t\"\"\"Read data back from LSM330_OUT_Z_L_M(0x2C), 2 bytes\r\n\t\tZ-Axis Mag LSB, Z-Axis Mag MSB\"\"\"\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Z_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Z_H_A)\r\n\t\t\r\n\t\tzAccl = data1 * 256 + data0\r\n\t\tif zAccl > 32767 :\r\n\t\t\tzAccl -= 65536\r\n\t\t\r\n\t\treturn {'x' : xAccl, 'y' : yAccl, 'z' : zAccl}", "def fetch_switch_classic(url = SWITCHclassic_url):\n import urllib2\n # SWITCHclassis ACLs holen\n opener = urllib2.build_opener()\n acls_raw = opener.open(SWITCHclassic_url)\n acls_raw = acls_raw.readlines()\n classic_acls = []\n for line in acls_raw:\n line = line.strip()\n classic_acls.append(line.split(\" \"))\n return classic_acls", "def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)", "def third_cap(self):\n vas = []\n file = self.read1()\n for line in file:\n line = line.strip()\n string = re.sub(\"[^0-9a-zA-Z]\", \" \", line).split(\" \")\n for s_i in string:\n if s_i != \"\":\n if len(s_i) >= 3:\n o_i = []\n for i in s_i:\n o_i.append(i)\n t_i = o_i[2]\n o_i[2] = t_i.upper()\n str1 = \"\"\n q_i = str1.join(o_i)\n vas.append(q_i)\n else:\n vas.append(s_i)\n self.print(vas)\n self.write(vas)\n logging.debug(\"Starting with to\")\n return vas", "def page3(self):\n self.token_mode = \\\n 'Reset'\n result = request301.GET('/clusterinfo-web/controller' +\n '?target=' +\n self.token_target +\n '&mode=' +\n self.token_mode)\n # 9 different values for token_target found in response; the first matched\n # the last known value of token_target - don't update the variable.\n\n return result", "def page3(self):\n result = request301.GET('/Cars_Sample_App/car.do' +\n '?query=' +\n self.token_query +\n '&cid=' +\n self.token_cid)\n self.token_car = \\\n httpUtilities.valueFromBodyURI('car') # '2'\n self.token_carName = \\\n httpUtilities.valueFromBodyURI('carName') # 'S'\n self.token_query = \\\n httpUtilities.valueFromBodyURI('query') # 
'carEnquiries'\n\n return result", "def get_database_ca_output(cluster_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDatabaseCaResult]:\n ...", "def get_lc(self, source_type, ap=1, clip=None, force_recalc=False):\n\n if source_type == 'pca':\n lcs = self._pca_lcs\n else:\n lcs = self._tfa_lcs\n\n if ap in lcs and not force_recalc:\n lc = lcs[ap]\n else:\n flux = self.get_mag(source_type, ap)\n t_ax = self.get_time_ax()\n if clip:\n mask = sigma_clip(flux, clip)\n flux = flux[mask]\n t_ax = t_ax[mask]\n lc = lk.LightCurve(time=t_ax, flux=flux)\n lcs[ap] = lc\n return lc", "def cert_challenge_http(self) -> 'outputs.CertHttpChallengeResponse':\n return pulumi.get(self, \"cert_challenge_http\")", "def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)", "def lca(self, v, w):", "def fusion_api_get_internal_ca_crl(self, api=None, headers=None):\n param = '/ca/crl'\n return self.ca.get(api=api, param=param, headers=headers)", "def SCEC_LOH_3():\n\n #Initialize CrustModel\n model = CrustModel(2)\n\n #Slow layer\n vp=4.000\n vs=2.000\n rho=2.600\n Qa=54.65\n Qb=137.95\n thickness = 1.\n\n model.add_layer(thickness, vp, vs, rho, Qa, Qb)\n\n #Halfspace\n vp=6.000\n vs=3.464\n rho=2.700\n Qa=69.3\n Qb=120.\n thickness = 0 #Infinite thickness!\n model.add_layer(thickness, vp, vs, rho, Qa, Qb)\n\n return model", "def v3(self):\n from infapy.v3 import V3\n infapy.log.info(\"Created the v3 object to access the iics v3 apis\")\n return V3(self._v3,self._v3BaseURL,self._v3SessionID)", "def caget(PV):\n return epics.caget(PV)", "def do_AXIRB3 (self):\n self.__send__(bytes(pc.PROBE_CMD_AXIRB3))\n return self.__recv__()", "def get_capsules(method=\"\"):\n return _get(\"capsules\", method)", "def cplicense(server, ctype=\"vzzo\", action=\"add\"):\n # start Requests session\n sc = requests.Session()\n\n # import cookies from Firefox\n sc.cookies.update(get_cookies('imhsc.imhadmin.net'))\n\n # allow using a hostname (such as vps or dedicated server hostnames)\n try:\n servip = socket.gethostbyname(server)\n except socket.gaierror as e:\n print(\"!! 
%s: %s\" % (server, str(e)))\n return None\n\n # send request\n lresp = sc.post('https://imhsc.imhadmin.net/modules/Datacenter/datacenter_cplic.php',\n data={'ip': servip, 'type': ctype, 'action': action})\n\n print(\">> %s %s request for %s\" % (ctype, action.upper(), servip))\n print(\"** Got response from SC: %s\" % (lresp.text))\n\n return lresp", "def route( request, c ):", "def get_database_ca(cluster_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDatabaseCaResult:\n __args__ = dict()\n __args__['clusterId'] = cluster_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('digitalocean:index/getDatabaseCa:getDatabaseCa', __args__, opts=opts, typ=GetDatabaseCaResult).value\n\n return AwaitableGetDatabaseCaResult(\n certificate=pulumi.get(__ret__, 'certificate'),\n cluster_id=pulumi.get(__ret__, 'cluster_id'),\n id=pulumi.get(__ret__, 'id'))", "def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def GET(self, uri):\n content_type = negotiated_content_type(self.supported_types, self.default_content_type)\n def body(conn, cur):\n self.enforce_right('enumerate', uri)\n return web.ctx.ermrest_catalog_model.acls\n\n def post_commit(acls):\n self.set_http_etag( web.ctx.ermrest_catalog_model.etag() )\n self.http_check_preconditions()\n self.emit_headers()\n web.header('Content-Type', content_type)\n web.ctx.ermrest_request_content_type = content_type\n\n meta = _acls_to_meta(acls)\n\n if self.key is not None:\n # project out single ACL from ACL set\n try:\n meta = meta[self.key]\n except KeyError:\n raise exception.rest.NotFound(uri)\n\n response = json.dumps(meta) + '\\n'\n web.header('Content-Length', len(response))\n return response\n\n return self.perform(body, post_commit)", "def retrieve_capabilities(self, url, urlchain=[], pool=None, identity=None):\n\n # detect loops in capability links\n if url in urlchain:\n return\n\n if not self._default_url:\n self.set_default_url(url)\n\n if isinstance(url, str):\n url = urllib3.util.parse_url(url)\n\n if identity is None:\n identity = self._tls_state.extract_peer_identity(url)\n\n if pool is None:\n if url.host is not None:\n pool = self._tls_state.pool_for(url.scheme, url.host, url.port)\n else:\n raise ValueError(\"HttpInitiatorClient capability retrieval missing connection pool\")\n\n if url.path is not None:\n path = url.path\n else:\n path = \"/\"\n res = pool.request('GET', path)\n\n if res.status == 200:\n ctype = res.getheader(\"Content-Type\")\n if ctype == \"application/x-mplane+json\":\n\n # Probably an envelope. 
Process the message.\n self.handle_message(\n mplane.model.parse_json(res.data.decode(\"utf-8\")), identity)\n elif ctype == \"text/html\":\n # Treat as a list of links to capability messages.\n parser = CrawlParser()\n parser.feed(res.data.decode(\"utf-8\"))\n parser.close()\n for capurl in parser.urls:\n self.retrieve_capabilities(url=capurl,\n urlchain=urlchain + [url],\n pool=pool, identity=identity)", "def get_sequences_from_id3c(url, username, password, lineage, segment, output):\n r = requests.get(url, auth=(username,password), stream=True)\n r.raise_for_status()\n\n with open(output, 'w+') as fasta_file:\n for line in r.iter_lines():\n if line:\n sequence = json.loads(line)\n strain = sequence['sample'][-8:] # this needs revision in ID3C to match format A/Washington/a2fb5c0f/2019\n fasta_file.write(\"\".join([\">\", strain, \"\\n\", sequence['seq'].lower(), \"\\n\"]))", "def ca(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca\")", "def ca(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ca\")" ]
[ "0.5966446", "0.5155168", "0.4879287", "0.48043147", "0.47750655", "0.47078097", "0.46504328", "0.46455023", "0.45808417", "0.45062843", "0.448553", "0.44760966", "0.4447042", "0.44376257", "0.44348007", "0.44266585", "0.4421145", "0.44187596", "0.4409453", "0.43902916", "0.4387703", "0.43801644", "0.4379281", "0.4358308", "0.4354597", "0.43372828", "0.43330523", "0.4304923", "0.42765534", "0.42765534" ]
0.70944744
0
Handles GET /caps/l2ca request.
def get(): l2ca_info = caps.l2ca_info() res = { 'cache_size': l2ca_info['cache_size'], 'cw_size': l2ca_info['cache_way_size'], 'cw_num': l2ca_info['cache_ways_num'], 'clos_num': l2ca_info['clos_num'], 'cdp_supported': l2ca_info['cdp_supported'], 'cdp_enabled': l2ca_info['cdp_enabled'] } return res, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get():\n\n l3ca_info = caps.l3ca_info()\n\n res = {\n 'cache_size': l3ca_info['cache_size'],\n 'cw_size': l3ca_info['cache_way_size'],\n 'cw_num': l3ca_info['cache_ways_num'],\n 'clos_num': l3ca_info['clos_num'],\n 'cdp_supported': l3ca_info['cdp_supported'],\n 'cdp_enabled': l3ca_info['cdp_enabled']\n }\n return res, 200", "def fetch_switch_classic(url = SWITCHclassic_url):\n import urllib2\n # SWITCHclassis ACLs holen\n opener = urllib2.build_opener()\n acls_raw = opener.open(SWITCHclassic_url)\n acls_raw = acls_raw.readlines()\n classic_acls = []\n for line in acls_raw:\n line = line.strip()\n classic_acls.append(line.split(\" \"))\n return classic_acls", "def read_lwm2m_info():\n response, secure = lwm2m.get_lwm2m_security_info()\n \n if response != return_values.RESULT_SUCCESS:\n raise Exception(\"Failed to retrieve the lwm2m connection information. Return value {}.\".format(response))\n try:\n lwm2m_uri = \"coaps://\" + secure[\"LWM2M_HOST_NAME\"] + \":5684\"\n lwm2m_endpoint = secure[\"LWM2M_ENDPOINT\"]\n lwm2m_identity = secure[\"LWM2M_IDENTITY\"]\n lwm2m_security = secure[\"LWM2M_SECRET_KEY\"]\n except KeyError:\n raise Exception(\"The lwm2m security info message received from the api server is not in the expected format. Unable to proceed.\")\n \n return lwm2m_uri, lwm2m_endpoint, lwm2m_identity, lwm2m_security", "async def snmp_v2c_get(\n self,\n address: str,\n community: str,\n oid: str,\n timeout: Optional[int] = 10,\n return_error: bool = False,\n ):\n self.logger.debug(\"SNMP v2c GET %s %s\", address, oid)\n message = \"\"\n try:\n result = await snmp_get(\n address=address,\n oids=oid,\n community=community,\n version=SNMP_v2c,\n tos=config.activator.tos,\n timeout=timeout,\n )\n self.logger.debug(\"SNMP GET %s %s returns %s\", address, oid, result)\n result = smart_text(result, errors=\"replace\") if result else result\n except SNMPError as e:\n metrics[\"error\", (\"type\", \"snmp_v2_error\")] += 1\n result, message = None, repr(e)\n self.logger.debug(\"SNMP GET %s %s returns error %s\", address, oid, e)\n except Exception as e:\n result, message = None, str(e)\n self.logger.debug(\"SNMP GET %s %s returns unknown error %s\", address, oid, e)\n if return_error:\n return result, message\n return result", "def cat_l2_supported():\n return common.CAT_L2_CAP in SYSTEM_CAPS", "def get_capsules(method=\"\"):\n return _get(\"capsules\", method)", "def readaccl(self):\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_X_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_X_H_A)\r\n\t\t\r\n\t\txAccl = data1 * 256 + data0\r\n\t\tif xAccl > 32767 :\r\n\t\t\txAccl -= 65536\r\n\t\t\r\n\t\t\"\"\"Read data back from LSM330_OUT_Y_L_M(0x2A), 2 bytes\r\n\t\tY-Axis Mag LSB, Y-Axis Mag MSB\"\"\"\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Y_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Y_H_A)\r\n\t\t\r\n\t\tyAccl = data1 * 256 + data0\r\n\t\tif yAccl > 32767 :\r\n\t\t\tyAccl -= 65536\r\n\t\t\r\n\t\t\"\"\"Read data back from LSM330_OUT_Z_L_M(0x2C), 2 bytes\r\n\t\tZ-Axis Mag LSB, Z-Axis Mag MSB\"\"\"\r\n\t\tdata0 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Z_L_A)\r\n\t\tdata1 = bus.read_byte_data(LSM330_ACCL_ADDRESS, LSM330_OUT_Z_H_A)\r\n\t\t\r\n\t\tzAccl = data1 * 256 + data0\r\n\t\tif zAccl > 32767 :\r\n\t\t\tzAccl -= 65536\r\n\t\t\r\n\t\treturn {'x' : xAccl, 'y' : yAccl, 'z' : zAccl}", "def list_caps():\n global _CAPABILITIES_MAP\n\n try:\n return tuple(sorted(_CAPABILITIES_MAP.keys()))\n\n 
except NameError:\n pass # We can remedy this.\n\n loop = get_loop()\n\n controller_connection = CioRoot(loop)\n\n _CAPABILITIES_MAP = {}\n\n for capability_id in controller_connection.init():\n _CAPABILITIES_MAP[capability_id] = {\n 'acquire': controller_connection.acquire,\n 'release': controller_connection.release,\n }\n\n return tuple(sorted(_CAPABILITIES_MAP.keys()))", "def certificate_auth():\r\n url = 'https://www.12306.cn'\r\n response = requests.get(url, verify=False)\r\n print(response.status_code)\r\n print(response.text)", "def fusion_api_get_ca_certificate(self, uri=None, api=None, headers=None, param=''):\n return self.ca.get(uri=uri, api=api, headers=headers, param=param)", "def fusion_api_get_internal_ca_crl(self, api=None, headers=None):\n param = '/ca/crl'\n return self.ca.get(api=api, param=param, headers=headers)", "def use_i2c():\n _LIB.oled_click_use_i2c()", "def caps20to10(repo, role):\n caps = {b'HG20'}\n capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))\n caps.add(b'bundle2=' + urlreq.quote(capsblob))\n return caps", "def alm2cl(self, m1, m2=None, lmin=2, lmax=None, symmetric=True):\n import healpy as hp\n\n if lmax is None:\n lmax = self.lmax\n cls = np.asarray(hp.alm2cl(m1, alms2=m2, lmax=lmax))\n if symmetric:\n cls_T = np.asarray(hp.alm2cl(m2, alms2=m1, lmax=lmax))\n cls = (cls + cls_T) / 2.0\n if lmin:\n cls[..., :lmin] = 0\n return np.atleast_2d(cls)", "def fusion_api_get_certificate_info(self, uri=None, api=None, param='', headers=None):\n param = '/certificates/https/'\n return self.ic.get(uri=uri, api=api, headers=headers, param=param)", "def get():\n\n mba_info = caps.mba_info()\n\n res = {\n 'clos_num': mba_info['clos_num'],\n 'mba_enabled': mba_info['enabled'],\n 'mba_bw_enabled': mba_info['ctrl_enabled']\n }\n return res, 200", "def get():\n\n mba_ctrl_info = caps.mba_ctrl_info()\n\n res = {\n 'supported': mba_ctrl_info['supported'],\n 'enabled': mba_ctrl_info['enabled']\n }\n return res, 200", "def get_lc(self, source_type, ap=1, clip=None, force_recalc=False):\n\n if source_type == 'pca':\n lcs = self._pca_lcs\n else:\n lcs = self._tfa_lcs\n\n if ap in lcs and not force_recalc:\n lc = lcs[ap]\n else:\n flux = self.get_mag(source_type, ap)\n t_ax = self.get_time_ax()\n if clip:\n mask = sigma_clip(flux, clip)\n flux = flux[mask]\n t_ax = t_ax[mask]\n lc = lk.LightCurve(time=t_ax, flux=flux)\n lcs[ap] = lc\n return lc", "def do_capabilities(cs, args):\n caps = cs.capabilities.list()\n fields = [\"scheme\", \"location\", \"term\", \"title\"]\n\n schemes = {i[\"scheme\"] for i in caps}\n\n print schemes\n for scheme in schemes:\n aux = [i for i in caps if scheme == i[\"scheme\"]]\n utils.print_list(aux, fields)", "def test_retrieveAnnouncedACPwithCSI(self) -> None:\n\t\tif TestRemote_Annc.remoteAcpRI is None:\n\t\t\tself.skipTest('remote ACP.ri not found')\n\t\tr, rsc = RETRIEVE(f'{REMOTEURL}/~{TestRemote_Annc.remoteAcpRI}', CSEID)\n\t\tself.assertEqual(rsc, RC.OK)", "def lca(self, v, w):", "def cplicense(server, ctype=\"vzzo\", action=\"add\"):\n # start Requests session\n sc = requests.Session()\n\n # import cookies from Firefox\n sc.cookies.update(get_cookies('imhsc.imhadmin.net'))\n\n # allow using a hostname (such as vps or dedicated server hostnames)\n try:\n servip = socket.gethostbyname(server)\n except socket.gaierror as e:\n print(\"!! 
%s: %s\" % (server, str(e)))\n return None\n\n # send request\n lresp = sc.post('https://imhsc.imhadmin.net/modules/Datacenter/datacenter_cplic.php',\n data={'ip': servip, 'type': ctype, 'action': action})\n\n print(\">> %s %s request for %s\" % (ctype, action.upper(), servip))\n print(\"** Got response from SC: %s\" % (lresp.text))\n\n return lresp", "def LCA(T, n1, n2):\n\n pass", "def get_lc(args, conn):\n try:\n lc = conn.get_all_launch_configurations(names=[args.old_lc_name, ])[0]\n except:\n print(\"FATAL ERROR:\")\n traceback.print_exc(file=sys.stdout)\n sys.exit(\"Failed to get source LC\")\n return lc", "def l2_from_l1c_dataset(self, datasetl1c, flags):\n if self.context.get_config_value(\"network\").lower() == \"w\":\n l2a_dim_sizes_dict = {\n \"wavelength\": len(datasetl1c[\"wavelength\"]),\n \"series\": len(np.unique(datasetl1c[\"series_id\"])),\n }\n dataset_l2a = self.hdsb.create_ds_template(\n l2a_dim_sizes_dict, \"W_L2A\", propagate_ds=datasetl1c, ds=datasetl1c\n )\n dataset_l2a = dataset_l2a.assign_coords(wavelength=datasetl1c.wavelength)\n\n series_id = np.unique(datasetl1c[\"series_id\"])\n dataset_l2a[\"series_id\"].values = series_id\n for variablestring in [\n \"acquisition_time\",\n \"viewing_azimuth_angle\",\n \"viewing_zenith_angle\",\n \"solar_azimuth_angle\",\n \"solar_zenith_angle\",\n \"epsilon\",\n \"rhof\",\n ]:\n temp_arr = np.empty(len(series_id))\n for i in range(len(series_id)):\n flagged = np.any(\n [\n DatasetUtil.unpack_flags(datasetl1c[\"quality_flag\"])[x]\n for x in flags\n ],\n axis=0,\n )\n ids = np.where(\n (datasetl1c[\"series_id\"] == series_id[i]) & (flagged == False)\n )\n # ids = np.where((datasetl1c['series_id'] == series_id[i]) & (\n # datasetl1c['quality_flag'] == 0))\n temp_arr[i] = np.mean(datasetl1c[variablestring].values[ids])\n dataset_l2a[variablestring].values = temp_arr\n\n if self.context.get_config_value(\"network\").lower() == \"l\":\n l2a_dim_sizes_dict = {\n \"wavelength\": len(datasetl1c[\"wavelength\"]),\n \"series\": len(datasetl1c[\"series_id\"]),\n }\n dataset_l2a = self.hdsb.create_ds_template(\n l2a_dim_sizes_dict, \"L_L2A\", propagate_ds=datasetl1c, ds=datasetl1c\n )\n dataset_l2a = dataset_l2a.assign_coords(wavelength=datasetl1c.wavelength)\n\n return dataset_l2a", "def get_capabilities(http_conn):\n parsed, conn = http_conn\n headers = {'Accept-Encoding': 'gzip'}\n conn.request('GET', parsed.path, '', headers)\n resp = conn.getresponse()\n body = resp.read()\n http_log((parsed.geturl(), 'GET',), {'headers': headers}, resp, body)\n if resp.status < 200 or resp.status >= 300:\n raise ClientException.from_response(\n resp, 'Capabilities GET failed', body)\n resp_headers = resp_header_dict(resp)\n return parse_api_response(resp_headers, body)", "def retrieve_capabilities(self, url, urlchain=[], pool=None, identity=None):\n\n # detect loops in capability links\n if url in urlchain:\n return\n\n if not self._default_url:\n self.set_default_url(url)\n\n if isinstance(url, str):\n url = urllib3.util.parse_url(url)\n\n if identity is None:\n identity = self._tls_state.extract_peer_identity(url)\n\n if pool is None:\n if url.host is not None:\n pool = self._tls_state.pool_for(url.scheme, url.host, url.port)\n else:\n raise ValueError(\"HttpInitiatorClient capability retrieval missing connection pool\")\n\n if url.path is not None:\n path = url.path\n else:\n path = \"/\"\n res = pool.request('GET', path)\n\n if res.status == 200:\n ctype = res.getheader(\"Content-Type\")\n if ctype == \"application/x-mplane+json\":\n\n # 
Probably an envelope. Process the message.\n self.handle_message(\n mplane.model.parse_json(res.data.decode(\"utf-8\")), identity)\n elif ctype == \"text/html\":\n # Treat as a list of links to capability messages.\n parser = CrawlParser()\n parser.feed(res.data.decode(\"utf-8\"))\n parser.close()\n for capurl in parser.urls:\n self.retrieve_capabilities(url=capurl,\n urlchain=urlchain + [url],\n pool=pool, identity=identity)", "def generate_l1ca_codes(self, prn):\n output_taps = self.l1_code_phase_assignments.loc[prn, 'CA_Phase_Select']\n g1 = self.generate_mls(10, self.g1_feedback_taps, [10])\n g2 = self.generate_mls(10, self.g2_feedback_taps, output_taps)\n ca_code = []\n for index, bit in enumerate(g1):\n ca_code.append(int((bit + g2[index]) % 2))\n return ca_code", "def get_actuator_info(self, c, ADDR, CH):\r\n if self.device_detected == True:\r\n resp = yield subprocess.check_output(\"cacli INFO \"+str(ADDR) + \" \" + str(CH))\r\n type = self.find_between(resp,\"TYPE :\",\"\\r\\n\")\r\n tag = self.find_between(resp,\"TAG :\",\"\\r\\n\")\r\n info = [type, tag]\r\n else:\r\n resp = \"Device not connected.\"\r\n info = [resp, resp]\r\n #Eventually make this actually throw an error instead of printing something\r\n returnValue(info)", "def get_database_ca_output(cluster_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDatabaseCaResult]:\n ..." ]
[ "0.602012", "0.5277708", "0.516976", "0.5158889", "0.5059789", "0.48698944", "0.4850678", "0.4820371", "0.4746068", "0.47289932", "0.47201055", "0.46725905", "0.46474597", "0.46150905", "0.46101293", "0.45349264", "0.4530791", "0.45228612", "0.4502531", "0.44954225", "0.4494993", "0.4485283", "0.4429776", "0.4415635", "0.44029027", "0.44002473", "0.43731055", "0.43599966", "0.43597046", "0.43554953" ]
0.706472
0
Makes call to regulations.gov and retrieves the docket data
def get_docket_data(api_key, docket_id): LOGGER.info('Requesting docket from regulations.gov') response = requests.get("https://api.data.gov:443/" + "regulations/v3/docket.json?api_key=" + api_key + "&docketId=" + docket_id) check_status(response.status_code) LOGGER.info('docket has been retrieved') return response.json()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_fuel_data():\n\treturn requests.get('http://www.fueleconomy.gov/ws/rest/fuelprices').text", "def get_drought_data():\n\n # Base URL for the DM homepage & AJAX URL for simulated request\n url = 'http://droughtmonitor.unl.edu/MapsAndData/GISData.aspx'\n ajax_url = 'http://droughtmonitor.unl.edu/Ajax.aspx/ReturnDMWeeks'\n\n # Based on AJAX HTTP POST request\n headers = {\n 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36',\n 'Accept': 'application/json, text/javascript, */*; q=0.01',\n 'Content-Type': 'application/json; charset=UTF-8',\n 'X-Requested-With': 'XMLHttpRequest'\n }\n\n # Sets requests Session & sends simulated AJAX request\n s = requests.Session()\n s.head(url)\n r = s.post(ajax_url, data='{}', headers=headers)\n\n # Grabs data, a list of valid dates for file reference\n json_data = json.loads(r.text)\n file_dates = json_data['d']\n\n print 'Writing file dates...'\n\n # Writes the USDM dates to a file for D3 reference\n datefile = open(basedir + '/app/static/data/saved/dates.json', 'w')\n datefile.write(json.dumps(sorted(file_dates), indent=1))\n datefile.close()\n\n print 'File dates from the USDM:'\n print file_dates\n print ''\n print 'Now comparing...'\n print ''\n\n dates = check_date(file_dates)\n if dates:\n file_dates = dates\n print 'Now collecting...'\n\n # Base URL for data files\n base_url = 'http://usdmdataservices.unl.edu/?mode=table'\n\n # Loops thru dates, saves files based on geographical focus & date\n for date in file_dates:\n fileout = urllib.URLopener()\n fileout.retrieve(base_url + '&aoi=county&date=' + date, basedir + '/app/static/data/raw/' + date + '.csv')\n\n print 'Finished.'\n else:\n print 'All data collected to date.'", "def acquire_data(year: int) -> dict:\n base_url: str = \"https://ucannualwage.ucop.edu\"\n search_url: str = base_url + \"/wage/search.action\"\n\n # Request headers copied of out Chrome's devtools.\n request_headers = {\n \"Host\": re.sub('https://', '', base_url),\n \"Content-Length\": '255',\n \"Origin\": base_url,\n \"User-Agent\":\n \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36\",\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n \"Accept\": \"application/json, text/javascript, */*; q=0.01\",\n \"X-Requested-With\": \"XMLHttpRequest\",\n \"DNT\": \"1\",\n \"Referer\": base_url + \"/wage/\",\n \"Accept-Encoding\": \"gzip, deflate, br\",\n \"Accept-Language\": \"en-US, en; q=0.8;q=0.6\",\n \"Cookie\": \"JSESSIONID=0000Uq2FDN8doIsM5DBz4pU0xzd:169l0fmr2\"\n }\n\n # Dummy request payload. 
Searches over all locations to search for any employee receiving between 1 and\n # 1 billion dollars in salary (aka, everyone).\n payload = \"_search=false&nd=1497757924608&rows=\" + \"10000000\" + \"&page=1&sidx=EAW_LST_NAM&sord=asc&year=\" + str(\n year\n ) + \"&location=ALL&firstname=&lastname=&title=&startSal=1&endSal=1000000000\"\n\n session = requests.Session()\n response = session.post(search_url, headers=request_headers, data=payload)\n\n try:\n response.raise_for_status()\n\n except requests.HTTPError as e:\n print(\"ERROR: \", e)\n exit(1)\n\n # Despite the response type being \"text/json\", calling `response.json()` fails immediately with the following error message:\n # json.errors.JSONDecodeError: Expecting property name enclosed in double quotes: line 2 column 1 (char 2)\n # Thus, we convert the response.text object to have double quotes instead of single quotes.\n # Additionally, there is an errant control character somehow embedded in response.text, which gives the error:\n # json.decoder.JSONDecodeError: Invalid control character at: line 185849 column 69 (char 22761096)\n # To override this, we must set the 'strict' property to false.\n # See: https://docs.python.org/3/library/json.html#json.JSONDecoder\n # 'If strict is false...'\n\n return json.loads(response.text.replace(\"\\'\", \"\\\"\").encode('utf-8'),\n strict=False)", "def main():\n #bearer_token = obtain_bearer_token(API_HOST, TOKEN_PATH)\n bearer_token ='SHdrjUqMJXqXBKUc7bGIplM8y6tnbwZbXXDbWPCd9wWMP8tX9PdJrC5MZHwJRhb7jMtLjXxT-hsWjNf2OkdiDWd30HsS84AVI5iRnrpxkak3HbWXAdUKvraQ_wgXWXYx'\n response = transaction_search(bearer_token, '1910 Entrepreneur Dr, Raleigh, NC 27518')\n response = response.get('businesses')\n print(json.dumps(response, indent=4))", "def get_reg_data(now, request):\n if hasattr(request, \"myuw_reg_data\"):\n return request.myuw_reg_data\n term_reg_data = {\n \"after_start\": False,\n \"after_summer1_start\": False,\n \"after_summerA_start\": False,\n \"period1_started\": False,\n \"myplan_peak_load\": False\n }\n next_term = get_next_quarter(request)\n get_term_reg_data(now, next_term, term_reg_data)\n # We need to show this term's registration stuff, because\n # the period 2 stretches past the grade submission deadline\n current_term = get_current_quarter(request)\n get_term_reg_data(now, current_term, term_reg_data)\n # We also need to be able to show the term after next, in spring quarter\n term_after_next = get_term_after(next_term)\n get_term_reg_data(now, term_after_next, term_reg_data)\n request.myuw_reg_data = term_reg_data\n return term_reg_data", "def get_data():\n \n \"\"\" Prepare variables\"\"\"\n urls = {\"cases\": \"https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Confirmed.csv\",\n \"deaths\": \"https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Deaths.csv\",\n \"recovered\": \"https://github.com/CSSEGISandData/COVID-19/raw/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Recovered.csv\"}\n\n localnames = {\"cases\": \"Cases.csv\",\n \"deaths\": \"Deaths.csv\",\n \"recovered\": \"Recovered.csv\"}\n\n dfs = {\"cases\": None,\n \"deaths\": None,\n \"recovered\": None}\n\n \"\"\" Download\"\"\"\n for key in urls.keys():\n url = urls[key]\n localname = localnames[key]\n urllib.request.urlretrieve(url, localname)\n\n \"\"\" Load variables\"\"\"\n for key in dfs.keys():\n dfs[key] = pd.read_csv(localnames[key])\n \n \"\"\" 
Return\"\"\"\n return(dfs)", "def get_car():\n params = (\n ('fields', 'batteryLevel,connection.connected,connection.since,doors.allClosed,doors.leftOpen,doors.locked,doors.rightOpen,doors.trunkOpen,engineOn,fuelLevel,geo.latitude,geo.longitude,immobilizerEngaged,mileage,powerState,vin'),)\n r = requests.get(HOST, params=params, cert='pixelcamp.pem')\n print(json.dumps(r.json(), sort_keys=True, indent=2))\n return r.json()", "def covid_fetch():\n #Sets the structure of the data retrieved from the API\n cases_and_deaths = {\n \"date\": \"date\",\n \"areaName\": \"areaName\",\n \"areaCode\": \"areaCode\",\n \"newCasesByPublishDate\": \"newCasesByPublishDate\",\n \"cumCasesByPublishDate\": \"cumCasesByPublishDate\",\n \"newDeathsByDeathDate\": \"newDeathsByDeathDate\",\n \"cumDeathsByDeathDate\": \"cumDeathsByDeathDate\"\n }\n #Sets the filter for the API using config.json\n covid_nation = ['areaType=nation']\n nation = 'areaName=' + str(config_fetcher(\"covid_region\"))\n covid_nation.append(nation)\n\n #Gets API latest data\n covid_api = Cov19API(\n filters = covid_nation,\n structure = cases_and_deaths,\n )\n #Gets data in form of dictionary\n covid_json = covid_api.get_json()\n #Gets timestamp for last update\n covid_timestamp = covid_api.last_update\n #Assign data to variables\n covid_data = covid_json['data'] #This formats the data as a list, while I want a dictionary, hence the next line.\n return covid_data", "def get_requests():\n global response\n\n #Set the parameters fot the request\n url = \"https://api.nasa.gov/planetary/apod\"\n api_key = \"DEMO_KEY\" #Use your own key\n date = calender.get_date()\n\n querystring = {'api_key':api_key, 'date':date}\n\n #Call the request and turn it into a python usable format\n response = requests.request(\"GET\", url, params=querystring)\n response = response.json()\n\n #Update output label\n set_info()", "def get_new_modelling_data():\n # get latest epidemic data from OWID \n\n df = pd.read_json(requests.get(\"https://covid.ourworldindata.org/data/owid-covid-data.json\").content)\n data = pd.DataFrame(df[\"POL\"][\"data\"])\n\n # get latest government restriction data from Oxford tracker\n response = requests.get(\"https://raw.githubusercontent.com/OxCGRT/covid-policy-tracker/master/data/OxCGRT_latest.csv\").content\n rest = pd.read_csv(io.StringIO(response.decode('utf-8')))\n rest = rest[rest.CountryName == \"Poland\"]\n\n modelling = pd.DataFrame(Mobility.objects.values())\n prepare_model_data(data,rest,modelling)", "def getMarketsData(marketsField, output_type = None):\n try:\n _create_unverified_https_context = ssl._create_unverified_context\n except AttributeError:\n pass\n else:\n ssl._create_default_https_context = _create_unverified_https_context\n \n fields =['commodities', 'currency', 'index', 'bonds']\n if marketsField not in fields:\n raise ParametersError ('Accepted values for marketsField are \\'commodity\\', \\'currency\\', \\'index\\' or \\'bonds\\'.')\n linkAPI = 'https://api.tradingeconomics.com/markets/' + quote(marketsField) \n try:\n linkAPI = linkAPI + '?c=' + glob.apikey\n except AttributeError:\n raise LoginError('You need to do login before making any request')\n try: \n code = urlopen(linkAPI)\n code = code.getcode() \n webResults = json.loads(urlopen(linkAPI).read().decode('utf-8'))\n except ValueError:\n raise WebRequestError ('Something went wrong. 
Error code = ' + str(code)) \n if len(webResults) > 0:\n if marketsField == 'bonds':\n names = ['symbol','name', 'country', 'date', 'last', 'group','url','importance','dailychange','dailypercentualchange','weeklychange','weeklypercentualchange','monthlychange','monthlypercentualchange','yearlychange','yearlypercentualchange','ydtchange','ydtpercentualchange','yesterday','lastweek','lastmonth','lastyear','startyear']\n names2 = ['Symbol','Name', 'Country', 'Date', 'Last', 'Group','URL','Importance','DailyChange','DailyPercentualChange','WeeklyChange','WeeklyPercentualChange','MonthlyChange','MonthlyPercentualChange','YearlyChange','YearlyPercentualChange','YTDChange','YTDPercentualChange','yesterday','lastWeek','lastMonth','lastYear','startYear']\n else:\n names = ['symbol','ticker','name', 'country', 'date', 'last', 'group','url','importance','dailychange','dailypercentualchange','weeklychange','weeklypercentualchange','monthlychange','monthlypercentualchange','yearlychange','yearlypercentualchange','ydtchange','ydtpercentualchange','yesterday','lastweek','lastmonth','lastyear','startyear']\n names2 = ['Symbol','Ticker','Name', 'Country', 'Date', 'Last', 'Group','URL','Importance','DailyChange','DailyPercentualChange','WeeklyChange','WeeklyPercentualChange','MonthlyChange','MonthlyPercentualChange','YearlyChange','YearlyPercentualChange','YTDChange','YTDPercentualChange','yesterday','lastWeek','lastMonth','lastYear','startYear'] \n maindf = pd.DataFrame() \n for i in range(len(names)):\n names[i] = [d[names2[i]] for d in webResults]\n maindf = pd.concat([maindf, pd.DataFrame(names[i], columns = [names2[i]])], axis = 1)\n else:\n raise ParametersError ('No data available for the provided parameters.')\n if output_type == None or output_type =='df': \n output = maindf.dropna()\n elif output_type == 'raw': \n output = webResults\n else: \n raise ParametersError ('output_type options : df(defoult) for data frame or raw for unparsed results.') \n return output", "def scrapeFormulations(conn):\n c = conn.cursor()\n # Return the results as discussed with Adi\n query = \"\"\"SELECT f10.description, f10.landed_cost_price, f10.fob_price,\n f10.period, f10.issue_unit, country.name, country.id,\n f10.fob_currency, f10.landed_cost_currency, f10.period\n FROM form10_row AS f10\n INNER JOIN country ON f10.country = country.id\n ORDER BY f10.description, country.name\"\"\"\n c.execute(query)\n results = []\n for row in c:\n result = {}\n result['formulation'] = row[0].replace('*', '')\n result['landed_cost_price'] = row[1] or None\n result['fob_price'] = row[2] or None\n result['period'] = row[3]\n result['unit'] = row[4]\n result['country'] = row[5]\n result['country_id'] = country_codes[row[6]]\n result['fob_currency'] = row[7]\n result['landed_currency'] = row[8]\n result['period'] = int(row[9])\n results.append(result)\n return results", "def _extract_dosing_regimens(self, dose_key, duration_key):\n # Create duration column if it doesn't exist and set it to default\n # bolus duration of 0.01\n if duration_key is None:\n duration_key = 'Duration in base time unit'\n self._data[duration_key] = 0.01\n\n # Extract regimen from dataset\n regimens = dict()\n for label in self._ids:\n # Filter times and dose events for non-NaN entries\n mask = self._data[self._id_key] == label\n data = self._data[\n [self._time_key, dose_key, duration_key]][mask]\n mask = data[dose_key].notnull()\n data = data[mask]\n mask = data[self._time_key].notnull()\n data = data[mask]\n\n # Add dose events to dosing regimen\n regimen 
= myokit.Protocol()\n for _, row in data.iterrows():\n # Set duration\n duration = row[duration_key]\n if np.isnan(duration):\n # If duration is not provided, we assume a bolus dose\n # which we approximate by 0.01 time_units.\n duration = 0.01\n\n # Compute dose rate and set regimen\n dose_rate = row[dose_key] / duration\n time = row[self._time_key]\n regimen.add(myokit.ProtocolEvent(dose_rate, time, duration))\n\n regimens[label] = regimen\n\n return regimens", "def test_deaths_get(self):\n query_string = [('label', 'label_example'),\n ('page', 1),\n ('per_page', 100)]\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/deaths',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def get_price_data(ticker, days_befoure):\r\n #config_file=raw_input('config file: ')\r\n config_file=\"d:/tmp/moex.json\" \r\n try:\r\n with open(config_file) as config_file: \r\n conn_data = json.load(config_file)\r\n except:\r\n print \"Error: Unable to read config file. \"\r\n sys.exit(1)\r\n\r\n username = conn_data['username']\r\n password = conn_data['password']\r\n my_config = Config(user=username, password=password, proxy_url='')\r\n\r\n my_auth = MicexAuth(my_config)\r\n date = datetime.datetime.now() - datetime.timedelta(days_befoure)\r\n \r\n #ticker = 'SBER' # for tesing...\r\n \r\n if my_auth.is_real_time():\r\n iss = MicexISSClient(my_config, my_auth, MyDataHandler, MyData)\r\n iss.get_history_securities('stock',\r\n 'shares',\r\n 'tqbr',\r\n ticker, \r\n date.strftime(\"%Y-%m-%d\")\r\n #here to be start end dates\r\n )\r\n #print iss.handler.data.history\r\n return iss.handler.data.as_dataframe()", "def fetch_data():\n if request.method == 'GET':\n return (\"Use this endpoint with POST method to fetch data\", 200)\n elif request.method == 'POST':\n # request data\n app.logger.info(\"Requesting data\")\n data = get_data('regulatorydecision')\n\n # write to file\n app.logger.info(\"Writing to file\")\n write_to_file(data)\n\n # upload to cloud storage\n app.logger.info(\"Uploading to GCS\")\n upload_to_gcs('data.json', 'health-ca-data-staging')\n\n # publish message to pubsub\n app.logger.info(\"Publishing status message to Pubsub\")\n message = \"Data uploaded to GCS\"\n pubsub_publish('projects/health-ca-data/topics/gcs_load', message, \"\")\n\n return (\"Fetching data\", 200)", "def main():\n station = \"Merikannontie\"\n coefs, score = cycling_weather_linregr(station)\n print(f\"Measuring station: {station}\")\n print(\n f\"Regression coefficient for variable 'precipitation': {coefs[0]:.1f}\")\n print(f\"Regression coefficient for variable 'snow depth': {coefs[1]:.1f}\")\n print(f\"Regression coefficient for variable 'temperature': {coefs[2]:.1f}\")\n print(f\"Score: {score:.2f}\")\n return", "def get_data(tstart, tstop, year, grad_list, out_dir):\n print(\"Period: \" + str(tstart) + '<-->' + str(tstop) + ' in Year: ' + str(year))\n#\n#--- extract ecach group data\n#\n for group in grad_list:\n print(group)\n\n line = 'operation=retrieve\\n'\n line = line + 'dataset = mta\\n'\n line = line + 'detector = grad\\n'\n line = line + 'level = 0.5\\n'\n line = line + 'filetype = ' + group + '\\n'\n line = line + 'tstart = ' + str(tstart) + '\\n'\n line = line + 'tstop = ' + str(tstop) + '\\n'\n line = line + 'go\\n'\n\n data_list = mcf.run_arc5gl_process(line)\n#\n#--- read the first fits file and prep for the data list\n#\n [cols, tbdata] = 
ecf.read_fits_file(data_list[0])\n col_list = []\n for ent in cols:\n if ent.lower() == 'time':\n continue\n mc = re.search('st_', ent.lower())\n if mc is not None:\n continue\n\n col_list.append(ent)\n\n mcf.rm_files(data_list[0])\n tdata = tbdata['time']\n mdata = []\n for col in col_list:\n mdata.append(tbdata[col])\n#\n#--- read the rest of the data\n#\n clen = len(col_list)\n for k in range(1, len(data_list)):\n fits = data_list[k]\n [cols, tbdata] = ecf.read_fits_file(fits)\n tdata = numpy.append(tdata, tbdata['time'])\n\n for m in range(0, clen):\n cdata = tbdata[col_list[m]]\n mdata[m] = numpy.append(mdata[m], cdata)\n\n mcf.rm_files(fits)\n\n dout = out_dir + group.capitalize() + '/'\n\n if not os.path.isdir(dout):\n cmd = 'mkdir ' + dout\n os.system(cmd)\n#\n#--- write out the data to fits file\n#\n for k in range(0, clen):\n col = col_list[k]\n ocols = ['time', col.lower()]\n cdata = [tdata, mdata[k]]\n\n ofits = dout + col.lower()+ '_full_data_' + str(year) +'.fits'\n\n if os.path.isfile(ofits):\n ecf.update_fits_file(ofits, ocols, cdata)\n else:\n ecf.create_fits_file(ofits, ocols, cdata)\n\n#\n#--- zip the fits file from the last year at the beginning of the year\n#\n ecf.check_zip_possible(dout)", "def main():\n data = get_sales_data()\n sales_data = [int(num) for num in data]\n update_worksheet(sales_data, 'sales')\n new_surplus_data = calculate_surplus_sandwiches(sales_data)\n update_worksheet(new_surplus_data, 'surplus')\n list_of_last_five_sales = get_last_five_sales_entries()\n stock_data = get_average_sales(list_of_last_five_sales)\n update_worksheet(stock_data, 'stock')\n return stock_data", "def get_data(rics: list, fields: list):\n data, err = ek.get_data(rics, fields)\n if err:\n print(err)\n return data", "def getEconData(self, type:str, start_date:date, end_date:date):\n #data = nasdaqdatalink.get(\"FRED/GDP\", start_date=\"2001-12-31\", end_date=\"2005-12-31\")\n data = nasdaqdatalink.get(type, start_date=start_date, end_date=end_date)\n\n print(data)", "def request_data_dictionary(main=False) -> requests.Response:\n config = load_config()\n redcap_url = 'https://redcap.miami.edu/api/'\n\n project_name = \"New REDCap\" if main is True else \"MedIT Test project\"\n base = config['test_token'] if main is False else config['main_token']\n request_data = {\n 'token': base,\n 'content': 'metadata',\n 'format': 'json',\n 'returnFormat': 'json'\n }\n\n print(f\"Downloading dictionary from {project_name}...\")\n response = requests.post(redcap_url, request_data)\n return response", "def get_recovered():\n # Deprecated warning\n url = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/\"\n warnings.warn(\"This function is deprecated. 
Use get_data_jhu instead; see tutorials at <https://github.com/PayneLab/covid19pandas/tree/master/docs/>.\", DeprecatedWarning, stacklevel=2)\n print(\"These data were obtained from Johns Hopkins University (https://github.com/CSSEGISandData/COVID-19).\")\n return _get_table(url, \"time_series_covid19_recovered_global.csv\", source=\"jhu\", update=True)", "def Collect_data(clo, cla, FD, LD, ZC, idx_time, cntry, TZ):\r\n #initiate the console response\r\n rep = \"\"\r\n global ddf\r\n \r\n #define the noaa server access\r\n server_noaa = \"https://www.ncei.noaa.gov/data/global-hourly/access/\"\r\n \r\n #Initate dateframe\r\n data = pd.DataFrame()\r\n \r\n #Convert date from specified timezone to UTC\r\n FDc = Date_calibration(FD, 1, TZ)\r\n LDc = Date_calibration(LD, 1, TZ)\r\n \r\n #define timestep\r\n timestep = \"1 H\"\r\n \r\n #Loop on the range time by each year\r\n for y in range(FDc.year, (LDc.year + 1)):\r\n rep += '--- Collect Year ['+str(y) + '] --- \\n' \r\n \r\n #Loop on each weather station while the ouput data is good\r\n #weather station in the year instancied dataframe \r\n for i in range(len(ddf[y])) :\r\n \r\n #Define the memory key : year_zipcode\r\n key_d_z = str(y)+'_'+str(ZC)\r\n \r\n #Verify if the data is already in the memory\r\n if(key_d_z in memory_df.keys()) :\r\n #The data is already in the memory :\r\n #Collect the data and go next (save compute time and server solicitation)\r\n ext_data = memory_df[key_d_z]\r\n NS, DO = memory_NS_DO[key_d_z].split('_')[0], memory_NS_DO[key_d_z].split('_')[1]\r\n break\r\n else:\r\n \r\n #The data isn't in the memory :\r\n \r\n #Collect information about the nearest weather station from the zip code\r\n rs, NS, DO = Fetch_station(clo, cla, y)\r\n \r\n #Generate the ftp key weather station\r\n code_station = str(ddf[y]['USAF'][rs])+str(ddf[y]['WBAN'][rs])\r\n \r\n #Define the server access\r\n url = server_noaa+str(y)+'/'+code_station+'.csv'\r\n \r\n #Get the data\r\n req = requests.get(url)\r\n \r\n #The server answer\r\n if(req.status_code == 200):\r\n \r\n #Extract the data (only date and temperature)\r\n ext_data = pd.read_csv(url, usecols=['DATE','TMP'])\r\n \r\n #Check if the data isn't empty (1000 is arbitrary)\r\n if(len(ext_data) > 1000):\r\n \r\n #Format data\r\n ext_data, qual = Data_formatting(ext_data, y, timestep, TZ)\r\n \r\n #Check if the data quality respect the quality rule\r\n if(qual > quality_rule):\r\n \r\n #Save the date in the memory\r\n memory_df[key_d_z] = ext_data\r\n memory_NS_DO[key_d_z] = str(NS)+'_'+str(DO)\r\n \r\n #Response for the GUI\r\n rep += \"# Station [\"+str(NS)+\"] valid : \\n\"\r\n rep += \"- Quality density data : \"+str(round(qual, 2))+\"% \\n\"\r\n rep += \"- Great circle distance : \"+str(round(DO,2))+\"km \\n\"\r\n break\r\n else:\r\n #The data quality is too bad\r\n #Response for the GUI\r\n rep += \"# Station [\"+str(NS)+\"] invalid : \\n\"\r\n rep += \"- Quality density data : \"+str(round(qual,2))+\"% \\n\"\r\n rep += \"> Quality criterion unsatisfied \\n\"\r\n \r\n #Delete the weather station in the dataframe (instancied by year)\r\n ddf[y] = ddf[y].drop(rs).reset_index(drop=True)\r\n continue\r\n \r\n else:\r\n #The data density is too low\r\n \r\n #Response for the GUI\r\n rep += \"# Station [\"+str(NS)+\"] invalid : \\n\"\r\n rep += \"> Low data volume \\n\"\r\n \r\n #Delete the weather station in the dataframe (instancied by year)\r\n ddf[y] = ddf[y].drop(rs).reset_index(drop=True)\r\n\r\n continue\r\n else:\r\n #The NOAA doesn't answer for the code station\r\n rep += \"# 
Station [\"+str(NS)+\"] invalid : \\n\"\r\n rep += \"> Server doesn't answer\\n\"\r\n \r\n #Delete the weather station in the dataframe (instancied by year)\r\n ddf[y] = ddf[y].drop(rs).reset_index(drop=True)\r\n\r\n continue\r\n #Add data in the Dataframe\r\n data = data.append(ext_data)\r\n \r\n #Define a new dataframe mark out by the specified time range\r\n range_time = pd.DataFrame(index=pd.DatetimeIndex(start=FD, end=LD, freq=timestep))\r\n \r\n #Paste data in the time marked out Dataframe\r\n range_time['Temp'] = data['TMP']\r\n \r\n #Calculate the amount of NaN (global)\r\n nb_nan = range_time['Temp'].isnull().sum()\r\n \r\n #Calculate the global quality\r\n quality = (1 - (nb_nan) / len(range_time)) * 100\r\n \r\n #Fill the gap data by temporal interpolation\r\n data = range_time.interpolate(method='time')\r\n data = data.ffill().bfill()\r\n data['Temp'] = round(data['Temp'], 2)\r\n \r\n \r\n #If specified index time, cut the data for it\r\n if(len(idx_time)> 2):\r\n final_data = pd.DataFrame(index = pd.to_datetime(idx_time))\r\n final_data['Temp'] = round(data['Temp'], 2)\r\n #final_data = final_data.resample(rule = str(timestep)).mean()\r\n #final_data = final_data.dropna()\r\n else:\r\n final_data = data\r\n \r\n return final_data, quality, NS, DO, rep", "def get_sales_data():\n print(\"Retrieving all the sales information...\")\n data = SHEET.worksheet('sales')\n print(\"Compilation complete!\\n\")\n return data", "def dataframe():\n\t#allows function to access station, gmt, and miss_station functions\n global stations\n\tglobal gmt\n\tglobal miss_station\n\t\n\t#read predictor file\n\tcontrol = cfg.read_yaml('../registry/graphs.yaml')\n\tpred_ctrl = cfg.read_yaml(cfg.get_config_path(control.pred_file))\n\tpredd_ctrl = cfg.read_yaml(cfg.get_config_path(control.predd_file))\n\n\t#get file paths and update database\n\tpredictor_file_path = control.predictor_file_path\n\tpredictand_file_path = control.predictand_file_path\n\tpred_file_id = update(predictor_file_path)\n\tpredd_file_id = update(predictand_file_path)\n\t\n\t#store lead time and date range\n\tlead_time = control.lead_time\n\tdate_range = control.date_range\n\n\t#get info for fetch many dates\n\tstart,end,stride = read_pred.parse_range(date_range)\n\tfcst_ref_time = control.date_range[0].split('-')[0][-2:]\n\t\n\t#initialize list of predictors\n\tpred_list = pred_ctrl.predictors\n\tpredictor = []\n\n\t#loops through predictors to build camps data objects\n\tfor entry_dict in pred_list:\n\t\t#formats metadata\n\t\tpred = create.preprocess_entries(entry_dict, fcst_ref_time)\n\t\t\n\t\t#adds info to metadata that's not currently being stored\n\t\tpred.search_metadata['reserved2'] = lead_time*3600\n pred.search_metadata['file_id'] = pred_file_id\n\t\tpred.search_metadata['reserved1'] = 'vector'\n\n\t\t#build camps data objects for each day\n\t\tvariable = fetch_many_dates(predictor_file_path,start,end,stride,pred.search_metadata)\n\t\t\n\t\t#appends all data to single camps object\n\t\tif variable[0] is not None:\n\t\t\tvar = variable[0]\n\t\t\tarrs = []\n\t\t\tfor i in range(len(variable)):\n\t\t\t\tarrs.append(variable[i].data)\n\t\t\tvar.data = np.stack(arrs)\n\t\t\tpredictor.append(var)\n\n\t#initializes list of predictands\n\tpredd_list = predd_ctrl.predictands\n predictand = []\n\t\n\t#loops through predictands to build camps data objects\n for entry_dict in predd_list:\n\t\t#formats metadata\n \tvertical_coordinate = entry_dict.pop('Vertical_Coordinate')\n\t\tentry_dict['file_id'] = predd_file_id\n\n\t\t#build 
camps objects for each day\n variable = fetch_many_dates(predictand_file_path,start, end, stride, entry_dict)\n\n\t\t#append all data to single camps object\n var = variable[0]\n arrs = []\n for i in range(len(variable)):\n arrs.append(variable[i].data)\n try:\n\t\t\tvar.data = np.stack(arrs)\n\t\t\tpredictand.append(var)\n\t\texcept:\n\t\t\tprint(\"Can't read \" + variable.name)\n\n\t#getting predictor station and time data\n\tpredr = Dataset(predictor_file_path[0])\n\tpredr_stat = predr.variables['station'][:]\n\tif lead_time == 3:\n\t\tpredr_time = predr.variables['OM__phenomenonTimeInstant'][:]\n\telif lead_time == 6:\n\t\tpredr_time = predr.variables['OM__phenomenonTimeInstant1'][:]\n\telif lead_time == 12:\n\t\tpredr_time = predr.variables['OM__phenomenonTimeInstant2'][:]\n\tpredr.close()\n\n\t#reformatting predictor station and time data\n\tpredr_stations = stations(predr_stat)\n\tpredr_gmt = gmt(predr_time)\n\t\n\t#getting predictand station and time data\n\tpredd = Dataset(predictand_file_path[0])\n\tpredd_stat = predd.variables['station'][:]\n\tpredd_time = predd.variables['OM__resultTime'][:]\n\tpredd.close()\n\t\n\t#reformatting predictand station and time data\n\tpredd_stations = stations(predd_stat)\n\tpredd_gmt = gmt(predd_time)\n\n\t#choosing predictand observations that line up with predictor time\n\thour = (predictor[0].metadata['FcstTime_hour']/3600) + lead_time\n\tdays = len(predd_gmt)/24\n\tpredd_hours = [0]*days\n k=0\n for i in range(len(predd_gmt)):\n if i%24 == hour:\n\t\t\tpredd_hours[k]=predd_gmt[i]\n\t\t\tk+=1\n\t\n\t#catches when GFS data doesn't cover the last day of the month\n\tif len(predr_gmt) < len(predd_hours):\n\t\tpredd_hours = predd_hours[:-1]\t\n\t\n\t#find missing stations\n\tmiss_stations = miss_station(predr_stations,predd_stations)\n\tstations = predd_stations\n\t\n\t#station and time array\n\tinfo = [['',''] for k in range(len(predr_gmt)*len(stations))]\n\tfor i in range(len(predr_gmt)):\n\t\tfor j in range(len(stations)):\n\t\t\tk = i*len(stations)+j\n\t\t\tinfo[k][0]=predr_gmt[i]\n\t\t\tinfo[k][1]=stations[j]\n\n\t#create column names\n\tnames = ['']*(len(predictor)+len(predictand)+2)\n\tnames[0]='Time'\n\tnames[1]='Station'\n\n\t#creating array\n\tarr = np.zeros((len(stations)*len(predr_gmt),len(predictor)+len(predictand)))\n\t\n\t#adding predictor data\n\tfor i in range(len(predictor)):\n\t\t#remove lead time and forecast reference time from variable name\n\t\t#and add variable name to column list of final dataframe\n\t\tif lead_time == 12:\n\t\t\tnames[i+2]='GFS_'+predictor[i].get_variable_name()[:-11]\n\t\telse:\n\t\t\t names[i+2]='GFS_'+predictor[i].get_variable_name()[:-10]\n\n\t\t#create pandas dataframe of data and sort alphabetically by station name\n\t\tpredictor[i].data = np.squeeze(predictor[i].data,axis=2)\n\t\tpredictor[i].data = pd.DataFrame(predictor[i].data,columns=predr_stations,index=predr_gmt)\n\t\tpredictor[i].data = predictor[i].data.reindex(sorted(predictor[i].data.columns),axis=1)\n\t\t\n\t\t#remove stations with no predictand data\n\t\tk=0\n\t\ta=miss_stations[:]\n\t\tfor j in predictor[i].data.columns:\n\t\t\tif not a:\n\t\t\t\tbreak\n\t\t\tif j==a[k]:\n\t\t\t\tpredictor[i].data=predictor[i].data.drop(j,axis=1)\n\t\t\t\tdel a[k]\n\t\t\n\t\t#add data to final dataframe\n\t\tfor b in range(len(predr_gmt)):\n\t\t\tfor c in range(len(stations)):\n\t\t\t\tk = b*len(stations)+c\n\t\t\t\tarr[k][i] = predictor[i].data.iloc[b][c]\n\n\t#add predictand data\n\tfor i in range(len(predictand)):\n\t\t#removing extra underscore, 
adding variable name to column names\n\t\tnames[len(predictor)+2+i]='METAR_'+predictand[i].get_variable_name()[:-1]\n\t\n\t\t#resize array and create pandas dataframe\n\t\tpredictand[i].data = np.squeeze(predictand[i].data,axis=2)\n\t\tpredictand[i].data = pd.DataFrame(predictand[i].data,columns=predd_stations,index=predd_hours)\n\t\tpredictand[i].data = predictand[i].data.reindex(sorted(predictand[i].data.columns),axis=1)\n\t\t\n\t\t#remove extra days of predictand data\n\t\tpredictand[i].data = predictand[i].data.iloc[0:len(predr_time),:]\n\t\t\t\n\t\t#add predictand data to array\n\t\tfor b in range(len(predr_gmt)):\n\t\t\tfor c in range(len(stations)):\n\t\t\t\tk = b*len(stations)+c\n\t\t\t\tval = predictand[i].data.iloc[b][c]\n\t\t\t\t\n\t\t\t\t#catch metar fill data\n\t\t\t\tif val == 9999: \n\t\t\t\t\tval = np.nan\n\t\t\t\tarr[k][len(predictor)+i]=val\n\t\n\t#add station and time data to array and save as csv\n\tdata = np.concatenate([info,arr],axis = 1)\n\tto_save = pd.DataFrame(data,columns=names)\n\tto_save.to_csv(str(start)+'_'+str(end)+'_'+str(lead_time)+'hrs.csv')", "def getCSD (LFP_input_data=None,LFP_input_file=None,sampr=None,dt=None,spacing_um=None,minf=0.05,maxf=300,norm=True,vaknin=True,save_to_sim=True,getAllData=False): # timeRange=None,\n\n ############### DEFAULT -- CONDITION 1 : LFP DATA COMES FROM SIMULATION ###############\n\n if LFP_input_data is None and LFP_input_file is None: ### GET LFP DATA FROM SIMULATION\n try:\n from .. import sim \n except:\n print('No LFP input data, input file, or existing simulation. Cannot calculate CSD.')\n else:\n ## Check if LFP was recorded during the simulation \n print('getCSD() is using LFP data from existing simulation.')\n\n\n # time step used in simulation recording \n if dt is None:\n dt = sim.cfg.recordStep # units: ms \n print('dt = ' + str(dt) + ' (units: ms)')\n\n\n sim_data_categories = sim.allSimData.keys()\n \n # Get LFP data from sim and instantiate as a numpy array \n if 'LFP' in sim_data_categories:\n lfp_data = np.array(sim.allSimData['LFP'])\n print('lfp_data shape = ' + str(lfp_data.shape))\n elif 'LFP' not in sim_data_categories:\n print('!! WARNING: NO LFP DATA !! Need to re-run simulation with cfg.recordLFP enabled')\n\n\n # Sampling rate of data recording during the simulation \n if sampr is None:\n sampr = 1./(sim.cfg.recordStep/1000.0) # divide by 1000.0 to turn denominator from units of ms to s\n\n\n # Spacing between electrodes --> convert from micron to mm \n if spacing_um is None:\n print('NOTE: using sim.cfg.recordLFP to determine spacing_um !!')\n spacing_um = sim.cfg.recordLFP[1][1] - sim.cfg.recordLFP[0][1]\n\n\n\n\n ############### CONDITION 2 : ARBITRARY LFP DATA ############################\n ## NOTE: EXPAND CAPABILITY TO INCLUDE LIST OF MULTIPLE FILES \n \n ## LOAD SIM DATA FROM JSON FILE\n elif LFP_input_data is None and '.json' in LFP_input_file:\n data = {}\n with open(LFP_input_file) as file:\n data['json_input_data'] = json.load(file)\n\n ## FOR MULTIPLE FILES\n #for x in LFP_input_file:\n #with open(x) as file:\n #data[x] = json.load(file)\n\n\n ## EXTRACT LFP DATA \n for key in data.keys:\n lfp_data_list = data[key]['simData']['LFP'] # only works in the 1 input file scenario; expand capability for multiple files \n \n ## CAST LFP DATA AS NUMPY ARRAY \n lfp_data = np.array(lfp_data_list)\n\n ## GET CSD DATA AND RELEVANT PLOTTING PARAMS \n csd_data = {}\n for i in data.keys():\n csd_data[i] = {} # e.g. 
csd['json_input_data'] = {}\n\n if sampr is None:\n csd_data[i]['sampr'] = 1./((data[i]['simConfig']['recordStep'])/1000.0) # assumes that data[i]['simConfig']['recordStep'] is in units of ms\n sampr = csd_data[i]['sampr']\n else:\n csd_data[i]['sampr'] = sampr\n\n if spacing_um is None:\n csd_data[i]['spacing_um'] = data[i]['simConfig']['recordLFP'][1][1] - data[i]['simConfig']['recordLFP'][0][1]\n spacing_um = csd_data[i]['spacing_um']\n else:\n csd_data[i]['spacing_um'] = spacing_um\n\n if dt is None:\n csd_data[i]['dt'] = data[i]['simConfig']['recordStep']\n dt = csd_data[i]['dt']\n else:\n csd_data[i]['dt'] = dt\n\n\n\n ## FOR LIST OF LFP DATA WITHOUT ANY .JSON INPUT FILE \n elif len(LFP_input_data) > 0 and LFP_input_file is None: # elif LFP_input_file is None and ...\n lfp_data = np.array(LFP_input_data) # get lfp_data and cast as numpy array\n\n\n\n\n ##############################################################################\n # Now lfp_data exists for either existing (e.g. empirical) or simulated data \n ##############################################################################\n\n # Convert spacing from microns to mm \n spacing_mm = spacing_um/1000\n\n # Bandpass filter the LFP data with getbandpass() fx defined above\n datband = getbandpass(lfp_data,sampr,minf,maxf) \n\n # Take CSD along smaller dimension\n if datband.shape[0] > datband.shape[1]:\n ax = 1\n else:\n ax = 0\n\n # VAKNIN CORRECTION\n if vaknin: \n datband = Vaknin(datband)\n\n # NORM <-- ASKING SAM MORE ABOUT THIS\n if norm: \n removemean(datband,ax=ax)\n\n # now each column (or row) is an electrode -- take CSD along electrodes\n CSD_data = -np.diff(datband,n=2,axis=ax)/spacing_mm**2 ## CSD_data should be in mV/mm**2, assuming that LFP data is in mV. \n \n\n ########################################\n ########## noBandpass trial ############\n ########################################\n datband_noBandpass = lfp_data.T\n \n if datband_noBandpass.shape[0] > datband_noBandpass.shape[1]:\n ax = 1\n else:\n ax = 0\n \n if vaknin:\n datband_noBandpass = Vaknin(datband_noBandpass)\n \n if norm:\n removemean(datband_noBandpass,ax=ax)\n \n CSD_data_noBandpass = -np.diff(datband_noBandpass,n=2,axis=ax)/spacing_mm**2 # noBandpass trial \n ##########################################\n\n\n\n ################## SAVING DATA ##########################\n # Add CSD and other param values to sim.allSimData for access outside of this function or script \n if save_to_sim is True: ## FROM SIM \n try:\n from .. import sim \n sim.allSimData['CSD'] = {}\n #sim.allSimData['CSD']['timeRange'] = timeRange \n sim.allSimData['CSD']['sampr'] = sampr\n sim.allSimData['CSD']['spacing_um'] = spacing_um \n sim.allSimData['CSD']['CSD_data'] = CSD_data\n sim.allSimData['CSD']['CSD_data_noBandpass'] = CSD_data_noBandpass # noBandpass trial \n except:\n print('NOTE: No sim.allSimData construct available to store CSD data')\n\n\n\n # RETURN CSD AND OTHER RELEVANT PARAM VALUES, IF DESIRED \n if getAllData is True:\n return lfp_data, CSD_data, sampr, spacing_um, dt\n if getAllData is False:\n return CSD_data # returns CSD in units of mV/mm**2 (assuming lfps are in mV)", "def download():\r\n reader = GSODDataReader()\r\n year_list = range(2001, 2012)\r\n austin = reader.collect_data(year_list, exact_station=True,\r\n station_name='AUSTIN CAMP MABRY', state='TX', country='US')\r\n houston = reader.collect_data(year_list, exact_station=True,\r\n station_name='HOUSTON/D.W. 
HOOKS', state='TX', country='US')\r\n new_york = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEW YORK/LA GUARDIA', state='NY', country='US')\r\n newark = reader.collect_data(year_list, exact_station=True,\r\n station_name='NEWARK INTL AIRPORT', state='NJ', country='US')\r\n punta_arenas = reader.collect_data(year_list, exact_station=True,\r\n station_name='PUNTA ARENAS', country='CH')\r\n wellington = reader.collect_data(year_list, exact_station=True,\r\n station_name='WELLINGTON AIRPORT', country='NZ')\r\n store = HDFStore('weather.h5')\r\n store['austin'] = austin\r\n store['houston'] = houston\r\n store['nyc'] = new_york\r\n store['newark'] = newark\r\n store['punta_arenas'] = punta_arenas\r\n store['wellington'] = wellington\r\n store.close()", "def fetch_data(round:int) -> pd.DataFrame:\n\n \"\"\"\n You have to implement this function. This function can take \n an option (from the command line) and produces a DataFrame.\n (If you need a different kind of output, you also need to \n modify the main function. \n \"\"\"\n import numpy as np\n from sklearn.datasets import fetch_california_housing\n\n sampling_rate = 0.2*round\n np.random.seed(51)\n\n data = fetch_california_housing(download_if_missing=True)\n df = pd.DataFrame(data.data, columns=data.feature_names)\n df[target_name] = data.target\n\n ## sampling\n scores = np.random.uniform(low=0, high=1, size=df.shape[0])\n selected = scores < sampling_rate\n\n return df.iloc[selected,:].copy()", "def main():\n data = pd.read_csv('countries.csv')\n # import_data_pandas(data)\n # continent_data(data)\n # continent_data_le(data)\n continent_data_gdp_growth(data)" ]
[ "0.57028055", "0.55154437", "0.5410844", "0.53428996", "0.52849644", "0.5160819", "0.5140988", "0.5138486", "0.51381844", "0.51164", "0.5108496", "0.50709546", "0.50521857", "0.50167906", "0.50145507", "0.5009793", "0.49903554", "0.498991", "0.49887833", "0.49823773", "0.4975772", "0.49728417", "0.49586567", "0.49527562", "0.49466214", "0.49413437", "0.49336624", "0.49267444", "0.49265698", "0.49154577" ]
0.6692245
0
Delete a page in a wiki
def delete(self, request, slug, page_name): try: wiki = Wiki.objects.get(slug=slug) except Wiki.DoesNotExist: error_msg = "Wiki not found." return api_error(status.HTTP_404_NOT_FOUND, error_msg) username = request.user.username if wiki.username != username: error_msg = _('Permission denied.') return api_error(status.HTTP_403_FORBIDDEN, error_msg) try: repo = syncwerk_api.get_repo(wiki.repo_id) if not repo: error_msg = "Wiki library not found." return api_error(status.HTTP_404_NOT_FOUND, error_msg) except RpcsyncwerkError: error_msg = _("Internal Server Error") return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) file_name = page_name + ".md" try: syncwerk_api.del_file(repo.id, '/', file_name, request.user.username) except RpcsyncwerkError as e: logger.error(e) error_msg = _('Internal Server Error') return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) return Response()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wiki_page_deleted(self, page):\n if 'wiki' not in self.sources:\n return\n gnp = GrowlNotificationPacket(notification='wiki',\n title='Page deleted',\n description=self._wiki_repr(page))\n gs = GrowlSender(self.env)\n gs.notify(self._get_hosts('wiki'), gnp)", "def delete_page(request, page_id):\n page = models.Page.get_by_id(int(page_id))\n\n if not page:\n return utility.page_not_found(request)\n\n if not page.user_can_write(request.profile):\n return utility.forbidden(request)\n\n page.delete()\n\n url = urlresolvers.reverse('views.admin.index')\n return http.HttpResponseRedirect(url)", "def delete(request, content_type, object_id):\n user = request.user\n content_type_object = ContentType.objects.get(id = content_type)\n node = content_type_object.model_class().objects.get(id = object_id)\n community_wiki.delete_content(node)\n \n redirect_url = reverse('content-list-redirect', args=[content_type_object.id])\n return http.HttpResponseRedirect(redirect_url)", "def removepage(self, page):\r\n try:\r\n self._api_entrypoint.removePage(self._session_token, page)\r\n except XMLRPCError as e:\r\n raise ConfluenceError('Failed to delete page: %s' % e)", "def delete(request, page_id):\n page = Page.objects.get(pk=page_id)\n error_messge = \"\"\n if page:\n try:\n editor = page.get_editor()\n if editor and editor.user.id != request.user.id:\n # msg = 'You can\\'t delete this page because %s is editing it' % (\n # str(request.user.email)\n # )\n # messages.add(request, msg)\n return HttpResponse('locked')\n page.delete()\n cache.delete('%s_%s_pages' % (request.website.subdomain, str(request.website.id)))\n error_messge = \"true\"\n messages.success(request, '%s has been deleted successfully.' % page.title)\n except:\n error_messge = \"There is some Problem Appear . Please Try again Later\"\n else:\n error_messge = \"Sorry! Your requested page doesn't exist.\"\n return HttpResponse(error_messge)", "def treat(self, page):\n self.current_page = page\n if self.getOption('undelete'):\n page.undelete(self.summary)\n else:\n if page.exists():\n page.delete(self.summary, not self.getOption('always'))\n else:\n pywikibot.output(u'Skipping: %s does not exist.' 
% page)", "def wiki_page_version_deleted(self, page):\n if 'wiki' not in self.sources:\n return\n gnp = GrowlNotificationPacket(notification='wiki',\n title='Page suppressed',\n description=self._wiki_repr(page))\n gs = GrowlSender(self.env)\n gs.notify(self._get_hosts('wiki'), gnp)", "def delete(self, request, doc_id):\n try:\n doc = Document.objects.get(id=doc_id)\n except Document.DoesNotExist:\n raise Http404(\"Document does not exists\")\n\n if request.user.has_perm(\n Access.PERM_WRITE, doc\n ):\n page_nums = request.GET.getlist('pages[]')\n page_nums = [int(number) for number in page_nums]\n\n doc.delete_pages(page_numbers=page_nums)\n\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n return Response(status=status.HTTP_401_UNAUTHORIZED)", "def delete():", "def remove(self, _id):\n self.get_page(_id)\n \n # It checks page _id exist and that\n # you have permission to remove that page\n if len(self.user) and g.my['rank'] < 15:\n try:\n g.db.pages.remove({ '_id' : ObjectId(_id) })\n return 'ok'\n except PyMongoError:\n return 'nada'\n return 'nada'", "def delete_page(self, wid, child):\n\n page_num = self.notebook.page_num(child)\n self.notebook.remove_page(page_num)\n if not self.notebook.get_n_pages():\n self.output_window.remove(self.notebook)\n placeholder = self.get_placeholder_image()\n self.output_window.add(placeholder)\n placeholder.show_all()", "def trigger_delete(cls, instance):\n es_client.delete(instance.blog.index_name(), 'blog_page_index', instance.id)", "def delete_file(request, page_id, file_id):\n record = models.FileStore.get_by_id(int(file_id))\n if record:\n if not record.user_can_write(request.profile):\n return utility.forbidden(request)\n\n record.delete()\n return utility.edit_updated_page(page_id, tab_name='files')\n else:\n return utility.page_not_found(request)", "def delete(id):\r\n get_post(id)\r\n db = get_db()\r\n db.cursor().execute('DELETE FROM novel.post WHERE id = %s', id)\r\n db.commit()\r\n return redirect(url_for('novel.index'))", "def delete(tbd, tipe):\n des = \"Xblog/docs/\" + tbd.replace(\".ipynb\", \".html\")\n uninstall(des)\n if tipe == \"Xpage\":\n os.remove(des)\n ccc.success(\"deleting \" + des)\n des_pdf = des.replace(\".html\",\".pdf\").replace(\"notebooks\", \"pdfs\")\n os.remove(des_pdf)\n ccc.success(\"deleting \" + des_pdf)\n if tbd == \"notebooks/welcome.ipynb\":\n if os.path.isfile(\"Xblog/README.md\"):\n cnv.md2html()\n else:\n with open(\"Xblog/docs/notebooks/welcomme.html\", 'w') as f:\n f.write(\"<html>\\n<body>\\n<h1 align=\\\"center\\\">Welcome to Xbooks blogs!</h1>\\n<h4 align=\\\"center\\\">This blog has no welcome page<br/>if you're maintainer of this blog, kindly write either README.md or notebooks/welcome.ipynb file!</h4>\\n</body>\\n</html>\\n\")\n f.close()\n if tipe == \"Xbook\":\n shutil.rmtree(des)\n ccc.success(\"deleting \" + des)\n return True", "def removePage(self,page):\n return self.pm_getSpaceManager().removePage(self._unbox(page))", "def main(*args):\n pageName = ''\n summary = None\n generator = None\n options = {}\n\n # read command line parameters\n local_args = pywikibot.handle_args(args)\n genFactory = pagegenerators.GeneratorFactory()\n mysite = pywikibot.Site()\n\n for arg in local_args:\n if arg == '-always':\n options['always'] = True\n elif arg.startswith('-summary'):\n if len(arg) == len('-summary'):\n summary = pywikibot.input(u'Enter a reason for the deletion:')\n else:\n summary = arg[len('-summary:'):]\n elif arg.startswith('-images'):\n pywikibot.output('\\n\\03{lightred}-image 
option is deprecated. '\n 'Please use -imageused instead.\\03{default}\\n')\n local_args.append('-imageused' + arg[7:])\n elif arg.startswith('-undelete'):\n options['undelete'] = True\n else:\n genFactory.handleArg(arg)\n found = arg.find(':') + 1\n if found:\n pageName = arg[found:]\n\n if not summary:\n if pageName:\n if arg.startswith('-cat') or arg.startswith('-subcats'):\n summary = i18n.twtranslate(mysite, 'delete-from-category',\n {'page': pageName})\n elif arg.startswith('-links'):\n summary = i18n.twtranslate(mysite, 'delete-linked-pages',\n {'page': pageName})\n elif arg.startswith('-ref'):\n summary = i18n.twtranslate(mysite, 'delete-referring-pages',\n {'page': pageName})\n elif arg.startswith('-imageused'):\n summary = i18n.twtranslate(mysite, 'delete-images',\n {'page': pageName})\n elif arg.startswith('-file'):\n summary = i18n.twtranslate(mysite, 'delete-from-file')\n generator = genFactory.getCombinedGenerator()\n # We are just deleting pages, so we have no need of using a preloading\n # page generator to actually get the text of those pages.\n if generator:\n if summary is None:\n summary = pywikibot.input(u'Enter a reason for the %sdeletion:'\n % ['', 'un'][options.get('undelete', False)])\n bot = DeletionRobot(generator, summary, **options)\n bot.run()\n else:\n # Show help text from the top of this file\n pywikibot.showHelp()", "def update_page(self, page, title, data=None, text=None):\n\n if text is None and data is not None:\n text = str(data, self.storage.charset, 'replace')\n self.set_last_revision(self.storage.repo_revision())\n with self.index.index_writer(self.name) as writer:\n with self.index.index_searcher(self.name) as s:\n writer.delete_by_term('title', title, searcher=s)\n self.reindex_page(page, title, writer, text=text)", "def test_public_status_page_delete_public_status_page(self):\n pass", "def delete_page(self,**app_names_and_pages):\n \n page_location = lambda app_name,app_page : os.path.join(self._main,app_name,app_page)\n css_path = os.path.join(self.folder_location,\"static\",\"css\")\n for app,pages in app_names_and_pages.items():\n for page in pages:\n shutil.rmtree(page_location(app,page))\n\n self._update_delete_app_or_page()", "def delete_story(self, story):\n raise NotImplementedError", "def delete(self):\n ...", "def delete(id):\n\tget_post(id)\n\tdb = get_db()\n\tget_post(id)\n\tdb = get_db()\n\tdb.execute('DELETE FROM post WHERE id = ?', (id,))\n\tdb.commit()\n\treturn redirect(url_for('blog.index'))", "def removeLabelFromPage(self, label, page):\n return self.pm_getSpaceManager().removeLabelFromPage(self._unbox(label), self._unbox(page))", "def delete(self, url):\n return self.request(url, \"DELETE\")", "def delete():\n id = request.data\n # Build a pymongo command to delete the document by _id. 
Only executes if active is set to True.\n active = True\n mode = request.headers[\"mode\"]\n client = MongoClient(db_config)\n if active == True:\n # Switch mode\n if request.headers[\"mode\"] == \"deleteCollectionNode\":\n db = client['Corpus']\n node = db['Corpus']\n # elif request.headers[\"mode\"] == \"something else:\n # db = client['Something']\n # node = db['Something']\n else:\n db = client['Publications']\n node = db['Publications']\n node.remove({\"_id\": id})\n # Return the Ajax response\n return \"Success.\"", "def delete(self, _id):", "def destroy(self, request, *args, **kwargs):\n try:\n article = self.get_object()\n except PermissionDenied as pd:\n return Response({'error': str(pd)})\n\n title = article.title\n site_name = article.site_name\n article.delete()\n return Response({'message': '{0} from {1} is removed.'.format(title, site_name)})", "def delete_entry(title):\n filename = f\"entries/{title}.md\"\n if default_storage.exists(filename):\n default_storage.delete(filename)", "def delete(self, *args, **kwargs):\n self.delete_relatives()\n old_content = self.content\n super().delete(*args, **kwargs)\n if old_content.isOrphaned():\n old_content.delete()" ]
[ "0.73903525", "0.7082593", "0.6932861", "0.6923126", "0.68809664", "0.68542206", "0.6704106", "0.6495157", "0.6459959", "0.6402674", "0.6377859", "0.6369086", "0.61840576", "0.6173155", "0.6167322", "0.61456627", "0.6114908", "0.60865897", "0.6069235", "0.60479736", "0.6029485", "0.59784573", "0.5970732", "0.5880773", "0.58431804", "0.582507", "0.58074874", "0.5783347", "0.5779333", "0.5772788" ]
0.7459889
0
Given a stream to read from, return the parsed representation. Should return parsed data, or a `DataAndFiles` object consisting of the parsed data and files.
def parse(self, stream, media_type=None, parser_context=None): encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) decoded_stream = codecs.getreader(encoding)(stream) raw_body = decoded_stream.read() request = parser_context.get('request') setattr(request, 'raw_body', raw_body) filename = self.get_filename(stream, media_type, parser_context) if filename and (not filename.endswith('.toml') and not filename.endswith('.tml')): filename = f'{filename}.toml' setattr(request, 'filename', filename) return toml.loads(raw_body)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read(self, stream):\n ret = json.load(stream)\n self.validate(ret)\n self.stringify(ret)\n return (ret, self.make_order(ret))", "def parse(self, stream, media_type=None, parser_context=None):\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n decoded_stream = codecs.getreader(encoding)(stream)\n return decoded_stream", "def parse_stream(stream):\n try:\n root = etree.parse(stream).getroot()\n except:\n root = etree.fromstring(stream)\n return parse_node(root)", "def parse(self, stream, mimetype, content_length, options=None):\n if options is None:\n options = {}\n\n parse_func = self.get_parse_func(mimetype, options)\n if parse_func is not None:\n # Check content length only if we are actually going to parse\n # the data.\n if (\n self.max_content_length is not None\n and content_length is not None\n and content_length > self.max_content_length\n ):\n raise exceptions.RequestEntityTooLarge()\n\n try:\n return parse_func(self, stream, mimetype, content_length, options)\n except ValueError:\n if not self.silent:\n raise\n\n return stream, self.cls(), self.cls()", "def parse(self, stream, media_type=None, parser_context=None):\n\n parser_context = parser_context or {}\n request = parser_context['request']\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n meta = request.META\n upload_handlers = request.upload_handlers\n filename = self.get_filename(stream, media_type, parser_context)\n\n # Note that this code is extracted from Django's handling of\n # file uploads in MultiPartParser.\n content_type = meta.get('HTTP_CONTENT_TYPE',\n meta.get('CONTENT_TYPE', ''))\n try:\n content_length = int(meta.get('HTTP_CONTENT_LENGTH',\n meta.get('CONTENT_LENGTH', 0)))\n except (ValueError, TypeError):\n content_length = None\n\n if not filename:\n filename = 'autosave.zip'\n\n # See if the handler will want to take care of the parsing.\n for handler in upload_handlers:\n result = handler.handle_raw_input(None,\n meta,\n content_length,\n None,\n encoding)\n if result is not None:\n return DataAndFiles(None, {'file': result[1]})\n\n # This is the standard case.\n possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]\n chunk_size = min([2 ** 31 - 4] + possible_sizes)\n chunks = ChunkIter(stream, chunk_size)\n counters = [0] * len(upload_handlers)\n\n for handler in upload_handlers:\n try:\n handler.new_file(None, filename, content_type,\n content_length, encoding)\n except StopFutureHandlers:\n break\n\n for chunk in chunks:\n for i, handler in enumerate(upload_handlers):\n chunk_length = len(chunk)\n chunk = handler.receive_data_chunk(chunk, counters[i])\n counters[i] += chunk_length\n if chunk is None:\n break\n\n for i, handler in enumerate(upload_handlers):\n file_obj = handler.file_complete(counters[i])\n if file_obj:\n return DataAndFiles(None, {'file': file_obj})\n raise ParseError(\"FileUpload parse error - \"\n \"none of upload handlers can handle the stream\")", "def parse_stream(self, stream: IO[str], debug: bool = False) -> NL:\n return self.parse_stream_raw(stream, debug)", "def parse(self, stream, media_type=None, parser_context=None):\n if isinstance(stream, WSGIRequest):\n return oadr_20b.parseString(stream.body, silence=True)\n elif hasattr(stream, 'buf'):\n return oadr_20b.parseString(stream.buf, silence=True)\n\n return None", "def read(self, stream):\n ret = yaml.load(stream)\n self.validate(ret)\n return (ret, self.make_order(ret))", "def parse(self, stream, media_type=None, parser_context=None):\n raise 
NotImplementedError(\".parse() must be overridden.\")", "def load(datastream):", "def parse(self, stream, media_type=None, parser_context=None):\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n\n try:\n decoded_stream = codecs.getreader(encoding)(stream)\n parse_constant = strict_constant if self.strict else None\n return ujson.load(decoded_stream, parse_constant=parse_constant)\n except ValueError as exc:\n raise ParseError('JSON parse error - %s' % str(exc))", "def _stream(self, d):\n length = d['Length']\n token = self.read(6)\n if token != b'stream':\n self.on_parser_error(\"stream expected\")\n # `stream` keyword must be followed by CR+LF or by LF, but NOT by CR alone\n ch = self.next()\n if ch == CR:\n ch = self.next()\n if ch != LF:\n logging.warning(\"Missing LF after `stream` token - [CR]LF expected. Trying to proceed.\")\n self.prev()\n\n state = self.get_state()\n\n data = self.read(length)\n # According to the spec EOL should be after the data and before endstream\n # But some files do not follow this.\n #\n # See data/leesoil-cases-2.pdf\n #\n # self.eol()\n self.maybe_spaces()\n token = self.read(9)\n if token != b'endstream':\n # Work around wrong length. See https://github.com/maxpmaxp/pdfreader/issues/68\n err_state = self.get_state()\n logging.warning(\"Wrong stream length: {}. Trying to work around the issue.\".format(length))\n self.set_state(state)\n data = self.read(9)\n while not data.endswith(b'endstream'):\n ch = self.next()\n if ch is None:\n self.set_state(err_state)\n self.on_parser_error(\"endstream expected\")\n data += ch\n\n data = data[:-9]\n while data and data[-1:] in EOL:\n data = data[:-1]\n\n return Stream(d, data)", "def parse_file(self, source):\n # If this is a file-like object, we should be able to read it.\n try:\n raw_data = source.read()\n except AttributeError:\n # This raises FileNotFoundError if the file doesn't exist.\n with open(source) as source_obj:\n raw_data = source_obj.read()\n\n # Parse the data in string format.\n return self.parse_string(raw_data)", "def _ReadStream(self, stream_name):\n file_object = self._OpenStream(stream_name)\n if not file_object:\n return b''\n\n try:\n data = file_object.read()\n finally:\n file_object.close()\n\n return data", "def get_stream_reader(fh, tmp_dir):\n magic_dict = {\n b\"\\x1f\\x8b\\x08\": _get_stream_readers_for_gzip,\n b\"\\x42\\x5a\\x68\": _get_stream_readers_for_bz2,\n b\"\\x50\\x4b\\x03\\x04\": _get_stream_readers_for_zip,\n }\n start_of_file = fh.read(CHUNK_SIZE)\n try:\n fh.seek(0)\n except UnsupportedOperation: # This happens if fh has been created by urlopen\n fh = _download_file(start_of_file, fh)\n try: # Check if file is tar file\n if tarfile.open(fileobj=StringIO(start_of_file)):\n return _get_stream_readers_for_tar(fh, tmp_dir)\n except tarfile.ReadError:\n pass\n for k, v in magic_dict.items():\n if start_of_file.startswith(k):\n return v(fh, tmp_dir)\n return [fh]", "def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> NL:\n tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)\n return self.parse_tokens(tokens, debug)", "def read_stream(schema, stream, *, buffer_size=io.DEFAULT_BUFFER_SIZE):\n reader = _lancaster.Reader(schema)\n buf = stream.read(buffer_size)\n remainder = b''\n while len(buf) > 0:\n values, n = reader.read_seq(buf)\n yield from values\n remainder = buf[n:]\n buf = stream.read(buffer_size)\n if len(buf) > 0 and len(remainder) > 0:\n ba = bytearray()\n 
ba.extend(remainder)\n ba.extend(buf)\n buf = memoryview(ba).tobytes()\n if len(remainder) > 0:\n raise EOFError('{} bytes remaining but could not continue reading '\n 'from stream'.format(len(remainder)))", "def read_instream(instream):\n # If you need to read a csv, create a DataFrame, or whatever it might be,\n # do it here.\n return instream.read()", "def read(self, stream):\n self.inspect_quick(stream)\n pyffi.object_models.xml.struct_.StructBase.read(\n self, stream, self)\n\n # check if we are at the end of the file\n if stream.read(1):\n raise ValueError(\n 'end of file not reached: corrupt psk file?')", "def read(self, stream):\n root = []\n headings = []\n leading_sep = False\n trailing_sep = False\n for (pos, line) in enumerate(stream.read().splitlines()):\n tokens = args.regexp.split(line)\n log.debug('tokens: {tokens}'.format(**locals()))\n\n if pos == 0:\n \"\"\"\n Strip off empty beginning and trailing tokens in case the separator is used as a border\n \"\"\"\n leading_sep = not tokens[0]\n trailing_sep = (tokens[-1] == '') and (len(tokens) > 1)\n\n if leading_sep:\n if tokens[0]:\n # parser.error('Unexpected token under empty leading heading')\n pass\n # del tokens[0]\n pass\n\n if trailing_sep:\n if tokens[-1]:\n # parser.error('Unexpected token under empty trailing heading')\n pass\n # del tokens[-1]\n pass\n\n if args.headings and (pos == 0):\n if not tokens:\n parser.error('No headings')\n headings = tokens\n else:\n if headings:\n if len(tokens) > len(headings):\n parser.error('Column without heading: {tokens} > {headings}'.format(**locals()))\n root.append({heading: tokens[heading_pos] if heading_pos < len(tokens) else ''\n for (heading_pos, heading) in enumerate(headings)})\n else:\n root.append(tokens)\n\n return (root, headings)", "def schemaless_reader(stream, schema):\n acquaint_schema(schema)\n return read_data(stream, schema)", "def parse(self, stream, media_type=None, parser_context=None):\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n\n try:\n data = stream.read().decode(encoding)\n return json.loads(data)\n except ValueError as exc:\n raise ParseError('JSON parse error - %s' % six.text_type(exc))", "def _record_reader(stream):\n while True:\n header = stream.read(4)\n if len(header) < 4:\n return\n size, rec_type = struct.unpack(\">HH\", header)\n data_type = rec_type & 0x00FF\n rec_type = rec_type // 256\n data = None\n if size > 4:\n if data_type == 0x01:\n data = numpy.array(\n struct.unpack(\n \">{0}H\".format((size - 4) // 2), stream.read(size - 4)\n ),\n dtype=\"uint\",\n )\n elif data_type == 0x02:\n data = numpy.array(\n struct.unpack(\n \">{0}h\".format((size - 4) // 2), stream.read(size - 4)\n ),\n dtype=\"int\",\n )\n elif data_type == 0x03:\n data = numpy.array(\n struct.unpack(\n \">{0}l\".format((size - 4) // 4), stream.read(size - 4)\n ),\n dtype=\"int\",\n )\n elif data_type == 0x05:\n data = numpy.array(\n [\n _eight_byte_real_to_float(stream.read(8))\n for _ in range((size - 4) // 8)\n ]\n )\n else:\n data = stream.read(size - 4)\n if str is not bytes:\n if data[-1] == 0:\n data = data[:-1].decode(\"ascii\")\n else:\n data = data.decode(\"ascii\")\n elif data[-1] == \"\\0\":\n data = data[:-1]\n yield [rec_type, data]", "def unpack(stream, **kwargs):\n data = stream.read()\n return unpackb(data, **kwargs)", "def as_stream(stream_or_path, mode='r'):\n if isinstance(stream_or_path, Path):\n return stream_or_path.open(mode=mode, encoding='utf-8')\n\n elif 
isinstance(stream_or_path, str):\n return open(stream_or_path, mode=mode, encoding='utf-8')\n else:\n return stream_or_path", "def file_contents(self, stream):\n return stream.read()", "def parse(self, stream, media_type=None, parser_context=None):\n assert etree, \"XMLParser requires defusedxml to be installed\"\n\n parser_context = parser_context or {}\n encoding = parser_context.get(\"encoding\", settings.DEFAULT_CHARSET)\n parser = etree.DefusedXMLParser(encoding=encoding)\n try:\n tree = etree.parse(stream, parser=parser, forbid_dtd=True)\n except (etree.ParseError, ValueError) as exc:\n raise ParseError(\"XML parse error - %s\" % str(exc))\n data = self._xml_convert(tree.getroot())\n\n return data", "def parse(stream):\n return xsd_models.parseString(stream, silence=True)", "def parse(\n data: Iterable[str],\n raw: bool = False,\n quiet: bool = False,\n ignore_exceptions: bool = False\n) -> StreamingOutputType:\n jc.utils.compatibility(__name__, info.compatible, quiet)\n streaming_input_type_check(data)\n\n output_line: Dict = {}\n os_type = ''\n\n for line in data:\n try:\n streaming_line_input_type_check(line)\n line = line.rstrip()\n\n # ignore blank lines\n if not line.strip():\n continue\n\n # linux output\n if line.startswith(' File: '):\n os_type = 'linux'\n\n if os_type == 'linux':\n # stats output contains 9 lines\n # line #1\n if line.startswith(' File: '):\n if output_line:\n yield output_line if raw else _process(output_line)\n\n output_line = {}\n line_list = line.split(maxsplit=1)\n output_line['file'] = line_list[1]\n\n # populate link_to field if -> found\n if ' -> ' in output_line['file']:\n filename = output_line['file'].split(' -> ')[0].strip('\\u2018').rstrip('\\u2019')\n link = output_line['file'].split(' -> ')[1].strip('\\u2018').rstrip('\\u2019')\n output_line['file'] = filename\n output_line['link_to'] = link\n else:\n filename = output_line['file'].split(' -> ')[0].strip('\\u2018').rstrip('\\u2019')\n output_line['file'] = filename\n\n continue\n\n # line #2\n if line.startswith(' Size: '):\n line_list = line.split(maxsplit=7)\n output_line['size'] = line_list[1]\n output_line['blocks'] = line_list[3]\n output_line['io_blocks'] = line_list[6]\n output_line['type'] = line_list[7]\n continue\n\n # line #3\n if line.startswith('Device: '):\n line_list = line.split()\n output_line['device'] = line_list[1]\n output_line['inode'] = line_list[3]\n output_line['links'] = line_list[5]\n continue\n\n # line #4\n if line.startswith('Access: ('):\n line = line.replace('(', ' ').replace(')', ' ').replace('/', ' ')\n line_list = line.split()\n output_line['access'] = line_list[1]\n output_line['flags'] = line_list[2]\n output_line['uid'] = line_list[4]\n output_line['user'] = line_list[5]\n output_line['gid'] = line_list[7]\n output_line['group'] = line_list[8]\n continue\n\n # line #5\n # not implemented\n if line.startswith('Context: '):\n continue\n\n # line #6\n if line.startswith('Access: 2'):\n line_list = line.split(maxsplit=1)\n output_line['access_time'] = line_list[1]\n continue\n\n # line #7\n if line.startswith('Modify: '):\n line_list = line.split(maxsplit=1)\n output_line['modify_time'] = line_list[1]\n continue\n\n # line #8\n if line.startswith('Change: '):\n line_list = line.split(maxsplit=1)\n output_line['change_time'] = line_list[1]\n continue\n\n # line #9\n if line.startswith(' Birth: '):\n line_list = line.split(maxsplit=1)\n output_line['birth_time'] = line_list[1]\n continue\n\n # catch non-stat data\n raise ParseError('Not stat data')\n\n # 
FreeBSD/OSX output\n if os_type != 'linux':\n value = shlex.split(line)\n\n if not value[0].isdigit() or not value[1].isdigit():\n raise ParseError('Not stat data')\n\n output_line = {\n 'file': ' '.join(value[15:]),\n 'unix_device': value[0],\n 'inode': value[1],\n 'flags': value[2],\n 'links': value[3],\n 'user': value[4],\n 'group': value[5],\n 'rdev': value[6],\n 'size': value[7],\n 'access_time': value[8],\n 'modify_time': value[9],\n 'change_time': value[10],\n 'birth_time': value[11],\n 'block_size': value[12],\n 'blocks': value[13],\n 'unix_flags': value[14]\n }\n\n if output_line:\n yield output_line if raw else _process(output_line)\n output_line = {}\n\n except Exception as e:\n yield raise_or_yield(ignore_exceptions, e, line)\n\n # gather final item\n try:\n if output_line:\n yield output_line if raw else _process(output_line)\n\n except Exception as e:\n yield raise_or_yield(ignore_exceptions, e, '')", "def _read_record(self, stream):\n header = stream.read(4)\n if len(header) < 4:\n return None\n size, rec_type = struct.unpack('>HH', header)\n data_type = (rec_type & 0x00ff)\n rec_type = rec_type // 256\n data = None\n if size > 4:\n if data_type == 0x01:\n data = numpy.array(\n struct.unpack('>{0}H'.format((size - 4) // 2),\n stream.read(size - 4)),\n dtype='uint')\n elif data_type == 0x02:\n data = numpy.array(\n struct.unpack('>{0}h'.format((size - 4) // 2),\n stream.read(size - 4)),\n dtype='int')\n elif data_type == 0x03:\n data = numpy.array(\n struct.unpack('>{0}l'.format((size - 4) // 4),\n stream.read(size - 4)),\n dtype='int')\n elif data_type == 0x05:\n data = numpy.array([\n _eight_byte_real_to_float(stream.read(8))\n for _ in range((size - 4) // 8)\n ])\n else:\n data = stream.read(size - 4)\n if str is not bytes:\n if data[-1] == 0:\n data = data[:-1].decode('ascii')\n else:\n data = data.decode('ascii')\n elif data[-1] == '\\0':\n data = data[:-1]\n return [rec_type, data]" ]
[ "0.6511268", "0.6476624", "0.621487", "0.6108035", "0.60876954", "0.6072599", "0.6060378", "0.6004329", "0.58978724", "0.5818257", "0.57974905", "0.57835144", "0.5779077", "0.5756727", "0.57229155", "0.57175076", "0.57150435", "0.56804425", "0.56538576", "0.56527823", "0.5649685", "0.56443286", "0.56273", "0.5626502", "0.56107277", "0.55985236", "0.5581387", "0.5575762", "0.5557881", "0.553904" ]
0.7039589
0
Creates an email with no attachment.
def generate_withno_attachement(sender, recipient, subject, body): # Basic Email formatting message = email.message.EmailMessage() message["From"] = sender message["To"] = recipient message["Subject"] = subject message.set_content(body) return message
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_email(\n to_field=\"[email protected]\",\n from_field=\"[email protected]\",\n subject=\"This is a test email\",\n body=\"Almost empty text message\",\n attachment=None,\n maintype=None,\n subtype=None,\n):\n msg = EmailMessage()\n\n msg['To'] = to_field\n msg['From'] = from_field\n msg['Subject'] = subject\n msg.set_content(body)\n\n if attachment:\n with open(attachment, 'rb') as fp:\n attachment = fp.read()\n\n msg.add_attachment(attachment, maintype=maintype, subtype=subtype)\n\n email_message = email.message_from_bytes(\n msg.as_bytes(),\n policy=email.policy.default\n )\n\n return email_message", "def test_skip_blank_emails(self):\n appt_date = datetime.date.today() + datetime.timedelta(days=7) # Default for email\n confirmed = self.create_confirmed_notification(self.test_patient, appt_date)\n\n blank_contact = self.create_contact(data={'email': ''})\n self.group.contacts.add(blank_contact)\n\n # run email job\n from aremind.apps.reminders.app import daily_email_callback\n daily_email_callback(self.router)\n\n self.assertEqual(len(mail.outbox), 1)\n message = mail.outbox[0]\n self.assertEqual(len(message.to), 1)", "def _createEmail(self, address_to, message, emailSubject):\r\n\t\tfrom_email = Email(self.sender)\r\n\t\tto_email = To(address_to)\r\n\t\tsubject = emailSubject\r\n\t\tcontent = Content(\"text/plain\", message)\r\n\t\t#creates Mail object from sendgrid api\r\n\t\tmail = Mail(from_email, to_email, subject, content)\r\n\t\treturn mail", "def test_user_creation_no_email(self):\n self.registration_profile.objects.create_inactive_user(\n site=Site.objects.get_current(),\n send_email=False, **self.user_info)\n self.assertEqual(len(mail.outbox), 0)", "def create_email(sender, to, subject, message_text):\n # This is intended to strip non-ascii chars in message_text\n message_text = ''.join(filter(lambda x: x in printable, message_text))\n \n message = MIMEText(message_text)\n message['to'] = to\n message['from'] = sender\n message['subject'] = subject\n return message.as_bytes()", "def test_skip_blank_emails(self):\n appt_date = datetime.date.today() + datetime.timedelta(days=7) # Default for email\n reminders.Patient.objects.filter(\n pk__in=[self.test_patient.pk, self.other_patient.pk]\n ).update(next_visit=appt_date)\n confirmed = self.create_confirmed_notification(self.test_patient, appt_date)\n blank_contact = self.create_contact(data={'email': ''})\n null_contact = self.create_contact(data={'email': None})\n self.group.contacts.add(blank_contact)\n self.group.contacts.add(null_contact)\n\n self.startRouter()\n self.router.logger.setLevel(logging.DEBUG)\n # run email job\n from afrims.apps.reminders.app import daily_email_callback\n daily_email_callback(self.router)\n\n self.assertEqual(len(mail.outbox), 1)\n message = mail.outbox[0]\n self.assertEqual(len(message.to), 1)\n self.stopRouter()", "def createMessage( self, *args, **kw ):\n return MailMessage( *args, **kw )", "def test_send_mail_without_mail(self):\n event_without_mail = self.create_event(self.family, name=None)\n fadm = admin.EventAdmin(Event, self.site)\n with patch.object(fadm, \"message_user\") as message_user_mock:\n fadm.send_mail(\"Request\", [self.event, event_without_mail])\n message_user_mock.assert_called_once_with(\n \"Request\", \"The event of the 2018-12-31 has no email template set\",\n admin.messages.ERROR)", "def test_create_invalid_email(self):\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n\n url = reverse(\n 
'projectroles:api_invite_create',\n kwargs={'project': self.project.sodar_uuid},\n )\n post_data = {\n 'email': 'NOT_AN_EMAIL!',\n 'role': PROJECT_ROLE_CONTRIBUTOR,\n 'message': INVITE_MESSAGE,\n }\n response = self.request_knox(url, method='POST', data=post_data)\n\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(\n ProjectInvite.objects.filter(project=self.project).count(), 0\n )\n self.assertEqual(len(mail.outbox), 0)", "def plaintext_only():\n msg = EmailMessage()\n msg[\"From\"] = sender\n msg[\"To\"] = recipient\n msg[\"Subject\"] = \"Plaintext only\"\n msg.set_content(\"This is the body of a plaintext message.\")\n\n return msg", "def test_no_email(self):\n user = self.make_user()\n data: dict = {}\n\n with self.login(user):\n response = self.post(\"referrals:create\", data=data)\n\n message = list(get_messages(response.wsgi_request))[0]\n assert str(message) == \"'missing email' is an invalid email address.\"", "def createMessage( self, *args, **kw ):\n if not kw.has_key('charset'):\n kw['charset'] = self.getOutputCharset()\n kw['to_mail'] = 1\n return MailServerBase.createMessage( self, *args, **kw )", "def send_created_email(self):\n if settings.NOTIFY_NEW_REG:\n to = settings.NOTIFY_NEW_REG\n message = \"\"\"\\\nGreetings,<br><br>\n\nA new vehicle registration has been submitted by %s.<br><br>\n\nGo here to view or edit the request: <br>\n<a href=\"%s\">%s</a>\n<br><br>\nSincerely,<br><br>\nThe Janelia Parking Permit Program\n \"\"\" % (self.user_display_name(), self.get_edit_url(True), self.get_edit_url(True))\n subject = 'A new parking permit request has been entered'\n from_email = '[email protected]'\n text_content = re.sub(r'<[^>]+>','',message)\n html_content = message\n msg = EmailMultiAlternatives(subject, text_content, from_email, to)\n msg.attach_alternative(html_content, \"text/html\")\n msg.send()", "def create_associated_email(sender, **kwargs):\n user = kwargs['instance']\n if kwargs['created']:\n email = AssociatedEmail(user=user, email=user.email, is_primary_email=True)\n if user.is_active:\n email.verification_date = timezone.now()\n email.is_verified = True\n email.save()", "def create_sent_email(self, *args, **kwargs):\n receiver = kwargs['receiver']\n sender = kwargs['sender']\n user = kwargs['user']\n body = kwargs['body']\n subject = kwargs['subject']\n if receiver and sender and subject and body:\n sent_email = SentEmail()\n sent_email.receiver = receiver\n sent_email.subject = subject\n sent_email.sender = sender\n sent_email.status = 'sent'\n sent_email.user = user\n sent_email.body = body\n sent_email.save()\n return True\n else:\n return False", "def create_message(sender, to, subject, message_text_html, message_text_plain):\r\n message = MIMEMultipart('alternative')\r\n message['to'] = to\r\n message['from'] = sender\r\n message['subject'] = subject\r\n message_html = MIMEText(message_text_html, 'html') # HTML version\r\n message_plain = MIMEText(message_text_plain) # plain text version\r\n message.attach(message_plain)\r\n message.attach(message_html)\r\n return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}", "def test_activation_email_missing_template(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.send_activation_email(Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n self.assertEqual(mail.outbox[0].to, [self.user_info['email']])", "def build_hello_email():\n 
from_email = Email(\"[email protected]\")\n subject = \"Hello World from the SendGrid Python Library\"\n to_email = Email(\"[email protected]\")\n content = Content(\"text/plain\", \"some text here\")\n mail = Mail(from_email, subject, to_email, content)\n mail.personalizations[0].add_to(Email(\"[email protected]\"))\n\n return mail.get()", "def send_email(to, subject, body, attachment=None):\n outlook = win32.Dispatch('outlook.application')\n new_mail = outlook.CreateItem(0)\n new_mail.Subject = subject\n new_mail.HTMLBody = body\n new_mail.To = to\n\n if attachment:\n new_mail.Attachments.Add(attachment)\n\n new_mail.Send()", "def test_send_mass_html_mail_to_send_no_email(self, send_mass_html_mail__mock: Mock):\n self.family.guests.add(\n Guest(name=\"Pierre\", email=None, phone=\"0123456789\", female=False, family=self.family),\n bulk=False\n )\n events = Event.objects.filter(pk=self.event.pk)\n\n admin.EventAdmin.send_mail(Mock(), None, events)\n\n recipient = list(send_mass_html_mail__mock.call_args[0][0])[0][4]\n self.assertListEqual(list(recipient),\n [\"Françoise <[email protected]>\", \"Jean <[email protected]>\"])", "def is_no_email(self):\n return self._tag == 'no_email'", "def create_email_confirmation(self, trigger_email=True):\n EmailConfirmation.objects.create(user=self,\n email_vc=hexlify(os.urandom(5)),\n email_vc_expiry=datetime.datetime.utcnow().replace(tzinfo=utc) +\n datetime.timedelta(hours=3))", "def get_email():\n return Email(\n subject='[Messages] Integration Test',\n body='Conducting Integration Testing',\n attachments=str(TESTDIR.joinpath('file2.png')))", "def create_email(_from, _to, _subj, _body, files):\r\n msg = MIMEMultipart()\r\n msg['From'] = _from\r\n msg['To'] = _to\r\n msg['Subject'] = _subj\r\n msg.attach(MIMEText(_body, 'plain'))\r\n\r\n if files:\r\n for file in files:\r\n part = MIMEBase('application', 'octet-stream')\r\n part.set_payload(open(file, 'rb').read())\r\n encoders.encode_base64(part)\r\n part.add_header('Content-Disposition', 'attachment; filename=' + os.path.basename(file))\r\n msg.attach(part)\r\n\r\n return msg", "def create_sent_email(self, *args, **kwargs):\n receiver = kwargs['receiver']\n sender = kwargs['sender']\n user = kwargs['user']\n body = kwargs['body']\n subject = kwargs['subject']\n tracker_id = kwargs['tracker_id']\n if receiver and sender and subject and body:\n sent_email = SentEmail()\n if tracker_id:\n sent_email.tracker_id = tracker_id\n sent_email.read_status = 'Tracking'\n else:\n sent_email.read_status = 'Not Tracking'\n sent_email.receiver = receiver\n sent_email.subject = subject\n sent_email.sender = sender\n sent_email.status = 'sent'\n sent_email.user = user\n sent_email.body = body\n sent_email.save()\n return sent_email\n else:\n return False", "def set_receive_no_mail(self):\n self.__mail = False", "def create_email(username, provider):\n print(f\"Your new email is {username}@{provider}.com\")", "def create(cls, course_id, sender, to_option, subject, html_message, text_message=None):\r\n # automatically generate the stripped version of the text from the HTML markup:\r\n if text_message is None:\r\n text_message = html_to_text(html_message)\r\n\r\n # perform some validation here:\r\n if to_option not in TO_OPTIONS:\r\n fmt = 'Course email being sent to unrecognized to_option: \"{to_option}\" for \"{course}\", subject \"{subject}\"'\r\n msg = fmt.format(to_option=to_option, course=course_id, subject=subject)\r\n raise ValueError(msg)\r\n\r\n # create the task, then save it immediately:\r\n course_email = 
cls(\r\n course_id=course_id,\r\n sender=sender,\r\n to_option=to_option,\r\n subject=subject,\r\n html_message=html_message,\r\n text_message=text_message,\r\n )\r\n course_email.save_now()\r\n\r\n return course_email", "def create_email(user):\n if 'research' in user.get_domains():\n domain = 'research'\n else: domain = 'academic'\n subject = \"ECE/CIS Account Created\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n \n message = \"Your ECE/CIS %s account has been created with the username: %s\\n\\n\" % (domain, user.username)\n message += \"Please do not reply to this message. If you need assistance with your account, please visit:\\n\"\n message += \"%s\\n\\n\" % helprequest\n message += \"-- EE/CIS Labstaff\\n\"\n\n send('[email protected]', 'ECE/CIS Account System', \\\n [user.email], subject, message, MAILHOST)", "def _construct_message(self):\n self.message[\"text\"] = \"\"\n if self.from_:\n self.message[\"text\"] += \"From: \" + self.from_ + \"\\n\"\n if self.subject:\n self.message[\"text\"] += \"Subject: \" + self.subject + \"\\n\"\n\n self.message[\"text\"] += self.body\n self._add_attachments()" ]
[ "0.6983128", "0.65120506", "0.62612367", "0.622603", "0.61640054", "0.6127693", "0.59721655", "0.5970866", "0.5969491", "0.5878382", "0.5856451", "0.58348465", "0.5808649", "0.57627016", "0.5760926", "0.5755286", "0.57515126", "0.5740511", "0.5706194", "0.5683221", "0.5677282", "0.5674702", "0.5657391", "0.5639607", "0.56257707", "0.5583589", "0.5582309", "0.5542446", "0.5492775", "0.5491567" ]
0.8021318
0
Sends the message to the configured SMTP server.
def send(message): mail_server = smtplib.SMTP('localhost') mail_server.send_message(message) mail_server.quit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_message(self, message:str):\n context = ssl.create_default_context()\n with smtplib.SMTP_SSL(self.smtp_server, self.port, context=context) as self.server:\n self.server.login(self.user, password)\n self.server.sendmail(self.user, self.recipient, message)", "def send(self, email):\r\n smtp = smtplib.SMTP(self.server, self.port)\r\n smtp.ehlo()\r\n \r\n if self.tls:\r\n smtp.starttls()\r\n smtp.ehlo()\r\n\r\n if self.user and self.passwd:\r\n smtp.login(self.user, self.passwd)\r\n\r\n smtp.sendmail(email.from_address, email.to + email.ccs, str(email))\r\n if email.bccs:\r\n email.root['X-antroy-sent'] = \"True\"\r\n smtp.sendmail(email.from_address, email.bccs, str(email))\r\n del email.root['X-antroy-sent']\r\n smtp.quit()", "def send_email(message):\n mail_server = smtplib.SMTP('localhost')\n mail_server.send_message(message)\n mail_server.quit()", "def send_email(self):\n message = MIMEText(self.email_body, 'plain', 'utf-8')\n\n message['Subject'] = self.email_subject\n message['From'] = gmail_user\n message['To'] = ', '.join(self.recipients)\n\n try:\n server = smtplib.SMTP_SSL('smtp.gmail.com', 465)\n server.ehlo()\n\n server.login(gmail_user, gmail_password)\n\n server.sendmail(message['From'], self.recipients, message.as_string())\n\n server.close()\n\n print('Email sent!')\n except Exception as err:\n # TODO Write error to log file\n raise err", "def send(self):\n msg = MIMEText(self.body) # prepare body\n s = smtplib.SMTP(self.mail_server)\n self._connect_to_exchange(s)\n for receiver in iter(self.to_adress):\n if '@' not in receiver:\n receiver = '{rcv}@cbs.nl'.format(rcv=receiver)\n msg['Subject'] = self.subject\n msg['From'] = self.from_adress\n msg['To'] = receiver\n s.sendmail(self.from_adress, [receiver], msg.as_string())\n s.quit()", "def _send_smtp(message, subject, to, to_name, sender, sender_name):\n host = app.config.get('MAIL_HOST')\n\n if not host:\n raise MailFailure('SMTP Server Not Configured')\n\n try:\n server = smtplib.SMTP(host)\n except (smtplib.SMTPConnectError, socket.error) as ex:\n app.logger.error('Unable to send mail: %s', str(ex))\n raise MailFailure('Error connecting to SMTP server.')\n\n msg = text.MIMEText(message)\n msg['Subject'] = subject\n msg['To'] = email.utils.formataddr((to_name, to))\n msg['From'] = email.utils.formataddr((sender_name, sender))\n\n try:\n if app.debug:\n server.set_debuglevel(True)\n server.sendmail(sender, [to], msg.as_string())\n except (smtplib.SMTPException, socket.error) as ex:\n app.logger.error('Unable to send mail: %s', str(ex))\n raise MailFailure('Error sending mail to SMTP server.')\n finally:\n try:\n server.quit()\n except smtplib.SMTPException:\n pass", "def send(self):\n answers = dns.resolver.query(self.domain, 'MX')\n try:\n for answer in answers:\n ex = answer.exchange.to_text()\n server = smtplib.SMTP(ex)\n server.set_debuglevel(self.verbose)\n server.sendmail(self.sender, [self.recipient], self.message.as_string())\n server.quit()\n except OSError as e:\n if e.errno is errno.ENETUNREACH:\n print('Looks like port 25 is blocked')\n raise e", "def send_mail(self):\n try:\n mail = smtplib.SMTP('smtp.gmail.com', 587)\n mail.ehlo()\n mail.starttls()\n mail.login(self.mail_user, self.mail_pass)\n content = \"Subject: Test %s %s on host %s\\n\\n%s\\n logs are save at localhost path:\\n%s\" % (\n self.test_name, self.event, self.host_name, self.event_details, self.log_path\n )\n mail.sendmail(self.mail_user, self.target_mail, content)\n mail.close()\n except Exception as e:\n self.logger.error(\"Sending 
mail failed with Error %s\", e)\n\n else:\n self.logger.info(\"Mail sent to %s\", self.target_mail)", "def send_email(self, email_from, email_to, message):\n logging.info(\"Attempting to send email from \" + email_from + \" to \" + email_to)\n self.conn.sendmail(email_from, email_to, message)\n logging.info(\"Email sent\")", "def send(self):\n log.debug('send {} messages'.format(len(self.messages)))\n smtp = self._connect_smtp()\n if smtp is not None:\n for msg in self.messages:\n #TODO: There could be any exception in here somewhere\n log.debug('message: \\n\\r{}'.format(msg.as_string()))\n try:\n smtp.sendmail(msg['From'], msg['To'], msg.as_string())\n except smtplib.SMTPRecipientsRefused as err:\n log.warn('Recipient refused for following message: \\n\\r{}'.format(msg.as_string()))\n log.warn(err)\n except smtplib.SMTPException as err:\n log.critical('something went wrong with sending message: \\n\\r{}'.format(msg.as_string()))\n log.critical(err)\n smtp.quit()\n else:\n log.warning('emails did not get sent because of exception in connection')", "def send(self):\n return send_mail(self.subject, self.message, self.sender, self.recipients, fail_silently=False)", "def send(self, to_addrs, subject, message, from_addr=None):\n if not from_addr: from_addr = self.user\n data = \"From: %s\\nTo: %s\\nSubject: %s\\n\\n%s\" \\\n % (from_addr, to_addrs, subject, message)\n try:\n server = smtplib.SMTP(self.host)\n server.ehlo()\n server.starttls()\n server.ehlo() # This must be done before and after starttls().\n server.login(self.user, self.password)\n server.sendmail(from_addr, to_addrs, data)\n except:\n raise\n try:\n server.quit() # This always fails and can safely be ignored.\n except:\n pass", "def send_email(self, message):\n pass", "def send(self, smtp_server_instance: SMTPServer = None):\n\t\tif not self.can_send_now():\n\t\t\treturn\n\n\t\twith SendMailContext(self, smtp_server_instance) as ctx:\n\t\t\tmessage = None\n\t\t\tfor recipient in self.recipients:\n\t\t\t\tif recipient.is_mail_sent():\n\t\t\t\t\tcontinue\n\n\t\t\t\tmessage = ctx.build_message(recipient.recipient)\n\t\t\t\tif method := get_hook_method(\"override_email_send\"):\n\t\t\t\t\tmethod(self, self.sender, recipient.recipient, message)\n\t\t\t\telse:\n\t\t\t\t\tif not frappe.flags.in_test:\n\t\t\t\t\t\tctx.smtp_server.session.sendmail(\n\t\t\t\t\t\t\tfrom_addr=self.sender, to_addrs=recipient.recipient, msg=message\n\t\t\t\t\t\t)\n\n\t\t\t\tctx.update_recipient_status_to_sent(recipient)\n\n\t\t\tif frappe.flags.in_test:\n\t\t\t\tfrappe.flags.sent_mail = message\n\t\t\t\treturn\n\n\t\t\tif ctx.email_account_doc.append_emails_to_sent_folder:\n\t\t\t\tctx.email_account_doc.append_email_to_sent_folder(message)", "def sending(self, message):\n sending_mail.send(sender=self.__class__, message=message)", "def send(self, smtpServer = \"smtp.curtin.edu.au\", username = None, password = None, ssl = True, tls = True):\n\n # Construct the email object\n emailObj = email.mime.multipart.MIMEMultipart()\n emailObj[\"From\"] = self.fromAddr\n emailObj[\"To\"] = self.toAddr\n emailObj[\"Subject\"] = self.subject\n emailObj.preamble = \"Please use a MIME-aware mail reader.\\n\"\n\n emailObj.attach(email.mime.text.MIMEText(self.message))\n\n # Contact the server and send the email\n if ssl:\n server = smtplib.SMTP_SSL(smtpServer)\n else:\n server = smtplib.SMTP(smtpServer)\n if tls:\n server.starttls()\n\n try:\n if username is not None and password is not None:\n server.login(username, password)\n server.sendmail(self.fromAddr, self.toAddr, 
emailObj.as_string())\n except SMTPDataError\n print \"Message was not accepted\"\n except SMTPAuthenticationError\n print \"Username and password was not accepted\"\n except SMTPException\n print \"Something went wrong (TLS may not be supported)\"\n finally:\n server.quit()", "def email(self, email_address, message):\n self.server.sendmail(self.username, email_address, message)", "def mail():\n mail_server = 'localhost'\n mail_port = 1025\n CustomSMTPServer((mail_server, mail_port), None)\n asyncore.loop()", "def send_email(self, to, content):\r\n server = smtplib.SMTP(\"smtp.gmail.com\", 587)\r\n server.ehlo()\r\n server.starttls()\r\n server.login(self.from_, self.password)\r\n server.sendmail(self.from_, to, content)\r\n speak(\"Email has been sent Succesfully!\")\r\n return \"None\"", "def send_message():\n # @todo validation & error handling.\n sg = SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))\n log(\"Message generated and sent at {}\".format(strftime('%x %H:%M:%S')))\n sg.client.mail.send.post(request_body=build_message())", "def send_email(self, fromaddr, addrs, message=\"\"):\n smtp = smtplib.SMTP(self._server, self._port)\n smtp.sendmail(fromaddr, addrs, message)\n smtp.quit()", "def send_message(self, message):\n try:\n msg = sg.client.mail.send.post(request_body=message)\n app.logger.info(\"{error} with {response}\".format(error=msg.status_code, response=msg.body))\n app.logger.info(\"Successfully sent message: {msg}\".format(msg=msg))\n except Exception as e:\n app.logger.exception(\"Error While sending emails: {msg}\".format(msg=message))\n app.logger.exception(e)", "def send_email(self, message, mail_server_id=None, smtp_server=None, smtp_port=None,\n smtp_user=None, smtp_password=None, smtp_encryption=None, smtp_debug=False,\n smtp_session=None):\n # Use the default bounce address **only if** no Return-Path was\n # provided by caller. Caller may be using Variable Envelope Return\n # Path (VERP) to detect no-longer valid email addresses.\n if smtp_user:\n _logger.error(\"smpt session --------------------\")\n _logger.error(smtp_user)\n smtp_from = smtp_user\n else:\n smtp_from = message['Return-Path'] or self._get_default_bounce_address() or message['From']\n assert smtp_from, \"The Return-Path or From header is required for any outbound email\"\n\n # The email's \"Envelope From\" (Return-Path), and all recipient addresses must only contain ASCII characters.\n from_rfc2822 = extract_rfc2822_addresses(smtp_from)\n assert from_rfc2822, (\"Malformed 'Return-Path' or 'From' address: %r - \"\n \"It should contain one valid plain ASCII email\") % smtp_from\n # use last extracted email, to support rarities like 'Support@MyComp <[email protected]>'\n smtp_from = from_rfc2822[-1]\n email_to = message['To']\n email_cc = message['Cc']\n email_bcc = message['Bcc']\n del message['Bcc']\n\n smtp_to_list = [\n address\n for base in [email_to, email_cc, email_bcc]\n for address in extract_rfc2822_addresses(base)\n if address\n ]\n assert smtp_to_list, self.NO_VALID_RECIPIENT\n\n x_forge_to = message['X-Forge-To']\n if x_forge_to:\n # `To:` header forged, e.g. 
for posting on mail.channels, to avoid confusion\n del message['X-Forge-To']\n del message['To'] # avoid multiple To: headers!\n message['To'] = x_forge_to\n\n # Do not actually send emails in testing mode!\n if getattr(threading.currentThread(), 'testing', False) or self.env.registry.in_test_mode():\n _test_logger.info(\"skip sending email in test mode\")\n return message['Message-Id']\n\n try:\n message_id = message['Message-Id']\n smtp = smtp_session\n smtp = smtp or self.connect(\n smtp_server, smtp_port, smtp_user, smtp_password,\n smtp_encryption, smtp_debug, mail_server_id=mail_server_id)\n smtp.sendmail(smtp_from, smtp_to_list, message.as_string())\n # do not quit() a pre-established smtp_session\n if not smtp_session:\n smtp.quit()\n except smtplib.SMTPServerDisconnected:\n raise\n except Exception as e:\n params = (ustr(smtp_server), e.__class__.__name__, ustr(e))\n msg = _(\"Mail delivery failed via SMTP server '%s'.\\n%s: %s\") % params\n _logger.info(msg)\n raise MailDeliveryException(_(\"Mail Delivery Failed\"), msg)\n return message_id", "def send_email(my_email, password, message):\n server = smtplib.SMTP(\"smtp.gmail.com\", 587)\n server.starttls()\n server.login(my_email, password)\n # send from my_email to my_email (from, to, message)\n server.sendmail(my_email, my_email, message)\n server.quit()", "def send_message(self, message, send_to, subject):\n message = message.mime()\n\n message['From'] = self.email_address\n message['To'] = send_to\n\n message['Subject'] = subject\n\n self._login()\n self.server.sendmail(self.email_address, send_to, message.as_string())\n self._logout()", "def send(self):\n logger.debug('Sending Email')\n self.mimepgp.send()", "def send_email(self, message, from_addr=None, to_addrs=None,\n delay_send=0):\n if not isinstance(message, Message) and isinstance(message, str):\n smtp_meth = 'sendmail'\n if (from_addr is None) or (to_addrs is None):\n raise ValueError('If sending string email, please provide '\n 'from_addr and to_addrs.')\n elif isinstance(message, Message):\n smtp_meth = 'send_message'\n message = message.message\n else:\n raise ValueError('The message argument must either be an '\n 'auto_emailer.emailer.Message object or a string.')\n\n # delay sending by input value\n if delay_send:\n time.sleep(delay_send)\n\n # log in to email client if not already\n if not self._connected:\n self._login()\n\n # handle disconnect and connection errors by\n # quick login and attempt to send again\n try:\n delivery_meth = getattr(self._smtp, smtp_meth)\n delivery_meth(msg=message, from_addr=from_addr,\n to_addrs=to_addrs)\n except (smtplib.SMTPConnectError, smtplib.SMTPServerDisconnected):\n self._login()\n # needs to call getattr() again once it hits\n # here otherwise it will fail\n delivery_meth = getattr(self._smtp, smtp_meth)\n delivery_meth(msg=message, from_addr=from_addr,\n to_addrs=to_addrs)\n finally:\n self._logout()", "def sent(self, message):\n sent_mail.send(sender=self.__class__, message=message)", "def send(message: Message, smtp_url: str,\n timeout: Optional[float] = None) -> None:\n with smtplib.SMTP(smtp_url, timeout=timeout) as smtp:\n smtp.send_message(message.as_mime())", "def _login(self):\n self._smtp = smtplib.SMTP(host=self._config.host,\n port=self._config.port)\n # send 'hello' to SMTP server\n self._smtp.ehlo()\n # start TLS encryption\n self._smtp.starttls()\n self._smtp.login(self._config.sender_email, self._config.password)\n self._connected = True" ]
[ "0.75489813", "0.752796", "0.75139153", "0.7477795", "0.74659395", "0.7392813", "0.7332846", "0.71724397", "0.71238166", "0.71103996", "0.71020424", "0.70806277", "0.703991", "0.7000265", "0.69934726", "0.6977711", "0.69750607", "0.6926411", "0.6919089", "0.69188124", "0.6908053", "0.69049895", "0.6870654", "0.6861637", "0.68434817", "0.6788195", "0.6768823", "0.67529607", "0.6746485", "0.673677" ]
0.76511276
0
Write interesting attributes from a ServiceInfo to the log. The information written depends on the log level: basic info is written with log level INFO; if the log level is DEBUG, the basic info plus more (all properties) is written with log level DEBUG.
def log_serviceinfo(logger, info): try: debugging = logger.isEnabledFor(logging.DEBUG) log_level = logging.INFO log_info = {'name': info.name, 'address': socket.inet_ntoa(info.addresses[0]), 'port': info.port} log_hdr = "\n {address}:{port} {name}\n" log_fmt = log_hdr if debugging: log_level = logging.DEBUG if info.server != info.name: log_info['server'] = info.server log_fmt += " server: {server}\n" for (k, v) in info.properties.items(): li_k = "prop_" + bytes2str(k) log_info[li_k] = v log_fmt += " {k}: {{{li_k}}}\n".format(k=k, li_k=li_k) logger.log(log_level, log_fmt.format(**log_info)) except: logger.exception("exception in log_tivo_serviceinfo")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_info(info_dict):\n pass", "def log_info(self, line):\n logging.info(\"Telemetry Logger - %s\" % line)", "def log_info(info):\n log = open(log_path, 'a+')\n log.write(info + '\\n')\n log.close()", "def logInfo(self, timestamp, info):\n self.logs['messages'].write(','.join([str(i) for i in [\n self.formatTimestamp(timestamp), info]]) + '\\n')\n self.logs['messages'].flush()", "def print_info(self):\n\n self.logging.info(str(self.filename) + ':' + str(self.__info_dict))", "def log(self, loginfo):\n logging.basicConfig(level=logging.INFO,\n format='%(asctime)s %(filename)s:%(message)s',\n datefmt='%d %b %Y %H:%M:%S',\n filename=self.logfilepath,\n filemode='w')\n filelog = logging.FileHandler(self.logfilepath)\n logging.getLogger('Functest').addHandler(filelog)\n logging.info(loginfo)", "def info(self, message, *args, **kwargs):\n method = kwargs.pop('method_name', None)\n clazz = kwargs.pop('class_name', None)\n error = kwargs.pop('error', None)\n level = Level.INFO\n if self._mode_type == TOOL:\n level = Level.FINE\n record = self._get_log_record(level, clazz, method, message, error, *args)\n self.logger.log(record)", "def log_info(self, obj, message):\n super().log_info(obj=obj, message=message)", "def write_info_to_file(self):\n\n self.info.write_mission_info()\n\n self.logger.info(\"Mission instance write succeeded.\")", "def log_info(self, message, msg_type='info'):\n pass", "def write_info(self, opt):\n path = EVT_PERIGEE_INFO_PATH(opt.data_dir, self)\n LOGGER.info(f\"Writing info to {path}\")\n path.write_text(json.dumps(self.info, indent=4))", "def set_log_info():\n set_log_level_format(logging.INFO,\n '%(asctime)s %(levelname)s:%(name)s:%(message)s')", "def notify(self, info, context=None):\n\n info[\"project\"] = self.project\n info[\"service\"] = self.service\n self.client.info(context or self.context,\n \"profiler.%s\" % info[\"service\"],\n info)", "def log(writer, name, info, step):\n if isinstance(info, dict):\n for key, value in info.items():\n tag = name + '/' + key\n writer.add_scalar(tag, value, step)\n elif isinstance(info, float):\n writer.add_scalar(name, info, step)", "def info(self, log_msg):\n now = datetime.datetime.now()\n log_level = \"info\"\n datestamp = self.create_datestamp(now)\n timestamp = self.create_timestamp(now)\n hrtimestemp = self.create_human_readable_timestamp(now)\n tags = json.dumps(self.tags)\n log_body = self.log_builder(log_level, hrtimestemp, datestamp, timestamp, log_msg, tags)\n self.logger.info(log_body)", "def _log_sim_props(self):\n return\n props = self.properties\n if isinstance(props, str):\n self._logger.error(f\"Could not get sim properties: {props}\")\n return\n self._logger.info(\n f\"UTC={props.utc_datetime}, \"\n f\"scenario_time={int(props.scenario_time):4}s, \"\n f\"speed={props.speed:.2f}x, \"\n f\"state={props.state.name}\"\n )", "def info(self, **kwargs):\n for key, value in kwargs.items():\n setattr(self, key, value)", "def __call__(self, *args, **kwargs):\n self.logger.info(*args, **kwargs)", "def _add_details(self, info):\n for (key, val) in six.iteritems(info):\n if key == \"nodes\":\n val = [Node(parent=self, **nd) for nd in val]\n elif key == \"sessionPersistence\":\n val = val['persistenceType']\n elif key == \"cluster\":\n val = val['name']\n elif key == \"virtualIps\":\n key = \"virtual_ips\"\n val = [VirtualIP(parent=self, **vip) for vip in val]\n setattr(self, key, val)", "def writeToMetadata(self, context):\n fqId = self.type + GenericMetadata.COMPOUND_KEY_SEP + self.id\n fqId = fqId.lower()\n\n 
climatePoints = GenericMetadata.readClimatePointEntries(context)\n try:\n stations = climatePoints['stations'].split(GenericMetadata.VALUE_DELIM)\n except KeyError:\n stations = []\n # Write station metadata (overwrite if already present)\n keys = []\n values = []\n if fqId not in stations:\n stations.append(fqId)\n stationsStr = GenericMetadata.VALUE_DELIM.join(stations)\n keys.append('stations'); values.append(stationsStr)\n # Write attributes for station\n keyProto = 'station' + GenericMetadata.COMPOUND_KEY_SEP + fqId + GenericMetadata.COMPOUND_KEY_SEP \n longitude = keyProto + 'longitude'\n keys.append(longitude); values.append(self.longitude)\n latitude = keyProto + 'latitude'\n keys.append(latitude); values.append(self.latitude)\n elevation = keyProto + 'elevation'\n keys.append(elevation); values.append(self.elevation)\n name = keyProto + 'name'\n keys.append(name); values.append(self.name)\n if self.startDate:\n startDate = keyProto + 'startdate'\n keys.append(startDate); values.append(self.startDate.strftime(ClimatePointStation.FMT_DATE))\n if self.endDate:\n endDate = keyProto + 'enddate'\n keys.append(endDate); values.append(self.endDate.strftime(ClimatePointStation.FMT_DATE))\n if self.variables:\n variablesKey = keyProto + 'variables'\n variablesValue = GenericMetadata.VALUE_DELIM.join(self.variables)\n keys.append(variablesKey); values.append(variablesValue)\n if self.data != None:\n data = keyProto + 'data'\n keys.append(data); values.append(self.data)\n elif self.variablesData:\n # Try to write data entries for each variable separately\n vars = self.variablesData.keys()\n for var in vars:\n varKey = keyProto + var + GenericMetadata.COMPOUND_KEY_SEP + 'data'\n keys.append(varKey); values.append(self.variablesData[var])\n GenericMetadata.writeClimatePointEntries(context, keys, values)", "def info(self, *lines):\n if self.__debug_level >= DEBUG_LEVELS['info']:\n self.print_lines(self.colored(('green', 'bold'), lines))", "def log_info(self, message, type='info'):\n if type == 'info':\n logger.debug(message)\n else:\n logger.error(message)", "def log_info(self, msg):\n self.log(msg, level=LOG_INFO)", "def _log_some_info(self):\n logging.info('info')", "def log_info(self, msg, *args, **kwargs):\n if self.action_logging_enabled and self._log is not None:\n self._log.info(msg, *args, **kwargs)\n return", "def loginfo(self, msg):\n self.logger.info(msg)", "def attributesFromDict(d):\n self = d.pop('self')\n for name, value in d.items():\n setattr(self, name, value)\n \n \"\"\"Manage a log file\"\"\"\n \n def __init__(self, logfile):\n \"\"\"logfile is the file name or None\"\"\"\n\n self.logfile = logfile\n if self. 
logfile:\n self.file = open(logfile, \"w\")\n self.starttime = time.time()\n self.file.write(\"%.2f %s Starting log\\n\" % (time.time() - self.starttime, time.asctime()))\n \n def __enter__(self):\n return self\n \n def write(self, text):\n if self.logfile:\n self.file.write(\"%.2f: %s\\n\" % (time.time() - self.starttime, text))\n self.file.flush()\n \n def close(self):\n if self.logfile:\n self.write(\"Closing log\")\n self.file.close()", "def log_info(self, msg):\n self.logger.info(msg)", "def print_info(self):\n \n print \"\"\"version: %d\\t header_len: %d\\t tos: %s\\t total_len: %d\n id: %s\\t flags_reservedbit: %d\\t flags_dont_fragment: %d\\t flags_more_fragment: %d\n fragment_offset: %d\\t TTL: %d\\t protocol: %s\\t\n header_checksum: %s\\t\n src: %s\\t dst: %s\n opt_paddings: %s\"\"\" % (\n self.version, self.header_len, self.type_of_service, self.total_len, self.id, self.flags_reservedbit, \n self.flags_dont_fragment, self.flags_more_fragment, \n self.fragment_offset, self.TTL, self.protocol, self.header_checksum, self.src, self.dst, repr(self.opt_paddings))", "def info( cls, msg ):\n cls.log( logging.INFO, msg )" ]
[ "0.5978037", "0.55530024", "0.53744555", "0.53507626", "0.5314723", "0.52885777", "0.52751833", "0.5254027", "0.5177724", "0.515748", "0.5137067", "0.51125574", "0.5083731", "0.5079099", "0.50736225", "0.5057875", "0.5031719", "0.50247157", "0.5020744", "0.49925062", "0.49780673", "0.4958515", "0.49521393", "0.49510404", "0.49441224", "0.48522556", "0.48368534", "0.48290735", "0.482779", "0.48115504" ]
0.7036564
0
Announce our shares via Zeroconf.
def __init__(self, logger): self.share_names = [] self.share_info = [] self.logger = logger self.rz = zeroconf.Zeroconf() self.renamed = {} old_titles = self.scan() address = socket.inet_aton(config.get_ip()) port = int(config.getPort()) logger.info('Announcing pytivo shares ({}:{})...'.format(config.get_ip(), port)) for section, settings in config.getShares(): try: plugin = GetPlugin(settings['type']) ct = plugin.CONTENT_TYPE # if the plugin provides a test for validity use it otherwise assume valid if hasattr(plugin, 'is_valid') and not plugin.is_valid(section, settings): logger.warning('share "%s" is invalid. It will be ignored (maybe check that path exists)', section) continue except Exception as e: logger.error('ZCBroadcast.__init__: raised %s: %s', e.__class__.__name__, e) continue if ct.startswith('x-container/'): if 'video' in ct: platform = PLATFORM_VIDEO else: platform = PLATFORM_MAIN logger.info('Registering: %s' % section) self.share_names.append(section) desc = {b'path': bytes(SHARE_TEMPLATE % quote(section), 'utf-8'), b'platform': bytes(platform, 'utf-8'), b'protocol': b'http', b'tsn': bytes('{%s}' % uuid.uuid4(), 'utf-8')} tt = ct.split('/')[1] title = section count = 1 while title in old_titles: # debugging info while I try to figure out what this loop is for logger.info(" title b4: {}".format(title)) count += 1 title = '%s [%d]' % (section, count) self.renamed[section] = title # more debugging info logger.info(" title after: {}\n section: {}".format(title, section)) info = zeroconf.ServiceInfo('_%s._tcp.local.' % tt, '%s._%s._tcp.local.' % (title, tt), port=port, addresses=[address], properties=desc) log_serviceinfo(self.logger, info) self.rz.register_service(info) self.share_info.append(info)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def arp_announce(self):\n pass", "def test_show_nas_share(self):\n pass", "def announceGenerate(self):", "def test_show_nas_share_by_nas(self):\n pass", "def test_update_nas_share(self):\n pass", "def _announceContainers(self):\n pass", "def createPublicNfsShare(self,sharepath=\"/opt\"):\n self.prepareForSSODebug() \n self.install(\"nfs-kernel-server\")\n self.writeFile(\"/etc/exports\",\"/opt *(rw,sync,no_root_squash,no_subtree_check)\") \n self.execute(\"echo '' > /etc/hosts.allow\",dieOnError=True)\n self.execute(\"echo '' > /etc/hosts.deny\",dieOnError=True)\n self.execute(\"exportfs -rav\")", "def test_update_nas_share_by_nas(self):\n pass", "def _Announce(self):\n key = self._GetServerKey(self.peer_id)\n logging.debug('Encrypting announcement.')\n value = self._Encrypt('%s:%d' % (self.host, self.port))\n logging.debug('Posting announcement.')\n self._dht.Put(key, value)", "def test_create_nas_share_by_nas(self):\n pass", "def announce(\n self,\n guardian_key: ElectionPublicKey,\n tally_share: DecryptionShare,\n ballot_shares: Dict[BallotId, Optional[DecryptionShare]] = None,\n ) -> None:\n guardian_id = guardian_key.owner_id\n\n # Only allow a guardian to announce once\n if guardian_id in self._available_guardians:\n log_info(f\"guardian {guardian_id} already announced\")\n return\n\n self._save_tally_share(guardian_id, tally_share)\n\n if ballot_shares is not None:\n self._save_ballot_shares(guardian_id, ballot_shares)\n\n self._mark_available(guardian_key)", "def test_test_nas_share(self):\n pass", "def usage(self, host):", "def _announceContainers(self):\n distance = float(self.config.container_manager.announce_distance)\n targetpeers = self.config.owner.getByDistance(self.session, distance)\n\n containers = self.announcequeue\n self.announcequeue = []\n msg = 'announcing %d %ss to %d peers'\n self.logger.log(msg % (len(containers), self.cname, len(targetpeers)))\n\n self.session.commit() # release the session lock for following long operations\n msg = '%s url: %s, owner: %s, name: %s'\n owner, name = (self.config.owner, self.config.owner.name)\n for container in containers:\n self.logger.log(msg % (self.cname, container.url, owner, name))\n for peer in targetpeers:\n try:\n peer.transport.containerOffer(self.config.owner.name, container.url,\n self.cname)\n except Exception, e:\n self.logger.log('Exception while sending to peer %r: %r' %\n (peer, e))", "def announce(self):\n m = rtorrent9.rpc.Multicall(self)\n self.multicall_add(m, \"d.tracker_announce\")\n\n return m.call()[-1]", "def publish():\n pass", "def test_show_nas_share_by_pool(self):\n pass", "def test_set_share(self):\n self.app.post_json(url=\"/config/shares\",\n params=dict(\n source='gsiftp://source',\n destination='gsiftp://nowhere',\n vo='dteam',\n share=80\n ),\n status=200\n )", "def test_update_nas_share_by_pool(self):\n pass", "def announce(self, request_announce):\n return self.client.call('POST',\n self.name + 'announce',\n payload=request_announce)", "def main(connection, info, conf) :\r\n connection.rawsend(\"NOTICE %s :\u0001TIME %s\u0001\\n\" % (info[\"sender\"], time.strftime(\"%b %d %Y, %H:%M:%S %Z\")))", "async def about_aoc(self, ctx: commands.Context) -> None:\n await ctx.send(\"\", embed=self.cached_about_aoc)", "def centralizedadvertise_announce(key, value, ttlval):\r\n # do basic argument checking / munging\r\n key = str(key)\r\n value = str(value)\r\n\r\n if not type(ttlval) is int and not type(ttlval) is long:\r\n raise TypeError(\"Invalid type '\"+str(type(ttlval))+\"' for 
ttlval.\")\r\n\r\n if ttlval < 1:\r\n raise ValueError(\"The argument ttlval must be positive, not '\"+str(ttlval)+\"'\")\r\n\r\n \r\n # build the tuple to send, then convert to a string because only strings\r\n # (bytes) can be transmitted over the network...\r\n datatosend = ('PUT',key,value,ttlval)\r\n datastringtosend = serialize_serializedata(datatosend)\r\n\r\n \r\n # send the data over a timeout socket using the session library, then\r\n # get a response from the server.\r\n sockobj = timeout_openconn(servername,serverport, timeout=10)\r\n try:\r\n session_sendmessage(sockobj, datastringtosend)\r\n rawresponse = session_recvmessage(sockobj)\r\n finally:\r\n # BUG: This raises an error right now if the call times out ( #260 )\r\n # This isn't a big problem, but it is the \"wrong\" exception\r\n sockobj.close()\r\n \r\n # We should check that the response is 'OK'\r\n try:\r\n response = serialize_deserializedata(rawresponse)\r\n if response != 'OK':\r\n raise CentralAdvertiseError(\"Centralized announce failed with '\"+response+\"'\")\r\n except ValueError, e:\r\n raise CentralAdvertiseError(\"Received unknown response from server '\"+rawresponse+\"'\")", "def task6(self, doc_uuid):\n doc_shares = {}\n for entry in self.records:\n if ((entry['event_type'] == 'share') and (entry['subject_doc_id'] == doc_uuid)):\n service = entry['event_service']\n if(service in doc_shares):\n doc_shares[service] += 1\n else:\n doc_shares[service] = 1\n GUI.show_histo(doc_shares, \"vert\", \"Number Of Shares for Platform\", \"Platform Distribution\")", "def v2centralizedadvertise_announce(key, value, ttlval):\r\n # do basic argument checking / munging\r\n key = str(key)\r\n value = str(value)\r\n\r\n if not type(ttlval) is int and not type(ttlval) is long:\r\n raise TypeError(\"Invalid type '\"+str(type(ttlval))+\"' for ttlval.\")\r\n\r\n if ttlval < 1:\r\n raise ValueError(\"The argument ttlval must be positive, not '\"+str(ttlval)+\"'\")\r\n\r\n \r\n # build the tuple to send, then convert to a string because only strings\r\n # (bytes) can be transmitted over the network...\r\n datatosend = ('PUT',key,value,ttlval)\r\n datastringtosend = serialize_serializedata(datatosend)\r\n\r\n \r\n # send the data over a timeout socket using the session library, then\r\n # get a response from the server.\r\n sockobj = timeout_openconn(v2servername,v2serverport, timeout=10)\r\n try:\r\n session_sendmessage(sockobj, datastringtosend)\r\n rawresponse = session_recvmessage(sockobj)\r\n finally:\r\n # BUG: This raises an error right now if the call times out ( #260 )\r\n # This isn't a big problem, but it is the \"wrong\" exception\r\n sockobj.close()\r\n \r\n # We should check that the response is 'OK'\r\n try:\r\n response = serialize_deserializedata(rawresponse)\r\n if response != 'OK':\r\n raise CentralAdvertiseError(\"Centralized announce failed with '\"+response+\"'\")\r\n except ValueError, e:\r\n raise CentralAdvertiseError(\"Received unknown response from server '\"+rawresponse+\"'\")", "def afk_command(command, server):\n\n channel = command.event.args[0] if not command.event.args[\n 0] == server.nick else command.event.name\n user = command.event.name\n\n # Set afk message\n if len(command.args) > 0:\n message = \" \".join(command.args)\n server.shared_data.set(\"afk.%s.message\" % user, message)\n\n # Mark user as afk\n server.shared_data.set(\"afk.%s.bool\" % user, True)\n\n # Send announcing message\n announce_event = irc.Irc_event(\"PRIVMSG\", channel, \"%s is now afk.\" % user)\n 
server.send_event(announce_event)", "def DORadvertise_announce(key, value, ttlval, timeout=None):\r\n\r\n post_params = {'command': 'announce', 'key': key, 'value': value,\r\n 'lifetime': str(int(ttlval))}\r\n\r\n _DORadvertise_command(post_params, timeout=timeout)\r\n\r\n return None", "async def announce(self, ctx, *, msg):\n if self._announce_msg is not None:\n await self.bot.say(\"Already announcing, wait until complete to\"\n \" issue a new announcement.\")\n else:\n self._announce_msg = msg", "def test_test_result_nas_share(self):\n pass", "def associate(self, sta, ssid): \n self.host = sta\n self.host.cmd(\"iw dev %s-wlan0 connect %s\" % (sta, ssid))\n self.confirmInfraAssociation(self.host)" ]
[ "0.63648856", "0.56986153", "0.558674", "0.5581721", "0.55010027", "0.5460141", "0.54527074", "0.5437008", "0.5336524", "0.5284157", "0.52205145", "0.5189006", "0.51136243", "0.50331676", "0.5020038", "0.5010464", "0.49890482", "0.49731845", "0.49262094", "0.4919421", "0.4916821", "0.49060473", "0.4878923", "0.48704636", "0.4868549", "0.48666322", "0.48659208", "0.48631036", "0.48598644", "0.48515308" ]
0.67490685
0
Look for TiVos using Zeroconf.
def scan(self): VIDS = '_tivo-videos._tcp.local.' names = [] self.logger.info('Scanning for TiVos...\n') # Get the names of servers offering TiVo videos browser = zeroconf.ServiceBrowser(self.rz, VIDS, None, ZCListener(names, logger=self.logger)) # Give them a second (or more if no one has responded in the 1st second) to respond time.sleep(1) max_sec_to_wait = 10 sec_waited = 0 while not names and sec_waited < max_sec_to_wait: sec_waited += 1 time.sleep(1) # Any results? if names: config.tivos_found = True # Now get the addresses -- this is the slow part for name in names: info = self.rz.get_service_info(VIDS, name + '.' + VIDS) log_serviceinfo(self.logger, info) if info: # zeroconf v2.7 removed ServiceInfo address member says use addresses instead. # Some debug logging to see if there is always at least the currently assumed 1 address (and maybe more?) self.logger.debug(f'Found zeroconf.ServiceInfo with {len(info.addresses)} IP addresses\n') tsn = info.properties.get(b'TSN') if config.get_togo('all'): tsn = info.properties.get(b'tsn', tsn) if tsn: if isinstance(tsn, bytes): tsn = tsn.decode('utf-8') address = socket.inet_ntoa(info.addresses[0]) port = info.port config.tivos[tsn] = {'name': name, 'address': address, 'port': port} # info.properties has bytes keys and values, but we'd rather # deal with str keys and values, so convert them before adding # them to our tivos dict. config.tivos[tsn].update(bytes2str(info.properties)) # Debugging information on what services have been found: # try: # all_services = zeroconf.ZeroconfServiceTypes.find(self.rz) # self.logger.info("All services found") # for s in all_services: # self.logger.info(" {}".format(s)) # except Exception as e: # self.logger.error(e) return names
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tempest_ceph_services_tun(self):\n self.helper_ceph_services('tun')", "def test_get_system(self):\n pass", "def main():\n\n args = GetArgs()\n if args.password:\n password = args.password\n else:\n password = getpass.getpass(prompt='Enter password for host %s and user %s: ' % (args.vc, args.user))\n\n try:\n\n s = ssl.SSLContext(ssl.PROTOCOL_TLSv1)\n s.verify_mode = ssl.CERT_NONE\n\n\n # connection string\n\n si = connect.SmartConnect(host=args.vc,\n user=args.user,\n pwd=password,\n sslContext=s)\n\n content = si.RetrieveServiceContent()\n\n\n container = content.rootFolder # starting point to look into\n viewType = [vim.VirtualMachine] # object types to look for\n recursive = True # whether we should look into it recursively\n containerView = content.viewManager.CreateContainerView(container,\n viewType,\n recursive)\n\n children = containerView.view\n for child in children:\n has_iso(child)\n\n except vmodl.MethodFault as error:\n print(\"Caught vmodl fault : \" + error.msg)\n return -1\n\n return 0", "def _detect(self):\n if (monasca_setup.detection.find_process_name('nova-api') is not None and\n os.path.isfile(nova_conf)):\n self.available = True", "def check_for_tvh(conf):\n\n logging.info(\"Verificando TVHeadend\")\n\n resp = False\n\n logging.info(\"TVHeadend running\")\n try:\n req = urllib2.Request(\n \"http://\" + conf['tvheadendAddress'] + \":\" + conf['tvheadendPort'] + '/api/serverinfo')\n urllib2.urlopen(req)\n except urllib2.HTTPError as e_error:\n logging.info(\"TVHeadend com autenticação, utilize --help\")\n logging.info('Error code: %s', e_error.code)\n except urllib2.URLError as e_error:\n logging.info(\"TVHeadend nao encontrado\")\n logging.info('Reason: %s', e_error.reason)\n else:\n resp = True\n\n return resp", "def tempest_ceph_services_vlan(self):\n self.helper_ceph_services('vlan')", "def check_ovmf(self):\n\n for index, ovmf in enumerate(self.ovmf_bios):\n if os.path.exists(ovmf):\n continue\n for suffix in ('qcow2', 'bin'):\n path = '%s/%s.%s' % (self.get('DEPLOY_DIR_IMAGE'), ovmf, suffix)\n if os.path.exists(path):\n self.ovmf_bios[index] = path\n break\n else:\n raise Exception(\"Can't find OVMF firmware: %s\" % ovmf)", "def main():\n\n args = get_args()\n\n try:\n service_instance = connect.SmartConnect(host=args.host,\n user=args.user,\n pwd=args.password,\n port=int(args.port))\n\n atexit.register(connect.Disconnect, service_instance)\n\n content = service_instance.RetrieveContent()\n\n container = content.rootFolder # starting point to look into\n viewType = [vim.VirtualMachine] # object types to look for\n recursive = True # whether we should look into it recursively\n containerView = content.viewManager.CreateContainerView(\n container, viewType, recursive)\n\n children = containerView.view\n host_view = content.viewManager.CreateContainerView(content.rootFolder,\n [vim.HostSystem],\n True)\n for host in host_view.view:\n if host.name=='192.168.100.10':\n children = host.vm\n for child in children:\n print_vm_info(child)\n print('************')\n print(child.summary.config.memorySizeMB)\n print(child.summary.config.numCpu)\n capacity=0\n try:\n for dev in child.config.hardware.device:\n if hasattr(dev.backing, 'fileName'):\n try:\n capacity += dev.capacityInKB\n except:\n pass\n except:\n pass\n print(capacity)\n for task in child.recentTask:\n ta = task.info.state\n print (task.info.startTime)\n print (task.info.completeTime)\n print(task.info.progress)\n\n except vmodl.MethodFault as error:\n print(\"Caught vmodl fault : \" + error.msg)\n 
return -1\n\n return 0", "def os_discovery():\n hs = HostSearch()\n\n hosts = hs.get_hosts(ports=[445], tags=['!nmap_os'])\n\n # TODO fix filter for emtpy fields.\n hosts = [host for host in hosts if not host.os]\n\n host_dict = {}\n for host in hosts:\n host_dict[str(host.address)] = host\n\n arguments = \"--script smb-os-discovery.nse -p 445 -Pn -n --disable-arp-ping\".split(' ')\n if len(hosts):\n count = 0\n print_notification(\"Checking OS of {} systems\".format(len(hosts)))\n result = nmap(arguments, [str(h.address) for h in hosts])\n\n parser = NmapParser()\n report = parser.parse_fromstring(result)\n\n for nmap_host in report.hosts:\n for script_result in nmap_host.scripts_results:\n script_result = script_result.get('elements', {})\n\n host = host_dict[str(nmap_host.address)]\n if 'fqdn' in script_result:\n host.hostname.append(script_result['fqdn'])\n if 'os' in script_result:\n count += 1\n host.os = script_result['os']\n\n host_dict[str(nmap_host.address)] = host\n\n for host in hosts:\n host.add_tag('nmap_os')\n host.save()\n\n print_notification(\"Done, found the os of {} systems\".format(count))\n\n else:\n print_notification(\"No systems found to be checked.\")", "def register_oslo_configs(conf):\n conf.register_opts(_get_oslo_configs())", "def setupMonti():\n #Update /etc/hosts with mongo-server and management-engine nodes\n sudo(\"apt-get install zookeeper\")\n sudo(\"apt-get install zookeeperd\")\n sudo(\"pip2 install chariot-runtime\")\n #update configuration file located in /etc/chariot/chariot.conf\n run (\"cd /etc/init.d && sudo update-rc.d chariot-nmw defaults 99\")\n sudo(\"reboot\")", "def get_zones(vsys=\"1\"):\n query = {\n \"type\": \"config\",\n \"action\": \"get\",\n \"xpath\": (\n \"/config/devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys{}']/\"\n \"zone\".format(vsys)\n ),\n }\n\n return __proxy__[\"panos.call\"](query)", "def main():\n run_nutanix_vm_creation_module()", "def openVCPEAccess(cls, volt_subscriber_info):\n OnosCtrl.install_app(cls.APP_FILE, onos_ip = cls.HEAD_NODE)\n time.sleep(2)\n s_tags = map(lambda tenant: int(tenant['voltTenant']['s_tag']), volt_subscriber_info)\n #only get unique vlan tags\n s_tags = list(set(s_tags))\n devices = OnosCtrl.get_device_ids(controller = cls.HEAD_NODE)\n if devices:\n device_config = {}\n for device in devices:\n device_config[device] = []\n for s_tag in s_tags:\n xconnect_config = {'vlan': s_tag, 'ports' : [ cls.FABRIC_PORT_HEAD_NODE, cls.FABRIC_PORT_COMPUTE_NODE ] }\n device_config[device].append(xconnect_config)\n\n cfg = { 'apps' : { 'org.ciena.xconnect' : { 'xconnectTestConfig' : device_config } } }\n OnosCtrl.config(cfg, controller = cls.HEAD_NODE)", "def test_get_virtual_service(self):\n pass", "def setup_vim():\n pass", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()", "def test_guest_os(self):\n self.check_guest_os()" ]
[ "0.5364275", "0.5249664", "0.5161627", "0.5127778", "0.50975037", "0.50634813", "0.5058263", "0.5044416", "0.4960539", "0.49410504", "0.49074653", "0.48924124", "0.48650116", "0.4830773", "0.48301288", "0.48240846", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678", "0.48059678" ]
0.66688216
0
Exchange beacons, and extract the machine name.
def get_name(self, address): our_beacon = self.format_beacon('connected', False) machine_name = re.compile('machine=(.*)\n').search try: tsock = socket.socket() tsock.connect((address, 2190)) self.send_packet(tsock, our_beacon) tivo_beacon = self.recv_packet(tsock) tsock.close() name = machine_name(tivo_beacon).groups()[0] except: name = address return name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def get_beacons(self):\n try:\n return await self._get_gist_data(comm_type='beacon')\n except Exception:\n self.log.debug('Receiving beacons over c2 (%s) failed!' % self.__class__.__name__)\n return []", "async def handle_beacons(self, beacons):\n for beacon in beacons:\n beacon['contact'] = self.name\n agent, instructions = await self.contact_svc.handle_heartbeat(**beacon)\n await self._send_payloads(agent, instructions)\n await self._send_instructions(agent, instructions)", "def beacon(config):\n ret = []\n changes = {}\n txt = {}\n\n global LAST_GRAINS\n global SD_REF\n\n config = salt.utils.beacons.list_to_dict(config)\n\n if \"servicename\" in config:\n servicename = config[\"servicename\"]\n else:\n servicename = __grains__[\"host\"]\n # Check for hostname change\n if LAST_GRAINS and LAST_GRAINS[\"host\"] != servicename:\n changes[\"servicename\"] = servicename\n\n if LAST_GRAINS and config.get(\"reset_on_change\", False):\n # Check for IP address change in the case when we reset on change\n if LAST_GRAINS.get(\"ipv4\", []) != __grains__.get(\"ipv4\", []):\n changes[\"ipv4\"] = __grains__.get(\"ipv4\", [])\n if LAST_GRAINS.get(\"ipv6\", []) != __grains__.get(\"ipv6\", []):\n changes[\"ipv6\"] = __grains__.get(\"ipv6\", [])\n\n for item in config[\"txt\"]:\n changes_key = \"txt.\" + salt.utils.stringutils.to_unicode(item)\n if config[\"txt\"][item].startswith(\"grains.\"):\n grain = config[\"txt\"][item][7:]\n grain_index = None\n square_bracket = grain.find(\"[\")\n if square_bracket != -1 and grain[-1] == \"]\":\n grain_index = int(grain[square_bracket + 1 : -1])\n grain = grain[:square_bracket]\n\n grain_value = __grains__.get(grain, \"\")\n if isinstance(grain_value, list):\n if grain_index is not None:\n grain_value = grain_value[grain_index]\n else:\n grain_value = \",\".join(grain_value)\n txt[item] = _enforce_txt_record_maxlen(item, grain_value)\n if LAST_GRAINS and (\n LAST_GRAINS.get(grain, \"\") != __grains__.get(grain, \"\")\n ):\n changes[changes_key] = txt[item]\n else:\n txt[item] = _enforce_txt_record_maxlen(item, config[\"txt\"][item])\n\n if not LAST_GRAINS:\n changes[changes_key] = txt[item]\n\n if changes:\n txt_record = pybonjour.TXTRecord(items=txt)\n if not LAST_GRAINS:\n changes[\"servicename\"] = servicename\n changes[\"servicetype\"] = config[\"servicetype\"]\n changes[\"port\"] = config[\"port\"]\n changes[\"ipv4\"] = __grains__.get(\"ipv4\", [])\n changes[\"ipv6\"] = __grains__.get(\"ipv6\", [])\n SD_REF = pybonjour.DNSServiceRegister(\n name=servicename,\n regtype=config[\"servicetype\"],\n port=config[\"port\"],\n txtRecord=txt_record,\n callBack=_register_callback,\n )\n atexit.register(_close_sd_ref)\n ready = select.select([SD_REF], [], [])\n if SD_REF in ready[0]:\n pybonjour.DNSServiceProcessResult(SD_REF)\n elif config.get(\"reset_on_change\", False) or \"servicename\" in changes:\n # A change in 'servicename' requires a reset because we can only\n # directly update TXT records\n SD_REF.close()\n SD_REF = None\n reset_wait = config.get(\"reset_wait\", 0)\n if reset_wait > 0:\n time.sleep(reset_wait)\n SD_REF = pybonjour.DNSServiceRegister(\n name=servicename,\n regtype=config[\"servicetype\"],\n port=config[\"port\"],\n txtRecord=txt_record,\n callBack=_register_callback,\n )\n ready = select.select([SD_REF], [], [])\n if SD_REF in ready[0]:\n pybonjour.DNSServiceProcessResult(SD_REF)\n else:\n txt_record_raw = str(txt_record).encode(\"utf-8\")\n pybonjour.DNSServiceUpdateRecord(\n SD_REF, RecordRef=None, flags=0, rdata=txt_record_raw\n 
)\n\n ret.append({\"tag\": \"result\", \"changes\": changes})\n\n if config.get(\"copy_grains\", False):\n LAST_GRAINS = __grains__.copy()\n else:\n LAST_GRAINS = __grains__\n\n return ret", "async def get_beacons(self):\n try:\n beacons = await self._get_raw_gist_urls(comm_type='beacon')\n beacon_content = await self._get_gist_content([beacon[0] for beacon in beacons])\n await self._delete_gists([beacon[1] for beacon in beacons])\n return beacon_content\n except Exception:\n self.log.debug('Receiving beacons over c2 (%s) failed!' % self.__class__.__name__)\n return []", "def get_mac(self) -> str:\n self.sendline(\"iw {} info\".format(self.iface_dut))\n # We are looking for MAC definition of STA\n # wdev 0x1\n # addr 96:4e:c9:cc:7a:2c\n # type managed\n self.expect(\"addr (?P<mac>..:..:..:..:..:..)\\r\\n\\t(type|ssid)\")\n return self.match.group('mac')", "def bluez_decode_beacons(bluez_packet):\n # Initialize beacons list\n beacons = []\n # Check if the packet is the minimum length to be able to unpack the\n # BlueZ packet header\n if len(bluez_packet) >= 5:\n # Decode BlueZ header to see if the packet contains LE advertising info\n BlueZHeader = namedtuple('BlueZHeader', 'hci_packet_type event '\n + 'length meta_event report_num')\n bzh = BlueZHeader._make(struct.unpack('<BBBBB', bluez_packet[:5]))\n # Check if this is a valid LE advertisement packet\n if bzh.hci_packet_type == 0x04 and bzh.event == 0x3E and \\\n bzh.meta_event == 0x02 and bzh.report_num > 0 and \\\n bzh.length + 3 == len(bluez_packet):\n # Track reports\n reports = bzh.report_num\n # Move to the first advertising report\n ad_packet = bluez_packet[5:]\n # Iterate over the advertising reports\n while reports > 0 and len(ad_packet) >= 9:\n # Decode the advertising report\n ad_report = decode_ad_report(ad_packet)\n # Decrement reports counter\n reports -= 1\n # Move on to the next advertising report\n ad_packet = ad_packet[ad_report['adinfo_bytes']:]\n # Is this a valid beacon?\n if ad_report['type']:\n # Remove the adinfo_bytes\n del ad_report['adinfo_bytes']\n # Add this beacon to the beacons list\n beacons.append(ad_report)\n # Return the beacons list\n return beacons", "async def _parse_boottime_hostname(self, output, cb_token) -> None:\n\n if self.sigend:\n return\n\n if output[0][\"status\"] == 0:\n upsecs = output[0][\"data\"].split()[0]\n self.bootupTimestamp = int(int(time.time()*1000)\n - float(upsecs)*1000)\n if output[1][\"status\"] == 0:\n data = output[1].get(\"data\", '')\n hostline = data.splitlines()[0].strip()\n if hostline.startswith(\"Static hostname\"):\n _, hostname = hostline.split(\":\")\n self.hostname = hostname.strip()\n\n if output[2][\"status\"] == 0:\n data = output[2].get(\"data\", '')\n self._extract_nos_version(data)", "async def _parse_boottime_hostname(self, output, cb_token) -> None:\n\n if output[0][\"status\"] == 0:\n upsecs = output[0][\"data\"].split()[0]\n self.bootupTimestamp = int(int(time.time()*1000)\n - float(upsecs)*1000)\n if output[1][\"status\"] == 0:\n self.hostname = output[1][\"data\"].strip()\n if output[2][\"status\"] == 0:\n self._extract_nos_version(output[1][\"data\"])", "def get_eap_mab(self):\n for m in self.get_tag(self.mac):\n v = m[1]\n if not isinstance(v, int):\n v = self._get_vlan(v)\n yield [m[0], v]", "def director_address():\n while True:\n #addr = etcd.watch(\"director_publish_addr\")\n #director_address = addr.value\n break", "def _receive(self, what, address='localhost:44818', **kwargs):\n\n tag_string = ''\n tag_string = 
EnipProtocol._tuple_to_cpppo_tag(what)\n\n # print(\"DEBUG \" + tag_string)\n\n cmd = shlex.split(\n self._client_cmd +\n '--log ' + self._client_log +\n ' --print --address ' + address +\n ' ' + tag_string\n )\n # print 'DEBUG enip _receive cmd shlex list: ', cmd\n\n try:\n client = subprocess.Popen(cmd, shell=False,\n stdout=subprocess.PIPE)\n\n # client.communicate is blocking\n raw_out = client.communicate()\n # print('DEBUG1 ', raw_out)\n\n # value is stored as first tuple element\n # between a pair of square brackets\n\n raw_string = raw_out[0]\n # print(\"DEBUG2 \" + str(raw_string))\n raw_string = str(raw_string)\n out = raw_string[(raw_string.find('[') + 1):raw_string.find(']')]\n # print(\"DEBUG4 \" + out)\n return out\n\n except Exception as error:\n print('ERROR enip _receive: ', error)", "async def _parse_boottime_hostname(self, output, cb_token) -> None:\n\n hostname = ''\n if output[0][\"status\"] == 0:\n data = json.loads(output[0][\"data\"])\n upsecs = (24*3600*int(data.get('kern_uptm_days', 0)) +\n 3600*int(data.get('kern_uptm_hrs', 0)) +\n 60*int(data.get('kern_uptm_mins', 0)) +\n int(data.get('kern_uptm_secs', 0)))\n if upsecs:\n self.bootupTimestamp = int(int(time.time()*1000)\n - float(upsecs)*1000)\n self.version = data.get('nxos_ver_str', '')\n if not self.version:\n self.logger.error(\n f'Cannot extract version from {self.address}:{self.port}')\n\n if len(output) > 1:\n if output[1][\"status\"] == 0:\n hostname = output[1][\"data\"].strip()\n else:\n if output[0]['hostname'] != output[0]['address']:\n hostname = output[0]['hostname']\n\n if hostname:\n self.set_hostname(hostname)", "def machine_name(self) -> str:\n return pulumi.get(self, \"machine_name\")", "def machine_name(self) -> str:\n return pulumi.get(self, \"machine_name\")", "def _extract_appname(self, log):\n appname = \"\"\n if \"appLaunch\" in log:\n appname = log[\"appLaunch\"][\"appName\"]\n else:\n self.logger.info(\"no applaunch field\")\n self.logger.info(log[\"event\"])\n pass \n \n return appname", "def broadcast(loopstate):\n cmdstring = 'sudo hcitool -i hci0 cmd ' # Send cmd to hci0\n cmdstring += '0x08 ' # Set group to BLE\n cmdstring += '0x0008 ' # Set command to HCI_LE_Set_Advertising_Data\n cmdstring += '0D ' # Length of entire following data, in bytes\n cmdstring += '02 ' # Length of flag info\n cmdstring += '01 ' # Use AD flags\n cmdstring += '02 ' # Flag value:\n # bit 0 (OFF) LE Limited Discoverable Mode\n # bit 1 (ON) LE General Discoverable Mode\n # bit 2 (OFF) BR/EDR Not Supported\n # bit 3 (ON) Simultaneous LE and BR/EDR to Same Device Capable (controller)\n # bit 4 (ON) Simultaneous LE and BR/EDR to Same Device Capable (Host)\n cmdstring += '09 ' # Length of following message, in bytes\n cmdstring += '07 ' # GAP value (07 = 128 Bit Complete Service UUID List)\n cmdstring += '42 69 63 79 63 6c 65 ' # Header to identify beacon message-\n # - and it's also is Bicycle in ASCII!\n if loopstate:\n cmdstring = cmdstring + LOOP_ON\n else:\n cmdstring = cmdstring + LOOP_OFF + ' >/dev/null 2>&1'\n subprocess.call(cmdstring, shell=True)\n subprocess.call('sudo hciconfig hci0 leadv 3 >/dev/null 2>&1', shell=True)", "def get_address(machine: Machine) -> str:\n default_route, _ = machine.run(\"ip route get 8.8.8.8\")\n return re.search(\" src ([0-9.]+) \", default_route).group(1)", "def discover(self):\n self._parse_boxee_response( self._broadcast_for_boxee_info() )", "def test_away(self):\n message = \"Sorry, I'm not here.\"\n self.protocol.away(message)\n expected = [\n \"AWAY 
:{}\".format(message),\n \"\",\n ]\n self.assertEqualBufferValue(self.transport.value().split(b\"\\r\\n\"), expected)", "def on_register_action(self, event):\n try:\n mme_addr = event.params[\"mme-addr\"]\n gtp_bind_addr = event.params[\"gtp-bind-addr\"]\n s1c_bind_addr = event.params[\"s1c-bind-addr\"]\n command = \" \".join(\n [\n \"srsenb\",\n \"--enb.name=dummyENB01\",\n \"--enb.mcc=901\",\n \"--enb.mnc=70\",\n \"--enb.mme_addr={}\".format(mme_addr),\n \"--enb.gtp_bind_addr={}\".format(gtp_bind_addr),\n \"--enb.s1c_bind_addr={}\".format(s1c_bind_addr),\n \"--enb_files.rr_config=/config/rr.conf\",\n \"--enb_files.sib_config=/config/sib.conf\",\n \"--enb_files.drb_config=/config/drb.conf\",\n \"/config/enb.conf.fauxrf\",\n ]\n )\n stdout = subprocess.check_output(command, shell=True)\n event.set_results({\"output\": stdout})\n except subprocess.CalledProcessError as ex:\n event.fail(ex)", "def amtool_receivers(self, mess, args):\n helper = AmtoolHelper(\n alertmanager_address=self.config['server_address'])\n result = helper.get_receivers()\n return result", "def get_appname(hostname):\n e=etcd.Etcd(host=\"172.17.42.1\")\n try:\n appname=hostname.split(\".\")[0]\n listofinstances=[]\n ls = e.get(\"apps/\"+appname+\"/running\")\n for key in ls:\n listofinstances.append(key.value())\n port=choice(listofinstances)\n return \"172.17.42.1:\"+port\n except:\n return \"\"", "def broker_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"broker_name\")", "def monitor_start(event: Event) -> None:\n _LOGGER.info(\"Starting scanner for Eddystone beacons\")\n mon.start()", "def getExchange(self):\r\n\t\treturn self.pair.exchange", "def get_mos_from_localhost(self):\n rewards = dict() # saves the reward of each client from each ap\n _, data = self.command_ap('localhost', 8080, '', \"/get_mos_client\") # the interface (3rd param) does not matter\n self.log.debug(\"data for MOS @ {} => {}\".format('all', data))\n stations = {'gnu-nb3': ['cloud'],\n 'fenrir': ['storm'],\n }\n for ap in self.aps:\n d = []\n for sta in stations[ap.name]:\n entries = [x[:4] for x in data if x[4] == sta]\n d.extend(entries)\n rs = self.get_rs(d)\n rewards[ap.id] = rs\n return rewards", "async def get_hostname(self):\n\n # Display info message\n log.info(\"get_hostname\")\n\n # Get hostname\n output = await self.send_command(self.cmd_get_hostname)\n\n # Display info message\n log.info(f\"get_hostname: output: '{output}'\")\n\n # Remove the useless information in the returned string\n output = output.split(\"System Name: \")[1].strip()\n\n # Display info message\n log.info(f\"get_hostname: hostname found: '{output}'\")\n\n # Return the name of the device\n return output", "def _manufacturer(self, mac_address):\n # Initialize key variables\n manufacturer = ''\n\n # Process data\n mac_oui = mac_address[0:6]\n if mac_oui in self.oui:\n manufacturer = self.oui[mac_oui]\n\n # Return\n return manufacturer", "async def test_many_groups_same_address_ignored(hass: HomeAssistant) -> None:\n entry = MockConfigEntry(\n domain=DOMAIN,\n )\n entry.add_to_hass(hass)\n\n assert await hass.config_entries.async_setup(entry.entry_id)\n await hass.async_block_till_done()\n\n inject_bluetooth_service_info(hass, BLUECHARM_BEACON_SERVICE_INFO)\n await hass.async_block_till_done()\n\n assert (\n hass.states.get(\"sensor.bluecharm_177999_8105_estimated_distance\") is not None\n )\n\n for i in range(12):\n service_info = BluetoothServiceInfo(\n name=\"BlueCharm_177999\",\n address=\"61DE521B-F0BF-9F44-64D4-75BBE1738105\",\n rssi=-63,\n 
service_data={},\n manufacturer_data={\n 76: b\"\\x02\\x15BlueCharmBeacons\" + bytearray([i]) + b\"\\xfe\\x13U\\xc5\"\n },\n service_uuids=[],\n source=\"local\",\n )\n inject_bluetooth_service_info(hass, service_info)\n\n await hass.async_block_till_done()\n assert hass.states.get(\"sensor.bluecharm_177999_8105_estimated_distance\") is None", "def process_wifi_com(self, wm):\n print wm.message" ]
[ "0.53489196", "0.522713", "0.51454145", "0.50426984", "0.46323082", "0.45568532", "0.45388705", "0.4519607", "0.45112652", "0.45094717", "0.45000902", "0.44978198", "0.4472048", "0.4472048", "0.44657722", "0.44152695", "0.44033462", "0.43702632", "0.43686807", "0.43647823", "0.43574384", "0.4342379", "0.4340285", "0.43367773", "0.43236864", "0.43226504", "0.4320328", "0.43171704", "0.43110424", "0.43107817" ]
0.5799272
0
Returns the current power in watts on a desired channel
def get_power(self, channel): power = self.device.query(f':POW{channel}:VAL?') return float(power)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_power(self) -> float:\n\n #:READ[n][:CHANnel[m]][:SCALar]: POWer[:DC]?\n return float(self._inst.query(\":READ:POW?\"))", "def get_power(self):\r\n return self._api.get_power()", "def getWatts(self):\n return self.json_state.get(\"charging\").get(\"watt_power\")", "def get_power(self):\r\n _debug('simq03b_api.get_power')\r\n \r\n x = self.query('POWer?')\r\n if x == None: return None\r\n return float(x)", "def get_power(self):\r\n return self.p", "def getpower(self):\n return (self.cwt*numpy.conjugate(self.cwt)).real", "def PM_getPower(self,channel,unit='W'):\n if unit not in ApexAP1000.PM_UNIT:\n raise ValueError('Unknow physical unit during power measurement')\n if channel not in ApexAP1000.PM_CHANNELS:\n raise ValueError('Unknow channel during power measurement')\n str = {'W':'MW','mW':'MW','dBm':'DBM'}\n value = float(self.ask(self.headStr('PM')+'%s[%d]?'%(str[unit],channel)))\n if unit is 'W':\n value = value * 1e-3\n return value", "def read_power(self):\n return(self.power)", "def get_power():\n return float(cmd(\"pa?\"))", "def get_power(self):\r\n x = self.query('POW?')\r\n if x == None: return None\r\n return float(x)", "def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)", "def get_power(self):\r\n x = self.query('SOURce1:POWer:POWer?')\r\n if x == None: return None\r\n return float(x)", "async def get_power(self):\n if not self._current_power_supported:\n return 0\n\n try:\n value = await self._get_config(STATE_POWER_V1)\n return value[STATE_POWER_V1]\n except (ValueError, InvalidRequestError):\n # Device does not support whole unit instant power usage\n self._current_power_supported = False\n return 0", "def tx_power(self) -> int:\n # Follow table 10 truth table from the datasheet for determining power\n # level from the individual PA level bits and output power register.\n pa0 = self.pa_0_on\n pa1 = self.pa_1_on\n pa2 = self.pa_2_on\n current_output_power = self.output_power\n if pa0 and not pa1 and not pa2:\n # -18 to 13 dBm range\n return -18 + current_output_power\n if not pa0 and pa1 and not pa2:\n # -2 to 13 dBm range\n return -18 + current_output_power\n if not pa0 and pa1 and pa2 and not self.high_power:\n # 2 to 17 dBm range\n return -14 + current_output_power\n if not pa0 and pa1 and pa2 and self.high_power:\n # 5 to 20 dBm range\n return -11 + current_output_power\n raise RuntimeError(\"Power amps state unknown!\")", "def current_power_w(self):\n if self._devtype == \"pod\":\n return self._current_consumption\n return False", "def power(self) -> int:\n return self._power_consumption", "def get_power(self):\n #GPIO.setmode(GPIO.BOARD)\n #GPIO.setup(self.input_pin, GPIO.IN)\n return 0", "def power(self):\n return self._power", "def power(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"power\")", "def getPowerIndex(self):\n return self.powerIndex_", "def power(self) -> int:\n return self._power", "def power(self) -> int:\n return self._power", "def power(self) -> int:\n return self._power", "def power(self) -> int:\n return self._power", "def power(self):\r\n return self.model * self.percent / 100", "def specular_power(self) -> float:\n return self.GetSpecularPower()", "def calcPower(self, inputs):\n if self.getAtt('available', inputs):\n possible_charge_rate = self.getAtt('possible_charge_rate', inputs)\n Vm = self.getAtt('Vm', inputs)\n P = possible_charge_rate * Vm\n if not self.stayConnected:\n P = P * self.calculateVoltageIndex(Vm) * self.calculateTrafoIndex()\n return P\n return 
0.0", "def get_power_usage(self):\n if self.pulses_1s is None:\n return None\n return self.pulses_to_kWs(self.pulses_1s) * 1000", "def current_by_power(power=\"25 W\", voltage=\"230 V\") -> Unit(\"A\"):\n power = normalize_numeric(power)\n voltage = normalize_numeric(voltage)\n return power / voltage", "def power(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"power\")" ]
[ "0.7491432", "0.73327", "0.7195306", "0.71075034", "0.7105397", "0.70822215", "0.7051447", "0.7042748", "0.7009171", "0.70017534", "0.69192004", "0.69192004", "0.6893051", "0.6834603", "0.68033", "0.67342967", "0.6730003", "0.6683369", "0.6609198", "0.6457663", "0.6411739", "0.6411739", "0.6411739", "0.6411739", "0.64079094", "0.6406904", "0.63891256", "0.63884586", "0.6388284", "0.6368361" ]
0.8137549
0
Use configparser to load ini_file into self.config
def __init__(self, ini_file):
    self.config = configparser.ConfigParser()
    self.config.read(ini_file)
    #print(self.config)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, ini_file):\n self.config = configparser.ConfigParser()\n self.config.read(ini_file)", "def parse_ini(self):\r\n cp = SafeConfigParser(defaults=self.DEFAULTS)\r\n if self.filenames['ini'] is not None:\r\n cp.read(self.filenames['ini'])\r\n return cp", "def __init__(self):\n\n self.path = os.path.dirname(os.path.realpath(__file__)) + '/config.ini'\n self.config = configparser.ConfigParser()\n self.config.read(self.path)", "def load_config():\n config = configparser.ConfigParser()\n config.read('config.ini')\n return config", "def load_config():\n config = ConfigParser()\n config.read(os.path.join(os.path.dirname(__file__), 'config.ini'))\n return config", "def onLoadConfig(self, inifile):\n cp = ConfigParser(self.defaults)\n cp.readfp(inifile)\n depth = self.getDepth(cp)\n self.baseurl = urljoin(self.inipath, depth)\n # create child loaders for any other l10n.ini files to be included\n try:\n for title, path in cp.items('includes'):\n # skip default items\n if title in self.defaults:\n continue\n # add child config parser\n self.addChild(title, path, cp)\n except NoSectionError:\n pass\n # try to load the \"dirs\" defined in the \"compare\" section\n try:\n self.dirs.extend(cp.get('compare', 'dirs').split())\n except (NoOptionError, NoSectionError):\n pass\n # try getting a top level compare dir, as used for fennec\n try:\n self.tld = cp.get('compare', 'tld')\n # remove tld from comparison dirs\n if self.tld in self.dirs:\n self.dirs.remove(self.tld)\n except (NoOptionError, NoSectionError):\n self.tld = None\n # try to set \"all_path\" and \"all_url\"\n try:\n self.all_path = cp.get('general', 'all')\n self.all_url = urljoin(self.baseurl, self.all_path)\n except (NoOptionError, NoSectionError):\n self.all_path = None\n self.all_url = None\n return cp", "def load_config(cls, config_file = None):\n config = ConfigParser()\n \n files = [\"/etc/imp.cfg\", os.path.expanduser(\"~/.imp.cfg\"), \".wm\", \".imp\"]\n if config_file is not None:\n files.append(config_file)\n \n config.read(files)\n cls.__instance = config", "def parse_config():\n config_file = glob.glob('config.ini')\n parser = ConfigParser()\n if config_file:\n parser.read(config_file)\n else:\n cwd = os.path.abspath(os.path.dirname(__file__))\n config_file = os.path.join(cwd, 'default_config.ini')\n parser.read(config_file)\n return _parse_config(parser)", "def load_conf(self):\n\n self.load_file(self.ini_file)\n self.files = []\n conf_file = open(self.ini_file, \"r\")\n for l in conf_file:\n self.files.append(l.strip())\n conf_file.close()", "def __init__(self):\n self.filename = pathlib.Path(__file__).parent.absolute().__str__() + '/../../data/config.ini'\n self.data = ConfigParser()\n self.data.read(self.filename)", "def load_config(self, config_file):\n self.config = ConfigParser.ConfigParser()\n self.config.read(config_file)", "def parse_config(self):\n # TODO: parse config file\n pass", "def load_configuration(self) -> None:\n config_file = self.default_config_file\n if self.config_file:\n config_file = self.config_file\n self.config = configparser.ConfigParser(delimiters=\"=\")\n # mypy is unhappy with us assigning to a method - (monkeypatching?)\n self.config.optionxform = lambda option: option # type: ignore\n self.config.read(config_file)", "def parse_config():\n config_path = Path(\"config.ini\")\n if config_path.exists():\n config.read(config_path)\n else:\n config[\"database\"] = {\"location\": \"image-database.db\"}\n config[\"images\"] = {\"extensions\": \".jpeg,.jpg,.png,.gif,.tiff\"}\n with 
open(config_path, \"w\") as configfile:\n config.write(configfile)\n config.read(config_path)", "def load( self ):\n ini = codecs.open(self.filename,\"r\",\"utf-8\",errors=\"replace\",buffering=0)\n for l in ini:\n l = l.strip()\n if l:\n (name,value) = l.split(\"=\",1)\n self.conf[name.strip()] = value.strip()\n ini.close()", "def parse(self):\n try:\n with open(self.path, 'r') as ymlfile:\n self.__cfg = yaml.load(ymlfile)\n except IOError:\n self.log(\"File {0} not found -- aborting\".format(self.path))\n raise ConfigFileException", "def load_config( self, config_file=None ):\n if config_file is None:\n config_file = os.path.dirname(self.dbfile) + '/final.ini'\n config_parser = ConfigParser.ConfigParser()\n config_parser.read( config_file )\n self.config = config_parser\n return self.config", "def _parse_file(cls, config_file, namespace):\n config_file = _fixpath(config_file)\n\n sections = {}\n normalized = {}\n parser = cls(config_file, sections)\n parser._add_normalized(normalized)\n\n try:\n parser.parse()\n except iniparser.ParseError as pe:\n raise ConfigFileParseError(pe.filename, str(pe))\n except IOError as err:\n if err.errno == errno.ENOENT:\n namespace._file_not_found(config_file)\n return\n if err.errno == errno.EACCES:\n namespace._file_permission_denied(config_file)\n return\n raise\n\n namespace._add_parsed_config_file(config_file, sections, normalized)\n namespace._parse_cli_opts_from_config_file(\n config_file, sections, normalized)", "def _load_config():\n\tcfg = configparser.ConfigParser()\n\tcfg.read(join(get_current_path(), 'ib.config'))\n\treturn cfg", "def __init__(self, __file):\n\n\t\tself.fileName = __file\n\t\tif (os.path.isfile(self.fileName)):\n\t\t\t# config.ini found, load it\n\t\t\tself.config.read(self.fileName)\n\t\t\tself.default = False\n\t\telse:\n\t\t\t# config.ini not found, generate a default one\n\t\t\tself.generateDefaultConfig()\n\t\t\tself.default = True", "def _load_config():\n\tcfg = configparser.ConfigParser()\n\tcfg.read(os.path.join(get_current_directory(), 'citi.config'))\n\treturn cfg", "def __init__(self, filepath=None):\n self.path = self.__default_filepath if filepath is None else filepath\n self.parser = configparser.ConfigParser()\n if self.__section_default not in self.parser.sections():\n self.parser.add_section(self.__section_default)\n\n self.parser.read(self.path)", "def parse(self, config_file):\n\t\tself.options = yaml.load(open(config_file))", "def read_config(self, config_filename):", "def load_config(self, filename, fileconfout=None):\n self._filename = filename\n self._init_config = open(filename).read().splitlines()\n metaconfig = [l for l in self._init_config\n if not (l.startswith(\"#\") or l.startswith(\"\\t\") or l.startswith(\" \")) and len(l)>0]\n\n for k in metaconfig:\n key, *value = k.split()\n if len(value)==1:\n self.set_value(key, value[0], None)\n \n elif len(value)>1:\n if value[1]==\"#\":\n self.set_value(key, value[0], \" \".join(value[2:]))\n else:\n raise IOError(\"Cannot parse the line %s\"%k)\n else:\n raise IOError(\"cannot parse the line %s\"%k)\n if fileconfout is not None:\n self.set_value(\"PARA_OUT\", fileconfout)", "def InitConfigReader(configFile):\n configReader.readfp(open(configFile))", "def InitConfigReader(configFile):\n configReader.readfp(open(configFile))", "def _read_config(self):\n config = configparser.ConfigParser()\n config.read('config.ini')\n\n self.batch_size = int(config['MODEL']['batch_size'])\n self.num_filters = int(config['MODEL']['num_filters'])\n self.dropout_dim = 
float(config['MODEL']['dropout_dim'])\n self.dense_neurons = int(config['MODEL']['dense_neurons'])\n _pool_size = config['MODEL']['pool_size']\n _kernel_size = config['MODEL']['kernel_size']\n self.IMG_SIZE = int(config['DATA']['image_size'])\n self.num_classes = int(config['CUSTOM']['num_classes'])\n self.epochs = int(config['MODEL']['epochs'])\n self.b_eval_advanced = (\n config['MODEL']['complex_analysis'] == 'true' or config['MODEL']['complex_analysis'] == 'True')\n\n self.pool_size = tuple(map(int, _pool_size.split(',')))\n self.kernel_size = tuple(map(int, _kernel_size.split(',')))\n\n self.img_rows, self.img_cols = self.IMG_SIZE, self.IMG_SIZE", "def iniparse(filename, flat=True):\n config = ConfigParser.RawConfigParser()\n config.read(filename)\n\n params = {}\n for section in config.sections():\n if not flat:\n params[section] = {}\n for key, value in config.items(section):\n try:\n eval_val = ast.literal_eval(value)\n except (SyntaxError, ValueError):\n eval_val = value\n\n if flat:\n params[key] = eval_val\n else:\n params[section][key] = eval_val\n\n return params", "def get_confg(self):\n\n ini = ConfigParser()\n self.config_parser = ini\n # if isinstance(cfile, (file, StringIO.StringIO, io.BytesIO)):\n if isinstance(self.config_data, str) and self.config_data:\n fp = io.BytesIO(self.config_data)\n ini.readfp(fp)\n elif self.config_file is not None:\n ini.read([self.config_file, os.path.expanduser('~/.' + self.config_file)])\n\n if ini.has_section('whoshere'):\n return ini.items('whoshere')\n\n return {}" ]
[ "0.83436626", "0.7763483", "0.75989544", "0.752344", "0.7485136", "0.7407618", "0.7354351", "0.7283953", "0.7280698", "0.7256217", "0.72532433", "0.7218413", "0.7164233", "0.71115434", "0.7076736", "0.7010528", "0.70052344", "0.6945134", "0.69198495", "0.68875235", "0.68658423", "0.6847237", "0.6828934", "0.6812457", "0.67934805", "0.6756749", "0.6756749", "0.6756659", "0.6708159", "0.66981" ]
0.81050164
1
Return the number of sections in the ini file.
def number_of_sections(self):
    #print (len(self.config.sections()))
    return len(self.config.sections())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def number_of_sections(self):\n sections = self.config.sections()\n return len(sections)", "def sections(self) -> int:\n return len(self.string.split(\".\"))", "def testSectionCount(self):\n\n self.sectionCount(3640)", "def getSectionIndex(self) -> int:\n ...", "def file_len(fname):\n with open(fname) as f:\n for i, l in enumerate(f):\n pass\n Nrows = i + 1\n return Nrows", "def countsubcatchments(inputfilename=FileSettings.settingsdict['inputfilename']):\r\n global count\r\n with open(inputfilename, 'r') as swmmput:\r\n contents = swmmput.readlines()\r\n count = len(contents)\r\n return(count)", "def config_count(self) -> int:\n return pulumi.get(self, \"config_count\")", "def section(self, idx: int) -> int:\n if self.sections >= (idx + 1):\n return int(RE_DIGIT.match(self.string.split(\".\")[idx]).group(1))\n return 0", "def total_number():\r\n total_number = 0\r\n file_read = read_file()\r\n for key in file_read:\r\n total_number = total_number + len(file_read[key])\r\n return total_number", "def config_count():\n return int(len([name for name in os.listdir(nginx_sites_enabled) \\\n if os.path.isfile(os.path.join(nginx_sites_enabled, name))]))", "def contig_count(contig):\n return sum([1 for line in open(contig, 'rU').readlines() if line.startswith('>')])", "def file_len(fname):\n \n with open(fname) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def sections(self) -> Iterable[int]:\n return self._sections.keys()", "def numLinesInFile(fname):\n with open(fname, 'rb') as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def header_len(self):\n if self.num_lines_header is None:\n Nheader = 0\n with self._compression_safe_file_opener(self.input_fname, \"r\") as f:\n for i, l in enumerate(f):\n if (l[0 : len(self.header_char)] == self.header_char) or (\n l == \"\\n\"\n ):\n Nheader += 1\n else:\n break\n\n return Nheader\n else:\n return self.num_lines_header", "def file_len(full_path):\n f = open(full_path)\n nr_of_lines = sum(1 for line in f)\n f.close()\n return nr_of_lines", "def file_len(filename):\n with open(filename) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def file_len(file_name):\n with open(file_name) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def get_num_examples(path_in):\n i = 0\n with open(path_in, 'r', encoding='utf8') as f:\n for _ in f:\n i += 1\n return i", "def num_instances_mgf(infile_name):\n\tinfile = open(infile_name)\n\tnum_instances = 0\n\tfor line in infile:\n\t\tif line.startswith(\"BEGIN IONS\"):\n\t\t\tnum_instances += 1\n\treturn(num_instances)", "def read_ram_sections_count(self):\n count = ctypes.c_uint32()\n \n result = self._lib.NRFJPROG_read_ram_sections_count(ctypes.byref(count))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return count.value", "def getSegmentCount(self) -> int:\n ...", "def num_lines(fname):\n with open(fname) as f:\n for i, l in enumerate(f):\n pass\n return( i + 1 )", "def get_array_size():\n tg_file = 'NA_CAS_gauges.txt'\n lines = open(tg_file).readlines()\n tg_nbr = len(lines)\n return tg_nbr", "def n_conf(self):\n return self._configuration_sets[0].n_conf", "def numSegments(self):\n\n return self.getHierView().numSegments()", "def exists_ini_section( inifile, section ):\n found_section = False\n\n # read jobfile\n with open(inifile) as f:\n # loop over all lines\n for line in f:\n # until we find the section\n if \"[\"+section+\"]\" in line and line[0]!=\";\" and line[0]!=\"!\" and line[0]!=\"#\":\n found_section = True\n\n\n return found_section", "def 
file_length(fileName):\n with open(f_pass) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def __len__(self):\n nlines = self.get_endline() - self.get_startline() + 1\n if nlines < 0:\n nlines = 0\n return nlines", "def calculate_number_of_segments(self):\n return sum(len(eg.transcript_file.segments) for eg in self.exemplars)" ]
[ "0.79689074", "0.74084735", "0.6703189", "0.61651725", "0.61540675", "0.6144179", "0.606634", "0.60374355", "0.5971765", "0.59619844", "0.5946345", "0.59424305", "0.5894277", "0.58499515", "0.58399177", "0.58327585", "0.5830617", "0.58236206", "0.5805731", "0.58001965", "0.5794479", "0.57830656", "0.57657844", "0.5757404", "0.5731272", "0.57073677", "0.57009304", "0.56866455", "0.5655577", "0.56540155" ]
0.80345184
0
Return a list of environments (= "envlist" attribute of [tox] section)
def environments(self):
    envs = self.config["tox"]["envlist"]
    #result = re.split("[^a-zA-Z0-9]", envs)
    result = re.split(r'\n| ,|,', envs)
    #print ([string for string in result if string != ""])
    result = (([string.strip() for string in result if string != ""]))
    print(list(dict.fromkeys(result)))
    return ((list(dict.fromkeys(result))))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def environments(self):\n env_txt = self.config[\"tox\"][\"envlist\"]\n env_lst_raw = env_txt.strip().replace(\"\\n\",\",\").split(\",\")\n env_lst = [x.strip() for x in env_lst_raw if x != \"\"]\n return env_lst", "def get_all_environments():\n return ENVIRONMENTS", "def envs():\n\n # update and grab the envs from the metadata keys\n metadata = _init()\n return list(metadata.keys())", "def env(self): # type: () -> t.List[str]\n return self.config['Env']", "def list_envs(self):\n if self.hdfs:\n files = self.hdfs.ls(self.hdfs_home + '/.knitDeps/', True)\n return [f for f in files if f['name'].endswith('.zip')]\n else:\n raise ImportError('Set the `hdfs` attribute to be able to list'\n 'environments.')", "def get_environments(self):\n environments = list()\n for group in self._ncfile.groups:\n environments.append( str(group) )\n return environments", "def conda_list_environments():\n conda = '{0}/bin/conda'.format(utils.home('apps', 'miniconda'))\n\n run('{conda} info --envs'.format(conda=conda))", "def all_envs():\n return all_tasks.keys()", "def envs(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverNodeEnvs']]:\n return pulumi.get(self, \"envs\")", "def _get_environments() -> List[EnvironmentRecord]:\n env_path = Path(__file__).parent.resolve().parents[1] / Path(ENV_FILE)\n\n if not env_path.exists():\n raise FileNotFoundError(f\"Program requires the environments file to function. Expect \"\n f\"file in\\n{env_path.as_posix()}\")\n\n environments = []\n try:\n with env_path.open(\"rt\", encoding=\"utf-8\") as infile:\n json = load(infile)\n for environment in json[\"environments\"]:\n environments.append(EnvironmentRecord(**environment))\n except KeyError as error:\n print(f\"Key \\\"{error}\\\" not found. Invalid file format\")\n exit(-1)\n\n return environments", "def container_environment(self):\n environment = []\n for name, value in sorted(self.environment.items()):\n environment.append({'name': name, 'value': value})\n return environment", "def envs(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverCommonEnvs']]:\n return pulumi.get(self, \"envs\")", "def _get_env_list(obj, env):\n # add the [default] env\n env_list = [obj.get(\"DEFAULT_ENV_FOR_DYNACONF\")]\n # compatibility with older versions that still uses [dynaconf] as\n # [default] env\n global_env = obj.get(\"ENVVAR_PREFIX_FOR_DYNACONF\") or \"DYNACONF\"\n if global_env not in env_list:\n env_list.append(global_env)\n # add the current env\n if obj.current_env and obj.current_env not in env_list:\n env_list.append(obj.current_env)\n # add a manually set env\n if env and env not in env_list:\n env_list.append(env)\n # add the [global] env\n env_list.append(\"GLOBAL\")\n return [env.lower() for env in env_list]", "def envs(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverNodeEnvs']]:\n return pulumi.get(self, \"envs\")", "def env_variables(self) -> list[\"EnvVariable\"]:\n _args: list[Arg] = []\n _ctx = self._select(\"envVariables\", _args)\n _ctx = EnvVariable(_ctx)._select_multiple(\n _name=\"name\",\n _value=\"value\",\n )\n return _ctx.execute_sync(list[EnvVariable])", "def _get_env(self):\n env = {}\n for k, v in os.environ.items():\n k = k.decode() if isinstance(k, bytes) else k\n v = v.decode() if isinstance(v, bytes) else v\n env[k] = v\n return list(env.items())", "def envs(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverNodeEnvs']]:\n return pulumi.get(self, \"envs\")", "def envs(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverNodeEnvs']]:\n return pulumi.get(self, \"envs\")", 
"def get_execution_envs(self):\n return self.execution_envs", "def envs(self) -> Optional[Sequence['outputs.CSIIsilonSpecDriverCommonEnvs']]:\n return pulumi.get(self, \"envs\")", "def envs(self) -> Optional[Sequence['outputs.CSIPowerStoreSpecDriverCommonEnvs']]:\n return pulumi.get(self, \"envs\")", "def envs(self) -> Optional[Sequence['outputs.CSIUnitySpecDriverCommonEnvs']]:\n return pulumi.get(self, \"envs\")", "def envs(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverNodeEnvs']]:\n return pulumi.get(self, \"envs\")", "def get_execution_envs_str(self):\n\n return ' '.join(str(x) for x in self.execution_envs)", "def envs(self) -> Optional[Sequence['outputs.CSIVXFlexOSSpecDriverControllerEnvs']]:\n return pulumi.get(self, \"envs\")", "def deploy_environments(self):\n for key in self.deploy:\n yield key", "def envs(self) -> Optional[Sequence['outputs.CSIPowerMaxSpecDriverCommonEnvs']]:\n return pulumi.get(self, \"envs\")", "def envs(self):\n for member in self.members:\n if not isinstance(member, EnvGroup):\n yield member\n continue\n for member in member.envs():\n yield member", "def generateEnvList( self, index ):\n EnvList = [ \n (\"GLOBUS_DUROC_SUBJOB_INDEX\", \"%d\" % index),\n (\"LD_LIBRARY_PATH\", \"/usr/local/globus/globus-3.2/lib/\") \n ]\n return EnvList", "def list_venvs(obj) -> None:\n if not isinstance(obj, VenvConfig): # pragma: no cover\n raise TypeError(\"ctx.obj must be a VEnvConfig\")\n obj.list()" ]
[ "0.8490215", "0.76675767", "0.7466307", "0.74636424", "0.70641226", "0.70295143", "0.69092244", "0.6908665", "0.67778206", "0.6665895", "0.6590437", "0.6576218", "0.65429485", "0.65250677", "0.6523051", "0.65151244", "0.65044487", "0.64991295", "0.64786613", "0.6424447", "0.6412644", "0.6396338", "0.63757706", "0.63158894", "0.630349", "0.62752336", "0.6270023", "0.6256219", "0.62392676", "0.6237509" ]
0.8205622
1
Return a list of all basepython values across the ini file
def base_python_versions(self):
    result = []
    #print("HELLO?")
    #print (len(self.config.sections()))
    for section in self.config.sections():
        #print(self.config.options(section))
        for part in self.config[section]:
            if part == "basepython":
                #print(self.config[section][part])
                basepy = self.config[section][part]
                if basepy not in result:
                    result.append(basepy)
    #print(result)
    #print("\n")
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def base_python_versions(self):\n sections = self.config.sections()\n bp_set = set()\n for section in sections:\n if \"basepython\" in self.config[section]:\n bp = self.config[section][\"basepython\"]\n bp_set.add(bp)\n return list(bp_set)", "def _get_base_files(self):\n setup_file = path.join(self.PyCogentDirectory, 'setup.py')\n #reqs_file = path.join(self.PyCogentDirectory, 'cogent-requirements.txt')\n #return [(setup_file, 'Python'), (reqs_file, 'Properties')]\n return [(setup_file, 'Python')]", "def ini_get_all():\n raise NotImplementedError()", "def get_pythons_from_registry():\n\n if HAVE_WIN32_REGISTRY == 0:\n return []\n \n # get the toplevel key\n topkey = OpenKey(HKEY_LOCAL_MACHINE,\"SOFTWARE\\\\Python\\\\PythonCore\")\n \n # under PythonCore will be subkeys like '2.0', '2.1', etc.\n nr_vers = QueryInfoKey(topkey)[0]\n namelist = []\n\n # for each of those keys, get the InstallPath\n for i in range(nr_vers):\n verkey = OpenKey(topkey, \"%s\\\\InstallPath\" % EnumKey(topkey,i))\n path,typ = QueryValueEx(verkey,None)\n name = os.path.join(path,'python.exe')\n if os.path.isfile(name):\n namelist.append(name)\n\n return namelist", "def base():\n print(CFG.base.path)", "def loaded_modules() -> List[str]:\n return PYSTAC_IO.keys()", "def php_ini_scanned_files():\n raise NotImplementedError()", "def find_all_pythons():\n \n allpys = []\n \n # split PATH according to platform rules\n pathlist = string.split( os.environ['PATH'], os.pathsep )\n\n # search PATH, excluding nonexistant dirs\n for path in filter( os.path.isdir, pathlist ):\n allpys.extend( find_pythons_in_dir( path ) )\n\n # check the win32 registry, as appropriate\n allpys.extend( get_pythons_from_registry() )\n\n # and of course I'm running under a Python, in case\n # no others were found\n allpys.append( os.path.abspath(sys.executable) )\n \n return allpys", "def default_controls(self):\n\t\tcontrol_list = []\n\t\tconfig = ConfigParser.ConfigParser()\n\t\tconfig.read(\"./config.ini\")\n\t\tcontrols = config.options(\"default_controls\")\n\t\tfor c in controls:\n\t\t\ttry: control_list.append( config.get(\"default_controls\", c) )\n\t\t\texcept:\n\t\t\t\tprint \"ERROR: missing control settings. 
Check config.ini.\"\n\t\t\t\traise(SystemExit)\n\t\treturn control_list", "def read_confs():\n debugger = 'import pdb; pdb.set_trace()\\n'\n ignore = []\n\n home = expanduser('~')\n pdberc = home + '/.pdberc'\n\n confs = {}\n\n if not exists(pdberc):\n return {\n 'debugger': debugger,\n 'ignore': ignore,\n }\n\n with open(pdberc, 'r') as file:\n content = [line.strip() for line in file.readlines()]\n\n for line in content:\n if '=' in line and line.strip()[0] != '#':\n key, value = line.split('=')\n confs[key] = value\n\n if 'debugger' in confs:\n if confs['debugger'] == 'ipdb':\n debugger = 'import ipdb; ipdb.set_trace()\\n'\n\n if 'ignore' in confs:\n ignore = confs['ignore'].split(',')\n\n result = {\n 'debugger': debugger,\n 'ignore': ignore,\n }\n\n return result", "def readConfig(file=\"config.ini\"):\n ip_pool = []\n cmd_pool = []\n Config=ConfigParser.ConfigParser()\n Config.read(file)\n machines = Config.items(\"MACHINES\")\n commands = Config.items(\"COMMANDS\")\n for ip in machines:\n ip_pool.append(ip[1])\n for cmd in commands:\n cmd_pool.append(cmd[1])\n print cmd[1]\n return ip_pool,cmd_pool", "def listFeatures() :\n global features\n features = [feature.split(\".\")[0] for feature in os.listdir(os.path.abspath(__file__)[:-11])\n if feature.endswith(\".py\") and feature != \"__init__.py\"]", "def getModules() -> tuple:\n return data.getFoldersOf(data.ETC)", "def read_setup(inifile):\n # inifile = os.path.join(spathy_path, inifile)\n print(inifile)\n cfg = configparser.ConfigParser()\n cfg.read(inifile)\n\n pp = {}\n for s in cfg.sections():\n section = s.encode('ascii', 'ignore')\n pp[section] = {}\n for k, v in cfg.items(section):\n key = k.encode('ascii', 'ignore')\n val = v.encode('ascii', 'ignore')\n if section == 'General': # 'general' section\n pp[section][key] = val\n else:\n pp[section][key] = float(val)\n pp['General']['dt'] = float(pp['General']['dt'])\n\n pgen = pp['General']\n pcpy = pp['CanopyGrid']\n pbu = pp['BucketGrid']\n ptop = pp['Topmodel']\n\n return pgen, pcpy, pbu, ptop", "def python_like_exts():\r\n exts = []\r\n for lang in sourcecode.PYTHON_LIKE_LANGUAGES:\r\n exts.extend(list(sourcecode.ALL_LANGUAGES[lang]))\r\n return ['.' 
+ ext for ext in exts]", "def _config_files():\n from .plugin import plugins\n return [p for p in (p.config_file() for p in plugins()) if p is not None]", "def _iter_configurations() -> Iterable[pathlib.Path]:\n for ext in CONFIGURATION_FILE_FORMATS:\n yield from HERE.rglob(f\"*{ext}\")", "def list_logging_conf():\n import pkg_resources\n\n configs = set()\n for plugin in plugin_manager.load_all(__name__):\n configs.update({\n cfg for cfg in pkg_resources.resource_listdir(__name__, '.')\n if cfg.endswith('.json')\n })\n\n return configs", "def find(cls, paths):\r\n pythons = []\r\n for path in paths:\r\n for fn in cls.expand_path(path):\r\n basefile = os.path.basename(fn)\r\n if any(matcher.match(basefile) is not None for matcher in cls.REGEXEN):\r\n try:\r\n pythons.append(cls.from_binary(fn))\r\n except Exception as e:\r\n TRACER.log('Could not identify %s: %s' % (fn, e))\r\n continue\r\n return pythons", "def binary_bases(cls):\n return cls._BINARY_BASES", "def getBaseSrcFile(self) -> List[int]:\n ...", "def available_binary_choices() -> Iterable[str]:\n for name, _ in inspect.getmembers(sys.modules[__name__], inspect.isclass):\n if name.startswith('Binary'):\n yield name", "def x_list():\n\t_loadconfig()", "def builtin_keys(self) -> List[str]:\n return _ba.get_appconfig_builtin_keys()", "def _get_default_config_list(parm_base=None):\n default_config_list = []\n if parm_base is None:\n parm_base = PARM_BASE\n\n conf_dir = os.path.join(parm_base,\n METPLUS_CONFIG_DIR)\n\n # if both are found, set old base confs first so the new takes precedence\n for base_conf in OLD_BASE_CONFS + BASE_CONFS:\n conf_path = os.path.join(conf_dir,\n base_conf)\n if os.path.exists(conf_path):\n default_config_list.append(conf_path)\n\n if not default_config_list:\n print(f\"FATAL: No default config files found in {conf_dir}\")\n sys.exit(1)\n\n return default_config_list", "def loadCfg(self):\n objFile = open('/usr/local/bin/defaults.bind', 'r')\n fileContents = objFile.read()\n objFile.close()\n cfg = fileContents.split('\\n')\n cfgData = []\n for i in cfg:\n if i.startswith('bind '):\n bind, gKey, kbKey = i.split(' ')\n cfgData.append([gKey, kbKey.rstrip('\\n')])\n return cfgData", "def get_startup_extensions(self):\n final_list = []\n for entry in self.bot_data_file[\"startup_extensions\"]:\n final_list.append(str(entry[\"name\"]))\n return final_list", "def _get_doc_files(self):\n return [(path.join(self.DocDirectory, 'conf.py'), 'Python')]", "def listConfigModules(etcdir):\n if not os.path.isdir(etcdir):\n return iter(())\n return (name for name in os.listdir(etcdir)\n if (name.endswith('.py')\n and os.path.isfile(os.path.join(etcdir, name)))\n )", "def list_configurations(path):\n configurations = []\n\n for afile in os.listdir(path):\n afile = os.path.join(path, afile)\n if os.path.isfile(afile) and afile.endswith('.py'):\n configurations.append(afile)\n\n return configurations" ]
[ "0.7340574", "0.6669972", "0.6395513", "0.6079625", "0.5901388", "0.5851641", "0.5810437", "0.5688062", "0.56150764", "0.5586401", "0.5571002", "0.55457985", "0.5521257", "0.54795694", "0.54672885", "0.54662114", "0.5465213", "0.54544663", "0.54446226", "0.54256517", "0.542181", "0.54175436", "0.5387736", "0.5382453", "0.5379999", "0.53742707", "0.5362782", "0.5362345", "0.53550935", "0.5340597" ]
0.75692475
0
Update and return an object. This is a thin wrapper around the findAndModify_ command. The positional arguments are designed to match the first three arguments
def find_and_modify(self, query={}, update=None, upsert=False, sort=None, **kwargs):
    if (not update and not kwargs.get('remove', None)):
        raise ValueError("Must either update or remove")
    if (update and kwargs.get('remove', None)):
        raise ValueError("Can't do both update and remove")
    if query:
        kwargs['query'] = query
    if update:
        kwargs['update'] = update
    if upsert:
        kwargs['upsert'] = upsert
    if sort:
        if isinstance(sort, list):
            kwargs['sort'] = helpers._index_document(sort)
        elif (isinstance(sort, OrderedDict) or
              isinstance(sort, dict) and len(sort) == 1):
            kwargs['sort'] = sort
        else:
            raise TypeError("sort must be a list of (key, direction) "
                            "pairs, a dict of len 1, or an instance of "
                            "OrderedDict")
    out = self.database.command("findAndModify", self.name, **kwargs)
    if not out['ok']:
        if out["errmsg"] == "No matching object found":
            return None
        else:
            raise ValueError("Unexpected Error: %s" % (out,))
    return out.get('value')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_and_modify(self, cls, *args, **kwargs):\n\n decorate = kwargs.pop('decorate', None)\n if self.autoflush:\n self.flush()\n m = mapper(cls)\n obj = self.impl.find_and_modify(m.collection, *args, **kwargs)\n if obj is None: return None\n cursor = ODMCursor(self, cls, iter([ obj ]), refresh=True, decorate=decorate)\n result = cursor.first()\n state(result).status = ObjectState.clean\n return result", "def update_object(self, name: str) -> None:", "def update(\n self,\n *args: Union[dict, Mapping],\n session: Optional[ClientSession] = None\n ):\n self.set_session(session=session)\n return (\n self.UpdateQueryType(\n document_model=self.document_model,\n find_query=self.get_filter_query(),\n )\n .update(*args)\n .set_session(session=self.session)\n )", "def update(self, request, pk):\n if pk is None:\n for item in request.data:\n # get object by its primary key\n obj = self._object_get(item[self.model._meta.pk.attname])\n self._object_update(obj, item)\n else:\n obj = self._object_get(pk)\n self._object_update(obj, request.data)\n return obj", "def update(self, request, pk=None): #update a specific object\n return Response({'http_method': 'PUT'})", "def do_update(self, arg):\n args = arg.split()\n object_dict = storage.all()\n if len(args) == 0:\n print(\"** class name missing **\")\n return\n if args[0] in self.class_dict:\n if len(args) == 1:\n print(\"** instance id missing **\")\n return\n elif len(args) == 2:\n print(\"** attribute name missing **\")\n return\n elif len(args) == 3:\n print(\"** value missing **\")\n return\n else:\n print(\"** class doesn't exist **\")\n return\n\n for i in range(len(args)):\n if args[i].startswith('\"') and args[i].endswith('\"'):\n args[i] = args[i][1:-1]\n\n for full_key in object_dict.keys():\n key = full_key.split('.')\n key_id = key[1]\n if args[0] in self.class_dict:\n if args[1] == object_dict[full_key].id:\n setattr(object_dict[full_key], args[2], args[3])\n setattr(object_dict[full_key], \"updated_at\",\n datetime.now())\n storage.save()\n return\n else:\n print(\"** class doesn't exist **\")\n return\n print(\"** no instance found **\")", "def update(self, **kwargs):\n return self._object.update(meta=kwargs)", "def update(self, **kwargs):\n return self.parent.update_instance(self.name, kwargs)", "def update_object(self, name: str) -> None:\n try:\n object = Object.from_name(name)\n except Object.NotFound as error:\n log.warning(f'Cannot add new objects using TNSQueryManager')\n raise TNSError(str(error)) from error\n if 'iau' in object.aliases:\n if name != object.aliases['iau']:\n log.debug(f'Searching with name {object.aliases[\"iau\"]} <- {name}')\n name = object.aliases['iau']\n elif 'ztf' in object.aliases:\n log.debug(f'Searching TNS for IAU name {name}')\n name = self.tns.search_name(name).objname\n if name is None:\n raise TNSError(f'Could not find IAU name {name}')\n else:\n raise TNSError(f'No support identifier found {name}')\n response = self.tns.search_object(name)\n if response.is_empty:\n raise TNSError(f'No data on object {name}')\n else:\n if info := self.__build_info(name, object, response):\n Object.update(object.id, **info)\n else:\n log.info(f'No changes for {name}')", "def update(self, **kwargs):\n return self._update_data(self.put(None, data=kwargs))", "def update(self, *args):\n qry = UpdateEntityQuery(self)\n self.context.add_query(qry)\n return self", "def update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def 
update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def update(cls, row_id, **kwargs):\n cls.delete(row_id)\n # obj = cls.query.filter_by(id=row_id).first()\n # for k, v in kwargs.items():\n # obj[k] = v\n # obj = cls.query.filter_by(id=row_id).update(kwargs)\n kwargs[\"id\"] = row_id\n obj = cls(**kwargs)\n #print(\"the type of updated object is\", type(obj))\n return commit(obj)", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def update(self, *args, **kwargs): # real signature unknown\n pass", "def do_update(self, *args):\n if len(args) == 1:\n args = [ele for ele in args[0].split(' ')]\n if args[0] == '':\n print(\"** class name missing **\")\n return\n if args[0] not in self.list_classes:\n print(\"** class doesn't exist **\")\n return\n if len(args) < 2:\n print(\"** instance id missing **\")\n return\n elif len(args) < 3:\n print(\"** attribute name missing **\")\n return\n elif len(args) < 4:\n print(\"** value missing **\")\n return\n\n storage.reload()\n dict_objs = storage.all()\n if dict_objs is None or dict_objs == []:\n print(\"** no instance found **\")\n return\n\n key = \"{}.{}\".format(args[0], args[1])\n if key in dict_objs.keys():\n obj = dict_objs[key]\n if args[2] in obj.__class__.__dict__:\n obj.__dict__[args[2]] =\\\n type(obj.__class__.__dict__[args[2]])(args[3])\n else:\n obj.__dict__[args[2]] = args[3]\n storage.save()\n else:\n print(\"** no instance found **\")", "async def update_one(\n self,\n where: t.Mapping[str, t.Any],\n data: t.Mapping[str, t.Any],\n ) -> t.Optional[t.Type[Model]]:\n\n data = await self.collection.find_one_and_update(\n filter=where,\n update={'$set': data},\n return_document=ReturnDocument.AFTER,\n )\n return self.model_class(**data) if data else None", "def update(self, obj, data):\n self.get(obj[self.model.pk_field.name])\n self.validate_fields(data)\n\n fields = []\n values = []\n\n for k, v in data.iteritems():\n if k in self.model.get_fields_name():\n fields.append(k)\n values.append(v)\n\n conn = self.get_connector()\n cursor = conn.cursor()\n update = \" ,\".join([\"{0}='{1}'\".format(f, v) for f, v in zip(fields,\n values)])\n query = \"update {0} set {1} WHERE {2}={3}\".format(\n self.ressource_config[\"table\"],\n update,\n self.model.pk_field.name,\n obj[self.model.pk_field.name]\n )\n\n cursor.execute(query)\n conn.commit()\n conn.close()\n\n return self.get(obj[self.model.pk_field.name])", "def update(self, commit=True, **kwargs):\n # Prevent changing IDS\n kwargs.pop('id', None)\n for attr, value in kwargs.iteritems():\n # Flask-restful makes everything None by default\n if value is not None:\n setattr(self, attr, value)\n return commit and self.save() or self", "async def find_one_and_update(self, spec, update_fields, upsert=False, return_document=False, fields=None, cursor=None):\n if not cursor:\n cursor = self._cursor\n spec[DELETE_FLAG] = {\"$ne\": True}\n if \"_id\" in spec:\n spec[\"_id\"] = self._convert_id_object(spec[\"_id\"])\n result = await cursor.find_one_and_update(spec, update_fields, projection=fields, upsert=upsert, return_document=return_document)\n #if result and \"_id\" in result:\n # result[\"_id\"] = str(result[\"_id\"])\n return result, None", "def update_one(\n self, *args, 
session: Optional[ClientSession] = None\n ) -> UpdateOne:\n return self.update(*args, session=session)", "def update(*args):", "def salesforce_update(self, obj_name, obj_id, **kwargs):\n self.builtin.log(\n \"Updating {} {} with values {}\".format(obj_name, obj_id, kwargs)\n )\n obj_class = getattr(self.cumulusci.sf, obj_name)\n return obj_class.update(obj_id, kwargs)", "def _update(self, model_obj):\n conn = self._get_session()\n db_item = None\n\n # Fetch the record from database\n try:\n identifier = getattr(model_obj, id_field(self.entity_cls).attribute_name)\n db_item = conn.query(self.model_cls).get(\n identifier\n ) # This will raise exception if object was not found\n except DatabaseError as exc:\n logger.error(f\"Database Record not found: {exc}\")\n raise\n\n if db_item is None:\n conn.rollback()\n conn.close()\n raise ObjectNotFoundError(\n {\n \"_entity\": f\"`{self.entity_cls.__name__}` object with identifier {identifier} \"\n f\"does not exist.\"\n }\n )\n\n # Sync DB Record with current changes. When the session is committed, changes are automatically synced\n try:\n for attribute in attributes(self.entity_cls):\n if attribute != id_field(self.entity_cls).attribute_name and getattr(\n model_obj, attribute\n ) != getattr(db_item, attribute):\n setattr(db_item, attribute, getattr(model_obj, attribute))\n except DatabaseError as exc:\n logger.error(f\"Error while updating: {exc}\")\n raise\n finally:\n if not current_uow:\n conn.commit()\n conn.close()\n\n return model_obj" ]
[ "0.75251395", "0.63186234", "0.62915385", "0.62504715", "0.62350994", "0.61769533", "0.6068322", "0.604922", "0.59849715", "0.595233", "0.5935577", "0.5917528", "0.5917528", "0.5917528", "0.59154737", "0.5905923", "0.5905923", "0.5905923", "0.5905923", "0.5905923", "0.5905923", "0.59025794", "0.589282", "0.5888998", "0.5886701", "0.5886035", "0.5854972", "0.583416", "0.58321935", "0.5812958" ]
0.7273801
1
Get a list of distinct values for `key` among all documents in this collection.
def distinct(self, key):
    return self.database.command({'distinct': self.name, 'key': key})['values']
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_distinct(self, collection, key):\n obj = getattr(self.db, collection)\n result = obj.distinct(key)\n return result", "def getall(self, key):\n return self.values.get(key, [])", "async def get_all(self, key: datastore.Key) -> RV:\n\t\treturn await (await self.get(key)).collect() # type: ignore[return-value]", "def list_values(key):\n return meta.list_values(key=key)", "def unique(self, key: Callable[[T], Union[str, int, float]]=None) -> 'List[T]':\n return unique_values(self.array, key)", "def list_ids (self, key):\n\n list_of_key_values = [str(x[key]) for x in self.result]\n\n self.result = list(dict.fromkeys([re.findall(r'\\b\\d+\\b', x)[0] for x in list_of_key_values if len(re.findall(r'\\b\\d+\\b', x)) !=0]))\n\n return self", "def get_keys(self, ckey=None):\n if ckey:\n keys = self._get_keys(ckey)\n else:\n keys = self.keys()\n for key in self.keys():\n keys += [k for k in self._get_keys(key)]\n return list(set(keys))", "def unique(self, key, lst=None):\n d = self.find(key, lst)\n vals = set(d.values())\n return sorted(list(vals))", "def union(self, key: str, skip_duplicates=False) -> list:\n result = []\n for items in self.get(key):\n for item in items:\n if skip_duplicates and item in result:\n continue\n result.append(item)\n return result", "def get_for_key(self, key) -> list:\n return [res[key] for res in self.list]", "def keys(self):\n return [kvp.key for kvp in self.keyvaluepair_set.all()]", "def getlist(self, key):\n try:\n vals = _dict_getitem(self, key.lower())\n except KeyError:\n return []\n else:\n if isinstance(vals, tuple):\n return [vals[1]]\n else:\n return vals[1:]", "def get_all_keys(self):\n r = []\n with self.lock:\n for key in self.keys():\n if self.get(key):\n r.append(key)\n\n return r", "def keys(self):\n # Collect all keys in each bucket\n all_keys = []\n for bucket in self.buckets:\n for key, value in bucket.items():\n all_keys.append(key)\n return all_keys", "def keys(self):\n # TODO: Collect all keys in each of the buckets\n all_keys = [] # Will store all the key\n for bucket in self.buckets:\n for key in bucket:\n if key is not None:\n all_keys.append(key[0])\n return all_keys", "def keys(self):\n return [key for key, value in self.items()]", "def getAllValues(self, keyName):\n self._db._c.execute(\"SELECT \" + keyName + \" FROM \" + self.tableName)\n\n return [ col[0] for col in self._db._c.fetchall() ]", "def get_key_values(self):\n return self.key_values", "def keys(self):\r\n return [key for key, value in self.iteritems()]", "def unique_values(array: Iterable[T], key: Callable[[T], Union[str, int, float]]=None) -> List[T]:\n values = set()\n unique_array = []\n\n if key is None:\n for v in array:\n if v not in values:\n unique_array.append(v)\n values.add(v)\n else:\n for v in array:\n v_key = key(v)\n if v_key not in values:\n unique_array.append(v)\n values.add(v_key)\n\n return unique_array", "def getPubs(self, key):\n if hasattr(key, \"encode\"):\n key = key.encode(\"utf-8\") # convert str to bytes\n return self.getVal(self.pubs, key)", "def list_all_keys(self):\n \n return self.keys", "def getlist(self, key):\n try:\n return dict.__getitem__(self, key)\n except KeyError:\n return []", "def quantize_key_values(key):\n if isinstance(key, dict):\n return key.keys()\n\n return key", "def find_all_bykey(cls, keydict):\n return cls.dbm().modelclass_find_all_bykey(cls, keydict)", "def values(self):\n # TODO: Collect all values in each of the buckets\n all_values = [] # Will store all the key\n\n for bucket in self.buckets:\n for value in 
bucket:\n if value is not None:\n all_values.append(value[1])\n return all_values", "def values(self):\n return [kvp.value for kvp in self.keyvaluepair_set.all()]", "def key_list(dict):\n list = []\n for key in dict:\n list.append(key)\n return list", "def keys(self):\r\n return [k for k in self]", "def get_field_vals(self, field):\n\n if field not in self.metadata_fields:\n raise MissingMetadataError([field])\n\n values = set()\n for document in self.documents:\n values.add(getattr(document, field))\n\n return sorted(list(values))" ]
[ "0.6683744", "0.6641708", "0.6641306", "0.6383321", "0.62784886", "0.61069864", "0.6057863", "0.60040236", "0.5974263", "0.5915189", "0.58046377", "0.5777479", "0.5773848", "0.5760654", "0.5703736", "0.5653801", "0.56428313", "0.56197447", "0.5613837", "0.5603023", "0.5600389", "0.5582602", "0.5581219", "0.5570501", "0.55584645", "0.55540854", "0.54938036", "0.5483039", "0.5477797", "0.54748964" ]
0.8222723
0
Insert a document or documents into this collection.
def insert(self, doc_or_docs):
    return self.database.connection.request.insert_documents(
        self.database.name, self.name, doc_or_docs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert(self, data):\n return self.collection.insert(data)", "def insert_documents(self, database, collection, doc_or_docs):\n validators.check_documents_to_insert(doc_or_docs)\n r = self.__get_response(settings.INS_DOCS,\n {\"db\": database, \"col\": collection}, data=doc_or_docs)\n if r[\"status\"] == 200:\n return r[\"result\"]\n raise Exception(r[\"result\"][\"message\"])", "def insert_to_collection(db, coll_name, docs):\n if isinstance(docs, list):\n db[coll_name].insert_many(docs)\n else:\n db[coll_name].insert_one(docs)", "def add(self, data):\n self.collection.insert(data)", "def insert_document(self, collection, doc):\n # Create/Access your collection\n mycol = self.db[collection]\n # Insert your document into the collection\n x = mycol.insert_one(doc)\n # Return the inserted id to verify success\n return x.inserted_id", "def add(cls, doc):\n cls.get_collection().add(doc)", "def insert_one(self, document: dict) -> None:\n if isinstance(document, dict):\n self._store_document(document)\n else:\n raise TypeError(\"The document must be a dictionary.\")\n self._dump()", "def add(self, docs: DocumentArray, *args, **kwargs):\n cursor = self.connection.cursor()\n try:\n psycopg2.extras.execute_batch(\n cursor,\n f'INSERT INTO {self.table} (ID, DOC) VALUES (%s, %s)',\n [\n (\n doc.id,\n doc.SerializeToString(),\n )\n for doc in docs\n ],\n )\n except psycopg2.errors.UniqueViolation as e:\n self.logger.warning(\n f'Document already exists in PSQL database. {e}. Skipping entire transaction...'\n )\n self.connection.rollback()\n self.connection.commit()", "def insert(db_name, collection_name, docs):\n db = client[db_name]\n collection = db[collection_name]\n return collection.insert_many(docs)", "def insert(cls, document, doc_id=None):\n if not doc_id:\n doc_id = ObjectId\n if callable(doc_id):\n doc_id = doc_id()\n\n document['_id'] = doc_id\n cls._add_shard(document)\n\n cls._make_call('insert', document)\n return doc_id", "def add_document(self, db_name, collection_name, doc) -> None:\n if not self._mongo_available:\n return\n collection: Collection = self._mongo_client[db_name][collection_name]\n collection.insert_one(document=doc)", "def add_document(collection: str, document: dict) -> None:\n validate_arguments({'collection': [collection, str],\n 'document': [document, dict]})\n DB[collection].insert_one(document)", "def DocumentInsert(self, wave_id, wavelet_id, blip_id, content, index=0):\n op = Operation(DOCUMENT_INSERT, wave_id, wavelet_id, blip_id,\n index=index, prop=content)\n self.__context.AddOperation(op)", "def dataInsert(self, collectionName, data):\n result = collectionName.insert(data)\n return result", "def insert_player(document):\n players_col.insert_one(document)", "def write_to_db(self, doc):\n self.db_connection[self.db_name][self.db_collection].insert_one(doc)", "def add(cls, document: dict) -> dict:\n errors = cls.validate_insert(document)\n if errors:\n raise ValidationFailed(document, errors)\n\n cls.deserialize_insert(document)\n try:\n if cls.logger.isEnabledFor(logging.DEBUG):\n cls.logger.debug(f\"Inserting {document}...\")\n cls._insert_one(document)\n if cls.logger.isEnabledFor(logging.DEBUG):\n cls.logger.debug(\"Document inserted.\")\n return cls.serialize(document)\n except pymongo.errors.DuplicateKeyError:\n raise ValidationFailed(\n cls.serialize(document), message=\"This document already exists.\"\n )", "def _store_document(self, document: dict) -> None:\n\n for item in document.items():\n if not is_bson_valid(item):\n raise 
InvalidTypeException(item)\n\n self._db[\"documents\"].append(document)", "def insert_one(self, data):\n _client = self.client\n _db = _client[self.database]\n _col = _db[self.collection]\n\n x = _col.insert_one(data)\n\n return x", "def insert(self, documents, index=0):\n\n # Initialize graph backend\n self.initialize()\n\n for _, document, _ in documents:\n if isinstance(document, dict):\n # Require text or object field\n document = document.get(self.text, document.get(self.object))\n\n if document is not None:\n if isinstance(document, list):\n # Join tokens as text\n document = \" \".join(document)\n\n # Create node\n self.addnode(index, data=document)\n index += 1", "def insert_many(self, documents: Iterable[dict]) -> None:\n for i, document in enumerate(documents):\n if isinstance(document, dict):\n self._store_document(document)\n else:\n raise TypeError(\n f\"The document at index {i} was not a dictionary. All documents must be dictionaries.\"\n )\n self._dump()", "def insert(self, data):\n\n if not data:\n raise ValueError('invalid data')\n\n # TODO: validate and insert data into model", "def add(self, document):\n return self.db.update({document['id']: document})", "async def bulk_insert(self, documents, alias=None):\n\n is_valid = True\n docs_to_insert = []\n\n for document_index, document in enumerate(documents):\n self.update_field_on_save_values(document, document._id is not None)\n try:\n is_valid = is_valid and self.validate_document(document)\n except Exception:\n err = sys.exc_info()[1]\n raise ValueError(\n \"Validation for document %d in the documents you are saving failed with: %s\"\n % (document_index, str(err))\n )\n\n if not is_valid:\n return\n\n docs_to_insert.append(document.to_son())\n\n if not is_valid:\n return\n\n doc_ids = await self.coll(alias).insert(docs_to_insert)\n\n for object_index, object_id in enumerate(doc_ids):\n documents[object_index]._id = object_id\n\n return documents", "def insert_school(mongo_collection, **kwargs):\n return mongo_collection.insert(kwargs)", "def insert_data(data, collec, many):\n db = client.get_database('tweetstorm')\n collection = db.get_collection(collec)\n if many:\n collection.insert_many(data)\n logger.info(f\"{ymdhms()} inserted {len(data)} tweets to {collec} collection\")\n else:\n collection.insert_one(data)\n logger.info(f\"{ymdhms()} inserted data {data} to {collec} collection\")", "def DocumentElementInsert(self, wave_id, wavelet_id, blip_id, position,\n element):\n op = Operation(DOCUMENT_ELEMENT_INSERT, wave_id, wavelet_id, blip_id,\n index=position,\n prop=element)\n self.__context.AddOperation(op)", "def save(self):\n if self.document.id:\n self.db.insert(self.document)\n else:\n self.db.update(self.document.id,self.document)", "def create(cls, collection, data, schema=None):\n validated = cls.validate(data, schema=schema)\n result = collection.insert_one(validated)\n return collection.find_one({\"_id\": result.inserted_id})", "def findtinsert(dictonary, collection):\n try:\n return collection.find_one(dictonary)['_id']\n except TypeError:\n return collection.insert(dictonary, safe=True)" ]
[ "0.742801", "0.73531365", "0.699744", "0.6972718", "0.6788306", "0.6659301", "0.65745", "0.6541688", "0.6511618", "0.6510872", "0.6499058", "0.6496551", "0.6489356", "0.64245105", "0.6365389", "0.6357426", "0.63096535", "0.6292675", "0.62807304", "0.62685513", "0.6178108", "0.6167567", "0.61562485", "0.6141049", "0.61000323", "0.6099198", "0.6077268", "0.60547924", "0.6039038", "0.60386235" ]
0.7562972
0
Update a document or documents in this collection.
def update(self, spec, document, upsert=False, multi=False):
    return self.database.connection.request.update_documents(
        self.database.name, self.name, spec, document, upsert, multi)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_documents(self, database, collection, spec, doc_or_docs, upsert,\n multi):\n validators.check_document_to_update(doc_or_docs)\n r = self.__get_response(settings.UPD_DOCS,\n {\"db\": database, \"col\": collection},\n data=doc_or_docs, q=spec, m=multi, u=upsert)\n if r[\"status\"] == 200:\n if r[\"result\"][\"error\"]:\n raise Exception(r[\"result\"][\"error\"])\n return r[\"result\"][\"n\"]\n raise Exception(r[\"result\"][\"message\"])", "def update_document(self, database, collection, _id, document):\n r = self.__get_response(settings.UPD_DOC,\n {\"db\": database, \"col\": collection, \"id\": str(_id)},\n data=document)\n if r[\"status\"] == 200:\n return r[\"result\"]\n raise Exception(r[\"result\"][\"message\"])", "def update(self, docs, commit=False):\n if not docs:\n return\n\n data = json.dumps(\n docs,\n default=lambda obj: obj.isoformat() if isinstance(\n obj, dt.datetime) else None\n )\n\n params = {}\n\n if commit:\n params['commit'] = 'true'\n\n return self.client.post(\n self._get_collection_url('update/json'),\n params=params,\n body=data\n )", "def update_document(collection: str, query: dict, data: dict) -> None:\n validate_arguments({'collection': [collection, str],\n 'query': [query, dict],\n 'data': [data, dict]})\n new_document = find_document(collection, query=query)\n if new_document is None:\n raise Exception('Didnt find a document to update')\n DB[collection].delete_one(query)\n for key in data:\n new_document[key] = data[key]\n add_document(collection, new_document)", "def update(cls, collection, uid, data):\n validated = cls.validate(data)\n validated.pop(\"_id\", None) # remove field \"_id\" if set\n object_uid = cls.object_id(uid)\n collection.update_one({\"_id\": object_uid}, {\"$set\": validated}, upsert=True)\n return collection.find_one({\"_id\": object_uid})", "def update_document(obj):\n index = obj.get_index_name()\n doc_type = obj.get_document_type()\n body = dict(doc=obj.get_document_body())\n try:\n ES.update(index=index, doc_type=doc_type, body=body, id=obj.pk)\n except NotFoundError:\n raise DocumentNotFound(obj.get_index_name(), obj.pk)", "def update_document(self, collection, query, mongo_id):\n\n try:\n self.client[self.db][collection].update_one(\n {'_id': mongo_id},\n query)\n except errors.PyMongoError as e:\n print \"Exception\", e", "def update_document(self):\n pass", "def update_document(\n self,\n index: str,\n doc_id: str,\n document: Dict[str, Any],\n partial_update: bool = False,\n ):\n if partial_update:\n self.__client__.update(index=index, id=doc_id, body={\"doc\": document})\n self.__client__.index(index=index, id=doc_id, body=document)", "def update_document(self, data):\n if not isinstance(data, pylastica.document.Document) and not isinstance(data, pylastica.script.Script):\n raise TypeError(\"data must be an instance of Document or Script: %r\" % data)\n if not data.has_id():\n raise pylastica.exception.InvalidException(\"Document id is not set.\")\n return self.index.client.update_document(data.doc_id, data, self.index.name, self.name)", "def update(self, data, id_obj=None, query_data=None):\n if id_obj:\n return self.collection.update({'_id': id_obj}, {\"$set\": data})\n return self.collection.update(query_data, {\"$set\": data})", "def update(\n self,\n *args: Union[dict, Mapping],\n session: Optional[ClientSession] = None\n ):\n self.set_session(session=session)\n return (\n self.UpdateQueryType(\n document_model=self.document_model,\n find_query=self.get_filter_query(),\n )\n .update(*args)\n 
.set_session(session=self.session)\n )", "def update(self, document_id, update_spec, namespace, timestamp):\n\n index, doc_type = self._index_and_mapping(namespace)\n document = self.BulkBuffer.get_from_sources(index,doc_type,u(document_id))\n if document:\n updated = self.apply_update(document, update_spec)\n # _id is immutable in MongoDB, so won't have changed in update\n updated['_id'] = document_id\n self.upsert(updated, namespace, timestamp)\n else:\n updated = {\"_id\": document_id}\n self.upsert(updated, namespace, timestamp, update_spec)\n # upsert() strips metadata, so only _id + fields in _source still here\n return updated", "def update_document(self, type: Type, id: str,\n new_data: Dict[str, Any]) -> None:\n if collection := self.client.collection(f'{type}'):\n if document_ref := collection.document(document_id=id):\n if document_ref.get().exists:\n document_ref.update(new_data)\n else:\n document_ref.create(new_data)", "def update(self, parameters):\n self.__enforce_connected()\n self.collection._update(self, parameters)", "def update(self, docs: DocumentArray, *args, **kwargs):\n cursor = self.connection.cursor()\n psycopg2.extras.execute_batch(\n cursor,\n f'UPDATE {self.table} SET DOC = %s WHERE ID = %s',\n [\n (\n doc.SerializeToString(),\n doc.id,\n )\n for doc in docs\n ],\n )\n self.connection.commit()", "def update(self, force = False):\n self.__enforce_connected()\n parameters = {}\n if(force):\n parameters[\"force\"] = \"true\"\n self.collection._update(self, parameters)", "def update_doc(c, i, d, u=False):\n try:\n c.update_one({'_id': i}, {'$set': d}, upsert = u)\n return True\n except:\n return False", "def update(self, index, id, **kwargs):\n url = f'{self.host}{index}/_doc/{id}/_update'\n data = {'doc': {**kwargs}}\n requests.post(url, json=data)\n self.flush(index)\n return self.get(index, id)", "def update(self, collection_id, body, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.update_with_http_info(collection_id, body, **kwargs)\n else:\n (data) = self.update_with_http_info(collection_id, body, **kwargs)\n return data", "def update(cls, doc_id, document, specs=None, updater=raw_updater, *args, **kwargs):\n document = updater(document)\n ret = cls._make_call('update', cls._make_specs(doc_id, specs), document, *args, **kwargs)\n if ret:\n return ret['n']", "def update(self, record):\n record = dict_for_mongo(record)\n id_dict = {'_id': self.record['_id']}\n self.collection.update(id_dict, {'$set': record})\n\n # Set record to the latest record from the database\n self.record = self.__class__.collection.find_one(id_dict)", "def document_update(index_name, doc_type, doc_id, doc=None, new=None):\n if doc:\n resp = es.index(index=index_name, doc_type=doc_type,\n id=doc_id, body=doc)\n print(resp)\n else:\n resp = es.update(index=index_name, doc_type=doc_type,\n id=doc_id, body={\"doc\": new})", "def update(self, doc):\n if app.config.get(\"READ_ONLY_MODE\", False) and app.config.get(\"SCRIPTS_READ_ONLY_MODE\", False):\n app.logger.warn(\"System is in READ-ONLY mode, update command cannot run\")\n return\n\n return requests.post(self.target() + self.id + \"/_update\", data=json.dumps({\"doc\": doc}))", "def collection_update(request, *args, **kwargs):\n patch_data = request.data\n # Extract form data and validate\n form = CollectionForm(patch_data)\n if not form.is_valid():\n data = json.dumps({\"errors\": form.errors})\n return HttpResponse(content=data, content_type=\"application/json\", 
status=status.HTTP_400_BAD_REQUEST)\n # Update the collection\n collection = Collection.objects.get(id=int(kwargs['pk']))\n if \"title\" in patch_data:\n collection.title = patch_data[\"title\"]\n if \"permission\" in patch_data:\n collection.public = patch_data[\"permission\"] == \"Public\"\n if \"comment\" in patch_data:\n collection.comment = patch_data[\"comment\"]\n collection.save()\n # Prepare a response\n data = json.dumps({'success': True, 'id': collection.id, 'url': \"/collection/{0}\".format(collection.id)})\n return HttpResponse(data, content_type=\"json\")", "def updateDocument(self, document):\n data = self.updateDocumentAll([document])\n try:\n return data[0]\n except: pass", "def update_documents(self, engine_name, documents):\n endpoint = \"engines/{}/documents\".format(engine_name)\n data = json.dumps(documents)\n\n return self.swiftype_session.request('patch', endpoint, data=data)", "def put(self, data):\n self.validate(data)\n\n mongo['readable-api'].foo.update(\n {\"foo\": self.data[\"foo\"]},\n data,\n )", "def update(self, docid, doc):\n self.delete(docid)\n wrapper = RedisWrapper(self.dbprefix, self.client, docid)\n self._store_doc(doc, wrapper)", "def my_find_update(the_coll, search_dict, update_dict):\n x = the_coll.find(search_dict,limit=1)\n if x.count() == 0:\n the_coll.insert(update_dict)\n else:\n for x in the_coll.find(search_dict):\n x.update(update_dict)\n the_coll.save(x)" ]
[ "0.7344931", "0.7210694", "0.7126921", "0.69508326", "0.69400084", "0.6855809", "0.685467", "0.683156", "0.6809697", "0.6797412", "0.67653096", "0.6735676", "0.6715537", "0.66198725", "0.66006666", "0.65405786", "0.6447487", "0.64346045", "0.64316946", "0.63662523", "0.6353866", "0.6344282", "0.63040996", "0.6298283", "0.62332565", "0.6207454", "0.6142577", "0.61263853", "0.6115672", "0.6100395" ]
0.7591059
0
Remove a document or documents from this collection.
def remove(self, spec_or_id=None): if isinstance(spec_or_id, ObjectId) or \ isinstance(spec_or_id, basestring): return self.database.connection.request.delete_document( self.database.name, self.name, spec_or_id) if not spec_or_id: spec_or_id = {} return self.database.connection.request.delete_replace_documents( self.database.name, self.name, spec_or_id, [])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove(self, document):\n return self.db.pop(document['id'], None)", "async def remove_doc(self, *args, **kwargs):\n pass", "def remove(self, query: dict, limit: Optional[int] = 0) -> None:\n\n matches = self.find(query, limit)\n for match in matches:\n self._db[\"documents\"].remove(match)\n\n self._dump()", "def delete_document(self):\n pass", "def drop_from_couch(self):\n try:\n self.itr.couch_db.delete_doc(self.itr.iteration_id)\n except ResourceNotFound:\n pass", "def delete(cls, collection, uid):\n result = collection.remove({\"_id\": cls.object_id(uid)})\n return result", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n try:\n self._data.remove(self)\n except ValueError:\n pass", "def clear(self) -> None:\r\n utils.rm_collection(self.target_collection)", "def remove_person(self, document):\n del self.__people[document]", "def obj_delete(self, request=None, **kwargs):\n self.get_collection(request).remove({ \"_id\": ObjectId(kwargs.get(\"pk\")) })", "def remove(self):\r\n\t\tself._delete()", "def delete(self, query):\n self.collection.remove(query)", "def __delete__(self, instance):\n instance.doc.pop(self.slug, None)", "def delete_document(self, db_name, collection_name, doc) -> None:\n if not self._mongo_available:\n return\n collection: Collection = self._mongo_client[db_name][collection_name]\n collection.delete_one(doc)", "def remove(self, document_id, namespace, timestamp):\n index, doc_type = self._index_and_mapping(namespace)\n\n action = {\n '_op_type': 'delete',\n '_index': index,\n '_type': doc_type,\n '_id': u(document_id)\n }\n\n meta_action = {\n '_op_type': 'delete',\n '_index': self.meta_index_name,\n '_type': self.meta_type,\n '_id': u(document_id)\n }\n\n self.index(action, meta_action)", "def clear_document(self, document):\n self._clear_document(document)\n #self.commit()" ]
[ "0.7030107", "0.70286924", "0.66033906", "0.64376163", "0.641092", "0.62891567", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.62752926", "0.6182326", "0.6156195", "0.61414003", "0.60919845", "0.6079766", "0.60382503", "0.60327893", "0.6024673", "0.60023916", "0.6001092" ]
0.7208417
0
Returns a "real" Project object. The returned object is "writable" too, that is, its state can be changed, etc.
def real_obj(self): return RemoteProject(xml_data=etree.tostring(self))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def project(self, project_id):\r\n return p.Project(self, project_id)", "def project(project_no_init: Project) -> Project:\n from pdm.cli.utils import merge_dictionary\n\n data = {\n \"project\": {\n \"name\": \"test-project\",\n \"version\": \"0.0.0\",\n \"description\": \"\",\n \"authors\": [],\n \"license\": {\"text\": \"MIT\"},\n \"dependencies\": [],\n \"requires-python\": \">=3.7\",\n },\n \"build-system\": DEFAULT_BACKEND.build_system(),\n }\n\n merge_dictionary(project_no_init.pyproject._data, data)\n project_no_init.pyproject.write()\n # Clean the cached property\n project_no_init._environment = None\n return project_no_init", "def project(self):\n if self.__project is not None:\n return self.__project\n\n # Original path might not be our final path\n # For example: loading angr on a remote android project\n orig_path = self._process.modules[self._thread.pc].path\n new_path = common.load_file(self._process, orig_path).name\n\n self.__project = angr.Project(\n new_path,\n load_options=self.load_options,\n concrete_target=self._concrete_target,\n use_sim_procedures=self.use_sim_procedures,\n exclude_sim_procedures_list=self.exclude_sim_procedures_list,\n support_selfmodifying_code=self.support_selfmodifying_code)\n\n self.__sim_procedures_resolved = False\n return self.__project", "def make_instance(self, include_optional):\n # model = openapi_client.models.project.Project() # noqa: E501\n if include_optional :\n return Project(\n budget_amount = 10000, \n budget_time = 60, \n customer_id = 56, \n hourly_rate = 3000.0, \n id = 56, \n login_id = 56, \n name = 'My Project', \n note = '0', \n status = 'OPEN', \n due_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), \n consumed_time = 56, \n consumed_amount = 56\n )\n else :\n return Project(\n name = 'My Project',\n )", "def make_instance(self, include_optional):\n # model = telestream_cloud_qc.models.project.Project() # noqa: E501\n if include_optional :\n return Project(\n id = '0', \n name = '0', \n status = 'active', \n template = '0', \n options = {\"file_tests\":{\"container_test\":{\"checked\":true,\"container\":\"Mov\",\"reject_on_error\":true},\"video_codec_test\":{\"checked\":true,\"video_codec\":\"ProRes\",\"video_profile\":\"VideoProfileNone\",\"video_level\":\"VideoLevelNone\",\"reject_on_error\":true},\"container_essence_consistency_test\":{\"checked\":true},\"enhanced_syntax_test\":{\"checked\":true},\"framesize_test\":{\"checked\":true,\"horizontal_size\":1920,\"vertical_size\":1080},\"pixel_aspect_ratio_test\":{\"checked\":true,\"pixel_aspect_ratio_numerator\":1,\"pixel_aspect_ratio_denominator\":1},\"i_tunes_compatibility_test\":{\"checked\":true,\"reject_on_error\":true},\"framerate_test\":{\"checked\":true,\"framerate_numerator\":25,\"framerate_denominator\":1},\"video_bit_rate_mode_test\":{\"checked\":true,\"mode\":\"VBR\"},\"video_bitrate_test\":{\"checked\":true,\"video_bitrate_lower\":88,\"video_bitrate_upper\":220}}}\n )\n else :\n return Project(\n )", "def get_project(self):\n raise NotImplementedError(\"get_project is not implemented\")", "def project(self, request):\n return self._project(request, 'project')", "def project(self) -> aws_cdk.aws_codebuild.IProject:\n return jsii.get(self, \"project\")", "def project():\n\n return M(c=\"project\", f=\"task\")(\n M(\"Tasks\", f=\"task\")(\n M(\"Create\", m=\"create\"),\n M(\"My Open Tasks\", vars={\"mine\":1}),\n ),\n )", "def getProject(self):\n\t\treturn self.__project", "def getVirtualProject(self):\n\n # Create only once per session\n if 
self.__virtualProject:\n return self.__virtualProject\n\n # Place virtual project in application's \".jasy\" folder\n path = os.path.abspath(os.path.join(\".jasy\", \"virtual\"))\n\n # Set package to empty string to allow for all kind of namespaces in this virtual project\n jasy.core.File.write(os.path.join(path, \"jasyproject.yaml\"), 'name: virtual\\npackage: \"\"\\n')\n\n # Generate project instance from path, store and return\n project = Project.getProjectFromPath(path, self)\n self.__virtualProject = project\n self.__projects.append(project)\n\n return project", "def getProject(self):\r\n return self.project", "def project(self):\n return self._project", "def project(self):\n return self._project", "def project(self):\n return self._project", "def project(self):\n return self._project", "def project(self):\n return self._project", "def project(self):\n return read_small_file(self.homeDirectory + \"/.project\")", "def projects(self):\r\n return p.Projects(self)", "def GetProject(self):\n errors = []\n objects = list(request_helper.MakeRequests(\n requests=[(self.compute.projects,\n 'Get',\n self.messages.ComputeProjectsGetRequest(\n project=properties.VALUES.core.project.Get(\n required=True),\n ))],\n http=self.http,\n batch_url=self.batch_url,\n errors=errors,\n custom_get_requests=None))\n if errors:\n utils.RaiseToolException(\n errors,\n error_message='Could not fetch project resource:')\n return objects[0]", "def getProject(self):\n return self.project", "def get_project(project_id):\n return Project.objects.get(id=project_id)", "def project(self, v):\n return v", "def project(self):\n\n return self._project", "def project(self, id: Optional[ProjectID] = None) -> Project:\n _args = [\n Arg(\"id\", id, None),\n ]\n _ctx = self._select(\"project\", _args)\n return Project(_ctx)", "def null(cls):\n return GXSTORAGEPROJECT()", "def copy(self):\n p = Project()\n p.name = self.name\n p.path = self.path\n p._plugin = self._plugin\n p.stage = self.stage.copy()\n p.stage.project = p\n\n for sprite in self.sprites:\n s = sprite.copy()\n s.project = p\n p.sprites.append(s)\n\n for actor in self.actors:\n if isinstance(actor, Sprite):\n p.actors.append(p.get_sprite(actor.name))\n else:\n a = actor.copy()\n if isinstance(a, Watcher):\n if isinstance(a.target, Project):\n a.target = p\n elif isinstance(a.target, Stage):\n a.target = p.stage\n else:\n a.target = p.get_sprite(a.target.name)\n p.actors.append(a)\n\n p.variables = dict((n, v.copy()) for (n, v) in self.variables.items())\n p.lists = dict((n, l.copy()) for (n, l) in self.lists.items())\n p.thumbnail = self.thumbnail\n p.tempo = self.tempo\n p.notes = self.notes\n p.author = self.author\n return p", "def get_permission_object(self):\n if not self.project:\n self.project = get_object_or_404(Proyecto, pk=self.kwargs['project_pk'])\n return self.project", "def _create_project(org, project_name):\n project = Project(\n org=org,\n name=project_name\n )\n project.save()\n return project", "def _change_project(self):\n project_key = utils.prompt_string(\n 'You are currently managing Google Cloud Project {!r}.\\n'\n 'This project is currently saved as {!r}.\\n'\n 'All of the currently configured projects include: {}.\\n'\n 'Which project would you like to switch to?'.format(\n self._config.project, self._config.key,\n ', '.join(common.get_available_configs(self._config.path))))\n return _Manager.new(\n self._config.path, self._prefer_gcs, project_key=project_key,\n version=self._version)" ]
[ "0.76120716", "0.7492247", "0.73942703", "0.71796495", "0.7143161", "0.7013733", "0.70007664", "0.6964538", "0.69511306", "0.6901069", "0.68862545", "0.68084615", "0.6767753", "0.6767753", "0.6767753", "0.6767753", "0.6767753", "0.67560315", "0.6730307", "0.67096895", "0.6690807", "0.66904545", "0.6633559", "0.65842766", "0.65606534", "0.6514309", "0.6506554", "0.6490862", "0.6482335", "0.646138" ]
0.7535224
1
Returns a Collection with objects which match the xpath. path is the remote path which is used for the http request. xp is the xpath which is used for the search (either an Expression object or a string).
def _find(path, xp, tag_class={}, **kwargs): request = Osc.get_osc().get_reqobj() xpath = xp if hasattr(xp, 'tostring'): xpath = xp.tostring() f = request.get(path, match=xpath, **kwargs) return fromstring(f.read(), **tag_class)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find(self, path, root=None):\n\n root = root if root is not None else self.root\n return root.xpath(path, namespaces=self.nsmap) or []", "def findall(self, path: str, namespaces: Optional[NamespacesType] = None) -> List[E]:\n path = _REGEX_TAG_POSITION.sub('', path.strip()) # Strip tags positions from path\n namespaces = self._get_xpath_namespaces(namespaces)\n parser = XPath2Parser(namespaces, strict=False)\n context = XPathSchemaContext(self.xpath_node)\n\n return cast(List[E], parser.parse(path).get_results(context))", "def xpath (entry, path):\n if isinstance (path, str):\n path = path.split('/')\n result=entry\n for key in path: result=result[key]\n return result", "def elements(xpath_selection):\n driver = Driver().connect()\n return driver.find_elements_by_xpath(xpath_selection)", "def retrieve_requests(self, path):\n body = {}\n body['path'] = path\n data = json.dumps(body)\n return self._send_request(\"/retrieve\", data)", "def find_elements(self, xpath:str):\n try:\n elements = self.driver.find_elements_by_xpath(xpath)\n \n except NoSuchElementException:\n elements = []\n \n return elements", "def xpath_get_all(self, path, dtype=str, default=None):\n items = self._tag.xpath(path)\n response = []\n for item in items:\n try:\n response.append(dtype(item.text))\n except TypeError:\n response.append(default)\n\n return response", "def scrape(etree, xpaths):\n return {k: get_xpath_val(apply_xpath(v, etree, k), v.path)\n if isinstance(v, lxml.etree.XPath)\n else [scrape(i, v[1]) for i in apply_xpath(v[0], etree, k)]\n for k, v in xpaths.items()}", "def explore(self):\n path = self.req_get.get('path') or ''\n root_path = self.root_path\n abspath = browser.absolute_path(path, root_path)\n try:\n folders, filenames = browser.get_files(self.extensions, abspath,\n root_path, relative=True)\n except IOError, e:\n if os.path.isfile(abspath):\n raise exc.HTTPFound()\n # TODO: make sure we don't have absolute url in the error message.\n raise exc.HTTPNotFound(str(e))\n\n lis = []\n\n for folder in folders:\n lis += [{\n 'name': os.path.basename(folder),\n 'type': 'folder',\n 'path': folder,\n # status will be updated in js\n 'status': None,\n }]\n for filename in filenames:\n lis += [{\n 'name': os.path.basename(filename),\n 'type': 'file',\n 'path': filename,\n # status will be updated in js\n 'status': None,\n }]\n # We want to order alphabetically by columns\n n = int(math.ceil(len(lis) / 2.0))\n return filter(bool, list(sum(izip_longest(lis[:n], lis[n:]), ())))", "def query(self, path, method=None, headers=None, **kwargs):\n url = self.url(path)\n method = method or self.session.get\n log.info('%s %s', method.__name__.upper(), url)\n h = self.headers().copy()\n if headers:\n h.update(headers)\n response = method(url, headers=h, timeout=TIMEOUT, **kwargs)\n if response.status_code not in [200, 201]:\n codename = codes.get(response.status_code)[0]\n raise BadRequest('(%s) %s' % (response.status_code, codename))\n data = response.text.encode('utf8')\n return ElementTree.fromstring(data) if data else None", "def list(self, path):\n req = self.session.get(self.url + path + '/list',\n verify=False)\n jdata = req.json()\n if jdata['status'] != 'ok':\n raise Exception(\"Failed to query list: \\n\" + req.text)\n\n return jdata['data']", "def _parse_path(p_names):\n gnmi_elems = []\n for word in p_names:\n word_search = _RE_PATH_COMPONENT.search(word)\n if not word_search: # Invalid path specified.\n raise XpathError('xpath component parse error: %s' % word)\n if word_search.group('key') is 
not None: # A path key was provided.\n tmp_key = {}\n for x in re.findall(r'\\[([^]]*)\\]', word):\n tmp_key[x.split(\"=\")[0]] = x.split(\"=\")[-1]\n gnmi_elems.append(gnmi_pb2.PathElem(name=word_search.group(\n 'pname'), key=tmp_key))\n else:\n gnmi_elems.append(gnmi_pb2.PathElem(name=word, key={}))\n return gnmi_pb2.Path(elem=gnmi_elems)", "def xpath_parser(x_path, getall=False):\n try:\n xpath_parser.response\n except AttributeError:\n raise ('response not attached to function')\n\n sel = Selector(text=xpath_parser.response.text)\n getter = 'getall' if getall else 'get'\n output = getattr(sel.xpath(x_path), getter)()\n if isinstance(output, str):\n output = output.replace('\\n', '')\n else:\n exclude = {'\\n', ' '}\n output = ', '.join([i.strip() for i in output if i not in exclude])\n return output", "def find_by_path(self):\n db = CrawlDBI.DBI(dbtype='crawler')\n rv = db.select(table='checkables',\n fields=['rowid',\n 'path',\n 'type',\n 'cos',\n 'cart',\n 'ttypes',\n 'checksum',\n 'last_check',\n 'fails',\n 'reported'\n ],\n where='path=?',\n data=(self.path,))\n db.close()\n return rv", "def xpath_findall(xpath, xml_content):\n if LXML:\n # print(xml_content)\n root = etree.fromstring(xml_content.encode('utf-8'))\n for node in root.xpath(\"//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.xpath(\n xpath, namespaces={\"re\": \"http://exslt.org/regular-expressions\"})\n else:\n root = ET.fromstring(xml_content)\n for node in root.findall(\".//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.findall(xpath if xpath.startswith(\".\") else \".\" + xpath)", "def get_tpx_xpaths():\n\txpaths = {\"tpx_all_abs\" : \"count(//TIMEX3)\",\n\t\"tpx_date_abs\" : \"count(//TIMEX3[@type='DATE'])\",\n\t\"tpx_date_past_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PAST_REF'])\",\n\t\"tpx_date_present_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PRESENT_REF'])\",\n\t\"tpx_date_future_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='FUTURE_REF'])\",\n\t\"tpx_time_abs\" : \"count(//TIMEX3[@type='TIME'])\",\n\t\"tpx_duration_abs\" : \"count(//TIMEX3[@type='DURATION'])\",\n\t\"tpx_set_abs\" : \"count(//TIMEX3[@type='SET'])\"\n\t}\n\treturn xpaths", "def _path_names(xpath):\n if not xpath or xpath == '/': # A blank xpath was provided at CLI.\n return []\n return xpath.strip().strip('/').split('/') # Remove leading and trailing '/'. 
For example it turns it into ['interfaces', 'interface[name=Ethernet1]', 'state', 'counters']", "def children(self, path):\n url = u'/'.join(\n [self.conf[\"api\"], \"path\", escape_path(path).strip('/'), \"@children\"])\n params = {}\n self.logger.info(path)\n self.logger.debug(url)\n return self._get_iter(url, params)", "def localQuery(self,node,REQUEST=None, **kw):\n kw['path'] = '/'.join(node.getPhysicalPath())\n return ZCatalog.searchResults(self, REQUEST, **kw)", "def by_path(self, path):\n return self._client.get(\n url=self._client.get_full_url(\n self.\n get_path('by_path', realm=self._realm_name, group_path=path)\n )\n )", "def PathNames(xpath: Text) -> List[Text]:\n if not xpath or xpath == '/': # A blank xpath was provided.\n return []\n return re.split(r'''/(?=(?:[^\\[\\]]|\\[[^\\[\\]]+\\])*$)''',\n xpath.strip('/').strip('/')) # Removes leading/trailing '/'.", "def list_objects(self, path):\n return [x for x in self.list_objects_generator(path)]", "def list_path(self, path):\n return LocalResources(\"\").list_path(path)", "def find(self, xpath: str):\n\n elements = xpath.split('/')[1:]\n current_data = self.data\n for index, element in enumerate(elements):\n current_data = current_data.get(f'/{element}')\n if current_data is None:\n current_xpath = '/' + '/'.join(elements[:index + 1])\n raise RuntimeError(\n f'Cannot find visual attributes for {current_xpath!r} ' +\n f'while searching for {xpath!r} in {self.path!r}.')\n return current_data", "def do_extract(self, xpath):\n s = Selector(self.driver.page_source)\n for i, result in enumerate(s.xpath(xpath).getall(), 1):\n print(i, result)", "def xmlListing(path):\n tree = et.parse(path+\"product.xml\")\n nodes = tree.getroot()\n listing = []\n for node in nodes[0]:\n listing.append(path+node.attrib['key'])\n return listing", "def xpath(self, xpathable, xpath):\n return xpathable.xpath(xpath, namespaces=self._ns_map)", "def node_lookup_bulk(self, paths):\n\n placeholders = ','.join('?' for path in paths)\n q = \"select node from nodes where path in (%s)\" % placeholders\n self.execute(q, paths)\n r = self.fetchall()\n if r is not None:\n return [row[0] for row in r]\n return None", "def ParsePath(p_names: Iterable[Text]) -> gnmi_pb2.Path:\n gnmi_elems = []\n for word in p_names:\n word_search = _RE_PATH_COMPONENT.search(word)\n if not word_search: # Invalid path specified.\n raise XpathError('xpath component parse error: %s' % word)\n if word_search.group('key') is not None: # A path key was provided.\n tmp_key = {}\n for x in re.findall(r'\\[([^]]*)\\]', word):\n tmp_key[x.split('=')[0]] = x.split('=')[-1]\n gnmi_elems.append(gnmi_pb2.PathElem(name=word_search.group(\n 'pname'), key=tmp_key))\n else:\n gnmi_elems.append(gnmi_pb2.PathElem(name=word, key={}))\n return gnmi_pb2.Path(elem=gnmi_elems)", "def get_by_path(root, items):\n return reduce(operator.getitem, items, root)" ]
[ "0.57604206", "0.54872465", "0.54560804", "0.5449077", "0.53759116", "0.5275861", "0.5176571", "0.51548195", "0.51258445", "0.50880694", "0.5071189", "0.505888", "0.50456196", "0.50447965", "0.50162345", "0.5004446", "0.5002337", "0.5001135", "0.49842414", "0.49813622", "0.49768394", "0.49705914", "0.4966068", "0.49616677", "0.49502203", "0.4949281", "0.49384683", "0.49061793", "0.48900667", "0.4884137" ]
0.61770296
0
Returns a RequestCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string).
def find_request(xp, **kwargs): path = '/search/request' if 'schema' not in kwargs: kwargs['schema'] = RequestCollection.SCHEMA tag_class = {'collection': RequestCollection, 'request': RORequest} return _find(path, xp, tag_class, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def elements(xpath_selection):\n driver = Driver().connect()\n return driver.find_elements_by_xpath(xpath_selection)", "def _find(path, xp, tag_class={}, **kwargs):\n request = Osc.get_osc().get_reqobj()\n xpath = xp\n if hasattr(xp, 'tostring'):\n xpath = xp.tostring()\n f = request.get(path, match=xpath, **kwargs)\n return fromstring(f.read(), **tag_class)", "def scrape(etree, xpaths):\n return {k: get_xpath_val(apply_xpath(v, etree, k), v.path)\n if isinstance(v, lxml.etree.XPath)\n else [scrape(i, v[1]) for i in apply_xpath(v[0], etree, k)]\n for k, v in xpaths.items()}", "def find_elements(self, xpath:str):\n try:\n elements = self.driver.find_elements_by_xpath(xpath)\n \n except NoSuchElementException:\n elements = []\n \n return elements", "def xpath_findall(xpath, xml_content):\n if LXML:\n # print(xml_content)\n root = etree.fromstring(xml_content.encode('utf-8'))\n for node in root.xpath(\"//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.xpath(\n xpath, namespaces={\"re\": \"http://exslt.org/regular-expressions\"})\n else:\n root = ET.fromstring(xml_content)\n for node in root.findall(\".//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.findall(xpath if xpath.startswith(\".\") else \".\" + xpath)", "def get_tpx_xpaths():\n\txpaths = {\"tpx_all_abs\" : \"count(//TIMEX3)\",\n\t\"tpx_date_abs\" : \"count(//TIMEX3[@type='DATE'])\",\n\t\"tpx_date_past_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PAST_REF'])\",\n\t\"tpx_date_present_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PRESENT_REF'])\",\n\t\"tpx_date_future_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='FUTURE_REF'])\",\n\t\"tpx_time_abs\" : \"count(//TIMEX3[@type='TIME'])\",\n\t\"tpx_duration_abs\" : \"count(//TIMEX3[@type='DURATION'])\",\n\t\"tpx_set_abs\" : \"count(//TIMEX3[@type='SET'])\"\n\t}\n\treturn xpaths", "def get_all_elements_with_xpath(_xpath=None):\n if _xpath:\n try:\n return WebDriverWait(driver, timeout).until(\n ec.presence_of_all_elements_located((By.XPATH, _xpath)))\n except TimeoutException:\n print(f'Element with xpath \"{_xpath}\" was not found in \"{timeout}\" seconds.')\n close_and_finish_execution()\n else:\n print(f'Attribute xpath is mandatory.')\n close_and_finish_execution()", "def xpath(element, xpath):\n for ns in namespaces:\n xpath_fmt = xpath.format(ns=\"\" if ns is None else \"adm:\")\n for found_element in element.xpath(xpath_fmt,\n namespaces=dict() if ns is None else dict(adm=ns)):\n yield found_element", "def xpath(self, xpathable, xpath):\n return xpathable.xpath(xpath, namespaces=self._ns_map)", "def check_and_get_all_elements_by_xpath(element, xpath):\r\n if element is None or not xpath:\r\n return []\r\n try:\r\n return element.find_elements_by_xpath(xpath)\r\n except NoSuchElementException:\r\n return []", "def xpath_as_xml(self, expr=''):\n results = []\n for result in self.xpath(expr):\n if result:\n results.append(result.toxml())\n \n return results", "def get_items(id_name, request, client):\n result = client.quick_search(request)\n \n items_pages = []\n limit_to_x_pages = None\n for page in result.iter(limit_to_x_pages):\n items_pages.append(page.get())\n\n items = [item for page in items_pages for item in page['features']]\n \n \n return (id_name, items)", "def get_multiple_tags(url, xpathExpressionList, params=None):\n headers = {'User-Agent': 'curl/7.35.0'}\n page = requests.get(url, params=params, headers=headers)\n page.raise_for_status()\n tree = html.fromstring(page.text)\n out = []\n for 
expression in xpathExpressionList:\n out.append(tree.xpath(expression))\n return out", "def queryForNodes(self, doc):\n return self.compiled.evaluate(xml.xpath.CreateContext(doc))", "def find_package(xp, **kwargs):\n path = '/search/package'\n if 'schema' not in kwargs:\n kwargs['schema'] = PackageCollection.SCHEMA\n tag_class = {'collection': PackageCollection, 'package': ROPackage}\n return _find(path, xp, tag_class, **kwargs)", "def xpath(self, expr=''):\n from xml.dom.ext import GetAllNs\n from xml import xpath\n \n dom = self.get_dom(self(method='xml'))\n context = xpath.Context.Context(dom,\n processorNss=GetAllNs(dom.documentElement))\n return xpath.Evaluate(expr, context=context)", "def find(self, path, root=None):\n\n root = root if root is not None else self.root\n return root.xpath(path, namespaces=self.nsmap) or []", "def xpath_parser(x_path, getall=False):\n try:\n xpath_parser.response\n except AttributeError:\n raise ('response not attached to function')\n\n sel = Selector(text=xpath_parser.response.text)\n getter = 'getall' if getall else 'get'\n output = getattr(sel.xpath(x_path), getter)()\n if isinstance(output, str):\n output = output.replace('\\n', '')\n else:\n exclude = {'\\n', ' '}\n output = ', '.join([i.strip() for i in output if i not in exclude])\n return output", "def get_xpath(xpath=\"\"):\n query = {\"type\": \"config\", \"action\": \"get\", \"xpath\": xpath}\n\n return __proxy__[\"panos.call\"](query)", "def do_extract(self, xpath):\n s = Selector(self.driver.page_source)\n for i, result in enumerate(s.xpath(xpath).getall(), 1):\n print(i, result)", "def get_price_xpaths(prod):\n xpath_tup_list = []\n\n # Check full Price Xpath\n full_price_xpath = get_xpath_for_scrape_type(prod, 'Price')\n if full_price_xpath:\n xpath_tup_list.append(('Price', full_price_xpath))\n\n # Check Partial Price Xpath\n whole_xpath = get_xpath_for_scrape_type(prod, 'PriceWholeNumber')\n fraction_xpath = get_xpath_for_scrape_type(prod, 'PriceFraction')\n if whole_xpath and fraction_xpath:\n xpath_tup_list.append(('PriceWholeNumber', whole_xpath))\n xpath_tup_list.append(('PriceFraction', fraction_xpath))\n\n return xpath_tup_list", "def xpath (entry, path):\n if isinstance (path, str):\n path = path.split('/')\n result=entry\n for key in path: result=result[key]\n return result", "def test_generator4(self):\n xpb = XPathBuilder()\n xp1 = xp2 = None\n base_xp = xpb.base.foo.where(xpb.attr('abc').equals('x'))\n with base_xp as b:\n xp1 = b().bar.text().equals('foo')\n xp2 = b().x.y.z.where(42)\n base_exp = '/base/foo[@abc = \"x\"]'\n xp1_exp = '/base/foo[@abc = \"x\"]/bar/text() = \"foo\"'\n xp2_exp = '/base/foo[@abc = \"x\"]/x/y/z[42]'\n self.assertEqual(base_xp.tostring(), base_exp)\n self.assertEqual(xp1.tostring(), xp1_exp)\n self.assertEqual(xp2.tostring(), xp2_exp)", "def get_element(self, xpath):\n self.check_for_root()\n if xpath[:2] is not '//':\n # Add the // to the front of the string if it isn't there\n return [element for element in self.tree.xpath('{}//{}'.format(self.root, xpath))]\n return [element for element in self.tree.xpath('{}{}'.format(self.root, xpath))]", "def _request_all_objects_in_expression(self, expr, **other_request_params):\n _ast = ast.parse(expr, mode='eval')\n _reqs = []\n for _node in ast.walk(_ast):\n if isinstance(_node, ast.Name):\n _obj_spec = _node.id\n elif isinstance(_node, ast.Str):\n _obj_spec = _node.s\n else:\n continue\n\n if ':' in _obj_spec:\n _reqs.append(dict(object_spec=_obj_spec, force_rerequest=False, **other_request_params))\n 
self.request(_reqs)", "def PathNames(xpath: Text) -> List[Text]:\n if not xpath or xpath == '/': # A blank xpath was provided.\n return []\n return re.split(r'''/(?=(?:[^\\[\\]]|\\[[^\\[\\]]+\\])*$)''',\n xpath.strip('/').strip('/')) # Removes leading/trailing '/'.", "def test_predicate7(self):\n xpb = XPathBuilder()\n xp = xpb.foo.bar[(xpb.attr('name') == 'foo') & (xpb.attr('x') == 'x')]\n exp = '/foo/bar[@name = \"foo\" and @x = \"x\"]'\n self.assertEqual(xp.tostring(), exp)", "def get_questions(api_site_parameter, page = 1, pagesize = 30, sort = 'votes'):\n path = \"questions\"\n \n query_filter = ')(Ybxw_gbz'\n \n results = __fetch_results(path, api_site_parameter, page = page, pagesize = pagesize, filter = query_filter, sort = sort)\n return results", "def _get_objects(self, cr, uid, name, args=[], ids=None): \n obj = self.pool.get(name)\n if not ids:\n ids = obj.search(cr, uid, args)\n return obj.browse(cr, uid, ids)", "def test_generator3(self):\n xpb = XPathBuilder()\n xp1 = xp2 = None\n base_xp = xpb.base.foo[xpb.attr('abc') == 'x']\n with base_xp as b:\n xp1 = b().bar.text() == 'foo'\n xp2 = b().x.y.z[42]\n base_exp = '/base/foo[@abc = \"x\"]'\n xp1_exp = '/base/foo[@abc = \"x\"]/bar/text() = \"foo\"'\n xp2_exp = '/base/foo[@abc = \"x\"]/x/y/z[42]'\n self.assertEqual(base_xp.tostring(), base_exp)\n self.assertEqual(xp1.tostring(), xp1_exp)\n self.assertEqual(xp2.tostring(), xp2_exp)" ]
[ "0.5570661", "0.53328407", "0.5145625", "0.51360446", "0.51189804", "0.5032274", "0.49823588", "0.49291036", "0.49249616", "0.48981836", "0.48864037", "0.48758155", "0.4858352", "0.47892767", "0.47724596", "0.470658", "0.4663911", "0.46564332", "0.46533957", "0.4639455", "0.45988873", "0.45519644", "0.4550048", "0.4491371", "0.44594026", "0.44335198", "0.4408756", "0.4404963", "0.43879882", "0.43827012" ]
0.5768637
0
Returns a ProjectCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string).
def find_project(xp, **kwargs): path = '/search/project' if 'schema' not in kwargs: kwargs['schema'] = ProjectCollection.SCHEMA tag_class = {'collection': ProjectCollection, 'project': ROProject} return _find(path, xp, tag_class, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def elements(xpath_selection):\n driver = Driver().connect()\n return driver.find_elements_by_xpath(xpath_selection)", "def find_package(xp, **kwargs):\n path = '/search/package'\n if 'schema' not in kwargs:\n kwargs['schema'] = PackageCollection.SCHEMA\n tag_class = {'collection': PackageCollection, 'package': ROPackage}\n return _find(path, xp, tag_class, **kwargs)", "def get_tpx_xpaths():\n\txpaths = {\"tpx_all_abs\" : \"count(//TIMEX3)\",\n\t\"tpx_date_abs\" : \"count(//TIMEX3[@type='DATE'])\",\n\t\"tpx_date_past_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PAST_REF'])\",\n\t\"tpx_date_present_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PRESENT_REF'])\",\n\t\"tpx_date_future_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='FUTURE_REF'])\",\n\t\"tpx_time_abs\" : \"count(//TIMEX3[@type='TIME'])\",\n\t\"tpx_duration_abs\" : \"count(//TIMEX3[@type='DURATION'])\",\n\t\"tpx_set_abs\" : \"count(//TIMEX3[@type='SET'])\"\n\t}\n\treturn xpaths", "def _find(path, xp, tag_class={}, **kwargs):\n request = Osc.get_osc().get_reqobj()\n xpath = xp\n if hasattr(xp, 'tostring'):\n xpath = xp.tostring()\n f = request.get(path, match=xpath, **kwargs)\n return fromstring(f.read(), **tag_class)", "def all(cls):\r\n projects_url = 'https://www.pivotaltracker.com/services/v3/projects'\r\n response = _perform_pivotal_get(projects_url)\r\n\r\n root = ET.fromstring(response.text)\r\n if root is not None:\r\n return [Project.from_node(project_node) for project_node in root]", "def get_projects(selection):\n project=[]\n\n # project with keyname\n\n if 'project' in selection.facets:\n project+=selection.facets['project']\n\n\n # project without keyname\n\n # WARNING\n #\n # The code below uses sdinference and query the database to retrieve ESGF parameters.\n # Doing those things here may raise circular dependencies\n # as well as making the whole thing very complex.\n #\n # We do this to make this syntax work (i.e. project value without key)\n # synda search GeoMIP\n #\n # Note that this syntax always works (i.e. 
load the project level default file), even without this code.\n # synda search project=GeoMIP\n #\n #\n pending_projects=sdearlystreamutils.get_facet_values_early([selection.facets],'project',extract_item=True) # project without keyname or project as part of an identifier.\n\n li=pending_projects+project\n\n li=list(set(li)) # remove duplicate\n\n return li", "def test_predicate2(self):\n xpb = XPathBuilder()\n xp = xpb.action.source.where(xpb.attr('project').equals('bar'))\n exp = '/action/source[@project = \"bar\"]'\n self.assertEqual(xp.tostring(), exp)", "def test_predicate1(self):\n xpb = XPathBuilder()\n xp = xpb.action.source[xpb.attr('project') == 'bar']\n exp = '/action/source[@project = \"bar\"]'\n self.assertEqual(xp.tostring(), exp)", "def find_elements(self, xpath:str):\n try:\n elements = self.driver.find_elements_by_xpath(xpath)\n \n except NoSuchElementException:\n elements = []\n \n return elements", "def all(cls):\n projects_url = 'https://www.pivotaltracker.com/services/v5/projects'\n root = _perform_pivotal_get(projects_url)\n if root is not None:\n return [Project.from_json(project_node) for project_node in root]", "def parse_xml(path):\r\n ns = {'SSIS': \"www.microsoft.com/SqlServer/SSIS\",}\r\n proj_xml = et.parse(path)\r\n proj_packages = get_packages(proj_xml, ns)\r\n\r\n packages = [Package(*package_properties(package, ns))\r\n for package in proj_packages]\r\n \r\n #package_props = {}\r\n #for package in packages:\r\n # name, version = package_properties(package, ns)\r\n # package_props[name] = version\r\n\r\n return packages", "def ConvertProjectToXquery(oData, basket):\n\n arErr = []\n sCodeDef = \"\"\n sCodeQry = \"\"\n template_main = \"\"\n template_def = \"\"\n method = 'recursive' # \"plain\" # Methods: 'recursive', 'plain'\n bUseDefinitionFunctions = True # Make use of the definition part to have functions\n oErr = utils.ErrHandle()\n\n try:\n # Unpack the data from [oData]\n targetType = oData['targetType']\n gateway = oData['gateway']\n format = oData['format']\n\n # Determine which template to use\n if format == 'psdx':\n if bUseDefinitionFunctions:\n template_main = 'seeker/xqmain_psdx.xq'\n template_def = 'seeker/xqdef_psdx.xq'\n else:\n template_main = 'seeker/main_psdx.xq'\n template_def = 'seeker/def_psdx.xq'\n elif format == 'folia':\n if bUseDefinitionFunctions:\n template_main = 'seeker/xqmain_folia.xq'\n template_def = 'seeker/xqdef_folia.xq'\n else:\n template_main = 'seeker/main_folia.xq'\n template_def = 'seeker/def_folia.xq'\n \n # Is everything okay?\n if template_main != \"\":\n # Reset any errors\n gateway.error_clear()\n \n # Collect all relevant information\n basket.set_status(\"collecting global variables\")\n gvars = gateway.globalvariables.all()\n\n # Search elements are the 'constructions'\n basket.set_status(\"collecting constructions\")\n constructions = gateway.constructions.all()\n\n # the names of the constructions plus their search group and specification\n search_list = gateway.get_search_list()\n\n # If there are any related constituents in a simple search, then create the correct variables and things\n bResult, sMsg = gateway.do_simple_related()\n if not bResult:\n # Show error message\n oErr.DoError(\"ConvertProjectToXquery error: \" + sMsg)\n # REset the values for definitions and query\n sCodeDef = \"\"\n sCodeQry = \"\"\n else:\n\n # The data-dependant variables need to be divided over the search elements\n basket.set_status(\"converting data-dependant variables\")\n dvar_list = []\n for var in 
gateway.get_vardef_list():\n cvar_list = []\n for cons in constructions:\n # Determine what the construction variable is\n cvar = ConstructionVariable.objects.filter(construction=cons, variable=var).first()\n try:\n oCode = cvar.get_code(format, method)\n oCvarInfo = {'grp': cons.name, \n 'code': oCode['main'],\n 'type': cvar.type,\n 'dvars': oCode['dvars'],\n 'dvarnum': oCode['dvarnum'],\n 'fname': \"tb:dvar_{}_cons_{}\".format(var.name, cons.name)}\n # Check for coding errors\n if 'error' in oCode:\n arErr.append(\"Error in the definition of variable {} for search element {}: {}\".format(\n var.name,cons.name, oCode['error']))\n # Check for possible error(s)\n errors = gateway.get_errors()\n if errors != \"\" and errors != \"[]\":\n return \"\", ERROR_CODE\n else:\n cvar_list.append(oCvarInfo)\n except:\n iStop = True\n # Add the cvar_list to the dvar_list\n oDvarInfo = {'name': var.name, 'cvar_list': cvar_list}\n dvar_list.append(oDvarInfo)\n dvar_all = \", \".join([\"$\"+item['name'] for item in dvar_list])\n\n # Also add the conditions\n basket.set_status(\"converting conditions\")\n cond_list = []\n for cnd in gateway.get_condition_list():\n # make sure we have the latest version\n cnd.refresh_from_db()\n # Double check the include value of this option\n if cnd.include == \"\" or cnd.include == \"true\":\n oCode = cnd.get_code(format, method)\n sCode = oCode['main']\n if sCode != \"\":\n cond_list.append(sCode)\n # Check for coding errors\n if 'error' in oCode:\n arErr.append(\"Error in the definition of condition {}: {}\".format(\n cnd.name, oCode['error']))\n # Check for an empty condition list\n if len(cond_list) == 0:\n # We still have a 'where' clause, so create one condition that is always true\n cond_list.append(\"true()\")\n\n # And then we add the features\n basket.set_status(\"converting features\")\n feature_list = []\n for ft in gateway.get_feature_list():\n ft.refresh_from_db()\n # Double check the include value of this option\n if ft.include == \"\" or ft.include == \"true\" or ft.include == \"yes\":\n oCode = ft.get_code(format, method)\n sCode = oCode['main']\n if sCode != \"\":\n feature_list.append({\n 'name': ft.name, \n 'type': ft.feattype, \n 'dvar': ft.variable,\n 'code': sCode,\n 'fname': \"tb:feat_{}\".format(ft.name)})\n # Check for coding errors\n if 'error' in oCode:\n arErr.append(\"Error in the definition of feature {}: {}\".format(\n ft.name, oCode['error']))\n\n # Specify the context variable for the Xquery template determination\n context = dict(gvar_list=gvars, \n cons_list=constructions, \n search_list=search_list,\n dvar_list=dvar_list,\n dvar_all=dvar_all,\n cond_list=cond_list,\n feature_list=feature_list,\n targetType=targetType)\n\n # The action NO LONGER depends on the target type\n # Step #1: make the start of the main query\n basket.set_status(\"Combining Main query\")\n sCodeQry = loader.get_template(template_main).render(context)\n sCodeQry = re.sub(r'\\n\\s*\\n', '\\n', sCodeQry).strip()\n\n # Step #2: create the definitions part\n basket.set_status(\"Combining Definitions\")\n sCodeDef = loader.get_template(template_def).render(context)\n sCodeDef = re.sub(r'\\n\\s*\\n', '\\n', sCodeDef).strip()\n\n except:\n # Show error message\n oErr.DoError(\"ConvertProjectToXquery error: \")\n # REset the values for definitions and query\n sCodeDef = \"\"\n sCodeQry = \"\"\n\n # Return what has been produced\n return sCodeDef, sCodeQry, arErr", "def xpath_findall(xpath, xml_content):\n if LXML:\n # print(xml_content)\n root = 
etree.fromstring(xml_content.encode('utf-8'))\n for node in root.xpath(\"//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.xpath(\n xpath, namespaces={\"re\": \"http://exslt.org/regular-expressions\"})\n else:\n root = ET.fromstring(xml_content)\n for node in root.findall(\".//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.findall(xpath if xpath.startswith(\".\") else \".\" + xpath)", "def projects(self):\r\n return p.Projects(self)", "def xpath_as_xml(self, expr=''):\n results = []\n for result in self.xpath(expr):\n if result:\n results.append(result.toxml())\n \n return results", "def get_projects():\n return Project.query.all()", "def scrape(etree, xpaths):\n return {k: get_xpath_val(apply_xpath(v, etree, k), v.path)\n if isinstance(v, lxml.etree.XPath)\n else [scrape(i, v[1]) for i in apply_xpath(v[0], etree, k)]\n for k, v in xpaths.items()}", "def test_generator4(self):\n xpb = XPathBuilder()\n xp1 = xp2 = None\n base_xp = xpb.base.foo.where(xpb.attr('abc').equals('x'))\n with base_xp as b:\n xp1 = b().bar.text().equals('foo')\n xp2 = b().x.y.z.where(42)\n base_exp = '/base/foo[@abc = \"x\"]'\n xp1_exp = '/base/foo[@abc = \"x\"]/bar/text() = \"foo\"'\n xp2_exp = '/base/foo[@abc = \"x\"]/x/y/z[42]'\n self.assertEqual(base_xp.tostring(), base_exp)\n self.assertEqual(xp1.tostring(), xp1_exp)\n self.assertEqual(xp2.tostring(), xp2_exp)", "def get_source_packages(self, project, expand=False):\n query = {'expand': 1} if expand else {}\n root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project],\n query=query))).getroot()\n packages = [i.get('name') for i in root.findall('entry')]\n\n return packages", "def get_projects(self, *criterion):\n from wkcdd.models.helpers import get_project_list\n return get_project_list([self.id], *criterion)", "def find(self, path, root=None):\n\n root = root if root is not None else self.root\n return root.xpath(path, namespaces=self.nsmap) or []", "def queryForNodes(self, doc):\n return self.compiled.evaluate(xml.xpath.CreateContext(doc))", "def test_generator3(self):\n xpb = XPathBuilder()\n xp1 = xp2 = None\n base_xp = xpb.base.foo[xpb.attr('abc') == 'x']\n with base_xp as b:\n xp1 = b().bar.text() == 'foo'\n xp2 = b().x.y.z[42]\n base_exp = '/base/foo[@abc = \"x\"]'\n xp1_exp = '/base/foo[@abc = \"x\"]/bar/text() = \"foo\"'\n xp2_exp = '/base/foo[@abc = \"x\"]/x/y/z[42]'\n self.assertEqual(base_xp.tostring(), base_exp)\n self.assertEqual(xp1.tostring(), xp1_exp)\n self.assertEqual(xp2.tostring(), xp2_exp)", "def get_all_for_project(cls, project_root: str) -> list[FeatureSet]:\n project_root_abs = os.path.abspath(project_root)\n\n # Only do this once per project.\n if project_root_abs not in _g_feature_sets:\n _g_feature_sets[project_root_abs] = _build_feature_set_list(\n project_root_abs\n )\n return _g_feature_sets[project_root_abs]", "def _get_projects(project_ids):\n if _ALL in project_ids:\n return projects_lib.get_all()\n return projects_lib.get_selective(project_ids)", "def get_projects(self, source=\"all\"):\n self.projects = []\n self._project_indices_by_id = {}\n self._project_indices_by_name = {}\n\n if self.hub_type == self.NAMESPACES[\"a.\"]:\n if not self.auth.three_legged:\n self.logger.warning(\n \"Failed to get projects. 
'{}' hubs only supports 3-legged access token.\".format( # noqa:E501\n self.NAMESPACES[\"a.\"]\n )\n )\n else:\n for project in self.api.dm.get_projects():\n self.projects.append(\n Project(\n project[\"attributes\"][\"name\"],\n project[\"id\"][2:],\n data=project,\n app=self,\n )\n )\n\n self._project_indices_by_id[project[\"id\"][2:]] = (\n len(self.projects) - 1\n )\n self._project_indices_by_name[\n project[\"attributes\"][\"name\"]\n ] = (len(self.projects) - 1)\n\n elif self.hub_type == self.NAMESPACES[\"b.\"]:\n\n if source.lower() in (\"all\", \"docs\"):\n for project in self.api.dm.get_projects():\n self.projects.append(\n Project(\n project[\"attributes\"][\"name\"],\n project[\"id\"][2:],\n data=project,\n app=self,\n )\n )\n\n self._project_indices_by_id[project[\"id\"][2:]] = (\n len(self.projects) - 1\n )\n self._project_indices_by_name[\n project[\"attributes\"][\"name\"]\n ] = (len(self.projects) - 1)\n\n if (\n source.lower() in (\"all\", \"admin\")\n and not self.auth.three_legged\n ):\n\n for project in self.api.hq.get_projects():\n if project[\"id\"] in self._project_indices_by_id:\n self.projects[\n self._project_indices_by_id[project[\"id\"]]\n ].data = project\n else:\n self.projects.append(\n Project(\n project[\"name\"],\n project[\"id\"],\n data=project,\n app=self,\n )\n )\n self._project_indices_by_id[project[\"id\"]] = (\n len(self.projects) - 1\n )\n\n self._project_indices_by_name[project[\"name\"]] = (\n len(self.projects) - 1\n )\n\n elif source.lower() in (\"all\", \"admin\"):\n self.logger.debug(\n \"Failed to get projects. The BIM 360 API only supports 2-legged access tokens\" # noqa:E501\n )", "def derive_xpubs_from_xprv(xprv, paths: list, rpc):\n derived_xprvs = []\n for path in paths:\n derivation = parse_path(path)\n if len(derivation) == 0:\n # tuple: (parent, derived)\n derived_xprvs.append((None, xprv))\n else:\n # we need parent for fingerprint\n parent = xprv\n for idx in derivation[:-1]:\n parent = get_child(parent, idx, rpc)\n child = get_child(parent, derivation[-1], rpc)\n derived_xprvs.append((parent, child))\n xpubs = []\n for parent, child in derived_xprvs:\n res = rpc.getdescriptorinfo(f\"wpkh({child})\")\n xpub = res[\"descriptor\"].split(\"(\")[1].split(\")\")[0]\n if parent is not None:\n res = rpc.getdescriptorinfo(f\"wpkh({parent})\")\n parent_xpub = res[\"descriptor\"].split(\"(\")[1].split(\")\")[0]\n fingerprint = get_xpub_fingerprint(parent_xpub)\n xpub = swap_fingerprint(xpub, fingerprint)\n xpubs.append(xpub)\n return xpubs", "def get_element(self, xpath):\n self.check_for_root()\n if xpath[:2] is not '//':\n # Add the // to the front of the string if it isn't there\n return [element for element in self.tree.xpath('{}//{}'.format(self.root, xpath))]\n return [element for element in self.tree.xpath('{}{}'.format(self.root, xpath))]", "def _get_children(self, x):\n try:\n return x._pfp__children\n\n except AttributeError:\n return []", "def xpath(element, xpath):\n for ns in namespaces:\n xpath_fmt = xpath.format(ns=\"\" if ns is None else \"adm:\")\n for found_element in element.xpath(xpath_fmt,\n namespaces=dict() if ns is None else dict(adm=ns)):\n yield found_element" ]
[ "0.5264987", "0.5037761", "0.5019994", "0.49843824", "0.49831843", "0.49377376", "0.4897356", "0.48519707", "0.4850672", "0.48110655", "0.47897774", "0.4785051", "0.4735383", "0.4722277", "0.47054696", "0.45902702", "0.45883238", "0.4580754", "0.45582223", "0.45063794", "0.4487188", "0.44864473", "0.44831407", "0.4473853", "0.4440327", "0.44348478", "0.44337627", "0.44310576", "0.4426316", "0.4420978" ]
0.6296943
0
Returns a PackageCollection with objects which match the xpath. xp is the xpath which is used for the search (either an Expression object or a string).
def find_package(xp, **kwargs): path = '/search/package' if 'schema' not in kwargs: kwargs['schema'] = PackageCollection.SCHEMA tag_class = {'collection': PackageCollection, 'package': ROPackage} return _find(path, xp, tag_class, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_packages(xml, ns):\r\n execs = []\r\n for x in xml.iter():\r\n if x.xpath('./SSIS:PackageMetaData', namespaces = ns):\r\n for x_exec in x:\r\n execs.append(x_exec)\r\n\r\n return execs", "def elements(xpath_selection):\n driver = Driver().connect()\n return driver.find_elements_by_xpath(xpath_selection)", "def parse_xml(path):\r\n ns = {'SSIS': \"www.microsoft.com/SqlServer/SSIS\",}\r\n proj_xml = et.parse(path)\r\n proj_packages = get_packages(proj_xml, ns)\r\n\r\n packages = [Package(*package_properties(package, ns))\r\n for package in proj_packages]\r\n \r\n #package_props = {}\r\n #for package in packages:\r\n # name, version = package_properties(package, ns)\r\n # package_props[name] = version\r\n\r\n return packages", "def xpath_findall(xpath, xml_content):\n if LXML:\n # print(xml_content)\n root = etree.fromstring(xml_content.encode('utf-8'))\n for node in root.xpath(\"//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.xpath(\n xpath, namespaces={\"re\": \"http://exslt.org/regular-expressions\"})\n else:\n root = ET.fromstring(xml_content)\n for node in root.findall(\".//node\"):\n node.tag = safe_xmlstr(node.attrib.pop(\"class\"))\n return root.findall(xpath if xpath.startswith(\".\") else \".\" + xpath)", "def getPackages(self):\n cat = getToolByName(self.context, 'portal_catalog')\n ideeSejour = getattr(self.context, 'idee-sejour')\n url = '/'.join(ideeSejour.getPhysicalPath())\n contentFilter = {}\n path = {}\n path['query'] = url\n path['depth'] = 1\n contentFilter['path'] = path\n contentFilter['portal_type'] = ['Package']\n contentFilter['sort_on'] = 'effective'\n contentFilter['sort_order'] = 'reverse'\n results = cat.queryCatalog(contentFilter)\n results = list(results)\n return results", "def package_all(q):\n\n query = (q.dict_of_lists())[\"q\"][0]\n datasets = p.toolkit.get_action(\"package_search\")(\n {}, data_dict={\"q\": query, \"include_private\": True}\n )\n\n result = datasets[\"results\"]\n results = []\n for res in result:\n results.append(res)\n return results", "def get_packages(path):\n\n files = [y for x in os.walk(path) for y in glob(os.path.join(x[0], 'package.xml'))]\n packages = []\n for file in files:\n tree = ET.parse(file)\n root = tree.getroot()\n\n name = root.find('name').text\n path = os.path.dirname(os.path.abspath(file))\n \n packages.append(Package(name, path))\n\n return packages", "def _find(path, xp, tag_class={}, **kwargs):\n request = Osc.get_osc().get_reqobj()\n xpath = xp\n if hasattr(xp, 'tostring'):\n xpath = xp.tostring()\n f = request.get(path, match=xpath, **kwargs)\n return fromstring(f.read(), **tag_class)", "def _get_nodes_from_package(*, package_name: str) -> List[Node]:\n nodl_files = _get_nodl_files_from_package_share(package_name=package_name)\n return _parse_multiple(paths=nodl_files)", "def _get_package_items(self):\r\n mask = \"mask[description,capacity,prices.id,categories[name,id]]\"\r\n package = self.client['Product_Package']\r\n return package.getItems(id=46, mask=mask)", "def find_elements(self, xpath:str):\n try:\n elements = self.driver.find_elements_by_xpath(xpath)\n \n except NoSuchElementException:\n elements = []\n \n return elements", "def get_tpx_xpaths():\n\txpaths = {\"tpx_all_abs\" : \"count(//TIMEX3)\",\n\t\"tpx_date_abs\" : \"count(//TIMEX3[@type='DATE'])\",\n\t\"tpx_date_past_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PAST_REF'])\",\n\t\"tpx_date_present_ref_abs\" : \"count(//TIMEX3[@type='DATE'][@value='PRESENT_REF'])\",\n\t\"tpx_date_future_ref_abs\" : 
\"count(//TIMEX3[@type='DATE'][@value='FUTURE_REF'])\",\n\t\"tpx_time_abs\" : \"count(//TIMEX3[@type='TIME'])\",\n\t\"tpx_duration_abs\" : \"count(//TIMEX3[@type='DURATION'])\",\n\t\"tpx_set_abs\" : \"count(//TIMEX3[@type='SET'])\"\n\t}\n\treturn xpaths", "def xpath(element, xpath):\n for ns in namespaces:\n xpath_fmt = xpath.format(ns=\"\" if ns is None else \"adm:\")\n for found_element in element.xpath(xpath_fmt,\n namespaces=dict() if ns is None else dict(adm=ns)):\n yield found_element", "def xpath_as_xml(self, expr=''):\n results = []\n for result in self.xpath(expr):\n if result:\n results.append(result.toxml())\n \n return results", "def get_packages():\n\tapps=[]\n\tlspackages = glob.glob(u'/home/wpkg/packages/*.xml')\n\tfor fic in lspackages:\n #print fic\n\t\tif os.path.isfile(fic):\n\t\t\ttry:\n\t\t\t\txml = etree.parse(fic)\n\t\t\texcept:\n\t\t\t\t#si erreur on tente de corriger le fichier\n\t\t\t\toput = open(\"output.txt\",\"w\")\n\t\t\t\tdata = open(fic).read()\n\t\t\t\toput.write( re.sub('&(?!amp;|quot;|nbsp;|gt;|lt;|laquo;|raquo;|copy;|reg;|bul;|rsquo;)', '&amp;', data) )\n\t\t\t\toput.close()\n\t\t\t\tshutil.move(\"output.txt\", fic)\n\n\t\t\tfinally:\n\t\t\t\tparser = etree.XMLParser(remove_comments=False)\n\t\t\t\txml = etree.parse(fic,parser)\n\t\t\t\tstrxml = etree.tostring(xml.getroot())\n\t\t\t\tfor group in xml.getiterator(u'package'):\n\t\t\t\t\tuid = group.get(u'id')\n\t\t\t\t\tname = group.get(u'name')\n\t\t\t\t\tdvars = dict()\n\t\t\t\t\tfor vars in xml.getiterator('variable'):\n\t\t\t\t\t\tdvars[vars.get('name').lower()] = vars.get('value').lower()\n\n\t\t\t\t#on remplit le dico des appli dispo\n\t\t\t\t#print fic,uid,name\n\t\t\t\tapps.append((fic,uid,name))\n\n\treturn apps", "def find_packages(name, pkg_dir):\n for c in (FileSystemPackageBuilder, ZipPackageBuilder, ExcelPackageBuilder):\n\n package_path, cache_path = c.make_package_path(pkg_dir, name)\n\n if package_path.exists():\n\n yield c.type_code, package_path, cache_path #c(package_path, pkg_dir)", "def get_all_packages(cls):\n packages = Package.query.all()\n return packages", "def check_and_get_all_elements_by_xpath(element, xpath):\r\n if element is None or not xpath:\r\n return []\r\n try:\r\n return element.find_elements_by_xpath(xpath)\r\n except NoSuchElementException:\r\n return []", "def get_source_packages(self, project, expand=False):\n query = {'expand': 1} if expand else {}\n root = ET.parse(osc.core.http_GET(osc.core.makeurl(self.apiurl,['source', project],\n query=query))).getroot()\n packages = [i.get('name') for i in root.findall('entry')]\n\n return packages", "def GetPackages(self, package_target):\n return self._packages.get(package_target, None)", "def test_search_and(self):\n pkgs = [\n make_package(factory=SQLPackage),\n make_package(\n \"somepackage\",\n version=\"1.3\",\n filename=\"mypath3\",\n summary=\"this is mypkg\",\n factory=SQLPackage,\n ),\n make_package(\"mypkg2\", \"1.3.4\", \"my/other/path\", factory=SQLPackage),\n make_package(\"package\", factory=SQLPackage),\n ]\n self.sql.add_all(pkgs)\n criteria = {\"name\": [\"my\", \"pkg\"], \"summary\": [\"this\", \"mypkg\"]}\n packages = self.db.search(criteria, \"and\")\n self.assertCountEqual(packages, pkgs[:-1])", "def scrape(etree, xpaths):\n return {k: get_xpath_val(apply_xpath(v, etree, k), v.path)\n if isinstance(v, lxml.etree.XPath)\n else [scrape(i, v[1]) for i in apply_xpath(v[0], etree, k)]\n for k, v in xpaths.items()}", "def test_search_and(self):\n pkgs = [\n make_package(factory=DynamoPackage),\n 
make_package(\n \"somepackage\",\n version=\"1.3\",\n filename=\"mypath3\",\n summary=\"this is mypkg\",\n factory=DynamoPackage,\n ),\n make_package(\"mypkg2\", \"1.3.4\", \"my/other/path\", factory=DynamoPackage),\n make_package(\"package\", factory=DynamoPackage),\n ]\n self._save_pkgs(*pkgs)\n criteria = {\"name\": [\"my\", \"pkg\"], \"summary\": [\"this\", \"mypkg\"]}\n packages = self.db.search(criteria, \"and\")\n self.assertCountEqual(packages, pkgs[:-1])", "def test_search_and(self):\n pkgs = [\n make_package(factory=SQLPackage),\n make_package(\n \"somepackage\",\n version=\"1.3\",\n filename=\"mypath3\",\n summary=\"this is mypkg\",\n factory=SQLPackage,\n ),\n make_package(\"mypkg2\", \"1.3.4\", \"my/other/path\", factory=SQLPackage),\n make_package(\"package\", factory=SQLPackage),\n ]\n for pkg in pkgs:\n self.db.save(pkg)\n criteria = {\"name\": [\"my\", \"pkg\"], \"summary\": [\"this\", \"mypkg\"]}\n packages = self.db.search(criteria, \"and\")\n self.assertCountEqual(packages, pkgs[:-1])", "def _extract_packages(self, index: int, packages: Packages) -> Packages:\n log = self[index]\n extracted_packages = Packages()\n for package in packages:\n package_expression = re.compile(f\"(({package.name})(=[a-z0-9_=.]+)?)\")\n spec = package_expression.search(log).group(0)\n extracted_packages.append_spec(spec)\n return extracted_packages", "def find_all_packages_to_test(root, options):\n if options.verbose:\n print_message(\"Locating packages under '%s'\" % root)\n tests = []\n dirs = os.listdir(root)\n dirs.sort()\n for packageName in dirs:\n if packageName.startswith(\".\"):\n continue\n package_dir = os.path.join(root, packageName)\n if not os.path.isdir(package_dir):\n continue\n bam_dir = os.path.join(package_dir, 'bam')\n if not os.path.isdir(bam_dir):\n continue\n xml_files = glob.glob(os.path.join(bam_dir, \"*.xml\"))\n if len(xml_files) == 0:\n continue\n if len(xml_files) > 1:\n raise RuntimeError(\"Too many XML files found in %s to identify a package definition file\" % bam_dir)\n package = Package.from_xml(xml_files[0])\n if options.verbose:\n print_message(\"\\t%s\" % package.get_id())\n tests.append(package)\n return tests", "def derive_xpubs_from_xprv(xprv, paths: list, rpc):\n derived_xprvs = []\n for path in paths:\n derivation = parse_path(path)\n if len(derivation) == 0:\n # tuple: (parent, derived)\n derived_xprvs.append((None, xprv))\n else:\n # we need parent for fingerprint\n parent = xprv\n for idx in derivation[:-1]:\n parent = get_child(parent, idx, rpc)\n child = get_child(parent, derivation[-1], rpc)\n derived_xprvs.append((parent, child))\n xpubs = []\n for parent, child in derived_xprvs:\n res = rpc.getdescriptorinfo(f\"wpkh({child})\")\n xpub = res[\"descriptor\"].split(\"(\")[1].split(\")\")[0]\n if parent is not None:\n res = rpc.getdescriptorinfo(f\"wpkh({parent})\")\n parent_xpub = res[\"descriptor\"].split(\"(\")[1].split(\")\")[0]\n fingerprint = get_xpub_fingerprint(parent_xpub)\n xpub = swap_fingerprint(xpub, fingerprint)\n xpubs.append(xpub)\n return xpubs", "def find(self, path, root=None):\n\n root = root if root is not None else self.root\n return root.xpath(path, namespaces=self.nsmap) or []", "def xmlextractor(url):\r\n\tListOfLinks = []\r\n\tresponse = urlopen(url)\r\n\troot = ET.fromstring(response.read())\r\n\tfor link in root.iter('{http://www.sitemaps.org/schemas/sitemap/0.9}loc'):\r\n\t\tListOfLinks.append(link.text)\r\n\treturn ListOfLinks", "def expand_package(self, pkg):\n return [(pkg, c) for c in 
self.packages[pkg].components]" ]
[ "0.57963234", "0.5450684", "0.5410452", "0.5273573", "0.5228217", "0.5190546", "0.5179356", "0.5171915", "0.51098835", "0.5105376", "0.49959934", "0.49159732", "0.489464", "0.48571792", "0.48402214", "0.48206335", "0.4783399", "0.4775626", "0.47538108", "0.47523183", "0.4738191", "0.473434", "0.47306314", "0.47045508", "0.46904406", "0.46810165", "0.46636423", "0.46614063", "0.46339932", "0.4630083" ]
0.63874054
0
test al_contacts.view.View.notify() with nonlist data.
def testNotifyWithNonListData(self):
    self.assertRaises(ViewException, self.view.notify, self.mockViews, 'Invalid Data')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testNotifyWithValidListDate(self):\r\n self.assertEqual(self.view.notify(self.mockViews, []), None)", "def test_notify_user(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users([self.user_a], foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 1)", "def notify(self, observable, *args, **kwargs) -> None:", "async def notify_view(self):\n await self.game_view.notify()", "def notify(self) -> None:\n pass", "def notify(self) -> None:\n pass", "def test_notify_users(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users(User.objects.all(), foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 2)", "def on_notify(self, name):\r\n pass", "def _notify(self, observable):\n pass", "def receiveContactList(self, contactList):", "def notifyPut(self, data):\n if self.notify_hidden:\n self.notify_gui.dataQ.put('*SHOW*') # if gui is hidden when you send display data it will wake\n self.notify_hidden = False\n self.notify_gui.dataQ.put(data)", "def test_notify(self):\n disco = create_disco()\n messages = [object(), NodeActive(create_node(\"hello\"))]\n result = []\n disco.notify(result.append)\n for m in messages:\n disco.onMessage(None, m)\n self.assertEqual(messages, result)", "def _notify_update(self, cuds_object):", "def notify(self, event):\n raise NotImplementedError", "def notify_wrap(self, func, *args, **kw):\n val = func(self, *args,**kw)\n if not self._observable_frozen:\n self.notify('list', None, self)\n return val", "def notifyObservers(self):", "def test_group_notification_not_called(self):\n send_message(self.directmessage1.pk)\n self.assertFalse(self.groupnotify_mock.called)", "def test_notify(self):\n # self.client.force_authenticate(user=self.admin)\n\n FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)\n\n # Old notification that will be deleted\n with mock.patch(\n 'django.utils.timezone.now', return_value=FIXED_TIME):\n WaitQueueNotification.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user = WaitQueue.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user2 = WaitQueue.objects.create(\n user=self.user2,\n retreat=self.retreat,\n )\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n # Assert that the wait queue index is updated\n # All users (2) are notified since there are more (4) reserved_seats\n self.assertEqual(\n self.retreat.next_user_notified,\n 2,\n \"next_user_notified index invalid\"\n )\n\n # Assert that only 2 reserved seats remain (since only 2 users are\n # waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 2,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 2 new notifications are created (2 users in wait_queue)\n # Assert that 2 old notification has been deleted (too old)\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count + 2 - 2,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n\n self.assertEqual(len(mail.outbox), 2)\n\n waiting_user.delete()\n waiting_user2.delete()", "def test_group_notification_called(self):\n sender = self.create_user()\n thread = self.create_thread(sender=sender)\n newmessage = mommy.make(Message, thread=thread, sender=sender)\n send_message(newmessage.pk)\n 
self.groupnotify_mock.assert_called_with(newmessage.pk)", "def test_adviser_notified(self, mocked_notify_client):\n order = OrderFactory()\n adviser = AdviserFactory()\n\n notify.adviser_removed(order=order, adviser=adviser)\n\n assert mocked_notify_client.send_email_notification.called\n call_args = mocked_notify_client.send_email_notification.call_args_list[0][1]\n assert call_args['email_address'] == adviser.contact_email\n assert call_args['template_id'] == Template.you_have_been_removed_for_adviser.value\n\n assert call_args['personalisation']['recipient name'] == adviser.name", "def on_new_notification(data, index, *args):\n print(data)", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderCompleteFactory(assignees=[])\n assignees = OrderAssigneeCompleteFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.order_completed(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == 4\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.order_completed_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderWithOpenQuoteFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.order_cancelled(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.order_cancelled_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_customer_notified(self, mocked_notify_client):\n order = OrderFactory()\n\n notify.quote_cancelled(order, by=AdviserFactory())\n\n assert mocked_notify_client.send_email_notification.called\n call_args = mocked_notify_client.send_email_notification.call_args_list[0][1]\n assert call_args['email_address'] == order.get_current_contact_email()\n assert call_args['template_id'] == Template.quote_cancelled_for_customer.value\n assert call_args['personalisation']['recipient name'] == order.contact.name\n assert call_args['personalisation']['embedded link'] == order.get_public_facing_url()", "def test_notification_batch(self):\n req = '''[{\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]},\n {\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]}\n ]'''\n resp = ''\n status = 204\n r_status, r_resp = 
self.exec_handler(req)\n self.assertEqual(r_status, status)\n self.assertEqual(r_resp, resp)", "def notify(self, data):\n\n if 'personId' in data.keys():\n person_id = data['personId']\n if data['type'] == EventTimeLine.PERSON_CREATION:\n self._registry[person_id] = {\n 'name': data['name'],\n 'address': data['address'],\n 'status': data['status'],\n 'version': 1\n }\n\n if data['type'] == EventTimeLine.PERSON_STATUS_CHANGE:\n p = self._registry[person_id]\n p['status'] = data['newStatus']\n p['version'] += 1\n\n if data['type'] == EventTimeLine.PERSON_MOVE:\n p = self._registry[person_id]\n p['address'] = data['newAddress']\n p['version'] += 1", "def notify(self, **kwargs):\n return self.send(kwargs)", "def notify(self, **kwargs):\n return self.send(kwargs)", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n canceller = AdviserFactory()\n\n notify.quote_cancelled(order, by=canceller)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.quote_cancelled_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()\n assert call['personalisation']['canceller'] == canceller.name", "def notify(self, event, user):\n raise NotImplementedError(\"Subclasses must override notify() method\")" ]
[ "0.77015924", "0.61490875", "0.608443", "0.6079039", "0.60519356", "0.60519356", "0.60217476", "0.60056156", "0.5996854", "0.592306", "0.59182304", "0.5905801", "0.57024175", "0.56915534", "0.56626695", "0.56549096", "0.5612467", "0.5543305", "0.55374557", "0.5504479", "0.54982406", "0.5448741", "0.54437774", "0.54142153", "0.5408485", "0.54055905", "0.5403149", "0.5403149", "0.54016674", "0.5392498" ]
0.7656733
1
test al_contacts.view.View.notify() with valid list data.
def testNotifyWithValidListDate(self):
    self.assertEqual(self.view.notify(self.mockViews, []), None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testNotifyWithNonListData(self):\r\n self.assertRaises(ViewException, self.view.notify, self.mockViews, 'Invalid Data')", "def receiveContactList(self, contactList):", "def test_notify_user(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users([self.user_a], foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 1)", "def test_notify_users(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users(User.objects.all(), foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 2)", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderCompleteFactory(assignees=[])\n assignees = OrderAssigneeCompleteFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.order_completed(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == 4\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.order_completed_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_notify(self):\n disco = create_disco()\n messages = [object(), NodeActive(create_node(\"hello\"))]\n result = []\n disco.notify(result.append)\n for m in messages:\n disco.onMessage(None, m)\n self.assertEqual(messages, result)", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderWithOpenQuoteFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.order_cancelled(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.order_cancelled_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def update_list_view(self):\n self.model.dataChanged.emit(self.model.index(0, 1),\n self.model.index(len(self.model.data_list), 1))\n #self.pBar.setValue(localization.localizationProgress() * 100)", "def test_update_list_changes_data(qtbot):\n # Given\n model = SourcesModel()\n assert model.rowCount() == 0\n\n sources = []\n source = Source(\"I001\", \"Test\", \"Person\", \"Pub\", \"Abbr\")\n sources.append(source)\n\n # When\n with qtbot.waitSignals([model.modelAboutToBeReset, model.modelReset]):\n model.update_list(sources)\n\n # Then\n assert model.rowCount() == 1", "async def notify_view(self):\n await self.game_view.notify()", "def test_advisers_notified(self, 
mocked_notify_client):\n order = OrderFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n canceller = AdviserFactory()\n\n notify.quote_cancelled(order, by=canceller)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.quote_cancelled_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()\n assert call['personalisation']['canceller'] == canceller.name", "def test_message_list():", "def test_POST_send_list(self):\n\t\tself.POST_list()\n\t\tlist = self.GET_data('/api/list/' + self.list_id)\n\t\tself.POST_data('/api/list/' + self.list_id + '/send', data=list)", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderWithOpenQuoteFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.quote_generated(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.quote_sent_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_notification_batch(self):\n req = '''[{\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]},\n {\"jsonrpc\": \"2.0\", \"method\": \"notify_hello\", \"params\": [7]}\n ]'''\n resp = ''\n status = 204\n r_status, r_resp = self.exec_handler(req)\n self.assertEqual(r_status, status)\n self.assertEqual(r_resp, resp)", "def test_list_update(self):\r\n vm = List.value_manager(None, None, [1,2,3])\r\n assert not vm.changed\r\n vm.value = [4,5,6]\r\n assert vm.changed", "def test_interest_list_subscribe(self):\n list_id = self.list_2.pk\n url = reverse('xds_api:interest-list-subscribe', args=(list_id,))\n _, token = AuthToken.objects.create(self.user_1)\n response = self.client \\\n .patch(url, HTTP_AUTHORIZATION='Token {}'.format(token))\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(len(self.list_2.subscribers.all()), 1)", "def test_notify(self):\n # self.client.force_authenticate(user=self.admin)\n\n FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)\n\n # Old notification that will be deleted\n with mock.patch(\n 'django.utils.timezone.now', return_value=FIXED_TIME):\n WaitQueueNotification.objects.create(\n user=self.user,\n 
retreat=self.retreat,\n )\n\n waiting_user = WaitQueue.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user2 = WaitQueue.objects.create(\n user=self.user2,\n retreat=self.retreat,\n )\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n # Assert that the wait queue index is updated\n # All users (2) are notified since there are more (4) reserved_seats\n self.assertEqual(\n self.retreat.next_user_notified,\n 2,\n \"next_user_notified index invalid\"\n )\n\n # Assert that only 2 reserved seats remain (since only 2 users are\n # waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 2,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 2 new notifications are created (2 users in wait_queue)\n # Assert that 2 old notification has been deleted (too old)\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count + 2 - 2,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n\n self.assertEqual(len(mail.outbox), 2)\n\n waiting_user.delete()\n waiting_user2.delete()", "def contacts_list_update(self):\n\t\tself.database.contacts_clear()\n\t\tclient_log.debug(f'Запрос контакт листа для пользователся {self.name}')\n\t\treq = {\n\t\t\tACTION: GET_CONTACTS,\n\t\t\tTIME: time.time(),\n\t\t\tUSER: self.username\n\t\t}\n\t\tclient_log.debug(f'Сформирован запрос {req}')\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tclient_log.debug(f'Получен ответ {ans}')\n\t\tif RESPONSE in ans and ans[RESPONSE] == 202:\n\t\t\tfor contact in ans[LIST_INFO]:\n\t\t\t\tself.database.add_contact(contact)\n\t\telse:\n\t\t\tclient_log.error('Не удалось обновить список контактов.')", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderPaidFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.order_paid(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.order_paid_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_list_alerts(self):\n pass", "def notify_wrap(self, func, *args, **kw):\n val = func(self, *args,**kw)\n if not self._observable_frozen:\n self.notify('list', None, self)\n return val", "def notify(self, observable, *args, **kwargs) -> None:", "def test_advisers_notified(self, mocked_notify_client):\n order = OrderPaidFactory(assignees=[])\n assignees = OrderAssigneeFactory.create_batch(2, order=order)\n subscribers = OrderSubscriberFactory.create_batch(2, order=order)\n\n notify.quote_accepted(order)\n\n assert mocked_notify_client.send_email_notification.called\n # 1 = customer, 4 = 
assignees/subscribers\n assert len(mocked_notify_client.send_email_notification.call_args_list) == (4 + 1)\n\n calls_by_email = {\n data['email_address']: {\n 'template_id': data['template_id'],\n 'personalisation': data['personalisation'],\n }\n for _, data in mocked_notify_client.send_email_notification.call_args_list\n }\n for item in itertools.chain(assignees, subscribers):\n call = calls_by_email[item.adviser.get_current_email()]\n assert call['template_id'] == Template.quote_accepted_for_adviser.value\n assert call['personalisation']['recipient name'] == item.adviser.name\n assert call['personalisation']['embedded link'] == order.get_datahub_frontend_url()", "def test_edit_contact_list(self):\n c1 = ContactFactory(company_id=self.company.id)\n contact_list = ContactList.objects.first()\n data = ContactListSerializer(contact_list).data\n\n data['title'] = 'Nestle'\n data['contact_ids'] = [c1.id]\n\n url, parsed = self.prepare_urls('v1:contact_list-detail', subdomain=self.company.subdomain, kwargs={'pk':contact_list.id})\n \n response = self.client.put(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.put(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n url, parsed = self.prepare_urls('v1:contact_list-detail', subdomain=self.company.subdomain, kwargs={'pk':contact_list.id})\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n content = json.loads(response.content)\n self.assertEqual(content['title'], 'Nestle')\n self.assertEqual(content['contacts'], [c1.id])", "def test_adviser_notified(self, mocked_notify_client):\n order = OrderFactory()\n adviser = AdviserFactory()\n\n notify.adviser_removed(order=order, adviser=adviser)\n\n assert mocked_notify_client.send_email_notification.called\n call_args = mocked_notify_client.send_email_notification.call_args_list[0][1]\n assert call_args['email_address'] == adviser.contact_email\n assert call_args['template_id'] == Template.you_have_been_removed_for_adviser.value\n\n assert call_args['personalisation']['recipient name'] == adviser.name", "def notify(self) -> None:\n pass", "def notify(self) -> None:\n pass", "def test_create_contact_list(self):\n c1 = ContactFactory(company_id=self.company.id)\n data = {\n 'title': 'ContactList1',\n 'contact_ids': [c1.id],\n }\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n \n response = self.client.post(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.post(url, data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n content = json.loads(response.content)\n self.assertEqual(content['title'], 'ContactList1')\n self.assertEqual(content['contacts'], [c1.id])\n self.assertNotEqual(content['company_id'], None)\n self.assertNotEqual(content['owner'], None)\n\n url, parsed = self.prepare_urls('v1:contact_list-list', subdomain=self.company.subdomain)\n response = self.client.get(url, HTTP_HOST=parsed.netloc)\n content = json.loads(response.content)\n self.assertEqual(self.contact_lists_count+1, len(content))", "def _notify_update(self, cuds_object):" ]
[ "0.75171095", "0.655433", "0.6169801", "0.61209685", "0.58530337", "0.5754011", "0.5736475", "0.57315934", "0.57258004", "0.5683047", "0.567312", "0.565161", "0.55990887", "0.55708456", "0.5561683", "0.555424", "0.55427134", "0.55381984", "0.55344033", "0.55282605", "0.5498881", "0.5497229", "0.5477855", "0.5475975", "0.54723155", "0.54703957", "0.54567826", "0.54567826", "0.54497546", "0.5429025" ]
0.8113745
0
test String Representation On Instantiation
def testStringRepresentationOnInstantiation(self):
    self.assertEqual(str(self.tv), 'table')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_str():\n c = Circle(4) \n assert c.__str__() == 'Circle with radius: 4.000000'", "def test_string_representation(self):\n entry = Meal(name=\"Foo Meal\")\n self.assertEqual(str(entry), 'Foo Meal')", "def test_str(self):\n account = AccountFactory(username=\"bill\")\n self.assertEqual(account.__str__(), \"bill\")", "def test_str(self):\n s1 = Square(4, 2, 3, 47)\n self.assertEqual(str(s1), \"[Square] (47) 2/3 - 4\")", "def test_str_method(self):\n s1 = Square(4, 6, 2, 12)\n self.assertEqual(s1.__str__(), '[Square] (12) 6/2 - 4')", "def test_str(self):\n self.assertEqual(str(self.john), \"J. Doe\")\n self.assertEqual(str(self.solar), \"N. Graule\")", "def test___str__(self):\n self.assertEqual(\n str(self.mineral),\n 'mineralxy')", "def test_string():", "def test_str():\n # Test for string special method with scalar Rnode objects\n x = Rnode(1.0)\n try:\n assert str(x) == 'Reverse-mode Rnode Object ( Values: 1.0 )'\n except AssertionError as e:\n print(e)\n raise AssertionError", "def test_str(self):\n company = CompanyFactory()\n self.assertEqual(str(company),\n f\"Company {company.name} in {company.city}\")", "def testStringRepresentationOnInstantiation(self):\r\n self.assertEqual(str(self.lv), 'list')", "def test_str(self):\n tour = G(Tour, display_name='test1')\n self.assertEqual('test1', str(tour))", "def test_is_str_rep_string(self):\n self.assertIsInstance(cardutils.Card(10,1).__str__(), str)", "def test_init(self):\n orig = \"\"\n r = self.SequenceClass(orig)\n self.assertEqual(str(r), orig)\n\n orig = \"TCAGGA\"\n r = self.SequenceClass(orig)\n self.assertEqual(r._data, array([6, 62]))\n self.assertEqual(str(r), orig)", "def test_str(self):\n self.assertEqual(str(self.bioe), str(self.wbioe))\n self.assertEqual(str(self.uioe), str(self.wuioe))\n self.assertEqual(str(self.bose), str(self.wbose))\n self.assertEqual(str(self.uose), str(self.wuose))", "def __init__(self) -> None:\n str.__init__(self)", "def test_str(self):\n \n # Create a Resource object\n resource = Resource(1, \"White Noise\", Name(\"Don\", \"\", \"DeLillo\"), \n \"Delillo's White Noise follows narrator Jack \"\\\n \"Gladney, a professor at a small Liberal Arts \"\\\n \"college and describes an academic year. Jack \"\\\n \"teaches at a school called the \"\\\n \"College-on-the-Hill, where he serves as the \"\\\n \"department chair of Hitler studies. He lives in \"\\\n \"Blacksmith, a quiet college town, with his wife, \"\\\n \"Babette, and four of their children from earlier \"\\\n \"marriages: Heinrich, Steffie, Denise, and \"\\\n \"Wilder. Throughout the novel, various \"\\\n \"half-siblings and ex-spouses drift in and out \"\\\n \"of the family’s home.\",\n \"sci-fi\", \"English\", 1985, \"US\", 326, \"book\",\n [\"culture\", \"survival\", \"life\", \"society\"])\n \n # Assert expected result of the str function\n self.assertEqual(str(resource), (\"ID: 1 \\nTitle: White Noise \"\\\n \"\\nCreator: Don DeLillo \\nSummary: Delillo's White Noise follows \"\\\n \"narrator Jack Gladney, a professor at a \\nsmall Liberal Arts \"\\\n \"college and describes an academic year. Jack teaches \\nat ... 
\"\\\n \"\\nGenre: sci-fi \\nLanguage: English \\nYear: 1985 \"\\\n \"\\nCountry: US \\nLength: 326p \\nType: book \"\\\n \"\\nKeywords: culture, life, society, survival\"))", "def test_str_method(self):\n _name = 'test-name'\n el = MarkerId(_name)\n self.assertEqual(el.__str__(), _name)", "def test_str(self):\n dummy = DummyCryptographicObject()\n str(dummy)", "def test_init(self):\n orig = \"TC---\"\n seq = self.SequenceClass(orig)\n self.assertEqual(str(seq), orig)", "def test_str(self):\r\n self.assertEqual(str(self.black), 'black:#000000')\r\n self.assertEqual(str(self.red), 'red:#ff0000')\r\n self.assertEqual(str(self.pink), 'pink:#640000')", "def test_str(self, r, rep):\n assert str(r) == rep", "def test_str(self):\n user = User()\n user_details = {\"student_id\": user.id, \"first_name\": \"Joe\"}\n student = Student(**user_details)\n string = \"[Student] ({}) {}\".format(student.id, student.to_dict())\n self.assertEqual(string, str(student))", "def test_str(self):\n user = User()\n string = \"[User] ({}) {}\".format(user.id, user.__dict__)\n self.assertEqual(user.__str__(), string)", "def test_string(self):\n\n new_jawn = Amenity()\n name = getattr(new_jawn, \"name\")\n self.assertIsInstance(name, str)", "def test_str(self):\n \n # Create a Resource object\n book = Book(\"Penguin Group\", \"New York\", \"fiction\", 1, \"White Noise\", \n Name(\"Don\", \"\", \"DeLillo\"), \n \"Delillo's White Noise follows narrator Jack \"\\\n \"Gladney, a professor at a small Liberal Arts \"\\\n \"college and describes an academic year. Jack \"\\\n \"teaches at a school called the \"\\\n \"College-on-the-Hill, where he serves as the \"\\\n \"department chair of Hitler studies. He lives in \"\\\n \"Blacksmith, a quiet college town, with his wife, \"\\\n \"Babette, and four of their children from earlier \"\\\n \"marriages: Heinrich, Steffie, Denise, and \"\\\n \"Wilder. Throughout the novel, various \"\\\n \"half-siblings and ex-spouses drift in and out \"\\\n \"of the family’s home.\",\n \"sci-fi\", \"English\", 1985, \"US\", 326, \"book\",\n [\"culture\", \"survival\", \"life\", \"society\"])\n \n # Assert expected result of the str function\n self.assertEqual(str(book), (\"ID: 1 \\nTitle: White Noise \"\\\n \"\\nCreator: Don DeLillo \\nSummary: Delillo's White Noise follows \"\\\n \"narrator Jack Gladney, a professor at a \\nsmall Liberal Arts \"\\\n \"college and describes an academic year. Jack teaches \\nat ... \"\\\n \"\\nGenre: sci-fi \\nLanguage: English \\nYear: 1985 \"\\\n \"\\nCountry: US \\nLength: 326p \\nType: book \"\\\n \"\\nKeywords: culture, life, society, survival\\nPublisher: \"\\\n \"Penguin Group \\nCity: New York \\nCategory: fiction\"))", "def test_str_(self):\n str(self.standardcode)\n repr(self.standardcode)", "def test_str(self):\n # mocks of files\n rsa_ca_priv_file, rsa_priv_file, rsa_cert_file = range(3)\n\n ap = APInfo(port_id=1, ip=\"2.2.2.2\", mac=\"bb:bb:bb:bb:bb:bb\", radio_mac=\"bb:bb:bb:bb:bb:00\", udp_port=12345, wlc_ip='1.1.1.1',\n gateway_ip='1.1.1.2', ap_mode=APMode.LOCAL, rsa_ca_priv_file=rsa_ca_priv_file, rsa_priv_file=rsa_priv_file, rsa_cert_file=rsa_cert_file)\n\n self.assertEqual(str(ap), 'APbbbb.bbbb.bbbb')\n self.assertEqual(str(ap), ap.name)", "def test_str(self):\n step = G(Step, display_name='test1')\n self.assertEqual('test1', str(step))", "def __init__(self, string: str):\r\n self.string = string" ]
[ "0.74348164", "0.73944116", "0.72476286", "0.7242773", "0.7213901", "0.7211895", "0.70888114", "0.70745707", "0.70544636", "0.70401263", "0.69723976", "0.6969948", "0.69382244", "0.69192183", "0.69022226", "0.6894632", "0.6893857", "0.68858206", "0.68720406", "0.6862307", "0.6855417", "0.68542737", "0.68454057", "0.6834895", "0.6832447", "0.68252087", "0.6814447", "0.6806365", "0.6801969", "0.6793897" ]
0.76065
0
Returns information about the dependencies required by this repository. The return value should be an OrderedDict if the repository supports multiple configurations (aka is configurable) or a single Configuration if not.
def GetDependencies():
    # To support a single (unnamed) configuration...
    return Configuration(
        "Standard Build Environment",
        [
            Dependency(
                "5C7E1B3369B74BC098141FAD290288DA",  # Id for Common_Environment; found in <Common_Environment>/__RepositoryId__
                "Common_SimpleSchemaGenerator",  # Name used if Common_Environment cannot be found during setup
                None,  # Configuration value used when activating Common_Environment (can be None or skipped for repos that only support a single configuration)
                "https://github.com/davidbrownell/Common_SimpleSchemaGenerator.git",  # Uri for repo; can be string or def Func(scm_or_none) -> string
            ),
            # Other dependencies go here (if any)
        ],
        # By default, the most recent version of all tools and libraries will be activated for this repository and its dependencies.
        # If necessary, you can override this behavior by specifying specific versions for tools that should be used when activating
        # this repository with this configuration.
        VersionSpecs(
            # Tools
            [],
            # Libraries, organized by language
            {},
        ),
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_dependency_configurations(self):\n deps = []\n\n for variant in self.resolve_variants():\n # Note: the variants have already been resolved\n # This for loop simply needs to resolve the dependencies one\n # by one, potentially overwriding earlier ones\n name, value = next(iter(variant.items()))\n if 'requires' in value and value['requires'] is not None:\n requires = value['requires']\n for req_name, req_config in requires.items():\n deps.append((req_name, req_config['version']))\n\n return deps", "def dependencies(self) -> typing.Optional[typing.List[aws_cdk.core.IDependable]]:\n return self._values.get('dependencies')", "def get_dependencies(self):\n dependencies = self._dependencies\n if self.ansible is not None:\n dependencies.append(\"ansible=={}.*\".format(self.ansible))\n else:\n dependencies.append(\"ansible\")\n # Drivers can have their own dependencies\n if self.scenario.driver is not None \\\n and self.scenario.driver in DRIVER_DEPENDENCIES.keys():\n dependencies.extend(DRIVER_DEPENDENCIES[self.scenario.driver])\n # Scenarios can specify a requirements.txt\n if self.scenario.requirements is not None:\n dependencies.append(\"-r\" + self.scenario.requirements)\n return dependencies", "def getDependenciesList(self) -> List[Mapping[Any, Any]]:\n if self._dependencyList is not None:\n return self._dependencyList\n\n chartfile = self.getChartFile()\n if chartfile['apiVersion'] == 'v2':\n if 'dependencies' in chartfile:\n self._dependencyList = chartfile['dependencies']\n else:\n self._dependencyList = []\n elif chartfile['apiVersion'] == 'v1':\n self.readArchiveFiles()\n if self._archiveFiles is not None and 'requirements.yaml' in self._archiveFiles:\n self._dependencyList = self._getFile('requirements.yaml')['dependencies']\n else:\n self._dependencyList = []\n else:\n raise ConfigurationError('Unknown chart file version: {}'.format(chartfile))\n return self._dependencyList", "def ParseDependencies():\n dependencies = {}\n try:\n for values in CONFIG.DEPENDENCIES:\n job = values['job'].lower()\n dependencies[job] = {}\n dependencies[job]['programs'] = values['programs']\n dependencies[job]['docker_image'] = values.get('docker_image')\n except (KeyError, TypeError) as exception:\n raise TurbiniaException(\n 'An issue has occurred while parsing the '\n 'dependency config: {0!s}'.format(exception))\n return dependencies", "def getDependencies(self) -> Mapping[Any, Any]:\n ret: Dict[Any, Any] = {}\n for dep in self.getDependenciesList():\n ret[dep['name']] = dep\n return ret", "def get_dependencies(self):\n raise NotImplementedError()", "def initial_dependencies(self) -> List[str]:\n return self.options[\"general\"][\"dependencies\"]", "def get_dependencies():\n return config.check_driver_dependencies(\n __virtualname__, {\"profitbricks\": HAS_PROFITBRICKS}\n )", "def get_dependencies(self):\n return [[\"uuid\", \"ossp-uuid\"]]", "def get_dependencies(self):\n return [\"make\", \"g++\", \"gcc\", \"cmake-2.8.12.1\", \"boost_1_56_0\"]", "def _get_dependencies(self, requirement_name, version):\n pkg_metadata = self._get_metadata(requirement_name)\n versions = pkg_metadata.get('versions', dict())\n version = versions.get(str(version), dict())\n return sorted(version.get('dependencies', dict()).items())", "def repository_dependencies(self, host: (str), owner: (str), repo: (str)) -> Any:\n\n return search_api(\"repository_dependencies\", host, owner, repo)", "def getDependenciesCharts(self) -> Mapping[str, 'ChartVersionInfo']:\n deps = self.getDependenciesList()\n ret: Dict[str, 
'ChartVersionInfo'] = {}\n for dep in deps:\n ret[dep['name']] = self.getDependencyChart(dep['name'])\n return ret", "def dependencies(self, configuration):\n for typ, binding_options in self.bindings.items():\n if typ in configuration:\n for item in binding_options.wanted(configuration[typ].values()):\n if item in configuration[typ]:\n yield configuration[typ][item].long_name", "def plugin_get_dependency():\n return []", "def DEPENDENCIES(self):\n pass", "def _get_dependencies():\n return config.check_driver_dependencies(__virtualname__, {\"XenAPI\": HAS_XEN_API})", "def dependencies(self) -> Dict[Union[Language, None], List['RadsProjectVersion']]:\n\n logger.debug(f\"retrieve dependencies of {self}\")\n\n path = f\"{self.path}/solutionmanifest\"\n self.solution.storage.download(path, path)\n with open(self.solution.storage.fspath(path)) as f:\n lines = f.read().splitlines()\n assert lines[0] == \"RADS Solution Manifest\", \"unexpected solutionmanifest magic line\"\n assert lines[1] == \"1.0.0.0\", \"unexpected solutionmanifest version\"\n assert lines[2] == self.solution.name, \"solution name mismatch in solutionmanifest header\"\n assert lines[3] == self.version, \"solution version mismatch in solutionmanifest header\"\n idx = 4\n\n required_projects = [] # [name, ...]\n projects = {} # {name: RadsProjectVersion}\n nprojects, idx = int(lines[idx]), idx + 1\n for _ in range(nprojects):\n (name, version, unk1, unk2), idx = lines[idx:idx+4], idx + 4\n unk1, unk2 = int(unk1), int(unk2)\n if unk1 == 0:\n required_projects.append(name)\n else:\n assert unk1 == 10\n assert unk2 == 0\n projects[name] = RadsProjectVersion(RadsProject(self.solution.storage, name), RadsVersion(version))\n\n langs = {} # {Language: [RadsProjectVersion, ...]}\n nlangs, idx = int(lines[idx]), idx + 1\n for _ in range(nlangs):\n (lang, unk1, ndeps), idx = lines[idx:idx+3], idx + 3\n unk1, ndeps = int(unk1), int(ndeps)\n assert unk1 == 0\n deps, idx = lines[idx:idx+ndeps], idx + ndeps\n langs[Language(lang)] = [projects[name] for name in deps]\n\n langs[None] = list(projects[name] for name in required_projects)\n return langs", "def checkDeps( self ):\n\n # skip dependency check for downloading only\n if( self.downloadOnly ):\n return True\n\n # skip dependency check if package is going to be installed\n if( self.mode == \"install\" ):\n return True\n\n log.debug( 'Checking dependencies of %s', self.name )\n \n file = self.realPath() + \"/.dependencies\"\n \n r = True\n\n # if file doesn't exist return True\n if( not os.path.exists( file )):\n return True\n\n # open dependencies file\n f = open( file )\n filedeplist = {}\n for line in f.readlines():\n line = line.strip()\n if( (not line.startswith(os.linesep)) and (not line.startswith(\"#\")) \\\n and (len(line) > 0 )):\n tokens = line.split(\":\")\n filedeplist[ tokens[0] ] = tokens[1]\n f.close()\n\n log.debug( 'Dependencies read from file: %s', filedeplist )\n\n # get actual dependecies\n deplist={}\n self.getDepList(deplist)\n del deplist[self.name]\n\n log.debug( 'Dependencies found in current cfg file: %s', deplist )\n \n # compare dependencies\n for k, v in filedeplist.iteritems():\n if( deplist.has_key( k )):\n if( deplist[k] != v ):\n if( os.path.basename(deplist[k]) != os.path.basename(v) ):\n if( r ):\n print \"*** WARNING: ***\\n***\\tFollowing dependencies from \" + self.name + \" located at [ \" \\\n + self.realPath() + \" ] failed:\\n***\"\n print \"***\\t * \" + k + \" \" + os.path.basename(v) + \" differs from version \" \\\n + 
os.path.basename(deplist[k]) + \" defined in your config file..\"\n r = False\n else:\n if( r ): #just print this once\n print \"*** WARNING: ***\\n***\\tFollowing dependencies from \" + self.name + \" located at [ \" + self.realPath() \\\n + \" ] failed:\\n***\"\n print \"***\\t * \" + k + \" not found in your config file!!\"\n r = False\n \n\n if( not r ):\n print \"***\"\n if( self.useLink ):\n print \"***\\t\" + self.name + \" is in \\\"link\\\" mode, if you want to rebuild it with the new dependencies set it to \\\"use\\\" mode...\"\n r = True\n else:\n if( not self.parent.noAutomaticRebuilds ):\n print \"***\\t * \" + self.name + \" changed to \\\"install\\\" mode and rebuild flag set to True...\"\n self.mode = \"install\"\n self.rebuild = True\n self.preCheckDeps()\n print \"***\\n***\\tUpdating dependency tree ( modules that depend on \" + self.name + \" need also to be rebuilt )...\\n***\"\n self.updateDepTree([])\n print \"***\\n***\\tif you do NOT want to rebuild this module(s) just answer \\\"no\\\" later on in the installation process,\\n\" \\\n + \"***\\tor set the global flag ilcsoft.noAutomaticRebuilds=True in your config file...\"\n else:\n print \"***\\n***\\tglobal flag ilcsoft.noAutomaticRebuilds is set to True, nothing will be done...\\n***\"\n return r", "def gather_configs(self):\n configs = []\n for what in self.order:\n for key in self.plugins[what]:\n mgr = self.plugins[what][key]\n c = mgr.config(what='get')\n if c is not None:\n c.update({\n 'description': mgr.description\n })\n # print(\"Gathering configuration from \", c)\n configs.append(c)\n return configs", "def compute_dependencies(repositories, requirement, transitive=False):\n pool = Pool(repositories)\n neighbors = _neighbors_in_repositories(pool, transitive)\n dependencies = _neighbors_for_requirement(pool, neighbors, requirement)\n return dependencies", "def dependencies(self, dep_context):\n if self.strict_deps:\n return strict_dependencies(self.target, dep_context)\n else:\n return all_dependencies(self.target, dep_context)", "def get_configuration_data(self):\n keys = self.get_configuration_parameters_names()\n data = self.get_configuration_parameters_values()\n\n ordered_data = OrderedDict()\n for pos, key in enumerate(keys):\n ordered_data[key] = data[pos]\n\n return ordered_data", "def component_configurations(self):\n return self._component_configurations", "def get_dependencies(self, alias):\n dependencies = {\"Ensembl2Reactome_All_Levels\": ['ReactomePathways'],\n \"ReactomePathways\": list(),\n \"reactome.homo_sapiens.interactions.tab-delimited\": list(),\n \"ReactomePathwaysRelation\": ['ReactomePathways']}\n return dependencies[alias]", "def conda_dependencies(self):\n raise NotImplementedError", "def getDependencyList(self):\n return self.getDocumentedObject().getDependencyList()", "def dependencies(self) -> List[Bundle]:\n return []", "def criteria(self) -> Optional[Sequence['outputs.MetadataDependenciesResponse']]:\n return pulumi.get(self, \"criteria\")" ]
[ "0.7249918", "0.71176404", "0.6591938", "0.6591409", "0.64736134", "0.62895435", "0.6217748", "0.614492", "0.6107794", "0.59998703", "0.5892432", "0.5852836", "0.58257455", "0.5819717", "0.57458514", "0.57398283", "0.57112324", "0.5689855", "0.56824005", "0.55657053", "0.5564035", "0.55571544", "0.5554542", "0.5548016", "0.5536628", "0.5527794", "0.5526307", "0.5498799", "0.5479501", "0.5464565" ]
0.7559669
0
Returns an action or list of actions that should be invoked as part of the setup process. Actions are generic command line statements defined in /Libraries/Python/CommonEnvironment/v1.0/CommonEnvironment/Shell/Commands/__init__.py that are converted into statements appropriate for the current scripting language (in most cases, this is Bash on Linux systems and Batch or PowerShell on Windows systems.
def GetCustomActions(debug, verbose, explicit_configurations):
    return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_available_actions() -> tuple:\n return tuple(method for method in dir(cli_commands) if callable(getattr(cli_commands, method)))", "def actions(self):\n from moztrap.view.lists.actions import actions\n return actions", "def _generate_actions(self) -> list:\n pass", "def get_available_actions(self):\n return self.actions", "def setup_commands(self):\n return self.get_data(\"setup_commands\")", "def get_actions(self):\n return []", "def _run_actions(self):\n\n if \"install-bento\" in self.actions:\n self._do_action_bento_setup()\n\n if \"create-tables\" in self.actions:\n self._do_action_tables_create()\n\n if \"import-ratings\" in self.actions:\n self._do_action_import_ratings()\n\n if \"import-user-info\" in self.actions:\n self._do_action_import_user_info()\n\n if \"import-movie-info\" in self.actions:\n self._do_action_import_movie_info()\n\n if \"train-item-item-cf\" in self.actions:\n self._do_action_train()\n\n if \"register-freshener\" in self.actions:\n self._do_action_register_freshener()", "def actions() -> None:\n pass", "def actions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"actions\")", "def actions(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"actions\")", "def actions():\n pass", "def get_action_choices():\n from hardware.management.commands.gpio_buttons import Command\n import re\n pattern = re.compile(r'^on_(?P<name>\\w+)_press$')\n choices = []\n for member in dir(Command):\n match = pattern.match(member)\n if match:\n action = match.groupdict()['name']\n name = action.replace('_', ' ').title()\n choices.append((action, name))\n return choices", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _get_actions(self):\n return self.__actions", "def _help_actions(self):\n actions_str = \"\"\n for (key, value) in self.actions_help.items():\n actions_str += \"command: %s\\n%s\\n\\n\" % (key, value)\n print(actions_str)\n sys.exit(0)", "def getActions():\n return getPlugins(IRenamingAction, plugins)", "def get_list_of_actions(self):\n return self.actions", "def actions(self):\n return self._action_list", "def actions(self):\n raise NotImplementedError", "def actions(self):\n\n return self._actions.getSlice(0)", "def actions(self):\n\n return self._actions.getSlice(0)", "def get_actions(self):\n\n if self.description == exceptions.NotAvailableError:\n raise exceptions.NotAvailableError('Can\\'t get actions because a description for this service is'\n ' not available.')\n return list(self.actions.values())", "def apply_sh(actions):\n return [ a if six.callable(a) else sh(a, log_command=False) for a in actions ]", "def list_actions() -> None:\n colorama_init()\n max_action_name_len = max(len(name) for name in KNOWN_ACTIONS.keys())\n wrapper = textwrap.TextWrapper(\n width=80 - max_action_name_len - 3,\n subsequent_indent=' ' * (max_action_name_len + 3),\n )\n print(\n '{bright}{name:<{max_action_name_len}} -{normal} {doc}'.format(\n bright=Style.BRIGHT,\n name='name',\n max_action_name_len=max_action_name_len,\n normal=Style.NORMAL,\n doc='description [(argument: type, ...)]',\n )\n )\n print('-' * 80)\n for name, action in KNOWN_ACTIONS.items():\n wrapped_doc = wrapper.fill(' '.join(str(action.__doc__).split()))\n print(\n '{bright}{name:<{max_action_name_len}} -{normal} {doc}'.format(\n bright=Style.BRIGHT,\n name=name,\n max_action_name_len=max_action_name_len,\n normal=Style.NORMAL,\n doc=wrapped_doc,\n )\n )\n return None", "def 
actions(self):\n self._actions = {}\n self._actions['getItems'] = ('FileCrawler', None)\n #self._actions['getContents'] = ('ParseContents', ('path'))\n return self._actions", "def get_action_command(self):\n if self.action.value == \"start\":\n self.action_command = self.ServerStartSubCommand()\n else:\n self.action_command = None", "def getActions(self, state): \n util.raiseNotDefined()", "def exec_actions(actions, watch_path, var_name_ext):\n var_name = var_name_ext.rsplit('.', 1)[0]\n current_dir = os.getcwd()\n os.chdir(watch_path)\n watch_path_absolute = os.getcwd()\n for action_type, action in actions:\n action_name = action.replace('$NAME_EXT', var_name_ext)\\\n .replace('$NAME', var_name)\\\n .replace('$CURRENT_DIR', current_dir)\\\n .replace('$WATCH_DIR_LAST', watch_path_absolute.split('/')[-1])\\\n .replace('$WATCH_DIR', watch_path_absolute)\n if action_type == 'SHELL_COMMAND':\n os.system(action_name)\n os.chdir(current_dir)", "def actions(self):\n return self._actions" ]
[ "0.6928276", "0.6423232", "0.64215183", "0.6394122", "0.6382282", "0.63595027", "0.62756604", "0.620738", "0.6174386", "0.6174386", "0.6173213", "0.61705583", "0.6168872", "0.6168872", "0.6168872", "0.6158207", "0.6088489", "0.60616827", "0.60305166", "0.5990831", "0.5982534", "0.5982534", "0.59741706", "0.5954827", "0.5884643", "0.5879944", "0.58322716", "0.58283424", "0.5823889", "0.58221686" ]
0.69521284
0
Returns a list of each transaction profiled along with the time spent.
def showTransactions(self):
    self.scanTransactions()
    txns = []
    # Summarize the stats
    for x in range(len(self._trans)):
        stats = self._trans[x]
        trans_time = 0
        remote_calls = 0
        for name, stat in stats:
            trans_time += stat.total_tt
            remote_calls += 1
        txns.append((x, trans_time, remote_calls))
    results = ["TX#\tTime\tCalls", "=" * 22]
    for item in txns:
        results.append("%3d\t%4f\t%5d" % item)
    return "\n".join(results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_profile_stats():\n return p_stats", "def profiles(self):\n with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:\n return list(filter(lambda x: x is not None, executor.map(self.profile_details, self.profiles_names())))", "def test_wallets_get_transaction_list(self):\n pass", "def transaction_data(self):\n return list(map(lambda transaction:transaction.to_json(), self.transaction_map.values()))", "def profiler(self):\r\n\r\n class Task(object):\r\n \"Private class to nicely wrap up the profile data\"\r\n def __init__(self, block, addr):\r\n self.block = block\r\n self.addr = addr\r\n self.name = None\r\n def tidy(self, sym):\r\n self.name = sym.varfind(self.addr).name\r\n self.CPU_FRAC = sym.constfind(\"$profiler.CPU_FRACTION_FIELD\").value\r\n def __repr__(self):\r\n if self.name is None:\r\n raise Exception(\"Need to call the tidy method before using\")\r\n return \"%-50s - %2.1f %%\" % (self.name, self.block[self.CPU_FRAC]/1000)\r\n\r\n\r\n # get the head of the list and a couple of constants\r\n head = self._core.sym.varfind(\"$profiler.last_addr\").addr\r\n NULL = self._core.sym.constfind(\"$profiler.LAST_ENTRY\").value\r\n SIZE = self._core.sym.constfind(\"$profiler.STRUC_SIZE\").value\r\n NEXT = self._core.sym.constfind(\"$profiler.NEXT_ADDR_FIELD\").value\r\n\r\n # get the first address\r\n curr = self._core.dm[head]\r\n\r\n # read all the structures off the chip as fast as we can\r\n tasks = []\r\n while curr != NULL:\r\n block = self._core.dm[curr:(curr+SIZE)]\r\n tasks.append(self.Task(block, curr))\r\n curr = block[NEXT]\r\n\r\n # now fill in the other bits\r\n for t in tasks:\r\n t.tidy(self._core.sym)\r\n\r\n # finally return\r\n return tasks", "def get_transactions(self):\n # open a cursor object\n cur = self.get_cursor()\n\n # get transactions from database\n cur.execute(\"SELECT * FROM transactions\")\n transactions_data = cur.fetchall()\n\n # convert into a dict of values.\n transactions_list = []\n [transactions_list.append({'transaction_id': transaction[0],\n 'date': transaction[1],\n 'payee_id': transaction[2],\n 'description': transaction[3],\n 'amount': transaction[4]})\n for transaction in transactions_data]\n\n # close the cursor\n self.close_cursor()\n\n return transactions_list", "def slow_transaction_data(self):\n\n # XXX This method no longer appears to be used. 
Being replaced\n # by the transaction_trace_data() method.\n\n if not self.__settings:\n return []\n\n if not self.__slow_transaction:\n return []\n\n maximum = self.__settings.agent_limits.transaction_traces_nodes\n\n transaction_trace = self.__slow_transaction.transaction_trace(\n self, maximum)\n\n data = [transaction_trace,\n list(self.__slow_transaction.string_table.values())]\n\n if self.__settings.debug.log_transaction_trace_payload:\n _logger.debug('Encoding slow transaction data where '\n 'payload=%r.', data)\n\n json_data = json_encode(data)\n\n level = self.__settings.agent_limits.data_compression_level\n level = level or zlib.Z_DEFAULT_COMPRESSION\n\n zlib_data = zlib.compress(six.b(json_data), level)\n\n pack_data = base64.standard_b64encode(zlib_data)\n\n if six.PY3:\n pack_data = pack_data.decode('Latin-1')\n\n root = transaction_trace.root\n\n trace_data = [[root.start_time,\n root.end_time - root.start_time,\n self.__slow_transaction.path,\n self.__slow_transaction.request_uri,\n pack_data]]\n\n return trace_data", "async def get_stats():\n ret = {}\n counter = 0\n counter_error = 0\n counter_completed = 0\n list_duration = list()\n for j in mngr.transcode_requests:\n counter = counter + 1\n task = mngr.transcode_requests[j]\n if task.state == State.ERROR:\n counter_error = counter_error + 1\n elif task.state == State.COMPLETED:\n counter_completed += 1\n list_duration.append(task.duration)\n\n if not counter_completed:\n percent = 0\n aver = 0\n else:\n percent = np.percentile(np.array(list_duration), np.array(95))\n aver = np.average(np.array(list_duration))\n\n ret[\"completed ratio\"]=float(counter_completed/counter)\n ret[\"duration 95th percentile\"] = percent\n ret[\"average\"] = aver\n\n return ret", "def timings(self):\r\n return self._timings", "def task4(self) ->list:\n user_readTimes = {}\n for entry in self.records:\n if(entry['event_type'] == 'pagereadtime'):\n if (entry['visitor_uuid'] in user_readTimes):\n user_readTimes[entry['visitor_uuid']] += entry['event_readtime']\n else:\n user_readTimes[entry['visitor_uuid']] = entry['event_readtime']\n readTimes = list(sorted(user_readTimes.items(), key=operator.itemgetter(1), reverse = True))[0:10]\n for times in readTimes:\n print(times)\n return readTimes", "def _compute_user_stats():\n user_stats = []\n \n wmt16_group = Group.objects.filter(name='WMT16')\n wmt16_users = _get_active_users_for_group(wmt16_group)\n \n for user in wmt16_users:\n _user_stats = HIT.compute_status_for_user(user)\n _name = user.username\n _avg_time = seconds_to_timedelta(_user_stats[1])\n _total_time = seconds_to_timedelta(_user_stats[2])\n _data = (_name, _user_stats[0], _avg_time, _total_time)\n \n if _data[0] > 0:\n user_stats.append(_data)\n \n # Sort by total number of completed HITs.\n user_stats.sort(key=lambda x: x[1])\n user_stats.reverse()\n \n return user_stats", "def get_staking_transaction_history(address, page=0, page_size=1000, include_full_tx=False, tx_type='ALL',\n order='ASC', endpoint=_default_endpoint, timeout=_default_timeout\n ) -> list:\n params = [\n {\n 'address': address,\n 'pageIndex': page,\n 'pageSize': page_size,\n 'fullTx': include_full_tx,\n 'txType': tx_type,\n 'order': order\n }\n ]\n # Using v2 API, because getStakingTransactionHistory not implemented in v1\n method = 'hmyv2_getStakingTransactionsHistory'\n stx_history = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)['result']\n try:\n return stx_history['staking_transactions']\n except KeyError as e:\n raise 
InvalidRPCReplyError(method, endpoint) from e", "def transaction_trace_data(self, connections):\n\n _logger.debug('Generating transaction trace data.')\n\n if not self.__settings:\n return []\n\n # Create a set 'traces' that is a union of slow transaction,\n # and Synthetics transactions. This ensures we don't send\n # duplicates of a transaction.\n\n traces = set()\n if self.__slow_transaction:\n traces.add(self.__slow_transaction)\n traces.update(self.__synthetics_transactions)\n\n # Return an empty list if no transactions were captured.\n\n if not traces:\n return []\n\n # We want to limit the number of explain plans we do across\n # these. So work out what were the slowest and tag them.\n # Later the explain plan will only be run on those which are\n # tagged.\n\n agent_limits = self.__settings.agent_limits\n explain_plan_limit = agent_limits.sql_explain_plans_per_harvest\n maximum_nodes = agent_limits.transaction_traces_nodes\n\n database_nodes = []\n\n if explain_plan_limit != 0:\n for trace in traces:\n for node in trace.slow_sql:\n # Make sure we clear any flag for explain plans on\n # the nodes in case a transaction trace was merged\n # in from previous harvest period.\n\n node.generate_explain_plan = False\n\n # Node should be excluded if not for an operation\n # that we can't do an explain plan on. Also should\n # not be one which would not be included in the\n # transaction trace because limit was reached.\n\n if (node.node_count < maximum_nodes and\n node.connect_params and node.statement.operation in\n node.statement.database.explain_stmts):\n database_nodes.append(node)\n\n database_nodes = sorted(database_nodes,\n key=lambda x: x.duration)[-explain_plan_limit:]\n\n for node in database_nodes:\n node.generate_explain_plan = True\n\n else:\n for trace in traces:\n for node in trace.slow_sql:\n node.generate_explain_plan = True\n database_nodes.append(node)\n\n # Now generate the transaction traces. 
We need to cap the\n # number of nodes capture to the specified limit.\n\n trace_data = []\n\n for trace in traces:\n transaction_trace = trace.transaction_trace(\n self, maximum_nodes, connections)\n\n data = [transaction_trace,\n list(trace.string_table.values())]\n\n if self.__settings.debug.log_transaction_trace_payload:\n _logger.debug('Encoding slow transaction data where '\n 'payload=%r.', data)\n\n json_data = json_encode(data)\n\n level = self.__settings.agent_limits.data_compression_level\n level = level or zlib.Z_DEFAULT_COMPRESSION\n\n zlib_data = zlib.compress(six.b(json_data), level)\n\n pack_data = base64.standard_b64encode(zlib_data)\n\n if six.PY3:\n pack_data = pack_data.decode('Latin-1')\n\n root = transaction_trace.root\n\n if trace.record_tt:\n force_persist = True\n else:\n force_persist = False\n\n if trace.include_transaction_trace_request_uri:\n request_uri = trace.request_uri\n else:\n request_uri = None\n\n trace_data.append([transaction_trace.start_time,\n root.end_time - root.start_time,\n trace.path,\n request_uri,\n pack_data,\n trace.guid,\n None,\n force_persist,\n None,\n trace.synthetics_resource_id, ])\n\n return trace_data", "def map_profile_info(profile):\n result = map(\n lambda p: {\n 'callcount': p.callcount,\n 'time': p.totaltime,\n 'name': p.code if isinstance(p.code, str) else p.code.co_name,\n 'file': None if isinstance(p.code, str) else p.code.co_filename},\n profile.getstats())\n return result", "def getTransferListSummary(self):\n p_ids_and_prices = {}\n players = self.getAllPlayerInfoTransferlist()\n\n # Get IDs of all players\n log_event(self.queue, \"Gathering player prices... \")\n for p in players:\n p_bidstatus = p[1]\n p_id = p[8]\n # removed Filter for unlisted / expired players\n if p_id not in p_ids_and_prices:\n p_sellprice = self.getPlayerSellPrice(p_id)\n # If sell price returns 0, need to fetch from Futbin\n if p_sellprice == 0:\n p_sellprice = self.getFutbinPrice_opentab(p_id)\n self.sleep_approx(5) # Delay iteration to not anger futbin\n # Add player ID and price to dict\n p_ids_and_prices[p_id] = p_sellprice\n\n for p_id in p_ids_and_prices:\n p_price = p_ids_and_prices[p_id]\n p_name = self.getPlayerCardName(p_id)\n log_event(self.queue, str(p_name) + \" - #\" +\n str(p_id) + \" Price \" + str(p_price))\n\n num_p_sold = 0\n num_p_expired = 0\n num_p_unlisted = 0\n num_p_listed = 0\n\n sold_p_value = 0\n expired_p_value = 0\n unlisted_p_value = 0\n listed_p_value = 0\n\n for p in players:\n p_bidstatus = p[1]\n p_id = p[8]\n p_soldprice = p[5] # is 0 if unlisted\n p_sellprice = int(p_ids_and_prices[p_id])\n\n if \"won\" in p_bidstatus:\n num_p_sold += 1\n sold_p_value += p_soldprice\n if \"expired\" in p_bidstatus:\n num_p_expired += 1\n expired_p_value += p_sellprice\n if (p_bidstatus == \"listFUTItem\"):\n num_p_unlisted += 1\n unlisted_p_value += p_sellprice\n if (p_bidstatus == \"listFUTItem has-auction-data\"):\n num_p_listed += 1\n listed_p_value += p_sellprice\n\n log_event(self.queue, \"Players sold: \" + str(num_p_sold))\n log_event(self.queue, \"Players expired: \" + str(num_p_expired))\n log_event(self.queue, \"Players listed: \" + str(num_p_listed))\n log_event(self.queue, \"Players unlisted: \" + str(num_p_unlisted))\n log_event(self.queue, \" - - - \")\n log_event(self.queue, \"Sold players value: \" + str(sold_p_value))\n log_event(self.queue, \"Expired players value: \" +\n str(expired_p_value))\n log_event(self.queue, \"Unlisted players value: \" +\n str(unlisted_p_value))\n log_event(self.queue, \"Listed 
players value: \" + str(listed_p_value))\n\n # TODO subtract bought price\n self.user_players_won += int(num_p_unlisted)\n self.p_ids_and_prices = p_ids_and_prices\n intel = [p_ids_and_prices, num_p_sold, num_p_expired, num_p_unlisted,\n num_p_listed, sold_p_value, expired_p_value, unlisted_p_value, listed_p_value]\n return intel", "def _profile_function(function, profiles, game):\n return [function(game, prof) for prof in profiles] # pragma: no cover", "def result_array(self) -> np.ndarray:\n return np.array([r[\"time\"] for r in self.profile_result])", "def get_account_transactions(self, StartTime, EndTime):\n params = clean_locals(locals())\n date_time_sent = datetime.datetime.utcnow()\n response = self.request('ListAccountPostings', params, secure=True)\n data = self.process_response(response, date_time_sent, None)\n return parse_account_postings(data.get('data', {})) if data.get('data') else {}", "def _parse_account_transactions(self, body):\n\n transactions = []\n\n soup = BeautifulSoup(body, 'html.parser')\n for row in soup.select('.history.data-list-wrapper-inner tr'):\n transaction = {\n 'date': row.select('td')[1].text,\n 'type': row.select('td')[2].select('span')[0].text,\n 'text': row.select('td')[2].select('div')[0].text,\n 'amount': self._fix_balance(row.select('td')[3].text)\n }\n transactions.append(transaction)\n\n return transactions", "def get_transactions():\n\n wallet = \"TTfoWGU2M939cgZm8CksPtz1ytJRM9GiN7\"\n\n url = \"https://api.trongrid.io/v1/accounts/{}/transactions\".format(wallet)\n\n response = requests.request(\"GET\", url)\n\n print(response.text)", "def get_transaction_data():\n data = parse_json()\n income_instances = create_transactions(data['incomes'])\n expense_instances = create_transactions(data['expenses'])\n for expense in expense_instances:\n expense.amount = -(expense.amount)\n transactions = income_instances + expense_instances\n return transactions", "def get_times(self):\n times = []\n for i in range(1, len(self.events)):\n times.append(self.events[i-1].elapsed_time(self.events[i]))\n return times", "def listunspent(self, minconf=1, maxconf=999999):\n return [TransactionInfo(**tx) for tx in\n self.proxy.listunspent(minconf, maxconf)]", "def transactions(self):\r\n return tx.AccountTransactions(self)", "def ordered_profiles(self):\n l = []\n self.ordered_profiles_helper(l)\n return l", "def transactions(self):\n return self._call_account_method(\n 'transactions'\n )", "def get_tx_history(account_id, total):\n query = iroha.query(\"GetTransactions\", account_id=account_id, page_size=total)\n ic.sign_query(query, user_private_key)\n response = net.send_query(query)\n data = MessageToDict(response)\n pprint(data, indent=2)", "def get_transaction_list(self,\n address: str,\n start_block: Optional[int] = None,\n end_block: Optional[int] = None) -> Tuple[Transaction, ...]:\n ...", "def sum_transactions(profile):\n total = 0\n try:\n transactions = profile['$properties']['$transactions']\n for t in transactions:\n total = total + t['$amount']\n except KeyError:\n pass\n return {'Revenue': total}", "def profile(x):\n return x" ]
[ "0.5761275", "0.5728272", "0.56845474", "0.56802225", "0.567081", "0.5665692", "0.5645627", "0.5556072", "0.5549113", "0.54833037", "0.5478709", "0.5462746", "0.54535574", "0.54297113", "0.5412375", "0.54095745", "0.5403506", "0.5382719", "0.537519", "0.53725266", "0.53631335", "0.53415024", "0.53255713", "0.5323948", "0.5323295", "0.5282732", "0.52778256", "0.5263226", "0.5248104", "0.5239888" ]
0.64055634
0
counts the number of infinities within a 1D matrix or list
def countInfinites(mat):\n isFinite = np.all(np.isfinite(mat))\n count = 0\n indices = []\n if not isFinite:\n for i in range(0,len(mat)):\n if mat[i] in [-np.inf,np.inf]:\n count+=1\n indices.append(i)\n return count, indices
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def numSpecial(self, mat: list[list[int]]) -> int:\n ans = 0\n col_cache = {}\n for row in mat:\n # print(row)\n ones = []\n for i, n in enumerate(row):\n if n == 1:\n ones.append(i)\n # print(ones)\n if len(ones) == 1:\n j = ones[0]\n cols = [row[j] for row in mat]\n s = col_cache.get(j, sum(cols))\n col_cache[j] = s\n if s == 1:\n ans += 1\n return ans", "def count_noninf(multilayer):\n out = 0\n for x in multilayer:\n out = out + 0 if numpy.isinf(x.thickness) else out + 1\n return out", "def nancnt_nb(a):\n out = np.empty(a.shape[1], dtype=np.float_)\n for col in range(a.shape[1]):\n out[col] = np.sum(~np.isnan(a[:, col]))\n return out", "def count_each_tag(mat):\n cnts = {}\n for vec in mat:\n if vec[-1] not in cnts:\n cnts[vec[-1]] = 0.0\n cnts[vec[-1]] += 1.0\n return cnts", "def counts(e, x):\n arr = np.asarray(arr)\n return len(np.where(arr == x)[0])", "def observed_species(counts):\n return (counts!=0).sum()", "def main():\n row, col, island = make_matrix()\n print(count_island(row, col, island))", "def count_pegs(self):\r\n count = 0\r\n\r\n for i in range(0, len(self.matrix)):\r\n for j in range(0, len(self.matrix[i])):\r\n if self.matrix[i][j] == \"1\":\r\n count += 1\r\n\r\n return count", "def flagser_contain(adjacency_matrix):\n N=adjacency_matrix.shape[0]\n row,col=convertCOO(adjacency_matrix,ret_data=False)\n return compute_cell_count(N, np.transpose(np.array( (row,col))))", "def count_NN(KL):\n zvals = (KL != 0).sum(1)\n return zvals", "def count_nonzero(a):\n return (np.count_nonzero(a))", "def nfactors(self):\n return self.L.nnz", "def totlen(inputlist):\n tot = 0.0\n for x in inputlist:\n if isinstance(x, matrix):\n tot += totlen(x.getitems())\n else:\n try:\n test = len(x)\n except:\n tot += 1.0\n else:\n tot += test\n return tot", "def num_linearly_ind_features(self, S, eps=1e-11):\n return len(S[S >= eps])", "def count_ones(self):\r\n count = 0\r\n for x in range(self.xspan):\r\n for y in range(self.yspan):\r\n if (self.cells[x][y] == 1):\r\n count = count + 1\r\n return count", "def count_element (input_list):\n counter = 0\n for dummy_i in input_list:\n counter +=1\n return counter", "def __calc_empirical_counts__(self):\n self.empirical_counts = np.zeros(self._features_vector_length, dtype=float)\n for feature, freq in self.features_dict.items():\n for index in feature:\n self.empirical_counts[index] += freq\n assert len(self.empirical_counts) == np.count_nonzero(self.empirical_counts), \"0 in empirical counts vector\"", "def detect_num_feats_in_image(img, **kwargs):\n # We dont need to find vectors at all here\n kwargs['only_count'] = True\n #kwargs['only_count'] = False\n #Valid keyword arguments are: + str(HESAFF_PARAM_DICT.keys())\n hesaff_ptr = _new_image_hesaff(img, **kwargs)\n if __DEBUG__:\n print('[hes] detect')\n # Get num detected\n nKpts = HESAFF_CLIB.detect(hesaff_ptr)\n HESAFF_CLIB.free_hesaff(hesaff_ptr)\n return nKpts", "def count_ge_one(array):\r\n return numpy.count_nonzero(array >= 1)", "def count_islands(matrix):\n visited = init_visited(matrix)\n num_islands = 0\n for i in range(len(matrix)):\n for j in range(len(matrix)):\n if matrix[i][j] and not visited[i][j]:\n check_neighbours(matrix, (i, j), visited)\n num_islands += 1\n # print(visited)\n return num_islands", "def count(x):\n return sum(np.asarray(x).astype(bool))", "def count_nonzero(tensor):\n raise NotImplementedError", "def count_reduce_nb(col, a, *args):\n return np.sum(~np.isnan(a))", "def count_island(row, col, island):\n count = 0\n for i in range(row):\n for j in 
range(col):\n count = count + floodfill(i, j, row, col, island)\n return count", "def n_ins(self):\n pass", "def _get_observation_count(self):\n observation_count = 0\n for sequence in self.seq_list:\n observation_count += sequence.shape[0] \n \n return observation_count", "def NNZ(self):\n return len(self.__IndList)", "def count_matrix(datasets, labels):\n \n fn = lambda fd, axis: fd.shape[0]\n return fn_matrix(datasets, fn, axes = None, label = labels)", "def test_counts(self):\n c = array([5,0,1,1,5,5])\n obs = counts(c)\n exp = array([1,2,0,0,0,3])\n self.assertEqual(obs, exp)\n d = array([2,2,1,0])\n obs = counts(d, obs)\n exp = array([2,3,2,0,0,3])\n self.assertEqual(obs, exp)", "def nsites(self) -> int:\n return len(self.A)" ]
[ "0.6742529", "0.6486157", "0.6432761", "0.63083744", "0.6287594", "0.6236425", "0.6225274", "0.6219248", "0.6169383", "0.61690223", "0.6164067", "0.61547816", "0.6121042", "0.6110688", "0.6110112", "0.60931724", "0.60797334", "0.60647225", "0.6044696", "0.6028637", "0.6010755", "0.59888077", "0.59780794", "0.59732264", "0.5963566", "0.59614885", "0.5958164", "0.59539294", "0.59389395", "0.59049624" ]
0.7966337
0
removes infinities from a matrix returns a matrix with the infinities replaced with the average of the matrix values
def removeInfinities(mat): isFinite = np.all(np.isfinite(mat)) if not isFinite: nrow, ncol = mat.shape matCopy = mat.copy() matReshaped = matCopy.reshape(-1) minVal = np.nanmin(matReshaped[matReshaped != -np.inf]) maxVal = max(matReshaped) #count infinities and get their indicies #countInfinites(matReshaped) #replace all infinities with nan for i in range(0,len(matReshaped)): if matReshaped[i] in [-np.inf, np.inf]: matReshaped[i] = minVal ''' #get average of matrix values and replace the nan with averages def averageVal(array): total = 0 count = 0 for elmt in array: if elmt != -90001: total+=elmt count+=1 return total/count average = averageVal(matReshaped) #replace nan with average for i in range(0,len(matReshaped)): if matReshaped[i] == -90001: matReshaped[i] = average ''' #catch any potential errors if np.any(np.isinf(matReshaped)): raise ValueError return matReshaped.reshape((nrow, ncol)) else: return mat
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def interpolation_matrix(m):\n return np.nanmean(m,axis=1)", "def matrix_mean(matrix):\n return sum(map(mean,matrix))", "def de_mean_matrix(a_matrix):\n nr, nc = shape(a_matrix)\n column_means, _ = scale(a_matrix)\n return make_matrix(nr, nc, lambda i, j: a_matrix[i][j] - column_means[j])", "def fmean(field):\n warnings.simplefilter(\"ignore\")\n return np.nanmean(field, axis=3, keepdims=True)", "def gaussian_elim(mat):\n up_mat = np.array(mat, dtype=float)\n n = up_mat.shape[0]\n for r in range(0,n-1):\n for rr in range(r+1, n):\n try:\n ratio = up_mat[rr][r] / up_mat[r][r]\n except ZeroDivisionError:\n print(\"zero\")\n continue\n for i in range(r,n):\n up_mat[rr][i] -= up_mat[r][i] * ratio\n return up_mat", "def fix_matrix_gauge(emat):\n # fix mean\n for j in range(emat.shape[1]):\n emat[:,j] = emat[:,j] -sp.mean(emat[:,j])\n # fix sum of variances equal to length of matrix\n svar = np.sum(np.var(emat,axis=0))\n emat = sp.sqrt(emat.shape[1])*emat/sp.sqrt(svar)\n return emat", "def de_mean_matrix(A):\n nr, nc = shape(A)\n column_means, _ = scale(A)\n return make_matrix(nr, nc, lambda i, j: A[i][j] - column_means[j])", "def normalize_matrix(matrix):\n\n nrows = matrix.shape[0]\n for col in xrange(matrix.shape[1]):\n tot = float(sum(matrix[:,col]))\n \n for row in xrange(nrows):\n try:\n matrix[row][col] = matrix[row][col]/tot\n except ZeroDivisionError:\n pass\n return matrix", "def replace_nan(data):\r\n lst_ind = np.array(['valence_intensity', 'anger_intensity',\r\n 'fear_intensity', 'sadness_intensity', 'joy_intensity'])\r\n for i in lst_ind:\r\n native = data[:][i]\r\n avg = np.nanmean(native)\r\n data[:][i] = np.where(np.isnan(native), avg, native)\r\n return data", "def nanmean_nb(a):\n out = np.empty(a.shape[1], dtype=np.float_)\n for col in range(a.shape[1]):\n out[col] = np.nanmean(a[:, col])\n return out", "def normalize_matrix(mat):\n return (mat + abs(mat.min())) / (mat.max() - mat.min())", "def mean(data_matrix):\n return np.asmatrix(np.mean(data_matrix, axis=0))", "def _subtract_row_mean(A):\n assert type(A) is csr_matrix, \"The given argument should be of type scipy.sparse.csr_matrix\"\n\n sum_rows = np.array(A.sum(axis=1).squeeze())[0]\n size_rows = np.diff(A.indptr)\n avg_rows = np.divide(sum_rows, size_rows, where=size_rows != 0)\n avg_diag_matrix = diags(avg_rows, 0)\n ones_matrix = A.copy()\n ones_matrix.data = np.ones_like(A.data)\n\n return A - avg_diag_matrix * ones_matrix", "def scipy_nanmean(x, axis=0):\n x, axis = _chk_asarray(x,axis)\n x = x.copy()\n Norig = x.shape[axis]\n factor = 1.0-np.sum(np.isnan(x),axis)*1.0/Norig\n\n x[np.isnan(x)] = 0\n return np.mean(x,axis)/factor", "def normalisation_l_inf(x):\n res = np.zeros(x.shape)\n for i in range(x.shape[0]):\n for j in range(x.shape[1]):\n res[i,j] = x[i,j]/(np.max(x[i,j])+1e-5)\n return(res)", "def mean_impute(self, column_val):\n mean = np.mean(column_val)\n column_val = column_val.fillna(mean)\n return column_val", "def normalize_row(input_matrix):\n\n # print(\"input:\", input_matrix)\n # print(\"row sum:\", row_sums)\n row_sums = np.nan_to_num(input_matrix).sum(axis=1, keepdims=True)\n # print(\"row sum:\", row_sums)\n\n #new_matrix = input_matrix / row_sums if np.isscalar(row_sums) else input_matrix / row_sums[:, np.newaxis]\n new_matrix = np.divide(input_matrix, row_sums)\n return np.nan_to_num(new_matrix)", "def normalize_mat(mat):\n # Gather matrix columns indices.\n gather_split_i = 0\n gather_avg_5_i = 1\n gather_avg_25_i = 2\n gather_avg_50_i = 3\n gather_volume_i = 4\n gather_dates_indicator_i 
= 5\n # Normalize prices. We want to keep relationship between prices\n # (eg. avg_5 > split) untouched, so we use single set of max and mean for\n # split and all averages.\n prices_indices = [\n gather_split_i, gather_avg_5_i, gather_avg_25_i, gather_avg_50_i\n ]\n mat[:, prices_indices] /= np.max(mat[:, prices_indices])\n mat[:, prices_indices] *= 2\n mat[:, prices_indices] -= np.mean(mat[:, prices_indices])\n # Normalize volume.\n mat[:, gather_volume_i] /= np.max(mat[:, gather_volume_i])\n mat[:, gather_volume_i] *= 2\n mat[:, gather_volume_i] -= np.mean(mat[:, gather_volume_i])\n # Subtract 1.0 from dates indicator multiplied by 2.0 as it is already in\n # range 0.0, 1.0 and we don't want characteristic values to vary between\n # matrices as it is data outside of one company scope.\n dates_indicator_mean = 1.0\n mat[:, gather_dates_indicator_i] *= 2\n mat[:, gather_dates_indicator_i] -= dates_indicator_mean\n return mat", "def StandardizeMatrix(mat):\n nObjs = len(mat)\n avgs = sum(mat,0)/float(nObjs)\n mat -= avgs\n devs =sqrt(sum(mat*mat,0)/(float(nObjs-1)))\n try:\n newMat = mat/devs\n except OverflowError:\n newMat = numpy.zeros(mat.shape,'d')\n for i in range(mat.shape[1]):\n if devs[i] != 0.0:\n newMat[:,i] = mat[:,i]/devs[i]\n return newMat", "def remove_mean(image):\n mean = [0.48462227599918, 0.45624044862054, 0.40588363755159]\n image = image.astype(np.float32)\n image = np.subtract(np.divide(image, 255.0), mean)\n return image", "def K_nanmean_infmean(tensor):\n notnan = K.cast((~tf.math.is_nan(tensor)) & (~tf.math.is_inf(tensor)), \"float32\")\n num_notnan = K.sum(K.flatten(notnan))\n\n nonan = K.cast(\n tf.where((~tf.math.is_nan(tensor)) & (~tf.math.is_inf(tensor)),\n tensor,\n tf.zeros_like(tensor)), \"float32\"\n )\n\n loss = K.sum(nonan) / num_notnan\n\n return loss#tf.where(~tf.math.is_inf(loss), loss, 0)", "def algo(GENE_VALUES_MATRIX):\n\n\tA = GENE_VALUES_MATRIX\n\n\tAA = np.zeros_like(A)\n\n\tI = np.argsort(A,axis=0)\n\n\tAA[I,np.arange(A.shape[1])] = np.mean(A[I,np.arange(A.shape[1])],axis=1)[:,np.newaxis]\n\n\treturn AA", "def demean_normalize(one_d_array):\n\n temp_arr = one_d_array - np.nanmean(one_d_array)\n\n return temp_arr/np.nanstd(temp_arr)", "def mntd(distmat):\r\n return masked_array(distmat, eye(distmat.shape[0])).min(0).mean()", "def safemean(xs):\n return np.nan if len(xs) == 0 else np.mean(xs)", "def normalize(input_matrix):\n\n row_sums = input_matrix.sum(axis=1)\n try:\n assert (np.count_nonzero(row_sums)==np.shape(row_sums)[0]) # no row should sum to zero\n except Exception:\n raise Exception(\"Error while normalizing. 
Row(s) sum to zero\")\n new_matrix = input_matrix / row_sums[:, np.newaxis]\n return new_matrix", "def forward_avg(array_in):\n return (array_in[:-1] + array_in[1:]) * 0.5", "def normalize_matrix(matrix, min_val, max_val):\n return (max_val - min_val) * (matrix - np.min(matrix)) / (np.max(matrix) - np.min(matrix)) + min_val", "def normalize_col(input_matrix):\n\n col_sums = np.nan_to_num(input_matrix).sum(axis=0, keepdims=True)\n\n #new_matrix = input_matrix / col_sums if np.isscalar(col_sums) else input_matrix / col_sums[np.newaxis, :]\n new_matrix = np.divide(input_matrix, col_sums)\n return np.nan_to_num(new_matrix)", "def StandardizeMatrix(mat):\n nObjs = len(mat)\n avgs = sum(mat, 0) / float(nObjs)\n mat -= avgs\n devs = math.sqrt(sum(mat * mat, 0) / (float(nObjs - 1)))\n try:\n newMat = mat / devs\n except OverflowError:\n newMat = numpy.zeros(mat.shape, 'd')\n for i in range(mat.shape[1]):\n if devs[i] != 0.0:\n newMat[:, i] = mat[:, i] / devs[i]\n return newMat" ]
[ "0.6844182", "0.64544636", "0.6360755", "0.61718273", "0.6136079", "0.6116416", "0.61018276", "0.6037287", "0.5991986", "0.5922164", "0.58949125", "0.58504534", "0.58174664", "0.5794598", "0.57463723", "0.5705612", "0.56955564", "0.5682768", "0.5673174", "0.5667653", "0.56648284", "0.5664376", "0.56275505", "0.55934644", "0.559187", "0.5591004", "0.55685467", "0.5567082", "0.55636305", "0.5561645" ]
0.7850853
0
Converts the matrix to the equivalent matrix of the unsigned 8 bit integer datatype Returns the equivalent uint8 matrix
def make8UC(mat):\n mat_256 = mat[:,:]# *255\n mat_256 = mat_256.round()\n mat_8UC = np.uint8(mat_256)\n \n return mat_8UC
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make8UC3(mat):\n mat_8UC = make8UC(mat)\n mat_8UC3 = np.stack((mat_8UC,)*3, axis = -1)\n \n return mat_8UC3", "def to_uint8(image):\n\tnp.clip(image, 0, 255, out=image)\n\treturn image.astype(np.uint8)", "def float32_to_uint8(inputs):\n return np.uint8(np.clip(np.round(inputs * 255), 0, 255))", "def bool_2_uint8(bool_arr):\n assert (bool_arr.dtype == 'bool'), \\\n 'improc.bool_2_uint8() only accepts boolean arrays.'\n return (255*bool_arr).astype('uint8')", "def matrix2bytes(matrix):\n return bytes(sum(matrix, []))", "def one_2_uint8(one_arr):\n assert (one_arr.dtype == 'float' and np.max(one_arr <= 1.0)), \\\n 'improc.one_2_uint8() only accepts floats arrays from 0 to 1.'\n return (255*one_arr).astype('uint8')", "def read_uint8(self):\n return self.read(BitTypes.UINT8.value)", "def rgb_matrix_to_image(matrix):\n matrix = np.clip(matrix, 0, 1)\n rgb_image = (matrix * 255).astype(\"uint8\")\n return rgb_image", "def to_uint8(f):\n from numpy import array, clip, uint8\n\n img = array(clip(f,0,255),uint8)\n return img", "def convert_image_np(inp):\n inp = inp.numpy().transpose((1, 2, 0))\n inp = (inp*255).astype(np.uint8)\n return inp", "def read_uint8(self):\n bytes = self.data[:1]\n value = struct.unpack('!B',bytes)[0]\n self.data = self.data[1:]\n return value", "def to_nibble_array(arr: ndarray) -> ndarray:\n arr = arr.ravel()\n return (arr[::2] + (arr[1::2] << 4)).astype(\"uint8\")", "def Read_uInt8s(self,Address,Register,Number=0):\n pass", "def as_matrix(self) -> types.Matrix:", "def uint8_t(n):\n return int(n).to_bytes(1, byteorder='little', signed=False)", "def read_U8(self, register):\n raise NotImplementedError", "def conv8bitToInt8(byte): \n\n if byte > 127:\n return (256-byte) * (-1)\n else:\n return byte", "def to_uint(tensor_0to1, target_type='uint8'):\n if isinstance(tensor_0to1, tf.Tensor):\n target_type = tf.as_dtype(target_type)\n tensor_0to1 = _clip_0to1_warn(tensor_0to1)\n tensor_uint = tf.cast(tensor_0to1 * target_type.max, target_type)\n else:\n tensor_0to1 = _clip_0to1_warn(tensor_0to1)\n tensor_uint = (np.iinfo(target_type).max * tensor_0to1).astype(\n target_type)\n return tensor_uint", "def unpack_uint8(data):\n value = unpack(DecodeUtils.UINT8_BYTE_FORMAT, data[:1])[0]\n return value, 1", "def asi8(self) -> None:\n # Always returns None for MultiIndex\n return None", "def _convert_matrix(m):\n\n return [m[0][0], m[0][1], m[0][2], m[0][3],\n m[2][0], m[2][1], m[2][2], m[2][3],\n -m[1][0], -m[1][1], -m[1][2], -m[1][3],\n m[3][0], m[3][1], m[3][2], m[3][3]]", "def transpose_2d_int8_tensor(tensor: onnx_proto.TensorProto):\n if not isinstance(tensor, onnx_proto.TensorProto):\n raise ValueError(\"Expected input type is an ONNX TensorProto but got %s\" % type(tensor))\n\n if len(tensor.dims) != 2 or tensor.data_type != onnx_proto.TensorProto.INT8:\n raise ValueError(\"Only INT8 2-D tensors can be transposed\")\n\n if tensor.raw_data:\n int32_data = numpy.reshape(numpy.frombuffer(tensor.raw_data, dtype=\"int8\"), tensor.dims)\n int32_transposed_data = numpy.transpose(int32_data, [1, 0])\n tensor.raw_data = int32_transposed_data.tobytes()\n\n else:\n raise ValueError(\"only raw buffer supported\")\n\n return tensor", "def test_to_from_matrix(self):\n # The equality is only guaranteed up to a sign\n converted = rowan.from_matrix(rowan.to_matrix(input1))\n self.assertTrue(\n np.all(\n np.logical_or(\n np.isclose(input1 - converted, 0),\n np.isclose(input1 + converted, 0),\n )\n )\n )", "def get_u(m):\n U = m.copy()\n for i in range(1, U.shape[0]):\n U[i, :i] = 0\n 
return U", "def test_round_trip_conversion(self):\n\n for num_qubits in range(1, 5):\n # Since we know the implementations don't depend on well-formed input data,\n # just generate a random d^2 x d^2 matrix as test data for all conversions.\n dim = 2**(2 * num_qubits)\n mat = np.random.rand(dim, dim)\n assert_allclose(mat, vec2mat(mat2vec(mat)))\n assert_allclose(liou2choi(choi2liou(mat)), mat)\n assert_allclose(choi2liou(liou2choi(mat)), mat)", "def rgb2yiq(imRGB):\n return np.dot(imRGB, np.array(MATRIX).T)", "def to_matrix(self):\n return self.to_operator().data", "def matrixRepresentation(self,decimals=8):\n temp = self.circuit.copy()\n temp.remove_final_measurements()\n \n simulator = Aer.get_backend('unitary_simulator')\n result = execute(temp, backend=simulator).result()\n unitary = result.get_unitary(decimals=decimals).tolist()\n for i in range(len(unitary)):\n for j in range(len(unitary[i])):\n if unitary[i][j]==0:\n unitary[i][j]=\"0\"\n else:\n string=str(unitary[i][j].real).replace(\".0\", \"\")\n string=\"\" if unitary[i][j].real==0 else string\n string+=self.numberFormat(unitary[i][j].imag,True)\n unitary[i][j]=string.lstrip(\"+\")\n return unitary", "def GetBits8(self):\n raw_data = self.GetBits(8)[0]\n arg = \"%c%c%c%c\" % (0,0, 0, raw_data[0])\n return struct.unpack(\">L\", arg)[0]", "def as_matrix(self, fore=1, back=0):\n return tuple(\n tuple(fore if _c else back for _c in _row)\n for _row in self._rows\n )" ]
[ "0.7031675", "0.6641266", "0.62361336", "0.6099381", "0.593577", "0.59259516", "0.58690447", "0.5846691", "0.5846227", "0.57782173", "0.5744485", "0.5635741", "0.5600039", "0.5564243", "0.55328864", "0.5520065", "0.5480853", "0.5471177", "0.54408866", "0.54239094", "0.54226804", "0.5407388", "0.5397264", "0.5378646", "0.5340886", "0.5335776", "0.5332118", "0.52768105", "0.5271386", "0.5241911" ]
0.7921637
0
Converts the matrix to the equivalent matrix of the unsigned 8 bit integer datatype with 3 channels Returns the equivalent uint8 matrix
def make8UC3(mat): mat_8UC = make8UC(mat) mat_8UC3 = np.stack((mat_8UC,)*3, axis = -1) return mat_8UC3
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make8UC(mat):\n mat_256 = mat[:,:]# *255\n mat_256.round()\n mat_8UC = np.uint8(mat_256)\n \n return mat_8UC", "def float32_to_uint8(inputs):\n return np.uint8(np.clip(np.round(inputs * 255), 0, 255))", "def to_uint8(image):\n\tnp.clip(image, 0, 255, out=image)\n\treturn image.astype(np.uint8)", "def convert_image_np(inp):\n inp = inp.numpy().transpose((1, 2, 0))\n inp = (inp*255).astype(np.uint8)\n return inp", "def rgb_matrix_to_image(matrix):\n matrix = np.clip(matrix, 0, 1)\n rgb_image = (matrix * 255).astype(\"uint8\")\n return rgb_image", "def to_nibble_array(arr: ndarray) -> ndarray:\n arr = arr.ravel()\n return (arr[::2] + (arr[1::2] << 4)).astype(\"uint8\")", "def bool_2_uint8(bool_arr):\n assert (bool_arr.dtype == 'bool'), \\\n 'improc.bool_2_uint8() only accepts boolean arrays.'\n return (255*bool_arr).astype('uint8')", "def to_uint8(f):\n from numpy import array, clip, uint8\n\n img = array(clip(f,0,255),uint8)\n return img", "def one_2_uint8(one_arr):\n assert (one_arr.dtype == 'float' and np.max(one_arr <= 1.0)), \\\n 'improc.one_2_uint8() only accepts floats arrays from 0 to 1.'\n return (255*one_arr).astype('uint8')", "def transpose_2d_int8_tensor(tensor: onnx_proto.TensorProto):\n if not isinstance(tensor, onnx_proto.TensorProto):\n raise ValueError(\"Expected input type is an ONNX TensorProto but got %s\" % type(tensor))\n\n if len(tensor.dims) != 2 or tensor.data_type != onnx_proto.TensorProto.INT8:\n raise ValueError(\"Only INT8 2-D tensors can be transposed\")\n\n if tensor.raw_data:\n int32_data = numpy.reshape(numpy.frombuffer(tensor.raw_data, dtype=\"int8\"), tensor.dims)\n int32_transposed_data = numpy.transpose(int32_data, [1, 0])\n tensor.raw_data = int32_transposed_data.tobytes()\n\n else:\n raise ValueError(\"only raw buffer supported\")\n\n return tensor", "def matrix2bytes(matrix):\n return bytes(sum(matrix, []))", "def rgb2yiq(imRGB):\n return np.dot(imRGB, np.array(MATRIX).T)", "def test_quantize_conv_transpose_u8u8(self):\n\n np.random.seed(1)\n model_fp32_path = \"conv_transpose_fp32.onnx\"\n self.construct_model(model_fp32_path)\n data_reader = self.input_feeds(1, {\"input\": [1, 1, 7, 7]})\n\n self.static_quant_test_qdq(\n model_fp32_path,\n data_reader,\n activation_type=QuantType.QUInt8,\n weight_type=QuantType.QUInt8,\n )", "def _convert_matrix(m):\n\n return [m[0][0], m[0][1], m[0][2], m[0][3],\n m[2][0], m[2][1], m[2][2], m[2][3],\n -m[1][0], -m[1][1], -m[1][2], -m[1][3],\n m[3][0], m[3][1], m[3][2], m[3][3]]", "def image_to_matrix(image):\n\n\tpic = np.array([t[0] for t in image.getdata()]).reshape(image.size[1],image.size[0])\n\n\treturn pic", "def as_matrix(self) -> types.Matrix:", "def convert_images_from_uint8(images, drange=[-1,1], nhwc_to_nchw=False):\n if nhwc_to_nchw:\n imgs_roll=np.rollaxis(images, 3, 1)\n return imgs_roll/ 255 *(drange[1] - drange[0])+ drange[0]", "def form_triu_matrix(arr):\n n = int(np.ceil((np.sqrt(1 + 8 * len(arr)) - 1) * 0.5))\n M = np.zeros((n, n))\n c = 0\n for i in range(n):\n for j in range(n):\n if j >= i:\n if c < len(arr):\n M[i, j] = arr[c]\n c += 1\n else:\n break\n return M", "def read_uint8(self):\n bytes = self.data[:1]\n value = struct.unpack('!B',bytes)[0]\n self.data = self.data[1:]\n return value", "def rgb2yiq(imRGB):\n return np.dot(imRGB, TRANSFORM.T.copy())", "def convertToMatrix(path):\n\n\t\tmultitrack = Multitrack(path, beat_resolution=Converter.beat_resolution, name=os.path.basename(path))\n\n\t\t#Merge into 4 tracks\n\t\tmergedTracks = 
Converter._merge(multitrack)\n\t\t\n\t\t#merged.save(os.path.join(converter_path, midi_name + '.npz'))\n\t\tmergedTracks.binarize()\n\t\tret = mergedTracks.get_stacked_pianoroll()\n\t\treturn ret", "def yiq2rgb(imYIQ):\n return np.dot(imYIQ, np.linalg.inv(np.array(MATRIX).T))", "def uint8_to_float(im: np.array):\n if im.dtype == np.float32:\n warnings.warn(\"Image is already np.float32\")\n return im\n im = im.astype(np.float32) / 255\n return im", "def uint8_to_float(im: np.array):\n if im.dtype == np.float32:\n warnings.warn(\"Image is already np.float32\")\n return im\n im = im.astype(np.float32) / 255\n return im", "def read_uint8(self):\n return self.read(BitTypes.UINT8.value)", "def matrix_3d_to_4x4(matrix: np.matrix) -> np.matrix:\n return np.matrix([\n [matrix.item(0, 0), matrix.item(0, 1), matrix.item(0, 2), 0],\n [matrix.item(1, 0), matrix.item(1, 1), matrix.item(1, 2), 0],\n [matrix.item(2, 0), matrix.item(2, 1), matrix.item(2, 2), 0],\n [0, 0, 0, 1]])", "def matrix_2d_to_3d(matrix: np.matrix) -> np.matrix:\n return np.matrix([\n [matrix.item(0, 0), matrix.item(0, 1), 0, matrix.item(0, 2)],\n [matrix.item(1, 0), matrix.item(1, 1), 0, matrix.item(1, 2)],\n [0, 0, 1, 0],\n [matrix.item(2, 0), matrix.item(2, 1), 0, matrix.item(2, 2)]])", "def test_quantize_conv_transpose_s8s8(self):\n\n np.random.seed(1)\n model_fp32_path = \"conv_transpose_fp32.onnx\"\n self.construct_model(model_fp32_path)\n data_reader = self.input_feeds(1, {\"input\": [1, 1, 7, 7]})\n\n self.static_quant_test_qdq(\n model_fp32_path,\n data_reader,\n activation_type=QuantType.QInt8,\n weight_type=QuantType.QInt8,\n extra_options={\"ActivationSymmetric\": True},\n )", "def translation_from_matrix(matrix):\r\n return numpy.array(matrix, copy=False)[:3, 3].copy()", "def test_round_trip_conversion(self):\n\n for num_qubits in range(1, 5):\n # Since we know the implementations don't depend on well-formed input data,\n # just generate a random d^2 x d^2 matrix as test data for all conversions.\n dim = 2**(2 * num_qubits)\n mat = np.random.rand(dim, dim)\n assert_allclose(mat, vec2mat(mat2vec(mat)))\n assert_allclose(liou2choi(choi2liou(mat)), mat)\n assert_allclose(choi2liou(liou2choi(mat)), mat)" ]
[ "0.80124557", "0.6552259", "0.65122837", "0.6258835", "0.60967845", "0.60502964", "0.6043003", "0.59487987", "0.59082747", "0.5775716", "0.5764657", "0.5682428", "0.5681985", "0.56541044", "0.5578906", "0.5575298", "0.5475508", "0.5466485", "0.5463385", "0.5410142", "0.5387199", "0.5386014", "0.5366366", "0.5366366", "0.53650105", "0.53537387", "0.53145206", "0.5314464", "0.53116596", "0.53111" ]
0.78152
1
Do watershed segmentation on a non noisy binary image Returns the image with the nuclei segmented
def watershed(mask, img, plotImage = False, kernelSize = None): imgCopy = img.copy() maskCopy = np.array(mask.copy(), dtype=np.uint8) if kernelSize is None: kernelSize = 2 # Finding sure foreground area #dist_transform = cv2.distanceTransform(mask, cv2.DIST_L2, 5) #ret, sure_fg = cv2.threshold(dist_transform,0.3*dist_transform.max(),255,0) #change the second argument to change the sensitivity maskClosed = skimage.morphology.closing(np.array(maskCopy, dtype=np.uint8)) maskClosed = skimage.morphology.closing(np.array(maskClosed, dtype=np.uint8)) kernel = np.ones((kernelSize,kernelSize), np.uint8) # maskCopy = img_as_bool(maskCopy) sure_fg = cv2.erode(maskClosed, kernel, iterations = 2) ### sure_fg = skimage.morphology.closing(np.array(sure_fg, dtype=np.uint8)) # kernel = np.ones((2,2), np.uint8) # sure_fg = binary_closing(sure_fg, kernel) # sure background area #kernel = np.ones((5, 5), np.uint8) #sure_bg = cv2.dilate(mask, kernel, iterations = 1) sure_fg_bool = 1 - img_as_bool(sure_fg) # sure_bg = np.uint8(1 - morphology.medial_axis(sure_fg_bool)) ### sure_bg = np.uint8(1 - morphology.skeletonize(sure_fg_bool)) sure_bg[0, :] = 1 sure_bg[-1, :] = 1 sure_bg[:, 0] = 1 sure_bg[:, -1] = 1 # Finding unknown region sure_fg = np.uint8(sure_fg) unknown = cv2.subtract(sure_bg, sure_fg) if plotImage: plt.figure() plt.imshow(sure_fg) plt.title("Inner Marker") plt.figure() plt.imshow(sure_bg) plt.title("Outer Marker") plt.figure() plt.imshow(unknown) plt.title("Unknown") # Marker labelling ret, markers = cv2.connectedComponents(sure_fg) # Add one to all labels so that sure background is not 0, but 1 markers = markers+1 # Now, mark the region of unknown with zero markers[unknown==1] = 0 if plotImage: plt.figure() plt.imshow(markers, cmap='jet') plt.title("Markers") # Do watershed markers = cv2.watershed(imgCopy, markers) imgCopy[markers == -1] = [0, 255 ,0] if plotImage: plt.figure() plt.imshow(markers,cmap='jet') plt.title("Mask") plt.figure() plt.imshow(img) plt.title("Original Image") plt.figure() plt.imshow(imgCopy) plt.title("Marked Image") plt.show() return markers
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def watershed(img):\n\ttmp = img.copy()\n\tgray = cv2.cvtColor(tmp, cv2.COLOR_BGR2GRAY)\n\tret, thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)\n\tkernel = np.ones((3,3), np.uint8)\n\topening = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, kernel, iterations=2)\n\tsure_bg = cv2.dilate(opening, kernel, iterations=3)\n\tdist_transform = cv2.distanceTransform(opening, cv2.cv.CV_DIST_L2, 5)\n\tret, sure_fg = cv2.threshold(dist_transform, 0.7*dist_transform.max(), 255, 0)\n\tsure_fg = np.uint8(sure_fg)\n\tunknown = cv2.subtract(sure_bg, sure_fg)\n\tret, markers = cv2.connectedComponents(sure_fg) #IS THIS REALLY NOT IMPLEMENTED IN PYTHON?\n\tmarkers = markers+1\n\tmarkers[unknown==255] = 0\n\tmarkers = cv2.watershed(tmp, markers)\n\ttmp[markers == -1] = [255,0,0]\n\treturn tmp", "def watershed_segment(M,xM=None,yM=None):\n\n if xM != None and yM != None:\n sel = np.ones((int(ceil(23.9*xM)),int(ceil(23.9*yM)))) # for opening\n sel2 = np.ones((int(ceil(127.2*xM)),int(ceil(127.2*yM)))) # for local thresholding\n sel3 = np.ones((int(ceil(11.9*xM)),int(ceil(11.9*yM)))) # for erosion\n ma,mi =(44245.21*xM*yM),(316.037*xM*yM) \n else:\n selD = np.array([int(M.shape[0]*.012),int(M.shape[1]*.012)])\n selD = np.where(selD!=0,selD,1)\n \n sel2D = np.array([int(M.shape[0]*.12),int(M.shape[1]*.12)])\n sel2D = np.where(sel2D!=0,sel2D,1)\n\n sel3D = np.array([int(M.shape[0]*.01),int(M.shape[1]*.01)])\n sel3D = np.where(sel3D!=0,sel3D,1)\n\n\n sel = np.ones(selD) # for opening\n sel2 = np.ones(sel2D) # for local thresholding\n sel3 = np.ones(sel3D) # for erosion\n ma,mi = (M.shape[0]*M.shape[1]*.0075),(M.shape[0]*M.shape[1]*.0003)\n\n # get a few points in the center of each blob\n \n # threshold\n bw = ((M>=ndi.percentile_filter(M,80,footprint=sel2)))\n #& (M>=stats.scoreatpercentile(M.flatten(),80)))\n\n # open and erode\n blobs = snm.binary_opening(bw,structure=sel)\n blobs = snm.binary_erosion(blobs,structure=sel3,iterations=2)\n \n # label\n labels,_ = ndi.label(blobs)\n labels[labels > 0] += 1\n labels[0,0] = 1\n\n # rescale and cast to int16, then use watershed\n #M2 = rescaled(M,0,65000).astype(np.uint16)\n #newlabels = ndi.watershed_ift(M2,labels)\n newlabels = labels\n \n # get rid of groups unless they have the right number of pixels\n\n counts = np.bincount(newlabels.flatten())\n old2new = np.arange(len(counts)) \n old2new[(counts < int(mi)) | (counts > int(ma))] = 0\n newlabels = old2new[newlabels]\n\n return newlabels", "def segment_nuclei3D_5(instack, sigma1=3, sigma_dog_small=5, sigma_dog_big=40, seed_window=(70,100,100),\n erosion_length=5, dilation_length=10, sensitivity=0.5, size_min=1e4, \n size_max=5e5, circularity_min=0.5, display=False):\n\n\n def smart_dilate(stack, labelmask, sensitivity, dilation_length):\n \"\"\"\n Dilate nuclei, then apply a threshold to the newly-added pixels and\n only retains pixels that cross it. 
Change mask in place.\n \"\"\"\n # Get mean pixel values of foreground and background and define threshold.\n bg_mean = np.mean(stack[labelmask == 0])\n fg_mean = np.mean(stack[labelmask > 0])\n t = bg_mean + ((fg_mean - bg_mean) * sensitivity)\n # Dilate labelmask, return as new mask.\n labelmask_dilated = labelmask_apply_morphology(labelmask, \n mfunc=ndi.morphology.binary_dilation, \n struct=np.ones((1, dilation_length, dilation_length)), \n expand_size=(1, dilation_length + 1, dilation_length + 1))\n # Remove any pixels from dilated mask that are below threshhold.\n labelmask_dilated[stack < t] = 0\n # Add pixels matching nuc in dilated mask to old mask, pixels in old mask that are n\n # and 0 in dilated mask are kept at n. So dilation doesn't remove any nuclear pixels.\n for n in np.unique(labelmask)[1:]:\n if (n != 0):\n labelmask[labelmask_dilated == n] = n\n\n # Normalize each Z-slice to mean intensity to account for uneven illumination.\n stack = zstack_normalize_mean(instack)\n # Apply gaussian filter.\n stack_smooth = ndi.filters.gaussian_filter(stack, sigma=sigma1)\n # Threshold, make binary mask, fill.\n t = threshold_otsu(stack_smooth)\n mask = np.where(stack_smooth >= t, 1, 0)\n mask = imfill(mask, find_background_point(mask))\n # Use morphological erosion to remove spurious connections between objects.\n mask = ndi.morphology.binary_erosion(mask, structure=np.ones((1, erosion_length, erosion_length)))\n # Perform distance transform of mask.\n dist = ndi.distance_transform_edt(mask)\n # Find local maxima for watershed seeds.\n seeds, _ = peak_local_max_nD(dist, size=seed_window)\n # Add a background seed.\n seeds[find_background_point(mask)] = seeds.max() + 1\n # Re-smooth, do gradient transform to get substrate for watershedding.\n dog = dog_filter(stack, sigma_dog_small, sigma_dog_big)\n grad = gradient_nD(dog)\n # Remove nan from grad, replace with non-nan max values.\n grad[np.isnan(grad)] = grad[~np.isnan(grad)].max()\n # Segment by watershed algorithm.\n ws = watershed(grad, seeds.astype(int))\n # Filter nuclei for size and circularity.\n labelmask = labelmask_filter_objsize(ws, size_min, size_max)\n labelmask = filter_labelmask(labelmask, object_circularity, circularity_min, 1000)\n # Dilate labeled structures.\n smart_dilate(stack_smooth, labelmask, sensitivity, dilation_length)\n\n if (display):\n middle_slice = int(stack.shape[0] / 2)\n fig, ax = plt.subplots(3,2, figsize=(10,10))\n # Display mask.\n ax[0][0].imshow(mask.max(axis=0))\n ax[0][0].set_title('Initial Mask')\n # Display watershed seeds.\n seeds_vis = ndi.morphology.binary_dilation(seeds, structure=np.ones((1,8,8)))\n ax[0][1].imshow(stack_smooth.max(axis=0), alpha=0.5)\n ax[0][1].imshow(seeds_vis.max(axis=0), alpha=0.5)\n ax[0][1].set_title('Watershed seeds')\n # Display gradient.\n ax[1][0].imshow(grad[middle_slice])\n ax[1][0].set_title('Gradient')\n # Display watershed output.\n ax[1][1].imshow(ws.max(axis=0))\n ax[1][1].set_title('Watershed')\n # Display final mask.\n ax[2][0].imshow(labelmask.max(axis=0))\n ax[2][0].set_title('Final Segmentation')\n \n return labelmask", "def watershed(image, markers=None, connectivity=1, offset=None, mask=None,\n compactness=0, watershed_line=False):\n from ..segmentation import watershed as _watershed\n return _watershed(image, markers, connectivity, offset, mask,\n compactness, watershed_line)", "def opencv_watershed(masked, mask) -> JSON_TYPE:\n # For code and detailed explanation see:\n # 
http://datahacker.rs/007-opencv-projects-image-segmentation-with-watershed-algorithm/\n threshold: int = 30\n gray = cv2.cvtColor(masked, cv2.COLOR_RGB2GRAY)\n ret, thresh_img = cv2.threshold(gray, threshold, 255, cv2.THRESH_BINARY)\n # Noise removal\n kernel = np.ones((3), np.uint8)\n opening_img = cv2.morphologyEx(thresh_img, cv2.MORPH_OPEN, kernel, iterations=9)\n # Noise removal\n closing_img = cv2.morphologyEx(thresh_img, cv2.MORPH_CLOSE, kernel, iterations=4)\n dist_transform = cv2.distanceTransform(255 - closing_img, cv2.DIST_L2, 3)\n local_max_location = peak_local_max(dist_transform, min_distance=1, indices=True)\n\n n_increases: int = 0\n while local_max_location.shape[0] < 30 and n_increases < 15:\n threshold += 20\n ret, thresh_img = cv2.threshold(gray, threshold, 255, cv2.THRESH_BINARY)\n # Noise removal\n kernel = np.ones((3), np.uint8)\n opening_img = cv2.morphologyEx(thresh_img, cv2.MORPH_OPEN, kernel, iterations=9)\n # Noise removal\n closing_img = cv2.morphologyEx(thresh_img, cv2.MORPH_CLOSE, kernel, iterations=4)\n dist_transform = cv2.distanceTransform(255 - closing_img, cv2.DIST_L2, 3)\n local_max_location = peak_local_max(dist_transform, min_distance=1, indices=True)\n n_increases += 1\n # Reset threshold\n threshold = 30\n\n num_clusters: int = 30\n if n_increases >= 15:\n num_clusters = local_max_location.shape[0]\n kmeans = KMeans(n_clusters=num_clusters)\n # If local_max_location size is 0, return 0 predictions\n if not local_max_location.size:\n return {\n \"count\": 0\n }\n kmeans.fit(local_max_location)\n local_max_location = kmeans.cluster_centers_.copy()\n # Kmeans is returning a float data type so we need to convert it to an int. \n local_max_location = local_max_location.astype(int)\n dist_transform_copy = dist_transform.copy()\n for i in range(local_max_location.shape[0]):\n cv2.circle(dist_transform_copy, (local_max_location[i][1], local_max_location[i][0]), 5, 255)\n # markers = np.zeros_like(dist_transform)\n ret, sure = cv2.threshold(dist_transform, 0.01*dist_transform.max(), 255, 0)\n sure = np.uint8(sure)\n ret, markers = cv2.connectedComponents(sure)\n labels = np.arange(kmeans.n_clusters)\n markers[local_max_location[:,0], local_max_location[:,1]] = labels + 1\n # Convert all local markers to an integer. This because cluster centers will be float numbers. 
\n markers = markers.astype(int)\n markers_copy = markers.copy()\n index_non_zero_markers = np.argwhere(markers != 0)\n markers_copy = markers_copy.astype(np.uint8)\n font = cv2.FONT_HERSHEY_SIMPLEX\n for i in range(index_non_zero_markers.shape[0]):\n string_text = str(markers[index_non_zero_markers[i][0], index_non_zero_markers[i][1]])\n cv2.putText(markers_copy, string_text, (index_non_zero_markers[i][1], index_non_zero_markers[i][0]), font, 1, 255)\n markers = markers.astype(np.int32)\n segmented = cv2.watershed(masked, markers)\n count_segments(markers)\n #return {\n # \"count\": local_max_location.shape[0]\n #}\n return {\n \"count\": count_segments(markers),\n }", "def segment_nuclei3D_monolayer(stack, sigma1=3, sigma_dog_big=15, \n sigma_dog_small=5, seed_window=(30,30), min_seed_dist=25, \n dilation_length=5, size_min=0, size_max=np.inf, display=False):\n # Make max projection on Z.\n maxp = stack.max(axis=0)\n # Filter with DoG to make nuclei into blobs.\n dog = dog_filter(maxp, sigma_dog_small, sigma_dog_big)\n # Get threshold, use thresh to make initial mask and fill holes.\n t = threshold_otsu(dog)\n mask = np.where(dog > t, 1, 0)\n mask = imfill(mask)\n # Perform distance transform, find local maxima for watershed seeds.\n dist = ndi.distance_transform_edt(mask)\n seeds, _ = peak_local_max_nD(dist, size=seed_window, min_dist=min_seed_dist)\n # Smooth image and take gradient, use as input for watershed.\n im_smooth = ndi.filters.gaussian_filter(maxp, sigma=sigma1)\n grad = gradient_nD(im_smooth)\n ws = watershed(grad, seeds.astype(int))\n # Filter object size, relabel to set background to 0.\n labelmask = labelmask_filter_objsize(ws, size_min, size_max)\n labelmask = relabel_labelmask(labelmask)\n # Dilate segmented nuclei.\n labelmask = labelmask_apply_morphology(labelmask, \n mfunc=ndi.morphology.binary_dilation, \n struct=np.ones((dilation_length, dilation_length)), \n expand_size=(dilation_length + 1, dilation_length + 1))\n\n if (display):\n fig, ax = plt.subplots(3,2, figsize=(10,10))\n # Display mask.\n ax[0][0].imshow(mask)\n ax[0][0].set_title('Initial Mask')\n # Display watershed seeds.\n seeds_vis = ndi.morphology.binary_dilation(seeds, structure=np.ones((8,8)))\n ax[0][1].imshow(im_smooth, alpha=0.5)\n ax[0][1].imshow(seeds_vis, alpha=0.5)\n ax[0][1].set_title('Watershed seeds')\n # Display gradient.\n ax[1][0].imshow(grad)\n ax[1][0].set_title('Gradient')\n # Display watershed output.\n ws = relabel_labelmask(ws)\n ax[1][1].imshow(ws.astype('bool'))\n ax[1][1].set_title('Watershed')\n # Display final mask.\n ax[2][0].imshow(labelmask.astype('bool'))\n ax[2][0].set_title('Final Segmentation')\n \n # Make 2D labelmask into 3D mask by repeating.\n labelmask = np.repeat([labelmask], stack.shape[0], axis=0)\n return labelmask", "def create_binary(image):\n #Channel 1 of the output image highlights the area consisting of the nuclei\n channel1=image[:,:,0]\n \n # Channel 2 of the output image consists of the boundaries between adjoining nuclei\n channel2=image[:,:,1]\n _,channel1=cv2.threshold(channel1, 127,255,cv2.THRESH_BINARY) \n _,channel2=cv2.threshold(channel2, 127,255,cv2.THRESH_BINARY) \n \n #Subtracting channel 2 from channel 1 to get the desired output\n img1=channel1-channel2\n \n return img1", "def segment(self):\n\n #Run the marker selection GUI\n self.ps.startGUI()\n self.numSegments = self.ps.numSegments\n markerPoints = self.ps.result\n if(markerPoints == 0):\n print(\"No markers, exiting watershed...\")\n return False\n\n markers = np.zeros(self.imgShape, 
dtype = np.uint8)\n \n #Format the markers to matrix\n for i in range(0,len(markerPoints)):\n for j in range(0,len(markerPoints[i])):\n x = markerPoints[i][j][0]\n y = markerPoints[i][j][1]\n\n markers[x,y] = (i+1)\n\n watershed = markers.copy().astype(np.int32)\n self.segmentedImg = cv2.watershed(self.img,watershed)\n return self.segmentedImg", "def segmentation_bin(image_gray, th):\n try:\n if len(image_gray.shape) > 2:\n print('[ERROR]: Dimension > 2. Is an image gray?')\n return None \n \n ret, image_bin = cv2.threshold(image_gray, th, 255, cv2.THRESH_BINARY_INV)\n \n return image_bin\n except:\n print('[ERROR]: could not segmentation image')\n return None", "def segment(self, sg=NucleiSegmenter()):\n # mask_path = self.name.replace('w1', 'w3').replace('561', '405')\n # cell_mask = io.imread(mask_path)\n # self.mask = numpy.swapaxes(cell_mask, 0, 2)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n if self._verbose > 0:\n print('Segmenting...', end='', flush=True)\n self.nucleis = sg.method(self.image_raw)\n if self._verbose > 0:\n print('%i nucleis found.' % (numpy.unique(self.nucleis).shape[0] - 1))", "def watershed(self, debug=False):\n kernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))\n opening = cv2.morphologyEx(self.th[:, :, 0], cv2.MORPH_OPEN, kernel, iterations=2)\n sure_bg = cv2.dilate(self.th[:, :, 0], kernel, iterations=3)\n dist_transform = cv2.distanceTransform(opening, cv2.DIST_L2, 5)\n ret, sure_fg = cv2.threshold(dist_transform, 0.1 * dist_transform.max(), 255, 0)\n sure_fg = np.uint8(sure_fg)\n unknown = cv2.subtract(sure_bg, sure_fg)\n ret, markers = cv2.connectedComponents(sure_fg)\n markers += 1\n markers[unknown == 255] = 0\n markers = cv2.watershed(self.img, markers)\n self.add_color(markers)\n if debug:\n cv2.imshow(\"fg\", unknown)\n cv2.imshow(\"op\", opening)\n cv2.imshow(\"o3\", sure_bg)", "def watershed_segment_2(M,click_coords):\n \n # todo: choose these structures based on aspect ratio of M and input parameters\n sel = np.ones((4,10)) # for opening\n sel2 = np.ones((15,75)) # for local thresholding\n sel3 = np.ones((2,5)) # for erosion\n # get a few points in the center of each blob\n \n # threshold\n #bw = ((M>=ndi.percentile_filter(M,80,footprint=sel2)) & (M>=scoreatpercentile(M.flatten(),60)))\n \n score = stats.percentileofscore(M.flatten(),M[int(click_coords[0][1]),int(click_coords[0][0])])\n bw = (M>=stats.scoreatpercentile(M.flatten(),score))\n\n # open and erode\n #bools = sp.zeros((M.shape[0],M.shape[1]),int)\n #bools[int(click_coords[0]),int(click_coords[1])] = 1\n #blobs = sp.where(bools == 1,True,False)\n blobs = snm.binary_opening(bw,structure=sel)\n blobs = snm.binary_dilation(blobs,iterations=3)\n blobs = snm.binary_erosion(blobs,structure=sel3)\n \n \n # label\n labels,_ = ndi.label(blobs)\n labels[labels > 0] += 1\n #labels[0,0] = 1\n\n # rescale and cast to int16, then use watershed\n M2 = rescaled(M,0,65000).astype(np.uint16)\n newlabels = ndi.watershed_ift(M2,labels)\n \n # get rid of groups unless they have the right number of pixels\n counts = np.bincount(newlabels.flatten())\n old2new = np.arange(len(counts))\n old2new[(counts < 100) | (counts > 600)] = 0\n newlabels = old2new[newlabels]\n \n return newlabels", "def preprocessing(image, smooth_size, folder):\n from skimage.restoration import denoise_tv_chambolle\n \n dim = int(image.shape[0] / 50.)\n smoothed = rank.median(image, disk(smooth_size))\n #smoothed = denoise_tv_chambolle(image, weight=0.002)\n smoothed = rank.enhance_contrast(smoothed, 
disk(smooth_size))\n \n pl.subplot(2, 3, 1)\n pl.title(\"after median\")\n pl.imshow(smoothed)\n pl.gray()\n # If after smoothing the \"dot\" disappears\n # use the image value\n \n # TODO: wat do with thresh?\n try:\n im_max = smoothed.max()\n thresh = threshold_otsu(image)\n except:\n im_max = image.max()\n thresh = threshold_otsu(image)\n\n \n if im_max < thresh:\n labeled = np.zeros(smoothed.shape, dtype=np.int32)\n \n else:\n binary = smoothed > thresh\n \n # TODO: this array size is the fault of errors\n bin_open = binary_opening(binary, np.ones((dim, dim)), iterations=5)\n bin_close = binary_closing(bin_open, np.ones((5,5)), iterations=5)\n \n pl.subplot(2, 3, 2)\n pl.title(\"threshold\")\n pl.imshow(binary, interpolation='nearest')\n pl.subplot(2, 3, 3)\n pl.title(\"opening\")\n pl.imshow(bin_open, interpolation='nearest')\n pl.subplot(2, 3, 4)\n pl.title(\"closing\")\n pl.imshow(bin_close, interpolation='nearest')\n \n distance = ndimage.distance_transform_edt(bin_open)\n local_maxi = peak_local_max(distance,\n indices=False, labels=bin_open)\n \n markers = ndimage.label(local_maxi)[0]\n \n labeled = watershed(-distance, markers, mask=bin_open)\n pl.subplot(2, 3, 5)\n pl.title(\"label\")\n pl.imshow(labeled)\n #pl.show()\n pl.savefig(folder)\n pl.close('all')\n\n #misc.imsave(folder, labeled)\n# labels_rw = random_walker(bin_close, markers, mode='cg_mg')\n# \n# pl.imshow(labels_rw, interpolation='nearest')\n# pl.show()\n\n return labeled", "def segment_nuclei3D_monolayer_rpb1(stack, sigma1=3, sigma_dog_big=15, \n sigma_dog_small=5, seed_window=(30,30), min_seed_dist=25, \n dilation_length=5, dilation_length_foci=10, size_min=0, \n circularity_min=0, size_max=np.inf, display=False):\n # Make max projection on Z.\n maxp = stack.max(axis=0)\n # Filter with DoG to make nuclei into blobs.\n dog = dog_filter(maxp, sigma_dog_small, sigma_dog_big)\n # Get threshold, use thresh to make initial mask and fill holes.\n t = threshold_otsu(dog)\n mask = np.where(dog > t, 1, 0)\n mask = imfill(mask)\n # Perform distance transform, find local maxima for watershed seeds.\n dist = ndi.distance_transform_edt(mask)\n seeds, _ = peak_local_max_nD(dist, size=seed_window, min_dist=min_seed_dist)\n # Smooth image and take gradient, use as input for watershed.\n im_smooth = ndi.filters.gaussian_filter(maxp, sigma=sigma1)\n grad = gradient_nD(im_smooth)\n # Make second mask of pol2 foci (presumed HLBs) by re-thresholding within nuclei.\n t_foci = threshold_otsu(im_smooth[mask.astype('bool')])\n mask_foci = np.where(im_smooth > t_foci, True, False)\n mask_foci = ndi.morphology.binary_dilation(mask_foci, structure=np.ones((dilation_length_foci, dilation_length_foci)))\n # Mask out pol2 foci in gradient.\n grad = np.where(mask_foci, 0, grad)\n # Perform watershed segmentation.\n ws = watershed(grad, seeds.astype(int))\n # Filter object size and circularity, relabel to set background to 0.\n labelmask = labelmask_filter_objsize(ws, size_min, size_max)\n # Note: object_circularity works on 3D labelmasks, requiring adding (expand_dims) and removing (squeeze) a dimension.\n labelmask = np.squeeze(filter_labelmask(np.expand_dims(labelmask, axis=0), object_circularity, circularity_min, 1000))\n labelmask = relabel_labelmask(labelmask)\n # Dilate segmented nuclei.\n labelmask = labelmask_apply_morphology(labelmask, \n mfunc=ndi.morphology.binary_dilation, \n struct=np.ones((dilation_length, dilation_length)), \n expand_size=(dilation_length + 1, dilation_length + 1))\n\n if (display):\n fig, ax = plt.subplots(3,2, 
figsize=(10,10))\n # Display mask.\n ax[0][0].imshow(mask)\n ax[0][0].set_title('Initial Mask')\n # Display watershed seeds.\n seeds_vis = ndi.morphology.binary_dilation(seeds, structure=np.ones((8,8)))\n ax[0][1].imshow(im_smooth, alpha=0.5)\n ax[0][1].imshow(seeds_vis, alpha=0.5)\n ax[0][1].set_title('Watershed seeds')\n # Display gradient.\n ax[1][0].imshow(grad)\n ax[1][0].set_title('Gradient')\n # Display watershed output.\n ws = relabel_labelmask(ws)\n ax[1][1].imshow(ws.astype('bool'))\n ax[1][1].set_title('Watershed')\n # Display final mask.\n ax[2][0].imshow(labelmask.astype('bool'))\n ax[2][0].set_title('Final Segmentation')\n \n # Make 2D labelmask into 3D mask by repeating.\n labelmask = np.repeat([labelmask], stack.shape[0], axis=0)\n return labelmask", "def segmentation_grey(self, image, k = 2):\n\n return image", "def segment(self, sg=CytoSegmenter()):\n # mask_path = self.name.replace('w1', 'w3').replace('561', '405')\n # cell_mask = io.imread(mask_path)\n # self.mask = numpy.swapaxes(cell_mask, 0, 2)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n if self._verbose > 0:\n print('Segmenting...', end='', flush=True)\n self.cells = sg.method(self.image_raw, self.nuclei_image)\n if self._verbose > 0:\n print('%i cells found.' % (numpy.unique(self.cells).shape[0] - 1))", "def segment_region_of_interest(image):\n binary = image < 604\n cleared = clear_border(binary)\n\n label_image = label(cleared)\n\n areas = [r.area for r in regionprops(label_image)]\n areas.sort()\n if len(areas) > 2:\n for region in regionprops(label_image):\n if region.area < areas[-2]:\n for coordinates in region.coords:\n label_image[coordinates[0], coordinates[1]] = 0\n\n binary = label_image > 0\n\n selem = disk(2)\n binary = binary_erosion(binary, selem)\n\n selem = disk(10)\n binary = binary_closing(binary, selem)\n\n edges = roberts(binary)\n binary = scipy.ndimage.binary_fill_holes(edges)\n\n get_high_vals = binary == 0\n image[get_high_vals] = 0\n\n return image", "def filter_fusion(luma_bin, sat_bin, grad_bin, mentor_bin):\n binary = np.zeros_like(luma_bin)\n binary[ (((grad_bin==1) | (sat_bin==1)) & (luma_bin==1)) | (mentor_bin==1) ] = 1\n\n # Erosion and dilation - Seems doesn't work. 
Mask-off\n #kernel = np.ones((5,5))\n #binary_dilation = cv2.dilate(binary, kernel, iterations=1)\n #binary_erosion = cv2.erode(binary_dilation, kernel, iterations=1)\n #binary = binary_erosion\n\n return binary", "def make_lungmask(img, display=False):\n row_size= img.shape[0]\n col_size = img.shape[1]\n \n mean = np.mean(img)\n std = np.std(img)\n img = img-mean\n img = img/std\n\n # uses hounsfield values near lungs to normalize images\n\n middle = img[int(col_size/5):int(col_size/5*4),int(row_size/5):int(row_size/5*4)] \n mean = np.mean(middle) \n max = np.max(img)\n min = np.min(img)\n img[img==max]=mean\n img[img==min]=mean\n \n # uses kmeans to separate foreground (soft tissue / bone) and background (lung/air)\n\n kmeans = KMeans(n_clusters=2).fit(np.reshape(middle,[np.prod(middle.shape),1]))\n centers = sorted(kmeans.cluster_centers_.flatten())\n threshold = np.mean(centers)\n thresh_img = np.where(img<threshold,1.0,0.0)\n\n # performs erosion and dilation\n\n eroded = morphology.erosion(thresh_img,np.ones([3,3]))\n dilation = morphology.dilation(eroded,np.ones([8,8]))\n\n labels = measure.label(dilation) # Different labels are displayed in different colors\n label_vals = np.unique(labels)\n regions = measure.regionprops(labels)\n good_labels = []\n for prop in regions:\n B = prop.bbox\n if B[2]-B[0]<row_size/10*9 and B[3]-B[1]<col_size/10*9 and B[0]>row_size/5 and B[2]<col_size/5*4:\n good_labels.append(prop.label)\n mask = np.ndarray([row_size,col_size],dtype=np.int8)\n mask[:] = 0\n\n # makes mask\n\n for N in good_labels:\n mask = mask + np.where(labels==N,1,0)\n mask = morphology.dilation(mask,np.ones([10,10])) # one last dilation\n final = mask * img\n \n # shows and saves output\n\n plt.imshow(final)\n im = Image.fromarray(final*128)\n im = im.convert(\"L\")\n im.save(S)\n \n return", "def textDetectWatershed(thresh, original):\n # According to: http://docs.opencv.org/trunk/d3/db4/tutorial_py_watershed.html\n img = resize(original, 3000)\n thresh = resize(thresh, 3000)\n # noise removal\n kernel = np.ones((3,3),np.uint8)\n opening = cv2.morphologyEx(thresh,cv2.MORPH_OPEN,kernel, iterations = 3)\n \n # sure background area\n sure_bg = cv2.dilate(opening,kernel,iterations=3)\n\n # Finding sure foreground area\n dist_transform = cv2.distanceTransform(opening,cv2.DIST_L2,5)\n ret, sure_fg = cv2.threshold(dist_transform,0.01*dist_transform.max(),255,0)\n\n # Finding unknown region\n sure_fg = np.uint8(sure_fg)\n unknown = cv2.subtract(sure_bg,sure_fg)\n \n # Marker labelling\n ret, markers = cv2.connectedComponents(sure_fg)\n\n # Add one to all labels so that sure background is not 0, but 1\n markers += 1\n\n # Now, mark the region of unknown with zero\n markers[unknown == 255] = 0\n \n markers = cv2.watershed(img, markers)\n implt(markers, t='Markers')\n image = img.copy()\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n \n # Creating result array\n boxes = []\n for mark in np.unique(markers):\n # mark == 0 --> background\n if mark == 0:\n continue\n\n # Draw it on mask and detect biggest contour\n mask = np.zeros(gray.shape, dtype=\"uint8\")\n mask[markers == mark] = 255\n\n cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[-2]\n c = max(cnts, key=cv2.contourArea)\n \n # Draw a bounding rectangle if it contains text\n x,y,w,h = cv2.boundingRect(c)\n cv2.drawContours(mask, c, 0, (255, 255, 255), cv2.FILLED)\n maskROI = mask[y:y+h, x:x+w]\n # Ratio of white pixels to area of bounding rectangle\n r = cv2.countNonZero(maskROI) / (w * h)\n \n # Limits 
for text\n if r > 0.1 and 2000 > w > 15 and 1500 > h > 15:\n boxes += [[x, y, w, h]]\n \n # Group intersecting rectangles\n boxes = group_rectangles(boxes)\n bounding_boxes = np.array([0,0,0,0])\n for (x, y, w, h) in boxes:\n cv2.rectangle(image, (x, y),(x+w,y+h), (0, 255, 0), 8)\n bounding_boxes = np.vstack((bounding_boxes, np.array([x, y, x+w, y+h])))\n \n implt(image)\n\n # Recalculate coordinates to original size\n boxes = bounding_boxes.dot(ratio(original, img.shape[0])).astype(np.int64)\n return boxes[1:]", "def ColorSegmentation(image, kernel_sigma, color_seg, sim_threshold): \n \n color_seg = np.array(color_seg) / 255;\n \n if kernel_sigma >= 1:\n for cha_no in range(image.shape[2]):\n image[:, :, cha_no] = Denoising(image[:, :, cha_no], kernel_sigma);\n \n image = image / 255;\n mask = np.zeros((image.shape[0], image.shape[1]), dtype = bool);\n \n similarity = np.exp(-np.sum((image - color_seg) ** 2, axis = 2));\n mask[similarity > sim_threshold] = 1;\n\n return mask;", "def tz_ws(img,markers,Bc):\n \n D2 = False\n if img.ndim == 2:\n temp_H,temp_W = img.shape\n img = img.reshape(1,temp_H,temp_W)\n markers = markers.reshape(1,temp_H,temp_W)\n D2 = True\n off = se2off(Bc) #offsets\n \n if off.shape[1] == 2:\n off = np.concatenate((np.zeros((off.shape[0],1), dtype = np.int32),off), axis = 1)\n\n \tftype = img.dtype\n\tmtype = markers.dtype\n\tif (mtype != np.int32):\n\t\twarnings.warn(\"markers should be int32, forcing conversion\")\n\t\tmarkers = markers.astype(np.int32)\n\tif (ftype == np.uint8):\n\t\timg = img.astype(np.uint16)\t\n\n seg = markers.copy()\n ii32 = np.iinfo(np.int32).max # infinity\n P = np.empty(img.shape, dtype = np.int32)\n P[:] = -1\n C1 = np.empty(img.shape, dtype = np.int32)\n C1[:] = ii32\n C2 = np.zeros(img.shape, dtype = np.int32)\n done =np.zeros(img.shape, dtype = np.int32)\n watershed_c.tz_ws_c(ii32,off,img,seg,C1,C2,done,P)\n\n if D2:\n L,M,N = seg.shape\n seg = seg.reshape(M,N)\n return seg", "def segmentation_pipeline(ipm_img):\n \n # Compute individual thresholded images\n sobel_abs = abs_sobel_thresh(ipm_img, 'x', 30, 255)\n sobel_mag = mag_thresh(ipm_img, 15, (58, 255))\n sobel_dir = dir_threshold(ipm_img, 15, (0,0.2))\n color_hsl = hls_select(ipm_img, (210,255))\n brght_lab = lab_select(ipm_img, (184,255))\n\n # Compute combined threshold\n segmented_img = np.zeros_like(sobel_abs)\n segmented_img[(color_hsl==1) | (brght_lab==1)] = 1\n return segmented_img", "def segment_and_find_positions(self):\n initial_image = self.data\n xdim = self.data.shape[0]\n\n ydim = self.data.shape[1]\n downsized_image = transform.resize(\n initial_image,\n (xdim / DOWNSCALING_FACTOR, ydim / DOWNSCALING_FACTOR),\n mode=\"constant\",\n )\n rescaled_image = exposure.rescale_intensity(downsized_image)\n print(\"Starting Canny filtering\")\n g_edges = skimage.feature.canny(\n rescaled_image,\n sigma=self.canny_sigma,\n low_threshold=self.canny_low_threshold,\n )\n print(\"Starting dilation\")\n dilation = morphology.dilation(g_edges, morphology.disk(3))\n print(\"Starting erosion\")\n eroded = morphology.erosion(dilation, morphology.disk(4))\n dilation = morphology.dilation(\n eroded, morphology.diamond(4)\n ) # Dont change to disk\n print(\"Starting to remove small holes\")\n filled = morphology.remove_small_holes(\n dilation, area_threshold=self.remove_small_holes_area_threshold\n )\n print(\"Starting erosion\")\n eroded = morphology.erosion(filled, morphology.diamond(3))\n print(\"Applying filters\")\n filtered_image = eroded\n if self.colony_filters_dict is not 
None:\n for filter_name in self.colony_filters_dict.keys():\n filtered_image = segmentation_filters.apply_filter(\n filter_name, filtered_image, self.colony_filters_dict[filter_name]\n )\n\n colony_edges = morphology.dilation(feature.canny(filtered_image, 0.01))\n print(\"Starting outlining\")\n outline = downsized_image.copy()\n outline[colony_edges] = 65535\n distance = ndimage.distance_transform_edt(filtered_image)\n smoothed_well = ndimage.gaussian_filter(downsized_image, 0.35)\n outline.copy()\n objs, num_objs = ndimage.label(filtered_image)\n print(\"Applying filters for points\")\n if self.mode == \"A\":\n # point selection: Smoothest point in the center region\n for obj in range(1, num_objs + 1):\n print(\"On object {} of {}\".format(obj, num_objs))\n mask = objs == obj\n dist_mask = distance * mask\n # for each colony,\n # find the maximum distance from the two fold distance map.\n # The edge is at 0% and the center of the colony is at 100%\n d_max = dist_mask.max()\n # Getting the points which is at least 40% away from the edge\n top_percent = dist_mask > (d_max * 0.40)\n colony_mask = smoothed_well * top_percent\n colony_edges = feature.canny(colony_mask, 0.1)\n # applying the second distance transform\n # to find the smoothest point in the correct region\n inner_edges = ndimage.distance_transform_edt(\n ~colony_edges * top_percent\n )\n smooth_point = numpy.where(inner_edges == inner_edges.max())\n smooth_point = (smooth_point[0][0], smooth_point[1][0])\n smooth_point_corrected = (\n smooth_point[0] * DOWNSCALING_FACTOR,\n smooth_point[1] * DOWNSCALING_FACTOR,\n )\n self._point_locations.append(smooth_point_corrected)\n elif self.mode == \"C\":\n for obj in range(1, num_objs + 1):\n print(\"On object {} of {}\".format(obj, num_objs))\n mask = objs == obj\n dist_mask = distance * mask\n # point selection: edge, ridge & center respectively\n self.get_mode_c_points(dist_mask, 0, 0.03)\n self.get_mode_c_points(dist_mask, 0.15, 0.20)\n self.get_mode_c_points(dist_mask, 0.90, 0.99)", "def ws_markers(img,markers,Bc):\n\n\tD2 = False\n if img.ndim == 2:\n temp_H,temp_W = img.shape\n img = img.reshape(1,temp_H,temp_W)\n markers = markers.reshape(1,temp_H,temp_W)\n D2 = True\n off = se2off(Bc) #offsets\n \n if off.shape[1] == 2:\n off = np.concatenate((np.zeros((off.shape[0],1), dtype = np.int32),off), axis = 1)\n \n\tftype = img.dtype\n\tmtype = markers.dtype\n\tif (mtype != np.int32):\n\t\twarnings.warn(\"markers should be int32, forcing conversion\")\n\t\tmarkers = markers.astype(np.int32)\n\tif (ftype == np.uint8):\n\t\timg = img.astype(np.uint16)\t\n\n seg = markers.copy()\n ii32 = np.iinfo(np.int32).max # infinity\n P = np.empty(img.shape, dtype = np.int32)\n P[:] = -1\n C1 = np.empty(img.shape, dtype = np.int32)\n C1[:] = ii32\n C2 = np.zeros(img.shape, dtype = np.int32)\n done =np.zeros(img.shape, dtype = np.int32)\n watershed_c.ws_markers_c(ii32,off,img,seg,C1,C2,done,P)\n\n if D2:\n L,M,N = seg.shape\n seg = seg.reshape(M,N)\n return seg", "def _seg_image(self, x, y, r_cut=100):\n snr=self.snr\n npixels=self.npixels\n bakground = self.bakground\n error= self.bkg_rms(x,y,r_cut)\n kernel = self.kernel\n image_cutted = self.cut_image(x,y,r_cut)\n image_data = image_cutted\n threshold_detect_objs=detect_threshold(data=image_data, nsigma=snr,error=error)\n segments=detect_sources(image_data, threshold_detect_objs, npixels=npixels, filter_kernel=kernel)\n segments_deblend = deblend_sources(image_data, segments, npixels=npixels,nlevels=10)\n segments_deblend_info = 
source_properties(image_data, segments_deblend)\n nobjs = segments_deblend_info.to_table(columns=['id'])['id'].max()\n xcenter = segments_deblend_info.to_table(columns=['xcentroid'])['xcentroid'].value\n ycenter = segments_deblend_info.to_table(columns=['ycentroid'])['ycentroid'].value\n image_data_size = np.int((image_data.shape[0] + 1) / 2.)\n dist = ((xcenter - image_data_size) ** 2 + (ycenter - image_data_size) ** 2) ** 0.5\n c_index = np.where(dist == dist.min())[0][0]\n center_mask=(segments_deblend.data==c_index+1)*1 #supposed to be the data mask\n obj_masks = []\n for i in range(nobjs):\n mask = ((segments_deblend.data==i+1)*1)\n obj_masks.append(mask)\n xmin = segments_deblend_info.to_table(columns=['bbox_xmin'])['bbox_xmin'].value\n xmax = segments_deblend_info.to_table(columns=['bbox_xmax'])['bbox_xmax'].value\n ymin = segments_deblend_info.to_table(columns=['bbox_ymin'])['bbox_ymin'].value\n ymax = segments_deblend_info.to_table(columns=['bbox_ymax'])['bbox_ymax'].value\n xmin_c, xmax_c = xmin[c_index], xmax[c_index]\n ymin_c, ymax_c = ymin[c_index], ymax[c_index]\n xsize_c = xmax_c - xmin_c\n ysize_c = ymax_c - ymin_c\n if xsize_c > ysize_c:\n r_center = np.int(xsize_c)\n else:\n r_center = np.int(ysize_c)\n center_mask_info= [center_mask, r_center, xcenter, ycenter, c_index]\n return obj_masks, center_mask_info, segments_deblend", "def _get_watershed_boundaries(self, class_mask, dist_thresh=0.6):\n\n kernel = np.ones((5, 5), np.float32)\n\n # Use a distance transform to find the seed points for watershed\n tmp = class_mask\n tmp[tmp>0] = 1 # here\n dist = cv2.distanceTransform(tmp, cv2.DIST_L2, 5) # here .astype(np.uint8), cv2.DIST_L2, 5)\n dist = (dist / np.max(dist)) * 255.\n\n # Since there may be multiple peaks, we use dilation to find them\n dilate = cv2.dilate(dist, kernel, iterations=3)\n peaks = np.float32(np.where(dilate == dist, 1, 0))\n peaks = peaks * class_mask * 255\n\n sure_fg = np.where(peaks > 125, 255., 0.)\n sure_fg = cv2.dilate(sure_fg, kernel, iterations=2)\n sure_fg = np.uint8(sure_fg)\n\n sure_bg = cv2.dilate(class_mask, kernel, iterations=3) * 255\n unknown = sure_bg - sure_fg\n\n # Add one to all labels so that known background is not 0, but 1\n _, markers = cv2.connectedComponents(sure_fg)\n markers = markers + 1\n\n markers[unknown == 255] = 0\n\n markers = cv2.watershed(self.image, markers)\n\n watershed_superpixels = np.zeros(class_mask.shape, dtype=np.uint8)\n watershed_superpixels[markers == -1] = 255\n\n return watershed_superpixels", "def segment_cells(frame, mask=None):\n \n blurred = filters.gaussian(frame, 2)\n ridges = enhance_ridges(frame)\n \n # threshold ridge image\n thresh = filters.threshold_otsu(ridges)\n thresh_factor = 0.5\n prominent_ridges = ridges > thresh_factor*thresh\n prominent_ridges = morphology.remove_small_objects(prominent_ridges, min_size=256)\n prominent_ridges = morphology.binary_closing(prominent_ridges)\n prominent_ridges = morphology.binary_dilation(prominent_ridges)\n \n # skeletonize\n ridge_skeleton = morphology.medial_axis(prominent_ridges)\n ridge_skeleton = morphology.binary_dilation(ridge_skeleton)\n ridge_skeleton *= mask\n ridge_skeleton = np.bitwise_xor(ridge_skeleton, mask)\n \n # label\n cell_label_im = measure.label(ridge_skeleton)\n \n # morphological closing to fill in the cracks\n for cell_num in range(1, cell_label_im.max()+1):\n cell_mask = cell_label_im==cell_num\n cell_mask = morphology.binary_closing(cell_mask, disk(3))\n cell_label_im[cell_mask] = cell_num\n \n return cell_label_im", "def 
get_segmented_image(image_path):\n\n # Setup Caffe Segnet\n sys.path.append('/usr/local/lib/python2.7/site-packages')\n caffe_root = '/opt/caffe-segnet/'\n sys.path.insert(0, caffe_root + 'python')\n import caffe\n\n model = 'static/nn_files/segnet_model_driving_webdemo.prototxt'\n weights = 'static/nn_files/segnet_weights_driving_webdemo.caffemodel'\n colours = 'static/nn_files/camvid12.png'\n\n net = caffe.Net(model,weights, caffe.TEST)\n caffe.set_mode_cpu()\n\n input_shape = net.blobs['data'].data.shape\n output_shape = net.blobs['argmax'].data.shape\n label_colours = cv2.imread(colours).astype(np.uint8)\n\n resized_images = slice_and_resize(image_path)\n\n images = [ cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR) for img in resized_images ]\n\n\n\n def segment_image(image):\n input_image = image.transpose((2,0,1))\n input_image = image.transpose((2,0,1))\n input_image = np.asarray([input_image])\n\n out = net.forward_all(data=input_image)\n\n segmentation_ind = np.squeeze(net.blobs['argmax'].data)\n segmentation_ind_3ch = np.resize(segmentation_ind, (3, input_shape[2], input_shape[3]))\n segmentation_ind_3ch = segmentation_ind_3ch.transpose(1,2,0).astype(np.uint8)\n segmentation_rgb = np.zeros(segmentation_ind_3ch.shape, dtype=np.uint8)\n\n cv2.LUT(segmentation_ind_3ch, label_colours, segmentation_rgb)\n\n return segmentation_rgb\n\n segmented_images = map(segment_image, images)\n\n # 5. Create a single full image from the segmented parts\n segmented_full_image = join_images_horizontally(segmented_images)\n\n folder = \"static/images/segmented\"\n os.system(\"rm %s/*.png\" % (folder))\n\n name = next(tempfile._get_candidate_names())\n segment_path = \"%s/%s_resized.png\" % (folder, name)\n segmented_full_image.save(segment_path)\n return segment_path", "def combined_threshold(img, plot=False):\n gray_image = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)[:, :, 2] # Better gray image from HSV - V channel\n # gray_image = cv2.cvtColor(img, cv2.COLOR_RGB2LAB)[:, :, 2]\n gray_image = image_erosion(gray_image, 3, 1)\n gradx = abs_sobel_thresh(gray_image, orient='x', sobel_kernel=9, thresh=(20, 100))\n grady = abs_sobel_thresh(gray_image, orient='y', sobel_kernel=15, thresh=(20, 100))\n mag_binary = mag_thresh(gray_image, sobel_kernel=9, mag_thresh=(30, 100))\n dir_binary = dir_threshold(gray_image, sobel_kernel=15, thresh=(0.7, 1.3))\n combined = np.zeros_like(dir_binary)\n combined[((gradx == 1) & (grady == 1))] = 1\n combined[(mag_binary == 1) & (dir_binary == 1)] = 1\n combined = image_dilation(combined, 3, 1)\n if plot:\n plot_convolution_results(img, gradx, grady, mag_binary, dir_binary, combined)\n return combined" ]
[ "0.73304313", "0.69821537", "0.6795016", "0.67470026", "0.6652876", "0.65473425", "0.65426207", "0.64990574", "0.6482789", "0.647807", "0.6431419", "0.6372488", "0.6349134", "0.62908137", "0.6180611", "0.61678934", "0.61438453", "0.6135391", "0.6117248", "0.60778475", "0.6072808", "0.60662097", "0.6060353", "0.6047306", "0.60075915", "0.59831744", "0.598271", "0.5941686", "0.59344417", "0.59281486" ]
0.7320055
1
Parses XML File and returns an object containing all the vertices
def parseXML(xmlFile, pattern): tree = ET.parse(xmlFile) # Convert XML file into tree representation root = tree.getroot() regions = root.iter('Region') # Extract all Regions vertices = {pattern: []} # Store all vertices in a dictionary for region in regions: label = region.get('Text') # label either as 'ROI' or 'normal' if label == pattern: vertices[label].append({'X':[], 'Y':[]}) for vertex in region.iter('Vertex'): X = float(vertex.get('X')) Y = float(vertex.get('Y')) vertices[label][-1]['X'].append(X) vertices[label][-1]['Y'].append(Y) return vertices
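A minimal usage sketch for the parseXML function above. The annotation schema (Region elements carrying a Text attribute, with nested Vertex elements holding X/Y attributes) is inferred from the function body; the sample file name and coordinates are made up for illustration, and the function is restated so the snippet runs on its own.

import xml.etree.ElementTree as ET

def parseXML(xmlFile, pattern):
    # Same logic as the record above: collect X/Y vertex lists per matching Region.
    tree = ET.parse(xmlFile)
    root = tree.getroot()
    vertices = {pattern: []}
    for region in root.iter('Region'):
        if region.get('Text') == pattern:
            vertices[pattern].append({'X': [], 'Y': []})
            for vertex in region.iter('Vertex'):
                vertices[pattern][-1]['X'].append(float(vertex.get('X')))
                vertices[pattern][-1]['Y'].append(float(vertex.get('Y')))
    return vertices

sample = """<Annotations>
  <Region Text="ROI">
    <Vertices>
      <Vertex X="10.5" Y="20.0"/>
      <Vertex X="11.0" Y="21.5"/>
    </Vertices>
  </Region>
</Annotations>"""

with open("example_annotations.xml", "w") as f:
    f.write(sample)

print(parseXML("example_annotations.xml", "ROI"))
# -> {'ROI': [{'X': [10.5, 11.0], 'Y': [20.0, 21.5]}]}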
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __parse(self):\n\t\tparser=xml.sax.make_parser()\n\t\tparser.setContentHandler(OSMXMLFileParser(self))\n\t\tparser.parse(self.filename)\n\t\n\t\t# convert them back to lists\n\t\tself.nodes = self.nodes.values()\n\t\tself.ways = self.ways.values()\n\t\tself.relations = self.relations.values()", "def loadVesselucida_xml(self):\n\n\t\txmlFilePath, ext = os.path.splitext(self.tifPath)\n\t\txmlFilePath += '.xml'\n\t\tif not os.path.isfile(xmlFilePath):\n\t\t\t#print('bSlabList.loadVesselucida_xml() warning, did not find', xmlFilePath)\n\t\t\treturn False\n\n\t\tprint('loadVesselucida_xml() file', xmlFilePath)\n\t\tmydoc = minidom.parse(xmlFilePath)\n\n\t\tvessels = mydoc.getElementsByTagName('vessel')\n\t\t#print('found', len(vessels), 'vessels')\n\n\t\tself.x = []\n\t\tself.y = []\n\t\tself.z = []\n\t\tself.d = []\n\t\tself.id = []\n\t\tself.orig_x = []\n\t\tself.orig_y = []\n\t\tself.orig_z = []\n\n\t\tmasterNodeIdx = 0\n\t\tmasterEdgeIdx = 0\n\t\tmasterSlabIdx = 0\n\t\tfor i, vessel in enumerate(vessels):\n\t\t\tprint('vessel i:', i, 'name:', vessel.attributes['name'].value)\n\n\t\t\t#\n\t\t\t# nodes\n\t\t\tstartNodeIdx = masterNodeIdx\n\t\t\tnodes = vessel.getElementsByTagName('nodes')\n\t\t\t#print(' has', len(nodes), 'nodes')\n\t\t\tfor j, node in enumerate(nodes):\n\t\t\t\tnodeList = vessel.getElementsByTagName('node')\n\t\t\t\tfor k in range(len(nodeList)):\n\t\t\t\t\tnode_id = nodeList[k].attributes['id'].value\n\t\t\t\t\tpoint = nodeList[k].getElementsByTagName('point') # node is only one 3d point\n\t\t\t\t\tfor point0 in point:\n\t\t\t\t\t\tx = float(point0.attributes['x'].value)\n\t\t\t\t\t\ty = float(point0.attributes['y'].value)\n\t\t\t\t\t\tz = float(point0.attributes['z'].value)\n\t\t\t\t\t\tdiam = float(point0.attributes['d'].value)\n\n\t\t\t\t\t\tx,y,z,diam = self._massage_xyz(x,y,z,diam)\n\n\t\t\t\t\t\tself.nodex.append(x)\n\t\t\t\t\t\tself.nodey.append(y)\n\t\t\t\t\t\tself.nodez.append(z)\n\t\t\t\t\t\tself.noded.append(diam)\n\n\t\t\t\t\t\t# todo: somehow assign edge list\n\t\t\t\t\t\t# important so user can scroll through all nodes and\n\t\t\t\t\t\t# check they have >1 edge !!!\n\t\t\t\t\t\tnodeDict = {\n\t\t\t\t\t\t\t'idx': masterNodeIdx, # used by stack widget table\n\t\t\t\t\t\t\t'x': x,\n\t\t\t\t\t\t\t'y': y,\n\t\t\t\t\t\t\t'z': z,\n\t\t\t\t\t\t\t'zSlice': int(z), #todo remember this when I convert to um/pixel !!!\n\t\t\t\t\t\t\t'edgeList':[],\n\t\t\t\t\t\t\t'nEdges':0,\n\t\t\t\t\t\t}\n\t\t\t\t\t\tself.nodeDictList.append(nodeDict)\n\n\t\t\t\t\tmasterNodeIdx += 1\n\n\t\t\t#\n\t\t\t# edges\n\t\t\tstartEdgeIdx = masterEdgeIdx\n\t\t\tedges = vessel.getElementsByTagName('edges')\n\t\t\t#print(' found', len(edges), 'edges')\n\t\t\tfor j, edge in enumerate(edges):\n\t\t\t\tedgeList = vessel.getElementsByTagName('edge')\n\t\t\t\t#print('\t found', len(edgeList), 'edges')\n\t\t\t\t# one edge (vessel segment between 2 branch points)\n\t\t\t\tfor k in range(len(edgeList)):\n\t\t\t\t\tedge_id = edgeList[k].attributes['id'].value\n\t\t\t\t\tpoints = edgeList[k].getElementsByTagName('point') # edge is a list of 3d points\n\t\t\t\t\t# this is my 'edge' list, the tubes between branch points ???\n\t\t\t\t\t#print('\t\t for edge id', edge_id, 'found', len(points), 'points')\n\t\t\t\t\t# list of points for one edge\n\t\t\t\t\tthisSlabList = []\n\t\t\t\t\tnewZList = []\n\t\t\t\t\tfor point in points:\n\t\t\t\t\t\tx = float(point.attributes['x'].value)\n\t\t\t\t\t\ty = float(point.attributes['y'].value)\n\t\t\t\t\t\tz = float(point.attributes['z'].value)\n\t\t\t\t\t\tdiam = 
float(point.attributes['d'].value)\n\n\t\t\t\t\t\tself.orig_x.append(x)\n\t\t\t\t\t\tself.orig_y.append(y)\n\t\t\t\t\t\tself.orig_z.append(z)\n\n\t\t\t\t\t\tx,y,z,diam = self._massage_xyz(x,y,z,diam)\n\n\t\t\t\t\t\tself.x.append(x)\n\t\t\t\t\t\tself.y.append(y)\n\t\t\t\t\t\tself.z.append(z)\n\t\t\t\t\t\tself.d.append(diam)\n\t\t\t\t\t\tself.id.append(masterEdgeIdx)\n\n\t\t\t\t\t\tnewZList.append(z)\n\t\t\t\t\t\t'''\n\t\t\t\t\t\tself.d.append(diam)\n\t\t\t\t\t\tself.edgeIdx.append(masterEdgeIdx)\n\t\t\t\t\t\t'''\n\t\t\t\t\t\tthisSlabList.append(masterSlabIdx)\n\t\t\t\t\t\tmasterSlabIdx += 1\n\n\t\t\t\t\t# default\n\t\t\t\t\tedgeDict = {\n\t\t\t\t\t\t'type': 'edge',\n\t\t\t\t\t\t'idx': masterEdgeIdx, # used by stack widget table\n\t\t\t\t\t\t'edgeIdx': masterEdgeIdx,\n\t\t\t\t\t\t'n': len(newZList),\n\t\t\t\t\t\t'Diam': None,\n\t\t\t\t\t\t'Len 3D': None,\n\t\t\t\t\t\t'Len 2D': None,\n\t\t\t\t\t\t'Tort': None,\n\t\t\t\t\t\t'z': int(round(statistics.median(newZList))),\n\t\t\t\t\t\t'preNode': None,\n\t\t\t\t\t\t'postNode': None,\n\t\t\t\t\t\t'Bad': False,\n\t\t\t\t\t\t'slabList': thisSlabList, # list of slab indices on this edge\n\t\t\t\t\t\t'popList': [], # cases where we should be popped in joinEdges\n\t\t\t\t\t\t'editList': [], # cases where we should be edited in joinEdges\n\t\t\t\t\t\t'otherEdgeIdxList': [],\n\t\t\t\t\t\t'editPending': False,\n\t\t\t\t\t\t'popPending': False,\n\t\t\t\t\t\t}\n\n\t\t\t\t\tself.edgeDictList.append(edgeDict)\n\n\t\t\t\t\t# add nan\n\t\t\t\t\tself.x.append(np.nan)\n\t\t\t\t\tself.y.append(np.nan)\n\t\t\t\t\tself.z.append(np.nan)\n\t\t\t\t\tself.d.append(np.nan)\n\t\t\t\t\tself.id.append(np.nan)\n\t\t\t\t\tmasterSlabIdx += 1\n\t\t\t\t\t'''\n\t\t\t\t\tself.d.append(np.nan)\n\t\t\t\t\tself.edgeIdx.append(np.nan)\n\t\t\t\t\t'''\n\t\t\t\t\tself.orig_x.append(np.nan)\n\t\t\t\t\tself.orig_y.append(np.nan)\n\t\t\t\t\tself.orig_z.append(np.nan)\n\t\t\t\t\tmasterEdgeIdx += 1\n\n\t\t\t#\n\t\t\t# edgelists\n\t\t\tedgeListList = vessel.getElementsByTagName('edgelist')\n\t\t\t#print(' found', len(edgeListList), 'edgelists')\n\t\t\tfor j, edgeList in enumerate(edgeListList):\n\t\t\t\t# src/dst node are 0 based for given vessel\n\t\t\t\t# todo: save original indices from xml in my data structures !\n\t\t\t\tid = edgeList.attributes['id'].value # gives us the edge list index in self.x\n\t\t\t\tsrcNode = int(edgeList.attributes['sourcenode'].value)\n\t\t\t\tdstNode = int(edgeList.attributes['targetnode'].value)\n\t\t\t\t#print(' srcNode:', srcNode, 'dstNode:', dstNode)\n\n\t\t\t\tif srcNode != -1:\n\t\t\t\t\tself.edgeDictList[startEdgeIdx+j]['preNode'] = startNodeIdx+srcNode\n\t\t\t\tif dstNode != -1:\n\t\t\t\t\tself.edgeDictList[startEdgeIdx+j]['postNode'] = startNodeIdx+dstNode\n\n\t\t\t\t# using startNodeIdx is wrong !!!\n\t\t\t\tif srcNode != -1:\n\t\t\t\t\tself.nodeDictList[startNodeIdx+srcNode]['edgeList'].append(startEdgeIdx+j)\n\t\t\t\t\tself.nodeDictList[startNodeIdx+srcNode]['nEdges'] = len(self.nodeDictList[startNodeIdx+srcNode]['edgeList'])\n\t\t\t\tif dstNode != -1:\n\t\t\t\t\tself.nodeDictList[startNodeIdx+dstNode]['edgeList'].append(startEdgeIdx+j)\n\t\t\t\t\tself.nodeDictList[startNodeIdx+dstNode]['nEdges'] = len(self.nodeDictList[startNodeIdx+dstNode]['edgeList'])\n\n\t\t\t# debug\n\t\t\t'''\n\t\t\tfor idx, edge in enumerate(self.edgeDictList):\n\t\t\t\tprint('edge:', idx, 'preNode:', edge['preNode'], 'postNode:', edge['postNode'])\n\t\t\t\tprint(' edge[\"slabList\"]:', edge[\"slabList\"])\n\t\t\t\tif edge['preNode'] is not None:\n\t\t\t\t\tprint(' node 
self.nodeDictList[preNode]:', self.nodeDictList[edge['preNode']])\n\t\t\t\tif edge['postNode'] is not None:\n\t\t\t\t\tprint(' self.nodeDictList[postNode]:', self.nodeDictList[edge['postNode']])\n\t\t\t'''\n\t\t#\n\t\t# end vessels\n\t\t# for i, vessel in enumerate(vessels):\n\t\t#\n\n\t\t'''\n\t\tnPoints = len(self.x)\n\t\tself.id = np.full(nPoints, 0) #Return a new array of given shape and type, filled with fill_value.\n\t\t'''\n\n\t\t#\n\t\t# convert to numpy array\n\t\t# nodes\n\t\tself.nodex = np.array(self.nodex, dtype='float32')\n\t\tself.nodey = np.array(self.nodey, dtype='float32')\n\t\tself.nodez = np.array(self.nodez, dtype='float32')\n\n\t\t# edges\n\t\tself.x = np.array(self.x, dtype='float32')\n\t\tself.y = np.array(self.y, dtype='float32')\n\t\tself.z = np.array(self.z, dtype='float32')\n\t\tself.d = np.array(self.d, dtype='float32')\n\t\tself.id = np.array(self.id, dtype='float32')\n\n\t\t#\n\t\t# create dead ends\n\t\tself.deadEndx = []\n\t\tself.deadEndy = []\n\t\tself.deadEndz = []\n\t\tfor edgeDict in self.edgeDictList:\n\t\t\tif edgeDict['preNode'] is None:\n\t\t\t\tfirstSlabIdx = edgeDict['slabList'][0]\n\t\t\t\ttmpx = self.x[firstSlabIdx]\n\t\t\t\ttmpy = self.y[firstSlabIdx]\n\t\t\t\ttmpz = self.z[firstSlabIdx]\n\t\t\t\tself.deadEndx.append(tmpx)\n\t\t\t\tself.deadEndy.append(tmpy)\n\t\t\t\tself.deadEndz.append(tmpz)\n\t\t\tif edgeDict['postNode'] is None:\n\t\t\t\tlastSlabIdx = edgeDict['slabList'][-1]\n\t\t\t\ttmpx = self.x[lastSlabIdx]\n\t\t\t\ttmpy = self.y[lastSlabIdx]\n\t\t\t\ttmpz = self.z[lastSlabIdx]\n\t\t\t\tself.deadEndx.append(tmpx)\n\t\t\t\tself.deadEndy.append(tmpy)\n\t\t\t\tself.deadEndz.append(tmpz)\n\n\t\t# convert list of dead ends to nump array\n\t\tself.deadEndx = np.array(self.deadEndx, dtype='float32')\n\t\tself.deadEndy = np.array(self.deadEndy, dtype='float32')\n\t\tself.deadEndz = np.array(self.deadEndz, dtype='float32')\n\n\t\t# debug min/max of x/y/z\n\t\tif 1:\n\t\t\tprint(' x min/max', np.nanmin(self.x), np.nanmax(self.x))\n\t\t\tprint(' y min/max', np.nanmin(self.y), np.nanmax(self.y))\n\t\t\tprint(' z min/max', np.nanmin(self.z), np.nanmax(self.z))\n\n\t\t\tprint('taking abs value of z')\n\t\t\tself.z = np.absolute(self.z)\n\t\t\tself.deadEndz = np.absolute(self.deadEndz)\n\t\t\tself.nodez = np.absolute(self.nodez)\n\n\t\tprint(' loaded', masterNodeIdx, 'nodes,', masterEdgeIdx, 'edges, and approximately', masterSlabIdx, 'points')\n\n\t\t#\n\t\tself.__analyze()\n\n\t\tfor i in range(1):\n\t\t\tself.joinEdges()\n\t\tself.findCloseSlabs()\n\n\t\t# this works\n\t\t#self.makeVolumeMask()\n\n\t\treturn True", "def parse_xml1(filename):\r\n tree = ET.parse(filename)\r\n # tree=ElementTree()\r\n # tree.parse(filename)\r\n\r\n baseInfo={}\r\n baseInfo['foder'] = tree.find('foder').text\r\n baseInfo['filename'] = tree.find('filename').text\r\n baseInfo['path'] = tree.find('path').text\r\n baseInfo['source/database'] = tree.find('source/database').text\r\n #tree.find('database')\r\n baseInfo['size/width'] = tree.find('size/width').text\r\n baseInfo['size/height'] = tree.find('size/height').text\r\n baseInfo['size/depth'] = tree.find('size/depth').text\r\n baseInfo['segmented'] = tree.find('segmented').text\r\n objects = []\r\n for obj in tree.findall('object'):\r\n obj_struct = {}\r\n obj_struct['score'] = obj.find('score').text\r\n obj_struct['region'] = obj.find('region').text\r\n obj_struct['imageptr'] = obj.find('imageptr').text\r\n if obj.find('label_des') is None:\r\n obj_struct['label_des']=\"\"\r\n else:\r\n obj_struct['label_des'] = 
obj.find('label_des').text\r\n obj_struct['name'] = obj.find('name').text\r\n obj_struct['pose'] = obj.find('pose').text\r\n obj_struct['truncated'] = obj.find('truncated').text #remove int()\r\n obj_struct['difficult'] = obj.find('difficult').text #remove int()\r\n bbox = obj.find('bndbox')\r\n obj_struct['bbox'] = [int(bbox.find('xmin').text),\r\n int(bbox.find('ymin').text),\r\n int(bbox.find('xmax').text),\r\n int(bbox.find('ymax').text)]\r\n objects.append(obj_struct)\r\n\r\n return baseInfo,objects", "def ParseGraph(filename):\n vertices = []\n edges = set([])\n\n for l in open(filename):\n fields = [int(f) for f in l.split()]\n vertex = fields.pop(0)\n incident = [tuple(sorted([vertex, f])) for f in fields]\n vertices.append(vertex)\n edges.update(incident)\n\n return vertices, list(edges)", "def __init__(self, vfile, efile):\n # vertex_properties[i] stores the property of the vertex with ID = i\n self.vertex_properties = []\n\n # edge_list[i] stores the vertices that can be reached from the ith vertex\n self.edge_list = [[]]\n\n f = open(str(vfile))\n vertex_counter = int(f.readline())\n # Each vertex has its own index in vertex_properties and edge_list\n self.vertex_properties = [None for _ in range(vertex_counter)]\n self.edge_list = [[] for _ in range(vertex_counter)]\n\n # Reads each line in vfile after the first line and stores the vertex property\n for _ in range(vertex_counter):\n line = f.readline()\n x = re.findall(r'([0-9]*)( )([0-9]*)', line)\n self.vertex_properties[int(x[0][0])] = int(x[0][2])\n f.close()\n f = open(str(efile))\n edge_counter = int(f.readline())\n\n # Reads each line in efile after the first line and stores the edge's destination in the edge_list. If the edge\n # is between u and v, it stores v in u's edge list and stores u in v's edge list\n for _ in range(edge_counter):\n line = f.readline()\n x = re.findall(r'([0-9]*)( )([0-9]*)', line)\n u = x[0][0]\n v = x[0][2]\n self.edge_list[int(u)].append(int(v))\n self.edge_list[int(v)].append(int(u))\n f.close()", "def from_xml_file(xml_file_path):\n with open(xml_file_path, 'r+b') as f:\n xml = f.read()\n return GeometryTopologyData.from_xml(xml)", "def parse_vertex(lines):\n print \" * Parsing vertex\"\n return _parse_vn(lines, \"v %.6f %.6f %.6f\")", "def build_from_file(path):\n with open(path) as obj:\n raw_file = obj.read()\n file_lines = [line.split(\" \") for line in raw_file.split(\"\\n\")]\n\n vertices = {}\n faces = []\n for number, line in enumerate(file_lines):\n if line[0] == \"v\":\n vertices[number + 1] = tuple(map(float, line[1:]))\n if line[0] == \"f\":\n face = []\n for index in line[1:]:\n face.append(vertices[int(index)])\n face.append(vertices[int(line[1])])\n faces.append(face)\n return Object(points=faces)", "def __init__(self, inputStream):\n numVertices = int(inputStream.readline())\n self.vertices = {}\n for _ in range(numVertices):\n line = inputStream.readline().split()\n self.vertices[line[0]] = set(line)\n for v, adj in list(self.vertices.items()):\n for u in adj:\n if u in self.vertices:\n self.vertices[u].add(v)\n else:\n self.vertices[u] = set([v, u])", "def vaex_vertices_from_plyfile(filename):\n xyz = vertex_dict_from_plyfile(filename)\n return vx.from_dict(xyz)", "def ReadFenics(self, filename, element_type):\n\n if element_type == \"tet\":\n etype = \"tetrahedron\"\n elif element_type == \"hex\":\n etype = \"hexahedron\"\n elif element_type == \"tri\":\n etype = \"triangle\"\n elif element_type == \"quad\":\n etype = \"quadrilateral\"\n\n import xml.etree.cElementTree 
as ET\n root = ET.parse(filename).getroot()\n X = []\n T = []\n for child in root:\n if child.attrib['celltype'] != etype:\n raise ValueError(\"xml file does not contain {} elements\".format(element_type))\n\n for child in root:\n for cchild in child:\n if cchild.tag == \"vertices\":\n if element_type == \"tet\" or element_type == \"hex\":\n for child3 in cchild:\n x = float(child3.attrib['x'])\n y = float(child3.attrib['y'])\n z = float(child3.attrib['z'])\n X.append([x,y,z])\n elif element_type == \"tri\" or element_type == \"quad\":\n for child3 in cchild:\n x = float(child3.attrib['x'])\n y = float(child3.attrib['y'])\n X.append([x,y])\n\n elif cchild.tag == \"cells\":\n if element_type == \"tet\":\n for child3 in cchild:\n v0 = int(child3.attrib['v0'])\n v1 = int(child3.attrib['v1'])\n v2 = int(child3.attrib['v2'])\n v3 = int(child3.attrib['v3'])\n T.append([v0,v1,v2,v3])\n elif element_type == \"tri\":\n for child3 in cchild:\n v0 = int(child3.attrib['v0'])\n v1 = int(child3.attrib['v1'])\n v2 = int(child3.attrib['v2'])\n T.append([v0,v1,v2])\n\n\n X = np.array(X)\n T = np.array(T,dtype=np.int64)\n\n self.elements = T\n self.points = X\n self.element_type = element_type\n self.nelem = self.elements.shape[0]\n self.nnode = self.points.shape[0]\n\n if self.points.shape[1] == 3:\n if np.allclose(self.points[:,2],0.):\n self.points = np.ascontiguousarray(self.points[:,:2])\n\n ndim = self.InferSpatialDimension()\n if self.element_type == \"tri\" or self.element_type == \"quad\":\n self.GetEdges()\n self.GetBoundaryEdges()\n elif self.element_type == \"tet\" or self.element_type == \"hex\":\n self.GetFaces()\n self.GetBoundaryFaces()\n self.GetBoundaryEdges()", "def parse_xml(filename):\r\n tree = ET.parse(filename)\r\n # tree=ElementTree()\r\n # tree.parse(filename)\r\n\r\n baseInfo={}\r\n #baseInfo['folder'] = tree.find('folder').text\r\n baseInfo['filename'] = tree.find('filename').text\r\n baseInfo['path'] = tree.find('path').text\r\n baseInfo['source/database'] = tree.find('source/database').text\r\n #tree.find('database')\r\n baseInfo['size/width'] = tree.find('size/width').text\r\n baseInfo['size/height'] = tree.find('size/height').text\r\n baseInfo['size/depth'] = tree.find('size/depth').text\r\n baseInfo['segmented'] = tree.find('segmented').text\r\n objects = []\r\n for obj in tree.findall('object'):\r\n obj_struct = {}\r\n if obj.find('score') is None:\r\n obj_struct['score']=\"\"\r\n else:\r\n obj_struct['score'] = obj.find('score').text\r\n if obj.find('region') is None:\r\n obj_struct['region']=\"\"\r\n else:\r\n obj_struct['region'] = obj.find('region').text\r\n if obj.find('imageptr') is None:\r\n obj_struct['imageptr']=\"\"\r\n else:\r\n obj_struct['imageptr'] = obj.find('imageptr').text\r\n # obj_struct['score'] = obj.find('score').text\r\n # obj_struct['region'] = obj.find('region').text\r\n # obj_struct['imageptr'] = obj.find('imageptr').text\r\n if obj.find('label_des') is None:\r\n obj_struct['label_des']=\"\"\r\n else:\r\n obj_struct['label_des'] = obj.find('label_des').text\r\n obj_struct['name'] = obj.find('name').text\r\n obj_struct['pose'] = obj.find('pose').text\r\n obj_struct['truncated'] = obj.find('truncated').text #remove int()\r\n obj_struct['difficult'] = obj.find('difficult').text #remove int()\r\n bbox = obj.find('bndbox')\r\n obj_struct['bbox'] = [int(bbox.find('xmin').text),\r\n int(bbox.find('ymin').text),\r\n int(bbox.find('xmax').text),\r\n int(bbox.find('ymax').text)]\r\n objects.append(obj_struct)\r\n\r\n return baseInfo,objects", "def 
parse_rig_vertices(f):\n vertices_list = []\n for line in f:\n vertices = line.split()\n point = [tuple(map(int, str_point.split(','))) for str_point in vertices]\n vertices_list.append(point)\n\n return vertices_list", "def parse_xml(file: Path):\n check_file(file) # Check the existency of the file\n\n doc = ET.parse(file)\n data = parse_xml_tree(doc.getroot())\n return data", "def parse_voc(filename):\n tree = ET.parse(filename)\n objects = []\n for obj in tree.findall('object'):\n obj_struct = {}\n obj_struct['name'] = obj.find('name').text\n obj_struct['difficult'] = int(obj.find('difficult').text)\n bbox = obj.find('bndbox')\n obj_struct['bbox'] = [int(bbox.find('xmin').text),\n int(bbox.find('ymin').text),\n int(bbox.find('xmax').text),\n int(bbox.find('ymax').text)]\n objects.append(obj_struct)\n\n return objects", "def load_xml(self,filename):\n self.initvars()\n source = iter(ET.iterparse(filename, events = ('start','end')))\n self.name = source.next()[1].tag\n for event,elem in source:\n if event == 'end' and elem.tag == 'row':\n row = [None]*self.numcols()\n for name,val in elem.attrib.items():\n try:\n idx = self.getColIndex(name)\n except ColumnNotFoundError:\n idx = len(self.cols)\n row.append(None)\n # Add new column to the table\n self.cols.append(set([name]))\n for oldrow in self.data:\n oldrow.append(None)\n row[idx] = val\n self.data.append(row)\n self.initTypes()", "def parseXML(xmlFile):\n\n tree = etree.parse(xmlFile)\n root = tree.getroot() \n transitionTable = dict()\n transitionTable = getTransitions(tree, root, transitionTable)\n return tree, root, transitionTable", "def parse_xml(xmlfile):\n # create element tree object\n root = ET.parse(xmlfile).getroot()\n return root", "def read(self, filename):\n tree = ElementTree.parse(filename)\n root = tree.getroot()\n\n # Only consider first trk ! 
\n trk = root.find('{http://www.topografix.com/GPX/1/1}trk')\n for segID, trkseg in enumerate(trk.findall('{http://www.topografix.com/GPX/1/1}trkseg')):\n segment = []\n for ptID, trkpt in enumerate(trkseg.findall('{http://www.topografix.com/GPX/1/1}trkpt')):\n point = {\n \"segID\": segID,\n \"ptID\": ptID,\n \"lon\": float(trkpt.attrib[\"lon\"]),\n \"lat\": float(trkpt.attrib[\"lat\"]),\n \"ele\": 0,\n \"time\": 0\n }\n\n ele = trkpt.find('{http://www.topografix.com/GPX/1/1}ele')\n if ele is not None:\n point[\"ele\"] = float(ele.text)\n else:\n point[\"ele\"] = None\n\n time = trkpt.find('{http://www.topografix.com/GPX/1/1}time')\n if time is not None:\n point[\"time\"] = time.text.replace('T', ' ').replace('Z', '')\n else:\n point[\"time\"] = None\n\n # print(point)\n segment.append(point)\n\n self.data.append(segment)", "def vertices(self):\n return self.pointlist", "def read_xml_file(filename):\n###############################################################################\n with __FILE_OPEN(filename) as file_:\n tree = ET.parse(file_)\n root = tree.getroot()\n # End with\n return tree, root", "def parse_graph(self):\n\t\tnx_graph = nx.Graph()\n\t\tfor node in self.vertices:\n\t\t\tnx_graph.add_node(node)\n\n\t\tfor edge in self.edges:\n\t\t\tnode1, node2, weight = edge\n\t\t\tnx_graph.add_edge(node1, node2, weight=weight)\n\n\t\treturn nx_graph", "def parsexml(self):\n raise NotImplementedError", "def __init__(self, sources, sourcebyid, vertexsource, primitives, xmlnode=None):\n self.sources = sources\n \"\"\"Source list inside this geometry tag.\"\"\"\n self.sourceById = sourcebyid\n \"\"\"Sources indexed by id.\"\"\"\n self.vertexsource = vertexsource\n \"\"\"The source id used as vertex list.\"\"\"\n self._primitives = primitives\n if xmlnode != None: \n self.xmlnode = xmlnode\n self.id = xmlnode.get('id')\n else:\n self.id = gid or 'geometry' + str(id(self))\n self.xmlnode = ElementTree.Element('geometry')\n mesh = ElementTree.Element('mesh')\n self.xmlnode.append( mesh )\n for source in sources:\n mesh.append( source.xmlnode )\n vxml = ''\n for semantic, source in self.sourceById[self.vertexsource].items():\n vxml.append('<input semantic=\"%s\" source=\"#%s\" />' % (semantic, source.id))\n vxml = '<vertices id=\"%s\">%s</vertices>' % (self.vertexsource, vxml)\n mesh.append( ElementTree.fromstring(vxml) )\n for tset in _primitives:\n mesh.append(tset.xmlnode)", "def get_vertices(self):\n return self.vertices", "def get_vertices(self):\n return self.vertices", "def read_xml(self):\n pass", "def parse_rec(filename):\n tree = et.parse(filename)\n objects = []\n for obj in tree.findall('object'):\n obj_struct = {}\n obj_struct['name'] = obj.find('name').text\n obj_struct['pose'] = obj.find('pose').text\n obj_struct['truncated'] = int(obj.find('truncated').text)\n obj_struct['difficult'] = int(obj.find('difficult').text)\n bbox = obj.find('bndbox')\n obj_struct['bbox'] = [int(bbox.find('xmin').text) - 1,\n int(bbox.find('ymin').text) - 1,\n int(bbox.find('xmax').text) - 1,\n int(bbox.find('ymax').text) - 1]\n objects.append(obj_struct)\n\n return objects", "def get_data_from_file(file_name):\n try:\n with open(file_name, 'rb') as f:\n raw_dict = xmltodict.parse(f.read())\n ways = [w for w in clean_list(raw_dict['osm']['way'], element_type='way') if filter_out(w)]\n nodes = clean_list(raw_dict['osm']['node'], element_type='node')\n relations = clean_list(raw_dict['osm']['relation'], element_type='relation')\n selection = [{'version': parse_xml_parameter('version', raw_dict),\n 
'osm3s': parse_xml_parameter('osm3s', raw_dict),\n 'generator': parse_xml_parameter('generator', raw_dict),\n 'bounds': parse_xml_parameter('bounds', raw_dict),\n 'elements': ways + nodes + relations\n }\n ]\n return selection\n except: # (IOError, xmltodict.ExpatError):\n return None", "def read_xml(self):\n connection = urlopen(self.url)\n in_xml = connection.read()\n state = ElementTree.fromstring(in_xml)\n records = []\n record = []\n\n # Specific to CHP\n # TODO(David) Nested for loops are bad. Change this to be more\n # efficient, possibly use generators.\n for center in state:\n rec_center = center.attrib['ID']\n\n for dispatch in center:\n rec_dispatch = dispatch.attrib['ID']\n\n for log in dispatch:\n record = [rec_center, rec_dispatch]\n\n record.append(log.attrib['ID'])\n\n log_time = log.find('LogTime').text.strip('\"')\n log_type = log.find('LogType').text.strip('\"')\n location = log.find('Location').text.strip('\"')\n loc_desc = log.find('LocationDesc').text.strip('\"')\n area = log.find('Area').text.strip('\"')\n\n record.append(log_time)\n record.append(log_type)\n record.append(location)\n record.append(loc_desc)\n record.append(area)\n\n latlon = log.find('LATLON').text.strip('\"')\n\n (lat, lon) = latlon.split(':')\n lat = str(lat[:2]) + '.' + str(lat[2:])\n lon = '-' + str(lon[:3]) + '.' + str(lon[3:])\n\n record.append(lat)\n record.append(lon)\n\n records.append(record)\n\n self.records = records" ]
[ "0.65993637", "0.638659", "0.6165882", "0.6143035", "0.61255836", "0.6096457", "0.60697263", "0.59190804", "0.59172356", "0.588213", "0.58239716", "0.58230084", "0.58069694", "0.5771421", "0.5767745", "0.5759676", "0.57542926", "0.57433605", "0.57209617", "0.57055056", "0.56912017", "0.5686367", "0.5683834", "0.56520975", "0.56448984", "0.56448984", "0.56369936", "0.56279314", "0.56083506", "0.5602248" ]
0.7063151
0
Calculates the ratio between the highest resolution image and lowest resolution image. Returns the ratio as a tuple (Xratio, Yratio).
def calculateRatio(levelDims): highestReso = np.asarray(levelDims[0]) lowestReso = np.asarray(levelDims[-1]) Xratio, Yratio = highestReso/lowestReso return (Xratio, Yratio)
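A quick, self-contained check of the calculateRatio logic above. The pyramid dimensions mimic an OpenSlide level_dimensions tuple (largest level first) and are invented for illustration.

import numpy as np

def calculateRatio(levelDims):
    # Element-wise ratio of the largest pyramid level to the smallest, as in the record above.
    highestReso = np.asarray(levelDims[0])
    lowestReso = np.asarray(levelDims[-1])
    Xratio, Yratio = highestReso / lowestReso
    return (Xratio, Yratio)

level_dims = ((40000, 30000), (10000, 7500), (2500, 1875))
x_ratio, y_ratio = calculateRatio(level_dims)
print(x_ratio, y_ratio)  # 16.0 16.0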
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def determine_real_to_pixel_ratio(\n image_shape: Tuple[int, int],\n min_x: float,\n min_y: float,\n max_x: float,\n max_y: float,\n):\n image_x = image_shape[1]\n image_y = image_shape[0]\n diff_x = max_x - min_x\n diff_y = max_y - min_y\n\n resolution_x = image_x / diff_x\n resolution_y = image_y / diff_y\n\n # Should be similar as cells are rectangles\n if not np.isclose(resolution_x, resolution_y):\n logging.info(\n f\"Resolution in x and y axes differ: x: {resolution_x} y: {resolution_y}\"\n )\n\n resolution = np.mean([resolution_x, resolution_y])\n\n return resolution", "def get_cursor_ratio(image_size: tuple, screen_size: tuple):\n x_ratio = screen_size[0] / image_size[0]\n y_ratio = screen_size[1] / image_size[1]\n\n return x_ratio, y_ratio", "def get_ratio(image1, image2):\n if np.diff(np.array(image1.shape[:2]) / np.array(image2.shape[:2])) == 0:\n ratio = image1.shape[0] / image2.shape[0]\n else:\n raise ValueError(\"different height and width ratio\")\n return ratio", "def get_scaling_ratio(img):\n\n healthy_img_area = 4872 * 6496\n input_img_area = img.shape[0] * img.shape[1]\n ratio = input_img_area / healthy_img_area\n return ratio", "def ratio(self):\n return float(self.max_width) / self.max_height", "def effective_resolution(self) -> Tuple[int, int]:\n import numpy as np\n\n assert self.info.resolution, 'No base resolution specified'\n rot = (self.info.rotate or 0) * math.pi / 180\n sin = math.sin(rot)\n cos = math.cos(rot)\n scale = np.array([[self.info.scale_x or 1.0, self.info.scale_y or 1.0]])\n resolution = np.array([[self.info.resolution[0], self.info.resolution[1]]])\n rot_matrix = np.array([[sin, cos], [cos, sin]])\n resolution = (scale * abs(np.cross(rot_matrix, resolution)))[0]\n return int(round(resolution[0])), int(round(resolution[1]))", "def pixel_size_ratio(self):\n return 2**(self.levels[-1] - self.levels[0])", "def calculate_image_scale(source_width, source_height, target_width, target_height):\n if source_width == target_width and source_height == target_height:\n return 1.0\n\n source_ratio = source_width / source_height\n target_ratio = target_width / target_height\n\n if target_ratio < source_ratio:\n scale = target_width / source_width\n else:\n scale = target_height / source_height\n\n return scale", "def horizontal_ratio(self):\n if self.pupils_located:\n pupil_left = self.eye_left.pupil.x / (self.eye_left.center[0] * 2 - 10)\n pupil_right = self.eye_right.pupil.x / (self.eye_right.center[0] * 2 - 10)\n return (pupil_left + pupil_right) / 2", "def gridratio( grid1, grid2):\n\n nx1 = grid1.img_width\n ny1 = grid1.img_height\n nx2 = grid2.img_width\n ny2 = grid2.img_height\n\n ratio = 0.\n rms = 0.\n\n if nx1 != nx2:\n print(\"GridRatio: Nx1 != Nx2 (%d, %d)\" % (nx1, nx2))\n return ratio, rms\n\n if ny1 != ny2:\n print(\"GridRatio: Ny1 != Ny2 (%d, %d)\" % (ny1, ny2))\n return ratio, rms\n\n count = 0\n nonzero = np.zeros(nx1*ny1)\n\n # copy to ratio array\n gridratio = copy.deepcopy( grid1)\n\n for iii in range(nx1):\n for jjj in range(ny1):\n # put in zero as default\n gridratio.image[jjj,iii] = 0.\n if grid1.image[jjj,iii] > EPSILON:\n if grid2.image[jjj,iii] > EPSILON:\n nonzero[count] = grid1.image[jjj,iii]/grid2.image[jjj,iii]\n count = count + 1\n if count < 2:\n print (\"No overlap in non-zero samples\")\n return ratio, rms, gridratio\n\n nonzero = nonzero[0:count]\n asum = np.sum( nonzero)\n ratio = asum/float(count)\n rms = np.std( nonzero)\n print (\"Grid Ratio: %.4f +/- %.4f for %d samples\" % (ratio, rms/np.sqrt(count), count))\n # 
return the ratio grid \n return ratio, rms, gridratio", "def get_resolution(ds):\n\n if 'x' in ds.coords and 'y' in ds.coords:\n x = ds.coords['x'].values\n y = ds.coords['y'].values\n resx = abs(x[-1] - x[0]) / (len(x) - 1)\n resy = abs(y[-1] - y[0]) / (len(y) - 1)\n return (resx, resy)\n else:\n transform = get_transform(ds)\n if transform is not None:\n return (abs(transform.a), abs(transform.e))\n elif 'res' in ds.attrs:\n return ds.attrs['res']\n\n return None", "def _resolution(self):\n _, xres, _, _, _, yres = self.geotransform\n return xres, yres", "def _image_resolution(image_filename):\n img = mpimg.imread(image_filename)\n return img.shape", "def get_image_size(self, **kwargs):\n points = kwargs['points']\n max_val = points.max(0)\n min_val = points.min(0)\n height = np.ceil((max_val[0] - min_val[0]) * self.res_x).astype(int)\n width = np.ceil((max_val[1] - min_val[1]) * self.res_y).astype(int)\n\n return height, width", "def get_screen_resolution() -> (int, int):\n h_desktop = user32.GetDesktopWindow()\n\n # Get screen resoltion virtualized for DPI\n rect = RECT()\n success = user32.GetWindowRect(h_desktop, pointer(rect))\n if not success:\n raise OSError(GetLastError())\n\n # Get rescale factor for primary monitor\n hmonitor = user32.MonitorFromWindow(\n h_desktop, MONITOR_DEFAULTTOPRIMARY)\n rescale_factor = c_long(0)\n result = shcore.GetScaleFactorForMonitor(\n hmonitor, pointer(rescale_factor))\n if result != S_OK:\n logging.error(\"GetScaleFactorForMonitor failed.\")\n raise OSError(GetLastError())\n\n # Calcuate the resolution before scaling.\n rescale_factor = rescale_factor.value\n res_x = int((rect.right - rect.left) * rescale_factor / 100)\n res_y = int((rect.bottom - rect.top) * rescale_factor / 100)\n return res_x, res_y", "def raw_resolution(resolution, splitter=False):\n width, height = resolution\n if splitter:\n fwidth = (width + 15) & ~15\n else:\n fwidth = (width + 31) & ~31\n fheight = (height + 15) & ~15\n return fwidth, fheight", "def get_current_resolution(self):\n return self.display_info[\"width\"], self.display_info[\"height\"]", "def pe_ratio(self):\n if self._pe_ratio == None:\n return float('inf')\n return self._pe_ratio", "def vertical_ratio(self):\n if self.pupils_located:\n pupil_left = self.eye_left.pupil.y / (self.eye_left.center[1] * 2 - 10)\n pupil_right = self.eye_right.pupil.y / (self.eye_right.center[1] * 2 - 10)\n return (pupil_left + pupil_right) / 2", "def pixelSize(self):\n br = self.sceneBoundingRect()\n if self.image is None:\n return 1,1\n return br.width()/self.width(), br.height()/self.height()", "def convert_coordinate(X, Y, im_w, im_h):\n display_w, display_h = 1680, 1050\n target_ratio = display_w / float(display_h)\n ratio = im_w / float(im_h)\n\n delta_w, delta_h = 0, 0\n if ratio > target_ratio:\n new_w = display_w\n new_h = int(new_w / ratio)\n delta_h = display_h - new_h\n else:\n new_h = display_h\n new_w = int(new_h * ratio)\n delta_w = display_w - new_w\n dif_ux = delta_w // 2\n dif_uy = delta_h // 2\n scale = im_w / float(new_w)\n X = (X - dif_ux) * scale\n Y = (Y - dif_uy) * scale\n return X, Y", "def __execute_ratio(self, position: tuple):\n x = position[0] * self.__ratio[0]\n y = position[1] * self.__ratio[1]\n\n return x, y", "def division(self, x,y,a,b):\n real = (a*x + b*y)/(a*a + b*b)\n img = (a*y - b*x)/(a*a + b*b)\n return real, img", "def get_new_img_size(w, h, img_min_side = 600):\n if w <= h:\n f = float(img_min_side) / w\n resized_h = int(f * h)\n resized_w = img_min_side\n else:\n f = float(img_min_side) 
/ h\n resized_w = int(f * w)\n resized_h = img_min_side\n \n return resized_w, resized_h", "def get_params(\n img: Tensor, scale: List[float], ratio: List[float]\n ) -> Tuple[int, int, int, int]:\n width, height = F._get_image_size(img)\n area = height * width\n\n for _ in range(10):\n target_area = area * torch.empty(1).uniform_(scale[0], scale[1]).item()\n log_ratio = torch.log(torch.tensor(ratio))\n aspect_ratio = torch.exp(\n torch.empty(1).uniform_(log_ratio[0], log_ratio[1])\n ).item()\n\n w = int(round(math.sqrt(target_area * aspect_ratio)))\n h = int(round(math.sqrt(target_area / aspect_ratio)))\n\n if 0 < w <= width and 0 < h <= height:\n i = torch.randint(0, height - h + 1, size=(1,)).item()\n j = torch.randint(0, width - w + 1, size=(1,)).item()\n return i, j, h, w\n\n # Fallback to central crop\n in_ratio = float(width) / float(height)\n if in_ratio < min(ratio):\n w = width\n h = int(round(w / min(ratio)))\n elif in_ratio > max(ratio):\n h = height\n w = int(round(h * max(ratio)))\n else: # whole image\n w = width\n h = height\n i = (height - h) // 2\n j = (width - w) // 2\n return i, j, h, w", "def compute_resolution(zoom, size_px):\n # Calibration data:\n dist_in_um = 10\n dist_in_px = np.array([21.13, 19.62, 8.93])\n zooms = np.array([1.5, 3, 4.5])\n image_max_sizes = np.array([330, 610, 410])\n \n return np.mean((dist_in_um/dist_in_px) * (zoom/zooms) * (image_max_sizes/size_px))", "def calculate_minimum_height_width(image_width, image_height, desired_width, desired_height):\n image_width, image_height = float(image_width), float(image_height)\n desired_width, desired_height = float(desired_width), float(desired_height)\n\n # resize the width and height to match the desired height, while maintaining ratio\n scaled_width = desired_height / image_height * image_width\n scaled_height = desired_height\n\n # if the new width is below the desired width, scale up to match width\n if scaled_width < desired_width:\n scaled_height = desired_width / scaled_width * scaled_height\n scaled_width = desired_width\n\n scaled_width, scaled_height = int(scaled_width), int(scaled_height)\n return scaled_width, scaled_height", "def compare_images(self):\r\n m = round(self.mse(self.image_a, self.image_b), 4)\r\n s = round(ssim(self.image_a, self.image_b) * 100, 5)\r\n return (\r\n m, s)", "def get_thumbnail_magnification(slide):\n ratio = np.asarray(slide.dimensions) / np.asarray(slide.associated_images[\"thumbnail\"].size)\n # np.sqrt(np.prod(ratio))\n return ratio", "def _resolve_size(self, width, height, center_x, center_y):\n if self.size_type == 'explicit':\n size_x, size_y = self.size\n size_x = percentage(size_x, width)\n size_y = percentage(size_y, height)\n return size_x, size_y\n left = abs(center_x)\n right = abs(width - center_x)\n top = abs(center_y)\n bottom = abs(height - center_y)\n pick = min if self.size.startswith('closest') else max\n if self.size.endswith('side'):\n if self.shape == 'circle':\n size_xy = pick(left, right, top, bottom)\n return size_xy, size_xy\n # else: ellipse\n return pick(left, right), pick(top, bottom)\n # else: corner\n if self.shape == 'circle':\n size_xy = pick(math.hypot(left, top), math.hypot(left, bottom),\n math.hypot(right, top), math.hypot(right, bottom))\n return size_xy, size_xy\n # else: ellipse\n corner_x, corner_y = pick(\n (left, top), (left, bottom), (right, top), (right, bottom),\n key=lambda a: math.hypot(*a))\n return corner_x * math.sqrt(2), corner_y * math.sqrt(2)" ]
[ "0.72195196", "0.71032196", "0.6577099", "0.64753616", "0.6439772", "0.64074016", "0.6393361", "0.6141478", "0.6115658", "0.6099829", "0.6093291", "0.60600996", "0.6044847", "0.5948316", "0.59124255", "0.5852547", "0.57684225", "0.5760546", "0.57544696", "0.5742023", "0.57306653", "0.5715605", "0.5711597", "0.5668587", "0.56465125", "0.5643573", "0.56403893", "0.5622601", "0.5618265", "0.5592258" ]
0.74833775
0
Parses XML File to get mask vertices and returns matrix masks where 1 indicates the pixel is inside the mask, and 0 indicates outside the mask.
def getMask(xmlFile, svsFile, pattern): vertices = parseXML(xmlFile, pattern) # Parse XML to get vertices of mask if not len(vertices[pattern]): slide = 0 mask = 0 return slide, mask slide = open_slide(svsFile) levelDims = slide.level_dimensions mask = createMask(levelDims, vertices, pattern) return slide, mask
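getMask delegates the rasterisation to a createMask helper that is not shown in this record, so the sketch below is only a guess at what such a helper could look like: it scales the full-resolution vertex coordinates down to the smallest pyramid level and fills the polygons with PIL. The function name, the choice of PIL, and the decision to draw on the lowest-resolution level are assumptions, not part of the original code.

import numpy as np
from PIL import Image, ImageDraw

def create_mask_sketch(levelDims, vertices, pattern):
    # Hypothetical stand-in for createMask: rasterise annotation polygons onto the
    # lowest-resolution level, scaling coordinates by the pyramid ratio.
    width, height = levelDims[-1]
    x_ratio = levelDims[0][0] / width
    y_ratio = levelDims[0][1] / height
    mask_img = Image.new("1", (width, height), 0)
    draw = ImageDraw.Draw(mask_img)
    for region in vertices[pattern]:
        points = [(x / x_ratio, y / y_ratio) for x, y in zip(region["X"], region["Y"])]
        draw.polygon(points, outline=1, fill=1)
    return np.array(mask_img).astype(np.uint8)  # 1 inside the annotation, 0 outside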
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask", "def _prepare_mask_file(mask):\n result = np.ndarray((mask.shape[0], mask.shape[1]), dtype=np.uint8)\n for i in range(mask.shape[0]):\n for j in range(mask.shape[1]):\n\n if mask[i][j] > 0:\n result[i][j] = 1\n else:\n result[i][j] = 0\n \n return result", "def get_image_mask_from_xml(bbox_path, image_size, valid_class_names=[]):\n masked_img = np.ones(image_size, dtype='uint8')\n \n root = elem.parse(bbox_path).getroot()\n annotations = root.findall('object')\n if valid_class_names:\n annotations = filter(lambda x: x.find('name').text in valid_class_names, annotations)\n \n for obj in annotations:\n bbox = obj.find('bndbox')\n get_coord = lambda name: int(bbox.find(name).text)\n masked_img[\n get_coord('ymin'):get_coord('ymax'),\n get_coord('xmin'):get_coord('xmax')\n ] = 0\n return masked_img", "def _build_mask(self, xg, yg):\n\n # 1. create mask based on meshes\n points = np.vstack((xg.flatten(), yg.flatten())).T\n\n # 2. extract edge points using el_pos\n edge_points = self.node[np.arange(16)]\n path = Path(edge_points, closed=False)\n mask = path.contains_points(points)\n\n return mask", "def coordinates(self, mask):\n y,x = mask.nonzero()\n return list(zip(x,y))", "def _populate_mask_data(self, filename: str) -> None:\n if self.seg_images.get(filename) is None:\n return None\n\n mask = cv2.imread(self.seg_targets[filename])\n mask = cv2.cvtColor(mask, cv2.COLOR_BGR2RGB)\n\n # convert pixel masks to multidimentional\n height, width = mask.shape[:2]\n segmentation_mask = np.zeros((height, width, len(VOC_COLORMAP)), dtype=np.float32)\n for label_index, label in enumerate(VOC_COLORMAP):\n segmentation_mask[:, :, label_index] = np.all(mask == label, axis=-1).astype(float)\n\n return segmentation_mask", "def load_mask(self, image_id):\n # Override this function to load a mask from your dataset.\n # Otherwise, it returns an empty mask.\n mask = np.empty([0, 0, 0])\n class_ids = np.empty([0], np.int32)\n return mask, class_ids", "def load_mask(self, image_id):\n # Override this function to load a mask from your dataset.\n # Otherwise, it returns an empty mask.\n logging.warning(\"You are using the default load_mask(), maybe you need to define your own one.\")\n mask = np.empty([0, 0, 0])\n class_ids = np.empty([0], np.int32)\n return mask, class_ids", "def load_mask(self, image_id):\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"face\":\n return super(self.__class__, self).load_mask(image_id)\n info = self.image_info[image_id]\n mask = np.zeros([info['height'], info['width'], len(info['boundingbox'])], dtype=np.uint8)\n for i, p in enumerate(info['boundingbox'].values()):\n rr, cc = skimage.draw.polygon(p['y'], p['x'])\n mask[rr, cc, i] = 1\n return mask, np.ones([mask.shape[-1]], dtype=np.int32)", "def parseXML(xmlFile, pattern):\n \n tree = ET.parse(xmlFile) # Convert XML file into tree representation\n root = tree.getroot()\n\n regions = root.iter('Region') # Extract all Regions\n vertices = {pattern: []} # Store all vertices in a dictionary\n\n for region in regions: \n label = region.get('Text') # label either as 'ROI' or 'normal'\n if label == pattern:\n vertices[label].append({'X':[], 'Y':[]})\n\n for vertex in region.iter('Vertex'): \n X = float(vertex.get('X'))\n Y = float(vertex.get('Y'))\n\n vertices[label][-1]['X'].append(X)\n 
vertices[label][-1]['Y'].append(Y)\n\n return vertices", "def get_mask(self, anno, img_info) -> np.ndarray:\n m = np.zeros((img_info[\"height\"], img_info[\"width\"]), dtype=np.float32)\n\n for obj in anno:\n if obj[\"iscrowd\"]:\n rle = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n m += mask\n elif obj[\"num_keypoints\"] == 0:\n rles = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n for rle in rles:\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n\n m += mask\n\n return (m < 0.5).astype(np.float32)", "def load_mask(self, image_id):\n info = self.image_info[image_id]\n mask_paths = glob.glob(info['path'].replace('images', 'masks').replace('.png', '*.png'))\n masks = []\n class_ids = []\n for mask_path in mask_paths:\n# print(mask_path)\n mask = cv2.imread(mask_path,cv2.IMREAD_GRAYSCALE) \n masks.append(mask)\n if 'normal' in mask_path:\n class_ids.append(0)\n if 'benign' in mask_path:\n class_ids.append(1)\n if 'malignant' in mask_path:\n class_ids.append(2)\n masks = np.moveaxis(masks,0,-1)\n class_ids = np.array(class_ids)\n return masks, class_ids", "def test_get_mask(self):\n\n spine_data_loader = SpineDataLoader(dirpath_data=self.dirpath,\n batch_size=4)\n\n for idx in range(4):\n mask = spine_data_loader.get_mask(str(idx))\n assert mask.shape == (256, 256, 1)\n assert mask.dtype == 'int64'", "def load_mask(self, image_id):\n image_info = self.image_info[image_id]\n annotations = image_info['annotations']\n instance_masks = []\n class_ids = []\n \n for annotation in annotations:\n class_id = annotation['category_id']\n mask = Image.new('1', (image_info['width'], image_info['height']))\n mask_draw = ImageDraw.ImageDraw(mask, '1')\n for segmentation in annotation['segmentation']:\n mask_draw.polygon(segmentation, fill=1)\n bool_array = np.array(mask) > 0\n instance_masks.append(bool_array)\n class_ids.append(class_id)\n\n mask = np.dstack(instance_masks)\n class_ids = np.array(class_ids, dtype=np.int32)\n \n return mask, class_ids", "def load_mask(self, image_id):\n\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"pcb\":\n return super(self.__class__, self).load_mask(image_id)\n\n # convert polygons to a bitmap mask of shape\n # [height, width, instance_count]\n info = self.image_info[image_id]\n \n mask = np.zeros([info[\"height\"], info[\"width\"], len(info[\"polygons\"])],\n dtype=np.uint8)\n \n for i, p in enumerate(info[\"polygons\"]):\n # get indexes of pixels inside the polygon and set them to 1\n rr, cc = skimage.draw.polygon(p['all_points_y'], p['all_points_x'])\n mask[rr, cc, i] = 1\n\n # return mask, and array of class IDs of each instance.\n # since we have one class ID only, we return an array of 1s\n return mask.astype(np.bool), info[\"class_ids\"]", "def _get_mask(self, anno, idx):\n coco = self.coco\n img_info = coco.loadImgs(self.img_ids[idx])[0]\n\n m = np.zeros((img_info['height'], img_info['width']), dtype=np.float32)\n\n for obj in anno:\n if 'segmentation' in obj:\n if obj['iscrowd']:\n rle = pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n m += 
pycocotools.mask.decode(rle)\n elif obj['num_keypoints'] == 0:\n rles = pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n for rle in rles:\n m += pycocotools.mask.decode(rle)\n\n return m < 0.5", "def getHitmask(self,image):\n\t\tmask = []\n\t\tfor x in range(image.get_width()):\n\t\t\tmask.append([])\n\t\t\tfor y in range(image.get_height()):\n\t\t\t\tmask[x].append(bool(image.get_at((x,y))[3]))\n\t\treturn mask", "def choose_mask(self, bboxFile):\n f = open(bboxFile, 'r')\n content = f.read().split('\\n')\n ind = np.random.randint(len(content) - 1)\n line = content[ind]\n words = line.split(',')\n\n maskFile = words[0]\n maskCoord = []\n\n for item in words[1:]:\n item = util.strip_paren(item)\n item = item.lstrip().rstrip()\n maskCoord.append(item)\n\n f.close()\n\n return maskFile, maskCoord", "def load_mask_pre(self, image_id, mask_path):\n img = Image.open(mask_path)\n colors = img.getcolors()\n n_dim = np.shape(colors)\n num_obj = n_dim[0]-1 #not include the background\n\n mask = np.zeros([np.shape(img)[0], np.shape(img)[1], num_obj], dtype=np.uint8)\n mask = self.draw_mask(num_obj, mask, img, colors)\n\n # Map class names to class IDs.\n class_ids = []\n for i in range(num_obj):\n class_ids.append(colors[i+1][1])\n\n return mask.astype(np.bool), np.array(class_ids, dtype=np.int32) #mask.astype(np.bool)", "def get_mask_arr(path):\n with rasterio.open(path) as src:\n img = src.read().transpose((1, 2, 0))\n seg = np.array(img, dtype=int)\n\n return seg[:, :, 0]", "def load_mask(filename):\n nib_image = nib.load(filename)\n mask_affine = nib_image.affine\n\n return preprocess_nib(nib_image, is_mask=True), mask_affine", "def load_mask(self, image_id):\n # If not a vesicle dataset image, delegate to parent class.\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"vesicle\":\n return super(self.__class__, self).load_mask(image_id)\n\n # Convert polygons to a bitmap mask of shape\n # [height, width, instance_count]\n info = self.image_info[image_id]\n mask = np.zeros([info[\"height\"], info[\"width\"], len(info[\"polygons\"])],\n dtype=np.uint8)\n for i, p in enumerate(info[\"polygons\"]):\n rr, cc = skimage.draw.polygon(p[1], p[0])\n mask[rr, cc, i] = 1\n\n # Return mask, and array of class IDs of each instance. 
Since we have\n # one class ID only, we return an array of 1s\n return mask.astype(np.bool), np.ones([mask.shape[-1]], dtype=np.int32)", "def load_mask(self, image_id):\r\n info = self.image_info[image_id]\r\n mask = tifffile.imread(self.mask_path[self.ids[image_id]])\r\n\r\n if np.unique(mask).__len__() > 1:\r\n count = np.unique(mask).__len__()-1 # one less because of 0\r\n\r\n mask_new = np.zeros([info['height'], info['width'], count], dtype=np.uint8) # one more for background\r\n running = 0\r\n for i in np.unique(mask): #range(1, count):\r\n if ((i > 0) & ((mask == i).sum() > 0)):\r\n mask_new[:, :, running] = (mask == i)\r\n running = running + 1\r\n # Map class names to class IDs.\r\n class_ids = np.ones(count)\r\n else:\r\n mask_new = np.zeros([info['height'], info['width'], 1], dtype=np.uint8)\r\n class_ids = np.zeros([1])\r\n return mask_new, class_ids.astype(np.int32)", "def load_inbreast_mask(mask_path, imshape=(4084, 3328)):\n\n def load_point(point_string):\n x, y = tuple([float(num) for num in point_string.strip('()').split(',')])\n return y, x\n\n mask_shape = np.transpose(imshape)\n mask = np.zeros(mask_shape)\n with open(mask_path, 'rb') as mask_file:\n plist_dict = plistlib.load(mask_file, fmt=plistlib.FMT_XML)['Images'][0]\n numRois = plist_dict['NumberOfROIs']\n rois = plist_dict['ROIs']\n assert len(rois) == numRois\n for roi in rois:\n numPoints = roi['NumberOfPoints']\n points = roi['Point_px']\n assert numPoints == len(points)\n points = [load_point(point) for point in points]\n if len(points) <= 2:\n for point in points:\n mask[int(point[0]), int(point[1])] = 1\n else:\n x, y = zip(*points)\n x, y = np.array(x), np.array(y)\n poly_x, poly_y = polygon(x, y, shape=mask_shape)\n mask[poly_x, poly_y] = 1\n return mask", "def read_masks(self):\n structure_mask = self.read_image(\n self.filenames[\"structure_mask\"], grayscale=True\n ).astype(np.bool)\n unknown_mask = self.read_image(self.filenames[\"unknown_mask\"], grayscale=True).astype(\n np.bool\n )\n return structure_mask, unknown_mask", "def load_mask(self, image_id):\n info = self.image_info[image_id]\n # Get mask directory from image path\n mask_dir = os.path.join(os.path.dirname(os.path.dirname(info['path'])), \"masks\")\n\n # Read mask files from .png image\n mask = []\n # for f in next(os.walk(mask_dir))[2]:\n m = skimage.io.imread(os.path.join(mask_dir, info['id']+'.png')).astype(np.bool)\n mask.append(m)\n # print(mask)\n mask = np.stack(mask, axis=-1)\n # Return mask, and array of class IDs of each instance. 
Since we have\n # one class ID, we return an array of ones\n return mask, np.ones([mask.shape[-1]], dtype=np.int32)", "def detect(self, mask):\n # 1) Return Non zero indices\n det_idx = np.where(mask > 0.0)\n idx_x, idx_y = det_idx[0], det_idx[1]\n # 2) Create 1x1 box for each pixel detected.\n detections = []\n for i in range(0, len(idx_x)):\n x, y = idx_x[i], idx_y[i]\n detections.append((x, y, x+1, y+1, 1)) # x1, y1, x2, y2, area\n # 3) merge boxes\n bounding_boxes = self.bounding_boxes(detections)\n return bounding_boxes", "def createMaks(self):\n mask = np.zeros((self.height, self.width)) # (H, W)\n center = self.width // 2\n\n for lat in range(self.height):\n count = int(self.counts[lat])\n # print(lat, count)\n # print(center - count, center, center + count)\n mask[lat][center: center + count] = 1\n mask[lat][center - count: center] = 1\n\n return mask # (H, W)", "def load_mask(self, image_id):\n # If not a vesicle dataset image, delegate to parent class.\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"vesicle\":\n return super(self.__class__, self).load_mask(image_id)\n\n # Convert 16 bit mask to a bitmap mask of shape\n # [height, width, instance_count]\n info = self.image_info[image_id]\n mask_path = info['mask_path']\n mask = cv.imread(mask_path, cv.IMREAD_GRAYSCALE + cv.IMREAD_ANYDEPTH)\n bin_mask = get_bin_mask(mask)\n n_instance = bin_mask.shape[-1]\n return bin_mask, np.ones([n_instance], dtype=np.int32)", "def load_mask(self, image_id):\n # If not a balloon dataset image, delegate to parent class.\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"glomerulus\":\n return super(self.__class__, self).load_mask(image_id)\n\n # Convert polygons to a bitmap mask of shape\n # [height, width, instance_count]\n info = self.image_info[image_id]\n mask = np.zeros([info[\"height\"], info[\"width\"], len(info[\"polygons\"])],\n dtype=np.uint8)\n for i, p in enumerate(info[\"polygons\"]):\n # Get indexes of pixels inside the polygon and set them to 1\n rr, cc = skimage.draw.polygon(p['all_points_y'], p['all_points_x'])\n mask[rr, cc, i] = 1\n\n # Return mask, and array of class IDs of each instance. Since we have\n # one class ID only, we return an array of 1s\n return mask, np.ones([mask.shape[-1]], dtype=np.int32)" ]
[ "0.62886024", "0.6162003", "0.60080963", "0.59190094", "0.58852595", "0.58592284", "0.58489436", "0.5744096", "0.5695545", "0.56776625", "0.5628904", "0.56098294", "0.55625856", "0.5558411", "0.55395186", "0.5523523", "0.54945314", "0.5481902", "0.5481397", "0.5474356", "0.5452861", "0.54481", "0.5439688", "0.5419861", "0.54002756", "0.5377563", "0.5368812", "0.5367815", "0.53629375", "0.534521" ]
0.6738076
0
Returns [x,y] numpy array of random pixel. {numpy matrix} mask from which to choose random pixel.
def chooseRandPixel(mask):
    array = np.transpose(np.nonzero(mask)) # Get the indices of nonzero elements of mask.
    index = random.randint(0,len(array)-1) # Select a random index
    return array[index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_rand_array(self):\n return np.random.random((self.w + 1, self.h + 1, 2))", "def _get_rand_array(self):\n return np.random.random((self.w, self.h))", "def cell_sample(mask, samplingPoints):\n maskedArea = np.array(np.where(mask)).T\n maskedAreaLength = len(maskedArea)\n randomIndex = sp.random.randint(0, maskedAreaLength, samplingPoints)\n coordsRandom = maskedArea[randomIndex] + sp.rand(samplingPoints, 2)\n return(coordsRandom)", "def find_background_point(mask):\n zerocoords = np.where(mask == 0)\n i = np.random.randint(0,len(zerocoords[0]))\n coord = zerocoords[0][i]\n for n in range(1, len(zerocoords)):\n coord = np.append(coord, zerocoords[n][i])\n return tuple(coord)", "def RandomCoordinate(): \r\n return ReturnRounded(np.random.uniform(-10,10))", "def get_random_pos(self):\n i = np.random.randint(self.n)\n j = np.random.randint(self.m)\n return [i, j]", "def get_random_image_values():\n border = 20\n rand_x = random.randrange(70, 100) # maximum pixel index = 99\n rand_y = random.randrange(70, 100) # maximum pixel index = 99\n rand_w = random.randrange(5, 21 + 1, 2)\n rand_h = random.randrange(5, 21 + 1, 2)\n rand_cx = random.randrange(border + (rand_w // 2), rand_x - border - (rand_w // 2), 2)\n rand_cy = random.randrange(border + (rand_h // 2), rand_y - border - (rand_h // 2), 2)\n return rand_x, rand_y, rand_w, rand_h, rand_cx, rand_cy", "def generateRandomMask(size, p=0.5):\n mask_array = (np.random.random(size) > p).astype(int)\n mask = sitk.GetImageFromArray(mask_array) \n return mask", "def random_sample_from_masked_image(img_mask, num_samples):\n idx_tuple = img_mask.nonzero()\n num_nonzero = len(idx_tuple[0])\n if num_nonzero == 0:\n empty_list = []\n return empty_list\n rand_inds = random.sample(range(0,num_nonzero), num_samples)\n\n sampled_idx_list = []\n for i, idx in enumerate(idx_tuple):\n sampled_idx_list.append(idx[rand_inds])\n\n return sampled_idx_list", "def point_random_position(self, point_distribution, mask):\n batch_size = point_distribution.size(0)\n mask_np = to_np(mask) # batch x time\n indices = []\n for i in range(batch_size):\n msk = mask_np[i] # time\n indices.append(np.random.choice(len(msk), 2, p=msk / np.sum(msk, -1)))\n indices = to_pt(np.stack(indices, 0), self.use_cuda) # batch x 2\n return indices", "def get_random_coords(width, height):\n return randrange(1, width-2), randrange(1, height-2)", "def randomize_pixels(image):\n shape_ = image.size()\n image_flat = image.view(-1, image.size(-1))\n shuffled_image = shuffle(image_flat)\n return shuffled_image.view(shape_)", "def sample(self, x, y):\n px = int(round(x*self.w))\n py = int(round(y*self.h))\n return self.getPixel(px, py)", "def random_masks(self):\n # initialize mask\n mask = np.ones((3, self.dim, self.dim))\n\n # generate one of 4 random masks\n choose = 1 # np.random.randint(0, 1)\n if choose == 0:\n mask[:, :self.dim // 2] = 0\n elif choose == 1:\n mask[:, :, :self.dim // 2] = 0\n elif choose == 2:\n mask[:, :, self.dim // 2:] = 0\n elif choose == 3:\n mask[:, self.dim // 2:] = 0\n\n return mask", "def get_random_coordinates(self):\n array_shape = np.shape(self.cells) # type: tuple\n points_on_island = []\n for i in range(1, array_shape[0] - 1):\n for j in range(1, array_shape[1] - 1):\n points_on_island.append((i, j))\n random.shuffle(points_on_island)\n return points_on_island", "def get_random_point(self):\n\t\tx = np.random.uniform(self.xmin, self.xmax)\n\t\ty = np.random.uniform(self.ymin, self.ymax)\n\t\treturn [x, y, 0.0]", "def get_random_patch(self):\n y = 
numpy.random.randint(0, self.height - self.patch_height)\n x = numpy.random.randint(0, self.width - self.patch_width)\n\n return numpy.copy(self.image[y:y + self.patch_height, x:x + self.patch_width])", "def _get_rand_array(self):\n rand_array = np.random.random((self.n, self.n, 2))\n rand_array[0, :, 1] = 0.\n rand_array[-1, :, 1] = 0.\n return rand_array", "def sample(self, random_seed=None):\r\n if random_seed:\r\n seed(random_seed)\r\n return self._generate_mask()", "def generate_mask(\n self,\n noise_background,\n noise_value,\n generated_points_x,\n generated_points_y,\n xsize,\n ysize,\n ):\n\n # background of noise mask\n img_mask = np.random.randint(\n noise_background[0],\n noise_background[1] + 1,\n (ysize, xsize),\n )\n\n # mask of random value\n img_mask_random = np.random.randint(\n low=noise_value[0],\n high=noise_value[1] + 1,\n size=(ysize, xsize),\n )\n\n # insert random value into background\n img_mask[generated_points_y, generated_points_x] = img_mask_random[generated_points_y, generated_points_x]\n\n return img_mask.astype(\"uint8\")", "def randomSelection(self):\n indA = self.matingPool[ random.randint(0, self.popSize-1) ]\n indB = self.matingPool[ random.randint(0, self.popSize-1) ]\n return [indA, indB]", "def getPixel (self, x, y):\r\n return self.image [y][x]", "def __get_random_indices(self):\n rand_row = random.randint(0, self.__row_count - 1)\n rand_col = random.randint(0, self.__col_count - 1)\n return [rand_row, rand_col]", "def randColor():\r\n return np.array([random.random(), random.random(), random.random()]).reshape((1, 1, 3))", "def random_crop(img, mask):\n if str(img.dtype) != 'uint8':\n img = (img * 255).astype(np.uint8)\n if str(mask.dtype) != 'uint8':\n mask = (mask * 255).astype(np.uint8)\n img = Image.fromarray(img)\n mask = Image.fromarray(mask)\n x, y = img.size\n matrix = 256\n img_list = []\n label_list = []\n for i in range(CROP_NUM):\n x1 = randrange(0, x - matrix)\n y1 = randrange(0, y - matrix)\n img_list.append(img.crop((x1, y1, x1 + matrix, y1 + matrix)))\n label_list.append(mask.crop((x1, y1, x1 + matrix, y1 + matrix)))\n\n return img_list, label_list", "def random_position(self):\n while True:\n h = random.randrange(0, self.height)\n w = random.randrange(0, self.width)\n if self.grid[h, w] == 0:\n return (h, w)", "def targetpoint(self, initpoint):\n while True:\n col = int(random.uniform(0, COLS))\n row = int(random.uniform(0, ROWS))\n if (row, col) != initpoint:\n break\n return (row, col)", "def rand(self):\n return np.random.rand(self.nx)", "def _rand_pos(self, xLow, xHigh, yLow, yHigh):\n\n return (\n self.np_random.randint(xLow, xHigh),\n self.np_random.randint(yLow, yHigh)\n )", "def rand_inside(x1, y1, x2, y2):\n\n rx = map_between(random.random(), x1, x2)\n ry = map_between(random.random(), y1, y2)\n\n return rx, ry" ]
[ "0.66753733", "0.66301894", "0.657764", "0.64053166", "0.6381481", "0.63747543", "0.6344417", "0.6315105", "0.62708867", "0.6258122", "0.6184198", "0.6177347", "0.6172799", "0.61617583", "0.6153667", "0.6137647", "0.6054718", "0.601941", "0.60083723", "0.5987019", "0.59859353", "0.5962575", "0.59291786", "0.5909361", "0.5898755", "0.58850855", "0.58732736", "0.58692354", "0.5859409", "0.58533514" ]
0.8018285
0
Generates and saves 'numPatches' patches with dimension 'dims' from image 'slide' contained within 'mask'.
def getPatches(slide, mask, numPatches=0, dims=(0,0), dirPath='', slideNum='', plot=False, plotMask=False): # extractPatchByXMLLabeling
    w,h = dims
    levelDims = slide.level_dimensions
    Xratio, Yratio = calculateRatio(levelDims)
    i = 0
    while i < numPatches:
        firstLoop = True # Boolean to ensure while loop runs at least once.
        while firstLoop: # or not mask[rr,cc].all(): # True if it is the first loop or if all pixels are in the mask
            firstLoop = False
            x, y = chooseRandPixel(mask) # Get random top left pixel of patch.
            xVertices = np.array([x, x+(w/Xratio), x+(w/Xratio), x, x])
            yVertices = np.array([y, y, y-(h/Yratio), y-(h/Yratio), y])
            rr, cc = polygon(xVertices, yVertices)
        image = slide.read_region((x*Xratio, y*Yratio), 0, (w,h))
        isWhite = checkWhiteSlide(image)
        newPath = 'other' if isWhite else dirPath
        if not isWhite: i += 1
        slideName = '_'.join([slideNum, 'x'.join([str(x*Xratio),str(y*Yratio)])])
        image.save(os.path.join(newPath, slideName+".png"))
        if plot: plotImage(image)
        if plotMask: mask[rr,cc] = 0
    if plotMask: plotImage(mask)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def collect_patches(tif, mask, lev1, lev2, num_per_img, patch_size, patch_centre, save_folder, num_random_sample):\n table=pd.DataFrame(columns=['slide_name','x','y','label'])\n # init output lists\n patch_images_lev1 = []\n patch_images_lev2 = []\n patch_labels = []\n\n num_cancer = 0\n num_health = 0\n\n # file paths\n slide_path = tif\n mask_path = mask\n f_num = slide_path.split('/')[-1].split('.')[0]\n slide_name=os.path.basename(slide_path).rstrip('.tif')\n\n # get images with OpenSlide\n slide = open_slide(slide_path)\n tumor_mask = open_slide(mask_path)\n\n # read level 4 slide image and mask - for the purposes of getting healthy\n # and tumor pixels\n # 读取slide和mask,read_slide就是返回一shape == (height, width, 3) #3:rgb\n slide_image = read_slide(slide,\n x=0,\n y=0,\n level=4,\n width=tumor_mask.level_dimensions[4][0],\n height=tumor_mask.level_dimensions[4][1])\n\n mask_image = read_slide(tumor_mask,\n x=0,\n y=0,\n level=4,\n width=tumor_mask.level_dimensions[4][0],\n height=tumor_mask.level_dimensions[4][1])\n\n\n\n print('--------checking mask image shape after read slide', mask_image.shape)\n print('--------checking slide_image shape after read slide', slide_image.shape)\n mask_image = mask_image[:, :, 0]\n # print ('--------checking mask image shape after mask_image[:, :, 0]', mask_image.siz)\n\n # get a list of tumor pixels at level 4\n mask_lev_4_cancer = np.nonzero(mask_image)\n # print ('checking length of mask_lev_4_cancer', mask_lev_4_cancer)\n\n # make a healthy tissue mask by subtracting tumor mask from tissue mask\n tissue_pixels = find_tissue_pixels(slide_image)\n # print ('---checking tissue_pixels ', tissue_pixels )\n tissue_regions = apply_mask(slide_image, tissue_pixels)\n # print ('------checking tissue_regions', tissue_regions)\n\n mask_health = tissue_regions[:, :, 0] - mask_image\n # print ('------checking mask_health = tissue_regions[:, :, 0] - mask_image-------', mask_health.shape)\n mask_health = mask_health > 0\n # print ('------checking mask_health = mask_health > 0---------', mask_health.shape)\n mask_health = mask_health.astype('int')\n # print ('------checking mask_health = mask_health.astypeint-------', mask_health.shape)\n\n # get a list of healthy pixels at level 4\n mask_lev_4_health = np.nonzero(mask_health)\n # print ('------checking mask_lev_4_health----', len(mask_lev_4_health[0]))\n\n # print()\n # print('lenmask_lev_4_cancerpatch_size ** 2, lenmask_lev_4_health0patch_size ** 2:',\n # len(mask_lev_4_cancer[0]) // (patch_size ** 2), len(mask_lev_4_health[0]) // (patch_size ** 2))\n\n # -------------------------------------------------------------\n if len(mask_lev_4_cancer[0]) != 0:\n print('extracting tumor patches------')\n #logging.info('extracting tumor patches')\n # extract TUMOR patches\n\n # get a random sample of tumor pixels\n # Note: did random.sample here rather than random.choice inside the while loop because os speed\n random_sample = min(len(list(zip(mask_lev_4_cancer[1], mask_lev_4_cancer[0])))-1,num_random_sample)\n sample_cancer = random.sample(list(zip(mask_lev_4_cancer[1], mask_lev_4_cancer[0])), random_sample)\n\n c = 0\n idx= 0\n # continue until enough patches extracted\n while num_cancer < num_per_img:\n c += 1\n if c == random_sample:\n break\n # print('-----checking-------c', c)\n # if c % 10 == 0:\n # print(c, end=', ')\n\n # get the next pixel from the sample - coordinates at level4\n (x4, y4) = sample_cancer[c]\n\n # convert level 4 coordinates to level 0\n x0 = x4 * (2 ** 4)\n y0 = y4 * (2 ** 4)\n \n # extract 
patches at lev1 CENTERED at that pixel\n patch_image_lev1, patch_mask_lev1, patch_tissue_lev1 = \\\n get_patches(slide, tumor_mask, lev1, x0, y0, patch_size)\n\n # calc tissue ratio in that patch\n tissue_ratio = np.sum(patch_tissue_lev1[:, :, 0]) / (patch_size ** 2)\n\n # double-check if the patch has tumor\n has_cancer = check_patch_centre(patch_mask_lev1, patch_centre)\n\n # if it has more than 50% tissue and has tumor\n if (tissue_ratio > 0.5) & has_cancer:\n # collect lev1 patch\n num_cancer += 1\n table.loc[idx]=(slide_name,x0,y0,1)\n idx+=1\n\n # -------------------------------------------------------------\n # extract HEALTHY patches\n # repeat the above for the healthy pixels\n print('extracting normal patches------')\n #logging.info('extracting normal patches')\n\n # print()\n # get a random sample of healthy pixels\n random_sample = min(len(list(zip(mask_lev_4_health[1], mask_lev_4_health[0])))-1, num_random_sample)\n sample_health = random.sample(list(zip(mask_lev_4_health[1], mask_lev_4_health[0])), random_sample)\n # print('-------checking sample_health------', len(sample_health))\n\n c = 0\n\n # get healthy images\n while num_health < num_per_img:\n c += 1\n if c == random_sample:\n break\n # if c % 10 == 0:\n # print(c, end=', ')\n\n # get the next pixel from the sample - coordinates at level 4\n (x4, y4) = sample_health[c]\n\n # convert level 4 coordinates to level 0\n x0 = x4 * (2 ** 4)\n y0 = y4 * (2 ** 4)\n\n # extract patches at lev1 CENTERED at that pixel\n patch_image_lev1, patch_mask_lev1, patch_tissue_lev1 = \\\n get_patches(slide, tumor_mask, lev1, x0, y0, patch_size)\n\n # calc tissue ratio in that patch\n tissue_ratio = np.sum(patch_tissue_lev1[:, :, 0]) / (patch_size ** 2)\n\n # check if the patch has tumor\n has_cancer = check_patch_centre(patch_mask_lev1, patch_centre)\n\n # if it has more than 50% tissue and doens't have tumor in the 128x128 centre\n if (tissue_ratio > 0.5) & (not has_cancer):\n\n # collect lev1 patch\n num_health += 1\n table.loc[idx]=(slide_name,x0,y0,0)\n idx+=1\n table.to_csv(save_folder,header=True)\n return table", "def create_patches_from_mask(image, mask, patchSize=32, pad=32, depth=1, searchSlices=None):\n rois = []\n images = []\n labels = []\n searchSlices = range(len(mask)) if searchSlices is None else searchSlices\n for i in searchSlices:\n # For each voxel, generate a ROI centered there\n if not np.any(mask[i]):\n continue\n xS, yS = np.nonzero(mask[i, :, :])\n xS -= xS % patchSize\n yS -= yS % patchSize\n allPatches = set(zip(xS, yS))\n for x, y in allPatches:\n patch = np.copy(\n # agafem el patch que ens interessa i agafem un contorn per si de cas (padding)\n # potser seria interessant reduir el padding (la quantitat de marge que deixem)\n # ara mateix tenim patches de 96, quan ens interessa el centre de 32 d'aquests\n image[i - depth: i + 1 + depth, x - pad:x + patchSize + pad, y - pad:y + patchSize + pad]\n )\n label = np.copy(\n # quan fem rotacio al fer data augmentation, ens volem assegurar d'estar treballant amb\n # el mateix\n mask[i: i + 1, x - pad: x + patchSize + pad, y - pad:y + patchSize + pad]\n )\n\n rois.append(np.array([x, y, i]))\n images.append(patch)\n labels.append(label)\n return rois, images, labels", "def _make_slices(self, img_stacks, mask_stacks, patient_id, out_pth):\n img_file_name = \"{patient}_{id}_stack\"\n msk_file_name = \"{patient}_{id}_stack_mask\"\n for s in range(1, img_stacks.shape[0] + 1):\n if s < self.stack_size or img_stacks.shape[0] - s <= self.stack_size:\n continue\n slice_idx = 
np.arange(-1, self.stack_size-1) + s\n im_block = img_stacks[slice_idx,:, :, 1]\n msk_block = mask_stacks[s, :, :, 1] # Output is the mask for the center channel\n np.save(os.path.join(out_pth, img_file_name.format(patient=patient_id, id=s)), im_block)\n np.save(os.path.join(out_pth, msk_file_name.format(patient=patient_id, id=s)), msk_block)", "def find_patches_from_slide(slide_path, result_folder, filter_non_tissue):\n print(slide_path)\n\n dimensions = []\n\n with openslide.open_slide(slide_path) as slide:\n dtotal = (slide.dimensions[0] / 224, slide.dimensions[1] / 224)\n thumbnail = slide.get_thumbnail((dtotal[0], dtotal[1]))\n thum = np.array(thumbnail)\n ddtotal = thum.shape\n dimensions.extend(ddtotal)\n hsv_image = cv2.cvtColor(thum, cv2.COLOR_RGB2HSV)\n # when the image was read into memory by opencv imread function, the array will switch columns between first and third columns. However, if the array already loded into memory.\n # and it is stored as rgb, it is ok to use RGB instead of BGR\n #hsv_image = cv2.cvtColor(thum, cv2.COLOR_BGR2HSV)\n h, s, v = cv2.split(hsv_image)\n hthresh = threshold_otsu(h)\n sthresh = threshold_otsu(s)\n vthresh = threshold_otsu(v)\n # be min value for v can be changed later. according to dayong wang's paper, v value should be set to full range\n minhsv = np.array([hthresh, sthresh, 0], np.uint8)\n maxhsv = np.array([180, 255, 255], np.uint8)\n thresh = [minhsv, maxhsv]\n # print(thresh)\n # extraction the countor for tissue\n\n rgbbinary = cv2.inRange(hsv_image, thresh[0], thresh[1])\n _, contours, _ = cv2.findContours(\n rgbbinary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n bboxtcols = ['xmin', 'xmax', 'ymin', 'ymax']\n bboxt = pd.DataFrame(columns=bboxtcols)\n for c in contours:\n (x, y, w, h) = cv2.boundingRect(c)\n bboxt = bboxt.append(\n pd.Series([x, x+w, y, y+h], index=bboxtcols), ignore_index=True)\n bboxt = pd.DataFrame(bboxt)\n\n xxmin = list(bboxt['xmin'].get_values())\n xxmax = list(bboxt['xmax'].get_values())\n yymin = list(bboxt['ymin'].get_values())\n yymax = list(bboxt['ymax'].get_values())\n\n xxxmin = np.min(xxmin) # xxxmin = math.floor((np.min(xxmin))*224)\n xxxmax = np.max(xxmax) # xxxmax = math.floor((np.max(xxmax))*224)\n yyymin = np.min(yymin) # yyymin = math.floor((np.min(yymin))*224)\n yyymax = np.max(yymax) # yyymax = math.floor((np.max(yymax))*224)\n\n dcoord = (xxxmin, xxxmax, yyymin, yyymax)\n # print(dcoord)\n dimensions.extend(dcoord)\n\n # bboxt = math.floor(np.min(xxmin)*256), math.floor(np.max(xxmax)*256), math.floor(np.min(yymin)*256), math.floor(np.max(yymax)*256)\n\n samplesnew = pd.DataFrame(rgbbinary)\n # samplesnew = pd.DataFrame(pd.DataFrame(\n # np.array(thumbnail.convert('L'))))\n # print(samplesnew)\n # very critical: y value is for row, x is for column\n samplesforpred = samplesnew.loc[yyymin:yyymax, xxxmin:xxxmax]\n # tissue_patches_bounding_box = tissue_patches.loc[yyymin:yyymax, xxxmin:xxxmax]\n\n # samplesforpred2 = samplesforpred*224\n dsample = samplesforpred.shape\n\n dimensions.extend(dsample)\n # print(dimensions)\n np.save('%s/%s' %\n (result_folder, osp.splitext(osp.basename(slide_paths[i]))[0]), dimensions)\n\n # print(samplesforpred)\n\n image_patch_index = pd.DataFrame(samplesforpred.stack())\n\n image_patch_index['is_tissue'] = image_patch_index[0]\n\n # print(image_patch_index)\n\n image_patch_index['tile_loc'] = list(image_patch_index.index)\n\n image_patch_index['slide_path'] = slide_path\n\n # tissue_patches_bounding_box_stack = pd.DataFrame(tissue_patches_bounding_box.stack())\n\n 
image_patch_index.reset_index(inplace=True, drop=True)\n\n # print(image_patch_index)\n\n if filter_non_tissue:\n\n image_patch_index = image_patch_index[image_patch_index.is_tissue != 0]\n\n image_patch_index.to_pickle(\n '%s/%s.pkl' % (result_folder, osp.splitext(osp.basename(slide_path))[0]))\n\n return image_patch_index", "def wsi_patch_splitting(wsi_path, patch_dir, patch_size=299, save_size=299,\n wsi_ext=\"tiff\", save_ext=\"png\",\n pyramid_flag=True, overlap_flag=True, level=0):\n\n if pyramid_flag == False:\n try:\n img = io.imread(wsi_path)\n if img.dtype == \"uint16\":\n img = (img / 256.0).astype(np.uint8)\n elif img.dtype == \"uint8\":\n pass\n else:\n raise Exception(\"Unknow imge data type\")\n except:\n print(\"Cannot handle {}\".format(wsi_path))\n else:\n wsi_header = openslide.OpenSlide(wsi_path)\n img = wsi_header.read_region(location=(0, 0), level=level,\n size=wsi_header.level_dimensions[level])\n img = np.asarray(img)[:,:,:-1]\n\n coors_arr = wsi_coor_splitting(wsi_h=img.shape[0], wsi_w=img.shape[1],\n length=patch_size, overlap_flag=overlap_flag)\n filename = os.path.splitext(os.path.basename(wsi_path))[0]\n for coor in coors_arr:\n h_start, w_start = coor[0], coor[1]\n cur_patch = img[h_start:h_start+patch_size, w_start:w_start+patch_size, :]\n if patch_size != save_size:\n save_patch = transform.resize(cur_patch, (save_size, save_size))\n save_patch = (save_patch * 255.0).astype(np.uint8)\n else:\n save_patch = cur_patch\n\n patch_name = \"{}_{}.{}\".format(filename, str(uuid.uuid4())[:8], save_ext)\n patch_filepath = os.path.join(patch_dir, patch_name)\n io.imsave(patch_filepath, save_patch)", "def generate_patches(scaled_imgs, constants, all_patches):\n patch_size = constants.PATCH_SIZE\n step = 1 if all_patches else 2\n patches = []\n for k, sc in enumerate(scaled_imgs):\n img_patches = []\n for i in range(0, sc.shape[0] - patch_size, step):\n for j in range(0, sc.shape[1] - patch_size, step):\n raw_patch = sc[i:i + patch_size, j:j + patch_size, :]\n patch = Patch(\n raw_patch=raw_patch,\n patch_size=patch_size,\n )\n patch.store(sc, [i, j])\n img_patches.append(patch)\n patches.append(img_patches)\n return patches", "def create_all_mask(mask, num, stride):\n scale_factor = 1.0 / stride\n small_mask = cv2.resize(mask, (0, 0), fx=scale_factor, fy=scale_factor, interpolation=cv2.INTER_CUBIC)\n small_mask = small_mask[:, :, np.newaxis]\n return np.repeat(small_mask, num, axis=2)", "def load_slices(self, dataset_dir, n_images, n_patches, channels = [\"base\"]):\n \n # add classes to be trained on\n \n self.add_class(\"slices\", 1, \"tissue\")\n self.add_class(\"slices\", 2, \"mag\")\n \n # collect image list and initialize counter\n \n image_list = os.listdir(dataset_dir)\n image_counter = 0\n patch_counter = 0\n \n # cycle over images and save patches to database.\n \n for i in range(n_images):\n \n image_path = os.path.join(dataset_dir,image_list[i])\n patch_list = os.listdir(image_path)\n \n print(f\"processing: image {i}\") \n \n for j in range(n_patches):\n \n patch_path = os.path.join(image_path, patch_list[j])\n \n patch_image_path = os.path.join(patch_path,\"images\")\n \n file_list = os.listdir(patch_image_path)\n \n image_file_path = os.path.join(patch_image_path,file_list[0])\n \n image = skimage.io.imread(image_file_path)\n \n height, width = image.shape\n \n self.add_image(\n \"slices\",\n image_id = patch_counter,\n path = patch_path,\n width = width, height = height,\n channels = channels,\n )\n patch_counter += 1", "def get_patch_predictions(slide, 
model):\n dims = slide.dims\n ratio = slide.ratio\n imgArr = np.zeros((BATCH_THRESHOLD, WINDOW_SIZE[0], WINDOW_SIZE[1], 3))\n totalIndex = -1 # Total count of all patches\n imgIndex = 0 # Count of only image patches\n \n for iy, y in enumerate(np.arange(0, dims[-1][1], STEP_SIZE/ratio[1])):\n for ix, x in enumerate(np.arange(0, dims[-1][0], STEP_SIZE/ratio[0])):\n totalIndex += 1\n\n # If there are no cells in this row, add the index to the set of white patches. \n if iy not in slide.regionDict:\n slide.add_white_patch(totalIndex)\n continue\n\n # Else, check if the x values fall within the region boundaries.\n regions = slide.regionDict[iy]\n cont = np.any([(region[0] <= x < region[1]) for region in regions])\n if not cont: # If x doesn't fall within the boundaries, add index to white patches.\n slide.add_white_patch(totalIndex)\n continue\n \n # Extract the image patch, convert it to a numpy matrix, and normalize.\n image = slide.image.read_region((x*ratio[0] ,y*ratio[1]), 0, WINDOW_SIZE )\n imgMat = np.array(image.convert(mode=\"RGB\")) / 255. \n imgArr[imgIndex] = imgMat\n imgIndex += 1 \n\n # For every batch (specified by BATCH_THRESHOLD),\n if imgIndex%BATCH_THRESHOLD == 0:\n # Predict the classes for each image patch\n classes = model.predict(imgArr, batch_size=32, verbose=1)\n yield classes\n\n imgIndex = 0\n imgArr = np.zeros((BATCH_THRESHOLD, WINDOW_SIZE[0], WINDOW_SIZE[1], 3))\n\n if totalIndex%2500==0: slide.print_status()\n\n # Trim zeros and predict final batch if any.\n imgArr_trimmed = imgArr[:imgIndex] \n if len(imgArr_trimmed):\n classes = model.predict(imgArr_trimmed, batch_size=32, verbose=1)\n yield classes", "def generate(self, size, count, channels=3):\n self.size = size\n self.h, self.w = size\n self.count = count\n self.channels = channels\n self.make_path()\n self.create_list()\n self.create_json()\n t = time.time()\n for i, (path, img, mask) in enumerate(self.gen()):\n cv2.imwrite(path, img)\n if mask:\n *p, id_ = path.split(\"/\")\n cv2.imwrite(f\"{self.save_path}{self.name}/masks/{id_}\", mask)\n if self.print:\n print(\"[Done {:6d}] [Time: {:.2f} s]\".format(i, time.time() - t))\n t = time.time()", "def extract_patch(n, patch_size, imgs):\n # Extract patches from input images\n img_patches = [img_crop(imgs[i], patch_size, patch_size) for i in range(n)]\n #gt_patches = [img_crop(gt_imgs[i], patch_size, patch_size) for i in range(n)]\n\n # Linearize list of patches\n img_patches = np.asarray([img_patches[i][j] for i in range(len(img_patches)) for j in range(len(img_patches[i]))])\n #gt_patches = np.asarray([gt_patches[i][j] for i in range(len(gt_patches)) for j in range(len(gt_patches[i]))])\n \n return img_patches #,gt_patches", "def _sample_patches(imgs, \n labelimgs, \n patch_size, \n patchgroup, \n padding_mode, \n padding_values, \n ignore_labels,\n startidx=0):\n samplelist = []\n \n # number of bands should be constant, therefore the dimensionality can be read from any \n # sub img\n bands = imgs[0].shape[-1]\n\n # calculate remapping for labels when removing `ignore_labels`\n # flatten labelimgs and convert to numpy array to use np.unique function on it\n flattened_labelimgs = np.concatenate([labelimg.reshape(-1) for labelimg in labelimgs])\n max_label = np.unique(flattened_labelimgs).max()\n remaining_labels = np.setdiff1d(np.arange(max_label+1), ignore_labels)\n label_remap = np.full((max_label+1), -1)\n for i, val in enumerate(remaining_labels):\n label_remap[val] = i\n\n valid_sample_count = 0\n for labelimg in labelimgs:\n valid_sample_count += 
np.invert(np.isin(labelimg, ignore_labels)).sum()\n print(f'Extracting {valid_sample_count} valid samples...')\n \n if ('data' in patchgroup) and ('labels' in patchgroup):\n # resize existing dataset to append patches from test set\n patchgroup['data'].resize((patchgroup['data'].shape[0] + valid_sample_count), axis=0)\n patchgroup['labels'].resize((patchgroup['labels'].shape[0] + valid_sample_count), axis=0)\n else:\n patchgroup.create_dataset('data', (valid_sample_count, patch_size, patch_size, bands)\n , chunks=(1, patch_size, patch_size, bands)\n , maxshape=(None, patch_size, patch_size, bands)\n , dtype=imgs[0].dtype) # datatype should be the same for all imgs\n patchgroup.create_dataset('labels', (valid_sample_count,1)\n , chunks=True, maxshape=(None, 1)\n , dtype=labelimgs[0].dtype) # datatype should be the same for all labelimgs\n \n idx = startidx\n with tqdm(total=valid_sample_count) as pbar:\n for img, labelimg in zip(imgs, labelimgs):\n\n # pad along spatial axes\n margin = int((patch_size - 1) / 2)\n X = np.pad(img, ((margin, margin), (margin, margin), (0,0)), \n mode=padding_mode, constant_values=padding_values) \n\n # split patches\n for r in range(margin, X.shape[0] - margin):\n for c in range(margin, X.shape[1] - margin):\n patchlabel = labelimg[r-margin, c-margin]\n\n # do not create a sample for 'ignore_labels'\n if patchlabel in ignore_labels:\n continue\n else :\n # correct label\n patchlabel = label_remap[patchlabel]\n\n patch = X[r - margin:r + margin + 1, c - margin:c + margin + 1]\n # store sample in hdf file\n patchgroup['data'][idx] = patch\n patchgroup['labels'][idx] = patchlabel\n\n # update\n idx += 1\n pbar.update(1)\n\n patchgroup.attrs['patch_size'] = patch_size\n patchgroup.attrs['padding_mode'] = padding_mode\n patchgroup.attrs['padding_values'] = padding_values\n patchgroup.attrs['ignore_labels'] = ignore_labels\n\n return valid_sample_count", "def visualise_patches_on_slide(ps: PatchSet, vis_level: (int), project_root: Path = Path('/')) -> Image:\n assert len(ps.settings) == 1, \"The input patch set contains patches from more than one slide, or more than one patch size / level\"\n slide_settings = ps.settings[0] \n\n def convert_ps_to_thumb_level(ps, thumb_lev):\n ps_df = ps.df.copy()\n level_diff = thumb_lev - ps.settings[0].level\n ps_df.x = ps_df.x.divide(2 ** level_diff).astype(int)\n ps_df.y = ps_df.y.divide(2 ** level_diff).astype(int)\n thumb_patch_size = slide_settings.patch_size // 2 ** level_diff\n return PatchSet(ps_df, [PatchSetting(slide_settings.level, thumb_patch_size, slide_settings.slide_path, slide_settings.loader)])\n\n def create_visualisation_frame(ps_in):\n vis_frame = ps_in.df\n # TODO: ps.settings[0] as only one settings is there a neater way to do this\n vis_frame[\"x2\"] = vis_frame.x.add(ps_in.settings[0].patch_size)\n vis_frame[\"y2\"] = vis_frame.y.add(ps_in.settings[0].patch_size)\n return vis_frame\n\n with slide_settings.loader.load_slide(project_root / slide_settings.slide_path) as slide:\n thumb = slide.get_thumbnail(vis_level)\n \n thumb = Image.fromarray(np.array(thumb, dtype=np.uint8))\n ps_out = convert_ps_to_thumb_level(ps, vis_level)\n vis_frame = create_visualisation_frame(ps_out)\n\n thumbdraw = ImageDraw.Draw(thumb) \n for row in vis_frame.itertuples():\n thumbdraw.rectangle([row.x, row.y, row.x2, row.y2], fill=None, outline='black', width=1)\n \n return thumb", "def _post_process_masks_pt(\n self, masks, original_sizes, reshaped_input_sizes, mask_threshold=0.0, binarize=True, pad_size=None\n ):\n 
requires_backends(self, [\"torch\"])\n pad_size = self.pad_size if pad_size is None else pad_size\n target_image_size = (pad_size[\"height\"], pad_size[\"width\"])\n if isinstance(original_sizes, (torch.Tensor, np.ndarray)):\n original_sizes = original_sizes.tolist()\n if isinstance(reshaped_input_sizes, (torch.Tensor, np.ndarray)):\n reshaped_input_sizes = reshaped_input_sizes.tolist()\n output_masks = []\n for i, original_size in enumerate(original_sizes):\n if isinstance(masks[i], np.ndarray):\n masks[i] = torch.from_numpy(masks[i])\n elif not isinstance(masks[i], torch.Tensor):\n raise ValueError(\"Input masks should be a list of `torch.tensors` or a list of `np.ndarray`\")\n interpolated_mask = F.interpolate(masks[i], target_image_size, mode=\"bilinear\", align_corners=False)\n interpolated_mask = interpolated_mask[..., : reshaped_input_sizes[i][0], : reshaped_input_sizes[i][1]]\n interpolated_mask = F.interpolate(interpolated_mask, original_size, mode=\"bilinear\", align_corners=False)\n if binarize:\n interpolated_mask = interpolated_mask > mask_threshold\n output_masks.append(interpolated_mask)\n\n return output_masks", "def postprocess_masks(\n self,\n masks: paddle.Tensor,\n input_size: Tuple[int, ...],\n original_size: Tuple[int, ...], ) -> paddle.Tensor:\n masks = F.interpolate(\n masks,\n (self.image_encoder.img_size, self.image_encoder.img_size),\n mode=\"bilinear\",\n align_corners=False, )\n masks = masks[..., :input_size[0], :input_size[1]]\n masks = F.interpolate(\n masks, original_size, mode=\"bilinear\", align_corners=False)\n return masks", "def get_patches(rimage, gimage, mimage, num_patches=48, patch_size=80, patch_stride=80):\n num_FSpatches = 16\n num_RApatches = 32\n rpatches = []\n gpatches = []\n mpatches = []\n #R_imgs = ((rimage+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'rainy.jpg', R_imgs[0,:,:,:])\n for i in range(int(math.sqrt(num_FSpatches))):\n for j in range(int(math.sqrt(num_FSpatches))):\n point_x = patch_stride*i\n point_y = patch_stride*j\n rpatch = rimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n #print(point_x)\n #print(point_y)\n #print(point_y+patch_size)\n #P_imgs = ((rpatch+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'patch_%d_%d.jpg'%(i,j), P_imgs[0,:,:,:])\n #print(rpatch.shape)\n rpatches.append(rpatch)\n #print(np.array(rpatches).shape)\n gpatch = gimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n gpatches.append(gpatch)\n mpatch = mimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n mpatches.append(mpatch)\n\n for k in range(num_RApatches):\n point1 = random.randint(0,240) # 116 comes from the image source size (320) - the patch dimension (80)\n point2 = random.randint(0,240)\n #rpatch = tf.image.crop_to_bounding_box(rimage, point1, point2, patch_size, patch_size)\n rpatch = rimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n #P_imgs = ((rpatch+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'patch_%d.jpg'%i, P_imgs[0,:,:,:])\n #print(rpatch.shape)\n rpatches.append(rpatch)\n #print(np.array(rpatches).shape)\n gpatch = gimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n gpatches.append(gpatch)\n mpatch = mimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n mpatches.append(mpatch)\n\n rpatches = np.array(rpatches)\n rpatches = np.squeeze(rpatches)\n #print(rpatches.shape)\n gpatches = np.array(gpatches)\n gpatches = np.squeeze(gpatches)\n 
mpatches = np.array(mpatches)\n mpatches = np.squeeze(mpatches)\n #assert rpatches.get_shape().dims == [num_patches, patch_size, patch_size, 3]\n assert rpatches.shape == (num_patches, patch_size, patch_size, 3)\n return rpatches, gpatches, mpatches", "def make_patchset_for_slide(\n slide_path: Path,\n annot_path: Path,\n slide_label: str,\n loader: Loader,\n tissue_detector: TissueDetector,\n patch_finder: PatchFinder,\n project_root: Path = Path('/')\n) -> PatchSet:\n\n with loader.load_slide(project_root / slide_path) as slide:\n annotations = loader.load_annotations(project_root / annot_path, slide_label)\n labels_shape = slide.dimensions[patch_finder.labels_level].as_shape()\n scale_factor = 2**patch_finder.labels_level\n labels_image = annotations.render(labels_shape, scale_factor)\n tissue_mask = tissue_detector(slide.get_thumbnail(patch_finder.labels_level))\n labels_image[~tissue_mask] = 0\n df, level, size = patch_finder(\n labels_image, slide.dimensions[patch_finder.patch_level]\n )\n df[\"setting\"] = 0 # set all the patches to reference the first patchsetting\n patchset = PatchSet(df, [PatchSetting(level, size, slide_path, loader)])\n return patchset", "def load_shapes(self, count, img_floder, mask_floder, imglist, creatnpzfile:bool=True):\n # Add classes\n \n self.add_class(\"shapes\", 1, \"grasper\")\n self.add_class(\"shapes\", 2, \"grasper2\")\n self.add_class(\"shapes\", 3, \"grasper3\")\n self.add_class(\"shapes\", 4, \"irrigator\")\n self.add_class(\"shapes\", 5, \"hook\")\n self.add_class(\"shapes\", 6, \"clipper\")\n\n # Add images\n # Generate random specifications of images (i.e. color and\n # list of shapes sizes and locations). This is more compact than\n # actual images. Images are generated on the fly in load_image().\n for i in range(count):\n img = imglist[i]\n if img.endswith(\".jpg\"):\n img_name = img.split(\".\")[0]\n img_path = os.path.join(img_floder,img)\n mask_path = os.path.join(mask_floder,img_name+\".png\")\n #save the mask infomation with numpy\n mask_info = None\n \n if not os.path.exists(os.path.join(mask_infofloder,\"{}.npz\".format(img_name))):\n mask_info = self.load_mask_pre(i,mask_path)\n np.savez(os.path.join(mask_infofloder,img_name),mask_ = mask_info[0], id_=mask_info[1])\n else:\n data = np.load(os.path.join(mask_infofloder,\"{}.npz\".format(img_name)))\n mask_info = data['mask_'],data['id_']\n\n self.add_image(\"shapes\", image_id=i, path=img_path, name=img_name, mask_path=mask_path, mask_info=mask_info)\n sys.stdout.write('-------creating the np file:--%s-------------pross:--%.4f%%--'%(os.path.join(mask_infofloder,\"{}.npz\".format(img_name)),\n (i+1)/float(count)*100))\n sys.stdout.write('\\r')\n sys.stdout.flush()", "def crop_images_color(dataset_dir, is_mask=True):\n data = []\n for folder in os.listdir(dataset_dir):\n path = os.path.join(dataset_dir, folder, \"*_labelIds.png\")\n data.extend(glob(path))\n\n for index, filePath in enumerate(data):\n print ('{}/{}'.format(index, len(data)))\n\n img = scipy.misc.imread(filePath).astype(np.uint8)\n img = scipy.misc.imresize(img, 0.25, interp='bilinear', mode=None)\n if is_mask:\n mask = np.ones((img.shape[0], img.shape[1]), dtype=np.uint8) * 255\n\n idx_person = np.where(np.all(img == [220, 20, 60, 255], axis=-1))\n #idx_rider = np.where(np.all(img == [255, 0, 0, 255], axis=-1))\n #idx_void = np.where(np.all(img == [0, 0, 0, 255], axis=-1))\n\n #indices = np.concatenate((idx_person, idx_rider, idx_void), axis=1)\n indices = idx_person\n # mask[indices[0], indices[1], :] = (0, 0, 0, 
255)\n mask[indices[0], indices[1]] = 0\n mask = np.reshape(mask, (256, 512))\n\n #scipy.misc.imsave('/home/andy/dataset/CITYSCAPES/CITYSCAPES_crop_random/' + filePath.split('/')[-1],\n # img[offs_h[index]:offs_h_end[index], offs_w[index]:offs_w_end[index] :])\n scipy.misc.imsave('/home/andy/dataset/CITYSCAPES/for_wonderful_chou/image/' + filePath.split('/')[-1],\n img[0:192, :])\n #break", "def sliding_window(image, patch_size: tuple, step: int, show_debug: bool = False) -> list:\n if isinstance(image, Image.Image):\n image = np.array(image)\n\n if step == 0:\n h, w = image.shape[0], image.shape[1] # 720, 1280\n w_iter, h_iter = w // patch_size[0], h // patch_size[1]\n crop_image_list = []\n for i in range(h_iter):\n for j in range(w_iter):\n bbox = (i*patch_size[0], j*patch_size[0],\n (i+1)*patch_size[0], (j+1)*patch_size[0])\n crop_image = image[bbox[0]:bbox[2], bbox[1]: bbox[3]]\n if show_debug:\n crop_image = Image.fromarray(crop_image)\n crop_image.save(f\"/data/jiangmingchao/patches/{i}.png\")\n cv2.rectangle(image,\n (i*patch_size[0], j*patch_size[0]),\n ((i+1)*patch_size[0], (j+1)*patch_size[0]),\n (255, 255, 0),\n 2,\n )\n\n crop_image_list.append(Image.fromarray(crop_image))\n\n if show_debug:\n cv2.imwrite(\"1.jpg\", image)\n\n else:\n h, w = image.shape[0], image.shape[1]\n step_w_iter, step_h_iter = (w - patch_size[0]) // step, (h - patch_size[0]) // step\n crop_image_list = []\n for i in range(step_h_iter):\n for j in range(step_w_iter):\n bbox = (i * step, j * step, patch_size[0] + i * step, patch_size[1] + j * step)\n crop_image = image[bbox[0]: bbox[2], bbox[1]: bbox[3]]\n print(crop_image.shape)\n crop_image_list.append(Image.fromarray(crop_image))\n\n return crop_image_list", "def sample_patches(images, npatches, patch_sz):\n\tnimages, nrows, ncols = images.shape\n\timg_index = np.random.randint(0, nimages, npatches)\n\trow_index = np.random.randint(0, nrows-patch_sz, npatches)\n\tcol_index = np.random.randint(0, ncols-patch_sz, npatches)\n\tpatches = np.empty((npatches, patch_sz, patch_sz))\n\tfor i, (img, row, col) in enumerate(zip(img_index, row_index, col_index)):\n\t\tpatches[i] = images[img, row:row+patch_sz, col:col+patch_sz]\n\treturn patches", "def iter_patch_slices(dims, patch_size, start_pos=()):\n\n # ensure patchSize and startPos are the right length\n ndim = len(dims)\n patch_size = get_valid_patch_size(dims, patch_size)\n start_pos = ensure_tuple_size(start_pos, ndim)\n\n # collect the ranges to step over each dimension\n ranges = tuple(starmap(range, zip(start_pos, dims, patch_size)))\n\n # choose patches by applying product to the ranges\n for position in product(*ranges[::-1]): # reverse ranges order to iterate in index order\n yield tuple(slice(s, s + p) for s, p in zip(position[::-1], patch_size))", "def extract_patches(image_list, mask_src, image_src, mask_dst, image_dst, patch_size):\n class_counts = defaultdict(lambda: 0)\n skipped = 0\n total = 0\n for im in tqdm(image_list):\n img = cv2.imread(os.path.join(image_src, im))\n msk = cv2.imread(os.path.join(mask_src, im), 0)\n \n assert (img.shape[0] == msk.shape[0]) \\\n and (img.shape[1] == msk.shape[1]), \"Mismatch!\"\n\n img_patches = patchify(img, (patch_size, patch_size, 3), step=patch_size)\n msk_patches = patchify(msk, (patch_size, patch_size), step=patch_size)\n img_patches = img_patches.reshape((-1, patch_size, patch_size, 3))\n msk_patches = msk_patches.reshape((-1, patch_size, patch_size))\n # Step = 256 for patch size means no overlap\n for i in range(img_patches.shape[0]):\n # Replace 
class labels\n mask_patch = replace_classes(msk_patches[i])\n unique, counts = np.unique(mask_patch, return_counts=True)\n # If outside of RoI takes > 90% and there is only 1 class, ignore the patch.\n outside = np.mean(mask_patch == 0) > 0.9\n if outside and (len(unique) < 2):\n skipped += 1\n continue\n for x, y in enumerate(unique):\n class_counts[y] += counts[x].item()\n img_patch = img_patches[i]\n filename = im.split(\".png\")[0] + \"_\" + str(i) + \".png\"\n cv2.imwrite(os.path.join(image_dst, filename), img_patch)\n cv2.imwrite(os.path.join(mask_dst, filename), mask_patch)\n total += 1\n print('Skipped: {} / {}'.format(skipped, total))\n return class_counts", "def image_mask(image, patch_R, patch_C, seg_model):\n\n im = Image.open(image)\n im_name = os.path.basename(image).split('.')[0]\n im_width, im_height = im.width, im.height\n\n N = patch_R // patch_C\n\n W_ps_NI = im_width // patch_C # 31782 // 256 = 124\n # W_ps_NR = slide_width % patch_C # 31782 % 256 = 38\n H_ps_NI = im_height // patch_R # 24529 // 1024 = 23\n # H_ps_NR = slide_height % patch_R # 24529 % 1024 = 977\n\n cell_ratio = 0.85 # the threshold that decide the patch is background or not\n\n output_dir = os.path.join(current_path, \"..\", \"output\", \"output_mask\")\n if not os.path.isdir(output_dir): os.makedirs(output_dir)\n\n np_im = np.array(im)[:, :, 0:3] # exclude alpha\n for w in range(W_ps_NI):\n for h in range(H_ps_NI):\n subHIC = np_im[h * patch_R: (h+1) * patch_R, w * patch_C:(w+1) * patch_C, :]\n\n # rgb three channels value that >200 and <40 are ignored segment\n rgb_s = (abs(subHIC[:, :, 0] - 120) >= 80) & (abs(subHIC[:, :, 1] - 120) >= 80) & (\n abs(subHIC[:, :, 2] - 120) >= 80) # >200 <40\n\n if np.sum(rgb_s) <= (patch_R * patch_C) * cell_ratio:\n # segment\n subHIC = np.where(rgb_similarity(subHIC, 15, 195), 250, subHIC)\n # adjust equalization histogram and adjust brightness\n for k in range(subHIC.shape[2]):\n clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(N * 4, 4))\n subHIC[:, :, k] = clahe.apply(subHIC[:, :, k])\n subHIC = exposure.adjust_gamma(subHIC, gamma=1.5)\n subHIC = subHIC.reshape(N, patch_C, patch_C, 3)\n\n subHIC = subHIC.reshape(N, patch_C, patch_C, 3)\n allmask_prob_list = maskrcnn_detection(seg_model, subHIC)\n\n for i in range(len(allmask_prob_list)):\n for layer in range(allmask_prob_list[i].shape[2]):\n image, cnts, hierarchy = cv2.findContours(allmask_prob_list[i][:, :, layer],\n cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n np_im[h * patch_R + i * patch_C: h * patch_R + (i + 1) * patch_C, w * patch_C:(w + 1) * patch_C,\n :] = cv2.drawContours(np_im[h * patch_R + i*patch_C: h*patch_R+(i+1)*patch_C, w * patch_C:(w + 1) * patch_C, :],\n cnts, -1, (0, 255, 0), 1)\n\n # np_im[h * patch_R + i*patch_C: h*patch_R+(i+1)*patch_C, w * patch_C:(w + 1) * patch_C, :] = subHIC[i]\n\n # plt.savefig(os.path.join(output_dir, f\"{im_name}w{w}h{h}N{i}.png\"))\n\n io.imsave(os.path.join(output_dir, f\"{im_name}.png\"), np_im)", "def find_patches_from_slide(slide_path, filter_non_tissue=True):\n\n #sampletotal = pd.DataFrame([])\n #base_truth_dir = Path(BASE_TRUTH_DIR)\n #anno_path = Path(anno_path)\n #slide_contains_tumor = osp.basename(slide_paths[i]).startswith('tumor_')\n print (slide_path)\n\n dimensions = []\n \n with openslide.open_slide(slide_path) as slide:\n dtotal = (slide.dimensions[0] / 224, slide.dimensions[1] / 224)\n thumbnail = slide.get_thumbnail((dtotal[0], dtotal[1]))\n thum = np.array(thumbnail)\n ddtotal = thum.shape\n dimensions.extend(ddtotal)\n hsv_image = 
cv2.cvtColor(thum, cv2.COLOR_BGR2HSV)\n h, s, v = cv2.split(hsv_image)\n hthresh = threshold_otsu(h)\n sthresh = threshold_otsu(s)\n vthresh = threshold_otsu(v)\n # be min value for v can be changed later\n minhsv = np.array([hthresh, sthresh, 70], np.uint8)\n maxhsv = np.array([180, 255, vthresh], np.uint8)\n thresh = [minhsv, maxhsv]\n #extraction the countor for tissue\n\n rgbbinary = cv2.inRange(hsv_image, thresh[0], thresh[1])\n _, contours, _ = cv2.findContours(rgbbinary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n bboxtcols = ['xmin', 'xmax', 'ymin', 'ymax']\n bboxt = pd.DataFrame(columns=bboxtcols)\n for c in contours:\n (x, y, w, h) = cv2.boundingRect(c)\n bboxt = bboxt.append(pd.Series([x, x+w, y, y+h], index = bboxtcols), ignore_index=True)\n bboxt = pd.DataFrame(bboxt)\n \n xxmin = list(bboxt['xmin'].get_values())\n xxmax = list(bboxt['xmax'].get_values())\n yymin = list(bboxt['ymin'].get_values())\n yymax = list(bboxt['ymax'].get_values())\n\n xxxmin = np.min(xxmin)\n xxxmax = np.max(xxmax)\n yyymin = np.min(yymin)\n yyymax = np.max(yymax)\n\n dcoord = (xxxmin, xxxmax, yyymin, yyymax)\n\n dimensions.extend(dcoord)\n\n # bboxt = math.floor(np.min(xxmin)*256), math.floor(np.max(xxmax)*256), math.floor(np.min(yymin)*256), math.floor(np.max(yymax)*256)\n \n samplesnew = pd.DataFrame(pd.DataFrame(np.array(thumbnail.convert('L'))))\n print(samplesnew)\n # very critical: y value is for row, x is for column\n samplesforpred = samplesnew.loc[yyymin:yyymax, xxxmin:xxxmax]\n\n dsample = samplesforpred.shape\n\n dimensions.extend(dsample)\n\n np.save ('dimensions_%s' % (osp.splitext(osp.basename(slide_paths[i]))[0]), dimensions)\n\n print(samplesforpred)\n\n samplesforpredfinal = pd.DataFrame(samplesforpred.stack())\n\n print(samplesforpredfinal)\n\n samplesforpredfinal['tile_loc'] = list(samplesforpredfinal.index)\n\n samplesforpredfinal.reset_index(inplace=True, drop=True)\n\n\n samplesforpredfinal['slide_path'] = slide_paths[i]\n\n\n print(samplesforpredfinal)\n\n\n return samplesforpredfinal", "def fpn_mask_graph(rois, feature_maps, image_meta,\n pool_size, num_classes, train_bn=True):\n # ROI Pooling\n # Shape: [batch, boxes, pool_height, pool_width, channels]\n x = modellib.PyramidROIAlign([pool_size, pool_size],\n name=\"roi_align_mask\")([rois, image_meta] + feature_maps)\n\n # Conv layers\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv1\")(x)\n x = KL.TimeDistributed(modellib.BatchNorm(),\n name='mrcnn_mask_bn1')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv2\")(x)\n x = KL.TimeDistributed(modellib.BatchNorm(),\n name='mrcnn_mask_bn2')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv3\")(x)\n x = KL.TimeDistributed(modellib.BatchNorm(),\n name='mrcnn_mask_bn3')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv4\")(x)\n x = KL.TimeDistributed(modellib.BatchNorm(),\n name='mrcnn_mask_bn4')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2DTranspose(256, (2, 2), strides=2, activation=\"relu\"),\n name=\"mrcnn_mask_deconv\")(x)\n x = KL.TimeDistributed(KL.Conv2D(1, (1, 1), strides=1, activation=\"sigmoid\"),\n name=\"mrcnn_mask\")(x)\n # Duplicate output for fg/bg detections\n x = KL.Concatenate(axis=-1)([x 
for i in range(num_classes)])\n return x", "def build_fpn_mask_graph(rois, feature_maps, image_meta,\n pool_size, num_classes, train_bn=True):\n # ROI Pooling\n # Shape: [batch, boxes, pool_height, pool_width, channels]\n x = PyramidROIAlign([pool_size, pool_size],\n name=\"roi_align_mask\")([rois, image_meta] + feature_maps)\n\n # Conv layers\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv1\")(x)\n x = KL.TimeDistributed(BatchNorm(),\n name='mrcnn_mask_bn1')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv2\")(x)\n x = KL.TimeDistributed(BatchNorm(),\n name='mrcnn_mask_bn2')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv3\")(x)\n x = KL.TimeDistributed(BatchNorm(),\n name='mrcnn_mask_bn3')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2D(256, (3, 3), padding=\"same\"),\n name=\"mrcnn_mask_conv4\")(x)\n x = KL.TimeDistributed(BatchNorm(),\n name='mrcnn_mask_bn4')(x, training=train_bn)\n x = KL.Activation('relu')(x)\n\n x = KL.TimeDistributed(KL.Conv2DTranspose(256, (2, 2), strides=2, activation=\"relu\"),\n name=\"mrcnn_mask_deconv\")(x)\n x = KL.TimeDistributed(KL.Conv2D(num_classes, (1, 1), strides=1, activation=\"sigmoid\"),\n name=\"mrcnn_mask\")(x)\n return x", "def read_mask(mask_path, mask_type, mask_name,patch_size,show_image=None):\n path_test = mask_path\n\n mask= Image.open(path_test+\"/\"+\"{}\".format(mask_type)+\n \"/\"+\"{}.tif\".format(mask_name))\n mask_list = np.asarray(list (mask.getdata() ))\n\n mask_list = mask_list / np.amax(mask_list)\n #either use from future or use // to get float result\n mask_list = np.reshape(mask_list,(patch_size,patch_size))\n if (show_image == True):\n\n print(mask_list.shape)\n plt.figure()\n plt.imshow(mask_list,cmap='gray')\n plt.show()\n print(mask_list)\n return mask_list", "def crop_acc_mask(images_dir, images_output_dir, masks_dir, mask_suffix=None, masks_output_dir=None): \n image_suffix_list = [\"C0\", \"DE\", \"T2\"]\n if not os.path.exists(images_output_dir):\n os.makedirs(images_output_dir)\n if masks_output_dir is not None and (not os.path.exists(masks_output_dir)):\n os.makedirs(masks_output_dir)\n margin = [0, 30, 30]\n masks_list = os.listdir(masks_dir)\n masks_list.sort()\n json_dict = OrderedDict()\n for mask in masks_list:\n mask_path = os.path.join(masks_dir, mask)\n if mask.endswith(\".nii.gz\"):\n print(\"#\" * 11 *11)\n print(mask_path)\n mask_sitk = sitk.ReadImage(mask_path)\n mask_npy = sitk.GetArrayFromImage(mask_sitk)\n mask_shape = mask_npy.shape\n crop_bbox_min, crop_bbox_max = get_ND_bounding_box(mask_npy, margin=margin)\n # do not crop along depth dimension\n crop_bbox_min[0] = 0\n crop_bbox_max[0] = mask_shape[0]\n print(crop_bbox_min, crop_bbox_max)\n json_dict[mask_path] = {\"crop_bbox_min\": crop_bbox_min, \"crop_bbox_max\": crop_bbox_max}\n mask_output_npy = crop_ND_volume_with_bounding_box(mask_npy, crop_bbox_min, crop_bbox_max)\n if mask_suffix is not None:\n mask = mask.replace(\"_\" + mask_suffix + \".nii.gz\", \".nii.gz\")\n if masks_output_dir is not None:\n save_cropped_array_as_nifty_volume(mask_output_npy, os.path.join(masks_output_dir, mask), mask_sitk)\n save_cropped_array_as_nifty_volume(convert_label(mask_output_npy, [1, 2, 3, 4, 5], [1, 2, 3, 1, 1]), \\\n os.path.join(images_output_dir, mask.replace(\".nii.gz\", 
\"_{0:04d}.nii.gz\".format(len( \\\n image_suffix_list)))), mask_sitk)\n for i, image_suffix in enumerate(image_suffix_list):\n image = mask.replace(\".nii.gz\", \"_{}.nii.gz\".format(image_suffix))\n image_path = os.path.join(images_dir, image)\n print(image_path)\n image_sitk = sitk.ReadImage(image_path)\n image_npy = sitk.GetArrayFromImage(image_sitk)\n image_output_npy = crop_ND_volume_with_bounding_box(image_npy, crop_bbox_min, crop_bbox_max)\n save_cropped_array_as_nifty_volume(image_output_npy, os.path.join(images_output_dir, mask.replace( \\\n \".nii.gz\", \"_{0:04d}.nii.gz\".format(i))), image_sitk)\n save_json(json_dict, os.path.join(images_output_dir, \"crop_information.json\"))\n if masks_output_dir is not None:\n save_json(json_dict, os.path.join(masks_output_dir, \"crop_information.json\"))", "def blending_example1():\n pic_desert = read_image(relpath(\"./externals/pic_desert.jpg\"), 2)\n pic_pool = read_image(relpath(\"./externals/pic_swim.jpg\"), 2)\n mask = read_image(relpath(\"./externals/mask_desert.jpg\"), 1)\n # making the mask binary (normalizing 2 original values)\n mask = strech_helper(mask).astype(np.bool)\n print(pic_desert.shape[2])\n [R1, G1, B1] = np.dsplit(pic_desert, pic_desert.shape[2])\n [R2, G2, B2] = np.dsplit(pic_pool, pic_pool.shape[2])\n R1 = np.reshape(R1, (512,1024))\n R2 = np.reshape(R2, (512,1024))\n G1 = np.reshape(G1, (512,1024))\n G2 = np.reshape(G2, (512,1024))\n B1 = np.reshape(B1, (512,1024))\n B2 = np.reshape(B2, (512,1024))\n\n blend1 = pyramid_blending(R2, R1, mask, 3, 3, 3)\n blend2 = pyramid_blending(G2, G1, mask, 3, 3, 3)\n blend3 = pyramid_blending(B2, B1, mask, 3, 3, 3)\n\n blend1 = np.reshape(blend1, (blend1.shape[0], blend1.shape[1], 1))\n blend2 = np.reshape(blend2, (blend2.shape[0], blend3.shape[1], 1))\n blend3 = np.reshape(blend3, (blend3.shape[0], blend3.shape[1], 1))\n\n new_pic = np.concatenate((blend1, blend2, blend3), axis=2)\n # plotting the images\n fig = plt.figure()\n ax1 = fig.add_subplot(221)\n ax2 = fig.add_subplot(222)\n ax3 = fig.add_subplot(223)\n ax4 = fig.add_subplot(224)\n ax1.imshow(pic_desert)\n ax2.imshow(pic_pool)\n ax3.imshow(mask, cmap='gray')\n ax4.imshow(new_pic)\n plt.show()\n\n return pic_desert, pic_pool, mask, new_pic" ]
[ "0.62094754", "0.586022", "0.5515826", "0.5515428", "0.54270107", "0.5426682", "0.5372924", "0.5332166", "0.53296727", "0.53201824", "0.5299715", "0.5277059", "0.52504194", "0.51970434", "0.5176687", "0.5150225", "0.51451766", "0.50600135", "0.5047835", "0.504694", "0.50378984", "0.5024521", "0.49966982", "0.49264503", "0.49155766", "0.4908132", "0.48892796", "0.48789176", "0.48675075", "0.48277682" ]
0.7592444
0
Return a unique name.
def unique_name(): return "unique-{0}".format(uuid.uuid4())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def uniqueName(self):\n return \"{0}::{1}\".format(self.name(), str(self.uid))", "def unique_id(self):\r\n name_slug = slugify(self._name)\r\n return f\"{name_slug}\"", "def name(self):\n return self._unique_id", "def generate_unique_name():\n return 'titanic-' + str(get_mac())", "def get_name() -> str:", "def GetUniqueName():\n dt = datetime.datetime.now()\n return '%s%s%s%s%s%s%s' % (dt.year, dt.month, dt.day, dt.hour, dt.minute,\n dt.second, dt.microsecond)", "def get_name() -> str:\n pass", "def unique_id(self) -> str:\n return slugify(self._name)", "def _get_random_name(self, base_name):\n return base_name + '_' + self.__id_generator()", "def generate_unique_job_name(self, name='no_name_job'):\n # TODO: Make it more suitable for disk paths. (no *, -)\n from base64 import urlsafe_b64encode\n name = os.path.basename(name)\n return \"_\".join([os.path.split(name)[1], urlsafe_b64encode(os.urandom(3))])", "def unique_identifier(self) -> str:\n return pulumi.get(self, \"unique_identifier\")", "def fixture_make_unique_name():\n def _make_unique_name(prefix):\n return f\"{prefix}{time.time_ns()}\"\n return _make_unique_name", "def generate_name(host, subject_type_or_type_name):\n id = UniqueId.create_id()\n return '%s/%s.%d' % (host, get_name(subject_type_or_type_name), id)", "def get_name():", "def name() -> str:\n pass", "def unique_id() -> str:", "def name(self):\n return self._name if self._name is not None else (\"0x%x\" % id(self))", "def generate_name(prefix):\n suffix = generate_uuid()[:8]\n return '{0}_{1}'.format(prefix, suffix)", "def _get_unique_name(self, name: str, prefix: str):\n if name is None:\n return prefix + \"-\" + PolicyPool.__generate_random_string()\n elif name in self._active_workers.keys():\n return name + PolicyPool.__generate_random_string()\n else:\n return name", "def account_name_generator():\n return 'jdoe-' + str(uuid()).lower()[:16]", "def generate_unique_name(base):\n random_length = 10\n random_string = ''.join(random.choices(string.ascii_lowercase,\n k=random_length))\n return \"%s-%s\" % (base, random_string)", "def name(self):\n uid_string = str.__str__(self)\n if uid_string in UID_dictionary:\n return UID_dictionary[self][0]\n\n return uid_string", "def unique_id(self) -> str:\n return f\"{self._host}_{self._name}_{self._unique_id}\"", "def name(self):\n return str()", "def unique_id(self) -> str:\n return \"_\".join([self._name, \"climate\"])", "def get_name(self) -> str:\n pass", "def generate_name(self):\n name = self._generate_test_name()\n while self.exists(name):\n name = self._generate_test_name()\n return name", "def _build_name(name_id):\n return \"xp_%08d\" % name_id", "def name(self):\n return f\"{self._name.replace('_', ' ')}\".title()", "def get_unique_name(self):\n return self._idx" ]
[ "0.84412026", "0.83484423", "0.80431694", "0.8036474", "0.79056853", "0.78406745", "0.78334075", "0.7792117", "0.77658874", "0.7755335", "0.7641484", "0.7600844", "0.7589731", "0.7563908", "0.7560276", "0.7515592", "0.7508697", "0.75038445", "0.7497832", "0.74735886", "0.74684656", "0.7463778", "0.7440275", "0.7435658", "0.74325377", "0.7421503", "0.74200934", "0.741275", "0.73910624", "0.7385772" ]
0.9123852
0
Mock the nuke library.
def nuke_mocker(request): m = mock.patch.dict("sys.modules", {"nuke": mock.Mock()}) m.start() request.addfinalizer(m.stop)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_package(self):\n pass", "def mock_remote_unit(monkeypatch):\n monkeypatch.setattr(\"libgitlab.hookenv.remote_unit\", lambda: \"unit-mock/0\")", "def test_stub(self):\n pass", "def mockup(cls):\n pass", "def setUp(self):\n self.dbus_mock = MagicMock()\n self.dbus_exception_mock = MagicMock()\n self.dbus_service_mock = MagicMock()\n self.mainloop_mock = MagicMock()\n self.gobject_mock = MagicMock()\n\n modules = {\n 'dbus': self.dbus_mock,\n 'dbus.exceptions': self.dbus_exception_mock,\n 'dbus.service': self.dbus_service_mock,\n 'dbus.mainloop.glib': self.mainloop_mock,\n 'gi.repository': self.gobject_mock,\n }\n self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = tests.obj_data.full_ubits\n self.dbus_mock.Interface.return_value.Get = mock_get\n self.dbus_mock.Interface.return_value.Set = mock_set\n self.dbus_mock.return_value\n self.dbus_mock.SystemBus = MagicMock()\n self.module_patcher = patch.dict('sys.modules', modules)\n self.module_patcher.start()\n from bluezero import advertisement\n from bluezero import dbus_tools\n self.module_under_test = advertisement\n self.module_tools = dbus_tools", "def setUp(self):\n self.dbus_mock = MagicMock()\n self.mainloop_mock = MagicMock()\n self.gobject_mock = MagicMock()\n\n modules = {\n 'dbus': self.dbus_mock,\n 'dbus.mainloop.glib': self.mainloop_mock,\n 'gi.repository': self.gobject_mock,\n }\n self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = tests.obj_data.full_ubits\n self.module_patcher = patch.dict('sys.modules', modules)\n self.module_patcher.start()\n from bluezero import tools\n self.module_under_test = tools", "def setUp(self):\n self.mok = Mokka( {} )", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def setUp(self):\n util.create_mocks()", "def unitary_test():", "def setUp(self):\n self.bot = helpers.MockBot()\n self.bot.api_client.get = unittest.mock.AsyncMock()\n self.cog = information.Information(self.bot)\n self.member = helpers.MockMember(id=1234)", "def test_untar(self):", "def setUp(self):\n self.setUpPyfakefs()", "def setUp(self):\n self.bot = MockBot()\n self.cog = TokenRemover(bot=self.bot)\n\n self.msg = MockMessage(id=555, content=\"hello world\")\n self.msg.channel.mention = \"#lemonade-stand\"\n self.msg.author.__str__ = MagicMock(return_value=self.msg.author.name)\n self.msg.author.avatar_url_as.return_value = \"picture-lemon.png\"", "def setUp(self):\n self.mock_model = Mock()", "def setUp(self):\n self.bot = helpers.MockBot()\n self.bot.api_client.get = unittest.mock.AsyncMock()\n self.cog = information.Information(self.bot)", "def testApi(self):", "def setUp(self): \n self.mock.setUp(__file__)", "def test_main(mocker):\n mocker.patch(\"builtins.input\", return_value=\"\"\"\n {\n \"keywords\": [\n \"ahsldkjhfalksjhflashf\"\n ],\n \"proxies\": [\n \"194.126.37.94:8080\"\n ],\n \"type\": \"Repositories\"\n }\n \"\"\")\n mocker.patch(\"crawler.Crawler.get\", return_value=\"\")\n out = mocker.patch(\"builtins.print\")\n Crawler.main()\n out.assert_called_once_with(\"[]\")", "def setUp(self):\n self.dbus_mock = MagicMock()\n self.mainloop_mock = MagicMock()\n self.gobject_mock = MagicMock()\n\n modules = {\n 'dbus': self.dbus_mock,\n 'dbus.mainloop.glib': self.mainloop_mock,\n 'gi.repository': self.gobject_mock,\n }\n self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = 
tests.obj_data.full_ubits\n self.dbus_mock.Interface.return_value.Get = mock_get\n self.dbus_mock.Interface.return_value.Set = mock_set\n self.dbus_mock.Interface.return_value.GetAll = mock_get_all\n self.module_patcher = patch.dict('sys.modules', modules)\n self.module_patcher.start()\n from bluezero import adapter\n self.module_under_test = adapter\n self.adapter_device = 'hci0'\n self.adapter_name = 'linaro-alip'\n self.path = '/org/bluez/hci0'", "def setUp(self):\n self.client = mock.create_autospec(CQLClient)\n\n self.maas_client = mock.create_autospec(MaasClient)\n patcher = mock.patch('bobby.worker.MaasClient')\n self.addCleanup(patcher.stop)\n _MaasClient = patcher.start()\n _MaasClient.return_value = self.maas_client", "def test_patch_none():", "def test_album_tracks_lastfm_notfound(bot, monkeypatch):\n\n def get_lastfm(*args, **kwargs):\n return []\n\n song = Song('Horrendous', 'The Idolater', album='Idol')\n monkeypatch.setattr(bot, 'get_lastfm', get_lastfm)\n assert bot.get_album_tracks_lastfm(song) == []", "def setup_class(cls):\n cls.mock_get_patcher = patch('project.services.requests.get')\n cls.mock_get = cls.mock_get_patcher.start()", "def setup_method(self) -> None:\n self.client = Mock()", "def test_analyzer():\n import analyzer\n\n analyzer # Fake usage." ]
[ "0.6105179", "0.607087", "0.6026963", "0.5972712", "0.5946644", "0.58715075", "0.58674854", "0.58065623", "0.58065623", "0.58065623", "0.58065623", "0.58065623", "0.58065623", "0.5782152", "0.56846625", "0.5671526", "0.5624159", "0.55935633", "0.55542195", "0.55484384", "0.55413485", "0.5532667", "0.5532509", "0.54862267", "0.5479548", "0.54696465", "0.5458453", "0.54582644", "0.54550827", "0.54528135" ]
0.7317321
0
Mock the PySide2 library.
def pyside_mocker(request): m = mock.patch.dict("sys.modules", {"PySide2": mock.Mock()}) m.start() request.addfinalizer(m.stop)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_PySide_QtGui(finder, module):\n from PySide import QtCore\n # Pyside.__version* is PySide version, PySide.QtCore.__version* is Qt version\n _QtGui(finder, module, QtCore.__version__)", "def load_PySide_QtCore(finder, module):\n finder.IncludeModule(\"atexit\")", "def main():\n\n log.init_logger()\n logger = logging.getLogger('main')\n\n logger.info(PySide.__version__)\n logger.info(PySide.QtCore.__version__)\n\n qt_app = QtGui.QApplication(sys.argv)\n\n dyninspector = DynInspector()\n\n window = gui.MainWindow(dyninspector)\n window.run()\n\n sys.exit(qt_app.exec_())", "def __init__(self):\r\n super().__init__()\r\n self._setupSideMenu()", "def mock_dear_py_gui():\n def _gui_thread(self):\n while not self.stop:\n _ = self.process_data.get()\n\n BaseRealTimeVisualizer._gui_thread = _gui_thread\n BaseRealTimeVisualizer.should_close = lambda self: False", "def testHelp(self):\n self.widget.manager = QtWidgets.QWidget()\n self.widget.manager.showHelp = MagicMock()\n self.widget.displayHelp()\n self.assertTrue(self.widget.manager.showHelp.called_once())\n args = self.widget.manager.showHelp.call_args\n self.assertIn('sld_calculator_help.html', args[0][0])", "def set_qt_lib():\n import pymel\n try:\n from anima import ui\n if pymel.versions.current() > 201500:\n ui.SET_PYSIDE2()\n else:\n ui.SET_PYSIDE()\n except AttributeError:\n pass", "def ui_setup(self):\n loader = QUiLoader()\n file = QFile('./user_interface/form/main_window.ui')\n file.open(QFile.ReadOnly)\n self._window = loader.load(file)\n file.close()\n\n status_bar = QStatusBar(self._window)\n status_bar.showMessage(__copyright__)\n self._window.setStatusBar(status_bar)\n self._window.setWindowIcon(QIcon('./user_interface/media/bucketing_icon.jpeg'))\n self._window.setWindowTitle('PySide2 Project - Basic UI Framework')\n\n self._option_panel = OptionPanel()\n self._option_panel.add_button('DekBan', './user_interface/media/dekban.png')\n self._option_panel.add_button('Charlie', './user_interface/media/charlie.jpeg')\n self._option_panel.add_button('Simon', './user_interface/media/Simon.jpeg')\n\n # Add widget to main layout\n main_layout = self._window.main_layout\n main_layout.itemAtPosition(0, 0).setAlignment(QtCore.Qt.AlignCenter)\n main_layout.itemAtPosition(0, 1).setAlignment(QtCore.Qt.AlignVCenter)\n main_layout.addWidget(self._option_panel, 2, 0, 1, 1)\n\n # Add page widget to stack\n self._pages['item'] = ItemWidget()\n self._pages['text1'] = TextPage(text=PAUSE_TEXT)\n self._pages['text2'] = TextPage(text=STOP_TEXT)\n\n for index, name in enumerate(self._pages):\n print('pages {} : {} page'.format(index, name))\n self._window.widget_stack.addWidget(self._pages[name].widget)\n\n self._window.widget_stack.setCurrentIndex(0)\n\n # Build up signal / slot\n self._option_panel.currentItemChanged.connect(self.set_page)", "def test_cli():\n interface.ResourceBundleCli(MagicMock())([\"render\"])", "def init():\n\n bindings = (_pyside2, _pyqt5, _pyside, _pyqt4)\n\n if QT_PREFERRED_BINDING:\n # Internal flag (used in installer)\n if QT_PREFERRED_BINDING == \"None\":\n self.__wrapper_version__ = self.__version__\n return\n\n preferred = QT_PREFERRED_BINDING.split(os.pathsep)\n available = {\n \"PySide2\": _pyside2,\n \"PyQt5\": _pyqt5,\n \"PySide\": _pyside,\n \"PyQt4\": _pyqt4\n }\n\n try:\n bindings = [available[binding] for binding in preferred]\n except KeyError:\n raise ImportError(\n \"Available preferred Qt bindings: \"\n \"\\n\".join(preferred)\n )\n\n for binding in bindings:\n _log(\"Trying %s\" % binding.__name__, 
QT_VERBOSE)\n\n try:\n binding = binding()\n\n except ImportError as e:\n _log(\" - ImportError(\\\"%s\\\")\" % e, QT_VERBOSE)\n continue\n\n else:\n # Reference to this module\n binding.QtCompat = self\n binding.__shim__ = self # DEPRECATED\n\n sys.modules.update({\n __name__: binding,\n\n # Fix #133, `from Qt.QtWidgets import QPushButton`\n __name__ + \".QtWidgets\": binding.QtWidgets,\n\n # Fix #158 `import Qt.QtCore;Qt.QtCore.Signal`\n __name__ + \".QtCore\": binding.QtCore,\n __name__ + \".QtGui\": binding.QtGui,\n\n })\n\n return\n\n # If not binding were found, throw this error\n raise ImportError(\"No Qt binding were found.\")", "def testOnHelp(self):\n webbrowser.open = MagicMock()\n\n # invoke the tested method\n self.widget.onHelp()\n\n # see that webbrowser open was attempted\n webbrowser.open.assert_called_once()", "def setUp(self):\n self.display = StubDisplay()", "def setUp(self):\n self.ui = UI()", "def setUp(self):\n self._plugin = spotlight_volume.SpotlightVolumePlugin()\n self._parser = plist.PlistParser()", "def test_standard_cqt():\n run_framesync(CQT)", "def test_stub(self):\n pass", "def load_PyQt4_QtGui(finder, module):\n name, QtCore = _qt_implementation(module)\n _QtGui(finder, module, QtCore.QT_VERSION_STR)", "def setUp(self):\r\n \r\n self._editorFactory = EditorFactory()\r\n self._app = QtGui.QApplication(sys.argv)", "def test_window_loaded(self):", "def pytest_runtest_setup(item):\n global _widget\n module, line, method = item.location\n module = module.replace('.py', '.')\n title = module + method\n widgets = QApplication.instance().topLevelWidgets()\n for w in widgets:\n w.setWindowTitle(title)\n logging.info(\"------------------- %s -------------------\", title)", "def test_set_wrap(self):\n self.server_widget.wrap = True\n assert self.client_widget.wrap == self.server_widget.wrap", "def setup_qt():\n app = QApplication([])\n return app", "def test_empty_ui(self):", "def setUp(self):\n self.dbus_mock = MagicMock()\n self.dbus_exception_mock = MagicMock()\n self.dbus_service_mock = MagicMock()\n self.mainloop_mock = MagicMock()\n self.gobject_mock = MagicMock()\n\n modules = {\n 'dbus': self.dbus_mock,\n 'dbus.exceptions': self.dbus_exception_mock,\n 'dbus.service': self.dbus_service_mock,\n 'dbus.mainloop.glib': self.mainloop_mock,\n 'gi.repository': self.gobject_mock,\n }\n self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = tests.obj_data.full_ubits\n self.dbus_mock.Interface.return_value.Get = mock_get\n self.dbus_mock.Interface.return_value.Set = mock_set\n self.dbus_mock.return_value\n self.dbus_mock.SystemBus = MagicMock()\n self.module_patcher = patch.dict('sys.modules', modules)\n self.module_patcher.start()\n from bluezero import advertisement\n from bluezero import dbus_tools\n self.module_under_test = advertisement\n self.module_tools = dbus_tools", "def setUp(self):\n self.dbus_mock = MagicMock()\n self.mainloop_mock = MagicMock()\n self.gobject_mock = MagicMock()\n\n modules = {\n 'dbus': self.dbus_mock,\n 'dbus.mainloop.glib': self.mainloop_mock,\n 'gi.repository': self.gobject_mock,\n }\n self.dbus_mock.Interface.return_value.GetManagedObjects.return_value = tests.obj_data.full_ubits\n self.module_patcher = patch.dict('sys.modules', modules)\n self.module_patcher.start()\n from bluezero import tools\n self.module_under_test = tools", "def _hijack_qt4(self):\n from PyQt4 import QtGui, QtCore\n orig_mainloop = QtGui.qApp.exec_\n dumb_ml = _DummyMainloop(orig_mainloop, self, GUI_QT4)\n QtGui.qApp.exec_ = dumb_ml\n 
QtGui.QApplication.exec_ = dumb_ml\n QtCore.QCoreApplication.exec_ = dumb_ml\n return orig_mainloop", "def setUp(self):\n self.console = ConsoleWidget()\n self.text_edit = self.console._control", "def load_PyQt4_QtCore(finder, module):\n name, QtCore = _qt_implementation(module)\n finder.IncludeModule(\"sip\")\n try:\n finder.IncludeModule(\"%s._qt\" % name)\n except ImportError:\n pass", "def __init__(self):\r\n super().__init__()\r\n self.init_ui()", "def setup(self): \n pass" ]
[ "0.63506424", "0.6021899", "0.5957812", "0.5710402", "0.56602085", "0.55488366", "0.5523198", "0.5519488", "0.5501204", "0.5468531", "0.53808707", "0.5348533", "0.53077203", "0.5290195", "0.5266464", "0.52590716", "0.52429485", "0.5218564", "0.52131027", "0.5189418", "0.5183658", "0.513865", "0.5128613", "0.511401", "0.50921607", "0.50428355", "0.50396895", "0.5032038", "0.5006114", "0.50028735" ]
0.8482418
0
Lifts a function into the elevated world
def lift(cls, func): raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_func_world(mech, world):\n globals().update(world)\n for modkey in dir(funcs):\n mod = getattr(funcs, modkey)\n if not hasattr(mod, 'update_func_world'): continue\n mod.update_func_world(mech, world)", "def wrappedFn(*args, **kw):\n setCurrent(context)\n fn(*args, **kw)", "def __call__(self, x, y):\n xShift = self.lowerBound(y)\n f_out = self.func(x - xShift, y)\n return f_out", "def elevate_priv_if_needed(func):\n def inner(*args, **kwargs):\n try:\n return func(*args, **kwargs)\n except OSError as e:\n logger.debug('Elevating privileges due to receiving permission errror')\n logger.debug(e)\n return run_as_root(func)(*args, **kwargs)\n\n return inner", "def liftn(cls, func):\n raise NotImplementedError", "def execute_under_special_role(roles, function, *args, **kwargs):\n\n portal = getSite()\n sm = getSecurityManager()\n\n try:\n try:\n # Clone the current user and assign a new role.\n # Note that the username (getId()) is left in exception\n # tracebacks in the error_log,\n # so it is an important thing to store.\n tmp_user = UnrestrictedUser(\n sm.getUser().getId(), '', roles, ''\n )\n\n # Wrap the user in the acquisition context of the portal\n tmp_user = tmp_user.__of__(portal.acl_users)\n newSecurityManager(None, tmp_user)\n\n # Call the function\n return function(*args, **kwargs)\n\n except:\n # If special exception handlers are needed, run them here\n raise\n finally:\n # Restore the old security manager\n setSecurityManager(sm)", "def __call__(self, x, y, z):\n xShift = self.lowerBound(y)\n f_out = self.func(x - xShift, y, z)\n return f_out", "def E_local(Walker):\n return Local_Kinetic(Walker)+potential(Walker)", "def set_make_move(function: Callable) -> None:\n main.make_move = function", "def inject_and_run(self,func,**kw):\r\n args=inspect.getargspec(func).args\r\n if 'request' in args:\r\n kw['request']=self.request\r\n if 'user' in args:\r\n user=self._get_user()\r\n if user:\r\n kw['user']=user\r\n else:\r\n raise UserWarning,'function need login ,but you are not login'\r\n \r\n return func(**kw)", "def fn_wrapper(request, *args, **kwargs):\n user = get_user(request.user.username)\n if not user:\n return HttpResponseForbidden()\n if not user.seen_welcome:\n return HttpResponseRedirect('/about/')\n request.rooms_user = user\n return fn(request, *args, **kwargs)", "def do_east(self, arg):\r\n moveDirection('east')", "def _mod_only(func):\n func._mods_only = True\n return func", "def move_lift_ground():\n return _move_lift(0)", "def pony_func(func):\n func.is_pony_func = True\n return func", "def move_lift_up():\n return _move_lift(1)", "def __wrapper(request, *args, **kwds):\n if request.user_is_admin:\n return func(request, *args, **kwds) # pylint: disable-msg=W0142\n else:\n return utility.forbidden(\n request,\n error_message='You must be an administrator to view this page.')", "def fireEast(self):\n self.rotate('e')\n gun = Laser(self)\n gun.shoot('e')\n self.agent.actionCompleted()", "def event1922():\n header(1922)\n end_if_this_event_on()\n if_player_has_special_effect(0, SPEFFECT.AriamisSoulEffect)\n flag.enable(EVENT.AriamisWarp)\n warp.warp_player(11, 0, 1100990)", "def position_op(x, wfunc):\n return x*wfunc", "def restricted(func):\n @wraps(func)\n def wrapped(bot, update, *args, **kwargs):\n user_id = update.effective_user.id\n if user_id not in LIST_OF_ADMINS:\n print(\"Unauthorized access denied for {}.\".format(user_id))\n # tell the unauthorized user to go away\n update.message.reply_text('Go away.')\n return\n return func(bot, update, *args, 
**kwargs)\n return wrapped", "def make_torpedo_re_attack(self, current_gameboard, alternate_func):\n action_choices._remove_torpedo_after_die_roll = getattr(sys.modules[__name__], alternate_func)", "def mlift(func):\n return compose(unit, func)", "def fireWest(self):\n self.rotate('w')\n gun = Laser(self)\n gun.shoot('w')\n self.agent.actionCompleted()", "def call_orig_func(func, *args, **kwargs):\n return func(*args, **kwargs)", "def func():", "def apply(self, func, *args):\n pass", "def apply_only(self, function, worker, *args, **kwargs):\n pass", "def rollout(self, node, scratch_game):\n pass", "def fn():" ]
[ "0.55809885", "0.5369843", "0.53542167", "0.53514177", "0.53293675", "0.5246924", "0.52123636", "0.5193386", "0.5180565", "0.5180341", "0.51693743", "0.5167798", "0.51637644", "0.5138282", "0.5133104", "0.50698644", "0.5064343", "0.5046755", "0.5043975", "0.5039459", "0.502154", "0.5015883", "0.5012937", "0.49706197", "0.49511203", "0.4948568", "0.4947836", "0.49385428", "0.49108282", "0.4907876" ]
0.5964458
0
Get year with links
def get_year_with_links(): response = get_response(MAIN_PAGE) if response.ok: soup = BeautifulSoup(response.text, 'html.parser') years_li = soup.find_all( 'md-card-footer' ) years_dict = {} # Not including the last <a> tag because that is not relevant. for years_html in years_li[:-1]: year = [num for num in years_html.text.split() if num.isdigit()][0] relative_link = years_html.select('a')[0].get('href') full_link = HOME_PAGE + relative_link years_dict[year] = full_link return years_dict else: print('Something Went Wrong') print(f'Status Code: {response.status_code}') sys.exit(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scrape_article_links(year: int) -> List[str]:\n # Take into considerations leap years and days when no articles are published\n pass", "def get_year(url):\n year = re.search(\"\\d{4}\", url).group(0)\n return int(year)", "async def All_orgs():\n\n links_13 = []\n links_14 = []\n valid_url = \"/?archive/?gsoc/\\d+[0-9]/orgs/[a-zA-Z]+\"\n for year in range(2009, 2016):\n year_url = melange + \"/archive/gsoc/{}\".format(year)\n soup = await get_page(year_url)\n\n for url in soup.find_all('a'):\n if re.match(valid_url, url.get(\"href\")):\n if year <= 2013:\n links_13.append(join(melange, url.get(\"href\")[1:]))\n else:\n links_14.append(join(melange, url.get(\"href\")[1:]))\n return links_13, links_14", "def get_organizations_list_with_links(year_link):\n response = get_response(year_link)\n if response.ok:\n soup = BeautifulSoup(response.text, 'html.parser')\n orgs_li = soup.find_all(\n 'li', attrs={'class': 'organization-card__container'})\n orgs_dict = {}\n for orgs_html in orgs_li:\n org_name = orgs_html.select('h4')[0].text.replace('\\n', '')\n relative_link = orgs_html.select('a')[0].get('href')\n full_link = HOME_PAGE + relative_link\n orgs_dict[org_name] = full_link\n return orgs_dict\n else:\n print('Something Went Wrong')\n print(f'Status Code: {response.status_code}')\n sys.exit(1)", "def getYears():\n url = \"http://www.boxofficemojo.com/weekend/\"\n src = urllib.request.urlopen(url).read()\n soup = BeautifulSoup(src, 'html.parser')\n year_header = soup.find_all(name = \"b\")[1]\n year_elems = year_header.find_all([\"a\", \"font\"])\n years = [int(year.get_text()) for year in year_elems]\n return years", "def make_year(res):\n return str(res['issued']['date-parts'][0][0])", "def filter_years(text):\n months = ['january', 'february', 'march', 'april', 'may', 'june',\n 'july', 'august', 'september', 'october', 'november', 'december']\n prepositions = ['around', 'after', 'at', 'as',\n 'approximately', 'before', 'between', 'by',\n 'during', 'from', 'in', 'near', 'past',\n 'since', 'until', 'within'] # removed: about, on\n conjugations = ['and']\n articles = ['the']\n times = ['early', 'mid', 'late']\n patterns = months + prepositions + conjugations + articles + times\n re_string = r'\\b(' + '|'.join(patterns) + r')\\b(\\s|-)\\b([0-9]{3,4})s?\\b(?i)(?!\\sMYA)\\s?(BCE|BC)?'\n years = [int(match.group(3)) * (-2*bool(match.group(4))+1)\n for match in re.finditer(re_string, text, re.IGNORECASE)]\n re_string = r'([0-9]{1,2})(st|nd|rd|th) century\\s?(BCE|BC)?'\n centuries = [(int(match.group(1)) * 100 - 100) * (-2*bool(match.group(2))+1)\n for match in re.finditer(re_string, text, re.IGNORECASE)]\n years += centuries\n years = [y for y in years if y<Dump.MAX_YEAR]\n return sorted(years + centuries)", "def get_year(date):\n return date.strftime('%Y')", "def create_urls(years):\n urls = []\n for year in years:\n url = f\"http://billboardtop100of.com/{year}-2/\"\n urls.append(url)\n return urls", "def create_yearmonth_link(d,fieldname):\n title = smart_unicode(d.strftime('%Y %B'))\n param_dict = { \n '%s__year' % fieldname: str(d.year), \n '%s__month' % fieldname: str(d.month), \n }\n return title,param_dict", "def date_year(date):\n return date.year", "def metacritic_crawl_by_year(year, verbose):\n _crawl_by_year_helper(year, verbose, False, True)", "def year(self):\n return self._years", "def get_year(self, grab):\n return int(\n grab.doc.select(\n '//time[@itemprop=\"releaseDate\"]'\n ).attr('datetime')\n )", "def imdb_crawl_by_year(year, verbose):\n _crawl_by_year_helper(year, 
verbose, True, False)", "def extract_year(text):\n # type: (str) -> int\n data = re.search(r\"\\d{4}\", text)\n return int(data.group()) if data else 0", "def get_year(x):\n return x[\"SALE DATE\"].year", "def year_archive(request, year):\n articles = Article.objects.filter(pub_date__year=year)\n context = { 'year': year, 'articles': articles }\n return render(request, 'news/year_archive.html', context)", "def crawl_all_by_year(year, verbose):\n _crawl_by_year_helper(year, verbose, True, True)", "def _get_links(self, from_year):\n self.links = []\n self.titles = []\n self.speakers = []\n self.dates = []\n\n r = requests.get(self.calendar_url)\n soup = BeautifulSoup(r.text, \"html.parser\")\n\n if self.verbose:\n print(\"Getting links for press conference scripts...\")\n presconfs = soup.find_all(\n \"a\", href=re.compile(\"^/monetarypolicy/fomcpresconf\\d{8}.htm\")\n )\n presconf_urls = [\n self.base_url + presconf.attrs[\"href\"] for presconf in presconfs\n ]\n for presconf_url in presconf_urls:\n r_presconf = requests.get(presconf_url)\n soup_presconf = BeautifulSoup(r_presconf.text, \"html.parser\")\n contents = soup_presconf.find_all(\n \"a\", href=re.compile(\"^/mediacenter/files/FOMCpresconf\\d{8}.pdf\")\n )\n for content in contents:\n # print(content)\n self.links.append(content.attrs[\"href\"])\n self.speakers.append(\n self._speaker_from_date(self._date_from_link(content.attrs[\"href\"]))\n )\n self.titles.append(\"FOMC Press Conference Transcript\")\n self.dates.append(\n datetime.strptime(\n self._date_from_link(content.attrs[\"href\"]), \"%Y-%m-%d\"\n )\n )\n if self.verbose:\n print(\"{} links found in current page.\".format(len(self.links)))\n\n # Archived before 2015\n if from_year <= 2014:\n print(\"Getting links from archive pages...\")\n for year in range(from_year, 2015):\n yearly_contents = []\n fomc_yearly_url = (\n self.base_url\n + \"/monetarypolicy/fomchistorical\"\n + str(year)\n + \".htm\"\n )\n r_year = requests.get(fomc_yearly_url)\n soup_yearly = BeautifulSoup(r_year.text, \"html.parser\")\n\n presconf_hists = soup_yearly.find_all(\n \"a\", href=re.compile(\"^/monetarypolicy/fomcpresconf\\d{8}.htm\")\n )\n presconf_hist_urls = [\n self.base_url + presconf_hist.attrs[\"href\"]\n for presconf_hist in presconf_hists\n ]\n for presconf_hist_url in presconf_hist_urls:\n # print(presconf_hist_url)\n r_presconf_hist = requests.get(presconf_hist_url)\n soup_presconf_hist = BeautifulSoup(\n r_presconf_hist.text, \"html.parser\"\n )\n yearly_contents = soup_presconf_hist.find_all(\n \"a\",\n href=re.compile(\"^/mediacenter/files/FOMCpresconf\\d{8}.pdf\"),\n )\n for yearly_content in yearly_contents:\n # print(yearly_content)\n self.links.append(yearly_content.attrs[\"href\"])\n self.speakers.append(\n self._speaker_from_date(\n self._date_from_link(yearly_content.attrs[\"href\"])\n )\n )\n self.titles.append(\"FOMC Press Conference Transcript\")\n self.dates.append(\n datetime.strptime(\n self._date_from_link(yearly_content.attrs[\"href\"]),\n \"%Y-%m-%d\",\n )\n )\n if self.verbose:\n print(\n \"YEAR: {} - {} links found.\".format(\n year, len(presconf_hist_urls)\n )\n )\n print(\"There are total \", len(self.links), \" links for \", self.content_type)", "def _year_range(m):\n return (m.group(1), m.group(2))", "def yearname(self):\n return self.strftime(\"%Y\")", "def get_number_year(text):\n val = get_number(text)\n if val is None or val < 1700 or val > (datetime.date.today().year + 1):\n return None\n return val", "def showNextYear(self):\n pass", "def 
get_date(self,yearlimits=[1500,2020]):\n\t\thead = self.raw_text()[:300] \t \t \n\t\tparser = Regexdate(head) \t \t\t\n\t\tyear = parser.find_year(yearlimits)\t\t\n\t\tmonth = parser.find_month()\n\t\tday = parser.find_day()\n\t\tif day and year != \"\":\n\t\t\treturn year + \"-\" + month + \"-\" + day\t\n\t\tif year:\n\t\t\treturn year\n\t\treturn \"\"", "def articleAuthorsByYear(g = None, year = None):\n if not g:\n return\n articles = g.find(\"Article\", property_key='year', property_value=str(year))\n ret = dict()\n for article in articles:\n ret[article['title']] = article['authors'].replace('\\n','').lower()\n return ret", "def get_track_urls(year):\r\n # assert int(year) >= 2023, f\"only support year >= 2023, but get {year}!!!\"\r\n project_root_folder = os.path.abspath(\r\n os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\r\n dat_file_pathname = os.path.join(\r\n project_root_folder, 'urls', f'track_archive_url_AAAI_{year}.dat'\r\n )\r\n proceeding_th_dict = {\r\n 1980: 1,\r\n 1902: 2,\r\n 1983: 3,\r\n 1984: 4,\r\n 1986: 5,\r\n 1987: 6,\r\n 1988: 7,\r\n 1990: 8,\r\n 1991: 9,\r\n 1992: 10,\r\n 1993: 11,\r\n 1994: 12,\r\n 1996: 13,\r\n 1997: 14,\r\n 1998: 15,\r\n 1999: 16,\r\n 2000: 17,\r\n 2002: 18,\r\n 2004: 19,\r\n 2005: 20,\r\n 2006: 21,\r\n 2007: 22,\r\n 2008: 23\r\n }\r\n if year >= 2023:\r\n base_url = r'https://ojs.aaai.org/index.php/AAAI/issue/archive'\r\n headers = {\r\n 'User-Agent': user_agents[-1],\r\n 'Host': 'ojs.aaai.org',\r\n 'Referer': \"https://ojs.aaai.org\",\r\n 'GET': base_url\r\n }\r\n if os.path.exists(dat_file_pathname):\r\n with open(dat_file_pathname, 'rb') as f:\r\n content = pickle.load(f)\r\n else:\r\n req = urllib.request.Request(url=base_url, headers=headers)\r\n content = urllib.request.urlopen(req).read()\r\n # content = open(f'..\\\\AAAI_{year}.html', 'rb').read()\r\n with open(dat_file_pathname, 'wb') as f:\r\n pickle.dump(content, f)\r\n soup = BeautifulSoup(content, 'html5lib')\r\n tracks = soup.find('ul', {'class': 'issues_archive'}).find_all('li')\r\n track_urls = dict()\r\n for tr in tracks:\r\n h2 = tr.find('h2')\r\n this_track = slugify(h2.a.text)\r\n if this_track.startswith(f'aaai-{year-2000}'):\r\n this_track += slugify(h2.div.text) + '-' + this_track\r\n this_url = h2.a.get('href')\r\n track_urls[this_track] = this_url\r\n print(f'find track: {this_track}({this_url})')\r\n else:\r\n if year >= 2010:\r\n proceeding_th = year - 1986\r\n elif year in proceeding_th_dict:\r\n proceeding_th = proceeding_th_dict[year]\r\n else:\r\n print(f'ERROR: AAAI proceeding was not held in year {year}!!!')\r\n return\r\n\r\n base_url = f'https://aaai.org/proceeding/aaai-{proceeding_th:02d}-{year}/'\r\n headers = {\r\n 'User-Agent': user_agents[-1],\r\n 'Host': 'aaai.org',\r\n 'Referer': \"https://aaai.org\",\r\n 'GET': base_url\r\n }\r\n if os.path.exists(dat_file_pathname):\r\n with open(dat_file_pathname, 'rb') as f:\r\n content = pickle.load(f)\r\n else:\r\n req = urllib.request.Request(url=base_url, headers=headers)\r\n content = urllib.request.urlopen(req).read()\r\n # content = open(f'..\\\\AAAI_{year}.html', 'rb').read()\r\n with open(dat_file_pathname, 'wb') as f:\r\n pickle.dump(content, f)\r\n soup = BeautifulSoup(content, 'html5lib')\r\n tracks = soup.find('main', {'class': 'content'}).find_all('li')\r\n track_urls = dict()\r\n for tr in tracks:\r\n this_track = slugify(tr.a.text)\r\n this_url = tr.a.get('href')\r\n track_urls[this_track] = this_url\r\n print(f'find track: {this_track}({this_url})')\r\n return track_urls", "def 
parse_academic_year(year):\n return int(year.split(\"/\")[0])", "def find_year(self,datelimits): \t \t \n year = \"\"\n\n\t \tmatch = re.search(r\"[I1][\\dG]{3}\",self.string)\n\n\t \tif match: \t \t \t \n\t \t\tif re.search(r\"(\\d{4})\",match.group()):\n\t \t\t\tyear = match.group()\n elif re.search(r\"I\\d{3}\",match.group()):\n\t \t\t\tmatch = re.sub(r\"I(\\d{3})\",r\"1\\1\",match.group())\n\t \t\t\tyear = match\n\t \t\telif re.search(r\"(\\d[G\\d]{3})\",match.group()):\n\t \t\t\tmatch = re.sub(r\"G\",r\"6\",match.group())\n\t \t\t\tyear = match\n\n \n if year == \"\" or int(year) < datelimits[0] or int(year) > datelimits[1]:\n year = \"\"\n \n\n\t \treturn year", "def test_spider_gets_specific_year(self):\n spider = Eia923Spider()\n resp = factories.TestResponseFactory(eia923=True)\n\n result = spider.form_for_year(resp, 2007)\n\n assert result is not None\n assert result.url == \"https://www.eia.gov/electricity/data/eia923/\" \\\n \"archive/xls/f906920_2007.zip\"\n assert result.meta[\"year\"] == 2007\n\n for year in range(2001, 2019):\n result = spider.form_for_year(resp, year)\n assert result is not None" ]
[ "0.70485425", "0.70415264", "0.6887806", "0.68229485", "0.6784531", "0.6646109", "0.6411402", "0.6376097", "0.63456386", "0.6339458", "0.62634367", "0.6259343", "0.62293506", "0.61992395", "0.6168758", "0.6096239", "0.60911524", "0.60729194", "0.6053873", "0.6046361", "0.6040469", "0.6033628", "0.60159624", "0.60152304", "0.60036767", "0.5986087", "0.5983793", "0.59549826", "0.5952482", "0.5951264" ]
0.79827887
0
Calculate x/y projection of RA1/Dec1 in system with center at RAcen, Deccen. Input radians.
def gnomonic_project_toxy(RA1, Dec1, RAcen, Deccen): # also used in Global Telescope Network website cosc = np.sin(Deccen) * np.sin(Dec1) + np.cos(Deccen) * np.cos(Dec1) * np.cos(RA1-RAcen) x = np.cos(Dec1) * np.sin(RA1-RAcen) / cosc y = (np.cos(Deccen)*np.sin(Dec1) - np.sin(Deccen)*np.cos(Dec1)*np.cos(RA1-RAcen)) / cosc return x, y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cartesian_coordinates(self):\n # extract RA items\n ra_hours, ra_minutes, ra_seconds = RA_RE.match(str(self.ra)).groups()\n # then cast\n ra_hours = int(ra_hours)\n ra_minutes = int(ra_minutes)\n ra_seconds = float(ra_seconds)\n\n # extract DEC items\n dec_sign, dec_degrees, dec_minutes, dec_seconds = DEC_RE.match(str(self.dec)).groups()\n # then cast\n dec_sign = -1 if dec_sign == '-' else 1\n dec_degrees = int(dec_degrees)\n dec_minutes = int(dec_minutes)\n dec_seconds = float(dec_seconds)\n\n # to degrees\n a = (ra_hours*15) + (ra_minutes*0.25) + (ra_seconds*0.004166)\n b = abs(dec_degrees + dec_minutes/60 + dec_seconds/3600) * dec_sign\n\n # to radians\n a = math.radians(a)\n b = math.radians(b)\n\n distance = float(self.distance)\n\n x = (distance * math.cos(b)) * math.cos(a)\n y = (distance * math.cos(b)) * math.sin(a)\n z = distance * math.sin(b)\n\n return x, y, z", "def deproject_coords(center = None, ra = None, dec = None, pa = 0, inclination = 0., **kwargs): \n\n from numpy import radians, degrees, sin, cos, arctan2\n\n\n dra = (ra - center[0]) * np.cos(radians(dec)) * 3600\n ddec = (dec -center[1]) * 3600\n \n x1 = 0-dra # east is usually positive; switch the coordinate axis\n y1 = ddec\n\n theta = radians(pa - 90.0) #because PA is measured east of north (y-axis) instead of east of west.\n xp = x1*cos(theta) + y1*sin(theta)\n yp = -x1*sin(theta) + y1*cos(theta)\n\n ypp = yp/cos(radians(inclination)) # de-project\n radii = hypot(xp,ypp) # [arcsec]\n\n if (x1 == 0.0):\n phi = 0.0\n else :\n phi = degrees(arctan2(ypp, xp))\n\n return radii, phi", "def _spherical_to_cartesian(ra, dec):\n rar = np.radians(ra)\n decr = np.radians(dec)\n\n x = np.cos(rar) * np.cos(decr)\n y = np.sin(rar) * np.cos(decr)\n z = np.sin(decr)\n \n return x, y, z", "def calc_ra_dec():\n \n pos_str = source['ra_hms'].val + \" \" + source['dec_dms'].val\n pos_str = re.sub(r'[,:dhms]', ' ', pos_str)\n args = pos_str.split()\n\n if len(args) != 6:\n raise ValueError(\"Input source position '%s' needs 6 values\" % pos_str)\n\n rah = int(args[0])\n ram = int(args[1])\n ras = float(args[2])\n decsign = '-' if args[3].startswith('-') else '+'\n decd = abs(int(args[3]))\n decm = int(args[4])\n decs = float(args[5])\n\n ra = 15.0 * (rah + ram/60. + ras/3600.)\n dec = abs(decd) + decm/60. + decs/3600.\n if decsign == '-':\n dec = -dec\n\n source['ra'] = ra\n source['dec'] = dec\n logger.verbose(pyyaks.context.render('RA={{source.ra}} Dec={{source.dec}}'))", "def rotate(self, ra1, dec1, ra2, dec2, ra3, dec3):\n # Turns Right Ascension/Declination into Azimuth/Zenith for healpy\n phi1 = ra1 - np.pi\n zen1 = np.pi/2. - dec1\n phi2 = ra2 - np.pi\n zen2 = np.pi/2. - dec2\n phi3 = ra3 - np.pi\n zen3 = np.pi/2. - dec3\n\n # Rotate each ra1 and dec1 towards the pole?\n x = np.array([hp.rotator.rotateDirection(\n hp.rotator.get_rotation_matrix((dp, -dz, 0.))[0], z, p)\n for z, p, dz, dp in zip(zen1, phi1, zen2, phi2)])\n\n # Rotate **all** these vectors towards ra3, dec3 (source_path)\n zen, phi = hp.rotator.rotateDirection(np.dot(\n hp.rotator.get_rotation_matrix((-phi3, 0, 0))[0],\n hp.rotator.get_rotation_matrix((0, zen3, 0.))[0]), x[:, 0], x[:, 1])\n\n dec = np.pi/2. 
- zen\n ra = phi + np.pi\n return np.atleast_1d(ra), np.atleast_1d(dec)", "def ra2xy(self, ra):\n return -math.sin(ra), math.cos(ra)", "def _radec_to_xyz(ra_deg, dec_deg):\n ra = np.asarray(ra_deg) * RAD_PER_DEG\n dec = np.asarray(dec_deg) * RAD_PER_DEG\n cosd = np.cos(dec)\n xyz = np.array([cosd * np.cos(ra),\n cosd * np.sin(ra),\n np.sin(dec)]).T\n\n return np.atleast_2d(xyz)", "def get_center_radius(ra, dec):\n center_ra = (np.max(ra) + np.min(ra))/2\n center_dec = (np.max(dec) + np.min(dec))/2\n radius = np.max([np.max(ra) - np.min(ra),\n np.max(dec) - np.min(dec)])\n return center_ra, center_dec, radius", "def calc_R(center):\r\n xc = center[0]\r\n yc = center[1]\r\n return np.sqrt((x-xc)**2 + (y-yc)**2)", "def get_radec(radec,\\\n ndraws=1,random_state=np.random.RandomState()):\n ramin,ramax= radec['ra1'],radec['ra2']\n dcmin,dcmax= radec['dec1'],radec['dec2']\n u1,u2= random_state.uniform(size=(2, ndraws) )\n #\n cmin = np.sin(dcmin*np.pi/180)\n cmax = np.sin(dcmax*np.pi/180)\n #\n RA = ramin + u1*(ramax-ramin)\n DEC = 90-np.arccos(cmin+u2*(cmax-cmin))*180./np.pi\n return RA,DEC", "def azalt(ra, dec):\n\tx = rectanglize(ra, dec)\n\ty = np.dot(R_1, x)\n\tz = np.dot(R_2, y)\n\treturn sphericalize(z)", "def find_greatcircle(ra_deg, dec_deg):\n \n #stream = stream_model(name, pparams0=pparams, dt=dt)\n \n ## find the pole\n #ra = np.radians(stream.obs[0])\n #dec = np.radians(stream.obs[1])\n ra = np.radians(ra_deg)\n dec = np.radians(dec_deg)\n \n rx = np.cos(ra) * np.cos(dec)\n ry = np.sin(ra) * np.cos(dec)\n rz = np.sin(dec)\n r = np.column_stack((rx, ry, rz))\n #r = sph2cart(ra, dec)\n\n # fit the plane\n x0 = np.array([0, 1, 0])\n lsq = scipy.optimize.minimize(wfit_plane, x0, args=(r,))\n x0 = lsq.x/np.linalg.norm(lsq.x)\n ra0 = np.arctan2(x0[1], x0[0])\n dec0 = np.arcsin(x0[2])\n \n ra0 += np.pi\n dec0 = np.pi/2 - dec0\n\n # euler rotations\n R0 = myutils.rotmatrix(np.degrees(-ra0), 2)\n R1 = myutils.rotmatrix(np.degrees(dec0), 1)\n R2 = myutils.rotmatrix(0, 2)\n R = np.dot(R2, np.matmul(R1, R0))\n \n xi, eta = myutils.rotate_angles(ra_deg, dec_deg, R)\n \n # put xi = 50 at the beginning of the stream\n xi[xi>180] -= 360\n xi += 360\n xi0 = np.min(xi) - 50\n R2 = myutils.rotmatrix(-xi0, 2)\n R = np.dot(R2, np.matmul(R1, R0))\n xi, eta = myutils.rotate_angles(ra_deg, dec_deg, R)\n \n return R", "def radec2pos(ra, dec):\n\tpos = np.empty(len(ra), dtype=('f8', 3))\n\tra = ra * (np.pi / 180)\n\tdec = dec * (np.pi / 180)\n\tpos[:, 2] = np.sin(dec)\n\tpos[:, 0] = np.cos(dec) * np.sin(ra)\n\tpos[:, 1] = np.cos(dec) * np.cos(ra)\n\treturn pos", "def makePlot(ra, dec, date=None, name=None, figsize=(6.,6.), dpi=80, s=50, center=None, airmass=True, moon=True, des=True):\n #figsize=(10.5,8.5)\n if date is None: date = ephem.now()\n if type(date) != ephem.Date:\n date = ephem.Date(date)\n\n observatory = utils.ctio()\n observatory.date = date\n \n #fig, ax = plt.subplots(fig='ortho', figsize=FIGSIZE, dpi=DPI)\n #fig = plt.figure('ortho')\n #ax = plt.subplots(figure=fig, figsize=FIGSIZE, dpi=DPI)\n fig = plt.figure(name, figsize=figsize, dpi=dpi)\n\n ra_zenith, dec_zenith = observatory.radec_of(0, '90') # RA and Dec of zenith\n ra_zenith = np.degrees(ra_zenith)\n dec_zenith = np.degrees(dec_zenith)\n\n # Zenith position\n #lon_zen = LMC_RA; lat_zen = LMC_DEC\n lon_zen = ra_zenith; lat_zen = dec_zenith\n\n # Create the basemap\n proj_kwargs = dict(projection='ortho', celestial=True)\n if center is None:\n lon_0, lat_0 = -lon_zen, lat_zen # Center position\n else:\n lon_0, lat_0 = center[0], 
center[1]\n\n proj_kwargs.update(lon_0=lon_0, lat_0=lat_0)\n #print proj_kwargs\n print(proj_kwargs)\n basemap = DECamBasemap(**proj_kwargs)\n\n parallels = np.arange(-90.,120.,30.)\n basemap.drawparallels(parallels)\n meridians = np.arange(0.,420.,60.)\n basemap.drawmeridians(meridians)\n\n if des: drawDES(basemap)\n if airmass: drawAirmassContour(basemap, observatory, 2., s=s)\n if moon: drawMoon(basemap, date)\n plt.title('%s UTC'%(datestring(date)))\n \n drawTarget(basemap, ra, dec)\n\n #return fig, ax, basemap\n return fig, basemap", "def get_cartesian_coords(self):\n r = 1\n dec = self.dec + 90\n x = r * math.sin(np.deg2rad(dec)) * math.cos(np.deg2rad(self.ra))\n y = r * math.sin(np.deg2rad(dec)) * math.sin(np.deg2rad(self.ra))\n z = r * math.cos(np.deg2rad(dec))\n\n return [x, y, z]", "def getAngDist(ra1, dec1, ra2, dec2): \n \n delt_lon = (ra1 - ra2)*np.pi/180.\n delt_lat = (dec1 - dec2)*np.pi/180.\n # Haversine formula\n dist = 2.0*np.arcsin( np.sqrt( np.sin(delt_lat/2.0)**2 + np.cos(dec1*np.pi/180.)*np.cos(dec2*np.pi/180.)*np.sin(delt_lon/2.0)**2 ) ) \n\n return dist/np.pi*180.", "def cartesian_To_Center(self, x, y, z):\n\n if x > 0.0 and -self.L_cap <= y <= 0.0:\n s = self.L_cap + y\n xc = x - self.rb\n yc = z\n else:\n theta = full_arctan2(y, x)\n if theta <= self.ang:\n s = theta * self.rb + self.L_cap\n xc = np.sqrt(x ** 2 + y ** 2) - self.rb\n yc = z\n elif self.ang < theta <= 2 * np.pi: # i'm being lazy here and not limiting the real end\n x0, y0 = np.cos(self.ang) * self.rb, np.sin(self.ang) * self.rb\n thetaEndPerp = np.pi - np.arctan(-1 / np.tan(self.ang))\n x, y = x - x0, y - y0\n deltaS, xc = np.cos(thetaEndPerp) * x + np.sin(-thetaEndPerp) * y, np.sin(thetaEndPerp) * x + np.cos(\n thetaEndPerp) * y\n yc = z\n xc = -xc\n s = (self.ang * self.rb + self.L_cap) + deltaS\n else:\n raise ValueError\n return s, xc, yc", "def radec_to_galactic(coords):\n\n def gross_coords_to_rads(coords):\n ra, dec = coords\n coords = SkyCoord(ra=ra, dec=dec, frame='icrs')\n ra_rad, dec_rad = [float(a) * np.pi/180\n for a in coords.to_string().split()]\n return (ra_rad, dec_rad)\n\n ra, dec = gross_coords_to_rads(coords)\n ra_NGP, dec_NGP = gross_coords_to_rads(['12h51m26.00s', '+27d 7m 42.0s'])\n l_NCP = 122.93 * np.pi/180\n\n b = np.arcsin(np.sin(dec_NGP) * np.sin(dec) \\\n + np.cos(dec_NGP) * np.cos(dec) \\\n * np.cos(ra - ra_NGP))\n\n x1 = np.cos(dec) * np.sin(ra - ra_NGP)\n x2 = np.cos(dec_NGP) * np.sin(dec) \\\n - np.sin(dec_NGP) * np.cos(dec) * np.cos(ra - ra_NGP)\n\n # Arctan2 is basically a smart version of arctan(x1/x2)\n l = l_NCP - np.arctan2(x1, x2)\n\n # Convert to degrees and round out to 4 decs for prettiness.\n l, b = round(l * 180/np.pi, 4), round(b * 180/np.pi, 4)\n return [l, b]", "def _rad_center(self):\n return ((self.rad_hi + self.rad_lo) / 2).to(\"deg\")", "def return_obs_RA_DEC():\n return SkyCoord('03h 32m 30s', '10d 00m 24s')", "def celestial_2_cartesian(r, ra, dec, units='degrees', output='both'):\n phi = np.copy(ra)\n theta = np.copy(dec)\n if units == 'degrees':\n phi, theta = np.deg2rad(phi), np.deg2rad(theta)\n elif units == 'radians':\n pass\n else:\n raise AssertionError(\"Unexpected value entered for 'units', only supports either degrees or radians\", units)\n theta = np.pi / 2. 
- theta\n if output == 'spherical':\n return phi, theta\n else:\n x = r * np.cos(phi) * np.sin(theta)\n y = r * np.sin(phi) * np.sin(theta)\n z = r * np.cos(theta)\n if output == 'cartesian':\n return x, y, z\n elif output == 'both':\n return phi, theta, x, y, z\n else:\n raise AssertionError(\"Unexpected value entered for 'output', should be either 'cartesian' or 'both'.\",\n output)", "def radec2detector(self,ra,dec):\n y_detector = np.int(self.wcs.wcs_world2pix([[ra, dec]], 1)[0][0])\n x_detector = np.int(self.wcs.wcs_world2pix([[ra, dec]], 1)[0][1])\n\n return x_detector, y_detector", "def radec_to_xyz(ra_deg, dec_deg):\n # TODO -- Handle large angle separations properly\n\n ra = np.asarray(ra_deg) * RAD_PER_DEG\n dec = np.asarray(dec_deg) * RAD_PER_DEG\n cosd = np.cos(dec)\n xyz = np.array([cosd * np.cos(ra),\n cosd * np.sin(ra),\n np.sin(dec)]).T\n\n return np.atleast_2d(xyz)", "def _spherical_to_cartesian_fast(ra, dec, threads):\n import numexpr as ne\n\n #nthreads = ne.detect_number_of_cores()\n nthreads = threads\n ne.set_num_threads(nthreads)\n\n pi = math.pi\n rar = ne.evaluate('ra*pi/180.0')\n decr = ne.evaluate('dec*pi/180.0')\n\n hold1=ne.evaluate('cos(decr)') \n\n x = ne.evaluate('cos(rar) * hold1')\n y = ne.evaluate('sin(rar) * hold1')\n z = ne.evaluate('sin(decr)')\n \n return x, y, z", "def radec(self, shift_crpix1=0.0, shift_crpix2=0.0,\n shift_cd1_1=0.0, shift_cd1_2=0.0,\n shift_cd2_1=0.0, shift_cd2_2=0.0):\n header = dict(self.fits[self.ccd].header)\n header['CRPIX1'] += shift_crpix1\n header['CRPIX2'] += shift_crpix2\n header['CD1_1'] += shift_cd1_1\n header['CD1_2'] += shift_cd1_2\n header['CD2_1'] += shift_cd2_1\n header['CD2_2'] += shift_cd2_2\n mywcs = wcs.WCS(header, relax=True)\n # Only return stars and extended objets\n mask = ((self.column('Classification') == -1) \n | (self.column('Classification') == -2)\n | (self.column('Classification') == 1) \n | (self.column('Classification') == -3))\n ra, dec = mywcs.wcs_pix2world(self.column('X_coordinate')[mask],\n self.column('Y_coordinate')[mask],\n 1)\n return (ra, dec)", "def shiftRADec(ra1, dec1, deltaRA, deltaDec):\n\n d2r = math.pi/180.\n as2r = math.pi/648000.\n\n # Convert everything to radians\n #rara1 = ra1*d2r\n dcrad1 = dec1*d2r\n shiftRArad = deltaRA*as2r\n shiftDCrad = deltaDec*as2r\n\n # Shift!\n #deldec2 = 0.0\n sindis = math.sin(shiftRArad / 2.0)\n sindelRA = sindis / math.cos(dcrad1)\n delra = 2.0*math.asin(sindelRA) / d2r\n\n # Make changes\n ra2 = ra1+delra\n dec2 = dec1 + deltaDec / 3600.0\n\n return ra2, dec2", "def cartesian_to_lon_lat(x, y, z, R = 1):\n lon = np.degrees(np.arctan2(y,x))\n lat = np.degrees(np.pi/2-np.arctan2((x**2+y**2)**0.5,z))\n\n return lon,lat", "def lcc_projection(indata, r=6370000):\n wrf_proj = pyproj.Proj(proj='lcc', # projection type: Lambert Conformal Conic\n lat_1=indata.TRUELAT1, lat_2=indata.TRUELAT2, # Cone intersects with the sphere\n lat_0=indata.MOAD_CEN_LAT, lon_0=indata.STAND_LON, # Center point\n a=r, b=r) # This is it! The Earth is a perfect sphere\n wgs_proj = pyproj.Proj(proj='latlong', datum='WGS84')\n e, n = pyproj.transform(wgs_proj, wrf_proj, indata.CEN_LON, indata.CEN_LAT)\n # Grid parameters\n dx, dy = indata.DX, indata.DY\n nx, ny = float(indata.dims['west_east']), float(indata.dims['south_north'])\n # Down left corner of the domain\n x0 = -(nx - 1) / 2. * dx + e\n y0 = -(ny - 1) / 2. 
* dy + n\n\n x = da.arange(nx) * dx + x0\n y = da.arange(ny) * dy + y0\n return x, y", "def fieldCenter(self):\n if self.ra0 is None:\n self.ra0 = reduce(lambda x, y: x + y, [src.pos.ra for src in self.sources]) / len(\n self.sources) if self.sources else 0\n if self.dec0 is None:\n self.dec0 = reduce(lambda x, y: x + y, [src.pos.dec for src in self.sources]) / len(\n self.sources) if self.sources else 0\n return self.ra0, self.dec0", "def to_tracking_radec(self) -> astropy.coordinates.SkyCoord:\n radecsys = (\"RADECSYS\", \"OBJRADEC\", \"RADESYS\")\n radecpairs = ((\"RA_DEG\", \"DEC_DEG\"), (\"BORE-RA\", \"BORE-DEC\"))\n return tracking_from_degree_headers(self, radecsys, radecpairs)" ]
[ "0.6103878", "0.60720265", "0.5966464", "0.5929416", "0.583443", "0.58247656", "0.5805627", "0.5766797", "0.5751981", "0.5746876", "0.56683046", "0.5656354", "0.5649699", "0.5625664", "0.5614997", "0.56088257", "0.5603104", "0.55825555", "0.5581316", "0.55377346", "0.5510478", "0.5491437", "0.5480595", "0.54746795", "0.5465726", "0.5460247", "0.54404527", "0.5436218", "0.54190034", "0.53963727" ]
0.7208224
0
Project the stars to x,y plane for a given visit.
def starsProject(stars, visit): names=['x','y','radius'] types=[float,float,float] xtemp,ytemp = gnomonic_project_toxy(np.radians(stars['ra']),np.radians(stars['decl']), visit['ra'], visit['dec']) # Rotate the field using the visit rotSkyPos. Hope I got that sign right... # Hopefully this can be replaced with some cameraGeom stuff. sin_rot = np.sin(visit['rotSkyPos']) cos_rot = np.cos(visit['rotSkyPos']) stars['x'] = cos_rot*xtemp + sin_rot*ytemp stars['y'] = -1.*sin_rot*xtemp+cos_rot*ytemp # XXX-temp try not rotating #stars['x'] = xtemp #stars['y'] = ytemp stars['radius'] = (stars['x']**2+stars['y']**2)**0.5 return stars
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def project(self):\n def _project(point):\n return (\n point[0]/(point[2]/Window.COP_DISTANCE+1),\n point[1]/(point[2]/Window.COP_DISTANCE+1))\n\n self._points = [list(map(_project, face)) for face in self._points]", "def calcul_point_plan_projection(cls,cx,cy,cz,spx,spy,axe_x,axe_y):\n projX=gs.Vector3(spx*axe_x.x,spx*axe_x.y,spx*axe_x.z)\n projY=gs.Vector3(spy*axe_y.x,spy*axe_y.y,spy*axe_y.z)\n point=gs.Vector3(projX+projY)+gs.Vector3(cx,cy,cz)\n return point", "def project(self, win_width, win_height, fov, viewer_distance):\r\n factor = fov / (viewer_distance + self.z)\r\n x = self.x * factor + win_width / 2\r\n y = -self.y * factor + win_height / 2\r\n return Point3D(x, y, 1)", "def ingame_to_scene(self, x, y):\n new_x = (x*8)+4\n # TODO: this y coord may be slightly off\n new_y = (self.world.height*8) - (y*8) - 4\n # Okay, seems we don't actually need this here, for what we're using\n # it for, at least. May want to rename or refactor these a bit so\n # that these two functions are analagous, 'cause they technically do\n # slightly different things now.\n #(scene_x, scene_y) = self.mainwindow.get_zoom_transform().map(new_x, new_y)\n #return (scene_x, scene_y)\n return (new_x, new_y)", "def projective_transform(self, x):\n\n x = np.asarray(x)\n # Assume no intensity column\n x0, y0, z0 = x\n\n # Camera coors to pixel coors\n u = ((x0 / z0) * self.f) + (self.sensor_size[0] // 2)\n v = ((y0 / z0) * self.f) + (self.sensor_size[1] // 2)\n\n u_min = np.min(u)\n v_min = np.min(v)\n\n n = len(u)\n u_list = []\n v_list = []\n if self.error_on_oob:\n for i in range(n):\n if (u[i] >= u_min and u[i] <= self.sensor_size[0] and v[i] >= v_min and v[i] <= self.sensor_size[1]):\n u_list.append(u[i])\n v_list.append(v[i])\n else:\n raise OutOfSensorBoundsError(\"Projected coordinate was outside the sensor\")\n else:\n for i in range(n):\n u_list.append(u[i])\n v_list.append(v[i])\n\n u = np.asarray(u_list)\n v = np.asarray(v_list)\n\n return np.vstack((u, v))", "def projectPoint(self,p):\n a,b,c = self.a, self.b, self.c\n x,y = p\n return numpy.array( [ b*(x*b-y*a) - c*a, a*(y*a-x*b) - c*b ] )", "def project(self, win_width, win_height, fov, viewer_distance):\n factor = fov / (viewer_distance + self.z)\n x = self.x * factor + win_width / 2\n y = -self.y * factor + win_height / 2\n return Point3D(x, y, 1)", "def project(self, (lng, lat)):\n x = lng * DEG_TO_RAD\n lat = max(min(MAX_LATITUDE, lat), -MAX_LATITUDE)\n y = lat * DEG_TO_RAD\n y = math.log(math.tan((math.pi / 4) + (y / 2)))\n return (x*EARTH_RADIUS, y*EARTH_RADIUS)", "def project(self, win_width, win_height, fov, viewer_distance):\n\t\tfactor = fov / (viewer_distance + self.z)\n\t\tx = self.x * factor + win_width / 2\n\t\ty = -self.y * factor + win_height / 2\n\t\treturn Point3D(x, y, 1)", "def project(self):\n # update positions compared to observer\n pos = self.pos.copy()\n\n # center coordinates around obs coords\n pos[:, 0] -= np.sin(self.theta) * self.V * self.time_elapsed\n pos[:, 2] -= np.cos(self.theta) * self.V * self.time_elapsed\n\n # wrap in a novel box around obs coords\n for i in range(3):\n pos[:, i] = self.bounds[2*i] + np.mod(pos[:, i], self.bounds[2*i + 1]-self.bounds[2*i])\n\n d = (pos**2).sum(axis=1)**.5\n # ind_visible = (pos[:, 2] > 0) * (self.d_min<d) * (d<self.d_max)\n ind_visible = (pos[:, 2] > self.d_min) * (d < self.d_max)\n N_visible = int(np.sum(ind_visible))\n\n # self.state = [X, Y, size]\n self.state = np.ones((N_visible, 7))\n for i in range(2):\n self.state[:, i] = self.mag * pos[ind_visible, i] / 
pos[ind_visible, 2]\n print(i, self.state[:, i].min(), self.state[:, i].max())\n self.state[:, 2] = self.size / d[ind_visible]\n\n # colors do not change\n self.state[:, 3:] = pos[ind_visible, 3:]\n\n # TODO: larger transparency at larger distance => too fancy :-)\n # self.state[:, 2] = self.size / d[ind_visible]\n\n # for i in range(3):\n # self.state[:, i] *= (self.bounds[2*i+1] - self.bounds[2*i])\n # self.state[:, i] -= self.bounds[2*i]", "def scatter_state(self,x):\n self.m[0:self.n] = x[0,0:self.n] \n self.r[0:self.n,0] = x[1,0:self.n]\n self.r[0:self.n,1] = x[2,0:self.n]\n self.r[0:self.n,2] = x[3,0:self.n]\n self.v[0:self.n:,0] = x[4,0:self.n]\n self.v[0:self.n:,1] = x[5,0:self.n]\n self.v[0:self.n:,2] = x[6,0:self.n]", "def projection(self, point):\n return gs.copy(point)", "def assignPatches(stars, visit, nPatches=16, radiusFoV=1.8):\n maxx, maxy = gnomonic_project_toxy(0., np.radians(radiusFoV), 0., 0.)\n nsides = nPatches**0.5\n\n # This should move all coords to 0 < x < nsides-1\n px = np.floor((stars['x'] + maxy)/(2.*maxy)*nsides)\n py = np.floor((stars['y'] + maxy)/(2.*maxy)*nsides)\n\n stars['subPatch'] = px + py*nsides\n stars['patchID'] = stars['subPatch'] + visit['visitID']*nPatches\n return stars", "def project(self, a):\n for g in xrange(0, len(a), 3):\n\n ax = a[g + 0]\n ay = a[g + 1]\n az = a[g + 2]\n anorm = ax ** 2.0 + ay ** 2.0 + az ** 2.0\n i = anorm > 1.0\n\n anorm_i = anorm[i] ** 0.5 # Square root is taken here. Faster.\n ax[i] = np.divide(ax[i], anorm_i)\n ay[i] = np.divide(ay[i], anorm_i)\n az[i] = np.divide(az[i], anorm_i)\n\n a[g + 0] = ax\n a[g + 1] = ay\n a[g + 2] = az\n\n return a", "def project(self, point):\n return np.round(project(self.camera.P, point)).astype(int)", "def project(self, win_width, win_height, fov, viewer_distance):\n factor = fov / (viewer_distance + self.z)\n x = self.x * factor + win_width / 2\n y = -self.y * factor + win_height / 2\n return Point3D(x, y, self.z)", "def project(self, win_width, win_height, fov, viewer_distance):\n factor = fov / (viewer_distance + self.z)\n x = self.x * factor + win_width / 2\n y = -self.y * factor + win_height / 2\n return Point3D(x, y, self.z)", "def project(self, win_width, win_height, fov, viewer_distance):\n factor = fov / (viewer_distance + self.z)\n x = self.x * factor + win_width / 2\n y = -self.y * factor + win_height / 2\n return Point3D(x, y, self.z)", "def project_point_along_2Dvector(): \n \n # 2d vector \n a = vec2( 1 , 1 )\n b = vec2( -1 , -1 )\n com = vec2() \n\n #fb = pixel_op() \n #fb.create_buffer(800, 800)\n #fb.graticule(pixels_per_unit)\n\n vecs = [a,b]\n pts = [com.project_pt(a, b, 2)]\n\n bloody_simple_2drender('2d_render.png', vecs=vecs, pts=pts, gridsize=40)", "def lift(point):\n return gs.copy(point)", "def _create_galaxy(self):\n # Make a star.\n star = Star(self)\n stars_width, stars_height = star.rect.size\n # Fill galaxy across the screen\n available_space_x = self.settings.screen_width - (2 * stars_width)\n number_stars_x = available_space_x // (2 * stars_width)\n # Determine the number of rows of stars that fit on the screen.\n ship_height = self.ship.rect.height\n available_space_y = (self.settings.screen_height - (3 * stars_height) - ship_height)\n number_rows = available_space_y // (2 * stars_height)\n # Create the full galaxy of stars.\n for row_number in range(number_rows):\n # Create the first row of stars.\n for stars_number in range(number_stars_x):\n self._create_stars(stars_number, row_number)", "def to_world(self, x, y, **kwargs):", "def 
_create_stars(self, stars_number, row_number):\n star = Star(self)\n stars_width, stars_height = star.rect.size\n star.x = stars_width + 2 * stars_width * stars_number\n star.rect.x = star.x\n star.rect.y = star.rect.height + 2 * star.rect.height * row_number\n self.stars.add(star)", "def proj(self, X, G):\n raise NotImplementedError", "def getProjections(self): \n x, y, z = self.XYZCoordinate\n origin = self.SkeletonPoints[0]\n self.coorOrigin = origin\n self.XYProjections = [GeometryToolBox.projected_point(p, origin, x, y) for p in self.SkeletonPoints]\n self.XZProjections = [GeometryToolBox.projected_point(p, origin, x, z) for p in self.SkeletonPoints]", "def star_graph():\n pylon_graph = graph.graph()\n idx = pylon_graph.add_unique_node(ORIGIN, \"base\")\n star_list = pylon_graph.add_star_to_node(idx, 6)\n pylon_graph.connect_nodes(star_list)\n pylon_graph.save_graph(\"star\")\n return pylon_graph", "def cambiovelocidad(self, x, y):\n self.cambio_x += x\n self.cambio_y += y", "def WorldToProjected(self, coords):\n return coords", "def undo_mercator_project(x,y):\n lon = y*np.pi\n ex = np.exp(4*np.pi*x)\n lat = np.arcsin((ex - 1)/(ex +1 ))\n lon = lon*360/2/np.pi\n lat = lat*360 /2/np.pi\n return lon, lat", "def scatter_state(self,x):\n self.m[0:self.n] = x[0,0:self.n] \n self.r[0:self.n,0] = x[1,0:self.n]\n self.r[0:self.n,1] = x[2,0:self.n]\n self.r[0:self.n,2] = x[3,0:self.n]\n self.v[0:self.n:,0] = x[4,0:self.n]\n self.v[0:self.n:,1] = x[5,0:self.n]\n self.v[0:self.n:,2] = x[6,0:self.n]\n self.rho[0:self.n] = x[7,0:self.n]\n self.p[0:self.n] = x[8,0:self.n]\n self.pco[0:self.n] = x[9,0:self.n]\n self.u[0:self.n] = x[10,0:self.n]" ]
[ "0.5567584", "0.5320066", "0.52733254", "0.5250975", "0.52450126", "0.5238371", "0.52286285", "0.52112776", "0.5208136", "0.5184843", "0.5184538", "0.51708573", "0.51540446", "0.5112105", "0.50959843", "0.5084342", "0.5084342", "0.5084342", "0.50772774", "0.50741136", "0.5027122", "0.50193244", "0.50176036", "0.4988111", "0.4976324", "0.49629334", "0.4961467", "0.4951402", "0.49497876", "0.49497685" ]
0.724595
0
Assign PatchIDs to everything. Assume that stars have already been projected to x,y
def assignPatches(stars, visit, nPatches=16, radiusFoV=1.8):
    maxx, maxy = gnomonic_project_toxy(0., np.radians(radiusFoV), 0., 0.)
    nsides = nPatches**0.5

    # This should move all coords to 0 < x < nsides-1
    px = np.floor((stars['x'] + maxy)/(2.*maxy)*nsides)
    py = np.floor((stars['y'] + maxy)/(2.*maxy)*nsides)

    stars['subPatch'] = px + py*nsides
    stars['patchID'] = stars['subPatch'] + visit['visitID']*nPatches
    return stars
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_patch(self, patch_radius, full_x, full_y, full_i, full_j):\n com_ijs = [self.center_of_mass_ij(time) for time in self.times]\n patch_grid = []\n patch_mask = []\n patch_x = []\n patch_y = []\n patch_i = []\n patch_j = []\n for t, time in enumerate(self.times):\n obj_slice_buff = (slice(com_ijs[t][0] - patch_radius, com_ijs[t][0] + patch_radius),\n slice(com_ijs[t][1] - patch_radius, com_ijs[t][1] + patch_radius))\n obj_slice_local = [[com_ijs[t][0] - self.i[t].min() - patch_radius,\n com_ijs[t][0] - self.i[t].min() + patch_radius],\n [com_ijs[t][1] - self.j[t].min() - patch_radius,\n com_ijs[t][1] - self.j[t].min() + patch_radius]]\n patch_i.append(full_i[obj_slice_buff])\n patch_j.append(full_j[obj_slice_buff])\n patch_x.append(full_x[obj_slice_buff])\n patch_y.append(full_y[obj_slice_buff])\n pad_i_l = abs(obj_slice_local[0][0]) if obj_slice_local[0][0] < 0 else 0\n pad_i_u = obj_slice_local[0][1] - self.timesteps[t].shape[0] \\\n if obj_slice_local[0][1] - self.timesteps[t].shape[0] > 0 else 0\n pad_j_l = abs(obj_slice_local[1][0]) if obj_slice_local[1][0] < 0 else 0\n pad_j_u = obj_slice_local[1][1] - self.timesteps[t].shape[1] \\\n if obj_slice_local[1][1] - self.timesteps[t].shape[1] > 0 else 0\n\n if obj_slice_local[0][0] < 0:\n obj_slice_local[0][0] = 0\n obj_slice_local[0][1] += pad_i_l\n if obj_slice_local[1][0] < 0:\n obj_slice_local[1][0] = 0\n obj_slice_local[1][1] += pad_j_l\n pad_grid = np.pad(self.timesteps[t], pad_width=[(pad_i_l, pad_i_l + pad_i_u), (pad_j_l, pad_j_l + pad_j_u)])\n pad_mask = np.pad(self.masks[t], pad_width=[(pad_i_l, pad_i_l + pad_i_u), (pad_j_l, pad_j_l + pad_j_u)])\n obj_slice_const = (slice(obj_slice_local[0][0], obj_slice_local[0][1]),\n slice(obj_slice_local[1][0], obj_slice_local[1][1]))\n patch_grid.append(pad_grid[obj_slice_const])\n patch_mask.append(pad_mask[obj_slice_const])\n patch_obj = STObject(patch_grid, patch_mask, patch_x, patch_y, patch_i, patch_j, self.start_time,\n self.end_time, step=self.step, dx=self.dx, u=self.u, v=self.v)\n return patch_obj", "def _apply_patches(self) -> None:\n first_patch, last_patch = False, False\n patches = {}\n for patch_name, locations in self.patches.items():\n residues = []\n for location in locations:\n if location == \"FIRST\":\n location = 0\n first_patch = True\n elif location == \"LAST\":\n location = -1\n last_patch = True\n residue = self.residues[location]\n residues.append(residue)\n patches[patch_name] = residues\n if not first_patch:\n first_residue = self.residues[0]\n first_patch = first_residue.first\n patches[first_patch] = [first_residue]\n if not last_patch:\n last_residue = self.residues[-1]\n last_patch = last_residue.last\n patches[last_patch] = [last_residue]\n\n for patch_name, residues in patches.items():\n if patch_name == \"NONE\":\n continue\n patch = self.topology.patches[patch_name]\n self.topology_files.add(patch.rtf_file_name)\n patch.apply(*residues)", "def __init__(self, *patch_tuples):\n self._patches = [\n Patch(patch_tuple[0], patch_tuple[1], patch_tuple[2])\n for patch_tuple in patch_tuples\n ]", "def get_patches(self):\n self.get_source_patch_masks()\n self.get_target_patch_masks()\n self.get_source_patches()", "def assignGroupIDs(self):\n components = self.getComponents(graph_dictionary=self.graph_dict)\n self._gIDs = np.zeros(self.no_plants, dtype='object')\n for i in components.keys():\n self._gIDs[components[i]] = 'gID_' + str(i)", "def _change_objs_to_IDs(self):\n if self.location:\n self.location = self.location.id\n if self.contents:\n 
self.contents = [obj.id for obj in self.contents]", "def get_source_patch_masks(self):\n self.source_patch_masks = {\n patch_center: self.get_patch_mask(patch_center)\n for patch_center in self.patch_centers\n if not np.bitwise_and(self.get_patch_mask(patch_center), self.unknown_mask).any()\n }\n self.patch_centers = tuple(list(self.source_patch_masks.keys()))", "def set_star_ids(aca):\n from chandra_aca.transform import radec_to_yagzag\n from Quaternion import Quat\n\n from kadi.commands import conf\n\n obs = aca[\"meta\"]\n q_att = Quat(obs[\"att\"])\n stars = get_agasc_cone_fast(\n q_att.ra, q_att.dec, radius=1.2, date=obs[\"date\"], matlab_pm_bug=True\n )\n yang_stars, zang_stars = radec_to_yagzag(\n stars[\"RA_PMCORR\"], stars[\"DEC_PMCORR\"], q_att\n )\n idxs_aca = np.where(np.isin(aca[\"type\"], (\"ACQ\", \"GUI\", \"BOT\")))[0]\n for idx_aca in idxs_aca:\n yang = aca[\"yang\"][idx_aca]\n zang = aca[\"zang\"][idx_aca]\n dys = np.abs(yang - yang_stars)\n dzs = np.abs(zang - zang_stars)\n\n # Get the brightest star within a box (default = 1.5 arcsec halfwidth)\n halfw = conf.star_id_match_halfwidth\n ok = (dys < halfw) & (dzs < halfw)\n if np.any(ok):\n idx = np.argmin(stars[\"MAG_ACA\"][ok])\n aca[\"id\"][idx_aca] = int(stars[\"AGASC_ID\"][ok][idx])\n aca[\"mag\"][idx_aca] = float(stars[\"MAG_ACA\"][ok][idx])\n else:\n logger.info(\n f\"WARNING: star idx {idx_aca + 1} not found in obsid {obs['obsid']} at \"\n f\"{obs['date']}\"\n )", "def assignPointsToShapes(self):\n pointsCopy = self.mission['points'].copy()\n\n while len(pointsCopy):\n shape = []\n self.recursiveAddPointToShape(pointsCopy, [pointsCopy[0]], shape)\n shape.append(shape[0])\n self.mission['shapes'].append(shape)", "def get_target_patch_masks(self):\n self.target_patch_masks = {\n patch_center: self.get_patch_mask(patch_center)\n for patch_center in self.anchor_points\n }\n for patch in self.target_patch_masks:\n self.updated_structure_mask[self.target_patch_masks[patch]] = True", "def get_patches(rimage, gimage, mimage, num_patches=48, patch_size=80, patch_stride=80):\n num_FSpatches = 16\n num_RApatches = 32\n rpatches = []\n gpatches = []\n mpatches = []\n #R_imgs = ((rimage+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'rainy.jpg', R_imgs[0,:,:,:])\n for i in range(int(math.sqrt(num_FSpatches))):\n for j in range(int(math.sqrt(num_FSpatches))):\n point_x = patch_stride*i\n point_y = patch_stride*j\n rpatch = rimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n #print(point_x)\n #print(point_y)\n #print(point_y+patch_size)\n #P_imgs = ((rpatch+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'patch_%d_%d.jpg'%(i,j), P_imgs[0,:,:,:])\n #print(rpatch.shape)\n rpatches.append(rpatch)\n #print(np.array(rpatches).shape)\n gpatch = gimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n gpatches.append(gpatch)\n mpatch = mimage[:,(point_x):(point_x+patch_size), (point_y):(point_y+patch_size),:]\n mpatches.append(mpatch)\n\n for k in range(num_RApatches):\n point1 = random.randint(0,240) # 116 comes from the image source size (320) - the patch dimension (80)\n point2 = random.randint(0,240)\n #rpatch = tf.image.crop_to_bounding_box(rimage, point1, point2, patch_size, patch_size)\n rpatch = rimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n #P_imgs = ((rpatch+1)*127.5).astype(np.uint8)\n #scipy.misc.imsave('results'+'/' + 'patch_%d.jpg'%i, P_imgs[0,:,:,:])\n #print(rpatch.shape)\n rpatches.append(rpatch)\n 
#print(np.array(rpatches).shape)\n gpatch = gimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n gpatches.append(gpatch)\n mpatch = mimage[:,(point1):(point1+patch_size), (point2):(point2+patch_size),:]\n mpatches.append(mpatch)\n\n rpatches = np.array(rpatches)\n rpatches = np.squeeze(rpatches)\n #print(rpatches.shape)\n gpatches = np.array(gpatches)\n gpatches = np.squeeze(gpatches)\n mpatches = np.array(mpatches)\n mpatches = np.squeeze(mpatches)\n #assert rpatches.get_shape().dims == [num_patches, patch_size, patch_size, 3]\n assert rpatches.shape == (num_patches, patch_size, patch_size, 3)\n return rpatches, gpatches, mpatches", "def modify_ids(self, images, annotations):\n print(\"Reinitialicing images and annotation IDs ...\")\n ### Images\n old_new_imgs_ids = {} # necessary for the annotations!\n for n,im in enumerate(images):\n old_new_imgs_ids[images[n]['id']] = n+1 # dicto with old im_ids and new im_ids\n images[n]['id'] = n+1 # reorganize the ids\n ### Annotations\n for n,ann in enumerate(annotations):\n annotations[n]['id'] = n+1\n old_image_id = annotations[n]['image_id']\n annotations[n]['image_id'] = old_new_imgs_ids[old_image_id] # replace im_ids in the annotations as well\n return images, annotations", "def addPatch(self,p):\n indexPatch = [self.register(v) for v in p]\n self.get('patchmesh.patches').append(indexPatch)", "def assign_rings(self):\n rings = self.make_rings()\n ring_angles = [rings[r][0] for r in rings]\n self.rp = np.zeros((self.npks), dtype=int)\n for i in range(self.npks):\n self.rp[i] = (np.abs(self.polar_angle[i] - ring_angles)).argmin()", "def _create_patch_for_index(index):\n mu_idx, rho_idx = index\n prev_rho_patch = None\n prev_mu_patch = None\n if mu_idx == 0 and rho_idx == 0:\n shell_point = initial_shell_point\n screen_point = initial_screen_point\n mu_start_plane = optics.arcplane.ArcPlane(mu=0.0)\n mu_end_plane = optics.arcplane.ArcPlane(mu=angle_step)\n rho_start_plane = optics.arcplane.ArcPlane(rho=0.0)\n rho_end_plane = optics.arcplane.ArcPlane(rho=angle_step)\n else:\n if mu_idx != 0:\n if mu_idx > 0:\n prev_mu_idx = mu_idx - 1\n else:\n prev_mu_idx = mu_idx + 1\n prev_mu_patch = patch_grid[(prev_mu_idx, rho_idx)]\n mu_start_plane = prev_mu_patch.mu_end_plane\n if mu_idx > 0:\n mu_end_plane = optics.arcplane.ArcPlane(mu=angle_step * (mu_idx+1))\n else:\n mu_end_plane = optics.arcplane.ArcPlane(mu=angle_step * mu_idx)\n else:\n mu_start_plane = optics.arcplane.ArcPlane(mu=0.0)\n mu_end_plane = optics.arcplane.ArcPlane(mu=angle_step)\n if rho_idx != 0:\n if rho_idx > 0:\n prev_rho_idx = rho_idx - 1\n else:\n prev_rho_idx = rho_idx + 1\n prev_rho_patch = patch_grid[(mu_idx, prev_rho_idx)]\n rho_start_plane = prev_rho_patch.rho_end_plane\n if rho_idx > 0:\n rho_end_plane = optics.arcplane.ArcPlane(rho=angle_step * (rho_idx+1))\n else:\n rho_end_plane = optics.arcplane.ArcPlane(rho=angle_step * rho_idx)\n else:\n rho_start_plane = optics.arcplane.ArcPlane(rho=0.0)\n rho_end_plane = optics.arcplane.ArcPlane(rho=angle_step)\n prev_patches = [patch for patch in (prev_mu_patch, prev_rho_patch) if patch != None]\n \n #figure out the screen and shell point\n shell_point = get_shell_point(prev_patches, mu_start_plane.angle, rho_start_plane.angle)\n screen_point = get_focal_point(prev_patches, shell_point)\n \n return optics.patch.create_patch(\n shell_point,\n screen_point,\n mu_start_plane,\n mu_end_plane,\n rho_start_plane,\n rho_end_plane,\n prev_mu_patch,\n prev_rho_patch\n )", "def prepare(self):\n per_col = 5\n spray_diameter 
= 10\n jids = []\n for i in range(self.gom_count):\n # Create JIDs\n gom_jid = f\"{settings.AGENT_NAMES['gom_base']}{i + 1}@{settings.HOST}\"\n tr_jid = f\"{settings.AGENT_NAMES['tr_base']}{i + 1}@{settings.HOST}\"\n jids.append((gom_jid, tr_jid))\n\n # Create GoM and TR positions\n y = (i % per_col) * 48 - 96\n x = int(i / per_col) * 64 - 32\n xo = random.gauss(0, spray_diameter)\n yo = random.gauss(0, spray_diameter)\n\n self.factory_map[gom_jid] = Point(x=float(x), y=float(y))\n self.tr_map[tr_jid] = Point(x=float(x + xo), y=float(y + yo))\n\n return jids", "def registerPatches(self,pl):\n self.set('patchmesh.patches',pl)", "def add_patch(self, pset, patch):\n car = patch.pop()\n if car in pset:\n sel = [ x for x in pset[car] if patch.path == x.path ]\n if sel:\n sel[0].combine(patch)\n else:\n pset[car].append(patch)\n else:\n pset[car] = [patch]", "def set_patches(\n self, patch_list: List[PatchMetadata], patch_id_digits: int = 4\n ) -> None:\n if not patch_list:\n return\n\n if all(p.present_in_specfile for p in patch_list):\n logger.debug(\n \"All patches are present in the spec file, nothing to do here 🚀\"\n )\n return\n\n # we could have generated patches before (via git-format-patch)\n # so let's reload the spec\n self.reload()\n\n applied_patches: Dict[str, PatchObject] = {\n p.get_patch_name(): p for p in self.get_applied_patches()\n }\n\n for patch_metadata in patch_list:\n if patch_metadata.present_in_specfile:\n logger.debug(\n f\"Patch {patch_metadata.name} is already present in the spec file.\"\n )\n continue\n\n if patch_metadata.name in applied_patches:\n logger.debug(\n f\"Patch {patch_metadata.name} is already defined in the spec file.\"\n )\n continue\n\n self.add_patch(patch_metadata, patch_id_digits)", "def assign_ids(ast):\n def f_either(obj, *child_results):\n id_ = slast.SlAst.id_\n obj.id_ = id_[0]\n id_[0] += 1\n\n # def f_either(obj, *child_results):\n # _id_dict = slast.SlAst._id_dict\n # id_ = slast.SlAst.id_\n # # FIXME: Assign same id to all data predicate calls with the same root/stop-nodes\n # key = str(obj.to_sl_expr())\n # if key in _id_dict:\n # obj.id_ = _id_dict[key]\n # else:\n # obj.id_ = id_[0]\n # _id_dict[key] = id_[0]\n # id_[0] += 1\n\n astutils.fold(f_either, f_either, ast)", "def fill_blockgroups(sf, df,geoids, colors):\n color_ids = []\n for i in geoids:\n color_ids.append(df[df.GEOID==i].index[0])\n \n i = 0\n for bg in color_ids:\n shape_ex = sf.shape(bg)\n x_lon = np.zeros((len(shape_ex.points),1))\n y_lat = np.zeros((len(shape_ex.points),1))\n for ip in range(len(shape_ex.points)):\n x_lon[ip] = shape_ex.points[ip][0]\n y_lat[ip] = shape_ex.points[ip][1]\n plt.fill(x_lon,y_lat, colors[i])\n i = i +1", "def setHolesCoordinates(self):\r\n # productive\r\n profprint()\r\n self.p = [[0 for j in range(63)] for j in range(3)]\r\n self.p[0][0] = 35\r\n self.p[1][0] = 34\r\n self.p[0][1] = 25\r\n self.p[1][1] = 36.679\r\n self.p[0][2] = 17.679\r\n self.p[1][2] = 44\r\n self.p[0][3] = 15\r\n self.p[1][3] = 54\r\n self.p[0][4] = 17.679\r\n self.p[1][4] = 64\r\n self.p[0][5] = 25\r\n self.p[1][5] = 71.321\r\n self.p[0][6] = 35\r\n self.p[1][6] = 74\r\n self.p[0][7] = 45\r\n self.p[1][7] = 71.321\r\n self.p[0][8] = 52.321\r\n self.p[1][8] = 64\r\n self.p[0][9] = 55\r\n self.p[1][9] = 54\r\n self.p[0][10] = 52.321\r\n self.p[1][10] = 44\r\n self.p[0][11] = 45\r\n self.p[1][11] = 36.679\r\n self.p[0][12] = 29.791\r\n self.p[1][12] = 24.456\r\n self.p[0][13] = 20\r\n self.p[1][13] = 28.019\r\n self.p[0][14] = 12.019\r\n self.p[1][14] = 34.716\r\n 
self.p[0][15] = 6.809\r\n self.p[1][15] = 43.739\r\n self.p[0][16] = 5\r\n self.p[1][16] = 54\r\n self.p[0][17] = 6.809\r\n self.p[1][17] = 64.261\r\n self.p[0][18] = 12.019\r\n self.p[1][18] = 73.284\r\n self.p[0][19] = 20\r\n self.p[1][19] = 79.981\r\n self.p[0][20] = 29.791\r\n self.p[1][20] = 83.544\r\n self.p[0][21] = 40.209\r\n self.p[1][21] = 83.544\r\n self.p[0][22] = 50\r\n self.p[1][22] = 79.981\r\n self.p[0][23] = 57.981\r\n self.p[1][23] = 73.284\r\n self.p[0][24] = 63.191\r\n self.p[1][24] = 64.262\r\n self.p[0][25] = 65\r\n self.p[1][25] = 54\r\n self.p[0][26] = 63.191\r\n self.p[1][26] = 43.739\r\n self.p[0][27] = 57.981\r\n self.p[1][27] = 34.716\r\n self.p[0][28] = 50\r\n self.p[1][28] = 28.019\r\n self.p[0][29] = 40.209\r\n self.p[1][29] = 24.456\r\n self.p[0][30] = 35\r\n self.p[1][30] = 14\r\n self.p[0][31] = 24.647\r\n self.p[1][31] = 15.363\r\n self.p[0][32] = 15\r\n self.p[1][32] = 19.359\r\n self.p[0][33] = 15\r\n self.p[1][33] = 88.641\r\n self.p[0][34] = 24.647\r\n self.p[1][34] = 92.637\r\n self.p[0][35] = 35\r\n self.p[1][35] = 94\r\n self.p[0][36] = 45.353\r\n self.p[1][36] = 92.637\r\n self.p[0][37] = 55\r\n self.p[1][37] = 88.641\r\n self.p[0][38] = 55\r\n self.p[1][38] = 19.359\r\n self.p[0][39] = 45.353\r\n self.p[1][39] = 15.363\r\n self.p[0][40] = 30.642\r\n self.p[1][40] = 4.19\r\n self.p[0][41] = 22.059\r\n self.p[1][41] = 5.704\r\n self.p[0][42] = 22.059\r\n self.p[1][42] = 102.296\r\n self.p[0][43] = 30.642\r\n self.p[1][43] = 103.81\r\n self.p[0][44] = 39.358\r\n self.p[1][44] = 103.81\r\n self.p[0][45] = 47.941\r\n self.p[1][45] = 102.296\r\n self.p[0][46] = 47.941\r\n self.p[1][46] = 5.704\r\n self.p[0][47] = 39.358\r\n self.p[1][47] = 4.19\r\n self.p[0][48] = 29.7\r\n self.p[1][48] = 44.82\r\n self.p[0][49] = 24.4\r\n self.p[1][49] = 54\r\n self.p[0][50] = 29.7\r\n self.p[1][50] = 63.18\r\n self.p[0][51] = 40.3\r\n self.p[1][51] = 63.18\r\n self.p[0][52] = 45.6\r\n self.p[1][52] = 54\r\n self.p[0][53] = 40.3\r\n self.p[1][53] = 44.82\r\n self.p[0][54] = 35\r\n self.p[1][54] = 54\r\n self.p[0][55] = 9\r\n self.p[1][55] = 12\r\n self.p[0][56] = 5\r\n self.p[1][56] = 18\r\n self.p[0][57] = 5\r\n self.p[1][57] = 90\r\n self.p[0][58] = 9\r\n self.p[1][58] = 96\r\n self.p[0][59] = 61\r\n self.p[1][59] = 96\r\n self.p[0][60] = 65\r\n self.p[1][60] = 90\r\n self.p[0][61] = 65\r\n self.p[1][61] = 18\r\n self.p[0][62] = 61\r\n self.p[1][62] = 12\r\n\r\n return self.p", "def _update_markers(self, markers, key):\n\n if len(markers.shape) > 2 and markers.shape[2] > 1:\n raise IndexError(\"Markers should be from one frame only\")\n if markers.channel.size != self.markers[key].data.channel.size:\n self._new_marker_set(markers, key)\n return # Prevent calling update_markers recursively\n self.markers[key].data = markers\n markers = np.array(markers)\n\n for i, actor in enumerate(self.markers[key].actors):\n # mapper = actors.GetNextActor().GetMapper()\n mapper = actor.GetMapper()\n self.markers[key].actors[i].GetProperty().SetColor(self.markers[key].color)\n self.markers[key].actors[i].GetProperty().SetOpacity(self.markers[key].opacity)\n source = vtkSphereSource()\n source.SetCenter(markers[0:3, i])\n source.SetRadius(self.markers[key].size)\n mapper.SetInputConnection(source.GetOutputPort())", "def set_grids(self, core_size, patch_shape, psf_model_shape):\n # core foo\n ravel_size = patch_shape[0] * patch_shape[1]\n self.core_shape = (core_size, core_size)\n xcenter = (patch_shape[0] - 1) / 2\n ycenter = (patch_shape[1] - 1) / 2\n buff = (core_size - 1) / 2\n 
xcore = xcenter - buff, xcenter + buff + 1\n ycore = ycenter - buff, ycenter + buff + 1\n core_ind = np.arange(ravel_size, dtype=np.int).reshape(patch_shape)\n self.core_ind = core_ind[xcore[0]:xcore[1], ycore[0]:ycore[1]].ravel()\n\n # grid defs\n self.psf_grid, self.patch_grid = get_grids(patch_shape, psf_model_shape)", "def _fill_impropers_cross_maps(self) -> None:\n impropers, cross_maps = [], []\n for residue in self.residues:\n for improper in residue.impropers:\n impropers.append([self._id_to_index[x] for x in improper])\n for cross_map in residue.cross_maps:\n cross_maps.append([self._id_to_index[x] for x in cross_map])\n self.impropers, self.cross_maps = impropers, cross_maps", "def get_raster_ids(self):\n return numpy.array(range(self._lo_atom, self._lo_atom + self._n_atoms))", "def get_patches(image, label, coordmaps, sample, num_pos = 100, num_neg = 100, all_patches=False, patch_shape= (48,48,48), spacing=(24,24,24), start_idx = 0):\n image_shape = np.shape(image)\n cn_size = image_shape[0]\n sg_size = image_shape[1]\n cr_size = image_shape[2]\n ax_size = image_shape[3]\n\n if not all_patches:\n idx_pos = np.stack(np.where(label[0, ...] > 0))\n \n # Only include points not near boundary\n #sg_idx = np.where(((patch_shape[0]/2) < idx_pos[0]) & (idx_pos[0] < (sg_size - (patch_shape[0]/2))))\n #idx_pos = idx_pos[:,sg_idx[0]]\n #cr_idx = np.where(((patch_shape[1]/2) < idx_pos[1]) & (idx_pos[1] < (cr_size - (patch_shape[1]/2))))\n #idx_pos = idx_pos[:, cr_idx[0]]\n #ax_idx = np.where(((patch_shape[2]/2) < idx_pos[2]) & (idx_pos[2] < (ax_size - (patch_shape[2]/2))))\n #idx_pos = idx_pos[:, ax_idx[0]]\n \n idx_rand = np.random.choice(idx_pos[0].shape[0], num_pos, replace = False)\n cpts_pos_sampled = idx_pos[:, idx_rand] \n \n image_patch_list = []\n label_patch_list = []\n coordmaps_patch_list = []\n for i in range(num_pos):\n idx1_sg = cpts_pos_sampled[0][i] - int(patch_shape[0]/2)\n idx1_cr = cpts_pos_sampled[1][i] - int(patch_shape[1]/2)\n idx1_ax = cpts_pos_sampled[2][i] - int(patch_shape[2]/2)\n \n idx2_sg = idx1_sg + patch_shape[0]\n idx2_cr = idx1_cr + patch_shape[1]\n idx2_ax = idx1_ax + patch_shape[2]\n \n image_patch_orig = image[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n image_patch = p.standardize_image(image_patch_orig)\n #image_patch = p.Normalize(image_patch)\n label_patch = label[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n coordmaps_patch = coordmaps[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n \n image_patch_list.append(image_patch)\n label_patch_list.append(label_patch)\n coordmaps_patch_list.append(coordmaps_patch)\n \n #Write patch/image and control points to csv and save image\n write_patch_to_file(image_patch, label_patch, coordmaps_patch, sample, cpts_pos_sampled[:,i], start_idx + i)\n \n # For negative points\n idx_neg = np.stack(np.where(label[0, ...]==0), axis = 0)\n \n # Only include points not near boundary\n sg_idx = np.where(((patch_shape[0]/2) < idx_pos[0]) & (idx_pos[0] < (sg_size - (patch_shape[0]/2))))\n idx_neg = idx_neg[:,sg_idx[0]]\n cr_idx = np.where(((patch_shape[1]/2) < idx_pos[1]) & (idx_pos[1] < (cr_size - (patch_shape[1]/2))))\n idx_neg = idx_neg[:, cr_idx[0]]\n ax_idx = np.where(((patch_shape[2]/2) < idx_pos[2]) & (idx_pos[2] < (ax_size - (patch_shape[2]/2))))\n idx_neg = idx_neg[:, ax_idx[0]]\n \n idx_rand = np.random.choice(idx_neg[0].shape[0], num_neg, replace = False)\n cpts_neg_sampled = idx_neg[:, idx_rand] \n \n for i in range(num_neg):\n idx1_sg = cpts_pos_sampled[0][i] - int(patch_shape[0]/2)\n 
idx1_cr = cpts_pos_sampled[1][i] - int(patch_shape[1]/2)\n idx1_ax = cpts_pos_sampled[2][i] - int(patch_shape[2]/2)\n \n idx2_sg = idx1_sg + patch_shape[0]\n idx2_cr = idx1_cr + patch_shape[1]\n idx2_ax = idx1_ax + patch_shape[2]\n \n image_patch_orig = image[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n image_patch = p.standardize_image(image_patch_orig)\n #image_patch = p.Normalize(image_patch)\n label_patch = label[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n coordmaps_patch = coordmaps[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n \n image_patch_list.append(image_patch)\n label_patch_list.append(label_patch)\n coordmaps_patch_list.append(coordmaps_patch)\n \n #Write patch/image and control points to csv and save image\n write_patch_to_file(image_patch, label_patch, coordmaps_patch, sample, cpts_pos_sampled[:,i], start_idx + num_pos + i)\n \n cpts = np.concatenate((cpts_pos_sampled, cpts_neg_sampled), axis = 1)\n \n return image_patch_list, label_patch_list, coordmaps_patch_list, cpts, start_idx + num_pos + i\n\n else:\n \n idx = p.grid_center_points(image.shape[1:], spacing)\n \n # Only include points not near boundary\n sg_idx = np.where(((patch_shape[0]/2) < idx[0]) & (idx[0] < (sg_size - (patch_shape[0]/2))))\n idx = idx[:,sg_idx[0]]\n cr_idx = np.where(((patch_shape[1]/2) < idx[1]) & (idx[1] < (cr_size - (patch_shape[1]/2))))\n idx = idx[:, cr_idx[0]]\n ax_idx = np.where(((patch_shape[2]/2) < idx[2]) & (idx[2] < (ax_size - (patch_shape[2]/2))))\n idx = idx[:, ax_idx[0]]\n \n image_patch_list = []\n label_patch_list = []\n coordmaps_patch_list = []\n \n for i in range(idx.shape[1]):\n \n idx1_sg = idx[0][i] - int(patch_shape[0]/2)\n idx1_cr = idx[1][i] - int(patch_shape[1]/2)\n idx1_ax = idx[2][i] - int(patch_shape[2]/2)\n \n idx2_sg = idx1_sg + patch_shape[0]\n idx2_cr = idx1_cr + patch_shape[1]\n idx2_ax = idx1_ax + patch_shape[2]\n \n image_patch_orig = image[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n image_patch = p.standardize_image(image_patch_orig)\n #image_patch = p.Normalize(image_patch)\n label_patch = label[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n coordmaps_patch = coordmaps[:, idx1_sg:idx2_sg, idx1_cr:idx2_cr, idx1_ax:idx2_ax]\n \n image_patch_list.append(image_patch)\n label_patch_list.append(label_patch)\n coordmaps_patch_list.append(coordmaps_patch)\n \n return image_patch_list, label_patch_list, coordmaps_patch_list, idx, len(image_patch_list)", "def match(self, grp, healpixIDs, pixRA, pixDec):\n\n # print('hello', grp.columns)\n pixRA_rad = np.deg2rad(pixRA)\n pixDec_rad = np.deg2rad(pixDec)\n\n # convert data position in rad\n pRA = np.median(grp[self.RACol])\n pDec = np.median(grp[self.DecCol])\n pRA_rad = np.deg2rad(pRA)\n pDec_rad = np.deg2rad(pDec)\n\n # gnomonic projection of pixels on the focal plane\n x, y = proj_gnomonic_plane(pRA_rad, pDec_rad, pixRA_rad, pixDec_rad)\n # x, y = proj_gnomonic_plane(np.deg2rad(self.LSST_RA-pRA),np.deg2rad(self.LSST_Dec-pDec), pixRA_rad, pixDec_rad)\n\n # get LSST FP with the good scale\n # pnew = LSSTPointing(0., 0., area=np.pi*self.fpscale**2)\n fpnew = LSSTPointing_circular(0., 0., maxbound=self.fpscale)\n # fpnew = LSSTPointing(np.deg2rad(self.LSST_RA-pRA),np.deg2rad(self.LSST_Dec-pDec),area=np.pi*self.fpscale**2)\n # maxbound=self.fpscale)\n\n \"\"\"\n import matplotlib.pyplot as plt\n fig, ax = plt.subplots()\n ax.plot(x, y, 'ko')\n pf = PolygonPatch(fpnew, facecolor=(0, 0, 0, 0), edgecolor='red')\n ax.add_patch(pf)\n plt.show()\n \"\"\"\n\n # 
print(shapely.vectorized.contains(\n # fpnew, x, y), self.fpscale, fpnew.area)\n\n idf = shapely.vectorized.contains(fpnew, x, y)\n\n pixID_matched = list(healpixIDs[idf])\n pixRA_matched = list(pixRA[idf])\n pixDec_matched = list(pixDec[idf])\n\n # names = [grp.name]*len(pixID_matched)\n df_pix = pd.DataFrame({'healpixID': pixID_matched,\n 'pixRA': pixRA_matched,\n 'pixDec': pixDec_matched, })\n # 'groupName': names})\n\n return df_pix\n \"\"\"\n n_index = len(grp.index.values)\n\n arr_index = grp.index.values\n\n n_pix = len(df_pix)\n if n_pix > 1:\n arr_index = arr_index.repeat(n_pix)\n if n_index > 1:\n df_pix = df_pix.append([df_pix]*(n_index-1), ignore_index=True)\n\n df_pix.loc[:, 'index'] = arr_index\n \n return df_pix\n \"\"\"", "def set_neighbor_markers(self):\n marker_texture_path = self.PATHS[\"MINIMAP_BG_TEXTURE\"]\n marker_texture = self.loader.loadTexture(marker_texture_path)\n for location in self.locations:\n location_pos = location.get_position()\n for neighbor_id in location.get_neighbors():\n neighbor = next(self.find_location_by_id(neighbor_id))\n neighbor_pos = neighbor.get_position()\n neighbor_displaced = self.calculate_displacement(location_pos, neighbor_pos).tolist()\n neighbor_displaced_x, neighbor_displaced_y = neighbor_displaced\n reference_displaced = self.calculate_displacement(location_pos, self.reference_point).tolist()\n reference_displaced_x, reference_displaced_y = reference_displaced\n angle = self.calculate_angle(neighbor_displaced, reference_displaced)\n\n def reference_line(x_pos):\n slope = reference_displaced_y / reference_displaced_x\n return slope * x_pos\n\n if reference_line(neighbor_displaced_x) > neighbor_displaced_y:\n angle = 360-angle\n\n location.add_neighbor_marker(neighbor, angle, marker_texture)", "def remap_partition(particles):\n remap_gid_partition_cython(particles, gl_to_loc_map_b.value)\n return particles" ]
[ "0.596348", "0.56947297", "0.5616027", "0.557297", "0.5553619", "0.55308986", "0.55085593", "0.5400612", "0.538122", "0.53603476", "0.53538245", "0.5268745", "0.5244884", "0.52312773", "0.5227159", "0.52247417", "0.5194903", "0.5181613", "0.51150405", "0.5111688", "0.51025075", "0.50984716", "0.5078885", "0.50573117", "0.50434023", "0.5014575", "0.50137746", "0.49856287", "0.49644825", "0.49635494" ]
0.6892715
0
Find the number of jumps before reaching the exit (rule 1)
def puzzle1(offsets):
    return find_jumps_to_exit(offsets, lambda o: o + 1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jumps(self, currFloor, floor):\r\n count = 0\r\n for t in self.targs:\r\n if (t > currFloor and t < floor) or (t < currFloor and t > floor):\r\n count = count + 1\r\n return count", "def puzzle2(offsets):\n return find_jumps_to_exit(offsets, lambda o: o + 1 if o < 3 else o - 1)", "def jump_factor(self):\n return self._jump_factor", "def jump(self, A):\n inf = len(A)\n min_step = [inf if i else 0 for i in xrange(len(A))]\n for loc, max_step in enumerate(A):\n for step in xrange(max_step, 0, -1):\n target_loc = loc + step\n if target_loc < len(A):\n if min_step[target_loc] > min_step[loc] + 1:\n min_step[target_loc] = min_step[loc] + 1\n else:\n break\n return min_step[-1]", "def solve(k=[]):\n if len(k) < 2:\n return 1\n min_jumps = len(k)\n for i in reversed(range(k[0])):\n if i > len(k):\n return 1\n if k[i] != 0:\n sub_jumps = solve(k[i + 1 :])\n min_jumps = min(min_jumps, sub_jumps)\n return min_jumps", "def backtrack_steps():\n\n # Initialize position and number of steps\n x = 0\n n_steps = 0\n\n # Walk until we get to positive 1\n while x < 1:\n x += 2 * np.random.randint(0, 2) - 1\n n_steps += 1\n\n return n_steps", "def lemur(branches):\n\n assert branches[0] == 0, \"First branch must be alive\"\n assert branches[-1] == 0, \"Last branch must be alive\"\n\n # loop through list\n # if next one is dead, jump 2 spaces\n # if the next one is alive, check next next\n # if next AND next next are alive, jump 2 spaces\n # if next is alive and next next is dead, jump 1 space\n # increment counter each jump\n # if len(branches) < 2:\n # return 0\n\n # if len(branches) < 3:\n # return 1\n\n # count = 0\n # branch = 0\n\n # while branch < (len(branches)-1):\n # if branches[branch + 1] == 1:\n # branch += 2\n # else:\n # if branches[branch + 2] == 0:\n # branch+= 2\n # else:\n # branch += 1\n # count += 1\n\n # return count\n\n#####################################################################\n# way shorter version\n\n branch = 0\n count = 0\n\n while branch < len(branches) - 1:\n branch += 2\n if branch >= len(branches) or branches[branch] == 1:\n # We can jump this far, so only jump 1\n branch -= 1\n count += 1\n\n return count", "def get_steps_num():\n return 0", "def f(i):\n l = checkout_values[i]\n two_or_less_checkouts = 0 #No crashes possible for checkouts of length 1 or 2.\n three_throws = []\n for e in l:\n if len(e) < 3:\n two_or_less_checkouts += 1\n elif [e[1], e[0], e[2]] not in three_throws: #The only possible crash.\n three_throws.append(e)\n return len(three_throws) + two_or_less_checkouts", "def solution(A):\n \n cars = 0\n ones = 0\n\n for i in range(len(A), 0, -1):\n\n if A[i-1] == 1:\n ones += 1\n else:\n cars += ones\n\n return (-1 if cars > 1000000000 else cars)", "def __numHeads(self):\n count = 1\n\n while (self.__coinFlip() == 1):\n count += 1\n return count", "def traverse(self):\r\n return self.count_pillars_and_exit(0, 0) == 5", "def test_findtotalConsecutive(self):\n treesList = [1, 2, 5, 3]\n result = findtotalConsecutive(treesList, 2, 4)\n self.assertEqual(result, -1)", "def branch_length(self, u):\n ret = 0\n parent = self.parent(u)\n if parent != NULL:\n ret = self.time(parent) - self.time(u)\n return ret", "def step_count(group_idx):\n cmp_pos = 0\n steps = 1\n if len(group_idx) < 1:\n return 0\n for i in range(len(group_idx)):\n if group_idx[cmp_pos] != group_idx[i]:\n cmp_pos = i\n steps += 1\n return steps", "def n_steps(self) -> int:\n return len(self) - 1 # subtract the base metric", "def obstacle_count(self):\n self.wide_scan()\n found_something = 
False\n counter = 0\n for distance in self.scan:\n if distance and distance < 200 and not found_something:\n found_something = True\n counter += 1\n print(\"Object # %d found, I think\" % counter)\n if distance and distance > 200 and found_something:\n found_something = False\n print(\"\\n----I SEE %d OBJECTS----\\n\" % counter)", "def counter(): # Local function\n nonlocal count\n if count < n:\n count += 1\n return count", "def count_pillars_and_exit(self, x, y):\r\n\r\n if not self.is_valid_room(x, y) or self.__maze[x][y].is_visited():\r\n return 0\r\n\r\n # check for exit or any pillar\r\n item_count = 0\r\n if self.__maze[x][y].get_exit():\r\n item_count = 1\r\n elif self.__maze[x][y].get_pillar_a():\r\n item_count = 1\r\n elif self.__maze[x][y].get_pillar_e():\r\n item_count = 1\r\n elif self.__maze[x][y].get_pillar_i():\r\n item_count = 1\r\n elif self.__maze[x][y].get_pillar_p():\r\n item_count = 1\r\n\r\n # not at exit so try another room: south, east, north, west\r\n self.__maze[x][y].set_visited(True)\r\n # if east_wall is not true, then we can go row +1\r\n if self.__maze[x][y].walls['E'] is False:\r\n item_count += self.count_pillars_and_exit(x + 1, y)\r\n if self.__maze[x][y].walls['S'] is False:\r\n item_count += self.count_pillars_and_exit(x, y + 1)\r\n if self.__maze[x][y].walls['W'] is False:\r\n item_count += self.count_pillars_and_exit(x - 1, y)\r\n if self.__maze[x][y].walls['N'] is False:\r\n item_count += self.count_pillars_and_exit(x, y - 1)\r\n\r\n return item_count", "def getNrOfRulesWithoutRepetitions(prg):\n\n nr_rules = len(prg)\n for rule in prg:\n if (rule.main_type != sim.RuleType.conditional):\n if (rule.lhs == rule.rhs and rule.lhs == \"e\"):\n nr_rules -= 1\n\n return nr_rules", "def obstacle_count(self):\n found_something = False\n count = 0\n starting_postion = self.get_heading()\n self.right(primary=60, counter=60)\n time.sleep(0.5)\n while self.get_heading() != starting_postion:\n if self.read_distance() < 250 and not found_something:\n found_something = True\n count += 1\n print (\"I found something\")\n elif self.read_distance() > 250 and found_something:\n found_something = False\n print(\"I have a clear view\")\n self.stop()\n\n print(\"I have found this many things: %d\" % count)\n return count", "def break_count(self):\n return len(self.link_ids) + len(self.crossring_cleavages)", "def number_of_iterations(self) -> int:\n pass", "def count_until(match_value):", "def bisect_steps_remaining():\n # https://github.com/git/git/blob/566a1439f6f56c2171b8853ddbca0ad3f5098770/bisect.c#L1043\n return floor(log(bisect_revisions(), 2))", "def _get_number_of_shifts(self, action):\n\n destination = self.vehicle_data[3][action]\n\n no_veh_destination1 = len(np.argwhere(self.grid_destination.T[self.current_Lane] == 1))\n\n if destination == 2 and no_veh_destination1 != 0:\n return 1\n else:\n return 0", "def n_pos(self):\n running_total = 0\n for i in range(self.prob.num):\n if self.alphas[i] > 1e-5 > self.prob.C - self.deltas[i] and self.prob.Y[i] == 1:\n running_total += 1\n return running_total if running_total > 0 else 1", "def n_pos(self):\n running_total = 0\n for i in range(self.prob.num):\n if self.alphas[i] > 1e-5 > self.prob.C - self.deltas[i] and self.prob.Y[i] == 1:\n running_total += 1\n return running_total if running_total > 0 else 1", "def Solve(bases):\r\n n = 1\r\n while 1:\r\n n += 1\r\n done = True\r\n for b in bases:\r\n if not Happy(n, b):\r\n done = False\r\n break\r\n if done:\r\n return n", "def dfs(self, node):\n if not node:\n return 
0\n\n l = self.dfs(node.left)\n r = self.dfs(node.right)\n self.ret = max(self.ret, l + 1 + r - 1) # path length is the #nodes - 1\n return max(l, r) + 1" ]
[ "0.6420323", "0.6123828", "0.60416996", "0.60376054", "0.59392315", "0.5912451", "0.5872377", "0.5807086", "0.57804805", "0.57612854", "0.57415116", "0.57337457", "0.5717321", "0.570134", "0.5666641", "0.5646191", "0.56335825", "0.5621903", "0.5615216", "0.56013274", "0.55957234", "0.55939555", "0.5561641", "0.5549823", "0.5543121", "0.5500079", "0.54855293", "0.54855293", "0.5478444", "0.5465955" ]
0.66415817
0
Get GFS sounding data for the given timeslot.
def get_gfs_sounding(timeslot, lat, lon, interp=None, step=200.):
    fname = fetch_gfs_data(timeslot, lat, lon)
    return read_gfs_data(fname, lat, lon, interp=interp, step=200.)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_gfs_data(timeslot, lat, lon):\n pass", "def get_sound_data(sec, freq_func, amplitude):\n t = np.linspace(0, sec, SAMPLERATE * sec)\n data = np.array(t, dtype=np.int16)\n for i in range(data.size):\n data[i] = amplitude * np.sin(2 * np.pi * quad(freq_func, 0, t[i])[0])\n return data", "def get_sound(self, lump_name):\n\n if lump_name in self.sound_cache:\n return self.sound_cache[lump_name]\n\n lump = self.get_lump(lump_name)\n if lump is None:\n return\n\n sound_data = sound.Sound()\n sound_data.read_from(lump.get_data())\n self.sound_cache[lump_name] = sound_data\n\n return sound_data", "def get_sound(self, ):\r\n return _channeldata[self.chan].sound", "def retrieve(self, sid):\n zx = \"\".join([chr(random.randint(97,122)) for i in xrange(0, 11)])\n resdat = wavehttp.get(\"wave.google.com\", \n \"/wave/wfe/channel?VER=6&RID=rpc&SID=\"+sid+\n \"&CI=0&AID=0&TYPE=xmlhttp&zx=\"+zx+\"&t=1\").read()\n file(\"./tempdata\",\"w+\").write(resdat)\n print resdat", "def get_data(subject, session, only_success, silent) :\n\n game_data = get_game_data3(subject, session, silent=silent)\n physio_data = get_physio_data(subject, session)\n\n # test if there is any data!\n if not all(game_data.shape):\n raise DataAccessError('empty game data')\n if not all(physio_data.shape):\n raise DataAccessError('empty physio data')\n \n\n # using my block times extraction\n trials = extract_trial_times(game_data, only_success)\n if len(trials[0]) == 0 :\n raise DataAccessError('no trials extracted')\n\n # convert trial times to UTC\n if int(subject) >= 400:\n one_hour_in_secs = 60*60\n trials[0] -= one_hour_in_secs\n trials[1] -= one_hour_in_secs \n \n # get first and last timestamp\n min_tr0 = min(trials[0])\n min_tr1 = min(trials[1])\n min_phy = min(physio_data['time'])\n min_time = min(min_tr0, min_tr1, min_phy)\n max_tr0 = max(trials[0])\n max_tr1 = max(trials[1])\n max_phy = max(physio_data['time'])\n max_time = max(max_tr0, max_tr1, max_phy)\n\n # transform to relative time scales\n physio_data['time'] -= min_time\n trials[0] -= min_time\n trials[1] -= min_time\n\n return physio_data, trials, (min_time, max_time)", "def get_data(self, scan_range=None, time_range=None):\n data = deepcopy(self.data)\n if time_range is not None:\n scan_range=self.get_scans_from_times(time_range)\n print \"Getting times:\", time_range\n\n if scan_range is not None:\n data = data[int(scan_range[0]):int(scan_range[1])]\n print \"Getting scans:\", scan_range\n else:\n print \"Getting all scans, length:\", len(self.scans), data.shape\n\n if len(data) > 1:\n try:\n data = merge_spectra(data)\n except Exception, e:\n concat = np.concatenate(data)\n sort = concat[concat[:, 0].argsort()]\n data = ud.removeduplicates(sort)\n print e\n elif len(data) == 1:\n data = data[0]\n else:\n data = data\n #plt.figure()\n #plt.plot(data)\n #plt.show()\n return data", "def get_balloon_sounding(timeslot, lat, lon, interp=None, step=200.):\n pass", "def get_gga_data(port):\n \n \n # Wait for GGA message :\n gga = port.readline().decode(\"utf-8\")\n while not 'GGA' in gga:\n if gga: print(\"Wait for GGA : \", gga)\n gga = port.readline().decode(\"utf-8\")\n \n \n t = np.float(gga[7:16])\n \n # Print messages :\n print(\"Heading antenna frame :\")\n print(\" GGA: \",gga)\n \n # Quality check :\n if not 'GGA' in gga:\n print(\"Issue with GGA frame decoding !\\nMessage:\\nGGA:{0}\\n\".format(gga))\n gga, t = get_gga_data(port)\n \n return gga, t", "def retrieve_timeseries(start_time, end_time, channel_name, IFO, frame_type):\n\td = 
pylal.frutils.AutoqueryingFrameCache(frametype=frame_type, scratchdir=None)\n\tdata = d.fetch(channel_name, start_time, end_time)\n\t\n\ttime_series = {\n\t\t'waveform': data.A,\n\t\t'dt' : data.metadata.dt,\n\t\t'fs' : 1.0/data.metadata.dt,\n\t}\n\treturn time_series", "def tempo(signal,fs,hop_len = 64, **kwargs):\n tempo, beats = librosa.beat.beat_track(y=signal, sr=fs, hop_length=hop_len)\n return tempo", "def generateSound(amps_samples, channel_fs, sampleRate):\r\n\r\n samples_to_gen = len(amps_samples[0]) \r\n nb_channels = len(amps_samples)\r\n duration = samples_to_gen / sampleRate # in s\r\n\r\n \r\n t = np.linspace(0.0, duration, samples_to_gen) # Produces length of samples\r\n\r\n sines = amps_samples * np.sin(2 * np.pi * np.outer(channel_fs, t) )\r\n ySum = np.sum(sines, axis=0)\r\n\r\n\r\n # Normalize data, so that it is in playable amplitude\r\n res_data = 10* ySum / np.linalg.norm(ySum)\r\n\r\n return res_data", "def getStationData(self):\n dtime = datetime.strptime(self.refTime, \"%y%m%d/%H%M\")\n trange = TimeRange()\n trange.setStart(dtime)\n trange.setEnd(dtime)\n dataTime = DataTime(refTime=dtime, validPeriod=trange)\n req = StationDataRequest()\n req.setPluginName(self.pluginName)\n req.setStationId(self.stationId)\n req.setRefTime(dataTime)\n req.setParmList(self.parmList)\n req.setPartNumber(self.partNumber)\n resp = self.client.sendRequest(req)\n\n for i, rec in enumerate(resp):\n resp[i] = {\n key.decode() if isinstance(key, bytes) else key:\n val.decode() if isinstance(val, bytes) else val\n for key, val in rec.items()\n }\n\n return resp", "def getData(self,sensor_id,t):\r\n assert t > 0 and t < self.measurements\r\n try:\r\n return round(float(self.app.data[int(t*self.samplerate)][sensor_id].text),3)\r\n except:# No data loaded, or scrubber out of bounds\r\n return 0", "def getSound(self):\r\n return self._shipsound", "def load_data():\n # Load in data\n sample_frame = energy_connection.sample_series('energy_readings')\n # TODO: Rooms/QL Extract\n sample_frame = energy_connection.sample_series('external_readings', append_frame=sample_frame)\n\n # To object\n sample = TimeSeriesSample(sample_frame, 'time')\n\n return sample", "def get_data(path):\n if path.endswith('.mp3'):\n path = prepare_file(path, path.rstrip('mp3')+'wav')\n x, sr = librosa.load(path, duration=30)\n\n else:\n x, sr = librosa.load(path, duration=30)\n directory, file_name = os.path.split(path)\n return x, sr, file_name", "def tick(timeslot, _sd, _curr_ply):\n try:\n\n data = serial.readline()\n print(data)\n\n sound_id = ceil(int(data) / 2)\n\n # disregard double recognition\n if _curr_ply[sound_id - 1] + ds.latency_double < timeslot:\n _curr_ply[sound_id - 1] = timeslot\n\n handle_sound_ext(sound_id)\n handle_sound_int(sound_id, timeslot, _sd)\n\n except ValueError:\n pass\n except (KeyError, SerialException) as e:\n print(e)\n finally:\n sleep(ds.max_tick)", "def get_data(station,starttime,endtime,activity=False,\n rep='/GNOMEDrive/gnome/serverdata/',resample=None):\n setname = \"MagneticFields\"\n dstr = ['%Y','%m','%d','%H','%M']\n dsplit = '-'.join(dstr[:starttime.count('-')+1])\n start = datetime.strptime(starttime,dsplit)\n starttime = construct_utc_from_metadata(start.strftime(\"%Y/%m/%d\"),\n start.strftime(\"%H:%M:%S.%d\"))\n dsplit = '-'.join(dstr[:endtime.count('-')+1])\n end = datetime.strptime(endtime,dsplit)\n endtime = construct_utc_from_metadata(end.strftime(\"%Y/%m/%d\"),\n end.strftime(\"%H:%M:%S.%d\"))\n dataset = []\n for date in 
numpy.arange(start,end,timedelta(minutes=1)):\n date = date.astype(datetime)\n path1 = rep+station+'/'+date.strftime(\"%Y/%m/%d/\")\n path2 = station+'_'+date.strftime(\"%Y%m%d_%H%M*.hdf5\")\n fullpath = os.path.join(path1,path2)\n dataset += glob.glob(fullpath)\n if len(dataset)==0:\n print \"ERROR: No data files were found...\"\n quit()\n file_order,data_order = {},{}\n for fname in dataset:\n hfile = h5py.File(fname, \"r\")\n segfile = file_to_segment(hfile,setname)\n file_order[segfile] = fname\n data_order[segfile] = hfile\n # Extract sample rate from metadata of last read data file\n sample_rate = hfile[setname].attrs[\"SamplingRate(Hz)\"]\n # Estimate full segment activity list\n activity = create_activity_list(station,data_order)\n # Generate an ASCII representation of the GPS timestamped\n # segments of time covered by the input data\n seglist = segmentlist(data_order.keys())\n # Sort the segment list\n seglist.sort()\n # Create list of time series from every segment\n ts_list = generate_timeseries(file_order,setname)\n # Retrieve channel data for all the segments\n full_data = numpy.hstack([retrieve_channel_data(data_order[seg],setname)\n for seg in seglist])\n new_sample_rate = sample_rate if resample==None else resample\n new_data_length = len(full_data)*new_sample_rate/float(sample_rate)\n full_data = scipy.signal.resample(full_data,int(new_data_length))\n # Models a time series consisting of uniformly sampled scalar values\n ts_data = types.TimeSeries(full_data,delta_t=1./new_sample_rate,\n epoch=seglist[0][0])\n for v in data_order.values():\n v.close()\n return ts_data,ts_list,activity,int(starttime),int(endtime)", "def get_data(self, symbol):\n # Collect stock market data\n self.data = self.get_stock_data(symbol)", "def get_sound() -> str:\n with open(os.path.dirname(os.path.abspath(__file__))+'\\\\data.json', 'r') as test:\n test = json.load(test)\n sound = test['stop_sound']\n return sound", "def queryPHdata(timestamp):\n\n if len(flight_data_log.values()) == 0:\n return DEFAULT_FLIGHTDATA\n\n index = flight_data_log.bisect(timestamp)\n\n r_index = max(index-1, 0)\n l_index = min(index, len(flight_data_log))\n\n #\n # TODO interpolate the sorrounding flight data records.\n #\n return flight_data_log.values()[r_index]#, flight_data_log.values()[l_index]", "def get_song(self): \n\n song = self.tracks.sample(n=1).to_dict('index')\n return list(song.values())[0]", "def getRicker(f,t):\n # assert len(f) == 1, 'Ricker wavelet needs 1 frequency as input'\n # f = f[0]\n pift = pi*f*t\n wav = (1 - 2*pift**2)*np.exp(-pift**2)\n return wav", "def AcquiredData (self, arguments=None) :\n\t\tself.OODriver.Wrapper_getSpectrum(self.wrapperHandle,self.spectrometerIndex,self.bufferHandle)\n\t\t\n\t\tif self.OODriver.Wrapper_isSaturated(self.wrapperHandle,self.spectrometerIndex) :\n\t\t\tprint \"Warning: OcenOptics spectrometer is saturated!\"\n\t\t\t\n\t\ttry : return self.buffer[self.spectral_interval]\n\t\texcept AttributeError : return self.buffer", "def sample_gls(times, gls_obs, freq, jitter, rvs_err, fbeg, fend, object_name,\n peaks_data, search_phase_range):\n np.random.seed()\n rvs_sim = np.ones(np.shape(times)) \\\n + sim_any_sinmode(gls_obs, freq, times) \\\n + np.random.normal(0, np.sqrt(jitter**2), times.size)\n ls_sim = af_utils.get_gls(times,\n rvs_sim,\n rvs_err,\n fbeg,\n fend,\n object_name,\n freq_array=gls_obs.freq)\n dummy_freq_array = np.zeros(np.size(peaks_data[0]))\n # search for phases of max power using a certian frequency\n # range and the prior of data peaks\n 
for j in range(0, np.size(peaks_data[0])):\n index_frequencies = np.where(\n np.logical_and(\n ls_sim.freq >= peaks_data[0][j] - search_phase_range,\n ls_sim.freq <= peaks_data[0][j] + search_phase_range))\n index_maxfreqs = max(np.arange(len(ls_sim.p[index_frequencies])),\n key=ls_sim.p[index_frequencies].__getitem__)\n index_maxfreq = np.argwhere(\n ls_sim.freq == ls_sim.freq[index_frequencies][index_maxfreqs])[0]\n\n dummy_freq_array[j] = ls_sim.freq[index_maxfreq]\n peaks_sim = get_phase_info(ls_sim,\n frequency_array=dummy_freq_array,\n sim=True)\n return ls_sim.power, ls_sim.freq, (peaks_sim[2] % 1) * 2. * np.pi", "def note(freq):\n data = np.sin(2.0 * np.pi * freq * t) * amp\n return data", "def getWaveSample( self, position ):\n\t\td = self.data[position]\n\t\treturn d", "def getSoundByIndex(self, index):\n value = index % len(self._soundsDictionary)\n return list(self._soundsDictionary.values())[value]", "def getData(dig, pipe, event, pulses):\n logging.info(\"Started getData\")\n start_time = time.time()\n for pulse in range(pulses):\n samples = dig.get_data_raw()\n# logging.info(\"GetData retrieved: %d\", len(samples))\n pipe.put(samples)\n end_time = time.time()\n elapsed = end_time - start_time\n samplesProcessed = (pulses * len(samples[0]) * len(samples))\n logging.info(\"getData processed %d Msamples in %.3f s\",\n samplesProcessed / 1e6,\n elapsed)\n logging.info(\"getData rate: %.3f Msa/s in lumps of %d samples\",\n samplesProcessed / elapsed / 1e6,\n dig.pointsPerCycle)" ]
[ "0.58685505", "0.5698143", "0.53198844", "0.5312702", "0.5265088", "0.5250593", "0.5247578", "0.5088149", "0.50585455", "0.50518686", "0.5035242", "0.50214183", "0.50213265", "0.49981782", "0.49438947", "0.49307737", "0.49250743", "0.48810902", "0.4880546", "0.48561516", "0.4845538", "0.48440313", "0.48272988", "0.48249954", "0.48068252", "0.4794241", "0.47880104", "0.478053", "0.47724587", "0.47722983" ]
0.7687082
0
>>> m = PSYCOP()
>>> m.encode_proposition("Aac")
(A(x_0) > C(x_0))
>>> m.encode_proposition("Iac")
(A(a_1) AND C(a_1))
def encode_proposition(self, p, hat=False):
    i = self.get_fresh_id()
    if p[0] == "A":  # A(x) -> B(x)
        return self.Prop(self.PT.implies,
                         self.get_atomic_proposition(p[1].upper(), i, False, hat),
                         self.get_atomic_proposition(p[2].upper(), i, False, hat))
    elif p[0] == "E":  # not (A(x) and B(x))
        return self.Prop(self.PT.negation,
                         self.Prop(self.PT.conjunction,
                                   self.get_atomic_proposition(p[1].upper(), i, False, hat),
                                   self.get_atomic_proposition(p[2].upper(), i, False, hat)),
                         None)
    elif p[0] == "I":  # A(a) and B(a)
        return self.Prop(self.PT.conjunction,
                         self.get_atomic_proposition(p[1].upper(), i, True, hat),
                         self.get_atomic_proposition(p[2].upper(), i, True, hat))
    else:  # A(a) and not B(a)
        return self.Prop(self.PT.conjunction,
                         self.get_atomic_proposition(p[1].upper(), i, True, hat),
                         self.Prop(self.PT.negation,
                                   self.get_atomic_proposition(p[2].upper(), i, True, hat),
                                   None))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def encode(input_message: str, cond_prob_dict: Dict, cumulative_prob_dict: Dict, context_char_no: int):\n precision = 32\n one = int(2 ** precision - 1)\n quarter = int(ceil(one / 4))\n\n compressed_message, lo, hi, straddle = [], 0, one, 0\n\n for char_counter, msg_char in enumerate(input_message): # for every symbol\n\n # Progress bar\n if char_counter % 100 == 0:\n so.flush()\n so.write('Arithmetic encoded %d%% \\r' % int(floor(char_counter / len(input_message) * 100)))\n\n # When enough context exists to use full dictionary\n context_characters = ''\n if char_counter >= context_char_no:\n for cont_count in range(context_char_no):\n ref = cont_count - context_char_no\n context_characters += input_message[char_counter + ref]\n # Before there is sufficient context\n else:\n context_characters = 'no_context'\n\n char_probs = cond_prob_dict[context_characters]\n cum_prob_dict = cumulative_prob_dict[context_characters]\n\n lohi_range = hi - lo + 1 # The added 1 is necessary to avoid rounding issues\n\n # 2) narrow the interval end-points [lo,hi) to the new range [f,f+p]\n # within the old interval [lo,hi], being careful to round 'innwards'\n lo += ceil(cum_prob_dict[msg_char] * lohi_range)\n hi = lo + floor(char_probs[msg_char] * lohi_range)\n\n if lo == hi:\n raise NameError('Zero interval!')\n\n lo, hi, compressed_message, straddle = encode_rescale(lo, hi, compressed_message, one, straddle)\n\n # termination bits - after processing all input symbols, flush any bits still in the 'straddle' pipeline\n straddle += 1 # add 1 to straddle for \"good measure\" (ensures prefix-freeness)\n if lo < quarter: # the position of lo determines the dyadic interval that fits\n compressed_message.append(0)\n compressed_message += [1 for s in range(straddle)]\n else:\n compressed_message.append(1)\n compressed_message += [0 for s in range(straddle)]\n\n return compressed_message", "def _process_prosody(sonority):\n assert 9 not in sonority[1:-1]\n assert sonority[0] == sonority[-1] == 9\n\n # create the output values\n psequence = []\n first = True # stores whether first syllable is currently being processed\n\n for i in range(1, len(sonority) - 1):\n # get a segment with context\n a, b, c = sonority[i - 1], sonority[i], sonority[i + 1]\n\n if b == 7: # a vowel\n if first:\n psequence.append('X')\n first = False\n elif c == 9: # last\n psequence.append('Z')\n else:\n psequence.append('Y')\n elif b == 8: # a tone\n psequence.append('T')\n elif a >= b >= c or c == 8: # descending\n if c == 9: # word final position\n psequence.append('Z' if b == 7 else 'N') # vowel or consonant\n else:\n if first:\n first = False\n psequence.append('A')\n else:\n psequence.append('L')\n elif b < c or a > b <= c or a < b <= c: # ascending\n # check for syllable first\n if a == 9:\n psequence.append('A')\n elif a >= b:\n if c == 9:\n psequence.append('N')\n else:\n if psequence[-1] != 'A':\n psequence = psequence[:-1] + [psequence[-1].replace('L', 'M')] + ['B']\n else:\n psequence.append('C')\n else:\n psequence.append('C')\n elif a < b > c: # 
consonant peak\n if first:\n psequence.append('X')\n first = False\n else:\n psequence.append('Y')\n else:\n raise ValueError(\n \"Conversion to prosodic string failed due to a condition which was not \"\n \"defined in the convertion, for details compare the numerical string \"\n \"{0} with the profile string {1}\".format(sonority, psequence))\n return psequence", "def encoder(ne, nj):\n #contrainte sup les equipes ne peuvent pas s'affronter elles-meme\n contrainte = ''\n for e in range(ne):\n for j in range(nj):\n contrainte += str(-codage(ne,nj,j,e,e))+' 0\\n'\n return contrainte+encoderC1(ne, nj) +'\\n'+ encoderC2(ne, nj)+'\\n'+ \\\n contrainteExtDimanche(ne,nj,0.5)+'\\n'+contrainteDomDimanche(ne,nj,0.4)+'\\n'+ \\\n contrainteExtConsecutif(ne,nj)+'\\n'+contrainteDomConsecutif(ne,nj)", "def testStratifiedAxioms(self):\n \n a1a = Parser.parse_as(strat1a.split(\"\\n\"), axioms.Axiom, self.prob)\n a1b = Parser.parse_as(strat1b.split(\"\\n\"), axioms.Axiom, self.prob)\n a2 = Parser.parse_as(strat2.split(\"\\n\"), axioms.Axiom, self.prob)\n \n self.dom.axioms = [a1a, a1b, a2]\n self.dom.stratify_axioms()\n\n self.assert_(a1a.predicate in self.dom.stratification[1])\n self.assert_(a1b.predicate in self.dom.stratification[1])\n self.assert_(a2.predicate in self.dom.stratification[2])\n \n state = State.from_problem(self.prob).get_extended_state()\n\n oc1 = StateVariable(self.prob.predicates[\"occupied\"][0], [self.prob[\"pos1\"]])\n oc2 = StateVariable(self.prob.predicates[\"occupied\"][0], [self.prob[\"pos2\"]])\n oc3 = StateVariable(self.prob.predicates[\"occupied\"][0], [self.prob[\"apt1\"]])\n oc4 = StateVariable(self.prob.predicates[\"occupied\"][0], [self.prob[\"apt2\"]])\n\n self.assertEqual(state[oc1], TRUE)\n self.assertEqual(state[oc2], TRUE)\n self.assertEqual(state[oc3], FALSE)\n self.assertEqual(state[oc4], TRUE)\n\n int1 = StateVariable(self.prob.predicates[\"interesting\"][0], [self.prob[\"pos1\"]])\n int2 = StateVariable(self.prob.predicates[\"interesting\"][0], [self.prob[\"pos2\"]])\n int3 = StateVariable(self.prob.predicates[\"interesting\"][0], [self.prob[\"apt1\"]])\n int4 = StateVariable(self.prob.predicates[\"interesting\"][0], [self.prob[\"apt2\"]])\n \n self.assertEqual(state[int1], TRUE)\n self.assertEqual(state[int2], TRUE)\n self.assertEqual(state[int3], FALSE)\n self.assertEqual(state[int4], TRUE)\n \n free1 = StateVariable(self.prob.predicates[\"free\"][0], [self.prob[\"pos1\"]])\n free2 = StateVariable(self.prob.predicates[\"free\"][0], [self.prob[\"pos2\"]])\n free3 = StateVariable(self.prob.predicates[\"free\"][0], [self.prob[\"apt1\"]])\n free4 = StateVariable(self.prob.predicates[\"free\"][0], [self.prob[\"apt2\"]])\n\n # print \"-----------------_\"\n # print state[free1]\n # print state\n # print \"-----------------\"\n # print state[free1]\n self.assertEqual(state[free1], FALSE)\n self.assertEqual(state[free2], FALSE)\n self.assertEqual(state[free3], TRUE)\n self.assertEqual(state[free4], FALSE)", "def test_special_PSX(self, angexp):\n a, b, c = angexp[0]\n tgt = U3Gate(a, b, c).to_matrix()\n exp = {(\"p\", \"sx\")[g]: angexp[1][g] for g in (0, 1) if angexp[1][g]}\n self.check_oneq_special_cases(tgt, \"PSX\", exp)", "def encode_syn(prime, base, a_public):\n\n return 'SYN;%i;%i;%i' % (prime, base, a_public)", "def subspace2proposition(primes: dict, subspace: Union[dict, str]) -> str:\n\n if not subspace or subspace == len(primes) * \"-\":\n return \"TRUE\"\n\n if type(subspace) is str:\n subspace = pyboolnet.state_space.subspace2dict(primes, subspace)\n\n 
return \"&\".join([name if value == 1 else f\"!{name}\" for name, value in sorted(subspace.items())])", "def associativity(ob):\n return 0", "def encode_affine(msg, a, b):\n \n #Code to numbers\n encoded_message = [ RVALUES[(a * VALUES[i] + b) % 26] for i in msg ]\n \n return ''.join(encoded_message)", "def get_preamble():\n state = np.array([1,1,0,1,0], dtype=np.bool)\n taps = np.array([0,0,1,0,1], dtype=np.bool)\n p = np.zeros(80, dtype=np.uint8)\n for i in range(80):\n p[i] = state[-1]\n state = np.concatenate(([np.sum(state&taps)&1], state[0:-1]))\n a = np.zeros(80, common.SYMB_SCRAMBLE_DTYPE)\n ## BPSK modulation\n constellation = PhysicalLayer.make_psk(2,range(2))['points']\n a['symb'] = constellation[p,]\n a['scramble'] = 1\n return a", "def encode(self, peptides):\n raise NotImplementedError", "def _eqz_2PC(self):\n # Create BinarySharedTensors from shares\n x0 = MPCTensor(self.share, src=0, ptype=Ptype.binary)\n x1 = MPCTensor(-self.share, src=1, ptype=Ptype.binary)\n\n # Perform equality testing using binary shares\n x0._tensor = x0._tensor.eq(x1._tensor)\n x0.encoder = self.encoder\n\n # Convert to Arithmetic sharing\n result = x0.to(Ptype.arithmetic, bits=1)\n result.encoder._scale = 1\n\n return result", "def comp_save(self, *args, **kwargs):\n t = self.get_temp()\n self.add_pc(2)\n self.pb[self.pc - 2] = \"EQ\", _m(self.ss_i(0)), _m(self.ss_i(1)), _m(t)\n self.pop(1)\n self.push(t)\n self.push(self.pc - 1)", "def create_model():\n # Get list of all syllables: [\"<s>\", \"AH\", \"</s>\", \"<s>\", \"T\", ...]\n syllabifier = Syllabifier()\n all_syllables = syllabifier.all_syllables()\n\n # Count conditional probabilties of phoneme tuples\n tcf = TrigramCollocationFinder.from_words(all_syllables)\n bcf = BigramCollocationFinder.from_words(all_syllables)\n tri_dict = dict(sorted(tcf.ngram_fd.items(), key=lambda t: (-t[1], t[0])))\n bi_dict = dict(sorted(bcf.ngram_fd.items(), key=lambda t: (-t[1], t[0])))\n\n # Create dictionary to count cond prob all phoneme tuples\n accepted_phonemes = [i[0] for i in cmudict.phones()]\n accepted_phonemes.append('<s>')\n accepted_phonemes.append('</s>')\n phoneme_tups = [p for p in itertools.product(accepted_phonemes, repeat=3)]\n cond_probs_dict = dict([(char, 0) for char in phoneme_tups])\n\n for t in tri_dict:\n p1, p2, p3 = t[0], t[1], t[2]\n tri_count = tri_dict[t]\n bi_count = bi_dict[(p1, p2)]\n if bi_count > 1:\n cond_prob = tri_count * 1.0 / bi_count\n else:\n cond_prob = 0.0\n cond_probs_dict[(p1, p2, p3)] = cond_prob\n\n pickle.dump(cond_probs_dict, open(COND_PROBS_PATH, \"wb\"))\n return", "def encode_instruction(instruction: str) -> int:\n\ta, b, c = convert_instruction(instruction)\n\treturn encode_pair(a, encode_pair(b, c))", "def encode_canonical(pc, vec, ang):\n if len(ang.shape) == 1:\n ang = ang[:, np.newaxis]\n\n theta = np.arctan2(pc[1], pc[0])\n R = get_R(theta) # p_canonical = R * p_world\n vec_cano = R @ vec.T[..., np.newaxis] # (N, 3, 1)\n vec_cano = vec_cano[:, :, 0].T\n ang_cano = ang - theta[:, np.newaxis]\n\n return vec_cano, ang_cano", "def translate_sequence(sequence, genetic_code = {'GUC': 'V', 'ACC': 'T', 'GUA': 'V', 'GUG': 'V', 'ACU': 'T', 'AAC': 'N', 'CCU': 'P', 'UGG': 'W', 'AGC': 'S', 'AUC': 'I', 'CAU': 'H', 'AAU': 'N', 'AGU': 'S', 'GUU': 'V', 'CAC': 'H', 'ACG': 'T', 'CCG': 'P', 'CCA': 'P', 'ACA': 'T', 'CCC': 'P', 'UGU': 'C', 'GGU': 'G', 'UCU': 'S', 'GCG': 'A', 'UGC': 'C', 'CAG': 'Q', 'GAU': 'D', 'UAU': 'Y', 'CGG': 'R', 'UCG': 'S', 'AGG': 'R', 'GGG': 'G', 'UCC': 'S', 'UCA': 'S', 'UAA': '*', 'GGA': 'G', 'UAC': 
'Y', 'GAC': 'D', 'UAG': '*', 'AUA': 'I', 'GCA': 'A', 'CUU': 'L', 'GGC': 'G', 'AUG': 'M', 'CUG': 'L', 'GAG': 'E', 'CUC': 'L', 'AGA': 'R', 'CUA': 'L', 'GCC': 'A', 'AAA': 'K', 'AAG': 'K', 'CAA': 'Q', 'UUU': 'F', 'CGU': 'R', 'CGC': 'R', 'CGA': 'R', 'GCU': 'A', 'GAA': 'E', 'AUU': 'I', 'UUG': 'L', 'UUA': 'L', 'UGA': '*', 'UUC': 'F'}, start_pos = 0):\n #find first orf\n #first_orf_seq = find_first_orf(sequence)\n\n # ensure sequence is uppercase\n seq = sequence.upper()\n\n #translate the sequence\n protein = \"\"\n for i in range(0, len(seq) - (len(seq) % 3), 3):\n codon = seq[i:i + 3]\n if genetic_code[codon] == \"*\":\n break\n protein += genetic_code[codon]\n return protein", "def type3coarsen(sep_to_comps, dict_of_vars, G):\n dec_vars = dict_of_vars.keys()\n A3ineq = list()\n b3ineq = list()\n A3eq = list()\n b3eq = list()\n for s in sep_to_comps.keys():\n comps = sep_to_comps[s]\n p_set = list(powerset(comps))\n for p in p_set:\n hold_ineq = list()\n hold_eq = list()\n add_ineq = False\n if not (len(p) == len(comps)):\n add_ineq = True\n combos = list(combinations(p, 2))\n for pair in combos:\n for u in pair[0]:\n for v in pair[1]:\n uv = tuple(sorted([u, v]))\n if (uv, s) in dec_vars:\n hold_ineq.append(dict_of_vars[(uv, s)])\n else:\n combos = list(combinations(p, 2))\n for pair in combos:\n for u in pair[0]:\n for v in pair[1]:\n uv = tuple(sorted([u, v]))\n if (uv, s) in dec_vars:\n hold_eq.append(dict_of_vars[(uv, s)])\n if add_ineq:\n A3ineq.append(hold_ineq)\n b3ineq.append(len(p) - 1)\n else:\n A3eq.append(hold_eq)\n b3eq.append(len(ch.get_neighbour(G, s)) - 1)\n\n A3 = sp.lil_matrix((len(A3ineq) + len(A3eq), len(dict_of_vars.keys())))\n b3 = np.zeros((len(b3ineq) + len(b3eq), ))\n row = 0\n for item in A3eq:\n for vals in item:\n A3[row, vals] = 1\n b3[row] = b3eq[row]\n row += 1\n hold = row\n for item in A3ineq:\n for vals in item:\n A3[row, vals] = 1\n b3[row] = b3ineq[row - hold]\n row += 1\n\n return A3, b3, len(b3eq)", "def pepComp(align,useConsensus=True):\n if useConsensus:\n ref = consensus(align)\n else:\n ref = identifyMindist(align)\n out = []\n for seq in align:\n out.append(''.join([aa.upper() if aa.upper()==refaa.upper() else aa.lower() for aa, refaa in zip(seq, ref)]))\n return out", "def solution(s, p, q):\n a_prefix = [0 for _ in range(len(s) + 1)]\n c_prefix = a_prefix.copy()\n g_prefix = a_prefix.copy()\n for i, nucleotide in enumerate(s):\n a_prefix[i + 1] = a_prefix[i] + (1 if nucleotide == 'A' else 0)\n c_prefix[i + 1] = c_prefix[i] + (1 if nucleotide == 'C' else 0)\n g_prefix[i + 1] = g_prefix[i] + (1 if nucleotide == 'G' else 0)\n result = []\n for left, right in zip(p, q):\n if a_prefix[right + 1] - a_prefix[left] > 0:\n result.append(1)\n elif c_prefix[right + 1] - c_prefix[left] > 0:\n result.append(2)\n elif g_prefix[right + 1] - g_prefix[left] > 0:\n result.append(3)\n else:\n result.append(4)\n return result", "def A_trans(p, trans, Ci, Tleaf=None):\n\n # get CO2 diffusive conduct.\n gc, __, __ = leaf_energy_balance(p, trans, Tleaf=Tleaf)\n A_P = conv.U * gc * (p.CO2 - Ci) / (p.Patm * conv.MILI)\n\n try:\n A_P[np.isclose(np.squeeze(gc), cst.zero, rtol=cst.zero,\n atol=cst.zero)] = cst.zero\n\n except TypeError:\n pass\n\n return A_P", "def test_composition(self):\n\n i = Code()\n j = Code()\n k = Code()\n l = Code()\n\n c = j + i ** i // 5 / l < j - k\n self.assertEqual(str(c), 'j + i ** i // 5 / l < j - k')", "def prove_CP() -> Proof:\n # Optional Task 6.7d", "def test_encode_pair():\n\tassert encode_pair(0, 0) == 0\n\tassert encode_pair(1, 0) == 
1\n\tassert encode_pair(0, 1) == 2\n\tassert encode_pair(4, 6) == 207", "def prop(q1,abcd,mode=[0,0],p1=1):\n\n A=abcd[0][0]\n B=abcd[0][1]\n C=abcd[1][0]\n D=abcd[1][1]\n \n n=mode[0]\n m=mode[1]\n \n q = (A*q1 + B)/(C*q1 + D)\n p = p1*np.exp(1j*np.angle(1/(A+B/q1)**(1+n+m)))\n \n return q,p", "def _encode_supplement(self):", "def _encode_supplement(self):", "def pull_out_quantifications_from_left_across_binary_operator(formula:\r\n Formula) -> \\\r\n Tuple[Formula, Proof]:\r\n assert has_uniquely_named_variables(formula)\r\n assert is_binary(formula.root)\r\n # Task 11.7.1\r\n\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n # Basic Case - No quantifier to change n = 0 and no n = 1\r\n if not is_quantifier(formula.first.root):\r\n ccl = equivalence_of(formula, formula)\r\n prover.add_tautology(ccl)\r\n return formula, prover.qed()\r\n\r\n\r\n # Without the predicate\r\n form = Formula(formula.root, formula.first.predicate, formula.second)\r\n pred, proof = pull_out_quantifications_from_left_across_binary_operator(form)\r\n\r\n my_quantifier = formula.first.root\r\n\r\n # Define (or change) the quantifier and define the axioms depending on the binary operator\r\n if formula.root == \"->\":\r\n if my_quantifier == \"A\":\r\n my_quantifier = \"E\"\r\n axiom_scd = 10\r\n else: # \"E\"\r\n my_quantifier = \"A\"\r\n axiom_scd = 11\r\n\r\n elif formula.root == \"&\":\r\n axiom_scd = 2 if my_quantifier == \"A\" else 3\r\n\r\n else: # \"|\" or\r\n axiom_scd = 6 if my_quantifier == \"A\" else 7\r\n\r\n\r\n\r\n # proof for changing quantifier\r\n # because add_proof() is my friend\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n\r\n form2 = Formula(\"->\", proof.conclusion, equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)))\r\n my_map2 = {'R': str(form.substitute({formula.first.variable: Term(\"_\")})),\r\n 'Q': str(pred.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable, \"y\": formula.first.variable}\r\n\r\n step2 = prover.add_instantiated_assumption(form2,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[14 if my_quantifier==\"A\" else 15], my_map2)\r\n\r\n step3 = prover.add_mp(equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)), step1, step2)\r\n\r\n\r\n my_map4 = {'R': str(formula.first.predicate.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable, \"Q\" : str(formula.second)}\r\n form4 = equivalence_of(formula, Formula(my_quantifier, formula.first.variable, form))\r\n step4 = prover.add_instantiated_assumption(form4,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[axiom_scd], my_map4)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, Formula(my_quantifier, formula.first.variable, pred)), [step3, step4])\r\n\r\n return Formula(my_quantifier, formula.first.variable, pred), prover.qed()", "def three2one(prot):\n\n code = {\"GLY\": \"G\", \"ALA\": \"A\", \"LEU\": \"L\", \"ILE\": \"I\",\n \"ARG\": \"R\", \"LYS\": \"K\", \"MET\": \"M\", \"CYS\": \"C\",\n \"TYR\": \"Y\", \"THR\": \"T\", \"PRO\": \"P\", \"SER\": \"S\",\n \"TRP\": \"W\", \"ASP\": \"D\", \"GLU\": \"E\", \"ASN\": \"N\",\n \"GLN\": \"Q\", \"PHE\": \"F\", \"HIS\": \"H\", \"VAL\": \"V\",\n \"M3L\": \"K\", \"MSE\": \"M\", \"CAS\": \"C\"}\n\n newprot = \"\"\n for a in prot:\n newprot += code.get(a, \"?\")\n\n return newprot" ]
[ "0.61940044", "0.5337898", "0.52257913", "0.5214563", "0.5168398", "0.516138", "0.5157602", "0.5139996", "0.5095796", "0.5051536", "0.49918026", "0.4982666", "0.49343562", "0.48973984", "0.48611212", "0.48607367", "0.48571798", "0.4817117", "0.48151308", "0.48129684", "0.47942108", "0.47899354", "0.4787591", "0.47845274", "0.47794843", "0.47781914", "0.47768307", "0.47768307", "0.4773969", "0.47696862" ]
0.7646737
0
Encode premises as propositions, possibly adding implicatures
def encode_premises(self, syllogism, ex_implicatures=True, grice_implicatures=False): to = sylutil.term_order(syllogism[2]) premises = [] pr = [] for i in [0, 1]: pr.append(syllogism[i] + to[i]) pr = sylutil.add_implicatures(pr, existential=ex_implicatures, gricean=grice_implicatures) for p in pr: premises.append(self.encode_proposition(p, True)) return premises
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode(self, peptides):\n raise NotImplementedError", "def to_prenex_normal_form(formula: Formula) -> Tuple[Formula, Proof]:\r\n # Task 11.10\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n\r\n form, form_proof = uniquely_rename_quantified_variables(formula)\r\n prenex, prenex_proof = to_prenex_normal_form_from_uniquely_named_variables(form)\r\n\r\n step1 = prover.add_proof(form_proof.conclusion, form_proof)\r\n step2 = prover.add_proof(prenex_proof.conclusion, prenex_proof)\r\n\r\n ccl = equivalence_of(formula, prenex)\r\n prover.add_tautological_implication(ccl, [step1, step2])\r\n\r\n return prenex, prover.qed()", "def encode_proposition(self, p, hat=False):\r\n\r\n i = self.get_fresh_id()\r\n\r\n if p[0] == \"A\":\r\n # A(x) -> B(x)\r\n return self.Prop(self.PT.implies,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat))\r\n elif p[0] == \"E\":\r\n # not (A(x) and B(x))\r\n return self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat)),\r\n None)\r\n elif p[0] == \"I\":\r\n # A(a) and B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat))\r\n else:\r\n # A(a) and not B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.Prop(self.PT.negation,\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat),\r\n None))", "def _encode_pre(self):\n\t\tpre_data = self.config.get('pre_data')\n\t\tif pre_data is not None:\n\t\t\tpre_data = self._encode_data(pre_data, self.config.get('pre_data_bits'))\n\t\t\tif self.config.get('pre'):\n\t\t\t\tpre_pulse = self._encode_tuple(self.config['pre'])\n\t\t\t\treturn pre_data + pre_pulse\n\t\t\telse:\n\t\t\t\treturn pre_data", "def encode(self, preprocessed: List[str]) -> List[int]:\n return [\n self.vocab.get(statement, self.unknown_vocab_element)\n for statement in preprocessed\n ]", "def classical_preprocessing(*args, **kwargs):\r\n qnode.construct(args, kwargs)\r\n return qml.math.stack(qnode.qtape.get_parameters())", "def preprocess(data):\n raise NotImplementedError", "def preprocess(sent):\n return sent", "def preprocess(self, latent_codes, **kwargs):\n raise NotImplementedError(f'Should be implemented in derived class!')", "def encode_point(P):\n return P if type(P)==type('') else str([list(c) for c in P]).replace(\" \", \"\")", "def preprocess(self):", "def __init__(self, prepositions,preposition_data, bigram_prob, pos_trigrams_prob):\n self._bigram_prob = bigram_prob\n self._trigram_prob = pos_trigrams_prob\n self._prepositions = tuple(prepositions)\n self._preposition_data = preposition_data\n self._outcome_and_sents = []\n for key in self._preposition_data.keys():\n sentences = self._preposition_data[key]\n for sents in sentences:\n temp = []\n temp.append(self._prepositions.index(key))\n temp.append(sents)\n self._outcome_and_sents.append(temp)", "def encoder(ne, nj):\n #contrainte sup les equipes ne peuvent pas s'affronter elles-meme\n contrainte = ''\n for e in range(ne):\n for j in range(nj):\n contrainte += str(-codage(ne,nj,j,e,e))+' 0\\n'\n return contrainte+encoderC1(ne, nj) +'\\n'+ encoderC2(ne, nj)+'\\n'+ \\\n contrainteExtDimanche(ne,nj,0.5)+'\\n'+contrainteDomDimanche(ne,nj,0.4)+'\\n'+ \\\n 
contrainteExtConsecutif(ne,nj)+'\\n'+contrainteDomConsecutif(ne,nj)", "def _process_prosody(sonority):\n assert 9 not in sonority[1:-1]\n assert sonority[0] == sonority[-1] == 9\n\n # create the output values\n psequence = []\n first = True # stores whether first syllable is currently being processed\n\n for i in range(1, len(sonority) - 1):\n # get a segment with context\n a, b, c = sonority[i - 1], sonority[i], sonority[i + 1]\n\n if b == 7: # a vowel\n if first:\n psequence.append('X')\n first = False\n elif c == 9: # last\n psequence.append('Z')\n else:\n psequence.append('Y')\n elif b == 8: # a tone\n psequence.append('T')\n elif a >= b >= c or c == 8: # descending\n if c == 9: # word final position\n psequence.append('Z' if b == 7 else 'N') # vowel or consonant\n else:\n if first:\n first = False\n psequence.append('A')\n else:\n psequence.append('L')\n elif b < c or a > b <= c or a < b <= c: # ascending\n # check for syllable first\n if a == 9:\n psequence.append('A')\n elif a >= b:\n if c == 9:\n psequence.append('N')\n else:\n if psequence[-1] != 'A':\n psequence = psequence[:-1] + [psequence[-1].replace('L', 'M')] + ['B']\n else:\n psequence.append('C')\n else:\n psequence.append('C')\n elif a < b > c: # consonant peak\n if first:\n psequence.append('X')\n first = False\n else:\n psequence.append('Y')\n else:\n raise ValueError(\n \"Conversion to prosodic string failed due to a condition which was not \"\n \"defined in the convertion, for details compare the numerical string \"\n \"{0} with the profile string {1}\".format(sonority, psequence))\n return psequence", "def productions(self):\n P = {}\n for line in self.grammar.split(sep=';'):\n left, right = [s.strip() for s in line.split(sep='=', maxsplit=1)]\n right = [s.strip() for s in right.split(sep='|')]\n P[left] = right\n for N in P:\n if '[' + N + ']' in self.grammar:\n P[N].append('eps')\n return P", "def write_preverbs(recs,fileout):\n fout = codecs.open(fileout,'w')\n n = 0\n nadj=0\n for rec in recs:\n L = rec.L # headword record number\n hw = rec.hw # the headword\n pfx = rec.pfx # the preverb prefixes\n pfxhw = rec.pfxhw\n linenum = rec.linenum\n out = \"%s:%s:%s:%s:%s\" %(L,hw,pfx,pfxhw,linenum)\n fout.write(out + '\\n')\n n = n + 1\n dumb_pfxhw = pfx + hw\n if dumb_pfxhw != pfxhw:\n nadj = nadj+1\n outadj = \"ADJUST %03d: %s:%s:%s:%s (dumb=%s)\" %(nadj,L,hw,pfx,pfxhw,dumb_pfxhw)\n try:\n #print outadj.encode('utf-8')\n pass\n except :\n print \"ERROR PRINTING for line=\",n,rec.line\n fout.close()\n print n,\"records written to\",fileout\n print nadj,\"prefixed verbs required sandhi adjustments\"", "def stage_two_preprocessing(data: pd.Series) -> pd.Series:\n # designed to be run after remove_contractions\n data_ = data.dropna()\n data_ = remove_punctuation(data_)\n data_ = numbers_to_words(data_)\n data_ = remove_stopwords(data_)\n return data_", "def edit_probs(result):\n for i in range(TOP_E):\n p = result.data[i][1]\n p = round(p, 4)\n # p_str = str(p)[1:]\n result.data[i][1] = p\n\n return result", "def to_poincare(s,axis=0):\n d = list(np.shape(s))\n d[axis] = 4\n p = np.empty(shape=d)\n pv = p.swapaxes(0,axis)\n pv[0] = Stokes.intensity(s,axis)\n pv[1] = Stokes.dop(s,axis)\n pv[2] = Stokes.aop(s,axis)\n pv[3] = Stokes.ella(s,axis)\n return p", "def to_prenex_normal_form_from_uniquely_named_variables(formula: Formula) -> \\\r\n Tuple[Formula, Proof]:\r\n assert has_uniquely_named_variables(formula)\r\n # Task 11.9\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n # First 
case\r\n if is_relation(formula.root) or is_equality(formula.root):\r\n prover.add_tautology(equivalence_of(formula, formula))\r\n return formula, prover.qed()\r\n\r\n elif is_unary(formula.root):\r\n form, proof = to_prenex_normal_form_from_uniquely_named_variables(formula.first)\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n step2 = prover.add_tautological_implication(equivalence_of(formula, Formula(\"~\", form)), [step1])\r\n\r\n neg_form, neg_proof = pull_out_quantifications_across_negation(Formula(\"~\", form))\r\n step3 = prover.add_proof(neg_proof.conclusion, neg_proof)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, neg_form), [step2, step3])\r\n\r\n return neg_form, prover.qed()\r\n\r\n elif is_binary(formula.root):\r\n left_f, left_p = to_prenex_normal_form_from_uniquely_named_variables(formula.first)\r\n step1 = prover.add_proof(left_p.conclusion, left_p)\r\n\r\n right_f, right_p = to_prenex_normal_form_from_uniquely_named_variables(formula.second)\r\n step2 = prover.add_proof(right_p.conclusion, right_p)\r\n\r\n form, proof = pull_out_quantifications_across_binary_operator(Formula(formula.root, left_f, right_f))\r\n step3 =prover.add_proof(proof.conclusion, proof)\r\n\r\n step4 = prover.add_tautological_implication(equivalence_of(Formula(formula.root, formula.first, formula.second), Formula(formula.root, left_f, right_f)), [step1, step2])\r\n\r\n prover.add_tautological_implication(equivalence_of(Formula(formula.root, formula.first, formula.second), form), [step4, step3])\r\n\r\n return form, prover.qed()\r\n\r\n\r\n else: # is_quantifier(formula.root)\r\n form, proof = to_prenex_normal_form_from_uniquely_named_variables(formula.predicate)\r\n\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n\r\n map = {\"x\":formula.variable, \"y\":formula.variable, \"R\":form.substitute({formula.variable: Term(\"_\")}), \"Q\":formula.predicate.substitute({formula.variable:Term(\"_\")})}\r\n step2 = prover.add_instantiated_assumption(ADDITIONAL_QUANTIFICATION_AXIOMS[14 if formula.root == \"A\" else 15].instantiate(map), ADDITIONAL_QUANTIFICATION_AXIOMS[14 if formula.root == \"A\" else 15], map)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, Formula(formula.root, formula.variable, form)), [step2, step1])\r\n\r\n return Formula(formula.root, formula.variable, form), prover.qed()", "def _preorder_encode(self):\n features = np.expand_dims(self.get_features(), axis=0)\n\n features = np.pad(features, (1, 0),\n 'constant', constant_values=(0, 0))\n return features.transpose(1, 0), np.array([[1], [0], [0]])", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def preprocess(self):\n pass", "def find_needed_premises(self):\n premises = []\n self.rule.left.visit_find_premises(premises)\n self.needed_premises = premises", "def _preproccAnki(anki):\n anki = mathSyntax.sub(r\"\\g<1>\\g<2>\\g<1>\", anki)\n anki_split = re.split(r\"(\\$\\$?)\",anki)\n if len(anki_split) > 1:\n anki_split = _preproccSplits(anki_split)\n anki = \"\".join(anki_split)\n else:\n anki = _escapeMarkdownChars(anki)\n anki = _replaceBrWithNl(anki)\n return anki", "def make_productions_preterminals(self):\n # first we add a bunch of preterminals.\n preterminal_productions = set()\n for i in self.terminals:\n rhs = (i,)\n lhs = random.choice(self.nonterminals)\n preterminal_productions.add((lhs, rhs))\n while len(preterminal_productions) < self.number_preterminal_productions:\n lhs = random.choice(self.nonterminals)\n rhs = (random.choice(self.terminals),)\n 
preterminal_productions.add((lhs, rhs))\n return preterminal_productions", "def preprocess_smile(self, smile):\n pass", "def prepross(I):\r\n I = I[35:195]\r\n I = I[::2, ::2, 0]\r\n I[I == 144] = 0\r\n I[I == 109] = 0\r\n I[I != 0] = 1\r\n return I.astype(np.float).ravel()", "def preprocess(series):\n def delimit(sent):\n \"\"\"Inserts special symbols for start and end into sentence\"\"\"\n arr = sent.split()\n if arr:\n arr.insert(0, '<START>')\n arr.append('<END>')\n else:\n arr = ['<EMPTY>']\n return arr\n return series.apply(delimit)" ]
[ "0.5838688", "0.5672843", "0.56586784", "0.5371028", "0.5223843", "0.5174113", "0.51273966", "0.50668716", "0.49949104", "0.49896973", "0.49739707", "0.49531832", "0.48929024", "0.4891601", "0.48232752", "0.4822193", "0.48097894", "0.48033124", "0.47871912", "0.47796017", "0.4764412", "0.47635716", "0.47635716", "0.47635716", "0.475806", "0.47576797", "0.4751002", "0.47442445", "0.4730418", "0.47231773" ]
0.7270002
0
same_nameness = True "notational variant", see p. 197 >>> m = PSYCOP() >>> a0 = m.Prop(m.PT.atomic, m.Atom("A", 0, False, False), None) >>> a1 = m.Prop(m.PT.atomic, m.Atom("A", 1, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", 2, False, False), None) >>> p1 = m.Prop(m.PT.implies, a0, b) >>> p2 = m.Prop(m.PT.implies, a1, b) >>> m.isomorphic(p1,p2) True >>> m.isomorphic(m.Prop(m.PT.negation, p1, None),m.Prop(m.PT.negation, p2, None)) True >>> m.isomorphic(p1,m.Prop(m.PT.negation, p2, None)) False >>> p3 = m.Prop(m.PT.conjunction, a1, b) >>> m.isomorphic(p1,p3) False
def isomorphic(self, p1, p2, same_nameness=False): if p1 is None and p2 is None: return True if p1 is None or p2 is None: return False if type(p1) is self.Atom and type(p2) is self.Atom: if p1.predicate == p2.predicate: if same_nameness: if p1.is_name == p2.is_name: return True return False return True return False if type(p1) is self.Atom or type(p2) is self.Atom: return False if p1.type == p2.type: return self.isomorphic(p1.v1, p2.v1) and self.isomorphic(p1.v2, p2.v2) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_isomorphic_general(self):\n # check that hill formula fails are caught\n ethanol = create_ethanol()\n acetaldehyde = create_acetaldehyde()\n assert ethanol.is_isomorphic_with(acetaldehyde) is False\n assert acetaldehyde.is_isomorphic_with(ethanol) is False\n # check that different orderings work with full matching\n ethanol_reverse = create_reversed_ethanol()\n assert ethanol.is_isomorphic_with(ethanol_reverse) is True\n # check a reference mapping between ethanol and ethanol_reverse matches that calculated\n ref_mapping = {0: 8, 1: 7, 2: 6, 3: 3, 4: 4, 5: 5, 6: 1, 7: 2, 8: 0}\n assert (\n Molecule.are_isomorphic(ethanol, ethanol_reverse, return_atom_map=True)[1]\n == ref_mapping\n )\n # check matching with nx.Graph atomic numbers and connectivity only\n assert (\n Molecule.are_isomorphic(\n ethanol,\n ethanol_reverse.to_networkx(),\n aromatic_matching=False,\n formal_charge_matching=False,\n bond_order_matching=False,\n atom_stereochemistry_matching=False,\n bond_stereochemistry_matching=False,\n )[0]\n is True\n )\n # check matching with nx.Graph with full matching\n assert ethanol.is_isomorphic_with(ethanol_reverse.to_networkx()) is True\n\n from openff.toolkit.topology.topology import Topology\n\n topology = Topology.from_molecules(ethanol)\n assert (\n Molecule.are_isomorphic(\n ethanol,\n [*topology.molecules][0],\n aromatic_matching=False,\n formal_charge_matching=False,\n bond_order_matching=False,\n atom_stereochemistry_matching=False,\n bond_stereochemistry_matching=False,\n )[0]\n is True\n )\n # test hill formula passes but isomorphic fails\n mol1 = Molecule.from_smiles(\"Fc1ccc(F)cc1\")\n mol2 = Molecule.from_smiles(\"Fc1ccccc1F\")\n assert mol1.is_isomorphic_with(mol2) is False\n assert mol2.is_isomorphic_with(mol1) is False", "def test_is_isomorphic(self):\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz1['dict'])\n mol2 = converter.s_bonds_mol_from_xyz(self.xyz1['dict_diff_order'])\n self.assertTrue(mol1.is_isomorphic(mol2, save_order=True, strict=False))\n\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz11['dict'])\n mol2 = converter.s_bonds_mol_from_xyz(self.xyz11['dict_diff_order'])\n self.assertTrue(mol1.is_isomorphic(mol2, save_order=True, strict=False))\n\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz10['dict'])\n mol2 = converter.s_bonds_mol_from_xyz(self.xyz10['dict_diff_order1'])\n mol3 = converter.s_bonds_mol_from_xyz(self.xyz10['dict_diff_order2'])\n self.assertTrue(mol1.is_isomorphic(mol2, save_order=True, strict=False))\n self.assertTrue(mol1.is_isomorphic(mol3, save_order=True, strict=False))", "def is_isomorphic(A,B):\n return A.cardinality == B.cardinality and is_subalgebra(A,B)", "def test_isomorphic_general(self):\n # check that hill formula fails are caught\n ethanol = create_ethanol()\n acetaldehyde = create_acetaldehyde()\n assert ethanol.is_isomorphic_with(acetaldehyde) is False\n assert acetaldehyde.is_isomorphic_with(ethanol) is False\n # check that different orderings work with full matching\n ethanol_reverse = create_reversed_ethanol()\n assert ethanol.is_isomorphic_with(ethanol_reverse) is True\n # check a reference mapping between ethanol and ethanol_reverse matches that calculated\n ref_mapping = {0: 8, 1: 7, 2: 6, 3: 3, 4: 4, 5: 5, 6: 1, 7: 2, 8: 0}\n assert (\n Molecule.are_isomorphic(ethanol, ethanol_reverse, return_atom_map=True)[1]\n == ref_mapping\n )\n # check matching with nx.Graph atomic numbers and connectivity only\n assert (\n Molecule.are_isomorphic(\n ethanol,\n ethanol_reverse.to_networkx(),\n 
aromatic_matching=False,\n formal_charge_matching=False,\n bond_order_matching=False,\n atom_stereochemistry_matching=False,\n bond_stereochemistry_matching=False,\n )[0]\n is True\n )\n # check matching with nx.Graph with full matching\n assert ethanol.is_isomorphic_with(ethanol_reverse.to_networkx()) is True\n # check matching with a TopologyMolecule class\n from openforcefield.topology.topology import Topology, TopologyMolecule\n\n topology = Topology.from_molecules(ethanol)\n topmol = TopologyMolecule(ethanol, topology)\n assert (\n Molecule.are_isomorphic(\n ethanol,\n topmol,\n aromatic_matching=False,\n formal_charge_matching=False,\n bond_order_matching=False,\n atom_stereochemistry_matching=False,\n bond_stereochemistry_matching=False,\n )[0]\n is True\n )\n # test hill formula passes but isomorphic fails\n mol1 = Molecule.from_smiles(\"Fc1ccc(F)cc1\")\n mol2 = Molecule.from_smiles(\"Fc1ccccc1F\")\n assert mol1.is_isomorphic_with(mol2) is False\n assert mol2.is_isomorphic_with(mol1) is False", "def is_isomorphic(self, other, return_map=False):\n if return_map:\n if not(self.degree() == other.degree() and\n self.length() == other.length()):\n return False, None\n sn, sn_map = self.relabel(return_map=True)\n on, on_map = other.relabel(return_map=True)\n if sn != on:\n return False, None\n return True, sn_map * ~on_map\n\n return (self.degree() == other.degree() and\n self.length() == other.length() and\n self.relabel() == other.relabel())", "def test_check_isomorphism(self):\n mol1 = Molecule(smiles='[O-][N+]#N')\n mol2 = Molecule(smiles='[N-]=[N+]=O')\n self.assertTrue(converter.check_isomorphism(mol1, mol2))", "def really_covalent_isomorphic(mol1, mol2):\n return nx.is_isomorphic(\n mol1.covalent_graph,\n mol2.covalent_graph,\n node_match = iso.categorical_node_match('specie', None)\n )", "def _io_similar(lhs, rhs):\n ldecl = lhs.decl()\n rdecl = rhs.decl()\n if not ldecl[::2] == rdecl[::2]: # names are the same\n return False\n size = len(ldecl)\n return all(ldecl[i] is rdecl[i] for i in range(1, size, 2))", "def test_isomorphic_perumtations(self, inputs):\n # get benzene with all aromatic atoms/bonds labeled\n benzene = Molecule.from_smiles(\"c1ccccc1\")\n # get benzene with no aromatic labels\n benzene_no_aromatic = create_benzene_no_aromatic()\n # now test all of the variations\n assert (\n Molecule.are_isomorphic(\n benzene,\n benzene_no_aromatic,\n aromatic_matching=inputs[\"aromatic_matching\"],\n formal_charge_matching=inputs[\"formal_charge_matching\"],\n bond_order_matching=inputs[\"bond_order_matching\"],\n atom_stereochemistry_matching=inputs[\"atom_stereochemistry_matching\"],\n bond_stereochemistry_matching=inputs[\"bond_stereochemistry_matching\"],\n )[0]\n is inputs[\"result\"]\n )\n\n assert (\n benzene.is_isomorphic_with(\n benzene_no_aromatic,\n aromatic_matching=inputs[\"aromatic_matching\"],\n formal_charge_matching=inputs[\"formal_charge_matching\"],\n bond_order_matching=inputs[\"bond_order_matching\"],\n atom_stereochemistry_matching=inputs[\"atom_stereochemistry_matching\"],\n bond_stereochemistry_matching=inputs[\"bond_stereochemistry_matching\"],\n )\n is inputs[\"result\"]\n )", "def test_isomorphic_perumtations(self, inputs):\n # get benzene with all aromatic atoms/bonds labeled\n benzene = Molecule.from_smiles(\"c1ccccc1\")\n # get benzene with no aromatic labels\n benzene_no_aromatic = create_benzene_no_aromatic()\n # now test all of the variations\n assert (\n Molecule.are_isomorphic(\n benzene,\n benzene_no_aromatic,\n 
aromatic_matching=inputs[\"aromatic_matching\"],\n formal_charge_matching=inputs[\"formal_charge_matching\"],\n bond_order_matching=inputs[\"bond_order_matching\"],\n atom_stereochemistry_matching=inputs[\"atom_stereochemistry_matching\"],\n bond_stereochemistry_matching=inputs[\"bond_stereochemistry_matching\"],\n )[0]\n is inputs[\"result\"]\n )", "def test_isomorphic_stripped_stereochemistry(self):\n mol1 = Molecule.from_smiles(\"CCC[N@](C)CC\")\n mol2 = Molecule.from_smiles(\"CCC[N@@](C)CC\")\n\n # Ensure default value is respected and order does not matter\n assert Molecule.are_isomorphic(mol1, mol2, strip_pyrimidal_n_atom_stereo=True)\n assert Molecule.are_isomorphic(mol1, mol2)\n assert Molecule.are_isomorphic(mol2, mol1)\n\n assert mol1 == mol2\n assert Molecule.from_smiles(\"CCC[N@](C)CC\") == Molecule.from_smiles(\n \"CCC[N@@](C)CC\"\n )", "def is_graph_isomorphic(self):\n out=True\n for node in self.node_names:\n self.move_to_node(node)\n if not self.check_closed_path:\n out=False\n return out", "def test_isomorphic_striped_stereochemistry(self):\n mol1 = Molecule.from_smiles(\"CCC[N@](C)CC\")\n mol2 = Molecule.from_smiles(\"CCC[N@@](C)CC\")\n\n # Ensure default value is respected and order does not matter\n assert Molecule.are_isomorphic(mol1, mol2, strip_pyrimidal_n_atom_stereo=True)\n assert Molecule.are_isomorphic(mol1, mol2)\n assert Molecule.are_isomorphic(mol2, mol1)\n\n assert mol1 == mol2\n assert Molecule.from_smiles(\"CCC[N@](C)CC\") == Molecule.from_smiles(\n \"CCC[N@@](C)CC\"\n )\n\n mol1 = Molecule.from_smiles(\"CCC[N@](C)CC\")\n mol2 = Molecule.from_smiles(\"CCC[N@@](C)CC\")\n\n assert not Molecule.are_isomorphic(\n mol1,\n mol2,\n strip_pyrimidal_n_atom_stereo=False,\n atom_stereochemistry_matching=True,\n bond_stereochemistry_matching=True,\n )[0]", "def prop_to_eq(prop, all_props, ops):\n if isinstance(prop, DirAlgProp) and Comm(prop.op) in all_props:\n prop.side = Side.BOTH\n prop.derived = True\n return prop.eq(ops)\n else:\n return prop.eq(ops)", "def is_isomorphic(self, s1, s2):\n # encode strings\n enc1, enc2 = [], []\n count1, count2 = 0, 0\n dict1, dict2 = dict(), dict()\n for i in range(len(s1)):\n char1, char2 = s1[i], s2[i]\n if char1 in dict1:\n enc1.append(dict1[char1])\n else:\n count1 += 1\n dict1[char1] = count1\n enc1.append(dict1[char1])\n if char2 in dict2:\n enc2.append(dict2[char2])\n else:\n count2 += 1\n dict2[char2] = count2\n enc2.append(dict2[char2])\n return enc1 == enc2 # compare encodings", "def _do_eq_sympify(self, other):\n for superclass in type(other).__mro__:\n conv = _external_converter.get(superclass)\n if conv is not None:\n return self == conv(other)\n if hasattr(other, '_sympy_'):\n return self == other._sympy_()\n return NotImplemented", "def nonterm_equal(g, n1, n2):\n p1 = g.productions(n1)\n p2 = g.productions(n2)\n\n if len(p1) != len(p2):\n return False\n\n rules1 = {p.rhs() for p in p1}\n rules2 = {p.rhs() for p in p2}\n\n temp = rules1.copy()\n\n rules1.difference_update(rules2)\n rules2.difference_update(temp)\n\n if not rules1:\n return True\n\n def rename(t, from_, to):\n if from_ not in t:\n return tuple(t)\n\n ls = list(t)\n i = ls.index(from_)\n ls[i] = to\n\n return rename(ls, from_, to)\n\n rules1 = {rename(r, n2, n1) for r in rules1}\n rules2 = {rename(r, n2, n1) for r in rules2}\n\n if rules1.difference(rules2):\n return False\n else:\n return True", "def test_disjunct_hs():\n hs1 = LocalSpace(\"1\")\n hs2 = LocalSpace(\"2\")\n alpha, beta = symbols('alpha, beta')\n A = OperatorSymbol('A', hs=hs1)\n B = 
OperatorSymbol('B', hs=hs2)\n assert Commutator.create(A, B) == ZeroOperator\n assert Commutator.create(alpha, beta) == ZeroOperator\n assert Commutator.create(alpha, B) == ZeroOperator\n assert Commutator.create(A, beta) == ZeroOperator", "def prove_implies_self():\n return DeductiveProof(\n InferenceRule([], Formula.from_infix('(p->p)')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('(p->((q->p)->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->((q->p)->p))->((p->(q->p))->(p->p))'), 2, []),\n DeductiveProof.Line(Formula.from_infix('(p->(q->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->(q->p))->(p->p))'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [2, 3])])", "def test_equality(self):\n\n # change .phones\n pw1: PhonologicalWord = PhonologicalWord(\n phones=(\"P\", \"ER0\", \"M\", \"IH1\", \"T\"),\n stress_pattern=[\n Stress.NON_VOWEL,\n Stress.NO_STRESS,\n Stress.NON_VOWEL,\n Stress.PRIMARY,\n Stress.NON_VOWEL,\n ],\n )\n pw2: PhonologicalWord = PhonologicalWord(\n phones=(\"P\", \"ER0\", \"M\", \"IH1\", \"P\"),\n stress_pattern=[\n Stress.NON_VOWEL,\n Stress.NO_STRESS,\n Stress.NON_VOWEL,\n Stress.PRIMARY,\n Stress.NON_VOWEL,\n ],\n )\n self.assertNotEqual(pw1, pw2)\n\n # change .stress_pattern\n pw1: PhonologicalWord = PhonologicalWord(\n phones=(\"P\", \"ER0\", \"M\", \"IH1\", \"T\"),\n stress_pattern=[\n Stress.NON_VOWEL,\n Stress.NO_STRESS,\n Stress.NON_VOWEL,\n Stress.PRIMARY,\n Stress.NON_VOWEL,\n ],\n )\n pw2: PhonologicalWord = PhonologicalWord(\n phones=(\"P\", \"ER0\", \"M\", \"IH1\", \"T\"),\n stress_pattern=[\n Stress.NON_VOWEL,\n Stress.PRIMARY,\n Stress.NO_STRESS,\n Stress.NON_VOWEL,\n Stress.NO_STRESS,\n Stress.NON_VOWEL,\n ],\n )\n self.assertNotEqual(pw1, pw2)", "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def isomorphic(graph1, graph2):\r\n\r\n gd1 = _TripleCanonicalizer(graph1).to_hash()\r\n gd2 = _TripleCanonicalizer(graph2).to_hash()\r\n return gd1 == gd2", "def test_ontology_ic_similarity(ontology):\n\tassert ontology.similarity_ic([\"TO:0000001\"],[\"TO:0000002\"], inherited=False, as_weight=False) == 0\n\tassert ontology.similarity_ic([\"TO:0000001\"],[\"TO:0000003\"], inherited=False, as_weight=False) == 0\n\tassert ontology.similarity_ic([\"TO:0000002\"],[\"TO:0000003\"], inherited=False, as_weight=False) == 0\n\tassert ontology.similarity_ic([\"TO:0000003\"],[\"TO:0000005\"], inherited=False, as_weight=False) == 0.3690702464285426\n\tassert ontology.similarity_ic([\"TO:0000007\"],[\"TO:0000008\"], inherited=False, as_weight=False) == 0.5\n\tassert ontology.similarity_ic([\"TO:0000005\"],[\"TO:0000009\"], inherited=False, as_weight=False) == 0\n\n\tassert 
ontology.similarity_ic([\"TO:0000001\"],[\"TO:0000002\",\"TO:0000001\"], inherited=False, as_weight=False) == 0\n\tassert ontology.similarity_ic([\"TO:0000003\"],[\"TO:0000001\",\"TO:0000009\"], inherited=False, as_weight=False) == 0\n\tassert ontology.similarity_ic([\"TO:0000002\"],[\"TO:0000003\",\"TO:0000002\"], inherited=False, as_weight=False) == 0.3690702464285426\n\tassert ontology.similarity_ic([\"TO:0000003\"],[\"TO:0000005\",\"TO:0000002\"], inherited=False, as_weight=False) == 0.3690702464285426\n\tassert ontology.similarity_ic([\"TO:0000008\"],[\"TO:0000008\",\"TO:0000007\"], inherited=False, as_weight=False) == 1.3690702464285427\n\tassert ontology.similarity_ic([\"TO:0000005\"],[\"TO:0000009\",\"TO:0000002\"], inherited=False, as_weight=False) == 0.3690702464285426", "def is_consistent(self, other):\n return self.name != other.name or self.type is other.type", "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def is_equivalence(self) -> bool:", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def pod_equals(x, y):\n return type(x) == type(y) and x.__dict__ == y.__dict__", "def is_isomorphic_fast(self, s1, s2):\n # encode strings\n count1, count2 = 0, 0\n dict1, dict2 = dict(), dict()\n for i in range(len(s1)):\n char1, char2 = s1[i], s2[i] # current characters\n if char1 in dict1:\n curr1 = dict1[char1] # current index of character in s1\n else:\n count1 += 1\n dict1[char1] = count1\n curr1 = dict1[char1]\n if char2 in dict2:\n curr2 = dict2[char2] # current index of character in s2\n else:\n count2 += 1\n dict2[char2] = count2\n curr2 = dict2[char2]\n if curr1 != curr2:\n return False\n return True", "def __eq__(A, B):\n if not isinstance(A, type(B)):\n return NotImplemented\n return A.domain == B.domain and A.rep == B.rep" ]
[ "0.62979865", "0.6274842", "0.6224292", "0.6149181", "0.6145336", "0.6130434", "0.5929788", "0.59002215", "0.57942015", "0.57850647", "0.56774276", "0.56770355", "0.5673986", "0.55917203", "0.55060273", "0.5470484", "0.5424346", "0.5401022", "0.5356674", "0.5350009", "0.53413874", "0.5335867", "0.5314499", "0.5290897", "0.5288424", "0.5266159", "0.5264136", "0.5246088", "0.5244986", "0.52382547" ]
0.75391
0
PSYCOP transitivity rule >>> m = PSYCOP() >>> i = m.get_fresh_id() >>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None) >>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None) >>> c = m.Prop(m.PT.atomic, m.Atom("C", i, False, False), None) >>> p1 = m.Prop(m.PT.implies, a, b) >>> p2 = m.Prop(m.PT.implies, b, c) >>> m.rule_transitivity(p1, p2, set()) [(A(x_1) > C(x_1))]
def rule_transitivity(self, p1, p2, domain): if p1.type == self.PT.implies and p2.type == self.PT.implies: if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \ p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic: if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id: if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name: if p1.v2.v1.predicate == p2.v1.v1.predicate: i = self.get_fresh_id() p = self.Prop(self.PT.implies, self.atom_prop_replace_properties(p1.v1, i), self.atom_prop_replace_properties(p2.v2, i)) if not self.contains_isomorphic_proposition(domain, p): return [p] return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_conversion(self, p, domain):\r\n\r\n if p.type == self.PT.negation:\r\n if p.v1.type == self.PT.conjunction:\r\n if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:\r\n i = self.get_fresh_id()\r\n p_new = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p.v1.v2, i),\r\n self.atom_prop_replace_properties(p.v1.v1, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p_new):\r\n return [p_new]\r\n return []", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec", "def cross_transitivity_sparse(self, node_list1, node_list2):\n cross_degree = self.cross_degree(node_list1, node_list2)\n\n # Get sparse adjacency matrix\n A = self.sp_A[node_list1+node_list2, :][:, node_list1+node_list2]\n # Get subnetwork sizes\n N1, N2 = len(node_list1), len(node_list2)\n # Initialize\n cross_transitivity = 0.0\n # Set counter\n counter_triangles = 0.0\n counter_triples = 0.0\n # Calculate cross transitivity from subnetwork 1 to subnetwork 2\n # Loop over nodes in subnetwork 1\n for i in range(N1):\n node1 = i\n if cross_degree[i] > 1:\n # Loop over unique pairs of nodes in subnetwork 2\n for j in range(N1, N1+N2):\n node2 = j\n for k in range(N1, j):\n node3 = k\n if A[node1, node2] == 1 and A[node1, node3] == 1:\n counter_triples += 1\n if A[node2, node3] == 
1:\n counter_triangles += 1\n\n if counter_triples:\n cross_transitivity = counter_triangles / counter_triples\n return cross_transitivity", "def prove_implies_self():\n return DeductiveProof(\n InferenceRule([], Formula.from_infix('(p->p)')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('(p->((q->p)->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->((q->p)->p))->((p->(q->p))->(p->p))'), 2, []),\n DeductiveProof.Line(Formula.from_infix('(p->(q->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->(q->p))->(p->p))'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [2, 3])])", "def test_CONTRADICTION():\n\tk, outputs = 2, [0,0,0,0]\n\t# Prime Implicants\n\ttrue_pi0s = set(['22'])\n\ttrue_pi1s = set([])\n\n\ttdt0, tdt1 = make_transition_density_tables(k=k, outputs=outputs)\n\tpi0s, pi1s = find_implicants_qm(tdt0) , find_implicants_qm(tdt1)\n\n\tassert (pi0s == true_pi0s) , ('Prime Implicants for 0 does not match. %s != %s' % (pi0s,true_pi0s))\n\tassert (pi1s == true_pi1s) , ('Prime Implicants for 1 does not match. %s != %s' % (pi1s,true_pi1s))\n\t# Two Symbols\n\ttrue_ts0s = [('22',[],[[0,1]])]\n\ttrue_ts1s = []\n\n\tts0s,ts1s = find_two_symbols_v2(k=k, prime_implicants=pi0s) , find_two_symbols_v2(k=k, prime_implicants=pi1s)\n\n\tassert (ts0s == true_ts0s) , ('Two Symbol for 0 does not match. %s != %s' % (ts0s,true_ts0s))\n\tassert (ts1s == true_ts1s) , ('Two Symbol for 1 does not match. %s != %s' % (ts1s,true_ts1s))", "def rule_backward_conjunctive_syllogism(self, p, g):\r\n\r\n if g.type == self.PT.negation and p.type == self.PT.negation:\r\n # g = NOT(A(x))\r\n if p.v1.type == self.PT.conjunction:\r\n # p = NOT(A(x) AND B(x))\r\n if self.matching(p.v1.v1, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v2, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n elif self.matching(p.v1.v2, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v1, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n return []", "def nsi_cross_transitivity(self, node_list1, node_list2):\n return _nsi_cross_transitivity(\n to_cy(self.adjacency + np.eye(self.N, dtype=ADJ), ADJ),\n np.array(node_list1, dtype=NODE),\n np.array(node_list2, dtype=NODE),\n to_cy(self.node_weights, DWEIGHT))", "def ClusteringTransitivity(graph):\n transitivity = nx.transitivity(graph)\n return transitivity", "def test_action_independence_multiple(self):\n DST1, DST2 = ('SET_FIELD', ('IPV4_DST', 0x1)), ('SET_FIELD', ('IPV4_DST', 0x2))\n SRC1, SRC2 = ('SET_FIELD', ('IPV4_SRC', 0x1)), ('SET_FIELD', ('IPV4_SRC', 0x2))\n OUT1, OUT2 = ('OUTPUT', 1), ('OUTPUT', 2)\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1, src:2 -> output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 1, None),\n ('IPV4_SRC', 2, None)]),\n instructions=inst_from_acts([OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n \"\"\"\n dst:1 -> src:2, output:1, dst:2, src:1, output:2\n dst:0/31 -> dst:1, src:2, output:1, dst:2, src:1, output:2\n \"\"\"\n n3 = normalise([\n Rule(priority=10,\n 
match=Match([('IPV4_DST', 1, None)]),\n instructions=inst_from_acts([SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x0, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DST1, SRC2, OUT1, DST2, SRC1, OUT2])),\n Rule(priority=0)\n ], match_redundancy=True)\n\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def test_action_independence_single(self):\n SF1, OUT = ('SET_FIELD', ('IPV4_DST', 0x01010101)), ('OUTPUT', 6)\n DEC_TTL = ('DEC_NW_TTL', None)\n # 0.1.1.0/30 -> ip:1.1.1.1, output:1\n n1 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.1/32 -> output:1\n # 1.1.1.0/31 -> ip:1.1.1.1, output:1\n n2 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=0)\n ])\n # 1.1.1.0/32 -> ip:1.1.1.1, output1\n # 1.1.1.0/31 -> output:1\n n3 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010100, None)]),\n instructions=inst_from_acts([SF1, OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n match=Match([('IPV4_DST', 0x01010101, None)]),\n instructions=inst_from_acts([OUT])),\n Rule(priority=9,\n match=Match([('IPV4_DST', 0x01010100, 0xFFFFFFFE)]),\n instructions=inst_from_acts([DEC_TTL, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertFalse(check_equal(n1, n4))\n self.assertTrue(check_equal(n2, n3))\n self.assertTrue(check_equal(n1, n3))", "def cross_transitivity(self, node_list1, node_list2):\n return _cross_transitivity(\n to_cy(self.adjacency, ADJ),\n np.array(node_list1, dtype=NODE), np.array(node_list2, dtype=NODE))", "def encode_proposition(self, p, hat=False):\r\n\r\n i = self.get_fresh_id()\r\n\r\n if p[0] == \"A\":\r\n # A(x) -> B(x)\r\n return self.Prop(self.PT.implies,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat))\r\n elif p[0] == \"E\":\r\n # not (A(x) and B(x))\r\n return self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat)),\r\n None)\r\n elif p[0] == \"I\":\r\n # A(a) and B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat))\r\n else:\r\n # A(a) and not B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.Prop(self.PT.negation,\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat),\r\n None))", "def thermal_operating_costs_rule(_m, y, s):\r\n\r\n return sum((m.C_MC[g, y] + ((m.EMISSIONS_RATE[g] - m.baseline[y]) * m.permit_price[y])) * m.p[g, y, s, t]\r\n for g in m.G_THERM for t in m.T)", "def association_rule(mtv, X, Y):\n\n prob_X = mtv.query(X)\n prob_Y = mtv.query(Y)\n prob_XY = mtv.query(X|Y)\n\n # X -> Y\n confX_Y = prob_XY / prob_X\n liftX_Y = confX_Y / prob_Y\n\n ruleX_Y = AssociationRule()\n ruleX_Y.X = X\n ruleX_Y.Y = Y\n ruleX_Y.confidence = confX_Y\n ruleX_Y.lift = liftX_Y\n\n # Y -> X\n 
confY_X = prob_XY / prob_Y\n liftY_X = confY_X / prob_X\n\n ruleY_X = AssociationRule()\n ruleY_X.X = Y\n ruleY_X.Y = X\n ruleY_X.confidence = confY_X\n ruleY_X.lift = liftY_X\n\n return (ruleX_Y, ruleY_X)", "def hydro_operating_costs_rule(_m, y, s):\r\n\r\n return sum(m.C_MC[g, y] * m.p[g, y, s, t] for g in m.G_E_HYDRO for t in m.T)", "def A_trans(p, trans, Ci, Tleaf=None):\n\n # get CO2 diffusive conduct.\n gc, __, __ = leaf_energy_balance(p, trans, Tleaf=Tleaf)\n A_P = conv.U * gc * (p.CO2 - Ci) / (p.Patm * conv.MILI)\n\n try:\n A_P[np.isclose(np.squeeze(gc), cst.zero, rtol=cst.zero,\n atol=cst.zero)] = cst.zero\n\n except TypeError:\n pass\n\n return A_P", "def mtx_minimize(p, trans, all_Cis, photo):\n\n demand, __, __, __ = calc_photosynthesis(p, np.expand_dims(trans, axis=1),\n all_Cis, photo)\n supply = A_trans(p, np.expand_dims(trans, axis=1), all_Cis)\n\n # closest match to ~ 0. (i.e. supply ~ demand)\n idx = bn.nanargmin(abs(supply - demand), axis=1)\n\n # each Ci on the transpiration stream\n Ci = np.asarray([all_Cis[e, idx[e]] for e in range(len(trans))])\n Ci = np.ma.masked_where(idx == 0, Ci)\n\n return Ci", "def action_cost(self, action):\n\n\t\tif len(action) == 1:\n\n\t\t\t# Only 1 person is crossing\n\t\t\tpersonI = action[0]\n\t\t\treturn self.crossingTime[personI]\n\n\t\telif len(action) == 2:\n\n\t\t\t# 2 people are crossing\n\t\t\tpersonI = action[0]\n\t\t\tpersonJ = action[1]\n\t\t\tctPersonI = self.crossingTime[personI] # the Crossing Time of the first person\n\t\t\tctPersonJ = self.crossingTime[personJ] # the Crossing Time of the second person\n\t\t\treturn max(ctPersonI, ctPersonJ)", "def generate_connectivity_constraint_all(problem):\n\n if problem.num_vars == None:\n problem.compute_num_var()\n\n ret = Constraint()\n\n # Iterator over all (v, t) subsets in the graph\n for b, b_r in enumerate(problem.src):\n # Convert each set in the iterator to (v,t) format\n add_S = map(\n lambda S: list(map(problem.get_time_augmented_n_t, S)),\n problem.powerset_exclude_agent(b_r),\n )\n ret &= generate_connectivity_constraint(problem, [b], add_S)\n\n return ret", "def rule(model, i, j):\n active = 1-self.task_spread[j]\n den = sum(tril[i, :])\n ind = model.timeslots\n total = sum(tril[i, k] * (\n 1 - model.A[k, j] - model.A2[k, j] - model.A3[k, j] - model.A4[\n k, j]) for k in ind)\n total /= den\n total *= active\n return -1 + EPS, model.CTl[i, j] - total, EPS + self.slack_cont", "def test_RULE_110():\n\tk, outputs = 3, [0,1,1,1,0,1,1,0]\n\n\ttrue_pi0s = set(['200','111'])\n\ttrue_pi1s = set(['021','201','012','210'])\n\n\ttdt0, tdt1 = make_transition_density_tables(k=k, outputs=outputs)\n\tpi0s, pi1s = find_implicants_qm(tdt0) , find_implicants_qm(tdt1)\n\n\tassert (pi0s == true_pi0s) , ('Prime Implicants for 0 does not match. %s != %s' % (pi0s,true_pi0s))\n\tassert (pi1s == true_pi1s) , ('Prime Implicants for 1 does not match. %s != %s' % (pi1s,true_pi1s))\n\t# Two Symbols\n\ttrue_ts0s = [('200',[],[[1,2]]),('111',[],[[0,1,2]])]\n\ttrue_ts1s = [('201',[[0,1]],[]),('012',[[1,2]],[]),('201',[[1,2]],[]),('012',[[0,2]],[])]\n\n\tts0s,ts1s = find_two_symbols_v2(k=k, prime_implicants=pi0s) , find_two_symbols_v2(k=k, prime_implicants=pi1s)\n\n\tassert (ts0s == true_ts0s) , ('Two Symbol for 0 does not match. %s != %s' % (ts0s,true_ts0s))\n\tassert (ts1s == true_ts1s) , ('Two Symbol for 1 does not match. 
%s != %s' % (ts1s,true_ts1s))", "def test_nofission_as_unprofitable():\n grid = Grid(shape=(20, 20))\n x, y = grid.dimensions\n t = grid.stepping_dim\n\n yl = SubDimension.left(name='yl', parent=y, thickness=4)\n yr = SubDimension.right(name='yr', parent=y, thickness=4)\n\n u = TimeFunction(name='u', grid=grid)\n\n eqns = [Eq(u.forward, u[t + 1, x, y + 1] + 1.).subs(y, yl),\n Eq(u.forward, u[t + 1, x, y - 1] + 1.).subs(y, yr)]\n\n op = Operator(eqns, opt='fission')\n\n assert_structure(op, ['t,x,yl', 't,x,yr'], 't,x,yl,yr')", "def pr_one_constraint(self, output_prop):\n raise NotImplementedError(\"subclasses need to override this method\")", "def _eval_prior_casadi(self, state, action):\n\n return mtimes(self.a, state.T) + mtimes(self.b, action.T)", "def test_distinguish_path_polarity1():\n Monomer('A')\n Monomer('B', ['act'], {'act' :['y', 'n']})\n Monomer('C', ['T185'], {'T185':['u', 'p']})\n Parameter('k', 1)\n Rule('A_activate_B', A() + B(act='n') >> A() + B(act='y'), k)\n Rule('B_dephos_C', B(act='y') + C(T185='p') >>\n B(act='y') + C(T185='u'), k)\n Initial(A(), k)\n Initial(B(act='y'), k)\n Initial(C(T185='p'), k)\n Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')\n Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')\n Annotation(C, 'http://identifiers.org/hgnc/HGNC:3')\n Annotation('A_activate_B', 'A', 'rule_has_subject')\n Annotation('A_activate_B', 'B', 'rule_has_object')\n Annotation('B_dephos_C', 'B', 'rule_has_subject')\n Annotation('B_dephos_C', 'C', 'rule_has_object')\n C.site_annotations = [\n Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),\n Annotation('T185', 'T', 'is_residue'),\n Annotation('T185', '185', 'is_position'),\n ]\n # Create the model checker\n stmts = _path_polarity_stmt_list()\n mc = ModelChecker(model, stmts)\n results = mc.check_model()\n assert len(results) == len(stmts)\n assert isinstance(results[0], tuple)\n path_results = [res[1] for res in results]\n assert path_results[0].paths == []\n assert path_results[1].paths == [[('A_activate_B', 1), ('B_dephos_C', 1),\n ('C_T185_p_obs', -1)]]\n assert path_results[2].paths == []\n assert path_results[3].paths == [[('B_dephos_C', 1), ('C_T185_p_obs', -1)]]", "def uniformCrossover(self, cl):\n if cons.env.format_data.discrete_action: #Always crossover condition if the phenotype is discrete (if continuous phenotype, half the time phenotype crossover is performed instead)\n self_specified_atts = copy.deepcopy(self.specified_attributes)\n cl_specified_atts = copy.deepcopy(cl.specified_attributes)\n probability = 0.5 #Equal probability for attribute alleles to be exchanged.\n\n #Make list of attribute references appearing in at least one of the parents.-----------------------------\n combined_atts = []\n for i in self_specified_atts:\n combined_atts.append(i)\n for i in cl_specified_atts:\n if i not in combined_atts:\n combined_atts.append(i)\n elif not cons.env.format_data.attribute_info[i][0]: #Attribute specified in both parents, and the attribute is discrete (then no reason to cross over)\n combined_atts.remove(i)\n combined_atts.sort()\n #--------------------------------------------------------------------------------------------------------\n changed = False;\n for att in combined_atts: #Each condition specifies different attributes, so we need to go through all attributes in the dataset.\n att_info = cons.env.format_data.attribute_info[att]\n #-----------------------------\n ref = 0\n #if att in self.specified_attributes:\n if att in self_specified_atts:\n ref += 1\n #if att in 
cl.specified_attributes:\n if att in cl_specified_atts:\n ref += 1\n #-----------------------------\n\n if ref == 0: #Attribute not specified in either condition (Attribute type makes no difference)\n print(\"Error: UniformCrossover!\")\n pass\n\n elif ref == 1: #Attribute specified in only one condition - do probabilistic switch of whole attribute state (Attribute type makes no difference)\n if att in self_specified_atts and random.random() > probability:\n i = self.specified_attributes.index(att) #reference to the position of the attribute in the rule representation\n cl.condition.append(self.condition.pop(i)) #Take attribute from self and add to cl\n cl.specified_attributes.append(att)\n self.specified_attributes.remove(att)\n changed = True #Remove att from self and add to cl\n\n\n if att in cl_specified_atts and random.random() < probability:\n i = cl.specified_attributes.index(att) #reference to the position of the attribute in the rule representation\n self.condition.append(cl.condition.pop(i)) #Take attribute from self and add to cl\n self.specified_attributes.append(att)\n cl.specified_attributes.remove(att)\n changed = True #Remove att from cl and add to self.\n\n\n else: #Attribute specified in both conditions - do random crossover between state alleles. The same attribute may be specified at different positions within either classifier\n #-------------------------------------------------------\n # CONTINUOUS ATTRIBUTE\n #-------------------------------------------------------\n if att_info[0]:\n i_cl1 = self.specified_attributes.index(att) #pairs with self (classifier 1)\n i_cl2 = cl.specified_attributes.index(att) #pairs with cl (classifier 2)\n tmp_key = random.randint(0,3) #Make random choice between 4 scenarios, Swap minimums, Swap maximums, Self absorbs cl, or cl absorbs self.\n if tmp_key == 0: #Swap minimum\n temp = self.condition[i_cl1][0]\n self.condition[i_cl1][0] = cl.condition[i_cl2][0]\n cl.condition[i_cl2][0] = temp\n elif tmp_key == 1: #Swap maximum\n temp = self.condition[i_cl1][1]\n self.condition[i_cl1][1] = cl.condition[i_cl2][1]\n cl.condition[i_cl2][1] = temp\n else: #absorb range\n all_list = self.condition[i_cl1] + cl.condition[i_cl2]\n new_min = min(all_list)\n new_max = max(all_list)\n if tmp_key == 2: #self absorbs cl\n self.condition[i_cl1] = [new_min,new_max]\n #Remove cl\n cl.condition.pop(i_cl2)\n cl.specified_attributes.remove(att)\n else: #cl absorbs self\n cl.condition[i_cl2] = [new_min,new_max]\n #Remove self\n self.condition.pop(i_cl1)\n self.specified_attributes.remove(att)\n #-------------------------------------------------------\n # DISCRETE ATTRIBUTE\n #-------------------------------------------------------\n else:\n pass\n tmp_list1 = copy.deepcopy(self_specified_atts)\n tmp_list2 = copy.deepcopy(cl.specified_attributes)\n tmp_list1.sort()\n tmp_list2.sort()\n if changed and (tmp_list1 == tmp_list2):\n changed = False\n\n if self.action != cl.action and random.random() > probability:\n # Switch phenotypes of 2 classifiers if GA is run in match set\n temp = self.action\n self.action = cl.action\n cl.action = temp\n changed = True\n return changed\n #-------------------------------------------------------\n # CONTINUOUS PHENOTYPE CROSSOVER\n #-------------------------------------------------------\n elif random.random() < 0.5:\n return self.actionCrossover(cl)", "def wind_operating_costs_rule(_m, y, s):\r\n\r\n # Cost for existing wind units\r\n existing = sum(m.C_MC[g, y] * m.p[g, y, s, t] for g in m.G_E_WIND for t in m.T)\r\n\r\n # Cost 
for candidate wind units\r\n candidate = sum((m.C_MC[g, y] - (m.baseline[y] * m.permit_price[y])) * m.p[g, y, s, t]\r\n for g in m.G_C_WIND for t in m.T)\r\n\r\n return existing + candidate", "def fprop(self, input):\n\t\tactivations = [self.array(i, 1) for i in self.netSize]\t\t# Activations have same dimensions as network\t\n\t\tz = [self.array(i, 1) for i in self.netSize] # Weighted input have same dimensions as network\n\t\n\t\t\n\t\t#Assign the input to first layer of activations, and weighted inputs\n\t\tz[0] = input\n\t\tactivations[0] = input\n\t\truns = len(self.weights[0])\n\t\t\n\t\t#Propagate z through the network.\n\t\tfor i in range(runs):\n\t\t\tz[i + 1] = self.matrixAddition(self.matrixMultiply(self.transpose(self.weights[i]), activations[i]), self.biases[i]) \n\t\t\tactivations[i + 1] = self.sigmoidArray(z[i + 1])\n\t\t\t\n\t\t\t\n\t\tprint \"OUTPUTS:\"\t\n\t\tpprint (activations)\n\t\treturn activations, z" ]
[ "0.61967784", "0.6186182", "0.5359495", "0.5156224", "0.5084931", "0.5043622", "0.49144766", "0.48813945", "0.4814908", "0.48014724", "0.47573727", "0.4749955", "0.4715951", "0.46684822", "0.46408987", "0.4638725", "0.45815122", "0.45572296", "0.45256448", "0.45139882", "0.4501624", "0.44936222", "0.44817245", "0.44691968", "0.4464272", "0.4461573", "0.445965", "0.44553918", "0.44202614", "0.44195578" ]
0.78320956
0
PSYCOP exclusivity rule

>>> m = PSYCOP()
>>> i = m.get_fresh_id()
>>> j = m.get_fresh_id()
>>> ai = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None)
>>> bi = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None)
>>> bj = m.Prop(m.PT.atomic, m.Atom("B", j, False, False), None)
>>> cj = m.Prop(m.PT.atomic, m.Atom("C", j, False, False), None)
>>> p1 = m.Prop(m.PT.implies, ai, bi)
>>> p2 = m.Prop(m.PT.negation, m.Prop(m.PT.conjunction, bj, cj), None)
>>> m.rule_exclusivity(p1, p2, set())
[NOT ((A(x_2) AND C(x_2)))]
def rule_exclusivity(self, p1, p2, domain):
    if p1.type == self.PT.implies and p2.type == self.PT.negation:
        if p2.v1.type == self.PT.conjunction:
            if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:
                if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:
                    if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:
                        if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:
                            if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:
                                i = self.get_fresh_id()
                                p = self.Prop(self.PT.negation,
                                              self.Prop(self.PT.conjunction,
                                                        self.atom_prop_replace_properties(p1.v1, i),
                                                        self.atom_prop_replace_properties(p2.v1.v2, i)),
                                              None)
                                if not self.contains_isomorphic_proposition(domain, p):
                                    return [p]
    return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec", "def rule_conversion(self, p, domain):\r\n\r\n if p.type == self.PT.negation:\r\n if p.v1.type == self.PT.conjunction:\r\n if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:\r\n i = self.get_fresh_id()\r\n p_new = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p.v1.v2, i),\r\n self.atom_prop_replace_properties(p.v1.v1, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p_new):\r\n return [p_new]\r\n return []", "def prove_implies_self():\n return DeductiveProof(\n InferenceRule([], Formula.from_infix('(p->p)')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('(p->((q->p)->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->((q->p)->p))->((p->(q->p))->(p->p))'), 2, []),\n DeductiveProof.Line(Formula.from_infix('(p->(q->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->(q->p))->(p->p))'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [2, 3])])", "def test_disjunct_hs():\n hs1 = LocalSpace(\"1\")\n hs2 = LocalSpace(\"2\")\n alpha, beta = symbols('alpha, beta')\n A = OperatorSymbol('A', hs=hs1)\n B = OperatorSymbol('B', hs=hs2)\n assert Commutator.create(A, B) == ZeroOperator\n assert Commutator.create(alpha, beta) == ZeroOperator\n assert Commutator.create(alpha, B) == ZeroOperator\n assert Commutator.create(A, beta) == ZeroOperator", "def 
encode_proposition(self, p, hat=False):\r\n\r\n i = self.get_fresh_id()\r\n\r\n if p[0] == \"A\":\r\n # A(x) -> B(x)\r\n return self.Prop(self.PT.implies,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat))\r\n elif p[0] == \"E\":\r\n # not (A(x) and B(x))\r\n return self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat)),\r\n None)\r\n elif p[0] == \"I\":\r\n # A(a) and B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat))\r\n else:\r\n # A(a) and not B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.Prop(self.PT.negation,\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat),\r\n None))", "def get_all_exclusives(self):\r\n if self.exclusives is None:\r\n self._propagate_exclusives()\r\n return self.exclusives", "def rule_backward_conjunctive_syllogism(self, p, g):\r\n\r\n if g.type == self.PT.negation and p.type == self.PT.negation:\r\n # g = NOT(A(x))\r\n if p.v1.type == self.PT.conjunction:\r\n # p = NOT(A(x) AND B(x))\r\n if self.matching(p.v1.v1, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v2, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n elif self.matching(p.v1.v2, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v1, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n return []", "def test_conjuction_with_disjunction(self):\n kb = logic.PropKB()\n kb.tell(logic.expr('Color(Cat, Black)'))\n kb.tell(logic.expr('Age(Cat, 35)'))\n kb.tell(logic.expr('Name(Cat, Ted)'))\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '(Color(Cat, Black) & (Age(Cat, 36) | Name(Cat, Ted)))')), {})\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '(Color(Cat, Black) & (Age(Cat, 36) | Name(Cat, John)))')), False)\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '((Age(Cat, 36) | Name(Cat, Ted)) & Color(Cat, Black))')), {})\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '((Age(Cat, 36) | Name(Cat, John)) & Color(Cat, Black))')), False)", "def test_nofission_as_unprofitable():\n grid = Grid(shape=(20, 20))\n x, y = grid.dimensions\n t = grid.stepping_dim\n\n yl = SubDimension.left(name='yl', parent=y, thickness=4)\n yr = SubDimension.right(name='yr', parent=y, thickness=4)\n\n u = TimeFunction(name='u', grid=grid)\n\n eqns = [Eq(u.forward, u[t + 1, x, y + 1] + 1.).subs(y, yl),\n Eq(u.forward, u[t + 1, x, y - 1] + 1.).subs(y, yr)]\n\n op = Operator(eqns, opt='fission')\n\n assert_structure(op, ['t,x,yl', 't,x,yr'], 't,x,yl,yr')", "def implies_rule(self, step):\n self.proof[step.seq_num] = self.proof[step.assms[0]].on_prop(conv.rewr_conv(\"imp_disj_eq\"))", "def prove_hypothetical_syllogism():\n assumptions = [Formula(IMPLICATION_OPERATOR, Formula('p'), Formula('q')), Formula(IMPLICATION_OPERATOR,\n Formula('q'), Formula('r')), Formula('p')]\n rules = [MP, I1, I2]\n lines = []\n conclusion = Formula('r')\n statement = InferenceRule(assumptions, conclusion)\n # temp_statement =\n lines.append(DeductiveProof.Line(Formula('p')))\n lines.append(DeductiveProof.Line(assumptions[0]))\n lines.append(DeductiveProof.Line(assumptions[1]))\n lines.append(DeductiveProof.Line(Formula('q'), 0, [0, 1]))\n 
lines.append(DeductiveProof.Line(Formula('r'), 0, [3, 2]))\n proof = DeductiveProof(statement, rules, lines)\n return_proof = inverse_mp(proof, Formula('p'))\n\n return return_proof", "def prove_hypothetical_syllogism() -> Proof:\n #first we will prove hs with the added assumption of 'p'\n all_lines = []\n all_rules = {MP, I1, I0, D}\n assumptions_with_p = (Formula.parse('(p->q)'), Formula.parse('(q->r)'), Formula('p'))\n all_lines.append(Proof.Line(assumptions_with_p[2]))\n all_lines.append(Proof.Line(assumptions_with_p[0]))\n all_lines.append(Proof.Line(Formula('q'), MP, (0,1)))\n all_lines.append(Proof.Line(assumptions_with_p[1]))\n all_lines.append(Proof.Line(Formula('r'), MP, (2,3)))\n hs_with_p = InferenceRule(assumptions_with_p, Formula('r'))\n proof_with_p = Proof(hs_with_p, all_rules, all_lines)\n\n #now we will return the relevant proof using the Deduction Theorem\n return remove_assumption(proof_with_p)", "def state_exclusion(\n states: List[np.ndarray], probs: List[float] = None, method: str = \"conclusive\"\n) -> float:\n obj_func = []\n measurements = []\n constraints = []\n\n __is_states_valid(states)\n if probs is None:\n probs = [1 / len(states)] * len(states)\n __is_probs_valid(probs)\n\n supported_methods = [\"conclusive\", \"unambiguous\"]\n if method not in supported_methods:\n raise ValueError(\n f\"Exclusion method {method} not supported. Please \"\n f\"select one from {supported_methods}.\"\n )\n\n dim_x, dim_y = states[0].shape\n\n # The variable `states` is provided as a list of vectors. Transform them\n # into density matrices.\n if dim_y == 1:\n for i, state_ket in enumerate(states):\n states[i] = state_ket * state_ket.conj().T\n\n for i, _ in enumerate(states):\n measurements.append(cvxpy.Variable((dim_x, dim_x), PSD=True))\n\n obj_func.append(probs[i] * cvxpy.trace(states[i].conj().T @ measurements[i]))\n\n if method == \"unambiguous\":\n constraints.append(cvxpy.trace(states[i] @ measurements[i]) == 0)\n\n if method == \"conclusive\":\n constraints.append(sum(measurements) == np.identity(dim_x))\n elif method == \"unambiguous\":\n constraints.append(sum(measurements) <= np.identity(dim_x))\n\n if method == \"conclusive\":\n if np.iscomplexobj(states[0]):\n objective = cvxpy.Minimize(cvxpy.real(sum(obj_func)))\n else:\n objective = cvxpy.Minimize(sum(obj_func))\n elif method == \"unambiguous\":\n if np.iscomplexobj(states[0]):\n objective = cvxpy.Maximize(cvxpy.real(sum(obj_func)))\n else:\n objective = cvxpy.Maximize(sum(obj_func))\n\n problem = cvxpy.Problem(objective, constraints)\n sol_default = problem.solve()\n\n return 1 / len(states) * sol_default", "def test_redundant_set_field(self):\n SF1, SF2 = (\"SET_FIELD\", (\"IPV4_DST\", 1)), (\"SET_FIELD\", (\"IPV4_DST\", 2))\n SF3, SF4 = (\"SET_FIELD\", (\"IPV4_DST\", 3)), (\"SET_FIELD\", (\"IPV4_DST\", 4))\n OUT = (\"OUTPUT\", 1)\n n1 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF2, OUT])),\n Rule(priority=0)\n ])\n n2 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF1, SF2, OUT])),\n Rule(priority=0)\n ])\n n3 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF3, SF2, OUT])),\n Rule(priority=0)\n ])\n n4 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF4, SF3, SF1, SF2, OUT])),\n Rule(priority=0)\n ])\n n5 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF2, SF2, SF2, SF2, OUT])),\n Rule(priority=0)\n ])\n self.assertTrue(check_equal(n1, n2))\n self.assertTrue(check_equal(n1, n3))\n self.assertTrue(check_equal(n1, 
n4))\n self.assertTrue(check_equal(n1, n5))\n\n # Sanity check\n n6 = normalise([\n Rule(priority=10,\n instructions=inst_from_acts([SF4, SF3, SF1, SF1, OUT])),\n Rule(priority=0)\n ])\n self.assertFalse(check_equal(n1, n6))", "def test_XOR():\n\tk, outputs = 2, [0,1,1,0]\n\n\ttrue_pi0s = set(['00','11'])\n\ttrue_pi1s = set(['01','10'])\n\n\ttdt0, tdt1 = make_transition_density_tables(k=k, outputs=outputs)\n\tpi0s, pi1s = find_implicants_qm(tdt0) , find_implicants_qm(tdt1)\n\n\tassert (pi0s == true_pi0s) , ('Prime Implicants for 0 does not match. %s != %s' % (pi0s,true_pi0s))\n\tassert (pi1s == true_pi1s) , ('Prime Implicants for 1 does not match. %s != %s' % (pi1s,true_pi1s))\n\t# Two Symbols\n\ttrue_ts0s = [('11',[],[[0,1]]),('00',[],[[0,1]])]\n\ttrue_ts1s = [('10',[[0,1]],[])]\n\n\tts0s,ts1s = find_two_symbols_v2(k=k, prime_implicants=pi0s) , find_two_symbols_v2(k=k, prime_implicants=pi1s)\n\n\tassert (ts0s == true_ts0s) , ('Two Symbol for 0 does not match. %s != %s' % (ts0s,true_ts0s))\n\tassert (ts1s == true_ts1s) , ('Two Symbol for 1 does not match. %s != %s' % (ts1s,true_ts1s))", "def _findRedundantProteins(protToPeps, pepToProts, proteins=None):\n if proteins is None:\n proteins = viewkeys(protToPeps)\n\n pepFrequency = _getValueCounts(pepToProts)\n protPepCounts = _getValueCounts(protToPeps)\n\n getCount = operator.itemgetter(1)\n getProt = operator.itemgetter(0)\n\n #TODO: quick and dirty solution\n #NOTE: add a test for merged proteins\n proteinTuples = list()\n for protein in proteins:\n if isinstance(protein, tuple):\n proteinTuples.append(protein)\n else:\n proteinTuples.append(tuple([protein]))\n\n sort = list()\n for protein in sorted(proteinTuples, reverse=True):\n if len(protein) == 1:\n protein = protein[0]\n\n protPepFreq = [pepFrequency[pep] for pep in protToPeps[protein]]\n if min(protPepFreq) > 1:\n sortValue = (len(protPepFreq)*-1, sorted(protPepFreq, reverse=True))\n sort.append((protein, sortValue))\n sortedProteins = map(getProt, sorted(sort, key=getCount, reverse=True))\n\n redundantProteins = set()\n for protein in sortedProteins:\n for pep in protToPeps[protein]:\n if pepFrequency[pep] <= 1:\n break\n else:\n protPepFrequency = Counter(protToPeps[protein])\n pepFrequency.subtract(protPepFrequency)\n redundantProteins.add(protein)\n return redundantProteins", "def _filter_satisfied(self, update_setd=False):\n\n model = self.oracle.get_model()\n setd = set()\n\n for i, cl in enumerate(self.soft):\n if not self.satc[i]:\n if self._satisfied(cl, model):\n self.satc[i] = True\n self.ss_assumps.append(self.sels[i])\n else:\n setd = setd.union(set(cl))\n\n if update_setd:\n self.setd = list(setd)", "def extract_ac_conclusions(self, propositions):\r\n\r\n prop_ac = []\r\n for p in propositions:\r\n s = self.proposition_to_string(p)\r\n if s is not None:\r\n if {s[1], s[2]} == {\"a\", \"c\"}:\r\n prop_ac.append(s)\r\n return prop_ac", "def implies_not_clause(self, source_variable, state):\n assert state in self.states\n yield Implies(source_variable, self.deassert_state(state))", "def generate_exclusions(proteins):\n pass", "def reasoning_rule_1(self):\n # TODO: determine if we need severe/extreme/something else (e.g. 
keep it)\n # return\n # self.webgenesis_client.update_incident_severity(incident_uri, \"severe\")\n\n query = \"\"\"\n SELECT ?incident (group_concat(?severity_value;separator=\"|\") as ?severities) \n WHERE {\n ?incident rdf:type baw:Incident .\n\n ?participant baw:participantIsInvolvedIn ?incident .\n ?participant rdf:type baw:Human .\n \n OPTIONAL {\n ?incident baw:hasIncidentSeverity ?severity_value .\n } .\n\n } GROUP BY ?incident\n \"\"\"\n\n # MINUS {\n # ?incident baw:hasIncidentSeverity \"severe\" .\n # }\n # BIND(IF(BOUND(?severity_value), ?severity_value, \"unknown\") AS ?severity) .\n # print(\"Rule 1\")\n # print(query)\n # ?incident baw:hasIncidentSeverity \"extreme\" .\n # ?incident baw:hasIncidentSeverity \"severe\" .\n results = self.webgenesis_client.execute_sparql_select(query)\n\n # print(\"Rule 1\" + str(results))\n # print(\"cluster_type:\" + str(cluster_type))\n\n # print(results)\n if results is not None:\n for result in results['results']['bindings']:\n # cluster_type = wg_client.get_incident_category(\n # self.webgenesis_client.get_incident_report_psap_id(incident_id))\n\n cluster_type = \"Other\"\n if \"severities\" in result and result['severities']['value']:\n previous_severities = result['severities']['value'].split(\"|\")\n else:\n previous_severities = []\n incident_uri = result['incident']['value']\n cluster_type = self.webgenesis_client.get_cluster_type_from_incident_uri(incident_uri)\n # psap_id = wg_client.get_incident_report_id(incident_uri)\n # psap_id\n # print(\"INCIDENT:\" + str(incident_uri))\n if cluster_type != \"Other\" and \"severe\" not in previous_severities and \"extreme\" not in previous_severities:\n # if \"severe\" not in previous_severities and \"extreme\" not in previous_severities:\n print(\"RULE 1:\" + str(previous_severities) + \",\" + str(cluster_type) + \", \" + str(incident_uri))\n print(\"Severity Update:\" + str(previous_severities) + \" ---------> \" + str(\"severe\"))\n # Update incident severity value\n # self.webgenesis_client.update_incident_severity(incident_uri, \"severe\")\n self.webgenesis_client.add_incident_severity(incident_uri, \"severe\")\n # if \"current_level\" in result and result['current_level']['value'] !=\"severe\" and result['current_level']['value'] !=\"extreme\":", "def pull_out_quantifications_across_negation(formula: Formula) -> \\\r\n Tuple[Formula, Proof]:\r\n assert is_unary(formula.root)\r\n # Task 11.6\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n # Basic Case - No quantifier to change\r\n if not is_quantifier(formula.first.root):\r\n ccl = equivalence_of(formula, formula)\r\n prover.add_tautology(ccl)\r\n return formula, prover.qed()\r\n\r\n # The ~ and the predicate without the quantifier to keep changing it\r\n form = Formula(\"~\", formula.first.predicate)\r\n pred, proof = pull_out_quantifications_across_negation(form)\r\n\r\n # Definig the new quantifier\r\n if formula.first.root == \"A\":\r\n my_quantifier = \"E\"\r\n else: # \"E\"\r\n my_quantifier = \"A\"\r\n\r\n # proof for changing quantifier\r\n # because add_proof() is my friend\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n\r\n\r\n form2 = Formula(\"->\", proof.conclusion, equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)))\r\n my_map2 = {'R': str(form.substitute({formula.first.variable: Term(\"_\")})),\r\n 'Q': str(pred.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable, \"y\": 
formula.first.variable}\r\n\r\n step2 = prover.add_instantiated_assumption(form2,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[14 if my_quantifier==\"A\" else 15], my_map2)\r\n\r\n step3 = prover.add_mp(equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)), step1, step2)\r\n\r\n\r\n my_map4 = {'R': str(formula.first.predicate.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable}\r\n form4 = equivalence_of(formula, Formula(my_quantifier, formula.first.variable, form))\r\n step4 = prover.add_instantiated_assumption(form4,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[0 if my_quantifier==\"E\" else 1], my_map4)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, Formula(my_quantifier, formula.first.variable, pred)), [step3, step4])\r\n\r\n return Formula(my_quantifier, formula.first.variable, pred), prover.qed()", "def __no_crossing(self):\n for pos_left_1 in range(self.n):\n for pos_left_2 in range(pos_left_1 + 1, self.n):\n for pos_right_2 in range(self.n):\n for pos_right_1 in range(pos_right_2 + 1, self.n):\n # For all i, j, k, m | k < i and m > j . not w(i, j) or not w(k, m)\n self.__clause(-self.preds.w(pos_left_1, pos_right_1),\n -self.preds.w(pos_left_2, pos_right_2))", "def nsi_cross_transitivity(self, node_list1, node_list2):\n return _nsi_cross_transitivity(\n to_cy(self.adjacency + np.eye(self.N, dtype=ADJ), ADJ),\n np.array(node_list1, dtype=NODE),\n np.array(node_list2, dtype=NODE),\n to_cy(self.node_weights, DWEIGHT))", "def prove_hypothetical_syllogism():\n return inverse_mp(DeductiveProof(InferenceRule([Formula.from_infix('(p->q)'),\n Formula.from_infix('(q->r)'),\n Formula.from_infix('p')], Formula.from_infix('r')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('p')),\n DeductiveProof.Line(Formula.from_infix('(p->q)')),\n DeductiveProof.Line(Formula.from_infix('(q->r)')),\n DeductiveProof.Line(Formula.from_infix('q'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('r'), 0, [3, 2])\n ]), Formula.from_infix('p'))", "def proof_or_counterexample_implies_not(formula):\n return proof_or_counterexample_with_func(formula, prove_in_model_implies_not)", "def is_commutative(*args):\n return _ida_hexrays.is_commutative(*args)", "def hydro_operating_costs_rule(_m, y, s):\r\n\r\n return sum(m.C_MC[g, y] * m.p[g, y, s, t] for g in m.G_E_HYDRO for t in m.T)" ]
[ "0.6254757", "0.6071033", "0.54834473", "0.5454009", "0.53239495", "0.5123699", "0.5068382", "0.50099224", "0.5007349", "0.4857696", "0.4847564", "0.48285633", "0.4740116", "0.47349322", "0.47092235", "0.46827868", "0.4668266", "0.46587437", "0.4649622", "0.46377978", "0.4580551", "0.45796296", "0.45566282", "0.4526433", "0.45245653", "0.4521665", "0.44917044", "0.4485907", "0.44616154", "0.44437116" ]
0.8137308
0
PSYCOP conversion rule

>>> m = PSYCOP()
>>> i = m.get_fresh_id()
>>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None)
>>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None)
>>> p = m.Prop(m.PT.negation, m.Prop(m.PT.conjunction, a, b), None)
>>> m.rule_conversion(p, set())
[NOT ((B(x_1) AND A(x_1)))]
def rule_conversion(self, p, domain):
    if p.type == self.PT.negation:
        if p.v1.type == self.PT.conjunction:
            if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:
                i = self.get_fresh_id()
                p_new = self.Prop(self.PT.negation,
                                  self.Prop(self.PT.conjunction,
                                            self.atom_prop_replace_properties(p.v1.v2, i),
                                            self.atom_prop_replace_properties(p.v1.v1, i)),
                                  None)
                if not self.contains_isomorphic_proposition(domain, p_new):
                    return [p_new]
    return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_backward_conjunctive_syllogism(self, p, g):\r\n\r\n if g.type == self.PT.negation and p.type == self.PT.negation:\r\n # g = NOT(A(x))\r\n if p.v1.type == self.PT.conjunction:\r\n # p = NOT(A(x) AND B(x))\r\n if self.matching(p.v1.v1, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v2, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n elif self.matching(p.v1.v2, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v1, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n return []", "def encode_proposition(self, p, hat=False):\r\n\r\n i = self.get_fresh_id()\r\n\r\n if p[0] == \"A\":\r\n # A(x) -> B(x)\r\n return self.Prop(self.PT.implies,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat))\r\n elif p[0] == \"E\":\r\n # not (A(x) and B(x))\r\n return self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat)),\r\n None)\r\n elif p[0] == \"I\":\r\n # A(a) and B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat))\r\n else:\r\n # A(a) and not B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.Prop(self.PT.negation,\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat),\r\n None))", "def to_implies_false(formula: Formula) -> Formula:\r\n # Task 3.6d\r\n convert_implies = to_implies_not(formula)\r\n map_false = {'~': Formula('->', Formula('p'), Formula('F'))}\r\n return convert_implies.substitute_operators(map_false)", "def to_implies_not(formula: Formula) -> Formula:\r\n # Task 3.6c\r\n convert_and_op_1 = to_not_and(formula)\r\n 
and_formula_1 = Formula('->', Formula('p'), Formula('~', Formula('q')))\r\n and_formula_2 = Formula('->', Formula('~', Formula('p')), Formula('q'))\r\n\r\n map_and = {'&': Formula('~', Formula('->', and_formula_2, and_formula_1))}\r\n return convert_and_op_1.substitute_operators(map_and)", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def rule_backward_if_elimination(self, p, g):\r\n\r\n if p.type == self.PT.implies:\r\n # p = IF A(x) THEN B(x)\r\n if self.matching(p.v2, g):\r\n return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,\r\n new_is_name=g.v1.is_name,\r\n new_hat=g.v1.hat)]\r\n return None", "def sat_apply_assignment(self, assignment):\n # YOUR CODE HERE\n o = set()\n print(s)\n print({x.simplify(assignment) for x in self.clauses if not isinstance(x.simplify(assignment), bool)})\n for x in s.clauses:\n if not isinstance(x.simplify(assignment), bool):\n o.add(x.simplify(assignment))\n print(\"ASSIGN SET\", o)\n\n return SAT(o)\n # return SAT({x.simplify(assignment) for x in self.clauses if not isinstance(x.simplify(assignment), bool)})", "def apply_rule(seq):\n for idx,prop in enumerate(seq.ant):\n\n if prop.conn == \"not\":\n # create a copy of seq (we don't want to mutate it)\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.con = [ not_a.p1 ] + new_seq.con\n # return a list of 3 values with seq2 being None\n # (since there is not split in this rule)\n return [new_seq , None, \"not left\"]\n\n elif prop.conn == \"or\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\"\n assert b_or_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_or_c.p1)\n new_seq2.ant.append(b_or_c.p2)\n # return the obtained sequents and the rule name\n # here we have two sequents since \"or left\"\n # has two sequents at the top\n return [new_seq1 , new_seq2, \"or left\"]\n\n elif prop.conn == \"and\":\n #create one copy of seq\n new_seq = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_and_c = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert b_and_c.conn == \"and\"\n # apply the rule\n new_seq.ant.append(b_and_c.p1)\n new_seq.ant.append(b_and_c.p2)\n # return a list of 3 values with seq2 being None\n return [new_seq, None, 'and left']\n\n \n elif prop.conn == \"imp\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_imp_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_imp_c.conn == \"imp\"\n assert b_imp_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_imp_c.p2)\n new_seq2.con.append(b_imp_c.p1)\n # return the obtained sequents and the rule name\n return [new_seq1 , 
new_seq2, \"implies left\"]\n\n for idx,prop in enumerate(seq.con):\n if prop.conn == \"not\":\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.ant = [ not_a.p1 ] + new_seq.ant\n # return a list of 3 values with seq2 being None\n return [new_seq , None, \"not right\"]\n elif prop.conn == \"or\":\n # create one copy of seq\n new_seq = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\" \n # apply the rule\n new_seq.con.append(b_or_c.p1)\n new_seq.con.append(b_or_c.p2)\n # return the obtained sequent and the rule name\n return [new_seq , None, \"or right\"]\n\n elif prop.conn == 'and':\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n b_and_c = new_seq1.con.pop(idx)\n assert b_and_c.conn == \"and\"\n assert b_and_c == new_seq2.con.pop(idx)\n new_seq1.con.append(b_and_c.p1)\n new_seq2.con.append(b_and_c.p2)\n return [new_seq1 , new_seq2, \"and right\"]\n\n elif prop.conn == 'imp':\n new_seq = Sequent(seq.ant[:], seq.con[:])\n b_imp_c = new_seq.con.pop(idx)\n assert b_imp_c.conn == \"imp\"\n new_seq.ant.append(b_imp_c.p1)\n new_seq.con.append(b_imp_c.p2)\n return [new_seq , None, \"implies right\"]", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec", "def one_time_rules(self):\n # There is also a hidden sameAs rule in RDF Semantics: if a literal appears in a triple, and another one has\n # the same value, then the triple should be duplicated with the other value.\n literals = self.literal_proxies.lit_to_bnode\n items = ((lt1, lt2) for lt1, lt2 in product(literals, literals) if lt1 != lt2)\n for lt1, lt2 in items:\n try:\n lt1_d = lt1.lit.toPython()\n lt2_d = lt2.lit.toPython()\n if lt1_d == lt2_d:\n # In OWL, this line is simply stating a sameAs for the corresponding BNodes, and then let\n # the usual rules take effect. 
In RDFS this is not possible, so the sameAs rule is,\n # essentially replicated...\n bn1 = self.literal_proxies.lit_to_bnode[lt1]\n bn2 = self.literal_proxies.lit_to_bnode[lt2]\n for (s, p, o) in self.graph.triples((None, None, bn1)):\n self.graph.add((s, p, bn2))\n except:\n # there may be a problem with one of the python conversions; the rule is imply ignored\n # raise e\n pass", "def convert_clifford(self, operator: PauliSumOp) -> OperatorBase:\n\n if not self._symmetries or not self._sq_paulis or not self._sq_list:\n raise OpflowError(\n \"Z2 symmetries, single qubit pauli and single qubit list cannot be empty.\"\n )\n\n if not operator.is_zero():\n for clifford in self.cliffords:\n operator = cast(PauliSumOp, clifford @ operator @ clifford)\n operator = operator.reduce(atol=0)\n\n return operator", "def convert(self):\n return _libsbml.SBMLRuleConverter_convert(self)", "def to_not_and(formula: Formula) -> Formula:\r\n # Task 3.6a\r\n map_operators = {'->': Formula.parse('~(~~p&~q)'),\r\n '+': Formula.parse('~(~(p&~q)&~(~p&q))'),\r\n '<->': Formula.parse('~~(~(p&~q)&~(~p&q))'),\r\n '-&': Formula.parse('~(p&q)'),\r\n '-|': Formula.parse('~~(~p&~q)'),\r\n 'F': Formula.parse('(p&~p)'),\r\n 'T': Formula.parse('~(p&~p)'),\r\n '|': Formula.parse('~(~p&~q)')}\r\n return formula.substitute_operators(map_operators)", "def test_disjunct_hs():\n hs1 = LocalSpace(\"1\")\n hs2 = LocalSpace(\"2\")\n alpha, beta = symbols('alpha, beta')\n A = OperatorSymbol('A', hs=hs1)\n B = OperatorSymbol('B', hs=hs2)\n assert Commutator.create(A, B) == ZeroOperator\n assert Commutator.create(alpha, beta) == ZeroOperator\n assert Commutator.create(alpha, B) == ZeroOperator\n assert Commutator.create(A, beta) == ZeroOperator", "def to_equivalence_rule(self) -> \"EquivalenceRule\":\n assert (\n self.is_equivalence()\n ), f\"EquivalenceRule can only be created for equivalence rules\\n{self}\"\n return EquivalenceRule(self)", "def get_bprop_logical_not(self):\n\n def bprop(x, out, dout):\n return (zeros_like(x),)\n return bprop", "def reversed(self) -> \"Conversion\":\n return Conversion(\n categorization_a=self.categorization_b,\n categorization_b=self.categorization_a,\n rules=[rule.reversed() for rule in self.rules],\n auxiliary_categorizations=self.auxiliary_categorizations,\n comment=self.comment,\n references=self.references,\n institution=self.institution,\n last_update=self.last_update,\n version=self.version,\n )", "def convert_logical_not(g, op, block):\n\n ipt0 = g.get_node(op.input(\"X\")[0])\n op_func = get_relay_op(op.type)\n out = op_func(ipt0)\n g.add_node(op.output(\"Out\")[0], out)", "def pr_one_constraint(self, output_prop):\n raise NotImplementedError(\"subclasses need to override this method\")", "def _parse_unground_proposition(self, array):\n negative = False\n if array[1] == 'not':\n negative = True\n array = array[2:-1]\n return Predicate(array[1], array[2:-1], False, negative)", "def clone(self):\n return _libsbml.ConversionProperties_clone(self)", "def to_not_and_or(formula: Formula) -> Formula:\r\n # Task 3.5\r\n\r\n map_operators = {'->': Formula.parse('(~p|q)'),\r\n '+': Formula.parse('((p&~q)|(~p&q))'),\r\n '<->': Formula.parse('~((p&~q)|(~p&q))'),\r\n '-&': Formula.parse('~(p&q)'),\r\n '-|': Formula.parse('~(p|q)'),\r\n 'F': Formula.parse('(p&~p)'),\r\n 'T': Formula.parse('~(p&~p)')}\r\n return formula.substitute_operators(map_operators)", "def _to_ops(from_op):\n\n for to_op in OPERATORS:\n if to_op and isinstance(from_op, ast.Not):\n # 'not' can only be removed but not replaced with\n 
# '+', '-' or '~' b/c that may lead to strange results\n pass\n elif isinstance(from_op, ast.UAdd) and (to_op is None):\n # '+1' => '1' yields equivalent mutations\n pass\n else:\n yield to_op", "def prove_implies_self():\n return DeductiveProof(\n InferenceRule([], Formula.from_infix('(p->p)')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('(p->((q->p)->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->((q->p)->p))->((p->(q->p))->(p->p))'), 2, []),\n DeductiveProof.Line(Formula.from_infix('(p->(q->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->(q->p))->(p->p))'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [2, 3])])", "def can_convert(self, from_type, to_type):\n #\n # Test if the glpsol executable is available\n #\n if not pyomo.common.Executable(\"pico_convert\"):\n return False\n #\n # Return True for specific from/to pairs\n #\n if from_type == ProblemFormat.nl and to_type == ProblemFormat.cpxlp:\n return True\n if from_type == ProblemFormat.nl and to_type == ProblemFormat.mps:\n return True\n if from_type == ProblemFormat.mps and to_type == ProblemFormat.cpxlp:\n return True\n if from_type == ProblemFormat.cpxlp and to_type == ProblemFormat.mps:\n return True\n return False", "def _init_default_conversion(self):\n for p in self.required_properties:\n if p not in self.property_conversion:\n self.property_conversion[p] = 1.0", "def simplification(self):\n from copy import deepcopy\n\n fsm = deepcopy(self)\n fsm.prepone_output()\n return fsm.quotient(fsm.equivalence_classes())", "def pull_out_quantifications_across_negation(formula: Formula) -> \\\r\n Tuple[Formula, Proof]:\r\n assert is_unary(formula.root)\r\n # Task 11.6\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n # Basic Case - No quantifier to change\r\n if not is_quantifier(formula.first.root):\r\n ccl = equivalence_of(formula, formula)\r\n prover.add_tautology(ccl)\r\n return formula, prover.qed()\r\n\r\n # The ~ and the predicate without the quantifier to keep changing it\r\n form = Formula(\"~\", formula.first.predicate)\r\n pred, proof = pull_out_quantifications_across_negation(form)\r\n\r\n # Definig the new quantifier\r\n if formula.first.root == \"A\":\r\n my_quantifier = \"E\"\r\n else: # \"E\"\r\n my_quantifier = \"A\"\r\n\r\n # proof for changing quantifier\r\n # because add_proof() is my friend\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n\r\n\r\n form2 = Formula(\"->\", proof.conclusion, equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)))\r\n my_map2 = {'R': str(form.substitute({formula.first.variable: Term(\"_\")})),\r\n 'Q': str(pred.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable, \"y\": formula.first.variable}\r\n\r\n step2 = prover.add_instantiated_assumption(form2,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[14 if my_quantifier==\"A\" else 15], my_map2)\r\n\r\n step3 = prover.add_mp(equivalence_of(Formula(my_quantifier, formula.first.variable, form),\r\n Formula(my_quantifier, formula.first.variable, pred)), step1, step2)\r\n\r\n\r\n my_map4 = {'R': str(formula.first.predicate.substitute({formula.first.variable: Term(\"_\")})), \"x\": formula.first.variable}\r\n form4 = equivalence_of(formula, Formula(my_quantifier, formula.first.variable, form))\r\n step4 = prover.add_instantiated_assumption(form4,\r\n ADDITIONAL_QUANTIFICATION_AXIOMS[0 if my_quantifier==\"E\" else 1], my_map4)\r\n\r\n 
prover.add_tautological_implication(equivalence_of(formula, Formula(my_quantifier, formula.first.variable, pred)), [step3, step4])\r\n\r\n return Formula(my_quantifier, formula.first.variable, pred), prover.qed()" ]
[ "0.6225471", "0.6093774", "0.6014474", "0.56270057", "0.55422795", "0.549347", "0.5387336", "0.5229568", "0.5196128", "0.5100265", "0.49939024", "0.49359933", "0.490674", "0.49049196", "0.49046978", "0.48515642", "0.48485404", "0.47913727", "0.47747165", "0.47721443", "0.4759954", "0.47501355", "0.47423428", "0.4736106", "0.47133428", "0.4709141", "0.46874967", "0.467632", "0.46402067", "0.4633688" ]
0.8312214
0
Returns the leftmost atom in p.
def get_leftmost_atom(self, p):
    if p.type == self.PT.atomic:
        return p.v1
    else:
        return self.get_leftmost_atom(p.v1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def left(self, p):\n node = self._validate(p)\n return self._make_position(node._left)", "def left(self, p):\n node = self._validate(p)\n return self._make_position(node.left)", "def left(self, p):\n node = self._validate_position(p)\n return self._make_position(node.left)", "def left(self, p):\n node = self._validate(p)\n return self._make_position(node._left)", "def left(self, p):\n node = self._validate(p)\n return self._make_position(node._left)", "def getLeftmost(self, root):\n current = root\n while current.left is not None:\n current = current.left\n return current", "def left(self, node):\r\n if self._col(node.count) > 0:\r\n return self.nodes[node.count - 1]\r\n else:\r\n return None", "def left(self):\n return self.l", "def pleft(self):\n return -self.pfill(1) + self.plen(-1, s=True)", "def left(self):\n\t\treturn self._left", "def peekleft(self):\n if self.front:\n return self.front.val", "def left(self, n):\n return n._left", "def left(self):\n return self._left", "def left(self):\n return self._left", "def left(self):\n x, y = (self.loc[0] - 1, self.loc[1])\n\n if x < 0:\n return None # None\n\n return self.garden.cells[y][x]", "def peekleft(self):\n return self.buffer[self.start]", "def getLeft(self):\n return self.left", "def get_left(self):\n return self.left", "def get_left(self):\n return self.__left", "def left(self):\n\n return self._left", "def left(self):\n return self.points['topLeft'].x", "def left(self):\n return self.points['topLeft'].x", "def get_leftmost_child(self):\n\t\tif self.left_child == None:\n\t\t\treturn self\n\t\telse:\n\t\t\treturn self.left_child.get_leftmost_child()", "def left(self, node):\n self._validate_node(node)\n idx = node._index\n left_idx = 2*idx + 1\n if left_idx >= self._N:\n return None # Exceeds length of array\n return self._array[left_idx]", "def get_left(self):\n return BinaryNode.or_none(self.left)", "def atoms_left(self):\r\n return self._board.get_atoms()", "def atoms_left(self):\n return self._atoms", "def _subtree_first_position(self, p):\n \"\"\"will be used by before()\"\"\"\n walk = p\n #recursivly walking to the left child until the left subtree has no child\n while self.left(walk) is not None:\n walk = self.left(walk)\n return walk", "def get_node_left(self, n: MazeCell) -> MazeCell:\n if n.x == 0:\n return None\n else:\n return self.get_node(n.x - 1, n.y)", "def _subtree_first_position(self, p):\n walk = p\n while self.left(walk) is not None:\n walk = self.left(walk) # keep walking left\n return walk" ]
[ "0.7379185", "0.73784184", "0.73633677", "0.73547095", "0.73547095", "0.7014217", "0.6914837", "0.68129015", "0.68106556", "0.6807587", "0.67974186", "0.67950267", "0.67447543", "0.67447543", "0.6734669", "0.6727005", "0.6716256", "0.6640088", "0.6608549", "0.65990144", "0.6588703", "0.6588703", "0.65246034", "0.6515882", "0.65025693", "0.6485929", "0.6485491", "0.6471238", "0.647", "0.64611936" ]
0.8858748
0
a = m.Prop(m.PT.atomic, v1='a', v2=None)
b = m.Prop(m.PT.atomic, v1='b', v2=None)

>>> m = PSYCOP()
>>> i = m.get_fresh_id()
>>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None)
>>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None)
>>> prop = m.Prop(m.PT.negation, m.Prop(m.PT.conjunction, a, b), None)
>>> m.rule_backward_conjunctive_syllogism(prop, m.Prop(m.PT.negation, a, None))
[B(x_0)]
def rule_backward_conjunctive_syllogism(self, p, g):
    if g.type == self.PT.negation and p.type == self.PT.negation:
        # g = NOT(A(x))
        if p.v1.type == self.PT.conjunction:
            # p = NOT(A(x) AND B(x))
            if self.matching(p.v1.v1, g.v1):
                return [self.atom_prop_replace_properties(p.v1.v2, new_arg_id=g.v1.v1.arg_id,
                                                          new_is_name=g.v1.v1.is_name,
                                                          new_hat=g.v1.v1.hat)]
            elif self.matching(p.v1.v2, g.v1):
                return [self.atom_prop_replace_properties(p.v1.v1, new_arg_id=g.v1.v1.arg_id,
                                                          new_is_name=g.v1.v1.is_name,
                                                          new_hat=g.v1.v1.hat)]
    return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_disjunct_hs():\n hs1 = LocalSpace(\"1\")\n hs2 = LocalSpace(\"2\")\n alpha, beta = symbols('alpha, beta')\n A = OperatorSymbol('A', hs=hs1)\n B = OperatorSymbol('B', hs=hs2)\n assert Commutator.create(A, B) == ZeroOperator\n assert Commutator.create(alpha, beta) == ZeroOperator\n assert Commutator.create(alpha, B) == ZeroOperator\n assert Commutator.create(A, beta) == ZeroOperator", "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_conversion(self, p, domain):\r\n\r\n if p.type == self.PT.negation:\r\n if p.v1.type == self.PT.conjunction:\r\n if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:\r\n i = self.get_fresh_id()\r\n p_new = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p.v1.v2, i),\r\n self.atom_prop_replace_properties(p.v1.v1, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p_new):\r\n return [p_new]\r\n return []", "def test_conjuction_with_disjunction(self):\n kb = logic.PropKB()\n kb.tell(logic.expr('Color(Cat, Black)'))\n kb.tell(logic.expr('Age(Cat, 35)'))\n kb.tell(logic.expr('Name(Cat, Ted)'))\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '(Color(Cat, Black) & (Age(Cat, 36) | Name(Cat, Ted)))')), {})\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '(Color(Cat, Black) & (Age(Cat, 36) | Name(Cat, John)))')), False)\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '((Age(Cat, 36) | Name(Cat, Ted)) & Color(Cat, Black))')), {})\n self.assertBindingsEqual(\n kb.ask(logic.expr(\n '((Age(Cat, 36) | Name(Cat, John)) & Color(Cat, Black))')), False)", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n 
self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_backward_if_elimination(self, p, g):\r\n\r\n if p.type == self.PT.implies:\r\n # p = IF A(x) THEN B(x)\r\n if self.matching(p.v2, g):\r\n return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,\r\n new_is_name=g.v1.is_name,\r\n new_hat=g.v1.hat)]\r\n return None", "def get_bprop_logical_not(self):\n\n def bprop(x, out, dout):\n return (zeros_like(x),)\n return bprop", "def eliminate_implications(s):\n if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.)\n args = map(eliminate_implications, s.args)\n a, b = args[0], args[-1]\n if s.op == '>>':\n return (b | ~a)\n elif s.op == '<<':\n return (a | ~b)\n elif s.op == '<=>':\n return (a | ~b) & (b | ~a)\n elif s.op == '^':\n assert len(args) == 2 ## TODO: relax this restriction\n return (a & ~b) | (~a & b)\n else:\n assert s.op in ('&', '|', '~')\n return Expr(s.op, *args)", "def apply_rule(seq):\n for idx,prop in enumerate(seq.ant):\n\n if prop.conn == \"not\":\n # create a copy of seq (we don't want to mutate it)\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.con = [ not_a.p1 ] + new_seq.con\n # return a list of 3 values with seq2 being None\n # (since there is not split in this rule)\n return [new_seq , None, \"not left\"]\n\n elif prop.conn == \"or\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\"\n assert b_or_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_or_c.p1)\n new_seq2.ant.append(b_or_c.p2)\n # return the obtained sequents and the rule name\n # here we have two sequents since \"or left\"\n # has two sequents at the top\n return [new_seq1 , new_seq2, \"or left\"]\n\n elif prop.conn == \"and\":\n #create one copy of seq\n new_seq = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_and_c = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert b_and_c.conn == \"and\"\n # apply the rule\n new_seq.ant.append(b_and_c.p1)\n new_seq.ant.append(b_and_c.p2)\n # return a list of 3 values with seq2 being None\n return [new_seq, None, 'and left']\n\n \n elif prop.conn == \"imp\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_imp_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_imp_c.conn == \"imp\"\n assert b_imp_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_imp_c.p2)\n new_seq2.con.append(b_imp_c.p1)\n # return the obtained sequents and the rule name\n return [new_seq1 , new_seq2, \"implies left\"]\n\n for idx,prop in enumerate(seq.con):\n if prop.conn == \"not\":\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.ant = [ not_a.p1 ] + new_seq.ant\n # return a list of 3 values with seq2 being None\n return [new_seq , None, \"not right\"]\n elif prop.conn == \"or\":\n # create one copy of seq\n new_seq = 
Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\" \n # apply the rule\n new_seq.con.append(b_or_c.p1)\n new_seq.con.append(b_or_c.p2)\n # return the obtained sequent and the rule name\n return [new_seq , None, \"or right\"]\n\n elif prop.conn == 'and':\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n b_and_c = new_seq1.con.pop(idx)\n assert b_and_c.conn == \"and\"\n assert b_and_c == new_seq2.con.pop(idx)\n new_seq1.con.append(b_and_c.p1)\n new_seq2.con.append(b_and_c.p2)\n return [new_seq1 , new_seq2, \"and right\"]\n\n elif prop.conn == 'imp':\n new_seq = Sequent(seq.ant[:], seq.con[:])\n b_imp_c = new_seq.con.pop(idx)\n assert b_imp_c.conn == \"imp\"\n new_seq.ant.append(b_imp_c.p1)\n new_seq.con.append(b_imp_c.p2)\n return [new_seq , None, \"implies right\"]", "def get_bprop_logical_or(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def get_bprop_not_equal(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def reversedsign(self):\n a, b = self.args\n if not (isinstance(a, BooleanAtom) or isinstance(b, BooleanAtom)):\n ops = {Eq: Eq, Gt: Lt, Ge: Le, Lt: Gt, Le: Ge, Ne: Ne}\n return Relational.__new__(ops.get(self.func, self.func), -a, -b)\n else:\n return self", "def makeDisjunction(solver, v, disjuncts):\n cn = solver.Constraint(0, (len(disjuncts) - 1))\n cn.SetCoefficient(v, len(disjuncts))\n for x in disjuncts:\n cn.SetCoefficient(x, -1)", "def prove_or_disprove(prop):\n #create sequent\n sequent = Sequent([],[prop])\n #create proof tree\n tree = ProofTree(sequent)\n tree.build_proof()\n #check leaves\n found = True\n seq_leaves = tree.get_leaves()\n for s in seq_leaves:\n if s.is_id() == False:\n assign(prop, s)\n found = False\n break\n if found == True:\n print tree", "def commutator(A, B):\n return A @ B - B @ A", "def as_relational(self, symbol):\n A, B = self.args\n\n A_rel = A.as_relational(symbol)\n B_rel = Not(B.as_relational(symbol))\n\n return And(A_rel, B_rel)", "def commutator(A, B, kind=\"normal\"):\n if kind == 'normal':\n return A @ B - B @ A\n\n elif kind == 'anti':\n return A @ B + B @ A\n\n else:\n raise TypeError(\"Unknown commutator kind '%s'\" % kind)", "def encode_proposition(self, p, hat=False):\r\n\r\n i = self.get_fresh_id()\r\n\r\n if p[0] == \"A\":\r\n # A(x) -> B(x)\r\n return self.Prop(self.PT.implies,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat))\r\n elif p[0] == \"E\":\r\n # not (A(x) and B(x))\r\n return self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, False, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, False, hat)),\r\n None)\r\n elif p[0] == \"I\":\r\n # A(a) and B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat))\r\n else:\r\n # A(a) and not B(a)\r\n return self.Prop(self.PT.conjunction,\r\n self.get_atomic_proposition(p[1].upper(), i, True, hat),\r\n self.Prop(self.PT.negation,\r\n self.get_atomic_proposition(p[2].upper(), i, True, hat),\r\n None))", "def _disjunction_op(spec, *expressions):", "def get_bprop_assign_sub(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def 
get_bprop_sub(self):\n neg_func = P.Neg()\n\n def bprop(x, y, out, dout):\n return binop_grad_common(x, y, dout, neg_func(dout))\n return bprop", "def test_pull_out_scalars():\n hs = LocalSpace(\"sys\")\n A = OperatorSymbol('A', hs=hs)\n B = OperatorSymbol('B', hs=hs)\n alpha, beta = symbols('alpha, beta')\n assert Commutator.create(alpha * A, B) == alpha * Commutator(A, B)\n assert Commutator.create(A, beta * B) == beta * Commutator(A, B)\n assert Commutator.create(alpha * A, beta * B) == alpha * beta * Commutator(\n A, B\n )", "def proof_or_counterexample_implies_not(formula):\n return proof_or_counterexample_with_func(formula, prove_in_model_implies_not)", "def conjuncts(s):\n return dissociate('&', [s])", "def __init__(self):\n GinacFunction.__init__(self, \"conjugate\",\n conversions=dict(sympy='conjugate'))", "def as_relational(self, symbol):\n A, B = self.args\n\n A_rel = A.as_relational(symbol)\n B_rel = B.as_relational(symbol)\n\n return Xor(A_rel, B_rel)", "def get_bprop_logical_and(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def Jac(M):\n x = [sy.Dummy() for _ in range(nargs(M))]\n y = M(*x)\n J = [sy.diff(yi, xi) for yi in y for xi in x]\n return sy.lambdify(x, J, 'sympy')", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec" ]
[ "0.6137344", "0.60341465", "0.5974888", "0.59086096", "0.589563", "0.57486683", "0.5693098", "0.540354", "0.5260626", "0.5234264", "0.522255", "0.516357", "0.5155492", "0.5135247", "0.5105075", "0.5066817", "0.5008767", "0.50022006", "0.4978444", "0.49776587", "0.49762285", "0.49727237", "0.49243286", "0.49182218", "0.49157274", "0.48935", "0.4892575", "0.48772645", "0.48677388", "0.48480228" ]
0.7656689
0
>>> m = PSYCOP()
>>> i = m.get_fresh_id()
>>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None)
>>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None)
>>> m.rule_backward_if_elimination(m.Prop(m.PT.implies, a, b), b)
[A(x_0)]
def rule_backward_if_elimination(self, p, g):

        if p.type == self.PT.implies:
            # p = IF A(x) THEN B(x)
            if self.matching(p.v2, g):
                return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,
                                                          new_is_name=g.v1.is_name,
                                                          new_hat=g.v1.hat)]
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rule_backward_conjunctive_syllogism(self, p, g):\r\n\r\n if g.type == self.PT.negation and p.type == self.PT.negation:\r\n # g = NOT(A(x))\r\n if p.v1.type == self.PT.conjunction:\r\n # p = NOT(A(x) AND B(x))\r\n if self.matching(p.v1.v1, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v2, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n elif self.matching(p.v1.v2, g.v1):\r\n return [self.atom_prop_replace_properties(p.v1.v1, new_arg_id=g.v1.v1.arg_id,\r\n new_is_name=g.v1.v1.is_name,\r\n new_hat=g.v1.v1.hat)]\r\n return []", "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def rule_conversion(self, p, domain):\r\n\r\n if p.type == self.PT.negation:\r\n if p.v1.type == self.PT.conjunction:\r\n if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:\r\n i = self.get_fresh_id()\r\n p_new = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p.v1.v2, i),\r\n self.atom_prop_replace_properties(p.v1.v1, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p_new):\r\n return [p_new]\r\n return []", "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def prove_in_model_implies_not(formula, model):\n statement = InferenceRule(extract_assumptions(model), formula)\n lines = lines_from_statments(statement)\n create_proof_lines(formula, model, lines)\n return DeductiveProof(statement, AXIOMATIC_SYSTEM_IMPLIES_NOT, lines)", "def proof_or_counterexample_implies_not(formula):\n return proof_or_counterexample_with_func(formula, prove_in_model_implies_not)", "def get_bprop_logical_not(self):\n\n def bprop(x, out, dout):\n return (zeros_like(x),)\n return bprop", "def __remove_from_rhs(rule, A, epsilon_prob, seen_epsilon_vars):\n derivation = rule.derivation\n if A not in derivation:\n return [rule.copy()]\n\n variable = rule.variable\n without_A = []\n # Define p, q as in the formula from class\n p, q = epsilon_prob, rule.probability\n indexes_with_A = [i for i in range(len(derivation)) if 
derivation[i] == A]\n\n # Do not remove all A's from right-hand side if this results in an epsilon rule which we've previously removed.\n num_iterations = len(indexes_with_A)\n if not ((variable in seen_epsilon_vars) and (all(rhs_var == A for rhs_var in derivation))):\n num_iterations += 1\n\n for num_indexes_to_remove in range(num_iterations):\n for indexes_to_remove in combinations(indexes_with_A, num_indexes_to_remove):\n # Define K and L as in the formula from class\n K = num_indexes_to_remove # Number of A's removed\n L = len(indexes_with_A) - K # Number of A's not removed\n rhs_without_A = []\n for i in range(len(derivation)):\n if derivation[i] != A or i not in indexes_to_remove:\n rhs_without_A.append(derivation[i])\n probability = q * (p ** K) * ((1.0 - p) ** L)\n without_A.append(PCFGRule(variable, rhs_without_A, probability,\n {\"rule\": rule, \"indexes_to_remove\": indexes_to_remove, \"removed_variable\": A}))\n return without_A", "def implies_not_clause(self, source_variable, state):\n assert state in self.states\n yield Implies(source_variable, self.deassert_state(state))", "def implies_rule(self, step):\n self.proof[step.seq_num] = self.proof[step.assms[0]].on_prop(conv.rewr_conv(\"imp_disj_eq\"))", "def prove_implies_self():\n return DeductiveProof(\n InferenceRule([], Formula.from_infix('(p->p)')),\n [MP, I1, I2],\n [DeductiveProof.Line(Formula.from_infix('(p->((q->p)->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->((q->p)->p))->((p->(q->p))->(p->p))'), 2, []),\n DeductiveProof.Line(Formula.from_infix('(p->(q->p))'), 1, []),\n DeductiveProof.Line(Formula.from_infix('((p->(q->p))->(p->p))'), 0, [0, 1]),\n DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [2, 3])])", "def to_implies_false(formula: Formula) -> Formula:\r\n # Task 3.6d\r\n convert_implies = to_implies_not(formula)\r\n map_false = {'~': Formula('->', Formula('p'), Formula('F'))}\r\n return convert_implies.substitute_operators(map_false)", "def _poputil_recompute_backward(op, grads):\n return grads", "def indirect_proof(proof_function):\n\tdef indirect_proof_result(proof_function, assumption):\n\t\tchecked_proposition(assumption)\n\t\tinterp = _strict_match((A, conj, (neg, A)), proof_function(assumption))\n\t\treturn _proposition_class((neg, assumption[:]))\n\t\n\tif type(proof_function) != types.FunctionType:\n\t\traise TypeError()\n\treturn lambda assumption: indirect_proof_result(proof_function, assumption)", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec", "def L_model_backward(AL, Y, caches):\n pass", "def apply_backward_step(self, id, th_name, *, prevs=None, instsp=None):\n self.apply_tactic(id, tactic.rule(), args=(th_name, instsp), prevs=prevs)", "def 
inverse_mp(proof: DeductiveProof, assumption: Formula):\n new_statement = InferenceRule([a for a in proof.statement.assumptions if a != assumption],\n Formula(IMPLICATION_OPERATOR, assumption, proof.statement.conclusion))\n\n new_proof_lines = []\n\n for line_index, line in enumerate(proof.lines):\n # case 1 - if the assumption(A) is in the line\n\n if line.conclusion == assumption:\n check_if_THE_assumption(assumption, new_proof_lines)\n\n elif line.rule == 0: # case 3 - if there is an MP line\n check_if_MP(assumption, line, new_proof_lines, proof)\n\n\n # case 2 - if there is another assumption in the line\n # else it's another rule or assumption, we would like to prove it in a way that the conclusion of this\n # line will be proven by 'assumption -> conclusion'\n else:\n cheack_assumptions(assumption, line, new_proof_lines)\n return DeductiveProof(new_statement, proof.rules, new_proof_lines)", "def get_bprop_neg(self):\n neg_grad = P.Neg()\n\n def bprop(x, out, dout):\n dx = neg_grad(dout)\n return (dx,)\n return bprop", "def test_forward_backward_Propagator_consistency():\n from qmlify.propagation import Propagator, BackwardPropagator\n import copy\n\n #forward\n pdf_state, pdf_state_subset, integrator, ani_handler, atom_map, particle = propagator_testprep()\n backward_state = copy.deepcopy(pdf_state)\n propagator = Propagator(openmm_pdf_state = pdf_state,\n openmm_pdf_state_subset = pdf_state_subset,\n subset_indices_map = atom_map,\n integrator = integrator,\n ani_handler = ani_handler,\n context_cache=None,\n reassign_velocities=True,\n n_restart_attempts=0)\n particle_state, _return_dict = propagator.apply(particle.state, n_steps = 1, reset_integrator=True, apply_pdf_to_context=True)\n forward_work = propagator.state_works[0][-1]\n\n #backward\n pdf_state, pdf_state_subset, integrator, ani_handler, atom_map, particle = propagator_testprep()\n propagator = BackwardPropagator(openmm_pdf_state = backward_state,\n openmm_pdf_state_subset = pdf_state_subset,\n subset_indices_map = atom_map,\n integrator = integrator,\n ani_handler = ani_handler,\n context_cache=None,\n reassign_velocities=True,\n n_restart_attempts=0)\n particle_state, _return_dict = propagator.apply(particle.state, n_steps =1, reset_integrator=True, apply_pdf_to_context=True)\n backward_work = propagator.state_works[0][-1]\n\n #assert the forward work is equal to the backward work\n assert np.isclose(forward_work, -backward_work)", "def apply_rule(seq):\n for idx,prop in enumerate(seq.ant):\n\n if prop.conn == \"not\":\n # create a copy of seq (we don't want to mutate it)\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.con = [ not_a.p1 ] + new_seq.con\n # return a list of 3 values with seq2 being None\n # (since there is not split in this rule)\n return [new_seq , None, \"not left\"]\n\n elif prop.conn == \"or\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\"\n assert b_or_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_or_c.p1)\n new_seq2.ant.append(b_or_c.p2)\n # return the obtained sequents and the rule name\n # here we have two sequents since \"or left\"\n # has two sequents at the top\n return [new_seq1 , new_seq2, \"or 
left\"]\n\n elif prop.conn == \"and\":\n #create one copy of seq\n new_seq = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_and_c = new_seq.ant.pop(idx)\n # make sure we popped the correct one\n assert b_and_c.conn == \"and\"\n # apply the rule\n new_seq.ant.append(b_and_c.p1)\n new_seq.ant.append(b_and_c.p2)\n # return a list of 3 values with seq2 being None\n return [new_seq, None, 'and left']\n\n \n elif prop.conn == \"imp\":\n # create two copies of seq\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_imp_c = new_seq1.ant.pop(idx)\n # make sure we popped the correct one\n assert b_imp_c.conn == \"imp\"\n assert b_imp_c == new_seq2.ant.pop(idx)\n # apply the rule\n new_seq1.ant.append(b_imp_c.p2)\n new_seq2.con.append(b_imp_c.p1)\n # return the obtained sequents and the rule name\n return [new_seq1 , new_seq2, \"implies left\"]\n\n for idx,prop in enumerate(seq.con):\n if prop.conn == \"not\":\n new_seq = Sequent(seq.ant[:],seq.con[:])\n # pop the proposition from the list\n not_a = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert not_a.conn == \"not\"\n # apply the rule\n new_seq.ant = [ not_a.p1 ] + new_seq.ant\n # return a list of 3 values with seq2 being None\n return [new_seq , None, \"not right\"]\n elif prop.conn == \"or\":\n # create one copy of seq\n new_seq = Sequent(seq.ant[:], seq.con[:])\n # pop the proposition from the list\n b_or_c = new_seq.con.pop(idx)\n # make sure we popped the correct one\n assert b_or_c.conn == \"or\" \n # apply the rule\n new_seq.con.append(b_or_c.p1)\n new_seq.con.append(b_or_c.p2)\n # return the obtained sequent and the rule name\n return [new_seq , None, \"or right\"]\n\n elif prop.conn == 'and':\n new_seq1 = Sequent(seq.ant[:], seq.con[:])\n new_seq2 = Sequent(seq.ant[:], seq.con[:])\n b_and_c = new_seq1.con.pop(idx)\n assert b_and_c.conn == \"and\"\n assert b_and_c == new_seq2.con.pop(idx)\n new_seq1.con.append(b_and_c.p1)\n new_seq2.con.append(b_and_c.p2)\n return [new_seq1 , new_seq2, \"and right\"]\n\n elif prop.conn == 'imp':\n new_seq = Sequent(seq.ant[:], seq.con[:])\n b_imp_c = new_seq.con.pop(idx)\n assert b_imp_c.conn == \"imp\"\n new_seq.ant.append(b_imp_c.p1)\n new_seq.con.append(b_imp_c.p2)\n return [new_seq , None, \"implies right\"]", "def forward_backward(self, x):\n observation_log_probs = self._observation_log_probs(x, mask=None)\n with tf.name_scope('forward_belief_propagation'):\n self.forward_log_probs = self._forward(observation_log_probs)\n\n with tf.name_scope('backward_belief_propagation'):\n self.backward_log_probs = self._backward(observation_log_probs)", "def test_remove_assignment_rule(self):\n pass", "def get_bprop_not_equal(self):\n\n def bprop(x, y, out, dout):\n return zeros_like(x), zeros_like(y)\n return bprop", "def eliminate_implications(s):\n if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.)\n args = map(eliminate_implications, s.args)\n a, b = args[0], args[-1]\n if s.op == '>>':\n return (b | ~a)\n elif s.op == '<<':\n return (a | ~b)\n elif s.op == '<=>':\n return (a | ~b) & (b | ~a)\n elif s.op == '^':\n assert len(args) == 2 ## TODO: relax this restriction\n return (a & ~b) | (~a & b)\n else:\n assert s.op in ('&', '|', '~')\n return Expr(s.op, *args)", "def test_disjunct_hs():\n hs1 = LocalSpace(\"1\")\n hs2 = LocalSpace(\"2\")\n alpha, beta = symbols('alpha, beta')\n A = OperatorSymbol('A', hs=hs1)\n B = OperatorSymbol('B', hs=hs2)\n assert 
Commutator.create(A, B) == ZeroOperator\n assert Commutator.create(alpha, beta) == ZeroOperator\n assert Commutator.create(alpha, B) == ZeroOperator\n assert Commutator.create(A, beta) == ZeroOperator", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def test_Integrator_BackwardPropagator(annealing_steps=10):\n from qmlify.propagation import BackwardPropagator\n pdf_state, pdf_state_subset, integrator, ani_handler, atom_map, particle = propagator_testprep()\n\n propagator = BackwardPropagator(openmm_pdf_state = pdf_state,\n openmm_pdf_state_subset = pdf_state_subset,\n subset_indices_map = atom_map,\n integrator = integrator,\n ani_handler = ani_handler,\n context_cache=None,\n reassign_velocities=True,\n n_restart_attempts=0)\n\n #assert that the iteration is equal to the total number of iterations\n assert propagator._iteration == propagator._n_iterations\n\n particle_state, _return_dict = propagator.apply(particle.state, n_steps = annealing_steps, reset_integrator=True, apply_pdf_to_context=True)\n\n #the length of the state works must be the annealing step length + 1 since the first work is defaulted as 0.\n assert len(propagator.state_works[0]) == annealing_steps + 1\n\n #check to make sure that the particle state is maintained in memory\n assert particle_state == particle.state\n\n #the work should be positive\n assert propagator.state_works[0][-1] > 0.", "def prove_or_disprove(prop):\n #create sequent\n sequent = Sequent([],[prop])\n #create proof tree\n tree = ProofTree(sequent)\n tree.build_proof()\n #check leaves\n found = True\n seq_leaves = tree.get_leaves()\n for s in seq_leaves:\n if s.is_id() == False:\n assign(prop, s)\n found = False\n break\n if found == True:\n print tree", "def inverse_mp(proof, assumption):\n statement = create_new_statement(proof, assumption)\n rules = proof.rules\n lines = create_inverse_mp_proof(proof, assumption)\n return DeductiveProof(statement, rules, lines)" ]
[ "0.62638587", "0.5995946", "0.56629103", "0.55696833", "0.5512795", "0.5496695", "0.5350145", "0.5332289", "0.5228541", "0.52102053", "0.52015907", "0.51500666", "0.5129461", "0.5123244", "0.510226", "0.5068586", "0.50645703", "0.50582373", "0.5038378", "0.50190765", "0.49946788", "0.49903998", "0.49759275", "0.49566618", "0.49538586", "0.49394506", "0.49289978", "0.48636875", "0.4844654", "0.48397663" ]
0.76120085
0
Removes isomorphic propositions where both involve variables
def remove_duplicates(self, propositions):

        propositions_copy = list(propositions)
        uniques = []
        while True:
            duplicates = []
            if len(propositions_copy) == 0:
                return uniques
            p1 = propositions_copy[0]
            for p2 in propositions_copy:
                if self.isomorphic(p1, p2):
                    if not (self.get_leftmost_atom(p1).is_name or self.get_leftmost_atom(p2).is_name):
                        duplicates.append(p2)
            uniques.append(p1)
            propositions_copy.remove(p1)
            [propositions_copy.remove(x) for x in duplicates if x in propositions_copy]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unproductive(g):\n nonts = set(nonterminals(g))\n\n useful = {n for n in nonts if endings(g, n)}\n change = True\n\n while change:\n change = False\n\n for n in nonts.difference(useful):\n for prod in g.productions(n):\n if all(child in useful for child in children(g, prod)):\n useful.add(n)\n change = True\n break\n\n return nonts.difference(useful)", "def _reducedProtToPeps(protToPeps, proteins):\n return {k: v for k, v in viewitems(protToPeps) if k not in proteins}", "def eliminate_implications(s):\n if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.)\n args = map(eliminate_implications, s.args)\n a, b = args[0], args[-1]\n if s.op == '>>':\n return (b | ~a)\n elif s.op == '<<':\n return (a | ~b)\n elif s.op == '<=>':\n return (a | ~b) & (b | ~a)\n elif s.op == '^':\n assert len(args) == 2 ## TODO: relax this restriction\n return (a & ~b) | (~a & b)\n else:\n assert s.op in ('&', '|', '~')\n return Expr(s.op, *args)", "def eliminate_variable(variable, factors):\r\n containing_var = []\r\n not_containing_var = []\r\n for fac in factors:\r\n if variable in fac.get_variables():\r\n containing_var.append(fac)\r\n else:\r\n not_containing_var.append(fac)\r\n\r\n if not containing_var:\r\n return factors\r\n else:\r\n T = factor_module.multiply_batch(variable, containing_var)\r\n new_factor = factor_module.sum_out(variable, T)\r\n not_containing_var.append(new_factor)\r\n return not_containing_var", "def del_variables(self, variables):\n variables = [variables] if isinstance(variables, str) else set(variables)\n indices = [\n index\n for index, variable in enumerate(self.variables)\n if variable in variables\n ]\n self.variables = np.delete(self.variables, indices, 0)\n self.cardinality = np.delete(self.cardinality, indices, 0)\n self.inhibitor_probability = [\n prob_array\n for index, prob_array in enumerate(self.inhibitor_probability)\n if index not in indices\n ]", "def to_prenex_normal_form_from_uniquely_named_variables(formula: Formula) -> \\\r\n Tuple[Formula, Proof]:\r\n assert has_uniquely_named_variables(formula)\r\n # Task 11.9\r\n\r\n prover = Prover(Prover.AXIOMS.union(ADDITIONAL_QUANTIFICATION_AXIOMS))\r\n\r\n # First case\r\n if is_relation(formula.root) or is_equality(formula.root):\r\n prover.add_tautology(equivalence_of(formula, formula))\r\n return formula, prover.qed()\r\n\r\n elif is_unary(formula.root):\r\n form, proof = to_prenex_normal_form_from_uniquely_named_variables(formula.first)\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n step2 = prover.add_tautological_implication(equivalence_of(formula, Formula(\"~\", form)), [step1])\r\n\r\n neg_form, neg_proof = pull_out_quantifications_across_negation(Formula(\"~\", form))\r\n step3 = prover.add_proof(neg_proof.conclusion, neg_proof)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, neg_form), [step2, step3])\r\n\r\n return neg_form, prover.qed()\r\n\r\n elif is_binary(formula.root):\r\n left_f, left_p = to_prenex_normal_form_from_uniquely_named_variables(formula.first)\r\n step1 = prover.add_proof(left_p.conclusion, left_p)\r\n\r\n right_f, right_p = to_prenex_normal_form_from_uniquely_named_variables(formula.second)\r\n step2 = prover.add_proof(right_p.conclusion, right_p)\r\n\r\n form, proof = pull_out_quantifications_across_binary_operator(Formula(formula.root, left_f, right_f))\r\n step3 =prover.add_proof(proof.conclusion, proof)\r\n\r\n step4 = prover.add_tautological_implication(equivalence_of(Formula(formula.root, formula.first, formula.second), 
Formula(formula.root, left_f, right_f)), [step1, step2])\r\n\r\n prover.add_tautological_implication(equivalence_of(Formula(formula.root, formula.first, formula.second), form), [step4, step3])\r\n\r\n return form, prover.qed()\r\n\r\n\r\n else: # is_quantifier(formula.root)\r\n form, proof = to_prenex_normal_form_from_uniquely_named_variables(formula.predicate)\r\n\r\n step1 = prover.add_proof(proof.conclusion, proof)\r\n\r\n map = {\"x\":formula.variable, \"y\":formula.variable, \"R\":form.substitute({formula.variable: Term(\"_\")}), \"Q\":formula.predicate.substitute({formula.variable:Term(\"_\")})}\r\n step2 = prover.add_instantiated_assumption(ADDITIONAL_QUANTIFICATION_AXIOMS[14 if formula.root == \"A\" else 15].instantiate(map), ADDITIONAL_QUANTIFICATION_AXIOMS[14 if formula.root == \"A\" else 15], map)\r\n\r\n prover.add_tautological_implication(equivalence_of(formula, Formula(formula.root, formula.variable, form)), [step2, step1])\r\n\r\n return Formula(formula.root, formula.variable, form), prover.qed()", "def eliminate(values):\n for b in boxes:\n if len(values[b]) == 1:\n for p in peers[b]:\n values = assign_value(values, p, values[p].replace(values[b], ''))\n return values", "def neq_inplace(a,b):", "def revise(self, assignment, i, j):\n revised = False\n # For all the values in i's variables\n for x in assignment[i]:\n # if there exist NO possible values in the constraints between i and j\n # then remove this value from i\n if not any([(x,y) for y in assignment[j] if (x,y) in self.constraints[i][j]]):\n assignment[i].remove(x)\n revised = True\n return revised", "def remove_duplicate_binds(variable_binds):\n no_duplicates = []\n seen = set()\n for bind in variable_binds:\n t = tuple(bind.items())\n if t not in seen:\n seen.add(t)\n no_duplicates.append(bind)\n\n return no_duplicates", "def _anti_commuting_products(q_1: Q, q_2: Q) -> Dict:\n\n s_x, s_y, s_z = q_1.x, q_1.y, q_1.z\n q_2_x, q_2_y, q_2_z = q_2.x, q_2.y, q_2.z\n\n dif_dict = {\n \"yz-zy\": s_y * q_2_z - s_z * q_2_y,\n \"zx-xz\": s_z * q_2_x - s_x * q_2_z,\n \"xy-yx\": s_x * q_2_y - s_y * q_2_x,\n \"zy-yz\": -s_y * q_2_z + s_z * q_2_y,\n \"xz-zx\": -s_z * q_2_x + s_x * q_2_z,\n \"yx-xy\": -s_x * q_2_y + s_y * q_2_x,\n }\n\n return dif_dict", "def vertex_no_simultaneos(self):\n clauses = []\n for position in range(0,self.graph.num_vertices):\n for (v1,v2) in itertools.combinations(range(0,self.graph.num_vertices),2):\n clauses.append([ClauseVariable(True,v1,position),\n ClauseVariable(True,v2,position)])\n return clauses", "def rameaux(p):\r\n return list(set([Father(x) for x in uc1_leafy(p) if Father(x) != None]))", "def _remove_self_(p, pnts):\r\n keep = ~np.all(pnts == p, axis=1)\r\n return pnts[keep]", "def sup_dicti(self, x, y):\n for key in self.dict_possiblity:\n if x in self.dict_possiblity[key]:\n self.dict_possiblity[key].remove(x)\n if y in self.dict_possiblity[key]:\n self.dict_possiblity[key].remove(y)\n del self.dict_possiblity[y]\n del self.dict_possiblity[x]", "def _FilterProtonsAndElectrons(self):\n self.reactants = filter(lambda c: c.compound.kegg_id not in \n ['C00080', 'C05359'], self.reactants)", "def remove_nonterminal(g, nont):\n prods = [p for p in g.productions() if p.lhs() != nont and nont not in p.rhs()]\n\n return CFG(prods)", "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic 
and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def eliminate(values):\n # TODO: Copy your code from the classroom to complete this function\n for box,value in values.items():\n #print (box,value)\n if len(values[box]) == 1:\n for peer in peers[box]:\n if value in values[peer]:\n values[peer] = values[peer].replace(value,'')\n return values", "def eliminate(values):\n\tsolved = [box for box in boxes if len(values[box]) == 1]\n\tempties = [box for box in boxes if len(values[box]) == 0]\n\n\tfor empty in empties:\n\t\tvalues[empty] = '123456789'\n\n\tfor box in solved:\n\n\t\tfor peer in peers[box]:\n\t\t\tvalues = assign_value(values, peer, values[peer].replace(values[box], ''))\n\n\treturn values", "def remove_abs_vars(self):\n self.m.remove(self.bp_abs)\n self.m.remove(self.bn_abs)\n self.m.remove(self.gp_abs)\n self.m.remove(self.gn_abs)\n self.m.remove(self.beta_p)\n self.m.remove(self.beta_n)\n self.m.remove(self.gamma_p)\n self.m.remove(self.gamma_n)", "def simplify(self):\n if 0 in self.numer:\n self.numer = [0, ]\n self.denom = []\n self.neg = False\n return\n # Eliminate common factors:\n for factor in set(self.numer):\n # number of shared occurrences:\n count = min(self.denom.count(factor),\n self.numer.count(factor))\n # Remove each shared occurrence:\n for i in range(count):\n self.numer.remove(factor)\n self.denom.remove(factor)", "def exactly_one(variables):\n cnf = [variables]\n n = len(variables)\n\n for i in range(n):\n for j in range(i + 1, n):\n v1 = variables[i]\n v2 = variables[j]\n cnf.append([-v1, -v2])\n\n return cnf", "def remove_stems(graph = None):\n\tfor x,y in basepairs(graph = graph):\n\t\tgraph.remove_node(x)\n\t\tgraph.remove_node(y)", "def nonphysicalxs_remotion(a2_data,res_nufi_removal):\n for i in a2_data['I'].keys():\n if i=='MACR' and res_nufi_removal==True:\n if 'nufi' in a2_data['I'][i]['R'].keys():\n a2_data['I'][i]['R'].pop('nufi')\n for r in a2_data['I'][i]['R'].keys():\n if any(x in r for x in ['111', '112', '122', '212', '222', '211', '322',\n '321', '312', '311', '221', '121']):\n a2_data['I'][i]['R'].pop(r)\n return a2_data", "def disconnect(self, varpath, varpath2=None):\n to_remove = []\n if varpath2 is None:\n if self.parent and '.' not in varpath: # boundary var. 
make sure it's disconnected in parent\n self.parent.disconnect('.'.join([self.name, varpath]))\n graph = self._exprmapper._exprgraph\n to_remove = set()\n for expr in self._exprmapper.find_referring_exprs(varpath):\n for u, v in graph.edges(expr):\n to_remove.add((u, v))\n for u, v in graph.in_edges(expr):\n to_remove.add((u, v))\n else:\n to_remove = [(varpath, varpath2)]\n\n for u, v in to_remove:\n super(Assembly, self).disconnect(u, v)\n\n self._exprmapper.disconnect(varpath, varpath2)", "def unify_walk(a, b, U):\r\n opt = union(a.not_options, b.not_options)\r\n v = NotVariable(\"?\", opt)\r\n return U.merge(v, a, b)", "def revise(self, x, y):\n # return set a default return value of False\n ret_val = False\n # define a tuple of the two variables without their domains\n var_tup = (x, y)\n # define lists of the variable's domains\n x_values = self.domains[x].copy()\n y_values = self.domains[y].copy()\n # if the two variables exist in overlaps\n if var_tup in self.crossword.overlaps:\n # if that overlap is not None\n if self.crossword.overlaps.get(var_tup) is not None:\n # assign the overlap\n overlap = self.crossword.overlaps[var_tup]\n # generate the list of letters that x has to match with\n y_matches = [val[overlap[1]] for val in y_values]\n # for each of x's domain values\n for value in x_values:\n # if that value cannot match with y's domain values\n if value[overlap[0]] not in y_matches:\n # remove that value from the domain\n self.domains[x].remove(value)\n # set a flag for return value\n ret_val = True\n # return True if any changes were made\n return ret_val", "def rule_backward_if_elimination(self, p, g):\r\n\r\n if p.type == self.PT.implies:\r\n # p = IF A(x) THEN B(x)\r\n if self.matching(p.v2, g):\r\n return [self.atom_prop_replace_properties(p.v1, new_arg_id=g.v1.arg_id,\r\n new_is_name=g.v1.is_name,\r\n new_hat=g.v1.hat)]\r\n return None", "def add_all_different_constraint(self, variables):\n for (i, j) in self.get_all_possible_pairs(variables, variables):\n if i != j:\n self.add_constraint_one_way(i, j, lambda x, y: x != y)" ]
[ "0.5546809", "0.55113703", "0.5385168", "0.5358705", "0.5351735", "0.5314533", "0.53132886", "0.5306827", "0.5304884", "0.521399", "0.51803505", "0.5164956", "0.51246977", "0.51216865", "0.51209795", "0.5113263", "0.50640446", "0.5055276", "0.50446403", "0.5026258", "0.5015764", "0.5007304", "0.49918985", "0.49813545", "0.4978521", "0.49774358", "0.4976812", "0.495892", "0.49565858", "0.495521" ]
0.63047653
0
>>> m = PSYCOP()
>>> i = m.get_fresh_id()
>>> a = m.Prop(m.PT.atomic, m.Atom("A", i, False, False), None)
>>> b = m.Prop(m.PT.atomic, m.Atom("B", i, False, False), None)
>>> c = m.Prop(m.PT.atomic, m.Atom("C", i, False, False), None)
>>> p1 = m.Prop(m.PT.implies, a, b)
>>> p2 = m.Prop(m.PT.implies, b, c)
>>> p3 = m.Prop(m.PT.implies, a, c)
>>> m.extract_ac_conclusions({p1, p2, p3})
['Aac']
>>> m.extract_ac_conclusions({p1, p2})
[]
def extract_ac_conclusions(self, propositions):

        prop_ac = []
        for p in propositions:
            s = self.proposition_to_string(p)
            if s is not None:
                if {s[1], s[2]} == {"a", "c"}:
                    prop_ac.append(s)
        return prop_ac
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rule_exclusivity(self, p1, p2, domain):\r\n\r\n if p1.type == self.PT.implies and p2.type == self.PT.negation:\r\n if p2.v1.type == self.PT.conjunction:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic:\r\n if p2.v1.v1.type == self.PT.atomic and p2.v1.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.v1.arg_id == p2.v1.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.v1.is_name and not p2.v1.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n self.atom_prop_replace_properties(p1.v1,\r\n i),\r\n self.atom_prop_replace_properties(\r\n p2.v1.v2, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def generate_exclusions(proteins):\n pass", "def get_available_cops():\n allIncidents = Incident.get_all()\n cops = []\n \n for i in allIncidents:\n if(inicioAmostragem <= i.reporting_date and i.reporting_date <=terminoAmostragem):\n cops.append(i['operations_center']['id'])\n \n allReports = RelatoDeSituacao.get_all()\n \n for r in allReports:\n if (\n inicioAmostragem <= r.data_hora and \n r.data_hora <=terminoAmostragem and\n 'cop' in r.relator and # todos tem que ter o COP\n 'id' in r.relator['cop'] # todos tem que ter o id \n ):\n cops.append(r.relator['cop']['id'])\n \n return set(cops)", "def rule_transitivity(self, p1, p2, domain):\r\n if p1.type == self.PT.implies and p2.type == self.PT.implies:\r\n if p1.v1.type == self.PT.atomic and p1.v2.type == self.PT.atomic and \\\r\n p2.v1.type == self.PT.atomic and p2.v2.type == self.PT.atomic:\r\n if p1.v1.v1.arg_id == p1.v2.v1.arg_id and p2.v1.v1.arg_id == p2.v2.v1.arg_id:\r\n if not p1.v1.v1.is_name and not p1.v2.v1.is_name and not p2.v1.v1.is_name and not p2.v2.v1.is_name:\r\n if p1.v2.v1.predicate == p2.v1.v1.predicate:\r\n i = self.get_fresh_id()\r\n p = self.Prop(self.PT.implies,\r\n self.atom_prop_replace_properties(p1.v1, i),\r\n self.atom_prop_replace_properties(p2.v2, i))\r\n if not self.contains_isomorphic_proposition(domain, p):\r\n return [p]\r\n return []", "def find_conclusions(self):\n conc = []\n self.rule.right.visit_find_premises(conc)\n self.conclusions = conc", "def find_biomass_precursors(reaction):\n return [met for met in reaction.reactants\n if met.id != 'atp_c' or met.id != 'h2o_c']", "def find_own_attributes(cs):\n own_attributes = {}\n for con in cs:\n own_attributes[con] = []\n for attr in con.intent:\n own_attributes[con].append(attr)\n for sub_con in cs:\n if sub_con.intent < con.intent and\\\n attr in sub_con.intent:\n own_attributes[con].pop()\n break\n return own_attributes", "def terms_covered_once(prime_implicants, m_terms):\n covered = []\n for prime_implicant in prime_implicants:\n for covered_term in prime_implicant.get_covered_terms():\n covered.append(covered_term)\n return [i for i in covered if covered.count(i) == 1 and i in m_terms]", "def get_conjugate_acids_of(chebi_ent):\n if hasattr(chebi_ent, 'OntologyParents'):\n return [ent.chebiId for ent in chebi_ent.OntologyParents if\n (ent.type == \"is conjugate acid of\")]\n else:\n return []", "def rule_conversion(self, p, domain):\r\n\r\n if p.type == self.PT.negation:\r\n if p.v1.type == self.PT.conjunction:\r\n if p.v1.v1.type == self.PT.atomic and p.v1.v2.type == self.PT.atomic:\r\n i = self.get_fresh_id()\r\n p_new = self.Prop(self.PT.negation,\r\n self.Prop(self.PT.conjunction,\r\n 
self.atom_prop_replace_properties(p.v1.v2, i),\r\n self.atom_prop_replace_properties(p.v1.v1, i)),\r\n None)\r\n if not self.contains_isomorphic_proposition(domain, p_new):\r\n return [p_new]\r\n return []", "def prove_and_commutativity() -> Proof:\n all_lines = []\n all_lines.append(Proof.Line(Formula.parse('(p&q)')))\n all_lines.append(Proof.Line(Formula.parse('q'), AE1_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('p'), AE2_RULE, [0]))\n all_lines.append(Proof.Line(Formula.parse('(q&p)'), A_RULE, [1, 2]))\n statement = InferenceRule([Formula.parse('(p&q)')], Formula.parse('(q&p)'))\n all_rules = {A_RULE, AE1_RULE, AE2_RULE}\n return Proof(statement, all_rules, all_lines)", "def __eliminate_unused_constraits (self, objects):\n result = []\n for c in self.constraints_:\n if c [0] in objects and c [1] in objects:\n result.append (c)\n\n return result", "def cvars(occs):\n names = []\n canonicals = []\n for occ in occs:\n if occ.name in names:\n continue\n canonicals.append(occ)\n return canonicals", "def _get_conclusion(self, data):\n pass", "def get_all_incidents():\n allIncidents = Incident.get_all()\n #allCops = get_all_cops()\n incidents = []\n for i in allIncidents:\n if(\n (i['operations_center']['id'] in allCops) and\n (inicioAmostragem <= i.reporting_date and i.reporting_date <=terminoAmostragem)\n ):\n \n i['operations_center']['id'] = changeCop(i['operations_center']['id'])\n incidents.append(i)\n \n return incidents", "def findNonCriticalAct(listActivities): \n # Find all paths and critical paths\n \n listPaths = find_all_paths(listActivities[0])\n listDurations = []\n for cpt, path in enumerate(listPaths):\n listDurations.append(sum(act.duration for act in path))\n projectDuration = max(listDurations)\n listCriticalPaths = []\n for cpt, path in enumerate(listPaths):\n if listDurations[cpt] == projectDuration:\n listCriticalPaths.append(path)\n listNonCritActs = []\n for act in listActivities:\n if act not in sum(listCriticalPaths, []):\n listNonCritActs.append(act)\n return listNonCritActs", "def _find_memberless_constituencies(self):\n constituencies = Constituency.objects.filter(\n end=None, # Constituency currently exists/is not historical\n mp=None,\n )\n\n self.stdout('Constituencies with missing MP:')\n for constituency in constituencies:\n self.stdout(f'[{constituency.parliamentdotuk}] {constituency.name} {constituency.start}')", "def get_clarifications_copa(ex, nlp, comet_model):\n category_to_prefix_causes = {\"xIntent\": CATEGORY_TO_PREFIX[\"xIntent\"],\n \"xNeed\": CATEGORY_TO_PREFIX[\"xNeed\"]}\n\n category_to_prefix_effects = CATEGORY_TO_PREFIX.copy()\n category_to_prefix_effects.pop(\"xIntent\")\n category_to_prefix_effects.pop(\"xNeed\")\n category_to_prefix_effects.pop(\"xAttr\")\n\n input_event = ex[\"premise\"]\n personx, is_named_entity = get_personx(nlp, input_event)\n\n if personx == \"\":\n return []\n\n personx = personx if (is_named_entity or personx == \"I\") else personx.lower()\n outputs = {category: comet_model.predict(input_event, category, num_beams=5) for category in comet_model.categories}\n\n if ex[\"question\"] == \"cause\":\n category_to_prefix = category_to_prefix_causes\n else:\n category_to_prefix = category_to_prefix_effects\n\n curr_events = []\n for category, prefix in category_to_prefix.items():\n for out_event in outputs[category]:\n if out_event != \"none\" and out_event != \"\":\n if not out_event.lower().startswith(\"person\") and not out_event.lower().startswith(\"other\"):\n out_event = \" \".join((prefix, 
out_event))\n\n out_event = re.sub(\"personx\", personx, out_event, flags=re.I)\n out_event = re.sub(\"person x\", personx, out_event, flags=re.I)\n out_event = re.sub(\"persony\", \"others\", out_event, flags=re.I)\n out_event = re.sub(\"person y\", \"others\", out_event, flags=re.I)\n\n question = CATEGORY_TO_QUESTION[category].replace(\"PersonX\", personx)\n curr_events.append((question, out_event))\n\n return curr_events", "def accidentals(a_list):\n key = a_list.track.bars[0].key\n notes_in_key = get_notes(key.name)\n return [\n note for note in a_list\n if note.name not in notes_in_key\n and not note.is_rest\n ]", "def copy_attributes(ncin, ncout,exclude=None, include=None):\n att_dict = odict()\n for attribute_name in ncin.ncattrs():\n if include is not None and attribute_name not in include:\n continue #if include is defined, and this attribute is not there\n if exclude is not None and attribute_name in exclude:\n continue #if exclude is defined, and this attribute is there\n att_dict[attribute_name] = ncin.getncattr(attribute_name)\n ncout.setncatts(att_dict)", "def fd_projection(attributes: Set[A],\n fds: List[FunctionalDependency]) -> \\\n Iterator[FunctionalDependency]:\n for x in powerset(attributes):\n for b in attributes.intersection(closure(x, fds) - x):\n yield FunctionalDependency(x, {b})", "def find_properties(\r\n data=None, *, \r\n constraint: bool=..., \r\n keep_acronyms: bool=..., \r\n fill_value: str=None, \r\n kind: str='geology', \r\n attribute :str='code', \r\n property: str='description',\r\n ):\r\n constraint, keep_acronyms= ellipsis2false(constraint, keep_acronyms)\r\n kind = str(kind).lower().strip() \r\n if 'geology'.find (kind) >=0: kind ='geology'\r\n \r\n fname = buffer_file if kind =='geology' else 'AGSO_STCODES.csv'\r\n path_file = os.path.join( AGSO_PROPERTIES.get(\"props_dir\"), fname) \r\n _agso_data=get_agso_properties(path_file)\r\n dp =list ()\r\n for cod in ( attribute, property ): \r\n d = key_search (str( cod), \r\n default_keys= list(_agso_data.keys()), \r\n deep= True, \r\n parse_keys= False, \r\n raise_exception= True \r\n )\r\n dp.append (d[0])\r\n # unpack attribute and properties \r\n attribute , property = dp \r\n attribute = attribute.upper(); property = str(property).upper() \r\n prop_data={key:value for key , value in zip (\r\n _agso_data[attribute], _agso_data[property])}\r\n if not constraint: \r\n return tuple (prop_data.items() )\r\n \r\n if data is None: \r\n raise TypeError (\r\n \"Data cannot be None when constraint is set to ``True``\")\r\n # for consistency \r\n data = np.array (\r\n is_iterable (data , exclude_string= True, transform= True) ) \r\n if not _is_arraylike_1d(data ): \r\n raise GeoPropertyError (\r\n \"Geology or Geochemistry samples expects\"\r\n f\" one dimensional array. 
Got shape ={data.shape}\")\r\n \r\n found=False # flags if structure is found\r\n for kk, item in enumerate ( data) : \r\n for key, value in prop_data.items(): \r\n if str(value).lower().find (str(item).lower() )>=0: \r\n data[kk] = str(key).lower() if keep_acronyms else value\r\n found = True \r\n break\r\n # if item not found then \r\n # property data.\r\n if not found:\r\n if fill_value is not None:\r\n data[kk] = fill_value\r\n found =False \r\n return data", "def elimination_ofconc(a2_data):\n for data in a2_data.values():\n data.pop('conc')\n return a2_data", "def prove_implies_self():\n # i1_with_assumptions = InferenceRule([I1.conclusion.first],I1.conclusion.second)\n # i2_with_assumptions = InferenceRule([I2.conclusion.first,I2.conclusion.second.first],I2.conclusion.second.second)\n\n statement = InferenceRule([], Formula.from_infix('(p->p)')) # create conclusion\n\n rules = [MP, I1, I2] # create rules for the proof\n\n # create lines\n lines = []\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->((p->p)->p))->((p->(p->p))->(p->p)))'), 2, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->(p->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->((p->p)->p))'), 1, []))\n lines.append(DeductiveProof.Line(Formula.from_infix('((p->(p->p))->(p->p))'), 0, [2, 0]))\n lines.append(DeductiveProof.Line(Formula.from_infix('(p->p)'), 0, [1, 3]))\n dec = DeductiveProof(statement, rules, lines)\n return dec", "def neutralise(self):\n m = self.m\n\n # Regenerates computed properties like implicit\n # valence and ring information.\n m.UpdatePropertyCache(strict=False)\n numHs = []; tvs = []\n for ai in m.GetAtoms():\n numHs.append( ai.GetNumExplicitHs() + ai.GetNumImplicitHs() )\n tvs.append( ai.GetTotalValence() )\n\n self.get_charged_pairs()\n\n for i in range(self.na):\n ai = m.GetAtomWithIdx(i)\n ci = self.charges[i]\n if ci != 0:\n if i not in self.cpairs.ravel():\n msg = ' zi = %d, tvi = %d, ci = %d, neib = %d'%(self.zs[i], tvs[i], ci, cnsDic[zs[i]])\n assert tvs[i] - ci == cnsDic[zs[i]], msg\n if numHs[i] == 0 and ci > 0:\n # in the case of >[N+]<, i.e., N with CoordNum = 4\n # we don't have to do anything\n continue\n ai.SetFormalCharge( 0 )\n ai.SetNoImplicit(True)\n ai.SetNumExplicitHs( numHs[i]-ci )\n print('i, zi, ci, nH = ', self.zs[i], ci, numHs[i])\n self.m = m", "def generate_association_rules(frequent_patterns, min_confidance):\n frequent_patterns_dict = create_dictionary(frequent_patterns)\n association_rules_list = []\n for pattern in frequent_patterns:\n if len(pattern[0]) == 1:\n pass\n else:\n for i in range(1, len(pattern[0])):\n pattern[0].sort()\n sub_patterns = itertools.combinations(pattern[0], i)\n for each_sub_pattern in sub_patterns:\n a = list(each_sub_pattern)\n a.sort()\n b = set(pattern[0]) - set(each_sub_pattern)\n b = list(b)\n b.sort()\n confidance = (frequent_patterns_dict[tuple(pattern[0])]/frequent_patterns_dict[tuple(a)])*100\n if confidance >= min_confidance:\n if (a, b) not in association_rules_list:\n association_rules_list.append((a, b))\n return association_rules_list", "def filter_depend( self, atoms ):\n\n def dep_string_reduce(dep_string,enabled_useflags):\n dest = []\n tokens = iter(dep_string.split())\n useflags = enabled_useflags.split()\n\n for token in tokens:\n if token[-1] == \"?\":\n if token.startswith(\"!\"):\n skip = token[1:-1] in useflags\n else:\n skip = token[:-1] not in useflags\n if skip:\n level = 0\n while 1:\n token = next(tokens)\n if token == \"(\":\n level+=1\n if token == \")\":\n 
level-=1\n if level < 1:\n break\n continue\n elif token == \"(\" or token == \")\":\n continue\n else:\n dest.append(token)\n\n return \" \".join(dest)\n\n # gjl does not use use flags\n try:\n use = os.environ[\"USE\"]\n atoms = dep_string_reduce(atoms, use)\n except KeyError:\n pass\n return atoms", "def pred(self):\n return [ self.simple_reflection(i) for i in self.descents() ]", "def pl_resolve(ci, cj):\n clauses = []\n for di in disjuncts(ci):\n for dj in disjuncts(cj):\n if di == ~dj or ~di == dj:\n dnew = unique(removeall(di, disjuncts(ci)) +\n removeall(dj, disjuncts(cj)))\n clauses.append(associate('|', dnew))\n return clauses", "def test_enumerating_no_protomers(self):\n\n mol = Molecule.from_smiles(\"CC\")\n\n assert mol.enumerate_protomers() == []" ]
[ "0.5383962", "0.5216982", "0.5187525", "0.5153276", "0.51042193", "0.50277936", "0.4886437", "0.4801194", "0.47278795", "0.47071022", "0.468511", "0.46064875", "0.45927116", "0.45851114", "0.4572136", "0.4562979", "0.4547506", "0.45278445", "0.45021546", "0.44991416", "0.44809648", "0.44426805", "0.4430734", "0.442933", "0.44267243", "0.44057262", "0.43447128", "0.43345624", "0.4334463", "0.43225077" ]
0.7077263
0
Removes megahit intermediates in place. Renames contigs to contig.fa
def megahit(path_in):
    shutil.rmtree("{0}/{1}".format(path_in, "intermediate_contigs/"))
    # os.rename("{0}/final.contigs.fa".format(path_in), "{0}/contig.fa".format(path_in))
    os.listdir(path_in)
    return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup_precluster_intermediate_files(batch_index):\n files = [\"seed{0}.S.fasta\".format(batch_index),\n \"seed{0}.orphans.fasta\".format(batch_index),\n \"batch{0}.fasta\".format(batch_index),\n \"batch{0}.remains.fasta\".format(batch_index),\n \"batch{0}.remains2.fasta\".format(batch_index)]\n\n files += glob.glob(\"batch{0}*.minimap\".format(batch_index))\n for file in files:\n try:\n os.remove(file)\n except:\n print >> sys.stderr, \"Failure to remove {0}. Ignore.\".format(file)", "def idba(path_in):\n files_to_keep = [\"contig.fa\", \"scaffold.fa\", \"log\"]\n for file in os.scandir(path_in):\n if file.name not in files_to_keep:\n os.remove(file.path)\n return", "def clean():\n try:\n os.unlink(options.coords + 'mirza_mrna_input' + '.fa')\n os.unlink(options.coords + 'mirza_mirna_input' + '.fa')\n os.unlink(options.coords + 'mirza_mirna_expressions' + '.fa')\n except:\n pass", "def clean(self):\r\n\r\n for _, data in self.composition.items():\r\n index_file = Path(data['file'] + '.fxi')\r\n if index_file.exists():\r\n index_file.unlink()", "def remove_indiv_files(path):\n if isinstance(path, FSMap):\n path.fs.delete(path.root, recursive=True)\n else:\n fname, ext = os.path.splitext(path)\n if ext == '.zarr':\n shutil.rmtree(path)\n else:\n os.remove(path)", "def cleanup_callback(self):\n\n # Remove from include\n ghtin = self.idf.output_directory / \"GHTIn.idf\"\n if ghtin.exists():\n try:\n self.idf.include.remove(ghtin)\n ghtin.remove()\n except ValueError:\n log(\"nothing to remove\", lg.DEBUG)", "def unprepare_fasta_after_blastclust(in_fasta):\n tmp_file_name = in_fasta + TMP\n\n with open(tmp_file_name, 'w') as tmp_file:\n for seq_record in SeqIO.parse(in_fasta, \"fasta\"):\n tmp_file.write('>' + seq_record.id.split('_')[0] + '\\n' + str(seq_record.seq) + '\\n')\n\n os.remove(in_fasta)\n os.rename(tmp_file_name, in_fasta)", "def test_cleanup():\n os.remove(test_file[:-4] + \"_no_grave.h5m\")", "def space_cleaning(file=\"\"):\n intermediate = str(file) + str(\"_intermediate\")\n output_file = str(file)\n\n os.rename(intermediate, output_file)", "def delete_b_files(intermediate_files: List[File]) -> None:\n for f in intermediate_files:\n f.remove()", "def restart():\n for pig in pigs.copy():\n space.remove(pig.shape, pig.shape.body)\n pigs.remove(pig)\n for bird in birds.copy():\n space.remove(bird.shape, bird.shape.body)\n birds.remove(bird)\n for column in columns.copy():\n space.remove(column.shape, column.shape.body)\n columns.remove(column)\n for beam in beams.copy():\n space.remove(beam.shape, beam.shape.body)\n beams.remove(beam)", "def remove_matching_reads(filename, cont_file):\n if not os.path.exists(cont_file + '.bwt'):\n cml = shlex.split('bwa index %s' % cont_file)\n subprocess.call(cml)\n cml = 'bwa mem -t 2 %s %s 2> /dev/null | samtools view -f 4 -h - | samtools bam2fq - ' % (cont_file, filename)\n cml += '| seqtk seq -A - > clean_reads.fasta'\n\n subprocess.call(cml, shell=True)\n return 'clean_reads.fasta'", "def cleanup_intermediate_files(self):\n self.cmd(\"rm -f {local_temp_dir}/*rg_dict* \\\n {local_temp_dir}/*aln* \\\n {local_temp_dir}/snappy*\".\n format(\n local_temp_dir=self.local_temp_dir\n ),\n shell=True)", "def clean_chunk_files(dirpath):\n workdir = os.getcwd()\n os.chdir(dirpath)\n for filename in glob.glob(\"[0-9]*_[0-9]*_[0-9]*.hdf5\"):\n os.remove(filename)\n os.chdir(workdir)", "def clean():\n for f in [f for f in os.listdir() if f.endswith(\".part\")]:\n os.remove(f)", "def removeIncompatibleHabTags(self):\n habList = 
self.getHablistSettings()\n for imf in self.imageDirectoryObj.images:\n if imf.xmp_habitat not in habList:\n imf.remove_habitattagging()", "def deleteIntermediateFiles(self):\n uniq_files = set(self.files_to_delete)\n print (\"Deleting %d intermediate files\" % len(uniq_files))\n for fn in uniq_files:\n # don't delete log files\n if not fn.endswith(\".log\"):\n os.remove(fn)", "def removeMeanFile(indexName,DATA_path):\n print 'Finish \\n Remove useless files'\n tileList = glob.glob(DATA_path +'/' + indexName+'/*')\n for tile in tileList:\n meanList = glob.glob(tile + '/*_MEAN.tif')\n for file in meanList:\n os.remove(file)\n return 0", "def clean_face_rig():\r\n #TODO: Eventually create a non-linear way to re-add the nodes for a pose if you want to edit it later.\r\n if DRYRUN:\r\n print('clean face rig function - DRY RUN ONLY')\r\n return False\r\n\r\n def analyze_face():\r\n print('# All Nodes: {}'.format(len(pm.ls('*'))))\r\n print('# MLT Nodes: {}'.format(len(pm.ls(type='multiplyDivide'))))\r\n print('# MAP Nodes: {}'.format(len(pm.ls(type='remapValue'))))\r\n print('# JNT Nodes: {}'.format(len(pm.ls(type='joint'))))\r\n print('# TRS Nodes: {}'.format(len(pm.ls(type='transform'))))\r\n print('# ADD Nodes: {}'.format(len(pm.ls(type='plusMinusAverage'))))\r\n\r\n if DEBUG:\r\n analyze_face()\r\n\r\n for oPos in pm.ls('*_POSE', type='transform'):\r\n poseMLT = set( oPos.outputs(type='multiplyDivide') )\r\n for each in poseMLT:\r\n # track back to the MLT input to separate out each pose translate, rotate and scale.\r\n # sum the abs() values to see if the pose delta is empty.\r\n poseDelta = sum([abs(x.get()) for x in each.inputs(type='transform', plugs=True)])\r\n if poseDelta < 0.001:\r\n pm.delete(each)\r\n\r\n if DEBUG:\r\n analyze_face()\r\n\r\n allZones = pm.ls('*_zone', type='objectSet')\r\n pm.delete(allZones)\r\n print('The rig has been cleaned. Unused MLT and MAP nodes have been removed. 
Zone sets have been deleted.')\r\n return True", "def CleanEpi(self):\n for entry in self.info.keys():\n info = self.info[entry]\n if info['psdname'] == 'epi':\n for tag in ('imgfile', 'imgfile_m', 'imgfile_mf', 'imgfile_t'):\n if info.has_key(tag) and info[tag] is not None and \\\n os.path.exists(info[tag]):\n print 'Deleting %s*' % (info[tag], info['suffix'])\n cmd = '/bin/rm %s%s*' % (info[tag], info['suffix'])\n self.ExecCmd(cmd)\n if '.BRIK' in info['suffix']:\n cmd = '/bin/rm %s%s*' % (info[tag], \\\n info['suffix'].replace('.BRIK','.HEAD'))\n self.ExecCmd(cmd)", "def remove_intermediate_files(self):\r\n\r\n # tmp files are written in the current dir,\r\n # app controller always jumps into dir specified via exec_dir\r\n # Note: blast intermediates are not removed\r\n exec_dir = str(self.Parameters['--exec_dir'].Value)\r\n inp_file_name = str(self.Parameters['--query_NAST'].Value)\r\n\r\n exec_dir = exec_dir.rstrip('\"')\r\n exec_dir = exec_dir.lstrip('\"')\r\n\r\n inp_file_name = inp_file_name.rstrip('\"')\r\n inp_file_name = inp_file_name.lstrip('\"')\r\n\r\n tmp_suffixes = [\".CPS\", \".CPS.CPC\", \".CPS_RENAST\", \".CPS_RENAST.cidx\",\r\n \".CPS.CPC.wTaxons\", \".cidx\"]\r\n cs_tmp_files = [\r\n exec_dir +\r\n '/' +\r\n inp_file_name +\r\n x for x in tmp_suffixes]\r\n remove_files(cs_tmp_files, error_on_missing=False)\r\n\r\n db_param = self.Parameters['--db_NAST']\r\n if db_param.isOn():\r\n nast_db_name = str(db_param.Value)\r\n nast_db_name = nast_db_name.rstrip('\"')\r\n nast_db_name = nast_db_name.lstrip('\"')\r\n\r\n # Better do not remove this file since other ChimeraSlayer\r\n # instances running on the same ref set might use this file\r\n # Should be rather deleted in the calling function\r\n# remove_files([nast_db_name + \".cidx\"],\r\n# error_on_missing=False)\r\n\r\n fasta_param = self.Parameters['--db_FASTA']\r\n if fasta_param.isOn():\r\n fasta_name = str(fasta_param.Value)\r\n fasta_name = fasta_name.rstrip('\"')\r\n fasta_name = fasta_name.lstrip('\"')\r\n\r\n blast_db_files = [\r\n fasta_name +\r\n x for x in [\r\n \".nsq\",\r\n \".nin\",\r\n \".nhr\",\r\n \".cidx\"]]\r\n remove_files(blast_db_files, error_on_missing=False)", "def cleanup() -> None:\n\n for fname in glob(os.path.join(tdir, 'alexandria.*')):\n if os.path.splitext(fname)[1] not in {'.c', '.h'}:\n os.unlink(fname)", "def _cleanup(self, fnum):\n while os.path.exists('%s.%s' % (self.name, fnum)):\n try:\n fname = '%s.%s' % (self.name, fnum)\n os.unlink(fname)\n # self.log.debug(\"Cleaned up file: %s\", fname)\n except:\n pass\n fnum -= 1", "def _remove_old_nastran_geometry(self, bdf_filename):\n #return self._remove_old_geometry(bdf_filename)\n\n # skip_reading = self.removeOldGeometry(bdf_filename)\n skip_reading = False\n if bdf_filename is None or bdf_filename == '':\n #self.grid = vtk.vtkUnstructuredGrid()\n #self.scalar_bar_actor.VisibilityOff()\n skip_reading = True\n return skip_reading\n else:\n self.gui.turn_text_off()\n self.gui.grid.Reset()\n\n #self.gui.eid_map = {}\n #self.gui.nid_map = {}\n\n self.gui.result_cases = {}\n self.gui.ncases = 0\n\n # TODO: is this doing anything?\n for name in ('case_keys', 'icase', 'isubcase_name_map'):\n if hasattr(self, name):\n del name\n return skip_reading", "def censor_contig(contig_end, u_contigs, o_dict):\n for c_e in [contig_end, other_end(contig_end)]:\n if c_e in u_contigs:\n u_contigs.remove(c_e)\n if c_e in o_dict:\n o_dic = o_dict[c_e]\n if o_dic != {}:\n overlapped_contig = list(o_dic.keys())[0]\n if overlapped_contig in o_dict: del 
o_dict[overlapped_contig][c_e]\n del o_dict[c_e]\n return", "def preprocess_nico(path: Path) -> None:\n for superclass in (\"animals\", \"vehicles\"):\n superclass_dir = path / superclass\n for class_dir in superclass_dir.glob(\"*\"):\n for context_dir in class_dir.glob(\"*\"):\n images_paths: list[Path] = []\n for ext in (\"jpg\", \"jpeg\", \"png\", \"gif\"):\n images_paths.extend(context_dir.glob(f\"**/*.{ext}\"))\n for counter, image_path in enumerate(images_paths):\n try:\n image = Image.open(image_path)\n if image.format == \"GIF\":\n image = image.convert(\"RGBA\")\n # Convert from gif to jpeg by extracting the first frame\n new_image = _gif_to_jpeg(image)\n new_image_path = image_path.with_suffix(\".jpg\")\n # Delete the original gif\n image_path.unlink()\n new_image.save(new_image_path, \"JPEG\")\n assert new_image_path.exists()\n image_path = new_image_path\n\n concept = image_path.parent.parent.stem\n context = image_path.parent.stem\n new_name = (\n image_path.parent\n / f\"{concept}_{context}_{counter:04}{image_path.suffix}\".replace(\n \" \", \"_\"\n )\n )\n image_path.rename(new_name)\n # Image is corrupted - delete it\n except UnidentifiedImageError:\n image_path.unlink()", "def handleCleanMetadataKeep(self):\n logging.debug(\"Removing all metadata found...\")\n filePath = self.filesList.selectedItems()[0].text(2)\n self.filesList.removeAllMeta(filePath)", "def removez_all(self,name):\n\t\tnew_name = string.replace(name,' ', '.')\n\t\tnew_name = self.remove_uploader(new_name)\n\t\tnew_name = string.replace(new_name,'..', '.')\n\t\t\n\t\t#new_name = string.replace(name,'\\&.', '.') BUG\n\t\t\n\t\tnew_name = string.replace(new_name,'-', '.')\n\t\tnew_name = string.replace(new_name,'_', '.')\t\t\n\t\tnew_name = string.replace(new_name,'(', '')\n\t\tnew_name = string.replace(new_name,')', '')\n\t\tnew_name = string.replace(new_name,'..', '.')\n\t\t\t\t\t\n\t\tnew_name = string.replace(new_name,'X264', 'x264')\n\t\tnew_name = string.replace(new_name,'XVID', 'XviD')\n\t\tnew_name = string.replace(new_name,'TRUEHD', 'TrueHD')\n\t\t\t\t\t\n\t\tnew_name = string.replace(new_name,'multi', 'MULTi')\n\t\tnew_name = string.replace(new_name,'Multi', 'MULTi')\n\t\tnew_name = string.replace(new_name,'MULTI', 'MULTi')\n\t\tnew_name = string.replace(new_name,'MULTiF', 'MULTi')\n\t\tnew_name = string.replace(new_name,'VO.VF','MULTi')\n\t\tnew_name = string.replace(new_name,'VF.VOSTFR','MULTi')\n\t\tnew_name = string.replace(new_name,'VF.VO+ST','MULTi')\n\t\t\n\t\t\n\t\tnew_name = string.replace(new_name,'TRUE.HD', 'TRUEHD')\n\t\tnew_name = string.replace(new_name,'blueray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'bluray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'Bluray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BluraY', 'BluRay')\n\t\tnew_name = string.replace(new_name,'Blu-Ray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'Blu.Ray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'Blu.ray', 'BluRay')\n\t\tnew_name = string.replace(new_name,'(Bluray-rip)', 'BluRay')\n\t\tnew_name = string.replace(new_name,'Blu-Ray Rip', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BDRip', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BDRIP', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BDRiP', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BRDRiP', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BRDRip', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BRRip', 'BluRay')\n\t\tnew_name = string.replace(new_name,'BD', 'BluRay')\n\t\tnew_name = 
string.replace(new_name,'HD-DVDRiP', 'HDRiP')\n\t\tnew_name = string.replace(new_name,'HD.DVDRiP', 'HDRiP')\n\t\tnew_name = string.replace(new_name,'HDVD', 'HDRiP')\n\t\tnew_name = string.replace(new_name,'HDDVD', 'HDRiP')\t\t\t\t\n\t\tnew_name = string.replace(new_name,'DVDrip','DVDRiP')\n\t\tnew_name = string.replace(new_name,'DVDriP','DVDRiP')\n\t\tnew_name = string.replace(new_name,'dvdrip','DVDRiP')\n\t\tnew_name = string.replace(new_name,'DVD5','DVDRiP')\n\t\tnew_name = string.replace(new_name,'.DVD.','DVDRiP')\n\t\t\n\t\t\n\t\tnew_name = string.replace(new_name,'.DD.5.1','DD5.1')\n\t\tnew_name = string.replace(new_name,'6.Canaux','5.1')\t\n\t\tnew_name = string.replace(new_name,'dts', 'DTS')\n\t\tnew_name = string.replace(new_name,'Dts', 'DTS')\n\t\tnew_name = string.replace(new_name,'DtS', 'DTS')\n\t\tnew_name = string.replace(new_name,'DTS.DTS','DTS')\n\t\tnew_name = string.replace(new_name,'DTSHD.','DTS.')\n\t\tnew_name = string.replace(new_name,'.HD.','.')\n\t\t\n\t\tnew_name = string.replace(new_name,'hdma', 'HDMA')\n\t\tnew_name = string.replace(new_name,'HD MA', 'HDMA')\n\t\tnew_name = string.replace(new_name,'HD.MA', 'HDMA')\n\t\tnew_name = string.replace(new_name,'.MA.', '.HDMA.')\n\t\tnew_name = string.replace(new_name,'ac3','AC3')\n\t\tnew_name = string.replace(new_name,'Ac3','AC3')\n\t\tnew_name = string.replace(new_name,'AC.3.','AC3.')\n\t\t\n\t\tnew_name = string.replace(new_name,'HD.HRA','HRA') #High resolution audio\n\t\t#new_name = string.replace(new_name,'.HRA.', '.')\n\t\t\n\t\tnew_name = string.replace(new_name,'.fr.', '.FRENCH.')\n\t\tnew_name = string.replace(new_name,'.Fr.', '.FRENCH.')\n\t\tnew_name = string.replace(new_name,'.FR.', '.FRENCH.')\n\t\tnew_name = string.replace(new_name,'french', 'FRENCH')\n\t\tnew_name = string.replace(new_name,'French', 'FRENCH')\n\t\tnew_name = string.replace(new_name,'VF.', 'FRENCH.')\n\t\tnew_name = string.replace(new_name,'VFF', 'TRUEFRENCH')\t\t\n\t\tnew_name = string.replace(new_name,'truefrench', 'TRUEFRENCH')\n\t\tnew_name = string.replace(new_name,'Truefrench', 'TRUEFRENCH')\n\t\tnew_name = string.replace(new_name,'TrueFrench', 'TRUEFRENCH')\n\t\tnew_name = string.replace(new_name,'TrueFRENCH', 'TRUEFRENCH')\n\t\t\n\t\tnew_name = string.replace(new_name,'VF', 'FRENCH')\n\t\tnew_name = string.replace(new_name,'.PAL.', '.')\n\t\tnew_name = string.replace(new_name,'HD1080', '1080p')\n\t\tnew_name = string.replace(new_name,'1080P', '1080p')\n\t\tnew_name = string.replace(new_name,'720P', '720p')\n\t\t\n\t\tnew_name = string.replace(new_name,'VERSION.LONGUE','EXTENDED')\n\t\tnew_name = string.replace(new_name,'Version.Longue','EXTENDED')\n\t\tnew_name = string.replace(new_name,'Extended.Cut', 'EXTENDED')\n\t\tnew_name = string.replace(new_name,'Extended.Edition', 'EXTENDED')\n\t\tnew_name = string.replace(new_name,'Director\\'s.Cut', 'DIRECTOR.CUT')\n\t\tnew_name = string.replace(new_name,'Directors.Cut', 'DIRECTOR.CUT')\n\t\tnew_name = string.replace(new_name,'DC', 'DIRECTOR.CUT')\n\t\tnew_name = string.replace(new_name,'D/C', 'DIRECTOR.CUT')\t\t\n\t\tnew_name = string.replace(new_name,'Remastered','REMASTERED')\n\t\tnew_name = string.replace(new_name,'Theatrical.Cut','THEATRICAL.CUT')\n\t\tnew_name = string.replace(new_name,'Theatricul.Cut','THEATRICAL.CUT')\n\t\tnew_name = string.replace(new_name,'Sunshine.Edition','SUNSHINE.EDITION')\n\t\tnew_name = string.replace(new_name,'Revisited.The.Final.Cut','REVISITED.FiNAL.CUT')\t\t\n\t\tnew_name = string.replace(new_name,'LIMITED','LiMiTED')\n\t\t\n\t\tnew_name = 
string.replace(new_name,'iNT','iNTERNAL')\n\t\tnew_name = string.replace(new_name,'JKF.3D', 'JFK3D')\n\t\tnew_name = string.replace(new_name,'GAIA', 'GAÏA')\n\t\tnew_name = string.replace(new_name,'Gaïa', 'GAÏA')\n\t\tnew_name = string.replace(new_name,'GAÏA', 'GAÏA')\n\t\tnew_name = string.replace(new_name,'GAϏA', 'GAÏA')\n\t\tnew_name = string.replace(new_name,'GAiA', 'GAÏA')\n\t\t\n\t\tnew_name = string.replace(new_name,'dxva', 'DXVA') #<harwdare decode\n\t\tnew_name = string.replace(new_name,'rip','')\n\t\tnew_name = string.replace(new_name,'Rip','')\n\t\tnew_name = string.replace(new_name,'Ripp','')\n\t\tnew_name = string.replace(new_name,'.mkv.mkv', '.mkv')\n\t\t#new_name = string.replace(new_name,'..', '.')\t#USELESS\n\t\treturn self.refactor_line(new_name)", "def clean_fasta(filename):\n\tfoldedfile = filename+\".folded.fa\"\n\tfoldcmd = \"fold \"+filename+\" > \"+foldedfile\n\tsubprocess.call(foldcmd, shell=True)\n\tcleanedfile = filename+\".cleaned.fa\"\n\tsedcmd = \"sed -e 's/\\r//g' \"+foldedfile+\" > \"+cleanedfile\n\tsubprocess.call(sedcmd, shell=True)\n\treturn cleanedfile", "def cleanup_sub(vg_dir):\n\tnew_dir = vg_dir+'_clean'\n\tos.mkdir(new_dir)\n\tphot_vg_files = filter(lambda x: '.txt' in x, os.listdir(vg_dir))\n\t# phot_vg_phottot_files = filter(lambda x: 'phottot' in x, phot_vg_files)\n\tfor f in phot_vg_files:\n\t\tdf = pd.read_table(vg_dir+'/'+f,\n\t\t\tnames = ['id','ra','dec','flux','unc','x','y','flux_uncor'],\n\t\t\tdelim_whitespace=True)\n\t\tstarnums, dithers = zip(*[i.split('_')[1:4:2] for i in df.id])\n\t\tdf['id'] = [int(i) for i in starnums]\n\t\tdf['dither'] = [int(i) for i in dithers]\n\t\tsorted_df = df.sort(['id','dither'])\n\t\t# new: remove the aperture correction applied by varoujan to the uncertainties\n\t\tch = f.split('-')[2]\n\t\tif ch == 'ch1':\n\t\t\tsorted_df['unc'] /= 1.205\n\t\telif ch == 'ch2':\n\t\t\tsorted_df['unc'] /= 1.221\n\t\telse:\n\t\t\traise(TypeError(\"unexpected channel\"))\n\t\tfnew = '_'.join(f.split('-')[::2])+'_raw.csv'\n\t\tsorted_df.to_csv(new_dir+'/'+fnew, index=False, float_format='%.8f')\n\t\t# also calculate mean RA/Dec, flux, and quadrature sum uncertainty\n\t\tgrouped = sorted_df.groupby('id')\n\t\tagg = grouped[['ra','dec','flux']].aggregate(np.median)\n\t\tquadsum = grouped['unc'].aggregate(lambda x: np.sqrt(np.sum(x**2)))\n\t\tagg['unc'] = quadsum\n\t\tfnew = '_'.join(f.split('-')[::2])+'_agg.csv'\n\t\tagg.to_csv(new_dir+'/'+fnew, index=True, float_format='%.8f')" ]
[ "0.6157111", "0.6016867", "0.5937992", "0.5761502", "0.5596034", "0.5555919", "0.5539349", "0.550775", "0.54886466", "0.54612345", "0.54590577", "0.54370695", "0.5422165", "0.5405502", "0.53871214", "0.53869516", "0.5386505", "0.53295577", "0.5305551", "0.52941173", "0.5290946", "0.52829796", "0.5274832", "0.52658665", "0.52323455", "0.52149063", "0.51983035", "0.51966894", "0.5192643", "0.5179099" ]
0.81108123
0
1. Pop the last element in the heap 2. If it isn't the only element, then the root is the max. Move the last element to the root and sift it up to restore the max-heap order 3. If it was the only element, just return it
def heap_pop_max(heap): last = heap.pop() if heap: return_item = heap[0] heap[0] = last heapq._siftup_max(heap, 0) else: return_item = last return return_item
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_top_from_max_heap(x):\n last = x[-1]\n x = x.at[0].set(last)[:-1]\n return heapify_subtree(x, 0)", "def max_heapify(array, root):\n print(array)\n while True:\n left_child = 2*i\n right_child = 2*i + 1\n print(left_child, right_child)\n n = len(array)\n\n if left_child <= n and array[left_child] > array[i]:\n largest = left_child\n else:\n largest = i\n print(right_child)\n print('largest index',largest)\n if right_child <= n and array[right_child] > array[largest]:\n largest = right_child\n print(array[largest])\n\n if largest != i:\n a, b = array.index(array[i]), array.index(array[largest])\n array[b], array[a] = array[a], array[b]\n print(largest)\n return max_heapify(array, largest)", "def delete_max(self):\n retval = self.heap_list[1]\n self.heap_list[1] = self.heap_list[self.size]\n self.size = self.size - 1\n pop_val = self.heap_list.pop()\n self.percolate_down(1)\n return retval", "def max_heapify(lst, n, root):\n\n largest = root\n l = 2 * root + 1\n r = 2 * root + 2\n\n if l < n and lst[l] > lst[largest]:\n largest = l\n if r < n and lst[r] > lst[largest]:\n largest = r\n if largest != root:\n lst[root], lst[largest] = lst[largest], lst[root]\n max_heapify(lst, n, largest)", "def heap_extract_max(self, A):\n max = A[0]\n A[0] = A[len(A)-1]\n del A[-1] #use del so it stays out of the function\n self.max_heapify(A, 0)\n return max", "def heap_push_max(heap, item):\n heap.append(item)\n heapq._siftdown_max(heap, 0, len(heap)-1)", "def max_heapify_unrecursive(heap, i):\n while True:\n left_child = left(i)\n right_child = right(i)\n largest = i\n if left_child < len(heap) and heap[left_child] > heap[i]:\n largest = left_child\n if right_child < len(heap) and heap[right_child] > heap[largest]:\n largest = right_child\n if largest == i:\n return\n swap(heap, i, largest)\n i = largest", "def max_heapify(heap, i):\n left_child = left(i)\n right_child = right(i)\n if left_child < len(heap) and heap[left_child] > heap[i]:\n largest = left_child\n else:\n largest = i\n if right_child < len(heap) and heap[right_child] > heap[largest]:\n largest = right_child\n if largest != i:\n swap(heap, i, largest)\n max_heapify(heap, largest)", "def heap_extract_max(self, A):\n maxi = A[0]\n A[0] = A[-1]\n del A[-1] #use del so it stays out of the function\n self.max_heapify(A, 0)\n return maxi", "def build_max_heap(A):\r\n i = int((len(A)-2)//2)\r\n while i >= 0:\r\n max_heapify(A, i)\r\n i -= 1\r\n return A", "def _find_max(self, root):\n while root.right:\n root = root.right\n return root", "def build_max_heap(heap):\n\tfor j in range(heap.len//2, -1, -1):\n\t\tmax_heapify(heap, j)", "def build_max_heap(A):\n A.insert(0, len(A))\n for i in range(len(A)//2, 0, -1):\n max_heapify(A, i)", "def build_max_heap(a):\r\n for i in range(math.floor((len(a) - 1)/2), -1, -1):\r\n max_heapify(a, i)", "def find_min_in_max_heap(self):\n min_number = None\n last_parent = (self.size - 1) // 2\n first_leaf = last_parent + 1\n # Shortcut to find first_leaf:\n # (self.size - 1) //2 + 1 = (self.size + 1) // 2\n # But for simplicity, we will use variable first_leaf in steps\n for index in range(first_leaf, self.size):\n if min_number is None:\n min_number = self.heap[index]\n else:\n min_number = min(min_number, self.heap[index])\n\n return min_number", "def pop(self):\n if self.heap == [0]:\n raise EmptyHeapException('Heap is empty.')\n self.heap[1], self.heap[-1] = self.heap[-1], self.heap[1]\n minimum = self.heap[-1] # Store min val to return later\n self.heap = self.heap[:-1] # Remove final element\n 
self._percolate_down(1)\n return minimum", "def construct_max_heap(self, lst):\n self.heap_list = lst\n #start compare node\n node = (len(self.heap_list)-2)/2\n while node >= 0:\n self.sift_down(node, len(self.heap_list)-1)\n node -= 1", "def max_heapify(A, i):\n # ensure to create copy of the list\n l = 2 * i\n r = (2 * i) + 1\n if l <= len(A) and A[l] > A[i]:#base case\n print(A[i])\n largest = l\n else:\n largest = i\n if r<=len(A) and A[r] > A[largest]:\n largest = r\n if largest != i:\n A[i], A[largest] = A[largest], A[i]\n max_heapify(A, largest)\n return largest", "def pop(self):\n root = self.heap[1]\n del self.rank[root]\n x = self.heap.pop() # remove last leaf\n if self: # if heap is not empty\n self.heap[1] = x # move the last leaf\n self.rank[x] = 1 # to the root\n self.down(1) # maintain heap order\n return root", "def remove(self):\n max_item = self.heaplist[1]\n self.heaplist[1] = self.heaplist[self.currentsize]\n self.currentsize -= 1\n self.heaplist.pop()\n self.shift_item_down(1)\n return max_item", "def top(heap):\n return heap[_root()]", "def heappop(heap):\n lastelt = heap.pop() # raises appropriate IndexError if heap is empty\n if heap:\n returnitem = heap[0]\n heap[0] = lastelt\n Heap.siftup(heap, 0)\n return returnitem\n return lastelt", "def _get_new_max(self, insert=True):\n right = 1\n left = 1\n if self._root:\n if self._root._rkid:\n right = self._new_depth(self._root._rkid, 2)\n if self._root._lkid:\n left = self._new_depth(self._root._lkid, 2)\n self._rbal = right\n self._lbal = left\n if insert:\n if right > left:\n if right > self._max_depth:\n self._max_depth = right\n elif left > self._max_depth:\n self._max_depth = left\n else:\n if right > left:\n if right < self._max_depth:\n self._max_depth = right\n elif left < self._max_depth:\n self._max_depth = left", "def pop(self):\n # O(1)\n # Your code here\n item = self.stack.pop() # O(1)\n # check if we're removing the max\n #if item == max: #O(1)\n # if so, we need to update self. 
max\n #new_max = self.find_max() # O(n) # Don't need find anymore\n # self.max = new_max #O(1)\n # self.max = item\n #return self.stack.pop()\n self.max_stack.pop()\n return item", "def get_median(max_heap, min_heap):\n e = None\n if len(max_heap) or len(min_heap):\n if len(max_heap) >= len(min_heap):\n e = heap_pop_max(max_heap)\n heap_push_max(max_heap, e)\n\n else:\n e = heapq.heappop(min_heap)\n heapq.heappush(min_heap, e)\n return e", "def pop(self):\n try:\n top_node = self._heap[0]\n self._heap = [self._heap[-1]] + self._heap[1:-1]\n self.sort_down(0)\n return top_node\n except IndexError:\n raise IndexError('Cannot pop from an empty heap')", "def max_heapify(A, i):\n l = left(i)\n r = right(i)\n if l <= A[0] and A[l] > A[i]:\n largest = l\n else:\n largest = i\n if r <= A[0] and A[r] > A[largest]:\n largest = r\n if largest != i:\n A[i], A[largest] = A[largest], A[i]\n max_heapify(A, largest)", "def pop(self):\n item = self.stack.pop()\n\n if item == self.max[-1]: # pop if the same element\n self.max.pop()\n\n return item", "def pop(self):\n root = self.heap[1]\n del self.rank[root]\n x = self.heap.pop() # remove last leaf\n if self: # if heap is not empty\n self.heap[1] = x # put last leaf to root\n self.rank[x] = 1\n self.down(1) # maintain heap order\n return root", "def max_heapify(self, i):\n largest, left_index, right_index = i, 2*i+1, 2*i+2\n current_length = self.heap_size\n\n if (left_index < current_length) and (self.heap[left_index].priority_key > self.heap[largest].priority_key):\n largest = left_index\n\n if (right_index < current_length) and (self.heap[right_index].priority_key > self.heap[largest].priority_key):\n largest = right_index\n\n if largest != i:\n self.heap[largest], self.heap[i] = self.heap[i], self.heap[largest]\n self.max_heapify(largest)\n return self.heap" ]
[ "0.73450583", "0.72503364", "0.7051829", "0.69888616", "0.6927613", "0.69274133", "0.69107723", "0.6852765", "0.68425155", "0.6783528", "0.6773965", "0.66724384", "0.65653837", "0.65010023", "0.64840037", "0.64583856", "0.64395285", "0.6422981", "0.64171094", "0.64117384", "0.6409446", "0.64004433", "0.640022", "0.6376286", "0.6372338", "0.6369126", "0.63503224", "0.6333978", "0.6330086", "0.6317847" ]
0.76896995
0
Test The probas_to_classes class method
def test_RecurrentNeuralNetwork_probas_to_classes(): arr1 = np.asarray([0.1, 0.2, 0.7], dtype=np.float32) arr2 = np.asarray([0.1], dtype=np.float32) assert RecurrentNeuralNetwork.probas_to_classes(arr1) == 2 assert RecurrentNeuralNetwork.probas_to_classes(arr2) == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __convert_prob_into_class(self, probs):\n probs = T.set_subtensor(probs[probs > 0.5], 1)\n return T.set_subtensor(probs[probs <= 0.5], 0)", "def classProbs(observation, tree, classes):\n res = classify(observation, tree) #res = results\n total = sum(res.values())\n probs = []\n for c in classes:\n if c in res.keys():\n probs.append(float(res[c])/total)\n else:\n probs.append(0)\n return probs", "def testModel( self, classTest, classPred):", "def convert_output_to_class(preds,mu_stds,use_thresh=True,scale=1.0):\n preds_prob = torch.sigmoid(preds) # convert logits to probability with sigmoid\n max_class = torch.argmax(preds_prob,dim=-1).numpy().tolist() # get class with the largest probability\n max_prob = torch.max(preds_prob,dim=-1).values.detach().numpy().tolist() # get the max value of probability\n pred_class = [] # predicted class\n for i in range(len(max_prob)): # loop each output of the model\n max_class_one = max_class[i] # get class with the largest probability\n threshold = max(0.5, 1. - scale * mu_stds[max_class_one][1]) if use_thresh is True else 0.5 # find threshold for the predicted class\n # print(threshold)\n if max_prob[i] >= threshold: # if the max value of probability greater than threshold\n pred_class.append(max_class[i]) # append the max class\n else:\n pred_class.append(-1) # append unseen class\n return pred_class", "def classify(priors, likelihoods, testData, classes):\r\n results = []\r\n for document in testData:\r\n bestClass = None\r\n bestProb = None\r\n currentProb = 0.0\r\n for cls in classes:\r\n prior = priors[cls]\r\n currentProb = log(prior)\r\n lhoods = likelihoods[cls]\r\n for (word, count) in document:\r\n if word in lhoods:\r\n currentProb += log(lhoods[word])\r\n else:\r\n currentProb += log(lhoods[None])\r\n if currentProb > bestProb or bestClass == None:\r\n bestProb = currentProb\r\n bestClass = cls\r\n results.append(bestClass)\r\n return results", "def determineClasses(self, particles):\n\t\tapDisplay.printMsg(\"sorting refineparticledata into classes\")\n\t\tt0 = time.time()\n\t\tclasses={}\n\t\tclass_stats={}\n\t\tquality=numpy.zeros(len(particles))\n\t\tfor partnum in range(len(particles)):\n\t\t\tquality[partnum] = particles[partnum]['quality_factor']\n\t\t\tkey = (\"%.3f_%.3f\"%(particles[partnum]['euler1'], particles[partnum]['euler2']))\n\t\t\tif key not in classes.keys():\n\t\t\t\tclasses[key]={}\n\t\t\t\tclasses[key]['particles']=[]\n\t\t\t\tclasses[key]['euler1'] = particles[partnum]['euler1']\n\t\t\t\tclasses[key]['euler2'] = particles[partnum]['euler2']\n\t\t\t\t#classes have no inplane rotation\n\t\t\t\tclasses[key]['euler3'] = 0.0 #particles[partnum]['euler3']\n\t\t\tclasses[key]['particles'].append(particles[partnum])\n\t\tclass_stats['meanquality']=quality.mean()\n\t\tclass_stats['stdquality']=quality.std()\n\t\tclass_stats['max']=quality.max()\n\t\tclass_stats['min']=quality.min()\n\t\tapDisplay.printMsg(\"sorted %d particles into %d classes\"%(len(particles), len(classes)))\n\t\t### print stats\n\t\tprint \"-- quality factor stats --\"\n\t\tprint (\"mean/std :: \"+str(round(class_stats['meanquality'],2))+\" +/- \"\n\t\t\t+str(round(class_stats['stdquality'],2)))\n\t\tprint (\"min/max :: \"+str(round(class_stats['min'],2))+\" <> \"\n\t\t\t+str(round(class_stats['max'],2)))\n\t\tapDisplay.printMsg(\"finished sorting in \"+apDisplay.timeString(time.time()-t0))\n\t\treturn classes, class_stats", "def verify_probability_shapes(probs):\n if probs.ndim == 2:\n num_classes = probs.shape[1]\n if num_classes == 1:\n probs = 
probs[:, 0]\n probs = binary_converter(probs)\n num_classes = 2\n elif probs.ndim == 1:\n # Cover binary case\n probs = binary_converter(probs)\n num_classes = 2\n else:\n raise ValueError('Probs must have 1 or 2 dimensions.')\n return probs, num_classes", "def get_pred_class_probs(self, pred_mu, pred_sigma):", "def preds_proba_to_preds_class(preds_proba,threshold):\n return [True if pred > threshold else False for pred in preds_proba]", "def test_get_classes(self):\n self.view.learning_model = TestSingleLabelClassifierModel()\n self.assertEqual(self.view.get_classes(), TestSingleLabelClassifierModel.classes)", "def _prob_to_class(self, forward):\n predict_pos = self.activation(forward)\n predict_neg = 1 - predict_pos\n\n return torch.stack((predict_neg, predict_pos)).argmax(0).float()", "def classify_proba(self, X):\n return self._expectation(X)", "def class_probability(self, x):\n # permutation before softmax b x a x c x spatial dims --> b x c x a x spatial dims\n # as expected by PyTorch Softmax the class axis = 1 \n return self._class_prob(x.permute([0, 2, 1, 3, 4]))", "def classify(self, toBeClassified, laPlace = 0):\n #counting P(B|A) probability\n probabilitiesDictionary = {}\n for label in self.trainingY:\n probability = 1.0\n #for every part of toBeClassified count the probability of occuring if the given test was correct choice and multiply all of them\n for case in toBeClassified:\n if case in self.instanceCounterDict[label].keys():\n probability *= self.instanceCounterDict[label][case] + laPlace / self.totalInstanceCounter[label] + laPlace\n else:\n probability *= laPlace / self.totalInstanceCounter[label] + laPlace\n #P(B|A)P(A)\n probabilitiesDictionary[probability * self.prioProbability[label]] = label\n #find the highest probability case and return it\n highestProbability = max(probabilitiesDictionary.keys())\n #return class name of highest probability class\n return probabilitiesDictionary[highestProbability]", "def __init__(self, classes, data_size):\r\n self.classes = classes\r\n self.data_size = data_size\r\n self.conditional_prob = {class_:{} for class_ in classes} # Conditional Probability Table for storing parameters useful to compute P(feat|class_)\r\n self.class_prob = {} # Stores the priors\r", "def return_class_probas(pnode, pY):\n\n nof_objects = pY.shape[0]\n nof_classes = pY.shape[1]\n class_probas = numpy.zeros(nof_classes)\n\n for i in range(nof_objects):\n class_probas += pnode[i] * pY[i, :]\n\n # class_probas = class_probas/numpy.sum(pnode)\n class_probas = class_probas / len(pnode)\n # class_probas = pY\n\n return class_probas", "def getMetricsClass(pred_bboxes, gt_bboxes, nclasses):\r\n aps = []\r\n iou = []\r\n for cls in range(nclasses):\r\n if bool(pred_bboxes):\r\n if len(pred_bboxes[0]) == 4: \r\n avg_precision_class, iou_class = getMetrics(pred_bboxes, gt_bboxes)\r\n if len(pred_bboxes[0]) == 5:\r\n avg_precision_class, iou_class = getMetrics(pred_bboxes, gt_bboxes, confidence = True)\r\n else:\r\n avg_precision_class = 0\r\n iou_class = 0\r\n\r\n aps.append(avg_precision_class)\r\n iou.append(iou_class)\r\n \r\n return np.mean(aps), np.mean(iou)", "def convert_to_class(sim, dat_out):\n return dat_out", "def _correct_class_confidence(self, X, y):\n probas = self.model.predict_proba(X)\n values = []\n for i, proba in enumerate(probas):\n proba_dict = {self.model.classes_[j]: v for j, v in enumerate(proba)}\n values.append(proba_dict[y[i]])\n return np.array(values)", "def _classifier(self, classes):\n # Initialize key variables\n pseudo = 
np.linalg.pinv(self.data)\n result = np.dot(pseudo, classes)\n return result", "def test_naive_bayes_soy(test_set, classes, class_probabilities, class_feature_probs):\n\n print('[ INFO ]: Testing soy data with Naive Bayes Classifier...')\n\n class_results = {}\n scores = {}\n\n for soy_class in classes:\n\n # Create new column for class predictions\n feature_set = test_set.drop(classes, axis=1)\n feature_set['pred_class'] = 0\n true_class = test_set[soy_class]\n\n for row in range(len(feature_set)):\n\n # Initialize probability sums for each class\n true_probs_sum = 1\n false_probs_sum = 1\n true_conditional_prob_sum = 1\n false_conditional_prob_sum = 1\n\n for col in feature_set.columns:\n\n if col != 'pred_class':\n\n # Calculate probabilities assuming the class is present or 1\n if feature_set[col].iloc[row] == 1:\n\n # Compute conditional feature probabilities based on\n # wether or not the feature is present (1 or 0)\n true_prob = class_feature_probs[soy_class][0].get(col)\n false_prob = 1 - class_feature_probs[soy_class][1].get(col)\n\n else:\n\n # Calculate probabilities assuming the class is not present or 0\n true_prob = 1 - class_feature_probs[soy_class][0].get(col)\n false_prob = class_feature_probs[soy_class][1].get(col)\n\n # Multiply all feature probabilities together for each record\n true_probs_sum = true_probs_sum * true_prob\n false_probs_sum = false_probs_sum * false_prob\n\n # Multiply class conditional probabilities by conditional feature probabilities\n true_conditional_prob_sum = class_probabilities[soy_class] * true_probs_sum\n false_conditional_prob_sum = (1 - class_probabilities[soy_class]) * false_probs_sum\n\n # Determine which probability is highest - highest one is selected as the prediction value\n if true_conditional_prob_sum > false_conditional_prob_sum:\n feature_set['pred_class'].iloc[row] = 1\n\n # Place the results into a data frame for comparison\n results = pd.concat([feature_set['pred_class'], true_class], axis=1)\n results.columns = ['pred_class', 'true_class']\n class_results[soy_class] = results\n\n # Calculate the number of TP, TN, FP, FN\n true_positives = len(results.loc[(results['true_class'] == 1) & (results['pred_class'] == 1)])\n true_negatives = len(results.loc[(results['true_class'] == 0) & (results['pred_class'] == 0)])\n false_positives = len(results.loc[(results['true_class'] == 0) & (results['pred_class'] == 1)])\n false_negatives = len(results.loc[(results['true_class'] == 1) & (results['pred_class'] == 0)])\n\n scores[soy_class] = {\n 'TP' : true_positives,\n 'TN' : true_negatives,\n 'FP' : false_positives,\n 'FN' : false_negatives\n }\n\n return class_results, scores", "def get_num_classes(self):", "def detect_class_onpic(boxes, allowed_classes):\n object_class = \"all\"\n highest_prob = 0\n for box in boxes:\n box_prob = float(box[1].strip('%')) / 100.0\n if box[0] in allowed_classes and box_prob > highest_prob:\n highest_prob = box_prob\n object_class = box[0]\n return object_class, highest_prob", "def GMMClassfierVal(GMMs,Xtest):\n prob = np.zeros((Xtest.shape[0], len(GMMs)))\n \n #pista explora los metodos de la libreria, que metodo retorna probabilidades?\n for k,v in GMMs.items():\n \n \n # la etiqueta la asignas seleccionando le maximo de probabilidad\n Yest= \n \n return Yest, prob", "def ap_per_class(tp, conf, pred_cls, target_cls):\n\n # lists/pytorch to numpy\n tp, conf, pred_cls, target_cls = np.array(tp), np.array(conf.cpu().data), np.array(pred_cls.cpu().data), np.array(target_cls)\n\n # Sort by objectness\n i = 
np.argsort(-conf)\n tp, conf, pred_cls = tp[i], conf[i], pred_cls[i]\n\n # Find unique classes\n unique_classes = np.unique(np.concatenate((pred_cls, target_cls), 0))\n\n # Create Precision-Recall curve and compute AP for each class\n ap = []\n for c in unique_classes:\n i = pred_cls == c\n n_gt = sum(target_cls == c) # Number of ground truth objects\n n_p = sum(i) # Number of predicted objects\n\n if (n_p == 0) and (n_gt == 0):\n continue\n elif (np == 0) and (n_gt > 0):\n ap.append(0)\n elif (n_p > 0) and (n_gt == 0):\n ap.append(0)\n else:\n # Accumulate FPs and TPs\n fpa = np.cumsum(1 - tp[i])\n tpa = np.cumsum(tp[i])\n\n # Recall\n recall = tpa / (n_gt + 1e-16)\n\n # Precision\n precision = tpa / (tpa + fpa)\n\n # AP from recall-precision curve\n ap.append(compute_ap(recall, precision))\n\n return np.array(ap)", "def adjusted_classes(pred_prob, threshold):\n return [1 if y >= threshold else 0 for y in pred_prob]", "def test_typeclass_passtype_priority(order):\n data = np.arange(20, dtype='d')\n if order==0:\n objects = [\n C.Points(data[:1]), # 0\n C.Histogram(data[:2]), # 1\n C.Points(data[:5]), # 2\n C.Histogram(data[:6]), # 3\n C.Points(data[:10]), # 4\n C.Histogram(data[:11]), # 5\n ]\n elif order==1:\n objects = [\n C.Points(data[:1]), # 0\n C.Points(data[:5]), # 1\n C.Histogram(data[:2]), # 2\n C.Histogram(data[:6]), # 3\n C.Points(data[:10]), # 4\n C.Histogram(data[:11]), # 5\n ]\n else:\n assert False\n outputs = [p.single() for p in objects]\n\n obj = C.DummyType()\n for i in range(4):\n obj.add_output(f'{i}')\n for i, out in enumerate(outputs):\n obj.add_input(out, f'input_{i}')\n\n dt1 = R.TypeClasses.PassTypePriorityT(context.current_precision())((0,-1), (0,0))\n dt2 = R.TypeClasses.PassTypePriorityT(context.current_precision())((0,-1), (1,1), True, False)\n dt3 = R.TypeClasses.PassTypePriorityT(context.current_precision())((0,-1), (2,2), False, True)\n dt4 = R.TypeClasses.PassTypePriorityT(context.current_precision())((0,-1), (3,3), False, False)\n dts=[dt1, dt2, dt3, dt4]\n for dt in dts:\n R.SetOwnership(dt, False)\n dt.dump(); print()\n obj.add_typeclass(dt)\n res = obj.process_types();\n assert res\n\n obj.print()\n dt = outputs[0].datatype()\n\n dtypes = [out.datatype() for out in outputs]\n doutputs = obj.transformations.back().outputs\n if order==0:\n assert doutputs[0].datatype()==dtypes[3]\n assert doutputs[1].datatype()==dtypes[1]\n assert doutputs[2].datatype()==dtypes[2]\n assert doutputs[3].datatype()==dtypes[0]\n elif order==1:\n assert doutputs[0].datatype()==dtypes[3]\n assert doutputs[1].datatype()==dtypes[2]\n assert doutputs[2].datatype()==dtypes[1]\n assert doutputs[3].datatype()==dtypes[0]", "def _class_distribution(y):\n unique, counts = np.unique(y, return_counts = True)\n\n percentages = counts / np.sum(counts)\n\n return unique, counts, percentages", "def _classify_from_probs(predicts_proba):\n def find_majority(dict_probs):\n \"\"\"Find the majority class\"\"\"\n # if there is no majority class, pick the first from the sorted\n max_val = max(dict_probs.values())\n max_keys = [key for key in dict_probs.keys()\n if dict_probs[key] == max_val]\n return sorted(max_keys)[0]\n\n predicts = [find_majority(dict_probs) for dict_probs in predicts_proba]\n return predicts", "def test_class_counts(self):\n oz = ClassificationScoreVisualizer(GaussianNB())\n oz.fit(self.multiclass.X.train, self.multiclass.y.train)\n\n unique, counts = np.unique(self.multiclass.y.train, return_counts=True)\n npt.assert_array_equal(oz.classes_, unique)\n 
npt.assert_array_equal(oz.class_counts_, counts)" ]
[ "0.6498094", "0.64421034", "0.6430607", "0.6242911", "0.6197098", "0.6133029", "0.61081195", "0.6107384", "0.6036182", "0.60294396", "0.6015596", "0.6013046", "0.598101", "0.59633774", "0.5955908", "0.5945819", "0.5933108", "0.5814961", "0.58133847", "0.5807453", "0.575121", "0.5740699", "0.5732573", "0.5718888", "0.5709956", "0.57081574", "0.5703674", "0.56778693", "0.56649786", "0.5625617" ]
0.705267
0
Determines whether or not the ship associated with a specific marker has been sunk when visually representing the board
def is_ship_sunk(self, x, y): marker = self.markers[x][y] total_hits = self.ship_hits[marker] return total_hits == MarkerType.MAX_HITS[marker]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hit(self):\n\n self.units.pop()\n return (len(self.units) == 0) # Returns True if the ship has been sunk", "def is_board_lost(self):\n\n return bool(self.all_ship_locations) and bool(\n not self.all_ship_locations.difference(self.shot_locations)\n )", "def isScatteringSatFlagSet(self):\r\n\t\tdecoded_flag = bin(self.flag)[2:].rjust(8, '0')\r\n\t\tif decoded_flag[0] == True:\r\n\t\t\tself.scatteringSatFlag = True\r\n\t\t\tprint 'scattering saturation flag set'", "def isGameOver(self):\n for row in range(0, self.rows):\n for col in range(0, self.cols):\n if self.isMine(row, col) and self.isClicked(row, col):\n return True\n return False", "def sinkShips(self):\r\n while True:\r\n stillSinkingShips = False\r\n for i in range(len(self.shipsToSink) - 1, -1, -1):\r\n sunkShip, shipCoordinates = self.positionAndSinkShip(self.shipsToSink[i])\r\n if sunkShip:\r\n stillSinkingShips = True\r\n for coordinates in shipCoordinates:\r\n self.enemyBoard[coordinates.x][coordinates.y] = BoardState.SUNK\r\n del(self.shipsToSink[i])\r\n if not stillSinkingShips:\r\n break", "def test_miss(self):\n ship = Ship([self.hit])\n self.assertEqual([self.hit], ship.location)\n self.assertEqual(0, ship.check_hit((1, 1)))\n self.assertEqual(1, len(ship.location))", "def allBoatsSunk(self):\n for boat in self.boats:\n if not boat.isCaput():\n return False\n return True", "def has_destroyed_ship(self):\n if self.mark == constants.DEAD_SHIP_MARK:\n return True\n return False", "def is_wall_marker(self):\n return self.id in WALL", "def is_self_crossing(cursor, marker, grid):\n grid_point_wires = grid.get(cursor[0], cursor[1])\n\n for wire in grid_point_wires:\n if wire['marker'] == marker:\n return True\n return False", "def IsSnappable(self):\r\n \r\n return self.IsTopSnappable() or self.IsBottomSnappable() or self.IsLeftSnappable() or \\\r\n self.IsRightSnappable()", "def tile_fits(self, location, tile):\n x, y = location\n CONNECTIONS_TO_CHECK = [\n [(x+1, y), 'east', 'west'],\n [(x-1, y), 'west', 'east'],\n [(x, y+1), 'north', 'south'],\n [(x, y-1), 'south', 'north']\n ]\n\n for neighbor_loc, my_offset, their_offset in CONNECTIONS_TO_CHECK:\n neighbor_tile = self.board.get(neighbor_loc)\n if neighbor_tile and tile.edges._asdict()[my_offset] != neighbor_tile.edges._asdict()[their_offset]:\n return False\n return True", "def has_neighbor(self, tile: 'games.saloon.tile.Tile') -> bool:\n return bool(tile and tile in self.get_neighbors())", "def is_mine(board, x, y):\n return board[x, y] == MINE", "def any_neighbor_burning(self):\n neighbors = self.world.get_four_neighbors(self, Patch.null)\n states = [patch.state for patch in neighbors]\n return \"orange\" in states", "def board_tiles_availability(self):\n for row in range(GameData.rows):\n for col in range(GameData.columns):\n if self.board[row][col] == 0:\n return False\n # Game is draw, no more moves left!\n return True", "def display_ship(player_1: bool, position: str, kind: str) -> None:\r\n hit = pygame.image.load(\"assets/Misc/x-mark-48.png\")\r\n miss = pygame.image.load(\"assets/Misc/x-mark-48 (1).png\")\r\n position = convert_letter_coord(player_1, position)\r\n if kind == 'Ca':\r\n pygame.draw.circle(screen, (0, 128, 255), position, 25)\r\n elif kind == 'B':\r\n pygame.draw.circle(screen, (157, 0, 255), position, 25)\r\n elif kind == 'Cr':\r\n pygame.draw.circle(screen, (0, 17, 255), position, 25)\r\n elif kind == 'S':\r\n pygame.draw.circle(screen, (100, 100, 100), position, 25)\r\n elif kind == 'D':\r\n pygame.draw.circle(screen, (25, 80, 30), position, 
25)\r\n elif kind == 'M':\r\n screen.blit(miss, (position[0]-24, position[1]-24))\r\n else:\r\n screen.blit(hit, (position[0] - 24, position[1] - 24))", "def check_surroundings(x_coord, y_coord, value):\n\t\tfor i in range(3):\n\t\t\tfor j in range(3):\n\t\t\t\texamining = world[x_coord - 1 + i][y_coord - 1 + j]\n\t\t\t\tif examining.name == value:\n\t\t\t\t\treturn True\n\t\t\t\telse:\n\t\t\t\t\tpass\n\t\treturn False", "def game_over(self) -> bool:\n for row in range(9):\n for col in range(9):\n if self._grid_sol[row][col] != self.get_cell(row, col):\n return False\n return True", "def ship_hit(si_settings, screen, stats, sb, ship, aliens, bullets, alienBullets, images):\r\n if stats.ships_left > 0:\r\n # Decrement ships_left.\r\n stats.ships_left -= 1\r\n\r\n # Animate the ship explosion\r\n ship_explosion(si_settings, screen, ship)\r\n\r\n # Update scoreboard.\r\n sb.prep_ships()\r\n\r\n # Empty the list of aliens and bullets.\r\n aliens.empty()\r\n bullets.empty()\r\n alienBullets.empty()\r\n\r\n # Create a new fleet and center the ship.\r\n create_fleet(si_settings, screen, ship, aliens, images)\r\n ship.center_ship()\r\n\r\n # Pause.\r\n sleep(0.5)\r\n else:\r\n stats.game_active = False\r\n pygame.mouse.set_visible(True)", "def is_map_obstacle_in_screen_range(self):\n raise NotImplementedError", "def is_unk(self, entry):\n return entry not in self.__entries", "def cell_is_game_over(self, y, x, map_data):\n # check for water\n if map_data[y][x] == self.WATER_SYMBOL:\n return True\n\n # check for anti-tank\n # up direction\n for i in range(y, -1, -1):\n if map_data[i][x] == self.ANTI_TANK_DOWN_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(i, x, map_data):\n break\n\n # down direction\n for i in range(y, self.y_size):\n if map_data[i][x] == self.ANTI_TANK_UP_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(i, x, map_data):\n break\n\n # left direction\n for i in range(x, -1, -1):\n if map_data[y][i] == self.ANTI_TANK_RIGHT_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(y, i, map_data):\n break\n\n # right direction\n for i in range(x, self.x_size):\n if map_data[y][i] == self.ANTI_TANK_LEFT_SYMBOL:\n return True\n # if blocked, can stop checking for anti-tank\n if self.cell_is_blocked(y, i, map_data):\n break\n\n # no water or anti-tank danger\n return False", "def revealBombs(self, win):\n for row in self.tiles:\n for tile in row:\n tile.inPlay = False\n if tile.isMine():\n if win:\n #flag non-flagged mines after winning\n if not tile.isFlagged():\n tile.configure(image=Tile.images[11])\n self.numFlags += 1\n else:\n #show unexploded mines after losing \n if not tile.isShown():\n tile.configure(image=Tile.images[9])\n #if incorrectly flagged, mark as such \n elif tile.isFlagged():\n tile.configure(image=Tile.images[12])", "def mark(board, player, row, col):\r\n pass", "def ship_hit(si_settings,screen,stats,sb,ship,aliens,bullets):\n if stats.ships_left > 0:\n # Decrement ships_left.\n stats.ships_left -= 1\n #update Scoreboard\n sb.prep_ships()\n else:\n stats.game_active = False\n pygame.mouse.set_visible(True)\n #empties aliens and bullets\n aliens.empty()\n bullets.empty()\n #makes new aliens and centers ship\n create_fleet(si_settings,screen,ship,aliens)\n ship.center_ship()\n #stop\n sleep(0.5)", "def sliding(self):\n for i in range(self.tiles_len):\n x, y = self.tilepos[i] # current pos\n X, Y = self.tilePOS[self.tiles[i]] # target 
pos\n if x != X or y != Y:\n return True", "def shotResult(self, shot, hit, sunk):\r\n logging.debug(\"shot result: %s, hit: %d, sunk: %d\" % (shot, hit, sunk))\r\n coordinates = self.mapToCoordinates(shot)\r\n # If a ship was sunk, remove it from the fleet.\r\n if sunk:\r\n sunk = str(sunk)\r\n assert(self.shipsAfloat[sunk] > 0)\r\n self.shipsAfloat[sunk] -= 1\r\n # Remove any counts that went to 0.\r\n if self.shipsAfloat[sunk] == 0:\r\n del(self.shipsAfloat[sunk])\r\n self.enemyBoard[coordinates.x][coordinates.y] = BoardState.BULLSEYE\r\n else:\r\n if hit:\r\n self.enemyBoard[coordinates.x][coordinates.y] = BoardState.HIT\r\n else:\r\n self.enemyBoard[coordinates.x][coordinates.y] = BoardState.MISS", "def sinkShipSearch(self, coordinates, size, direction):\r\n if size == 0:\r\n # Successfully searched the required size.\r\n return True, []\r\n if coordinates.x < 0 or coordinates.y < 0 or coordinates.x == self.boardDimensions or coordinates.y == self.boardDimensions:\r\n # Can't go off the board.\r\n return False, None\r\n if self.enemyBoard[coordinates.x][coordinates.y] != BoardState.HIT:\r\n # This search is all for naught since the ship can't possibly have sunk at this position.\r\n return False, None\r\n sunkShip, shipCoordinates = self.sinkShipSearch(Coordinates(coordinates.x + direction.x, coordinates.y + direction.y), size - 1, direction)\r\n if sunkShip:\r\n shipCoordinates.append(coordinates)\r\n return sunkShip, shipCoordinates", "def check_ship_fits(self, ship_length, row, column, orientation):\n if orientation == 'H':\n if column + ship_length > 10:\n if self.user == 'player':\n print('SHIP DOES NOT FIT, TRY AGAIN!\\n')\n return False\n else:\n return False\n else:\n return True\n else:\n if row + ship_length > 10:\n if self.user == 'player':\n print('SHIP DOES NOT FIT, TRY AGAIN!\\n')\n return False\n else:\n return False\n else:\n return True" ]
[ "0.6861421", "0.626912", "0.60083276", "0.60054564", "0.5866003", "0.5835053", "0.57888705", "0.57358277", "0.567922", "0.5657924", "0.56397027", "0.563559", "0.562256", "0.5606837", "0.5604321", "0.5599502", "0.55966866", "0.55774707", "0.55633444", "0.5560819", "0.5549613", "0.5541968", "0.55394095", "0.5531508", "0.55310524", "0.5524253", "0.5518848", "0.55181843", "0.55181557", "0.5506956" ]
0.80348986
0
Perform translation on data according to desc and language
def translate(desc, data, language = None): # sanity checks if desc is None or data is None: return data # create a translation configuration prepared = translator.prepare(desc, language) # perform the translation if isinstance(data, types.DictType): # translate single entry data = translator.translate(prepared, data) elif isinstance(data, types.ListType): # translate each entry for i in range(len(data)): data[i] = translator.translate(prepared, data[i]) # return the translated data return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def translate(self, language=None):", "def translate():\n pass", "def get_translation(self):", "def translate(self):\n pass", "def question_new_translate():", "def translate(self,phrase, **kwargs):\n \n #Load the input and output languages\n if 'output_language' in kwargs:\n out_lang = kwargs.pop('output_language')\n else:\n out_lang = self.language\n \n if 'input_language' in kwargs:\n in_lang = kwargs.pop('input_language')\n else:\n in_lang = 'english'\n \n #Identify the language based on intput\n if out_lang in ['Spanish', 'spanish', 'Espanol', 'espanol', 's', 'S']:\n output_language = 'spanish'\n elif out_lang in ['Portuguese', 'portuguese', 'Português', 'português', 'p', 'P']:\n output_language = 'portuguese'\n elif out_lang in ['English', 'english', 'E', 'e']:\n output_language = 'english'\n else:\n output_language = 'english'\n print('Unable to find language:', out_lang)\n \n #Open CSV with translations\n with open(self.translations, encoding='ISO-8859-15') as csv_file:\n csvread = csv.DictReader(csv_file)\n found = 0\n for row in csvread:\n if row[in_lang] == phrase:\n output_phrase = row[output_language] #translate phrase\n found = 1 #set flag indicating that the phrase was successfully translated\n\n #If no translation was found, return original phrase and present an error message\n if found == 0:\n output_phrase = phrase\n print('Unable to find phrase ', phrase, \"in language \", out_lang)\n \n return output_phrase", "async def translate(self,ctx,lang=\"ja\",txt=None):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n await self.translater(ctx,lang,txt)", "def process_text(self, text, language):", "def translate_wrapper(atext):\n print(\"translating:\",atext)\n res=\"\"\n res=translate(atext,\"pl\",\"fr\")\n time.sleep(0.5)\n print(\"translation:\",res)\n return res", "def fetchTranslation(self, language):\n pass", "def translateText(text):\r\n\treturn translator.translate(text, src='en', dest='ro')", "def retranslate(self):\r\n pass", "def retranslate(self):\r\n pass", "def translate_to(self, lang):\n TranslatableWindow.translate_all(lang)", "def _label_language_swap(self, languagename, curr_lang):\n chrdict = self._chrdict\n varlist = self._varlist\n vlblist = self._vlblist\n lbllist = self._lbllist\n \n old_varlab_key = \"_lang_v_\" + curr_lang\n old_vallab_key = \"_lang_l_\" + curr_lang\n \n new_varlab_key = \"_lang_v_\" + languagename\n new_vallab_key = \"_lang_l_\" + languagename\n \n # Replace data label and _lang_c. 
No need to set _lang_list: \n # can only swap between two defined languages.\n dta_dict = chrdict[\"_dta\"]\n dta_dict[\"_lang_c\"] = languagename\n if self._data_label != '':\n dta_dict[old_varlab_key] = self._data_label\n self._data_label = (dta_dict.pop(new_varlab_key) \n if new_varlab_key in dta_dict else '')\n \n # put current variable and value labels in chrdict \n # and replace with languagename's\n for varname, i in zip(varlist, range(self._nvar)):\n varlab = vlblist[i]\n vallab = lbllist[i]\n \n if varname not in chrdict: # then nothing to retreive\n if varlab == '' and vallab == '': # then nothing to store\n continue\n chrdict[varname] = {}\n \n var_dict = chrdict[varname]\n \n # store current if non-empty\n if varlab != '': var_dict[old_varlab_key] = varlab\n if vallab != '': var_dict[old_vallab_key] = vallab\n \n # set languagename's labels as current\n vlblist[i] = (var_dict.pop(new_varlab_key) \n if new_varlab_key in var_dict else '')\n lbllist[i] = (var_dict.pop(new_vallab_key) \n if new_vallab_key in var_dict else '')\n \n # delete sub-dict from chrdict if empty\n if len(var_dict) == 0:\n del chrdict[varname]", "def set_description(self, text, lang=0):\n self.localized_strings[lang] = text", "def translate(data:object, **kwargs) -> object:\n\n return translator.TranslateVisitor(**kwargs).translate(data)", "def translate(self, to_lang: str = TARGET_LANG):\n if not self.language:\n self.detect_language()\n if not all([self.clean, self.language != to_lang]):\n return\n self.payload += '&source={}&target={}'.format(self.language, to_lang)\n resp = requests.request('POST', self.url_translation, data=self.payload.encode('utf-8'),\n headers=self.translate_headers)\n try:\n self.translation = json.loads(resp.text)['data']['translations'][0]['translatedText']\n except KeyError:\n return", "def translate_leet(phrase):", "def tr(text, sourcelang, targetlang):\n request = urllib2.Request(url.format(text, sourcelang, targetlang),\n headers={ 'User-Agent': 'Mozilla/5.0', 'Accept-Charset': 'utf-8' })\n response = urllib2.urlopen(request).read()\n fixedJSON = re.sub(r',{2,}', ',', response).replace(',]', ']')\n data = json.loads(fixedJSON)\n result = {}\n result[\"definition\"] = data[0][0]\n for row in data[1]:\n try:\n result[row[0]] = row[1]\n except:\n pass\n return result", "async def translate_to(self, ctx, language: to_language, *, text: str):\n if len(text) >= 200:\n raise TextTooLongError(message=f\"This text is too long to be processed\")\n \n if language is None:\n raise LanguageNotFoundError(message=f\"Couldn't find the language : {language}\")\n \n resp = await self.translator.translate(text, dest=language)\n \n await self._display(ctx, resp, text)", "def translate(self):\n raise NotImplementedError('subclass must override this method')", "def translate(self, text: str, src_lang: str, target_lang: str) -> str:\n result = self.__translate(text, src_lang, target_lang)\n obj_result = json.loads(result)\n\n list_sentence = [x[0] for x in obj_result[0][:-1]]\n\n return ''.join(list_sentence)", "def translate(self):\n\t\tself._translate(True)", "def translate(self, oracion: str) -> None:\n\n if self.saved_translator:\n result = self.saved_translator(oracion).numpy()\n print(f\"... English translation: {result}\\n\")\n else:\n print(\"INFO: Couldn't find a saved model. 
Train the translator first with the `train` command.\\n\")", "def translate_text(target, text):\n return text", "def gettext_translate( s ):\n return catalogs.translate(s)", "def __translate(self, text, originalLanguage, translationLanguage):\n if self.__translatorRequest is None:\n from .TranslatorRequest import TranslatorRequest\n self.__translatorRequest = TranslatorRequest(self)\n \n self.__ensureTranslationEngineReady()\n if self.__translationEngine is None:\n return \"\", False\n else:\n result, ok = self.__translationEngine.getTranslation(\n self.__translatorRequest, text, originalLanguage,\n translationLanguage)\n \n return result, ok", "def get_translation(self, lang):\n if lang==\"it\":\n url=self.backstring+\"en/\"+self.name\n image=self.backstring+\"img/uk.png\"\n alttext='English version'\n elif lang==\"en\":\n url=self.backstring+\"it/\"+self.name\n image=self.backstring+\"img/it.png\"\n alttext='Italian version'\n img='<img src=\"%s\" height=\"15\" alt=\"%s\"><br>%s' % (image, alttext,alttext, )\n a=A(img, url, \"translation\")\n return str(a)", "def use_en(self):\n pass" ]
[ "0.7879175", "0.739186", "0.7055538", "0.70379674", "0.6981347", "0.68133277", "0.6648589", "0.6586364", "0.656703", "0.65603155", "0.6507135", "0.6446331", "0.6446331", "0.63955307", "0.63876665", "0.6365477", "0.6363538", "0.635846", "0.6329896", "0.6292852", "0.6292246", "0.62687916", "0.62415564", "0.61839795", "0.6178972", "0.61286235", "0.60986066", "0.6065934", "0.6060492", "0.604792" ]
0.7615052
1
Get the list of cached code tables
def list(): return cache.codeTableList()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def codelists():\n return CodelistSet()", "def list_cached():\n for json_name in cached_files():\n source_name = get_source_file_name(json_name)\n yield (json_name, source_name)", "def get_tables(self):\n r = self.client.query(\"show tables\")\n if r:\n tables = [tuple(reversed(x.split(','))) for x in filter(None, r.split('\\n'))][1:]\n FT.table_cache = dict(tables)\n return tables\n else:\n logging.error(\"get_tables: no response\")", "def find_cache_files():\n files = []\n\n for root, dirnames, filenames in os.walk(\".\"):\n for filename in fnmatch.filter(filenames, \"*.pyc\"):\n files.append(os.path.join(root, filename))\n\n for root, dirnames, filenames in os.walk(\".\"):\n for filename in fnmatch.filter(filenames, \"__pycache__\"):\n files.append(os.path.join(root, filename))\n\n return files", "def get_cache_names():\n\n return get_component(CachingPackage.COMPONENT_NAME).get_cache_names()", "def get(self):\n if path.exists(self.cachefile):\n self.invalidion()\n full_cache = self._get_all()\n return full_cache\n else:\n return []", "def getTables(self):\n\treturn self.dbNames", "def load_cache():\n return {}", "async def get_cache_names(self) -> list:\n conn = await self.random_node()\n return await cache_get_names_async(conn)", "def _get_compile_cache_dep_files():\n if entry_script_path is None:\n logger.warning(\"Can not get the entry script file path.\")\n return []\n compile_cache_dep_files = []\n logger.debug(f\"entry script file path: {entry_script_path}\")\n compile_cache_dep_files.append(entry_script_path)\n __get_compile_cache_dep_files(entry_script_path, compile_cache_dep_files, None)\n return compile_cache_dep_files", "def _list_dir(self):\n return [os.path.join(self.cache_dir, fn)\n for fn in os.listdir(self.cache_dir)]", "def get_tables(self):\n return list(self._metadata['tables'].keys())", "def c_code_cache_version(self):\r\n return ()", "def c_code_cache_version(self):\r\n return ()", "def list_tables(self):\n return LIST_TABLES(db=self.db)", "async def get() -> list:\n if _cache is None:\n await _update()\n return _cache", "def load_tilenames_cache(tablename):\n import desdb\n fname=get_tilename_cache_file(tablename)\n\n if not os.path.exists(fname):\n print(\" caching from database\")\n dir=get_tilename_cache_dir()\n if not os.path.exists(dir):\n print(\"making dir:\",dir)\n os.makedirs(dir)\n\n with desdb.Connection() as conn:\n q=\"\"\"\n select\n coadd_objects_id, tilename\n from\n %s\\n\"\"\" % tablename\n\n print(q)\n data=conn.quick(q, array=True)\n \n print(\" writing to:\",fname)\n fitsio.write(fname, data, clobber=True)\n else:\n print(\" reading:\",fname)\n data=fitsio.read(fname)\n\n return data", "def get(self):\n return {\"tables\": public_tables}", "def _find_cache():\n app = _find_app()\n return app.cache", "def library_caching(self):\n return self._library_caching", "def meta_db_tables(self) -> list:\r\n def _passer(**kwargs):\r\n data = self.engine.execute(\"\"\"\r\n SELECT * FROM sqlite_master WHERE type='table';\r\n \"\"\").fetchall()\r\n table_names = [i[1] for i in data]\r\n return table_names\r\n return self._connectionController(_passer)", "def __getListTables(self):\n\n listTables = \"{\\\\*\\\\listtable\\n\"\n overrideTables = \"{\\\\listoverridetable\\n\"\n for listDef in self.lists:\n id = listDef.id\n listTables += listDef.getRtf()\n overrideTables += (\"{\\\\listoverride\\\\listid%d\"\n \"\\\\listoverridecount0\\\\ls%d}\\n\" % (id, id))\n return listTables + \"}\\n\" + overrideTables + \"}\\n\"", "def tables(self) -> list:\n return 
self.list_tables()", "def get_all_cached_instances(cls):\n return list(cls.__dbclass__.__instance_cache__.values())", "def get_tables(self, *, only_names=True, verbose=False, cache=True):\n\n if cache and self._cached_tables is not None:\n tables = self._cached_tables\n else:\n tables = self._tap.load_tables(only_names=only_names, include_shared_tables=False, verbose=verbose)\n self._cached_tables = tables\n if only_names:\n return [t.name for t in tables]\n else:\n return tables", "def get_cached(cls):\n if not hasattr(cls, u'_cached'):\n cls._cached = KanaTable()\n\n return cls._cached", "def get_cached_models(cache_dir: Union[str, Path] = None) -> List[Tuple]:\n if cache_dir is None:\n cache_dir = TRANSFORMERS_CACHE\n elif isinstance(cache_dir, Path):\n cache_dir = str(cache_dir)\n if not os.path.isdir(cache_dir):\n return []\n\n cached_models = []\n for file in os.listdir(cache_dir):\n if file.endswith(\".json\"):\n meta_path = os.path.join(cache_dir, file)\n with open(meta_path, encoding=\"utf-8\") as meta_file:\n metadata = json.load(meta_file)\n url = metadata[\"url\"]\n etag = metadata[\"etag\"]\n if url.endswith(\".bin\"):\n size_MB = os.path.getsize(meta_path.strip(\".json\")) / 1e6\n cached_models.append((url, etag, size_MB))\n\n return cached_models", "def get_tables():\n return execute(\"SELECT name FROM sqlite_master WHERE type = 'table';\")", "def list_symbol_tables(mst):\n stlist = []\n def append_st(st):\n #print(st)\n stlist.append(st)\n for s in st.get_symbols():\n for ns in s.get_namespaces():\n append_st(ns)\n if not isinstance(mst, symtable.SymbolTable):\n # Assume it is text of a program to compile\n mst = symtable.symtable(mst, '<string>', 'exec')\n append_st(mst)\n return stlist", "def traffic_statuscodes_cachecodes(self, **kwargs):\n url_path = 'traffic/statuscodes/cachecodes'\n self.logger.debug(f\"Get list of cache codes\")\n body = self._make_body(kwargs)\n return self._common_get(request_path=url_path, parameters=body)" ]
[ "0.65682197", "0.6535312", "0.65018505", "0.642341", "0.6260274", "0.62120306", "0.61924624", "0.61917317", "0.61468893", "0.6070429", "0.6061271", "0.6048635", "0.59974474", "0.59974474", "0.5924543", "0.59052294", "0.5889158", "0.58332527", "0.5788907", "0.578401", "0.5766249", "0.5765013", "0.57405645", "0.57353175", "0.5711999", "0.57077837", "0.5700342", "0.56938314", "0.5692462", "0.56894094" ]
0.8586865
0
Move file(s) to a given destination.
def move_files(file: str, destination: str): try: result = _process_files("mv", "-v", file, destination) except FileNotFoundError: print("ERROR: '{}' does not exist.".format(file)) except FolderNotFoundError: print( "ERROR: '{}' destination does not exist.".format(destination) ) except InsufficientRightsError: print("ERROR: Insufficient rights to destination '{}'.".format( destination) ) else: print(result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_file(source, destination):\n shutil.move(source, destination)", "def move_files(src, dst, filenames):\n for filename in filenames:\n os.rename(os.path.join(src, filename), os.path.join(dst, filename))", "def moveFiles(outputDir, files):\n\tfor fn in files:\n\t\tshutil.move(fn, join(outputDir, getFilenameWithoutPath(fn)))", "def moveFile(source, dest):\n try:\n shutil.move(source, dest) \n except IOError as e:\n print (\"Unable to move file. %s\" %(e))", "def move_file(source, destination):\n #source = client_variables.output_folder\n #destination = client_variables.client_folder\n copyfiles = os.listdir(source)\n ext = (\".xlsx\", \".csv\", \".pdf\", \".png\")\n for copyfile in copyfiles:\n if copyfile.endswith(ext):\n copyfile = source + \"/\" + copyfile\n print \"copying\", copyfile\n shutil.move(copyfile, destination)\n elif copyfile.startswith('GetTotalByYearReport'):\n copyfile = source + \"/\" + copyfile\n print \"copying\", copyfile\n shutil.move(copyfile, destination)", "def move_file(self, path: PathLike, dest: PathLike, force: bool = False):", "def act_move_file(self, file_source, file_target):\n try:\n if not os.path.isfile(file_source):\n return\n path = os.path.dirname(file_target)\n if not os.path.exists(path):\n os.makedirs(path)\n shutil.move(file_source, file_target)\n #shutil.copy2(file_source, file_target)\n #os.remove(file_source)\n self.logger.debug('%s: Action: <move> %s -> %s', self.name, file_source, file_target)\n except:\n self.logger.exception('Error on file move: %s -> %s', file_source, file_target)", "def move_files(src_dir, dst_dir):\n for f in os.listdir(src_dir):\n try:\n name, season, episode = FILENAME_PATTERN.search(f).groups()\n except AttributeError:\n try:\n name, season, episode = FILENAME_PATTERN2.search(f).groups()\n except AttributeError:\n print \"Cannot parse\", f\n pass\n\n name = name.replace('.', ' ').replace('_', ' ').strip().title()\n\n dir_path = os.path.join(dst_dir, name, 'Season %02d' % int(season))\n full_path = os.path.join(dir_path, f)\n source_path = os.path.join(src_dir, f)\n\n if not os.path.exists(dir_path):\n os.makedirs(dir_path, 0777)\n\n if not os.path.exists(full_path):\n shutil.move(source_path, full_path)\n os.symlink(full_path, source_path)", "def file_move(self, from_path, to_path):\n params = {'root': self.session.root,\n 'from_path': format_path(from_path),\n 'to_path': format_path(to_path)}\n\n url, params, headers = self.request(\"/fileops/move\", params)\n\n return self.rest_client.POST(url, params, headers)", "def move_file():\n # print(\"\\n\".join(os.listdir(filepath)))\n # folders = [os.path.join(filepath, fld) for fld in os.listdir(filepath)]\n # print(filepath + \":\\n \" + \"\\n \".join(folders))\n folders = filter(os.path.isdir, os.listdir(u\".\"))\n # print(\"Sub-folders: \", u\"\\n\".join(folders))\n for folder in folders:\n files = [os.path.join(folder, fn) for fn in os.listdir(folder)]\n files = filter(os.path.isfile, files)\n for fn in files:\n _, filename = os.path.split(fn)\n shutil.move(fn, filename)\n assert 0 == len(os.listdir(folder))", "def transfer_files(src: str, dst: str, move_src_data: bool = False):\n if move_src_data:\n logger.info('Move {0} to {1}'.format(src, dst))\n shutil.move(src, dst)\n else:\n logger.info('Copy {0} to {1}'.format(src, dst))\n copy_tree(src, dst)", "def move_files(sim_dir, dest_dir, file_patterns):\n for f in file_patterns:\n for p in glob.glob1(sim_dir, f):\n try:\n shutil.move(os.path.join(sim_dir, p), os.path.join(dest_dir, p))\n except Exception as e:\n 
print(\n \"error while copy ing file from {} to {}\\n{}\".format(\n sim_dir, dest_dir, e\n )\n )", "def move_file(src, dst):\n # Sanity checkpoint\n src = re.sub('[^\\w/\\-\\.\\*]', '', src)\n dst = re.sub('[^\\w/\\-\\.\\*]', '', dst)\n if len(re.sub('[\\W]', '', src)) < 5 or len(re.sub('[\\W]', '', dst)) < 5:\n debug.log(\"Error: Moving file failed. Provided paths are invalid! src='%s' dst='%s'\"%(src, dst))\n else:\n # Check destination\n check = False\n if dst[-1] == '/':\n if os.path.exists(dst):\n check = True # Valid Dir\n else:\n debug.log(\"Error: Moving file failed. Destination directory does not exist (%s)\"%(dst)) #DEBUG\n elif os.path.exists(dst):\n if os.path.isdir(dst):\n check = True # Valid Dir\n dst += '/' # Add missing slash\n else:\n debug.log(\"Error: Moving file failed. %s exists!\"%dst)\n elif os.path.exists(os.path.dirname(dst)):\n check = True # Valid file path\n else:\n debug.log(\"Error: Moving file failed. %s is an invalid distination!\"%dst)\n if check:\n # Check source\n files = glob.glob(src)\n if len(files) != 0:\n debug.log(\"Moving File(s)...\", \"Move from %s\"%src, \"to %s\"%dst)\n for file_ in files:\n # Check if file contains invalid symbols:\n invalid_chars = re.findall('[^\\w/\\-\\.\\*]', os.path.basename(file_))\n if invalid_chars:\n debug.graceful_exit((\"Error: File %s contains invalid \"\n \"characters %s!\"\n )%(os.path.basename(file_), invalid_chars))\n continue\n # Check file exists\n if os.path.isfile(file_):\n debug.log(\"Moving file: %s\"%file_)\n shutil.move(file_, dst)\n else:\n debug.log(\"Error: Moving file failed. %s is not a regular file!\"%file_)\n else: debug.log(\"Error: Moving file failed. No files were found! (%s)\"%src)", "def Move(args):\n\n parser = argparse.ArgumentParser(usage='mv [Options] sources... 
dest',\n description=Move.__doc__)\n parser.add_argument(\n '-v', '--verbose', dest='verbose', action='store_true',\n default=False,\n help='verbose output.')\n parser.add_argument(\n '-f', '--force', dest='force', action='store_true',\n default=False,\n help='force, do not error it files already exist.')\n parser.add_argument('srcs', nargs='+')\n parser.add_argument('dest')\n\n options = parser.parse_args(args)\n\n if options.verbose:\n print('mv %s %s' % (' '.join(options.srcs), options.dest))\n\n for src in options.srcs:\n MovePath(options, src, options.dest)\n return 0", "def move_files(self, download_path):\n if self.file_list is None:\n self._set_file_list()\n\n for individual_file in self.file_list:\n source_path = os.path.join(self.base_dir, individual_file)\n dest_path = os.path.join(download_path, individual_file)\n # We don't move files that don't exist\n if not os.path.exists(source_path):\n continue\n\n # Make sure the destination directory exists\n if not os.path.exists(os.path.dirname(dest_path)):\n os.makedirs(os.path.dirname(dest_path))\n if self.to_copy:\n shutil.copy(source_path, dest_path)\n else:\n os.rename(source_path, dest_path)\n return", "def move_media(items, dest):\n for file in items:\n filename = os.path.basename(file)\n os.rename(file, dest + '\\\\' + filename)", "def simple_move_files(selected_image_list, out_dir='/command/results/top_images_test_set/'):\n for file_no in range(len(selected_image_list)):\n shutil.move(selected_image_list[file_no], out_dir + selected_image_list[file_no].split('/')[-1])\n return", "def move_files(from_dir, to_dir, keyword):\n \n if not os.path.exists(to_dir):\n os.mkdir(to_dir)\n \n if keyword == None:\n # If keyword is left empty, from_dir is considered a list of files.\n to_move = from_dir\n else:\n to_move = glob.glob(os.path.join(from_dir, '*' + keyword + '*'))\n \n n_moved = 0 \n for f in to_move:\n if os.path.isfile(f):\n shutil.move(f, to_dir)\n n_moved += 1\n \n print \"Moved %i files to %s.\" % (n_moved, to_dir)", "def mv(self, src_path, dst_path):\n try:\n postdata = codecs.encode(json.dumps({ 'src': src_path, 'dst': dst_path }), 'utf-8')\n self._urlopen('/api/fileops/move', postdata).read()\n except HTTPError as err:\n raise RuntimeError(\"Unable to move '{}' to '{}'\".format(src_path, dst_path))", "def move(self, destination, **kwargs):\n assert _os.path.exists(self.__str__()) == True\n _shutil.move(self.__str__(), destination, **kwargs)", "def move(matches):\n for source in matches:\n target = matches[source]\n os.rename(source, target)", "def move_file_to_dir(f, dest_dir):\n ls = list_files(dest_dir)\n if f not in ls:\n shutil.move(f, dest_dir)", "def move_files(fname_fout, root_dir, dest_dir):\n fname, f_ext = os.path.splitext(fname_fout)\n # Find files which filename of fname_fout\n matches = []\n pattern = fname + '*'\n root_fnames = os.listdir(root_dir)\n for filename in fnmatch.filter(root_fnames, pattern):\n matches.append([filename, os.path.join(root_dir, filename)])\n # Extract new folder name based on fname_fout\n new_folder_name = reshape_fname(fname_fout, ['nairfoil', 'nsetup'])\n dest_dir = os.path.join(dest_dir, new_folder_name)\n # Move files\n for cur_file in matches:\n os.renames(cur_file[1], os.path.join(dest_dir, cur_file[0]))", "def movefile(destpath,filename,sourcepath):\n\n\tcommand = 'mv ' + filename + ' ' + destpath\n\t\n\ttry :\n\t\tst = commands.getstatusoutput(command)\n\texcept Exception:\n\t\traise", "def move(self,fileName,destDir):\n self.unload(fileName)\n 
FileInfos.move(self,fileName,destDir)", "def moveFiles(inputDir, inputFiles):\n\tfor file in inputFiles:\n\t\tlogger.debug('moveFiles: {0}'.format(file))\n\t\tshutil.move(join(inputDir, file), join(inputDir, 'processed', file))\n\n\treturn 0", "def move_files(self, files: List[str], directory=\"\"):\n result = []\n for file in files:\n if directory == \"\":\n temp_file = File(file)\n new_directory = self._create_or_define(temp_file)\n origin_folder = \"\"\n else:\n new_directory = directory\n origin_folder = os.path.basename(os.path.dirname(file))\n temp_file = File(os.path.basename(file))\n\n if not file.startswith(new_directory):\n if temp_file.get_extension():\n temp_extension = \".\" + temp_file.get_extension()\n else:\n temp_extension = \"\"\n\n ordinal_number = self.check_same_objects(new_directory, temp_file)\n target_name = temp_file.get_just_name() + temp_extension\n if ordinal_number:\n formatted_ordinal_number = f\" ({ordinal_number - 1})\"\n target_name = (\n temp_file.get_just_name()\n + formatted_ordinal_number\n + temp_extension\n )\n\n if self.underscore_flag:\n target_name = target_name.replace(\" \", \"_\")\n\n new_position = os.path.join(self.directory, new_directory, target_name)\n\n file_position = os.path.join(\n self.directory, origin_folder, str(temp_file)\n )\n if file_position != os.path.join(\n self.directory,\n new_directory,\n temp_file.get_just_name() + temp_extension,\n ):\n result.append(os.path.join(origin_folder, str(temp_file)))\n self.possibilities[new_directory].files.append(temp_file)\n if not self.dry_run:\n os.rename(file_position, new_position)\n else:\n print(f\"{file_position} would be moved to {new_position}\")\n elif self.dry_run:\n print(\n f\"{file_position} won't be move since the location is the same\"\n )\n\n self.log_result(result, directory)", "def findfif2move(self, source, destination, foldername):\n import glob\n import shutil\n\n os.chdir(source)\n mainfolders = os.listdir(u'.')\n\n for fname in mainfolders:\n try:\n if fname[:2] == foldername:\n subjectdir = os.path.join(source, fname)\n os.chdir(subjectdir)\n subfolders = os.listdir(u'.')\n \n # for each subject in the provided subfolders \n for s in subfolders:\n if s[0] == 's':\n sessiondir = os.path.join(subjectdir, s)\n os.chdir(sessiondir)\n file = glob.glob(\"*.fif\") # find files to move\n\n for files in file: \n shutil.copy(os.path.join(sessiondir,files),\n destination + fname[1:])\n except Exception:\n print(\"Something went wrong while copying the data >>>\", fname)\n pass\n os.chdir(source)", "def file_move(session, dc_ref, src_file, dst_file):\n LOG.debug(\"Moving file from %(src)s to %(dst)s.\",\n {'src': src_file, 'dst': dst_file})\n vim = session._get_vim()\n move_task = session._call_method(\n session._get_vim(),\n \"MoveDatastoreFile_Task\",\n vim.get_service_content().fileManager,\n sourceName=src_file,\n sourceDatacenter=dc_ref,\n destinationName=dst_file,\n destinationDatacenter=dc_ref)\n session._wait_for_task(move_task)\n LOG.debug(\"File moved\")", "def move_file(file, dest_path):\n if os.path.isdir(dest_path):\n shutil.move(file, dest_path)\n else:\n os.mkdir(dest_path)\n shutil.move(file, dest_path)" ]
[ "0.78872573", "0.7738285", "0.7380401", "0.7375994", "0.7297225", "0.72372043", "0.72019434", "0.7157788", "0.7145809", "0.7066401", "0.7051393", "0.7048595", "0.69972205", "0.69623786", "0.69306546", "0.6854708", "0.6807551", "0.6799462", "0.6773344", "0.6734271", "0.6706912", "0.6700862", "0.669761", "0.66598344", "0.66577417", "0.66546583", "0.6623486", "0.661655", "0.6611619", "0.65984035" ]
0.78143567
1
Draws the restart menu state.
def draw(screen): MY.restart_button.draw(screen) MY.display_text.draw(screen)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restart_menu(self):\n self.__show_menu = True", "def restart(self):\n self.board.container.destroy()\n # create a new board object and draw board + buttons again\n self.board = TicTacToeBoard(self.parent, 100, \"#ECECEC\")\n self.board.draw_board()\n self.initialize_buttons()\n self.show_menu()", "def on_draw(self):\n self.clear()\n arcade.draw_text(\n \"Game Over - Click to restart\",\n SCREEN_WIDTH / 2,\n SCREEN_HEIGHT / 2,\n arcade.color.WHITE,\n 30,\n anchor_x=\"center\",\n )", "def drawButtons(self):\n self.__pausedTitle.draw(self.__screen)\n self.__exitGameButton.draw(self.__screen)\n self.__resumeButton.draw(self.__screen)\n self.__mainMenuButton.draw(self.__screen)", "def draw_menu(self):\n self.screen.fill(self.menu_color, self.rect)\n pygame.draw.rect(self.screen, self.border_color, self.rect, 5)\n self.screen.blit(self.title_image, self.title_image_rect)\n\n self.play_button.draw_button()", "def restart(self):\n self.set_random_pos('starting')\n self.set_random_pos('finishing')\n self.game_loop()", "def restart(self):\n self.gui_block.set()", "def draw(self):\n self.menu_pointer.draw()", "def restart(self):\n\t\tself.destroy()\n\t\t\n\t\tself.resetBoard()\n\t\tself.i_play_again = 0\n\t\tself.adversary_play_again = 0\n\t\tself.isMyTurn = False\n\n\t\tself.initGame(self.parent)\n\n\t\t# Update the screen\n\t\tself.pack()", "def draw_game_over(self):\n output = \"Game Over!\"\n arcade.draw_text(output, 250, 400, arcade.color.BLACK, 54)\n\n output = \"Click to restart\"\n arcade.draw_text(output, 330, 200, arcade.color.BLACK, 24)", "def redraw_menu(self, event):\n self.appInit()\n self.redraw()", "def draw_menu(self):\n self.__screen.fill(pygame.Color(\"black\"))\n self.__screen.blit(Constants.Assets.MENU_BACKGROUND_IMG, (0, 0))\n self.__start_button.draw(self.__screen, Constants.WHITE)\n self.__end_button.draw(self.__screen, Constants.WHITE)\n self.__about_button.draw(self.__screen, Constants.WHITE)", "def restartGame(self):\n\t\tself.state = [[0 for x in range(3)] for y in range(3)]\n\t\tself.turn = self.whoGoesFirst()\n\t\tself.win = 0", "def update_reset_button(self):\r\n if self.board.hovered_tiles and self.is_left_mouse_down:\r\n self.reset_button.draw_uhoh()\r\n else:\r\n self.reset_button.draw_smiley()", "def draw(self):\r\n self.scr.fill(SCREEN_COLOR)\r\n self.label.draw()\r\n pygame.display.flip()", "def draw(self):\n\n self.state_stack.peek().draw(self.screen)", "def drawPauseMenu(self):\n\n # draws pause menu\n self.__screen.blit(self._image, self._rect)\n\n # draws buttons\n self.drawButtons()", "def reset_game(self):\n self._buttons_active = False\n self._laser_label.clear()\n self.add_items_labels()\n self.add_question_marks()\n QtCore.QTimer.singleShot(conf.second_display_duration,\n self.enter_phase_two)", "def draw_main_menu():\n draw_cover()\n draw_menu_buttons()\n draw_border()", "def _optionsmenu_restart():\n self.input_box.delete(1.0, END)\n pass", "def restart(self):\n self.main_grid_values = [\n [0] * self.TILES_PER_ROW for _ in range(self.TILES_PER_ROW)\n ]\n\n self.score_value.set('0')\n self.add_two()\n self.add_two()\n self.update_grid()\n\n self.bind('<{}>'.format(self.controller.slide_left_control), self.push_left)\n self.bind('<{}>'.format(self.controller.slide_right_control), self.push_right)\n self.bind('<{}>'.format(self.controller.slide_up_control), self.push_up)\n self.bind('<{}>'.format(self.controller.slide_down_control), self.push_down)\n\n self.game_over_button.destroy()", "def restart_environment(self, start_state):\n\n if 
self.update_animation:\n self.canvas.delete(self.agent)\n row, col = start_state\n x1 = col * self.GRID_ROW_HEIGHT\n y1 = row * self.GRID_ROW_HEIGHT\n self.agent = self.canvas.create_image(x1 + self.GRID_ROW_HEIGHT / 2, y1 + self.GRID_ROW_HEIGHT / 2,\n image=self.penguin)", "def paint(self):\r\n self.win.bkgd(\" \", COLOR_PAIR[\"con_text\"])", "def on_reset(self) -> None:\r\n\r\n if self.board == None:\r\n return\r\n \r\n self.stop_animation()\r\n self.anim_board.copy(self.board)\r\n self.gen_number.config(text = 0)\r\n TkState.enable(self.edit_menu.winfo_children())\r\n TkState.disable([self.reset_button])\r\n \r\n self.painter.draw_board()", "def resetPressed(self):\n print(\"Reset button has been pressed!\")\n self.save_file = open(os.path.join(args.parent_img_path, self.save_file_name), \"a\")\n self.save_file.write(\"\\n\" + \"-------*Reset Button Pressed*-------\" + \"\\n\")\n self.save_file.close()\n # CTWM Resetting\n if self.showCTWM:\n self.outcomesCTWM = np.array([])\n self.CTWMx = [0]\n self.CTWMy = [0]\n self.curveCTWMGraph.setData(x=self.CTWMx, y=self.CTWMy)\n self.CTWMGraph.clear()\n self.CTWMGraph.draw()\n # Does what initImgCTWM does\n self.labelFailureGrayCTWM.setPixmap(self.images['imageFailureGrayCTWM'])\n self.labelAdvancedGrayCTWM.setPixmap(self.images['imageAdvancedGrayCTWM'])\n self.labelAverageGrayCTWM.setPixmap(self.images['imageAverageGrayCTWM'])\n self.labelGoodGrayCTWM.setPixmap(self.images['imageGoodGrayCTWM'])\n\n # WHM resetting\n if self.showWHM:\n self.outcomesWHM = np.array([])\n self.curveWHMGraph.setData(x=[0], y=[0])\n self.WHMGraph.clear()\n self.WHMGraph.draw()\n # Does what initImgWHM does\n self.labelLevelFourWHM.setPixmap(self.images['imageLevelFourWHMgray'])\n self.labelLevelThreeWHM.setPixmap(self.images['imageLevelThreeWHMgray'])\n self.labelLevelTwoWHM.setPixmap(self.images['imageLevelTwoWHMgray'])\n self.labelLevelOneWHM.setPixmap(self.images['imageLevelOneWHMgray'])\n\n # RTLE resetting\n if self.showRTLE:\n self.speed.setText(\"\")\n self.feed.setText(\"\")\n self.RTLEGraph.clear()\n self.RTLEGraph.draw()\n self.labelTimeAverageWear.setText(\"\")\n self.labelTimeAdvancedWear.setText(\"\")\n self.labelTimeFailureWear.setText(\"\")\n self.curveRTLEGraph = pg.BarGraphItem(name=\"RLTEGraph\", x=[1], height=30, width=3, brush='d9d9d9')\n self.curveRTLEGraph.rotate(-90) # horizontal graph\n self.RTLEGraph.addItem(self.curveRTLEGraph)\n\n ###############################################################################################################\n # #\n # INITIALIZING ALL THE WIDGETS #\n # #\n ###############################################################################################################", "def restart(self):\n for tile in self.canvas.find_all():\n self.canvas.itemconfigure(tile, fill=self.default_color)\n self.reset_data()\n self.score_label.config(text=f'Score: {self.score}')", "def restart(self):\n self.points_arr.append(self.click_count)\n self.grid.destroy()\n self.click_count = 0\n self.ranking_label.destroy()\n self.ranking_box.destroy()\n self.ranking_box = Gtk.Grid()\n self.vbox.add(self.ranking_box)\n self.ranking_panel()\n self.point_label_score.set_markup(\"<b>0</b>\")\n self.point_label_score.show_all()\n self.grid = BallsGrid(self.rows, self.cols)\n self.create_grid()\n self.grid.show()", "def displayStartScreen(self):\n self.model.buttons.draw(self.screen)\n Title=myfont.render(\"THE WORLD ENDS WITH COINS\", 1, random.choice(all_color))\n self.screen.blit(Title, (550, 300))\n pygame.display.update()", "def 
update(delta_time):\n for event in coda.event.listing():\n if coda.event.quit_game(event):\n coda.stop()\n if coda.event.mouse_l_button_down(event):\n if MY.restart_button.collides_with_point(coda.event.mouse_position()):\n coda.state.change(0)", "def draw_pause_menu(self):\n if self.pause_menu_surface is None:\n self.init_pause_menu()\n self.screen.blit(self.pause_menu_surface, (0, 0))\n title = text_helper.create_text(\"Pause\", menu_fonts, 50, white)\n self.screen.blit(title, (center_horizontally(title, self.screen_dimensions), 50))\n for button in self.buttons:\n self.screen.blit(button.get_rendered_button(), button.get_position())" ]
[ "0.69464964", "0.6642332", "0.6490532", "0.6384677", "0.63402164", "0.62999594", "0.62544376", "0.6199466", "0.61894584", "0.6133734", "0.6122533", "0.6107369", "0.60496926", "0.6044342", "0.60208946", "0.59724265", "0.5916732", "0.587158", "0.58492255", "0.5844009", "0.5827783", "0.5811326", "0.5773518", "0.57587683", "0.5756935", "0.5740862", "0.57003105", "0.5691331", "0.5684594", "0.56714845" ]
0.69568
0
Tests that the barracks helper cannot be built without api_key argument
def test_init_barracks_helper_fail_when_no_api_key_given(): try: BarracksHelper(None, _base_url) assert False except ValueError: assert True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_api_key_error(api):\n\twith pytest.raises(top_stories.APIKeyError):\n\t\tmissingAPI = top_stories.TopStoriesAPI()", "def test_init_barracks_helper_succeed_when_api_key_given():\n helper = BarracksHelper(_api_key)\n assert helper.get_api_key() == _api_key\n assert helper.get_base_url() == _base_url\n\n assert helper.update_checker_helper\n assert helper.update_checker_helper._apiKey == _api_key\n assert helper.update_checker_helper._baseUrl == _base_url + _check_update_endpoint\n\n assert helper.package_download_helper\n assert helper.package_download_helper._apiKey == _api_key", "def test_init_barracks_helper_succeed_when_api_key_and_base_url_given():\n base_url = 'http://some.url'\n\n helper = BarracksHelper(_api_key, base_url)\n assert helper.get_api_key() == _api_key\n assert helper.get_base_url() == base_url\n\n assert helper.update_checker_helper\n assert helper.update_checker_helper._apiKey == _api_key\n assert helper.update_checker_helper._baseUrl == base_url + _check_update_endpoint\n\n assert helper.package_download_helper\n assert helper.package_download_helper._apiKey == _api_key", "def test_no_api_key(self):\n\n self.assertRaises(Exception, kaput.init, None, '123')", "def test_api_key_not_found(self):\n runner = CliRunner()\n\n with patch(\"greynoise.cli.decorator.load_config\") as load_config:\n load_config.return_value = {\"api_key\": \"\"}\n result = runner.invoke(\n subcommand.quick,\n [\"0.0.0.0\"],\n parent=Context(main, info_name=\"greynoise\"),\n )\n assert result.exit_code == -1\n assert \"Error: API key not found\" in result.output", "def test_missing_api_key(self):\n with self.assertRaises(TypeError):\n ConnectorWebexTeams()", "def test_setup_bad_api_key(self, mock_get_forecast):\n # The Dark Sky API wrapper that we use raises an HTTP error\n # when you try to use a bad (or no) API key.\n url = \"https://api.darksky.net/forecast/{}/{},{}?units=auto\".format(\n self.key, str(self.lat), str(self.lon)\n )\n msg = f\"400 Client Error: Bad Request for url: {url}\"\n mock_get_forecast.side_effect = HTTPError(msg)\n\n response = darksky.setup_platform(\n self.hass, VALID_CONFIG_MINIMAL[\"sensor\"], MagicMock()\n )\n assert not response", "def test_api_key_not_found(self):\n runner = CliRunner()\n\n with patch(\"greynoise.cli.decorator.load_config\") as load_config:\n load_config.return_value = {\"api_key\": \"\"}\n result = runner.invoke(\n subcommand.ip, [\"0.0.0.0\"], parent=Context(main, info_name=\"greynoise\")\n )\n assert result.exit_code == -1\n assert \"Error: API key not found\" in result.output", "def test_api_key_not_found(self):\n runner = CliRunner()\n\n with patch(\"greynoise.cli.decorator.load_config\") as load_config:\n load_config.return_value = {\"api_key\": \"\"}\n result = runner.invoke(\n subcommand.stats, [\"query\"], parent=Context(main, info_name=\"greynoise\")\n )\n assert result.exit_code == -1\n assert \"Error: API key not found\" in result.output", "def test_unknown_api_key(self, app, data_queues, redis, metricsmock, logs):\n res = self._call(app, api_key=\"abcdefg\", ip=self.test_ip, status=400)\n self.check_response(data_queues, res, \"invalid_key\")\n metricsmock.assert_incr_once(\n self.metric_type + \".request\", tags=[self.metric_path, \"key:invalid\"]\n )\n assert redis.keys(\"apiuser:*\") == []\n assert logs.only_entry[\"api_key\"] == \"invalid\"\n assert logs.only_entry[\"invalid_api_key\"] == \"abcdefg\"", "def test_api_key_not_found(self):\n runner = CliRunner()\n\n with patch(\"greynoise.cli.decorator.load_config\") as 
load_config:\n load_config.return_value = {\"api_key\": \"\"}\n result = runner.invoke(\n subcommand.query,\n [\"<query>\"],\n parent=Context(main, info_name=\"greynoise\"),\n )\n assert result.exit_code == -1\n assert \"Error: API key not found\" in result.output", "def test_failure_with_invalid_api_key(self):\n self.geocoder = Yandex(\n api_key='bad key'\n )\n with self.assertRaises(GeocoderInsufficientPrivileges):\n self.geocode_run(\n {\"query\": \"площадь Ленина Донецк\"},\n {}\n )", "def test_missing_api_key(self):\n runner = CliRunner()\n expected_error = 'Error: Missing option \"-k\" / \"--api-key\"'\n\n result = runner.invoke(subcommand.setup, [])\n assert result.exit_code == 2\n assert expected_error in result.output", "def test_warning_with_no_api_key(self):\n with warnings.catch_warnings(record=True) as w:\n Yandex()\n self.assertEqual(len(w), 1)", "def test_api_key_is_None(self):\n settings.GTMETRIX_REST_API_KEY = None\n with raises(GTmetrixAPIKeyIsNone):\n gt = GTmetrixInterface()", "def test_no_api_key(self, app, data_queues, redis, metricsmock, logs):\n res = self._call(app, api_key=None, ip=self.test_ip, status=400)\n self.check_response(data_queues, res, \"invalid_key\")\n metricsmock.assert_incr_once(\n self.metric_type + \".request\", tags=[self.metric_path, \"key:none\"]\n )\n assert redis.keys(\"apiuser:*\") == []\n assert logs.only_entry[\"api_key\"] == \"none\"\n assert \"invalid_api_key\" not in logs.only_entry", "async def test_invalid_key(hass: HomeAssistant, invalid_key_api: Mock) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": SOURCE_USER}\n )\n assert result.get(\"type\") == data_entry_flow.FlowResultType.FORM\n assert result.get(\"step_id\") == \"user\"\n\n # Test filling in API key\n result = await hass.config_entries.flow.async_init(\n DOMAIN,\n context={\"source\": SOURCE_USER},\n data={CONF_API_TOKEN: \"psk_123456789\"},\n )\n assert result.get(\"type\") == data_entry_flow.FlowResultType.FORM\n # Goes back to the user step\n assert result.get(\"step_id\") == \"user\"\n assert result.get(\"errors\") == {\"api_token\": \"invalid_api_token\"}", "def _check_api_key(self):\n try:\n self.maps.places_nearby(\n location=(53.909804, 27.580184),\n radius=650,\n open_now=False,\n language=config.LANGUAGE,\n type='cafe',\n # rank_by='distance', # IMPORTANT: cannot use rank_by and radius options together\n page_token=None,\n )\n except Exception as e:\n\n with self.__writelock:\n self.print(f'ERROR: bad API key \"{self.maps.key}\" (tracker={self.stats.previous_requests})\\n')\n raise e", "def test_invalid_api_key(self, app, data_queues, redis, metricsmock, logs):\n res = self._call(app, api_key=\"invalid_key\", ip=self.test_ip, status=400)\n self.check_response(data_queues, res, \"invalid_key\")\n metricsmock.assert_incr_once(\n self.metric_type + \".request\", tags=[self.metric_path, \"key:none\"]\n )\n assert redis.keys(\"apiuser:*\") == []\n assert logs.only_entry[\"api_key\"] == \"none\"\n assert logs.only_entry[\"invalid_api_key\"] == \"invalid_key\"", "def test_key_none(self):\n try:\n AlphaVantage()\n self.fail(msg='A None api key must raise an error')\n except ValueError:\n self.assertTrue(True)", "def test_key_none(self):\n try:\n AlphaVantage()\n self.fail(msg='A None api key must raise an error')\n except ValueError:\n self.assertTrue(True)", "def test_create_api_key(self):\n pass", "async def test_create_bike_bad_value(database, key_name):\n with pytest.raises(ValueError):\n await 
register_bike(public_key=key_name, master_key=key_name)", "def _check_settings(self):\n if self.api_key is None:\n raise ImproperlyConfigured(\"You must provide an API key.\")", "def test_init__no_sdk_key_no_datafile__fails(self, _):\n self.assertRaisesRegex(\n optimizely_exceptions.InvalidInputException,\n enums.Errors.MISSING_SDK_KEY,\n config_manager.PollingConfigManager,\n sdk_key=None,\n datafile=None,\n )", "def _apikey():\n return __opts__.get(\"bamboohr\", {}).get(\"apikey\", None)", "async def test_form_invalid_api_key(hass: HomeAssistant) -> None:\n mocked_owm = _create_mocked_owm(True)\n\n with patch(\n \"pyowm.weatherapi25.weather_manager.WeatherManager\",\n return_value=mocked_owm,\n side_effect=UnauthorizedError(\"\"),\n ):\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": SOURCE_USER}, data=CONFIG\n )\n\n assert result[\"errors\"] == {\"base\": \"invalid_api_key\"}", "def a_valid_api_key(configuration):\n configuration.api_key[\"apiKeyAuth\"] = os.getenv(\"DD_TEST_CLIENT_API_KEY\", \"fake\")", "async def test_create_bike_bad_master(database):\n with pytest.raises(BadKeyError):\n await register_bike(random_key(32), \"BADBAD\")", "def test_dweet_for_with_an_invalid_key(self):\n try:\n dweepy.dweet_for(self.my_thing_id, test_data, key='badkey')\n except dweepy.DweepyError as e:\n self.assertEqual(e.args[0], 'the key you provided doesn\\'t work with this thing')\n else:\n self.fail(\"shouldn't ever get called\")" ]
[ "0.7188455", "0.7150424", "0.6868079", "0.6843092", "0.68007284", "0.6641392", "0.65708363", "0.6499072", "0.64861906", "0.64667827", "0.6463625", "0.6444715", "0.6391974", "0.6325179", "0.6301825", "0.62591594", "0.61920804", "0.6184099", "0.6144185", "0.6124961", "0.6124961", "0.61112493", "0.6110981", "0.6095638", "0.6078354", "0.6036671", "0.6036224", "0.601149", "0.59877634", "0.5979662" ]
0.8481922
0
Tests that the barracks helper is correctly built with an api_key and no base_url
def test_init_barracks_helper_succeed_when_api_key_and_base_url_given(): base_url = 'http://some.url' helper = BarracksHelper(_api_key, base_url) assert helper.get_api_key() == _api_key assert helper.get_base_url() == base_url assert helper.update_checker_helper assert helper.update_checker_helper._apiKey == _api_key assert helper.update_checker_helper._baseUrl == base_url + _check_update_endpoint assert helper.package_download_helper assert helper.package_download_helper._apiKey == _api_key
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_init_barracks_helper_fail_when_no_api_key_given():\n try:\n BarracksHelper(None, _base_url)\n assert False\n except ValueError:\n assert True", "def test_init_barracks_helper_succeed_when_api_key_given():\n helper = BarracksHelper(_api_key)\n assert helper.get_api_key() == _api_key\n assert helper.get_base_url() == _base_url\n\n assert helper.update_checker_helper\n assert helper.update_checker_helper._apiKey == _api_key\n assert helper.update_checker_helper._baseUrl == _base_url + _check_update_endpoint\n\n assert helper.package_download_helper\n assert helper.package_download_helper._apiKey == _api_key", "def test_api_base(self):\n # load api base\n r = requests.get('{server}/api/0.1/'.format(\n server=self.get_server_url()))\n j = r.json()\n self.assertIn('gages', j)\n self.assertIn('sections', j)\n self.assertIn('regions', j)\n self.assertIn('rivers', j)\n self.assertIn('sensors', j)\n self.assertIn('samples', j)", "def test_api_key(self):\n self.assertEqual(self.route4me.key, '11111111111111111111111111111111')", "def test_create_api_key(self):\n pass", "def test_api_key_error(api):\n\twith pytest.raises(top_stories.APIKeyError):\n\t\tmissingAPI = top_stories.TopStoriesAPI()", "def test_random_quotes_api(self):\n response = api_client.get(\"/\")\n self.assertEqual(response.status_code, 200)", "def testApi(self):", "def test_api_key():\n assert gather_stock_returns('abc', 'AAPL', buy_date, sell_date) == msg3", "def test_no_api_key(self):\n\n self.assertRaises(Exception, kaput.init, None, '123')", "def __init__(self, api_key):\n self.api_key = api_key\n self.base_url = 'https://studio.spotflock.com/api/v1'", "def _apikey():\n return __opts__.get(\"bamboohr\", {}).get(\"apikey\", None)", "def test_api_key_not_found(self):\n runner = CliRunner()\n\n with patch(\"greynoise.cli.decorator.load_config\") as load_config:\n load_config.return_value = {\"api_key\": \"\"}\n result = runner.invoke(\n subcommand.quick,\n [\"0.0.0.0\"],\n parent=Context(main, info_name=\"greynoise\"),\n )\n assert result.exit_code == -1\n assert \"Error: API key not found\" in result.output", "def test_set_api_key(self):\n\n api_key = 'abc'\n project_id = '123'\n\n kaput.init(api_key, project_id)\n\n self.assertEqual(api_key, kaput._API_KEY)\n self.assertEqual(project_id, kaput._PROJECT_ID)\n self.assertFalse(kaput._DEBUG)\n self.assertEqual(kaput._handle_exception, sys.excepthook)", "def test_api_river(self):\n # load api base\n r = requests.get('{server}/api/0.1/'.format(\n server=self.get_server_url())).json()\n # load rivers from url specified in api base\n r = requests.get(r['rivers']).json()\n r = requests.get(r['rivers'][0]['url']).json()\n self.assertIn('html', r)\n self.assertIn('url', r)\n self.assertIn('sections', r)\n self.assertIn('gages', r)\n self.assertIn('regions', r)\n self.assertIn('tributaries', r)", "def test_generate_url_with_api_key():\n config = core.Config(api_key='FAKE')\n expected = \"{}?{}\".format(ENTREZ_URL, \"retmode=text&id=FAKE&db=nucleotide&api_key=FAKE&rettype=gbwithparts\")\n assert expected == core.generate_url(\"FAKE\", config)\n\n config.format = 'gff3'\n expected = \"{}?{}\".format(SVIEWER_URL, \"retmode=text&id=FAKE&db=nucleotide&api_key=FAKE&report=gff3\")\n assert expected == core.generate_url(\"FAKE\", config)", "def test_setup_bad_api_key(self, mock_get_forecast):\n # The Dark Sky API wrapper that we use raises an HTTP error\n # when you try to use a bad (or no) API key.\n url = \"https://api.darksky.net/forecast/{}/{},{}?units=auto\".format(\n self.key, 
str(self.lat), str(self.lon)\n )\n msg = f\"400 Client Error: Bad Request for url: {url}\"\n mock_get_forecast.side_effect = HTTPError(msg)\n\n response = darksky.setup_platform(\n self.hass, VALID_CONFIG_MINIMAL[\"sensor\"], MagicMock()\n )\n assert not response", "def test_returns_passed_apiurl_over_everything(self):\n arguments = {'--api-url': 'passed_stuff'}\n config = {'api_url': 'irrelevant_stuff'}\n result = get_api_url(arguments, config)\n self.assertEqual(result, 'passed_stuff')\n self.mock_sanitize_host.assert_not_called()", "def test_get_api_resources(self):\n pass", "def test_api_key_is_None(self):\n settings.GTMETRIX_REST_API_KEY = None\n with raises(GTmetrixAPIKeyIsNone):\n gt = GTmetrixInterface()", "def test_base_url(self):\n\n # Each of these URLs should be invalid\n for url in [\n \"test.com/123\",\n \"http://:80/123\",\n \"//xyz.co.uk\",\n ]:\n with self.assertRaises(Exception):\n a = api.InvenTreeAPI(url, connect=False)\n\n # test for base URL construction\n a = api.InvenTreeAPI('https://test.com', connect=False)\n self.assertEqual(a.base_url, 'https://test.com/')\n self.assertEqual(a.api_url, 'https://test.com/api/')\n\n # more tests that the base URL is set correctly under specific conditions\n urls = [\n \"http://a.b.co:80/sub/dir/api/\",\n \"http://a.b.co:80/sub/dir/api\",\n \"http://a.b.co:80/sub/dir/\",\n \"http://a.b.co:80/sub/dir\",\n ]\n\n for url in urls:\n a = api.InvenTreeAPI(url, connect=False)\n self.assertEqual(a.base_url, \"http://a.b.co:80/sub/dir/\")\n self.assertEqual(a.api_url, \"http://a.b.co:80/sub/dir/api/\")", "async def test_dev_fetch_api_key(client):\n params = [('username', '[email protected]')]\n headers = { \n 'Accept': 'application/json',\n }\n response = await client.request(\n method='POST',\n path='/api/v1/dev_fetch_api_key',\n headers=headers,\n params=params,\n )\n assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')", "def test_api() -> bool:\r\n weather = False\r\n news = False\r\n covid = False\r\n if check_weather_version():\r\n logging.info(\"Weather API version is up to date (check_weather_version())\")\r\n weather = True\r\n else:\r\n logging.info(\"Weather API version is not up to date (check_weather_version()) - ACTION REQUIRED\")\r\n if check_news_version():\r\n logging.info(\"News API version is up to date (check_news_version())\")\r\n news = True\r\n else:\r\n logging.info(\"News API version is not up to date (check_news_version()) - ACTION REQUIRED\")\r\n if check_covid_version():\r\n logging.info(\"Covid-19 API version is up to date (check_covid_version())\")\r\n covid = True\r\n else:\r\n logging.info(\"Covid-19 API version is not up to date (check_covid_version()) - ACTION REQUIRED\")\r\n return bool(weather and news and covid)", "def test_get_boat(self):\n pass", "def test_add_api_key_to_org(self):\n pass", "def a_valid_api_key(configuration):\n configuration.api_key[\"apiKeyAuth\"] = os.getenv(\"DD_TEST_CLIENT_API_KEY\", \"fake\")", "def test_missing_api_key(self):\n with self.assertRaises(TypeError):\n ConnectorWebexTeams()", "def test_get_user_api_keys(self):\n pass", "def test_apiBaseURL(self):\n apiBaseURL = \"http://%s\"\n builder = DistributionBuilder(self.rootDir, self.outputDir,\n apiBaseURL=apiBaseURL)\n loreInput, loreOutput = self.getArbitraryLoreInputAndOutput(\n \"0.3.0\", apiBaseURL=apiBaseURL)\n structure = {\n \"LICENSE\": \"copyright!\",\n \"twisted\": {\"web\": {\"__init__.py\": \"import WEB\",\n \"topfiles\": {\"setup.py\": \"import WEBINST\"}}},\n \"doc\": 
{\"web\": {\"howto\": {\"index.xhtml\": loreInput}},\n \"core\": {\"howto\": {\"template.tpl\": self.template}}\n }\n }\n\n outStructure = {\n \"LICENSE\": \"copyright!\",\n \"setup.py\": \"import WEBINST\",\n \"twisted\": {\"web\": {\"__init__.py\": \"import WEB\"}},\n \"doc\": {\"howto\": {\"index.html\": loreOutput}}}\n\n self.createStructure(self.rootDir, structure)\n outputFile = builder.buildSubProject(\"web\", \"0.3.0\")\n self.assertExtractedStructure(outputFile, outStructure)", "def test_base_url(self):\n r = self.base_check_request(\"get\", \"/\")\n\n base_urls = {\n 'apartments': self.build_url('apartments/'),\n 'companies': self.build_url('companies/'),\n 'companies-types': self.build_url('companies-types/'),\n 'complexes': self.build_url('complexes/'),\n 'locations': self.build_url('locations/')\n }\n self.assertDictEqual(r, base_urls)" ]
[ "0.8263585", "0.8078991", "0.6654061", "0.6618654", "0.66106915", "0.6487298", "0.6291824", "0.62910557", "0.6287132", "0.62710816", "0.6215637", "0.61554646", "0.60898596", "0.60878843", "0.6084779", "0.60764563", "0.60689425", "0.6059673", "0.6006741", "0.5999861", "0.59782606", "0.5962283", "0.59528166", "0.5941255", "0.59354895", "0.5924228", "0.5920006", "0.5890537", "0.5870705", "0.5842974" ]
0.81723756
1
Tests that the client ''check'' callback is called with UpdateDetail when status code is available
def test_check_update_calls_callback_when_update_available(): with requests_mock.mock() as mocked_server: mocked_server.post(_base_url + _check_update_endpoint, text=_json_update_response, status_code=200) request = UpdateDetailRequest('v1', 'MyDevice', '{"AnyCustomData":"any_value"}') update_helper = UpdateCheckHelper(_api_key, _base_url) update_helper.check_update(request, update_available_callback)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updates_check():\n data = wait_for_callback(client, cb_updates_name)\n self.assertTrue(isinstance(data, dict))", "def check_status(self):", "def test_check_update_calls_callback_when_no_update_available():\n with requests_mock.mock() as mocked_server:\n mocked_server.post(_base_url + _check_update_endpoint, text='', status_code=204)\n\n request = UpdateDetailRequest('v1', 'MyDevice', '{\"AnyCustomData\":\"any_value\"}')\n update_helper = UpdateCheckHelper(_api_key, _base_url)\n\n update_helper.check_update(request, no_update_available_callback)", "def __check(self):\n status = '200 OK'\n try:\n response = get(self.__url)\n status = '{} {}'.format(\n response.status_code,\n http.client.responses[response.status_code]\n )\n except Exception as e:\n status = e.__class__.__name__\n \n if status[:3] == '200':\n self.__notify_up()\n else:\n if not self.downtime_info:\n self.downtime_info = DowntimeInfo(status)\n self.__notify_down()", "def test_status_code(self):\n assert self.detail_response.status_code == 200", "def test_get_status(self):\n pass", "def test_get_status(self):\n pass", "def verify_get_response(self, status):\n validate(status, STATUS)\n self.assertTrue(status['database_connection']['connected'])\n self.assertTrue(status['redis_connection']['connected'])\n self.assertEqual(status['missing_workers'], [])\n self.assertNotEqual(status['online_workers'], [])\n self.assertNotEqual(status['versions'], [])", "def test_check_status(self):\n post_json = {\"submission_id\": self.status_check_submission_id}\n # Populating error info before calling route to avoid changing last update time\n\n with create_app().app_context():\n sess = GlobalDB.db().session\n populate_submission_error_info(self.status_check_submission_id)\n\n response = self.app.post_json(\"/v1/check_status/\", post_json, headers={\"x-session-id\": self.session_id})\n\n self.assertEqual(response.status_code, 200, msg=str(response.json))\n self.assertEqual(response.headers.get(\"Content-Type\"), \"application/json\")\n json = response.json\n # response ids are coming back as string, so patch the jobIdDict\n job_id_dict = {k: str(self.jobIdDict[k]) for k in self.jobIdDict.keys()}\n job_list = json[\"jobs\"]\n approp_job = None\n cross_job = None\n for job in job_list:\n if str(job[\"job_id\"]) == str(job_id_dict[\"appropriations\"]):\n # Found the job to be checked\n approp_job = job\n elif str(job[\"job_id\"]) == str(job_id_dict[\"cross_file\"]):\n # Found cross file job\n cross_job = job\n\n # Must have an approp job and cross-file job\n self.assertNotEqual(approp_job, None)\n self.assertNotEqual(cross_job, None)\n # And that job must have the following\n self.assertEqual(approp_job[\"job_status\"], \"ready\")\n self.assertEqual(approp_job[\"job_type\"], \"csv_record_validation\")\n self.assertEqual(approp_job[\"file_type\"], \"appropriations\")\n self.assertEqual(approp_job[\"filename\"], \"approp.csv\")\n self.assertEqual(approp_job[\"file_status\"], \"complete\")\n self.assertIn(\"missing_header_one\", approp_job[\"missing_headers\"])\n self.assertIn(\"missing_header_two\", approp_job[\"missing_headers\"])\n self.assertIn(\"duplicated_header_one\", approp_job[\"duplicated_headers\"])\n self.assertIn(\"duplicated_header_two\", approp_job[\"duplicated_headers\"])\n # Check file size and number of rows\n self.assertEqual(approp_job[\"file_size\"], 2345)\n self.assertEqual(approp_job[\"number_of_rows\"], 567)\n\n # Check error metadata for specified error\n rule_error_data = None\n for data in 
approp_job[\"error_data\"]:\n if data[\"field_name\"] == \"header_three\":\n rule_error_data = data\n self.assertIsNotNone(rule_error_data)\n self.assertEqual(rule_error_data[\"field_name\"], \"header_three\")\n self.assertEqual(rule_error_data[\"error_name\"], \"rule_failed\")\n self.assertEqual(rule_error_data[\"error_description\"], \"A rule failed for this value.\")\n self.assertEqual(rule_error_data[\"occurrences\"], \"7\")\n self.assertEqual(rule_error_data[\"rule_failed\"], \"Header three value must be real\")\n self.assertEqual(rule_error_data[\"original_label\"], \"A1\")\n # Check warning metadata for specified warning\n warning_error_data = None\n for data in approp_job[\"warning_data\"]:\n if data[\"field_name\"] == \"header_three\":\n warning_error_data = data\n self.assertIsNotNone(warning_error_data)\n self.assertEqual(warning_error_data[\"field_name\"], \"header_three\")\n self.assertEqual(warning_error_data[\"error_name\"], \"rule_failed\")\n self.assertEqual(warning_error_data[\"error_description\"], \"A rule failed for this value.\")\n self.assertEqual(warning_error_data[\"occurrences\"], \"7\")\n self.assertEqual(warning_error_data[\"rule_failed\"], \"Header three value looks odd\")\n self.assertEqual(warning_error_data[\"original_label\"], \"A2\")\n\n rule_error_data = None\n for data in cross_job[\"error_data\"]:\n if data[\"field_name\"] == \"header_four\":\n rule_error_data = data\n\n self.assertEqual(rule_error_data[\"source_file\"], \"appropriations\")\n self.assertEqual(rule_error_data[\"target_file\"], \"award\")\n\n # Check submission metadata\n self.assertEqual(json[\"cgac_code\"], \"SYS\")\n self.assertEqual(json[\"reporting_period_start_date\"], \"Q1/2016\")\n self.assertEqual(json[\"reporting_period_end_date\"], \"Q1/2016\")\n\n # Check submission level info\n self.assertEqual(json[\"number_of_errors\"], 17)\n self.assertEqual(json[\"number_of_rows\"], 667)\n\n # Get submission from db for attribute checks\n submission = sess.query(Submission).filter(\n Submission.submission_id == self.status_check_submission_id).one()\n\n # Check number of errors and warnings in submission table\n self.assertEqual(submission.number_of_errors, 17)\n self.assertEqual(submission.number_of_warnings, 7)\n\n # Check that submission was created today, this test may fail if run right at midnight UTC\n self.assertEqual(json[\"created_on\"], datetime.utcnow().strftime(\"%m/%d/%Y\"))\n self.assertEqual(json[\"last_updated\"], submission.updated_at.strftime(\"%Y-%m-%dT%H:%M:%S\"))", "def test_status_ok(api_client):\n response = api_client.get()\n assert response.ok", "def check(client: Client):\n pass", "def test_check_status(mock_send_message):\n A1sim.check_status(BASE_URL)\n mock_send_message.assert_called_once_with('GET',\n 'Get ric status',\n (f\"{BASE_URL}\"))", "def test_status_endpoint(self):\n Org(id='test1').put()\n response = self.app.get('/adapter/test1/status')\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json['connected'], False)\n self.assertEqual(response.json['synced'], False)\n self.assertEqual(response.json['updating'], False)\n self.assertEqual(response.json['synced_at'], None)\n\n Org(id='test2', status=2).put()\n response = self.app.get('/adapter/test2/status')\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json['connected'], True)\n\n Org(id='test3', status=2).put()\n OrgChangeset(org_uid='test3', publish_job_finished=True, publish_job_failed=False).put()\n response = self.app.get('/adapter/test3/status')\n 
self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json['synced'], True)", "def test_check_status_admin(self):\n post_json = {\"submission_id\": self.status_check_submission_id}\n # Log in as admin user\n self.login_admin_user()\n # Call check status route (also checking case insensitivity of header here)\n response = self.app.post_json(\"/v1/check_status/\", post_json, expect_errors=True,\n headers={\"x-SESSION-id\": self.session_id})\n # Assert 200 status\n self.assertEqual(response.status_code, 200)", "def test_client_update(self):\n pass", "def test_get_status(self):\n response = self.client.open(\n '/v1/status',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_client_verification_retrieve(self):\n pass", "def test_status_code(self):\n assert self.list_response.status_code == 200", "def test_client_nationlity_update(self):\n pass", "def status_check():\n return {\"status\": \"OK\"}", "def status(code):\r\n def has_status(client, response, testcase):\r\n testcase.assertEqual(\r\n response.status_code,\r\n code\r\n )\r\n return has_status", "def test_get_status(self) -> None:\n\n given = \"example.org\"\n\n # This is an abstract method. So we need to define it.\n self.checker.query_status = lambda: None\n\n self.checker.subject = given\n\n actual = self.checker.get_status()\n\n self.assertIsInstance(actual, CheckerStatusBase)", "async def _check_status(\n self, update: Update, context: ContextTypes.DEFAULT_TYPE\n ) -> None:\n self.system_status_lock.acquire()\n info = self.system_status_proxy._getvalue()\n self.system_status_lock.release()\n await update.message.reply_markdown(\"*System Status*\")\n for key in info:\n await update.message.reply_text(f\"{key}: {info[key]}\")", "def test_1():\n\tassert api_call().status_code == 200", "def checkStatus(self):\n return None", "async def test_health_check(client: AsyncClient):\n\n response = await client.get(f\"/health-check\")\n assert response.status_code == 200\n\n data = response.json()\n assert data[\"service\"][\"status\"] == \"healthy\"\n assert data[\"service\"][\"error\"] is None\n assert data[\"database\"][\"status\"] == \"healthy\"\n assert data[\"database\"][\"error\"] is None", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def test_check_health_success(self):\n ok, msg = self.db.check_health()\n self.assertTrue(ok)", "def test_endpoint_status(self) -> None:\n status = self.client.endpoint_status\n self.assertIsInstance(status, dict)" ]
[ "0.7000315", "0.6929515", "0.6763312", "0.6612882", "0.6596048", "0.6583341", "0.6583341", "0.6580416", "0.65675205", "0.655343", "0.65316373", "0.6483704", "0.64354014", "0.64349073", "0.64082783", "0.6399724", "0.63660103", "0.6345494", "0.6345222", "0.6334975", "0.62610376", "0.6209289", "0.6192881", "0.6185302", "0.61833847", "0.61794305", "0.6171136", "0.6171136", "0.6171136", "0.6166668" ]
0.7042606
0
Tests that the download request is built with appropriate parameters and headers
def test_download_package_helper_properly_build_request_when_valid_url_given(): download_helper = PackageDownloadHelper(_api_key) built_request = download_helper.build_download_request(_update_response_url) headers = built_request.headers assert headers['Authorization'] == _api_key assert headers['Content-Type'] == 'application/json' assert built_request.url == _update_response_url
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_download(self, file_list, _):\n url = reverse(\"report_download\")\n response = self.client.get(url)\n body = response.json()\n\n self.assertEqual(response.status_code, 200)\n self.assertIn(\"Download Request Task ID\", body)\n\n url_w_params = url + \"?provider_uuid=1&bill_date=2021-04-01\"\n response = self.client.get(url_w_params)\n body = response.json()\n\n self.assertEqual(response.status_code, 200)\n self.assertIn(\"Download Request Task ID\", body)", "def test_download(self):\n pass", "def test_download1(self):\n pass", "def test_download2(self):\n pass", "async def test_backup_download_headers(\n hassio_client, aioclient_mock: AiohttpClientMocker, mock_not_onboarded\n) -> None:\n content_disposition = \"attachment; filename=test.tar\"\n aioclient_mock.get(\n \"http://127.0.0.1/backups/1234abcd/download\",\n headers={\n \"Content-Length\": \"50000000\",\n \"Content-Disposition\": content_disposition,\n },\n )\n\n resp = await hassio_client.get(\"/api/hassio/backups/1234abcd/download\")\n\n # Check we got right response\n assert resp.status == HTTPStatus.OK\n\n assert len(aioclient_mock.mock_calls) == 1\n\n assert resp.headers[\"Content-Disposition\"] == content_disposition", "def test_file_download(self):\n\n # Downloading without auth = unauthorized error (401)\n with self.assertRaises(requests.exceptions.HTTPError):\n self.assertFalse(self.api.downloadFile('/media/part/files/1/test.pdf', 'test.pdf'))", "def download(self, download_request):\n raise NotImplementedError", "def test_get_project_file(self):\n query_string = [('id', 'id_example'),\n ('path', 'path_example')]\n headers = { \n 'Accept': 'application/json',\n 'Authorization': 'Bearer special-key',\n }\n response = self.client.open(\n '/api/v1/project-files/download',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_download_view_valid(self):\n link = DownloadLink(path='/home/blah/file')\n link.save()\n factory = RequestFactory()\n request = factory.get('/downloads');\n error = False\n response = views.download(request, link.id);\n self.assertEqual(response['X-SendFile'], link.path)", "def test_download_file(self):\n expected_full_path = \"{}/{}/azure/{}/{}\".format(\n Config.TMP_DIR, self.customer_name.replace(\" \", \"_\"), self.mock_data.container, self.mock_data.export_file\n )\n full_file_path, etag, _, __ = self.downloader.download_file(self.mock_data.export_key)\n self.assertEqual(full_file_path, expected_full_path)\n self.assertEqual(etag, self.mock_data.export_etag)", "def test_download(self):\n\n # Test for all correct data\n self.assertEqual(\n download(TestSteelEye.url, self.xmlfilepath, \"sourcefile.xml\"),\n self.xmlfilepath + os.sep + \"sourcefile.xml\",\n )\n\n # Test for incorrect url\n self.assertEqual(\n download(\"http://example.com\", self.xmlfilepath, \"sourcefile.xml\"), \"\"\n )\n\n # Test for different download path\n self.assertEqual(\n download(\n TestSteelEye.url,\n os.path.join(os.getcwd(), \"anotherpath\"),\n \"sourcefile.xml\",\n ),\n os.path.join(os.getcwd(), \"anotherpath\") + os.sep + \"sourcefile.xml\",\n )\n\n # Test for incorrect download path\n self.assertEqual(download(TestSteelEye.url, \"E:\", \"sourcefile.xml\"), \"\")", "def _verify_export_succeeded(self, resp):\r\n self.assertEquals(resp.status_code, 200)\r\n self.assertTrue(resp.get('Content-Disposition').startswith('attachment'))", "def test_download_host(self):\n pass", "def test_download_to_file(req, 
tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()", "def test_download_view_invalid_url(self):\n factory = RequestFactory()\n request = factory.get('/downloads');\n error = False\n try:\n response = views.download(request, 'fake_id');\n except: \n error = True\n\n self.assertTrue(error)", "def test_get_request_output(self):\n pass", "def test_get_experiment_artifact(self):\n query_string = [('id', 'id_example'),\n ('path', 'path_example')]\n headers = { \n 'Accept': 'application/json',\n 'Authorization': 'Bearer special-key',\n }\n response = self.client.open(\n '/api/v1/experiment-artifacts/download',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_download_file(token):\n\n # github => repo => release => asset_list => asset => url => download\n\n g_h = github.Github(token, per_page=100)\n repo = g_h.get_repo(TEST_SLUG, lazy=False)\n release = repo.get_release(TEST_TAG)\n asset_list = release.get_assets()\n sha_filename = Template(Arguments.HASH_FILE).safe_substitute({\n 'platform': platform.system().lower()\n })\n\n assets_calculated_sha = 'notasha'\n sha_dict = {}\n\n for check_asset in asset_list:\n # look through list of assets for uploaded file and sha file\n\n if check_asset.name == os.path.basename(TEST_FILENAME):\n\n # the uploaded asset\n request = requests.get(check_asset.browser_download_url)\n open(TEST_DOWNLOAD, 'wb').write(request.content)\n\n # recalc hash of downloaded file\n assets_calculated_sha = Arguments.get_hash(TEST_DOWNLOAD)\n\n elif check_asset.name == sha_filename:\n\n # the sha hash file\n request = requests.get(check_asset.browser_download_url)\n sha_dict = request.json()\n\n assert assets_calculated_sha == sha_dict[os.path.basename(TEST_FILENAME)]", "def test_build_headers(self):\n\n headers = self_signed.build_headers()\n assert 'Content-Length' in headers\n assert 'X-Amz-Date' in headers\n assert 'Host' in headers\n assert 'X-Amz-Security-Token' in headers\n assert 'Content-Type' in headers\n assert 'Authorization' in headers", "def test_download_to_file_retry(req, tmpdir):\n req.get(ENTREZ_URL, response_list=[\n {\"text\": u'Whoa, slow down', \"status_code\": 429, \"headers\": {\"Retry-After\": \"0\"}},\n {\"text\": 'This works.'},\n ])\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()", "def test_specific_url_is_used_for_request(self):\n req = self.httpbin.get_my_headers(dry_run=True)\n\n url = self.httpbin.client[\"get_my_headers\"][\"url\"]\n self.assertIn(url, req.prepared_request.url)", "def test_search(self):\n req = http.make_request('http://xxx', 'GET', None, None)\n self.assertIsNone(req.data)\n\n req = http.make_request('http://xxx', 'GET', 'ignored', None)\n self.assertIsNone(req.data)\n\n req = http.make_request('http://xxx', 'DELETE', None, None)\n self.assertIsNone(req.data)\n\n req = http.make_request('http://xxx', 'DELETE', 'ignored', None)\n self.assertIsNone(req.data)\n\n req = http.make_request('http://xxx', 'POST', '', None)\n self.assertEqual(0, len(req.data))\n\n req = 
http.make_request('http://xxx', 'POST', 'abc', None)\n self.assertEqual(3, len(req.data))\n\n req = http.make_request('http://xxx', 'POST', '', [('xxx', 'yyy'),\n ('foo',)])\n\n self.assertEqual('yyy', req.get_header('Xxx'))\n self.assertEqual('1', req.get_header('Foo'))", "def test_prep_file(self, mock_open):\n path = \"/tmp/foo\"\n request = DownloadRequest(None, None, None, path)\n download_threads._MultithreadedDownloader._prep_file(request)\n mock_open.assert_called_once_with(path, \"wb\")\n\n mock_open.return_value.close.assert_called_once_with()", "def test_download_deployment_run_test_report(self):\n pass", "def test_download(api):\n # upload file prior to download\n # with pytest.raises(APIConnectionError):\n uploaded_file = api.upload(\n tag='test_upload',\n expiry='1w',\n path='tests/test_file.txt'\n )\n\n # check that instance of FileIO has these fields\n assert uploaded_file.link\n assert uploaded_file.key\n assert uploaded_file.tag\n assert uploaded_file.path\n\n # remove the uploaded file from the os\n remove('tests/test_file.txt')\n\n # download and save the file\n api.download(tag='test_upload')\n\n # check that file was saved in a filesystem\n assert path.isfile('tests/test_file.txt')", "def test_submission_download(client, two_challenge_sets):\n submission = SubmissionFactory(\n phase=two_challenge_sets.challenge_set_1.challenge.phase_set.get(),\n creator=two_challenge_sets.challenge_set_1.participant,\n )\n\n tests = [\n # (\n # image response + annotation response not test ground truth,\n # user\n # )\n (403, None),\n (403, two_challenge_sets.challenge_set_1.non_participant),\n (302, two_challenge_sets.challenge_set_1.participant),\n (403, two_challenge_sets.challenge_set_1.participant1),\n (302, two_challenge_sets.challenge_set_1.creator),\n (302, two_challenge_sets.challenge_set_1.admin),\n (403, two_challenge_sets.challenge_set_2.non_participant),\n (403, two_challenge_sets.challenge_set_2.participant),\n (403, two_challenge_sets.challenge_set_2.participant1),\n (403, two_challenge_sets.challenge_set_2.creator),\n (403, two_challenge_sets.challenge_set_2.admin),\n (302, two_challenge_sets.admin12),\n (403, two_challenge_sets.participant12),\n (302, two_challenge_sets.admin1participant2),\n ]\n\n for test in tests:\n response = get_view_for_user(\n url=submission.predictions_file.url, client=client, user=test[1]\n )\n assert response.status_code == test[0]", "def test_download_file(self):\n\n file_handle_id = 1234\n object_id = \"syn123\"\n path = \"/tmp/foo\"\n url = \"http://foo.com/bar\"\n file_size = int(1.5 * (2**20))\n request = DownloadRequest(file_handle_id, object_id, None, path)\n\n with mock.patch.object(\n download_threads, \"PresignedUrlProvider\"\n ) as mock_url_provider_init, mock.patch.object(\n download_threads, \"TransferStatus\"\n ) as mock_transfer_status_init, mock.patch.object(\n download_threads, \"_get_file_size\"\n ) as mock_get_file_size, mock.patch.object(\n download_threads, \"_generate_chunk_ranges\"\n ) as mock_generate_chunk_ranges, mock.patch.object(\n _MultithreadedDownloader, \"_prep_file\"\n ) as mock_prep_file, mock.patch.object(\n _MultithreadedDownloader, \"_submit_chunks\"\n ) as mock_submit_chunks, mock.patch.object(\n _MultithreadedDownloader, \"_write_chunks\"\n ) as mock_write_chunks, mock.patch(\n \"concurrent.futures.wait\"\n ) as mock_futures_wait, mock.patch.object(\n _MultithreadedDownloader, \"_check_for_errors\"\n ) as mock_check_for_errors:\n mock_url_info = mock.create_autospec(PresignedUrlInfo, url=url)\n 
mock_url_provider = mock.create_autospec(PresignedUrlProvider)\n mock_url_provider.get_info.return_value = mock_url_info\n\n mock_url_provider_init.return_value = mock_url_provider\n mock_get_file_size.return_value = file_size\n chunk_generator = mock.Mock()\n mock_generate_chunk_ranges.return_value = chunk_generator\n\n transfer_status = TransferStatus(file_size)\n mock_transfer_status_init.return_value = transfer_status\n\n first_future = mock.Mock()\n second_future = mock.Mock()\n third_future = mock.Mock()\n\n # 3 parts total, submit 2, then 1, then no more the third time through the loop\n mock_submit_chunks.side_effect = [\n set([first_future, second_future]),\n set([third_future]),\n set(),\n ]\n\n # on first wait 1 part is done, one is pending,\n # on second wait last remaining part is completed\n\n mock_futures_wait.side_effect = [\n (set([first_future]), set([second_future])),\n (set([second_future, third_future]), set()),\n ]\n\n syn = mock.Mock()\n executor = mock.Mock()\n max_concurrent_parts = 5\n downloader = _MultithreadedDownloader(syn, executor, max_concurrent_parts)\n\n downloader.download_file(request)\n\n mock_prep_file.assert_called_once_with(request)\n\n expected_submit_chunks_calls = [\n mock.call(mock_url_provider, chunk_generator, set()),\n mock.call(mock_url_provider, chunk_generator, set([second_future])),\n mock.call(mock_url_provider, chunk_generator, set()),\n ]\n assert expected_submit_chunks_calls == mock_submit_chunks.call_args_list\n\n expected_write_chunk_calls = [\n mock.call(request, set(), transfer_status),\n mock.call(request, set([first_future]), transfer_status),\n mock.call(request, set([second_future, third_future]), transfer_status),\n ]\n assert expected_write_chunk_calls == mock_write_chunks.call_args_list\n\n expected_futures_wait_calls = [\n mock.call(\n set([first_future, second_future]),\n return_when=concurrent.futures.FIRST_COMPLETED,\n ),\n mock.call(\n set([second_future, third_future]),\n return_when=concurrent.futures.FIRST_COMPLETED,\n ),\n ]\n assert expected_futures_wait_calls == mock_futures_wait.call_args_list\n\n expected_check_for_errors_calls = [\n mock.call(request, set([first_future])),\n mock.call(request, set([second_future, third_future])),\n ]\n assert (\n expected_check_for_errors_calls == mock_check_for_errors.call_args_list\n )", "def test_submission_download(client, two_challenge_sets):\n submission = SubmissionFactory(\n challenge=two_challenge_sets.challenge_set_1.challenge,\n creator=two_challenge_sets.challenge_set_1.participant,\n )\n\n tests = [\n # (\n # image response + annotation response not test ground truth,\n # user\n # )\n (403, None),\n (403, two_challenge_sets.challenge_set_1.non_participant),\n (403, two_challenge_sets.challenge_set_1.participant),\n (403, two_challenge_sets.challenge_set_1.participant1),\n (302, two_challenge_sets.challenge_set_1.creator),\n (302, two_challenge_sets.challenge_set_1.admin),\n (403, two_challenge_sets.challenge_set_2.non_participant),\n (403, two_challenge_sets.challenge_set_2.participant),\n (403, two_challenge_sets.challenge_set_2.participant1),\n (403, two_challenge_sets.challenge_set_2.creator),\n (403, two_challenge_sets.challenge_set_2.admin),\n (302, two_challenge_sets.admin12),\n (403, two_challenge_sets.participant12),\n (302, two_challenge_sets.admin1participant2),\n ]\n\n for test in tests:\n response = get_view_for_user(\n url=submission.predictions_file.url, client=client, user=test[1]\n )\n assert response.status_code == test[0]", "def 
test_drs_get_object(self, testapp, testing_download): # noQA fixture\n res = testapp.get(testing_download)\n drs_object_uri = res.json['uuid']\n drs_object_1 = testapp.get(f'/ga4gh/drs/v1/objects/{drs_object_uri}').json\n for key in REQUIRED_FIELDS:\n assert key in drs_object_1\n assert drs_object_1['self_uri'] == f'drs://localhost:80/ga4gh/drs/v1/objects/{drs_object_uri}'\n assert (drs_object_1['access_methods'][0]['access_url']['url']\n == f'{self.BASE_URL}{drs_object_uri}/@@download')\n\n # failure cases\n testapp.get(f'/ga4gh/drs/v1/objects/not_a_uri', status=404)\n\n # @@drs case\n drs_object_2 = testapp.get(f'/{drs_object_uri}/@@drs')\n for key in REQUIRED_FIELDS:\n assert key in drs_object_2", "def test_make_request_headers(self, m_requests, m_sleep):\r\n request = testing.DummyRequest({mut.URL_KEY: SAMPLE_URL, \r\n mut.HEADERS_KEY: json.dumps(SAMPLE_REQUEST_HEADERS)})\r\n m_response, response_dict = self.mock_response()\r\n m_requests.get.return_value = m_response\r\n self.assertEqual(response_dict, mut.make_request(request))\r\n m_requests.get.assert_called_with(url=SAMPLE_URL, \r\n headers=SAMPLE_REQUEST_HEADERS)\r\n m_sleep.assert_called_with(mut.SECURITY_SLEEP)" ]
[ "0.6950403", "0.68646777", "0.67196494", "0.66881627", "0.6502818", "0.6358916", "0.63298976", "0.63289493", "0.6266413", "0.61539286", "0.60930276", "0.6060562", "0.60434884", "0.6040505", "0.59982693", "0.5925682", "0.59115666", "0.5896234", "0.5890364", "0.5867373", "0.58642596", "0.58600026", "0.58516437", "0.5833041", "0.5826582", "0.5826068", "0.5822348", "0.5815186", "0.5784977", "0.5760702" ]
0.7400816
0
Tests that the downloaded file is removed when the md5 does not match
def test_file_integrity_remove_file_in_case_of_fail():
    test_file = open('./testfile.tmp', 'a')
    test_file.close()

    test_file_path = os.path.realpath('./testfile.tmp')
    test_file_md5 = hashlib.md5(open(test_file_path, 'rb').read()).hexdigest()

    bad_md5 = 'some_noise_%s' % test_file_md5

    PackageDownloadHelper.check_file_integrity(test_file_path, bad_md5)

    assert not os.path.isfile(test_file_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_final_md5(self, key, file_name):\r\n fp = open(file_name, 'r')\r\n if key.bucket.connection.debug >= 1:\r\n print 'Checking md5 against etag.'\r\n hex_md5 = key.compute_md5(fp)[0]\r\n if hex_md5 != key.etag.strip('\"\\''):\r\n file_name = fp.name\r\n fp.close()\r\n os.unlink(file_name)\r\n raise ResumableDownloadException(\r\n 'File changed during download: md5 signature doesn\\'t match '\r\n 'etag (incorrect downloaded file deleted)',\r\n ResumableTransferDisposition.ABORT)", "def checksum(self, md5_file, file_name):\n try:\n with open(md5_file, 'r') as f:\n md5_file_contents = f.read()\n md5_str = md5_file_contents.split(' ')[0]\n os.remove(md5_file)\n except Exception as e:\n logging.exception('Could not read MD5 file {}. \\\n \\nTry to download the file again'.format(file_name))\n return False\n if not self.check_md5(file_name, md5_str):\n logging.error('Failed in checksum. Download the file again.')\n return False\n return True", "def test_file_integrity_return_error_in_case_of_bad_md5():\n test_file = open('./testfile.tmp', 'a')\n test_file.close()\n\n test_file_path = os.path.realpath('./testfile.tmp')\n test_file_md5 = hashlib.md5(open(test_file_path, 'rb').read()).hexdigest()\n\n bad_md5 = 'some_noise_%s' % test_file_md5\n\n result = PackageDownloadHelper.check_file_integrity(test_file_path, bad_md5)\n\n assert isinstance(result, ApiResponse)", "def test_download_and_unlink(self):\n scrape_category.get_simfile_from_ziv(self.simfile, self.link, self.dest)\n assert os.path.exists(os.path.join(self.dest, \"sim100.zip\"))\n\n scrape_category.unlink_zip(self.simfile, self.dest)\n assert not os.path.exists(os.path.join(self.dest, \"sim100.zip\"))", "def test_unpack_file_url_download_bad_hash(self, tmpdir, data,\n monkeypatch):\n self.prep(tmpdir, data)\n\n # add in previous download (copy simple-2.0 as simple-1.0 so it's wrong\n # hash)\n dest_file = os.path.join(self.download_dir, self.dist_file)\n copy(self.dist_path2, dest_file)\n\n with open(self.dist_path, 'rb') as f:\n dist_path_md5 = hashlib.md5(f.read()).hexdigest()\n with open(dest_file, 'rb') as f:\n dist_path2_md5 = hashlib.md5(f.read()).hexdigest()\n\n assert dist_path_md5 != dist_path2_md5\n\n url = '{}#md5={}'.format(self.dist_url.url, dist_path_md5)\n dist_url = Link(url)\n unpack_file_url(dist_url, self.build_dir,\n download_dir=self.download_dir,\n hashes=Hashes({'md5': [dist_path_md5]}))\n\n # confirm hash is for simple1-1.0\n # the previous bad download has been removed\n with open(dest_file, 'rb') as f:\n assert hashlib.md5(f.read()).hexdigest() == dist_path_md5", "def _check_final_md5(self, key, etag):\r\n if key.bucket.connection.debug >= 1:\r\n print 'Checking md5 against etag.'\r\n if key.md5 != etag.strip('\"\\''):\r\n # Call key.open_read() before attempting to delete the\r\n # (incorrect-content) key, so we perform that request on a\r\n # different HTTP connection. 
This is neededb because httplib\r\n # will return a \"Response not ready\" error if you try to perform\r\n # a second transaction on the connection.\r\n key.open_read()\r\n key.close()\r\n key.delete()\r\n raise ResumableUploadException(\r\n 'File changed during upload: md5 signature doesn\\'t match etag '\r\n '(incorrect uploaded object deleted)',\r\n ResumableTransferDisposition.ABORT)", "def _clearFile(url):\n md5 = hashlib.md5(url).hexdigest()\n filename = os.path.join(config.WEB_CACHE_DIR, md5)\n if os.path.exists(filename):\n os.remove(filename)", "def download_and_validate_checksum(name, checksum):\n dst = os.path.join(DOWNLOADS_DIR, os.path.basename(name))\n download_file(src=name, dst=dst)\n md5 = hashlib.md5()\n for chunk in chunked_reader(dst):\n md5.update(chunk)\n dl_checksum = md5.digest().hex()\n if dl_checksum != checksum:\n raise ValueError(f\"expected checksum {checksum} but received {dl_checksum}\")\n os.remove(dst)", "def check_md5(filename, stored_md5):\n computed_md5 = _get_file_md5(filename)\n if stored_md5 != computed_md5:\n print (\"MD5 checksum of filename\", filename, \"failed. Expected MD5 was\", stored_md5,\n \"but computed MD5 was\", computed_md5, '\\n',\n \"Please check if the data has been downloaded correctly or if the upstream data has changed.\")", "def test_downloading(self):\n month = '2013-01' # smallest of available datasets\n path = download_data(month)\n self.assertTrue(os.path.isfile(path), msg='File on returned location does not exist')\n os.remove(path)", "def test_download_file_no_sha(token):\n\n # github => repo => release => asset_list => asset => url => download\n\n g_h = github.Github(token, per_page=100)\n repo = g_h.get_repo(TEST_SLUG, lazy=False)\n release = repo.get_release(TEST_TAG)\n asset_list = release.get_assets()\n sha_filename = Template(Arguments.HASH_FILE).safe_substitute({\n 'platform': platform.system().lower()\n })\n\n pass_test = True\n\n for check_asset in asset_list:\n # look through list of assets for uploaded file and sha file\n\n if check_asset.name == sha_filename:\n\n pass_test = False\n\n assert pass_test", "def test_remove_manifest_file(self):\n test_report_date = datetime(year=2018, month=9, day=7)\n self.assertTrue(os.path.isfile(self.test_manifest_path))\n self.ocp_report_downloader._remove_manifest_file(test_report_date)\n self.assertFalse(os.path.isfile(self.test_manifest_path))", "def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):\n base_url = 'http://www.example.com/somepackage.tgz'\n contents = b'downloaded'\n download_hash = hashlib.new('sha1', contents)\n link = Link(base_url + '#sha1=' + download_hash.hexdigest())\n\n session = Mock()\n session.get = Mock()\n response = session.get.return_value = MockResponse(contents)\n response.headers = {'content-type': 'application/x-tar'}\n response.url = base_url\n\n download_dir = mkdtemp()\n try:\n downloaded_file = os.path.join(download_dir, 'somepackage.tgz')\n create_file(downloaded_file, 'some contents')\n\n unpack_http_url(\n link,\n 'location',\n download_dir=download_dir,\n session=session,\n hashes=Hashes({'sha1': [download_hash.hexdigest()]})\n )\n\n # despite existence of downloaded file with bad hash, downloaded again\n session.get.assert_called_once_with(\n 'http://www.example.com/somepackage.tgz',\n headers={\"Accept-Encoding\": \"identity\"},\n stream=True,\n )\n # cached file is replaced with newly downloaded file\n with open(downloaded_file) as fh:\n assert fh.read() == 'downloaded'\n\n finally:\n rmtree(download_dir)", "def 
test_md5sum(self, changes_file):\n for file in changes_file['Files']:\n log.debug('Checking md5sum of %s' % file['name'])\n filename = os.path.join(pylons.config['debexpo.upload.incoming'], file['name'])\n if not os.path.isfile(filename):\n raise OSError(\"Missing file %s in incoming\" % (file['name']))\n sum = md5sum(filename)\n\n if sum != file['md5sum']:\n log.critical('%s != %s' % (sum, file['md5sum']))\n raise OSError(\"MD5 sum mismatch in file %s: %s != %s\" % (file['name'], sum, file['md5sum']))\n\n return True", "def test_unpack_file_url_bad_hash(self, tmpdir, data,\n monkeypatch):\n self.prep(tmpdir, data)\n url = '{}#md5=bogus'.format(self.dist_url.url)\n dist_url = Link(url)\n with pytest.raises(HashMismatch):\n unpack_file_url(dist_url,\n self.build_dir,\n hashes=Hashes({'md5': ['bogus']}))", "def download(url, md5sum, target_dir):\n if not os.path.exists(target_dir): os.makedirs(target_dir)\n filepath = os.path.join(target_dir, url.split(\"/\")[-1])\n if not (os.path.exists(filepath) and md5file(filepath) == md5sum):\n print(\"Downloading %s ...\" % url)\n os.system(\"wget -c \" + url + \" -P \" + target_dir)\n print(\"\\nMD5 Chesksum %s ...\" % filepath)\n if not md5file(filepath) == md5sum:\n raise RuntimeError(\"MD5 checksum failed.\")\n else:\n print(\"File exists, skip downloading. (%s)\" % filepath)\n return filepath", "def md5check(fname, md5fname):\n\tmd5fh = open(md5fname, \"r\")\n\treturn (md5sum(fname) == md5fh.readline())", "def check_md5checksum_in_cache_modified(file_hash: str, cache_path: Path, update: bool) -> bool:\n if cache_path.exists():\n old_md5_checksum_content = Path(cache_path).read_text()\n if old_md5_checksum_content.strip() != file_hash.strip():\n if update:\n save_md5_file(cache_path, file_hash)\n return True\n else:\n if update:\n save_md5_file(cache_path, file_hash)\n return True\n return False", "def check_md5(file1, file2):\r\n with open(file1, \"rb\") as f1:\r\n h1 = hashlib.md5(f1.read()).digest()\r\n with open(file2, \"rb\") as f2:\r\n h2 = hashlib.md5(f2.read()).digest()\r\n return h1 == h2", "def test_check_md5_crit_md5sum_mismatch(self, mock_generate_md5):\n jdata = b'{\"/etc/swift/object.ring.gz\": ' \\\n b'\"6b4f3a0ef3731f18291ecd053ce0d9b6\", ' \\\n b'\"/etc/swift/account.ring.gz\": ' \\\n b'\"93fc4ae496a7343362ebf13988a137e7\", ' \\\n b'\"/etc/swift/container.ring.gz\": ' \\\n b'\"0ea1ec9585ef644ce2b5c5b1dced4128\"}'\n pmock_jdata = PropertyMock(return_value=jdata)\n mock_generate_md5.return_value = 'xxxx'\n with patch('urllib.request.urlopen') as mock_urlopen:\n mock_urlopen.return_value = MagicMock(read=pmock_jdata)\n result = check_md5('.')\n mock_urlopen.assert_called_with('.ringmd5')\n expected_result = [(STATUS_CRIT,\n 'Ringfile /etc/swift/{}.ring.gz '\n 'MD5 sum mismatch'.format(name))\n for name in ('object', 'account', 'container')]\n self.assertEqual(result, expected_result)", "def excludeTested( files ):\n\n cache = getTestedCache()\n\n result = []\n count = 0\n for index in xrange( 0, len( files ) ):\n if cache.has_key( files[ index ] ):\n abspath = os.path.realpath( files[ index ] )\n # Check the new MD5\n if cache[ files[ index ] ] == getMD5( abspath ):\n # The same, no need\n count += 1\n continue\n result.append( files[ index ] )\n return count", "def test_download(self):\n test_file = os.path.join(self._system.get_temporary_path(), \"nusoft.test\")\n self._system.download(\"http://www.github.com\", name=test_file)\n self.assertTrue(os.path.exists(test_file))\n os.remove(test_file)", "def test_file_deleted(self):\n 
try:\n with get_temp_file() as (fd, name):\n os.unlink(name)\n except Exception as err:\n self.fail('Failed with exception \"{}\"'.format(err))", "def maybe_download(filename, expected_bytes):\n if not os.path.exists(filename):\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename", "def maybe_download(filename, expected_bytes):\n if not os.path.exists(filename):\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename", "def _check_md5sum(_setup_str, src_host, src_pfn):\n\n error = PilotErrors()\n\n _cmd = '%suberftp %s \"quote cksm md5sum 0 -1 %s\"' % (_setup_str, src_host, src_pfn)\n estat, coutp = commands.getstatusoutput(_cmd)\n tolog('md5 uberftp done <%s> (%s): %s' % (_cmd, estat, coutp))\n\n if estat != 0:\n check_syserr(estat, coutp)\n if coutp.find('not understood') >= 0:\n tolog('!!WARNING!!2999!! MD5 unsupported by the server')\n return error.ERR_FAILEDMD5, coutp\n try:\n tmp0 = coutp.split('\\n')[-1]\n fmd5usm = tmp0.split()[1]\n # split removes also the trailing \"\\r\" that uberftp returns, no fmd5sum.strip()\n except:\n tolog('!!WARNING!!2999!! Unable to parse MD5')\n fmd5usm = ''\n return 0, fmd5usm", "def maybe_download(filename, url, expected_bytes):\n if not os.path.exists(filename):\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename", "def maybe_download(filename, url, expected_bytes):\n if not os.path.exists(filename):\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename", "def maybe_download(filename, url, expected_bytes):\n if not os.path.exists(filename):\n filename, _ = urllib.request.urlretrieve(url + filename, filename)\n statinfo = os.stat(filename)\n if statinfo.st_size == expected_bytes:\n print('Found and verified', filename)\n else:\n print(statinfo.st_size)\n raise Exception(\n 'Failed to verify ' + filename + '. Can you get to it with a browser?')\n return filename", "def __maybeDownload():\n if not os.path.isdir(Download.DATA_ROOT): # 若 data 目录不存在,创建 data 目录\n os.mkdir(Download.DATA_ROOT)\n file_path = os.path.join(Download.DATA_ROOT, Download.FILE_NAME)\n\n if os.path.exists(file_path): # 若已存在该文件\n statinfo = os.stat(file_path)\n if statinfo.st_size == Download.FILE_SIZE: # 若该文件正确,直接返回 file_path\n print('Found and verified %s' % file_path)\n return file_path\n else: # 否则,删除文件重新下载\n os.remove(file_path)\n\n download_url = Download.URL + Download.FILE_NAME\n print('Downloading %s ...' 
% download_url)\n filename, _ = urlretrieve(download_url, file_path) # 下载数据\n print('Finish downloading')\n\n statinfo = os.stat(filename)\n if statinfo.st_size == Download.FILE_SIZE: # 校验数据是否正确下载\n print('Found and verified %s' % filename)\n else:\n print(statinfo.st_size)\n raise Exception('Failed to verify ' + filename + '. Can you get to it with a browser ?')\n return filename" ]
[ "0.80135876", "0.74356925", "0.72578675", "0.71941775", "0.7100078", "0.6969924", "0.6895048", "0.6885091", "0.68015236", "0.6569086", "0.6435141", "0.638278", "0.63422084", "0.63062805", "0.626479", "0.6257084", "0.62355447", "0.62270766", "0.6220227", "0.6191675", "0.6178103", "0.61623603", "0.6116824", "0.60938364", "0.60938364", "0.60908216", "0.6076418", "0.6076418", "0.6076418", "0.6058132" ]
0.7904508
1
The getter method for the forecast date. If the forecast date is not supplied, it will be set to the current date.
def forecast_date(self):
    return self._forecast_date
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forecast_date(self, forecast_date):\n self._forecast_date = forecast_date.strftime(\"%a %b %d\")", "def get_date(self):\n return self.date", "def get_date(self):\n return self.date", "def get_date(self):\n return self.date", "def get_date(self):\n return self.date", "def date(self):\n return self.date_value", "def _date(self) -> datetime:\n return self.__date", "def get_date(self):\n raise Unimplemented()", "def date(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"date\")", "def date(self):\n return self._date", "def date(self):\n return self._date", "def date(self):\n return self._date", "def date(self):\n return self._date", "def getDate(self):\n return self.date", "def date(self) -> Optional[int]:\n return pulumi.get(self, \"date\")", "def date(self):\n # type: () -> date\n return self._date", "def DefenceDate(self, default=None):\n return self.data.get('defense_date', default)", "def Date(self, default=None):\n return self.data.get('date', default)", "def Date(self, default=None):\n return self.data.get('date', default)", "def Date(self, default=None):\n return self.data.get('date', default)", "def settlement_date(self) -> datetime.date:\n return self.__settlement_date", "def founding_date(self) -> datetime:\n return self._founding_date", "def date(self, date):\n self.value = date.strftime(\"%Y-%m-%d\") if date else \"\"", "def get_date(self, datetime):\n return datetime.date()", "def date(self) -> datetime.datetime:\n return self._data['Date'] - datetime.timedelta(0, float(self.exposuretime), 0)", "def __get_settlement_date():\n day_after_tomorrow = datetime.now(timezone.utc).date() + \\\n timedelta(days=2)\n settlement_date = day_after_tomorrow.strftime(\"%Y%m%d\")\n\n return settlement_date", "def get_date(self):\n return datetime.date(\n int(self.kwargs['year']),\n int(self.kwargs['month']),\n int(self.kwargs['day'])\n )", "def evaluated_date(self):\n return self._evaluated_date", "def formalDateToday():\n return dt.date.today().strftime(\"%B %d, %Y\")", "def dep_date(self):\n return self._dep_date" ]
[ "0.7395979", "0.6631649", "0.6631649", "0.6631649", "0.6631649", "0.6575731", "0.6429508", "0.63298404", "0.63246965", "0.6283063", "0.6283063", "0.6283063", "0.627989", "0.62717545", "0.62594414", "0.6252879", "0.6240903", "0.6228745", "0.6228745", "0.6228745", "0.61254066", "0.61244965", "0.61119914", "0.6045755", "0.60262436", "0.59983605", "0.599549", "0.59809536", "0.59382933", "0.5921475" ]
0.8569457
0
The setter method for the forecast date. Every time we need to set the date in an instance of Forecast, we need to make sure that it will be formatted accordingly.
def forecast_date(self, forecast_date):
    self._forecast_date = forecast_date.strftime("%a %b %d")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_date(self, date):\n self.date = date", "def date(self, date):\n self.value = date.strftime(\"%Y-%m-%d\") if date else \"\"", "def set_date(self, date):\n self.date = date\n return", "def set_date(self, date):\n self.data['date'] = date", "def _date(self, _date):\n\n self.__date = _date", "def _date(self, _date):\n\n self.__date = _date", "def set_datetime(self, date):\n self.date = date", "def date(self, value):\n self.date_value = value", "def set_date(self, date):\n self.date = self.date_to_local(date)\n # ephem deals only in UTC\n self.site.date = ephem.Date(self.date_to_utc(self.date))", "def date(self, date):\n self._date = date", "def forecast_date(self):\n return self._forecast_date", "def date(self, date):\n\n self._date = date", "def date(self, date):\n\n self._date = date", "def date(self, date):\n\n self._date = date", "def date(self, date):\n\n self._date = date", "def date(self, date):\n\n self._date = date", "def update(self, date):\r\n self.date = date", "def set_to_date(self):\n self.set_value_into_input_field(self.set_to_date_locator, self.get_current_date())", "def settlement_date(self, value):\n if value:\n self._settlement_date = (\n parse(value).date() if isinstance(value, type_check) else value\n )", "def date(self, new_date):\n self._date.date = new_date", "def dep_date(self, dep_date):\n\n self._dep_date = dep_date", "def setDateAsString(self, *args):\n return _libsbml.Date_setDateAsString(self, *args)", "def set_end_date(self, date):\n pass", "def date_format(self, date_format):\n\n self._date_format = date_format", "def set_birthdate(self, date):\n\n if validate_date_format(self.birt):\n self.birt = date\n else:\n raise ValueError('Invalid date!')", "def set_from_date(self, date):\n self.set_value_into_input_field(self.set_from_date_locator, date)", "def setDate(self, p_int, p_int_1, p_int_2): # real signature unknown; restored from __doc__\r\n return False", "def setSelectedDate(self, data):\n # print('setSelectedDate ', data)\n self.currentDate = data", "def date(self, date_):\n # type: (date) -> None\n\n if date_ is not None:\n if not isinstance(date_, date):\n raise TypeError(\"Invalid type for `date`, type has to be `date`\")\n\n self._date = date_", "def _set_dt_string(self):\n if self.period is Period.WEEKLY:\n self._set_date_weekly()\n try:\n self._dt_string = self._get_datetime_or_error().strftime(self._dt_format)\n except ValueError:\n self._dt_string = \"\"\n raise InvalidDateError(detail={\n \"message\": \"Invalid Date Provided\",\n \"period\": self.period.value,\n \"date\": self._given_date\n })" ]
[ "0.71004164", "0.70625865", "0.69669646", "0.69329244", "0.6891429", "0.6891429", "0.6873222", "0.6835358", "0.6825212", "0.6773724", "0.67453426", "0.6618986", "0.6618986", "0.6618986", "0.6618986", "0.6618986", "0.6599774", "0.6594992", "0.6584389", "0.6485541", "0.63232476", "0.6226157", "0.6212241", "0.6157681", "0.6075714", "0.6073261", "0.5991009", "0.5969999", "0.59670043", "0.59598386" ]
0.81627464
0
The getter method for the day's current wind levels.
def wind(self):
    return self._wind
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_wind_values(self):\n return (\n int(self.data[2]), # dir\n float(self.data[3]) / 10, # gust\n float(self.data[4]) / 10, # avg\n float(self.data[5]) / 10, # chill\n )", "def get_current_water_level(self):\n \n url = f'http://waterservices.usgs.gov/nwis/iv/?format=json&sites={self.site_number}&parameterCd=00060,00065&siteStatus=all'\n\n response = requests.request(\"GET\", url)\n data = json.loads(response.text)\n \n #parses json response to get only value of current water level for given river\n current_water_level = data['value']['timeSeries'][0]['values'][0]['value'][0]['value']\n \n return current_water_level", "def get_levels(self):\n return self.levels[self.game]", "def _current_wind(self):\n query = \"SELECT LAST(value) FROM wind_direction WHERE board='wunderground'\"\n message = False\n try:\n rs = self.server.query(query)\n message = direction(list(rs.get_points(measurement='wind_direction'))[0]['last'])\n except ValueError:\n raise Exception(\"Unable to connect to the dataserver\")\n return message", "def getSupportResistanceLevelsTimeSeries(self):\n return self.levels_ts", "def wind_direction(self):\n names = ['anc_wind_direction']\n return self.sensor.get_with_fallback('wind_direction', names)", "def wind_meta(self):\n return self.data.wind_meta", "def get_data(self):\n return DataGatherer().get_wind_data()", "def get_lux(self):\n\n svc = \"urn:micasaverde-com:serviceId:LightSensor1\"\n if not svc in self.services:\n raise RuntimeError, \"Device doesn't support the service\"\n\n return self.get_variable(svc, \"CurrentLevel\")", "def wind_speed(self):\n names = ['anc_mean_wind_speed']\n return self.sensor.get_with_fallback('wind_speed', names)", "def getLevel(self):\n return self.level", "def get_water_level(self):\n return self.water_level", "def fetch_levels(self):\n rootLogger.info(\"[*] Fetching water levels...\")\n\n headers = {\"User-Agent\": \"Edwards Aquifer Bot - Follow on Twitter: @edwardsaquabot\"}\n\n response = requests.get(self.url, headers=headers, verify=True, timeout=60)\n if response.status_code != 200:\n rootLogger.error(\n \"HTTP status code: {} -- unsuccessfully retrieved: {}\".format(response.status_code, self.url)\n )\n return\n\n # Use beautiful soup to grab the levels...works, maybe not the best though.\n soup = BeautifulSoup(response.text, \"html.parser\")\n table = soup.find_all(\"table\")[1]\n\n # Today's Reading.\n column = table.find_all(\"td\")[0]\n today_water_level = column.find(\"span\").contents[0].strip()\n\n # Yesterday's Reading.\n column = table.find_all(\"td\")[2]\n yesterday_water_level = column.find(\"span\").contents[0].strip()\n\n # 10 Day Average Reading.\n column = table.find_all(\"td\")[4]\n ten_day_average = column.find(\"span\").contents[0].strip()\n\n return today_water_level, yesterday_water_level, ten_day_average", "def wind_speed(self):\r\n return self._yesterdays_weather.get_average_wind_speed()", "def getLevels():", "def wind_time_index(self):\n return self.data.wind_time_index", "def wind_direction(self):\n return self.flow_field.wind_direction", "def wind_veer(self):\n return self.flow_field.wind_veer", "def getSupportResistanceLevels(self):\n return self.levels", "def get_weather(self):\n return self.__weather", "def getToWinding(self):\n return self._ToWinding", "def currentLevel( self ):\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n return self._env.level( )", "def day(self):\n data = await self.get_data(LIGHT)\n return data['day']", "def levels(ohlc_day):\r\n high = 
round(ohlc_day[\"high\"][-1],2)\r\n low = round(ohlc_day[\"low\"][-1],2)\r\n close = round(ohlc_day[\"close\"][-1],2)\r\n pivot = round((high + low + close)/3,2)\r\n r1 = round((2*pivot - low),2)\r\n r2 = round((pivot + (high - low)),2)\r\n r3 = round((high + 2*(pivot - low)),2)\r\n s1 = round((2*pivot - high),2)\r\n s2 = round((pivot - (high - low)),2)\r\n s3 = round((low - 2*(high - pivot)),2)\r\n return (pivot,r1,r2,r3,s1,s2,s3)", "def get_wind():\n return get_next_random(wind, WIND_MAX, WIND_MIN, WIND_DELTA)", "def get_vwind(self):\n return self.read_register(4100, 1, 3)", "def wind_time_index(self):\n if self._wind_time_index is None:\n with Resource(self.wind_fpath) as res:\n self._wind_time_index = res.time_index\n return self._wind_time_index", "def getLevel(self):\n return self._level", "def get_level(self, level):\n return", "def levels(ohlc_day):\n high = round(ohlc_day[\"high\"][-1],2)\n low = round(ohlc_day[\"low\"][-1],2)\n close = round(ohlc_day[\"close\"][-1],2)\n pivot = round((high + low + close)/3,2)\n r1 = round((2*pivot - low),2)\n r2 = round((pivot + (high - low)),2)\n r3 = round((high + 2*(pivot - low)),2)\n s1 = round((2*pivot - high),2)\n s2 = round((pivot - (high - low)),2)\n s3 = round((low - 2*(high - pivot)),2)\n return (pivot,r1,r2,r3,s1,s2,s3)" ]
[ "0.66269565", "0.6551266", "0.634967", "0.6340421", "0.62732714", "0.6218584", "0.62073404", "0.6126908", "0.6100952", "0.596197", "0.59491724", "0.5942488", "0.5918629", "0.59090155", "0.59058", "0.58645844", "0.5827256", "0.5817197", "0.5807483", "0.58023006", "0.578426", "0.57733804", "0.5742081", "0.5738013", "0.5728951", "0.5725743", "0.5707825", "0.5699119", "0.56578857", "0.5654335" ]
0.7064748
0
implement the calculation W_battery = TOW - W_payload - W_empty
def compute(self, inputs, outputs):
    outputs['TOW'] = inputs['W_battery'] + inputs['W_payload'] + inputs['W_empty']
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _battery_cb(self, msg):\n # self.battery_voltages[msg.header.seq %\n # len(self.battery_voltages)] = msg.voltage\n self.battery_voltages[msg.header.seq % len(\n self.battery_voltages)] = msg.percentage * 100.\n # delta = self.INIT_VOLTAGE - self.MINIMUM_VOLTAGE\n # self.low_battery = (np.mean(self.battery_voltages) <=\n # (self.MINIMUM_VOLTAGE +\n # (0.1 * delta))) and (self._current_wp != 0)\n self.low_battery = (np.mean(self.battery_voltages) <=\n self.MINIMUM_VOLTAGE * 1.5) and (self._current_wp\n != 0)", "def get_battery(self) -> int:\r\n return self.state['bat']", "def getBattery(self):\n raise NotImplementedError", "def get_battery_information(self):\n BATTERY_DEVTYPE_GAMEPAD = 0x00\n BATTERY_DEVTYPE_HEADSET = 0x01\n # Set up function argument types and return type\n XInputGetBatteryInformation = xinput.XInputGetBatteryInformation\n XInputGetBatteryInformation.argtypes = [ctypes.c_uint, ctypes.c_ubyte, ctypes.POINTER(XINPUT_BATTERY_INFORMATION)]\n XInputGetBatteryInformation.restype = ctypes.c_uint \n\n battery = XINPUT_BATTERY_INFORMATION(0,0)\n XInputGetBatteryInformation(self.device_number, BATTERY_DEVTYPE_GAMEPAD, ctypes.byref(battery))\n\n #define BATTERY_TYPE_DISCONNECTED 0x00\n #define BATTERY_TYPE_WIRED 0x01\n #define BATTERY_TYPE_ALKALINE 0x02\n #define BATTERY_TYPE_NIMH 0x03\n #define BATTERY_TYPE_UNKNOWN 0xFF\n #define BATTERY_LEVEL_EMPTY 0x00\n #define BATTERY_LEVEL_LOW 0x01\n #define BATTERY_LEVEL_MEDIUM 0x02\n #define BATTERY_LEVEL_FULL 0x03\n battery_type = \"Unknown\" if battery.BatteryType == 0xFF \\\n else [\"Disconnected\", \"Wired\", \"Alkaline\", \"Nimh\"][battery.BatteryType]\n level = [\"Empty\", \"Low\", \"Medium\", \"Full\"][battery.BatteryLevel]\n return battery_type, level", "def describe_battery(self):\n print(f\"The power of battery is - {self.battery_size}-kWh\")", "def describe_battery(self):\n self.battery.describe_battery()", "def upgrade_battery(self):\n if self.battery_size != 100:\n self.battery_size = 100", "def battery(self):\n return self._battery", "def upgrade_battery(self):\n if self.battery_size <= 75:\n self.battery_size = 100", "def get_battery(self) -> float:\r\n resp = self.send_command(self._GET_BATTERY)\r\n try:\r\n return float(resp)\r\n except ValueError as e:\r\n print(f\"Error parsing battery voltage '{resp}':\", e)\r\n return 0.0", "def describe_battery(self):\n print(\"Ten samochod ma akumulator o pojemnosci \"\n + str(self.battery_size) + \" kWh.\")", "def manage_battery(agent, total_power, request_inject_power, power_to_battery, time_scale):\n # Dismantle tuples and do some little computation\n\n # power demand of the agent\n rated_active_power = agent.power_rating\n rated_reactive_power = reactive_power(power_rating=rated_active_power, power_factor=agent.power_factor)\n\n # power demand including the incoming power\n total_active_power, total_reactive_power = total_power\n total_apparent_power = np.abs(np.complex(total_active_power, total_reactive_power))\n if total_apparent_power > 0:\n total_power_factor = total_active_power / total_apparent_power\n else:\n total_power_factor = 1\n demand_power = np.complex(total_active_power, total_reactive_power)\n\n # power that the agent should inject into the grid\n request_inject_active_power, request_inject_reactive_power = request_inject_power\n request_inject_apparent_power = np.abs(np.complex(request_inject_active_power, request_inject_reactive_power))\n # power from the energy sources to the battery\n pv_power_to_battery, wind_power_to_battery = power_to_battery\n # power 
from the grid into the battery (related to the incoming power)\n\n # actual injected power taken from the battery\n inject_active_power = 0\n inject_reactive_power = 0\n\n inject_power = 0\n\n total_active_power_from_battery = 0\n total_reactive_power_from_battery = 0\n if agent.incoming_power > 0:\n incoming_power = agent.incoming_power - agent.incoming_power * 0.05\n else:\n incoming_power = 0\n power_surplus = incoming_power - rated_active_power\n\n # If the battery is installed / working\n if agent.battery.is_active():\n # if the incoming power is less than the actual demand\n if power_surplus <= 0:\n desired_active_power_from_battery = rated_active_power * agent.battery.contribution_active\n desired_reactive_power_from_battery = rated_reactive_power * agent.battery.contribution_reactive\n desired_power_from_battery = np.abs(\n np.complex(desired_active_power_from_battery, 1e-2 * desired_reactive_power_from_battery)) # check\n # this 1e-2\n if agent.battery.erogate(desired_power_from_battery, time_scale):\n total_active_power_from_battery = desired_active_power_from_battery\n total_reactive_power_from_battery = desired_reactive_power_from_battery\n\n total_active_power -= total_active_power_from_battery\n total_reactive_power -= total_reactive_power_from_battery\n # If there is no incoming power I try to inject what I'm asked to inject, otherwise I just bypass the injecting\n # thing\n if incoming_power <= 0:\n if agent.battery.erogate(request_inject_apparent_power, time_scale):\n inject_active_power = request_inject_active_power\n inject_reactive_power = request_inject_reactive_power\n\n total_active_power_from_battery += request_inject_active_power\n total_reactive_power_from_battery += request_inject_reactive_power\n\n # manage excess incoming power\n current_surplus_to_battery = 0\n if power_surplus > 0:\n current_surplus_to_battery = power_surplus / agent.battery.voltage\n current_pv_to_battery = pv_power_to_battery * agent.pv_panel.battery_coupling_efficiency / agent.battery.voltage\n current_wind_to_battery = wind_power_to_battery * agent.wind_generator.battery_coupling_efficiency / \\\n agent.battery.voltage\n energy_left = agent.battery.charge(\n current=current_pv_to_battery + current_wind_to_battery + current_surplus_to_battery, time_scale=time_scale)\n\n # If the battery is full, I just send the excess to the water tank\n if energy_left > 0:\n agent.water_tank.charge(energy_left / time_scale, time_scale)\n\n if np.abs(np.complex(total_active_power, total_reactive_power)) != 0:\n total_power_factor = total_active_power / np.abs(np.complex(total_active_power, total_reactive_power))\n demand_power = np.complex(total_active_power, total_reactive_power)\n inject_power = np.complex(inject_active_power, inject_reactive_power)\n else:\n total_power_factor = 1\n demand_power = np.complex(0, 0)\n inject_power = np.complex(inject_active_power, inject_reactive_power)\n power_from_battery = np.complex(total_active_power_from_battery, total_reactive_power_from_battery)\n return total_active_power, total_power_factor, demand_power, inject_power, power_from_battery", "def requestBattery(self) -> None:\n self._protocol.write_line(CMD_BATTERY)", "def getBatteryCharge(self, board=0):\n return self.callModule('butia', board, 0, 'get_volt')", "def testSendBattery(self):\n self.v.message_factory.gopro_get_request_encode.return_value = 7\n self.mgr.sendGoProRequest(mavutil.mavlink.GOPRO_COMMAND_BATTERY)\n\n self.v.message_factory.gopro_get_request_encode.assert_called_with(0, 
mavutil.mavlink.MAV_COMP_ID_GIMBAL,\n mavutil.mavlink.GOPRO_COMMAND_BATTERY)\n self.mgr.queueMsg.assert_called_with(7)", "def battery(self):\n return self._state['batteryPercent']", "def state(self):\n return self.roller.battery", "def get_battery_state(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... (.). .*? .*? .*? .*? .*? . .*? .*? . . . .*?'\n state = re.findall(pattern,summary).pop()\n if state == 'f':\n msg = 'OFF'\n elif state == 'd':\n msg = 'DISCHARGING.'\n elif state == 'c':\n msg = 'CHARGING.'\n elif state == 'b':\n msg = 'BALANCING.'\n return state,msg", "def battery_level(self):\n return self.battery", "def describe_battery(self):\n print(\"This car has a \"+str(self.battery_size)+\"-KWh battery.\")", "def describe_battery(self):\r\n\t\tprint(\"This car has a \" + str(self.battery_size) + \"-kWh battery.\")", "def test_water_regulation(self):\n\n for action in self.controller.actions.values():\n for water_level in range(90, 110, 2):\n\n # measure water level\n self.controller.sensor.measure = MagicMock(return_value=water_level)\n\n # get the state of the pump\n self.controller.pump.get_state = \\\n MagicMock(return_value=self.decider.decide(water_level, action, \\\n self.controller.actions)) \\\n\n self.controller.tick()", "def describe_battery(self):\n print(f'This car has a {self.battery_size}-kilowatt-hour battery.')", "def _publish_battery(self):\n # only publish if we have a subscriber\n if self._battery_pub.get_num_connections() == 0:\n return\n\n battery = BatteryState()\n battery.header.stamp = rospy.Time.now()\n battery.voltage = self._cozmo.battery_voltage\n battery.present = True\n if self._cozmo.is_on_charger: # is_charging always return False\n battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_CHARGING\n else:\n battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_NOT_CHARGING\n self._battery_pub.publish(battery)", "def check_ec500_general_battery_current(item, params, info):\n state = 3\n infotext = \"unknown_value\"\n index = 0\n perfdata = []\n ec500_general_battery_current = None\n try:\n #print info\n for line in info:\n index= index + 1\n ec500_general_battery_current = line[0]\n #print rec_share_value \n try:\n ec500_general_battery_current = float(ec500_general_battery_current)\n except Exception,e:\n ec500_general_battery_current = line[0].replace(' ','@')\n state = 0\n perfdata.append((\"ec500_general_battery_%d_current\" %index,ec500_general_battery_current))\n infotext = \"ec500_general_battery_current=%s\" % ec500_general_battery_current\n except Exception,e:\n infotext = \"unknown_value\"\n return (state,infotext,perfdata)", "def calc_power(self) -> (Power, None):\n amp: NumType = 0\n amp_unit: str = ''\n volt: NumType = 0\n volt_unit: str = ''\n ohm: ComType = complex(0)\n ohm_unit: str = ''\n\n if self._amp_exists and self._volt_exists:\n if hasattr(self._obj1, 'amps'):\n amp, amp_unit = self._obj1.amps, self._obj1.amp_unit\n elif hasattr(self._obj1, 'volts'):\n volt, volt_unit = self._obj1.volts, self._obj1.volt_unit\n if hasattr(self._obj2, 'amps'):\n amp, amp_unit = self._obj2.amps, self._obj2.amp_unit\n elif hasattr(self._obj2, 'volts'):\n volt, volt_unit = self._obj2.volts, self._obj2.volt_unit\n pwr = amp * volt\n pwr_unit: str = f'{volt_unit}*{amp_unit}'\n\n elif self._amp_exists and self._ohm_exists:\n if hasattr(self._obj1, 'amps'):\n amp, amp_unit = self._obj1.amps, self._obj1.amp_unit\n elif hasattr(self._obj1, 'ohm'):\n ohm, ohm_unit = self._obj1.ohm, self._obj1.ohm_unit\n if 
hasattr(self._obj2, 'amps'):\n amp, amp_unit = self._obj2.amps, self._obj2.amp_unit\n elif hasattr(self._obj2, 'ohm'):\n ohm, ohm_unit = self._obj2.ohm, self._obj2.ohm_unit\n pwr = amp**2 * ohm\n pwr_unit: str = f'{amp_unit}^2*{ohm_unit}'\n\n elif self._volt_exists and self._ohm_exists:\n if hasattr(self._obj1, 'ohm'):\n ohm, ohm_unit = self._obj1.ohm, self._obj1.ohm_unit\n elif hasattr(self._obj1, 'volts'):\n volt, volt_unit = self._obj1.volts, self._obj1.volt_unit\n if hasattr(self._obj2, 'ohm'):\n ohm, ohm_unit = self._obj2.ohm, self._obj2.ohm_unit\n elif hasattr(self._obj2, 'volts'):\n volt, volt_unit = self._obj2.volts, self._obj2.volt_unit\n pwr = volt**2 / ohm\n pwr_unit: str = f'{volt_unit}^2/{ohm_unit}'\n\n else:\n return None\n\n return Power(pwr, pwr_unit, self._obj1.frequency, self._obj1.freq_unit)", "def describe_battery(self):\n print(f\"This car has a {self.battery_size}-KWh battery.\")", "def getWatts(self):\n return self.json_state.get(\"charging\").get(\"watt_power\")", "def describe_battery(self):\n print(f\"This car has a {self.battery_size}-kWh battery.\")", "def describe_battery(self):\n print(f\"This car has a {self.battery_size}-kWh battery.\")" ]
[ "0.6269014", "0.59985936", "0.5943292", "0.5867068", "0.58323497", "0.5787141", "0.5725671", "0.5711988", "0.5711663", "0.56736314", "0.5655628", "0.5649336", "0.56464905", "0.56165636", "0.5588463", "0.5568272", "0.5558334", "0.5558226", "0.5557175", "0.5550101", "0.55433625", "0.5541492", "0.5520296", "0.5516366", "0.5515665", "0.5500875", "0.54997694", "0.54946434", "0.5493273", "0.5493273" ]
0.7274236
0
Evaluate whether two hypercubes overlap (cross) each other.
def hypercubes_overlap(hypercube1, hypercube2):
    if not isinstance(hypercube1, Volume) or \
       not isinstance(hypercube2, Volume):
        raise TypeError()

    lowercorner1, uppercorner1 = hypercube1.get_corners()
    lowercorner2, uppercorner2 = hypercube2.get_corners()
    nb_dims = len(uppercorner1)

    for i in range(nb_dims):
        if not uppercorner1[i] > lowercorner2[i] or \
           not uppercorner2[i] > lowercorner1[i]:
            return False

    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cross(self, other):\n return self.scalar(other) == 0", "def crosses(self, other): # -> bool:\n ...", "def cross(series1: Sequence, series2: Sequence) -> bool:\n return crossover(series1, series2) or crossover(series2, series1)", "def test_cross(self):\n self.assertEqual(solution.cross(solution.ROWS, solution.COLS), self.boxes)", "def crossover(x, y):\n return x[-1] > y[-1] and x[-2] < y[-2]", "def crossunder(x, y):\n return x[-1] < y[-1] and x[-2] > y[-2]", "def cross(a,b):\n \n return [ a[1]*b[2] - a[2]*b[1],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0],\n 1.0 ]", "def cross(a, b):\n #return np.cross(a,b)\n\n return vector(a[1] * b[2] - a[2] * b[1],\n a[2] * b[0] - a[0] * b[2],\n a[0] * b[1] - a[1] * b[0])", "def cross(a, b):\n c1 = a[1]*b[2] - a[2]*b[1]\n c2 = a[2]*b[0] - a[0]*b[2]\n c3 = a[0]*b[1] - a[1]*b[0]\n return sp.array([c1,c2,c3])", "def test_cross():\n assert_equal(cross(Vector(1, 0, 0), Vector(0, 1, 0)), Vector(0, 0, 1))\n assert_equal(cross(Vector(1, 3, 2), Vector(-1, 1, 0)), Vector(-2, -2, 4))", "def cross(v1, v2):\n return np.cross(v1, v2)", "def cross(a, b):\n return np.array([a[1]*b[2] - a[2]*b[1],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0]])", "def cross_product(p0,p1,p2):\n\treturn (((p1[0]-p0[0])*(p2[1]-p0[1]))-((p2[0]-p0[0])*(p1[1]-p0[1])))", "def is_same_side(p1, p2, a, b):\n vector_ab = [y - x for x, y in zip(a, b)]\n vector_ap1 = [y - x for x, y in zip(a, p1)]\n vector_ap2 = [y - x for x, y in zip(a, p2)]\n cross_vab_ap1 = vector_ab[0] * vector_ap1[1] - vector_ab[1] * vector_ap1[0]\n cross_vab_ap2 = vector_ab[0] * vector_ap2[1] - vector_ab[1] * vector_ap2[0]\n return (cross_vab_ap1 * cross_vab_ap2) >= 0", "def cross_product(a, b):\n return (a[1]*b[2] - a[2]*b[0],\n a[2]*b[0] - a[0]*b[2],\n a[0]*b[1] - a[1]*b[0])", "def crosses(a, b, **kwargs):\n return lib.crosses(a, b, **kwargs)", "def cross_(vec1, vec2, result):\n a1, a2, a3 = double(vec1[0]), double(vec1[1]), double(vec1[2])\n b1, b2, b3 = double(vec2[0]), double(vec2[1]), double(vec2[2])\n result[0] = a2 * b3 - a3 * b2\n result[1] = a3 * b1 - a1 * b3\n result[2] = a1 * b2 - a2 * b1\n return result", "def cross(vec1, vec2):\n result = np.zeros(3)\n return cross_(vec1, vec2, result)", "def __ge__(self, other):\n return self.x ** 2 + self.y ** 2 >= other.x ** 2 + other.y ** 2", "def cross_product(a, b):\n a1, a2, a3 = a\n b1, b2, b3 = b\n return (a2 * b3 - a3 * b2, a3 * b1 - a1 * b3, a1 * b2 - a2 * b1)", "def cross(self, other):\n ox, oy = other\n return self[0] * oy - self[1] * ox", "def __le__(self, other):\n return self.x ** 2 + self.y ** 2 <= other.x ** 2 + other.y ** 2", "def test_cross_v3(self):\n\n vec1 = Vec3(1, 0, 0)\n vec2 = Vec3(0, 1, 0)\n cross = vec1.cross(vec2)\n\n expected = Vec3(0, 0, 1)\n\n self.assertEqual(cross, expected)", "def collinear(a:tuple, b:tuple, c:tuple)->bool:\n return ((b[1] - c[1]) * (a[0] - b[0])) == ((a[1] - b[1]) * (b[0] - c[0]))", "def cross(self, other):\n \n return self.x * other[1] - self.y * other[0]", "def cross(self, other):\n return self.x * other.y - self.y * other.x", "def ucross(a, b):\n ev = a / np.linalg.norm(a)\n return np.cross(ev, b)", "def cross(self, other):\n return self.x*other[1] - self.y*other[0]", "def crossing(self, *args):\n return self.phy2abs.crossing(*args)", "def crossproduct(first, other=FreeCAD.Vector(0,0,1)):\n if isinstance(first,FreeCAD.Vector) and isinstance(other,FreeCAD.Vector):\n return FreeCAD.Vector(first.y*other.z - first.z*other.y, first.z*other.x - first.x*other.z, first.x*other.y - first.y*other.x)" ]
[ "0.72744024", "0.65915084", "0.6428907", "0.6256766", "0.6243645", "0.62124735", "0.6199499", "0.6097191", "0.6094316", "0.60560596", "0.6046024", "0.60402244", "0.6022108", "0.59828514", "0.59746", "0.5966605", "0.5957315", "0.5934511", "0.5931654", "0.5926318", "0.59211856", "0.5886853", "0.58751625", "0.5871814", "0.5864656", "0.5855752", "0.5834682", "0.5816603", "0.58136886", "0.58136797" ]
0.66413707
1
Return the number of small arrays that fit into the big array along each dimension, as a shape.
def get_blocks_shape(big_array, small_array):
    return tuple([int(b/s) for b, s in zip(big_array, small_array)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ndims(x):\n return len(x.get_shape())", "def estimate_size(shape):\n total_bytes = reduce(np.multiply, shape) * 8\n return total_bytes / 1E6", "def size(self):\n return int(misc.intprod(self.shape))", "def dim(self) -> int:", "def find_largest_shape(arrays):\r\n out = np.array([0])\r\n for array in arrays:\r\n out = out * np.zeros_like(array)\r\n return out.shape", "def dims(x):\n return len(x.shape)", "def size(self):\n\t\treturn self.dims", "def size(self) -> int:\n return int(np.multiply(*self.shape))", "def matrix_shape(matrix_sub):\n\n size = []\n try:\n size.append(len(matrix_sub))\n size.append(len(matrix_sub[0]))\n size.append(len(matrix_sub[0][0]))\n return size\n except Exception as ex:\n return size", "def sizeof(shape, dtype=\"uint8\"):\n itemsize = numpy.dtype(dtype).itemsize\n cnt = 1\n if \"__len__\" in dir(shape):\n for dim in shape:\n cnt *= dim\n else:\n cnt = int(shape)\n return cnt * itemsize", "def size(self)->int:\n\n return np.prod([axes if is_integer(axes) else len(axes) for axes in self._dim_axes])", "def count_dims(da):\n return len(da.dims)", "def array_dimensions(array):\n height = len(array)\n width = len(array[0])\n\n return width, height", "def num_lines(dim: int, size: int) -> int:\n\n count = 0\n for i in range(1, dim + 1):\n count += comb(dim, i, True) * (size ** (dim - i)) * (2 ** (i - 1)) \n return count", "def num_elements(shape):\n return 1 if shape is None else int(np.prod(shape))", "def size(self):\n return reduce(mul, self.shape, 1)", "def ndarray_size(self) -> int:\n pass", "def batch_len(batch):\n flatlist, _ = tree_util.tree_flatten(batch)\n if len(flatlist) < 1:\n return 0\n b = flatlist[0].shape[0]\n assert all(\n arr.shape[0] == b for arr in flatlist if th.is_tensor(arr)\n ), \"Not all arrays have same batchsize!\"\n return b", "def flattened_size(x):\n return numel_from_size(x.size()[1:])", "def _ExtractInputShapes(inputs):\n if context.executing_eagerly():\n return array_ops.shape_n(inputs)\n sizes = []\n fully_known = True\n for x in inputs:\n input_shape = array_ops.shape(x)\n if not isinstance(input_shape,\n tensor.Tensor) or input_shape.op.type != \"Const\":\n fully_known = False\n break\n sizes.append(input_shape)\n\n if fully_known:\n return sizes\n else:\n return array_ops.shape_n(inputs)", "def n_dims(self):\n return len(self.dimensions)", "def num(an_array):\n return an_array.size", "def _num_samples(x: npt.ArrayLike) -> int:\n if not hasattr(x, \"__len__\") and not hasattr(x, \"shape\"):\n if hasattr(x, \"__array__\"):\n x = np.asarray(x)\n else:\n raise TypeError(\"Expected sequence or array-like, got %s\" % type(x))\n if hasattr(x, \"shape\"):\n if len(x.shape) == 0:\n raise TypeError(\"Singleton array %r cannot be considered\" \" a valid collection.\" % x)\n # Check that shape is returning an integer or default to len\n # Dask dataframes may not return numeric shape[0] value\n if isinstance(x.shape[0], numbers.Integral):\n return x.shape[0]\n else:\n return len(x)\n else:\n return len(x)", "def num_dims(self):\n return self.h5['{}/{}'.format(SETTINGS, N_DIMS_STR)][()]", "def size(self):\n return numpy.prod(self.shape)", "def ndarray_size(data, dtype=\"int32\"):\n return _make.ndarray_size(data, dtype)", "def ndim(a):\n if isinstance(a, np.ndarray):\n return a.ndim\n else:\n return K.ndim(a)", "def size(self, batch):\n x,y,m = batch \n return sum([mm.sum() for mm in m])", "def getNbin(self):\n return self.shape(squeeze=False)[3]", "def dim(self) -> int:\n pass" ]
[ "0.66448617", "0.66011816", "0.6496022", "0.649422", "0.6473923", "0.6465132", "0.6461578", "0.6414655", "0.64061123", "0.63932633", "0.6376898", "0.6368664", "0.63096297", "0.6296904", "0.6292392", "0.62549186", "0.62271947", "0.62253815", "0.6215773", "0.6177364", "0.61439663", "0.6137378", "0.61146265", "0.6084744", "0.6076694", "0.60366", "0.60191935", "0.60153544", "0.6005665", "0.59863114" ]
0.70311415
0
Returns list of output files that are crossing buffer at buffer_index.
def get_crossed_outfiles(buffer_index, buffers, outfiles):
    crossing = list()
    buffer_of_interest = buffers[buffer_index]
    for outfile in outfiles.values():
        if hypercubes_overlap(buffer_of_interest, outfile):
            crossing.append(outfile)
    return crossing
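The document above depends on a `hypercubes_overlap` helper that is not part of this record. The sketch below is purely illustrative: it restates the record's function so the snippet runs on its own, pairs it with one plausible overlap test, and feeds it invented 2-D data so the expected behaviour of the call is visible. The helper, the region encoding, and the sample values are all assumptions, not anything taken from the dataset.
# Illustrative sketch only: assumes each region is an axis-aligned hypercube encoded as
# ((low_0, high_0), (low_1, high_1), ...); the real hypercubes_overlap may differ.
def hypercubes_overlap(cube_a, cube_b):
    # Axis-aligned regions overlap exactly when their intervals overlap on every axis.
    return all(lo_a < hi_b and lo_b < hi_a
               for (lo_a, hi_a), (lo_b, hi_b) in zip(cube_a, cube_b))

def get_crossed_outfiles(buffer_index, buffers, outfiles):
    crossing = list()
    buffer_of_interest = buffers[buffer_index]
    for outfile in outfiles.values():
        if hypercubes_overlap(buffer_of_interest, outfile):
            crossing.append(outfile)
    return crossing

# Made-up example data (not from the record): two read buffers and two output files.
buffers = [((0, 10), (0, 10)), ((10, 20), (0, 10))]
outfiles = {"a.bin": ((5, 15), (0, 10)),   # straddles both buffers
            "b.bin": ((12, 18), (0, 10))}  # lies inside the second buffer only
print(get_crossed_outfiles(0, buffers, outfiles))  # only a.bin's region crosses buffer 0
print(get_crossed_outfiles(1, buffers, outfiles))  # both regions cross buffer 1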
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def output_files(self):\n output_files = []\n for split in self.split_files:\n output_files.extend(split.filepaths)\n return output_files", "def whichBuffers(self, paramIndex):\n return paramIndex % self.inmod.outdim, paramIndex // self.inmod.outdim", "def buffers_with_matches(self):\n _set = set(self.misc.buffers()) & set(self.matches.keys())\n _set.add(self.curr_buf.number)\n return list(_set)", "def getBufferChildren(self, buffertop):\n children = [v for k, v in self.GS_arcs if k == buffertop]\n return children", "def list_output_files(self):\r\n fname = self.__get_output_filename()\r\n return [fname] if fname else []", "def get_list_of_comitted_files():\n files = []\n output = []\n try:\n output = subprocess.check_output(['git','diff-index', '--name-status', '--cached','HEAD']\n ).decode(\"utf-8\")\n except subprocess.CalledProcessError:\n print(\"Error diff files get: trace %s\" % subprocess.CalledProcessError)\n return files\n\n for result in output.split(\"\\n\"):\n logging.info(result)\n if result != '':\n match = modified.match(result)\n if match:\n files.append(match.group('name'))\n\n return files", "def getFilesAtStamp(self, timestamp):\n\t\tout = []\n\t\tfor stream_name in self.stamps_by_stream.keys():\n\t\t\tts_index = bisect.bisect_right(self.stamps_by_stream[stream_name], timestamp)-1\n\t\t\tif ts_index < 0:\n\t\t\t\tcontinue\n\t\t\ttuple_ts = self.streams[stream_name].keys()\n\t\t\ttuple_ts.sort()\n\t\t\tout.append(self.streams[stream_name][tuple_ts[ts_index]])\n\t\treturn out", "def get_merged_buffers(ptr):\n\n\thdata = weechat.hdata_get(\"buffer\")\n\tbuffers = weechat.hdata_get_list(hdata, \"gui_buffers\")\n\tbuffer = weechat.hdata_search(hdata, buffers, \"${buffer.number} == %i\" % weechat.hdata_integer(hdata, ptr, \"number\"), 1)\n\tnbuffer = weechat.hdata_move(hdata, buffer, 1)\n\n\tret = []\n\twhile buffer:\n\t\tret.append(weechat.hdata_string(hdata, buffer, \"full_name\"))\n\n\t\tif (weechat.hdata_integer(hdata, buffer, \"number\") == weechat.hdata_integer(hdata, nbuffer, \"number\")):\n\t\t\tbuffer = nbuffer\n\t\t\tnbuffer = weechat.hdata_move(hdata, nbuffer, 1)\n\t\telse:\n\t\t\tbuffer = None\n\n\treturn ret", "def compare_files(input_index_file, output_index_file ):\n \n # -------------\n # open the input index file for reading\n # -------------\n input_set = open_read_file(input_index_file)\n\n # -------------\n # open the output index file for reading\n # -------------\n output_set = open_read_file(output_index_file)\n\n # -------------\n # get the difference in the files where\n # the input_set is the larger set\n # -------------\n unproc_files = set_difference(output_set, input_set)\n #print unproc_files\n\n return unproc_files", "def get_output_slice_idx(self, output_index):\r\n ipos = 0\r\n opos = output_index\r\n for otaps in zip(self.mitmot_out_taps()):\r\n if len(otaps) > 0:\r\n return ipos\r\n else:\r\n opos = opos - 1\r\n ipos += len(otaps)\r\n return ipos + opos", "def get_key_frames_for_points(self, index):\n key_frames = []\n for point in self._points[index]:\n key_frames.append(get_frame(point))\n\n return key_frames", "def _most_recent_event_files(self):\n regex = re.compile(r\"\\w*events.log\")\n return [\n os.path.join(self._output_dir, x)\n for x in os.listdir(self._output_dir)\n if regex.search(x)\n ]", "def buffers_exist(self):\n for buff in self.buffers:\n if not buff.is_separate_file:\n continue\n\n path = self.path.parent / buff.uri\n if not path.exists():\n raise FileNotFoundError(\n \"Buffer {} referenced in {} not 
found\".format(path, self.path)\n )", "def pull_buffer(self, last_shared_index):\n buffer = []\n explicit_buffer = []\n\n # report_implicit_count = 0\n # report_last_shared_time = self.ledger[last_shared_index][\"time\"]\n # report_now_last_shared_time = rospy.get_rostime()\n # report_duration = report_now_last_shared_time - report_last_shared_time\n\n meas_dict = self._get_shareable_meas_dict(last_shared_index)\n print(\"PULLING BUFFER: current index {}\".format(len(self.ledger)))\n for msg_id in meas_dict:\n times = meas_dict[msg_id][\"times\"] # Should be sorted\n explicit = meas_dict[msg_id][\"explicit\"]\n bursts = self._get_bursts(times)\n # print(\"Delta: {} | Msg id: {} | Num Explicit: {}\".format(self.delta_multiplier, msg_id, len(explicit)))\n # print(\"size(times): {}\".format(len(times)))\n # print(\"size(explicit): {}\".format(len(explicit)))\n # print(\"bursts: {}\".format(bursts))\n\n if len(bursts) > 1:\n print(\"ERROR MULTIPLE BURSTS DETECTED\")\n print(bursts)\n\n b = bursts[-1] # Only use last burst\n b_numpy = np.array(b)\n start_time = b[0]\n # print(\"Constructing msg: {}\".format(msg_id))\n if len(b) > 1:\n cumdiff = b_numpy[1:] - b_numpy[:-1] # Get the adjacent difference\n latencies = [lat.to_sec() for lat in cumdiff]\n mean_lat = np.mean(latencies)\n # print(\"Avg latency: {}\".format(mean_lat))\n else:\n mean_lat = 0\n # print(\"Num msgs: {}\".format(len(b)))\n burst_msg = self._make_burst_msg(msg_id, len(b), start_time, mean_lat)\n buffer.append( burst_msg )\n explicit_buffer.extend( explicit )\n # report_implicit_count += (len(b) - len(explicit))\n \n meas_sort = lambda x : x.stamp\n explicit_buffer.sort(key=meas_sort, reverse=True)\n buffer.extend(explicit_buffer)\n\n # REPORT\n # print(\"******* BUFFER SHARING REPORT FOR {} w/ Delta {}*******\".format(self.my_name, self.delta_multiplier))\n # print(\"Last shared time: {}\".format(report_last_shared_time.to_sec()))\n # print(\"Sharing duration: {}\".format(report_duration.to_sec()))\n # print(\"Sharing time now: {}\".format(report_now_last_shared_time.to_sec()))\n # print(\"Implicit cnt: {}\".format(report_implicit_count))\n # print(\"Explicit cnt: {}\".format(len(explicit_buffer)))\n\n return buffer # Delta-Tiering\n # return explicit_buffer # N-most recent", "def get_files_to_be_committed():\n current_staging_hashes = get_all_path_hashes(staging_path)\n head_path = get_wit_path(keyword=get_current_commit_id())\n head_hashes = get_all_path_hashes(path=head_path)\n new_file_hashes = []\n files_to_be_committed = []\n for staging_hash in current_staging_hashes:\n if staging_hash not in head_hashes:\n new_file_hashes.append(staging_hash)\n files_to_be_committed = [staging_hash_decoder(h) for h in new_file_hashes]\n return files_to_be_committed", "def frame_paths(self, indx):\n if isinstance(indx, (int,np.integer)):\n return os.path.join(self['directory'][indx], self['filename'][indx])\n return [os.path.join(d,f) for d,f in zip(self['directory'][indx], self['filename'][indx])]", "def check_bonuses(self, hit_index):\n result = []\n if hit_index >= 1 and self.balls[hit_index - 1].type < 0:\n result.append(hit_index - 1)\n if hit_index < len(self.balls) - 1 and \\\n self.balls[hit_index + 1].type < 0:\n result.append(hit_index + 1)\n return result", "def get_checkpoint_list(dir):\n ckpt_fnames = glob.glob(os.path.join(dir, '*.index'))\n ckpt_fnames = [x.replace('.index', '') for x in ckpt_fnames]\n ckpt_fnames.sort(key=lambda key: int(os.path.basename(key).split('-')[-1]))\n return ckpt_fnames", "def 
read_all_files(\n pathname: Path, index: int = 0, pattern: str = \"dump-Trimer-*.gsd\"\n) -> List[Tuple[Variables, HoomdFrame]]:\n pathname = Path(pathname)\n snapshots = []\n for filename in sorted(glob.glob(str(pathname / pattern))):\n logger.debug(\"Reading %s\", Path(filename).stem)\n with gsd.hoomd.open(str(filename)) as trj:\n try:\n snapshots.append((get_filename_vars(filename), HoomdFrame(trj[index])))\n except IndexError:\n continue\n if not snapshots:\n logger.warning(\n \"There were no files found with a configuration at index %s\", index\n )\n return snapshots", "def _get_files_list(self):\n ts_filepaths = []\n conn_filepaths = []\n ts_filepaths_from_dir = sorted(os.listdir(self.ts_dir))\n conn_filepaths_from_dir = sorted(os.listdir(self.conn_dir))\n for sub_id in self.ids:\n for ts_file in ts_filepaths_from_dir:\n if sub_id in ts_file:\n ts_filepaths += [os.path.join(self.ts_dir, ts_file)]\n ts_filepaths_from_dir.remove(ts_file)\n break\n for conn_file in conn_filepaths_from_dir:\n if sub_id in conn_file:\n conn_filepaths += [os.path.join(self.conn_dir, conn_file)]\n conn_filepaths_from_dir.remove(conn_file)\n break\n\n return ts_filepaths, conn_filepaths", "def get_files_to_be_indexed(self):\n\t\tfiles = self.get_all_files()\n\t\tfiles_list = []\n\t\tfor name in files:\n\t\t\tif(name.split('.')[-1] in self.accepted_formats and os.stat(os.path.join(self.root, name)).st_size < 5000000):\n\t\t\t\tfiles_list.append(os.path.join(self.root, name))\n\t\treturn files_list[0:-1]", "def _get_replay_buffer_filled_indices(self, replay_buffers, actor_index):\n # We know that the reservoir value > 0 if it's been filled, so check for entries where it == 0\n buffer_indicator = replay_buffers['reservoir_val'][actor_index].squeeze(1)\n replay_indices = np.where(buffer_indicator != 0)[0]\n return replay_indices", "def backProjection(r, index_list):\n\n logging.debug('-------- BACK PROJECTION: %d POINTS ---', len(index_list))\n\n # reverse_index = {index_list[i]: i for i in range(len(index_list))}\n\n source_points = []\n\n pipe = r.pipeline()\n for idx in index_list:\n # Negation indicates historical index:\n index = int(idx)\n if index < 0:\n continue\n else:\n pipe.lindex('xid:reference', index)\n\n # Load higher dim point indices from catalog\n generated_framelist = [i for i in pipe.execute() if i is not None]\n\n ref = deshaw.topo_prot # Hard coded for now\n\n # Group all Generated indidces by file index \n groupbyFileIdx = {}\n for i, idx in enumerate(generated_framelist):\n file_index, frame = eval(idx)\n if file_index not in groupbyFileIdx:\n groupbyFileIdx[file_index] = []\n groupbyFileIdx[file_index].append(frame)\n\n # Dereference File index to filenames\n generated_frameMask = {}\n generated_filemap = {}\n for file_index in groupbyFileIdx.keys():\n filename = r.lindex('xid:filelist', file_index)\n if filename is None:\n logging.error('Error file not found in catalog: %s', filename)\n else:\n key = os.path.splitext(os.path.basename(filename))[0]\n generated_frameMask[key] = groupbyFileIdx[file_index]\n generated_filemap[key] = filename\n\n # Check cache for generated data points\n bplist = []\n for filename, frames in generated_frameMask.items():\n bplist.append(('sim', generated_filemap[filename], frames))\n\n source_points = []\n logging.debug('Sequentially Loading %d trajectories', len(bplist))\n for ftype, fileno, framelist in bplist:\n traj = datareduce.load_trajectory(fileno)\n selected_frames = traj.slice(framelist)\n source_points.extend(selected_frames.xyz)\n\n 
logging.debug('All Uncached Data collected Total # points = %d', len(source_points))\n source_traj = md.Trajectory(np.array(source_points), ref.top)\n\n logging.info('-------- Back Projection Complete ---------------')\n return source_traj", "def break_output(dirname, files, index, freqs):\n locs = [(i, file_to_loc(filename)) for (i, filename) in enumerate(files)]\n loc_level = max(loc_resolution(loc) for _, loc in locs)\n locs.sort(key=operator.itemgetter(1))\n\n focus = focus_fn(loc_level)\n group_key = lambda pair: focus(pair[1])\n for place, focused_pairs in itertools.groupby(locs, group_key):\n focused_i, focused_locs = unzip(focused_pairs)\n focused = freqs[focused_i,]\n\n headers = ['token'] + [format_loc(loc) for loc in focused_locs]\n for letter, indexes in index.items():\n output = '{}-{}-{}-colls.csv'.format(\n dirname,\n '.'.join(str(n) for n in place),\n letter,\n )\n\n print('{} ({}) => {}'.format(dirname, place, output))\n with open(output, 'w') as fout:\n writer = csv.writer(fout)\n writer.writerow(headers)\n writer.writerows(\n [word] + list(focused[:, i])\n for i, word in indexes\n )", "def _list_outputs(self):\n outputs = self._outputs().get()\n\n out_dir = os.path.abspath(os.path.join(os.getcwd(), \"slicesdir\"))\n outputs[\"out_dir\"] = out_dir\n outputs[\"out_files\"] = [\n self._gen_fname(\n basename=f.replace(os.sep, \"_\"),\n cwd=out_dir,\n ext=self.inputs.out_extension,\n )\n for f in self.inputs.in_files\n ]\n return outputs", "def filter_out_flickers(total_buffer,index_disappeared):\n \n wait_for_disparition = False\n candidate_for_disparition = -1\n to_destroy = [] #List of 3D tuples (value,first_index,last_index) of segmented elements to remove from image\n beginning_index = -1\n premier_i =-1\n list_of_is =[]\n \n previous_index, osef2, osef3 = index_disappeared[0] #Get the index for the first event\n\n for i in range(0,len(index_disappeared)):\n index,diff,list_index = index_disappeared[i]\n #Remove an appearing and disappearing object from the experiment only if it\n #disappears in the next 5 (arbitrary) frames. 
If longer, conseder that something relevant\n #happened.\n \n if wait_for_disparition:\n #If sth appeared, destroy it if:\n #-It is the same object that disappears\n #-If the event is a disparition\n #-If it disappears in a time<time_thr\n size = np.count_nonzero(total_buffer[:,:,index-1]==list_index[0])\n if list_index[0]==candidate_for_disparition and diff<0 and size<500:\n to_destroy.append((list_index[0],beginning_index,index))\n list_of_is.append(premier_i)\n list_of_is.append(i)\n wait_for_disparition=False\n \n if diff>0: #Creation, do wait for disparition\n candidate_for_disparition = list_index[0]\n beginning_index = index\n wait_for_disparition =True\n premier_i = i\n \n return to_destroy,list_of_is", "def get_current_drops(output):\n has_skipped_netem = False\n for line in output.split(\"\\n\"):\n # it looks like the tbf share a buffer\n # So, the we only need to recor the first one seen\n # break after the second one\n match = re.search(q_pattern, line)\n if (match):\n if (not has_skipped_netem):\n has_skipped_netem = True\n continue\n sent = int(match.group(\"sent\"))\n dropped = int(match.group(\"dropped\"))\n return (sent, dropped)", "def extract_index_urls(self, index: int) -> ListLike:\n cmd_pieces = self[index].split()\n index_urls = []\n for i, piece in enumerate(cmd_pieces):\n if piece in [\"--index-url\", \"--extra-index-url\"]:\n index_urls.append(cmd_pieces[i + 1])\n return index_urls", "def _get_output_branch(self, coating_idx: int, pitch: float) -> str:\n if self.pitch_ranges == []:\n return self.output_branches[0]\n\n # use coating to pick proper pitch ranges\n # 0 state is unknown, 1 is the first coating. decrement to get index\n pitch_limit_list = self.pitch_ranges[coating_idx]\n\n # find indices ranges where pitch is valid\n pitch_idxs = self._find_matching_range_indices(pitch_limit_list, pitch)\n valid_pitch_idx = np.where(pitch_idxs)[0]\n\n # pitch should only be within one valid range\n if len(valid_pitch_idx) != 1:\n raise MirrorLogicError('only one pitch-range should be valid')\n\n # index of valid range = index of output_branch + 1\n # assuming first output_branch is through line\n return self.output_branches[valid_pitch_idx[0] + 1]", "def get_output_bands(self):\n dlist=self.dest_list.children()\n out_list=[]\n for item in dlist:\n out_list.append((self.output_bands[item][0],\n self.output_bands[item][1]))\n return out_list" ]
[ "0.56127596", "0.55306166", "0.5441968", "0.5265472", "0.5262707", "0.5152076", "0.5129678", "0.51254493", "0.5097565", "0.50565803", "0.5016646", "0.5007455", "0.49993044", "0.4994247", "0.49855304", "0.49773374", "0.49699283", "0.49558246", "0.49224594", "0.4905285", "0.48742262", "0.4860917", "0.4831018", "0.48286334", "0.4814926", "0.47999886", "0.4754472", "0.47506952", "0.4734951", "0.4729577" ]
0.7601003
0