Dataset schema (per row):
  query            string, length 9 to 9.05k
  document         string, length 10 to 222k
  metadata         dict
  negatives        sequence, length 30
  negative_scores  sequence, length 30
  document_score   string, length 4 to 10
  document_rank    string, 2 classes
Given an image numpy array, it returns all the points in each object in the image. Assuming the background has the maximum number of points, it removes the background object's points.
def _get_mask_points(img_arr):
    img_unique_val = np.unique(img_arr)
    max_point_object_id = -1
    max_num_points = -1
    masks_point_dict = dict()
    for mask_id in img_unique_val:
        points_location = np.where(img_arr == mask_id)
        min_height = min(points_location[0])
        max_height = max(points_location[0])
        min_width = min(points_location[1])
        max_width = max(points_location[1])
        # not a 2D data for convex hull function
        if (max_height - min_height) <= 2 or (max_width - min_width) <= 2:
            continue
        mask_points = list(zip(points_location[0], points_location[1]))
        mask_points = list(set(mask_points))  # unique points in the mask
        if len(mask_points) <= 2:
            continue
        masks_point_dict[mask_id] = mask_points
        if len(mask_points) > max_num_points:
            max_num_points = len(mask_points)
            max_point_object_id = mask_id
    # assuming background have maximum number of points
    if max_point_object_id != -1:
        del masks_point_dict[max_point_object_id]
    return masks_point_dict
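A minimal usage sketch for the document above. Everything here is hypothetical: a tiny label image where id 0 is the background and id 1 is a single object just large enough to pass the size guard.

import numpy as np

# Hypothetical 6x6 label image: id 0 is the background (20 pixels),
# id 1 is a 4x4 object (16 pixels), big enough to pass the size checks.
img = np.zeros((6, 6), dtype=int)
img[1:5, 1:5] = 1

points = _get_mask_points(img)
# The background (id 0) had the most points, so only object id 1 remains.
assert list(points.keys()) == [1]
assert len(points[1]) == 16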
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_objects(img, threshold=.3):\n thresholded_img = np.uint8(img > threshold)\n _, markers = cv2.connectedComponents(thresholded_img)\n object_centers = []\n for ii in range(1, np.max(markers)):\n masked_img = mask_img(img, markers == ii)\n object_index = np.argmax(masked_img)\n object_center = np.unravel_index(object_index, img.shape)\n object_centers.append(object_center)\n return np.array(object_centers)", "def checker_img_points(images, objpoint, checkerboard):\r\n #Criteria for subpixel refinement\r\n criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.001)\r\n\r\n objpoints = [] # 3d point in world space\r\n imgpoints = [] # 2d point in image plane\r\n\r\n dellist = []\r\n\r\n for i,img in enumerate(images):\r\n print(\"image number:\", i)\r\n\r\n #when running raw_plane_chess, don't run below code\r\n #gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\r\n gray = img\r\n\r\n # Find the chess board corners\r\n ret, corners = cv2.findChessboardCorners(gray, checkerboard, None)\r\n # If found, add object points, image points (after refining them)\r\n if ret == True:\r\n objpoints.append(objpoint.T)\r\n cv2.cornerSubPix(gray,corners,(3,3),(-1,-1),criteria)\r\n cv2.drawChessboardCorners(img, checkerboard, corners, ret)\r\n imgpoints.append(corners.T[:,0,:])\r\n else:\r\n print(\"Found no checkerboard in this image {}\".format(i))\r\n cv2.imshow(\"image\",img)\r\n cv2.waitKey(0)\r\n cv2.destroyAllWindows()\r\n dellist.append(i)\r\n\r\n for index in sorted(dellist, reverse=True):\r\n del images[index]\r\n\r\n print(\"Found {} checkerboards of size {}\".format(len(objpoints),checkerboard))\r\n return objpoints,imgpoints", "def charuco_img_points(images, objpoint, board, a_dict):\r\n #Criteria for subpixel refinement\r\n criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.001)\r\n\r\n objpoints = [] # 3d point in world space\r\n imgpoints = [] # 2d point in image plane\r\n\r\n for img in images:\r\n gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\r\n corners, ids, rejpoints = cv2.aruco.detectMarkers(gray, a_dict)\r\n if len(corners)>0:\r\n res2 = cv2.aruco.interpolateCornersCharuco(corners,ids,gray,board)\r\n if res2[1] is not None:\r\n cv2.cornerSubPix(gray,res2[1],(3,3),(-1,1),criteria)\r\n imgpoints.append(res2[1].T[:,0,:])\r\n objpoints.append(objpoint[:,res2[2].flatten()])\r\n cv2.aruco.drawDetectedCornersCharuco(img,res2[1],res2[2])\r\n cv2.imshow(\"frame\",img)\r\n cv2.waitKey(0)\r\n cv2.destroyAllWindows()\r\n\r\n return objpoints,imgpoints", "def clean(img):\n\n label_img = label(img, connectivity=2)\n props = sorted(regionprops(label_img), key=lambda x: x.area)\n clean = morphology.binary_closing(img)\n\n clean = morphology.remove_small_holes(clean)\n return morphology.remove_small_objects(clean,\n int(np.floor(props[-1].area) / 10), connectivity=2)", "def find_dots(img):\n # will hold all points\n coordinates = []\n # will hold only relevant points\n points = []\n # losing the side\n img[:, 475:] = 0\n # using for finding the best corners in edged image 65\n corners = cv2.goodFeaturesToTrack(img, 75, 0.085, 61)\n corners = np.int0(corners)\n for corner in corners:\n x, y = corner.ravel()\n if y > 350 or y < 10: # avoid from top and bottom\n continue\n coordinates.append((x, y))\n # sort in order to start from right to left\n sort_coordinates = sorted(coordinates)\n num_of_dot = 1\n for i in reversed(sort_coordinates):\n # when its 9, break\n if num_of_dot > 9:\n break\n points.append((i[0], i[1]))\n num_of_dot += 1\n return points", "def 
image_to_points(numpy_image):\r\n res = []\r\n for i in range(numpy_image.shape[0]):\r\n for j in range(numpy_image.shape[1]):\r\n if numpy_image[i,j]==0:\r\n res.append([i,j])\r\n return res", "def _collect_points(self, image, point_value=0):\n return zip(*np.where(image == point_value))", "def draw_features(self, image):\n \n for x,y in self.new_points.reshape(-1,2):\n cv2.circle(image, (x,y), 2, (255,0,255), 2)\n return image", "def image_preprocessing(img):\n\n # Removing parasite data (sky, trees and front of the car)\n return img[60:-20, :, :]", "def getFillPoints(image):\n image = cv.CloneMat(image)\n retList = []\n _, maxVal, _ , maxLoc = cv.MinMaxLoc(image)\n while maxVal > 0:\n retList.append(maxLoc)\n cv.FloodFill(image, maxLoc, 0)\n _, maxVal, _, maxLoc = cv.MinMaxLoc(image)\n return retList", "def extract(img):\n # perform selective search\n img_lbl, regions = selectivesearch.selective_search(\n img, scale=500, sigma=0.9, min_size=1)\n\n # delete the region which contains whole image\n regions = sorted(regions, key=lambda x: x['size'], reverse=True)\n\n candidates = []\n\n for r in regions:\n # excluding biggest retangle which contains whole image\n if r['rect'][0] == 0 and r['rect'][1] == 0:\n continue\n # excluding same rectangle (with different segments)\n if r['rect'] in candidates:\n continue\n # excluding parts that are too small\n x, y, w, h = r['rect']\n\n if w * h < 9:\n continue\n\n # ecludeing parts too sharp\n if w > 100 * h or h > 100 * w:\n continue\n\n candidates.append(r['rect'])\n\n # remove rectangles opverlap each other with nms technique\n candidates = nms.non_max_suppression_slow(candidates)\n\n return candidates", "def remove_small_objects(img, min_size=7500):\n img2 = np.copy(img)\n img2 = np.uint8(img2)\n nb_components, output, stats, centroids = cv2.connectedComponentsWithStats(img2, connectivity=8)\n # connectedComponentswithStats yields every seperated component with information on each of them, such as size\n # the following part is just taking out the background which is also considered a component, but most of the time we don't want that.\n sizes = stats[1:, -1]\n nb_components = nb_components - 1\n\n # your answer image\n # for every component in the image, you keep it only if it's above min_size\n for i in range(0, nb_components):\n if sizes[i] < min_size:\n img2[output == i + 1] = 0\n\n return img2", "def get_objects(color, depth, threshold1, threshold2):\n\n gray = cv2.cvtColor(color, cv2.COLOR_BGR2GRAY)\n blur = cv2.GaussianBlur(gray, (5, 5), 0)\n surf = cv2.xfeatures2d.SURF_create(500)\n\n # find and draw the keypoints\n kp = surf.detect(blur,None)\n\n pts = [p.pt for p in kp]\n xpts = []\n ypts = []\n\n # evaluate the keypoints and only save the keypoints who are between the given threshold\n depth_values = []\n for i in range(0,len(pts)):\n xco = int(pts[i][0])\n yco = int(pts[i][1])\n depth_value = depth[yco][xco]\n if depth_value >= float(threshold1) and depth_value <= float(threshold2):\n xpts.append(xco)\n ypts.append(yco)\n depth_values.append(depth_value)\n\n # make histogram of x coordinates of the saved keypoints\n n, distr, _ = plt.hist(xpts)\n plt.savefig('hist.png')\n\n # evaluate the histogram and make seperate arrays for the different objects\n objectarray = []\n temp = []\n for i in range(len(n)):\n if n[i] > 0:\n temp.append(distr[i])\n temp.append(distr[i+1])\n else:\n if len(temp)!=0:\n objectarray.append(temp)\n temp = []\n objectarray.append(temp)\n\n objects = []\n\n # determine the objects with the previous calculated 
arrays\n for i in range(len(objectarray)):\n y_values = []\n min_x = int(np.amin(objectarray[i]))\n max_x = int(np.amax(objectarray[i]))\n\n for j in range(len(xpts)):\n if xpts[j] > min_x and xpts[j] < max_x:\n y_values.append(ypts[j])\n\n min_y = int(np.amin(y_values))\n max_y = int(np.amax(y_values))\n x = min_x\n y = min_y\n w = max_x - min_x\n h = max_y - min_y\n\n depth_mean = round(get_depth_mean(depth, x, y, w, h), 3)\n\n object = DetectedObject(x, y, w, h, depth_mean)\n objects.append(object)\n\n return objects", "def slice_array():\n img = Image.open(\"flamingo.jpg\")\n image_as_array = np.array(img)\n width, height, depth = image_as_array.shape\n\n red_channel = image_as_array[:, :, 0]\n green_channel = image_as_array[:, :, 1]\n blue_channel = image_as_array[:, :, 2]\n\n top_left_corner = image_as_array[:height // 2, :width // 2, :]\n top_right_corner = image_as_array[:height // 2, width // 2:, :]\n random_middle_pixels = image_as_array[11:29, 101:400, :]", "def pts_filter_color(points):\n pts = np.array(points).tolist()\n # Get rid of all points behind camera\n pts_fil = []\n for pt in pts:\n if pt[2] > 0: \n pts_fil.append(pt)\n \n # get pix size for x distance\n pts_col = []\n for point in pts_fil: \n x = point[0]\n y = point[1]\n z = point[2]\n pix_width = (2 * z * np.tan(fov_width/2))/img_width\n pix_height = (2 * z * np.tan(fov_height/2))/img_height\n # Get row and column coordinates\n y_mod = img_width/2 + y/pix_height + height_offset\n x_mod = img_height/2 - x/pix_width + width_offset\n row = int(y_mod)\n col = int(x_mod)\n # Check if point is inside image bounds\n if 0 <= col < img_msg_now.width and 0 <= row < img_msg_now.height: \n rgb = img[row][col] # Get color of that row and column\n pts_col.append(point + rgb)", "def extract_blobs_closest_points(this_robot, in_image, active_mask):\n\n out_image = PointSampleImage(in_image.calib_array, in_image.neighbour_array)\n\n G = nx.Graph()\n\n # First add all nodes, where each node consists of an index into\n # calib_array for one of the active pixels.\n for i in range(in_image.n_rows):\n G.add_node(i)\n\n # We will add edges between neighbouring pixels. See\n # sensors/pointsamplecam for the definition of neighbouring.\n node_list = G.nodes()\n n = len(node_list)\n for i in range(n):\n if in_image.masks[i] & active_mask != 0:\n (ixi, iyi) = in_image.calib_array[i,0], in_image.calib_array[i,1]\n for j in in_image.neighbour_array[i]:\n if in_image.masks[j] & active_mask != 0:\n G.add_edge(i, j)\n\n clusters = nx.connected_component_subgraphs(G, copy=False)\n n_clusters = 0\n for cluster in clusters:\n n_clusters += 1\n # Find the closest pixel to the robot in this cluster. 
\n closest_i = None\n closest_distance = float('inf')\n for i in cluster.nodes():\n #(xr, yr) = in_image.calib_array[i,2], in_image.calib_array[i,3]\n #d = sqrt(xr*xr + yr*yr)\n\n # The pre-computed distance sqrt(xr*xr + yr*yr)\n d = in_image.calib_array[i,5]\n\n if d < closest_distance:\n closest_i = i\n closest_distance = d\n if closest_i != None:\n out_image.masks[closest_i] = in_image.masks[closest_i]\n\n return out_image", "def extract_features(image):\n\n return maximum_curvature(*preprocess(image), sigma = 3)", "def detect_points(self):\r\n\r\n\t\r\n\r\n\t\tfeature_mask = np.zeros_like(self.gray) ## Create a mask so we only look for template features in the ROI\r\n\t\t\r\n\t\tfeature_mask[max(0,self.bb[1]):min(360,self.bb[1] + self.bb[3]),max(0,self.bb[0]):min(640,self.bb[0] + self.bb[2])] = 255\r\n\r\n\t\t# search for good points\r\n\t\tfeatures = cv2.goodFeaturesToTrack(self.gray, mask = feature_mask, **feature_params)\r\n\t\t# refine the corner locations\r\n\t\tcv2.cornerSubPix(self.gray,features, **subpix_params)\r\n\r\n\t\tself.features = features\r\n\r\n\t\tself.tracks = [[p] for p in features.reshape((-1,2))]\r\n\r\n\t\tself.prev_gray = self.gray", "def del_alpha(img): \n img = img[:,:,0:3].copy()\n return img", "def get_background_extracted_images(img):\n kernel = np.ones((5, 5), np.float32) / 25\n img = cv2.filter2D(img, -1, kernel)\n\n background_color = br.get_primary_background_color(img)\n spot_size = 200\n background_location = br.get_background_spot(img, background_color, spot_size)\n binary_threshold = 25\n binary_img = br.generate_binary_background_image(img, background_color, binary_threshold)\n binary_background_img = br.separate_background(binary_img, background_location)\n cropped_images = br.crop_image_rectangles(img, binary_background_img)\n feature_threshold = 10\n valid_cropped_images = br.validate_cropped_images(cropped_images, feature_threshold)\n return valid_cropped_images", "def get_mask_puzzle_pieces_background(background: np.ndarray,\n image: np.ndarray) -> list:\n background = cv.cvtColor(background, cv.COLOR_BGR2GRAY)\n image = cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n image_mask = cv.absdiff(background, image)\n image_mask = cv.medianBlur(image_mask, 11)\n ret, image_mask = cv.threshold(image_mask, 20, 255, cv.THRESH_BINARY)\n image_mask = cv.morphologyEx(image_mask,\n cv.MORPH_OPEN,\n (5, 5),\n iterations=3)\n image_mask = cv.filter2D(image_mask, -1, KERNEL_SHARPENING)\n puzzle_pieces, hiearchy = cv.findContours(image_mask,\n cv.RETR_EXTERNAL,\n cv.CHAIN_APPROX_NONE)\n pieces_mask = []\n for piece in range(len(puzzle_pieces)):\n blank = np.zeros((image_mask.shape[0], image_mask.shape[1]))\n single_piece = cv.drawContours(blank, puzzle_pieces, piece, 255,\n cv.FILLED)\n single_piece = cv.cvtColor(single_piece.astype(np.uint8),\n cv.COLOR_GRAY2BGR)\n pieces_mask.append(single_piece)\n return pieces_mask", "def extract_point_pixel_coordinates(image, use_BGR: bool = False):\n mask, clean_mask = get_cube_mask(np.array(image), use_BGR=use_BGR)\n x_ind, y_ind = np.where(mask)\n\n points = [[], [], [], []]\n point_cnt = 0\n recognized_points = 0\n while point_cnt < len(x_ind):\n next_point = np.array([x_ind[point_cnt], y_ind[point_cnt]])\n\n # add the first point\n if recognized_points == 0:\n points[recognized_points].append(next_point)\n recognized_points += 1\n else:\n dist_from_prev_points = [np.linalg.norm(next_point - np.mean(points[i], axis=0)) for i in range(recognized_points)]\n point_clusters = np.array(dist_from_prev_points) < 6\n # add pixel to 
previous point if is it close to it\n if np.any(point_clusters):\n cluster = np.where(point_clusters==True)[0][0]\n points[cluster].append(next_point)\n # add pixel to a new point\n elif recognized_points < 4:\n points[recognized_points].append(next_point)\n recognized_points += 1\n else:\n assert False, \"Error, there should only be 4 point to recognize\"\n point_cnt += 1\n\n\n pixel_coordinates = [np.mean(p, axis=0) for p in points]\n return pixel_coordinates", "def pixelPoints(img, cnt):\n\tm = np.zeros(grayscale(img).shape, np.uint8)\n\tcv2.drawContours(m, [cnt], 0, 255, -1)\n\tpixelpoints = cv2.findNonZero(m)\n\treturn pixelpoints", "def get_background_pixels(img, rect):\n mask = np.zeros(img.shape[:2], np.uint8)\n bg_model = np.zeros((1,65), np.float64)\n fg_model = np.zeros((1,65), np.float64)\n cv2.grabCut(img, mask, rect, bg_model, fg_model, 5, cv2.GC_INIT_WITH_RECT)\n flattened = mask.flatten()\n background_coords = np.where((flattened == 0) | (flattened == 2))\n return background_coords", "def calculate_hit_box_points_simple(image):\r\n left_border = 0\r\n good = True\r\n while good and left_border < image.width:\r\n for row in range(image.height):\r\n pos = (left_border, row)\r\n pixel = image.getpixel(pos)\r\n if type(pixel) is int or len(pixel) != 4:\r\n raise TypeError(\"Error, calculate_points called on image not in RGBA format\")\r\n else:\r\n if pixel[3] != 0:\r\n good = False\r\n break\r\n if good:\r\n left_border += 1\r\n\r\n right_border = image.width - 1\r\n good = True\r\n while good and right_border > 0:\r\n for row in range(image.height):\r\n pos = (right_border, row)\r\n pixel = image.getpixel(pos)\r\n if pixel[3] != 0:\r\n good = False\r\n break\r\n if good:\r\n right_border -= 1\r\n\r\n top_border = 0\r\n good = True\r\n while good and top_border < image.height:\r\n for column in range(image.width):\r\n pos = (column, top_border)\r\n pixel = image.getpixel(pos)\r\n if pixel[3] != 0:\r\n good = False\r\n break\r\n if good:\r\n top_border += 1\r\n\r\n bottom_border = image.height - 1\r\n good = True\r\n while good and bottom_border > 0:\r\n for column in range(image.width):\r\n pos = (column, bottom_border)\r\n pixel = image.getpixel(pos)\r\n if pixel[3] != 0:\r\n good = False\r\n break\r\n if good:\r\n bottom_border -= 1\r\n\r\n # If the image is empty, return an empty set\r\n if bottom_border == 0:\r\n return []\r\n\r\n def _check_corner_offset(start_x, start_y, x_direction, y_direction):\r\n\r\n bad = False\r\n offset = 0\r\n while not bad:\r\n y = start_y + (offset * y_direction)\r\n x = start_x\r\n for count in range(offset + 1):\r\n my_pixel = image.getpixel((x, y))\r\n # print(f\"({x}, {y}) = {pixel} | \", end=\"\")\r\n if my_pixel[3] != 0:\r\n bad = True\r\n break\r\n y -= y_direction\r\n x += x_direction\r\n # print(f\" - {bad}\")\r\n if not bad:\r\n offset += 1\r\n # print(f\"offset: {offset}\")\r\n return offset\r\n\r\n def _r(point, height, width):\r\n return point[0] - width / 2, (height - point[1]) - height / 2\r\n\r\n top_left_corner_offset = _check_corner_offset(left_border, top_border, 1, 1)\r\n top_right_corner_offset = _check_corner_offset(right_border, top_border, -1, 1)\r\n bottom_left_corner_offset = _check_corner_offset(left_border, bottom_border, 1, -1)\r\n bottom_right_corner_offset = _check_corner_offset(right_border, bottom_border, -1, -1)\r\n\r\n p1 = left_border + top_left_corner_offset, top_border\r\n p2 = (right_border + 1) - top_right_corner_offset, top_border\r\n p3 = (right_border + 1), top_border + top_right_corner_offset\r\n p4 
= (right_border + 1), (bottom_border + 1) - bottom_right_corner_offset\r\n p5 = (right_border + 1) - bottom_right_corner_offset, (bottom_border + 1)\r\n p6 = left_border + bottom_left_corner_offset, (bottom_border + 1)\r\n p7 = left_border, (bottom_border + 1) - bottom_left_corner_offset\r\n p8 = left_border, top_border + top_left_corner_offset\r\n\r\n result = []\r\n\r\n h = image.height\r\n w = image.width\r\n\r\n result.append(_r(p7, h, w))\r\n if bottom_left_corner_offset:\r\n result.append(_r(p6, h, w))\r\n\r\n result.append(_r(p5, h, w))\r\n if bottom_right_corner_offset:\r\n result.append(_r(p4, h, w))\r\n\r\n result.append(_r(p3, h, w))\r\n if top_right_corner_offset:\r\n result.append(_r(p2, h, w))\r\n\r\n result.append(_r(p1, h, w))\r\n if top_left_corner_offset:\r\n result.append(_r(p8, h, w))\r\n\r\n # Remove duplicates\r\n result = tuple(dict.fromkeys(result))\r\n\r\n return result", "def extract_walls(img_array,x_scale,y_scale,wall_height):\n\n wall_th = 2\n length = 0\n wall_list = []\n\n #check for horizontal walls first\n for row in range(img_array.shape[0]):\n for col in range(img_array.shape[1]):\n \n sec = img_array.astype(int)[row:row+2,col:col+2]\n \n if left_edge(sec):\n #check two steps to the right\n next_sec = img_array.astype(int)[row:row+2, col+1:col+3]\n next_next_sec = img_array.astype(int)[row:row+2, col+2:col+4]\n\n #if horizontal wall, get coordinates and count length\n if is_wall(next_sec) and not right_edge(next_next_sec): \n #record corner coordinates\n x = col +1\n y = row\n while is_wall(next_sec):\n #start counting length across, until right edge found\n length +=1\n col +=1\n next_sec = img_array.astype(int)[row:row+2, col:col+2]\n #create wall object and store in list \n new_wall = Wall(x*x_scale,y*y_scale,length*x_scale,wall_th*y_scale,wall_height)\n wall_list.append(new_wall)\n length = 0\n\n #check for vertical walls\n for col in range(img_array.shape[1]):\n for row in range(img_array.shape[0]):\n\n sec = img_array.astype(int)[row:row+2,col:col+2]\n \n if top_edge(sec): \n #check two steps below\n next_sec = img_array.astype(int)[row+1:row+3, col:col+2]\n next_next_sec = img_array.astype(int)[row+2:row+4, col:col+2]\n\n #if vertical wall, get coordinates and count length\n if is_wall(next_sec) and is_wall(next_next_sec):\n x = col\n y = row\n while is_wall(next_sec):\n #start counting length downwards, until bottom edge found\n length += 1\n row += 1\n next_sec = img_array.astype(int)[row:row+2, col:col+2]\n #create wall object and store in list\n new_wall = Wall(x*x_scale,y*y_scale,wall_th*x_scale,length*y_scale, wall_height)\n wall_list.append(new_wall)\n length = 0\n\n return wall_list", "def preprocessing(self, img):\n [a, contours, c] = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n return contours", "def draw_birdseye_points(self, image, points):\n point_image = np.copy(image)\n for point in points:\n cv2.circle(point_image, point, 5, [0, 0, 255], cv2.FILLED)\n\n cv2.imshow('image1', point_image)\n cv2.waitKey(1)", "def oneEntryPoints(self):\n points = []\n if self.isRed:\n for x in range(self.midWidth, self.width):\n for y in range(0, self.height):\n if (x, y) not in self.walls:\n adjacent = self.adjacentValidPoints(x, y)\n if len(adjacent) == 1:\n points.append((x, y))\n else:\n for x in range(0, self.midWidth-1):\n for y in range(0, self.height):\n if(x, y) not in self.walls:\n adjacent = self.adjacentValidPoints(x, y)\n if len(adjacent) == 1:\n points.append((x, y))\n return points", "def get_bin_color_features(img, 
p):\n return cv2.resize(img, p.spatial_size).ravel()" ]
[ "0.6686785", "0.6322959", "0.62514174", "0.61938", "0.6173828", "0.61347634", "0.61281", "0.6068525", "0.60538", "0.5940649", "0.5925896", "0.5898862", "0.5897576", "0.5851124", "0.5839876", "0.5823407", "0.5781983", "0.57549876", "0.5752311", "0.57283723", "0.57160974", "0.57095665", "0.5672619", "0.5664569", "0.5641804", "0.5635485", "0.56256807", "0.56202793", "0.559491", "0.55756503" ]
0.64154553
1
Function to convert velocity to spherical coordinates. Returns ~einsteinpy.coordinates.velocity.SphericalDifferential: the spherical representation of the velocity given in Cartesian coordinates.
def spherical_differential(self):
    r, theta, phi, v_r, v_t, v_p = self.convert_spherical()
    return SphericalDifferential(
        r * u.m,
        theta * u.rad,
        phi * u.rad,
        v_r * u.m / u.s,
        v_t * u.rad / u.s,
        v_p * u.rad / u.s,
    )
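The body of convert_spherical is not shown in this row; below is a plain-number sketch of the standard Cartesian-to-spherical velocity conversion it is assumed to perform, with the angular rates returned in rad/s to match the units above.

import numpy as np

def cartesian_to_spherical_velocity(x, y, z, v_x, v_y, v_z):
    # Positions: the usual spherical triple.
    r = np.sqrt(x**2 + y**2 + z**2)
    theta = np.arccos(z / r)
    phi = np.arctan2(y, x)
    # Velocities: time-derivatives of r, theta, phi.
    v_r = (x * v_x + y * v_y + z * v_z) / r
    v_theta = (z * v_r - r * v_z) / (r * np.sqrt(x**2 + y**2))
    v_phi = (x * v_y - y * v_x) / (x**2 + y**2)
    return r, theta, phi, v_r, v_theta, v_phi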
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _velocity_cartesian2spherical(pos,vel):\n\n \n #save cartesian position of each particle\n x=pos[:,0]\n y=pos[:,1]\n z=pos[:,2]\n\n #save cartesian velocities\n vx=vel[:,0]\n vy=vel[:,1]\n vz=vel[:,2]\n\n #convert to spherical coordinates\n pos_sph=_position_cartesian2spherical(pos) #spherical coordinates\n r=pos_sph[:,0]\n theta=pos_sph[:,1]\n phi=pos_sph[:,2]\n\n\n #compute spherical velocities\n vr = vx*np.sin(theta)*np.cos(phi) + vy*np.sin(theta)*np.sin(phi) + vz*np.cos(theta)\n vtheta = vx*np.cos(theta)*np.cos(phi) + vy*np.cos(theta)*np.sin(phi) - vz*np.sin(theta)\n vphi = -vx*np.sin(phi) + vy*np.cos(phi)\n\n if np.sum(r==0)!=0: #if some points are at the origin\n warnings.warn(\"Spherical velocity is not defined at origin. Returning 0.\")\n vr[r==0]=0\n vtheta[r==0]=0\n vphi[r==0]=0\n\n\n return np.dstack((vr,vtheta,vphi))[0]", "def _velocity_cylindrical2spherical(pos,vel):\n \n pos_cart=_position_cylindrical2cartesian(pos)\n vel_cart=_velocity_cylindrical2cartesian(pos,vel)\n vel_sph=_velocity_cartesian2spherical(pos_cart,vel_cart)\n\n return vel_sph", "def cartesian_to_spherical(self, v):\n x = Vector.x(v)\n y = Vector.y(v)\n z = Vector.z(v)\n r = Vector.length(v)\n phi = atan2(y, x)\n theta = acos(z / r)\n \n return [r, phi, theta]", "def spherical2cartesian(v):\n \n x = np.cos(v[0]) * np.cos(v[1]) \n y = np.cos(v[0]) * np.sin(v[1]) \n z = np.sin(v[0]) \n \n return [x,y,z]", "def cartesian2spherical(v):\n theta = np.arcsin(v[2]) \n phi = np.arctan2(v[1], v[0])\n \n return [theta, phi]", "def CartesianToSpherical(Cartesian):\n\n # x,y,z -> r,theta,phi\n x = Cartesian[:,0]\n y = Cartesian[:,1]\n z = Cartesian[:,2]\n r = np.sqrt(x*x + y*y + z*z)\n projR = np.sqrt(x*x + y*y)\n theta = np.arccos(z/r)\n phi = np.arctan2(y,x)\n theta[theta<0.] 
+=2.*np.pi\n \n if (len(Cartesian[0,:])==3):\n Spherical = np.column_stack((r,theta,phi))\n return Spherical\n else:\n # vx,vy,vz -> vr,vtheta,vphi\n vx = Cartesian[:,3]\n vy = Cartesian[:,4]\n vz = Cartesian[:,5]\n vr = (x*vx + y*vy + z*vz)/r\n vt = (z*vr - r*vz)/projR\n vp = r*np.sin(theta)*(vy*x-y*vx)/(projR*projR) \n Spherical = np.column_stack((r,theta,phi,vr,vt,vp))\n return Spherical", "def _velocity_spherical2cylindrical(pos,vel):\n \n pos_cart=_position_spherical2cartesian(pos)\n vel_cart=_velocity_spherical2cartesian(pos,vel)\n vel_cyl=_velocity_cartesian2cylindrical(pos_cart,vel_cart)\n\n return vel_cyl", "def _velocity_spherical2cartesian(pos,vel):\n \n #save cartesian position of each particle\n r=pos[:,0]\n theta=pos[:,1]\n phi=pos[:,2]\n\n\n #save cyindrical velocities\n vr=vel[:,0]\n vtheta=vel[:,1]\n vphi=vel[:,2]\n\n\n #compute cartesian velocities\n vx = vr*np.sin(theta)*np.cos(phi) + vtheta*np.cos(theta)*np.cos(phi) - vphi*np.sin(phi)\n vy = vr*np.sin(theta)*np.sin(phi) + vtheta*np.cos(theta)*np.sin(phi) + vphi*np.cos(phi)\n vz = vr*np.cos(theta) - vtheta*np.sin(theta)\n\n return np.dstack((vx,vy,vz))[0]", "def spherical_to_cartesian(self, r, phi, theta):\n x = r*cos(phi)*sin(theta)\n y = r*sin(phi)*sin(theta)\n z = r*cos(theta)\n \n return Vector(float(x), float(y), float(z))", "def unit_to_sphere(v):\n return (math.acos(v[2]), math.atan2(v[1], v[0]))", "def cartesianToSpherical(x=0, y=0, z=0):\n\n hxy = np.hypot(x, y)\n radius = np.hypot(hxy, z)\n altitude = np.arctan2(z, hxy)\n azimuth = np.arctan2(y, x)\n return altitude, azimuth, radius", "def SphericalToCartesian(Spherical):\n\n # r,theta,phi -> x,y,z\n r = Spherical[:,0]\n st = np.sin(Spherical[:,1])\n sp = np.sin(Spherical[:,2])\n ct = np.cos(Spherical[:,1])\n cp = np.cos(Spherical[:,2])\n x = r*st*cp\n y = r*st*sp\n z = r*ct\n\n if (len(Spherical[0,:])==3):\n Cartesian = np.column_stack((x,y,z))\n return Cartesian\n else:\n # vr,vtheta,vphi -> vx,vy,vz\n vr = Spherical[:,3]\n vt = Spherical[:,4]\n vp = Spherical[:,5]\n vx = vr*st*cp - vt*ct*cp - vp*sp\n vy = vr*st*sp + vt*ct*sp + vp*cp\n vz = vr*ct - vt*st\n Cartesian= np.column_stack((x,y,z,vx,vy,vz))\n return Cartesian", "def cartesian2spherical(cartesian):\n cartesian = np.array(cartesian).squeeze()\n x, y, z = cartesian\n distance = np.linalg.norm(cartesian)\n azimuth = np.arccos(z / distance)\n elevation = np.arctan2(y, x) # Use arctan2 instead of arctan to get proper sign!\n return np.array([distance, azimuth, elevation])", "def sphere_to_unit(v):\n sin_theta = math.sin(v[0])\n cos_theta = math.cos(v[0])\n return (sin_theta * math.cos(v[1]),\n sin_theta * math.sin(v[1]),\n cos_theta)", "def cartesian2spherical(vector: tuple[float, float, float]) -> tuple[float, float, float]:\n x, y, z = vector\n r = m.sqrt(x**2 + y**2 + z**2)\n # acos returns the angle in radians between 0 and pi\n theta = m.degrees(m.acos(z / r))\n # atan2 returns the angle in radians between -pi and pi\n phi = m.degrees(m.atan2(y, x))\n # lets ensure the angle in degrees is always between 0 and 360, as SHIELD-HIT12A requires\n if phi < 0.:\n phi += 360.\n return theta, phi, r", "def to_spherical(self):\n return cartesian_to_spherical(self.x, self.y, self.z)", "def spherical2cartesian(spherical):\n spherical = np.array(spherical).squeeze()\n distance, azimuth, elevation = spherical\n x = distance * np.sin(azimuth) * np.cos(elevation)\n y = distance * np.sin(azimuth) * np.sin(elevation)\n z = distance * np.cos(azimuth)\n return np.array([x, y, z])", "def spherical2cartesian(sphere):\n 
cart = np.zeros(sphere.shape, dtype=np.float64)\n sine_phi = np.sin(sphere[:, 2])\n\n cart[:, 0] = sphere[:, 0] * np.cos(sphere[:, 1]) * sine_phi\n cart[:, 1] = sphere[:, 0] * np.sin(sphere[:, 1]) * sine_phi\n cart[:, 2] = sphere[:, 0] * np.cos(sphere[:, 2])\n return cart", "def _position_cylindrical2spherical(pos):\n\n rho=pos[:,0]\n theta_cylindrical=pos[:,1]\n z=pos[:,2]\n\n r=np.sqrt(rho**2+z**2)\n theta_spherical=np.arctan2(rho,z)\n phi=theta_cylindrical\n\n return np.dstack((r,theta_spherical,phi))[0]", "def spherical_function(j, x, y, z):\n theta = np.arccos(z)\n phi = np.arctan2(y, x)\n return angular_function(j, theta, phi)", "def sphrad(vol):\n return (3.*vol/(4.*np.pi))**(1./3.)", "def _position_spherical2cylindrical(pos):\n \n\n r=pos[:,0]\n theta_spherical=pos[:,1]\n phi_spherical=pos[:,2]\n\n if any(theta_spherical>np.pi) or any(theta_spherical<0): #sanity check. not necessary for phi.\n raise ValueError, \"Theta beyond [0,pi]. Exiting.\"\n\n rho=r*np.sin(theta_spherical)\n theta_cylindrical=phi_spherical\n z=r*np.cos(theta_spherical)\n\n return np.dstack((rho,theta_cylindrical,z))[0]", "def spherical_to_cartesian(grid, vec=None):\n grid = np.atleast_2d(grid)\n\n if vec is None:\n return np.hstack([\n mkvc(grid[:, 0] * np.sin(grid[:, 2]) * np.cos(grid[:, 1]), 2),\n mkvc(grid[:, 0] * np.sin(grid[:, 2]) * np.sin(grid[:, 1]), 2),\n mkvc(grid[:, 0] * np.cos(grid[:, 2]), 2)\n ])\n\n if len(vec.shape) == 1 or vec.shape[1] == 1:\n vec = vec.reshape(grid.shape, order='F')\n\n x = (\n vec[:, 0] * np.sin(grid[:, 2]) * np.cos(grid[:, 1]) +\n vec[:, 2] * np.cos(grid[:, 2]) * np.cos(grid[:, 1]) -\n vec[:, 1] * np.sin(grid[:, 1])\n )\n y = (\n vec[:, 0] * np.sin(grid[:, 2]) * np.sin(grid[:, 1]) +\n vec[:, 2] * np.cos(grid[:, 2]) * np.sin(grid[:, 1]) -\n vec[:, 1] * np.cos(grid[:, 1])\n )\n z = (\n vec[:, 0] * np.cos(grid[:, 2]) -\n vec[:, 2] * np.sin(grid[:, 2])\n )\n\n newvec = [x, y, z]\n\n return np.vstack(newvec).T", "def calcul_v_sphere(r):\n volume = 4/3 * math.pi * (r ** 3)\n return volume", "def _position_spherical2cartesian(pos):\n \n r=pos[:,0]\n theta=pos[:,1]\n phi=pos[:,2]\n\n if any(theta>np.pi) or any(theta<0): #sanity check. not necessary for phi.\n raise ValueError, \"Theta beyond [0,pi]. 
Exiting.\"\n\n\n x=r*np.sin(theta)*np.cos(phi)\n y=r*np.sin(theta)*np.sin(phi)\n z=r*np.cos(theta)\n\n return np.dstack((x,y,z))[0]", "def cart2spheric(x, y, z):\n # doesn't compute r because chosen egal to 1\n with np.errstate(all='ignore'):\n theta = np.arccos(z)\n phi = np.arctan2(y, x)\n\n return theta, phi", "def spherical_parameters(self):\n phi_mu_list = []\n theta_mu_list = []\n \n for mu in self.mu_list:\n r, phi, theta = T_cartesian_to_spherical(x=mu[0], y=mu[1], z=mu[2])\n phi_mu_list.append(phi)\n theta_mu_list.append(theta)\n \n return phi_mu_list, theta_mu_list", "def cartesian2spherical(coords):\n sphere = np.zeros(coords.shape)\n xy_sq = coords[:, 0]**2 + coords[:, 1]**2\n sphere[:, 0] = np.sqrt(xy_sq + coords[:, 2]**2)\n sphere[:, 1] = np.arctan2(coords[:, 1], coords[:, 0])\n sphere[:, 2] = np.arctan2(np.sqrt(xy_sq), coords[:, 2])\n return sphere", "def sphericalToCartesian(altitude=0, azimuth=0, radius=0):\n\n rcos_theta = radius * np.cos(altitude)\n x = rcos_theta * np.cos(azimuth)\n y = rcos_theta * np.sin(azimuth)\n z = radius * np.sin(altitude)\n return x, y, z", "def _velocity_cylindrical2cartesian(pos,vel):\n \n \n #save cartesian position of each particle\n theta=pos[:,1]\n\n #save cyindrical velocities\n vr=vel[:,0]\n vtheta=vel[:,1]\n vz=vel[:,2]\n\n #compute cartesian velocities\n vx = vr*np.cos(theta) - vtheta*np.sin(theta)\n vy = vr*np.sin(theta) + vtheta*np.cos(theta)\n vz = vz\n\n return np.dstack((vx,vy,vz))[0]" ]
[ "0.7311547", "0.71452445", "0.6971911", "0.67958176", "0.6771112", "0.66505325", "0.6608041", "0.6568496", "0.6398689", "0.63865006", "0.6365045", "0.63537866", "0.6316992", "0.6254722", "0.61298764", "0.60265553", "0.60205036", "0.59701353", "0.59636456", "0.59336466", "0.5924651", "0.5890837", "0.58808523", "0.58608484", "0.5831081", "0.5824532", "0.5822491", "0.58188736", "0.5804805", "0.58022356" ]
0.7613973
1
Function for returning values in SI units. Returns ~numpy.ndarray: array containing values in SI units (m, rad, rad, m/s, rad/s, rad/s).
def si_values(self):
    element_list = [
        self.r.to(u.m),
        self.theta.to(u.rad),
        self.phi.to(u.rad),
        self.v_r.to(u.m / u.s),
        self.v_t.to(u.rad / u.s),
        self.v_p.to(u.rad / u.s),
    ]
    return np.array([e.value for e in element_list], dtype=float)
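The .to(u.m) calls above follow the astropy.units pattern. A standalone sketch of the same idea: convert each quantity to its SI unit, then strip the units into a plain float array.

import numpy as np
from astropy import units as u

elements = [(1.0 * u.km).to(u.m), (90 * u.deg).to(u.rad)]
si = np.array([e.value for e in elements], dtype=float)
# si -> array([1000.0, 1.5707963...])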
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unit_array(self):\n return self._data_array.values * units(self._units)", "def si_values(self):\n element_list = [\n self.x.to(u.m),\n self.y.to(u.m),\n self.z.to(u.m),\n self.v_x.to(u.m / u.s),\n self.v_y.to(u.m / u.s),\n self.v_z.to(u.m / u.s),\n ]\n return np.array([e.value for e in element_list], dtype=float)", "def kts_to_si(vals):\n return vals * meters_per_nautical_mile / 3600.0", "def mph_to_si(vals):\n return vals * meters_per_mile / 3600.0", "def get_converted_si_unit():\n units = request.args.get('units')\n response = ConvertUnit(units).convert()\n return jsonify(response)", "def convertUnits(self, varname, arr):\n if varname == \"SPDQ\" or varname == \"PHQ\":\n return arr*2.5e6/1000.\n return arr", "def convertToSI(*args):\n return _libsbml.Unit_convertToSI(*args)", "def kmh_to_si(vals):\n return vals * 1000.0 / 3600.0", "def Unit_convertToSI(*args):\n return _libsbml.Unit_convertToSI(*args)", "def si_2_kts(vals):\n return vals * 3600.0 / meters_per_nautical_mile", "def unit_array(self, values):\n self._data_array.values = values\n self._units = self._data_array.attrs['units'] = str(values.units)", "def convertToSI(*args):\n return _libsbml.UnitDefinition_convertToSI(*args)", "def eps_as_simarray(f, eps):\n if isinstance(eps, str):\n eps = units.Unit(eps)\n if not isinstance(eps, units.UnitBase):\n eps = eps * f['pos'].units\n logger.info(f\"Considering eps = {eps}\")\n eps_value = eps._scale\n eps_unit = eps/eps_value\n eps = SimArray(np.ones(len(f), dtype=f['mass'].dtype) * eps_value, eps_unit)\n return eps", "def unit_of_measurement(self) -> Any:\n return TEMP_CELSIUS", "def galactic_to_MS():\n return MS_MATRIX", "def spectral():\n c = _si.c.value\n h = _si.h.value\n hc = h * c\n two_pi = 2.0 * np.pi\n inv_m_spec = si.m**-1\n inv_m_ang = si.radian / si.m\n\n return Equivalency(\n [\n (si.m, si.Hz, lambda x: c / x),\n (si.m, si.J, lambda x: hc / x),\n (si.Hz, si.J, lambda x: h * x, lambda x: x / h),\n (si.m, inv_m_spec, lambda x: 1.0 / x),\n (si.Hz, inv_m_spec, lambda x: x / c, lambda x: c * x),\n (si.J, inv_m_spec, lambda x: x / hc, lambda x: hc * x),\n (inv_m_spec, inv_m_ang, lambda x: x * two_pi, lambda x: x / two_pi),\n (si.m, inv_m_ang, lambda x: two_pi / x),\n (si.Hz, inv_m_ang, lambda x: two_pi * x / c, lambda x: c * x / two_pi),\n (si.J, inv_m_ang, lambda x: x * two_pi / hc, lambda x: hc * x / two_pi),\n ],\n \"spectral\",\n )", "def extractSI(s):\n\n # If this is representing a range, just return it as is.\n if \"[\" in s:\n return (s, None)\n\n types = {\"T\": \"Time\", \"O\": \"Rate\", \"A\": \"Rate\", \"s\": \"Seconds\", \"%\": \"Percentage\"}\n du = s.split()\n # Preserve integers as such, so that columns like \"Threads\" generate an X axis \"1 2 3\",\n # rather than \"1.0 2.0 3.0\"\n num = float(du[0]) if \".\" in du[0] else int(du[0])\n units = du[1] if len(du) == 2 else \" \"\n if s[-1] == \" \":\n units = units + \" \"\n\n # http://physics.nist.gov/cuu/Units/prefixes.html\n factor = {\n \"Y\": 1e24,\n \"Z\": 1e21,\n \"E\": 1e18,\n \"P\": 1e15,\n \"T\": 1e12,\n \"G\": 1e9,\n \"M\": 1e6,\n \"k\": 1e3,\n \" \": 1,\n \"m\": -1e3, # Yes, I do mean that, see below for the explanation.\n \"u\": -1e6,\n \"n\": -1e9,\n \"p\": -1e12,\n \"f\": -1e15,\n \"a\": -1e18,\n \"z\": -1e21,\n \"y\": -1e24,\n }[units[0] if len(units) == 2 else \" \"]\n # print (\"units = '\" + units + \"'\" + \" factor=\" + str(factor))\n\n # Minor trickery here is an attempt to preserve accuracy by using a single divide,\n # rather than multiplying by 1/x, which introduces two roundings 
since 1/10 is not representable\n # perfectly in IEEE floating point.\n # (Not that this really matters, other than for cleanliness, since we're likely reading numbers with\n # at most five decimal digits of precision).\n return (\n num * factor if factor > 0 else num / -factor,\n types.get(units[-1], \"Count\"),\n )", "def SI(As):\n return [A for A in As if A.is_SI()]", "def psi(x):\n return np.sin(x)", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def getarray(str_array):\n if ':' in str_array:\n s=str_array.split(':')\n if 'm' in s[0]:\n s2=[cu.convlen(x) for x in s]\n else: \n s2=[float(x) for x in s]\n ns=round((s2[2]-s2[0])/s2[1])+1\n s3=np.linspace(s2[0],s2[2],ns)\n else:\n s3=cu.convlen(str_array)\n \n return s3", "def convert_units(self, units):\n self.unit_array = self.unit_array.to(units)", "def _get_values(self) -> ty.List[float]:\r\n ...", "def sin(fs: float, Ns: int, Ss: int) -> np.ndarray:\n t = np.arange(Ns)\n omega = 2 * np.pi * fs / Ss\n return np.sin(omega * t)", "def _compute_e_S1S2(self, x):\r\n psi = x[PSI_IDX]\r\n return np.array([-np.sin(psi), np.cos(psi)])", "def fluid_func(self):\n residual = []\n for fluid, x in self.inl[0].fluid.val.items():\n res = x * self.inl[0].m.val_SI\n for o in self.outl:\n res -= o.fluid.val[fluid] * o.m.val_SI\n residual += [res]\n return residual", "def get(self) -> list[float]:", "def meter_to_cart(self, meters):\n # 1 AU = 149.6e9 m\n pixels = meters*self.scale/149.6e9\n return pixels" ]
[ "0.6597943", "0.649187", "0.63240147", "0.6257522", "0.6255147", "0.6213908", "0.6213201", "0.6091031", "0.60509723", "0.58182454", "0.55522114", "0.55494267", "0.55082214", "0.54812056", "0.5480648", "0.54716265", "0.54494315", "0.5425861", "0.53436226", "0.5266943", "0.5266943", "0.5266943", "0.52616924", "0.5261035", "0.52532506", "0.52363974", "0.52344924", "0.52330065", "0.5227268", "0.5208177" ]
0.6856912
1
Function to convert velocity to Cartesian coordinates. Returns ~einsteinpy.coordinates.velocity.CartesianDifferential: the Cartesian representation of the velocity given in spherical coordinates.
def cartesian_differential(self):
    x, y, z, v_x, v_y, v_z = self.convert_cartesian()
    return CartesianDifferential(
        x * u.m, y * u.m, z * u.m,
        v_x * u.m / u.s, v_y * u.m / u.s, v_z * u.m / u.s
    )
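As above, convert_cartesian is not shown; here is a plain-number sketch of the inverse (spherical-to-Cartesian) velocity conversion it is assumed to perform, with v_theta and v_phi as angular rates in rad/s.

import numpy as np

def spherical_to_cartesian_velocity(r, theta, phi, v_r, v_theta, v_phi):
    st, ct = np.sin(theta), np.cos(theta)
    sp, cp = np.sin(phi), np.cos(phi)
    # Positions.
    x = r * st * cp
    y = r * st * sp
    z = r * ct
    # Velocities: time-derivatives of the position expressions.
    v_x = v_r * st * cp + r * v_theta * ct * cp - r * v_phi * st * sp
    v_y = v_r * st * sp + r * v_theta * ct * sp + r * v_phi * st * cp
    v_z = v_r * ct - r * v_theta * st
    return x, y, z, v_x, v_y, v_z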
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _velocity_spherical2cartesian(pos,vel):\n \n #save cartesian position of each particle\n r=pos[:,0]\n theta=pos[:,1]\n phi=pos[:,2]\n\n\n #save cyindrical velocities\n vr=vel[:,0]\n vtheta=vel[:,1]\n vphi=vel[:,2]\n\n\n #compute cartesian velocities\n vx = vr*np.sin(theta)*np.cos(phi) + vtheta*np.cos(theta)*np.cos(phi) - vphi*np.sin(phi)\n vy = vr*np.sin(theta)*np.sin(phi) + vtheta*np.cos(theta)*np.sin(phi) + vphi*np.cos(phi)\n vz = vr*np.cos(theta) - vtheta*np.sin(theta)\n\n return np.dstack((vx,vy,vz))[0]", "def _velocity_cylindrical2cartesian(pos,vel):\n \n \n #save cartesian position of each particle\n theta=pos[:,1]\n\n #save cyindrical velocities\n vr=vel[:,0]\n vtheta=vel[:,1]\n vz=vel[:,2]\n\n #compute cartesian velocities\n vx = vr*np.cos(theta) - vtheta*np.sin(theta)\n vy = vr*np.sin(theta) + vtheta*np.cos(theta)\n vz = vz\n\n return np.dstack((vx,vy,vz))[0]", "def _velocity_cartesian2spherical(pos,vel):\n\n \n #save cartesian position of each particle\n x=pos[:,0]\n y=pos[:,1]\n z=pos[:,2]\n\n #save cartesian velocities\n vx=vel[:,0]\n vy=vel[:,1]\n vz=vel[:,2]\n\n #convert to spherical coordinates\n pos_sph=_position_cartesian2spherical(pos) #spherical coordinates\n r=pos_sph[:,0]\n theta=pos_sph[:,1]\n phi=pos_sph[:,2]\n\n\n #compute spherical velocities\n vr = vx*np.sin(theta)*np.cos(phi) + vy*np.sin(theta)*np.sin(phi) + vz*np.cos(theta)\n vtheta = vx*np.cos(theta)*np.cos(phi) + vy*np.cos(theta)*np.sin(phi) - vz*np.sin(theta)\n vphi = -vx*np.sin(phi) + vy*np.cos(phi)\n\n if np.sum(r==0)!=0: #if some points are at the origin\n warnings.warn(\"Spherical velocity is not defined at origin. Returning 0.\")\n vr[r==0]=0\n vtheta[r==0]=0\n vphi[r==0]=0\n\n\n return np.dstack((vr,vtheta,vphi))[0]", "def _velocity_cartesian2cylindrical(pos,vel):\n \n \n \n #save cartesian velocities\n vx=vel[:,0]\n vy=vel[:,1]\n vz=vel[:,2]\n\n #convert to cylindrical coordinates\n pos_cyl=_position_cartesian2cylindrical(pos) #cylindrical coordinates\n theta=pos_cyl[:,1]\n\n #compute cylindrical velocities\n vr=vx*np.cos(theta) + vy*np.sin(theta)\n vtheta=-vx*np.sin(theta) + vy*np.cos(theta)\n vz=vz\n \n\n return np.dstack((vr,vtheta,vz))[0]", "def _velocity_spherical2cylindrical(pos,vel):\n \n pos_cart=_position_spherical2cartesian(pos)\n vel_cart=_velocity_spherical2cartesian(pos,vel)\n vel_cyl=_velocity_cartesian2cylindrical(pos_cart,vel_cart)\n\n return vel_cyl", "def spherical2cartesian(v):\n \n x = np.cos(v[0]) * np.cos(v[1]) \n y = np.cos(v[0]) * np.sin(v[1]) \n z = np.sin(v[0]) \n \n return [x,y,z]", "def _velocity_cylindrical2spherical(pos,vel):\n \n pos_cart=_position_cylindrical2cartesian(pos)\n vel_cart=_velocity_cylindrical2cartesian(pos,vel)\n vel_sph=_velocity_cartesian2spherical(pos_cart,vel_cart)\n\n return vel_sph", "def SphericalToCartesian(Spherical):\n\n # r,theta,phi -> x,y,z\n r = Spherical[:,0]\n st = np.sin(Spherical[:,1])\n sp = np.sin(Spherical[:,2])\n ct = np.cos(Spherical[:,1])\n cp = np.cos(Spherical[:,2])\n x = r*st*cp\n y = r*st*sp\n z = r*ct\n\n if (len(Spherical[0,:])==3):\n Cartesian = np.column_stack((x,y,z))\n return Cartesian\n else:\n # vr,vtheta,vphi -> vx,vy,vz\n vr = Spherical[:,3]\n vt = Spherical[:,4]\n vp = Spherical[:,5]\n vx = vr*st*cp - vt*ct*cp - vp*sp\n vy = vr*st*sp + vt*ct*sp + vp*cp\n vz = vr*ct - vt*st\n Cartesian= np.column_stack((x,y,z,vx,vy,vz))\n return Cartesian", "def spherical_to_cartesian(self, r, phi, theta):\n x = r*cos(phi)*sin(theta)\n y = r*sin(phi)*sin(theta)\n z = r*cos(theta)\n \n return Vector(float(x), float(y), float(z))", 
"def cartesian_to_spherical(self, v):\n x = Vector.x(v)\n y = Vector.y(v)\n z = Vector.z(v)\n r = Vector.length(v)\n phi = atan2(y, x)\n theta = acos(z / r)\n \n return [r, phi, theta]", "def GalacticToCartesian(Galactic,SolarPosition): \n \n # l,b,s->x,y,z\n cl = np.cos(Galactic[:,0])\n sl = np.sin(Galactic[:,0])\n cb = np.cos(Galactic[:,1])\n sb = np.sin(Galactic[:,1])\n x = SolarPosition[0]-Galactic[:,2]*cb*cl\n y = Galactic[:,2]*cb*sl\n z = Galactic[:,2]*sb+SolarPosition[1]\n\n if(len(Galactic[0,:])==3):\n Cartesian = np.column_stack((x,y,z))\n else:\n # vlos,mu_lcos(b),mu_b -> vx,vy,vz\n vl = pm2vel*Galactic[:,2]*Galactic[:,4]\n vb = pm2vel*Galactic[:,2]*Galactic[:,5]\n tmp2 = cb*Galactic[:,3]-sb*vb\n vx = cl*tmp2-sl*vl+SolarPosition[2]\n vy = sl*tmp2+cl*vl+SolarPosition[3]\n vz = sb*Galactic[:,3]+cb*vb+SolarPosition[4]\n Cartesian = np.column_stack((x,y,z,-vx,vy,vz))\n \n return Cartesian", "def cartesian2spherical(v):\n theta = np.arcsin(v[2]) \n phi = np.arctan2(v[1], v[0])\n \n return [theta, phi]", "def _position_spherical2cartesian(pos):\n \n r=pos[:,0]\n theta=pos[:,1]\n phi=pos[:,2]\n\n if any(theta>np.pi) or any(theta<0): #sanity check. not necessary for phi.\n raise ValueError, \"Theta beyond [0,pi]. Exiting.\"\n\n\n x=r*np.sin(theta)*np.cos(phi)\n y=r*np.sin(theta)*np.sin(phi)\n z=r*np.cos(theta)\n\n return np.dstack((x,y,z))[0]", "def CartesianToSpherical(Cartesian):\n\n # x,y,z -> r,theta,phi\n x = Cartesian[:,0]\n y = Cartesian[:,1]\n z = Cartesian[:,2]\n r = np.sqrt(x*x + y*y + z*z)\n projR = np.sqrt(x*x + y*y)\n theta = np.arccos(z/r)\n phi = np.arctan2(y,x)\n theta[theta<0.] +=2.*np.pi\n \n if (len(Cartesian[0,:])==3):\n Spherical = np.column_stack((r,theta,phi))\n return Spherical\n else:\n # vx,vy,vz -> vr,vtheta,vphi\n vx = Cartesian[:,3]\n vy = Cartesian[:,4]\n vz = Cartesian[:,5]\n vr = (x*vx + y*vy + z*vz)/r\n vt = (z*vr - r*vz)/projR\n vp = r*np.sin(theta)*(vy*x-y*vx)/(projR*projR) \n Spherical = np.column_stack((r,theta,phi,vr,vt,vp))\n return Spherical", "def spherical2cartesian(spherical):\n spherical = np.array(spherical).squeeze()\n distance, azimuth, elevation = spherical\n x = distance * np.sin(azimuth) * np.cos(elevation)\n y = distance * np.sin(azimuth) * np.sin(elevation)\n z = distance * np.cos(azimuth)\n return np.array([x, y, z])", "def _position_cartesian2cylindrical(pos):\n\n \n #save cartesian position of each particle\n x=pos[:,0]\n y=pos[:,1]\n z=pos[:,2]\n\n rho= np.sqrt(x**2+y**2)\n theta=np.arctan2(y,x)\n\n\n return np.dstack((rho,theta,z))[0]", "def spherical2cartesian(sphere):\n cart = np.zeros(sphere.shape, dtype=np.float64)\n sine_phi = np.sin(sphere[:, 2])\n\n cart[:, 0] = sphere[:, 0] * np.cos(sphere[:, 1]) * sine_phi\n cart[:, 1] = sphere[:, 0] * np.sin(sphere[:, 1]) * sine_phi\n cart[:, 2] = sphere[:, 0] * np.cos(sphere[:, 2])\n return cart", "def cartesian2spherical(cartesian):\n cartesian = np.array(cartesian).squeeze()\n x, y, z = cartesian\n distance = np.linalg.norm(cartesian)\n azimuth = np.arccos(z / distance)\n elevation = np.arctan2(y, x) # Use arctan2 instead of arctan to get proper sign!\n return np.array([distance, azimuth, elevation])", "def PolarToCartesian(Polar):\n\t \n # R,phi,z -> x,y,z\n cp = np.cos(Polar[:,1])\n sp = np.sin(Polar[:,1])\n x = Polar[:,0] * cp\n y = Polar[:,0] * sp\n z = Polar[:,2]\n\n if (len(Polar[0,:])==3):\n Cartesian = np.column_stack((x,y,z))\n else:\n # vR,vphi,vz -> vx,vy,vz\n vx = Polar[:,3]*cp-Polar[:,4]*sp\n vy = Polar[:,4]*cp+Polar[:,3]*sp\n vz = Polar[:,5]\n Cartesian = 
np.column_stack((x,y,z,vx,vy,vz))\n \n return Cartesian", "def CartesianToPolar(Cartesian):\n \n # x,y,z -> R,phi,z\n R = np.sqrt(Cartesian[:,0]*Cartesian[:,0]+Cartesian[:,1]*Cartesian[:,1])\n phi = np.arctan2(Cartesian[:,1],Cartesian[:,0])\n z = Cartesian[:,2]\n phi[phi<0.] += 2.*np.pi\n if (len(Cartesian[0,:])==3):\n Polar = np.column_stack((R,phi,z))\n else:\n # vx,vy,vz -> vR,vphi,vz\n cp = np.cos(phi)\n sp = np.sin(phi)\n vR = Cartesian[:,3]*cp+Cartesian[:,4]*sp\n vphi = Cartesian[:,4]*cp-Cartesian[:,3]*sp\n vz = Cartesian[:,5]\n Polar = np.column_stack((R,phi,z,vR,vphi,vz))\n\t\t\n return Polar", "def to_cartesian(self):\n\n if self.cartesian is None:\n theta = math.radians(self.lat)\n phi = math.radians(self.long)\n x = R_EARTH * math.cos(theta) * math.cos(phi)\n y = R_EARTH * math.cos(theta) * math.sin(phi)\n z = R_EARTH * math.sin(theta)\n self.cartesian = CartesianPoint(x, y, z)\n return self.cartesian", "def cartesian2polar(cartesian):\n cartesian = np.array(cartesian).squeeze()\n x, y = cartesian\n r = np.linalg.norm([x, y])\n azimuth = np.arctan2(y, x)\n return np.array([r, azimuth])", "def polar2cartesian(polar):\n polar = np.array(polar).squeeze()\n r, azimuth = polar\n x = r * np.cos(azimuth)\n y = r * np.sin(azimuth)\n return np.array([x, y])", "def _spherical_to_cartesian(ra, dec):\n rar = np.radians(ra)\n decr = np.radians(dec)\n\n x = np.cos(rar) * np.cos(decr)\n y = np.sin(rar) * np.cos(decr)\n z = np.sin(decr)\n \n return x, y, z", "def spherical_to_cartesian(grid, vec=None):\n grid = np.atleast_2d(grid)\n\n if vec is None:\n return np.hstack([\n mkvc(grid[:, 0] * np.sin(grid[:, 2]) * np.cos(grid[:, 1]), 2),\n mkvc(grid[:, 0] * np.sin(grid[:, 2]) * np.sin(grid[:, 1]), 2),\n mkvc(grid[:, 0] * np.cos(grid[:, 2]), 2)\n ])\n\n if len(vec.shape) == 1 or vec.shape[1] == 1:\n vec = vec.reshape(grid.shape, order='F')\n\n x = (\n vec[:, 0] * np.sin(grid[:, 2]) * np.cos(grid[:, 1]) +\n vec[:, 2] * np.cos(grid[:, 2]) * np.cos(grid[:, 1]) -\n vec[:, 1] * np.sin(grid[:, 1])\n )\n y = (\n vec[:, 0] * np.sin(grid[:, 2]) * np.sin(grid[:, 1]) +\n vec[:, 2] * np.cos(grid[:, 2]) * np.sin(grid[:, 1]) -\n vec[:, 1] * np.cos(grid[:, 1])\n )\n z = (\n vec[:, 0] * np.cos(grid[:, 2]) -\n vec[:, 2] * np.sin(grid[:, 2])\n )\n\n newvec = [x, y, z]\n\n return np.vstack(newvec).T", "def _position_cylindrical2cartesian(pos):\n \n rho=pos[:,0]\n theta=pos[:,1]\n z=pos[:,2]\n\n x=rho*np.cos(theta)\n y=rho*np.sin(theta)\n z=z\n\n return np.dstack((x,y,z))[0]", "def sphericalToCartesian(altitude=0, azimuth=0, radius=0):\n\n rcos_theta = radius * np.cos(altitude)\n x = rcos_theta * np.cos(azimuth)\n y = rcos_theta * np.sin(azimuth)\n z = radius * np.sin(altitude)\n return x, y, z", "def spherical_2_cartesian(grid, vec=None):\n return spherical_to_cartesian(grid, vec)", "def to_cartesian(self):\n w = 1.73205 # sqrt(3)\n h = 2\n dx = 0.5 * w if self.y % 2 == 1 else 0\n x = 0.5 * w + self.x * w + dx\n y = 0.5 * h + 0.75 * self.y * h\n return (x, y)", "def cylindrical2cartesian(cylinder):\n cart = np.zeros(cylinder.shape)\n cart[:, 0] = cylinder[:, 0] * np.cos(cylinder[:, 1])\n cart[:, 1] = cylinder[:, 0] * np.sin(cylinder[:, 1])\n cart[:, 2] = cylinder[:, 2]\n return cart" ]
[ "0.75717354", "0.749493", "0.73145956", "0.7314483", "0.72647214", "0.7194624", "0.7028506", "0.6992018", "0.6795275", "0.67766845", "0.6697479", "0.6671886", "0.6653969", "0.6644937", "0.65747213", "0.6547389", "0.64662033", "0.6464509", "0.6451586", "0.64358544", "0.64145577", "0.6383352", "0.6353237", "0.6343334", "0.63371915", "0.63320756", "0.63124704", "0.63086224", "0.6297687", "0.62810266" ]
0.752729
1
Function for returning values in SI units. Returns ~numpy.ndarray: array containing values in SI units (m, rad, rad, m/s, rad/s, rad/s).
def si_values(self):
    element_list = [
        self.r.to(u.m),
        self.theta.to(u.rad),
        self.phi.to(u.rad),
        self.v_r.to(u.m / u.s),
        self.v_t.to(u.rad / u.s),
        self.v_p.to(u.rad / u.s),
    ]
    return np.array([e.value for e in element_list], dtype=float)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unit_array(self):\n return self._data_array.values * units(self._units)", "def si_values(self):\n element_list = [\n self.x.to(u.m),\n self.y.to(u.m),\n self.z.to(u.m),\n self.v_x.to(u.m / u.s),\n self.v_y.to(u.m / u.s),\n self.v_z.to(u.m / u.s),\n ]\n return np.array([e.value for e in element_list], dtype=float)", "def kts_to_si(vals):\n return vals * meters_per_nautical_mile / 3600.0", "def mph_to_si(vals):\n return vals * meters_per_mile / 3600.0", "def get_converted_si_unit():\n units = request.args.get('units')\n response = ConvertUnit(units).convert()\n return jsonify(response)", "def convertUnits(self, varname, arr):\n if varname == \"SPDQ\" or varname == \"PHQ\":\n return arr*2.5e6/1000.\n return arr", "def convertToSI(*args):\n return _libsbml.Unit_convertToSI(*args)", "def kmh_to_si(vals):\n return vals * 1000.0 / 3600.0", "def Unit_convertToSI(*args):\n return _libsbml.Unit_convertToSI(*args)", "def si_2_kts(vals):\n return vals * 3600.0 / meters_per_nautical_mile", "def unit_array(self, values):\n self._data_array.values = values\n self._units = self._data_array.attrs['units'] = str(values.units)", "def convertToSI(*args):\n return _libsbml.UnitDefinition_convertToSI(*args)", "def eps_as_simarray(f, eps):\n if isinstance(eps, str):\n eps = units.Unit(eps)\n if not isinstance(eps, units.UnitBase):\n eps = eps * f['pos'].units\n logger.info(f\"Considering eps = {eps}\")\n eps_value = eps._scale\n eps_unit = eps/eps_value\n eps = SimArray(np.ones(len(f), dtype=f['mass'].dtype) * eps_value, eps_unit)\n return eps", "def unit_of_measurement(self) -> Any:\n return TEMP_CELSIUS", "def galactic_to_MS():\n return MS_MATRIX", "def spectral():\n c = _si.c.value\n h = _si.h.value\n hc = h * c\n two_pi = 2.0 * np.pi\n inv_m_spec = si.m**-1\n inv_m_ang = si.radian / si.m\n\n return Equivalency(\n [\n (si.m, si.Hz, lambda x: c / x),\n (si.m, si.J, lambda x: hc / x),\n (si.Hz, si.J, lambda x: h * x, lambda x: x / h),\n (si.m, inv_m_spec, lambda x: 1.0 / x),\n (si.Hz, inv_m_spec, lambda x: x / c, lambda x: c * x),\n (si.J, inv_m_spec, lambda x: x / hc, lambda x: hc * x),\n (inv_m_spec, inv_m_ang, lambda x: x * two_pi, lambda x: x / two_pi),\n (si.m, inv_m_ang, lambda x: two_pi / x),\n (si.Hz, inv_m_ang, lambda x: two_pi * x / c, lambda x: c * x / two_pi),\n (si.J, inv_m_ang, lambda x: x * two_pi / hc, lambda x: hc * x / two_pi),\n ],\n \"spectral\",\n )", "def extractSI(s):\n\n # If this is representing a range, just return it as is.\n if \"[\" in s:\n return (s, None)\n\n types = {\"T\": \"Time\", \"O\": \"Rate\", \"A\": \"Rate\", \"s\": \"Seconds\", \"%\": \"Percentage\"}\n du = s.split()\n # Preserve integers as such, so that columns like \"Threads\" generate an X axis \"1 2 3\",\n # rather than \"1.0 2.0 3.0\"\n num = float(du[0]) if \".\" in du[0] else int(du[0])\n units = du[1] if len(du) == 2 else \" \"\n if s[-1] == \" \":\n units = units + \" \"\n\n # http://physics.nist.gov/cuu/Units/prefixes.html\n factor = {\n \"Y\": 1e24,\n \"Z\": 1e21,\n \"E\": 1e18,\n \"P\": 1e15,\n \"T\": 1e12,\n \"G\": 1e9,\n \"M\": 1e6,\n \"k\": 1e3,\n \" \": 1,\n \"m\": -1e3, # Yes, I do mean that, see below for the explanation.\n \"u\": -1e6,\n \"n\": -1e9,\n \"p\": -1e12,\n \"f\": -1e15,\n \"a\": -1e18,\n \"z\": -1e21,\n \"y\": -1e24,\n }[units[0] if len(units) == 2 else \" \"]\n # print (\"units = '\" + units + \"'\" + \" factor=\" + str(factor))\n\n # Minor trickery here is an attempt to preserve accuracy by using a single divide,\n # rather than multiplying by 1/x, which introduces two roundings 
since 1/10 is not representable\n # perfectly in IEEE floating point.\n # (Not that this really matters, other than for cleanliness, since we're likely reading numbers with\n # at most five decimal digits of precision).\n return (\n num * factor if factor > 0 else num / -factor,\n types.get(units[-1], \"Count\"),\n )", "def SI(As):\n return [A for A in As if A.is_SI()]", "def psi(x):\n return np.sin(x)", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def unit_of_measurement(self):\n return TEMP_CELSIUS", "def getarray(str_array):\n if ':' in str_array:\n s=str_array.split(':')\n if 'm' in s[0]:\n s2=[cu.convlen(x) for x in s]\n else: \n s2=[float(x) for x in s]\n ns=round((s2[2]-s2[0])/s2[1])+1\n s3=np.linspace(s2[0],s2[2],ns)\n else:\n s3=cu.convlen(str_array)\n \n return s3", "def convert_units(self, units):\n self.unit_array = self.unit_array.to(units)", "def _get_values(self) -> ty.List[float]:\r\n ...", "def sin(fs: float, Ns: int, Ss: int) -> np.ndarray:\n t = np.arange(Ns)\n omega = 2 * np.pi * fs / Ss\n return np.sin(omega * t)", "def _compute_e_S1S2(self, x):\r\n psi = x[PSI_IDX]\r\n return np.array([-np.sin(psi), np.cos(psi)])", "def fluid_func(self):\n residual = []\n for fluid, x in self.inl[0].fluid.val.items():\n res = x * self.inl[0].m.val_SI\n for o in self.outl:\n res -= o.fluid.val[fluid] * o.m.val_SI\n residual += [res]\n return residual", "def get(self) -> list[float]:", "def meter_to_cart(self, meters):\n # 1 AU = 149.6e9 m\n pixels = meters*self.scale/149.6e9\n return pixels" ]
[ "0.6597943", "0.649187", "0.63240147", "0.6257522", "0.6255147", "0.6213908", "0.6213201", "0.6091031", "0.60509723", "0.58182454", "0.55522114", "0.55494267", "0.55082214", "0.54812056", "0.5480648", "0.54716265", "0.54494315", "0.5425861", "0.53436226", "0.5266943", "0.5266943", "0.5266943", "0.52616924", "0.5261035", "0.52532506", "0.52363974", "0.52344924", "0.52330065", "0.5227268", "0.5208177" ]
0.6856912
0
Function to convert velocity to spherical coordinates. Returns ~einsteinpy.coordinates.velocity.SphericalDifferential: the spherical representation of the velocity given in Boyer-Lindquist coordinates.
def spherical_differential(self):
    r, theta, phi, v_r, v_t, v_p = self.convert_spherical()
    return SphericalDifferential(
        r * u.m,
        theta * u.rad,
        phi * u.rad,
        v_r * u.m / u.s,
        v_t * u.rad / u.s,
        v_p * u.rad / u.s,
    )
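A quick consistency check on the two hypothetical sketches given earlier (cartesian_to_spherical_velocity and spherical_to_cartesian_velocity): converting a made-up spherical state to Cartesian and back should recover it.

import numpy as np

# Hypothetical state: r, theta, phi, v_r, v_theta, v_phi.
state = (2.0, 0.7, 1.2, 0.3, -0.1, 0.05)
cart = spherical_to_cartesian_velocity(*state)
back = cartesian_to_spherical_velocity(*cart)
assert np.allclose(back, state)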
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _velocity_cylindrical2spherical(pos,vel):\n \n pos_cart=_position_cylindrical2cartesian(pos)\n vel_cart=_velocity_cylindrical2cartesian(pos,vel)\n vel_sph=_velocity_cartesian2spherical(pos_cart,vel_cart)\n\n return vel_sph", "def _velocity_cartesian2spherical(pos,vel):\n\n \n #save cartesian position of each particle\n x=pos[:,0]\n y=pos[:,1]\n z=pos[:,2]\n\n #save cartesian velocities\n vx=vel[:,0]\n vy=vel[:,1]\n vz=vel[:,2]\n\n #convert to spherical coordinates\n pos_sph=_position_cartesian2spherical(pos) #spherical coordinates\n r=pos_sph[:,0]\n theta=pos_sph[:,1]\n phi=pos_sph[:,2]\n\n\n #compute spherical velocities\n vr = vx*np.sin(theta)*np.cos(phi) + vy*np.sin(theta)*np.sin(phi) + vz*np.cos(theta)\n vtheta = vx*np.cos(theta)*np.cos(phi) + vy*np.cos(theta)*np.sin(phi) - vz*np.sin(theta)\n vphi = -vx*np.sin(phi) + vy*np.cos(phi)\n\n if np.sum(r==0)!=0: #if some points are at the origin\n warnings.warn(\"Spherical velocity is not defined at origin. Returning 0.\")\n vr[r==0]=0\n vtheta[r==0]=0\n vphi[r==0]=0\n\n\n return np.dstack((vr,vtheta,vphi))[0]", "def unit_to_sphere(v):\n return (math.acos(v[2]), math.atan2(v[1], v[0]))", "def sphere_to_unit(v):\n sin_theta = math.sin(v[0])\n cos_theta = math.cos(v[0])\n return (sin_theta * math.cos(v[1]),\n sin_theta * math.sin(v[1]),\n cos_theta)", "def cartesian_to_spherical(self, v):\n x = Vector.x(v)\n y = Vector.y(v)\n z = Vector.z(v)\n r = Vector.length(v)\n phi = atan2(y, x)\n theta = acos(z / r)\n \n return [r, phi, theta]", "def cartesian2spherical(v):\n theta = np.arcsin(v[2]) \n phi = np.arctan2(v[1], v[0])\n \n return [theta, phi]", "def _velocity_spherical2cylindrical(pos,vel):\n \n pos_cart=_position_spherical2cartesian(pos)\n vel_cart=_velocity_spherical2cartesian(pos,vel)\n vel_cyl=_velocity_cartesian2cylindrical(pos_cart,vel_cart)\n\n return vel_cyl", "def spherical2cartesian(v):\n \n x = np.cos(v[0]) * np.cos(v[1]) \n y = np.cos(v[0]) * np.sin(v[1]) \n z = np.sin(v[0]) \n \n return [x,y,z]", "def sphrad(vol):\n return (3.*vol/(4.*np.pi))**(1./3.)", "def calcul_v_sphere(r):\n volume = 4/3 * math.pi * (r ** 3)\n return volume", "def _velocity_spherical2cartesian(pos,vel):\n \n #save cartesian position of each particle\n r=pos[:,0]\n theta=pos[:,1]\n phi=pos[:,2]\n\n\n #save cyindrical velocities\n vr=vel[:,0]\n vtheta=vel[:,1]\n vphi=vel[:,2]\n\n\n #compute cartesian velocities\n vx = vr*np.sin(theta)*np.cos(phi) + vtheta*np.cos(theta)*np.cos(phi) - vphi*np.sin(phi)\n vy = vr*np.sin(theta)*np.sin(phi) + vtheta*np.cos(theta)*np.sin(phi) + vphi*np.cos(phi)\n vz = vr*np.cos(theta) - vtheta*np.sin(theta)\n\n return np.dstack((vx,vy,vz))[0]", "def sphvol(r):\n return (4./3.)*np.pi*(r**3.)", "def uv_to_spddir(u, v):\n if isinstance(u, list) or isinstance(v, list):\n u = np.array(u)\n v = np.array(v)\n\n wdir = (270 - np.rad2deg(np.arctan2(v, u))) % 360\n wspd = np.sqrt(u * u + v * v)\n\n return wspd.round(3), wdir.round(3)", "def spherical_function(j, x, y, z):\n theta = np.arccos(z)\n phi = np.arctan2(y, x)\n return angular_function(j, theta, phi)", "def spherical_to_cartesian(self, r, phi, theta):\n x = r*cos(phi)*sin(theta)\n y = r*sin(phi)*sin(theta)\n z = r*cos(theta)\n \n return Vector(float(x), float(y), float(z))", "def cartesianToSpherical(x=0, y=0, z=0):\n\n hxy = np.hypot(x, y)\n radius = np.hypot(hxy, z)\n altitude = np.arctan2(z, hxy)\n azimuth = np.arctan2(y, x)\n return altitude, azimuth, radius", "def carla_angular_velocity_to_numpy_vector(carla_angular_velocity):\n return 
numpy.array([math.radians(carla_angular_velocity.x), \n -math.radians(carla_angular_velocity.y), \n -math.radians(carla_angular_velocity.z)])", "def CartesianToSpherical(Cartesian):\n\n # x,y,z -> r,theta,phi\n x = Cartesian[:,0]\n y = Cartesian[:,1]\n z = Cartesian[:,2]\n r = np.sqrt(x*x + y*y + z*z)\n projR = np.sqrt(x*x + y*y)\n theta = np.arccos(z/r)\n phi = np.arctan2(y,x)\n theta[theta<0.] +=2.*np.pi\n \n if (len(Cartesian[0,:])==3):\n Spherical = np.column_stack((r,theta,phi))\n return Spherical\n else:\n # vx,vy,vz -> vr,vtheta,vphi\n vx = Cartesian[:,3]\n vy = Cartesian[:,4]\n vz = Cartesian[:,5]\n vr = (x*vx + y*vy + z*vz)/r\n vt = (z*vr - r*vz)/projR\n vp = r*np.sin(theta)*(vy*x-y*vx)/(projR*projR) \n Spherical = np.column_stack((r,theta,phi,vr,vt,vp))\n return Spherical", "def cylindrical2spherical(cyl):\n sph = np.zeros(cyl.shape)\n sph[:, 0] = np.sqrt(cyl[:, 0]**2 + cyl[:, 2]**2)\n sph[:, 1] = cyl[:, 1]\n sph[:, 2] = np.arctan2(cyl[:, 0], cyl[:, 2])\n return sph", "def spherical_parameters(self):\n phi_mu_list = []\n theta_mu_list = []\n \n for mu in self.mu_list:\n r, phi, theta = T_cartesian_to_spherical(x=mu[0], y=mu[1], z=mu[2])\n phi_mu_list.append(phi)\n theta_mu_list.append(theta)\n \n return phi_mu_list, theta_mu_list", "def _position_cylindrical2spherical(pos):\n\n rho=pos[:,0]\n theta_cylindrical=pos[:,1]\n z=pos[:,2]\n\n r=np.sqrt(rho**2+z**2)\n theta_spherical=np.arctan2(rho,z)\n phi=theta_cylindrical\n\n return np.dstack((r,theta_spherical,phi))[0]", "def spddir_to_uv(wspd, wdir):\n if isinstance(wspd, list) or isinstance(wdir, list):\n wspd = np.array(wspd, dtype=float)\n wdir = np.array(wdir, dtype=float)\n\n rad = 4.0 * np.arctan(1) / 180.0\n u = -wspd * np.sin(rad * wdir)\n v = -wspd * np.cos(rad * wdir)\n\n # If the speed is zero, then u and v should be set to zero (not NaN)\n if hasattr(u, \"__len__\"):\n u[np.where(wspd == 0)] = 0\n v[np.where(wspd == 0)] = 0\n elif wspd == 0:\n u = float(0)\n v = float(0)\n\n return np.round(u, 3), np.round(v, 3)", "def _position_spherical2cylindrical(pos):\n \n\n r=pos[:,0]\n theta_spherical=pos[:,1]\n phi_spherical=pos[:,2]\n\n if any(theta_spherical>np.pi) or any(theta_spherical<0): #sanity check. not necessary for phi.\n raise ValueError, \"Theta beyond [0,pi]. 
Exiting.\"\n\n rho=r*np.sin(theta_spherical)\n theta_cylindrical=phi_spherical\n z=r*np.cos(theta_spherical)\n\n return np.dstack((rho,theta_cylindrical,z))[0]", "def E2V(E):\n# for energy in mev returns velocity in m/s\n return sqrt(E/5.227e-6)", "def spherical_distance(coord_pair, radius=MEAN_EARTH_RADIUS_M):\n\n return spherical_distance_haversine(np.array([coord_pair]), radius)[0]", "def bv_to_radius(b_minus_v):\n # Boyajian 2012\n X = b_minus_v\n a0 = 0.3830\n a1 = 0.9907\n a2 = -0.6038\n Y = 0\n # Ignore metallicity\n a3 = 0\n a4 = 0\n a5 = 0\n return (a0 + a1 * X + a2 * X ** 2 + a3 * X * Y +\n a4 * Y + a5 * Y ** 2) * R_sun", "def to_spherical(self):\n return cartesian_to_spherical(self.x, self.y, self.z)", "def SphericalToCartesian(Spherical):\n\n # r,theta,phi -> x,y,z\n r = Spherical[:,0]\n st = np.sin(Spherical[:,1])\n sp = np.sin(Spherical[:,2])\n ct = np.cos(Spherical[:,1])\n cp = np.cos(Spherical[:,2])\n x = r*st*cp\n y = r*st*sp\n z = r*ct\n\n if (len(Spherical[0,:])==3):\n Cartesian = np.column_stack((x,y,z))\n return Cartesian\n else:\n # vr,vtheta,vphi -> vx,vy,vz\n vr = Spherical[:,3]\n vt = Spherical[:,4]\n vp = Spherical[:,5]\n vx = vr*st*cp - vt*ct*cp - vp*sp\n vy = vr*st*sp + vt*ct*sp + vp*cp\n vz = vr*ct - vt*st\n Cartesian= np.column_stack((x,y,z,vx,vy,vz))\n return Cartesian", "def odf(self, sphere):\n self.gqi_vector = self.model.cache_get('gqi_vector', key=sphere)\n if self.gqi_vector is None:\n if self.model.method == 'gqi2':\n H = squared_radial_component\n # print self.gqi_vector.shape\n self.gqi_vector = np.real(H(np.dot(\n self.model.b_vector, sphere.vertices.T) *\n self.model.Lambda))\n if self.model.method == 'standard':\n self.gqi_vector = np.real(np.sinc(np.dot(\n self.model.b_vector, sphere.vertices.T) *\n self.model.Lambda / np.pi))\n self.model.cache_set('gqi_vector', sphere, self.gqi_vector)\n\n return np.dot(self.data, self.gqi_vector)", "def spherical_gradient(l, r, sph_func):\n assert l%1 == 0 and l >= 0, \"l must be a natural number\"\n return np.array((sph_func(l, r, derivative=True), 0, 0))" ]
[ "0.672321", "0.6639501", "0.635419", "0.6353", "0.6298572", "0.61776495", "0.6175607", "0.61004883", "0.601829", "0.5944876", "0.58419675", "0.5830476", "0.5705863", "0.5697523", "0.56473684", "0.5615951", "0.5607414", "0.55717176", "0.55411494", "0.5518485", "0.54982543", "0.54708976", "0.5470521", "0.54353696", "0.5396941", "0.53950286", "0.5391145", "0.5388428", "0.5373035", "0.53496855" ]
0.71730846
1
remove background from image
def remove_background(img):
    mask = np.zeros(img.shape[:2], np.uint8)
    bgdModel = np.zeros((1, 65), np.float64)
    fgdModel = np.zeros((1, 65), np.float64)
    rect = (50, 50, 450, 290)
    cv.grabCut(img, mask, rect, bgdModel, fgdModel, 5, cv.GC_INIT_WITH_RECT)
    mask2 = np.where((mask == 2)|(mask == 0), 0, 1).astype('uint8')
    img = img*mask2[:, :, np.newaxis]
    return img
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_background(img):\n\n img = img.astype(np.uint8)\n # Binarize the image using OTSU's algorithm. This is used to find the center\n # of mass of the image, and find the threshold to remove background noise\n threshold, _ = cv2.threshold(img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n # Remove noise - anything higher than the threshold. Note that the image is still grayscale\n img[img > threshold] = 255\n\n return img", "def remove_background(img):\n \n img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n\n img = img.astype(np.uint8)\n # Binarize the image using OTSU's algorithm. This is used to find the center\n # of mass of the image, and find the threshold to remove background noise\n threshold, _ = cv2.threshold(img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n \n # Remove noise - anything higher than the threshold. Note that the image is still grayscale\n img[img > threshold] = 255\n\n return img", "def __delete_background(self):\n if self.tag is not None:\n self.c.delete(self.tag)\n self.tag = None\n if self._im:\n self._im.im = None # silly but... :)\n self._im = None\n if self._bgcolor:\n # was setted, so restore old\n self.c[\"bg\"] = self._oldbgcolor", "def remove_background(self, frame):\n logging.debug(\"Performing background subtraction\")\n\n #cv.CvtColor(frame, self.Igray, cv.CV_BGR2GRAY)\n cv.Sub(frame, self.bg, self.Imask)\n\n return self.Imask", "def remove_background1(img):\n #img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n img = img.astype(np.uint8)\n # Binarize the image using OTSU's algorithm. This is used to find the center\n # of mass of the image, and find the threshold to remove background noise\n threshold, _ = cv2.threshold(img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n # Remove noise - anything higher than the threshold. 
Note that the image is still grayscale\n img[img > threshold] = 255\n\n return img", "def OnEraseBackground(self, event):\n\n dc = wx.ClientDC(self)\n bmp = wx.Bitmap(\"bg.png\", wx.BITMAP_TYPE_PNG)\n dc.DrawBitmap(bmp, 0, 0, 0)", "def OnEraseBackground(self, event):\r\n\r\n # Can we actually do something here (or in OnPaint()) To Handle\r\n # background images that are stretchable or always centered?\r\n # I tried but I get enormous flickering...\r\n \r\n if not self._backgroundImage:\r\n event.Skip()\r\n return\r\n\r\n if self._imageStretchStyle == _StyleTile:\r\n dc = event.GetDC()\r\n\r\n if not dc:\r\n dc = wx.ClientDC(self)\r\n rect = self.GetUpdateRegion().GetBox()\r\n dc.SetClippingRect(rect)\r\n\r\n self.TileBackground(dc)", "def remove_background(frame, bgModel):\n \n global learningRate\n fgmask = bgModel.apply(frame, learningRate=learningRate)\n kernel = np.ones((3, 3), np.uint8)\n fgmask = cv2.erode(fgmask, kernel, iterations=1)\n res = cv2.bitwise_and(frame, frame, mask=fgmask)\n\n return res", "def strip(self):\n result = library.MagickStripImage(self.wand)\n if not result:\n self.raise_exception()", "def OnEraseBackground(self, event):\r\n \r\n pass", "def OnEraseBackground(self, event):\r\n \r\n pass", "def OnEraseBackground(self, event):\r\n\r\n pass", "def OnEraseBackground(self, event):\r\n\r\n pass", "def remove_transparency(\n img: Image.Image,\n bg_color: Tuple[int, int, int] = DEFAULT_BG_COLOR) -> Image.Image:\n if img.mode in ('RGBA', 'LA') or (img.mode == 'P' and 'transparency' in img.info):\n orig_image = img.convert('RGBA')\n background = Image.new('RGBA', orig_image.size, bg_color)\n img = Image.alpha_composite(background, orig_image)\n return img.convert(\"RGB\")\n else:\n return img", "def clear(self):\n self.img = PIL.Image.new(self.img.mode, self.img.size, self.background)\n self.drawer = aggdraw.Draw(self.img)", "def del_alpha(img): \n img = img[:,:,0:3].copy()\n return img", "def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._image)\n self._image = None", "def OnEraseBackground(self, event):\n pass # Do nothing, to avoid flashing on MSW.", "def remove_transparency(image):\n new_image = Image.new(\"RGBA\", image.size, \"WHITE\")\n new_image.paste(image, (0, 0), image)\n new_image.convert('RGB')\n return new_image", "def unpropagateImage(self, dryrun):\n pass", "def _remove_transparency(self, im, bg_colour=(255, 255, 255)):\n # Only process if image has transparency (http://stackoverflow.com/a/1963146)\n if im.mode in (\"RGBA\", \"LA\") or (im.mode == \"P\" and \"transparency\" in im.info):\n # Need to convert to RGBA if LA format due to a bug in PIL (http://stackoverflow.com/a/1963146)\n alpha = im.convert(\"RGBA\").split()[-1]\n bg = Image.new(\"RGBA\", im.size, bg_colour + (255,))\n bg.paste(im, mask=alpha)\n return bg\n else:\n return im", "def remove_alpha(img: Image.Image, bg_color: tuple[int, int, int] = (255, 255, 255)):\n img_rgb = Image.new(\"RGB\", img.size, bg_color)\n alpha_layer = img.split()[3]\n img_rgb.paste(img, mask=alpha_layer)\n return img_rgb", "def remove_color(image):\n return image[:, :, 0]", "def remove_color(image):\n return image[:, :, 0]", "def _clear(self):\n\n self.image = Image.new(\"RGB\", (self._width, self._height), self._color)", "def OnPanelEraseBg(self, event):\r\n\r\n pass", "def clearImage(self):\n if self.hasImage():\n self.scene.removeItem(self._pixmapHandle)\n self._pixmapHandle = None\n self.zoom=-1\n self.scene.clear()", "def cut_background(img):\n height, width = img.shape[0], 
img.shape[1]\n tmp_img = img[:, int(width / 2 - height / 2): int(width / 2 + height / 2)]\n return tmp_img", "def remove_colors(images):\n images = images[:, :, :, :, 0]\n return images", "def OnEraseBackground(self, event):\r\n \r\n if wx.Platform == \"__WXMAC__\":\r\n event.Skip()" ]
[ "0.7596473", "0.7517804", "0.7516415", "0.7453885", "0.7361087", "0.72094864", "0.7066235", "0.678405", "0.6615194", "0.66145176", "0.66145176", "0.6563456", "0.6563456", "0.6557612", "0.65447515", "0.6516783", "0.6507971", "0.64584637", "0.644767", "0.6401402", "0.637965", "0.6294601", "0.6290942", "0.6290942", "0.6288847", "0.628215", "0.6258977", "0.6222591", "0.61948913", "0.6167085" ]
0.76232606
0
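A minimal usage sketch for the retrieved remove_background above (assumes OpenCV and NumPy are installed and the function is in scope; the file names are hypothetical). Note the hard-coded rect (x, y, w, h) = (50, 50, 450, 290) assumes the foreground sits in roughly that region of the frame:

import cv2 as cv

img = cv.imread("photo.jpg")              # hypothetical input path
if img is not None:
    # grabCut zeroes out everything outside the estimated foreground
    cv.imwrite("photo_nobg.jpg", remove_background(img))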
Gets the full name of sender. Uses a get request.
def get_full_name(sender, token):
    url = "https://graph.facebook.com/v2.6/" + sender + "?fields=first_name,last_name&access_token=" + token
    headers = {'content-type': 'application/json'}
    response = requests.get(url, headers=headers)
    data = json.loads(response.content)
    return ''.join(data['first_name'] + ' ' + data['last_name'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sender(self):\n key, alt = ('Sender', 'From') if not self.resent else \\\n ('Resent-Sender', 'Resent-From')\n value = self.get(key) or self.get(alt)\n _, addr = getaddresses([value])[0]\n return addr", "def name(self):\n name = self.__telegram_info.message.from_user.name\n return name[0].upper() + name[1::]", "def sender(self) -> str:\n return self._sender", "def get_sender_username(self, mess):\n jid = mess.getFrom()\n typ = mess.getType()\n username = jid.getNode()\n domain = jid.getDomain()\n if typ == \"chat\":\n return \"%s@%s\" %(username, domain)\n else:\n return \"\"", "def get_full_name(self):\n\t\treturn self.email", "def get_name(self) -> str:\n return self.message[42:74].decode().rstrip(\"\\x00\")", "def getpeername(self):\r\n return self.__proxypeername", "def getpeername(self):\r\n return self.__proxypeername", "def getpeername(self):\r\n return self.__proxypeername", "def SenderScreenName(self):\n return self._sender_screen_name", "def _get_sender(self, sender):\n if isinstance(sender, tuple):\n return \"%s <%s>\" % sender, sender[0], sender[1]\n else:\n return sender, sender, sender", "def getpeername(self):\r\n return self.sock.getpeername()", "def _get_username_from_api(self):\n result = self.api_query(action=\"query\", meta=\"userinfo\")\n return result[\"query\"][\"userinfo\"][\"name\"]", "def get_given_name(self):\n return self.given_name", "def showsender(self):\n return self.sender", "def display_name(self) -> str:\n return self.requester.username", "def name(self):\n for message in self:\n if message.type == 'track_name':\n return message.name\n else:\n return u''", "def getpeername(self):\n return self.sock.getpeername()", "def get_username(self, request):\r\n try:\r\n return request.user.username\r\n except AttributeError:\r\n return ''", "def get_name(self, request, *args, **kwargs):\n raise NotImplementedError", "def get_name(self):\n return self.load_name(self.subject)", "def getpeername(self):\r\n return self._fd.getpeername()", "def get_local_name(self) -> str:\n if self.username:\n return self.username\n if self.email:\n return self.email\n return self.identifier", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def _get_name(self):\n return self.name", "def get_name(self):\n return self.user.username if self.user.username else self.user.email", "def get_name() -> str:", "def get_full_name(self):\n return self.username", "def get_full_name(self):\n return self.username", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)" ]
[ "0.64837116", "0.6365123", "0.6297198", "0.62794083", "0.62514067", "0.6221852", "0.6220194", "0.6220194", "0.6200113", "0.61839944", "0.617625", "0.60754865", "0.6052838", "0.60519934", "0.60513854", "0.6046378", "0.6003633", "0.59597677", "0.5958653", "0.5941031", "0.5933582", "0.5932757", "0.5909138", "0.5899981", "0.58939356", "0.5879899", "0.58644146", "0.58539397", "0.58539397", "0.58503693" ]
0.64352673
1
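A hedged usage sketch for the retrieved get_full_name above; the sender id and token below are placeholders, and a live Facebook page access token (and a Graph API version still serving v2.6-style fields) would be required:

sender_id = "1234567890"            # hypothetical PSID
token = "PAGE_ACCESS_TOKEN"         # hypothetical page access token
print(get_full_name(sender_id, token))   # e.g. "Jane Doe"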
if noninteractive is truthy, always return default. intended primarily as a wrapper to preempt attempts to prompt user input.
def catch_interaction(
    noninteractive: Any, func: Callable, *args, _default: Any = "", **kwargs
):
    if noninteractive:
        return _default
    return func(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def input_with_default(prompt, default):\n response = raw_input(\"%s (Default %s) \"%(prompt, default))\n if not response:\n return default\n return response", "def user_prompt(prompt, default=None):\n prompt = f\"\\n {prompt} [{default}] runs or type an amount: \"\n response = input(prompt)\n if not response and default:\n return default\n else:\n return response", "def default(prompt, default, validator=(lambda x: True), hint=None):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n while not validator(user_input):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n return user_input or default", "def prompt(name, default):\n value = raw_input('%s [%s]: ' %(name, default))\n if not value:\n value = default\n return value", "def prompt_user(prompt):\r\n # raw_input returns the empty string for \"enter\"\r\n yes = set(['yes', 'y'])\r\n no = set(['no','n'])\r\n\r\n try:\r\n print(prompt)\r\n choice = raw_input().lower()\r\n # would like to test for exception here, but not sure if we can do that without importing IPython\r\n except:\r\n print('Stdin is not implemented.')\r\n print('You need to set')\r\n print('overide_manual_authorize=True')\r\n print('to proceed with the download. Please set that variable and continue.')\r\n raise\r\n\r\n\r\n if choice in yes:\r\n return True\r\n elif choice in no:\r\n return False\r\n else:\r\n print(\"Your response was a \" + choice)\r\n print(\"Please respond with 'yes', 'y' or 'no', 'n'\")\r\n #return prompt_user()\r", "def noinput():\n env.prompt = False", "def default_input(prompt, default_value):\r\n item = input(prompt + \"[Enter for \" + default_value + \"]: \").lower()\r\n if item == \"\":\r\n item = default_value\r\n return item", "def _ask_prompt(question: str,\n console: io.IO,\n validate: Optional[Callable[[str], None]] = None,\n default: Optional[str] = None) -> str:\n validate = validate or (lambda x: None)\n while True:\n answer = console.ask(question)\n if default and not answer:\n answer = default\n try:\n validate(answer)\n break\n except ValueError as e:\n console.error(e)\n\n return answer", "def prompt_string(prompt=\"Enter a value\",\n default=None):\n _new = None\n while True:\n try:\n _new = str(input(f\"{prompt}? 
[{str(default)}]: \")) # nosec\n break\n except ValueError:\n print(\"Sorry, I didn't understand that.\")\n continue\n except KeyboardInterrupt:\n break\n return default if _new in [None, ''] else _new", "def prompt(msg, default=NO_DEFAULT, validate=None):\n while True:\n response = input(msg + \" \").strip()\n if not response:\n if default is NO_DEFAULT:\n continue\n return default\n if validate is None or validate(response):\n return response", "def get_yes_no_input(logger, text, default=None):\n if default:\n default = default.strip().lower()\n\n y = \"Y\" if default == \"y\" else \"y\"\n n = \"N\" if default == \"n\" else \"n\"\n\n prompt = f\"{text} [{yellow(y)}/{yellow(n)}]\"\n user_input = \"\"\n\n while not user_input:\n logger(prompt, end=\"\")\n user_input = input(\" \").strip().lower()\n if user_input == \"\" and default:\n user_input = default\n\n return user_input", "def text_input(self, prompt, default=None):\n try:\n user_input = self(prompt)\n if default is not None and user_input == \"\":\n return default\n except InputDisabled:\n if default is not None:\n return default\n raise\n\n return user_input", "def prompt(self):\n # TODO: fix null input\n print('Enter user input: ')\n userinput = input()\n print(f'User chose: {userinput}')\n return userinput", "def get_value(prompt, default=None, hidden=False):\n _prompt = '%s : ' % prompt\n if default:\n _prompt = '%s [%s]: ' % (prompt, default)\n\n if hidden:\n ans = getpass(_prompt)\n else:\n ans = raw_input(_prompt)\n\n # If user hit Enter and there is a default value\n if not ans and default:\n ans = default\n return ans", "def ask(prompt):\n\n return renpy.exports.invoke_in_new_context(renpy.store.layout.yesno_prompt, None, prompt)", "def run_if_interactive(self):\n pass", "def get_input(prompt, default=None, choices=None, option_value=None):\r\n if option_value is not None:\r\n return option_value\r\n \r\n choices = choices or []\r\n while 1:\r\n r = raw_input(prompt+' ').strip()\r\n if not r and default is not None:\r\n return default\r\n if choices:\r\n if r not in choices:\r\n r = None\r\n else:\r\n break\r\n else:\r\n break\r\n return r", "def test_prompt_msg_shows_default(self):\n with mock.patch('__builtin__.raw_input', return_value=\"Andrew\") as mockinput:\n result = self.prompt._prompt(self.response, {\n \"say\": \"First Name\",\n \"ask\": \"first_name\",\n \"default\": \"foobar\"\n })\n\n args, kwargs = mockinput.call_args\n\n self.assertEquals(\"First Name [foobar]? 
\", args[0])\n self.assertEquals(result['ansible_facts']['first_name'], 'Andrew')", "def prompt_yes_no(question, default):\n again = 'Unknown response.'\n if default.lower() in ('y', 'yes'):\n options = '(Y/n): '\n elif default.lower() in ('n', 'no'):\n options = '(y/N): '\n else:\n raise ValueError('default must be \"y\", \"yes\", \"n\", or \"no\"')\n\n response = input(' '.join((question, options))).lower()\n while response not in ('y', 'yes', 'n', 'no', ''):\n response = input(' '.join((again, question, options))).lower()\n if response == '':\n return default\n return response", "def prompt_with_options(prompt, default=None, options=None):\n\n msg = \"%s [%s]: \" % (prompt, default) if default is not None else \"%s: \" % prompt\n value = None\n while value is None:\n value = raw_input(msg).strip()\n if value:\n if options and value not in options:\n value = None\n elif default is not None:\n value = default\n\n return value", "def NoPrompt(self) -> bool:", "def safe_input(prompt=\"\"):\n\n\ttry:\n\t\tresult = input(prompt)\n\t\treturn result\n\texcept KeyboardInterrupt:\n\t\tsys.exit()\n\texcept:\n\t\treturn \"\"", "def prompt_selection(self,\r\n prompt_text: str,\r\n validate: Union[Callable[[str], Optional[Any]], partial],\r\n default: Any) -> Any:\r\n while True:\r\n try:\r\n if self.__use_standard_console:\r\n user_input = prompt(prompt_text)\r\n else:\r\n user_input = self.__alt_prompt(prompt_text)\r\n except KeyboardInterrupt:\r\n return default\r\n if user_input == '':\r\n return default\r\n user_input = validate(user_input)\r\n if user_input is not None:\r\n break\r\n return user_input", "def promptyn(msg: str, default: Optional[bool] = None) -> bool:\n\n while True:\n yes = \"Y\" if default else \"y\"\n if default or default is None:\n no = \"n\"\n else:\n no = \"N\"\n confirm = prompt(\"%s [%s/%s]\" % (msg, yes, no), \"\").lower()\n if confirm in (\"y\", \"yes\"):\n return True\n elif confirm in (\"n\", \"no\"):\n return False\n elif not confirm and default is not None:\n return default", "def test_prompt_msg_defaults(self):\n with mock.patch('__builtin__.raw_input', return_value=\"\") as mockinput:\n result = self.prompt._prompt(self.response, {\n \"say\": \"First Name\",\n \"ask\": \"first_name\",\n \"default\": \"foobar\"\n })\n\n args, kwargs = mockinput.call_args\n\n self.assertEquals(\"First Name [foobar]? \", args[0])\n self.assertEquals(result['ansible_facts']['first_name'], 'foobar')", "def test_prompt_msg_confirm_blank_default_no(self):\n with mock.patch('__builtin__.raw_input', return_value=\"\") as mockinput:\n result = self.prompt._prompt(self.response, {\n \"say\": \"Continue\",\n \"ask\": \"result\",\n \"confirm\": False\n })\n\n args, kwargs = mockinput.call_args\n\n self.assertEquals(\"Continue [yN]? 
\", args[0])\n self.assertEquals(result['ansible_facts']['result'], False)", "def safe_input():\n try:\n input(\"Please enter something: \")\n except EOFError:\n return None\n except KeyboardInterrupt:\n return None", "def input_helper(prompt):\n if version_info[0] == 2:\n # python2 input is scary - we want raw_input\n return raw_input(prompt)\n else:\n return input(prompt)", "def raw_input_default_config(q, default=None, obj=None):\n if default is None:\n if callable(q['default']):\n f1 = q['default']\n try:\n default = f1(obj)\n except TypeError:\n pass\n else:\n default = q['default']\n if 'ask' in q and not q['ask']:\n return default\n if 'obfuscate' in q and q['obfuscate']:\n return raw_input_default(q['q'], default=default, obfuscate=True)\n else:\n return raw_input_default(q['q'], default=default, obfuscate=False)", "def yes_no(prompt, default=None):\n if default is None:\n response = input(prompt + ' (y/n): ')\n elif default:\n response = input(prompt + ' ([y]/n): ')\n elif not default:\n response = input(prompt + ' (y/[n]): ')\n else:\n raise KeyError('Default must be True or False')\n if response.lower() == 'y':\n return True\n elif response.lower() == 'n':\n return False\n elif response == '' and default is not None:\n return default\n else:\n print('Please enter \\'y\\' or \\'n\\' as a valid response.')\n return yes_no(prompt, default)" ]
[ "0.6602298", "0.64760804", "0.63981557", "0.63623685", "0.6330591", "0.61910653", "0.614743", "0.6139188", "0.6115366", "0.6073459", "0.6009013", "0.5976778", "0.59015214", "0.58762276", "0.5871548", "0.5866867", "0.584779", "0.58292", "0.5807008", "0.5786783", "0.57751215", "0.57716495", "0.5764525", "0.5759338", "0.5736841", "0.57302976", "0.5716623", "0.56950176", "0.56904215", "0.5671866" ]
0.67750555
0
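A minimal sketch of catch_interaction above used as an input() guard; in batch mode the prompt never fires and the default comes back unchanged:

answer = catch_interaction(True, input, "continue? [y/N] ", _default="n")
assert answer == "n"    # prompt skipped, default returned
# interactive equivalent: catch_interaction(False, input, "continue? [y/N] ", _default="n")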
iterable -> function

returns function that checks if its single argument contains all (or, by changing oper, perhaps any) items
def are_in(items: Collection, oper: Callable = and_) -> Callable:
    def in_it(container: Collection) -> bool:
        inclusion = partial(contains, container)
        return reduce(oper, map(inclusion, items))

    return in_it
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all(iterable):\n for item in iterable:\n if not item:\n return False\n return True", "def every(lst, fn):\n return reduce(lambda acc, elem: acc and fn(elem), lst, True)", "def contains_all(self, *items):\n return all(item in self for item in items)", "def any(iterable):\n for item in iterable:\n if item:\n return True\n return False", "def some(self, func=bool):\n for i in self._:\n if func(i):\n return True\n return False", "def all(self, func=bool):\n return all(map(func, self._))", "def has_items(*items):\n matchers = []\n for item in items:\n matchers.append(wrap_matcher(item))\n return IsSequenceContainingEvery(*matchers)", "def every(predicate: Predicate[_O]) -> Predicate[Iterable]:\n\n def compare(iterable: Iterable, /) -> bool:\n return all(predicate(item) for item in iterable)\n\n return compare", "def any_user(iterable):\n for element in iterable:\n if element:\n return True\n return False", "def containsAll(self, *args):\n pass", "def all_user(iterable):\n for element in iterable:\n if not element:\n return False\n return True", "def all(selectors, subitem): #pylint: disable=redefined-builtin\n for sel in selectors:\n if isinstance(sel, list):\n passed = False\n for subsel in sel:\n if subsel(subitem):\n passed = True\n break\n if not passed:\n return False\n elif not sel(subitem):\n return False\n return True", "def all(x) -> bool:\n pass", "def ANY(*R):\n return lambda l, i: any(r(l, i) for r in R)", "def all(self, predicate):\n return all(predicate(item) for item in self)", "def has_args(iterable, args):\n\n try:\n return all(x in iterable for x in args)\n\n except TypeError:\n return False", "def contains_any(self, *items):\n return any(item in self for item in items)", "def all(seq, pred=None):\n for elem in itertoos.ifilterfalse(pred, seq):\n return False\n return True", "def fn(p, s):\n ss = iter(s)\n return all(ch in ss for ch in p)", "def forall(seq,cond):\n for x in seq:\n if not cond(x): return False\n return True", "def forall(seq,cond):\n for x in seq:\n if not cond(x): return False\n return True", "def _all_equal(arg):\n return arg.count(arg[0]) == len(arg)", "def any(self, *args, **kwargs):\n if len(args):\n func = args[0]\n args = args[1:]\n else:\n func = bool\n for x in self:\n if func(x, *args, **kwargs):\n return self\n return plist()", "def all(self, *args, **kwargs):\n if len(args):\n func = args[0]\n args = args[1:]\n else:\n func = bool\n for x in self:\n if not func(x, *args, **kwargs):\n return plist()\n return self", "def any(self, predicate):\n return any(predicate(item) for item in self)", "def all_fn(*args) -> bool:\n for arg in args:\n if not isinstance(arg, (FunctionType, partial)):\n return False\n\n return True", "def any(self) -> bool:", "def all_in_set(the_set, the_list):\n return True", "def __contains__(self, item): # __iter__ would do this job by itself\n return (item in self.__values)", "def contains(list_, filter_):\n for x in list_:\n if filter_(x):\n return True\n return False" ]
[ "0.7025728", "0.68925816", "0.6799579", "0.6778553", "0.67687476", "0.67141616", "0.6710857", "0.6701432", "0.6675492", "0.662684", "0.6607761", "0.655131", "0.6546161", "0.6533346", "0.65054846", "0.64836633", "0.64503634", "0.6401916", "0.6399416", "0.6397432", "0.6397432", "0.6335617", "0.62752014", "0.6247938", "0.61874795", "0.6179602", "0.61554575", "0.613054", "0.6073723", "0.6060464" ]
0.7199069
0
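A quick sketch of are_in above; swapping oper from the default and_ to operator.or_ turns the all-of check into an any-of check:

from operator import or_

has_both = are_in({"a", "b"})               # all items must be present
has_either = are_in({"a", "b"}, oper=or_)   # any one item suffices
print(has_both("abc"), has_either("xyzb"))  # True True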
'greedy map' function. map `func` across `iterables` using `mapper` and evaluate with `evaluator`. because we splat the variadic `iterables` argument into `mapper`, behavior is roughly equivalent to `itertools.starmap` if you pass more than one iterable. for cases in which you need a terse or configurable way to map and immediately evaluate functions.
def gmap(
    func: Callable,
    *iterables: Iterable,
    mapper: Callable[[Callable, tuple[Iterable]], Iterable] = map,
    evaluator: Callable[[Iterable], Any] = tuple
):
    return evaluator(mapper(func, *iterables))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eager_map(func, iterable):\n for _ in map(func, iterable):\n continue", "def map(iterable, function):\n for x in iterable:\n yield function(x)", "def map(function, iterable):\n\n return [function(x) for x in iterable]", "def map(self, fn, *iterables, **kwargs):\n fn = self._prepare_fn(fn)\n return self._self.map(fn, *iterables, **kwargs)", "def multimap(funcs, iterable):\n\n for f in funcs:\n iterable = map(f, iterable)\n\n return iterable", "def map(iteratee, *seqs):\n return _map(fnc.iteratee(iteratee), *seqs)", "def map_all(f: Callable[[GT], GS], *args, **kwargs) -> Callable[[GT], GS]:\n\n def _map_all(arr: GT) -> GS:\n return f(arr, *args, **kwargs)\n\n return _map_all", "def map_readers(func, *readers):\n\n def reader():\n rs = []\n for r in readers:\n rs.append(r())\n for e in itertools.imap(func, *rs):\n yield e\n\n return reader", "def mapf(f: Callable[[D_], R_], C: Iterable[D_]) -> Iterator[R_]:\n return (f(x) for x in C)", "def map(self, func, *sequences):\n return self.mapper().map(func, *sequences)", "def map_(func, some_list):\n \n result = []\n \n for arg in some_list:\n result.append(func(arg))\n \n return result", "def deepmap(func, *seqs):\n if isinstance(seqs[0], (list, Iterator)):\n return [deepmap(func, *items) for items in zip(*seqs)]\n else:\n return func(*seqs)", "def mapg(f: Callable[[D_], R_], C: Iterable[D_]) -> Iterator[R_]:\n for x in C:\n yield f(x)", "def recursive_map(iterable, callable, recursion_condition=None):\n recursion_condition = recursion_condition or is_iterable\n res = general_new(iterable)\n\n callable_nargs = len(inspect.getargspec(callable).args) - inspect.ismethod(callable)\n if callable_nargs == 0 or callable_nargs > 2:\n raise RuntimeError(\"`callable` should be a one or two argument function\")\n\n for k, v in general_iterator(iterable):\n if recursion_condition(v):\n res = general_append(\n res,\n k,\n recursive_map(\n callable(v),\n callable,\n recursion_condition\n )\n )\n else:\n\n if callable_nargs == 1:\n v = callable(v)\n else:\n v = callable(k, v)\n\n res = general_append(res, k, v)\n\n return res", "def lmap(f: Callable, *xs) -> list:\n return list(map(f, *xs))", "def flatmap(func, *iterable) -> Iterator:\n return map(func, chain(*chain(*iterable)))", "def num_func_mapper(nums, funs):\n pass", "def map_functions(x, functions):\n res = []\n for func in functions:\n res.append(map(func,x))\n return res", "def flatmap2(func, *iterable) -> Iterator:\n return map(func, chain(*chain(*chain(*iterable))))", "def pfmap(func, workers=8):\n return fmap(func)", "def simple_map_2(f, l):\n # Same as above without comprehension:\n mapped_l = []\n for item in l:\n mapped_l.append( f(item) ) # the extra blanks are just for readability\n return mapped_l", "def simple_map(f, l):\n # Again, my first take is a list comprehension.\n return [ f(item) for item in l ]", "def flat_map(fn, collection):\n return chain.from_iterable(map(fn, collection))", "def imap_c(func):\n return functools.partial(imap, func)", "def map(f):\n def _map_xducer(step):\n def _map_step(r=Missing, x=Missing):\n if r is Missing: return step()\n return step(r) if x is Missing else step(r, f(x))\n return _map_step\n return _map_xducer", "def map_fn(fn, elems, dtype=None, parallel_iterations=None, back_prop=True,\n swap_memory=False, infer_shape=True, name=None):\n if not callable(fn):\n raise TypeError(\"fn must be callable.\")\n\n if isinstance(elems, sparse_tensor.SparseTensor):\n raise TypeError(\n \"To perform a map on the values of a sparse tensor use either \"\n \" 
SparseTensor(input.indices, fn(input.values), input.dense_shape) or \"\n \" SparseTensor(input.indices, map_fn(fn, input.values), \"\n \"input.dense_shape)\")\n\n in_graph_mode = not context.executing_eagerly()\n # Set the default number of parallel_iterations depending on graph/eager mode.\n if in_graph_mode and not parallel_iterations:\n parallel_iterations = 10\n elif not in_graph_mode and not parallel_iterations:\n parallel_iterations = 1\n\n if not in_graph_mode and parallel_iterations > 1:\n logging.log_first_n(logging.WARN, \"Setting parallel_iterations > 1 has no \"\n \"effect when executing eagerly. Consider calling map_fn\"\n \" with tf.contrib.eager.defun to execute fn in \"\n \"parallel.\", 1)\n parallel_iterations = 1\n\n input_is_sequence = nest.is_sequence(elems)\n input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]\n def input_pack(x):\n return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]\n\n if dtype is None:\n output_is_sequence = input_is_sequence\n output_flatten = input_flatten\n output_pack = input_pack\n else:\n output_is_sequence = nest.is_sequence(dtype)\n output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]\n def output_pack(x):\n return (nest.pack_sequence_as(dtype, x)\n if output_is_sequence else x[0])\n\n elems_flat = input_flatten(elems)\n\n with ops.name_scope(name, \"map\", elems_flat):\n # TODO(akshayka): Remove the in_graph_mode check once caching devices are\n # supported in Eager\n if in_graph_mode:\n # Any get_variable calls in fn will cache the first call locally\n # and not issue repeated network I/O requests for each iteration.\n varscope = vs.get_variable_scope()\n varscope_caching_device_was_none = False\n if varscope.caching_device is None:\n # TODO(ebrevdo): Change to using colocate_with here and in other\n # methods.\n varscope.set_caching_device(lambda op: op.device)\n varscope_caching_device_was_none = True\n\n elems_flat = [\n ops.convert_to_tensor(elem, name=\"elem\") for elem in elems_flat]\n\n dtype = dtype or input_pack([elem.dtype for elem in elems_flat])\n dtype_flat = output_flatten(dtype)\n\n # Convert elems to tensor array. 
n may be known statically.\n static_shape = elems_flat[0].shape\n if static_shape.ndims is not None and static_shape.ndims < 1:\n if len(elems_flat) == 1:\n raise ValueError(\"elems must be a 1+ dimensional Tensor, not a scalar\")\n else:\n raise ValueError(\n \"elements in elems must be 1+ dimensional Tensors, not scalars\"\n )\n n = (tensor_shape.dimension_value(static_shape[0])\n or array_ops.shape(elems_flat[0])[0])\n\n # TensorArrays are always flat\n elems_ta = [\n tensor_array_ops.TensorArray(dtype=elem.dtype,\n size=n,\n dynamic_size=False,\n infer_shape=True)\n for elem in elems_flat]\n # Unpack elements\n elems_ta = [\n elem_ta.unstack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]\n\n i = constant_op.constant(0)\n\n accs_ta = [\n tensor_array_ops.TensorArray(dtype=dt,\n size=n,\n dynamic_size=False,\n infer_shape=infer_shape)\n for dt in dtype_flat]\n\n def compute(i, tas):\n \"\"\"The loop body of map_fn.\n\n Args:\n i: the loop counter\n tas: the flat TensorArray accumulator list\n\n Returns:\n (i + 1, tas): the updated counter + updated TensorArrays\n\n Raises:\n TypeError: if dtype and packed_fn_values structure do not match\n ValueType: if dtype and packed_fn_values lengths do not match\n \"\"\"\n packed_values = input_pack([elem_ta.read(i) for elem_ta in elems_ta])\n packed_fn_values = fn(packed_values)\n nest.assert_same_structure(dtype or elems, packed_fn_values)\n flat_fn_values = output_flatten(packed_fn_values)\n tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_fn_values)]\n return (i + 1, tas)\n\n _, r_a = control_flow_ops.while_loop(\n lambda i, _: i < n, compute, (i, accs_ta),\n parallel_iterations=parallel_iterations,\n back_prop=back_prop,\n swap_memory=swap_memory,\n maximum_iterations=n)\n results_flat = [r.stack() for r in r_a]\n\n n_static = tensor_shape.Dimension(tensor_shape.dimension_value(\n elems_flat[0].get_shape().with_rank_at_least(1)[0]))\n for elem in elems_flat[1:]:\n n_static.merge_with(tensor_shape.Dimension(tensor_shape.dimension_value(\n elem.get_shape().with_rank_at_least(1)[0])))\n for r in results_flat:\n r.set_shape(tensor_shape.TensorShape(n_static).concatenate(\n r.get_shape()[1:]))\n\n # TODO(akshayka): Remove the in_graph_mode check once caching devices are\n # supported in Eager\n if in_graph_mode and varscope_caching_device_was_none:\n varscope.set_caching_device(None)\n\n return output_pack(results_flat)", "def map(self, func):\n if self.is_right(): return self.right.map(func)\n if self.is_left(): return self.left.map(func)", "def json_imap(mapping, iterable):\n for item in iterable:\n yield scraper.json_map(mapping, item)", "def recursive_map(func, data):\n\n def recurse(item):\n return recursive_map(func, item)\n\n items_mapped = map_collection(recurse, data)\n return func(items_mapped)", "def foreach(function):\n return partial(map, function)" ]
[ "0.727791", "0.724331", "0.7051725", "0.6898326", "0.6776462", "0.6744139", "0.6707595", "0.6653942", "0.6592648", "0.6541584", "0.6536001", "0.6513968", "0.648274", "0.64515436", "0.63744414", "0.63716865", "0.62399644", "0.6196002", "0.61775833", "0.6175089", "0.6169941", "0.6161399", "0.61553156", "0.61458665", "0.61383706", "0.6128796", "0.61205804", "0.6075871", "0.6063134", "0.6041264" ]
0.8387936
0
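A sketch of gmap above; with one iterable and the defaults it is just tuple(map(...)), and extra iterables get mapped pairwise:

print(gmap(abs, [-1, 2, -3]))                 # (1, 2, 3)
print(gmap(pow, (2, 3), (3, 2)))              # (8, 9)
print(gmap(str.upper, "ab", evaluator=list))  # ['A', 'B']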
Return the boolean version of a number
def CBool(num):
    n = float(num)
    if n:
        return 1
    else:
        return 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test04_boolean_operator(self):\n\n import _cppyy\n number = _cppyy.gbl.number\n\n n = number(20)\n assert n\n\n n = number(0)\n assert not n", "def bool(self):\n return bool(self.int(2))", "def _usable_number(self, num):\n real = isinstance(num, numbers.Real)\n non_nan = not numpy.isnan(num)\n non_bool = not (num is True or num is False)\n return real and non_nan and non_bool", "def getBoolean(self, int: int, int2: int) -> bool:\n ...", "def isgoodnum(n):\n return (not isinstance(n,bool)) and isinstance(n,(int,float))", "def make_boolean(value):\n if value == '1':\n return True\n return False", "def is_number(value, allow_bool=False):\n if isinstance(value, bool):\n return allow_bool\n return isinstance(value, _Number)", "def is_number(self) -> bool:\n return False", "def f_boolean(node, pos, size, context, v):\n if xpath.tools.nodesetp(v):\n return len(v) > 0\n elif xpath.tools.numberp(v):\n if v == 0 or v != v:\n return False\n return True\n elif xpath.tools.stringp(v):\n return v != ''\n\n return v", "def is_number(G):\n return True", "def is_numberish(G):\n return True", "def to_es_bool(boolean_value):\n return '1' if boolean_value else '0'", "def isNumber(num):\n try:\n abs(num)\n return True\n except:\n return False", "def __bool__(x):\n if x.value == 1:\n return True\n elif x.value == -1:\n return False\n else:\n raise ValueError('cannot determine boolean value of Unknown')", "def get_bool_value(obj):\n value = get_signed_value(obj)\n if value is None:\n return None\n if value == 0:\n return False\n return True", "def convertToInt(boolean: bool) -> int:\n ...", "def is_numeric(number):\n\n if isinstance(number, bool):\n return False\n elif isinstance(number, int) or isinstance(number, float):\n return True\n else:\n return False", "def bool(a):\n # Booleans need to be converted to integers for Theano\n if cf.use_theano and isinstance(a, (builtins.bool, np.bool_)):\n return np.int8(a)\n elif cf.use_theano or is_theano_object(a):\n return a\n else:\n return builtins.bool(a)", "def posnegtoggle(number):\n if bool(number > 0):\n return number - number * 2\n elif bool(number < 0):\n return number + abs(number) * 2\n elif bool(number == 0):\n return number", "def Int2Boolean(*args):\n\tSpssMapToVar(\"(%s ~= 0)\", args)\n\t# Does not perform EXECUTE.", "def boolean(val):\n\tif val == \"True\" or val == \"1\":\n\t\treturn True\n\telse:\n\t\treturn False", "def is_int(self):\n return self.v & 1 != 0", "def test_int_to_bool_false(self):\n self.assertEqual(TransformList.int_to_bool({'varname': 0}, 'varname'), False)", "def get_integer(bool_var):\n if bool_var:\n return \"1\"\n else:\n return \"0\"", "def isNumber(self):\n return _libsbml.ASTNode_isNumber(self)", "def getBool(t, swipl):\n b = c_int()\n if swipl.PL_get_long(t, byref(b)):\n return bool(b.value)\n else:\n raise InvalidTypeError(\"bool\")", "def convert_bool(self, v, t):\n return v.asnumpy().item()", "def is_number(entity: Any) -> Tuple[bool, Optional[float]]:\n try:\n number: float = float(entity)\n return True, number\n except ValueError:\n return False, None", "def get_boolean_array_from(number: int) -> List[bool]:\n return_value = [False] * MAX_BIT_LENGTH\n last_bit_position = len(bin(number)) - 1\n for i in range(0, last_bit_position):\n return_value[i] = (number & (1 << i)) != 0\n return return_value", "def test_int_to_bool_true(self):\n self.assertEqual(TransformList.int_to_bool({'varname': 1}, 'varname'), True)" ]
[ "0.67085755", "0.6664846", "0.6640058", "0.6522127", "0.6354467", "0.6318844", "0.6232596", "0.61336666", "0.61238825", "0.61155796", "0.60645497", "0.60555285", "0.59960485", "0.5991613", "0.5946587", "0.59410155", "0.5910892", "0.5907826", "0.5907112", "0.5859945", "0.5832539", "0.5828679", "0.5795011", "0.5789807", "0.5774984", "0.57552457", "0.5740792", "0.57400954", "0.5729494", "0.5723992" ]
0.7226725
0
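A sketch of CBool above; anything that floats to nonzero maps to 1 (note this port returns 1 rather than VB's -1 for True):

print(CBool("0.0"))   # 0
print(CBool(-3))      # 1
print(CBool("2.5"))   # 1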
Choose from a list of options

If the index is out of range then we return None. The list is indexed from 1.
def Choose(index, *args):
    if index <= 0:
        return None
    try:
        return args[index - 1]
    except IndexError:
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def the_option_at_index(index: Union[int, str]) -> \"SelectByIndex\":\n return SelectByIndex(index)", "def choose_from(self,index_list):\r\n\r\n if len(index_list)==1:\r\n return index_list[0]\r\n\r\n if len(index_list)==2:\r\n while True:\r\n imp_temp = input('< >')\r\n if imp_temp in ['<','>',EMPTYCHAR]:\r\n return index_list[{'<':0,\r\n '>':1,\r\n EMPTYCHAR:1}[imp_temp]]\r\n\r\n showtext = []\r\n for counter,index_temp in enumerate(index_list):\r\n if index_temp in self.indexes():\r\n showtext.append(str(counter+1)\\\r\n +' '+str(index_temp)+' : '\\\r\n +abridge(nformat\\\r\n .format_keys(self.get_keys_from_note(index_temp))))\r\n display.noteprint(('/C/NOTES',EOL.join(showtext)))\r\n\r\n choice_temp = input('?')\r\n if choice_temp.isnumeric() \\\r\n and 1 <= int(choice_temp) <= len(index_list):\r\n return index_list[int(choice_temp)-1]\r\n return index_list[-1]", "def _tryGet(self, list, index, default):\n\t\tif (list and (len(list) > index)):\n\t\t\treturn list[index]\n\t\telif True:\n\t\t\treturn None", "def choice(\n\t\toptions: Union[List[str], Mapping[str, str]],\n\t\ttext: str = '',\n\t\tdefault: Optional[str] = None,\n\t\tprompt_suffix: str = \": \",\n\t\tshow_default: bool = True,\n\t\terr: bool = False,\n\t\tstart_index: int = 0\n\t\t) -> Union[str, int]:\n\n\t# TODO: completer for numbers?\n\n\ttype_: click.ParamType\n\n\tif isinstance(options, Mapping):\n\t\t# (Y/I/N/O/D/Z) [default=N]\n\n\t\ttext = f\"{text} ({'/'.join(options.keys())})\"\n\t\ttype_ = click.STRING\n\n\t\tfor choice, descripton in options.items():\n\t\t\tclick.echo(f\" {choice} : {descripton}\")\n\n\telse:\n\t\ttype_ = click.IntRange(start_index, len(options) + 1 - start_index)\n\n\t\tfor idx, descripton in enumerate(options):\n\t\t\tidx += start_index\n\t\t\tclick.echo(f\" [{idx}] {descripton}\")\n\n\tif default is not None and show_default:\n\t\ttext += f\" [default={default}]\"\n\n\twhile True:\n\t\tselection = prompt(\n\t\t\t\ttext=text,\n\t\t\t\tdefault=default,\n\t\t\t\ttype=type_,\n\t\t\t\tprompt_suffix=prompt_suffix,\n\t\t\t\tshow_default=False,\n\t\t\t\terr=err,\n\t\t\t\t)\n\t\tif isinstance(options, Mapping):\n\t\t\tselection = selection.strip().upper()\n\t\t\tif selection not in options:\n\t\t\t\tclick.echo(f\"Please enter a valid option.\")\n\t\t\telse:\n\t\t\t\treturn selection\n\t\telse:\n\t\t\treturn selection - start_index", "def __or(items, index, default):\n if len(items) > index:\n return items[index]\n else:\n return default", "def _choose_best_option(self):", "def randomized_select(a_list, start, end, index):\n if len(a_list) == 1:\n return a_list[0]\n # get random partitioned index\n curr = random_partition(a_list, start, end)\n\n if index == curr:\n return a_list[curr]\n elif index < curr:\n return randomized_select(a_list, start, curr, index)\n else:\n return randomized_select(a_list, curr, end, index-curr)", "def __choose_options(self):\n\t\tswitcher = {\n\t\t\t0: self.__zero,\n\t\t\t1: self.__one,\n\t\t\t2: self.__two,\n\t\t\t3: self.__three,\n\t\t\t4: self.four,\n\t\t\t5: self.four,\n\t\t\t6: self.four,\n\t\t\t7: self.four,\n\t\t}\n\t\tfunc = switcher.get(self.__options(), lambda: \"Invalid option\")\n\t\treturn func", "def select(arrays, index):\n if arrays is None or any(i is None for i in arrays):\n return arrays\n return tuple(i.ravel()[index] for i in arrays)", "def pickResultOption(self, resultOptions):\r\n l = []\r\n for key in resultOptions:\r\n for x in range(resultOptions[key]):\r\n l.append(key)\r\n \r\n return random.choice(l)", "def ask_multiple_option(options, 
prefix = 'Choose between', prompt = ': '):\n\n def exists(index):\n return 0 <= index < len(options)\n\n while True:\n print(prefix)\n for index, option in enumerate(options):\n print(' {} - {}'.format(index + 1, option))\n answer = input(prompt).strip()\n if answer is not '':\n index = int(answer) - 1\n if exists(index):\n return options[index]", "def get_answer(option_list):\n # Print the options\n print(\"Options:\")\n for i in range(len(option_list)):\n print(f\"{i + 1}. {option_list[i]}\")\n\n # Return the selected option from the user\n while True:\n try:\n selection = int(input(\">>>\"))\n if 1 <= selection <= len(option_list):\n print()\n return selection\n else:\n raise ValueError\n except ValueError:\n print(f\"Invalid option: Must be a number between 1 and {len(option_list)}\")", "def basic_menu_non_functional(list_of_options):\n choose = True\n list_of_options.append('back')\n\n while choose:\n print('The following options are available:\\n')\n for option in enumerate(list_of_options):\n print('\\t{} - {}'.format(option[0], option[1]))\n picks = input('\\nType the numeric codes you wish to run\\n\\n').split(',')\n choice = []\n if str(len(list_of_options)) in picks:\n return True\n for pick in picks:\n if pick in [str(i) for i in range((len(list_of_options)))]:\n choice.append(list_of_options[int(pick)])\n else:\n print('{} is not currently an option!\\n'.format(pick))\n if len(choice) > 0:\n return choice", "def _random_pick(lst):\n\n choice = random.randint(0, len(lst) - 1)\n return lst[choice]", "def valid_options(self):\n choices = self.choices()\n\n if not choices:\n return None\n\n return [opt[0] for opt in choices]", "def check_list(lst: list) -> Optional[int]:\n if len(lst) == 0:\n return None\n else:\n return choice(lst)", "def pick_one(_lst):\n if len(_lst) == 2:\n return _lst[0] if int(random(2)) else _lst[1]\n elif len(_lst) == 3:\n return _lst[int(random(3))]", "def quick_select(items: list, index: int):\n # index = len(items) // 2 when trying to find the median\n # (value of index when items is sorted)\n\n # invalid input\n if index >= len(items) or index < 0:\n return None\n\n pivot = items[random.randint(0, len(items) - 1)]\n count = 0\n smaller, equal, larger = _partition(items, pivot)\n count = len(equal)\n m = len(smaller)\n\n # index is the pivot\n if m <= index < m + count:\n return pivot\n # must be in smaller\n elif m > index:\n return quick_select(smaller, index)\n # must be in larger\n else:\n return quick_select(larger, index - (m + count))", "def get_choice(self):\n number = -1\n while (number < 0) or (number > len(self.options)):\n number = int(input('Enter your menu choice: '))\n return number", "def index(a_list, i):\n try:\n return a_list[int(i)]\n except IndexError:\n return None", "def decideResponseOptionsIndex(subject, options):\n subj_pos = Sentience._cleanupPositivityValue(\n getSentencePositivity(Understanding.unparse_sentence(subject))\n )\n if subj_pos is None:\n return random.randint(0,len(options))\n opts_pos = []\n for i, option in enumerate(options):\n opts_pos.append(\n (\n i,\n Sentience._cleanupPositivityValue(\n getSentencePositivity(Understanding.unparse_sentence(option))\n )\n )\n )\n\n random.seed(time.time())\n random.shuffle(opts_pos)\n deviation = random.uniform(-0.5,0.5) * (1-Sentience.getMoodStability())\n\n if subj_pos > -0.15:\n #subject is neutral or positive, look for positive answer\n roll = random.uniform(-0.2 + deviation, 1.0)\n else:\n #subject is negative, look for negative response\n roll = random.uniform(-1.0 , 
0.2 + deviation)\n\n if abs(roll) < (1-Sentience.getMoodStability())*0.3:\n return None\n\n opti, _ = min(map(lambda x: (x[0], abs(roll-x[1])), opts_pos), key=lambda x: x[1])\n return opti", "def get_choice(self, option: int) -> Choice:\n return self._choices[option - 1]", "def get_value(_list, _index):\n # print(_list, len(_list))\n if _index >= len(_list):\n return None\n return _list[_index]", "def select(self, options, prompt='Your choice? '):\n local_opts = options\n if isinstance(options, string_types):\n local_opts = list(zip(options.split(), options.split()))\n fulloptions = []\n for opt in local_opts:\n if isinstance(opt, string_types):\n fulloptions.append((opt, opt))\n else:\n try:\n fulloptions.append((opt[0], opt[1]))\n except IndexError:\n fulloptions.append((opt[0], opt[0]))\n for (idx, (value, text)) in enumerate(fulloptions):\n self.poutput(' %2d. %s\\n' % (idx + 1, text))\n while True:\n response = sm.input(prompt)\n try:\n response = int(response)\n result = fulloptions[response - 1][0]\n break\n except (ValueError, IndexError):\n self.stdout.write(\"{!r} isn't a valid choice. Pick a number \"\n \"between 1 and {}:\\n\".format(\n response, len(fulloptions)))\n return result", "def select_search(search_result: list, index: int):\n return search_result[index][0]", "def select_option(options, input, prompt='> ', error=\"Invalid selection\"):\n if isinstance(options, dict):\n ordered = list(\n sorted(\n options.items(),\n key=operator.itemgetter(1)\n )\n )\n else:\n ordered = options\n\n if input.enabled:\n for i, (key, value) in enumerate(ordered, start=1):\n print(' {i}) {label}'.format(i=i, label=value))\n\n print()\n\n choices = [str(index) for index in range(1, len(ordered) + 1)]\n index = input.selection_input(prompt=prompt, choices=choices, error_message=error)\n return ordered[int(index) - 1][0]", "def select_by_index(self, option):\n\n select = self._get_selenium_select()\n option = to_int(option)\n\n if select and isinstance(option, int):\n\n try:\n\n select.select_by_index(option)\n return True\n\n except NoSuchElementException:\n pass\n\n return False", "def get_selection(self, pointer, answer_sheet, sel_none_of_above):\n def answer_parsing(answer_str):\n selections = answer_str.split(\", \")\n try:\n selections = [int(sel) for sel in selections]\n except:\n return None\n else:\n assert len(selections)\n if sel_none_of_above in selections:\n assert len(selections) == 1 # mutual exclusive \"none of the above\"\n return selections\n\n answer = input(\"Please enter the option id(s) delimited by comma ', ': \")\n selections = answer_parsing(answer)\n while selections is None:\n answer = input(\"Please enter the option id(s) delimited by comma ', ': \")\n selections = answer_parsing(answer)\n\n return selections", "def first_selected_option(self):\n try:\n return self.all_selected_options[0]\n except IndexError:\n raise ValueError(\"No options are selected\")", "def options():\n print(\"1: Compute the sum of 1..n\")\n print(\"2: Compute the product of 1..n\")\n print(\"9: Quit\")\n try:\n option = int(input(\"Choice: \"))\n except:\n return None\n return option" ]
[ "0.6644077", "0.6347903", "0.6231553", "0.6197753", "0.61250263", "0.60571647", "0.6016472", "0.6006558", "0.59368837", "0.5905473", "0.5868816", "0.5859146", "0.58081263", "0.57136124", "0.5678431", "0.5664946", "0.56619287", "0.56481266", "0.5636362", "0.56212187", "0.56143427", "0.5612708", "0.5590802", "0.5589986", "0.5564092", "0.5545763", "0.55343944", "0.5527501", "0.5526988", "0.55030024" ]
0.7803504
0
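A sketch of the 1-based Choose above:

print(Choose(2, "red", "green", "blue"))  # 'green'
print(Choose(0, "red", "green"))          # None  (index <= 0)
print(Choose(5, "red", "green"))          # None  (out of range)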
Try to create an OLE object

This only works on Windows!
def CreateObject(classname, ipaddress=None):
    if not win32com:
        raise ImportError('Not on Windows - cannot create COM object')
    if ipaddress:
        raise VB2PYNotSupported("DCOM not supported")
    return win32com.client.Dispatch(classname)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _winoffice(self):\n # LOG: processing_type property\n self.set_property('processing_type', 'WinOffice')\n oid = oletools.oleid.OleID(self.src_path) # First assume a valid file\n if not olefile.isOleFile(self.src_path):\n # Manual processing, may already count as suspicious\n try:\n ole = olefile.OleFileIO(self.src_path, raise_defects=olefile.DEFECT_INCORRECT)\n except:\n self.make_dangerous('not parsable')\n if ole.parsing_issues:\n self.make_dangerous('parsing issues')\n else:\n if ole.exists('macros/vba') or ole.exists('Macros') \\\n or ole.exists('_VBA_PROJECT_CUR') or ole.exists('VBA'):\n self.make_dangerous('macro')\n else:\n indicators = oid.check()\n # Encrypted can be set by multiple checks on the script\n if oid.encrypted.value:\n self.make_dangerous('encrypted')\n if oid.macros.value or oid.ole.exists('macros/vba') or oid.ole.exists('Macros') \\\n or oid.ole.exists('_VBA_PROJECT_CUR') or oid.ole.exists('VBA'):\n self.make_dangerous('macro')\n for i in indicators:\n if i.id == 'ObjectPool' and i.value:\n # TODO: Is it suspicious?\n # LOG: user defined property\n self.set_property('objpool', True)\n elif i.id == 'flash' and i.value:\n self.make_dangerous('flash')", "def fl_make_object(flobjclass, otype, xpos, ypos, width, height, label,\n pyfn_HandlePtr):\n #FL_HANDLEPTR = cty.CFUNCTYPE(cty.c_int, cty.POINTER(xfdata.FL_OBJECT),\n # cty.c_int, xfdata.FL_Coord, xfdata.FL_Coord, cty.c_int, cty.c_void_p)\n _fl_make_object = library.cfuncproto(\n library.load_so_libforms(), \"fl_make_object\",\\\n cty.POINTER(xfdata.FL_OBJECT), [cty.c_int, cty.c_int, xfdata.FL_Coord,\n xfdata.FL_Coord, xfdata.FL_Coord, xfdata.FL_Coord, xfdata.STRING,\n xfdata.FL_HANDLEPTR],\n \"\"\"FL_OBJECT * fl_make_object(int objclass, int type, FL_Coord x,\n FL_Coord y, FL_Coord w, FL_Coord h, const char * label,\n FL_HANDLEPTR handle)\"\"\")\n library.check_if_flinitialized()\n library.checkfatal_allowed_value_in_list(flobjclass, \\\n xfdata.OBJCLASS_list)\n i_flobjclass = library.convert_to_intc(flobjclass)\n i_otype = library.convert_to_intc(otype)\n i_xpos = library.convert_to_FL_Coord(xpos)\n i_ypos = library.convert_to_FL_Coord(ypos)\n i_width = library.convert_to_FL_Coord(width)\n i_height = library.convert_to_FL_Coord(height)\n s_label = library.convert_to_bytestrc(label)\n library.verify_function_type(pyfn_HandlePtr)\n cfn_HandlePtr = xfdata.FL_HANDLEPTR(pyfn_HandlePtr)\n library.keep_cfunc_refs(cfn_HandlePtr, pyfn_HandlePtr)\n library.keep_elem_refs(flobjclass, otype, xpos, ypos, width, \\\n height, label, i_flobjclass, i_otype, i_xpos, i_ypos, \\\n i_width, i_height, s_label)\n retval = _fl_make_object(i_flobjclass, i_otype, i_xpos, i_ypos, \\\n i_width, i_height, s_label, cfn_HandlePtr)\n return retval", "def createWrapper():\n\n # read properties file and get MANO name and IP\n config = RawConfigParser()\n config.read(\"../../coreMano/coreMano.properties\")\n name = config.get(\"CoreMano\", \"coreMano.name\")\n host_ip = config.get(\"CoreMano\", \"coreMano.ip\")\n\n # instanciate and return the MANO\n if name == \"osm\":\n mano = OsmWrapper(name, host_ip)\n if name == \"cloudify\":\n mano = CloudifyWrapper(name, host_ip)\n return mano", "def allocate_object(object_to_use, variance):\n\thHandle = HANDLE(0)\n\tif object_to_use == 'unnamed_mutex':\n\t\thHandle = kernel32.CreateMutexA(None, False, None)\n\telif object_to_use == 'named_mutex':\n\t\thHandle = kernel32.CreateMutexA(None, False, \"Pool spraying is cool %s\" % variance)\n\telif object_to_use == 'unnamed_job':\n\t\thHandle = 
kernel32.CreateJobObjectA(None, None)\n\telif object_to_use == 'named_job':\n\t\thHandle = kernel32.CreateJobObjectA(None, \"Job %s\" % variance)\n\telif object_to_use == 'iocompletionport':\n\t\thHandle = kernel32.CreateIoCompletionPort(-1, None, 0, 0)\n\telif object_to_use == 'iocompletionreserve':\n\t\tIO_COMPLETION_OBJECT = 1\n\t\tntdll.NtAllocateReserveObject(byref(hHandle), 0x0, IO_COMPLETION_OBJECT)\n\t\thHandle = hHandle.value\n\telif object_to_use == 'unnamed_semaphore':\n\t\thHandle = kernel32.CreateSemaphoreA(None, 0, 3, None)\n\telif object_to_use == 'named_semaphore':\n\t\thHandle = kernel32.CreateSemaphoreA(None, 0, 3, \"My little Semaphore %s\" % variance)\n\telif object_to_use == 'event':\n\t\thHandle = kernel32.CreateEventA(None, False, False, None)\n\tif hHandle == None:\n\t\tprint \"[-] Error while creating object: %s\" % object_to_use\n\t\treturn -1\n\treturn hHandle", "def createNewEmptyObject(objName=\"new Empty Object\"):\n # create a new object, which mesh will be replaced by the new BMesh\n mesh = bpy.data.meshes.new(objName)\n obj = bpy.data.objects.new(objName, mesh)\n obj.location = bpy.context.scene.cursor.location\n bpy.context.collection.objects.link(obj)\n\n # set object as active\n bpy.context.view_layer.objects.active = obj\n\n return obj, mesh", "def createACTopol(self):\n #sleap = False\n if self.engine == 'sleap':\n if self.execSleap():\n self.printError(\"Sleap failed\")\n self.printMess(\"... trying Tleap\")\n if self.execTleap():\n self.printError(\"Tleap failed\")\n if self.engine == 'tleap':\n if self.execTleap():\n self.printError(\"Tleap failed\")\n if self.extOld == '.pdb':\n self.printMess(\"... trying Sleap\")\n self.ext = self.extOld\n self.inputFile = self.baseName+self.ext\n if self.execSleap():\n self.printError(\"Sleap failed\")\n if not self.debug:\n self.delOutputFiles()", "def make_object():\n return object()", "def createWindow():\n\n windowName = \"ObjectSpawner\"\n\n if cmds.window(windowName, query=True, exists=True):\n cmds.deleteUI(windowName)\n\n cmds.window(windowName)\n\n populateUI()\n enableEditorDrop()\n\n cmds.showWindow(windowName)", "def open_create_obj_modal(obj_type):\n selenium_utils.open_url(url.dashboard())\n obj_modal = dashboard.Dashboard().open_create_obj_modal(obj_type=obj_type)\n return obj_modal", "def _WmiInit(self):\n try:\n import pythoncom # pylint: disable=g-import-not-at-top\n self._pythoncom = pythoncom\n self._pythoncom.CoInitialize()\n except ImportError:\n raise WmiError('No pythoncom module available on this platform.')\n\n try:\n from win32com import client # pylint: disable=g-import-not-at-top\n self._client = client\n except ImportError:\n raise WmiError('No win32com module available on this platform.')\n\n try:\n import pywintypes # pylint: disable=g-import-not-at-top\n self._pywintypes = pywintypes\n except ImportError:\n raise WmiError('No pywintypes module available on this platform.')", "def load_win32com(finder, module):\n baseDir = os.path.dirname(os.path.dirname(module.file))\n module.path.append(os.path.join(baseDir, \"win32comext\"))", "def cmdCreator(cls):\n return ommpx.asMPxPtr(cls())", "def createObject(self, *args):\n return _libsbml.CompSBasePlugin_createObject(self, *args)", "def creaXl(nombre):\r\n return xlw.Workbook(nombre)", "def create_instance(c_instance):\n return OpenLabs(c_instance)", "def as_pyobj(space, w_obj, w_userdata=None, immortal=False):\n assert not is_pyobj(w_obj)\n if w_obj is not None:\n py_obj = w_obj._cpyext_as_pyobj(space)\n if not py_obj:\n py_obj = 
create_ref(space, w_obj, w_userdata, immortal=immortal)\n #\n # Try to crash here, instead of randomly, if we don't keep w_obj alive\n ll_assert(py_obj.c_ob_refcnt >= rawrefcount.REFCNT_FROM_PYPY,\n \"Bug in cpyext: The W_Root object was garbage-collected \"\n \"while being converted to PyObject.\")\n return py_obj\n else:\n return lltype.nullptr(PyObject.TO)", "def test_failure(t):\n objmap = ObjectMap({}, modname=\"py.module.name\", classname=\"ClassName\")\n ret = _create_object(objmap)\n t.assertIsNone(ret)", "def test_write_win(vasp2w90_calc_and_ref):\n vasp_calc, reference = vasp2w90_calc_and_ref\n with managed_temp_object() as temp_object:\n with pytest.raises(NotImplementedError):\n vasp_calc.write_win(temp_object)\n with open(temp_object, 'r', encoding='utf8') as result_incar_fo:\n assert result_incar_fo.read() == reference['win']", "def connect():\n Rhino = win32com.client.Dispatch(\"Rhino4.Interface\")\n time.sleep(1)\n Rhino.Visible = True\n return Rhino", "def create_b_obj(ob_name, b_obj_data):\n b_obj = bpy.data.objects.new(ob_name, b_obj_data)\n bpy.context.scene.objects.link(b_obj)\n bpy.context.scene.objects.active = b_obj\n b_obj.select = True\n return b_obj", "def AutoAddToGUI(self):\n\t\treturn self._oleobj_.InvokeTypes(50371086, LCID, 1, (24, 0), (),)", "def aeroInit():\n return RoboCaller().call(\"aeroInit\", \"void\")", "def create_icosaedron():\n obj = glGenLists(1)\n glNewList(obj, GL_COMPILE)\n glPushMatrix()\n try:\n glutSolidIcosahedron()\n except:\n if not _ERRS[8]:\n printGLError(\n \"la version actual de OpenGL no posee la funcion glutSolidIcosahedron\")\n _ERRS[8] = True\n glPopMatrix()\n glEndList()\n return obj", "def test_objref(self):\n dt = h5t.special_dtype(ref=h5r.Reference)\n htype = h5t.py_create(dt)\n self.assertEqual(htype, h5t.PYTHON_OBJECT)", "def cmd_creator():\n return OpenMayaMPx.asMPxPtr(AzureBatchSetup())", "def create_ogr_geom(geom) -> ogr.Geometry:\n if isinstance(geom, ogr.Geometry):\n return geom\n\n # Converte os tipos para diferentes situações (python 2.7).\n # if isinstance(geom, str):\n # geom = str(geom)\n # elif isinstance(geom, unicode):\n # geom = str(geom)\n try:\n ogr_geom = ogr.CreateGeometryFromWkb(geom)\n except RuntimeError:\n ogr_geom = ogr.CreateGeometryFromWkt(geom)\n if not ogr_geom:\n ogr_geom = ogr.CreateGeometryFromWkt(geom)\n return ogr_geom", "def test_drive_create_fail():\n driver_klass = factory.get(\"application/zip\")\n driver: Driver = driver_klass(entries=None, mappings=None)\n\n assert driver.__class__ == \"Shapefile\"", "def create_instance(c_instance):\n\treturn 0", "def createUnoService( cClass ):\n oServiceManager = getServiceManager()\n oObj = oServiceManager.createInstance( cClass )\n return oObj", "def createUnoService( cClass ):\n oServiceManager = getServiceManager()\n oObj = oServiceManager.createInstance( cClass )\n return oObj" ]
[ "0.5287344", "0.51087296", "0.51052445", "0.5105081", "0.5038427", "0.50332063", "0.500756", "0.4986537", "0.4856805", "0.48357683", "0.4802901", "0.48026037", "0.48021197", "0.4792462", "0.4780404", "0.47746176", "0.47093895", "0.46859106", "0.46847957", "0.46789405", "0.46671683", "0.46549788", "0.46533942", "0.46515942", "0.46204877", "0.46190742", "0.4610966", "0.46105176", "0.46001333", "0.46001333" ]
0.61552715
0
Return the String associated with an operating system environment variable. envstring: Optional. String expression containing the name of an environment variable. number: Optional. Numeric expression corresponding to the numeric order of the environment string in the environment-string table. The number argument can be any numeric expression, but is rounded to a whole number before it is evaluated. Remarks: If envstring can't be found in the environment-string table, a zero-length string ("") is returned. Otherwise, Environ returns the text assigned to the specified envstring; that is, the text following the equal sign (=) in the environment-string table for that environment variable.
def Environ(envstring):
    try:
        envint = int(envstring)
    except ValueError:
        return os.environ.get(envstring, "")
    # Is an integer - need to get the envint'th value
    try:
        return "%s=%s" % (list(os.environ.keys())[envint],
                          list(os.environ.values())[envint])
    except IndexError:
        return ""
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getenv_string(setting, default=''):\n return os.environ.get(setting, default)", "def test_get_environment_string(self):\n pass", "def _get_env(key: str) -> str:\n value = os.getenv(key)\n assert isinstance(value, str), (\n f\"the {key} environment variable must be set and a string, \" f\"{value=}\"\n )\n return value", "def env_str(name: str, default: str) -> str:\n value = stringify(env.get(name))\n return default if value is None else value", "def eval_env_as_integer(varname, standard_value) -> int:\n return int(os.getenv(varname, standard_value))", "def getenv(space, var):\n e = os.environ.get(var)\n if e is None:\n return space.w_False\n return space.newstr(e)", "def _build_environment(envdict):\n lines = []\n for k, v in envdict.iteritems():\n if \" \" in v: # NOTE: per the spec, one might want to handle all 'whitespace' chars.\n v = v.replace(\"'\", \"''\")\n v = \"'%s'\" % v\n v = v.replace('\"', '\"\"')\n lines.append('%s=%s' % (k, v))\n return '\"%s\"' % ' '.join(lines)", "def env(var):\n return os.environ[var]", "def getenv_or_raise_exception(varname) -> str:\n\n env = os.getenv(varname)\n if env is None:\n raise EnvironmentError(f\"Environment variable {varname} is not set!\")\n return env", "def maybe_environ(key):\n try:\n return os.environ[key]\n except KeyError:\n return \"\"", "def env(key, default=None, required=False):\n try:\n value = os.environ[key]\n return ast.literal_eval(value)\n except (SyntaxError, ValueError):\n return value\n except KeyError:\n if default or not required:\n return default\n raise ImproperlyConfigured(\n \"Missing required environment variable '%s'\" % key)", "def windows_get_env_value(var_name: str) -> str:\n if var_name in os.environ.keys():\n return os.environ[var_name]", "def get_environment_var(env_name, default_value):\n if env_name in os.environ:\n return os.environ[env_name]\n else:\n return default_value", "def _GetEnvironmentVars(benchmark_spec):\n return ' '.join([\n 'NUM_GPUS=%s' % benchmark_spec.total_gpus,\n 'OMP_NUM_THREADS=%s' % benchmark_spec.cpus_per_rank\n ])", "def env(*vars, **kwargs):\r\n for v in vars:\r\n value = os.environ.get(v)\r\n if value:\r\n return value\r\n return kwargs.get('default', '')", "def env_var_line(key: str) -> str:\n return str(os.environ.get(key) or \"\").strip()", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v, None)\n if value:\n return value\n return kwargs.get('default', '')", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v, None)\n if value:\n return value\n return kwargs.get('default', '')", "def get_envint(key, *default):\n return get_env(key, *default, coerce=_int)", "def envsubst(string):\n # handle simple un-bracketed env vars like $FOO\n a = _simple_re.sub(_repl_simple_env_var, string)\n\n # handle bracketed env vars with optional default specification\n b = _extended_re.sub(_repl_extended_env_var, a)\n return b", "def env(key: str) -> Optional[Any]:\n return os.getenv(key)", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v)\n if value:\n return value\n return kwargs.get('default', '')", "def get_env(environ_name):\n temp = os.getenv(environ_name)\n if temp is None:\n if ('ProgramFiles' in environ_name) or ('ProgramW6432' in environ_name):\n temp = os.getenv('ProgramFiles')\n return temp", "def get_from_environ(key: str, default: Any = None) -> str:\n return os.environ.get(key, default)", "def environment_variable_string(self, name):\n return \"$(\" + name + \")\"", "def get_env(env_name: str, default: Optional[str] 
= None) -> str:\n if env_name not in os.environ:\n if default is None:\n raise KeyError(f\"{env_name} not defined and no default value is present!\")\n return default\n\n env_value: str = os.environ[env_name]\n if not env_value:\n if default is None:\n raise ValueError(\n f\"{env_name} has yet to be configured and no default value is present!\"\n )\n return default\n\n return env_value", "def environment_value(self, name):\n if not os.environ.has_key(name):\n return None\n return os.environ[name]", "def update_env_from_string(env_string):\n excluded_keys = [\"_\", \"SHLVL\", \"PWD\", \"OLDPWD\"]\n env = os.environ\n for line in env_string.split(\"\\n\"):\n (key, _, value) = line.partition(\"=\")\n if key and value and key not in excluded_keys:\n env[key] = value\n return env", "def getenv(self, var):\n return os.environ[var]", "def env(*_vars, **kwargs):\r\n for v in _vars:\r\n value = os.environ.get(v, None)\r\n if value:\r\n return value\r\n return kwargs.get('default', '')" ]
[ "0.6552138", "0.62165606", "0.6127461", "0.61043334", "0.5977026", "0.5974327", "0.5880715", "0.58638054", "0.58064455", "0.57543194", "0.5713163", "0.5692808", "0.56204563", "0.5599559", "0.55874664", "0.5569792", "0.5565246", "0.5565246", "0.55345047", "0.5521654", "0.5520985", "0.55171037", "0.5483568", "0.54649293", "0.54380065", "0.5424862", "0.5409286", "0.54061747", "0.5389227", "0.53808826" ]
0.780673
0
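Editor's note: a usage sketch for the Environ document above; the variable name is hypothetical and the integer-indexed output depends on the caller's environment.

import os

def Environ(envstring):
    try:
        envint = int(envstring)
    except ValueError:
        return os.environ.get(envstring, "")
    # An integer argument selects the envint'th "NAME=value" entry
    try:
        return "%s=%s" % (list(os.environ.keys())[envint],
                          list(os.environ.values())[envint])
    except IndexError:
        return ""

os.environ["VB2PY_DEMO"] = "42"        # hypothetical variable, set just for the demo
print(Environ("VB2PY_DEMO"))           # -> 42
print(Environ("NO_SUCH_VAR_XYZ"))      # -> "" (zero-length string)
print(Environ("0"))                    # -> first NAME=value pair; machine-dependent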
Determine whether we have reached the end of file for the particular channel
def EOF(channel):
    return VBFiles.EOF(channel)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reached_end_of_stream(self):\n pass", "def eof_check(self) -> bool:\n eof = False\n curr_pos = self.fileobject.tell()\n # print(curr_pos, self.st_size)\n chunk = self.fileobject.read(25)\n if chunk == '':\n # Is there something on the back burner??\n if len(self._backburner) > 0:\n self.fileobject = self._backburner.pop()\n # TODO: what if it is the end of the back burner file? Is that handled?\n else:\n eof = True\n else:\n self.fileobject.seek(curr_pos)\n return eof", "def is_eof(self) -> bool:\n ...", "def at_eof(self):\n return self.tell() == len(self)", "def at_eof(self) -> bool:\n ...", "def at_eof(self) -> bool:\n ...", "def at_eof(self) -> bool:\n ...", "def at_eof(self) -> bool:\n ...", "def eof_received(self):\n self.connection_lost('EOF')\n return False", "def eof(self):\r\n\t\treturn self.index == len(self.data)", "def at_eof(self):\n return self._eof and not self._buffer", "def _is_last_chunk(self, bytes_read, previous_read):\n return bytes_read == previous_read and bytes_read != 0", "def end(self): # type: () -> bool\n return self._idx >= len(self._src) or self._current == \"\\0\"", "def is_data_format_channel_last(data_format):\n if data_format is None:\n return True\n return data_format.endswith(\"C\")", "def _is_at_end(self):\n return self._peek().token_type == scanner.TokenType.EOF", "def eof(self):\n try:\n next_line = self.read_pkt_line()\n except HangupException:\n return True\n self.unread_pkt_line(next_line)\n return False", "def is_closed(self, channel=None):\n return self.get_state(channel) == 0", "def eofReceived(self):\n channel.SSHChannel.eofReceived(self)\n # print 'DirectTcpIpChannelClient:: remote eof'\n self.loseConnection()", "def endfile(self) :\n\n\t\tself.fd.close()\n\t\treturn 1", "def eof(self):\n\t\treturn not self.is_alive() and self._queue.empty()", "def EndOfPacket(self) -> bool:", "def has_more_lines(self):\n pos = self.stream.tell()\n res = self.stream.readline() != ''\n self.stream.seek(pos)\n return res", "def can_write_eof(self):\n return True", "def is_closed(self, channel=None):\n return not self.get_state(channel)", "def is_eof(line):\n return line == \"\"", "def on_send_eof(self):\n flag, msg_s = super(FileComm, self).on_send_eof()\n try:\n self.fd.flush()\n except (AttributeError, ValueError): # pragma: debug\n if self.is_open:\n raise\n # self.close()\n return flag, msg_s", "def eof(self):\n return not self.is_alive() and self._queue.empty()", "def isComplete(self):\n return self.bytesToRead == 0", "def _is_end(self, symbol):\n if symbol.id == self.scanner.END_ID:\n return True\n else:\n return False", "def _is_at_end(self):\n return self.current >= len(self.source)" ]
[ "0.66775554", "0.6652134", "0.6621471", "0.6582723", "0.65502644", "0.65502644", "0.65502644", "0.65502644", "0.65212584", "0.64770806", "0.6436078", "0.6431244", "0.6417623", "0.63535255", "0.6350785", "0.63307154", "0.61963063", "0.616728", "0.6133811", "0.611205", "0.60584694", "0.59690493", "0.5962042", "0.59602356", "0.5911316", "0.5903272", "0.587597", "0.58752906", "0.58747756", "0.58520466" ]
0.72668666
0
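Editor's note: the EOF document above delegates to the vb2py runtime's VBFiles object; the stub below is hypothetical, written only so the call pattern can be run end to end.

import io

class _VBFilesStub:
    # Hypothetical stand-in for the vb2py VBFiles channel registry
    def __init__(self):
        self._channels = {}

    def openFile(self, channel, fileobj):
        self._channels[channel] = fileobj

    def EOF(self, channel):
        f = self._channels[channel]
        pos = f.tell()
        at_end = (f.read(1) == "")   # peek one character; an empty read means end of file
        f.seek(pos)                  # restore the read position
        return at_end

VBFiles = _VBFilesStub()

def EOF(channel):
    return VBFiles.EOF(channel)

VBFiles.openFile(1, io.StringIO("ab"))
print(EOF(1))    # False - nothing has been read yet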
Return the length (in bytes) of a given file
def FileLen(filename):
    # Index 6 of the os.stat result is st_size, the file size in bytes
    return os.stat(str(filename))[6]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _filelength(self):\r\n with open(self.fileName, 'rb') as f:\r\n f.seek(0, 2) # move to end of file\r\n length = f.tell() # get current position\r\n return length", "def findFLength(filename):\n f = os.popen('wc -l < {}'.format(filename))\n return int(f.read())", "def get_file_size(file_path):\n with open(file_path, 'rb') as infile:\n infile.seek(0, 2)\n return infile.tell()", "def file_size():\n return os.path.getsize(FILE_NAME)", "def get_file_size(file):\n\tfile.seek(0, os.SEEK_END)\n\treturn file.tell()", "def file_length(fileName):\n with open(f_pass) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def file_len(f):\n\n for n, l in enumerate(f, 1):\n pass\n f.seek(0) # rewind\n return n", "def get_file_size(input_file):\n old_file_position = input_file.tell()\n input_file.seek(0, os.SEEK_END)\n size = input_file.tell()\n input_file.seek(old_file_position, os.SEEK_SET)\n return size", "def get_file_size(filename):\n return os.stat(filename).st_size", "def get_line_length(file_path):\n with open(file_path, 'rb+') as f:\n return len(f.readline())", "def get_file_size(fname):\n return os.path.getsize(fname)", "def file_len(filename):\n with open(filename) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return (file_info.st_size)", "def get_file_size(file_path):\n return os.path.getsize(file_path)", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return convert_bytes(file_info.st_size)", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return convert_bytes(file_info.st_size)", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return convert_bytes(file_info.st_size)", "def file_len(file_name):\n with open(file_name) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def svn_fs_file_length(*args):\r\n return _fs.svn_fs_file_length(*args)", "def file_size(self,file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n return self.convert_bytes(file_info.st_size)", "def file_size(self, file_id: int):\n file_path = self._path_to_file(file_id)\n return os.path.getsize(file_path)", "def file_len(full_path):\n f = open(full_path)\n nr_of_lines = sum(1 for line in f)\n f.close()\n return nr_of_lines", "def file_len(fname):\n \n with open(fname) as f:\n for i, l in enumerate(f):\n pass\n return i + 1", "def GetFileSize(file_path):\n return os.path.getsize(file_path)", "def get_size(fname):\n return os.path.getsize(fname)", "def fileSize(pathAndFilename):\n return os.stat(pathAndFilename).st_size", "def total_file_length(self):\n if self.is_multi_file():\n return sum([file['length'] for file in self.torrent['info']['files']])\n else:\n # single file\n return self.torrent['info']['length']", "def _get_file_length(self, file):\n self[file] = file.stat().st_size", "def file_size(fn, compression=None):\n if compression == 'gzip':\n with open(fn, 'rb') as f:\n f.seek(-4, 2)\n result = struct.unpack('I', f.read(4))[0]\n else:\n result = os.stat(fn).st_size\n return result", "def file_size(file_path):\n if os.path.isfile(file_path):\n file_info = os.stat(file_path)\n # return humanize.naturalsize(file_info.st_size)\n return file_info.st_size" ]
[ "0.8291489", "0.8033439", "0.80026686", "0.79879576", "0.7905295", "0.78845304", "0.7876845", "0.78724664", "0.7828686", "0.7820954", "0.78019077", "0.7798586", "0.77813584", "0.7755042", "0.7709965", "0.7709965", "0.7709965", "0.77049273", "0.7685765", "0.76809627", "0.76497996", "0.7617404", "0.7610787", "0.75871193", "0.75647205", "0.7552819", "0.74897194", "0.74772125", "0.7453534", "0.74485886" ]
0.834456
0
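Editor's note: a small self-contained check of the FileLen document above; the temporary file exists only for the demo.

import os
import tempfile

def FileLen(filename):
    return os.stat(str(filename))[6]    # os.stat(...)[6] is st_size, the size in bytes

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(b"hello")
    path = f.name
print(FileLen(path))    # -> 5
os.remove(path)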
Returns a zero-based array containing a subset of a string array, based on specified filter criteria
def Filter(sourcesarray, match, include=1):
    if include:
        return Array(*[item for item in sourcesarray if item.find(match) > -1])
    else:
        return Array(*[item for item in sourcesarray if item.find(match) == -1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def filter_fn(arr):\n return lambda l: ([n for n in arr if n == l])", "def filter(self, filterarray):\n return FeatureSet(list(np.array(self)[np.array(filterarray)]))", "def subset(arr, start, end):\n return [[row_data for row_data in row[start[1]:end[1]]] for row in arr[start[0]:end[0]]]", "def subset(mylist,mybool):\n myarray = np.array(mylist)\n return(np.squeeze(myarray.take(np.where(mybool),axis=0)))", "def array_filter(item, func):\n return filter(func, item)", "def filter(self, filter_strings):\n if filter_strings is None:\n filter_strings = []\n\n result = RowSet()\n for row in self.rows:\n match = True\n for s in filter_strings:\n if not row.filter(s):\n match = False\n break\n if match:\n result.append(row)\n return result", "def kmer_seq_to_filters(kmers):\n\treturn np.concatenate([dna_string_to_array(s) for s in kmers])", "def get_relevant_zones(array,threshold=3):\n\n\treturn [item for item in array if len(item)>3]", "def cfilter(func,iterable):\n result = []\n\n for i in iterable:\n\n if func(i) == True:\n result.append(i)\n\n return result", "def filterRansac():\n pass", "def filter(self, op):\n def op_filter(seqs):\n r = [s for s in seqs if op(s)]\n if len(r) == 0:\n return None\n else:\n return r\n return self.element_wise(op_filter)", "def filter(self, func: Callable[[T], bool]) -> 'List[T]':\n return [v for v in self.array if func(v)]", "def filter_classes(class_ints, class_list, class_filt):\n class_names = [class_list[int(c)] for c in class_ints]\n filter = [name in class_filt for name in class_names]\n return np.array(filter)", "def filtered(self, func):\n return PSetList(list(filter(func, self.sets)))", "def filterExtStrToArray(filterExt):\n \n if filterExt is None:\n return []\n \n fe = filterExt.split(\" \")\n \n for f in fe:\n assert f[0] == \".\"\n \n return fe", "def get_zones(array,kind,relevant=False,threshold=3):\n\n\tresulting_set=[]\n\n\ti=0\n\tif array[i]==kind:\n\t\tcount=1\n\telse:\n\t\tcount=0\n\n\twhile i<len(array):\n\t\t\n\t\tif array[i]==kind:\n\t\t\tcount+=1\n\t\telif array[i]!=kind and array[i-1]==kind:\n\t\t\tresulting_set.append(([kind]*count,i-count))\n\t\t\tcount=0\n\t\telse:\n\t\t\tpass\n\n\t\ti+=1\n\n\tif count>0:\n\t\tresulting_set.append(([kind]*count, i-count))\n\n\tif relevant == False:\n\t\treturn resulting_set\n\telse:\n\t\treturn [item for item in resulting_set if len(item[0])>threshold]", "def filter_data(data,filters):\n final_filter = pd.Series(np.array([True] * data.shape[0]))\n for attribute, value in filters:\n final_filter &= data[attribute] == value\n return data[final_filter]", "def filter(self, filters):", "def get_filter_stringlist(self):\n return text_filter", "def get_contract_filters(*contracts):\n return [generate_filter(filter_text) for filter_text in contracts]", "def filterPick(list, filter, classification):\n y = []\n for job in list:\n x = [(job, classification) for l in job for m in (filter(l),) if m]\n y.append(x)\n return y", "def remove(data, pattern):\n return [''.join(filter(pattern, str)) for str in data]", "def grep(pattern, arr):\n\n assert isinstance(pattern, basestring)\n assert isinstance(arr, list)\n\n ret = []\n\n for el in arr:\n if re.search(pattern, el):\n ret.append(el)\n\n return ret", "def filter(self, func):\r\n\r\n d = self.data\r\n f = []\r\n for i in d:\r\n if func(i):\r\n f.append(i)\r\n return Records(f)", "def filter_list(data: List[dict], field: str, selected: List[str]):\n if len(selected):\n return [x for x in data if x[field] in selected]\n else:\n return data", "def 
filter_by_subset(self, *args):\n self.subset_labels = sorted(set(self.subset_labels + list(args)))\n return self", "def slices_using_group_array(group_array):\n unique_elements = np.unique(group_array)\n slices = []\n for unique_element in unique_elements:\n indexes = np.flatnonzero(group_array == unique_element)\n low, high = (indexes[0], indexes[-1] + 1)\n slices.append((unique_element, slice(low, high)))\n return slices", "def trans(array,dim):\n return array[filter(lambda x: x != dim,range(len(array)) ) ]", "def _pick_elements(self,regexp_ind,array_list):\r\n new_array_list = [] #New list with elements matching regexp_ind\r\n array_indices = [] #Indices that matches the arrays in new_array_list and array_list\r\n\r\n array_index = 0\r\n for array in array_list:\r\n _new = []\r\n for ai in array:\r\n if ai in regexp_ind:\r\n _new.append(ai)\r\n if len(_new):\r\n new_array_list.append(np.array(_new))\r\n array_indices.append(array_index)\r\n array_index += 1\r\n return new_array_list, array_indices", "def random_subset(array, count):\n indices = np.random.permutation(len(array))[:count]\n return array[indices]" ]
[ "0.5791858", "0.57571936", "0.569493", "0.56764555", "0.56472826", "0.5434943", "0.5370937", "0.5358635", "0.52934223", "0.5246935", "0.52461225", "0.5188089", "0.51835763", "0.5115934", "0.5108654", "0.50504124", "0.5034765", "0.50195634", "0.50031567", "0.49780133", "0.49755558", "0.49511606", "0.49495035", "0.49484068", "0.4934201", "0.49102905", "0.49066117", "0.49039465", "0.4882444", "0.48797366" ]
0.59934235
0
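Editor's note: a behavior check for the Filter document above; the Array helper is a hypothetical stand-in for the VB-style array constructor the runtime supplies.

def Array(*items):
    # Hypothetical stand-in for the runtime's VB-style Array constructor
    return list(items)

def Filter(sourcesarray, match, include=1):
    if include:
        return Array(*[item for item in sourcesarray if item.find(match) > -1])
    else:
        return Array(*[item for item in sourcesarray if item.find(match) == -1])

names = ["alpha", "beta", "gamma"]
print(Filter(names, "mm"))              # -> ['gamma']
print(Filter(names, "mm", include=0))   # -> ['alpha', 'beta']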
Determine if an object is an array
def IsArray(obj):
    return isinstance(obj, (list, tuple))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_arraylike(obj):\n if isinstance(obj, list):\n return True\n elif isinstance(obj, np.ndarray):\n return True\n elif isinstance(obj, pd.Series):\n return True\n elif isinstance(obj, pd.DataFrame):\n return True\n return False", "def is_array(schema_obj):\n\n if isinstance(schema_obj, schema.Field):\n return schema_obj.is_array\n return False", "def is_array(val):\n return (\n isinstance(val, tuple) or \\\n isinstance(val, dict) or \\\n isinstance(val, list)\n )", "def is_array(t):\n return isinstance(t, ast.Array)", "def is_array(self):\n return False", "def is_array_type(self, objtype):\n return isinstance(objtype, self.__arrayt) # _ctypes.PyCArrayType", "def is_array(self, arr):\n return isinstance(arr, np.ndarray)", "def is_array(space, w_obj):\n return space.wrap(w_obj.tp == space.tp_array)", "def is_array(self):\n return len(self.descriptor) > 1", "def is_a_numpy_array(obj):\n return type(obj).__module__ == np.__name__", "def isarray(a):\r\n try:\r\n validity = isinstance(a, ndarray)\r\n except:\r\n validity = False\r\n\r\n return validity", "def is_array(type):\n nake_type = remove_alias(type)\n nake_type = remove_reference(nake_type)\n nake_type = remove_cv(nake_type)\n return isinstance(nake_type, cpptypes.array_t)", "def isarray(a):\n try:\n validity=isinstance(a,ndarray)\n except:\n validity=False\n\n return validity", "def NeedsArray(self, type_):\n return self._NameComponents(type_) in self._array_types", "def is_json_array(typename):\n return typename and typename.startswith('[') and typename.endswith(']')", "def is_numpy(obj):\n return 'numpy' in str(type(obj))", "def is_arrayexpress_array(val):\n return arrayexpress_array_regexp.match(val)", "def is_array(a):\n try:\n shape = a.shape\n return len(shape) >= 1\n except AttributeError:\n return False", "def is_array_of_basic_instance(self, obj):\n # FIXME: deprecated\n if not hasattr(obj, '_type_'):\n return False\n if self.is_array_type(type(obj)):\n if len(obj) == 0:\n return False # no len is no BasicType\n if self.is_pointer_type(obj._type_):\n return False\n if self.is_basic_type(obj._type_):\n return True\n return False", "def is_array_param(param):\n return param.get('tags') and param['tags']['type'] == 'array'", "def is_array_of_basic_type(self, objtype):\n return self.is_array_type(objtype) and hasattr(objtype, '_type_') and self.is_basic_type(objtype._type_)", "def is_list(obj):\n return type(obj) is list", "def check_array(arr: Arrayable) -> np.ndarray:\n if isinstance(arr, np.ndarray):\n return arr\n return np.array(arr)", "def is_iterable(obj):\n if isinstance(obj, (str, bytes, bytearray)):\n return False\n return isinstance(obj, Iterable)", "def isIterable(obj):\n return isinstance(obj, ListType)", "def is_pointer_to_array_type(self, objtype):\n if hasattr(objtype, '_subtype_'): # haystack\n return self.is_array_type(objtype._subtype_)\n return self.is_pointer_type(objtype) and hasattr(objtype, '_type_') and self.is_array_type(objtype._type_)", "def is_string_array(self):\n return self.type == Property.PropertyType.stringArray", "def is_list(annotation):\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list", "def _is_key_value_array(self, data):\n for d in data:\n if not self._is_key_value(d):\n return False\n return True", "def _is_DataArrays(data):\n if isinstance(data, (Dataset, DataArray)):\n return True\n if isinstance(data, Mapping):\n for da in data.values():\n if not isinstance(da, DataArray):\n raise TypeError(\"Please provide List/Mapping of 
DataArrays\")\n return True\n if isinstance(data, Iterable):\n for da in data:\n if not isinstance(da, DataArray):\n return False\n # raise TypeError(\"Please provide List/Mapping of DataArrays\")\n return True\n return False" ]
[ "0.8131859", "0.78075755", "0.77648115", "0.774824", "0.76568", "0.75508946", "0.7546086", "0.74527085", "0.7409406", "0.73134905", "0.71920586", "0.7132876", "0.70407593", "0.70193404", "0.6981981", "0.68952763", "0.6841536", "0.6817918", "0.67961663", "0.67788136", "0.67067933", "0.67043287", "0.6693951", "0.6649249", "0.6610916", "0.656376", "0.6525894", "0.64382946", "0.64148784", "0.6359439" ]
0.8533337
0
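Editor's note: a minimal check of the IsArray document above; under this port only lists and tuples count as arrays.

def IsArray(obj):
    return isinstance(obj, (list, tuple))

print(IsArray([1, 2, 3]))    # True
print(IsArray((1, 2)))       # True
print(IsArray("abc"))        # False - strings are deliberately excluded here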
Return the left-most characters in the text
def Left(text, number):
    return text[:number]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def characters_left(self):\r\n return self.max_chars - len(self.variable.get())", "def Right(text, number):\n return text[-number:]", "def keyword_length(text):\n text = scrub_string(text)\n a = [fabs(IC(text, ncol) - ENGLISH_IC) for ncol in range(1, MAX_LEN)]\n return a.index(min(a)) + 1", "def pleft(self):\n return -self.pfill(1) + self.plen(-1, s=True)", "def nextCharLowerLeft(self):\n pmodel = self.model.pos * self.transform.scale\n x, y, _ = self.transform.pos + pmodel\n y += ((self.font.table['ascent'] + self.y_sign * self._labelHeight)\n * self.transform.scale[1])\n x += self._string_metric[-1][2][0] * self.transform.scale[0]\n return x, y", "def findAlphabeticallyLastWord(text):\n return max(text.split(' '))", "def get_max_character(strings):\n m=0\n for string in strings:\n for char in string:\n if char>m:\n m=char\n return m", "def longest_word_length(words):", "def checkio_best(text):\n text = text.lower()\n # text.count为函数,返回指定char的数量\n return max(string.ascii_lowercase, key=text.count)", "def mostwantedletter(text):\n temp = Counter(text.lower())\n alpha_keys = {k:temp[k] for k,v in temp.items() if k.isalpha()}\n max_keys = [k for k,v in alpha_keys.items() if alpha_keys[k] == max(alpha_keys.values())]\n if len(max_keys) == 1:\n return max_keys[0]\n else:\n return min(max_keys)", "def get_remaining_character_count(self):\n return self.driver.find(CHARACTER_COUNT).text", "def theLoveLetterMystery(s):\n mincount = 0\n for i in range(len(s) // 2):\n mincount += abs(ord(s[i]) - ord(s[-1 - i]))\n\n return mincount", "def LEN(text):\n return len(text)", "def computeMaxWordLength(text):\n # BEGIN_YOUR_CODE (our solution is 1 line of code, but don't worry if you deviate from this)\n return(max(sorted(text.split(), reverse = True), key = len))\n # END_YOUR_CODE", "def get_lenght(text):\n return range(len(Articles.split(text)))", "def longest_word_len(text):\r\n return np.max(np.array([len(word) for word in tokenization(text)]))", "def _left(self, index):\r\n return 2*index + 1", "def peekleft(self):\n return self.buffer[self.start]", "def find_longest_word(s):\n return sorted(map(lambda si: (si, len(si)), s.split()), key=lambda item: item[1], reverse=True)[0][0]", "def readLeft():\n return readAll()[0]", "def LEFT(string, num_chars=1):\n if num_chars < 0:\n raise ValueError(\"num_chars invalid\")\n return string[:num_chars]", "def lengthOfLastWord(self, s):\n l = len(s)\n c = 0\n i = 1\n while l - i > -1:\n if s[-i] != ' ':\n c += 1\n elif c != 0:\n break\n\n i += 1\n return c", "def words_before_index(text, idx):\n while text[idx] != ' ':\n idx -= 1\n if idx <= 0:\n return 0\n n_words = len(text[:idx].split(' '))\n return n_words", "def cut_text(value, length): # Only one argument.\n return value[0:length]", "def alphabet_position(text):\n return ' '.join(str(ord(c) - 96) for c in text.lower() if c.isalpha())\n # return ' '.join(str(string.ascii_lowercase.index(s.lower())+1) for s in text if s.lower() in string.ascii_lowercase)", "def MID(text, start_num, num_chars):\n if start_num < 1:\n raise ValueError(\"start_num invalid\")\n return text[start_num - 1 : start_num - 1 + num_chars]", "def getTextLength(self):\r\n return 0", "def computeMaxWordLength(text):\n return max(text.split(), key=getWordKey) # usually key argument is a function defined by 'def' or 'lambda'", "def findBestShift(wordList, text):\n import string\n decoded = ''\n r = 0\n max_count = 0\n for i in range(26):\n count = 0\n decoded = applyShift(text,i)\n for word in decoded.split():\n if 
word.strip(string.punctuation+string.digits).lower() in wordList:\n count += 1\n if count > max_count:\n max_count = count\n r = i\n return r", "def left(self, i):\n return (i + self.seats - 1) % self.seats" ]
[ "0.76972735", "0.6075009", "0.60736424", "0.6047909", "0.6003288", "0.5963332", "0.5958215", "0.5919674", "0.59092534", "0.58404374", "0.5834725", "0.5794999", "0.579165", "0.57874477", "0.5779034", "0.5778344", "0.57569116", "0.5754645", "0.5738545", "0.57297117", "0.5723335", "0.57188624", "0.57172483", "0.56701076", "0.5668679", "0.563882", "0.5618289", "0.5591183", "0.55638826", "0.5561634" ]
0.7287347
1
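Editor's note: a quick check of the Left document above; Python slicing makes the over-length case safe.

def Left(text, number):
    return text[:number]

print(Left("Visual Basic", 6))    # -> Visual
print(Left("abc", 10))            # -> abc (slicing never raises on short input)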
Return true if the text matches the pattern. The pattern is a string containing wildcards: * = any string of characters, ? = any one character. Fortunately, the fnmatch library module does this for us!
def Like(text, pattern):
    return fnmatch.fnmatch(text, pattern)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def regex_match(text, pattern):\n try:\n pattern = re.compile(\n pattern,\n flags=re.IGNORECASE + re.UNICODE + re.MULTILINE,\n )\n except BaseException:\n return False\n return pattern.search(text) is not None", "def match(cls, text):\r\n return cls.main.pattern.match(text)", "def contains(text: str, pattern: str) -> bool:\n assert isinstance(text, str), 'text is not a string: {}'.format(text)\n assert isinstance(pattern, str), 'pattern is not a string: {}'.format(text)\n # COMPLEXITY: O(n) b/c we are using find_index method which is O(n)\n return find_index(text, pattern) is not None", "def contains(text, pattern):\n assert isinstance(text, str), 'text is not a string: {}'.format(text)\n assert isinstance(pattern, str), 'pattern is not a string: {}'.format(text)\n # TODO: Implement contains here (iteratively and/or recursively)\n\n which = 'contains'\n\n # if pattern == '': # All strings have an empty string\n # return True\n #\n # sub_string = ''\n # for i in range(len(text) - len(pattern) + 1): # Iterate through text with limit based on length of pattern\n # for j in range(i, len(pattern) + i): # Iterate through as many characters as pattern has\n # sub_string += text[j] # add characters to substring\n # if pattern == sub_string: # compare\n # return True # pattern exists\n # sub_string = '' # reset substring if not found\n # return False # pattern does not exist\n\n return string_master_func(text, pattern, which)", "def match(pattern, string):\n if not len(pattern) and not len(string):\n return True\n\n if len(pattern) > 1 and pattern[0] == '*' and len(string) == 0:\n return False\n\n if (len(pattern) > 0 and pattern[0] == '?') or \\\n (len(pattern) != 0 and len(string) != 0 and pattern[0] == string[0]):\n return match(pattern[1:], string[1:])\n\n if len(pattern) != 0 and pattern[0] == '*':\n return match(pattern[1:], string) or match(pattern, string[1:])\n\n return False", "def match(pattern, target):\n pattern = ''.join('.*' if c == '*' else re.escape(c) for c in pattern)\n return bool(re.match('^' + pattern + '$', target))", "def find_pattern(self, pattern: str) -> bool:\n\n if not pattern and self.is_word:\n return True\n\n node = self\n for i, char in enumerate(pattern):\n if char == \".\":\n res = []\n for each in node.children.values():\n res.append(each.find_pattern(pattern[i + 1:]))\n return any(res)\n\n if char in node.children:\n node = node.children[char]\n else:\n return False\n\n return node.is_word", "def search (text, pattern):\n\tfor i in xrange(len(text)-len(pattern)+1):\n\t\tfound = True\n\t\tfor j in xrange(len(pattern)):\n\t\t\tif text[i+j] != pattern[j]:\n\t\t\t\tfound = False\n\t\t\t\tbreak\n\t\tif found:\n\t\t\tprint 'Pattern found at index:', i\n\treturn", "def isMatch(self, s: str, p: str) -> bool:\n def is_match(self, text, pattern):\n if not pattern:\n return not text\n\n first_match = bool(text) and pattern[0] in {text[0], '.'}\n\n if len(pattern) >= 2 and pattern[1] == '*':\n return (self.isMatch(text, pattern[2:]) or\n first_match and self.isMatch(text[1:], pattern))\n else:\n return first_match and self.isMatch(text[1:], pattern[1:])\n\n def isMatch(self, text, pattern):\n memo = {}\n\n def dp(i, j):\n if (i, j) not in memo:\n if j == len(pattern):\n ans = i == len(text)\n else:\n first_match = i < len(text) and pattern[j] in {text[i], '.'}\n if j + 1 < len(pattern) and pattern[j + 1] == '*':\n ans = dp(i, j + 2) or first_match and dp(i + 1, j)\n else:\n ans = first_match and dp(i + 1, j + 1)\n\n memo[i, j] = ans\n return memo[i, j]\n\n return dp(0, 
0)", "def fnmatch(pattern, filename) -> bool:\n return _fnmatch(filename, pattern)", "def regex(value, pattern):\r\n c_pattern = re.compile(r\"\\b\" + pattern.lower() + r\"\\b\")\r\n return c_pattern.search(value) is not None", "def REGEXMATCH(text, regular_expression):\n return bool(re.search(regular_expression, text))", "def matchPatterns(path, patterns):\n name = os.path.basename(path)\n for p in patterns:\n if fnmatch.fnmatch(name, p):\n return True\n return False", "def is_matching(patterns, blob):\n for pattern in patterns:\n if re.match(fnmatch.translate(pattern), blob.path):\n return True\n return False", "def get_match_with_string(pattern, unknown):\n pattern, unknown = _check_params(pattern, unknown)\n if pattern not in unknown:\n return False\n return True", "def match_star(p, pattern, text):\n \"\"\"p*pattern <> text\"\"\"\n return (\n # matches pattern <> text\n match(pattern, text) \n or \n # p matches the first character of text, then match p*pattern <> text[1:]\n (match1(p, text) and match_star(p, pattern, text[1:]))\n )", "def match(self, text):\n if self.sense:\n return (self.regex.match(text) != None)\n else:\n return (self.regex.match(text) == None)", "def match(text, pattern):\n def get_next_upper(s, start):\n \"\"\"\n determines the next occurrence of an upper case letter, or returns the length of the string\n if there are no more upper cases found.\n :param s:\n :param start:\n :return:\n \"\"\"\n for i in range(start, len(s)):\n if s[i].isupper():\n return i\n return i\n\n i = 0\n j = 0\n while i < len(text) and j < len(pattern):\n if pattern[j].isupper():\n i = get_next_upper(text, i)\n if i != len(text) and text[i] == pattern[j]:\n i += 1\n j += 1\n else:\n return False\n else:\n while i < len(text) and j < len(pattern) and not pattern[j].isupper():\n if text[i] != pattern[j]:\n return False\n i += 1\n j += 1\n return True", "def check_match_pattern(self):\n text = self.ui.plainTextEdit.toPlainText()\n pattern = self.ui.textPattern.text()\n result = re.search(pattern, text)\n group = int(self.ui.spinGroup.text())\n if result:\n self.ui.textMatch.setText(result.group(group))", "def validate_string_search(self, pattern, file):\r\n try:\r\n file_open = open(file, 'r')\r\n except:\r\n logging.info(\"file not found\")\r\n return -1\r\n file_data = file_open.read()\r\n ret_out = re.search(pattern, file_data)\r\n if ret_out:\r\n return True, ret_out\r\n else:\r\n return False, ret_out", "def has_pattern(self, pattern):\n\n pat_len = len(pattern)\n if pat_len > self.text_len:\n raise ValueError(\"Pattern length is bigger than text\")\n\n if self.first_occurence(pattern) == -1:\n return False\n\n return True", "def __check_pattern(node):\n if node.tag != \"discover_datasets\":\n return False\n if \"from_tool_provided_metadata\" in node.attrib and string_as_bool(\n node.attrib.get(\"from_tool_provided_metadata\", \"false\")\n ):\n return True\n if \"pattern\" not in node.attrib:\n return False\n pattern = node.attrib[\"pattern\"]\n regex_pattern = NAMED_PATTERNS.get(pattern, pattern)\n # TODO error on wrong pattern or non-regexp\n if \"(?P<ext>\" in regex_pattern:\n return True", "def topic_pattern_match(pattern):\n client = AdminClient({\"bootstrap.servers\": \"PLAINTEXT://localhost:9092\"})\n topic_metadata = client.list_topics()\n topics = topic_metadata.topics\n filtered_topics = {key: value for key, value in topics.items() if contains_substring(key, pattern)}\n return len(filtered_topics) > 0", "def file_contains_pattern(file, pattern):\r\n if not 
os.path.isfile(file):\r\n raise NameError('file %s does not exist' % file)\r\n return not utils.system('egrep -q \"' + pattern + '\" ' + file,\r\n ignore_status=True)", "def word_and_pattern (word,pattern):\r\n for i in range(len(pattern)):\r\n if pattern[i]!= '_' and pattern.count(pattern[i]) != word.count(pattern[i]):\r\n return False\r\n return True", "def check_pattern(pattern, token):\n split_token = re.split('\\W+', token, 1)\n if split_token[0] == '':\n split_token = split_token[1]\n else:\n split_token = split_token[0]\n return split_token == pattern", "def __reWildcard(self, regexp, string):\n regexp = re.sub(\"\\*+\", \"*\", regexp)\n match = True\n if regexp.count(\"*\") == 0:\n if regexp == string:\n return True\n else:\n return False\n blocks = regexp.split(\"*\")\n start = \"\"\n end = \"\"\n if not regexp.startswith(\"*\"):\n start = blocks[0]\n if not regexp.endswith(\"*\"):\n end = blocks[-1]\n if start != \"\":\n if string.startswith(start):\n blocks = blocks[1:]\n else:\n return False\n if end != \"\":\n if string.endswith(end):\n blocks = blocks[:-1]\n else:\n return False\n blocks = [block for block in blocks if block != \"\"]\n if blocks == []:\n return match\n for block in blocks:\n i = string.find(block)\n if i == -1:\n return False\n string = string[i + len(block):]\n return match", "def match_patterns(pathname, patterns):\n for pattern in patterns:\n if fnmatch(pathname, pattern):\n return True\n return False", "def isValid(text):\n return bool(re.search(r'\\bnews\\b', text, re.IGNORECASE))", "def _memorized_fnmatch(name: str, pattern: str) -> bool:\n return bool(_compile_fnmatch(pattern).match(name))" ]
[ "0.7849038", "0.76907206", "0.76165885", "0.73174125", "0.7219586", "0.7164653", "0.68953913", "0.6827941", "0.67844343", "0.6763894", "0.6674704", "0.6618624", "0.66003686", "0.6596534", "0.6573882", "0.6555509", "0.655082", "0.65427655", "0.65384084", "0.65255356", "0.6514509", "0.6504024", "0.6499674", "0.64985913", "0.64705473", "0.6465665", "0.6441402", "0.64269567", "0.6425986", "0.6424524" ]
0.82227486
0
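Editor's note: a behavior check for the Like document above; note that fnmatch also folds case on case-insensitive platforms, which this sketch does not exercise.

import fnmatch

def Like(text, pattern):
    return fnmatch.fnmatch(text, pattern)

print(Like("report2024.txt", "report*.txt"))    # True: * spans any run of characters
print(Like("cat", "c?t"))                       # True: ? matches exactly one character
print(Like("cart", "c?t"))                      # False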
Return True if X implies Y. Performs a bitwise comparison of identically positioned bits and sets the corresponding bit in the output.
def Imp(x, y):
    ix, iy = int(x), int(y)
    if not (ix or iy):
        result = 1
    else:
        result = 0
    while ix or iy:
        # Shift result by one bit
        result = result << 1
        # Get the bits for comparison
        x_bit1 = ix & 1
        y_bit1 = iy & 1
        if not (x_bit1 and not y_bit1):
            result = result | 1
        ix = ix >> 1
        iy = iy >> 1
    if isinstance(x, bool) and isinstance(y, bool):
        return bool(result)
    else:
        return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __and__(self, other):\n return BitBoard(self.num & other.num)", "def __iand__(self, other):\n self.truths = self.truths | other.truths\n return self", "def __or__(self, y):\n return self.__and__(y) ^ self ^ y", "def __and__(self, other):\n return np.logical_and(self.array, other.array)", "def _logical_equal(x, y):\n x_ = _static_value(x)\n y_ = _static_value(y)\n if x_ is None or y_ is None:\n return math_ops.equal(x, y)\n return constant_op.constant(np.array_equal(x_, y_))", "def compare(self, x, y):\n return (self.ordering[x][y] is True) or (x == y)", "def is_mutually_bit_compatible_with(self, other: 'CompositeType') -> bool:\n return self.bit_length_set == other.bit_length_set", "def equals(x, y):\n return x == y", "def equals(self, other: InputTransform) -> bool:\n return super().equals(other=other) and (self.reverse == other.reverse)", "def __eq__(self, other):\n\n if isinstance(other, (int, type(Zero()))):\n if other == 0:\n if self.args == []:\n return True\n else:\n return False\n\n frame = self.args[0][1]\n for v in frame:\n if expand((self - other) & v) != 0:\n return False\n return True", "def x_in_y(self, x: int, y: int) -> bool:\n return len(set(self.MAPPING[x] + self.MAPPING[y])) == len(self.MAPPING[y])", "def add_bitwise(x, y):\n\n answer = 0\n carry_in = 0\n k = 1\n iterations_tracker = x | y\n\n while (iterations_tracker):\n x_k = x & k\n y_k = y & k\n\n answer |= x_k ^ y_k ^ carry_in\n carry_out = majority_bitwise(x_k, y_k, carry_in)\n\n carry_in = carry_out << 1\n\n k <<= 1\n iterations_tracker >>= 1\n\n if (carry_in):\n answer |= carry_in\n\n return answer", "def _coincident(a,b):\n return np.array_equal(a, b) or np.array_equal(np.flipud(a),b)", "def is_equal(self, a, b):\n return a.X[0] == b.X[0]", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y", "def c_equals(a, b):\n alpha = library.PixelGetAlpha\n return bool(library.IsPixelWandSimilar(a, b, 0) and\n alpha(a) == alpha(b))", "def same(self, x: int, y: int):\n\n return self.find(x) == self.find(y)", "def compare(a,b=0,kind=0):\n return { 0: (a ^ b ) & a,\n 1: (a ^ lshift(a)) & a,\n -1: (a ^ rshift(a)) & a}[kind] & _mask", "def __eq__(self, other):\n x_eq = self.x == other.x\n y_eq = self.y == other.y\n return x_eq and y_eq", "def __eq__(self, other):\n x_eq = self.x == other.x\n y_eq = self.y == other.y\n return x_eq and y_eq", "def same(self, x, y):\n return self.find(x) == self.find(y)", "def is_same_as(self, other) -> bool:\n return self.x == other.x and self.y == other.y", "def __eq__(self, other):\n return self.x == other.x and self.y == other.y", "def __le__(self, other):\n return self.x ** 2 + self.y ** 2 <= other.x ** 2 + other.y ** 2", "def __ior__(self, y):\n xor_result = self ^ y\n return self.__iand__(y).__ixor__(xor_result)", "def __eq__(self, rhs):\n return self.x == rhs.x and self.y == rhs.y", "def __eq__(self, other):\r\n return self.label == other.label and self.positive_state == other.positive_state", "def alpha_equivalent(self, other) -> bool:\n raise NotImplementedError()", "def __eq__(self, other):\n if not isinstance(other, PantsMappingClass):\n # print(\"A\")\n return False\n # if other._pants_decomposition != self._pants_decomposition:\n # print(\"B\")\n # return False\n # print(\"C\")\n return (self * other.inverse()).is_identity()", "def implies(A, B):\n\treturn B or not A" ]
[ "0.5854761", "0.584245", "0.58119255", "0.57361525", "0.57329947", "0.57323116", "0.5731651", "0.5688032", "0.5653026", "0.56213224", "0.5614277", "0.5599483", "0.55569243", "0.5550215", "0.55319524", "0.5531421", "0.5518942", "0.5518383", "0.5518057", "0.5518057", "0.5515992", "0.54956555", "0.5491868", "0.5487839", "0.5482557", "0.5481608", "0.5451818", "0.54309666", "0.54309624", "0.5422514" ]
0.62820095
0
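Editor's note: the Imp document above reproduces the classical implication truth table for Boolean inputs, checked below; for multi-bit integers be aware that the loop consumes operand bits least-significant-first while shifting results left, so the integer output appears bit-reversed relative to the inputs.

def Imp(x, y):
    ix, iy = int(x), int(y)
    result = 1 if not (ix or iy) else 0
    while ix or iy:
        result = result << 1
        if not ((ix & 1) and not (iy & 1)):
            result = result | 1
        ix, iy = ix >> 1, iy >> 1
    return bool(result) if isinstance(x, bool) and isinstance(y, bool) else result

for a in (False, True):
    for b in (False, True):
        print(a, "Imp", b, "->", Imp(a, b))
# False Imp False -> True
# False Imp True  -> True
# True  Imp False -> False
# True  Imp True  -> True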
Load an image as a bitmap for display in a BitmapImage control
def LoadPicture(filename):
    return Bitmap(filename)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def BitmapFromImage(*args, **kwargs):\n val = _gdi_.new_BitmapFromImage(*args, **kwargs)\n return val", "def load_image(path_to_image, image_name):\n print(\"Loading: \", path_to_image + image_name, \" ...\")\n return Image.open(path_to_image + image_name)", "def set_image(self, image_URL, bkg = None):\r\n\r\n self.image = self.image = pygame.image.load(image_URL).convert()\r\n if not bkg == None:\r\n # Set our transparent color\r\n self.image.set_colorkey(white)\r\n self.rect = self.image.get_rect()\r\n if self.drawable:\r\n self.set_drawable()", "def CreateBitmapFromImage(*args, **kwargs):\n return _gdi_.GraphicsContext_CreateBitmapFromImage(*args, **kwargs)", "def loadImage(self, imagePath, customScaleFactor=None):\n\t\tif customScaleFactor: scaleFactor = customScaleFactor\n\t\telse: scaleFactor = self.IMAGESCALEUP\n\n\t\timg = pygame.image.load(imagePath)\n\t\timg = pygame.transform.scale(img, (img.get_width() * scaleFactor, img.get_height() * scaleFactor))\n\t\timg.convert_alpha()\n\t\treturn img", "def load_image(self, image_id):\n info = self.image_info[image_id]\n # bg_color = np.array(info['bg_color']).reshape([1, 1, 3])\n # image = np.ones([info['height'], info['width'], 3], dtype=np.uint8)\n # image = image * bg_color.astype(np.uint8)\n # for shape, color, dims in info['shapes']:\n # image = self.draw_shape(image, shape, dims, color)\n\n width, height = info['width'], info['height']\n\n if info['real']:\n # load image from disk\n impath = os.path.join(self.real_image_dirpath, info['real_image_path'])\n image = cv2.imread(impath,1)\n image = cv2.resize(image, (width, height), cv2.INTER_CUBIC)\n else:\n # synthesize image\n background_path = info['background_image_path']\n card_template_path = info['card_template_path']\n cornerpoints = info['cornerpoints']\n image = self.synthesize_image(card_template_path, background_path, cornerpoints, (width, height))\n return image", "def load_image(img_file_name):\n file_name = os.path.join('.', 'images', img_file_name)\n img = pygame.image.load(file_name)\n img.convert()\n return img", "def load_image(file_path):\r\n return Image.open(file_path)", "def CreateBitmapFromImage(*args, **kwargs):\n return _gdi_.GraphicsRenderer_CreateBitmapFromImage(*args, **kwargs)", "def set_image(self, path):\r\n \r\n image = self._load_image(path)\r\n self.image_raw = image\r\n self.image = ImageTk.PhotoImage(image)\r\n self.image_panel.configure(image=self.image)", "def load_image(self, **kwargs):\n ...", "def load_image():\n # pylint: disable=global-statement\n global current_frame, current_loop, frame_count, frame_duration, bitmap\n while sprite_group:\n sprite_group.pop()\n\n filename = SPRITESHEET_FOLDER + \"/\" + file_list[current_image]\n\n bitmap = displayio.OnDiskBitmap(filename)\n ### Change the palette value proportional to BRIGHTNESS\n bitmap.pixel_shader[1] = image_brightness(brightness)\n sprite = displayio.TileGrid(\n bitmap,\n pixel_shader=bitmap.pixel_shader,\n tile_width=bitmap.width,\n tile_height=matrix.display.height,\n )\n\n sprite_group.append(sprite)\n\n current_frame = 0\n current_loop = 0\n frame_count = int(bitmap.height / matrix.display.height)\n frame_duration = DEFAULT_FRAME_DURATION", "def load_image(self, filename):\n return pygame.image.load(os.path.join('images', filename))", "def load_image_i(self, img_tk):\n\n self.p2_label_img.configure(image=img_tk)\n self.p2_label_img.image = img_tk", "def _load_image(path):\r\n image = Image.open(path)\r\n size = image.size\r\n \r\n image = image.resize((550,550), 
Image.ANTIALIAS)\r\n# image = image.thumbnail((200,200), Image.ANTIALIAS)\r\n return image", "def _image(filename):\n return TK.PhotoImage(file=filename)", "def _load_image(self, imfile):\n im = Image.open(str(imfile))\n dpi_ratio = num.true_divide(self.expected_dpi, num.array(im.info['dpi']))\n newsize = (num.array(im.size) * dpi_ratio).astype('i')\n if not all(newsize == num.array(im.size)):\n im = im.resize(newsize, Image.BICUBIC)\n img = num.array(im.convert('L')) # convert to greyscale array 0-255\n return img", "def wx2PIL(self, bitmap):\n size = tuple(bitmap.GetSize())\n try:\n buf = size[0]*size[1]*3*\"\\x00\"\n bitmap.CopyToBuffer(buf)\n except:\n del buf\n buf = bitmap.ConvertToImage().GetData()\n return Image.frombuffer(\"RGB\", size, buf, \"raw\", \"RGB\", 0, 1)", "def setImage(self, img):\n self.img.setPixmap(QtGui.QPixmap(img))", "def load_image(data_dir, image_file):\n image_path = os.path.join(data_dir, image_file)\n image = mpimg.imread(image_path)\n return image", "def image(self):\r\n\r\n if sys.version < '3':\r\n imageio = StringIO.StringIO(self._image_data)\r\n else:\r\n imageio = StringIO.BytesIO(self._image_data)\r\n\r\n try:\r\n source_image = PILImage.open(imageio)\r\n img = PILImage.new('RGBA', source_image.size, (0, 0, 0, 0))\r\n\r\n if source_image.mode == 'L':\r\n alpha = source_image.split()[0]\r\n transparency = source_image.info.get('transparency')\r\n mask = PILImage.eval(alpha, lambda a: 0 if a == transparency else 255)\r\n img.paste(source_image, (0, 0), mask=mask)\r\n else:\r\n img.paste(source_image, (0, 0))\r\n except IOError, e:\r\n raise PILUnavailableError(e.args[0].split()[1])\r\n finally:\r\n imageio.close()\r\n\r\n self.original_width, self.original_height = img.size\r\n\r\n # Crop the image searching for the smallest possible bounding box\r\n # without losing any non-transparent pixel.\r\n # This crop is only used if the crop flag is set in the config.\r\n if self.config['crop']:\r\n img = img.crop(img.split()[-1].getbbox())\r\n return img", "def load_image(self, image_id):\n# logger.info(\"image {}\".format(image_id))\n info = self.image_info[image_id]\n if info[\"image\"] is None:\n im = self.gen_imgs[info[\"path\"]][\"input_images\"][info[\"image_index\"]]\n image = np.ones([info['height'], info['width'], 1], dtype=np.uint8)\n image[:,:,0] = self.gen_imgs[info[\"path\"]][\"input_images\"][info[\"image_index\"]]\n# image[:,:,1] = self.gen_imgs[info[\"path\"]][\"input_images\"][info[\"image_index\"]]\n# image[:,:,2] = self.gen_imgs[info[\"path\"]][\"input_images\"][info[\"image_index\"]]\n self.image_info[image_id][\"image\"] = image\n# logger.info(\"cached {}\".format(image_id))\n else:\n image = self.image_info[image_id][\"image\"]\n# logger.info(\"missed {}\".format(image_id))\n\n return image", "def get_image(image_path):\r\n\r\n return Image.open(image_path)", "def load_image(self):\n # Minimal progress display while image is loaded.\n group = displayio.Group()\n group.append(centered_label('LOADING...', 40, 3))\n #self.rect = Rect(-board.DISPLAY.width, 120,\n # board.DISPLAY.width, 40, fill=0x00B000)\n #group.append(self.rect)\n board.DISPLAY.show(group)\n\n # pylint: disable=eval-used\n # (It's cool, is a 'trusted string' in the code)\n duration = eval(TIMES[self.time]) # Playback time in seconds\n # The 0.9 here is an empirical guesstimate; playback is ever-so-\n # slightly slower than benchmark speed due to button testing.\n rows = int(duration * self.rows_per_second * 0.9 + 0.5)\n # Remap brightness from 0.0-1.0 to brightness_range.\n 
brightness = (self.brightness_range[0] + self.brightness *\n (self.brightness_range[1] - self.brightness_range[0]))\n try:\n self.num_rows = self.bmp2led.process(self.path + '/' +\n self.images[self.image_num],\n self.tempfile,\n rows, brightness,\n self.loop,\n self.load_progress)\n except (MemoryError, BMPError):\n group = displayio.Group()\n group.append(centered_label('TOO BIG', 40, 3))\n board.DISPLAY.show(group)\n sleep(4)\n\n board.DISPLAY.show(displayio.Group()) # Clear display\n self.clear_strip() # LEDs off", "def load_image(self, image_name, piece_name):\n img = ImageTk.PhotoImage(Image.open(image_name))\n self.loaded_images[piece_name] = (img, image_name)\n return img", "def load_image(self):\n try:\n return Image.open(self._path, 'r')\n except IOError:\n messagebox.showerror(\"Error\", \"Wrong sprite file path!\")", "def from_image(img):\n canvas = Canvas(100, 100)\n canvas.img = img\n if not canvas.img.mode in (\"RGB\",\"RGBA\"):\n canvas.img = canvas.img.convert(\"RGBA\")\n canvas.drawer = aggdraw.Draw(canvas.img)\n canvas.pixel_space()\n return canvas", "def load(path) -> Image:\n return Image.open(path)", "def load_image(image_path):\n image = io.imread(image_path)\n io.imshow(image)\n io.show()\n print(\"Size of the image is {} KB\".format(round(os.path.getsize(image_path)/1024,2)))\n return image", "def LoadFile(*args, **kwargs):\n return _gdi_.Bitmap_LoadFile(*args, **kwargs)" ]
[ "0.6720157", "0.6664427", "0.66112494", "0.6585068", "0.6558995", "0.655867", "0.65482175", "0.65217286", "0.6480828", "0.6468993", "0.6439372", "0.64379865", "0.6414927", "0.6369924", "0.6343527", "0.63117355", "0.62969905", "0.625913", "0.6219303", "0.621666", "0.62067175", "0.6172451", "0.6164556", "0.61632955", "0.61631775", "0.616003", "0.6159165", "0.6158131", "0.6150944", "0.61489" ]
0.7395635
0
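Editor's note: the Bitmap name in the LoadPicture document above is assumed to come from wxPython (wx.Bitmap), which is how PythonCard-era runtimes typically wire it; the file path below is a placeholder.

import wx

Bitmap = wx.Bitmap    # assumption: the runtime aliases wx.Bitmap

def LoadPicture(filename):
    return Bitmap(filename)

app = wx.App(False)              # a wx.App must exist before image handlers load
bmp = LoadPicture("logo.png")    # placeholder path; any PNG/BMP on disk works
print(bmp.IsOk(), bmp.GetWidth(), bmp.GetHeight())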
Do a VB LSet. Left-aligns a string within a string variable, or copies a variable of one user-defined type to another variable of a different user-defined type. LSet stringvar = string. LSet replaces any leftover characters in stringvar with spaces. If string is longer than stringvar, LSet places only the leftmost characters, up to the length of stringvar, in stringvar. Warning: Using LSet to copy a variable of one user-defined type into a variable of a different user-defined type is not recommended. Copying data of one data type into space reserved for a different data type can cause unpredictable results. When you copy a variable from one user-defined type to another, the binary data from one variable is copied into the memory space of the other, without regard for the data types specified for the elements.
def LSet(var, value):
    # Left-align value in a field as wide as var, padding the remainder with spaces
    return value[:len(var)] + " " * (len(var) - len(value))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def RSet(var, value):\n return \" \" * (len(var) - len(value)) + value[:len(var)]", "def setto(self, s):\n\t\tlength = len(s)\n\t\tself.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]\n\t\tself.k = self.j + length", "def string_swap(seq1, seq2):\n\n l1 = len(seq1)\n l2 = len(seq2)\n if l1 >= l2: # If l1 is already longer than l2, no need to swap strings\n s1 = seq1\n s2 = seq2\n else: # If l2 is longer than l1, swap the strings and the lengths\n s1 = seq2\n s2 = seq1\n l1, l2 = l2, l1 # swaps the two lengths\n\n return s1, s2, l1, l2", "def set_string(string, hash):\r\n # Pad out string with 3 nulls\r\n string = string + ([NULL_STRING] * 3)\r\n\r\n # If the string now longer than STRING_LENGTH, cut it shorter\r\n if len(string) > STRING_LENGTH:\r\n string = string[:STRING_LENGTH]\r\n\r\n # If the string is still too short, pad out with the hash\r\n if len(string) < STRING_LENGTH:\r\n string = string + hash[len(string) : STRING_LENGTH]\r\n\r\n return string", "def assign(self, *args):\n return _libsbml.string_assign(self, *args)", "def set_raw_string(self, string, length):\n if len(string) != length:\n raise ValueError('Length of passed string does not match length')\n self.originstring = string\n self.stringlength = length", "def _string_reversial(string : list, start : int, end : int): # function is inplace\n if len(string) < 2:\n return\n\n while end > start:\n string[start], string[end] = string[end], string[start]\n start += 1\n end -=1", "def laceStrings(s1, s2):\n # Your Code Here\n minLen = min(len(s1), len(s2))\n s3 = \"\".join(y for x in zip(s1, s2) for y in x) + s1[minLen:] + s2[minLen:]\n return s3", "def set_strmem_type(self, *args):\n return _ida_hexrays.vdui_t_set_strmem_type(self, *args)", "def __Subst(self, m, s, l):\n if s is None:\n s = ''\n #if type(s) is types.LongType:\n #1.5.2: s = str(s)[:-1]\n return self.regexp.Subst(l, DTL.TemplateRegExp.macros[m], str(s))", "def setLSData(*args):\n args[0].Data.LSData.lc_data = args[1]", "def lsplit(self, string):\n rhs = string()\n lhs = string()\n pattern_match=string()\n return lhs, pattern_match, rhs", "def laceStrings(s1, s2):\n s3= \"\"\n x=0\n \n while x < len(s1):\n s3 += s1[x]\n s3 += s2[x]\n x += 1\n if x >= len(s2):\n s3 += s1[len(s2):]\n return s3\n \n s3 += s2[len(s1):]\n return s3", "def __le__(self, *args):\n return _libsbml.string___le__(self, *args)", "def __setitem__(self, *args):\n return _libsbml.string___setitem__(self, *args)", "def set0 (strlist,pos):\r\n hold = []\r\n while len(strlist) > pos:\r\n hold.append(strlist.pop(len(strlist)-1))\r\n hold.reverse()\r\n return strlist,hold", "def STRtoMEM_memcpy(string, addr, constraint, assertion, limit=None, lmax=STR_TO_MEM_LMAX , addr_str=None, hex_info=False):\n if( not addr_str ):\n addr_str = hex(addr)\n \n # Getting strcpy function \n (func_name, func_addr ) = getFunctionAddress('memcpy')\n if( not func_addr ):\n verbose('Could not find memcpy function')\n return (None,None)\n elif( not constraint.badBytes.verifyAddress(func_addr)):\n verbose(\"memcpy address ({}) contains bad bytes\".format(hex(func_addr)))\n return (None,None)\n \n # We decompose the string in substrings to be copied\n substrings_addr = findBytes(string, badBytes = constraint.getBadBytes())\n if( not substrings_addr ):\n return (None,None)\n\n # Find delivery address\n substr_lengthes = [len(substr[1]) for substr in substrings_addr]\n if( not limit is None ):\n custom_stack = find_closest_base_fake_stack_address(addr, limit, substr_lengthes, constraint)\n if( custom_stack is None 
):\n verbose(\"Couldn't write string in memory because of bad bytes\")\n return (None,None)\n else:\n custom_stack = find_closest_base_fake_stack_address(addr, addr+sum(substr_lengthes), substr_lengthes, constraint)\n if( custom_stack is None ):\n verbose(\"Couldn't write string in memory because of bad bytes\")\n return (None,None)\n if( custom_stack != addr ):\n addr_str = hex(custom_stack)\n \n # Build chain \n res = ROPChain()\n offset = 0 \n saved_custom_stack = custom_stack\n for (substring_addr,substring_str) in substrings_addr:\n if( hex_info ):\n substring_info = \"'\"+'\\\\x'+'\\\\x'.join([\"%02x\"%ord(c) for c in substring_str])+\"'\"\n else:\n substring_info = \"'\"+substring_str+\"'\"\n comment3 =\"Arg3: \" + string_ropg(str(len(substring_str)))\n comment2 =\"Arg2: \" + string_ropg(substring_info)\n comment1 =\"Arg1: \" + string_ropg(\"{} + {}\".format(addr_str, offset))\n \n func_call = build_call(func_name, [custom_stack, substring_addr, len(substring_str)],\\\n constraint, assertion, [comment1, comment2, comment3], optimizeLen=True)\n \n if( isinstance(func_call, str) ):\n verbose(\"memcpy: \" + func_call)\n return (None,None)\n \n res.addChain(func_call)\n if( len(res) > lmax ):\n return (None,None)\n \n # Adjust\n custom_stack = custom_stack + len(substring_str)\n offset = offset + len(substring_str)\n\n return (saved_custom_stack,res)", "def truncate(self, Ls=None, germs=None, prepStrs=None, effectStrs=None, seqs=None):\n Ls = self.Ls if (Ls is None) else Ls\n germs = self.germs if (germs is None) else germs\n prepStrs = self.prepStrs if (prepStrs is None) else prepStrs\n effectStrs = self.effectStrs if (effectStrs is None) else effectStrs\n cpy = LsGermsStructure(Ls, germs, prepStrs,\n effectStrs, self.aliases, self.sequenceRules)\n\n #OLD iPreps = [i for i, prepStr in enumerate(self.prepStrs) if prepStr in prepStrs]\n #OLD iEffects = [i for i, eStr in enumerate(self.effectStrs) if eStr in effectStrs]\n #OLD fidpairs = list(_itertools.product(iPreps, iEffects))\n all_fidpairs = list(_itertools.product(list(range(len(prepStrs))), list(range(len(effectStrs)))))\n\n for (L, germ), plaq in self._plaquettes.items():\n basestr = plaq.base\n if seqs is None:\n fidpairs = all_fidpairs\n else:\n fidpairs = []\n for i, j in all_fidpairs:\n if prepStrs[i] + basestr + effectStrs[j] in seqs:\n fidpairs.append((i, j))\n\n if (L in Ls) and (germ in germs):\n cpy.add_plaquette(basestr, L, germ, fidpairs)\n\n cpy.add_unindexed(self.unindexed) # preserve unindexed strings\n return cpy", "def set(self, U):\n pass", "def set(self, U):\n pass", "def __size_restriction_correct_string_string(self):\n\n strTestName = 'String size lower or equal to the size of another string (correct)'\n RxCSObject = _RxCSobject()\n\n # Firstly, let us define a reference parameter\n RxCSObject.paramAddMan('strRefParameter1', 'Str ref. 
parameter')\n RxCSObject.paramType('strRefParameter1', str)\n\n # Now, let me define a string\n RxCSObject.paramAddMan('parameter1', 'String parameter')\n RxCSObject.paramType('parameter1', str)\n RxCSObject.paramSizLE('parameter1', 'strRefParameter1')\n\n RxCSObject.strRefParameter1 = 'bbbccc'\n RxCSObject.parameter1 = 'aaabb'\n\n self.__parametersCheck_error(RxCSObject, 'correct', strTestName)", "def set(self, arg: SeField[Any]) -> str:\n if is_bare_set(arg.type):\n return f\"list({arg.varname}) if convert_sets else {arg.varname}\"\n else:\n earg = arg[0]\n earg.name = \"v\"\n return (\n f\"[{self.render(earg)} for v in {arg.varname}] \"\n f\"if convert_sets else set({self.render(earg)} for v in {arg.varname})\"\n )", "def set_lvar_type(self, *args):\n return _ida_hexrays.lvar_t_set_lvar_type(self, *args)", "def setstring(self):\n self._str = 's '+' '.join([self.src, self.start, self.size,\n self.strand, self.srcSize, self.text])+'\\n'", "def set_lvar_type(self, *args):\n return _ida_hexrays.vdui_t_set_lvar_type(self, *args)", "def SetAlignment(self, l):\r\n\r\n self.alignment = l", "def test_vlstring_log(self):\n dt = h5t.special_dtype(vlen=str)\n htype = h5t.py_create(dt, logical=True)\n self.assertIsInstance(htype, h5t.TypeStringID)\n self.assertEqual(htype.is_variable_str(), True)\n self.assertEqual(htype.get_cset(), h5t.CSET_ASCII)\n self.assertEqual(htype.get_strpad(), h5t.STR_NULLTERM)", "def format_set(set, isWide=False):\n if isWide:\n list = [s for s in set]\n ret = \";\".join(list)\n return ret\n else:\n if len(set) == 0:\n return \"\"\n elif len(set) == 1:\n return \"%s\" % iter(set).next()\n else:\n return \"%s...\" % iter(set).next()", "def _set_lb(o, d):\n o.setlb(d)", "def laceStrings(s1, s2):\n # \n s3 = '' # new interlaced string\n i = 0 \n for letter in s1:\n s3 += letter\n if i < len(s2):\n s3 += s2[i]\n i+= 1\n while i < len(s2):\n s3 += s2[i]\n i+= 1 \n return s3\n # end of code " ]
[ "0.5684025", "0.52701616", "0.5119802", "0.5114327", "0.50312454", "0.5026097", "0.48728633", "0.48196828", "0.47808087", "0.46736914", "0.46631265", "0.46533147", "0.46418238", "0.4633189", "0.46091893", "0.457231", "0.45466822", "0.45454702", "0.45427588", "0.45427588", "0.45113173", "0.45078713", "0.45042405", "0.44874698", "0.44544134", "0.44503942", "0.44496775", "0.4431152", "0.44299313", "0.44286713" ]
0.6876957
0
Returns a string in which a specified substring has been replaced with another substring a specified number of times. The return value of the Replace function is a string, with substitutions made, that begins at the position specified by start and concludes at the end of the expression string. It is not a copy of the original string from start to finish.
def Replace(expression, find, replace, start=1, count=-1):
    if find:
        return expression[:start - 1] + expression[start - 1:].replace(find, replace, count)
    else:
        return expression
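A short usage sketch traced from the code above (start is 1-based and the default count of -1 replaces every occurrence; note that characters before start are kept in the result as written here):

>>> Replace("abcabc", "a", "X")
'XbcXbc'
>>> Replace("abcabc", "a", "X", start=3)
'abcXbc'
>>> Replace("abcabc", "a", "X", count=1)
'Xbcabc'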
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def REPLACE(old_text, start_num, num_chars, new_text):\n if start_num < 1:\n raise ValueError(\"start_num invalid\")\n return old_text[:start_num - 1] + new_text + old_text[start_num - 1 + num_chars:]", "def replace_in_string(string, length, substring, idx):\n return string[:idx]+substring+string[idx+length:]", "def sub(self, replace, string, count=0):\n return self.re.sub(replace, string, count)", "def bytes_replace(byte_str, start_idx, stop_idx, replacement):\n return byte_str[:start_idx] + replacement + byte_str[stop_idx:]", "def _CreateReplacement(content_string, old_substring, new_substring):\n b_content_string = content_string.encode(\"utf-8\")\n b_old_string = old_substring.encode(\"utf-8\")\n b_new_string = new_substring.encode(\"utf-8\")\n offset = b_content_string.find(b_old_string)\n return apply_edits.Edit('r', offset, len(b_old_string), b_new_string)", "def _string_subst_partial(self, val):\n def repl(m):\n k = m.group('id')\n replacement = self.bib_database.strings[k.lower()] if k.lower() in self.bib_database.strings else k\n pre = '\"' if m.group('pre') != '\"' else ''\n post = '\"' if m.group('post') != '\"' else ''\n return pre + replacement + post\n\n logger.debug('Substitute string definitions inside larger expressions')\n if '#' not in val:\n return val\n\n # TODO?: Does not match two subsequent variables or strings, such as \"start\" # foo # bar # \"end\" or \"start\" # \"end\".\n # TODO: Does not support braces instead of quotes, e.g.: {start} # foo # {bar}\n # TODO: Does not support strings like: \"te#s#t\"\n return self.replace_all_re.sub(repl, val)", "def replace_instance(s, to_replace, replacement):\n index=0\n matchedIndeces=0\n while index<len(s):\n if s[index]==to_replace[matchedIndeces]:\n matchedIndeces+=1\n if matchedIndeces>=len(to_replace):\n s=s[:index-(matchedIndeces-1)]+replacement+s[index+1:]\n index-=matchedIndeces-1\n matchedIndeces=0\n else:\n matchedIndeces=0\n index+=1\n return s", "def replacements(input_str, query, replace=\"\", num=0):\n check_parentheses = re.findall(\"\\([^()]*\\)\", query)\n check_replacement = re.findall(r\"\\\\[0-9]+\", replace)\n check_replacement = sorted([int(match[1:]) for match in check_replacement])\n if check_replacement and check_replacement[-1] > len(check_parentheses):\n raise AttributeError(\"There are more replacement match values specified than query parenthesized groups\")\n\n if num < 0:\n if check_replacement:\n for indx in sorted(range(check_replacement[-1]), reverse=True):\n indx += 1\n replace = re.sub(r\"\\\\%s\" % indx, r\"\\\\%s\" % (indx + 1), replace)\n right_replace = \"\\\\%s\" % (len(check_replacement) + 2)\n else:\n right_replace = \"\\\\2\"\n leftmost = str(input_str)\n new_str = str(input_str)\n rightmost = \"\"\n hash_to_split_on = \"UPNFSZ7FQ6RBhfFzwt0Cku4Yr1n2VvwVUG7x97G7\"\n for _ in range(abs(num)):\n if leftmost == \"\":\n break\n new_str = re.sub(r\"(.*)%s(.*)\" % query,\n r\"\\1%s%s%s\" % (hash_to_split_on, replace, right_replace), leftmost, 1)\n new_str = new_str.split(hash_to_split_on)\n if len(new_str) == 2:\n leftmost = new_str[0]\n rightmost = new_str[1] + rightmost\n new_str = leftmost + rightmost\n else:\n new_str = leftmost + rightmost\n break\n else:\n new_str = re.sub(query, replace, input_str, num)\n\n return new_str", "def multi_replace(string, substitutions):\n substrings = sorted(substitutions, key=len, reverse=True)\n regex = re.compile('|'.join(map(re.escape, substrings)))\n return regex.sub(lambda match: substitutions[match.group(0)], string)", "def 
multireplace(string, replacements):\n # Place longer ones first to keep shorter substrings from matching where the longer ones should take place\n # For instance given the replacements {'ab': 'AB', 'abc': 'ABC'} against the string 'hey abc', it should produce\n # 'hey ABC' and not 'hey ABc'\n substrs = sorted(replacements, key=len, reverse=True)\n\n # Create a big OR regex that matches any of the substrings to replace\n regexp = re.compile('|'.join(map(re.escape, substrs)))\n\n # For each match, look up the new string in the replacements\n return regexp.sub(lambda match: replacements[match.group(0)], string)", "def replace(text, search, repl, *indexes):\n search_len = len(search)\n len_diff = search_len - len(repl)\n while True:\n try:\n idx = text.index(search)\n except ValueError:\n return (text, *indexes)\n\n text = text[:idx] + repl + text[idx+search_len:]\n indexes = [i if i <= idx else i-len_diff for i in indexes]", "def rreplace(string, old, new, count):\n \n li = string.rsplit(old, count)\n return new.join(li)", "def rreplace(s, old, new, occurrence):\r\n li = s.rsplit(old, occurrence)\r\n return new.join(li)", "def rreplace(string, old, new, count):\n return string[::-1].replace(old[::-1], new[::-1], count)[::-1]", "def regex_replace(s, old, new, count=0):\n\n return re.sub(old, new, s, count=count)", "def multi_replace(string, replacements):\n # Place longer ones first to keep shorter substrings from matching where the longer ones should take place\n # For instance given the replacements {'ab': 'AB', 'abc': 'ABC'} against the string 'hey abc', it should produce\n # 'hey ABC' and not 'hey ABc'\n substrs = sorted(replacements, key=len, reverse=True)\n\n # Create a big OR regex that matches any of the substrings to replace\n regexp = re.compile('|'.join(map(re.escape, substrs)))\n\n # For each match, look up the new string in the replacements\n return regexp.sub(lambda match: replacements[match.group(0)], string)", "def SUBSTITUTE(text, old_text, new_text, instance_num=None):\n if not old_text:\n return text\n\n if not isinstance(new_text, basestring):\n new_text = str(new_text)\n\n if instance_num is None:\n return text.replace(old_text, new_text)\n\n if instance_num <= 0:\n raise ValueError(\"instance_num invalid\")\n\n # No trivial way to replace nth occurrence.\n i = -1\n for c in xrange(instance_num):\n i = text.find(old_text, i + 1)\n if i < 0:\n return text\n return text[:i] + new_text + text[i + len(old_text):]", "def replace(self):\n\n if self.replace_with is not None: # pylint: disable=no-member\n return substrings(\n self.regex,\n self.replace_with, # pylint: disable=no-member\n self.data,\n self.occurences, # pylint: disable=no-member\n )\n return self.data", "def replace_substrings(s, mapping):\n for (s1, repl) in mapping:\n s = s.replace(s1, repl)\n return s", "def _MultiReplace(data, repl):\n res = []\n prev = 0\n for (lo, hi, s) in sorted(repl):\n if prev < lo:\n res.append(data[prev:lo])\n res.append(s)\n prev = hi\n res.append(data[prev:])\n return ''.join(res)", "def test_simple_substitutions(self):\n m = strutils.MultiReplace({r'cat': 'kedi', r'purple': 'mor', })\n self.assertEqual(m.sub('The cat is purple'), 'The kedi is mor')", "def replace_and_adjust(\n input_text: str, match: Any, prefix_len: int,\n inverted_mapping: np.ndarray) -> Tuple[str, np.ndarray]:\n\n original_span_start = match.start() + prefix_len + 1\n original_span_end = match.end() - 1\n actual_string = input_text[original_span_start:original_span_end]\n new_text = input_text[:match.start()] + 
actual_string + input_text[match\n .end():]\n\n # Inverted mapping maps from remaining tokens to positions in original text\n new_inverted_mapping = np.zeros(len(new_text), dtype=np.int32)\n new_inverted_mapping[:match.start()] = inverted_mapping[:match.start()]\n\n new_span_start = match.start()\n new_span_end = match.start() + len(actual_string)\n new_inverted_mapping[new_span_start:new_span_end] = inverted_mapping[\n original_span_start:original_span_end]\n new_inverted_mapping[new_span_end:] = inverted_mapping[original_span_end +\n 1:]\n\n return new_text, new_inverted_mapping", "def test_substitutions_in_word(self):\n m = strutils.MultiReplace({r'cat': 'kedi', r'purple': 'mor', })\n self.assertEqual(m.sub('Thecatispurple'), 'Thekediismor')", "def replace_in_str(rstring, repres):\n for k in sorted(repres.keys(), key=len, reverse=True):\n rstring = rstring.replace(k, repres[k])\n return rstring", "def replace_nth(sentence, word, new_word, n):\n find = sentence.find(word)\n # If find is not -1 we have found at least one match for the substring\n i = find != -1\n # loop util we find the nth or we find no match\n while find != -1 and i != n:\n # find + 1 means we start searching from after the last match\n find = sentence.find(word, find + 1)\n i += 1\n # If i is equal to n we found nth match so replace\n if i == n:\n return sentence[:find] + new_word + sentence[find+len(word):]\n return sentence", "def replace(string, replacements):\n # get all occurrences of the place holder key\n parts = string.split(place_holder_key)\n # only one part -> no place holder key found -> return the whole string\n if len(parts) == 1:\n return string\n\n keys = [part[:1] for part in parts[1:]]\n\n retval = parts[0]\n for i in range(0, len(keys)):\n # replace the place holder by the desired string and add the remaining of the command\n retval += str(replacements[keys[i]]) + str(parts[i+1][1:])\n\n return retval", "def str_replace(pat, rep, subject):\n return subject.replace(pat, rep)", "def expand_repeat(string, size):\n return_string = ''\n i = 0\n while len(return_string) < size:\n return_string += string[i]\n i += 1\n if i >= len(string):\n i = 0\n return return_string", "def laceStrings(s1, s2):\n # \n s3 = '' # new interlaced string\n i = 0 \n for letter in s1:\n s3 += letter\n if i < len(s2):\n s3 += s2[i]\n i+= 1\n while i < len(s2):\n s3 += s2[i]\n i+= 1 \n return s3\n # end of code ", "def prefix_replace(original, old, new):\n ..." ]
[ "0.69472235", "0.6930251", "0.66695154", "0.64962995", "0.63499135", "0.6083488", "0.6069293", "0.6031655", "0.6022877", "0.598244", "0.5966292", "0.5940701", "0.5935663", "0.5875606", "0.58752525", "0.58342385", "0.57794666", "0.57153463", "0.5702825", "0.56442857", "0.561269", "0.56062704", "0.5506044", "0.5496952", "0.5477836", "0.54694235", "0.5432323", "0.53693026", "0.5328188", "0.53181016" ]
0.7357595
0
Return the rightmost characters in the text
def Right(text, number):
    return text[-number:]
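Illustrative calls, plus one quirk worth noting: because text[-0:] equals text[0:], Right(text, 0) returns the whole string rather than the empty string VB's Right$ would give.

>>> Right("hello world", 5)
'world'
>>> Right("abc", 10)
'abc'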
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def characters_left(self):\r\n return self.max_chars - len(self.variable.get())", "def findAlphabeticallyLastWord(text):\n return max(text.split(' '))", "def get_max_character(strings):\n m=0\n for string in strings:\n for char in string:\n if char>m:\n m=char\n return m", "def longest_word_len(text):\r\n return np.max(np.array([len(word) for word in tokenization(text)]))", "def computeMaxWordLength(text):\n # BEGIN_YOUR_CODE (our solution is 1 line of code, but don't worry if you deviate from this)\n return(max(sorted(text.split(), reverse = True), key = len))\n # END_YOUR_CODE", "def longest_word_length(words):", "def Left(text, number):\n return text[:number]", "def checkio_best(text):\n text = text.lower()\n # text.count为函数,返回指定char的数量\n return max(string.ascii_lowercase, key=text.count)", "def mostwantedletter(text):\n temp = Counter(text.lower())\n alpha_keys = {k:temp[k] for k,v in temp.items() if k.isalpha()}\n max_keys = [k for k,v in alpha_keys.items() if alpha_keys[k] == max(alpha_keys.values())]\n if len(max_keys) == 1:\n return max_keys[0]\n else:\n return min(max_keys)", "def find_longest_word(s):\n return sorted(map(lambda si: (si, len(si)), s.split()), key=lambda item: item[1], reverse=True)[0][0]", "def computeMaxWordLength(text):\n return max(text.split(), key=getWordKey) # usually key argument is a function defined by 'def' or 'lambda'", "def findAlphabeticallyLastWord(text):\n # BEGIN_YOUR_CODE (our solution is 1 line of code, but don't worry if you deviate from this)\n return sorted(text.split())[-1]\n # END_YOUR_CODE", "def lengthOfLastWord(self, s):\n l = len(s)\n c = 0\n i = 1\n while l - i > -1:\n if s[-i] != ' ':\n c += 1\n elif c != 0:\n break\n\n i += 1\n return c", "def longest_ORF(dna):\n both_strings=find_all_ORFs_both_strands(dna)\n L=max(both_strings,key=len)\n Q=len(L)\n return Q\n\n #save out put of find all orfboth string to some variable", "def Right(n=1):\n return ESC + str(n) + 'C'", "def longest_ORF(dna):\n orfs = find_all_ORFs_both_strands(dna)\n maxorf =orfs[1];\n for s in orfs:\n if len(s)>len(maxorf):\n maxorf=s\n return maxorf", "def get_y_chars(self):\n _max = max([s.max() for s in self])\n return len(str(int(_max)))", "def last(word):\n\treturn word[-1]", "def RIGHT(string, num_chars=1):\n if num_chars < 0:\n raise ValueError(\"num_chars invalid\")\n return string[-num_chars:]", "def longest_ORF(dna):\n l = find_all_ORFs_both_strands(dna)\n max_len = 0\n r = \"\"\n # [???] 
what if there are ORFs with same length?\n # this function just get the first one.\n if len(l) == 0:\n return \"\"\n for o in l:\n if len(o) > max_len:\n r = o\n max_len = len(o)\n return r", "def lengthof_lastword(s):\n a = s.split()\n if a:\n return len(a[len(a)-1])\n return 0", "def last_token(self, text):\n if text is not None:\n text = text.strip()\n if len(text) > 0:\n word = self.safe_split(text)[-1]\n word = word.strip()\n return word\n return ''", "def keyword_length(text):\n text = scrub_string(text)\n a = [fabs(IC(text, ncol) - ENGLISH_IC) for ncol in range(1, MAX_LEN)]\n return a.index(min(a)) + 1", "def longest_ORF(dna):\n l = find_all_ORFs_both_strands(dna)\n longest=''\n if len(l)>=1:\n\t longest =max(l,key=len)\n return longest", "def get_most_common_non_ascii_char(file_path: str) -> str:\n with open(file_path, encoding=\"unicode-escape\") as f:\n text = f.read()\n chars = set(text) - set(\n string.punctuation + string.ascii_letters + string.digits + \"\\n \"\n )\n char_dict = {char: text.count(char) for char in chars}\n return sorted(char_dict.items(), key=lambda x: x[1])[-1][0]", "def last_chars(fh):\n output = ''\n with open(fh.name) as fh:\n for line in fh:\n output += line[-2]\n return output", "def last(word):\n return word[-1]", "def get_rarest_char(file_path: str) -> str:\n with open(file_path, encoding=\"unicode-escape\") as f:\n text = f.read()\n char_dict = {char: text.count(char) for char in set(text)}\n return sorted(char_dict.items(), key=lambda x: x[1])[0][0]", "def longest_ORF(dna):\n ORFs = find_all_ORFs_both_strands(dna)\n longest = ''\n for ORF in ORFs:\n if len(ORF) > len(longest):\n longest = ORF\n return longest", "def max_chars(self):\n return self.range_field[0] * self.range_field[1]" ]
[ "0.66617775", "0.6458333", "0.62414336", "0.61733127", "0.6130016", "0.6114353", "0.611083", "0.60179204", "0.59715396", "0.59695387", "0.59517705", "0.59111524", "0.59110934", "0.5877624", "0.58610624", "0.5852691", "0.58513427", "0.5844956", "0.58354264", "0.583465", "0.5832706", "0.58310366", "0.5825959", "0.57992584", "0.57957834", "0.5761962", "0.57616997", "0.57479453", "0.57318795", "0.56815475" ]
0.67059237
0
Do a VB RSet

Right aligns a string within a string variable.

RSet stringvar = string

If stringvar is longer than string, RSet replaces any leftover characters in stringvar with spaces, back to its beginning.
def RSet(var, value):
    return " " * (len(var) - len(value)) + value[:len(var)]
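Worked examples traced from the expression above: a short value is right-aligned with spaces, while a too-long value is truncated to the variable's width (the negative-length pad collapses to an empty string).

>>> RSet("     ", "ab")    # var is five spaces wide
'   ab'
>>> RSet("abc", "defgh")
'def'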
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def LSet(var, value):\n return value[:len(var)] + \" \" * (len(var) - len(value))", "def rstring(string):\n return RTEXT + string + NTEXT", "def _padright(width, s):\n fmt = \"{0:<%ds}\" % width\n return fmt.format(s)", "def rightpad(field, length):\r\n field = str(field)\r\n field_length = len(field)\r\n if field_length>length:\r\n field = field[:length]\r\n if field_length<length:\r\n while len(field)<length:\r\n field+=' '\r\n return field.upper()", "def setstring(self):\n self._str = 's '+' '.join([self.src, self.start, self.size,\n self.strand, self.srcSize, self.text])+'\\n'", "def _rzfill(string, to_len):\n if len(string) > to_len:\n raise ValueError(\"string is already longer than to_len\")\n return string + '0' * (to_len - len(string))", "def replace_in_str(rstring, repres):\n for k in sorted(repres.keys(), key=len, reverse=True):\n rstring = rstring.replace(k, repres[k])\n return rstring", "def set_string(string, hash):\r\n # Pad out string with 3 nulls\r\n string = string + ([NULL_STRING] * 3)\r\n\r\n # If the string now longer than STRING_LENGTH, cut it shorter\r\n if len(string) > STRING_LENGTH:\r\n string = string[:STRING_LENGTH]\r\n\r\n # If the string is still too short, pad out with the hash\r\n if len(string) < STRING_LENGTH:\r\n string = string + hash[len(string) : STRING_LENGTH]\r\n\r\n return string", "def r_pad(arg, length):\n if length <= len(arg):\n return arg\n else:\n return arg + \" \" * (length - len(arg))", "def rjustText( text = \"\", fillchar= \" \", fieldwidth = 78 ):\n ansistring_text = stringExtends.ansiStringClass( \"\" )\n if isinstance( text, ( str, unicode ) ):\n ansistring_text.Text = text\n\n ansistring_fillchar = stringExtends.ansiStringClass( \" \" )\n if isinstance( fillchar, ( str, unicode ) ):\n ansistring_fillchar.Text = fillchar\n\n return_fieldwidth = 78\n if isinstance( fieldwidth, ( int, float ) ):\n return_fieldwidth = int( fieldwidth )\n\n r = stringExtends.ansiStringClass( \"\" )\n if ansistring_text.rawTextLen() < return_fieldwidth:\n # need to do a little math ro figure out padding length, and apply padding\n padding_length = int( math.floor( ( return_fieldwidth - ansistring_text.rawTextLen() ) / ansistring_fillchar.rawTextLen() ) )\n r.Text = ( ansistring_fillchar.ansiTextFormat() * padding_length )\n if ( ansistring_text.rawTextLen() + r.rawTextLen() ) < return_fieldwidth:\n r.Text += ansistring_fillchar.ansiSlice( 0, ( return_fieldwidth - ( r.rawTextLen( ) + ansistring_text.rawTextLen( ) ) ) )\n r.Text += ansistring_text.ansiTextFormat()\n else:\n # we have to slice into the original text since it's longer than the fieldwidth\n r.Text = ansistring_text.ansiSlice( 0, return_fieldwidth )\n\n return r.Text", "def ljust(value, length):\n\n if value is None:\n value = ''\n else:\n value = str(value)\n value = value.ljust(length, ' ')\n return value", "def set_raw_string(self, string, length):\n if len(string) != length:\n raise ValueError('Length of passed string does not match length')\n self.originstring = string\n self.stringlength = length", "def _padboth(width, s):\n fmt = \"{0:^%ds}\" % width\n return fmt.format(s)", "def command_rset(self, arg):\n if arg:\n raise errors.BadArguments('RSET')\n self.reset_arguments()\n self.write_ok()", "def rjust(self, width, fillchar, _difference):\n return self._filler(fillchar, _difference) + self", "def r(self, s):\n\t\tif self.m() > 0:\n\t\t\tself.setto(s)", "def align_str(kv_str, block):\n # Align string to the next block boundary. 
The -1 is to accommodate\n # a newline at the end of the string.\n aligned_len = int((len(kv_str) + block - 1) / block) * block - 1\n return '{:<{width}}\\n'.format(kv_str, width=aligned_len)", "def rjust(value, length):\n\n if value is None or value == '':\n value = '0'\n else:\n value = str(value)\n value = value.rjust(length, '0')\n return value", "def setto(self, s):\n\t\tlength = len(s)\n\t\tself.b = self.b[:self.j+1] + s + self.b[self.j+length+1:]\n\t\tself.k = self.j + length", "def rjust(a, width, fillchar=' '):\n a_arr = numpy.asarray(a)\n width_arr = numpy.asarray(width)\n size = int(numpy.max(width_arr.flat))\n if numpy.issubdtype(a_arr.dtype, numpy.bytes_):\n fillchar = asbytes(fillchar)\n return _vec_string(\n a_arr, type(a_arr.dtype)(size), 'rjust', (width_arr, fillchar))", "def pad_string(self, string):\n return string.ljust(self.blockSize, self.PAD_CHAR)", "def _fixed_width_str(self, x, fill=' '):\n x_str = str(x)\n l = len(x_str)\n pad = self.width - l\n if pad < 0:\n raise Exception(\"Your string is too long!\")\n return fill * pad + x_str", "def ljustText( text = \"\", fillchar= \" \", fieldwidth = 78 ):\n ansistring_text = stringExtends.ansiStringClass( \"\" )\n if isinstance( text, ( str, unicode ) ):\n ansistring_text.Text = text\n\n ansistring_fillchar = stringExtends.ansiStringClass( \" \" )\n if isinstance( fillchar, ( str, unicode ) ):\n ansistring_fillchar.Text = fillchar\n\n return_fieldwidth = 78\n if isinstance( fieldwidth, ( int, float ) ):\n return_fieldwidth = int( fieldwidth )\n\n r = stringExtends.ansiStringClass( \"\" )\n if ansistring_text.rawTextLen() < return_fieldwidth:\n # need to do a little math ro figure out padding length, and apply padding\n padding_length = int( math.floor( ( return_fieldwidth - ansistring_text.rawTextLen() ) / ansistring_fillchar.rawTextLen() ) )\n r.Text = ansistring_text.ansiTextFormat() + ( ansistring_fillchar.ansiTextFormat() * padding_length )\n if r.rawTextLen() < return_fieldwidth:\n r.Text += ansistring_fillchar.ansiSlice( 0, ( return_fieldwidth - r.rawTextLen() ) )\n else:\n # we have to slice into the original text since it's longer than the fieldwidth\n r.Text = ansistring_text.ansiSlice( 0, return_fieldwidth )\n\n return r.Text", "def update(self):\n self.entire_string = self.lines[-1]\n self.str_ = self.REX.search(self.entire_string)['curstr']\n self.str_len = len(self.str_)\n self.whitespace_len = len(self.entire_string) - len(self.str_)", "def wrap(string, left=\"[\", right=\"]\"):\n if string:\n return left+string+right\n return \"\"", "def aligned_text(string: str, length: int, alignment: AlignmentType = AlignmentType.CENTER):\n diff = length - len(string)\n\n if diff < 0:\n raise AssertionError(\"Length of the string cannot be greater than the maximum length for alignment\")\n\n if alignment == AlignmentType.LEFT:\n return string + ' ' * diff\n elif alignment == AlignmentType.CENTER:\n bit = ' ' * (diff // 2)\n return bit + string + bit + (' ' if diff % 2 == 1 else '')\n elif alignment == AlignmentType.RIGHT:\n return ' ' * diff + string", "def align_strings(strings, header=''):\n spaces = [len(l)-len(l.lstrip()) for l in strings.data if l]\n min_spaces = min(spaces) if spaces else 0\n if min_spaces > 0 or header:\n for index in range(len(strings.data)):\n strings.data[index] = header + strings.data[index][min_spaces:]", "def strval(space, w_obj):\n return space.wrap(w_obj.str(space, quiet=False))", "def fill_with_spaces(line: string, width: int) -> string:\n size = len(line)\n spaces_left = width - size\n 
return line + (' ' * spaces_left)", "def pad_str_left(string, length: int, add: str) -> str:\n out_string = string\n while len(out_string) < length:\n out_string = add + out_string\n return out_string" ]
[ "0.66794926", "0.5602928", "0.5465059", "0.52692467", "0.52135104", "0.5187947", "0.5178085", "0.51736885", "0.51162773", "0.50952494", "0.50288147", "0.5014927", "0.5000405", "0.4966606", "0.49568656", "0.49412823", "0.4936924", "0.49336472", "0.49120805", "0.4890213", "0.4877539", "0.48762622", "0.48677623", "0.4850956", "0.48171985", "0.4774107", "0.47655937", "0.4760873", "0.4756103", "0.4739065" ]
0.7263928
0
Delete a setting in the central setting file
def DeleteSetting(appname, section, key):
    settings = _OptionsDB(appname)
    settings.delete(section, key)
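A call sketch mirroring VB's DeleteSetting appname, section, key. The _OptionsDB backend is not shown here, so treating it as vb2py's persistent options store is an assumption.

>>> DeleteSetting("MyApp", "Startup", "LastFile")   # assumed: removes Startup/LastFile from MyApp's stored options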
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delsetting(name):\r\n if '__delattr__' in settings.__class__.__dict__:\r\n delattr(settings, name)\r\n else:\r\n delattr(settings._wrapped, name)", "def clearSetting(self, name: unicode) -> None:\n ...", "def remove_setting(self, category, setting):\n category_instance = self.get_setting_category(category)\n if not category_instance:\n return\n if not setting in category_instance:\n return\n del category_instance[setting]\n settings = self.get_collection('_settings')\n\n if len(category_instance.keys()) == 1:\n settings.remove(category_instance['_id'])\n else:\n settings.update(category_instance)", "def remove_temporary_settings():\n if os.path.exists(\"settings.json\"):\n os.remove(\"settings.json\")", "def _remove_settings_file(self, server_id):\n\t\tsilent_remove(self.SettingsFolder + '{}.yml'.format(server_id))", "def delete(self, section, name):\n section = self._getSettingName(section)\n self._config.remove_option(section, name)\n self.save()", "def clear_settings(site_name): # untested - do I need/want this?\n return update_settings(site_name, {})", "def delKey(self, key ):\n if key in self.conf:\n del self.conf[key]", "def remove(key: str):\n global PREFERENCES\n\n if PREFERENCES.get(key):\n del PREFERENCES[key]\n\n write_config(PREFERENCES)", "def del_conf(self, path):\n\t\tself.monitor.removePath(path)\n\t\tself.cache.pop(path, None)", "def remove_console_setting(db, linenum):\n config_db = db.cfgdb\n\n table = \"CONSOLE_PORT\"\n\n data = config_db.get_entry(table, linenum)\n if data:\n config_db.mod_entry(table, linenum, None)\n else:\n ctx = click.get_current_context()\n ctx.fail(\"Trying to delete console port setting, which is not present.\")", "def handle_remove_setting(event):\n forex_type, currency_type, price_type = None, None, None\n tokens = event.message.text.split(\" \")\n if len(tokens) >= 4:\n forex_type = ForexType.get_type(tokens[1])\n currency_type = CurrencyType.get_type(tokens[2])\n price_type = PriceType.get_type(tokens[3])\n\n if forex_type is None or currency_type is None or price_type is None:\n line_bot.replyMessage(event.reply_token, \"設定格式錯誤\\n範例: '取消 買入 美元 低於'\")\n elif forex_notifier.removeNotify(event.source.user_id, currency_type, forex_type, price_type):\n line_bot.replyMessage(event.reply_token, \"成功設定-不通知\")\n else:\n line_bot.replyMessage(event.reply_token, \"設定失敗\")", "def remove_config(name):\n db = dbm.open(config_file, 'c')\n del db[name]\n db.close()", "def clean():\n Log.d(DEBUG_TAG, \"Delete config file...\")\n try:\n os.remove(CONFIG_FILE)\n except os.error as e:\n Log.e(DEBUG_TAG, \"Delete config file%s error, reason:%s\"%(CONFIG_FILE, e))", "def delete_account_key(configuration):\n os.remove(configuration.cm_key)", "def reset_settings():\n settings = Settings()\n settings.reset()\n settings.save()", "def test_del_property():\n\n contents = (\"[Info]\\n\"\n \"sdk = 23\")\n\n testutils.deploy_config_raw(contents)\n\n prop.del_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def settings_care(self, label): \n if label == 'del': \n try:\n os.remove(\"../logs/MaintainanceLog.xls\")\n except:\n print \"No file\"\n else:\n pass\n\n return", "def Run(self, args):\n p = parent.GetParent(args)\n return settings.Delete(name=('%s/accessApprovalSettings' % p))", "def delete(self):\r\n return self.connection.delete_launch_configuration(self.name)", "def _delete_option(key: str) -> None:\n try:\n del _config_options_template[key]\n del cast(Dict[str, ConfigOption], _config_options)[key]\n except Exception:\n # We don't care if the 
option already doesn't exist.\n pass", "def pre_global_system_config_delete(self, resource_id):\n pass", "def delete(configsetname):\n cnfset = configsetPath(configsetname)\n files = os.listdir(cnfset)\n for f in files: os.remove(os.path.join(cnfset, f))\n os.rmdir(cnfset)\n return None", "def unset(self, id, key):\n try:\n id_settings = self.id_dict[id]\n except KeyError:\n return\n del id_settings[key]", "def clearAllSettings(self) -> None:\n ...", "def remove_list_setting(self, category, setting, value):\n category_instance = self.get_setting_category(category)\n\n # To remove the value from the setting, the setting must exist\n if not category_instance:\n return\n if not setting in category_instance:\n return\n\n # Now lets try to remove the named setting\n try:\n category_instance[setting].remove(value)\n except ValueError:\n # It was not in the list.\n return\n\n settings = self.get_collection('_settings')\n settings.save(category_instance)\n return", "def remove_prompt(name, delete_config):\n\n with open(DATABASE_FILE_PATH) as f:\n config = json.load(f)\n path = config[name]\n del config[name]\n\n with open(DATABASE_FILE_PATH, 'w') as f:\n json.dump(config, f)\n\n if delete_config:\n os.remove(path)", "async def bing_clearsettings(self, ctx):\n message = ctx.message\n await self.bot.say(\"Are you sure you want to delete all of the \" +\n \"Bing cog's settings?\\n(y/n)\")\n response = await self.bot.wait_for_message(author=message.author)\n if response.content.lower().strip() == \"y\":\n clearauth()\n return await self.bot.say(\"Settings successfully cleared. \" +\n \"You need to reset the API key \" +\n \"before using the Bing cog again.\")\n else:\n return await self.bot.say(\"Cancelled clear operation.\")", "def remove_user_configuration(self):\n shutil.rmtree(self.test.user_conf_dir())", "def delete_rule(self, value):\n\n if value >= 0:\n if sublime.ok_cancel_dialog('Are you sure you want to delete the rule: \\'%s\\'?' % self.keys[value]):\n del self.regex_rules[self.keys[value]]\n sublime.load_settings('reg_replace_rules.sublime-settings').set('replacements', self.regex_rules)\n sublime.save_settings('reg_replace_rules.sublime-settings')" ]
[ "0.72362244", "0.7093934", "0.66168183", "0.6608237", "0.65604854", "0.64848113", "0.6457743", "0.63939166", "0.635336", "0.6336297", "0.6328854", "0.623674", "0.61954266", "0.6193954", "0.6153951", "0.612454", "0.604961", "0.60189205", "0.6010476", "0.5962726", "0.5918145", "0.58966357", "0.58932215", "0.58918273", "0.583471", "0.58235157", "0.581753", "0.5813701", "0.5768876", "0.5748755" ]
0.754308
0
Split a string using the delimiter.

If the optional limit is present then this defines the number of items returned. The compare is used for different string comparison types in VB, but this is not implemented at the moment.
def Split(text, delimiter=" ", limit=-1, compare=None):
    if compare is not None:
        raise VB2PYNotSupported("Compare options for Split are not currently supported")
    #
    if limit == 0:
        return VBArray(0)
    elif limit > 0:
        return Array(*str(text).split(delimiter, limit - 1))
    else:
        return Array(*str(text).split(delimiter))
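Example calls; Array and VBArray are not defined in this snippet, so treating them as vb2py's VB-style array wrappers around the split parts is an assumption.

>>> Split("a,b,c", ",")       # Array('a', 'b', 'c')
>>> Split("a,b,c", ",", 2)    # Array('a', 'b,c'); limit caps the number of items
>>> Split("a,b,c", ",", 0)    # VBArray(0), an empty array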
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def split(self, sep=None, maxsplit=None):\n return split(self, sep, maxsplit)", "def split(self, string, maxsplit=MAX_INT, include_separators=False):\n return self._split(\n string, maxsplit=maxsplit, include_separators=include_separators\n )", "def explode(delim, val, limit = None): \n if limit != None:\n return val.split(delim, limit)\n else:\n return val.split(delim)", "def __split_for_delimiter__(self, string):\n if not self.__delimiter__ == '':\n return string.split(self.__delimiter__)\n return string.split()", "def isplit(s, sep=None):\r\n sep, hardsep = r'\\s+' if sep is None else re.escape(sep), sep is not None\r\n exp, pos, l = re.compile(sep), 0, len(s)\r\n while True:\r\n m = exp.search(s, pos)\r\n if not m:\r\n if pos < l or hardsep:\r\n # ^ mimic \"split()\": ''.split() returns []\r\n yield s[pos:]\r\n break\r\n start = m.start()\r\n if pos < start or hardsep:\r\n # ^ mimic \"split()\": includes trailing empty string\r\n yield s[pos:start]\r\n pos = m.end()", "def split(a, sep=None, maxsplit=None):\n # This will return an array of lists of different sizes, so we\n # leave it as an object array\n return _vec_string(\n a, object_, 'split', [sep] + _clean_args(maxsplit))", "def split(\n string: str,\n splitters: Union[str, List[str]],\n count: Optional[int] = None,\n removeEmpty: int = 0,\n) -> List[str]:\n\n if count and count < 0:\n raise ValueError(\"Count cannot be less than zero\")\n\n if count == 0:\n return []\n\n if isinstance(splitters, str):\n if not removeEmpty:\n return string.split(splitters, count - 1 if count else -1)\n\n splitters = [splitters]\n\n splitters = [escape(x) for x in splitters] or [\" \"]\n\n i = 0\n splits: List[str] = []\n matches = re.finditer(\"|\".join(splitters), string)\n for m in matches:\n if count is not None and count <= 1:\n break\n\n split = string[i : m.start()]\n if split or not removeEmpty:\n splits.append(split)\n\n count = count - 1 if count is not None else count\n\n i = m.end()\n\n if (count is None or count and count > 0) and len(string) - i > -1:\n split = string[i:]\n if split or not removeEmpty:\n splits.append(split)\n\n return splits", "def test_string_ends_with_sep():\n assert my_splitter(\"aaa,bbb,\", \",\") == [\"aaa\", \"bbb\", \"\"]", "def test_splitDelimiters(self):\n r = irc.split(\"xx yyz\", 2)\n self.assertEqual([\"xx\", \"yy\", \"z\"], r)\n r = irc.split(\"xx\\nyyz\", 2)\n self.assertEqual([\"xx\", \"yy\", \"z\"], r)", "def partition(string, delimiter):\r\n sp = string.split(delimiter, 1)\r\n if len(sp) > 1:\r\n return sp[0], sp[1]\r\n else:\r\n return sp[0], \"\"", "def split_text(text: Union[str, List], max_size: int = 2000, delimiter: str = \"\\n\") -> List[str]:\n delim_length = len(delimiter)\n\n if isinstance(text, str):\n if len(text) < max_size:\n return [text]\n text = text.split(delimiter)\n else:\n if sum(len(i) for i in text) < max_size:\n return [\"\\n\".join(text)]\n\n output = []\n tmp_str = \"\"\n count = 0\n for fragment in text:\n fragment_length = len(fragment) + delim_length\n if fragment_length > max_size:\n raise ValueError(\"A single line exceeded the max length. 
Can not split!\") # TODO: Find a better way than throwing an error.\n if count + fragment_length > max_size:\n output.append(tmp_str)\n tmp_str = \"\"\n count = 0\n\n count += fragment_length\n tmp_str += f\"{fragment}{delimiter}\"\n\n output.append(tmp_str)\n\n return output", "def _split(string: str, n: int):\n return [string[start : start + n] for start in range(0, len(string), n)]", "def test_split_string(self):\n mytext = '2011 Senior PGA Championship presented by'\n string1, string2 = split_string(mytext, 25, 25)\n self.assertEqual(string1, '2011 Senior PGA')\n self.assertEqual(string2, 'Championship presented')", "def split(string, sep='\\t'):\n return text_type.split(string, sep)", "def split(value, delimiter):\n return value.split(delimiter)", "def test_split_string(self):\n self.assertEqual(('1-4', 14), split_string('1-4/14'))", "def split_escaped_delim (delimiter, string, count=0):\n assert len(delimiter) == 1\n\n split_expression = re.compile(r\"\"\"(?<!\\\\)%s\"\"\" % (delimiter))\n\n result = split_expression.split(string, count)\n\n return result", "def test_words_with_sep():\n assert my_splitter(\"bla,bla\", \",\") == [\"bla\", \"bla\"]", "def test_separators_only():\n assert my_splitter(\",ad,\", \"ad\") == [\",\", \",\"]", "def __string_splitter(self, arr, string, split_length):\n if len(string) < split_length:\n arr.append(string)\n return arr\n else:\n arr.append(string[:split_length])\n return self.__string_splitter(arr, string[split_length:], split_length)", "def test_two_chars_and_separator():\n assert my_splitter(\",J\", \",\") == [\"\", \"J\"]", "def my_splitter(to_split, separator=None):\n if separator is None:\n split_list_regex = re.compile(r'[^\\s]+')\n return split_list_regex.findall(to_split)\n\n split_list = []\n\n while separator in to_split:\n separators_location = to_split.find(separator, 0)\n separated_word = to_split[:separators_location]\n split_list.append(separated_word)\n to_split = to_split[separators_location + len(separator):]\n\n split_list.append(to_split)\n\n return split_list", "def rsplit(self, sep=None, maxsplit=None):\n return rsplit(self, sep, maxsplit)", "def split_str_into_len(s, l=2):\r\n return [s[i:i+l] for i in range(0, len(s), l)]", "def smart_split(x):\n return R_SPLIT_DELIM.split(x)", "def test_splitValidatesLength(self):\n self.assertRaises(ValueError, irc.split, \"foo\", 0)\n self.assertRaises(ValueError, irc.split, \"foo\", -1)", "def splitCount(self):\n return 0", "def split_string(self, string, n):\n if len(string) == 0:\n return ['']\n blocks = []\n while len(string) > 0:\n blocks.append(string[:n])\n string = string[n:]\n return blocks", "def rsplit(s, sep, maxsplits=0):\n L = s.split(sep)\n if not 0 < maxsplits <= len(L):\n return L\n return [sep.join(L[0:-maxsplits])] + L[-maxsplits:]", "def split( self, string ):\n splitted_string = []\n \n str_len = len( string )\n i = 0\n for j in range( str_len ):\n if string[j] in self.delimiters:\n if i != j:\n splitted_string.append( string[i:j] )\n i = j+1\n \n if i != j:\n splitted_string.append( string[i:j+1] )\n \n return splitted_string" ]
[ "0.6533399", "0.64230263", "0.6183412", "0.60216355", "0.5943199", "0.5683144", "0.5631569", "0.55919874", "0.5572784", "0.5486138", "0.5480019", "0.5479996", "0.54729277", "0.5463248", "0.54153234", "0.5413893", "0.5308845", "0.5301453", "0.5294461", "0.52909225", "0.528777", "0.5263682", "0.52502215", "0.5230965", "0.5200876", "0.5185365", "0.51816976", "0.51272196", "0.5126445", "0.51257473" ]
0.7360037
0
Choose from a list of expressions, each with its own condition.

The arguments are presented as a sequence of condition, expression pairs and the first condition that evaluates to a true value causes its expression to be returned. If no conditions are true then the function returns None.
def Switch(*args):
    arg_list = list(args)
    arg_list.reverse()
    #
    while arg_list:
        cond, expr = arg_list.pop(), arg_list.pop()
        if cond:
            return expr
    return None
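Traced examples (arguments are consumed in condition, expression pairs from the front; an odd number of arguments would exhaust the list mid-pair and raise IndexError):

>>> Switch(False, "first", True, "second")
'second'
>>> Switch(False, "first", False, "second") is None
True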
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_if(cond,seq):\n for x in seq:\n if cond(x): return x\n return None", "def find_if(cond,seq):\n for x in seq:\n if cond(x): return x\n return None", "def or_list(conditionList):\n return functools.reduce(numpy.logical_or, conditionList)", "def select(condlist, choicelist, default=0):\n n = len(condlist)\n n2 = len(choicelist)\n if n2 != n:\n raise ValueError, \"list of cases must be same length as list of conditions\"\n choicelist = [default] + choicelist\n S = 0\n pfac = 1\n for k in range(1, n+1):\n S += k * pfac * asarray(condlist[k-1])\n if k < n:\n pfac *= (1-asarray(condlist[k-1]))\n # handle special case of a 1-element condition but\n # a multi-element choice\n if type(S) in ScalarType or max(asarray(S).shape)==1:\n pfac = asarray(1)\n for k in range(n2+1):\n pfac = pfac + asarray(choicelist[k])\n if type(S) in ScalarType:\n S = S*ones(asarray(pfac).shape, type(S))\n else:\n S = S*ones(asarray(pfac).shape, S.dtype)\n return choose(S, tuple(choicelist))", "def _return(*args):\r\n to_return = ()\r\n\r\n for arg in args:\r\n cond, value = arg\r\n if cond:\r\n to_return += (value,)\r\n\r\n if len(to_return) == 1:\r\n return to_return[0]\r\n return to_return", "def and_list(conditionList):\n return functools.reduce(numpy.logical_and, conditionList)", "def cond(conditions, value):\n for predicate, transformer in conditions:\n if predicate(value):\n return transformer(value)", "def select(condlist, choicelist, default=0): # NOQA\n\n if len(condlist) != len(choicelist):\n raise ValueError(\n 'list of cases must be same length as list of conditions')\n\n if len(condlist) == 0:\n raise ValueError(\"select with an empty condition list is not possible\")\n\n if not cupy.isscalar(default):\n raise TypeError(\"default only accepts scalar values\")\n\n for i in range(len(choicelist)):\n if not isinstance(choicelist[i], cupy.ndarray):\n raise TypeError(\"choicelist only accepts lists of cupy ndarrays\")\n cond = condlist[i]\n if cond.dtype.type is not cupy.bool_:\n raise ValueError(\n 'invalid entry {} in condlist: should be boolean ndarray'\n .format(i))\n\n dtype = cupy.result_type(*choicelist)\n\n condlist = cupy.broadcast_arrays(*condlist)\n choicelist = cupy.broadcast_arrays(*choicelist, default)\n\n if choicelist[0].ndim == 0:\n result_shape = condlist[0].shape\n else:\n result_shape = cupy.broadcast_arrays(condlist[0],\n choicelist[0])[0].shape\n\n result = cupy.empty(result_shape, dtype)\n cupy.copyto(result, default)\n\n choicelist = choicelist[-2::-1]\n condlist = condlist[::-1]\n for choice, cond in zip(choicelist, condlist):\n cupy.copyto(result, choice, where=cond)\n\n return result", "def first_true(cls, iterable, default=None, pred=None):\n # first_true([a,b,c], x) --> a or b or c or x\n # first_true([a,b], x, f) --> a if f(a) else b if f(b) else x\n return next(filter(pred, iterable), default)", "def any_of(*conditions):\n def check():\n for c in conditions:\n if c():\n return True\n return False\n return check", "def And(*conditions):\n def andPred(db):\n from functools import reduce\n return reduce(lambda result, c: c(result),\n conditions, db)\n\n return andPred", "def which(*args):\n\n if len(args) == 0:\n raise InvalidParameterError(\n \"conditions and cases\", \"nothing\", explanation=\"'which' statement without arguments\"\n )\n if len(args) % 2 == 1:\n return which(*args[:-1], True, args[-1])\n for i in range(0, len(args), 2):\n if args[i]:\n return args[i + 1]\n raise InvalidParameterError(\n \"at least one condition applies\",\n \"no condition applied\",\n 
explanation=\"'which' command fell through (no case applied)\",\n )", "def get_conditions(inputs): \n # itertools.product summarizes all combinations of ordered conditions\n # at len = 1 it wraps values in tuples (0,) that confuse the timer below\n if hasattr(inputs[0], '__iter__'):\n return list(product(*inputs))\n else:\n return [[n] if not isinstance(n,(list,tuple)) else n for n in inputs]", "def select_first(condition):\n return where(condition) | unless(StopIteration, next)", "def Or(*conditions):\n def orPred(db):\n from functools import reduce\n return reduce(lambda result, c: result.add(c(db)),\n conditions, Result())\n\n return orPred", "def select(condlist, choicelist, default=0):\n\n if not use_origin_backend():\n if not isinstance(condlist, list):\n pass\n elif not isinstance(choicelist, list):\n pass\n elif len(condlist) != len(choicelist):\n pass\n else:\n val = True\n size_ = condlist[0].size\n for i in range(len(condlist)):\n if condlist[i].size != size_ or choicelist[i].size != size_:\n val = False\n if not val:\n pass\n else:\n return dpnp_select(condlist, choicelist, default).get_pyobj()\n\n return call_origin(numpy.select, condlist, choicelist, default)", "def _first(self, \n iterable, \n condition=lambda x: True):\n try:\n return next(x for x in iterable if condition(x))\n except:\n return None", "def cond(\n scheduler: Scheduler,\n parent_job: Job,\n sexpr: SchedulerExpression,\n cond_expr: Any,\n then_expr: Any,\n *rest: Any,\n) -> Promise:\n exprs = (cond_expr, then_expr) + rest\n\n def then(args):\n i, cond_value = args\n\n if cond_value:\n # Return 'then' clause.\n return scheduler.evaluate(exprs[i + 1], parent_job=parent_job)\n\n elif len(exprs) - i == 3:\n # No more expresses, so return 'otherwise' clause.\n return scheduler.evaluate(exprs[i + 2], parent_job=parent_job)\n\n else:\n # Recurse to next conditional clause.\n return scheduler.evaluate((i + 2, exprs[i + 2]), parent_job=parent_job).then(then)\n\n # Evaluate conditional clause.\n return scheduler.evaluate((0, cond_expr), parent_job=parent_job).then(then)", "def piecewise(x, condlist, funclist, *args, **kw):\n x = asanyarray(x)\n n2 = len(funclist)\n if isscalar(condlist) or \\\n not (isinstance(condlist[0], list) or\n isinstance(condlist[0], ndarray)):\n condlist = [condlist]\n condlist = [asarray(c, dtype=bool) for c in condlist]\n n = len(condlist)\n if n == n2-1: # compute the \"otherwise\" condition.\n totlist = condlist[0]\n for k in range(1, n):\n totlist |= condlist[k]\n condlist.append(~totlist)\n n += 1\n if (n != n2):\n raise ValueError, \"function list and condition list \" \\\n \"must be the same\"\n\n zerod = False\n # This is a hack to work around problems with NumPy's\n # handling of 0-d arrays and boolean indexing with\n # numpy.bool_ scalars\n if x.ndim == 0:\n x = x[None]\n zerod = True\n newcondlist = []\n for k in range(n):\n if condlist[k].ndim == 0:\n condition = condlist[k][None]\n else:\n condition = condlist[k]\n newcondlist.append(condition)\n condlist = newcondlist\n\n y = zeros(x.shape, x.dtype)\n for k in range(n):\n item = funclist[k]\n if not callable(item):\n y[condlist[k]] = item\n else:\n y[condlist[k]] = item(x[condlist[k]], *args, **kw)\n return y", "def parse_conditions_to_expr(\n expr: Sequence[Any], entity: Entity, arrayjoin: Set[str]\n) -> Optional[Expression]:\n\n def and_builder(expressions: Sequence[Expression]) -> Optional[Expression]:\n if not expressions:\n return None\n return combine_and_conditions(expressions)\n\n def or_builder(expressions: 
Sequence[Expression]) -> Optional[Expression]:\n if not expressions:\n return None\n return combine_or_conditions(expressions)\n\n def preprocess_literal(op: str, literal: Any) -> Expression:\n \"\"\"\n Replaces lists with a function call to tuple.\n \"\"\"\n if isinstance(literal, (list, tuple)):\n if op not in [\"IN\", \"NOT IN\"]:\n raise ParsingException(\n (\n f\"Invalid operator {op} for literal {literal}. Literal is a sequence. \"\n \"Operator must be IN/NOT IN\"\n ),\n report=False,\n )\n literals = tuple([Literal(None, lit) for lit in literal])\n return FunctionCall(None, \"tuple\", literals)\n else:\n if op in [\"IN\", \"NOT IN\"]:\n raise ParsingException(\n (\n f\"Invalid operator {op} for literal {literal}. Literal is not a sequence. \"\n \"Operator cannot be IN/NOT IN\"\n ),\n report=False,\n )\n return Literal(None, literal)\n\n def unpack_array_condition_builder(\n lhs: Expression, op: str, literal: Any\n ) -> Expression:\n function_name = \"arrayExists\" if op in POSITIVE_OPERATORS else \"arrayAll\"\n\n # This is an expression like:\n # arrayExists(x -> assumeNotNull(notLike(x, rhs)), lhs)\n return FunctionCall(\n None,\n function_name,\n (\n Lambda(\n None,\n (\"x\",),\n FunctionCall(\n None,\n \"assumeNotNull\",\n (\n FunctionCall(\n None,\n OPERATOR_TO_FUNCTION[op],\n (Argument(None, \"x\"), preprocess_literal(op, literal)),\n ),\n ),\n ),\n ),\n lhs,\n ),\n )\n\n def simple_condition_builder(lhs: Expression, op: str, literal: Any) -> Expression:\n if op in UNARY_OPERATORS:\n if literal is not None:\n raise ParsingException(\n f\"Right hand side operand {literal} provided to unary operator {op}\",\n report=False,\n )\n return unary_condition(OPERATOR_TO_FUNCTION[op], lhs)\n\n else:\n if literal is None:\n raise ParsingException(\n f\"Missing right hand side operand for binary operator {op}\",\n report=False,\n )\n return binary_condition(\n OPERATOR_TO_FUNCTION[op], lhs, preprocess_literal(op, literal)\n )\n\n return parse_conditions(\n parse_expression,\n and_builder,\n or_builder,\n unpack_array_condition_builder,\n simple_condition_builder,\n entity,\n expr,\n arrayjoin,\n 0,\n )", "def all(*args):\n if not args:\n raise ValueError(\"Any must take at least 1 argument\")\n if len(args) == 1:\n return args[0]\n ret = _make.And(args[0], args[1])\n for i in range(2, len(args)):\n ret = _make.And(ret, args[i])\n return ret", "def all_of(*conditions):\n def check():\n for c in conditions:\n if not c():\n return False\n return True\n return check", "def stop_when_true(test_expr, result_expr, seq):\n result = None\n for e in seq:\n if test_expr(e):\n result = result_expr(e)\n break\n return result", "def any(*args):\n if not args:\n raise ValueError(\"Any must take at least 1 argument\")\n if len(args) == 1:\n return args[0]\n ret = _make.Or(args[0], args[1])\n for i in range(2, len(args)):\n ret = _make.Or(ret, args[i])\n return ret", "def _chooseAmongstExpansions(self, expansions):\n if expansions:\n return [expansions[0]]\n return expansions", "def FirstTrue(values, default=None):\n for value in values:\n if value:\n return value\n return default", "def evcond(be, a_list, d_list):\n if be.null():\n raise error.LispException(\"boolean expression cannot be NIL\")\n if not (eval_lisp(be.car().car(), a_list, d_list)).null():\n return eval_lisp(be.car().cdr().car(), a_list, d_list)\n return evcond(be.cdr(), a_list, d_list)", "def find(func, list_seq):\n for list_item in list_seq:\n if func(list_item):\n return list_item", "def any(self, *args, **kwargs):\n if len(args):\n func 
= args[0]\n args = args[1:]\n else:\n func = bool\n for x in self:\n if func(x, *args, **kwargs):\n return self\n return plist()", "def parse_conditions(\n operand_builder: Callable[[Any, ColumnSet, Set[str]], TExpression],\n and_builder: Callable[[Sequence[TExpression]], Optional[TExpression]],\n or_builder: Callable[[Sequence[TExpression]], Optional[TExpression]],\n unpack_array_condition_builder: Callable[[TExpression, str, Any], TExpression],\n simple_condition_builder: Callable[[TExpression, str, Any], TExpression],\n entity: Entity,\n conditions: Any,\n arrayjoin_cols: Set[str],\n depth: int = 0,\n) -> Optional[TExpression]:\n from snuba.clickhouse.columns import Array\n\n if not conditions:\n return None\n\n if depth == 0:\n parsed = [\n parse_conditions(\n operand_builder,\n and_builder,\n or_builder,\n unpack_array_condition_builder,\n simple_condition_builder,\n entity,\n cond,\n arrayjoin_cols,\n depth + 1,\n )\n for cond in conditions\n ]\n return and_builder([c for c in parsed if c])\n elif is_condition(conditions):\n try:\n lhs, op, lit = conditions\n except Exception as cause:\n raise ParsingException(f\"Cannot process condition {conditions}\") from cause\n\n # facilitate deduping IN conditions by sorting them.\n if op in (\"IN\", \"NOT IN\") and isinstance(lit, tuple):\n lit = tuple(sorted(lit))\n\n # If the LHS is a simple column name that refers to an array column\n # (and we are not arrayJoining on that column, which would make it\n # scalar again) and the RHS is a scalar value, we assume that the user\n # actually means to check if any (or all) items in the array match the\n # predicate, so we return an `any(x == value for x in array_column)`\n # type expression. We assume that operators looking for a specific value\n # (IN, =, LIKE) are looking for rows where any array value matches, and\n # exclusionary operators (NOT IN, NOT LIKE, !=) are looking for rows\n # where all elements match (eg. all NOT LIKE 'foo').\n columns = entity.get_data_model()\n if (\n isinstance(lhs, str)\n and lhs in columns\n and isinstance(columns[lhs].type, Array)\n and columns[lhs].base_name not in arrayjoin_cols\n and columns[lhs].flattened not in arrayjoin_cols\n and not isinstance(lit, (list, tuple))\n ):\n return unpack_array_condition_builder(\n operand_builder(lhs, entity.get_data_model(), arrayjoin_cols), op, lit,\n )\n else:\n return simple_condition_builder(\n operand_builder(lhs, entity.get_data_model(), arrayjoin_cols), op, lit,\n )\n\n elif depth == 1:\n sub_expression = (\n parse_conditions(\n operand_builder,\n and_builder,\n or_builder,\n unpack_array_condition_builder,\n simple_condition_builder,\n entity,\n cond,\n arrayjoin_cols,\n depth + 1,\n )\n for cond in conditions\n )\n return or_builder([s for s in sub_expression if s])\n else:\n raise InvalidConditionException(str(conditions))" ]
[ "0.6422477", "0.6422477", "0.6359925", "0.635987", "0.6204265", "0.6166474", "0.6049754", "0.5968415", "0.59649223", "0.59408706", "0.58751965", "0.5863526", "0.58623445", "0.5844146", "0.57734656", "0.5697268", "0.56084704", "0.5544484", "0.5540427", "0.55290014", "0.55009335", "0.54759437", "0.5472489", "0.5468551", "0.545176", "0.544756", "0.5440578", "0.5421477", "0.5387807", "0.5318024" ]
0.65245444
0
Return the value of a string.

This function finds the longest leftmost number in the string and returns it. If there are no valid numbers then it returns 0. The method chosen here is very poor: we just keep trying to convert the string to a float and use the last successful conversion as we increase the size of the string. A regular expression approach is probably quicker.
def Val(text):
    best = 0
    for idx in range(len(text)):
        try:
            best = float(text[:idx + 1])
        except ValueError:
            pass
    return best
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_last_number(s:str):\n array = re.findall(r'[0-9]+', s)\n if array.__len__() is 0:\n return -1\n return int(array[-1])", "def _get_number_from_string(x):\n try:\n return float(x)\n except ValueError:\n raise ValueError('Unknown element')", "def float(s):\n if s is None or s == \"\":\n f = float(-maxsize)\n else:\n f = float(s)\n\n return f", "def try_parse_number(s):\n if s.startswith(\"0\") and len(s) != 1 and not s.startswith(\"0.\"):\n return s\n # Try parsing a nmeric\n try:\n return int(s)\n except ValueError: # Try float or return s\n try:\n return float(s)\n except:\n return s", "def to_digit(s: str) -> Union[float, str, int]:\n out = s.strip()\n f_twin = r'\\d+[,.]\\d{2,} {0,}- {0,}\\d+[.,]\\d{2,}'\n f_rank = r'\\d/10'\n f_score = r'[ ]{0,}\\d+[ ]{0,}'\n f_date = r'\\d\\d\\.\\d\\d\\.\\d\\d\\d\\d'\n f_main = r'(-?\\d*\\,?\\d+\\.?\\d*)[%BM]?'\n\n if isinstance(s, str) and re.findall(f_date, s) == [] and len(s) < 50 and s != '-':\n try: # begin from big one, because bigs consist small re\n\n if re.search(f_main, s) is not None:\n res = re.search(f_main, s.strip()).groups()[0]\n if res == '-':\n return '-'\n k = 1\n mul = 1\n after_point = res.split('.')\n if len(after_point) == 2:\n k = 10 ** len(after_point[1].replace(',', ''))\n\n mul = 1000000000 if s.find('B') > 0 else mul # found Billions\n mul = 1000000 if s.find('M') > 0 else mul # found Millions\n mul = 0.01 if s.find('%') > 0 else mul # found Percent format\n mul = mul * -1 if s.find(')') > 0 else mul # financial format to show minus : -192.34 = (192.34)\n\n return round(float(res.replace('.', '').replace(',', '')), 2) * mul / k if k > 1 else \\\n int(res.replace('.', '').replace(',', '')) * mul\n\n if len(re.findall(f_twin, s)) > 0: # format range xxx.xx - xxx.xx\n return float(re.findall(f_twin, s)[0]\n .replace(' ', '')\n .split('-')[0]\n .replace(',', '')\n .replace('.', '')) / 100\n\n if len(re.findall(f_rank, s)) > 0: # format score like 9/10 -> 9\n return int(re.findall(f_rank, s)[0].split('/')[0])\n\n if len(re.findall(f_score, s)) > 0: # format one digit score like ' 5 ' -> 5\n return int(re.findall(f_score, s)[0].replace(' ', ''))\n\n except Exception as e:\n\n logging.error(f\"Error in to_digit(). 
Input {s}, Out \")\n return out", "def string_to_number(string):\n if not string:\n return 0\n try:\n return int(string)\n except ValueError:\n return float(string)", "def find_max_tidy_num(s_number):\n\n len_input = len(s_number) - 1\n\n if len_input == 0:\n return s_number\n\n for i in range(0, len_input):\n if int(s_number[i]) > int(s_number[i+1]):\n\n final_str = '9' * (len_input - i)\n s_number = s_number[:(i+1)]\n\n return ''.join([find_max_tidy_num(str(int(s_number)-1)), final_str])\n\n return s_number", "def getFloat(string):\n return (0.0)", "def ffloat(string):\n try:\n return float(string.strip())\n except:\n return 0", "def string_to_number(s):\n\n if type(s).__name__==\"str\":\n s = s.strip()\n if s ==\"-\":\n s = 0\n else:\n s = s.replace(\",\",\"\").replace(\"$\",\"\")\n if s.find(\"(\")>=0 and s.find(\")\")>=0:\n s = s.replace(\"(\",\"-\").replace(\")\",\"\")\n return float(s)", "def num(s: str):\n try: return int(s)\n except ValueError: return float(s)", "def extract_float(self, s: str) -> float:\n f = re.findall(r'([0-9]*[.]*[0-9]+)', s)\n return float(f[0]) if len(f) > 0 else None", "def parseFloat(s, ret=0.0):\n if not isinstance(s, str):\n return float(s)\n elif s:\n if s[0] in \"+-\":\n ts = s[1:]\n else:\n ts = s\n\n if ts and ts.count(\".\") <= 1 and all([_ in \".0123456789\" for _ in ts]):\n return float(s)\n\n return ret", "def _string_to_float(s):\n try:\n f = float(s)\n return f\n except ValueError:\n return None", "def _to_float(self, s: str) -> float:\n return int(s[:-1]) / 1e9 if s.endswith('n') else float(s[:-1])", "def strToDec(string):\n\tstring = string.lstrip(\"0\")\n\tif len(string) == 0:\n\t\treturn 0\n\telse:\n\t\treturn eval(string)", "def get_number(text):\n# if (isinstance(text, str) or isinstance(text, unicode)):\n if True:\n text.replace(\",\",\".\")\n text = re.sub(\"\\xa0\",\"\", text)\n rst = re.findall(\"[0-9]+\\.{0,1}[0-9]*\", text)\n if rst:\n rst = rst[0]\n else:\n rst = \"nan\"\n else:\n rst = text\n try:\n rst = float(rst)\n except:\n rst = float(\"nan\")\n return(rst)", "def _parseNumber(self, str):\r\n\t\tif (str.count(\".\") == 0):\r\n\t\t\treturn int(str)\r\n\t\tif (str.count(\".\") == 1):\r\n\t\t\treturn float(str)\r\n\t\treturn str", "def safe_float(str):\n if not str:\n return None\n try:\n return float(str)\n except ValueError:\n return 0", "def _afterpoint(string):\n if _isnumber(string) or _isnumber_with_thousands_separator(string):\n if _isint(string):\n return -1\n else:\n pos = string.rfind(\".\")\n pos = string.lower().rfind(\"e\") if pos < 0 else pos\n if pos >= 0:\n return len(string) - pos - 1\n else:\n return -1 # no point\n else:\n return -1 # not a number", "def get_number(x):\n\n return re.findall(r'\\d+', x)[0]", "def find_float(input: str) -> float:\n str_split = input.split('<@')\n if (len(str_split) == 0):\n raise AmountMissingException(\"amount_not_found\")\n input_text = str_split[0]\n regex = r'(?:^|\\s)(\\d*\\.?\\d+)(?=$|\\s)'\n matches = re.findall(regex, input_text, re.IGNORECASE)\n if len(matches) >= 1:\n return abs(float(matches[0].strip()))\n raise AmountMissingException(\"amount_not_found\")", "def _parseNumbers(s):\n ss = utils.unclump(s)\n\n m3 = re.match('^\\d+$', ss)\n if m3 is not None:\n return decimal.Decimal(round(float(ss), 2))\n\n m1 = re.match(r'(\\d+)\\s+(\\d)/(\\d)', ss)\n if m1 is not None:\n num = int(m1.group(1)) + (float(m1.group(2)) / float(m1.group(3)))\n return decimal.Decimal(str(round(num, 2)))\n\n m2 = re.match(r'^(\\d)/(\\d)$', ss)\n if m2 is not None:\n num = float(m2.group(1)) 
/ float(m2.group(2))\n return decimal.Decimal(str(round(num, 2)))\n\n return None", "def get_value_from_string(text):\n if len(text.strip()) == 0:\n return None\n\n try:\n if '-' in text or '+' in text:\n tl = [ti for ti in text.split('-')]\n for i in range(1, len(tl)):\n tl[i] = '-' + tl[i]\n ntl = []\n for ti in tl:\n ntl = ntl + ti.split('+')\n ntl = [ti.replace(' ', '') for ti in ntl]\n values = [float(ti) for ti in ntl if len(ti) > 0]\n value = sum(values)\n else:\n value = float(text)\n return value\n\n except Exception:\n return None", "def str_to_num(s):\n\n method = {\n \"float\": string.atof,\n \"int\": string.atoi\n }\n\n if not type(s) is StringType:\n return 0\n\n if \".\" in s:\n return method[\"float\"](s)\n else:\n return method[\"int\"](s, 10)", "def str2num(s):\r\n try:\r\n return int(s)\r\n except ValueError:\r\n return float(s)", "def convert_to_float(s):\n try:\n return float(s)\n except TypeError:\n return None", "def float_from_string(data):\n return float(maybe_number(data))", "def guess_string_format(string):\n try:\n _ = int(string)\n return int\n except ValueError:\n try:\n _ = float(string)\n return float\n except ValueError:\n return str", "def get_freq(string:str) -> float:\n import numpy\n try:\n freq = float(string.replace(\"%\", \"\")) / 100\n except AttributeError as e:\n # if string is np.nan\n freq = numpy.nan\n return freq" ]
[ "0.66451454", "0.64589363", "0.63910717", "0.6363433", "0.63494194", "0.630394", "0.61770207", "0.616602", "0.6151756", "0.61081976", "0.60855013", "0.6082988", "0.60598946", "0.6020905", "0.60046595", "0.5965904", "0.5914045", "0.58885247", "0.58840555", "0.58837414", "0.5845547", "0.57826924", "0.57607114", "0.5758916", "0.5752836", "0.57396096", "0.56763893", "0.5675276", "0.5666232", "0.56626195" ]
0.7048725
0
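
For readability, here is a minimal doctest-style sketch of how the Val function in the record above behaves; the calls below are illustrations derived from its code, not part of the dataset.

# Longest leftmost number wins; trailing non-numeric text is ignored.
assert Val("12.5abc") == 12.5
# A leading sign parses as part of the number.
assert Val("-3x") == -3.0
# No leading number at all (or an empty string) yields 0.
assert Val("abc") == 0
assert Val("") == 0
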
Return arguments passed in an event. VB Control events have parameters passed in the call, e.g. MouseMove(Button, Shift, X, Y). In vb2py.PythonCard the event parameters are all passed as a single event object. We can easily unpack the attributes back to the values in the event handler, but we also have to account for the fact that someone might call the handler directly and therefore assume that they can pass parameters individually. This function tries to unpack the params from an event object and, if successful, returns them as a tuple. If this fails, it assumes that they were already passed as a tuple and returns them that way. This can still fail if there are keyword arguments ... TODO!
def vbGetEventArgs(names, arguments):
    # arguments is the *args tuple
    #
    # Is there only one parameter
    if len(arguments) == 1:
        # Try to unpack names from this argument
        try:
            ret = []
            for name in names:
                if name.endswith("()"):
                    ret.append(getattr(arguments[0], name[:-2])())
                else:
                    ret.append(getattr(arguments[0], name))
            return ret
        except AttributeError:
            pass
    # If we have as many arguments as we need then just return them
    if len(names) == len(arguments):
        return arguments
    # Couldn't unpack the event and didn't have the right number of args so we are dead
    raise VB2PYCodeError("EventHandler couldn't unpack arguments")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _convert_args(self, expr, args, kwargs):\n assert expr is not None\n\n if not kwargs:\n return args\n\n if kwargs and not isinstance(expr, Function):\n raise Exception(\"can only supply keyword parameters for a \"\n \"relay.Function, found {0}\".format(expr))\n\n params = expr.params\n param_names = [p.name_hint for p in params]\n num_of_args = len(args)\n\n cargs = list(args)[:]\n for i, name in enumerate(param_names):\n if i < num_of_args:\n if kwargs.get(name):\n raise Exception(\n \"duplicate argument supplied in \"\n \"both positional args (at position: {0}), \"\n \"and keyword argument (with name: {1})\".format(i, name))\n else:\n cargs.append(kwargs[name])\n\n if len(cargs) != len(params):\n raise Exception(\n \"insufficient arguments, expected \"\n \"{0}, provided {1}\".format(len(cargs), len(params)))\n\n return tuple(cargs)", "def _dispatch_kwargs(self, **kwargs) -> Tuple[Dict, Dict, Dict, Dict]:\n # Ensure each argument only matches one function\n method_kwargs = self.preprocess_kwargs | self.forward_kwargs | \\\n self.visualize_kwargs | self.postprocess_kwargs\n\n union_kwargs = method_kwargs | set(kwargs.keys())\n if union_kwargs != method_kwargs:\n unknown_kwargs = union_kwargs - method_kwargs\n raise ValueError(\n f'unknown argument {unknown_kwargs} for `preprocess`, '\n '`forward`, `visualize` and `postprocess`')\n\n preprocess_kwargs = {}\n forward_kwargs = {}\n visualize_kwargs = {}\n postprocess_kwargs = {}\n\n for key, value in kwargs.items():\n if key in self.preprocess_kwargs:\n preprocess_kwargs[key] = value\n elif key in self.forward_kwargs:\n forward_kwargs[key] = value\n elif key in self.visualize_kwargs:\n visualize_kwargs[key] = value\n else:\n postprocess_kwargs[key] = value\n\n return (\n preprocess_kwargs,\n forward_kwargs,\n visualize_kwargs,\n postprocess_kwargs,\n )", "def _get_args(function, varargs=False):\n\n try:\n params = signature(function).parameters\n except ValueError:\n # Error on builtin C function\n return []\n args = [\n key\n for key, param in params.items()\n if param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)\n ]\n if varargs:\n varargs = [\n param.name\n for param in params.values()\n if param.kind == param.VAR_POSITIONAL\n ]\n if len(varargs) == 0:\n varargs = None\n return args, varargs\n else:\n return args", "def GetFunctionParametersAndValues():\n frame = inspect.currentframe().f_back\n args, _, _, values = inspect.getargvalues(frame)\n return ([(i, values[i]) for i in args])", "def inspect_args_func(frame):\n args, _, _, values = inspect.getargvalues(frame)\n return {key: values[key] for key in args if key != 'self'}", "def process(self, msg, kwargs) -> Tuple[str, Dict]:\n return msg, kwargs", "def preprocess_arguments(self, *args, **kwargs):\n return (args, kwargs)", "def getArgs(useKwargFormat=True, includeVariableArgs=True, numFramesAgo=1, excludeList=[]):\n\tframe = inspect.getouterframes(inspect.currentframe())[numFramesAgo][0]\n\targNames, varArgs_name, varKwargs_name, locals_ = inspect.getargvalues(frame)\n\tvarArgs = locals_[varArgs_name] if varArgs_name != None else tuple()\n\tvarKwargs = locals_[varKwargs_name] if varKwargs_name != None else {}\n\tnotArgs = set(locals_.iterkeys()) - set(argNames)\n\t\n\tfor notArg in notArgs:\tdel locals_[notArg]\n\texcludeList.append(\"self\")\n\texcludeList.append(\"cls\")\n\tmixedKwargsArgs = OrderedDict((argName, locals_[argName]) for argName in argNames if argName not in excludeList)\n\t\n\tif useKwargFormat == True:\n\t\tkwargs = dict(mixedKwargsArgs)\n\t\tif 
includeVariableArgs:\n\t\t\tkwargs.update(varKwargs)\n\t\treturn kwargs\n\telif useKwargFormat == False:\n\t\targs = tuple(mixedKwargsArgs.values())\n\t\tif includeVariableArgs:\n\t\t\targs += varArgs\n\t\treturn args\n\telif useKwargFormat == None:\n\t\tkwargs = dict(mixedKwargsArgs)\n\t\tif includeVariableArgs:\n\t\t\tkwargs.update(varKwargs)\n\t\treturn varArgs, kwargs\n\telse:\n\t\traise Exception(\"Invalid useKwargFormat\")", "def _get_deltas(event):\n delta_x = round(event.deltaX())\n delta_y = round(event.deltaY())\n delta_z = round(event.deltaZ())\n return delta_x, delta_y, delta_z", "def get_events_params(args: Dict[str, Any]) -> Dict[str, Any]:\n params: Dict[str, Any] = {'event_type': 'Ips Event'}\n arg_keys = args.keys()\n\n if 'duration' in arg_keys:\n params['duration'] = args.get('duration', '')\n\n if 'start_time' in arg_keys:\n start_time = args.get('start_time', '')\n date_time = dateparser.parse(start_time)\n if date_time:\n params['start_time'] = str(\n date_time.strftime(API_SUPPORT_DATE_FORMAT)\n )\n else:\n raise ValueError(\n MESSAGES['INVALID_TIME_VALIDATION'].format('start_time')\n )\n\n if 'end_time' in arg_keys:\n end_time = args.get('end_time', '')\n date_time = dateparser.parse(end_time)\n if date_time:\n params['end_time'] = str(\n date_time.strftime(API_SUPPORT_DATE_FORMAT)\n )\n else:\n raise ValueError(\n MESSAGES['INVALID_TIME_VALIDATION'].format('end_time')\n )\n\n if 'mvx_correlated_only' in arg_keys:\n mvx_correlated_only = args.get('mvx_correlated_only', '').lower()\n try:\n mvx_correlated_only = argToBoolean(mvx_correlated_only)\n params['mvx_correlated_only'] = mvx_correlated_only\n except ValueError:\n raise ValueError(\n MESSAGES['INVALID_BOOLEAN_VALUE_ERROR'].format(\n 'mvx_correlated_only'\n )\n )\n\n return params", "def __call__(\n self, q_events: typing.Sequence[QEvent]\n ) -> tuple[tuple[abjad.NamedPitch, ...], tuple, None]:\n q_event = q_events[-1]\n if isinstance(q_event, PitchedQEvent):\n return tuple(q_event.pitches), tuple(q_event.attachments), None\n return (), (), None", "def _handle_func_args(func, *args, **kwargs):\n if not isinstance(func, (types.FunctionType, types.MethodType)):\n raise RuntimeError('fn {} is not function or method'.format(func))\n if kwargs:\n bound_arguments = inspect.signature(func).bind(*args, **kwargs)\n bound_arguments.apply_defaults()\n args = bound_arguments.args\n kwargs = bound_arguments.kwargs\n\n positional_args = 0\n default_args = 0\n has_var = False\n for value in inspect.signature(func).parameters.values():\n if value.kind is inspect.Parameter.VAR_POSITIONAL or value.kind is inspect.Parameter.VAR_KEYWORD:\n has_var = True\n if value.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD:\n if value.default is inspect.Parameter.empty:\n positional_args += 1\n else:\n default_args += 1\n\n if has_var:\n return args, kwargs\n\n if len(args) < positional_args:\n raise TypeError(f\"Function {func.__name__} needs {positional_args} positional argument, but got {len(args)}.\")\n if len(args) > positional_args + default_args:\n raise TypeError(f\"Function {func.__name__} needs {positional_args} positional argument and {default_args} \"\n f\"default argument, total {positional_args + default_args}, but got {len(args)}.\")\n return args, kwargs", "def ReviewServiceArgs(cls, event = None, x = 0, y = 0, submenuchar = ''):\n return event, x, y, submenuchar", "def _process_events(self, events):\n # Seperate modifiers and keys\n modifiers = []\n key_act_tuples = []\n for e in events:\n if e == []:\n pass\n elif e[0] in 
self.modifiers:\n modifiers.append(e[0])\n elif e[0] in self.keys:\n key_act_tuples.append(e)\n else:\n raise ValueError\n # Create processed events\n new_events = []\n new_events.append((None, None, None)) # The default event\n for key, key_act in key_act_tuples:\n new_events.append((key, key_act, None)) # None for any modifiers\n new_events.append((key, key_act, tuple(set(modifiers))))\n return new_events", "def get_input_arguments(kwargs, function, warn=True):\n np.set_printoptions(threshold=20)\n print('\\narguments to {}:'.format(function.__qualname__))\n params = inspect.signature(function)\n input_kwargs = {}\n not_arguments = {}\n for k, v in kwargs.items():\n if k in params.parameters:\n input_kwargs[k] = v\n print_item(k, v)\n else:\n not_arguments[k] = v\n if warn:\n print('\\nother arguments:')\n for k, v in not_arguments.items():\n #print('{}: {}'.format(k, v))\n print_item(k, v)\n print('\\n')\n return input_kwargs", "def get_input_arguments(kwargs, function, warn=True):\n np.set_printoptions(threshold=20)\n print('\\narguments to {}:'.format(function.__qualname__))\n params = inspect.signature(function)\n input_kwargs = {}\n not_arguments = {}\n for k, v in kwargs.items():\n if k in params.parameters:\n input_kwargs[k] = v\n print_item(k, v)\n else:\n not_arguments[k] = v\n if warn:\n print('\\nother arguments:')\n for k, v in not_arguments.items():\n #print('{}: {}'.format(k, v))\n print_item(k, v)\n print('\\n')\n return input_kwargs", "def GetAndValidateParameters(args, event_type):\n # Check the passed parameters for unknown keys or missing required keys\n parameters = {}\n from_file_flag = '{}_from_file'.format(_PARAMETERS_FLAG_NAME)\n if args.IsSpecified(from_file_flag):\n parameters.update(getattr(args, from_file_flag))\n if args.IsSpecified(_PARAMETERS_FLAG_NAME):\n parameters.update(getattr(args, _PARAMETERS_FLAG_NAME))\n _ValidateParameters(event_type, parameters)\n\n # Check the passed secret parameters for unknown keys or missing required keys\n secret_parameters = _ParseSecretParameters(args)\n _ValidateParameters(\n event_type, secret_parameters, properties='secret_properties')\n\n parameters.update(secret_parameters)\n return parameters", "def getargspec(self,obj):\n\n if inspect.isfunction(obj):\n func_obj = obj\n elif inspect.ismethod(obj):\n func_obj = obj.im_func\n else:\n raise TypeError, 'arg is not a Python function'\n args, varargs, varkw = inspect.getargs(func_obj.func_code)\n return args, varargs, varkw, func_obj.func_defaults", "def _getargs(fn_sig):\n params = fn_sig.parameters\n args = []\n for k, v in params.items():\n if (v.kind & v.POSITIONAL_OR_KEYWORD) == v.POSITIONAL_OR_KEYWORD:\n args.append(k)\n else:\n msg = \"%s argument type unsupported in jitclass\" % v.kind\n raise errors.UnsupportedError(msg)\n return args", "def _process_inputs(args, kwargs) -> Any:\n if args and kwargs:\n input_values = (*args, kwargs)\n elif args and not kwargs:\n input_values = args[0] if len(args) == 1 else args\n elif kwargs and not args:\n input_values = kwargs\n else:\n input_values = ()\n\n return input_values", "def _check_params(self, events):\n\n # check to make sure it's a list of dictionaries with the right keys\n\n assert type(events) == list, \"events should be a list\"\n\n for event in events:\n\n assert type(event) == dict, \"each event should be a dictionary\"\n\n assert \"name\" in event, 'each event should have a \"name\" key'\n\n assert \"params\" in event, 'each event should have a \"params\" key'\n\n # check for any missing or invalid 
parameters\n\n for e in events:\n event_name = e[\"name\"]\n event_params = e[\"params\"]\n if event_name in params_dict.keys():\n for parameter in params_dict[event_name]:\n if parameter not in event_params.keys():\n logger.warning(\n f\"WARNING: Event parameters do not match event type.\\nFor {event_name} event type, the correct parameter(s) are {params_dict[event_name]}.\\nFor a breakdown of currently supported event types and their parameters go here: https://support.google.com/analytics/answer/9267735\\n\"\n )", "def get_Tuple_params(tpl):\n try:\n return tpl.__tuple_params__\n except AttributeError:\n pass\n try:\n if tpl.__args__ is None:\n return None\n # Python 3.6\n if tpl.__args__[0] == ():\n return ()\n else:\n if tpl.__args__[-1] is Ellipsis:\n return tpl.__args__[:-1] if len(tpl.__args__) > 1 else None\n else:\n return tpl.__args__\n except (AttributeError, IndexError):\n return None", "def unpack_args(kwargs):\n return [v for p in zip(list(kwargs.keys()), list(kwargs.values())) for v in p]", "def get_Callable_args_res(clb):\n try:\n return clb.__args__, clb.__result__\n except AttributeError:\n # Python 3.6\n return clb.__args__[:-1], clb.__args__[-1]", "def get_parameters(event, required_parameters, optionnal_parameters):\n parameters = event.get(\"queryStringParameters\")\n if not parameters:\n raise MissingParameterException(required_parameters)\n\n dict_params = {}\n missing_params = []\n for param in required_parameters:\n value = parameters.get(param)\n if value is None:\n missing_params.append(param)\n else:\n dict_params[param] = value\n\n if missing_params:\n raise MissingParameterException(missing_params)\n\n for param in optionnal_parameters:\n value = parameters.get(param, \"\")\n dict_params[param] = value\n\n return dict_params", "def extract_captured_arguments(func):\n captured_arguments = getattr(func, ATTR_NAME)\n if type(captured_arguments) is not _CapturedArguments: # pylint: disable=unidiomatic-typecheck\n # The attribute was not set by tcm, so effectively it does not exist.\n raise AttributeError\n delattr(func, ATTR_NAME)\n return captured_arguments", "def _helper_parameters(func, args=(), kwargs=None, onlykeys=False, onlyused=False):\n if kwargs is None:\n kwargs = {}\n # params = list(inspect.signature(self.__init__).parameters.keys())\n params = inspect.getargspec(func).args[1:] # TODO replace deprecated getargspec to work with py2 and py3, perhaps by getfullargspec\n\n if onlykeys and not onlyused: # only add to keywords\n covered = 0 # simulate no args\n else:\n covered = len(args)\n\n if onlyused and onlykeys: # only add modified by user\n adds = [(True if i < covered or key in kwargs else False) for i, key in\n enumerate(params)]\n # add keys from args\n for i, val in enumerate(args):\n kwargs[params[i]] = val\n elif onlyused:\n adds = [(True if i >= covered and key in kwargs else False) for i, key\n in\n enumerate(params)]\n else:\n adds = [(True if i >= covered else False) for i, key in\n enumerate(params)]\n return adds, params, kwargs", "def getParams(self):\n return self.W, self.b", "def get_args( self, **kwargs ):\n args = []\n for at in self.arg_types:\n args.append( kwargs[at] )\n return args", "def get_python_function_arguments(f):\n # Note that we only return non-optional arguments (we assume that any optional args are not specified).\n # This allows to, e.g., accept max(a, b, *more, name='') as a binary function\n param_specs = inspect.getfullargspec(f)\n annotations = param_specs.annotations\n arg_names = param_specs.args\n defaults = 
param_specs.defaults # \"if this tuple has n elements, they correspond to the last n elements listed\n # in args\"\n if defaults:\n arg_names = arg_names[:-len(defaults)]\n return (arg_names, annotations)" ]
[ "0.5983954", "0.58679104", "0.5814009", "0.55884343", "0.5439283", "0.5410738", "0.53881556", "0.5294634", "0.52934223", "0.5280633", "0.52511024", "0.5245851", "0.52443963", "0.5239429", "0.5231915", "0.5231915", "0.5226041", "0.5197778", "0.51946074", "0.5181564", "0.51726574", "0.51656467", "0.51422197", "0.51187074", "0.50795984", "0.5035743", "0.5026771", "0.49988532", "0.49746665", "0.4972325" ]
0.69104767
0
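
A short usage sketch for the vbGetEventArgs record above; the _Event class is a hypothetical stand-in for a PythonCard event object (an illustration, not part of the dataset).

class _Event:
    # Hypothetical event object carrying the usual MouseMove attributes.
    Button, Shift, X, Y = 1, 0, 10, 20

names = ["Button", "Shift", "X", "Y"]
# Handler invoked with a single event object: attributes are unpacked by name.
assert vbGetEventArgs(names, (_Event(),)) == [1, 0, 10, 20]
# Handler called directly with individual values: they pass straight through.
assert vbGetEventArgs(names, (1, 0, 10, 20)) == (1, 0, 10, 20)
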
Return the AddressOf a variable. This does not work in Python, and code using it is likely going to need significant rework, so we just throw an error here.
def AddressOf(obj):
    raise NotImplementedError('AddressOf does not work in Python and code will likely need refactoring')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def LocalAddress(self) -> _n_5_t_0:", "def get_address_value(obj):\n no_dynamic = obj.GetDynamicValue(lldb.eNoDynamicValues)\n \"\"\":type: lldb.SBValue\"\"\"\n address = no_dynamic.GetAddress()\n \"\"\":type: lldb.SBAddress\"\"\"\n address_value = address.GetFileAddress()\n \"\"\":type: int\"\"\"\n return address_value", "def Address(self) -> _n_5_t_0:", "def addr(label_name):\n\n if not utils.is_string_type(label_name):\n return None\n\n return labelmanager.addr(label_name)", "def _get_address(self):\n return self.__address", "def getReferencedAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:\n ...", "def eval_take_address(self, expr):\n if isinstance(expr, expressions.VariableAccess):\n declaration = expr.variable.declaration\n if isinstance(\n declaration,\n (\n declarations.VariableDeclaration,\n declarations.ParameterDeclaration,\n declarations.ConstantDeclaration,\n declarations.FunctionDeclaration,\n ),\n ):\n value = self.codegenerator.ir_var_map[declaration]\n cval = (ir.ptr, value.name)\n else: # pragma: no cover\n raise NotImplementedError()\n elif isinstance(expr, expressions.CompoundLiteral):\n cval = self.eval_compound_literal(expr)\n else: # pragma: no cover\n raise NotImplementedError()\n return cval", "def getAddressOfIndex(self) -> long:\n ...", "def getAddress(self) -> int:\n ...", "def address(self):\n return self._ref_address", "def nameToAddress(self, name):\n pass", "def resolve(self, address):\n address_map = self._address_map_from_spec_path(address.spec_path)\n if address not in address_map:\n self._raise_incorrect_address_error(address.spec_path, address.target_name, address_map)\n else:\n return address_map[address]", "def staticAddress(self, arg):\n label = self.ns + '.' 
+ arg\n return label", "def getAbsoluteAddress(program: ghidra.program.model.listing.Program, address: ghidra.program.model.address.Address) -> ghidra.program.model.address.Address:\n ...", "def getReference(self, instruction: ghidra.program.model.listing.Instruction, toAddress: ghidra.program.model.address.Address) -> ghidra.program.model.symbol.Reference:\n ...", "def addr(self):\r\n return self._addr", "def address(self) -> tuple[str, int]:", "def get_address(self):\n return logic.address(self.get_program())", "def _get_addr(self, protocol, address):\n if address:\n return address[0]\n else:\n return protocol.transport.getPeer().host", "def decodeAddress(self, value: long, useMemorySegmentation: bool) -> ghidra.program.model.address.Address:\n ...", "def lookup(self, variable):\n for binding in self.bindings:\n if binding.variable.name == variable:\n return binding\n if self.static_link is not None:\n return self.enclosing_frame.lookup(variable)\n return None", "def get_address_value(cls, addr):\n\t\tprint \" Called get_address_value({})\".format(addr)\n\t\ttype = abs(addr) // 1000 # integer division\n\t\trelative_address = abs(addr) - (type * 1000)\n\t\tprint \"> Get mem value: type = {}, addr = {}\".format(type, relative_address)\n\t\t# use heap for search if addr is negative, else the current local mem\n\t\tif addr >= 14000:\n\t\t\tprint \"> Const vars memory: {}\".format(cls.const_vars)\n\t\t\treturn cls.const_vars[addr]\n\t\telif addr < 0:\n\t\t\tprint \"> Heap memory: {}\".format(cls.heap.memory)\n\t\t\treturn cls.heap.memory[type][abs(relative_address)]\n\t\telse:\n\t\t\tprint \"> Stack memory: {}\".format(cls.stack.peek().memory)\n\t\t\treturn cls.stack.peek().memory[type][relative_address]", "def _get_pointer(self) -> str:\n pointers = {1: \"THAT\", 0: \"THIS\"}\n try:\n return pointers[self.value]\n except KeyError:\n raise InvalidSegmentException(\n f\"Expected pointer be 0 or 1 but got {self.value}\"\n )", "def decode_addr(self, addr):\n self._check_pid_wrap()\n # Find the binary that contains the specified address.\n # For .so files, look at the relative address; for the main\n # executable, look at the absolute address.\n for binary, (start, end) in self.code_ranges.items():\n if addr >= start and addr <= end:\n offset = addr - start \\\n if binary.endswith(\".so\") else addr\n return \"%s [%s]\" % (self._decode_sym(binary, offset),\n binary)\n return \"%x\" % addr", "def _read_addr_resolve(self, addr: 'bytes', htype: 'int') -> 'str':\n if htype == Enum_Hardware.Ethernet: # Ethernet\n if py38:\n _addr = addr.hex(':')\n else:\n _addr = ':'.join(textwrap.wrap(addr.hex(), 2))\n else:\n _addr = addr.hex()\n return _addr", "def resolve(self, address):", "def address(self, symbol):\r\n return self.s_table[symbol]", "def Address(self, value, default_type=\"uint\"):\n return Address(value, process=self.process, default_type=default_type)", "def get_entry_addr(afile):\n cmd = 'readelf -h ' + cmd_quote(afile) + ' | grep \"Entry point address:\" || true'\n output = get_shell_cmd_output(cmd)\n if output:\n tokens = output.split(\":\")\n entry_addr = tokens[1].split()[0]\n #verbose(afile + \" entry point address is: \" + entry_addr, LEVEL_2)\n return int(entry_addr, 0)\n return 0", "def _get_reference_by_variable(self, var):\n if not var[0] == consts.VARIABLE:\n raise Exception('Internal error: Expected a variable, got: \"%r\"' % var)\n res = self._bindings.get(var, var)\n if res == consts.TOPIC_IN_FOCUS:\n res = self.focus\n while res[0] == consts.VARIABLE and self.parent:\n res = 
self.parent._get_reference_by_variable(res) #pylint: disable-msg=W0212\n if res == consts.TOPIC_IN_FOCUS:\n res = self.focus\n return res" ]
[ "0.6837938", "0.67366517", "0.6421531", "0.61839086", "0.6139431", "0.60965663", "0.6088812", "0.6042399", "0.6003365", "0.5988634", "0.5901362", "0.5862584", "0.58369744", "0.58360106", "0.58224577", "0.5810336", "0.5739616", "0.56862915", "0.56791055", "0.56228083", "0.5603202", "0.5591109", "0.557885", "0.5548035", "0.5505608", "0.5502445", "0.54992765", "0.5478836", "0.54773635", "0.5476808" ]
0.69282764
0
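
Since the AddressOf record above exists only to fail loudly, here is a tiny sketch of what a caller sees; some_var is a hypothetical variable for illustration.

some_var = object()  # hypothetical variable; any object triggers the same error
try:
    AddressOf(some_var)
except NotImplementedError as err:
    print(err)  # -> AddressOf does not work in Python and code will likely need refactoring
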
Close any pipes we have to the process (both input and output) and wait for it to exit. If cancelled, kills the process and waits for it to finish exiting before propagating the cancellation.
async def aclose(self):
    with _core.open_cancel_scope(shield=True):
        if self.stdin is not None:
            await self.stdin.aclose()
        if self.stdout is not None:
            await self.stdout.aclose()
        if self.stderr is not None:
            await self.stderr.aclose()
    try:
        await self.wait()
    finally:
        if self.returncode is None:
            self.kill()
            with _core.open_cancel_scope(shield=True):
                await self.wait()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _cleanup_proc(self):\n logger.debug(\"{}: Cleaning up and waiting for process to exit\".format(\n self))\n try:\n self._loop.remove_reader(self._proc.stdout)\n self._proc.stdout.close()\n self._proc.stdin.close()\n except Exception:\n # Log errors, but otherwise ignore.\n logger.error(\"{}: Failed cleaning up process\".format(self),\n exc_info=True)\n finally:\n # If the wait fails, the sub-process will appear in the process\n # tree (labelled defunct). This is mostly harmless so just log a\n # warning.\n try:\n self._proc.wait(0)\n except subprocess.TimeoutExpired:\n logger.warning(\"{}: Wait failed\".format(self),\n exc_info=True)", "def exit_gracefully():\n input_channel.close()\n output_channel.close()\n cmd_channel.close()\n connection.close()", "def close(self) -> None:\n if self._process:\n self._process.terminate()\n self._process.wait()\n self._process = None", "def _stop_process(self):\n self.stdin_queue.put_nowait(\"quit\")\n ExternalProcess._stop_process(self)", "def finalize(self):\n self.busy = False\n self.pipe_start.send((\"FINISH\",None))\n self.process.join()\n if self.process.is_alive():\n self.process.terminate()", "def close(self):\n self.log_debug(\"Waiting for processing thread to close...\")\n self.input_processing_running = False\n\n if self.input_thread is not None:\n self.input_thread.join()", "def closeEvent(self, event=None):\n log.debug(\"Cleanup close, %s\", event)\n result = self.dev.close_pipe()\n log.info(\"Close pipe result: %s\", result)\n self.data_timer.stop()\n self.close()", "def close(self):\n if self.__exit_rslt is None:\n self.wait()\n os.remove(self.__errpipe_path)\n os.remove(self.__script_fname)", "def close(self):\r\n try:\r\n self.proc.terminate()\r\n except (OSError, AttributeError): # pragma: no cover\r\n pass\r\n self.proc = None", "def endTasks():\n __terminalState.bufferedReaderTask.cancel()", "def _run(proc: Popen, timeout):\n try:\n return proc.wait(timeout=timeout)\n except TimeoutExpired:\n pass\n if sys.platform != 'win32':\n proc.send_signal(signal.SIGINT)\n try:\n return proc.wait(timeout=5)\n except TimeoutExpired:\n pass\n\n proc.terminate() # SIGTERM\n try:\n return proc.wait(timeout=5)\n except TimeoutExpired:\n pass\n\n proc.kill() # SIGKILL\n return proc.wait(timeout=5)", "def terminate(self):\n if not self.running:\n return\n self._process.stdin.write(b\"-stay_open\\nFalse\\n\")\n self._process.stdin.flush()\n self._process.communicate()\n del self._process\n self.running = False", "def terminate(self):\n while self._conns:\n conn = self._conns.pop()\n try:\n conn.send((self.EXIT, ()))\n except BrokenPipeError:\n pass\n conn.close()\n while self._processes:\n p = self._processes.pop()\n p.join(1)\n if p.exitcode is None:\n # Force termination if necessary\n p.terminate()\n p.join()\n self._running = False", "def close(self): # pragma: no cover\n if hasattr(super, \"close\"):\n return super().close()\n if self._popen is not None:\n if self._popen.poll() is None:\n raise ValueError(\n \"Cannot close a process while it is still running. 
\"\n \"You should first call join() or terminate().\"\n )\n self._popen = None\n del self._sentinel\n self._closed = True", "def __exit__(self, exc_type, exc_value, traceback):\n if self.returncode is None and self.proc.poll() is None:\n self.proc.terminate()", "def kill_gracefully(process, timeout=2):\n try:\n with suppress(ProcessLookupError):\n process.terminate()\n stdout, stderr = process.communicate(timeout=timeout)\n except TimeoutExpired:\n process.kill()\n stdout, stderr = process.communicate()\n\n return process.returncode, stdout, stderr", "def close(self):\n if not self._closed:\n self._thread_finalizer()\n self._process = None\n self._closed = True", "def close(self):\n if self.primary:\n os.close(self.primary)\n self.primary = None\n if self.secondary:\n os.close(self.secondary)\n self.secondary = None\n if hasattr(self, \"_process\") and self._process:\n if self._process.poll() is None:\n self._process.terminate()\n while self._process.poll() is None:\n time.sleep(0.001)\n self._process = None", "def wait_for_cancel():\n for pid in pids.values():\n # ps will return nonzero when the pid doesn't exist\n with pytest.raises(subprocess.CalledProcessError):\n self.env.execute_on_manager(['ps', str(pid)])", "def stop(self, timeout=1):\n if self.async:\n self.queue.put((STOP_ASYNC_PRODUCER, None))\n self.proc.join(timeout)\n\n if self.proc.is_alive():\n self.proc.terminate()", "async def main():\n\n # duplex pipe for communication between network and terminal i/o tasks\n net_pipe, term_pipe = aiopipe.aioduplex()\n\n # network process\n with net_pipe.detach() as net_pipe:\n proc = multiprocessing.Process(target=net_proc, args=(net_pipe,))\n proc.start()\n\n terminal = term.Terminal(term_pipe, fps=60)\n\n # catch ctrl-c and send it to the terminal task\n signal.signal(signal.SIGINT, terminal.sig_handler)\n\n # reason for the terminal process exiting\n reason = None\n\n try:\n reason = await terminal\n terminal.cleanup()\n except KeyboardInterrupt:\n reason = 'caught unprocessed ctrl-c multiple times'\n terminal.cleanup()\n finally:\n print(reason if not None else 'closed terminal?')\n\n # restore the default handler for the ctrl-c event\n signal.signal(signal.SIGINT, signal.SIG_DFL)\n\n proc.terminate()", "def quit(self):\n\n logging.warning(\"IPC: Quitting.\")\n try:\n self.subprocess.terminate()\n except OSError:\n logging.warning(\"Failed to terminate subprocess.\")\n self.thread_stop.set()\n\n if self.reader_thread:\n try:\n self.reader_thread.join()\n except RuntimeError:\n pass", "def abort(self):\n if self.processes is None:\n return\n\n for p in self.processes:\n if p.poll() is None:\n p.terminate()\n try:\n p.wait(timeout=2)\n except subprocess.TimeoutExpired:\n p.kill()\n # Don't catch the TimeoutExpired exception as\n # wait should return immediately after the process\n # was killed. 
If this wait times out just let\n # the exception terminate the execution as\n # something has serriously gone wrong if the\\\n # process is still running.\n p.wait(timeout=5)", "def stop(self):\n if self.is_running():\n self._stdin_queue.put_nowait(None) # Ask to stop the stdin_thread\n try:\n self._popen.terminate() # Send SIGTERM to the player, asking to stop\n log.debug('SIGTERM ' + self.name)\n except:\n pass\n self._watchdog.join(timeout=0.2) # Waiting maximum of 250 ms before killing brutaly the processus\n if self._watchdog.is_alive():\n self._popen.kill() # Send SIGNKILL to brutaly kill the process\n log.warning('KILLED ' + self.name)\n unregister_thread(self)\n self.join() # Wait for watchdog thread to terminate", "def shutdown(self, *args, **kwargs):\n # Set shared variable to 0 to signal shutdown\n logger.debug(\"Setting value to cancel\")\n self.cancel_value.value = 0\n\n self.submit_process.join()\n self.collector_thread.join()\n\n return True", "def close(self):\n logging.debug(\"Closing stream\")\n # pyserial cancel_read doesn't seem to work, therefore we ask the reader thread to close things for us\n self._wantExit = True\n if self._rxThread != threading.current_thread():\n self._rxThread.join() # wait for it to exit", "def close(self):\n self.input_queue.put(None)\n self.input_queue.join()", "def exit(self):\n if self._isSubProcessRunning() and self._exitCommand is not None:\n self.__process.stdin.write(self._exitCommand)\n self.__process.stdin.write(os.linesep)\n self.__process.stdin.flush()\n time.sleep(0.5)\n \n if self._isSubProcessRunning() :\n self.__process.kill()\n time.sleep(0.1)\n print 'Done!'", "def stop(self):\n if self._is_running():\n self._stop_event.set()\n\n for process in self._processes:\n if process.is_alive():\n os.kill(process.pid, signal.SIGINT)\n process.join()\n\n if self._queue is not None:\n self._queue.close()\n\n self._queue = None\n self._stop_event = None\n self._processes = []", "def stop(self):\n if self._is_running():\n self._stop_event.set()\n\n for process in self._processes:\n if process.is_alive():\n os.kill(process.pid, signal.SIGINT)\n process.join()\n\n if self._queue is not None:\n self._queue.close()\n\n self._queue = None\n self._stop_event = None\n self._processes = []" ]
[ "0.6373793", "0.62173724", "0.6169654", "0.61305934", "0.60700935", "0.59654534", "0.5956748", "0.5948705", "0.5810264", "0.5779637", "0.5764489", "0.57414836", "0.5654102", "0.563886", "0.5624931", "0.5586638", "0.55606866", "0.5533835", "0.54964846", "0.5493101", "0.5481666", "0.54457855", "0.544087", "0.54200304", "0.54021907", "0.5377744", "0.5371558", "0.5351986", "0.53494185", "0.53494185" ]
0.63417107
1
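
The aclose record above is trio-style (note _core.open_cancel_scope). As a hedged illustration of the same close-pipes, wait, then kill-and-re-wait cleanup order, here is a self-contained asyncio translation; the function below is a sketch under that assumption, not the record's own API.

import asyncio

async def close_and_reap(proc: asyncio.subprocess.Process) -> None:
    # Mirror the cleanup order of the record: close our end of the pipe,
    # wait for exit, and kill + re-wait if the child is somehow still alive.
    if proc.stdin is not None:
        proc.stdin.close()
    try:
        await proc.wait()
    finally:
        if proc.returncode is None:
            proc.kill()
            await proc.wait()

async def main():
    proc = await asyncio.create_subprocess_exec(
        "cat", stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE
    )
    proc.stdin.write(b"hello\n")
    await proc.stdin.drain()
    await close_and_reap(proc)

asyncio.run(main())
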
Fetch the permission for the specified team.
def get(self, namespace_name, repository_name, teamname):
    logger.debug(
        "Get repo: %s/%s permissions for team %s", namespace_name, repository_name, teamname
    )
    role = model.get_repo_role_for_team(teamname, namespace_name, repository_name)
    return role.to_dict()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_team(self, filter_) -> Optional[Team]:\n have_access_to_all_data = current_user.has_role(\n \"superadmin\"\n ) or current_user.has_role(\"dataprovider\")\n if have_access_to_all_data:\n return super()._get_team(filter_)\n\n return current_user.writable_teams.filter(filter_).first()", "def get(self, team_id):\n team = api.team.get_team(team_id)\n if not team:\n raise PicoException(\"Team not found\", 404)\n\n team_members = api.team.get_team_members(tid=team_id, show_disabled=False)\n all_scoreboards = api.scoreboards.get_all_scoreboards()\n member_eligibilities = dict()\n for member in team_members:\n member_eligibilities[member[\"uid\"]] = {\n scoreboard[\"sid\"]\n for scoreboard in all_scoreboards\n if api.scoreboards.is_eligible(member, scoreboard)\n }\n\n team_eligibilities = list(set.intersection(*member_eligibilities.values()))\n db = api.db.get_conn()\n db.teams.find_one_and_update(\n {\"tid\": team_id}, {\"$set\": {\"eligibilities\": team_eligibilities}}\n )\n\n return jsonify({\"success\": True, \"eligibilities\": team_eligibilities})", "def get_membership(self, username, team):\n try:\n return CourseTeamMembership.objects.get(user__username=username, team=team)\n except CourseTeamMembership.DoesNotExist:\n raise Http404 # lint-amnesty, pylint: disable=raise-missing-from", "def get_permission(db_manager, permission_id):\n query = f\"SELECT action, forum_id, thread_id FROM auth_permission WHERE id={permission_id};\"\n result = db_manager.process_query(query)\n\n if result:\n permission_action = result.get(\"action\")\n forum_id = result.get(\"forum_id\")\n thread_id = result.get(\"thread_id\")\n return Permission(db_manager, permission_id, permission_action, forum_id, thread_id)\n return None", "def find_by_id(self, team, params={}, **options):\n path = \"/teams/%s\" % (team)\n return self.client.get(path, params, **options)", "def get(self, request: Request, organization, team) -> Response:\n query_params = self.get_query_parameters(request)\n\n context = serialize(\n team,\n serializer=TeamSCIMSerializer(expand=_team_expand(query_params[\"excluded_attributes\"])),\n )\n return Response(context)", "def get_team(self):\n try:\n team_id = self.request.GET.get('team')\n if team_id is not None:\n team_id = int(team_id)\n return self.get_available_teams().get(pk=team_id)\n return self.get_available_teams().latest()\n except (Team.DoesNotExist, ValueError):\n return None", "def test_get_one_for_other_team_forbidden(self):\n team = Team.create(name='foo', captain_id='User_cap',\n program_id=self.program.uid)\n team.put()\n teammate = User.create(name='teammate', email='[email protected]',\n owned_teams=[team.uid])\n teammate.put()\n user = User.create(name='foo', email='[email protected]')\n user.put()\n response = self.testapp.get(\n '/api/teams/{}/users/{}'.format(team.uid, teammate.uid),\n headers=self.login_headers(user),\n status=403,\n )", "async def fetch_permissions(self, condensed=False):\n\n logging.debug(\"Getting permissions (%scondensed)\" % (\n \"\" if condensed else \"not \"))\n\n if condensed:\n perms = await self.client.request.get(\n \"/auth/permissions\", params={\"condensed\": True})\n return perms[\"data\"]\n else:\n perms = await self.client.request.get(\"/auth/permissions\")\n return [BasePermission.build_permission(\n self.client, perm, self.loop) for perm in perms[\"data\"]]", "def test_get_requests_for_team_by_user(self):\n\n token = Token.objects.get(user=self.another_user1)\n self.client.credentials(\n HTTP_AUTHORIZATION=f'Token {token.key}')\n 
params = {'teamID': self.team.id}\n response = self.client.get(reverse('api:user-team-requests-get-requests-for-team'), params)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def retrieve(self, request, pk=None):\n team = self.get_team_object(pk)\n serializer = data_serializers.PresentTeamSerializer(team)\n return Response(serializer.data)", "def permission_membership(account=None):\n if not account:\n raise Exception('Membership permission: account missing')\n \n if account.id == g.account.id:\n return app.access('profile', action='read', account=account)\n else:\n return app.access('profile', action='read', account=account) and app.access('project', action='list')", "def get_permissions(self, principal_id):", "def get_team(self, team_id):\n try:\n return CourseTeam.objects.get(team_id=team_id)\n except CourseTeam.DoesNotExist:\n raise Http404 # lint-amnesty, pylint: disable=raise-missing-from", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"\", [])\r\n elif args[0] == Team:\r\n team = Team(\"GTID\", \"team-name\", \"display-name\")\r\n return team\r\n else:\r\n calling_user = User(user)\r\n calling_user.permissions_level = Permissions.admin\r\n return calling_user", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"GTID\", [])\r\n elif args[0] == Team:\r\n team = Team(\"GTID\", \"team-name\", \"display-name\")\r\n return team\r\n else:\r\n calling_user = User(user)\r\n calling_user.permissions_level = Permissions.admin\r\n return calling_user", "def get_team(self, team_reference, include_users=False):\n url = 'teams/{0}'.format(team_reference)\n result = self.get(url, {'include_users': include_users})\n #TODO: check how included users returned\n return result.get('team', result)", "def get_team(uid=None):\n user = get_user(uid=uid)\n return api.team.get_team(tid=user[\"tid\"])", "def team_members_id_team_permission_get(self, id, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.team_members_id_team_permission_get_with_http_info(id, **kwargs)\n else:\n (data) = self.team_members_id_team_permission_get_with_http_info(id, **kwargs)\n return data", "def team_members_id_team_permission_get_with_http_info(self, id, **kwargs):\n\n all_params = ['id', 'refresh']\n all_params.append('callback')\n all_params.append('_return_http_data_only')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method team_members_id_team_permission_get\" % key\n )\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'id' is set\n if ('id' not in params) or (params['id'] is None):\n raise ValueError(\"Missing the required parameter `id` when calling `team_members_id_team_permission_get`\")\n\n\n collection_formats = {}\n\n resource_path = '/TeamMembers/{id}/team/permission'.replace('{format}', 'json')\n path_params = {}\n if 'id' in params:\n path_params['id'] = params['id']\n\n query_params = {}\n if 'refresh' in params:\n query_params['refresh'] = params['refresh']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])\n if not header_params['Accept']:\n del 
header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])\n\n # Authentication setting\n auth_settings = ['access_token']\n\n return self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='TeamPermissionSet',\n auth_settings=auth_settings,\n callback=params.get('callback'),\n _return_http_data_only=params.get('_return_http_data_only'),\n collection_formats=collection_formats)", "def getpermission(self, context=None, componentid=None, app=None):\n return jsoncall.do_call(\"getpermission\", {'modelname':self.modelname,\\\n 'user':self.user,\\\n 'password':self.password,\\\n 'context': context,\\\n 'componentid': componentid,\\\n 'app': app},\n self.connection)", "async def getch_team(self, id: str):\n return self.get_team(id) or await self.fetch_team(id)", "def test_get_all_for_other_team_forbidden(self):\n team = Team.create(name='foo', captain_id='User_cap',\n program_id=self.program.uid)\n team.put()\n user = User.create(name='foo', email='[email protected]')\n user.put()\n response = self.testapp.get(\n '/api/teams/{}/users'.format(team.uid),\n headers=self.login_headers(user),\n status=403\n )", "def find_pick_for_team(self, team, key_only=False):\n return self.find_pick('team', team)", "async def _get_team_info(self, server_id: str, team_id: str):\n params = {}\n url = self.api_url + 'teams/{}'.format(team_id)\n\n return await self._make_request(url, params, server_id)", "def team_read(token_user, team_id):\n team = Team.query.get(team_id)\n if team is None:\n abort(404, 'team not found')\n\n return json.dumps(team.as_dict(for_user=token_user))", "def octopus_permissions_get(self, msg, args):\r\n return self.permissions.get_permissions()", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"\", [])\r\n elif args[0] == Team:\r\n team = Team(\"GTID\", \"team-name\", \"display-name\")\r\n return team\r\n else:\r\n raise LookupError(\"team lookup error\")", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"GTID\", [])\r\n elif args[0] == Team:\r\n team = Team(\"GTID\", \"team-name\", \"display-name\")\r\n team.team_leads.add(user)\r\n return team\r\n else:\r\n calling_user = User(user)\r\n return calling_user", "def facade_retrieve_side_effect(*args, **kwargs):\r\n if args[0] == Project:\r\n return Project(\"\", [])\r\n elif args[0] == Team:\r\n team = Team(\"GTID\", \"team-name\", \"display-name\")\r\n team.team_leads.add(user)\r\n return team\r\n else:\r\n calling_user = User(user)\r\n return calling_user" ]
[ "0.6106908", "0.5748608", "0.56881076", "0.56774604", "0.55650204", "0.55000824", "0.5437011", "0.5423022", "0.5415314", "0.537421", "0.5367137", "0.536253", "0.53582126", "0.5344388", "0.5340846", "0.5338855", "0.53167564", "0.52845156", "0.5233721", "0.5189439", "0.51863176", "0.51605", "0.51601017", "0.5117823", "0.5102754", "0.509327", "0.50812477", "0.5040067", "0.502456", "0.49981228" ]
0.58408916
1
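
The permission records above and below read like Quay-style Flask API resources. As a hedged client-side sketch of exercising such endpoints with requests, the host, URL layout, and token below are all assumptions for illustration only.

import requests

BASE = "https://quay.example.com/api/v1"  # hypothetical host
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

# Fetch the team's permission on a repository (the GET record above).
resp = requests.get(
    f"{BASE}/repository/myorg/myrepo/permissions/team/dev-team", headers=HEADERS
)
print(resp.json())  # e.g. {"role": "read", ...}

# Update it (the PUT record below): the handler reads {"role": ...} from the body.
resp = requests.put(
    f"{BASE}/repository/myorg/myrepo/permissions/team/dev-team",
    headers=HEADERS,
    json={"role": "write"},
)
print(resp.status_code)  # 200 on success
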
Update the existing team permission.
def put(self, namespace_name, repository_name, teamname):
    new_permission = request.get_json()
    logger.debug("Setting permission to: %s for team %s", new_permission["role"], teamname)
    try:
        perm = model.set_repo_permission_for_team(
            teamname, namespace_name, repository_name, new_permission["role"]
        )
        resp = perm.to_dict()
    except SaveException as ex:
        raise request_error(exception=ex)
    log_action(
        "change_repo_permission",
        namespace_name,
        {"team": teamname, "repo": repository_name, "role": new_permission["role"]},
        repo_name=repository_name,
    )
    return resp, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def patch(self, team_id, project_id):\n try:\n role = request.get_json(force=True)[\"role\"]\n except DataError as e:\n current_app.logger.error(f\"Error validating request: {str(e)}\")\n return {\"Error\": str(e), \"SubCode\": \"InvalidData\"}, 400\n\n try:\n if not ProjectAdminService.is_user_action_permitted_on_project(\n token_auth.current_user, project_id\n ):\n raise ValueError()\n TeamService.change_team_role(team_id, project_id, role)\n return {\"Status\": \"Team role updated successfully.\"}, 200\n except ValueError:\n return {\n \"Error\": \"User is not a manager of the project\",\n \"SubCode\": \"UserPermissionError\",\n }, 403\n except TeamServiceError as e:\n return str(e), 402", "def team_update(token_user, team_id):\n team = Team.query.get(team_id)\n\n if team is None:\n abort(404, 'team not found')\n\n if not json_param_exists('name'):\n abort(400, 'one or more required parameter is missing')\n\n name = request.json['name']\n\n if not (token_user.has_permission('team.update.elevated') or\n (token_user.has_permission('team.update') and\n team.has_member(token_user))):\n abort(403, 'insufficient permissions to modify team')\n\n team.name = name\n\n try:\n get_db().add(team)\n get_db().commit()\n except IntegrityError:\n abort(409, 'team name is already in use')\n\n return '', 204", "def mutateTeam(self, team):\n self.removeLearners(team)\n self.addLearners(team)\n self.mutateLearners(team)", "def update_admin_permission(self) -> None:\n session = self.appbuilder.get_session\n dag_resources = session.scalars(\n select(Resource).where(Resource.name.like(f\"{permissions.RESOURCE_DAG_PREFIX}%\"))\n )\n resource_ids = [resource.id for resource in dag_resources]\n\n perms = session.scalars(select(Permission).where(~Permission.resource_id.in_(resource_ids)))\n perms = [p for p in perms if p.action and p.resource]\n\n admin = self.find_role(\"Admin\")\n admin.permissions = list(set(admin.permissions) | set(perms))\n\n session.commit()", "def update(request, role_id):\n\n role = get_object_or_404(ProjectRole, pk=role_id)\n\n # require permission to proceed\n must_have_permission(request.user, role.project, \"can_edit_roles\")\n\n permittee = Permittee.objects.get_as_permittee(request.user)\n\n initial_set = list(role.obj_permissions.values_list(\"pk\", flat=True))\n\n # Get the permissions that the user can delegate to others as well\n # as the ones that are already in the role. 
Obtain DISTINCT values.\n obj_permissions = ObjectPermission.objects.filter_from_instance(\n role.project).filter(\n Q(permissionownership__permittee=permittee,\n permissionownership__can_delegate=True) |\n Q(id__in=initial_set)\n ).distinct()\n\n project_url = reverse(\"project_detail\", args=[role.project.id])\n\n # Use to update the permissions in the ProjectRole object so\n # users with that role are affected from the time this is updated\n def post_save(instance, created):\n from expedient.clearinghouse.roles.models import ObjectPermission\n new_obj_permissions_pks = [ p.pk for p in instance.obj_permissions.all() ]\n for permission in obj_permissions:\n # Add and delete permissions accordingly...\n try:\n instance.remove_permission(permission)\n except:\n pass\n if permission.pk in new_obj_permissions_pks:\n instance.add_permission(permission)\n\n return generic_crud(\n request,\n obj_id=role_id,\n model=ProjectRole,\n template=TEMPLATE_PATH+\"/update.html\",\n redirect=lambda instance: project_url,\n template_object_name=\"role\",\n form_class=ProjectRoleForm,\n extra_form_params={\n \"obj_permissions\": obj_permissions,\n },\n extra_context={\n \"project\": role.project,\n \"breadcrumbs\": (\n (\"Home\", reverse(\"home\")),\n (\"Project %s\" % role.project.name, project_url),\n (\"Update Role %s\" % role.name, request.path),\n )\n },\n post_save = post_save,\n )", "async def team_role(self, ctx: commands.Context, *, team: HockeyTeams) -> None:\n guild = ctx.message.guild\n if team is None:\n return await ctx.send(_(\"You must provide a valid current team.\"))\n try:\n role = [\n role\n for role in guild.roles\n if (team.lower() in role.name.lower() and \"GOAL\" not in role.name)\n ]\n if role[0] >= guild.me.top_role:\n return\n await ctx.author.add_roles(role[0])\n await ctx.send(role[0].name + _(\" role applied.\"))\n except Exception:\n log.error(\"error adding team role\", exc_info=True)\n await ctx.send(team + _(\" is not an available role!\"))", "def update(self, request, pk):\n print(\"Update a team\")\n serializer = data_serializers.UpdateTeamSerializer(data=request.data)\n if serializer.is_valid(raise_exception=True):\n request_data = serializer.save()\n new_team_entity = self.controller.update_team(request_data=request_data)\n\n serializer = data_serializers.PresentTeamSerializer(new_team_entity)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def test_member_of_team_with_non_PM_role_cannot_update_project(self):\n # Arrange\n team = create_canned_team()\n add_user_to_team(team, self.test_user, TeamMemberFunctions.MEMBER.value, True)\n project_team = assign_team_to_project(\n self.test_project, team, TeamRoles.VALIDATOR.value\n )\n # Act\n response = self.client.patch(\n self.url,\n json=self.project_update_body,\n content_type=\"application/json\",\n headers={\"Authorization\": self.user_session_token},\n )\n # Assert\n self.assertEqual(response.status_code, 403)\n # Cleanup\n project_team.delete()", "def modify_tournament(self, nickname, team):\n cmd = '{}modifyTournament \"{}\" {}'.format(self.console, Commands.aquote(nickname), Commands.get_team(team))\n self.write_command(cmd)", "def _update(self, cursor, team):\n existing = self.get(team_id=team.id)\n if team.name != existing.name:\n cursor.execute(\n 'UPDATE %s SET name = %%s WHERE id = %%s' % self.table_name,\n (team.name, team.id)\n )\n\n existing_users = set(u for u in existing)\n new_users = set(u for u in 
team)\n\n for u in new_users - existing_users:\n self.user_repository.save(u)\n self.user_repository.set_team(u, team)", "def update(\n self,\n redditor: str | praw.models.Redditor,\n *,\n permissions: list[str] | None = None,\n ):\n url = API_PATH[\"setpermissions\"].format(subreddit=self.subreddit)\n data = self._handle_permissions(\n other_settings={\"name\": str(redditor), \"type\": \"moderator\"},\n permissions=permissions,\n )\n self.subreddit._reddit.post(url, data=data)", "def test_update_team_user_group(client):\n group = client.update_team_user_group(TEAM_ID, GROUP_ID, {\n \"name\": \"Updated Python group\",\n \"is_reviewer\": False,\n \"is_admin\": True,\n \"admin_rights\": [\"upload\"]\n })\n assert group.team_id == TEAM_ID\n assert group.group_id == GROUP_ID\n assert group.name == \"Updated Python group\"\n assert group.permissions['is_admin']\n assert not group.permissions['is_reviewer']", "def update_object_permissions(self, agent, Field, Set, Mask, Override = False):\n\n self.send_ObjectPermissions(agent, agent.agent_id, agent.session_id, Field, Set, Mask, Override)", "def update(self, request, pk):\n print(\"Update a team\")\n serializer = data_serializers.UpdateTeamLeaderRequestDataSerializer(data=request.data)\n if serializer.is_valid(raise_exception=True):\n request_data = serializer.save()\n try:\n new_team_entity = self.controller.change_team_leader(request_data=request_data)\n serializer = data_serializers.TeamLeaderPresenterSerializer(new_team_entity)\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n except (\n domain_exceptions.TeamDoesNotExist,\n domain_exceptions.ObjectEntityDoesNotExist,\n domain_exceptions.UpdateOfTeamLeaderOfWrongTeam,\n domain_exceptions.EmployeeDoesNotExist\n )as e:\n return Response(e.message, status=status.HTTP_400_BAD_REQUEST)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)", "def put(self, orgname, prototypeid):\n permission = AdministerOrganizationPermission(orgname)\n if permission.can() or allow_if_superuser():\n try:\n org = model.organization.get_organization(orgname)\n except model.InvalidOrganizationException:\n raise NotFound()\n\n existing = model.permission.get_prototype_permission(org, prototypeid)\n if not existing:\n raise NotFound()\n\n details = request.get_json()\n role_name = details[\"role\"]\n prototype = model.permission.update_prototype_permission(org, prototypeid, role_name)\n if not prototype:\n raise NotFound()\n\n log_prototype_action(\n \"modify_prototype_permission\", orgname, prototype, original_role=existing.role.name\n )\n\n users_filter = {prototype.activating_user, prototype.delegate_user}\n org_members = model.organization.get_organization_member_set(\n org, users_filter=users_filter\n )\n return prototype_view(prototype, org_members)\n\n raise Unauthorized()", "def modify_everyone(self, team):\n team_number = Commands.get_team(team)\n for player in self.players.all_nicknames():\n cmd = '{}modifyTournament \"{}\" {}'.format(self.console, Commands.aquote(player), team_number)\n self.write_command(cmd)", "def test_update_team(self):\n pass", "def update_team(self, vid, team):\n return self \\\n .team(vid) \\\n .is_team_identifier(team.identifier) \\\n .property(Cardinality.single, 'name', team.name) \\\n .elementMap()", "async def team_goals(self, ctx: commands.Context, *, team: HockeyTeams = None) -> None:\n guild = ctx.message.guild\n member = ctx.message.author\n if not guild.me.guild_permissions.manage_roles:\n return\n if team is None:\n team_roles = 
[]\n for role in guild.roles:\n if role.name in [r.name + \" GOAL\" for r in member.roles]:\n team_roles.append(role)\n if team_roles != []:\n for role in team_roles:\n if role[0] >= guild.me.top_role:\n continue\n await ctx.message.author.add_roles(role)\n role_list = \", \".join(r.name for r in team_roles)\n await ctx.message.channel.send(f\"{role_list} role applied.\")\n return\n else:\n await ctx.send(\n _(\"Please provide the team you want the goal notification role for.\")\n )\n return\n else:\n try:\n role = [\n role\n for role in guild.roles\n if (team.lower() in role.name.lower() and role.name.endswith(\"GOAL\"))\n ]\n await ctx.message.author.add_roles(role[0])\n await ctx.message.channel.send(role[0].name + _(\" role applied.\"))\n except Exception:\n await ctx.message.channel.send(team + _(\" is not an available role!\"))", "def update_member_teams(request, team_name):\n if request.method == 'GET':\n email = request.session.get('email', None)\n member = Member.objects.get(email=email)\n all_teams = Team.objects.all()\n\n for team in all_teams:\n if team.name == team_name:\n member.teams.add(team)\n break\n\n message = 'Member teams updated succesffully'\n messages.add_message(request, messages.INFO, message)\n return redirect('teamsapp:teams')\n else:\n raise Http404('Not allowed')", "def update(self, permission, **kwargs):\n kwargs['permission'] = permission\n return self.update_instance(**kwargs)", "def _update(self, uuid, name, permissions):\n data = {\"name\": name, \"permissions\": permissions, \"uuid\": uuid}\n path = self.router.roles_by_uuid.format(uuid=uuid)\n return self.request(\n method=\"post\", path=path, json=data, error_json_invalid=False\n )", "def update_invite(\n self,\n redditor: str | praw.models.Redditor,\n *,\n permissions: list[str] | None = None,\n ):\n url = API_PATH[\"setpermissions\"].format(subreddit=self.subreddit)\n data = self._handle_permissions(\n other_settings={\"name\": str(redditor), \"type\": \"moderator_invite\"},\n permissions=permissions,\n )\n self.subreddit._reddit.post(url, data=data)", "def put_grants(team_id):\n if not TeamPermission.is_manager(team_id):\n abort(403)\n\n payload = get_payload()\n grants = TeamController.put_grants(team_id=team_id, grants=payload[\"grants\"])\n return jsonify(grants)", "def set_user_role(request):\n id_user = request.POST.get('user_id')\n role = request.POST.get('role')\n id_projet = request.POST.get('project_id')\n # retrieves the user whose role needs to be changed\n user_to_modify = User.objects.get(pk=id_user)\n # check if user can attribute role for the project\n project = UtilsData.get_object_by_type_and_id('project', id_projet)\n if request.user.can_affect(project):\n # Verifies if the user whose role is to be changed is the administrator\n if user_to_modify.is_superuser:\n return HttpResponse(json.dumps(\"error you can't remove admin role\"),\n content_type=\"application/json\")\n else:\n # change role\n project.setRole(user_to_modify, role)\n return HttpResponse(json.dumps(\"ok\"),\n content_type=\"application/json\")", "def register_team_membership(self, body):\n url_regex = re.compile(r'^{url}teams/\\d+/memberships/\\w+$'.format(\n url=re.escape(self.URL),\n ))\n httpretty.register_uri(\n httpretty.PUT, url_regex, body=body\n )\n httpretty.register_uri(\n httpretty.DELETE, url_regex, body=body\n )", "def change_team(self, timestamp, team):\n new_team_id = TEAM_CHOICES_REVERSE[team]\n if new_team_id == self.team_current:\n return\n\n self.update_team_time(timestamp, save=False)\n 
self.team_current = new_team_id\n self.save()", "def test_remove_from_team_forbidden(self):\n team = Team.create(name='foo', captain_id='User_cap',\n program_id=self.program.uid)\n team.put()\n user = User.create(name='foo', email='[email protected]', user_type='user',\n owned_teams=['Team_foo'])\n req = User.create(name='requestor', email='[email protected]',\n user_type='user')\n user.put()\n req.put()\n\n response = self.testapp.put_json(\n '/api/users/{}'.format(user.uid),\n {'owned_teams': []},\n headers=self.login_headers(req),\n status=403,\n )\n\n # Not changed in the db.\n fetched_user = User.get_by_id(user.uid)\n self.assertEqual(user.user_type, fetched_user.user_type)\n self.assertEqual(user.owned_teams, fetched_user.owned_teams)", "def changePermissions(self, event):\n pass", "def changeRoleInfo(self, role, info):" ]
[ "0.6878937", "0.6413856", "0.63367105", "0.6301946", "0.6131176", "0.6117391", "0.6080835", "0.6073183", "0.60508394", "0.5943088", "0.5899792", "0.5887498", "0.5872705", "0.58438593", "0.58361334", "0.5809778", "0.5783322", "0.57781184", "0.5766685", "0.57608736", "0.5757116", "0.5722047", "0.57102764", "0.5676126", "0.5660831", "0.56593055", "0.56269133", "0.5620134", "0.5560845", "0.55449706" ]
0.7250105
0
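Each record in this dump pairs a natural-language `query` with a gold `document`, a list of `negatives`, per-negative `negative_scores`, and the gold document's score and rank. The metadata's `triplet` objective suggests how the fields are consumed; a minimal sketch follows (the field names come from the records themselves, while the consuming loss function is an assumption left to the training framework):

```python
def build_triplets(record):
    """Expand one record into (anchor, positive, negative) training triplets."""
    query = record["query"]        # natural-language anchor
    document = record["document"]  # gold code snippet, the positive
    for negative in record["negatives"]:
        yield (query, document, negative)
```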
Add event 'event', which is of type Event, to the min-heap of events; assign an id to it and return the assigned id.
def addEvent(self, event):
    event.__id = self.__next_id  # assign the next free id from an instance counter
    self.__next_id += 1
    self.addToHeap(event)
    return event.__id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def event_id(self, event_name):\n try:\n event_id = self.gui_event_ids[event_name]\n except KeyError:\n event_id = len(self.gui_events)\n self.gui_event_ids[event_name] = event_id\n self.gui_events.append(event_name)\n if event_id >= 16383:\n raise RuntimeError(\"Maximum number of events exceeded\")\n return event_id", "def insertEvent(self, event):\r\n key=event.time\r\n newitem = [key, event]\r\n for i,item in enumerate(self.__agenda):\r\n if key < item[0]: \r\n self.__agenda.insert(i,newitem)\r\n break", "def _get_next_event_id():\n VenueCrawler._event_id += 1\n return VenueCrawler._event_id", "def push(self, event):\n ignored = self.ignored__.pop(event_key(event), None)\n if ignored:\n if event.date - ignored.date < 2:\n return\n\n cursor = self.conn.cursor()\n\n cursor.execute(\"\"\"\n SELECT * FROM events\n WHERE path = ? AND event = ? AND folder = ? AND countdown > 0\n \"\"\", [event.path, event.event, event.folder])\n\n row = cursor.fetchone()\n if row:\n # print \"Ingoring Duplicate event: %s\" % (event, )\n pass\n else:\n cursor.execute(\n \"REPLACE INTO events VALUES (?, ?, ?, ?, ?, ?, ?)\", event\n )\n self.conn.commit()", "def add_event(\n self,\n event_time: float,\n event_action: Callable,\n event_id: Optional[uuid.UUID] = None,\n ) -> uuid.UUID:\n if event_id is None:\n event_id = uuid.uuid4()\n\n self.scheduled_events[event_id] = self.scheduler.enterabs(\n event_time, 0, event_action,\n )\n\n return event_id", "def _gen_id(event):\n eid = np.sort(np.unique(event))\n iid = {}\n for i in xrange(len(eid)):\n iid[eid[i]] = i\n return len(eid), eid, iid", "def push(event):\n _pushedEvents.append(event)", "def __genNewEventId(self):\n while 1:\n tmp = ''.join(random.choice(ID_LETTERS) for _ in range(EID_SIZE))\n if tmp in self._events: continue\n else: return tmp", "def add_event(self, event):\r\n return super().insert_event(event)", "def create_event(conn, event):\n sql = ''' INSERT INTO events(ISO_Week,Event_ISO_Date,Unix_Time,Node,Event_Type,Duration)\n VALUES(?,?,?,?,?,?) '''\n c = conn.cursor()\n c.execute(sql, event)\n conn.commit()\n return c.lastrowid", "def add_event(event):\n # add event + add all the to be generated recurrences\n with session_scope(DBSession) as session:\n session.add(event)\n session.flush()\n add_recurrences(session, event)", "def createEvent(self, mispevent):\n\t\tif mispevent is None:\n\t\t\treturn None\n\n\t\t# Not empty event\n\t\tjevent = json.dumps(mispevent, cls=EncodeUpdate)\n\t\tmisp_event = self.misp.add_event(jevent)\n\t\tmispid = misp_event[\"Event\"][\"id\"]\n\t\treturn mispid", "def _AddSerializedEvent(self, event):\n identifier = identifiers.SerializedStreamIdentifier(\n self._last_stream_numbers['event'],\n self._serialized_event_heap.number_of_events)\n event.SetIdentifier(identifier)\n\n serialized_data = self._SerializeAttributeContainer(event)\n\n self._serialized_event_heap.PushEvent(event.timestamp, serialized_data)\n\n if self._serialized_event_heap.data_size > self._maximum_buffer_size:\n self._WriteSerializedEvents()", "def add_event(self, event: Event) -> Event:\n if event.name not in self._events:\n self._events[event.name] = event\n else:\n if event != self._events[event.name]:\n raise KeyError(\"An event with this name already exists.\")\n # TODO - This is totally recoverable. We need to combine constraints,\n # ensure there is only one agreed-upon set of variables, etc. Just need\n # to decide how smart to be. 
So for now, just error.\n return event", "def _get_event_id(oracle_cursor): # -> (int)\r\n\r\n oracle_cursor.execute(\"\"\"select event_seq.NEXTVAL from dual\"\"\")\r\n row = oracle_cursor.fetchone()\r\n\r\n event_id = int(row[0])\r\n\r\n oracle_cursor.execute(\"\"\"\r\n INSERT INTO event (event_id, event_type_cd, event_timestamp)\r\n VALUES (:event_id, 'SYST', sysdate)\r\n \"\"\",\r\n event_id=event_id\r\n )\r\n\r\n return event_id", "def publish_event(self, event):\n event_priority = getattr(event, \"event_priority\", 99999)\n heappush(self.sprite_events, (event_priority, event))", "def _get_event_id(oracle_cursor): # -> (int)\n\n oracle_cursor.execute(\"\"\"select event_seq.NEXTVAL from dual\"\"\")\n row = oracle_cursor.fetchone()\n\n event_id = int(row[0])\n\n oracle_cursor.execute(\"\"\"\n INSERT INTO event (event_id, event_type_cd, event_timestamp)\n VALUES (:event_id, 'SYST', sysdate)\n \"\"\",\n event_id=event_id\n )\n\n return event_id", "def event_id(self):\n return self._event_id", "def event(self, id):\r\n return Event(self, id)", "def register_event(self, event):\n if event not in self.events:\n self.events[event] = list()", "def event(self, event_id):\r\n return e.Event(self, event_id)", "def GetNextEventId(cls):\n cls._parsed_events += 1\n return cls._parsed_events", "def push_event(self, evt):\n self.event_list.append(evt)", "async def createEvent(self, event: Event) -> None:", "def set_val_event_id():\n\n # Get the max event_id in the database\n result = db.session.query(func.max(Event.event_id)).one()\n max_id = int(result[0])\n\n # Set the value for the next event_id to be max_id\n query = \"SELECT setval('events_event_id_seq', :new_id)\"\n db.session.execute(query, {'new_id': max_id})\n db.session.commit()", "def testPushEvent(self):\n event_heap = psort.PsortEventHeap()\n\n self.assertEqual(len(event_heap._heap), 0)\n\n event, event_data, event_data_stream = (\n containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))\n event_heap.PushEvent(event, event_data, event_data_stream)\n\n self.assertEqual(len(event_heap._heap), 1)", "def add_event(self):\n event_time = int(time())\n\n if self.events_by_seconds[-1][0] == event_time:\n self.events_by_seconds[-1][1] +=1\n else:\n self.events_by_seconds.append([event_time, 1])", "def create_event(company, topic, date):\n connection = get_connection()\n cursor = connection.cursor()\n data = cursor.execute('SELECT MAX(eventID) FROM Event')\n event_id = data.fetchone()[0]\n if event_id is None:\n event_id = 0\n else:\n event_id += 1\n sql_string = \"INSERT INTO Event VALUES(\"+str(event_id)+\", '\"+company+\"', '\"+topic+\"', '\"+date+\"')\"\n cursor.execute(sql_string)\n connection.commit()", "def event_id(self, event_id):\n\n self._event_id = event_id", "def event_id(self, event_id):\n\n self._event_id = event_id" ]
[ "0.64335954", "0.6268853", "0.61110896", "0.61066127", "0.6027039", "0.5884291", "0.587974", "0.5795214", "0.5773811", "0.57574904", "0.56374335", "0.56098413", "0.56089914", "0.5601371", "0.5556427", "0.5553871", "0.5553545", "0.55280185", "0.5497093", "0.5451976", "0.5448499", "0.54436207", "0.54336494", "0.5419043", "0.54107594", "0.54012954", "0.53893924", "0.5344427", "0.53389627", "0.53389627" ]
0.82262385
0
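For context, a self-contained, runnable sketch of the behavior this record's query describes. The class name, the `(time, id, event)` heap ordering, and the `itertools.count` counter are illustrative assumptions, not the source's API:

```python
import heapq
import itertools

class EventQueue:
    def __init__(self):
        self._heap = []
        self._ids = itertools.count()  # monotonically increasing event ids

    def addEvent(self, event):
        event_id = next(self._ids)  # assign an id to the event
        # The id also breaks ties, so two events with equal times never
        # need to be compared directly.
        heapq.heappush(self._heap, (event.time, event_id, event))
        return event_id  # return the assigned id
```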
Given `src_lines`, a list of lines of a single record, this will instantiate and populate an object corresponding to the data.
def __init__(self, src_lines):
    self.study_id = None
    self.citation = None
    self.abstract = None
    self.authors = []
    self.study_matrices = {}
    self.history_date = None
    self.history_time = None
    self.history_person = None
    self.history_event = None
    self.analyses = []
    self.parse_src_lines(src_lines)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_lines(cls, lines: List[str], mode: str):\n for line in lines:\n if line.startswith('Original Input'):\n _input = line[line.find(':') + 1 :].strip()\n elif line.startswith('Predicted Str'):\n pred = line[line.find(':') + 1 :].strip()\n elif line.startswith('Ground-Truth'):\n target = line[line.find(':') + 1 :].strip()\n elif line.startswith('Ground Classes'):\n classes = line[line.find(':') + 1 :].strip()\n return cls(_input, target, pred, classes, mode)", "def from_abslines(cls, abslines):\n # Check\n if not isinstance(abslines,list):\n raise IOError('Need a list of AbsLine objects')\n if not all(isinstance(x,AbsLine) for x in abslines):\n raise IOError('List needs to contain AbsLine objects')\n # Instantiate with the first line\n init_line = abslines[0]\n slf = cls( init_line.attrib['coord'],\n (init_line.data['Z'],init_line.data['ion']),\n init_line.attrib['z'], init_line.analy['vlim']) \n slf._abslines.append(init_line)\n # Append with component checking\n if len(abslines) > 1:\n for absline in abslines[1:]:\n slf.add_absline(absline)\n # Return\n return slf", "def __init__(self, source):\n if isinstance(source, str):\n self.line_iter = iter(source.splitlines())\n elif isinstance(source, io.TextIOBase):\n self.line_iter = source\n else:\n raise TypeError('source must be either a string or a text file')\n self.line_iter = enumerate(self.line_iter)\n self.source = source", "def parse_lines(lines):\n for line in lines:\n yield Record(line)", "def __init__(self, path, exts, fields, **kwargs):\n if not isinstance(fields[0], (tuple, list)):\n fields = [('src', fields[0]), ('trg', fields[1])]\n\n src_path, trg_path = tuple(os.path.expanduser(path + x) for x in exts)\n\n examples = []\n with open(src_path) as src_file, open(trg_path) as trg_file:\n for src_line, trg_line in zip(src_file, trg_file):\n src_line, trg_line = src_line.strip(), trg_line.strip()\n if src_line != '' and trg_line != '':\n examples.append(data.Example.fromlist(\n [src_line, trg_line], fields))\n\n super(TranslationDataset, self).__init__(examples, fields, **kwargs)", "def __init__(self, lines):\n self.shop = None\n\n if lines:\n # Detect if user used semicolon and convert to comma\n if len(lines[0].split(';'))>1:\n lines = '\\n'.join(lines)\n lines = lines.replace(';', ',')\n lines = lines.split('\\n')\n # Ignore comments\n lines = [line for line in lines if not line.startswith('#')]\n\n self.lines = lines\n self.line_count = len(lines)\n self.warning = None", "def __init__(self, scn_line_list):\n self.scn_line_list = scn_line_list", "def __init__(self, lines):\n self.table = OrderedDict()\n\n # Load bp file, put lines in table, where the key is the key\n # and the value is the list of values on that bp line\n for line in lines:\n elements = line.split(';')\n self.table[elements[0]] = elements[1:]", "def __init__(self, *records: ScalarSequence):\n self._records = [r for r in records if r]", "def from_lines(\n cls,\n lines: Iterable[str],\n encoding: str = DEFAULT_ENCODING,\n newline: str = DEFAULT_NEWLINE,\n mtime: str = \"\",\n ) -> \"TextDocument\":\n return cls(None, lines, encoding=encoding, newline=newline, mtime=mtime)", "def parse_lines(self, lines):\n assert isinstance(lines, Iterable)\n\n for line in lines:\n name, values = self.parse_line(line)\n self.add(name, values)", "def fromLines(cls, listOfLine: list):\n points = cls.edgeToVertex(listOfLine)\n return cls.fromPoints(points)", "def fromLines(cls, listOfLine: list):\n points = cls.edgeToVertex(listOfLine)\n return cls.fromPoints(points)", "def 
fromLines(cls, listOfLine: list):\n points = cls.edgeToVertex(listOfLine)\n return cls.fromPoints(points)", "def init_line_list():\n # Get str lengths from defs\n len_line = defs.str_len()['ion']\n len_src = defs.str_len()['Source']\n # Load sources to check\n sources = arcl_io.load_source_table()\n src_files = sources['File'].data\n if len(src_files[0]) > len_src:\n raise ValueError(\"Source filename now exceeds table. Should fix source name\")\n dummy_src = str('#')*len_src\n # Arc Line name\n dummy_line = str('#')*len_line\n #\n\n # Dict for Table\n idict = OrderedDict()\n idict['ion'] = dummy_line\n idict['wave'] = 0.\n idict['NIST'] = 0\n idict['Instr'] = 0 # Flag for instrument\n idict['amplitude'] = 0\n idict['Source'] = dummy_src\n\n # Table\n tkeys = idict.keys()\n lst = [[idict[tkey]] for tkey in tkeys]\n init_tbl = Table(lst, names=tkeys)\n\n # Return\n return init_tbl", "def _create_cfg_line_objects(self):\n start = timeit.default_timer()\n for number, text in enumerate(self.config_lines_str):\n if re.match(pattern=r\"^interface\\s\\S+\", string=text, flags=re.MULTILINE):\n self.lines.append(self.INTERFACE_LINE_CLASS(number=number, text=text, config=self, verbosity=self.verbosity).return_obj())\n else:\n self.lines.append(BaseConfigLine(number=number, text=text, config=self, verbosity=self.verbosity).return_obj())\n for line in self.lines:\n line.type = line.get_type\n self.logger.debug(msg=\"Created {} ConfigLine objects in {} ms.\".format(len(self.lines), (timeit.default_timer()-start)*1000))", "def __init__(self, source_file, template):\n self.source_file = source_file\n self.serializer = template.serializer\n self.headers = template.fields\n # sometimes empty cells are treated as cells with values. So we need to trim them.\n self.slice_index = len(self.headers)\n\n self.rows_imported = 0\n self.rows_skipped = 0\n\n self.objects = []\n self.is_parsed = False\n self.errors = {}\n\n self._is_valid = None\n self._iterator = None", "def __init__(\n self,\n sources: List[str],\n source_parameters: Sequence[SourceParameterFactory] = [],\n hints: Sequence[HintRowFactory] = [],\n ) -> None:\n\n self.sources = sources\n self.source_parameters = list(source_parameters)\n self.hints = list(hints)\n return", "def __init__(self, network, lines, preimages=True):\n self.network = network\n self.lines = lines\n self.preimages = preimages\n\n self.partially_computed = False\n self.transformed_lines = None\n\n self.computed = False\n self.classifications = None", "def test_lines_class_parse(logger):\n raw_bytes = b''\n point_data_fragment = [\n # Only 1 line in this \"data\"\n (\n struct.pack(lines.Lines.fmt, 1),\n 1,\n ),\n # line:\n #\n # brush type\n (\n struct.pack(lines.BrushType.fmt, lines.BrushType.REVERSE['pen']),\n lines.BrushType.REVERSE['pen'],\n ),\n # colour\n (\n struct.pack(lines.Colour.fmt, lines.Colour.REVERSE['black']),\n lines.Colour.REVERSE['black']\n ),\n # magical unknown line attribute 1\n (\n struct.pack(lines.LineAttribute1.fmt, 0),\n 0\n ),\n # base brush size\n (\n struct.pack(\n lines.BrushBaseSize.fmt, lines.BrushBaseSize.REVERSE['small']\n ),\n lines.BrushBaseSize.REVERSE['small']\n ),\n # one point:\n (struct.pack(lines.Points.fmt, 1), 1),\n # the single point's data:\n (struct.pack(lines.X.fmt, 12.341), 12.341),\n (struct.pack(lines.Y.fmt, 107.301), 107.301),\n (struct.pack(lines.Pressure.fmt, 0.351), 0.351),\n (struct.pack(lines.RotX.fmt, 0.03), 0.03),\n (struct.pack(lines.RotY.fmt, 0.216), 0.216),\n ]\n for data in point_data_fragment:\n raw_bytes += 
data[0]\n\n # Set up the generator with the raw bytes:\n position = recover(raw_bytes)\n data = next(position)\n assert data == ''\n\n result = lines.Lines.load(position)\n assert result.count == 1\n assert len(result.lines) == 1\n result = result.lines[0]\n assert result.brush_type.name == 'pen'\n assert result.colour.name == 'black'\n assert result.line_attribute1.value == 0\n assert result.brush_base_size.name == 'small'\n assert result.points.count == 1\n result = result.points.points[0]\n assert round(result.x, 3) == 12.341\n assert round(result.y, 3) == 107.301\n assert round(result.pressure, 3) == 0.351\n assert round(result.rot_x, 3) == 0.03\n assert round(result.rot_y, 3) == 0.216", "def __init__(self, lines):\n\t\tself.lines = lines\n\t\tself.points = set()\n\t\tfor l in lines:\n\t\t\tif not l.a in self.points:\n\t\t\t\tself.points.add(l.a)\n\t\t\tif not l.b in self.points:\n\t\t\t\tself.points.add(l.b)", "def __init__(self, common_blocks, records):\n self.common_blocks = common_blocks\n self.records = records", "def __init__(self, line, match):\n\n self.line = line\n self.match = match", "def __init__(self, *sources, **kwd):\n if not sources:\n raise TypeError('expected 1 or more sources, got 0')\n\n missing = kwd.pop('missing', '') # Accept as keyword-only argument.\n\n if kwd: # Enforce keyword-only argument\n key, _ = kwd.popitem() # behavior that works in Python 2.x.\n msg = \"__init__() got an unexpected keyword argument \" + repr(key)\n raise TypeError(msg)\n\n if not all(isinstance(s, BaseSource) for s in sources):\n raise TypeError('sources must be derived from BaseSource')\n\n all_columns = []\n for s in sources:\n for c in s.columns():\n if c not in all_columns:\n all_columns.append(c)\n\n normalized_sources = []\n for s in sources:\n if set(s.columns()) < set(all_columns):\n columns = s.columns()\n make_old = lambda x: x if x in columns else None\n interface = [(make_old(x), x) for x in all_columns]\n s = AdapterSource(s, interface, missing)\n normalized_sources.append(s)\n\n self._columns = all_columns\n self._sources = normalized_sources\n self.__wrapped__ = sources # <- Original sources.", "def from_line(cls, line):\n # Define slices\n RECORD = slice(0, 6)\n NATOM = slice(7, 12)\n ATOM = slice(13, 18)\n RES = slice(19, 22)\n CHAIN = slice(23, 24)\n NRES = slice(24, 29)\n X = slice(30, 40)\n Y = slice(40, 50)\n Z = slice(50, 60)\n TYPE = slice(61, 66)\n NBOND = slice(66, 69)\n NLP = slice(70, 71)\n CHARGE = slice(71, 80)\n FIXED = slice(81, 82)\n record = line[RECORD].strip()\n natom = int(line[NATOM])\n atom = line[ATOM].strip()\n res = line[RES].strip()\n chain = line[CHAIN].strip()\n nres = int(line[NRES])\n x = float(line[X])\n y = float(line[Y])\n z = float(line[Z])\n fftype = line[TYPE].strip()\n nbond = int(line[NBOND])\n nlonepair = int(line[NLP])\n charge = float(line[CHARGE])\n try:\n fixed = int(line[FIXED])\n except IndexError:\n fixed = 0\n return cls(record, natom, atom, res, chain, nres, x, y, z, fftype,\n nbond, nlonepair, charge, fixed)", "def __init__(self, url):\n\n lines = [l.strip().split() for l in open(url)]\n\n # Split the lines and the headers\n i = 0\n self.refs = {} # name: [len, offset-to-first-record, offset-to-last-record + 1]\n\n while(lines[i][0][0] == '@'):\n line = lines[i]\n if line[0] == '@SQ':\n sn = line[1].split(':')[1]\n ln = int(line[2].split(':')[1])\n self.refs[sn] = [ln, None, None]\n i += 1\n\n # Process the mapped reads\n # - create offset pointers to the start of each chromosome\n # - convert the position to an int\n 
cur_chr = lines[i][2]\n self.refs[cur_chr][1] = i\n \n while(i < len(lines)):\n if not (int(lines[i][1]) & 0x4): \n lines[i][3] = int(lines[i][3])\n\n if lines[i][2] != cur_chr:\n self.refs[cur_chr][2] = i # mark the end\n cur_chr = lines[i][2] \n self.refs[cur_chr][1] = i # mark the start\n i += 1\n\n self.lines = lines\n \n return", "def from_line(self, line: str):\n raise NotImplementedError()", "def __init__(self, source_data: Dict[str, dict], verbose: bool = True):\n self.verbose = verbose\n self._validate_source_data(source_data=source_data, verbose=self.verbose)\n self.data_interface_objects = {\n name: data_interface(**source_data[name])\n for name, data_interface in self.data_interface_classes.items()\n if name in source_data\n }", "def __init__(self, line):\n (self.seqid, \n self.source, \n self.type, \n self.start, \n self.end, \n self.score, \n self.strand, \n self.phase, \n self.attributes_str) = line.strip().split('\\t')\n # preserve attribute order as a list of keys (attributes_order)\n attributes_list = self.attributes_str.split(';')\n self.attributes_order = [attr.split('=')[0] for attr in \n attributes_list]\n # store attribute keys and their values in a dictionary\n self.attributes = {attr.split('=')[0]:attr.split('=')[1] for attr in \n attributes_list}\n # rename the name attribute key to Name so it conforms to the\n # GFF3 specification, where Name is a reserved attribute key\n if 'name' in self.attributes:\n self.attributes['Name'] = self.attributes.pop('name')\n self.attributes_order[self.attributes_order.index('name')] = 'Name'", "def __init__(self, lines):\n self.tiles = {}\n self.parse(lines)\n self.find_neighbors()\n self.find_corners()\n self.build_grid_top()\n self.build_grid_left()\n self.fill_grid()\n self.stitch_image()" ]
[ "0.6313051", "0.614401", "0.60776645", "0.60666007", "0.5890082", "0.5835735", "0.57116175", "0.56871676", "0.56600964", "0.5596233", "0.5578453", "0.556857", "0.556857", "0.556857", "0.55126864", "0.5510738", "0.54882395", "0.54770136", "0.54094434", "0.5392297", "0.53723717", "0.5368272", "0.5360179", "0.5350066", "0.53225636", "0.53224504", "0.53148", "0.5268784", "0.5252119", "0.52123356" ]
0.7402475
0
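The `__init__` above only initializes empty fields and defers to `parse_src_lines`, which is not shown in the record. A hedged sketch of what that companion parser could look like; the `Key= value` line format and the handful of fields handled are assumptions for illustration only:

```python
def parse_src_lines(self, src_lines):
    # Hypothetical "Key= value" record format; only a few fields shown.
    for line in src_lines:
        key, _, value = line.partition("=")
        key, value = key.strip().lower(), value.strip()
        if key == "study_id":
            self.study_id = value
        elif key == "citation":
            self.citation = value
        elif key == "authors":
            self.authors.append(value)
```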
Return a string representing the key sequence used to produce the specified message, using the given letter-to-key dictionary.
def messagetokeystring(message, keydict):
    return ''.join([' ' + str(keydict[char])
                    if i - 1 >= 0 and str(keydict[char])[0] == str(keydict[message[i - 1]])[0]
                    else str(keydict[char])
                    for i, char in enumerate(message)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _GetKeyString(self):", "def _GetKeyString(self):", "def keysequence(value):\r\n return value.toString()", "def _get_key(self, val: Span) -> str:\n return \"\".join(val._.phonemes)", "def create_key(message, key):\n if len(key) > len(message):\n return key[0:len(message)]\n new_key = key * int(len(message)/len(key))\n new_key += key[0:len(message) - len(new_key)]\n return new_key", "def build_message(self, dict_of_tuples, command, uuid, transaction_id=b'\\x00'):\n\t\t# NOTE: uuid must be a byte array\n\t\t# available app_message commands:\n\t\tapp_messages = {\n\t\t\t\"PUSH\": b'\\x01',\n\t\t\t\"REQUEST\": b'\\x02',\n\t\t\t\"ACK\": b'\\xFF',\n\t\t\t\"NACK\": b'\\x7F'\n\t\t}\n\t\t# finally build the entire message\n\t\tapp_message = OrderedDict([\n\t\t\t(\"COMMAND\", app_messages[command]),\n\t\t\t(\"TRANSACTIONID\", transaction_id),\n\t\t\t(\"UUID\", uuid),\n\t\t\t(\"DICT\", ''.join(dict_of_tuples.values()))\n\t\t])\n\t\treturn ''.join(app_message.values())", "def key(self):\n return self.sentence_idx * (10 ** 6) + self.get_id()", "def _get_kid(message) -> str:\n if KID in message.phdr.keys():\n return base64.b64encode(message.phdr[KID]).decode(\"UTF-8\")\n return base64.b64encode(message.uhdr[KID]).decode(\"UTF-8\")", "def get_message(self, *args, **kwargs):\n\n message = ''\n message += ', '.join([str(key) + ': ' + str(val) for key, val in kwargs.items()]) + '; ' if kwargs else ''\n message += ', '.join(str(val) for val in args) if args else ''\n\n return message", "def get_key(self, state: Dict) -> str:\n\n return \"_\".join(sorted(state))", "def _GetKeyString(self):\n return self.__key_string", "def _key(self):\n key_args = [self.__class__.__name__] + [str(a) for a in self.args]\n return (\":\".join(key_args))", "def answer_key(cls, response_num=2, input_num=1):\r\n return (\r\n \"%s_%d_%d\" % (\r\n \"-\".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),\r\n response_num,\r\n input_num\r\n )\r\n )", "def __GetKeyString(self):\n return self._GetKeyString()", "def gen_symkey(self, message):\n return int(hashlib.sha1(str(message)).hexdigest(), 16)", "def _keygen(self, event, ts=None):\n return \"%s:%s\" % (self.namespace(ts or time.time()), event)", "def _get_recipient_key(self, protected_message):\n return self.recipient_key", "def _GetKeyString(self):\n return self.__key_string", "def debounce_get_incident_key(event):\n key = \"{} for {}\".format(event.name, event.message[\"incident\"][\"id\"])\n return key", "def sequence(self, keys):\n out = ''\n for j in range(len(keys)):\n out += '>SEQUENCE_{}'.format(keys[j]) + '\\n'\n for i in range(len(self._d_seqs[keys[j]])):\n out += self._d_seqs[keys[j]][i] + '\\n'\n return out", "def _get_raw_key(self, key_id):", "def translation_key(self) -> str | None:\n return TRANSLATION_KEY_MAPPING.get(self.capability.id)", "def __GetKeyString(self):\n return self._GetKeyString()", "def answer_key(cls, input_num=2):\r\n return (\r\n \"%s_%d_1\" % (\r\n \"-\".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),\r\n input_num,\r\n )\r\n )", "def pgettext(msgctxt, message):\r\n key = msgctxt + '\\x04' + message\r\n translation = get_translation().gettext(key)\r\n return message if translation == key else translation", "def message_for_key(self, key, context):\n raise NotImplementedError('message_for_key() should have been replaced by a metaclass')", "def game_key(proto_obj):\n return game_key_full(proto_obj.id_str)", "def key(self)->str:\n return \"{}:{}.{}.{}\".format(self.source, self.db, self.ed, 
self.rec)", "def decode(msg, mov):\n new = \"\"\n minus = [chr(x) for x in range(97, 123)]\n mayus = [chr(x) for x in range(65, 91)]\n corres = {}\n mv = mov % 26\n for i in range(0, 26):\n if i - mov < 26:\n corres[minus[i]] = minus[(i - mv) % 26]\n corres[mayus[i]] = mayus[(i - mv) % 26]\n else:\n corres[minus[i]] = minus[(i - mv - 26) % 26]\n corres[mayus[i]] = mayus[(i - mv - 26) % 26]\n for l in msg:\n if l in mayus + minus:\n new += corres[l]\n else:\n new += l\n return new", "def key(key):\n return key" ]
[ "0.6046165", "0.6046165", "0.59045553", "0.5791385", "0.5728902", "0.56914884", "0.56797653", "0.56692123", "0.5655976", "0.56355053", "0.5619594", "0.5616496", "0.55933964", "0.5576578", "0.5567661", "0.55604446", "0.5541098", "0.5538641", "0.5535081", "0.5518018", "0.5506105", "0.54777235", "0.5476038", "0.54746896", "0.54734296", "0.54608035", "0.5456805", "0.54541785", "0.5453448", "0.54480416" ]
0.7381706
0
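A hand-checked usage example of the document above. The excerpt dictionary mirrors the T9 mapping built in the next record; a space is emitted as a pause whenever two consecutive letters share the same key digit:

```python
T9 = {'a': '2', 'b': '22', 'c': '222', 'd': '3'}  # excerpt of a full T9 dict

messagetokeystring('ad', T9)  # -> '23'   (different keys, no pause needed)
messagetokeystring('ba', T9)  # -> '22 2' (key 2 twice in a row, pause inserted)
```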
Return a dict mapping each alphabet letter to the corresponding T9 number sequence
def getT9dict():
    T9dict = {}
    all_letters = string.lowercase
    T9dict.update(mapkeystoletter(2, all_letters[0:3]))
    T9dict.update(mapkeystoletter(3, all_letters[3:6]))
    T9dict.update(mapkeystoletter(4, all_letters[6:9]))
    T9dict.update(mapkeystoletter(5, all_letters[9:12]))
    T9dict.update(mapkeystoletter(6, all_letters[12:15]))
    T9dict.update(mapkeystoletter(7, all_letters[15:19]))
    T9dict.update(mapkeystoletter(8, all_letters[19:22]))
    T9dict.update(mapkeystoletter(9, all_letters[22:26]))
    T9dict[' '] = 0
    return T9dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetAlphabet(self):\n alphabet = list(self._charAlphabet) #Creates a list of the alphabet characters\n numbers = [i for i in range(0,26)] #Creates a list of numbers up to 25\n numberOff = dict( zip(alphabet, numbers)) #Pairs each character with a number in a chronological sequence to number the characters from 0 to 25\n \n return numberOff", "def get_letter_to_code_mappings():\n return {\n \"a\": \"Alfa\", \"b\": \"Bravo\", \"c\": \"Charlie\", \"d\": \"Delta\", \"e\": \"Echo\",\n \"f\": \"Foxtrot\", \"g\": \"Golf\", \"h\": \"Hotel\", \"i\": \"India\", \"j\":\n \"Juliett\", \"k\": \"Kilo\", \"l\": \"Lima\", \"m\": \"Mike\", \"n\": \"November\", \"o\":\n \"Oscar\", \"p\": \"Papa\", \"q\": \"Quebec\", \"r\": \"Romeo\", \"s\": \"Sierra\", \"t\":\n \"Tango\", \"u\": \"Uniform\", \"v\": \"Victor\", \"w\": \"Whiskey\", \"x\": \"Xray\",\n \"y\": \"Yankee\", \"z\": \"Zulu\", \"0\": \"Zero\", \"1\": \"One\", \"2\": \"Two\", \"3\":\n \"Three\", \"4\": \"Four\", \"5\": \"Five\", \"6\": \"Six\", \"7\": \"Seven\", \"8\":\n \"Eight\", \"9\": \"Niner\", \"=\": \"Equals\", \"?\": \"Query\", \"/\": \"Slash\", \",\":\n \"Comma\", \".\": \"Stop\", \":\": \"Colon\", \"'\": \"Apostrophe\", \"-\": \"Dash\",\n \"(\": \"Open\", \")\": \"Close\", \"@\": \"At\",\n }", "def get_letter_dict():\n\treturn {\n\t\t'A': 0,\n\t\t'B': 0,\n\t\t'C': 0,\n\t\t'D': 0,\n\t\t'E': 0,\n\t\t'F': 0,\n\t\t'G': 0,\n\t\t'H': 0,\n\t\t'I': 0,\n\t\t'J': 0,\n\t\t'K': 0,\n\t\t'L': 0,\n\t\t'M': 0,\n\t\t'N': 0,\n\t\t'O': 0,\n\t\t'P': 0,\n\t\t'Q': 0,\n\t\t'R': 0,\n\t\t'S': 0,\n\t\t'T': 0,\n\t\t'U': 0,\n\t\t'V': 0,\n\t\t'W': 0,\n\t\t'X': 0,\n\t\t'Y': 0,\n\t\t'Z': 0\n\t}", "def character_map(text):\n\n print(f\"Total character count: {len(text)}\\n\")\n\n characters = sorted(list(set(text))) # Get sorted list of individual characters\n n_to_char = {}\n char_to_n = {}\n\n num = 0\n for char in characters:\n n_to_char[num] = char\n char_to_n[char] = num\n num += 1\n\n return characters, n_to_char, char_to_n", "def mapkeystoletter(key, letters):\r\n return dict((v, ''.join([str(key) for i in range(k)]))\r\n for k, v in enumerate(letters, 1))", "def init_dict() -> None:\n for elem in letters:\n ascii_dict[elem] = []\n for elem in numbers:\n ascii_dict[elem] = []\n for elem in symbols:\n ascii_dict[elem] = []", "def tally_letters(string):\n output = dict()\n for char in list(string):\n freq = output.get(char, 0)\n output[char]= freq+1\n return output", "def mapping_letter(letters):\n my_list = list(map(lambda x: x.upper(), letters))\n return dict(zip(letters, my_list))", "def letter_to_num(self, string, dict_):\n #dict_= {'A': '0', 'C': '1', 'D': '2', 'E': '3', 'F': '4', 'G': '5', 'H': '6', 'I': '7', 'K': '8', 'L': '9', 'M': '10', 'N': '11', 'P': '12', 'Q': '13', 'R': '14', 'S': '15', 'T': '16', 'V': '17', 'W': '18', 'Y': '19'}\n patt = re.compile('[' + ''.join(dict_.keys()) + ']')\n num_string = patt.sub(lambda m: dict_[m.group(0)] + ' ', string)\n #print(num_string)\n #print(type(num_string))\n num = [int(i) for i in num_string.split()]\n return num", "def create_dictionaries(chars):\n return dict((c, i) for i, c in enumerate(chars)), dict((i, c) for i, c in enumerate(chars))", "def binary_to_seq():\n bin_seq, dico_binary, comp_seq, file_comp = utf8_to_binary()\n \n #for each binary value associate the corresponding letter (key) \n #according to the dictionnary \n dna_seq = \"\"\n reading_binary = \"\"\n for value in bin_seq:\n reading_binary += value\n for letter, code in dico_binary.items():\n if code == reading_binary:\n dna_seq += letter\n 
reading_binary = \"\"\n break\n \n #print(dna_seq, bin_seq, comp_seq, file_comp)\n return dna_seq, bin_seq, comp_seq, file_comp", "def get_alphabet(number):\n return chr(number + 96)", "def get_alphabet(number):\n return chr(number + 96)", "def nmer_dictionary(self,n,dic={}):\n if self.sequence == \"\":\n self.fetchSequence()\n self.sequence = self.sequence.upper()\n for i in range(0,len(self.sequence)-n):\n subseq = self.sequence[i:][:n]\n dic[subseq]=1+dic.get(subseq,0)\n del subseq\n return dic", "def buildCoder(shift):\n alphabet = string.ascii_lowercase \n alphabet2 = string.ascii_uppercase \n \n \n #Create our substitution dictionary \n dic={} \n dic2={}\n for i in range(0,len(alphabet)): \n dic[alphabet[i]]=alphabet[(i+shift)%len(alphabet)]\n dic2[alphabet2[i]]=alphabet2[(i+shift)%len(alphabet2)]\n \n dic.update(dic2)\n \n return dic", "def _generate_character_map(self):\n self._ct = [-1] * 256\n index = 0\n for c_range in self._meta.character_ranges:\n for c_pos in range(c_range['min'], c_range['max'] + 1):\n self._ct[c_pos] = index\n index += 1", "def get_alphabet():\n\n alphabet = {}\n # Organized by how final output will look. ...alternative org isn't much better\n # May want to look into an external font solution TBH\n # Beware, the \" \" char is also basically the padding\n alphabet[\" \"] = [o,\n o,\n o,\n o,\n o]\n alphabet[\"A\"] = [o + X + o,\n X + o + X,\n X + X + X,\n X + o + X,\n X + o + X]\n alphabet[\"B\"] = [X + X + o,\n X + o + X,\n X + X + o,\n X + o + X,\n X + X + o]\n alphabet[\"C\"] = [X + X + X,\n X + o + o,\n X + o + o,\n X + o + o,\n X + X + X]\n alphabet[\"D\"] = [X + X + o,\n X + o + X,\n X + o + X,\n X + o + X,\n X + X + o]\n alphabet[\"E\"] = [X + X + X,\n X + o + o,\n X + X + X,\n X + o + o,\n X + X + X]\n alphabet[\"F\"] = [X + X + X,\n X + o + o,\n X + X + o,\n X + o + o,\n X + o + o]\n alphabet[\"G\"] = [X + X + X + X,\n X + o + o + o,\n X + o + X + X,\n X + o + o + X,\n X + X + X + X]\n alphabet[\"H\"] = [X + o + X,\n X + o + X,\n X + X + X,\n X + o + X,\n X + o + X]\n alphabet[\"I\"] = [X + X + X,\n o + X + o,\n o + X + o,\n o + X + o,\n X + X + X]\n alphabet[\"J\"] = [o + o + X,\n o + o + X,\n o + o + X,\n X + o + X,\n o + X + o]\n alphabet[\"K\"] = [X + o + o + X,\n X + o + X + o,\n X + X + o + o,\n X + o + X + o,\n X + o + o + X]\n alphabet[\"L\"] = [X + o + o,\n X + o + o,\n X + o + o,\n X + o + o,\n X + X + X]\n alphabet[\"M\"] = [X + o + o + o + X,\n X + X + o + X + X,\n X + o + X + o + X,\n X + o + o + o + X,\n X + o + o + o + X]\n alphabet[\"N\"] = [X + o + o + X,\n X + o + o + X,\n X + X + o + X,\n X + o + X + X,\n X + o + o + X]\n alphabet[\"O\"] = [X + X + X,\n X + o + X,\n X + o + X,\n X + o + X,\n X + X + X]\n alphabet[\"P\"] = [X + X + X,\n X + o + X,\n X + X + X,\n X + o + o,\n X + o + o]\n alphabet[\"Q\"] = [X + X + X,\n X + o + X,\n X + o + X,\n X + X + X,\n o + o + X]\n alphabet[\"R\"] = [X + X + X,\n X + o + X,\n X + X + X,\n X + X + o,\n X + o + X]\n alphabet[\"S\"] = [X + X + X,\n X + o + o,\n X + X + X,\n o + o + X,\n X + X + X]\n alphabet[\"T\"] = [X + X + X,\n o + X + o,\n o + X + o,\n o + X + o,\n o + X + o]\n alphabet[\"U\"] = [X + o + X,\n X + o + X,\n X + o + X,\n X + o + X,\n X + X + X]\n alphabet[\"V\"] = [X + o + X,\n X + o + X,\n X + o + X,\n o + X + o,\n o + X + o]\n alphabet[\"W\"] = [X + o + o + o + X,\n X + o + X + o + X,\n X + o + X + o + X,\n X + o + X + o + X,\n o + X + o + X + o]\n alphabet[\"X\"] = [X + o + X,\n X + o + X,\n o + X + o,\n X + o + X,\n X + o + X]\n alphabet[\"Y\"] = [X + o + X,\n X + o + X,\n o 
+ X + o,\n o + X + o,\n o + X + o]\n alphabet[\"Z\"] = [X + X + X,\n o + o + X,\n o + X + o,\n X + o + o,\n X + X + X]\n alphabet[\"1\"] = [X + X + o,\n o + X + o,\n o + X + o,\n o + X + o,\n X + X + X]\n alphabet[\"2\"] = [X + X + X,\n o + o + X,\n X + X + X,\n X + o + o,\n X + X + X]\n alphabet[\"3\"] = [X + X + X,\n o + o + X,\n o + X + X,\n o + o + X,\n X + X + X]\n alphabet[\"4\"] = [X + o + X,\n X + o + X,\n X + X + X,\n o + o + X,\n o + o + X]\n alphabet[\"5\"] = [X + X + X,\n X + o + o,\n X + X + X,\n o + o + X,\n X + X + X]\n alphabet[\"6\"] = [X + X + X,\n X + o + o,\n X + X + X,\n X + o + X,\n X + X + X]\n alphabet[\"7\"] = [X + X + X,\n o + o + X,\n o + o + X,\n o + X + o,\n o + X + o]\n alphabet[\"8\"] = [X + X + X,\n X + o + X,\n X + X + X,\n X + o + X,\n X + X + X]\n alphabet[\"9\"] = [X + X + X,\n X + o + X,\n X + X + X,\n o + o + X,\n o + o + X]\n alphabet[\"0\"] = [X + X + X + X + X,\n X + o + o + X + X,\n X + o + X + o + X,\n X + X + o + o + X,\n X + X + X + X + X]\n\n return alphabet", "def kmers_composition(dna: str, k: int, alphabet: str = \"ACGT\"):\n dna = Counter(string_to_kmers(dna, k))\n for k_mer in enumerate_kmers(alphabet, k):\n yield dna[k_mer]", "def _create_subscript_mapping():\n # Create the normal and subscript digits list.\n normal_digits = [i for i in range(10)]\n subscript_digits = [chr(0x2080 + i) for i in range(10)]\n\n # Convert the normal digits to strings.\n normal_digits = [str(i) for i in normal_digits]\n\n # Create a dict mapping the two.\n return DefaultDictionary(zip(normal_digits, subscript_digits))", "def create_dictionary():\n chars = sorted(ch for ch in string.printable if ch not in (\"\\x0b\", \"\\x0c\", \"\\r\"))\n char2id = dict((ch, i + 1) for i, ch in enumerate(chars))\n char2id.update({\"\": 0})\n id2char = dict((char2id[ch], ch) for ch in char2id)\n vocab_size = len(char2id)\n id2char.update({98:'\\\\unk',99:'\\\\unk'})\n return char2id, id2char, vocab_size,chars", "def transcribe(seq):\n rna = ''\n for letter in seq:\n if letter == 'A':\n rna = rna + 'U'\n elif letter == 'T':\n rna = rna + 'A'\n elif letter == 'G':\n rna = rna + 'C'\n else:\n rna = rna + 'G'\n return rna", "def bin_code(self):\n self.alphabet = np.unique(self.sequence)\n\n for s, n in zip([chr(k + ord('a') - 1) for k in self.alphabet], self.alphabet):\n self.alphabet_symbol[s] = n\n\n sigm = len(self.alphabet)\n bin_code = []\n for i, e in enumerate(self.alphabet):\n em = [0] * sigm\n em[sigm - 1 - i] = 1\n bin_code.append(em)\n\n for i in range(len(bin_code)):\n self.alphabet_dict[self.alphabet[i]] = bin_code[i]\n\n return reduce(lambda r, e: r + self.alphabet_dict[e], self.sequence, [])", "def nucleotide_numbering():\n nucleotide_to_number = {'A': 0, 'C': 1, 'G': 2, 'T': 3}\n number_to_nucleotide = {0: 'A', 1: 'C', 2: 'G', 3: 'T'}\n return nucleotide_to_number, number_to_nucleotide", "def get_letter_counts(str_):\n return dict(Counter(str_))", "def base_alphabet_to_10(letters):\r\n\r\n return sum(\r\n (ord(letter) - A_UPPERCASE + 1) * ALPHABET_SIZE ** i\r\n for i, letter in enumerate(reversed(letters.upper()))\r\n )", "def get_table(text, size = 1):\r\n result = {}\r\n for i in range(len(text)):\r\n chars = text[i:i+size]\r\n try:\r\n out = text[i + size]\r\n except IndexError:\r\n break\r\n char_dict = result.get(chars, {})\r\n if out not in char_dict:\r\n char_dict[out] = 0\r\n char_dict[out] += 1\r\n result[chars] = char_dict\r\n return result", "def getCharMapping(tweets):\n text = map(lambda x: x.getText(), tweets)\n allChars = [c for s in text for c in 
s]\n x = collections.Counter(allChars)\n chars_used = x.most_common()[:max_chars]\n charset = map(lambda x: x[0], chars_used)\n # Add padding, start, end and unknown characters\n mapping = dict((c, i) for i, c in enumerate(charset + ['<s>', '</s>', '<pad>', '<unknown>', '<unknown_test>']))\n dump(mapping, open(char_mapping_filename, 'wb'))\n return mapping", "def encode_identifier(alphabet, n):\r\n c = alphabet[n & 0b1111]\r\n n>>=4\r\n while n > 0:\r\n c = c + alphabet[n & 0b111111]\r\n n>>=6\r\n return c", "def create_char_dicts(non_letter_chars, lower_case=True, upper_case=True):\n lower_case_letter_dict={}\n upper_case_letter_dict={}\n index_count = 0\n # Create a dictionary with upper and lower case letters and associated index\n # Note: We include underscores, hyphens, and apostrophes but ignore other characters\n # found in word2vec model, including chinese symbols, emojis, etc\n if lower_case:\n lower_case_letter_dict = {letter: int(index)+index_count for index, letter in enumerate(ascii_lowercase, start=1)}\n index_count += 26\n if upper_case:\n upper_case_letter_dict = {letter: int(index)+index_count for index, letter in enumerate(ascii_uppercase, start=1)} \n index_count += 26\n \n chardict = {**lower_case_letter_dict, **upper_case_letter_dict}\n \n for char in non_letter_chars:\n chardict[char] = index_count\n index_count += 1\n\n # Creation of reverse character lookup for debugging and word creation\n reverse_chardict = {}\n for k,v in chardict.items():\n reverse_chardict[v] = k\n \n return chardict, reverse_chardict", "def buildCoder(shift):\n out_dic = {}\n lo = string.ascii_lowercase\n up = string.ascii_uppercase\n for i in lo:\n out_dic[i] = lo[(lo.index(i) + shift) % len(lo)]\n for i in up:\n out_dic[i] = up[(up.index(i) + shift) % len(up)]\n return out_dic" ]
[ "0.6782532", "0.6357176", "0.63480806", "0.6239313", "0.61492985", "0.613621", "0.6120783", "0.6088525", "0.59723955", "0.59683824", "0.5959332", "0.59540445", "0.59540445", "0.59315383", "0.59235865", "0.5881295", "0.58502054", "0.5826376", "0.5803708", "0.5794347", "0.5697483", "0.5652648", "0.5639329", "0.55982924", "0.5578239", "0.5571164", "0.55708987", "0.55695134", "0.55647194", "0.5558304" ]
0.7683451
0
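`string.lowercase` marks the document above as Python 2. A Python 3 port with the same key groups, assuming the `mapkeystoletter` helper defined in the next record:

```python
import string

def getT9dict_py3():
    t9 = {}
    letters = string.ascii_lowercase  # Python 3 name for string.lowercase
    for key, chunk in [(2, letters[0:3]), (3, letters[3:6]), (4, letters[6:9]),
                       (5, letters[9:12]), (6, letters[12:15]), (7, letters[15:19]),
                       (8, letters[19:22]), (9, letters[22:26])]:
        t9.update(mapkeystoletter(key, chunk))
    t9[' '] = 0
    return t9
```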
Return a dict mapping each letter to a string containing the key repeated n times, where n is the 1-based position of the letter in the given letters string.
def mapkeystoletter(key, letters):
    return dict((v, ''.join([str(key) for i in range(k)]))
                for k, v in enumerate(letters, 1))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_dictionaries(chars):\n return dict((c, i) for i, c in enumerate(chars)), dict((i, c) for i, c in enumerate(chars))", "def english_dictionary(letters, n): \n assert (isinstance(letters, list)), \"First argument must be a list\"\n assert (isinstance(n, int)), \"Second argument must be an integer\"\n assert (n >= 0), \"Second argument must be positive\"\n\n answer = {}\n dict_key = ''\n dict_value = ''\n\n for i in range(len(letters)):\n \tfor j in range(n + 1):\n \t\tif (i + j < len(letters)):\n \t\t\tdict_key += letters[i + j]\n \t\t\tdict_value += \" \" + letters[i + j]\n \t\t\t# print(dict_value)\n \t\t# print(dict_value)\n \tif (dict_value.strip()[:1] == 'x' or dict_value.strip()[:1] == 'z'):\n \t\tdict_key = \"empty\"\n \t\tdict_value = \"\"\n\n \tanswer[dict_key] = dict_value.strip()[::-1]\n \tdict_key = \"\"\n \tdict_value = \"\"\n return answer", "def tally_letters(string):\n output = dict()\n for char in list(string):\n freq = output.get(char, 0)\n output[char]= freq+1\n return output", "def mapping_letter(letters):\n my_list = list(map(lambda x: x.upper(), letters))\n return dict(zip(letters, my_list))", "def get_letter_counts(str_):\n return dict(Counter(str_))", "def create_char_dicts(non_letter_chars, lower_case=True, upper_case=True):\n lower_case_letter_dict={}\n upper_case_letter_dict={}\n index_count = 0\n # Create a dictionary with upper and lower case letters and associated index\n # Note: We include underscores, hyphens, and apostrophes but ignore other characters\n # found in word2vec model, including chinese symbols, emojis, etc\n if lower_case:\n lower_case_letter_dict = {letter: int(index)+index_count for index, letter in enumerate(ascii_lowercase, start=1)}\n index_count += 26\n if upper_case:\n upper_case_letter_dict = {letter: int(index)+index_count for index, letter in enumerate(ascii_uppercase, start=1)} \n index_count += 26\n \n chardict = {**lower_case_letter_dict, **upper_case_letter_dict}\n \n for char in non_letter_chars:\n chardict[char] = index_count\n index_count += 1\n\n # Creation of reverse character lookup for debugging and word creation\n reverse_chardict = {}\n for k,v in chardict.items():\n reverse_chardict[v] = k\n \n return chardict, reverse_chardict", "def build_permutation_dictionary(input_string):\n string_contents = {}\n\n for char in input_string:\n if char not in string_contents:\n string_contents[char] = 0\n else:\n string_contents[char] += 1\n\n return string_contents", "def _create_dictionaries(self, chars):\n dictionary = dict()\n for char in chars:\n dictionary[char] = len(dictionary)\n reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))\n return dictionary, reverse_dictionary", "def generate_map():\n known_mappings = {\"a zoo\": \"y qee\",\n \"our language is impossible to understand\": \"ejp mysljylc kd kxveddknmc re jsicpdrysi\",\n \"there are twenty six factorial possibilities\": \"rbcpc ypc rtcsra dkh wyfrepkym veddknkmkrkcd\",\n \"so it is okay if you want to just give up\": \"de kr kd eoya kw aej tysr re ujdr lkgc jv\",\n }\n all_letters = \"abcdefghijklmnopqrstuvwxyz\"\n letter_map = {}\n for english, googlerese in known_mappings.items():\n pairs = zip(english, googlerese)\n for e,g in pairs:\n if e not in letter_map:\n letter_map[e] = g\n if len(letter_map) == 26:\n e_letter = \"\"\n g_letter = \"\"\n for letter in all_letters:\n if not e_letter and letter not in letter_map.keys():\n e_letter = letter\n if not g_letter and letter not in letter_map.values():\n g_letter = letter\n 
letter_map[e_letter] = g_letter\n return \"\".join(letter_map.keys()), \"\".join(letter_map.values())", "def character_map(text):\n\n print(f\"Total character count: {len(text)}\\n\")\n\n characters = sorted(list(set(text))) # Get sorted list of individual characters\n n_to_char = {}\n char_to_n = {}\n\n num = 0\n for char in characters:\n n_to_char[num] = char\n char_to_n[char] = num\n num += 1\n\n return characters, n_to_char, char_to_n", "def compress_v3(string):\n\n string_dict = collections.OrderedDict()\n final = \"\"\n\n for letter in string:\n string_dict[letter] = string_dict.get(letter, 0)+1\n\n for letter, count in string_dict.iteritems():\n final += letter + str(count)\n\n return final", "def getT9dict():\r\n T9dict = {}\r\n all_letters = string.lowercase\r\n T9dict.update(mapkeystoletter(2, all_letters[0:3]))\r\n T9dict.update(mapkeystoletter(3, all_letters[3:6]))\r\n T9dict.update(mapkeystoletter(4, all_letters[6:9]))\r\n T9dict.update(mapkeystoletter(5, all_letters[9:12]))\r\n T9dict.update(mapkeystoletter(6, all_letters[12:15]))\r\n T9dict.update(mapkeystoletter(7, all_letters[15:19]))\r\n T9dict.update(mapkeystoletter(8, all_letters[19:22]))\r\n T9dict.update(mapkeystoletter(9, all_letters[22:26]))\r\n T9dict[' '] = 0\r\n\r\n return T9dict", "def makeKey(text):\n key, n = {}, 0\n for i in text:\n key[i] = str(n)\n n += 1\n return key", "def generate_letter_maps(self):\n\n word_count = len(self.words)\n last_percent = 0\n\n # Do no-blank words.\n for i, word in enumerate(self.words):\n letters = \"\".join(sorted(set(word)))\n self.letters_map[letters].append(word)\n\n # Do one-blank words.\n for subword in self.remove_one_letter(letters):\n self.letters_map_one_blank[subword].append(word)\n\n # Do two-blank words.\n for subword in self.remove_two_letters(letters):\n self.letters_map_two_blanks[subword].append(word)\n\n # Show progress information.\n percent = int(i*100/word_count)\n if percent/10 != last_percent/10:\n print \" %d%%\" % percent\n last_percent = percent", "def create_encrypt_dict(shift):\n lower_case = \"acbcdefghijklmnopqrstuvwxyz\"\n asc2_a = ord(\"a\")\n\n shifted_dict = {}\n for letter in lower_case:\n val_letter = ord(letter)\n shifted_val = asc2_a + ((val_letter - asc2_a + shift) % 26)\n shifted_dict[letter] = chr(shifted_val)\n shifted_dict[letter.upper()] = shifted_dict[letter].upper()\n\n return shifted_dict", "def createCharDict(word):\n d = {}\n for char in word:\n if char not in d:\n d[char] = 1\n else:\n d[char] += 1\n return d", "def get_letter_dict():\n\treturn {\n\t\t'A': 0,\n\t\t'B': 0,\n\t\t'C': 0,\n\t\t'D': 0,\n\t\t'E': 0,\n\t\t'F': 0,\n\t\t'G': 0,\n\t\t'H': 0,\n\t\t'I': 0,\n\t\t'J': 0,\n\t\t'K': 0,\n\t\t'L': 0,\n\t\t'M': 0,\n\t\t'N': 0,\n\t\t'O': 0,\n\t\t'P': 0,\n\t\t'Q': 0,\n\t\t'R': 0,\n\t\t'S': 0,\n\t\t'T': 0,\n\t\t'U': 0,\n\t\t'V': 0,\n\t\t'W': 0,\n\t\t'X': 0,\n\t\t'Y': 0,\n\t\t'Z': 0\n\t}", "def build_cypher_map(keyword, decrypt):\n\n # Build a list of uppercase letters, making it easier to loop and to\n # generate the cypher dictionary\n alphabet = list(string.ascii_uppercase)\n\n # This cypher works with uppercase only\n keyword = keyword.upper()\n\n # Remove duplicated letters from the keyword\n sequence = sorted(set(keyword), key=keyword.index)\n\n # Append the other letters of the alphabet in reverse order\n sequence.extend([c for c in reversed(alphabet) if c not in sequence])\n\n if decrypt:\n return dict(zip(sequence, alphabet))\n else:\n return dict(zip(alphabet, sequence))", "def letter_freq( text ):\n\tchars = 
string.ascii_uppercase\n\ttext = text.upper()\n\tresult = get_letter_dict()\n\ttotal = 0\n\tfor char in chars:\n\t\tcount = text.count(char)\n\t\tresult[char] = count\n\t\ttotal += count\n\tif total != 0:\n\t\tfor char in chars:\n\t\t\tresult[char] = (result[char]*10000 / total) / float(100)\n\treturn result", "def count_chars(s: str) -> dict:\n count_dict = {}\n\n for c in s:\n if c in count_dict:\n count_dict[c] += 1\n else:\n count_dict[c] = 1\n\n return count_dict", "def grouped_anagrams(strings):\r\n anagram_dict = {}\r\n for string in strings:\r\n # this will take O(n logn) time with n being the number of chars in a word\r\n sorted_chars = \"\".join(sorted(list(string))) \r\n anagram_dict[sorted_chars] = anagram_dict.get(sorted_chars, []) + [string]\r\n\r\n return list(anagram_dict.values())", "def letterCount(dict):\n # Making a new dictionary to store each letter's information in\n letters = dict.fromkeys(string.ascii_lowercase, 0)\n total = 0\n\n # Populating the letters-totals dictionary\n for key in dict:\n for letter in key:\n letters[letter] += dict[key].getNumber()\n # Total to keep track of all letters\n total += dict[key].getNumber()\n\n # Changing the letter-total data to letter-frequency data\n for entry in letters:\n letters[entry] = letters[entry] / total\n\n return letters", "def get_table(text, size = 1):\r\n result = {}\r\n for i in range(len(text)):\r\n chars = text[i:i+size]\r\n try:\r\n out = text[i + size]\r\n except IndexError:\r\n break\r\n char_dict = result.get(chars, {})\r\n if out not in char_dict:\r\n char_dict[out] = 0\r\n char_dict[out] += 1\r\n result[chars] = char_dict\r\n return result", "def init_dict() -> None:\n for elem in letters:\n ascii_dict[elem] = []\n for elem in numbers:\n ascii_dict[elem] = []\n for elem in symbols:\n ascii_dict[elem] = []", "def get_letter_to_code_mappings():\n return {\n \"a\": \"Alfa\", \"b\": \"Bravo\", \"c\": \"Charlie\", \"d\": \"Delta\", \"e\": \"Echo\",\n \"f\": \"Foxtrot\", \"g\": \"Golf\", \"h\": \"Hotel\", \"i\": \"India\", \"j\":\n \"Juliett\", \"k\": \"Kilo\", \"l\": \"Lima\", \"m\": \"Mike\", \"n\": \"November\", \"o\":\n \"Oscar\", \"p\": \"Papa\", \"q\": \"Quebec\", \"r\": \"Romeo\", \"s\": \"Sierra\", \"t\":\n \"Tango\", \"u\": \"Uniform\", \"v\": \"Victor\", \"w\": \"Whiskey\", \"x\": \"Xray\",\n \"y\": \"Yankee\", \"z\": \"Zulu\", \"0\": \"Zero\", \"1\": \"One\", \"2\": \"Two\", \"3\":\n \"Three\", \"4\": \"Four\", \"5\": \"Five\", \"6\": \"Six\", \"7\": \"Seven\", \"8\":\n \"Eight\", \"9\": \"Niner\", \"=\": \"Equals\", \"?\": \"Query\", \"/\": \"Slash\", \",\":\n \"Comma\", \".\": \"Stop\", \":\": \"Colon\", \"'\": \"Apostrophe\", \"-\": \"Dash\",\n \"(\": \"Open\", \")\": \"Close\", \"@\": \"At\",\n }", "def buildCoder(shift):\n alphabet = string.ascii_lowercase \n alphabet2 = string.ascii_uppercase \n \n \n #Create our substitution dictionary \n dic={} \n dic2={}\n for i in range(0,len(alphabet)): \n dic[alphabet[i]]=alphabet[(i+shift)%len(alphabet)]\n dic2[alphabet2[i]]=alphabet2[(i+shift)%len(alphabet2)]\n \n dic.update(dic2)\n \n return dic", "def anagrams(word_lst):\n words_dict = {}\n for word in word_lst:\n characters = ''.join(sorted(list(word)))\n if characters in words_dict:\n words_dict[characters].append(word)\n else:\n words_dict[characters] = [word]\n return words_dict", "def create_dictionary():\n chars = sorted(ch for ch in string.printable if ch not in (\"\\x0b\", \"\\x0c\", \"\\r\"))\n char2id = dict((ch, i + 1) for i, ch in enumerate(chars))\n char2id.update({\"\": 0})\n id2char = 
dict((char2id[ch], ch) for ch in char2id)\n vocab_size = len(char2id)\n id2char.update({98:'\\\\unk',99:'\\\\unk'})\n return char2id, id2char, vocab_size,chars", "def anagrams(word_list):\n output = dict()\n\n for word in word_list:\n word = word.strip()\n letters = word_to_tuple(word)\n # add letters as key to output dict\n # if not present already\n output[letters] = output.get(letters, [])\n # append word to list at key\n output[letters].append(word)\n\n return output", "def letter_freq(txt):\n frequencies = {}\n txt_lower = txt.lower()\n\n for i in txt_lower:\n keys = frequencies.keys()\n if i in keys:\n frequencies[i] += 1\n else:\n frequencies[i] = 1\n return frequencies" ]
[ "0.72586536", "0.7226152", "0.71658975", "0.7153065", "0.71221644", "0.7014463", "0.6943903", "0.68634564", "0.6783479", "0.6693765", "0.66723007", "0.6574703", "0.65711844", "0.6381239", "0.63740134", "0.63730866", "0.63467705", "0.6322638", "0.62977695", "0.6290113", "0.62740433", "0.62615365", "0.62528145", "0.6232577", "0.62186354", "0.6197077", "0.61752194", "0.61714137", "0.6161926", "0.6123817" ]
0.7698916
0
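
> Editor's note (not a dataset record): the negatives above all build character maps — frequency dicts, char/index vocabularies, shift ciphers. A minimal runnable sketch of the two recurring patterns, for orientation only:

```python
# Illustrative sketch of the two mapping patterns in the snippets above:
# a letter-frequency dict and a char<->index vocabulary.
from collections import Counter

def letter_frequencies(text: str) -> dict:
    """Return a {letter: relative_frequency} map over A-Z."""
    counts = Counter(c for c in text.upper() if "A" <= c <= "Z")
    total = sum(counts.values())
    return {c: counts[c] / total if total else 0.0
            for c in map(chr, range(65, 91))}

def build_vocab(text: str):
    """Return (char_to_id, id_to_char) over the distinct characters of text."""
    chars = sorted(set(text))
    char_to_id = {c: i for i, c in enumerate(chars)}
    id_to_char = {i: c for c, i in char_to_id.items()}
    return char_to_id, id_to_char

assert build_vocab("abba")[0] == {"a": 0, "b": 1}
```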
Test that the equality dunder method is correct for Resources.
def test_eq(self): r1 = Resources(4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, Shots(100)) r2 = Resources(4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, Shots(100)) r3 = Resources(4, 2, {"CNOT": 1, "Hadamard": 1}, {2: 1, 1: 1}, 2, Shots(100)) # all equal r4 = Resources(1, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, Shots(100)) # diff wires r5 = Resources( 4, 1, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, Shots(100) ) # diff num_gates r6 = Resources(4, 2, {"CNOT": 1}, {1: 1, 2: 1}, 2, Shots(100)) # diff gate_types r7 = Resources( 4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 3, 2: 2}, 2, Shots(100) ) # diff gate_sizes r8 = Resources(4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 1, Shots(100)) # diff depth r9 = Resources( 4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, Shots((10, 10)) ) # diff shots assert r1.__eq__(r1) assert r1.__eq__(r2) assert r1.__eq__(r3) assert not r1.__eq__(r4) assert not r1.__eq__(r5) assert not r1.__eq__(r6) assert not r1.__eq__(r7) assert not r1.__eq__(r8) assert not r1.__eq__(r9)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testEquality(self):\n pass", "def __eq__(self, other: 'Resource') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def assert_equal_resource(res1, res2):\n assert isinstance(res1, FakedBaseResource)\n assert isinstance(res2, FakedBaseResource)\n assert res1.uri == res2.uri\n assert res1.oid == res2.oid\n names1 = set(res1.properties.keys())\n names2 = set(res2.properties.keys())\n if names1 != names2:\n raise AssertionError(\n \"Resources do not have the same set of properties:\\n\"\n \"- res1 names: {}\\n\"\n \"- res2 names: {}\\n\".\n format(names1, names2))\n for name in res1.properties:\n value1 = res1.properties[name]\n value2 = res2.properties[name]\n if value1 != value2:\n raise AssertionError(\n \"Resources do not have the same value for property {}:\\n\"\n \"- res1 value: {}\\n\"\n \"- res2 value: {}\\n\".\n format(name, value1, value2))", "def assertResourceEqual(self, actual, expected, resource_type):\n return self.assertEqual(\n resource_type(**expected).to_dict(computed=False),\n actual.to_dict(computed=False),\n )", "def test_releaseresourcesrequest_equality_with_other_objects():\n constructor_args = dict(\n interface=\"https://schema.skao.int/ska-low-mccs-releaseresources/2.0\",\n subarray_id=1,\n release_all=True,\n )\n request = ReleaseResourcesRequest(**constructor_args)\n\n assert request != 1\n assert request != object()", "def test_eq(self):\n\n self.assertEqual(\n description.BaseDescription('/path/to/local'),\n description.BaseDescription('/path/to/local'),\n 'equality between two descriptions'\n )\n\n self.assertNotEqual(\n description.BaseDescription('/path/to/local/a'),\n description.BaseDescription('/path/to/local/b'),\n 'inequality between two descriptions'\n )", "def test_check_resource(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n self.assertEqual(s1.check_resource(b1), False)\n s1.add_resource(b1)\n self.assertEqual(s1.check_resource(b1), True)", "def __eq__(self, other):\n if not isinstance(other, ResourceUsage):\n return False\n\n return self.to_dict() == other.to_dict()", "def test_releaseresourcesrequest_object_equality():\n constructor_args = dict(\n interface=\"https://schema.skao.int/ska-low-mccs-releaseresources/2.0\",\n subarray_id=1,\n release_all=True,\n )\n request = ReleaseResourcesRequest(**constructor_args)\n\n # objects with same property values are considered equal\n other = ReleaseResourcesRequest(**constructor_args)\n assert request == other\n\n # objects where any property differs are considered unequal\n different_args = dict(\n interface=\"https://schema.skao.int/ska-low-mccs-releaseresources/999.0\",\n subarray_id=2,\n release_all=False,\n )\n for k, v in different_args.items():\n other_args = dict(constructor_args)\n other_args[k] = v\n assert request != ReleaseResourcesRequest(**other_args)", "def test_getResourceRelations(self):\n pass", "def __eq__(self, other):\n if not isinstance(other, GetVariationsResource):\n return False\n\n return self.__dict__ == other.__dict__", "def test_equals(self):\n measurement_1 = Measurement(self.metric())\n measurement_2 = Measurement(self.metric())\n self.assertTrue(measurement_1.equals(measurement_2))", "def test_eq(self):\n dummy = DummyCryptographicObject()\n self.assertTrue(dummy == dummy)", "def test_equality(self):\n # Make explicitly sure we're using ==:\n self.assertTrue(Comparable(1) == Comparable(1))\n self.assertFalse(Comparable(2) == 
Comparable(1))", "def test_equal_on_equal(self):\n a = objects.OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)\n b = objects.OpaqueObject(self.bytes_a, enums.OpaqueDataType.NONE)\n self.assertTrue(a == b)\n self.assertTrue(b == a)", "def test_compare() -> None:\n\n obj = SpecificLocation()\n obj2 = SpecificLocation()\n\n assert obj != obj2\n\n obj._id = obj2.id\n\n assert obj == obj2", "def test_equal_on_equal(self):\n a = Certificate(\n certificate_type=self.certificate_type_b,\n certificate_value=self.certificate_value_b)\n b = Certificate(\n certificate_type=self.certificate_type_b,\n certificate_value=self.certificate_value_b)\n\n self.assertTrue(a == b)\n self.assertTrue(b == a)", "def __eq__(self, other):\n if not isinstance(other, FreeResourceDetail):\n return False\n\n return self.__dict__ == other.__dict__", "def test_equality(self):\n self.assertTrue(Record(1, 2) == Record(1, 2))\n self.assertFalse(Record(1, 2) == Record(1, 3))\n self.assertFalse(Record(1, 2) == Record(2, 2))\n self.assertFalse(Record(1, 2) == Record(3, 4))", "def __ne__(self, other: 'Resource') -> bool:\n return not self == other", "def __eq__(self, other: 'ResourceGroupReference') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def test_eq(self):\n\n self.assertEqual(description.RepositoryDescription(\n '[email protected]:/example/remote',\n '/path/to/local'),\n description.RepositoryDescription(\n '[email protected]:/example/remote',\n '/path/to/local'),\n 'equality between two descriptions'\n )\n\n self.assertNotEqual(description.RepositoryDescription(\n '[email protected]:/example/remote',\n '/path/to/local'),\n description.RepositoryDescription(\n 'github.com:/example/remote',\n '/path/to/local'),\n 'inequality between two descriptions'\n )", "def assertResourceListEqual(self, actual, expected, resource_type):\n self.assertEqual(\n [resource_type(**f).to_dict(computed=False) for f in expected],\n [f.to_dict(computed=False) for f in actual],\n )", "def test_equal(self):\n self.assertTrue(self.a == self.a)\n self.assertFalse(self.a != self.a)", "def test_eq_true(self):\n other = Sample(self.sample_id, self.sample_template)\n self.assertTrue(self.tester == other)", "def test_equals_with_different_sources(self):\n measurement_1 = Measurement(self.metric(), sources=[{\"source_uuid\": SOURCE_ID}])\n measurement_2 = Measurement(self.metric())\n self.assertFalse(measurement_1.equals(measurement_2))", "def test_equals(self):\n othercompound = PyFBA.metabolism.Compound(\"t2\", \"test compound\")\n self.assertEqual(self.compound, othercompound)\n othercompound.name = \"Another compound\"\n self.assertNotEqual(self.compound, othercompound)", "def test_instance_equality(self):\r\n class EqualityModel(Model):\r\n pk = columns.Integer(primary_key=True)\r\n\r\n m0 = EqualityModel(pk=0)\r\n m1 = EqualityModel(pk=1)\r\n\r\n self.assertEqual(m0, m0)\r\n self.assertNotEqual(m0, m1)", "def test_equality(self):\n self.assertEqual(self._version1, self._version1)\n self.assertNotEqual(self._version2, self._version1)\n self.assertEqual(self._version1, PrcsVersion(self._version1))", "def test_identical(self):\n write this test!" ]
[ "0.7314152", "0.72189605", "0.7174685", "0.7077528", "0.6794791", "0.6717936", "0.67031395", "0.66849434", "0.6618718", "0.6528539", "0.6495091", "0.6438724", "0.6425555", "0.63503706", "0.63453996", "0.6344413", "0.63160825", "0.62906426", "0.6265871", "0.62443507", "0.6232666", "0.6215966", "0.6212099", "0.61917907", "0.61846125", "0.61829615", "0.61780643", "0.61648154", "0.61642885", "0.6161368" ]
0.73252475
0
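
> Editor's note (not a dataset record): the record above tests field-wise equality of a `Resources` container. A stand-in sketch — this is not PennyLane's actual class, and `Shots` is replaced by a plain tuple for illustration — showing why key order in the dicts does not affect equality:

```python
# Minimal stand-in: a Resources container whose __eq__ compares every field.
# Dataclasses generate __eq__ as field-wise comparison; dicts compare by
# content, so {"Hadamard": 1, "CNOT": 1} == {"CNOT": 1, "Hadamard": 1}.
from dataclasses import dataclass, field

@dataclass
class Resources:
    num_wires: int = 0
    num_gates: int = 0
    gate_types: dict = field(default_factory=dict)
    gate_sizes: dict = field(default_factory=dict)
    depth: int = 0
    shots: tuple = ()  # stand-in for the Shots object in the real test

r1 = Resources(4, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, (100,))
r3 = Resources(4, 2, {"CNOT": 1, "Hadamard": 1}, {2: 1, 1: 1}, 2, (100,))
assert r1 == r3  # equal despite different dict key order
assert r1 != Resources(1, 2, {"Hadamard": 1, "CNOT": 1}, {1: 1, 2: 1}, 2, (100,))
```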
Test that a TypeError is raised if the class is initialized without a `resources` method.
def test_raise_not_implemented_error(self): class CustomOpNoResource(ResourcesOperation): # pylint: disable=too-few-public-methods num_wires = 2 class CustomOPWithResources(ResourcesOperation): # pylint: disable=too-few-public-methods num_wires = 2 def resources(self): return Resources(num_wires=self.num_wires) with pytest.raises(TypeError, match="Can't instantiate"): _ = CustomOpNoResource(wires=[0, 1]) # pylint:disable=abstract-class-instantiated assert CustomOPWithResources(wires=[0, 1]) # shouldn't raise an error
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_instance(self):\n with self.assertRaises(exceptions.NoInitiation):\n Config()", "def test_cannot_instantiate(self):\n with self.assertRaises(TypeError):\n Distribution()", "def raise_init(cls):\r\n def init(self):\r\n raise TypeError(\"Instance creation is not allowed for %s\" % cls)\r\n cls.__init__ = init\r\n return cls", "def test_constructor_missing_config():\n with pytest.raises(TypeError):\n Unpacker()", "def test_init(self):\n\n class TestResource(BaseResource):\n\n name = 'test_resource'\n\n def process(self, message):\n pass\n\n api = Mock()\n api.endpoint = 'http://an_endpoint'\n route = '/a_route'\n TestResource.init(api, route)\n\n # validate the attribute values of the class\n self.assertEqual(api, TestResource.api)\n self.assertEqual(route, TestResource.route)\n self.assertEqual(api.mongodb, TestResource.mongodb)\n self.assertEqual(api.conf, TestResource.conf)\n self.assertEqual('http://an_endpoint/a_route', TestResource.endpoint)\n self.assertEqual('test_resource', TestResource.logger.name)", "def test_can_instantiate(self):\n\n exc_thrown = False\n\n try:\n self.klass(*self.instantiate_args)\n except Exception:\n exc_thrown = True\n\n self.assertFalse(exc_thrown)", "def test_class_errored(self, cls, exception):", "def test_raises_when_accessing_none_implementation(self):\n\n class APIObj(\n platform.APIObject,\n collections.namedtuple(\"APIObj\", \"implementation\"),\n ):\n def __new__(cls):\n return super().__new__(cls, implementation=None)\n\n obj = APIObj()\n\n with pytest.raises(AttributeError) as exc_info:\n obj.implementation # pylint: disable=pointless-statement\n\n assert \"invalid access to 'implementation': not initialized\" in str(\n exc_info.value\n )", "def test_instantiate_non_existent_class(self):\n # create test configs\n test_configs = [\n {\"_target_\": \"collections.NonExistentClass\"},\n {\"_target_\": \"collections.OtherNonExistentClass\", \"a\": 1, \"b\": 2}\n ]\n\n # check that instantiate raises AttributeError for each test config\n for test_conf in test_configs:\n self.assertRaises(AttributeError, instantiate, test_conf)", "def test_doesnt_implement_can_handle(self):\r\n self.assertRaises(NotImplementedError, Importer.can_handle, \"\")", "def test_exception(self):\n self.assertRaises(TypeError, lambda: self.init_model())", "def test_custom_resource():\n data = {\n 'name': 'Wort wort',\n 'slug': 'sluggy',\n 'not_valid': 'nooo'\n }\n instance = PeopleResource(**data)\n # We should have this attribute\n assert hasattr(instance, 'name')\n # But this one is missing\n assert not hasattr(instance, 'another_thing')\n # and this one is not valid\n assert not hasattr(instance, 'not_valid')\n assert instance.__str__() == '<People | Wort wort>'\n # It should also have parent Meta attributes\n assert hasattr(instance.Meta, 'valid_status_codes')", "def test_setup_object_without__all__name__(self):\n with self.assertRaises(AttributeError):\n pluggable_package.setup(self._test_object)", "def test_constructor_invalid():\n with pytest.raises(TypeError, match='missing 1 required positional argument'):\n PseudoPotentialData() # pylint: disable=no-value-for-parameter", "def test_cannot_be_instantiated(self):\n with self.assertRaises(NotImplementedError):\n ClassicalIOChannel(0)", "def test_min_resources(self):\n with pytest.raises(AttributeError) as exc:\n Fidelity(\"epoch\", 0, 2)\n assert \"Minimum resources must be a positive number.\" == str(exc.value)", "def test_exceptions_init_valid():\n exceptions = 
Exceptions(os.path.join(os.path.dirname(__file__),\n 'valid_exceptions.yaml'))\n assert exceptions.exceptions", "def test_cannot_be_instantiated(self):\n with self.assertRaises(NotImplementedError):\n Channel(0)", "def test__init__raise_exception(self):\n self.assertRaises(TypeError, MasterNodeInterface)", "def test_cannot_be_instantiated(self):\n with self.assertRaises(NotImplementedError):\n PulseChannel(0)", "def test_12_No_args(self):\n with self.assertRaises(TypeError) as x:\n r = Rectangle()\n self.assertEqual(\"__init__() missing 2 required positional\\\n arguments: 'width' and 'height'\", str(x.exception))", "def __init__(self, root):\n self._root = root\n if not self.get_resources():\n raise Exception('Your application has no Resource.')", "def test_unknown_resource_under_service(self):\n raise NotImplementedError # FIXME", "def test_empty_source_constructor_exception():\n with pytest.raises(robox.RDJParameterErr):\n test01 = Source()", "def test_11_None_input(self):\n with self.assertRaises(TypeError) as x:\n r = Rectangle(None)\n self.assertEqual(\"__init__() missing 1 required positional argument:\\\n 'height'\", str(x.exception))", "def test_get_fail(self):\n with self.assertRaises(AssertionError):\n self.resource.get(-1)", "def test_registry_requires_implementation(self):\n def make_class():\n class FakeSerializer(Serializer):\n pass\n\n self.assertRaises(AttributeError, make_class)", "def test_exceptions_init_nonexistent():\n with pytest.raises(IOError):\n Exceptions(os.path.join(os.path.dirname(__file__),\n 'nonexistent_exceptions.yaml'))", "def test_fail_on_init(self):\n\n with self.assertRaises(IcypawException):\n class Node:\n my_metric = Metric(Int64, read_only=True)\n\n @my_metric.net_hook\n def my_metric(self, value):\n pass", "def testUnknownHttpMethod(self):\n api = Api({'name': 'dummy', 'version': 'v1', 'resources': {}})\n unused_resource = Resource(api, 'temp', {'methods': {}})\n self.assertRaises(ApiException,\n Method, api, 'bad', {\n 'rpcMethod': 'rpc',\n 'httpMethod': 'Not GET/POST/PUT/DELETE',\n 'parameters': {}\n })" ]
[ "0.6509014", "0.6454623", "0.64346546", "0.64189184", "0.6415098", "0.6371529", "0.6346806", "0.63281137", "0.63169825", "0.63013685", "0.6288929", "0.62795115", "0.6273733", "0.6269042", "0.6252056", "0.62284595", "0.62220055", "0.622168", "0.6193215", "0.61502784", "0.614811", "0.614528", "0.6144411", "0.6104908", "0.60944456", "0.60921514", "0.6085857", "0.6075969", "0.6068589", "0.6067185" ]
0.7228034
0
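
> Editor's note (not a dataset record): the mechanism the record above tests — instantiation failing with `TypeError("Can't instantiate ...")` when an abstract method is not overridden — is standard `abc` behavior. A self-contained sketch:

```python
# Declaring `resources` as an abstract method makes any subclass that fails
# to override it uninstantiable: Python raises TypeError at construction.
import abc

class ResourcesOperation(abc.ABC):
    def __init__(self, wires):
        self.wires = wires

    @abc.abstractmethod
    def resources(self):
        """Subclasses must return a resource estimate."""

class WithResources(ResourcesOperation):
    def resources(self):
        return {"num_wires": len(self.wires)}

class WithoutResources(ResourcesOperation):
    pass  # no resources() override

WithResources(wires=[0, 1])  # fine
try:
    WithoutResources(wires=[0, 1])
except TypeError as err:
    assert "Can't instantiate" in str(err)
```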
Test the count resources method.
def test_count_resources(ops_and_shots, expected_resources): ops, shots = ops_and_shots computed_resources = _count_resources(QuantumScript(ops=ops, shots=shots)) assert computed_resources == expected_resources
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_resource_license_resource_count_list(self):\n pass", "def count(self, resource):\n return len(self.all(resource))", "def test_get_resource_license_resource_count_by_moid(self):\n pass", "def test_count(self):\n self._test_count_func(count)", "def test_all_count(self):\n self.assertEqual(2, self.alice_storage.all_count)\n self.assertEqual(3, self.bob_storage.all_count)\n self.assertEqual(0, self.carol_storage.all_count)\n self.assertEqual(0, self.anonymous_storage.all_count)", "def test_read_count(self):\n self.assertEqual(1, self.alice_storage.read_count)\n self.assertEqual(1, self.bob_storage.read_count)\n self.assertEqual(0, self.carol_storage.read_count)\n self.assertEqual(0, self.anonymous_storage.read_count)", "def test_download_count_per_resource(self):\n\n for path, count in [('test1', 1), ('test2', 2), ('test3', 3), ('test40', 5), ('testten', 8)]:\n for i in range(count):\n FileDownloadEvent.objects.create(resource=self.test_resource, path=path, session_key=f'{path}{i}')\n\n for path, count in [('test1', 1), ('test2', 1), ('test3', 2), ('test40', 3), ('testten', 5)]:\n for i in range(count):\n FileDownloadEvent.objects.create(resource=self.test_resource_two, path=path, session_key=f'{path}{i}')\n\n test_resource_one_total = FileDownloadEvent.objects.download_count_for_resource(self.test_resource)\n self.assertEqual(test_resource_one_total, 19)\n test_resource_two_total = FileDownloadEvent.objects.download_count_for_resource(self.test_resource_two)\n self.assertEqual(test_resource_two_total, 12)", "def count():", "def test_count(db_4_tasks):\n assert(tasks.count() == 4)", "def test_objectresource_countobjects(self):\n\n home01 = yield self.homeUnderTest(txn=self.theTransactionUnderTest(0), name=\"user01\", create=True)\n self.assertTrue(home01 is not None)\n calendar01 = yield home01.childWithName(\"calendar\")\n yield calendar01.createCalendarObjectWithName(\"1.ics\", Component.fromString(self.caldata1))\n yield calendar01.createCalendarObjectWithName(\"2.ics\", Component.fromString(self.caldata2))\n yield self.commitTransaction(0)\n\n home = yield self._remoteHome(self.theTransactionUnderTest(1), \"user01\")\n self.assertTrue(home is not None)\n calendar = yield home.childWithName(\"calendar\")\n count = yield calendar.countObjectResources()\n self.assertEqual(count, 2)\n yield self.commitTransaction(1)", "def Count(self) -> int:", "def Count(self) -> int:", "def Count(self) -> int:", "def Count(self) -> int:", "def count(cls, client) :\n try :\n obj = nshttpprofile()\n option_ = options()\n option_.count = True\n response = obj.get_resources(client, option_)\n if response :\n return response[0].__dict__['___count']\n return 0\n except Exception as e :\n raise e", "def test_count(self):\n\n command = Command()\n modellist = command.get_modellist()\n for model_name, count in modellist:\n # taking model class by it's name\n model = ContentType.objects.get(model=model_name).model_class()\n # testing we've counted objects in this model right\n self.assert_count(model, count)", "def test_count(self):\n eq_(Signoff.objects.count(), 5)\n eq_(Action.objects.count(), 8)", "def test_properties_count_get(self):\n pass", "def testArticleCount(self):\n\n self.articleCount(17)", "def test_counter(self):\n self.assertEqual(self._n_registered, 1)", "def count() -> int:\n pass", "def test_own_count(self):\n self._test_count_func(it_count)", "async def count(self, **kw):\n\n pass", "def test_all_count(self):\n self.assertEqual(2, self.alice_inbox.all_count)\n self.assertEqual(3, 
self.bob_inbox.all_count)\n self.assertEqual(0, self.carol_inbox.all_count)", "def count(cls, client) :\n\t\ttry :\n\t\t\tobj = rewriteaction()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e", "def test_new_count(self):\n self.assertEqual(2, self.alice_storage.new_count)\n self.assertEqual(3, self.bob_storage.new_count)\n self.assertEqual(0, self.carol_storage.new_count)\n self.assertEqual(0, self.anonymous_storage.new_count)", "def test_unread_count(self):\n self.assertEqual(1, self.alice_storage.unread_count)\n self.assertEqual(2, self.bob_storage.unread_count)\n self.assertEqual(0, self.carol_storage.unread_count)\n self.assertEqual(0, self.anonymous_storage.unread_count)", "def test_count(database):\n assert len(database.credentials) == 2", "def test_getSampleCount(self):\r\n self.assertEqual(self.res1.getSampleCount(), 0)\r\n\r\n self.res1.addSample('S1', 42)\r\n self.assertEqual(self.res1.getSampleCount(), 1)\r\n\r\n self.res1.addSample('S2', 43)\r\n self.assertEqual(self.res1.getSampleCount(), 2)", "def count(cls, client) :\n\t\ttry :\n\t\t\tobj = lbprofile()\n\t\t\toption_ = options()\n\t\t\toption_.count = True\n\t\t\tresponse = obj.get_resources(client, option_)\n\t\t\tif response :\n\t\t\t\treturn response[0].__dict__['___count']\n\t\t\treturn 0\n\t\texcept Exception as e :\n\t\t\traise e" ]
[ "0.780781", "0.7554697", "0.72033167", "0.71104103", "0.7008526", "0.690465", "0.6903304", "0.6899983", "0.679157", "0.67801946", "0.6760998", "0.6760998", "0.6760998", "0.6760998", "0.67398643", "0.6697201", "0.66854256", "0.66848224", "0.6672156", "0.6637181", "0.6620758", "0.66197455", "0.6614067", "0.66118467", "0.65891695", "0.6580853", "0.6513094", "0.6509492", "0.6468866", "0.64616734" ]
0.76768863
1
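
> Editor's note (not a dataset record): the `_count_resources` helper in the record above is not shown; a plausible sketch of the tallying it implies, using `collections.Counter` over a flat list of `(gate_name, wires)` pairs. Names and the return shape are assumptions for illustration:

```python
# Illustrative tally of gate types, gate sizes, and distinct wires.
from collections import Counter

def count_resources(ops):
    gate_types = Counter(name for name, _ in ops)
    gate_sizes = Counter(len(wires) for _, wires in ops)
    wires = {w for _, ws in ops for w in ws}
    return {
        "num_wires": len(wires),
        "num_gates": len(ops),
        "gate_types": dict(gate_types),
        "gate_sizes": dict(gate_sizes),
    }

ops = [("Hadamard", (0,)), ("CNOT", (0, 1))]
assert count_resources(ops) == {
    "num_wires": 2,
    "num_gates": 2,
    "gate_types": {"Hadamard": 1, "CNOT": 1},
    "gate_sizes": {1: 1, 2: 1},
}
```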
Clamp value between mini and maxi
def clamp(value, mini, maxi): if value < mini: return mini elif maxi < value: return maxi else: return value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clamp(value, mn, mx):\n\n return max(min(value, mx), mn)", "def clamp(n, min_, max_):\n return max(min(max_,n),min_)", "def clamp(self, value, minv, maxv):\n if value > maxv:\n return maxv\n if value < minv:\n return minv\n return value", "def clamp(value, min_value, max_value):\n return max(min_value, min(value, max_value))", "def clamp(num, min, max): \n if num < min:\n num = min\n elif num > max:\n num = max\n return num", "def clamp(value, minval, maxval):\n return sorted((minval, int(value), maxval))[1]", "def clamp(min_value: float, max_value: float, value: float):\n\t\tvalue = min(value, max_value)\n\t\tvalue = max(value, min_value)\n\t\treturn value", "def clamp(n, minn, maxn):\n return max(min(maxn, n), minn)", "def clamp(n, minn, maxn):\n return max(min(maxn, n), minn)", "def clamp(minimum, n, maximum):\n return max(minimum, min(n, maximum))", "def clamp(minimum, value, maximum):\n return max(minimum, min(maximum, value))", "def clamp(self, value, minVal, maxVal):\n if type(value) is type(\"string\"):\n return value\n if minVal != None and max != None:\n return max(min(value, maxVal), minVal)\n if minVal != None and maxVal == None:\n return max(value, minVal)\n if minVal == None and maxVal != None:\n return min(value, maxVal)\n return value", "def clip(val, val_min, val_max):\n return min(val_max, max(val_min, val))", "def range_limit(val, minv, maxv):\n\tif (val < minv):\n\t\tval = minv\n\telif (val > maxv):\n\t\tval = maxv\n\treturn val", "def clamp(lower, value, upper):\n if lower > value:\n return lower\n if upper < value:\n return upper\n return value", "def vc_clamp(x, lb, ub):\n\n y = min(x, ub)\n y = max(y, lb)\n\n return y", "def clamp(num,start,end):\n if num >= start and num <= end: return num\n elif num < start: return start\n elif num > end: return end", "def clamp(n: int, a: int, b: int):\n return min(max(n, a), b)", "def _limit(value, min_value, max_value):\n\n if value < min_value:\n return min_value\n if value > max_value:\n return max_value\n return value", "def clamp(x, l, u):\n return l if x < l else u if x > u else x", "def clamp(self):\n self.threshold.data.clamp_(self.min_threshold)", "def clip_to_output_limits(self, value):\n return max(self.out_min, min(self.out_max, value))", "def Clamp(val, min, max):\n\tval = float(val)\n\tmin = float(min)\n\tmax = float(max)\n\n\tif val < min:\n\t\treturn min\n\telif val > max:\n\t\treturn max\n\telse:\n\t\treturn val", "def cpfclamp(f, min_, max_):\n return min(max(f, min_), max_)", "def clamp(x: float, min_x: float, max_x: float) -> float:\n if x < min_x:\n return min_x\n elif x > max_x:\n return max_x\n return x", "def clip(x, min, max):\r\n # see decorator for function body\r\n # for grep: clamp, bound\r", "def rangeLimit(val, minv, maxv):\n\treturn range_limit(val, minv, maxv)", "def constrain(small, value, big):\n return min(max(value, small), big)", "def limit(val, arr):\n # Make copy\n new = np.array(val)\n extr = minmax(arr)\n # Enforce lower bound\n new = np.maximum(new, extr[0])\n # Enforce upper bound\n new = np.minimum(new, extr[1])\n return new", "def __limit_value(self, value, v_range):\n if np.isnan(value):\n print('Warning: trying to limit nan value in range {0}'.format(v_range))\n return value\n\n return np.min([v_range[1], np.max([value, v_range[0]])])" ]
[ "0.80389065", "0.7782929", "0.77185524", "0.76578045", "0.76456124", "0.74849397", "0.74809366", "0.7454212", "0.73672295", "0.7320122", "0.72616553", "0.7163171", "0.7146835", "0.71296495", "0.71129787", "0.71004564", "0.7080147", "0.70638114", "0.70379007", "0.70337254", "0.69668514", "0.6898514", "0.6779924", "0.67688483", "0.6742431", "0.6737552", "0.673642", "0.67195827", "0.66509795", "0.66436124" ]
0.8780609
0
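
> Editor's note (not a dataset record): the branchy `clamp` in the record above is equivalent to the `max(min(...))` one-liner that dominates its negatives, provided `mini <= maxi`. A quick check:

```python
# One-expression clamp, equivalent to the if/elif version when mini <= maxi.
def clamp(value, mini, maxi):
    return max(mini, min(value, maxi))

assert clamp(5, 0, 10) == 5    # inside range: unchanged
assert clamp(-3, 0, 10) == 0   # below range: pinned to mini
assert clamp(42, 0, 10) == 10  # above range: pinned to maxi
```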
Show a saved search.
def show(ctx, saved_search_id): r = SavedSearch(ctx.obj['TOKEN'], ctx.obj['DEBUG']).show(saved_search_id) click.echo(json_dumps(r, ctx.obj['PRETTY']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_save_search(self, event):\r\n\r\n search = self.m_searchfor_textbox.GetValue()\r\n if search == \"\":\r\n errormsg(_(\"There is no search to save!\"))\r\n return\r\n dlg = SaveSearchDialog(self, search, self.m_regex_search_checkbox.GetValue())\r\n dlg.ShowModal()\r\n dlg.Destroy()", "def showFind(self, txt=\"\"):\n self.__searchWidget.showFind(txt)", "def on_searchButton_clicked(self):\n self.__search()", "def search():\n query = input('Please enter your search query\\n')\n # For now, we will just print the whole database\n #db_actions.display()\n db_actions.search(query)", "def load_search_gui(self):\n pass", "def showSearchWidget(self):\n self.__replaceWidget.hide()\n self.__searchWidget.show()\n self.__searchWidget.show(self.textForFind())", "def search():\r\n return render_template(\"/home/search.html\")", "def new_search(self, widget, data=None):\n self.artist_name.set_text(\"\")\n self.song_name.set_text(\"\")\n self.status_bar.hide()\n self.lyrics_view.hide()\n self.scroll.hide()\n self.window.resize(self.width, self.height)", "def search_page():\n return render_template('page_query.html', search_label=g_search_type)", "def search():\n form = SearchForm()\n if form.validate_on_submit():\n return render_template('reports/search_results.html', reports=form.reports)\n else:\n flash_form_errors(form)\n return render_template('reports/search.html', form=form)", "def search_command():\n listing.delete(0, END)\n for row in backend.search(title_text.get(), \n author_text.get(), \n year_text.get(), \n isbn_text.get()):\n listing.insert(END, row)", "def searchInspector(search):\n \n try:\n from PyQt4.QtGui import QApplication, QDialog\n from dialog import Ui_Dialog\n except Exception:\n print \"Missing a required library - please install pyQt4.\"\n return\n \n app = QApplication(sys.argv)\n window = QDialog()\n ui = Ui_Dialog()\n ui.setupUi(window)\n ui.updateList(search)\n window.show()\n app.exec_()", "def search_btn_clicked(self, widget, data=None):\n # Method to handle search here.\n search_text = self.get_text(\"txt_search\")\n print search_text", "def show_saved(self):\n self._saved_text.set_text(\"Saved\")", "def search():\n # Check for database tables\n check_db()\n # Check for GET data\n search_query = request.args.get(\"q\", None)\n # Format search results as HTML\n search_results = get_search_results_html(search_query)\n # Format recent searches as HTML\n recent_searches = get_recent_searches_html()\n\n return html_wrapper('<h1>' + SITE_NAME + '''</h1>\n <form action=\"/\" method=\"GET\">\n <input type=\"text\" name=\"q\">\n <input type=\"submit\" value=\"search\">\n </form>''' + search_results + recent_searches)", "def search(request):\n return render(request, \"search.html\")", "def search(request):\n\n if request.method == \"POST\":\n form = SearchForm(request.POST)\n\n if form.is_valid():\n title = form.cleaned_data[\"title\"]\n entryMD = util.get_entry(title)\n\n print('search request: ', title)\n\n if entryMD:\n return redirect(reverse('entry', args=[title]))\n else:\n relatedTitles = util.relatedTitles(title)\n\n return render(request, \"encyclopedia/search.html\", {\n \"title\": title,\n \"relatedTitles\": relatedTitles,\n \"searchForm\": SearchForm()\n })\n return redirect(reverse('index'))", "def index(request):\r\n form = forms.SearchForm()\r\n \r\n return render_to_response('search/search.html', {'form': form})", "def go_to_search():\n\tuser_id = session.get(\"user_id\")\n\tuser = User.query.filter_by(user_id=user_id).first()\n\n\treturn 
render_template(\"/nowsearch.html\", user=user)", "def search(request):\n raise NotImplementedError", "def search_form_servee(context, cl):\r\n return {\r\n \"request\": context[\"request\"],\r\n \"cl\": cl,\r\n \"show_result_count\": cl.result_count != cl.full_result_count,\r\n \"search_var\": \"q\"\r\n }", "def search_google(self, widget):\n if not self.searchstring:\n return\n base_uri = \"https://www.google.com/search?q=%s\"\n uri = base_uri % urllib.parse.quote(self.searchstring.encode(\"utf-8\"))\n Gtk.show_uri_on_window(None, uri, Gdk.CURRENT_TIME)", "def search():\n pass", "def show_search_resuls(builder, show: bool):\n show_ui_component(builder, 'findYourInstituteScrolledWindow', show)", "def onSearch(self):\n self.mainGrid.showSearchPopup()\n self.popupActive = True", "def search(self):\r\n return resources.Search(self)", "def search(request):\n if 'find_project' in request.GET and request.GET['find_project']:\n project_name=request.GET.get('find_project')\n \n searched_project=Project.search_project(project_name)\n \n return render(request,'search_results.html',{'searched_project':searched_project})", "def __ask_query(self):\n self.__output = list()\n return input(form('What do you want to search?\\n> '))", "def search(request):\r\n\tinput_text = request.GET.get('search-text', '')\r\n\tgames = Game.objects.filter(name__icontains=input_text)\r\n\treturn render(request, 'home.html', {'games': games, 'MEDIA_URL': settings.MEDIA_URL})", "def search_venues_form():\n # seach for Hop should return \"The Musical Hop\".\n # search for \"Music\" should return \"The Musical Hop\" and \"Park Square Live Music & Coffee\"\n return render_template(\n 'pages/search_venues.html'\n )" ]
[ "0.684537", "0.66723734", "0.65726525", "0.6466039", "0.62601984", "0.6245371", "0.6242621", "0.62300265", "0.6214036", "0.6172219", "0.6159994", "0.6143998", "0.60995394", "0.6086033", "0.60684097", "0.6028625", "0.59793204", "0.5916503", "0.5916453", "0.5899437", "0.583377", "0.5811491", "0.5792607", "0.57917416", "0.5780091", "0.57794523", "0.57757056", "0.5774353", "0.5770432", "0.5766154" ]
0.79775244
0
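
> Editor's note (not a dataset record): this record and the next three (show / create / update / delete) all call a `SavedSearch` class that the dataset never shows. The sketch below is an assumed REST-client shape — the endpoint URL, `requests` usage, and method signatures are guesses for illustration only, not the original project's API:

```python
# Hypothetical SavedSearch client matching the four CLI commands' call sites.
import requests

class SavedSearch:
    BASE = "https://api.example.com/saved_searches"  # hypothetical endpoint

    def __init__(self, token, debug=False):
        self.headers = {"Authorization": f"Bearer {token}"}
        self.debug = debug

    def show(self, saved_search_id):
        return requests.get(f"{self.BASE}/{saved_search_id}",
                            headers=self.headers).json()

    def create(self, payload):
        return requests.post(self.BASE, json=payload,
                             headers=self.headers).json()

    def update(self, payload):
        return requests.put(f"{self.BASE}/{payload['id']}", json=payload,
                            headers=self.headers).json()

    def delete(self, saved_search_id):
        return requests.delete(f"{self.BASE}/{saved_search_id}",
                               headers=self.headers).json()
```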
Create a saved search.
def create(ctx, payload): payload = parse_payload(ctx, payload) r = SavedSearch(ctx.obj['TOKEN'], ctx.obj['DEBUG']).create(payload) click.echo(json_dumps(r, ctx.obj['PRETTY']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createSearch(self, authenticationToken, search):\r\n pass", "def saveSearch(self, queryString, searchName):\n facade = self._getFacade()\n if facade.noSaveSearchProvidersPresent():\n return DirectResponse.succeed()\n\n creator = self._getLoggedinUserId()\n\n # save the search\n facade.saveSearch(queryString, searchName, creator)\n return DirectResponse.succeed()", "def on_save_search(self, event):\r\n\r\n search = self.m_searchfor_textbox.GetValue()\r\n if search == \"\":\r\n errormsg(_(\"There is no search to save!\"))\r\n return\r\n dlg = SaveSearchDialog(self, search, self.m_regex_search_checkbox.GetValue())\r\n dlg.ShowModal()\r\n dlg.Destroy()", "def createSearch(self, authenticationToken, search):\r\n self.send_createSearch(authenticationToken, search)\r\n return self.recv_createSearch()", "def new_search(self):\n return {'search_parameters': h.get_search_parameters(self.query_builder)}", "def save(self, *args, **kwargs):\n self._update_search_tokens()\n super().save(*args, **kwargs)", "def saveQuery(self, query):\n items_temp = []\n field = self.txtSearchHistory # Initialise Search History textbox as 'field'\n field.config(state='normal') # Enable 'field' for editing (removing and adding texts)\n index = 1\n\n # Iterate through 'field' to check if query made matches previous searches\n for item in field.get(\"1.0\", 'end').splitlines():\n if item:\n if str(item).lower() == query.lower():\n field.delete(str(index) + '.0',\n str(index) + '.end + 1 char') # Remove text from 'field' if matches with current query\n index += 1\n\n self.txtSearchHistory.insert('1.0', query.capitalize() + \"\\n\") # Insert current query to first line of 'field'\n field.config(state='disabled') # Disable user from changing 'field' text box\n\n # Get updated search history to store in file\n for item in field.get(\"1.0\", 'end').splitlines():\n if item: items_temp.append(item)\n\n # Store queries (past and current) to file\n de.addSearchHist(items_temp)", "def create_new_search(self) -> Hashable:\n search_id_counter = self._redis.incr(\"search_id_counter\", amount=1) - 1\n search_id = f\"{search_id_counter}\" # converting to str\n self._redis.rpush(\"search_id_list\", search_id)\n return search_id", "def runNewSearch(self):\n self.__searchJob = self.__startSearch()\n\n self.monitorSearchJob()", "def search(self, search):\n raise NotImplementedError", "def create_search(search_dic,f_symb,f_label,f_fftype,f_residue,f_resname,f_chain,f_ring):\n\n search_dic = addtagDic(search_dic,\"symbol\",f_symb)\n search_dic = addtagDic(search_dic,\"label\",f_label)\n search_dic = addtagDic(search_dic,\"fftype\",f_fftype)\n search_dic = addtagDic(search_dic,\"residue\",f_residue,setint=True)\n search_dic = addtagDic(search_dic,\"resname\",f_resname)\n search_dic = addtagDic(search_dic,\"chain\",f_chain,setint=True)\n search_dic = addtagDic(search_dic,\"ring\",f_ring,setint=True)\n \n return search_dic", "def create_search(search_dic,f_symb,f_label,f_fftype,f_residue,f_resname,f_chain,f_ring):\n\n search_dic = addtagDic(search_dic,\"symbol\",f_symb)\n search_dic = addtagDic(search_dic,\"label\",f_label)\n search_dic = addtagDic(search_dic,\"fftype\",f_fftype)\n search_dic = addtagDic(search_dic,\"residue\",f_residue,setint=True)\n search_dic = addtagDic(search_dic,\"resname\",f_resname)\n search_dic = addtagDic(search_dic,\"chain\",f_chain,setint=True)\n search_dic = addtagDic(search_dic,\"ring\",f_ring,setint=True)\n \n return search_dic", "def save(searches):\n # type: (list) -> None\n with Cache(CACHE_URI) as c:\n 
c.set(SAVED_SEARCH, json.dumps(searches, ensure_ascii=False))", "def pickupSearch(self):\n self.__searchJob = self.loadSavedHyperSearchJob(\n permWorkDir=self._options[\"permWorkDir\"],\n outputLabel=self._options[\"outputLabel\"])\n\n\n self.monitorSearchJob()", "def save(self, **kwargs):\n if self.search_terms is None:\n self.search_terms = ''\n super().save(**kwargs)\n return self", "def append(self, search):\n query_values = {\n \"id\": str(search.id),\n \"term\": search.term,\n \"timestamp\": search.timestamp\n }\n\n self._cursor.execute(f\"\"\"\n INSERT INTO {self._table_name}\n VALUES (:id, :term, :timestamp);\"\"\", query_values)\n\n self._conn.commit()", "def search(self, query):", "def build_search_data(self):\n # Must be overriden by subclass.\n pass", "def construct(self):\n return self.as_search().construct()", "def do_search(self, *args, **kwargs):\n return [{}]", "def search(self, *args, **kwargs):", "def __generate_search_query(self) -> None:\n if self.query_accuracy < 100:\n if self.title is not None and self.title != '' and self.artist is not None and self.artist != '':\n # Use the title and the artist name to find more information about the song.\n query: str = self.title + ' ' + self.artist\n query = re.sub(self.__get_filter_regex(), '', query)\n self.query = query\n # Remove unnecessary information in order to get a simpler query version.\n self.minimal_query = re.sub(r'\\([\\s\\S]+\\)', '', query).strip()\n self.query_accuracy = 100\n return\n if self.query_accuracy < 50:\n # No title nor artist name available, use the filename as search query.\n filename: str = os.path.basename(self.original_path)\n filename = os.path.splitext(filename)[0]\n query: str = filename.lower()\n query = re.sub(self.__get_filter_regex(), '', query)\n query = query.replace('_', ' ')\n query = query.strip()\n self.query = query\n self.minimal_query = re.sub(r'\\([\\s\\S]+\\)', '', query).strip()\n self.query_accuracy = 50", "def search():\n pass", "def search(self):\r\n return v3.Search(self)", "def cloneSavedSearch(savedSearch, dashId):\n clonedSavedSearch = savedSearch\n clonedSavedSearch.id = None\n clonedSavedSearch.dashboard = dashId\n clonedSavedSearch.save()\n return clonedSavedSearch", "def generate_search_for_saved_table(user, id=None,request=None):\n from crits.core.handlers import data_query\n response = {}\n savedSearch = None\n try:\n savedSearch = SavedSearch.objects(id=id).first()\n if not savedSearch:\n response['Result'] = \"ERROR\"\n response['Message'] = \"Error finding table, please try again later.\"\n return response\n except:\n savedSearch = SavedSearch()\n savedSearch.isDefaultOnDashboard = True\n savedSearch.name = id.replace(\"_\", \" \")\n id = None\n results = []\n records = []\n term = \"\"\n url = \"\"\n if not savedSearch.isDefaultOnDashboard:\n objType = get_obj_type_from_string(savedSearch.objType)\n resp = get_query_without_request(objType, user.username, savedSearch.searchTerm, \"global\")\n if resp['Result'] == \"ERROR\":\n return resp\n formatted_query = resp['query']\n term = resp['term']\n resp = data_query(objType, user.username, query=formatted_query, count=True)\n results.append({'count': resp['count'],\n 'name': savedSearch.objType}) \n else:\n results = {\"name\":savedSearch.name,\n \"count\":str(len(records)),\n \"type\":get_obj_name_from_title(savedSearch.name)}\n #special url to get the records of a default dashboard since their queries are different \n url = reverse(\"crits.dashboards.views.get_dashboard_table_data\", \n 
kwargs={\"tableName\":str(savedSearch.name.replace(\" \", \"_\"))})\n args = {'term': term,\n 'results': results,\n 'dataUrl':url,\n 'Result': \"OK\"\n }\n if savedSearch:\n args.update({'tableId':id,\n 'tableName': savedSearch.name,\n 'columns': savedSearch.tableColumns,\n 'sortBy': savedSearch.sortBy,\n 'sizex' : savedSearch.sizex,\n 'maxRows': savedSearch.maxRows,\n 'isDefaultOnDashboard': savedSearch.isDefaultOnDashboard,\n })\n if savedSearch.dashboard:\n args[\"currentDash\"] = str(savedSearch.dashboard)\n args[\"dashtheme\"] = Dashboard.objects(id=savedSearch.dashboard).first().theme\n return args", "def store_current_search(self):\n search_query = self.request.GET.urlencode()\n self.request.session[settings.SEARCH_COOKIE_NAME] = search_query", "def search(self, *args, **kwargs): # real signature unknown\n pass", "def test_create_saved_app_map_search(self):\n pass", "def save(self, db):\n db.googleResults.insert_one(\n {\n \"searchQuery\": self.search_query,\n \"title\": self.title,\n \"link\": self.link,\n \"subtext\": self.subtext,\n \"searchterms\" : self.searchterms, # array\n \"queryTime\": datetime.datetime.now(),\n \"details\": self.link_scripts\n }\n )" ]
[ "0.6784601", "0.6628552", "0.65851784", "0.6335346", "0.6261639", "0.6222785", "0.61650014", "0.6154353", "0.6143282", "0.60384107", "0.6017802", "0.6017802", "0.60088694", "0.59932554", "0.5887634", "0.58723336", "0.586306", "0.5861253", "0.5856038", "0.5761087", "0.57178247", "0.5716542", "0.57036376", "0.5689215", "0.5673548", "0.5661247", "0.5658911", "0.5652331", "0.56251186", "0.5622927" ]
0.7173232
0
Update a saved search.
def update(ctx, saved_search_id, payload): payload = parse_payload(ctx, payload) r = SavedSearch(ctx.obj['TOKEN'], ctx.obj['DEBUG']).update(payload) click.echo(json_dumps(r, ctx.obj['PRETTY']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateSavedSearch(self, searchName, queryString):\n facade = self._getFacade()\n if facade.noSaveSearchProvidersPresent():\n return DirectResponse.succeed()\n\n # save the search\n facade.updateSavedSearch(searchName, queryString)\n return DirectResponse.succeed()", "def updateSearch(self, authenticationToken, search):\r\n pass", "def save(self, *args, **kwargs):\n self._update_search_tokens()\n super().save(*args, **kwargs)", "def __on_query_edited(self):\n self.__refresh_search_results()", "def updateSearch(self, authenticationToken, search):\r\n self.send_updateSearch(authenticationToken, search)\r\n return self.recv_updateSearch()", "def update_search_parameters(self, selected_gender, selected_category, selected_subcategory):\r\n self.model.set_gender(selected_gender)\r\n self.model.set_category(selected_category)\r\n self.model.set_subcategory(selected_subcategory)\r\n self.model.fetch_results()", "def update_search_space(self, search_space):\n raise NotImplementedError('Tuner: update_search_space not implemented')", "def on_save_search(self, event):\r\n\r\n search = self.m_searchfor_textbox.GetValue()\r\n if search == \"\":\r\n errormsg(_(\"There is no search to save!\"))\r\n return\r\n dlg = SaveSearchDialog(self, search, self.m_regex_search_checkbox.GetValue())\r\n dlg.ShowModal()\r\n dlg.Destroy()", "def update(self, es, **kwargs):\n pass", "def saveSearch(self, queryString, searchName):\n facade = self._getFacade()\n if facade.noSaveSearchProvidersPresent():\n return DirectResponse.succeed()\n\n creator = self._getLoggedinUserId()\n\n # save the search\n facade.saveSearch(queryString, searchName, creator)\n return DirectResponse.succeed()", "def _(event):\n input_buffer = event.cli.buffers.previous(event.cli)\n search_buffer = event.cli.buffers[SEARCH_BUFFER]\n\n # Update search state.\n if search_buffer.text:\n get_search_state(event.cli).text = search_buffer.text\n\n # Apply search.\n input_buffer.apply_search(get_search_state(event.cli), include_current_position=True)\n\n # Add query to history of search line.\n search_buffer.append_to_history()\n search_buffer.reset()\n\n # Focus previous document again.\n event.cli.pop_focus()", "def update_query(self):\n text = self.lineedit.text()\n self.results = self.lookup(text)\n self.update_ui()\n self.copy_entry(self.table.currentRow(), self.table.currentColumn())", "def _update_search_info(self):\n page_size = int(self._search_data['pageSize'])\n begin_index = int(self._params['beginIndex']) + page_size\n self._params['beginIndex'] = str(begin_index)", "def search(self, search):\n raise NotImplementedError", "def update(self):\r\n self.data = [self.make_item_tuple(i) for i in self.query]\r\n self._fetched = True\r\n query_cache.set(self.iden, self.data)", "def runNewSearch(self):\n self.__searchJob = self.__startSearch()\n\n self.monitorSearchJob()", "def search_settings(self, search_settings):\n\n self._search_settings = search_settings", "def save(self, **kwargs):\n if self.search_terms is None:\n self.search_terms = ''\n super().save(**kwargs)\n return self", "def saveQuery(self, query):\n items_temp = []\n field = self.txtSearchHistory # Initialise Search History textbox as 'field'\n field.config(state='normal') # Enable 'field' for editing (removing and adding texts)\n index = 1\n\n # Iterate through 'field' to check if query made matches previous searches\n for item in field.get(\"1.0\", 'end').splitlines():\n if item:\n if str(item).lower() == query.lower():\n field.delete(str(index) + '.0',\n str(index) + '.end + 1 
char') # Remove text from 'field' if matches with current query\n index += 1\n\n self.txtSearchHistory.insert('1.0', query.capitalize() + \"\\n\") # Insert current query to first line of 'field'\n field.config(state='disabled') # Disable user from changing 'field' text box\n\n # Get updated search history to store in file\n for item in field.get(\"1.0\", 'end').splitlines():\n if item: items_temp.append(item)\n\n # Store queries (past and current) to file\n de.addSearchHist(items_temp)", "def search_text(self, search_text):\n\n self._search_text = search_text", "def after_search(self):\n self.search_number += 1\n\n if not self.store():\n logger.debug('''\n No results to store for keyword: \"{}\" in search engine: {}\n '''.format(\n self.query,\n self.search_engine_name)\n )\n\n if self.progress_queue:\n self.progress_queue.put(1)\n self.cache_results()", "def make_updater(instance):\n components = instance.index_components()\n pk = instance.pk\n\n def on_commit():\n search_vectors = []\n for text, weight in components:\n search_vectors.append(\n SearchVector(Value(text, output_field=TextField()), weight=weight)\n )\n instance.__class__.objects.filter(pk=pk).update(\n search_document=reduce(operator.add, search_vectors)\n )\n\n return on_commit", "def update_search_vector(cls, recipeid):\n\n QUERY = \"\"\"\n UPDATE recipes SET searchdata = setweight(to_tsvector(coalesce(tags_line, '')), 'A')\n || setweight(to_tsvector(coalesce(raw_Search.recipe_title, '')), 'B') ||\n setweight(to_tsvector(coalesce(item_line, '')), 'C')\n FROM raw_Search WHERE raw_Search.recipe_id = recipes.recipe_id\n \"\"\"\n\n db.session.execute(QUERY)\n db.session.commit()\n\n recipe = Recipe.query.filter_by(recipe_id=recipeid).one()\n return recipe", "def update_search_filter(\n *,\n db_session: Session = Depends(get_db),\n search_filter_id: int,\n search_filter_in: SearchFilterUpdate,\n):\n search_filter = get(db_session=db_session, search_filter_id=search_filter_id)\n if not search_filter:\n raise HTTPException(status_code=404, detail=\"A search_filter with this id does not exist.\")\n search_filter = update(\n db_session=db_session, search_filter=search_filter, search_filter_in=search_filter_in\n )\n return search_filter", "def append(self, search):\n query_values = {\n \"id\": str(search.id),\n \"term\": search.term,\n \"timestamp\": search.timestamp\n }\n\n self._cursor.execute(f\"\"\"\n INSERT INTO {self._table_name}\n VALUES (:id, :term, :timestamp);\"\"\", query_values)\n\n self._conn.commit()", "def update(\n self,\n *args: Union[dict, Mapping],\n session: Optional[ClientSession] = None\n ):\n self.set_session(session=session)\n return (\n self.UpdateQueryType(\n document_model=self.document_model,\n find_query=self.get_filter_query(),\n )\n .update(*args)\n .set_session(session=self.session)\n )", "def save(searches):\n # type: (list) -> None\n with Cache(CACHE_URI) as c:\n c.set(SAVED_SEARCH, json.dumps(searches, ensure_ascii=False))", "def updateModel(self):\n pass", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass" ]
[ "0.7534025", "0.7301899", "0.69800997", "0.68826675", "0.67916095", "0.62492573", "0.62392795", "0.62191606", "0.6211305", "0.60335565", "0.5985524", "0.59692556", "0.59547627", "0.5949781", "0.59346545", "0.5899091", "0.58907944", "0.5874972", "0.5809071", "0.5713413", "0.57082725", "0.56955117", "0.56919223", "0.5690056", "0.5656557", "0.5645331", "0.5611697", "0.5592686", "0.55874527", "0.55874527" ]
0.7637842
0
Delete a saved search.
def delete(ctx, saved_search_id): r = SavedSearch(ctx.obj['TOKEN'], ctx.obj['DEBUG']).delete(saved_search_id) click.echo(json_dumps(r, ctx.obj['PRETTY']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def removeSavedSearch(self, searchName):\n facade = self._getFacade()\n if facade.noSaveSearchProvidersPresent():\n return DirectResponse.succeed()\n\n # save the search\n facade.removeSavedSearch(searchName)\n return DirectResponse.succeed()", "def delete(self):\n self.solr.delete(q=self.q)", "def delete(saved_query):\n saved_query.delete()", "def DeleteSearch(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def delete(self):\n\n cursor = self._conn.cursor()\n cursor.execute(\"DELETE FROM saves\")\n self._conn.commit()", "def solr_delete(self, **kwargs):\n\n solrconn = solr.SolrConnection(settings.SOLR_SERVER)\n solrconn.delete_query(\"uuid:{0}\".format(str(self.uuid)))\n\n if kwargs.get('commit', True):\n solrconn.commit()", "def delete(self, keyword, key):", "def delete():", "def remove(query):\n # type: (str) -> bool\n if not query or not SEARCH_SAVED:\n return False\n searches = retrieve()\n if query in searches:\n searches.remove(query)\n save(searches)\n return True\n return False", "def delete(self, where=None):\n\n return self._delete(\"\", where)", "def delete_document(self):\n pass", "def __deleteSave(self) -> None:\n os.remove(self.save_location)", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def test_delete_saved_app_map_search(self):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete(self):\n ...", "def test_model_object_delete(self):\n car = Car.objects.first()\n car.delete_from_appsearch()\n self.assertEqual(self.client_destroy.call_count, 1)", "def delete(self):\n self.request().delete()", "def _post_delete_hook(klass, key, future):\n if klass.get_kind(key) in config.indexed_models:\n logging.info(\"Removing hard-deleted content from search: {}\"\n .format(key.id()))\n search.Index(config.content_index).delete(key.id())", "def delete(self):\n self.dbm().model_delete(self)", "def delete(self):\n expr = self.model.__table__.delete().where(self.query)\n return self._runquery(expr)", "def deletionsearch(apiKey, payload):\r\n if apiKey is None and os.path.exists(KEY_FILE):\r\n apiKey = _get_saved_key(apiKey)\r\n url = '{}/deletionsearch'.format(USGS_API_ENDPOINT)\r\n payload = {\r\n \"jsonRequest\": payloads.deletionsearch(apiKey, **payload)\r\n }\r\n logger.debug(\"API call URL: {}\".format(url))\r\n logger.debug(\"API call payload: {}\".format(payload))\r\n response = requests.post(url, payload).json()\r\n logger.debug(\"Received response:\\n{}\".format(json.dumps(response, indent=4)))\r\n _catch_usgs_error(response)\r\n\r\n return response", "def test_delete__DeleteForm__2(search_data, browser):\n browser.login('mgr')\n browser.keyword_search('church', 'Delete')\n # Seleting the `cancel` button leads to the person list without deleting\n # anybody:\n browser.getControl('No, cancel').click()\n assert 'Deletion canceled.' 
== browser.message\n assert browser.PERSONS_LIST_URL == browser.url\n assert 'Koch' in browser.contents\n assert 'Liebig' in browser.contents\n assert 'Velleuer' in browser.contents", "def delete(self, query):\n self.collection.remove(query)", "def deleteMatches():\n db = connect()\n c = db.cursor()\n query = (\"DELETE FROM results;\")\n c.execute(query)\n db.commit()\n db.close()", "def delete_search_task(self, args=None):\r\n result = {\"Task\": \"DeleteSearchTask\", \"Status\": \"Deleted\", \"Error\": \"NoError\", \"JobID\": args}\r\n\r\n with EndaceWebSession(app_url=self.applianceurl, username=self.username, password=self.password,\r\n cert_verify=self.cert_verify) as sess:\r\n api = EndaceVisionAPIAdapter(sess)\r\n path = \"files\"\r\n rd = api.get(path)\r\n if rd.status_code == 200:\r\n path = \"queries/\" + args\r\n dr = api.delete(path)\r\n if dr.status_code == 200:\r\n try:\r\n response = dr.json()\r\n except json.decoder.JSONDecodeError:\r\n raise Exception(f\"JsonDecodeError - path {path}\")\r\n else:\r\n meta = response.get('meta', {})\r\n if meta:\r\n meta_error = meta.get(\"error\")\r\n if meta_error is not None:\r\n if meta_error is not False:\r\n result['Status'] = \"complete\"\r\n result['Error'] = str(meta_error)\r\n else:\r\n result['Status'] = \"Failed\"\r\n result['Error'] = f\"ServerError - empty meta data from {path}\"\r\n else:\r\n result['Status'] = \"Failed\"\r\n result['Error'] = f\"ServerError - HTTP {rd.status_code} to /{path}\"\r\n\r\n if result['Status'] == 'Failed':\r\n self.handle_error_notifications(result['Error'])\r\n return result" ]
[ "0.71051794", "0.6984621", "0.6617019", "0.64270055", "0.62136894", "0.6194005", "0.6189915", "0.61731374", "0.6161926", "0.61286575", "0.6077793", "0.60397846", "0.6024016", "0.6024016", "0.6024016", "0.6024016", "0.60068905", "0.59796524", "0.59796524", "0.59375215", "0.59159416", "0.5904847", "0.58847576", "0.5839957", "0.58358985", "0.58267725", "0.58119094", "0.5786463", "0.5776464", "0.5773078" ]
0.8261131
0
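
> Editor's note (not a dataset record): the four saved-search commands above share one click pattern — a group stores `TOKEN`/`DEBUG`/`PRETTY` on `ctx.obj`, and each subcommand reads them. A minimal skeleton of that wiring; the option names and the group name are illustrative, not taken from the original project:

```python
# Assumed click wiring for the show/create/update/delete commands above.
import json
import click

@click.group()
@click.option("--token", envvar="API_TOKEN")
@click.option("--debug/--no-debug", default=False)
@click.option("--pretty/--no-pretty", default=True)
@click.pass_context
def saved_search(ctx, token, debug, pretty):
    # Shared state every subcommand reads via ctx.obj[...]
    ctx.obj = {"TOKEN": token, "DEBUG": debug, "PRETTY": pretty}

@saved_search.command()
@click.argument("saved_search_id")
@click.pass_context
def show(ctx, saved_search_id):
    r = {"id": saved_search_id}  # stand-in for the real API call
    click.echo(json.dumps(r, indent=2 if ctx.obj["PRETTY"] else None))
```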
Extend the size of an image by adding borders. The sides argument defaults to 'ltrb', i.e. all four sides (left, top, right, bottom).
def add_border(image: np.ndarray, width=2, value=0, sides='ltrb'): result = image sides = sides.upper() if 'L' in sides: result = add_left(result, width, value) if 'T' in sides: result = add_top(result, width, value) if 'R' in sides: result = add_right(result, width, value) if 'B' in sides: result = add_bottom(result, width, value) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_border(original_img,border_size):\r\n new_image=SimpleImage.blank(2 * border_size + original_img.width ,2 * border_size + original_img.height )\r\n\r\n \"\"\"\r\n Task 2: Creating black border\r\n \r\n \"\"\"\r\n for y in range(new_image.height):\r\n for x in range(new_image.width):\r\n\r\n # converting all the border pixels to black\r\n\r\n if x <= border_size or x >= border_size + original_img.height or y <= border_size or y >= border_size + original_img.width:\r\n\r\n px=new_image.get_pixel(x,y)\r\n px.red=0\r\n px.green=0\r\n px.blue=0\r\n\r\n # replacing the pixels other than the border with inage\r\n\r\n else:\r\n\r\n new_image.get_pixel(x,y).red=original_img.get_pixel(x-border_size, y-border_size).red\r\n new_image.get_pixel(x,y).blue = original_img.get_pixel(x - border_size, y - border_size).blue\r\n new_image.get_pixel(x,y).green = original_img.get_pixel(x - border_size, y - border_size).green\r\n\r\n\r\n return new_image", "def draw_borders(img):\n ret = img.copy()\n ret[0, :] = GRAY # top\n ret[-1, :] = GRAY # bottom\n ret[:, 0] = GRAY # left\n ret[:, -1] = GRAY # right\n return ret", "def addBorders(img, top=0, bottom=0, left=0, right=0, borderType='constant', color='black'):\n\n def _checks(img, top, bottom, left, right, borderType, color):\n intdetector(top)\n intdetector(bottom)\n intdetector(left)\n intdetector(right)\n if top < 0 or bottom < 0 or left < 0 or right < 0:\n raise ValueError(\"Values must be over zero\")\n stringdetector(borderType)\n borderType = borderType.lower()\n if borderType not in ['constant', 'reflect', 'default', 'replicate']:\n raise ValueError(\"Border types are 'constant', 'reflect', 'default' and 'replicate'\")\n if borderType == 'constant':\n stringdetector(color)\n color = color.lower()\n if color not in ['black', 'white']:\n raise ValueError(\"Supported colors are 'black' and 'white'\")\n return borderType, color\n\n \n def _borderChoice(borderType):\n if borderType == 'constant':\n return cv2.BORDER_CONSTANT\n elif borderType == 'reflect':\n return cv2.BORDER_REFLECT\n elif borderType == 'default':\n return cv2.BORDER_DEFAULT\n else:\n return cv2.BORDER_REPLICATE\n\n\n def _colorChoice(color):\n if color == 'black':\n return [0, 0, 0]\n else:\n return [255, 255, 255]\n\n\n borderType, color = _checks(img, top, bottom, left, right, borderType, color)\n border = _borderChoice(borderType)\n\n if top == 0 and bottom == 0 and left == 0 and right == 0:\n return img\n\n if borderType == 'constant':\n color = _colorChoice(color)\n return cv2.copyMakeBorder(img, top, bottom, left, right, border, value=color)\n else:\n return cv2.copyMakeBorder(img, top, bottom, left, right, border)", "def create_half_pic(self):\n number_of_line = 0\n for i in range(*self.horizontal_pic_border):\n number_of_column = self.parameter//2\n for j in range(*self.vertical_pic_border):\n if number_of_column > (number_of_line + 1):\n self.flag[i][j] = ' '\n elif number_of_column == (number_of_line + 1):\n self.flag[i][j] = CIRCLE_BORDER\n else:\n self.flag[i][j] = CIRCLE_INNER\n number_of_column -= 1\n mirror_update(self.flag[i])\n number_of_line += 1", "def make_border(data, wanted_height: int, wanted_width: int):\n current_height = data.shape[0]\n current_width = data.shape[1]\n add_sides = (wanted_width - current_width) // 2\n add_top_bottom = (wanted_height - current_height) // 2\n\n border_img = cv2.copyMakeBorder(data, add_top_bottom, add_top_bottom, add_sides, add_sides, cv2.BORDER_CONSTANT,\n value=[0, 0, 0])\n\n return border_img", "def 
reduce_whitespace(self, border: int = 5) -> None:\n if self.img is None:\n raise FileExistsError(\"Load an image first with from_url.\")\n\n pix = np.asarray(self.img)\n\n pix = pix[:, :, 0:3] # Drop the alpha channel\n idx = np.where(pix - 255)[0:2] # Drop the color when finding edges\n bbox = list(map(min, idx))[::-1] + list(map(max, idx))[::-1]\n larger_box = add_whitespace(bbox, border)\n\n self.img = self.img.crop(larger_box)", "def brandify_img(img, img_name, border_width_percentage=0.10, \\\nborder_width_cap=150, inner_border_width=7, \\\ninner_border_color=(50, 255, 50, 0), outer_border_color=(50, 50, 50, 0)):\n if img_name.startswith(\"Modified-\"):\n return\n width = img.size[0]\n height = img.size[1]\n # A new image that will be embiggened to fit the border\n border_width = int(float(width + height) / 2 * border_width_percentage)\n if border_width > border_width_cap:\n border_width = border_width_cap\n total_width = width + border_width * 2\n total_height = height + border_width * 2\n img.convert(mode=\"RGBA\")\n bordered_img = PIL.Image.new(\"RGBA\", img.size)\n bordered_img = PIL.ImageOps.expand(img, border=border_width)\n draw = PIL.ImageDraw.Draw(bordered_img)\n \n # Fill in the border for the image\n # The inner part of the border will be later covered up by the image itself\n draw_border_gradient(bordered_img, center_color=inner_border_color, \\\n outer_color=outer_border_color, excluding_region=(border_width, \\\n width + border_width, border_width, height + border_width))\n \n # Inner border\n # \"bb\" stands for border for border\n bb_color = (255 - 50, 255 - 50, 255 - 50, 255)\n draw.rectangle((border_width - inner_border_width, border_width - \\\n inner_border_width, width + border_width + inner_border_width, height + \\\n border_width + inner_border_width), fill=bb_color)\n \n # Add the original image\n bordered_img.paste(img, (border_width, border_width, width + border_width, \\\n height + border_width))\n \n # Add some basic design to the border\n # Add lines on each corner\n # Order is top left, top right, bottom left, bottom right\n draw.line((0, 0, border_width, border_width), fill=bb_color, width=3)\n draw.line((total_width, 0, total_width - border_width, border_width), \\\n fill=bb_color, width=3)\n draw.line((0, total_height, border_width, total_height - border_width), \\\n fill=bb_color, width=3)\n draw.line((total_width, total_height, total_width - border_width, \\\n total_height - border_width), fill=bb_color, width=3)\n \n # Apply the brand image\n new_width = int((float(brand_image.size[0]) / brand_image.size[1]) * \\\n border_width)\n new_height = border_width - inner_border_width\n resized_img = brand_image.resize((new_width, new_height))\n bordered_img.paste(resized_img, box=((width - resized_img.size[0] / 2) \\\n / 2, 0), mask=resized_img)\n \n bordered_img.save(image_directory + os.sep + \"Modified-\" + img_name)", "def setImageDimensions(*args):", "def addBorder(img, flag=0, top=10, bottom=10, left=10, right=10, color = (255,0,0)):\n\tif flag != 0:\n\t\tborderImg = cv2.copyMakeBorder(img, top, bottom, left, right, flag)\n\t\treturn borderImg\n\telif flag == 0:\n\t\tborderImg = cv2.copyMakeBorder(img, top, bottom, left, right, flag, value=color)\n\t\treturn borderImg\n\telse:\n\t\tprint \"ERROR: AddBorder: Invalid Flag\"\n\t\tsys.exit()", "def process_image(im, border_size=5, im_size=50):\n\n\tim = im[border_size:-border_size, border_size:-border_size]\n\n\t\n\t'''for i in range(0,len(im)):\n\t\tfor j in range(0,len(im[i])):\n\t\t\tim[i][j] = 255 if 
im[i][j] > 64 else 0'''\n\t\t\t\t\n\tim = resize(im, (im_size, im_size))\n\n\treturn im", "def tile_border(draw, r_s, r_e, c_s, c_e, color, border_size=TILE_BORDER_SIZE):\n for x in range(0, border_size):\n draw.rectangle([(c_s + x, r_s + x), (c_e - 1 - x, r_e - 1 - x)], outline=color)", "def expand_rect_padding(img_path, padding_x, padding_top, padding_bottom, out_path):\n pil_image_frame = Image.open(img_path)\n im_width, im_height = pil_image_frame.size \n \n n_width = im_width + 2 * padding_x\n n_height = im_height + padding_top + padding_bottom\n \n old_size = (im_width, im_height)\n new_size = (n_width, n_height)\n new_im = Image.new(\"RGB\", new_size, \"white\") \n new_im.paste(pil_image_frame, ((new_size[0]-old_size[0])/2, padding_top)) # insert image into center of new canvas with vertical shift = padding_top \n\n new_im.save(out_path, \"JPEG\")", "def add_border(im, border_width, value):\n assert (im.ndim == 3) and (im.shape[0] == 3)\n im = np.copy(im)\n\n if isinstance(value, np.ndarray):\n # reshape to [3, 1, 1]\n value = value.flatten()[:, np.newaxis, np.newaxis]\n im[:, :border_width, :] = value\n im[:, -border_width:, :] = value\n im[:, :, :border_width] = value\n im[:, :, -border_width:] = value\n\n return im", "def pad_image(img, output_path, pad_size=[8,8,8,8], buckets=None):\n top, left, bottom, right = pad_size\n old_im = Image.open(img)\n old_size = (old_im.size[0] + left + right, old_im.size[1] + top + bottom)\n new_size = get_new_size(old_size, buckets)\n new_im = Image.new(\"RGB\", new_size, (255,255,255))\n new_im.paste(old_im, (left, top))\n new_im.save(output_path)", "def mask_border(self, left=3, right=3, top=3, bottom=3):\n self.MaskPrefix = 'b' + self.MaskPrefix #prepend 'b' for border\n print('Masking edge pixels: left={0}, right={1}, top={2}, bottom={3}'.format(left,right,top,bottom))\n for ig in self.Set:\n igram = self.load_ma(ig)\n igram[:top,:] = ma.masked\n igram[-bottom:,:] = ma.masked\n igram[:,:left] = ma.masked\n igram[:,-right:] = ma.masked\n mskFile = self.MaskPrefix + 'Mask_' + ig.Name[:-4]\n np.save(os.path.join(self.ProcDir, mskFile), igram.mask)\n print(mskFile)\n print('mask_border() complete: {0} interferograms'.format(self.Set.Nig))", "def pad_image(img_path, width, height, pad_type, value=(0, 0, 0)):\n\n\n\n def get_left_right(margin_width):\n if margin_width % 2 == 0:\n left = margin_width // 2\n right = margin_width // 2\n else:\n left = margin_width // 2\n right = margin_width // 2 + 1\n return left, right\n\n def get_top_bottom(margin_height):\n if margin_height % 2 == 0:\n top = margin_height // 2\n bottom = margin_height // 2\n else:\n top = margin_height // 2\n bottom = margin_height // 2 + 1\n return top, bottom\n \n img = cv2.imread(img_path)\n h, w, _ = img.shape\n img_ratio=h/w\n target_ratio=height/width\n margin_width = width - w\n margin_height = height - h\n # if h >= height and w >= width:\n margin_width = abs(margin_width)\n margin_height = abs(margin_height)\n if img_ratio<target_ratio:\n resize_image(img_path, width=width, type='scale')\n img = cv2.imread(img_path)\n _h, _w, _ = img.shape\n # print(\"new h:\",_h)\n # print(\"new w:\",_w)\n # print(\"t w:\",width)\n # print(\"t h:\",height)\n margin_height = height - _h\n top, bottom = get_top_bottom(margin_height)\n # print(top,bottom)\n img = cv2.copyMakeBorder(img, top, bottom, 0, 0, pad_type, value=value)\n\n else:\n resize_image(img_path, height=height, type='scale')\n img = cv2.imread(img_path)\n _h, _w, _ = img.shape\n margin_width = width - _w\n left, right = 
get_left_right(margin_width)\n img = cv2.copyMakeBorder(img, 0, 0, left, right, pad_type, value=value)\n\n # elif h <= height and w <= width:\n # img = cv2.resize(img, (width, height))\n # elif h >= height:\n # img = cv2.resize(img, (w, height))\n # h, w, _ = img.shape\n # left, right = get_left_right(margin_width)\n # img = cv2.copyMakeBorder(img, 0, 0, left, right, pad_type, value=value)\n # elif w >= width:\n # img = cv2.resize(img, (width, h))\n # h, w, _ = img.shape\n # top, bottom = get_top_bottom(margin_height)\n # img = cv2.copyMakeBorder(img, top, bottom, 0, 0, pad_type, value=value)\n cv2.imwrite(img_path, img)", "def _add_border(self):\n top = TopWallCell(self)\n left = SideWallCell(self, False)\n right = SideWallCell(self, True)\n for col in range(self._columns):\n self.cell_at(col, self._rows - 1, top)\n for row in range(self._rows):\n self.cell_at(0, row, left)\n self.cell_at(self._columns - 1, row, right)", "def make_layers(self):\n w, h = self.image.get_size()\n shrink = pg.transform.smoothscale(self.image, (w//2, h//2))\n self.mid_image = tools.tile_surface((w,h), shrink, True)\n shrink = pg.transform.smoothscale(self.image, (w//4, h//4))\n self.base = tools.tile_surface(prepare.SCREEN_SIZE, shrink, True)", "def resize_as_min_side(im, masks, mask, boxes, classes, min_side, max_side):\n h, w, c = im.shape\n n = classes.size\n im_shape = im.shape\n im_size_min = np.min(im_shape[0:2])\n im_size_max = np.max(im_shape[0:2])\n im_scale = float(min_side) / float(im_size_min)\n if np.round(im_scale * im_size_max) > max_side:\n im_scale = float(max_side) / float(im_size_max)\n\n new_w, new_h = int(im_scale * w), int(im_scale * h)\n im = cv2.resize(im, (new_w, new_h))\n mask = cv2.resize(mask, (new_w, new_h), interpolation=cv2.INTER_NEAREST)\n boxes *= im_scale\n if masks.size > 0:\n masks = np.transpose(masks, (1, 2, 0)) # to (h, w, n)\n masks = cv2.resize(masks, (new_w, new_h), interpolation=cv2.INTER_NEAREST)\n if masks.ndim > 2:\n masks = np.transpose(masks, (2, 0, 1)) # to (n, h, w)\n else:\n masks = masks.reshape((1, new_h, new_w))\n\n return im, masks, mask, boxes, classes, im_scale", "def update_border(self):\n # side borders\n for row_position in range(NUM_ROWS+BORDER_WIDTH*2):\n self.stdscr.addstr(row_position, 0, '|', curses.color_pair(7))\n self.stdscr.addstr(row_position, NUM_COLUMNS*BLOCK_WIDTH+1, '|', curses.color_pair(7))\n # top and bottom borders\n for column_position in range(NUM_COLUMNS*BLOCK_WIDTH+BORDER_WIDTH*2):\n self.stdscr.addstr(0, column_position, '-', curses.color_pair(7))\n self.stdscr.addstr(NUM_ROWS+1, column_position, '-', curses.color_pair(7))", "def imBox(self, width, height):\n img = Image.new(\"1\", (width, height))\n draw = ImageDraw.Draw(img)\n bgColor=255\n draw.rectangle((0,0) + img.size,fill=bgColor)\n return img", "def __padding(self, image, boxes, height, width):\n temp = boxes[:, :4].astype(np.int)\n y1 = np.where(temp[:, 0] < 0)[0]\n if len(y1) > 0:\n temp[y1, 0] = 0\n x1 = np.where(temp[:, 1] < 0)[0]\n if len(x1) > 0:\n temp[x1, 0] = 0\n y2 = np.where(temp[:, 2] > image.shape[0] - 1)[0]\n if len(y2) > 0:\n temp[y2, 0] = image.shape[0] - 1\n x2 = np.where(temp[:, 3] > image.shape[1] - 1)[0]\n if len(x2) > 0:\n temp[x2, 0] = image.shape[1] - 1\n pad_top = np.abs(temp[:, 0] - boxes[:, 0]).astype(np.int)\n pad_left = np.abs(temp[:, 1] - boxes[:, 1]).astype(np.int)\n pad_bottom = np.abs(temp[:, 2] - boxes[:, 2]).astype(np.int)\n pad_right = np.abs(temp[:, 3] - boxes[:, 3]).astype(np.int)\n input_data = np.empty([boxes.shape[0], 3, height, 
width], dtype=np.float32)\n for i in range(boxes.shape[0]):\n crop_img = image[temp[i, 0]:temp[i, 2] + 1, temp[i, 1]:temp[i, 3] + 1, :]\n crop_img = cv2.copyMakeBorder(crop_img, pad_top[i], pad_bottom[i], \\\n pad_left[i], pad_right[i], cv2.BORDER_CONSTANT, value=0)\n if crop_img is None:\n continue\n crop_img = cv2.resize(crop_img, (width, height)).astype(np.float32)\n crop_img[:, :, 0] -= self.mean[0]\n crop_img[:, :, 1] -= self.mean[1]\n crop_img[:, :, 2] -= self.mean[2]\n crop_img *= self.scale_factor\n crop_img = np.transpose(crop_img, (2, 0, 1))\n input_data[i] = crop_img.copy()\n return input_data", "def resize_image(self, im, max_side_len=512):\n h, w, _ = im.shape\n\n resize_w = w\n resize_h = h\n\n # limit the max side\n if max(resize_h, resize_w) > max_side_len:\n ratio = float(max_side_len) / resize_h if resize_h > resize_w else float(max_side_len) / resize_w\n else:\n ratio = 1.\n resize_h = int(resize_h * ratio)\n resize_w = int(resize_w * ratio)\n\n resize_h = resize_h if resize_h % 32 == 0 else (resize_h // 32 - 1) * 32\n resize_w = resize_w if resize_w % 32 == 0 else (resize_w // 32 - 1) * 32\n im = cv2.resize(im, (int(resize_w), int(resize_h)))\n\n ratio_h = resize_h / float(h)\n ratio_w = resize_w / float(w)\n\n return im, (ratio_h, ratio_w)", "def create_border(self, grid_size, scale):\n # self.array.clear()\n\n for j in range(grid_size):\n for i in range(grid_size):\n if (i == 0 or i == grid_size-1) or (j == 0 or j == grid_size-1):\n self.array.append((i*scale, j*scale))\n self.array_length += 1\n\n # print(self.array)", "def resize_image_to_square(img, side, pad_cval=0, dtype=np.float64):\n\n if len(img.shape) == 2:\n h, w = img.shape\n if h == w:\n padded = img.copy()\n elif h > w:\n padded = np.full((h, h), pad_cval, dtype=dtype)\n l = int(h / 2 - w / 2) # guaranteed to be non-negative\n r = l + w\n padded[:, l:r] = img.copy()\n else:\n padded = np.full((w, w), pad_cval, dtype=dtype)\n l = int(w / 2 - h / 2) # guaranteed to be non-negative\n r = l + h\n padded[l:r, :] = img.copy()\n elif len(img.shape) == 3:\n h, w, ch = img.shape\n if h == w:\n padded = img.copy()\n elif h > w:\n padded = np.full((h, h, ch), pad_cval, dtype=dtype)\n l = int(h / 2 - w / 2) # guaranteed to be non-negative\n r = l + w\n padded[:, l:r, :] = img.copy()\n else:\n padded = np.full((w, w, ch), pad_cval, dtype=dtype)\n l = int(w / 2 - h / 2) # guaranteed to be non-negative\n r = l + h\n padded[l:r, :, :] = img.copy()\n else:\n raise Exception('only images of 2d and 3d shape are accepted')\n\n resized_img = resize(padded, output_shape=(side, side))\n\n return resized_img", "def pad(img, pad_size=32):\n\n if pad_size == 0:\n return img\n\n height, width = img.shape[:2]\n\n if height % pad_size == 0:\n y_min_pad = 0\n y_max_pad = 0\n else:\n y_pad = pad_size - height % pad_size\n y_min_pad = int(y_pad / 2)\n y_max_pad = y_pad - y_min_pad\n\n if width % pad_size == 0:\n x_min_pad = 0\n x_max_pad = 0\n else:\n x_pad = pad_size - width % pad_size\n x_min_pad = int(x_pad / 2)\n x_max_pad = x_pad - x_min_pad\n\n img = cv2.copyMakeBorder(img, y_min_pad, y_max_pad, x_min_pad, x_max_pad, cv2.BORDER_REFLECT_101)\n\n return img, (x_min_pad, y_min_pad, x_max_pad, y_max_pad)", "def borders(w, h):\r\n pygame.draw.line(window, WHITE, [25, 0], [25, h - 50], 6)\r\n pygame.draw.line(window, WHITE, [w - 25, 0], [w - 25, h - 50], 6)\r\n pygame.draw.line(window, WHITE, [25, h - 50], [w - 25, h - 50], 6)\r\n pygame.draw.line(window, WHITE, [25, 25], [w - 25, 25], 6)", "def concat_images_corner(imga, imgb, 
xoffset=0, yoffset=0, direction='horizontal',\n ontop=True, adjust_z=False):\n if direction == 'horizontal':\n max_dim = np.maximum.reduce([imga.shape, imgb.shape])\n\n offset = (abs(yoffset), abs(xoffset))\n tmp_offset = np.array(offset)\n\n # if (max_dim == imgb.shape).all():\n # tmp = np.copy(imgb)\n # imgb = np.copy(imga)\n # imga = np.copy(tmp)\n # ontop = toggle(ontop)\n # xoffset *= -1\n # yoffset *= -1\n\n # center_new = np.array(np.divide(max_dim, 2), dtype=int)\n new_img = np.full(np.add(max_dim, np.abs(offset)), np.nan)\n\n Sa0 = slice(0, imga.shape[0])\n Sa1 = slice(0, imga.shape[1])\n Sb0 = slice(abs(yoffset), abs(yoffset) + imgb.shape[0])\n Sb1 = slice(abs(xoffset), abs(xoffset) + imgb.shape[1])\n\n xdir = np.sign(xoffset)\n ydir = np.sign(yoffset)\n\n if ydir == 0:\n ydir = 1\n if xdir == 0:\n xdir = 1\n\n imga = imga[::ydir, ::xdir]\n imgb = imgb[::ydir, ::xdir]\n\n if adjust_z:\n top_img = 1 * new_img\n top_img[Sa0, Sa1] = imga\n top_img[Sb0, Sb1] = imgb\n low_img = 1 * new_img\n low_img[Sb0, Sb1] = imgb\n low_img[Sa0, Sa1] = imga\n\n diff = top_img - low_img\n m = np.nanmean(diff)\n s = np.nanstd(diff)\n mask = np.abs(diff) < m + s\n diff[mask] = np.nan\n add = np.nanmean(diff)\n\n print(add)\n\n imgb -= add\n\n if ontop:\n new_img[Sa0, Sa1] = imga\n new_img[Sb0, Sb1] = imgb\n else:\n new_img[Sb0, Sb1] = imgb\n new_img[Sa0, Sa1] = imga\n\n return new_img[::ydir, ::xdir]", "def plot_image(image_origin, boxes, pad_h, pad_w):\n cmap = plt.get_cmap(\"Set1\")\n class_labels = config.CUSTOM_CLASSES\n colors = [cmap(i) for i in np.linspace(0, 1, len(class_labels))]\n \n origin_h, origin_w, _ = image_origin.shape\n\n # Create figure and axes\n fig, ax = plt.subplots(1)\n # Display the image\n ax.imshow(image_origin)\n\n # box[0] is x midpoint, box[2] is width\n # box[1] is y midpoint, box[3] is height\n\n # Create a Rectangle patch\n for box in boxes:\n assert len(box) == 6, \"box should contain class pred, confidence, x, y, width, height\"\n class_pred = box[0]\n box = box[2:]\n upper_left_x = box[0] - box[2] / 2\n upper_left_y = box[1] - box[3] / 2\n\n rect = patches.Rectangle(\n (int((upper_left_x * config.IMAGE_SIZE - pad_w) * (origin_w / (config.IMAGE_SIZE - 2*pad_w))), \n int((upper_left_y * config.IMAGE_SIZE - pad_h) * (origin_h / (config.IMAGE_SIZE - 2*pad_h)))),\n box[2] * (origin_w * config.IMAGE_SIZE / (config.IMAGE_SIZE - 2*pad_w)),\n box[3] * (origin_h * config.IMAGE_SIZE / (config.IMAGE_SIZE - 2*pad_h)),\n linewidth=2,\n edgecolor=colors[int(class_pred)],\n facecolor=\"none\",\n )\n ax.add_patch(rect)\n\n plt.show()", "def build_border(self, kind, size=np.array([[100], [100]])):\n border_size = np.array([100, 100]).reshape(-1, 1)\n if kind == \"wall\":\n self.border = Wall(border_size)\n elif kind == \"wrap\":\n self.border = Toric(border_size)\n elif kind == \"none\":\n self.border = Infinite(border_size)" ]
[ "0.6268577", "0.62324625", "0.607379", "0.597834", "0.5885386", "0.58061814", "0.5716965", "0.5715706", "0.5707968", "0.5658754", "0.5645058", "0.56255275", "0.55907595", "0.5515744", "0.543367", "0.53568774", "0.5346766", "0.53422046", "0.53231376", "0.5271381", "0.52616733", "0.52441955", "0.5235134", "0.5232481", "0.5207687", "0.5203015", "0.5202438", "0.5168493", "0.51603764", "0.51580137" ]
0.68208927
0
Horizontally concatenate a list of images with a border. This is similar to numpy's hstack except that it adds a border around each image. The borders can be controlled with the optional border_width and border_value arguments. See also vstack.
def hstack(images, border_width=2, border_value=0):
    if border_width == 0: return np.hstack(images)
    T, V = border_width, border_value
    result = []
    for image in images[:-1]:
        result.append(add_border(image, T, V, 'LTB'))
    result.append(add_border(images[-1], T, V))
    return np.hstack(result)
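A minimal usage sketch, assuming the add_border helper referenced above pads an image with border_width pixels of border_value on the sides named by its last argument (all four by default):

import numpy as np

# Two small grayscale images to place side by side.
a = np.full((4, 4), 255, dtype=np.uint8)
b = np.full((4, 6), 128, dtype=np.uint8)

# Every image except the last omits its right border ('LTB'), so
# interior borders are not doubled; the last image is padded on all sides.
row = hstack([a, b], border_width=2, border_value=0)

# Heights grow by the top+bottom borders (4 + 2*2 = 8); widths grow by one
# left border per image plus the final right border (4+6 + 3*2 = 16).
assert row.shape == (8, 16)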
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vstack(images, border_width=2, border_value=0):\n if border_width == 0: return np.vstack(images)\n T, V = border_width, border_value\n result = []\n for image in images[:-1]:\n result.append(add_border(image, T, V, 'LTR'))\n result.append(add_border(images[-1], T, V))\n return np.vstack(result)", "def join_images_horizontally(images):\n array = np.concatenate((images[0], images[1]), axis=1)\n return Image.fromarray(np.uint8(array))", "def concat_images(images, axis=0):\n # Get the width and the heights\n widths, heights = zip(*(i.size for i in images))\n\n # Initalize an offset to append the next image to the end of the previous\n offset = 0\n\n # Concatenate along the lines\n if axis == 1:\n # Get the width of the final image and the height\n max_width = max(widths)\n total_height = sum(heights)\n # Initalize the final image with the first subimage\n new_im = Image.new(images[0].mode, (max_width, total_height))\n\n # Append all consecutive images\n for im in images:\n new_im.paste(im, (0, offset))\n offset += im.size[1]\n\n # Concatenate along the columns\n else:\n # Get the width and the height of the final image\n total_width = sum(widths)\n max_height = max(heights)\n # Initalize the final image with the first subimage\n new_im = Image.new(images[0].mode, (total_width, max_height))\n\n # Append all consecutive images\n for im in images:\n new_im.paste(im, (offset, 0))\n offset += im.size[0]\n\n return new_im", "def makestack(inputList):\n stack = []\n for img in inputList:\n if img.ndim == 2:\n stack.append(img[numpy.newaxis, ...])\n elif img.ndim == 3:\n stack.append(img)\n \n return numpy.concatenate(stack, axis=0)", "def stack_images(cls, imgs, horizontal=True):\n assert isinstance(imgs, (list, tuple))\n if horizontal:\n H = max([i.shape[0] for i in imgs])\n W = sum([i.shape[1] for i in imgs])\n stacked_img = np.full((H, W, 3), 255, dtype=np.uint8)\n offset_w = 0\n for i in imgs:\n stacked_img[:i.shape[0], offset_w:offset_w +\n i.shape[1], :] = i.data\n offset_w += i.shape[1]\n else:\n H = sum([i.shape[0] for i in imgs])\n W = max([i.shape[1] for i in imgs])\n stacked_img = np.full((H, W, 3), 255, dtype=np.uint8)\n offset_h = 0\n for i in imgs:\n stacked_img[offset_h:offset_h +\n i.shape[0], :i.shape[1], :] = i.data\n offset_h += i.shape[0]\n\n return cls(stacked_img)", "def merge_images(images, axis=0):\n assert axis in [0, 1]\n total_len = sum(map(lambda i: i.size[axis], images))\n if axis == 0:\n new_shape = (total_len, images[0].size[1])\n step = images[0].size[0]\n else:\n new_shape = (images[0].size[0], total_len)\n step = images[0].size[1]\n\n canvas = Image.new('RGB', new_shape)\n\n shift = 0\n for image in images:\n if axis == 0:\n canvas.paste(image, (shift, 0))\n else:\n canvas.paste(image, (0, shift))\n \n shift += image.size[axis]\n\n return canvas", "def concat_images_horiz(imga, imgb):\n ha,wa = imga.shape[:2]\n hb,wb = imgb.shape[:2]\n max_height = np.max([ha, hb])\n total_width = wa+wb\n new_img = np.zeros(shape=(max_height, total_width, 3), dtype=np.uint8)\n new_img[:ha,:wa]=imga\n new_img[:hb,wa:wa+wb]=imgb\n return new_img", "def concat_n_images(images,direction='H'):\n if(len(images)==1):\n output=images[0]\n elif isinstance(images, np.ndarray):\n output=images\n else: \n concatenator={'H':concat_images_horiz,'V':concat_images_vert}[direction]\n output = None\n for i, img in enumerate(images):\n \n if(len(img.shape)==2):\n img=np.expand_dims(img,2)\n \n if i==0:\n output = img[:,:,:3]\n else:\n output = concatenator(output,(img)[:,:,:3])\n return output", 
"def glue(shreds):\n width = sum([s.width for s in shreds])\n height = shreds[0].height\n img = Image.new(\"RGBA\", (width, height))\n x_dest = 0 # x coordinate of insertion point.\n for s in shreds:\n img.paste(s.image, (x_dest, 0))\n x_dest += s.width # Move insertion point for next shred.\n return img", "def combine_pictures(images):\n widths, heights = zip(*(i.size for i in images))\n\n total_width = sum(widths)\n max_height = max(heights)\n\n new_im = Image.new('RGB', (total_width, max_height))\n\n x_offset = 0\n for im in images:\n new_im.paste(im, (x_offset, 0))\n x_offset += im.size[0]\n\n new_im.save('test.jpg')\n\n return True", "def horizontalConcat(image1, image2):\n shape1 = image1.shape\n shape2 = image2.shape\n if shape1[0] > shape2[0]:\n resizeMaintainAspectRatio(image2, height=shape1[0])\n elif shape2[0] > shape1[0]:\n resizeMaintainAspectRatio(image1, height=shape2[0])\n\n return np.hstack((image1, image2))", "def add_border(im, border_width, value):\n assert (im.ndim == 3) and (im.shape[0] == 3)\n im = np.copy(im)\n\n if isinstance(value, np.ndarray):\n # reshape to [3, 1, 1]\n value = value.flatten()[:, np.newaxis, np.newaxis]\n im[:, :border_width, :] = value\n im[:, -border_width:, :] = value\n im[:, :, :border_width] = value\n im[:, :, -border_width:] = value\n\n return im", "def combine_images(images: list) -> Image:\n img_width = images[0][0].width\n img_height = images[0][0].height\n new_size = (img_width * len(images[0]), img_height * len(images))\n new_image = Image.new('RGB', new_size)\n\n # Add all the images from the grid to the new, blank image\n for rowindex, row in enumerate(images):\n for colindex, image in enumerate(row):\n location = (colindex * img_width, rowindex * img_height)\n new_image.paste(image, location)\n\n return new_image", "def concat_images(X):\n nc,h,w,_ = X.shape\n X = X.reshape(nc,h,w)\n n = np.ceil(np.sqrt(nc)).astype(\"int8\")\n img = np.zeros((n*w,n*h))\n x = 0\n y = 0\n for example in range(nc):\n img[x*w:(x+1)*w,y*h:(y+1)*h] = X[example]\n y += 1\n if y >= n:\n y = 0\n x += 1\n return img", "def joinImages(imgs):\n d = imgs.shape[0]\n h, w = imgs.shape[1], imgs.shape[2]\n colour = imgs.shape[3]\n img = np.zeros((h, w * d, colour))\n for idx, image in enumerate(imgs):\n i = idx\n img[0:h, i * w:i * w + w, :] = image\n return ((img * 255.) 
+ 1) * 2", "def pad_images_to_match_shapes(self):\n # Force all the images to have the same shape\n imageShapes = np.array([img.shape for img in self.imageList])\n ny, nx = imageShapes.max(axis=0)\n\n # Loop through each image and add padding if necessary\n for ny1nx1 in imageShapes:\n ny1, nx1 = ny1nx1\n padY = ny - ny1 if ny1 < ny else 0\n padX = nx - nx1 if nx1 < nx else 0\n\n # Extract the first image in the imageList\n thisImg = self.pop_image(0)\n\n if padX > 0 or padY > 0:\n # Pad the image as necessary\n thisImg = thisImg.pad(((0, padY), (0, padX)), 'constant')\n\n # Return the image to the imageList (at the END of the list)\n self.add_image(thisImg)\n\n # Hand the padded ImageStack back to the user\n return self", "def concatenate_vert(images, deltaW, offsetW, offsetH):\n \n images = map(Image.open, images)\n W = max(img.size[0] for img in images)\n H = sum(img.size[1] for img in images)\n\n result = Image.new(\"RGBA\", (W, H))\n\n result.paste(images[0], (0, 0))\n \n # re-sizing \n new_width = images[0].size[0]-deltaW\n ratio = new_width/float(images[1].size[0])\n new_height = int(images[1].size[1]*ratio)\n \n img = images[1].resize((new_width, new_height), Image.ANTIALIAS)\n result.paste(img, (offsetW, images[0].size[1]-offsetH))\n result.save('result.png')", "def merge_images(filenames, outfile, vgap=20):\n images = [Image.open(filename) for filename in filenames]\n\n widths = [image.size[0] for image in images]\n heights = [image.size[1] for image in images]\n\n result_width = max(widths)\n result_height = sum(heights) + len(images) * vgap\n\n result = Image.new('RGB', (result_width, result_height), (255, 255, 255))\n y = 0\n for image in images:\n result.paste(im=image, box=(0, y))\n y += image.size[1] + vgap\n\n\n result.save(outfile)", "def _tile_images(imgs, tile_shape, concatenated_image, margin_color=None):\n x_num, y_num = tile_shape\n one_width = imgs[0].shape[1]\n one_height = imgs[0].shape[0]\n if concatenated_image is None:\n concatenated_image = np.zeros((one_height * y_num, one_width * x_num, 3),\n dtype=np.uint8)\n if margin_color is not None:\n concatenated_image[:, :] = margin_color\n for y in range(y_num):\n for x in range(x_num):\n i = x + y * x_num\n if i >= len(imgs):\n pass\n else:\n concatenated_image[y*one_height:(y+1)*one_height,x*one_width:(x+1)*one_width,] = imgs[i]\n return concatenated_image", "def concat(list_of_arrays):\n shape = np.shape(list_of_arrays)\n newShape = [ shape[0]*shape[1] ]\n if len(shape)>2:\n for i in range(2,len(shape)):\n newShape.append(shape[i])\n \n array_concat = np.zeros(newShape)\n s=0\n e=shape[1]\n \n for i in range(0,shape[0]):\n array_concat[s:e] = list_of_arrays[i]\n s=e\n e=e+shape[1] \n return array_concat", "def tile_images(image_stack):\n assert len(image_stack.shape) == 4\n image_list = [image_stack[i, :, :, :] for i in range(image_stack.shape[0])]\n tiled_images = np.concatenate(image_list, axis=1)\n return tiled_images", "def concat_images_corner(imga, imgb, xoffset=0, yoffset=0, direction='horizontal',\n ontop=True, adjust_z=False):\n if direction == 'horizontal':\n max_dim = np.maximum.reduce([imga.shape, imgb.shape])\n\n offset = (abs(yoffset), abs(xoffset))\n tmp_offset = np.array(offset)\n\n # if (max_dim == imgb.shape).all():\n # tmp = np.copy(imgb)\n # imgb = np.copy(imga)\n # imga = np.copy(tmp)\n # ontop = toggle(ontop)\n # xoffset *= -1\n # yoffset *= -1\n\n # center_new = np.array(np.divide(max_dim, 2), dtype=int)\n new_img = np.full(np.add(max_dim, np.abs(offset)), np.nan)\n\n Sa0 = slice(0, 
imga.shape[0])\n Sa1 = slice(0, imga.shape[1])\n Sb0 = slice(abs(yoffset), abs(yoffset) + imgb.shape[0])\n Sb1 = slice(abs(xoffset), abs(xoffset) + imgb.shape[1])\n\n xdir = np.sign(xoffset)\n ydir = np.sign(yoffset)\n\n if ydir == 0:\n ydir = 1\n if xdir == 0:\n xdir = 1\n\n imga = imga[::ydir, ::xdir]\n imgb = imgb[::ydir, ::xdir]\n\n if adjust_z:\n top_img = 1 * new_img\n top_img[Sa0, Sa1] = imga\n top_img[Sb0, Sb1] = imgb\n low_img = 1 * new_img\n low_img[Sb0, Sb1] = imgb\n low_img[Sa0, Sa1] = imga\n\n diff = top_img - low_img\n m = np.nanmean(diff)\n s = np.nanstd(diff)\n mask = np.abs(diff) < m + s\n diff[mask] = np.nan\n add = np.nanmean(diff)\n\n print(add)\n\n imgb -= add\n\n if ontop:\n new_img[Sa0, Sa1] = imga\n new_img[Sb0, Sb1] = imgb\n else:\n new_img[Sb0, Sb1] = imgb\n new_img[Sa0, Sa1] = imga\n\n return new_img[::ydir, ::xdir]", "def horizontal_stack(*args):\r\n # Note: 'horizontal_stack' and 'vertical_stack' do not behave exactly like\r\n # Numpy's hstack and vstack functions. This is intended, because Numpy's\r\n # functions have potentially confusing/incoherent behavior (try them on 1D\r\n # arrays). If this is fixed in a future version of Numpy, it may be worth\r\n # trying to get closer to Numpy's way of doing things. In the meantime,\r\n # better keep different names to emphasize the implementation divergences.\r\n assert len(args) >= 2\r\n for arg in args:\r\n assert arg.type.ndim == 2\r\n return concatenate(args, axis=1)", "def concat_images(imga, imgb):\n ha, wa = imga.shape[:2]\n hb, wb = imgb.shape[:2]\n max_height = np.max([ha, hb])\n total_width = wa + wb\n\n new_img = np.zeros(shape=(max_height, total_width))\n new_img -= 1\n\n new_img[:ha, :wa] = imga\n new_img[:hb, wa:wa + wb] = imgb\n\n return new_img", "def appendimages(im1, im2):\n row1 = im1.shape[0]\n row2 = im2.shape[0]\n\n if row1 < row2:\n im1 = concatenate((im1, zeros((row2 - row1, im1.shape[1]))), axis=0)\n elif row1 > row2:\n im2 = concatenate((im2, zeros((row1 - row2, im2.shape[1]))), axis=0)\n\n return concatenate((im1, im2), axis=1)", "def hconcat_resize_min(im_list, interpolation=cv2.INTER_CUBIC):\n h_min = min(im.shape[0] for im in im_list)\n im_list_resize = [cv2.resize(im, (int(im.shape[1] * h_min / im.shape[0]), h_min), interpolation=interpolation) for im in im_list]\n return cv2.hconcat(im_list_resize)", "def appendimages(im1,im2):\n # select the image with the fewest rows and fill in enough empty rows\n rows1 = im1.shape[0]\n rows2 = im2.shape[0]\n if rows1 < rows2:\n im1 = np.concatenate((im1,zeros((rows2-rows1,im1.shape[1]))),axis=0)\n elif rows1 > rows2:\n im2 = np.concatenate((im2,zeros((rows1-rows2,im2.shape[1]))),axis=0)\n # if none of these cases they are equal, no filling needed.\n return np.concatenate((im1,im2), axis=1)", "def appendimages(im1, im2):\n\n # select the image with the fewest rows and fill in enough empty rows\n rows1 = im1.shape[0]\n rows2 = im2.shape[0]\n\n if rows1 < rows2:\n im1 = concatenate((im1, zeros((rows2-rows1, im1.shape[1]))), axis=0)\n elif rows1 > rows2:\n im2 = concatenate((im2, zeros((rows1-rows2, im2.shape[1]))), axis=0)\n # if none of these cases they are equal, no filling needed.\n assert (im1.shape[0] != im2.shape[0])\n return concatenate((im1, im2), axis=1)", "def appendimages(im1,im2):\n # select the image with the fewest rows and fill in enough empty rows\n rows1 = im1.shape[0]\n rows2 = im2.shape[0]\n if rows1 < rows2:\n im1 = np.concatenate((im1,np.zeros((rows2-rows1,im1.shape[1]))),axis=0)\n elif rows1 > rows2:\n im2 = 
np.concatenate((im2,np.zeros((rows1-rows2,im2.shape[1]))),axis=0)\n # if none of these cases they are equal, no filling needed.\n return np.concatenate((im1,im2), axis=1)", "def appendimages(im1,im2):\n # select the image with the fewest rows and fill in enough empty rows\n rows1 = im1.shape[0]\n rows2 = im2.shape[0]\n if rows1 < rows2:\n im1 = np.concatenate((im1,zeros((rows2-rows1,im1.shape[1]))),axis=0)\n elif rows1 > rows2:\n im2 = np.concatenate((im2,zeros((rows1-rows2,im2.shape[1]))),axis=0)\n # if none of these cases they are equal, no filling needed.\n return np.concatenate((im1,im2), axis=1)" ]
[ "0.7960261", "0.69222003", "0.67834777", "0.6604934", "0.6577598", "0.6484754", "0.6211543", "0.61573285", "0.6119428", "0.61114365", "0.61083424", "0.5949176", "0.5925623", "0.59040356", "0.5842073", "0.5829911", "0.581459", "0.58126813", "0.5722141", "0.56652904", "0.56414413", "0.5590594", "0.5589238", "0.5547682", "0.5519266", "0.55074865", "0.54922783", "0.5490457", "0.54867905", "0.548045" ]
0.83008605
0
Vertically concatenate a list of images with a border. This is similar to numpy's vstack except that it adds a border around each image. The borders can be controlled with the optional border_width and border_value arguments. See also hstack.
def vstack(images, border_width=2, border_value=0):
    if border_width == 0: return np.vstack(images)
    T, V = border_width, border_value
    result = []
    for image in images[:-1]:
        result.append(add_border(image, T, V, 'LTR'))
    result.append(add_border(images[-1], T, V))
    return np.vstack(result)
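The same pattern works vertically; a small sketch under the same assumption about the add_border helper:

import numpy as np

c = np.zeros((3, 5), dtype=np.uint8)
d = np.full((2, 5), 200, dtype=np.uint8)

# All but the last image omit the bottom border ('LTR'); the last is
# padded on all four sides, closing the column.
col = vstack([c, d], border_width=1, border_value=255)

# Widths grow by the left+right borders (5 + 2*1 = 7); heights grow by one
# top border per image plus the final bottom border (3+2 + 3*1 = 8).
assert col.shape == (8, 7)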
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hstack(images, border_width=2, border_value=0):\n if border_width == 0: return np.hstack(images)\n T, V = border_width, border_value\n result = []\n for image in images[:-1]:\n result.append(add_border(image, T, V, 'LTB'))\n result.append(add_border(images[-1], T, V))\n return np.hstack(result)", "def concat_images(images, axis=0):\n # Get the width and the heights\n widths, heights = zip(*(i.size for i in images))\n\n # Initalize an offset to append the next image to the end of the previous\n offset = 0\n\n # Concatenate along the lines\n if axis == 1:\n # Get the width of the final image and the height\n max_width = max(widths)\n total_height = sum(heights)\n # Initalize the final image with the first subimage\n new_im = Image.new(images[0].mode, (max_width, total_height))\n\n # Append all consecutive images\n for im in images:\n new_im.paste(im, (0, offset))\n offset += im.size[1]\n\n # Concatenate along the columns\n else:\n # Get the width and the height of the final image\n total_width = sum(widths)\n max_height = max(heights)\n # Initalize the final image with the first subimage\n new_im = Image.new(images[0].mode, (total_width, max_height))\n\n # Append all consecutive images\n for im in images:\n new_im.paste(im, (offset, 0))\n offset += im.size[0]\n\n return new_im", "def concatenate_vert(images, deltaW, offsetW, offsetH):\n \n images = map(Image.open, images)\n W = max(img.size[0] for img in images)\n H = sum(img.size[1] for img in images)\n\n result = Image.new(\"RGBA\", (W, H))\n\n result.paste(images[0], (0, 0))\n \n # re-sizing \n new_width = images[0].size[0]-deltaW\n ratio = new_width/float(images[1].size[0])\n new_height = int(images[1].size[1]*ratio)\n \n img = images[1].resize((new_width, new_height), Image.ANTIALIAS)\n result.paste(img, (offsetW, images[0].size[1]-offsetH))\n result.save('result.png')", "def stack_images(cls, imgs, horizontal=True):\n assert isinstance(imgs, (list, tuple))\n if horizontal:\n H = max([i.shape[0] for i in imgs])\n W = sum([i.shape[1] for i in imgs])\n stacked_img = np.full((H, W, 3), 255, dtype=np.uint8)\n offset_w = 0\n for i in imgs:\n stacked_img[:i.shape[0], offset_w:offset_w +\n i.shape[1], :] = i.data\n offset_w += i.shape[1]\n else:\n H = sum([i.shape[0] for i in imgs])\n W = max([i.shape[1] for i in imgs])\n stacked_img = np.full((H, W, 3), 255, dtype=np.uint8)\n offset_h = 0\n for i in imgs:\n stacked_img[offset_h:offset_h +\n i.shape[0], :i.shape[1], :] = i.data\n offset_h += i.shape[0]\n\n return cls(stacked_img)", "def add_border(im, border_width, value):\n assert (im.ndim == 3) and (im.shape[0] == 3)\n im = np.copy(im)\n\n if isinstance(value, np.ndarray):\n # reshape to [3, 1, 1]\n value = value.flatten()[:, np.newaxis, np.newaxis]\n im[:, :border_width, :] = value\n im[:, -border_width:, :] = value\n im[:, :, :border_width] = value\n im[:, :, -border_width:] = value\n\n return im", "def join_images_horizontally(images):\n array = np.concatenate((images[0], images[1]), axis=1)\n return Image.fromarray(np.uint8(array))", "def merge_images(images, axis=0):\n assert axis in [0, 1]\n total_len = sum(map(lambda i: i.size[axis], images))\n if axis == 0:\n new_shape = (total_len, images[0].size[1])\n step = images[0].size[0]\n else:\n new_shape = (images[0].size[0], total_len)\n step = images[0].size[1]\n\n canvas = Image.new('RGB', new_shape)\n\n shift = 0\n for image in images:\n if axis == 0:\n canvas.paste(image, (shift, 0))\n else:\n canvas.paste(image, (0, shift))\n \n shift += image.size[axis]\n\n return canvas", 
"def makestack(inputList):\n stack = []\n for img in inputList:\n if img.ndim == 2:\n stack.append(img[numpy.newaxis, ...])\n elif img.ndim == 3:\n stack.append(img)\n \n return numpy.concatenate(stack, axis=0)", "def merge_images(filenames, outfile, vgap=20):\n images = [Image.open(filename) for filename in filenames]\n\n widths = [image.size[0] for image in images]\n heights = [image.size[1] for image in images]\n\n result_width = max(widths)\n result_height = sum(heights) + len(images) * vgap\n\n result = Image.new('RGB', (result_width, result_height), (255, 255, 255))\n y = 0\n for image in images:\n result.paste(im=image, box=(0, y))\n y += image.size[1] + vgap\n\n\n result.save(outfile)", "def concat_images_horiz(imga, imgb):\n ha,wa = imga.shape[:2]\n hb,wb = imgb.shape[:2]\n max_height = np.max([ha, hb])\n total_width = wa+wb\n new_img = np.zeros(shape=(max_height, total_width, 3), dtype=np.uint8)\n new_img[:ha,:wa]=imga\n new_img[:hb,wa:wa+wb]=imgb\n return new_img", "def combine_pictures(images):\n widths, heights = zip(*(i.size for i in images))\n\n total_width = sum(widths)\n max_height = max(heights)\n\n new_im = Image.new('RGB', (total_width, max_height))\n\n x_offset = 0\n for im in images:\n new_im.paste(im, (x_offset, 0))\n x_offset += im.size[0]\n\n new_im.save('test.jpg')\n\n return True", "def verticalConcat(image1, image2):\n shape1 = image1.shape\n shape2 = image2.shape\n if shape1[1] > shape2[1]:\n resizeMaintainAspectRatio(image2, width=shape1[1])\n elif shape2[1] > shape1[1]:\n resizeMaintainAspectRatio(image1, width=shape2[1])\n\n return np.hstack((image1, image2))", "def concat_n_images(images,direction='H'):\n if(len(images)==1):\n output=images[0]\n elif isinstance(images, np.ndarray):\n output=images\n else: \n concatenator={'H':concat_images_horiz,'V':concat_images_vert}[direction]\n output = None\n for i, img in enumerate(images):\n \n if(len(img.shape)==2):\n img=np.expand_dims(img,2)\n \n if i==0:\n output = img[:,:,:3]\n else:\n output = concatenator(output,(img)[:,:,:3])\n return output", "def add_border(image: np.ndarray, width=2, value=0, sides='ltrb'):\n result = image\n sides = sides.upper()\n if 'L' in sides: result = add_left(result, width, value)\n if 'T' in sides: result = add_top(result, width, value)\n if 'R' in sides: result = add_right(result, width, value)\n if 'B' in sides: result = add_bottom(result, width, value)\n return result", "def concat_images_vert(imga, imgb):\n ha,wa = imga.shape[:2]\n hb,wb = imgb.shape[:2]\n max_width = np.max([wa, wb])\n total_height = ha+hb\n new_img = np.zeros(shape=(total_height, max_width, 3), dtype=np.uint8)\n new_img[:ha,:wa]=imga\n #new_img[:hb,wa:wa+wb]=imgb\n new_img[ha:ha+hb,:wb]=imgb\n return new_img", "def dispims_color(M, border=0, bordercolor=[0.0, 0.0, 0.0], *imshow_args, **imshow_keyargs):\n bordercolor = np.array(bordercolor)[None, None, :]\n numimages = len(M)\n M = M.copy()\n for i in range(M.shape[0]):\n M[i] -= M[i].flatten().min()\n M[i] /= M[i].flatten().max()\n height, width, three = M[0].shape\n assert three == 3\n n0 = np.int(np.ceil(np.sqrt(numimages)))\n n1 = np.int(np.ceil(np.sqrt(numimages)))\n im = np.array(bordercolor)*np.ones(\n ((height+border)*n1+border,(width+border)*n0+border, 1),dtype='<f8')\n for i in range(n0):\n for j in range(n1):\n if i*n1+j < numimages:\n im[j*(height+border)+border:(j+1)*(height+border)+border,\n i*(width+border)+border:(i+1)*(width+border)+border,:] = np.concatenate((\n np.concatenate((M[i*n1+j,:,:,:],\n 
bordercolor*np.ones((height,border,3),dtype=float)), 1),\n bordercolor*np.ones((border,width+border,3),dtype=float)\n ), 0)\n imshow_keyargs[\"interpolation\"]=\"nearest\"\n plt.imshow(im, *imshow_args, **imshow_keyargs)\n plt.show()", "def joinImages(imgs):\n d = imgs.shape[0]\n h, w = imgs.shape[1], imgs.shape[2]\n colour = imgs.shape[3]\n img = np.zeros((h, w * d, colour))\n for idx, image in enumerate(imgs):\n i = idx\n img[0:h, i * w:i * w + w, :] = image\n return ((img * 255.) + 1) * 2", "def glue(shreds):\n width = sum([s.width for s in shreds])\n height = shreds[0].height\n img = Image.new(\"RGBA\", (width, height))\n x_dest = 0 # x coordinate of insertion point.\n for s in shreds:\n img.paste(s.image, (x_dest, 0))\n x_dest += s.width # Move insertion point for next shred.\n return img", "def _tile_images(imgs, tile_shape, concatenated_image, margin_color=None):\n x_num, y_num = tile_shape\n one_width = imgs[0].shape[1]\n one_height = imgs[0].shape[0]\n if concatenated_image is None:\n concatenated_image = np.zeros((one_height * y_num, one_width * x_num, 3),\n dtype=np.uint8)\n if margin_color is not None:\n concatenated_image[:, :] = margin_color\n for y in range(y_num):\n for x in range(x_num):\n i = x + y * x_num\n if i >= len(imgs):\n pass\n else:\n concatenated_image[y*one_height:(y+1)*one_height,x*one_width:(x+1)*one_width,] = imgs[i]\n return concatenated_image", "def crop_border(imgs, crop_border):\n if crop_border == 0:\n return imgs\n else:\n if isinstance(imgs, list):\n return [v[crop_border:-crop_border, crop_border:-crop_border, ...] for v in imgs]\n else:\n return imgs[crop_border:-crop_border, crop_border:-crop_border, ...]", "def combine_images(images: list) -> Image:\n img_width = images[0][0].width\n img_height = images[0][0].height\n new_size = (img_width * len(images[0]), img_height * len(images))\n new_image = Image.new('RGB', new_size)\n\n # Add all the images from the grid to the new, blank image\n for rowindex, row in enumerate(images):\n for colindex, image in enumerate(row):\n location = (colindex * img_width, rowindex * img_height)\n new_image.paste(image, location)\n\n return new_image", "def concat_images_corner(imga, imgb, xoffset=0, yoffset=0, direction='horizontal',\n ontop=True, adjust_z=False):\n if direction == 'horizontal':\n max_dim = np.maximum.reduce([imga.shape, imgb.shape])\n\n offset = (abs(yoffset), abs(xoffset))\n tmp_offset = np.array(offset)\n\n # if (max_dim == imgb.shape).all():\n # tmp = np.copy(imgb)\n # imgb = np.copy(imga)\n # imga = np.copy(tmp)\n # ontop = toggle(ontop)\n # xoffset *= -1\n # yoffset *= -1\n\n # center_new = np.array(np.divide(max_dim, 2), dtype=int)\n new_img = np.full(np.add(max_dim, np.abs(offset)), np.nan)\n\n Sa0 = slice(0, imga.shape[0])\n Sa1 = slice(0, imga.shape[1])\n Sb0 = slice(abs(yoffset), abs(yoffset) + imgb.shape[0])\n Sb1 = slice(abs(xoffset), abs(xoffset) + imgb.shape[1])\n\n xdir = np.sign(xoffset)\n ydir = np.sign(yoffset)\n\n if ydir == 0:\n ydir = 1\n if xdir == 0:\n xdir = 1\n\n imga = imga[::ydir, ::xdir]\n imgb = imgb[::ydir, ::xdir]\n\n if adjust_z:\n top_img = 1 * new_img\n top_img[Sa0, Sa1] = imga\n top_img[Sb0, Sb1] = imgb\n low_img = 1 * new_img\n low_img[Sb0, Sb1] = imgb\n low_img[Sa0, Sa1] = imga\n\n diff = top_img - low_img\n m = np.nanmean(diff)\n s = np.nanstd(diff)\n mask = np.abs(diff) < m + s\n diff[mask] = np.nan\n add = np.nanmean(diff)\n\n print(add)\n\n imgb -= add\n\n if ontop:\n new_img[Sa0, Sa1] = imga\n new_img[Sb0, Sb1] = imgb\n else:\n new_img[Sb0, Sb1] = imgb\n 
new_img[Sa0, Sa1] = imga\n\n return new_img[::ydir, ::xdir]", "def horizontalConcat(image1, image2):\n shape1 = image1.shape\n shape2 = image2.shape\n if shape1[0] > shape2[0]:\n resizeMaintainAspectRatio(image2, height=shape1[0])\n elif shape2[0] > shape1[0]:\n resizeMaintainAspectRatio(image1, height=shape2[0])\n\n return np.hstack((image1, image2))", "def draw_borders(img):\n ret = img.copy()\n ret[0, :] = GRAY # top\n ret[-1, :] = GRAY # bottom\n ret[:, 0] = GRAY # left\n ret[:, -1] = GRAY # right\n return ret", "def plot_images(images, ax, ims_per_row=5, padding=5, digit_dimensions=(28,28),\n cmap=matplotlib.cm.binary, vmin=None):\n N_images = images.shape[0]\n N_rows = np.ceil(float(N_images) / ims_per_row)\n pad_value = np.min(images.ravel())\n concat_images = np.full(((digit_dimensions[0] + padding) * N_rows + padding,\n (digit_dimensions[0] + padding) * ims_per_row + padding), pad_value)\n for i in range(N_images):\n cur_image = np.reshape(images[i, :], digit_dimensions)\n row_ix = i / ims_per_row # Integer division.\n col_ix = i % ims_per_row\n row_start = padding + (padding + digit_dimensions[0])*row_ix\n col_start = padding + (padding + digit_dimensions[0])*col_ix\n concat_images[row_start: row_start + digit_dimensions[0],\n col_start: col_start + digit_dimensions[0]] \\\n = cur_image\n cax = ax.matshow(concat_images, cmap=cmap, vmin=vmin)\n plt.xticks(np.array([]))\n plt.yticks(np.array([]))\n return cax", "def padFrames(frameList, maxHeight):\n \n # writeText('\\n')\n for i, frame in enumerate(frameList):\n extraSpace = maxHeight - frame.shape[0]\n #frameList[i] = np.lib.pad(frame,((int(np.floor(extraSpace/2)),int(np.ceil(extraSpace/2))),(0,0)),'constant', constant_values=(4000,8000))\n frameList[i] = np.lib.pad(frame,((extraSpace,0),(0,0)),'constant', constant_values=0)\n print('\\rPadding Frames: {:.2f} % done'.format((100.0*((i+1)/len(frameList)))),end='',flush=True)\n print('\\n')\n stack = np.stack(frameList, axis=0)\n \n return stack", "def immerge(images, n_rows=None, n_cols=None, padding=0, pad_value=0):\n # 将几张小图片整合到一张大图片中(大图片每行每列会显示好几张小图片)\n images = np.array(images)\n n = images.shape[0]\n if n_rows:\n n_rows = max(min(n_rows, n), 1)\n n_cols = int(n - 0.5) // n_rows + 1\n elif n_cols:\n n_cols = max(min(n_cols, n), 1)\n n_rows = int(n - 0.5) // n_cols + 1\n else:\n n_rows = int(n ** 0.5)\n n_cols = int(n - 0.5) // n_rows + 1\n\n h, w = images.shape[1], images.shape[2]\n shape = (h * n_rows + padding * (n_rows - 1),\n w * n_cols + padding * (n_cols - 1))\n if images.ndim == 4:\n shape += (images.shape[3],)\n img = np.full(shape, pad_value, dtype=images.dtype)\n\n for idx, image in enumerate(images):\n i = idx % n_cols\n j = idx // n_cols\n img[j * (h + padding):j * (h + padding) + h,\n i * (w + padding):i * (w + padding) + w, ...] 
= image\n\n return img", "def tile_images(image_stack):\n assert len(image_stack.shape) == 4\n image_list = [image_stack[i, :, :, :] for i in range(image_stack.shape[0])]\n tiled_images = np.concatenate(image_list, axis=1)\n return tiled_images", "def pad_images_to_match_shapes(self):\n # Force all the images to have the same shape\n imageShapes = np.array([img.shape for img in self.imageList])\n ny, nx = imageShapes.max(axis=0)\n\n # Loop through each image and add padding if necessary\n for ny1nx1 in imageShapes:\n ny1, nx1 = ny1nx1\n padY = ny - ny1 if ny1 < ny else 0\n padX = nx - nx1 if nx1 < nx else 0\n\n # Extract the first image in the imageList\n thisImg = self.pop_image(0)\n\n if padX > 0 or padY > 0:\n # Pad the image as necessary\n thisImg = thisImg.pad(((0, padY), (0, padX)), 'constant')\n\n # Return the image to the imageList (at the END of the list)\n self.add_image(thisImg)\n\n # Hand the padded ImageStack back to the user\n return self", "def grid(images, cols = 2, save = False, filename = \"\", show = False):\n \n rows = ceil(len(images) / cols)\n \n fig, ax = plt.subplots(rows, 1)\n\n index = 0\n element = []\n for row in range(rows):\n for col in range(cols): \n if index < len(images):\n element.append(images[index])\n index += 1\n \n stack = np.hstack(tuple(element))\n ax[row].axis('off')\n ax[row].imshow(stack)\n element = []\n \n plt.tight_layout()\n \n if save:\n fig.savefig(filename)\n\n if show:\n plt.show(fig)\n \n return 0" ]
[ "0.7994192", "0.62722176", "0.6255651", "0.61048955", "0.61031604", "0.6095366", "0.6059274", "0.60569215", "0.6039915", "0.58855444", "0.584056", "0.5793362", "0.5777069", "0.57281417", "0.57201326", "0.56275475", "0.5617591", "0.5614883", "0.5610327", "0.5589554", "0.55825037", "0.55532885", "0.5545826", "0.55114347", "0.55025125", "0.54847157", "0.54832286", "0.5469746", "0.5461149", "0.54574513" ]
0.82072884
0
Compose a source image with alpha onto a destination image.
def compose(dst: np.ndarray, src: np.ndarray) -> np.ndarray:
    a, b = ensure_alpha(src), ensure_alpha(dst)
    alpha = extract_alpha(a)
    result = b * (1.0 - alpha) + a * alpha
    if dst.shape[2] == 3:
        return extract_rgb(result)
    return result
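A small sketch of calling compose (ensure_alpha, extract_alpha, and extract_rgb are helpers not shown in this record; the sketch assumes they append an opaque alpha channel when one is missing, slice out the alpha plane, and slice out the RGB planes, respectively):

import numpy as np

# Opaque red background, RGB floats in [0, 1].
dst = np.zeros((2, 2, 3))
dst[..., 0] = 1.0

# Half-transparent blue foreground, RGBA.
src = np.zeros((2, 2, 4))
src[..., 2] = 1.0
src[..., 3] = 0.5

# Straight-alpha "over": each pixel blends to 50% red, 50% blue, and the
# result comes back as RGB because dst has three channels.
out = compose(dst, src)
assert out.shape == (2, 2, 3)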
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def alpha_composite(self, im, dest=(0, 0), source=(0, 0)):\r\n\r\n if not isinstance(source, (list, tuple)):\r\n raise ValueError(\"Source must be a tuple\")\r\n if not isinstance(dest, (list, tuple)):\r\n raise ValueError(\"Destination must be a tuple\")\r\n if not len(source) in (2, 4):\r\n raise ValueError(\"Source must be a 2 or 4-tuple\")\r\n if not len(dest) == 2:\r\n raise ValueError(\"Destination must be a 2-tuple\")\r\n if min(source) < 0:\r\n raise ValueError(\"Source must be non-negative\")\r\n if min(dest) < 0:\r\n raise ValueError(\"Destination must be non-negative\")\r\n\r\n channels, depth = self._get_channels_and_depth(im)\r\n _mode = self._get_mode(im.shape, im.dtype)\r\n _im = self._new(_mode, (im.shape[1], im.shape[0]))\r\n if len(source) == 2:\r\n source = source + _im.size\r\n\r\n # over image, crop if it's not the whole thing.\r\n if source == (0, 0) + _im.size:\r\n overlay = _im\r\n else:\r\n overlay = _im.crop(source)\r\n\r\n # target for the paste\r\n box = dest + (dest[0] + overlay.width, dest[1] + overlay.height)\r\n\r\n # destination image. don't copy if we're using the whole image.\r\n if box == (0, 0) + self.size:\r\n background = self._instance\r\n else:\r\n background = self.crop(box)\r\n\r\n result = alpha_composite(background, overlay)\r\n self.paste(result, box)", "def _blend(img1, img2, alpha):\n return img1.mul(alpha).add(1 - alpha, img2)", "def compose_premultiplied(dst: np.ndarray, src: np.ndarray):\n a, b = ensure_alpha(src), ensure_alpha(dst)\n alpha = extract_alpha(a)\n result = b * (1.0 - alpha) + a\n if dst.shape[2] == 3:\n return extract_rgb(result)\n return result", "def overlay_alpha_images(img1, img2, keepalpha=True, dtype=np.float32,\n impl='inplace'):\n rgb1, alpha1 = _prep_rgb_alpha(img1, dtype=dtype)\n rgb2, alpha2 = _prep_rgb_alpha(img2, dtype=dtype)\n\n # Perform the core alpha blending algorithm\n if impl == 'simple':\n rgb3, alpha3 = _alpha_blend_simple(rgb1, alpha1, rgb2, alpha2)\n elif impl == 'inplace':\n rgb3, alpha3 = _alpha_blend_inplace(rgb1, alpha1, rgb2, alpha2)\n elif impl == 'numexpr1':\n rgb3, alpha3 = _alpha_blend_numexpr1(rgb1, alpha1, rgb2, alpha2)\n elif impl == 'numexpr2':\n rgb3, alpha3 = _alpha_blend_numexpr2(rgb1, alpha1, rgb2, alpha2)\n else:\n raise ValueError('unknown impl={}'.format(impl))\n\n if keepalpha:\n raster = np.dstack([rgb3, alpha3[..., None]])\n # Note: if we want to output a 255 img we could do something like this\n # out = np.zeros_like(img1)\n # out[..., :3] = rgb3\n # out[..., 3] = alpha3\n else:\n raster = rgb3\n return raster", "def alpha_blend(input_image, segmentation_mask, alpha=0.5):\n blended = np.zeros(input_image.size, dtype=np.float32)\n blended = input_image * alpha + segmentation_mask * (1 - alpha)\n return blended", "def alpha_composite_with_color(image, color=(255, 255, 255)):\n back = Image.new('RGBA', size=image.size, color=color + (255,))\n return alpha_composite(image, back)", "def alpha_composite(im1, im2):\r\n r1, g1, b1, a1 = Image().split(im1)\r\n r2, g2, b2, a2 = Image().split(im2)\r\n alphacomp = np.zeros(im1.shape, dtype=im1.dtype)\r\n im3 = composite(alphacomp, im1, a1)\r\n alphacomp = np.zeros(im2.shape, dtype=im2.dtype)\r\n im4 = composite(alphacomp, im2, a2)\r\n return blend(im3, im4, 0.5)", "def putalpha(self, alpha):\r\n channels, depth = self._get_channels_and_depth(self._mode)\r\n\r\n if isinstance(alpha, np.ndarray): \r\n paste_image = True\r\n else:\r\n paste_image = False\r\n\r\n if channels==4:\r\n r, g, b, a = self.split()\r\n if not paste_image:\r\n a[:] 
= alpha\r\n else:\r\n a = alpha.copy()\r\n colorband = (r, g, b, a)\r\n self._instance = merge(\"RGBA\", colorband, image=True)\r\n elif channels == 3:\r\n if not paste_image:\r\n sh = self._instance.shape\r\n sh = (sh[0], sh[1], 1)\r\n a = np.zeros(sh, dtype=depth)\r\n a[:] = alpha\r\n else:\r\n a = alpha.copy()\r\n r, g, b = self.split()\r\n colorband = (r, g, b, a)\r\n self._instance = merge(\"RGBA\", colorband, image=True)\r\n elif channels < 2: # \"L\" or \"LA\"\r\n if not paste_image:\r\n sh = self._instance.shape\r\n sh = (sh[0], sh[1], 1)\r\n a = np.zeros(sh, dtype=depth)\r\n a[:] = alpha\r\n else:\r\n a = alpha.copy()\r\n if channels == 2:\r\n l, a_old = self.split()\r\n colorband = (l, a)\r\n else:\r\n colorband = (self._instance, a)\r\n self._instance = merge(\"LA\", colorband, image=True)", "def getTransparentImg(src, imageName, writepath):\n tmp = cv2.cvtColor(src, cv2.COLOR_RGB2GRAY)\n _,alpha = cv2.threshold(tmp,0,255,cv2.THRESH_BINARY)\n r, g, b = cv2.split(src)\n rgba = [b,g,r, alpha]\n dst = cv2.merge(rgba,4)\n save_name = writepath + '/' + imageName\n cv2.imwrite(save_name, dst)", "def alpha_composite(front, back):\n front = np.asarray(front)\n back = np.asarray(back)\n result = np.empty(front.shape, dtype='float')\n alpha = np.index_exp[:, :, 3:]\n rgb = np.index_exp[:, :, :3]\n falpha = front[alpha] / 255.0\n balpha = back[alpha] / 255.0\n result[alpha] = falpha + balpha * (1 - falpha)\n old_setting = np.seterr(invalid='ignore')\n result[rgb] = (front[rgb] * falpha + back[rgb] * balpha * (1 - falpha)) / result[alpha]\n np.seterr(**old_setting)\n result[alpha] *= 255\n np.clip(result, 0, 255)\n # astype('uint8') maps np.nan and np.inf to 0\n result = result.astype('uint8')\n result = Image.fromarray(result, 'RGBA')\n return result", "def transparent(file, dest, transparenc):\r\n img = Image.open(file)\r\n img = img.convert('RGBA')\r\n r, g, b, alpha = img.split()\r\n alpha = alpha.point(lambda i: i > 0 and transparenc)\r\n img.putalpha(alpha)\r\n img.save(dest)", "def overlay_image_alpha(self,img, img_overlay, pos, alpha_mask):\n\n x, y = pos\n\n # Image ranges\n y1, y2 = max(0, y), min(img.shape[0], y + img_overlay.shape[0])\n x1, x2 = max(0, x), min(img.shape[1], x + img_overlay.shape[1])\n\n # Overlay ranges\n y1o, y2o = max(0, -y), min(img_overlay.shape[0], img.shape[0] - y)\n x1o, x2o = max(0, -x), min(img_overlay.shape[1], img.shape[1] - x)\n\n # Exit if nothing to do\n if y1 >= y2 or x1 >= x2 or y1o >= y2o or x1o >= x2o:\n return\n\n channels = img.shape[2]\n\n alpha = alpha_mask[y1o:y2o, x1o:x2o]\n alpha_inv = 1.0 - alpha\n\n for c in range(channels):\n img[y1:y2, x1:x2, c] = (alpha * img_overlay[y1o:y2o, x1o:x2o, c] +\n alpha_inv * img[y1:y2, x1:x2, c])", "def merge_into(self, dst):\n # We must respect layer visibility, because saving a\n # transparent PNG just calls this function for each layer.\n src = self\n dst.strokes.extend(self.strokes)\n for tx, ty in dst._surface.get_tiles():\n surf = dst._surface.get_tile_memory(tx, ty, readonly=False)\n surf[:,:,:] = dst.effective_opacity * surf[:,:,:]\n for tx, ty in src._surface.get_tiles():\n surf = dst._surface.get_tile_memory(tx, ty, readonly=False)\n src._surface.composite_tile(surf, tx, ty,\n opacity=self.effective_opacity,\n mode=self.compositeop)\n dst.opacity = 1.0", "def pure_pil_alpha_to_color_v2(image, color=(255, 255, 255)):\n image.load() # needed for split()\n background = Image.new('RGB', image.size, color)\n background.paste(image, mask=image.split()[3]) # 3 is the alpha channel\n return 
background", "def layer_blend(foreground, background, foreground_alpha=.6):\n cv2.addWeighted(foreground, foreground_alpha,\n background, 1 - foreground_alpha, 0, background)\n\n return background", "def remove_alpha(img: Image.Image, bg_color: tuple[int, int, int] = (255, 255, 255)):\n img_rgb = Image.new(\"RGB\", img.size, bg_color)\n alpha_layer = img.split()[3]\n img_rgb.paste(img, mask=alpha_layer)\n return img_rgb", "def paste(self, other):\n r, g, b, alpha = other.pil_image.split()\n pil_image = self.pil_image.copy()\n pil_image.paste(other.pil_image, mask=alpha)\n return kurt.Image(pil_image)", "def create_overlay_image(background, foreground):\n bg = PIL.Image.fromarray(np.uint8(background), mode='RGBA')\n\n fg = PIL.Image.fromarray(np.uint8(foreground), mode='RGBA')\n img = PIL.Image.alpha_composite(bg, fg)\n\n return(img)", "def _blend_layers(self, imagecontent, (z, x, y)):\n result = self._tile_image(imagecontent)\n # Paste each layer\n for (layer, opacity) in self._layers:\n try:\n # Prepare tile of overlay, if available\n overlay = self._tile_image(layer.tile((z, x, y)))\n except (DownloadError, ExtractionError), e:\n logger.warn(e)\n continue\n # Extract alpha mask\n overlay = overlay.convert(\"RGBA\")\n r, g, b, a = overlay.split()\n overlay = Image.merge(\"RGB\", (r, g, b))\n a = ImageEnhance.Brightness(a).enhance(opacity)\n overlay.putalpha(a)\n mask = Image.merge(\"L\", (a,))\n result.paste(overlay, (0, 0), mask)\n # Read result\n return self._image_tile(result)", "def add_alpha(image_data):\n\n # get hsv image\n hsv = rgb_to_hsv(image_data[:, :, :3].astype(float) / 255)\n\n # create new image and set alpha channel\n new_image_data = np.zeros(image_data.shape)\n new_image_data[:, :, 3] = hsv[:, :, 2]\n\n # set value of hsv image to either 0 or 1.\n hsv[:, :, 2] = np.where(hsv[:, :, 2] > 0, 1, 0)\n\n # combine alpha and new rgb\n new_image_data[:, :, :3] = hsv_to_rgb(hsv)\n return new_image_data", "def overlay_two_imgs(img1, img2, alpha=0.5):\n # Validate alpha\n if alpha > 1 or alpha < 0:\n fatal_error(\"The value of alpha should be in the range of (0,1)!\")\n\n # Validate image sizes are the same\n size_img1 = img1.shape[0:2]\n size_img2 = img2.shape[0:2]\n if size_img1 != size_img2:\n fatal_error(f\"The height/width of img1 ({size_img1}) needs to match img2 ({size_img2}).\")\n\n # Copy the input images\n img1_ = np.copy(img1)\n img2_ = np.copy(img2)\n # If the images are grayscale convert to BGR\n if len(img1_.shape) == 2:\n img1_ = cv2.cvtColor(img1_, cv2.COLOR_GRAY2BGR)\n if len(img2_.shape) == 2:\n img2_ = cv2.cvtColor(img2_, cv2.COLOR_GRAY2BGR)\n\n # initialize the output image\n out_img = np.zeros(size_img1 + (3,), dtype=np.uint8)\n\n # blending\n out_img[:, :, :] = (alpha * img1_[:, :, :]) + ((1 - alpha) * img2_[:, :, :])\n\n params.device += 1\n if params.debug == 'print':\n print_image(out_img, os.path.join(params.debug_outdir, str(params.device) + '_overlay.png'))\n elif params.debug == 'plot':\n plot_image(out_img)\n return out_img", "def to_alpha(self):\n if self.channels == 3:\n alpha = opencv.cvtColor(self.img, opencv.COLOR_BGR2BGRA)\n return Image(alpha)\n elif self.channels == 1:\n alpha = opencv.cvtColor(self.img, opencv.COLOR_GRAY2BGRA)\n return Image(alpha)\n else:\n return Image(self.img)", "def _blend_layers(self, imagecontent, (z, x, y)):\n result = self._tile_image(imagecontent)\n # Paste each layer\n for (layer, opacity) in self._layers:\n try:\n # Prepare tile of overlay, if available\n overlay = self._tile_image(layer.tile((z, x, y)))\n except 
(DownloadError, ExtractionError), e:\n logger.warn(e)\n continue\n # Extract alpha mask\n overlay = overlay.convert(\"RGBA\")\n r, g, b, a = overlay.split()\n overlay = Image.merge(\"RGB\", (r, g, b))\n a = ImageEnhance.Brightness(a).enhance(opacity)\n overlay.putalpha(a)\n mask = Image.merge(\"L\", (a,))\n result.paste(overlay, (0, 0), mask)\n # Read result\n return self._image_tile(result)", "def alpha_extend(color: C3I, alpha: int = 255) -> C4I:\n return (*color, alpha)", "def composite(args):\n\n # load the input image\n logging.info('Loading input image %s' % (args.input))\n inputImage = load_image(args.input)\n\n # load the target image\n logging.info('Loading target image %s' % (args.target))\n targetImage = load_image(args.target)\n\n # load the mask image\n logging.info('Loading mask image %s' % (args.mask))\n maskImage = load_image(args.mask)\n\n # If None, set the source points or sets them to the whole input image\n if args.source == None:\n (height, width, _) = inputImage.shape\n args.source = [0.0, height, 0.0, 0.0, width, 0.0, width, height]\n\n # Loads the source points into a 4-by-2 array\n source_points = np.array(args.source).reshape(4, 2)\n\n # Loads the target points into a 4-by-2 array\n target_points = np.array(args.dst).reshape(4, 2)\n\n # Compute the composite image\n result = composite_image(inputImage, targetImage,\n source_points, target_points, maskImage)\n result=np.uint8(result)\n # save the result\n logging.info('Saving result to %s' % (args.output))\n imageio.imwrite(args.output, result)", "def apply_png_overlay(image, overlay, start_row, start_col, height=200, width=200):\n\n alpha_s = overlay[:, :, 3] / 255.0\n alpha_l = 1.0 - alpha_s\n for c in range(0, 3):\n image[start_row:start_row+height, start_col:start_col+width, c] = (\n alpha_s * overlay[:, :, c] +\n alpha_l * image[start_row:start_row+height, start_col:start_col+width, c])\n \n return image", "def opacity(im,alpha):\n if im.mode != 'RGBA':\n im = im.convert('RGBA')\n else:\n im = im.copy()\n alphachannel = im.split()[3]\n alphachannel = ImageEnhance.Brightness(alphachannel).enhance(alpha)\n im.putalpha(alphachannel)\n return im", "def image_overlay(image, image_blend, alpha=0.2, cmap_image=\"Greys_r\", cmap_blend=\"jet\"):\n plt.imshow(image, cmap=cmap_image)\n plt.imshow(image_blend, cmap=cmap_blend, interpolation=\"none\", alpha=alpha)", "def overlay_alpha_layers(layers, keepalpha=True, dtype=np.float32):\n layer_iter = iter(layers)\n img1 = next(layer_iter)\n rgb1, alpha1 = _prep_rgb_alpha(img1, dtype=dtype)\n\n for img2 in layer_iter:\n rgb2, alpha2 = _prep_rgb_alpha(img2, dtype=dtype)\n rgb1, alpha1 = _alpha_blend_inplace(rgb1, alpha1, rgb2, alpha2)\n\n if keepalpha:\n raster = np.dstack([rgb1, alpha1[..., None]])\n else:\n raster = rgb1\n return raster", "def paste(self, src, x_off, y_off):\n x_off, y_off = int(x_off), int(y_off)\n\n # Overlap rectangle in target image coordinates\n width, height = src.width, src.height\n x1 = max(x_off, 0)\n y1 = max(y_off, 0)\n x2 = min(x_off + width, self.width)\n y2 = min(y_off + height, self.height)\n\n # Paste location is totally outside image\n if x1 >= x2 or y1 >= y2:\n return\n\n # Overlap rectangle in source image coordinates\n sx1 = x1 - x_off\n sy1 = y1 - y_off\n sx2 = x2 - x_off\n sy2 = y2 - y_off\n\n # Perform paste\n target = self.img\n source = src.img\n alpha = 3\n\n if self.channels == 4 and src.channels == 4:\n # Use alpha blending\n for c in range(0, 3):\n target[y1:y2, x1:x2, c] = source[sy1:sy2, sx1:sx2, c] * (source[sy1:sy2, sx1:sx2, 
alpha] / 255.0) \\\n + target[y1:y2, x1:x2, c] * (1.0 - source[sy1:sy2, sx1:sx2, alpha] / 255.0)\n\n target[y1:y2, x1:x2, alpha] = np.full((y2-y1, x2-x1), 255, np.uint8)\n\n else:\n # No alpha blending\n target[y1:y2, x1:x2] = src.img[sy1:sy2, sx1:sx2]" ]
[ "0.7422852", "0.70978785", "0.6976313", "0.6866716", "0.6865255", "0.68406856", "0.6805374", "0.6720108", "0.6681318", "0.66069293", "0.65929514", "0.65593785", "0.6482925", "0.6482741", "0.6438921", "0.63782775", "0.6372395", "0.63322204", "0.6287815", "0.6284114", "0.62300265", "0.6203474", "0.6174149", "0.61579734", "0.6146324", "0.6144757", "0.6118578", "0.6104984", "0.60928345", "0.60825557" ]
0.7438169
0
Calculate bandpass filters with adjustable length for given frequency ranges. For each of the given frequency band ranges, this function returns the filter coefficients with length "filter_len". The filters can thus be used sequentially for band power estimation.
def calc_band_filters(f_ranges, sfreq, filter_length="1000ms", l_trans_bandwidth=4, h_trans_bandwidth=4): filter_list = list() for f_range in f_ranges: h = mne.filter.create_filter(None, sfreq, l_freq=f_range[0], h_freq=f_range[1], fir_design='firwin', l_trans_bandwidth=l_trans_bandwidth, h_trans_bandwidth=h_trans_bandwidth, filter_length=filter_length) filter_list.append(h) filter_bank = np.vstack(filter_list) return filter_bank
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bandpass_filtfilt(rawsong, samp_freq, freq_cutoffs=(500, 10000)):\n if freq_cutoffs[0] <= 0:\n raise ValueError('Low frequency cutoff {} is invalid, '\n 'must be greater than zero.'\n .format(freq_cutoffs[0]))\n\n Nyquist_rate = samp_freq / 2\n if freq_cutoffs[1] >= Nyquist_rate:\n raise ValueError('High frequency cutoff {} is invalid, '\n 'must be less than Nyquist rate, {}.'\n .format(freq_cutoffs[1], Nyquist_rate))\n\n if rawsong.shape[-1] < 387:\n numtaps = 64\n elif rawsong.shape[-1] < 771:\n numtaps = 128\n elif rawsong.shape[-1] < 1539:\n numtaps = 256\n else:\n numtaps = 512\n\n cutoffs = np.asarray([freq_cutoffs[0] / Nyquist_rate,\n freq_cutoffs[1] / Nyquist_rate])\n # code on which this is based, bandpass_filtfilt.m, says it uses Hann(ing)\n # window to design filter, but default for matlab's fir1\n # is actually Hamming\n # note that first parameter for scipy.signal.firwin is filter *length*\n # whereas argument to matlab's fir1 is filter *order*\n # for linear FIR, filter length is filter order + 1\n b = scipy.signal.firwin(numtaps + 1, cutoffs, pass_zero=False)\n a = np.zeros((numtaps+1,))\n a[0] = 1 # make an \"all-zero filter\"\n padlen = np.max((b.shape[-1] - 1, a.shape[-1] - 1))\n filtsong = scipy.signal.filtfilt(b, a, rawsong, padlen=padlen)\n return filtsong", "def bandpass_filtfilt(rawsong, samp_freq, freq_cutoffs=None):\n\n Nyquist_rate = samp_freq / 2\n if freq_cutoffs is None:\n freq_cutoffs = [500, 10000]\n if rawsong.shape[-1] < 387:\n numtaps = 64\n elif rawsong.shape[-1] < 771:\n numtaps = 128\n elif rawsong.shape[-1] < 1539:\n numtaps = 256\n else:\n numtaps = 512\n\n cutoffs = np.asarray([freq_cutoffs[0] / Nyquist_rate,\n freq_cutoffs[1] / Nyquist_rate])\n # code on which this is based, bandpass_filtfilt.m, says it uses Hann(ing)\n # window to design filter, but default for matlab's fir1\n # is actually Hamming\n # note that first parameter for scipy.signal.firwin is filter *length*\n # whereas argument to matlab's fir1 is filter *order*\n # for linear FIR, filter length is filter order + 1\n b = scipy.signal.firwin(numtaps + 1, cutoffs, pass_zero=False)\n a = np.zeros((numtaps+1,))\n a[0] = 1 # make an \"all-zero filter\"\n padlen = np.max((b.shape[-1] - 1, a.shape[-1] - 1))\n filtsong = scipy.signal.filtfilt(b, a, rawsong, padlen=padlen)\n return filtsong", "def bandpass_filter(data, lowcut, highcut, fs=2000, numtaps=255):\n nyq = fs / 2\n\n # design filter\n fe1 = lowcut / nyq\n fe2 = highcut / nyq\n b = firwin(numtaps, (fe1, fe2), pass_zero=False)\n\n filtered = lfilter(b, 1, data)\n\n return filtered", "def __band_filter(data: dict, lowFreq: Union[int, float], highFreq: Union[int, float], timestep: int=0,\n samplingFreq: int=240, order: int=5, eegSensor: int=0, filterType: str='bandpass',\n lengthOfTestSeconds: Union[int, float]=32, example: int=0) -> dict:\n #Test\n # Filter.__band_filter_test(data=data, low=lowFreq, high=highFreq, samplingFreq=samplingFreq, order=order,\n # eegSensor=eegSensor, filterType=filterType, lengthOfTestSeconds=lengthOfTestSeconds)\n #Code\n nyq = 0.5 * samplingFreq\n low = lowFreq / nyq\n high = highFreq / nyq\n b, a = signal.butter(order, [low, high], btype=filterType)\n y = signal.lfilter(b, a, data['Signal'])\n ##Graph - This belongs somewhere else probably.\n # t = np.linspace(0, len(data), len(data), endpoint=False)\n # plt.plot(t, y, label='Sensor #' + str(eegSensor) + ' (' + str(lowFreq) + '-' + str(highFreq) + ') Hz')\n # plt.grid(True)\n # plt.axis('tight')\n # plt.xticks(range(10), range(lengthOfTestSeconds)) 
##32 seconds per test?\n # plt.xlabel(\"Time in Seconds\")\n # plt.legend(loc='upper left')\n # plt.show()\n output = {}\n timestep = []\n for index, eegChannel in enumerate(y[0]):#the extra [0] is becuase signal.lfilter() puts it in a 1D array. Grrr\n timestep.append(eegChannel)\n output['Signal'] = timestep\n Visualization.channelGraph(y[0][0])\n return output #output is 2D 64xTimeSamples", "def _make_filters(self):\n\n \"\"\"\n filter_bank = bandpass_filterbank(\n self.bands, fs=self.fs, order=order, output=output\n )\n\n return [lambda sig: sosfiltfilt(bpf, sig) for bpf in filter_bank]\n \"\"\"\n\n # This seems to work only for Octave bands out of the box\n centers = self.centers\n n = len(self.centers)\n\n new_bands = [[centers[0] / 2, centers[1]]]\n for i in range(1, n - 1):\n new_bands.append([centers[i - 1], centers[i + 1]])\n new_bands.append([centers[-2], self.fs / 2])\n\n n_freq = self.n_fft // 2 + 1\n freq_resp = np.zeros((n_freq, n))\n freq = np.arange(n_freq) / self.n_fft * self.fs\n\n for b, (band, center) in enumerate(zip(new_bands, centers)):\n lo = np.logical_and(band[0] <= freq, freq < center)\n freq_resp[lo, b] = 0.5 * (1 + np.cos(2 * np.pi * freq[lo] / center))\n\n if b != n - 1:\n hi = np.logical_and(center <= freq, freq < band[1])\n freq_resp[hi, b] = 0.5 * (1 - np.cos(2 * np.pi * freq[hi] / band[1]))\n else:\n hi = center <= freq\n freq_resp[hi, b] = 1.0\n\n filters = np.fft.fftshift(\n np.fft.irfft(freq_resp, n=self.n_fft, axis=0),\n axes=[0],\n )\n\n # remove the first sample to make them odd-length symmetric filters\n self.filters = filters[1:, :]", "def filters(data, f_interval, f_resolution=None, sampling=None, w_column=None):\n print('-------------------------- filters')\n\n # Avoid overwritting data:\n data0 = data.copy()\n \n # Avoid 0 as input as not peaks are found:\n if f_interval[0]==0:\n f_interval = [f_resolution, f_interval[1]]\n \n # Calculates power spectrum:\n Pf_power, P_comp, _, _, = tt.power(data0, f_interval, f_resolution, sampling, w_column)\n t = data0[:,0]\n f = Pf_power[:,0]\n alpha = P_comp[:,0] \n beta = P_comp[:,1]\n\n # Calculates P_filter:\n P_filter = np.zeros(len(t))\n fpicon = 2*np.pi*f # Optimization constant\n for i in range(len(t)):\n tfpicon = fpicon*t[i] # Optimization constant\n alpha_sin = alpha*np.sin(tfpicon)\n beta_cos = beta* np.cos(tfpicon)\n P_filter[i] = np.sum(alpha_sin + beta_cos)\n\n # Calculates window function:\n Pf_window = tt.window(data0, f_interval, f_resolution, sampling)\n P_window = Pf_window[:,1]\n \n # Bandpass/Lowpass and Highpass filter:\n S_low_band = P_filter/np.sum(P_window)\n S_high = data0[:,1]-S_low_band\n St_low_band = np.vstack([t, S_low_band]).T\n St_high = np.vstack([t, S_high]).T\n return St_low_band, St_high", "def bandpassFilter (self, lowerFreq, upperFreq):\n self.bandpassLimits = (lowerFreq, upperFreq)\n # stuff to do", "def make_filter_banks(power_frames, sampling_rate, NFFT, num_filt = 40):\n low_freq_mel = 0\n high_freq_mel = Hz_to_Mel(sampling_rate/2) # Convert Hz to Mel\n #mel_points = np.arange(low_freq_mel, high_freq_mel, (high_freq_mel - low_freq_mel)/(num_filt + 2)) # Equally spaced in Mel scale\n mel_points = np.linspace(low_freq_mel, high_freq_mel, num_filt + 2) # Equally spaced in Mel scale\n #hz_points = Mel_to_Hz(mel_points) # Convert Mel to Hz\n bins = np.floor((NFFT + 1) * Mel_to_Hz(mel_points) / sampling_rate)\n \n #bank = np.empty((num_filt, int(np.floor(NFFT / 2 + 1))))\n bank = np.zeros((num_filt, int(np.floor(NFFT / 2 + 1))))\n for m in range(1, num_filt + 1):\n f_s = 
bins[m - 1 : m + 2]\n f_prev = int(f_s[0]) # left\n f = int(f_s[1]) # center\n f_next = int(f_s[2]) # right\n\n np.put(bank[m - 1], list(range(f_prev)), 0) # k < f_prev\n\n for k in range(f_prev, f):\n np.put(bank, ((m - 1)*int(np.floor(NFFT / 2 + 1))) + k, (k - f_prev) / (f - f_prev)) \n \n for k in range(f, f_next):\n np.put(bank, ((m - 1)*int(np.floor(NFFT / 2 + 1))) + k, (f_next - k) / (f_next - f))\n\n np.put(bank[m - 1], list(range(f_next, len(bank))), 0) # k > f_next\n\n filter_banks = np.where(np.dot(power_frames, bank.T) == 0, np.finfo(float).eps, np.dot(power_frames, bank.T))\n #filter_banks = np.where(filter_banks == 0, np.finfo(float).eps, filter_banks) # Numerical Stability\n filter_banks = 20 * np.log10(filter_banks) # dB\n\n return filter_banks", "def butterworth_bandpass_filter( values, lowFreq, highFreq, sampleFreq, order=5):\n nyq = 0.5 * sampleFreq\n low = lowFreq / nyq\n high = highFreq /nyq\n b,a = butter( order, [low,high], btype='band' )\n return lfilter(b, a, values)", "def _generate_conv1D_filter_widths(\n num_unique_filters: int = 25,\n list_len_bounds: List[int] = [5, 5],\n first_filter_width_bounds: List[int] = [50, 150],\n probability_vary_filter_width: float = 0.5,\n vary_filter_scale_bounds: List[float] = [1.25, 1.75],\n) -> List[List[int]]:\n list_of_filters: List[List[int]] = []\n\n while len(list_of_filters) < num_unique_filters:\n\n # Generate length of filter sizes\n list_len = np.random.randint(\n low=list_len_bounds[0],\n high=list_len_bounds[1] + 1,\n size=1,\n dtype=int,\n )[0]\n\n # Generate first filter size\n first_filter_width = np.random.randint(\n low=first_filter_width_bounds[0],\n high=first_filter_width_bounds[1] + 1,\n size=1,\n dtype=int,\n )[0]\n first_filter_width = _ensure_even_number(first_filter_width)\n\n # Randomly determine if filter size varies or not\n if probability_vary_filter_width >= np.random.rand():\n\n # Randomly generate filter scale value by which to divide subsequent\n # filter sizes\n vary_filter_scale = np.random.uniform(\n low=vary_filter_scale_bounds[0],\n high=vary_filter_scale_bounds[1],\n )\n\n # Iterate through list of filter sizes\n this_filter = []\n\n for _ in range(list_len):\n this_filter.append(first_filter_width)\n\n # Check if we want to vary filter size\n current_filter_width = first_filter_width\n first_filter_width = int(first_filter_width / vary_filter_scale)\n first_filter_width = _ensure_even_number(first_filter_width)\n\n # If reducing filter size makes it 0, reset to prior filter size\n if first_filter_width == 0:\n first_filter_width = current_filter_width\n\n if this_filter not in list_of_filters:\n list_of_filters.append(this_filter)\n\n # Else the filter size is constant\n else:\n list_of_filters.append([first_filter_width])\n\n return list_of_filters", "def generate_filter(length, fs, f_low=None, f_high=None, mode='box', is_plot=False):\n\n freqs = np.fft.fftfreq(int(length), d=(1. 
/ float(fs)))\n\n filter_array = np.ones(length)\n\n if f_low is None and f_high is None:\n print('no filtering required!')\n elif f_low is None and f_high is not None:\n print('low-pass fileter')\n if f_high <= 0:\n raise(ValueError, 'Higher cutoff frquency should be positive!')\n filter_array[freqs >= f_high] = 0.\n filter_array[freqs <= -f_high] = 0.\n elif f_low is not None and f_high is None:\n print('high-pass fileter')\n if f_low < 0:\n raise (ValueError, 'Lower cutoff frquency should be non-negative!')\n filter_array[np.logical_and((freqs >= -f_low), (freqs <= f_low))] = 0.\n else:\n print('band-pass filter')\n if f_high <= 0:\n raise (ValueError, 'Higher cutoff frquency should be positive!')\n if f_low < 0:\n raise (ValueError, 'Lower cutoff frquency should be non-negative!')\n filter_array[freqs >= f_high] = 0.\n filter_array[freqs <= -f_high] = 0.\n filter_array[np.logical_and((freqs >= -f_low), (freqs <= f_low))] = 0.\n\n if mode == '1/f':\n filter_array[1:] = filter_array[1:] / abs(freqs[1:])\n filter_array[0] = 0\n filter_array = bas.array_nor(filter_array)\n elif mode == 'box':\n filter_array[0] = 0\n else:\n raise(NameError, 'Variable \"mode\" should be either \"1/f\" or \"box\"!')\n\n if is_plot:\n plot_array = zip(freqs, filter_array)\n plot_array.sort(key=lambda x: x[0])\n plot_array = zip(*plot_array)\n\n _ = plt.figure(figsize=(10, 3))\n plt.plot(plot_array[0], plot_array[1])\n plt.xlabel('frequency (Hz)')\n plt.ylim([-0.1, 1.1])\n plt.show()\n\n return freqs, filter_array", "def bandpass_filter(s, sample_rate, low_freq, high_freq, filter_order=5, rescale=False):\n #create a butterworth filter\n nyq = sample_rate / 2.0\n f = np.array([low_freq, high_freq]) / nyq\n b,a = filter_design.butter(filter_order, f, btype='bandpass')\n\n #filter the signal\n filtered_s = filtfilt(b, a, s)\n\n if rescale:\n #rescale filtered signal\n filtered_s /= filtered_s.max()\n filtered_s *= s.max()\n\n return filtered_s", "def butter_bandpass_filter(data, lowcut, highcut, fs, order=5, axis=0): \n omega = 0.5 * fs\n low = lowcut / omega\n high = highcut / omega\n b, a = signal.butter(order, [low, high], btype='band')\n y = signal.lfilter(b, a, data, axis=0)\n return y", "def bandstop_filter(data, lowcut, highcut, fs=2000, numtaps=255):\n nyq = fs / 2\n\n # design filter\n fe1 = lowcut / nyq\n fe2 = highcut / nyq\n b = firwin(numtaps, (fe1, fe2), pass_zero=True)\n\n filtered = lfilter(b, 1, data)\n\n return filtered", "def butterworth_filter(freq):\n\tf_raw = 1/(0.00000002*100*33)\n\tb = np.array([[-32092,15750],[-31238,14895]])*2.0**(-14)\n\tomega = 2*np.pi*freq/f_raw\n\te1, e2 = np.exp(-1j*omega), np.exp(-2j*omega)\n\ttmp = (1+2*e1+e2)**2/(1+b[0,0]*e1+b[0,1]*e2)/(1+b[1,0]*e1+b[1,1]*e2)\n\treturn tmp * (1+sum(b[0]))*(1+sum(b[1]))/16", "def butterworth(dx, order, cutoff, /, btype='low'):\n # Initial stuff\n # * Need to run *forward and backward* to prevent time-shifting.\n # * The 'analog' means units of cutoffs are rad/s.\n # * Unlike Lanczos filter, the *length* of this should be\n # determined always as function of timestep, because really high\n # order filters can get pretty wonky.\n # * Cutoff is point at which gain reduces to 1/sqrt(2) of the\n # initial frequency. 
If doing bandpass, can\n # N = (width/dx)//1 # convert to timestep units\n # N = (N//2)*2 + 1 # odd numbered\n N = order # or order\n analog = False # lfilter seems to need digital\n if analog:\n cutoff = 2 * np.pi / (cutoff / dx) # from wavelengths to rad/steps\n else:\n cutoff = 1.0 / cutoff # to Hz, or cycles/unit\n cutoff = cutoff * (2 * dx) # to cycles / (2 timesteps)\n if cutoff > 1:\n raise ValueError(\n 'Cuttoff frequency must be in [0, 1]. Remember you pass a cutoff '\n '*wavelength* to this function, not a frequency.'\n )\n\n # Apply filter\n print(f'Order-{order} Butterworth filter')\n b, a = signal.butter(N - 1, cutoff, btype=btype, analog=analog, output='ba')\n return b, a", "def butterworth_filter(signal, Fs, highpassfreq=None, lowpassfreq=None, order=4, filtfunc='filtfilt'):\n Fn = Fs / 2.\n \n # set the function for filtering\n if filtfunc is 'lfilter':\n ffunc = spsig.lfilter\n elif filtfunc is 'filtfilt':\n ffunc = spsig.filtfilt\n else:\n raise ValueError(\"filtfunc must to be either 'filtfilt' or 'lfilter'\")\n \n # set parameters\n if lowpassfreq and highpassfreq:\n if highpassfreq < lowpassfreq:\n Wn = (highpassfreq / Fn, lowpassfreq / Fn)\n btype = 'bandpass'\n else:\n Wn = (lowpassfreq / Fn, highpassfreq / Fn)\n btype = 'bandstop'\n elif lowpassfreq:\n Wn = lowpassfreq / Fn\n btype = 'lowpass'\n elif highpassfreq:\n Wn = highpassfreq / Fn\n btype = 'highpass'\n else:\n raise ValueError(\"Specify highpassfreq and/or lowpathfreq\")\n \n # filter design\n b, a = spsig.butter(order, Wn, btype=btype)\n \n return ffunc(b, a, signal)", "def butter_bandpass_filter(data, lowcut, highcut, fs, order=2):\n nyq = 0.5 * fs\n low = lowcut / nyq\n high = highcut / nyq\n \n # butter() and lfilter() are from scipy.signal\n \n b, a = butter(order, [low, high], btype='band')\n y = lfilter(b, a, data)\n return y", "def butter_bandpass_filter(data, lowcut, highcut, fs, order=1):\n b, a = butter_bandpass(lowcut, highcut, fs, order=order)\n y = filtfilt(b, a, data)\n return y", "def filter_callback(self, assemblers, filters):\n # Find the characteristic length of the domain and set the filter length scale\n r0 = self.r0_frac * self.a\n mfilter = TopOptUtils.Mfilter(self.N, assemblers, filters, dim=3, r=r0)\n mfilter.initialize()\n return mfilter", "def bandpass_cnt(data, low_cut_hz, high_cut_hz, fs, filt_order=3, axis=0):\n if (low_cut_hz == 0 or low_cut_hz is None) and (\n high_cut_hz == None or high_cut_hz == fs / 2.0):\n log.info(\"Not doing any bandpass, since low 0 or None and \"\n \"high None or nyquist frequency\")\n return data.copy()\n if low_cut_hz == 0 or low_cut_hz == None:\n log.info(\"Using lowpass filter since low cut hz is 0 or None\")\n return lowpass_cnt(data, high_cut_hz, fs, filt_order=filt_order, axis=axis)\n if high_cut_hz == None or high_cut_hz == (fs / 2.0):\n log.info(\n \"Using highpass filter since high cut hz is None or nyquist freq\")\n return highpass_cnt(data, low_cut_hz, fs, filt_order=filt_order, axis=axis)\n\n nyq_freq = 0.5 * fs\n low = low_cut_hz / nyq_freq\n high = high_cut_hz / nyq_freq\n b, a = scipy.signal.butter(filt_order, [low, high], btype='bandpass')\n assert filter_is_stable(a), \"Filter should be stable...\"\n data_bandpassed = scipy.signal.lfilter(b, a, data, axis=axis)\n return data_bandpassed", "def make_erb_filters(fs, centre_freqs, width=1.0):\n T = 1/fs\n # Change the followFreqing three parameters if you wish to use a different\n # ERB scale. 
Must change in ERBSpace too.\n # TODO: factor these out\n ear_q = 9.26449 # Glasberg and Moore Parameters\n min_bw = 24.7\n order = 1\n\n erb = width*((centre_freqs/ear_q)**order + min_bw**order)**(1/order)\n B = 1.019*2*np.pi*erb\n\n arg = 2*centre_freqs*np.pi*T\n vec = np.exp(2j*arg)\n\n A0 = T\n A2 = 0\n B0 = 1\n B1 = -2*np.cos(arg)/np.exp(B*T)\n B2 = np.exp(-2*B*T)\n \n rt_pos = np.sqrt(3 + 2**1.5)\n rt_neg = np.sqrt(3 - 2**1.5)\n \n common = -T * np.exp(-(B * T))\n \n # TODO: This could be simplified to a matrix calculation involving the\n # constant first term and the alternating rt_pos/rt_neg and +/-1 second\n # terms\n k11 = np.cos(arg) + rt_pos * np.sin(arg)\n k12 = np.cos(arg) - rt_pos * np.sin(arg)\n k13 = np.cos(arg) + rt_neg * np.sin(arg)\n k14 = np.cos(arg) - rt_neg * np.sin(arg)\n\n A11 = common * k11\n A12 = common * k12\n A13 = common * k13\n A14 = common * k14\n\n gain_arg = np.exp(1j * arg - B * T)\n\n gain = np.abs(\n (vec - gain_arg * k11)\n * (vec - gain_arg * k12)\n * (vec - gain_arg * k13)\n * (vec - gain_arg * k14)\n * ( T * np.exp(B*T)\n / (-1 / np.exp(B*T) + 1 + vec * (1 - np.exp(B*T)))\n )**4\n )\n\n allfilts = np.ones_like(centre_freqs)\n \n fcoefs = np.column_stack([\n A0*allfilts, A11, A12, A13, A14, A2*allfilts,\n B0*allfilts, B1, B2,\n gain\n ])\n \n return fcoefs", "def bandpass_filter(files, lowpass_freq=0.1, highpass_freq=0.01, tr=2):\n import os\n\n import nibabel as nb\n import numpy as np\n from nipype.utils.filemanip import (\n filename_to_list,\n list_to_filename,\n split_filename\n )\n\n fs = 1./tr\n\n out_files = []\n for filename in filename_to_list(files):\n path, name, ext = split_filename(filename)\n out_file = os.path.join(os.getcwd(), name + '_bandpassed' + ext)\n\n img = nb.load(filename)\n timepoints = img.shape[-1]\n F = np.zeros((timepoints))\n\n lowidx = int(timepoints / 2) + 1\n if lowpass_freq > 0:\n lowidx = np.round(float(lowpass_freq) / fs * timepoints)\n\n highidx = 0\n if highpass_freq > 0:\n highidx = np.round(float(highpass_freq) / fs * timepoints)\n F[int(highidx):int(lowidx)] = 1\n F = ((F + F[::-1]) > 0).astype(int)\n data = img.get_data()\n if np.all(F == 1):\n filtered_data = data\n else:\n filtered_data = np.real(np.fft.ifftn(np.fft.fftn(data) * F))\n img_out = nb.Nifti1Image(filtered_data, img.affine, img.header)\n img_out.to_filename(out_file)\n out_files.append(out_file)\n\n return list_to_filename(out_files)", "def apply_filter(data, filter_bank, sfreq): \n if data.ndim == 1:\n filtered = np.zeros((1, filter_bank.shape[0], sfreq))\n for filt in range(filter_bank.shape[0]):\n filtered[0, filt, :] = np.convolve(filter_bank[filt,:], data)[int(sfreq-sfreq/2):int(sfreq+sfreq/2)]\n elif data.ndim == 2:\n filtered = np.zeros((data.shape[0], filter_bank.shape[0], sfreq))\n for chan in range(data.shape[0]):\n for filt in range(filter_bank.shape[0]):\n filtered[chan, filt, :] = np.convolve(filter_bank[filt, :], \\\n data[chan,:])[int(sfreq-sfreq/2):int(sfreq+sfreq/2)] # mode=\"full\"\n return filtered", "def apply_bandpass_filter_timeseries(self, folder_name, indices, start_stop_freq, stop_stop_freq):\n (x_index, y_index) = indices\n photo_list = self.get_photo_list(folder_name)\n\n ts = self.get_pixel_timeseries(folder_name, (x_index, y_index))\n self.plot_fft_pixel_timeseries(folder_name, ts, str(x_index) + '_' + str(y_index) + 'pre_butterworth')\n n = len(ts)\n frequency = self.get_sampling_frequency(folder_name)\n d = 1.0 / frequency # 'sample spacing'\n fig, ax = plt.subplots()\n sample_freqs = np.fft.rfftfreq(n, d)\n 
fourier = np.fft.rfft(ts)\n print(sample_freqs)\n nyquist = frequency / 2.0\n\n start_stop_band = start_stop_freq / nyquist\n stop_stop_band = stop_stop_freq / nyquist\n\n print(start_stop_band)\n print(stop_stop_band)\n\n sos = sgnl.butter(2, Wn=[start_stop_band, stop_stop_band], btype='bandstop', output='sos')\n filtered = sgnl.sosfilt(sos, ts)\n self.plot_fft_pixel_timeseries(folder_name, filtered, str(x_index) + '_' + str(y_index) + 'post_butterworth')\n fig, ax = plt.subplots()\n indices = self.get_indices_from_filenames(folder_name)\n index_dates = dates.date2num(indices)\n ax.plot_date(index_dates, ts, xdate=True, linestyle='solid', marker='None',\n label=str(x_index) + ' , ' + str(y_index))\n ax.plot_date(index_dates, filtered, xdate=True, linestyle='solid', marker='None',\n label=str(x_index) + ' , ' + str(y_index) + ' filtered')\n\n ax.legend()\n ax.grid(b=True, which='major', color='#666666', linestyle='-')\n\n # Show the minor grid lines with very faint and almost transparent grey lines\n ax.minorticks_on()\n ax.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.2)\n fig.set_figwidth(40)\n fig.savefig(self.parent_folder + 'analysis/timeseries_filtered_' + str(x_index) + '_' + str(y_index) + '.png')\n fig.savefig(self.parent_folder + 'analysis/timeseries_filtered_' + str(x_index) + '_' + str(y_index) + '.svg')\n fig.clf()", "def bandpass_filterbank(bands, fs=1.0, order=8, output=\"sos\"):\n\n filters = []\n nyquist = fs / 2.0\n\n for band in bands:\n # remove bands above nyquist frequency\n if band[0] >= nyquist:\n raise ValueError(\"Bands should be below Nyquist frequency\")\n\n # Truncate the highest band to Nyquist frequency\n norm_band = np.minimum(0.99, np.array(band) / nyquist)\n\n # Compute coefficients\n coeffs = butter(order / 2, norm_band, \"bandpass\", output=output)\n filters.append(coeffs)\n\n return filters", "def _butter_bandpass_filter(self, data: np.ndarray, lowcut: float, highcut: float, fs: float, order: int = 5):\n b, a = self._butter_bandpass(lowcut, highcut, fs, order=order)\n y = lfilter(b, a, data)\n return y", "def LP_filt(filterLength, x):\n b=np.ones(filterLength,)/(filterLength) #Finite Impulse Response (FIR) Moving Average (MA) filter with one second filter length\n a=1\n y = signal.filtfilt(b, a, x)\n return y", "def Pool2DOptionsAddFilterWidth(builder, filterWidth):\n return AddFilterWidth(builder, filterWidth)", "def filterbank(min_freq, max_freq, number, srate, N):\n points = numpy.linspace(M(min_freq), M(max_freq), number + 2)\n freqs = Mi(points)\n bins = freq2bin(freqs, srate, N)\n\n filters = numpy.zeros((number, N/2 +1))\n\n for i in xrange(0, number):\n bot = int(math.floor(bins[i]))\n mid = int(round(bins[i+1]))\n top = int(math.ceil(bins[i+2]))\n\n filters[i][bot:mid] = numpy.linspace(0, 1, mid - bot +1)[:-1]\n filters[i][mid:top+1] = numpy.linspace(1, 0, top - mid +1)\n\n return filters" ]
[ "0.6331097", "0.6286624", "0.62640613", "0.6151228", "0.613698", "0.60637957", "0.60447794", "0.60300845", "0.60072994", "0.59426826", "0.59128374", "0.58972484", "0.587391", "0.5848961", "0.5806512", "0.5764639", "0.57619053", "0.57331616", "0.5716473", "0.57109064", "0.5686527", "0.5668391", "0.56621665", "0.5656835", "0.5654213", "0.5643664", "0.56430876", "0.5591863", "0.5577557", "0.5555655" ]
0.78691727
0
Loads the database of People objects as a specified data type
def load_database(database_type): f = open("database.p", "rb") database = pickle.load(f) f.close() if database_type == "dict": return database elif database_type == "list": return database.values()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _load_db(self):\n for type_ in self._types:\n try:\n type_.table(self._metadata)\n except InvalidRequestError:\n pass\n # Reflect metadata so auto-mapping works\n self._metadata.reflect(self._engine)\n # Make sure the tables exist\n self._metadata.create_all()", "def importPerson ( c, peopleInstance ):\n assert str(type(c)) == \"<type '_mysql.connection'>\"\n assert str(type(peopleInstance)) == \"<type 'instance'>\"\n personID = peopleInstance.attrib[\"personIdent\"]\n \n #Gets location sub elements in list. Inserts into PeopleLocation table by indexing list\n locationElements = list(peopleInstance.findall(\"Location\"))\n for instance in locationElements:\n sqlQuery ( c, \"insert into PeopleLocations values ( '\"+personID+\"', '\"+instance[0].text+\"', '\"+instance[1].text+\"', '\"+instance[2].text+\"');\")\n \n #Gets list of all RelatedOrganizations and inserts into PeopleToOrganizations table\n relatedOrgs = peopleInstance.find(\"RelatedOrganizations\")\n if relatedOrgs != None :\n for instance in relatedOrgs:\n sqlQuery ( c, \"insert into PeopleToOrganizations values ( '\"+personID+\"', '\"+instance.attrib[\"organizationIdent\"]+\"');\")\n\n #Gets all URL's in a list. Indexes list, splices tag to get type, inserts data into table\n externalResources = peopleInstance.find(\"ExternalResources\")\n \n #Get all resources. Checks for Citation because it's the only one not ending in 'URL'. Get index of URL for others to splice off. Add to table\n for instance in externalResources:\n sqlQuery ( c, \"insert into PersonExternalResources values ( '\"+personID+\"', '\"+instance.tag+\"', '\"+instance.text+\"');\")\n \n #Finds values of remaining elements and inserts into People table\n firstName = peopleInstance.find(\".//\" + \"FirstName\").text\n middleName = peopleInstance.findtext(\".//\" + \"MiddleName\", \"\")\n lastName = peopleInstance.find(\".//\" + \"LastName\").text\n suffix = peopleInstance.findtext(\"Suffix\", \"\")\n kind = peopleInstance.find(\"Kind\").attrib[\"personKindIdent\"]\n sqlQuery ( c, \"insert into People values ( '\"+personID+\"', '\"+firstName+\"', '\"+middleName+\"', '\"+lastName+\"', '\"+suffix+\"', '\"+kind+\"');\")", "def importPersonKind ( c, personKindInstance ) :\n assert str(type(c)) == \"<type '_mysql.connection'>\"\n assert str(type(personKindInstance)) == \"<type 'instance'>\"\n personKindID = personKindInstance.attrib[\"personKindIdent\"]\n name = personKindInstance.find(\"Name\").text\n description = personKindInstance.find(\"Description\").text \n sqlQuery ( c, \"insert into PersonKind values ( '\"+personKindID+\"', '\"+name+\"', '\"+description+\"');\")", "def data_types(self):", "def populate_persons():\n\n logging.basicConfig(level=logging.INFO)\n logger = logging.getLogger(__name__)\n\n database = SqliteDatabase('personjob.db')\n\n logger.info('Working with Person class')\n\n PERSON_NAME = 0\n LIVES_IN_TOWN = 1\n NICKNAME = 2\n\n people = [\n ('Andrew', 'Sultan', 'Andy'),\n ('Peter', 'Seattle', None),\n ('Susan', 'Boston', 'Beannie'),\n ('Pam', 'Coventry', 'PJ'),\n ('Steven', 'Stevens Pass', None),\n ('Ryan', 'New York', 'Private'),\n ('Pamela', 'Spokane', 'Patrol'),\n ('Monica', 'Portland', None),\n ]\n\n logger.info('Creating Person records: iterate through the list of tuples')\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for person in people:\n with database.transaction():\n new_person = Person.create(\n person_name = person[PERSON_NAME],\n lives_in_town = person[LIVES_IN_TOWN],\n nickname = 
person[NICKNAME])\n new_person.save()\n logger.info('Database add successful')\n\n logger.info('Print the Person records we saved...')\n for saved_person in Person:\n logger.info(f'{saved_person.person_name} lives in {saved_person.lives_in_town} ' +\\\n f'and likes to be known as {saved_person.nickname}')\n\n except Exception as e:\n logger.info(f'Error creating = {person[PERSON_NAME]}')\n logger.info(e)\n\n finally:\n logger.info('database closes')\n database.close()", "def read_people():\n try:\n conn = sqlite3.connect(settings.database_name)\n conn.row_factory = sqlite3.Row\n c = conn.cursor()\n c.execute(\"PRAGMA foreign_keys = ON\")\n c.execute(\"SELECT * FROM person LIMIT {0};\".format(settings.search_result_row_limit))\n p = []\n for row in c:\n _person = Person()\n _person.person_id = row[\"personid\"]\n _person.first_name = row[\"firstname\"]\n _person.last_name = row[\"lastname\"]\n _person.middle_initial = row[\"middleinitial\"]\n _person.nick_name = row[\"nickname\"]\n _person.date_of_birth = row[\"dateofbirth\"]\n _person.date_of_death = row[\"dateofdeath\"]\n p.append(_person)\n conn.close()\n return p\n except:\n return []", "def populate_person():\n\n logging.basicConfig(level=logging.INFO)\n logger = logging.getLogger(__name__)\n\n database = SqliteDatabase('personjob.db')\n\n logger.info('Working with Person class')\n logger.info('Note how I use constants and a list of tuples as a simple '\n 'schema')\n logger.info('Normally you probably will have prompted for this from a '\n 'user')\n\n PERSON_NAME = 0\n LIVES_IN_TOWN = 1\n NICKNAME = 2\n\n people = [\n ('Andrew', 'Sumner', 'Andy'),\n ('Peter', 'Seattle', None),\n ('Susan', 'Boston', 'Beannie'),\n ('Pam', 'Coventry', 'PJ'),\n ('Steven', 'Colchester', None),\n ]\n\n logger.info('Creating Person records: iterate through the list of tuples')\n logger.info('Prepare to explain any errors with exceptions')\n logger.info('and the transaction tells the database to fail on error')\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for person in people:\n with database.transaction():\n new_person = Person.create(\n person_name=person[PERSON_NAME],\n lives_in_town=person[LIVES_IN_TOWN],\n nickname=person[NICKNAME])\n new_person.save()\n logger.info('Database add successful')\n\n logger.info('Print the Person records we saved...')\n for saved_person in Person:\n logger.info(f'{saved_person.person_name} lives in '\n f'{saved_person.lives_in_town} '\n f'and likes to be known as {saved_person.nickname}')\n\n except Exception as e:\n logger.info(f'Error creating = {person[PERSON_NAME]}')\n logger.info(e)\n logger.info('See how the database protects our data')\n\n finally:\n logger.info('database closes')\n database.close()", "def data_types():\n\n return ...", "def convert_data_types(fields, src_db='mysql', dest_db='postgres'):\n\n data_type_map = {\n 'mysql': {\n 'postgres': {\n 'date': 'date',\n 'tinyint': 'smallint',\n 'smallint': 'smallint',\n 'mediumint': 'integer',\n 'int': 'bigint',\n 'bigint': 'numeric',\n 'float': 'real',\n 'double': 'double precision',\n 'tinytext': 'varchar',\n 'mediumtext': 'varchar',\n 'longtext': 'varchar',\n 'varchar': 'varchar',\n 'text': 'varchar',\n 'char': 'char',\n 'binary': 'bytea',\n 'varbinary': 'bytea',\n 'tinyblob': 'bytea',\n 'blob': 'bytea',\n 'mediumblob': 'bytea',\n 'longblob': 'bytea',\n 'datetime': 'timestamp',\n 'time': 'time',\n 'decimal': 'decimal',\n 'json': 'jsonb'\n }\n }\n }\n\n for elem in fields:\n elem['data_type'] = 
data_type_map[src_db][dest_db][elem['data_type']]\n\n if elem['data_type'] == 'decimal':\n elem['data_type'] += f'''{int(elem['numeric_precision']), int(elem['numeric_scale'])}'''\n\n fields = {e['column_name']: e['data_type'] for e in fields}\n\n return fields", "def read_person(person_id):\n try:\n conn = sqlite3.connect(settings.database_name)\n conn.row_factory = sqlite3.Row\n c = conn.cursor()\n c.execute(\"PRAGMA foreign_keys = ON\")\n c.execute(\"SELECT * FROM person WHERE personid =?\", (person_id,))\n _person = None\n for row in c:\n _person = Person()\n _person.person_id = row[\"personid\"]\n _person.first_name = row[\"firstname\"]\n _person.last_name = row[\"lastname\"]\n _person.middle_initial = row[\"middleinitial\"]\n _person.nick_name = row[\"nickname\"]\n _person.date_of_birth = row[\"dateofbirth\"]\n _person.date_of_death = row[\"dateofdeath\"]\n conn.close()\n return _person\n except:\n return None", "def _get_types(self):\n\n db = Database()\n self.c_built_ins = list(map(lambda tup: tup[0], db.select_built_types()))\n self.c_built_in_array_types = r'^(' + '|'.join(self.escaped(self.c_built_ins)) + ')\\[[0-9]*\\]'\n self.c_types = list(map(lambda tup: tup[0], db.select_types()))\n self.c_array_types = r'^(' + '|'.join(self.escaped(self.c_types)) + ')\\[[0-9]*\\]'\n db.close_connection()", "def test_type_conversion(registry: AdapterLoader) -> None:\n registry.add(\"dummy\", FakeAdapterWithDateTime)\n\n connection = connect(\":memory:\", [\"dummy\"], isolation_level=\"IMMEDIATE\")\n cursor = connection.cursor()\n\n cursor.execute('SELECT * FROM \"dummy://\"')\n assert cursor.fetchall() == []\n\n cursor.execute(\n 'INSERT INTO \"dummy://\" (birthday) VALUES (?)',\n (datetime(2021, 1, 1, 0, 0),),\n )\n cursor.execute('SELECT * FROM \"dummy://\"')\n assert cursor.fetchall() == [\n (\n None,\n datetime(2021, 1, 1, 0, 0),\n None,\n None,\n ),\n ]\n\n # make sure datetime is stored as a datetime\n assert FakeAdapterWithDateTime.data == [\n {\n \"age\": None,\n \"birthday\": datetime(2021, 1, 1, 0, 0),\n \"name\": None,\n \"pets\": None,\n \"rowid\": 1,\n },\n ]\n assert isinstance(FakeAdapterWithDateTime.data[0][\"birthday\"], datetime)\n\n cursor.execute(\n 'SELECT * FROM \"dummy://\" WHERE birthday > ?',\n (datetime(2020, 12, 31, 0, 0),),\n )\n assert cursor.fetchall() == [\n (None, datetime(2021, 1, 1, 0, 0), None, None),\n ]", "def populate_db():\n\n populate_table(db, models.Department, departments_data)\n populate_table(db, models.Employee, employees_data)", "def load_data(ctx, klass=None):\n if klass:\n if klass and not klass.startswith(\"public_data.models\"):\n klass = f\"public_data.models.{klass}\"\n options = {\"class\": klass}\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"load_data\", **options)", "def test_dummydb_add_data_to_table_wrong_column_type(self):\n db = DummyDB()\n columns = {\n \"one\": int,\n \"two\": str,\n \"three\": bool,\n }\n db.create_table(\"new_table\", columns)\n result = db.select(\"new_table\", two=1)", "def test_get_contact_person_types(self):\n pass", "def load_data(self, **kwargs):\n type = kwargs['type']\n file = kwargs['file']\n with open(file, 'r') as data_file:\n for line in data_file:\n line = line[:-1]\n items_dict = ast.literal_eval(line)\n\n item = type.from_dict(items_dict)\n\n self.add_item(item, lambda i: i.uid)", "def retrieve_from_db(self):\n pass", "def populate_db():\n\n logging.basicConfig(level=logging.INFO)\n logger = logging.getLogger(__name__)\n\n database = SqliteDatabase('personjob.db')\n\n 
logger.info('Working with Person class')\n logger.info('Note how I use constants and a list of tuples as a simple schema')\n logger.info('Normally you probably will have prompted for this from a user')\n\n PERSON_NAME = 0\n LIVES_IN_TOWN = 1\n NICKNAME = 2\n\n people = [\n ('Andrew', 'Sumner', 'Andy'),\n ('Peter', 'Seattle', None),\n ('Susan', 'Boston', 'Beannie'),\n ('Pam', 'Coventry', 'PJ'),\n ('Steven', 'Colchester', None),\n ]\n\n logger.info('Creating Person records: iterate through the list of tuples')\n logger.info('Prepare to explain any errors with exceptions')\n logger.info('and the transaction tells the database to fail on error')\n\n logger.info('Working with Job class')\n logger.info('Creating Job records. We use the foreign key')\n\n JOB_NAME = 0\n START_DATE = 1\n END_DATE = 2\n SALARY = 3\n PERSON_EMPLOYED = 4\n\n jobs = [\n ('Analyst', '2001-09-22', '2003-01-30',65500, 'Andrew'),\n ('Senior analyst', '2003-02-01', '2006-10-22', 70000, 'Andrew'),\n ('Senior business analyst', '2006-10-23', '2016-12-24', 80000, 'Andrew'),\n ('Admin supervisor', '2012-10-01', '2014-11,10', 45900, 'Peter'),\n ('Admin manager', '2014-11-14', '2018-01,05', 45900, 'Peter')\n ]\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n try:\n for person in people:\n with database.transaction():\n new_person = Person.create(\n person_name = person[PERSON_NAME],\n lives_in_town = person[LIVES_IN_TOWN],\n nickname = person[NICKNAME])\n new_person.save()\n logger.info('Database add successful')\n\n logger.info('Print the Person records we saved...')\n for saved_person in Person:\n logger.info(f'{saved_person.person_name} lives in {saved_person.lives_in_town} and likes to be known as {saved_person.nickname}')\n\n except Exception as e:\n logger.info(f'Error creating = {person[PERSON_NAME]}')\n logger.info(e)\n logger.info('See how the database protects our data')\n try:\n for job in jobs:\n with database.transaction():\n new_job = Job.create(\n job_name = job[JOB_NAME],\n start_date = job[START_DATE],\n end_date = job[END_DATE],\n salary = job[SALARY],\n person_employed = job[PERSON_EMPLOYED])\n new_job.save()\n\n logger.info('Reading and print all Job rows (note the value of person)...')\n for job in Job:\n logger.info(f'{job.job_name} : {job.start_date} to {job.end_date} for {job.person_employed}')\n\n except Exception as e:\n logger.info(f'Error creating = {job[JOB_NAME]}')\n logger.info(e)\n\n finally:\n logger.info('database closes')\n database.close()", "def load_perso(self, id_utilisateur):\n \"\"\"\n res = self.db.requete_db(\\\"\"\"SELECT * FROM utilisateurs\\\n WHERE id_utilisateur=?\\\"\"\",\\\n (id_utilisateur,))\n \"\"\"\n perso = Personnage(self, id_utilisateur)\n self.personnages[id_utilisateur] = perso", "def load_people(self, file_path):\n pass", "def import_data_to_database(self, database_type, data):\n\n if database_type == \"render\":\n connection = sqlite3.connect(self.filepath_render_database)\n pointer = connection.cursor()\n pointer.executemany(\"\"\"\n INSERT INTO render_information\n VALUES (?,?,?,?,?,?,?,?) \n \"\"\",\n (data)\n )\n connection.commit()\n connection.close()\n print(\"addet render information to database\")\n if database_type == \"object\":\n connection = sqlite3.connect(self.filepath_object_database)\n pointer = connection.cursor()\n pointer.executemany(\"\"\"\n INSERT INTO object_information\n VALUES (?,?,?,?,?,?,?,?,?,?,?,?) 
\n \"\"\",\n (data)\n )\n connection.commit()\n connection.close()\n print(\"addet objectinformation information to database\")\n if database_type == \"output\":\n connection = sqlite3.connect(self.filepath_object_database)\n pointer = connection.cursor()\n pointer.executemany(\"\"\"\n INSERT INTO output_information\n VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,\n ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) \n \"\"\",\n (data)\n )\n connection.commit()\n connection.close()\n print(\"addet outputinformation information to database\")\n \n\n return", "def load(cls, data):\n cls.check_for_schema()\n models = None\n if type(data) is list:\n models = cls.json_to_models(data)\n # if len(models) > 0 and issubclass(models[0].__class__, PillowtalkBase):\n # # [m._add_relationships() for m in models]\n elif type(data) is dict:\n models = cls.json_to_model(data)\n else:\n raise PillowtalkError(\"Data not recognized. Supply a dict or list: \\\"{0}\\\"\".format(data))\n return models", "def load(cls, data):\n if cls.Schema is None: # pragma: no cover\n msg = (\"Schema of this Model are not specified! For example: \"\n \"class User(BaseModel): ...; class UserSchema(Schema): ...; \"\n \"User.Schema = UserSchema\")\n raise NotImplementedError(msg)\n\n res = cls.Schema().load(data)\n if len(res.errors) == 0:\n return res.data\n else:\n raise Exception(\"Errors: {}\".format(res.errors))", "def class_to_db(self):", "def test_find_relation_types(self):\n pass", "def load(self, file_name, file_type):\n\n del self.db[:] # Empty existing DB list\n\n if file_type == 'db':\n # Create filename from file name and file type\n file_name = 'form_db_' + file_name\n\n elif file_type == 'list':\n # Create filename from file name and file type\n file_name = 'form_lt_' + file_name\n\n else:\n print \"Invalid file type. Must be 'db' or 'list'.\"\n return False\n\n file_path = 'JSONs/' + file_name + '.json'\n\n if isfile(file_path):\n with open(file_path, 'r') as f:\n self.db = json.load(f)\n f.close()\n\n else:\n print \"File does not exist.\"\n return False\n\n return True", "def load(uri: str, type: Optional[str] = None, *args, **kwargs) -> DataObject:\n from . import data # Loads all formats\n from . import core\n\n if type:\n return core.DataObject.registered_types[type].from_uri(uri, *args, **kwargs)\n else:\n return core.DataObject.from_uri(uri, *args, **kwargs)", "def change_person_to_db(self):\n self.EditPeople.update_sql(self.sql)\n info = self.EditPeople.updated_info()\n self.fullname.setText(info[\"fullname\"])\n # self.search_people_by_name(info['fullname'])", "def _read_rows(geo_id: int, geo_type: str, *names: str) -> geo.GeoRecord:\n cls = geo.GeoMeta.registry[geo_type]\n record = geo.GeoRecord(geo_id, cls.from_row_record(*names))\n return record" ]
[ "0.6036072", "0.5892125", "0.5795159", "0.5658464", "0.56407386", "0.5621719", "0.54812354", "0.54151523", "0.53258556", "0.5289917", "0.5287105", "0.52844226", "0.52658", "0.5242722", "0.5219611", "0.5181131", "0.5163433", "0.5148321", "0.5140113", "0.51361847", "0.5131861", "0.5084057", "0.506503", "0.50583035", "0.5026963", "0.5011548", "0.5002847", "0.50002265", "0.49852613", "0.49472195" ]
0.61409664
0
Wait for container to be healthy.
def test_wait_for_healthy(main_container): # This could take a while TIMEOUT = 180 for i in range(TIMEOUT): inspect = main_container.inspect() status = inspect["State"]["Health"]["Status"] assert status != "unhealthy", "The container became unhealthy." if status == "healthy": break time.sleep(1) else: raise Exception( f"Container status did not transition to 'healthy' within {TIMEOUT} seconds." )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_for_container():\n for i in xrange(30):\n print(\"Waiting for service to come up\")\n try:\n requests.get(URL).raise_for_status()\n return True\n except Exception as e:\n print e\n sleep(1)\n\n return False", "def wait_for_container(self):\n i = 0\n while True:\n ip_address = self.btcd_container.attrs[\"NetworkSettings\"][\"IPAddress\"]\n if ip_address.startswith(\"172\"):\n self.rpcconn.ipaddress = ip_address\n break\n self.btcd_container.reload()\n time.sleep(0.5)\n i = i + 1\n if i > 20:\n raise Exception(\"Timeout while starting bitcoind-docker-container!\")", "def waitForReady(self):\n log = get_log(\"DKInfluxDB.waitForReady\")\n\n # wait for the first port to respond to connections:\n interface = self.settings['interface']\n name = self.settings['export']['wait_for_port']\n ports = self.settings['export']['ports']\n port = [p['export_port'] for p in ports if p['name'] == name]\n port = port[0]\n\n log.info(\"Testing container is ready for use.\")\n db = \"testreadytorolldb_{}\".format(uuid.uuid4().hex)\n from influxdb import InfluxDBClient\n conn = InfluxDBClient(\n interface,\n int(port),\n self.settings['auth']['user'],\n self.settings['auth']['password'],\n db\n )\n\n # Create a database then drop it which should test influxdb is running\n # and ready. This may fail with ConnectionError as the container is\n # still in the process of starting influxdb.\n count_down = self.retries\n while True:\n try:\n conn.create_database(db)\n\n except requests.ConnectionError:\n log.warn(\"Connection to DB failed. Retrying...\")\n time.sleep(self.sleep_period)\n count_down -= 1\n if not count_down:\n # Give up:\n raise\n\n except:\n # Raise any other exception.\n log.exception(\"error: \")\n raise\n\n else:\n conn.drop_database(db)\n break", "def test_wait_for_exits(main_container, version_container):\n assert (\n version_container.wait() == 0\n ), \"Container service (version) did not exit cleanly\"", "def test_wait_for_ready(main_container):\n # This could take a while, as we download the application.\n TIMEOUT = 180\n for i in range(TIMEOUT):\n logs = main_container.logs().decode(\"utf-8\")\n if READY_MESSAGE in logs:\n break\n time.sleep(1)\n else:\n raise Exception(\n f\"Container does not seem ready. \"\n f'Expected \"{READY_MESSAGE}\" in the log within {TIMEOUT} seconds.'\n f\"\\nLog output follows:\\n{logs}\"\n )", "def wait_for_cadvisor_up(self):\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\"Unable to connect to cadvisor %s. Will sleep for %s sec\",\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info(\"cAdvisor client is up for endpoint %s\", self._url_prefix)", "def _wait(self):\n conn = None\n try:\n conn = libvirt.open(\"qemu:///system\")\n while True:\n time.sleep(10)\n try:\n state = conn.lookupByName(self.domain).info()[0]\n except (libvirt.libvirtError, TypeError, IndexError):\n break\n if state in [4, 5, 6]: # crashed or shutdown\n break\n finally:\n if conn is not None:\n conn.close()", "def wait(self):\n AbstractDaemon.wait(self, SCHEDULER_PERIOD)", "def wait_for_instance_become_healthy(self, instance_selfLink, TIME_OUT=300):\n start = datetime.now()\n print('Waiting for %s being healthy with time out %s seconds.' 
% (\n instance_selfLink, TIME_OUT))\n while not self.check_backend_health(instance_selfLink):\n time.sleep(3)\n current_time = datetime.now()\n if (current_time - start).seconds > TIME_OUT:\n print('Health waiting operation is timed out.')\n return\n print('At least one of the backend in %s is healthy.' % (\n self.target_pool_name))", "def wait_for_termination(self):\n self.server.wait_for_termination()", "def wait_for_termination(self):\n self.server.wait_for_termination()", "def wait(self) -> None:\n self._executor.shutdown(wait=True)", "def test_readiness(self):\n with DockerHost('host1',\n additional_docker_options=CLUSTER_STORE_DOCKER_OPTIONS) as host1:\n retry_until_success(host1.assert_is_ready, retries=30)", "def wait(self, timeout=None):\n with self.condition:\n if not self.ready:\n self.condition.wait(timeout)", "def wait():\n time.sleep(1)", "def wait(self):\n self.mainloop().wait()", "def wait_rabbit_became_exited(self, admin_remote, timeout_min=1):\n wait(lambda: admin_remote.execute(self.cmd_grep_exited).is_ok,\n timeout_seconds=60 * timeout_min,\n sleep_seconds=20,\n waiting_for='service rabbitmq-server became exited')", "def wait_rabbit_became_active(self, admin_remote, timeout_min=1):\n wait(lambda: admin_remote.execute(self.cmd_grep_active).is_ok,\n timeout_seconds=60 * timeout_min,\n sleep_seconds=20,\n waiting_for='service rabbitmq-server became active')", "async def wait_until_ready(self):\n await self._ready.wait()", "def _wait_for_cassandra_service(self, instance):\n wait_time = 3\n with settings(host_string=instance.public_dns_name, warn_only=True):\n with FULL_HIDE:\n try:\n while not files.exists(\"/var/run/cassandra.pid\", use_sudo=use_sudo()):\n self.logger.debug(\"Sleeping for %d seconds...\" % wait_time)\n time.sleep(wait_time)\n # catch SystemExit because paramiko will call abort when it detects a failure\n # in establishing an SSH connection\n except SystemExit:\n pass", "def ensure_ready(self, proc, errmsg=None, timeout=20):\n\n if not errmsg:\n errmsg = \"ensure_ready failed\"\n\n ev = Event()\n\n def cb(*args, **kwargs):\n ev.set()\n\n # link either a greenlet failure due to exception OR a success via ready event\n proc.proc.link_exception(cb)\n proc.get_ready_event().rawlink(cb)\n\n retval = ev.wait(timeout=timeout)\n\n # unlink the events: ready event is probably harmless but the exception one, we want to install our own later\n proc.get_ready_event().unlink(cb)\n\n # if the thread is stopped while we are waiting, proc.proc is set to None\n if proc.proc is not None:\n proc.proc.unlink(cb)\n\n # raise an exception if:\n # - we timed out\n # - we caught an exception\n if not retval:\n raise ContainerError(\"%s (timed out)\" % errmsg)\n elif proc.proc is not None and proc.proc.dead and not proc.proc.successful():\n raise ContainerError(\"%s (failed): %s\" % (errmsg, proc.proc.exception))", "def wait(self) -> None:\n\n self.event_.wait()", "async def wait_until_ready(self) -> None:\n await self._ready.wait()", "def wait_until_ready(self):\n while not self.is_ready():\n time.sleep(0.01)", "def wait_for_stop(timeout=30):\n starttime = time.time()\n while(time.time() < starttime + timeout):\n time.sleep(1)\n if container.state == \"Stopped\":\n return", "def wait():\n pass", "def healthcheck(self):\n while True:\n time.sleep(NAMENODE_HEALTH_CHECK_INTERVAL)\n self.check_datanodes()", "def do_wait(self):\n pass", "def wait(self):\n try:\n if self._server is not None:\n self._server.wait()\n except greenlet.GreenletExit:\n LOG.info(_(\"WSGI server has 
stopped.\"))", "def wait(self):\n response = self._client.api.operations[self.id].wait.get()\n\n try:\n if response.json()['metadata']['status'] == 'Failure':\n raise exceptions.LXDAPIException(response)\n except KeyError:\n # Support for legacy LXD\n pass" ]
[ "0.70266175", "0.69631106", "0.6873483", "0.68020195", "0.6711998", "0.6690538", "0.6673794", "0.6620189", "0.6472391", "0.6412175", "0.63628274", "0.6351408", "0.63411796", "0.63291925", "0.6318016", "0.63093585", "0.6306202", "0.6234826", "0.6229725", "0.62052643", "0.6195873", "0.61826277", "0.6177594", "0.6159999", "0.61495167", "0.61449844", "0.6139164", "0.61322844", "0.61119974", "0.6084714" ]
0.7739588
0
Wait for containers to exit.
def test_wait_for_exits(main_container, version_container): assert ( version_container.wait() == 0 ), "Container service (version) did not exit cleanly"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_for_termination(self):\n self.server.wait_for_termination()", "def wait(self) -> None:\n self._executor.shutdown(wait=True)", "def wait_for_termination(self):\n self.server.wait_for_termination()", "def wait_for_stop(timeout=30):\n starttime = time.time()\n while(time.time() < starttime + timeout):\n time.sleep(1)\n if container.state == \"Stopped\":\n return", "def wait(self):\n try:\n self.relay.wait()\n self.responder.wait()\n except KeyboardInterrupt:\n print_notification(\"Stopping\")\n finally:\n self.terminate_processes()", "def wait(self):\n self.mainloop().wait()", "def waitFinish(self):\n while self.job_queue_count > 0:\n sleep(0.5)\n\n # If there was a failure, we don't want to wait for possibly halted threads\n # while performing a 'join'. So just exit now with a failure.\n if self.failure:\n sys.exit(1)\n\n self.worker_pool.close()\n self.worker_pool.join()\n self.status_pool.close()\n self.status_pool.join()", "def wait(self):\n try:\n self._server.wait()\n except greenlet.GreenletExit:\n LOG.info(_(\"WSGI server has stopped.\"))", "def wait(self):\n try:\n if self._server is not None:\n self._server.wait()\n except greenlet.GreenletExit:\n LOG.info(_(\"WSGI server has stopped.\"))", "def wait_until_exit(self):\n\n if self._timeout is None:\n raise Exception(\"Thread will never exit. Use stop or specify timeout when starting it!\")\n\n self._thread.join()\n self.stop()", "def wait():\n pass", "def waitUntilFinished():", "def waitUntilFinished():", "def waitUntilFinished():", "def waitUntilFinished():", "def wait(self):\n [p.join() for p in self._downloaders]\n self._ckq.join()\n [p.terminate() for p in self._checkers]\n [p.join() for p in self._checkers]", "def _wait(self):\n conn = None\n try:\n conn = libvirt.open(\"qemu:///system\")\n while True:\n time.sleep(10)\n try:\n state = conn.lookupByName(self.domain).info()[0]\n except (libvirt.libvirtError, TypeError, IndexError):\n break\n if state in [4, 5, 6]: # crashed or shutdown\n break\n finally:\n if conn is not None:\n conn.close()", "def wait_for_container():\n for i in xrange(30):\n print(\"Waiting for service to come up\")\n try:\n requests.get(URL).raise_for_status()\n return True\n except Exception as e:\n print e\n sleep(1)\n\n return False", "def wait(self):\r\n self.jobs.join()", "def wait(self):\n self.Popen.wait()", "def wait_finish(self):\r\n self.proc.join()", "def wait_rabbit_became_exited(self, admin_remote, timeout_min=1):\n wait(lambda: admin_remote.execute(self.cmd_grep_exited).is_ok,\n timeout_seconds=60 * timeout_min,\n sleep_seconds=20,\n waiting_for='service rabbitmq-server became exited')", "def wait(self) -> None:\n if self.futures:\n wait(self.futures, return_when='FIRST_COMPLETED').done", "def wait(self) -> None:\n\n self.event_.wait()", "def wait(self):\n AbstractDaemon.wait(self, SCHEDULER_PERIOD)", "async def wait_closed(self):\n await self._close_state.wait()", "def test_wait_for_healthy(main_container):\n # This could take a while\n TIMEOUT = 180\n for i in range(TIMEOUT):\n inspect = main_container.inspect()\n status = inspect[\"State\"][\"Health\"][\"Status\"]\n assert status != \"unhealthy\", \"The container became unhealthy.\"\n if status == \"healthy\":\n break\n time.sleep(1)\n else:\n raise Exception(\n f\"Container status did transition to 'healthy' within {TIMEOUT} seconds.\"\n )", "def loop_wait(self):\n self.log_debug(\"Waiting for loop to finish\")\n if self.loop_state() != LState.Stopped:\n self.event_loop_proc.Wait()\n self.log_debug(\"Loop finished\")", "async def 
wait(self, collection, interval=60, timeout=600):\n end = time.time() + timeout\n\n not_responded = self.not_responding_instances(collection)\n\n def get_container(inst):\n try:\n inst.state.docker.get_containers()\n inst.state.docker.responded = True\n except DOCKER_RETRY_EXC:\n logger.debug(\"Docker not ready yet on %s\",\n str(inst.instance.id))\n except Exception as exc:\n logger.debug(\"Got exception on %s: %r\",\n str(inst.instance.id), exc)\n\n # Attempt to fetch until they've all responded\n while not_responded and time.time() < end:\n await gen.multi([collection.execute(get_container, x)\n for x in not_responded])\n\n # Update the not_responded\n not_responded = self.not_responding_instances(collection)\n\n if not_responded:\n await collection.wait(interval)\n\n # Prune the non-responding\n logger.debug(\"Pruning %d non-responding instances.\",\n len(not_responded))\n await collection.remove_instances(not_responded)", "def wait_for_container(self):\n i = 0\n while True:\n ip_address = self.btcd_container.attrs[\"NetworkSettings\"][\"IPAddress\"]\n if ip_address.startswith(\"172\"):\n self.rpcconn.ipaddress = ip_address\n break\n self.btcd_container.reload()\n time.sleep(0.5)\n i = i + 1\n if i > 20:\n raise Exception(\"Timeout while starting bitcoind-docker-container!\")" ]
[ "0.7033026", "0.6967925", "0.6909914", "0.68500364", "0.67150754", "0.6529941", "0.6527739", "0.64836293", "0.6441292", "0.64279807", "0.6425159", "0.6388", "0.6388", "0.6388", "0.6388", "0.6386584", "0.63711345", "0.6333819", "0.63210857", "0.6277999", "0.62738025", "0.62602854", "0.62510836", "0.62377274", "0.6214323", "0.6200847", "0.6195308", "0.6187654", "0.6176698", "0.61760026" ]
0.72377574
0
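The truncated record above pairs a container-readiness query with polling-style negatives such as test_wait_for_healthy and wait_for_container. A minimal sketch of that polling pattern, assuming a caller-supplied check() callable that returns the container's current health string (the callable is hypothetical, not taken from the record):

import time

def wait_until_healthy(check, timeout=180, interval=1.0):
    # Poll check() until it reports "healthy"; fail fast on "unhealthy".
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = check()
        if status == "unhealthy":
            raise RuntimeError("The container became unhealthy.")
        if status == "healthy":
            return
        time.sleep(interval)
    raise TimeoutError(f"Container not healthy within {timeout} seconds.")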
Verify the container outputs the correct version to the logs.
def test_log_version(version_container): version_container.wait() # make sure container exited if running test isolated log_output = version_container.logs().decode("utf-8").strip() pkg_vars = {} with open(VERSION_FILE) as f: exec(f.read(), pkg_vars) # nosec project_version = pkg_vars["__version__"] assert ( log_output == project_version ), f"Container version output to log does not match project version file {VERSION_FILE}"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_check_version_release(self):\n with self.assertLogs(\"dakara_feeder.version\", \"DEBUG\") as logger:\n with patch.multiple(\n \"dakara_feeder.version\", __version__=\"0.0.0\", __date__=\"1970-01-01\"\n ):\n check_version()\n\n # assert effect on logs\n self.assertListEqual(\n logger.output,\n [\"INFO:dakara_feeder.version:\" \"Dakara feeder 0.0.0 (1970-01-01)\"],\n )", "def test_check_version_non_release(self):\n with self.assertLogs(\"dakara_feeder.version\", \"DEBUG\") as logger:\n with patch.multiple(\n \"dakara_feeder.version\", __version__=\"0.1.0-dev\", __date__=\"1970-01-01\"\n ):\n check_version()\n\n # assert effect on logs\n self.assertListEqual(\n logger.output,\n [\n \"INFO:dakara_feeder.version:\" \"Dakara feeder 0.1.0-dev (1970-01-01)\",\n \"WARNING:dakara_feeder.version:\"\n \"You are running a dev version, use it at your own risks!\",\n ],\n )", "def test_version(self):\n assert dockerprettyps.__version__\n assert dockerprettyps.version()", "def test_version(self):\n result = check_output([b\"flocker-reportstate\"] + [b\"--version\"])\n self.assertEqual(result, b\"%s\\n\" % (__version__,))", "def test_container_version_label_matches(version_container):\n pkg_vars = {}\n with open(VERSION_FILE) as f:\n exec(f.read(), pkg_vars) # nosec\n project_version = pkg_vars[\"__version__\"]\n assert (\n version_container.labels[\"org.opencontainers.image.version\"] == project_version\n ), \"Dockerfile version label does not match project version\"", "def test_showVersion(self):\n origout = sys.stdout\n try:\n out = io.StringIO()\n sys.stdout = out\n ArmiCLI.showVersion()\n finally:\n sys.stdout = origout\n\n self.assertIn(\"armi\", out.getvalue())\n self.assertIn(meta.__version__, out.getvalue())", "def test_version(webserver, docker_client):\n airflow_version = get_label(docker_client, 'io.astronomer.docker.airflow.version')\n version_output = webserver.check_output('airflow version')\n assert airflow_version in version_output", "def test_version(self):\n result = check_output([b\"flocker-changestate\"] + [b\"--version\"])\n self.assertEqual(result, b\"%s\\n\" % (__version__,))", "def test_show_version():\n result = runner.invoke(app, [\"--version\"])\n assert result.exit_code == 0\n assert \"Confluence poster version\" in result.stdout", "def test_version(self):\n pass", "def test_get_version(self):\n pass", "def check_image_version(duthost):\n pytest_require(parse_version(duthost.kernel_version) > parse_version(\"4.9.0\"),\n \"Test was not supported for 201911 and older image versions!\")", "def test_python_version(container, python_next_version=\"3.10\"):\n LOGGER.info(f\"Checking that python version is lower than {python_next_version}\")\n c = container.run(\n tty=True,\n command=[\"start.sh\"],\n )\n cmd = c.exec_run(\"python --version\")\n output = cmd.output.decode(\"utf-8\")\n assert \"ERROR\" not in output\n assert \"WARNING\" not in output\n actual_python_version = version.parse(output.split()[1])\n assert actual_python_version < version.parse(\n python_next_version\n ), f\"Python version shall be lower than {python_next_version}\"", "def check_version(ctx, _, value):\n if not value or ctx.resilient_parsing:\n return\n\n click.echo(f\"geocube v{importlib.metadata.version('geocube')}\")\n\n ctx.exit()", "def check_image_version(duthost):\n skip_release(duthost, per_command_check_skip_versions)", "def test_release_version_found(self, mock_git_info): # pylint: disable=invalid-name, unused-argument\n set_version_from_git_tag(self.project, self.logger)\n 
self.assertEqual(self.logger.info.call_count, 2)\n self.assertEqual(self.project.version, '1.2.3')", "def test_version(self):\n version_instance = get_version('kolibri', __file__)\n self.assertIn(version_instance.major_version, kolibri.__version__)", "def test_logs_pod_status_after_upgrade(self):\n\n wait_for_storage_pods(timeout=10), \"Some pods were not in expected state\"\n pod_name = get_ocs_operator_pod().name\n unexpected_log_after_upgrade = (\n \"spec.csi.controllerExpandSecretRef.name: Required value,\"\n \" spec.csi.controllerExpandSecretRef.namespace: Required value\"\n )\n pod_logs = get_pod_logs(pod_name=pod_name, all_containers=True)\n assert not (\n unexpected_log_after_upgrade in pod_logs\n ), f\"The unexpected log after upgrade exist on pod {pod_name}\"", "def check_tag_version(self):\n import subprocess\n\n version = self.get_tag()\n version = version[version.rfind(\"-\") + 1 :]\n\n if robocorp_code.__version__ == version:\n sys.stderr.write(\"Version matches (%s) (exit(0))\\n\" % (version,))\n sys.exit(0)\n else:\n sys.stderr.write(\n \"Version does not match (found in sources: %s != tag: %s) (exit(1))\\n\"\n % (robocorp_code.__version__, version)\n )\n sys.exit(1)", "def test_mismatching_releases_displays_err_msg(self):\n\n # The failure message that we expect to see\n expected_fail_msg = (\n f\"Provided release ({self.old_matlab_release}) does not match \"\n \"release found in VersionInfo.xml\"\n )\n\n build_msg = utils.get_build_output(\n docker_api_client=self.client.api,\n dockerfile_dirpath=self.dockerfile_dirpath,\n release=self.old_matlab_release,\n )\n\n self.assertTrue(\n any([expected_fail_msg in line for line in build_msg]),\n f\"The error message '{expected_fail_msg}' was not displayed\",\n )", "def check_image_version(duthost):\n skip_version(duthost, [\"201811\", \"201911\", \"202012\"])", "def test_versionString(self):\n self.assertIn(\"%d.%d.%d\" % nevow.__version_info__, nevow.__version__)", "def test_check_version(mock_send_message):\n A1sim.check_version(BASE_URL)\n mock_send_message.assert_called_once_with('GET',\n 'Get ric version',\n (f\"{BASE_URL}/counter/interface\"))", "def test_version(server):\n\n assert isinstance(server.version(), six.string_types)", "def test_docker_unknown_version(mock_tools, user_mapping_run_calls, capsys):\n # Mock a bad return value of `docker --version`\n mock_tools.subprocess.check_output.return_value = \"ceci nest pas un Docker\\n\"\n\n # Invoke Docker verify\n result = Docker.verify(mock_tools)\n\n # The verify call should return the Docker wrapper\n assert isinstance(result, Docker)\n\n # Docker version and plugins were verified\n mock_tools.subprocess.check_output.assert_has_calls(DOCKER_VERIFICATION_CALLS)\n\n # Docker user mapping inspection occurred\n mock_tools.subprocess.run.assert_has_calls(user_mapping_run_calls)\n\n # console output\n output = capsys.readouterr()\n assert \"** WARNING: Unable to determine the version of Docker\" in output.out\n assert output.err == \"\"", "async def test_version(doof, repo_info, event_loop, mocker):\n a_hash = 'hash'\n version = '1.2.3'\n fetch_release_hash_mock = mocker.patch('bot.fetch_release_hash', autospec=True, return_value=a_hash)\n get_version_tag_mock = mocker.patch('bot.get_version_tag', autospec=True, return_value=\"v{}\".format(version))\n await doof.run_command(\n manager='mitodl_user',\n channel_id=repo_info.channel_id,\n words=['version'],\n loop=event_loop,\n )\n assert doof.said(\n \"Wait a minute! 
My evil scheme is at version {}!\".format(version)\n )\n\n fetch_release_hash_mock.assert_called_once_with(repo_info.prod_hash_url)\n get_version_tag_mock.assert_called_once_with(GITHUB_ACCESS, repo_info.repo_url, a_hash)", "def test_version_check_update_available(self):\n output = self.run_command(\"selfupdate --check bennr01:selfupdate_test_future\", exitcode=0)\n self.assertIn(\"Target: bennr01:selfupdate_test_future\", output)\n self.assertNotIn(\"Target: ywangd:master\", output)\n self.assertNotIn(\"Already at latest version\", output)\n self.assertIn(\"New version available\", output)\n self.assertNotIn(\"Error: \", output)", "def test_version_check_outdated(self):\n output = self.run_command(\"selfupdate --check bennr01:selfupdate_test_outdated\", exitcode=0)\n self.assertIn(\"Target: bennr01:selfupdate_test_outdated\", output)\n self.assertNotIn(\"Target: ywangd:master\", output)\n self.assertIn(\"Already at latest version\", output)\n self.assertNotIn(\"New version available\", output)\n self.assertNotIn(\"Error: \", output)", "def test_version_time_ok(self):\n test_name = sys._getframe().f_code.co_name\n self._execute('version time 2.0 \"%s\"' % self._test_date)\n rv, output = self._execute('version list')\n self.assertEqual(0, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def test_main_version(\n app_tester: ApplicationTester, valiant_app_title: str, valiant_version: str\n) -> None:\n app_tester.execute(\"--version\")\n expected = f\"{valiant_app_title} version {valiant_version}\\n\"\n assert expected == app_tester.io.fetch_output()" ]
[ "0.69504476", "0.6847581", "0.6835782", "0.67436117", "0.66984177", "0.66706485", "0.66361153", "0.66052985", "0.6452293", "0.64389616", "0.63589865", "0.6269801", "0.6172591", "0.616603", "0.614854", "0.6137157", "0.61313576", "0.61284804", "0.61186343", "0.60721433", "0.603655", "0.602873", "0.6006925", "0.6000305", "0.59787625", "0.59778476", "0.594089", "0.5935636", "0.59265447", "0.5920191" ]
0.78575236
0
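test_log_version above relies on a version_container pytest fixture that is not shown: a container started so that it prints only its version and exits. One plausible shape for it with docker-py; the image tag and the --version argument are assumptions, not taken from the record:

import docker
import pytest

@pytest.fixture
def version_container():
    client = docker.from_env()
    # Hypothetical image tag; the record does not name the image under test.
    container = client.containers.run("example/image:latest", ["--version"], detach=True)
    yield container
    container.remove(force=True)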
Verify the container version label is the correct version.
def test_container_version_label_matches(version_container): pkg_vars = {} with open(VERSION_FILE) as f: exec(f.read(), pkg_vars) # nosec project_version = pkg_vars["__version__"] assert ( version_container.labels["org.opencontainers.image.version"] == project_version ), "Dockerfile version label does not match project version"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_version(ctx, _, value):\n if not value or ctx.resilient_parsing:\n return\n\n click.echo(f\"geocube v{importlib.metadata.version('geocube')}\")\n\n ctx.exit()", "def test_versionString(self):\n self.assertIn(\"%d.%d.%d\" % nevow.__version_info__, nevow.__version__)", "def test_version(webserver, docker_client):\n airflow_version = get_label(docker_client, 'io.astronomer.docker.airflow.version')\n version_output = webserver.check_output('airflow version')\n assert airflow_version in version_output", "def test_version(self):\n assert dockerprettyps.__version__\n assert dockerprettyps.version()", "def is_valid_version(self):\n pass", "def check_image_version(duthost):\n pytest_require(parse_version(duthost.kernel_version) > parse_version(\"4.9.0\"),\n \"Test was not supported for 201911 and older image versions!\")", "def test_log_version(version_container):\n version_container.wait() # make sure container exited if running test isolated\n log_output = version_container.logs().decode(\"utf-8\").strip()\n pkg_vars = {}\n with open(VERSION_FILE) as f:\n exec(f.read(), pkg_vars) # nosec\n project_version = pkg_vars[\"__version__\"]\n assert (\n log_output == project_version\n ), f\"Container version output to log does not match project version file {VERSION_FILE}\"", "def test_version(self):\n version_instance = get_version('kolibri', __file__)\n self.assertIn(version_instance.major_version, kolibri.__version__)", "def test_python_version(container, python_next_version=\"3.10\"):\n LOGGER.info(f\"Checking that python version is lower than {python_next_version}\")\n c = container.run(\n tty=True,\n command=[\"start.sh\"],\n )\n cmd = c.exec_run(\"python --version\")\n output = cmd.output.decode(\"utf-8\")\n assert \"ERROR\" not in output\n assert \"WARNING\" not in output\n actual_python_version = version.parse(output.split()[1])\n assert actual_python_version < version.parse(\n python_next_version\n ), f\"Python version shall be lower than {python_next_version}\"", "def test_version(server):\n\n assert isinstance(server.version(), six.string_types)", "def validateLabel(cls, label: str, labeling_version: int) -> bool:\r\n\r\n return len(label.split('.')) in [2, 3]", "def test_version(self):\n pass", "def validate_backend_version(self):\n pass", "def is_valid_from_version(self) -> bool:\n if Version(self.from_version) < Version(FROM_VERSION_LAYOUTS_CONTAINER):\n error_message, error_code = Errors.invalid_version_in_layoutscontainer(\n \"fromVersion\"\n )\n if self.handle_error(error_message, error_code, file_path=self.file_path):\n return False\n return True", "def check_version(self, node):\n assert \"version\" in node, \"Version node does not contain attribute 'version'\"\n assert len(node[\"version\"]) >= 1, \"Expecting at least one 'version' value\"\n # TODO: add more thorough checks", "def test_package_version():\n coverage_version = package_version('coverage')\n pytest_version = package_version('pytest')\n\n assert coverage_version is not None\n assert coverage_version < (1000, 0, 0)\n assert pytest_version is not None\n assert pytest_version > (5, 0)", "def check_image_version(duthost):\n skip_version(duthost, [\"201811\", \"201911\", \"202012\"])", "def test_release_tag(self) -> None:\n self.assertEqual(\"v3.14.15\", release_tag())", "def verify_ios_versionNumber():\r\n msg = \"\"\r\n try:\r\n 'Getting Version number for IOS '\r\n if g.platform == 'ios':\r\n text_view = ui_controls.text_view(get_obj_identifier('about_versionNumber_lbl'), label=True)\r\n\r\n 'Verifying whether 
Version number is matching with expected value IOS'\r\n if g.platform == 'ios' and text_view.strip() == g.version_number :\r\n print \"Version number is verified successfully. Expected : %s. Actual : %s\" % (g.version_number,text_view.strip())\r\n else:\r\n if g.platform == 'ios':\r\n print \"Version number is not verified successfully. Expected : %s. Actual : %s\" % (g.version_number, text_view.strip())\r\n return False, msg\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return True, msg", "def test_version() -> None:\n assertion.assert_(Version, nanoqm.__version__)", "def test_get_version(self):\n pass", "def test1_version(self):\n lVersion = rdbhdb.__version__.split('.')\n nVersion = need_version.split('.')\n self.assert_(lVersion >= nVersion, rdbhdb.__version__)", "def test_version():\n versions = ((2, 7, 16), (3, 5, 7), (3, 6, 8), (3, 7, 3))\n assert sys.version_info[:3] in versions", "def test_version(self):\n self.assertIsInstance(nevow.version, Version)", "def check_tag_version(self):\n import subprocess\n\n version = self.get_tag()\n version = version[version.rfind(\"-\") + 1 :]\n\n if robocorp_code.__version__ == version:\n sys.stderr.write(\"Version matches (%s) (exit(0))\\n\" % (version,))\n sys.exit(0)\n else:\n sys.stderr.write(\n \"Version does not match (found in sources: %s != tag: %s) (exit(1))\\n\"\n % (robocorp_code.__version__, version)\n )\n sys.exit(1)", "def is_valid_to_version(self) -> bool:\n if self.to_version and Version(self.to_version) < Version(\n FROM_VERSION_LAYOUTS_CONTAINER\n ):\n error_message, error_code = Errors.invalid_version_in_layoutscontainer(\n \"toVersion\"\n )\n if self.handle_error(error_message, error_code, file_path=self.file_path):\n return False\n return True", "def test_version(self):\n result = check_output([b\"flocker-changestate\"] + [b\"--version\"])\n self.assertEqual(result, b\"%s\\n\" % (__version__,))", "def test_version(self):\n result = check_output([b\"flocker-reportstate\"] + [b\"--version\"])\n self.assertEqual(result, b\"%s\\n\" % (__version__,))", "def ensure_docker_version() -> None:\n compose_version = get_compose_version()\n if compose_version < version.parse(MINIMUM_DOCKER_COMPOSE_VERSION):\n raise VersionError(\n f\"Docker-compose {compose_version} is not supported. \"\n f\"You need to have at least {MINIMUM_DOCKER_COMPOSE_VERSION}\"\n )\n docker_version = get_docker_version()\n if docker_version < version.parse(MINIMUM_DOCKER_VERSION):\n raise VersionError(\n f\"Docker {docker_version} is not supported. You need to have at least {MINIMUM_DOCKER_VERSION}\"\n )", "def test_version_type(self):\n self.assertIsInstance(VERSION, str)" ]
[ "0.66752607", "0.64611894", "0.6417991", "0.6392024", "0.6365091", "0.6316447", "0.6304623", "0.62754697", "0.62334216", "0.6207172", "0.6132876", "0.6128137", "0.61197776", "0.6101531", "0.6085407", "0.60392827", "0.60341907", "0.6024869", "0.6014097", "0.5978784", "0.5966738", "0.5955904", "0.5903245", "0.58929694", "0.58853465", "0.587749", "0.5867017", "0.5860186", "0.58345807", "0.5830445" ]
0.83354795
0
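The exec(f.read(), pkg_vars) idiom used in both version tests loads __version__ from a plain version file without importing the package. The same comparison can be made against an image instead of a running container; a sketch with docker-py, where the tag and version-file path are assumptions:

import docker

def image_version_matches(tag="example/image:latest", version_file="src/version.py"):
    pkg_vars = {}
    with open(version_file) as f:
        exec(f.read(), pkg_vars)  # defines pkg_vars["__version__"] without importing the package
    image = docker.from_env().images.get(tag)
    return image.labels.get("org.opencontainers.image.version") == pkg_vars["__version__"]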
Test the main splash page.
def test_splash_page(self): response = self.testapp.get('/') self.assertEqual(response.status_int, 200) response.mustcontain( 'Bite-sized learning journeys', 'Browse the explorations gallery', '100% free!', 'Learn', 'About', 'Contact', # No navbar tabs should be highlighted. no=['class="active"'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_login_and_logout_on_splash_page(self):\n response = self.testapp.get('/')\n self.assertEqual(response.status_int, 200)\n response.mustcontain(\n 'Login', 'Create an Oppia account', 'Contribute',\n self.get_expected_login_url('/'),\n no=['Profile', 'Logout', 'Create an exploration',\n self.get_expected_logout_url('/')])\n\n self.login('[email protected]')\n\n response = self.testapp.get('/')\n self.assertEqual(response.status_int, 200)\n response.mustcontain(\n 'Contribute', 'Profile', 'Logout', 'Create an exploration',\n self.get_expected_logout_url('/'),\n no=['Login', 'Create an Oppia account',\n self.get_expected_login_url('/')])\n\n self.logout()", "def test_home_page(self):\n\n self.browser.get('http://localhost:8000/index.html')\n\n # there is a page title defined by <title></title> on the home page\n # check it\n\n self.assertIn('Stability within Movement',self.browser.title)\n\n # You will have an image for your home page I am assuming.\n # Put the name of your image here in place of homebrew.png\n # In general this is how we check for images on a page.\n\n # The user sees an image of sun hitting the Washington Monument\n\n m=self.browser.find_element_by_tag_name('img')\n self.assertIn('help.jpg',m.get_attribute('src'))\n\n a=self.browser.find_element_by_id('sun')\n a.click()\n\n self.assertIn('sun',self.browser.title)\n\n h=self.browser.find_element_by_tag_name('h1')\n\n m=self.browser.find_element_by_tag_name('img')\n\n # the user goes back to the home page\n # self.browser.back()\n self.browser.get('http://localhost:8000/index.html')\n\n # the user sees at the bottom of the page a link to credits\n l=self.browser.find_element_by_link_text('Credits')\n\n # the user clicks on the credits link\n l.click()\n # and sees the credits.html page\n a=self.browser.current_url\n self.assertIn(\"credits.html\",a)", "def test_main_page_load(self):\n response = self.client.get(reverse(\"index\"))\n self.assertEqual(response.status_code, 200)", "def test_homepage(self):\n rc = self.app.get('/')\n assert b'Welcome to Code TA' in rc.data\n assert b'Logout' not in rc.data", "def test_main_page(remote_browser, base_url, logger_fixture):\n logger_fixture.info(\"===== test_main_page =====\")\n main_page = MainPage(remote_browser, base_url)\n main_page\\\n .open(logger_fixture)\\\n .check_featured_block_is_not_empty()", "def test_main_app(self):\n resp = self.app.get('/')\n # ensure relevant pieces of UI are returned\n assert 'Foggy Fork' in resp.data\n assert 'A San Francisco Food Truck Map' in resp.data\n assert 'Where in the fog are you looking for food?' 
in resp.data\n assert '<div id=\"map-canvas\"></div>' in resp.data", "def test_homepage(self):\n rv = self.app.get('/')\n assert 'Enter your url here' in rv.data", "def test_home(self):\n\t\tresponse = self.client.get('/')\n\t\tself.assertContains(response, 'Home Page', 1, 200)", "def test_show_on_homepage(self) -> None:\n self.assert_show_on_homepage(apps.wakeup.main.Controller)", "def splash_screen():\n print(Fore.YELLOW + Style.BRIGHT + \"\\n\" + ProjInfo.LOGO + Style.RESET_ALL)\n print_version_info(False)", "def test_home(self):\n response = self.client.get('/')\n self.assertContains(response, 'Home Page', 1, 200)", "def test_main_page(self):\n response = self.client.get(reverse('home'))\n self.assertEqual(response.status_code, 200)\n\n content = response.content.decode('utf-8')\n self.assertTrue('Improving the FOIA request experience' in content)", "def test_homepage_view(self):\n response = self.client.get(url_for('home'))\n self.assertEqual(response.status_code, 200)", "def test_homepage_view(self):\n response = self.client.get(url_for('home'))\n self.assertEqual(response.status_code, 200)", "def test_splash_lede(self):\n\t\n n = 0\n sel = self.selenium\n sel.open(BASEURL)\n sel.wait_for_page_to_load(\"50000\")\n test = \"Test A - Presence of elements via CSS\"\n print test\n \n # Loops through the data in the CSS file asserting each element is on the page\n \n for each in CSS:\n\n\t c = CSS[n].strip('\\n')\n \n try:\n self.failUnless(sel.is_element_present(\"css=\" + c))\n \n except AssertionError, e:\n \tprint \"FAILURE \" + c\n \tself.verificationErrors.append(str(e))\n L.log(BROWSERS[x], test, \"FAIL, ELEMENT NOT FOUND\", c, exception=str(e))\n \n else:\n L.log(BROWSERS[x], test, \"PASS, ELEMENT FOUND\", c)\n \n n += 1\n \n self.b_count_test()\t\t# Verifies that each article has a headline, image and author \t\n self.c_arrow_test()\t\t# Tests to make sure that the arrow buttons and lede 'dots' (circles below the lede) work)\n #self.d_responsive_test()\t# Tests the responsive design of the page\n self.e_clicky_test()\t\t# Functional test - clicks on links and images to make sure they load the correct page\n \n ########################################################################", "def test_homepage(self):\n\n result = self.client.get(\"/\")\n self.assertEqual(result.status_code, 200)\n self.assertIn(b\"Homepage\", result.data)", "def test_main(self):\n path = reverse(\"main\")\n request = RequestFactory().get(path)\n response = index(request)\n assert response.status_code == 200", "def test_homepage(self):\r\n\r\n result = self.client.get(\"/\")\r\n self.assertIn(b\"Welcome!\", result.data)", "def test_home(self):\n response = self.client.get('/')\n self.assert_200(response)\n self.assert_template_used('index.html')", "def test_page_intro(self, client, projects_landing_page):\n # create link page for project list\n sponsored_projects = LinkPage(\n title=\"Sponsored Projects\", link_url=\"projects/sponsored\"\n )\n projects_landing_page.add_child(instance=sponsored_projects)\n # create a snippet for the sponsored projects page\n PageIntro.objects.create(\n page=sponsored_projects, paragraph=\"<i>test content</i>\"\n )\n\n # visit and check that it renders\n response = client.get(reverse(\"projects:sponsored\"))\n assertContains(response, \"<i>test content</i>\")", "def test_verify_main_screen_elements(self):\n\n test_name = sys._getframe().f_code.co_name\n\n log.info(\"###### TEST EXECUTION STARTED :: \" + test_name + \" ######\")\n\n with allure.step(\"Verify Main Screen 
Elements\"):\n result = self.main_page.verify_main_screen_elements()\n self.exe_status.mark_final(test_step=test_name, result=result)", "def test_microsite_anonymous_homepage_content(self):\r\n\r\n resp = self.client.get('/', HTTP_HOST=MICROSITE_TEST_HOSTNAME)\r\n self.assertEqual(resp.status_code, 200)\r\n\r\n # assert various branding definitions on this Microsite\r\n # as per the configuration and Microsite overrides\r\n\r\n self.assertContains(resp, 'This is a Test Microsite Overlay') # Overlay test message\r\n self.assertContains(resp, 'test_microsite/images/header-logo.png') # logo swap\r\n self.assertContains(resp, 'test_microsite/css/test_microsite') # css override\r\n self.assertContains(resp, 'Test Microsite') # page title\r\n\r\n # assert that test course display name is visible\r\n self.assertContains(resp, 'Robot_Super_Course')\r\n\r\n # assert that test course that is outside microsite is not visible\r\n self.assertNotContains(resp, 'Robot_Course_Outside_Microsite')\r\n\r\n # assert that footer template has been properly overriden on homepage\r\n self.assertContains(resp, 'This is a Test Microsite footer')\r\n\r\n # assert that the edX partners section is not in the HTML\r\n self.assertNotContains(resp, '<section class=\"university-partners university-partners2x6\">')\r\n\r\n # assert that the edX partners tag line is not in the HTML\r\n self.assertNotContains(resp, 'Explore free courses from')", "def test_landing_page(self):\n response = self.app.get(\"/\", follow_redirects=True)\n self.assertEqual(response.status_code, 200)\n\n res_txt = response.get_data(as_text=True)\n\n self.assertIn(\"input\", res_txt)\n self.assertIn(\"button\", res_txt)\n self.assertIn(\"Welcome to\", res_txt)", "def test_frontpage(self):\n response = self.client.get('/')\n self.assertEqual(response.status_code, 200)", "def test_homepage(self):\n\n with self.client as client:\n response = client.get('/')\n html = response.get_data(as_text=True)\n self.assertEqual(response.status_code, 200)\n self.assertIn('<table class=\"board\">', html)\n self.assertIn('<table', html)\n self.assertIn('boggle homepage. used in testing', html)\n # test that you're getting a template", "def test_home_page(self):\r\n url = reverse('home')\r\n response = self.client.get(url)\r\n\r\n self.assertEqual(response.status_code, 200)", "def test_homepage_render(self):\n\n result = self.client.get(\"/\")\n self.assertIn(\"<h1 class=\\\"title\\\">Bark Park!</h1>\", result.data)", "def splash_page_view(request):\n # Build the context\n context = {}\n\n # Relevant values from .env\n context['project_name'] = os.environ['PROJECT_NAME']\n context['rollbar_project_url'] = os.environ['ROLLBAR_PROJECT_URL']\n context['rabbitmq_management_url'] = (\n os.environ['RABBITMQ_MANAGEMENT_URL'])\n context['flower_url'] = os.environ['FLOWER_URL']\n\n return render(request, 'splashpage/splash_page.html', context)", "def test_01_index(self):\r\n res = self.app.get(\"/\", follow_redirects=True)\r\n assert self.html_title() in res.data, res\r\n assert \"Create an App\" in res.data, res", "def testindex(self):\n rv = self.app.get('/')\n self.assertEqual(rv.status_code, 302, \"homepage didnot load\")" ]
[ "0.70376253", "0.6987304", "0.6967448", "0.68484825", "0.6787349", "0.67342925", "0.66842026", "0.666418", "0.6645239", "0.66426563", "0.66338295", "0.6604585", "0.6584727", "0.6584727", "0.6568594", "0.6441818", "0.64151394", "0.6395155", "0.6382844", "0.63790137", "0.6295453", "0.6260428", "0.625934", "0.6249647", "0.6237451", "0.62284625", "0.6228396", "0.62175924", "0.62168556", "0.6216017" ]
0.8406544
0
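test_splash_page above uses WebTest: testapp.get() returns a TestResponse whose mustcontain() asserts that every positional string appears in the body and that nothing listed under no= does. A self-contained illustration of that API against a trivial WSGI app:

from webtest import TestApp

def wsgi_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/html")])
    return [b"<nav>Learn About Contact</nav>"]

app = TestApp(wsgi_app)
response = app.get("/")
assert response.status_int == 200
response.mustcontain("Learn", "About", no=['class="active"'])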
Accepts a list of nested dictionaries and produces a single dictionary containing mean values and estimated errors from these dictionaries. Errors are estimated as confidence interval lengths.
def dict_recur_mean_err(dlist): if isinstance(dlist[0], dict): res_dict = {} for k in dlist[0]: n_dlist = [d[k] for d in dlist] res_dict[k] = dict_recur_mean_err(n_dlist) return res_dict else: n = len(dlist) mean = float(sum(dlist)) / n variance = float(sum(map(lambda xi: (xi-mean)**2, dlist))) / n std = math.sqrt(variance) err = t.ppf(1-alpha/2.,n-1) * std / math.sqrt(n-1) return (mean, err)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def average_dictlist(dict_list):\r\n avg=sum(dict_list)/len(dict_list)\r\n return avg", "def summaries(e_dict, m_dict):\n for key, value in m_dict.items():\n e_dict[key].append(np.mean(value))\n return e_dict", "def calculate_averages(data):\n def mean(item_key):\n all_items = [i[item_key] for i in data]\n return sum(all_items)/float(len(all_items))\n\n return {\n \"mean_error_count\": mean('error_count'),\n \"mean_success_count\": mean('success_count'),\n \"mean_mean_response_time\": mean('mean_respone_time'),\n \"mean_data_sent_mb\": mean('data_sent_mb'),\n }", "def fmean(configuration):\n fmean_dict_all = {\n \"HL\" : {'H1' : 100., 'L1' : 100.},\n \"HLV\" : {'H1' : 100., 'L1' : 100., 'V1': 130.},\n \"HLVK\" : {'H1' : 100., 'L1' : 100., 'V1': 130., 'K1' : 130.},\n \"HLVKI\" : {'H1' : 100., 'L1' : 100., 'V1': 130., 'K1' : 130., 'I1' : 100.},\n \"GW170817\" : {'H1' : 100., 'L1' : 100., 'V1': 130.},\n \"GW170814\" : {'H1' : 117.4, 'L1' : 117.4, 'V1': 148.9},\n \"GW170817_without_Virgo\" : {'H1' : 100., 'L1' : 100.},\n \"steve\" : {'H1' : 100.0, 'L1' : 100.0, 'V1': 100.0, \"I1\" : 100.0 },\n \"design\" : {'H1' : 100., 'L1' : 100., 'V1': 130. },\n \"india\" : {'H1' : 100., 'I1' : 100., 'L1' : 100., 'V1': 130. },\n \"s6vsr2\" : {'H1' : 180., 'L1' : 180., 'V1': 150. }\n }\n return(fmean_dict_all[configuration])", "def pollster_errors(pollster_predictions, state_edges_actual):\r\n\r\n totalAverage = {} #Create an empty dictionary\r\n\r\n for k in pollster_predictions:\r\n states = pollster_predictions[k]\r\n for j in states:\r\n if j in state_edges_actual: \r\n average = average_error(pollster_predictions[k], state_edges_actual)\r\n totalAverage[k] = average \r\n #Map each pollster to its calculated average error of each state\r\n\r\n return totalAverage", "def dictionary_of_metrics(items):\n\n 'Initialize dict'\n d = {}\n\n # Add 'mean' key to the dict with the value of the mean calculate by using\n # np.mean rounded to 2 decimal places\n d['mean'] = round(np.mean(items), 2)\n\n # Add 'median' key to the dict with the value of the median calculate by\n # using np.median rounded to 2 decimal places\n d['median'] = round(np.median(items), 2)\n\n # Add 'var' key to the dict with the value of the varience calculate by\n # using np.var rounded to 2 decimal places\n d['var'] = round(np.var(items, ddof=1), 2)\n\n # Add 'std' key to the dict with the value of the standard deviation\n # calculate by using np.std to 2 decimal places\n d['std'] = round(np.std(items, ddof=1), 2)\n\n # Add 'min' key to the dict with the value of the minimum calculate by\n # using np.min to 2 decimal places\n d['min'] = round(np.min(items), 2)\n\n # Add 'max' key to the dict with the value of the maximum calculate by\n # using np.max to 2 decimal places\n d['max'] = round(np.max(items), 2)\n\n # returns dictionary, d\n return d", "def dictionary_of_metrics(items):\n \n np_list = np.array(items) #create an array of list to use numpy functions on list\n metric_dict = {'mean': np.mean(np_list).round(2),\n 'median': np.median(np_list).round(2),\n 'var': np.var(np_list, ddof=1).round(2),\n 'std': np.std(np_list, ddof=1).round(2),\n 'min': np.min(np_list).round(2),\n 'max': np.max(np_list).round(2),} #create a dictionary that calculates the five metrics\n \n return metric_dict #return result as a dictionary", "def get_mean_dict(self):\n mean = {}\n for c in self.cellLines:\n mean[c] = {}\n for l in self.ligands:\n mean[c][l] = self.aucs[c][l]['mean']\n return mean", "def pollster_errors(pollster_predictions, 
state_edges_actual):\n # declare errors to be the return object which is a dict\n errors = dict()\n # traverse the predictions to calculate the error\n for key in pollster_predictions:\n \terrors[key] = average_error(pollster_predictions[key], state_edges_actual)\n return errors", "def _avg_sd_from_list(lst):\n arr = flex.double(lst)\n avg = round(flex.mean(arr), 5)\n std = round(arr.standard_deviation_of_the_sample(), 5)\n return avg, std", "def average_metrics_results(results):\n res = {}\n\n if len(results) == 0:\n return {}\n\n for key in results[0]:\n try:\n res[key] = sum([r[key] for r in results]) / len(results)\n except TypeError:\n res[key] = [sum(b)/len(results) for b in zip(*[s[key] for s in results])]\n return res", "def dictionary_of_metrics(items):\n total = 0\n count = 0\n for value in items:\n total = total + value\n count = count + 1\n the_mean = round(total / count, 2)\n sorted_items = sorted(items)\n if count % 2 == 1:\n the_median = sorted_items[int(round(count+1)/2-1)]\n else:\n lower_median = sorted_items[int(round(count/2-1))]\n upper_median = sorted_items[int(round(count/2))]\n the_median = (lower_median + upper_median) / 2\n sum_of_sqz = 0 # Calculate Sum of squares for Varience\n for j in items:\n sqrz_calc = (j - the_mean)**2\n sum_of_sqz = sum_of_sqz + sqrz_calc\n the_varience = round(sum_of_sqz / (count - 1), 2)\n the_standard_dev = round((the_varience)**(1/2), 2)\n the_min = sorted_items[0]\n the_max = sorted_items[count - 1]\n dict = {\n 'mean': the_mean,\n 'median': the_median,\n 'var': the_varience,\n 'std': the_standard_dev,\n 'min': the_min,\n 'max': the_max\n }\n return dict", "def get_normalized_regret(evaluator_list):\n values = collections.defaultdict(list)\n for e in evaluator_list:\n values[e.task_name].append(e.get_normalized_regret())\n return _map(np.mean, values), _map(np.std, values)", "def mean(self) -> Dict:\n mean_params = {}\n for key in self.samples[0].keys():\n mean_params[key] = np.mean([self.samples[i][key] for i in range(self.num_samples)], axis=0)\n if key == 'weights':\n mean_params[key] /= np.linalg.norm(mean_params[key])\n return mean_params", "def derive_error_dicts(self, error_obj_list):\n results = []\n for error_obj in error_obj_list:\n if error_obj:\n results.append(self.derive_error_dict(error_obj))\n return results", "def get_mean_metrics(metric_dict):\n return {k: np.mean(v) for k, v in metric_dict.items()}", "def get_leg_average():\n animals = [json.loads(rd.get(key)) for key in rd.keys(\"*\")]\n legs = [animal[\"legs\"] for animal in animals]\n return jsonify(sum(legs) / len(legs))", "def mean(data, *args, **kwargs):\n result = OrderedDict()\n for observation_label, variant_values in data.items():\n result[observation_label] = OrderedDict()\n for label, values in variant_values.items():\n result[observation_label][label] = np.mean(values)\n return result", "def get_th_mean_values(self, value_list):\n if not len(value_list):\n return None, None, None\n t, h, d = map(list, zip(*value_list))\n return (\n round(statistics.mean(t), 1),\n int(round(statistics.mean(h), 0)),\n round(statistics.mean(d), 1),\n )", "def _fit_result_from_list(a: Sequence[float]) -> Union[Dict[str, float], None]:\n if not len(a):\n return None\n return {\"amplitude\": a[0], \"center\": a[1:3], \"sigma\": a[3:5],\n \"rotation\": a[5], \"offset\": a[6] if len(a) > 6 else 0}", "def average_examples(\n example_dict, use_pmm,\n max_pmm_percentile_level=DEFAULT_MAX_PMM_PERCENTILE_LEVEL):\n\n error_checking.assert_is_boolean(use_pmm)\n 
error_checking.assert_is_geq(max_pmm_percentile_level, 90.)\n error_checking.assert_is_leq(max_pmm_percentile_level, 100.)\n\n mean_scalar_predictor_matrix = numpy.mean(\n example_dict[SCALAR_PREDICTOR_VALS_KEY], axis=0\n )\n mean_scalar_predictor_matrix = numpy.expand_dims(\n mean_scalar_predictor_matrix, axis=0\n )\n\n mean_scalar_target_matrix = numpy.mean(\n example_dict[SCALAR_TARGET_VALS_KEY], axis=0\n )\n mean_scalar_target_matrix = numpy.expand_dims(\n mean_scalar_target_matrix, axis=0\n )\n\n if use_pmm:\n mean_vector_predictor_matrix = pmm.run_pmm_many_variables(\n input_matrix=example_dict[VECTOR_PREDICTOR_VALS_KEY],\n max_percentile_level=max_pmm_percentile_level\n )\n else:\n mean_vector_predictor_matrix = numpy.mean(\n example_dict[VECTOR_PREDICTOR_VALS_KEY], axis=0\n )\n\n mean_vector_predictor_matrix = numpy.expand_dims(\n mean_vector_predictor_matrix, axis=0\n )\n\n if use_pmm:\n mean_vector_target_matrix = pmm.run_pmm_many_variables(\n input_matrix=example_dict[VECTOR_TARGET_VALS_KEY],\n max_percentile_level=max_pmm_percentile_level\n )\n else:\n mean_vector_target_matrix = numpy.mean(\n example_dict[VECTOR_TARGET_VALS_KEY], axis=0\n )\n\n mean_vector_target_matrix = numpy.expand_dims(\n mean_vector_target_matrix, axis=0\n )\n\n return {\n SCALAR_PREDICTOR_NAMES_KEY: example_dict[SCALAR_PREDICTOR_NAMES_KEY],\n SCALAR_PREDICTOR_VALS_KEY: mean_scalar_predictor_matrix,\n SCALAR_TARGET_NAMES_KEY: example_dict[SCALAR_TARGET_NAMES_KEY],\n SCALAR_TARGET_VALS_KEY: mean_scalar_target_matrix,\n VECTOR_PREDICTOR_NAMES_KEY: example_dict[VECTOR_PREDICTOR_NAMES_KEY],\n VECTOR_PREDICTOR_VALS_KEY: mean_vector_predictor_matrix,\n VECTOR_TARGET_NAMES_KEY: example_dict[VECTOR_TARGET_NAMES_KEY],\n VECTOR_TARGET_VALS_KEY: mean_vector_target_matrix,\n HEIGHTS_KEY: example_dict[HEIGHTS_KEY]\n }", "def summarize_metrics(metrics):\n summarized = {}\n for k in metrics:\n if k.endswith('mse'):\n summarized[k[:-3] + 'rmse'] = np.sqrt(np.mean(metrics[k]))\n elif k.startswith('err'):\n summarized[k + '_mean'] = np.mean(metrics[k])\n summarized[k + '_rmse'] = np.sqrt(np.mean(metrics[k]**2))\n elif k.endswith('nomean'):\n summarized[k] = metrics[k]\n else:\n summarized[k] = np.mean(metrics[k])\n\n return summarized", "def compute_average_metrics(meters):\n metrics = {m: vs.avg for m, vs in meters.items()}\n metrics = {\n m: v if isinstance(v, float) else v.item()\n for m, v in metrics.items()\n }\n return metrics", "def find_average(dict_list, key):\n working_sum = 0\n for num in dict_list:\n working_sum += float(num[key])\n return round(working_sum / len(dict_list), 2)", "def dictionary_of_metrics(items):\n \n n = len(items)\n average = round(np.mean(items), 2)\n median = round(np.median(items), 2)\n variance = round((sum((items-np.mean(items))**2))/(n-1), 2)\n standard_dev = round(((sum((items-np.mean(items))**2))/(n-1))**(1/2), 2)\n minimum = round(min(items), 2)\n maximum = round(max(items), 2)\n \n return {'mean':average,'median':median,'var':variance,'std':standard_dev,'min':minimum,'max':maximum}\n pass", "def average(weighted, unweighted):\n avg = {}\n error = {}\n for k, it in unweighted.items():\n avg[k] = np.mean(it, axis=0)\n error[k] = scipy.stats.sem(it, axis=0)\n\n N = np.abs(avg[\"overlap\"].diagonal())\n Nij = np.sqrt(np.outer(N, N))\n\n for k, it in weighted.items():\n avg[k] = np.mean(it, axis=0) / Nij\n error[k] = scipy.stats.sem(it, axis=0) / Nij\n return avg, error", "def mean_per_subject_dict(data: Dict[str, Any], dict_levels: Sequence[str], param_name: str) -> pd.DataFrame:\n 
result_data = {}\n\n one_col_df = False\n for key, value in data.items():\n _assert_is_dtype(value, (dict, pd.DataFrame))\n if isinstance(value, dict):\n if len(dict_levels) <= 1:\n raise ValueError(\"Invalid number of 'dict_levels' specified!\")\n # nested dictionary\n key_len = 1 if isinstance(key, (str, int)) else len(key)\n result_data[key] = mean_per_subject_dict(value, dict_levels[key_len:], param_name)\n else:\n value.columns.name = \"subject\"\n if len(value.columns) == 1:\n one_col_df = True\n df = pd.DataFrame(value.mean(axis=0), columns=[param_name])\n result_data[key] = df\n\n key_lengths = list({1 if isinstance(k, (str, int)) else len(k) for k in result_data})\n if len(key_lengths) != 1:\n raise ValueError(\"Inconsistent dictionary key lengths!\")\n key_lengths = key_lengths[0]\n names = dict_levels[0:key_lengths]\n if isinstance(names, str):\n names = [names]\n ret = pd.concat(result_data, names=names)\n if one_col_df:\n ret.index = ret.index.droplevel(-1)\n return ret", "def normalize(raw_feature_list):\n result={}\n for feature in raw_feature_list:\n mean=statistics.mean(raw_feature_list[feature])\n stdev=statistics.pstdev(raw_feature_list[feature])\n print(feature,':','mean:',mean,'stdev:',stdev)\n for i in range(len(raw_feature_list[feature])):\n raw_feature_list[feature][i]-= mean\n raw_feature_list[feature][i]/= stdev", "def average_error(state_edges_predicted, state_edges_actual):\r\n\r\n d1 = state_edges_predicted #Assign the predicted state edges to a variable\r\n length = list(state_edges_predicted.values()) #Create a list of the predicted state edge values\r\n z = len(length) #Find the number of elements in the list \"length\"\r\n d2 = state_edges_actual #Assign the actual state edge values to a variable\r\n d3 = {} #Create an empty dictionary\r\n\r\n for k, v in d1.items(): \r\n d3[k] = v - d2.get(k, 0) #Find the difference between the predicted values and the actual values\r\n\r\n list1 = list(d3.values()) #Assign the differences to a list\r\n x = list(map(abs, list1)) #Take the absolute value of the differences\r\n a = sum(x) #Take the sum of the new values\r\n\r\n return a/z #Calculate the average of all the errors\r", "def compute_average_metrics(meters):\n metrics = {m: vs.avg for m, vs in meters.items()}\n metrics = {\n m: float(v) if isinstance(v, float) or isinstance(v, int) else v.item()\n for m, v in metrics.items()\n }\n return metrics" ]
[ "0.6419814", "0.6262054", "0.61799544", "0.6119319", "0.59288824", "0.5902225", "0.5875992", "0.58529663", "0.5829413", "0.5736729", "0.5708193", "0.5633498", "0.55455", "0.5544103", "0.55197287", "0.54795134", "0.5463006", "0.54573137", "0.544684", "0.5438539", "0.5426526", "0.54002124", "0.5397387", "0.53875715", "0.5386275", "0.53737754", "0.5347166", "0.5342086", "0.5330013", "0.531288" ]
0.78539246
0
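dict_recur_mean_err above depends on three names the snippet never defines: math, scipy.stats.t, and a module-level significance level alpha. A usage sketch with those assumptions made explicit, applied to three hypothetical benchmark runs:

import math
from scipy.stats import t  # Student's t quantile, used for the confidence interval

alpha = 0.05  # assumed module-level significance level

runs = [
    {"latency": {"mean_ms": 10.0}, "throughput": 100.0},
    {"latency": {"mean_ms": 12.0}, "throughput": 110.0},
    {"latency": {"mean_ms": 11.0}, "throughput": 105.0},
]
result = dict_recur_mean_err(runs)
# result mirrors the nested shape with (mean, error) tuples at the leaves,
# e.g. result["throughput"] == (105.0, <t-based confidence half-width>)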
Helper function to connect reach from/to wastewater network elements
def connect_reach(self, reach_id, from_id=None, to_id=None): data = {} if from_id is not None: data['rp_from_fk_wastewater_networkelement'] = from_id if to_id is not None: data['rp_to_fk_wastewater_networkelement'] = to_id self.update('vw_qgep_reach', data, reach_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def swconnect(localpop, remotepop, mac, vc, meter):\n core = Container.fromAnchor(localpop.properties['CoreRouter'])\n corename = core.resourceName\n (corename,coredom,coreport,corevlan) = getvcnode(vc, corename)\n remotecore = Container.fromAnchor(remotepop.properties['CoreRouter'])\n remotecorename = remotecore.resourceName\n (remotecorename,remotecoredom,remotecoreport,remotecorevlan) = getvcnode(vc, remotecorename)\n\n hwswitch = Container.fromAnchor(localpop.properties['HwSwitch'])\n hwswitchname = hwswitch.resourceName\n swswitch = Container.fromAnchor(localpop.properties['SwSwitch'])\n swswitchname = swswitch.resourceName\n\n remotehwswitch = Container.fromAnchor(remotepop.properties['HwSwitch'])\n remotehwswitchname = remotehwswitch.resourceName\n remoteswswitch = Container.fromAnchor(remotepop.properties['SwSwitch'])\n remoteswswitchname = remoteswswitch.resourceName\n\n topology = Container.getContainer(localpop.properties['SwSwitch']['containerName'])\n\n # Find hwswitch/port - core/port\n hwport_tocore = getgriport(topology, hwswitch, core, coreport)\n # Find remotehwswitch/port - remotecore/port\n remotehwport_tocore = getgriport(topology, remotehwswitch, remotecore, remotecoreport)\n\n links = getlinks2(topology, hwswitchname, swswitchname)\n if links == None or len(links) == 0:\n print \"No links from \", hwswitchname, \" to \", swswitchname\n return None\n hwswlink = None\n for l in links:\n (node, port) = linkednode2(l, swswitchname)\n if port != None:\n # Found the (a) link\n hwswlink = l\n hwport_tosw = port\n break\n\n remotelinks = getlinks2(topology, remotehwswitchname, remoteswswitchname)\n if remotelinks == None or len(remotelinks) == 0:\n print \"No links from \", remotehwswitchname, \" to \", remoteswswitchname\n return None\n remotehwswlink = None\n for l in remotelinks:\n (node, port) = linkednode2(l, remoteswswitchname)\n if port != None:\n # Found the (a) link\n remotehwswlink = l\n remotehwport_tosw = port\n break\n\n # Find the ports on hwswitch and remotehwswitch that go to the corresponding software switches\n\n # Set up forwarding for broadcast traffic from the new local pop\n # Install outbound flow on hwswitch from swswitch to the GRI\n fh1 = SCC.SdnInstallForward1(javaByteArray2(hwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(hwport_tosw), # hw port facing software switch\n int(corevlan),\n \"00:00:00:00:00:00\",\n mac,\n str(hwport_tocore),\n int(corevlan),\n mac,\n 0,\n 0,\n meter)\n if fh1 == None:\n return None\n\n # Install inbound flow on remotehwswitch from GRI to remoteswswitch\n fh2 = SCC.SdnInstallForward1(javaByteArray2(remotehwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(remotehwport_tocore),\n int(remotecorevlan),\n \"00:00:00:00:00:00\",\n mac,\n str(remotehwport_tosw), # remotehw port facing remote software switch\n int(remotecorevlan),\n mac,\n 0,\n 0,\n meter)\n if fh2 == None:\n SCC.deleteforward(fh1)\n return None\n\n # Set up forwarding for broadcast traffic to the new local pop\n # Install inbound flow on hwswitch from GRI to swswitch\n fh3 = SCC.SdnInstallForward1(javaByteArray2(hwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(hwport_tocore),\n int(corevlan),\n \"00:00:00:00:00:00\",\n mac,\n str(hwport_tosw), # hw port facing software switch\n int(corevlan),\n mac,\n 0,\n 0,\n meter)\n if fh3 == None:\n SCC.deleteforward(fh1)\n SCC.deleteforward(fh2)\n return None\n\n # Install outbound flow on remotehwswitch from remoteswswitch to GRI\n fh4 = 
SCC.SdnInstallForward1(javaByteArray2(remotehwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(remotehwport_tosw), # remotehw port facing remote software switch\n int(remotecorevlan),\n \"00:00:00:00:00:00\",\n mac,\n str(remotehwport_tocore),\n int(remotecorevlan),\n mac,\n 0,\n 0,\n meter)\n if fh4 == None:\n SCC.deleteforward(fh1)\n SCC.deleteforward(fh2)\n SCC.deleteforward(fh3)\n return None\n\n # Return something\n return (fh1, fh2, fh3, fh4)", "def connectToWifi(strip, start):\r\n wifi = network.WLAN(network.STA_IF)\r\n wifi.active(True)\r\n wifi.connect(SSID,PW)\r\n while not wifi.isconnected():\r\n # only flash the wifi connection wait signal if starting\r\n if start:\r\n ledFlash(strip, LED_COLOR_BLUE, 0.5)\r\n pass\r\n return wifi", "def navigate_to_network_then_to_interfaces(driver):\n driver.find_element_by_xpath('//mat-list-item[@ix-auto=\"option__Network\"]').click()\n wait_on_element(driver, 0.5, 30, '//mat-list-item[@ix-auto=\"option__Interfaces\"]')\n driver.find_element_by_xpath('//mat-list-item[@ix-auto=\"option__Interfaces\"]').click()", "def connect():", "def connect(src, target, reftype):", "def connect_to(self, inf1, router2, inf2):\n self.interfaces[inf1]['connect'] = [router2.hostname, inf2]\n router2.interfaces[inf2]['connect'] = [self.hostname, inf1]", "def connect_to_reference_network(self):\n self.dut.droid.wakeLockAcquireBright()\n self.dut.droid.wakeUpNow()\n try:\n self.dut.droid.wifiConnectByConfig(self.reference_networks[0][\"2g\"])\n connect_result = self.dut.ed.pop_event(\n wifi_constants.CONNECT_BY_CONFIG_SUCCESS, SHORT_TIMEOUT)\n self.log.info(connect_result)\n return wutils.track_connection(self.dut,\n self.reference_networks[0][\"2g\"][\"SSID\"], 1)\n except Exception as error:\n self.log.exception(traceback.format_exc())\n self.log.error(\"Connection to network fail because %s\", error)\n return False\n finally:\n self.dut.droid.wifiLockRelease()\n self.dut.droid.goToSleepNow()", "def createWIFIAccessPoint():\n ifname = config.get(\"interface\", \"wifi\")\n ipaddress = config.get(\"hotspot\", \"ip\")\n prefix = int(config.get(\"hotspot\", \"prefix\"))\n ssid = config.get(\"hotspot\", \"ssid\")\n password = config.get(\"hotspot\", \"password\")\n ################################\n s_wifi = dbus.Dictionary(\n {\n \"ssid\": dbus.ByteArray(ssid.encode(\"utf-8\")),\n \"mode\": \"ap\",\n })\n s_wsec = dbus.Dictionary(\n {\n \"key-mgmt\": \"wpa-psk\",\n \"psk\": password\n })\n s_con = dbus.Dictionary(\n {\"type\": \"802-11-wireless\",\n \"interface-name\":ifname ,\n \"uuid\": str(uuid.uuid4()),\n \"id\": ssid,\n \"autoconnect\":dbus.Boolean(True)\n })\n addr1 = dbus.Dictionary({\"address\": ipaddress, \"prefix\": dbus.UInt32(prefix)})\n dns = []\n s_ip4 = dbus.Dictionary(\n {\n \"address-data\": dbus.Array([addr1], signature=dbus.Signature(\"a{sv}\")),\n \"dns\": dbus.Array(dns, signature=dbus.Signature('u'), variant_level=1),\n \"method\": \"manual\",\n })\n s_ip6 = dbus.Dictionary({\"method\": \"ignore\"})\n con = dbus.Dictionary(\n {\n \"802-11-wireless\": s_wifi,\n \"802-11-wireless-security\":s_wsec,\n \"connection\": s_con,\n \"ipv4\": s_ip4,\n \"ipv6\": s_ip6\n })\n try:\n logging.info(\"Creating hotspot connection: {} - {}\".format(s_con[\"id\"], s_con[\"uuid\"]))\n ##########\n bus = dbus.SystemBus()\n proxy = bus.get_object(\n \"org.freedesktop.NetworkManager\", \"/org/freedesktop/NetworkManager/Settings\"\n )\n settings = dbus.Interface(proxy, \"org.freedesktop.NetworkManager.Settings\")\n connection = settings.AddConnection(con)\n 
logging.info(f\"Created access point connection {connection}\")\n except Exception as e:\n logging.error(\"Hotspot connection creation failed\")\n logging.error(e)", "def connect(self) -> None:", "def connect():\n \n print(\"*****Starting connection*****\")\n \n ssid = id_key.network_id #hidden ssid\n key = id_key.network_key #hidden key\n \n station = network.WLAN(network.STA_IF)\n \n if station.isconnected() == True:\n print(\"*****Already connected*****\")\n return\n \n station.active(True)\n station.connect(ssid, key)\n \n while station.isconnected() == False:\n pass\n \n print(\"*****Connection successful*****\")\n print(station.ifconfig())", "def connect(self, node1, node2):\n self.neighbour1 = node1\n self.neighbour2 = node2", "def connect(self, dev_eui, app_eui, app_key):\n \n dev_eui = unhexlify(dev_eui)\n app_eui = unhexlify(app_eui)\n app_key = unhexlify(app_key)\n \n # Disable blue blinking and turn LED off\n LED.heartbeat(False)\n LED.off()\n\n # Initialize LoRa in LORAWAN mode\n self.lora = LoRa(mode = LoRa.LORAWAN)\n\n # Join a network using OTAA (Over the Air Activation)\n self.lora.join(activation = LoRa.OTAA, auth = (dev_eui, app_eui, app_key), timeout = 0)\n\n # Wait until the module has joined the network\n count = 0\n while not self.lora.has_joined():\n LED.blink(1, 2.5, 0xff0000)\n # print(\"Trying to join: \" , count)\n count = count + 1\n\n # Create a LoRa socket\n LED.blink(2, 0.1)\n self.s = socket.socket(socket.AF_LORA, socket.SOCK_RAW)\n\n # Set the LoRaWAN data rate\n self.s.setsockopt(socket.SOL_LORA, socket.SO_DR, 5)\n\n # Make the socket non-blocking\n self.s.setblocking(False)\n\n # print (\"Joined! \", count)\n # print(\"Create LoRaWAN socket\")\n\n # Create a raw LoRa socket\n self.s = socket.socket(socket.AF_LORA, socket.SOCK_RAW)\n self.s.setblocking(False)", "def _create_special_connections(self):\n\t\tfor connection in self._infoSpecialConnections:\n\t\t\t# List of source cells ids\n\t\t\tsourcesId = self.cellsId[connection[0]][connection[1]]\n\t\t\t# gather the sources all together\n\t\t\tsourcesId = comm.gather(sourcesId,root=0)\n\t\t\tif rank==0: sourcesId = sum(sourcesId,[])\n\t\t\tsourcesId = comm.bcast(sourcesId,root=0)\n\t\t\t# List of taget cells ids\n\t\t\ttargetsId = self.cellsId[connection[2]][connection[3]]\n\t\t\t# Ratio of connection\n\t\t\tconRatio = connection[4]\n\t\t\t# Number of connections\n\t\t\tconNum = int(connection[5])\n\t\t\t# Weight of connections\n\t\t\tconWeight = float(connection[6])\n\t\t\t# Type of synapse\n\t\t\tsynType = connection[7]\n\t\t\t# connect sources to targets\n\t\t\tself._connect(sourcesId,targetsId,conRatio,conNum,conWeight,synType)", "def connect(self):", "def connect(self):", "def joinwifi():\n station = network.WLAN(network.STA_IF) # initiate a station mode\n\n if not station.isconnected():\n print('connecting to network:', ssid())\n station.active(True)\n station.connect(ssid(), password())\n \n\n while not station.isconnected():\n pass\n\n # deactivating access point mode\n ap = network.WLAN(network.AP_IF)\n ap.active(False)\n\n ip = station.ifconfig()[0]\n print('connected as:', ip)\n\n return ip", "def connect(self):\n self.net.active(True)\n self.net.config(essid=self.ssid, password=self.pwd, channel=3)\n\n while not self.net.active():\n pass\n\n self.net.ifconfig((\"192.168.4.5\", \"255.255.255.0\", \"192.168.4.1\", \"208.67.222.222\"))", "def connect(self) -> None:\n ...", "def connect_one_way(node1, node2, weight):\n node1.add_or_update_neighbour(node2, weight)", "async def 
async_step_connect_to_wifi(self, user_input=None):\n wifi_network = self.hass.states.get(\"input_select.ais_android_wifi_network\")\n networks = wifi_network.attributes[\"options\"]\n # remove empty option\n if networks[0] == ais_global.G_EMPTY_OPTION:\n networks.pop(0)\n\n errors = {}\n\n if len(networks) == 0:\n errors[\"general\"] = \"wifi_error\"\n return self.async_abort(reason=\"search_failed\")\n\n if user_input is None:\n data_schema = vol.Schema(\n {\n vol.Required(\"networks\", default=networks[0]): vol.In(\n list(networks)\n ),\n vol.Optional(CONF_PASSWORD): str,\n vol.Optional(\"rescan_wifi\", default=False): bool,\n }\n )\n\n else:\n # check if user want to rescan\n if \"rescan_wifi\" in user_input:\n if user_input[\"rescan_wifi\"]:\n return await self.async_step_one(user_input=None)\n\n password = \"\"\n if CONF_PASSWORD in user_input:\n password = user_input[CONF_PASSWORD]\n data_schema = vol.Schema(\n {\n vol.Required(\"networks\", default=user_input[\"networks\"]): vol.In(\n list(networks)\n ),\n vol.Optional(CONF_PASSWORD, default=password): str,\n vol.Optional(\"rescan_wifi\", default=False): bool,\n }\n )\n\n # try to connect\n if errors == {}:\n # send a request to frame to add the new device\n network = user_input[\"networks\"]\n text = \"Łączę z siecią \" + network.split(\";\")[0]\n self.hass.async_run_job(\n self.hass.services.async_call(\n \"ais_ai_service\", \"say_it\", {\"text\": text}\n )\n )\n await self.hass.async_add_executor_job(\n connect_to_wifi, network, password\n )\n # request was correctly send, now check and wait for the answer\n for x in range(0, 7):\n result = await self.hass.async_add_executor_job(\n check_wifi_connection, self.hass, x\n )\n _LOGGER.info(\"Spawdzam połączenie z siecią WiFi: \" + str(result))\n if result == network.split(\";\")[0]:\n # remove if exists\n exists_entries = [\n entry.entry_id for entry in self._async_current_entries()\n ]\n if exists_entries:\n await asyncio.wait(\n [\n self.hass.config_entries.async_remove(entry_id)\n for entry_id in exists_entries\n ]\n )\n # return await self.async_step_connect_to_wifi(user_input=None)\n return self.async_create_entry(title=\"WiFi\", data=user_input)\n else:\n errors = {\"base\": \"conn_failed\"}\n\n # check wifi list len - without empty option\n l_net = str(len(networks) - 1)\n\n return self.async_show_form(\n step_id=\"connect_to_wifi\",\n errors=errors if errors else {},\n data_schema=data_schema,\n description_placeholders={\"wifi_number_info\": l_net},\n )", "def connect(from_tuple, to_tuple):\n\n from_unit, from_port = from_tuple\n to_unit, to_port = to_tuple\n\n to_unit.connect(from_unit, from_port, to_port)", "def ConnectWireless(self, id):\n self.SetForcedDisconnect(False)\n self.wifi.before_script = self.GetWirelessProperty(id, 'beforescript')\n self.wifi.after_script = self.GetWirelessProperty(id, 'afterscript')\n self.wifi.disconnect_script = self.GetWirelessProperty(id,\n 'disconnectscript')\n print 'Connecting to wireless network ' + self.LastScan[id]['essid']\n return self.wifi.Connect(self.LastScan[id], debug=self.debug_mode)", "def connect(self):\n self.sta_if = network.WLAN(network.STA_IF)\n self.sta_if.active(False)\n sleep(1)\n self.sta_if.active(True)\n\n dbg(\"Interface active\")\n if self.check_ap(self.ssid):\n # connect to access point\n if not self.sta_if.isconnected():\n dbg('connecting to AP...')\n self.sta_if.active(True)\n self.sta_if.connect(self.ssid, self.key)\n while not self.sta_if.isconnected():\n machine.idle()\n # Do we need a timeout here?\n 
dbg(self.sta_if.ifconfig())\n else:\n dbg(\"WLAN already connected\")\n dbg(self.sta_if.ifconfig())\n else:\n dbg(\"Target SSID not found.\")\n reset(\"Could not connect to network - target SSID is not availble.\", HARD)", "def connect() -> None:\n wlan = network.WLAN(network.STA_IF)\n wlan.active(True)\n\n if not wlan.isconnected():\n wlan.connect(config.WIFI_SSID, config.WIFI_PASSWORD)\n # Wait for connection.\n for _ in range(20):\n if wlan.isconnected():\n return\n utime.sleep(1)\n\n raise Exception('Could not connect to network')", "def connect(self):\n # check if network is connected. If yes: return, finished\n # 2019-0801 changed: if self._wlan.isconnected():\n if self.isconnected:\n if USE_DEBUG:\n print('WLAN already connected')\n return self._wlan.ifconfig()\n\n # activate Wifi interface\n if self._wlan.active() is False:\n self._wlan.active(True)\n # scan available networks for the required one\n nets = self._wlan.scan()\n for net in nets:\n ssid = net[0]\n if ssid == bytearray(self._config['SSID']): # must use bytearray!\n if USE_DEBUG:\n print(\"Startup WiFi ...\" + self._config['SSID'])\n # specify if static or dynamic IP is requested\n # STATIC IP: an IP is given\n # DYNAMIC IP: None\n if self._config['STATIC_IP'] is not '':\n if USE_DEBUG:\n print('WifiManager::Static IP configuration')\n # configure network for static IP\n self._wlan.ifconfig((self._config['STATIC_IP'],\n self._config['MASKER'],\n self._config['GATEWAY_IP'],\n self._config['DNS']))\n\n # connect to SSID... either for STATIC or DYNAMIC IP\n self._wlan.connect(self._config['SSID'],\n self._config['PASSWRD'])\n while not self.isconnected:\n idle() # save power while waiting\n time.sleep_ms(100) # give it some time\n if USE_DEBUG:\n print(\"Network '{}' connection succeeded!\".format(ssid))\n break\n\n # check connection, if not succesfull: raise exception\n if not self._wlan.active():\n raise exception('Network {0} not found.'.format(ssid))\n\n # returns network configuration...\n # although 'myPy.local' should work on MacOS X (Bonjour)\n return self._wlan.ifconfig()", "def connecthostbroadcast(localpop,\n hwport_tosite,\n sitevlan,\n meter=3,\n broadcast_rewritemac = None):\n\n hwswitch = Container.fromAnchor(localpop.properties['HwSwitch'])\n hwswitchname = hwswitch.resourceName\n swswitch = Container.fromAnchor(localpop.properties['SwSwitch'])\n swswitchname = swswitch.resourceName\n topology = Container.getContainer(localpop.properties['SwSwitch']['containerName'])\n\n # Find the port on the HwSwitch connected to the software switch\n links = getlinks2(topology, hwswitchname, swswitchname)\n if links == None or len(links) == 0:\n print \"No links from\", hwswitchname, \"to\", swswitchname\n return False\n hwport_tosw = None\n for link in links:\n (node, port) = linkednode2(link, swswitchname)\n if port != None:\n # Found the link we're looking for\n hwport_tosw = port\n break\n\n broadcast = \"FF:FF:FF:FF:FF:FF\"\n translated_broadcast = broadcast\n if broadcast_rewritemac != None:\n translated_broadcast = broadcast_rewritemac\n\n fh1 = SCC.SdnInstallForward1(javaByteArray2(hwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(hwport_tosw),\n int(sitevlan),\n \"00:00:00:00:00:00\",\n translated_broadcast,\n str(hwport_tosite),\n int(sitevlan),\n broadcast,\n 0,\n 0,\n meter)\n if fh1 == None:\n return None\n\n fh2 = SCC.SdnInstallForward1(javaByteArray2(hwswitch.properties['DPID']),\n 1,\n BigInteger.ZERO,\n str(hwport_tosite),\n int(sitevlan),\n \"00:00:00:00:00:00\",\n broadcast,\n 
str(hwport_tosw),\n int(sitevlan),\n translated_broadcast,\n 0,\n 0,\n meter)\n if fh2 == None:\n SCC.deleteforward(fh1)\n return None\n\n return (fh1, fh2)", "def connect(self, address: Tuple[str, int]) -> None:\n ...", "def connect(self,ip,port):\n return self.network.connect(ip,port)", "def join_network(self):\n connect_nodes_bi(self.nodes, 1, 2)\n self.sync_all()", "def test_make_connection(network_with_devices):\n network = network_with_devices\n devices = network.devices\n names = devices.names\n\n [SW1_ID, SW2_ID, OR1_ID, I1, I2] = names.lookup([\"Sw1\", \"Sw2\", \"Or1\", \"I1\",\n \"I2\"])\n\n or1 = devices.get_device(OR1_ID)\n\n # or1 inputs are initially unconnected\n assert or1.inputs == {I1: None,\n I2: None}\n\n # Make connections\n network.make_connection(SW1_ID, None, OR1_ID, I1)\n network.make_connection(SW2_ID, None, OR1_ID, I2)\n\n # or1 inputs should now be connected\n assert or1.inputs == {I1: (SW1_ID, None),\n I2: (SW2_ID, None)}" ]
[ "0.6267456", "0.61050874", "0.6091898", "0.5930662", "0.59238946", "0.5836637", "0.5822732", "0.5821258", "0.58018726", "0.58003306", "0.5727876", "0.5715374", "0.5697749", "0.5681944", "0.5681944", "0.5671002", "0.5660585", "0.5634978", "0.562125", "0.56210154", "0.56170815", "0.561587", "0.5601899", "0.5545056", "0.55373883", "0.5528132", "0.5517429", "0.5480495", "0.54691344", "0.5467837" ]
0.6569376
0
Resolve activelink values into x and y directions. Takes a set of values defined on active links, and returns those values
def resolve_values_on_active_links(grid, active_link_values): link_lengths = grid.length_of_link[grid.active_links] return ( np.multiply( ( ( grid.node_x[grid._activelink_tonode] - grid.node_x[grid._activelink_fromnode] ) / link_lengths ), active_link_values, ), np.multiply( ( ( grid.node_y[grid._activelink_tonode] - grid.node_y[grid._activelink_fromnode] ) / link_lengths ), active_link_values, ), )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve_values_on_links(grid, link_values):\n return (\n np.multiply(\n (\n (\n grid.node_x[grid.node_at_link_head]\n - grid.node_x[grid.node_at_link_tail]\n )\n / grid.length_of_link\n ),\n link_values,\n ),\n np.multiply(\n (\n (\n grid.node_y[grid.node_at_link_head]\n - grid.node_y[grid.node_at_link_tail]\n )\n / grid.length_of_link\n ),\n link_values,\n ),\n )", "def getActive(self):\n x=[];y=[]\n for i in zip(self.x,self.y,self.active):\n if i[0]!=None and i[1]!=None and i[2] == 1:\n x.append(i[0])\n y.append(i[1])\n return x,y", "def coordinates(self, distances):\n \n for i, anchor_id in enumerate(self.anchor_ids):\n if distances.has_key(anchor_id):\n self.distances_array[i] = distances[anchor_id]\n else:\n self.distances_array[i] = -1.0\n\n self.le_coordinates(self.handle, self.n_distance, self.distances_array, self.location_array)\n x, y = self.location_array\n \n return x, y", "def resolve(self, anchors):\n\n for anchor in anchors:\n if self.node[DuAttrRefid] in anchor.ids():\n self.toAnchor = anchor\n break", "def build_links(self):\n xygrid = self.xymap.xygrid\n\n # we must use the xygrid coordinates\n x, y = self.x, self.y\n\n # scan in all directions for links\n for direction, (dx, dy) in MAPSCAN.items():\n\n lx, ly = x + dx, y + dy\n\n if lx in xygrid and ly in xygrid[lx]:\n link = xygrid[lx][ly]\n\n # just because there is a link here, doesn't mean it has a\n # connection in this direction. If so, the `end_node` will be None.\n end_node, weight, steps = link.traverse(REVERSE_DIRECTIONS[direction])\n\n if end_node:\n # the link could be followed to an end node!\n\n self.first_links[direction] = link\n\n # check the actual direction-alias to use, since this may be\n # different than the xygrid cardinal directions. There must be\n # no duplicates out of this node or there will be a\n # multi-match error later!\n first_step_name = steps[0].direction_aliases.get(direction, direction)\n if first_step_name in self.closest_neighbor_names:\n raise MapParserError(\n f\"has more than one outgoing direction '{first_step_name}'. \"\n \"All directions out of a node must be unique.\",\n self,\n )\n self.closest_neighbor_names[first_step_name] = direction\n\n node_index = end_node.node_index\n self.weights[node_index] = weight\n self.links[direction] = end_node\n # this is useful for map building later - there could be multiple\n # links tied together until getting to the node\n self.xy_steps_to_node[direction] = steps\n\n # used for building the shortest path. 
Note that we store the\n # aliased link directions here, for quick display by the\n # shortest-route solver\n shortest_route = self.shortest_route_to_node.get(node_index, (\"\", [], BIGVAL))[\n 2\n ]\n if weight < shortest_route:\n self.shortest_route_to_node[node_index] = (first_step_name, steps, weight)", "def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]", "def evolve_assuming_no_enemy_and_get_origin_and_target_and_move(self, moves_as_yx_coordinates_direction_list):\n origin_target_and_moves = []\n for location, direction in moves_as_yx_coordinates_direction_list:\n y, x = location\n if direction is STILL:\n self.strength[y, x] += self.prod[y, x]\n continue\n new_x, new_y = self.get_new_coordinates(x, y, direction)\n origin_target_and_moves.append((location, (new_y, new_x), direction))\n if self.owners[(new_y, new_x)] == self.playerID:\n self.strength[new_y, new_x] += self.strength[y, x]\n self.strength[y, x] = 0\n elif self.strength[y, x] < self.strength[new_y, new_x]:\n self.strength[new_y, new_x] -= self.strength[y, x]\n else: # site gets overtaken!\n self.strength[new_y, new_x] = self.strength[y, x] - self.strength[new_y, new_x]\n self.owners[new_y, new_x] = self.playerID\n self.strength[y, x] = 0\n if self.strength[(new_y, new_x)] > 255:\n self.strength[(new_y, new_x)] = 255\n return origin_target_and_moves", "def locations_adjacent_to(loc):\n return [(loc[0] + direction[0], loc[1] + direction[1]) for direction in [(0,-1),(0,1),(-1,0),(1,0)]]", "def get_sources_and_targets(index_pairings):\n\n source_target_dictionary = {}\n links_list = []\n \n itr = 0\n \n for pair in index_pairings:\n source = pair[0]\n target = pair[1]\n\n source_target_dictionary = {\"source\":source, \"target\":target}\n links_list.append(source_target_dictionary)\n\n return links_list", "def coords_in_range(self, anchor, steps):\n coords = list()\n x_low = -steps\n x_high = steps\n\n #Generate using an axial formula to make it easier\n #calculate z via the other two and throw away ones that aren't in bounds\n for x in range(x_low, x_high+1):\n for y in range(max(-steps, -x-steps), min(steps, -x+steps)+1):\n z = -x - y\n coords.append(anchor+self.coord(x, y, z))\n return coords", "def cache_links(self):\n for source_location in self.gen_locations():\n for vi, delta in vi_delta_pairs:\n drow, dcol = delta\n for command, magnitude in ((vi, 1), (vi.upper(), 8)):\n target_location = source_location\n for i in range(magnitude):\n trow, tcol = target_location\n next_target_location = (trow + drow, tcol + dcol)\n if self.is_inbounds(next_target_location):\n target_location = next_target_location\n else:\n break\n triple = (source_location, target_location, command)\n self.cached_links.append(triple)", "def __one_forward_open(x, y, c, l):\n x -= 1\n y -= 1\n if not c: x, y = l - y, l - x\n return x, y", "def forward(self, anchors, pullers, pushers):\n\n x = self.features(anchors)\n y = self.features(pullers)\n z = self.features(pushers)\n\n return x, y, z", "def setup_array_of_orientation_codes(self):\n # Create array for the orientation of each active link\n self.active_link_orientation = zeros(self.grid.number_of_active_links, dtype=int)\n\n # Set its value according to the different in y coordinate between each\n # link's TO and FROM nodes (the numpy \"astype\" method turns the\n # resulting array into integer format)\n dy = (self.grid.node_y[self.grid.node_at_link_head[self.grid.active_links]] -\n self.grid.node_y[self.grid.node_at_link_tail[self.grid.active_links]])\n self.active_link_orientation 
= dy.astype(int)\n\n if _DEBUG:\n six.print_(self.active_link_orientation)", "def get_anchor_points(self):\n rows, cols = np.where(self.overlap_mask)\n self.anchor_points = tuple(zip(rows, cols))[:: self.sampling_int]\n print(\"# of anchors: {}\".format(len(self.anchor_points)))", "def relativize_coordinates(self):\n if len(self.nodes) + len(self.connecting) < 1:\n return\n smallest_c = (self.nodes+self.connecting)[0].c\n for node in self.nodes+self.connecting:\n if node.c < smallest_c:\n smallest_c = node.c\n for node in self.nodes+self.connecting:\n node.c = node.c - smallest_c", "def anchor_pairs(self):\n # TODO unit test for this method\n def _anchors(given_anchor):\n if given_anchor is not None:\n yield given_anchor\n else:\n yield from anchors.Anchor\n for src_anch in _anchors(self.orig_anchor):\n for dest_anch in _anchors(self.dest_anchor):\n yield (src_anch, dest_anch)", "def _mkanchors(ws, hs, x_ref, y_ref):\n\n ws = ws[:, np.newaxis]\n hs = hs[:, np.newaxis]\n\n anchors = np.hstack(\n (\n x_ref - 0.5 * (ws - 1),\n y_ref - 0.5 * (hs - 1),\n x_ref + 0.5 * (ws - 1),\n y_ref + 0.5 * (hs - 1)\n )\n )\n return anchors", "def _maping(x,y,l,a):\n newx = (x**2 *(l* ((x**2 + y**2)**(a/2) - 1) + 2) - l * y**2 *((x**2 + y**2)**(a/2) - 1))/(x**2 + y**2) \n newy = (2 * x* y *(l* ((x**2 + y**2)**(a/2) - 1) + 1))/(x**2 + y**2)\n return newx, newy", "def __get_adjacents_from_id(self, position):\n if position == 1: #Upper-left corner.\n return [position + 5, position + 1]\n elif position == 5: #Upper-right corner.\n return [position + 5, position - 1]\n elif position == 21: #Lower-left corner.\n return [position - 5, position + 1]\n elif position == 25: #Lower-right corner.\n return [position - 5, position - 1]\n elif position == 2 or position == 3 or position == 4: #Upper wall.\n return [position + 5, position - 1, position + 1]\n elif position == 10 or position == 15 or position == 20: #Right wall.\n return [position + 5, position - 5, position - 1]\n elif position == 6 or position == 11 or position == 16: #Left wall.\n return [position + 5, position - 5, position + 1]\n elif position == 22 or position == 23 or position == 24: #Bottom wall.\n return [position - 5, position - 1, position + 1]\n else: #All other positions.\n return [position - 5, position + 5, position - 1, position + 1]", "def dof_1r_to_point(link, center, from_pt, to_pt, axis, axis_1, axis_2):\n\n return", "def __get_position(self, value, state):\n coords = np.argwhere(state == value).flatten()\n return coords", "def findRelationships(RelationShipList):\r\n for i in RelationShipList:\r\n getPos = cmds.xform(i[1], q=True, t=True, ws=True)\r\n cmds.xform(i[0], t=getPos, ws=True)", "def ref_values(x, y):\n check_evaluation_points(x, y)\n values = np.empty((21,x.shape[0]))\n _ap.ap_ref_values(x, y, x.shape[0], values)\n return values", "def navigate_waypoint(commands: list, verbose=False) -> tuple:\n\n position: list = [0, 0] # x, y\n waypoint_cartesian: list = [10, 1] # x, y (relative to ship)\n waypoint_polar: list = [0, 1, 10] # bearing, distance, and offset in clockwise direction\n\n conversion: dict = {'N': 1, 'S': -1, 'E': 1, 'W': -1, 'L': -1, 'R': 1, 'F': 1}\n \n if verbose: \n print(f'position: {position}, waypoint_cartesian: {waypoint_cartesian}, waypoint_polar: {waypoint_polar}')\n\n for command in commands: \n \n if verbose: \n print(f'command: {command}')\n \n change: int = command['value'] * conversion[command['action']]\n\n if command['action'] in ['N', 'S']: \n waypoint_cartesian[1] += change # in y\n waypoint_polar = 
cartesian_to_polar(waypoint_cartesian)\n elif command['action'] in ['E', 'W']: \n waypoint_cartesian[0] += change # in x\n waypoint_polar = cartesian_to_polar(waypoint_cartesian)\n elif command['action'] in ['L', 'R']: \n \n bearing_old = waypoint_polar[0]\n bearing_new = bearing_old + change\n\n if bearing_new > 270:\n bearing_new -= 360\n if bearing_new < 0: \n bearing_new += 360\n \n waypoint_polar[0] = bearing_new\n waypoint_cartesian = polar_to_cartesian(waypoint_polar)\n\n elif command['action'] == 'F': \n \n position[0] += waypoint_cartesian[0] * change\n position[1] += waypoint_cartesian[1] * change\n\n else: \n raise ValueError(\"Invalid action\")\n \n if verbose: \n print(f'position: {position}, waypoint_cartesian: {waypoint_cartesian}, waypoint_polar: {waypoint_polar}')\n\n return tuple(position)", "def set_goal_pos(self):\n goal_list = np.where(self.value_map == self.value_map.max())\n # assume the first one\n self.goal_pos = (goal_list[0][0], goal_list[1][0])", "def mapping(x, xp, fp):\n xmin, xmax = xp\n fmin, fmax = fp\n slope = (fmax - fmin) / (xmax - xmin)\n return (x - xmin) * slope + fmin", "def light_source_directions():\n L = np.array([[-0.06059872, -0.44839055, 0.8917812],\n [-0.05939919, -0.33739538, 0.93948714],\n [-0.05710194, -0.21230722, 0.97553319],\n [-0.05360061, -0.07800089, 0.99551134],\n [-0.04919816, 0.05869781, 0.99706274],\n [-0.04399823, 0.19019233, 0.98076044],\n [-0.03839991, 0.31049925, 0.9497977],\n [-0.03280081, 0.41611025, 0.90872238],\n [-0.18449839, -0.43989616, 0.87889232],\n [-0.18870114, -0.32950199, 0.92510557],\n [-0.1901994, -0.20549935, 0.95999698],\n [-0.18849605, -0.07269848, 0.97937948],\n [-0.18329657, 0.06229884, 0.98108166],\n [-0.17500445, 0.19220488, 0.96562453],\n [-0.16449474, 0.31129005, 0.93597008],\n [-0.15270716, 0.4160195, 0.89644202],\n [-0.30139786, -0.42509698, 0.85349393],\n [-0.31020115, -0.31660118, 0.89640333],\n [-0.31489186, -0.19549495, 0.92877599],\n [-0.31450962, -0.06640203, 0.94692897],\n [-0.30880699, 0.06470146, 0.94892147],\n [-0.2981084, 0.19100538, 0.93522635],\n [-0.28359251, 0.30729189, 0.90837601],\n [-0.26670649, 0.41020998, 0.87212122],\n [-0.40709586, -0.40559588, 0.81839168],\n [-0.41919869, -0.29999906, 0.85689732],\n [-0.42618633, -0.18329412, 0.88587159],\n [-0.42691512, -0.05950211, 0.90233197],\n [-0.42090385, 0.0659006, 0.90470827],\n [-0.40860354, 0.18720162, 0.89330773],\n [-0.39141794, 0.29941372, 0.87013988],\n [-0.3707838, 0.39958255, 0.83836338],\n [-0.499596, -0.38319693, 0.77689378],\n [-0.51360334, -0.28130183, 0.81060526],\n [-0.52190667, -0.16990217, 0.83591069],\n [-0.52326874, -0.05249686, 0.85054918],\n [-0.51720021, 0.06620003, 0.85330035],\n [-0.50428312, 0.18139393, 0.84427174],\n [-0.48561334, 0.28870793, 0.82512267],\n [-0.46289771, 0.38549809, 0.79819605],\n [-0.57853599, -0.35932235, 0.73224555],\n [-0.59329349, -0.26189713, 0.76119165],\n [-0.60202327, -0.15630604, 0.78303027],\n [-0.6037003, -0.04570002, 0.7959004],\n [-0.59781529, 0.06590169, 0.79892043],\n [-0.58486953, 0.17439091, 0.79215873],\n [-0.56588359, 0.27639198, 0.77677747],\n [-0.54241965, 0.36921337, 0.75462733],\n [0.05220076, -0.43870637, 0.89711304],\n [0.05199786, -0.33138635, 0.9420612],\n [0.05109826, -0.20999284, 0.97636672],\n [0.04919919, -0.07869871, 0.99568366],\n [0.04640163, 0.05630197, 0.99733494],\n [0.04279892, 0.18779527, 0.98127529],\n [0.03870043, 0.30950341, 0.95011048],\n [0.03440055, 0.41730662, 0.90811441],\n [0.17290651, -0.43181626, 0.88523333],\n [0.17839998, -0.32509996, 
0.92869988],\n [0.18160174, -0.20480196, 0.96180921],\n [0.18200745, -0.07490306, 0.98044012],\n [0.17919505, 0.05849838, 0.98207285],\n [0.17329685, 0.18839658, 0.96668244],\n [0.1649036, 0.30880674, 0.93672045],\n [0.1549931, 0.41578148, 0.89616009],\n [0.28720483, -0.41910705, 0.8613145],\n [0.29740177, -0.31410186, 0.90160535],\n [0.30420604, -0.1965039, 0.9321185],\n [0.30640529, -0.07010121, 0.94931639],\n [0.30361153, 0.05950226, 0.95093613],\n [0.29588748, 0.18589214, 0.93696036],\n [0.28409783, 0.30349768, 0.90949304],\n [0.26939905, 0.40849857, 0.87209694],\n [0.39120402, -0.40190413, 0.8279085],\n [0.40481085, -0.29960803, 0.86392315],\n [0.41411685, -0.18590756, 0.89103626],\n [0.41769724, -0.06449957, 0.906294],\n [0.41498764, 0.05959822, 0.90787296],\n [0.40607977, 0.18089099, 0.89575537],\n [0.39179226, 0.29439419, 0.87168279],\n [0.37379609, 0.39649585, 0.83849122],\n [0.48278794, -0.38169046, 0.78818031],\n [0.49848546, -0.28279175, 0.8194761],\n [0.50918069, -0.1740934, 0.84286803],\n [0.51360856, -0.05870098, 0.85601427],\n [0.51097962, 0.05899765, 0.8575658],\n [0.50151639, 0.17420569, 0.84742769],\n [0.48600297, 0.28260173, 0.82700506],\n [0.46600106, 0.38110087, 0.79850181],\n [0.56150442, -0.35990283, 0.74510586],\n [0.57807114, -0.26498677, 0.77176147],\n [0.58933134, -0.1617086, 0.7915421],\n [0.59407609, -0.05289787, 0.80266769],\n [0.59157958, 0.057798, 0.80417224],\n [0.58198189, 0.16649482, 0.79597523],\n [0.56620006, 0.26940003, 0.77900008],\n [0.54551481, 0.36380988, 0.7550205]], dtype=float)\n return L", "def _calcFollow(self, FOLLOW):\n for A in self.N:\n for prod in self.P[A]:\n text = prod.split(sep=' ')\n for i in range(len(text) - 1):\n B = text[i].strip('[]')\n succ = text[i + 1]\n\n if B in self.N:\n FOLLOW[B] |= self.first(succ) - {'eps'}\n\n if 'eps' in self.first(succ) and B in self.N:\n FOLLOW[B] |= FOLLOW[A]\n\n if text[-1].strip('[]') in self.N:\n FOLLOW[text[-1].strip('[]')] |= FOLLOW[A]", "def getPositionValues(a, x):\n raise NotImplementedError('getPositionValues not implemented')" ]
[ "0.6321813", "0.56443125", "0.5429352", "0.53935385", "0.52528846", "0.5167584", "0.5058837", "0.5045933", "0.50244004", "0.49784845", "0.49667338", "0.4963591", "0.4912155", "0.4907423", "0.48758897", "0.48721966", "0.48685086", "0.48672333", "0.4834237", "0.4823273", "0.48097178", "0.48063394", "0.47946766", "0.47799188", "0.4775936", "0.47322178", "0.47308403", "0.47252828", "0.47244537", "0.47239852" ]
0.75788385
0
Resolve link values into x and y directions. Takes a set of values defined on active links, and returns those values
def resolve_values_on_links(grid, link_values): return ( np.multiply( ( ( grid.node_x[grid.node_at_link_head] - grid.node_x[grid.node_at_link_tail] ) / grid.length_of_link ), link_values, ), np.multiply( ( ( grid.node_y[grid.node_at_link_head] - grid.node_y[grid.node_at_link_tail] ) / grid.length_of_link ), link_values, ), )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resolve_values_on_active_links(grid, active_link_values):\n link_lengths = grid.length_of_link[grid.active_links]\n return (\n np.multiply(\n (\n (\n grid.node_x[grid._activelink_tonode]\n - grid.node_x[grid._activelink_fromnode]\n )\n / link_lengths\n ),\n active_link_values,\n ),\n np.multiply(\n (\n (\n grid.node_y[grid._activelink_tonode]\n - grid.node_y[grid._activelink_fromnode]\n )\n / link_lengths\n ),\n active_link_values,\n ),\n )", "def build_links(self):\n xygrid = self.xymap.xygrid\n\n # we must use the xygrid coordinates\n x, y = self.x, self.y\n\n # scan in all directions for links\n for direction, (dx, dy) in MAPSCAN.items():\n\n lx, ly = x + dx, y + dy\n\n if lx in xygrid and ly in xygrid[lx]:\n link = xygrid[lx][ly]\n\n # just because there is a link here, doesn't mean it has a\n # connection in this direction. If so, the `end_node` will be None.\n end_node, weight, steps = link.traverse(REVERSE_DIRECTIONS[direction])\n\n if end_node:\n # the link could be followed to an end node!\n\n self.first_links[direction] = link\n\n # check the actual direction-alias to use, since this may be\n # different than the xygrid cardinal directions. There must be\n # no duplicates out of this node or there will be a\n # multi-match error later!\n first_step_name = steps[0].direction_aliases.get(direction, direction)\n if first_step_name in self.closest_neighbor_names:\n raise MapParserError(\n f\"has more than one outgoing direction '{first_step_name}'. \"\n \"All directions out of a node must be unique.\",\n self,\n )\n self.closest_neighbor_names[first_step_name] = direction\n\n node_index = end_node.node_index\n self.weights[node_index] = weight\n self.links[direction] = end_node\n # this is useful for map building later - there could be multiple\n # links tied together until getting to the node\n self.xy_steps_to_node[direction] = steps\n\n # used for building the shortest path. 
Note that we store the\n # aliased link directions here, for quick display by the\n # shortest-route solver\n shortest_route = self.shortest_route_to_node.get(node_index, (\"\", [], BIGVAL))[\n 2\n ]\n if weight < shortest_route:\n self.shortest_route_to_node[node_index] = (first_step_name, steps, weight)", "def cache_links(self):\n for source_location in self.gen_locations():\n for vi, delta in vi_delta_pairs:\n drow, dcol = delta\n for command, magnitude in ((vi, 1), (vi.upper(), 8)):\n target_location = source_location\n for i in range(magnitude):\n trow, tcol = target_location\n next_target_location = (trow + drow, tcol + dcol)\n if self.is_inbounds(next_target_location):\n target_location = next_target_location\n else:\n break\n triple = (source_location, target_location, command)\n self.cached_links.append(triple)", "def coordinates(self, distances):\n \n for i, anchor_id in enumerate(self.anchor_ids):\n if distances.has_key(anchor_id):\n self.distances_array[i] = distances[anchor_id]\n else:\n self.distances_array[i] = -1.0\n\n self.le_coordinates(self.handle, self.n_distance, self.distances_array, self.location_array)\n x, y = self.location_array\n \n return x, y", "def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]", "def resolve(self, anchors):\n\n for anchor in anchors:\n if self.node[DuAttrRefid] in anchor.ids():\n self.toAnchor = anchor\n break", "def dof_1r_to_point(link, center, from_pt, to_pt, axis, axis_1, axis_2):\n\n return", "def get_link(self, site, direction, shape, links=None):\n if links is None:\n links = self.links\n return links[tuple(pbc(site, shape) + [direction])]", "def relativize_coordinates(self):\n if len(self.nodes) + len(self.connecting) < 1:\n return\n smallest_c = (self.nodes+self.connecting)[0].c\n for node in self.nodes+self.connecting:\n if node.c < smallest_c:\n smallest_c = node.c\n for node in self.nodes+self.connecting:\n node.c = node.c - smallest_c", "def produce_links_search(self, value_list:list) -> list:\n return [\n [self.produce_link_google(f) for f in value_list],\n [self.produce_link_qwant(f) for f in value_list],\n [self.produce_link_bing(f) for f in value_list],\n [self.produce_link_duckduckgo(f) for f in value_list],\n [self.produce_link_yahoo(f) for f in value_list]\n ]", "def evolve_assuming_no_enemy_and_get_origin_and_target_and_move(self, moves_as_yx_coordinates_direction_list):\n origin_target_and_moves = []\n for location, direction in moves_as_yx_coordinates_direction_list:\n y, x = location\n if direction is STILL:\n self.strength[y, x] += self.prod[y, x]\n continue\n new_x, new_y = self.get_new_coordinates(x, y, direction)\n origin_target_and_moves.append((location, (new_y, new_x), direction))\n if self.owners[(new_y, new_x)] == self.playerID:\n self.strength[new_y, new_x] += self.strength[y, x]\n self.strength[y, x] = 0\n elif self.strength[y, x] < self.strength[new_y, new_x]:\n self.strength[new_y, new_x] -= self.strength[y, x]\n else: # site gets overtaken!\n self.strength[new_y, new_x] = self.strength[y, x] - self.strength[new_y, new_x]\n self.owners[new_y, new_x] = self.playerID\n self.strength[y, x] = 0\n if self.strength[(new_y, new_x)] > 255:\n self.strength[(new_y, new_x)] = 255\n return origin_target_and_moves", "def get_sources_and_targets(index_pairings):\n\n source_target_dictionary = {}\n links_list = []\n \n itr = 0\n \n for pair in index_pairings:\n source = pair[0]\n target = pair[1]\n\n source_target_dictionary = {\"source\":source, \"target\":target}\n 
links_list.append(source_target_dictionary)\n\n return links_list", "def get_direction(self, start_direction):\n # get all visually connected links\n if not self.directions:\n directions = {}\n unhandled_links = list(self.get_linked_neighbors().keys())\n\n # get all straight lines (n-s, sw-ne etc) we can trace through\n # the dynamic link and remove them from the unhandled_links list\n unhandled_links_copy = unhandled_links.copy()\n for direction in unhandled_links_copy:\n if REVERSE_DIRECTIONS[direction] in unhandled_links_copy:\n directions[direction] = REVERSE_DIRECTIONS[\n unhandled_links.pop(unhandled_links.index(direction))\n ]\n\n # check if we have any non-cross-through paths left to handle\n n_unhandled = len(unhandled_links)\n if n_unhandled:\n # still remaining unhandled links. If there's not exactly\n # one 'incoming' and one 'outgoing' we can't figure out\n # where to go in a non-ambiguous way.\n if n_unhandled != 2:\n links = \", \".join(unhandled_links)\n raise MapParserError(\n f\"cannot determine how to connect in/out directions {links}.\", self\n )\n\n directions[unhandled_links[0]] = unhandled_links[1]\n directions[unhandled_links[1]] = unhandled_links[0]\n\n self.directions = directions\n\n return self.directions.get(start_direction)", "def ref_values(x, y):\n check_evaluation_points(x, y)\n values = np.empty((21,x.shape[0]))\n _ap.ap_ref_values(x, y, x.shape[0], values)\n return values", "def getPositionValues(a, x):\n raise NotImplementedError('getPositionValues not implemented')", "def findRelationships(RelationShipList):\r\n for i in RelationShipList:\r\n getPos = cmds.xform(i[1], q=True, t=True, ws=True)\r\n cmds.xform(i[0], t=getPos, ws=True)", "def local_action(self, *links, all_links):\n S = 0.0\n for link in links:\n site1 = link[:-1]\n u = link[-1]\n for v in range(self.dim):\n if v != u:\n site2 = np.array(site1) - self.bases[v]\n plaq1 = self.plaquette_operator(site1, u, v, all_links)\n plaq2 = self.plaquette_operator(site2, u, v, all_links)\n S += (plaq1 + plaq2)\n return S", "def further_query(link, coords, unassigned_data, r ):\n\n sep = np.abs(coords - unassigned_data)\n link = (sep <= r) & (link==True)\n\n return link", "def routes(x, y, results_dict):\n if (x, y) in results_dict: # if the value is already in the results cache then we don't need to calculate again\n return results_dict[(x, y)]\n\n # we only look at the top half of the grid\n # (as if you swap moves right/down you get equivalent number of paths)\n elif x > y:\n r = routes(y, x, results_dict)\n\n elif x == 0:\n return 1 # only one path when x coordinate is 0\n\n # from any one point you can either go down or left, then the sum of the positions gives the total for the original\n else:\n r = routes(x - 1, y, results_dict) + routes(x, y - 1, results_dict)\n results_dict[(x, y)] = r\n return r", "def __get_adjacents_from_id(self, position):\n if position == 1: #Upper-left corner.\n return [position + 5, position + 1]\n elif position == 5: #Upper-right corner.\n return [position + 5, position - 1]\n elif position == 21: #Lower-left corner.\n return [position - 5, position + 1]\n elif position == 25: #Lower-right corner.\n return [position - 5, position - 1]\n elif position == 2 or position == 3 or position == 4: #Upper wall.\n return [position + 5, position - 1, position + 1]\n elif position == 10 or position == 15 or position == 20: #Right wall.\n return [position + 5, position - 5, position - 1]\n elif position == 6 or position == 11 or position == 16: #Left wall.\n return [position + 5, 
position - 5, position + 1]\n elif position == 22 or position == 23 or position == 24: #Bottom wall.\n return [position - 5, position - 1, position + 1]\n else: #All other positions.\n return [position - 5, position + 5, position - 1, position + 1]", "def determine_addresses(self, x1, y1, d):\n rez = []\n addresses = self.__repository.get_all()\n for address in addresses:\n x2 = address.get_x()\n y2 = address.get_y()\n distance = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)\n if distance < d:\n rez.append([address, distance])\n return rez", "def navigate_waypoint(commands: list, verbose=False) -> tuple:\n\n position: list = [0, 0] # x, y\n waypoint_cartesian: list = [10, 1] # x, y (relative to ship)\n waypoint_polar: list = [0, 1, 10] # bearing, distance, and offset in clockwise direction\n\n conversion: dict = {'N': 1, 'S': -1, 'E': 1, 'W': -1, 'L': -1, 'R': 1, 'F': 1}\n \n if verbose: \n print(f'position: {position}, waypoint_cartesian: {waypoint_cartesian}, waypoint_polar: {waypoint_polar}')\n\n for command in commands: \n \n if verbose: \n print(f'command: {command}')\n \n change: int = command['value'] * conversion[command['action']]\n\n if command['action'] in ['N', 'S']: \n waypoint_cartesian[1] += change # in y\n waypoint_polar = cartesian_to_polar(waypoint_cartesian)\n elif command['action'] in ['E', 'W']: \n waypoint_cartesian[0] += change # in x\n waypoint_polar = cartesian_to_polar(waypoint_cartesian)\n elif command['action'] in ['L', 'R']: \n \n bearing_old = waypoint_polar[0]\n bearing_new = bearing_old + change\n\n if bearing_new > 270:\n bearing_new -= 360\n if bearing_new < 0: \n bearing_new += 360\n \n waypoint_polar[0] = bearing_new\n waypoint_cartesian = polar_to_cartesian(waypoint_polar)\n\n elif command['action'] == 'F': \n \n position[0] += waypoint_cartesian[0] * change\n position[1] += waypoint_cartesian[1] * change\n\n else: \n raise ValueError(\"Invalid action\")\n \n if verbose: \n print(f'position: {position}, waypoint_cartesian: {waypoint_cartesian}, waypoint_polar: {waypoint_polar}')\n\n return tuple(position)", "def coords_in_range(self, anchor, steps):\n coords = list()\n x_low = -steps\n x_high = steps\n\n #Generate using an axial formula to make it easier\n #calculate z via the other two and throw away ones that aren't in bounds\n for x in range(x_low, x_high+1):\n for y in range(max(-steps, -x-steps), min(steps, -x+steps)+1):\n z = -x - y\n coords.append(anchor+self.coord(x, y, z))\n return coords", "def translateValues(nadPoints):\n with arcpy.da.UpdateCursor(nadPoints,\n ['StN_PreDir', 'StN_PosDir', 'StN_PosTyp', 'County']) as cursor:\n for row in cursor:\n row[0] = directionDomain.get(row[0], None)\n row[1] = directionDomain.get(row[1], None)\n row[2] = streetDomain.get(row[2], None)\n row[3] = countyFipsDomain.get(row[3], None)\n cursor.updateRow(row)", "def light_source_directions():\n L = np.array([[-0.06059872, -0.44839055, 0.8917812],\n [-0.05939919, -0.33739538, 0.93948714],\n [-0.05710194, -0.21230722, 0.97553319],\n [-0.05360061, -0.07800089, 0.99551134],\n [-0.04919816, 0.05869781, 0.99706274],\n [-0.04399823, 0.19019233, 0.98076044],\n [-0.03839991, 0.31049925, 0.9497977],\n [-0.03280081, 0.41611025, 0.90872238],\n [-0.18449839, -0.43989616, 0.87889232],\n [-0.18870114, -0.32950199, 0.92510557],\n [-0.1901994, -0.20549935, 0.95999698],\n [-0.18849605, -0.07269848, 0.97937948],\n [-0.18329657, 0.06229884, 0.98108166],\n [-0.17500445, 0.19220488, 0.96562453],\n [-0.16449474, 0.31129005, 0.93597008],\n [-0.15270716, 0.4160195, 0.89644202],\n 
[-0.30139786, -0.42509698, 0.85349393],\n [-0.31020115, -0.31660118, 0.89640333],\n [-0.31489186, -0.19549495, 0.92877599],\n [-0.31450962, -0.06640203, 0.94692897],\n [-0.30880699, 0.06470146, 0.94892147],\n [-0.2981084, 0.19100538, 0.93522635],\n [-0.28359251, 0.30729189, 0.90837601],\n [-0.26670649, 0.41020998, 0.87212122],\n [-0.40709586, -0.40559588, 0.81839168],\n [-0.41919869, -0.29999906, 0.85689732],\n [-0.42618633, -0.18329412, 0.88587159],\n [-0.42691512, -0.05950211, 0.90233197],\n [-0.42090385, 0.0659006, 0.90470827],\n [-0.40860354, 0.18720162, 0.89330773],\n [-0.39141794, 0.29941372, 0.87013988],\n [-0.3707838, 0.39958255, 0.83836338],\n [-0.499596, -0.38319693, 0.77689378],\n [-0.51360334, -0.28130183, 0.81060526],\n [-0.52190667, -0.16990217, 0.83591069],\n [-0.52326874, -0.05249686, 0.85054918],\n [-0.51720021, 0.06620003, 0.85330035],\n [-0.50428312, 0.18139393, 0.84427174],\n [-0.48561334, 0.28870793, 0.82512267],\n [-0.46289771, 0.38549809, 0.79819605],\n [-0.57853599, -0.35932235, 0.73224555],\n [-0.59329349, -0.26189713, 0.76119165],\n [-0.60202327, -0.15630604, 0.78303027],\n [-0.6037003, -0.04570002, 0.7959004],\n [-0.59781529, 0.06590169, 0.79892043],\n [-0.58486953, 0.17439091, 0.79215873],\n [-0.56588359, 0.27639198, 0.77677747],\n [-0.54241965, 0.36921337, 0.75462733],\n [0.05220076, -0.43870637, 0.89711304],\n [0.05199786, -0.33138635, 0.9420612],\n [0.05109826, -0.20999284, 0.97636672],\n [0.04919919, -0.07869871, 0.99568366],\n [0.04640163, 0.05630197, 0.99733494],\n [0.04279892, 0.18779527, 0.98127529],\n [0.03870043, 0.30950341, 0.95011048],\n [0.03440055, 0.41730662, 0.90811441],\n [0.17290651, -0.43181626, 0.88523333],\n [0.17839998, -0.32509996, 0.92869988],\n [0.18160174, -0.20480196, 0.96180921],\n [0.18200745, -0.07490306, 0.98044012],\n [0.17919505, 0.05849838, 0.98207285],\n [0.17329685, 0.18839658, 0.96668244],\n [0.1649036, 0.30880674, 0.93672045],\n [0.1549931, 0.41578148, 0.89616009],\n [0.28720483, -0.41910705, 0.8613145],\n [0.29740177, -0.31410186, 0.90160535],\n [0.30420604, -0.1965039, 0.9321185],\n [0.30640529, -0.07010121, 0.94931639],\n [0.30361153, 0.05950226, 0.95093613],\n [0.29588748, 0.18589214, 0.93696036],\n [0.28409783, 0.30349768, 0.90949304],\n [0.26939905, 0.40849857, 0.87209694],\n [0.39120402, -0.40190413, 0.8279085],\n [0.40481085, -0.29960803, 0.86392315],\n [0.41411685, -0.18590756, 0.89103626],\n [0.41769724, -0.06449957, 0.906294],\n [0.41498764, 0.05959822, 0.90787296],\n [0.40607977, 0.18089099, 0.89575537],\n [0.39179226, 0.29439419, 0.87168279],\n [0.37379609, 0.39649585, 0.83849122],\n [0.48278794, -0.38169046, 0.78818031],\n [0.49848546, -0.28279175, 0.8194761],\n [0.50918069, -0.1740934, 0.84286803],\n [0.51360856, -0.05870098, 0.85601427],\n [0.51097962, 0.05899765, 0.8575658],\n [0.50151639, 0.17420569, 0.84742769],\n [0.48600297, 0.28260173, 0.82700506],\n [0.46600106, 0.38110087, 0.79850181],\n [0.56150442, -0.35990283, 0.74510586],\n [0.57807114, -0.26498677, 0.77176147],\n [0.58933134, -0.1617086, 0.7915421],\n [0.59407609, -0.05289787, 0.80266769],\n [0.59157958, 0.057798, 0.80417224],\n [0.58198189, 0.16649482, 0.79597523],\n [0.56620006, 0.26940003, 0.77900008],\n [0.54551481, 0.36380988, 0.7550205]], dtype=float)\n return L", "def getActive(self):\n x=[];y=[]\n for i in zip(self.x,self.y,self.active):\n if i[0]!=None and i[1]!=None and i[2] == 1:\n x.append(i[0])\n y.append(i[1])\n return x,y", "def locations_adjacent_to(loc):\n return [(loc[0] + direction[0], loc[1] + direction[1]) for direction in 
[(0,-1),(0,1),(-1,0),(1,0)]]", "def all_distances(self):\n points = self.color_lookup_table_points\n\n red = np.repeat(np.expand_dims(points[0], axis=0), points[0].size, axis=0)\n green = np.repeat(np.expand_dims(points[1], axis=0), points[1].size, axis=0)\n blue = np.repeat(np.expand_dims(points[2], axis=0), points[2].size, axis=0)\n\n self.distances = np.sqrt(\n np.square(red - red.transpose())\n + np.square(green - green.transpose())\n + np.square(blue - blue.transpose()))", "def _calcFollow(self, FOLLOW):\n for A in self.N:\n for prod in self.P[A]:\n text = prod.split(sep=' ')\n for i in range(len(text) - 1):\n B = text[i].strip('[]')\n succ = text[i + 1]\n\n if B in self.N:\n FOLLOW[B] |= self.first(succ) - {'eps'}\n\n if 'eps' in self.first(succ) and B in self.N:\n FOLLOW[B] |= FOLLOW[A]\n\n if text[-1].strip('[]') in self.N:\n FOLLOW[text[-1].strip('[]')] |= FOLLOW[A]", "def iter_links(self):\n for site in self.iter_sites():\n for u in range(self.dim):\n yield tuple(list(site) + [u])" ]
[ "0.7462041", "0.5736362", "0.5348133", "0.5260617", "0.5245823", "0.5127555", "0.5098766", "0.50402445", "0.49898282", "0.49610978", "0.496048", "0.49522752", "0.4950983", "0.49342787", "0.48836175", "0.48753074", "0.48695916", "0.48694083", "0.4847132", "0.48457292", "0.482966", "0.4814302", "0.48096976", "0.4796198", "0.4793605", "0.47925663", "0.47917324", "0.47911704", "0.47811052", "0.47706512" ]
0.71486044
1
Temporary fix until master of pyzmq is released
def fix_zmq_exit(): import zmq ctx = zmq.Context.instance() ctx.term()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zmq_version():\n return \"%i.%i.%i\" % zmq_version_info()", "def zmq_version():\n return \"%i.%i.%i\" % zmq_version_info()", "def __init__(self, ip='127.0.0.1', port='50020'):\n self.ip = ip \n self.port = port\n self.ctx = zmq.Context()\n self.socket = zmq.Socket(self.ctx, zmq.REQ) # this is pub socket", "def pyzmq_version():\n if __revision__:\n return '@'.join([__version__,__revision__[:6]])\n else:\n return __version__", "def pyzmq_version():\n if __revision__:\n return '@'.join([__version__,__revision__[:6]])\n else:\n return __version__", "def pyzmq_version_info():\n return version_info", "def __init__(self, port=1071):\n\n context = zmq.Context()\n\n self.socket = context.socket(zmq.REP)\n self.socket.bind('tcp://*:' + str(port))\n\n self.socket.recv()", "def main(connection_file):\n\n ctx = zmq.Context.instance()\n\n with open(connection_file) as f:\n cfg = json.loads(f.read())\n\n reg_url = cfg['interface']\n iopub_port = cfg['iopub']\n iopub_url = f\"{reg_url}:{iopub_port}\"\n\n session = Session(key=cfg['key'].encode('ascii'))\n sub = ctx.socket(zmq.SUB)\n\n # This will subscribe to all messages:\n sub.SUBSCRIBE = b''\n # replace with b'' with b'engine.1.stdout' to subscribe only to engine 1's stdout\n # 0MQ subscriptions are simple 'foo*' matches, so 'engine.1.' subscribes\n # to everything from engine 1, but there is no way to subscribe to\n # just stdout from everyone.\n # multiple calls to subscribe will add subscriptions, e.g. to subscribe to\n # engine 1's stderr and engine 2's stdout:\n # sub.SUBSCRIBE = b'engine.1.stderr'\n # sub.SUBSCRIBE = b'engine.2.stdout'\n sub.connect(iopub_url)\n while True:\n try:\n idents, msg = session.recv(sub, mode=0)\n except KeyboardInterrupt:\n return\n # ident always length 1 here\n topic = idents[0].decode('utf8', 'replace')\n if msg['msg_type'] == 'stream':\n # stdout/stderr\n # stream names are in msg['content']['name'], if you want to handle\n # them differently\n print(\"{}: {}\".format(topic, msg['content']['text']))\n elif msg['msg_type'] == 'error':\n # Python traceback\n c = msg['content']\n print(topic + ':')\n for line in c['traceback']:\n # indent lines\n print(' ' + line)\n elif msg['msg_type'] == 'error':\n # Python traceback\n c = msg['content']\n print(topic + ':')\n for line in c['traceback']:\n # indent lines\n print(' ' + line)", "def receive_zmq_send(self, zmq_host, zmq_rcv_port):\n\n interrupted = False\n while not interrupted:\n print ('waiting')\n msg = self.socket.recv_json()\n key = msg.get(\"key\")\n if key == \"end\":\n print ('end of data, closing connection')\n interrupted = True\n self.destroy()\n elif key == \"dim\":\n print('initializing dims')\n self.dims = (msg[\"dim_x\"], msg[\"dim_y\"])\n if self.num_sinograms > 0:\n if (self.beg_sinogram < 0) or (self.beg_sinogram + self.num_sinograms > self.dims[0]):\n raise Exception(\"Exceeds the sinogram boundary: {} vs. 
{}\".format(\n self.beg_sinogram + self.num_sinograms, self.dims[0]))\n self.beg_index = self.beg_sinogram * self.dims[1]\n self.end_index = self.beg_sinogram * self.dims[1] + self.num_sinograms * self.dims[1]\n elif key == \"image\":\n print('got msg')\n msg[\"receiving_timestamp\"] = time.time()\n dtype = msg[\"dtype\"]\n uniqueId = msg['image_number']\n theta = msg['theta']\n ver_result = msg['ver']\n\n\n img = np.frombuffer(self.socket.recv(), dtype=dtype)\n\n if self.num_sinograms != 0:\n img = img[self.beg_index: self.end_index]\n img = img.reshape((self.num_sinograms, self.dims[1]))\n else:\n img = img.reshape(self.dims)\n\n self.builder.Reset()\n serializer = TraceSerializer.ImageSerializer(self.builder)\n serialized_data = serializer.serialize(image=img, uniqueId=uniqueId,\n rotation=theta, seq=self.seq)\n self.publisher_socket.send(serialized_data)\n self.seq += 1\n\n else:\n pass\n\n print(\"Connection ended\")", "def main(_):\n context = zmq.Context()\n socket = context.socket(zmq.REQ)\n socket.connect(CORENLP_ADDRESS)\n socket.send(\"stop\")\n message = socket.recv()\n print(\"Received reply [%s]\" % message)", "def controls():\n\n context = zmq.Context()\n\n print(\"Transmitting commands to process.\")\n socket = context.socket(zmq.REQ)\n rc = socket.connect(\"ipc:///tmp/mail_queue_ipc\")\n #print(rc)\n\n\n for request in range(2):\n print(\"Sending request %s\" % request)\n socket.send(b\"insert\")\n\n message = socket.recv()\n print(\"Recieved reply %s [ %s ]\" % (request, message))\n time.sleep(1)", "def start(self):\n zmq_uri = (\n \"{protocol}://{address}:{port}\".format(\n protocol=self.protocol, address=self.address, port=self.port\n )\n if self.port\n else \"{protocol}://{address}\".format( # noqa\n protocol=self.protocol, address=self.address\n )\n )\n log.debug(\"ZMQ URI: %s\", zmq_uri)\n self.ctx = zmq.Context()\n if hasattr(zmq, self.type):\n skt_type = getattr(zmq, self.type)\n else:\n skt_type = zmq.PULL\n self.sub = self.ctx.socket(skt_type)\n self.sub.connect(zmq_uri)\n if self.hwm is not None:\n self.sub.setsockopt(zmq.RCVHWM, self.hwm)\n if self.recvtimeout is not None:\n log.debug(\"Setting RCVTIMEO to %d\", self.recvtimeout)\n self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout)\n if self.keepalive is not None:\n log.debug(\"Setting TCP_KEEPALIVE to %d\", self.keepalive)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive)\n if self.keepalive_idle is not None:\n log.debug(\"Setting TCP_KEEPALIVE_IDLE to %d\", self.keepalive_idle)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle)\n if self.keepalive_interval is not None:\n log.debug(\"Setting TCP_KEEPALIVE_INTVL to %d\", self.keepalive_interval)\n self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval)", "def zmq_qry_pub(context):\n app.logger.info(\"zmq_qry_pub started\")\n socket = context.socket(zmq.PUB)\n socket.connect('tcp://127.0.0.1:7000')\n\n timestamps = ['0810', '0811', '0812']\n idx = EquityIndex('CAC')\n\n # for ts in cycle(timestamps):\n for ts in timestamps:\n price_data = idx.components_last_px(ts)\n\n for topic, msg_data in price_data.iteritems():\n if msg_data:\n # push the code/ticker into the dict\n msg_data['ticker'] = topic\n # reformat with a colon\n msg_data['ts'] = ts[:2] + ':' + ts[2:]\n # and jsonify....\n msg = json.dumps(msg_data)\n socket.send(msg)\n\n gevent.sleep(WAIT)\n\n app.logger.info(\"zmq_qry_pub closed\")", "def receiveData(self):\n self.context = zmq.Context()\n self.socket = self.context.socket(zmq.PAIR)\n 
#self.socket.connect(\"tcp://localhost:5556\")\n self.socket.connect(\"ipc:///tmp/mysocket\")\n print(\"Communication via IPC - Mac and Linux Only\")\n #Envia uma mensagem pedindo para comecar\n startstr = \"START\"\n self.socket.send(startstr.encode('utf-8'))\n time.sleep(1)\n #Recebe os dados\n while True:\n contents = self.socket.recv()\n self.commsqueue.put(contents)", "def load_zmq(finder, module):\n finder.IncludePackage(\"zmq.backend.cython\")\n if sys.platform == \"win32\":\n # Not sure yet if this is cross platform\n import zmq.libzmq\n srcFileName = os.path.basename(zmq.libzmq.__file__)\n finder.IncludeFiles(\n os.path.join(module.path[0], srcFileName), srcFileName)", "def initzmq(self):\n\n if \"topics\" not in self.configData:\n raise Exception(\"Topics not found in %s\" % self.configPath)\n\n for topic in self.configData['topics']:\n addr = self.gen_address(topic['protocol'], topic['address'],\n topic['port'])\n socket = self.build_socket(topic['paradigm'], topic['topic'], addr)\n self.topics[topic['name']] = socket", "def _program_zynq_ps(self):\r\n\r\n pass", "def test_ipc_queues():\n IPCComm.ipc_queues()", "def dispatcher( port, cmd, files, allworkers, start ):\n # Only the host running as dispatcher should be calling this.\n\n host = ipaddrs( socket.gethostname() )\n\n # Initialize a 0mq context\n\n context = zmq.Context()\n\n # Set up a socket to receive task requests and send replies over.\n # The linger option is set to help make sure all comunication is\n # delivered when the thread ends. The time unit is milliseconds. A\n # rigorous receive request - send reply pattern must be followed as\n # the zmq.REP socket keeps track of who sent the request and thus\n # were the reply should go. Trying to do two receives or two sends\n # in a row will cause a fatal error or hang the program. 
Here we\n # set up the REP side of the socket pattern.\n\n dispatcher_socket = context.socket( zmq.REP )\n dispatcher_socket.setsockopt( zmq.LINGER, 5000 )\n dispatcher_socket.bind( \"tcp://%s:%s\" % ( host, port ) )\n\n maxtime = 0\n tasknum = 0\n workers = {}\n already_notified = 0\n\n sys.stderr.write ( \"Dispatcher:Start:%d\\n\" % ( start ) )\n sys.stderr.flush()\n\n # Adjust starting task for 0 offset:\n\n start = start - 1\n tasknum = start\n lasttask = 0\n\n for f in files[start:]:\n\n request = dispatcher_socket.recv_json()\n worker = request['worker']\n workers[worker] = 1\n\n # Interpret a negative maxtime value as the time up signal.\n\n if request['maxtime'] >= 0 :\n\n if request['maxtime'] > maxtime :\n\n maxtime = request['maxtime']\n sys.stderr.write( \"Dispatcher:Maxtime:%s:%.2f:%.2f\\n\"\n % ( worker, maxtime, time.time() ) )\n sys.stderr.flush()\n\n tasknum = tasknum + 1\n task_message = { 'cmd' : cmd, 'file' : f.strip(),\n 'maxtime' : maxtime, 'tasknum' : tasknum }\n\n else:\n\n maxtime = -1\n sys.stderr.write( \"Dispatcher:Timeup:%s:%.2f\\n\"\n % ( worker, time.time() ) )\n sys.stderr.flush()\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n already_notified += 1\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n if maxtime < 0 :\n break\n\n # Now make sure all workers have received the shutdown message.\n\n shutdown = allworkers - already_notified\n\n if lasttask == 0 :\n # All tasks handed out before any completions received.\n # Have to assume all will complete.\n lasttask = tasknum\n\n if shutdown > 0 :\n task_message = { 'cmd' : \"FINI\", 'file' : \"None\",\n 'maxtime' : -1, 'tasknum' : tasknum }\n sys.stderr.write( \"Dispatcher:Shutdown:%d\\n\" % ( shutdown ) )\n sys.stderr.flush()\n\n # There is always a chance multiple assignments went out before\n # a timeout was received. 
All should sense time out as well,\n # so check for that when handling their final requests.\n\n for w in range( shutdown ):\n\n request = dispatcher_socket.recv_json()\n\n if request['maxtime'] < 0 :\n if request['lasttask'] < lasttask :\n lasttask = request['lasttask']\n\n dispatcher_socket.send_json( task_message )\n\n sys.stderr.write( \"Dispatcher:Last:%d\\n\" % ( lasttask ) )\n sys.stderr.flush()", "def __get_zmq_pub(self):\n print(\"Publishing to tcp://127.0.0.1:%d channel: tweets\" % self.port)\n context = zmq.Context()\n socket = context.socket(zmq.PUB)\n socket.bind(\"tcp://127.0.0.1:%d\" % self.port)\n return socket", "def zpipe(ctx):\n a = ctx.socket(zmq.PAIR)\n b = ctx.socket(zmq.PAIR)\n a.linger = b.linger = 0\n a.hwm = b.hwm = 1\n iface = f\"inproc://{binascii.hexlify(os.urandom(8))}\"\n a.bind(iface)\n b.connect(iface)\n return a, b", "def __init__(self, worker_id=0, base_port=5005):", "def connect_to_worker():\n socket = context.socket(zmq.REQ)\n socket.connect(\"tcp://localhost:5555\")\n return socket", "def test_rmq_es_connector_connections():\n rmq_es = RmqEs()\n rmq_es.connections(False)", "def __init__(self, mq_choice=\"zmq\"):\n self.mq = mq_choice\n func = getattr(self, \"_init_{}\".format(self.mq))\n func()", "def main():\n # Prepare context and sockets\n context = zmq.Context.instance()\n frontend = context.socket(zmq.ROUTER)\n frontend.identity = 'BROKER'.encode('utf-8')\n frontend.bind(names.BROKER_IN)\n\n # Initialize main loop state\n count = NBR_DEVS\n devs = set()\n poller = zmq.Poller() \n \n # setup Tk window\n top = tk.Tk()\n top.geometry(\"1000x250\")\n devs_frame = tk.Frame(top)\n lbl = tk.Label(devs_frame, text=\"Connected Devices\")\n listbox = tk.Listbox(devs_frame)\n lbl.pack()\n listbox.pack()\n devs_frame.pack(side=tk.LEFT)\n log_frame = tk.Frame(top)\n msg_log = tk.Text(log_frame)\n msg_log.pack()\n log_frame.pack(side=tk.LEFT)\n gui_running = True\n\n poller.register(frontend, zmq.POLLIN)\n\n mail_table = {}\n\n def update_gui():\n listbox.delete(0, tk.END)\n i = 1\n for dev in devs:\n listbox.insert(i, str(dev))\n i = i + 1\n top.update_idletasks()\n top.update()\n \n def on_closing():\n gui_running = False\n\n top.protocol(\"WM_DELETE_WINDOW\", on_closing)\n\n def print_connections():\n for id, (from_addr, to_addr, timestamp, msg) in mail_table.items():\n if time.time() - timestamp > 5:\n app_log.debug('Message from {} to {} is older than 5 seconds'.format(from_addr.decode('utf-8'), to_addr.decode('utf-8')))\n if len(devs) > 0:\n app_log.info('connected devices: {}'.format(devs))\n else:\n app_log.info('no connected devices.')\n\n tasks = [print_connections, update_gui]\n times = [1, 0.2]\n timers = times.copy()\n\n while gui_running:\n start_time = time.time()\n\n msg_log.delete(1.0, tk.END)\n msg_log.insert(tk.END, print_mail_table(mail_table))\n\n sockets = dict(poller.poll(20))\n\n if frontend in sockets:\n # msg will be [socket identity, b'', b'HI' or b'BYE' or msg_id]\n msg = frontend.recv_multipart()\n app_log.info('received: {}'.format(msg))\n from_addr = msg[0]\n cmd = msg[2]\n \"\"\"\n Handle messages from workers to broker\n \"\"\"\n if cmd == b\"HI\":\n if from_addr in devs:\n app_log.warning(\"{} tried to join, but it already joined\".format(from_addr))\n msg = [from_addr, b\"\", b\"ERR\", b\"Device already connected\"]\n else:\n devs.add(from_addr)\n msg = [from_addr, b\"\", b\"OK\"]\n frontend.send_multipart(msg)\n app_log.debug('sending {}'.format(msg))\n elif cmd == b\"BYE\":\n try:\n devs.remove(from_addr)\n for x in 
mail_table:\n if x[0] == from_addr:\n del x\n except KeyError:\n app_log.warning('received BYE from {} but {} is not listed in devs'.format(from_addr, from_addr))\n else:\n \"\"\"\n Message should begin with msg_id, possibly dest, then (GET, SET, RET, MET), then extra info\n out_msg is the message sent to to_addr\n reply is the message sent to from_addr\n \"\"\"\n msg_id = msg[2]\n msg = msg[3:] # strip everything up to and including msg_id\n out_msg = None # goes to the to_addr, which is extracted from message or mail_table\n reply = None # goes to the from_addr\n if msg_id not in mail_table: # this could be a new request\n to_addr = msg[0]\n cmd = msg[1]\n msg = msg[1:] # strip the to_addr\n if to_addr in devs:\n if cmd == b'GET':\n out_msg = [to_addr, b'', msg_id] + msg\n reply = [from_addr, b'', msg_id, b'ACK']\n mail_table[msg_id] = (from_addr, to_addr, time.time(), msg)\n app_log.debug('Processed GET')\n elif cmd == b'SET':\n out_msg = [to_addr, b'', msg_id] + msg\n reply = [from_addr, b'', msg_id, b'ACK']\n mail_table[msg_id] = (from_addr, to_addr, time.time(), msg)\n app_log.debug('Processed SET')\n else:\n reply = [from_addr, b'', msg_id, b'ERR', b'Command not understood']\n app_log.warning('command {} not yet supported'.format(cmd))\n else:\n reply = [from_addr, b'', msg_id, b'ERR', b'Device not connected']\n app_log.debug('requested device {} does not exist'.format(to_addr))\n app_log.debug(print_mail_table(mail_table))\n else: # this could be a reply to a request\n to_addr = mail_table[msg_id][0] # lookup message requestor\n cmd = msg[0]\n if to_addr in devs:\n if from_addr != mail_table[msg_id][1]:\n app_log.critical('{} sent a message ID that does not agree with mail table.'.format(from_addr))\n app_log.critical(msg)\n app_log.critical(print_mail_table(mail_table))\n elif cmd == b'RET':\n out_msg = [to_addr, b'', msg_id] + msg\n del mail_table[msg_id]\n app_log.debug('Processed RET')\n elif cmd == b'MET':\n out_msg = [to_addr, b'', msg_id] + msg\n del mail_table[msg_id]\n app_log.debug('Processed MET')\n elif cmd == b'ERR':\n del mail_table[msg_id]\n out_msg = [to_addr, b'', msg_id] + msg\n else:\n del mail_table[msg_id]\n out_msg = [to_addr, b'', msg_id, b'ERR', b'Device replied poorly']\n app_log.warning('{} sent unrecognized response: {}'.format(from_addr, msg))\n else:\n app_log.warning('original requestor {} no longer connected'.format(to_addr))\n if out_msg is not None:\n try:\n frontend.send_multipart(out_msg)\n app_log.debug('sending {}'.format(out_msg))\n except zmq.ZMQBaseError as err:\n app_log.debug('failed to send {} with error: {}'.format(msg, err))\n if reply is not None:\n try:\n frontend.send_multipart(reply)\n app_log.debug('sending {}'.format(reply))\n except zmq.ZMQBaseError as err:\n app_log.debug('failed to send {} with error: {}'.format(msg, err))\n\n end_time = time.time()\n dt = end_time - start_time\n timers = [x - dt for x in timers]\n for i, x in enumerate(timers):\n if x < 0:\n tasks[i]()\n timers[i] = times[i]\n\n # Clean up\n frontend.close()\n context.term()\n top.destroy()", "def setup(self):\n self.context = zmq.Context()\n self.sub_socket = self.context.socket(zmq.SUB)\n if self.filter:\n self.sub_socket.setsockopt(zmq.SUBSCRIBE, self.filter)\n self.sub_socket.connect('tcp://'+self.host+':'+str(self.com_port))\n return self", "def _recv_thread_func(self):\r\n raise NotImplementedError()", "def onSlave(self):", "def device(front, end):\n try:\n context = SerializingContext(1)\n # Socket facing clients\n frontend = context.socket(zmq.XREP)\n 
frontend.bind(\"tcp://*:\" + str(front))\n # Socket facing services\n backend = context.socket(zmq.XREQ)\n backend.bind(\"tcp://*:\" + str(end))\n\n zmq.device(zmq.QUEUE, frontend, backend)\n except Exception as e:\n print(e)\n print(\"bringing down zmq device\")\n finally:\n pass\n frontend.close()\n backend.close()\n context.term()" ]
[ "0.6027707", "0.6027707", "0.58552617", "0.58471286", "0.58471286", "0.57001704", "0.56426144", "0.5506162", "0.54845864", "0.540713", "0.53992295", "0.5322331", "0.53118736", "0.5300016", "0.5295095", "0.5244118", "0.5238676", "0.52331984", "0.5188429", "0.5173681", "0.51529914", "0.505896", "0.50536233", "0.50410354", "0.5016093", "0.5006984", "0.5002173", "0.49711177", "0.4964692", "0.4958415" ]
0.6925526
0
Applies filters and stores job postings in a database.
job_list: a list of Job_Posting objects.
Avoids saving duplicate job postings into the database by hashing job_description.
Creates the database if it does not exist yet.
def store_data(job_list):
    if not job_list:
        raise ValueError('Job list is empty. To proceed, it must contain at least one item.')
    if not isfile('/data/visited_jobs.db'):
        print('DB not found')
        ds.create_db()
    accepted, not_accepted = 0, 0
    for job in job_list:
        job.hash = h.get_hash(job.description)
        if h.is_seen(job.hash):
            not_accepted += 1
            continue
        elif f.accepted_title(job.title) and f.accepted_level(job.level) and f.accepted_description(job.description):
            job.accepted = True
            ds.insert_job(job)
            accepted += 1
        else:
            job.accepted = False
            ds.insert_job(job)
            not_accepted += 1
    print(f'Jobs accepted: {accepted}\nJobs not accepted: {not_accepted}')
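For illustration only, a minimal sketch of driving store_data; the Job_Posting shape and the helper modules (h for hashing, f for filters, ds for storage) are assumptions inferred from the calls above, not part of the original code.

# Hypothetical Job_Posting record; fields mirror the attributes store_data touches.
from dataclasses import dataclass

@dataclass
class Job_Posting:
    title: str
    level: str
    description: str
    hash: str = ''
    accepted: bool = False

jobs = [Job_Posting('Data Engineer', 'Mid', 'Build and maintain ETL pipelines.')]
store_data(jobs)  # prints 'Jobs accepted: ...' / 'Jobs not accepted: ...'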
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateJobDB(request,Q={}):\n\tuser = request.user\n\t# Get metadata\n\tresponse = agaveRequestMetadataList(user,Q=Q)\n\t# Add job if not in db\n\tfor metadata in response['result']:\n\t\tvalue = metadata['value']\n\t\tif 'jobName' in value and 'parameters' in value:\n\t\t\tlogger.info('SetName: ' + value['jobName'] + ', Parameters: [' + ', '.join(value['parameters']) + '], Length: ' + str(len(value['parameters'])))\n\t\t\tif len(value['parameters']) == 2: \n\t\t\t\tjobName = value['jobName']\n\t\t\t\tpara1name = value['parameters'][0]\n\t\t\t\tpara2name = value['parameters'][1]\n\t\t\t\tjobsInDB = Job.objects.filter(name=jobName)\n\n\t\t\t\t# Update status if not 'FINISHED'\n\t\t\t\tfor job in jobsInDB:\n\t\t\t\t\tif job.status not in ['FINISHED']:\n\t\t\t\t\t\tjobResponse = agaveRequestJobSearch(user,jobId=job.jobid)\n\t\t\t\t\t\tstatus = jobResponse['result'][0]['status']\n\t\t\t\t\t\tcolor = 'red'\n\t\t\t\t\t\tif status == 'FINISHED':\n\t\t\t\t\t\t\tcolor = 'blue'\n\t\t\t\t\t\telif status not in ['FINISHED','FAILED','STOPPED']: # Running\n\t\t\t\t\t\t\tcolor = 'orange'\n\t\t\t\t\t\t# else failed or stopped (color = 'red')\n\t\t\t\t\t\tjob.status = status\n\t\t\t\t\t\tjob.color = color\n\t\t\t\t\t\tjob.save()\n\n\t\t\t\t# Create new job entries\n\t\t\t\tjobsInDB = [job.jobid for job in Job.objects.filter(name=jobName)]\n\t\t\t\tjobsNotInDB = (set(jobsInDB) ^ set(metadata['associationIds'])) & set(metadata['associationIds'])\n\t\t\t\tfor jobId in jobsNotInDB:\n\t\t\t\t\tjobResponse = agaveRequestJobSearch(user,jobId=jobId)\n\t\t\t\t\tstatus = jobResponse['result'][0]['status']\n\t\t\t\t\tcolor = 'red'\n\t\t\t\t\tif status == 'FINISHED':\n\t\t\t\t\t\tcolor = 'blue'\n\t\t\t\t\telif status == 'RUNNING':\n\t\t\t\t\t\tcolor = 'orange'\n\t\t\t\t\tpara1value = value['paraValues'][jobId][para1name]\n\t\t\t\t\tpara2value = value['paraValues'][jobId][para2name]\n\t\t\t\t\tJob(name=jobName,\n\t\t\t\t\t\tjobid=jobId,\n\t\t\t\t\t\tuser=user,\n\t\t\t\t\t\tvalue=8,\n\t\t\t\t\t\tpara1name=para1name,\n\t\t\t\t\t\tpara1value=para1value,\n\t\t\t\t\t\tpara2name=para2name,\n\t\t\t\t\t\tpara2value=para2value,\n\t\t\t\t\t\tstatus=status,\n\t\t\t\t\t\tcolor=color).save()", "def execute_queries():\n fetch_job_listings(engine)\n update_job_listing(engine)", "def submit_jobs(environment, facility_list):\n client = boto3.client('batch')\n queue_arn = fetch_batch_queue_arn(client, environment)\n job_def_arn = fetch_latest_active_batch_job_definition_arn(client,\n environment)\n job_time = (datetime.utcnow().isoformat()\n .replace(':', '-').replace('.', '-').replace('T', '-'))\n\n def submit_job(action, is_array=False, depends_on=None):\n if depends_on is None:\n depends_on = []\n array_properties = {}\n if is_array:\n array_properties = {\n 'size': facility_list.facilitylistitem_set.count()\n }\n job_name = 'list-{0}-{1}-{2}'.format(\n facility_list.id, action, job_time)\n job = client.submit_job(\n jobName=job_name,\n jobQueue=queue_arn,\n jobDefinition=job_def_arn,\n dependsOn=depends_on,\n arrayProperties=array_properties,\n parameters={\n 'listid': str(facility_list.id),\n 'action': action,\n }\n )\n if 'jobId' in job:\n return job['jobId']\n else:\n raise RuntimeError(\n 'Failed to submit job {0}. Response {1}'.format(job_name, job))\n\n # PARSE\n started = str(datetime.utcnow())\n # The parse task is just quick string manipulation. 
We submit it as a\n # normal job rather than as an array job to avoid extra overhead that just\n # slows things down.\n parse_job_id = submit_job('parse')\n finished = str(datetime.utcnow())\n with transaction.atomic():\n for item in facility_list.facilitylistitem_set.all():\n item.processing_results.append({\n 'action': ProcessingAction.SUBMIT_JOB,\n 'type': 'parse',\n 'job_id': parse_job_id,\n 'error': False,\n 'started_at': started,\n 'finished_at': finished,\n })\n item.save()\n\n # GEOCODE\n started = str(datetime.utcnow())\n geocode_job_id = submit_job('geocode',\n depends_on=[{'jobId': parse_job_id}],\n is_array=True)\n finished = str(datetime.utcnow())\n facility_list.refresh_from_db()\n with transaction.atomic():\n for item in facility_list.facilitylistitem_set.all():\n item.processing_results.append({\n 'action': ProcessingAction.SUBMIT_JOB,\n 'type': 'geocode',\n 'job_id': '{0}:{1}'.format(geocode_job_id, item.row_index),\n 'error': False,\n 'started_at': started,\n 'finished_at': finished,\n })\n item.save()\n\n # MATCH\n started = str(datetime.utcnow())\n match_job_id = submit_job('match',\n depends_on=[{'jobId': geocode_job_id,\n 'type': 'N_TO_N'}],\n is_array=True)\n finished = str(datetime.utcnow())\n facility_list.refresh_from_db()\n with transaction.atomic():\n for item in facility_list.facilitylistitem_set.all():\n item.processing_results.append({\n 'action': ProcessingAction.SUBMIT_JOB,\n 'type': 'match',\n 'job_id': '{0}:{1}'.format(match_job_id, item.row_index),\n 'error': False,\n 'started_at': started,\n 'finished_at': finished,\n })\n item.save()", "def load_jobs():\n JobSkillCount.query.delete()\n Job.query.delete()\n\n titles = db.session.query(Posting.title).all()\n titles = set(titles)\n\n for title in titles:\n title = title.lower()\n job = Job(title=title[0])\n\n db.session.add(job)\n db.session.commit()", "def updateList(self):\n self._recreateJobs()", "def save_posts(self):\n logger.info(\"Savings posts to database\")\n records = self.df.to_dict(\"records\")\n\n for record in records:\n Company.objects.get_or_create(name=record[\"company\"])\n\n Post.objects.get_or_create(\n title=record[\"title\"],\n company_id=record[\"company\"],\n defaults={\n \"date_posted\": record[\"date_posted\"],\n \"description\": record[\"description\"],\n \"location\": record[\"location\"],\n \"is_sponsored\": False,\n \"date_added_db\": record[\"date_added_db\"],\n \"source_id\": record[\"source\"],\n \"link\": record[\"link\"],\n },\n )", "def _parse_job_list(job_list_element):\n def text_or_none(tag):\n elem = job_list_element.find(tag)\n return None if elem is None else elem.text\n return {\n 'job_id': int(job_list_element.find('JB_job_number').text),\n 'state': job_list_element.get('state'),\n 'name': job_list_element.find('JB_name').text,\n 'owner': job_list_element.find('JB_owner').text,\n 'state_code': job_list_element.find('state').text,\n 'start_time': _text_or_none(job_list_element, 'JAT_start_time'),\n 'submission_time': _text_or_none(job_list_element, 'JB_submission_time'),\n 'queue_name': job_list_element.find('queue_name').text\n }", "def crawler():\n job_entries = []\n for job in job_info(URL):\n labels = \"\"\n if job[\"labels\"]:\n for label in job[\"labels\"]:\n labels += label[\"name\"]\n if job[\"labels\"].index(label) != len(job[\"labels\"]) - 1:\n labels += \",\"\n job_entries.append((job[\"number\"], job[\"id\"],\n job[\"title\"], job[\"html_url\"], labels))\n\n conn = sqlite3.connect('jobber/jobber.db')\n c = conn.cursor()\n c.executemany(('INSERT OR 
IGNORE INTO job_entries '\n 'VALUES (?,?,?,?,?)'), job_entries)\n conn.commit()\n conn.close()", "def create_Jobs_from_dicts_and_paths(self,\n jobs_list,\n ):\n # | - create_Jobs_from_dicts_and_paths\n for job_i in jobs_list:\n\n path_i = job_i[\"path\"]\n job_params_dict = job_i[\"properties\"]\n\n rev_dirs, max_rev = self.__revision_list_and_max__(\n path_i,\n )\n\n for rev_i in rev_dirs:\n path_i = os.path.join(path_i, rev_i)\n path_i = os.path.normpath(path_i)\n\n Job_i = Job(\n path_i=path_i,\n job_params_dict=job_params_dict,\n max_revision=max_rev,\n root_dir=None,\n )\n\n self.Job_list.append(Job_i)\n # __|", "def get_all_jobs(self):\n all_jobs = self.job_set.all().order_by(\"-time_last_updated\", \"project__name\", \"-id\")\n # for job in all_jobs:\n # job.check_exists()\n\n # get the list of jobs listed in the database as running and update them.\n dbrunning = all_jobs.filter(state__in=['in queue', 'started'])\n for runningjob in dbrunning: runningjob.update();\n\n # get the updated list \n all_jobs = self.job_set.all().order_by(\"-time_last_updated\", \"project__name\", \"-id\")\n\n return all_jobs", "def update_all():\n req_data = request.get_json()\n jobs = JobModel.get_one_job(job_id)\n if not jobs:\n return custom_response({'Error': 'Job Not Found'}, 404)\n\n data, error = job_schema.load(req_data, partial=True)\n if error:\n return custom_response(error, 400)\n\n for job in jobs:\n job.update(data)\n job_message = job_schema.dump(job)\n\n return custom_response(job_message, 200)", "def search_jobs(self, bill_id: int = 0, limit: int = 0) -> List[Job]:\n pass", "def saveDB(self):\n job_obj = JobData.objects.all()\n new_id = len(job_obj)+1\n print new_id\n\n newjob = JobData()\n\n for job in self.job_list:\n newjob.job_id = new_id\n newjob.tool_name = job['jobname']\n newjob.tool_id = job['job_id']\n newjob.save()\n\n return", "def populate_jobs():\n\n logging.basicConfig(level=logging.INFO)\n logger = logging.getLogger(__name__)\n\n database = SqliteDatabase('personjob.db')\n\n logger.info('Working with Job class')\n logger.info('Creating Job records: just like Person. 
We use the foreign key')\n\n JOB_NAME = 0\n START_DATE = 1\n END_DATE = 2\n SALARY = 3\n PERSON_EMPLOYED = 4\n DEPARTMENT = 5\n\n jobs = [\n ('Analyst', '2001-09-22', '2003-01-30',65500, 'Andrew', 'ASYS'),\n ('Senior analyst', '2003-02-01', '2006-10-22', 70000, 'Andrew', 'ASYS'),\n ('Senior business analyst', '2006-10-23', '2016-12-24', 80000, 'Andrew', 'BUSI'),\n ('Admin supervisor', '2012-10-01', '2014-11-10', 45900, 'Peter', 'ADMN'),\n ('Admin manager', '2014-11-14', '2018-01-05', 45900, 'Peter', 'ADMN'),\n ('Sr Project Manager', '2014-11-14', '2018-01-05', 100000, 'Ryan', 'ASYS'),\n ('Manager', '2014-11-14', '2018-01-05', 100000, 'Pamela', 'BUSI'),\n ('Director', '2014-11-14', '2018-01-05', 120000, 'Monica', 'MGMT'),\n ]\n\n try:\n database.connect()\n database.execute_sql('PRAGMA foreign_keys = ON;')\n for job in jobs:\n with database.transaction():\n new_job = Job.create(\n job_name = job[JOB_NAME],\n start_date = job[START_DATE],\n end_date = job[END_DATE],\n duration = dates_diff(job[END_DATE], job[START_DATE]),\n salary = job[SALARY],\n person_employed = job[PERSON_EMPLOYED],\n job_department = job[DEPARTMENT])\n new_job.save()\n\n logger.info('Reading and print all Job rows (note the value of person)...')\n for job in Job:\n logger.info(f'{job.job_name} : {job.start_date} to {job.end_date} for {job.person_employed} in {job.job_department}')\n\n except Exception as e:\n logger.info(f'Error creating = {job[JOB_NAME]}')\n logger.info(e)\n\n finally:\n logger.info('database closes')\n database.close()", "def run_joblist(self):\n\n for message in self.message_list:\n self.run_job(message)", "def get_job_list(self):\n return self.job_list", "def get_job_list(self):\n return self.job_list", "def get_stack_overflow_jobs(cursor: sqlite3.Cursor):\n cursor.execute('''DELETE FROM s_jobs''') # Scrub previous results to start over\n url = f\"https://stackoverflow.com/jobs/feed\"\n feed = feedparser.parse(url)\n\n for jobs in feed.entries:\n date = \"(%d/%02d/%02d)\" % (jobs.published_parsed.tm_year, jobs.published_parsed.tm_mon,\n jobs.published_parsed.tm_mday) # Format date entries to be uniform\n title = jobs.title\n location = title[title.rfind(\"(\")+1:title.rfind(\")\")] # Clips location data nested in title field\n\n cursor.execute(f\"\"\"INSERT INTO s_jobs(id, author, link, location, date, summary, title) VALUES\n (?,?,?,?,?,?,?)\"\"\", (jobs.id, jobs.author, jobs.link, location, date, jobs.summary, jobs.title))", "def save_to_github_db(cursor: sqlite3.Cursor, all_jobs: List[Dict[str, Any]]):\n cursor.execute(f'''DELETE FROM g_jobs''') # Scrub previous results to start over\n insert_statement = f\"\"\"INSERT INTO g_jobs(id, type, url, created_at, company, company_url, location, title, \n description, how_to_apply, company_logo) VALUES(?,?,?,?,?,?,?,?,?,?,?)\"\"\"\n\n # Turn all values from the jobs dict into a tuple\n for job_info in all_jobs:\n data_to_enter = tuple(job_info.values())\n cursor.execute(insert_statement, data_to_enter)", "def scrape_job_page(driver, job_title, job_location):\n \n current_date = str(datetime.datetime.now(pytz.timezone('US/Mountain')))\n json_dct = {'search_title': job_title, \\\n 'search_location': job_location, \\\n 'search_date': current_date, 'job_site': 'glassdoor'}\n\n jobs = driver.find_elements_by_class_name('jobListing')\n\n mongo_update_lst = [query_for_data(driver, json_dct, job, idx) for \n idx, job in enumerate(jobs[:-1])]\n\n store_in_mongo(mongo_update_lst, 'job_postings', 'glassdoor')", "def create_job_detail(company_name, job_title, 
application_deadline, job_listing_url, state, city, application_listed, salary):\n\n job_detail = JobDetail(company_name = company_name, job_title = job_title, application_deadline = application_deadline, job_listing_url = job_listing_url, state = state , city = city, application_listed = application_listed, salary = salary)\n db.session.add(job_detail)\n db.session.commit()\n\n return job_detail", "def __Job_list__(self):\n # | - __Job_list__\n\n # | - Adding Jobs From Individual Directory List\n if self.indiv_dir_lst is not None:\n for job_i_dir in self.indiv_dir_lst:\n\n rev_dirs, max_rev = self.__revision_list_and_max__(job_i_dir)\n\n print(job_i_dir)\n if rev_dirs:\n\n print(\"rev_dirs:\", rev_dirs)\n\n if self.parse_all_revisions is False:\n\n last_rev_int = np.sort(\n [int(i.split(\"_\")[-1]) for i in rev_dirs])[-1]\n rev_dirs = [\"_\" + str(last_rev_int), ]\n # rev_dirs = [rev_dirs[-1]]\n\n print(\"rev_dirs:\", rev_dirs)\n print(\"IOPSDFJOKIDSIJFIJDSF\")\n\n for rev_i in rev_dirs:\n path_i = os.path.join(job_i_dir, rev_i)\n path_i = os.path.normpath(path_i)\n\n Job_i = Job(\n path_i=path_i,\n job_params_dict=None,\n max_revision=max_rev,\n root_dir=None,\n )\n\n self.Job_list.append(Job_i)\n else:\n print(\"Didn't find any job dirs here:\")\n print(job_i_dir)\n pass\n # __|\n\n # | - Adding Jobs From Enumerated Job Properties Tree\n if self.job_var_lst is not None:\n for job_i in self.job_var_lst:\n job_var_dict = self.__job_i_vars_to_dict__(job_i)\n\n if self.folders_exist:\n path_i = self.var_lst_to_path(\n job_i,\n job_rev=\"Auto\",\n relative_path=False,\n )\n\n # | - __old__\n # else:\n # print(\"else *s8fs*sdf\")\n # path_i = os.path.join(\n #\n # self.var_lst_to_path(\n # job_i,\n # job_rev=\"Auto\",\n # relative_path=False,\n # ),\n #\n # # self.var_lst_to_path(\n # # job_i,\n # # ),\n #\n # \"_1\",\n # )\n # __|\n\n rev_dirs, max_rev = self.__revision_list_and_max__(\n # path_i\n self.var_lst_to_path(\n job_i,\n job_rev=\"None\",\n relative_path=False,\n )\n )\n\n Job_i = Job(\n path_i=path_i,\n job_params_dict=job_var_dict,\n max_revision=max_rev,\n root_dir=self.root_dir,\n )\n\n self.Job_list.append(Job_i)\n # __|\n\n # | - TEMP | I don't remember why this is here\n indiv_job = self.indiv_job_lst is not None\n level_labels = self.tree_level_labels is not None\n if indiv_job and level_labels:\n print(\"LSKDJFKLDS_-09sdfsdfs9dfas\")\n for job_params_i in self.indiv_job_lst:\n\n job_var_lst_i = self.__job_i_param_dict_to_job_var_lst__(\n job_params_i,\n )\n\n path_i = os.path.join(\n self.new_var_lst_to_path(job_var_lst_i),\n \"_1\",\n )\n\n Job_i = Job(\n path_i=path_i,\n job_params_dict=job_params_i,\n max_revision=None,\n root_dir=self.root_dir,\n )\n\n self.Job_list.append(Job_i)\n # __|\n\n if self.indiv_job_dict_lst is not None:\n self.create_Jobs_from_dicts_and_paths(\n self.indiv_job_dict_lst,\n )\n # __|", "def search_jobs(self, bill_id: int = 0, limit: int = 0) -> List[Job]:\n res = []\n query = QSqlQuery()\n q = \"select id, hours, price, job from jobs\"\n if bill_id > 0:\n q += \" where b_id=?\"\n q += \" order by id desc\"\n if limit > 0:\n q += \" limit ?\"\n query.prepare(q)\n if bill_id > 0:\n query.addBindValue(bill_id)\n if limit > 0:\n query.addBindValue(limit)\n query.exec_()\n while query.next():\n res.append(_extract_job(query))\n return res", "def database_script_list(bs_id, command, arguments_list, threads, expe_proc_time,\n attempt=1):\n #works out the table from the command\n if command == 'make_beam':\n table = 'Beamform'\n if command == 
'prepsubband':\n table = 'Prepdata'\n elif command == 'realfft':\n table = 'FFT'\n elif command == 'accelsearch':\n table = 'Accel'\n elif command == 'prepfold':\n table = 'Fold'\n con = lite.connect(DB_FILE, timeout = TIMEOUT)\n with con:\n cur = con.cursor()\n for ai, arguments in enumerate(arguments_list):\n cur.execute(\"INSERT OR IGNORE INTO {0} (Rownum, AttemptNum, BSID, Command, Arguments, CPUs, ExpProc) VALUES(?, ?, ?, ?, ?, ?, ?)\".format(table), (ai, attempt, bs_id, command, arguments, threads, expe_proc_time))\n #update expected jobs\n if attempt == 1:\n cur.execute(\"UPDATE PulsarSearch SET {0}JobExp=? WHERE Rownum=?\".format(table), (len(arguments_list),bs_id))\n else:\n cur.execute(\"SELECT {0}JobExp FROM PulsarSearch WHERE Rownum=?\".format(table), (bs_id,))\n table_job_exp = cur.fetchone()[0]\n cur.execute(\"UPDATE PulsarSearch SET {0}JobExp=? WHERE Rownum=?\".format(table), (len(arguments_list) + table_job_exp, bs_id))\n cur.execute(\"SELECT TotalJobExp FROM PulsarSearch WHERE Rownum=?\", (bs_id,))\n search_job_exp = cur.fetchone()[0]\n if search_job_exp is None:\n search_job_exp = 0\n cur.execute(\"UPDATE PulsarSearch SET TotalJobExp=? WHERE Rownum=?\", (len(arguments_list) + search_job_exp, bs_id))\n\n return", "def get_sports_job_postings():\n # Gather HTML code from TeamworkOnline.com\n url = 'https://www.teamworkonline.com/jobs-in-sports'\n response = requests.get(url)\n html = response.content\n soup = BeautifulSoup(html, features='lxml')\n matches = soup.findAll(name='h3')\n orgs = soup.findAll(name='span', attrs={'class': 'icon-bullet__content icon-bullet__content--recent-job-card'})\n links = soup.findAll(name='a', attrs={'class': 'result-cta button button--wire'})\n \n # Create lists for job title, organization, and link to posting\n joblist = []\n orglist = []\n linklist = []\n for match in matches:\n joblist.append(match.text)\n \n for org in orgs:\n orglist.append(org.text)\n \n for link in links:\n linklist.append('https://www.teamworkonline.com/' + link.get('href'))\n\n # Eliminate certain responses since they're just ads for the site\n joblist = [x for x in joblist if x != 'Know when your application is reviewed.']\n joblist = [x for x in joblist if x != 'Teamwork Online Individual Resume and Profile Review'] \n orglist = [x for x in orglist if 'Sports Jobs in' not in x]\n\n # Create zipped list of all three data points\n all_list = [i + ' | ' + j + ' | ' + k for i, j, k in zip(joblist, orglist, linklist)]\n\n # Write master list to text file to compare each running with it to only print new jobs\n with open ('/Users/User/Desktop/falck_workspace/one_offs/other_files/two_jobs.txt', 'rb') as f:\n old_all_list = pickle.load(f)\n\n just_new = set(all_list) - set(old_all_list)\n too_many = 'Too many job postings...'\n\n # Check if the set of differences between the old set and new set is empty, then print only the differences\n isEmpty = (len(just_new) == 0)\n total_entry = []\n if isEmpty:\n total_entry += ['No new postings.']\n else:\n for entry in just_new:\n entrysplit = entry.split('|')\n post = '\\n- ' + entrysplit[0] + ' at ' + entrysplit[1] + '\\n'\n total_entry.append(post)\n \n # Turn list of all new jobs into one string\n return_string = ' '.join(total_entry)\n \n # Overwrite the old text file with new data so when it's run again, it only compares postings to the last time function was run\n with open('/Users/User/Desktop/falck_workspace/one_offs/other_files/two_jobs.txt', 'wb') as f:\n pickle.dump(all_list, f)\n\n if len(return_string)>1600:\n return 
too_many\n else:\n return return_string", "def execute(self, jobID = None, couchID = None, bulkList = None, conn = None,\n transaction = False):\n\n if isinstance(bulkList, list):\n binds = bulkList\n else:\n binds = {\"jobid\": jobID, \"couchid\": couchID}\n\n self.dbi.processData(self.sql, binds,\n conn = conn, transaction = transaction)\n return", "def jobs(self, jobs):\n self._jobs = jobs", "def store_listings():\n\n scraped_listings = scrape_listings() # Used to store scraped listings\n valid_listings = [] # Used to store valid listings (after filtering)\n num_valid_listings = 1 # Counts the number of valid listings for output\n\n for listing in scraped_listings:\n # If the Listing ID of the current listing does not exist in the database...\n if session.query(DatabaseListing).filter_by(listing_id=listing.listing_id).first() is None:\n # Safe-guard distance variable (decimal)\n if listing.distance == \"n/a\":\n listing.distance = 0.0\n else:\n listing.distance = listing.distance[:-2]\n\n # Create listing object for database\n listing = DatabaseListing(\n created=listing.created,\n available=listing.available,\n listing_type=listing.listing_type,\n room_type=listing.room_type,\n address=listing.address,\n distance=listing.distance,\n sublet=listing.sublet,\n rooms=int(listing.rooms),\n features=listing.features,\n price=listing.price,\n listing_id=int(listing.listing_id),\n link=listing.link\n )\n\n # Save listing to database\n session.add(listing)\n session.commit()\n \n\n if (program_features.FILTER == True) and (filter_listings(listing) == False):\n print (\"Listing #{} does not match filter\".format(num_valid_listings))\n else:\n # Save valid listing (with ASCII encoding for Email server)\n valid_listings.append(\"\"\"Date Posted: {} | Available: {} | {} | Type: {}\n Address: {}\n Price: {} | Distance: {} | Sublet: {} | Rooms: {}\n Features: {}\n {}\"\"\".format(listing.created, listing.available,\n listing.listing_type, listing.room_type,\n listing.address, listing.price,\n (str(listing.distance) + \" km\"),\n listing.sublet, listing.rooms,\n str(listing.features.encode('utf-8')), listing.link))\n\n # Output success statement \n print(\"Listing #{} loaded\".format(num_valid_listings))\n \n # Increment valid listings counter\n num_valid_listings += 1\n \n # If the Listing ID of the current listing exists in the database...\n else:\n print(\"Listing already loaded\")\n\n return valid_listings # Return valid listings array", "def job_posting_matches(self, job_title_posting, html_posting):\n raise NotImplemented()", "def get_job_list(self):\n job_list = []\n if mysql.job_list() == None:\n return job_list\n return mysql.job_list()" ]
[ "0.6064062", "0.5675799", "0.561844", "0.55301267", "0.5519273", "0.53884274", "0.5382857", "0.53754044", "0.536694", "0.5362908", "0.5361829", "0.53558755", "0.53386945", "0.5303524", "0.52626956", "0.5243744", "0.5243744", "0.5236328", "0.5209559", "0.5199003", "0.5169366", "0.5166747", "0.5155694", "0.51468825", "0.5121095", "0.5118854", "0.51133287", "0.5109777", "0.5086652", "0.5062952" ]
0.6501171
0
Gets two positive integers m and n (m > n). Returns True if they are coprime; otherwise returns False.
def coprime(m, n):
    # The function uses Euclid's algorithm for finding the greatest common divisor. The algorithm is recursive.
    # If the GCD is 1, then the numbers are coprime. If it is greater than 1, then the numbers aren't coprime.
    if n == 0 and m > 1:
        return False
    elif n == 0 and m == 1:
        return True
    return coprime(n, m - n * (m // n))
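As a quick sanity check (not part of the original), the recursion agrees with math.gcd on whether the two numbers share a common factor:

import math

for m, n in [(9, 4), (12, 8), (7, 1)]:
    assert coprime(m, n) == (math.gcd(m, n) == 1)
# coprime(9, 4) -> True; coprime(12, 8) -> False, since gcd(12, 8) = 4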
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def coprime(a: int, b: int):\n\n return euclid(a, b) == 1", "def coprime(a, b):\n return gcd(a, b) == 1", "def coprime(self,x,y):\r\n return x == 1 or y == 1 or not bool(self.cofactors(x,y))", "def is_relatively_prime(n, m):\n result = True\n larger = n\n if m > n:\n larger = m\n for i in range(1, larger + 1):\n if n % i == 0 and m % i == 0:\n if i == 1:\n result = True\n else:\n result = False\n return result", "def comprobar_primo(num):\n primo = True\n for i in range(2, num):\n if num%i == 0:\n primo = False\n return primo", "def es_primo(n):\n \n for i in range(2, n):\n if n % i == 0:\n return False\n return True", "def is_prime(n):\n return mr_prime(n)", "def pairwise_coprime(listing: list):\n\n assert isinstance(listing, list)\n\n size = len(listing)\n\n for i in range(0, size - 1):\n for j in range(i + 1, size):\n if not coprime(listing[i], listing[j]):\n return False\n\n return True", "def is_prime(n):\n if n < 2:\n return False\n if n == 2 | n == 3:\n return True\n if n % 2 == 0 | n % 3 == 0:\n return False\n for i in range(2, int(sqrt(n))+1):\n if n % i == 0:\n return False\n return True", "def is_prime(n):\n if n < 2:\n return False\n if n == 2 | n == 3:\n return True\n if n % 2 == 0 | n % 3 == 0:\n return False\n for i in range(2, int(sqrt(n))+1):\n if n % i == 0:\n return False\n return True", "def equivalence(self, n):\n return n % self.prime", "def is_prime(n):\n\n def mr(n, _known_primes=[2, 3], _precision_for_huge_n=16, ):\n\n def _try_composite(a, d, n, s):\n if pow(a, d, n) == 1:\n return False\n for i in range(s):\n if pow(a, 2**i * d, n) == n-1:\n return False\n return True # n is definitely composite\n\n if n in _known_primes:\n return True\n if n in (0, 1):\n return False\n if any((n % p) == 0 for p in _known_primes):\n return False\n d, s = n - 1, 0\n while not d % 2:\n d, s = d >> 1, s + 1\n\n # Returns exact according to http://primes.utm.edu/prove/prove2_3.html\n if n < 1373653:\n return not any(_try_composite(a, d, n, s) for a in (2, 3))\n if n < 25326001:\n return not any(_try_composite(a, d, n, s) for a in (2, 3, 5))\n if n < 118670087467:\n if n == 3215031751:\n return False\n return not any(_try_composite(a, d, n, s) for a in (2, 3, 5, 7))\n if n < 2152302898747:\n return not any(_try_composite(a, d, n, s) for a in (2, 3, 5, 7, 11))\n if n < 3474749660383:\n return not any(_try_composite(a, d, n, s) for a in (2, 3, 5, 7, 11, 13))\n if n < 341550071728321:\n return not any(_try_composite(a, d, n, s) for a in (2, 3, 5, 7, 11, 13, 17))\n # otherwise\n return not any(_try_composite(a, d, n, s)\n for a in _known_primes[:_precision_for_huge_n])\n\n def trial_division(n):\n if n < 2:\n return False\n if n < 4:\n return True\n if n % 2 == 0 or n % 3 == 0:\n return False\n\n limit = int(math.sqrt(n))\n divisor = 5\n\n while divisor <= limit:\n if n % divisor == 0 or n % (divisor + 2) == 0:\n return False\n divisor += 6\n\n return True\n\n if 30000000 < n < 341550071728321:\n return mr(n)\n else:\n return trial_division(n)", "def if_prime(cls, n):\n\n if (n <= 1):\n return False\n if (n <= 3):\n return True\n\n if (n % 2 == 0 or n % 3 == 0):\n return False\n\n i = 5\n while(i * i <= n):\n if (n % i == 0 or n % (i + 2) == 0):\n return False\n i = i + 6\n\n return True", "def __xor__(p1, p2):\n return not isparallel(p1, p2) and (abs(p1 * p2) < 10*_eps )", "def is_prime(n):\n if n < 2:\n return False\n if n == 2 or n == 3:\n return True\n elif n % 2 == 0:\n return False\n else:\n x = 0\n for i in range(3, n, 2):\n if n % i == 0:\n x = 1\n return x == 0", "def 
check_composite(n):\n # type: (int) -> RE\n if n % 2 == 0:\n return RSAResult.EVEN_MODULUS\n return RSAResult.OK", "def primenumber(x):\n if x >= 2:\n for y in range(2,x):\n if not (x % y):\n return False\n else:\n return False\n return True", "def coPrime(x):\n\n n = x * 2 + 100000 # Upper limit for range of random integers\n y = random.randint(x * 2, n)\n if (fractions.gcd(x, y) != 1):\n return coPrime(x)\n else:\n return y", "def get_prime_digits_for_one(a: int) -> bool:\r\n b = a\r\n c = 0\r\n c1 = 0\r\n while b > 0:\r\n c1 += 1\r\n n = b % 10\r\n if isprime(n):\r\n c += 1\r\n b = b // 10\r\n if c == c1:\r\n return True\r\n else:\r\n return False", "def is_multiple(n, m):\n # Check the given numbers are integer\n if m%n:\n # print('%d is NOT a multiple of %d' % (n, m))\n return False\n # print('%d is a multiple of %d' % (n, m))\n return True", "def relPrime(a, b):\n if gcd(a, b) == 1:\n return True\n else:\n return False", "def prime(n: int) -> bool:\n if len(divisors(n)) > 2 or n < 1:\n return False\n else:\n return True", "def test_prime(n):\n if SIEVE[n]:\n return True\n else:\n return False", "def check_prime(x, y):\n pri = (3,5,7,11,13,17,19,23)\n for i in pri:\n if (x % i == 0) and (y % i == 0):\n return i\n return 0", "def is_circular_prime(n):\r\n\r\n # pdb.set_trace()\r\n s = str(n)\r\n for i in xrange(len(s)):\r\n if not is_prime(n):\r\n return False\r\n s = s[1:] + s[0]\r\n n = int(s)\r\n\r\n return True", "def prog1(a,b):\n num=[]\n for m in range(a,b):\n if (m%2)==0 and m!=a and m!=b :\n num.append(m)\n print(num)", "def is_prime(n):\n for i in range(2,n):\n if n % i == 0:\n return False\n return True", "def checarPs(self,p1,p2):\n return abs(p1-p2) < 0.00001", "def prime():\n number = random.randint(1, 100)\n if len(primfacs(number)) == 1:\n return number, 'yes'\n return number, 'no'", "def isprime(n):\n for i in range(2, n):\n if n % i == 0:\n return False\n return True" ]
[ "0.79076767", "0.7667604", "0.72574323", "0.6289784", "0.59262717", "0.5853223", "0.5806163", "0.5744549", "0.5737769", "0.5737769", "0.5732557", "0.5720091", "0.56966656", "0.56965023", "0.568913", "0.5660022", "0.5632094", "0.5599034", "0.5597036", "0.55894285", "0.5589392", "0.55861366", "0.55858415", "0.5570516", "0.5569134", "0.5560804", "0.5551977", "0.55498266", "0.5547965", "0.5527664" ]
0.7949466
0
Create a shift from a datetime.
def from_datetime(cls, position, datetime):
    return cls(
        position = position,
        date = datetime.date(),
        name = position.shiftForTime(datetime.time()),
    )
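A hedged usage sketch; the enclosing class name (Shift here) and the position object with its shiftForTime method are assumptions implied by the constructor call above, not definitions from the original:

from datetime import datetime

# Hypothetical: position.shiftForTime(time) is assumed to map a time of day
# to a shift name such as 'Afternoon'.
dt = datetime(2024, 3, 15, 14, 30)
shift = Shift.from_datetime(position, dt)  # shift.date == date(2024, 3, 15)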
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def timeshift(self, shift='random'):\n\n if shift == 'random':\n one_month = pd.Timedelta('30 days').value\n two_years = pd.Timedelta('730 days').value\n random_timedelta = - pd.Timedelta(random.uniform(one_month, two_years)).round('min')\n self.timeshift(random_timedelta)\n\n if not self.data.index.empty:\n if isinstance(shift, pd.Timestamp):\n timedeltas = self.data.index - self.data.index[0]\n self.data.index = shift.round('min') + timedeltas\n if isinstance(shift, pd.Timedelta):\n self.data.index += shift.round('min')\n self.data['date'] = self.data.index.map(lambda timestamp: timestamp.date())\n self.data['time'] = self.data.index.map(lambda timestamp: timestamp.time())\n else:\n if isinstance(shift, pd.Timestamp):\n timedeltas = self.data['timestamp'] - self.data['timestamp'].min()\n self.data['timestamp'] = shift.round('min') + timedeltas\n if isinstance(shift, pd.Timedelta):\n self.data['timestamp'] += shift.round('min')\n self.data['date'] = self.data['timestamp'].map(lambda timestamp: timestamp.date())\n self.data['time'] = self.data['timestamp'].map(lambda timestamp: timestamp.time())", "def shifted(self, shift):\n new_location = None if self.location is None else self.location + shift\n reference = None if self.reference is None else self.reference + shift\n return self.copy_with_changes(\n location=new_location, reference=reference, derived_from=self,\n )", "def shift_timestamp(self, shift, timestamp):\n\n shift_msecs = self.convert_shift_to_msecs(shift)\n timestamp_msecs = self.convert_timestamp_to_msecs(timestamp)\n shifted_timestamp_msecs = timestamp_msecs + shift_msecs\n new_timestamp = self.convert_msecs_to_timestamp(shifted_timestamp_msecs)\n\n return new_timestamp", "def __get_date_days_shift(stock_time: datetime, days: int) -> datetime:\n if days < 1 or days > 2:\n raise ValueError(\"This function can only shift for maximum 2 days\")\n if (2 <= stock_time.weekday() <= 5) or (stock_time.weekday() == 1 and days == 1):\n return stock_time - timedelta(days=days)\n if stock_time.weekday() == 0 or stock_time.weekday() == 1:\n return stock_time - timedelta(days=days + 2)\n if stock_time.weekday() == 6:\n return stock_time - timedelta(days=days + 1)", "def shift(self, years: int = 0, months: int = 0, days: int = 0, weeks: int = 0) -> Date:\n return self.from_date(self + relativedelta(years=years, months=months, days=days, weeks=weeks))", "def shift(self, timedelta=timedelta(0.)):\n new = DoasResults(self.values, \n self.index + timedelta, \n self.start_acq, \n self.stop_acq, \n self.fit_errs, self.name, self.fit_id, \n self.fit_errs_corr_fac)\n if self.has_start_stop_acqtamps:\n new.start_acq += timedelta\n new.stop_acq += timedelta\n return new", "def start(year, month, day):\n output=datetime.datetime(year, month, day)\n return output", "def shift_by(self, xshift):\n return Waveform(self.xvec + xshift, self.yvec, self.xtol, order=self.order, ext=self.ext)", "def create(data):\n db = core.connect()\n theTime = utils.utctime()\n data[\"created\"] = theTime\n data[\"modified\"] = theTime\n data[\"domain\"] = utils.domain(data[\"href\"])\n newShift = schema.shift()\n newShift.update(data)\n newShift[\"type\"] = \"shift\"\n id = db.create(newShift)\n newShift = db[id]\n return joinData(newShift, newShift[\"createdBy\"])", "def shift(self, da, dim, shift):\n # TODO: generalize rolling function, allow custom shifts, handle\n # boundary conditions, etc.\n return da.roll(**{dim: shift})", "def from_datetime_time(cls, dtime_obj, time_obj):\n offset = ((time_obj.seconds - 
dtime_obj.seconds + SECONDS_PER_DAY // 2)\n % SECONDS_PER_DAY - SECONDS_PER_DAY // 2)\n day_offset, seconds = divmod(dtime_obj.seconds + offset, SECONDS_PER_DAY)\n # pylint: disable=protected-access\n return cls.from_daynum_secs_nanos(dtime_obj._days + day_offset, seconds,\n time_obj.nanosecond)", "def make_datetime_obj(date, time):\n\n conv_date = datetime.strptime(date, \"%Y-%m-%d\").date()\n conv_time = datetime.strptime(time, \"%H:%M\").time()\n\n return datetime.combine(conv_date, conv_time)", "def init_shiftind(self, n_t):\n i = np.arange(n_t * n_t)\n i2 = np.arange(n_t).repeat(n_t)\n ik = np.arange(n_t).repeat(n_t)\n ii = np.arange(n_t)[np.newaxis].repeat(n_t, 0).flatten()\n\n si = ik * n_t + (ik + ii) % n_t\n self.shiftinds_fwd = np.roll(si.reshape((n_t, n_t)), int((n_t - 1) / 2), 1)[:, ::-1].flatten()\n\n si = ik * n_t + (ii - ik) % n_t\n self.shiftinds_back = np.roll(np.arange(n_t * n_t).reshape((n_t, n_t))[:, ::-1], -int((n_t - 1) / 2), 1).flatten()[si]\n\n self.shiftinds = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int)\n self.shiftinds_neg = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int)\n self.shiftinds_pos = ((-n_t + i - i2) % n_t + i2 * n_t).astype(int)\n # self.shiftinds = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()\n # self.shiftinds_neg = ((i + i2 - n_t) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()\n # self.shiftinds_pos = ((-n_t + i - i2) % n_t + i2 * n_t).astype(int).reshape((n_t, n_t)).transpose().flatten()", "def date_to_midnight(date):\n return timezone.make_aware(timezone.datetime(date.year, date.month, date.day, 0, 0))", "def apply_shift(text, shift):\n ### TODO.", "def timeShift(requestContext, seriesList, timeShift, resetEnd=True):\n # Default to negative. 
parseTimeOffset defaults to +\n if timeShift[0].isdigit():\n timeShift = '-' + timeShift\n delta = parseTimeOffset(timeShift)\n myContext = requestContext.copy()\n myContext['startTime'] = requestContext['startTime'] + delta\n myContext['endTime'] = requestContext['endTime'] + delta\n results = []\n if len(seriesList) > 0:\n # if len(seriesList) > 1, they will all have the same pathExpression, which is all we care about.\n series = seriesList[0]\n\n for shiftedSeries in evaluateTarget(myContext, series.pathExpression):\n shiftedSeries.name = 'timeShift(%s, \"%s\")' % (shiftedSeries.name, timeShift)\n if resetEnd:\n shiftedSeries.end = series.end\n else:\n shiftedSeries.end = shiftedSeries.end - shiftedSeries.start + series.start\n shiftedSeries.start = series.start\n results.append(shiftedSeries)\n\n return results", "def shift(self,\n other=None,\n years=0,\n months=0,\n weeks=0,\n days=0,\n hours=0,\n minutes=0,\n seconds=0,\n microseconds=0):\n if isinstance(other, (timedelta, relativedelta)):\n return self + other\n\n dt = self + relativedelta(years=years,\n months=months,\n weeks=weeks,\n days=days,\n hours=hours,\n minutes=minutes,\n seconds=seconds,\n microseconds=microseconds)\n\n return self.fromdatetime(dt)", "def new_datetime(year, month=None, day=None,\n hour=0, minute=0, second=0,\n microsecond=0):\n return datetime(year, month, day, hour, minute,\n second, microsecond, tzinfo=timezone)", "def random_time_shift(_spec, Tshift=50):\n\n n_frames = _spec.shape[0]\n\n # deltat drawn from a uniform distribution from 1 to parameter Tshift\n # minimum shift is one frame (avoids crash)\n deltat = int(np.random.uniform(low=1.0, high=Tshift))\n\n # allocate\n _spec_out = np.zeros_like(_spec)\n\n # delay shift\n # end\n _spec_out[deltat:, :] = _spec[:n_frames-deltat, :]\n # begin\n _spec_out[:deltat, :] = _spec[-deltat:, :]\n return _spec_out", "def shift(t, word):\n return t[1:] + (word,)", "def shift(self, t, word):\n return t[1:] + (word,)", "def shift(h, k, dt=None, t=None, periodic=False):\n out = np.zeros((len(h),))\n if dt is None:\n if t is not None:\n dt = (max(t)-min(t))/len(t)\n else:\n dt = 1\n k = int(k/dt)\n if k > 0:\n out[:-k] = h[k:]\n if periodic:\n out[-k:] = h[:k]\n else:\n out[-k:] = h[:k]\n if periodic:\n out[:-k] = h[k:]\n return out", "def _make_datetime(self, datetime_):\n datetime_ = datetime_.rstrip('Z').replace('Z/', '/')\n if '/' in datetime_:\n begin, end = datetime_.split('/')\n if begin == '..':\n begin = self._data[self.time_field].min().values\n if end == '..':\n end = self._data[self.time_field].max().values\n if np.datetime64(begin) < np.datetime64(end):\n return slice(begin, end)\n else:\n LOGGER.debug('Reversing slicing from high to low')\n return slice(end, begin)\n else:\n return datetime_", "def date_handler(end):\n calc_date = datetime.strptime(str(end), \"%Y-%m-%d\")\n shift = timedelta(8)\n new_date = calc_date + shift\n\n return new_date.date()", "def wire_to(self, shift: Move) -> Segment:\n return Segment(self.end, self.end.move_to(shift))", "def shift(self, delay):\n self.__begin.shift(delay)\n self.__end.shift(delay)", "def timeStamp(mval,dval,tval,yearval=2015):\n hour = int(tval)\n minute = int((tval-hour)*60)\n return _dt.datetime(yearval,int(mval),int(dval),hour,minute)", "def calculate_shift(self, y, x, t, axis):\n return DeformationModel.calculate_shifts_from_coeffs(y, x, t,\n self.coeffs[axis])", "def get_datetime(hours):\n return datetime.datetime.utcfromtimestamp(hours * 60 * 60)", "def __init__(self, motion, **kwargs):\n 
super(ShiftIn, self).__init__(motion)" ]
[ "0.6044403", "0.5398259", "0.5287457", "0.51769984", "0.5132189", "0.50019145", "0.49231648", "0.4896712", "0.48872542", "0.48718226", "0.47676012", "0.4703589", "0.46973696", "0.46934074", "0.4664867", "0.46602306", "0.46434352", "0.46422106", "0.46262237", "0.46083966", "0.46060944", "0.45985317", "0.4583214", "0.45746753", "0.45379353", "0.4477081", "0.44757375", "0.44638517", "0.44438124", "0.44421688" ]
0.62300646
0
LSTM over tweet only
def get_model_tweetonly(batch_size, max_seq_length, input_size, hidden_size, target_size,
                        vocab_size, pretrain, tanhOrSoftmax, dropout):

    # batch_size x max_seq_length
    inputs = tf.placeholder(tf.int32, [batch_size, max_seq_length])

    cont_train = True
    if pretrain == "pre":
        cont_train = False
    embedding_matrix = tf.Variable(tf.random_uniform([vocab_size, input_size], -0.1, 0.1),  # input_size is embeddings size
                                   name="embedding_matrix", trainable=cont_train)

    # batch_size x max_seq_length x input_size
    embedded_inputs = tf.nn.embedding_lookup(embedding_matrix, inputs)

    # [batch_size x inputs_size] with max_seq_length elements
    # fixme: possibly inefficient
    # inputs_list[0]: batch_size x input[0] <-- word vector of the first word
    inputs_list = [tf.squeeze(x) for x in tf.split(1, max_seq_length, embedded_inputs)]

    lstm_encoder = Encoder(rnn_cell.BasicLSTMCell, input_size, hidden_size)
    start_state = tf.zeros([batch_size, lstm_encoder.state_size])

    # [h_i], [h_i, c_i] <-- LSTM
    # [h_i], [h_i] <-- RNN
    outputs, states = lstm_encoder(inputs_list, start_state, "LSTM")

    drop_prob = None
    if dropout:
        drop_prob = 0.1
        lstm_encoder = Encoder(rnn_cell.BasicLSTMCell, input_size, hidden_size, drop_prob, drop_prob)

    outputs_fin = outputs[-1]
    if tanhOrSoftmax == "tanh":
        model = Projector(target_size, non_linearity=tf.nn.tanh)(outputs_fin)  # tf.nn.softmax
    else:
        model = Projector(target_size, non_linearity=tf.nn.softmax)(outputs_fin)  # tf.nn.softmax

    return model, [inputs]
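An illustrative TF1-style wiring of the returned model into a training objective; the target placeholder, loss, and optimizer below are assumptions for demonstration, not the repository's actual training code (Encoder and Projector are its own helpers):

model, [inputs] = get_model_tweetonly(
    batch_size=32, max_seq_length=30, input_size=100, hidden_size=64,
    target_size=3, vocab_size=20000, pretrain="pre",
    tanhOrSoftmax="tanh", dropout=True)

# Assumed one-hot stance targets; cross-entropy over a softmax of the output.
targets = tf.placeholder(tf.float32, [32, 3])
loss = tf.reduce_mean(-tf.reduce_sum(targets * tf.log(tf.nn.softmax(model)), 1))
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)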
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def LSTM_train(X_train, Y_train, X_dev, Y_dev, R_train, R_dev, hyperparams):", "def lstm_infer_vector(lstm_model, txt, stopwords,word_indices, maxlen=10, taillemax=300) :\n \n txt_prep = gensim.utils.simple_preprocess(txt, deacc=True)\n txt_wo_uw = remove_unknown_words(txt_prep, word_indices)\n txt_wo_ws = remove_stopwords(txt_wo_uw, stopwords)\n \n if len(txt_wo_ws)>taillemax:\n sentence = txt_wo_ws[-taillemax:]\n \n if len(txt_wo_ws)<maxlen :\n #cas du texte trop court\n sentence = txt_wo_ws\n X = np.zeros((1, maxlen, len(word_indices)), dtype=np.bool)\n y = np.zeros((1, len(word_indices)), dtype=np.bool)\n for t, word in enumerate(sentence):\n X[0, t, word_indices[word]] = 1\n preds = lstm_model.predict(X, verbose=0)[0]\n else :\n \n for current_part in range(len(txt_wo_ws)/maxlen):\n sentence = txt_wo_ws[current_part*maxlen:(current_part+1)*maxlen]\n X = np.zeros((1, maxlen, len(word_indices)), dtype=np.bool)\n y = np.zeros((1, len(word_indices)), dtype=np.bool)\n for t, word in enumerate(sentence):\n X[0, t, word_indices[word]] = 1\n preds = lstm_model.predict(X, verbose=0)[0]\n \n\n return preds", "def TestLSTM(test_x, test_y): \r\n loss = 0.0\r\n seq_length = test_y.shape[1]\r\n for t in range(seq_length):\r\n lstm_in = StepProcess(test_x, batch_size, source_length, lstm_step)\r\n logit = lstm_restored(lstm_in)\r\n # loss function : RSME TODO\r\n loss_0 = tf.keras.losses.MSE(test_y[:, t, 1:3], logit[:, 1:3])\r\n loss += tf.sqrt(loss_0)# TODO\r\n pred_point = np.reshape(logit.numpy(), [batch_size, 1, 5])\r\n test_x = np.concatenate((test_x[:, 1:source_length, :], pred_point), axis=1) \r\n \r\n loss = tf.reduce_mean(loss)\r\n loss = loss / seq_length\r\n return loss", "def __init__(self, vocab_size, output_size, embedding_dim, hidden_dim, n_layers, drop_prob=0.7):\n super(SentimentLSTM, self).__init__()\n\n self.output_size = output_size\n self.n_layers = n_layers\n self.hidden_dim = hidden_dim\n\n\n # define all layers\n self.embed = nn.Embedding(vocab_size,embedding_dim)\n self.lstm = nn.LSTM(embedding_dim,hidden_dim,n_layers,dropout=drop_prob,batch_first=True)\n self.fc = nn.Linear(hidden_dim,output_size)\n self.sigmoid = nn.Sigmoid()\n self.drp = nn.Dropout(p=0.7)", "def create_LSTM_LSTM_model(feats2d, shapes, model_settings, is_training):\n\n if is_training:\n dropout_prob = model_settings['dropout_prob'] \n\n # Get dimensions\n lstm_size = model_settings['lstm_size']\n\n batch_size = tf.shape(feats2d)[0] \n feats2d = tf.reshape(feats2d, shape=[batch_size,-1,model_settings['feature_width']]) # features are of shape [max seq length for batch, 40]\n seq_lengths = shapes[:,0] # all shapes are [seq_length, 40], we extract seq_length\n\n # First LSTM \n\n # LSTM cells\n cell_fw = tf.contrib.rnn.LSTMCell(lstm_size, state_is_tuple=True)\n cell_bw = tf.contrib.rnn.LSTMCell(lstm_size, state_is_tuple=True)\n\n # Bi-directional RNN (+ Dropout)\n (output_fw, output_bw), (state_fw, state_bw) = tf.nn.bidirectional_dynamic_rnn(cell_fw, cell_bw, feats2d, \n sequence_length=seq_lengths, \n dtype=tf.float32)\n\n # TODO: make predictions after every 64 time slices\n\n concat_rnn = tf.concat([state_fw[0], state_bw[0]], axis=1)\n\n if is_training:\n first_dropout = tf.nn.dropout(concat_rnn, dropout_prob)\n else:\n first_dropout = concat_rnn\n\n # Second LSTM \n # TODO\n\n # Logits Layer\n num_classes = model_settings['num_classes']\n logits = tf.layers.dense(inputs=first_dropout, units=num_classes)\n \n if is_training:\n return logits, dropout_prob\n else:\n return logits", "def 
local_trend_rnn(features, labels, mode, params):\n\n ''' set up variables & parameters'''\n # get parameters & super parameters\n length = params['length']\n batch_size = params['batch_size']\n state_size = params['state_size']\n drop_rate = params['drop_rate']\n attention_size = params['attention_size']\n learning_rate = params['learning_rate']\n\n # get training features and label\n local_prices = features['local_prices'] # [b, n, 1]\n local_events = features['local_events'] # [b, n, d]\n\n ''' building basic model structure'''\n # inner trend capture\n price_trend_capture = DynamicLstm(batch_size=batch_size, state_size=state_size,\n keep_rate=1 - drop_rate, variable_scope=\"price_trend_capture\")\n price_trend_h_vec = price_trend_capture.run(local_prices) # [b, n, h]\n\n # event trend capture\n event_trend_capture = DynamicLstm(batch_size=batch_size, state_size=state_size,\n keep_rate=1 - drop_rate, variable_scope=\"event_trend_capture\")\n event_trend_h_vec = event_trend_capture.run(local_events) # [b, n, h]\n\n ''' combine price trend and event trend '''\n hybrid_trend = tf.concat([price_trend_h_vec, event_trend_h_vec], axis=2) # [b, n, 2h]\n hybrid_trend_capture = DynamicLstm(batch_size=batch_size, state_size=state_size,\n keep_rate=1 - drop_rate, variable_scope=\"hybrid_trend_capture\")\n hybrid_trend_h_vec = hybrid_trend_capture.run(hybrid_trend) # [b, n, h]\n\n ''' generate final prediction '''\n output_dense = tf.layers.Dense(units=1, name=\"output_dense\")\n hybrid_attention = AttentionLayer(attention_size=attention_size, name=\"hybrid_attention\")\n values = output_dense(hybrid_attention.run(hybrid_trend_h_vec)) # [b, 1]\n\n ''' config '''\n # Compute predictions.\n if mode == tf.estimator.ModeKeys.PREDICT:\n predictions = {'values': values}\n return tf.estimator.EstimatorSpec(mode, predictions=predictions)\n\n # Compute loss.\n loss = tf.losses.mean_squared_error(labels, values)\n\n # Compute evaluation metrics.\n mse = tf.metrics.mean_squared_error(labels=labels, predictions=values, name='MSE')\n metrics = {'MSE': mse}\n tf.summary.scalar('mean_squared_error', loss)\n\n if mode == tf.estimator.ModeKeys.EVAL:\n return tf.estimator.EstimatorSpec(\n mode, loss=loss, eval_metric_ops=metrics)\n\n # Create training op.\n assert mode == tf.estimator.ModeKeys.TRAIN\n optimizer = tf.train.AdagradOptimizer(learning_rate=learning_rate)\n train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())\n return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)", "def apply_lstm(x, seq_len):\n return cudnn_layers.stacked_bilstm(\n input_emb=x,\n input_len=seq_len,\n hidden_size=FLAGS.lstm_dim,\n num_layers=1,\n dropout_ratio=0.0,\n mode=tf_estimator.ModeKeys.TRAIN,\n use_cudnn=None)", "def han():\n sentence_input = Input(shape=(maxlen,), dtype='int32')\n embedded_sequences = Embedding(input_dim=tokenizer.num_words, output_dim=tokenizer.m,\n input_length=maxlen)(sentence_input)\n l_lstm = Bidirectional(LSTM(100, return_sequences=True))(embedded_sequences)\n l_att = Attention()(l_lstm)\n sentEncoder = Model(sentence_input, l_att)\n review_input = Input(shape=(10, maxlen), dtype='int32') # 10代表有几个句子\n review_encoder = TimeDistributed(sentEncoder)(review_input)\n l_lstm_sent = Bidirectional(LSTM(100, return_sequences=True))(review_encoder)\n l_lstm_att = Attention()(l_lstm_sent)\n preds = Dense(1, activation='softmax')(l_lstm_att)\n model = Model(review_input, preds)", "def __init__(self, embed_size, hidden_size, vocab_size, num_layers): #(512,1024,29,1)\r\n 
super(DecoderRNN, self).__init__()\r\n self.embed = nn.Embedding(vocab_size, embed_size)#(29,512)\r\n self.lstm = nn.LSTM(embed_size, hidden_size, num_layers, batch_first=True) #(512,1024,1)\r\n self.linear = nn.Linear(hidden_size, vocab_size)#(1024,29)\r\n self.init_weights()", "def lstm_atten(self):\n # Model.\n model = Sequential()\n model.add(LSTM(2048, return_sequences=True,\n input_shape=self.input_shape,\n dropout=0.5))\n model.add(Flatten()) #qiao_added\n # model.add(Dense(1024, activation='relu'))\n # model.add(Dropout(0.5))\n\n attention = Dense(1, activation='tanh')(activations)\n attention = Flatten()(attention)\n attention = Activation('softmax')(attention)\n attention = RepeatVector(2048)(attention)\n attention = Permute([2, 1])(attention)\n\n sent_representation = concatenate([activations, attention], mode='mul')\n sent_representation = Lambda(lambda xin: K.sum(xin, axis=-2), output_shape=(2048,))(sent_representation)\n\n probabilities = Dense(self.nb_classes, activation='softmax')(sent_representation)\n\n model = model(input=self.input_shape, output=probabilities )\n\n dense1800 = Dense(4096, activation='relu')\n\n #dense1800 = Dense(1800, activation='relu', kernel_regularizer=regularizers.l2(0.01))(inputs)\n attention_probs = Dense(4096, activation='sigmoid', name='attention_probs')(dense1800)\n attention_mul = multiply([dense1800, attention_probs], name='attention_mul')\n dense7 = Dense(self.nb_classes, kernel_regularizer=regularizers.l2(0.01), activation='softmax')(attention_mul)\n model = Model(input=[self.input_shape], output=dense7)\n return model", "def __init__(self):\n super(biLSTM, self).__init__()\n \n # Words\n self.embedding = nn.Embedding(config.VOCAB, config.EMBED_DIM) \n self.embedding.shape = torch.Tensor(config.BATCH_SIZE, 5*config.EMBED_DIM)\n self.lstm_words = nn.LSTM(5*config.EMBED_DIM, config.HIDDEN_DIM, num_layers=1,bidirectional=True)\n \n # Letters\n self.embedding_chars = nn.Embedding(config.VOCAB_CHARS, config.EMBED_DIM)\n self.embedding_chars.shape = torch.Tensor(config.BATCH_SIZE, config.WORD_MAX_LENGTH*config.EMBED_DIM) \n self.lstm_chars = nn.LSTM(config.WORD_MAX_LENGTH*config.EMBED_DIM, config.HIDDEN_DIM, num_layers=1, bidirectional=True)\n\n #Subwords \n self.embeds_prefix = nn.Embedding(config.VOCAB, config.EMBED_DIM) \n self.embeds_prefix.shape = torch.Tensor(config.BATCH_SIZE, 5*config.EMBED_DIM)\n self.lstm_prefix = nn.LSTM(5*config.EMBED_DIM, config.HIDDEN_DIM, num_layers=1, bidirectional=True) \n\n self.embeds_suffix = nn.Embedding(config.VOCAB, config.EMBED_DIM)\n self.embeds_suffix.shape = torch.Tensor(config.BATCH_SIZE, 5*config.EMBED_DIM) \n self.lstm_suffix = nn.LSTM(5*config.EMBED_DIM, config.HIDDEN_DIM, num_layers=1, bidirectional=True)\n \n # Concatination\n self.embedding_concat_words = nn.Embedding(config.VOCAB, config.EMBED_DIM)\n self.embedding_concat_words.shape = torch.Tensor(config.BATCH_SIZE, 5*config.EMBED_DIM)\n\n self.embedding_concat_chars = nn.Embedding(config.VOCAB, config.EMBED_DIM)\n self.embedding_concat_chars.shape = torch.Tensor(config.BATCH_SIZE, 5*config.WORD_MAX_LENGTH*config.EMBED_DIM) \n \n self.lstm_concat_dim = (5*config.EMBED_DIM + 5*config.WORD_MAX_LENGTH*config.EMBED_DIM)\n self.lstm_concat = nn.LSTM(self.lstm_concat_dim, config.HIDDEN_DIM, num_layers=1, bidirectional=True)\n \n self.fc = nn.Linear(2*config.HIDDEN_DIM, config.OUTPUT_SIZE) \n self.softmax = nn.LogSoftmax(dim=0)", "def forward(self, s):\n # -> batch_size x seq_len\n # apply the embedding layer that maps each token to its embedding\n # 
dim: batch_size x seq_len x embedding_dim\n s = self.embedding(s)\n\n # run the LSTM along the sentences of length seq_len\n # dim: batch_size x seq_len x lstm_hidden_dim\n s, _ = self.lstm(s)\n\n # make the Variable contiguous in memory (a PyTorch artefact)\n s = s.contiguous()\n\n # reshape the Variable so that each row contains one token\n # dim: batch_size*seq_len x lstm_hidden_dim\n s = s.view(-1, s.shape[2])\n\n # apply the fully connected layer and obtain the output (before softmax) for each token\n s = self.fc(s) # dim: batch_size*seq_len x num_tags\n s = self.fc2(s)\n s = self.fc3(s)\n\n # apply log softmax on each token's output (this is recommended over applying softmax\n # since it is numerically more stable)\n return F.log_softmax(s, dim=1) # dim: batch_size*seq_len x num_tags", "def make_attention_lstm():\n from tensorflow.keras import activations\n from tensorflow.keras import backend as K\n from tensorflow.keras import constraints, initializers, regularizers\n\n # from keras.legacy import interfaces\n from tensorflow.keras.layers import RNN, InputSpec, Layer\n\n def _time_distributed_dense(\n x,\n w,\n b=None,\n dropout=None,\n input_dim=None,\n output_dim=None,\n timesteps=None,\n training=None,\n ):\n \"\"\"Apply `y . w + b` for every temporal slice y of x.\n\n # Arguments\n x: input tensor.\n w: weight matrix.\n b: optional bias vector.\n dropout: wether to apply dropout (same dropout mask\n for every temporal slice of the input).\n input_dim: integer; optional dimensionality of the input.\n output_dim: integer; optional dimensionality of the output.\n timesteps: integer; optional number of timesteps.\n training: training phase tensor or boolean.\n # Returns\n Output tensor.\n \"\"\"\n if not input_dim:\n input_dim = K.shape(x)[2]\n if not timesteps:\n timesteps = K.shape(x)[1]\n if not output_dim:\n output_dim = K.int_shape(w)[1]\n\n if dropout is not None and 0.0 < dropout < 1.0:\n # apply the same dropout pattern at every timestep\n ones = K.ones_like(K.reshape(x[:, 0, :], (-1, input_dim)))\n dropout_matrix = K.dropout(ones, dropout)\n expanded_dropout_matrix = K.repeat(dropout_matrix, timesteps)\n x = K.in_train_phase(x * expanded_dropout_matrix, x, training=training)\n\n # collapse time dimension and batch dimension together\n x = K.reshape(x, (-1, input_dim))\n x = K.dot(x, w)\n if b is not None:\n x = K.bias_add(x, b)\n # reshape to 3D tensor\n if K.backend() == \"tensorflow\":\n x = K.reshape(x, K.stack([-1, timesteps, output_dim]))\n x.set_shape([None, None, output_dim])\n else:\n x = K.reshape(x, (-1, timesteps, output_dim))\n return x\n\n class AttentionLSTMCell(Layer):\n \"\"\"Long-Short Term Memory unit - with Attention.\n\n # Arguments\n units: Positive integer, dimensionality of the output space.\n activation: Activation function to use\n (see [activations](keras/activations.md)).\n If you pass None, no activation is applied\n (ie. \"linear\" activation: `a(x) = x`).\n recurrent_activation: Activation function to use\n for the recurrent step\n (see [activations](keras/activations.md)).\n attention_activation: Activation function to use\n for the attention step. If you pass None, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n (see [activations](keras/activations.md)).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix,\n used for the linear transformation of the inputs.\n (see [initializers](../initializers.md)).\n recurrent_initializer: Initializer for the `recurrent_kernel`\n weights matrix,\n used for the linear transformation of the recurrent state.\n (see [initializers](../initializers.md)).\n bias_initializer: Initializer for the bias vector\n (see [initializers](../initializers.md)).\n attention_initializer: Initializer for the `attention_kernel` weights\n matrix, used for the linear transformation of the inputs.\n (see [initializers](../initializers.md)).\n use_chrono_initialization: Boolean.\n If True, add 1 to the bias of the forget gate at initialization.\n Setting it to true will also force `bias_initializer=\"zeros\"`.\n This is recommended in [Jozefowicz et al.]\n (http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n recurrent_regularizer: Regularizer function applied to\n the `recurrent_kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n bias_regularizer: Regularizer function applied to the bias vector\n (see [regularizer](../regularizers.md)).\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\").\n (see [regularizer](../regularizers.md)).\n attention_regularizer: Regularizer function applied to\n the `attention_kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n kernel_constraint: Constraint function applied to\n the `kernel` weights matrix\n (see [constraints](../constraints.md)).\n recurrent_constraint: Constraint function applied to\n the `recurrent_kernel` weights matrix\n (see [constraints](../constraints.md)).\n bias_constraint: Constraint function applied to the bias vector\n (see [constraints](../constraints.md)).\n attention_constraint: Constraint function applied to\n the `attention_kernel` weights matrix\n (see [constraints](../constraints.md)).\n dropout: Float between 0 and 1.\n Fraction of the units to drop for\n the linear transformation of the inputs.\n recurrent_dropout: Float between 0 and 1.\n Fraction of the units to drop for\n the linear transformation of the recurrent state.\n return_attention: Returns the attention vector instead of\n the internal state.\n # References\n - [Long short-term memory]\n (http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf)\n (original 1997 paper)\n - [Learning to forget: Continual prediction with LSTM]\n (http://www.mitpressjournals.org/doi/pdf/10.1162/089976600300015015)\n - [Supervised sequence labeling with recurrent neural networks]\n (http://www.cs.toronto.edu/~graves/preprint.pdf)\n - [A Theoretically Grounded Application of Dropout\n in Recurrent Neural Networks]\n (http://arxiv.org/abs/1512.05287)\n - [Bahdanau, Cho & Bengio (2014),\n \"Neural Machine Translation by Jointly Learning to Align and Translate\"]\n (https://arxiv.org/pdf/1409.0473.pdf)\n - [Xu, Ba, Kiros, Cho, Courville, Salakhutdinov, Zemel & Bengio (2016)\n \"Show, Attend and Tell: Neural Image Caption Generation\n with Visual Attention\"]\n (http://arxiv.org/pdf/1502.03044.pdf)\n \"\"\"\n\n _tags = {\"python_dependencies\": \"tensorflow\"}\n\n def __init__(\n self,\n units,\n activation=\"tanh\",\n recurrent_activation=\"hard_sigmoid\",\n 
attention_activation=\"tanh\",\n use_bias=True,\n kernel_initializer=\"glorot_uniform\",\n recurrent_initializer=\"orthogonal\",\n attention_initializer=\"orthogonal\",\n bias_initializer=\"zeros\",\n unit_forget_bias=True,\n kernel_regularizer=None,\n recurrent_regularizer=None,\n bias_regularizer=None,\n activity_regularizer=None,\n attention_regularizer=None,\n kernel_constraint=None,\n recurrent_constraint=None,\n bias_constraint=None,\n attention_constraint=None,\n dropout=0.0,\n recurrent_dropout=0.0,\n return_attention=False,\n implementation=1,\n **kwargs,\n ):\n super().__init__(**kwargs)\n self.input_spec = [InputSpec(ndim=2)]\n self.units = units\n self.activation = activations.get(activation)\n self.recurrent_activation = activations.get(recurrent_activation)\n self.attention_activation = activations.get(attention_activation)\n self.use_bias = use_bias\n\n self.kernel_initializer = initializers.get(kernel_initializer)\n self.recurrent_initializer = initializers.get(recurrent_initializer)\n self.attention_initializer = initializers.get(attention_initializer)\n self.bias_initializer = initializers.get(bias_initializer)\n self.unit_forget_bias = unit_forget_bias\n\n self.kernel_regularizer = regularizers.get(kernel_regularizer)\n self.recurrent_regularizer = regularizers.get(recurrent_regularizer)\n self.bias_regularizer = regularizers.get(bias_regularizer)\n self.activity_regularizer = regularizers.get(activity_regularizer)\n self.attention_regularizer = regularizers.get(attention_regularizer)\n\n self.kernel_constraint = constraints.get(kernel_constraint)\n self.recurrent_constraint = constraints.get(recurrent_constraint)\n self.bias_constraint = constraints.get(bias_constraint)\n self.attention_constraint = constraints.get(attention_constraint)\n\n self.dropout = min(1.0, max(0.0, dropout))\n self.recurrent_dropout = min(1.0, max(0.0, recurrent_dropout))\n self.return_attention = return_attention\n self._dropout_mask = None\n self._recurrent_dropout_mask = None\n self.implementation = implementation\n self.state_spec = [\n InputSpec(shape=(None, self.units)),\n InputSpec(shape=(None, self.units)),\n ]\n self.state_size = (self.units, self.units)\n\n def build(self, input_shape):\n \"\"\"Build the AttentionLSTMCell object.\"\"\"\n if hasattr(self, \"timesteps\") and self.timesteps is not None:\n self.timestep_dim = self.timesteps\n else:\n self.timestep_dim = 1 # input_shape[0]\n\n self.input_dim = input_shape[-1]\n\n self.kernel = self.add_weight(\n shape=(self.input_dim, self.units * 4),\n name=\"kernel\",\n initializer=self.kernel_initializer,\n regularizer=self.kernel_regularizer,\n constraint=self.kernel_constraint,\n )\n self.recurrent_kernel = self.add_weight(\n shape=(self.units, self.units * 4),\n name=\"recurrent_kernel\",\n initializer=self.recurrent_initializer,\n regularizer=self.recurrent_regularizer,\n constraint=self.recurrent_constraint,\n )\n\n # add attention kernel\n self.attention_kernel = self.add_weight(\n shape=(self.input_dim, self.units * 4),\n name=\"attention_kernel\",\n initializer=self.attention_initializer,\n regularizer=self.attention_regularizer,\n constraint=self.attention_constraint,\n )\n\n # add attention weights\n # weights for attention model\n self.attention_weights = self.add_weight(\n shape=(self.input_dim, self.units),\n name=\"attention_W\",\n initializer=self.attention_initializer,\n regularizer=self.attention_regularizer,\n constraint=self.attention_constraint,\n )\n\n self.attention_recurrent_weights = self.add_weight(\n 
shape=(self.units, self.units),\n name=\"attention_U\",\n initializer=self.recurrent_initializer,\n regularizer=self.recurrent_regularizer,\n constraint=self.recurrent_constraint,\n )\n\n if self.use_bias:\n if self.unit_forget_bias:\n\n def bias_initializer(shape, *args, **kwargs):\n return K.concatenate(\n [\n self.bias_initializer((self.units,), *args, **kwargs),\n initializers.Ones()((self.units,), *args, **kwargs),\n self.bias_initializer(\n (self.units * 2,), *args, **kwargs\n ),\n ]\n )\n\n else:\n bias_initializer = self.bias_initializer\n self.bias = self.add_weight(\n shape=(self.units * 4,),\n name=\"bias\",\n initializer=bias_initializer,\n regularizer=self.bias_regularizer,\n constraint=self.bias_constraint,\n )\n\n self.attention_bias = self.add_weight(\n shape=(self.units,),\n name=\"attention_b\",\n initializer=self.bias_initializer,\n regularizer=self.bias_regularizer,\n constraint=self.bias_constraint,\n )\n\n self.attention_recurrent_bias = self.add_weight(\n shape=(self.units, 1),\n name=\"attention_v\",\n initializer=self.bias_initializer,\n regularizer=self.bias_regularizer,\n constraint=self.bias_constraint,\n )\n else:\n self.bias = None\n self.attention_bias = None\n self.attention_recurrent_bias = None\n\n self.kernel_i = self.kernel[:, : self.units]\n self.kernel_f = self.kernel[:, self.units : self.units * 2]\n self.kernel_c = self.kernel[:, self.units * 2 : self.units * 3]\n self.kernel_o = self.kernel[:, self.units * 3 :]\n\n self.recurrent_kernel_i = self.recurrent_kernel[:, : self.units]\n self.recurrent_kernel_f = self.recurrent_kernel[\n :, self.units : self.units * 2\n ]\n self.recurrent_kernel_c = self.recurrent_kernel[\n :, self.units * 2 : self.units * 3\n ]\n self.recurrent_kernel_o = self.recurrent_kernel[:, self.units * 3 :]\n\n self.attention_i = self.attention_kernel[:, : self.units]\n self.attention_f = self.attention_kernel[:, self.units : self.units * 2]\n self.attention_c = self.attention_kernel[:, self.units * 2 : self.units * 3]\n self.attention_o = self.attention_kernel[:, self.units * 3 :]\n\n if self.use_bias:\n self.bias_i = self.bias[: self.units]\n self.bias_f = self.bias[self.units : self.units * 2]\n self.bias_c = self.bias[self.units * 2 : self.units * 3]\n self.bias_o = self.bias[self.units * 3 :]\n else:\n self.bias_i = None\n self.bias_f = None\n self.bias_c = None\n self.bias_o = None\n\n self.built = True\n\n def _generate_dropout_mask(self, inputs, training=None):\n if 0 < self.dropout < 1:\n ones = K.ones_like(K.squeeze(inputs[:, 0:1, :], axis=1))\n\n def dropped_inputs():\n return K.dropout(ones, self.dropout)\n\n self._dropout_mask = [\n K.in_train_phase(dropped_inputs, ones, training=training)\n for _ in range(4)\n ]\n else:\n self._dropout_mask = None\n\n def _generate_recurrent_dropout_mask(self, inputs, training=None):\n if 0 < self.recurrent_dropout < 1:\n ones = K.ones_like(K.reshape(inputs[:, 0, 0], (-1, 1)))\n ones = K.tile(ones, (1, self.units))\n\n def dropped_inputs():\n return K.dropout(ones, self.dropout)\n\n self._recurrent_dropout_mask = [\n K.in_train_phase(dropped_inputs, ones, training=training)\n for _ in range(4)\n ]\n else:\n self._recurrent_dropout_mask = None\n\n def call(self, inputs, states, training=None):\n \"\"\"Call the AttentionLSTMCell.\"\"\"\n # dropout matrices for input units\n dp_mask = self._dropout_mask\n # dropout matrices for recurrent units\n rec_dp_mask = self._recurrent_dropout_mask\n\n h_tm1 = states[0] # previous memory state\n c_tm1 = states[1] # previous carry state\n\n # 
alignment model\n h_att = K.repeat(h_tm1, self.timestep_dim)\n att = _time_distributed_dense(\n inputs,\n self.attention_weights,\n self.attention_bias,\n input_dim=self.input_dim,\n output_dim=self.units,\n timesteps=self.timestep_dim,\n )\n attention_ = self.attention_activation(\n K.dot(h_att, self.attention_recurrent_weights) + att\n ) # energy\n attention_ = K.squeeze(\n K.dot(attention_, self.attention_recurrent_bias), 2\n ) # energy\n\n alpha = K.exp(attention_)\n\n if dp_mask is not None:\n alpha *= dp_mask[0]\n\n alpha /= K.sum(alpha, axis=1, keepdims=True)\n alpha_r = K.repeat(alpha, self.input_dim)\n alpha_r = K.permute_dimensions(alpha_r, (0, 2, 1))\n\n # make context vector (soft attention after Bahdanau et al.)\n z_hat = inputs * alpha_r\n # context_sequence = z_hat\n z_hat = K.sum(z_hat, axis=1)\n\n if self.implementation == 1:\n if 0 < self.dropout < 1.0:\n inputs_i = inputs * dp_mask[0]\n inputs_f = inputs * dp_mask[1]\n inputs_c = inputs * dp_mask[2]\n inputs_o = inputs * dp_mask[3]\n else:\n inputs_i = inputs\n inputs_f = inputs\n inputs_c = inputs\n inputs_o = inputs\n x_i = K.dot(inputs_i, self.kernel_i)\n x_f = K.dot(inputs_f, self.kernel_f)\n x_c = K.dot(inputs_c, self.kernel_c)\n x_o = K.dot(inputs_o, self.kernel_o)\n if self.use_bias:\n x_i = K.bias_add(x_i, self.bias_i)\n x_f = K.bias_add(x_f, self.bias_f)\n x_c = K.bias_add(x_c, self.bias_c)\n x_o = K.bias_add(x_o, self.bias_o)\n\n if 0 < self.recurrent_dropout < 1.0:\n h_tm1_i = h_tm1 * rec_dp_mask[0]\n h_tm1_f = h_tm1 * rec_dp_mask[1]\n h_tm1_c = h_tm1 * rec_dp_mask[2]\n h_tm1_o = h_tm1 * rec_dp_mask[3]\n else:\n h_tm1_i = h_tm1\n h_tm1_f = h_tm1\n h_tm1_c = h_tm1\n h_tm1_o = h_tm1\n i = self.recurrent_activation(\n x_i\n + K.dot(h_tm1_i, self.recurrent_kernel_i)\n + K.dot(z_hat, self.attention_i)\n )\n f = self.recurrent_activation(\n x_f\n + K.dot(h_tm1_f, self.recurrent_kernel_f)\n + K.dot(z_hat, self.attention_f)\n )\n c = f * c_tm1 + i * self.activation(\n x_c\n + K.dot(h_tm1_c, self.recurrent_kernel_c)\n + K.dot(z_hat, self.attention_c)\n )\n o = self.recurrent_activation(\n x_o\n + K.dot(h_tm1_o, self.recurrent_kernel_o)\n + K.dot(z_hat, self.attention_o)\n )\n else:\n if 0.0 < self.dropout < 1.0:\n inputs *= dp_mask[0]\n z = K.dot(inputs, self.kernel)\n if 0.0 < self.recurrent_dropout < 1.0:\n h_tm1 *= rec_dp_mask[0]\n z += K.dot(h_tm1, self.recurrent_kernel)\n z += K.dot(z_hat, self.attention_kernel)\n\n if self.use_bias:\n z = K.bias_add(z, self.bias)\n\n z0 = z[:, : self.units]\n z1 = z[:, self.units : 2 * self.units]\n z2 = z[:, 2 * self.units : 3 * self.units]\n z3 = z[:, 3 * self.units :]\n\n i = self.recurrent_activation(z0)\n f = self.recurrent_activation(z1)\n c = f * c_tm1 + i * self.activation(z2)\n o = self.recurrent_activation(z3)\n\n h = o * self.activation(c)\n if 0 < self.dropout + self.recurrent_dropout:\n if training is None:\n h._uses_learning_phase = True\n return h, [h, c]\n\n class AttentionLSTM(RNN):\n \"\"\"Long-Short Term Memory unit - with Attention.\n\n # Arguments\n units: Positive integer, dimensionality of the output space.\n activation: Activation function to use\n (see [activations](keras/activations.md)).\n If you pass None, no activation is applied\n (ie. \"linear\" activation: `a(x) = x`).\n recurrent_activation: Activation function to use\n for the recurrent step\n (see [activations](keras/activations.md)).\n attention_activation: Activation function to use\n for the attention step. If you pass None, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n (see [activations](keras/activations.md)).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix,\n used for the linear transformation of the inputs.\n (see [initializers](../initializers.md)).\n recurrent_initializer: Initializer for the `recurrent_kernel`\n weights matrix,\n used for the linear transformation of the recurrent state.\n (see [initializers](../initializers.md)).\n bias_initializer: Initializer for the bias vector\n (see [initializers](../initializers.md)).\n attention_initializer: Initializer for the `attention_kernel` weights\n matrix, used for the linear transformation of the inputs.\n (see [initializers](../initializers.md)).\n use_chrono_initialization: Boolean.\n If True, add 1 to the bias of the forget gate at initialization.\n Setting it to true will also force `bias_initializer=\"zeros\"`.\n This is recommended in [Jozefowicz et al.]\n (http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n recurrent_regularizer: Regularizer function applied to\n the `recurrent_kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n bias_regularizer: Regularizer function applied to the bias vector\n (see [regularizer](../regularizers.md)).\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\").\n (see [regularizer](../regularizers.md)).\n attention_regularizer: Regularizer function applied to\n the `attention_kernel` weights matrix\n (see [regularizer](../regularizers.md)).\n kernel_constraint: Constraint function applied to\n the `kernel` weights matrix\n (see [constraints](../constraints.md)).\n recurrent_constraint: Constraint function applied to\n the `recurrent_kernel` weights matrix\n (see [constraints](../constraints.md)).\n bias_constraint: Constraint function applied to the bias vector\n (see [constraints](../constraints.md)).\n attention_constraint: Constraint function applied to\n the `attention_kernel` weights matrix\n (see [constraints](../constraints.md)).\n dropout: Float between 0 and 1.\n Fraction of the units to drop for\n the linear transformation of the inputs.\n recurrent_dropout: Float between 0 and 1.\n Fraction of the units to drop for\n the linear transformation of the recurrent state.\n implementation: Implementation mode, either 1 or 2.\n return_sequences: Boolean. Whether to return the last output.\n in the output sequence, or the full sequence.\n return_state: Boolean. Whether to return the last state\n in addition to the output.\n return_attention: Returns the attention vector instead of\n the internal state.\n go_backwards: Boolean (default False).\n If True, process the input sequence backwards and return the\n reversed sequence.\n stateful: Boolean (default False). 
If True, the last state\n for each sample at index i in a batch will be used as initial\n state for the sample of index i in the following batch.\n unroll: Boolean (default False).\n If True, the network will be unrolled,\n else a symbolic loop will be used.\n Unrolling can speed-up a RNN,\n although it tends to be more memory-intensive.\n Unrolling is only suitable for short sequences.\n # References\n - [Long short-term memory]\n (http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf)\n (original 1997 paper)\n - [Learning to forget: Continual prediction with LSTM]\n (http://www.mitpressjournals.org/doi/pdf/10.1162/089976600300015015)\n - [Supervised sequence labeling with recurrent neural networks]\n (http://www.cs.toronto.edu/~graves/preprint.pdf)\n - [A Theoretically Grounded Application of Dropout\n in Recurrent Neural Networks]\n (http://arxiv.org/abs/1512.05287)\n - [Bahdanau, Cho & Bengio (2014)\n \"Neural Machine Translation by Jointly Learning to Align and Translate\"]\n (https://arxiv.org/pdf/1409.0473.pdf)\n - [Xu, Ba, Kiros, Cho, Courville, Salakhutdinov, Zemel & Bengio (2016)\n \"Show, Attend and Tell: Neural Image Caption Generation\n with Visual Attention\"]\n (http://arxiv.org/pdf/1502.03044.pdf)\n \"\"\"\n\n _tags = {\"python_dependencies\": \"tensorflow\"}\n\n # '@interfaces.legacy_recurrent_support\n def __init__(\n self,\n units,\n activation=\"tanh\",\n recurrent_activation=\"hard_sigmoid\",\n attention_activation=\"tanh\",\n use_bias=True,\n kernel_initializer=\"glorot_uniform\",\n recurrent_initializer=\"orthogonal\",\n attention_initializer=\"orthogonal\",\n bias_initializer=\"zeros\",\n unit_forget_bias=True,\n kernel_regularizer=None,\n recurrent_regularizer=None,\n bias_regularizer=None,\n activity_regularizer=None,\n attention_regularizer=None,\n kernel_constraint=None,\n recurrent_constraint=None,\n bias_constraint=None,\n attention_constraint=None,\n dropout=0.0,\n recurrent_dropout=0.0,\n implementation=1,\n return_sequences=False,\n return_state=False,\n return_attention=False,\n go_backwards=False,\n stateful=False,\n unroll=False,\n **kwargs,\n ):\n import warnings\n\n if implementation == 0:\n warnings.warn(\n \"`implementation=0` has been deprecated, \"\n \"and now defaults to `implementation=1`.\"\n \"Please update your layer call.\",\n stacklevel=2,\n )\n implementation = 1\n\n if K.backend() == \"cntk\":\n if not kwargs.get(\"unroll\") and (dropout > 0 or recurrent_dropout > 0):\n warnings.warn(\n \"RNN dropout is not supported with the CNTK backend \"\n \"when using dynamic RNNs (i.e. non-unrolled). 
\"\n \"You can either set `unroll=True`, \"\n \"set `dropout` and `recurrent_dropout` to 0, \"\n \"or use a different backend.\",\n stacklevel=2,\n )\n dropout = 0.0\n recurrent_dropout = 0.0\n\n cell = AttentionLSTMCell(\n units,\n activation=activation,\n recurrent_activation=recurrent_activation,\n attention_activation=attention_activation,\n use_bias=use_bias,\n kernel_initializer=kernel_initializer,\n recurrent_initializer=recurrent_initializer,\n attention_initializer=attention_initializer,\n bias_initializer=bias_initializer,\n unit_forget_bias=unit_forget_bias,\n kernel_regularizer=kernel_regularizer,\n recurrent_regularizer=recurrent_regularizer,\n bias_regularizer=bias_regularizer,\n activity_regularizer=activity_regularizer,\n attention_regularizer=attention_regularizer,\n kernel_constraint=kernel_constraint,\n recurrent_constraint=recurrent_constraint,\n bias_constraint=bias_constraint,\n attention_constraint=attention_constraint,\n dropout=dropout,\n recurrent_dropout=recurrent_dropout,\n return_attention=return_attention,\n implementation=implementation,\n )\n super().__init__(\n cell,\n return_sequences=return_sequences,\n return_state=return_state,\n go_backwards=go_backwards,\n stateful=stateful,\n unroll=unroll,\n **kwargs,\n )\n self.return_attention = return_attention\n\n def build(self, input_shape):\n \"\"\"Build the AttentionLSTM object.\"\"\"\n self.cell.timesteps = input_shape[1]\n self.cell.build(input_shape)\n\n def call(self, inputs, mask=None, training=None, initial_state=None):\n \"\"\"Call the AttentionLSTM object.\"\"\"\n self.cell._generate_dropout_mask(inputs, training=training)\n self.cell._generate_recurrent_dropout_mask(inputs, training=training)\n return super().call(\n inputs, mask=mask, training=training, initial_state=initial_state\n )\n\n @property\n def units(self):\n \"\"\"Return property units.\"\"\"\n return self.cell.units\n\n @property\n def activation(self):\n \"\"\"Return property activation.\"\"\"\n return self.cell.activation\n\n @property\n def recurrent_activation(self):\n \"\"\"Return property recurrent_activation.\"\"\"\n return self.cell.recurrent_activation\n\n @property\n def attention_activation(self):\n \"\"\"Return property attention_activation.\"\"\"\n return self.cell.attention_activation\n\n @property\n def use_bias(self):\n \"\"\"Return property use_bias.\"\"\"\n return self.cell.use_bias\n\n @property\n def kernel_initializer(self):\n \"\"\"Return property kernel_initializer.\"\"\"\n return self.cell.kernel_initializer\n\n @property\n def recurrent_initializer(self):\n \"\"\"Return property recurrent_initializer.\"\"\"\n return self.cell.recurrent_initializer\n\n @property\n def attention_initializer(self):\n \"\"\"Return property attention_initializer.\"\"\"\n return self.cell.attention_initializer\n\n @property\n def bias_initializer(self):\n \"\"\"Return property bias_initializer.\"\"\"\n return self.cell.bias_initializer\n\n @property\n def unit_forget_bias(self):\n \"\"\"Return property unit_forget_bias.\"\"\"\n return self.cell.unit_forget_bias\n\n @property\n def kernel_regularizer(self):\n \"\"\"Return property kernel_regularizer.\"\"\"\n return self.cell.kernel_regularizer\n\n @property\n def recurrent_regularizer(self):\n \"\"\"Return property recurrent_regularizer.\"\"\"\n return self.cell.recurrent_regularizer\n\n @property\n def bias_regularizer(self):\n \"\"\"Return property bias_regularizer.\"\"\"\n return self.cell.bias_regularizer\n\n @property\n def activity_regularizer(self):\n \"\"\"Return property 
activity_regularizer.\"\"\"\n return self.cell.activity_regularizer\n\n @property\n def attention_regularizer(self):\n \"\"\"Return property attention_regularizer.\"\"\"\n return self.cell.attention_regularizer\n\n @property\n def kernel_constraint(self):\n \"\"\"Return property kernel_constraint.\"\"\"\n return self.cell.kernel_constraint\n\n @property\n def recurrent_constraint(self):\n \"\"\"Return property recurrent_constraint.\"\"\"\n return self.cell.recurrent_constraint\n\n @property\n def bias_constraint(self):\n \"\"\"Return property bias_constraint.\"\"\"\n return self.cell.bias_constraint\n\n @property\n def attention_constraint(self):\n \"\"\"Return property attention_constraint.\"\"\"\n return self.cell.attention_constraint\n\n @property\n def dropout(self):\n \"\"\"Return property dropout.\"\"\"\n return self.cell.dropout\n\n @property\n def recurrent_dropout(self):\n \"\"\"Return property recurrent_dropout.\"\"\"\n return self.cell.recurrent_dropout\n\n @property\n def implementation(self):\n \"\"\"Return property implementation.\"\"\"\n return self.cell.implementation\n\n def get_config(self):\n \"\"\"Return configuration dict of the AttentionLSTM object.\"\"\"\n config = {\n \"units\": self.units,\n \"activation\": activations.serialize(self.activation),\n \"recurrent_activation\": activations.serialize(\n self.recurrent_activation\n ),\n \"attention_activation\": activations.serialize(\n self.attention_activation\n ),\n \"use_bias\": self.use_bias,\n \"kernel_initializer\": initializers.serialize(self.kernel_initializer),\n \"recurrent_initializer\": initializers.serialize(\n self.recurrent_initializer\n ),\n \"bias_initializer\": initializers.serialize(self.bias_initializer),\n \"attention_initializer\": initializers.serialize(\n self.attention_initializer\n ),\n \"use_chrono_initialization\": self.unit_forget_bias,\n \"kernel_regularizer\": regularizers.serialize(self.kernel_regularizer),\n \"recurrent_regularizer\": regularizers.serialize(\n self.recurrent_regularizer\n ),\n \"bias_regularizer\": regularizers.serialize(self.bias_regularizer),\n \"activity_regularizer\": regularizers.serialize(\n self.activity_regularizer\n ),\n \"attention_regularizer\": regularizers.serialize(\n self.attention_regularizer\n ),\n \"kernel_constraint\": constraints.serialize(self.kernel_constraint),\n \"recurrent_constraint\": constraints.serialize(\n self.recurrent_constraint\n ),\n \"bias_constraint\": constraints.serialize(self.bias_constraint),\n \"attention_constraint\": constraints.serialize(\n self.attention_constraint\n ),\n \"dropout\": self.dropout,\n \"recurrent_dropout\": self.recurrent_dropout,\n \"return_attention\": self.return_attention,\n }\n base_config = super().get_config()\n del base_config[\"cell\"]\n return dict(list(base_config.items()) + list(config.items()))\n\n @classmethod\n def from_config(cls, config):\n \"\"\"Create a new AttentionLSTM object from a configuration dict.\"\"\"\n if \"implementation\" in config and config[\"implementation\"] == 0:\n config[\"implementation\"] = 1\n return cls(**config)\n\n return AttentionLSTM", "def lstmdouble(self):\n # Model.\n model = Sequential()\n model.add(LSTM(2048, return_sequences=True,\n input_shape=self.input_shape,\n dropout=0.0))\n model.add(LSTM(2048, return_sequences=True,\n input_shape=self.input_shape,\n dropout=0.0))\n #model.add(Flatten()) #qiao_added\n # model.add(Dense(1024, activation='relu'))\n model.add(Dropout(0.5))\n model.add(Dense(2048, activation='relu'))\n model.add(Dropout(0.5))\n 
model.add(Flatten())\n model.add(Dense(self.nb_classes, activation='softmax'))\n return model", "def train_rnn(df):\r\n print(\"--- training RNN(LSTM) model ---\")\r\n sTime = time()\r\n print(\"--- preprocessing the data ---\")\r\n sentences = preprocessing(df['description'])\r\n\r\n print(\"--- making sequences and labels ---\")\r\n seq, labels = sents_to_seqs(sentences)\r\n\r\n print(\"--- Converting words in sequences labels into vectors ---\")\r\n seq = seq2Vec(seq)\r\n labels = labels2Vec(labels)\r\n # splitting the data to 80/20\r\n x_train, x_test, y_train, y_test = train_test_split(seq, labels, test_size=0.2, shuffle=42)\r\n\r\n # memory cleaning\r\n seq = []\r\n labels = []\r\n sentences = []\r\n\r\n # building our RNN model\r\n print(\"--- building our RNN model ---\")\r\n model = Sequential()\r\n\r\n model.add(LSTM(300, return_sequences=True, activation=\"tanh\", input_shape=(4, 300)))\r\n model.add(Dropout(0.3))\r\n model.add(LSTM(300, return_sequences=True, activation=\"tanh\"))\r\n model.add(Dropout(0.3))\r\n model.add(LSTM(300, return_sequences=True, activation=\"tanh\"))\r\n model.add(Dropout(0.3))\r\n model.add(LSTM(300, return_sequences=False, activation=\"tanh\"))\r\n model.add(Dense(300, activation='relu'))\r\n model.add(BatchNormalization())\r\n model.add(Dropout(0.4))\r\n model.add(Dense(250, activation='relu'))\r\n model.add(BatchNormalization())\r\n model.add(Dropout(0.4))\r\n model.add(Dense(250, activation='relu'))\r\n model.add(BatchNormalization())\r\n model.add(Dropout(0.4))\r\n model.add(Dense(300, activation=\"linear\"))\r\n model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])\r\n\r\n print(\"--- fitting the data to our model ---\")\r\n\r\n model.fit(x_train, y_train, epochs=10, batch_size=30000)\r\n\r\n x_train = []\r\n y_train = []\r\n\r\n score, accuracy = model.evaluate(x_test, y_test, batch_size=4000)\r\n\r\n print(\"Model Score = {:.2f}\".format(score))\r\n print(\"Accuracy = {:.2f}\".format(accuracy * 100))\r\n model.save(\"text_model.h5\")\r\n print(\"--- text_model has been saved to current working directory ---\")\r\n\r\n m, s = divmod(calculate_time(sTime), 60)\r\n print(f\"--- Done training our RNN model in {int(m):02d}:{int(s):02d} minutes ---\")", "def __init__(self, input_dim, hidden_dim, output_dim):\r\n super(LstmEstimator, self).__init__()\r\n \r\n # The LSTM takes track features as inputs, and outputs hidden states\r\n # with dimensionality hidden_dim\r\n self.lstm = nn.LSTM(input_dim, hidden_dim)\r\n \r\n self.hidden2target = nn.Linear(hidden_dim, output_dim)", "def MultiLSTM(inputs,weights):\n w_kernel = weights['w_kernel']\n w_recurrent = weights['w_recurrent']\n w_bias = weights['w_bias']\n T = inputs.shape[-2]\n H = []\n for t in range(T):\n if t > 0:\n z = tf.einsum('knx,xj->knj',inputs[:,:,t,:],w_kernel) + tf.einsum('knl,lj->knj',h,w_recurrent) + tf.expand_dims(w_bias,0)\n else:\n z = tf.einsum('knx,xj->knj',inputs[:,:,t,:],w_kernel) + tf.expand_dims(w_bias,0)\n i,f,o,u = tf.split(z,4,axis=-1)\n i = tf.sigmoid(i) #input gate\n f = tf.sigmoid(f + 1.0) #forget gate\n o = tf.sigmoid(o) #output gate\n u = tf.tanh(u) #information let in by input gate\n if t > 0:\n c = f * c + i * u\n else:\n c = i * u\n h = o * tf.tanh(c)\n H.append(h)\n H = tf.stack(H,-2)\n return(H)", "def lstm(self):\n # Model.\n model = Sequential()\n model.add(LSTM(2048, return_sequences=True,\n input_shape=self.input_shape,\n dropout=0.0))\n #model.add(Flatten()) #qiao_added\n # model.add(Dense(1024, activation='relu'))\n 
model.add(Dropout(0.5))\n model.add(Dense(2048, activation='relu'))\n model.add(Dropout(0.5))\n model.add(Flatten())\n model.add(Dense(self.nb_classes, activation='softmax'))\n return model", "def __init__(self, vocab_size, output_size, embedding_dim, hidden_dim, n_layers, drop_prob=0.5):\n super(SentimentRNN, self).__init__()\n\n self.output_size = output_size\n self.n_layers = n_layers\n self.hidden_dim = hidden_dim\n \n # embedding and LSTM layers\n self.embedding = nn.Embedding(vocab_size, embedding_dim)\n self.lstm = nn.LSTM(embedding_dim, hidden_dim, n_layers,\n dropout=drop_prob, batch_first=True)\n \n # dropout layer\n self.dropout = nn.Dropout(0.3)\n \n # linear and sigmoid layer\n self.fc = nn.Linear(hidden_dim, output_size)\n self.sig = nn.Sigmoid()", "def _lstm_unroll_base(num_lstm_layer, seq_len, num_hidden):\n param_cells = []\n last_states = []\n for i in range(num_lstm_layer):\n param_cells.append(LSTMParam(i2h_weight=mx.sym.Variable(\"l%d_i2h_weight\" % i),\n i2h_bias=mx.sym.Variable(\"l%d_i2h_bias\" % i),\n h2h_weight=mx.sym.Variable(\"l%d_h2h_weight\" % i),\n h2h_bias=mx.sym.Variable(\"l%d_h2h_bias\" % i)))\n state = LSTMState(c=mx.sym.Variable(\"l%d_init_c\" % i),\n h=mx.sym.Variable(\"l%d_init_h\" % i))\n last_states.append(state)\n assert len(last_states) == num_lstm_layer\n\n # embedding layer\n data = mx.sym.Variable('data')\n wordvec = mx.sym.SliceChannel(data=data, num_outputs=seq_len, squeeze_axis=1)\n\n hidden_all = []\n for seqidx in range(seq_len):\n hidden = wordvec[seqidx]\n for i in range(num_lstm_layer):\n next_state = _lstm(\n num_hidden=num_hidden,\n indata=hidden,\n prev_state=last_states[i],\n param=param_cells[i],\n seqidx=seqidx,\n layeridx=i)\n hidden = next_state.h\n last_states[i] = next_state\n hidden_all.append(hidden)\n\n hidden_concat = mx.sym.Concat(*hidden_all, dim=0)\n pred_fc = mx.sym.FullyConnected(data=hidden_concat, num_hidden=11, name=\"pred_fc\")\n return pred_fc", "def __init__(self, vocab_size, output_size, embedding_dim, hidden_dim, n_layers, seq_len, drop_prob=0.5):\n super(SentimentRNN, self).__init__()\n\n self.output_size = output_size\n self.n_layers = n_layers\n self.hidden_dim = hidden_dim\n self.seq_len = seq_len\n \n self.bidirectional=True\n \n # define all layers\n self.emb_layer = nn.Embedding(num_embeddings=vocab_size, embedding_dim=embedding_dim,\n padding_idx=None)\n self.lstm = nn.LSTM(input_size=embedding_dim, hidden_size=hidden_dim,\n num_layers=n_layers, batch_first=True, dropout=drop_prob,\n bidirectional=self.bidirectional)\n \n self.dropout = nn.Dropout(drop_prob)\n self.output_layer = nn.Linear(in_features=hidden_dim, out_features=output_size)", "def run_single_step(self): \n contexts = tf.placeholder(tf.float32, [self.batch_size, self.num_ctx, self.dim_ctx]) \n last_memory = tf.placeholder(tf.float32, [self.batch_size, self.dim_hidden])\n last_output = tf.placeholder(tf.float32, [self.batch_size, self.dim_hidden])\n last_word = tf.placeholder(tf.int32, [self.batch_size])\n initial_step = tf.placeholder(tf.bool)\n\n context_mean = tf.reduce_mean(contexts, 1) \n\n lstm = tf.nn.rnn_cell.LSTMCell(self.dim_hidden, initializer=tf.random_normal_initializer(stddev=0.033)) \n\n # Attention mechanism\n alpha = self.attend(contexts, last_output) \n weighted_context = tf.cond(initial_step,\n lambda: tf.identity(context_mean),\n lambda: tf.reduce_sum(contexts*tf.expand_dims(alpha, 2), 1))\n\n word_emb = tf.cond(initial_step, \n lambda: tf.zeros([self.batch_size, self.dim_embed]), \n lambda: 
tf.nn.embedding_lookup(self.emb_w, last_word))\n \n # Apply the LSTM\n with tf.variable_scope(\"LSTM\"):\n last_state = last_memory, last_output\n output, state = lstm(tf.concat([weighted_context, word_emb], 1), last_state)\n memory, _ = state\n \n # Compute the logits and probs\n expanded_output = tf.concat([output, weighted_context, word_emb], 1)\n\n logits1 = fully_connected(expanded_output, self.dim_dec, 'dec_fc')\n logits1 = nonlinear(logits1, 'tanh')\n logits2 = tf.nn.xw_plus_b(logits1, self.dec_w, self.dec_b)\n probs = tf.nn.softmax(logits2) \n logprobs = tf.log(probs)\n\n tf.get_variable_scope().reuse_variables() \n\n self.contexts = contexts\n self.last_memory = last_memory\n self.last_output = last_output\n self.last_word = last_word\n self.initial_step = initial_step\n\n self.memory = memory\n self.output = output\n self.logprobs = logprobs", "def lstm_layer(self):\n if self.pooling:\n ret_seq = True\n else:\n ret_seq = False\n ker_in = glorot_uniform(seed=self.seed)\n rec_in = Orthogonal(seed=self.seed)\n if self.type_of_weights == \"shared\":\n if self.recurrent == \"bilstm\" or self.recurrent is None:\n out_a = Bidirectional(LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq), merge_mode='concat')\n elif self.recurrent == \"lstm\":\n out_a = LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq)\n return out_a, out_a\n elif self.type_of_weights == \"separate\":\n if self.recurrent == \"bilstm\" or self.recurrent is None:\n out_a = Bidirectional(LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq), merge_mode='concat')\n out_b = Bidirectional(LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq), merge_mode='concat')\n elif self.recurrent == \"lstm\":\n out_a = LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq)\n out_b = LSTM(self.hidden_dim,\n input_shape=(self.max_sequence_length, self.embedding_dim,),\n kernel_initializer=ker_in,\n recurrent_initializer=rec_in,\n return_sequences=ret_seq)\n return out_a, out_b", "def __init__(self, vocab_size, output_size, embedding_dim, hidden_dim, n_layers, drop_prob=0.5):\n super(SentimentRNN, self).__init__()\n\n self.output_size = output_size\n self.n_layers = n_layers\n self.hidden_dim = hidden_dim\n\n # embedding and LSTM layers\n self.embedding = nn.Embedding(vocab_size, embedding_dim)\n self.lstm = nn.LSTM(embedding_dim, hidden_dim, n_layers,\n dropout=drop_prob, batch_first=True)\n\n # dropout layer\n self.dropout = nn.Dropout(0.3)\n\n # linear and sigmoid layers\n self.fc = nn.Linear(hidden_dim, output_size)\n self.sig = nn.Sigmoid()", "def __init__(self,\n embedding_size,\n hidden_size,\n vocab_size,\n num_layers=1):\n super(RNNDecoder,self).__init__()\n self.embed = nn.Embedding(vocab_size,embedding_size)\n self.lstm = nn.LSTM(embedding_size,hidden_size,num_layers,batch_first=True)\n self.linear = nn.Linear(hidden_size,vocab_size)", "def forward(self, X, X_lens):\n\n\t\t# get the batch size and sequence length (max length of the 
batch)\n\t\t# dim of X: batch_size x batch_max_len x input feature vec dim\n\t\tbatch_size, seq_len, _ = X.size()\n\n\t\t###Your code here --\n\t\t# Get the output of LSTM - (output dim: batch_size x batch_max_len x lstm_hidden_dim)\n\t\thid, _ = self.lstm(X)\n\t\t# print('hid \\n{}'.format(hid))\n\n\t\t# reshape (before passing to linear layer) so that each row contains one token\n\t\t# essentially, flatten the output of LSTM\n\t\t# dim will become batch_size*batch_max_len x lstm_hidden_dim\n\t\thid_flat = hid.contiguous().view(hid.shape[0] * hid.shape[1], hid.shape[2])\n\t\t# print('hid_flat \\n{}'.format(hid_flat))\n\n\t\t# Get logits from the final linear layer\n\t\tlogits = self.hidden_to_label(hid_flat)\n\t\t# print('logits \\n {}'.format(logits))\n\t\t# print('logits shape {}, hid shape {}, hid_flat shape {}'.format(logits.shape, hid.shape, hid_flat.shape))\n\t\t# --shape of logits -> (batch_size, seq_len, self.n_output)\n\t\treturn logits", "def forward(self, x):\n x = tensor(x).unsqueeze(1)\n x = self.cnn(x)\n\n # LSTM from here\n batch_size = x.shape[0]\n x = x.view(batch_size, x.shape[1] * x.shape[2], x.shape[3])\n x = x.permute(2, 0, 1) # Converting from (B,H,W)->(W,B,H)\n\n output = self.rnn(x)\n return output", "def forward(self, t):\n x = self.embeddings(t)\n logits = self.model(x.view(x.shape[0], -1))\n return logits", "def forward(self, embedded_words, input_lens):\r\n # Takes the embedded sentences, \"packs\" them into an efficient Pytorch-internal representation\r\n packed_embedding = nn.utils.rnn.pack_padded_sequence(embedded_words, input_lens, batch_first=True,\r\n enforce_sorted=False)\r\n # Runs the RNN over each sequence. Returns output at each position as well as the last vectors of the RNN\r\n # state for each sentence (first/last vectors for bidirectional)\r\n # print('here', packed_embedding.size())\r\n output, hn = self.rnn(packed_embedding)\r\n # Unpacks the Pytorch representation into normal tensors\r\n output, sent_lens = nn.utils.rnn.pad_packed_sequence(output)\r\n max_length = max(input_lens.data).item()\r\n context_mask = self.sent_lens_to_mask(sent_lens, max_length)\r\n\r\n # Note: if you want multiple LSTM layers, you'll need to change this to consult the penultimate layer\r\n # or gather representations from all layers.\r\n if self.bidirect:\r\n h, c = hn[0], hn[1]\r\n # Grab the representations from forward and backward LSTMs\r\n h_, c_ = torch.cat((h[0], h[1]), dim=1), torch.cat((c[0], c[1]), dim=1)\r\n # Reduce them by multiplying by a weight matrix so that the hidden size sent to the decoder is the same\r\n # as the hidden size in the encoder\r\n new_h = self.reduce_h_W(h_)\r\n new_c = self.reduce_c_W(c_)\r\n h_t = (new_h, new_c)\r\n else:\r\n h, c = hn[0][0], hn[1][0]\r\n h_t = (h, c)\r\n output = self.reduce_h_W(output)\r\n # print(output.shape, h_t[0].shape)\r\n return output, context_mask, h_t", "def sample_beam(model, input_embedding, char2idx, idx2char, k=5, maxlen=30,\n start='START', use_head=True):\n with torch.no_grad():\n device = input_embedding.device\n softmax = nn.Softmax(dim=1)\n if use_head:\n input_embedding = input_embedding.view(1, -1)\n\n inp = [torch.LongTensor([char2idx[start]]).to(device)]\n inp = nn.utils.rnn.pack_sequence(inp)\n out, hidden = model(input_embedding, inp, use_head=use_head)\n\n out = softmax(out.data).view(-1).cpu().numpy()\n max_k = np.argsort(out)[-k:][::-1]\n oldprobs = out[max_k]\n words = [[i] for i in max_k]\n inp = pack([torch.LongTensor([j]).to(device) for j in max_k])\n\n if model.mode == 'LSTM':\n 
hidden0 = torch.cat([hidden[0] for i in range(k)], dim=1)\n hidden1 = torch.cat([hidden[1] for i in range(k)], dim=1)\n hidden = hidden0, hidden1\n else:\n hidden = torch.cat([hidden for i in range(k)], dim=1)\n WORDS = []\n for c in range(maxlen):\n out, hidden = model(hidden, inp, use_head=False)\n out = softmax(out.data).cpu().numpy()\n\n #print(out.shape)\n inpnp = inp.data.detach().cpu().numpy()\n done = np.where(inpnp == char2idx['END'])\n out[done] = 0\n if len(out[done]) != 0:\n #print(out[done].shape)\n for d in done[0]:\n out[d][char2idx['END']] = 1\n #print(done)\n\n #print(out)\n #print(out[done])\n out = (oldprobs.reshape(-1, 1)*out)\n max_k = np.argsort(out)[:, -k:][:, ::-1]\n\n #print(max_k)\n probs = np.array([out[i][max_k[i]] for i in range(k)])\n #print(probs)\n flat = probs.reshape(-1)\n max_k2 = np.argsort(flat)[::-1][:k]\n word_inds = max_k2//k\n next_chars_inds = max_k2%k\n\n oldprobs = flat[max_k2]\n #print(oldprobs)\n\n new_words = []\n new_inp = []\n for i, word_ind in enumerate(word_inds):\n next_char = max_k[word_ind][next_chars_inds[i]]\n if next_char == char2idx['END']:\n #print(\"HIT AN END at word {}\".format(word_ind))\n WORDS.append((words[word_ind], oldprobs[i]))\n #the_word = words[word_ind]\n #return ''.join([idx2char[i] for i in the_word])\n new_inp.append(torch.LongTensor([next_char]).to(device))\n word = words[word_ind][:]\n word = word + [next_char]\n new_words.append(word)\n words = new_words[:]\n\n if model.mode == 'LSTM':\n h1, h2 = hidden\n h1, h2 = h1[0][word_inds].view(1, k, -1), h2[0][word_inds].view(1, k, -1)\n hidden = h1, h2\n else:\n hidden = hidden[0][word_inds].view(1, k, -1)\n\n\n inp = pack(new_inp)\n\n return [''.join([idx2char[i] for i in word if i != char2idx['END']]) for word in words], oldprobs" ]
[ "0.69259703", "0.6396579", "0.63238174", "0.6247593", "0.6204757", "0.62018347", "0.62017447", "0.6161664", "0.61093134", "0.6070913", "0.6062975", "0.60444057", "0.6001166", "0.5980393", "0.59802103", "0.5965758", "0.5951675", "0.5950302", "0.5941786", "0.5909258", "0.5893609", "0.5869647", "0.5863272", "0.5860783", "0.5843536", "0.5841134", "0.58181155", "0.5810438", "0.5792893", "0.5780377" ]
0.65407586
1
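The values above close a row whose positive document and negatives are PyTorch/Keras LSTM snippets; in the schema order given at the top of this dump, the trailing fields are the row's negative_scores, document_score, and document_rank. Per the metadata.objective field, rows are intended to be consumed as (query, document, negatives) triplets. A minimal sketch of unpacking one row for that purpose is below; the `row` argument is any single record as a plain dict, which is an assumption about how the dump is loaded:

def to_triplets(row):
    # Form (anchor, positive, negative) training triples from one row.
    # Field names follow the schema listed at the top of this dump.
    anchor = row['query']
    positive = row['document']
    return [(anchor, positive, neg) for neg in row['negatives']]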
Get teams owned by the account.
def get_teams(self, account_id):
    endpoint = '/accounts/{}/teams'.format(account_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_teams(self):\n url = 'teams'\n result = self.get(url)\n return result.get('teams', result)", "def get_teams(self, *args, **kwargs):\n\n teams_data = api.get_teams(\n *args,\n api_key=self.__creds.api_key_v2,\n **kwargs)\n return [en.Team(creds=self.__creds, **team_data) for team_data in teams_data]", "def teams(self):\n return self._get_by_class(Team)", "def teams(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'teams')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def get_teams(self):\n url = '{}/api/v4/teams'.format(self.server_url)\n response = self._request(self._session.get, url)\n return json.loads(response.content)", "def get_teams():", "def get_all_teams(self):\n return self._db.Teams.find({})", "def get_user_teams(user_id):\n teams = []\n response = Litmos.get_user_teams(user_id)\n for res in response:\n try:\n team = Team.objects.get(team_id=res['Id'])\n teams.append(team)\n except Team.DoesNotExist:\n continue\n return teams", "def getAllTeams(self):\n return []", "def get_teams(self, *teams):\n teams = [convert_name(team, how='abbr') for team in teams]\n return self._db.Teams.find({'Tm' : {'$in' : teams}})", "def teams(self) -> 'outputs.OfficeDataConnectorDataTypesResponseTeams':\n return pulumi.get(self, \"teams\")", "def get_team_list(self):\n result = dict\n managers = User.get_users([UserRole.ProjectManager])\n for manager in managers:\n result.update({manager.get_username(): manager.get_team_members()})\n return build_team_list(result)", "def get_company_teams(self, company_referece):\n url = 'companies/{0}/teams'.format(company_referece)\n result = self.get(url)\n return result.get('teams', result)", "def get_teams():\n teams = []\n for teamId in range(1, 68):\n t = requests.get(TEAMS_URL.format(teamId)).json()\n team_list = t.get('teams')\n if team_list is None or len(team_list) == 0:\n continue\n teams.append(Team.from_json(team_list[0]))\n return teams", "def test_teams_get_users_teams_v1(self):\n pass", "def teams(self):\n t = [e[0] for e in self.pick_set.all().values_list(\"team\")]\n return Team.objects.filter(id__in=set(t))", "def test_teams_get_users_teams_v2(self):\n pass", "def test_get_all_for_team(self):\n team = Team.create(name='foo', captain_id='User_cap',\n program_id=self.program.uid)\n team.put()\n user = User.create(name='foo', email='[email protected]',\n owned_teams=[team.uid])\n user.put()\n response = self.testapp.get(\n '/api/teams/{}/users'.format(team.uid),\n headers=self.login_headers(user),\n )\n response_list = json.loads(response.body)\n self.assertEqual(len(response_list), 1)", "def teams():\n print 'Getting Teams'\n\n substring = \"%\" + request.args.get('t') + \"%\"\n\n team_list = datastore.get_teams_typeahead(engine, substring, max_teams=10)\n\n print 'Teams:', team_list\n return jsonify(team_list)", "def list_teams(self, user_id=None):\n # Create initial query with placeholder for the table that the teams\n # are selected from.\n sql = 'SELECT t.id, t.name, t.owner_id, COUNT(*) as member '\n sql += 'FROM {} t, team_member m WHERE t.id = m.team_id '\n sql += 'GROUP BY t.id, t.name, t.owner_id'\n # Depending on whether the user id is given the teams are either\n # taken directly from the teams table of a sub-query that filters\n # teams that the user is member of.\n if not user_id is None:\n team_table = 'SELECT id, name, owner_id FROM team t1, team_member m1 '\n team_table += 'WHERE t1.id = m1.team_id AND m1.user_id = ?'\n team_table = '(' + team_table + ')'\n bindings = (user_id,)\n else:\n team_table = 
'team'\n bindings = ()\n sql = sql.format(team_table)\n result = list()\n for team in self.con.execute(sql, bindings).fetchall():\n result.append(\n TeamDescriptor(\n identifier=team['id'],\n name=team['name'],\n owner_id=team['owner_id'],\n member_count=team['member']\n )\n )\n return result", "def users(self, team, params={}, **options):\n path = \"/teams/%s/users\" % (team)\n return self.client.get_collection(path, params, **options)", "def get_team(uid=None):\n user = get_user(uid=uid)\n return api.team.get_team(tid=user[\"tid\"])", "def test_get_teams(self):\n owner2 = AnotherUserFactory(email_confirmed=True)\n owner3 = AnotherUserFactory(username='team owner 3', email='[email protected]', email_confirmed=True,)\n TeamFactory(owner=owner2, name='second team')\n TeamFactory(owner=owner3, name='third team')\n\n usual_user = UserFactory(\n username='usualuser',\n email='[email protected]',\n email_confirmed=True,\n )\n token = Token.objects.get(user=usual_user)\n self.client.credentials(\n HTTP_AUTHORIZATION=f'Token {token.key}')\n\n response = self.client.get(reverse('api:teams-list'))\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(len(response.data.get('results')), 3)", "def test_user_get_teams():\n app = create_ctfd()\n with app.app_context():\n register_user(app)\n client = login_as_user(app)\n r = client.get('/teams')\n assert r.status_code == 200\n destroy_ctfd(app)", "def test_teams_get_teams_v1(self):\n pass", "def get_available_teams(self):\n teams = self.request.user.team_set.filter(competition__is_open=True)\n if not teams.exists():\n msg = \"Can't send invites at this time. You're not\"\n msg += \" registered for any open competitions\"\n messages.error(self.request, msg)\n raise Http404(msg)\n return teams", "def list(self, request):\n teams = self.controller.retrieve_all_teams()\n serializer = data_serializers.PresentTeamSerializer(teams, many=True)\n return Response(serializer.data)", "def restricted_teams(self, user):\n return []", "def list_teams():\n name = request.args.get(\"name\", None)\n\n # Search team by name\n if name:\n team = TeamController.get(filters={\"Team\": {\"name\": name}})\n return jsonify(format_team(team)), 200\n\n # Otherwise list of the teams\n teams = TeamController.list()\n return jsonify({\"teams\": [format_team(s) for s in teams]}), 200", "def get_teams(event):\n teams_raw = tba_session.get(BASE_URL + '/event/%s/teams/keys' % event).json()\n teams = []\n for team_raw in teams_raw:\n teams.append(team_raw[3:])\n return teams" ]
[ "0.7548286", "0.7198719", "0.7053132", "0.70429367", "0.70303184", "0.6919348", "0.6908395", "0.6904372", "0.68936974", "0.6875712", "0.6783006", "0.67675626", "0.67675245", "0.6744305", "0.6626937", "0.66002136", "0.65531397", "0.64464307", "0.6432558", "0.6408208", "0.6394704", "0.6379137", "0.6224228", "0.6205131", "0.6189481", "0.61880904", "0.6182936", "0.6147859", "0.61425835", "0.61417276" ]
0.7758869
0
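The positive document in the row above is a thin REST wrapper. A minimal sketch of the surrounding client it implies is below; the `Client` class name, bearer-token transport, and host are assumptions for illustration, since the row shows only the endpoint template:

import requests

class Client:
    """Hypothetical wrapper matching the shape of the documents above."""

    def __init__(self, token, host='https://api.example.com'):
        # Token and base URL are assumptions; the rows only show the
        # endpoint templates, not the transport details.
        self.token = token
        self.host = host

    def _api_call(self, method, endpoint, payload=None):
        # Send the HTTP request and return the decoded JSON body.
        url = self.host + endpoint
        headers = {'Authorization': 'Bearer {}'.format(self.token)}
        response = requests.request(method, url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()

    def get_teams(self, account_id):
        # Identical to the positive document in the row above.
        endpoint = '/accounts/{}/teams'.format(account_id)
        return self._api_call('get', endpoint)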
Get projects owned by the team.
def get_projects(self, team_id):
    endpoint = '/teams/{}/projects'.format(team_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_projects(self):\n return self._gitlab.owned_projects(per_page=1000)", "def get_projects(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/projects\").json()", "def get_projects(self):\n projects = []\n page = 1\n while not len(projects) % 100:\n projects += self._get('/projects?{0}'.format(urllib.urlencode({'per_page': 100, 'page': page})))\n if not projects:\n break\n page += 1\n return projects", "def get_projects(self):\n return conf.projects", "def get_projects(self):\n response = self.request(verb=requests.get, address=\"projects\")\n # FIXME: if no results, must we raise an exception?\n return response[\"results\"] if \"results\" in response else response", "def get_projects(self):\n return self.jira.projects()", "def get_projects(self):\n res = self.conn.cursor().execute(\"SELECT * FROM projects\")\n return res.fetchall()", "def get_projects():\n if current_user.get_id() is None:\n return\n with database.engine.begin() as connection:\n result = connection.execute(select(\n [models.projects.c.project_id, models.projects.c.name, models.projects.c.path, models.projects.c.creation_date, models.projects.c.user_id, func.count(models.objects.c.object_id).label('object_count')])\n .select_from(models.projects.outerjoin(models.objects))\n .where(and_(models.projects.c.active == True, models.projects.c.user_id == current_user.id))\n .group_by(models.projects.c.project_id)\n .order_by(models.projects.c.project_id))\n projects = [dict(row) for row in result]\n for project in projects:\n user = models.User.query.filter_by(\n id=project['user_id']).first()\n if user:\n project['email'] = user.email\n return projects", "def getProjects(self):\n\n return self.__projects", "def projects(self):\r\n return p.Projects(self)", "def project_list(self):\n try:\n ids = self.request[api.DATA][api.DATA][\"ids\"]\n return self._get_keystone_projects(ids)\n except Exception as e:\n LOG.exception(\"Error occurred: %s\" % e)", "def get_all_projects(self, scope):\n url = \"{0}/{1}/{2}\".format(self.keystone_server_url, DEFAULT_KEYSTONE_API_VERSION, \"projects\")\n headers = {'X-Auth-Token': scope.auth_token}\n try:\n r = self._make_request_with_auth_fallback(url, headers)\n return r['projects']\n\n except Exception as e:\n self.warning('Unable to get projects: %s', e)\n raise e\n\n return None", "def repository_projects(self, host: (str), owner: (str), repo: (str)) -> Any:\n\n return search_api(\"repository_projects\", host, owner, repo)", "def get_projects(self, *criterion):\n from wkcdd.models.helpers import get_project_list\n return get_project_list([self.id], *criterion)", "def get_projects():\n return Project.query.all()", "def get_projects(self):\n session = self.session_factory()\n results = [row.project for row in session.query(PipelineRun.project.distinct().label('project')).all()]\n session.close()\n return results", "def get_for(user):\n return Project.objects.filter(\n user_group__members=user\n ).distinct()", "def get_projects(self):\n if not self.validate():\n raise SettingCustomVisionAccessFailed\n return self.get_trainer_obj().get_projects()", "def projects(self):\n ret_val = []\n params = {\"fields\": Project.FIELDS}\n projects = self._request(\"get\", \"projects\", params=params)\n\n for project in projects:\n ret_val.append(Project(project))\n\n return ret_val", "def projects(args):\n _projects = lib.get_projects(\n args.target, username=args.username, password=args.password\n )\n if _projects:\n print(\"\\n\".join(_projects))", "def get_all_projects(self, org):\n return 
[proj for proj in Project.objects.filter(org=org)]", "def get_projects(self, _is_simple=False):\n req_url = f\"{self.url}/projects\"\n if _is_simple:\n req_url += \"?simple=true\"\n ret = requests.get(req_url, headers = self.req_header)\n return ret.json()", "def get_queryset(self):\n path_issue = str(self.request.path).split(\"/projects/\")[1]\n projects_pk = int(path_issue.split(\"/\")[0])\n\n return Contributor.objects.filter(project=projects_pk)", "def get_projects(self):\n rps = self.start_date\n\n return Project.objects.filter(\n Q(active=True)\n & Q(\n Q(start_date__lte=rps)\n | Q(\n Q(start_date__gte=rps)\n & Q(start_date__lte=datetime.datetime.now().date())\n )\n | Q(start_date__isnull=True)\n )\n & Q(\n Q(end_date__gte=rps)\n | Q(end_date__isnull=True)\n )\n )", "def active_projects(self):\n return self.projects.filter(active=True)", "def projects(self):\n campaigns = self.campaigns.all()\n return Project.published_objects.filter(campaigns__in=campaigns)", "def get_user_projects(username):\n\n tx = cypher_transaction()\n query = \"\"\"\n MATCH (p:project)-[:OWNED_BY]->(u:user {username:{uname}})\n RETURN p\n \"\"\"\n tx.append(query, parameters={'uname': username})\n results = _first(tx.commit())\n projects = []\n for r in results:\n proj, = r.values\n print(\"* {0}\".format(proj['name']))\n projects.append(proj)\n return projects", "def get_projects_of_user(self, user_id):\n res = self.conn.cursor().execute(\"\"\"SELECT * FROM projects p JOIN users_projects up \n ON p.id = up.project_id \n WHERE owner=? OR up.user_id=?\n GROUP BY p.id\n ORDER BY last_update DESC\"\"\", (user_id, user_id,))\n return res.fetchall()", "def projects():\n response = jsonify(projects_service.get_top_level_projects_ids())\n return response", "def get_projects(self):\n ret = self.v1_projects.get()\n return [each.metadata.name for each in ret.items]" ]
[ "0.8055631", "0.71431756", "0.6982873", "0.6950596", "0.69087213", "0.68157524", "0.6773247", "0.6765266", "0.67424154", "0.66995823", "0.66808534", "0.6675837", "0.66692793", "0.6655742", "0.66494966", "0.6611001", "0.66000354", "0.6599352", "0.6567951", "0.65166676", "0.65141135", "0.6510823", "0.6510153", "0.65014684", "0.64882296", "0.6461389", "0.6432126", "0.6346145", "0.6316578", "0.6311803" ]
0.720342
1
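The get_projects document above follows the same pattern as get_teams, so the two chain naturally. A hypothetical traversal is sketched below; the list-of-dicts response shape and the 'id' key are assumptions, since the rows do not specify the response schema:

def iter_projects(client, account_id):
    # Chain the two endpoints above: account -> teams -> projects.
    for team in client.get_teams(account_id):
        for project in client.get_projects(team['id']):
            yield project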
Get an asset by id.
def get_asset(self, asset_id):
    endpoint = '/assets/{}'.format(asset_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_asset(self, asset_id):\n text, code = ApiClient(self._config, 'assets/' + asset_id).get()\n return Asset.deserialize(text)", "def get_asset(self, asset_id, asset_type):\n return self.asset(asset_id, asset_type=asset_type)", "def asset(self, asset_id):\n headers, items = self._get('/asset/%s' % asset_id)\n return Asset.fromdict(items[0], api=self, full=True)", "def retrieveAsset(self, assetId):\n return self.get_json('/asset/%s' % assetId)", "def get_asset(self, id):\n\n if not isinstance(id, six.string_types):\n raise ValueError('Param \"id\" must be a str|unicode.')\n\n asset = self.stub.get_asset(opac_pb2.TaskId(id=id))\n\n return {\n 'file': asset.file,\n 'filename': asset.filename,\n 'type': asset.type,\n 'metadata': asset.metadata,\n 'task_id': asset.task_id\n }", "def retrieve_asset(self, sid, default_none=False):\n try:\n asset = self._asset_cache[sid]\n if asset is None and not default_none:\n raise SidsNotFound(sids=[sid])\n return asset\n except KeyError:\n return self.retrieve_all((sid,), default_none=default_none)[0]", "def getAssetWithName(self, name):\n return self.__assets[name]", "def retrieve_asset(self, site_id: Identifier, asset_id: Identifier\n ) -> Asset:\n try:\n site = self._registry_client.get_site_by_id(site_id)\n except KeyError:\n raise RuntimeError(f'Site or store at site {site_id} not found')\n\n if site.has_store:\n safe_asset_id = quote(asset_id, safe='')\n r = requests.get(\n f'{site.endpoint}/assets/{safe_asset_id}',\n params={'requester': self._site},\n verify=self._verify, cert=self._cred)\n if r.status_code == 404:\n raise KeyError('Asset not found')\n elif not r.ok:\n raise RuntimeError('Server error when retrieving asset')\n\n asset_json = r.json()\n validate_json('Asset', asset_json)\n return deserialize(Asset, asset_json)\n\n raise ValueError(f'Site {site_id} does not have a store')", "def get_asset(self, name):\n assert self.has_asset(name), \"Asset is not created yet, use has_asset for checking\"\n return self.assets[name]", "def get_by_id(cls, id):\n e = api.get([key.Key(cls.__name__, id)])\n if e:\n return cls.from_entity(e[0])\n raise ObjectDoesNotExist", "def get_url_asset(self, asset_id):\n return self.get_asset(asset_id, 'URL')", "def getbyid(self, id):\n\n return esd.retrieve(id)", "def getItem(self, id):\n path = 'item/' + id\n return self.sendRestRequest('GET', path)", "def get_by_id(cls, id):\n return cls.query().get(id)", "def get_asset_info(self, id):\n\n if not isinstance(id, six.string_types):\n msg = 'Param id must be a str|unicode.'\n logger.exception(msg)\n raise ValueError(msg)\n\n asset_info = self.stub.get_asset_info(opac_pb2.TaskId(id=id))\n\n return {\n 'url': asset_info.url,\n 'url_path': asset_info.url_path\n }", "def asset(self, asset_id, asset_type, action='GET'):\n if not self.can_update():\n self._handle_error(910, [self.type])\n\n asset_methods = {\n 'handle': self.tc_requests.adversary_handle_asset,\n 'phone': self.tc_requests.adversary_phone_asset,\n 'url': self.tc_requests.adversary_url_asset,\n }\n\n # handle invalid input\n if asset_methods.get(asset_type.lower()) is None:\n self._handle_error(\n 925, ['asset_type', 'assets', 'asset_type', 'asset_type', asset_type]\n )\n\n return asset_methods[asset_type.lower()](self.unique_id, asset_id, action=action)", "def get_object(self, id_):\n return self._objects.get(id_, None)", "def get(id=None):\n return requests.get(\"/{}\".format(id))", "def get_object(self, id, **args):\n return self.request(\"{0}/{1}\".format(self.version, id), args)", "def get(self, 
id):\n file = (\n self.drive.files()\n .get(\n fileId=id,\n fields=\"id, name\",\n supportsAllDrives=self.shared_drive[0],\n )\n .execute()\n )\n return file", "def get_image_by_id(id):\n return Image.objects.get(id=id)", "def get(cls, id):\n\n return cls.query.get(id)", "def get(cls, id):\n\n return cls.query.get(id)", "def get_handle_asset(self, asset_id):\n return self.get_asset(asset_id, 'HANDLE')", "def image_by_id(self, id):\n if not id:\n return None\n return next((image for image in self.images() if image['Id'] == id),\n None)", "def get(self, id):\n activity = Activity().get(id)\n if not activity:\n abort(404, \"Activity not found\")\n return activity", "def get_asset(self, short_name):\n return self._assets[short_name]", "def get_asset(location, filename):\r\n return contentstore().find(Transcript.asset_location(location, filename))", "def get_asset(collection=None, _id=None, **params):\n\n if collection == 'images':\n return models.images.Image(_id=_id)\n elif collection == 'figure':\n return models.figures.Figure(_id=_id, **params)\n elif collection == 'figures':\n return models.figures.Figure(_id=_id, **params)\n elif collection == 'posts':\n return models.posts.Post(_id=_id, **params)\n elif collection == 'post':\n return models.posts.Post(_id=_id, **params)\n elif collection == 'paint':\n return models.posts.Paint(_id=_id, **params)\n elif collection == 'paints':\n return models.posts.Paint(_id=_id, **params)\n elif collection == 'attachment':\n return models.posts.Attachment(_id=_id, **params)\n elif collection == 'attachments':\n return models.posts.Attachment(_id=_id, **params)\n elif collection == 'tag':\n return models.posts.Tag(_id=_id, **params)\n elif collection == 'tags':\n return models.posts.Tag(_id=_id, **params)\n\n raise ValueError('get_asset() is not supported for %s yet!' % collection)", "def asset_info(self, asset_id):\n response = self._client.get('workbenches/assets/%(asset_id)s/info',\n path_params={'asset_id': asset_id})\n return AssetInfo.from_dict(loads(response.text).get('info'))" ]
[ "0.8605014", "0.8052259", "0.7977259", "0.78436553", "0.7251006", "0.7176857", "0.69656426", "0.6938977", "0.6917505", "0.66144365", "0.65986526", "0.6570589", "0.6543234", "0.6528059", "0.64717233", "0.6465809", "0.6415963", "0.6392111", "0.638581", "0.6384934", "0.6381671", "0.63442415", "0.63442415", "0.6341465", "0.6339361", "0.6317151", "0.6308187", "0.6304288", "0.6278947", "0.6274152" ]
0.8775442
0
Get an asset's children.
def get_asset_children(self, asset_id):
    endpoint = '/assets/{}/children'.format(asset_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def children(self) -> \"AssetList\":\n return self._cognite_client.assets.list(parent_ids=[self.id], limit=None)", "def get_children(self):\r\n\r\n if not self.has_children:\r\n return []\r\n\r\n if getattr(self, '_child_instances', None) is None:\r\n self._child_instances = [] # pylint: disable=attribute-defined-outside-init\r\n for child_loc in self.children:\r\n try:\r\n child = self.runtime.get_block(child_loc)\r\n child.runtime.export_fs = self.runtime.export_fs\r\n except ItemNotFoundError:\r\n log.exception(u'Unable to load item {loc}, skipping'.format(loc=child_loc))\r\n continue\r\n self._child_instances.append(child)\r\n\r\n return self._child_instances", "def get_children(self):\n return self.children", "def get_children(self):\n return self.children", "def get_children(self):\n return self.children", "def get_children(self):\n return self.children", "def get_children(self):\r\n return self.children", "def get_children(self, item, level):\n return item.children", "def get_children(self):\r\n return self._children", "def get_children(self):\n return self._children", "def get_children(self):\n return self.items", "def GetChildren(self):\r\n\r\n return self._children", "def subtree(self, depth: int = None) -> \"AssetList\":\n return self._cognite_client.assets.retrieve_subtree(id=self.id, depth=depth)", "def getChildren(self):\n \n return self._children", "def children(self):\n \n return self._children", "def get_children(self):\n\n pass", "def children(self):\n return self._children", "def children(self):\n return self._children", "def children(self):\n return list(self._children)", "def children(self):\n return self.contents", "def children(self):\n return self.contents", "def children(self, path):\n url = u'/'.join(\n [self.conf[\"api\"], \"path\", escape_path(path).strip('/'), \"@children\"])\n params = {}\n self.logger.info(path)\n self.logger.debug(url)\n return self._get_iter(url, params)", "def get_children(self):\n return [node for node in self._children.values()]", "def GetChildren(self, *args, **kwargs):\n pass", "def get_children(self, refobj):\n children = cmds.listConnections(\"%s.children\" % refobj, d=False)\n if not children:\n children = []\n return children", "def get_b_children(self, b_obj):\n return [child for child in Blender.Object.Get()\n if child.parent == b_obj]", "def children(self) -> List[Region]:\n return self._children", "def GetChildren( self ):\n children = [\n cWrpr \n for cWrpr in GameNodePath.GetChildren( self ) \n if not cWrpr.data.getPythonTag( TAG_IGNORE )\n ]\n return children", "def get_children(self):\n return []", "def get_children(self):\n raise NotImplementedError()" ]
[ "0.80006397", "0.691222", "0.6860828", "0.68368614", "0.68368614", "0.68368614", "0.68265384", "0.6793676", "0.6740953", "0.6736009", "0.6700646", "0.6693795", "0.6691179", "0.6656253", "0.66030985", "0.6577579", "0.655715", "0.655715", "0.6483371", "0.64426744", "0.64426744", "0.64330935", "0.64138573", "0.64049226", "0.64038795", "0.6379516", "0.63567805", "0.63486683", "0.6339393", "0.63297254" ]
0.8664441
0
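A minimal usage sketch for the child-listing method in the record above. Nothing here comes from the source beyond the method body itself: the surrounding `Client` class, host URL, token handling, and the `requests`-based `_api_call` transport are all assumptions made only to keep the example self-contained.

```python
# Illustrative sketch only: the source shows just the method body, so the
# surrounding Client class, host, and token handling are assumptions.
import requests

class Client:
    def __init__(self, token, host='https://api.example.com'):
        self.token = token
        self.host = host

    def _api_call(self, method, endpoint, payload=None):
        # Hypothetical transport; the source only implies _api_call exists.
        resp = requests.request(
            method,
            self.host + endpoint,
            json=payload,
            headers={'Authorization': 'Bearer {}'.format(self.token)},
        )
        resp.raise_for_status()
        return resp.json()

    def get_asset_children(self, asset_id):
        endpoint = '/assets/{}/children'.format(asset_id)
        return self._api_call('get', endpoint)

# Usage (asset ID is made up):
# client = Client(token='dummy-token')
# for child in client.get_asset_children('1234-abcd'):
#     print(child.get('name'))
```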
Upload an asset. The method will exit once the file is uploaded.
def upload(self, asset, file):
    uploader = FrameioUploader(asset, file)
    uploader.upload()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def upload_asset(ctx, asset, release):\n\n try:\n\n gh = ctx.obj.github\n\n log.echo('Uploading {} to release {}...'\n .format(os.path.basename(asset), release), break_line=False)\n asset_url = gh.upload_asset(asset=asset, release=release)\n log.checkmark()\n log.echo('Uploaded asset: {}'.format(asset_url))\n return asset_url\n except BaseException as _:\n log.xmark()\n raise", "def put_upload(self):\n # print \"starting upload...\", self.current_upload['filepath']\n self.touch()\n self.log(\"STARTING_UPLOAD\", level=INFO)\n try:\n Backend.put_file(self.fileobj, self.current_upload[\"gcs_url\"])\n except exceptions.FilePutError as err:\n self.handle_put_error(err, self.fileobj)\n raise", "def upload(self, filename, file_path):\n return", "def upload_file(self, file_path, file_name, output_path):", "def upload_file( processor, user, local_path ):\n operations.publish_work_item(\n operations.create_asset_from_file(\n file_name = local_path,\n owner = user,\n producer = processor,\n child_number = 0,\n asset_class = models.AssetClass.UPLOAD ))", "async def _upload(self) -> None:\n\n # filename given?\n filename = str(uuid.uuid4()) if self.filename is None else self.filename\n\n # check\n if self._upload_path is None:\n raise ValueError(\"No upload URL given.\")\n\n # send data and return image ID\n async with aiohttp.ClientSession() as session:\n data = aiohttp.FormData()\n data.add_field(\"file\", self._buffer, filename=self.filename)\n async with session.post(self._upload_path, auth=self._auth, data=data, timeout=self._timeout) as response:\n if response.status == 401:\n log.error(\"Wrong credentials for uploading file.\")\n raise FileNotFoundError\n elif response.status != 200:\n log.error(f\"Could not upload file to filecache: {response.status} {response.reason}\")\n raise FileNotFoundError", "def upload_file(\n self, bucket_id: uplink.Path, filename: uplink.Path, file: uplink.Body\n ):\n pass", "def upload(self, file_path, bucket_name, file_name):\n\n self.client.upload_file(file_path, bucket_name, file_name)", "def upload_file(self, source, dest):\n print(f\"Uploading {source} to {dest}\")\n with open(source, \"rb\") as data:\n self.client.upload_blob(name=dest, data=data)", "def post_asset_update(lock, course):\r\n upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC)\r\n asset_location = course.id.make_asset_key('asset', 'sample_static.txt')\r\n url = reverse_course_url('assets_handler', course.id, kwargs={'asset_key_string': unicode(asset_location)})\r\n\r\n resp = self.client.post(\r\n url,\r\n json.dumps(assets._get_asset_json(\"sample_static.txt\", upload_date, asset_location, None, lock)),\r\n \"application/json\"\r\n )\r\n self.assertEqual(resp.status_code, 201)\r\n return json.loads(resp.content)", "def upload_file(self, file_name, bucket, destination_name):\n try:\n not self.client.upload_file(file_name, bucket, destination_name)\n except Exception as ex:\n raise ex", "def upload(ctx: click.Context, **kwargs):\n root_commands.cmd_upload(ctx.obj, **kwargs)", "def upload_finish(self, cloud_file):", "def post(self, slug = None):\n filename = self.request.form.get(\"filename\")\n imgdata = base64.b64decode(self.request.form['data'])\n stream = StringIO.StringIO(imgdata)\n content_length = len(imgdata)\n content_type = \"image/png\"\n\n asset = self.app.module_map.uploader.add(\n stream, \n filename = filename,\n content_type = content_type,\n content_length = content_length,\n )\n\n asset_id = unicode(asset._id)\n return {\n 'url' : self.url_for(\"asset\", asset_id = 
asset.variants['medium_user']._id),\n 'status' : \"success\",\n 'asset_id' : asset_id\n }", "def asset(self, asset):\n\n self._asset = asset", "def publish_asset(\n self,\n *,\n asset_id: str,\n asset_manifest_path: str,\n asset_selector: str,\n asset_type: \"AssetType\",\n ) -> None:\n ...", "def upload(self):\n if not self.prepare():\n Settings.err_print(\"unable to upload file - {}\".format(self.get_title()))\n return False\n self.backup()\n self.delete()\n return True", "def upload_file(Filename=None, Bucket=None, Key=None, ExtraArgs=None, Callback=None, Config=None):\n pass", "def upload_file(self, f):\n return self._telegraph.upload_file(f)", "def upload_file(self, file_name, content):\n return self.files.upload(file_name, content)", "def upload_file_handle(\n self,\n bucket: str,\n object_name: str,\n src_file_handle: typing.BinaryIO):\n raise NotImplementedError()", "def file_upload():\n\n click.secho('*** Uploading image...', fg='green')\n uploaded = _uploaded_file('cover.jpg')\n click.secho(json.dumps(uploaded, indent=2, sort_keys=True), fg='yellow')\n\n click.secho('*** Creating a Picture document for it...', fg='green')\n picture = _make_document('picture', title='cover image', sys_filename=uploaded['path'])\n click.secho(json.dumps(picture, indent=2, sort_keys=True), fg='yellow')\n\n click.secho('*** Attaching it to a Blueray as cover...', fg='green')\n slp = _make_document('movie', title='Silver Linings Playbook')\n blueray = _make_document('blueray', movie_id=slp['_id'], cover_id=picture['_id'])\n click.secho(json.dumps(blueray, indent=2, sort_keys=True), fg='yellow')", "def upload_asset(self, upload_json_path, max_speed=None, is_db=True,\n engine_type=\"aspera\", server_ip=None, server_port=None,\n transmit_type=\"upload_json\", network_mode=0, redis_flag=None,\n is_record=False, redis_obj=None):\n max_speed = max_speed if max_speed is not None else \"1048576\"\n cmd_params = [transmit_type, upload_json_path, '/', max_speed,\n 'false', 'input_bid']\n if is_db:\n db_ini_path = self.create_db_ini(upload_json_path)\n else:\n db_ini_path = None\n main_input_bid, main_user_id = get_share_info(self.api)\n cmd = self.trans.create_cmd(cmd_params, db_ini_path, engine_type, server_ip, server_port,\n main_user_id=main_user_id, main_input_bid=main_input_bid,\n network_mode=network_mode)\n\n return run_cmd(cmd, flag=True, logger=self.logger, is_record=is_record, redis_flag=redis_flag, redis_obj=redis_obj)", "def upload(self, upload_request):\n raise NotImplementedError", "def upload_file(cls, uri, fobj):\n msg = \"Backend doesn't implement upload_file()\"\n raise NotImplementedError(msg)", "def upload():\n file = None\n if 'file' in request.files:\n file = request.files['file']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return json_response(\n message=\"Upload successful\",\n result=\"/v/{}\".format(filename)\n )\n return json_response(\n message=\"Invalid filename or extension (jpg, png, gif)\",\n status_code=500\n )", "async def upload_file(self):\n logger.debug(\"uploading %s\", self.tgzfile)\n with aiohttp.MultipartWriter(\"form-data\") as mpwriter:\n with open(self.tgzfile, \"rb\") as file_handle:\n part = mpwriter.append(file_handle)\n part.set_content_disposition(\n \"form-data\", name=\"file\", filename=\"inventory.gz\"\n )\n part.headers[aiohttp.hdrs.CONTENT_TYPE] = self.UPLOAD_CONTENT_TYPE\n\n headers = {}\n # TODO : Use mTLS certs not userid/password\n auth = 
aiohttp.BasicAuth(\n self.config[\"AUTH\"][\"username\"], self.config[\"AUTH\"][\"password\"]\n )\n headers[\"Authorization\"] = auth.encode()\n async with aiohttp.ClientSession(headers=headers) as session:\n async with session.post(\n self.upload_url, ssl=self.ssl_context, data=mpwriter\n ) as response:\n logger.debug(\"Status: %s\", response.status)\n logger.debug(\n \"Content-type: %s\", response.headers[\"Content-Type\"]\n )\n\n return await response.text()", "def upload(self, request, pk=None):\n app = self.get_object()\n deployment = Revision()\n deployment.compressed_archive = request.FILES['file']\n deployment.app = app\n deployment.save()\n app.deploy()\n response = {}\n return Response(response)", "def test_submit_asset_to_submission_service(self):\n pass", "def post(self):\n filename = str(time.time())\n filepath = os.path.join(\n os.path.join(current_app.config['UPLOAD_FOLDER'], filename))\n with open(filepath, 'bw') as uploadfile:\n chunk_size = 1024\n while True:\n chunk = request.stream.read(chunk_size)\n if len(chunk) == 0:\n break\n uploadfile.write(chunk)\n current_app.logger.info('file %s upload successfully', filename)\n return {'timestamp': filename}, http.HTTPStatus.CREATED" ]
[ "0.7121886", "0.6898182", "0.67980725", "0.6593342", "0.6541552", "0.6339619", "0.6255667", "0.6252402", "0.62079424", "0.6194437", "0.6188706", "0.61695176", "0.6163366", "0.61349344", "0.61055654", "0.6055838", "0.6012874", "0.5993593", "0.5967045", "0.59558564", "0.59323484", "0.591592", "0.5913448", "0.59105784", "0.5908581", "0.5892904", "0.5870005", "0.5864889", "0.58646065", "0.58595765" ]
0.7988663
0
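A hedged sketch of calling the blocking upload method above, assuming a configured `client` like the one sketched after the children-listing record. The shape of the `asset` dict and the file path are made up; the source only shows that the method hands both to a `FrameioUploader` and returns once the transfer finishes.

```python
# Assumes the illustrative client; asset shape and path are made up.
asset = {'id': '1234-abcd'}  # would normally come from an asset-create call
with open('/tmp/clip.mp4', 'rb') as file_handle:
    client.upload(asset, file_handle)  # blocks until the upload completes
```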
Get an asset's comments.
def get_comments(self, asset_id):
    endpoint = '/assets/{}/comments'.format(asset_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_comments(self):\n\t\treturn self._client.get_comments(self)", "def comments(self):\n comments_url = self.data['comments_url']\n return json.load(urllib2.urlopen(comments_url))", "def comments(self):\n return self.container['comments']", "def get(self, id):\n return get_comments(id)", "def comments(self):\n return self._comments", "def comments(self):\n return self._comments", "def get_comments(self):\n raise NotImplementedError", "def get_comments(self):\n url = \"https://api.imgur.com/3/gallery/{0}/comments\".format(self.id)\n resp = self._imgur._send_request(url)\n return [Comment(com, self._imgur) for com in resp]", "def get_comments(self):\n url = \"https://api.imgur.com/3/account/{0}/comments\".format(self.name)\n resp = self._imgur._send_request(url)\n return [Comment(com, self._imgur) for com in resp]", "def get_comments(self, sort, time):\r\n from r2.models import Comment\r\n return self.get_links(sort, time, Comment)", "def get_comments(self):\n\t\tself.comments = graph.get_connections(post['id'], 'comments')", "def comments(self):\r\n return c.Comments(self)", "def comments(self):\r\n return c.Comments(self)", "def comments(self):\r\n return c.Comments(self)", "def comments(self):\n return comments.Comments(self)", "def get_comments(video_id, CLIENT_SECRETS_FILE):", "def comments(self):\r\n return comments.Comments(self)", "def comments(self) -> list:\n return self._node[\"app_data\"][\"ui_data\"].get(\"comments\", [])", "def getComment(self, n = None):\n \n if n is None:\n return self._comments\n else:\n return self._comments[n]", "def get_comments(self, project, story):\n ret_val = []\n resource = \"projects/{0:d}/stories/{1:d}/comments\".format(project.id,\n story.id)\n params = {\"fields\": Comment.FIELDS}\n comments = self._request(\"get\", resource, params=params)\n\n for comment in comments:\n ret_val.append(Comment(comment))\n\n return ret_val", "def get_repo_comments(owner, repo, session=None):\n url = f'{GITHUB_API_URL}/repos/{owner}/{repo}/comments'\n return get_one_item_at_a_time(url, session=session)", "def _get_comments(self, issue_id):\n data = self._get(\"/issues/{}/comments\".format(issue_id))\n comments = []\n for item in data:\n comments.append(\n Comment(item['user']['login'], item['body'])\n )\n return comments", "def get_comments(self, resp):\n comments = CommentList()\n for value in resp['comments']:\n comment = Comment()\n comment.set_comment_id(value['comment_id'])\n comment.set_expense_id(value['expense_id'])\n comment.set_description(value['description'])\n comment.set_commented_by_id(value['commented_by_id'])\n comment.set_commented_by(value['commented_by'])\n comment.set_date(value['date'])\n comment.set_date_description(value['date_description'])\n comment.set_time(value['time'])\n comment.set_operation_type(value['operation_type'])\n comment.set_transaction_id(value['transaction_id'])\n comment.set_transaction_type(value['transaction_type'])\n comments.set_comments(comment)\n return comments", "def get_comments(self,comments):\n all_comments = []\n for comment in comments:\n try :\n all_comments.append({\n 'comment':comment['data']['body'],\n 'score':comment['data']['score']\n })\n except: pass\n return all_comments", "def get_comments(id_post):\n return Comms.objects.filter(post__id=id_post)", "def get(pid, sid, aid, cid):\n helpers.abort_if_invalid_parameters(pid, sid)\n helpers.abort_if_unknown_comment(cid, aid)\n project = Project.query.get(pid)\n\n if not project.is_public:\n user = 
User.query.filter_by(email=get_jwt_identity()).first()\n helpers.abort_if_not_a_member_and_private(user, project)\n children = CommentsModel.query.filter_by(parent_id=cid).all()\n return custom_response(200, data=UserAnnotationCommentSchema(many=True).dump(children))", "def _get_comments(self):\n if not hasattr(self, 'id'):\n raise BadReference('No matching issue on disk')\n return filter(lambda x: len(x) == 40, os.listdir(self.paths['comments']))", "def comments(self):\r\n return Comments(self)", "def comments(self):\r\n return Comments(self)", "def comments(self):\r\n return Comments(self)" ]
[ "0.7266864", "0.6898062", "0.67627263", "0.66801363", "0.66657114", "0.66657114", "0.6650718", "0.6621939", "0.65967107", "0.65728563", "0.6548351", "0.6507441", "0.6507441", "0.6507441", "0.63552064", "0.63516146", "0.6345878", "0.6339328", "0.6319697", "0.63146293", "0.6279501", "0.6276689", "0.6239993", "0.6236769", "0.6178735", "0.61735564", "0.6169842", "0.6065907", "0.6065907", "0.6065907" ]
0.87023836
0
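A short sketch of fetching comments with the method above, again assuming the illustrative `client`; the asset ID and the response field names are assumptions about the payload, not confirmed by the source.

```python
# Assumes the illustrative client; ID and field names are made up.
for comment in client.get_comments('1234-abcd'):
    print(comment.get('text'))
```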
Get the review links of a project.
def get_review_links(self, project_id):
    endpoint = '/projects/{}/review_links'.format(project_id)
    return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def grab_project_links(soup):\n project_urls = []\n valid_project_url = \"/?archive/?gsoc/\\d+[0-9]/orgs/[a-zA-Z]+/[a-zA-Z]+/[a-zA-Z]+.html\"\n try:\n # Grab links to all the projects\n all_link = soup.find_all(\"a\")\n for link in all_link:\n if re.match(valid_project_url, link.get(\"href\")):\n project_urls.append(join(melange, link.get(\"href\")[1:]))\n except TypeError:\n print(link)\n\n return project_urls", "def getLinks(tvshow, season, episode):\n urltv = getTvShowUrl(tvshow, season,episode)\n urlbase = 'http://projectfreetv.so'\n src_urltv = (''.join(getPage(urltv))).split('</a>')\n possible_links = []\n if (src_urltv == -1):\n return possible_links\n for line in src_urltv:\n for nameModule in projectfreetv_mod.__all__:\n if ((nameModule in line) and (('aff_id') in line)):\n link = line.split('\"')[1]\n possible_links.append([link, \\\n \"projectfreetv_mod.\" + nameModule])\n #print possible_links\n return possible_links", "def parse_links(html):\n\n soup = BeautifulSoup(html, 'lxml')\n content_cards = soup.find_all('a', {'class': 'content-card-link'})\n review_links = [cc.get('href') for cc in content_cards]\n review_links = [review_link.split('/')[-1] for review_link in review_links]\n return review_links", "def getRelevantPRData():\n prInfoFromAPI = getPRsFromAPI()\n diffHeader = headers.copy()\n diffHeader['Accept'] = \"application/vnd.github.v3.diff\"\n textForReviewPRs = []\n\n for PR in prInfoFromAPI:\n labels = [label[\"name\"] for label in PR['labels']]\n if \"Text for Review\" in labels:\n diffResponse = requests.get(PR[\"url\"], headers=diffHeader)\n diff = diffResponse.text\n # Add the info the list\n textForReviewPRs.append({\n \"pull_request_link\": PR[\"html_url\"],\n \"diff\": diff\n })\n if int(diffResponse.headers[\"X-RateLimit-Remaining\"]) <= 2:\n print('GitHub api rate limit will be exceeded; the GITHUB_TOKEN env variable needs to be set.')\n break\n return textForReviewPRs", "def get_links(proj,exp):\n response = do_method(\"experiment.info\",\n {\"proj\":proj,\"exp\":exp,\"aspect\":\"links\"})\n check_response(response)\n return response['value']", "def get_docs_urls(self):\n docs_urls = []\n link_labels = []\n for tag in self.post_div.find_all(\"a\"):\n url = tag[\"href\"]\n if url.startswith(\"https://docs.google.com\") or \\\n url.startswith(\"https://drive.google.com\"):\n docs_urls += [url]\n link_labels += [tag.text]\n return docs_urls, link_labels", "def list(state='open'):\n reviews = parse(gh_request('GET', '/repos/:user/:repo/pulls'))\n printers.print_review_list(reviews)", "def links(self):\r\n return links.RepoLinks(self)", "def get_urls():\n return (constants.UNREVIEWED.col_values(3) +\n constants.REVIEWED.col_values(3) +\n constants.LAST.col_values(3))", "def get_guide_urls(self):\n # data structures for returns\n urls = []\n link_labels = []\n link_class = []\n # data structures for tracking classes for links\n cur_class = None\n dict_counter = {}\n for tag in self.post_div.find_all(\"a\"):\n url = tag[\"href\"]\n # update class for the links if boundary found\n if url in url_to_class:\n dict_count = min(dict_counter.get(url, 0), len(url_to_class[url]) - 1)\n cur_class = url_to_class[url][dict_count]\n dict_counter[url] = dict_counter.get(url, 0) + 1\n # record the data for the link\n if cur_class is not None:\n urls += [url]\n link_labels += [tag.text]\n link_class += [cur_class]\n return urls, link_labels, link_class", "def get_all_links(self):\n links_url = \"{}/links\".format(self._project_url)\n print(links_url)\n response = 
requests.get(links_url).json()\n return json.dumps(response, indent=4, sort_keys=True)", "def getUrls(self):\n # in case you need to move from a read only Url to a writeable one, here it gets replaced\n repopath = self.repositoryUrl().replace(\"[git]\", \"\")\n repoString = utils.replaceVCSUrl(repopath)\n [repoUrl, repoBranch, repoTag] = utils.splitVCSUrl(repoString)\n if not repoBranch and not repoTag:\n repoBranch = \"master\"\n print(\"|\".join([repoUrl, repoBranch, repoTag]))\n return True", "def links(iati_import, activity, project, activities_globals):\n imported_links = []\n changes = []\n\n for website in activity.findall('activity-website'):\n url = get_text(website, activities_globals['version'])\n\n # Skip RSR links\n if url and 'rsr.akvo.org' in url:\n continue\n\n link, created = get_model('rsr', 'link').objects.get_or_create(\n project=project,\n url=url\n )\n\n if created:\n changes.append(u'added link (id: %s): %s' % (str(link.pk), link))\n\n imported_links.append(link)\n\n for doc_link in activity.findall(\"document-link[@format='application/http']\"):\n url = ''\n caption = ''\n\n if 'url' in doc_link.attrib.keys():\n url = doc_link.attrib['url']\n\n # Skip RSR links\n if url and 'rsr.akvo.org' in url:\n continue\n\n title_element = doc_link.find('title')\n if not title_element is None:\n caption = get_text(title_element, activities_globals['version'])\n if len(caption) > 50:\n add_log(iati_import, 'link_caption', 'caption is too long (50 characters allowed)',\n project, IatiImportLog.VALUE_PARTLY_SAVED)\n caption = caption[:50]\n\n link, created = get_model('rsr', 'link').objects.get_or_create(\n project=project,\n url=url,\n caption=caption\n )\n\n if created:\n changes.append(u'added link (id: %s): %s' % (str(link.pk), link))\n\n imported_links.append(link)\n\n for link in project.links.all():\n if not link in imported_links:\n changes.append(u'deleted link (id: %s): %s' %\n (str(link.pk),\n link.__unicode__()))\n link.delete()\n\n return changes", "def find_links(author, end_time, method='sh'):\n\tout = bash('echo \"'+ author + '\" | ~/lookup/getValues a2P')\n\tpr = [x for x in out.strip().split(';')[1:]]\n\t\n\tif method == 'pr_timeline':\t\t\n\t\tp = Proj()\n\t\tfor project in pr:\n\t\t\trows = p.project_timeline(['time','repo', 'author'], project)\n\t\t\tfor row in rows:\n\t\t\t\tprint row", "async def get_project_info(project_urls):\n project_info = []\n for url in project_urls:\n soup = await get_page(url)\n about = soup.find_all(\"p\")\n title = soup.find(\"h3\").text\n student = about[0].text.splitlines()[2].strip()\n details = about[1].text\n name = about[0].find(\"a\").text\n project_info.append({'Organization': name, 'title': title,\n 'student': student, 'details': details,\n 'link': url})\n\n return project_info", "def links(self):\n\t\treturn self.list_of_links", "def get_links(self):\r\n return self.links", "def getLink(self):", "def to_projectlink(self):\n\n thumb_image_url = reverse('project_serve_file', args=[self.short_name,self.logo])\n\n args = {\"abreviation\":self.short_name,\n \"title\":self.short_name,\n \"description\":self.description,\n \"URL\":reverse('comicsite.views.site', args=[self.short_name]),\n \"download URL\":\"\",\n \"submission URL\":self.get_submission_URL(),\n \"event name\":self.event_name,\n \"year\":\"\",\n \"event URL\":self.event_url,\n \"image URL\":self.logo,\n \"thumb_image_url\":thumb_image_url,\n \"website section\":\"active challenges\",\n \"overview article url\":self.publication_url,\n \"overview article 
journal\":self.publication_journal_name,\n \"overview article citations\":\"\",\n \"overview article date\":\"\",\n \"submission deadline\":\"\",\n \"workshop date\":self.workshop_date,\n \"open for submission\":\"yes\" if self.is_open_for_submissions else \"no\",\n \"data download\":\"yes\" if self.offers_data_download else \"no\",\n \"dataset downloads\":self.number_of_downloads,\n \"registered teams\":\"\",\n \"submitted results\":self.number_of_submissions,\n \"last submission date\":self.last_submission_date,\n \"hosted on comic\":True,\n \"created at\":self.created_at\n }\n\n projectlink = ProjectLink(args)\n return projectlink", "def getquicklinks(self):\n from pootle_app.models.permissions import check_profile_permission\n projects = self.projects.all()\n quicklinks = []\n for language in self.languages.iterator():\n langlinks = []\n if projects.count():\n for translation_project in language.translationproject_set.filter(project__in=self.projects.iterator()).iterator():\n isprojectadmin = check_profile_permission(self, 'administrate',\n translation_project.directory)\n\n langlinks.append({\n 'code': translation_project.project.code,\n 'name': translation_project.project.fullname,\n 'isprojectadmin': isprojectadmin,\n })\n\n islangadmin = check_profile_permission(self, 'administrate', language.directory)\n quicklinks.append({'code': language.code,\n 'name': language.localname(),\n 'islangadmin': islangadmin,\n 'projects': langlinks})\n quicklinks.sort(cmp=locale.strcoll, key=lambda dict: dict['name'])\n return quicklinks", "def GetReviewers(host, change):\n path = '%s/reviewers' % _GetChangePath(change)\n return FetchUrlJson(host, path)", "def urls(gh, user):\n return [repo.url for repo in getuserrepos(gh, user)]", "def test_all_projectlinks(self):\n \n content = \"Here is a test overview of all projects : <allprojects> {% all_projectlinks %} </allprojects>\" \n testallprojectlinkspage = create_page_in_admin(self.testproject,\"testallprojectlinkspage\",content)\n \n\n # This overview should be viewable by anyone \n self._test_page_can_be_viewed(self.signedup_user,testallprojectlinkspage)\n response = self._test_page_can_be_viewed(None,testallprojectlinkspage)\n \n # Extract rendered content from included file, see if it has been rendered\n # In the correct way\n allprojectsHTML = find_text_between('<allprojects>','</allprojects>',response.content)\n \n self.assertTrue(allprojectsHTML != \"\",\"Nothing was rendered for projects overview\")", "def getLinks(self):\n\t\threfs = []\n\t\tfor link in self.bsource.find_all('a'):\n\t\t\threfs.append(link.get('href'))\n\t\treturn hrefs", "def get_resource_urls():\n base_url = 'http://developer.pardot.com/'\n pattern = re.compile(\n r'(?ims)\\<a [^>]*?href=\"(kb/api-version-3/[^>]*?/)\"[^>]*?\\>'\n r'[^<]*?\\</a\\>')\n response = requests.get(base_url)\n return [\n '%s/%s' % (base_url, url) for url in pattern.findall(response.text)]", "def _parse_reviewers(self, content):\n soup = bs(content, ['fast', 'lxml'])\n table = soup.find('table', {'id': 'productReviews'})\n reviewers = [link['href'] for link in table.findAll('a')\\\n if link.contents == ['See all my reviews']]\n return reviewers", "def getURLs():", "def get_reviews(review_url):\n print review_url\n html = urllib.urlopen(review_url).read()\n soup = bs4.BeautifulSoup(html, 'html.parser')\n\n rating_scores = soup.findAll(\"span\", \"ratingScore\")\n num_ratings = len(rating_scores) - 1\n\n current_reviews = soup.findAll(\"div\", \"currentVintageProfessinalReviews\")\n num_cur_reviews = 
str(current_reviews).count('ratingProvider')\n past_reviews = soup.findAll(\"ul\", \"pastVintagesProfessionalReviews\")\n num_past_reviews = str(past_reviews).count('ratingProvider')\n\n print 'There are {0} reviews for prior vintages of this wine.'.format(num_past_reviews)\n print 'There are {0} current reviews for this vintage.\\n'.format(num_cur_reviews)\n\n rating_provider = soup.findAll(\"span\", \"ratingProvider\")\n rating_score = soup.findAll(\"span\", \"ratingScore\")\n reviewers = re.findall('(?<![A-Z])[>]([A-Z]+(?![A-Z]))', str(rating_provider))\n ratings = re.findall('(?<![A-Z])[0-9]{2}(?![A-Z])', str(rating_score))\n\n print \"Ratings List:\", ratings\n print \"Current Reviews: \", num_cur_reviews\n\n currentreviews = []\n for j in range(num_cur_reviews):\n print \"Current Review #\"+str(j+1)+\":\", reviewers[j], ratings[j]\n currentreviews.append((reviewers[j], ratings[j]))\n print currentreviews\n\n print \"\\nPast Reviews: \", num_past_reviews\n past_review_ratings = []\n for k in range(num_cur_reviews, num_past_reviews+num_cur_reviews):\n #print \"Past Review #\"+str(k-num_cur_reviews+1)+\":\", reviewers[k], int(ratings[k])\n past_review_ratings.append(float(ratings[k]))\n if k > 30:\n break\n if num_past_reviews != 0:\n avg_past_reviews = sum(past_review_ratings)/len(past_review_ratings)\n round(avg_past_reviews, 2)\n else:\n avg_past_reviews = 0\n\n print \"Average of Past Reviews: \", avg_past_reviews\n\n return currentreviews, avg_past_reviews", "def get_links(match_set, sha_validation=validate_sha_github):\n links = []\n for ticket in match_set.tickets:\n links.append(ticket_url % ticket)\n for PR in match_set.github_PRs:\n links.append(github_PR_url % PR)\n\n # validate github changeset SHA's\n for c in match_set.github_changesets:\n if sha_validation and sha_validation(c):\n links.append(github_changeset_url % c)\n\n return links", "def get_links(self, project_id):\n return self.http_call(\n \"get\", url=f\"{self.base_url}/projects/{project_id}/links\"\n ).json()" ]
[ "0.6268083", "0.62558377", "0.6122463", "0.6031433", "0.60285616", "0.59528255", "0.5925954", "0.58337975", "0.57853", "0.57034177", "0.5673401", "0.56721526", "0.563363", "0.56289285", "0.56215906", "0.5591778", "0.5589715", "0.5549578", "0.5547254", "0.55213386", "0.55127126", "0.54990417", "0.548586", "0.5484526", "0.5480388", "0.54803485", "0.54780245", "0.5477476", "0.5472809", "0.54679966" ]
0.7728808
0
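Listing a project's review links with the method above, under the same assumed `client`; the project ID is illustrative.

```python
# Assumes the illustrative client; the project ID is made up.
links = client.get_review_links('proj-5678')
print('{} review links found'.format(len(links)))
```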
Create a review link.
def create_review_link(self, project_id, **kwargs):
    endpoint = '/projects/{}/review_links'.format(project_id)
    return self._api_call('post', endpoint, payload=kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def review_link(self, review_link):\n\n self._review_link = review_link", "def createLink(context, title, link, exclude_from_nav=False):\n oid = idnormalizer.normalize(title, 'es')\n if not hasattr(context, oid):\n context.invokeFactory('Link', id=oid, title=title, remoteUrl=link)\n link = context[oid]\n if exclude_from_nav:\n link.setExcludeFromNav(True)\n link.reindexObject()", "def create(title, head, base='master', message=''):\n review_info = {\n 'title': title,\n 'body': message,\n 'head': head,\n 'base': base,\n }\n\n data = json_encode(review_info)\n review = parse(gh_request('POST', '/repos/:user/:repo/pulls', body=data))\n printers.print_review_created(review)", "def addreview(self, name, year, genre, rating, review, reviewer):\n pass", "def create_trackurl(self, context):\n t_url, created = TrackableURL.objects.get_or_create(url=self.url)\n t_url.save()\n\n # key = generate_url_key()\n redirect, created = RedirectUrl.objects.get_or_create(user=context['user'], target_url=t_url)\n if created:\n redirect.save()\n\n text = self.url\n if hasattr(self, 'display_text') and self.display_text is not None:\n text = self.display_text\n else:\n text = self.url\n if self.mode == 'link':\n return \"<a href='{0}' target='_blank'>{1}</a>\".format(reverse('api_redirect', kwargs={'key': redirect.redirect_key}), text)\n else:\n return reverse('api_redirect', kwargs={'key': redirect.redirect_key})", "def create_resource_access_review(self, body, **kwargs):\n\n all_params = ['body', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method create_resource_access_review\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `create_resource_access_review`\")\n\n resource_path = '/oapi/v1/resourceaccessreviews'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1ResourceAccessReview',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def create_link(self):\n self.filename = App.get_running_app().root.ids.camera_screen.capture()\n self.url = FileSharer(self.filename).share()\n self.ids.label.text = self.url", "def make_link_node(rawtext, app, type, slug, options):\r\n\r\n try:\r\n base = app.config.github_project_url\r\n if not base:\r\n raise AttributeError\r\n if not base.endswith('/'):\r\n base += '/'\r\n except AttributeError, err:\r\n raise ValueError('github_project_url configuration value is not set (%s)' % str(err))\r\n\r\n ref 
= base + type + '/' + slug + '/'\r\n set_classes(options)\r\n prefix = \"#\"\r\n if type == 'pull':\r\n prefix = \"PR \" + prefix\r\n node = nodes.reference(rawtext, prefix + utils.unescape(slug), refuri=ref,\r\n **options)\r\n return node", "def preview_create(self, obj, include_link=False):\n return self._create(obj, preview=True, include_link=include_link)", "def add_review(game_name):\n game = Game.from_mongo(**mongo.db.games.find_one({ \"name\": game_name }))\n username = session.get('username')\n if username is not None:\n user_dict = mongo.db.users.find_one({\"name\": username})\n user = User.from_mongo(**user_dict)\n\n form = ReviewForm()\n if form.validate_on_submit():\n author_ref = user.create_author_ref()\n pub_date = str(datetime.now(timezone.utc))\n game_ref = game.create_game_ref()\n \n new_review = Review.add_review(\n name=form.title.data,\n game=game.label,\n author=user.name, \n author_id=user._id, \n text=form.review_text.data, \n game_id=game._id, \n pub_date=pub_date, \n game_ref=game_ref, \n author_ref=author_ref\n )\n flash('Review Successfully Posted')\n review_ref = new_review.create_review_ref()\n game.reviews.append(review_ref)\n game.update_game()\n game_ref = game.create_game_ref()\n user.reviews.append(review_ref)\n if game_ref not in user.game_list:\n user.game_list.append(game_ref)\n user.update_user()\n return redirect(url_for('review', review_id=new_review._id))\n return render_template('add_review.html', game_name=game_name, user=user, game=game, form=form)\n else:\n flash('Please log in to post a review')\n return redirect(url_for('login'))", "def make_link_node(rawtext, app, type, slug, options):\n\n try:\n base = app.config.github_project_url\n if not base:\n raise AttributeError\n if not base.endswith('/'):\n base += '/'\n except AttributeError as err:\n raise ValueError('github_project_url configuration value is not set (%s)' % str(err)) from err\n\n ref = base + type + '/' + slug + '/'\n set_classes(options)\n prefix = \"#\"\n if type == 'pull':\n prefix = \"PR \" + prefix\n node = nodes.reference(rawtext, prefix + utils.unescape(slug), refuri=ref,\n **options)\n return node", "def add_review(self):\n url = \"/review/create/%s\" % self.picture.id\n self.browser.get(\"%s%s\" %\n (str(self.live_server_url), url))\n\n select = Select(self.browser.find_element_by_id(\n \"id_score_intention\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_technical\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_picture\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_global\"))\n select.select_by_index(4)\n\n self.browser.find_element_by_id(\n \"id_comment_intention\").send_keys(\"Commentaire intention\")\n\n submission_button = self.browser.find_element_by_class_name(\n 'btn-secondary')\n submission_button.click()\n time.sleep(2)\n html = self.browser.page_source\n self.assertInHTML(\"\"\"\n <h4 class=\"rouge-fonce\">Critique de test_login</h4>\n \"\"\",\n html)\n self.assertInHTML(\"\"\"\n <strong>Note moyenne de la revue : 4,0</strong>\n \"\"\",\n html)", "def add_review(self, review: Review):\n raise NotImplementedError", "def review():\r\n\r\n # Ensure isbn_number is submitted\r\n if not request.form.get(\"isbn_number\"):\r\n return apology(\"Invalid book\", 403)\r\n\r\n # Ensure review is submitted\r\n if not request.form.get(\"review\"):\r\n return apology(\"Text is not submitted\", 403)\r\n\r\n # Check if book exist, if 
not error out\r\n\r\n # add review to db\r\n\r\n return redirect(url_for(details, isbn_number=request.form.get(\"isbn_number\")))", "def _create(self, obj, preview=None, include_link=False):\n # TODO: validation, error handling\n assert preview in (False, True)\n type = obj.get('objectType')\n verb = obj.get('verb')\n base_id, base_url = self.base_object(obj, verb=verb)\n if base_id and not base_url:\n base_url = self.object_url(base_id)\n\n content = self._content_for_create(obj)\n if not content:\n if type == 'activity':\n content = verb\n else:\n return source.creation_result(\n abort=False, # keep looking for things to post\n error_plain='No content text found.',\n error_html='No content text found.')\n\n url = obj.get('url')\n if include_link and url:\n content += '\\n\\n(%s)' % url\n preview_content = util.linkify(content)\n msg_data = {'message': content.encode('utf-8')}\n if appengine_config.DEBUG:\n msg_data['privacy'] = json.dumps({'value': 'SELF'})\n msg_data = urllib.urlencode(msg_data)\n\n if type == 'comment':\n if not base_url:\n return source.creation_result(\n abort=True,\n error_plain='Could not find a Facebook status to reply to.',\n error_html='Could not find a Facebook status to <a href=\"http://indiewebcamp.com/comment\">reply to</a>. '\n 'Check that your post has an <a href=\"http://indiewebcamp.com/comment\">in-reply-to</a> '\n 'link a Facebook URL or to an original post that publishes a '\n '<a href=\"http://indiewebcamp.com/rel-syndication\">rel-syndication</a> link to Facebook.')\n\n if preview:\n return source.creation_result(\n 'will <span class=\"verb\">comment</span> <em>%s</em> on '\n '<a href=\"%s\">this post</a>:\\n%s' %\n (preview_content, base_url, EMBED_POST % base_url))\n else:\n resp = json.loads(self.urlopen(API_COMMENTS_URL % base_id,\n data=msg_data).read())\n resp.update({'url': self.comment_url(base_id, resp['id']),\n 'type': 'comment'})\n\n elif type == 'activity' and verb == 'like':\n if not base_url:\n return source.creation_result(\n abort=True,\n error_plain='Could not find a Facebook status to like.',\n error_html='Could not find a Facebook status to <a href=\"http://indiewebcamp.com/favorite\">like</a>. 
'\n 'Check that your post has an <a href=\"http://indiewebcamp.com/favorite\">like-of</a> '\n 'link a Facebook URL or to an original post that publishes a '\n '<a href=\"http://indiewebcamp.com/rel-syndication\">rel-syndication</a> link to Facebook.')\n\n if preview:\n return source.creation_result(\n 'will <span class=\"verb\">like</span> <a href=\"%s\">this post</a>:\\n%s' %\n (base_url, EMBED_POST % base_url))\n else:\n resp = json.loads(self.urlopen(API_LIKES_URL % base_id, data='').read())\n assert resp == True, resp\n resp = {'type': 'like'}\n\n elif type == 'activity' and verb in RSVP_ENDPOINTS:\n if not base_url:\n return source.creation_result(\n abort=True,\n error_plain=\"This looks like an RSVP, but it's missing an \"\n \"in-reply-to link to the Facebook event.\",\n error_html=\"This looks like an <a href='http://indiewebcamp.com/rsvp'>RSVP</a>, \"\n \"but it's missing an <a href='http://indiewebcamp.com/comment'>in-reply-to</a> \"\n \"link to the Facebook event.\")\n\n # TODO: event invites\n if preview:\n assert verb.startswith('rsvp-')\n return source.creation_result(\n 'will <span class=\"verb\">RSVP %s</span> to '\n '<a href=\"%s\">this event</a>.<br />' % (verb[5:], base_url))\n else:\n resp = json.loads(self.urlopen(RSVP_ENDPOINTS[verb] % base_id, data='').read())\n assert resp == True, resp\n resp = {'type': 'rsvp'}\n\n elif type in ('note', 'article'):\n if preview:\n return source.creation_result(\n 'will <span class=\"verb\">post</span>:<br /><br />'\n '<em>%s</em><br />' % preview_content)\n else:\n resp = json.loads(self.urlopen(API_FEED_URL, data=msg_data).read())\n resp.update({'url': self.post_url(resp), 'type': 'post'})\n\n elif type == 'activity' and verb == 'share':\n return source.creation_result(\n abort=True,\n error_plain='Cannot publish shares on Facebook.',\n error_html='Cannot publish <a href=\"https://www.facebook.com/help/163779957017799\">shares</a> '\n 'on Facebook. 
This limitation is imposed by the '\n '<a href=\"https://developers.facebook.com/docs/graph-api/reference/v2.0/object/sharedposts/#publish\">Facebook Graph API</a>.')\n\n else:\n return source.creation_result(\n abort=False,\n error_plain='Cannot publish type=%s, verb=%s to Facebook' % (type, verb),\n error_html='Cannot publish type=%s, verb=%s to Facebook' % (type, verb))\n\n if 'url' not in resp:\n resp['url'] = base_url\n return source.creation_result(resp)", "def newreview():\n objectid = request.values.get('objectid', 0, type=int)\n if not objectid:\n abort(400)\n workflow_object = workflow_object_class.get(objectid)\n\n form = AuthorUpdateForm(\n data=workflow_object.extra_data[\"formdata\"], is_review=True)\n ctx = {\n \"action\": url_for('.reviewhandler', objectid=objectid),\n \"name\": \"authorUpdateForm\",\n \"id\": \"authorUpdateForm\",\n \"objectid\": objectid\n }\n\n return render_template('authors/forms/review_form.html', form=form, **ctx)", "def new():\n\n add_review = True\n\n form = CreateReview()\n if form.validate_on_submit():\n\n try:\n review = {\n \"score\": float(form.score.data),\n \"description\": form.description.data,\n \"games_id\": form.game_id.data,\n \"users_id\": form.users_id.data\n }\n\n print(review)\n new_review = Reviews()\n new_review.create(**review)\n \n # add employee to the database\n flash('You have successfully created a Review.')\n except:\n # in case department name already exists\n flash('Error: review already exists.')\n \n\n # redirect to the login page\n return redirect(url_for('review.index'))\n\n return render_template('review/new.html', action=\"Add\", add_review=add_review, form=form, title=\"Add Review\")", "def test_create_review(self):\n yield self.nodes[0].overlay.create_project(\"test\", \"specpointer\", \"01-02-03\", 300, \"EUR\", 5)\n yield self.deliver_messages()\n project = self.nodes[1].overlay.persistence.get_projects()[0]\n yield self.nodes[1].overlay.create_submission(project['public_key'].decode('hex'), project['id'], 'test')\n yield self.deliver_messages()\n\n # Do a review\n submission = self.nodes[0].overlay.persistence.get_submissions_for_project(project['public_key'].decode('hex'), project['id'])[0]\n yield self.nodes[0].overlay.create_review(submission['public_key'].decode('hex'), submission['id'], 'test')\n yield self.deliver_messages()\n\n self.assertTrue(self.nodes[1].overlay.persistence.get_reviews(submission['public_key'].decode('hex'), submission['id']))", "def createLink(self, source, destination):\n log(\"creating link\")\n\n if \"flix\" in source:\n return \"%s\" % +OSUtils.createLink(source, destination)\n return \"0\"", "def create_review(place_id):\n place = storage.get(\"Place\", place_id)\n if place is None:\n abort(404)\n if not request.get_json():\n return jsonify({'error': 'Not a JSON'}), 400\n if 'user_id' not in request.get_json():\n return jsonify({'error': 'Missing user_id'}), 400\n user = storage.get(\"User\", request.get_json().get('user_id'))\n if user is None:\n abort(404)\n user_id = request.get_json().get('user_id')\n if 'text' not in request.get_json():\n return jsonify({'error': 'Missing text'}), 400\n text = request.get_json().get('text')\n obj = Review(text=text, place_id=place_id, user_id=user_id)\n obj.save()\n return jsonify(obj.to_dict()), 201", "def create_issue_link(self, link_type, inwardissue,\r\n outwardissue, comment=None):\r\n self.jira.create_issue_link(type=link_type,\r\n inwardIssue=str(inwardissue),\r\n outwardIssue=str(outwardissue))", "def test_add_reviews(self):\n 
metadata = Metadata(DataSource.CONTENT_CAFE)\n content = self.data_file(\"reviews.html\")\n self.http.queue_requests_response(200, 'text/html', content=content)\n self.api.add_reviews(metadata, self.identifier, self.args)\n\n # We extracted six reviews from the sample file.\n reviews = metadata.links\n eq_(6, len(reviews))\n assert all([x.rel==Hyperlink.REVIEW for x in reviews])\n assert \"isn't a myth!\" in reviews[0].content\n\n # We incidentally figured out the book's title.\n eq_(\"Shadow Thieves\", metadata.title)", "def createLink(self, downloadUrl, title):\n newUrl = downloadUrl.replace(\"details\", \"download\") \n return self.url + '/' + newUrl", "def publish(self, review_request):\r\n self.debug('Publishing')\r\n self.api_call('api/review-requests/%s/publish/' %\r\n review_request['id'])", "def post_review(recipe_id=None):\n\n if not storage.get(Recipe, recipe_id):\n abort(404)\n data = request.get_json()\n if not data:\n abort(400, 'Not a JSON')\n if 'user_id' not in data.keys():\n abort(400, 'Missing user_id')\n if not storage.get(User, data['user_id']):\n abort(404)\n if 'text' not in data.keys():\n abort(400, 'Missing text')\n data['recipe_id'] = recipe_id\n new_review = Review(**data)\n storage.new(new_review)\n storage.save()\n return make_response(jsonify(new_review.to_dict()), 201)", "def create(self, request, *args, **kwargs):\n request.data[\"shop\"] = 1\n self.is_review_body_valid(self.get_serializer(data=request.data)) # checks if body data is valid\n\n shop_pk = self.get_shop_pk(request.data.pop(\"shop_link\"))\n request.data[\"shop\"] = shop_pk\n\n return super().create(request, *args, **kwargs)", "def create_review(place_id=None):\n place = storage.get(Place, place_id)\n if place:\n review = request.get_json()\n if not review:\n abort(400, \"Not a JSON\")\n if \"user_id\" not in review:\n abort(400, \"Missing user_id\")\n if not storage.get(\"User\", review[\"user_id\"]):\n abort(404)\n if \"text\" not in review:\n abort(400, \"Missing text\")\n else:\n review['place_id'] = place.id\n new_review = Review(**review)\n storage.new(new_review)\n storage.save()\n return jsonify(new_review.to_dict()), 201\n abort(404)", "def new_review(place_id):\n body_dic = request.get_json()\n place = storage.get(Place, place_id)\n if not place:\n abort(404)\n if not body_dic:\n return jsonify({'error': 'Not a JSON'}), 400\n if \"user_id\" not in body_dic:\n return jsonify({'error': 'Missing user_id'}), 400\n user = storage.get(User, body_dic.get(\"user_id\", None))\n if not user:\n abort(404)\n if \"text\" not in body_dic:\n return jsonify({'error': 'Missing text'}), 400\n\n new_review = Review(**body_dic)\n setattr(new_review, \"place_id\", place_id)\n storage.new(new_review)\n storage.save()\n return jsonify(new_review.to_dict()), 201", "def add_new_review(restaurant_id):\n # If the user isn't logged in, send to the login page\n if helper.handle_login(login_session) is False:\n return redirect('/login')\n\n if request.method == 'POST':\n post = request.get_json()\n if 'username' not in login_session:\n new_review = Reviews(reviewer_name='anonymous',\n review=post.get('review'),\n stars=post.get('stars'),\n restaurant_id=restaurant_id,\n time=datetime.utcnow())\n else:\n new_review = Reviews(reviewer_name=login_session['username'],\n review=post.get('review'),\n stars=post.get('stars'),\n restaurant_id=restaurant_id,\n time=datetime.utcnow())\n session.add(new_review)\n session.commit()\n\n return redirect(url_for('restaurants_page'))", "def get_review_link(self, link_id, 
**kwargs):\n endpoint = '/review_links/{}'.format(link_id)\n return self._api_call('get', endpoint, payload=kwargs)" ]
[ "0.67446196", "0.59985214", "0.58935666", "0.5819669", "0.57771635", "0.57548815", "0.56993616", "0.5654442", "0.5638429", "0.5629432", "0.56153315", "0.56034595", "0.5595337", "0.5588098", "0.558482", "0.55711055", "0.5541868", "0.55295026", "0.5524608", "0.5522368", "0.54973185", "0.5489872", "0.5477701", "0.5461392", "0.5427208", "0.54108745", "0.538248", "0.53798795", "0.5368702", "0.53590333" ]
0.77476805
0
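Creating a review link with the method above: the keyword arguments become the POST payload. The assumed `client`, project ID, and the `name` field are illustrative, not confirmed by the source.

```python
# Assumes the illustrative client; kwargs become the POST body.
# 'name' is a guess at a plausible payload field, not a documented one.
link = client.create_review_link('proj-5678', name='Client review')
print(link.get('id'))
```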
Get a single review link.
def get_review_link(self, link_id, **kwargs):
    endpoint = '/review_links/{}'.format(link_id)
    return self._api_call('get', endpoint, payload=kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_review_page(review_link):\n\n session = r.Session()\n response = session.get(BASE_URL + '/music/albumreviews/' + review_link,\n headers=HEADERS)\n return response", "def get_review(review_id):\n return get(cls, review_id)", "def get_content_object_url(self):\n return urlresolvers.reverse(\n \"reviews-url-redirect\",\n args=(self.content_type_id, self.object_pk)\n )", "def one_review(review_id=None):\n if review_id:\n for item in storage.all(Review).values():\n if review_id == item.id:\n return (jsonify(item.to_dict()))\n abort(404)", "def get_review(review_id):\n obj = storage.get(Review, review_id)\n if obj is None:\n abort(404)\n return jsonify(obj.to_dict())", "def get_review(self, id):\n endpoint = '/v3/educator/reviews/%s' % id\n result = self.request(endpoint)", "def get_review(review_id):\n review_obj = storage.get(Review, review_id)\n if review_obj:\n return jsonify(review_obj.to_dict())\n else:\n abort(404)", "def _product_reviews_url(self, url):\n temp_url = re.sub('/dp/', '/product-reviews/', url)\n return re.sub('ref=(.+)\\?', 'cm_cr_pr_top_link_1', temp_url)", "def getLink(self):", "def get_review(review_id=None):\n\n review = storage.get(Review, review_id)\n if not review:\n abort(404)\n return jsonify(review.to_dict())", "def get_absolute_url(self):\n return reverse('tour-review', args=[str(self.id)])", "def test_get_url_on_review_request(self) -> None:\n review_request = self.create_review_request()\n\n self.assertEqual(\n self.action.get_url(context=self._create_request_context(\n review_request=review_request,\n url_name='review-request-detail')),\n '/r/%s/diff/raw/' % review_request.display_id)", "def review_by_id(review_id):\n obj = storage.get(\"Review\", review_id)\n if obj is None:\n abort(404)\n return jsonify(obj.to_dict())", "def getLink(self):\n return self.link", "def review_by_id(review_id):\n review = storage.get(\"Review\", review_id)\n if review is None:\n abort(404)\n return jsonify(review.to_json())", "def link(self):\n\n return self._get_field(\"link\")", "def link(self) -> Optional[str]:\n return pulumi.get(self, \"link\")", "def link(self) -> Optional[str]:\n return pulumi.get(self, \"link\")", "def get_link(model_url):\n\n response = requests.get(model_url)\n\n return response.content", "def build_review_url(self, cipherid, offset=0, limit=20, sort='helpful'):\n params = {\n 'offset': offset,\n 'limit': limit,\n 'sort': sort\n }\n query = urllib.urlencode(params)\n return 'https://www.beautylish.com/rest/reviews/p-{cipherid}?{query}'.format(cipherid=cipherid, query=query)", "def review_link(self, review_link):\n\n self._review_link = review_link", "def shorten_link(post):\n return f\"redd.it/{post.id}\"", "def get(self, request, *args, **kwargs):\n view = ReviewDisplay.as_view()\n return view(request, *args, **kwargs)", "def get_review_request(self, rid):\r\n rsp = self.api_call('api/review-requests/%s/' % rid)\r\n return rsp['review_request']", "def get_url(\n self,\n *,\n context: Context,\n ) -> str:\n request = context['request']\n\n # We want to use a relative URL in the diff viewer as we will not be\n # re-rendering the page when switching between revisions.\n from reviewboard.urls import diffviewer_url_names\n match = request.resolver_match\n\n if match.url_name in diffviewer_url_names:\n return 'raw/'\n\n return local_site_reverse(\n 'raw-diff',\n request,\n kwargs={\n 'review_request_id': context['review_request'].display_id,\n })", "def get_review_info(link):\n\n html = requests.get(link)\n page = bs(html.text, 'lxml')\n try: name 
= page.find_all(\"div\", {\"class\": \"_1hkogt_o\"})[0].h1.text\n except: name = link\n\n review_html = page.find_all('div', {'class': 'review-container'})\n ratings = []\n reviews = []\n dates = []\n user_names = []\n\n for container in review_html:\n num_reviews = container.find(\"span\", {\"class\": \"badgeText\"})\n try:\n num_reviews = int(num_reviews.text.split()[0])\n except:\n continue\n\n if num_reviews >= 1:\n\n review = container.find(\"div\", {\"class\": \"ui_column is-9\"})\n rating = review.span['class'][1].split('_')[1]\n rating = int(rating)\n\n text_review = review.find('p', {'class': 'partial_entry'})\n try: text_review = text_review.text\n except: continue\n\n date = review.find('div', {'class': 'prw_rup prw_reviews_stay_date_hsx'})\n try: date = date.text.split(':')[1][1:]\n except: continue\n\n user_name = container.find(\"div\", {\"class\": \"info_text pointer_cursor\"})\n try: user_name = user_name.text\n except:continue\n\n ratings.append(rating)\n reviews.append(text_review)\n dates.append(date)\n user_names.append(user_name)\n\n data = pd.DataFrame(\n {'user_name': user_names, 'rating': ratings, 'review': reviews, 'date': dates, 'restaurant': name})\n return data", "def get_review_links(self, project_id):\n endpoint = '/projects/{}/review_links'.format(project_id)\n return self._api_call('get', endpoint)", "def getIdLink(self):\n return self.urlLink()", "def get_review(self, id_):\n cursor = self._connection.cursor()\n select_command = make_select_command(\"reviews\")\n select_command += \" WHERE id_ = ?\"\n cursor.execute(select_command, (id_,))\n for row in cursor:\n return expandable_from_tuple(row, FIELD_DESCRIPTIONS) \n return None", "def review(self):\n return self._review" ]
[ "0.6831752", "0.64643896", "0.6345438", "0.6279402", "0.6228978", "0.61699647", "0.61587733", "0.612717", "0.6065012", "0.60605973", "0.603731", "0.5991732", "0.5958937", "0.5925658", "0.58885676", "0.576672", "0.57320416", "0.57320416", "0.57136726", "0.5682893", "0.5650391", "0.5630293", "0.56275755", "0.5623186", "0.5622224", "0.5592715", "0.5592466", "0.55761325", "0.55524486", "0.55481565" ]
0.6617292
1
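Fetching one review link with the method above, under the same assumed `client`. Note that any keyword arguments are forwarded as a payload on the GET request; the link ID and response field are made up.

```python
# Assumes the illustrative client; the ID is made up. Extra kwargs
# would be forwarded as the request payload.
link = client.get_review_link('link-9012')
print(link.get('name'))
```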
Add or update assets for a review link.
def update_review_link_assets(self, link_id, **kwargs):
    endpoint = '/review_links/{}/assets'.format(link_id)
    return self._api_call('post', endpoint, payload=kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def review_link(self, review_link):\n\n self._review_link = review_link", "def linkAssets(des, Xrc):\n with open(des, 'r') as f:\n body = f.read()\n f.close()\n with open(des, 'w') as f:\n body = body.replace(\"custom.css\", \"\\\\\" + Xrc[\"gh_repo_name\"] + \"/Assets\" + \"/css\" + \"/custom.css\")\n f.write(body)\n f.close()\n ccc.success(\"linking assets to \" + des)", "def add(self, link):\n # if path.exists(self.cachefile):\n with open(self.cachefile, 'a') as cache:\n cache.write(f\"{link}\\n\")", "def addLinkToResource(link):\n\n\tif link not in variables.resources:\n\t\tvariables.resources.append(link)", "def add_asset(self, asset_name):\r\n self._assets.extend(asset_name)", "def review_element(request, review_id):\n try:\n review = Review.objects.get(id=review_id)\n except Review.DoesNotExist:\n return Response(status=status.HTTP_404_NOT_FOUND)\n\n if request.method == \"PUT\":\n data = json.loads(request.body)\n \n review.rating = data.get(\"rating\", \"\")\n review.content = data.get(\"content\", \"\")\n \n review.save()\n return JsonResponse({\"message\": \"Review updated successfully\"}, status=204)", "def set_media(link):\r\n results = {}\r\n make_link_info_job(results, link, g.useragent)()\r\n update_link(link, *results[link])", "def add_review(self, review: Review):\n raise NotImplementedError", "def put_review(review_id=None):\n if review_id:\n for item in storage.all(Review).values():\n if review_id == item.id:\n is_json = request.get_json()\n if is_json is None:\n abort(400, description=\"Not a Json\")\n\n item.text = is_json.get(\"text\")\n storage.save()\n return (jsonify(item.to_dict()))\n abort(404)\n abort(404)", "def update_link(link, thumbnail, media_object):\r\n if thumbnail:\r\n link.has_thumbnail = True\r\n\r\n if media_object:\r\n link.media_object = media_object\r\n\r\n link._commit()", "def update_metadata(sess, asset_link):\n get_json = sess.get(asset_link).json()\n update_metadata = get_json['customFields']\n update_metadata['SANDAG Last Access Date'] = datetime.datetime.today().strftime('%D')\n\n sess.patch(asset_link, \n data=json.dumps(update_metadata)).json()", "def addreview(self, name, year, genre, rating, review, reviewer):\n pass", "def put_review(review_id=None):\n\n review = storage.get(Review, review_id)\n if not review:\n abort(404)\n data = request.get_json()\n if not data:\n abort(400, 'Not a JSON')\n keys_ignore = ['id', 'user_id', 'recipe_id', 'created_at', 'updated_at']\n for key in data.keys():\n if key not in keys_ignore:\n setattr(review, key, data[key])\n review.save()\n return make_response(jsonify(review.to_dict()), 200)", "def add_review(self, rid, review, exts, w2v, threshold):\n self.rids.append(rid)\n self.reviews.append(review)\n cur_exts = []\n for ext in exts:\n if len(ext.strip()) < 1:\n continue\n opn, asp, att, pol = ext.split(\",\")\n ext_obj = Extraction(opn, asp, att, pol, w2v, threshold)\n if ext_obj.is_valid and ext_obj.emb is not None:\n cur_exts.append(ext_obj)\n self.exts.append(cur_exts)", "def test_add_reviews(self):\n metadata = Metadata(DataSource.CONTENT_CAFE)\n content = self.data_file(\"reviews.html\")\n self.http.queue_requests_response(200, 'text/html', content=content)\n self.api.add_reviews(metadata, self.identifier, self.args)\n\n # We extracted six reviews from the sample file.\n reviews = metadata.links\n eq_(6, len(reviews))\n assert all([x.rel==Hyperlink.REVIEW for x in reviews])\n assert \"isn't a myth!\" in reviews[0].content\n\n # We incidentally figured out the book's title.\n eq_(\"Shadow 
Thieves\", metadata.title)", "def _update_course_assets(self, user_id, asset_key, update_function):\n with self.bulk_operations(asset_key.course_key):\n original_structure = self._lookup_course(asset_key.course_key).structure\n index_entry = self._get_index_if_valid(asset_key.course_key)\n new_structure = self.version_structure(asset_key.course_key, original_structure, user_id)\n course_assets = new_structure.setdefault('assets', {})\n\n asset_type = asset_key.asset_type\n all_assets = SortedAssetList(iterable=course_assets.setdefault(asset_type, []))\n asset_idx = all_assets.find(asset_key)\n\n all_assets_updated = update_function(all_assets, asset_idx)\n new_structure['assets'][asset_type] = list(all_assets_updated)\n\n # update index if appropriate and structures\n self.update_structure(asset_key.course_key, new_structure)\n\n if index_entry is not None:\n # update the index entry if appropriate\n self._update_head(asset_key.course_key, index_entry, asset_key.branch, new_structure['_id'])", "def addAsset(self, name, asset):\n self.__assets[name] = asset\n return True", "def add_link(self, link):\n raise NotImplementedError", "def test_update_asset_content(self):\n pass", "def add_review(self, review):\n # Assume this method body has been correctly implemented.\n self.reviews.append(review)", "def save_review():\n prod_id = int(request.vars.prod_id)\n logger.info(\"saving review on prod_id {%s}\" %prod_id)\n content = request.vars.content\n db.reviews.update_or_insert(\n (db.reviews.prod_id == prod_id) & (db.reviews.user_email == auth.user.email),\n prod_id = prod_id,\n user_email = auth.user.email,\n review_content = content\n )\n return \"ok\" # Might be useful in debugging.", "def link(request, link_id):\n link = Link.objects.get(id=link_id)\n link.visualization += 1\n link.save()\n return HttpResponseRedirect(link.url)", "def add_url(p_id, url):\n for product in all_products:\n if product['id'] == p_id:\n product['url'] = url\n product['product_id'] = p_id\n product.move_to_end('product_id', last=False)", "def __addPost(self, link):\n self.currenturi = link\n self.__setStoneSoupForCurrentUri()\n try:\n page = self.__getData()\n if not page:\n return True \n if checkSessionInfo(self.genre, self.session_info_out, self.currenturi,\\\n self.task.instance_data.get('update'),parent_list\\\n = [self.task.instance_data['uri']]):\n log.info(self.log_msg('Session info returns True'))\n return False\n except:\n log.exception(self.log_msg('Cannot add the post for the url %s'%\\\n self.currenturi))\n return False\n try:\n result=updateSessionInfo(self.genre, self.session_info_out, self.currenturi, \\\n get_hash( page ),'review', self.task.instance_data.get('update'),\\\n parent_list=[self.task.instance_data['uri']])\n if not result['updated']:\n log.exception(self.log_msg('Update session info returns False'))\n return True\n page['parent_path'] = [self.task.instance_data['uri']]\n page['path'] = [self.task.instance_data['uri'], self.currenturi]\n page['uri'] = self.currenturi\n page['entity'] = 'review'\n page['uri_domain'] = urlparse.urlparse(page['uri'])[1]\n page.update(self.__task_elements_dict)\n self.pages.append(page)\n #log.info(page)\n log.info(self.log_msg('page added %s'%self.currenturi))\n return True\n except:\n log.exception(self.log_msg('Error while adding session info'))\n return False", "def test_update_asset(self):\n pass", "def add_rss(url):", "def review_add(request):\n result = {}\n\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n\n if p is None:\n 
result[\"result\"] = '0'\n elif TransactionLineItem.objects.filter(transaction__party=u, product=p).count() > 0:\n # need to check if I bought this item\n\n r, created = Review.objects.get_or_create(reviewer=u, product=p)\n r.content =request.POST['content']\n r.rating=int(request.POST['rating'])\n\n # reply to review request\n rto = request.POST.get('reply_to', None)\n if rto:\n rev_request = ReviewRequest.objects.get(id=int(rto))\n r.reply_to.add(rev_request)\n # change wish item review status to review=2\n for w in Wishlist.objects.filter(product=p, party=rev_request.requester):\n w.review = Wishlist.REVIEW_RESPONDED\n w.save()\n \n r.public = bool(request.POST['public'])\n r.save() \n\n # add a feed\n f = Feed(actor=u, action=Feed.REVIEWED, product=p) \n f.save()\n \n result[\"result\"] = str(r.id)\n else:\n result['result'] = '-1'\n\n return JSONHttpResponse(result)", "def collect_links(self, env=None):\n for asset in self.assets.values():\n if asset.has_bundles():\n asset.collect_files()\n if env is None:\n env = self.config.env\n if env == static_bundle.ENV_PRODUCTION:\n self._minify(emulate=True)\n self._add_url_prefix()", "def addLinks(self, data, package):\n self.db.addLinks(data, package, OWNER)\n self.evm.dispatchEvent(\"packageUpdated\", package)", "def add_item(self, item):\n if item.get_type() == ebooklib.ITEM_STYLE:\n self.add_link(href=item.get_name(), rel=\"stylesheet\", type=\"text/css\")\n\n if item.get_type() == ebooklib.ITEM_SCRIPT:\n self.add_link(src=item.get_name(), type=\"text/javascript\")" ]
[ "0.59623325", "0.5327148", "0.51797146", "0.5134346", "0.49140236", "0.48908433", "0.4881912", "0.48579592", "0.4842742", "0.48399702", "0.4804743", "0.47892767", "0.47684696", "0.47508633", "0.4741797", "0.47353303", "0.47250083", "0.46850595", "0.46558216", "0.4654844", "0.46091422", "0.46083573", "0.457866", "0.45725244", "0.45676255", "0.4556471", "0.45552728", "0.45372206", "0.45190123", "0.4515364" ]
0.74828833
0
Get items from a single review link.
def get_review_link_items(self, link_id): endpoint = '/review_links/{}/items'.format(link_id) return self._api_call('get', endpoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_review_page(review_link):\n\n session = r.Session()\n response = session.get(BASE_URL + '/music/albumreviews/' + review_link,\n headers=HEADERS)\n return response", "def get_reviews(item_id, shop_id, review_num=10) -> list:\n get_url = f\"{_shopee_base_url}/api/v2/item/get_ratings?filter=0&flag=1&itemid={item_id}&limit={review_num}&offset=0&shopid={shop_id}\"\n r = requests.get(get_url, headers=_user_agent_header, proxies=proxy_dict)\n ratings = r.json()['data']['ratings']\n reviews = []\n for rating in ratings:\n reviews.append({\n 'origin': 'Shopee',\n 'author': rating['author_username'],\n 'rating': rating['rating_star'],\n 'review': rating['comment'], \n 'review_likes': rating['like_count'],\n 'summary': 'Summary is very nice. Amazing!'\n })\n return reviews", "def get_reviews(rest_link):\n\tfilename = rest_link.split('/')[-1]\n\n\tcontents = None\n\n\tif contents is None:\n\t\tstart = time()\n\t\tdriver = init_chromedriver()\n\t\tdriver.get(rest_link + '/reviews')\n\n\t\t# print('There are {} reviews'.format(self.review_count))\n\n\t\t# click on the button 'All reviews'\n\t\tsleep(5)\n\t\tdriver.execute_script(\"window.scrollBy(0, 950);\")\n\t\twhile(1):\n\t\t\ttry:\n\t\t\t\tel = driver.find_element_by_css_selector('#selectors > a.item.default-section-title.everyone.empty')\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el).click(el).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\t\t\n\t\t\texcept (NoSuchElementException):\n\t\t\t\tbreak\n\t\t\tbreak\n\n\t\tsleep(5)\t\n\t\tload_more = '#reviews-container > div.notifications-content > div.res-reviews-container.res-reviews-area > div > div > div.mt0.ui.segment.res-page-load-more.zs-load-more > div.load-more.bold.ttupper.tac.cursor-pointer.fontsize2'\n\t\tsleep(5)\n\t\twhile element_present(driver, load_more):\n\t\t\ttry:\n\t\t\t\tel2 = driver.find_element_by_css_selector(load_more)\n\t\t\t\tdriver.execute_script(\"return arguments[0].scrollIntoView();\", el2)\n\t\t\t\tdriver.execute_script(\"window.scrollBy(0, -150);\")\n\t\t\t\tsleep(0.5)\n\t\t\t\twebdriver.ActionChains(driver).move_to_element(el2).click(el2).perform()\n\t\t\texcept TimeoutException:\n\t\t\t\tcontinue\n\t\t\texcept (StaleElementReferenceException, NoSuchElementException):\n\t\t\t\tbreak\n\n\t\tsource = get_source(driver)\n\t\tdriver.quit()\n\n\telse:\n\t\tprint('Using cached page')\n\t\tsource = contents\n\n\tsoup = source_to_soup(source)\n\t#review_blocks = soup.find_all('div', class_=re.compile('ui segments res-review-body'))\n\n\treview_blocks = (soup.find_all('div', class_='ui segment clearfix brtop '))\n\tif len(review_blocks) == 0:\n\t\tprint('Error in parsing reviews...\\n Review blocks size is 0\\n')\n\t\twith open('not_parsed','a+') as f:\n\t\t\tf.write(rest_link)\n\t\treturn\n\tprint('Loaded {} reviews'.format(len(review_blocks)))\n\n\n\tlastreview = filename + '_last'\n\n\twith open(filename,'a+', encoding='utf-8') as f:\n\n\t\treviews = []\n\t\ti = start\n\t\tmy_str = None\n\t\tfor review in review_blocks[:]:\n\t\t\ttry:\n\t\t\t\tname_and_link = review.find('div', class_='header nowrap ui left')\n\t\t\t\t# print(name_and_link.contents)\n\n\t\t\t\tu_link = name_and_link.contents[1].attrs['href']\n\t\t\t\tu_entity_id = int(name_and_link.contents[1].attrs['data-entity_id'])\n\t\t\t\tu_name = name_and_link.contents[1].contents[0].strip()\n\t\t\t\t# print(u_name)\n\n\t\t\t\ttup = (u_name,u_entity_id)\n\t\t\t\t#userset.add(tup)\n\n\t\t\t\tuserset.add(u_link)\t\t\t\n\t\t\t\trating_and_rev_text = review.find('div', 
text='Rated')\n\t\t\t\tcomment_time = review.find('time').attrs['datetime']\n\t\t\t\trating = float(rating_and_rev_text.attrs['aria-label'].split()[-1])\n\t\t\t\treview_text = rating_and_rev_text.parent.contents[2].strip()\n\t\t\t\t#f.write('Review number '+str(my_ctr)+'\\n')\n\n\t\t\t\tif my_str is None:\n\t\t\t\t\tmy_str=comment_time\n\n\t\t\t\tf.write(str(comment_time)+'\\n')\n\t\t\t\tf.write(u_name+'\\n')\n\t\t\t\tf.write(str(u_entity_id)+'\\n')\n\t\t\t\tf.write(str(rating)+'\\n')\n\t\t\t\tf.write(review_text+'\\n\\n##\\n\\n')\n\t\t\t\tcomm_file = filename + 'last_review_date'\n\n\t\t\t\twith open (comm_file,'w') as myfile200:\n\t\t\t\t\tmyfile200.write(my_str)\n\t\t\t\t\n\t\t\texcept:\n\t\t\t\tpass\n\t\t\ti += 1", "def _parse_reviewers(self, content):\n soup = bs(content, ['fast', 'lxml'])\n table = soup.find('table', {'id': 'productReviews'})\n reviewers = [link['href'] for link in table.findAll('a')\\\n if link.contents == ['See all my reviews']]\n return reviewers", "def get_review_info(link):\n\n html = requests.get(link)\n page = bs(html.text, 'lxml')\n try: name = page.find_all(\"div\", {\"class\": \"_1hkogt_o\"})[0].h1.text\n except: name = link\n\n review_html = page.find_all('div', {'class': 'review-container'})\n ratings = []\n reviews = []\n dates = []\n user_names = []\n\n for container in review_html:\n num_reviews = container.find(\"span\", {\"class\": \"badgeText\"})\n try:\n num_reviews = int(num_reviews.text.split()[0])\n except:\n continue\n\n if num_reviews >= 1:\n\n review = container.find(\"div\", {\"class\": \"ui_column is-9\"})\n rating = review.span['class'][1].split('_')[1]\n rating = int(rating)\n\n text_review = review.find('p', {'class': 'partial_entry'})\n try: text_review = text_review.text\n except: continue\n\n date = review.find('div', {'class': 'prw_rup prw_reviews_stay_date_hsx'})\n try: date = date.text.split(':')[1][1:]\n except: continue\n\n user_name = container.find(\"div\", {\"class\": \"info_text pointer_cursor\"})\n try: user_name = user_name.text\n except:continue\n\n ratings.append(rating)\n reviews.append(text_review)\n dates.append(date)\n user_names.append(user_name)\n\n data = pd.DataFrame(\n {'user_name': user_names, 'rating': ratings, 'review': reviews, 'date': dates, 'restaurant': name})\n return data", "def get_review(review_id):\n return get(cls, review_id)", "def one_review(review_id=None):\n if review_id:\n for item in storage.all(Review).values():\n if review_id == item.id:\n return (jsonify(item.to_dict()))\n abort(404)", "def get_item_reviews(self, soup: BeautifulSoup) -> None:\n try:\n reviews = soup.find(\"span\", class_=\"_a7a5sx\").get_text()\n reviews = re.findall(\"[0-9]+\", reviews)[0]\n except AttributeError:\n reviews = None\n self.__collected_dic[\"reviews\"].append(reviews)", "def getitem(itemID):\n\n return harvest(GET_ITEM_URL, itemID)", "def get_review_link(self, link_id, **kwargs):\n endpoint = '/review_links/{}'.format(link_id)\n return self._api_call('get', endpoint, payload=kwargs)", "def get_items():\n return requester.perform_request(Uri.items)", "def single_crawl(self, urlitem: str):\n # print(\"Item: \", urlitem)\n try:\n hdr = {\n \"User-Agent\": \"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36 \",\n \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\",\n \"Accept-Charset\": \"ISO-8859-1,utf-8;q=0.7,*;q=0.3\",\n \"Accept-Encoding\": \"none\",\n \"Accept-Language\": \"en-US,en;q=0.8\",\n \"Connection\": \"keep-alive\",\n }\n 
try:\n req = Request(urlitem, headers=hdr)\n html_page = urlopen(req)\n soup = BeautifulSoup(html_page, \"lxml\")\n links = [\n requests.compat.urljoin(urlitem, link.get(\"href\"))\n for link in soup.findAll(\"a\")\n ]\n links = [x for x in links if \"#\" not in x]\n except Exception as e:\n # print(e)\n pass\n return links\n\n except:\n pass", "def fetch_reviews(self, rb_id, start=0, max_results=25):\r\n return self.api_call('/api/review-requests/%s/reviews/?start=%s&max-results=%s'\r\n % (rb_id, start, max_results))['reviews']", "def get_reviews(business_id):\n\n reviews_path = BUSINESS_PATH + business_id + '/reviews'\n\n return request(reviews_path)", "def parse_links(html):\n\n soup = BeautifulSoup(html, 'lxml')\n content_cards = soup.find_all('a', {'class': 'content-card-link'})\n review_links = [cc.get('href') for cc in content_cards]\n review_links = [review_link.split('/')[-1] for review_link in review_links]\n return review_links", "def program_item(url):\n items = []\n \n soup = abcradionational.get_soup(url)\n\n playable_podcast = abcradionational.get_playable_podcast(soup)\n\n items = abcradionational.compile_playable_podcast(playable_podcast)\n\n return items", "def get_item(self, item_id: int, category: str = None):\n if category:\n # ensuring that it will be in lowercase\n category = category.lower()\n\n if not category or not category in self.item_categories:\n # Assuming that if category isnt set, we are searching for anime\n category = \"anime\"\n\n search_url = f\"{SITE_URL}/{self.item_categories[category]}/{item_id}\"\n\n return self.fetch_url(search_url)", "def get_review(self, id_):\n cursor = self._connection.cursor()\n select_command = make_select_command(\"reviews\")\n select_command += \" WHERE id_ = ?\"\n cursor.execute(select_command, (id_,))\n for row in cursor:\n return expandable_from_tuple(row, FIELD_DESCRIPTIONS) \n return None", "def get_review(self, id):\n endpoint = '/v3/educator/reviews/%s' % id\n result = self.request(endpoint)", "def subject_item(url):\n soup = abcradionational.get_soup(url)\n \n playable_podcast = abcradionational.get_playable_podcast(soup)\n\n items = abcradionational.compile_playable_podcast(playable_podcast)\n\n\n return items", "def get_item_with_href(self, href):\n for item in self.get_items():\n if item.get_name() == href:\n return item\n\n return None", "def fetch(self):\n try:\n self.genre = 'Review'\n log.debug(self.log_msg(\"Fetching the prouct page url %s\"%self.currenturi))\n res=self._getHTML(self.currenturi) # Assuming self.currenturi is at the product page\n self.rawpage=res['result']\n self._setCurrentPage()\n try:\n self.parent_page_title = stripHtml(self.soup.find('h1',{'id':'pgTitleDetail'}).renderContents())\n except:\n self.parent_page_title =''\n try:\n self.__product_price = self.soup.find('tbody',{'class':'prices'}).td.renderContents().replace('$','')\n except:\n log.exception(\"Error in fetching product_price\")\n self.__product_price = None\n\n parent_page_url = self.task.instance_data['uri']\n review_first_page_url = self.soup.find('a',text=\"Show All Customer Reviews &#187; \").parent['href']\n review_url_order = \"&sortReviewsBy=DateDescending\"\n self.currenturi = self.base_url + review_first_page_url + review_url_order\n log.info(self.log_msg('current_uri :: %s'%(self.currenturi)))\n self._getParentPage()\n self.next_url_links=[]\n self.fetch_next_link = True\n while self.fetch_next_link:\n self._iterateReviewPages(parent_page_url)\n return True\n except Exception,e:\n 
log.exception(self.log_msg(\"Exception occured in fetch()\"))\n return False", "def get_reviews(review_url):\n print review_url\n html = urllib.urlopen(review_url).read()\n soup = bs4.BeautifulSoup(html, 'html.parser')\n\n rating_scores = soup.findAll(\"span\", \"ratingScore\")\n num_ratings = len(rating_scores) - 1\n\n current_reviews = soup.findAll(\"div\", \"currentVintageProfessinalReviews\")\n num_cur_reviews = str(current_reviews).count('ratingProvider')\n past_reviews = soup.findAll(\"ul\", \"pastVintagesProfessionalReviews\")\n num_past_reviews = str(past_reviews).count('ratingProvider')\n\n print 'There are {0} reviews for prior vintages of this wine.'.format(num_past_reviews)\n print 'There are {0} current reviews for this vintage.\\n'.format(num_cur_reviews)\n\n rating_provider = soup.findAll(\"span\", \"ratingProvider\")\n rating_score = soup.findAll(\"span\", \"ratingScore\")\n reviewers = re.findall('(?<![A-Z])[>]([A-Z]+(?![A-Z]))', str(rating_provider))\n ratings = re.findall('(?<![A-Z])[0-9]{2}(?![A-Z])', str(rating_score))\n\n print \"Ratings List:\", ratings\n print \"Current Reviews: \", num_cur_reviews\n\n currentreviews = []\n for j in range(num_cur_reviews):\n print \"Current Review #\"+str(j+1)+\":\", reviewers[j], ratings[j]\n currentreviews.append((reviewers[j], ratings[j]))\n print currentreviews\n\n print \"\\nPast Reviews: \", num_past_reviews\n past_review_ratings = []\n for k in range(num_cur_reviews, num_past_reviews+num_cur_reviews):\n #print \"Past Review #\"+str(k-num_cur_reviews+1)+\":\", reviewers[k], int(ratings[k])\n past_review_ratings.append(float(ratings[k]))\n if k > 30:\n break\n if num_past_reviews != 0:\n avg_past_reviews = sum(past_review_ratings)/len(past_review_ratings)\n round(avg_past_reviews, 2)\n else:\n avg_past_reviews = 0\n\n print \"Average of Past Reviews: \", avg_past_reviews\n\n return currentreviews, avg_past_reviews", "def extract_reviews(url, review_count):\n\n api_url = url + \"%3Fstart%3D40\"\n\n html_obj = retrieve_html(url)\n\n review_list = parse_page(html_obj)\n\n result = review_list\n\n num_pages = review_count // 20 + 1\n\n for i in range(1, num_pages):\n curr_offset = i * 20\n curr_url = api_url + \"&start=%d\" % curr_offset\n\n curr_page_reviews = parse_page(retrieve_html(curr_url)[1])\n\n result += curr_page_reviews\n\n return result", "def _star_reviewers(self, star_num, page_num):\n one_star_url = self._star_reviews_url(star_num, page_num)\n req = Request(one_star_url, headers=self.firefox)\n content = urlopen(req).read()\n return self._parse_reviewers(content)", "def get_one_item_at_a_time(url, additional_params=None, session=None):\n query_params = {'page': 1, 'per_page': 100}\n query_params.update(additional_params or {})\n req = session or requests\n response = req.get(url, headers=get_headers(), params=query_params)\n response.raise_for_status()\n yield from response.json()\n\n pages_count = get_pages_count(response.links)\n while query_params['page'] < pages_count:\n query_params['page'] += 1\n response = req.get(\n url, headers=get_headers(), params=query_params,\n )\n response.raise_for_status()\n yield from response.json()", "def get_reviews(bearer_token, business_id):\n reviews_path = BUSINESS_PATH + business_id + '/reviews'\n\n return request(API_HOST, reviews_path, bearer_token)", "def get_item_detail(item_id):\n pass", "def get_review(review_id):\n obj = storage.get(Review, review_id)\n if obj is None:\n abort(404)\n return jsonify(obj.to_dict())", "def item_view_reviews(request):\n\n result = {}\n u = 
request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n if p is not None:\n # product details are not needed\n #result = p.details(u)\n\n reviews = Review.objects.filter(product=p).exclude(reviewer=u)\n result['count'] = str(reviews.count())\n result['reviews'] = [r.get_json(me=u) for r in reviews]\n else:\n result['result'] = '0'\n\n return JSONHttpResponse(result)" ]
[ "0.6445965", "0.6111887", "0.59627175", "0.59522223", "0.5802556", "0.5668858", "0.56402814", "0.56334907", "0.5598594", "0.5478629", "0.5376781", "0.5351566", "0.5350795", "0.534827", "0.53360826", "0.5320217", "0.52967834", "0.5288006", "0.52783906", "0.52698594", "0.5253392", "0.5231493", "0.5175653", "0.51122576", "0.511061", "0.5093686", "0.5091866", "0.50821376", "0.50772876", "0.5061385" ]
0.70730585
0
Given an imaging server fqdn, get its ID; raises NotFound if not found.
def get_id(self, fqdn): res = self.db.execute(sqlalchemy.select([ model.imaging_servers.c.id ], whereclause=(model.imaging_servers.c.fqdn==fqdn))) return self.singleton(res)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def id(self):\n if self.cloudserver:\n return self.cloudserver.id\n else:\n return None", "def get_dnid_by_dnname(self, dnname):\r\n for dn in self.dns:\r\n if dn.name == dnname:\r\n return dn.id\r\n return None", "def fqdn_identifier(fqdn):\n return messages.Identifier(\n typ=messages.IDENTIFIER_FQDN, value=fqdn)", "def get_domain_ip_via_sni(self, path_tracefile, domain):\n packets = self.get_client_hello_packets(path_tracefile)\n for packet in packets:\n servername = self.get_client_hello_servername(packet)\n if servername == domain:\n ip = packet.getlayer(IP).dst\n return ip\n return -1", "def instanceid_lookup(session, hostname):\n if session is None:\n return None\n\n client = session.client('ec2')\n response = client.describe_instances(\n Filters=[{\"Name\": \"tag:Name\", \"Values\": [hostname]}])\n\n item = response['Reservations']\n if len(item) == 0:\n return None\n else:\n item = item[0]['Instances']\n if len(item) == 0:\n return None\n else:\n item = item[0]\n if 'InstanceId' in item:\n return item['InstanceId']\n return None", "def _getHostname(fqdn):\n\treturn fqdn.split('.')[0]", "def findLocalImage(client, name):\n try:\n image = client.images.get(name)\n except Exception:\n return None\n return image.id", "def _GetIdFromInstanceDirStr(instance_dir):\n match = _RE_LOCAL_INSTANCE_ID.match(instance_dir)\n if match:\n return match.group(\"ins_id\")\n\n # To support the device which is not created by acloud.\n if os.path.expanduser(\"~\") in instance_dir:\n return \"1\"\n\n return None", "def get_image_id(self, image_name):\n _url = \"http://\" + self.host_ip + \":8774/v2/\" +\\\n self.cloud_admin_info[\"project_id\"] + \"/images/detail\"\n _headers = {'Content-type': 'application/json',\n 'x-auth-token': self.cloud_admin_info[\"token_project\"]}\n _body = None\n\n _result = self.request(\"GET\", _url, _headers, _body)\n if _result is None:\n LOG_OBJ.error(\"No response from server while getting images.\")\n return\n if _result.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get image ID Failed with status %s \" %\n _result.status)\n return _result.status\n\n _output = json.loads(_result.data)\n for _images in _output['images']:\n if _images['name'].lower() == image_name.lower():\n LOG_OBJ.info(\"Image Name: %s, Image ID : %s \" %\n (image_name, _images['id']))\n return _images['id']\n LOG_OBJ.error(\"The image: %s is NOT found\" % image_name)", "def get_id(self):\n try:\n return self.inst.query('*IDN?')[:36]\n except errors.VisaIOError as e:\n logger.warning(e)\n return 'Device not connected.'", "def get_fqdn_ip():\n hn = 'localhost'\n try:\n hn = socket.getfqdn()\n except Exception:\n pass\n\n return hn, socket.gethostbyname(hn)", "def get_fs_id_from_filesystem(self, filesystem, nas_server):\n\n is_valid_uuid = utils.name_or_id(filesystem)\n try:\n if is_valid_uuid == \"NAME\":\n # Get the filesystem details using name\n nas_server_id = nas_server\n if nas_server is not None:\n is_valid_uuid = utils.name_or_id(nas_server)\n if is_valid_uuid == \"ID\":\n nas_server_id = self.get_nas_server_id(\n nas_server_id=nas_server)\n else:\n nas_server_id = self.get_nas_server_id(\n nas_server_name=nas_server)\n else:\n error_msg = \"Please provide NAS Server details along \" \\\n \"with filesystem\"\n LOG.error(error_msg)\n self.module.fail_json(msg=error_msg)\n\n fs = self.provisioning.get_filesystem_by_name(\n filesystem_name=filesystem, nas_server_id=nas_server_id)\n if fs:\n return fs[0]['id']\n else:\n # Get the filesystem details using id\n fs = 
self.provisioning.get_filesystem_details(filesystem)\n return fs['id']\n\n error_msg = \"Filesystem {0} not found on the array.\".format(\n filesystem)\n LOG.error(error_msg)\n self.module.fail_json(msg=error_msg)\n except Exception as e:\n error_msg = \"Failed to get the filesystem {0} by name with \" \\\n \"error {1}\".format(filesystem, str(e))\n LOG.error(error_msg)\n self.module.fail_json(msg=error_msg)", "def _get_id(mf, url=None):\n\n\tprops = mf['properties']\n\n\tif 'uid' in props:\n\t\treturn props['uid'][0]\n\telif 'url' in props:\n\t\treturn props['url'][0]\n\telse:\n\t\treturn None", "def get_server_id():\n with open(\"data.json\", \"r\") as file:\n data = json.load(file)\n\n id = data[\"server_id\"]\n\n return id", "def get_keystone_v3_domain_id(self, domain_name):\n LOG_OBJ.debug(\"Get the domain ID.\")\n\n _url = \"http://\" + self.host_ip + \":35357/v3/domains?name=\" + \\\n str(domain_name)\n _headers = {'x-auth-token': self.cloud_admin_info[\"token_domain\"],\n 'content-type': 'application/json'}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server while getting the \"\n \"ID of domain\")\n print (\"No response from Server while getting the \"\n \"ID of domain\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get domain ID Failed with status %s and error \"\n \": %s\" % (response.status, response.data))\n print (\"Get domain ID Failed with status %s and error : %s\" %\n (response.status, response.data))\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Domain details : %s \" % output)\n if len(output['domains']) != 1:\n LOG_OBJ.debug(\"No. of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n print(\"No. 
of domains with name %s is %s\"\n % (domain_name, len(output['domains'])))\n return\n\n return output['domains'][0]['id']", "def get_imageId_from_fackmask(filename):\n filename = os.path.splitext(filename)[0]\n regex = re.compile(r'\\d+')\n iid = regex.search(filename).group(0)\n image_id = int(iid)\n if filename.isdigit():\n return int(filename)\n return image_id", "def get_record_id(self):\n subdomain, record_id = self.key().name().split(':', 1)\n return record_id", "def get_cross_id(self, entrez_id, xref_db):\n \n try:\n entrez_id = int(entrez_id)\n except ValueError:\n raise ValueError(\"entrez_id must be an integer\")\n\n self.cursor.execute(\"\"\"\n SELECT entrez_id\n FROM gene_xrefs\n WHERE Xref_db = %(db)s\n AND entrez_id = %(eid)s\"\"\", {'db': xref_db, 'eid': entrez_id})\n row = self.cursor.fetchone()\n if row is not None:\n return row[0]\n \n raise KeyError(\"Unable to find an external identifer for database \" + \\\n \"%s using Entrez ID %d\" % (xref_db, entrez_id))", "def _get_sd_id(name):\n cohesity_client = _get_client()\n log.info(\"Getting storage domain with name %s\", name)\n resp = cohesity_client.view_boxes.get_view_boxes(names=name)\n if resp:\n return resp[0].id", "def get_internal_host(self):\n prefer_internal_ip = self.charm_config.get(\"prefer-internal-ip\")\n fqdn = socket.getfqdn()\n ip = socket.gethostbyname(fqdn)\n if prefer_internal_ip:\n return ip\n return fqdn", "def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None", "def get_serverid(self):\n return self.get_ipv4_serverid()", "def get_image_ref() -> str:\n images_rq = request(\n method=\"GET\", url=app.config[\"IMAGE_REF\"], headers=build_header(),\n )\n if not images_rq.ok:\n HTTPError(f\"Can not get image id for virtual machine: {images_rq.status_code}\")\n\n [image] = images_rq.json()[\"images\"]\n return image[\"id\"]", "def server_id(self) -> str:\n return pulumi.get(self, \"server_id\")", "def server_id(self) -> str:\n return pulumi.get(self, \"server_id\")", "def get_image_id_by_name(self, image_identifier):\n try:\n filters = {'name': image_identifier}\n image_list = self.client().images.find(**filters)\n except sahara_base.APIException as ex:\n raise exception.Error(\n _(\"Error retrieving image list from sahara: \"\n \"%s\") % six.text_type(ex))\n num_matches = len(image_list)\n if num_matches == 0:\n raise exception.EntityNotFound(entity='Image',\n name=image_identifier)\n elif num_matches > 1:\n raise exception.PhysicalResourceNameAmbiguity(\n name=image_identifier)\n else:\n return image_list[0].id", "def get_imageId_from_fileName(filename):\n filename = os.path.splitext(filename)[0]\n if filename.isdigit():\n return int(filename)\n return id_iter", "def get_instance_id():\n global _instance_id\n if _instance_id == '__unset':\n try:\n _instance_id = _fetch_instance_id()\n except IOError:\n log.exception(\"Exception retrieving InstanceId\")\n _instance_id = None\n\n return _instance_id", "def get_Entrez_id(gid,conn):\n\n get_Entrez = ('SELECT DISTINCT dx.accession '\n 'FROM feature f, feature_dbxref fd, db, dbxref dx '\n 'WHERE f.feature_id = fd.feature_id AND fd.dbxref_id = dx.dbxref_id '\n 'AND dx.db_id = db.db_id AND db.name = \\'EntrezGene\\' AND '\n 'fd.is_current = \\'t\\' AND f.uniquename = %s')\n Entrez_id = connect(get_Entrez,gid,conn)\n if Entrez_id:\n id = Entrez_id[0][0]\n else:\n id = None\n return(id)", "def get_id(endpoint):\n _entity, _id = parser_endpoint(endpoint)\n\n return _id" ]
[ "0.60768455", "0.5970655", "0.5924284", "0.5919874", "0.58456326", "0.5822033", "0.5815973", "0.580934", "0.5719458", "0.56800956", "0.56767", "0.566704", "0.5608655", "0.55602825", "0.5556568", "0.55522054", "0.5534244", "0.55294096", "0.5518612", "0.5512121", "0.5504768", "0.549888", "0.5496569", "0.54896146", "0.54896146", "0.54602545", "0.54552835", "0.5452534", "0.5449368", "0.54488885" ]
0.7955763
0
Return a list of the fqdn's of all imaging servers
def list(self): res = self.db.execute(select([model.imaging_servers.c.fqdn])) return self.column(res)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ns_list(self):\n return sorted(self.get_ns_name(ns) for ns in self.profile.authoritative_servers)", "def ip_addresses(self):\n try:\n return socket.gethostbyaddr(self.fqdn)[-1]\n except socket.error as _:\n return ['127.0.0.1']", "def list_distributed_cameras(ns_host=None, metadata=None):\n with get_running_nameserver() as name_server:\n camera_uris = name_server.yplookup(meta_all=metadata)\n camera_uris = {k: v[0] for k, v in camera_uris.items()}\n logger.debug(f\"Found {len(camera_uris)} cameras on name server.\")\n return camera_uris", "def list_distributed_cameras(ns_host=None, metadata=None):\n with get_running_nameserver() as name_server:\n camera_uris = name_server.yplookup(meta_all=metadata)\n camera_uris = {k: v[0] for k, v in camera_uris.items()}\n logger.debug(f\"Found {len(camera_uris)} cameras on name server.\")\n return camera_uris", "def list_domain_names():\n pass", "def get_dns_servers(self):\n self.__not_implemented()", "def list_images(self):\n \n logging.debug(\"list_images entered for %s\" % self.machine_name) \n snapshots = cs.list_snapshots()\n res = []\n server_id = self.cloudserver.id\n # find the one for this server\n for snapshot in snapshots:\n img = snapshot.metadata.get(\"instance_uuid\", None)\n # print img\n\n if img == server_id:\n print \"Server %s has snapshot %s\" % (server_id, img)\n res.append(img)\n\n return res", "def discover_servers():\n servers = set()\n for p in glob.glob1(SPDK_SERVER_APP_DIR, \"*\"):\n m = SERVERS_PATTERN.match(p)\n if m:\n servers.add(m.group())\n return list(servers)", "def get_dns_servers(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmDevNet_GetDnsServers', self.handle))", "def get_dns_servers(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfgNet_GetDnsServers', self.handle))", "def get_dns_servers(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfg_GetDnsServers', self.handle))", "def hostnames(self) -> Sequence[str]:\n return pulumi.get(self, \"hostnames\")", "def name_servers(self) -> Sequence[str]:\n return pulumi.get(self, \"name_servers\")", "def getFileCatalogHosts(thisExperiment):\n # Since FAX can download files from many sources, all hosts need to be queried for the replicas\n # In the case of ATLAS, TiersOfATLAS is used as a source of the hosts\n\n hosts_list = [thisExperiment.getFileCatalog()]\n\n tolog(\"Will extend file catalog host list\")\n hosts = thisExperiment.getFileCatalogHosts()\n if hosts != []:\n for host in hosts:\n if not host in hosts_list:\n hosts_list.append(host)\n else:\n tolog(\"(No additional hosts)\")\n\n tolog(\"File catalog host list: %s\" % str(hosts_list))\n\n return hosts_list", "def get_srv_list():\n srv_list = [splitext(basename(sock))[0] \\\n for sock in glob.glob(CEPH_SOCKET_PATH + \"*.asok\")]\n return srv_list", "def get_dns_list(self):\n return self.get_ipv4_dns_list()", "def server_names(self):\n return self._server_names", "def _get_server_prefixes(spec: Spec) -> t.List[str]:\n servers = spec.get(\"servers\")\n if not servers:\n return []\n\n prefixes = []\n for server in servers:\n path = urlparse(server[\"url\"]).path\n path = f\"/{path}\" if not path.startswith(\"/\") else path\n if path != \"/\":\n prefixes.append(path)\n return prefixes", "def get_servers(self):\n\t\treturn self.__servers", "def public_ip_dns(resolv, nameservers, rdatatype, server, responsetype):\n for ns in nameservers:\n try:\n answer = resolv.query(ns, rdatatype)\n nameserver = answer[0].to_text()\n except Exception as e:\n print(e)\n continue\n 
resolve_public_ip(nameserver, server, responsetype)", "def address(self):\n addrlist = []\n for s in self.srv_socks:\n addrlist.append(s.getsockname())\n return addrlist", "def get_all_servers(self) -> List[Server]:\n pass", "def get_urls(self):\r\n if self.mod.filename:\r\n return [x + self.mod.filename for x in self.mod.service.get_mirrors()]", "def list_servers():\n (code, message) = rest_api.list_servers(request)\n if (code == 200):\n return message\n else:\n abort(code)", "def hostnames(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"hostnames\")", "def ipaddrs( host ):\n return socket.gethostbyaddr(host)[2][0]", "def dns_server_ips(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"dns_server_ips\")", "def servers(self):\n return self._servers", "def get_dbservers(self):\n ret = []\n for i in self.all_instances:\n if i.is_dbserver():\n ret.append(i)\n return ret", "def getServersAddrs(i_ServerList):\n serverAddrList =[]\n\n with open(PLATFORMS_TO_SERVER_FILE, \"r\") as txtFile:\n data = txtFile.readlines()\n table = []\n filteredTable = []\n for line in data:\n if line.startswith(\"#\"):\n continue\n eachLine = line.split(\";\")\n table.append(eachLine)\n filteredTable.append([])\n for element in range(0, len(table)):\n filteredTable.append(table[element][0])\n\n with open(SERVERS_IP_PATH) as serversFile:\n serversFileLines = serversFile.readlines()\n for line in serversFileLines:\n if line[-1:] == '\\n':\n line = line[:-1]\n serverDetails = line.split(\",\")\n if (i_ServerList != True):\n if(serverDetails[0] in i_ServerList and serverDetails[0] in filteredTable):\n serverAddrList.append(serverDetails[1])\n else:\n if(serverDetails[0] in filteredTable):\n serverAddrList.append(serverDetails[1])\n \n return serverAddrList" ]
[ "0.6587762", "0.65380913", "0.6459177", "0.6459177", "0.6430747", "0.6372296", "0.6362174", "0.633885", "0.6327382", "0.6309077", "0.6303938", "0.62505966", "0.62255406", "0.6212317", "0.6186019", "0.6185925", "0.6149783", "0.6135975", "0.61257327", "0.60628873", "0.60275596", "0.599795", "0.5966829", "0.59667087", "0.5937396", "0.5920702", "0.5888082", "0.58459806", "0.58443415", "0.5814805" ]
0.77899426
0
Place various obstacles. E.g. put in rectangles which block the line of site of the towers.
def place_obstacles(): #Randomly generate different sized rectangles #Soem may overlap, which gives more variety in shape of obstacles xvals = np.random.randint(0,self.map_dimensions[1],size=self.N_obstacles) yvals = np.random.randint(0,self.map_dimensions[0],size=self.N_obstacles) lower_left = zip(xvals,yvals) rects = [] for LL in lower_left: x = LL[0] y = LL[1] wmax = self.map_dimensions[1] - x w = np.random.randint(0,wmax,size=1)[0] hmax = self.map_dimensions[0] - y h = np.random.randint(0,hmax,size=1)[0] rects += [(x,y,w,h)] self.coordinates__obstacles = rects
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_obstacles(self):\n for obstacle in self.obstacles:\n obstacle.draw(self.window, Colors.BLACK.value)", "def draw_obstacles():\n for obstacle in obstacles:\n plt.gca().add_patch(obstacle)", "def spawn_obstacles(self):\n self.obstacle_sprites.empty()\n number_of_obstacles = random.randint(MIN_OBSTACLES, MAX_OBSTACLES)\n while len(self.obstacle_sprites) < number_of_obstacles:\n obstacle = Obstacle(random.randrange(0, WIDTH), random.randrange(HEIGHT - 500, HEIGHT))\n obstacle_collision = pygame.sprite.spritecollide(obstacle, self.obstacle_sprites, False)\n if not obstacle_collision:\n self.obstacle_sprites.add(obstacle)", "def get_obstacles_map(obstacles, placed_pecies):\n \n #create a mask image to draw the obstacles on\n blocks = np.zeros(ARENA_SIZE[::-1], np.uint8)\n\n #get the grid points where the robot needs to placed\n grid = get_grid(ARENA_SIZE)\n\n #draw the obstacles and their safety region on the map\n for i in obstacles.keys():\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(obstacles[i][0]/4), i[1]-int(obstacles[i][1]/4)), (i[0]+int(obstacles[i][0]/4), i[1]+int(obstacles[i][1]/4)), 255, -1)\n\n #draw the obstacles and their safety region on the map\n for i in placed_pecies.keys():\n try:\n if not i == grid[5]:\n cv2.circle(blocks, i, int(CIRCULAR_SAFETY_FACTOR*BLOCK_SIZE[0]), 129, -1)\n else:\n cv2.rectangle(blocks, (int(i[0]-7.4*placed_pecies[i][0]/4), int(i[1]-7.4*placed_pecies[i][1]/4)),\n (int(i[0]+7.4*placed_pecies[i][0]/4), int(i[1]+7.4*placed_pecies[i][1]/4)), 129, -1)\n cv2.rectangle(blocks, (i[0]-int(placed_pecies[i][0]/4), i[1]-int(placed_pecies[i][1]/4)), (i[0]+int(placed_pecies[i][0]/4), i[1]+int(placed_pecies[i][1]/4)), 255, -1)\n except Exception as e:\n print(e)\n\n return cv2.bitwise_not(blocks)", "def getInitialObstacles():\n # hardcode number of blocks\n # will account for movemnet\n from random import choice\n from globals import TILEWIDTH, TILEHEIGHT, WINHEIGHT, TILEFLOORHEIGHT, LEVEL, HALFWINWIDTH\n\n no_of_blocks = 50\n for b in range(no_of_blocks // 2):\n # get image\n # image = globals.IMAGESDICT['rock']\n for y in range(1,5):\n image = globals.IMAGESDICT[choice(['ugly tree', 'rock', 'tall tree'])]\n # make rect\n spaceRect = pygame.Rect((b * TILEWIDTH, y * TILEFLOORHEIGHT, TILEWIDTH, TILEFLOORHEIGHT))\n landscape = Landscape(image, spaceRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n image = globals.IMAGESDICT['corner']\n negativeRect = pygame.Rect([-150, WINHEIGHT - TILEHEIGHT, TILEWIDTH, TILEHEIGHT])\n landscape = Landscape(image, negativeRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n image = globals.IMAGESDICT['corner']\n positiveRect = pygame.Rect([LEVEL[0] - TILEWIDTH, WINHEIGHT - TILEHEIGHT, TILEWIDTH, TILEFLOORHEIGHT])\n landscape = Landscape(image, positiveRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n bottomRect = pygame.Rect([HALFWINWIDTH, LEVEL[1] - TILEHEIGHT, TILEWIDTH, TILEFLOORHEIGHT])\n landscape = Landscape(image, bottomRect)\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n for x in range(0, LEVEL[0], 50):\n for y in range(10):\n image = globals.IMAGESDICT[choice(['ugly tree', 'rock', 'tall tree'])]\n spaceRect = pygame.Rect((x, LEVEL[1] - (y * TILEHEIGHT), TILEWIDTH, TILEFLOORHEIGHT))\n landscape = Landscape(image, spaceRect)\n if choice([0,1,0]):\n allLandscapeList.add(landscape)\n allSpriteList.add(landscape)\n\n\n return", "def _place_objs(self, (screen_width, 
screen_height)):\n for x_pos in xrange(0, screen_width, self.itter_width):\n self.objects.put(Grass((x_pos, 0), self.width, self.height))", "def obstacles_form(self,image):\r\n major_axis=60\r\n minor_axis=30\r\n c_y=246\r\n c_x=145\r\n c_y1=90\r\n c_x1=70\r\n radius=35\r\n for i in range(len(image)):\r\n for j in range(len(image[0])):\r\n\r\n #self.ellipse(image,major_axis,minor_axis,i,j,c_x,c_y)\r\n self.circle(image,100,i,j,200,200)\r\n self.circle(image,100,i,j,800,200)\r\n #self.slanted_rect(image,i,j)\r\n self.boundary(image,i,j)\r\n self.boundary1(image,i,j)\r\n self.boundary2(image,i,j)\r\n self.c_shape(image,i,j)\r\n #exploration.c_shape(image,i,j)\r", "def updateHardObstacles(self):\r\n global_obs = self.calcGlobalObstaclePosition([[10, 20],[10, 0],[10, -20]])\r\n self.globalHardObstaclesList.extend(global_obs)", "def obstacles(self):\r\n\r\n #Radious arround the head\r\n limit_sight = self.snake_sight\r\n head = self.body[0].position\r\n binary_map_complete = self.complete_mapping()\r\n map_matrix = np.matrix(binary_map_complete)\r\n obstacles = []\r\n\r\n #limits in all directions\r\n left_x = head[0] - limit_sight\r\n right_x = head[0] + limit_sight\r\n up_y = head[1] - limit_sight\r\n down_y = head[1] + limit_sight\r\n\r\n #submatrix with limits size\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:right_x+1]\r\n\r\n #Special cases where the snake approximates to the borders\r\n ##Corners\r\n if left_x < 0 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[temporal, snake_sight] \r\n return snake_sight\r\n \r\n if left_x < 0 and down_y > self.limits[1] - 1:\r\n snake_sight = map_matrix[up_y:self.limits[1], 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[temporal, snake_sight]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:self.limits[0]]\r\n 
interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n\r\n ##Middle\r\n if left_x < 0:\r\n snake_sight = map_matrix[up_y:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n return snake_sight\r\n\r\n if right_x > self.limits[0]-1:\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n return snake_sight\r\n\r\n if up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:right_x+1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[interval_y_matrix, snake_sight]\r\n return snake_sight\r\n \r\n if down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:right_x+1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[snake_sight, interval_y_matrix]\r\n return snake_sight\r\n\r\n return snake_sight", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x ** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in obstacle_manager:\n boxes.append(obstacle.get_bounding_box())\n\n for x, y, frame in boxes:\n 
draw_frame(canvas, x, y, frame)\n\n await Sleep(1)\n\n for x, y, frame in boxes:\n draw_frame(canvas, x, y, frame, negative=True)", "def update_obstacles(self, new_obs):\n self.obstacles = new_obs", "def setup_walls(self):\n self.wall_list = self.get_current_map().get_layer_by_name(\"Obstacles\").sprite_list", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in OBSTACLES:\n boxes.append(obstacle.dump_bounding_box())\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame)\n\n await asyncio.sleep(0)\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame, negative=True)", "def place_allowed_tower_sites():\n self.coordinates__tower_sites = []\n for tk in xrange(self.N_tower_kinds):\n #Each kind of tower will have the correct number of sites placed\n \n coords = []\n while len(coords)<self.N_tower_sites[tk]:\n x = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[1]+1-self.BORDER_MARGIN,size=1)[0]\n y = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[0]+1-self.BORDER_MARGIN,size=1)[0]\n p = (x,y) \n all_valid = True\n for rect in self.coordinates__obstacles:\n if not check_valid_placement(p,rect):\n all_valid = False\n break\n if all_valid:\n coords.append(p)\n self.coordinates__tower_sites.append(coords)", "def _blit_objects(self):\n\t\tself.clouds.blitme()\n\t\tself.floor.blitme()\n\t\tself.bolan.blitme()\n\t\tself.obstacles.blitme()\n\t\tself._blit_text()\n\t\tself.scoreboard.blitme()", "def add_obstacle(self, x, y):\n self.BOARD[y][x].traversable = False\n self.board_array[y][x] = 1", "def add_obstacle(self, x, y):\n self.BOARD[y][x].traversable = False\n self.board_array[y][x] = 1", "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def put_bomb(self):\n s = self\n if s.bombs == 0:\n return\n\n block = s.physics.blocks[s.stype][0]\n xinf = block.inf.x - block.inf.x % s.physics.len_blocks\n yinf = block.inf.y - block.inf.y % s.physics.len_blocks\n\n length = s.physics.len_blocks\n new_bomb = Rectangle(Vector(xinf, yinf), Vector(xinf + length, yinf + length))\n\n bombs = list()\n if 'bomb' in s.physics.blocks:\n bombs = s.physics.blocks['bomb']\n\n for bomb in bombs:\n if bomb.overlap(new_bomb):\n return\n\n bomb = Bomb(s, s.pjs, s.bombradius, s.fps, Vector(xinf, yinf))\n s.last_bomb = bomb\n s.pjs.add_bomb(bomb)\n s.bombs -= 1", "def graze(sprite) :\n x, y = sprite.pos\n choices = [(x,y), (x, y-1), (x, y+1), (x+1, y), (x-1, y)]\n random.shuffle(choices)\n obstacles = [at(p) for p in choices]\n visibility = [visible(p) for p in choices]\n\n for i in range(len(choices)):\n if obstacles[i] is None and visibility[i]:\n if choices[i] != (x, y):\n sprite.move((choices[i][0] - x, choices[i][1] - y))\n break", "def place_targets():\n\n \n coords = []\n while len(coords)<self.N_targets:\n x = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[1]+1-self.BORDER_MARGIN,size=1)[0]\n y = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[0]+1-self.BORDER_MARGIN,size=1)[0]\n p = (x,y)\n all_valid = True\n for rect in self.coordinates__obstacles:\n if not check_valid_placement(p,rect):\n all_valid = False\n 
break\n if all_valid:\n coords +=[p]\n self.coordinates__targets = coords", "def __generate_rectangle_obstacles(self, world):\n obs_min_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"min_dim\"]\n obs_max_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"max_dim\"]\n obs_max_combined_dim = self.cfg[\"obstacle\"][\"rectangle\"][\"max_combined_dim\"]\n obs_min_count = self.cfg[\"obstacle\"][\"rectangle\"][\"min_count\"]\n obs_max_count = self.cfg[\"obstacle\"][\"rectangle\"][\"max_count\"]\n obs_min_dist = self.cfg[\"obstacle\"][\"rectangle\"][\"min_distance\"]\n obs_max_dist = self.cfg[\"obstacle\"][\"rectangle\"][\"max_distance\"]\n\n # generate the obstacles\n obstacles = []\n obs_dim_range = obs_max_dim - obs_min_dim\n obs_dist_range = obs_max_dist - obs_min_dist\n num_obstacles = randrange(obs_min_count, obs_max_count + 1)\n\n test_geometries = [r.global_geometry for r in world.robots]\n while len(obstacles) < num_obstacles:\n # generate dimensions\n width = obs_min_dim + (random() * obs_dim_range )\n height = obs_min_dim + (random() * obs_dim_range )\n while width + height > obs_max_combined_dim:\n height = obs_min_dim + (random() * obs_dim_range )\n\n # generate position\n dist = obs_min_dist + (random() * obs_dist_range)\n phi = -pi + (random() * 2 * pi)\n x = dist * sin(phi)\n y = dist * cos(phi)\n\n # generate orientation\n theta = -pi + (random() * 2 * pi)\n\n # test if the obstacle overlaps the robots or the goal\n obstacle = RectangleObstacle(width, height, Pose(x, y, theta))\n intersects = False\n for test_geometry in test_geometries:\n intersects |= geometrics.convex_polygon_intersect_test(test_geometry, obstacle.global_geometry)\n if not intersects:\n obstacles.append(obstacle)\n return obstacles", "def create_bounds(self):\n # Bounds should be created for\n x0, y0 = (0, 0)\n x1 = self.game_area_size[0]\n y1 = self.game_area_size[1]\n space = self.space\n\n borders = [\n phy.Segment(space.static_body, phy.Vec2d(x0, y0), phy.Vec2d(x1, y0), 0),\n phy.Segment(space.static_body, phy.Vec2d(x0, y0), phy.Vec2d(x0, y1), 0),\n phy.Segment(space.static_body, phy.Vec2d(x1, y0), phy.Vec2d(x1, y1), 0),\n phy.Segment(space.static_body, phy.Vec2d(x0, y1), phy.Vec2d(x1, y1), 0),\n ]\n for b in borders:\n b.elasticity = 0.5\n self.space.add(borders)", "def updateObstacles(self, obstacles):\r\n global_obs = self.calcGlobalObstaclePosition(obstacles)\r\n self.globalObstaclesList.extend(global_obs)", "def recreate_obstacles(self):\n self.board_matrix = np.full(Dimension.board_size(), 1)\n self.obstacles = self.create_obstacles()", "def top_blit(self, x, y):\n xcoord = [int(math.ceil(x)), int(math.floor(x))]\n ycoord = [int(math.ceil(y)), int(math.floor(y)), int(math.ceil(y))+1]\n for i in xcoord:\n for j in ycoord:\n if (in_range(i,j)):\n if (self.blocks[i][j].image == Tree1):\n gameDisplay.blit(Tree1Part, self.blocks[i][j].realcoordinates)\n elif (self.blocks[i][j].image == Tree2):\n gameDisplay.blit(Tree2Part, self.blocks[i][j].realcoordinates)", "def place_object(self, thing):\n color = [i * 255 for i in thing.color.rgb]\n size = (20, 20)\n if thing.name == \"luna\":\n size = (5, 5)\n if self.is_visible(thing.position, max(size)):\n position = self.get_position(thing.position, size)\n pygame.draw.ellipse(self.screen, color, (position, size))", "def __init__(self, map_obstacle, main_graph):\n\n self.map_obstacle = map_obstacle\n self.main_graph = main_graph\n\n self.sight_range = self.calculate_sight_range()\n\n self.top_left_y = None\n self.top_left_x = None\n self.bottom_right_y = None\n 
self.bottom_right_x = None\n self.height = None\n self.width = None\n self.size = self.calculate_size()\n\n # nodes specific to this threat zone\n self.nodes = []", "def get_obstacles(image):\n\n ih, iw = image.shape[:2]\n image_copy = image.copy()\n\n #resize the image to the size of arena\n image = cv2.resize(image, ARENA_SIZE, interpolation=cv2.INTER_CUBIC)\n gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n\n #replace all black pixels to white pixels\n gray[np.where(gray == 0)]= 255\n\n #get the thresholded binary image\n ret,threshold = cv2.threshold(gray,200,255,cv2.THRESH_BINARY_INV)\n\n #find all the countours in the binary image\n _, contours, heiarchy = cv2.findContours(threshold, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n\n cont = []\n\n #create a mask to draw contours on\n blocks = mask = np.zeros(threshold.shape[:2], np.uint8)\n\n #create a dictionary to hold image roi of all puzzle peices\n blocks_roi = {}\n\n #iterate through all contours\n for i, c in enumerate(contours[1:]):\n\n #find the minimum area fitting rectangle of the contour\n rect = cv2.minAreaRect(c)\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n\n #create the copy of the mask\n mask_copy = mask.copy()\n\n #draw the rectangle on the mask\n cv2.drawContours(mask_copy, [box], -1, (255,255,255), 3)\n\n #floodfill the rectangle\n cv2.floodFill(mask_copy, None, (0,0), 255)\n mask_inv = cv2.bitwise_not(mask_copy)\n blocks = cv2.add(blocks, mask_inv)\n\n _, contours, heiarchy = cv2.findContours(blocks, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)\n\n obstacles = {}\n\n for c in contours:\n x,y,w,h = cv2.boundingRect(c)\n obstacles.update({(int(x+w/2), int(y+h/2)): BLOCK_SIZE})\n #obstacles.update({(int(x+w/2), int(y+h/2)): (w, h)}) # for unknown block sizes\n bottom_r = remap((x+w, y+h), ARENA_SIZE, (iw,ih))\n top_l = remap((x, y), ARENA_SIZE, (iw,ih))\n blocks_roi.update({(int(x+w/2), int(y+h/2)): image_copy[top_l[1]:bottom_r[1], top_l[0]:bottom_r[0]]})\n\n return obstacles, blocks_roi" ]
[ "0.7180668", "0.6907903", "0.66251636", "0.65257436", "0.6507786", "0.6495422", "0.64418054", "0.63769335", "0.6216832", "0.6200093", "0.6169618", "0.6096015", "0.6086622", "0.60711867", "0.601048", "0.60011566", "0.59804374", "0.59804374", "0.59792244", "0.5943572", "0.5933348", "0.5918852", "0.5910208", "0.5904036", "0.58658147", "0.5864696", "0.5849546", "0.5849514", "0.5847125", "0.5832528" ]
0.79376906
0
Place the target locations
def place_targets():
    coords = []
    while len(coords) < self.N_targets:
        x = np.random.randint(self.BORDER_MARGIN, self.map_dimensions[1]+1-self.BORDER_MARGIN, size=1)[0]
        y = np.random.randint(self.BORDER_MARGIN, self.map_dimensions[0]+1-self.BORDER_MARGIN, size=1)[0]
        p = (x, y)
        all_valid = True
        for rect in self.coordinates__obstacles:
            if not check_valid_placement(p, rect):
                all_valid = False
                break
        if all_valid:
            coords += [p]
    self.coordinates__targets = coords
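For context, a self-contained sketch of the same rejection-sampling idea. The standalone signature and the obstacle test are assumptions: the record above relies on a surrounding class and a check_valid_placement helper that are not shown, and each obstacle is taken to be an axis-aligned (x, y, w, h) rectangle.

import numpy as np

def check_valid_placement(p, rect):
    # Hypothetical helper: a point is valid if it does not fall strictly
    # inside the axis-aligned obstacle rectangle (x, y, w, h).
    x, y, w, h = rect
    return not (x < p[0] < x + w and y < p[1] < y + h)

def place_targets(n_targets, map_dims, margin, obstacles):
    # Rejection sampling: draw uniform integer points inside the border
    # margins and keep only those that avoid every obstacle.
    coords = []
    while len(coords) < n_targets:
        x = np.random.randint(margin, map_dims[1] + 1 - margin)
        y = np.random.randint(margin, map_dims[0] + 1 - margin)
        p = (x, y)
        if all(check_valid_placement(p, rect) for rect in obstacles):
            coords.append(p)
    return coords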
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _init_targets(self):\n for ga_main, ga_targ in zip(self.ga.variables, self.ga_.variables):\n ga_targ.assign(ga_main)\n if self.use_lyapunov:\n for lc_main, lc_targ in zip(self.lc.variables, self.lc_.variables):\n lc_targ.assign(lc_main)\n else:\n for q_1_main, q_1_targ in zip(self.q_1.variables, self.q_1_.variables):\n q_1_targ.assign(q_1_main)\n for q_2_main, q_2_targ in zip(self.q_2.variables, self.q_2_.variables):\n q_2_targ.assign(q_2_main)", "def setUp(self):\n self.location = [(0, 0), (0, 1)]\n self.hit = (0, 0)", "def add_path_target(self, paths):\n for p in paths:\n self.rg.set_target(p)\n self.rg.set_blocked(p,False)", "def replace_targets(self, target_roots):\r\n self._target_roots = list(target_roots)\r\n\r\n self._targets = OrderedSet()\r\n for target in self._target_roots:\r\n self.add_target(target)\r\n self.id = Target.identify(self._targets)", "def set_loc(self, moves):\n for move in moves:\n move.start_rank = self.location.rank\n move.start_file = self.location.file", "def setup_targets(self):\n neighbourhood_distance = self.k_max_node_id / 10.\n for i in range(self.min_peers):\n distance = random.randint(0, neighbourhood_distance)\n address = (self.id + distance) % (self.k_max_node_id + 1)\n tolerance = self.k_max_node_id / self.min_peers\n self.targets.append(dict(address=address, tolerance=tolerance, connected=False))", "def _update_targets(self):\n for ga_main, ga_targ in zip(self.ga.variables, self.ga_.variables):\n ga_targ.assign(self._polyak * ga_targ + (1 - self._polyak) * ga_main)\n if self.use_lyapunov:\n for lc_main, lc_targ in zip(self.lc.variables, self.lc_.variables):\n lc_targ.assign(self._polyak * lc_targ + (1 - self._polyak) * lc_main)\n else:\n for q_1_main, q_1_targ in zip(self.q_1.variables, self.q_1_.variables):\n q_1_targ.assign(self._polyak * q_1_targ + (1 - self._polyak) * q_1_main)\n for q_2_main, q_2_targ in zip(self.q_2.variables, self.q_2_.variables):\n q_2_targ.assign(self._polyak * q_2_targ + (1 - self._polyak) * q_2_main)", "def set_location(self, location_set):", "def add_locations(self):\n for _ in range(0, self.num_locations):\n detector_id = self.generate_id()\n detector_direction = self.generate_direction()\n detector_point = self.generate_point()\n self.dataset[detector_id] = (detector_direction, detector_point)\n assert len(self.dataset) == self.num_locations", "def prep_robot_target(self):\n x = int(self.robot.target_x)\n y = int(self.robot.target_y)\n target_str = f\"Target (X,Y): {str(x)}, {str(y)}\"\n # Prepare the image and positions it on the screen\n self.target_image = self.font.render(target_str, True, self.text_color, self.bg_color)\n self.target_rect = self.target_image.get_rect()\n self.target_rect.left = self.location_rect.left\n self.target_rect.top = self.location_rect.bottom + self.line_gap", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n current_location = self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n 
home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def run(self, locations):\n return locations", "def deploy(self):\n step = 10\n for i in range(0, self.x, step): \n for j in range(0, self.y, step):\n self._place_nodes(i,j, step, max_nodes = 3)", "def send_destination(self):\n\n print('send the target to the robot')\n move_base_action_goal=MoveBaseActionGoal()\n move_base_action_goal.goal.target_pose.header.frame_id=\"map\"\n move_base_action_goal.goal.target_pose.pose.orientation.w=1\n move_base_action_goal.goal.target_pose.pose.position.x=self.x_des\n move_base_action_goal.goal.target_pose.pose.position.y=self.y_des\n print('des_x='+str(self.x_des))\n print('des_y='+str(self.y_des))\n self.des_pub.publish(move_base_action_goal)", "def execute(self, targets):", "def prep_robot_location(self):\n x = int(self.robot.odo_x)\n y = int(self.robot.odo_y)\n o = round(self.robot.odo_o, 2)\n location_str = f\"Location (X,Y,O): {str(x)}, {str(y)}, {str(o)}\"\n # Prepare the image and positions it on the screen\n self.location_image = self.font.render(location_str, True, self.text_color, self.bg_color)\n self.location_rect = self.location_image.get_rect()\n self.location_rect.left = self.action_rect.left\n self.location_rect.top = self.action_rect.bottom + self.line_gap", "def move_buildings(self):", "def set_locations():\n STATUS['locations']['monster'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['monster'][1] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][1] = generate_random_coord(STATUS['grid_size'])", "def set_new_location(self, xPos, yPos):", "def targets_placeholder(self):", "def run(self, target):\n linear_dist = lambda x1, x2, y1, y2: math.sqrt((x1 - x2)**2 + \n (y1 - y2)**2)\n max_dist_to_target = linear_dist(self.x, target.x, \n self.y, target.y)\n possible_posn = [[1, 0], [-1, 0], [0, 1], [0, -1]]\n move_to_make = None\n\n for posn in possible_posn:\n if not self.handler.world.is_solid(self.x + posn[0], self.y + posn[1]):\n new_dist = linear_dist(self.x + posn[0], target.x, \n self.y + posn[1], target.y)\n if new_dist > max_dist_to_target:\n max_dist_to_target = new_dist\n move_to_make = posn\n\n if move_to_make:\n self.move(move_to_make[0], move_to_make[1])", "def move_to(self, destination_coords):\n self.x = destination_coords[0]\n self.y = destination_coords[1]\n return", "def setup_targets(self):\n for i in range(self.min_peers):\n self.targets.append(dict(address=0, tolerance=0, connected=False))\n # NOT IMPLEMENTED HERE", "def move(self):\n for agent in self.agents:\n if not agent.fidelity:\n options = agent.get_move_options(agent.hex, self.kernel_size, None, extend=True)\n target = random36.choices(population=options,weights=[x.quality**2 for x in options])\n agent.move(target[0])", "def calculate_target_location(self, alphas, epsilons, 
data_collected):\n if len(alphas) == 1:\n \tfor i in range(0, self.number_sampling_points-1):\n \t\talphas.append(alphas[0])\n \t\tepsilons.append(epsilons[0])\n\n # if self.target_location == None:\n # # dBm_list = []\n # # for sample in data_collected[0][3]:\n # # dBm_list.append(sample)\n\n # # average_dBm = sum(dBm_list) / float(len(dBm_list))\n # # radius_target_position = 10 ** ((average_dBm - self.epsilon) / self.alpha)\n # # ###TODO: fix radius_target_position\n # # if radius_target_position > self.altitude:\n # # horizontal_distance = sqrt((radius_target_position**2) - (self.altitude**2))\n # # else:\n # # horizontal_distance = 0\n\n # local_position = self.dc.read_gps()\n # local_coord = Coordinate(local_position.lat, local_position.lon)\n\n # first_emulated_target = local_coord.offset_toward_target(self.region.center(), DISTANCE_TO_TARGET)\n\n # self.log.debug('=========================================================================')\n # self.log.debug('Calculated emulated target at location: {}'.format(first_emulated_target))\n # self.log.debug('=========================================================================')\n\n # return first_emulated_target\n\n # else:\n prediction = predict(dronenum=self.number_sampling_points,\n maxRun=1,\n numIterations=GDParameters.NUM_ITERATIONS,\n numEpoch=GDParameters.NUM_EPOCH,\n threshold=GDParameters.THRESHOLD,\n learning_rate=GDParameters.LEARNING_RATE,\n numberBatch=1,\n data_length=NUMBER_SAMPLES*self.number_sampling_points)\n\n try:\n target = prediction.swarm(drone_data=data_collected,\n alphas=alphas,\n epsilons=epsilons)\n except IndexError:\n self.log.warn('Target localization failed. Data not good enough.')\n return False\n\n computed_target_position = Coordinate(target[0], target[1])\n\n self.log.debug('=========================================================================')\n self.log.debug('Calculated new target at location: {}'.format(computed_target_position))\n\n if IS_SIMULATION:\n error = computed_target_position.distance_to(self.current_simulated_target)\n self.log.debug('Simulated error: {err}, Simulated target has moved {dist} meters to: {loc}'.format(\n err=error,\n dist=self.target_meters_moved,\n loc=self.current_simulated_target\n ))\n self.log.debug('=========================================================================')\n\n if not self.region.contains(computed_target_position) and not IS_SIMULATION:\n self.log.debug('New target is out of region')\n self.log.debug('Setting new target location as the latest one calculated')\n return self.target_location\n\n return computed_target_position", "def move_loc_position(target, source):\r\n # a temp dumb hack until I figure out how to translate world to local coordinates.\r\n # your math is bad and you should feed bad\r\n tempLoc = pm.spaceLocator(n='ZZZ_TEMP_LOCATOR_{}'.format(target.name()))\r\n pm.parent(tempLoc, target)\r\n tempLoc.setRotation([0,0,0])\r\n tempLoc.setTranslation(source, space='world')\r\n target.localPosition.set(tempLoc.getTranslation(space='object'))\r\n pm.delete(tempLoc)", "def place_allowed_tower_sites():\n self.coordinates__tower_sites = []\n for tk in xrange(self.N_tower_kinds):\n #Each kind of tower will have the correct number of sites placed\n \n coords = []\n while len(coords)<self.N_tower_sites[tk]:\n x = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[1]+1-self.BORDER_MARGIN,size=1)[0]\n y = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[0]+1-self.BORDER_MARGIN,size=1)[0]\n p = (x,y) \n all_valid = True\n for 
rect in self.coordinates__obstacles:\n if not check_valid_placement(p,rect):\n all_valid = False\n break\n if all_valid:\n coords.append(p)\n self.coordinates__tower_sites.append(coords)", "def assign_targets(classes, source_dir):\n for cl in classes.values():\n cl['target'] = get_target(source_dir, cl['filepath'])", "def move_all_animals(self):\n\n y_lim, x_lim = np.shape(self.map)\n for y in range(y_lim):\n for x in range(x_lim):\n loc = y, x\n self.map[loc].migration(self.get_neighbour((y, x)))", "def positioning(self):\n pass" ]
[ "0.6060867", "0.5983119", "0.59704787", "0.5942067", "0.5901007", "0.588003", "0.58684736", "0.58369666", "0.58147174", "0.58143884", "0.5814113", "0.5796882", "0.5793904", "0.57164675", "0.5716242", "0.57001746", "0.56815624", "0.56712925", "0.5663553", "0.5660132", "0.5631085", "0.5611559", "0.55698436", "0.5562478", "0.5536042", "0.5501836", "0.5501583", "0.5476973", "0.54716766", "0.54707575" ]
0.71361285
0
Place the potential tower locations. These are the locations where towers can potentially be placed. Not every location is necessarily used (every site is used only when N_tower_sites = N_towers). The optimization problem is to determine which of these possible locations to use.
def place_allowed_tower_sites():
    self.coordinates__tower_sites = []
    for tk in xrange(self.N_tower_kinds):
        # Each kind of tower will have the correct number of sites placed
        coords = []
        while len(coords) < self.N_tower_sites[tk]:
            x = np.random.randint(self.BORDER_MARGIN, self.map_dimensions[1]+1-self.BORDER_MARGIN, size=1)[0]
            y = np.random.randint(self.BORDER_MARGIN, self.map_dimensions[0]+1-self.BORDER_MARGIN, size=1)[0]
            p = (x, y)
            all_valid = True
            for rect in self.coordinates__obstacles:
                if not check_valid_placement(p, rect):
                    all_valid = False
                    break
            if all_valid:
                coords.append(p)
        self.coordinates__tower_sites.append(coords)
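This record repeats the rejection-sampling loop from the place_targets record once per tower kind. A hedged wrapper in the same standalone style, assuming the hypothetical place_targets sampler sketched after that record:

def place_allowed_tower_sites(n_tower_kinds, n_tower_sites, map_dims, margin, obstacles):
    # One list of candidate site coordinates per tower kind; the solver
    # later decides which candidates actually receive a tower.
    return [place_targets(n_tower_sites[tk], map_dims, margin, obstacles)
            for tk in range(n_tower_kinds)]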
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_environment(self):\n \n #The first problem formulation\n #K kinds of towers\n #See more details about problem formulation in the writeup \n \n #Get a full matrix of the concatenated coverage matrices for \n #each tower type. THis new matrix has dimensions:\n #(Ntowers) x (sum(potential sites)), where the sum o=is over all tower types\n coverage = np.hstack(i for i in self.coverage_matrices)\n print coverage\n print coverage.shape \n \n #Diagonal matrix of the values of each target\n #(for the scenarios where we don't care about maximizing covered value,\n #target_values is just all ones, so this is just the identity matrix)\n V = np.diag(self.target_values)\n \n #If doing scenario where we want to fortify weakest link, only makes\n #sense if all targets are equal value:\n if self.objective_type == 'min_entries':\n V = np.eye(len(self.target_values))\n\n #Get the matrix of coverage values / expected value saved:\n C = np.dot(V,coverage)\n print 'V', V\n print 'coverage', coverage\n print 'C', C\n \n \n #Since not gauranteed to reach global optimum on any particular initialization,\n #run a few times and take the best result.\n #Just define \"best result\" as the result which had the most overall \n #\"converged\" x, combined over all tower kinds. \n# for j in xrange(self.N_random_starts_max):\n \n \n a = 2. #1.\n tau = 1e-4\n N = sum(i for i in self.N_tower_sites)\n w = np.zeros(N)\n ones = np.ones(N)\n p = 1. #the exponents power when doing he exponent method:\n \n for i in xrange(self.N_reweighting_iterations_max):\n #The concatenated vector of occupancies: Concatenated over all\n #of the kinds of towers.\n x = cvx.Variable(N)\n \n #Different objective functions depending on which optimization problem.\n #These are defined in the scenarios in the main function.\n if self.objective_type == 'min_entries':\n operation = cvx.min_entries\n elif self.objective_type == 'sum_entries':\n operation = cvx.sum_entries\n else:\n raise Exception('must specify valid objective_type')\n \n #Objective function includes penalty term for non-binary x values\n if self.penalty_type == 'reweighted_L1':\n #objective = cvx.Maximize(t - x.T*w)\n objective = cvx.Maximize(operation(C*x - x.T*w))\n\n\n #Main constraints on 0<=x<=1\n constraints = [0<=x, x<=1]\n \n \n #And then for each kind of tower, append the constraint that there\n #be exactly N_i towers, or <= quota (depending on constraint type)\n if self.constraints__type == 'fixed_N_towers' or self.constraints__type == 'tower_quotas':\n for tk in xrange(self.N_tower_kinds):\n before_sum = np.concatenate(([0],np.cumsum(self.N_tower_sites)))[tk]\n print before_sum\n print before_sum + self.N_tower_sites[tk]\n if self.constraints__type == 'fixed_N_towers':\n constraints.append(cvx.sum_entries(\n x[before_sum : before_sum + self.N_tower_sites[tk]]\n )==self.N_towers[tk])\n elif self.constraints__type == 'tower_quotas':\n constraints.append(cvx.sum_entries(\n x[before_sum : before_sum + self.N_tower_sites[tk]]\n )<=self.budget__tower_quotas[tk])\n print x[before_sum : before_sum + self.N_tower_sites[tk]]\n \n elif self.constraints__type == 'total_cost':\n costs = np.hstack([np.repeat(self.budget__tower_unit_costs[tk],self.N_tower_sites[tk]) for tk in xrange(self.N_tower_kinds)])\n constraints.append(cvx.sum_entries(costs * x) <= self.budget__total_cost) \n \n \n \n\n\n \n \n print 'penalty_type', self.penalty_type\n print 'objective_type', self.objective_type\n print 'constraints__type', self.constraints__type\n print 'budget__tower_quotas', 
self.budget__tower_quotas\n print 'operation', operation\n print 'objective', objective\n print 'constraints', constraints\n cvx.Problem(objective, constraints).solve(verbose=self.VERBOSE)\n x = np.array(x.value).flatten()\n print 'x', x\n w = a/(tau+np.abs(x))\n p += 1.\n plt.figure(figsize=(5,5))\n plt.plot(x,marker='o')\n plt.savefig('histrograms_{}.png'.format(i))\n print \n \n \n \n \n #From the solution x, get the coordinates of those tower sites where we\n #really do want to place a tower\n #use = np.isclose(x,1.)\n for tk in xrange(self.N_tower_kinds):\n before_sum = np.concatenate(([0],np.cumsum(self.N_tower_sites)))[tk]\n y = x[before_sum : before_sum + self.N_tower_sites[tk]]\n inds = np.argsort(y)\n s = y[inds]\n use = np.where(s>.5)[0]\n print inds\n print s\n print use \n if self.constraints__type == 'fixed_N_towers':\n if len(use) != self.N_towers[tk]:\n print 'Solution did not converge properly. Choosing the K best towers.'\n print self.N_towers[tk], len(use)\n # use = use[-self.N_towers[tk]:]\n use = inds[-self.N_towers[tk]:]\n elif self.constraints__type == 'tower_quotas':\n pass #Just use the towers thresholded at > .5\n print use\n \n \n self.coordinates__solved_towers.append([self.coordinates__tower_sites[tk][mm] for mm in inds[use]])", "def gen_placecells(self, min_spread=0.2):\r\n\r\n N = None\r\n num_tries = 1000 # a limit on the number of attempts to place a new placecell\r\n\r\n # assign random x,y locations to each neuron\r\n locations = [self.random_location()]\r\n while True:\r\n # generate a random new point\r\n new_loc = self.random_location()\r\n\r\n # check that the point isn't too close to previous points\r\n count = 0\r\n while min([self.calc_dist(new_loc, l) for l in locations]) < min_spread and count < num_tries:\r\n new_loc = self.random_location()\r\n count += 1\r\n\r\n # add the new point\r\n locations += [new_loc]\r\n\r\n if (N == None and count >= num_tries) or len(locations) == N:\r\n # stop when required number of place cells built (if N specified),\r\n # or when world has been decently filled\r\n break\r\n\r\n return locations", "def optimize(self):\n # Loop through every WD and WS individually\n wd_array = self.fi_subset.floris.flow_field.wind_directions\n ws_array = self.fi_subset.floris.flow_field.wind_speeds\n for nwsi, ws in enumerate(ws_array):\n\n self.fi_subset.reinitialize(wind_speeds=[ws])\n\n for nwdi, wd in enumerate(wd_array):\n # Find turbines to optimize\n turbs_to_opt = self._turbs_to_opt_subset[nwdi, nwsi, :]\n if not any(turbs_to_opt):\n continue # Nothing to do here: no turbines to optimize\n\n # Extract current optimization problem variables (normalized)\n yaw_lb = self._minimum_yaw_angle_subset_norm[nwdi, nwsi, turbs_to_opt]\n yaw_ub = self._maximum_yaw_angle_subset_norm[nwdi, nwsi, turbs_to_opt]\n bnds = [(a, b) for a, b in zip(yaw_lb, yaw_ub)]\n x0 = self._x0_subset_norm[nwdi, nwsi, turbs_to_opt]\n\n J0 = self._farm_power_baseline_subset[nwdi, nwsi]\n yaw_template = self._yaw_angles_template_subset[nwdi, nwsi, :]\n turbine_weights = self._turbine_weights_subset[nwdi, nwsi, :]\n yaw_template = np.tile(yaw_template, (1, 1, 1))\n turbine_weights = np.tile(turbine_weights, (1, 1, 1))\n\n # Define cost function\n def cost(x):\n x_full = np.array(yaw_template, copy=True)\n x_full[0, 0, turbs_to_opt] = x * self._normalization_length\n return (\n - 1.0 * self._calculate_farm_power(\n yaw_angles=x_full,\n wd_array=[wd],\n turbine_weights=turbine_weights\n )[0, 0] / J0\n )\n\n # Perform optimization\n residual_plant = minimize(\n 
fun=cost,\n x0=x0,\n bounds=bnds,\n method=self.opt_method,\n options=self.opt_options,\n )\n\n # Undo normalization/masks and save results to self\n self._farm_power_opt_subset[nwdi, nwsi] = -residual_plant.fun * J0\n self._yaw_angles_opt_subset[nwdi, nwsi, turbs_to_opt] = (\n residual_plant.x * self._normalization_length\n )\n\n # Finalize optimization, i.e., retrieve full solutions\n df_opt = self._finalize()\n return df_opt", "def place_targets():\n\n \n coords = []\n while len(coords)<self.N_targets:\n x = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[1]+1-self.BORDER_MARGIN,size=1)[0]\n y = np.random.randint(self.BORDER_MARGIN,self.map_dimensions[0]+1-self.BORDER_MARGIN,size=1)[0]\n p = (x,y)\n all_valid = True\n for rect in self.coordinates__obstacles:\n if not check_valid_placement(p,rect):\n all_valid = False\n break\n if all_valid:\n coords +=[p]\n self.coordinates__targets = coords", "def place_cities(self, n=20):\n self.city_score = self.flow ** 0.5\n self.city_score[self.elevation[:-1] <= 0] = -9999999\n self.cities = []\n while len(self.cities) < n:\n # location of potential new city is place with maximum score\n newcity = np.argmax(self.city_score)\n\n # Only place cities between 0.1 and 0.9 axes.\n city_max_ax = 0.85\n city_min_ax = 0.15\n # Chance that this location has no city, scales with number of cities placed so far\n if (\n np.random.random() < (len(self.cities) + 1) ** -0.2\n and city_min_ax < self.vxs[newcity, 0] < city_max_ax\n and city_min_ax < self.vxs[newcity, 1] < city_max_ax\n ):\n self.cities.append(newcity)\n\n # penalize city score for the newcity location.\n self.city_score -= 0.01 * 1 / (distance(self.vxs, self.vxs[newcity, :]) + 1e-9)", "def set_locations():\n STATUS['locations']['monster'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['monster'][1] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][0] = generate_random_coord(STATUS['grid_size'])\n STATUS['locations']['weapon'][1] = generate_random_coord(STATUS['grid_size'])", "def tryout_new_location(self):\n try_location = [0, 0]\n \n # try locations until a not-occupied location is found and not all folds are checked\n while try_location in self.occupied:\n\n # folds north everytime\n current_type = 2\n \n # check if location is possible \n try_location = self.assign_location(current_type)\n\n # if location is not possible, try next fold\n if try_location in self.occupied:\n continue\n # if location is possible, use location\n else:\n self.next_location = try_location\n return", "def get_valid_locations(location_list, grid, shape):", "def generate_nearby_cells(self):\n for y in range(len(self.island_map)):\n for x in range(len(self.island_map[y])):\n list_of_nearby_cells = []\n\n if y != 0:\n self.generate_cell_above(x, y, list_of_nearby_cells)\n\n if x != 0:\n self.generate_cell_left(x, y, list_of_nearby_cells)\n\n if y != len(self.island_map)-1:\n self.generate_cell_below(x, y, list_of_nearby_cells)\n\n if x != len(self.island_map[y])-1:\n self.generate_cell_right(x, y, list_of_nearby_cells)\n\n self.island_map[y][x].nearby_cells = list_of_nearby_cells", "def _set_servers_location(servers):\n if not servers:\n return\n\n geoip_db_reader = geoip2.database.Reader(app.config['GEOIP_DATABASE_FILE'])\n\n for server in servers:\n try:\n location = geoip_db_reader.city(server.ip)\n except (ValueError, geoip2.errors.AddressNotFoundError):\n continue\n\n if location:\n if location.continent.geoname_id:\n server.location.continent_code = 
location.continent.code.lower()\n server.location.continent_name = location.continent.names['en']\n\n if location.country.geoname_id:\n server.location.country_code = location.country.iso_code.lower()\n server.location.country_name = location.country.names['en']\n\n if location.city.geoname_id:\n server.location.city_name = location.city.names['en']\n\n server.location.text = '{}{}'.format(\n server.location.city_name + ', ' if server.location.city_name else '',\n server.location.country_name\n )\n\n if current_app:\n server.location.set_flags()\n else:\n with app.app_context():\n server.location.set_flags()\n\n geoip_db_reader.close()", "def assign_tasks(self, task_list):\n\n relevant = {}\n\n for task in task_list:\n if task.pickup:\n trucker = min(self.truckers, key=lambda t: distance(t.potential_location(), task.location))\n trucker.potential_tasks.append(task)\n relevant[task.order] = trucker\n\n elif task.dropoff:\n if task.order not in relevant:\n raise invalidPlanErrora\n relevant[task.order].potential_tasks.append(task)", "def calc_synLocations(post_branches, n_syns, dist):\n\n\t\t\t\tassert dist in ['uniform', 'random', 'one'], 'Which synapse distribution for %s population? (uniform/random/one) '%self.population_name\n\t\t\t\t\n\t\t\t\tn_branches = len(post_branches)\n\t\t\t\tbranch_locs = {}\n\t\t\t\t\n\t\t\t\tif dist == 'uniform':\n\t\t\t\t\traise Exception('uniform', '{} dist is under construction!'.format(dist))\n\t\t\t\t\t# density = n_syns / L\n\t\t\t\t\t# locs = sorted(np.arange(0, L, 1/density))\n\t\t\t\t\t# locs = [i/L for i in locs]\n\n\t\t\t\t\t# assert len(locs)==n_syns, ['Sanity check warning: unexpected locs length!', pdb.set_trace()]\n\n\t\t\t\telif dist == 'random':\n\t\t\t\t\t\n\t\t\t\t\tfor i in range(n_syns):\n\n\t\t\t\t\t\t# Randomly choose branch\n\t\t\t\t\t\trand_branch_idx = np.random.randint(n_branches)\n\t\t\t\t\t\trand_branch \t = post_branches[rand_branch_idx]\n\t\t\t\t\t\trand_branch_name = rand_branch.name().split('].')[-1]\n\t\t\t\t\t\t\n\t\t\t\t\t\t# Randomly choose location\n\t\t\t\t\t\trand_loc = np.random.rand()\n\n\t\t\t\t\t\tif rand_branch_name in branch_locs.keys():\n\t\t\t\t\t\t\tbranch_locs[rand_branch_name]['locs'].append(rand_loc)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tbranch_locs[rand_branch_name] \t\t\t\t= {}\n\t\t\t\t\t\t\tbranch_locs[rand_branch_name]['locs'] \t\t= [rand_loc]\n\t\t\t\t\t\t\tbranch_locs[rand_branch_name]['branch_obj'] = rand_branch\t\t\t\t\t\t\t\t\n\n\t\t\t\t\tfor key in branch_locs:\n\t\t\t\t\t\tbranch_locs[key]['locs'] = sorted(branch_locs[key]['locs'])\n\t\t\t\t\n\t\t\t\telif dist == 'one':\n\t\t\t\t\tsingle_branch_idx \t= np.random.randint(n_branches)\n\t\t\t\t\tsingle_branch \t \t= post_branches[single_branch_idx]\n\t\t\t\t\tsingle_branch_name \t= single_branch.name().split('].')[-1]\n\t\t\t\t\t\n\t\t\t\t\tbranch_locs[single_branch_name] = {'branch_obj': single_branch, 'locs': [0.5]*n_syns}\n\n\t\t\t\treturn branch_locs", "async def find_nearby_independent_worlds(context: Anacreon) -> List[World]:\n jump_beacon_trait_ids = {\n e.id\n for e in context.game_info.scenario_info\n if e.is_jump_beacon and e.id is not None\n }\n\n jump_beacon_location = [\n world.pos\n for world in context.space_objects.values()\n if isinstance(world, OwnedWorld)\n and any(\n anacreonlib.utils.world_has_trait(\n context.game_info.scenario_info, world, trait_id\n )\n for trait_id in jump_beacon_trait_ids\n )\n ]\n\n return [\n world\n for world in context.space_objects.values()\n if isinstance(world, World)\n and world.sovereign_id == 1 # Is a 
sovereign world\n and any(\n utils.dist(world.pos, jump_beacon_pos) <= 250\n for jump_beacon_pos in jump_beacon_location\n ) # Is in distance\n ]", "def _setOceanLocation(self):\r\n\t\t## If the fluids_hrc exists\r\n\t\tif cmds.objExists('fluids_hrc'):\r\n\t\t\tif cmds.objExists('ocean_srf'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'ocean_srf.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'ocean_srf.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING ocean_srf node from scene....')\r\n\r\n\t\t\tif cmds.objExists('oceanPreviewPlane_prv'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'oceanPreviewPlane_prv.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'oceanPreviewPlane_prv.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING oceanPreviewPlane_prv node from scene....')\r\n\t\telse:\r\n\t\t\tcmds.warning('NO fluids_hrc FOUND! Can not move the ocean into final position. PLEASE CHECK FX PUBLISH NOW!')", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n current_location = self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def generate_map(self):\n\n # Create main streets first\n self.create_main_streets()\n\n # Then create the commercial buildings in the center of town\n self.create_commercial_center()\n\n # Then create the neighborhoods that populate the rest of the city\n while(self.create_neighborhood()):\n pass\n\n # Clean up any invalid buildings that were created\n self.delete_inaccessible_buildings()", "def check_location(position: tuple,\n all_sites: np.matrix,\n busy_locations: List[tuple]) -> List[tuple]:\n N, M = all_sites.shape\n potential_sites = [(position[0], position[1] + 1),\n (position[0], position[1] - 1),\n (position[0] + 1, position[1]),\n (position[0] - 1, position[1])]\n potential_sites = [(site[0] % N, site[1] % M) for site in potential_sites]\n valid_sites = [site for site in potential_sites if site not in busy_locations]\n return valid_sites", "def make_boundaries(self):\n p = self.project\n c = p[0]\n outlet = p.NewOutlet('GW', c.x, c.y, c.z - c.soildepth)\n cmf.FreeDrainagePercolation(c.layers[-1], outlet)\n rainfall = cmf.timeseries.from_sequence(self.starttime, cmf.day, [25, 0, 0, 0, 0, 0, 0] * 200)\n 
p.rainfall_stations.add('Heavy rain once a week', rainfall, (0, 0, 0))\n print(cmf.describe(p.rainfall_stations))\n p.use_nearest_rainfall()\n\n return outlet", "def createWarpGateSims(self):\n # create warpgates\n import anwp.sims\n self.warpgateSims = []\n for systemID, systemDict in self.game.allSystems.iteritems():\n systemEmpireDict = self.game.allEmpires[systemDict['myEmpireID']]\n if systemDict['myEmpireID'] == self.game.myEmpireID or anwp.func.globals.diplomacy[self.game.myEmpire['diplomacy'][systemDict['myEmpireID']]['diplomacyID']]['trade'] == 1:\n # look for warp gates\n for indID, myIndustryDict in systemDict['myIndustry'].iteritems():\n myIndustryDataDict = self.game.industrydata[myIndustryDict['industrytype']]\n if myIndustryDataDict['abr'][1:] == 'WG':\n # warp gate industry found, create sim and exit\n imageFileName = '%swarpgate_%s_%s.png' % (self.game.app.simImagePath, systemEmpireDict['color1'], systemEmpireDict['color2'])\n sim = WarpGateEntity(self, anwp.sims.categories.StaticCategory(imageFileName, 'warpgate'))\n \n # add sim to world\n self.warpgateSims.append(sim)\n x = systemDict['x']-65\n y = systemDict['y']-42\n facing = 0\n speed = 0\n sim.turnRate = 0\n force = 1\n self.world.addToWorld(sim, x, y, facing, speed, force)\n break", "def create_neighborhood(self):\n if len(self.available_building_cells) == 0:\n return False\n # Pick cell\n shuffle(self.available_building_cells)\n\n neighborhood_origin = self.available_building_cells[0]\n if not self.creates_valid_building(neighborhood_origin):\n # If not a valid placement, remove location from list\n self.available_building_cells.remove(neighborhood_origin)\n # Retry!\n self.create_neighborhood()\n return True # Exit after neighborhood is created\n\n final_cells = [neighborhood_origin]\n self.available_building_cells.remove(neighborhood_origin)\n\n # Place building on origin\n self.place_building(Building(self.environment, self.environment.next_building_id, neighborhood_origin, attractiveness=random()))\n neighborhood_cells = self.environment.grid.get_neighborhood(neighborhood_origin, moore=True, include_center=True)\n\n # Create a random number of residence buildings in this neighborhood\n number_of_residences = randrange(2,6)\n for i in range(number_of_residences):\n while len(neighborhood_cells) > 0:\n shuffle(neighborhood_cells)\n # Only place building if space is empty\n if self.environment.grid.is_cell_empty(neighborhood_cells[0]):\n self.place_building(Building(self.environment, self.environment.next_building_id, neighborhood_cells[0], attractiveness=random()))\n final_cells.append(neighborhood_cells[0])\n try:\n # If this space was available before, remove it from list\n self.available_building_cells.remove(neighborhood_cells[0])\n except:\n pass\n\n continue\n\n # Remove cell from list\n neighborhood_cells.remove(neighborhood_cells[0])\n\n # Fill surrounding space around buildings with roads!\n for building_location in final_cells:\n for surrounding_cell in self.environment.grid.get_neighborhood(building_location, moore=True):\n if self.environment.grid.is_cell_empty(surrounding_cell):\n self.place_road(Road(surrounding_cell))\n\n return True", "def _populate_placements(self, preferred):\n # local copy of placement energies\n self._placements = deepcopy(placements)\n \n # top right is the Imhof-approved default\n if preferred == 'top right' or not preferred:\n return\n \n # bump up the cost of every placement artificially to leave room for new preferences\n self._placements = dict([ (key, .4 + v*.6) for 
(key, v) in self._placements.items() ])\n \n if preferred == 'top':\n self.placement = N\n self._placements.update({ N: .0, NNW: .3, NNE: .3 })\n \n elif preferred == 'top left':\n self.placement = NW\n self._placements.update({ NW: .0, WNW: .1, NNW: .1 })\n \n elif preferred == 'bottom':\n self.placement = S\n self._placements.update({ S: .0, SSW: .3, SSE: .3 })\n \n elif preferred == 'bottom right':\n self.placement = SE\n self._placements.update({ SE: .0, ESE: .1, SSE: .1 })\n \n elif preferred == 'bottom left':\n self.placement = SW\n self._placements.update({ SW: .0, WSW: .1, SSW: .1 })\n \n else:\n raise Exception('Unknown preferred placement \"%s\"' % preferred)", "def __find_all_moves(self, tower) -> list:\r\n choice = []\r\n for height in range(1,len(tower.tower)-2):\r\n for index in range(1,4):\r\n if self.stat_brain.is_valid(height, index, tower):\r\n choice.append((height, index))\r\n \r\n r.shuffle(choice)\r\n return choice", "def run(self, locations):\n return locations", "def update_players_locations(self):\n self.loc = self.find_value(1)\n self.opponent_loc = self.find_value(2)", "def testNSELocations(self):\n t = CoalescenceTree(self.c2)\n t.set_speciation_parameters(speciation_rates=[0.6, 0.7], record_spatial=True, record_fragments=False)\n t.apply()\n locations = t.get_species_locations()\n for row in locations:\n self.assertEqual(0, row[1])\n self.assertEqual(0, row[2])", "def scheduleNight(self, nightNum):\n # decide which way to point tonight\n NCoverage = self.NVisitsComplete / utils.areaInDir(NORTH)\n SCoverage = self.SVisitsComplete / utils.areaInDir(SOUTH)\n ECoverage = self.EVisitsComplete / utils.areaInDir(EAST)\n SECoverage = ((self.SVisitsComplete + self.EVisitsComplete) /\n utils.areaInDir(SOUTHEAST))\n\n if NCoverage < SECoverage:\n self.nightDirection = NORTH\n else:\n self.nightDirection = SOUTHEAST\n\n # reset the slew times array\n self.tonightsSlewTimes = []\n prevAlt = prevAz = None\n prevFilter = self.telescope.filters[0]\n\n # return each visit prescribed by tonight's NightScheduler\n self.nightScheduler = NightScheduler(self.telescope, nightNum,\n self.nightDirection, self.makeupVPs)\n prevTime = None\n for visit in self.nightScheduler.schedule():\n time = self.context.time()\n alt, az = sky.radec2altaz(visit.ra, visit.dec, self.context.time())\n if alt < self.telescope.minAlt:\n # East is +pi/2, so if the field has az < pi, it is rising\n # and if az > pi then setting\n if az >= np.pi:\n # this field is setting, so skip it\n continue\n else:\n # this field is rising, so wait a while until it's\n # visible\n while alt < self.telescope.minAlt:\n # if we yield None the simulator (or the world) will\n # progress time for us\n yield None\n alt, az = sky.radec2altaz(visit.ra, visit.dec,\n self.context.time())\n prevAlt = prevAz = None\n if prevAlt is not None:\n # Don't change laxDome param without changing in Simulator too\n slewTime = self.telescope.calcSlewTime(prevAlt, prevAz, prevFilter,\n alt, az, visit.filter,\n laxDome = config.laxDome)\n self.tonightsSlewTimes.append(slewTime)\n prevAlt = alt\n prevAz = az\n prevFilter = visit.filter\n prevTime = time\n yield visit", "def make_move(self, tower):\r\n height, index = self.__find_random_moves(tower)\r\n \r\n if self.stat_brain.all_valid(tower) == 0 or self.stat_brain.is_valid(height, index, tower):\r\n return height, index\r\n else:\r\n while not self.stat_brain.is_valid(height, index, tower):\r\n height, index = self.__find_random_moves(tower)\r\n \r\n return height, index", "def 
get_moves(self):\n grid = self.model.grid\n # List of agents we can't overlap with\n no_overlap = [\"wall\", \"human\", \"zombie\"]\n\n if self.agent_type == \"zombie\" or \\\n (\"AvoidingZombie\" not in self.states and os.environ[\"mode\"] == \"5\"):\n no_overlap.append(\"road\")\n\n # Always give the option to stay on your current location(stand still)\n all_cells = self.neighbors()\n free_cells = [self.pos]\n\n # Get rid of cells that we may not move to by iterating through all\n # cells next to the agent, and only adding non-occupied cells\n for cell in all_cells:\n cell_occupied = False\n x, y = cell.pos\n # If there are agents in the current cell, and we are not allowed\n # to overlap with any of those agents, the cell is occupied.\n # Only add cells which are not occupied.\n if not grid.is_cell_empty((x, y)):\n for agent in grid[x][y]:\n if agent.agent_type in no_overlap:\n cell_occupied = True\n break\n if not cell_occupied:\n free_cells.append((x, y))\n return free_cells", "def setNeighbors(self):\n for cellIndex in range(len(self.cells)):\n cell = self.cells[cellIndex]\n\n #Checks the 8 cells around the living one. \n for neighborsX in range(cell.x - 1, cell.x + 2):\n for neighborsY in range(cell.y - 1, cell.y + 2):\n\n #If the position is outside the world, loop around.\n neighborsX = neighborsX % self.screen.worldSize\n neighborsY = neighborsY % self.screen.worldSize\n\n #Skipping itself. Becouse we do not want to calculate itself as a neighbor\n if(neighborsX == cell.x and neighborsY == cell.y):\n continue\n else:\n #Checks if a cell exist at neighborsX, neighborsY\n cellToCheck = self.getCellFromPosition(neighborsX, neighborsY)\n if(cellToCheck != False):\n #Add one to the neighbor var if there already exist and cell for the given position.\n cellToCheck.numOfNeighbor += 1\n else:\n #Creates a new cell if it do not exist any.\n newCell = Cell(self.screen, neighborsX, neighborsY, True)\n newCell.numOfNeighbor += 1\n self.cells.append(newCell)", "def find_near_location():\n return render_template(\"location.html\", latitude=None, longitude=None,\n list_stops=None)" ]
[ "0.58460766", "0.57772356", "0.5284448", "0.52179706", "0.5182957", "0.5147743", "0.5096863", "0.50870997", "0.50867623", "0.5045977", "0.502303", "0.49666467", "0.49650604", "0.49317968", "0.49126053", "0.49000195", "0.48980615", "0.48955286", "0.4892086", "0.48821265", "0.4865679", "0.48635742", "0.48565167", "0.4855526", "0.48496827", "0.48390162", "0.48290113", "0.48195168", "0.48092422", "0.4801244" ]
0.79918706
0
Return True if the r1-r2 line of sight is obstructed; otherwise False
def check_obstructed(r1, r2):
    if r1 == r2:
        return False
    # Densely sample line connecting r1 and r2.
    # If any of those sampled points is inside the rectangle, then the
    # line of sight intersects the rectangle and the tower's view is
    # obstructed.
    NP = 1000
    sampled_x = np.linspace(r1[0], r2[0], NP)
    sampled_y = np.linspace(r1[1], r2[1], NP)
    for x, y, w, h in self.coordinates__obstacles:
        for pt in xrange(NP):
            if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \
               (sampled_y[pt] > y) and (sampled_y[pt] < y+h):
                return True
    return False
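A standalone, vectorized variant of the same dense-sampling test; the free-function signature and the obstacles argument are assumptions (the record above closes over self), and NumPy boolean masks replace the inner Python loop.

import numpy as np

def check_obstructed(r1, r2, obstacles, n_samples=1000):
    # Sample the segment r1 -> r2 densely; line of sight is obstructed
    # if any sample lands strictly inside an (x, y, w, h) rectangle.
    if r1 == r2:
        return False
    xs = np.linspace(r1[0], r2[0], n_samples)
    ys = np.linspace(r1[1], r2[1], n_samples)
    for x, y, w, h in obstacles:
        inside = (xs > x) & (xs < x + w) & (ys > y) & (ys < y + h)
        if inside.any():
            return True
    return False

As in the original, this is an approximate test: a sufficiently thin obstacle could slip between consecutive samples, so n_samples trades accuracy for speed.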
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_ok_two_lines(line1, line2):\n card1 = line1[0]\n card2 = line1[1]\n card3 = line1[2]\n card4 = line2[0]\n card5 = line2[1]\n card6 = line2[2]\n idents1 = [card.ident for card in line1]\n idents2 = [card.ident for card in line2]\n intersection = list(set(idents1) & set(idents2))\n if intersection:\n return False\n if not is_coupled(card1.south, card4.north):\n return False\n if not is_coupled(card2.south, card5.north):\n return False\n if not is_coupled(card3.south, card6.north):\n return False\n return True", "def intersects(self, other_line):\n intpt= self.intersection(other_line)\n return bool(intpt)", "def is_interfered(r, c, s1, s2):\n return r == 0 and c < s1 + s2 or r < s2 and c < s1", "def has_crossing_len2_ob(self) -> bool:\n fcell = self.first_cell\n scell = self.second_cell\n if self._fuse_row:\n possible_obs = [\n GriddedPerm((0, 1), (fcell, scell)),\n GriddedPerm((1, 0), (scell, fcell)),\n ]\n else:\n possible_obs = [\n GriddedPerm((0, 1), (fcell, scell)),\n GriddedPerm((1, 0), (fcell, scell)),\n ]\n return any(ob in possible_obs for ob in self._tiling.obstructions)", "def is_line_on_line(feature_1: Sequence, feature_2: Sequence) -> bool:\n\n line_on_line = False\n\n for coords in feature_1:\n\n line_on_line = boolean_point_on_line(coords, feature_2)\n if not line_on_line:\n break\n\n return line_on_line", "def circles_overlapping(x1, y1, x2, y2, r):\n # print(abs((x2-x1)**2 + (y2-y1)**2))\n # print((2*r)**2)\n if (abs((x2-x1)**2 + (y2-y1)**2) > (2*r)**2):\n return False\n else: return True", "def is_on_line(p0, p1, p2, threshold = 0.01):\n p0, p1, p2 = map(lambda tup : np.array(tup[:2]), [p0, p1, p2])\n p1 -= p0\n p2 -= p0\n return abs((p1[0] / p1[1]) - (p2[0] / p2[1])) < threshold", "def is_occluding(p1: np.ndarray, r1: float, p2: np.ndarray, r2: float):\n d1, d2 = np.linalg.norm(p1), np.linalg.norm(p2) # compute distances\n u1, u2 = p1 / d1, p2 / d2 # project to unit circle\n rs1, rs2 = r1 / d1, r2 / d2 # scale radii by distance\n d = np.linalg.norm(u1 - u2) # compute distance between projected points\n return d < rs1 + rs2 and (d1 - r1) <= (d2 - r2)", "def YRoadConnect(data, x1, y1, x2, y2):\n flag = True\n if not x1 == x2:\n return False\n y_start = min(y1, y2)\n y_end = max(y1, y2)\n for i in range(y_start + 1, y_end):\n if not data[i][x1] == 0:\n flag = False\n break\n return flag", "def rOverlap (x1, y1, w1, h1, x2, y2, w2, h2):\n if x1<=x2<=(x1+w1) or y1<=y2<=(y1+h1):\n return True\n elif x1<=(x2+w2)<=(x1+w1):\n return True\n else:\n return False", "def is_over(self, state: StonehengeState) -> bool:\n total_result = state.hori_result + state.left_result + state.right_result\n total_line = len(total_result)\n p1_taken = 0\n p2_taken = 0\n # all_taken = True\n for item in total_result:\n if item == '1':\n p1_taken+=1\n elif item =='2':\n p2_taken += 1\n # else:\n # all_taken = False\n # print('p1 taken:' + str(p1_taken))\n # print('p2 taken:' + str(p2_taken))\n # print('p1_taken more than half?')\n # print(float(p1_taken) >= total_line/2)\n # print('p2_taken more than half?')\n # print(float(p2_taken) >= total_line/2)\n return float(p1_taken) >= total_line/2 or float(p2_taken) >= total_line/2", "def hasTwoSons(self):\n \n return self._leftSon is not None and self._rightSon is not None", "def _overlapping(self, atom1, atom2):\n\n if np.linalg.norm(atom1.pos-atom2.pos) < (atom1.rad+atom2.rad):\n return True\n else:\n return False", "def in_sight(x1, y1, x2, y2, area):\n # skip itself\n if x1 == x2 and y1 == y2:\n return False\n\n # go to the coordinate\n x_d, y_d 
= x2 - x1, y2 - y1\n multiple = gcd(x_d, y_d)\n\n x_step, y_step = x_d // multiple, y_d // multiple\n\n x1 += x_step\n y1 += y_step\n\n # jump to x2, y2 until we hit something\n while x1 != x2 or y1 != y2:\n if area[y1][x1] == \"#\":\n return False\n\n x1 += x_step\n y1 += y_step\n\n # if we didn't hit something, the position is valid!\n return True", "def is_equivalence(self) -> bool:", "def is_red(ab, sp):\n lya_em = (1. + sp.zem) * 1216.\n z = ab.z\n for line in ab.get_lines():\n if line.get_obs(z) > lya_em and line.get_obs(z) < sp.HI_sp.waves[-1]:\n return True\n return False", "def is_overlap(line1, line2):\n if line1.p1 < line2.p1 and line1.p2 <= line2.p1:\n # if line 1 lies to the left of line 2 return false.\n return False\n elif line1.p1 < line2.p1 <= line1.p2:\n # if first coordinate of line2 lies between the points of line1 then return true.\n return True\n elif line2.p1 <= line1.p1 and line2.p2 <= line1.p1:\n # if line2 lies to the left of line1 return false.\n return False\n elif line2.p1 <= line1.p1 <= line2.p2:\n # if first coordinate of line1 lies between the points of line2 then return true.\n return True", "def do_overlap(r1, r2):\n r1_s, r1_e = r1\n r2_s, r2_e = r2\n\n return r1_s <= r2_s <= r1_e or r2_s <= r1_s <= r2_e", "def ate_itself(self):\r\n ate_flag = False\r\n\r\n for i in self.body:\r\n if self.head[0] + self.direction[0]*10 == i[0] and self.head[1] + self.direction[1]*10 == i[1]:\r\n ate_flag = True\r\n\r\n return ate_flag", "def crosses(self, other): # -> bool:\n ...", "def is_in_collision_line(self, a, b):\n return abs((b[0]-a[0])*self.x + (a[1]-b[1])*self.y + (a[0]-b[0])*b[1] + (b[1]-a[1])*a[0]) /\\\n sqrt((b[0]-b[1])**2 + (a[1]-b[1])**2 + 0.0000001)< self.r", "def lines_intersect(x1, y1, x2, y2, a1, b1, a2, b2):\n\n\t# Ensures that x1 < x2 \n\t(x1, x2, y1, y2) = (x1, x2, y1, y2) if x1 < x2 else (x2, x1, y2, y1) \n\t(a1, a2, b1, b2) = (a1, a2, b1, b2) if a1 < a2 else (a2, a1, b2, b1) \n\t\n\t# Make lines same domain\n\tif x1 > a1:\n\t\tif x1 > a2 or a1 == a2:\n\t\t\treturn False \n\n\t\ta = x1 \n\telse:\n\t\tif a1 > x2 or x1 == x2:\n\t\t\treturn False\n\t\t\n\t\ta = a1 \n\n\tif x2 < a2:\n\t\tif x2 < a1 or a1 == a2:\n\t\t\treturn False \n\n\t\tb = x2\n\telse:\n\t\tif a2 < x1 or x1 == x2:\n\t\t\treturn False \n\n\t\tb = a2\n\n\tif x1 != x2:\n\t\tx1, y1, x2, y2 = trim_line(x1, y1, x2, y2, a, b)\n\tif a1 != a2:\n\t\ta1, b1, a2, b2 = trim_line(a1, b1, a2, b2, a, b)\n\n\t\n\treturn (y1 >= b1 and y2 <= b2) or (y1 <= b1 and y2 >= b2)", "def is_ringing(self) -> bool:", "def is_ate(self, snake_x, snake_y):\n if snake_x == self.x and snake_y == self.y:\n return True", "def test_does_intersect() -> None:\n line_1 = Line(k=1, n=0)\n line_2 = Line(k=2.5, n=1)\n line_3 = Line(k=2.5, n=3)\n\n assert line_1.does_intersect(line_1) == True\n assert line_1.does_intersect(line_2) == True\n assert line_2.does_intersect(line_3) == False", "def is_cr(self, y, t):\n return t == 0 and y != 0", "def XRoadConnect(data, x1, y1, x2, y2):\n flag = True\n if not y1 == y2:\n return False\n x_start = min(x1, x2)\n x_end = max(x1, x2)\n for i in range(x_start + 1, x_end):\n if not data[y1][i] == 0:\n flag = False\n break\n return flag", "def intersect_or_on(s1, s2, c1, c2):\n den = float( (c2.y - c1.y) * (s2.x - s1.x) - (c2.x - c1.x) * (s2.y - s1.y) )\n if not den:\n return None\n\n us = ((c2.x - c1.x) * (s1.y - c1.y) - (c2.y - c1.y) * (s1.x - c1.x)) / den\n uc = ((s2.x - s1.x) * (s1.y - c1.y) - (s2.y - s1.y) * (s1.x - c1.x)) / den\n\n if (0 <= us <= 1) and (0 <= uc <= 1):\n #subj and clip 
line intersect eachother somewhere in the middle\n #this includes the possibility of degenerates (edge intersections)\n x = s1.x + us * (s2.x - s1.x)\n y = s1.y + us * (s2.y - s1.y)\n return (x, y), us, uc\n else:\n return None", "def __le__(self, other):\n return self.x ** 2 + self.y ** 2 <= other.x ** 2 + other.y ** 2", "def is_cue_line(point1, point2, image):\n if point1[0] <= point2[0]:\n pointL, pointR = point1, point2\n else:\n pointL, pointR = point2, point1\n deltaY = pointR[1] - pointL[1]\n deltaX = pointR[0] - pointL[0]\n if deltaX != 0:\n for x in range(pointL[0], pointR[0] + 1):\n dx = x - pointL[0]\n dy = dx * deltaY/deltaX\n y = pointL[1] + dy\n if not is_cue_color(image.getpixel((x,y))):\n return False\n else:\n up = min(point1[1], point2[1])\n down = max(point1[1], point2[1])\n x = point1[0]\n for y in range(up, down + 1):\n if not is_cue_color(image.getpixel((x, y))):\n return False\n\n return True" ]
[ "0.6848249", "0.64294106", "0.63830334", "0.6372023", "0.6302667", "0.6286635", "0.627175", "0.62661767", "0.62325734", "0.6230347", "0.61978024", "0.6182168", "0.6139048", "0.6096409", "0.60867286", "0.6068303", "0.60645324", "0.60228837", "0.60018134", "0.5998949", "0.599368", "0.59920514", "0.5983781", "0.59797925", "0.59748274", "0.59709096", "0.5967944", "0.5957216", "0.59541464", "0.59540576" ]
0.7739377
0
Visualize the map environment and solved tower locations. env_state is 'solved' or 'initial'
def visualize_environment(self, env_state):
    fig = plt.figure(figsize=self.figsize)
    ax = plt.subplot(111)
    # Plot the targets
    plt.plot([i[0] for i in self.coordinates__targets],
             [i[1] for i in self.coordinates__targets],
             marker='x', markersize=15, linestyle='None', color='k', label='Target')
    plot_target_values = True
    if plot_target_values:
        for i, t in enumerate(self.coordinates__targets):
            plt.text(t[0], t[1], self.target_values[i])
    # Plot the towers
    tower_colors = ['r', 'b', 'g']
    for tk in xrange(self.N_tower_kinds):
        plt.plot([i[0] for i in self.coordinates__tower_sites[tk]],
                 [i[1] for i in self.coordinates__tower_sites[tk]],
                 marker='o', markersize=10, linestyle='None', color=tower_colors[tk],
                 alpha=.5, label='Tower {} Sites'.format(tk+1))
    if env_state == 'solved':
        for tk in xrange(self.N_tower_kinds):
            plt.plot([i[0] for i in self.coordinates__solved_towers[tk]],
                     [i[1] for i in self.coordinates__solved_towers[tk]],
                     marker='^', markersize=20, linestyle='None', color=tower_colors[tk],
                     label='Tower {} Placed'.format(tk+1))
    for x, y, w, h in self.coordinates__obstacles:
        r = plt.Rectangle((x, y), w, h, fc='c')
        ax.add_patch(r)
    plt.xlim(0, self.map_dimensions[1])
    plt.ylim(0, self.map_dimensions[0])
    plt.legend(numpoints=1, loc='best')
    savename = 'SolvedMap.png' if env_state == 'solved' else 'InitialMap.png'
    plt.savefig(savename)
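A brief usage sketch, consistent with the run_scenario negative shown below: called once before and once after solving, the method writes InitialMap.png and SolvedMap.png respectively. The environment class name here is hypothetical.

env = MapEnvironment()                # hypothetical class exposing the methods below
env.initialize_random_map()
env.visualize_environment('initial')  # saves InitialMap.png
env.solve_environment()
env.visualize_environment('solved')   # saves SolvedMap.png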
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visualize_world(self, brain):\n state_str = ' || '.join([str(self.sensors),\n str(self.actions),\n str(self.reward),\n str(self.size),\n str(self.color),\n str(self.timestep)])\n print(state_str)", "def run_scenario(self):\n self.initialize_random_map()\n self.visualize_environment('initial')\n self.get_tower_target_coverages()\n self.solve_environment()\n self.visualize_environment('solved')", "def show_env(self, img):\n plt.figure(1)\n plt.subplot(111)\n plt.imshow(img, interpolation=\"nearest\")\n plt.show()", "def plot_envs_opt_vs_gym():\n for i in range(len(envs)):\n env = envs[i]\n vel_new = get_log_key(fdir + envs[i] + '-v1/VRPG/', string='fvel_avg', plot=False)\n vel_old = get_log_key(fdir + envs[i] + '-v1/VRPG_old/', string='fvel_avg', plot=False)\n print(len(vel_new), len(vel_old))\n fig = plot_trials(vel_new, fdir + envs[i] + '-v1/', f=lambda it: vel_new[it], name='Hopper_opt', shape='-s',\n step=20)\n plt.ylabel(envs[i] + ' Cumulative reward $R_{iter}$')\n fig = plot_trials(vel_old, fdir + envs[i] + '-v1/', f=lambda it: vel_old[it], name='Hopper_Gym', fig=fig,\n shape='-o', step=20)\n return", "def show_map(pdb,show_sticks_all=False, show_sticks_metalbinding=True, show_probes=True, show_pdb_metals=True):\n view=py3Dmol.view(width=1000, height=800)\n\n view.addModel(open(pdb+'.pdb', 'r').read(),'pdb')\n if show_probes:\n view.addModel(open(pdb+'_PredictedSites.xyz', 'r').read(),'xyz')\n probes = open(pdb+'_PredictedSites.xyz', 'r').readlines()\n if(int(probes[0])!=0):\n probabilities = [p.replace('#','').split()[-1] for p in probes[2:]] # read p from comment in xyz file\n colors = {}\n # use different colors for the probabilities\n for i,x in enumerate(probabilities):\n colors[i] = '#%02x%02x%02x' % (0, 128, int(float(x)/float(probabilities[0])*255))\n else: #no predicted site\n colors = [] \n view.addLabel(\"No probe predicted\", {'position': {'x':0, 'y':0, 'z':0}, 'backgroundColor': '#0080FF', 'fontColor': 'white'});\n \n view.zoomTo()\n view.setBackgroundColor('white')\n view.setStyle({},{'cartoon': {'color':'gray'}})\n if show_sticks_all:\n view.setStyle({}, {'stick':{},'cartoon': {'color':'gray'}})\n if show_pdb_metals:\n view.getModel(0).setStyle({'resn':\"ZN\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"CA\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"CU\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"HG\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"MG\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"FE\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"MN\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"NI\"},{'sphere': {'opacity':.75}})\n view.getModel(0).setStyle({'resn':\"MB\"},{'sphere': {'opacity':.75}})\n \n if show_probes:\n view.getModel(1).setStyle({},{'sphere': {'colorscheme':{'prop':'index', 'map':colors}}})\n \n # add hoverable labels for the residues and the predicted metals\n # two callbacks are needed, one for the residues and one for the metals\n # the metal one displays the probability\n view.getModel(0).setHoverable({},True,'''function(atom,viewer,event,container) {\n if(!atom.label) {\n atom.label = viewer.addLabel(atom.resn+atom.resi+\":\"+atom.atom,{position: atom, backgroundColor: 'mintcream', fontColor:'black'});\n }}''',\n '''function(atom,viewer) { \n if(atom.label) {\n viewer.removeLabel(atom.label);\n delete atom.label;\n }\n }''')\n 
view.getModel(1).setHoverable({},True,'''function(atom,viewer,event,container) {\n if(!atom.label) {\n atom.label = viewer.addLabel(atom.atom+\" [\"+atom.serial+\"]\",{position: atom, backgroundColor: 'mintcream', fontColor:'black'});\n }}''',\n '''function(atom,viewer) { \n if(atom.label) {\n viewer.removeLabel(atom.label);\n delete atom.label;\n }\n }''')\n if show_sticks_metalbinding:\n view.setStyle({'resn':\"HIS\"},{'stick': {}, 'cartoon': {'color':'gray'}})\n view.setStyle({'resn':\"ASP\"},{'stick': {}, 'cartoon': {'color':'gray'}})\n view.setStyle({'resn':\"GLU\"},{'stick': {}, 'cartoon': {'color':'gray'}})\n view.setStyle({'resn':\"CYS\"},{'stick': {}, 'cartoon': {'color':'gray'}})\n\n return view.show()", "def show_map(self):\n self.m1.display()", "def display(self):\n map_show = self.map.copy()\n map_show[self.currY, self.currX] = 4 \n print(map_show)", "def _plot_map(self):\n\n # Plot points if they exist\n\n if len(self._laserX) > 0:\n self._plot_laser()\n\n if len(self._goalX) > 0:\n self._plot_goal()\n\n if len(self._summitX) > 0:\n self._plot_summit()\n\n self._plot_objects()\n\n # Update Plot\n self._fig.canvas.draw_idle()\n\n plt.pause(0.01)", "def visualize_routes(self):\n visualize_tsp.plotTSP([self.best_solution], self.coords)", "def example_3():\n\n # maze = klyubin_world()\n maze = mazeworld.door_world()\n emptymaze = MazeWorld(maze.height, maze.width)\n # maze = mazeworld.tunnel_world()\n n_step = 3\n start = time.time()\n initpos = np.random.randint(maze.dims[0], size=2)\n initpos = [1,4]\n s = maze._cell_to_index(initpos)\n T = emptymaze.compute_model()\n B = maze.compute_model()\n E = maze.compute_empowerment(n_step = n_step).reshape(-1)\n n_s, n_a, _ = T.shape\n agent = EmpowermentMaximiser(alpha=0.1, gamma=0.9, T = T, n_step=n_step, n_samples=1000, det=1.)\n steps = int(10000) \n visited = np.zeros(maze.dims)\n tau = np.zeros(steps)\n D_emp = np.zeros(steps)\n D_mod = n_s*n_a*np.ones(steps)\n for t in range(steps):\n # append data for plotting \n tau[t] = agent.tau\n D_emp[t] = np.mean((E - agent.E)**2)\n D_mod[t] = D_mod[t] - np.sum(np.argmax(agent.T, axis=0) == np.argmax(B, axis=0))\n a = agent.act(s)\n pos = maze._index_to_cell(s)\n visited[pos[0],pos[1]] += 1\n s_ = maze.act(s,list(maze.actions.keys())[a])\n agent.update(s,a,s_)\n s = s_\n print(\"elapsed seconds: %0.3f\" % (time.time() - start) )\n plt.figure(1)\n plt.title(\"value map\")\n Vmap = np.max(agent.Q, axis=1).reshape(*maze.dims)\n maze.plot(colorMap= Vmap )\n plt.figure(2)\n plt.title(\"subjective empowerment\")\n maze.plot(colorMap= agent.E.reshape(*maze.dims))\n plt.figure(3)\n plt.title(\"tau\")\n plt.plot(tau)\n plt.figure(4)\n plt.scatter(agent.E, visited.reshape(n_s))\n plt.xlabel('true empowerment')\n plt.ylabel('visit frequency')\n plt.figure(5)\n plt.title(\"visited\")\n maze.plot(colorMap=visited.reshape(*maze.dims))\n fig, ax1 = plt.subplots()\n red = 'tab:red'\n ax1.set_xlabel('time')\n ax1.set_ylabel('MSE of empowerment map', color=red)\n ax1.plot(D_emp, color=red)\n ax1.tick_params(axis='y', labelcolor=red)\n ax2 = ax1.twinx() \n ax2.set_ylabel('Model disagreement', color='tab:blue') \n ax2.plot(D_mod, color='tab:blue')\n ax2.tick_params(axis='y', labelcolor='tab:blue')\n plt.show()", "def example_2(): \n maze = MazeWorld(8,8)\n for i in range(maze.width):\n if i is not 6 : maze.add_wall([2, i], \"N\") \n for i in range(maze.width):\n if i is not 2 : maze.add_wall([5, i], \"N\")\n n_step = 4\n E = maze.compute_empowerment(n_step=n_step, n_samples=8000)\n maze.plot(colorMap=E)\n 
plt.title('%i-step empowerment' % n_step)\n plt.show()", "def view_map():\n print(\"\"\"\n ____________________________________Client Rooms______________________\n| |1 Locker Rooms 2| 1 | 2 | |\n| |_________ ________| | | Dance |\n| | | |__| |__| Floor |\n| | | Hall |\n| Garage | Front | _______|_______ |\n| | Lobby | | |_ ____________|\n| | | | Storage |\n| | | Lounge |______________|\n| ______________ Car\n|___________________Front Entrance______________________| Allyway\n\"\"\")", "def drought_env_risk_map(request):\n \n view_center = [-105.2, 39.0]\n view_options = MVView(\n projection='EPSG:4326',\n center=view_center,\n zoom=7.0,\n maxZoom=12,\n minZoom=5\n )\n\n # TIGER state/county mapserver\n tiger_boundaries = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://tigerweb.geo.census.gov/arcgis/rest/services/TIGERweb/State_County/MapServer'},\n legend_title='States & Counties',\n layer_options={'visible':True,'opacity':0.2},\n legend_extent=[-112, 36.3, -98.5, 41.66]) \n \n ##### WMS Layers - Ryan\n usdm_legend = MVLegendImageClass(value='Drought Category',\n image_url='http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?map=/ms4w/apps/usdm/service/usdm_current_wms.map&version=1.3.0&service=WMS&request=GetLegendGraphic&sld_version=1.1.0&layer=usdm_current&format=image/png&STYLE=default')\n usdm_current = MVLayer(\n source='ImageWMS',\n options={'url': 'http://ndmc-001.unl.edu:8080/cgi-bin/mapserv.exe?',\n 'params': {'LAYERS':'usdm_current','FORMAT':'image/png','VERSION':'1.1.1','STYLES':'default','MAP':'/ms4w/apps/usdm/service/usdm_current_wms.map'}},\n layer_options={'visible':False,'opacity':0.25},\n legend_title='USDM',\n legend_classes=[usdm_legend],\n legend_extent=[-126, 24.5, -66.2, 49])\n \n # USGS Rest server for HUC watersheds \n watersheds = MVLayer(\n source='TileArcGISRest',\n options={'url': 'https://hydro.nationalmap.gov/arcgis/rest/services/wbd/MapServer'},\n legend_title='HUC Watersheds',\n layer_options={'visible':False,'opacity':0.4},\n legend_extent=[-112, 36.3, -98.5, 41.66])\n \n # Sector drought vulnerability county risk score maps -> from 2018 CO Drought Plan update\n vuln_legend = MVLegendImageClass(value='Risk Score',\n image_url='/static/tethys_gizmos/data/ag_vuln_legend.jpg')\n environ_vuln_kml = MVLayer(\n source='KML',\n options={'url': '/static/tethys_gizmos/data/CO_Environ_vuln_score_2018.kml'},\n layer_options={'visible':True,'opacity':0.75},\n legend_title='Environ Risk Score',\n feature_selection=True,\n legend_classes=[vuln_legend],\n legend_extent=[-109.5, 36.5, -101.5, 41.6])\n \n # Define GeoJSON layer\n # Data from CoCoRaHS Condition Monitoring: https://www.cocorahs.org/maps/conditionmonitoring/\n with open(como_cocorahs) as f:\n data = json.load(f)\n \n # the section below is grouping data by 'scalebar' drought condition\n # this is a work around for displaying each drought report classification with a unique colored icon\n data_sd = {}; data_md ={}; data_ml={}\n data_sd[u'type'] = data['type']; data_md[u'type'] = data['type']; data_ml[u'type'] = data['type']\n data_sd[u'features'] = [];data_md[u'features'] = [];data_ml[u'features'] = []\n for element in data['features']:\n if 'Severely Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if rdate >= week20:\n data_sd[u'features'].append(element)\n if 'Moderately Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if 
rdate >= week20:\n data_md[u'features'].append(element)\n if 'Mildly Dry' in element['properties']['scalebar']:\n rdate = datetime.datetime.strptime(element['properties']['reportdate'][:10],\"%Y-%m-%d\")\n if rdate >= week20:\n data_ml[u'features'].append(element)\n \n cocojson_sevdry = MVLayer(\n source='GeoJSON',\n options=data_sd,\n legend_title='CoCoRaHS Condition Monitor',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Severely Dry', fill='#67000d')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': {'color': '#67000d'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n cocojson_moddry = MVLayer(\n source='GeoJSON',\n options=data_md,\n legend_title='',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Moderately Dry', fill='#a8190d')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': {'color': '#a8190d'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n cocojson_mildry = MVLayer(\n source='GeoJSON',\n options=data_ml,\n legend_title='',\n legend_extent=[-112, 36.3, -98.5, 41.66],\n feature_selection=False,\n legend_classes=[MVLegendClass('point', 'Mildly Dry', fill='#f17d44')],\n layer_options={'style': {'image': {'circle': {'radius': 6,'fill': {'color': '#f17d44'},'stroke': {'color': '#ffffff', 'width': 1},}}}})\n\n \n # Define map view options\n drought_env_risk_map_view_options = MapView(\n height='100%',\n width='100%',\n controls=['ZoomSlider', 'Rotate', 'ScaleLine', 'FullScreen',\n {'MousePosition': {'projection': 'EPSG:4326'}},\n {'ZoomToExtent': {'projection': 'EPSG:4326', 'extent': [-130, 22, -65, 54]}}],\n layers=[tiger_boundaries,cocojson_sevdry,cocojson_moddry,cocojson_mildry,environ_vuln_kml,usdm_current,watersheds],\n view=view_options,\n basemap='OpenStreetMap',\n legend=True\n )\n\n context = {\n 'drought_env_risk_map_view_options':drought_env_risk_map_view_options,\n }\n\n return render(request, 'co_drought/drought_env_risk.html', context)", "def visualize_control_activation_maps(FLAGS, model, input_images=[]):\n # load input\n if len(input_images) == 0:\n # use predefined images\n img_dir='/esat/opal/kkelchte/docker_home/pilot_data/visualization_images'\n input_images=sorted([img_dir+'/'+f for f in os.listdir(img_dir)])\n inputs = load_images(input_images, model.input_size[1:])\n \n # evaluate input to get activation maps\n weights, activation_maps = model.sess.run([[v for v in tf.trainable_variables() if v.name == 'outputs/kernel:0'][0],\n model.endpoints['eval']['activation_maps']], {model.inputs: inputs})\n\n # combine the activation maps\n activation_maps = np.dot(activation_maps,np.squeeze(weights))\n \n if len(activation_maps.shape) != 4: activation_maps = np.expand_dims(activation_maps, axis=-1)\n\n # create a nice plot with on the columns the different images and the rows the different experts\n\n number_of_maps = activation_maps.shape[-1] \n\n fig, axes = plt.subplots(number_of_maps+1, # number of rows\n activation_maps.shape[0], # number of columns\n figsize=(23, 5*(number_of_maps+1)))\n \n # fill first row with original image\n for i in range(axes.shape[1]):\n axes[0, i].set_title(os.path.basename(input_images[i]).split('.')[0])\n axes[0, i].imshow(matplotlibprove(inputs[i]))\n axes[0, i].axis('off')\n\n # get expert names for titling\n experts=np.asarray([[k]*(FLAGS.action_quantity if FLAGS.discrete else 1) for v in sorted(model.factor_offsets.values()) for k in 
model.factor_offsets.keys() if model.factor_offsets[k]==v]).flatten()\n\n # add following rows for different experts with different upscaled activation maps\n # for j in range(activation_maps.shape[-1]): # loop over diferent outputs\n for j in range(number_of_maps): # loop over diferent outputs\n for i in range(axes.shape[1]):\n axes[j+1, i].set_title(experts[j])\n # pure upscaled heat maps:\n axes[j+1, i].imshow(matplotlibprove(activation_maps[i,:,:,j]), cmap='seismic')\n # concatenated in alpha channels:\n # axes[j+1, i].imshow(np.zeros(inputs[i].shape[0:3]))\n # axes[j+1, i].imshow(matplotlibprove(np.concatenate((inputs[i], deprocess_image(sm.resize(activation_maps[i,:,:,j],inputs[i].shape[0:2]+(1,),order=1,mode='constant', preserve_range=True))), axis=2)))\n axes[j+1, i].axis('off')\n\n plt.savefig(FLAGS.summary_dir+FLAGS.log_tag+'/control_activation_maps.jpg',bbox_inches='tight')\n print(\"saved control_activation_maps\")\n # plt.show()\n # import pdb; pdb.set_trace()", "def show_map(self):\n print(self.__str__())", "def visualize(self):\n self.octree.updateInnerOccupancy()\n print(\"Start Octomap Visualization\")\n\n # define parameters\n data = imgviz.data.arc2017()\n camera_info = data['camera_info']\n K = np.array(camera_info['K']).reshape(3, 3)\n width=camera_info['width']\n height=camera_info['height']\n\n # get free and occupied grid\n occupied, _ = self.octree.extractPointCloud()\n #frontier = self.gen_frontier()\n \n print(\"load point cloud\")\n window = pyglet.window.Window(\n width=int(1280), height=int(960)\n )\n\n @window.event\n def on_key_press(symbol, modifiers):\n if modifiers == 0:\n if symbol == pyglet.window.key.Q:\n window.on_close()\n\n gui = glooey.Gui(window)\n hbox = glooey.HBox()\n hbox.set_padding(5)\n\n camera = trimesh.scene.Camera(\n resolution=(width, height), focal=(K[0, 0], K[1, 1])\n )\n\n # initial camera pose\n camera_transform = np.array(\n [\n [1, 0, 0, 0],\n [0, -1, 0, 0],\n [0, 0, -1, -5],\n [0.0, 0.0, 0.0, 1.0],\n ],\n )\n\n \n\n occupied_geom = trimesh.voxel.ops.multibox(\n occupied, pitch=self.resolution, colors=[0.0, 0.0, 0.0, 0.5]\n )\n\n # frontier_geom = trimesh.voxel.ops.multibox(\n # frontier, pitch=self.resolution, colors=[1.0, 0, 0, 0.5]\n # )\n scene = trimesh.Scene(camera=camera, geometry=[occupied_geom])#, frontier_geom])\n scene.camera_transform = camera_transform\n hbox.add(self.labeled_scene_widget(scene, label='octomap'))\n\n\n gui.add(hbox)\n pyglet.app.run()", "def generateStationPlot(dir_path, traj_list, color_scheme='light'):\n\n\n # Choose the color scheme\n cs = MapColorScheme()\n \n if color_scheme == 'light':\n cs.light()\n\n else:\n cs.dark()\n\n\n plt.figure(figsize=(19.2, 10.8))\n\n # Init the map\n m = Basemap(projection='cyl', resolution='i')\n\n # Draw the coast boundary and fill the oceans with the given color\n m.drawmapboundary(fill_color=cs.map_background)\n\n # Fill continents, set lake color same as ocean color\n m.fillcontinents(color=cs.continents, lake_color=cs.lakes, zorder=1)\n\n # Draw country borders\n m.drawcountries(color=cs.countries)\n m.drawstates(color=cs.states, linestyle='--')\n\n\n\n ### PLOT WORLD MAP ###\n\n # Group stations into countries\n country_dict = {}\n for traj in traj_list:\n\n for obs in traj.observations:\n\n # Extract country code\n country_code = obs.station_id[:2]\n\n if country_code not in country_dict:\n country_dict[country_code] = {}\n \n\n if obs.station_id not in country_dict[country_code]:\n country_dict[country_code][obs.station_id] = [obs.lat, obs.lon]\n\n\n\n # 
Plot stations in all countries\n for country_code in country_dict:\n\n station_dict = country_dict[country_code]\n\n # Extract lat/lon\n lat = np.degrees([station_dict[station_id][0] for station_id in station_dict])\n lon = np.degrees([station_dict[station_id][1] for station_id in station_dict])\n\n # Convert lat/lon to x/y\n x, y = m(lon, lat)\n\n plt.scatter(x, y, s=0.75, zorder=5, label=\"{:s}: {:d}\".format(country_code, len(lat)))\n\n\n plt.legend(loc='lower left')\n\n plt.tight_layout()\n\n plt.savefig(os.path.join(dir_path, \"world_map.png\"), dpi=100)\n\n plt.close()\n\n ### ###", "def graphics(env, fovea, objects, unit):\n plt.clf()\n\n env = environment.redraw(env, unit, objects)\n fovea_im = fovea.get_focus_image(env)\n\n plt.subplot(121)\n plt.title('Training environment')\n plt.xlim(0, unit)\n plt.ylim(0, unit)\n plt.imshow(env)\n\n # PLOT DESK EDGES\n plt.plot([0.2*unit, 0.2*unit, 0.8*unit, 0.8*unit, 0.2*unit],\n [0.2*unit, 0.8*unit, 0.8*unit, 0.2*unit, 0.2*unit], 'w-'\n )\n\n # PLOT FOVEA EDGES\n fov_indices = fovea.get_index_values()\n plt.plot([fov_indices[0][0], fov_indices[0][0], fov_indices[0][1],\n fov_indices[0][1], fov_indices[0][0]],\n [fov_indices[1][0], fov_indices[1][1], fov_indices[1][1],\n fov_indices[1][0], fov_indices[1][0]], 'w-'\n )\n\n plt.subplot(122)\n plt.title('Focus image')\n plt.imshow(fovea_im)\n\n plt.draw()\n plt.pause(0.01)", "def plot_state(mu, sigma, landmarks, timestep, observedLandmarks, z, window):\n\n plt.clf()\n plt.grid('on')\n \n draw_probe_ellipse(mu[:2], sigma[:2,:2], 0.6, 'r')\n plt.plot(landmarks['x'], landmarks['y'], 'k+', markersize=10, linewidth=5)\n\n for i in range(len(observedLandmarks)):\n\tif observedLandmarks[i]:\n\t plt.plot(mu[2*i + 3],mu[2*i + 4], 'bo', fillstyle='none', markersize=10, linewidth=5)\n \t draw_probe_ellipse(mu[2*i + 3:2*i+ 5], sigma[2*i + 3:2*i+ 5,2*i + 3:2*i + 5], 0.6, 'b')\n\n for i in range(len(z)):\n\tmX = mu[2*z[i]['id'] + 3]\n\tmY = mu[2*z[i]['id'] + 4]\n \tplt.plot([mu[0], mX], [mu[1], mY], color='k', linewidth=1)\n\n drawrobot(mu[:3], 'r', 3, 0.3, 0.3)\n plt.xlim([-2., 12.])\n plt.ylim([-2., 12.])\n\n if window:\n plt.draw()\n plt.pause(0.1)\n else:\n filename = '../ekf_%03d.png'.format(timestep)\n plt.savefig(filename)", "def plot_Q_function(self):\r\n input_state = np.zeros([1, self.feature_number])\r\n input_action = np.zeros([1, self.action_space])\r\n actions = np.linspace(-3., 3., 50)\r\n v_ego = np.linspace(0., 30., 50)\r\n if self.feature_number == 1:\r\n Q_map = np.zeros((len(v_ego), len(actions)))\r\n for v in range(len(v_ego)):\r\n for a in range(len(actions)):\r\n input_state[0, 0] = self.v_set - v_ego[v]\r\n input_state = input_state.astype(float)\r\n input_action[0, 0] = actions[a]\r\n Q_map[v, a] = self.critic.predict([input_state, input_action])\r\n elif self.feature_number == 2:\r\n \"\"\"TODO: Adjust to DDPG critic layout\"\"\"\r\n Q_map = np.zeros((500, 20, self.action_space))\r\n for distance in range(500):\r\n for delta_v in range(-10, 10):\r\n input[0, 0] = distance\r\n input[0, 1] = delta_v\r\n Q_map[distance, delta_v, :] = self.critic.predict(input)\r\n elif self.feature_number == 3:\r\n \"\"\"TODO: Implementation\"\"\"\r\n return Q_map", "def preview_ways(geodataframe):\n\n # Map tiles from contextily are provided in the Web Mercator coordinate reference system (EPSG:3857).\n gdf_wm = geodataframe.to_crs(epsg='3857')\n # Add a column for the centre of each geometry\n gdf_wm['centroid'] = gdf_wm.geometry.centroid\n # Create plot using matplotlib functionality\n ax = 
gdf_wm.plot(figsize=(10, 6), color='blue', linewidth=2)\n gdf_wm.centroid.plot(ax=ax, marker='o', color='red', alpha=0.5, markersize=40)\n # Add a basemap from contextily. This map should look a lot like Overpass Turbo!\n ctx.add_basemap(ax, source=ctx.providers.OpenStreetMap.Mapnik)", "def drawCoordinatePlane_region():\r\n turtle2 = t.Screen()\r\n turtle2.title(\"Life Expectancy versus Region\")\r\n t2.speed(0)\r\n t3.speed(0)\r\n setTurtle(t0)\r\n setTurtle(t1)\r\n setTurtle(t2)\r\n setTurtle(t3)\r\n drawAxes(t0)\r\n t1.left(90)\r\n drawAxes(t1)\r\n t0.pu()\r\n t0.fd(-80)\r\n t0.lt(90)\r\n drawlabels(t0, t1)\r\n drawPoints(t0, t1)\r\n t0.pu()\r\n t1.pu()\r\n t2.pu()\r\n t3.pu()\r\n t0.goto(initialCoordinates())\r\n t1.goto(initialCoordinates())\r\n t2.goto(initialCoordinates())\r\n t3.goto(initialCoordinates())\r\n t1.lt(90)", "def runCheck(self):\n # Select the layers open in the legendInterface and add them to an array\n crs = QgsCoordinateReferenceSystem()\n layers = self.iface.legendInterface().layers()\n layer_list = []\n # Declare coordinate system to print out screen\n # VN2000 Noi bo mui 3\n htd_103_nb = \"+proj=tmerc +lat_0=0 +lon_0=103 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_104_nb = \"+proj=tmerc +lat_0=0 +lon_0=104 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_104_5_nb = \"+proj=tmerc +lat_0=0 +lon_0=104.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_104_75_nb = \"+proj=tmerc +lat_0=0 +lon_0=104.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_105_nb = \"+proj=tmerc +lat_0=0 +lon_0=105 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_105_5_nb = \"+proj=tmerc +lat_0=0 +lon_0=105.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_105_75_nb = \"+proj=tmerc +lat_0=0 +lon_0=105.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_106_nb = \"+proj=tmerc +lat_0=0 +lon_0=106 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_106_25_nb = \"+proj=tmerc +lat_0=0 +lon_0=106.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_106_5_nb = \"+proj=tmerc +lat_0=0 +lon_0=106.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_107_nb = \"+proj=tmerc +lat_0=0 +lon_0=107 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_107_25_nb = \"+proj=tmerc +lat_0=0 +lon_0=107.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_107_5_nb = \"+proj=tmerc +lat_0=0 +lon_0=107.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_107_75_nb = \"+proj=tmerc +lat_0=0 +lon_0=107.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_108_nb = \"+proj=tmerc +lat_0=0 +lon_0=108 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_108_25_nb = \"+proj=tmerc +lat_0=0 +lon_0=108.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n htd_108_5_nb = \"+proj=tmerc +lat_0=0 +lon_0=108.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs\"\n\n # VN2000 Hoi nhap mui 3\n htd_103_hn = \"+proj=tmerc +lat_0=0 +lon_0=103 +k=0.9999 +x_0=500000 
+y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_104_hn = \"+proj=tmerc +lat_0=0 +lon_0=104 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_104_5_hn = \"+proj=tmerc +lat_0=0 +lon_0=104_5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_104_75_hn = \"+proj=tmerc +lat_0=0 +lon_0=104.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_105_hn = \"+proj=tmerc +lat_0=0 +lon_0=105 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_105_5_hn = \"+proj=tmerc +lat_0=0 +lon_0=105.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_105_75_hn = \"+proj=tmerc +lat_0=0 +lon_0=105.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_106_hn = \"+proj=tmerc +lat_0=0 +lon_0=106 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_106_25_hn = \"+proj=tmerc +lat_0=0 +lon_0=106.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_106_5_hn = \"+proj=tmerc +lat_0=0 +lon_0=106.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_107_hn = \"+proj=tmerc +lat_0=0 +lon_0=107 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_107_25_hn = \"+proj=tmerc +lat_0=0 +lon_0=107.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_107_5_hn = \"+proj=tmerc +lat_0=0 +lon_0=107.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_107_75_hn = \"+proj=tmerc +lat_0=0 +lon_0=107.75 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_108_hn = \"+proj=tmerc +lat_0=0 +lon_0=108 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_108_25_hn = \"+proj=tmerc +lat_0=0 +lon_0=108.25 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n htd_108_5_hn = \"+proj=tmerc +lat_0=0 +lon_0=108.5 +k=0.9999 +x_0=500000 +y_0=0 +ellps=WGS84 +towgs84=-191.90441429,-39.30318279,-111.45032835,0.00928836,-0.01975479,0.00427372,0.252906278 +units=m +no_defs\"\n\n # UTM 48,49\n htd_utm_48 = \"+proj=utm 
+zone=48 +datum=WGS84 +units=m +no_defs\"\n htd_utm_49 = \"+proj=utm +zone=49 +datum=WGS84 +units=m +no_defs\"\n\n # WGS84 Latlong - 4326\n htd_latlong_4326 = \"+proj=longlat +datum=WGS84 +no_defs\"\n\n #Loop all layers\n for layer in layers:\n if layer.crs().toProj4() == htd_103_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 103 mui 3 \")\n elif layer.crs().toProj4() == htd_104_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 104 mui 3 \")\n elif layer.crs().toProj4() == htd_104_5_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 104.5 mui 3 \")\n elif layer.crs().toProj4() == htd_104_75_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 104.75 mui 3 \")\n elif layer.crs().toProj4() == htd_105_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 105 mui 3 \")\n elif layer.crs().toProj4() == htd_105_5_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 105.5 mui 3 \")\n elif layer.crs().toProj4() == htd_105_75_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 105.75 mui 3 \")\n elif layer.crs().toProj4() == htd_106_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 106 mui 3 \")\n elif layer.crs().toProj4() == htd_106_25_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 106.25 mui 3 \")\n elif layer.crs().toProj4() == htd_106_5_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 106.5 mui 3 \")\n elif layer.crs().toProj4() == htd_107_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 107 mui 3 \")\n elif layer.crs().toProj4() == htd_107_25_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 107.25 mui 3 \")\n elif layer.crs().toProj4() == htd_107_5_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 107.5 mui 3 \")\n elif layer.crs().toProj4() == htd_107_75_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 107.75 mui 3 \")\n elif layer.crs().toProj4() == htd_108_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 108 mui 3 \")\n elif layer.crs().toProj4() == htd_108_25_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 108.25 mui 3 \")\n elif layer.crs().toProj4() == htd_108_5_nb :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Noi bo KTT 108.5 mui 3 \")\n # VN2000 Hoi nhap\n elif layer.crs().toProj4() == htd_103_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 103 mui 3 \")\n elif layer.crs().toProj4() == htd_104_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 104 mui 3 \")\n elif layer.crs().toProj4() == htd_104_5_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 104.5 mui 3 \")\n elif layer.crs().toProj4() == htd_104_75_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 104.75 mui 3 \")\n elif layer.crs().toProj4() == htd_105_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 105 mui 3 \")\n elif layer.crs().toProj4() == htd_105_5_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 105.5 mui 3 \")\n elif layer.crs().toProj4() == htd_105_75_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 105.75 mui 3 \")\n elif layer.crs().toProj4() == htd_106_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 106 mui 3 \")\n elif layer.crs().toProj4() == htd_106_25_hn :\n 
layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 106.25 mui 3 \")\n elif layer.crs().toProj4() == htd_106_5_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 106.5 mui 3 \")\n elif layer.crs().toProj4() == htd_107_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 107 mui 3 \")\n elif layer.crs().toProj4() == htd_107_25_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 107.25 mui 3 \")\n elif layer.crs().toProj4() == htd_107_5_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 107.5 mui 3 \")\n elif layer.crs().toProj4() == htd_107_75_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 107.75 mui 3 \")\n elif layer.crs().toProj4() == htd_108_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 108 mui 3 \")\n elif layer.crs().toProj4() == htd_108_25_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 108.25 mui 3 \")\n elif layer.crs().toProj4() == htd_108_5_hn :\n layer_list.append(layer.name() + \" -->\" + \"VN-2000 Hoi nhap KTT 108.5 mui 3 \")\n\n # UTM 48,49, Latlong\n elif layer.crs().toProj4() == htd_utm_48 :\n layer_list.append(layer.name() + \" -->\" + \"UTM Zone 48N - EPSG: 32648\")\n elif layer.crs().toProj4() == htd_utm_49 :\n layer_list.append(layer.name() + \" -->\" + \"UTM Zone 49N - EPSG: 32649\")\n elif layer.crs().toProj4() == htd_latlong_4326 :\n layer_list.append(layer.name() + \" -->\" + \"WGS 84 Lat/Long - EPSG: 4326\")\n else:\n layer_list.append(layer.name() + \" -->\" +layer.crs().toProj4())\n # Add layer_list array to listWidget, clear layer if removed to layer in tools\n self.dlgtool3.listWidget_check.clear()\n self.dlgtool3.listWidget_check.addItems(layer_list)\n # show the dialog\n self.dlgtool3.show()\n # Run the dialog event loop\n result = self.dlgtool3.exec_()\n # See if OK was pressed\n if result:\n # Do something useful here - delete the line containing pass and\n # substitute with your code.\n pass", "def visualize_M_gridworld(self, state=0):\n\n\t\tplt.subplot(221); plt.imshow(self.M[12,0,:].reshape(5,5)), plt.colorbar()\n\t\tplt.subplot(222); plt.imshow(self.M[12,1,:].reshape(5,5)), plt.colorbar()\n\t\tplt.subplot(223); plt.imshow(self.M[12,2,:].reshape(5,5)), plt.colorbar()\n\t\tplt.subplot(224); plt.imshow(self.M[12,3,:].reshape(5,5)), plt.colorbar()\n\t\tplt.show()", "def display(self):\n ob = self._convert_state(self._env.reset())\n done = False\n while not done:\n ac, _ = self._act(ob, stochastic=False)\n ob, rew, done, _ = self._env.step(ac)\n ob = self._convert_state(ob)\n self._env.render()\n self._env.close()", "def test_simple_pass():\n m = view(nybb)\n m = view(world)\n m = view(cities)\n m = view(world.geometry)", "def drawSAT(self):\r\n\t\tif self.show_weather == True:\r\n\t\t\timport weather\r\n\t\t\tweatherdata = weather.get_weather(self)\r\n\t\t\tweatherdata.start()\r\n\t\tsat = draw_sat(self,self.satBlocks)\r\n\t\tsat.start()\r\n\t\tmap = draw_map(self,self.mapBlocks)\r\n\t\tmap.start()\r\n\t\tself.redraw_markers()\r\n\t\tvirtualEarth = draw_virtualearth(self,self.satBlocks)\r\n\t\tvirtualEarth.start()\r\n\t\tsat.join()\r\n\t\tmap.join()\r\n\t\tvirtualEarth.join()\r\n\t\tif self.routecontainer['enable'] == 1:\r\n\t\t\tself.makeRoute(self.routecontainer['linestring'])\r\n\t\t\tself.route_pic.setVisible(True)\r\n\t\telse:\r\n\t\t\tself.route_pic.setVisible(False)\r\n\t\tif self.hybrid == 1:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, 
urllib.quote(\"Google Hybrid\"), \"/xbmc_earth/browse/Google/Hybrid_act.html\",self)\r\n\t\telif self.hybrid == 2:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"Google Map\"), \"/xbmc_earth/browse/Google/Map_act.html\",self)\r\n\t\telif self.hybrid == 3:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"Google Area\"), \"/xbmc_earth/browse/Google/Area_act.html\",self)\r\n\t\telif self.hybrid == 0:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"Google Satelite\"), \"/xbmc_earth/browse/Google/Satelite_act.html\",self)\r\n\t\telif self.hybrid == 4:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"VirtualEarth Satelite\"), \"/xbmc_earth/browse/VirtualEarth/Satelite_act.html\",self)\r\n\t\telif self.hybrid == 5:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"VirtualEarth Hybrid\"), \"/xbmc_earth/browse/VirtualEarth/Hybrid_act.html\",self)\r\n\t\telif self.hybrid == 6:\r\n\t\t\tself.xbmcearth_communication.get_Google_Analytics( referer_url, urllib.quote(\"VirtualEarth Map\"), \"/xbmc_earth/browse/VirtualEarth/Map_act.html\",self)", "def population_results_map():\n start_time = time()\n fig= Figure(figsize=(60,52), frameon=True, tight_layout=True)\n ax = fig.add_subplot(1,1,1, axisbg='#EEEEEE')\n ax.grid(color='white', linestyle='solid')\n rstyle(ax)\n\n queryset = Unit.objects.all()\n # It might be faster to request a flat value list and then construct new tuples based on that\n latlong = [(u.latitude, u.longitude, \n u.unitstats.cumulative_infected, \n u.unitstats.cumulative_vaccinated,\n u.unitstats.cumulative_destroyed,\n u.unitstats.cumulative_zone_focus, \n u.initial_size,\n ) if hasattr(u, \"unitstats\") else\n (u.latitude, u.longitude, -1, -1, -1, -1, u.initial_size)\n for u in queryset]\n total_iterations = float(len(list_of_iterations()))\n latitude, longitude, infected, vaccinated, destroyed, zone_focus, herd_size = zip(*latlong)\n zone_blues, red_infected, green_vaccinated = define_color_mappings()\n \n graph_zones(ax, latitude, longitude, total_iterations, zone_blues, zone_focus)\n graph_states(ax, latitude, longitude, total_iterations, infected, vaccinated, destroyed)\n \n neutral_longitude = [entry[1] for entry in latlong if not any(x > 0 for x in (entry[2], entry[3], entry[4]))]\n neutral_latitude = [entry[0] for entry in latlong if not any(x > 0 for x in (entry[2], entry[3], entry[4]))]\n # to ensure zero occurrences has a different color\n uninvolved = ax.scatter(neutral_longitude,\n neutral_latitude,\n marker='s',\n s=[min(max(0.25, size / 100), 1000) for size in herd_size],\n color=(0.2, 0.2, 0.2, 1.0),\n zorder=1000)\n Results.graphing.crop_to_fit_map(ax)\n print(\"Population Map took %i seconds\" % int(time() - start_time))\n return fig", "def draw_on_world(self, world):\n for index, wp in enumerate(self.waypoints):\n # Adds 0.5 to z to ensure that the point is above the road surface.\n loc = (wp.location +\n pylot.utils.Location(0, 0, 0.5)).as_simulator_location()\n world.debug.draw_point(loc, size=0.1, life_time=DEFAULT_VIS_TIME)\n # if self.road_options and index < len(self.road_options):\n # world.debug.draw_string(loc,\n # str(self.road_options[index]),\n # life_time=DEFAULT_VIS_TIME)", "def basic_map(proj):\n fig = plt.figure(figsize=(15, 10))\n add_metpy_logo(fig, 0, 80, size='large')\n view = fig.add_axes([0, 0, 1, 1], projection=proj)\n view.set_extent([-120, -70, 20, 
50])\n view.add_feature(cfeature.STATES.with_scale('50m'))\n view.add_feature(cfeature.OCEAN)\n view.add_feature(cfeature.COASTLINE)\n view.add_feature(cfeature.BORDERS, linestyle=':')\n return fig, view" ]
[ "0.6409097", "0.6119604", "0.5912742", "0.586634", "0.58319414", "0.57472837", "0.5705525", "0.5673055", "0.5668513", "0.5658401", "0.56487894", "0.56268203", "0.5603009", "0.55793464", "0.55743", "0.55685604", "0.5551185", "0.5547571", "0.54791296", "0.547258", "0.54570925", "0.5455921", "0.54464084", "0.543607", "0.5417081", "0.54159206", "0.53964335", "0.5385521", "0.5371122", "0.53694564" ]
0.8492099
0
Run the whole scenario. Initialize map, solve placement, visualize everything.
def run_scenario(self):
    self.initialize_random_map()
    self.visualize_environment('initial')
    self.get_tower_target_coverages()
    self.solve_environment()
    self.visualize_environment('solved')
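For context, a minimal sketch of the class this method would sit on. The helper names are taken directly from the calls above; the class name and the bodies below are hypothetical placeholders, not the original implementation:

class CoverageScenario:
    """Hypothetical host class for run_scenario (illustrative only)."""

    def initialize_random_map(self):
        # place towers and targets at random positions on the grid
        raise NotImplementedError

    def visualize_environment(self, label):
        # render the current map state, tagged e.g. 'initial' or 'solved'
        raise NotImplementedError

    def get_tower_target_coverages(self):
        # compute which targets each candidate tower position covers
        raise NotImplementedError

    def solve_environment(self):
        # choose a tower placement that covers the targets
        raise NotImplementedError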
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n # Return needed Data Frames to analyze\n data_frame, seasons, col, labels, stats, kaggle = load_frames()\n\n # Create the maps now\n create_shot_maps(data_frame,seasons)\n create_scenario_map()\n \n # Create the Plots\n plot_season_graphs(stats)\n plot_pie_charts(kaggle)\n plot_shot_timings(kaggle)\n plot_radar(stats, col, labels)", "def run(self):\n self.simulate_test_data()\n self.pipeline_test_data()\n self.plot_jump_flags_image()\n self.plot_groupdq_flags(pixel=[884, 550])\n self.plot_ramps_pre_post_correction(pixel=[884, 550])", "def run(self):\n self.initialise()\n self.setup_disks()\n self.solve_puzzle()\n input('Finished. Press ENTER to exit.')", "def main():\n\n\tglobal theMap, width, height\n\n\tprint \"Starting to do everything...\"\n\tprint \"Loading the map...\"\n\n\ttheMap = test_map.t_d\n\twidth = test_map.t_w\n\theight = test_map.t_h\n\n\tprint \"Starting to process...\"\n\tprint \"\"\n\n\tnodeDict, distanceDict = getNodesAndDistances()\n\n\tprint \"\"\n\tprint \"Got the node and distance dictionaries...\"\n\tprint \"Opening files...\"\n\n\tnodes = open(\"node_list.dat\", \"w\")\n\tdistances = open(\"node_distances.dat\", \"w\")\n\n\tprint \"Pickleing the node dictionary...\"\n\n\tpickle.dump(nodeDict, nodes)\n\n\tprint \"Pickeling the distance dictionary...\"\n\n\tpickle.dump(distanceDict, distances)\n\t\n\tprint \"Closing files...\"\n\n\tnodes.close()\n\tdistances.close()\n\n\tprint \"Done!\"", "def run(self):\n self.iface.mapCanvas().setMapTool(self.tool)", "def main():\n for task in range(1, 6):\n # get map object for the current task\n map_obj = MapObj(task=task)\n # display map\n map_obj.show_map()\n # find cost optimal path using a-star\n node = search(\n map_obj=map_obj,\n heuristic=euclidian_distance,\n moving_goal=(task == 5)\n )\n # draw optimal path on map\n map_obj.draw_path(node)\n # display the map\n map_obj.show_map()", "def run_scenario(self):\n self.start_system_time = time.time()\n self.start_game_time = GameTime.get_time()\n\n self._watchdog.start()\n self._running = True\n\n while self._running:\n timestamp = None\n world = CarlaDataProvider.get_world()\n if world:\n snapshot = world.get_snapshot()\n if snapshot:\n timestamp = snapshot.timestamp\n if timestamp:\n self._tick_scenario(timestamp)", "def main():\n \n cities, coordinates, speedlimits, adjlist = data_for_app()\n \n ui(cities, coordinates, speedlimits, adjlist)", "def run(self) -> None:\n\n num_trials = 3\n num_successes = 0\n\n # Initialize the scene.\n self.init_scene()\n for i in range(num_trials):\n status = self.lift_container()\n if status != TaskStatus.success:\n continue\n status = self.lift_target_object()\n if status != TaskStatus.success:\n continue\n status = self.try_put_in_container()\n print(f\"Tried to put object in container: {status}\")\n if status == TaskStatus.success:\n num_successes += 1\n accuracy = float(num_successes) / num_trials\n print(f\"Accuracy: {accuracy}\")", "def main():\n st.sidebar.title(\"Controlling\")\n st.markdown(\n \"\"\"\n# Bewegungsdaten verschiedener Datenquellen - Social Distancing\nResulate von politischen Maßnamen sowie andere Faktoren die sich auf die Anzahl der Infektionen auswirken.\n\"\"\"\n )\n\n select_block_container_style()\n\n # Map with data from uber | EXAMPLE FROM STREAMLIT\n place1 = load_data(100000)\n\n hour = st.slider(\"Hour to look at\", 0, 23)\n\n place1 = place1[place1[DATE_TIME].dt.hour == hour]\n\n st.subheader(\"Geo data between %i:00 and %i:00\" % (hour, (hour + 1) % 24))\n midpoint = 
(np.average(place1[\"lat\"]), np.average(place1[\"lon\"]))\n\n st.write(pdk.Deck(\n map_style=\"mapbox://styles/mapbox/light-v9\",\n initial_view_state={\n \"latitude\": midpoint[0],\n \"longitude\": midpoint[1],\n \"zoom\": 11,\n \"pitch\": 50,\n },\n layers=[\n pdk.Layer(\n \"HexagonLayer\",\n data=place1,\n get_position=[\"lon\", \"lat\"],\n radius=100,\n elevation_scale=4,\n elevation_range=[0, 1000],\n pickable=True,\n extruded=True,\n ),\n ],\n ))\n\n # My preliminary idea of an API for generating a grid\n with Grid(\"1 1 1\", color=COLOR, background_color=BACKGROUND_COLOR) as grid:\n grid.cell(\n class_=\"a\",\n grid_column_start=2,\n grid_column_end=3,\n grid_row_start=1,\n grid_row_end=2,\n ).markdown(\"# Hier vielleicht plots oder Tabellen oder einfach nur Text.\")\n grid.cell(\"b\", 2, 3, 2, 3).text(\"The cell to the left is a dataframe\")\n grid.cell(\"c\", 3, 4, 2, 3).text(\"The cell to the left is a textframe\")\n grid.cell(\"d\", 1, 2, 1, 3).dataframe(get_dataframe())\n grid.cell(\"e\", 3, 4, 1, 2).markdown(\n \"Try changing the **block container style** in the sidebar!\"\n )\n grid.cell(\"f\", 1, 3, 3, 4).text(\n \"The cell to the right is a matplotlib svg image\"\n )\n grid.cell(\"g\", 3, 4, 3, 4).pyplot(get_matplotlib_plt())\n\n st.plotly_chart(get_plotly_subplots())", "def run_agent(self):\n do_plot = False\n\n # -- Load and init the Helper mission --#\n print('Generate and load the ' + self.mission_type + ' mission with seed ' + str(\n self.mission_seed) + ' allowing ' + self.AGENT_MOVEMENT_TYPE + ' movements')\n mission_xml, reward_goal, reward_intermediate, n_intermediate_rewards, reward_timeout, reward_sendcommand, timeout = init_mission(\n self.agent_host, self.agent_port, self.AGENT_NAME, self.mission_type, self.mission_seed,\n self.AGENT_MOVEMENT_TYPE)\n self.solution_report.setMissionXML(mission_xml)\n\n # -- Define local capabilities of the agent (sensors)--#\n self.agent_host.setObservationsPolicy(MalmoPython.ObservationsPolicy.LATEST_OBSERVATION_ONLY)\n self.agent_host.setVideoPolicy(MalmoPython.VideoPolicy.LATEST_FRAME_ONLY)\n self.agent_host.setRewardsPolicy(MalmoPython.RewardsPolicy.KEEP_ALL_REWARDS)\n\n time.sleep(1)\n\n # -- Get the state of the world along with internal agent state...--#\n state_t = self.agent_host.getWorldState()\n\n # -- Get a state-space model by observing the Orcale/GridObserver--#\n if state_t.is_mission_running:\n # -- Make sure we look in the right direction when observing the surrounding (otherwise the coordinate system will rotated by the Yaw !) --#\n # Look East (towards +x (east) and +z (south) on the right, i.e. a std x,y coordinate system) yaw=-90\n self.agent_host.sendCommand(\"setPitch 20\")\n time.sleep(1)\n self.agent_host.sendCommand(\"setYaw -90\")\n time.sleep(1)\n\n # -- Basic map --#\n state_t = self.agent_host.getWorldState()\n\n if state_t.number_of_observations_since_last_state > 0:\n msg = state_t.observations[-1].text # Get the details for the last observed state\n oracle_and_internal = json.loads(msg) # Parse the Oracle JSON\n grid = oracle_and_internal.get(u'grid', 0)\n xpos = oracle_and_internal.get(u'XPos', 0)\n zpos = oracle_and_internal.get(u'ZPos', 0)\n ypos = oracle_and_internal.get(u'YPos', 0)\n yaw = oracle_and_internal.get(u'Yaw', 0)\n pitch = oracle_and_internal.get(u'Pitch', 0)\n\n # -- Parste the JOSN string, Note there are better ways of doing this! 
--#\n full_state_map_raw = str(grid)\n full_state_map_raw = full_state_map_raw.replace(\"[\", \"\")\n full_state_map_raw = full_state_map_raw.replace(\"]\", \"\")\n full_state_map_raw = full_state_map_raw.replace(\"u'\", \"\")\n full_state_map_raw = full_state_map_raw.replace(\"'\", \"\")\n full_state_map_raw = full_state_map_raw.replace(\" \", \"\")\n aa = full_state_map_raw.split(\",\")\n vocs = list(set(aa))\n for word in vocs:\n for i in range(0, len(aa)):\n if aa[i] == word:\n aa[i] = vocs.index(word)\n\n X = np.asarray(aa);\n nn = int(math.sqrt(X.size))\n X = np.reshape(X, [nn, nn]) # Note: this matrix/table is index as z,x\n\n # -- Visualize the discrete state-space --#\n if do_plot:\n print yaw\n plt.figure(1)\n imgplot = plt.imshow(X.astype('float'), interpolation='none')\n plt.pause(4)\n # plt.show()\n\n # -- Define the unique states available --#\n state_wall = vocs.index(\"stained_hardened_clay\")\n state_impossible = vocs.index(\"stone\")\n state_initial = vocs.index(\"emerald_block\")\n state_goal = vocs.index(\"redstone_block\")\n\n # -- Extract state-space --#\n offset_x = 100 - math.floor(xpos);\n offset_z = 100 - math.floor(zpos);\n\n state_space_locations = {}; # create a dict\n\n for i_z in range(0, len(X)):\n for j_x in range(0, len(X)):\n if X[i_z, j_x] != state_impossible and X[i_z, j_x] != state_wall:\n state_id = \"S_\" + str(int(j_x - offset_x)) + \"_\" + str(int(i_z - offset_z))\n state_space_locations[state_id] = (int(j_x - offset_x), int(i_z - offset_z))\n if X[i_z, j_x] == state_initial:\n state_initial_id = state_id\n loc_start = state_space_locations[state_id]\n elif X[i_z, j_x] == state_goal:\n state_goal_id = state_id\n loc_goal = state_space_locations[state_id]\n\n # -- Generate state / action list --#\n # First define the set of actions in the defined coordinate system \n actions = {\"west\": [-1, 0], \"east\": [+1, 0], \"north\": [0, -1], \"south\": [0, +1]}\n state_space_actions = {}\n for state_id in state_space_locations:\n possible_states = {}\n for action in actions:\n # -- Check if a specific action is possible --#\n delta = actions.get(action)\n state_loc = state_space_locations.get(state_id)\n state_loc_post_action = [state_loc[0] + delta[0], state_loc[1] + delta[1]]\n\n # -- Check if the new possible state is in the state_space, i.e., is accessible --#\n state_id_post_action = \"S_\" + str(state_loc_post_action[0]) + \"_\" + str(\n state_loc_post_action[1])\n if state_space_locations.get(state_id_post_action) != None:\n possible_states[state_id_post_action] = 1\n\n # -- Add the possible actions for this state to the global dict --#\n state_space_actions[state_id] = possible_states\n\n # -- Kill the agent/mission --#\n agent_host.sendCommand(\"tp \" + str(0) + \" \" + str(0) + \" \" + str(0))\n time.sleep(2)\n\n # -- Save the info an instance of the StateSpace class --\n self.state_space.state_actions = state_space_actions\n self.state_space.state_locations = state_space_locations\n self.state_space.start_id = state_initial_id\n self.state_space.start_loc = loc_start\n self.state_space.goal_id = state_goal_id\n self.state_space.goal_loc = loc_goal\n\n # -- Reward location and values --#\n # OPTIONAL: If you want to account for the intermediate rewards \n # in the Random/Simple agent (or in your analysis) you can \n # obtain ground-truth by teleporting with the tp command \n # to all states and detect whether you recieve recieve a \n # diamond or not using the inventory field in the oracle variable \n #\n # As default the state_space_rewards is 
just set to contain \n # the goal state which is found above.\n # \n state_space_rewards = {}\n state_space_rewards[state_goal_id] = reward_goal\n\n # HINT: You can insert your own code for getting \n # the location of the intermediate rewards\n # and populate the state_space_rewards dict \n # with more information (optional). \n # WARNING: This is a bit tricky, please consult tutors before starting\n\n # -- Set the values in the state_space container --#\n self.state_space.reward_states = state_space_rewards\n self.state_space.reward_states_n = n_intermediate_rewards + 1\n self.state_space.reward_timeout = reward_timeout\n self.state_space.timeout = timeout\n self.state_space.reward_sendcommand = reward_sendcommand\n else:\n self.state_space = None\n # -- End if observations --#\n\n return", "def run(self):\n\n counter = 0\n timer = time.clock()\n\n # wait 10 seconds for arduino to connect\n print(\"Connecting to Arduino, please wait till confirmation message\")\n time.sleep(4)\n\n # This asks nicely for goal location, etc\n self.initiate_world()\n\n try:\n c = True\n\n while c != 27: # the ESC key\n if self.task is None:\n print(\"Please enter the task you wish to execute:\")\n self.task = sys.stdin.readline().strip()\n\n t2 = time.time()\n # change of time between frames in seconds\n delta_time = t2 - timer\n timer = t2\n\n # getting all the data from the world state\n data, modified_frame = self.vision.get_world_state()\n\n # update the gui\n self.gui.update(delta_time, self.vision.frame, modified_frame, data)\n\n # Update our world with the positions of robot and ball\n self.world.update_positions(data)\n\n # Only run the task every 20 cycles, this allows us to catch up with vision\n if counter % 21 == 0:\n self.task_execution()\n\n key = cv2.waitKey(4) & 0xFF\n if key == ord('q'):\n break\n # self.save_calibrations()\n\n counter += 1\n\n finally:\n pass\n # self.robot.stop()", "def main():\n\n ocp = prepare_ocp(\n biorbd_model_path=\"models/cube_and_line.bioMod\",\n n_shooting=30,\n final_time=2,\n initialize_near_solution=True,\n )\n\n # --- Solve the program --- #\n sol = ocp.solve(Solver.IPOPT(show_online_optim=platform.system() == \"Linux\"))\n\n # --- Show results --- #\n sol.animate()", "def run():\r\n \r\n match = a4_acc.Game() # Instantiate a Game object \r\n setup(match)\r\n\r\n if constants.SHOW_GRAPHICS:\r\n axes= startGraphics(match.board) #step 0\r\n \r\n \r\n for k in range(constants.STEPS):\r\n update(match)\r\n updateGraphics(board, k, caxes)\r\n \r\n ########\r\n # TO DO: \r\n # Simulate game given the intial state for constants.STEPS iterations\r\n \r\n # Example code to call the updateGraphics function; the second argument\r\n # needs to be replaced:\r\n # if constants.SHOW_GRAPHICS:\r\n # updateGraphics(match.board, None, axes) \r\n \r\n # Do not change or add code below here for function run\r\n endNow= raw_input('Press ENTER to continue.')", "def run(self):\n\n self._get_routes()\n self._calculate_emissions()", "def stage(self):\n\n # prepare projected land allocation data\n self.prep_projected()\n\n # prepare base land use data\n self.prep_base()\n\n # harmonize grid area between projected and base layer land allocation\n self.harmony()\n\n # apply constraints\n self.set_constraints()\n\n # create kernel density filter if not running multiple jobs\n self.kernel_filter()\n\n # set data for step zero\n self.set_step_zero()", "def main():\n\n # initialize a random 3x3 TileGame problem\n tg = TileGame(3)\n # 
print(TileGame.board_to_pretty_string(tg.get_start_state()))\n # compute path using dfs\n path1 = id_astar(tg, tilegame_heuristic)\n path = ids(tg)\n print(tg.get_start_state())\n # display path\n print('ids')\n # TileGame.print_pretty_path(path)\n print('astar')\n TileGame.print_pretty_path(path1)\n print((time.time() - start_time))\n\n # initialize a small DGraph\n small_dgraph = DGraph([[None, 1], [1, None]], {1})\n # print the path using ids\n # print(ids(small_dgraph))", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline=False) # set agent to track\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.0) # reduce update_delay to speed up simulation\n sim.run(n_trials=num_of_experiments) # press Esc or close pygame window to quit\n \n pd.Series(a.success).to_pickle('success_' + exp_id + '.pickle')\n a.Q_table.to_pickle('qtable_' + exp_id + '.pickle')\n pd.Series(a.q_delta_avg).to_pickle('convergence_' + exp_id + '.pickle')\n pd.Series(a.t_total).to_pickle('steps_' + exp_id + '.pickle')", "def run():\n import argparse\n parser = argparse.ArgumentParser(description=\"Create and solve mazes\")\n parser.add_argument(\"-c\", \"--cli\", help=\"Switch to CLI mode\", action='store_true')\n parser.add_argument(\"-f\", \"--file\", help=\"File to import map from\")\n parser.add_argument(\"-s\", \"--start\", help=\"Starting position in the maze\")\n parser.add_argument(\"-e\", \"--end\", help=\"Ending position in the maze\")\n args = parser.parse_args()\n if args.file:\n myfile = args.file\n else:\n myfile = 'map1.txt'\n with open(myfile, 'r') as mapfile:\n maze_str = mapfile.read()\n maze = Maze(maze_str, cli=args.cli, start=parse_seq(args.start), finish=parse_seq(args.end))\n maze.game_loop()", "def run():\n print('*-----------------------------------*')\n print('Running main.py ...')\n model = MLPModel(CFG, name='tfds_tryout')\n print('* Model defined')\n model.load_data(method='tfds')\n print('* Data Loaded')\n print(model.datasetinfo)\n model.build()\n model.train()\n model.evaluate()\n model.save()", "def run(self):\n print('PSO start running...')\n self.init_population()\n self.iterator()\n print(\"Iteration completed.\")\n self.plot_curve()\n print_params(self.GlobalBest_Pos, self.candidate, net=self.net)", "def main():\n draw_sun()\n draw_pavement()\n draw_building()\n martin.goto(12, 40) # lines 171, 173, and 175 move the turtle down to space out the windows on the building.\n draw_windows()\n martin.goto(12, 0)\n draw_windows()\n martin.goto(12, -40)\n draw_windows()\n draw_door()\n draw_doorknob()", "def main():\n cfg = config.load(\"config.yaml\")\n size = cfg.getProperty(\"grid.size\")\n cells = cfg.getProperty(\"grid.initial_cells\")\n print(f\"Initializing grid of size {size} with {cells} cells\")\n grid.show_grid(cfg)\n input(\"Press Enter to continue...\")", "def run_game_logic(self):\n pass", "def start() -> None:\n\n # PREPARE\n clone_game_files()\n\n # SIMULATE\n turns = run.simulation()\n\n # LOG\n logs = read.combine_logs(turns)\n\n # CALCULATE\n results = calculate.results(logs)\n\n # DISPLAY\n visualize.charts(results)\n\n # CLEAN\n remove_cloned_files()", "def run_simulation(self):\n print('RUNNING')\n self.table.clearContents()\n self.table.setRowCount(0)\n medium_tube = self.get_medium_inputs(self.input_tube)\n medium_shell = self.get_medium_inputs(self.input_shell)\n rest = 
self.get_main_inputs(self.input_rest)\n try:\n calculate = Calculate(medium_tube, medium_shell, rest)\n getattr(self, 'TubeT2').setText(str(round(calculate.tube.t2, 2)))\n getattr(self, 'ShellT2').setText(str(round(calculate.shell.t2, 2)))\n vysledky = calculate.calculate_all() \n except Exception as error:\n self.show_error_dialog_to_user(error.args[0])\n else:\n print('Pozadavky na vymenik splnilo {} vymeniku.'.format(len(vysledky)))\n self.show_output(vysledky)\n self.show_graph(vysledky)\n print('DONE!')", "def main():\n\n\tif len(sys.argv) > 1 and sys.argv[1]:\n\t\t_, _, hash = read_file(sys.argv[1])\n\t\toffset_x = 0\n\t\toffset_y = 0\n\telse:\n\t\toffset_x, offset_y, hash = screenshot()\n\n\tprint(hash)\n\tgame = eliza_logic.Game(0)\n\tgame.exact_setup(hash)\n\tprint(game)\n\tresult = game.global_solve(-1)\n\tprint(result)\n\n\t# If it was a screen grab, we can actually do this -- just type n/q/c to quit or anything else to continue\n\tif result is not None and offset_x and offset_y:\n\t\tx = six.moves.input(\"Ready for automated solution? \")\n\t\tif x.lower() in [\"n\", \"q\", \"c\"]:\n\t\t\treturn\n\n\t\texecute_solution(offset_x, offset_y, result)", "def run_simulator(self):\n\n self.update_settings()\n\n # Pass in the progress bar and the master so that the simulator can\n # update the progress bar and then refresh the screen when the progress\n # checkpoints are hit\n\n self.sim_results = self.sim.run(self.progress_bar, self.master)\n self.graph_results()", "def do_stuff(self):\n self.create_tourism_raster()", "def run(self):\n super().run()\n # more verbose creation date for clarity\n creation_date = f'20{self.creation_date}'\n make_e3sm_to_cmip_maps(self.config, self.logger, self.mesh_short_name,\n creation_date, self.ntasks)" ]
[ "0.7093898", "0.6963387", "0.67339575", "0.6569679", "0.61990684", "0.61964035", "0.61749303", "0.6154679", "0.61539835", "0.6101129", "0.60104483", "0.5999573", "0.5993726", "0.5986674", "0.59698826", "0.5956886", "0.5940285", "0.5927748", "0.5921149", "0.5909592", "0.5900162", "0.5897567", "0.589164", "0.5889294", "0.5882642", "0.5882121", "0.5878542", "0.58762413", "0.5867327", "0.5847702" ]
0.8375138
0
Converting from the cv2 image class to the YOLO image class.
def _convert_to_yolo_img(self, img):
    img = img / 255.0
    h, w, c = img.shape
    img = img.transpose(2, 0, 1)
    outimg = make_image(w, h, c)
    img = img.reshape((w*h*c))
    data = c_array(c_float, img)
    outimg.data = data
    rgbgr_image(outimg)
    return outimg
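A minimal usage sketch for the conversion above. It assumes the method lives on a detector wrapper object (called detector below) built around darknet's Python bindings, where make_image, rgbgr_image, c_array, and c_float come from darknet's darknet.py; the file name and the detector variable are illustrative:

import cv2

# cv2.imread yields a BGR uint8 array of shape (height, width, 3)
frame = cv2.imread('example.jpg')

# Scales pixels to [0, 1], reorders to channel-first (C, H, W),
# copies the flattened floats into darknet's IMAGE struct, and
# swaps the B and R planes in place via rgbgr_image, so the BGR
# frame from OpenCV ends up as an RGB darknet image.
yolo_img = detector._convert_to_yolo_img(frame)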
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def yolo_detection(raw_image):\n class_ids = []\n confidences = []\n boxes = []\n height , width ,c= raw_image.shape\n blob = cv2.dnn.blobFromImage(raw_image, 0.00392, (416,416), (0,0,0), True, crop=False)\n net.setInput(blob)\n outs = net.forward(output_layers)\n\n for out in outs:\n for detection in out:\n scores = detection[5:]\n class_id = np.argmax(scores)\n confidence = scores[class_id]\n if confidence > 0.4:\n center_x = int(detection[0]*width)\n center_y = int(detection[1]*height)\n w = int(detection[2]*width)\n h = int(detection[3]*height)\n ##Rectangle Draw\n topleft_x = int(center_x-(w/2))\n topleft_y = int(center_y-(h/2))\n\n boxes.append([topleft_x,topleft_y,w,h])\n confidences.append(float(confidence))\n class_ids.append(class_id)\n indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)\n #DISPLAY DETECTION\n total_detections = len(boxes)\n for i in range(total_detections):\n if i in indexes:\n topleft_x, topleft_y, w,h = boxes[i]\n label = detection_classes[class_ids[i]]\n cv2.rectangle(raw_image, (topleft_x,topleft_y), (topleft_x+w,topleft_y+h), (0,100,255), 1)\n cv2.putText(raw_image, label, (topleft_x, topleft_y),cv2.FONT_HERSHEY_COMPLEX,1,(0,165,255))\n\n\n return raw_image", "def yolo_object_detection(image_filename, net, confidence, threshold, labels, colors):\n # read image file\n # image is an array of image data (row, column, channel)\n image = cv2.imread(image_filename)\n (H, W) = image.shape[:2]\n\n # preprocess image data with rescaling and resizing to fit YOLO input shape\n # OpenCV assumes BGR images: we have to convert to RGB, with swapRB=True\n blob = cv2.dnn.blobFromImage(image, 1 / 255.0, (416, 416), swapRB=True, crop=False)\n\n # set a new input to the network\n net.setInput(blob)\n\n # get YOLOv3's output layer names\n ln = net.getLayerNames()\n ln_out = [ln[i[0] - 1] for i in net.getUnconnectedOutLayers()]\n\n # perform object detection\n layerOutputs = net.forward(ln_out)\n\n\n # Get the result from outputs, and filter them by confidence\n boxes = []\n scores = []\n classes = []\n for output in layerOutputs: # There are three output layers in YOLO v3\n # Filter outputs by confidence\n (xywh_filterd, score_filtered, class_filtered) = filter_outputs(output, confidence)\n\n boxes.append(xywh_filterd)\n scores.append(score_filtered)\n classes.append(class_filtered)\n\n # Change shapes of arrays so that all boxes from any output layers are stored together\n boxes = np.vstack([r for r in boxes])\n scores = np.concatenate([r for r in scores], axis=None)\n classes = np.concatenate([r for r in classes], axis=None)\n\n # Apply Non-max supression\n boxes_coord = rescale_box_coord(boxes, W, H)\n nms_idx = yolo_non_max_supression(boxes_coord, scores, confidence, threshold)\n \n # filter the good ones\n return image, [{'box':boxes[_], 'score':scores[_], 'class':classes[_]} for _ in nms_idx]", "def predict_from_cv2(yolo, inputfilepath):\n\n print(\"call func of predict_from_cv2\")\n img = cv2.imread(inputfilepath)\n yolo_results = yolo.predict(img)\n for yolo_result in yolo_results:\n print(yolo_result.get_detect_result())", "def yolo_forward(net, LABELS, image, confidence_level, save_image=False):\n\n # initialize a list of colors to represent each possible class label\n np.random.seed(42)\n colors = np.random.randint(0, 255, size=(10000, 3),\n dtype='uint8')\n\n # grab image spatial dimensions\n (H, W) = image.shape[:2]\n\n # determine only the *output* layer names that we need from YOLO\n ln = net.getLayerNames()\n ln = [ln[i[0] - 1] for i in 
net.getUnconnectedOutLayers()]\n\n # construct a blob from the input image and then perform a forward\n # pass of the YOLO object detector, giving us our bounding boxes and\n # associated probabilities\n # also time it\n blob = cv2.dnn.blobFromImage(image, 1 / 255.0, (416, 416),\n swapRB=True, crop=False)\n net.setInput(blob)\n start = time.time()\n layer_outputs = net.forward(ln)\n end = time.time()\n\n # show timing information on YOLO\n print('[INFO] YOLO took {:.6f} seconds'.format(end - start))\n\n # initialize our lists of detected bounding boxes, confidences, and\n # class IDs, respectively\n boxes = []\n confidences = []\n class_ids = []\n\n # loop over each of the layer outputs\n for output in layer_outputs:\n # loop over each of the detections\n for detection in output:\n # extract the class ID and confidence (i.e., probability) of\n # the current object detection\n scores = detection[5:]\n class_id = np.argmax(scores)\n confidence = scores[class_id]\n\n # filter out weak predictions by ensuring the detected\n # probability is greater than the minimum probability\n if confidence > confidence_level:\n # scale the bounding box coordinates back relative to the\n # size of the image, keeping in mind that YOLO actually\n # returns the center (x, y)-coordinates of the bounding\n # box followed by the boxes' width and height\n box = detection[0:4] * np.array([W, H, W, H])\n (centerX, centerY, width, height) = box.astype('int')\n\n # use the center (x, y)-coordinates to derive the top and\n # and left corner of the bounding box\n x = int(centerX - (width / 2))\n y = int(centerY - (height / 2))\n\n # update our list of bounding box coordinates, confidences,\n # and class IDs\n boxes.append([x, y, int(width), int(height)])\n confidences.append(float(confidence))\n class_ids.append(class_id)\n\n # apply non-maxima suppression to suppress weak, overlapping bounding\n # boxes\n # idxs = cv2.dnn.NMSBoxes(boxes, confidences, confidence_level, threshold)\n\n print(class_ids)\n print(LABELS)\n # print(labels)\n\n labels = [LABELS[i] for i in class_ids]\n\n if save_image:\n yolo_save_img(image, class_ids, boxes, labels, confidences, colors, 'python_predictions.jpg')\n\n return class_ids, labels, boxes, confidences", "def detect_image(yolo_v3_model, image_paths, batch_frames, output_path, train_input_size, classes_file_path, \n score_threshold, iou_threshold, num_of_anchor_bbox, strides, anchors, show = False, \n rectangle_colors = ''):\n \n # obtain number of classes\n num_of_classes = len(read_class_names(classes_file_path))\n \n # create list to store images\n original_images = []\n \n # iterate over images in chronological order (last image is image of interest to put bbox)\n for x in range(batch_frames):\n \n # obtain original image\n original_image = cv2.imread(image_paths[x])\n \n # append original image to original_images list\n original_images.append(original_image[:])\n \n # convert original image to grayscale \n image = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)\n \n # preprocess image\n image = transform_images(image[:], train_input_size)\n\n # obtain concat frame if none exist\n if x == 0: \n\n concat_image = image[:]\n\n # concatenate subsequent frames to concat_image\n else:\n\n concat_image = np.concatenate((concat_image, image), axis = -1)\n \n # add batch dimensions to concatenated image \n concat_image = concat_image[np.newaxis, ...].astype(np.float32)\n \n # create constant tensor from concatenated image and feed it to yolo_v3_model\n batched_input = 
tf.constant(concat_image)\n yolo_output = yolo_v3_model(batched_input)\n \n # list to store bboxes from respective scales\n pred_bbox = []\n \n # iterate over 3 scales\n for i in range(3):\n\n # decode resepctive yolo_output from each scale\n pred_result = decode(yolo_output = yolo_output[i], num_of_anchor_bbox = num_of_anchor_bbox, \n classes = num_of_classes, strides = strides, anchors = anchors, index = i)\n \n # obtain results of shape (:, 5 + num_classes), i.e all bboxes\n pred_result_reshaped = tf.reshape(pred_result, (-1, tf.shape(pred_result)[-1]))\n \n # append to pred_bbox\n pred_bbox.append(pred_result_reshaped)\n \n # concatenate all bboxes from all scales\n pred_bbox = tf.concat(pred_bbox, axis = 0)\n \n # post process all bboxes using latest image in orignal_images\n bboxes = postprocess_boxes(pred_bbox, original_images[-1], train_input_size, score_threshold)\n \n # non maximal supression for bboxes\n bboxes = nms(bboxes, iou_threshold, method = 'nms')\n \n # draw bbox on latest image in orignal_images\n image = draw_bbox(original_images[-1], bboxes, classes_file_path, rectangle_colors = rectangle_colors)\n \n # save image if path to save is given\n if output_path != '': cv2.imwrite(output_path, image)\n \n # display image if show is true \n if show:\n \n # show the image\n cv2.imshow(\"predicted image\", image)\n \n # load and hold the image\n cv2.waitKey(0)\n \n # to close the window after the required kill value was provided\n cv2.destroyAllWindows()\n \n return image", "def rgb2yuv(image):\n return cv2.cvtColor(image, cv2.COLOR_RGB2YUV)", "def yolo_save_img(image, class_ids, boxes, labels, confidences, colors, file_path):\n for i, box in enumerate(boxes):\n # extract the bounding box coordinates\n (x, y) = (box[0], box[1])\n (w, h) = (box[2], box[3])\n\n # draw a bounding box rectangle and label on the image\n color = [int(c) for c in colors[class_ids[i]]]\n cv2.rectangle(image, (x, y), (x + w, y + h), color, 3)\n text = '{}'.format(labels[i])\n # text = '{}: {:.4f}'.format(labels[i], confidences[i])\n print(text)\n\n font_scale = 1.3\n # set the rectangle background to white\n rectangle_bgr = color\n # set some text\n # get the width and height of the text box\n (text_width, text_height) = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, fontScale=font_scale, thickness=1)[0]\n # set the text start position\n text_offset_x = x\n text_offset_y = y - 3 \n # make the coords of the box with a small padding of two pixels\n box_coords = ((text_offset_x, text_offset_y), (text_offset_x + text_width + 10, text_offset_y - text_height - 10 ))\n cv2.rectangle(image, box_coords[0], box_coords[1], rectangle_bgr, cv2.FILLED)\n cv2.putText(image, text, (text_offset_x, text_offset_y), cv2.FONT_HERSHEY_SIMPLEX, \n fontScale=font_scale, color=(255, 255, 255), thickness=2)\n cv2.imwrite(file_path, image)\n return image", "def preprocess(image):\n image = rgb2yuv(image)\n return image", "def deepfash_to_yolo(image_shape, row):\n # On images displayed using skimage.io.imshow, DeepFashion bounding box\n # values look like:\n # small y\n # ||\n # ||\n # \\||/\n # \\/\n # large y\n # small x ----------> large x\n #\n # This means image_shape[0] corresponds to y values\n # The YOLO format seems to match (but use fractions)\n frac_x1 = row['x_1'] / image_shape[1]\n frac_x2 = row['x_2'] / image_shape[1]\n frac_y1 = row['y_1'] / image_shape[0]\n frac_y2 = row['y_2'] / image_shape[0]\n\n width_x = frac_x2 - frac_x1\n width_y = frac_y2 - frac_y1\n center_x = (frac_x1 + frac_x2)/2\n center_y = (frac_y1 + 
frac_y2)/2\n\n return [row['image_name'], row['category_label'],\n center_x, center_y, width_x, width_y]", "def yolo_show_img(image, class_ids, boxes, labels, confidences, colors):\n for i, box in enumerate(boxes):\n # extract the bounding box coordinates\n (x, y) = (box[0], box[1])\n (w, h) = (box[2], box[3])\n\n # draw a bounding box rectangle and label on the image\n color = [int(c) for c in colors[class_ids[i]]]\n cv2.rectangle(image, (x, y), (x + w, y + h), color, 3)\n text = '{}: {:.4f}'.format(labels[i], confidences[i])\n print(text)\n\n font_scale = 1.3\n # set the rectangle background to white\n rectangle_bgr = color\n # set some text\n # get the width and height of the text box\n (text_width, text_height) = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, fontScale=font_scale, thickness=1)[0]\n # set the text start position\n text_offset_x = x\n text_offset_y = y - 3 \n # make the coords of the box with a small padding of two pixels\n box_coords = ((text_offset_x, text_offset_y), (text_offset_x + text_width + 10, text_offset_y - text_height - 10 ))\n cv2.rectangle(image, box_coords[0], box_coords[1], rectangle_bgr, cv2.FILLED)\n cv2.putText(image, text, (text_offset_x, text_offset_y), cv2.FONT_HERSHEY_SIMPLEX, \n fontScale=font_scale, color=(255, 255, 255), thickness=2)\n\n cv2.imshow('yolo prediction', image)\n cv2.waitKey(0)", "def yolo_test_file(self):\n # Detect objects\n annotatedImage, predictedObjects = self.detect_from_file(\n self.inputFile)\n # Show image\n if self.showImage:\n cv2.imshow('YOLO Detection', annotatedImage)\n cv2.waitKey(10)\n # Save annotated image\n if self.saveAnnotatedImage:\n cv2.imwrite(self.outputFile, annotatedImage)\n # Save the parameters of detected objects in xml format\n if self.saveAnnotatedXML:\n xmlFileName = os.path.join(\n self.textOutputFolder,\n self.outputFile.split('.')[0] + '.xml')\n self.save_xml(xmlFileName, predictedObjects)", "def convert_image(img):\n def dodgeV2(x, y): # dodging and merging\n return cv2.divide(x, 255 - y, scale=256)\n # convert to grey\n img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n # bitwising\n img_invert = cv2.bitwise_not(img_gray)\n # smoothing and scaling\n img_smoothing = cv2.GaussianBlur(img_invert, (27, 27),sigmaX=-1.0, sigmaY=-1.0) # blurring by applying Gaussian filter to the inverted image\n final_img = dodgeV2(img_gray, img_smoothing)\n # adjust the shape and return\n pp_image= np.stack([final_img,final_img,final_img],axis=-1)\n return pp_image", "def run_yolo(net, image, coco_classes, save_image=False):\n\n global frame, classes\n # Give the configuration and weight files for the model and load the network using them.\n classes = coco_classes\n\n frame = cv2.imread(str(image))\n\n # Crop the frame\n # (y_min, y_max) (x_min, x_max)\n # frame = frame[300:1080, 200:1920] # Classifying people\n # frame = frame[0:500, 0:1920] # Classifying Cars\n\n # Stop the program if reached end of video\n if frame is None:\n return\n\n # Create a 4D blob from a frame.\n blob = cv2.dnn.blobFromImage(\n frame, 1 / 255, (inpWidth, inpHeight), [0, 0, 0], 1, crop=False\n )\n\n # Sets the input to the network\n net.setInput(blob)\n\n # Runs the forward pass to get output of the output layers\n outs = net.forward(getOutputsNames(net))\n\n # Remove the bounding boxes with low confidence\n postprocess(frame, outs, save_image)\n\n # Get the overall time for inference(t) and the timings for each of the layers(in layersTimes)\n t, _ = net.getPerfProfile()\n label = \"Inference time: %.2f ms\" % (t * 1000.0 / 
cv2.getTickFrequency())\n # cv2.putText(frame, label, (0, 15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255))\n print(label)\n\n # Save image with all bounding boxes\n # utils.write_image(frame)", "def detect_video(yolo_v3_model, video_path, batch_frames, output_path, train_input_size, classes_file_path, \n score_threshold, iou_threshold, num_of_anchor_bbox, strides, anchors, show = False, \n rectangle_colors = ''):\n \n # obtain number of classes\n num_of_classes = len(read_class_names(classes_file_path))\n \n # obtain VideoCapture object \n vid = cv2.VideoCapture(video_path)\n \n # obtain width, height and fps of video\n # by default VideoCapture returns float instead of int\n width = int(vid.get(cv2.CAP_PROP_FRAME_WIDTH))\n height = int(vid.get(cv2.CAP_PROP_FRAME_HEIGHT))\n fps = int(vid.get(cv2.CAP_PROP_FPS))\n\n # obtain video codec\n codec = cv2.VideoWriter_fourcc(*'XVID')\n \n # obtain output_path\n # output_path must be .mp4\n out = cv2.VideoWriter(output_path, codec, fps+1, (width, height)) \n\n # create list to store images\n images = []\n \n # variable to track frame\n frame = 0 \n \n while True:\n \n try:\n \n # grabs, decodes and returns the next video frame\n _, image = vid.read()\n \n # append original image to original_images list\n images.append(image[:])\n \n # increment frame\n frame += 1\n \n \n # if current frame is less than batch_frames\n if frame < batch_frames:\n \n # move to next frame \n continue\n \n # iterate over images in chronological order (last image is image of interest to put bbox)\n for x in range(batch_frames):\n \n # convert original image to grayscale \n image = cv2.cvtColor(images[-batch_frames + x + 1], cv2.COLOR_BGR2RGB)\n \n # preprocess image\n image = transform_images(image[:], train_input_size)\n \n # obtain concat frame if none exist\n if x == 0: \n \n concat_image = image[:]\n \n # concatenate subsequent frames to concat_image\n else:\n \n concat_image = np.concatenate((concat_image, image), axis = -1)\n \n except:\n \n break\n \n # add batch dimensions to concatenated image \n concat_image = concat_image[np.newaxis, ...].astype(np.float32)\n \n # create constant tensor from concatenated image and feed it to yolo_v3_model\n batched_input = tf.constant(concat_image)\n yolo_output = yolo_v3_model(batched_input)\n\n # list to store bboxes from respective scales\n pred_bbox = []\n\n # iterate over 3 scales\n for i in range(3):\n\n # decode resepctive yolo_output from each scale\n pred_result = decode(yolo_output = yolo_output[i], num_of_anchor_bbox = num_of_anchor_bbox, \n classes = num_of_classes, strides = strides, anchors = anchors, index = i)\n\n # append to pred_bbox\n pred_bbox.append(pred_result)\n \n # obtain results of shape (:, 5 + num_classes), i.e all bboxes\n pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]\n \n # concatenate all bboxes from all scales\n pred_bbox = tf.concat(pred_bbox, axis = 0)\n\n # post process all bboxes using latest image in orignal_images\n bboxes = postprocess_boxes(pred_bbox, images[-1], train_input_size, score_threshold)\n\n # non maximal supression for bboxes\n bboxes = nms(bboxes, iou_threshold, method = 'nms')\n\n # draw bbox on latest image in orignal_images\n image = draw_bbox(images[-1], bboxes, classes_file_path, rectangle_colors = rectangle_colors)\n \n # save image frame to video path if path to save is given\n if output_path != '': out.write(image)\n \n # display image frame (i.e play video) if show is true \n if show:\n \n # show the image\n cv2.imshow('output', image)\n \n # 
if q key is presssed\n if cv2.waitKey(25) & 0xFF == ord(\"q\"):\n \n # end session\n cv2.destroyAllWindows()\n \n # break out of while loop\n break\n \n # When everything done, release the capture\n vid.release()\n cv2.destroyAllWindows()", "def load_and_process_image(self, im_path):\n image = Image.open(im_path).convert('RGB')\n image = transforms.ToTensor()(image)\n image = 2 * image - 1\n return image", "def predict_from_pil(yolo, inputfilepath):\n\n print(\"call func of predict_from_pil\")\n img = np.array(Image.open(inputfilepath))\n yolo_results = yolo.predict(img)\n for yolo_result in yolo_results:\n print(yolo_result.get_detect_result())", "def convert_image(rel_path_in, rel_path_out):\n #Lade Bild mit Originalmaske im Grayscale-Modus\n img = cv2.imread(rel_path_in, cv2.IMREAD_GRAYSCALE)\n #Jetzt steht in img ein 2D-Array/Matrix mit jedem Graufstufen-Wert der Pixel\n #Skaliere Pixelwerte runter\n for zeilen_index in range(0,img.__len__()):\n for spalten_index in range(0, img[zeilen_index].__len__()):\n #Hole Pixel-Wert an aktueller Stelle\n wert = img[zeilen_index][spalten_index]\n #Falls Wert != 0 (also Pixel gehoert nicht zum Hintergrund)\n if wert != 0: # != 0 statt == 255, da auch z.B. 253er Werte in den Masken existieren... (vielleicht durch Konvertierung in anderes Format?)\n #Markiere den Pixel mit 1 statt 255\n img[zeilen_index][spalten_index]=1\n #print(img)\n #*NACHDEM* alle Pixel skaliert wurden, zeichne Umrandung der Objekte\n umrandung_zeichnen(img)\n #change_color(img, 0, 255)\n #change_color(img, 1, 0)\n #print(img)\n #Schreibe Ergebnis-Bild in uebergebene Datei\n cv2.imwrite(rel_path_out, img)", "def load_image(self, image_id):\n # Load image\n# print(self.image_info[image_id]['path'])\n image = cv2.imread(self.image_info[image_id]['path'],cv2.IMREAD_GRAYSCALE) \n image = image[:,:, np.newaxis] #Add 1 dimension for grayscale images\n return image", "def _convert_to_features(self, img: np.ndarray) -> np.ndarray:", "def display_yolo(img, out, threshold):\n import numpy as np\n numClasses = 20\n anchors = [1.08, 1.19, 3.42, 4.41, 6.63, 11.38, 9.42, 5.11, 16.62, 10.52]\n\n def sigmoid(x, derivative=False):\n return x * (1 - x) if derivative else 1 / (1 + np.exp(-x))\n\n def softmax(x):\n scoreMatExp = np.exp(np.asarray(x))\n return scoreMatExp / scoreMatExp.sum(0)\n\n clut = [(0, 0, 0), (255, 0, 0), (255, 0, 255), (0, 0, 255), (0, 255, 0),\n (0, 255, 128), (128, 255, 0), (128, 128, 0), (0, 128, 255),\n (128, 0, 128), (255, 0, 128), (128, 0, 255), (255, 128, 128),\n (128, 255, 128), (255, 255, 0),\n (255, 128, 128), (128, 128, 255), (255, 128, 128),\n (128, 255, 128), (128, 255, 128)]\n\n draw = ImageDraw.Draw(img)\n for cy in range(0, 13):\n for cx in range(0, 13):\n for b in range(0, 5):\n channel = b * (numClasses + 5)\n tx = out[channel][cy][cx]\n ty = out[channel + 1][cy][cx]\n tw = out[channel + 2][cy][cx]\n th = out[channel + 3][cy][cx]\n tc = out[channel + 4][cy][cx]\n\n x = (float(cx) + sigmoid(tx)) * 32\n y = (float(cy) + sigmoid(ty)) * 32\n\n w = np.exp(tw) * 32 * anchors[2 * b]\n h = np.exp(th) * 32 * anchors[2 * b + 1]\n\n confidence = sigmoid(tc)\n\n classes = np.zeros(numClasses)\n for c in range(0, numClasses):\n classes[c] = out[channel + 5 + c][cy][cx]\n classes = softmax(classes)\n detectedClass = classes.argmax()\n\n if threshold < classes[detectedClass] * confidence:\n color = clut[detectedClass]\n x = x - w / 2\n y = y - h / 2\n draw.line((x, y, x + w, y), fill=color, width=3)\n draw.line((x, y, x, y + h), fill=color, width=3)\n draw.line((x + w, y, 
x + w, y + h), fill=color, width=3)\n draw.line((x, y + h, x + w, y + h), fill=color, width=3)\n\n return img", "def transform_images(img1,img2):", "def hload_cv2(filepath):\n img = cv2.imread(filepath, cv2.IMREAD_COLOR)\n cv2.cvtColor(img, cv2.COLOR_BGR2RGB, img)\n return img", "def _process_img_semantic(self, sensor_data):\n sensor_data.convert(self.cc)\n img = np.array(sensor_data.raw_data).reshape((self.img_y, self.img_x, 4))\n img = img[:, :, :3] # sensor is actualy rgba, we dont need alpha values\n self.semantic = img # need to scale rgb values to be {0,1}", "def preprocessing(image, w, h):\n image = cv2.resize(image, (w, h))\n image = np.transpose(image, (2, 0, 1))\n image = image.reshape(1, 3, h, w)\n \n return image", "def convert_image(self, ros_img):\n try:\n cv_image = self.bridge.imgmsg_to_cv2(ros_img, \"bgr8\")\n return cv_image\n except CvBridgeError as e:\n print(e)", "def array2ipl(img): \n img_new = cv.CreateImageHeader((img.shape[1], img.shape[0]), cv.IPL_DEPTH_8U, 3)\n cv.SetData(img_new, img.copy().data,img.dtype.itemsize*3*img.shape[1])\n img_new[50,75]\n return img_new", "def convert_image(img, source, target):\r\n assert source in {'pil', '[0, 1]', '[-1, 1]'}, \"Cannot convert from source format %s!\" % source\r\n assert target in {'pil', '[0, 255]', '[0, 1]', '[-1, 1]', 'imagenet-norm',\r\n 'y-channel'}, \"Cannot convert to target format %s!\" % target\r\n\r\n # Convert from source to [0, 1]\r\n if source == 'pil':\r\n img = F.to_tensor(img)\r\n\r\n elif source == '[0, 1]':\r\n pass # already in [0, 1]\r\n\r\n elif source == '[-1, 1]':\r\n img = (img + 1.) / 2.\r\n\r\n # Convert from [0, 1] to target\r\n if target == 'pil':\r\n img = F.to_pil_image(img)\r\n\r\n elif target == '[0, 255]':\r\n img = 255. * img\r\n\r\n elif target == '[0, 1]':\r\n pass # already in [0, 1]\r\n\r\n elif target == '[-1, 1]':\r\n img = 2. * img - 1.\r\n\r\n elif target == 'imagenet-norm':\r\n if img.ndimension() == 3:\r\n img = (img - imagenet_mean) / imagenet_std\r\n elif img.ndimension() == 4:\r\n img = (img - imagenet_mean_cuda) / imagenet_std_cuda\r\n\r\n elif target == 'y-channel':\r\n # Based on definitions at https://github.com/xinntao/BasicSR/wiki/Color-conversion-in-SR\r\n # torch.dot() does not work the same way as numpy.dot()\r\n # So, use torch.matmul() to find the dot product between the last dimension of an 4-D tensor and a 1-D tensor\r\n img = torch.matmul(255. * img.permute(0, 2, 3, 1)[:, 4:-4, 4:-4, :], rgb_weights) / 255. + 16.\r\n\r\n return img", "def load_and_preprocess_image(path):\n\n img = cv2.imread(path, 0) # Load image into greyscale\n img = cv2.equalizeHist(img) # Histogram equilization\n return img", "def convert_yuv_to_rgb(img_arr): \n rgb = cv2.cvtColor(img_arr, cv2.COLOR_YUV2BGR_I420)\n rgb = cv2.cvtColor(rgb, cv2.COLOR_BGR2RGB)\n return Image.fromarray(rgb)", "def process_new_image(name):\n #preprocess the image\n img = cv2.imread(name)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = img/255.-.5\n return img" ]
[ "0.69979674", "0.6875691", "0.6409599", "0.6264038", "0.6263965", "0.62305474", "0.6210611", "0.61554116", "0.6152523", "0.61343765", "0.6091251", "0.60643476", "0.6000393", "0.59585893", "0.59381723", "0.59326524", "0.5932447", "0.591459", "0.58689374", "0.5849591", "0.5830072", "0.58261037", "0.5794554", "0.5790353", "0.57902133", "0.5789647", "0.57885116", "0.578624", "0.5783836", "0.57830375" ]
0.73024845
0
Predicting from cv2 format
def predict_from_cv2(yolo, inputfilepath):
    print("call func of predict_from_cv2")
    img = cv2.imread(inputfilepath)
    yolo_results = yolo.predict(img)
    for yolo_result in yolo_results:
        print(yolo_result.get_detect_result())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def predict(self, img_path):\n\n img = cv2.imread(img_path)\n img0 = img.copy()\n \n #This happens inside datasets\n # Convert\n img = letterbox(img, new_shape=self.img_size)[0]\n\n # Convert\n img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416\n img = np.ascontiguousarray(img)\n \n #this happens on detect\n img = torch.from_numpy(img).to(self.device)\n img = img.float() # uint8 to fp16/32\n img /= 255.0 # 0 - 255 to 0.0 - 1.0\n if img.ndimension() == 3:\n img = img.unsqueeze(0)\n\n # Inference\n pred = self.model(img)[0]\n\n # Apply NMS\n pred = non_max_suppression(pred, self.conf_thres, self.iou_thres, classes=self.classes, agnostic=self.agnostic_nms)\n \n # Process detections\n for i, det in enumerate(pred): # detections per image\n if det is not None and len(det):\n # Rescale boxes from img_size to im0 size\n det[:, :4] = scale_coords(img.shape[2:], det[:, :4], img0.shape).round()\n\n pred = [d.cpu().detach().numpy() for d in pred if d is not None]\n pred = pred[0] if len(pred) else pred\n \n pred = [[[x1, y1, x2, y2],conf] for x1, y1, x2, y2, conf, clss in pred]\n\n return pred", "def predict_again(src):\n global rcnt\n global lcnt\n H,W = src.shape[:2]\n #cv2.imshow(\"cROPPPPPPED\",src)\n #print (src.shape)\n\n img1 = src[:,:int(W/2)]\n img2 = src[:,int(W/2)+1:]\n contoured1,area1 = drawContours11111(img1)\n contoured2,area2 = drawContours11111(img2)\n #cv2.imshow(\"blank_image\",contoured1)\n #cv2.imshow(\"blank_image1\",contoured2)\n print (area1,area2)\n if area2>area1:\n #print (\"New:::::::::::::RIGGGGGGGGGHT\")\n if rcnt >=3:\n print (\"New:::::::::::::RIGGGGGGGGGHT\")\n feedback.direction = 1\n feedback.detection = 1\n rcnt += 1\n lcnt = 0\n elif area1>area2:\n #print (\"New:::::::::::::LEFTTTTTTTTT\")\n if lcnt >=3:\n print (\"New:::::::::::::LEFTTTTTTTTT\")\n feedback.direction = -1\n feedback.detection = 1\n lcnt += 1\n rcnt = 0", "def predict(model, img):\n\tx = image.img_to_array(img)\n\tx = np.expand_dims(x, axis=0)\n\tx = preprocess_input(x)\n\tpreds = model.predict(x)\n\treturn preds[0]", "def predict_data(img): \n return gennet.predict_data(img, 'Resnet50')", "def predictor(path):\n # get keypoints from the image in a DF\n TEST_keypoints = []\n path = cv2.cvtColor(path, cv2.COLOR_BGR2RGB)\n img = movenet_inference_flat_v10(hub_model, path)\n TEST_keypoints.append(img)\n TEST_keypoints_df = pd.DataFrame(TEST_keypoints)\n\n # Rename columns in the DataFrames according to the values\n columns = []\n for point in kp_descriptions:\n for value in ('y', 'x', 'score'):\n columns.append(f'{point}_{value}')\n\n TEST_keypoints_df.columns = columns\n \n # add additional positional features\n TEST_keypoints_df = add_pos_features(TEST_keypoints_df, drop_scores=True)\n # predict the asana\n prediction_existing = model_fl.predict(TEST_keypoints_df)\n # initialize the predicted_asana to 107 (no asan found)\n predicted_asana = 107\n\n # assign the precited asana if accuracy more than threshold (12.5%)\n for i in range(1):\n mx = 0\n mx_label = -1\n for j in range(107):\n if(prediction_existing[i, j] > mx):\n mx_label = j\n mx = prediction_existing[i, j]\n predicted_asana = mx_label\n predicted_accuracy = prediction_existing[0, mx_label]\n if(predicted_accuracy < 0.125):\n predicted_asana = 107\n\n # print(predicted_asana)\n \n # find label from the json\n a = inv_map[str(predicted_asana)]\n # b = \"null\"\n\n print(\"predicted pose --> \", a)\n print(\"confidence = \", predicted_accuracy)\n # print(\"actual pose -->\", b)\n return a, img", "def predict(self, img):\n 
return self._predict([img])[0]", "def model_predict(img_path):\n img = open_image(img_path)\n pred_class, pred_idx, outputs = learn.predict(img)\n print(pred_class)\n return pred_class", "def predict(self, X):", "def predict(self, X):", "def predict_car():\n img = open_image(request.files['image'])\n pred_class, pred_idx, outputs = learn.predict(img)\n return str(pred_class)", "def predict(self):\n self.canv.update()\n ps = self.canv.postscript(colormode='mono')\n img = Image.open(io.BytesIO(ps.encode('utf-8')))\n img.save('result.png')\n x = Predict.transform_image(self)\n \n #prediction with multivariate regression\n Y_hat_test = self.multivariate_model.predict([x])\n C_multivariate = map(np.argmax, Y_hat_test) # classification vector\n C_multivariate = list(C_multivariate)\n multivariate_predict = C_multivariate[0]\n\n \n #prediction with Linear Discriminant Analysis (LDA)\n lda_predict = self.lda_model.predict([x])[0]\n qda_predict = self.qda_model.predict([x])[0]\n log_predict = self.log_model.predict([x])[0]\n \n baseline_label = Label(self, text='Baseline: ' + str(multivariate_predict) )\n baseline_label.grid(row=0, column=1, padx=5, pady=5)\n lda_label = Label(self, text=' LDA: '+ str(lda_predict))\n lda_label.grid(row=0, column=2, padx=5, pady=5)\n qda_label = Label(self, text='QDA: '+ str(qda_predict))\n qda_label.grid(row=1, column=1, padx=5, pady=5)\n log_label = Label(self, text=' Logistic: '+str(log_predict))\n log_label.grid(row=1, column=2, padx=5, pady=5)", "def predict_from_image(image):\n cvimage = cv2.resize(image, config_utils.SHAPE)\n config_utils.logger.info(\"img shape after resize: '{}'.\".format(cvimage.shape))\n\n img = np.asarray(cvimage, dtype='float32')\n img /= 255.0 # scale 0 to 1\n mean = np.array([0.485, 0.456, 0.406]) \n std = np.array([0.229, 0.224, 0.225])\n img = (img - mean) / std\n img = np.transpose(img, (2,0,1)) \n img = np.expand_dims(img, axis=0) # e.g., [1x3x224x224]\n\n config_utils.logger.info(\"img shape final: '{}'.\".format(img.shape))\n\n predict(img)", "def predict(image_path):\n img = image.load_img(image_path, target_size=image_size)\n x = image.img_to_array(img)\n x = np.expand_dims(x, axis=0)\n x = preprocess_input(x)\n predictions = model.predict(x)\n plt.imshow(img)\n print('Predicted:', decode_predictions(predictions, top=1)[0])\n return decode_predictions(predictions, top=1)[0]", "def predict(self, image_path, save_vis=False, save_dir=None):\n print(image_path)\n image = cv2.imread(image_path)\n results = self.model.detect([image], verbose=0)\n r = results[0]\n image_id=os.path.split(image_path)[1][0:-4]\n if save_vis:\n class_names = ['Bench', 'Billboard', 'Catch Basin', 'CCTV Camera', 'Fire Hydrant', 'Junction Box', 'Mailbox', 'Manhole', 'Phone Booth', 'Street Light', 'Pole', 'Traffic Sign Frame', 'Utility Pole', 'Traffic Light', 'Traffic Sign (Back)', 'Traffic Sign (Front)', 'Trash Can']\n visualize.save_image(image = image[:,:,::-1], image_name=image_id, boxes=r['rois'], masks=r['masks'], class_ids=r['class_ids'], class_names=class_names, scores=r['scores'], save_dir=save_dir)\n features = {'image_id': image_id, 'classes': r['class_ids'].tolist(), 'boxes': r['rois'].tolist()}\n return features, r['masks']", "def predict(model, img, imgSize):\n \n #Reajusta o tamanho da imagem para o tamanho esperado caso necessario.\n if img.size != imgSize :\n img = img.resize(imgSize)\n\n #Converte a imagem num array tridimensional.\n x = image.img_to_array(img)\n x = numpy.expand_dims(x, axis=0)\n #Normaliza a imagem.\n x = 
preprocess_input(x)\n \n #Faz a previsao atraves da rede.\n pred = model.predict(x)\n return imagenet_utils.decode_predictions(pred, top=5)[0]", "def predict(model, img, target_size=(229, 229)): #fixed size for InceptionV3 architecture\r\n if img.size != target_size:\r\n img = img.resize(target_size)\r\n\r\n x = image.img_to_array(img)\r\n x = np.expand_dims(x, axis=0)\r\n x = preprocess_input(x)\r\n preds = model.predict(x)\r\n return preds[0]", "def predict(frame):\n cv_net = cv2.dnn.readNetFromTensorflow(PATH_TO_MODEL_WEIGHTS, PATH_TO_GRAPH)\n labels = coco_label_reader(PATH_TO_LABELS)\n\n rows, cols, _ = frame.shape\n blob = cv2.dnn.blobFromImage(frame, size=(rows, cols), swapRB=True, crop=False)\n cv_net.setInput(blob)\n cv_out = cv_net.forward()\n boxes = []\n classes = []\n for detection in cv_out[0, 0, :, :]:\n score = float(detection[2])\n if score > 0.3:\n left = detection[3] * cols\n top = detection[4] * rows\n right = detection[5] * cols\n bottom = detection[6] * rows\n class_ = int(detection[1])\n if left > right:\n left, right = right, left\n if top > bottom:\n top, bottom = bottom, top\n boxes.append([left, top, right, bottom])\n classes.append(labels[class_])\n return non_max_suppression(np.asarray(boxes), np.asarray(classes))", "def predict(self, request):\r\n f = request.files['image']\r\n \r\n img = Image.open(f)\r\n \r\n image = img.convert('RGB')\r\n \r\n image_np = load_image_into_numpy_array(image)\r\n output_dict = run_inference_for_single_image(model, image_np)\r\n vis_util.visualize_boxes_and_labels_on_image_array(\r\n image_np,\r\n output_dict['detection_boxes'],\r\n output_dict['detection_classes'],\r\n output_dict['detection_scores'],\r\n category_index,\r\n instance_masks=output_dict.get('detection_masks_reframed', None),\r\n use_normalized_coordinates=True,\r\n line_thickness=2, \r\n min_score_thresh=0.45, \r\n skip_scores=True)\r\n \r\n result_image = Image.fromarray(image_np)\r\n \r\n raw_bytes = BytesIO()\r\n result_image.save(raw_bytes, \"PNG\")\r\n \r\n return base64.b64encode(raw_bytes.getvalue()).decode(\"utf-8\")", "def predict(self, src): # real signature unknown; restored from __doc__\n pass", "def predict(model, img, target_size, top_n=3):\r\n print('img.size=',img.size)\r\n if img.size != target_size:\r\n img = img.resize(target_size)\r\n \r\n x = image.img_to_array(img)\r\n x = np.expand_dims(x, axis=0)\r\n x = preprocess_input(x)\r\n preds = model.predict(x)\r\n return decode_predictions(preds,top=top_n)[0]", "def predict(self, images, batch_size):\n pass", "def predict(self, X):\n ...", "def predict(self, X):\n ...", "def predict(self, X):\n ...", "def predict(model, image, score_thresh, screen_mode, fill):\n\n global COLOR_DICT, prev_bboxes, prev_classes\n\n # Run the prediction\n scores, boxes, classes = model.predict(image)\n \n # Prepare the images for augmentation\n if screen_mode:\n new_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n else:\n new_image = np.zeros((image.shape[0], image.shape[1], 3), dtype=np.uint8)\n cv2.rectangle(new_image, (0, 0), (image.shape[1], image.shape[0]), (255, 0, 0), 5)\n\n # Go through each bounding box and only draw and save the ones above the score threshold\n detected = []\n for i in range(len(scores)):\n if scores[i] > score_thresh:\n detected.append([i, classes[i] + 1])\n detected = bbox_sort(detected) \n \n text_list = [] \n bboxes = []\n classes = []\n for i in range(len(detected)):\n box = boxes[detected[i][0]] * np.array([image.shape[0], image.shape[1], image.shape[0], image.shape[1]])\n 
bboxes.append(box)\n classes.append(detected[i][0])\n \n matched_indices = matchBBoxes(bboxes, prev_bboxes, 100)\n \n for i in range(len(detected)):\n color = COLOR_DICT[detected[i][1]]\n \n x0 = bboxes[i][1] - 20\n y0 = bboxes[i][0] - (1080 - bboxes[i][0]) * 50 / 1080\n x1 = bboxes[i][3] + 20\n y1 = bboxes[i][2]\n \n num_pairs = 0\n \n for index_pair in matched_indices:\n if index_pair[0] == i and detected[i][0] == prev_classes[index_pair[1]]:\n num_pairs += 1\n x0 = ((x0 * num_pairs) + prev_bboxes[index_pair[1]][1] - 20) / (num_pairs + 1.0)\n y0 = ((y0 * num_pairs) + prev_bboxes[index_pair[1]][0] - (1080 - prev_bboxes[index_pair[1]][1]) * 50 / 1080) / (num_pairs + 1.0)\n x1 = ((x1 * num_pairs) + prev_bboxes[index_pair[1]][3] + 20) / (num_pairs + 1.0)\n y1 = ((y1 * num_pairs) + prev_bboxes[index_pair[1]][2]) / (num_pairs + 1.0)\n \n line_type = 3\n if fill and not screen_mode:\n line_type = cv2.FILLED\n \n cv2.rectangle(new_image, (int(x0), int(y0)), (int(x1), int(y1)), color, line_type)\n\n name = CLASS_DICT[detected[i][1]]\n \n prev_bboxes = bboxes\n prev_classes = classes\n dy = 50 # Change in y position for each item\n for text in text_list:\n color = COLOR_DICT[text[2]]\n cv2.putText(new_image, str(text[1]) + \"x \" + text[0], (1500, y), cv2.FONT_HERSHEY_DUPLEX, 0.5, color, lineType=cv2.LINE_AA)\n y += dy\n\n return new_image", "def predict(model, images):\n return model.predict_classes(images)", "def predict(image_path, wrapper):\n \"\"\"\n #Don't forget to store your prediction into ImgPred\n img_prediction = ImgPred(...)\n \"\"\"\n\n #This is where all of our code will probably go. Here are the steps to success\n\n \n #Step One: Make a list which will contain the locations of every character in our source Image.\n SymPredList = []\n\n #Step Two: Go down that list we just made and use the code from PA4 in conjunction with our new Model to analyze each character. George made this part.\n #This is the find a character part of the code. 
Max and George worked it out.\n im = cv2.imread(image_path,0)\n (thresh, imbw) = cv2.threshold(im,20,255,cv2.THRESH_BINARY | cv2.THRESH_OTSU)\n #cv2.imwrite('clapfuck.jpg', imbw)\n im3,contours,hierarchy = cv2.findContours(imbw,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n idx = 0\n for cnt in contours:\n idx += 1\n x1,y1,w,h = cv2.boundingRect(cnt)\n roi=imbw[y1:y1+h,x1:x1+w]\n\n #Step Two.1: Make a Numpy Array of all the pixels starting from the top left corner of an identified character to the bottom right corner of the identified character.\n height, width = roi.shape\n if height >= width:\n padded = cv2.copyMakeBorder(roi,0,0,(height-width)//2,(height-width)//2,cv2.BORDER_CONSTANT,value=[0,0,0])\n else:\n padded = cv2.copyMakeBorder(roi,(width-height)//2,(width-height)//2,0,0,cv2.BORDER_CONSTANT,value=[0,0,0])\n Smol = cv2.resize(padded, (28, 28))\n (thresh, evaluateMe) = cv2.threshold(Smol, 20, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)\n #scipy.misc.imsave(os.path.basename(file), ree)\n #Step Two.2: Feed that numpy into our PA4 image analyzer converter thing but using our new trained model\n evaluateMeMe = numpy.reshape(evaluateMe, (1, 28, 28, 1))\n prediction = tf.argmax(y_conv,1)\n final_number = prediction.eval(feed_dict={x:evaluateMeMe,y_:numpy.zeros((1,40)), keep_prob:1.0})\n #Step Two.3: Record what we think it is as the prediction field of the SymPred we are currently on\n final_guess = wrapper.label_types[int(final_number)]\n DisSymPred = SymPred(final_guess,x1,y1,x1+w,y1-h)\n SymPredList.append(DisSymPred)\n\n #Step Three: Wrap that now complete SymPred list, in an ImgPred, fill out all the fields of that ImgPred, and then return that shit.\n img_prediction = ImgPred(os.path.basename(image_path), SymPredList)\n\n #Step Four: Were Donezo\n return img_prediction", "def detect_fn(image) :\n image, shapes = detection_model.preprocess(image)\n prediction_dict = detection_model.predict(image, shapes)\n detections = detection_model.postprocess(prediction_dict, shapes)\n\n return detections, prediction_dict, tf.reshape(shapes, [-1])", "def predict(self, img):\n logger.info(\"predict() for %s\" %threading.current_thread())\n\n #detect face from the image\n face, rect = self.detect_face(img)\n\n if face is None or rect is None:\n #print(\"No face found for img \", type(img))\n return None, None, None, None\n\n if self.redis_server_password is None:\n # No training data available. Just perform detection and return\n # an error message in the subject value.\n warning = \"Training data not available. Redis password not set.\"\n subject = \"No Training Password\" # This will be displayed with the face\n confidence = 0\n logger.warning(\"%s\" %warning)\n return None, subject, confidence, rect\n\n #predict the image using our face recognizer\n label, confidence = self.face_recognizer.predict(face)\n #get name of respective label returned by face recognizer\n label_text = self.face_recognizer.getLabelInfo(label)\n logger.info(\"label=%s label_text=%s\" %(label, label_text))\n\n # print(label_text, confidence, rect)\n return img, label_text, confidence, rect", "def model_predict(img_path, model_path):\n learn = load_model(model_path)\n img = open_image(img_path)\n # get the outputs from the model\n pred_class,pred_idx,outputs = learn.predict(img)\n # return the classification the model returns\n return pred_class" ]
[ "0.769187", "0.7126001", "0.7057096", "0.7030363", "0.70179856", "0.7012954", "0.6988399", "0.696144", "0.696144", "0.6959898", "0.69489837", "0.6909902", "0.6900875", "0.68756694", "0.6821238", "0.6792172", "0.6742851", "0.67373735", "0.6706675", "0.668598", "0.6666994", "0.6646091", "0.6646091", "0.6646091", "0.6644694", "0.6640978", "0.6620343", "0.66023225", "0.65947485", "0.6576007" ]
0.72321135
1
Predicting from PIL format
def predict_from_pil(yolo, inputfilepath):
    print("call func of predict_from_pil")
    img = np.array(Image.open(inputfilepath))
    yolo_results = yolo.predict(img)
    for yolo_result in yolo_results:
        print(yolo_result.get_detect_result())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def predict_image(pic_style):\n classes = [\"BAROQUE\", \"EARLY-RENAISSANCE\", \"HIGH-RENAISSANCE\", \"IMPRESSIONISM\", \"MANNERISM\",\n \"MEDIEVAL\", \"MINIMALISM\", \"NEOCLASSICISM\", \"REALISM\", \"ROCOCO\",\n \"ROMANTICISM\", \"SURREALISM\"\n ]\n\n if pic_style.upper() in classes:\n pic = load(os.path.join(os.path.dirname(os.path.realpath(__file__)), \"../../\", \"examples\", pic_style + \".jpg\") )\n model = create_model()\n preds = model.predict(pic)\n else:\n print(\"Input is not a style\")\n return\n\n # Match up pred with class & sort\n preds = [(classes[i], preds[0][i]) for i in range(len(preds[0]))]\n preds = sorted(preds, key=lambda x: x[1], reverse=True)\n\n # Print the output nicely\n print(\"\\n\")\n print('{:18s} {:8}'.format(\"Style\", \"Probability%\"))\n print(\"-------------------------------\")\n for pred in preds:\n print('{:17s} | {:7.2f}%'.format(pred[0], pred[1]*100, 2))", "def predict(uploaded_file):\n loc = AudioPredict.return_image(uploaded_file)\n return loc", "def preprocess(image):\n return (image / 255) * 2 - 1", "def preprocess(image):\n return image - MEAN_PIXEL", "def predict(self, img):\n return self._predict([img])[0]", "def process(self, image):", "def predict(self, request):\r\n f = request.files['image']\r\n \r\n img = Image.open(f)\r\n \r\n image = img.convert('RGB')\r\n \r\n image_np = load_image_into_numpy_array(image)\r\n output_dict = run_inference_for_single_image(model, image_np)\r\n vis_util.visualize_boxes_and_labels_on_image_array(\r\n image_np,\r\n output_dict['detection_boxes'],\r\n output_dict['detection_classes'],\r\n output_dict['detection_scores'],\r\n category_index,\r\n instance_masks=output_dict.get('detection_masks_reframed', None),\r\n use_normalized_coordinates=True,\r\n line_thickness=2, \r\n min_score_thresh=0.45, \r\n skip_scores=True)\r\n \r\n result_image = Image.fromarray(image_np)\r\n \r\n raw_bytes = BytesIO()\r\n result_image.save(raw_bytes, \"PNG\")\r\n \r\n return base64.b64encode(raw_bytes.getvalue()).decode(\"utf-8\")", "def predict_car():\n img = open_image(request.files['image'])\n pred_class, pred_idx, outputs = learn.predict(img)\n return str(pred_class)", "def predict(input_shape, model, image_path):\n \n # Load and resize the image using PIL.\n img = PIL.Image.open(image_path)\n print('input_shape: ', input_shape)\n img_resized = img.resize(input_shape, PIL.Image.LANCZOS)\n\n # Plot the image.\n plt.imshow(img_resized)\n plt.show()\n\n # Convert the PIL image to a numpy-array with the proper shape.\n img_array = np.expand_dims(np.array(img_resized), axis=0)\n\n # Use the ResNet50 model to make a prediction.\n # This outputs an array with 1000 numbers corresponding to\n # the classes of the ImageNet-dataset.\n pred = model.predict(img_array)\n \n # Decode the output of the ResNet50 model.\n pred_decoded = decode_predictions(pred)[0]\n\n # Print the predictions.\n for code, name, score in pred_decoded:\n print(\"{0:>6.2%} : {1}\".format(score, name))\n \n return", "def pre_analyse():\n t = transform()\n model = modified_resnet50()\n model.load_state_dict(\n torch.load(\n \"model.pth.tar\",\n map_location=torch.device(\"cpu\"),\n )[\"state_dict\"]\n )\n model.eval()\n\n def get_preds(img_path):\n \"\"\"\n Gives labelds and probabilities for a single image\n This is were we preprocess the image, using a function defined in the model class\n \"\"\"\n # load image\n img = Image.open(img_path).convert(\"RGB\")\n # process it\n x = t(img)\n # get in in the right format\n x = Variable(x).unsqueeze(0)\n # 
predictions\n output = model(x)\n # decode\n output = decode(output.cpu().data.numpy()[0])\n\n # filter\n # return pred, proba\n return output\n\n return get_preds(\"image.jpg\")", "def pre_processing_function(label, filename: str, augmentor: Augmentor = None):\n image = imread(filename)\n if augmentor is not None:\n image = np.round(augmentor.run(image)).astype(np.uint8)\n\n return image, label", "def predict(self, data, version='default'):\n return self.skil.api.transformimage(\n deployment_name=self.deployment.name,\n image_transform_name=self.model_name,\n version_name=version,\n files=data\n )", "def predict(self):\n self.canv.update()\n ps = self.canv.postscript(colormode='mono')\n img = Image.open(io.BytesIO(ps.encode('utf-8')))\n img.save('result.png')\n x = Predict.transform_image(self)\n \n #prediction with multivariate regression\n Y_hat_test = self.multivariate_model.predict([x])\n C_multivariate = map(np.argmax, Y_hat_test) # classification vector\n C_multivariate = list(C_multivariate)\n multivariate_predict = C_multivariate[0]\n\n \n #prediction with Linear Discriminant Analysis (LDA)\n lda_predict = self.lda_model.predict([x])[0]\n qda_predict = self.qda_model.predict([x])[0]\n log_predict = self.log_model.predict([x])[0]\n \n baseline_label = Label(self, text='Baseline: ' + str(multivariate_predict) )\n baseline_label.grid(row=0, column=1, padx=5, pady=5)\n lda_label = Label(self, text=' LDA: '+ str(lda_predict))\n lda_label.grid(row=0, column=2, padx=5, pady=5)\n qda_label = Label(self, text='QDA: '+ str(qda_predict))\n qda_label.grid(row=1, column=1, padx=5, pady=5)\n log_label = Label(self, text=' Logistic: '+str(log_predict))\n log_label.grid(row=1, column=2, padx=5, pady=5)", "def predict(self, img_path):\n\n img = cv2.imread(img_path)\n img0 = img.copy()\n \n #This happens inside datasets\n # Convert\n img = letterbox(img, new_shape=self.img_size)[0]\n\n # Convert\n img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416\n img = np.ascontiguousarray(img)\n \n #this happens on detect\n img = torch.from_numpy(img).to(self.device)\n img = img.float() # uint8 to fp16/32\n img /= 255.0 # 0 - 255 to 0.0 - 1.0\n if img.ndimension() == 3:\n img = img.unsqueeze(0)\n\n # Inference\n pred = self.model(img)[0]\n\n # Apply NMS\n pred = non_max_suppression(pred, self.conf_thres, self.iou_thres, classes=self.classes, agnostic=self.agnostic_nms)\n \n # Process detections\n for i, det in enumerate(pred): # detections per image\n if det is not None and len(det):\n # Rescale boxes from img_size to im0 size\n det[:, :4] = scale_coords(img.shape[2:], det[:, :4], img0.shape).round()\n\n pred = [d.cpu().detach().numpy() for d in pred if d is not None]\n pred = pred[0] if len(pred) else pred\n \n pred = [[[x1, y1, x2, y2],conf] for x1, y1, x2, y2, conf, clss in pred]\n\n return pred", "def predict(image_path):\n img = image.load_img(image_path, target_size=image_size)\n x = image.img_to_array(img)\n x = np.expand_dims(x, axis=0)\n x = preprocess_input(x)\n predictions = model.predict(x)\n plt.imshow(img)\n print('Predicted:', decode_predictions(predictions, top=1)[0])\n return decode_predictions(predictions, top=1)[0]", "def predict(model, img, imgSize):\n \n #Reajusta o tamanho da imagem para o tamanho esperado caso necessario.\n if img.size != imgSize :\n img = img.resize(imgSize)\n\n #Converte a imagem num array tridimensional.\n x = image.img_to_array(img)\n x = numpy.expand_dims(x, axis=0)\n #Normaliza a imagem.\n x = preprocess_input(x)\n \n #Faz a previsao atraves da 
rede.\n pred = model.predict(x)\n return imagenet_utils.decode_predictions(pred, top=5)[0]", "def _prepare_image_and_label(self, data):\n image = tf.io.decode_image(data['image/encoded'], channels=3)\n label = tf.io.decode_image(data['image/segmentation/class/encoded'],\n channels=1)\n height = data['image/height']\n width = data['image/width']\n image = tf.reshape(image, (height, width, 3))\n label = tf.reshape(label, (1, height, width))\n label = tf.cast(label, tf.float32)\n # Normalizes image with mean and std pixel values.\n image = input_utils.normalize_image(image)\n return image, label", "def classify_images():\n\n # Load the desired image\n img_path = 'dataset/colorize_images/n02085782_919.jpg'\n img = image.load_img(img_path, target_size=(299, 299))\n x = image.img_to_array(img)\n x = np.expand_dims(x, axis=0)\n x = preprocess_input(x)\n\n model = InceptionV3(weights=\"imagenet\")\n preds = model.predict(x)\n # decode the results into a list of tuples (class, description, probability)\n # (one such list for each sample in the batch)\n print('Predicted:', decode_predictions(preds, top=3)[0])", "def prediction_to_pic(prediction, mode=\"RBYG\"):\n prediction_shape = prediction.shape\n layer_num = 5\n color_list = np.array([\n [255, 0, 0], # Red:Main vessel\n [0, 0, 255], # Blue:Catheter\n [255, 255, 0], # Yellow:SubVessel\n [0, 255, 0], # Green:outer\n [0, 0, 0] # Black:Tissue\n ])\n\n if mode == \"RB\":\n layer_num = 3\n color_list = np.array([\n [255, 0, 0], # Red: main\n [0, 0, 255], # Blue: Catheter\n [0, 0, 0] # Black:NOT Tissue and not main\n ])\n if mode == \"old\":\n layer_num = 5\n color_list = np.array([\n [0, 0, 0], # Black:Tissue\n [0, 0, 255], # Blue: Catheter\n [255, 0, 0], # Red:Main vessel\n [0, 255, 0], # Green:outer\n [255, 255, 0] # Yellow:SubVessel\n ])\n _, max_color = prediction.cpu().max(1)\n max_color.numpy()\n #print(max_color)\n image = np.zeros((prediction_shape[2], prediction_shape[3], 3), dtype=np.uint8)\n #print(image.shape)\n for i in range(layer_num):\n image[max_color[0] == i] = color_list[i]\n return image", "def embed_image_pred(image):\n image_pil2 = Image.fromarray((255 * image).astype('uint8'))\n #image_pil2 = image_pil.resize((256, 256))\n string_buf2 = StringIO.StringIO()\n image_pil2.save(string_buf2, format='png')\n data = string_buf2.getvalue().encode('base64').replace('\\n', '')\n return 'data:image/png;base64,' + data", "def grocepre(img):\n img_prepro = pre.get_grayscale(img)\n img_prepro = pre.remove_noise(img_prepro)\n img_prepro = pre.dilate(img_prepro)\n img_prepro = pre.erode(img_prepro)\n img_prepro = pre.opening(img_prepro)\n img_prepro = pre.thresholding(img_prepro)\n\n return img_prepro", "def imagenet_preprocess(image, label):\n i = image\n i = tf.cast(i, tf.float32)\n i = tf.image.resize_with_crop_or_pad(i, 224, 224)\n if model_name == 'ResNet50' or model_name == 'ResNet152':\n i = tf.keras.applications.resnet.preprocess_input(i)\n else:\n i = tf.keras.applications.densenet.preprocess_input(i)\n return (i, label)", "def predict(model, img):\n\tx = image.img_to_array(img)\n\tx = np.expand_dims(x, axis=0)\n\tx = preprocess_input(x)\n\tpreds = model.predict(x)\n\treturn preds[0]", "def predict_data(img): \n return gennet.predict_data(img, 'Resnet50')", "def Predict_image(image_path, model, show_img = True):\n test_image, preprocessed_image = Preproces_image(image_path)\n predictions = model.predict(preprocessed_image)\n prediction = Configs.CLASS_NAMES[np.argmax(predictions)]\n if show_img:\n plt.imshow(test_image)\n 
plt.title(prediction)\n plt.axis(\"off\")\n plt.show()\n return print(f\"Predictiona: {prediction}\")\n else:\n return print(f\"Predictiona: {prediction}\")", "def predict(self, src): # real signature unknown; restored from __doc__\n pass", "def preprocessing(image_data, max_height, max_width):\n img = image_data[\"image\"]\n img = resize_image(img, max_height, max_width)\n gt_boxes = image_data[\"objects\"][\"bbox\"]\n gt_labels = image_data[\"objects\"][\"label\"]\n return img, gt_boxes, gt_labels", "def preprocess_input(img):\n img /= 255.\n img -= 0.5\n img *= 2.\n return img", "def predict(model, img, target_size=(229, 229)): #fixed size for InceptionV3 architecture\r\n if img.size != target_size:\r\n img = img.resize(target_size)\r\n\r\n x = image.img_to_array(img)\r\n x = np.expand_dims(x, axis=0)\r\n x = preprocess_input(x)\r\n preds = model.predict(x)\r\n return preds[0]", "def image_model_predict(input_ms_image_filename, input_pan_image_filename, pan_img_height_size, pan_img_width_size, \r\n fitted_model, write, output_filename):\r\n \r\n with rasterio.open(input_ms_image_filename) as f:\r\n metadata = f.profile\r\n ms_img = np.transpose(f.read(tuple(np.arange(metadata['count']) + 1)), [1, 2, 0])\r\n \r\n with rasterio.open(input_pan_image_filename) as g:\r\n metadata_pan = g.profile\r\n pan_img = g.read(1)\r\n \r\n pan_img = np.expand_dims(pan_img, axis = 2)\r\n \r\n ms_to_pan_ratio = metadata['transform'][0] / metadata_pan['transform'][0]\r\n \r\n class_layer = np.zeros((pan_img.shape[0], pan_img.shape[1]))\r\n \r\n img_pan_holder = []\r\n img_ms_holder = []\r\n \r\n for i in range(0, pan_img.shape[0] - pan_img_height_size, int(ms_to_pan_ratio)):\r\n for j in range(0, pan_img.shape[1] - pan_img_width_size, int(ms_to_pan_ratio)):\r\n img_pan_iter = pan_img[i : i + pan_img_height_size, j : j + pan_img_width_size, 0]\r\n img_pan_holder.append(img_pan_iter)\r\n \r\n for i in range(0, int(ms_img.shape[0] - (pan_img_height_size / ms_to_pan_ratio)), int(ms_to_pan_ratio)):\r\n for j in range(0, int(pan_img.shape[1] - (pan_img_width_size / ms_to_pan_ratio)), int(ms_to_pan_ratio)):\r\n img_ms_iter = ms_img[i : int(i + (pan_img_height_size / ms_to_pan_ratio)), \r\n j : int(j + (pan_img_width_size / ms_to_pan_ratio)), \r\n 0 : metadata['count']]\r\n img_ms_holder.append(img_ms_iter)\r\n \r\n img_pan_array = np.concatenate(img_pan_holder, axis = 0)\r\n img_ms_array = np.concatenate(img_ms_holder, axis = 0)\r\n \r\n pred_array = np.argmax(fitted_model.predict([img_ms_array, img_pan_array]), axis = 1)\r\n \r\n n = 0 \r\n for i in range(int(pan_img_height_size / 2), pan_img.shape[0] - int(pan_img_height_size / 2), int(ms_to_pan_ratio)):\r\n for j in range(int(pan_img_width_size / 2), pan_img.shape[1] - int(pan_img_width_size / 2), int(ms_to_pan_ratio)):\r\n class_layer[i, j] = pred_array[n]\r\n n += 1\r\n \r\n if write:\r\n with rasterio.open(output_filename, 'w', **metadata_pan) as dst:\r\n dst.write(class_layer)\r\n \r\n return class_layer" ]
[ "0.6664149", "0.6654456", "0.65723336", "0.6510777", "0.6439882", "0.6420322", "0.6417145", "0.6387115", "0.6382623", "0.63690084", "0.6353849", "0.6336394", "0.63333225", "0.63284427", "0.63270974", "0.6322199", "0.63044155", "0.6290391", "0.62887764", "0.6276978", "0.62401617", "0.62170947", "0.6213686", "0.6204883", "0.61997706", "0.61993754", "0.61720127", "0.6168732", "0.61664504", "0.616061" ]
0.6737026
0
Fetch a single user's data if a user_id is specified. Otherwise fetch
the list of all users. Returned info contains user_id, name, group name,
email, admin status, and date_created.
def get(self, user_id):
    if user_id:
        return get_from_user_id(user_id)
    else:
        # No user_id given; this is a GET all users request.
        if not current_user.is_admin:
            error(403, "Logged in user not admin ")
        user_db_data = user_db_util.fetchall(g.database)
        response_data: Dict[str, List[Dict[str, str]]] = {"users": []}
        for user_entry in user_db_data:
            response_data["users"].append(
                {
                    "id": user_entry["user_id"],
                    "email": user_entry["email"],
                    "name": user_entry["name"],
                    "group": user_entry["group_name"],
                    "admin": user_entry["admin"],
                    "timestamp": user_entry["date_created"],
                }
            )
        return jsonify(response_data), 201
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def fetch_user(self, id: str):\n user = await self.http.get_user(id)\n return User(state=self.http, data=user)", "def get(self, id):\n\t\ttry:\n\t\t\tflask_app.logger.debug('We are getting the user: %d', id)\n\t\t\treturn user_service.get(id)\n\t\texcept AssertionError as e:\n\t\t\tuser_space.abort(400, e.args[0], status = \"Could not get user\", statusCode = \"400\")\n\t\texcept Exception as e:\n\t\t\tuser_space.abort(500, e.args[0], status = \"Could not get user\", statusCode = \"500\")", "async def fetch_user(self, id: utils.Intable) -> User | None:\n id64 = make_id64(id=id, type=Type.Individual)\n return await self._connection.fetch_user(id64)", "def get_user(self, user_id):\n uri = 'users/' + user_id\n return self.make_request(uri)", "def get_user(id=None, name=None):\n found_id = get_user_id(id, name)\n if not found_id:\n return\n response = utils.checked_api_call(users_api, 'get_specific', id=found_id)\n if response:\n return response.content", "async def fetch_user_simple(self, user_id: uuid.UUID) -> Optional[dict]:\n row = await self.fetchrow(\n \"\"\"\n select\n id, username, display_name, website\n from users where id = $1\n \"\"\",\n user_id,\n )\n\n if not row:\n return None\n\n return {\n \"id\": uuid_(row[0]),\n \"username\": row[1],\n \"display_name\": row[2],\n \"website\": row[3],\n }", "def get(self, user_id):\n res = self._user.get_single_user(user_id)\n\n if res:\n return {\n \"status\": 200,\n \"data\": [res]\n }, 200\n else:\n return {\n \"status\": 404,\n \"error\": \"user with id {} \"\n \"was not found \".format(user_id)\n }, 404", "def get_user(self, user_id):\n return self.my_get_user(self.get_all_dbusers(), user_id)", "def user_by_id(self, user_id):\n\n cur = self.db.cursor()\n cur.execute(\n \"\"\"SELECT user_id, username, password, phone, email, role\n FROM users WHERE user_id = %s\"\"\", (user_id, ))\n \n user_from_db = cur.fetchone()\n if cur.rowcount == 1: \n user_id, username, password, phone, email, role = user_from_db\n resp = dict(user_id=user_id, username=username, password=password, phone=phone, email=email, role=role)\n \n return resp\n return None", "def get_user(self, user_id):\n _email = self._email_for_user_id(user_id)\n response = self._get('/users?{0}'.format(urllib.urlencode({'search': _email})))\n for _user in response:\n if _user['email'] == _email:\n return _user\n return None", "def get(self, user_id):\n user = UserServices(public_id=user_id).get_an_item()\n if not user:\n api.abort(404)\n else:\n return user", "def get(\n user_id=None, discord_id=None, google_id=None, email=None,\n ):\n temp_cursor = user_db.cursor()\n\n pos_selectors = {\n \"user_id\": user_id,\n \"discord_id\": discord_id,\n \"google_id\": google_id,\n \"email\": email,\n }\n\n user = None\n for selector in pos_selectors.keys():\n sel_value = pos_selectors[selector]\n if sel_value is None:\n continue\n user = temp_cursor.execute(\n \"SELECT * FROM users WHERE \" + selector + \" = ?\", (sel_value,)\n ).fetchone()\n\n if user is not None:\n return User_Info.init_from_db(user)\n\n return None", "def get_user_by_id(self, user_id: str) -> typing.Optional[User]:\n query_params = {\n \"$select\": \",\".join(\n [\"displayName\", \"id\", \"mail\", \"department\", \"companyName\"]\n ),\n }\n\n request = self._prepare_request(\n method=\"get\",\n resource_path=f\"users/{user_id}\",\n query_params=query_params,\n )\n with requests.Session() as session:\n response = session.send(request=request)\n try:\n response.raise_for_status()\n except requests.exceptions.HTTPError as 
exception:\n if response.status_code == 400:\n return None\n raise exception\n user = response.json()\n return User.from_dict(**user) if user.get(\"id\") else None", "async def get(cls, user_id):\n try:\n user = await db.one(cls.SELECT_USER, user_id=user_id)\n except exceptions.NoResultFound:\n LOGGER.error(\"Could not find user=%s.\", user_id)\n raise DatabaseError\n except SQLAlchemyError as err:\n LOGGER.error(\"Failed to fetch user=%s. Error: %s\", user_id, err)\n raise DatabaseError\n\n return user", "def get_user(user_id):\r\n data = slack_client.api_call(\"users.info\", user=user_id)\r\n if not data[\"ok\"]:\r\n return False\r\n response = {}\r\n response[\"username\"] = data[\"user\"][\"name\"]\r\n response[\"name\"] = data[\"user\"][\"profile\"][\"real_name_normalized\"]\r\n response[\"user_id\"] = data[\"user\"][\"id\"]\r\n return response", "def get(self, user_id):\n\n current_app.logger.info(\"GET: {}\".format(request.full_path))\n\n res = UserModel.get_user_info_by_id(user_id)\n if res is None:\n current_app.logger.warn(\"Resource not found\")\n return {'message': 'user not found'}, 404\n else:\n resp_body = res[0].to_json()\n resp_body.update(res[1].to_json())\n return {'user info': resp_body}, 200", "def get(self, user_id):\n return User.get(user_id)", "def fetch_user(user_id):\n user = user_collection.find_one({\"_id\": user_id})\n user_bookmarks = list()\n for project_id in user[\"bookmarks\"]:\n project = project_collection.find_one({\"_id\": project_id})\n if project is None:\n continue\n bookmark_details = {\n \"PROJECT_ID\": str(project_id),\n \"projectTitle\": project[\"projectTitle\"],\n \"projectDescription\": project[\"projectDescription\"],\n }\n user_bookmarks.append(bookmark_details)\n user_contributions = list()\n for project_id in user[\"contributions\"]:\n project = project_collection.find_one({\"_id\": project_id})\n if project is None:\n continue\n contribution_details = {\n \"projectTitle\": project[\"projectTitle\"],\n \"projectDescription\": project[\"projectDescription\"],\n }\n user_contributions.append(contribution_details)\n user_dict = {\n \"username\": user[\"username\"],\n \"userid\": user[\"userid\"],\n \"email\": user[\"email\"],\n \"avatar\": user[\"avatar\"],\n \"githubURL\": user[\"githubURL\"],\n \"linkedinURL\": user[\"linkedinURL\"],\n \"stackoverflowURL\": user[\"stackoverflowURL\"],\n \"skills\": user[\"skills\"],\n \"bookmarks\": user_bookmarks,\n \"contributions\": user_contributions,\n }\n return user_dict", "def get_user_data(khoros_object, user_settings=None, user_id=None, login=None, email=None):\n user_settings = _process_settings_and_user_id(khoros_object, user_settings, user_id, login, email)\n api_response = query_users_table_by_id(khoros_object, '*', user_settings['id'])\n return api_response['data']", "def get_user(user_id=None):\n users = storage.all('User')\n user = users.get('User' + \".\" + user_id)\n if user is None:\n abort(404)\n else:\n return jsonify(user.to_dict()), 200", "async def fetch_user(self, user_id: uuid.UUID) -> Optional[dict]:\n row = await self.fetchrow(\n \"\"\"\n select\n id, username, display_name, website, created_at, modified_at,\n last_heartbeat_at, last_plugin, last_plugin_name, last_project,\n timezone\n from users where id = $1\n \"\"\",\n user_id,\n )\n\n if not row:\n return None\n\n user = {\n \"id\": uuid_(row[0]),\n \"username\": row[1],\n # no \"full legal names\" here uwuwuwuwu\n # trans rights\n \"display_name\": row[2],\n \"full_name\": row[2],\n \"website\": row[3],\n \"created_at\": 
timestamp_(row[4]),\n \"modified_at\": timestamp_(row[5]),\n \"last_heartbeat_at\": row[6],\n \"last_plugin\": row[7],\n \"last_plugin_name\": row[8],\n \"last_project\": row[9],\n \"timezone\": row[10],\n \"logged_time_public\": False,\n \"languages_used_public\": False,\n # i do not store full name or email pls\n \"email\": \"[email protected]\",\n \"email_public\": False,\n # TODO: should we put something here?\n \"photo\": None,\n \"is_hireable\": False,\n \"has_premium_features\": False,\n \"plan\": \"basic\",\n \"location\": \"Canberra, Australia\",\n }\n\n if user[\"website\"] is not None:\n # TODO: use urllib.parse\n user[\"human_readable_website\"] = user[\"website\"].lstrip(\"https://\")\n\n return user", "def read_user(user_id: int, db: Session = Depends(get_db)):\n # Call function to retrieve the details of a given User\n return crud.get_user(db, user_id)", "def user(self, user_id=None, username=None):\n if not (user_id or username):\n return self.auth_user()\n resp = self.request(\"user/show\", {\"id\": user_id, \"username\": username})\n return resp", "def get_user_by_id(self, user_id):\n query = \"SELECT * FROM users WHERE user_id = %s\"\n self.cursor.execute(query,[user_id])\n result = self.cursor.fetchone()\n return result", "def user_info(user_id):\n return User.query.filter_by(id=user_id).first()", "def get_user(user_id):\n netAdminToolDB = app.config['DATABASE']\n user = netAdminToolDB.get_user(user_id)\n\n if user == None:\n return jsonify({'error': 'User_id not found'}), 404\n\n uri = url_for('get_user', user_id=user.id, _external=True)\n return jsonify({'user':{\n 'id': user.id,\n 'uri': uri,\n 'username': user.username,\n 'display_name': user.display_name,\n 'role': user.role_name\n }\n })", "def user_loader(user_id):\n return Users.query.get(user_id)", "def cli(ctx, user_id, deleted=False):\n return ctx.gi.users.show_user(user_id, deleted=deleted)", "def load_user(user_id):\n return Users.query.get(user_id)", "def user_loader(user_id):\r\n return User.query.get(user_id)" ]
[ "0.72157097", "0.7131999", "0.70872533", "0.7083722", "0.7074586", "0.7070638", "0.7040221", "0.70230114", "0.6946631", "0.69372654", "0.6927964", "0.6918322", "0.69075227", "0.68962216", "0.68914783", "0.688945", "0.6877585", "0.6864886", "0.6854097", "0.68460727", "0.6818941", "0.67824805", "0.6782273", "0.6743209", "0.6738516", "0.6720927", "0.6710717", "0.6705258", "0.6700813", "0.669688" ]
0.7235378
0
Create a new user with the provided email, password, and admin flag. If required fields are missing from the request, return 400. The password must be 8 or more characters long; otherwise return 422. The email must not already be in use by an existing user; otherwise return 422. On success, return 201 with the new user's email, admin flag, and creation timestamp.
def post(self): data = request.get_json() if data is None: error(400, "No json data in request body") check_data_fields(data, ["email", "name", "group_name", "password", "admin"]) if len(data["password"]) < 8: error(422, "New password is less than 8 characters long!") password_hash = generate_password_hash(data["password"]) try: create_user_result = user_db_util.create( g.database, email=data["email"], name=data["name"], group_name=data["group_name"], password_hash=password_hash, admin=data["admin"], ) except UniqueViolation: error(422, "User with that email address already exists") response_data = { "user_id": create_user_result["user_id"], "email": create_user_result["email"], "admin": create_user_result["admin"], "name": create_user_result["name"], "group_name": create_user_result["group_name"], "timestamp": create_user_result["date_created"], } return jsonify(response_data), 201
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_user():\n record = request.get_json()\n if record is None:\n return {\"Error\": \"No data Supplied.\"}, 400\n\n schema = user_schema.load(record)\n\n if UserModel.objects(email=schema['email']):\n return {\"Error\": \"User Data already exists.\"}, 400\n user = UserModel(**schema)\n user.hash_password()\n user.save()\n ser_data = user_schema.dump(user)\n token = Auth.generate_token(ser_data[\"_id\"])\n return {\"message\": \"User Created Successfully\", \"Token\": token, \"id\": str(user.id)}, 200", "def post(self):\n args = usr_parser.parse_args()\n # convert admin parameter into a boolean\n admin = bool(args['admin'])\n # check if the id of user is provided\n if args['uid'] is not None:\n user = User.new_user(admin, args['uid'])\n else:\n user = User.new_user(admin)\n \n \"\"\" check if the user is created, \n if the user with the same id exists it won't be created \"\"\"\n if user is None:\n return abort(422, message=\"User id already exists\")\n \n \"\"\" create an object to represent the user with the password provided\n and return it as a response \"\"\"\n userToReturn = { 'uid' : user.id, 'password':user.password,'admin':user.admin }\n return userToReturn", "def create_user():\n email = request.json.get('email')\n username = request.json.get('username')\n password = request.json.get('password')\n\n details = [email, username, password]\n\n if not all(details):\n return bad_request(\"you must supply email, username and password\")\n if User.query.filter_by(email=email).first() is not None and User.query.filter_by(username=username) is not None:\n return forbidden(\"email or username already exist\")\n\n user = User(email=email, username=username)\n user.hash_password(password)\n user.save()\n\n return {'status': (user.username + ' has successfully registered')}", "def post(self):\n self.parser.add_argument(\n 'name', required=True, type=self.validator.validate_string_fields, help='Enter a valid name')\n self.parser.add_argument(\n 'email', required=True, type=self.validator.validate_string_fields, help='Must be a valid email')\n self.parser.add_argument(\n 'password', required=True, type=self.validator.validate_string_fields, help='Must enter a valid password')\n\n user = self.parser.parse_args()\n response = self.user_models.create_user(user['name'],\n user['email'],\n user['password'])\n return {\"message\": response}, 201", "def create_user(self, username, email, is_admin, password):\n data = {\n \"username\": username,\n \"password\": password,\n \"email\": email,\n \"administrator\": is_admin,\n }\n headers = {\"user-agent\": self.u_agent}\n req_url = self.normalize_admin_url(\"users\")\n res = requests.post(\n req_url,\n headers=headers,\n auth=self.auth,\n data=json.dumps(data),\n verify=False,\n )\n if res.status_code == 201:\n return Response(0, u\"User {} has been created\".format(username))\n else:\n return Response(res.status_code, res)", "def create_user():\n body = request.json\n username = body.get('username')\n password = body.get('password')\n validation = validate_user(username, password)\n password = md5(password.encode('utf-8')).hexdigest()\n if validation != \"OK\":\n return HTTPResponse(status=500, body={\"message\":validation})\n try:\n with db.atomic():\n user = User.create(username=username, password=password)\n user.save()\n ret = json.dumps({'message':'user created'})\n return HTTPResponse(status=200, body=ret)\n except IntegrityError:\n ret = json.dumps({'message':'user already exists'})\n return HTTPResponse(status=500, body=ret)", "def 
create_user(cls, username, email, password, admin=False):\n try:\n cls.create(\n username = username,\n email = email,\n password = generate_password_hash(password),\n is_admin = True)\n except IntegrityError:\n raise ValueError(\"User already exists\")", "def create_user():\n new_dict = request.get_json(silent=True)\n if type(new_dict) is dict:\n if \"email\" not in new_dict.keys():\n return jsonify({\"error\": \"Missing email\"}), 400\n elif \"password\" not in new_dict.keys():\n return jsonify({\"error\": \"Missing password\"}), 400\n else:\n user = User(email=new_dict[\"email\"], password=new_dict[\"password\"])\n for k, v in new_dict.items():\n setattr(user, k, v)\n user.save()\n return jsonify(user.to_dict()), 201\n else:\n return jsonify({\"error\": \"Not a JSON\"}), 400", "def create_user():\n body = request.get_json(silent=True)\n if body is None:\n abort(400, jsonify(error=\"Not a JSON\"))\n if 'email' not in body:\n abort(400, jsonify(error=\"Missing email\"))\n if 'password' not in body:\n abort(400, jsonify(error=\"Missing password\"))\n user = models.user.User(**body)\n models.storage.new(user)\n models.storage.save()\n return make_response(jsonify(user.to_dict()), 201)", "def api_create_user():\n data = request.json\n\n errs, res = self.user_manager.create_user_as_admin(\n email=data['email'],\n username=data['username'],\n role=data['role'],\n passwd=data['password'],\n passwd2=data['password'],\n name=data.get('full_name', ''))\n\n # validate\n if errs:\n return {'errors': errs}\n\n user, first_coll = res\n return {'user': user.name, 'first_coll': first_coll.name if first_coll else ''}", "def create_user():\n usr = request.get_json()\n if not usr:\n abort(400, {'Not a JSON'})\n elif 'email' not in usr:\n abort(400, {'Missing email'})\n elif 'password' not in usr:\n abort(400, {'Missing password'})\n else:\n new_usr = User(**usr)\n storage.new(new_usr)\n storage.save()\n return jsonify(new_usr.to_dict()), 201", "def create_user():\n if not check_content_type():\n return jsonify(status=CONTENT_TYPE_ERROR)\n\n data = request.json\n #TODO check if request body contain required keys\n #if [\"login\", \"password\", \"user\", \"email\", \"first_name\", \"second_name\", \"phone\"].sort() != (data.keys()).sort():\n # return jsonify(status=\"err\")\n\n login = data[\"login\"]\n hash_password = raw_password_to_string(str(data[\"password\"]))\n role = \"user\"\n email = data[\"email\"]\n first_name = data[\"first_name\"]\n second_name = data[\"second_name\"]\n phone = data[\"phone\"] \n #TODO data validation\n #if login == \"\" or hash_password == \"\" or role == \"\" or email == \"\" or first_name == \"\" or second_name == \"\":\n # return jsonify(status=\"error\")\n\n db.session.add(User(login=login, hash_password=hash_password, role=role, email=email, first_name=first_name, second_name=second_name, phone=phone))\n try:\n db.session.commit()\n return jsonify(status=OK_STATUS)\n except:\n db.session.rollback()\n return jsonify(status=DATABASE_INTEGRITY_ERROR)", "def create_user():\n try:\n payload = _validatePayload(request)\n timestamp = int(time.time() * 1000)\n user = {\n 'name': payload.get('name'),\n 'email': payload.get('email'),\n 'password': _encodePassword(payload.get('password')),\n 'createdAt': timestamp,\n 'updatedAt': timestamp,\n }\n\n resp = table.put_item(\n Item=user,\n Expected={'email': {'Exists': False}}\n )\n return jsonify(user), 200\n except Exception as e:\n logger.info('ERROR {}'.format(str(e)))\n return _customizeErrorMessage(e)", "def create(self, 
validated_data):\n user = User.objects.create_user(\n email=validated_data['email'],\n password=validated_data['password'],\n )\n return user", "def create_user():\n username = request.get_json().get(\"name\", None)\n role = request.get_json().get(\"role\", None)\n email = request.get_json().get(\"email\", None)\n return jsonify(\n admin.create_user(current_app.scoped_session(), username, role, email)\n )", "def create_user():\r\n if not request.is_json or 'name' not in request.get_json() or 'phone_number' not in request.get_json() or 'password' not in request.get_json():\r\n return bad_request('Missing required data.')\r\n try:\r\n return add_user(request)\r\n except:\r\n return bad_request(error_messages['user_exist'])", "def create_user():\r\n data = request.get_json() or {}\r\n print(data)\r\n # some data checks\r\n if 'username' not in data or 'password' not in data:\r\n return bad_request('must include username and password fields')\r\n if User.query.filter_by(username=data['username']).first():\r\n return bad_request('please use a different username')\r\n user = User()\r\n # add user to database\r\n user.add_user(data)\r\n # check that the transaction was successful\r\n res = User.query.filter_by(username=data['username']).one_or_none()\r\n # return added user as query response\r\n if res:\r\n response = jsonify(res.to_dict())\r\n response.status_code = 201\r\n # else return error\r\n else:\r\n response.status_code = 403\r\n response.headers['Location'] = url_for('api.get_user', id=user.id)\r\n return response", "def create_user(email='[email protected]', password='testpass123'):\n return get_user_model().objects.create_user(email=email, password=password)", "def create_user(email, password):\n um = logic.UserManager()\n try:\n # validate if email contains actually a valid email address:\n validate_email(email)\n # create account\n user = um.create_user(email)\n if password:\n user.set_password(password)\n else:\n user.reset()\n except ex.TickeeError as e:\n transaction.abort()\n # build failed result\n return marshalling.error(e)\n except ValidationError as e:\n transaction.abort()\n return marshalling.error(e)\n else:\n user_info = marshalling.user_to_dict(user)\n transaction.commit()\n # build success result\n result = marshalling.created_success_dict.copy()\n result['user'] = user_info\n return result", "def create(self, validated_data):\n username = validated_data.get('username')\n email = validated_data.get('email')\n password = validated_data.get('password')\n first_name = validated_data.get('first_name', '')\n last_name = validated_data.get('last_name', '')\n return User.objects.create_user(username, email, password, first_name=first_name,\n last_name=last_name)", "def new_user(request):\r\n rdict = request.params\r\n\r\n u = User()\r\n\r\n u.username = unicode(rdict.get('username'))\r\n if u.username:\r\n u.username = u.username.lower()\r\n u.email = unicode(rdict.get('email')).lower()\r\n passwd = get_random_word(8)\r\n u.password = passwd\r\n u.activated = True\r\n u.is_admin = False\r\n u.api_key = User.gen_api_key()\r\n\r\n try:\r\n DBSession.add(u)\r\n DBSession.flush()\r\n # We need to return the password since the admin added the user\r\n # manually. 
This is only time we should have/give the original\r\n # password.\r\n ret = dict(u)\r\n ret['random_pass'] = passwd\r\n return _api_response(request, ret)\r\n\r\n except IntegrityError, exc:\r\n # We might try to add a user that already exists.\r\n LOG.error(exc)\r\n request.response.status_int = 400\r\n return _api_response(request, {\r\n 'error': 'Bad Request: User exists.',\r\n })", "def create(self, validated_data):\n username = validated_data.pop('username')\n email = validated_data.pop('email')\n password = validated_data.pop('password')\n user = User.objects.create_user(\n username, email, password, **validated_data)\n return user", "def create(self, validated_data):\n user = User(\n email=validated_data['email'],\n username=validated_data['username']\n )\n user.set_password(validated_data['password'])\n user.save()\n return user", "def post(self):\n new_user = register_parser.parse_args()\n user = User.get_user_by_email(dict_cursor, new_user['email'])\n if user:\n return {\n 'warning':\n 'Email exists, please login or register with another email'\n }, 400\n invalid_data = validate_user_data(new_user)\n if invalid_data:\n return invalid_data\n # check in the db if user exists\n user = User.get_user_by_username(dict_cursor, new_user[\"username\"])\n if not user:\n hash_password = Bcrypt().generate_password_hash(\n new_user[\"password\"]).decode()\n User.create_user(\n cursor, new_user[\"username\"], new_user[\"email\"], hash_password)\n return {\"message\": \"User registered successfully\"}, 201\n return {\"warning\": \"User already exists. Please login.\"}, 202", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n print(\"create user\")\n return user", "def _create_user(self, email, password, **extra_fields):\n if not email:\n raise ValueError('The given email must be set')\n email = self.normalize_email(email)\n user = self.model(email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def _create_user(self, email, password, **extra_fields):\n\n email = self.normalize_email(email)\n #username = self.model.normalize_username(username)\n user = self.model( email=email, **extra_fields)\n user.set_password(password)\n user.save(using=self._db)\n return user", "def create(self,validated_data):\n\n user = models.User.object.create_user(\n email = validated_data['email'],\n full_name = validated_data['full_name'],\n phone = validated_data['phone'],\n password = validated_data['password']\n )\n\n #user.set_password(validated_data['password'])\n user.save()\n return user", "def post(self):\n\n user_data, error = user_schema.load(api.payload)\n user_data[\"public_id\"] = uuid.uuid4()\n\n try:\n pswd = user_data[\"password\"]\n except KeyError as e:\n return {\"msg\": \"Password required.\"}, 400\n else:\n user_data[\"password\"] = bcrypt.generate_password_hash(pswd).decode('utf-8')\n\n try:\n new_user = User(**user_data).save()\n except Exception as e:\n return str(e), 400\n \n return user_schema.dump(new_user), 200", "def create_user(self,email,password=None, **extra_fields):\n\n if not email: \n raise ValueError('Users must have an email address')\n #sets the email field of your user model, this is done on the model itself because there are no functions to change it.\n user = self.model(email=self.normalize_email(email), 
**extra_fields) \n user.set_password(password)\n user.save(using=self._db) #save using the defualt database in the settings.py file.\n\n return user" ]
[ "0.7782959", "0.76835763", "0.7680565", "0.7676472", "0.76109964", "0.7539318", "0.753589", "0.75105494", "0.75042003", "0.74783903", "0.7470725", "0.7454548", "0.7424451", "0.7331659", "0.73239493", "0.7322408", "0.73050135", "0.72956514", "0.7292638", "0.7289475", "0.728914", "0.72499436", "0.7229765", "0.7225833", "0.7205834", "0.7203175", "0.72027916", "0.7201589", "0.7191927", "0.718233" ]
0.7710605
1
Dispatches an event to any matching event handlers. The handler which specifically matches the event name will be called first, followed by any handlers with a 'match' method which matches the event name concatenated with the args string.
def dispatch(self, event, args=''): try: if event in self.events: self.events[event](args) for matcher, action in self.eventmatchers.iteritems(): ary = matcher.match(' '.join((event, args))) if ary is not None: action(*ary) except Exception, e: try: traceback.print_exc(sys.stderr) except: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_handlers(self, event, method=EVENT_CAPTURE):\n if event not in self.events:\n return None\n for handler in self.events[str(event)].with_method(method):\n handler(event)", "def onEvent(self, event):\n if event is None:\n return\n\n target_class = type(self.target).__name__\n event_class = type(event).__name__\n self.log.debug(\"onEvent, looking for matching method for argument: %s\" % event_class)\n\n if event_class in self.invokeMap:\n try:\n func = getattr(self.target, self.invokeMap[event_class])\n func(event)\n except Exception as e:\n self.log.error(\"Failed to invoke target method %s.%s for %s: %s\" % (target_class, self.invokeMap[event_class], event_class, e.message))\n else:\n self.log.warn(\"Could not dispatch for event of type %s in %s:, no matching method found \" % (event_class, target_class))", "def emit(self, name, data=None):\n data = data or {}\n event = Event(self, name, data)\n for decorated, bound_handler in self.event_handlers:\n if decorated.match(event):\n bound_handler(event)", "def fire(self, sender, argument=None):\n for handler in self.__handlers:\n handler(sender, argument)", "def emit(self, name, **kwargs):\n\n event_listeners = self._listeners.get(name, None)\n if event_listeners:\n debug('trigger event: {}', name)\n event_listeners.sort(key=lambda x: x.priority)\n for listener in event_listeners:\n listener.handler(**kwargs)", "def dispatch(self, event: Any) -> None:\n if event.type:\n getattr(self, \"ev_%s\" % (event.type.lower(),))(event)", "def dispatch(self, name, *args, **kwargs):\n # Notify all listeners of this event\n for listener, events in self._observers.iteritems():\n name2 = name if name in events else 'default' if 'default' in events else None\n if name2 is not None:\n e = Observable.Event(self, name)\n try:\n e.__dict__.setdefault(events[name2]['func']['attrib'],\n events[name2]['func']['callable'](*args))\n except KeyError:\n pass\n\n e.__dict__.update(kwargs)\n\n try:\n # Create the event to send\n listener.notify(e)\n except Exception as ex:\n import sys\n import traceback\n exc_type, exc_value, exc_traceback = sys.exc_info()\n traceback.print_exception(exc_type, exc_value, exc_traceback)\n sys.exit(1)", "def dispatch(self, event):\n \n is_directory = event.is_directory\n if self.ignore_directories and is_directory:\n return \n \n event_type = event.event_type\n def match(path):\n return match_path(path, \n included_patterns=self.patterns,\n excluded_patterns=self.ignore_patterns,\n case_sensitive=self.case_sensitive)\n \n has_match = ((event_type in (EVENT_TYPE_CREATED, EVENT_TYPE_MODIFIED, EVENT_TYPE_DELETED) and match(event.src_path) ) \n or (event_type == EVENT_TYPE_MOVED and match(event.dest_path)))\n if not has_match and not is_directory:\n return\n\n self.on_any_event(event)\n _method_map = {\n EVENT_TYPE_MODIFIED: self.on_modified,\n EVENT_TYPE_MOVED: self.on_moved,\n EVENT_TYPE_CREATED: self.on_created,\n EVENT_TYPE_DELETED: self.on_deleted,\n }\n _method_map[event_type](event)", "async def send_event(self, event_name : str, *args, **kwargs):\n\n print(\"TRIGGERED {}\".format(event_name))\n event_name = \"on_\" + event_name\n\n # If an event exists in this object, call it\n event_fn = getattr(self, event_name, None)\n if event_fn:\n coro = event_fn(*args, **kwargs)\n asyncio.ensure_future(coro)\n\n # If event has been registered in the collection, call it\n for callback in self._events.get(event_name, []):\n coro = callback(*args, **kwargs)\n asyncio.ensure_future(coro)", "def invoke(self, event_args, *args, **kwargs):\n 
pass # pragma: no cover", "def runEventCallbacks(self, event, *args):\n\n if not event in self.EVENT_TYPES:\n raise Exception(\"XnatIo (onEvent): invalid event type '%s'\"%(\\\n event))\n if not hasattr(self, 'eventCallbacks__'):\n print('self has no attribute eventCallbacks__')\n return\n\n for callback in self.eventCallbacks__[event]:\n #print(f\"EVENT CALLBACK {event}\")\n callback(*args)", "def on_match(self, patterns, priority=0):\n\n def decorator(func):\n router = ListRouter(priority=priority)\n\n @functools.wraps(func)\n async def _wrapper(update, ctx):\n if not isinstance(update, Message):\n return SKIPPED\n\n for pattern in patterns:\n match = re.match(pattern, update.text)\n if match:\n break\n else:\n return SKIPPED\n\n ctx.match = match\n\n return await func(update, ctx)\n\n router.add_handler(_wrapper)\n\n self._routers.append(router)\n\n return func\n\n return decorator", "def test_local_handler():\n event_to_be_dispatched = (\"cat_ate_a_fish\",\n \"cat_requested_something\",\n \"dog_asked_for_petting\",\n \"dog_finished_the_food\",\n \"dad_made_beet_juice\",\n \"juice_is_spoiled\",\n \"she_danced_with_her_partner\")\n events_handled = {event_name: []\n for event_name in (\"cat\", \"all\", \"dog\", \"juice\", \"dance\")}\n\n @local_handler.register(event_name=\"cat_*\")\n async def handle_all_cat_events(event: Event):\n events_handled[\"cat\"].append(event)\n\n @local_handler.register(event_name=\"*\")\n def handle_all_events(event: Event):\n events_handled[\"all\"].append(event)\n\n @local_handler.register(event_name=\"dog_*\")\n async def handle_all_dog_events(event: Event):\n events_handled[\"dog\"].append(event)\n\n @local_handler.register(event_name=\"*juice\")\n def handle_all_juice_events(event: Event):\n events_handled[\"juice\"].append(event)\n\n @local_handler.register(event_name=\"*dance*\")\n async def handle_all_dance_events(event: Event):\n events_handled[\"dance\"].append(event)\n\n app = Starlette(middleware=[\n Middleware(EventHandlerASGIMiddleware,\n handlers=[local_handler])])\n\n @app.route(\"/\")\n async def root(request: Request) -> JSONResponse:\n for event_name in event_to_be_dispatched:\n dispatch(event_name=event_name)\n\n return JSONResponse()\n\n client = TestClient(app)\n client.get(\"/\")\n\n for event_category, expected_count in (\n (\"cat\", 2),\n (\"all\", 7),\n (\"dog\", 2),\n (\"juice\", 1), # `juice_is_spoiled` is not matching `*juice`\n (\"dance\", 1)\n ):\n assert len(events_handled[event_category]) == expected_count", "def Handler(self, *events: str, colon: bool = False,\n ircv3: bool = False) -> Callable:\n ...", "def dispatch(self, event: str, message: str) -> None:\n\t\tfor subscriber, callback in self.get_subscribers(event).items():\n\t\t\tcallback(event, message)", "def test_attrib_exact_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~==^H.*?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n 
])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def dispatch_module_event(self, event: str, *args, **kwargs):\n return [callback(event, *args, **kwargs) for callback in self.event_registry[event]]", "def dispatch_event(self, event):\n event_type = event.get('event_type')\n handler = self._event_handlers.get(event_type,\n self.consume_unknown_event)\n return handler(event)", "def fire(obj, name, *args, **kwargs):\n for func in _signals(obj, name):\n func(*args, **kwargs)", "def fire_event(self, event = None):\n for e in self.events:\n if e[\"event\"] == event:\n if type(e[\"args\"]) == type([]):\n e[\"callback\"](*e[\"args\"])\n elif type(e[\"args\"]) == type({}):\n e[\"callback\"](**e[\"args\"])\n elif e[\"args\"] == None:\n e[\"callback\"]()\n else:\n e[\"callback\"](e[\"args\"])\n return True", "def apply_handler(self):\n tmp = self.event_type\n if hasattr(self, tmp):\n getattr(self, tmp)()\n elif(self.target):\n self.message = self.message +\"\\r\\n\"\n self.target[0].send(self.message)", "def event(self, fn):\n self.bind({fn.__name__: fn})", "def dispatch(self, sender, event, *args, **kwargs):\n pass # pragma: no cover", "def get_handlers_for_event(self, event):\n pass # pragma: no cover", "def processEvent(self, fsm, event, **kwargs):\n handler = self._handlers.get(event, None)\n if handler is not None:\n handler(fsm, **kwargs)\n return", "def chat_handler(self, regex, order=100):\n def decorator(func):\n self.register_handler(regex, func, order)\n return func\n\n return decorator", "def handle(self, body):\n event_type = body['event_type']\n method_name = event_type.replace('.', '_')\n try:\n method = getattr(self, method_name)\n method(body)\n except AttributeError:\n LOG.debug('%s needs a method called `%s` to handle %s' %\n (self.__class__.__name__, method_name, event_type))", "def event_pattern(self):\n pass # pragma: no cover", "def apply_event(self, ev):\n handler = getattr(self, 'apply_%s_event' % ev.ev_type)\n handler(**ev.kw)", "def send(self, *args, **kw):\n result = []\n for handler in self.registry.values():\n result.append(handler(*args, **kw))\n return result" ]
[ "0.6142079", "0.6090708", "0.58892405", "0.57285976", "0.56782824", "0.56358707", "0.56105155", "0.5587811", "0.5533092", "0.55268854", "0.54496866", "0.541662", "0.5377691", "0.53475493", "0.53425974", "0.5300932", "0.52983147", "0.5238735", "0.5219943", "0.52198094", "0.5159539", "0.51082546", "0.5096151", "0.5082168", "0.50788754", "0.50595963", "0.50480556", "0.5036465", "0.5028771", "0.50264025" ]
0.73175037
0
Enters the event loop, reading lines from wmii's '/event' and dispatching them, via dispatch, to event handlers. Continues so long as alive is True.
def loop(self): keys.mode = 'main' for line in client.readlines('/event'): if not self.alive: break self.dispatch(*line.split(' ', 1)) self.alive = False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run():\n\n while True:\n\n # get event, blah\n event_name, event_data = revent.get_event(block=True, timeout=5)\n\n if event_name is not None:\n print 'received: %s' % event_name\n\n if event_name.endswith('_oembed_details'):\n handle_new_oembed_details(event_data)\n\n elif event_name == 'new_tweet':\n handle_new_tweet(event_data)\n\n # and we're done\n assert revent.verify_msg(event_name, event_data), \\\n \"Could not verify %s\" % event_name", "def main_loop(self):\n LOGGER.info('Entering main event loop...')\n try:\n while self._handle_faucet_events():\n while not self._faucet_events.event_socket_connected:\n LOGGER.info('Attempting faucet event sock connection...')\n time.sleep(1)\n try:\n self._faucet_events.connect()\n self._restore_states()\n self._faucet_collector.set_state_restored(True)\n except Exception as e:\n LOGGER.error(\"Cannot restore states or connect to faucet: %s\", e)\n self._faucet_collector.set_state_restored(False, e)\n except KeyboardInterrupt:\n LOGGER.info('Keyboard interrupt. Exiting.')\n self._faucet_events.disconnect()\n except Exception as e:\n LOGGER.error(\"Exception: %s\", e)\n raise", "def run(self):\n while True:\n time.sleep(RTM_READ_DELAY)\n for event in self._slack_client.rtm_read():\n self.handle_event(event)", "def listen(self):\n while self.active:\n self.handle_input()", "def run(self):\r\n while self._go.isSet(): #while app is running\r\n if self._check_console_input(): #if something to read on the console\r\n cmd = sys.stdin.readline() #read it\r\n self.inq.put(cmd) #dispatch it tpo the server\r\n response = self.outq.get(timeout=2.0) #wait for an answer\r\n sys.stdout.write(response) #write the answer on the console\r", "def loop_run(self):\n super(EventLoop, self).loop_run()\n self.inq = self.cothread.EventQueue()", "def _eventloop(self):\n logging.debug(\"%s - eventloop started\" % self.name)\n while not self.stopped:\n event = self.inqueue.get()\n if not event: break\n self.doevent(event)\n logging.debug(\"%s - eventloop stopped\" % self.name)", "def _thread_loop(self):\n while not self.stop_thread.is_set():\n # First, read a line\n try:\n line = self._read_line()\n except EvseTimeoutError:\n continue\n # Then if the line is a status change, execute the callback\n if line[:3] in ('ST ', '$ST'):\n self.callback(states[int(line.split()[1], 16)])\n # write_allowed is only cleared if the board has been reset ;\n # in this case, we should wait 1 more second before executing\n # commands in order for the board to finish booting.\n if not self.write_allowed.is_set():\n threading.Timer(1, self.write_allowed.set).start()\n continue\n # Do not write a new line if\n # the previous one isn't read and is not old enough\n previous_newline_age = 0\n while (self.newline_available.is_set() and\n previous_newline_age <= NEWLINE_MAX_AGE):\n time.sleep(SYNC_SERIAL_TIMEOUT)\n previous_newline_age += SYNC_SERIAL_TIMEOUT\n # Write the new received line\n self.newline = line\n self.newline_available.set()", "def enter_read_loop(self):\n\n try:\n while True:\n try:\n request = DAPBaseMessage.recv(self._current_client)\n except Exception as e:\n # TODO send error\n traceback.print_exc()\n continue\n\n if request is None:\n # client terminated without termination request\n return\n try:\n self.resolve_message(request)\n except Exception as e:\n # TODO send error\n traceback.print_exc()\n self.next_seq += 1\n DAPErrorResponse.create(self.next_seq, rq.seq, False, message=\"Error\").send(self._current_client)\n continue\n\n if self._current_client is None:\n 
self._ready_for_events = False\n return # terminated\n\n except BaseException as e:\n # failure while communicating\n traceback.print_exc()\n pass\n finally:\n # final handler, clear active client\n self._current_client = None\n self._ready_for_events = False\n\n debugger.reset()", "def __process_input(self):\n\n while not self.stop_event.isSet():\n\n readable, writable, exceptional = select.select([self.event_queue], [], [])\n\n if readable[0] is self.event_queue:\n\n event = self.event_queue.get()\n \n if (time.time() - event.creation) > INSTRUCTION_TIMEOUT:\n self.logging_queue.put(self.__create_event_obj(ERROR, 'TimeOut', str(time.time() - event.creation)))\n self.logger1.info(\"Instruction rejected due to timeout: '{}', '{}', '{}'\".format(event.source, event.type, event.value))\n \n elif not self.__input_filter(event):\n self.logging_queue.put(self.__create_event_obj(ERROR, 'Filtered', '{}, {}, {}'.format(event.source, event.type, event.value)))\n \n else:\n \n self.logging_queue.put(event) \n \n if event.type == self.input_commands.toggle_door_cmd:\n self.__toggle_door()\n self.__update_door_info()\n elif event.type == self.input_commands.light_cmd:\n self.__light()\n elif event.type == self.input_commands.open_door_cmd:\n self.__open_door()\n self.__update_door_info()\n elif event.type == self.input_commands.close_door_cmd:\n self.__close_door()\n self.__update_door_info()\n elif event.type == self.input_commands.control_wire:\n self.__log_output_pin_state(event)\n self.__update_door_info()\n elif event.type == self.input_commands.stop_cmd:\n self.__del__()\n return None\n \n \n #if event.hardware:\n # self.__log_input_pin_state(event) ", "def run(self):\n while True:\n try:\n data = self._read()\n except IOError:\n break\n\n if len(data) == 0:\n self.finalize(\"Connection closed.\")\n break\n\n gevent.spawn(self.process_data, data)", "def run(self):\n while True:\n try:\n data = self._read()\n except IOError:\n break\n\n if len(data) == 0:\n self.finalize(\"Connection closed.\")\n break\n\n gevent.spawn(self.process_data, data)", "def run(self):\n while self.is_connected():\n self.__ticker.tick() # Tick (sleep)\n\n if self.process and self.process.is_alive():\n self.update()\n continue\n\n c = getkey() \n if c:\n if c == 'w':\n print \"Moving forward\"\n self.add_property(\"name\", \"pioneer_command\")\n self.add_property(\"pioneer_command\", \"mmove\")\n elif c == 'a':\n print \"Turning left\"\n self.add_property(\"name\", \"pioneer_command\")\n self.add_property(\"pioneer_command\", \"mleft\")\n elif c == 'd':\n print \"Turning right\"\n self.add_property(\"name\", \"pioneer_command\")\n self.add_property(\"pioneer_command\", \"mright\")\n elif c == 'f':\n self.add_property(\"name\", \"pioneer_command\")\n self.add_property(\"pioneer_command\", \"finish\")\n elif c == 'p':\n self.add_property(\"name\", \"remote_command\")\n self.add_property(\"pioneer_command\", \"record\")\n elif c == 'h':\n print \"[w] = forward [a] = left [d] = right [f] = finish\"\n\n \n ############################\n # Send data\n self.update()", "def event_loop(self):\n for event in pg.event.get():\n self.keys = pg.key.get_pressed()\n if event.type == pg.QUIT or self.keys[pg.K_ESCAPE]:\n self.done = True\n self.cannon.get_event(event, self.objects)", "def _flow_in(self):\n print(\"MESSENGER: flow_in online!\")\n while self.running:\n data = b\"\"\n while data[-5:] != b\"ROGER\" and self.running:\n try:\n slc = self.sock.recv(1024)\n except socket.timeout:\n time.sleep(0.1)\n except socket.error as E:\n 
print(\"MESSENGER: caught socket exception:\", E)\n self.teardown(1)\n except Exception as E:\n print(\"MESSENGER: generic exception:\", E)\n self.teardown(1)\n else:\n data += slc\n if not self.running:\n if data:\n print(\"MESSENGER: data left hanging:\" + data[:-5].decode(\"utf8\"))\n return\n data = data[:-5].decode(\"utf8\")\n self.recvbuffer.extend(data.split(\"ROGER\"))\n print(\"MESSENGER: flow_in exiting...\")", "def rfactor_event_loop():\n if RfactorLiveEvent.event.is_set():\n is_live = RfactorLiveEvent.get_nowait()\n # -- Update rFactor live state to front end\n if is_live is not None:\n eel.rfactor_live(is_live)\n\n if RfactorStatusEvent.event.is_set():\n status = RfactorStatusEvent.get_nowait()\n # -- Update rFactor status message in front end\n if status is not None:\n logging.debug('Updating rf2 status message: %s', status)\n eel.rfactor_status(status)\n\n RfactorStatusEvent.reset()", "def start(self):\n self.has_event = False\n self.running = True\n self._condition.acquire()\n self._thread = threading.Thread(target=read_input, args=(self,))\n self._thread.start()", "def __listener__(self):\n frame_interval = 0.1\n str_list = []\n c = ''\n while True:\n with Timeout(frame_interval, False):\n while True:\n try:\n c = self.ser.read()\n except:\n self.ser.close()\n self.make_connection.go()\n self.connection_made.wait()\n str_list.append(c)\n if c == \"\\n\" or c == '':\n break\n received = ''.join(str_list)\n str_list = []\n if received:\n for i in self.read_handlers:\n gevent.spawn(i, received)\n sleep(0.001)", "def run(self):\n while True:\n event, values = self.window.read()\n if event == sg.WIN_CLOSED:\n break\n\n ev.fire(self.window, event, values)", "def _read_loop(self):\n while True:\n self.read()", "def work(self):\n while True:\n message = self.get()\n self.handle(message)", "def run(self):\n \n # Wrap the outer loop in a try block so we can do an orderly shutdown\n # should an exception occur:\n try:\n # Send out a STARTUP event:\n self.dispatchEvent(weewx.Event(weewx.STARTUP))\n \n syslog.syslog(syslog.LOG_INFO, \"engine: Starting main packet loop.\")\n\n last_gc = int(time.time())\n\n # This is the outer loop. \n while True:\n\n # See if garbage collection is scheduled:\n if int(time.time()) - last_gc > self.gc_interval:\n ngc = gc.collect()\n syslog.syslog(syslog.LOG_INFO, \"engine: garbage collected %d objects\" % ngc)\n last_gc = int(time.time())\n\n # First, let any interested services know the packet LOOP is\n # about to start\n self.dispatchEvent(weewx.Event(weewx.PRE_LOOP))\n \n # Get ready to enter the main packet loop. An exception of type\n # BreakLoop will get thrown when a service wants to break the\n # loop and interact with the console.\n try:\n \n # And this is the main packet LOOP. It will continuously\n # generate LOOP packets until some service breaks it by\n # throwing an exception (usually when an archive period\n # has passed).\n for packet in self.console.genLoopPackets():\n \n # Package the packet as an event, then dispatch it.\n self.dispatchEvent(weewx.Event(weewx.NEW_LOOP_PACKET, packet=packet))\n\n # Allow services to break the loop by throwing\n # an exception:\n self.dispatchEvent(weewx.Event(weewx.CHECK_LOOP, packet=packet))\n\n syslog.syslog(syslog.LOG_CRIT, \"engine: Internal error. Packet loop has exited.\")\n \n except BreakLoop:\n \n # Send out an event saying the packet LOOP is done:\n self.dispatchEvent(weewx.Event(weewx.POST_LOOP))\n\n finally:\n # The main loop has exited. 
Shut the engine down.\n syslog.syslog(syslog.LOG_DEBUG, \"engine: Main loop exiting. Shutting engine down.\")\n self.shutDown()", "def processIncoming(self):\n while (self.queue.qsize()):\n try:\n message = self.queue.get_nowait()\n \n self.terminal.insert(END,message)\n\n # Autoscroll the terminal if set\n if (self.autoscroll_value.get()):\n self.terminal.yview(END)\n\n except Queue.Empty:\n pass", "def _dispatch(f = None):\n #first install signal handler\n #this way we can quit the program easily from the command line\n #also, this makes libevent block on the first loop\n #otherwise when there are no events in the beginning, loop will not\n #block and our main dispatch loop would claim 100% CPU time\n def interrupt():\n quit(EXIT_CODE_SIGINT)\n event_interrupt = SignalEvent(SIGINT, interrupt)\n\n #the heartbeat makes sure the main loop below at least\n #makes a cycle every second. otherwise, if there are no pending signals\n #libevent._loop would block indefinitly, causing our loop never to check\n #if it still must be _running...\n event_heartbeat = TimeoutEvent(1.0, None, True)\n\n #as a convenience, user can provide a callable *f* to start a new task\n #lets start it here\n if callable(f):\n Tasklet.new(f)()\n\n global _running\n _running = True\n e = None\n try:\n #this is it, the main dispatch loop...\n #tasklets are scheduled to run by stackless,\n #and if no more are runnable, we wait for IO events to happen\n #that will trigger tasks to become runnable\n #ad infinitum...\n while _running:\n #first let any tasklets run until they have all become blocked on IO\n try:\n while stackless.getruncount() > 1:\n stackless.schedule()\n except TaskletExit:\n pass\n except:\n logging.exception(\"unhandled exception in dispatch schedule\")\n\n #now block on IO till any IO is ready.\n #care has been taken to not callback directly into python\n #from libevent. that would add c-data on the stack which would\n #make stackless need to use hard-switching, which is slow.\n #so we call 'loop' which blocks until something available.\n try:\n event.loop()\n except TaskletExit:\n raise\n except:\n logging.exception(\"unhandled exception in event loop\")\n\n #we iterate over the available triggered events and\n #call the callback which is available as the 'data' object of the event\n #some callbacks may trigger direct action (for instance timeouts, signals)\n #others might resume a waiting task (socket io).\n while event.has_next():\n try:\n e, event_type, fd = event.next()\n e.data(event_type)\n except TaskletExit:\n raise\n except:\n logging.exception(\"unhandled exception in event callback\")\n\n finally:\n del e\n event_interrupt.close()\n del event_interrupt\n event_heartbeat.close()\n del event_heartbeat\n\n if DEBUG_LEAK:\n logging.warn(\"alive objects:\")\n gc.collect()\n _print_objects(gc.get_objects())\n logging.warn('garbage:')\n _print_objects(gc.garbage)\n\n sys._exit(_exitcode)", "def event_loop(self, index):\n logger.debug(\"Starting event loop \"+str(index))\n self.stop_flag = Event()\n stop_flag = self.stop_flag # Saving a reference.\n # stop_flag is an object that will signal the current input thread to exit or not exit once it's done processing a callback.\n # It'll be called just before self.stop_flag will be overwritten. 
However, we've got a reference to it and now can check the exact flag this thread itself constructed.\n # Praise the holy garbage collector.\n stop_flag.clear()\n while not stop_flag.isSet():\n if self.get_current_proxy() is not None:\n try:\n key = self.queue.get(False, 0.1)\n except Queue.Empty:\n # here an active event_loop spends most of the time\n sleep(0.1)\n except AttributeError:\n # typically happens upon program termination\n pass\n else:\n # here event_loop is usually busy\n self.process_key(key)\n else:\n # No current proxy set yet, not processing anything\n sleep(0.1)\n logger.debug(\"Stopping event loop \"+str(index))", "def run(self):\n while True:\n try:\n logging.debug(\">>> Waiting for action\")\n talk_action = self.action_queue.get(True, 150)\n except Queue.Empty:\n break\n else:\n self.handle(talk_action)\n self.close()", "def on_open(self):\n def event_loop():\n logger.debug(pformat(self.query.request))\n self.send(json.dumps(self.query.request))\n while not self.event.is_set():\n #print('Waiting around on the socket: %s' % self.gettimeout())\n self.event.wait(self.gettimeout())\n \n logger.debug('Event loop terminating.')\n \n self.thread = threading.Thread(\n target=event_loop)\n self.thread.setDaemon(True)\n self.thread.start()", "def run():\n #LOG.debug(\"and so it begins\")\n intro()\n\n reloop = True\n while reloop is True:\n lines = []\n print(\"Awaiting your input: \")\n print('EXIT or ctrl-c to quit WPM')\n test = ''\n while test != 'END' and test != 'EXIT':\n line = input()\n if line == 'EXIT':\n exit()\n elif line != \"END\":\n lines.append(line)\n else:\n test = 'END'\n #LOG.debug(lines)\n\n parse_lines(lines, p)\n\n #LOG.debug(p)", "def loop(self):\n while self.dispatch(True) is not QUIT:\n pass", "def listen(self):\n self.processor_thread = Thread(target = self.event_loop, name=\"InputThread-\"+str(self.thread_index), args=(self.thread_index, ))\n self.thread_index += 1\n self.processor_thread.daemon = True\n self.processor_thread.start()" ]
[ "0.6532273", "0.64159447", "0.61325336", "0.6103995", "0.6085888", "0.60446244", "0.60411006", "0.600487", "0.59969455", "0.5978081", "0.5910201", "0.5910201", "0.59055114", "0.5898839", "0.5848962", "0.5841019", "0.5816226", "0.5811897", "0.580067", "0.5787875", "0.57788646", "0.5762028", "0.5737086", "0.5722296", "0.5718663", "0.5706343", "0.5700027", "0.56666327", "0.56637144", "0.5659754" ]
0.7531345
0
Binds a number of event handlers for wmii events. Keyword arguments other than 'items' are added to the 'items' dict. Handlers are called by loop when a matching line is read from '/event'. Each handler is called with a single argument: the string read from '/event' with its first token stripped.
def bind(self, items={}, **kwargs): kwargs.update(items) for k, v in flatten(kwargs.iteritems()): if hasattr(k, 'match'): self.eventmatchers[k] = v else: self.events[k] = v
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _register_handlers(self):\n self.jm.register_handler(\"move_node\", self.move_node)\n self.jm.register_handler(\"copy_node\", self.copy_node)\n self.jm.register_handler(\"push_to_vospace\", self.push_to_vospace)\n self.jm.register_handler(\"push_from_vospace\", self.push_from_vospace)\n self.jm.register_handler(\"pull_to_vospace\", self.pull_to_vospace)\n self.jm.register_handler(\"pull_from_vospace\", self.pull_from_vospace)", "def event_table(self):\n self.Bind(wx.EVT_TASKBAR_LEFT_DOWN, self.on_main_action)\n self.Bind(wx.EVT_TASKBAR_RIGHT_DOWN, self.on_actions_list)\n self.Bind(wx.EVT_MENU, self.on_menu_item)\n self.Bind(ev.PRESENCE_UPDATED_EVENT, self.on_presence_updated)", "def _register_handlers(self):\n DBG(\"\\nregister handlers\")\n for hook, handler in self.handlers:\n g.registerHandler(hook, handler)\n\n signal_manager.connect(self.c, 'body_changed', self._after_body_key)", "def hook_events(self):\n wxMediator.hook_events(self)\n EVT_MINE(self, wxEVT_SOCKET_DATA, self.on_data)\n EVT_MINE(self, wxEVT_NEW_LISTEN_CONN, self.new_listen_conn)\n EVT_MINE(self, wxEVT_NEW_TALK_CONN, self.new_talk_conn)", "def Handler(self, *events: str, colon: bool = False,\n ircv3: bool = False) -> Callable:\n ...", "def make_keyhandler(events):\n def handler(key):\n for k in events:\n if key == simplegui.KEY_MAP[k]:\n events[k]()\n return handler", "def handle_events(self, events):\n for event in events:\n event_type = event['type']\n if event_type == types.SO_CHANGE:\n for key in event['data']:\n self.data[key] = event['data'][key]\n self.on_change(key)\n\n elif event_type == types.SO_REMOVE:\n key = event['data']\n assert key in self.data, (key, self.data.keys())\n del self.data[key]\n self.on_delete(key)\n\n elif event_type == types.SO_SEND_MESSAGE:\n self.on_message(event['data'])\n else:\n assert False, event", "def setupInputEventHandlers(self):\n\n default.Script.setupInputEventHandlers(self)\n self.inputEventHandlers.update(\n self.structuralNavigation.inputEventHandlers)\n\n self.inputEventHandlers[\"sayAllHandler\"] = \\\n input_event.InputEventHandler(\n Script.sayAll,\n cmdnames.SAY_ALL)\n\n self.inputEventHandlers[\"panBrailleLeftHandler\"] = \\\n input_event.InputEventHandler(\n Script.panBrailleLeft,\n cmdnames.PAN_BRAILLE_LEFT,\n False) # Do not enable learn mode for this action\n\n self.inputEventHandlers[\"panBrailleRightHandler\"] = \\\n input_event.InputEventHandler(\n Script.panBrailleRight,\n cmdnames.PAN_BRAILLE_RIGHT,\n False) # Do not enable learn mode for this action", "def create_default_events(self):\n self.events.register_class(\"commands\", LineEvent)\n self.events.register_class(\"commands_out\", LineEvent)\n self.events.register_class(\"hooks\", HookEvent)", "def get_handlers_for_event(self, event):\n pass # pragma: no cover", "def on_events(self, events, handler, once=False):\n\n if not hasattr(self, '_events'):\n setattr(self, '_events', {})\n\n # Create wrapper to prevent modification of the original handler.\n @wraps(handler)\n def wrapper(*args, **kwargs):\n print 'INVOKING', handler.__name__, args\n return handler(*args, **kwargs)\n\n wrapper.wait_for = {}\n wrapper.wait_for_keys = list(events)\n wrapper.run_once = once\n\n for ev in events:\n print 'SUBSCRIBE', ev, '->', handler.__name__\n wrapper.wait_for[ev] = None\n self._events.setdefault(ev, set()).add(wrapper)", "def handleEvents(self, events):\n pass", "def runEventCallbacks(self, event, *args):\n\n if not event in self.EVENT_TYPES:\n raise Exception(\"XnatIo (onEvent): invalid event type 
'%s'\"%(\\\n event))\n if not hasattr(self, 'eventCallbacks__'):\n print('self has no attribute eventCallbacks__')\n return\n\n for callback in self.eventCallbacks__[event]:\n #print(f\"EVENT CALLBACK {event}\")\n callback(*args)", "def event0():\n header(0, 0)\n\n if DEBUG.GET_CHTHONIC_SPARK:\n item.award_item_to_host_only(1600)\n if DEBUG.GET_DARKMOON_SEANCE_RING:\n item.award_item_to_host_only(1600310)\n if DEBUG.GWYNDOLIN_DEAD:\n flag.enable(EVENT.GwyndolinDead)\n item.award_item_to_host_only(2600)\n if DEBUG.ORNSTEIN_AND_SMOUGH_DEAD:\n flag.enable(EVENT.OrnsteinAndSmoughDead)\n if DEBUG.DARK_ANOR_LONDO:\n flag.enable(EVENT.OrnsteinAndSmoughDead)\n flag.enable(EVENT.GwyndolinDead)\n flag.enable(EVENT.DarkAnorLondo)\n if DEBUG.GET_LAUTREC_BLACK_EYE_ORB:\n item.award_item_to_host_only(2034)\n if DEBUG.CAPRICIOUS_THRALL_ACTIVE:\n flag.enable(EVENT.CapriciousThrallActive)\n if DEBUG.GET_BUTTERFLY_SOUL:\n item.award_item_to_host_only(2530)\n item.award_item_to_host_only(0)\n if DEBUG.DISABLE_FOG_ARCHER:\n chr.disable(CHR.SilverKnightArcherNearBossFog)\n if DEBUG.JAREEL_DEAD:\n flag.enable(EVENT.JareelDead)\n\n skip_if_event_flag_off(1, EVENT.OrnsteinAndSmoughDead)\n map.register_bonfire(11510920, 1511950)\n for bonfire_flag, bonfire_id, kindle_level in zip((11510992, 11510984, 11510976), (1511960, 1511961, 1511962),\n (10, 0, 0)):\n map.register_bonfire(bonfire_flag, bonfire_id, initial_kindle_level=kindle_level)\n map.register_ladder(11510010, 11510011, 1511140)\n map.register_ladder(11510012, 11510013, 1511141)\n\n # Make elevator work immediately (and skip cutscene).\n flag.enable(11510305)\n\n flag.disable(11510304)\n skip_if_client(2)\n obj.disable(1511994)\n sfx.delete_map_sfx(1511995, False)\n obj.disable(1511310)\n for hitbox_id in (1513301, 1513302, 1513303):\n hitbox.disable_hitbox(hitbox_id)\n skip_if_event_flag_off(1, 11510300)\n skip_if_event_flag_off(6, 11510303)\n flag.disable(11510301)\n flag.disable(11510302)\n flag.enable(11510303)\n anim.end_animation(1511300, 53)\n hitbox.enable_hitbox(1513303)\n skip(13)\n skip_if_event_flag_off(6, 11510302)\n flag.disable(11510301)\n flag.enable(11510302)\n flag.disable(11510303)\n anim.end_animation(1511300, 50)\n hitbox.enable_hitbox(1513302)\n skip(6)\n skip_if_event_flag_off(5, 11510301)\n flag.enable(11510301)\n flag.disable(11510302)\n flag.disable(11510303)\n anim.end_animation(1511300, 51)\n hitbox.enable_hitbox(1513301)\n\n obj.disable(1511450)\n flag.disable(11510460)\n run_event_with_slot(11510090, 0, (1511700, 1511701, 1512600, 1512601))\n run_event_with_slot(11510090, 1, (1511702, 1511703, 1512602, 1512603))\n\n for event_id in (11515040, 11515041, 11515042):\n run_event(event_id)\n\n run_event(11510200) # Rotating lever to open palace.\n run_event(11510205) # (New) Rotating lever to open palace in Dark Anor Londo (Jareel must be dead).\n run_event(11510201) # Palace locked from the outside.\n run_event(11510100) # Break chandelier.\n run_event(11510210) # Open one-way gate to blacksmith.\n run_event(11510211) # Blacksmith gate is locked.\n run_event(11510220) # First activation of gondola. (Now pre-enabled.)\n run_event(11510300) # Main gondola activation.\n run_event(11510319) # Gondola flags.\n run_event(11510340) # Gondola navimesh.\n run_event(11510350) # Gondola sync.\n run_event(11510310) # Gondola lever can't be pushed.\n run_event(11515250) # Painting Guardian ambush.\n run_event(11515251) # Provoke a Silver Knight.\n run_event(11510110) # Open door to Sun Chamber. 
(Now requires key.)\n run_event(11510111) # (New) Sun Chamber is locked.\n run_event(11510400) # Trigger Dark Anor Londo.\n run_event(11510401) # Disable Darkmoon Tomb statue.\n run_event(11510230) # Enter Painted World if you have the Painted Doll.\n run_event(11510240) # Return to Sen's Fortress.\n run_event(11515050) # Offend Pale Demon and cut off Fortress return.\n run_event(11510120) # Enable special effect 4501 in Darkmoon Tomb.\n run_event(11510130) # (Updated) Control Dark Anor Londo enemies.\n # (Gone) Player always respawns at 'Anor Londo' bonfire in Dark Anor Londo.\n run_event(11510460) # Kneel to Darkmoon Covenant.\n run_event(11510462) # Two-frame sync for above.\n run_event(11510461) # Kneel to Darkmoon Covenant, simple version.\n run_event(11510140) # Move your bloodstain out of endless Gwyndolin corridor when you win.\n run_event(11510150) # Trigger flag for quivering Black Eye Orb.\n run_event(11512008) # (New) Message that Thrall has fled higher again.\n\n run_event(11512043) # (NEW) Monitor resting at Sun Chamber bonfire for warping (11512045).\n run_event(11512044) # (NEW) Monitor resting at Gwyn's Altar bonfire for warping (11512046).\n\n run_event(151)\n run_event(11510215)\n\n # Sentinel shield parts.\n for slot, sentinel_id in zip(range(14), range(1510400, 1510414)):\n run_event_with_slot(11515060, slot, (sentinel_id,))\n\n # Gargoyle tails removed.\n\n # One-way shortcut doors.\n run_event_with_slot(11510260, 0, (11510251, 1512251, 1512250), 'iii')\n run_event_with_slot(11510260, 1, (11510257, 1512253, 1512252), 'iii')\n run_event_with_slot(11510260, 2, (11510258, 1512255, 1512254), 'iii')\n\n # ORNSTEIN AND SMOUGH / GWYN, LORD OF LIGHT\n\n sound.disable_map_sound(1513800) # Ornstein and Smough.\n sound.disable_map_sound(1513805) # Gwyn.\n\n # GWYN:\n run_event(11512200) # Gwyn trigger.\n run_event(11512201) # Gwyn death.\n skip_if_event_flag_on(22, EVENT.AnorLondoGwynWarp) # Skip O&S events (light and dark). Keep an eye on length.\n\n skip_if_event_flag_off(10, EVENT.OrnsteinAndSmoughDead)\n # Already dead:\n anim.force_animation(1511401, 0, loop=True) # Start elevators\n anim.force_animation(1511402, 0, loop=True)\n run_event(11515392)\n for fog_wall, fog_sfx in zip((1511990, 1511992, 1511988), (1511991, 1511993, 1511989)):\n obj.disable(fog_wall)\n sfx.delete_map_sfx(fog_sfx, False)\n skip(11)\n\n # Alive:\n for relative_id in (5390, 5391, 5393, 5392, 1, 5394, 5395, 5396, 5397, 5398, 5399):\n run_event(BASE_FLAG + relative_id)\n\n # FORSAKEN KNIGHT ORNSTEIN & SUN-EATER SMOUGH\n\n run_event(11515492) # Trigger. 
Handles all other events within.\n run_event(11512001) # Die.\n\n # DARK SUN GWYNDOLIN\n\n sound.disable_map_sound(1513802)\n skip_if_event_flag_off(6, EVENT.GwyndolinDead)\n # Already dead:\n run_event(11515382)\n obj.disable(1511890)\n sfx.delete_map_sfx(1511891, False)\n obj.disable(1511892)\n sfx.delete_map_sfx(1511893, False)\n skip(13)\n # Alive:\n # Disable Jareel fog (otherwise visible in boss start cutscene).\n obj.disable(1511970)\n sfx.delete_map_sfx(1511971, False)\n obj.disable(1511972)\n sfx.delete_map_sfx(1511973, False)\n for relative_id in (5380, 5381, 5383, 5382, 900, 5384, 5385, 5386, 450):\n run_event(BASE_FLAG + relative_id)\n\n # NEW: Abyssal King Jareel.\n sound.disable_map_sound(1513803)\n skip_if_event_flag_off(6, EVENT.JareelDead)\n # Already dead:\n run_event(11515372)\n obj.disable(1511970)\n sfx.delete_map_sfx(1511971, False)\n obj.disable(1511972)\n sfx.delete_map_sfx(1511973, False)\n skip(7)\n # Alive:\n run_event(11515370)\n run_event(11515371)\n run_event(11515373)\n run_event(11515372)\n run_event(11515374)\n run_event(11515375)\n run_event(11510901)\n\n # Open three doors for enemies (I think).\n for relative_door_id, base_slot in zip((251, 257, 258), (0, 20, 40)):\n run_event_with_slot(11510710, base_slot, (BASE_FLAG + relative_door_id, 6750,\n 1512000 + relative_door_id, 1512000 + relative_door_id - 1))\n for i, relative_enemy_id in enumerate((300, 301, 302, 305, 320, 321, 322, # Silver Knights\n 323, 324, 325, 326, 327, 328, 329, 500,\n 177, 178, 179, 180, 181, 181, 182, 183, # Darkwraiths\n 184, 185, 186, 187, 188, 189, 190)):\n run_event_with_slot(\n 11510710, base_slot + i + 1, (BASE_FLAG + relative_door_id, 1510000 + relative_enemy_id,\n 1512000 + relative_door_id, 1512000 + relative_door_id - 1))\n\n # Mimic triggers.\n for slot, relative_mimic_id in enumerate(range(4)):\n run_event_with_slot(11515200, slot, (1510200 + relative_mimic_id,))\n run_event_with_slot(11515210, slot, (1510200 + relative_mimic_id,))\n run_event_with_slot(11515220, slot, (1510200 + relative_mimic_id,))\n run_event_with_slot(11515230, slot, (1510200 + relative_mimic_id,))\n run_event_with_slot(11515240, slot, (1510200 + relative_mimic_id, 1512010 + relative_mimic_id))\n run_event_with_slot(11510850, slot, (1510200 + relative_mimic_id,))\n run_event_with_slot(11515190, slot, (1510200 + relative_mimic_id,))\n\n # Treasure chests.\n for i in range(1, 21):\n if i == 12 or i == 19:\n continue\n run_event_with_slot(11510600, i, (1511650 + i, 11510600 + i))\n anim.end_animation(1511662, 0) # Gwyn's chest already looted\n # Only activate chapel chest before Dark Anor Londo (replaced by Mimic).\n skip_if_event_flag_on(1, EVENT.DarkAnorLondo)\n run_event_with_slot(11510600, 19, (1511669, 11510619))\n\n # Non-respawning enemies.\n run_event_with_slot(11510860, 0, (1510250, 0)) # Haunting Semblance\n run_event_with_slot(11510860, 3, (6640, 0)) # Dark Anor Londo Knight 1\n run_event_with_slot(11510860, 4, (6650, 0)) # Dark Anor Londo Knight 2\n run_event_with_slot(11510870, 0, (CHR.DarkmoonGuardian,))\n\n # NEW: Allied Silver Knights and Sentinels stop respawning in Dark Anor Londo if killed (unless Jareel is dead).\n for slot, enemy_id in enumerate(DarkAnorLondoAllies):\n run_event_with_slot(11512050, slot, (enemy_id,))\n run_event_with_slot(11512150, slot, (enemy_id,)) # They also turn hostile again if attacked in Dark AL.\n\n # NEW: Darkwraiths stop respawning in Dark Anor Londo if killed (unless Jareel is alive).\n for slot, enemy_id in enumerate(Darkwraiths):\n 
run_event_with_slot(11512100, slot, (enemy_id,))\n\n # NEW: Scripted rampart battle between archers and Darkwraiths. Also disables gravity\n # for the high archer.\n run_event(11512040)\n\n # NEW: Scripted battle between Darkwraith and Pale Demons.\n run_event(11512041)\n\n # NEW: Angry Giant Blacksmith in Dark Anor Londo.\n run_event(11512042)\n\n # NEW: Capricious Thrall one-off attack on the rooftop.\n sound.disable_map_sound(1513804)\n obj.disable(1511974)\n sfx.delete_map_sfx(1511975, False)\n obj.disable(1511976)\n sfx.delete_map_sfx(1511977, False)\n obj.disable(1511978)\n sfx.delete_map_sfx(1511979, False)\n run_event(11512060) # Trigger and timer.\n run_event(11512061) # Death.", "def add_handlers(self, host_pattern, host_handlers):\n pass", "def _setup_events(conf):\n events = {}\n for name in conf.keys():\n events[name] = Event(name=name)\n for listener in conf[name]:\n action = 'run'\n if ':' in listener:\n listener, action = listener.rsplit(':')\n events[name].add_listener(listener, action)\n\n # Add events to module scope.\n globals().update(events)", "def _bind_events(self):\n \n # main\n self.Bind(wx.EVT_CLOSE, self.OnClose)\n \n # drag files\n self.DragAcceptFiles(True)\n self.Bind(wx.EVT_DROP_FILES, self._on_documents_dropped)\n \n # collections view events\n self.Bind(events.EVT_COLLECTIONS_SELECTION_CHANGED, self._on_collections_selection_changed)\n self.Bind(events.EVT_COLLECTIONS_ITEM_ACTIVATED, self._on_collections_item_activated)\n \n # articles view events\n self.Bind(events.EVT_ARTICLES_SET, self._on_articles_set)\n self.Bind(events.EVT_ARTICLES_SELECTION_CHANGED, self._on_articles_selection_changed)\n self.Bind(events.EVT_ARTICLES_ITEM_ACTIVATED, self._on_articles_item_activated)\n self.Bind(events.EVT_ARTICLES_TO_COLLECTION, self._on_articles_to_collection)\n self.Bind(events.EVT_ARTICLES_PUBMED, self._on_repository_search)\n \n self.Bind(events.EVT_ARTICLES_DROPPED_TO_TRASH, self._on_articles_dropped_to_trash)\n self.Bind(events.EVT_ARTICLES_DROPPED_TO_COLLECTION, self._on_articles_dropped_to_collection)\n self.Bind(events.EVT_ARTICLES_DROPPED_TO_LABEL, self._on_articles_dropped_to_label)\n \n # details view\n self.Bind(events.EVT_DETAILS_NAVIGATING, self._on_details_navigating)\n \n # menu events\n self.Bind(wx.EVT_MENU, self.OnClose, id=ID_QUIT)\n \n self.Bind(wx.EVT_MENU, self._on_library_new, id=ID_LIBRARY_NEW)\n self.Bind(wx.EVT_MENU, self._on_library_open, id=ID_LIBRARY_OPEN)\n self.Bind(wx.EVT_MENU, self._on_library_backup, id=ID_LIBRARY_BACKUP)\n\n self.Bind(wx.EVT_MENU, self._on_articles_search, id=ID_ARTICLES_SEARCH)\n self.Bind(wx.EVT_MENU, self._on_articles_open_pdf, id=ID_ARTICLES_OPEN_PDF)\n self.Bind(wx.EVT_MENU, self._on_articles_open_doi, id=ID_ARTICLES_OPEN_DOI)\n self.Bind(wx.EVT_MENU, self._on_articles_open_pmid, id=ID_ARTICLES_OPEN_PMID)\n self.Bind(wx.EVT_MENU, self._on_articles_reveal_pdf, id=ID_ARTICLES_REVEAL_PDF)\n self.Bind(wx.EVT_MENU, self._on_articles_copy_citation, id=ID_ARTICLES_COPY_CITATION)\n self.Bind(wx.EVT_MENU, self._on_articles_copy_summary, id=ID_ARTICLES_COPY_SUMMARY)\n self.Bind(wx.EVT_MENU, self._on_articles_copy_link, id=ID_ARTICLES_COPY_LINK)\n self.Bind(wx.EVT_MENU, self._on_articles_new, id=ID_ARTICLES_NEW)\n self.Bind(wx.EVT_MENU, self._on_articles_import, id=ID_ARTICLES_IMPORT)\n self.Bind(wx.EVT_MENU, self._on_articles_edit, id=ID_ARTICLES_EDIT)\n self.Bind(wx.EVT_MENU, self._on_articles_delete, id=ID_ARTICLES_DELETE)\n self.Bind(wx.EVT_MENU, self._on_articles_trash, id=ID_ARTICLES_TRASH)\n 
self.Bind(wx.EVT_MENU, self._on_articles_restore, id=ID_ARTICLES_RESTORE)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_GRAY)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_RED)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_ORANGE)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_YELLOW)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_GREEN)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_BLUE)\n self.Bind(wx.EVT_MENU, self._on_articles_colour, id=ID_ARTICLES_COLOUR_PURPLE)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_0)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_1)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_2)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_3)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_4)\n self.Bind(wx.EVT_MENU, self._on_articles_rating, id=ID_ARTICLES_RATING_5)\n self.Bind(wx.EVT_MENU, self._on_articles_labels, id=ID_ARTICLES_LABELS)\n self.Bind(wx.EVT_MENU, self._on_articles_match, id=ID_ARTICLES_MATCH)\n self.Bind(wx.EVT_MENU, self._on_articles_update, id=ID_ARTICLES_UPDATE)\n self.Bind(wx.EVT_MENU, self._on_articles_attach_pdf, id=ID_ARTICLES_ATTACH_PDF)\n self.Bind(wx.EVT_MENU, self._on_articles_to_collection, id=ID_ARTICLES_COLLECTIONS)\n \n self.Bind(wx.EVT_MENU, self._on_collections_new, id=ID_COLLECTIONS_NEW_MANUAL)\n self.Bind(wx.EVT_MENU, self._on_collections_new, id=ID_COLLECTIONS_NEW_SMART)\n self.Bind(wx.EVT_MENU, self._on_collections_new, id=ID_COLLECTIONS_NEW_FROM_SELECTION)\n self.Bind(wx.EVT_MENU, self._on_collections_edit, id=ID_COLLECTIONS_EDIT)\n self.Bind(wx.EVT_MENU, self._on_collections_delete, id=ID_COLLECTIONS_DELETE)\n self.Bind(wx.EVT_MENU, self._on_collections_empty_trash, id=ID_COLLECTIONS_EMPTY_TRASH)\n \n self.Bind(wx.EVT_MENU, self._on_labels_new, id=ID_LABELS_NEW)\n self.Bind(wx.EVT_MENU, self._on_labels_edit, id=ID_LABELS_EDIT)\n self.Bind(wx.EVT_MENU, self._on_labels_delete, id=ID_LABELS_DELETE)\n \n self.Bind(wx.EVT_MENU, self._on_repository_search, id=ID_REPOSITORY_SEARCH)\n self.Bind(wx.EVT_MENU, self._on_repository_search, id=ID_REPOSITORY_RECENT_FIRST_AUTHOR)\n self.Bind(wx.EVT_MENU, self._on_repository_search, id=ID_REPOSITORY_RECENT_LAST_AUTHOR)\n self.Bind(wx.EVT_MENU, self._on_repository_search, id=ID_REPOSITORY_RECENT_JOURNAL)\n \n self.Bind(wx.EVT_MENU, self._on_library_analyze, id=ID_LIBRARY_ANALYZE)\n self.Bind(wx.EVT_MENU, self._on_authors_list, id=ID_AUTHORS_LIST)\n \n self.Bind(wx.EVT_MENU, self._on_view_pane, id=ID_VIEW_COLLECTIONS)\n self.Bind(wx.EVT_MENU, self._on_view_pane, id=ID_VIEW_PDF)\n self.Bind(wx.EVT_MENU, self._on_view_pane, id=ID_VIEW_DETAILS)", "def __init__(self, fsm):\n self._handlers = {}\n for methodName, method in inspect.getmembers(self, inspect.ismethod):\n if methodName.startswith('on'):\n eventName = _NameHelper.eventNameFromHandlerName(methodName)\n event = fsm._events.__dict__[eventName]\n self._handlers[event] = method", "def CmdHandler(self, *events: str, colon: bool = False,\n ircv3: bool = False) -> Callable:\n ...", "def _list_items_changed_handler ( self, name, not_used, event ):\n arg_lists = self._get_instance_handlers( name[:-6] )\n\n for item in event.removed:\n for args in arg_lists:\n item.on_trait_change( remove = True, *args )\n\n for item in event.added:\n for args in arg_lists:\n 
item.on_trait_change( *args )", "def addhandler(self, txt, handler):\n self.handlers[txt] = handler\n rlog(0, 'webserver', '%s handler added' % txt)", "def logevents(self, events, request = None):\n for event in events:\n self.logevent(event, request)", "def handle_sc_event(store, changed_keys, info):\n\n for key in changed_keys:\n SC_HANDLERS[key](key=key, info=info)", "def events(self):\n for line_num, line in enumerate(self.file_handler):\n if not line:\n break\n # process line input to dictionary\n data = json.loads(line)\n # add id information\n data['id'] = line_num\n # update timestamp history\n timestamp = self._get_timestamp(data)\n self.last_two_timestamps = [self.last_two_timestamps[-1], timestamp]\n self.event_timestamps[line_num] = timestamp\n\n self.alarms.append(0) # add field for alarms\n self.users.append(data['user']) # add field for user\n self.anomalies.append(data.get('is_anomaly', 0)) # add field for anomalies\n if 'is_anomaly' in data:\n del data['is_anomaly'] # remove anomaly information from data for contestants\n\n # return line id and serialized JSON as string representing one event\n str_dump = json.dumps(data)\n logger.info(self._get_inner_time() + ' > ' + str_dump)\n yield line_num, str_dump", "def addHandlers(self, handlers):\n self._eventHandlers.update(handlers)\n keys = self._eventHandlers.keys()\n pygame.event.set_allowed(keys)", "def add_map(self, event, handle, *args):\n\n item = self.base.setdefault(event, list())\n item.append((handle, args))", "def _update_handlers(self):\n handler_map = defaultdict(list)\n for i, obj in enumerate(self.handlers):\n for dummy, handler in inspect.getmembers(obj, callable):\n if not hasattr(handler, \"_pyxmpp_event_handled\"):\n continue\n # pylint: disable-msg=W0212\n event_class = handler._pyxmpp_event_handled\n handler_map[event_class].append( (i, handler) )\n self._handler_map = handler_map", "def __initHandlersUser(self):\n handlers = {}\n handlers['WRITE_FILE'] = self.write_file\n handlers['READU_FILE'] = self.read_file\n handlers['DELET_FILE'] = self.delete_file\n handlers['STATUS_SRV'] = self.status_server\n handlers['RSYNC_FILE'] = self.rsync_file\n handlers['WSYNC_FILE'] = self.wsync_file\n return handlers", "def event0():\n header(0, 0)\n end_if_client()\n\n if DEBUG.GET_MASTER_KEY:\n flag.disable(50004066)\n item.award_item_to_host_only(4073)\n if DEBUG.HAS_RUSTBONE:\n flag.enable(EVENT.HasBonerust)\n if DEBUG.SPEED_UP_PLAYER:\n chr.set_special_effect(CHR.Player, 2370)\n if DEBUG.GET_CHTHONIC_SPARK:\n flag.disable(50001510) # Thrall Spark drop flag.\n item.award_item_to_host_only(ITEMLOT.ThrallReward)\n\n for flag_id in (760, 762, 765):\n flag.disable(flag_id)\n\n # Display a message after an event flag is enabled (with optional delay).\n run_event_with_slot(260, 0, args=(11810000, 10010600, 0), arg_types='iif') # Arrival in Lordran.\n run_event_with_slot(260, 1, args=(257, 10010610, 0), arg_types='iif') # Rite of Kindling.\n run_event_with_slot(260, 2, args=(EVENT.ObtainedChthonicSpark, 10010620, 0), arg_types='iif') # Chthonic Spark.\n run_event_with_slot(260, 3, args=(11412053, 10010621, 0), arg_types='iif') # Chthonic Spark stolen.\n run_event_with_slot(260, 4, args=(EVENT.LordvesselReceived, TEXT.LordvesselWarpUnlocked, 0), arg_types='iif')\n\n # Assorted events (see documentation). Mostly monitoring states. 
710 monitors warping ability.\n for event_id in (761, 763, 290, 701, 702, 717, 718,\n 706, 740, 750, 752, 757, 758, 759,\n 754, 770, 772, 730, 731, 766, 710):\n run_event(event_id)\n\n # Monitor Lord Souls/Shard possession. Doesn't include Dark Remnant.\n run_event_with_slot(711, 0, args=(2500, 711)) # Gravelord Nito\n run_event_with_slot(711, 1, args=(2501, 712)) # Bed of Chaos\n run_event_with_slot(711, 2, args=(2502, 713)) # Four Kings\n run_event_with_slot(711, 3, args=(2503, 714)) # Seath the Scaleless\n\n run_event(715) # Player has Gwyn's Soul.\n run_event(716) # Player has Sunlight Spear.\n run_event(11512000) # (New) Player has been given Lordvessel.\n\n # Monitor Estus upgrade level.\n for slot, args in enumerate(zip(range(202, 215, 2), range(203, 216, 2))):\n run_event_with_slot(8131, slot, args)\n\n run_event(819) # Monitor repair box sync.\n\n run_event(2540) # (New) Ring of the Embraced punishes you if removed.\n run_event(2541) # (New) Ring of Temptation activates after 15 seconds.\n run_event(2542) # (New) Ring of Temptation takes your souls and breaks if you die.\n run_event(2543) # (New) Ring of the Evil Eye kill reward.\n run_event(2544) # (New) Twilight Ring effect starts and ends.\n run_event(2545) # (New) Twilight Ring effect waxes and wanes.\n run_event(2546) # (New) Bond to Beyond has a 5% chance of giving one soft humanity.\n run_event(2547) # (New) Contract and heal Bonerust (11302050)\n run_event(2548) # (New) Kills heal with Nahr Alma pact.\n run_event(2549) # (New) Ring of Condemnation recharges.\n run_event(11502020) # (New) Lithic Witness event.\n run_event(11502023) # (New) Beyond Witness event.\n\n # (New) Toggles availability of full bonfire menu based on Spark possession.\n run_event(11512005)\n\n # BOSS DROPS\n\n for slot, args in enumerate((\n # boss_dead_flag, immediate_item_lot, delayed_item_lot_1, delayed_item_lot_2\n (2, ITEMLOT.AriamisReward, 9020, 9030),\n (11010901, ITEMLOT.TaurusDemonReward, 9000, 9030),\n (11010904, ITEMLOT.ProfaneImageReward, 0, 0),\n (3, ITEMLOT.BellGargoylesReward, 9020, 0),\n (4, ITEMLOT.CrossbreedPriscillaReward, 9020, 0),\n (11200900, ITEMLOT.MoonlightButterflyReward, 9000, 0),\n (11200901, ITEMLOT.GravestalkersReward, 9030, 0),\n (5, ITEMLOT.AbyssArtoriasReward, 9000, 0),\n (6, ITEMLOT.PinwheelReward, 9000, 9030),\n (7, ITEMLOT.NitoReward, 9000, 9030),\n (9, ITEMLOT.QuelaagReward, 9020, 0),\n (11410902, ITEMLOT.CeaselessDischargeReward, 9000, 9030),\n (11412055, ITEMLOT.JeremiahReward, 9000, 0),\n (11410901, ITEMLOT.CentipedeDemonReward, 9000, 9030),\n (10, ITEMLOT.BedOfChaosReward, 9000, 9030),\n (11, ITEMLOT.SensGolemReward, 9000, 0),\n (11510900, ITEMLOT.GwyndolinReward, 0, 0),\n (11510901, ITEMLOT.JareelReward, 0, 0),\n (11510902, ITEMLOT.OrnsteinReward, 9000, 0),\n (11510903, ITEMLOT.SmoughReward, 9000, 0),\n (11012012, ITEMLOT.ThrallReward, 0, 0),\n (13, ITEMLOT.FourKingsReward, 9010, 0),\n (14, ITEMLOT.SeathReward, 9000, 0),\n (11800001, ITEMLOT.GwynCinderReward, 0, 0),\n (16, ITEMLOT.AsylumDemonReward, 9000, 0),\n (11810901, ITEMLOT.StrayDemonReward, 9000, 9030),\n (11810902, ITEMLOT.AsylumTyrantReward, 9000, 9030),\n (11210000, ITEMLOT.SanctuaryGuardianReward, 9000, 0),\n (11210001, ITEMLOT.ArtoriasReward, 0, 0),\n (11212006, ITEMLOT.ManusReward, 9040, 0),\n (11210004, ITEMLOT.KalameetReward, 0, 0),\n (11212008, ITEMLOT.TwilightVagrantReward, 0, 0),\n (11512201, ITEMLOT.GwynLightReward, 0, 0),\n )):\n run_event_with_slot(1950, slot, args)\n\n # (New) Monitor Velka's pact. 
(1910 is enabled in Firelink Shrine.)\n run_event(1915) # Monitor pact breaking.\n run_event(1916) # Monitor Seath punishment.\n run_event(1917) # Monitor Nito punishment.\n run_event(1918) # Monitor Jeremiah punishment.\n\n # (New) Monitor challenge pacts.\n run_event(1900) # Kremmel.\n run_event(1901) # Zandroe.\n run_event(1902) # Caitha.\n run_event(1903) # Nahr Alma.\n run_event(1904) # Quella permanent Abyss warp.\n run_event(1905) # Monitor Etched Ring removal and curse player (non-Quella).\n run_event(1906) # Quella ring removal.\n\n run_event(1920) # (New) Return Xanthous Crown on next load when dropped. Uses 1921.\n run_event(1922) # (New) Warp to special Painted World event when Soul of Ariamis is consumed.\n run_event(1923) # (New) Award Chaos Fire Whip when Soul of the Exile is consumed.\n run_event(1924) # (New) Skeletons in Tomb go back to rest when you load a map other than Tomb or Catacombs.\n run_event(1925) # (New) Manages Dark Ember damage boost stacks.\n run_event(11025400) # (New) Manages Ruinous Hand kill charge-up.\n run_event(1926) # (New) Trigger Ruinous Hand explosion at full charge.\n run_event(1927) # (New) HP penalty for being hollow (25%).\n\n run_event(2510) # (New) Sable Rune control.\n run_event(2511) # (New) Lustrous Rune control.\n run_event(2512) # (New) Wraith Rune control.\n run_event(2513) # (New) Scintilla Rune control.\n run_event(2514) # (New) Omphalic Rune control.\n run_event(2515) # (New) Omphalic Rune kill counter and death trigger.\n run_event(2516) # (New) Pale White Rune control.\n run_event(2517) # (New) Reaper's Rune trigger.\n run_event(2518) # (New) Reaper's Rune kill counter.\n run_event(2519) # (New) Rhythm Rune triggers.\n run_event(2520) # (New) Ransackers Rune trigger.\n # (New) Ransackers Rune item map checks. (2521-2530) (No Kiln, no Asylum.)\n for slot, (block, area) in enumerate(((10, 0), (10, 1), (10, 2), (11, 0), (12, 0), (12, 1),\n (13, 0), (13, 1), (13, 2), (14, 0), (14, 1), (15, 0),\n (15, 1), (16, 0), (17, 0))):\n args = tuple([block, area] + [50000 + 100 * slot + 10 * i for i in range(0, 10)])\n run_event_with_slot(2521, slot, args=args, arg_types='BBiiiiiiiiii')\n \n # Activate Runes.\n for slot, rune in enumerate(range(9)):\n run_event_with_slot(2600, slot, args=(90 + rune, 11025350 + rune))\n\n # Monitor availability of bonfire options\n for slot, args in enumerate(zip(range(2600, 2610), range(250, 260))):\n run_event_with_slot(250, slot, args)\n\n # Remove Embers from inventory when given to blacksmiths. These are removed aggressively and repeatedly!\n for slot_args in zip((0, 1, 2, 6, 7, 8, 9, 10, 12),\n zip((350, 351, 352, 356, 357, 358, 359, 360, 362),\n (800, 801, 802, 806, 807, 808, 809, 810, 812))):\n run_event_with_slot(350, slot_args[0], slot_args[1])\n\n # (NEW) Chthonic Spark version of the above event, which also requires Vamos to be alive.\n run_event_with_slot(363, 0, args=(363, 813))\n\n # Monitor reinforcement material possession.\n for slot, args in enumerate(zip(range(1000, 1131, 10), range(780, 794))):\n run_event_with_slot(780, slot, args)\n\n # Monitor covenant membership.\n for slot, args in enumerate(zip(range(0, 10), range(850, 860))):\n run_event_with_slot(870, slot, args)\n\n # Covenant joining events. 
(args = trigger_flag, player_animation, rotation_target, looping_animation)\n for slot, args in enumerate(zip(range(840, 850), (7905, 7905, 7905, 7905, 7898, 7905, 7905, 7913, 7905, 7905),\n (6370, 6072, 6080, 6001, 10000, 6340, 6341, 10000, 6380, 1400700),\n (-1, -1, -1, -1, 7896, -1, -1, 7911, -1, -1))):\n run_event_with_slot(840, slot, args)\n\n # Monitor NG+ level. Uses flags 690 (NG) to 705 (NG+15).\n run_event_with_slot(690, 0, args=(600, 4, 16, 1175))\n\n run_event(719) # Monitor possession of any spell.\n run_event(720) # Monitor possession of any pyromancy.\n\n # Monitor whether shops are sold out.\n # NOTE: This all suggests that shopkeeper flags are in the 7000 range for their area. Avoid!\n run_event(721) # Big Hat Logan in Duke's Archives.\n run_event(722) # Quelana of Izalith.\n run_event(723) # Griggs at Firelink Shrine.\n run_event(724) # Male Undead Merchant. (I don't think this does anything.)\n run_event(725) # Checks if you've bought 2+ items from Logan in Duke's Archives.\n run_event(726) # Checks if you've bought 2+ items from Ingward in New Londo Ruins.\n run_event(727) # Checks flags in Ash Lake / Great Hollow. Not sure who this is.\n\n run_event(745) # Cut Shiva questline I think.\n run_event(818) # Black Eye Orb quivers in Anor Londo.\n run_event(810) # Monitor possession of Lautrec's Black Eye Orb.\n # Lautrec frees himself from New Londo if both item flags below are enabled.\n run_event_with_slot(812, 0, args=(51400150,)) # Monitor possession of Blighttown Fire Keeper Soul (moved).\n run_event_with_slot(812, 1, args=(51010050,)) # Monitor possession of Undead Parish Humanity (still on altar).\n run_event(822) # Disable flag 830 half a second after leaving the Kiln. (Frampt pickup.)\n run_event(823) # Disable flag 831 half a second after leaving the Kiln. (Kaathe pickup.)\n\n # (New) Monitor dead NPCs for Twilight Vagrant. 
Counts friendly or hollow death, unless noted otherwise.\n for slot, npc_dead_flag in enumerate((\n 1073, # 2051: Oscar (friendly) (must be enabled in tutorial)\n 1097, # 2052: Big Hat Logan\n 1115, # 2053: Griggs\n 1005, # 2054: Solaire (note this won't trigger if he is killed when Hollow, unlike other NPCs)\n 1254, # 2055: Laurentius\n 1462, # 2056: Crestfallen Warrior\n 1575, # 2057: Lautrec\n 1604, # 2058: Shiva\n 1628, # 2059: Patches\n 1899, # 2060: Havel\n 1864, # 2061: Ciaran (in Oolacile and/or with Nito)\n 1823, # 2062: Hawkeye Gough\n 5, # 2063: Artorias (in Darkroot)\n )):\n run_event_with_slot(11212050, slot + 1, args=(npc_dead_flag,))\n\n # (New) Monitor Tomb of the Giants presence to send Giant Skeletons back to sleep.\n run_event(11310201)\n\n # (New) Monitor picking up Chthonic Spark for the first time to display message.\n run_event(11512004)\n\n # EVENT REWARDS (covenants, storylines)\n\n run_event_with_slot(910, 0, args=(11400591, 1280)) # Joining Chaos Servants.\n run_event_with_slot(911, 0, args=(11010591, 1000, 1), arg_types='iiB')\n run_event_with_slot(911, 1, args=(11510590, 1010, 1), arg_types='iiB')\n run_event_with_slot(911, 2, args=(11700591, 1020, 1), arg_types='iiB')\n run_event_with_slot(911, 3, args=(11000591, 1030, 1), arg_types='iiB')\n run_event_with_slot(911, 4, args=(11400590, 1040, 1), arg_types='iiB')\n run_event_with_slot(911, 5, args=(11410594, 1050, 1), arg_types='iiB')\n run_event_with_slot(911, 6, args=(11020594, 1060, 1), arg_types='iiB')\n run_event_with_slot(911, 7, args=(11020595, 1070, 1), arg_types='iiB')\n run_event_with_slot(911, 8, args=(11810590, 1082, 1), arg_types='iiB')\n run_event_with_slot(911, 9, args=(11810591, 1080, 1), arg_types='iiB')\n run_event_with_slot(911, 10, args=(11510592, 1090, 1), arg_types='iiB')\n run_event_with_slot(911, 11, args=(11600592, 1100, 1), arg_types='iiB')\n run_event_with_slot(911, 12, args=(11020602, 1110, 1), arg_types='iiB')\n run_event_with_slot(911, 13, args=(11010594, 1120, 1), arg_types='iiB')\n run_event_with_slot(911, 14, args=(11010595, 1130, 1), arg_types='iiB')\n run_event_with_slot(911, 15, args=(11020599, 1140, 1), arg_types='iiB')\n run_event_with_slot(911, 16, args=(11020607, 1150, 1), arg_types='iiB')\n run_event_with_slot(911, 17, args=(11200592, 1160, 1), arg_types='iiB')\n run_event_with_slot(911, 18, args=(11200593, 1170, 1), arg_types='iiB')\n run_event_with_slot(911, 19, args=(11200594, 1180, 1), arg_types='iiB')\n run_event_with_slot(911, 20, args=(11300590, 1190, 1), arg_types='iiB')\n run_event_with_slot(911, 21, args=(11300591, 1200, 1), arg_types='iiB')\n run_event_with_slot(911, 22, args=(11310590, 1210, 1), arg_types='iiB')\n run_event_with_slot(911, 23, args=(11310592, 1220, 1), arg_types='iiB')\n run_event_with_slot(911, 24, args=(11310593, 1230, 1), arg_types='iiB')\n run_event_with_slot(911, 25, args=(11310594, 1240, 1), arg_types='iiB')\n run_event_with_slot(911, 26, args=(11320590, 1250, 1), arg_types='iiB')\n run_event_with_slot(911, 27, args=(11320581, 1260, 1), arg_types='iiB')\n run_event_with_slot(911, 28, args=(11320593, 1270, 1), arg_types='iiB')\n run_event_with_slot(911, 29, args=(11400592, 1290, 1), arg_types='iiB')\n run_event_with_slot(911, 30, args=(11400594, 1300, 1), arg_types='iiB')\n run_event_with_slot(911, 31, args=(11400596, 1310, 1), arg_types='iiB')\n run_event_with_slot(911, 32, args=(11400597, 1320, 1), arg_types='iiB')\n run_event_with_slot(911, 33, args=(11400598, 1330, 1), arg_types='iiB')\n run_event_with_slot(911, 34, args=(11400599, 
1340, 1), arg_types='iiB')\n run_event_with_slot(911, 35, args=(11510595, 1350, 1), arg_types='iiB')\n run_event_with_slot(911, 36, args=(11510596, 1360, 1), arg_types='iiB')\n run_event_with_slot(911, 37, args=(11510597, 1370, 1), arg_types='iiB')\n run_event_with_slot(911, 38, args=(11600594, 1380, 1), arg_types='iiB')\n run_event_with_slot(911, 39, args=(11600595, 1390, 1), arg_types='iiB')\n run_event_with_slot(911, 40, args=(11600596, 1400, 1), arg_types='iiB')\n run_event_with_slot(911, 41, args=(11010598, 1410, 0), arg_types='iiB')\n run_event_with_slot(911, 42, args=(11210590, 1500, 1), arg_types='iiB')\n run_event_with_slot(911, 43, args=(11210593, 1510, 1), arg_types='iiB')\n run_event_with_slot(911, 44, args=(11210594, 1520, 1), arg_types='iiB')\n run_event_with_slot(911, 45, args=(11600580, 1401, 1), arg_types='iiB')\n run_event_with_slot(911, 46, args=(11600581, 1402, 1), arg_types='iiB')\n run_event_with_slot(911, 47, args=(11600582, 1403, 1), arg_types='iiB')\n run_event_with_slot(911, 48, args=(11600583, 1404, 1), arg_types='iiB')\n run_event_with_slot(890, 0, args=(11310580, 1221, 1), arg_types='iiB') # 911 ran out of slots (up against 960).\n run_event_with_slot(890, 1, args=(11510580, 1361, 1), arg_types='iiB')\n run_event_with_slot(890, 2, args=(11510581, 1371, 1), arg_types='iiB')\n run_event_with_slot(890, 3, args=(11320592, 1261, 1), arg_types='iiB')\n\n # DIRECT NPC DEATH REWARDS (960-969)\n run_event_with_slot(960, 0, args=(1315, 6180, 1100)) # Ingward (Key to the Seal)\n run_event_with_slot(960, 1, args=(1402, 6230, 6230)) # Undead Merchant (Orange Soapstone)\n # run_event_with_slot(960, 2, args=(1198, 6080, 1140)) # Petrus (Lift Chamber Key) (dies before killing Rhea)\n # run_event_with_slot(960, 3, args=(1196, 6080, 1140)) # Petrus (Lift Chamber Key) (dies after killing Rhea)\n\n # NEW GAME PLUS: Bring covenant ranks up to date, and prevent gifts from being re-awarded.\n run_event_with_slot(8200, 0, args=(3, 5500, 50000120, 11010594))\n run_event_with_slot(8200, 1, args=(3, 5510, 50000130, 11010595))\n run_event_with_slot(8200, 2, args=(2, 103, 50000160, 11200592))\n run_event_with_slot(8200, 3, args=(3, 240, 50000170, 11200593))\n run_event_with_slot(8200, 4, args=(2, 124, 50000180, 11200594))\n run_event_with_slot(8200, 5, args=(0, 453000, 50000220, 11310592))\n run_event_with_slot(8200, 6, args=(3, 5100, 50000225, 11310580))\n run_event_with_slot(8200, 7, args=(3, 5110, 50000230, 11310593))\n run_event_with_slot(8200, 8, args=(3, 114, 50000265, 11320581))\n run_event_with_slot(8200, 9, args=(3, 377, 50000260, 11320592))\n run_event_with_slot(8200, 10, args=(3, 378, 50000270, 11320593))\n run_event_with_slot(8200, 11, args=(3, 4500, 50000310, 11400596))\n run_event_with_slot(8200, 12, args=(3, 4520, 50000320, 11400597))\n run_event_with_slot(8200, 13, args=(3, 4510, 50000330, 11400598))\n run_event_with_slot(8200, 14, args=(2, 130, 50000350, 11510595))\n run_event_with_slot(8200, 15, args=(3, 113, 50000360, 11510596))\n run_event_with_slot(8200, 16, args=(2, 102, 50000365, 11510580))\n run_event_with_slot(8200, 17, args=(3, 5910, 50000370, 11510597))\n run_event_with_slot(8200, 18, args=(0, 1366000, 50000375, 11510581))\n run_event_with_slot(8200, 19, args=(0, 904000, 50000380, 11600594))\n run_event_with_slot(8200, 20, args=(3, 102, 50000390, 11600595))\n run_event_with_slot(8200, 21, args=(0, 210000, 50000400, 11600596))\n run_event_with_slot(8200, 22, args=(1, 40000, 50000410, 11600580))\n run_event_with_slot(8200, 23, args=(1, 41000, 50000420, 11600581))\n 
run_event_with_slot(8200, 24, args=(1, 42000, 50000430, 11600582))\n run_event_with_slot(8200, 25, args=(1, 43000, 50000440, 11600583))\n\n # Same as above, but for other special rewards.\n run_event_with_slot(8300, 0, args=(ItemType.good, 100, 50000000)) # White Sign Soapstone\n run_event_with_slot(8300, 1, args=(ItemType.good, 101, 51100330)) # Red Sign Soapstone\n run_event_with_slot(8300, 2, args=(ItemType.good, 102, 50000390)) # Red Eye Orb\n run_event_with_slot(8300, 3, args=(ItemType.good, 106, 11017020)) # Orange Guidance Soapstone\n run_event_with_slot(8300, 4, args=(ItemType.good, 108, 11607020)) # Book of the Guilty\n run_event_with_slot(8300, 5, args=(ItemType.good, 112, 11407080)) # Servant Roster\n run_event_with_slot(8300, 6, args=(ItemType.good, 2508, 11007010)) # Unknown - seems unused.\n run_event_with_slot(8300, 7, args=(ItemType.good, 2508, 11007010)) # Unknown - seems unused.\n run_event_with_slot(8300, 8, args=(ItemType.good, 2508, 11007010)) # Unknown - seems unused.\n run_event_with_slot(8300, 9, args=(ItemType.good, 2508, 11007010)) # Unknown - seems unused.\n\n # NOTE: Flag 8310 onwards is used for NPC humanity registration.\n\n # Same as above for DLC items.\n run_event_with_slot(8090, 0, args=(ItemType.good, 510, 11217010))\n run_event_with_slot(8090, 1, args=(ItemType.good, 511, 11217020))\n run_event_with_slot(8090, 2, args=(ItemType.good, 512, 11217030))\n run_event_with_slot(8090, 3, args=(ItemType.good, 513, 11217040))\n run_event_with_slot(8090, 4, args=(ItemType.good, 514, 11217050))\n\n # (New) Same as above, but for Runes and other new items.\n run_event_with_slot(11022100, 0, args=(ItemType.good, 900, 51010020))\n run_event_with_slot(11022100, 1, args=(ItemType.good, 901, 51510690))\n run_event_with_slot(11022100, 2, args=(ItemType.good, 902, 51200120))\n run_event_with_slot(11022100, 3, args=(ItemType.good, 903, 51410030))\n run_event_with_slot(11022100, 4, args=(ItemType.good, 904, 51810080))\n run_event_with_slot(11022100, 5, args=(ItemType.good, 905, 51700020))\n run_event_with_slot(11022100, 6, args=(ItemType.good, 906, 51300220))\n run_event_with_slot(11022100, 7, args=(ItemType.good, 907, 51300221))\n run_event_with_slot(11022100, 8, args=(ItemType.good, 908, 51210290))\n run_event_with_slot(11022100, 9, args=(ItemType.ring, 133, 50000650)) # Velka gift (Ring of Condemnation)\n run_event_with_slot(11022100, 10, args=(ItemType.ring, 124, 50001780)) # Twilight Vagrant drop (Twilight Ring)\n run_event_with_slot(11022100, 11, args=(ItemType.ring, 105, 50004900)) # Lithic Bond\n run_event_with_slot(11022100, 12, args=(ItemType.ring, 107, 50004910)) # Serous Bond\n run_event_with_slot(11022100, 13, args=(ItemType.ring, 106, 50004920)) # Empyrean Bond\n run_event_with_slot(11022100, 14, args=(ItemType.ring, 108, 50004930)) # Bond to Beyond\n # Leaving slots 11022100-11022119 dedicated to this.\n\n # (NEW) Remove some additional new items in NG+.\n run_event_with_slot(11022120, 0, args=(ItemType.ring, 152)) # Ashen Ring\n run_event_with_slot(11022120, 1, args=(ItemType.ring, 151)) # Gwynevere's Ring\n run_event_with_slot(11022120, 2, args=(ItemType.good, 220)) # Silver Pendant\n run_event_with_slot(11022120, 3, args=(ItemType.armor, 294000)) # Xanthous Crown (true)\n run_event_with_slot(11022120, 4, args=(ItemType.ring, 149)) # Darkmoon Seance Ring", "def handle_events( events, model, arguments = DEFAULT_ARGUMENTS):\n unhandled_events = {}\n \n # pass 1 all events\n for event_id in events:\n event = events[event_id]\n \n if 
STANDOFF_EVENT_TO_SBO_MAPPING.get( event.type_lower) is None:\n # event is unknown\n logging.getLogger( \"st2sbml\").warning( \"{0} event {1} unhandled, because unknown\".format( get_path( arguments), event))\n elif event.type_lower == \"pathway\":\n pass # do nothing for pathways (entities have already been added)\n # handle localization (special handling)\n elif event.type_lower in [ \"localization\", \"transport\"]: \n handle_localization( event, model, arguments = arguments);\n # handle regulation events (special handling)\n elif event.type_lower in [\"regulation\", \"positive_regulation\", \"negative_regulation\", \"activation\", \"inactivation\", \"catalysis\"]:\n unhandled_event = handle_regulation( event, model, arguments = arguments);\n if not unhandled_event is None:\n unhandled_events.update( unhandled_event);\n elif event.type_lower in [\"gene_expression\", \"transcription\", \"translation\"]:\n handle_gene_expression( event, model, arguments = arguments);\n # not all roles are entities\n elif not all( [ isinstance( role[1] , parse_standoff.EntityTrigger) for role in event.roles]):\n logging.getLogger( \"st2sbml\").warning( \"{0} event {1} unhandled. Some roles are events, which is not allowed for this event type\".format( get_path( arguments), event))\n # everything else: Conversion, Acetylation, Deacetylation, Demethylation, Dephosphorylation, Deubiquitination, Methylation, Phosphorylation, Ubiquitination\n else: \n # add reaction\n reaction = add_reaction( event, model, arguments = arguments);\n \n # handle products -> add as product\n for product in event.get_roles( \"product\"):\n add_product( product.id, reaction, model, arguments = arguments);\n # handle themes -> add as reactants\n for theme in event.get_roles( \"theme\"):\n add_reactant( theme.id, reaction, model, arguments = arguments);\n # handle comp -> add as reactants\n for comp in event.get_roles( \"complex\"):\n add_reactant( comp.id, reaction, model, arguments = arguments);\n # handle Participant -> add as product\n for comp in event.get_roles( \"participant\"):\n add_reactant( comp.id, reaction, model, arguments = arguments);\n # handle causes -> add as modifiers\n for cause in event.get_roles( \"cause\"):\n add_modifier( cause.id, reaction, model, arguments = arguments);\n for site in event.get_roles( \"site\"):\n add_note( \"Site: {0}\".format( site.text), reaction, arguments = arguments);\n\n # check if there are any unhandled roles\n for unhandled_role in set([role[0] for role in event.roles]).difference([\"theme\", \"cause\", \"product\", \"site\", \"participant\", \"complex\"]):\n logging.getLogger( \"st2sbml\").warning( \"{0} event {1} role {2} not handled, because unknown.\".format( get_path( arguments), event, unhandled_role))\n\n\n # pass 2 all unhandled events\n for event_id in unhandled_events:\n event = events[event_id]\n unhandled_roles = unhandled_events[event_id]\n handle_unhandled_event( event, unhandled_roles, model, arguments = arguments);" ]
[ "0.55518967", "0.5541481", "0.55277866", "0.5482645", "0.54119766", "0.538342", "0.5349072", "0.5295835", "0.5283326", "0.5205757", "0.51651305", "0.5116842", "0.5108721", "0.5076696", "0.5072107", "0.5070965", "0.5070048", "0.5055877", "0.5042442", "0.5040995", "0.50364155", "0.5031941", "0.5029776", "0.50187767", "0.5009601", "0.499689", "0.4995927", "0.49761188", "0.49698842", "0.49660116" ]
0.66373867
0
A decorator which binds its wrapped function, as via bind, to the event which matches its name.
def event(self, fn):
    self.bind({fn.__name__: fn})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binds(**binds):\n def decorate(func):\n function = to_function(func)\n setattr(function, BINDS, binds)\n return function\n return decorate", "def bindEvent(obj, name, method):\n setattr(obj, name,\n types.MethodType(method, obj, obj.__class__))", "def _event_bridge(self, dom, event_name):\n\n def wrapper_o(func):\n\n ori_func = func\n\n # Wrap the function for value\n @functools.wraps(func)\n def dom_wrapped(sender, evt):\n ori_func(HotDOM(sender, self.browser), evt)\n func = dom_wrapped\n\n new_thread = True\n if new_thread:\n # Wrap with thread\n @functools.wraps(ori_func)\n def thread_wrapped(*args, **kwargs):\n HandlerThread(dom_wrapped).handler_with_args(*args, **kwargs)\n func = thread_wrapped\n\n _dom = dom\n if type(dom) is str:\n _dom = self._get_dom_by_selector(dom)\n _dom.bind_event(event_name, func)\n\n return func\n\n return wrapper_o", "def _bind_closure(f, **kwargs):\n return lambda: f(**kwargs)", "def listen(self, event_type):\n def decorator(func):\n if func not in self.event_subscribers[event_type]:\n kwargs = {event_type: func}\n self.bind(**kwargs)\n self.event_subscribers[event_type].add(func)\n return func\n return decorator", "def event(self, event_type):\n\n def _decorator(callback):\n return self.bind_event(event_type, callback)\n\n return _decorator", "def decorator(func):\n self.subscribe(func, event, *events)\n return func", "def bind(self, _target: aws_cdk.aws_lambda.IFunction) -> None:\n ...", "def global_event(event_name):\n def global_event_wrapper(func):\n global_events.setdefault(event_name, []).append(func)\n\n functools.wraps(func)\n async def global_event_func(*args, **kwargs):\n func(*args, **kwargs)\n return global_event_func\n\n return global_event_wrapper", "def bind(self, sequence=None, func=None, add=None):\n return self._widget_bind(sequence, func, add, internal=False)", "def on(event: str) -> Callable[[Callable], Callable]:\n\n def decorator(method: Callable) -> Callable:\n _handlers.append(Handler(event=event, method=method))\n return method\n\n return decorator", "def notify_decorator(name, fn):\n return fn", "def hook(self, name):\r\n def wrapper(func):\r\n self.hooks.add(name, func)\r\n return func\r\n return wrapper", "def on(self, event, f=None):\n def wrapped(*args, **kwargs):\n return self.pool.apply_async(f, args, kwargs)\n\n w = super().on(event, wrapped)\n # Store mapping from function to wrapped function\n self.wrappers[event].append((f, wrapped))\n return w", "def wrapped(signal_name, sender=dispatcher.Anonymous, safe=False):\n @wrapt.decorator\n def signal_wrapped(wrapped_func, _, args, kwargs):\n def signal_wrapper(*args, **kwargs):\n with wrap(signal_name, sender, safe):\n return wrapped_func(*args, **kwargs)\n\n return signal_wrapper(*args, **kwargs)\n\n return signal_wrapped", "def bind(self, _target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [_target])", "def bind_method(instance, func, as_name=None):\n if as_name is None:\n as_name = func.__name__\n\n bound_method = func.__get__(instance, instance.__class__)\n setattr(instance, as_name, bound_method)\n\n return bound_method", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [target])", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [target])", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [target])", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return 
jsii.invoke(self, \"bind\", [target])", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [target])", "def bind(self, target: aws_cdk.aws_lambda.IFunction) -> None:\n return jsii.invoke(self, \"bind\", [target])", "def _wrap(self, name):\n attr = self.pget(name)\n for cls, handler in WRAP_HANDLERS:\n if isinstance(attr, cls):\n return handler(self, name)\n\n # immediately delegate to self.pboj\n return self._delegate(name)", "def doublewrap(function):\n @functools.wraps(function)\n def decorator(*args, **kwargs):\n if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):\n return function(args[0])\n else:\n return lambda wrapee: function(wrapee, *args, **kwargs)\n return decorator", "def doublewrap(function):\n\n @functools.wraps(function)\n def decorator(*args, **kwargs):\n if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):\n return function(args[0])\n else:\n return lambda wrapee: function(wrapee, *args, **kwargs)\n\n return decorator", "def doublewrap(function):\n @functools.wraps(function)\n def decorator(*args, **kwargs):\n if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):\n return function(args[0])\n else:\n return lambda wrapee: function(wrapee, *args, **kwargs)\n return decorator", "def listen(eventType):\n def _decoration(fcn):\n fcn.listen = True\n fcn.eventType = eventType\n return fcn\n return _decoration", "def decorator(d):\n def _d(fn):\n return update_wrapper(d(fn), fn)\n update_wrapper(_d, d)\n return _d", "def bind(self, arg_names, **bound_params):\n bound_params=bound_params.copy()\n covered_args=set(bound_params)\n covered_args.update(arg_names)\n uncovered_mand_args=self.get_mandatory_args().difference(covered_args)\n if len(uncovered_mand_args)>0:\n raise TypeError(\"mandatory parameters not supplied: {0}\".format(list(uncovered_mand_args)))\n def bound_call(*args, **call_params):\n params=bound_params.copy()\n params.update(call_params)\n params.update(zip(arg_names,args))\n return self(**params)\n return bound_call\n #sig=FunctionSignature(arg_names=arg_names,kwarg_name=\"kwargs\")\n #return sig.wrap_function(bound_call)" ]
[ "0.66477215", "0.66141915", "0.6544321", "0.64554435", "0.643344", "0.6340396", "0.6323057", "0.6308433", "0.61923933", "0.6147095", "0.6073753", "0.601856", "0.60135686", "0.59841114", "0.5885072", "0.58757806", "0.5850867", "0.577973", "0.577973", "0.577973", "0.577973", "0.577973", "0.577973", "0.5773705", "0.57669353", "0.5765241", "0.5748199", "0.56812584", "0.5666809", "0.565259" ]
0.6808041
0
Binds a series of keys for the given 'mode'. Keys may be specified as a dict or as a sequence of tuple values and strings. In the latter case, documentation may be interspersed with key bindings. Any value in the sequence which is not a tuple begins a new key group, with that value as a description. A tuple with two values is considered a key-value pair, where the value is the handler for the named key. A three-valued tuple is considered a key-description-value tuple, with the same semantics as above. Each key binding is interpolated with the values of 'defs', as if processed by (key % self.defs).
def bind(self, mode='main', keys=(), import_={}):
    self._add_mode(mode)
    mode = self.modes[mode]
    group = None

    def add_desc(key, desc):
        if group not in mode['desc']:
            mode['desc'][group] = []
            mode['groups'].append(group)
        if key not in mode['desc'][group]:
            mode['desc'][group].append(key);

    if isinstance(keys, dict):
        keys = keys.iteritems()
    for obj in keys:
        if isinstance(obj, tuple) and len(obj) in (2, 3):
            if len(obj) == 2:
                key, val = obj
                desc = ''
            elif len(obj) == 3:
                key, desc, val = obj
            mode['keys'][key] = val
            add_desc(key, desc)
            val.__doc__ = str(desc)
        else:
            group = obj

    def wrap_import(mode, key):
        return lambda k: self.modes[mode]['keys'][key](k)
    for k, v in flatten((v, k) for k, v in import_.iteritems()):
        mode['import'][k % self.defs] = wrap_import(v, k)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bind_modifiers(widget, event:Callable, button='Button-1',\n modes=frozendict({'Shift': KeyModes.SHIFT, 'Control': KeyModes.CONTROL, 'Alt': KeyModes.ALT, })):\n widget.bind(button, event)\n for modifier, keymode in modes.items():\n # We must provide 'keymode' as a default argument in the lambda expression (as method parameters are\n # only evaluated once), otherwise it will end up referencing the last value in the modes dictionary.\n widget.bind(f\"<{modifier}-{button}>\", lambda bind_event, mode=keymode: event(bind_event, mode))", "def bind_keys_to_modes(self):\n for i,button in enumerate(self.mode_select.buttons, 1):\n key = getattr(pg, \"K_{}\".format(i))\n button.bind_key(key)", "def setKeyBindings(self, keyBindings):\n bindings = {}\n for keyseq,binding in keyBindings.items():\n seq = []\n for subkeyseq in keyseq.split(','):\n a = []\n for key in subkeyseq.split('+'):\n key = key.strip()\n key = key[0].upper() + key[1:].lower()\n if key in self.KeyAliases:\n key = self.KeyAliases[key]\n a.append(key)\n state = QtCore.Qt.NoModifier\n for key in a[:-1]:\n state |= eval(\"QtCore.Qt.%sModifier\" % key)\n seq.append((int(state),eval(\"QtCore.Qt.Key_%s\"%a[-1])))\n\n b = bindings\n for e in seq[:-1]:\n if e in b:\n b = b[e]\n else:\n b[e] = {}\n b = b[e]\n b[seq[-1]] = self.translateToBindingName(binding)\n \n self.AKH_keyBindings = bindings\n self.AKH_keyBindingsWaiting = {}", "def simple_key_binder(mod, keynames=None):\n\n def _graceful_fail_call(func, param):\n try:\n func(param)\n except KeyError:\n logger.warning('Name Bloody Group', groups[0].name)\n logger.warning('Key not found in dgroup keys: \"%s\"', str(param))\n\n def func(dgroup):\n # unbind all\n for key in dgroup.keys[:]:\n _graceful_fail_call(dgroup.qtile.ungrab_key, key)\n _graceful_fail_call(dgroup.keys.remove, key)\n\n if keynames:\n keys = keynames\n else:\n # keys 1 to 9 and 0\n keys = list(map(str, list(range(1, 10)) + [0]))\n\n # bind all keys\n for keyname, group in zip(keys, dgroup.qtile.groups):\n name = group.name\n key = Key([mod], keyname, lazy.group[name].toscreen())\n key_s = Key([mod, 'shift'], keyname, lazy.window.togroup(name))\n key_c = Key([mod, 'control'], keyname, lazy.group.switch_groups(name))\n dgroup.keys.append(key)\n dgroup.keys.append(key_s)\n dgroup.keys.append(key_c)\n dgroup.qtile.grab_key(key)\n dgroup.qtile.grab_key(key_s)\n dgroup.qtile.grab_key(key_c)\n\n return func", "def set_keybindings(self, keybindings=None):\n\n if keybindings is None:\n keybindings = []\n\n # Add key bindings.\n tbl = []\n bindings = keybindings\n for binding in keybindings:\n keyid = wx.NewId()\n self.Bind(wx.EVT_MENU, binding[2], id=keyid)\n tbl.append((binding[0], binding[1], keyid))\n\n if len(bindings):\n self.SetAcceleratorTable(wx.AcceleratorTable(tbl))", "def _set_named_args(self, **kv):\n for k in kv:\n self._body['${0}'.format(k)] = kv[k]\n return self", "def bind_key(self, key, command):\n self.key_bindings[key] = command", "def set_keywords_batch(self, mode, keywords, filenames):\n # Explicitly ruling out strings here because passing in a\n # string would lead to strange and hard-to-find errors\n if isinstance(keywords, basestring):\n raise TypeError(\"The argument 'keywords' must be \"\n \"an iterable of strings\")\n if isinstance(filenames, basestring):\n raise TypeError(\"The argument 'filenames' must be \"\n \"an iterable of strings\")\n \n params = [] \n \n kw_operation = {KW_REPLACE:\"-%s=%s\",\n KW_ADD:\"-%s+=%s\",\n KW_REMOVE:\"-%s-=%s\"}[mode]\n\n kw_params = [ kw_operation % (KW_TAGNAME, 
w) for w in keywords ]\n \n params.extend(kw_params) \n params.extend(filenames)\n logging.debug (params)\n return self.execute(*params)", "def bind(self, items={}, **kwargs):\n kwargs.update(items)\n for k, v in flatten(kwargs.iteritems()):\n if hasattr(k, 'match'):\n self.eventmatchers[k] = v\n else:\n self.events[k] = v", "def __setitem__(self, keys, value):\n\n if isinstance(keys, str):\n keys = [keys]\n\n #print(\"KEYTYPE: {0}\".format(keys))\n self.__setInDict(self.__cfg, keys, value)\n self.write(self.__cfgFile)", "def bind(self, keysym, func):\n if type(keysym) == list:\n [self.bind(key, func) for key in keysym]\n elif keysym in self.binds:\n self.binds[keysym].append(func)\n else:\n self.binds[keysym] = [func]", "def bind(self, binding_id, event_props, handler, arg_list=None, once=False):\n\n args = (arg_list,) if arg_list else ()\n self._bindings[binding_id] = (once, event_props, handler) + args", "def bind_key(self, key):\n self.key_bindings.append(key)", "def add_key(self, key_list: list) -> None:\n\n for key, funct, desc in key_list:\n # Force keys to be lowercase\n key = key.lower()\n \n self.key_functs[key] = funct\n self.key_satified[key] = False\n self.key_description[key] = desc\n self.key_values[key] = None", "def get_bindings(keysToObtain=None):\n if keysToObtain is None:\n keysToObtain = keys_to_obtain\n direct_input_keys = {}\n\n latest_bindings = get_latest_keybinds()\n bindings_tree = parse(latest_bindings)\n bindings_root = bindings_tree.getroot()\n\n for item in bindings_root:\n if item.tag in keysToObtain:\n new_key = None\n mod = None\n # Check primary\n if item[0].attrib['Device'].strip() == \"Keyboard\":\n new_key = item[0].attrib['Key']\n if len(item[0]) > 0:\n mod = item[0][0].attrib['Key']\n # Check secondary (and prefer secondary)\n if item[1].attrib['Device'].strip() == \"Keyboard\":\n new_key = item[1].attrib['Key']\n if len(item[1]) > 0:\n mod = item[1][0].attrib['Key']\n # Prepare final binding\n binding = None\n if new_key is not None:\n binding = {'pre_key': new_key}\n binding['key'] = EDKeyCodes[binding['pre_key']]\n if mod is not None:\n binding['pre_mod'] = mod\n binding['mod'] = EDKeyCodes[binding['pre_mod']]\n if binding is not None:\n direct_input_keys[item.tag] = binding\n # else:\n # logger.warning(\"get_bindings: \"+item.tag+\" = does not have a valid keyboard keybind.\")\n\n if len(list(direct_input_keys.keys())) < 1:\n return None\n else:\n return direct_input_keys", "def _build_modes_dict(self, mode_names, modes):\n last_index = 0\n mode_datas = dict()\n for mode in modes:\n mode_data = dict(mode._data)\n mode_data[\"name\"] = mode_names[\n last_index : last_index + mode_data[\"name_length\"]\n ]\n mode_datas[mode_data[\"id\"]] = mode_data\n last_index += mode_data[\"name_length\"]\n return mode_datas", "def set_binds(self,val):\r\n if val:\r\n self.bind(key_codes.EKeyUpArrow, self.up_key)\r\n self.bind(key_codes.EKeyDownArrow, self.down_key)\r\n self.bind(key_codes.EKeyLeftArrow, self.left_key)\r\n self.bind(key_codes.EKeyRightArrow, self.right_key)\r\n else:\r\n self.bind(key_codes.EKeyUpArrow, None)\r\n self.bind(key_codes.EKeyDownArrow, None)\r\n self.bind(key_codes.EKeyLeftArrow, None)\r\n self.bind(key_codes.EKeyRightArrow, None)", "def set(self, *args, **kargs):\n if len(args) == 0:\n for key in kargs.keys():\n self._set_string_value_pair(key, kargs[key])\n else:\n for element in range(0, len(args), 2):\n self._set_string_value_pair(args[element], args[element+1])", "def holders(self, keys, qualifier = None, start = 0, marker = 
None):\r\n key_map = dict([(i, start + i + 1) for i in range(0, keys)]) if isinstance(keys, int) \\\r\n else dict([(i, k) for i, k in enumerate(keys)])\r\n qualifier = qualifier or {}\r\n m = marker or self.marker()\r\n return ', '.join([qualifier.get(i, _noop)(m(key_map[i])) for i in range(0, len(key_map))])", "def update(self, *args, **kwargs):\n if args:\n if len(args) != 1:\n raise TypeError(f\"update expected at most 1 argument, got {len(args)}\")\n arg = args[0]\n if hasattr(arg, \"keys\"):\n super().update(normalize(arg, cls=self.__class__))\n else:\n try:\n for k, v in arg:\n super().update(normalize({k: v}, cls=self.__class__))\n except Exception:\n raise ValueError(\n \"dictionary update sequence element #0 has length \"\n f\"{ len(arg[0]) }; 2 is required\"\n )\n for k in kwargs:\n super().update(normalize({k: kwargs[k]}, cls=self.__class__))", "def make_keyhandler(events):\n def handler(key):\n for k in events:\n if key == simplegui.KEY_MAP[k]:\n events[k]()\n return handler", "def act_on_dict(output_names=None, input_names=None, mode='add'):\n def wrapper(func):\n assert mode in ACTING_MODES, f'mode has to be one of {ACTING_MODES}'\n # use names of return variables of func if keys to save returned values is not specified\n if output_names is None:\n provides = extract_return(func)\n else:\n provides = output_names\n\n # use argument names in case keys to get input values is not specified\n if input_names is None:\n args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations = inspect.getfullargspec(func)\n requires = (args if defaults is None else args[:len(args) - len(defaults)]) + \\\n (kwonlyargs if kwonlydefaults is None else kwonlyargs[:len(kwonlyargs) - len(kwonlydefaults)])\n uses = args + kwonlyargs\n else:\n args = input_names\n varkw = None\n kwonlyargs = []\n\n requires = args\n uses = args\n\n # define function to act on dictionary\n def inner(dictionary):\n # check that all required arguments are present\n for arg in inner.requires:\n assert arg in dictionary, \\\n f\"key '{arg}' whose value is required by function '{func.__name__}' is missing\"\n\n # apply function\n if input_names is not None:\n returns = func(*(dictionary[arg] for arg in args))\n elif varkw is not None:\n returns = func(**dictionary)\n else:\n returns = func(\n **{arg: dictionary[arg] for arg in args if arg in dictionary},\n **{kwonlyarg: dictionary[kwonlyarg] for kwonlyarg in kwonlyargs if kwonlyarg in dictionary})\n\n # add to input or construct new dict based on mode\n if mode == 'add':\n result = dictionary\n else:\n result = {}\n for name, value in zip(provides, returns):\n result[name] = value\n\n return result\n\n # add attributes to function specifying which keys are required, used, provided\n inner.requires = requires\n inner.uses = uses\n inner.provides = provides\n\n return inner\n\n if callable(output_names):\n func = output_names\n output_names = None\n return wrapper(func)\n else:\n return wrapper", "def setKey(self, key, value):\n\t\tself.keyMap[key] = value\n\n\t\tif key == \"help\" :\n\t\t\tif value == 1 :\n\t\t\t\tself.helpText.setText( \\\n\t\t\t\t\t\"arrows to move or turn\\n\" + \\\n\t\t\t\t\t\"shift-arrows to change view\\n\" + \\\n\t\t\t\t\t\"z/Z to zoom in/out, r to reset\\n\" + \\\n\t\t\t\t\t\",/. 
to slide left/right\")\n\t\t\telse :\n\t\t\t\tself.helpText.setText(\"h for help\")\n\n\t\tif value == 1 : return\n\n\t\t# special cases for releasing keys with modifiers\n\t\tif key == \"zoom-in\" :\n\t\t\tself.keyMap[\"zoom-out\"] = 0\n\t\tif key == \"left\" or key == \"right\" :\n\t\t\tself.keyMap[\"cam-left\"] = 0\n\t\t\tself.keyMap[\"cam-right\"] = 0\n\t\tif key == \"forward\" or key == \"backward\" :\n\t\t\tself.keyMap[\"cam-up\"] = 0\n\t\t\tself.keyMap[\"cam-down\"] = 0", "def read_keybinds(self):\n self.keybinds.set(self.lnp.read_keybinds())", "def setKeyCtx(*args, breakdown: bool=True, exists: bool=True, history: bool=True, image1:\n Union[AnyStr, bool]=\"\", image2: Union[AnyStr, bool]=\"\", image3: Union[AnyStr,\n bool]=\"\", name: AnyStr=\"\", q=True, query=True, e=True, edit=True,\n **kwargs)->Union[bool, Any]:\n pass", "def bind(self):\n # clear up the objects\n self.engines = {}\n for element in self.k_spec_dict.keys():\n self.engines[element] = self._call(self.k_spec_dict[element])\n self.k_spec_dict[element]['acronym'] = self.engines[element].get_acronym()", "def modes(self, modes):\n\n self._modes = modes", "def modes(self, modes):\n\n self._modes = modes", "def pgcli_bindings(vi_mode=False):\n key_binding_manager = KeyBindingManager(enable_vi_mode=vi_mode)\n\n @key_binding_manager.registry.add_binding(Keys.F2)\n def _(event):\n \"\"\"\n Enable/Disable SmartCompletion Mode.\n \"\"\"\n _logger.debug('Detected F2 key.')\n buf = event.cli.current_buffer\n buf.completer.smart_completion = not buf.completer.smart_completion\n\n @key_binding_manager.registry.add_binding(Keys.F3)\n def _(event):\n \"\"\"\n Enable/Disable Multiline Mode.\n \"\"\"\n _logger.debug('Detected F3 key.')\n buf = event.cli.current_buffer\n buf.always_multiline = not buf.always_multiline\n\n @key_binding_manager.registry.add_binding(Keys.F4)\n def _(event):\n \"\"\"\n Toggle between Vi and Emacs mode.\n \"\"\"\n _logger.debug('Detected F4 key.')\n key_binding_manager.enable_vi_mode = not key_binding_manager.enable_vi_mode\n\n @key_binding_manager.registry.add_binding(Keys.ControlSpace)\n def _(event):\n \"\"\"\n Force autocompletion at cursor.\n \"\"\"\n _logger.debug('Detected <C-Space> key.')\n event.cli.current_buffer.complete_next()\n\n return key_binding_manager", "def add_key_command(self, key, command):\n\n self.keybindings[key] = command" ]
[ "0.58895165", "0.58841205", "0.5588694", "0.52264315", "0.48822513", "0.4842393", "0.48337808", "0.48153064", "0.4735098", "0.4661002", "0.45969185", "0.45763007", "0.45294526", "0.4502729", "0.45009285", "0.4483334", "0.44517758", "0.44213554", "0.44187143", "0.4397471", "0.43790764", "0.43619782", "0.43531024", "0.43440247", "0.4329533", "0.43128294", "0.42828798", "0.42828798", "0.4279545", "0.4278588" ]
0.7402634
0
Calls a method named for the first token of 'args', with the rest of the string as its first argument. If the method doesn't exist, a trailing underscore is appended.
def _call(self, args):
    a = args.split(' ', 1)
    if a:
        getattr(self, a[0])(*a[1:])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _func_named(self, arg):\n result = None\n target = 'do_' + arg\n if target in dir(self):\n result = target\n else:\n if self.abbrev: # accept shortened versions of commands\n funcs = [func for func in self.keywords if func.startswith(arg) and func not in self.multilineCommands]\n if len(funcs) == 1:\n result = 'do_' + funcs[0]\n return result", "def method_abbreviator(arg):\n regexp = re.compile(arg)\n matches = []\n for method in methods:\n if regexp.match(method.name):\n matches.append(method.name)\n\n return matches[0] if len(matches) == 1 else arg", "def _name_from_args(func, _, params):\n return \"{}_{}\".format(func.__name__, \"_\".join(str(arg) for arg in params.args))", "def parse(self, args: typing.List[str]) -> str:\n try:\n args = self.cli_parser.parse_args(args)\n if len(vars(args)) == 1:\n return args.func()\n\n return args.func(args)\n except ArgumentParseError as err:\n return str(err)", "def get_class_name(*args):\n name = '_'.join(args)\n name = slugify(name, separator='_')\n return underscore_to_camelcase(name)", "def call(self, method, *args):\n flatcall = flatten(\n m(n=method, t=self.groupName)[[\n squish(x) for x in args if x is not None]])\n self.socket.write(flatcall + '\\0')", "def call_spec_string():\n # pylint: disable=protected-access\n frame = sys._getframe(1)\n argvals = inspect.getargvalues(frame)\n if argvals.args[0] == 'self':\n return inspect.formatargvalues(argvals.args[1:], *argvals[1:])\n else:\n return inspect.formatargvalues(*argvals)", "def __getattr__(self, method_name):\n return partial(self.exec, method_name.replace(\"_\", \" \"))", "def run(self, command, *args):\n command = command.lower()\n\n if callable(getattr(self, command, None)):\n getattr(self, command)(*args)\n return\n\n send.error_message(\"Command '{}' not found\".format(command))", "def expand_call(kargs):\n func = kargs['func']\n del kargs['func']\n out = func(**kargs)\n return out", "def dispatch(self, event, args=''):\n try:\n if event in self.events:\n self.events[event](args)\n for matcher, action in self.eventmatchers.iteritems():\n ary = matcher.match(' '.join((event, args)))\n if ary is not None:\n action(*ary)\n except Exception, e:\n try:\n traceback.print_exc(sys.stderr)\n except:\n pass", "def dispatch(s):\n splt = re.split(r'[^a-zA-Z]', s.request.slack_text, maxsplit=1)\n \n try: subcommand = splt[0]\n except: subcommand = \"\"\n \n try: parser = s._parser(splt[1])\n except: parser = s._parser(\"\")\n del splt\n\n try: run_subcommand = getattr(s, 'run_'+subcommand.lower())\n except AttributeError: return s.unknown_command(subcommand, parser)\n\n return run_subcommand(parser)\n # eg if subcommand == 'list' then we see whether there is a method `run_list`\n # is yes, it is called, with `remainder` as argument", "def call_command(self, name, args):\n try:\n cmd = self.commands[name]\n except KeyError:\n self.answer(\"Unknown command\", success=False)\n else:\n try:\n result, success = cmd(*args)\n result = result or \"\"\n self.answer(result, success=success)\n except TypeError as e:\n self.answer(\n \"Error when calling function: {}\".format(e),\n success=False,\n )", "def _name(self):\n return self.arguments[0].split('(')[0]", "def do(self, /, *args, **kwargs):\n if not args:\n raise TypeError(\"requires at least a single argument.\")\n self(*args, **kwargs)\n return args[0]", "def _call_command_method(self, name, original_method, args, kwargs):\n if threadprop.current_controller() is not self:\n action=self._direct_comm_call_action\n if action==\"warning\":\n if 
name not in self._command_warned:\n print(\"Warning: direct call of command '{}' of thread '{}' from a different thread '{}'\".format(\n name,self.name,threadprop.current_controller().name),file=sys.stderr)\n self._command_warned.add(name)\n else:\n accessor=QMultiRepeatingThreadController.__getattribute__(self,action)\n return accessor.__getattr__(name)(*args,**kwargs)\n return original_method(*args,**kwargs)", "def _check_method_first_arg(self, node, function_info=_DEFAULT_FUNCTION_INFO):\n if self.current_class is None:\n return\n # staticmethods have no restrictions\n if function_info.is_staticmethod:\n return\n # try to confirm that it's actually a method\n if not hasattr(node, \"name\") or not hasattr(self.current_class, node.name):\n return\n first_must_be = \"cls\" if function_info.is_classmethod else \"self\"\n\n if len(node.args.args) < 1 or len(node.args.defaults) == len(node.args.args):\n self.show_error(\n node,\n \"Method must have at least one non-keyword argument\",\n ErrorCode.method_first_arg,\n )\n elif not self._arg_has_name(node.args.args[0], first_must_be):\n self.show_error(\n node,\n \"First argument to method should be %s\" % (first_must_be,),\n ErrorCode.method_first_arg,\n )", "def __call__(self, args):", "def test_wrapper_with_args():\n my_method = SGMethod(\"test\")\n other_method = SGMethod(\"other\")\n par1 = other_method.create_parameter(\"par1\")\n \n my_method.calls(other_method, ['\"test\"'])\n my_method.check_call_validity();\n \n assert other_method == my_method.method_called\n assert len(my_method.args) == 1\n assert par1 != my_method.args[0]\n assert '\"test\"' == my_method.args[0]", "def TestMethodBody(run_method_name, run_dargs):\n return lambda self: getattr(self, run_method_name)(**run_dargs)", "def __call__(self, *args):\n\n\t\tself.send(self.format(*args))", "def _get_arg_name(self, arg, variable_name):", "def split_method_call(handler_call_details):\n\n # e.g. /package.ServiceName/MethodName\n parts = handler_call_details.method.split(\"/\")\n if len(parts) < 3:\n return \"\", \"\", False\n\n grpc_service_name, grpc_method_name = parts[1:3]\n return grpc_service_name, grpc_method_name, True", "def call_method(self, action):\n\n\t\tif action[0] in self.methods:\n\t\t\tself.methods[action[0]](action[0:])\n\t\telse:\n\t\t\tself.no_such_method()", "def _call_func(quantity, obj, args):\n\n log = logging.getLogger(__name__)\n try:\n result = getattr(obj, quantity)(*args)\n except AttributeError:\n log.error(\"Object %s has no method: %s\", str(obj), quantity)\n raise\n except:\n log.error(\"Error while calling method %s of object %s\", quantity,\n str(obj))\n raise\n return result", "def get_type(args_str, entry_type):\r\n # The C-method-implementations accept self as the first argument,\r\n # so a one-argument method will be invoked with zero arguments in Python.\r\n no_args = 1 if entry_type == \"method\" else 0\r\n return (\"METH_NOARGS\" if len(args_str.split(\",\")) == no_args\r\n else \"METH_VARARGS\")", "def callable_(arg: str) -> str:\n return '! %r !' 
% arg", "def handler(self, command, args=[]):\n ###\n # command parsing and handling logic to be implemented by child\n ###\n if not command and not hasattr(self, 'handle_'):\n return f'Service {str(self.__class__.__name__)}: {self.__doc__ or \"\"}'\n methodname = 'handle_{}'.format(command or '')\n logger.info('method name: {}'.format(methodname))\n logger.info('args: {}'.format(args))\n method = self.__getattribute__(methodname)\n return method(args)", "def _name(self):\n return self._arguments[0].split('(')[0]", "def servicemethod(*args, **kwargs):\n # Default options\n options = {'name': None, 'store': None, 'request_arg': True, 'store_arg': True}\n\n # Figure out if we were called with arguments\n # If we were called with args, ie:\n # @servicemethod(name='Foo')\n # Then the only argument here will be the pre-decorated function/method object.\n method = ( (len(args) == 1) and callable(args[0]) ) and args[0] or None\n\n if method is None:\n # We were called with args, (or @servicemethod() )\n # so figure out what they were ...\n\n # The method name should be either the first non-kwarg\n # or the kwarg 'name'\n # Example: @servicemethod('my_method', ...) or @servicemethod(name='my_method')\n options.update({\n 'name': bool(args) and args[0] or kwargs.pop('name', None),\n 'store': (len(args) >= 2) and args[1] or kwargs.pop('store', None),\n 'request_arg': kwargs.pop('request_arg', True),\n 'store_arg': kwargs.pop('store_arg', True),\n })\n else:\n options['name'] = method.__name__\n method.__servicemethod__ = options\n\n def method_with_args_wrapper(method):\n \"\"\" Wrapper for a method decorated with decorator arguments\n \"\"\"\n if options['name'] is None:\n options['name'] = method.__name__\n method.__servicemethod__ = options\n\n if options['store'] is not None:\n options['store'].service.add_method(method)\n\n return method\n\n return method or method_with_args_wrapper" ]
[ "0.61590546", "0.61256295", "0.60179985", "0.5816926", "0.5688947", "0.5648416", "0.5534845", "0.5470439", "0.53763974", "0.53491557", "0.5341609", "0.5332069", "0.5326649", "0.53201157", "0.53159", "0.5265616", "0.52220535", "0.5211293", "0.5182447", "0.5174981", "0.5169188", "0.5163801", "0.5157468", "0.51460546", "0.5143799", "0.51366186", "0.5120801", "0.5086957", "0.50768214", "0.5076804" ]
0.75629675
0
Set the access key as key=your_googlemaps_key. This must be called prior to contacting the API.
def set_api_access_keys(**kwargs):
    API_BASE_PARAMS['key'] = kwargs['key']
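As a hedged illustration only (not part of the dataset record), here is a minimal runnable sketch of the pattern this query/document pair describes. It assumes API_BASE_PARAMS is a module-level dict of default request parameters that later API helpers merge into each request's query string; the key value is a placeholder.

API_BASE_PARAMS = {}  # assumed: default query parameters shared by all API requests

def set_api_access_keys(**kwargs):
    # Store the access key; subsequent requests read it from API_BASE_PARAMS.
    API_BASE_PARAMS['key'] = kwargs['key']

# Must be called prior to contacting the API:
set_api_access_keys(key='your_googlemaps_key')
assert API_BASE_PARAMS['key'] == 'your_googlemaps_key'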
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetAPIKey(self, api_key):\n self._api_key = api_key", "def SetAPIKey(self, api_key):\n self._api_key = api_key", "def SetAPIKey(self, api_key):\n self._analyzer.SetAPIKey(api_key)", "def update_key(self, key):\n self._api_key = key", "def api_key(self, api_key):\n\n self._api_key = api_key", "def api_key_set(self, api_key):\n self.request('/v1.1/auth_key', 'POST', body={'auth_key': api_key})", "def set_api_key(self, api_key):\n self.api_key = api_key\n self.session.auth = (\"api\", api_key)", "def set_api_key(new_api_key):\n global api_key\n api_key = new_api_key", "def set_api_key(api_key):\n Movie.__api_key = api_key", "def googlemaps(request):\n assert False\n # return {\"GOOGLEMAPS_API_KEY\": settings.GOOGLEMAPS_API_KEY}", "async def statset_apikey(self, key):\n self._set_api_key(key)\n await self.bot.say(\"API key successfully set.\")", "def __init__(self, key=None):\n self._key = key or os.environ['HERE_API_KEY']", "def _check_api_key(self):\n try:\n self.maps.places_nearby(\n location=(53.909804, 27.580184),\n radius=650,\n open_now=False,\n language=config.LANGUAGE,\n type='cafe',\n # rank_by='distance', # IMPORTANT: cannot use rank_by and radius options together\n page_token=None,\n )\n except Exception as e:\n\n with self.__writelock:\n self.print(f'ERROR: bad API key \"{self.maps.key}\" (tracker={self.stats.previous_requests})\\n')\n raise e", "def google_maps(request):\n gmaps_api_key = getattr(settings, 'GOOGLE_MAPS_API', False)\n return {\n 'GOOGLE_MAPS_API': gmaps_api_key,\n 'google_maps': gmaps_api_key\n }", "def set_api_key(self, host, api_key, param_name=u'api_key'):\n raise NotImplementedError(\n u\"%s: Method not implemented\", self.__class__.__name__)", "def google_map_api(request):\n\treturn {\n\t\t'GOOGLE_MAPS_API' : settings.GOOGLE_MAPS_API,\n\t}", "def set_apikey(self, apikey):\n self.apikey = apikey\n self.__init_submodules(apikey)", "def api_key(self, value):\n self.__creds.api_key_v2 = value", "async def apikey_bing(self, ctx, key):\n settings = loadauth()\n settings['apikey'] = key\n saveauth(settings)\n return await self.bot.say(\"Bing API key saved.\")", "def __init__(self, api_key='YOUR_API_KEY'):\n self.api_key = api_key", "async def statset_appkey(self, key):\n self._set_app_key(key)\n await self.bot.say(\"APP key successfully set.\")", "def test_set_api_key(self):\n\n api_key = 'abc'\n project_id = '123'\n\n kaput.init(api_key, project_id)\n\n self.assertEqual(api_key, kaput._API_KEY)\n self.assertEqual(project_id, kaput._PROJECT_ID)\n self.assertFalse(kaput._DEBUG)\n self.assertEqual(kaput._handle_exception, sys.excepthook)", "def SetCredentials(self,\n api_key,\n ):\n self._api_key = api_key", "def cli(ctx, gkg_api_key):\n ctx.ensure_object(dict)\n ctx.obj['GKG_API_KEY'] = gkg_api_key", "def __init__(self, api_key):\r\n self.api_key = api_key", "def api_key(self, api_key):\n if api_key is None:\n raise ValueError(\"Invalid value for `api_key`, must not be `None`\") # noqa: E501\n\n self._api_key = api_key", "def __init__(self, api_key: str):\n self.api_key = api_key", "def authenticate(self, api_key):\n self.headers['x-rapidapi-key'] = api_key", "def set_APIKey(self, value):\n super(SearchByReviewerInputSet, self)._set_input('APIKey', value)", "def set_key(self, key):\n self.key = key" ]
[ "0.6702951", "0.6702951", "0.64656013", "0.63789594", "0.63458663", "0.63154477", "0.62301403", "0.62292933", "0.62059003", "0.61881185", "0.60916674", "0.60765517", "0.5990251", "0.5933332", "0.58120465", "0.5781521", "0.57431203", "0.57308316", "0.5702196", "0.56874853", "0.56513643", "0.56353706", "0.5584545", "0.5583007", "0.55516046", "0.5518411", "0.54915065", "0.54628634", "0.5412198", "0.54099804" ]
0.6750033
0