Column           Type    Values
query            string  9-9.05k chars
document         string  10-222k chars
metadata         dict    -
negatives        list    30 items each
negative_scores  list    30 items each
document_score   string  4-10 chars
document_rank    string  2 classes
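The column statistics above follow the Hugging Face dataset-viewer layout, so records can presumably be read with the `datasets` library. A minimal loading sketch; the hub id is a placeholder, since this excerpt does not name the dataset:

from datasets import load_dataset

# "org/code-retrieval-triplets" is hypothetical; substitute the real hub id.
ds = load_dataset("org/code-retrieval-triplets", split="train")
row = ds[0]
print(row["query"])               # natural-language description of the code
print(row["document"][:200])      # the positive (matching) code snippet
print(len(row["negatives"]))      # 30 hard negatives per query
print(row["document_score"], row["document_rank"])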
Try to move one customer to any position where it can be placed, and see whether the move cuts the total cost.
def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):
    route_ing = copy.deepcopy(sol_in1[c_loc[0]])
    route_new = route_ing
    move_to_route = c_loc[0]
    orgn_type1 = sol_type1[c_loc[0]]
    origin_cost1 = check_violation(route_ing, orgn_type1)[1]
    route_ing.remove(cust)  # remove cust from its current route
    new_type1 = route_type(route_ing)
    adjust_cost1 = check_violation(route_ing, new_type1)[1]
    best_cut_cost0 = -1000
    best_cut_cost = best_cut_cost0  # best cost cut found for moving this customer
    for j, rou in enumerate(sol_in1):
        orgn_type2 = sol_type1[j]
        origin_cost2 = check_violation(rou, orgn_type2)[1]
        if j == c_loc[0]:  # reinsertion within the same route
            for k in range(1, len(route_ing)):
                if k == c_loc[1]:
                    continue  # do not put it back at the original position
                rou_test = route_ing[:k] + [cust] + route_ing[k:]
                if check_violation(rou_test, orgn_type2)[0]:
                    adjust_cost2 = check_violation(rou_test, orgn_type2)[1]
                    cost_cut_test = origin_cost1 - adjust_cost2
                    if cost_cut_test > best_cut_cost:
                        best_cut_cost = cost_cut_test
                        route_new = rou_test
                        move_to_route = j
        else:  # insertion into a different route
            for k in range(1, len(rou)):
                rou_test = rou[:k] + [cust] + rou[k:]
                if check_violation(rou_test, 5)[0]:  # feasibility check with type code 5
                    new_type2 = route_type(rou_test)
                    adjust_cost2 = check_violation(rou_test, new_type2)[1]
                    cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2
                    if cost_cut_test > best_cut_cost:
                        best_cut_cost = cost_cut_test
                        route_new = rou_test
                        move_to_route = j
    if best_cut_cost > 1e-5:  # improving move: always accept
        sol_in1[move_to_route] = route_new
        sol_type1[move_to_route] = route_type(route_new)
        if move_to_route != c_loc[0]:  # customer moved to a different route
            sol_in1[c_loc[0]] = route_ing
            sol_type1[c_loc[0]] = route_type(route_ing)
    elif sa_lns and best_cut_cost < -1e-5:  # worsening move: simulated-annealing acceptance
        prb = random.uniform(0, 1)
        if np.exp(best_cut_cost / curr_temp) > prb:
            sol_in1[move_to_route] = route_new
            sol_type1[move_to_route] = route_type(route_new)
            if move_to_route != c_loc[0]:  # customer moved to a different route
                sol_in1[c_loc[0]] = route_ing
                sol_type1[c_loc[0]] = route_type(route_ing)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shift_3_cust(self, sol_in6, cust, c_loc, curr_temp, sol_type6, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in6[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type6[c_loc[0]]\r\n cust_folw1 = route_ing[c_loc[1] + 1]\r\n cust_folw2 = route_ing[c_loc[1] + 2]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n del route_ing[c_loc[1]] # remove customer following following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in6):\r\n orgn_type2 = sol_type6[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw1, cust_folw2] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw1, cust_folw2] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift3 good', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift3', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)", "def test_move_onto_terrain(self):\n # move onto Water (1 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 2)\n\n # move onto Lava (4 extra)\n start = np.array((3, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 5)\n\n # move onto Barrier (illegal)\n start = np.array((1, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n\n # move onto Rock (illegal)\n start = np.array((1, 0), dtype='int')\n k1 = knight.Knight(b1, start)\n # 
set move choice\n move_choice = 7\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)", "def shift_2_cust(self, sol_in2, cust, c_loc, curr_temp, sol_type2, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in2[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type2[c_loc[0]]\r\n cust_folw = route_ing[c_loc[1]+1]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in2):\r\n orgn_type2 = sol_type2[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift2 good', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift2', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n # return sol_in2\r", "def test_move_over_terrain(self):\n # move over Water (0 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 1), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Lava (0 extra)\n start = np.array((5, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 6\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Barrier (illegal)\n start = np.array((2, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n #\n # move over Rock (0 extra)\n start = np.array((2, 3), 
dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)", "def is_valid(self, move):\r\n return move > 10 and move < 89", "def is_legal_move(self, current_player, move):\n\t\tstarting_pos = move[0]\n\t\tending_pos = move[1]\n\t\tif ending_pos[0] not in range(self.board_size) or ending_pos[1] not in range(self.board_size):\t# Discard any generated moves that fall off of the board\n\t\t\treturn False \n\t\tif self.board.repr[starting_pos[0]][starting_pos[1]]!=self.player_symbol[current_player]:\n\t\t\tprint \"this should never trigger and is redundant\"\n\t\t\treturn False\n\t\tif self.board.repr[ending_pos[0]][ending_pos[1]]!= '.':\t# Check that landing spot is empty\n\t\t\treturn False\n\t\tmiddle_pos = (starting_pos[0]-(starting_pos[0]-ending_pos[0])/2,starting_pos[1]-(starting_pos[1]-ending_pos[1])/2)\t# Check the middle spot is the other piece - this should in theory not matter because the pieces alternate\n\t\tother_player = 1 - current_player \n\t\tif self.board.repr[middle_pos[0]][middle_pos[1]] != self.player_symbol[other_player]:\n\t\t\treturn False \n\t\treturn True", "def check_costs(self):\r\n if self.cost > self.owner.player.char_ob.currency:\r\n self.add_error(\r\n \"celebration_tier\",\r\n \"You cannot afford to pay the cost of %s.\" % self.cost,\r\n )", "def is_king_move_valid(self, from_row, from_col, to_row, to_col):\n\n piece = self.board.squares[from_row][from_col]\n piece_color = self.piece_color(piece)\n\n if abs(to_row - from_row) <= 1 and abs(to_col - from_col) <= 1:\n if piece_color == \"white\":\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n else:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n return True\n\n # TODO Castling implementation\n # if king and rook have not been moved yet this game, and no space between\n # the king and the rook are occupied or threatened, then the king can\n # move 2 spaces towards the rook, and the rook will be placed adjacent to the\n # king on the side closer to the center column.\n\n # TODO need function which returns squares being threatened which takes a piece position and board as a param\n\n if (piece_color == \"white\"):\n if self.whiteCanCastleKside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # White kingside Castle\n if (self.board.squares[7][5] == None and self.board.squares[7][6] == None):\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 7, 7, 5)\n return True\n\n if self.whiteCanCastleQside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 2):\n # White queenside Castle\n if (self.board.squares[7][3] == None and self.board.squares[7][2] == None and self.board.squares[7][1] == None):\n\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 0, 7, 3)\n return True\n\n elif piece_color == \"black\":\n if self.blackCanCastleKside and (from_row == 0 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # black kingside Castle\n if (self.board.squares[0][5] == None and self.board.squares[0][6] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 7, 0, 5)\n return True\n\n if self.blackCanCastleQside and (from_row == 0 and 
from_col == 4) and (to_row == from_row) and (to_col == 2):\n # black queenside Castle\n if (self.board.squares[0][3] == None and self.board.squares[0][2] == None and self.board.squares[0][1] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 0, 0, 3)\n return True\n\n return False", "def check_move(board, move):\n\n player, spike_index, fields_to_move = Judge._validate_move(move)\n\n # 1. moving out of the bar\n # 2. check if the source is of the valid player\n # 3. check if the destination is valid\n\n board.set_player_perspective(player)\n\n # 1.\n if spike_index == OUT_OF_BAR_SPECIAL_MOVE:\n if board.bar[player] < 1:\n return False\n\n if not board.valid_dest(fields_to_move - 1):\n return False\n\n return True\n\n # 2.\n if not board.valid_source(spike_index):\n return False\n # 3.\n dest_spike_index = spike_index + fields_to_move\n\n if dest_spike_index >= len(INITIAL_SPIKES_STATE):\n return board.all_at_home()\n \n return board.valid_dest(dest_spike_index)", "def movable_intraroute_customers(route, customers):\n mcust = []\n for c in range(route.ncustomers):\n if len(factible_route_positions(route.customers[c+1],\n route,customers)) > 1:\n mcust.append(c)\n return mcust", "def exchange_1_cust(self, sol_in3, cust, c_loc, curr_temp, sol_type3, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in3[c_loc[0]])\r\n\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n exch_to_route = c_loc[0]\r\n orgn_type1 = sol_type3[exch_to_route]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in3):\r\n orgn_type2 = sol_type3[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # exchange in the same route\r\n for k in range(1, len(rou)-1):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test[k], rou_test[c_loc[1]] = rou_test[c_loc[1]], rou_test[k]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test\r\n route_new_2 = rou_test\r\n exch_to_route = j\r\n\r\n else: # exchange to a different route\r\n for k in range(1, len(rou)-1):\r\n rou_test_1 = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]] = rou[k]\r\n rou_test_2[k] = cust\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange1 good', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = 
random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange1', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in3\r", "def any_legal_move(self, player, board):\r\n moves = self.legal_moves(player, board)\r\n #print(moves)\r\n return len(moves)!=0", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # a cannon cannot capture another cannon\n if type(board[to_col][to_row]) == Cannon:\n return False\n\n # if destination within the board and the move is strictly horizontal or vertical\n if to_col in range(9) and to_row in range(10) and (to_col == from_col or to_row == from_row):\n # if move is to the left\n if to_col < from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(to_col + 1, from_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is to the right\n if to_col > from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(from_col + 1, to_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is upward\n if to_row < from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(to_row + 1, from_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is downward\n if to_row > from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(from_row + 1, to_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n return False\n\n # for moving diagonally in the red palace\n if (from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]] and\n type(board[4][1]) != Cannon and issubclass(type(board[4][1]), Piece)):\n return True\n\n # for moving diagonally in the blue palace\n if (from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]] and\n type(board[4][8]) != Cannon and issubclass(type(board[4][8]), Piece)):\n return True\n\n return False", "def move_check(self):\r\n \r\n if not self.run:\r\n return False\r\n \r\n if self.get_num_legal_moves() == 0:\r\n SlTrace.lg(\"NO more legal moves!\", \"nolegalmoves\")\r\n ###return False \r\n \r\n if self.new_move:\r\n self.announce_player(\"start_move\")\r\n if SlTrace.trace(\"selected\"):\r\n self.list_selected(\"After start_move\")\r\n self.new_move = False\r\n player = self.get_player()\r\n if player is None:\r\n return False\r\n \r\n return True", "def _isvalidmove(self, from_, to_):\n if self.board[from_].occupant is None:\n print(\"Moving from empty square\")\n return False\n piece = self.board[from_].occupant\n\n if 
piece.color != self.to_move:\n print(\"Wrong color\")\n return False\n\n if self.is_checked:\n if piece.notation != 'K':\n print(\"King is checked!\")\n return False\n\n diff = (\n to_cartesian(to_)[0] - to_cartesian(from_)[0],\n to_cartesian(to_)[1] - to_cartesian(from_)[1]\n )\n if not piece.hopping:\n if self.board.isblocked(from_, to_):\n print(\"Move blocked by other pieces\")\n return False\n\n if self.board[to_].occupant is not None:\n if piece.color == self.board[to_].occupant.color:\n print(\"Cannot capture friendly\")\n return False\n\n if diff not in piece.get_captures():\n print(\"Invalid piece capture\")\n return False\n\n if diff not in piece.get_moves():\n print(\"Invalid piece move\")\n return False\n\n return True", "def exchange_2_cust(self, sol_in4, cust, c_loc, curr_temp, sol_type4, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in4[c_loc[0]])\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n cust_folw = route_ing[c_loc[1] + 1]\r\n exch_to_route = c_loc[0]\r\n origin_cost1 = check_violation(route_ing, sol_type4[c_loc[0]])[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in4):\r\n origin_cost2 = check_violation(rou, sol_type4[j])[1]\r\n if j != c_loc[0] and len(rou) >= 4: # exchange to a different route\r\n for k in range(1, len(rou) - 2):\r\n rou_test_1 = copy.deepcopy(sol_in4[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]], rou_test_1[c_loc[1] + 1] = rou[k], rou[k + 1]\r\n rou_test_2[k], rou_test_2[k + 1] = cust, cust_folw\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange2 good', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange2', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in4\r", "def attempt_move(self, move_input):\n # handle undo move\n if move_input == ['UN', 0, 'UN']:\n self.undo_move()\n return True\n\n # handle stock draw Special Action first\n if move_input == ['S0', 0, 'S0']:\n self.save_board_state()\n self.stock.deal_to_wp(self.wp)\n self.moves += 1\n return True\n\n # handle basic cases\n if len(move_input) != 3:\n return False\n if move_input[0] not in self.move_dict or move_input[2] not in self.move_dict:\n return False\n if type(move_input[1]) is not int:\n return False\n if move_input[2] == \"W0\":\n return False\n\n orig_pile = self.move_dict[move_input[0]]\n orig_ind = move_input[1]\n dest_pile = self.move_dict[move_input[2]]\n if orig_ind >= 
orig_pile.get_length():\n return False\n\n # handle flip tableau card Special Action\n if move_input[0][0] == 'T' and orig_pile == dest_pile and orig_ind == 0:\n orig_pile.reveal_top_card()\n\n # basic conditions have been met\n adj_ind = orig_pile.get_length() - orig_ind - 1\n if orig_pile.is_valid_retrieval(orig_ind):\n self.save_board_state()\n move_pile = orig_pile.remove_cards(orig_ind + 1)\n if dest_pile.is_valid_placement(move_pile):\n dest_pile.merge_pile(move_pile)\n if move_input[0][0] == 'T' and self.auto_flip_tab:\n orig_pile.reveal_top_card()\n self.moves += 1\n return True\n else:\n orig_pile.merge_pile(move_pile)\n self.board_states.pop()\n return False\n return False", "def is_valid_move(self, somerow, somecol):\n bool_1 = self.board[somerow][somecol] != 1\n bool_2 = self.num_queens_placed < self.size \n bool_3 = self.attack(somerow, somecol)\n return bool_1 and bool_2 and bool_3", "def move_valid(move):\n return True", "def _is_valid_move(self, vector, current_piece, other_piece):\n return True", "def make_move(self, selected_piece_coords, destination_coords, player):\n\n # Verification player number:\"\n if player < 0 or player > 3:\n return False\n\n board_copy = self.current_board.clone()\n\n # move_piece handled move verification and validation.\n board_copy.move_piece(selected_piece_coords, destination_coords)\n\n next_moves = self.current_board.get_possible_next_moves(selected_piece_coords)\n\n if not board_copy in next_moves:\n return False\n\n self.prev_boards.append(self.current_board)\n self.current_board = board_copy\n self.winner = self.current_board.check_for_game_won()\n\n # notify\n\n if player == 1 or player == 3:\n self.player = 2\n if player == 2:\n self.player = 1\n return True", "def process_move(player, board):\r\n c = player.__repr__()\r\n print(c, \"'s turn\")\r\n move = player.next_move(board)\r\n board.add_checker(player.checker, move)\r\n print()\r\n print(board)\r\n if board.is_win_for(player.checker):\r\n i = player.num_moves\r\n print(player.__repr__(), \"wins in \", i, \"moves\")\r\n print(\"Congratulations!\")\r\n return True\r\n elif board.is_full() and not board.is_win_for(player.checker):\r\n print(\"It's a tie!\")\r\n return True\r\n else:\r\n return False", "def _is_valid_action(self, turn, action):\n if not isinstance(action, Action):\n raise TypeError(\"Action must be Action class\")\n\n if action.x < 0 or action.x > 7 or action.y < 0 or action.y > 7:\n raise Exception(\"You must set disk in board.\")\n\n if self.board[action.x][action.y] is not 0:\n return False\n\n agent = -1 if turn is 0 else 1\n\n for direction in self.directions:\n for i in range(1, 9):\n x = action.x + i * direction[0]\n y = action.y + i * direction[1]\n\n if x < 0 or x > 7 or y < 0 or y > 7:\n break\n\n if self.board[x][y] is 0:\n break\n elif self.board[x][y] is agent:\n # if meet agent's disk and no opposite's dist in between, not valid\n if i is not 1:\n return True\n\n break\n else:\n continue\n\n # what is wrong?\n return False", "def check_limit(self):\n self.ensure_one()\n partner = self.partner_id\n moveline_obj = self.env['account.move.line']\n movelines = moveline_obj.\\\n search([('partner_id', '=', partner.id),\n ('account_id.user_type_id.type', 'in',\n ['receivable', 'payable']),\n ('full_reconcile_id', '=', False)])\n\n debit, credit = 0.0, 0.0\n today_dt = datetime.strftime(datetime.now().date(), DF)\n for line in movelines:\n if line.date_maturity < today_dt:\n credit += line.debit\n debit += line.credit\n\n if (credit - debit + 
self.amount_total) > partner.credit_limit:\n # Consider partners who are under a company.\n if partner.over_credit or (partner.parent_id and partner.parent_id.over_credit):\n partner.write({\n 'credit_limit': credit - debit + self.amount_total})\n return True\n else:\n msg = '%s Can not confirm Sale Order,Total mature due Amount ' \\\n '%s as on %s !\\nCheck Partner Accounts or Credit ' \\\n 'Limits !' % (partner.over_credit,credit - debit, today_dt)\n raise UserError(_('Credit Over Limits !\\n' + msg))\n else:\n return True", "def is_moving(self) -> bool:\n return self.orders and self.orders[0].ability.id is AbilityId.MOVE", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board\n if (to_col in range(9) and to_row in range(10) and\n # and the move is 1 up/down/left/right (with no other piece here) and then 1 farther out diagonally\n ((to_row - from_row == -2 and abs(to_col - from_col) == 1 and board[from_col][from_row - 1] == '') or\n (to_row - from_row == 2 and abs(to_col - from_col) == 1 and board[from_col][from_row + 1] == '') or\n (to_col - from_col == -2 and abs(to_row - from_row) == 1 and board[from_col - 1][from_row] == '') or\n (to_col - from_col == 2 and abs(to_row - from_row) == 1 and board[from_col + 1][from_row] == '')\n )\n ):\n return True\n else:\n return False", "def factible_route_insertion(customer, position, route, customers):\n r = copy.deepcopy(route)\n r.insert(position, [customer], customers)\n return not r.violate_windows(customers)", "def no_more_move(self):\n if (self.p_no_move + self.c_no_move == 2):\n return True\n return False", "def remove_existing_customers(self):\n # remove the customers which are not active (.is_active )\n self.to_move = False\n #for cust in self.customers:\n # print(cust.state)\n self.customers = [cust for cust in self.customers if cust.state != 'checkout']\n #if cust.to_move():\n # self.to_move = True", "def op_move_preconditions(self):\n\n if(self.next_move != self.FREE):\n return False\n\n return True" ]
[ "0.6117333", "0.60589105", "0.5994203", "0.59595776", "0.5953622", "0.5933493", "0.59228426", "0.5878984", "0.58387893", "0.5838295", "0.58261937", "0.5800863", "0.5800339", "0.5784758", "0.5716828", "0.5699591", "0.5699348", "0.5658605", "0.563691", "0.5607451", "0.5571733", "0.5566332", "0.55458564", "0.55335003", "0.55328304", "0.5532227", "0.5522318", "0.55197537", "0.5509702", "0.5507153" ]
0.6281938
0
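Both document fields in this dump call two helpers that the excerpt never defines, check_violation(route, rtype) -> (feasible, cost) and route_type(route) -> type code, and they assume module-level import copy, import random, and import numpy as np. The stand-ins below are a minimal sketch inferred only from those call sites; the feasibility and cost logic is invented for illustration:

import copy
import random

import numpy as np

# Hypothetical stand-ins; only the call-site contracts are taken from the
# operators above, everything else is an assumption.
DEPOT = 0
DIST = {}  # (i, j) -> arc cost; fill in from the instance data

def route_type(route):
    # Assumption: the type code grows with route size; the literal 5 used
    # in the operators is treated as the most permissive type.
    return min(5, max(1, (len(route) - 2) // 5 + 1))

def check_violation(route, rtype):
    # Returns (feasible, cost), matching calls such as
    # check_violation(rou_test, 5)[0] and check_violation(rou, t)[1].
    feasible = len(route) >= 2 and route[0] == DEPOT and route[-1] == DEPOT
    cost = sum(DIST.get((a, b), 1.0) for a, b in zip(route, route[1:]))
    return feasible, cost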
Try to move two consecutive customers to any position where they can be placed, and see whether the move cuts the total cost.
def shift_2_cust(self, sol_in2, cust, c_loc, curr_temp, sol_type2, sa_lns):
    route_ing = copy.deepcopy(sol_in2[c_loc[0]])
    route_new = route_ing
    move_to_route = c_loc[0]
    orgn_type1 = sol_type2[c_loc[0]]
    cust_folw = route_ing[c_loc[1] + 1]  # the customer following cust
    origin_cost1 = check_violation(route_ing, orgn_type1)[1]
    route_ing.remove(cust)   # remove cust from the current route
    del route_ing[c_loc[1]]  # remove the customer following cust
    new_type1 = route_type(route_ing)
    adjust_cost1 = check_violation(route_ing, new_type1)[1]
    best_cut_cost0 = -1000
    best_cut_cost = best_cut_cost0  # best cost cut found for moving this pair
    for j, rou in enumerate(sol_in2):
        orgn_type2 = sol_type2[j]
        origin_cost2 = check_violation(rou, orgn_type2)[1]
        if j == c_loc[0]:  # reinsertion within the same route
            for k in range(1, len(route_ing)):
                if k == c_loc[1]:
                    continue  # do not put the pair back at the original position
                rou_test = route_ing[:k] + [cust, cust_folw] + route_ing[k:]
                if check_violation(rou_test, orgn_type2)[0]:
                    adjust_cost2 = check_violation(rou_test, orgn_type2)[1]
                    cost_cut_test = origin_cost1 - adjust_cost2
                    if cost_cut_test > best_cut_cost:
                        best_cut_cost = cost_cut_test
                        route_new = rou_test
                        move_to_route = j
        else:  # insertion into a different route
            for k in range(1, len(rou)):
                rou_test = rou[:k] + [cust, cust_folw] + rou[k:]
                if check_violation(rou_test, 5)[0]:  # feasibility check with type code 5
                    new_type2 = route_type(rou_test)
                    adjust_cost2 = check_violation(rou_test, new_type2)[1]
                    cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2
                    if cost_cut_test > best_cut_cost:
                        best_cut_cost = cost_cut_test
                        route_new = rou_test
                        move_to_route = j
    if best_cut_cost > 1e-5:  # improving move: always accept
        sol_in2[move_to_route] = route_new
        sol_type2[move_to_route] = route_type(route_new)
        if move_to_route != c_loc[0]:  # pair moved to a different route
            sol_in2[c_loc[0]] = route_ing
            sol_type2[c_loc[0]] = route_type(route_ing)
    elif sa_lns and best_cut_cost < -1e-5:  # worsening move: simulated-annealing acceptance
        prb = random.uniform(0, 1)
        if np.exp(best_cut_cost / curr_temp) > prb:
            sol_in2[move_to_route] = route_new
            sol_type2[move_to_route] = route_type(route_new)
            if move_to_route != c_loc[0]:  # pair moved to a different route
                sol_in2[c_loc[0]] = route_ing
                sol_type2[c_loc[0]] = route_type(route_ing)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in1[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type1[c_loc[0]]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # move c in the current route\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in1):\r\n orgn_type2 = sol_type1[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue # do not put it at the original position\r\n rou_test = route_ing[:k] + [cust] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust] + rou[k:]\r\n\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift1 good', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost/curr_temp) > prb:\r\n # print('shift1', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n\r\n\r\n\r\n # return sol_in1\r", "def shift_3_cust(self, sol_in6, cust, c_loc, curr_temp, sol_type6, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in6[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type6[c_loc[0]]\r\n cust_folw1 = route_ing[c_loc[1] + 1]\r\n cust_folw2 = route_ing[c_loc[1] + 2]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n del route_ing[c_loc[1]] # remove customer following following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in6):\r\n orgn_type2 = sol_type6[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw1, cust_folw2] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n 
cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw1, cust_folw2] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift3 good', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift3', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)", "def exchange_2_cust(self, sol_in4, cust, c_loc, curr_temp, sol_type4, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in4[c_loc[0]])\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n cust_folw = route_ing[c_loc[1] + 1]\r\n exch_to_route = c_loc[0]\r\n origin_cost1 = check_violation(route_ing, sol_type4[c_loc[0]])[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in4):\r\n origin_cost2 = check_violation(rou, sol_type4[j])[1]\r\n if j != c_loc[0] and len(rou) >= 4: # exchange to a different route\r\n for k in range(1, len(rou) - 2):\r\n rou_test_1 = copy.deepcopy(sol_in4[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]], rou_test_1[c_loc[1] + 1] = rou[k], rou[k + 1]\r\n rou_test_2[k], rou_test_2[k + 1] = cust, cust_folw\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange2 good', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange2', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in4\r", "def intraroute_2opt(route, customers):\n if route.ncustomers < 2: return 
False\n r = copy.deepcopy(route)\n c1 = random.randint(1,r.ncustomers-1)\n c2 = random.randint(c1+2,r.ncustomers+1)\n #print(c1, c2)\n r.customers[c1:c2] = r.customers[c1:c2][::-1]\n #print(r.customers)\n r.update(customers)\n if r.violate_windows(customers):\n return False\n else:\n route.customers[c1:c2] = r.customers[c1:c2]\n route.update(customers)\n print(\"succeed intraroute 2opt\")\n return True", "def interroute_2opt(route1, route2, customers):\n r1 = copy.deepcopy(route1)\n r2 = copy.deepcopy(route2)\n c1 = random.randint(1,r1.ncustomers)\n c2 = random.randint(1,r2.ncustomers)\n r1cs = r1.customers[:]\n r1.customers[c1:] = r2.customers[c2:]\n r1.ncustomers = len(r1.customers) - 2\n #print(r1cs, r1.customers, r1.ncustomers)\n r1.update(customers)\n if r1.violate_windows(customers): return False\n r2.customers[c2:] = r1cs[c1:]\n r2.ncustomers = len(r2.customers) - 2\n r2.update(customers)\n if r2.violate_windows(customers): return False\n route1.customers[:] = r1.customers\n route1.ncustomers = r1.ncustomers\n route1.update(customers)\n route2.customers[:] = r2.customers\n route2.ncustomers = r2.ncustomers\n route2.update(customers)\n print(\"succeed interroute 2opt\")\n return True", "def movable_intraroute_customers(route, customers):\n mcust = []\n for c in range(route.ncustomers):\n if len(factible_route_positions(route.customers[c+1],\n route,customers)) > 1:\n mcust.append(c)\n return mcust", "def exchange_1_cust(self, sol_in3, cust, c_loc, curr_temp, sol_type3, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in3[c_loc[0]])\r\n\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n exch_to_route = c_loc[0]\r\n orgn_type1 = sol_type3[exch_to_route]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in3):\r\n orgn_type2 = sol_type3[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # exchange in the same route\r\n for k in range(1, len(rou)-1):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test[k], rou_test[c_loc[1]] = rou_test[c_loc[1]], rou_test[k]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test\r\n route_new_2 = rou_test\r\n exch_to_route = j\r\n\r\n else: # exchange to a different route\r\n for k in range(1, len(rou)-1):\r\n rou_test_1 = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]] = rou[k]\r\n rou_test_2[k] = cust\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange1 good', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n 
sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange1', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in3\r", "def no_more_move(self):\n if (self.p_no_move + self.c_no_move == 2):\n return True\n return False", "def find_moveable_pieces(self, die, p1): \n moveable = []\n if (p1):\n #must we re-enter?\n if (self.p1vec[0] > 0):\n if (self.free_spot(0, die, p1)):\n b = Board(self.p1vec[:],self.p2vec[:])\n b.move(0, die, p1)\n moveable.append(b)\n #no? ok then generate the moves\n else:\n for i in range(1, 25):\n if (self.p1vec[i] > 0):\n if (self.free_spot(i, die, p1)):\n b = Board(self.p1vec[:],self.p2vec[:])\n b.move(i, die, p1)\n moveable.append(b)\n else:\n #must we re-enter?\n if (self.p2vec[0] > 0):\n if (self.free_spot(0, die, p1)):\n b = Board(self.p1vec[:],self.p2vec[:])\n b.move(0, die, p1)\n moveable.append(b)\n #no? ok then generate the moves\n else:\n for i in range(1, 25):\n if (self.p2vec[i] > 0):\n if (self.free_spot(i, die, p1)):\n b = Board(self.p1vec[:],self.p2vec[:])\n b.move(i, die, p1)\n moveable.append(b)\n return moveable", "def test_move_over_terrain(self):\n # move over Water (0 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 1), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Lava (0 extra)\n start = np.array((5, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 6\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Barrier (illegal)\n start = np.array((2, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n #\n # move over Rock (0 extra)\n start = np.array((2, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)", "def test_move_onto_terrain(self):\n # move onto Water (1 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 2)\n\n # move onto Lava (4 extra)\n start = np.array((3, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 5)\n\n # move onto Barrier (illegal)\n start = np.array((1, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n\n # move onto Rock (illegal)\n start = 
np.array((1, 0), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 7\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)", "def remove_existing_customers(self):\n # remove the customers which are not active (.is_active )\n self.to_move = False\n #for cust in self.customers:\n # print(cust.state)\n self.customers = [cust for cust in self.customers if cust.state != 'checkout']\n #if cust.to_move():\n # self.to_move = True", "def next_move(ttt):\r\n # get board in 2D array form\r\n b = ttt.get_board()\r\n \r\n # if there's a winning move, take it\r\n (cfw, win_move) = check_for_win_lose(b)\r\n if cfw is not None:\r\n if win_move:\r\n print 'COMPUTER WINS!'\r\n return cfw, win_move\r\n # otherwise, pres on with the next best move\r\n\r\n # get \"points\" on board. this tells us not only the move\r\n # but also who went first\r\n board_count = sum(sum(b,[]))\r\n \r\n # IF COMPUTER HAS FIRST TURN\r\n # if 1st move\r\n if board_count == 0:\r\n return (2,2), False # take the center\r\n # this is not best strategy for winning, but\r\n # it the human messes up, the computer can win.\r\n # taking a corner first makes it a little easier\r\n # for the computer to win becase the human only\r\n # has one correct move to make: to take the center\r\n \r\n # if 3rd move, and not a winning one\r\n if board_count == 3:\r\n if b[0][1]==2 or b[1][0]==2 or b[0][0]==2:\r\n return (3,3), False\r\n elif b[0][2]==2:\r\n return (3,1), False\r\n elif b[2][0]==2:\r\n return (1,3), False\r\n else:#elif b[1][2]==2 or b[2][1]==2 or b[2][2]==2:\r\n return (1,1), False\r\n\r\n # if 5th move, and not a winning or losing one\r\n if board_count == 6:\r\n b5 = numpy.array([[0,2,1],[0,1,0],[2,0,0]])\r\n if (b == b5).all():\r\n return (3,3), False\r\n elif (b == numpy.rot90(b5,1)).all():\r\n return (3,1), False\r\n elif (b == numpy.rot90(b5,2)).all():\r\n return (1,1), False\r\n elif (b == numpy.rot90(b5,3)).all():\r\n return (1,3), False\r\n\r\n b5 = numpy.array([[0,0,1],[0,1,2],[2,0,0]])\r\n if (b == b5).all():\r\n return (1,1), False\r\n elif (b == numpy.rot90(b5,1)).all():\r\n return (1,3), False\r\n elif (b == numpy.rot90(b5,2)).all():\r\n return (3,3), False\r\n elif (b == numpy.rot90(b5,3)).all():\r\n return (3,1), False\r\n\r\n # at this point, all possible boards should have been covered\r\n\r\n # if 7th move, and a winning or losing one\r\n if board_count == 9:\r\n # find the row or col with 2 open slots and mark it\r\n for ri in range(3):\r\n r = b[ri]\r\n if sum([1 if i==0 else 0 for i in r]) == 2:\r\n if r[0] == 0:\r\n return (ri+1,1), False\r\n else:\r\n return (ri+1,2), False\r\n for ci in range(3):\r\n c = get_col(b, ci)\r\n if sum([1 if i==0 else 0 for i in c]) == 2:\r\n if c[0] == 0:\r\n return (1,ci+1), False\r\n else:\r\n return (2,ci+1), False\r\n\r\n \r\n # IF HUMAN HAS FIRST TURN\r\n # if 2nd move\r\n if board_count == 2:\r\n if b[1][1] == 0:\r\n # if the center is open, computer has\r\n # to take it in order to not lose\r\n return (2,2), False\r\n else:\r\n # otherwise take a corner\r\n return (1,1), False\r\n\r\n # if 4th move\r\n if board_count == 5:\r\n # if we took a corner on move 2 and they\r\n # are using computer's offensive strategy\r\n # when it is first player\r\n b4 = [[1,0,0],[0,2,0],[0,0,2]]\r\n if b==b4:\r\n return (3,1), False\r\n # if we took center on move 2\r\n else:\r\n b4 = numpy.array([[2,0,0],[0,1,0],[0,0,2]])\r\n if (b == b4).all() or (b == numpy.rot90(b4,1)).all():\r\n return (1,2), 
False\r\n\r\n # overall ELSE -- just find a square\r\n for ri in range(3):\r\n for ci in range(3):\r\n if b[ri][ci] == 0:\r\n return (ri+1,ci+1), False", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # a cannon cannot capture another cannon\n if type(board[to_col][to_row]) == Cannon:\n return False\n\n # if destination within the board and the move is strictly horizontal or vertical\n if to_col in range(9) and to_row in range(10) and (to_col == from_col or to_row == from_row):\n # if move is to the left\n if to_col < from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(to_col + 1, from_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is to the right\n if to_col > from_col:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for col in range(from_col + 1, to_col):\n if type(board[col][to_row]) == Cannon:\n return False\n if issubclass(type(board[col][to_row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is upward\n if to_row < from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(to_row + 1, from_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n # if move is downward\n if to_row > from_row:\n # make sure there is exactly one intervening piece that's not a cannon\n piece_count = 0\n for row in range(from_row + 1, to_row):\n if type(board[to_col][row]) == Cannon:\n return False\n if issubclass(type(board[to_col][row]), Piece):\n piece_count += 1\n if piece_count == 1:\n return True\n return False\n\n # for moving diagonally in the red palace\n if (from_coordinates in [[3,0],[3,2],[5,0],[5,2]] and to_coordinates in [[3,0],[3,2],[5,0],[5,2]] and\n type(board[4][1]) != Cannon and issubclass(type(board[4][1]), Piece)):\n return True\n\n # for moving diagonally in the blue palace\n if (from_coordinates in [[3,7],[3,9],[5,7],[5,9]] and to_coordinates in [[3,7],[3,9],[5,7],[5,9]] and\n type(board[4][8]) != Cannon and issubclass(type(board[4][8]), Piece)):\n return True\n\n return False", "def is_legal_move(self, start_pos, end_pos, start_piece, end_piece_player_id, board):\r\n parsed_positions = self.parse_positions(start_pos, end_pos)\r\n\r\n start_row = parsed_positions[0]\r\n start_col = parsed_positions[1]\r\n end_row = parsed_positions[2]\r\n end_col = parsed_positions[3]\r\n count = 0 # Count will track how many pieces are between start and end_pos\r\n\r\n if start_row != end_row and start_col != end_col: # Moving diagonally\r\n return False\r\n\r\n # If cannon moves to an empty position\r\n # if end_piece_player_id is None:\r\n\r\n if start_row == end_row: # Moving horizontally\r\n col_difference = end_col - start_col\r\n\r\n if col_difference > 0: # Moving to the right of the board\r\n for col in range(start_col + 1, end_col): # Checks if there is a piece between start_col and end_col\r\n if board[start_row][col].get_piece() is not None:\r\n count += 1\r\n\r\n if col_difference < 0: # Moving 
to the left of the board\r\n for col in range(start_col - 1, end_col, -1): # Checks to the left of the board\r\n # If there is a piece to block movement to the end_pos, return False\r\n if board[start_row][col].get_piece() is not None:\r\n count += 1\r\n\r\n if start_col == end_col: # Moving vertically\r\n row_difference = end_row - start_row\r\n\r\n if row_difference > 0: # Moving down the board\r\n for row in range(start_row + 1, end_row):\r\n if board[row][start_col].get_piece() is not None: # If no piece is impeding path to end_pos\r\n count += 1\r\n\r\n\r\n if row_difference < 0: # Moving up the board\r\n for row in range(start_row -1, end_row, -1):\r\n if board[row][start_col].get_piece() is not None: # If no piece is impeding path to end_pos\r\n count += 1\r\n\r\n # 1 piece between start_pos and end_pos and end_pos contains a chess piece\r\n if count == 1 and end_piece_player_id is not None:\r\n return True\r\n # end_pos has no piece and there are no pieces to impede path\r\n elif end_piece_player_id is None and count == 0:\r\n return True\r\n # Returns False for all other scenarios\r\n else:\r\n return False", "def is_legal_move(self, current_player, move):\n\t\tstarting_pos = move[0]\n\t\tending_pos = move[1]\n\t\tif ending_pos[0] not in range(self.board_size) or ending_pos[1] not in range(self.board_size):\t# Discard any generated moves that fall off of the board\n\t\t\treturn False \n\t\tif self.board.repr[starting_pos[0]][starting_pos[1]]!=self.player_symbol[current_player]:\n\t\t\tprint \"this should never trigger and is redundant\"\n\t\t\treturn False\n\t\tif self.board.repr[ending_pos[0]][ending_pos[1]]!= '.':\t# Check that landing spot is empty\n\t\t\treturn False\n\t\tmiddle_pos = (starting_pos[0]-(starting_pos[0]-ending_pos[0])/2,starting_pos[1]-(starting_pos[1]-ending_pos[1])/2)\t# Check the middle spot is the other piece - this should in theory not matter because the pieces alternate\n\t\tother_player = 1 - current_player \n\t\tif self.board.repr[middle_pos[0]][middle_pos[1]] != self.player_symbol[other_player]:\n\t\t\treturn False \n\t\treturn True", "def checkValidOneMove(ndSoln):\n for i in range(len(ndSoln)-1):\n x1 = ndSoln[i][0]\n y1 = ndSoln[i][1]\n x2 = ndSoln[i+1][0]\n y2 = ndSoln[i+1][1]\n #take Euclidean distance between two consecutive moves\n #which should be approx. 1.0 if the move is valid\n if not(math.isclose(hypot(x2-x1,y2-y1),1.0)):\n return False\n return True", "def check_move(board, move):\n\n player, spike_index, fields_to_move = Judge._validate_move(move)\n\n # 1. moving out of the bar\n # 2. check if the source is of the valid player\n # 3. 
check if the destination is valid\n\n board.set_player_perspective(player)\n\n # 1.\n if spike_index == OUT_OF_BAR_SPECIAL_MOVE:\n if board.bar[player] < 1:\n return False\n\n if not board.valid_dest(fields_to_move - 1):\n return False\n\n return True\n\n # 2.\n if not board.valid_source(spike_index):\n return False\n # 3.\n dest_spike_index = spike_index + fields_to_move\n\n if dest_spike_index >= len(INITIAL_SPIKES_STATE):\n return board.all_at_home()\n \n return board.valid_dest(dest_spike_index)", "def validBoard():\r\n\r\n\tglobal move1, move2\r\n\r\n\tif move1==move2 or move1-move2==1:\r\n\t\treturn True\r\n\telse:\r\n\t\treturn False", "def is_king_move_valid(self, from_row, from_col, to_row, to_col):\n\n piece = self.board.squares[from_row][from_col]\n piece_color = self.piece_color(piece)\n\n if abs(to_row - from_row) <= 1 and abs(to_col - from_col) <= 1:\n if piece_color == \"white\":\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n else:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n return True\n\n # TODO Castling implementation\n # if king and rook have not been moved yet this game, and no space between\n # the king and the rook are occupied or threatened, then the king can\n # move 2 spaces towards the rook, and the rook will be placed adjacent to the\n # king on the side closer to the center column.\n\n # TODO need function which returns squares being threatened which takes a piece position and board as a param\n\n if (piece_color == \"white\"):\n if self.whiteCanCastleKside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # White kingside Castle\n if (self.board.squares[7][5] == None and self.board.squares[7][6] == None):\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 7, 7, 5)\n return True\n\n if self.whiteCanCastleQside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 2):\n # White queenside Castle\n if (self.board.squares[7][3] == None and self.board.squares[7][2] == None and self.board.squares[7][1] == None):\n\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 0, 7, 3)\n return True\n\n elif piece_color == \"black\":\n if self.blackCanCastleKside and (from_row == 0 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # black kingside Castle\n if (self.board.squares[0][5] == None and self.board.squares[0][6] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 7, 0, 5)\n return True\n\n if self.blackCanCastleQside and (from_row == 0 and from_col == 4) and (to_row == from_row) and (to_col == 2):\n # black queenside Castle\n if (self.board.squares[0][3] == None and self.board.squares[0][2] == None and self.board.squares[0][1] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 0, 0, 3)\n return True\n\n return False", "def is_valid(self, move):\r\n return move > 10 and move < 89", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board\n if (to_col in range(9) and to_row in range(10) 
and\n # and the move is 1 up/down/left/right (with no other piece here) and then 1 farther out diagonally\n ((to_row - from_row == -2 and abs(to_col - from_col) == 1 and board[from_col][from_row - 1] == '') or\n (to_row - from_row == 2 and abs(to_col - from_col) == 1 and board[from_col][from_row + 1] == '') or\n (to_col - from_col == -2 and abs(to_row - from_row) == 1 and board[from_col - 1][from_row] == '') or\n (to_col - from_col == 2 and abs(to_row - from_row) == 1 and board[from_col + 1][from_row] == '')\n )\n ):\n return True\n else:\n return False", "def make_move(self, selected_piece_coords, destination_coords, player):\n\n # Verification player number:\"\n if player < 0 or player > 3:\n return False\n\n board_copy = self.current_board.clone()\n\n # move_piece handled move verification and validation.\n board_copy.move_piece(selected_piece_coords, destination_coords)\n\n next_moves = self.current_board.get_possible_next_moves(selected_piece_coords)\n\n if not board_copy in next_moves:\n return False\n\n self.prev_boards.append(self.current_board)\n self.current_board = board_copy\n self.winner = self.current_board.check_for_game_won()\n\n # notify\n\n if player == 1 or player == 3:\n self.player = 2\n if player == 2:\n self.player = 1\n return True", "def validate_move(self, move_from, move_to, board):\n\n from_coordinates = JanggiGame.translate_to_grid(move_from)\n to_coordinates = JanggiGame.translate_to_grid(move_to)\n from_col = from_coordinates[0]\n from_row = from_coordinates[1]\n to_col = to_coordinates[0]\n to_row = to_coordinates[1]\n\n # if destination within the board\n if to_col in range(9) and to_row in range(10):\n # if destination is 1 up and diagonally to the left\n if to_col - from_col == -2 and to_row - from_row == -3 and board[from_col][from_row - 1] == '' and board[from_col - 1][from_row - 2] == '':\n return True\n # if destination is 1 up and diagonally to the right\n if to_col - from_col == 2 and to_row - from_row == -3 and board[from_col][from_row - 1] == '' and board[from_col + 1][from_row - 2] == '':\n return True\n # if destination is 1 down and diagonally to the left\n if to_col - from_col == -2 and to_row - from_row == 3 and board[from_col][from_row + 1] == '' and board[from_col - 1][from_row + 2] == '':\n return True\n # if destination is 1 down and diagonally to the right\n if to_col - from_col == 2 and to_row - from_row == 3 and board[from_col][from_row + 1] == '' and board[from_col + 1][from_row + 2] == '':\n return True\n # if destination is 1 left and diagonally up\n if to_col - from_col == -3 and to_row - from_row == -2 and board[from_col - 1][from_row] == '' and board[from_col - 2][from_row - 1] == '':\n return True\n # if destination is 1 left and diagonally down\n if to_col - from_col == -3 and to_row - from_row == 2 and board[from_col - 1][from_row] == '' and board[from_col - 2][from_row + 1] == '':\n return True\n # if destination is 1 right and diagonally up\n if to_col - from_col == 3 and to_row - from_row == -2 and board[from_col + 1][from_row] == '' and board[from_col + 2][from_row - 1] == '':\n return True\n # if destination is 1 right and diagonally down\n if to_col - from_col == 3 and to_row - from_row == 2 and board[from_col + 1][from_row] == '' and board[from_col + 2][from_row + 1] == '':\n return True\n return False", "def move(self, start, distance, p1):\n if (p1):\n #move your piece\n self.p1vec[start] -= 1\n dest = start + distance\n if (dest == 25):\n self.bornoff = True\n else:\n self.p1vec[start+distance] += 1\n #capture your 
opponent, despite their number\n spot = 25 - start - distance\n self.p2vec[0] += self.p2vec[spot]\n self.p2vec[spot] = 0\n else:\n #move your piece\n self.p2vec[start] -= 1\n dest = start + distance\n if (dest == 25):\n pass\n else:\n self.p2vec[start+distance] += 1\n #capture your opponent\n spot = 25 - start - distance\n self.p2vec[0] += self.p2vec[spot]\n self.p2vec[spot] = 0", "def _is_valid_move(self, vector, current_piece, other_piece):\n return True", "def factible_route_insertion(customer, position, route, customers):\n r = copy.deepcopy(route)\n r.insert(position, [customer], customers)\n return not r.violate_windows(customers)", "async def run_capcacity_control(self):\n # remove all customers which will order nothing, and any sit_together customers\n\n # remove members from end of end of line( waiting the least amount of time ) \n\n try:\n capacity = self.restaurant.line.line_number\n number_to_remove = int(capacity * .25)\n self.log.warning(f\"{self} capacity control started to remove {number_to_remove} customers\")\n queue = self.waiting_list['queue'].copy()\n for _ in range(number_to_remove):\n if len(self.waiting_list['none']) > 0:\n for customer in self.waiting_list['none']:\n await self.remove_customer_from_line(customer)\n number_to_remove-=1\n queue = self.waiting_list['queue'].copy()\n if number_to_remove == 0:\n break\n try:\n customer = queue.pop()\n await self.remove_customer_from_line(customer)\n except IndexError:\n break\n\n self.log.warning(f\"{self} capacity control completed\")\n except Exception as e:\n self.log.warning(f\"{self} error during capacity control\")", "def op_move_preconditions(self):\n\n if(self.next_move != self.FREE):\n return False\n\n return True", "def check_limit(self):\n self.ensure_one()\n partner = self.partner_id\n moveline_obj = self.env['account.move.line']\n movelines = moveline_obj.\\\n search([('partner_id', '=', partner.id),\n ('account_id.user_type_id.type', 'in',\n ['receivable', 'payable']),\n ('full_reconcile_id', '=', False)])\n\n debit, credit = 0.0, 0.0\n today_dt = datetime.strftime(datetime.now().date(), DF)\n for line in movelines:\n if line.date_maturity < today_dt:\n credit += line.debit\n debit += line.credit\n\n if (credit - debit + self.amount_total) > partner.credit_limit:\n # Consider partners who are under a company.\n if partner.over_credit or (partner.parent_id and partner.parent_id.over_credit):\n partner.write({\n 'credit_limit': credit - debit + self.amount_total})\n return True\n else:\n msg = '%s Can not confirm Sale Order,Total mature due Amount ' \\\n '%s as on %s !\\nCheck Partner Accounts or Credit ' \\\n 'Limits !' % (partner.over_credit,credit - debit, today_dt)\n raise UserError(_('Credit Over Limits !\\n' + msg))\n else:\n return True" ]
[ "0.63458234", "0.6205083", "0.5944662", "0.5919286", "0.58585805", "0.5844579", "0.5750275", "0.5711178", "0.56468445", "0.56443506", "0.56095916", "0.5584599", "0.5448591", "0.5444054", "0.5442395", "0.54375905", "0.54311484", "0.5415993", "0.53907067", "0.53784865", "0.5372915", "0.5338633", "0.53384274", "0.53254664", "0.5319461", "0.5318435", "0.53175664", "0.5313669", "0.5312798", "0.5304459" ]
0.6756745
0
Try to move 3 consecutive customers to anywhere they can be put, and see if the move can cut the total cost.
def shift_3_cust(self, sol_in6, cust, c_loc, curr_temp, sol_type6, sa_lns): route_ing = copy.deepcopy(sol_in6[c_loc[0]]) route_new = route_ing move_to_route = c_loc[0] orgn_type1 = sol_type6[c_loc[0]] cust_folw1 = route_ing[c_loc[1] + 1] cust_folw2 = route_ing[c_loc[1] + 2] origin_cost1 = check_violation(route_ing, orgn_type1)[1] route_ing.remove(cust) # remove c in the current route del route_ing[c_loc[1]] # remove customer following c del route_ing[c_loc[1]] # remove customer following following c new_type1 = route_type(route_ing) adjust_cost1 = check_violation(route_ing, new_type1)[1] best_cut_cost0 = -1000 best_cut_cost = best_cut_cost0 # best cost cut of moving this customer for j, rou in enumerate(sol_in6): orgn_type2 = sol_type6[j] origin_cost2 = check_violation(rou, orgn_type2)[1] if j == c_loc[0]: # moving in the same route for k in range(1, len(route_ing)): if k == c_loc[1]: continue rou_test = route_ing[:k] + [cust, cust_folw1, cust_folw2] + route_ing[k:] if check_violation(rou_test, orgn_type2)[0]: adjust_cost2 = check_violation(rou_test, orgn_type2)[1] cost_cut_test = origin_cost1 - adjust_cost2 if cost_cut_test > best_cut_cost: best_cut_cost = cost_cut_test route_new = rou_test move_to_route = j else: # moving to a different route for k in range(1, len(rou)): rou_test = rou[:k] + [cust, cust_folw1, cust_folw2] + rou[k:] if check_violation(rou_test, 5)[0]: new_type2 = route_type(rou_test) adjust_cost2 = check_violation(rou_test, new_type2)[1] cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2 if cost_cut_test > best_cut_cost: best_cut_cost = cost_cut_test route_new = rou_test move_to_route = j if best_cut_cost > 1e-5: # print('shift3 good', best_cut_cost) sol_in6[move_to_route] = route_new sol_type6[move_to_route] = route_type(route_new) if move_to_route != c_loc[0]: # moving to a different route sol_in6[c_loc[0]] = route_ing sol_type6[c_loc[0]] = route_type(route_ing) elif sa_lns and best_cut_cost < -1e-5: prb = random.uniform(0, 1) if np.exp(best_cut_cost / curr_temp) > prb: # print('shift3', best_cut_cost) sol_in6[move_to_route] = route_new sol_type6[move_to_route] = route_type(route_new) if move_to_route != c_loc[0]: # moving to a different route sol_in6[c_loc[0]] = route_ing sol_type6[c_loc[0]] = route_type(route_ing)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def movable_intraroute_customers(route, customers):\n mcust = []\n for c in range(route.ncustomers):\n if len(factible_route_positions(route.customers[c+1],\n route,customers)) > 1:\n mcust.append(c)\n return mcust", "def exchange_1_cust(self, sol_in3, cust, c_loc, curr_temp, sol_type3, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in3[c_loc[0]])\r\n\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n exch_to_route = c_loc[0]\r\n orgn_type1 = sol_type3[exch_to_route]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in3):\r\n orgn_type2 = sol_type3[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # exchange in the same route\r\n for k in range(1, len(rou)-1):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test[k], rou_test[c_loc[1]] = rou_test[c_loc[1]], rou_test[k]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test\r\n route_new_2 = rou_test\r\n exch_to_route = j\r\n\r\n else: # exchange to a different route\r\n for k in range(1, len(rou)-1):\r\n rou_test_1 = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]] = rou[k]\r\n rou_test_2[k] = cust\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange1 good', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange1', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in3\r", "def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in1[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type1[c_loc[0]]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # move c in the current route\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in1):\r\n orgn_type2 = sol_type1[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, 
len(route_ing)):\r\n if k == c_loc[1]:\r\n continue # do not put it at the original position\r\n rou_test = route_ing[:k] + [cust] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust] + rou[k:]\r\n\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift1 good', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost/curr_temp) > prb:\r\n # print('shift1', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n\r\n\r\n\r\n # return sol_in1\r", "def shift_2_cust(self, sol_in2, cust, c_loc, curr_temp, sol_type2, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in2[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type2[c_loc[0]]\r\n cust_folw = route_ing[c_loc[1]+1]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in2):\r\n orgn_type2 = sol_type2[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift2 good', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different 
route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift2', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n # return sol_in2\r", "def exchange_2_cust(self, sol_in4, cust, c_loc, curr_temp, sol_type4, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in4[c_loc[0]])\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n cust_folw = route_ing[c_loc[1] + 1]\r\n exch_to_route = c_loc[0]\r\n origin_cost1 = check_violation(route_ing, sol_type4[c_loc[0]])[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in4):\r\n origin_cost2 = check_violation(rou, sol_type4[j])[1]\r\n if j != c_loc[0] and len(rou) >= 4: # exchange to a different route\r\n for k in range(1, len(rou) - 2):\r\n rou_test_1 = copy.deepcopy(sol_in4[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]], rou_test_1[c_loc[1] + 1] = rou[k], rou[k + 1]\r\n rou_test_2[k], rou_test_2[k + 1] = cust, cust_folw\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange2 good', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange2', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in4\r", "def test_move_onto_terrain(self):\n # move onto Water (1 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 2)\n\n # move onto Lava (4 extra)\n start = np.array((3, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 5)\n\n # move onto Barrier (illegal)\n start = np.array((1, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n\n # move onto Rock 
(illegal)\n start = np.array((1, 0), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 7\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)", "def remove_existing_customers(self):\n # remove the customers which are not active (.is_active )\n self.to_move = False\n #for cust in self.customers:\n # print(cust.state)\n self.customers = [cust for cust in self.customers if cust.state != 'checkout']\n #if cust.to_move():\n # self.to_move = True", "def move_length_3(self, move, new_state):\n # First consider the move on 6 corners\n for i in [[\"A\", 1, 2, 2, 5, 0, 3, 7, 11, 11],\n [\"B\", 0, 2, 4, 8, 8, 3, 6, 9, 1],\n [\"F\", 9, 9, 2, 0, 0, 6, 7, 8, 6],\n [\"J\", 5, 9, 10, 11, 7, 6, 3, 1, 1],\n [\"I\", 11, 5, 4, 1, 8, 5, 6, 7, 6],\n [\"L\", 8, 5, 9, 10, 7, 3, 0, 7, 11]]:\n if move == i[0]:\n if new_state.letters[i[1]].isalpha():\n new_state.claim[i[2]] \\\n = new_state.get_current_player_name()[1]\n if new_state.get_current_player_name()[1] \\\n == new_state.letters[i[3]] \\\n or new_state.get_current_player_name()[1] \\\n == new_state.letters[i[4]]:\n new_state.claim[i[5]] \\\n = new_state.get_current_player_name()[1]\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[6]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[7]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[8]]) \\\n and new_state.claim[i[9]] == \"@\":\n new_state.claim[i[9]] \\\n = new_state.get_current_player_name()[1]\n\n # then consider the move on the middle of each side\n for i in [[\"C\", 0, 5, 0, 3, 4, 4, 6, 10, 10],\n [\"E\", 1, 8, 8, 2, 3, 4, 7, 10, 3],\n [\"K\", 9, 11, 7, 2, 6, 10, 7, 4, 3]]:\n if move == i[0]:\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[1]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[2]]) \\\n and new_state.claim[i[3]] == \"@\":\n new_state.claim[i[3]] \\\n = new_state.get_current_player_name()[1]\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[4]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[5]]) \\\n and new_state.claim[i[6]] == \"@\":\n new_state.claim[i[6]] \\\n = new_state.get_current_player_name()[1]\n\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[7]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[8]]) \\\n and new_state.claim[i[9]] == \"@\":\n new_state.claim[i[9]] \\\n = new_state.get_current_player_name()[1]\n # Finally consider the internal move\n internal = [[\"D\", 2, 4, 4, 1, 6, 9, 1, 0, 7, 11, 11],\n [\"G\", 2, 10, 10, 9, 3, 1, 1, 5, 7, 8, 6],\n [\"H\", 4, 10, 3, 0, 3, 11, 11, 5, 6, 8, 6]]\n for i in internal:\n if move == i[0]:\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[1]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[2]]) \\\n and new_state.claim[i[3]] == \"@\":\n new_state.claim[i[3]] \\\n = new_state.get_current_player_name()[1]\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[4]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[5]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[6]]) \\\n and new_state.claim[i[7]] == \"@\":\n new_state.claim[i[7]] \\\n = new_state.get_current_player_name()[1]\n\n if (new_state.get_current_player_name()[1]\n == new_state.letters[i[8]]\n or new_state.get_current_player_name()[1]\n == new_state.letters[i[9]]\n or 
new_state.get_current_player_name()[1]\n == new_state.letters[i[10]]) \\\n and new_state.claim[i[11]] == \"@\":\n new_state.claim[i[11]] \\\n = new_state.get_current_player_name()[1]\n\n new_state.letters = [self.get_current_player_name()[1]\n if i == move else i for i in self.letters]\n\n return StonehengeState(not self.p1_turn, new_state.length,\n new_state.letters, new_state.claim)", "def solve(customerCount, vehicleCount, vehicleCapacity, depotIndex, customers):\n \n N, locations, locations_r, distances, closest = precalculate(customers)\n \n #print locations\n #print locations_r\n angle_order = range(1, N)\n angle_order.sort(key=lambda i: (locations_r[i, 1], locations_r[i, 0])) \n \n vehicleTours = best_order(customerCount, customers, vehicleCount, vehicleCapacity, angle_order)\n if not vehicleTours:\n vehicleTours = solve0(customerCount, vehicleCount, vehicleCapacity, depotIndex, customers)\n check(customerCount, customers, vehicleCapacity, vehicleTours)\n vehicleTours = get_shortest_paths('file_path XXX', customers, depotIndex, vehicleTours)\n check(customerCount, customers, vehicleCapacity, vehicleTours)\n \n vehicleTours0 = copy.deepcopy(vehicleTours)\n dist0 = total_dist(customers, depotIndex, vehicleTours)\n if False:\n for _ in range(100):\n vehicleTours = copy.deepcopy(vehicleTours0) \n adjust_tours(customers, vehicleCapacity, vehicleCount, vehicleTours)\n vehicleTours = get_shortest_paths('file_path XXX', customers, depotIndex, vehicleTours)\n #check(customerCount, customers, vehicleCapacity, vehicleTours)\n if not is_valid(customerCount, customers, vehicleCapacity, vehicleTours):\n continue\n dist = total_dist(customers, depotIndex, vehicleTours)\n if dist < dist0:\n print '%s => %s' % (dist0, dist)\n vehicleTours0 = vehicleTours[:]\n dist0 = dist\n \n \n vehicleTours = copy.deepcopy(vehicleTours0) \n check(customerCount, customers, vehicleCapacity, vehicleTours)\n while len(vehicleTours) < vehicleCount:\n vehicleTours.append([])\n \n print '*', vehicleTours \n \n return vehicleTours", "def factible_route_insertion(customer, position, route, customers):\n r = copy.deepcopy(route)\n r.insert(position, [customer], customers)\n return not r.violate_windows(customers)", "def valid_moves(self):\n valid = set()\n\n # If the center is filled, so unlimited movement is allowed\n if self._unlimited is True:\n\n # For each value of filled, add that value to the center until the value is out of bounds\n # to acquire each movement point that can result\n for pos in self._filled:\n loc = self._center\n while 0 < loc[0] < 20 and 0 < loc[1] < 20:\n loc = (loc[0] + pos[0], loc[1] + pos[1])\n valid.add(loc)\n\n else:\n # If the movement is limited, only allow movement up to 3 spaces\n loc = self._center\n for pos in self._filled:\n if 0 < loc[0] + pos[0] < 20 and 0 < loc[1] + pos[1] < 20:\n valid.add((loc[0] + pos[0], loc[1] + pos[1]))\n if 0 < loc[0] + 2 * pos[0] < 20 and 0 < loc[1] + 2 * pos[1] < 20:\n valid.add((loc[0] + 2 * pos[0], loc[1] + 2 * pos[1]))\n if 0 < loc[0] + 3 * pos[0] < 20 and 0 < loc[1] + 3 * pos[1] < 20:\n valid.add((loc[0] + 3 * pos[0], loc[1] + 3 * pos[1]))\n\n return valid", "def checkPossibleMoves():\n for row in range(9):\n for column in range(7):\n if board[row][column] == board[row][column+1]: #A\n a = board[row][column]\n if column != 6: #column +3 would lead to an error\n if a == board[row+1][column+2] or a == board[row][column+3] or a == board[row-1][column+2] or a == board[row-1][column-1] or a == board[row][column-2] or a ==board[row+1][column-1]:\n return 
False\n else: \n if a == board[row+1][column+2] or a == board[row-1][column+2] or a == board[row-1][column-1] or a == board[row][column-2] or a ==board[row+1][column-1]:\n return False\n if board[row][column] == board[row][column+2]: # B\n if board[row][column] == board[row+1][column+1] or board[row][column] == board[row-1][column+1]:\n return False\n\n if board[row][column] == board[row+1][column]: #C\n a = board[row][column]\n if row != 8: #row +3 would lead to an error\n if a == board[row-1][column+1] or a == board[row-2][column] or a == board[row-1][column-1] or a == board[row+2][column-1] or a == board[row+3][column] or a == board[row+2][column+1]:\n return False\n else:\n if a == board[row-1][column+1] or a == board[row-2][column] or a == board[row-1][column-1] or a == board[row+2][column-1] or a == board[row+2][column+1]:\n return False\n\n if board[row][column] == board[row+2][column]: #D\n if board[row][column] == board[row+1][column-1] or board[row][column] == board[row+1][column+1]:\n return False\n return True", "def test_move_over_terrain(self):\n # move over Water (0 extra)\n b1 = board.Board(self.small_ter)\n start = np.array((0, 1), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 1\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Lava (0 extra)\n start = np.array((5, 4), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 6\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)\n #\n # move over Barrier (illegal)\n start = np.array((2, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 0\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertFalse(isvalid)\n #\n # move over Rock (0 extra)\n start = np.array((2, 3), dtype='int')\n k1 = knight.Knight(b1, start)\n # set move choice\n move_choice = 2\n # determine move validity and cost\n (cost, isvalid) = k1.validate_move(move_choice)\n self.assertTrue(isvalid)\n self.assertEqual(cost, 1)", "def check_sum_three(agent):\n return sum(agent.received[-3:]) == 3", "async def run_capcacity_control(self):\n # remove all customers which will order nothing, and any sit_together customers\n\n # remove members from end of end of line( waiting the least amount of time ) \n\n try:\n capacity = self.restaurant.line.line_number\n number_to_remove = int(capacity * .25)\n self.log.warning(f\"{self} capacity control started to remove {number_to_remove} customers\")\n queue = self.waiting_list['queue'].copy()\n for _ in range(number_to_remove):\n if len(self.waiting_list['none']) > 0:\n for customer in self.waiting_list['none']:\n await self.remove_customer_from_line(customer)\n number_to_remove-=1\n queue = self.waiting_list['queue'].copy()\n if number_to_remove == 0:\n break\n try:\n customer = queue.pop()\n await self.remove_customer_from_line(customer)\n except IndexError:\n break\n\n self.log.warning(f\"{self} capacity control completed\")\n except Exception as e:\n self.log.warning(f\"{self} error during capacity control\")", "def next_move(ttt):\r\n # get board in 2D array form\r\n b = ttt.get_board()\r\n \r\n # if there's a winning move, take it\r\n (cfw, win_move) = check_for_win_lose(b)\r\n if cfw is not None:\r\n if win_move:\r\n print 'COMPUTER WINS!'\r\n return cfw, win_move\r\n # otherwise, pres on 
with the next best move\r\n\r\n # get \"points\" on board. this tells us not only the move\r\n # but also who went first\r\n board_count = sum(sum(b,[]))\r\n \r\n # IF COMPUTER HAS FIRST TURN\r\n # if 1st move\r\n if board_count == 0:\r\n return (2,2), False # take the center\r\n # this is not best strategy for winning, but\r\n # it the human messes up, the computer can win.\r\n # taking a corner first makes it a little easier\r\n # for the computer to win becase the human only\r\n # has one correct move to make: to take the center\r\n \r\n # if 3rd move, and not a winning one\r\n if board_count == 3:\r\n if b[0][1]==2 or b[1][0]==2 or b[0][0]==2:\r\n return (3,3), False\r\n elif b[0][2]==2:\r\n return (3,1), False\r\n elif b[2][0]==2:\r\n return (1,3), False\r\n else:#elif b[1][2]==2 or b[2][1]==2 or b[2][2]==2:\r\n return (1,1), False\r\n\r\n # if 5th move, and not a winning or losing one\r\n if board_count == 6:\r\n b5 = numpy.array([[0,2,1],[0,1,0],[2,0,0]])\r\n if (b == b5).all():\r\n return (3,3), False\r\n elif (b == numpy.rot90(b5,1)).all():\r\n return (3,1), False\r\n elif (b == numpy.rot90(b5,2)).all():\r\n return (1,1), False\r\n elif (b == numpy.rot90(b5,3)).all():\r\n return (1,3), False\r\n\r\n b5 = numpy.array([[0,0,1],[0,1,2],[2,0,0]])\r\n if (b == b5).all():\r\n return (1,1), False\r\n elif (b == numpy.rot90(b5,1)).all():\r\n return (1,3), False\r\n elif (b == numpy.rot90(b5,2)).all():\r\n return (3,3), False\r\n elif (b == numpy.rot90(b5,3)).all():\r\n return (3,1), False\r\n\r\n # at this point, all possible boards should have been covered\r\n\r\n # if 7th move, and a winning or losing one\r\n if board_count == 9:\r\n # find the row or col with 2 open slots and mark it\r\n for ri in range(3):\r\n r = b[ri]\r\n if sum([1 if i==0 else 0 for i in r]) == 2:\r\n if r[0] == 0:\r\n return (ri+1,1), False\r\n else:\r\n return (ri+1,2), False\r\n for ci in range(3):\r\n c = get_col(b, ci)\r\n if sum([1 if i==0 else 0 for i in c]) == 2:\r\n if c[0] == 0:\r\n return (1,ci+1), False\r\n else:\r\n return (2,ci+1), False\r\n\r\n \r\n # IF HUMAN HAS FIRST TURN\r\n # if 2nd move\r\n if board_count == 2:\r\n if b[1][1] == 0:\r\n # if the center is open, computer has\r\n # to take it in order to not lose\r\n return (2,2), False\r\n else:\r\n # otherwise take a corner\r\n return (1,1), False\r\n\r\n # if 4th move\r\n if board_count == 5:\r\n # if we took a corner on move 2 and they\r\n # are using computer's offensive strategy\r\n # when it is first player\r\n b4 = [[1,0,0],[0,2,0],[0,0,2]]\r\n if b==b4:\r\n return (3,1), False\r\n # if we took center on move 2\r\n else:\r\n b4 = numpy.array([[2,0,0],[0,1,0],[0,0,2]])\r\n if (b == b4).all() or (b == numpy.rot90(b4,1)).all():\r\n return (1,2), False\r\n\r\n # overall ELSE -- just find a square\r\n for ri in range(3):\r\n for ci in range(3):\r\n if b[ri][ci] == 0:\r\n return (ri+1,ci+1), False", "def intraroute_2opt(route, customers):\n if route.ncustomers < 2: return False\n r = copy.deepcopy(route)\n c1 = random.randint(1,r.ncustomers-1)\n c2 = random.randint(c1+2,r.ncustomers+1)\n #print(c1, c2)\n r.customers[c1:c2] = r.customers[c1:c2][::-1]\n #print(r.customers)\n r.update(customers)\n if r.violate_windows(customers):\n return False\n else:\n route.customers[c1:c2] = r.customers[c1:c2]\n route.update(customers)\n print(\"succeed intraroute 2opt\")\n return True", "def check_for_winner(self, board):\n\n potential_move = (-1, -1)\n\n # Find Potential Three in a Row for Rows\n first_row = [(0, 0), (0, 1), (0, 2)]\n first_row_index = 
self.can_complete_three_in_row(first_row, board)\n if first_row_index[0] >= 0:\n return first_row[first_row_index[0]]\n elif first_row_index[1] >= 0:\n potential_move = first_row[first_row_index[1]]\n\n second_row = [(1, 0), (1, 1), (1, 2)]\n second_row_index = self.can_complete_three_in_row(second_row, board)\n if second_row_index[0] >= 0:\n return second_row[second_row_index[0]]\n elif second_row_index[1] >= 0:\n potential_move = second_row[second_row_index[1]]\n\n third_row = [(2, 0), (2, 1), (2, 2)]\n third_row_index = self.can_complete_three_in_row(third_row, board)\n if third_row_index[0] >= 0:\n return third_row[third_row_index[0]]\n elif third_row_index[1] >= 0:\n potential_move = third_row[third_row_index[1]]\n\n\n # Find Potential Three in a Row for Columns\n first_column = [(0, 0), (1, 0), (2, 0)]\n first_column_index = self.can_complete_three_in_row(first_column, board)\n if first_column_index[0] >= 0:\n return first_column[first_column_index[0]]\n elif first_column_index[1] >= 0:\n potential_move = first_column[first_column_index[1]]\n\n second_column = [(0, 1), (1, 1), (2, 1)]\n second_column_index = self.can_complete_three_in_row(second_column, board)\n if second_column_index[0] >= 0:\n return second_column[second_column_index[0]]\n elif second_column_index[1] >= 0:\n potential_move = second_column[second_column_index[1]]\n\n third_column = [(0, 2), (1, 2), (2, 2)]\n third_column_index = self.can_complete_three_in_row(third_column, board)\n if third_column_index[0] >= 0:\n return third_column[third_column_index[0]]\n elif third_column_index[1] >= 0:\n potential_move = third_column[third_column_index[1]]\n\n\n # Find Potential Three in a Row for Diagonals\n first_diagonal = [(0, 0), (1, 1), (2, 2)]\n first_diagonal_index = self.can_complete_three_in_row(first_diagonal, board)\n if first_diagonal_index[0] >= 0:\n return first_diagonal[first_diagonal_index[0]]\n elif first_diagonal_index[1] >= 0:\n potential_move = first_diagonal[first_diagonal_index[1]]\n\n second_diagonal = [(2, 0), (1, 1), (0, 2)]\n second_diagonal_index = self.can_complete_three_in_row(second_diagonal, board)\n\n if second_diagonal_index[0] >= 0:\n return second_diagonal[second_diagonal_index[0]]\n elif second_diagonal_index[1] >= 0:\n potential_move = second_diagonal[second_diagonal_index[1]]\n\n return potential_move", "def no_more_move(self):\n if (self.p_no_move + self.c_no_move == 2):\n return True\n return False", "def look_for_2of3(self, board):\n threes = split_board(board)\n best_move_is_in = -1\n best_move = -1\n winning_move = -1\n for elem in threes:\n if self.could_win(elem):\n best_move_is_in = elem\n for i in elem:\n if isinstance(i, int):\n winning_move = i\n best_move = winning_move\n if winning_move == -1:\n for elem in threes:\n if self.should_block(elem):\n best_move_is_in = elem\n for i in elem:\n if isinstance(i, int):\n best_move = i\n return best_move", "def is_valid(self, move):\r\n return move > 10 and move < 89", "def do_hanoi(A, B, C, n):\n # TODO: IMPLEMENT THIS FUNCTION.\n \n if n == 1:\n # TODO: 1. Initial case - only one disk will be moved from A to C.\n pass\n\n\n else:\n # TODO: 2. 
General case - All disks must be moved from A to C.\n pass", "def is_king_move_valid(self, from_row, from_col, to_row, to_col):\n\n piece = self.board.squares[from_row][from_col]\n piece_color = self.piece_color(piece)\n\n if abs(to_row - from_row) <= 1 and abs(to_col - from_col) <= 1:\n if piece_color == \"white\":\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n else:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n return True\n\n # TODO Castling implementation\n # if king and rook have not been moved yet this game, and no space between\n # the king and the rook are occupied or threatened, then the king can\n # move 2 spaces towards the rook, and the rook will be placed adjacent to the\n # king on the side closer to the center column.\n\n # TODO need function which returns squares being threatened which takes a piece position and board as a param\n\n if (piece_color == \"white\"):\n if self.whiteCanCastleKside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # White kingside Castle\n if (self.board.squares[7][5] == None and self.board.squares[7][6] == None):\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 7, 7, 5)\n return True\n\n if self.whiteCanCastleQside and (from_row == 7 and from_col == 4) and (to_row == from_row) and (to_col == 2):\n # White queenside Castle\n if (self.board.squares[7][3] == None and self.board.squares[7][2] == None and self.board.squares[7][1] == None):\n\n if not self.testing:\n self.whiteCanCastleKside = False\n self.whiteCanCastleQside = False\n self.board.move_piece(7, 0, 7, 3)\n return True\n\n elif piece_color == \"black\":\n if self.blackCanCastleKside and (from_row == 0 and from_col == 4) and (to_row == from_row) and (to_col == 6):\n # black kingside Castle\n if (self.board.squares[0][5] == None and self.board.squares[0][6] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 7, 0, 5)\n return True\n\n if self.blackCanCastleQside and (from_row == 0 and from_col == 4) and (to_row == from_row) and (to_col == 2):\n # black queenside Castle\n if (self.board.squares[0][3] == None and self.board.squares[0][2] == None and self.board.squares[0][1] == None):\n if not self.testing:\n self.blackCanCastleKside = False\n self.blackCanCastleQside = False\n self.board.move_piece(0, 0, 0, 3)\n return True\n\n return False", "def make_move(self, selected_piece_coords, destination_coords, player):\n\n # Verification player number:\"\n if player < 0 or player > 3:\n return False\n\n board_copy = self.current_board.clone()\n\n # move_piece handled move verification and validation.\n board_copy.move_piece(selected_piece_coords, destination_coords)\n\n next_moves = self.current_board.get_possible_next_moves(selected_piece_coords)\n\n if not board_copy in next_moves:\n return False\n\n self.prev_boards.append(self.current_board)\n self.current_board = board_copy\n self.winner = self.current_board.check_for_game_won()\n\n # notify\n\n if player == 1 or player == 3:\n self.player = 2\n if player == 2:\n self.player = 1\n return True", "def naive_partition3(nums: List[int]) -> bool:\n target, remaining = divmod(sum(nums), 3)\n if remaining:\n return False\n\n def sum_subset3(nums: List[int], n: int, a: int, b: int, c: int) -> bool:\n if a == 0 and b == 0 and c == 0:\n return True\n if n < 0:\n return False\n\n used_in_a = used_in_b = used_in_c = 
False\n\n if a - nums[n] >= 0:\n used_in_a = sum_subset3(nums, n - 1, a - nums[n], b, c)\n\n if not used_in_a and b - nums[n] >= 0:\n used_in_b = sum_subset3(nums, n - 1, a, b - nums[n], c)\n\n if (not used_in_a and not used_in_b) and c - nums[n] >= 0:\n used_in_c = sum_subset3(nums, n - 1, a, b, c - nums[n])\n\n return used_in_a or used_in_b or used_in_c\n\n return sum_subset3(nums, len(nums) - 1, target, target, target)", "def test_move(self):\n # Run a handful of GCMC moves\n n_moves = 10\n std_gcmc_sphere_sampler.move(std_gcmc_sphere_simulation.context, n_moves)\n\n # Check that all of the appropriate variables seem to have been updated\n # Hard to test individual moves as they are rarely accepted - just need to check the overall behaviour\n assert std_gcmc_sphere_sampler.n_moves == n_moves\n assert 0 <= std_gcmc_sphere_sampler.n_accepted <= n_moves\n assert len(std_gcmc_sphere_sampler.Ns) == n_moves\n assert len(std_gcmc_sphere_sampler.acceptance_probabilities) == n_moves\n assert isinstance(std_gcmc_sphere_sampler.energy, Quantity)\n assert std_gcmc_sphere_sampler.energy.unit.is_compatible(kilocalories_per_mole)\n\n return None", "def time_nn(self, on_way_time, curr_cust, remain_list, used_resource, rout_len, vehicle_type):\r\n if vehicle_type == 2:\r\n veh_cap = small_veh\r\n elif vehicle_type == 3:\r\n veh_cap = medium_veh\r\n else:\r\n veh_cap = large_veh\r\n real_wait_time = 0 # the final wait time after testing all the possible stores\r\n real_vst_cust = -1 # the final visiting store after testing all the possible stores\r\n visit_cust = [-1, 100000, 600000, 10000] # [cust_id, next_start, distance, closeness]\r\n if rout_len - 1 < 50: # max number of stores a vehicle visits\r\n for cust in remain_list:\r\n # print('checking customer: ', cust)\r\n if (used_resource[0] + num_demd[cust][0] * bskt_vol + num_demd[cust][1] * trsf_vol + (num_demd[cust][2] +\r\n num_demd[cust][3]) * milk_vol + num_demd[cust][4] * paper_bskt) > veh_cap[2]:\r\n # print('run out of effective volume')\r\n continue # volume overload\r\n # elif dist_mat[curr_cust, cust] + dist_mat[cust, 0] > veh_cap[3] - used_resource[3]:\r\n # print('run out of distance')\r\n # continue\r\n elif used_resource[2] + time_mat[curr_cust, cust] > num_timez[cust][1]:\r\n # print('late than last receive time')\r\n continue # can not arrive before last receive time\r\n elif time_mat[curr_cust, cust] + oprt_t + time_mat[cust, 0] > veh_cap[3] - on_way_time:\r\n # print('run out of work time')\r\n continue\r\n elif (curr_cust > 0 and used_resource[2] + time_mat[curr_cust, cust] < num_timez[cust][0] and\r\n num_timez[cust][0] - used_resource[2] + oprt_t + time_mat[cust, 0] > veh_cap[3] - on_way_time):\r\n # print('run out of work time - with waiting time')\r\n continue\r\n else:\r\n wait_time = num_timez[cust][0] - (used_resource[2] + time_mat[curr_cust, cust])\r\n\r\n if wait_time < 0:\r\n next_start = used_resource[2] + time_mat[curr_cust, cust]\r\n h_ij = time_mat[curr_cust, cust]\r\n else: # arrive early\r\n next_start = num_timez[cust][0]\r\n if curr_cust == 0:\r\n h_ij = time_mat[curr_cust, cust]\r\n wait_time = 0 # special situation for depot depart\r\n else:\r\n h_ij = next_start - used_resource[2]\r\n v_ij = num_timez[cust][1] - (used_resource[2] + time_mat[curr_cust, cust])\r\n close_ij = alp * time_mat[curr_cust, cust] + bet * h_ij + gam * v_ij # closeness between i and j\r\n # print(curr_cust, cust, close_ij)\r\n if close_ij < visit_cust[3]:\r\n real_wait_time = wait_time\r\n real_vst_cust = cust\r\n visit_cust[0] = cust\r\n 
visit_cust[1] = next_start\r\n visit_cust[2] = dist_mat[curr_cust, cust]\r\n visit_cust[3] = close_ij\r\n else:\r\n continue\r\n\r\n\r\n if visit_cust[0] == -1: # no customer to visit\r\n visit_cust[0] = 0\r\n visit_cust[1] = used_resource[-1] + time_mat[curr_cust, 0]\r\n on_way_time += time_mat[curr_cust, 0]\r\n else:\r\n # print(curr_cust, real_vst_cust, real_wait_time)\r\n if real_wait_time <= 0:\r\n on_way_time += (oprt_t + time_mat[curr_cust, real_vst_cust])\r\n else:\r\n on_way_time += (oprt_t + real_wait_time + time_mat[curr_cust, real_vst_cust])\r\n\r\n return visit_cust, on_way_time", "def safe_to_dance(self):\n # check for all fail/early-termination conditions\n for _ in range(4):\n if self.read_distance() < 300:\n print(\"NOT SAFE TO DANCE!\")\n return False\n else: \n self.turn_by_deg(90) \n\n #after all checks have been done. We deduce it's safe\n print(\"SAFE TO DANCE!\")\n return True\n\n for x in range(3): \n self.shake()", "def org_clump_tester(clump):\n tester = True\n for block in clump:\n if len(clump) >= 3: # clump should be block!\n tester = False\n break\n return tester", "def test_move_knight_illegally(self):\n self.c.board = [[(0, 0) for i in range(8)] for i in range(8)]\n for piece in [('N', True), ('N', False)]:\n self.c.turn = piece[1]\n self.c.board[4][4] = piece\n dests = [col + row for col in 'abcdefgh' for row in '12345678']\n for dest in dests:\n if dest in ['d6', 'f6', 'c5', 'g5', 'c3', 'g3', 'd2', 'f2']:\n continue\n self.groups['dest'] = dest\n self.assertRaises(\n MoveNotLegalError, self.c._knight_evaluator, self.groups)" ]
[ "0.6071524", "0.60635555", "0.6033704", "0.58507174", "0.5694917", "0.54907", "0.54892546", "0.54756105", "0.54441667", "0.54237115", "0.539738", "0.5397061", "0.53634334", "0.5358302", "0.53474575", "0.53194606", "0.528759", "0.52711314", "0.525795", "0.52506185", "0.52431774", "0.52264595", "0.52152735", "0.52108604", "0.5203587", "0.5183399", "0.51828176", "0.51827985", "0.5152528", "0.5150982" ]
0.67974746
0
Exchange the position of two customers (same route or not) if feasible, and see if it can cut the total cost.
def exchange_1_cust(self, sol_in3, cust, c_loc, curr_temp, sol_type3, sa_lns): route_ing = copy.deepcopy(sol_in3[c_loc[0]]) route_new_1 = route_ing route_new_2 = route_ing exch_to_route = c_loc[0] orgn_type1 = sol_type3[exch_to_route] origin_cost1 = check_violation(route_ing, orgn_type1)[1] # route_ing.remove(cust) # move c in the current route # adjust_cost1 = check_violation(route_ing)[1] best_cut_cost0 = -1000 best_cut_cost = best_cut_cost0 # best cost cut of moving this customer for j, rou in enumerate(sol_in3): orgn_type2 = sol_type3[j] origin_cost2 = check_violation(rou, orgn_type2)[1] if j == c_loc[0]: # exchange in the same route for k in range(1, len(rou)-1): if k == c_loc[1]: continue rou_test = copy.deepcopy(sol_in3[c_loc[0]]) rou_test[k], rou_test[c_loc[1]] = rou_test[c_loc[1]], rou_test[k] if check_violation(rou_test, orgn_type2)[0]: adjust_cost2 = check_violation(rou_test, orgn_type2)[1] cost_cut_test = origin_cost1 - adjust_cost2 if cost_cut_test > best_cut_cost: best_cut_cost = cost_cut_test route_new_1 = rou_test route_new_2 = rou_test exch_to_route = j else: # exchange to a different route for k in range(1, len(rou)-1): rou_test_1 = copy.deepcopy(sol_in3[c_loc[0]]) rou_test_2 = copy.deepcopy(rou) rou_test_1[c_loc[1]] = rou[k] rou_test_2[k] = cust if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]: new_type1 = route_type(rou_test_1) new_type2 = route_type(rou_test_2) adjust_cost1 = check_violation(rou_test_1, new_type1)[1] adjust_cost2 = check_violation(rou_test_2, new_type2)[1] cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2 if cost_cut_test > best_cut_cost: best_cut_cost = cost_cut_test route_new_1 = rou_test_1 route_new_2 = rou_test_2 exch_to_route = j if best_cut_cost > 1e-5: # print('exchange1 good', best_cut_cost) sol_in3[c_loc[0]] = route_new_1 sol_in3[exch_to_route] = route_new_2 sol_type3[c_loc[0]] = route_type(route_new_1) sol_type3[exch_to_route] = route_type(route_new_2) elif sa_lns and best_cut_cost < -1e-5: prb = random.uniform(0, 1) if np.exp(best_cut_cost / curr_temp) > prb: # print('exchange1', best_cut_cost) sol_in3[c_loc[0]] = route_new_1 sol_in3[exch_to_route] = route_new_2 sol_type3[c_loc[0]] = route_type(route_new_1) sol_type3[exch_to_route] = route_type(route_new_2) # return sol_in3
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exchange_2_cust(self, sol_in4, cust, c_loc, curr_temp, sol_type4, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in4[c_loc[0]])\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n cust_folw = route_ing[c_loc[1] + 1]\r\n exch_to_route = c_loc[0]\r\n origin_cost1 = check_violation(route_ing, sol_type4[c_loc[0]])[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in4):\r\n origin_cost2 = check_violation(rou, sol_type4[j])[1]\r\n if j != c_loc[0] and len(rou) >= 4: # exchange to a different route\r\n for k in range(1, len(rou) - 2):\r\n rou_test_1 = copy.deepcopy(sol_in4[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]], rou_test_1[c_loc[1] + 1] = rou[k], rou[k + 1]\r\n rou_test_2[k], rou_test_2[k + 1] = cust, cust_folw\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange2 good', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange2', best_cut_cost)\r\n sol_in4[c_loc[0]] = route_new_1\r\n sol_in4[exch_to_route] = route_new_2\r\n sol_type4[c_loc[0]] = route_type(route_new_1)\r\n sol_type4[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in4\r", "def shift_2_cust(self, sol_in2, cust, c_loc, curr_temp, sol_type2, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in2[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type2[c_loc[0]]\r\n cust_folw = route_ing[c_loc[1]+1]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in2):\r\n orgn_type2 = sol_type2[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n 
adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift2 good', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift2', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n # return sol_in2\r", "def interroute_2opt(route1, route2, customers):\n r1 = copy.deepcopy(route1)\n r2 = copy.deepcopy(route2)\n c1 = random.randint(1,r1.ncustomers)\n c2 = random.randint(1,r2.ncustomers)\n r1cs = r1.customers[:]\n r1.customers[c1:] = r2.customers[c2:]\n r1.ncustomers = len(r1.customers) - 2\n #print(r1cs, r1.customers, r1.ncustomers)\n r1.update(customers)\n if r1.violate_windows(customers): return False\n r2.customers[c2:] = r1cs[c1:]\n r2.ncustomers = len(r2.customers) - 2\n r2.update(customers)\n if r2.violate_windows(customers): return False\n route1.customers[:] = r1.customers\n route1.ncustomers = r1.ncustomers\n route1.update(customers)\n route2.customers[:] = r2.customers\n route2.ncustomers = r2.ncustomers\n route2.update(customers)\n print(\"succeed interroute 2opt\")\n return True", "def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in1[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type1[c_loc[0]]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # move c in the current route\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in1):\r\n orgn_type2 = sol_type1[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue # do not put it at the original position\r\n rou_test = route_ing[:k] + [cust] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust] + rou[k:]\r\n\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift1 good', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n 
sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost/curr_temp) > prb:\r\n # print('shift1', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n\r\n\r\n\r\n # return sol_in1\r", "def intraroute_2opt(route, customers):\n if route.ncustomers < 2: return False\n r = copy.deepcopy(route)\n c1 = random.randint(1,r.ncustomers-1)\n c2 = random.randint(c1+2,r.ncustomers+1)\n #print(c1, c2)\n r.customers[c1:c2] = r.customers[c1:c2][::-1]\n #print(r.customers)\n r.update(customers)\n if r.violate_windows(customers):\n return False\n else:\n route.customers[c1:c2] = r.customers[c1:c2]\n route.update(customers)\n print(\"succeed intraroute 2opt\")\n return True", "def shift_3_cust(self, sol_in6, cust, c_loc, curr_temp, sol_type6, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in6[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type6[c_loc[0]]\r\n cust_folw1 = route_ing[c_loc[1] + 1]\r\n cust_folw2 = route_ing[c_loc[1] + 2]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n del route_ing[c_loc[1]] # remove customer following following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in6):\r\n orgn_type2 = sol_type6[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw1, cust_folw2] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw1, cust_folw2] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift3 good', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift3', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving 
to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)", "def solve(customerCount, vehicleCount, vehicleCapacity, depotIndex, customers):\n \n N, locations, locations_r, distances, closest = precalculate(customers)\n \n #print locations\n #print locations_r\n angle_order = range(1, N)\n angle_order.sort(key=lambda i: (locations_r[i, 1], locations_r[i, 0])) \n \n vehicleTours = best_order(customerCount, customers, vehicleCount, vehicleCapacity, angle_order)\n if not vehicleTours:\n vehicleTours = solve0(customerCount, vehicleCount, vehicleCapacity, depotIndex, customers)\n check(customerCount, customers, vehicleCapacity, vehicleTours)\n vehicleTours = get_shortest_paths('file_path XXX', customers, depotIndex, vehicleTours)\n check(customerCount, customers, vehicleCapacity, vehicleTours)\n \n vehicleTours0 = copy.deepcopy(vehicleTours)\n dist0 = total_dist(customers, depotIndex, vehicleTours)\n if False:\n for _ in range(100):\n vehicleTours = copy.deepcopy(vehicleTours0) \n adjust_tours(customers, vehicleCapacity, vehicleCount, vehicleTours)\n vehicleTours = get_shortest_paths('file_path XXX', customers, depotIndex, vehicleTours)\n #check(customerCount, customers, vehicleCapacity, vehicleTours)\n if not is_valid(customerCount, customers, vehicleCapacity, vehicleTours):\n continue\n dist = total_dist(customers, depotIndex, vehicleTours)\n if dist < dist0:\n print '%s => %s' % (dist0, dist)\n vehicleTours0 = vehicleTours[:]\n dist0 = dist\n \n \n vehicleTours = copy.deepcopy(vehicleTours0) \n check(customerCount, customers, vehicleCapacity, vehicleTours)\n while len(vehicleTours) < vehicleCount:\n vehicleTours.append([])\n \n print '*', vehicleTours \n \n return vehicleTours", "def compare_switch_cost(self, x1, y1, x2, y2):\n cost = self.cost\n self.switch(x1,y1,x2,y2)\n deltaC = self.cost - cost\n return deltaC", "def factible_route_insertion(customer, position, route, customers):\n r = copy.deepcopy(route)\n r.insert(position, [customer], customers)\n return not r.violate_windows(customers)", "def switch(self, x1, y1, x2, y2):\n # both positions should not be empty\n assert (self.is_empty(x1, y1) is not True) or (self.is_empty(x2, y2) is not True)\n # x1,y1 is empty\n if self.is_empty(x1, y1):\n self.grid[y1][x1] = self.grid[y2][x2]\n self.cells[self.grid[y2][x2]].x = x1\n self.cells[self.grid[y2][x2]].y = y1\n self.grid[y2][x2] = ' '\n self.update_cost(self.grid[y1][x1])\n # x2,y2 is empty\n elif self.is_empty(x2, y2):\n self.grid[y2][x2] = self.grid[y1][x1]\n self.cells[self.grid[y1][x1]].x = x2\n self.cells[self.grid[y1][x1]].y = y2\n self.grid[y1][x1] = ' '\n self.update_cost(self.grid[y2][x2])\n else:\n n = self.grid[y2][x2]\n self.grid[y2][x2] = self.grid[y1][x1]\n self.cells[self.grid[y1][x1]].x = x2\n self.cells[self.grid[y1][x1]].y = y2\n self.grid[y1][x1] = n\n self.cells[n].x = x1\n self.cells[n].y = y1\n self.update_cost(self.grid[y1][x1])\n self.update_cost(self.grid[y2][x2])", "def confront(self, other):\n if other.__class__ != Defender:\n return - other.confront(self)\n else :\n if self.defense_cost < other.defense_cost:\n return 1\n elif self.defense_cost == other.defense_cost:\n return 0\n else:\n return -1", "def check_costs(self):\r\n if self.cost > self.owner.player.char_ob.currency:\r\n self.add_error(\r\n \"celebration_tier\",\r\n \"You cannot afford to pay the cost of %s.\" % self.cost,\r\n )", "def __collision_cost(self, x0, x1):\n d = np.linalg.norm(x0 - x1)\n cost = self.qc / (1 + np.exp(self.kappa * (d - 2 * 
self.radius)))\n return cost", "def traveling_salesman(destinations_1):\n # Instantiate the data problem.\n data = create_data_model()\n\n # NEW SPOT TO MAKE distance_matrix\n distance_matrix = compute_euclidean_distance_matrix(destinations_1)\n manager = pywrapcp.RoutingIndexManager(\n len(destinations_1), data['num_vehicles'], data['depot'])\n\n# # Create the routing index manager.\n# manager = pywrapcp.RoutingIndexManager(\n# len(data['locations']), data['num_vehicles'], data['depot'])\n\n # Create Routing Model.\n routing = pywrapcp.RoutingModel(manager)\n\n# distance_matrix = compute_euclidean_distance_matrix(data['locations'])\n\n def distance_callback(from_index, to_index):\n \"\"\"Returns the distance between the two nodes.\"\"\"\n # Convert from routing variable Index to distance matrix NodeIndex.\n from_node = manager.IndexToNode(from_index)\n to_node = manager.IndexToNode(to_index)\n return distance_matrix[from_node][to_node]\n\n transit_callback_index = routing.RegisterTransitCallback(distance_callback)\n\n # Define cost of each arc.\n routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)\n\n # Setting first solution heuristic.\n search_parameters = pywrapcp.DefaultRoutingSearchParameters()\n search_parameters.first_solution_strategy = (\n routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)\n\n # Solve the problem.\n assignment = routing.SolveWithParameters(search_parameters)\n\n # Print solution on console.\n# if assignment:\n# print_solution(manager, routing, assignment)\n if assignment:\n address1,address2,address3,address4,address5,address6,address7,address8,address9,address10=\\\n set_address_path(manager, routing, assignment,destinations_1)\n return address1,address2,address3,address4,address5,address6,address7,address8,address9,address10", "def geo_dist_penalty(p_a, p_b): # created on Nov.3 2019\n\n # Offset is 0 for the 1st destination.\n distance_matrix = Network.dist_mat\n if p_a[0] != p_b[0] or p_a[-1] != p_b[-1]:\n raise ValueError('Paths have different o or d.')\n\n # define the penalty in utility form for every two destinations. 
u_ik stands for the generalized cost of travel\n o, d = p_a[0], p_a[-1]\n\n path_a, path_b = p_a[1:-1], p_b[1:-1] # excluding origin and destination\n\n path_node_check = []\n for _path in [path_a, path_b]:\n _new_path = []\n for node in _path:\n if node <= min(distance_matrix.shape) - 1:\n _new_path.append(node)\n path_node_check.append(_new_path)\n path_a, path_b = path_node_check[0], path_node_check[1]\n\n # utility (negative) penalty evaluation\n cost, a, b = 0, o, o # let a, b be origin\n\n # if exist empty path\n if not path_a: # if observed path is empty\n return cost\n\n while path_a and path_b:\n a, b = path_a.pop(0), path_b.pop(0) # a, b correspond to the i_th node in path_a, path_b\n cost += distance_matrix[a][b]\n\n if path_a: # length of path_a > path b\n while path_a:\n a = path_a.pop(0)\n cost += distance_matrix[a][b]\n else: # case when length of path_b > path a\n while path_b:\n b = path_b.pop(0)\n cost += distance_matrix[a][b]\n return cost", "def subst_cost(c0, c1): # Beräknar kostnaden efter att det blivit länkat\r\n return 0 if c0 == c1 else 2 # Om charachter 0 är samma som charachter 1 kostar det 0 annars 2.\r", "def use(self):\n if self.credit < self.price_of_trip:\n print(\"Your credit is not enough, please increase your credit\")\n else:\n self.credit -= self.price_of_trip\n print(\"Done\")", "def cost(self,e1,e2):\n pass", "def twoCitySchedCost(self, costs: List[List[int]]) -> int:\n def abs_diff_compare(x=[1,2], y=[3,4]):\n x1=x[0]-x[1] if x[0]>=x[1] else x[1]-x[0]\n y1=y[0]-y[1] if y[0]>=y[1] else y[1]-x[0]\n return x1 if x1>y1 else y1\n save_costs=costs\n costs.sort(reverse=True, key=abs_diff_compare)\n print(costs)\n nA=0\n nB=0\n tc=0\n num_people=len(costs)\n max_ppl=num_people//2\n \n for x in costs:\n if nA< max_ppl and nB< max_ppl :\n if x[0] <x[1]:\n nA+=1\n tc+=x[0]\n else:\n nB+=1\n tc+=x[1]\n elif nA< max_ppl:\n nA+=1\n tc+=x[0]\n elif nB< max_ppl:\n nB+=1\n tc+=x[1]\n \n \n return tc", "def CalcBeerToDeliver(self):\r\n deliveryQuantity = 0\r\n \r\n #If we can fill the customer's order, we must do it.\r\n if self.currentStock >= self.currentOrders:\r\n deliveryQuantity = self.currentOrders\r\n self.currentStock -= deliveryQuantity\r\n self.currentOrders -= deliveryQuantity\r\n #If the current stock cannot cover the order, we must fill as much as we can, and back-order the rest.\r\n elif self.currentStock >= 0 and self.currentStock < self.currentOrders:\r\n deliveryQuantity = self.currentStock\r\n self.currentStock = 0\r\n self.currentOrders -= deliveryQuantity\r\n\r\n return deliveryQuantity", "def newCostCalc(dfNew, curCost,a,b):\n a1,a2,a3 = dfNew.iloc[a-2],dfNew.iloc[a-1],dfNew.iloc[a]\n b1,b2,b3 = dfNew.iloc[b-2],dfNew.iloc[b-1],dfNew.iloc[b]\n reCalc = curCost\n reCalc = reCalc - euclideanDistance(a1['x'],a1['y'],a2['x'],a2['y'])\n reCalc = reCalc - euclideanDistance(a2['x'],a2['y'],a3['x'],a3['y'])\n reCalc = reCalc - euclideanDistance(b1['x'],b1['y'],b2['x'],b2['y'])\n reCalc = reCalc - euclideanDistance(b2['x'],b2['y'],b3['x'],b3['y'])\n\n x, y = dfNew.iloc[a-1].copy(), dfNew.iloc[b-1].copy()\n dfNew.iloc[a-1],dfNew.iloc[b-1] = y,x\n\n a1,a2,a3 = dfNew.iloc[a-2],dfNew.iloc[a-1],dfNew.iloc[a]\n b1,b2,b3 = dfNew.iloc[b-2],dfNew.iloc[b-1],dfNew.iloc[b]\n\n reCalc = reCalc + euclideanDistance(a1['x'],a1['y'],a2['x'],a2['y'])\n reCalc = reCalc + euclideanDistance(a2['x'],a2['y'],a3['x'],a3['y'])\n reCalc = reCalc + euclideanDistance(b1['x'],b1['y'],b2['x'],b2['y'])\n reCalc = reCalc + euclideanDistance(b2['x'],b2['y'],b3['x'],b3['y'])\n\n x, y = 
dfNew.iloc[a-1].copy(), dfNew.iloc[b-1].copy()\n dfNew.iloc[a-1],dfNew.iloc[b-1] = y,x\n\n return reCalc", "def calc_capital_costs (self):\n road_needed = 'road needed'\n if self.cd['on road system']:\n road_needed = 'road not needed'\n\n dist = self.comp_specs['distance to community']\n self.capital_costs = self.comp_specs['est. intertie cost per mile']\\\n [road_needed] * dist\n #~ print self.capital_costs", "def nearest_neigbor(self, pc):\n coord = get_coordinates(pc)\n # deliveries\n pdist_deliv = {haversine(coord[0], coord[1], pcoord[1][0], pcoord[1][1]):pc for pc, pcoord in self.state.D_k.items()}\n pdist_list_deliv = list(pdist_deliv.keys())\n if len(pdist_list_deliv) > 0:\n val_deliv_min = min(pdist_list_deliv)\n else:\n val_deliv_min = 1e6 # great value to be discarded when comparing with val_pickup_min\n # pickups\n pdist_pickup = {haversine(coord[0], coord[1], pcoord[-1][0], pcoord[-1][1]):pc for pc, pcoord in self.state.P_k.items()}\n pdist_list_pickup = list(pdist_pickup.keys())\n\n if len(pdist_list_pickup) > 0:\n val_pickup_min = min(pdist_list_pickup)\n else:\n val_pickup_min = 1e6 # great value to be discarded when comparing with val_pickup_min\n\n if val_deliv_min == val_pickup_min and val_deliv_min == 1e6:\n print(\"All jobs completed: go to wait or stop if it's 12pm\")\n return 0\n\n if val_deliv_min < val_pickup_min:\n return pdist_deliv[val_deliv_min]\n\n elif val_deliv_min >= val_pickup_min:\n return pdist_pickup[val_pickup_min]\n else:\n raise valueError('Impossible comparison between val_deliv_min and val_pickup_min ')", "def savings2routes(self,r1,r2):\n newRoute = VRP_Route(r1.route+r2.route)\n newRoute.update_route(self.vrpdata) # compute distance, quantity for newRoute, check whether valid\n if newRoute.tourValid:\n return r1.distance + r2.distance - newRoute.distance\n return -1", "def check_order(customer_name, expected_cost, customer_paid):\n\n if expected_cost != customer_paid:\n if expected_cost > customer_paid:\n payment_status = \"UNDERPAID\"\n else: \n payment_status = \"OVERPAID\"\n\n print(f\" {customer_name} {payment_status} for their melons!\")\n\n else:\n pass", "def noSol(self):\n noSol = False \n\n cost_min_bilet = 100000\n\n for a in self.info.autobuze:\n if a.price < cost_min_bilet:\n cost_min_bilet = a.price\n\n for o in self.info.oameni:\n if o.money < cost_min_bilet and o.remaining_dest != []: \n noSol = True\n break\n \n set_destinatii = set()\n\n for o in self.info.oameni:\n if o.current_loc in set_destinatii:\n noSol = True\n break\n else:\n set_destinatii.add(o.current_loc)\n\n return noSol", "def movable_intraroute_customers(route, customers):\n mcust = []\n for c in range(route.ncustomers):\n if len(factible_route_positions(route.customers[c+1],\n route,customers)) > 1:\n mcust.append(c)\n return mcust", "def check_violation(route, vehicle_type):\r\n if len(route) == 2: # [0, 0] route\r\n return True, 0, 0, 0\r\n else:\r\n accu_res = [0, 0, 0] # 0-leaving time, 1-accumulated distance, 2-volume\r\n if vehicle_type == 2:\r\n veh_cap = small_veh\r\n elif vehicle_type == 3:\r\n veh_cap = medium_veh\r\n elif vehicle_type == 5:\r\n veh_cap = large_veh\r\n else:\r\n veh_cap = large_veh\r\n print('Input wrong vehicle type!', vehicle_type)\r\n # small_veh = [1, 12, 10, 400000, 0.012, 200]\r\n fixed_cost = veh_cap[5]\r\n trans_cost = 0\r\n # wait_cost = 0\r\n if time_mat[0, route[1]] < num_timez[route[1]][0]:\r\n accu_res[0] = num_timez[route[1]][0] - time_mat[0, route[1]] # vehicle leaving depot time\r\n depart_time = accu_res[0] # departing 
from depot time\r\n else:\r\n depart_time = 0\r\n for i in range(len(route) - 1):\r\n last_cust = route[i]\r\n curr_cust = route[i+1]\r\n # checking leaving time\r\n arr_time = accu_res[0] + time_mat[last_cust, curr_cust]\r\n if arr_time < num_timez[curr_cust][0]:\r\n accu_res[0] = num_timez[curr_cust][0] + oprt_t\r\n wait_time = num_timez[curr_cust][0] - arr_time\r\n # wait_cost += (wait_time / 60. * wait_cost0)\r\n elif arr_time <= num_timez[curr_cust][1]:\r\n accu_res[0] = arr_time + oprt_t\r\n else:\r\n # print('Infeasible route!(Service Time Error.)')\r\n return False, 1000000, 0, 0\r\n\r\n # checking vehicle max distance\r\n trans_cost += (dist_mat[last_cust, curr_cust] * veh_cap[4])\r\n\r\n accu_res[1] += dist_mat[last_cust, curr_cust]\r\n\r\n if accu_res[0] - oprt_t - depart_time > veh_cap[3]:\r\n # print('Infeasible route!(Max Time Error.)')\r\n return False, 1000000, 0, 0\r\n\r\n # checking vehicle max volume\r\n accu_res[2] += (num_demd[curr_cust][0] * bskt_vol + num_demd[curr_cust][1] * trsf_vol + (num_demd[curr_cust][2]\r\n + num_demd[curr_cust][3]) * milk_vol + num_demd[curr_cust][4] * paper_bskt)\r\n\r\n if accu_res[2] > veh_cap[2]:\r\n # print('Infeasible route!(Max Weight/Volume Error.)', accu_res[2])\r\n return False, 1000000, 0, 0\r\n route_cost = fixed_cost + accu_res[1] * veh_cap[4]\r\n route_dist = accu_res[1]\r\n route_time = accu_res[0] - oprt_t - depart_time\r\n # print fixed_cost, trvl_cost, trvl_dist\r\n return True, route_cost, route_time, depart_time + 600", "def check_transaction(menu, drink, resources):\r\n customer_money = process_coins()\r\n drink_cost = menu[drink]['cost']\r\n if customer_money < drink_cost:\r\n print(\"Sorry that's not enough money.Money refunded\")\r\n return False\r\n else:\r\n if customer_money > drink_cost:\r\n change = round((customer_money - drink_cost), 2)\r\n print(f\"Here is your ${change} in change\")\r\n resources['Money'] += drink_cost\r\n return True", "def get_cost(self):\n if self.distance == 0:\n for i in range(1, len(self.cities) + 1):\n point1 = self.cities[i - 1]\n point2 = self.cities[i % len(self.cities)]\n self.distance += self.distance_to(point1, point2)\n return self.distance" ]
[ "0.69025654", "0.6613349", "0.6586741", "0.64510596", "0.61748725", "0.60695565", "0.583845", "0.5822859", "0.57592994", "0.56153905", "0.55894005", "0.5581832", "0.55210274", "0.55059487", "0.54630905", "0.54596806", "0.54371494", "0.5428905", "0.53805614", "0.5376348", "0.5364522", "0.5335114", "0.53296775", "0.5318634", "0.5312875", "0.52938944", "0.5266395", "0.5247891", "0.5240129", "0.5233034" ]
0.6756957
1
Exchange 2 consecutive customers' positions with another 2 customers' positions, and see if the exchange can cut the total cost.
def exchange_2_cust(self, sol_in4, cust, c_loc, curr_temp, sol_type4, sa_lns):

    route_ing = copy.deepcopy(sol_in4[c_loc[0]])

    route_new_1 = route_ing
    route_new_2 = route_ing
    cust_folw = route_ing[c_loc[1] + 1]
    exch_to_route = c_loc[0]
    origin_cost1 = check_violation(route_ing, sol_type4[c_loc[0]])[1]
    # route_ing.remove(cust)  # move c in the current route
    # adjust_cost1 = check_violation(route_ing)[1]
    best_cut_cost0 = -1000
    best_cut_cost = best_cut_cost0  # best cost cut of moving this customer
    for j, rou in enumerate(sol_in4):
        origin_cost2 = check_violation(rou, sol_type4[j])[1]
        if j != c_loc[0] and len(rou) >= 4:  # exchange to a different route
            for k in range(1, len(rou) - 2):
                rou_test_1 = copy.deepcopy(sol_in4[c_loc[0]])
                rou_test_2 = copy.deepcopy(rou)
                rou_test_1[c_loc[1]], rou_test_1[c_loc[1] + 1] = rou[k], rou[k + 1]
                rou_test_2[k], rou_test_2[k + 1] = cust, cust_folw
                if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:
                    new_type1 = route_type(rou_test_1)
                    new_type2 = route_type(rou_test_2)
                    adjust_cost1 = check_violation(rou_test_1, new_type1)[1]
                    adjust_cost2 = check_violation(rou_test_2, new_type2)[1]
                    cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2
                    if cost_cut_test > best_cut_cost:
                        best_cut_cost = cost_cut_test
                        route_new_1 = rou_test_1
                        route_new_2 = rou_test_2
                        exch_to_route = j

    if best_cut_cost > 1e-5:
        # print('exchange2 good', best_cut_cost)
        sol_in4[c_loc[0]] = route_new_1
        sol_in4[exch_to_route] = route_new_2
        sol_type4[c_loc[0]] = route_type(route_new_1)
        sol_type4[exch_to_route] = route_type(route_new_2)

    elif sa_lns and best_cut_cost < -1e-5:
        prb = random.uniform(0, 1)
        if np.exp(best_cut_cost / curr_temp) > prb:
            # print('exchange2', best_cut_cost)
            sol_in4[c_loc[0]] = route_new_1
            sol_in4[exch_to_route] = route_new_2
            sol_type4[c_loc[0]] = route_type(route_new_1)
            sol_type4[exch_to_route] = route_type(route_new_2)

    # return sol_in4
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shift_2_cust(self, sol_in2, cust, c_loc, curr_temp, sol_type2, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in2[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type2[c_loc[0]]\r\n cust_folw = route_ing[c_loc[1]+1]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in2):\r\n orgn_type2 = sol_type2[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift2 good', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift2', best_cut_cost)\r\n sol_in2[move_to_route] = route_new\r\n sol_type2[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in2[c_loc[0]] = route_ing\r\n sol_type2[c_loc[0]] = route_type(route_ing)\r\n\r\n # return sol_in2\r", "def interroute_2opt(route1, route2, customers):\n r1 = copy.deepcopy(route1)\n r2 = copy.deepcopy(route2)\n c1 = random.randint(1,r1.ncustomers)\n c2 = random.randint(1,r2.ncustomers)\n r1cs = r1.customers[:]\n r1.customers[c1:] = r2.customers[c2:]\n r1.ncustomers = len(r1.customers) - 2\n #print(r1cs, r1.customers, r1.ncustomers)\n r1.update(customers)\n if r1.violate_windows(customers): return False\n r2.customers[c2:] = r1cs[c1:]\n r2.ncustomers = len(r2.customers) - 2\n r2.update(customers)\n if r2.violate_windows(customers): return False\n route1.customers[:] = r1.customers\n route1.ncustomers = r1.ncustomers\n route1.update(customers)\n route2.customers[:] = r2.customers\n route2.ncustomers = r2.ncustomers\n route2.update(customers)\n print(\"succeed interroute 2opt\")\n return True", "def exchange_1_cust(self, sol_in3, cust, c_loc, curr_temp, sol_type3, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in3[c_loc[0]])\r\n\r\n route_new_1 = route_ing\r\n route_new_2 = route_ing\r\n exch_to_route = c_loc[0]\r\n orgn_type1 = sol_type3[exch_to_route]\r\n origin_cost1 = check_violation(route_ing, 
orgn_type1)[1]\r\n # route_ing.remove(cust) # move c in the current route\r\n # adjust_cost1 = check_violation(route_ing)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in3):\r\n orgn_type2 = sol_type3[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # exchange in the same route\r\n for k in range(1, len(rou)-1):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test[k], rou_test[c_loc[1]] = rou_test[c_loc[1]], rou_test[k]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test\r\n route_new_2 = rou_test\r\n exch_to_route = j\r\n\r\n else: # exchange to a different route\r\n for k in range(1, len(rou)-1):\r\n rou_test_1 = copy.deepcopy(sol_in3[c_loc[0]])\r\n rou_test_2 = copy.deepcopy(rou)\r\n rou_test_1[c_loc[1]] = rou[k]\r\n rou_test_2[k] = cust\r\n if check_violation(rou_test_1, 5)[0] and check_violation(rou_test_2, 5)[0]:\r\n new_type1 = route_type(rou_test_1)\r\n new_type2 = route_type(rou_test_2)\r\n adjust_cost1 = check_violation(rou_test_1, new_type1)[1]\r\n adjust_cost2 = check_violation(rou_test_2, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new_1 = rou_test_1\r\n route_new_2 = rou_test_2\r\n exch_to_route = j\r\n\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('exchange1 good', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('exchange1', best_cut_cost)\r\n sol_in3[c_loc[0]] = route_new_1\r\n sol_in3[exch_to_route] = route_new_2\r\n sol_type3[c_loc[0]] = route_type(route_new_1)\r\n sol_type3[exch_to_route] = route_type(route_new_2)\r\n\r\n # return sol_in3\r", "def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in1[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type1[c_loc[0]]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # move c in the current route\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in1):\r\n orgn_type2 = sol_type1[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue # do not put it at the original position\r\n rou_test = route_ing[:k] + [cust] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust] + 
rou[k:]\r\n\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift1 good', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost/curr_temp) > prb:\r\n # print('shift1', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n\r\n\r\n\r\n # return sol_in1\r", "def compare_switch_cost(self, x1, y1, x2, y2):\n cost = self.cost\n self.switch(x1,y1,x2,y2)\n deltaC = self.cost - cost\n return deltaC", "def intraroute_2opt(route, customers):\n if route.ncustomers < 2: return False\n r = copy.deepcopy(route)\n c1 = random.randint(1,r.ncustomers-1)\n c2 = random.randint(c1+2,r.ncustomers+1)\n #print(c1, c2)\n r.customers[c1:c2] = r.customers[c1:c2][::-1]\n #print(r.customers)\n r.update(customers)\n if r.violate_windows(customers):\n return False\n else:\n route.customers[c1:c2] = r.customers[c1:c2]\n route.update(customers)\n print(\"succeed intraroute 2opt\")\n return True", "def shift_3_cust(self, sol_in6, cust, c_loc, curr_temp, sol_type6, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in6[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type6[c_loc[0]]\r\n cust_folw1 = route_ing[c_loc[1] + 1]\r\n cust_folw2 = route_ing[c_loc[1] + 2]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # remove c in the current route\r\n del route_ing[c_loc[1]] # remove customer following c\r\n del route_ing[c_loc[1]] # remove customer following following c\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in6):\r\n orgn_type2 = sol_type6[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue\r\n rou_test = route_ing[:k] + [cust, cust_folw1, cust_folw2] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust, cust_folw1, cust_folw2] + rou[k:]\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n 
if best_cut_cost > 1e-5:\r\n # print('shift3 good', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)\r\n\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost / curr_temp) > prb:\r\n # print('shift3', best_cut_cost)\r\n sol_in6[move_to_route] = route_new\r\n sol_type6[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in6[c_loc[0]] = route_ing\r\n sol_type6[c_loc[0]] = route_type(route_ing)", "def trade(first, second):\n m, n = 0, 0\n def cumsum(a):\n from collections import OrderedDict\n dict_cumsum = OrderedDict()\n total = 0\n for i, elem in enumerate(a):\n total += elem\n dict_cumsum[total] = i + 1\n return dict_cumsum\n\n cumsum_first = cumsum(first)\n cumsum_second = cumsum(second)\n\n for total in cumsum_second:\n if total in cumsum_first:\n m, n = cumsum_first[total], cumsum_second[total]\n break\n\n if m * n: # change this line!\n first[:m], second[:n] = second[:n], first[:m]\n return 'Deal!'\n else:\n return 'No deal!'", "def crossover_two_candidates(self, character_1: int, character_2: int):\n p = [(0, 5), (0, 6), (0, 11), (0, 12), (6, 6), (6, 11), (6, 12), (6, 15), (7, 11), (7, 12), (7, 15),\n (12, 12), (12, 15)]\n\n candidate_1 = self.characters[character_1]\n candidate_2 = self.characters[character_2]\n\n cut = random.choice(p)\n # generate list with values 0\n c_1_list: List = list(np.zeros(16))\n c_2_list: List = list(np.zeros(16))\n\n # fill the list with the correct values\n for i in range(cut[0], cut[1]+1):\n c_1_list[i] = candidate_1.dna_generator.dna.get_dna_value(i)\n c_2_list[i] = candidate_2.dna_generator.dna.get_dna_value(i)\n\n # change the candidates with the list values\n for i in range(cut[0], cut[1]+1):\n candidate_1.dna_generator.dna.set_dna_value(c_2_list[i], i)\n candidate_2.dna_generator.dna.set_dna_value(c_1_list[i], i)\n\n self.characters[character_1] = candidate_1\n self.characters[character_2] = candidate_2", "def twoCitySchedCost(self, costs: List[List[int]]) -> int:\n def abs_diff_compare(x=[1,2], y=[3,4]):\n x1=x[0]-x[1] if x[0]>=x[1] else x[1]-x[0]\n y1=y[0]-y[1] if y[0]>=y[1] else y[1]-x[0]\n return x1 if x1>y1 else y1\n save_costs=costs\n costs.sort(reverse=True, key=abs_diff_compare)\n print(costs)\n nA=0\n nB=0\n tc=0\n num_people=len(costs)\n max_ppl=num_people//2\n \n for x in costs:\n if nA< max_ppl and nB< max_ppl :\n if x[0] <x[1]:\n nA+=1\n tc+=x[0]\n else:\n nB+=1\n tc+=x[1]\n elif nA< max_ppl:\n nA+=1\n tc+=x[0]\n elif nB< max_ppl:\n nB+=1\n tc+=x[1]\n \n \n return tc", "def test_exchange_securities(self):\n\n # =================================================================\n # test: one sell order and four buy orders\n #\n # expected transactions:\n #\n # SELL: 2, BUY: 3, price: 8.00, amount: 1\n # SELL: 2, BUY: 4, price: 8.00, amount: 2\n # SELL: 2, BUY: 7, price: 8.00, amount: 2\n # SELL: 2, BUY: 8, price: 8.00, amount: 2\n # SELL: 2, BUY: 11, price: 8.00, amount: 2\n # SELL: 2, BUY: 12, price: 8.00, amount: 1\n #\n # SELL: 5, BUY: 7, price: 10.00, amount: 2\n # SELL: 5, BUY: 8, price: 10.00, amount: 2\n # SELL: 5, BUY: 11, price: 10.00, amount: 1\n #\n # SELL: 6, BUY: 7, price: 10.00, amount: 1\n # SELL: 6, BUY: 8, price: 10.00, amount: 1\n # SELL: 6, BUY: 11, price: 10.00, amount: 2\n # SELL: 6, BUY: 12, price: 10.00, 
amount: 1\n #\n # SELL: 9, BUY: 11, price: 12.00, amount: 4\n #\n # SELL: 10, BUY: 11, price: 12.00, amount: 1\n # SELL: 10, BUY: 12, price: 12.00, amount: 3\n # =================================================================\n\n sell_orders = [StockOrderWrapper(self.order_2), StockOrderWrapper(self.order_5),\n StockOrderWrapper(self.order_6), StockOrderWrapper(self.order_9),\n StockOrderWrapper(self.order_10), StockOrderWrapper(self.order_13)]\n buy_orders = [StockOrderWrapper(self.order_1), StockOrderWrapper(self.order_3),\n StockOrderWrapper(self.order_4), StockOrderWrapper(self.order_7),\n StockOrderWrapper(self.order_8), StockOrderWrapper(self.order_11),\n StockOrderWrapper(self.order_12)]\n\n transactions = self.bidding_round_manager.exchange_round(sell_orders=sell_orders, buy_orders=buy_orders)\n transactions = sorted(transactions, key=lambda x: (x.sell.order_id, x.buy.order_id))\n transactions_iterator = iter(transactions)\n\n trans_exp_buy_3_sell_2 = Transaction(buy=self.order_3, sell=self.order_2, share_amount=1,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_4_sell_2 = Transaction(buy=self.order_4, sell=self.order_2, share_amount=2,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_7_sell_2 = Transaction(buy=self.order_7, sell=self.order_2, share_amount=2,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_8_sell_2 = Transaction(buy=self.order_8, sell=self.order_2, share_amount=2,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_11_sell_2 = Transaction(buy=self.order_11, sell=self.order_2, share_amount=2,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_12_sell_2 = Transaction(buy=self.order_12, sell=self.order_2, share_amount=1,\n share_price=self.order_2.order_price_per_share,\n transaction_status=PROCESSED)\n\n trans_exp_buy_7_sell_5 = Transaction(buy=self.order_7, sell=self.order_5, share_amount=2,\n share_price=self.order_5.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_8_sell_5 = Transaction(buy=self.order_8, sell=self.order_5, share_amount=2,\n share_price=self.order_5.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_11_sell_5 = Transaction(buy=self.order_11, sell=self.order_5, share_amount=1,\n share_price=self.order_5.order_price_per_share,\n transaction_status=PROCESSED)\n\n trans_exp_buy_7_sell_6 = Transaction(buy=self.order_7, sell=self.order_6, share_amount=1,\n share_price=self.order_6.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_8_sell_6 = Transaction(buy=self.order_8, sell=self.order_6, share_amount=1,\n share_price=self.order_6.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_11_sell_6 = Transaction(buy=self.order_11, sell=self.order_6, share_amount=2,\n share_price=self.order_6.order_price_per_share,\n transaction_status=PROCESSED)\n trans_exp_buy_12_sell_6 = Transaction(buy=self.order_12, sell=self.order_6, share_amount=1,\n share_price=self.order_6.order_price_per_share,\n transaction_status=PROCESSED)\n\n trans_exp_buy_11_sell_9 = Transaction(buy=self.order_11, sell=self.order_9, share_amount=4,\n share_price=self.order_9.order_price_per_share,\n transaction_status=PROCESSED)\n\n trans_exp_buy_11_sell_10 = Transaction(buy=self.order_11, sell=self.order_10, share_amount=1,\n share_price=self.order_10.order_price_per_share,\n 
transaction_status=PROCESSED)\n trans_exp_buy_12_sell_10 = Transaction(buy=self.order_12, sell=self.order_10, share_amount=3,\n share_price=self.order_10.order_price_per_share,\n transaction_status=PROCESSED)\n\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_3_sell_2)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_4_sell_2)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_7_sell_2)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_8_sell_2)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_11_sell_2)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_12_sell_2)\n\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_7_sell_5)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_8_sell_5)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_11_sell_5)\n\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_7_sell_6)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_8_sell_6)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_11_sell_6)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_12_sell_6)\n\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_11_sell_9)\n\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_11_sell_10)\n self.is_equal_transaction(transactions_iterator.next(), trans_exp_buy_12_sell_10)", "def cross(bid, offer):\n if bid.price >= offer.price:\n remaining_qty = min(offer.qty - bid.qty, MAX_QTY)\n return (True, remaining_qty)\n\n return (False, 0)", "def test_cancel_of_tmsc_with_multiplied_amount(self):\n entity_a1 = self.entities[1]\n\n # 1. A1 starts with 50.0 TMSC, 0 TIndiv1\n self.check_balance(entity_a1.address, TMSC, '50.00000000', '0.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651\n\n # 2. A1 offers 5.0 TMSC for 1250 TIndiv1\n # 3. offers 5.0 TMSC for 1250 TIndiv1\n entity_a1.trade('5.00000000', TMSC, '1250', TIndiv1, ADD_1)\n entity_a1.trade('5.00000000', TMSC, '1250', TIndiv1, ADD_1)\n self.generate_block()\n self.check_balance(entity_a1.address, TMSC, '40.00000000', '10.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651\n\n # 4. A1 cancels 40.0 TMSC for 10000 TIndiv1 (cancel-at-price)\n entity_a1.trade('40.00000000', TMSC, '10000', TIndiv1, CANCEL_2)\n self.generate_block()\n self.check_balance(entity_a1.address, TMSC, '50.00000000', '0.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651", "def test_cancel_of_msc_with_multiplied_amount(self):\n entity_a1 = self.entities[1]\n\n # 1. A1 starts with 50.0 MSC, 0.0 MDiv1\n self.check_balance(entity_a1.address, MSC, '50.00000000', '0.00000000') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4\n\n # 2. A1 offers 0.00000001 MSC for 2.5 MDiv1\n # 3. 0.00000002 MSC for 5.0 MDiv1\n # 4. 0.1 MSC for 25000000.0 MDiv1\n entity_a1.trade('0.00000001', MSC, '2.50000000', MDiv1, ADD_1)\n entity_a1.trade('0.00000002', MSC, '5.00000000', MDiv1, ADD_1)\n entity_a1.trade('0.10000000', MSC, '25000000.00000000', MDiv1, ADD_1)\n self.generate_block()\n self.check_balance(entity_a1.address, MSC, '49.89999997', '0.10000003') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4\n\n # 5. 
A1 cancels 20.0 MSC for 5000000000.0 MDiv1 (cancel-at-price)\n entity_a1.trade('20.00000000', MSC, '5000000000.00000000', MDiv1, CANCEL_2)\n self.generate_block()\n self.check_balance(entity_a1.address, MSC, '50.00000000', '0.00000000') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4", "def ccw(a, b, c):\n return (c.y - a.y) * (b.x - a.x) > (b.y - a.y) * (c.x - a.x)", "def test_buy_both_stages(chain: TestRPCChain, preico: Contract, actual_ico: Contract, uncapped_token: Contract, customer, customer_2, preico_starts_at, preico_ends_at, actual_ico_starts_at, actual_ico_ends_at, flat_pricing, final_pricing):\n\n # The token contract used in this test\n token = uncapped_token\n\n # First buy tokens when pre-ICO is open\n first_buy = to_wei(100000, \"ether\")\n first_batch = flat_pricing.call().calculatePrice(first_buy, 0, 0)\n time_travel(chain, preico_starts_at + 1)\n assert preico.call().getState() == CrowdsaleState.Funding\n assert actual_ico.call().getState() == CrowdsaleState.PreFunding\n preico.transact({\"from\": customer, \"value\": first_buy}).buy()\n\n # Finalize the pre-ICO does nothing, but can be still called\n time_travel(chain, preico_ends_at + 1)\n assert preico.call().getState() == CrowdsaleState.Success\n preico.transact({\"from\": customer}).finalize()\n assert not token.call().released() # Still on hold\n\n # Then buy more tokens when the actual ICO is open\n time_travel(chain, actual_ico_starts_at + 1)\n assert actual_ico.call().getState() == CrowdsaleState.Funding\n second_buy = to_wei(2, \"ether\")\n second_batch = final_pricing.call().calculatePrice(second_buy, 0, 0)\n actual_ico.transact({\"from\": customer, \"value\": second_buy}).buy()\n\n # Close the actual ICO and check tokens are transferable\n time_travel(chain, actual_ico_ends_at + 1)\n assert actual_ico.call().getState() == CrowdsaleState.Success\n actual_ico.transact({\"from\": customer}).finalize()\n assert actual_ico.call().finalized()\n assert token.call().released()\n\n # We got our tokens from both ICO buys\n token.call().balanceOf(customer) == first_batch + second_batch\n\n # Transfer tokens between accounts\n token.transact({\"from\": customer}).transfer(customer_2, 2000)\n assert token.call().balanceOf(customer_2) == 2000", "def costDeletion(el1,el2):\r\n return 1", "def costInsertion(el1,el2):\r\n return 2", "def transact(self, transaction_type, digicoins_No):\n\n #Raise an exception of digicoins_No is not multiple of 10.\n try:\n if digicoins_No % 10 != 0:\n raise MyError.MyError(digicoins_No)\n except Exception as inst:\n print \"\\nYou can only transact multiples of 10 of digicoins.\\nTransaction Failed!\"\n return\n\n lowest_price = 0\n digicoins_remain = digicoins_No\n while digicoins_remain > 0:\n if digicoins_remain > 100:\n digicoins_No_to_be_transacted = 100\n else:\n digicoins_No_to_be_transacted = digicoins_remain\n\n A_price = self.Broker1.offered_price(digicoins_No_to_be_transacted)\n B_price = self.Broker2.offered_price(digicoins_No_to_be_transacted)\n\n if A_price < B_price:\n self.Broker1.execute_transaction(digicoins_No_to_be_transacted)\n lowest_price += A_price\n else:\n self.Broker2.execute_transaction(digicoins_No_to_be_transacted)\n lowest_price += B_price\n digicoins_remain -= 100\n\n if transaction_type == \"BUY\":\n print self.name, \"buys\", digicoins_No_to_be_transacted, \"at\", lowest_price\n #update the clients list with a pair [price, digicoins]\n self.transactions.append([lowest_price, digicoins_No])\n else:\n print self.name, 
\"sells\", digicoins_No_to_be_transacted, \"at\", lowest_price\n self.transactions.append([lowest_price, -digicoins_No])", "def cost(self,e1,e2):\n pass", "def cut_out(a0, a1, b0, b1, c0, c1):\r\n\t\t# assert a[0] >= b[0] and b[0] >= c[0]\r\n\t\treturn (a1-c1)*(b0-a0) <= (a1-b1)*(c0-a0)", "def isComrade(self, other): # are the pieces comrades ?\r\n \r\n if self.name == other.name: \r\n return True\r\n else:\r\n return False", "def test_market_1_2(self):\n\n def check_1_2(buyers: List[float], sellers: List[float], expected_num_of_deals: int,\n expected_prices: List[float]):\n market = Market([\n AgentCategory(\"buyer\", buyers),\n AgentCategory(\"seller\", sellers),\n ])\n ps_recipe = [1, 2]\n self._check_market(market, ps_recipe, expected_num_of_deals, expected_prices)\n\n check_1_2(buyers=[9], sellers=[-4, -3],\n expected_num_of_deals=0, expected_prices=[9, -4.5])\n check_1_2(buyers=[9, 8, 7, 6], sellers=[-6, -5, -4, -3, -2, -1],\n expected_num_of_deals=1, expected_prices=[8, -4])\n check_1_2(buyers=[9, 8], sellers=[-4, -3, -2, -1],\n expected_num_of_deals=1, expected_prices=[8, -4])\n check_1_2(buyers=[9, 8], sellers=[-6, -3, -2, -1],\n expected_num_of_deals=1, expected_prices=[8, -4])\n check_1_2(buyers=[9, 8], sellers=[-4, -3, -2, -1],\n expected_num_of_deals=1, expected_prices=[8, -4])\n\n # PRICE CROSSES ZERO AT FIRST PHASE\n check_1_2(buyers=list(range(20)), sellers=[-3, -2, -1],\n expected_num_of_deals=1, expected_prices=[18, -9])", "def simple_transaction_costs(positions, cost):\r\n return np.nansum(np.abs(positions - array_shift(positions, 1)), axis=1) * cost", "def line_edits(c1, c2):\n s1 = c1.splitlines()\n s2 = c2.splitlines()\n n = len(s1)\n m = len(s2)\n ctable = cost_table(s1, s2)\n output = []\n while m > 0 or n > 0:\n if m > 0 and n > 0 and s1[n - 1] == s2[m - 1]: # if the lines are equal, transfer\n output.append(('T', s1[n - 1], s2[m - 1]))\n m -= 1\n n -= 1\n else: # else check whether it is a Del, Sub, In in that priority\n check = (ctable[n - 1][m], ctable[n][m - 1], ctable[n - 1][m - 1])\n key = check.index(min(check))\n if key == 2:\n output.append(('S', s1[n - 1], s2[m - 1]))\n m -= 1\n n -= 1\n elif key == 0:\n output.append(('D', s1[n - 1], ''))\n n -= 1\n else:\n output.append(('I', '', s2[m - 1]))\n m -= 1\n return output[::-1]", "def test_cancel_amount_more_than_tmsc_balance(self):\n entity_a1 = self.entities[1]\n\n # 1. A1 starts with 50.0 TMSC, 0 TIndiv1\n self.check_balance(entity_a1.address, TMSC, '50.00000000', '0.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651\n\n # 2. A1 offers 50.0 TMSC for 100 TIndiv1\n entity_a1.trade('50.00000000', TMSC, '100', TIndiv1, ADD_1)\n self.generate_block()\n self.check_balance(entity_a1.address, TMSC, '0.00000000', '50.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651\n\n # 3. 
A1 cancels 50.0 TMSC for 100 TIndiv1 (cancel-at-price)\n entity_a1.trade('50.00000000', TMSC, '100', TIndiv1, CANCEL_2)\n self.generate_block()\n self.check_balance(entity_a1.address, TMSC, '50.00000000', '0.00000000') # SP 2\n self.check_balance(entity_a1.address, TIndiv1, '0', '0') # SP 2147483651", "def is_ppc(C1, C2, i):\n c1, c2 = sorted(C1), sorted(C2)\n for k in range(len(c1)):\n if i <= c2[k]:\n # return False\n break\n if c1[k] != c2[k]:\n return False\n return True", "def newCostCalc(dfNew, curCost,a,b):\n a1,a2,a3 = dfNew.iloc[a-2],dfNew.iloc[a-1],dfNew.iloc[a]\n b1,b2,b3 = dfNew.iloc[b-2],dfNew.iloc[b-1],dfNew.iloc[b]\n reCalc = curCost\n reCalc = reCalc - euclideanDistance(a1['x'],a1['y'],a2['x'],a2['y'])\n reCalc = reCalc - euclideanDistance(a2['x'],a2['y'],a3['x'],a3['y'])\n reCalc = reCalc - euclideanDistance(b1['x'],b1['y'],b2['x'],b2['y'])\n reCalc = reCalc - euclideanDistance(b2['x'],b2['y'],b3['x'],b3['y'])\n\n x, y = dfNew.iloc[a-1].copy(), dfNew.iloc[b-1].copy()\n dfNew.iloc[a-1],dfNew.iloc[b-1] = y,x\n\n a1,a2,a3 = dfNew.iloc[a-2],dfNew.iloc[a-1],dfNew.iloc[a]\n b1,b2,b3 = dfNew.iloc[b-2],dfNew.iloc[b-1],dfNew.iloc[b]\n\n reCalc = reCalc + euclideanDistance(a1['x'],a1['y'],a2['x'],a2['y'])\n reCalc = reCalc + euclideanDistance(a2['x'],a2['y'],a3['x'],a3['y'])\n reCalc = reCalc + euclideanDistance(b1['x'],b1['y'],b2['x'],b2['y'])\n reCalc = reCalc + euclideanDistance(b2['x'],b2['y'],b3['x'],b3['y'])\n\n x, y = dfNew.iloc[a-1].copy(), dfNew.iloc[b-1].copy()\n dfNew.iloc[a-1],dfNew.iloc[b-1] = y,x\n\n return reCalc", "def double_crossover(self, original1, original2):\n point1=self.r.uniform(0.1,0.3)\n point2=self.r.uniform(0.6,0.8)\n len1=len(original1)\n len2=len(original2)\n cut11=int(point1*len1)\n cut12=int(point2*len1)\n cut21=int(point1*len2)\n cut22=int(point2*len2)\n child1=original1[:cut11]+original2[cut21:cut22]+original1[cut12:]\n child2=original2[:cut21]+original1[cut11:cut12]+original2[cut22:]\n return child1, child2", "def test_cancel_amount_more_than_msc_balance(self):\n entity_a1 = self.entities[1]\n\n # 1. A1 starts with 50.0 MSC, 0.0 MDiv1\n self.check_balance(entity_a1.address, MSC, '50.00000000', '0.00000000') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4\n\n # 2. A1 offers 50.0 MSC for 111.5 MDiv1\n entity_a1.trade('50.00000000', MSC, '111.5', MDiv1, ADD_1)\n self.generate_block()\n self.check_balance(entity_a1.address, MSC, '0.00000000', '50.00000000') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4\n\n # 3. A1 cancels 50.0 MSC for 111.5 MDiv1 (cancel-at-price)\n entity_a1.trade('50.00000000', MSC, '111.5', MDiv1, CANCEL_2)\n self.generate_block()\n self.check_balance(entity_a1.address, MSC, '50.00000000', '0.00000000') # SP 1\n self.check_balance(entity_a1.address, MDiv1, '0.00000000', '0.00000000') # SP 4" ]
[ "0.63171464", "0.6038648", "0.6019486", "0.5787042", "0.5624252", "0.5611005", "0.5577099", "0.5519468", "0.54916805", "0.54847175", "0.5430665", "0.54237324", "0.5396291", "0.53698677", "0.5358428", "0.5347584", "0.53337723", "0.53297913", "0.5312368", "0.5293557", "0.5280526", "0.52630466", "0.5242461", "0.524148", "0.5228643", "0.5195249", "0.51945627", "0.51915145", "0.5165119", "0.51613206" ]
0.6249691
1
creation of the destination xml file, writing of the xml header line, closing the file
def creer_fichier(nom_file):
    fichier = open(nom_file, 'w')
    fichier.write("<?xml version='1.0' encoding='UTF-8' standalone='yes'?>\n")
    fichier.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saving_file(xml):\r\n\r\n xml_string = etree.tostring(xml)\r\n parsed = minidom.parseString(xml_string)\r\n with open(self.app_path + \"\\\\temp_\\\\\" + file_name + \".xml\", \"w\") as file:\r\n file.write(parsed.toprettyxml(indent=\" \"))", "def create_xmlfile(images_path, txt_file,class_name):\n\n#\tsavepath = os.path.join(images_path, \"{}_annotations\".format(class_name))\n#\tprint \"savepath:{}\".format(savepath)\n#\tif not os.path.exists(savepath):\n#\t\tos.mkdir(savepath)\n\n\ttxt = open(txt_file, 'r')\n\tfor line in txt:\n#\t\tpdb.set_trace()\n\t\tprint ('line:{}'.format(line))\n\t\twords = line.split(\" \")\n\t\tword_len = len(words)\n\t\tprint('length of words:{}'.format(word_len))\n\t\tprint (\"word_len:{}\".format(word_len))\n\t\t\n\t\tif word_len >3:\n\t\t\ta,b = words[0].split('.')\n\t\t\t\n\t\t\timg_path =a+'.jpg' #words[0]\n\t\t\timg_name =img_path # os.path.basename(img_path)\n\t\t\tprint ('image Name:%s'%img_name)\n\t\t\timg = Image.open('/home/graymatics/py-faster-rcnn/data/violence/'+img_name)\n\t\t\tprint(img)\n\t\t\tw,h = img.size\n\t\t\t#create xml\n\t\t\tannotation = et.Element('annotation')\n\t\t\tet.SubElement(annotation,'folder').text = 'demo'\n\t\t\tet.SubElement(annotation,'filename').text = img_name\n\n\t\t\tsource = et.SubElement(annotation, 'source')\n\t\t\tet.SubElement(source, 'database').text = 'internet'\n\t\t\tet.SubElement(source, 'annotation').text = 'Lyushuen'\n\t\t\tet.SubElement(source, 'image').text = 'unknown'\n\n\t\t\tsize = et.SubElement(annotation, 'size')\n\t\t\tet.SubElement(size, 'width').text = str(w)\n\t\t\tet.SubElement(size, 'height').text =str(h)\n\t\t\tet.SubElement(size, 'depth').text = '3'\n\n\t\t\tet.SubElement(annotation, 'segmented').text = str(0)\n\t for i in range(word_len/4 + 1):\n print (\"I size:{}\".format(i))\n if i == 0:\n print \"Image name is :{}\".format(words[0])\n elif i >= 1:\n index = i - 1\n\n\t\t\t\t\tobj = et.SubElement(annotation, 'object')\n\t\t\t\t\tet.SubElement(obj, 'name').text = class_name #words[5]#class_name\n\t\t\t\t\tet.SubElement(obj, 'pose').text = 'Unspecified'\n\t\t\t\t\tet.SubElement(obj, 'truncated').text = '0'\n\t\t \t\t \tet.SubElement(obj, 'difficult').text = '0'\n\n\t\t \t\t\tbox = et.SubElement(obj, 'bndbox')\n\t\t\t \t\tet.SubElement(box, 'xmin').text = str(int(round(float(words[index*4+1]))))\n\t\t\t \t\tet.SubElement(box, 'ymin').text = str(int(round(float(words[index*4+2]))))\n\t\t\t \t\tet.SubElement(box, 'xmax').text = str(int(round(float(words[index*4+3]))))\n\t\t\t \t\tet.SubElement(box, 'ymax').text = str(int(round(float(words[index*4+4]))))\n\n\t\t #write to file\n\t\t \tname, exten = os.path.splitext(img_name)\n\t\t \tanno_path = os.path.join(src_img,name+'.xml') #path of annotation files\n\t\t\tprint \"anno_path:{}\".format(anno_path)\n\t\t \ttree = et.ElementTree(annotation)\n\t\t \ttree.write(anno_path)\n\ttxt.close()", "def exportXml ( w, xml ):\n assert str ( type ( xml ) ) == \"<type 'str'>\"\n rawText = xml\n pattern = re.compile (r'[^\\S ]+')\n text = re.sub ( pattern, \"\", rawText )\n reparsed = MD.parseString ( text )\n w.write ( reparsed.toprettyxml ( indent = \"\\t\", encoding = \"UTF-8\" ) )", "def ouvrir_fichier(nom_file):\n fichier_xml = open(nom_file, 'a')\n return fichier_xml", "def write_output_file(self, xml_text, xml_file):\n xml_fo = open(xml_file, 'w')\n xml_fo.write(xml_text+'</xml>')\n xml_fo.close()\n return", "def GenerateXML(dictionary, fileName=\"labelling.xml\") : \n root = gfg.Element(\"annotation\") \n #the big section is called 
Annotation\n for key in dictionary:\n #for every polygon list in inside object witho subelement name and attributes and the type \"polygon\"\n objectElement = gfg.Element(\"object\") \n root.append(objectElement) \n subElement1 = gfg.SubElement(objectElement, \"name:\".strip(\":\"))\n subElement1.text = str(dictionary[key][\"name\"])\n subElement2 = gfg.SubElement(objectElement, \"attributes\".strip(\":\"))\n subElement2.text = str(dictionary[key][\"attributes\"])\n subElement3 = gfg.SubElement(objectElement, \"polygon\")\n \n for i in range(0, len(dictionary[key])-2):\n #for every vertex of the polygon list it's rounded x, y on xml\n SubInsidePolygon = gfg.SubElement(subElement3, \"pt\")\n sub_x = gfg.SubElement(SubInsidePolygon, \"x\")\n sub_y = gfg.SubElement(SubInsidePolygon, \"y\")\n sub_x.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][0])))\n sub_y.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][1])))\n tree = gfg.ElementTree(root) \n #create the xml tree\n with open (fileName, \"wb\") as files : \n tree.write(files) \n #if xml does not exist create one otherwise rewrite to it", "def create_gen_xml(self, out_file):\n\n param_list = []\n msg = []\n msg_type = []\n dep_node = []\n for line in self.full_ed_lines:\n param_list.append(line.text())\n dep_pkg = param_list[6].split(', ')\n if dep_pkg[len(dep_pkg) - 1] == '':\n dep_pkg.pop()\n for dep in self.manager.wid.sub_list:\n dep_node.append(dep['msg_type'])\n for dep in self.manager.wid.pub_list:\n dep_node.append(dep['msg_type'])\n for dep in dep_node:\n a, b = dep.split('/')\n msg.append(a)\n msg_type.append(b)\n f = open('../genkernel/templates/package_rosgen.xml')\n o = open(out_file, 'a')\n flag = 0\n while 1:\n line = f.readline()\n if not line: break\n for i in range(6):\n line = line.replace('[{0}]'.format(i), param_list[i])\n line = line.replace('[7]', param_list[7])\n if line.find('[6]') != -1:\n for dep in dep_pkg:\n line_dep = '\\t<depend>{0}</depend>\\n'.format(dep)\n o.write(line_dep)\n flag = 1\n elif line.find('[8]') != -1:\n for dep, tp in zip(msg, msg_type):\n line_dep = '\\t\\t<depend type=\"{1}\">{0}</depend>\\n'.format(dep, tp)\n o.write(line_dep)\n flag = 1\n elif line.find('<subscribers>') != -1:\n o.write('\\t\\t<subscribers>\\n')\n for sub in self.manager.wid.sub_list:\n o.write('\\t\\t\\t<sub>\\n')\n o.write('\\t\\t\\t\\t<name>{0}</name>\\n'.format(sub['name']))\n o.write('\\t\\t\\t\\t<msg_type>{0}</msg_type>\\n'.format(sub['msg_type']))\n o.write('\\t\\t\\t\\t<topic_name>{0}</topic_name>\\n'.format(sub['topic_name']))\n o.write('\\t\\t\\t\\t<queue_size>{0}</queue_size>\\n'.format(sub['queue_size']))\n o.write('\\t\\t\\t</sub>\\n')\n o.write('\\t\\t</subscribers>\\n')\n flag = 1\n elif line.find('<publishers>') != -1:\n o.write('\\t\\t<publishers>\\n')\n for pub in self.manager.wid.pub_list:\n o.write('\\t\\t\\t<pub>\\n')\n o.write('\\t\\t\\t\\t<name>{0}</name>\\n'.format(pub['name']))\n o.write('\\t\\t\\t\\t<msg_type>{0}</msg_type>\\n'.format(pub['msg_type']))\n o.write('\\t\\t\\t\\t<topic_name>{0}</topic_name>\\n'.format(pub['topic_name']))\n o.write('\\t\\t\\t\\t<queue_size>{0}</queue_size>\\n'.format(pub['queue_size']))\n o.write('\\t\\t\\t</pub>\\n')\n o.write('\\t\\t</publishers>\\n')\n flag = 1\n if flag == 0:\n o.write(line)\n else:\n flag = 0\n o.close()\n f.close()\n self.changed = False", "def _generate_xml(self, body, destn_dir, nodes=True):\n fn = ''.join([random.choice(string.ascii_letters) for _ in range(12)])\n fn += '.xml'\n\n _dir = 
os.path.dirname(os.path.abspath(__file__))\n _tmpl = 'multi_node.template' if nodes else 'single_node.template'\n _env = Environment(autoescape=False,\n loader=FileSystemLoader(_dir),\n trim_blocks=False)\n\n with open(fn, 'w+') as f:\n o = _env.get_template(_tmpl).render(body)\n f.write(o)\n\n _d = destn_dir + '/' + fn\n self._remote_copy(fn, _d)\n # Remove the XML file created locally\n os.remove(fn)\n\n return _d", "def to_xml_file(self, xml_file_path):\n s = self.to_xml()\n with open(xml_file_path, \"w+b\") as f:\n f.write(s)", "def writeToTempXml(self):\n name = self.fileToProcess.name\n all_tokens = ET.Element(\"tokens\")\n for token in self.tokensTable:\n if token.getType() == KEYWORD:\n keyword = ET.SubElement(all_tokens, \"keyword\")\n keyword.text = ' '+token.getValue()+' '\n elif token.getType() == IDENTIFIER:\n identifier = ET.SubElement(all_tokens, \"identifier\")\n identifier.text = ' '+token.getValue()+' '\n elif token.getType() == SYMBOL:\n symbol = ET.SubElement(all_tokens, \"symbol\")\n symbol.text = ' '+token.getValue()+' '\n elif token.getType() == STRING_CONST:\n stringConstant = ET.SubElement(all_tokens, \"stringConstant\")\n stringConstant.text = ' '+token.getValue()+' '\n elif token.getType() == INT_CONST:\n integerConstant = ET.SubElement(all_tokens, \"integerConstant\")\n integerConstant.text = ' '+token.getValue()+' '\n tree = ET.ElementTree(all_tokens)\n tree.write(name + 'T' + '.xml')", "def save_xml(tree, file_name, folder_name):\r\n import os # ändrar plats för filer\r\n os.chdir(folder_name)\r\n tree.write(file_name) # Namnet på ny fil\r", "def xmlwrite(self, doc, filename):\n pathname = os.path.join(self.session.session_dir, filename)\n f = open(pathname, \"w\")\n doc.writexml(writer=f, indent=\"\", addindent=\" \", newl=\"\\n\", encoding=\"UTF-8\")\n f.close()", "def write(self):\n temp_string = minidom.parseString(ET.tostring(self.root)).toprettyxml(encoding=\"UTF-8\")\n with open(self.xml_file, 'w') as f:\n f.write(temp_string)\n # f = open(self.xml_file, \"w\")\n # f.write(temp_string)\n # f.close()", "def writeFile(self, filename):\n s = ET.tostring(self._root)\n\n #Remove all formatting\n s = s.replace('\\n','')\n s = s.replace('\\t','')\n s = s.replace('\\r','')\n\n f = open(filename, 'w')\n f.write(minidom.parseString(s).toprettyxml())\n f.close()", "def write_to_xml(dictData, metadata, xmlfile):\n\tfout = codecs.open(xmlfile, 'w', 'utf-8')\n\tfout.write('<?xml version = \"1.0\" encoding = \"UTF-8\" standalone = \"no\" ?>\\n')\n\tfout.write('<?xml-stylesheet type=\"text/xsl\" href=\"maketable.xsl\"?>\\n')\n\tfout.write('<root>\\n')\n\tfout.write('<meta>\\n')\n\tfor key, value in metadata.items():\n\t\tfout.write('<' + key + '>' + value + '</' + key + '>\\n')\n\tfout.write('</meta>\\n')\n\tfout.write('<content>\\n')\n\tfor (hw, meanings, verse, verseNumDetails, pageNumDetails) in dictData:\n\t\txmlline = ''\n\t\txmlline += '<word><headword>' + hw + '</headword><meanings>'\n\t\tfor meaning in meanings:\n\t\t\txmlline += '<m>' + meaning + '</m>'\n\t\txmlline += '</meanings>'\n\t\txmlline += '<verse>'\n\t\tlines = verse.split('<BR>')\n\t\tfor line in lines:\n\t\t\txmlline += '<line>' + line + '</line>'\n\t\txmlline += '</verse>'\n\t\txmlline += '<verseNumber>' + verseNumDetails + '</verseNumber>'\n\t\txmlline += '<pageNumber>' + pageNumDetails + '</pageNumber></word>'\n\t\t# Write in babylon format. 
<BR><BR> is to separate verses.\n\t\tfout.write(xmlline + '\\n')\n\t\txmlline = ''\n\tfout.write('</content>\\n</root>')\n\tfout.close()\n\n\t# Give some summary to the user\n\tprint('XML file generated. Success!')\n\tprint('{} metadata lines and {} content lines written to XML file.'.format(len(metadata), len(dictData)))", "def ler_arquivo_xml(self, diretorio):\r\n with open(diretorio, 'r') as fxml:\r\n\t strfx = fxml.readlines()\r\n\t string = \"\".join(strfx).replace(\"&\",\" e \")\r\n return string", "def XML_EC_PL(Name, InputsFile, OutputFile, emin,emax):\n\n\t#On commence par afficher ce qu'on fait\r\n\tprint \" Build xml file \"\r\n\r\tprint InputsFile\n\t#ouverture du fichier dans lequel on place le source model\n\ttry:\n\t\tfresult = open(OutputFile, 'w')\n\texcept:\n\t\tprint \"Coucou\"\r\n \t#ecriture des premieres lignes invariantes\n\tfresult.write('<?xml version=\"1.0\" ?>')\r\n\tfresult.write(\"<source_library title=\\\"source library\\\">\\n\")\n\r\n \t#ouverture du fichier avec les entrees\r\n\tf = open(InputsFile,\"r\")\r\n\tlines = f.readlines()\r\n\t\r\n \t#Ajout des sources detectees dans le catalogue\n\t#Pour chaque ligne du fichier d'entree\r\n\tfor line in range(len(lines)):\n\t\t#Lire les donnees de la ligne\t\t\r\n\t\tdata = lines[line].split()\r\n\t\tname = data[0]\n\n\t\t#Verification : est on en train de traiter la source que l'on veut etudier ou une autre ?\r\n\t\tif str(name) == Name :\r\n\t\t\tmysource = 1\r\n\t\telse:\r\n\t\t\tmysource = 0\n\n\t\t#recuperation des donnees\r\n\t\tRA = data[1]\r\n\t\tDEC = data[2]\r\n\t\tIntegral = float(data[3])*float(Frac)\r\n\t\tGamma= data[4]\n\n\t\t\r\n\t\ttry:\n\t\t\t#essai de definition des donnees pour un PL avec ExpCut\n\t\t\tPrefactor = float(data[5])*float(Frac)\r\n\t\t\tEnergy = float(data[6])\r\n\t#\t\tPrefactor = Prefactor/pow(Energy/100., float(Gamma)) #Densite de flux calculee a Epivot\r\n\t#\t\tPrefactor = Prefactor*pow(1000./100., float(Gamma)) #We do the calculation with (E/1000.)^Gamma\n\t\t\tvariabilite=float(data[8])\n\n#\t\t\tprint variabilite\n\n\n\n\r\n\t\t\tcut = float(data[7]) # Cut est la variable qui nous permettra de savoir si il faut utiliser un cut off (1) ou une loi de puissance normale (2)\r\n\t\texcept:\r\n\t\t\ttry:\r\n\t\t\t\tcut = float(data[5])\r\n\t\t\texcept:\r\n\t\t\t\tprint \" Wrong size of list \"\r\n\t\t\t\tsys.exit()\r\n \t#Si on considere un ccut off exponentiel pour la source :\r\n\t\tif cut == 1:\n\t\t\t#ecriture du nom de la source consideree\r\n\t\t\tresult_line=\" <source \"\r\n\t\t\tresult_line += \"name=\\\"\"+name+\"\\\"\"\r\n\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\r\n\t\t\tspectrum_type = \"PLSuperExpCutoff\"\n\t\t\t#Utilisation de la modelisation PLSuperExpCutoff car plus simple et plus intuitive pour nous et pour la modelisation des pulsars si il faut en modeliser\n\r\n\t\t\t#definition des parametres spectraux a prendre en comtpe et de la chaine de caractere a integrer\r\n\n\n\n\t\t\tif variabilite==0.0 or variabilite==2.0:\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"0\\\" max=\\\"10000000.0\\\" min=\\\"0.0000001\\\"\"\n\n\t\t\t\t#d'ou vient ce 1e-12\r\n\t\t\t\tIntegral = float(Prefactor)*1.0e10\r\n\t\t\t\tscale = 1.0e-10\n\r\n\t\t\t\tspectrum_lines += \" name=\\\"Prefactor\\\" scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index1\\\" scale=\\\"-1.0\\\" 
value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"20000.0\\\" min=\\\"1.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Scale\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Energy)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"100.0\\\" min=\\\"0.001\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"Cutoff\\\" scale=\\\"1000.0\\\" value=\\\"30.0\\\"/>\\n\"\n\r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index2\\\" scale=\\\"1.0\\\" value=\\\"1.0\\\"/>\\n\"\n\t\t\telif variabilite==1.0 :\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10000000.0\\\" min=\\\"0.0\\\"\"\n\n\t\t\t\t#d'ou vient ce 1e-12\r\n\t\t\t\tIntegral = float(Prefactor)*1.0e10\r\n\t\t\t\tscale = 1.0e-10\n\n\t\t\t\tspectrum_lines += \" name=\\\"Prefactor\\\" scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index1\\\" scale=\\\"-1.0\\\" value=\\\"\"\r\n\t\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"20000.0\\\" min=\\\"1.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Scale\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Energy)+\"\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"100.0\\\" min=\\\"0.0001\\\"\"\r\t\t\t\tspectrum_lines += \" name=\\\"Cutoff\\\" scale=\\\"1000.0\\\" value=\\\"30.0\\\"/>\\n\"\r\n \r\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\t\tspectrum_lines += \" name=\\\"Index2\\\" scale=\\\"1.0\\\" value=\\\"1.0\\\"/>\\n\"\n\n\r\n \r\n\n# <spectrum type=\"PLSuperExpCutoff\">\n# <parameter free=\"1\" max=\"100000\" min=\"0\" name=\"Prefactor\" scale=\"1e-10\" value=\"Prefactor*1e-10\"/>\n# <parameter free=\"1\" max=\"0\" min=\"5\" name=\"Index1\" scale=\"-1\" value=\"valeur du catalogue\"/>\n# <parameter free=\"0\" max=\"20000\" min=\"1.0\" name=\"Scale\" scale=\"1\" value=\"Epivot\"/>\n# <parameter free=\"1\" max=\"300000\" min=\"100\" name=\"Cutoff\" scale=\"1\" value=\"3000\"/>\n# <parameter free=\"0\" max=\"5\" min=\"0\" name=\"Index2\" scale=\"1\" value=\"1.5\"/>\n# </spectrum>\n\n\r\n\t\telse:\n\t\t#Sinon (si on considere une loi de puissance simple)\n\t\t#definition de la chaine de caractere comportant le nom de la source\r\n\t\t\tresult_line=\" <source \"\r\n\t\t\tresult_line += \"name=\\\"\"+name+\"\\\"\"\n\t\t\tif mysource == 0:\r\t\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\n\t\t\telse:\n\t\t\t\tresult_line += \" type=\\\"PointSource\\\">\\n\"\t\t\t\t\n\n\t\t\t#definition de la chaine de caractere correspondant a la forme de fit que l'on souhaite utiliser (Loi de puissance)\r\n\t\t\tspectrum_type = \"PowerLaw2\"\r\n\r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t\t\t#si ce n'est pas la source que l'on etudie on fige le parametre Integrale\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"0\\\" max=\\\"1000000.0\\\" min=\\\"0.0\\\"\"\r\n\t\t\telse:\n\t\t\t#sinon on le libere\r\n\t\t\t\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"1000000.0\\\" min=\\\"0.0\\\"\"\n\n\n\n\n\n\t\t\t#Toujours ce facteur....\r\n\t\t\tIntegral = float(Integral)*1e10\r\n\t\t\tscale = 1e-10\n\n\n\t\n\r\n\t\t\tspectrum_lines += \" name=\\\"Integral\\\" 
scale=\\\"\"+str(scale)+\"\\\" value=\\\"\"\r\n\t\t\tspectrum_lines += str(Integral)+\"\\\" />\\n\"\n\r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t\t\t\t#si ce n'est pas la source que l'on etudie on fige le parametre gamma\r\n\t\t \t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\r\n\t\t\telse:\n\t\t\t\t#si c'est pas la source que l'on etudie on le laisse libre\r\n\t\t \t\tspectrum_lines += \" <parameter free=\\\"1\\\" max=\\\"5.0\\\" min=\\\"0.\\\"\"\n\n\t\t\t#fin de la chaine de parametres sur le modele spectral\r\n\t\t\tspectrum_lines += \" name=\\\"Index\\\" scale=\\\"-1.0\\\" value=\\\"\"\r\n\t\t\tspectrum_lines += str(Gamma)+\"\\\"/>\\n\"\r\n \r\n\t\t\tif mysource == 0 and variabilite!=1.0:\n\t \n\t\t\t spectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"200000.0\\\" min=\\\"20.0\\\"\"\r\n\t\t\t spectrum_lines += \" name=\\\"LowerLimit\\\" scale=\\\"1.0\\\" value=\\\"1000.0\\\"/>\\n\"\r\n \r\n\t\t\t spectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"1000000.0\\\" min=\\\"20.0\\\"\"\r\n\t\t\t spectrum_lines += \" name=\\\"UpperLimit\\\" scale=\\\"1.0\\\" value=\\\"100000.0\\\"/>\\n\"\n\t\t\telse:\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"200000.0\\\" min=\\\"20.0\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"LowerLimit\\\" scale=\\\"1.0\\\" value=\\\"100\\\"/>\\n\"\n\n\t\t\t\tspectrum_lines += \" <parameter free=\\\"0\\\" max=\\\"100000.0\\\" Min =\\\"20.0\\\"\"\n\t\t\t\tspectrum_lines += \" name=\\\"UpperLimit\\\" scale=\\\"1.0\\\" value=\\\"100000.0\\\"/>\\n\"\n\n \t\t#ajout du modele spectral a la liste de parametres \r\n\t\tresult_line += \" <spectrum type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\t\tresult_line += spectrum_lines\r\n\t\tresult_line += \" </spectrum>\\n\"\n\n\t\t\n\n\t\tif mysource==0 and variabilite!=1.0:\n \t\t\t#ajout du modele spatial a la liste de parametres \r\n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\r\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\r\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\telif mysource==0 and variabilite==1.0:\n \t\t\t#ajout du modele spatial a la liste de parametres \r\n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\r\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\r\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\r\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\telse:\n #ajout du modele spatial a la liste de parametres \n\t\t\tresult_line += \" <spatialModel type=\\\"SkyDirFunction\\\">\\n\"\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"360\\\" min=\\\"-360\\\"\"\n\t\t\tresult_line += \" name=\\\"RA\\\" scale=\\\"1\\\" value=\\\"\"+RA+\"\\\"/>\\n\"\n\t\t\tresult_line += \" <parameter free=\\\"1\\\" max=\\\"90\\\" min=\\\"-90\\\"\"\n\t\t\tresult_line += \" name=\\\"DEC\\\" scale=\\\"1\\\" value=\\\"\"+DEC+\"\\\"/>\\n\"\n\t\t\tresult_line += \" </spatialModel>\\n\"\n\t\t\t\n\t\tresult_line += \" 
</source>\\n\"\r\n\t\tfresult.write(result_line+\"\\n\")\r\n #Ajout du fond diffus galactique\n\tresult_line=\" <source \"\r\n\tresult_line += \"name=\\\"gal_v02\\\"\"\r\n\tresult_line += \" type=\\\"DiffuseSource\\\">\\n\"\r\n\tspectrum_type = \"ConstantValue\"\r\n\r\n\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10.0\\\" min=\\\"0\\\"\"\r\n\tspectrum_lines += \" name=\\\"Value\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Frac)+\"\\\" />\\n\"\r\n\r\n\tresult_line += \" <spectrum type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\n\tresult_line += spectrum_lines\r\n\tresult_line += \" </spectrum>\\n\"\r\n\r\n\tresult_line += \" <spatialModel file=\\\"/nfs/farm/g/glast/u31/marianne/VelaX/July09_Pointed/gll_iem_v02.fit\\\" type=\\\"MapCubeFunction\\\">\\n\"\r\n\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"1000.0\\\" min=\\\"0.0\\\"\"\r\n\tresult_line += \" name=\\\"Normalization\\\" scale=\\\"1\\\" value=\\\"1.0\\\"/>\\n\"\r\n\tresult_line += \" </spatialModel>\\n\"\r\n\tresult_line += \" </source>\\n\"\r\n\tfresult.write(result_line+\"\\n\")\r\n\r\n \t#Ajout du fond diffus extragalactique\r\n\tresult_line=\" <source \"\r\n\tresult_line += \"name=\\\"eg_v02\\\"\"\r\n\tresult_line += \" type=\\\"DiffuseSource\\\">\\n\"\r\n\tspectrum_type = \"FileFunction\"\r\n\r\tspectrum_lines = \" <parameter free=\\\"1\\\" max=\\\"10.0\\\" min=\\\"0\\\"\"\r\n\tspectrum_lines += \" name=\\\"Normalization\\\" scale=\\\"1.0\\\" value=\\\"\"+str(Frac)+\"\\\" />\\n\"\r\n\r\n\tresult_line += \" <spectrum file=\\\"/nfs/farm/g/glast/u31/marianne/VelaX/July09_Pointed/isotropic_iem_v02.txt\\\" type=\\\"\"+spectrum_type+\"\\\">\\n\"\r\n\tresult_line += spectrum_lines\r\n\tresult_line += \" </spectrum>\\n\"\r\n \r\n\tresult_line += \" <spatialModel type=\\\"ConstantValue\\\">\\n\"\r\n\tresult_line += \" <parameter free=\\\"0\\\" max=\\\"100.0\\\" min=\\\"0.0\\\"\"\r\n\tresult_line += \" name=\\\"Value\\\" scale=\\\"1\\\" value=\\\"1.0\\\"/>\\n\"\r\n\tresult_line += \" </spatialModel>\\n\"\r\n\tresult_line += \" </source>\\n\"\r\n\tfresult.write(result_line+\"\\n\")\r\n\n \t#Fermeture des fichiers \r\n\tf.close() \r\n\tfresult.write(\"\\n</source_library>\\n\")\r\n\tfresult.close()\r\n\treturn", "def XMLWrite(one, two, three, four, five, six, seven, eight):\n filePath = \"/mnt/RAM/kanban.xml\"\n xmlFile = open(filePath, 'w')\n\n xmlFile.write('<kanbanShelf>\\n')\n xmlFile.write(' <one>%s</one>\\n' % one)\n xmlFile.write(' <two>%s</two>\\n' % two)\n xmlFile.write(' <three>%s</three>\\n' % three)\n xmlFile.write(' <four>%s</four>\\n' % four)\n xmlFile.write(' <five>%s</five>\\n' % five)\n xmlFile.write(' <six>%s</six>\\n' % six)\n xmlFile.write(' <seven>%s</seven>\\n' % seven)\n xmlFile.write(' <eight>%s</eight>\\n' % eight)\n xmlFile.write('</kanbanShelf>')", "def save(self, filename=None):\n f = filename if filename else self.path\n etree.register_namespace('', TEI)\n etree.register_namespace('mith', MITH)\n self.doc.write(f, xml_declaration=True, encoding='utf-8', method='xml')", "def saveToXml(self) -> org.jdom.Element:\n ...", "def write_file(file,dir_name):\n opened_file = open(dir_name + '/%s'%file,'w')\n opened_file.write('<?xml version=\"1.0\"?>\\n')\n return opened_file", "def create_xml_regression(lfiles, lsbj, foxml):\n\n impl = xml.dom.minidom.getDOMImplementation()\n doc = impl.createDocument(None, \"some_tag\", None)\n top_element = doc.documentElement\n\n e = doc.createElement('subject')\n e.setAttribute('id', 'case')\n\n for i, fn in enumerate(lfiles):\n v = doc.createElement('visit')\n 
v.setAttribute('id', \"subj{}\".format(i))\n\n f = doc.createElement('filename')\n f.setAttribute('object_id', \"face\")\n t = doc.createTextNode(fn)\n f.appendChild(t)\n\n a = doc.createElement('age')\n x = doc.createTextNode(str(lsbj[i][\"age\"]))\n a.appendChild(x)\n\n\n v.appendChild(f)\n v.appendChild(a)\n e.appendChild(v)\n\n top_element.appendChild(e)\n\n with open(foxml, \"w\") as fo:\n fo.write(doc.toprettyxml())", "def save_xml(self, filename):\n if \".xml\" not in filename:\n filename = filename + \".xml\"\n\n shutil.copyfile(self.env.model_file, filename)", "def CreateXMLFromDB(tableName):\r\n fObj = open('htdocs/gl/UI.xml', 'w')\r\n fStr = \"\"\"\r\n <tables>\r\n <table>\r\n <col>data</col>\r\n </table>\r\n </tables>\r\n \"\"\" \r\n fObj.write(fStr)\r\n fObj.close()", "def to_file(self, file_path, smirnoff_data):\n xml_string = self.to_string(smirnoff_data)\n with open(file_path, \"w\") as of:\n of.write(xml_string)", "def XMLWrite(self, one, two, three, four, five, six, seven, eight):\n filePath = \"/mnt/RAM/kanban.xml\"\n xmlFile = open(filePath, 'w')\n \n xmlFile.write('<kanban>\\n')\n xmlFile.write(' <n1>%s</n1>\\n' % one)\n xmlFile.write(' <n2>%s</n2>\\n' % two)\n xmlFile.write(' <n3>%s</n3>\\n' % three)\n xmlFile.write(' <n4>%s</n4>\\n' % four)\n xmlFile.write(' <n5>%s</n5>\\n' % five)\n xmlFile.write(' <n6>%s</n6>\\n' % six)\n xmlFile.write(' <n7>%s</n7>\\n' % seven)\n xmlFile.write(' <n8>%s</n8>\\n' % eight)\n xmlFile.write('</kanban>')", "def create_file(date, title, text, n):\r\n \"\"\"with date as file name and text as content\"\"\"\r\n filename = \"%s_%s.txt\" % (date, n)\r\n with io.open(filename, \"w+\", encoding=\"UTF8\") as newfile:\r\n text = text.replace(\" \", \"\") #remove all spaces\r\n sentences= re.sub(\",|。\", \"\\n\", text) #one sentence per line\r\n newfile.write(title+\"\\n\")\r\n newfile.write(date+\"\\n\")\r\n newfile.write(sentences)\r\n print(filename)", "def export_to_file(self, filename):\n if len(filename.split(\".\")) == 1:\n filename += \".xml\"\n xmlstring = self._dommodel.toprettyxml(\" \", \"\\n\")\n with open(filename, \"w\") as f:\n f.write(xmlstring)", "def writexml(file):\n OUTFILE=open(file,\"w\")\n doc = xml.dom.minidom.Document()\n\n # Create the <dec_reg_list> base element\n decl_reg_list = doc.createElement(\"decl_reg_list\")\n doc.appendChild(decl_reg_list)\n\n regname_old=\"\"\n rows.pop(0)\n for row in rows:\n (regdesc,regname,offset,default,regtype,expose_reg,depth,incsz,bitdesc,bitname,loc,bittype)= row\n if regname != regname_old:\n # Create the register element\n register = doc.createElement(\"register\")\n register.setAttribute(\"name\", regname)\n register.setAttribute(\"offset\", offset)\n if default != \"\" : register.setAttribute(\"default\", default)\n register.setAttribute(\"type\", regtype)\n if expose_reg == \"1\": register.setAttribute(\"usr\", expose_reg)\n if depth != \"\": register.setAttribute(\"size\", depth)\n if incsz != \"\": register.setAttribute(\"incsz\", incsz)\n text = doc.createTextNode(regdesc)\n register.appendChild(text)\n decl_reg_list.appendChild(register)\n \n # Create the field element\n if bitname != \"\":\n field = doc.createElement(\"field\")\n field.setAttribute(\"name\", bitname)\n if loc !=\"\": field.setAttribute(\"loc\", addcolon(loc))\n if bittype != \"\": field.setAttribute(\"type\", bittype)\n if bitdesc != \"\":\n text = doc.createTextNode(bitdesc)\n field.appendChild(text)\n register.appendChild(field)\n regname_old = regname\n\n\n # Print our newly created XML\n #print 
doc.toprettyxml(indent=\" \")\n #OUTFILE.write(doc.saveXML(decl_reg_list))\n OUTFILE.write(doc.toprettyxml(indent=\" \"))\n OUTFILE.close()", "def save_annotated_text_to_xml(self):\n #initialise file to write the output\n outfile = open(('annotated_text_' + self.lang + '_' + \n self.method + '.xml'), 'w')\n #initialise xml\n annotated_doc = etree.Element('Annotated_document')\n main_text = ''\n #counter for the sentences\n counter_sentence = 0\n #counter for the paragraphs\n counter_paragraph = 0\n #open txt file\n with open(self.lang + '.txt') as file:\n for paragraph in file:\n paragraph_string = ''\n sentences = tokenize.sent_tokenize(paragraph)\n for sentence in sentences:\n #build lists with the ends of the tokens with NE and the NEs\n end_list = [0]\n end_list += [i[2] for i in \n self.named_entity_list_total[counter_sentence]]\n ne_list = [i[3] for i in \n self.named_entity_list_total[counter_sentence]]\n counter_sentence += 1\n #build new string\n new_string = ''\n for i in range(len(end_list)-1):\n new_string += (sentence[end_list[i]:end_list[i+1]]+\n '<annotation class=\"'+ne_list[i]+'\"/>')\n new_string += sentence[end_list[-1]:len(sentence)]\n paragraph_string += new_string+'\\n'\n #print title, author, abstract and main text differently to xml\n if counter_paragraph == 0:\n title_text = etree.SubElement(annotated_doc, \"Title\")\n #add text to the node\n init_text = \"<text>{0}</text>\".format(paragraph_string[6:])\n fin_text = etree.fromstring(init_text)\n title_text.append(fin_text)\n elif counter_paragraph == 1:\n author_text = etree.SubElement(annotated_doc, \"Author\")\n #add text to the node\n init_text = \"<text>{0}</text>\".format(paragraph_string[7:])\n fin_text = etree.fromstring(init_text)\n author_text.append(fin_text)\n elif counter_paragraph == 2:\n abstract_text = etree.SubElement(annotated_doc, \"Abstract\")\n #add text to the node\n init_text = \"<text>{0}</text>\".format(paragraph_string[9:])\n fin_text = etree.fromstring(init_text)\n abstract_text.append(fin_text)\n else: \n main_text += paragraph_string\n counter_paragraph += 1\n main_text_xml = etree.SubElement(annotated_doc, \"Main_text\")\n #add text to the node\n init_text = \"<text>{0}</text>\".format(main_text)\n fin_text = etree.fromstring(init_text)\n main_text_xml.append(fin_text)\n #convert and write to outfile\n xml_bytes = etree.tostring(annotated_doc, encoding='UTF-8', \n pretty_print=True, xml_declaration=True)\n xml_str = xml_bytes.decode(\"utf-8\")\n outfile.write(xml_str)\n outfile.close()\n return" ]
[ "0.6573223", "0.6557936", "0.6554724", "0.6526124", "0.64474225", "0.6354408", "0.6259751", "0.6257362", "0.62307924", "0.61982656", "0.61907524", "0.618365", "0.61595833", "0.61466527", "0.61005646", "0.6085232", "0.6055169", "0.6042117", "0.6034879", "0.59065783", "0.5905436", "0.58871096", "0.5882685", "0.5867433", "0.5852109", "0.5842658", "0.5837437", "0.5824837", "0.5818712", "0.5798339" ]
0.7440519
0
Solves the given system of |equations|. |equations| should be a list of lists of terms summing to 0. Each term should be a tuple of the form (coeff, var), where coeff is a number and var is a variable (string). Constants can be represented by (const, None). Returns a dictionary mapping the variables in the equations to their respective values, or raises an Exception if the system cannot be solved.
def solve_equations(equations):
    # variables in the system of equations
    var_list = list(reduce(set.union,
                           (set(var for coeff, var in eqn if var)
                            for eqn in equations)))
    # number of variables
    num_vars = len(var_list)
    # the index of each variable in |var_list|
    var_index = dict(zip(var_list, range(num_vars)))
    # matrices to solve system (Ax = b)
    A, b = [], []
    # populate matrices
    for equation in equations:
        coeffs, const = [0] * num_vars, 0
        for coeff, var in equation:
            if var:
                coeffs[var_index[var]] += coeff
            else:
                const -= coeff
        A.append(coeffs)
        b.append([const])
    try:
        # solve system
        x = solve(matrix(A), matrix(b))
        return dict(zip(var_list, [x[i, 0] for i in xrange(num_vars)]))
    except:
        raise Exception('Could not solve system of equations')
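A minimal usage sketch of the document above (not part of the record): it assumes the module-level imports the snippet relies on — `reduce` (a builtin in Python 2, which `xrange` implies) plus `matrix` and `solve` from NumPy — and encodes the system x + y = 3, x - y = 1 in the (coeff, var) term form the query describes.

# assumed imports: from numpy import matrix; from numpy.linalg import solve
eqns = [
    [(1, 'x'), (1, 'y'), (-3, None)],   # x + y - 3 = 0
    [(1, 'x'), (-1, 'y'), (-1, None)],  # x - y - 1 = 0
]
print(solve_equations(eqns))  # {'x': 2.0, 'y': 1.0}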
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_solvableEquations(equations: List):\n solvedEquations = []\n updatedUnknowns = set()\n\n for equation in equations:\n equation.update()\n if equation.isSolvable():\n solution = equation.solve()\n unknownAddress = list(solution.keys())[0]\n setattr_fromAddress(object=unknownAddress[0], attributeName=unknownAddress[1], value=solution[unknownAddress])\n updatedUnknowns.add(unknownAddress)\n solvedEquations.append(equation)\n\n for equation in solvedEquations:\n equations.remove(equation)\n\n return updatedUnknowns", "def calcEquation(self, equations: List[List[str]], values: List[float], queries: List[List[str]]) -> List[float]:\n graph = collections.defaultdict(list)\n n = len(values)\n for i in range(n):\n dd, d = equations[i]\n graph[dd].append((d, values[i]))\n graph[d].append((dd, 1/values[i]))\n \n res = []\n for dd, d in queries:\n res.append(self.dfs(graph, dd, d, set()))\n \n return res", "def solve_combination_ofEquations(equations: List, number_ofEquations: int) -> Set:\n updatedUnknowns = set()\n\n for equationCombination in combinations(equations, number_ofEquations):\n\n # If any of the equations got solved in a previous iteration and got removed from _equations, skip this combination\n # Combinations are generated beforehand at the beginning of the main for loop.\n if any(equation not in equations for equation in equationCombination):\n continue\n\n if (system := System_ofLinearEquations(list(equationCombination))).isSolvable():\n solution = system.solve()\n unknownAddresses = list(solution.keys())\n for unknownAddress in unknownAddresses:\n setattr_fromAddress(object=unknownAddress[0], attributeName=unknownAddress[1], value=solution[unknownAddress])\n updatedUnknowns.add(unknownAddress)\n\n # If system is solved, all equations in the combination is solved. 
Remove them from equations pool.\n for equation in equationCombination:\n equations.remove(equation)\n\n return updatedUnknowns", "def solve(\n self,\n x0,\n params=(),\n internal_x0=None,\n solver=None,\n conditional_maxiter=20,\n initial_conditions=None,\n **kwargs\n ):\n if initial_conditions is not None:\n conds = initial_conditions\n else:\n conds = self.get_conds(x0, params, initial_conditions)\n idx, nfev, njev = 0, 0, 0\n while idx < conditional_maxiter:\n neqsys = self.neqsys_factory(conds)\n x0, info = neqsys.solve(x0, params, internal_x0, solver, **kwargs)\n if idx == 0:\n internal_x0 = None\n nfev += info[\"nfev\"]\n njev += info.get(\"njev\", 0)\n new_conds = self.get_conds(x0, params, conds)\n if new_conds == conds:\n break\n else:\n conds = new_conds\n idx += 1\n if idx == conditional_maxiter:\n raise Exception(\"Solving failed, conditional_maxiter reached\")\n self.internal_x = info[\"x\"]\n self.internal_params = neqsys.internal_params\n result = {\n \"x\": info[\"x\"],\n \"success\": info[\"success\"],\n \"conditions\": conds,\n \"nfev\": nfev,\n \"njev\": njev,\n }\n if \"fun\" in info:\n result[\"fun\"] = info[\"fun\"]\n return x0, result", "def equations(self):\n k = 0\n ######################################################################\n # equations for fluid balance\n self.residual[k:k + self.num_nw_fluids * 2] = self.fluid_func()\n k += self.num_nw_fluids * 2\n\n ######################################################################\n # equations for mass flow balance\n self.residual[k:k + 2] = self.mass_flow_func()\n k += 2\n\n ######################################################################\n # equations for energy balance\n self.residual[k] = self.energy_func()\n k += 1\n\n ######################################################################\n # equations for specified heat transfer\n if self.Q.is_set:\n self.residual[k] = (\n self.inl[0].m.val_SI * (\n self.outl[0].h.val_SI - self.inl[0].h.val_SI) - self.Q.val)\n k += 1\n\n ######################################################################\n # equations for specified heat transfer coefficient\n if self.kA.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n self.residual[k] = self.kA_func()\n k += 1\n\n ######################################################################\n # equations for specified heat transfer coefficient characteristic\n if self.kA_char.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n self.residual[k] = self.kA_char_func()\n k += 1\n\n ######################################################################\n # equations for specified upper terminal temperature difference\n if self.ttd_u.is_set:\n self.residual[k] = self.ttd_u_func()\n k += 1\n\n ######################################################################\n # equations for specified lower terminal temperature difference\n if self.ttd_l.is_set:\n self.residual[k] = self.ttd_l_func()\n k += 1\n\n ######################################################################\n # equations for specified pressure ratio at hot side\n if self.pr1.is_set:\n self.residual[k] = (\n self.pr1.val * self.inl[0].p.val_SI - self.outl[0].p.val_SI)\n k += 1\n\n ######################################################################\n # equations for specified pressure ratio at cold side\n if self.pr2.is_set:\n self.residual[k] = (\n self.pr2.val * self.inl[1].p.val_SI - self.outl[1].p.val_SI)\n k += 1\n\n ######################################################################\n # equations 
for specified zeta at hot side\n if self.zeta1.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n self.residual[k] = self.zeta_func(\n zeta='zeta1', inconn=0, outconn=0)\n k += 1\n\n ######################################################################\n # equations for specified zeta at cold side\n if self.zeta2.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n self.residual[k] = self.zeta_func(\n zeta='zeta2', inconn=1, outconn=1)\n k += 1\n\n ######################################################################\n # additional equations\n self.additional_equations(k)", "def solve(self):\n\n if self.degree > 2:\n return \"The polynomial degree is strictly greater than 2, I can't solve.\"\n \n elif self.degree == 0:\n \"\"\"a * X^0 = 0\"\"\" \n a = self.all_terms[0].coefficient\n if a != 0:\n return \"The eqution has no solution\"\n else:\n return \"Every real number is a solution\"\n\n elif self.degree == 1:\n \"\"\"a * X^1 + b * X^0 = 0\"\"\"\n a = self.all_terms[1].coefficient\n b = self.all_terms[0].coefficient\n return formula.linear(a, b)\n\n elif self.degree == 2:\n \"\"\"a * X^2 + b * X^1 + c * X^0 = 0\"\"\"\n a = self.all_terms[2].coefficient\n b = self.all_terms[1].coefficient\n c = self.all_terms[0].coefficient\n discriminant = (b ** 2) - (4 * a * c)\n two_a = 2 * a\n if discriminant == 0:\n return formula.linear(two_a, b)\n else:\n if discriminant > 0:\n return formula.quadratic(two_a, b, discriminant)\n else:\n return formula.quadratic(two_a, b, discriminant, simple=False)", "def equations(self):\n k = 0\n ######################################################################\n # equations for fluid balance\n self.residual[k:k + self.num_nw_fluids] = self.fluid_func()\n k += self.num_nw_fluids\n\n ######################################################################\n # equations for mass flow balance\n self.residual[k] = self.mass_flow_func()\n k += 1\n\n ######################################################################\n # equations for specified heta transfer\n if self.Q.is_set:\n self.residual[k] = self.inl[0].m.val_SI * (\n self.outl[0].h.val_SI - self.inl[0].h.val_SI) - self.Q.val\n k += 1\n\n ######################################################################\n # equations for specified pressure ratio\n if self.pr.is_set:\n self.residual[k] = (\n self.inl[0].p.val_SI * self.pr.val - self.outl[0].p.val_SI)\n k += 1\n\n ######################################################################\n # equations for specified zeta\n if self.zeta.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n self.residual[k] = self.zeta_func(zeta='zeta')\n k += 1\n\n ######################################################################\n # equation for specified hydro-group paremeters\n if self.hydro_group.is_set:\n if np.absolute(self.residual[k]) > err ** 2 or self.it % 4 == 0:\n # hazen williams equation\n if self.hydro_group.method == 'HW':\n func = self.hw_func\n # darcy friction factor\n else:\n func = self.darcy_func\n self.residual[k] = func()\n k += 1\n\n ######################################################################\n # additional equations\n self.additional_equations(k)", "def solve(self, **kwargs):\n return self.system.solve(**kwargs)", "def get_usage_count(equations):\n usage_count = {}\n for eq in equations:\n usage_count.setdefault(eq.lhs, 0)\n for var in eq.rhs.atoms(Variable):\n usage_count.setdefault(var, 0)\n usage_count[var] += 1\n return usage_count", "def solve_model():\n 
from scipy.integrate import ode\n # Initialise constants and state variables\n (init_states, constants) = initConsts()\n\n # Set timespan to solve over\n voi = linspace(0, 100, 5000)\n\n # Construct ODE object to solve\n r = ode(computeRates)\n r.set_integrator('vode', method='bdf', atol=1e-06, rtol=1e-06, max_step=1)\n r.set_initial_value(init_states, voi[0])\n r.set_f_params(constants)\n\n # Solve model\n states = array([[0.0] * len(voi)] * sizeStates)\n states[:,0] = init_states\n for (i,t) in enumerate(voi[1:]):\n if r.successful():\n r.integrate(t)\n states[:,i+1] = r.y\n else:\n break\n\n # Compute algebraic variables\n algebraic = computeAlgebraic(constants, states, voi)\n return (voi, states, algebraic)", "def solve_model(init_amounts, times, neighbourhood, params):\n # init_amounts should be an array of length 3*no_cultures.\n growth_func = make_cns_model(params, neighbourhood)\n sol = odeint(growth_func, init_amounts, times)\n return np.maximum(0, sol)", "def run(self, diffusion_coefficients):\n mat = self.buildmatrix(diffusion_coefficients)\n\n rhs = np.zeros(self.size)\n rhs[0] = -(diffusion_coefficients[0] + diffusion_coefficients[1]) * self.phi0\n\n if self.verbose > 0:\n print(\"System of equations:\")\n for i in range(mat.shape[0]):\n row = [\"{0:3g}*x{1}\".format(mat[i, j], j + 1) for j in range(mat.shape[1])]\n if self.verbose > 0:\n print(\"[{0}] = [{1:3g}]\".format(\" + \".join(row), rhs[i]))\n\n if parameters.solver == 'jacobi':\n x = self.jacobi_solver(mat, rhs)\n elif parameters.solver == 'gauss-seidel':\n x = self.gauss_seidel_solver(mat, rhs)\n elif parameters.solver == 'tridiag':\n x = self.tridiag_solver(mat, rhs)\n else:\n sys.exit('Unknown solver')\n\n if self.verbose > 1:\n print(\"Solution: {0}\".format(x))\n error = np.dot(mat, x) - rhs\n if self.verbose > 1:\n print(\"Error: {0}\".format(error))\n x = np.insert(x, 0, self.phi0)\n x = np.append(x, 0)\n return x", "def __init__(self, equation_dict):\n self.equation = equation_dict['equation']\n self.variables = equation_dict['variables']\n self.dict = equation_dict\n self.x = list(self.variables)[-1]['variable'] # The variable to solve for", "def symbolic_solve(expr, x, y, xvals, varsol, bound_expr):\n\n # return function from expression\n fun = lambdify((x, y), expr, 'numpy')\n max_fun = lambdify((x, y), bound_expr, 'numpy')\n\n # solutions over varsol\n match = fun(np.expand_dims(xvals, axis=1), varsol)\n\n # closest match to ~ 0. (i.e. 
supply ~ demand)\n idx = bn.nanargmin(abs(match), axis=1)\n\n # solution with approximate minimizing\n sol = np.asarray([varsol[e, idx[e]] for e in range(len(xvals))])\n\n # deal with mismatches by only allowing up to 5% variation around An\n up = abs(max_fun(xvals, sol))\n mismatch = bn.nanmin(abs(match), axis=1) <= 0.05 * up\n mismatch = mismatch.astype(int)\n\n if all(mismatch) == 0: # no precise enough match\n mismatch[1] = 1 # pick 1st valid value\n\n sol = np.ma.masked_where(idx == 0, sol)\n sol = np.ma.masked_where(mismatch == 0, sol)\n\n return sol", "def infer_constants(formula, variables):\n if isinstance(variables, dict):\n for var in variables:\n other_vars = dict(variables)\n other_vars.pop(var)\n _check_var_conflicts({var}, other_vars)\n else:\n logger.error('infer constants does not know the variable domains.')\n warnings.warn(\n 'infer_constants can give an incorrect result '\n 'depending on the variable domains.\\n'\n 'If you give the variable domain definitions as dict, '\n 'then infer_constants will check for ambiguities.')\n tree = parser.parse(formula)\n old2new = dict()\n for u in tree:\n if u.type != 'var':\n continue\n if str(u) in variables:\n continue\n # Var (so NAME token) but not a variable\n # turn it into a string constant\n old2new[u] = nodes.Const(str(u))\n nx.relabel_nodes(tree, old2new, copy=False)\n return str(tree)", "def scs_solve(A, b, c, dim_dict, init_z=None, **kwargs):\n scs_cones = {'l': dim_dict['l'] if 'l' in dim_dict else 0,\n 'q': dim_dict['q'] if 'q' in dim_dict else [],\n 's': dim_dict['s'] if 's' in dim_dict else [],\n 'ep': dim_dict['ep'] if 'ep' in dim_dict else 0,\n 'ed': dim_dict['ed'] if 'ed' in dim_dict else 0,\n 'f': dim_dict['z'] if 'z' in dim_dict else 0}\n #print('scs_cones', scs_cones)\n sol = scs.solve({'A': A, 'b': b,\n 'c': c},\n cone=scs_cones,\n **kwargs)\n info = sol['info']\n\n if info['statusVal'] > 0:\n z = xsy2z(sol['x'], sol['s'], sol['y'], tau=1., kappa=0.)\n\n if info['statusVal'] < 0:\n x = np.zeros_like(sol['x']) \\\n if np.any(np.isnan(sol['x'])) else sol['x']\n\n s = np.zeros_like(sol['s']) \\\n if np.any(np.isnan(sol['s'])) else sol['s']\n\n y = np.zeros_like(sol['y']) \\\n if np.any(np.isnan(sol['y'])) else sol['y']\n\n if np.allclose(y, 0.) and c@x < 0:\n obj = c@x\n # assert obj < 0\n x /= -obj\n s /= -obj\n # print('primal res:', np.linalg.norm(A@x + s))\n\n if np.allclose(s, 0.) and b@y < 0:\n obj = b@y\n # assert obj < 0\n y /= -obj\n # print('dual res:', np.linalg.norm(A.T@y))\n\n # print('SCS NONSOLVED')\n # print('x', x)\n # print('s', s)\n # print('y', y)\n\n z = xsy2z(x, s, y, tau=0., kappa=1.)\n\n return z, info", "def solve(self,\n notifications = False\n ):\n\n if notifications:\n print('[info]: Solving differential equations for '+self.name+' model. ')\n \n\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n '''\n Its important to note that (par_est,) is the way to define a tuple\n with just one element. When we put (par_est), the parenteses won't\n indicate a tuple\n '''\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]", "def solve(self,\n notifications = False\n ):\n\n if notifications:\n print('[info]: Solving differential equations for '+self.name+' model. 
')\n \n\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n '''\n Its important to note that (par_est,) is the way to define a tuple\n with just one element. When we put (par_est), the parenteses won't\n indicate a tuple\n '''\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]", "def solve_nonlinear(self, params, unknowns, resids):\n\n x = params['x']\n a = self.a\n b = self.b\n c = self.c\n\n unknowns['y'] = a*x**2 + b*x + c", "def solve_system(self, rhs, factor, u0, t):\n solver_type, Id, A, nvars, lintol, liniter, sol = (\n self.solver_type,\n self.Id,\n self.A,\n self.nvars,\n self.lintol,\n self.liniter,\n self.u_init,\n )\n\n if solver_type == 'direct':\n sol[:] = spsolve(Id - factor * A, rhs.flatten()).reshape(nvars)\n elif solver_type == 'GMRES':\n sol[:] = gmres(\n Id - factor * A,\n rhs.flatten(),\n x0=u0.flatten(),\n tol=lintol,\n maxiter=liniter,\n atol=0,\n callback=self.work_counters[solver_type],\n callback_type='legacy',\n )[0].reshape(nvars)\n elif solver_type == 'CG':\n sol[:] = cg(\n Id - factor * A,\n rhs.flatten(),\n x0=u0.flatten(),\n tol=lintol,\n maxiter=liniter,\n atol=0,\n callback=self.work_counters[solver_type],\n )[0].reshape(nvars)\n else:\n raise ValueError(f'solver type \"{solver_type}\" not known in generic advection-diffusion implementation!')\n\n return sol", "def ecos_solve(A, b, c, dim_dict, **kwargs):\n\n ###\n # ECOS uses a different definition of the exp cone,\n # with y and z switched. In the future I might wrap it\n # (i.e., switch rows of A and elements of b, and switch\n # elements of the solutions s and y) but for now\n # I'm not supporting exp cones in ecos.\n ###\n\n ecos_cones = {'l': dim_dict['l'] if 'l' in dim_dict else 0,\n 'q': dim_dict['q'] if 'q' in dim_dict else []} # ,\n # 'e': dim_dict['ep'] if 'ep' in dim_dict else 0}\n # print(ecos_cones)\n if ('ep' in dim_dict and dim_dict['ep'] > 0\n or 's' in dim_dict and len(dim_dict['s']) > 0):\n raise SolverError(\n 'Only zero, linear, and second order cones supported.')\n zero = 0 if 'z' not in dim_dict else dim_dict['z']\n ecos_A, ecos_G = A[:zero, :], A[zero:, :]\n ecos_b, ecos_h = b[:zero], b[zero:]\n sol = ecos.solve(c=c, G=ecos_G, h=ecos_h, dims=ecos_cones,\n A=ecos_A, b=ecos_b, **kwargs)\n\n solution = True\n\n x = sol['x']\n s = np.concatenate([np.zeros(zero), sol['s']])\n # not sure we can trust this\n # s = b - A@x\n y = np.concatenate([sol['y'], sol['z']])\n\n if sol['info']['exitFlag'] == 0: # check that things make sense\n print('prim abs res.', np.linalg.norm(A@x + s - b))\n print('dua abs res.', np.linalg.norm(A.T@y + c))\n print('s^T y', s@y)\n\n if sol['info']['exitFlag'] in [1, 11]: # infeas\n solution = False\n obj = b@y\n assert (obj < 0)\n y /= -obj\n\n print('primal infeas. cert residual norm', np.linalg.norm(A.T@y))\n #cones = dim2cones(dim_dict)\n proj = prod_cone.Pi(-y, *make_prod_cone_cache(dim_dict))\n print('primal infeas dist from cone', np.linalg.norm(proj))\n # if not (np.linalg.norm(proj) == 0.) and sol['info']['exitFlag'] == 1.:\n # raise SolverError\n\n x = np.zeros_like(x)\n s = np.zeros_like(s)\n\n if sol['info']['exitFlag'] in [2, 12]: # unbound\n solution = False\n obj = c@x\n assert (obj < 0)\n x /= -obj\n s /= -obj\n\n print('dual infeas. 
cert residual norm', np.linalg.norm(A@x + s))\n proj = prod_cone.Pi(s, *make_prod_cone_cache(dim_dict))\n print('dual infeas cert dist from cone', np.linalg.norm(s - proj))\n # if not (np.linalg.norm(s - proj) == 0.) and sol['info']['exitFlag'] == 2.:\n # raise SolverError\n y = np.zeros_like(y)\n\n # print('ECOS SOLUTION')\n # print('solution', solution)\n # print('x', x)\n # print('s', s)\n # print('y', y)\n\n z = xsy2z(x, s, y, tau=solution, kappa=not solution)\n\n return z, sol['info']", "def solve(self,notifications = False):\n if notifications:\n print('[info]: Solving differential equations for '+self.name+' model. ')\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]", "def solve(self,notifications = False):\n if notifications:\n print('[info]: Solving differential equations for '+self.name+' model. ')\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]", "def solve(\n self,\n x0,\n params=(),\n internal_x0=None,\n solver=None,\n attached_solver=None,\n **kwargs\n ):\n if not isinstance(solver, (tuple, list)):\n solver = [solver]\n if not isinstance(attached_solver, (tuple, list)):\n attached_solver = [attached_solver] + [None] * (len(solver) - 1)\n _x0, self.internal_params = self.pre_process(x0, params)\n for solv, attached_solv in zip(solver, attached_solver):\n if internal_x0 is not None:\n _x0 = internal_x0\n elif self.internal_x0_cb is not None:\n _x0 = self.internal_x0_cb(x0, params)\n\n nfo = self._get_solver_cb(solv, attached_solv)(_x0, **kwargs)\n _x0 = nfo[\"x\"].copy()\n self.internal_x = _x0\n x0 = self.post_process(self.internal_x, self.internal_params)[0]\n return x0, nfo", "def solve(self, x_0, dual_x_0):\n # Sanitize the inputs\n if type(x_0) is not np.ndarray or type(dual_x_0) is not np.ndarray:\n x_0 = np.array(x_0)\n dual_x_0 = np.array(dual_x_0)\n # Make sure that the arrays are column vectors\n x_0 = x_0.reshape(-1, 1)\n dual_x_0 = dual_x_0.reshape(-1, 1)\n\n print (\"Starting SQP minimization...\")\n [x, dual_x, exit_info] = self.globalized_sqp(x_0, dual_x_0)\n conv_criteria = exit_info['val']\n\n print (exit_info['msg'])\n print (\"Exiting with ||grad[L]|| = {0:e}\".format(conv_criteria))\n print (\"x = {0}\".format(x.reshape(-1)))\n print (\"dual_x = {0}\".format(dual_x.reshape(-1)))\n\n return [x, dual_x]", "def params_and_sols(Kd_microM, Rtot, Etot):\r\n\r\n global n, Kd\r\n n = len(Kd_microM) # nbr of effectors\r\n\r\n # micro to nano Molar transformation\r\n micro_to_nano = [10 ** 3] * n\r\n Kd = multiply(Kd_microM, micro_to_nano)\r\n\r\n # Assign ICs to solve the system\r\n IC = [1.] 
* n\r\n\r\n # Compute absolute values for complex formation (nM)\r\n C = fsolve(system, IC)\r\n # Compute relative/normalized values for complex formation (%)\r\n C_perc = [C[i] / sum(C) for i in range(n)]\r\n\r\n # verify if the results are 0:\r\n if [C[i] * (Kd[i] + Rtot - sum(C)) + Etot[i] * (sum(C) - Rtot) for i in range(n)] > [10 ** (-2)] * n:\r\n print('ATTENTION system not solved properly! \\n')\r\n # break\r\n\r\n return C_perc, C", "def solve(self):\n\n # Get string representation of self\n formula_str = self.to_string()\n\n # Run the solver as a subprocess\n process = subprocess.run([\"cryptominisat5\", \"--verb=0\"],\n input=bytes(formula_str, \"utf-8\"),\n stdout=subprocess.PIPE,\n timeout=15)\n\n assignments = get_assignments(process.stdout)\n\n return [self.variable_meaning[a] for a in assignments if a > 0]", "def system(coeffs: List[List[int]], t: Symbol = Symbol('t', real=True)):\n matrix = Matrix(coeffs)\n procedure = Procedure()\n ident = eye(matrix.rows)\n lam = Symbol('lambda')\n char_eq = simplify((matrix - lam * ident).det())\n\n procedure\\\n .text('Characteristic equation: ', nl=True)\\\n .eq(Eq(char_eq, 0, evaluate=False))\n\n rts = roots(char_eq, lam)\n\n procedure.text('Eigenvalues and eigenvectors', nl=True)\n\n eigenvects = matrix.eigenvects()\n count = 1\n consts = numbered_symbols('C', Dummy, 1)\n sols = []\n conj_roots = []\n for eigenval, mult, eigenvec in eigenvects:\n\n # skip the conjugates of complex eigenvalues\n if not eigenval.is_real:\n if eigenval in conj_roots:\n continue\n\n procedure.latex('\\\\lambda_{} = {}'.format(\n count, eigenval), nl=True)\n for i in range(len(eigenvec)):\n \n aug_matrix = (matrix - eigenval * ident)\\\n .col_insert(matrix.cols, Matrix([0 for i in range(matrix.rows)]))\n procedure.eq(aug_matrix, nl=False).text(' ~ ')\\\n .eq(aug_matrix.rref()[0], nl=False).latex('\\\\Rightarrow ')\n\n procedure.eq(Eq(Dummy('v'), eigenvec[i], evaluate=False))\n if not eigenval.is_real:\n real, imag = eigenval.as_real_imag()\n real_vec, imag_vec = (\n eigenvec[i] * expand(exp(imag*I*t), complex=True)).as_real_imag()\n\n procedure.text(\"Use Euler's formula to expand the imaginary part\", nl=True)\n procedure.eq(eigenvec[i], nl=False).latex(' ').eq(exp(real*t + imag*I*t), nl=False)\\\n .latex(' = ').eq(exp(real*t), nl=False).latex(' ')\\\n .eq(eigenvec[i] * expand(exp(imag*I*t), complex=True), nl=False).latex(' = ')\\\n .eq(exp(real*t), nl=False).latex('\\\\left( ').eq(real_vec, nl=False)\\\n .latex(' + ').eq(imag_vec, nl=False).latex('\\\\right)', nl=True)\n # if mult == len(eigenvec):\n sols.append(['comp', exp(real * t), real_vec, imag_vec])\n\n # we don't need the conjugate\n conj_roots.append(conjugate(eigenval))\n else:\n # if mult == len(eigenvec):\n sols.append(['real', exp(eigenval * t), eigenvec[i]])\n \n if mult != len(eigenvec): # repeated eigenvectors\n procedure.text('Find the generalized eigenvector')\\\n .latex('\\\\left( M - \\\\lambda I \\\\right) w = v ', nl=True)\n \n vec_syms = symbols('a0:{}'.format(matrix.rows))\n generalized_eigenvec = Matrix(vec_syms)\n\n # note: insert is not in-place\n # construct the augmented matrix [ M-lambda I | v]\n aug_matrix = (matrix - eigenval * ident).col_insert(matrix.cols, eigenvec[0]) \n procedure.eq(aug_matrix, nl=False).text(' ~ ').eq(aug_matrix.rref()[0], nl=False)\n\n result = solve((matrix - eigenval * ident) *\n generalized_eigenvec - eigenvec[0], generalized_eigenvec)\n\n free_vars = list(vec_syms)\n\n # use free variables to express other variables\n for var in result:\n if var 
in free_vars:\n free_vars.remove(var)\n generalized_eigenvec = generalized_eigenvec.subs(\n var, result[var])\n for i, var in enumerate(free_vars): # use 0, 1... for free variables\n generalized_eigenvec = generalized_eigenvec.subs(var, i)\n\n procedure.latex('\\\\Rightarrow ')\\\n .eq(Eq(Dummy('w'), generalized_eigenvec, evaluate=False))\n\n sols.append(\n ['gen', exp(eigenval * t), eigenvec[0], generalized_eigenvec])\n\n count += mult\n\n procedure.text('General solution: ', nl=True)\n procedure.latex('\\\\vec{\\\\mathbf{x}} = ')\n gen_sols = []\n for i in range(len(sols)):\n sol = sols[i]\n if sol[0] == 'real':\n procedure.eq(next(consts), nl=False).eq(\n sol[1], nl=False).eq(sol[2], nl=False)\n gen_sols.append(sol[1] * sol[2])\n elif sol[0] == 'gen':\n procedure.eq(next(consts), nl=False).eq(sol[1], nl=False)\\\n .latex('\\\\left(').eq(sol[2], nl=False).latex('t + ')\\\n .eq(sol[3], nl=False).latex('\\\\right)')\n gen_sols.append(sol[1] * sol[2] * t + sol[1] * sol[3])\n elif sol[0] == 'comp':\n procedure.eq(sol[1], nl=False)\\\n .latex('\\\\left(').eq(next(consts), nl=False).eq(sol[2], nl=False).latex(' + ')\\\n .eq(next(consts), nl=False).eq(sol[3], nl=False).latex('\\\\right)')\n gen_sols.append(sol[1] * sol[2])\n gen_sols.append(sol[1] * sol[3])\n\n if i != len(sols) - 1:\n procedure.latex('+')\n\n return gen_sols, procedure", "def solve(self) -> Dict:\n solution = self.opt.decision_variables.vec2dict(self._solve())\n\n if self._error_on_fail and (not self.did_solve()):\n raise RuntimeError(\"Solver failed!\")\n\n # Add full model state to the solution dictionary\n for model in self.opt.models:\n for d in model.time_derivs:\n n_s = model.state_name(d)\n n_s_x = model.state_optimized_name(d)\n if isinstance(model, RobotModel):\n if model.num_param_joints > 0:\n n_s_p = model.state_parameter_name(d)\n t = solution[n_s_x].shape[1]\n solution[n_s] = cs.DM.zeros(model.dim, t)\n solution[n_s][model.optimized_joint_indexes, :] = solution[\n n_s_x\n ]\n solution[n_s][model.parameter_joint_indexes, :] = self._p_dict[\n n_s_p\n ]\n else:\n solution[n_s] = solution[n_s_x]\n else:\n solution[n_s] = solution[n_s_x]\n\n return solution", "def find_solution(formula):\n #if formula is empty or if there is a contradiction between clauses\n if not formula or disqualifier(formula):\n return {}\n \n solution = get_one_unit_clause(formula)\n #if there are no unit clauses, move on to non-unit clauses\n if not solution:\n solution = get_non_unit_clause(formula)\n #if there are contradictions with literals on non-unit clauses, backtrack, get rid of that contradicting literal, and try again\n if disqualifier(reduce_expression(formula, solution)):\n solution = get_non_unit_clause(formula, True)\n updatedForm = reduce_expression(formula, solution)\n #double asterisks allow any number of keywords to be passed as an argument\n return {**find_solution(updatedForm), **{solution[0]: solution[1]}}" ]
[ "0.64777213", "0.6045988", "0.5746843", "0.5743898", "0.54364383", "0.54286045", "0.53808516", "0.5295969", "0.52503586", "0.5218554", "0.51738834", "0.51685405", "0.51086473", "0.5097052", "0.50915754", "0.50875384", "0.50407755", "0.50407755", "0.5009267", "0.49932858", "0.49837536", "0.49832365", "0.49832365", "0.49455592", "0.49402204", "0.49363184", "0.49345174", "0.49182728", "0.49071804", "0.48810178" ]
0.80595094
0
Gives the default department by checking if one is present in the context
def get_default_department_id(self, cr, uid, context=None):
    user_obj = self.pool.get('res.users').browse(cr, uid, uid).employee_ids
    if user_obj:
        dept_id = (self.pool.get('res.users').browse(cr, uid, uid).employee_ids[0].department_id
                   and self.pool.get('res.users').browse(cr, uid, uid).employee_ids[0].department_id.id
                   or False)
        return dept_id
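For context, a hedged sketch of how such a method is typically used in the old OpenERP 7 API this snippet targets — registered as a default via the model's `_defaults` mapping; the `department_id` field name here is illustrative, not taken from the record:

# assumed wiring inside the same osv.Model subclass (OpenERP 7 style)
_defaults = {
    'department_id': get_default_department_id,  # called as fn(self, cr, uid, context)
}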
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def department(self):\n if \"department\" in self._prop_dict:\n return self._prop_dict[\"department\"]\n else:\n return None", "def department(self):\n if \"department\" in self._prop_dict:\n return self._prop_dict[\"department\"]\n else:\n return None", "def department(department_id):\n # gather data from db about all employees\n return render_template(\"department.html\",\n department_id=department_id)", "def department(self) -> object:\n return self._department", "def department(department_id):\n\n department_obj = Department.query.get_or_404(department_id)\n employees = Employee.query.filter_by(department_id=department_id)\n return render_template('department/department.html',\n department=department_obj, employees=employees)", "def __str__(self):\n\n return self.department_name", "def show_department(id_: int):\n\n logger.debug('Routed to /departments/%i', id_)\n titles = ['Name', 'Average Salary', 'Employees', 'E-mail']\n department = None\n\n try:\n department = ds.get(id_)\n except IntegrityError:\n logger.error(\"Can't find employee with id %i\", id_)\n abort(404)\n\n logger.info('Get department %s', department.name)\n return render_template('department.html',\n title=f'Department {department.name}',\n table_title=f'Department: {department.name}',\n headers=titles,\n department=department)", "def get_store_department(self):\n\n departments = tuple(STORE_DEPARTMENT.keys())\n\n position, step = \"\", \"\"\n\n while True:\n clean_terminal()\n data, reply_3 = self.__get_sub_department(position), None\n sub_department = data[0]\n for i, department in enumerate(sub_department, start=1):\n if isinstance(department, dict):\n cprint(str(i) + ') ' + department[\"value\"], 'blue')\n else:\n cprint(str(i) + ') ' + department, 'blue')\n\n reply_2 = self.ask_with_input('Choisir un numéro '\n '(tapez \"quit\" pour quitter, '\n '\"back\" pour revenir en arrière)'\n ' : ', len(sub_department),\n ('quit', 'back'))\n if reply_2 == 'quit':\n break\n if reply_2 == 'back':\n if not position:\n break\n position = \"|\".join(position.split('|')[:-int(step[-1])])\n step = step[:-1]\n else:\n department_number = int(reply_2) - 1\n if data[1] is not None and isinstance(\n sub_department[department_number], dict):\n position += \"|tuple:\" + str(data[1])\n position += \"|dict:\" + str(\n sub_department[department_number][\"key_in_dict\"])\n step += \"2\"\n elif data[1] is None and not position:\n position += 'dict:' + str(departments[int(reply_2) - 1])\n step += \"1\"\n else:\n self.__print_products_navigation(\n sub_department[department_number])", "def get_department_by_id(department_id):\n return Department.query.get(department_id)", "def getDepartmentDictionary( self ):\n return DepartmentDictionary.departmentDictionary", "def default_get(self, cr, uid, fields, context=None): \n \n \n res = super(granted_rights_order, self).default_get(cr, uid, fields, context=context)\n \n employee_obj = self.pool.get('hr.employee')\n department_obj = self.pool.get('hr.department')\n manager = False\n donor_emp_id = []\n \n if uid != 1 :\n\n donor_emp_id = employee_obj.search(cr ,uid, [('user_id' , '=' , uid )])\n deparment_id = employee_obj.browse(cr,uid,donor_emp_id[0]).department_id.id\n \n if donor_emp_id[0] == department_obj.browse(cr,uid,deparment_id).manager_id.id :\n manager = True\n \n \n \n \n \n \n \n \n \n if donor_emp_id :\n res.update({ 'employee_donor': donor_emp_id[0], \n 'department_id' : deparment_id,\n 'is_a_amanger' : manager,\n })\n return res", "def __str__(self) -> str:\n\n return 
self.department_name", "def __str__(self):\n return \"%s (department %s)\" % (self.name, self.get_category_display())", "def default_company():\n return Transaction().context.get('company')", "def add_department():\r\n check_admin()\r\n\r\n add_department = True\r\n\r\n form = DepartmentForm()\r\n if form.validate_on_submit():\r\n department = Department(name=form.name.data,\r\n description=form.description.data)\r\n try:\r\n # add department to the database\r\n db.session.add(department)\r\n db.session.commit()\r\n flash('You have successfully added a new department.')\r\n except:\r\n # in case department name already exists\r\n flash('Error: department name already exists.',category='error')\r\n\r\n # redirect to departments page\r\n return redirect(url_for('admin.list_departments'))\r\n\r\n # load department template\r\n return render_template('admin/departments/department.html', action=\"Add\",\r\n add_department=add_department, form=form,\r\n title=\"Add Department\")", "def add_department():\n\tcheck_admin()\n\n\tadd_department = True\n\n\tform = DepartmentForm()\n\tif form.validate_on_submit():\n\t\tdepartment = Department(name=form.name.data,description=form.description.data)\n\n\t\ttry:\n\t\t\t#add department to the database\n\t\t\tdb.session.add(department)\n\t\t\tdb.session.commit()\n\t\t\tflash(\"You have successsfully added a new department.\")\n\t\texcept:\n\t\t\t#incase the department already exists\n\t\t\tflash(\"Error: department already exists.\")\n\t#once the admin creates a new department,they will be redirected to the departments page\n\treturn render_template('admin/departments/department.html',action=\"Add\", add_department= add_department,form=form,title = \"Add Department\")", "def test_api_can_get_department_by_id(self):\n res = self.client().get(service_url+'/1')\n self.assertEqual(res.status_code, 200)\n self.assertIn('dep 1', str(res.data))", "def doctor(request):\n assert isinstance(request, HttpRequest)\n if request.method == 'POST':\n request.session['selected_package'] = request.POST['package_id']\n request.session['selected_doctor'] = request.POST['doctor_id']\n return redirect('/doctor-detail/')\n status, result = api.show_doctor_in_department()\n # print(result)\n return render(\n request,\n 'app/doctor.html',\n {\n 'title': 'แผนกและแพทย์',\n 'departments': result,\n 'logged_user': request.session.get('user')\n }\n )", "def departments(request):\n if 'selected_package' in request.session:\n del request.session['selected_package']\n assert isinstance(request, HttpRequest)\n status, result = api.show_departments()\n return render(\n request,\n 'app/departments.html',\n {\n 'title': 'แผนกและแพ็คเกจ',\n 'departments': result,\n 'logged_user': request.session.get('user')\n }\n )", "def set_department_by_id(department_id):\n return Department.query.filter(id=department_id).one()", "def Expert(self, default=None):\n return self.data.get('expert', default)", "def seed4():\n if Department.find_by_identity(app.config['SEED_DEPARTMENT']) is not None:\n return None\n\n params = {\n 'departmentname': 'testdept',\n 'deptowneremail': '[email protected]'\n }\n\n return Department(**params).save()", "def _get_default_period(self, cr, uid, context=None):\n context = context or {}\n if context.get('period_id', False):\n return context['period_id']\n account_period_obj = self.pool.get('account.period')\n ctx = dict(context, account_period_prefer_normal=True)\n ids = account_period_obj.find(cr, uid, context=ctx)\n period_id = False\n if ids:\n period_id = ids[0]\n return 
period_id", "def departments():\n # gather data from db about all departments\n return render_template(\"departments.html\")", "def Agency(self, default=None):\n return self.data.get('agency', default)", "def department(self, department: object):\n\n self._department = department", "def get(id_):\n\n logger.debug('Catch GET request by URL /api/departments/%i.', id_)\n try:\n department = ds.get(id_)\n if not department.id:\n raise Exception\n except Exception:\n logger.error('There is no department with id %i', id_)\n return {'message': f'There is no department with {id_}.'}, 404\n return marshal_departments(department)", "def context_or_settings(context, name):\n if name in context:\n return context[name]\n return getattr(settings, \"DEFAULT_\" + name.upper())", "def default_value(self) -> Optional[Any]:\n return self.get(\"/DV\")", "def context_or_settings(context, name):\n if name in context:\n return context[name]\n return getattr(settings, 'DEFAULT_' + name.upper())" ]
[ "0.727264", "0.727264", "0.6299022", "0.62791663", "0.6205959", "0.6025972", "0.59099317", "0.58730555", "0.5781836", "0.5762072", "0.57508874", "0.5684379", "0.5670995", "0.564136", "0.5601505", "0.5576277", "0.5553041", "0.55370617", "0.5526694", "0.55206823", "0.5495728", "0.5479703", "0.546089", "0.54549885", "0.54338473", "0.5417053", "0.5336082", "0.53142524", "0.52895916", "0.5284607" ]
0.8002562
0
Parsed content of Config file into Dictionary
[client]       > becomes primary key with values as what follows
host="host"
user="un"      > becomes secondary key-values
password="pw"  > becomes secondary key-values
port="port"
def load_config_file(cfgFile):
    with open(cfgFile) as f:
        content = f.readlines()
    cfg = {}
    primary_key = 0  # has primary key been discovered (the string enclosed in bracket in config file)
    for line in content:
        if primary_key:
            if '=' in line:
                kv = line.split('=')
                cfg[primary_key].update({kv[0].strip(' "\n'): kv[1].strip(' "\n')})
            else:
                primary_key = 0
        if (line[0] == '[' and line[-2] == ']'):
            cfg[line[1:-2]] = {}
            primary_key = line[1:-2]
    return cfg
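A minimal usage sketch (the filename is hypothetical; the resulting dict assumes a file containing exactly the [client] block shown in the docstring above):

cfg = load_config_file('my.cnf')  # hypothetical filename
# cfg == {'client': {'host': 'host', 'user': 'un', 'password': 'pw', 'port': 'port'}}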
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_dict_from_file(self, **kwargs):\r\n\r\n if not self.linux_handle.download(local_file='ipsec.conf', remote_file=self.conf_path, protocol='scp'):\r\n self.linux_handle.log(\"Downloading ipsec.conf file failed\")\r\n raise Exception(\"Downloading ipsec.conf file failed \")\r\n self.linux_handle.log(\"Reading ipsec.conf file\")\r\n try:\r\n with open('ipsec.conf', 'r') as f:\r\n lines = f.readlines()\r\n except Exception as err:\r\n self.linux_handle.log(level='ERROR', messsage=\"Unable to open file ipsec.conf\")\r\n raise err\r\n ipsec_conf_dict = dict()\r\n line_key = ''\r\n for line in lines:\r\n line = line.strip()\r\n if re.match('#', line) or not line:\r\n next\r\n elif re.match('conn ', line) or re.match('config setup', line):\r\n # (conn_string, conn_name) = line.split()\r\n ipsec_conf_dict[line] = dict()\r\n line_key = line\r\n elif re.search('=', line):\r\n (key, value) = line.split('=', 1)\r\n ipsec_conf_dict[line_key][key] = value\r\n else:\r\n print(\"\\n None matched line: %s\" % line)\r\n print(ipsec_conf_dict)\r\n return ipsec_conf_dict", "def _parse(self, content):\n result = TincConfParser.conf_file.parseString(to_unicode(content))\n for entry in result.get(\"entries\", []):\n self[entry[0]] = entry[1]\n keys = result.get(\"keys\", [])\n if keys:\n if len(keys) > 1:\n raise ParserError(\"Hostfile specifies more than one public key!\")\n self.rsa_public_key = '\\n'.join(keys[0])\n old_keys = result.get(\"old_keys\", [])\n for old_key in old_keys:\n self.old_public_keys.append('\\n'.join(old_key))", "def parse(self, lines):\n cur_entry = None\n indents = []\n for line in lines:\n kv_ = _key_value(line)\n if len(kv_) > 1:\n key, value = kv_\n if key.lower() == \"host\":\n cur_entry = value\n self.hosts_.add(value)\n else:\n indents.append(_indent(line))\n self.lines_.append(ConfigLine(line=line, host=cur_entry, key=key, value=value))\n else:\n self.lines_.append(ConfigLine(line=line))\n # use most popular indent as indent for file, default ' '\n counter = Counter(indents)\n popular = list(reversed(sorted(counter.items(), key=lambda e: e[1])))\n self.indent = popular[0][0] if len(popular) > 0 else ' '", "def configServer():\n try:\n config = open(r\"./server.conf\",\"r+\")\n except IOError,e:\n print e\n return 0\n configLines = []\n try:\n while True:\n configLines.append(config.next())\n except StopIteration:\n pass\n finally:\n config.close()\n configInfo = {}\n for line in configLines:\n if line[0] == \"#\" or line[0] == \"\\n\":\n continue\n configLineArgumentList = line[:-1].split(\"=\")\n key = configLineArgumentList[0]\n value = configLineArgumentList[1]\n configInfo.update({key:value})\n logging.info(\"Configuration done sucssesfully\")\n return configInfo", "def loadConfig():\n lines = []\n config = {}\n here = path.dirname(__file__)\n fn = path.join(here,'manatee.conf')\n try:\n with codecs.open(fn,'rU','utf-8') as conf:\n lines = conf.readlines()\n conf.close()\n except IOError as e:\n print \" Could not open configuration file: %s\" % e\n\n for line in lines:\n try:\n line = line.strip()\n if line:\n values = [x.strip() for x in line.split('=')]\n config[values[0]] = values[1]\n except Exception as e:\n print \"There was an error in the configuration file: %s\" % e\n # TODO: Any strings from the config file that might be displayed or passed into the SQL server need to be validated here.\n# config = validateConfig(config)\n return config", "def readConfig():\n hosts = []\n domains = []\n with open(\"./host.conf\", \"r\") as fd:\n for line in 
fd.readlines():\n line = line.strip().split()\n if line != []:\n # Parse config for zone files and hosts\n if line[0] == \"ZONE_FILE:\":\n zoneFile = line[1]\n if line[0] == \"REVERSE_ZONE_FILE:\":\n reverseZoneFile = line[1]\n if line[0] == \"HOST:\":\n hosts.append((line[1], line[2], line[3]))\n if line[0] == \"DOMAIN:\":\n domains.append((line[1], line[2], line[3]))\n\n return zoneFile, reverseZoneFile, hosts, domains", "def get_config(config_file):\n config = ConfigParser.RawConfigParser()\n try:\n config.read(config_file)\n except:\n LOG.error(\"File ping.conf not found\")\n data_center = config.get('DEFAULT', 'dc_name')\n num_process = config.get('DEFAULT', 'process')\n default = {}\n default = {\n \"data_center\": data_center,\n \"num_process\": num_process\n }\n influx_host = config.get('influxdb', 'host')\n influx_port = 8086\n try:\n influx_port = config.get('influxdb', 'port')\n except:\n pass\n influx_username = config.get('influxdb', 'username')\n influx_password = config.get('influxdb', 'password')\n influx_database = config.get('influxdb', 'database')\n influxdb_creds = {}\n influxdb_creds = {\n \"host\": influx_host,\n \"port\": influx_port,\n \"database\": influx_database,\n \"username\": influx_username,\n \"password\": influx_password\n }\n redis_creds = {}\n redis_host = config.get('redis', 'host')\n redis_port = 6379\n try:\n redis_port = config.get('redis', 'port')\n except:\n pass\n redis_password = config.get('redis', 'password')\n redis_creds = {\n \"host\": redis_host,\n \"port\": redis_port,\n \"password\": redis_password,\n }\n return default, influxdb_creds, redis_creds", "def parseconfig_se(cfile):\n cdict = {}\n f = open(cfile,'r')\n lines = f.readlines()\n for l in lines:\n a = string.split(l)\n if len(a) > 0:\n if a[0][0] != '#':\n maxi = len(a)\n for i in range(1,len(a)):\n if a[i][0] == '#':\n maxi = i\n break\n # Turn comma-separated lists into python lists\n entry = []\n for e in a[1:maxi]:\n if string.find(e,','):\n entry = entry + string.split(e,',')\n else:\n entry = entry + [e]\n cdict[a[0]] = entry\n return cdict", "def read_config(self, filename):\n heading = None\n with open(filename) as fin: # open the file\n for line in fin:\n line = line.strip() # cut the tail\n if line.startswith('==') and line.endswith('=='): # detect headings\n heading = line[2:-2] # heading\n self._config[heading] = {} # create a dictionary for the heading\n elif line.count(':') == 1 and heading is not None: # detect attribute\n attr, _, value = line.partition(':') # get attribute and their value\n self._config[heading][attr[:-1]] = value[1:] # update into dic\n elif line == \"\": # if line is empty, skip\n continue\n else: # bad line\n messagebox.showerror(\"Error\", \"Bad config file, I can't read it!\")\n return self._config", "def _read_config(filename):\n\n c = {}\n with open(filename, \"r\") as f:\n for line in f:\n key, val = line.split(\"=\")\n key = key.strip()\n val = val.strip()\n c[key] = val\n return c", "def process_config(config_file=''):\n if not config_file:\n config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"config\")\n config = configparser.ConfigParser()\n config.read(config_file)\n config_dict = {}\n for section in config.sections():\n config_dict[section] = {name: value for name, value in config.items(section)}\n return config_dict", "def read_config(config_file):\n config_dict = {\n \"port\": \"22\",\n \"persist_remote_files\": False\n }\n with open(config_file) as fin:\n for config_line in fin:\n config_line = 
config_line.strip()\n # check for commented out lines\n if config_line.startswith(\"#\") or len(config_line) == 0:\n continue\n key, value = config_line.split(\"=\")\n config_dict[key.rstrip()] = value.lstrip()\n\n return config_dict", "def load_config(self):\n\n with open(os.path.expanduser(self.config_filename), 'r') as f:\n lines = f.readlines()\n\n _usable = lambda l: not(l.startswith('#') or l.strip() == '')\n lines = filter(_usable, lines)\n\n def _build_config(key, value, d):\n \"\"\" Called recursively to split up keys \"\"\"\n pieces = key.split('.', 1)\n if len(pieces) == 1:\n d[pieces[0]] = value.strip()\n else:\n d[pieces[0]] = _build_config(pieces[1], value, {})\n\n return d\n\n d = {}\n for line in lines:\n if '=' not in line:\n continue\n\n key, value = line.split('=')\n d = _build_config(key, value, d)\n\n return d", "def get_config_dict(config_filename):\n f = open(config_filename)\n ret = dict()\n for line in f.readlines():\n arr = line.strip().split('=', 1)\n ret[arr[0]] = arr[1]\n\n return ret", "def parse_config(config_file):\n\n conf = {}\n config = configparser.ConfigParser()\n valid_schedule = r'\\d{1,2}:\\d{2}(:\\d{2})*\\s+[AM|PM]'\n \n #configparser does not throw exception (empty dataset if files are not found)\n if(len(config.read(config_file)) == 0):\n raise FileNotFoundError(\"Failed to find config file\")\n\n\n conf['credentials'] = {\"username\": config['credentials']['username'], \"password\": config['credentials']['password']}\n conf['hashtags'] = [hashtag for hashtag in config['hashtags'].values()]\n conf['schedule'] = [time.upper() for time in config['schedule'].values() if re.search(valid_schedule,time, re.IGNORECASE)]\n conf['driverpath'] = config['driver']['path']\n\n return conf", "def config_parsing(configfile):\n config = ConfigParser.ConfigParser()\n config.read(configfile)\n db_connection = config.get('app:main', 'database_connection')\n db, eng = map_database(db_connection)\n return db, eng", "def read_configfile():\n configtp = namedtuple(\"Config\", [\"lb_user\", \"lb_pwd\", \"lb1\", \"lb2\", \"lb_dg\", \"lb_dg_partition\",\n \"ca\", \"ca_proxy\", \"cm_chain\", \"cm_key\", \"cm_renewal_days\",\n \"cm_delayed_days\", \"plugin\"])\n config = ConfigParser.ConfigParser()\n config.read(CONFIG_FILE)\n if config.getboolean(\"Certificate Authority\", \"use proxy\"):\n ca_proxy = config.get(\"Certificate Authority\", \"proxy\")\n else:\n ca_proxy = False\n\n if config.getboolean(\"Load Balancer\", \"cluster\"):\n bigip1 = config.get(\"Load Balancer\", \"host 1\")\n bigip2 = config.get(\"Load Balancer\", \"host 2\")\n else:\n bigip1 = config.get(\"Load Balancer\", \"host 1\")\n bigip2 = None\n\n try:\n plugin_section = config.items('Plugin')\n except ConfigParser.NoSectionError:\n plugin_section = None\n\n the_config = configtp(\n lb1=bigip1,\n lb2=bigip2,\n lb_user=config.get(\"Load Balancer\", \"username\"),\n lb_pwd=config.get(\"Load Balancer\", \"password\"),\n lb_dg=config.get(\"Load Balancer\", \"datagroup\"),\n lb_dg_partition=config.get(\"Load Balancer\", \"datagroup partition\"),\n ca=config.get(\"Certificate Authority\", \"directory url\"),\n ca_proxy=ca_proxy,\n cm_chain=config.getboolean(\"Common\", \"include chain\"),\n cm_key=config.get(\"Common\", \"account key\"),\n cm_renewal_days=int(config.get(\"Common\", \"renewal days\")),\n cm_delayed_days=int(config.get(\"Common\", \"delayed installation days\")),\n plugin=plugin_section)\n return the_config", "def get_config_dicts(config_file):\n config_dicts = dict()\n time_stamp = 
time.strftime(\"%Y-%m-%d-%H-%M-%S\")\n\n current_name = None\n for i, line in enumerate(config_file):\n try:\n line = line.strip()\n line = re.sub(r\"#.*\", \"\", line)\n line = re.sub(r\"\\$TIME\", time_stamp, line)\n if not line:\n pass\n elif line.startswith(\";\"):\n pass\n elif OBJECT_NAME.match(line):\n current_name = OBJECT_NAME.match(line).group(1)\n if current_name in config_dicts:\n raise IniSyntaxError(i, \"Duplicit object key: '{}', line {}.\"\n .format(current_name, i))\n config_dicts[current_name] = dict()\n elif KEY_VALUE_PAIR.match(line):\n matched = KEY_VALUE_PAIR.match(line)\n key = matched.group(1)\n value_string = matched.group(2)\n if key in config_dicts[current_name]:\n raise IniSyntaxError(i, \"Duplicit key in '{}' object, line {}.\"\n .format(key, i))\n config_dicts[current_name][key] = format_value(value_string)\n else:\n raise IniSyntaxError(i, \"Unknown string: '{}'\".format(line))\n except IniSyntaxError as exc:\n raise\n except Exception as exc:\n raise IniSyntaxError(i, \"Error\", exc) from None\n\n config_file.close()\n return config_dicts", "def parse_data_config(path):\n cfg = dict()\n cfg['gpus'] = '0,1,2,3'\n cfg['num_workers'] = '10'\n \n with open(path, 'r') as fp:\n lines = fp.readlines()\n for line in lines:\n line = line.strip()\n if line == '' or line.startswith('#'):\n continue\n key, value = line.split('=')\n cfg[key.strip()] = value.strip()\n \n return cfg", "def get_config(client):\n func = client.get_config()\n config = run_in_loop_now('get_config', func)\n\n a = {}\n b = {}\n for i in config['activity']:\n a[i['label']] = i['id']\n b[i['id']] = i['label']\n activities_by_name = a\n activities_by_id = b\n d = {}\n for device in config['device']:\n device_cmds = []\n for grp in device['controlGroup']:\n for fnc in grp['function']:\n device_cmds.append(json.loads(fnc['action'])['command'])\n d[device['label']] = {\"id\": device['id'],\n \"cmds\": device_cmds}\n devices = d\n return config", "def get_config():\n return {'address': ADDRESS, 'https': HTTPS == 'https',\n 'password': PASSWORD, 'username': USERNAME,\n 'port': PORT, 'version': VERSION}", "def parse_config_file(config_file : str) -> dict:\n try:\n config = ConfigParser()\n config.read(config_file)\n\n params = {\n \"config_file\" : config_file,\n \"microservice_port\" : \"0\",\n \"loopback_nr\" : \"10\",\n \"loopback_name\" : \"Fakewebcam\",\n \"loopback_exclusive\" : \"1\",\n \"pid\" : \"/var/run/fakewebcam.pid\",\n }\n\n params[\"config_file\"] = os.path.realpath(config_file)\n params[\"microservice_port\"] = config[\"bodypix\"][\"service_port\"]\n params[\"loopback_nr\"] = os.path.basename(config[\"loopback\"][\"device\"]).split(\"video\")[1]\n params[\"loopback_name\"] = config[\"loopback\"][\"name\"]\n params[\"loopback_exclusive\"] = config[\"loopback\"][\"exclusive_caps\"]\n params[\"pid\"] = config[\"daemon\"][\"pid\"]\n\n return params\n except Exception as e:\n print(\"ERROR: {}: {}\".format(config_file, e))\n exit(1)", "def __analyze_config(self):\n result = {}\n with open(self.file) as f:\n data = f.readlines()\n temp_key = ''\n for line in data:\n if line[0] == '\t' or line[0] == ';':\n result[temp_key].append(line.strip())\n else:\n temp_key = line.strip()\n result[temp_key] = []\n return result", "def parse_config_file(config_file):\n parsed = {}\n\n try:\n with open(config_file, \"r\") as data:\n for line in data.readlines():\n if \"=\" not in line:\n continue\n key, val = line.split(\"=\", 1)\n parsed[key] = val.strip()[1:-1]\n except IOError:\n logging.error(\"%s 
doesn't exist\" % config_file)\n raise\n\n return parsed", "def parse_config(path):\n class Conf(object):\n pass\n conf = Conf()\n\n parser = ConfigParser()\n parser.read(path)\n vars_config = {}\n for section in parser.sections():\n for option in parser.options(section):\n value = parser.get(section, option)\n vars_config.update({option: value})\n\n for key in vars_config:\n setattr(conf, str(key).upper(), vars_config[key])\n setattr(conf, \"USERNAME\", environ[\"ENUMPI_DB_USER\"])\n setattr(conf, \"PASSPHRASE\", environ[\"ENUMPI_DB_PASSWORD\"])\n\n return conf", "def read_config(file, destination=None, user=None, host=None, cmd_host=None, copy_protocol=None):\n with open(file) as config_yaml:\n base_yaml = yaml.safe_load(config_yaml)\n\n # with config loaded, make sure we have the keys that we need\n\n base_config = {\n 'keys': [],\n 'map': [],\n 'default': [],\n 'required_files': [],\n 'path': None,\n 'destination': destination,\n 'command': {\n 'exts': [],\n 'run': None\n }\n }\n\n router_config = {\n 'key': [],\n 'files': [],\n 'filter': None,\n 'invert': None,\n 'lowercase': None,\n 'exact': None\n }\n\n remote_config = {\n 'user': user,\n 'host': host,\n 'copy_protocol': copy_protocol,\n 'cmd_host': cmd_host,\n }\n\n if 'dassort' in base_yaml.keys() and 'remote' in base_yaml.keys():\n tree_yaml = base_yaml['dassort']\n map_json = tree_yaml['json']\n base_config = merge_dicts(base_config, map_json)\n base_config = merge_dicts(base_config, tree_yaml)\n remote_yaml = base_yaml['remote']\n remote_config = merge_dicts(remote_config, remote_yaml)\n router_config = None\n elif 'dassort' in base_yaml.keys():\n tree_yaml = base_yaml['dassort']\n map_json = tree_yaml['json']\n base_config = merge_dicts(base_config, map_json)\n base_config = merge_dicts(base_config, tree_yaml)\n remote_config = None\n router_config = None\n elif 'router' in base_yaml.keys():\n tree_yaml = base_yaml['router']\n router_config = merge_dicts(router_config, tree_yaml)\n # all router items should be iterables\n for k, v in router_config.items():\n if type(v) is not list:\n router_config[k] = [v]\n base_config = None\n remote_config = None\n else:\n base_config = None\n remote_config = None\n router_config = None\n\n # reformat base configuration\n if base_config is not None:\n base_config = {\n 'keys': base_config['keys'],\n 'map': base_config['map'],\n 'default': base_config['default'],\n 'required_files': base_config['required_files'],\n 'value': [],\n 'path': {\n 'path_string': base_config['path'],\n 're': {'root': base_config['destination']}\n },\n 'command': base_config['command'],\n }\n\n return base_config, remote_config, router_config", "def parse_config_file(config_file):\n\n config = ConfigParser.SafeConfigParser()\n config.read(config_file)\n\n config_dict = {}\n for section in config.sections():\n # TODO : Should I force all section names to lowercase?\n config_dict[section.strip()] = dict(config.items(section))\n\n\n return config_dict", "def config(self) -> Dict[str, Any]:", "def read_my_cnf_files(self, files, keys):\n cnf = read_config_files(files)\n\n sections = ['client']\n if self.login_path and self.login_path != 'client':\n sections.append(self.login_path)\n\n if self.defaults_suffix:\n sections.extend([sect + self.defaults_suffix for sect in sections])\n\n def get(key):\n result = None\n for sect in cnf:\n if sect in sections and key in cnf[sect]:\n result = cnf[sect][key]\n return result\n\n return {x: get(x) for x in keys}", "def readConfig(file, section):\n config = 
ConfigParser.ConfigParser()\n config.read(file)\n keyval = dict()\n\n items = config.items('%s' % section)\n for entry in items:\n keyval[entry[0]] = entry[1]\n return keyval" ]
[ "0.6369596", "0.62699383", "0.62197447", "0.61826915", "0.61255795", "0.608884", "0.6068326", "0.60656905", "0.605204", "0.60274327", "0.60231185", "0.5968226", "0.5899399", "0.58986956", "0.5892832", "0.5862461", "0.585229", "0.57795405", "0.5766491", "0.5755049", "0.5753591", "0.5713444", "0.5702718", "0.5689584", "0.568533", "0.56818354", "0.5681029", "0.56741273", "0.56584275", "0.5636211" ]
0.6799325
0
Loads SQL statements in sql_fn and replaces variables based on the supplied var_replace dictionary. SQL statements are returned as a string.
def read_sql(sql_fn, var_replace):
    with open(sql_fn, 'r') as sql:
        sql_stmts = sql.read()
    for key in var_replace:
        sql_stmts = sql_stmts.replace(key, var_replace[key])
    return sql_stmts
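A short usage sketch; the filename and placeholder keys are assumptions for illustration, since the function does plain string replacement of each dict key:

sql = read_sql('report.sql', {':schema': 'analytics', ':run_date': '2020-01-01'})
# every occurrence of ':schema' and ':run_date' in report.sql is now substituted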
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_sql_from_file(conn, path, replace={}):\n with open(path, 'r') as f:\n query = [s.strip() + ';' for s in f.read().split(';')[:-1]]\n for s in query:\n for k, v in replace.items():\n s = s.replace(k, v)\n run_sql_from_string(conn, s)", "def replace_params(self):\n raw_sql = self.raw_sql\n for placeholder in self.to_replace:\n newreg = re.compile(placeholder)\n repl = self.get_replacement_value(placeholder)\n if repl:\n raw_sql = newreg.sub(str(repl), raw_sql)\n self.sql = raw_sql", "def format_sql_in_context(sql_template, param_dict, conn):\n if conn is not None:\n # Postgres, secure\n query = format_sql_postgres(sql_template, param_dict)\n return query.as_string(conn)\n\n # sqlite, may not be perfectly secure, far better than nothing\n return format_query_check_chars(sql_template, param_dict)", "def mogrify_sql_statement(self, content):\n sql = content[0]\n args = content[1]\n\n if self.dbmi.__name__ == \"psycopg2\":\n if len(args) == 0:\n return sql\n else:\n if self.connected:\n try:\n return self.cursor.mogrify(sql, args)\n except Exception as exc:\n print(sql, args)\n raise exc\n else:\n self.connect()\n statement = self.cursor.mogrify(sql, args)\n self.close()\n return statement\n\n elif self.dbmi.__name__ == \"sqlite3\":\n if len(args) == 0:\n return sql\n else:\n # Unfortunately as sqlite does not support\n # the transformation of sql strings and qmarked or\n # named arguments we must make our hands dirty\n # and do it by ourself. :(\n # Doors are open for SQL injection because of the\n # limited python sqlite3 implementation!!!\n pos = 0\n count = 0\n maxcount = 100\n statement = sql\n\n while count < maxcount:\n pos = statement.find(\"?\", pos + 1)\n if pos == -1:\n break\n\n if args[count] is None:\n statement = \"%sNULL%s\" % (statement[0:pos],\n statement[pos + 1:])\n elif isinstance(args[count], (int, long)):\n statement = \"%s%d%s\" % (statement[0:pos], args[count],\n statement[pos + 1:])\n elif isinstance(args[count], float):\n statement = \"%s%f%s\" % (statement[0:pos], args[count],\n statement[pos + 1:])\n elif isinstance(args[count], datetime):\n statement = \"%s\\'%s\\'%s\" % (statement[0:pos], str(args[count]),\n statement[pos + 1:])\n else:\n # Default is a string, this works for datetime\n # objects too\n statement = \"%s\\'%s\\'%s\" % (statement[0:pos],\n str(args[count]),\n statement[pos + 1:])\n count += 1\n\n return statement", "def replacevals(self, stmt):\n if 'materialize' in stmt:\n stmt = self.process_materialize(stmt)\n if 'listagg' in stmt:\n stmt = process_aggregates(stmt)\n if 'select USER, table_name' in stmt and stmt.count('UNION') == 3:\n return \"select user,table_name,preference from ingest_test\"\n if '.nextval from dual' in stmt and 'connect by' in stmt:\n self.num = int(stmt[stmt.rfind('<') + 1:])\n return None\n for k, v in self.repl.items():\n stmt = stmt.replace(k, v)\n return stmt", "def sub_binds(sql_select):\n\n keywords = ['INNER','FROM','HAVING','WHERE',\"GROUP BY\",\", \"]\n\n (sql_command,binds) = tuple(sql_select)\n\n for b in binds: sql_command=sql_command.replace('?',repr(b),1)\n\n replace_dict = {x:('\\n\\t'+x) for x in keywords}\n\n print '\\n'+replacer(sql_command,replace_dict)+'\\n'", "def clean_postgres_sql_for_spark_sql(\n postgres_sql_str: str, global_temp_view_proxies: List[str] = None, identifier_replacements: Dict[str, str] = None\n):\n # Spark SQL does not like double-quoted identifiers\n spark_sql = postgres_sql_str.replace('\"', \"\")\n spark_sql = re.sub(fr\"CREATE VIEW\", fr\"CREATE OR REPLACE TEMP VIEW\", 
spark_sql, flags=re.IGNORECASE | re.MULTILINE)\n\n # Treat these type casts as string in Spark SQL\n # NOTE: If replacing a ::JSON cast, be sure that the string data coming from delta is treated as needed (e.g. as\n # JSON or converted to JSON or a dict) on the receiving side, and not just left as a string\n spark_sql = re.sub(fr\"::text|::json\", fr\"::string\", spark_sql, flags=re.IGNORECASE | re.MULTILINE)\n\n if global_temp_view_proxies:\n for vw in global_temp_view_proxies:\n spark_sql = re.sub(\n fr\"FROM\\s+{vw}\", fr\"FROM global_temp.{vw}\", spark_sql, flags=re.IGNORECASE | re.MULTILINE\n )\n spark_sql = re.sub(\n fr\"JOIN\\s+{vw}\", fr\"JOIN global_temp.{vw}\", spark_sql, flags=re.IGNORECASE | re.MULTILINE\n )\n\n if identifier_replacements:\n for old, new in identifier_replacements.items():\n spark_sql = re.sub(fr\"(\\s+|^){old}(\\s+|$)\", fr\" {new} \", spark_sql, flags=re.IGNORECASE | re.MULTILINE)\n\n return spark_sql", "def _get_table_sql(object_id, tab_name, columns=[], rerun='s16a_wide2', save_sql=False, fn_sql='hsc_sql.txt', ):\n\tlocalpath = _get_local_path()\n\n\tfn = localpath + fn_table_template_sql\n\tsql_columns = _get_table_sql_columns(columns=columns)\n\n\twith open(fn, 'r') as f:\n\t\tsql_template = f.read()\n\tsql = sql_template.format(object_id=object_id, tab_name=tab_name, sql_columns=sql_columns, rerun=rerun, )\n\n\tif save_sql:\n\t\twith open(fn_sql, \"w\") as text_file:\n\t\t\ttext_file.write(sql)\n\n\treturn sql", "def _load_statements(self):\n home = Path(\".\")\n context = {\"table_name\": self.TABLE}\n self.sql = {}\n for path in home.glob(\"./sql/*\"):\n with open(path) as f:\n template = Template(f.read().strip())\n self.sql[path.stem] = template.render(context)", "def get_scrub_sql():\r\n # it seems incredibly hard to get SQLAlchemy to emit a fully-compiled SQL\r\n # string that including data values. 
i gave up after trying this method with\r\n # the \"dialect\" sqlalchemy.dialects.mysql.mysqldb.MySQLDialect()\r\n # https://sqlalchemy.readthedocs.org/en/latest/faq/sqlexpressions.html\r\n # #how-do-i-render-sql-expressions-as-strings-possibly-with-bound\r\n # -parameters-inlined\r\n sql_format = (\"update %(table)s set %(col)s = %(sub_value)s \"\r\n \"where %(col)s is not null;\")\r\n return '\\n'.join(\r\n sql_format % dict(table=c.table.name, col=c.name, sub_value=v)\r\n for c, v in get_scrub_columns().iteritems())", "def reparam(string_, dictionary):\n dictionary = dictionary.copy() # eval mucks with it\n # disable builtins to avoid risk for remote code exection.\n dictionary['__builtins__'] = object()\n vals = []\n result = []\n for live, chunk in _interpolate(string_):\n if live:\n v = eval(chunk, dictionary)\n result.append(sqlquote(v))\n else: \n result.append(chunk)\n return SQLQuery.join(result, '')", "def sqlq(v):\n if not isinstance(v, (bytes, str)):\n return v\n for value, replacement in _sql_replacements:\n v = v.replace(value, replacement)\n return v", "def format_sql_str(statement):\n replace_strs = [\"]\", \"[a\", \"\\r\\n\"]\n for replace_str in replace_strs:\n statement = statement.replace(replace_str, \"\")\n return statement", "def _site_sql(site, owner='dcc_owner', id_name='dcc', pedsnet_only=False):\n\n if id_name == 'dcc':\n if pedsnet_only:\n tmpl = SQL_SITE_PEDSNET_TEMPLATE\n else:\n tmpl = SQL_SITE_TEMPLATE\n else:\n tmpl = SQL_SITE_ID_NAME_TEMPLATE\n\n sql = tmpl.replace('{{.Site}}', site)\n sql = sql.replace('{{.Owner}}', owner)\n\n if id_name != 'dcc':\n sql = sql.replace('{{.IdName}}', id_name)\n\n if site == 'dcc' or site != id_name:\n statements = [_despace(x) for x in sql.split(\"\\n\") if x]\n else:\n statements = []\n\n if site == 'dcc' and id_name != 'dcc':\n if pedsnet_only:\n tmpl = SQL_SITE_PEDSNET_TEMPLATE\n else:\n tmpl = SQL_SITE_TEMPLATE\n sql = tmpl.replace('{{.Site}}', id_name)\n sql = sql.replace('{{.Owner}}', owner)\n statements.append(_despace(sql))\n\n if site != id_name:\n if id_name == 'dcc':\n id_maps_tmpl = SQL_ID_MAPS_TEMPLATE\n else:\n id_maps_tmpl = SQL_ID_MAPS_ID_NAME_TEMPLATE\n\n id_maps_sql = id_maps_tmpl.replace('{{.Site}}', site)\n id_maps_sql = id_maps_sql.replace('{{.Owner}}', owner)\n\n if id_name != 'dcc':\n id_maps_sql = id_maps_sql.replace('{{.IdName}}', id_name)\n\n statements.append(_despace(id_maps_sql))\n\n return statements", "def get_sql(database_name, table_name, sql_id):\n db = get_xml_dict(database_name, table_name)\n sql = db.get(sql_id)\n return sql", "def get_sql(database_name, table_name, sql_id):\n db = get_xml_dict(database_name, table_name)\n sql = db.get(sql_id)\n return sql", "def get_sql_template(table):\n if table == 'account_position':\n sql = \"insert into account_position values \" \\\n \"('%s', '%s', '%s', '0', '%f', '%f', '%f', '0', '0', '%f', '%f', '%f', '0', '0'\" \\\n \",'0', '0', '0', null, null, null, null, null, '0', '0', \" \\\n \"'0', '0', '%f', '0', '0', '%s');\"\n\n elif table == 'pf_position':\n sql = \"insert into pf_position values \" \\\n \"('%s', '%s', '%s', '0', '%f', '%f', '%f', '0', '0', '%f', \" \\\n \"'%f', '%f', '0', '0', '0', '0', null, '1',\" \\\n \" '0', '0', '0', '0', '0', '0', '0', '0', '%f', '0', '0', '0', null);\"\n\n elif table == 'account_trade_restrictions':\n sql = \"insert into account_trade_restrictions values \" \\\n \"('%s', '%s', '18', '0', '1000', '0', '1000', '0', '2000', \" \\\n \"'0', '3000', '0', '1000', '0', '1000', '0','1000', '1000', '0.9',\" \\\n \" 
'1000', '0.2', '0.1','100000000', '0', '0', '0', \" \\\n \"'0', '0','0' )\"\n elif table == 'instrument':\n sql = \"select ticker, pre_price from instrument where ticker= '%s'\"\n\n elif table == 'pf_account':\n sql = \"insert into pf_account values\" \\\n \"('%s', '%s', '%s', '%s', null, '');\"\n else:\n platform_logger.error(\"input wrong table '%s'\" % table)\n return ''\n return sql", "def prepare_sql(self, sql):\n # Oracle doesn't like trailing semicolons. So remove them.\n # To do this properly we need to strip comments.\n # See issue5.\n sql = sqlparse.format(sql, strip_comments=True)\n sql = sql.strip()\n if sql.endswith(';'):\n sql = sql[:-1]\n return sql", "def run_sql_from_string(conn, statement):\n statement = sqlalchemy.text(statement)\n conn.execute(statement)", "def sql_log(cls, sql_query, data=None):\n\t\t# if data exists , I replace them into `complete_sql_query`\n\t\tif data:\n\t\t\tfor key, value in data.items():\n\t\t\t\tsearch = ':{}'.format(key)\n\t\t\t\treplace = '`{}`'.format(value)\n\t\t\t\tsql_query = sql_query.replace(search, replace)\n\n\t\tprint('\\t{}'.format(sql_query))", "def sql_scripts_execute(self, sql_scripts, params={}):\n ps = self.parameter_handler(params)\n log.debug('Got parameters: %s', ps)\n cursor = self._get_cursor()\n for q in sql_scripts:\n with open(q, 'r') as s:\n sql_string_formatted = s.read().format(**ps)\n cursor.execute(sql.SQL(sql_string_formatted), ps)\n self.connection.commit()\n self.connection.close()", "def _get_photoobj_sql(object_id, band_columns=[], bands=[], all_columns=False, rerun='s16a_wide2', tab_name='forced', save_sql=False, fn_sql='hsc_sql.txt'):\n\n\tlocalpath = _get_local_path()\n\n\tfn = localpath+fn_table_template_sql\n\tsql_columns = _get_photoobj_sql_columns(band_columns=band_columns, bands=bands, all_columns=all_columns)\n\n\twith open(fn, 'r') as f:\n\t\tsql_template=f.read()\n\tsql = sql_template.format(object_id=object_id, tab_name=tab_name, sql_columns=sql_columns, rerun=rerun, )\n\t\n\tif save_sql:\n\t\twith open(fn_sql, \"w\") as text_file:\n\t\t\ttext_file.write(sql)\n\n\treturn sql", "def open (self, sql_file):\n fd = open(sql_file, 'r')\n sql = fd.read()\n fd.close()\n self.sql = sql.replace(UTF_8_STR, \"\")", "def execute_sql_files(connection, sql_files):\n for filename in sql_files:\n statement = resource_text(filename)\n for sub_statement in statement.split(\";\"):\n if sub_statement.strip():\n connection.execute(text(sub_statement))", "def sqlfile(path, **kw):\n sql = path.read_text()\n return sql.format(**kw)", "def execute_sql(sql_stmt, host_in='client'):\n #db = create_engine(host_in,'')\n #sql = sqltext(sql_stmt) \n #return db.execute(sql)\n with open('temp.sql','w') as sql:\n sql.write(sql_stmt)\n\n proc=sp.Popen(\"mysql < temp.sql\",stdout=sp.PIPE, stderr=sp.PIPE, shell=True)\n out,err = proc.communicate()\n sp.Popen(\"rm temp.sql\",stdout=sp.PIPE, stderr=sp.PIPE, shell=True)\n return out.strip(),err.strip()", "def connect_psql(kid, var):\n\n code = f\"\"\"from sqlalchemy import create_engine\nconn_string = f\"postgresql://{cfg.sql_name}:{cfg.sql_password}@localhost/{cfg.sql_dbname}\"\nengine = create_engine(conn_string)\nwith engine.begin() as conn:\n conn.execute(\"INSERT INTO {cfg.sql_schema_name}.{cfg.sql_table_name} (var_value, var_name) VALUES (9,'c')\")\n result = conn.execute(\"select * from {cfg.sql_schema_name}.{cfg.sql_table_name}\")\n for row in result:\n print(row)\n \"\"\"\n\n print(\"---Attempting to execute SQL code---\")\n\n return exec_code(kid, var, code)", "def sql(self, db, 
sql, args=()):\n assert db in ('source', 'target'), u\"First arg of sql() should be 'source' or 'target'\"\n connection = self.target_connection if db == 'target' else self.source_connection\n with connection.cursor() as cursor:\n cursor.execute(sql, args)\n return cursor.fetchall() if 'select ' in sql.lower() else ()", "def sql_query(sql):\n cur = c.cursor()\n cur.execute(sql)\n c.commit()", "def set_db_data(self, sql_strings):\n connection_string = f\"\"\"\n host='{self.host}' \n dbname='{self.db_name}' \n user='{self.user}' \n password='{self.password}' \n port='{self.port}'\n \"\"\"\n\n with psycopg2.connect(connection_string) as connection:\n cursor = connection.cursor()\n for query in sql_strings:\n cursor.execute(query)\n\n connection.commit()" ]
[ "0.6825659", "0.6823968", "0.65025157", "0.6472315", "0.6245375", "0.6126365", "0.6013925", "0.59643203", "0.5952251", "0.5904826", "0.58467245", "0.5834276", "0.5786407", "0.575063", "0.57417893", "0.57417893", "0.5704225", "0.56946456", "0.5686", "0.56783825", "0.5657021", "0.5652497", "0.5624649", "0.56066513", "0.55952704", "0.55042493", "0.55005807", "0.54992425", "0.5478467", "0.54677075" ]
0.8187875
0
Tells if the source is newer than the target. Return true if 'source' exists and is more recently modified than 'target', or if 'source' exists and 'target' doesn't. Return false if both exist and 'target' is the same age or younger than 'source'. Raise DistutilsFileError if 'source' does not exist.
def newer(source, target):
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return True
    return os.stat(source).st_mtime > os.stat(target).st_mtime
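A hedged usage sketch, assuming the imports the function relies on (the paths and the rebuild step are illustrative):

import os
from distutils.errors import DistutilsFileError

if newer('module.c', 'module.o'):
    rebuild()  # hypothetical build step, run only when the source is newer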
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def newer (source, target):\r\n\r\n if not os.path.exists (target):\r\n return 1\r\n\r\n from stat import ST_MTIME\r\n mtime1 = os.stat(source)[ST_MTIME]\r\n mtime2 = os.stat(target)[ST_MTIME]\r\n\r\n return mtime1 > mtime2", "def newer_group(sources, target, missing='error'):\n # If the target doesn't even exist, then it's definitely out-of-date.\n if not os.path.exists(target):\n return True\n\n # Otherwise we have to find out the hard way: if *any* source file\n # is more recent than 'target', then 'target' is out-of-date and\n # we can immediately return true. If we fall through to the end\n # of the loop, then 'target' is up-to-date and we return false.\n target_mtime = os.stat(target).st_mtime\n\n for source in sources:\n if not os.path.exists(source):\n if missing == 'error': # blow up when we stat() the file\n pass\n elif missing == 'ignore': # missing source dropped from\n continue # target's dependency list\n elif missing == 'newer': # missing source means target is\n return True # out-of-date\n\n if os.stat(source).st_mtime > target_mtime:\n return True\n\n return False", "def needs_rebuild(source, target):\n return not os.path.isfile(target) or (\n os.path.getmtime(source) > os.path.getmtime(target))", "def exists_and_newer(targetfile, topicfile):\n try:\n if getmtime(targetfile) >= getmtime(topicfile):\n return True\n else:\n return False\n except IOError:\n return False", "def _newer(a: str, b: str) -> bool:\n if not os.path.exists(a):\n return False\n if not os.path.exists(b):\n return True\n return os.path.getmtime(a) >= os.path.getmtime(b)", "def is_newer(a, b):\n return os.stat(a).st_mtime >= os.stat(b).st_mtime", "def missingOrStale(target, reference=None):\n if not os.path.isfile(target):\n return True\n if reference:\n return os.path.getmtime(target) < os.path.getmtime(reference)\n else:\n return False", "def check_dependency_change(targets: List[str], dependencies: List[str]) -> bool:\n min_target_mtime = min([get_mtime(path) for path in targets])\n max_dep_mtime = max([get_mtime(path) for path in dependencies])\n return max_dep_mtime > min_target_mtime", "def is_newer(filename1, filename2):\n return os.stat(filename1).st_mtime > os.stat(filename2).st_mtime", "def out_of_date(original, derived):\r\n return (not os.path.exists(derived) or\r\n (os.path.exists(original) and\r\n os.stat(derived).st_mtime < os.stat(original).st_mtime))", "def IsFileNewer(name1, name2):\n\n\tif not os.path.exists(name1):\n\t\treturn 0\n\n\tif not os.path.exists(name2):\n\t\treturn 1\n\n\tmod_time1 = os.stat(name1)[stat.ST_MTIME]\n\tmod_time2 = os.stat(name2)[stat.ST_MTIME]\n\treturn (mod_time1 > mod_time2)", "def existing_and_newer(fn0, fn):\n\n if not os.path.isfile(fn0):\n error(\"Dependency '{}' does not exist\".format(fn0))\n\n if not os.path.isfile(fn):\n return False\n\n if os.path.getmtime(fn0) <= os.path.getmtime(fn):\n return True\n else:\n return False", "def file_newer(check_file: str, base_file: str) -> bool:\n if os.path.isfile(check_file):\n cf_modtime_ts = os.path.getmtime(check_file)\n bf_modtime_ts = os.path.getmtime(base_file)\n else:\n return False\n\n return cf_modtime_ts > bf_modtime_ts", "def isUpToDate(inFile: str, outFile: str) -> bool:\n if not os.path.exists(inFile):\n return False\n if os.path.exists(outFile):\n if os.path.getmtime(outFile) >= os.path.getmtime(inFile):\n return True\n return False", "def ShouldBuild(self, src_files, dst_files):\n if self.force:\n return True\n\n oldest = None\n for dst in dst_files:\n if not os.path.exists(dst):\n 
self.DebugMsg(\"Build because %s does not exist\" % dst)\n return True\n modified = os.path.getmtime(dst)\n if oldest == None or modified < oldest:\n old = dst\n oldest = modified\n\n for src in src_files:\n modified = os.path.getmtime(src)\n if modified > oldest:\n self.DebugMsg(\"Build because %s is newer than %s\" % (src, old))\n return True\n\n self.DebugMsg(\"%s are up to date\" % \", \".join(dst_files))\n return False", "def source_changed(source, cache):\n return os.path.getmtime(source)>os.path.getmtime(cache)", "def _file_newer(cls, path, check_mtime):\n path_mtime = os.path.getmtime(path)\n return path_mtime > check_mtime", "def needs_update(self, *path):\n dt_fmt = \"%Y-%m-%d %H:%M:%S\"\n try:\n linfo = self.info(*path)\n dt_local = datetime.datetime.strptime(\n linfo[\"datetime\"][:19], dt_fmt)\n dt_server = datetime.datetime.strptime(\n self.serverfiles.info(*path)[\"datetime\"][:19], dt_fmt)\n return dt_server > dt_local\n except FileNotFoundError:\n return True\n except KeyError:\n return True", "def has_changed(self):\n timestamp = os.stat(self.filename).st_mtime\n if timestamp > self.last_timestamp:\n self.last_timestamp = timestamp\n return True\n return False", "def is_more_rencent(filename: str, comparison_filename: str):\n return os.path.getmtime(filename) > os.path.getmtime(comparison_filename)", "def _assets_are_stale(self, sourcedirectory, cachedirectory):\n comparison = filecmp.dircmp(sourcedirectory, cachedirectory, [], [])\n if comparison.left_only or comparison.right_only:\n # We have files in one directory and not the other\n return True\n if comparison.diff_files:\n # Some of the files have changed\n return True\n\n return False", "def older(a, b):\n\treturn not newer(a, b)", "def newer(a, b):\n\treturn modtime(a) < modtime(b) # smaller is earlier", "def is_newer(self, time):\n with self.connection_pool.item() as sftpc:\n return (\n sftpc.stat(self.remote_path).st_mtime > time\n or sftpc.lstat(self.remote_path).st_mtime > time\n )", "def _source_filename_field_is_not_equal_target(self):\n if self.source == self.target:\n # print(f\"{self}\")\n raise SourceEqualsTargetError(\"source must not equal target.\")\n return True", "def is_outdated(self):\n\n if not self.is_done:\n return False\n elif not (self.input_files and self.output_files):\n return False\n\n return fileutils.modified_after(self.input_files, self.output_files)", "def match_stat(dest_path, source_path):\n return shutil.copystat(source_path, dest_path)", "def check_pickle(src: \"list[str]\", targets: \"list[str]\"):\n src_time = np.array([os.path.getmtime(item) for item in src])\n targets_time = np.array([os.path.getmtime(item) for item in targets])\n for time in targets_time:\n if np.any(src_time > time):\n csv_pickle()\n print('new pickle data were successfully made.')\n break", "def _verify_archive_equality(self, file1, file2):\r\n temp_dir_1 = mkdtemp()\r\n temp_dir_2 = mkdtemp()\r\n try:\r\n extract_source(file1, temp_dir_1)\r\n extract_source(file2, temp_dir_2)\r\n return directories_equal(temp_dir_1, temp_dir_2)\r\n\r\n finally:\r\n shutil.rmtree(temp_dir_1)\r\n shutil.rmtree(temp_dir_2)", "def verifyFile(source, destination):\n\tsourceHash = hashlib.sha256(open(source, 'rb').read()).digest()\n\tdestinationHash = hashlib.sha256(open(destination, 'rb').read()).digest()\n\n\tif sourceHash == destinationHash:\n\t\treturn (True, str(sourceHash))\n\n\treturn False" ]
[ "0.8297187", "0.7296959", "0.7276083", "0.71889526", "0.69265825", "0.67073405", "0.6665101", "0.65951735", "0.6554066", "0.6270341", "0.6233283", "0.62098014", "0.61658466", "0.6140355", "0.6124453", "0.6110864", "0.60843563", "0.5883249", "0.5821825", "0.58091307", "0.5777663", "0.5770977", "0.5768198", "0.576778", "0.565858", "0.5658127", "0.56133515", "0.5609499", "0.55862355", "0.55744565" ]
0.88327926
0
Sets the platform string identifier returned by get_platform(). Note that this change doesn't impact the value returned by sysconfig.get_platform() and is local to Distutils.
def set_platform(identifier):
    global _PLATFORM
    _PLATFORM = identifier
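A minimal sketch of the intended use, assuming a get_platform() in the same module that reads the _PLATFORM global (as the docstring implies):

set_platform('linux-x86_64')  # the override is local to this process
assert get_platform() == 'linux-x86_64'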
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setPlatform(self):\n\t\treturn None", "def platform(self, platform):\n # type: (string_types) -> None\n\n if platform is not None:\n if not isinstance(platform, string_types):\n raise TypeError(\"Invalid type for `platform`, type has to be `string_types`\")\n\n self._platform = platform", "def OverridePlatformName(name):\n global override_platform_name\n override_platform_name = name", "def PlatformName():\n if override_platform_name:\n return override_platform_name\n if IsWindows():\n return 'win32'\n if IsLinux():\n return 'linux'\n if IsMac():\n return 'mac'\n raise NotImplementedError('Unknown platform \"%s\".' % sys.platform)", "def getPlatform(self):\n self.platform=util.get_platform()\n if not(self.platform.find('linux')==-1): self.platform='Unix' # i suppose, that in all unix systems are paths similiar\n if self.platform=='win32': self.platform='Win32' # this should be done automatically", "def platform_num(self) -> str:\n return pulumi.get(self, \"platform_num\")", "def get_platform():\n global _PLATFORM\n if _PLATFORM is None:\n _PLATFORM = _sysconfig.get_platform()\n return _PLATFORM", "def platform(self):\n # type: () -> string_types\n return self._platform", "def _platform(*args):\n # Format the platform string\n platform = '-'.join(x.strip() for x in filter(len, args))\n\n # Cleanup some possible filename obstacles...\n platform = platform.replace(' ', '_')\n platform = platform.replace('/', '-')\n platform = platform.replace('\\\\', '-')\n platform = platform.replace(':', '-')\n platform = platform.replace(';', '-')\n platform = platform.replace('\"', '-')\n platform = platform.replace('(', '-')\n platform = platform.replace(')', '-')\n\n # No need to report 'unknown' information...\n platform = platform.replace('unknown', '')\n\n # Fold '--'s and remove trailing '-'\n while 1:\n cleaned = platform.replace('--', '-')\n if cleaned == platform:\n break\n platform = cleaned\n while platform[-1] == '-':\n platform = platform[:-1]\n\n return platform", "def _platform(*args):\n # Format the platform string\n platform = '-'.join(x.strip() for x in filter(len, args))\n\n # Cleanup some possible filename obstacles...\n platform = platform.replace(' ', '_')\n platform = platform.replace('/', '-')\n platform = platform.replace('\\\\', '-')\n platform = platform.replace(':', '-')\n platform = platform.replace(';', '-')\n platform = platform.replace('\"', '-')\n platform = platform.replace('(', '-')\n platform = platform.replace(')', '-')\n\n # No need to report 'unknown' information...\n platform = platform.replace('unknown', '')\n\n # Fold '--'s and remove trailing '-'\n while 1:\n cleaned = platform.replace('--', '-')\n if cleaned == platform:\n break\n platform = cleaned\n while platform[-1] == '-':\n platform = platform[:-1]\n\n return platform", "def platform_type(self, platform_type):\n self._platform_type = platform_type", "def platform():\n return \"micaz\"", "def _format_platform(platform, release, architecture=None):\n rep = f\"{_PLATFORMS[platform]} {release}\"\n if architecture is None or architecture == default.architecture:\n return rep\n return f\"{rep} ({architecture})\"", "def platform(self, return_str=True):\n architecture = self.arch(\"docker\")\n host_platform = self.osversion() + \"/\" + architecture\n if return_str:\n return host_platform.lower()\n return self.parse_platform(host_platform)", "def get_platform():\n system_name = platform.system()\n if system_name == \"Linux\":\n # Previously we'd use either \"-gnu\" or \"-musl\" indicate which version\n # of 
libc we were built against. We now default to musl since it\n # reliably works on all platforms.\n return \"unknown-linux-musl\"\n elif system_name == \"Darwin\":\n return \"apple-darwin\"\n else:\n return \"unknown\"", "def platform(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"platform\")", "def platform(self):\n return self.random.choice([\n 'Laptop', \n 'Desktop', \n 'Workstation', \n 'Server', \n 'Virtual Machine', \n 'Container', \n 'Micro-Service', \n 'Droplet', \n 'SaaS'\n ])", "def GetCurrentPlatform():\n if sys.platform == 'darwin':\n return 'mac'\n if sys.platform == 'win32':\n return 'win'\n if sys.platform == 'linux2':\n return 'linux'\n raise RuntimeError('Unknown platform')", "def getPlatform(self):\n\t\treturn None", "def platform():\n if 'OS' in gyp_defines():\n if 'android' in gyp_defines()['OS']:\n return 'android'\n else:\n return gyp_defines()['OS']\n elif IsWindows():\n return 'win'\n elif IsLinux():\n return 'linux'\n else:\n return 'mac'", "def _get_os_platform(platform_info):\n os = platform_info[0]\n if not os in _PLATFORM_MAPPINGS:\n raise UnsupportedOS(os)\n return _PLATFORM_MAPPINGS[os]", "def current_platform() -> str:\n if sys.platform.startswith('linux'):\n return 'linux'\n elif sys.platform.startswith('darwin'):\n return 'mac'\n elif (sys.platform.startswith('win') or\n sys.platform.startswith('msys') or\n sys.platform.startswith('cyg')):\n if sys.maxsize > 2 ** 31 - 1:\n return 'win64'\n return 'win32'\n else:\n print('Error: DO NOT SUPPORT OS', file=sys.stderr)\n sys.exit(1)", "def set_platform(self, platform_dict):\n if not os.path.exists(self.file_path):\n print(\"netCDF file does not exist, exiting without saving Platform group...\")\n elif self.format == '.nc':\n with netCDF4.Dataset(self.file_path, 'a', format='NETCDF4') as ncfile:\n plat = ncfile.createGroup('Platform')\n [plat.setncattr(k, v) for k, v in platform_dict.items()]\n elif self.format == '.zarr' and not self.append_zarr: # Do not save platform if appending\n zarrfile = zarr.open(self.file_path, mode='a')\n plat = zarrfile.create_group('Platform')\n for k, v in platform_dict.items():\n plat.attrs[k] = v", "def getplatform():\n\n # Return the system platform\n return sys.platform", "def get_platform(self):\n return self._platform", "def get_sequencing_platform(self):\n platform = self.data[\"platform\"]\n if platform == \"miseq\":\n platform = \"MiSeq\"\n elif platform == \"hiseq4000\":\n platform == \"HiSeq4000\"\n elif platform == \"hiseq2000\":\n platform == \"HiSeq2000\"\n else:\n raise Exception(\"Unknown platform {platform} for sequencing run {run}\".format(platform=platform,run=self.run))\n return platform", "def platform():\n return ['linux']", "def supported_platform(given_platform):\n if given_platform not in constants.platforms():\n raise argparse.ArgumentTypeError(\"Invalid platform given: \" + given_platform)\n return given_platform", "def GetPlatform(self):\n arch = \"None\"\n # check architecture name\n if \"CMTCONFIG\" in os.environ:\n arch = os.environ[\"CMTCONFIG\"]\n elif \"SCRAM_ARCH\" in os.environ:\n arch = os.environ[\"SCRAM_ARCH\"]\n return arch", "def _get_build_os_name():\n system = platform.system()\n if 'Darwin' in system or 'Macintosh' in system:\n return 'darwin-x86'\n\n # TODO: Add more values if needed.\n return 'linux-x86'" ]
[ "0.7211347", "0.7159676", "0.7013561", "0.6954458", "0.68570656", "0.6601686", "0.6520827", "0.6483991", "0.64623207", "0.64623207", "0.64210874", "0.6305744", "0.6305564", "0.6298814", "0.6271021", "0.62246704", "0.6183877", "0.60754293", "0.6066876", "0.6018605", "0.596566", "0.59342617", "0.5927217", "0.5910094", "0.58769995", "0.58630556", "0.58538777", "0.5817244", "0.5811833", "0.57900727" ]
0.81388736
0
Return 'pathname' as a name that will work on the native filesystem, i.e. split it on '/' and put it back together again using the current directory separator. Needed because filenames in the setup script are always supplied in Unix style, and have to be converted to the local convention before we can actually use them in the filesystem. Raises ValueError on non-Unix-ish systems if 'pathname' either starts or ends with a slash.
def convert_path(pathname):
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths)
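An illustrative sketch (paths assumed); on POSIX, where os.sep is '/', the input is returned unchanged:

convert_path('lib/pkg/data.txt')  # -> 'lib\\pkg\\data.txt' where os.sep is '\\'
convert_path('/absolute/path')    # raises ValueError on non-Unix systems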
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _path(unix_path):\n return unix_path.replace(\"/\", os.path.sep)", "def normalized_file_path(path: str) -> str:\n # Convert Unix path to Windows path for WSL\n if PLATFORM == \"WSL\":\n return path.replace(\"/\", \"\\\\\")\n\n return path", "def nt_path_to_posix_path(path):\r\n path = path.replace(\"\\\\\", \"/\")\r\n parts = path.split(\":\")\r\n if len(parts) > 1:\r\n return \"/\" + parts[0].lower() + parts[1]\r\n return path", "def system_path(path):\n if is_windows(): return path.replace('/', '\\\\')\n else: return path.replace('\\\\', '/')", "def convertString(path):\n if (\"win\" in sys.platform):\n return path.replace(\"/\",\"\\\\\")\n elif (\"linux\" in sys.platform):\n return path.replace(\"\\\\\",\"/\")", "def from_posix(fname):\n import sys\n if sys.platform == 'win32': # pragma: nocover\n if fname[0] == '/':\n fname = fname[1:]\n fname = fname.replace('/', '\\\\')\n return fname", "def normpath_with_actual_case(name):\r\n assert os.path.isabs(name) or os.path.ismount(name), \"Not abs nor mount: \" + name\r\n assert os.path.exists(name), \"Not exists: \" + name\r\n if os.name == \"nt\":\r\n name = os.path.realpath(name)\r\n from ctypes import create_unicode_buffer, windll\r\n buf = create_unicode_buffer(512)\r\n windll.kernel32.GetShortPathNameW(name, buf, 512) # @UndefinedVariable\r\n windll.kernel32.GetLongPathNameW(buf.value, buf, 512) # @UndefinedVariable\r\n if len(buf.value):\r\n result = buf.value\r\n else:\r\n result = name\r\n assert isinstance(result, str)\r\n if result[1] == \":\":\r\n # ensure drive letter is capital\r\n return result[0].upper() + result[1:]\r\n else:\r\n return result\r\n else:\r\n return os.path.normpath(name)", "def native(path):\n path = _os.path.sep.join(path.split('/'))\n return _os.path.normpath(_os.path.join(root, path))", "def make_fs_path(parts):\n return '/'.join(parts)", "def pathToFileName(self, path):\n\t\t# Find the path, and strip the leading slash.\n\t\tpath =urlparse.urlparse(self.path)[2].lstrip(\"/\")\n\t\t# Process url escape codes, and normalize the path.\n\t\tpath = os.path.normpath(urllib2.unquote(path))\n\t\t# normpath strips the last slash\n\t\tif os.path.isdir(path):\n\t\t\treturn path + '/'\n\t\telse:\n\t\t\treturn path", "def to_posix(fname):\n import sys\n if sys.platform == 'win32': # pragma: nocover\n import os.path\n if os.path.isabs(fname):\n fname = '/' + fname\n fname = fname.replace('\\\\', '/')\n return fname", "def posix_path(self, **kw):\n with_drive_letter = kw.get(\"with_drive\", True)\n return self._construct_path(\"/\", with_drive_letter)", "def resource_pathname(pathname, verbose=0):\n try:\n refno = Res.FSpOpenResFile(pathname, 1)\n Res.CloseResFile(refno)\n except Res.Error, arg:\n if arg[0] in (-37, -39):\n # No resource fork. 
We may be on OSX, and this may be either\n # a data-fork based resource file or a AppleSingle file\n # from the CVS repository.\n try:\n refno = Res.FSOpenResourceFile(pathname, u'', 1)\n except Res.Error, arg:\n if arg[0] != -199:\n # -199 is \"bad resource map\"\n raise\n else:\n return refno\n # Finally try decoding an AppleSingle file\n pathname = _decode(pathname, verbose=verbose)\n else:\n raise\n return pathname", "def makePath(path):\n\n compatPath = os.path.abspath(os.path.expanduser(path))\n\n return compatPath", "def path_name(self, path):\r\n ind = path.rfind(\"/\") + 1\r\n return (path[:ind], path[ind:])", "def win2unix(a_path, use_abs=1):\r\n if use_abs:\r\n a_path = os.path.abspath(a_path)\r\n return re.sub(r\"\\\\\", \"/\", a_path)", "def win2unix(a_path, use_abs=1):\r\n if use_abs:\r\n a_path = os.path.abspath(a_path)\r\n return re.sub(r\"\\\\\", \"/\", a_path)", "def path2name(path,\n slash=\"/\",\n hid_char=\".\",\n extension=False):\n if extension is True:\n return str(path.split(slash)[-1].strip(hid_char))\n else:\n return str(path.split(slash)[-1].strip(hid_char).split(\".\")[0])", "def str_to_path(name):\n import os;\n return(os.path.abspath(name));", "def resolve(self, filespec):\n \n parts = re.split(r\"[\\\\/]\", filespec)\n # try to substitute the first part as if it is a handle\n parts[0] = self.fhdict.get(parts[0].lower(), (parts[0],))[0]\n return os.path.sep.join(parts)", "def unique_path(pathname):\r\n result = pathname\r\n base, ext = os.path.splitext(result)\r\n counter = 2\r\n while os.path.exists(result):\r\n result = \"%s (%s)%s\" % (base, counter, ext)\r\n counter += 1\r\n return result", "def change_root(new_root, pathname):\n if os.name == 'posix':\n if not os.path.isabs(pathname):\n return os.path.join(new_root, pathname)\n else:\n return os.path.join(new_root, pathname[1:])\n\n elif os.name == 'nt':\n (drive, path) = os.path.splitdrive(pathname)\n if path[0] == '\\\\':\n path = path[1:]\n return os.path.join(new_root, path)\n\n elif os.name == 'os2':\n (drive, path) = os.path.splitdrive(pathname)\n if path[0] == os.sep:\n path = path[1:]\n return os.path.join(new_root, path)\n\n else:\n raise DistutilsPlatformError(\"nothing known about \"\n \"platform '%s'\" % os.name)", "def translate_path(self, path):\n # abandon query parameters\n path = path.split('?', 1)[0]\n path = path.split('#', 1)[0]\n # Don't forget explicit trailing slash when normalizing. 
Issue17324\n trailing_slash = path.rstrip().endswith('/')\n path = posixpath.normpath(urllib.unquote(path))\n words = path.split('/')\n words = filter(None, words)\n path = self.working_dir\n for word in words:\n _drive, word = os.path.splitdrive(word)\n _head, word = os.path.split(word)\n if word in (os.curdir, os.pardir):\n continue\n path = os.path.join(path, word)\n if trailing_slash:\n path += '/'\n return path", "def mangle_path(path):\n # Remove assigns\n path = servers.get_file_server().manglepath( str(path) )\n # Remove parent special directories\n path = os.path.abspath( path )\n # Convert path to Nebula format (slashes instead of backslashes)\n path = servers.get_file_server().manglepath( str(path) )\n # Convert drive letter to lowercase\n if len(path) > 1:\n if path[1] == ':':\n path = path[:1].lower() + path[1:]\n return path", "def system_path(self, path):\n return os.path.join(self.prefix, path.lstrip('/'))", "def translate_path(self, path):\n # abandon query parameters\n path = path.split('?', 1)[0]\n path = path.split('#', 1)[0]\n # Don't forget explicit trailing slash when normalizing. Issue17324\n trailing_slash = path.rstrip().endswith('/')\n try:\n path = urllib.parse.unquote(path, errors='surrogatepass')\n except UnicodeDecodeError:\n path = urllib.parse.unquote(path)\n path = posixpath.normpath(path)\n words = path.split('/')\n words = filter(None, words)\n path = os.getcwd()\n for word in words:\n if os.path.dirname(word) or word in (os.curdir, os.pardir):\n # Ignore components that are not a simple file/directory name\n continue\n path = os.path.join(path, word)\n if trailing_slash:\n path += '/'\n return path", "def fix_path(name):\n saveslash = \"/\" if (name[0] == \"/\") else \"\"\n name = re.split(\"\\\\\\|/\", name)\n new = name[0]\n for i in range(1,len(name)):\n new = os.path.join(new, name[i])\n new = \"%s%s\" % (saveslash, new)\n return new", "def dirname(pathname):\n # FIXME: figure out if this is a desirable outcome. i.e. do we\n # want dirname to be empty, or do a pwd and find out what the\n # current dir is, or keep the \"./\". I suppose this could make a\n # difference to some of the behavior of the scripts, such as\n # copying files around and such.\n return os.path.dirname(pathname) or '.'", "def format_path (in_path):\n return os.path.realpath(os.path.expanduser(in_path))", "def resolvePath_(cls, path):\r\n try:\r\n fsref, isFolder, wasAliased = FSResolveAliasFile(os.path.realpath(path), 1)\r\n return os.path.abspath(fsref.as_pathname().decode(u\"utf-8\"))\r\n except MacOS.Error as e:\r\n return None" ]
[ "0.67207557", "0.6290176", "0.6238237", "0.6112074", "0.61115247", "0.606076", "0.5939797", "0.5932711", "0.59231985", "0.58520925", "0.58230126", "0.57967067", "0.5790922", "0.57871675", "0.57529", "0.57496727", "0.57496727", "0.5720282", "0.5685239", "0.566036", "0.56340814", "0.5631931", "0.56261224", "0.5622315", "0.56055254", "0.5599229", "0.55978936", "0.55957806", "0.5595446", "0.5577465" ]
0.68549985
0
Return 'pathname' with 'new_root' prepended. If 'pathname' is relative, this is equivalent to "os.path.join(new_root,pathname)". Otherwise, it requires making 'pathname' relative and then joining the two, which is tricky on DOS/Windows and Mac OS.
def change_root(new_root, pathname):
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            return os.path.join(new_root, pathname[1:])

    elif os.name == 'nt':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    elif os.name == 'os2':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == os.sep:
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise DistutilsPlatformError("nothing known about "
                                     "platform '%s'" % os.name)
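A quick usage sketch (illustrative, assuming the change_root above is in scope on a POSIX host, where an absolute 'pathname' is re-rooted by dropping its leading separator):

import os.path

staged = change_root('/tmp/stage', '/usr/lib/python2.7/os.py')
# -> '/tmp/stage/usr/lib/python2.7/os.py'
relative = change_root('/tmp/stage', 'usr/lib/python2.7/os.py')
# a relative pathname is simply joined onto the new root
assert relative == os.path.join('/tmp/stage', 'usr/lib/python2.7/os.py')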
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addPathPrefix(prefix, pathname):\n new_pathname = os.path.join(os.path.dirname(pathname), prefix + os.path.basename(pathname))\n return new_pathname", "def makepath(plname,root):\n if (root.endswith('/') and not plname[0] =='/' ) or ( not root.endswith('/') and plname[0] =='/') :\n return root+plname\n elif root.endswith('/') and plname[0] =='/' :\n return root+plname[1:]\n else:\n return root+\"/\"+plname", "def append_to_path(existing, addition):\n if existing == Path.rootPath():\n return Path.rootPath() + addition\n return \"{}.{}\".format(existing, addition)", "def rename_storage_loc_toplvl(newprojname, oldpath):\n # Using .split(pathsep) because os.path.split only removes leaf,\n # we need the first/root dir of the path\n return os.path.join(newprojname, *oldpath.split(os.path.sep)[1:])", "def _relativize(base: str, current: str) -> str:\n if current.startswith(base):\n return current.replace(base, \"\", 1)\n return current", "def make_new_path(path, postfix = \"\", ext = \"\"):\n dir = os.path.split(path)[0]\n old_basename, old_ext = os.path.splitext(path)\n new_basename = old_basename + \"_\" + postfix\n new_path = os.path.join(dir, new_basename + \".\" + ext)\n return new_path", "def insertIntoPath(original, insertion='rest'):\n slashIndex = original.index('/',1)\n newString = '%s/%s%s' % (original[0:slashIndex], insertion, original[slashIndex:len(original)])\n return newString", "def append_to_path(path, name):\n if path[-1] == '/' or path[-1] == ':':\n return path + name\n else:\n return str(path) + str('/') + str(name)", "def expand_path(self, original_path):\n path = self.fix_dir_separator(original_path)\n path = os.path.expanduser(path)\n return os.path.join(self.config['work_dir'], path)", "def convert_path(pathname):\n if os.sep == '/':\n return pathname\n if not pathname:\n return pathname\n if pathname[0] == '/':\n raise ValueError(\"path '%s' cannot be absolute\" % pathname)\n if pathname[-1] == '/':\n raise ValueError(\"path '%s' cannot end with '/'\" % pathname)\n\n paths = pathname.split('/')\n while os.curdir in paths:\n paths.remove(os.curdir)\n if not paths:\n return os.curdir\n return os.path.join(*paths)", "def _adjust_path(self, file):\n path_component = '/osm_pla/test/'\n real_path = os.path.realpath(file)\n if path_component not in real_path:\n return os.path.dirname(real_path) + path_component + os.path.basename(real_path)\n else:\n return real_path", "def relativize(path: str):\n return join('.', path)", "def make_path(self, filename):\n return os.path.join(self.root_path, filename)", "def from_cwd(root, path):\n return normpath(join(root, normpath(path)))", "def relName(path, cwd=None, root=None):\n relRoot = os.path.normpath((root or projectRoot)) + os.sep\n cwd = os.path.abspath((cwd or os.getcwd())) + os.sep\n if path == cwd or path == cwd[:-1]:\n return \".\"\n\n if path.startswith(cwd):\n # The relative name is below the CWD, so we simply strip off the\n # leading parts.\n return path[len(cwd):]\n\n if path.startswith(relRoot) and cwd.startswith(relRoot):\n # The path is below the nominal root but parallel to the CWD. 
We need\n # to add some '../' parts.\n relToRootPath = path[len(relRoot):]\n relToRootCWD = cwd[len(relRoot):-1]\n count = 0\n while count < 1000 and relToRootCWD and relToRootCWD != os.sep:\n relToRootCWD, b = os.path.split(relToRootCWD)\n relToRootPath = \"..\" + os.sep + relToRootPath\n assert count < 1000\n return relToRootPath\n\n return path", "def join_path(self, template, parent):\n if (template.startswith('./')):\n return os.path.join(os.path.dirname(parent), template)\n return template", "def chop(self, pathname):\n assert pathname.startswith(self.dist_dir)\n return pathname[len(self.dist_dir):]", "def root(*args):\n return join(abspath(dirname(__file__)), *args)", "def native(path):\n path = _os.path.sep.join(path.split('/'))\n return _os.path.normpath(_os.path.join(root, path))", "def root_name(file_name, file_id):\n if file_id is not None:\n return \"{}{}\".format(R_DIR, file_name.format(file_id))\n else:\n return \"{}{}\".format(R_DIR, file_name)", "def reroot_path(\n filename: PurePosixPath, docpath: PurePath, project_root: Path\n) -> Tuple[n.FileId, Path]:\n\n if filename.is_absolute():\n rel_fn = n.FileId(*filename.parts[1:])\n else:\n rel_fn = n.FileId(*docpath.parent.joinpath(filename).parts).collapse_dots()\n return rel_fn, project_root.joinpath(rel_fn).resolve()", "def fix_path(name):\n saveslash = \"/\" if (name[0] == \"/\") else \"\"\n name = re.split(\"\\\\\\|/\", name)\n new = name[0]\n for i in range(1,len(name)):\n new = os.path.join(new, name[i])\n new = \"%s%s\" % (saveslash, new)\n return new", "def build_relative_path(full_path, prefix='/', split_on='/data/'):\n splits = full_path.split(split_on)\n return os.path.join(prefix, split_on, splits[-1])", "def relatif (path, root = None):\n\tfrom os import sep, getcwd\n\tpath = normalizePath(path)\n\tif root != None:\n\t\troot =normalizePath(root)\n\t# If the path is empty\n\tif len(path) == 0:\n\t\treturn \"\"\n\n\t# If the root is not defined\n\tif root == None:\n\t\t# Take the current directory\n\t\troot = getcwd()\n\t\t\n\t# Cut paths to directory\n\tif path[-1] == sep:\n\t\tpath = path[:-1]\n\tspPath = path.split(sep)\n\tspRoot = root.split(sep)\n\n\t# Constructs the list of the identical path\n\tequal = []\n\tfor i in range(0,mini(len(spRoot),len(spPath))):\n\t\tif spRoot[i] != spPath[i]:\n\t\t\tbreak\n\t\telse:\n\t\t\tequal.append(spPath[i])\n\n\t# If the identical list is not empty\n\tif len(equal) != 0:\n\t\t# Remove identical paths \n\t\tspRoot = spRoot[len(equal):]\n\t\tspPath = spPath[len(equal):]\n\t\t\n\t\t# Add an indirection\n\t\tfor i in range(len(spRoot)):\n\t\t\tspPath.insert(0,\"..\")\n\n\t# Constructs the relative path\n\tresult = \"\"\n\tfor i in spPath:\n\t\tresult += i + sep\n\n\tif result != \"\":\n\t\treturn result[:-1]\n\telse:\n\t\treturn \"\"", "def path_from_root(*pathelems):\n return os.path.join(__rootpath__, *pathelems)", "def zenpathjoin(self, path):\n return zenpathjoin(path)", "def buildpath(self):\n basepath = urlutil.href_settings.root + (self.relpath if self.relpath else cherrypy.request.path_info)\n if basepath.find('~') < 0:\n basepath += ('' if basepath.endswith('/') else '/') + '~'\n if cherrypy.request.query_string:\n basepath += ('&' if basepath.find('?') >= 0 else '?') + cherrypy.request.query_string\n return basepath", "def get_path(path):\n if _prefix and not '/' in path:\n path = _prefix + path\n\n if not _cwd:\n return path\n\n return join(_cwd, path)", "def translate(self, uri_path):\n _parts = [self.root] + uri_path.lstrip('/').split('/')\n fs_path = 
os.sep.join(_parts)\n fs_path = os.path.realpath(fs_path)\n return fs_path", "def new_filename(original_filename, new_locale):\r\n f = path(original_filename)\r\n new_file = f.parent.parent.parent / new_locale / f.parent.name / f.name\r\n return new_file.abspath()" ]
[ "0.6437045", "0.59677273", "0.5884409", "0.57894766", "0.5737389", "0.56540275", "0.5632758", "0.560187", "0.55786604", "0.55463153", "0.54867226", "0.5479389", "0.5465328", "0.5434634", "0.5426187", "0.5377088", "0.5376396", "0.52881503", "0.5265455", "0.5238494", "0.5189643", "0.5177203", "0.5164568", "0.5159111", "0.51431644", "0.51255876", "0.5116204", "0.5114498", "0.5109129", "0.5095898" ]
0.77424914
0
Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, etc. Currently this includes: HOME - the user's home directory (Unix only), and PLAT - a description of the current platform (see 'get_platform()').
def check_environ():
    global _environ_checked
    if _environ_checked:
        return

    if os.name == 'posix' and 'HOME' not in os.environ:
        import pwd
        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]

    if 'PLAT' not in os.environ:
        os.environ['PLAT'] = _sysconfig.get_platform()

    _environ_checked = 1
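A small illustration of the guarantee (hypothetical, assuming a fresh interpreter where the module global _environ_checked is still false and _sysconfig is the module's platform helper):

import os

os.environ.pop('PLAT', None)   # pretend the variable was never set
check_environ()                # fills in HOME (POSIX only) and PLAT
assert 'PLAT' in os.environ
check_environ()                # second call is a no-op: _environ_checked short-circuits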
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_environment():\n reqs = ['NAME', 'RECIPIENT', 'SUBJECT', 'MESSAGE',\n 'MAILGUN_API_KEY', 'MAILGUN_DOMAIN']\n for req in reqs:\n if not os.getenv(req):\n logging.error('Environment variable ' + req + ' is not set')\n sys.exit(2)", "def _check_env():\n\tif os.getenv(_DATA_DIRECTORY_ENV_KEY) is None:\n\t\texit_everything(ERROR_DATA_DIRECTORY_NOT_SET, f'{_DATA_DIRECTORY_ENV_KEY} env var not set')\n\t\n\tif os.getenv(_FRONTEND_URL_ENV_KEY) is None:\n\t\texit_everything(ERROR_FRONTEND_NOT_SET, f'{_FRONTEND_URL_ENV_KEY} env var not set')", "def test_environ_vars_available(self) -> None:\n self.assertIsNotNone(os.environ.get('AWS_ACCESS_KEY_ID'))\n self.assertIsNotNone(os.environ.get('AWS_SECRET_KEY'))\n self.assertIsNotNone(os.environ.get('AWS_REGION_NAME'))\n self.assertIsNotNone(os.environ.get('S3_BUCKET'))", "def check_environment() -> None:\n for item in ['IB_USER', 'IB_PASSWORD', 'IB_URL']:\n if os.getenv(item) is None:\n raise click.UsageError(f'{item} environment variable must be set before using ib.')", "def check_user_environment(config):\n if not config.has_section('user_env_vars'):\n return\n\n for env_var in config.keys('user_env_vars'):\n if env_var in os.environ:\n msg = '{} is already set in the environment. '.format(env_var) +\\\n 'Overwriting from conf file'\n config.logger.warning(msg)", "def set_envvars(self):\n # self.logger.trace(\"update os.environ with %s\", self.environ)\n for key in os.environ:\n current = self.environ.get(key)\n if current is None:\n del os.environ[key]\n for key, value in self.environ.items():\n if value is not None:\n os.environ[key] = str(value)", "def _generate_environment(self):\n envvars = {}\n for key in self.envvars:\n try:\n envvars[key] = os.environ[key]\n except KeyError:\n continue\n\n # Warn the user that we cannot support secrets\n if envvars:\n logger.warning(\"This API does not support environment secrets.\")\n return envvars", "def cli(ctx: click.Context):\n try:\n # Ensure the necessary environment variables are set before proceeding.\n all(environ[env_var] for env_var in Env.values())\n\n except KeyError as exc:\n ctx.fail(f\"Missing environment variable: {exc}\")", "def check_envvar(envvar):\n if not os.environ.get(envvar):\n raise EnvironmentError(\"Variable '%s' not set\" % envvar)", "def environ_expansions():\r\n global _environ_expansions\r\n\r\n if _environ_expansions:\r\n return _environ_expansions\r\n\r\n _environ_expansions = {}\r\n for key, value in os.environ.items():\r\n _environ_expansions['ENV_%s' % key] = value\r\n\r\n return _environ_expansions", "def find_environ_config_vars():\n # only allow secret key and database uri for now\n envvars = [\"SQLALCHEMY_DATABASE_URI\", \"SECRET_KEY\"]\n results = {}\n for key in envvars:\n if key in os.environ:\n results[key] = os.environ[key]\n return results", "def clean_env():\n for key in ['FOO', 'THOR', 'IRON', 'NAME', 'PERSONAL_DIR']:\n os.environ.pop(key, None)", "def get_os_env():\n env = os.environ\n# print(\"env \\n\" , env)\n return env", "def _setup_env(self):\n\n os.environ['GIT_NAME'] = statiki.GIT_NAME\n os.environ['GIT_EMAIL'] = statiki.GIT_EMAIL\n os.environ['GH_TOKEN'] = 'this-is-a-bogus-token:password'\n os.environ['TRAVIS_REPO_SLUG'] = TEST_REPO\n\n return", "def _require_environment():\n require('environment', 'host', provided_by=ENVS.keys())", "def validate_env(self) -> None:\n errors = []\n\n self.user_name = env.str('USER_NAME')\n if not self.user_name:\n errors.append('USER_NAME environment variable needs to be set to your MyQ user name')\n\n 
self.password = env.str('PASSWORD')\n if not self.password:\n errors.append('PASSWORD environment variable needs to be set to your MyQ password')\n\n self.left_door = env.int('EDGEWOOD', 0)\n self.right_door = 1 - self.left_door\n\n self.only_close = env.bool('ONLY_CLOSE', True)\n\n if errors:\n raise Exception(','.join(errors))", "def environ() -> Environ:\n try:\n return Environ(os.environ['LABELBOX_TEST_ENVIRON'])\n except KeyError:\n raise Exception(f'Missing LABELBOX_TEST_ENVIRON in: {os.environ}')", "def _assert_envs_exist(strict_keys: Set[str]) -> None:\n missing_keys: List[str] = [\n strict_key\n for strict_key in strict_keys\n if strict_key not in environ\n ]\n\n if missing_keys:\n raise StrictEnvException(\n 'Missing env vars: {0}'.format(', '.join(missing_keys)),\n )", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def _init_env_variables(self):\n raise NotImplementedError()", "def update_environ():\n\n # Environment variables to set.\n BASE = os.getcwd()\n PLUGINS = os.path.join(BASE, 'lib')\n RESOURCES = os.path.join(BASE, 'res')\n MODELS = os.path.join(RESOURCES, 'models')\n\n # Set the vaue to '' to set the var to ''.\n # Anything else will be added to current var value.\n minimapper_env = {\n 'GAZEBO_RESOURCE_PATH': RESOURCES,\n 'GAZEBO_MODEL_PATH': MODELS,\n 'GAZEBO_PLUGIN_PATH': PLUGINS,\n 'GAZEBO_MODEL_DATABASE_URI': None\n }\n\n # Conditionally set environment variables.\n env = os.environ.copy()\n for key, val in minimapper_env.items():\n if val is None:\n env[key] = ''\n elif key not in env:\n env[key] = val\n elif key in env and val not in env[key]:\n env[key] = val + ':' + env[key]\n\n return env", "def getenv_check(e):\n res = os.getenv(e)\n if res == None:\n print(e, 'environment variable not set - stopping.')\n exit(1)\n else:\n return res", "def check_env():\n logger.debug(\"Checking enviroment\")\n if os.getuid() != 0:\n exit_on_error(\"twindb-register-storage.py must be run by root\")\n logger.debug(\"Enviroment is OK\")\n return True", "def config_env_var_verify():\n with open('skywalking/config.py', 'r') as config_file:\n data = config_file.read().replace('\\n', '')\n for each in OPTIONS.keys():\n if f'_{each.upper()}' not in data:\n raise Exception(f'Environment variable for {each.upper()} is not found in config.py\\n'\n f'This means you have a mismatch of config.py variable and env var name')", "def test_env_vars():\n # Create a variable with the file system encoding and save it\n # in our PYTHONPATH\n env_var = to_fs_from_unicode(u'ñññ')\n CONF.set('main', 'spyder_pythonpath', [env_var])\n\n # Create a kernel spec\n kernel_spec = SpyderKernelSpec()\n\n # Assert PYTHONPATH is in env vars and it's not empty\n assert kernel_spec.env['PYTHONPATH'] != ''\n\n # Assert all env vars are binary strings\n assert all([is_binary_string(v) for v in kernel_spec.env.values()])\n\n # Remove our entry from PYTHONPATH\n CONF.set('main', 'spyder_pythonpath', [])", "def test_environ() -> None:\n os.environ[\"TEST\"] = \"tester\"\n assert os.getenv(\"TEST\") == \"tester\"" ]
[ "0.7685037", "0.768269", "0.74665606", "0.7349859", "0.72101486", "0.7192757", "0.7188985", "0.7085273", "0.6954072", "0.692295", "0.6899005", "0.68856543", "0.6869819", "0.68643546", "0.68599814", "0.68545455", "0.68270725", "0.6821289", "0.6815573", "0.6815573", "0.6815573", "0.6815573", "0.6815573", "0.6815573", "0.68128085", "0.67973304", "0.67914826", "0.67559314", "0.6743241", "0.67176634" ]
0.8246562
0
Perform shell/Perl-style variable substitution on 'string'. Every occurrence of '$' followed by a name is considered a variable, and the variable is substituted by the value found in the 'local_vars' dictionary, or in 'os.environ' if it's not in 'local_vars'. 'os.environ' is first checked/augmented to guarantee that it contains certain values: see 'check_environ()'. Raise ValueError for any variables not found in either 'local_vars' or 'os.environ'.
def subst_vars(s, local_vars):
    check_environ()

    def _subst(match, local_vars=local_vars):
        var_name = match.group(1)
        if var_name in local_vars:
            return str(local_vars[var_name])
        else:
            return os.environ[var_name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    except KeyError, var:
        raise ValueError("invalid variable '$%s'" % var)
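An illustrative call (hypothetical values, assuming the subst_vars above is in scope; the document is Python 2 code, hence the 'except KeyError, var' syntax):

print(subst_vars("install into $base/lib for plat $PLAT", {'base': '/usr/local'}))
# '$base' is resolved from local_vars; '$PLAT' falls back to os.environ, which
# is guaranteed to hold it because subst_vars calls check_environ() first.
# A '$name' found in neither mapping raises ValueError.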
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def expand_vars(string, env_vars=None):\n if env_vars is None:\n env_vars = os.environ\n # create a replacement callback function that uses env_vars as it's first\n # argument, additional arguments will be added after it\n repl_callback = functools.partial(_var_repl, env_vars)\n return re.sub(r'\\$(?P<variable>[a-zA-Z]\\w*)((?=[\\W])|$)', repl_callback, string)", "def envsubst(string):\n # handle simple un-bracketed env vars like $FOO\n a = _simple_re.sub(_repl_simple_env_var, string)\n\n # handle bracketed env vars with optional default specification\n b = _extended_re.sub(_repl_extended_env_var, a)\n return b", "def expandvars(buffer, env, default=None, skip_escaped=False):\n\n def replace_var(match):\n return env.get(match.group(2) or match.group(1), match.group(0) if default is None else default)\n\n pattern = (r'(?<!\\\\)' if skip_escaped else '') + r'\\$(\\w+|\\{([^}]*)\\})'\n return sub(pattern, replace_var, buffer)", "def _expand_variables(input_str, cmake_vars):\n def replace(match):\n if match.group(1) in cmake_vars:\n return cmake_vars[match.group(1)]\n return \"\"\n return _CMAKE_ATVAR_REGEX.sub(replace,_CMAKE_VAR_REGEX.sub(replace, input_str))", "def substitute_key(text, variables):\n if variables is not None:\n matches = _property_pattern.findall(text)\n for token, key in matches:\n if key in variables:\n value = variables[key]\n text = text.replace(token, value)\n\n matches = _environment_pattern.findall(text)\n for token, key in matches:\n # log, or throw an exception if key is not found.\n if not env_helper.has_env(str_helper.to_string(key)):\n continue\n\n value = env_helper.getenv(str_helper.to_string(key))\n text = text.replace(token, value)\n\n return text", "def update_env_from_string(env_string):\n excluded_keys = [\"_\", \"SHLVL\", \"PWD\", \"OLDPWD\"]\n env = os.environ\n for line in env_string.split(\"\\n\"):\n (key, _, value) = line.partition(\"=\")\n if key and value and key not in excluded_keys:\n env[key] = value\n return env", "def replace_variables(text, vars=zen_settings['variables']):\n\treturn re.sub(r'\\$\\{([\\w\\-]+)\\}', lambda m: m.group(1) in vars and vars[m.group(1)] or m.group(0), text)", "def envsubst(input_file) -> str:\n with open(input_file, 'r') as base:\n content_after = content_before = base.read()\n for match in re.finditer(r'\\$\\{?(.+?)\\}?', content_before):\n env_var = match.group(1)\n env_var_value = os.getenv(env_var)\n content_after = content_before.replace(match.group(0), env_var_value or '')\n return content_after", "def _add_env_var_injector(tag: str = \"!ENV\") -> None:\n # pattern for global vars: look for ${word}\n pattern = re.compile(\".*?\\${([^}]+::[^}]*)}.*?\") # noqa: W605\n loader = yaml.SafeLoader\n\n # the tag will be used to mark where to start searching for the pattern\n # e.g. 
somekey: !ENV somestring${MYENVVAR}blah blah blah\n loader.add_implicit_resolver(tag, pattern, None) # type: ignore\n\n def constructor_env_variables(loader, node) -> Any: # type: ignore\n \"\"\"\n Extracts the environment variable from the node's value\n :param yaml.Loader loader: the yaml loader\n :param node: the current node in the yaml\n :return: the parsed string that contains the value of the environment\n variable\n \"\"\"\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n (env_var, default_val) = g.split(\"::\")\n value = os.environ.get(env_var, default_val)\n full_value = full_value.replace(f\"${{{g}}}\", value)\n if not full_value:\n full_value = None\n _logger.debug(f\"injected ENV parameter {env_var} resolved to {value}\")\n return full_value\n return value\n\n loader.add_constructor(tag, constructor_env_variables) # type: ignore", "def substitute_macros(text):\n f_text = text\n for (pattern,replacement) in context.environment.items():\n replacement = replacement.replace(os.path.sep,'/')\n f_text = f_text.replace('$(%s)' % pattern.upper(), replacement)\n return f_text", "def scons_subst_once(strSubst, env, key):\n if isinstance(strSubst, str) and strSubst.find('$') < 0:\n return strSubst\n\n matchlist = ['$' + key, '${' + key + '}']\n val = env.get(key, '')\n def sub_match(match, val=val, matchlist=matchlist):\n a = match.group(1)\n if a in matchlist:\n a = val\n if is_Sequence(a):\n return ' '.join(map(str, a))\n else:\n return str(a)\n\n if is_Sequence(strSubst):\n result = []\n for arg in strSubst:\n if is_String(arg):\n if arg in matchlist:\n arg = val\n if is_Sequence(arg):\n result.extend(arg)\n else:\n result.append(arg)\n else:\n result.append(_dollar_exps.sub(sub_match, arg))\n else:\n result.append(arg)\n return result\n elif is_String(strSubst):\n return _dollar_exps.sub(sub_match, strSubst)\n else:\n return strSubst", "def environment_variable_string(self, name):\n return \"$(\" + name + \")\"", "def escape_variables(environ):\n return {key: environ[key].replace('%', '%%') for key in environ}", "def replace(self, text):\n for key, val in self.env.items():\n text = text.replace(\"$%s\" % key, val)\n return text", "def expand(self, s, lvars):\n if is_String(s):\n try:\n s0, s1 = s[:2]\n except (IndexError, ValueError):\n return s\n if s0 != '$':\n return s\n if s1 == '$':\n # In this case keep the double $'s which we'll later\n # swap for a single dollar sign as we need to retain\n # this information to properly avoid matching \"$(\"\" when\n # the actual text was \"$$(\"\" (or \"$)\"\" when \"$$)\"\" )\n return '$$'\n elif s1 in '()':\n return s\n else:\n key = s[1:]\n if key[0] == '{' or '.' 
in key:\n if key[0] == '{':\n key = key[1:-1]\n\n # Store for error messages if we fail to expand the\n # value\n old_s = s\n s = None\n if key in lvars:\n s = lvars[key]\n elif key in self.gvars:\n s = self.gvars[key]\n else:\n try:\n s = eval(key, self.gvars, lvars)\n except KeyboardInterrupt:\n raise\n except Exception as e:\n if e.__class__ in AllowableExceptions:\n return ''\n raise_exception(e, lvars['TARGETS'], old_s)\n\n if s is None and NameError not in AllowableExceptions:\n raise_exception(NameError(key), lvars['TARGETS'], old_s)\n elif s is None:\n return ''\n\n # Before re-expanding the result, handle\n # recursive expansion by copying the local\n # variable dictionary and overwriting a null\n # string for the value of the variable name\n # we just expanded.\n #\n # This could potentially be optimized by only\n # copying lvars when s contains more expansions,\n # but lvars is usually supposed to be pretty\n # small, and deeply nested variable expansions\n # are probably more the exception than the norm,\n # so it should be tolerable for now.\n lv = lvars.copy()\n var = key.split('.')[0]\n lv[var] = ''\n return self.substitute(s, lv)\n elif is_Sequence(s):\n def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars):\n return conv(substitute(l, lvars))\n return list(map(func, s))\n elif callable(s):\n\n # SCons has the unusual Null class where any __getattr__ call returns it's self, \n # which does not work the signature module, and the Null class returns an empty\n # string if called on, so we make an exception in this condition for Null class\n # Also allow callables where the only non default valued args match the expected defaults\n # this should also allow functools.partial's to work.\n if isinstance(s, SCons.Util.Null) or {k for k, v in signature(s).parameters.items() if\n k in _callable_args_set or v.default == Parameter.empty} == _callable_args_set:\n\n s = s(target=lvars['TARGETS'],\n source=lvars['SOURCES'],\n env=self.env,\n for_signature=(self.mode == SUBST_SIG))\n else:\n # This probably indicates that it's a callable\n # object that doesn't match our calling arguments\n # (like an Action).\n if self.mode == SUBST_RAW:\n return s\n s = self.conv(s)\n return self.substitute(s, lvars)\n elif s is None:\n return ''\n else:\n return s", "def replaceVariables(template, virtroot, treename):\n if not template.startswith('${') and not template.endswith('}'): raise SyntaxError('Template format unknown.')\n template = template[2:-1]\n template = template.replace('VIRTROOT', virtroot)\n template = template.replace('TREENAME', treename)\n\n # deal with concurrent / issues by having os.path.join build the final path\n return os.path.join('/', *template.split('/'))", "def replace_vars(params, contents):\n if isinstance(contents, str):\n contents = [contents]\n replace_contents = []\n\n if params != None and contents != None:\n for content in contents:\n replace_content = content\n for match in regexp_replace_var.findall(content):\n if match in params:\n w_param = params[match][0].__str__()\n _logger.debug(\"match variable {} , replace by {}\".format(match, w_param))\n if not w_param.isdigit():\n w_param = w_param.replace(\"\\\"\", \"\").replace(\"\\'\", \"\").replace(\"\\\\\", \"\\\\\\\\\")\n replace_content = replace_content.replace(\"${\" + match + \"}\", w_param)\n else:\n replace_content = replace_content.replace(\"\\\"${\" + match + \"}\\\"\", w_param)\n \n else:\n replace_content = replace_content.replace(\"${\" + match + \"}\", \"\") \n 
_logger.debug(\"replace_content={}\".format(replace_content))\n replace_contents.append(replace_content)\n return replace_contents", "def replace_variables(self, text, context):\n text = text.replace('__VENV_DIR__', context.env_dir)\n text = text.replace('__VENV_NAME__', context.env_name)\n text = text.replace('__VENV_PROMPT__', context.prompt)\n text = text.replace('__VENV_BIN_NAME__', context.bin_name)\n text = text.replace('__VENV_PYTHON__', context.env_exe)\n return text", "def interpolate(self, string, ivars, settings=None):\n\n if not isinstance(string, basestring):\n return string\n\n if settings is None:\n settings = self.settings\n\n ret = string\n m = self._INTERP_REGEX.search(ret)\n i = 0\n while m is not None:\n k = m.group(3)\n ret = ret.replace(m.group(2), self.get_ivar(k, ivars, settings))\n m = self._INTERP_REGEX.search(ret)\n i += 1\n if i > self._MAX_INTERP:\n raise RuntimeError(\"Cyclic interpolation (more than %d expansions performed).\" % self._MAX_INTERP)\n\n return ret.replace(\"$$\", \"$\")", "def update_from_env(d: dict, variables: List[str], inplace: bool = False):\n new_keys = {}\n for var in variables:\n new_keys[var] = os.environ[var]\n\n if inplace:\n d.update(new_keys)\n return d\n\n for key in d:\n if key not in new_keys:\n new_keys[key] = d[key]\n\n return new_keys", "def regex_findall_variables(raw_string: Text) -> List[Text]:\n try:\n match_start_position = raw_string.index(\"$\", 0)\n except ValueError:\n return []\n\n vars_list = []\n while match_start_position < len(raw_string):\n\n # Notice: notation priority\n # $$ > $var\n\n # search $$\n dollar_match = dolloar_regex_compile.match(raw_string, match_start_position)\n if dollar_match:\n match_start_position = dollar_match.end()\n continue\n\n # search variable like ${var} or $var\n var_match = variable_regex_compile.match(raw_string, match_start_position)\n if var_match:\n var_name = var_match.group(1) or var_match.group(2)\n vars_list.append(var_name)\n match_start_position = var_match.end()\n continue\n\n curr_position = match_start_position\n try:\n # find next $ location\n match_start_position = raw_string.index(\"$\", curr_position + 1)\n except ValueError:\n # break while loop\n break\n\n return vars_list", "def FillForm(string_for_substitution, dictionary_of_vars):\n return_string = string_for_substitution\n for i in re.findall(\"//%%(.*)%%//\", string_for_substitution):\n return_string = re.sub(\"//%%\" + i + \"%%//\", dictionary_of_vars[i],\n return_string)\n return return_string", "def constructor_env_variables(loader, node):\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n full_value = full_value.replace(\n f'${{{g}}}', os.environ.get(g, g)\n )\n return full_value\n return value", "def set_env_var(varnames, varvalues):\n try:\n for i in range(len(varnames)):\n os.environ[varnames[i]] = str(varvalues[i]).strip()\n except Exception as e:\n raise j.exceptions.RuntimeError(e)", "def _replacement(name):\n ret = os.getenv(name, values.get(name, \"\"))\n return ret", "def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):\n if (isinstance(strSubst, str) and '$' not in strSubst) or isinstance(strSubst, CmdStringHolder):\n return strSubst\n\n if conv is None:\n conv = _strconv[mode]\n\n # Doing this every time is a bit of a waste, since the Executor\n # has typically already populated the OverrideEnvironment with\n # $TARGET/$SOURCE 
variables. We're keeping this (for now), though,\n # because it supports existing behavior that allows us to call\n # an Action directly with an arbitrary target+source pair, which\n # we use in Tool/tex.py to handle calling $BIBTEX when necessary.\n # If we dropped that behavior (or found another way to cover it),\n # we could get rid of this call completely and just rely on the\n # Executor setting the variables.\n if 'TARGET' not in lvars:\n d = subst_dict(target, source)\n if d:\n lvars = lvars.copy()\n lvars.update(d)\n\n # We're (most likely) going to eval() things. If Python doesn't\n # find a __builtins__ value in the global dictionary used for eval(),\n # it copies the current global values for you. Avoid this by\n # setting it explicitly and then deleting, so we don't pollute the\n # construction environment Dictionary(ies) that are typically used\n # for expansion.\n gvars['__builtins__'] = __builtins__\n\n ss = StringSubber(env, mode, conv, gvars)\n result = ss.substitute(strSubst, lvars)\n\n try:\n del gvars['__builtins__']\n except KeyError:\n pass\n\n res = result\n if is_String(result):\n # Remove $(-$) pairs and any stuff in between,\n # if that's appropriate.\n remove = _regex_remove[mode]\n if remove:\n if mode == SUBST_SIG:\n result = _list_remove[mode](remove.split(result))\n if result is None:\n raise SCons.Errors.UserError(\"Unbalanced $(/$) in: \" + res)\n result = ' '.join(result)\n else:\n result = remove.sub('', result)\n if mode != SUBST_RAW:\n # Compress strings of white space characters into\n # a single space.\n result = _space_sep.sub(' ', result).strip()\n\n # Now replace escaped $'s currently \"$$\"\n # This is needed because we now retain $$ instead of\n # replacing them during substition to avoid\n # improperly trying to escape \"$$(\" as being \"$(\"\n result = result.replace('$$','$')\n elif is_Sequence(result):\n remove = _list_remove[mode]\n if remove:\n result = remove(result)\n if result is None:\n raise SCons.Errors.UserError(\"Unbalanced $(/$) in: \" + str(res))\n\n return result", "def expandvars(s, mp=VAR_EXPAND_MAP, no_match_val=None, proj_err=[False]):\n proj_err[0] = False\n\n def repl(match): #SKIP\n s = match.group(0)\n r = s.replace('{', '').replace('}', '').replace('$', '')\n rs = [('$'+var) for var in r.split(':')]\n\n for optname in rs:\n val = mp.get(optname)\n try:\n val = val()\n except TypeError:\n pass\n if val:\n return val\n\n # failed to expand\n if all(var == '$folder' or var.startswith('$project') for var in rs ): # is only $project... 
and $folder\n proj_err[0] = True\n return os.path.dirname(ed.get_filename())\n\n return no_match_val or s\n\n def expand_str(s): #SKIP\n if '$' in s:\n return re_expand.sub(repl, s)\n else:\n return s\n\n if type(s) == str:\n return expand_str(s)\n else:\n return [expand_str(sp) for sp in s]", "def variableSubstitution(d):\n variable = re.compile(r\"^(.*)\\$\\{(.*)\\}(.*)\")\n\n # translate the dictionary to lower-case keys:\n dd = {k.lower():v for k,v in d.iteritems()}\n maxIterations=4\n \n for i in range(maxIterations):\n anyChanges=False\n for k,v in dd.iteritems():\n if not isinstance(v,str):\n # Only operate on string-valued entries\n continue\n m = variable.match(v)\n if not m:\n continue\n anyChanges = True\n vout = str(v)\n while m:\n key = m.group(2).lower()\n if key not in dd.keys():\n print \"ERROR: variable substitution asks for nonexistent Attribute\", key, \"in\", v\n sys.exit(1)\n if key==k:\n print \"ERROR: self-reference to Attribute\", key, \"in\", v\n vv = dd[key]\n if not isinstance(vv,str):\n print \"ERROR: variable substitution using non-string-valued Attribute\",key\n sys.exit(1)\n vout = m.expand(r\"\\g<1>\"+vv+r\"\\g<3>\")\n m = variable.match(vout)\n dd[k] = vout\n if not anyChanges:\n break # Done\n if i==maxIterations:\n print \"ERROR: Too many iterations in variableSubstitution\"\n sys.exit(1)\n # restore case of original dictionary\n for k in d.keys():\n d[k] = dd[k.lower()]\n return", "def _replaceEnv(self, match):\r\n var = match.group('var')\r\n\r\n try:\r\n return os.environ[var]\r\n except KeyError:\r\n raise EnvironmentVariableNotFound('Can not find environment '\r\n 'variable: {0}'.format(var))", "def constructor_env_variables(loader, node) -> Any: # type: ignore\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n (env_var, default_val) = g.split(\"::\")\n value = os.environ.get(env_var, default_val)\n full_value = full_value.replace(f\"${{{g}}}\", value)\n if not full_value:\n full_value = None\n _logger.debug(f\"injected ENV parameter {env_var} resolved to {value}\")\n return full_value\n return value" ]
[ "0.7734589", "0.73284125", "0.6958385", "0.62611526", "0.61794424", "0.61206144", "0.6119231", "0.5942183", "0.5825754", "0.5727875", "0.5716061", "0.56829596", "0.56288034", "0.56126416", "0.56008154", "0.5499839", "0.5496333", "0.5489837", "0.54745865", "0.5474001", "0.54539514", "0.5427734", "0.5399777", "0.53992033", "0.53650254", "0.53557235", "0.5351744", "0.53503025", "0.53494406", "0.5333476" ]
0.8150585
0
Generate a useful error message from an EnvironmentError (an IOError or OSError exception object). Handles Python 1.5.1 and 1.5.2 styles, and does what it can to deal with exception objects that don't have a filename (which happens when the error is due to a two-file operation, such as 'rename()' or 'link()'). Returns the error message as a string prefixed with 'prefix'.
def grok_environment_error(exc, prefix="error: "):
    # check for Python 1.5.2-style {IO,OS}Error exception objects
    if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
        if exc.filename:
            error = prefix + "%s: %s" % (exc.filename, exc.strerror)
        else:
            # two-argument functions in posix module don't
            # include the filename in the exception object!
            error = prefix + "%s" % exc.strerror
    else:
        error = prefix + str(exc[-1])

    return error
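Typical use is inside an except clause (sketch with an invented filename, assuming the function above; note that a two-file call such as os.rename() attaches no filename, so only the strerror part would appear):

try:
    open('no-such-file')
except IOError as exc:
    print(grok_environment_error(exc, prefix="error: "))
    # -> error: no-such-file: No such file or directory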
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exception_message():\n def get_os_release():\n \"\"\"Returns detailed OS release.\"\"\"\n if platform.linux_distribution()[0]:\n return \" \".join(platform.linux_distribution())\n elif platform.mac_ver()[0]:\n return \"%s %s\" % (platform.mac_ver()[0], platform.mac_ver()[2])\n else:\n return \"Unknown\"\n\n msg = (\n \"Oops! Cuckoo failed in an unhandled exception!\\nSometimes bugs are \"\n \"already fixed in the development release, it is therefore \"\n \"recommended to retry with the latest development release available \"\n \"%s\\nIf the error persists please open a new issue at %s\\n\\n\" %\n (GITHUB_URL, ISSUES_PAGE_URL)\n )\n\n msg += \"=== Exception details ===\\n\"\n msg += \"Cuckoo version: %s\\n\" % version\n msg += \"OS version: %s\\n\" % os.name\n msg += \"OS release: %s\\n\" % get_os_release()\n msg += \"Python version: %s\\n\" % platform.python_version()\n msg += \"Python implementation: %s\\n\" % platform.python_implementation()\n msg += \"Machine arch: %s\\n\" % platform.machine()\n\n try:\n import pip\n\n msg += \"Modules: %s\\n\" % \" \".join(sorted(\n \"%s:%s\" % (package.key, package.version)\n for package in pip.get_installed_distributions()\n ))\n except ImportError:\n pass\n\n msg += \"\\n\"\n return msg", "def error(self, e):\n return \"{}: {} ({})\".format(e.__class__.__name__, e.__doc__, e.message)", "def _strerror(err):\n if hasattr(os, 'strerror'):\n return os.strerror(err.errno)\n else:\n return err.strerror", "def err_str(err):\n return \"\".join(format_exception_only(type(err), err))", "def get_error(msg, prefix, default_value='Error:'):\n \n if prefix is not default_value:\n prefix = default_value + prefix\n\n error_msg = prefix + ' ' + msg\n return error_msg", "def get_errors(*args, **kwargs):\n\n prefix = kwargs.get('prefix', 'Error:')\n if prefix is not 'Error:':\n prefix = 'Error: '+ prefix\n\n msg = ' '.join(args)\n \n error_msg = prefix + ' ' + msg\n return error_msg", "def error_message(self) -> str:\n return pulumi.get(self, \"error_message\")", "def error_message(self) -> str:\n return pulumi.get(self, \"error_message\")", "def error_message(self) -> str:\n return pulumi.get(self, \"error_message\")", "def error_name(self) -> str:\n return self._error_name", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def application_error(e):\n return 'Sorry, unexpected error: {}'.format(e), 500", "def error_msg(self) -> str:\n return self.__error_msg", "def error_string(self):\n return self._error_string", "def make_exception_message(exc):\n if str(exc):\n return '%s: %s\\n' % (exc.__class__.__name__, exc)\n else:\n return '%s\\n' % (exc.__class__.__name__)", "def src_strerror(error):\n return ffi.string(_lib.src_strerror(error)).decode()", "def tidy_error(ex=None) -> str:\r\n from sys import exc_info\r\n from os.path import join, abspath, dirname\r\n from traceback import extract_tb, format_list, format_exception_only\r\n\r\n show = join(dirname(abspath(__file__)), '')\r\n\r\n def _check_file(name):\r\n return name and 
name.startswith(show)\r\n\r\n def _print(typ, value, tb): # If not debug, generator expression: filter trace to my files.\r\n show = extract_tb(tb) if DEBUG else (fs for fs in extract_tb(tb, limit=3) if _check_file(fs.filename))\r\n fmt = format_list(show) + format_exception_only(typ, value)\r\n return ''.join((f.strip('\"\\'').replace('\\\\n', '') for f in fmt))\r\n\r\n args = ex or exc_info()\r\n return _print(*args)", "def error_message(self) -> str:\n return self._error_message", "def pretty_exception(err: Exception, message: str = \"\"):\n return f\"{message} ({err.__module__}.{err.__class__.__name__}: {err!s})\"", "def __str__(self) -> str:\n message = (\n f\"ERROR: Backend Exception.\\n\"\n f\" - Internal error code: {str(self.code)}\\n\"\n f\" - Internal error message: {str(self.message)}\"\n )\n return message", "def magma_strerror(error):\n\n return _libmagma.magma_strerror(error)", "def _get_error_message_from_exception(self, e):\n error_code = AWSSECURITYHUB_ERR_CODE_UNAVAILABLE\n error_msg = AWSSECURITYHUB_ERR_MSG_UNAVAILABLE\n\n try:\n if e.args:\n if len(e.args) > 1:\n error_code = e.args[0]\n error_msg = e.args[1]\n elif len(e.args) == 1:\n error_code = AWSSECURITYHUB_ERR_CODE_UNAVAILABLE\n error_msg = e.args[0]\n except:\n pass\n\n try:\n if error_code in AWSSECURITYHUB_ERR_CODE_UNAVAILABLE:\n error_text = \"Error Message: {0}\".format(error_msg)\n else:\n error_text = \"Error Code: {0}. Error Message: {1}\".format(error_code, error_msg)\n except:\n self.debug_print(\"Error occurred while parsing error message\")\n error_text = AWSSECURITYHUB_PARSE_ERR_MSG\n\n return error_text", "def error(self):\n errors = self._info.get('error', {}).get('errors')\n if not errors:\n return None\n return ' '.join(err.get('message', 'unknown') for err in errors)", "def error_message(self):\n summary = format(\"%i out of %s failed unexpectedly:\",\n self.pool.num_failed,\n pluralize(self.pool.num_commands, \"command\"))\n details = \"\\n\".join(\" - %s\" % cmd.error_message for cmd in self.commands)\n return summary + \"\\n\\n\" + details" ]
[ "0.6228609", "0.61467487", "0.6119618", "0.6091069", "0.6041068", "0.6019442", "0.60053355", "0.60053355", "0.60053355", "0.59858197", "0.59780866", "0.59780866", "0.59780866", "0.59780866", "0.59780866", "0.59780866", "0.59780866", "0.59780866", "0.5958574", "0.59230393", "0.5880249", "0.5871675", "0.58348554", "0.5830183", "0.5793401", "0.57683444", "0.5765259", "0.57592714", "0.5749121", "0.57451415" ]
0.79304963
0
Convert a string representation of truth to true (1) or false (0). True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if 'val' is anything else.
def strtobool(val):
    val = val.lower()
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    else:
        raise ValueError("invalid truth value %r" % (val,))
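A quick demonstration (assuming the strtobool above is in scope):

for flag in ('YES', 'off', 't', 'maybe'):
    try:
        print('%s -> %d' % (flag, strtobool(flag)))
    except ValueError:
        print('%s -> invalid truth value' % flag)
# YES -> 1, off -> 0, t -> 1, maybe -> invalid truth value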
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def strtobool(val: str) -> int:\n val = val.lower()\n if val in (\"y\", \"yes\", \"t\", \"true\", \"on\", \"1\"):\n return 1\n elif val in (\"n\", \"no\", \"f\", \"false\", \"off\", \"0\"):\n return 0\n else:\n raise ValueError(\"invalid truth value %r\" % (val,))", "def str_to_bool(val: str) -> bool:\n\n val = val.lower()\n if val in {\n \"y\",\n \"yes\",\n \"yep\",\n \"yup\",\n \"t\",\n \"true\",\n \"on\",\n \"enable\",\n \"enabled\",\n \"1\",\n }:\n return True\n elif val in {\"n\", \"no\", \"f\", \"false\", \"off\", \"disable\", \"disabled\", \"0\"}:\n return False\n else:\n raise ValueError(f\"Invalid truth value {val}\")", "def str2bool(self, val):\n return val.lower() in ('true','yes','t',1)", "def _str_to_bool(val):\n if val.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif val.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def strtobool(val: str) -> bool:\n val = val.lower()\n if val in ('y', 'yes', 't', 'true', 'on', '1'):\n return True\n if val in ('n', 'no', 'f', 'false', 'off', '0'):\n return False\n raise ValueError('invalid truth value {!r}'.format(val))", "def parse_boolean(val: str) -> str | bool:\n val = val.lower()\n if val in (\"y\", \"yes\", \"t\", \"true\", \"on\", \"1\"):\n return True\n if val in (\"n\", \"no\", \"f\", \"false\", \"off\", \"0\"):\n return False\n return val", "def str2bool(val: str) -> bool:\n return val.lower() in (\"yes\", \"true\", \"t\", \"1\")", "def convert_to_intbool(val_str):\n return 1 if val_str == 'Yes' else 0", "def str2bool(v):\n\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise ArgumentTypeError('Boolean value expected.')", "def _text2bool(val):\n lval = val.lower()\n if lval in __true_strings: return True\n if lval in __false_strings: return False\n raise ValueError(\"Invalid value for boolean option: %s\" % val)", "def boolean(val):\n\tif val == \"True\" or val == \"1\":\n\t\treturn True\n\telse:\n\t\treturn False", "def str2bool(v):\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(self, v):\n \tprint('Entering conversion function')\n return v.lower() in (\"yes\", \"true\", \"t\", \"1\")", "def str2bool(v):\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v) -> bool:\n\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1', 'True'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0', 'False'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 
't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v):\n if v.lower() in (\"yes\", \"true\", \"t\", \"y\", \"1\"):\n return True\n elif v.lower() in (\"no\", \"false\", \"f\", \"n\", \"0\"):\n return False\n else:\n raise argparse.ArgumentTypeError(\"Boolean value expected.\")", "def str2bool(v):\n if isinstance(v, bool):\n return v\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')", "def str2bool(v): # type: ignore\n return v.lower() in (\"yes\", \"true\", \"t\", \"1\", \"on\")", "def str2bool(value) -> bool:\n if type(value) == type(''):\n if value.lower() in (\"yes\", \"y\", \"true\", \"t\", \"1\"):\n return True\n if value.lower() in (\"no\", \"n\", \"false\", \"f\", \"0\", \"\"):\n return False\n raise Exception('Invalid value for boolean conversion: ' + value)\n return bool(value)", "def str2bool(v):\n return v.lower() in (\"yes\", \"true\", \"t\", \"1\")", "def string_to_bool(value):\n return False if value.upper() == \"FALSE\" or value == \"0\" or value == \"\" else True", "def _parse_bool(val):\n return val.lower() in ('yes', 'true', 't', '1')", "def string_to_bool(value):\n if value in ['true', 'True']:\n return True\n elif value in ['false', 'False']:\n return False\n else:\n return bool(value)", "def str2bool(v):\n return v.lower() in (\"yes\", \"true\", \"t\", \"1\")" ]
[ "0.81975126", "0.8017742", "0.78834474", "0.78206813", "0.77842885", "0.77564293", "0.75404084", "0.7537402", "0.7356985", "0.73436004", "0.7292158", "0.7287533", "0.7237012", "0.72188634", "0.72188634", "0.7196758", "0.7194048", "0.71899515", "0.71899515", "0.71899515", "0.71899515", "0.71749985", "0.7157196", "0.7132721", "0.7076594", "0.706777", "0.7063224", "0.70422363", "0.7038356", "0.70341116" ]
0.81308395
1
Byte-compile a collection of Python source files to either .pyc or .pyo files in the same directory. 'py_files' is a list of files to compile; any files that don't end in ".py" are silently skipped.
def byte_compile(py_files, optimize=0, force=0, prefix=None, base_dir=None,
                 verbose=1, dry_run=0, direct=None):
    # nothing is done if sys.dont_write_bytecode is True
    if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        from tempfile import mkstemp
        script_fd, script_name = mkstemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            try:
                script.write("""\
from distutils2.util import byte_compile
files = [
""")

                # XXX would be nice to write absolute filenames, just for
                # safety's sake (script should be more robust in the face of
                # chdir'ing before running it).  But this requires abspath'ing
                # 'prefix' as well, and that breaks the hack in build_lib's
                # 'byte_compile()' method that carefully tacks on a trailing
                # slash (os.sep really) to make sure the prefix here is "just
                # right".  This whole prefix business is rather delicate -- the
                # problem is that it's really a directory, but I'm treating it
                # as a dumb string, so trailing slashes and so forth matter.

                #py_files = map(os.path.abspath, py_files)
                #if prefix:
                #    prefix = os.path.abspath(prefix)

                script.write(",\n".join(map(repr, py_files)) + "]\n")
                script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))
            finally:
                script.close()

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")

        env = copy(os.environ)
        env['PYTHONPATH'] = ':'.join(sys.path)
        try:
            spawn(cmd, dry_run=dry_run, env=env)
        finally:
            execute(os.remove, (script_name,), "removing %s" % script_name,
                    dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't "
                                     "start with %r" % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
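A hypothetical invocation (paths invented, assuming the function above plus the distutils2 helpers it references — log, spawn, execute, newer, copy — and a Python 2 interpreter, since the indirect path re-runs sys.executable on a generated script):

byte_compile(['build/lib/pkg/__init__.py', 'build/lib/pkg/util.py'],
             optimize=0,           # plain .pyc output
             force=1,              # recompile even if the .pyc looks up to date
             prefix='build/lib/',  # stripped from the source names recorded in the .pyc
             verbose=1, dry_run=0)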
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pyo():\n local('python -O -m compileall .')", "def pyo():\n local('python -O -m compileall .')", "def compile_coffeescript(*files):\r\n if not files:\r\n files = [\"`{}`\".format(coffeescript_files())]\r\n sh(cmd(\r\n \"node_modules/.bin/coffee\", \"--compile\", *files\r\n ))", "def build_from_c_and_cpp_files(extensions):\n for extension in extensions:\n sources = []\n for sfile in extension.sources:\n path, ext = os.path.splitext(sfile)\n if ext in ('.pyx', '.py'):\n if extension.language == 'c++':\n ext = '.cpp'\n else:\n ext = '.c'\n sfile = path + ext\n sources.append(sfile)\n extension.sources = sources", "def writepy(self, paths=[]):\n from vyperlogix import misc\n for top in paths if (misc.isList(paths)) else [paths]:\n try:\n for root, dirs, files in os.walk(top):\n if (self.rx.search(root) == None):\n print '='*80\n print 'files=%s' % files\n py_files = [os.path.join(root,f) for f in files if f.endswith('.py' if not self.isSourceless else '.pyo')]\n print '-'*80\n print 'py_files=%s' % py_files\n util.byte_compile(py_files,optimize=2,force=1)\n for f in py_files:\n print 'ZIP Adding (%s) to (%s)' % (f,self.filename)\n f_base = f.replace('.pyo','.pyc').replace(top,'')\n _f_base = f_base.split(os.sep)[-1]\n self.write(f,f_base)\n print '='*80\n except Exception as details:\n print 'Error in ZIP processing. (%s)' % (str(details))", "def compile(*files, exe_name=None, cc=CC, **cflags):\n\n if not files:\n raise RuntimeError(_(\"compile requires at least one file\"))\n\n if exe_name is None and files[0].endswith(\".c\"):\n exe_name = Path(files[0]).stem\n\n files = \" \".join(files)\n\n flags = CFLAGS.copy()\n flags.update(cflags)\n flags = \" \".join((f\"-{flag}\" + (f\"={value}\" if value is not True else \"\")).replace(\"_\", \"-\")\n for flag, value in flags.items() if value)\n\n out_flag = f\" -o {exe_name} \" if exe_name is not None else \" \"\n\n process = run(f\"{cc} {files}{out_flag}{flags}\")\n\n # Strip out ANSI codes\n stdout = re.sub(r\"\\x1B\\[[0-?]*[ -/]*[@-~]\", \"\", process.stdout())\n\n if process.exitcode != 0:\n for line in stdout.splitlines():\n log(line)\n raise Failure(\"code failed to compile\")", "def _pyCompile ( target, source, env ) :\n if len(target) != 1 :\n fail ( \"unexpected number of targets for pyCompile: \"+str(target) )\n if len(source) != 1 :\n fail ( \"unexpected number of sources for pyCompile: \"+str(source) )\n\n target = str(target[0])\n source = str(source[0])\n trace ( \"Executing pycompile `%s'\" % ( source ), \"pyCompile\", 3 )\n\n try :\n import py_compile\n py_compile.compile ( source, target, doraise = True )\n except py_compile.PyCompileError, e :\n print str(e)\n return -1", "def path_to_bin_files(path):\r\n files_list=list_of_files(path)\r\n for file in files_list:\r\n asm_lines = parse_data(file)\r\n symbols_dict = init_symbols_dictionary()\r\n collect_symbols_and_ignore_coments(asm_lines, symbols_dict)\r\n bin_lines = translate_to_binary(asm_lines, symbols_dict)\r\n create_output(bin_lines, file)", "def compile_dir(path):\r\n to_compile = get_pyx_files(path)\r\n print(\"De:\",path)\r\n if to_compile:\r\n print(\"Se compilaran:\", list(map(os.path.basename,to_compile)))\r\n Cythonize.main( ['-a', '-i'] + to_compile )\r\n else:\r\n print(\"Nada para compilar\")", "def _compile(self, filename, source):\n \n if source and source[-1] != '\\n':\n source = source + '\\n'\n code = __builtin__.compile(source, filename.cStr(), 'exec')\n\n # try to cache the compiled code\n pycFilename = Filename(filename)\n 
pycFilename.setExtension(pycExtension)\n try:\n f = open(pycFilename, 'wb')\n except IOError:\n pass\n else:\n f.write('\\0\\0\\0\\0')\n f.write(struct.pack('<I', self.timestamp))\n f.write(marshal.dumps(code))\n f.flush()\n f.seek(0, 0)\n f.write(imp.get_magic())\n f.close()\n\n return code", "def main():\n print(\n \"\"\"\n\n ##########################################################\n # #\n # #\n # Compiling Colocalized Cyano Datasets #\n # #\n # #\n ##########################################################\n\n \n \"\"\"\n )\n cyanoFiles = glob.glob(f\"{COLOCALIZED_DIR}*.csv\")\n makedir(COMPILED_DIR)\n dfCompiled = pd.DataFrame({})\n for cyanoFile in cyanoFiles:\n print(f\"Compiling {cyanoFile}\")\n data = unify(cyanoFile)\n if len(dfCompiled ) < 1:\n dfCompiled = data\n else:\n dfCompiled = pd.concat([dfCompiled, data], ignore_index=True) \n dfCompiled.to_csv(f\"{COMPILED_DIR}compiled.csv\", index=False)", "def _get_codeobj(pyfile):\n from imp import PY_COMPILED, PY_SOURCE\n\n result, fileobj, fullpath = _check_if_pyc(pyfile)\n\n # WARNING:\n # fp.read() can blowup if the module is extremely large file.\n # Lookout for overflow errors.\n try:\n data = fileobj.read()\n finally:\n fileobj.close()\n\n # This is a .pyc file. Treat accordingly.\n if result is PY_COMPILED:\n # .pyc format is as follows:\n # 0 - 4 bytes: Magic number, which changes with each create of .pyc file.\n # First 2 bytes change with each marshal of .pyc file. Last 2 bytes is \"\\r\\n\".\n # 4 - 8 bytes: Datetime value, when the .py was last changed.\n # 8 - EOF: Marshalled code object data.\n # So to get code object, just read the 8th byte onwards till EOF, and\n # UN-marshal it.\n import marshal\n code_obj = marshal.loads(data[8:])\n\n elif result is PY_SOURCE:\n # This is a .py file.\n code_obj = compile(data, fullpath, 'exec')\n\n else:\n # Unsupported extension\n raise Exception(\"Input file is unknown format: {0}\".format(fullpath))\n\n # Return code object\n return code_obj", "def process_input_files(inputs):\n for ifile in inputs:\n with open(ifile) as fin:\n exec(compile(fin.read(), ifile, 'exec'))", "def get_pyx_files(path, abspath=True):\r\n path = os.path.normpath(os.path.abspath(path))\r\n to_compile = []\r\n for name in os.listdir(path):\r\n if name.endswith(\".pyx\"):\r\n pyx = os.path.join(path,name)\r\n if os.path.isfile(pyx):\r\n to_compile.append(pyx if abspath else name)\r\n return to_compile", "def compile_files(root):\n files = [os.path.join(root, f) for f in os.listdir(root) if not f.startswith(\".\")]\n \n return files", "def assemble_files():\r\n path = os.path.expanduser(sys.argv[1])\r\n if os.path.isdir(path):\r\n file_root = path + \"/\"\r\n for file in os.listdir(path):\r\n filename = os.path.splitext(file)\r\n if filename[1] == \".asm\":\r\n hack_file_name = file_root + filename[0] + \".hack\"\r\n assemble_file(file_root + file, hack_file_name)\r\n else:\r\n filename = os.path.splitext(path)\r\n hack_file_name = filename[0] + \".hack\"\r\n assemble_file(path, hack_file_name)", "def remove_pyc_files():\n with settings(warn_only=True):\n with cd(env.code_dir):\n sudo('find . 
-name \"*.pyc\" -exec rm {} \\;')", "def _compile_C_code(header, body, return_unloaded=False, verbose=False):\n import importlib\n import tempfile\n import uuid\n\n import cffi\n\n module_name = \"module_\" + uuid.uuid4().hex\n\n if \"__uint128\" in header:\n raise ValueError(\"_compile_C_code does not support bit-vector widths \"\n \"larger than 64 bits (cffi does not support __uint128)\")\n\n ffibuilder = cffi.FFI()\n ffibuilder.cdef(header)\n ffibuilder.set_source(module_name, body)\n\n tmpdir = tempfile.TemporaryDirectory()\n lib_path = ffibuilder.compile(tmpdir=tmpdir.name, verbose=verbose)\n\n if return_unloaded:\n return lib_path, module_name, tmpdir\n\n # dynamic import\n # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly\n spec = importlib.util.spec_from_file_location(module_name, lib_path)\n pymod_parent = importlib.util.module_from_spec(spec)\n # sys.modules[module_name] = module\n spec.loader.exec_module(pymod_parent)\n\n pymod = pymod_parent\n\n return pymod, tmpdir", "def compile(self, exclude=None, recurse=True, references=None, verbose=False):\n from clay.shell.core import lsgrep\n\n _os.chdir(self.directory)\n sources = self.sources\n\n if sources is None:\n sources = [_os.path.splitext(x)[0] for x in lsgrep(self.src_ext, self.directory, recurse=recurse)]\n if exclude is not None and len(exclude) > 0:\n sources = list(filter(lambda x: all(not(y in x) for y in exclude), sources))\n # if any flags, include them\n if len(self.flags) > 0:\n opt_str = '-' + ' -'.join(self.flags)\n else:\n opt_str = ''\n\n statechanged = False\n for src in sources:\n src_name = src + self.src_ext\n dst_name = src + self.dst_ext\n\n if not _os.path.exists(src_name):\n print(src, 'doesn\\'t exist, skipping...')\n continue\n\n src_mtime = _os.stat(src_name).st_mtime\n if _os.path.exists(dst_name):\n dst_mtime = _os.stat(dst_name).st_mtime\n else:\n dst_mtime = 0 # file doesn't exist\n\n # if edited more than five seconds ago, compile it\n if src_mtime - dst_mtime >= 5:\n print('Compiling ({}):'.format(self.compiler_name), src)\n cmd = self.compiler_name\n if self.compiler_name == 'csc': # C Sharp specific handling\n if references is not None:\n cmd += ' /r:' + ','.join(references)\n cmd += ' /out:{} '.format(dst_name)\n cmd += '{} \"{}\"'.format(opt_str, src_name)\n if verbose:\n print('cmd:', cmd)\n _os.system(cmd)\n statechanged = True\n\n if not statechanged:\n print('Nothing new to compile in \"{}\" when recurse={}'.format(self.directory, recurse))", "def dump_to_pyc(co, python_version, output_dir):\n # assume Windows path information from the .exe\n pyc_basename = ntpath.basename(co.co_filename)\n pyc_name = f'{pyc_basename}.pyc'\n\n if pyc_name not in IGNORE:\n logging.info(\"Extracting %s\", pyc_name)\n pyc_header = _generate_pyc_header(python_version, len(co.co_code))\n destination = os.path.join(output_dir, pyc_name)\n with open(destination, 'wb') as pyc:\n pyc.write(pyc_header)\n marshaled_code = marshal.dumps(co)\n pyc.write(marshaled_code)\n else:\n logging.info(\"Skipping %s\", pyc_name)", "def compile_modules(base, output, source, bind=True):\n return compile_files(base, output, source, bind, amd=True)", "def make_prog(prog_path: str, c_files: List[File]) -> File:\n o_files = [\n compile(c_file)\n for c_file in c_files\n ]\n prog_file = link(prog_path, o_files)\n return prog_file", "def _compile(self, source: str, filename: str) -> CodeType:\n return compile(source, filename, \"exec\") # type: ignore", "def CompileWithClosure(js_files, definitions, 
entry_points, output_file):\n\n cmd = [\n 'java', '-jar',\n './node_modules/google-closure-compiler-java/compiler.jar',\n '--language_out=ES5_STRICT', '--dependency_mode=PRUNE',\n '--js_output_file=%s' % output_file\n ]\n cmd += ['--entry_point=%s' % e for e in entry_points]\n cmd += ['--output_manifest=%s' % ('%s.manifest' % output_file)]\n cmd += [\n '../node_modules/google-closure-library/closure/**.js',\n '!../node_modules/google-closure-library/closure/**_test.js',\n '../node_modules/google-closure-library/third_party/closure/**.js',\n '!../node_modules/google-closure-library/third_party/closure/**_test.js'\n ]\n cmd += js_files\n cmd += definitions\n subprocess.check_call(cmd)", "def compile_files(base, output, source, bind=True, amd=False):\n\n # Paths\n path = lambda *x: r.path(*[base] + list(x))\n tsc_path = path('node_modules', 'typescript', 'bin', 'tsc.js')\n output_folder = path(*output)\n source_folder = path(*source)\n\n # Compile each file. Sometimes --module seems to screw up the use\n # of --out, so safely check and fix if required.\n def collection(matches, run):\n for path in matches:\n output_module_name = os.path.basename(path)[:-3] + '.js'\n generated_file = os.path.join(source_folder, output_module_name)\n required_file = os.path.join(output_folder, output_module_name)\n run('mkdir', '-p', os.path.dirname(required_file))\n if amd:\n run('node', tsc_path, path, '--module', 'amd', '--out', required_file)\n else:\n run('node', tsc_path, path, '--out', required_file)\n if os.path.exists(generated_file): # wtf?\n run('mv', generated_file, required_file)\n\n # Build\n build = r.build()\n build.notice('Typescript multifile compile')\n build.chdir(source_folder)\n build.collect('.*\\.ts$', collection)\n\n # Target\n target = r.target(timeout=10)\n target.pattern('.*[^d]\\.ts$', dirname(source_folder), recurse=True)\n\n # Bind if required\n if bind:\n r.bind(target, build)\n\n # Run?\n if ruffx.BUILD:\n build.execute()\n\n return build", "def visit_Python(self, node):\n # This compiles the given Python ast into a Python code object\n # then disassembles it into a byteplay code object. This allows\n # us to interleave the instructions with those generated for\n # the rest of the module and then compile a single unified \n # code object.\n py_code = compile(node.py_ast, self.filename, mode='exec')\n bpc = Code.from_code(py_code)\n # Skip the SetLineo and ReturnValue codes\n self.code_ops.extend(bpc.code[1:-2])", "def _get_code_files(self):\n for dirpath, dirnames, filenames in os.walk(self.CodesDirectory):\n for f in filenames:\n rel_name = path.join(dirpath, f)\n if f.endswith('.py'):\n yield (rel_name, 'Python')\n elif f.endswith('.pyx'):\n yield (rel_name, 'PyRex')\n elif f.endswith('.c'):\n yield (rel_name, 'C')\n else:\n pass", "def compile(path: str) -> bytes:\n if not path.endswith('.py'):\n raise InvalidPathException(path)\n\n return Compiler().compile(path)", "def test_py_compile_basic(self):\n self._test_py_compile('basic')", "def filter_python_files(files):\n return [f for f in files if f.endswith('.py')]" ]
[ "0.64949733", "0.64949733", "0.62953174", "0.6256713", "0.61585885", "0.6012568", "0.58619845", "0.58467686", "0.58408356", "0.5704187", "0.56957185", "0.5693709", "0.5662455", "0.5645903", "0.5611024", "0.5549595", "0.55396736", "0.5538459", "0.5522446", "0.5512065", "0.54888564", "0.5469457", "0.5458571", "0.5447377", "0.54335123", "0.5420635", "0.5419334", "0.5399313", "0.53987396", "0.53834003" ]
0.6693471
0
Return a version of the string escaped for inclusion in an RFC822 header, by ensuring there are 8 spaces after each newline.
def rfc822_escape(header):
    lines = header.split('\n')
    sep = '\n' + 8 * ' '
    return sep.join(lines)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tidy_string(s: str\n ) -> str:\n s = s.encode('ascii', errors='ignore').decode(FORMAT)\n s = s.replace(\"\\r\", \"\").replace(\"\\t\", \"\").replace('\\n', '') \n return s", "def beautify(text):\n text = re.sub('\\n{3,}', '\\n\\n', text)\n text = re.sub('\\n+$', '\\n', text)\n return text", "def format_msg(msg):\n if type(msg) == str:\n msg = msg.encode()\n header = str(len(msg))\n header = header.zfill(HEADER_SIZE)\n return header.encode(), msg", "def email_rfc2822_compliance(message, max_line_length=900):\n returnmsg = \"\"\n while len(message) > 0:\n returnmsg = returnmsg + message[:max_line_length] + \"\\r\\n\"\n message = message[max_line_length:]\n\n return returnmsg", "def quoted(string, every=64):\n return \"> \" + re.sub(\"\\r\\n(?=[^\\r\\n])\", \"\\r\\n> \", string)", "def __prepare_content(self, content):\n if isinstance(content, str):\n content = content.encode('utf-8')\n return b\"{%d+}%s%s\" % (len(content), CRLF, content)", "def clean_header(klass, s):\n return re.sub(r\"[\\n\\r\\t]+\", \" \", s).strip()", "def _wadifyString(s):\n\n if len(s) < 8:\n s += \"\\x00\" * (8 - len(s))\n return s", "def _format_message(start_line, header, body):\n # The message begins with the start line, terminated with EOL and encoded.\n msg = (start_line + EOL).encode(HEADER_ENCODING)\n # Convert the header to lines.\n header_lines = _format_header(header)\n # Add them to the message, one by one, each terminated with EOL and encoded.\n for line in header_lines:\n msg = msg + (line + EOL).encode(HEADER_ENCODING)\n # A blank line indicates end of headers.\n msg = msg + EOL.encode(HEADER_ENCODING)\n # The rest of the message is the body.\n msg = msg + body\n return msg", "def email_escape(string):\n return ''.join('&#x%x;' % (ord(c),) for c in string)", "def rfc822(self):\n tzoffset = _tzoffset2rfc822zone(_tzoffset(self._tz, self._t))\n return '%s, %2.2d %s %d %2.2d:%2.2d:%2.2d %s' % (\n self._aday, self._day, self._amon, self._year,\n self._hour, self._minute, self._nearsec, tzoffset)", "def escape_for_display(s) :\n if len(s) == 0 :\n return \"[EMPTY]\"\n return s.replace(\"\\n\",\"[NL]\").replace(\"\\t\",\"[TAB]\") #.replace(\" \",\"[SP]\") # Escape newlines so not to confuse debug output.", "def FixLineEndingsForWindows(self,str):\n # TODO: this should not really be part of this class\n if str[-2:]==\"\\r\\n\":\n return str\n if str[-1:]==\"\\n\":\n return str[:-1]+\"\\r\\n\"\n else:\n return str + \"\\r\\n\"", "def format_value(text):\n return text.encode('utf8').replace('\\n', ' ').replace('\\r', ' ')", "def disp_sec_str(aa_seq):\n return re.sub(\"(.{80})\", \"\\\\1\\n\", aa_seq, 0, re.DOTALL)", "def format_body(self):\n mt = deque(str(self.movetext).split(' ') + [])\n out = mt.popleft()\n ll = len(out)\n while True:\n if len(mt) is 0:\n break\n\n n = mt.popleft()\n # If the current line length + space + character is less than\n # 80 chars long\n if ll + len(n) + 1 < 80:\n to_add = \" \" + n\n out += \" \" + n\n ll += len(to_add)\n else:\n out += \"\\n\" + n\n ll = len(n)\n return out + str(self.score)", "def header_str(a_str, n=80):\n return '{{:=^{:d}}}'.format(n).format(' ' + a_str + ' ')", "def _normalize_linefeeds(a_string):\n newline = re.compile(r'(\\r\\r\\n|\\r\\n|\\n\\r)')\n return newline.sub('\\n', a_string).replace('\\n\\n', '\\n')", "def XHTML_verbatimString(self, string, tab_spaces):\n\n frame_start = 0\n frame_end = 0\n parts = []\n length = len(string)\n while frame_end < length:\n ch = string[frame_end]\n if ch == '\\t':\n parts.append(string[frame_start:frame_end])\n 
parts.append(\" \" * tab_spaces)\n frame_end += 1\n frame_start = frame_end\n else:\n frame_end += 1\n #end if\n #end while\n\n if frame_end > frame_start:\n parts.append(string[frame_start:frame_end])\n\n return \"\".join(parts)", "def striptext(self, rawt):\n ret = ''\n iscomm = False\n it = iter(rawt)\n for char in it:\n if char in self.control.escape and not iscomm:\n ret += char + next(it)\n if ret[-2:] == '\\\\\\n': ret = ret[:-2]\n print(ret)\n elif char in self.control.comment:\n iscomm = True\n # iscomm = not iscomm\n elif char in self.control.linebreak:\n if char in self.control.delims['endline'][0] and ret and \\\n ret[-1] not in self.control.delims['endline'][0]:\n ret += self.control.delims['endline'][0][0]\n iscomm = False\n else:\n if not iscomm:\n ret += char\n if '@eof' in ret:\n ret = ret[0:ret.find('@eof')]\n if not ret or ret[-1] not in self.control.delims['endline'][0]:\n ret += self.control.delims['endline'][0][0]\n return ret", "def addpoemslashes(value):\n return value.replace(\"\\r\", \"\").replace(\"\\n\", ' / ')", "def decode_field(field):\r\n field = field.replace('\\r\\n','')\r\n field = field.replace('\\n','')\r\n\r\n list = email.Header.decode_header (field)\r\n\r\n decoded = \" \".join([\"%s\" % k for (k,v) in list])\r\n\r\n #print \"Decoding [%s] to [%s]\" % (field, decoded)\r\n\r\n return decoded", "def formatted_message(self):\n message = MIMEMultipart()\n message['From'] = self.sender\n message['To'] = self.receiver\n message['Subject'] = self.subject.format(**self.params)\n message.attach(MIMEText(self.body.format(**self.params), 'plain'))\n return message.as_string()", "def escape_newlines(self, the_string):\n the_string = the_string.replace('\\n', r'\"\"\\n\"\"')\n return the_string", "def to_portable_text( line ):\n \n # in case it's zero-length, don't add chars\n if not len(line):\n return ''\n\n return strip_line_ending(line) + '\\n'", "def prepare_text_line(line):\n\n re_sub = re.sub\n # FIXME: maintain the original character positions\n\n # strip whitespace\n line = line.strip()\n\n # strip comment markers\n # common comment characters\n line = line.strip('\\\\/*#%;')\n # un common comment line prefix in dos\n line = re_sub('^rem\\s+', ' ', line)\n line = re_sub('^\\@rem\\s+', ' ', line)\n # un common comment line prefix in autotools am/in\n line = re_sub('^dnl\\s+', ' ', line)\n # un common comment line prefix in man pages\n line = re_sub('^\\.\\\\\\\\\"', ' ', line)\n # un common pipe chars in some ascii art\n line = line.replace('|', ' ')\n\n # normalize copyright signs and spacing aournd them\n line = line.replace('(C)', ' (c) ')\n line = line.replace('(c)', ' (c) ')\n # the case of \\251 is tested by 'weirdencoding.h'\n line = line.replace(u'\\251', u' (c) ')\n line = line.replace('&copy;', ' (c) ')\n line = line.replace('&#169;', ' (c) ')\n line = line.replace('&#xa9;', ' (c) ')\n line = line.replace(u'\\xa9', ' (c) ')\n # FIXME: what is \\xc2???\n line = line.replace(u'\\xc2', '')\n\n # TODO: add more HTML entities replacements\n # see http://www.htmlhelp.com/reference/html40/entities/special.html\n # convert html entities &#13;&#10; CR LF to space\n line = line.replace(u'&#13;&#10;', ' ')\n line = line.replace(u'&#13;', ' ')\n line = line.replace(u'&#10;', ' ')\n\n # normalize (possibly repeated) quotes to unique single quote '\n # backticks ` and \"\n line = line.replace(u'`', \"'\")\n line = line.replace(u'\"', \"'\")\n line = re.sub(MULTIQUOTES_RE(), \"'\", line)\n # quotes to space? 
but t'so will be wrecked\n # line = line.replace(u\"'\", ' ')\n\n # some trailing garbage ')\n line = line.replace(\"')\", ' ')\n\n\n # note that we do not replace the debian tag by a space: we remove it\n line = re_sub(DEBIAN_COPYRIGHT_TAGS_RE(), '', line)\n\n line = re_sub(IGNORED_PUNCTUATION_RE(), ' ', line)\n\n # tabs to spaces\n line = line.replace('\\t', ' ')\n\n # normalize spaces around commas\n line = line.replace(' , ', ', ')\n\n # remove ASCII \"line decorations\"\n # such as in --- or === or !!! or *****\n line = re_sub(ASCII_LINE_DECO_RE(), ' ', line)\n line = re_sub(ASCII_LINE_DECO2_RE(), ' ', line)\n\n # Replace escaped literal \\0 \\n \\r \\t that may exist as-is by a space\n # such as in code literals: a=\"\\\\n some text\"\n line = line.replace('\\\\r', ' ')\n line = line.replace('\\\\n', ' ')\n line = line.replace('\\\\t', ' ')\n line = line.replace('\\\\0', ' ')\n\n # TODO: Why?\n # replace contiguous spaces with only one occurrence\n # line = re.sub(WHITESPACE_RE(), ' ', text)\n\n # normalize to ascii text\n line = commoncode.text.toascii(line)\n # logger.debug(\"ascii_only_text: \" + text)\n\n # strip verbatim back slash and comment signs again at both ends of a line\n # FIXME: this is done at the start of this function already\n line = line.strip('\\\\/*#%;')\n\n # normalize to use only LF as line endings so we can split correctly\n # and keep line endings\n line = commoncode.text.unixlinesep(line)\n # why?\n line = lowercase_well_known_word(line)\n\n return line", "def format_byte_string(self, value, encoding, use_newline) -> bytes:\n\n val = use_newline or b'\\r\\n'\n str_detect = b'%s' % bytes([value, val], encoding=encoding)\n return str_detect", "def mywrap(text):\n text = text.replace(\"\\n\\n\", \"\\n\").replace(\"\\n\", \"\\n \").rstrip()\n return text", "def normalizeRawFromHeader(value):\n return value.replace('\\n', '').replace('\\r', '').strip()", "def fmt(message, prefix):\n message = str(message).splitlines(True)\n if not message:\n return prefix\n output = \"\"\n for index, line in enumerate(message):\n if index >= 1 and not line.strip():\n output += line\n else:\n output += \"{} {}\".format(prefix, line)\n return output" ]
[ "0.6096191", "0.5934842", "0.59084433", "0.59060633", "0.5887525", "0.5677352", "0.5667074", "0.56499064", "0.56388175", "0.55878687", "0.55177206", "0.5498205", "0.5489415", "0.54656357", "0.5455645", "0.54367644", "0.54303896", "0.5425611", "0.53949904", "0.538119", "0.5353119", "0.53238946", "0.5314345", "0.52957845", "0.52854836", "0.5280037", "0.5277499", "0.52756333", "0.5270166", "0.52685475" ]
0.7909112
0
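A minimal usage sketch (not part of the dataset row) for the rfc822_escape document above; the header value is invented for illustration:

def rfc822_escape(header):
    lines = header.split('\n')
    sep = '\n' + 8 * ' '
    return sep.join(lines)

description = "First line\nSecond line"
# Every newline in the value is followed by an 8-space continuation indent,
# so a multi-line value stays inside a single RFC822 header field.
assert rfc822_escape(description) == "First line\n        Second line"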
Finds the ld version. The version scheme differs under Mac OSX.
def _find_ld_version():
    if sys.platform == 'darwin':
        return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)
    else:
        return _find_exe_version('ld -v')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_version():\n # this implementation avoids calling Foundation and will work on\n # non Apple OSes.\n vers = \"UNKNOWN\"\n build = \"\"\n # find the munkilib directory, and the version file\n munkilibdir = os.path.dirname(os.path.abspath(__file__))\n versionfile = os.path.join(munkilibdir, \"version.plist\")\n if os.path.exists(versionfile):\n try:\n vers_plist = readPlist(versionfile)\n except (IOError, OSError, ExpatError):\n pass\n else:\n try:\n vers = vers_plist['CFBundleShortVersionString']\n build = vers_plist['BuildNumber']\n except KeyError:\n pass\n if build:\n vers = vers + \".\" + build\n return vers", "def get_version():\n version_file = Path(__file__).resolve().parent / \"clinker\" / \"__init__.py\"\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file.read_text(), re.M\n )\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Failed to find version string\")", "def detect_version_str(self):\n c3d_bin_path = op.dirname(self.locate_command())\n if platform.system() == 'Linux':\n libname = os.listdir(op.join(c3d_bin_path, '..', 'lib'))[0]\n version_str = libname.split('-')[-1]\n elif platform.system() == 'Darwin':\n info_list_path = op.join(c3d_bin_path, '..', 'Info.plist')\n info_etree = xml.etree.ElementTree.parse(info_list_path)\n elem_bodies = [e.text for e in info_etree.iter()]\n version_str = elem_bodies[\n elem_bodies.index('CFBundleShortVersionString') + 1]\n else:\n raise ArcanaVersionNotDetectableError(\n \"Can't detect c3d version on Windows\")\n return version_str", "def getLibVersion():\n return \"Software Development Library for Linux 1.999.1\"", "def find_version(e):\n args = e.split()\n args += ['-shared', '-Wl,-t']\n p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)\n candidates = [x for x in p.stdout if 'libstdc++.so' in x]\n if not candidates:\n return ''\n assert len(candidates) == 1\n libstdcxx = parse_ld_line(candidates[-1])\n\n p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)\n versions = [parse_readelf_line(x)\n for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]\n last_version = sorted(versions, cmp = cmp_ver)[-1]\n return encode_ver(last_version)", "def guessNSVersion( elfPath, linkName, defaultVersion ):\n\n # Guess 1\n cmdLine = [ elfPath, \"-version\",\n \"|\", grep, \"Package:\" ]\n line = safeRun( cmdLine ).strip()\n if line != \"\":\n # e.g.: Package: netschedule 4.8.1, build Jun 7 2011 16:07:33\n line = line.split( ',' )[ 0 ].replace( 'Package: netschedule ', \"\" )\n if line != '0.0.0':\n if isVersionFormat( line ):\n return line\n\n # Guess 2\n line = os.path.basename( linkName )\n if isVersionFormat( line ):\n return line\n\n return defaultVersion", "def get_linked_libpython():\n if is_windows():\n return\n libdl = ctypes.CDLL(ctypes.util.find_library(\"dl\"))\n libdl.dladdr.argtypes = [ctypes.c_void_p, ctypes.POINTER(_Dl_info)]\n libdl.dladdr.restype = ctypes.c_int\n\n dlinfo = _Dl_info()\n retcode = libdl.dladdr(\n ctypes.cast(ctypes.pythonapi.Py_GetVersion, ctypes.c_void_p),\n ctypes.pointer(dlinfo))\n if retcode == 0: # means error\n return\n path = os.path.realpath(dlinfo.dli_fname.decode())\n if path == os.path.realpath(sys.executable):\n return\n return path", "def src_get_version():\n return ffi.string(_lib.src_get_version()).decode()", "def version():\n return uname().version", "def version():\n return uname().version", "def version():\n cmd = \"{} -v\".format(_detect_os())\n out = 
__salt__[\"cmd.run\"](cmd).splitlines()\n ret = out[0].split(\": \")\n return ret[1]", "def unravel_ver():\n unravel_version_path = \"/usr/local/unravel/ngui/www/version.txt\"\n unravel_ver = \"UNKNOWN\"\n if os.path.exists(unravel_version_path):\n with open(unravel_version_path, 'r') as f:\n version_file = f.read()\n f.close()\n if re.search('4\\.[0-9]\\.[0-9].*', version_file):\n return re.search('4\\.[0-9]\\.[0-9].*', version_file).group(0)\n return unravel_ver", "def read_version():\n # code parts were taken from here https://stackoverflow.com/a/67692\n\n path2setup = os.path.dirname(__file__)\n version_file = os.path.abspath(\n os.path.join(path2setup, \"diffusion_maps\", \"version.py\"))\n\n spec = importlib.util.spec_from_file_location(\"version\", version_file)\n version = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(version)\n return version.version.v_short", "def geomdl_version():\n return tuple(__version__.split('.'))", "def getOsVersion():\n os_version_tuple = platform.mac_ver()[0].split('.')\n return int(os_version_tuple[1])", "def fl_library_version():\n _fl_library_version = library.cfuncproto(\n library.load_so_libforms(), \"fl_library_version\", \\\n cty.c_int, [cty.POINTER(cty.c_int), cty.POINTER(cty.c_int)], \\\n \"\"\"int fl_library_version(int * ver, int * rev) \"\"\")\n i_ver, ptr_ver = library.make_intc_and_pointer()\n i_rev, ptr_rev = library.make_intc_and_pointer()\n library.keep_elem_refs(i_ver, i_rev, ptr_ver, ptr_rev)\n retval = _fl_library_version(ptr_ver, ptr_rev)\n return retval, i_ver.value, i_rev.value", "def find_version():\n regex = r\"^ATRAM_VERSION = ['\\\"]v?([^'\\\"]*)['\\\"]\"\n with open(\"./lib/db.py\", 'r') as f:\n match = re.search(regex, f.read(), re.M)\n if match:\n return match.group(1)\n\n raise RuntimeError(\"Unable to find version string.\")", "def get_version():\n return '%d.%d.%d' % version_info", "def get_version():\n with io.open(os.path.join(SCRIPT_DIR, 'oasislmf', '__init__.py'), encoding='utf-8') as init_py:\n return re.search('__version__ = [\\'\"]([^\\'\"]+)[\\'\"]', init_py.read()).group(1)", "def _find_mkl():\n mkl_lib = None\n if _blas_info() == 'INTEL MKL':\n plat = sys.platform\n python_dir = os.path.dirname(sys.executable)\n if plat in ['darwin', 'linux2', 'linux']:\n python_dir = os.path.dirname(python_dir)\n\n if plat == 'darwin':\n lib = '/libmkl_rt.dylib'\n elif plat == 'win32':\n lib = r'\\mkl_rt.dll'\n elif plat in ['linux2', 'linux']:\n lib = '/libmkl_rt.so'\n else:\n raise Exception('Unknown platfrom.')\n\n if plat in ['darwin', 'linux2', 'linux']:\n lib_dir = '/lib'\n else:\n lib_dir = r'\\Library\\bin'\n # Try in default Anaconda location first\n try:\n mkl_lib = cdll.LoadLibrary(python_dir+lib_dir+lib)\n except Exception:\n pass\n\n # Look in Intel Python distro location\n if mkl_lib is None:\n if plat in ['darwin', 'linux2', 'linux']:\n lib_dir = '/ext/lib'\n else:\n lib_dir = r'\\ext\\lib'\n try:\n mkl_lib = \\\n cdll.LoadLibrary(python_dir + lib_dir + lib)\n except Exception:\n pass\n return mkl_lib", "def get_version():\n return harmony.__version__", "def get_tasmota_version():\n matches = []\n with open(tasmotadir + \"/sonoff/sonoff_version.h\", \"r\") as f:\n for line in f:\n matches += findall('0x\\d+', line)\n if len(matches) == 0:\n raise Exception('No tasmota version found.')\n elif len(matches) == 1:\n return matches[0]\n else:\n raise IndexError('Too many tasmota versions found.')", "def find_xcode_major_version():\n cmd = ['xcodebuild', '-version']\n command_trace.log(cmd)\n\n result 
= str(subprocess.check_output(cmd))\n version = result.split('\\n', 1)[0]\n version = re.sub(r'Xcode ', '', version)\n version = re.sub(r'\\..*', '', version)\n return int(version)", "def target_glibc_version(self, app):\n if self.use_docker:\n try:\n output = self.tools.docker.check_output(\n [\"ldd\", \"--version\"],\n image_tag=app.target_image,\n )\n # On Debian/Ubuntu, ldd --version will give you output of the form:\n #\n # ldd (Ubuntu GLIBC 2.31-0ubuntu9.9) 2.31\n # Copyright (C) 2020 Free Software Foundation, Inc.\n # ...\n #\n # Other platforms produce output of the form:\n #\n # ldd (GNU libc) 2.36\n # Copyright (C) 2020 Free Software Foundation, Inc.\n # ...\n #\n # Note that the exact text will vary version to version.\n # Look for the \"2.NN\" pattern.\n if match := re.search(r\"\\d\\.\\d+\", output):\n target_glibc = match.group(0)\n else:\n raise BriefcaseCommandError(\n \"Unable to parse glibc dependency version from version string.\"\n )\n except subprocess.CalledProcessError:\n raise BriefcaseCommandError(\n \"Unable to determine glibc dependency version.\"\n )\n\n else:\n target_glibc = self.tools.os.confstr(\"CS_GNU_LIBC_VERSION\").split()[1]\n\n return target_glibc", "def _get_installed_version(self):\n\n # We support native versions for everything except Windows.\n if self.target_platform_name == 'android':\n root_dir = self.android_ndk_sysroot\n elif self.target_platform_name in ('ios', 'macos'):\n root_dir = self.apple_sdk\n elif self.target_platform_name == 'linux':\n root_dir = ''\n else:\n self.error(\n \"using an existing installation is not supported for \"\n \"Windows targets\")\n\n version_file = root_dir + '/usr/include/zlib.h'\n version_line = self.get_version_from_file('ZLIB_VERSION', version_file)\n\n version_str = version_line.split()[-1]\n if version_str.startswith('\"'):\n version_str = version_str[1:]\n if version_str.endswith('\"'):\n version_str = version_str[:-1]\n\n return self.parse_version_number(version_str)", "def installedVersion():\n\n cmd = f'{dcm2niix()} -h'\n versionPattern = re.compile(r'v'\n r'(?P<major>[0-9]+)\\.'\n r'(?P<minor>[0-9]+)\\.'\n r'(?P<year>[0-9]{4})'\n r'(?P<month>[0-9]{2})'\n r'(?P<day>[0-9]{2})')\n\n try:\n output = sp.check_output(cmd.split()).decode()\n output = [l for l in output.split('\\n') if 'version' in l.lower()]\n output = '\\n'.join(output).split()\n\n for word in output:\n\n match = re.match(versionPattern, word)\n\n if match is not None:\n return (int(match.group('major')),\n int(match.group('minor')),\n int(match.group('year')),\n int(match.group('month')),\n int(match.group('day')))\n\n except Exception as e:\n log.debug(f'Error parsing dcm2niix version string: {e}')\n return None", "def findjdks_mac():\n\tversions = {}\n\tdirs = []\n\tfor jdk in glob.glob(\"/Library/Java/JavaVirtualMachines/*\"):\n\t\tdirs.append(jdk)\n\tfor jdk in glob.glob(\"/System/Library/Java/JavaVirtualMachines/*\"):\n\t\tdirs.append(jdk)\n\tfor jdk in dirs:\n\t\tname = os.path.basename(jdk)\n\t\tif name.startswith(\"1.6.\"):\n\t\t\tversions[\"1.6\"] = jdk + \"/Contents/Home\"\n\t\telif name.startswith(\"jdk1.7.\"):\n\t\t\tversions[\"1.7\"] = jdk + \"/Contents/Home\"\n\t\telif name.startswith(\"jdk1.8.\"):\n\t\t\tversions[\"1.8\"] = jdk + \"/Contents/Home\"\n\treturn versions", "def find_version():\n _locals = locals()\n src_dir = os.path.abspath(os.path.dirname(__file__))\n version_file = os.path.join(src_dir, 'loudml', '_version.py')\n with io_open(version_file, mode='r') as fd:\n exec(fd.read()) # __version__ is set in the exec 
call.\n return _locals['__version__']", "def getVersion():\n try:\n fh=open(version_py, 'r')\n version=fh.read().strip().split('=')[-1].replace(\"'\",'').lstrip()\n fh.close()\n except:\n return None\n\n return version", "def latestidd():\n pth, _ = run_functions.install_paths(\n version=\"8.8.0\"\n ) # works with any value in version\n dirpth = os.path.dirname(pth)\n dirpth = os.path.dirname(dirpth)\n alldirs = os.listdir(dirpth)\n eplusdirs = [dir for dir in alldirs if dir.startswith(\"EnergyPlus\")]\n maxapp = max(eplusdirs)\n ver = folder2ver(maxapp)\n return ver" ]
[ "0.7059902", "0.6503561", "0.64812005", "0.64738697", "0.6416523", "0.6415537", "0.61217123", "0.6084685", "0.6074208", "0.6074208", "0.60129017", "0.60093343", "0.5987961", "0.59856707", "0.593912", "0.59271336", "0.59125006", "0.5905193", "0.58683425", "0.5782932", "0.57724047", "0.57695204", "0.5766823", "0.5758741", "0.57571363", "0.5746132", "0.5734529", "0.5731949", "0.57175237", "0.5714565" ]
0.8556363
0
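A hedged sketch of the Mac-specific branch the _find_ld_version document depends on; both the regex and the sample 'ld -v' banner below are assumptions made for illustration, not values taken from the row:

import re

# Assumed shape of _MAC_OS_X_LD_VERSION: Apple's ld reports a ld64 project
# number rather than a plain version, so a dedicated pattern is needed.
_MAC_OS_X_LD_VERSION = re.compile(r'^@\(#\)PROGRAM:ld PROJECT:ld64-((\d+)(\.\d+)*)')

banner = '@(#)PROGRAM:ld PROJECT:ld64-609.8'   # invented sample output
match = _MAC_OS_X_LD_VERSION.search(banner)
assert match is not None and match.group(1) == '609.8'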
Find the version of an executable by running `cmd` in the shell. `pattern` is a compiled regular expression. If not provided, default to _RE_VERSION. If the command is not found, or the output does not match the pattern, returns None.
def _find_exe_version(cmd, pattern=_RE_VERSION):
    from subprocess import Popen, PIPE
    executable = cmd.split()[0]
    if find_executable(executable) is None:
        return None
    pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
    try:
        stdout, stderr = pipe.stdout.read(), pipe.stderr.read()
    finally:
        pipe.stdout.close()
        pipe.stderr.close()
    # some commands like ld under MacOS X, will give the
    # output in the stderr, rather than stdout.
    if stdout != '':
        out_string = stdout
    else:
        out_string = stderr
    result = pattern.search(out_string)
    if result is None:
        return None
    return result.group(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_version():\n import subprocess\n from tempfile import NamedTemporaryFile\n proc = subprocess.Popen(lm_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n try:\n out, stderr = proc.communicate(timeout=15)\n except TimeoutError:\n proc.kill()\n outs, stderr = proc.communicate()\n raise RuntimeError(\"command timed out\")\n else:\n m = ver_regex.search(stderr.split(b'\\n')[0])\n if m is None:\n log.debug(\"unable to parse version from output: %s\", stderr)\n raise RuntimeError(\"unable to determine version from command output\")\n else:\n return m.group(1).decode('ascii')", "def search_output_from_cmd(cmd: str, find_regex: typing.Pattern) -> typing.Match:\n # Run the given command in a shell\n _, stdout, _ = run_cmd(cmd)\n\n # Search for the object\n content = re.search(find_regex, stdout)\n\n # If the result is not None, return it\n if content:\n return content\n\n raise RuntimeError(\n \"Could not find '%s' in output for '%s'\" % (find_regex.pattern, cmd)\n )", "def match_output(pattern, cmd):\n if isinstance(cmd, str):\n cmd = shlex.split(cmd)\n\n return re.findall(pattern, subprocess.check_output(cmd))", "def get_version_from_executable(\n cls,\n bin_path: Union[Path, str],\n *,\n cwd: Optional[Union[Path, str]] = None,\n env: Optional[Dict[str, str]] = None,\n ) -> Optional[Version]:\n output = subprocess.check_output(\n [str(bin_path), \"-version\"], cwd=cwd, env=env\n ).decode()\n match = re.search(cls.VERSION_OUTPUT_REGEX, output)\n if not match:\n return None\n return cls.parse_version_string(output)", "def version():\n\n version = None\n output = gitopen(['--version'])\n m = re.search(br\" version ([\\d\\.A-Za-z]+)\", output)\n if m is not None:\n version = m.group(1).decode('utf-8')\n return version", "def find_version():\n regex = r\"^ATRAM_VERSION = ['\\\"]v?([^'\\\"]*)['\\\"]\"\n with open(\"./lib/db.py\", 'r') as f:\n match = re.search(regex, f.read(), re.M)\n if match:\n return match.group(1)\n\n raise RuntimeError(\"Unable to find version string.\")", "def get_setup_version():\n if os.path.isdir(\".git\"):\n process = subprocess.Popen(COMMAND_DESCRIBE_VERSION, **SUBPROCESS_KWARGS)\n process.wait()\n version = process.communicate()[0].decode(\"utf-8\").strip()\n return re.match(re_version, version).group(1)\n else:\n return \"0.1\"", "def find_version():\n version_file = read_file('__init__.py')\n version_match = re.search(r'^__version__ = [\"\\']([^\"\\']*)[\"\\']',\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError('Unable to find version string.')", "def version(self, *args, **kwargs):\n\n stdout, stderr = self.ctx.execute((self.exe, '--version'), quieter=1)\n\n m = re.match(\n r'(?:Apple clang .* \\(based on LLVM (\\S+)\\))'\n r'|'\n r'(?:clang version (\\S+))', stdout.decode())\n if m:\n if m.group(1):\n return m.group(1)\n else:\n return m.group(2)\n\n return None", "def fiwalk_installed_version(fiwalk='fiwalk'):\n from subprocess import Popen,PIPE\n import re\n for line in Popen([fiwalk,'-V'],stdout=PIPE).stdout.read().split(\"\\n\"):\n g = re.search(\"^FIWalk Version:\\s+(.*)$\",line)\n if g:\n return g.group(1)\n return None", "def get_subproc_version(script_path):\n\n command = \"{} version_only\".format(script_path)\n try:\n result = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)\n except subprocess.CalledProcessError as e:\n logger.debug(\n \"COMMAND[{}]: {} -- {}\".format(\n e.returncode, command, e.output.decode(\"utf-8\", \"ignore\").strip()\n )\n )\n return \"version not 
available\"\n\n # success:\n version = result.decode(\"utf-8\", \"ignore\").strip()\n logger.debug(\"COMMAND[0]: ({}) -- result({})\".format(command, version))\n return version", "def check_version (tool, command, option):\n response = subprocess.Popen([command, option], \n stdout=subprocess.PIPE, \n stderr=subprocess.STDOUT,\n universal_newlines=True) # Needed to strip off extra pre and post formatting characters\n stdout, stderr = response.communicate()\n response_status = response.wait()\n\n print('Currently installed', tool, 'version: ', stdout)\n return", "def get_version():\n\n with open('u2fval/__init__.py', 'r') as f:\n match = VERSION_PATTERN.search(f.read())\n return match.group(1)", "def __queryLatest(versionsPath, versionPattern):\n version = 0\n patternParts = __splitVersionPattern(versionPattern)\n versionRegEx = \"^\"+patternParts['prefix']+\"[0-9]{\"+str(len(patternParts['padding']))+\",}\"+patternParts['suffix']+\"$\"\n\n # finding the latest version\n if os.path.exists(versionsPath):\n for directory in os.listdir(versionsPath):\n if re.match(versionRegEx, directory):\n version = max(\n int(verNumber(directory, versionPattern)),\n version\n )\n return version", "def get_Version(param):\n\n line = subprocess.check_output(['grep', 'Processed with ASKAPsoft', param])\n str_line = line.decode('utf-8')\n\n askapsoft = re.findall('ASKAPsoft\\ version\\ [0-9].+', str_line)[0].split()[-1]\n\n return askapsoft", "def get_version():\n version_file = Path(__file__).resolve().parent / \"clinker\" / \"__init__.py\"\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file.read_text(), re.M\n )\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Failed to find version string\")", "def get_version(program: str) -> str:\n import subprocess\n cmd = \"dpkg -l | grep '{}'\".format(program)\n process = subprocess.Popen([cmd], shell=True, stdout=subprocess.PIPE,\n stdin=subprocess.PIPE)\n (out, _) = process.communicate()\n result = out.decode()\n version = result.split()\n\n if len(version) >= 3:\n if version[1] == program:\n return version[2]\n return \"Cannot find version for '{}'\".format(program)", "def version(path):\n here = os.path.abspath(os.path.dirname(__file__))\n with open(os.path.join(here, path), encoding='utf-8') as f:\n version_file = f.read()\n version_match = re.search(r\"\"\"^__version__ = ['\"]([^'\"]*)['\"]\"\"\",\n version_file, re.M)\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Unable to find version string.\")", "def get_external_version(self, path=None):\n exe = self.command.split()[0] if path is None else path\n try:\n p = subprocess.Popen(exe + ' --version', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n #p.wait() #block the rest\n stdo, stde = p.communicate()\n stdr = p.returncode\n if stdr > 0:\n raise RuntimeError(\"Could not check version of \" + exe + \" - Please check your installation and FRED2 \"\n \"wrapper implementation.\")\n except Exception as e:\n raise RuntimeError(e)\n return str(stdo).strip()", "def test_version(self):\n v = version('/no/such/executable')\n self.assertEqual(v, '0.0.1.dev0')\n v = version('false')\n self.assertEqual(v, '0.0.1.dev0')\n v = version('echo')\n self.assertEqual(v, 'describe .devrev-list --count HEAD')", "def get_version(self):\n\t\ttry:\n\t\t\tp = subprocess.Popen([self.sexpath], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\t\texcept:\n\t\t\traise RuntimeError(\"Could not run SExtractor. 
Is the path '%s' correct ? If not, specify sexpath='/path/to/sextractor'\" % self.sexpath)\n\t\tout, err = p.communicate()\n\t\tversion_match = re.search(\"[Vv]ersion ([0-9\\.])+\", err.decode(encoding='UTF-8'))\n\t\tif version_match is False:\n\t\t\traise RuntimeError(\"Could not determine SExctractor version, check the output of running '%s'\" % (self.sexpath))\n\t\tversion = str(version_match.group()[8:])\n\t\tassert len(version) != 0\n\t\treturn version", "def execute_version_command() -> MarkdownString:\n return MarkdownString(f\"{Constants.MAGIC_PACKAGE_NAME} version: {VERSION}\", title=\"version\")", "def get_matches_commandline(self, match_pattern):\n\n matches = []\n for _process in self.processes:\n if re.search(match_pattern, _process[\"cmd\"]):\n matches.append(_process[\"pid\"])\n return matches", "def get_version():\n\n with open('yubico/yubico_version.py', 'r') as f:\n match = VERSION_PATTERN.search(f.read())\n return match.group(1)", "def get_version(package):\n print(os.path.join(package, '__init__.py'))\n init_py = open(os.path.join(package, '__init__.py')).read()\n return re.match(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)", "def verNumber(version, versionPattern=''):\n if not versionPattern:\n versionPattern = os.environ.get('KOMBI_VERSION_PATTERN', DEFAULT_VERSION_PATTERN)\n\n patternParts = __splitVersionPattern(versionPattern)\n return str(version)[len(patternParts['prefix']): len(patternParts['prefix']) + len(patternParts['padding'])]", "def tesseract_version():\n result = None\n\n try:\n command = _config.command\n\n if \" \" in command:\n command = _escape_path(command)\n\n command += \" --version\"\n status, output, err_string = _proc_exec_wait(command, True)\n\n if status == 0:\n result = LooseVersion(output.split()[1].lstrip(string.printable[10:]))\n except Exception as e:\n _warn(\n \"tesseract_version: Unable to retrieve Tesseract version. \"\n \"Error: {0}\".format(e)\n )\n\n return result", "def __get_version_seq_typing():\n\n try:\n cli = [\"seq_typing.py\", \"--version\"]\n p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE)\n stdout = p.communicate()[0]\n\n version = stdout.splitlines()[0].split()[-1].decode(\"utf8\")\n except Exception as e:\n logger.debug(e)\n version = \"undefined\"\n\n return version", "def get_version(package):\n with open(os.path.join(package, '__init__.py'), 'rb') as init_py:\n src = init_py.read().decode('utf-8')\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", src).group(1)", "def version_number(path: str) -> str:\n exp = r'__version__[ ]*=[ ]*[\"|\\']([\\d]+\\.[\\d]+\\.[\\d]+[\\.dev[\\d]*]?)[\"|\\']'\n version_re = re.compile(exp)\n\n with open(path, 'r') as fqe_version:\n version = version_re.search(fqe_version.read()).group(1)\n\n return version" ]
[ "0.6729657", "0.64490074", "0.61166817", "0.58071357", "0.5795342", "0.5723984", "0.5637662", "0.5618396", "0.5613655", "0.55541384", "0.55272096", "0.54937875", "0.5464113", "0.5462051", "0.53953546", "0.5378721", "0.5374339", "0.5371101", "0.53526723", "0.5351424", "0.5349801", "0.53426975", "0.53360957", "0.53346103", "0.5294396", "0.52828896", "0.52817756", "0.52720547", "0.5269326", "0.5238573" ]
0.842616
0
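An illustrative check of the default-pattern path in the _find_exe_version document above; _RE_VERSION is defined elsewhere in the module, so the re-creation and the sample command output here are assumptions:

import re

_RE_VERSION = re.compile(r'(\d+\.\d+(\.\d+)*)')   # assumed default pattern

sample_output = 'GNU ld (GNU Binutils) 2.38'      # invented command output
match = _RE_VERSION.search(sample_output)
# group(1) is what _find_exe_version would return for this output
assert match is not None and match.group(1) == '2.38'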
Returns a tuple providing the versions of gcc, ld and dllwrap. For each command, if a command is not found, None is returned. Otherwise a string with the version is returned.
def get_compiler_versions():
    gcc = _find_exe_version('gcc -dumpversion')
    ld = _find_ld_version()
    dllwrap = _find_exe_version('dllwrap --version')
    return gcc, ld, dllwrap
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_gcc_ver(exe=\"gcc\"):\n cmd = [exe, '-v']\n major = -1\n minor = -1\n patch = -1\n raw = sub.check_output(cmd, stderr=sub.STDOUT).decode('ascii').lower().split('\\n')\n for line in raw:\n if line.startswith('gcc version'):\n tokens = line.split()\n # we obtain a version string such as \"5.4.0\"\n verstr = tokens[2].strip()\n vertup = verstr.split('.')\n major = int(vertup[0])\n minor = int(vertup[1])\n patch = int(vertup[2])\n ver = major, minor, patch\n return ver", "def version() -> typing.Tuple[int, ...]:\n cmd = [DOT_BINARY, '-V']\n log.debug('run %r', cmd)\n proc = run_check(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='ascii')\n\n ma = re.search(r'graphviz version'\n r' '\n r'(\\d+)\\.(\\d+)'\n r'(?:\\.(\\d+)'\n r'(?:'\n r'~dev\\.\\d{8}\\.\\d{4}'\n r'|'\n r'\\.(\\d+)'\n r')?'\n r')?'\n r' ', proc.stdout)\n if ma is None:\n raise RuntimeError(f'cannot parse {cmd!r} output: {proc.stdout!r}')\n\n return tuple(int(d) for d in ma.groups() if d is not None)", "def libc_ver(executable=None, lib='', version='', chunksize=16384):\n if not executable:\n try:\n ver = os.confstr('CS_GNU_LIBC_VERSION')\n # parse 'glibc 2.28' as ('glibc', '2.28')\n parts = ver.split(maxsplit=1)\n if len(parts) == 2:\n return tuple(parts)\n except (AttributeError, ValueError, OSError):\n # os.confstr() or CS_GNU_LIBC_VERSION value not available\n pass\n\n executable = sys.executable\n\n if not executable:\n # sys.executable is not set.\n return lib, version\n\n V = _comparable_version\n # We use os.path.realpath()\n # here to work around problems with Cygwin not being\n # able to open symlinks for reading\n executable = os.path.realpath(executable)\n with open(executable, 'rb') as f:\n binary = f.read(chunksize)\n pos = 0\n while pos < len(binary):\n if b'libc' in binary or b'GLIBC' in binary:\n m = _libc_search.search(binary, pos)\n else:\n m = None\n if not m or m.end() == len(binary):\n chunk = f.read(chunksize)\n if chunk:\n binary = binary[max(pos, len(binary) - 1000):] + chunk\n pos = 0\n continue\n if not m:\n break\n libcinit, glibc, glibcversion, so, threads, soversion = [\n s.decode('latin1') if s is not None else s\n for s in m.groups()]\n if libcinit and not lib:\n lib = 'libc'\n elif glibc:\n if lib != 'glibc':\n lib = 'glibc'\n version = glibcversion\n elif V(glibcversion) > V(version):\n version = glibcversion\n elif so:\n if lib != 'glibc':\n lib = 'libc'\n if soversion and (not version or V(soversion) > V(version)):\n version = soversion\n if threads and version[-len(threads):] != threads:\n version = version + threads\n pos = m.end()\n return lib, version", "def version():\n import inspect\n import shlex\n import subprocess\n\n def output(command):\n path = os.path.realpath(os.path.dirname(inspect.stack(0)[0][1]))\n return subprocess.check_output(shlex.split(command), cwd=path).strip()\n\n return (\n output(\"git rev-parse --show-toplevel\"),\n output(\"git remote get-url origin\"),\n output(\"git describe --always\"),\n )", "def fullversion():\n cmd = \"{} -V\".format(_detect_os())\n ret = {}\n ret[\"compiled_with\"] = []\n out = __salt__[\"cmd.run\"](cmd).splitlines()\n # Example\n # -D APR_HAS_MMAP\n define_re = re.compile(r\"^\\s+-D\\s+\")\n for line in out:\n if \": \" in line:\n comps = line.split(\": \")\n if not comps:\n continue\n ret[comps[0].strip().lower().replace(\" \", \"_\")] = comps[1].strip()\n elif \" -D\" in line:\n cwith = define_re.sub(\"\", line)\n ret[\"compiled_with\"].append(cwith)\n return ret", "def _syscmd_ver(system='', release='', 
version='',\n\n supported_platforms=('win32', 'win16', 'dos')):\n if sys.platform not in supported_platforms:\n return system, release, version\n\n # Try some common cmd strings\n import subprocess\n for cmd in ('ver', 'command /c ver', 'cmd /c ver'):\n try:\n info = subprocess.check_output(cmd,\n stdin=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL,\n text=True,\n encoding=\"locale\",\n shell=True)\n except (OSError, subprocess.CalledProcessError) as why:\n #print('Command %s failed: %s' % (cmd, why))\n continue\n else:\n break\n else:\n return system, release, version\n\n # Parse the output\n info = info.strip()\n m = _ver_output.match(info)\n if m is not None:\n system, release, version = m.groups()\n # Strip trailing dots from version and release\n if release[-1] == '.':\n release = release[:-1]\n if version[-1] == '.':\n version = version[:-1]\n # Normalize the version and build strings (eliminating additional\n # zeros)\n version = _norm_version(version)\n return system, release, version", "def _get_version():\n try:\n code, output = _run_cmd('git', 'describe', '--tags')\n if code:\n return 'unknown'\n output = output.decode('utf8').strip().split('-')\n if len(output) != 3:\n return 'unknown'\n version = '%s+%s' % (output[0], output[2])\n\n code, _ = _run_cmd('git', 'diff', '--quiet')\n if code:\n version += '+dirty'\n\n return version\n except OSError:\n return 'unknown'", "def get_version():\n try:\n return check_output(\n \"git describe --tags\".split(\" \")\n ).decode('utf-8').strip()\n except CalledProcessError:\n return check_output(\n \"git rev-parse --short HEAD\".split(\" \")\n ).decode('utf-8').strip()", "def gcc_version(gcc):\n\tversion = \"\"\n\ttry:\n\t\tversion = os.popen(\"%s --version\" % gcc).readline().split()[-1]\n\texcept:\n\t\tpass\n\treturn version", "def get_version():\n major=c_int_t(0)\n minor=c_int_t(0)\n patch=c_int_t(0)\n safe_call(backend.get().af_get_version(c_pointer(major), c_pointer(minor), c_pointer(patch)))\n return major.value,minor.value,patch.value", "def version(self, *args, **kwargs):\n\n stdout, stderr = self.ctx.execute((self.exe, '--version'), quieter=1)\n\n m = re.match(\n r'(?:Apple clang .* \\(based on LLVM (\\S+)\\))'\n r'|'\n r'(?:clang version (\\S+))', stdout.decode())\n if m:\n if m.group(1):\n return m.group(1)\n else:\n return m.group(2)\n\n return None", "def get_version():\n import subprocess\n from tempfile import NamedTemporaryFile\n proc = subprocess.Popen(lm_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n try:\n out, stderr = proc.communicate(timeout=15)\n except TimeoutError:\n proc.kill()\n outs, stderr = proc.communicate()\n raise RuntimeError(\"command timed out\")\n else:\n m = ver_regex.search(stderr.split(b'\\n')[0])\n if m is None:\n log.debug(\"unable to parse version from output: %s\", stderr)\n raise RuntimeError(\"unable to determine version from command output\")\n else:\n return m.group(1).decode('ascii')", "def detect_version_str(self):\n c3d_bin_path = op.dirname(self.locate_command())\n if platform.system() == 'Linux':\n libname = os.listdir(op.join(c3d_bin_path, '..', 'lib'))[0]\n version_str = libname.split('-')[-1]\n elif platform.system() == 'Darwin':\n info_list_path = op.join(c3d_bin_path, '..', 'Info.plist')\n info_etree = xml.etree.ElementTree.parse(info_list_path)\n elem_bodies = [e.text for e in info_etree.iter()]\n version_str = elem_bodies[\n elem_bodies.index('CFBundleShortVersionString') + 1]\n else:\n raise ArcanaVersionNotDetectableError(\n \"Can't detect c3d version on Windows\")\n 
return version_str", "def version():\n cmd = \"{} -v\".format(_detect_os())\n out = __salt__[\"cmd.run\"](cmd).splitlines()\n ret = out[0].split(\": \")\n return ret[1]", "def version():\n\n version = None\n output = gitopen(['--version'])\n m = re.search(br\" version ([\\d\\.A-Za-z]+)\", output)\n if m is not None:\n version = m.group(1).decode('utf-8')\n return version", "def _find_ld_version():\n if sys.platform == 'darwin':\n return _find_exe_version('ld -v', _MAC_OS_X_LD_VERSION)\n else:\n return _find_exe_version('ld -v')", "def find_version(e):\n args = e.split()\n args += ['-shared', '-Wl,-t']\n p = subprocess.Popen(args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)\n candidates = [x for x in p.stdout if 'libstdc++.so' in x]\n if not candidates:\n return ''\n assert len(candidates) == 1\n libstdcxx = parse_ld_line(candidates[-1])\n\n p = subprocess.Popen(['readelf', '-V', libstdcxx], stdout=subprocess.PIPE)\n versions = [parse_readelf_line(x)\n for x in p.stdout.readlines() if 'Name: GLIBCXX' in x]\n last_version = sorted(versions, cmp = cmp_ver)[-1]\n return encode_ver(last_version)", "def get_version_info() -> Tuple[Text, Text]:", "def getLibVersion():\n return \"Software Development Library for Linux 1.999.1\"", "def software_versions():\n\n quiet = 1\n versions = collections.OrderedDict()\n for package in ['python', 'python3', 'robot', 'firefox', 'google-chrome']:\n # Note: \"robot --version\" returns 0x00000000000000fb.\n # Note: If package does not exist, 0x7f is returned.\n rc, version = gc.shell_cmd(package + \" --version\",\n valid_rcs=[0, 0x7f, 0xfb])\n versions[package] = \"Not installed\" if rc == 0x7f else version.rstrip('\\n')\n\n versions.update(import_versions)\n\n for package in ['robotframework-angularjs', 'robotframework-scplibrary',\n 'robotframework-extendedselenium2library']:\n rc, version = gc.shell_cmd(\"pip3 show \" + package\n + \" | grep Version | sed -re 's/.*: //g'\")\n versions[package] = \"Not installed\" if not version else version.rstrip('\\n')\n\n rc, version = gc.shell_cmd(\"lsb_release -d -s\")\n versions[\"host OS\"] = \"Failed\" if not version else version.rstrip('\\n')\n return versions", "def __get_version_seq_typing():\n\n try:\n cli = [\"seq_typing.py\", \"--version\"]\n p = subprocess.Popen(cli, stdout=PIPE, stderr=PIPE)\n stdout = p.communicate()[0]\n\n version = stdout.splitlines()[0].split()[-1].decode(\"utf8\")\n except Exception as e:\n logger.debug(e)\n version = \"undefined\"\n\n return version", "def get_setup_version():\n if os.path.isdir(\".git\"):\n process = subprocess.Popen(COMMAND_DESCRIBE_VERSION, **SUBPROCESS_KWARGS)\n process.wait()\n version = process.communicate()[0].decode(\"utf-8\").strip()\n return re.match(re_version, version).group(1)\n else:\n return \"0.1\"", "def get_version():\n vers = [\"%(major)i.%(minor)i\" % __version_info__, ]\n\n if __version_info__['micro']:\n vers.append(\".%(micro)i\" % __version_info__)\n if __version_info__['releaselevel'] != 'final':\n vers.append('%(releaselevel)s' % __version_info__)\n return ''.join(vers)", "def test_other_versions_subprocess():\n\n def _check_output(interpreter):\n process = subprocess.Popen(\n [interpreter, \"--version\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )\n stdout, stderr = process.communicate()\n version = stdout.decode() + stderr.decode()\n return version.rstrip()\n\n assert _check_output(\"python3.5\") == \"Python 3.5.7\"\n assert _check_output(\"python3.6\") == \"Python 3.6.8\"\n assert _check_output(\"python3.7\") == \"Python 
3.7.3\"\n assert _check_output(\"python2.7\") == \"Python 2.7.16\"\n\n assert _check_output(\"python\") == \"Python {0}.{1}.{2}\".format(*sys.version_info[:3])\n assert _check_output(\"python2\") == \"Python 2.7.16\"\n assert (\n _check_output(\"python3\") == \"Python {0}.{1}.{2}\".format(*sys.version_info[:3])\n if sys.version_info[0] == 3\n else \"Python 3.7.3\"\n )", "def get_version():\n version_file = Path(__file__).resolve().parent / \"clinker\" / \"__init__.py\"\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file.read_text(), re.M\n )\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Failed to find version string\")", "def tesseract_version():\n result = None\n\n try:\n command = _config.command\n\n if \" \" in command:\n command = _escape_path(command)\n\n command += \" --version\"\n status, output, err_string = _proc_exec_wait(command, True)\n\n if status == 0:\n result = LooseVersion(output.split()[1].lstrip(string.printable[10:]))\n except Exception as e:\n _warn(\n \"tesseract_version: Unable to retrieve Tesseract version. \"\n \"Error: {0}\".format(e)\n )\n\n return result", "def get_external_version(self, path=None):\n exe = self.command.split()[0] if path is None else path\n try:\n p = subprocess.Popen(exe + ' --version', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n #p.wait() #block the rest\n stdo, stde = p.communicate()\n stdr = p.returncode\n if stdr > 0:\n raise RuntimeError(\"Could not check version of \" + exe + \" - Please check your installation and FRED2 \"\n \"wrapper implementation.\")\n except Exception as e:\n raise RuntimeError(e)\n return str(stdo).strip()", "def installedVersion():\n\n cmd = f'{dcm2niix()} -h'\n versionPattern = re.compile(r'v'\n r'(?P<major>[0-9]+)\\.'\n r'(?P<minor>[0-9]+)\\.'\n r'(?P<year>[0-9]{4})'\n r'(?P<month>[0-9]{2})'\n r'(?P<day>[0-9]{2})')\n\n try:\n output = sp.check_output(cmd.split()).decode()\n output = [l for l in output.split('\\n') if 'version' in l.lower()]\n output = '\\n'.join(output).split()\n\n for word in output:\n\n match = re.match(versionPattern, word)\n\n if match is not None:\n return (int(match.group('major')),\n int(match.group('minor')),\n int(match.group('year')),\n int(match.group('month')),\n int(match.group('day')))\n\n except Exception as e:\n log.debug(f'Error parsing dcm2niix version string: {e}')\n return None", "def version_info():\r\n return tuple(map(int, __version__.split('.')))", "def version():\n return Tns.exec_command(command='--version')" ]
[ "0.6619813", "0.6580038", "0.64875406", "0.6457868", "0.64217746", "0.63849837", "0.6326838", "0.6278378", "0.624311", "0.61986893", "0.6176207", "0.6146404", "0.60950166", "0.60890126", "0.60757", "0.6046186", "0.60224766", "0.601256", "0.59944826", "0.599008", "0.59763354", "0.597514", "0.59433854", "0.5900393", "0.589668", "0.5882209", "0.5869961", "0.58397007", "0.58120376", "0.5809323" ]
0.7286158
0
Return true if 'target' is outofdate with respect to any file listed in 'sources'. In other words, if 'target' exists and is newer than every file in 'sources', return false; otherwise return true. 'missing' controls what we do when a source file is missing; the default ("error") is to blow up with an OSError from inside 'stat()'; if it is "ignore", we silently drop any missing source files; if it is "newer", any missing source files make us assume that 'target' is
def newer_group(sources, target, missing='error'): # If the target doesn't even exist, then it's definitely out-of-date. if not os.path.exists(target): return True # Otherwise we have to find out the hard way: if *any* source file # is more recent than 'target', then 'target' is out-of-date and # we can immediately return true. If we fall through to the end # of the loop, then 'target' is up-to-date and we return false. target_mtime = os.stat(target).st_mtime for source in sources: if not os.path.exists(source): if missing == 'error': # blow up when we stat() the file pass elif missing == 'ignore': # missing source dropped from continue # target's dependency list elif missing == 'newer': # missing source means target is return True # out-of-date if os.stat(source).st_mtime > target_mtime: return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def newer(source, target):\n if not os.path.exists(source):\n raise DistutilsFileError(\"file '%s' does not exist\" %\n os.path.abspath(source))\n if not os.path.exists(target):\n return True\n\n return os.stat(source).st_mtime > os.stat(target).st_mtime", "def needs_rebuild(source, target):\n return not os.path.isfile(target) or (\n os.path.getmtime(source) > os.path.getmtime(target))", "def missingOrStale(target, reference=None):\n if not os.path.isfile(target):\n return True\n if reference:\n return os.path.getmtime(target) < os.path.getmtime(reference)\n else:\n return False", "def newer (source, target):\r\n\r\n if not os.path.exists (target):\r\n return 1\r\n\r\n from stat import ST_MTIME\r\n mtime1 = os.stat(source)[ST_MTIME]\r\n mtime2 = os.stat(target)[ST_MTIME]\r\n\r\n return mtime1 > mtime2", "def has_invalid_targets(self):\n return len(self._combined_invalid_versioned_targets.targets) > 0", "def out_of_date(original, derived):\r\n return (not os.path.exists(derived) or\r\n (os.path.exists(original) and\r\n os.stat(derived).st_mtime < os.stat(original).st_mtime))", "def exists_and_newer(targetfile, topicfile):\n try:\n if getmtime(targetfile) >= getmtime(topicfile):\n return True\n else:\n return False\n except IOError:\n return False", "def _assets_are_stale(self, sourcedirectory, cachedirectory):\n comparison = filecmp.dircmp(sourcedirectory, cachedirectory, [], [])\n if comparison.left_only or comparison.right_only:\n # We have files in one directory and not the other\n return True\n if comparison.diff_files:\n # Some of the files have changed\n return True\n\n return False", "def _validate_sources(generated_sources, original_sources):\n\n generated_sources = list(set(generated_sources))\n original_sources = list(set(original_sources))\n not_existent_source = []\n for source in original_sources:\n if source not in generated_sources:\n not_existent_source.append(source)\n\n if not_existent_source:\n print('WARN: Some sources did exist in generated file')\n print(not_existent_source)\n return False\n\n return True", "def is_outdated(self):\n\n if not self.is_done:\n return False\n elif not (self.input_files and self.output_files):\n return False\n\n return fileutils.modified_after(self.input_files, self.output_files)", "def ShouldBuild(self, src_files, dst_files):\n if self.force:\n return True\n\n oldest = None\n for dst in dst_files:\n if not os.path.exists(dst):\n self.DebugMsg(\"Build because %s does not exist\" % dst)\n return True\n modified = os.path.getmtime(dst)\n if oldest == None or modified < oldest:\n old = dst\n oldest = modified\n\n for src in src_files:\n modified = os.path.getmtime(src)\n if modified > oldest:\n self.DebugMsg(\"Build because %s is newer than %s\" % (src, old))\n return True\n\n self.DebugMsg(\"%s are up to date\" % \", \".join(dst_files))\n return False", "def isUpToDate(inFile: str, outFile: str) -> bool:\n if not os.path.exists(inFile):\n return False\n if os.path.exists(outFile):\n if os.path.getmtime(outFile) >= os.path.getmtime(inFile):\n return True\n return False", "def _source_filename_field_is_not_equal_target(self):\n if self.source == self.target:\n # print(f\"{self}\")\n raise SourceEqualsTargetError(\"source must not equal target.\")\n return True", "def existing_and_newer(fn0, fn):\n\n if not os.path.isfile(fn0):\n error(\"Dependency '{}' does not exist\".format(fn0))\n\n if not os.path.isfile(fn):\n return False\n\n if os.path.getmtime(fn0) <= os.path.getmtime(fn):\n return True\n else:\n return False", "def 
source_changed(source, cache):\n return os.path.getmtime(source)>os.path.getmtime(cache)", "def CheckFilesMatch(config):\n\n diff_errors = []\n\n file_pairs = _GetFilePairs(config)\n missing_files, stale_files = _GetMissingAndStaleFiles(file_pairs)\n\n for pair in missing_files:\n diff_errors.append(\"File %s does not exist\" % pair.target)\n continue\n\n for pair in stale_files:\n diff_errors.append(\"File %s is out of date\" % pair.target)\n\n if diff_errors:\n error_msg = \"Files out of date!\\n\\n\"\n error_msg += \"To fix run THIS command:\\n\"\n error_msg += \" bazel-bin/%s/%s --fix\\n\\n\" % (config.package_name,\n config.target_name)\n error_msg += \"Errors:\\n\"\n error_msg += \" \" + \"\\n \".join(diff_errors)\n return error_msg\n else:\n return None", "def has_source_file( self ):\n return self._source_file is not None", "def are_package_sources_available(self) -> bool:\n ok = True\n for name, path in self.update_sources(self.stub_sources):\n if (CONFIG.stub_path / path).exists():\n continue\n if name == StubSource.FROZEN:\n # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any\n continue\n # todo: below is a workaround for different types, but where is the source of this difference coming from?\n msg = (\n f\"{self.package_name}: source '{name.value}' not found: {CONFIG.stub_path / path}\"\n if isinstance(name, StubSource) # type: ignore\n else f\"{self.package_name}: source '{name}' not found: {CONFIG.stub_path / path}\"\n )\n self.status[\"error\"] = msg\n log.debug(msg)\n ok = False\n return ok", "def __checkDestination(self):\n return os.path.exists(self.__targetPath)", "def check_files(self) -> None:\n notfound = False\n give_neuro_data_hint = False\n fullpaths = [f for f, _ in self.input_sources]\n if self.target_sources is not None:\n fullpaths.extend([f for f, _ in self.target_sources])\n for p in fullpaths:\n if not os.path.exists(p):\n print('{} not found.'.format(p))\n notfound = True\n if 'neuro_data_cdhw' in p:\n give_neuro_data_hint = True\n if give_neuro_data_hint:\n print('\\nIt looks like you are referencing the neuro_data_cdhw dataset.\\n'\n 'To install the neuro_data_xzy dataset to the default location, run:\\n'\n ' $ wget https://github.com/ELEKTRONN/elektronn.github.io/releases/download/neuro_data_cdhw/neuro_data_cdhw.zip\\n'\n ' $ unzip neuro_data_cdhw.zip -d ~/neuro_data_cdhw')\n if notfound:\n print('\\nPlease fetch the necessary dataset and/or '\n 'change the relevant file paths in the network config.')\n sys.stdout.flush()\n sys.exit(1)", "def updated_targets(self, targets, destination_directory):\n\n # Do the arguments have the correct format?\n # Raise 'tuf.FormatError' if there is a mismatch.\n tuf.formats.TARGETFILES_SCHEMA.check_match(targets)\n tuf.formats.PATH_SCHEMA.check_match(destination_directory)\n\n updated_targets = []\n\n for target in targets:\n # Get the target's filepath located in 'destination_directory'.\n # We will compare targets against this file.\n target_filepath = os.path.join(destination_directory, target['filepath'])\n \n # Try one of the algorithm/digest combos for a mismatch. We break\n # as soon as we find a mismatch.\n for algorithm, digest in target['fileinfo']['hashes'].items():\n digest_object = None\n try:\n digest_object = tuf.hash.digest_filename(target_filepath,\n algorithm=algorithm)\n # This exception would occur if the target does not exist locally. \n except IOError:\n updated_targets.append(target)\n break\n # The file does exist locally, check if its hash differs. 
\n if digest_object.hexdigest() != digest:\n updated_targets.append(target)\n break\n \n return updated_targets", "def has_sources(self, extension=None):\r\n return (self.has_label('sources') and\r\n (not extension or\r\n (hasattr(self, 'sources') and\r\n any(source.endswith(extension) for source in self.sources))))", "def check_sources(options, reporter=None):\n if reporter is None:\n reporter = Reporter(Reporter.CONSOLE)\n reporter.call_count = 0\n\n if options.diff_branch:\n # We ignore the passed sources, and get the files from the VCS.\n sources = []\n for change in _git_diff_files(ref=options.diff_branch):\n # Filter deleted changes since we can not lint then.\n if change[0] == \"d\":\n continue\n sources.append(change[1])\n else:\n # We don't have explicit sources, so we use the one from the\n # configuration\n sources = options.scope[\"include\"]\n\n regex_exclude = [re.compile(expression) for expression in options.scope[\"exclude\"]]\n\n def is_excepted_file(file_name):\n for expresion in regex_exclude:\n if expresion.match(file_name):\n return True\n\n if options.scope[\"include\"]:\n included = False\n for include in options.scope[\"include\"]:\n if file_name.startswith(include):\n included = True\n break\n if not included:\n return True\n\n return False\n\n count = 0\n for source in sources:\n file_path = os.path.normpath(source)\n\n if os.path.isdir(source):\n paths = _get_all_files(file_path)\n else:\n paths = [file_path]\n\n for file_path in paths:\n if is_excepted_file(file_path):\n continue\n\n if not Language.is_editable(file_path):\n continue\n\n language = Language.get_language(file_path)\n with open(file_path, \"rt\") as file_:\n text = file_.read()\n\n count += 1\n if options.progress:\n sys.stdout.write(\".\")\n if count % 72 == 0:\n sys.stdout.write(\"\\n\")\n if count % 5 == 0:\n sys.stdout.flush()\n\n checker = UniversalChecker(\n file_path, text, language, reporter, options=options\n )\n checker.check()\n\n sys.stdout.flush()\n return reporter.call_count", "def has_any(self, sources):\n # By default pretend to have all sources,\n # an exception is thrown if the source is not available\n return True", "def needs_update(self, *path):\n dt_fmt = \"%Y-%m-%d %H:%M:%S\"\n try:\n linfo = self.info(*path)\n dt_local = datetime.datetime.strptime(\n linfo[\"datetime\"][:19], dt_fmt)\n dt_server = datetime.datetime.strptime(\n self.serverfiles.info(*path)[\"datetime\"][:19], dt_fmt)\n return dt_server > dt_local\n except FileNotFoundError:\n return True\n except KeyError:\n return True", "def output_out_of_date(self):\n if not os.path.exists(self.output_file):\n logging.info(\"will generate, missing binding output file\")\n return True\n output_mtime = os.path.getmtime(self.output_file)\n if self._any_files_newer(self.header_files, output_mtime):\n logging.info(\"will generate, header files newer\")\n return True\n if self._any_files_newer(self.interface_files, output_mtime):\n logging.info(\"will generate, interface files newer\")\n return True\n if self._file_newer(self.input_file, output_mtime):\n logging.info(\"will generate, swig input file newer\")\n return True\n if self._file_newer(self.extensions_file, output_mtime):\n logging.info(\"will generate, swig extensions file newer\")\n return True\n if self._file_newer(self.wrapper_file, output_mtime):\n logging.info(\"will generate, swig wrapper file newer\")\n return True\n if self._file_newer(self.typemaps_file, output_mtime):\n logging.info(\"will generate, swig typemaps file newer\")\n return True\n if 
self._file_newer(self.safecast_file, output_mtime):\n logging.info(\"will generate, swig safecast file newer\")\n return True\n\n # If we made it here, nothing is newer than the output file.\n # Thus, the output file is not out of date.\n return False", "def checkfile(filename, source=None):\n if source:\n # Let's check some sums\n if os.path.exists(filename) and os.path.exists(source):\n src_sha = calchash(source)\n dest_sha = calchash(filename)\n if DRYRUN:\n print(\"{src} hash {src_sha}. {dest} hash {dest_sha}\".format(src=source, dest=filename, src_sha=src_sha.hexdigest(), dest_sha=dest_sha.hexdigest()))\n return src_sha.digest() == dest_sha.digest()\n else:\n return os.path.exists(filename)", "def report_missing(missing):\n _report_files('invalid', missing)", "def has_source(self):\n return any(map(utils.assert_package_is_source, self.pkg_arguments))", "def invalid_versioned_targets(self):\n return self._invalid_versioned_targets" ]
[ "0.68539894", "0.6768285", "0.6579951", "0.6263332", "0.6170721", "0.6142411", "0.6040172", "0.60333484", "0.5950948", "0.5950353", "0.5943889", "0.5890367", "0.565034", "0.563318", "0.5581962", "0.5574001", "0.5522218", "0.54946536", "0.5422363", "0.5406527", "0.5402316", "0.53993917", "0.5371853", "0.5369009", "0.533165", "0.53297234", "0.5316884", "0.5266513", "0.52506256", "0.5242922" ]
0.7944473
0
Returns True if path is a package (a dir with an __init__ file).
def _is_package(path):
    if not os.path.isdir(path):
        return False
    return os.path.isfile(os.path.join(path, '__init__.py'))
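A minimal usage sketch (assuming `os` is imported, as the function requires); it creates a throwaway package directory and probes it:

import os

os.makedirs('mypkg', exist_ok=True)                        # a directory ...
open(os.path.join('mypkg', '__init__.py'), 'w').close()   # ... containing __init__.py

print(_is_package('mypkg'))     # True: directory with an __init__.py
print(_is_package('setup.py'))  # False: not a directory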
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def IsPackage(path):\n init_base_path = os.path.join(path, '__init__.py')\n return (os.path.isfile(init_base_path) or\n os.path.isfile(init_base_path + 'c') or\n os.path.isfile(init_base_path + 'o'))", "def _is_package(path):\n return (\n os.path.isdir(path)\n and os.path.exists(os.path.join(path, '__init__.py'))\n )", "def _is_package(path):\n return (\n os.path.isdir(path)\n and os.path.exists(os.path.join(path, '__init__.py'))\n )", "def is_package(path: str) -> bool:\n return os.path.isdir(path) and \"__init__.py\" in os.listdir(path)", "def is_pkg(cls, path):\n return exists(join(path, '__init__.py'))", "def is_package_dir(path: Path) -> bool:\n if not path.is_dir():\n return False\n if path.name.endswith(\".egg-info\"):\n return False\n if (path / \"__init__.pyi\").exists():\n return True\n return False", "def is_module_or_package(path):\r\n is_module = osp.isfile(path) and osp.splitext(path)[1] in ('.py', '.pyw')\r\n is_package = osp.isdir(path) and osp.isfile(osp.join(path, '__init__.py'))\r\n return is_module or is_package", "def is_package(self, fullname):\n return hasattr(self.__get_module(fullname), \"__path__\")", "def is_python_package(directory_path, file_path):\n # type: (str, str) -> Tuple[bool, Optional[str]]\n file_name = os.path.basename(file_path)\n init_file_path = os.path.join(file_path, \"__init__.py\")\n\n if os.path.isdir(file_path) and os.path.isfile(init_file_path):\n # Package\n return (True, file_name)\n\n return (False, None)", "def is_module(path):\n\n fname, ext = os.path.splitext(path)\n if ext == \".py\":\n return True\n elif os.path.exists(os.path.join(path, \"__init__.py\")):\n return True\n else:\n return False", "def is_package(self, fullmodname):\n submodname, is_package, relpath = self._get_info(fullmodname)\n return is_package", "def is_module(path: str) -> bool:\n return os.path.isfile(path) and path.endswith(\".py\")", "def is_pyi_directory_init(filename):\n if filename is None:\n return False\n return path_utils.splitext(path_utils.basename(filename))[0] == \"__init__\"", "def test_IsPackage_directory(tempdir: pathlib.Path):\n assert dpack._IsPackage(tempdir)", "def is_built_package(package):\n try:\n parent_folder = finder.get_package_root(package)\n except (AttributeError, TypeError):\n raise ValueError(\n 'Input \"{package}\" is not a valid Rez package.'.format(package=package)\n )\n\n version = str(package.version)\n\n if not version:\n return False\n\n return version == os.path.basename(parent_folder)", "def is_dir(self, path):", "def isdir (self, path):\r\n pass", "def check_cleaned(path):\n bool_1 = isdir(join(path, 'Main'))\n bool_2 = isdir(join(path, 'Finantial'))\n bool_3 = bool_1 and bool_2\n return bool_3", "def is_python_file(path):\n valid = False\n\n if os.path.isfile(path) and path.endswith('.py'):\n valid = True\n\n return valid", "def has_distribution(path):\n\n ws = pkg_resources.WorkingSet([path])\n return bool(ws.entry_keys[path])", "def has_package(self, doc):\n return doc.package is not None", "def has_python_package( # pylint: disable=too-many-branches,too-many-locals\n package, paths=None, allow_build=True, allow_current_context=False\n):\n from . 
import creator # Avoiding a cyclic import\n\n if not hasattr(package, \"name\") or not hasattr(package, \"version\"):\n raise ValueError(\n 'Object \"{package}\" is not a valid Rez package.'.format(package=package)\n )\n\n if not paths:\n paths = config.packages_path # pylint: disable=no-member\n\n version = \"\"\n is_built = is_built_package(package)\n\n if is_built:\n version = package.version\n\n if allow_current_context and in_valid_context(package):\n environment = os.environ.get(\"PYTHONPATH\", \"\").split(os.pathsep)\n else:\n context = resolved_context.ResolvedContext(\n [\"{package.name}=={version}\".format(package=package, version=version)],\n package_paths=[get_packages_path_from_package(package)] + paths,\n )\n\n environment = context.get_environ().get(\"PYTHONPATH\", \"\").split(os.pathsep)\n\n paths = get_package_python_paths(package, environment)\n\n # All zipped .egg files as valid Python \"packages\"\n for path in paths:\n if path.endswith(\".egg\") and os.path.isfile(path):\n return True\n\n for root_path in paths:\n for _, _, files in os.walk(root_path):\n for file_path in files:\n if file_path == \"__init__.py\":\n continue\n\n if file_path.endswith(\".py\"):\n return True\n\n if is_built or not allow_build:\n return False\n\n # If the package is a source package and PYTHONPATH is defined but\n # no path was found, it may actually be that the Python files are\n # generated on-build (such as C++ files with Python bindings). To\n # find out, we need to run this function again, but with the built\n # package.\n #\n build_directory = tempfile.mkdtemp(suffix=\"_some_temporary_rez_build_package\")\n build_package = creator.build(package, build_directory, quiet=True)\n\n # Reference: https://stackoverflow.com/questions/3850261/doing-something-before-program-exit\n atexit.register(functools.partial(shutil.rmtree, build_directory))\n\n return has_python_package(build_package)", "def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:\n norm_pkg_path = _path.normpath(pkg_path)\n rest = pkg.replace(parent, \"\", 1).strip(\".\").split(\".\")\n return pkg.startswith(parent) and norm_pkg_path == _path.normpath(\n Path(parent_path, *rest)\n )", "def _package_available(package_name: str) -> bool:\n try:\n return find_spec(package_name) is not None\n except ModuleNotFoundError:\n return False", "def __is_dir(path):\n if path[-2:] == \"..\":\n return False\n try:\n os.listdir(path)\n return True\n except OSError:\n return False", "def is_package(cls, *args, **kwargs): # real signature unknown\n pass", "def is_package(cls, *args, **kwargs): # real signature unknown\n pass", "def is_package(cls, *args, **kwargs): # real signature unknown\n pass", "def is_dir(self, path: PathLike):", "def _is_dataset_path(ds_path: github_api.GithubPath) -> bool:\n return ds_path.is_dir() and (ds_path / f'{ds_path.name}.py').exists()" ]
[ "0.9028623", "0.8911071", "0.8911071", "0.8750731", "0.8576869", "0.81681454", "0.803935", "0.76286083", "0.7427226", "0.7406388", "0.6980071", "0.6966034", "0.6936042", "0.6757917", "0.67051244", "0.6700581", "0.6627893", "0.6593783", "0.6561629", "0.6427087", "0.6419333", "0.6399255", "0.6354986", "0.63332605", "0.6327496", "0.6215848", "0.6215848", "0.6215848", "0.62052965", "0.61824244" ]
0.89318365
1
Returns a dotted package name, given a subpath.
def _package_name(root_path, path):
    if not _under(path, root_path):
        raise ValueError('"%s" is not a subpath of "%s"' % (path, root_path))
    return path[len(root_path) + 1:].replace(os.sep, '.')
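`_under` is not shown here; assuming it is a simple prefix check, the conversion works like this (POSIX paths):

import os

def _under(path, root_path):
    # hypothetical stand-in for the helper used above
    return path.startswith(root_path + os.sep)

print(_package_name('/src', '/src/foo/bar'))  # 'foo.bar'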
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _resolve_name(name, package, level):\n if not hasattr(package, 'rindex'):\n raise ValueError(\"'package' not set to a string\")\n dot = len(package)\n for x in xrange(level, 1, -1):\n try:\n dot = package.rindex('.', 0, dot)\n except ValueError:\n raise ValueError(\"attempted relative import beyond top-level package\")\n return \"%s.%s\" % (package[:dot], name)", "def _resolve_name(path, package, start):\n\n if not hasattr(package, 'rindex'):\n raise ValueError(\"'package' not set to a string\")\n dot = len(package)\n for _ in range(start, 1, -1):\n try:\n dot = package.rindex('.', 0, dot)\n except ValueError:\n raise ValueError(\"attempted relative import beyond top-level \"\n \"package\")\n return \"{}.{}\".format(package[:dot], path)", "def _resolve_name(name, package, level):\r\n if not hasattr(package, 'rindex'):\r\n raise ValueError(\"'package' not set to a string\")\r\n dot = len(package)\r\n for x in xrange(level, 1, -1):\r\n try:\r\n dot = package.rindex('.', 0, dot)\r\n except ValueError:\r\n raise ValueError(\"attempted relative import beyond top-level \"\r\n \"package\")\r\n return \"%s.%s\" % (package[:dot], name)", "def _package_root(name):\n return name.split('.', 1)[0]", "def package_to_path(package):\n return package.replace('.','/')", "def get_package_path(ontology, parent, package):\n result = get_ontology_name(ontology)\n result += '.v'\n result += get_ontology_version(ontology)\n result += '.'\n result += get_package_name(parent)\n result += '.'\n result += get_package_name(package)\n return result", "def get_package_path():\n package_name = get_package_name()\n return package_name.replace('.', '/')", "def dotted_path(cls):\n return f\"{cls.__module__}.{cls.__qualname__}\"", "def module_path(module: 'ModuleType | str', subpath: str) -> str:\n\n parent = module_path_root(module)\n path = os.path.join(parent, subpath.strip('/'))\n\n # always be paranoid with path manipulation\n assert is_subpath(parent, path)\n\n return path", "def dotted_name(s):\n forbidden = forbidden_chars.intersection(s)\n if forbidden:\n raise ValueError('%(s)s contains forbidden characters'\n ' (%(forbidden)s)'\n % locals())\n if not s:\n return ''\n elif s in reserved_names:\n raise ValueError('The name %(s)r is reserved!'\n % locals())\n # might result from tab completion:\n stripped = s.rstrip('/')\n if '/' in stripped:\n raise ValueError('dotted name %(stripped)r'\n ' must not contain slashes'\n % locals())\n chunks = stripped.split('.')\n if [chunk\n for chunk in chunks\n if not chunk\n ]:\n raise ValueError('badly dotted name: %(stripped)r'\n % locals())\n return stripped", "def get_package_name(cls) -> str:\n return '.'.join(cls.__module__.split('.')[:-1])", "def get_package_name(name):\n name = _strip_package_name(name)\n return name", "def dotted(dirpath, relative_to=\"\", root_module=\"\"):\n def path_components(dirpath):\n d, p = split(dirpath)\n if d == dirpath:\n return [d]\n else:\n return path_components(d) + [p]\n dirs = path_components(dirpath)\n relatives = path_components(relative_to)\n while relatives and dirs and dirs[0] == relatives[0]:\n dirs, relatives = dirs[1:], relatives[1:]\n if root_module:\n roots = root_module.split('.')\n while roots and dirs and dirs[0] == roots[0]:\n dirs, roots = dirs[1:], roots[1:]\n return '.'.join(dirs)", "def get_pkgdirimp_dotpathprefix_site(self):\n from string import join\n modpath = self.sitemodulename\n dirpath = join(modpath.split('.')[:-1], '.')\n return dirpath", "def name_from_path(path):\n return path[0:-3]", "def 
_get_module(dotted_path):\n package, module = dotted_path.rsplit('.', 1)\n return getattr(import_module(package), module)", "def _namespace_package_path(fqname, pathnames, path=None):\n working_set = pkg_resources.WorkingSet(path)\n\n path = list(pathnames)\n\n for dist in working_set:\n if dist.has_metadata('namespace_packages.txt'):\n namespaces = dist.get_metadata(\n 'namespace_packages.txt').splitlines()\n if fqname in namespaces:\n nspath = os.path.join(dist.location, *fqname.split('.'))\n if nspath not in path:\n path.append(nspath)\n\n return path", "def test_get_module_qualname_from_path_with_dot(self):\n\n name = b_utils.get_module_qualname_from_path(\n os.path.join(\".\", \"__init__.py\")\n )\n\n self.assertEqual(\"__init__\", name)", "def name(self):\n\n if self.package:\n directory = self.package.directory\n if self.package.resolve_root:\n directory = directory.joinpath(self.package.resolve_root)\n rel = None\n try:\n rel = self.filename.with_suffix('').relative_to(directory)\n except ValueError as e:\n if self.package.resolve_root:\n # Possibly this module is required from a directory outside of\n # the package's resolve_root, and Path.relative_to() will raise a\n # ValueError if the file is not inside the specified directory.\n try:\n rel = type(self.filename)(os.path.relpath(str(self.filename.with_suffix('')), str(directory)))\n except ValueError as e:\n pass # On a different drive\n pass\n if rel:\n parts = filter(bool, utils.path.lparts(rel))\n return self.package.name + '/' + '/'.join(parts)\n\n return self.filename.stem", "def _strip_package_name(name):\n name = _strip(name)\n if name.find('.') != -1:\n name = name.split('.')[0]\n return name", "def infer_module_name(filename, fspath):\n filename, _ = os.path.splitext(filename)\n for f in fspath:\n short_name = f.relative_path(filename)\n if short_name:\n # The module name for __init__.py files is the directory.\n if short_name.endswith(os.path.sep + \"__init__\"):\n short_name = short_name[:short_name.rfind(os.path.sep)]\n return short_name.replace(os.path.sep, '.')\n # We have not found filename relative to anywhere in pythonpath.\n return ''", "def resolve_full_name(base, name, level):\n if level == 0:\n return name\n bits = base.rsplit(\".\", level - 1)\n base = bits[0]\n return f\"{base}.{name}\" if name else base", "def get_module_name(module_path):\n return ntpath.split(module_path)[1].split(\".\")[0]", "def get_module_name(module_path):\n return ntpath.split(module_path)[1].split(\".\")[0]", "def get_absolute_name(package, relative_name):\n path = package.split('.') if package else []\n name = relative_name.lstrip('.')\n ndots = len(relative_name) - len(name)\n if ndots > len(path):\n return relative_name\n absolute_path = path[:len(path) + 1 - ndots]\n if name:\n absolute_path.append(name)\n return '.'.join(absolute_path)", "def path2name(path,\n slash=\"/\",\n hid_char=\".\",\n extension=False):\n if extension is True:\n return str(path.split(slash)[-1].strip(hid_char))\n else:\n return str(path.split(slash)[-1].strip(hid_char).split(\".\")[0])", "def module_name_from_filepath(path: str) -> str:\n name = osp.splitext(osp.basename(path))[0]\n if name == '__init__':\n name = osp.basename(osp.dirname(path))\n return name", "def niceName(self, path):\n logger.debug(\"Func: niceName\")\n\n basename = os.path.split(path)[1]\n return os.path.splitext(basename)[0]", "def path_name(self, path):\r\n ind = path.rfind(\"/\") + 1\r\n return (path[:ind], path[ind:])", "def as_package(names: List[str]) -> str:\n return 
'.'.join(names)" ]
[ "0.680801", "0.68059653", "0.67358416", "0.67321527", "0.6679984", "0.647044", "0.6385544", "0.6206499", "0.60818356", "0.60691476", "0.60549533", "0.6032795", "0.60183483", "0.60061336", "0.5967444", "0.59654856", "0.59017694", "0.5867805", "0.5845933", "0.5839538", "0.583019", "0.5808495", "0.58052266", "0.58052266", "0.57850736", "0.5763329", "0.572595", "0.56966925", "0.56945723", "0.5687559" ]
0.8235141
0
Return a list of all Python packages found recursively within directories 'paths'. 'paths' should be supplied as a sequence of "cross-platform" (i.e. URL-style) paths; they will be converted to the appropriate local path syntax. 'exclude' is a sequence of package names to exclude; '*' can be used as a wildcard in the names, such that 'foo.*' will exclude all subpackages of 'foo' (but not 'foo' itself).
def find_packages(paths=(os.curdir,), exclude=()):
    packages = []
    discarded = []

    def _discarded(path):
        for discard in discarded:
            if _under(path, discard):
                return True
        return False

    for path in paths:
        path = convert_path(path)
        for root, dirs, files in os.walk(path):
            for dir_ in dirs:
                fullpath = os.path.join(root, dir_)
                if _discarded(fullpath):
                    continue
                # we work only with Python packages
                if not _is_package(fullpath):
                    discarded.append(fullpath)
                    continue
                # see if it's excluded
                excluded = False
                package_name = _package_name(path, fullpath)
                for pattern in exclude:
                    if fnmatchcase(package_name, pattern):
                        excluded = True
                        break
                if excluded:
                    continue
                # adding it to the list
                packages.append(package_name)
    return packages
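A hedged usage sketch: it assumes the helpers above (`_under`, `_is_package`, `_package_name`) are in scope, plus `convert_path` from `distutils.util` (available in pre-3.12 Pythons) and `fnmatchcase` from `fnmatch`:

from distutils.util import convert_path  # used inside find_packages
from fnmatch import fnmatchcase          # used inside find_packages

# List every package under the current directory, skipping test packages.
for name in find_packages(exclude=('tests', 'tests.*')):
    print(name)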
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_packages(paths, extensions=['.py', '.ipynb'],\n include_imported_dependencies=False):\n if isinstance(paths, str):\n paths = [paths]\n\n all_packages = set()\n for path in paths:\n if os.path.isfile(path):\n basename, ext = os.path.splitext(path)\n file_dict = {ext: [path]}\n\n else:\n file_dict = find_all_files(path, extensions=extensions)\n\n for ext, files in file_dict.items():\n if ext not in parser_map:\n raise ValueError('File extension \"{0}\" is not supported.'\n .format(ext))\n\n for file in files:\n _packages = parser_map[ext](file)\n all_packages = all_packages.union(_packages)\n\n if include_imported_dependencies:\n init_modules = sys.modules.copy()\n\n # Now we have a list of package names, so we can import them and track\n # what other packages are imported as dependencies. If requested, we add\n # those to the package list as well\n for package_name in all_packages:\n try:\n importlib.import_module(package_name)\n except ImportError:\n # here, just skip if we can't import: a warning is issued later\n pass\n\n loaded_modules = sys.modules.copy()\n diff_modules = set(loaded_modules.keys()) - set(init_modules.keys())\n\n additional_modules = set()\n for module in diff_modules:\n top_level = module.split('.')[0]\n\n if top_level.startswith('_'):\n continue\n\n additional_modules.add(top_level)\n\n all_packages = all_packages.union(additional_modules)\n\n return all_packages", "def get_dependencies(path: str,\n ignore_dirs: Optional[Sequence[str]] = None,\n include_stdlib: bool = False,\n only_top_level: bool = True,\n ) -> set:\n tree = build_tree(path, ignore_dirs=ignore_dirs)\n lookup_imports_tree(tree, stdlib_lookup=not include_stdlib)\n return get_external_imports(tree, only_top_level)", "def resolve_specs(paths):\n specs = []\n for path in paths:\n if os.path.isdir(path):\n _, _, files = os.walk(path).next()\n specs.extend(os.path.join(path, fname) for fname in files)\n else:\n specs.append(path)\n return specs", "def get_package_python_paths(package, paths):\n # Note: Here we're trying to get `package`'s specific changes to PYTHONPATH (if any)\n #\n # Unfortunately, the Rez API doesn't really support this yet.\n # There's 2 GitHub links that may one-day implement it though:\n # - https://github.com/nerdvegas/rez/issues/737\n # - https://github.com/nerdvegas/rez/pull/739\n #\n # Reference: https://rez-talk.slack.com/archives/CHELFCTFB/p1578604659006100\n #\n # Once that work is merged, replace `get_package_python_paths` with it.\n #\n root = finder.get_package_root(package)\n\n if is_built_package(package):\n return {path for path in paths if filer.in_directory(path, root, follow=False)}\n\n output = set()\n\n for path in paths:\n # If the Rez package is a source Rez package + has variants\n # we need to strip the \"variants\" out of `path`, before\n # returning it.\n #\n try:\n variant_less_path = next(\n _iter_variant_extracted_paths(root, path, package.variants or [])\n )\n except StopIteration:\n pass\n else:\n output.add(variant_less_path)\n\n continue\n\n if filer.in_directory(path, root, follow=False) or filer.in_directory(\n path, root, follow=True\n ):\n output.add(path)\n\n continue\n\n return output", "def my_find_packages(*args):\n import os\n packages = []\n for root_module_dir in args:\n for root, dirs, files in os.walk(root_module_dir):\n if '__init__.py' in files:\n packages.append(root)\n return packages", "def get_python_filepaths(*, roots=None, python_paths=None):\n if python_paths is None:\n python_paths = ['setup.py']\n if roots is 
None:\n roots = ['charmcraft', 'tests']\n for root in roots:\n for dirpath, dirnames, filenames in os.walk(root):\n for filename in filenames:\n if filename.endswith(\".py\"):\n python_paths.append(os.path.join(dirpath, filename))\n return python_paths", "def extract_packages(package_names):\n\n # Set a safe extraction dir\n extraction_tmpdir = tempfile.mkdtemp()\n atexit.register(lambda: shutil.rmtree(\n extraction_tmpdir, ignore_errors=True))\n pkg_resources.set_extraction_path(extraction_tmpdir)\n\n # Extract each package to disk\n dirs_to_add = []\n for package_name in package_names:\n req = pkg_resources.Requirement.parse(package_name)\n extraction_dir = pkg_resources.resource_filename(req, '')\n dirs_to_add.append(extraction_dir)\n\n # Add extracted directories to import path ahead of their zip file\n # counterparts.\n sys.path[0:0] = dirs_to_add\n existing_pythonpath = os.environ.get('PYTHONPATH')\n if existing_pythonpath:\n dirs_to_add.extend(existing_pythonpath.split(':'))\n os.environ['PYTHONPATH'] = ':'.join(dirs_to_add)", "def _resolve_paths(paths):\n allowed_ext = tuple(MIMES.keys())\n\n resolved = []\n for path in paths:\n if os.path.isdir(path):\n resolved.extend(\n entry.path for entry in os.scandir(path)\n if entry.is_file() and entry.name.lower().endswith(allowed_ext)\n )\n elif os.path.isfile(path) and path.lower().endswith(allowed_ext):\n resolved.append(path)\n return resolved", "def get_zip_package_paths(paths: List[Path], root_dir=None) -> List[Tuple[Path, Path]]:\n return [(path, path.relative_to(root_dir)) for path in paths]", "def aggregate(\n *passed_paths: str, recursive: bool = False\n) -> List[pathlib.Path]:\n\n stack = []\n\n for passed_path in passed_paths:\n path = pathlib.Path(passed_path)\n\n if path.is_file():\n stack.append(path)\n\n return stack", "def filter_paths(paths, included_patterns=None, excluded_patterns=None, case_sensitive=True):\n included = [\"*\"] if included_patterns is None else included_patterns\n excluded = [] if excluded_patterns is None else excluded_patterns\n\n for path in paths:\n if _match_path(path, set(included), set(excluded), case_sensitive):\n yield path", "def go_package_dirs(packages, build_tags):\n\n target_packages = []\n for pkg in packages:\n target_packages += (\n check_output(\n f\"go list -find -f \\\"{{{{ .Dir }}}}\\\" -mod=mod -tags \\\"{','.join(build_tags)}\\\" {pkg}\",\n shell=True,\n )\n .decode('utf-8')\n .strip()\n .split(\"\\n\")\n )\n\n return target_packages", "def filter_paths(pathnames, patterns=None, ignore_patterns=None):\n result = []\n if patterns is None:\n patterns = ['*']\n if ignore_patterns is None:\n ignore_patterns = []\n for pathname in pathnames:\n if match_patterns(pathname, patterns) and not match_patterns(pathname,\n ignore_patterns):\n result.append(pathname)\n return result", "def expandPathsToFiles (paths):\n\n\t\tdef getFiles (dirPath):\n\t\t\tfor root, dirs, files in os.walk(dirPath):\n\t\t\t\tfor file in files:\n\t\t\t\t\tyield os.path.join(root, file)\n\n\t\tfiles = []\n\t\tfor path in paths:\n\t\t\tif os.path.isdir(path):\n\t\t\t\tfiles += list(getFiles(path))\n\t\t\telse:\n\t\t\t\tfiles.append(path)\n\n\t\treturn files", "def rpm_query_files(*filepaths):\n if filepaths:\n # The 'PACKAGE:' string is used to filter out errors -- any line not\n # containing it will be discarded.\n cmd = (['rpm', '--query', '--file',\n r'--queryformat=PACKAGE:%{NAME}-%{VERSION}-%{RELEASE}.%{ARCH}\\n'] +\n list(filepaths))\n output, error, returncode = run_with_subprocess(cmd)\n\n return 
[line.lstrip('PACKAGE:') for line in output.split(\"\\n\") if line.startswith('PACKAGE:')]\n else:\n return []", "def expand_paths(paths, cwd=None):\n return [expand_path(x, cwd) for x in paths]", "def extend_import_paths(paths):\n for path in paths:\n dir = os.path.abspath(path if os.path.isdir(path) else os.path.dirname(path))\n while(os.path.exists(os.path.join(dir, '__init__.py'))):\n dir = os.path.dirname(dir)\n sys.path.append(dir)", "def remove_packages_ignored_since(days, paths=None, dry_run=False, verbose=False):\n num_removed = 0\n\n for path in (paths or config.packages_path):\n repo = package_repository_manager.get_repository(path)\n\n if verbose:\n print_info(\"Searching %s...\", repo)\n\n num_removed += repo.remove_ignored_since(\n days=days,\n dry_run=dry_run,\n verbose=verbose\n )\n\n return num_removed", "def all_possible_beards(paths):\n literal_paths = get_literal_beard_paths(paths)\n\n for path in literal_paths:\n for f in os.listdir(path):\n if is_module(os.path.join(path, f)):\n yield os.path.basename(f)", "def find_packages( root ):\n for path, directories, files in os.walk( root ):\n if is_package( path ):\n yield path.replace( '/','.' )", "def packages_in_folder(path):\n for pkg_path in catkin_pkg.packages.find_package_paths(path):\n yield os.path.join(path, pkg_path)", "def get_files(self, path, exclude=[], extensions=None):\r\n files = []\r\n root = os.path.join(self.path, path)\r\n\r\n if os.path.isdir(root):\r\n for dirpath, dirs, temp_files in os.walk(root, followlinks=True):\r\n for e in exclude:\r\n if e in dirs:\r\n dirs.remove(e)\r\n reldir = os.path.relpath(dirpath, self.path)\r\n for f in temp_files:\r\n fp = os.path.join(reldir, f)\r\n if self._include_path(fp, extensions):\r\n files.append(fp)\r\n elif os.path.exists(root) and self._include_path(path, extensions):\r\n files.append(path) # can't walk non-directories\r\n return files", "def get_files(path, exclude=None):\n exclude = exclude or '*.pyc'\n exclude_expr = '{}/**/{}'.format(path, exclude)\n exclude = set(glob.iglob(exclude_expr, recursive=True))\n\n expr = '{}/**'.format(path)\n paths = set(glob.iglob(expr, recursive=True)) - exclude\n\n files = []\n for filename in paths:\n if os.path.isfile(filename):\n files.append(os.path.abspath(filename))\n return files", "def find_files(path, include=None, exclude=None):\n # If no extension is selected, use the wild card.\n if include is None:\n include = '*'\n # Make sure it is an iterable,\n include = assert_is_iter(include)\n # Find files and flatten.\n files = [glob.glob(f'{path}/**/*.{ext}', recursive=True) for ext in include]\n # The return of deep_flatten is an generator.\n files = list(morsels.deep_flatten(files))\n # Exclude files that the user does not want.\n if exclude is not None:\n # Make sure it is an iterable,\n exclude = assert_is_iter(exclude)\n # The slice is used to remove the dot from the beginning of the extension.\n files = [file for file in files if not os.path.splitext(file)[-1][1:] in exclude]\n return files", "def _get_pyfilelist(srcpath, usegitignore=True) -> list:\n gitignorefile = srcpath / Path(\".gitignore\")\n if usegitignore and gitignorefile.exists():\n with gitignorefile.open('r') as f:\n lines = f.read().splitlines()\n gitignore = [\n srcpath / Path(line)\n for line in lines\n if not line.strip().startswith(\"#\")\n and len(line.strip()) > 1\n and Path(line).suffix == \"\"\n ] + [srcpath / Path(\".git\")]\n viablepaths = [\n p for p in srcpath.glob(\"*/\") if p.is_dir() and p not in gitignore\n ]\n filelist = 
set().union(*[set(p.glob(\"**/*.py\")) for p in viablepaths])\n filelist = filelist.union(*[set(srcpath.glob('*.py'))])\n else:\n filelist = srcpath.glob(\"**/*.py\")\n return [p.relative_to(srcpath) for p in filelist]", "def get_file_paths_and_size(paths, ignore_dotfiles, ignore_windows_volume_folders):\n\n def walk_error(os_error):\n \"\"\"Print user warning and raise OSError\"\"\"\n printer(\"Cannot access '{}'; zip creation aborted\".format(os_error.filename), \"error\", True)\n raise os_error\n\n EXCLUDE_FOLDERS = {\"$RECYCLE.BIN\", \"System Volume Information\"}\n exclude_folder_seen_log = {} # type: typing.Dict[str, typing.List[str]]\n files = []\n size = 0\n for path in sorted(paths):\n for root, dirs, filenames in os.walk(path, onerror=walk_error):\n if ignore_dotfiles:\n filenames = [f for f in filenames if not f[0] == \".\"]\n dirs[:] = [d for d in dirs if not d[0] == \".\"]\n if ignore_windows_volume_folders:\n for directory in [d for d in dirs if d in EXCLUDE_FOLDERS]:\n if directory not in exclude_folder_seen_log:\n exclude_folder_seen_log[directory] = []\n exclude_folder_seen_log[directory].append(os.path.join(root, directory))\n printer(\n \"'{}' will not be processed (Windows system directory)\".format(\n os.path.join(root, directory)\n ),\n \"info\",\n )\n else:\n exclude_folder_seen_log[directory].append(os.path.join(root, directory))\n printer(\n \"Excluded folder '{}' has been excluded more than once within path\"\n \" '{}' - this is unexpected, as this folder should only be found in\"\n \" the root of a drive. Be advised that the following folders will\"\n \" NOT be processed: {}\".format(\n directory,\n path,\n get_list_as_str(exclude_folder_seen_log[directory]),\n ),\n \"warning\",\n )\n dirs[:] = [d for d in dirs if not d in EXCLUDE_FOLDERS]\n for name in filenames:\n files.append(os.path.join(root, name))\n size += os.path.getsize(os.path.join(root, name))\n return sorted(files), size", "def _discover_path_importables(\n pkg_pth: Path, pkg_name: str,\n) -> Generator[str, None, None]:\n for dir_path, _d, file_names in os.walk(pkg_pth):\n pkg_dir_path = Path(dir_path)\n\n if pkg_dir_path.parts[-1] == '__pycache__':\n continue\n\n if all(Path(_).suffix != '.py' for _ in file_names):\n continue\n\n rel_pt = pkg_dir_path.relative_to(pkg_pth)\n pkg_pref = '.'.join((pkg_name,) + rel_pt.parts)\n yield from (\n pkg_path\n for _, pkg_path, _ in pkgutil.walk_packages(\n (str(pkg_dir_path),), prefix=f'{pkg_pref}.',\n )\n )", "def prepend_find_packages(*roots):\n packages = []\n \n for root in roots:\n packages += [root]\n packages += [root + '.' 
+ s for s in find_packages(root)]\n \n return packages", "def get_files_from(\n find_path: str, pattern: str, exclude_names: list = None, recursive: bool = True\n):\n found = []\n # For each directory in the given path\n for path_dir in os.scandir(find_path):\n # Check if it should be skipped\n if path_dir.name in exclude_names or os.path.isfile(path_dir):\n continue\n # Run glob inside the folder with the given pattern\n found.extend(\n glob.glob(f\"{find_path}/{path_dir.name}/**/{pattern}\", recursive=recursive)\n )\n # scandir will not look at the files matching the pattern in the\n # current directory.\n found.extend(glob.glob(f\"{find_path}/./{pattern}\"))\n return found", "def determine_files_to_test(*, typeshed_location: str, subdir_paths: Sequence[str]) -> List[Tuple[str, int]]:\n skipped = PathMatcher(load_blacklist(typeshed_location))\n files = []\n for root, _, filenames in itertools.chain.from_iterable(os.walk(p) for p in subdir_paths):\n for f in sorted(f for f in filenames if f.endswith(\".pyi\")):\n f = os.path.join(root, f)\n rel = _get_relative(f)\n if skipped.search(rel):\n continue\n if _is_version(f, \"2and3\"):\n files.append((f, 2))\n files.append((f, 3))\n elif _is_version(f, \"2\"):\n files.append((f, 2))\n elif _is_version(f, \"3\"):\n files.append((f, 3))\n else:\n print(\"Unrecognized path: {}\".format(f))\n return files" ]
[ "0.7080239", "0.6244361", "0.61578315", "0.6144756", "0.60934895", "0.59585524", "0.59447527", "0.59435344", "0.58972776", "0.58844006", "0.5858805", "0.58424145", "0.5818661", "0.57693875", "0.5734302", "0.57293177", "0.56524205", "0.5640559", "0.5630103", "0.5587548", "0.5587382", "0.5585771", "0.557893", "0.5550909", "0.55469906", "0.5544207", "0.5536737", "0.55030346", "0.548144", "0.546252" ]
0.8303452
0
Like os.path.splitext, but take off .tar too
def splitext(path):
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext
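Worked examples of the `.tar` special case (assuming `posixpath` is imported, as the function requires):

import posixpath

print(splitext('dist/pkg-1.0.tar.gz'))  # ('dist/pkg-1.0', '.tar.gz')
print(splitext('dist/pkg-1.0.zip'))     # ('dist/pkg-1.0', '.zip')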
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def split_ext(filepath):\n\t(fn, ext) = os.path.splitext(filepath)\n\tif ext=='.gz':\n\t\t(fn, ext) = os.path.splitext(fn)\n\t\text += '.gz'\n\treturn (fn, ext)", "def splitext( filename ):\n index = filename.find('.')\n if index == 0:\n index = 1+filename[1:].find('.')\n if index == -1:\n return filename, ''\n return filename[:index], filename[index:]\n return os.path.splitext(filename)", "def splitext_no_dot(filename):\n name, ext = os.path.splitext(filename)\n ext.strip('.')\n return name, ext.strip('.')", "def _splitzipext(self, filename):\n\n if self._iszip(filename):\n return os.path.splitext(filename)\n else:\n return filename, None", "def _extr_ext(p: str) -> str:\n file_name = os.path.basename(p)\n _, ext = os.path.splitext(file_name)\n return ext", "def splitexts(path, exts=None):\n exts = []\n ext = os.path.splitext(path)\n while True:\n if len(ext[1]) < 1:\n break\n else:\n exts.append(ext[1])\n ext = os.path.splitext(ext[0])\n exts.reverse()\n return (path, exts)", "def _get_ext(self, path):\n return os.path.splitext(path)[1][1:]", "def file_ext(path):\n result = os.path.splitext(path)[1]\n return result", "def filename_ext(filename):\n base = os.path.basename(filename)\n return os.path.splitext(base)[1][1:]", "def clear_ext(x):\r\n return os.path.splitext(os.path.basename(x))[0]", "def splitext_zip(fname):\n base_fname, ext = splitext(fname)\n if ext == '.gz' or ext == '.zip':\n base_fname, ext2 = splitext(base_fname)\n ext = ''.join([ext2, ext])\n return base_fname, ext", "def filter_pathext(val: Optional[str]) -> str:\n return os.path.splitext(val or '')[1]", "def split_ext(filename):\n parts = filename.split(\".\")\n if len(parts) == 1:\n return filename, \"\"\n\n tail = list(dropwhile(lambda x: len(x) < 5,\n reversed(parts[1:])))\n\n file_parts = parts[:1] + tail[::-1]\n ext_parts = parts[1+len(tail):]\n return \".\".join(file_parts), \".\" + \".\".join(ext_parts)", "def filepath_ext(filepath: str) -> str:\n return pathlib.Path(filepath).suffix", "def fixExt(ext):\n if not ext.startswith(\".\"):\n return \".{}\".format(ext)\n return ext", "def name_sans_ext(self) -> str:\n return os.path.splitext(self.path)[0]", "def get_file_ext(path: str) -> str:\n return os.path.splitext(os.path.basename(path))[1]", "def get_extension(filename: str) -> str:\n return Path(filename).suffix[1:]", "def filename_strip_ext(filename):\n base = os.path.basename(filename)\n # Strip file extension\n return os.path.splitext(base)[0]", "def fextension(filename):\n filename = os.path.normpath(filename)\n return os.path.splitext(filename)[1]", "def splitpath_root_file_ext(path):\r\n head, tail = os.path.split(path)\r\n filename, ext = os.path.splitext(tail)\r\n return head, filename, ext", "def get_fileext(file_path):\n\n # Get rid of directories and etc\n just_file = os.path.basename(file_path)\n\n # Now we return just the base name\n return os.path.splitext(just_file)[1]", "def basename_sans(path):\n return os.path.splitext(os.path.basename(path))[0]", "def _getFileExtension( filepath ):\r\n file = os.path.splitext(filepath.lower())\r\n if len( file ):\r\n return file[1].replace( '.', '' )\r\n else:\r\n return filepath", "def get_file_ext(filename):\n return filename.rsplit('.', 1)[1]", "def strip_extension(filename):\n return filename.rsplit('.',1)[-2]", "def get_file_name_with_ext(path: str) -> str:\n return os.path.basename(path)", "def add_ext_if_needed(fileName, ext):\n ls = fileName.split(\".\")\n if( ( len(ls)==1) or (not (ls[-1] == ext))):\n return fileName + \".\" + ext\n else:\n 
return fileName", "def get_file_ext(self):\n return self.archive.url.split('.')[-1].lower()", "def getExtension(filename):\n return filename[filename.rfind('.'):]" ]
[ "0.7256462", "0.7026987", "0.68934464", "0.6722942", "0.66994846", "0.66748786", "0.66710615", "0.66696054", "0.66661364", "0.6614728", "0.65565413", "0.6543803", "0.6525695", "0.64786565", "0.645979", "0.64352673", "0.64048153", "0.63310933", "0.6310813", "0.62845665", "0.62432796", "0.6223422", "0.6193126", "0.6183168", "0.6173799", "0.61636907", "0.61351883", "0.6131852", "0.6126033", "0.61231935" ]
0.80011797
0
Returns true if all the paths have the same leading path name (i.e., everything is in one subdirectory in an archive)
def has_leading_dir(paths):
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True
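`split_leading_dir` is a companion helper not shown here; with a hypothetical minimal version that splits off the first path component, the behaviour is:

def split_leading_dir(path):
    # hypothetical stand-in for the companion helper
    if '/' in path:
        return path.split('/', 1)
    return '', path

print(has_leading_dir(['pkg/a.py', 'pkg/sub/b.py']))  # True: one top-level dir
print(has_leading_dir(['pkg/a.py', 'other/b.py']))    # False: two top-level dirs
print(has_leading_dir(['a.py']))                      # False: file at archive root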
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def areInSamePaths(path1, path2):\n return areSamePaths(os.path.dirname(path1), os.path.dirname(path2))", "def predicate(path):\n p = os.path.abspath(path)\n return any(p == d or p.startswith(d + os.path.sep)\n for d in directories)", "def _verify_prefix(prefix, files):\n for f in files:\n f = os.path.join(prefix, f)\n if not os.path.exists(f):\n return False\n else:\n return True", "def is_subpath(directory: str, path: str) -> bool:\n directory = os.path.join(os.path.realpath(directory), '')\n path = os.path.realpath(path)\n\n # return true, if the common prefix of both is equal to directory\n # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b\n return os.path.commonprefix([path, directory]) == directory", "def _supports_make_dirs(path):\n prefixes = [\"/bigstore\", \"gs://\"]\n return not any(path.startswith(prefix) for prefix in prefixes)", "def _paths_are_consistent_with_hash_prefixes(self, paths,\n path_hash_prefixes):\n\n # Assume that 'paths' and 'path_hash_prefixes' are inconsistent until\n # proven otherwise.\n consistent = False\n\n if len(paths) > 0 and len(path_hash_prefixes) > 0:\n for path in paths:\n path_hash = self._get_target_hash(path)\n # Assume that every path is inconsistent until proven otherwise.\n consistent = False\n\n for path_hash_prefix in path_hash_prefixes:\n if path_hash.startswith(path_hash_prefix):\n consistent = True\n break\n\n # This path has no matching path_hash_prefix. Stop looking further.\n if not consistent: break\n\n return consistent", "def same_folders(src1, src2):\n dcmp = dircmp(src1, src2)\n if dcmp.left_only or dcmp.right_only:\n return False\n for sub_dcmp in dcmp.subdirs.values():\n same_folders(sub_dcmp.left, sub_dcmp.right)\n\n return True", "def _is_subdir(dir1, dir2):\n r1 = os.path.realpath(dir1)\n r2 = os.path.realpath(dir2)\n if r1.startswith(r2):\n return True\n return False", "def _issubpath(self, a, b):\n p1 = a.rstrip(os.sep).split(os.sep)\n p2 = b.rstrip(os.sep).split(os.sep)\n return p1[:len(p2)] == p2", "def common_path_prefix(paths, sep=os.path.sep):\n def allnamesequal(name):\n return all(n==name[0] for n in name[1:])\n bydirectorylevels = zip(*[p.split(sep) for p in paths])\n return sep.join(x[0] for x in takewhile(allnamesequal, bydirectorylevels))", "def is_in_directory(f):\n f = os.path.dirname(f) + os.path.sep\n return any(f.startswith(d) for d in dirs_to_group)", "def is_astro_dir(path):\n # is this required?\n p0 = glob.glob(os.path.join(path, '..', '*.avi'))\n\n p1 = glob.glob(os.path.join(path, \"*.bas.h5\"))\n p2 = glob.glob(os.path.join(path, \"..\", \"*.metadata.xml\"))\n return all(len(x) != 0 for x in [p1, p2, p0])", "def contains_dir_path(file_name: str) -> bool:\n return os.path.sep in file_name", "def test_relativezip(self):\r\n relativezip.relativezip(self.path, self.zip_fn)\r\n zf = zipfile.ZipFile(self.zip_fn)\r\n observed = set(zf.namelist())\r\n # zipfile uses '/' as path separator no matter the OS\r\n # so we can't use os.path.join reliably in the next line.\r\n expected = set([self.archivedir + '/' + f for f in self.files_to_archive])\r\n self.assertEquals(observed, expected)", "def _is_path_inside_output_dir(self, path: str) -> bool:\n real_output_dir = os.path.realpath(self._output_dir)\n real_file_path = os.path.realpath(path)\n return os.path.commonpath([real_output_dir, real_file_path]) == real_output_dir", "def is_dir(self, path):", "def _is_dir(path: str)->bool:\n if _is_s3(path):\n return path.endswith(\"/\")\n else:\n return os.path.isdir(os.path.abspath(path))", "def 
is_in_folder(base_path, directory):\n return op.normpath(directory).startswith(base_path)", "def _dir_empty(path):\n try:\n next(os.scandir(str(path)))\n except StopIteration:\n return True\n return False", "def is_subdir ( dirpath,\n _path_el=PARENT_PATH, _path_len=len( PARENT_PATH ), _fs_sep=fs_sep\n ):\n dirpath_el = dirpath.rstrip ( _fs_sep ).split ( _fs_sep )\n if len ( dirpath_el ) < _path_len:\n return False\n else:\n return all (\n this == expect for this, expect in zip ( dirpath_el, _path_el )\n )", "def is_path_matched(input_path, nfs_export_details):\n\n input_path = input_path[:-1] if input_path[-1] == \"/\" else input_path\n if nfs_export_details['path'] != input_path:\n return False\n return True", "def ignore(ignored_dirs, path):\n return any([normpath(path).startswith(ignore_dir) for ignore_dir in ignored_dirs])", "def check_paths( self ):\n check_a = utility_code.checkDirectoryExistence( self.PATH_TO_SOURCE_FILE_DIRECTORY )\n check_b = utility_code.checkDirectoryExistence( self.PATH_TO_ARCHIVES_ORIGINALS_DIRECTORY )\n check_c = utility_code.checkDirectoryExistence( self.PATH_TO_ARCHIVES_PARSED_DIRECTORY )\n check_d = utility_code.checkDirectoryExistence( self.PATH_TO_PARSED_ANNEX_DATA_DIRECTORY )\n check_e = utility_code.checkDirectoryExistence( self.PATH_TO_PARSED_ANNEX_COUNT_DIRECTORY )\n if check_a == 'exists' and check_b == 'exists' and check_c == 'exists' and check_d == 'exists' and check_e == 'exists':\n log.debug( 'path check passed' )\n else:\n message='path check failed; quitting'\n log.error( message )\n sys.exit( message )\n return", "def validpath(self, path):\n root = self.realpath(self.root)\n path = self.realpath(path)\n if not self.root.endswith(os.sep):\n root = self.root + os.sep\n if not path.endswith(os.sep):\n path = path + os.sep\n if path[0:len(root)] == root:\n return True\n return False", "def in_folder(self):\n return len(os.path.split(self.file_path)) > 1", "def hasAbsPath(self, test_path):\n test_path = os.path.abspath(test_path)\n for path in self.paths:\n ap = os.path.abspath(path)\n if test_path.startswith(ap):\n return True\n\n return False", "def matches_path(cls, path):\n return path.startswith('/') or \\\n path.startswith('./') or \\\n path.startswith('../') or \\\n path.startswith('file://')", "def verify_paths(paths=DEFAULT_PATHS):\n if isinstance(paths, dict):\n paths = list(paths.values())\n for path in paths:\n if os.path.exists(path):\n continue\n if os.path.isdir(path):\n os.mkdir(path)\n continue\n return False\n return True", "def issubpath(filename, superpath, trueifsame = True):\n filename = os.path.abspath(filename)\n superpath = os.path.abspath(superpath)\n if filename.startswith(superpath + os.sep) or (trueifsame is True and filename == superpath):\n return(True)\n else:\n return(False)", "def _is_child_path(path, parent_path, link_name=None):\n b_path = to_bytes(path, errors='surrogate_or_strict')\n\n if link_name and not os.path.isabs(b_path):\n # If link_name is specified, path is the source of the link and we need to resolve the absolute path.\n b_link_dir = os.path.dirname(to_bytes(link_name, errors='surrogate_or_strict'))\n b_path = os.path.abspath(os.path.join(b_link_dir, b_path))\n\n b_parent_path = to_bytes(parent_path, errors='surrogate_or_strict')\n return b_path == b_parent_path or b_path.startswith(b_parent_path + to_bytes(os.path.sep))" ]
[ "0.68663764", "0.66934335", "0.6670413", "0.6565162", "0.6515844", "0.6405397", "0.6379137", "0.6338458", "0.6308368", "0.62838936", "0.62717277", "0.6256882", "0.6211285", "0.61961395", "0.6110255", "0.609404", "0.6081909", "0.60518205", "0.6041078", "0.6039395", "0.6034955", "0.59587854", "0.59540206", "0.595063", "0.5942128", "0.5928291", "0.5917982", "0.5892713", "0.5886329", "0.5868202" ]
0.8242421
0
Quote command-line arguments for DOS/Windows conventions. Just wraps every argument which contains blanks in double quotes, and returns the argument list (modified in place).
def _nt_quote_args(args):
    # XXX this doesn't seem very robust to me -- but if the Windows guys
    # say it'll work, I guess I'll have to accept it.  (What if an arg
    # contains quotes?  What other magic characters, other than spaces,
    # have to be escaped?  Is there an escaping mechanism other than
    # quoting?)
    for i, arg in enumerate(args):
        if ' ' in arg:
            args[i] = '"%s"' % arg
    return args
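For example (note the input list is mutated as well as returned):

args = ['gcc', '-o', 'my prog.exe', 'main.c']
print(_nt_quote_args(args))  # ['gcc', '-o', '"my prog.exe"', 'main.c']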
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def quote_arguments(args):\n if isinstance(args, str):\n args_list = [args]\n else:\n args_list = args\n\n quoted_args = []\n for a in args_list:\n qa = _double_up_quotes(a)\n if \" \" in qa or \"'\" in qa:\n qa = \"'\" + qa + \"'\"\n quoted_args.append(qa)\n return \" \".join(quoted_args)", "def _quote_arguments(args):\n return map(lambda x: '\"{}\"'.format(x) if ' ' in x else '{}'.format(x), args)", "def list2cmdline(seq):\n\n result = []\n needquote = False\n for arg in seq:\n bs_buf = []\n\n # Add a space to separate this argument from the others\n if result:\n result.append(' ')\n\n needquote = (\" \" in arg) or (\"\\t\" in arg) or (not arg) or (\"(\" in arg) or (\")\" in arg)\n if needquote:\n result.append('\"')\n\n for c in arg:\n if c == '\\\\':\n # Don't know if we need to double yet.\n bs_buf.append(c)\n elif c == '\"':\n # Double backslashes.\n result.append('\\\\' * len(bs_buf) * 2)\n bs_buf = []\n result.append('\\\\\"')\n else:\n # Normal char\n if bs_buf:\n result.extend(bs_buf)\n bs_buf = []\n result.append(c)\n\n # Add remaining backslashes, if any.\n if bs_buf:\n result.extend(bs_buf)\n\n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n\n return ''.join(result)", "def argument_list_quote(arguments):\n args = []\n for arg in arguments:\n args.append(argument_quote(arg))\n return '\"%s\"' % ' '.join(args)", "def split_and_honor_quotation_marks(cmdline):\n\n # See\n # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp\n\n # Step 1: Translate all literal quotes into QUOTE. Justify number\n # of backspaces before quotes.\n tokens = []\n bs_buf = \"\"\n QUOTE = 1 # \\\", literal quote\n for c in cmdline:\n if c == '\\\\':\n bs_buf += c\n elif c == '\"' and bs_buf:\n # A quote preceded by some number of backslashes.\n num_bs = len(bs_buf)\n tokens.extend([\"\\\\\"] * (num_bs//2))\n bs_buf = \"\"\n if num_bs % 2:\n # Odd. Quote should be placed literally in array\n tokens.append(QUOTE)\n else:\n # Even. This quote serves as a string delimiter\n tokens.append('\"')\n\n else:\n # Normal character (or quote without any preceding\n # backslashes)\n if bs_buf:\n # We have backspaces in buffer. Output these.\n tokens.extend(list(bs_buf))\n bs_buf = \"\"\n\n tokens.append(c)\n\n # Step 2: split into arguments\n result = [] # Array of strings\n quoted = False\n arg = [] # Current argument\n tokens.append(\" \")\n for c in tokens:\n if c == '\"':\n # Toggle quote status\n quoted = not quoted\n arg.append('\"')\n elif c == QUOTE:\n arg.append('\"')\n elif c in (' ', '\\t'):\n if quoted:\n arg.append(c)\n else:\n # End of argument. 
Output, if anything.\n if arg:\n result.append(''.join(arg))\n arg = []\n else:\n # Normal character\n arg.append(c)\n \n return result", "def sh_quote_safe_cmdline(args):\n return str.join(' ', (sh_quote_safe(arg) for arg in args))", "def shlex_join(argv):\n def quote(arg):\n if arg.find(\" \") >= 0:\n return '\"%s\"' % arg\n else:\n return arg\n return \" \".join([quote(arg) for arg in argv])", "def sh_quote_unsafe_cmdline(args):\n return str.join(' ', (sh_quote_unsafe(arg) for arg in args))", "def expand_args(cmd_args):\n if isinstance(cmd_args, (tuple, list)):\n args_list = list(cmd_args)\n else:\n args_list = shlex.split(cmd_args)\n return args_list", "def _merge_quote_args(self, args_list):\n\n if len(args_list) <= 1:\n return args_list\n \n index = 0\n while index < len(args_list):\n # if the current argument starts with a quote but does not end with a quote,\n # then the argument must have been wrongly split.\n if args_list[index].startswith(\"\\\"\"):\n while index+1 < len(args_list):\n if _ends_in_unescaped_quote(args_list[index].strip(\".\")):\n break\n args_list[index] += \", \" + args_list[index+1]\n args_list.pop(index+1)\n index += 1\n\n return args_list", "def _joinArgv(argv):\n cmdstr = \"\"\n for arg in argv:\n if ' ' in arg:\n cmdstr += '\"%s\"' % _escapeArg(arg)\n else:\n cmdstr += _escapeArg(arg)\n cmdstr += ' '\n if cmdstr.endswith(' '): cmdstr = cmdstr[:-1] # strip trailing space\n return cmdstr", "def argv(self) -> List[str]:\n if self.command:\n rtn = [utils.strip_quotes(self.command)]\n for cur_token in self.arg_list:\n rtn.append(utils.strip_quotes(cur_token))\n else:\n rtn = []\n\n return rtn", "def split_args(args):\n words = []\n quoted_words = []\n\n quoted = re.compile('\"([^\"]+)\"')\n for value in quoted.findall(args):\n quoted_words.append(value)\n \n new_str = args\n\n for i in quoted_words:\n new_str = re.sub('\"[^\"]+\"', '', new_str)\n\n for i in new_str.split():\n words.append(i)\n \n words.extend(quoted_words)\n \n return words", "def split_arguments(s, windows=IS_WINDOWS):\n # from http://stackoverflow.com/a/35900070\n if windows:\n RE_CMD_LEX = r'''\"((?:\"\"|\\\\[\"\\\\]|[^\"])*)\"?()|(\\\\\\\\(?=\\\\*\")|\\\\\")'\n r'|(&&?|\\|\\|?|\\d?>|[<])|([^\\s\"&|<>]+)|(\\s+)|(.)'''\n else:\n RE_CMD_LEX = r'''\"((?:\\\\[\"\\\\]|[^\"])*)\"|'([^']*)'|(\\\\.)|'\n r'(&&?|\\|\\|?|\\d?\\>|[<])|([^\\s'\"\\\\&|<>]+)|(\\s+)|(.)'''\n\n args = []\n accu = None # collects pieces of one arg\n for qs, qss, esc, pipe, word, white, fail in re.findall(RE_CMD_LEX, s):\n if word:\n pass # most frequent\n elif esc:\n word = esc[1]\n elif white or pipe:\n if accu is not None:\n args.append(accu)\n if pipe:\n args.append(pipe)\n accu = None\n continue\n elif fail:\n raise ValueError(\"invalid or incomplete shell string\")\n elif qs:\n word = qs.replace('\\\\\"', '\"').replace('\\\\\\\\', '\\\\')\n if windows:\n word = word.replace('\"\"', '\"')\n else:\n word = qss # may be even empty; must be last\n\n accu = (accu or '') + word\n\n if accu is not None:\n args.append(accu)\n\n return args", "def SplitCommandLineIntoArgv(space_delimited_argv, posix=True):\n try:\n return map(lambda s: s.decode('utf-8'),\n shlex.split(space_delimited_argv.encode('utf-8'),\n comments=FLAGS.pyatdl_allow_command_line_comments,\n posix=posix))\n except ValueError as e:\n raise ShlexSyntaxError('Cannot parse command line. 
%s' % str(e))", "def argumentsParser(args):\n\targuments = []\n\tif args.find('\"') > -1:\n\t\tt_arguments = args.split('\"')\n\t\tfor a in t_arguments:\n\t\t\tif a == '' or a == ' ':\n\t\t\t\tpass\n\t\t\telif a[-1] == ' ':\n\t\t\t\targuments.append(a[:-1])\n\t\t\telse:\n\t\t\t\targuments.append(a)\n\telif args.find(\"'\") > -1:\n\t\tt_arguments = args.split(\"'\")\n\t\tfor a in t_arguments:\n\t\t\tif a == '' or a == ' ':\n\t\t\t\tpass\n\t\t\telif a[-1] == ' ':\n\t\t\t\targuments.append(a[:-1])\n\t\t\telse:\n\t\t\t\targuments.append(a)\n\telif args == ' ':\n\t\tpass\n\telse:\n\t\targuments = args.split(' ')\n\treturn arguments", "def _normalize_args(args):\n if isinstance(args, str):\n return shlex.split(args)\n\n if isinstance(args, (tuple, list)):\n return [str(arg) for arg in args]\n else:\n return [str(args)]", "def shquote(arg):\n for c in '\"', \"'\", \"\\\\\", \"#\":\n if c in arg:\n return repr(arg)\n if arg.split() != [arg]:\n return repr(arg)\n return arg", "def argument_quote(argument):\n argument = argument.replace('\"', '\"\"')\n if ' ' in argument:\n argument = argument.replace(\"'\", \"''\")\n argument = \"'\" + argument + \"'\"\n return argument", "def _escapeArg(arg):\n #XXX There is a *lot* more that we should escape here.\n return arg.replace('\"', r'\\\"')", "def shell_command(self):\n # TODO: fix this naive version by adding quotes where appropriate\n return \" \".join(self.args)", "def shellquote(arg):\n if re.match('^[-_.:/=a-zA-Z0-9]*$', arg):\n return arg\n else:\n return \"'%s'\" % arg.replace(\"'\", r\"'\\''\")", "def posix_command(command, *args, **kwargs):\n # pylint: disable = redefined-outer-name\n return ' '.join([\n \"'%s'\" % (token.replace(\"'\", \"'\\\\''\")) if needq(token) else token\n for token in map(_make_formatter(*args, **kwargs),\n split_command(command))\n ])", "def _make_posix_command():\n qsearch = _re.compile(r'[^a-zA-Z\\d_./-]').search\n needq = lambda x: not x or qsearch(x)\n\n def posix_command(command, *args, **kwargs):\n \"\"\"\n Return a POSIX shell suitable commandline\n\n Either args or kwargs or neither of them can be set. There cannot be\n set both of them.\n\n :Parameters:\n `command` : ``str``\n Generic commandline, possibly containing substitutions, filled by\n args or kwargs. 
See `split_command` for generic commandline\n syntax.\n\n `args` : ``tuple``\n Substitution tuple\n\n `kwargs` : ``dict``\n Substitution dict\n\n :Return: Strictly quoted shell commandline for POSIX shells\n :Rtype: ``str``\n \"\"\"\n # pylint: disable = redefined-outer-name\n return ' '.join([\n \"'%s'\" % (token.replace(\"'\", \"'\\\\''\")) if needq(token) else token\n for token in map(_make_formatter(*args, **kwargs),\n split_command(command))\n ])\n return posix_command", "def sh_quote_unsafe(arg):\n return ('\"' + _DQUOTE_RE.sub(r'\\1\\1\\\"', str(arg)) + '\"' )", "def win32_command(command, *args, **kwargs):\n # pylint: disable = redefined-outer-name\n return ' '.join([metasub(\n '\"%s\"' % (slashsub(token).replace('\"', '\\\\\"'),)\n if needq(token) else token\n ) for token in map(_make_formatter(*args, **kwargs),\n split_command(command))])", "def _argsForSubprocess(self) -> list[str]:\n pass", "def safe_args(args,\r\n max_args=None,\r\n config=None,\r\n argfile=None,\r\n delimiter='\\n',\r\n quoter=None,\r\n delete=True):\r\n max_args = max_args or (config or Config.load()).getdefault('max_subprocess_args', int, 10)\r\n if len(args) > max_args:\r\n def create_argfile(fp):\r\n fp.write(delimiter.join(args))\r\n fp.close()\r\n return [quoter(fp.name) if quoter else '@%s' % fp.name]\r\n\r\n if argfile:\r\n try:\r\n with safe_open(argfile, 'w') as fp:\r\n yield create_argfile(fp)\r\n finally:\r\n if delete and os.path.exists(argfile):\r\n os.unlink(argfile)\r\n else:\r\n with temporary_file(cleanup=delete) as fp:\r\n yield create_argfile(fp)\r\n else:\r\n yield args", "def cmdify(self):\n return \" \".join(\n itertools.chain(\n [_quote_if_contains(self.command, r\"[\\s^()]\")],\n (_quote_if_contains(arg, r\"[\\s^]\") for arg in self.args),\n )\n )", "def AddQuoteArgument(parser):\n parser.add_argument(\n '--quote',\n help=(\n 'Specifies the character that encloses values from columns that have '\n 'string data type. The value of this argument has to be a character '\n 'in Hex ASCII Code. For example, \"22\" represents double quotes. '\n 'This flag is only available for MySQL and Postgres. If this flag is '\n 'not provided, double quotes character will be used as the default '\n 'value.'\n ),\n )" ]
[ "0.727529", "0.7251283", "0.72008514", "0.685263", "0.67948896", "0.6764882", "0.6530536", "0.6420306", "0.637898", "0.63067955", "0.6251415", "0.62015647", "0.6195781", "0.6184979", "0.6129439", "0.6106405", "0.6061542", "0.5926107", "0.5858475", "0.5774904", "0.5765774", "0.5752493", "0.5748873", "0.5728083", "0.57158893", "0.5705734", "0.56873274", "0.5653277", "0.56440216", "0.56302" ]
0.78102744
0
Creates a default .pypirc file.
def generate_pypirc(username, password):
    rc = get_pypirc_path()
    f = open(rc, 'w')
    try:
        f.write(DEFAULT_PYPIRC % (username, password))
    finally:
        f.close()
    try:
        os.chmod(rc, 0600)
    except OSError:
        # should do something better here
        pass
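A minimal usage sketch for the record above. ``get_pypirc_path`` mirrors the helper that appears among the negatives below; ``DEFAULT_PYPIRC`` is an assumed stand-in template with the two ``%s`` slots the function expects, and since the record's code uses the Python 2 octal literal ``0600``, the sketch assumes a Python 2 interpreter.

# Sketch only: DEFAULT_PYPIRC is a hypothetical stand-in; the real module
# defines its own template. get_pypirc_path mirrors the helper shown in
# the negatives for this record.
import os

DEFAULT_PYPIRC = """\
[distutils]
index-servers =
    pypi

[pypi]
username:%s
password:%s
"""

def get_pypirc_path():
    return os.path.join(os.path.expanduser('~'), '.pypirc')

generate_pypirc('alice', 's3cret')  # writes ~/.pypirc, then chmods it to 0600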
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_default_config(self, parser):\n parser.add_section('irc')\n parser.set('irc', 'channels', '')\n \n # create the full path, and the file\n try:\n os.makedirs(self.config_dir_path, mode=0700)\n except OSError:\n pass\n file_resource = open(self.config_file_path, 'w')\n parser.write(file_resource)", "def _create_protocol_file(\n path_to_protocol_package: str, file_name: str, file_content: str\n) -> None:\n pathname = os.path.join(path_to_protocol_package, file_name)\n\n with open_file(pathname, \"w\") as file:\n file.write(file_content)", "def action(self, args):\n create_new_scratch_file(args.file, self.settings, py_template_func)", "def create(self):\n self.create_file()", "def create(self):\n\t\tlipsBaseFile.imp()", "def create_file():\n with open(\"example.txt\", \"w\") as file:\n file.write(\"\")", "def create_new_python_project():\n\t# Create the different variables\n\tfolder_name = str(sys.argv[1])\n\tdir_name = my_project_folder + folder_name\n\tpy_file = dir_name + '/' + folder_name + '.py'\n\treadme_file = dir_name + '/' + 'README.md'\n\ttodo_file = dir_name + '/' + 'TODO.txt'\n\n\t# Create directory if it does not exist yet\n\tif not os.path.exists(dir_name):\n\t\tos.mkdir(dir_name)\n\t\tprint(\"Directory \" , dir_name , \" Created \")\n\n\t\t# Create Python file\n\t\tdata = ''\n\t\twith open(template_py, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(py_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Python file created\")\n\n\t\t# Create README file\n\t\tdata = ''\n\t\twith open(template_readme, 'r') as file:\n\t\t\tdata += file.read()\n\n\t\twith open(readme_file, 'w') as f:\n\t\t\tf.write(data)\n\t\t\tprint(\"Readme file created\")\n\n\t\t# Create Todo file\n\t\twith open(todo_file, 'w') as f:\n\t\t\tprint(\"TODO file created\")\n\n\t\t# Create Github repo\n\t\twith open(\".env\", \"r\") as f:\n\t\t\tdata = f.read()\n\n\t\tindex_1 = data.find('TOKEN=\"') + len('TOKEN=\"')\n\t\ttoken = data[index_1:-1]\n\t\tg = Github(token)\n\t\tuser = g.get_user()\n\t\trepo = user.create_repo(folder_name)\n\t\tprint(\"Succesfully created repository {}\".format(folder_name))\n\n\n\telse: \n\t\tprint(\"Directory \" , dir_name , \" already exists\")", "def create_file(self, value=None):\n if not path.isdir(\"Project\"):\n system(\"mkdir Project\")\n string_to_systemize = \"echo \\\"#!/usr/bin/python\\n\" + \\\n \"# Please use fp = open(\\'Project/yourfile.*\\') \" + \\\n \"when opening YOUR files\\n\" + \\\n \"# to not lose YOUR file in the jumble of OTHER files.\\n\" + \\\n \"# Also, do NOT delete the very first comment line.\\n\" + \\\n \"# \\'logs.txt\\' is your friend for your error logs.\\\"\" + \\\n \"> Project/myfile.py\"\n system(string_to_systemize)\n system(\"chmod +x Project/myfile.py\")\n self.open_file()", "def make_settings(pypirc):\n default_pypirc = \"\"\"\n [pypi]\n username:foo\n password:bar\n \"\"\"\n\n def _settings(pypirc_text=default_pypirc, **settings_kwargs):\n pypirc.write(textwrap.dedent(pypirc_text))\n\n settings_kwargs.setdefault(\"sign_with\", None)\n settings_kwargs.setdefault(\"config_file\", str(pypirc))\n\n return settings.Settings(**settings_kwargs)\n\n return _settings", "def create_file(file_name: str, startup_text: str) -> None:\n with open(file_name, 'w') as f:\n f.write(startup_text)", "def write_default_config():\n # TODO: BROKEN!\n config_path = pathlib.Path(xdg.BaseDirectory.xdg_config_home) / \"awiesm_bc\"\n config_file = config_path / DEFAULT_CONFIG_FILENAME\n if not os.path.isdir(config_path):\n 
os.makedirs(config_path)\n\n if not os.path.isfile(config_file):\n # TODO: write file\n pass", "def get_pypirc_path():\n return os.path.join(os.path.expanduser('~'), '.pypirc')", "def create_config():\n check_config()\n\n cprint(\"%sWriting python executable to %s\" % (OUT_PRFX, PYWS_DIR_BIN), OUT_STD_COLOR)\n fs.write(\"%s/python\" % PYWS_DIR_BIN, \"#! /usr/bin/python\\nimport novenv\\nnovenv.python()\")\n fs.chmod(\"%s/python\" % PYWS_DIR_BIN, stat.S_IEXEC)\n\n cprint(\"%sWriting python3 executable to %s\" % (OUT_PRFX, PYWS_DIR_BIN), OUT_STD_COLOR)\n fs.write(\"%s/python3\" % PYWS_DIR_BIN, \"#! /usr/bin/python\\nimport novenv\\nnovenv.python(version=3)\")\n fs.chmod(\"%s/python3\" % PYWS_DIR_BIN, stat.S_IEXEC)\n \n cprint(\"%sWriting pip executable to %s\" % (OUT_PRFX, PYWS_DIR_BIN), OUT_STD_COLOR)\n fs.write(\"%s/pip\" % PYWS_DIR_BIN, \"#! /usr/bin/python\\nimport novenv\\nnovenv.pip()\")\n fs.chmod(\"%s/pip\" % PYWS_DIR_BIN, stat.S_IEXEC)\n\n cprint(\"%sWriting pip3 executable to %s\" % (OUT_PRFX, PYWS_DIR_BIN), OUT_STD_COLOR)\n fs.write(\"%s/pip3\" % PYWS_DIR_BIN, \"#! /usr/bin/python\\nimport novenv\\nnovenv.pip(version=3)\")\n fs.chmod(\"%s/pip3\" % PYWS_DIR_BIN, stat.S_IEXEC)\n\n cprint(\"%sPlease add the %s directory to your path\" % (OUT_PRFX, PYWS_DIR_BIN), OUT_CMD_COLOR)\n cprint(\"%sexport PATH=/home/ckoerner/%s/bin:$PATH\" % (OUT_PRFX_VERBOSE, VENV_DIR), OUT_CMD_COLOR)\n\n cprint(\"%sCheck current python executable with\" % (OUT_PRFX), OUT_CMD_COLOR)\n cprint(\"%swhich python\" % (OUT_PRFX_VERBOSE), OUT_CMD_COLOR)", "def createFile(file):\n file_ = os.path.join(os.getcwd(),file)\n if not(os.path.isfile(file_)):\n with open(file_,\"a\") as f:\n f.close()", "def create_procesed_file(msg, filename, path):\n write_path_txt = os.path.join(path, filename)\n with open(write_path_txt, 'w') as file:\n file.write(str(msg))", "def newfile(self) :\n\n\t\tfrom tempfile import mkstemp\n\t\timport os\n\t\tglobal configurer\n\n\t\tfd,name = mkstemp(suffix='.blend')\n\t\tos.close(fd)\n\t\tself.name = name\n\t\tfd = open(name,'wb', configurer.get('ServerBufferSize'))\n\t\tself.fd = fd\n\t\tprint name\n\t\treturn 1", "def generate_defaults_yaml(username, password):\n\n # Open (or create and open) defaults.yaml file to sotre\n # username/password to SSH to devices\n with open('inventory/defaults.yaml', 'w') as default_file:\n default_file.write(f'---\\nusername: {username}\\npassword: {password}\\n')\n default_file.write('...')", "def new(self):\r\n gen_name = lambda nb: self.tr(\"untitled\") + (\"%d.py\" % nb)\r\n nb = 0\r\n while osp.isfile(gen_name(nb)):\r\n nb += 1\r\n fname = gen_name(nb)\r\n self.emit(SIGNAL('redirect_stdio(bool)'), False)\r\n fname = QFileDialog.getSaveFileName(self, self.tr(\"New Python script\"),\r\n fname, self.tr(\"Python scripts\")+\" (*.py ; *.pyw)\")\r\n self.emit(SIGNAL('redirect_stdio(bool)'), True)\r\n if not fname.isEmpty():\r\n fname = unicode(fname)\r\n default = ['# -*- coding: utf-8 -*-',\r\n '\"\"\"', osp.basename(fname), '\"\"\"', '', '']\r\n text = os.linesep.join(default)\r\n encoding.write(unicode(text), fname, 'utf-8')\r\n self.load(fname)", "def default():\n raise NotImplementedError(\"Pvwattsv7 default file no longer exists!\")", "def file(c, path=local.http_path):\r\n c = conn(c)\r\n print(\"make file repo on {}, path [{}]\".format(c.host, path))\r\n\r\n system.install(c, 'createrepo')\r\n c.run('createrepo {}'.format(path))", "def _createConfigFile(self):\n configFile = self._configFile()\n try:\n with open(configFile) as fh:\n pass\n except 
IOError:\n try:\n with open(configFile, 'w') as fh:\n fh.write(\"[settings]\\n\")\n fh.write(\"debug = false\\n\")\n fh.write(\"hidefilenames = false\\n\")\n except IOError:\n pass", "def createFile():\n with open(filename.strftime(\"%Y-%m-%d-%H\") + \".txt\", \"w\") as file:\n file.write(\"\")", "def default(ns, path=None, create=True, mode=0o644):\n path_file = os.path.abspath(ns['__file__'])\n path_file = os.path.dirname(path_file)\n path_file = os.path.dirname(path_file)\n\n if path:\n path_file = os.path.join(path_file, path)\n parts = _name_to_parts(ns['__name__'])\n path_file = os.path.join(path_file, *parts)\n path_file = os.path.abspath(path_file)\n if create and not os.path.exists(path_file):\n try:\n os.makedirs(path_file, mode=mode)\n except OSError:\n pass\n\n return path_file", "def create(name):\n if not SchModule._ready:\n raise ValueError(\"not mounted\")\n\n schdir = SchModule.DIR.hpath(name)\n\n if path.exists(schdir):\n raise Exception(\"Already exists\")\n\n # create this scheme directory\n os.makedirs(schdir)\n\n with codecs.open(path.join(schdir, SchModule.DESCR), \"w\", \"utf8\") as f:\n timestamp = time.strftime(\"%Y-%m-%d %H:%M:%S\")\n user = os.getenv(\"USER\", os.getenv(\"USERNAME\", \"Unknown\"))\n f.write(SchModule.DESCRTEMPLATE % locals())\n\n with codecs.open(path.join(schdir, SchModule.PYMODULE), \"w\", \"utf8\") as f:\n f.write(SchModule.PYMODULETEMPLATE)", "def create_default_finmagrc_file():\n import logging\n logger = logging.getLogger(\"finmag\")\n\n if not any([os.path.exists(f) for f in CONFIGURATION_FILES]):\n try:\n write_finmagrc_template_to_file(os.path.expanduser('~/.finmagrc'))\n logger.info(\n \"Created default configuration in '~/.finmagrc' because no \"\n \"Finmag configuration file was found. Please review the \"\n \"settings and adapt them to your liking.\")\n except IOError as e:\n logger.info(\n \"Could not create default configuration file '~/.finmagrc' \"\n \"(reason: {}). Please create one manually.\".format(e.strerror))", "def make_pref_file():\r\n pref_dict = {\"default_user\": None}\r\n\r\n with open(os.path.join(os.path.dirname(__file__), \"preferences.json\"), \"w\") as pref:\r\n pref.write(json.dumps(pref_dict, indent=4))\r\n\r\n return pref_dict", "def create_new_project():\n readline.parse_and_bind('tab: complete')\n\n print \\\n\"\"\"\n xbmcswift2 - A micro-framework for creating XBMC plugins.\n [email protected]\n --\n\"\"\"\n print 'I\\'m going to ask you a few questions to get this project' \\\n ' started.'\n\n # noinspection PyDictCreation\n opts = {}\n\n # Plugin Name\n opts['plugin_name'] = get_valid_value(\n 'What is your plugin name?',\n validate_nonblank\n )\n\n # Plugin ID\n opts['plugin_id'] = get_valid_value(\n 'Enter your plugin id.',\n validate_pluginid,\n 'plugin.video.%s' % (opts['plugin_name'].lower().replace(' ', ''))\n )\n\n # Parent Directory\n opts['parent_dir'] = get_valid_value(\n 'Enter parent folder (where to create project)',\n validate_isfolder,\n getcwd()\n )\n opts['plugin_dir'] = os.path.join(opts['parent_dir'], opts['plugin_id'])\n assert not os.path.isdir(opts['plugin_dir']), \\\n 'A folder named %s already exists in %s.' 
% (opts['plugin_id'],\n opts['parent_dir'])\n\n # Provider\n opts['provider_name'] = get_valid_value(\n 'Enter provider name',\n validate_nonblank,\n )\n\n # Create the project folder by copying over skel\n copytree(SKEL, opts['plugin_dir'], ignore=ignore_patterns('*.pyc'))\n\n # Walk through all the new files and fill in with out options\n for root, dirs, files in os.walk(opts['plugin_dir']):\n for filename in files:\n update_file(os.path.join(root, filename), opts)\n\n print 'Projects successfully created in %s.' % opts['plugin_dir']\n print 'Done.'", "def create_template_ini_file():\n if not os.path.isfile(API_KEYS_LOCATION):\n with open(API_KEYS_LOCATION, 'w') as f:\n f.write('[openai]\\n')\n f.write('organization_id=\\n')\n f.write('secret_key=\\n')\n\n print('OpenAI API config file created at {}'.format(API_KEYS_LOCATION))\n print('Please edit it and add your organization ID and secret key')\n print('If you do not yet have an organization ID and secret key, you\\n'\n 'need to register for OpenAI Codex: \\n'\n 'https://openai.com/blog/openai-codex/')\n sys.exit(1)", "def create_settings_file():\n with open('./cfg/settings.cfg'.replace(\"/\", os.path.sep), 'w') as cfg:\n cfg.write('[report]\\nlogo = ./cfg/logo.png\\ncompany =\\nrecord =\\nunit =\\nexaminer =\\nnotes =\\n\\n[auth]\\ngmail = [email protected]\\npassw = yourpassword\\ndevid = 1234567887654321\\ncelnumbr = BackupPhoneNunmber\\n\\n[app]\\npkg = com.whatsapp\\nsig = 38a0f7d505fe18fec64fbf343ecaaaf310dbd799\\n\\n[client]\\npkg = com.google.android.gms\\nsig = 38918a453d07199354f8b19af05ec6562ced5788\\nver = 9877000'.replace(\"/\", os.path.sep))", "def create_file(name, text=None):\n\n if os.path.exists(config_tools.full_dest+name):\n print(f\"{name} уже существует, для записи текста введите его ниже, для завершения команды введите 'no': \")\n answer = input()\n if answer != \"no\":\n with open(config_tools.full_dest + name, 'a', encoding='utf-8') as fi:\n fi.write(answer)\n print(f\"В {name} успешно записан текст\")\n elif answer == \"no\":\n quit()\n else:\n with open(config_tools.full_dest + name, 'w', encoding='utf-8') as fi:\n print(f\"{name} успешно создан\")\n if text:\n fi.write(text)\n print(f\"В {name} успешно записан текст\")" ]
[ "0.6558019", "0.62628806", "0.58864355", "0.573295", "0.5716338", "0.5681678", "0.56687874", "0.5664316", "0.5623866", "0.5536374", "0.553043", "0.5503424", "0.5500698", "0.5489507", "0.5476311", "0.54706043", "0.5446161", "0.54202074", "0.5416349", "0.5378271", "0.5377565", "0.5342813", "0.5309702", "0.52941865", "0.5291853", "0.52478623", "0.52471465", "0.52379483", "0.5237481", "0.5237072" ]
0.74003255
0
Wrapper function around the 2to3 ``RefactoringTool.refactor()`` call, which performs the conversions on a list of Python files. Invoke 2to3 on a list of Python files. The files should all come from the build area, as the modification is done in place.
def run_2to3(files, doctests_only=False, fixer_names=None, options=None,
             explicit=None):

    #if not files:
    #    return

    # Make this class local, to delay import of 2to3
    from lib2to3.refactor import get_fixers_from_package, RefactoringTool
    fixers = []
    fixers = get_fixers_from_package('lib2to3.fixes')

    if fixer_names:
        for fixername in fixer_names:
            fixers.extend([fixer for fixer in
                           get_fixers_from_package(fixername)])
    r = RefactoringTool(fixers, options=options)
    if doctests_only:
        r.refactor(files, doctests_only=True, write=True)
    else:
        r.refactor(files, write=True)
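A short invocation sketch for the function above — the file paths are illustrative, and the files are assumed to be copies in a build area since lib2to3 rewrites them in place.

# Illustrative paths only; run_2to3 rewrites the listed files in place.
files = ['build/lib/mypkg/core.py', 'build/lib/mypkg/util.py']
run_2to3(files)                           # apply all fixers from lib2to3.fixes
run_2to3(files, doctests_only=True)       # only rewrite code inside doctests
run_2to3(files, fixer_names=['myfixes'])  # 'myfixes' is a hypothetical extra fixer package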
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _refactor_2to3(self, path):\n # self.logger.debug('Refactoring: %s' % path)\n source, encoding = self.refactoring_tool._read_python_source(path)\n\n source += '\\n' # Silence certain parse errors.\n tree = self.refactoring_tool.refactor_string(source, path)\n return str(tree)[:-1], encoding # Take off the '\\n' added earlier.", "def run_2to3(self, files, doctests_only=False):\n return run_2to3(files, doctests_only, self.fixer_names,\n self.options, self.explicit)", "def command_py2to3(args):\n from lib2to3.main import main\n args2 = []\n if command_py2to3_work_around3k:\n if args.no_diffs:\n args2.append(\"--no-diffs\")\n if args.write:\n args2.append(\"-w\")\n if args.nobackups:\n args2.append(\"-n\")\n args2.extend(args.sources)\n sys.exit(main(\"lib2to3.fixes\", args=args2))", "def refactor(path: str, files: List):\n skipped = []\n for filename in files:\n try:\n number = get_number_from_name(filename)\n except AttributeError:\n skipped.append(os.path.join(path, filename))\n continue\n new_number = update_number(number)\n\n file_path = os.path.join(path, filename)\n new_file_path = file_path.replace(number, new_number)\n\n with open(file_path, 'r') as file:\n data = file.read()\n data = data.replace(number, new_number)\n with open(file_path, 'w') as file:\n file.write(data)\n\n os.rename(file_path, new_file_path)\n return skipped", "def update_23(db, filename_persist, snapshots_dir, snapshots_reference_dir):\n text = \"\"\"\n test/test_fadeto.py\n test/test_draw_elbows2.py\n \"\"\"\n candidates = doers.scripts_names_from_text(text, end_mark=':')\n checked_in, unknown, move_failed = hl.update_testrun__pass(db,\n filename_persist, candidates,\n snapshots_dir, snapshots_reference_dir) \n\n return checked_in, unknown, move_failed", "def upgrade_to_2():\n\n def update_file_origins(cont_list, cont_name):\n for container in cont_list:\n updated_files = []\n for file in container.get('files', []):\n origin = file.get('origin')\n if origin is not None:\n if origin.get('name', None) is None:\n file['origin']['name'] = origin['id']\n if origin.get('method', None) is None:\n file['origin']['method'] = ''\n updated_files.append(file)\n\n query = {'_id': container['_id']}\n update = {'$set': {'files': updated_files}}\n result = config.db[cont_name].update_one(query, update)\n\n query = {'$and':[{'files.origin.name': { '$exists': False}}, {'files.origin.id': { '$exists': True}}]}\n\n update_file_origins(config.db.collections.find(query), 'collections')\n update_file_origins(config.db.projects.find(query), 'projects')\n update_file_origins(config.db.sessions.find(query), 'sessions')\n update_file_origins(config.db.acquisitions.find(query), 'acquisitions')", "def lint(files):\n # pylint: disable=unused-import\n if not files:\n files = (\"src/tests\", \"src/za_covid_map\")\n\n try:\n import flake8\n import isort\n except ImportError as exc:\n raise KedroCliError(NO_DEPENDENCY_MESSAGE.format(exc.name))\n\n python_call(\"flake8\", (\"--max-line-length=88\",) + files)\n python_call(\"isort\", (\"-rc\", \"-tc\", \"-up\", \"-fgw=0\", \"-m=3\", \"-w=88\") + files)\n\n if sys.version_info[:2] >= (3, 6):\n try:\n import black\n except ImportError:\n raise KedroCliError(NO_DEPENDENCY_MESSAGE.format(\"black\"))\n python_call(\"black\", files)", "def forwards_func(apps, schema_editor):\n judge_file = apps.get_model(\"judges\", \"JudgeFile\")\n db_alias = schema_editor.connection.alias\n judge_file.objects.using(db_alias).filter(\n file_type='ruling').update(file_type='rulings')\n 
judge_file.objects.using(db_alias).filter(\n file_type='verdict').update(file_type='verdicts')\n judge_file.objects.using(db_alias).filter(\n file_type='transcript').update(file_type='transcripts')\n judge_file.objects.using(db_alias).filter(\n file_type='image').update(file_type='campaigns')", "def rewrite_packaging(pkg_files, new_root):\n for file in pkg_files.glob('*.py'):\n text = file.text()\n text = re.sub(r' (pyparsing)', rf' {new_root}.\\1', text)\n text = text.replace(\n 'from six.moves.urllib import parse',\n 'from urllib import parse',\n )\n file.write_text(text)", "def apply_transforms(action, pkg_attrs, verbose, act_filename, act_lineno):\n comments = []\n newactions = []\n if verbose:\n comments.append(\"# Action: {0}\".format(action))\n for types, attrdict, operation, filename, lineno, transform in transforms:\n if action is None:\n action = PkgAction(pkg_attrs)\n # skip if types are specified and none match\n if types and action.name not in types:\n continue\n # skip if some attrs don't exist\n if set(attrdict.keys()) - set(action.attrs.keys()):\n continue\n\n # Check to make sure all matching attrs actually match. The\n # order is effectively arbitrary, since they come from a dict.\n matches = [\n attrdict[key].match(attrval)\n for key in attrdict\n for attrval in attrval_as_list(action.attrs, key)\n ]\n\n if not all(matches):\n continue\n\n s = transform[11:transform.index(\"->\")]\n # Map each pattern to its position in the original match string.\n matchorder = {}\n for attr, match in attrdict.iteritems():\n # Attributes might be quoted even if they don't need it,\n # and lead to a mis-match. These three patterns are all\n # safe to try. If we fail to find the match expression,\n # it's probably because it used different quoting rules\n # than the action code does, or from these three rules.\n # It might very well be okay, so we go ahead, but these\n # oddly quoted patterns will sort at the beginning, and\n # backref matching may be off.\n matchorder[match.pattern] = -1\n for qs in (\"{0}={1}\", \"{0}=\\\"{1}\\\"\", \"{0}='{1}'\"):\n pos = s.find(qs.format(attr, match.pattern))\n if pos != -1:\n matchorder[match.pattern] = pos\n break\n\n # Then sort the matches list by those positions.\n matches.sort(key=lambda x: matchorder[x.re.pattern])\n\n # time to apply transform operation\n try:\n if verbose:\n orig_attrs = action.attrs.copy()\n action = operation(action, matches, pkg_attrs,\n act_filename, act_lineno)\n except RuntimeError as e:\n raise RuntimeError(\"Transform specified in file {0}, line {1} reports {2}\".format(\n filename, lineno, e))\n if isinstance(action, tuple):\n newactions.append(action[0])\n action = action[1]\n if verbose:\n if not action or \\\n not isinstance(action, basestring) and \\\n orig_attrs != action.attrs:\n comments.append(\"# Applied: {0} (file {1} line {2})\".format(\n transform, filename, lineno))\n comments.append(\"# Result: {0}\".format(action))\n if not action or isinstance(action, basestring):\n break\n\n # Any newly-created actions need to have the transforms applied, too.\n newnewactions = []\n for act in newactions:\n if not isinstance(act, basestring):\n c, al = apply_transforms(act, pkg_attrs, verbose,\n act_filename, act_lineno)\n if c:\n comments.append(c)\n newnewactions += [a for a in al if a is not None]\n else:\n newnewactions.append(act)\n\n if len(comments) == 1:\n comments = []\n\n if action and action.name != \"pkg\":\n return (comments, [action] + newnewactions)\n else:\n return (comments, [None] + newnewactions)", 
"def python3(arglist, # pylint: disable=R0913\n stdout = None,\n stderr = None,\n cwd = None,\n env = None,\n dirpath_lwc_root = None):\n return _python(\n arglist,\n dependency_id = 'pyrun3',\n libraries_interface = 'lib_python3',\n stdout = stdout,\n stderr = stderr,\n cwd = cwd,\n env = env,\n dirpath_lwc_root = dirpath_lwc_root)", "def update_21(db, filename_persist, snapshots_dir, snapshots_reference_dir):\n text = \"\"\"\n test/test_label_changing.py\n test/test_batch2.py\n test/test_scalexy.py\n test/test_shader_examples.py\n \"\"\"\n candidates = doers.scripts_names_from_text(text, end_mark=':')\n checked_in, unknown, move_failed = hl.update_testrun__pass(db,\n filename_persist, candidates,\n snapshots_dir, snapshots_reference_dir) \n\n return checked_in, unknown, move_failed", "def updateCodeFiles(self):\n # if this annoying slow, could probably drop to bash or soemthing\n # for a search/replace\n for filename, filetype in self._get_code_files():\n lines = open(filename).readlines()\n found_version_line = False\n\n if self.Verbose:\n print 'Reading %s' % filename\n\n if filetype is 'Python':\n lines, write_out = self._update_python_file(lines, filename)\n elif filetype is 'PyRex':\n lines, write_out = self._update_pyrex_file(lines, filename)\n elif filetype is 'C':\n lines, write_out = self._update_c_file(lines, filename)\n else:\n raise TypeError, \"Unknown code file type %s\" % filetype\n\n if write_out:\n self._file_writer(lines, filename)", "def _convert_flags( self, fromlist, tolist, flaglist, context, numlines ):\n\n\t\t# all anchor names will be generated using the unique \"to\" prefix\n\n\t\t# process change flags, generating middle column of next anchors/links\n\t\tnext_id = [''] * len( flaglist )\n\t\tnext_href = [''] * len( flaglist )\n\t\tnum_chg, in_change = 0, False\n\t\tlast = 0\n\t\ttoprefix = ''\n\t\tfor i, flag in enumerate( flaglist ):\n\t\t\tif flag:\n\t\t\t\tif not in_change:\n\t\t\t\t\tin_change = True\n\t\t\t\t\tlast = i\n\t\t\t\t\t# at the beginning of a change, drop an anchor a few lines\n\t\t\t\t\t# (the context lines) before the change for the previous\n\t\t\t\t\t# link\n\t\t\t\t\ti = max( [0, i - numlines] )\n\t\t\t\t\tnext_id[i] = ' id=\"difflib_chg_%s_%d\"' % ( toprefix, num_chg )\n\t\t\t\t\t# at the beginning of a change, drop a link to the next\n\t\t\t\t\t# change\n\t\t\t\t\tnum_chg += 1\n\t\t\t\t\tnext_href[last] = '<a href=\"#difflib_chg_%s_%d\">n</a>' % (\n\t\t\t\t\t\t toprefix, num_chg )\n\t\t\telse:\n\t\t\t\tin_change = False\n\t\t# check for cases where there is no content to avoid exceptions\n\t\tif not flaglist:\n\t\t\tflaglist = [False]\n\t\t\tnext_id = ['']\n\t\t\tnext_href = ['']\n\t\t\tlast = 0\n\t\t\tif context:\n\t\t\t\tfromlist = ['No Differences Found']\n\t\t\t\ttolist = fromlist\n\t\t\telse:\n\t\t\t\tfromlist = tolist = ['Empty File']\n\t\t# if not a change on first line, drop a link\n\t\tif not flaglist[0]:\n\t\t\tnext_href[0] = '<a href=\"#difflib_chg_%s_0\">f</a>' % toprefix\n\t\t# redo the last link to link to the top\n\t\tnext_href[last] = '<a href=\"#difflib_chg_%s_top\">t</a>' % ( toprefix )\n\n\t\treturn fromlist, tolist, flaglist, next_href, next_id", "def main(root, filelist):\n #print \"got %s: %s\" % (root, filelist)\n rename(root, filelist)", "def three_to_two_fixture(fixer_test_case):\n\n def __init_fixer_suite_test_case(fixer_name, fix_list=None, options=None):\n test_case = fixer_test_case(fixer_name, \"fixer_suites.three_to_two\", fix_list=fix_list, options=options)\n test_case.refactor.driver.grammar = 
pygram.python_grammar_no_print_statement\n return test_case\n\n return __init_fixer_suite_test_case", "def update_files(regex_replace_list, values, is_release=True):\n # Copy the regex replace list, but update the replace strings to include\n # the supplied values.\n regex_replace_list = [(reg, repl.format(**values)) for (reg, repl) in regex_replace_list]\n filens = get_update_file_list(values[\"calico-version\"])\n for filen in filens:\n old_lines = load_file(filen)\n new_lines = []\n include = True\n master_block = False\n for line in old_lines:\n if is_release:\n if line.startswith(BLOCK_INDICATOR_MASTER_START):\n assert not master_block, \"<!--- start indicator with no end in file %s\" % filen\n master_block = True\n include = False\n continue\n if line.startswith(BLOCK_INDICATOR_MASTER_ELSE):\n assert master_block, \"<!--- else indicator with no start in file %s\" % filen\n include = True\n continue\n if line.startswith(BLOCK_INDICATOR_MASTER_END):\n assert master_block, \"<!--- end indicator with no start in file %s\" % filen\n include = True\n master_block = False\n continue\n if include:\n for regex, replace in regex_replace_list:\n line = regex.sub(replace, line)\n new_lines.append(line)\n assert not master_block, \"<!--- start indicator with no end in file %s\" % filen\n replace_file(filen, new_lines)", "def _apply_patch_odoo(self):\n paths = [os.path.join('openerp', 'tools', 'translate.py'),\n os.path.join('odoo', 'tools', 'translate.py')]\n for path in paths:\n s_file = os.path.join(self._server_path, path)\n if not os.path.isfile(s_file):\n continue\n cmd = [\"sed\", \"-i\", \"-e\",\n r\"s/translation'] = src/translation'] = ''/g\",\n s_file]\n print \" \".join(cmd)\n subprocess.call(cmd)", "def svn_diff_file_diff3_2(*args):\n return _diff.svn_diff_file_diff3_2(*args)", "def change_all_direct_method(edited_method, smali_file_list, class_landroid_java_over_list):\r\n for smali_file in smali_file_list: # For each file\r\n for smali_line in u.open_file_input(smali_file): # For each line\r\n if re.search(r'^([ ]*?)invoke\\-', smali_line) is not None: # If contains a method reference\r\n change_match_line(smali_line, edited_method, class_landroid_java_over_list)\r\n else:\r\n print smali_line, # Print the line unchanged\r", "def rename(root, filelist):\n if not filelist:\n return\n def apply_rules(filename):\n rulez = [('_+' , ' '), # One or more underscores to spaces\n ('-{2,}' , '-'), # Two or more hyphens to single hyphen\n ('&' , 'And'), # An ampersand to 'And'\n ('(-)(\\w*)' ,r' \\1 \\2')]# Spaces around hyphen seperated words\n \n for look_for, replacement in rulez:\n filename = re.sub(look_for, replacement, filename)\n # Capitalize first letter of every word\n filename = \" \".join([ word.capitalize() for word in filename.split() ])\n return filename\n \n names = []\n for filename in filelist:\n basename = os.path.basename(filename)\n names.append(os.path.join(root, apply_rules(filename)))\n try:\n dest = os.tmpnam()\n fl = open(dest, 'w')\n fl.write(\"\\n\".join(names))\n fl.close()\n os.system(\"%s %s\" % (EDITOR, dest))\n ans = 'no'\n for oldname, newname in zip(filelist, open(dest).readlines()):\n oldname = os.path.join(root, oldname)\n newname = newname.strip()\n if oldname == newname:\n print \"No change from %s to %s ...skipping\" % (oldname, newname)\n else:\n print \"Changing %s to %s\" % (oldname, newname)\n if not ans[0].lower == 'a':\n ans = raw_input(\"Contine (Yes/No/All) ? 
[N] \") or 'no'\n if ans[0].lower() in ('a', 'y'):\n os.rename(oldname, newname)\n else:\n os.rename(oldname, newname)\n finally:\n os.remove(dest)", "def update_9(db, filename_persist, snapshots_dir, snapshots_reference_dir):\n text = \"\"\"\n test/test_action_non_interval.py\n test/test_all_collisions.py : puede no ser representativo; agregado z para ver si cuadrado rinde\n test/test_draw_resolution.py\n test/test_interpreter_layer.py : weak test, interpreter not exercised\n test/test_menu_bottom_right.py : todos los de menu no prueban automaticamente la logica\n test/test_menu_centered.py\n test/test_menu_fixed_position.py\n test/test_menu_items.py\n test/test_menu_rotated.py\n test/test_menu_top_left.py\n test/test_particle_explosion.py\n test/test_particle_fire.py\n test/test_particle_fireworks.py\n test/test_particle_flower.py\n test/test_particle_galaxy.py\n test/test_particle_meteor.py\n test/test_particle_smoke.py\n test/test_particle_spiral.py\n test/test_particle_sun.py\n test/test_schedule.py\n test/test_unscaled_win_resize.py\n \"\"\"\n candidates = doers.scripts_names_from_text(text, end_mark=':')\n checked_in, unknown, move_failed = hl.update_testrun__pass(db,\n filename_persist, candidates,\n snapshots_dir, snapshots_reference_dir) \n\n return checked_in, unknown, move_failed", "def update_proj_dir():\n cwd = os.getcwd()\n srcf_dir = cwd\n i = 0 # Flag\n flg_cpy = True # Flag to cpy func\n flg_rm = True # Flag to rm func\n file_uc_list = [] # Files unchanged\n file_c_list = [] # Files changed\n file_r_list = [] # Files to be removed\n # Project dir\n dstf_dir = r\"C:\\Users\\ajiteshr7\\Dropbox\\python_proj\"\n file_srclist = os.listdir(srcf_dir)\n file_dstlist = os.listdir(dstf_dir)\n file_uc_list, file_c_list = chk_chng(file_srclist, file_dstlist)\n # Display files unchanged\n if file_uc_list:\n print marker\n print \"File List\"\n print \"No of files : %d\" %(len(file_uc_list))\n print_file(file_uc_list)\n # Display files changed\n if file_c_list:\n print \"Files added\"\n print \"No of files added: %d\" %(len(file_c_list))\n print_file(file_c_list)\n else:\n print \"No files added\"\n print \"No of files added: 0\"\n flg_cpy = False\n # Copy files..\n i = cpy_c_files(file_c_list, srcf_dir, dstf_dir,flg_cpy)\n # Remove files...\n file_list, file_r_list = chk_chng(file_dstlist,file_srclist)\n if not file_r_list:\n flg_rm = False\n rm_f(file_r_list,dstf_dir,flg_rm)\n # Display result updated or didn't....\n if flg_cpy == False and flg_rm == False:\n print \"Directory is up-to-date...\"\n elif i == len(file_c_list):\n print \"Sucessfully Updated the project directory :)\"\n else:\n print \"Didn't updated the folder.... 
=(\"\n return False\n return True", "def test_replacement3(engine_contents, engine_locations):\n file_name = 'Triangle.java.xml'\n new_contents = copy.deepcopy(engine_contents)\n new_locations = copy.deepcopy(engine_locations)\n target1 = (file_name, 'expr_stmt', 0)\n target2 = (file_name, 'comment', 0)\n assert not XmlEngine.do_replace(engine_contents, engine_locations, new_contents, new_locations, target1, target1)\n assert XmlEngine.do_replace(engine_contents, engine_locations, new_contents, new_locations, target1, target2)\n assert not XmlEngine.do_replace(engine_contents, engine_locations, new_contents, new_locations, target1, target2)", "def migrate(ctx, start_version, end_version):\n if ctx.obj['TYPE'] == 'file':\n if ctx.obj['DATA_FILE']:\n file_path = ctx.obj['DATA_FILE']\n else:\n file_path = os.path.join(ctx.obj['DATA_DIR'], ctx.obj['NAMESPACE'] + '.json')\n\n # todo make this more like alemebic and determine/load versions automatically\n with open(file_path, 'r') as f:\n data = json.loads(f.read())\n\n data = run_migration(data, start_version, end_version)\n with open(file_path, 'w') as f:\n f.write(json.dumps(data))", "def main():\n\n global APP_NAME\n\n # Defines Argument Parser and fefines flags and expected inputs\n parser = argparse.ArgumentParser(\n description='Converts specified html files or all html files to \\\n\t\t\tdjango format within a \\n specified directory.'\n )\n # Defines the -f flag, standing for files, to gey file nameof the HTML\n # file to convert\n parser.add_argument(\n 'files',\n metavar='f',\n type=str,\n nargs='*',\n help='provide file names to convert'\n )\n # Defines the -a flag, for defining the APP_NAME, you want the file\n # converted to, for.\n parser.add_argument(\n '-a',\n dest='app_name',\n type=str,\n nargs='?',\n help='provide django app name'\n )\n # Defines the -d flag, standing for directory, which accepts the path\n # to a directory containing the files to be translated\n parser.add_argument(\n '-d',\n dest='base_directory',\n type=str,\n nargs='?',\n help='Provide base directory'\n )\n\n # Parse the Arguments from the user\n args = parser.parse_args()\n\n # Deconstruct the arguments from the parser\n files = args.files\n directory = args.base_directory\n app_name = args.app_name\n\n # If APP_NAME is not passes in as an argument, leave it as ''(empty)\n if app_name is not None:\n APP_NAME = app_name + \"/\"\n\n # If directory is not passed in as an argument, use the current working\n # directory to fetch files\n if directory is None:\n directory = os.getcwd()\n\n logging.info(\"Directory : \" + str(directory))\n logging.info(\"app_name : \" + str(app_name))\n\n # Check if the directory passed in as argument already has the directory \n # 'Modified_files', else create it.\n if not os.path.exists(os.path.join(directory, \"Modified_files\")):\n os.mkdir(os.path.join(directory, \"Modified_files\"))\n\n if files != []:\n for file in files:\n processFile(directory, directory + \"/\" + file, file)\n\n else:\n # If no file was passed in as input, then extract all files in the \n # directory passed in, with extension '.html'\n for file in os.listdir(directory):\n if file.endswith(\".html\"):\n processFile(directory, directory + \"/\" + file, file)", "def convert():\n \n cvt_map = {\n '.add(' : '.add_subsystem(',\n '.add_param(' : '.add_input(',\n '.params': '._inputs',\n '.unknowns': '._outputs',\n '.resids': '._residuals',\n 'openmdao.test.util': 'openmdao.devtools.testutil',\n 'def solve_nonlinear(self, params, unknowns, resids)': 'def 
compute(params, unknowns)',\n }\n\n with open(sys.argv[1], 'r') as f:\n contents = f.read()\n for old, new in cvt_map.items():\n contents = contents.replace(old, new)\n\n sys.stdout.write(contents)", "def convert(lines):\n\n def parse(line):\n line = line.replace(\"from PySide2 import\", \"from Qt import\")\n line = line.replace(\"QtWidgets.QApplication.translate\",\n \"Qt.QtCompat.translate\")\n return line\n\n parsed = list()\n for line in lines:\n line = parse(line)\n parsed.append(line)\n\n return parsed", "def used_mods(ffile):\n import re\n import codecs\n\n # Go through line by line,\n # remove comments and strings because the latter can include ';'.\n # Then split at at ';', if given.\n # The stripped line should start with 'use '.\n # After use should be the \"module_name\", ', intrinsic :: module_name', or\n # ', non_intrinsic :: module_name'. We allow also to use \":: module_name\"\n # After module name should only be ', only: ...' or ', a ==> b'\n olist = list()\n of = codecs.open(ffile, 'r', encoding='ascii', errors='ignore')\n for line in of:\n ll = line.rstrip().lower() # everything lower case\n ll = re.sub('!.*$', '', ll) # remove F90 comment\n ll = re.sub('^c.*$', '', ll) # remove F77 comments\n ll = re.sub('\".*?\"', '', ll) # remove \"string\"\n ll = re.sub(\"'.*?'\", '', ll) # remove 'string'\n # check if several commands are on one line\n if ';' in ll:\n lll = ll.split(';')\n else:\n lll = [ll]\n for il in lll:\n iil = il.strip()\n # line should start with 'use '\n if iil.startswith('use '):\n iil = iil[4:].strip() # remove 'use '\n # skip intrinsic modules\n if 'intrinsic' in iil:\n if 'non_intrinsic' in iil:\n iil = re.sub(', *non_intrinsic', '', iil)\n iil = iil.strip()\n else:\n continue # skip to next in lll\n if iil.startswith('::'):\n iil = iil[2:].strip() # remove ':: '\n # remove after ',' if rename-list or only-list\n iil = re.sub(',.*$', '', iil)\n olist.append(iil.strip())\n of.close()\n\n return olist", "def alter(options):\n root_file = None\n if not options.freplace:\n if not options.parallel:\n if len(options.args) != 2:\n raise TelemacException(\\\n '\\nThe code \"alter\" (without --replace) '\n 'requires 2 file names\\n')\n slf_files = [options.args[0]]\n out_file = options.args[1]\n else:\n if len(options.args) != 3:\n raise TelemacException(\\\n '\\nThe code \"alter\" (without --replace) '\n 'here requires 2 file names and '\n '1 file root name for the partition\\n')\n slf_files = [options.args[0]]\n root_file = options.args[1]\n out_file = options.args[2]\n else:\n slf_files = options.args\n out_file = \"chop-tmp.slf\"\n\n for slf_file in slf_files:\n slf_file = path.realpath(slf_file)\n if not path.exists(slf_file):\n raise TelemacException(\\\n '\\nCould not find the file named: {}'.format(slf_file))\n print('\\n\\nAltering ' + path.basename(slf_file) + ' within ' + \\\n path.dirname(slf_file) + '\\n'+'~'*72+'\\n')\n vrs = options.xvars\n if options.xvars != None:\n vrs = clean_quotes(options.xvars.replace('_', ' '))\n times = (int(options.tfrom), int(options.tstep), int(options.tstop))\n slf = AlterSelafin(slf_file, times=times, vrs=vrs, root=root_file)\n if options.atitle != None:\n slf.alter_title(options.atitle)\n if options.areset:\n slf.alter_times(p_t=-slf.slf.tags['times'][0])\n if options.adate != None:\n slf.alter_datetime(date=options.adate.split('-'))\n if options.atime != None:\n slf.alter_datetime(time=options.atime.split(':'))\n if options.aswitch:\n slf.switch_vars()\n if options.eswitch:\n slf.alter_endian()\n if 
options.fswitch:\n slf.alter_float()\n if options.aname != None:\n slf.alter_vars(options.aname)\n slf.alter_times(m_t=float(options.atm), p_t=float(options.atp))\n slf.alter_mesh(m_x=float(options.axm), p_x=float(options.axp),\n m_y=float(options.aym), p_y=float(options.ayp))\n if options.azname != None:\n slf.alter_values(options.azname,\n m_z=float(options.azm), p_z=float(options.azp))\n if options.sph2ll != None:\n radius = 6371000.\n long0, lat0 = options.sph2ll.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n const = np.tan(lat0/2. + np.pi/4.)\n slf.slf.meshx = np.rad2deg(slf.slf.meshx/radius + long0)\n expo = np.exp(slf.slf.meshy/radius)\n slf.slf.meshy = np.rad2deg(2.*np.arctan(const*expo) - np.pi/2.)\n if options.ll2sph != None:\n radius = 6371000.\n long0, lat0 = options.ll2sph.split(\":\")\n long0 = np.deg2rad(float(long0))\n lat0 = np.deg2rad(float(lat0))\n slf.slf.meshx = radius * (np.deg2rad(slf.slf.meshx) - long0)\n slf.slf.meshy = radius * \\\n (np.log(np.tan(np.deg2rad(slf.slf.meshy)/2. + np.pi/4.)) \\\n - np.log(np.tan(lat0/2. + np.pi/4.)))\n if options.ll2utm != None:\n zone = int(options.ll2utm)\n slf.slf.meshx, slf.slf.meshy, zone = \\\n utm.from_lat_long(slf.slf.meshx, slf.slf.meshy, zone)\n if options.utm2ll != None:\n zone = int(options.utm2ll)\n slf.slf.meshx, slf.slf.meshy = \\\n utm.to_lat_long(slf.slf.meshx, slf.slf.meshy, zone)\n\n slf.put_content(out_file)\n\n if options.freplace:\n move_file(out_file, slf_file)" ]
[ "0.6804691", "0.65736985", "0.6213052", "0.5742118", "0.529011", "0.5283378", "0.5246217", "0.51765335", "0.51484454", "0.51378375", "0.5068919", "0.50683546", "0.50638443", "0.50514895", "0.50448775", "0.4973271", "0.49656966", "0.49321887", "0.49237916", "0.4915581", "0.4899074", "0.48972154", "0.48972005", "0.48914263", "0.48913878", "0.487697", "0.48581284", "0.48574936", "0.48460254", "0.48331115" ]
0.73142666
0
Issues a call to util.run_2to3.
def run_2to3(self, files, doctests_only=False):
    return run_2to3(files, doctests_only, self.fixer_names,
                    self.options, self.explicit)
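The method reads its 2to3 configuration from instance attributes, so it is presumably meant to be mixed into a build command. A minimal sketch of such a host class follows; the class name and attribute defaults are assumptions, in the style of distutils' ``Mixin2to3``.

class Mixin2to3(object):
    # Assumed attribute defaults; a real build command would set these.
    fixer_names = None   # extra fixer packages, e.g. ['myfixes']
    options = None       # options dict passed through to RefactoringTool
    explicit = None      # fixers that must be requested explicitly

    def run_2to3(self, files, doctests_only=False):
        return run_2to3(files, doctests_only, self.fixer_names,
                        self.options, self.explicit)

Mixin2to3().run_2to3(['build/lib/mypkg/core.py'])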
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_2to3(files, doctests_only=False, fixer_names=None, options=None,\n explicit=None):\n\n #if not files:\n # return\n\n # Make this class local, to delay import of 2to3\n from lib2to3.refactor import get_fixers_from_package, RefactoringTool\n fixers = []\n fixers = get_fixers_from_package('lib2to3.fixes')\n\n\n if fixer_names:\n for fixername in fixer_names:\n fixers.extend([fixer for fixer in get_fixers_from_package(fixername)])\n r = RefactoringTool(fixers, options=options)\n if doctests_only:\n r.refactor(files, doctests_only=True, write=True)\n else:\n r.refactor(files, write=True)", "def command_py2to3(args):\n from lib2to3.main import main\n args2 = []\n if command_py2to3_work_around3k:\n if args.no_diffs:\n args2.append(\"--no-diffs\")\n if args.write:\n args2.append(\"-w\")\n if args.nobackups:\n args2.append(\"-n\")\n args2.extend(args.sources)\n sys.exit(main(\"lib2to3.fixes\", args=args2))", "def task3(self):\n\n pass", "def test_integration3(self):\n self._test_integration(3)", "def test_run3():\n tb = TradeBot(\n pair='USD_JPY',\n timeframe='D',\n start='2018-09-24 22:00:00',\n end='2018-10-09 22:00:00',\n settingf=\"../../data/settings.ini\")\n\n tl = tb.run()\n\n assert len(tl.tlist) == 1", "def experiment3():\n raise FAKE_ERROR", "def work3():\n logging.info(\"work3 doing a job\")\n if random.randint(1, 5) == 1:\n logging.error(\"Error in work3: bad input\")", "def task2_3():", "def runMT3D(self):\n \n # write mt3dms input\n self.__mt.write_input()\n # run mt3dms\n self.__mt.run_model()", "def test_v3_runtime(self):\r\n\r\n start_time = time.time()\r\n\r\n for n in range(1, 30000):\r\n prime_numbers_v3(n)\r\n\r\n elapsed_time = round(time.time() - start_time, 3)\r\n\r\n print(f\"v3, time required: {elapsed_time}\")", "def CASE3( self, main ):\n\n from tests.USECASE.SegmentRouting.SRRouting.dependencies.SRRoutingTest import SRRoutingTest\n\n SRRoutingTest.runTest( main,\n test_idx=3,\n onosNodes=3,\n dhcp=1,\n routers=1,\n ipv4=1,\n ipv6=1,\n countFlowsGroups=False,\n linkFailure=False,\n description=\"Ping between all ipv4 and ipv6 hosts in the topology\" )", "def three_experiments(two_experiments, one_experiment):", "def task_4_3_3():\n # TODO Task 4.3.3: Your code goes here\n pass", "def test_multiples_of_3_and_5(num, result):\n from multiples_of_3_and_5 import solution\n assert solution(num) == result", "def sub3(args):\n run_tracks.append({\n 'a': args.a,\n 'counts': args.counts,\n 'times': args.times,\n 'filetype': args.filetype,\n })", "def python3(arglist, # pylint: disable=R0913\n stdout = None,\n stderr = None,\n cwd = None,\n env = None,\n dirpath_lwc_root = None):\n return _python(\n arglist,\n dependency_id = 'pyrun3',\n libraries_interface = 'lib_python3',\n stdout = stdout,\n stderr = stderr,\n cwd = cwd,\n env = env,\n dirpath_lwc_root = dirpath_lwc_root)", "def fs3cmd(args):\n os.environ[\"FAIR_CLUSTER_NAME\"] = os.environ[\"FAIR_ENV_CLUSTER\"].lower()\n subprocess.check_call([\"/public/apps/fairusers_aws/bin/fs3cmd\"] + args)", "def chain3(self, action, args=(), kwargs={}, actionReturnsExitCode=False ):\r\n if not isinstance( action, str):\r\n objToCall=action\r\n else:\r\n objToCall = getattr( self, action )\r\n result = objToCall(*args,**kwargs)\r\n if actionReturnsExitCode:\r\n resultValue = result\r\n resultExitCode = None\r\n else:\r\n resultValue = None\r\n resultExitCode = result\r\n return resultValue, resultExitCode, self", "def auxiliar_exercise1(self, param1, param2, param3):\n result = param1 ** 2 + param2 ** 2 + param3 ** 2\n return", 
"def test_run(self, _run_mock):\n cstranslate = self.tool(hhsuite.CSTranslate,\n options={\"outfile\": self.OUT_CS219_FILE})\n cstranslate.run({\"a3m\": self.IN_A3M_FILE})\n self.verify_common(\"cstranslate\", cstranslate)\n\n _, kw_args = cstranslate.tool.call_args\n self.assertEqual(kw_args[\"options\"][\"infile\"], self.IN_A3M_FILE)", "def testPython3(self):\n resource = Resource.get()\n resource.load(self.__taskPath)\n crawler = FsCrawler.createFromPath(self.__sourcePath)\n dummyTask = Task.create('pythonMajorVerTestTask')\n dummyTask.add(crawler)\n\n wrapper = TaskWrapper.create(\"python3\")\n result = wrapper.run(dummyTask)\n self.assertTrue(len(result), 1)\n self.assertEqual(result[0].var(\"majorVer\"), 3)", "def test_system_case_3():\n\n reproducer_command = \"python reproducer.py --log 2020102208gm-0009-0000-40337c9c --player Xenia --wind 3 --honba 0 --action enemy_discard --tile 1s\"\n needed_meld = None\n tile_after_meld = None\n\n result_meld, result_tile_after_meld = _run_reproducer(\"3.txt\", reproducer_command)\n assert result_meld == needed_meld\n assert result_tile_after_meld is None", "def main():\n module = AnsibleModule(argument_spec=L3_interfaceArgs.argument_spec,\n supports_check_mode=True)\n\n result = L3_interface(module).execute_module()\n module.exit_json(**result)", "def day3_case():\n print(\"Day 3 Start\")\n #print(\"Steps:\", day3.get_steps(325489))\n print(\"Steps:\", day3.get_steps(36807888888888))", "def main():\n test_problem1a()\n test_problem1b()\n test_problem1c()", "def solution(nums):\n solution = Solution()\n output = solution.threeSum(nums)\n\n print(output)", "def test_run(thirdorder_sow_code=None, thirdorder_reap_code=None):\n computer = helpers.get_computer()\n if not thirdorder_sow_code:\n # get code\n thirdorder_sow_code = helpers.get_code(\n entry_point='thirdorder_vasp_sow', computer=computer)\n if not thirdorder_reap_code:\n thirdorder_reap_code = helpers.get_code(entry_point='thirdorder_vasp_reap',\n computer=computer, prepend_text='find job.* -name vasprun.xml|sort -n|')\n # set up calculation\n base_incar_dict = {\n 'PREC': 'Accurate',\n 'IBRION': 8,\n 'EDIFF': 1e-8,\n 'NELMIN': 5,\n 'NELM': 100,\n 'ENCUT': 240,\n 'IALGO': 38,\n 'ISMEAR': 0,\n 'SIGMA': 0.1,\n 'LREAL': False,\n 'lcharg': False,\n 'lwave': False,\n }\n forces_config = {\n 'code_string': 'vasp@vasp',\n 'kpoints_density': 0.5, # k-point density,\n 'potential_family': 'pbe',\n 'potential_mapping': {'Si': 'Si'},\n 'options': {\n 'resources': {'num_machines': 1, 'tot_num_mpiprocs': 4},\n 'max_wallclock_seconds': 3600 * 10\n },\n 'parser_settings': {\n 'add_energies': True,\n 'add_forces': True,\n 'add_stress': True\n },\n 'parameters': base_incar_dict\n }\n\n inputs = {\n 'structure': helpers.get_test_structure(),\n 'thirdorder_sow': {\n 'code': thirdorder_sow_code,\n 'parameters': Dict(dict={\n 'supercell_matrix': [3, 3, 3],\n 'option': 3\n })\n },\n 'thirdorder_reap': {\n 'code': thirdorder_reap_code,\n 'parameters': Dict(dict={\n 'supercell_matrix': [3, 3, 3],\n 'option': 3\n })\n },\n 'vasp_settings': Dict(dict={'forces': forces_config}),\n # 'clean_workdir': orm.Bool(True),\n 'metadata': {\n 'description': \"Test job submission with the aiida_shengbte thirdorder plugin\",\n },\n }\n logging.error(inputs)\n result = engine.run(WorkflowFactory('shengbte.thirdorder'), **inputs)\n\n logging.info(result)", "def task_test(argv):\n run_tests(\"python2\", argv)\n run_tests(\"python3\", argv)", "def three_times_nums(num_list):", "def run_test_second():\n os.system(\n \"sed -n 
'/(Failed)$/p' test_op_log.txt | awk '{print $3}' >& rerun_op.txt\"\n )\n rerun_list = get_op_list('rerun_op.txt')\n if len(rerun_list):\n print(\n \"-------there are \"\n + str(len(rerun_list))\n + \" op(s) need to rerun!!!-------\"\n )\n for failed_op in rerun_list:\n os.system(\"ctest -R \\\"(\" + failed_op + \")\\\" \")\n else:\n print(\"-------all op passed successfully!!!-------\")" ]
[ "0.6248613", "0.6005194", "0.5644888", "0.5473579", "0.54017115", "0.5398377", "0.53385645", "0.5264664", "0.5233024", "0.5117116", "0.5111273", "0.51092243", "0.5096129", "0.5089467", "0.50623655", "0.5041441", "0.504088", "0.50230515", "0.50128806", "0.5006936", "0.49927324", "0.49914157", "0.49825135", "0.49793774", "0.4972045", "0.4970165", "0.49602", "0.49418074", "0.4917166", "0.49106714" ]
0.6521159
0
This verifies that a vote input is in the allowable range.
def voteCheck(number):
    if number >= MIN_VOTES and number <= MAX_VOTES:
        return True
    else:
        return False

number = input("\n\tEnter votes: ")
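``MIN_VOTES`` and ``MAX_VOTES`` are module constants not shown in the record, and ``input()`` returns a string, so a working sketch needs an ``int`` cast. Both the constant values and the retry loop below are assumptions.

MIN_VOTES = 0        # assumed bounds; the source module defines the real ones
MAX_VOTES = 10000

votes = int(input("\n\tEnter votes: "))
while not voteCheck(votes):
    votes = int(input("\tOut of range, enter votes again: "))
print("Recorded", votes, "votes")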
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_vote(age):\n return age >= 18", "def isRangeValid(self) -> bool:\n ...", "def isInRange(val, minv, maxv):\n\treturn val >= minv and val <= maxv", "def _validate_val_range(self, proposal):\n val_range = proposal[\"value\"]\n if len(val_range) != 2:\n raise traitlets.TraitError(\"val_range must be of length 2.\")\n if val_range[0] > val_range[1]:\n raise traitlets.TraitError(\n \"val_range[0] must be smaller than val_range[1].\"\n )\n return val_range", "def _withinRangeChecker(entity, params):\n\n for constraint in constraints:\n type = constraint.get('type')\n field = constraint.get('field')\n\n if not type or not field:\n raise ProtocolError()\n\n min_value = constraint.get('min_value', 0)\n max_value = constraint.get('max_value', 1)\n\n if type == 'size':\n value = entity.__getattribute__(field)\n if len(value) < min_value or len(value) > max_value:\n return False\n else:\n raise ProtocolError()\n \n return True", "def _validate_value(self, value):\n if self.limits[0] <= value <= self.limits[1]:\n return True\n else:\n return False", "def __verify_range(value, minimum, maximum):\n if value in range(minimum, maximum):\n return True\n else:\n return False", "def check(self):\n self.lower_bound(5e-4)\n self.upper_bound(5e2)", "def validate_correct_hint(self):\n is_response_hint_valid = False\n while is_response_hint_valid is False:\n hint_value = self.ask_user_input(\"Enter maximum hint threshold\")\n if not hint_value.isdigit():\n print(\"Not a number, please try again\")\n elif 0 <= int(hint_value) <= 81:\n is_response_hint_valid = True\n self.current_response = hint_value\n else:\n print(\"Number is out of the valid range, please try again\")\n return is_response_hint_valid", "def _isInAllowedRange( self, testval, refval, reltol=1.e-2 ):\n denom = refval\n if refval == 0:\n if testval == 0:\n return True\n else:\n denom = testval\n rdiff = (testval-refval)/denom\n del denom,testval,refval\n return (abs(rdiff) <= reltol)", "def check_range(self, csvop, mtype, stype, flavor, pt, eta, discr):\n allowed_range = self.allowed[(csvop, mtype, stype, flavor)]\n\n eta = abs(eta)\n allowed = all([\n eta >= allowed_range['etaMin'], eta <= allowed_range['etaMax'],\n pt >= allowed_range['ptMin'], pt <= allowed_range['ptMax'],\n discr >= allowed_range['discrMin'], discr <= allowed_range['discrMax'],\n ])\n\n if not allowed and self.verbose>2:\n print 'pt %6.1f <? %6.1f <? %6.1f' % (allowed_range['ptMin'], pt, allowed_range['ptMax'])\n print 'eta %4.1f <? %4.1f <? %4.1f' % (allowed_range['etaMin'], eta, allowed_range['etaMax'])\n print 'discr %4.1f <? %4.1f <? %4.1f' % (allowed_range['discrMin'], discr, allowed_range['discrMax'])\n\n return allowed", "def __check_args_val(self):\n if self.__min_range < 0:\n error_msg = \"min_range must be greater than or equal to zero\"\n raise ValueError(error_msg)\n elif self.__max_range < 0:\n error_msg = \"max_range must be greater than or equal to zero\"\n raise ValueError(error_msg)\n elif self.__max_range < self.__min_range:\n error_msg = \"max_range must be greater than or equal to min_range\"\n raise ValueError(error_msg)", "def _withinRangeCheckerWrapper(self, args):\n\n constraints = args['constraints']\n\n def _withinRangeChecker(entity, params):\n \"\"\"Checks if certain properties are within given constrains. \n \"\"\"\n\n for constraint in constraints:\n type = constraint.get('type')\n field = constraint.get('field')\n\n if not type or not field:\n raise ProtocolError()\n\n min_value = constraint.get('min_value', 0)\n max_value = constraint.get('max_value', 1)\n\n if type == 'size':\n value = entity.__getattribute__(field)\n if len(value) < min_value or len(value) > max_value:\n return False\n else:\n raise ProtocolError()\n \n return True\n \n return _withinRangeChecker", "def is_valid(self, value: int) -> bool:\n return value < self.min_value or value > self.max_value", "def validateVoltage( self, name, voltage ):\n channel = self.d[name]\n (MIN,MAX) = channel.limits\n if not MIN <= voltage <= MAX: raise Exception('Invalid voltage {}'.format(voltage))", "def check_value(self, pos):\n if self.limits is not None:\n low, high = self.limits\n if low != high and not (low <= pos <= high):\n raise ValueError(\"{} outside of user-specified limits\" \"\".format(pos))\n else:\n self.setpoint.check_value(pos)", "def constraint_clause_in_range_validator(field, presentation, context):\n\n field.default_validate(presentation, context)\n\n values = getattr(presentation, field.name)\n if isinstance(values, list):\n # Make sure list has exactly two elements\n if len(values) == 2:\n lower, upper = values\n the_type = presentation._get_type(context)\n\n # Lower bound must be coercible\n lower = coerce_value(context, presentation, the_type, None, None, lower, field.name)\n\n if upper != 'UNBOUNDED':\n # Upper bound be coercible\n upper = coerce_value(context, presentation, the_type, None, None, upper, field.name)\n\n # Second \"in_range\" value must be greater or equal than first\n if (lower is not None) and (upper is not None) and (lower >= upper):\n context.validation.report(\n u'upper bound of \"in_range\" constraint is not greater than the lower bound'\n u' in \"{0}\": {1} <= {2}'\n .format(presentation._container._fullname, safe_repr(lower),\n safe_repr(upper)),\n locator=presentation._locator, level=Issue.FIELD)\n else:\n context.validation.report(\n u'constraint \"{0}\" is not a list of exactly 2 elements in \"{1}\": {2}'\n .format(field.name, presentation._fullname, safe_repr(values)),\n locator=presentation._get_child_locator(field.name), level=Issue.FIELD)", "def _validate(self, instance, value):\n\n if not isinstance(value, Real):\n raise TypeError(f\"Value for {self.prop_name} shoulde be real numbers.\")\n\n if (\n self.min_val is not None\n and value < self.min_val\n and not isclose(value, self.min_val)\n ):\n raise ValueError(\n f\"Value should be greater than or equal to {self.min_val}.\"\n )\n\n if (\n self.max_val is not None\n and value > self.max_val\n and not isclose(value, self.max_val)\n ):\n raise ValueError(f\"Value should be less than or equal to {self.max_val}.\")", "def validate(self, value: Any, low: int, high: int) -> bool:\n pass", "def is_in_range(self, price):\r\n return price <= self.pmax and price >= self.pmin", "def is_valid_range(parser, arg, minimum=0, maximum=100):\n if arg < minimum:\n parser.error(\"%s < %s\", arg, minimum)\n else:\n if arg > maximum:\n parser.error(\"%s > %s\", arg, maximum)\n\n return arg", "def isInRange(self,section,option,testval):\n \"\"\" us to test if 15 is in range defined as e.g. \"1-10,12,16-19\" \"\"\"\n value=ConfigParser.SafeConfigParser.get(self,section,option)\n value=value.strip('\"')\n elems=value.split(\",\")\n inrange=False\n if elems:\n for elem in elems:\n if(elem.find(\"-\")):\n # it's a range\n limits=elem.split(\"-\",2)\n notlower=limits.pop()\n nothigher=limits.pop()\n if(testval >= notlower and testval <= nothigher)\n inrange=True\n else\n # it's just 1 number\n if(elem == testval):\n inrange=True\n return inrange", "def is_valid_value(self, value):\n if not self.range:\n return False\n\n return value >= self.range[0] and value <= self.range[1]", "def check_value(self, name, min_int, max_int):\n while True:\n numb = input(f\"-- {name} : Entrez une valeur comprise \"\n f\"entre {min_int} et {max_int} : \")\n try:\n check = int(numb)\n if check == 99 or min_int <= check <= max_int:\n break\n except ValueError:\n pass\n return check", "def test_int_range_constraint_validation():\n\n # Test valid values OK\n minimum = 1\n maximum = 2\n IntRangeConstraint(name=\"Ingredient count\", minimum=minimum, maximum=maximum)\n\n # Test minimum must be less than maximum\n minimum = 3\n maximum = 2\n try:\n RealRangeConstraint(name=\"Ingredient count\", minimum=minimum, maximum=maximum)\n assert False, \"IntRangeConstraint should require that minimum be less than maximum\"\n except CitrinationClientError:\n pass\n\n # Test values must be castable to float\n minimum = {}\n maximum = 2\n try:\n c = IntRangeConstraint(name=\"Ingredient count\", minimum=minimum, maximum=maximum)\n assert False, \"IntRangeConstraint should require that minimum and maximum be castable to integers\"\n except CitrinationClientError:\n pass", "def _is_in_range(valid_values):\n\n def f(x):\n if x not in valid_values:\n raise ValueError('{} not in {}'.format(x, valid_values))", "def boundary(quantity, lower, upper):\r\n in_range = False\r\n while not in_range:\r\n if quantity < lower or quantity > upper:\r\n quantity = int(input(\"That is out of range, please try a number between \" + \\\r\n str(lower) + \" and \" + str(upper) + \": \"))\r\n else:\r\n in_range = True\r\n return quantity", "def test_threshold_range_a(self):\n code, out, err = self.t.runError(\"--threshold --max 3.1 --min 3.2\")\n self.assertIn(\"The min value must be lower than the max value.\", out)", "def check_valid_range(val, max_val):\n if val < 0:\n val = 0\n elif val > max_val:\n val = max_val\n else:\n pass\n return val", "def is_in_range(value: float, lower_bound: float, upper_bound: float, err_string: str) -> None:\n if value < lower_bound or value > upper_bound:\n print('\\n' + err_string + '\\n')\n sys.exit(1)" ]
[ "0.6918635", "0.64611727", "0.6432009", "0.63926494", "0.63070136", "0.6304111", "0.6279981", "0.62616414", "0.62209725", "0.62109786", "0.6138942", "0.6124066", "0.61222", "0.60989904", "0.60964143", "0.60828257", "0.6061203", "0.6046962", "0.60301083", "0.60255885", "0.59888506", "0.5971151", "0.5950925", "0.5946161", "0.59379154", "0.5930363", "0.5919959", "0.58770984", "0.58762336", "0.5855917" ]
0.7730605
0
Replace the ``step()`` method of env with a tracing function that calls callbacks with an event's time, priority, ID and instance just before it is processed.
def trace(self): # noqa

    # pylint: disable=protected-access,invalid-name
    def get_wrapper(env_step):
        """Generate the wrapper for env.step()."""
        @wraps(env_step)
        def tracing_step(): # noqa
            """Call *__monitor* for the next event if one exists before
            calling ``env.step()``."""
            if len(self.__env._queue) > 0:
                t, prio, eid, event = self.__env._queue[0]
                self.__monitor(t, prio, eid, event)
            return env_step()

        return tracing_step

    self.__env.step = get_wrapper(self.__env.step)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tracing_step(): # noqa\n if len(self.__env._queue) > 0:\n t, prio, eid, event = self.__env._queue[0]\n self.__monitor(t, prio, eid, event)\n return env_step()", "def get_wrapper(env_step):\n @wraps(env_step)\n def tracing_step(): # noqa\n \"\"\"Call *__monitor* for the next event if one exist before\n calling ``env.step()``.\"\"\"\n if len(self.__env._queue) > 0:\n t, prio, eid, event = self.__env._queue[0]\n self.__monitor(t, prio, eid, event)\n return env_step()\n\n return tracing_step", "def step_impl(context):\n pass", "def step_impl(context):\n pass", "def _timestep_before_hook(self, *args, **kwargs):\n pass", "def add_step_hook(h):\n add_hook(step, h)", "def register_step(step_function: StepFunction) -> None:\n global _step_function\n _step_function = step_function", "def step(self, action: np.ndarray) -> 'EnvStep':\n ...", "def base_trace(self, frame, event, arg):\n\n # print(\"Tracing %s %s %s (%s))\" % (event, \"<File %s, Line %s>\" % (frame.f_code.co_filename, frame.f_lineno), str(arg), str(id(threading.current_thread()))))\n\n # if true, breakpoints will be checked\n test_breakpoints = True\n\n # check for steppings\n if self.stepping != SteppingMode.STEP_NO_STEP:\n # print(\"Tracing for %s %s %s %s (%s))\" % (str(self.stepping), event, \"<File %s, Line %s>\" % (frame.f_code.co_filename, frame.f_lineno), str(arg), str(id(threading.current_thread()))))\n\n # single execution step, to move out of return/call frames into line frames\n if self.stepping == SteppingMode.STEP_SINGLE_EXEC:\n test_breakpoints = False\n self.stepping = SteppingMode.STEP_NO_STEP\n self.break_pause = False\n self.cont = False\n handler.pause_debugging()\n\n # step INTO and call happens on same level as we are, we are in\n # just move one step to line\n if self.stepping == SteppingMode.STEP_INTO and self.active_frame.f_back is self.stored_frames[1] and event == \"call\":\n # this will exit because call is unhandled!\n test_breakpoints = False\n self.stepping = SteppingMode.STEP_SINGLE_EXEC\n self.pause_reason = \"stepIn\"\n\n # step INTO but there is nothing to go in\n # so only move as step\n if self.stepping == SteppingMode.STEP_INTO and self.active_frame is self.stored_frames[1] and event != \"return\":\n self.stepping = SteppingMode.STEP_NEXT\n\n # same as above but we are returning, so do single step to move out\n if self.stepping == SteppingMode.STEP_INTO and self.active_frame is self.stored_frames[1] and event != \"return\":\n test_breakpoints = False\n self.stepping = SteppingMode.STEP_SINGLE_EXEC\n self.pause_reason = \"step\"\n\n # step OUT and return happens, just move one step to line\n if self.stepping == SteppingMode.STEP_OUT and self.active_frame is self.stored_frames[1] and event == \"return\":\n test_breakpoints = False\n self.stepping = SteppingMode.STEP_SINGLE_EXEC\n self.pause_reason = \"stepOut\"\n return # exit evaluation\n\n # next will always break if this is line\n if self.stepping == SteppingMode.STEP_NEXT and self.active_frame is self.stored_frames[1] and event != \"call\":\n test_breakpoints = False\n self.stepping = SteppingMode.STEP_NO_STEP\n self.break_pause = False\n self.pause_reason = \"step\"\n self.cont = False\n handler.pause_debugging()\n\n if event == \"exception\" or event == \"call\":\n return # TODO: exceptions, calls\n\n if test_breakpoints:\n # due to lock we move triggered breakpoint to here\n breaking_on = None\n\n # check breakpoints under lock\n with self.bkp_lock:\n for breakpoint in self.active_breakpoints:\n if breakpoint.applies(frame):\n breaking_on = breakpoint\n break\n if breaking_on is not None:\n print(\"Broke at %s %s %s (%s))\" % (event, \"<File %s, Line %s>\" % (frame.f_code.co_filename, frame.f_lineno), str(arg), str(id(threading.current_thread()))))\n self.break_code(breaking_on) # sets this to blocking\n\n # check for external requested pause\n if self.break_pause:\n self.break_pause = False\n self.pause_reason = \"pause\"\n self.cont = False\n handler.pause_debugging()\n\n while not self.cont:\n # spinlock when we are waiting for debugger\n time.sleep(0.1)", "def record(self, step):", "def step(self, timestamp=0.0, **keywords):\n self.services.debug('step() method called')\n pass", "def set_step(self):\n super(Pdb, self).set_step()\n if hasattr(self, \"_set_trace_use_next\"):\n del self._set_trace_use_next\n self.set_next(self._via_set_trace_frame)", "def step_env(self, action):\n return self.env.step(action)", "def onTimeStep(self, timeStep):\n pass", "def step(self, step=None):\n pass", "def step_env(self):\n raise NotImplementedError\n # Not needed for this homework", "def set_step(self):\n # Issue #13183: pdb skips frames after hitting a breakpoint and running\n # step commands.\n # Restore the trace function in the caller (that may not have been set\n # for performance reasons) when returning from the current frame.\n if self.frame_returning:\n caller_frame = self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace = self.trace_dispatch\n self._set_stopinfo(None, None)", "def step(self, **kwargs):\n pass", "def _timestep_after_hook(self, *args, **kwargs):\n pass", "def StepEnv(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def stepStarted(build, step):", "def onTimeStepStart(self, timeStep):\n pass", "def trace(self, trace=...):\n ...", "def _step(self):\n pass", "def test_continue_on_early_trace_ending(context):\n env = get_env(\n {\n \"AWS_LAMBDA_FUNCTION_NAME\": \"finishing_spans_early_handler\",\n \"DD_LAMBDA_HANDLER\": \"tests.contrib.aws_lambda.handlers.finishing_spans_early_handler\",\n }\n )\n\n with override_env(env):\n patch()\n\n datadog(finishing_spans_early_handler)({}, context())", "def make_step(self):\n name = self.method.lower()\n if name==\"euler\": run_step = solvers.euler_step\n elif name==\"huen\": run_step = solvers.huen_step\n elif name==\"rk4\": run_step = solvers.rk4_step\n else: run_step = solvers.rk23_step\n\n def step_func(m):\n return run_step(self.dt,m,self.torque)\n self.step_func = step_func", "def on_eval_batch_begin(self, step, logs=None):", "def _step(self) -> None:", "def call_tracing(func, args): # real signature unknown; restored from __doc__\n return object()", "def step_lambda_wrapper(func):\n\n @functools.wraps(func)\n def _lambda_wrapper(*args, **kwargs):\n \"\"\"\n Generic Step Function wrapper\n \"\"\"\n cold_start_duration = time.time() - constants.COLD_START_TIME\n trace = epsagon.trace.trace_factory.get_or_create_trace()\n trace.prepare()\n\n try:\n event, context = args\n except ValueError:\n # This can happen when someone manually calls handler without\n # parameters / sends kwargs. In such case we ignore this trace.\n return func(*args, **kwargs)\n\n try:\n runner = epsagon.runners.aws_lambda.StepLambdaRunner(\n time.time(),\n context\n )\n trace.set_runner(runner)\n # pylint: disable=W0703\n except Exception as exception:\n # Regress to python runner.\n warnings.warn(\n 'Lambda context is invalid, using simple python wrapper',\n EpsagonWarning\n )\n trace.add_exception(\n exception,\n traceback.format_exc()\n )\n return epsagon.wrappers.python_function.wrap_python_function(\n func,\n args,\n kwargs\n )\n\n if constants.COLD_START:\n runner.resource['metadata'][\n 'aws.lambda.cold_start_duration'\n ] = cold_start_duration\n constants.COLD_START = False\n\n try:\n trace.add_event(\n epsagon.triggers.aws_lambda.LambdaTriggerFactory.factory(\n time.time(),\n event,\n context\n )\n )\n # pylint: disable=W0703\n except Exception as exception:\n trace.add_exception(\n exception,\n traceback.format_exc(),\n additional_data={'event': event}\n )\n\n trace.set_timeout_handler(context)\n\n result = None\n try:\n result = func(*args, **kwargs)\n steps_data = epsagon.utils.find_in_object(\n event,\n STEP_DICT_NAME\n )\n\n if isinstance(result, dict):\n epsagon.utils.print_debug(\n 'Step function result type is dict, steps_data={}'.format(\n steps_data\n )\n )\n # If the step functions data is not present, then this is the\n # First step.\n if steps_data is None:\n epsagon.utils.print_debug(\n 'Could not find existing steps data'\n )\n steps_dict = {'id': str(uuid4()), 'step_num': 0}\n path = []\n # Otherwise, just advance the steps number by one.\n else:\n # don't change trigger data\n steps_dict, path = steps_data\n steps_dict = copy.deepcopy(steps_dict)\n if 'step_num' in steps_dict:\n steps_dict['step_num'] += 1\n epsagon.utils.print_debug(\n 'Steps data found, new dict={}'.format(steps_dict)\n )\n else:\n steps_dict = {'id': str(uuid4()), 'step_num': 0}\n epsagon.utils.print_debug(\n 'Steps data not found, new dict={}'.format(\n steps_dict\n )\n )\n\n result_path = result\n # Tries to inject the steps data in the configured\n # or same path where it was found\n if isinstance(trace.step_dict_output_path, list):\n path = trace.step_dict_output_path\n try:\n for sub_path in path:\n result_path = result_path.get(sub_path)\n except Exception as exception: # pylint: disable=broad-except\n epsagon.utils.print_debug(\n 'Could not put steps in path={}'.format(path)\n )\n if result_path:\n epsagon.utils.print_debug(\n 'Adding steps dict to result_path={}'.format(\n result_path\n )\n )\n result_path[STEP_DICT_NAME] = steps_dict\n else:\n epsagon.utils.print_debug(\n 'Adding steps dict to root result'\n )\n result[STEP_DICT_NAME] = steps_dict\n\n runner.add_step_data(steps_dict)\n return result\n # pylint: disable=W0703\n except Exception as exception:\n runner.set_exception(\n exception,\n traceback.format_exc(),\n handled=False\n )\n raise\n finally:\n try:\n _add_status_code(runner, result)\n if not trace.metadata_only:\n runner.resource['metadata']['return_value'] = (\n copy.deepcopy(result)\n )\n # pylint: disable=W0703\n except Exception as exception:\n trace.add_exception(\n exception,\n traceback.format_exc(),\n )\n try:\n epsagon.trace.Trace.reset_timeout_handler()\n # pylint: disable=W0703\n except Exception:\n pass\n try:\n epsagon.trace.trace_factory.send_traces()\n # pylint: disable=W0703\n except Exception:\n pass\n\n return _lambda_wrapper" ]
[ "0.69365823", "0.6761876", "0.63075596", "0.63075596", "0.6298799", "0.60999227", "0.60761", "0.60579455", "0.6035825", "0.599109", "0.5944866", "0.5925591", "0.5841467", "0.58281803", "0.5821764", "0.5817346", "0.5761727", "0.5733629", "0.56965375", "0.56544715", "0.5641167", "0.5638249", "0.56223583", "0.5609661", "0.55959535", "0.55869186", "0.5569565", "0.55533916", "0.5547552", "0.5534232" ]
0.78040206
0
Call __monitor for the next event if one exists before calling ``env.step()``.
def tracing_step(): # noqa if len(self.__env._queue) > 0: t, prio, eid, event = self.__env._queue[0] self.__monitor(t, prio, eid, event) return env_step()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def trace(self): # noqa\n\n # pylint: disable=protected-access,invalid-name\n def get_wrapper(env_step):\n \"\"\"Generate the wrapper for env.step().\"\"\"\n @wraps(env_step)\n def tracing_step(): # noqa\n \"\"\"Call *__monitor* for the next event if one exist before\n calling ``env.step()``.\"\"\"\n if len(self.__env._queue) > 0:\n t, prio, eid, event = self.__env._queue[0]\n self.__monitor(t, prio, eid, event)\n return env_step()\n\n return tracing_step\n\n self.__env.step = get_wrapper(self.__env.step)", "def get_wrapper(env_step):\n @wraps(env_step)\n def tracing_step(): # noqa\n \"\"\"Call *__monitor* for the next event if one exist before\n calling ``env.step()``.\"\"\"\n if len(self.__env._queue) > 0:\n t, prio, eid, event = self.__env._queue[0]\n self.__monitor(t, prio, eid, event)\n return env_step()\n\n return tracing_step", "def _on_step(self) -> None:\n self._n_calls += 1\n # Account for multiple environments\n # each call to step() corresponds to n_envs transitions\n if self._n_calls % max(self.target_update_interval // self.n_envs, 1) == 0:\n polyak_update(self.q_net.parameters(), self.q_net_target.parameters(), self.tau)\n # Copy running stats, see GH issue #996\n polyak_update(self.batch_norm_stats, self.batch_norm_stats_target, 1.0)\n\n self.exploration_rate = self.exploration_schedule(self._current_progress_remaining)\n self.logger.record(\"rollout/exploration_rate\", self.exploration_rate)", "def _step(self):\n pass", "def _step(self, whence):\n pass", "def step(self):\n\n pass", "def next(self):\n if self.cur >= len(self.events):\n raise RuntimeError('Trying to time too many regions')\n self.events[self.cur].record()\n self.cur += 1", "def _step(self) -> None:", "def step(self, *args, **kwargs) -> None:\n self.accumulate_step += 1\n if self.accumulate_step < self.accumulate_size:\n pass\n else:\n self.accumulate_step = 0\n self.lr_scheduler.step(*args, **kwargs)", "def observe_first(self, env: dm_env.Environment, timestep: dm_env.TimeStep\n ) -> None:\n self._metrics = {}\n self._accumulate_metrics(env)", "def step(self):\n self.schedule.step()", "def _on_step(self):\n # self.logger.record(\"current_reward\")\n # self.n_calls is automatically updated because\n # we derive from BaseCallback\n if self.n_calls % self.eval_freq == 0:\n # === YOUR CODE HERE ===#\n # Evaluate the agent:\n # you need to do self.n_eval_episodes loop using self.eval_env\n # hint: you can use self.model.predict(obs, deterministic=True)\n mean_reward, std_reward = evaluate_policy(self.model, self.eval_env, n_eval_episodes=self.n_eval_episodes)\n # Save the latest agent\n self.logger.record(\"eval_mean_reward\", mean_reward)\n self.model.save(self.save_latest)\n # and update self.best_mean_reward\n if mean_reward > self.best_mean_reward:\n self.best_mean_reward = mean_reward\n self.model.save(self.save_path)\n if self.verbose > 0:\n print(\"Saving new best model at {} timesteps\".format(self.n_calls))\n print(\"Saving new best model to {}.zip\".format(self.save_best))\n \n print(\"Best mean reward: {:.2f}\".format(self.best_mean_reward))\n \n\n # ====================== # \n return True", "def do_step(self) -> None:", "def step(self, step=None):\n pass", "def step(self):\n raise NotImplementedError", "def step(self):\r\n raise NotImplementedError", "def record(self, step):", "def step(self):\n self.latent.step()", "def step(self, state, controls, reward, next_state, done):\n self.episode_step += 1\n # if early episode termination\n if self.params.max_episode_steps and self.episode_step >= self.params.max_episode_steps:\n done = True\n # track progress\n self.tracker.step(reward)\n # memorize\n self.memory.append((state, controls, reward, next_state, 1.0 - done))\n return done", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\n raise NotImplementedError()", "def step(self):\r\n self.datacollector.collect(self)\r\n self.datacollector2.collect(self)\r\n self.datacollector3.collect(self)\r\n self.datacollector4.collect(self)\r\n self.datacollector5.collect(self)\r\n self.datacollector6.collect(self)\r\n self.datacollector7.collect(self)\r\n self.datacollector8.collect(self)\r\n self.datacollector9.collect(self)\r\n self.datacollector10.collect(self)\r\n self.datacollector11.collect(self)\r\n self.datacollector12.collect(self)\r\n self.datacollector13.collect(self)\r\n\r\n self.datacollector14.collect(self)\r\n self.datacollector15.collect(self)\r\n self.datacollector16.collect(self)\r\n self.datacollector17.collect(self)\r\n self.datacollector18.collect(self)\r\n self.datacollector19.collect(self)\r\n self.datacollector20.collect(self)\r\n self.datacollector21.collect(self)\r\n self.datacollector22.collect(self)\r\n self.datacollector23.collect(self)\r\n self.datacollector24.collect(self)\r\n self.datacollector25.collect(self)\r\n self.datacollector26.collect(self)\r\n self.schedule.step()", "def step(self):\n self.function()", "def run(self):\n self.observer.start(self.time.start_time, self.agents, self.env)\n current_time = 0\n for current_time in self.time:\n self.env.step(current_time, self.agents)\n schedule = self.scheduler.step(current_time, self.agents, self.env)\n if self.two_stage:\n self._update_agents_two_stage(current_time, schedule)\n else:\n self._update_agents_one_stage(current_time, schedule)\n self.env.complete(current_time, self.agents)\n self.observer.step(current_time, self.agents, self.env)\n if self.terminator.test(current_time, self.agents, self.env):\n break\n self.observer.stop(current_time, self.agents, self.env)", "def step(self):\n self.driver.step()", "def step(self, state, action, reward, next_state, done):\n self.episode_step += 1\n # if early episode termination\n if self.params.max_episode_steps and self.episode_step >= self.params.max_episode_steps:\n done = True\n # track progress\n self.tracker.step(reward)\n # memorize\n self.memory.append((state, action, reward, next_state, 1.0 - done))\n return done", "def step(self):\n self.state_estimator.step()", "def step(self) -> bool:\n raise NotImplementedError()", "def _run_next_state(self):\n if self.state != \"STOP\":\n self.state = self.get_state_info(\"next\")\n self._run_state()" ]
[ "0.67177457", "0.6353171", "0.61669993", "0.6071973", "0.59611213", "0.5918542", "0.58596504", "0.58400434", "0.58186984", "0.581662", "0.57855827", "0.57379687", "0.5727999", "0.5711669", "0.5704478", "0.567846", "0.567341", "0.5660299", "0.56587714", "0.5639612", "0.5639612", "0.5639612", "0.5597076", "0.55564487", "0.55501336", "0.55485207", "0.5539108", "0.5538701", "0.55098313", "0.5501581" ]
0.6955955
0
Set the MHCI database directory path
def __setMHCI_DB_Path(self, mhcidb_dirname):
        if not hasattr(self, "mhcIdb_path") or self.mhcIdb_path is None:
            cwd = self.getCWD()
            pre_dir, after = cwd.split(mhcidb_dirname)
            self.mhcIdb_path = self.joinPath(pre_dir, mhcidb_dirname)
            print("# MHCIDB working path: {}".format(self.mhcIdb_path))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_db_file():\n\n return os.path.join(db_path, db_file)", "def get_database_directory(self):\n pass", "def setMHCIDBPath(self, mhcidb_dirname=\"MHCIDB\", data_dir=\"existing_data\"):\n self.__setMHCI_DB_Path(mhcidb_dirname)\n self.mhcIdb_existing_data_path = self.joinPath(self.mhcIdb_path, data_dir)\n self.mhcIdb_hla_path = self.joinPath(self.mhcIdb_existing_data_path, \"hla\" )\n self.mhcIdb_pdb_path = self.joinPath(self.mhcIdb_existing_data_path, \"pdb\" )\n self.mhcIdb_ba_path = self.joinPath(self.mhcIdb_existing_data_path, \"ba\" ) \n self.mhcIdb_pdb3d_path = self.joinPath(self.mhcIdb_pdb_path, \"raw_pdbs\")", "def getDBPath():\n return os.path.join(CONFIG_DIR, CONFIG_DICT['common']['local_db'])", "def db_path(self, host: str) -> str:\n app_path = os.path.abspath(os.getcwd())\n folder = 'data'\n path = os.path.join(app_path, folder)\n return os.path.normpath(os.path.join(path, host))", "def set_data_directory(path):\n gdc19.DATA_DIRECTORY = path\n return gdc19.DATA_DIRECTORY", "def set_data_directory(path):\n if not os.path.exists(path):\n return False\n\n set(\"data_dir\", path)\n return True", "def setupDbEnv(baseDirPath=None):\n global gDbEnv, gDbDirPath\n\n if not baseDirPath:\n baseDirPath = DATABASE_DIR_PATH\n\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n try:\n os.makedirs(baseDirPath)\n except OSError as ex:\n baseDirPath = ALT_DATABASE_DIR_PATH\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n os.makedirs(baseDirPath)\n else:\n if not os.access(baseDirPath, os.R_OK | os.W_OK):\n baseDirPath = ALT_DATABASE_DIR_PATH\n baseDirPath = os.path.abspath(os.path.expanduser(baseDirPath))\n if not os.path.exists(baseDirPath):\n os.makedirs(baseDirPath)\n\n gDbDirPath = baseDirPath # set global\n\n gDbEnv = lmdb.open(gDbDirPath, max_dbs=MAX_DB_COUNT)\n # creates files data.mdb and lock.mdb in dbBaseDirPath\n\n # create named dbs (core and tables)\n gDbEnv.open_db(b'core')\n gDbEnv.open_db(b'hid2did') # table of dids keyed by hids\n gDbEnv.open_db(b'did2offer', dupsort=True) # table of offer expirations keyed by offer relative dids\n gDbEnv.open_db(b'anon', dupsort=True) # anonymous messages\n gDbEnv.open_db(b'expire2uid', dupsort=True) # expiration to uid anon\n\n return gDbEnv", "def setDB(dbname):\n global DBNAME\n DBNAME = dbname", "def set_object_database (self, file_name):\n try:\n self.object_database=file_name\n self.filepath_object_database = os.path.join(self.filepath, self.object_database)\n print(\"set object_database filename to\", file_name)\n except:\n print(\"setting object database failed\")\n self.object_database=\"Object_database.db\"\n self.filepath_object_database = os.path.join(self.filepath, self.object_database)\n print(\"set object database name to default:\", self.object_database)\n return", "def set_db(db):\n global db_run # Imports the DB from the simulator\n db_run=db", "def setup_database(self):\n self.db.setup_database()", "def get_database_path():\n\treturn _paths[_DATABASE_PATH_KEY]", "def get_db_path():\n \n return(db_run.db_abs_path)", "def set_data(db_dir, command, args = None):\n #print command\n with lite.connect((db_dir)) as conn:\n #try:\n cursor = conn.cursor()\n if args:\n cursor.execute(command,args)\n else:\n cursor.execute(command)\n conn.commit()\n #print '[sql management] set successfully the data according to:\\n--- %s ---'%(command )\n return True\n #except:\n return False\n return False", "def set_output_database (self, file_name):\n try:\n self.object_database=file_name\n self.filepath_output_database = os.path.join(self.filepath, self.output_database)\n print(\"set output_database filename to\", file_name)\n except:\n print(\"setting object database failed\")\n self.output_database=\"Output_database.db\"\n self.filepath_output_database = os.path.join(self.filepath, self.output_database)\n print(\"set output database name to default:\", self.object_database)\n return", "def set_test_database(self):\n db_manager.db = SqliteDatabase(settings.UNITTEST_DATABASE_NAME)\n db_manager.Employee._meta.database = db_manager.db\n db_manager.LogEntry._meta.database = db_manager.db", "def datadir():\n return '../data/'", "def get_db_path():\n return os.path.join(sys.path[0], \"my_db.db\")", "def expand_db_path(path):\n if path is None:\n path = join(xdg.BaseDirectory.xdg_data_home, 'khal', 'khal.db')\n return expanduser(expandvars(path))", "def set_local_database():\n load_config()\n db_path = get_db_path_from_config()\n db = LocalDB(db_path, log=None)\n return db", "def GetDatabase(self):\r\n\r\n if self.database:\r\n return self.database\r\n \r\n if not os.path.exists(self.GetDataDir()):\r\n # Create the data folder, it still doesn't exist\r\n os.makedirs(self.GetDataDir())\r\n\r\n self.database = os.path.join(self.GetDataDir(), \"NDT_Database.db\")\r\n return self.database", "def database_file(file):\r\n fpath = path.join('databases', '{0}'.format(file))\r\n db_path = path.join(mod_path, fpath)\r\n return db_path", "def setup_data_directory(dir_path):\n if exists(\"{}.db\".format(dir_path)):\n raise Exception(\"Simulation data directory {}.db already exists!\".format(dir_path))\n else:\n pass", "def setDb(self, db_file):\n self.db_file = db_file\n self.db = sqlite3.connect(self.db_file, isolation_level=None)\n self._initDb()", "def create_db(self, path: str) -> None:\n if os.path.isfile(path):\n self.db_path = path\n print(\"DB already exists\")\n return\n\n print(path)\n\n self.db_path = path\n\n print(\"Opening the base db\")\n with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'basedb.xml'), 'r') as f:\n base = f.read()\n print(\"Reading the base as {0}\".format(base))", "def setDataDir(self, directory):\n if os.path.exists(directory):\n self.__datadir = directory\n print(\"Datadir setted to '%s'\" % directory)\n else:\n raise ValueError(\"Incorrect file path %s\" % directory)", "def __init__(self, dir: str):\n super().__init__()\n db_dir = os.path.expanduser(dir)\n if not os.path.exists(db_dir):\n try:\n os.makedirs(db_dir)\n except PermissionError:\n raise PermissionError(f'Could not create database directory: {db_path}') from None\n self.db = plyvel.DB(db_dir, create_if_missing=True)", "def test_datadir(self):\n self.chck_triple('datadir')", "def db_small_path():\n return os.path.join(_here, 'fixtures/databases/db-small/database')" ]
[ "0.70672226", "0.683271", "0.66077834", "0.6452278", "0.64162743", "0.63795066", "0.62165344", "0.61757326", "0.6167116", "0.61049265", "0.60585093", "0.6023727", "0.60130346", "0.6011672", "0.5975978", "0.5875537", "0.58486605", "0.5841338", "0.5835576", "0.5833521", "0.5804871", "0.5787624", "0.5776691", "0.5775112", "0.577083", "0.5770011", "0.5765608", "0.57580256", "0.57452935", "0.5730897" ]
0.74962735
0
Set the MHCIDB working directory path and the paths to its existing data
def setMHCIDBPath(self, mhcidb_dirname="MHCIDB", data_dir="existing_data"): self.__setMHCI_DB_Path(mhcidb_dirname) self.mhcIdb_existing_data_path = self.joinPath(self.mhcIdb_path, data_dir) self.mhcIdb_hla_path = self.joinPath(self.mhcIdb_existing_data_path, "hla" ) self.mhcIdb_pdb_path = self.joinPath(self.mhcIdb_existing_data_path, "pdb" ) self.mhcIdb_ba_path = self.joinPath(self.mhcIdb_existing_data_path, "ba" ) self.mhcIdb_pdb3d_path = self.joinPath(self.mhcIdb_pdb_path, "raw_pdbs")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __setMHCI_DB_Path(self, mhcidb_dirname):\n if not hasattr(self, \"mhcIdb_path\") or self.mhcIdb_path is None:\n cwd = self.getCWD() \n pre_dir, after = cwd.split(mhcidb_dirname)\n self.mhcIdb_path = self.joinPath(pre_dir,mhcidb_dirname )\n print((\"# MHCIDB workding path: {}\".format(self.mhcIdb_path)))", "def setDataRoot(path):\n global dataRoot\n dataRoot = os.path.realpath(path)", "def stick_everything_into_cwd():\n global DATA_HOME\n\n DATA_HOME = ''", "def set_data_directory(path):\n gdc19.DATA_DIRECTORY = path\n return gdc19.DATA_DIRECTORY", "def set_basedir(self, host, path):", "def set_data_directory(path):\n if not os.path.exists(path):\n return False\n\n set(\"data_dir\", path)\n return True", "def chdir(self):\n if not self.mru_exp_data:\n logging.error(\"data directory not set for prototxt db\")\n else:\n with suppress_errors():\n # Prototxt may depend on data path\n data_dir = op.join(self.mru_exp_data, \"data\")\n assert op.isdir(data_dir), \"No 'data' directory found in {}\".format(self.mru_exp_data)\n os.chdir(self.mru_exp_data)", "def set_data_dir(proj_data_dir):\n global _USER_PROJ_DATA\n global _VALIDATED_PROJ_DATA\n _USER_PROJ_DATA = proj_data_dir\n # set to none to re-validate\n _VALIDATED_PROJ_DATA = None", "def set_working_folder():\n username = getpass.getuser()\n osType = sys.platform\n if username.lower() == 'youval':\n if osType.startswith('win'):\n dr = r'C:\\Phenix\\Dev\\Work\\work\\Clashes\\wtest'\n else:\n dr = '/net/cci/youval/work/work/Clashes/wtest'\n os.chdir(dr)", "def cwd (self, path):\r\n pass", "def cd(self,path):\n self.cwd = path", "def __init__ (self, relPath) :\n self.m_path = \"\"\n \n dataPath = os.getenv(\"SIT_DATA\")\n if not dataPath: return\n \n for dir in dataPath.split(':'):\n path = os.path.join(dir, relPath)\n if os.path.exists(path):\n self.m_path = path\n break", "def set_local_path(self):\n return HERE", "def set_kale_data_directory(path):\n global KALE_DATA_DIRECTORY\n KALE_DATA_DIRECTORY = path\n # create dir if not exists\n if not os.path.isdir(KALE_DATA_DIRECTORY):\n os.makedirs(KALE_DATA_DIRECTORY, exist_ok=True)", "def _dir(self):\r\n self._h_dir = os.path.abspath(os.path.dirname(__file__))\r\n self._var_dir = os.path.join(self._h_dir, 'var')\r\n self._work_dir = os.path.join(self._h_dir, 'working')\r\n self._lib_dir = os.path.join(self._h_dir, 'lib')\r\n self._hylib_dir = os.path.join(self._lib_dir, 'hylib')\r\n self._hyutil_dir = os.path.join(self._lib_dir, 'hyutil')\r\n self._exe_dir = os.path.join(self._h_dir, 'exe')\r\n if not os.path.isdir(self._var_dir):\r\n os.mkdir(self._var_dir)\r\n if not os.path.isdir(self._work_dir):\r\n raise Exception('No HYSPLIT working directory found')\r\n for i in ['Error.txt', 'Queue.txt', 'Truncated.txt']:\r\n if not os.path.isfile(os.path.join(self._var_dir, i)):\r\n open(os.path.join(self._var_dir, i), 'w').close()", "def setfilepaths():\n\n if gethostname() in ['stable', 'challenger', 'p', 'fog']:\n ncDir = '/home/disk/eos9/woelfle/cesm/nobackup/cesm1to2/'\n ncSubDir = '0.9x1.25/'\n saveDir = ('/home/disk/user_www/woelfle/cesm1to2/')\n\n elif gethostname() == 'woelfle-laptop':\n ncDir = 'C:\\\\Users\\\\woelfle\\\\Documents\\\\UW\\\\CESM\\\\hist\\\\'\n ncSubDir = ''\n saveDir = 'C:\\\\Users\\\\woelfle\\\\Documents\\\\UW\\\\CESM\\\\figs\\\\'\n\n elif gethostname()[0:6] in ['cheyen', 'geyser']:\n ncDir = '/glade/p/cesmLE/CESM-CAM5-BGC-LE/'\n ncSubDir = ''\n saveDir = '/glade/p/work/woelfle/figs/cesm1to2/LENS/'\n\n return (ncDir, ncSubDir, saveDir)", "def set_data_dir(datadir, update_env=True):\n if os.path.isdir(datadir):\n _config.datadir = datadir\n\n if update_env:\n os.environ[\"TESSDATA_PREFIX\"] = _config.datadir\n else:\n _warn(\"set_data_dir: Invalid directory: '{0}'\".format(datadir))", "def setfilepaths():\n\n if gethostname() in ['stable', 'challenger', 'p', 'fog']:\n ncDir = '/home/disk/eos9/woelfle/cesm/nobackup/cesm1to2/'\n ncSubDir = '0.9x1.25/'\n saveDir = ('/home/disk/user_www/woelfle/cesm1to2/')\n\n elif gethostname() == 'woelfle-laptop':\n ncDir = 'C:\\\\Users\\\\woelfle\\\\Documents\\\\UW\\\\CESM\\\\hist\\\\'\n ncSubDir = ''\n saveDir = 'C:\\\\Users\\\\woelfle\\\\Documents\\\\UW\\\\CESM\\\\figs\\\\'\n\n elif gethostname()[0:6] in ['yslogi', 'geyser']:\n ncDir = '/glade/p/cgd/amp/people/hannay/amwg/climo/'\n ncSubDir = '0.9x1.25/'\n saveDir = '/glade/p/work/woelfle/figs/cesm1to2/'\n\n return (ncDir, ncSubDir, saveDir)", "def cwd(self):", "def setDataPath(_path_data_bundle, _path_bin_data, preload=True, verbose=True):\n global path_bin_data\n global path_data_bundle\n path_data_bundle = _path_data_bundle\n path_bin_data = _path_bin_data\n if preload:\n loadExistent(verbose)", "def setHdfPath(self,path=None):\n if path == None:\n path = InterfaceProvider.getPathConstructor().getVisitDirectory() + \"/ivium/\"\n if not os.path.exists(path): os.makedirs(path)\n caputS(self.pvStem+\"HDF\"+\":FilePath\",path)", "def _setPath(self, path):\n self.path = os.path.abspath(path)\n\n print('path = ' + path)\n try:\n os.chdir(self.path)\n except OSError as exc:\n LOGGER.error('Path doesn''t exist: %s' % (path))\n LOGGER.exception(exc)\n raise (exc)\n\n # check for path in the new Radiance directory:\n def _checkPath(path): # create the file structure if it doesn't exist\n if not os.path.exists(path):\n os.makedirs(path)\n print('Making path: '+path)", "def setup_data_dir():\n for dir_ in [PATH, WORKSPACE, RUNTEST_PATH, ENV_PATH, REPO_PATH]:\n if not os.path.isdir(dir_):\n os.makedirs(dir_)\n\n if not os.path.isdir(KEY_PATH):\n shutil.copytree(os.path.join(PATH, 'keys'), KEY_PATH)\n for key in os.listdir(KEY_PATH):\n if os.path.isfile(os.path.join(KEY_PATH, key)):\n os.chmod(os.path.join(KEY_PATH, key), 384) # 0600\n\n if not os.path.isdir(SEL_PATH):\n shutil.copytree(os.path.join(PATH, 'selinux'), SEL_PATH)\n for rule in os.listdir(SEL_PATH):\n if os.path.isfile(os.path.join(SEL_PATH, rule)):\n os.chmod(os.path.join(SEL_PATH, rule), 384) # 0600", "def get_data_path():\n return os.getcwd() + \"/data/\"", "def setDataDir(self, directory):\n if os.path.exists(directory):\n self.__datadir = directory\n print(\"Datadir setted to '%s'\" % directory)\n else:\n raise ValueError(\"Incorrect file path %s\" % directory)", "def work_dir(self, work_dir):\n self._work_dir = work_dir", "def changeDataPath(self,path):\n self.dataPath = path", "def __enter__(self):\n self.savedPath = os.getcwd()\n os.chdir(self.newPath)", "def set_cwd(cwd):\n global _cwd\n _cwd = cwd\n _make_cwd()", "def set_working_dir(self, working_dir):\r\n self.process.set_working_dir(working_dir)" ]
[ "0.7445844", "0.66888374", "0.6555975", "0.64608467", "0.6353928", "0.6310838", "0.6264556", "0.61646134", "0.61341184", "0.60911024", "0.601722", "0.58324337", "0.57763416", "0.5768574", "0.5744997", "0.5684497", "0.56657374", "0.5663596", "0.5662311", "0.5661884", "0.56586355", "0.5658551", "0.5655914", "0.56549376", "0.5634327", "0.5613038", "0.56121993", "0.55825007", "0.5577036", "0.55756855" ]
0.75485724
0
Return the path of the file containing the aligned protein sequences of HLA genes A, B and C
def get_hla_aligned_seq_fp(self): fn_aln = "ClassI_prot.txt" return self.joinPath(self.mhcIdb_hla_path, fn_aln)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_path(self, protein_id: int):\n protein_name = self.files_refined[protein_id]\n path_protein = os.path.join(\n self.init_refined, protein_name, protein_name + \"_protein.pdb\"\n )\n path_ligand = os.path.join(\n self.init_refined, protein_name, protein_name + \"_ligand.mol2\"\n )\n return path_protein, path_ligand", "def _get_organisms_file_path(self, gene_name, gene_id):\n return os.path.join(os.getcwd(), \"src\", \"data\", \"organisms\", \"{}_{}.txt\".format(gene_name, gene_id))", "def _get_gene_file_path(self):\n return os.path.join(os.getcwd(), \"src\", \"data\", \"genes\", \"genes.txt\")", "def all_hq_fa(self):\n return op.join(self.combined_dir, 'all.polished_hq.fasta')", "def find_sequence(filename_pdb, filename_txt):\n with open(filename_pdb, \"r\") as pdb_file, open(filename_txt, \"a\") as seq_file:\n ca_lines = []\n sequence = \"\"\n lines = pdb_file.readlines()\n\n amino_acids = {\"ALA\": \"A\", \"GLY\": \"G\", \"GLU\": \"E\", \"ARG\": \"R\",\n \"TRP\": \"W\", \"TYR\": \"Y\", \"SER\": \"S\", \"ASN\": \"N\",\n \"ASP\": \"D\", \"CYS\": \"C\", \"GLN\": \"Q\", \"HIS\": \"H\",\n \"ILE\": \"I\", \"LEU\": \"L\", \"LYS\": \"K\", \"MET\": \"M\",\n \"PHE\": \"F\", \"PRO\": \"P\", \"THR\": \"T\", \"VAL\": \"V\"}\n\n for line in lines:\n if line[12:16].strip() == \"CA\":\n ca_lines.append(line)\n sequence = sequence + amino_acids[line[17:20]]\n\n new_sequence = \"\"\n for aa in sequence:\n new_sequence += aa\n if len(new_sequence.replace(\"\\n\", \"\")) % 70 == 0:\n new_sequence += \"\\n\"\n\n\n seq_file.write(f\">{filename_pdb[11:18]}\\n\")\n seq_file.write(new_sequence)\n seq_file.write(\"\\n\")", "def flnc_reseq_to_hg_bam(self):\n return op.join(self.reseq_to_hg_dir, \"flnc_to_hg.bam\")", "def all_lq_fa(self):\n return op.join(self.combined_dir, 'all.polished_lq.fasta')", "def display_algn_seq():\n \n import os\n choice = input('Enter the name of the file: ')\n filepath = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Data', choice)\n with open(filepath,'r') as file:\n seq_list = []\n for line in file:\n if line[:6] == 'SEQRES':\n line_split = line.split()[4:]\n seq_list.append(line_split)\n \n filepath1 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile1')\n with open(filepath1, 'w') as outfile:\n for i in seq_list:\n outfile.writelines(i)\n \n filepath2 = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile2')\n j = os.path.join('/home/njesh/python-mini-project-JaneNjeri/Results', 'outfile1')\n with open(j, 'r') as fil:\n d = {'CYS':'C','ASP':'D','SER':'S','GLN':'Q','LYS':'K','ILE':'I','PRO':'P','THR':'T','PHE':'F','ASN':'N',\n 'GLY':'G','HIS':'H','LEU':'L','ARG':'R','TRP':'W','TER':'*','ALA':'A','VAL':'V','GLU':'E','TYR':'Y',\n 'MET':'M','XAA':'X'}\n with open(filepath2, 'w') as outf:\n for line in fil:\n if len(line) %3 == 0:\n upper_seq = line.upper()\n single_seq = ''\n for i in range(int(len(upper_seq)/3)):\n single_seq += d[upper_seq[3*i:3*i+3]]\n outf.write(single_seq) \n return single_seq\n else:\n print(\"ERROR: Line was not a factor of 3 in length!\")", "def _get_cora_filepath():\n # type: () -> Tuple[str, str]\n cache_root = download.get_dataset_directory(_root)\n feat_cache_path = os.path.join(cache_root, feat_file_name)\n edge_cache_path = os.path.join(cache_root, edge_file_name)\n return feat_cache_path, edge_cache_path", "def lq_isoforms_fa(self):\n return op.join(self.fasta_dir, \"lq_isoforms.fasta\")", "def write_protein_fasta(args, clusters=None, fasta_dir=None):\n row, concat_fasta_path, frags = args\n dotpath = row[\"path\"]\n phylogeny_dict = {\"prot.idx\": row.name, \"path\": dotpath}\n for phy_prop in [name for name in row.index if name.startswith(\"phy.\")]:\n phylogeny_dict[phy_prop] = row[phy_prop]\n inpath = dotpath_to_path(dotpath)\n prot_info = read_tsv_or_parquet(inpath / PROTEINS_FILE)\n prot_info[\"frag.idx\"] = prot_info[\"frag.id\"].map(\n lambda oid: frags.loc[oid][\"frag.idx\"]\n )\n prot_info[\"frag.is_plas\"] = prot_info[\"frag.id\"].map(\n lambda oid: frags.loc[oid][\"frag.is_plas\"]\n )\n prot_info[\"frag.is_scaf\"] = prot_info[\"frag.id\"].map(\n lambda oid: frags.loc[oid][\"frag.is_scaf\"]\n )\n prot_info[\"frag.is_chr\"] = prot_info[\"frag.id\"].map(\n lambda oid: frags.loc[oid][\"frag.is_chr\"]\n )\n prot_info[\"frag.id\"] = prot_info[\"frag.id\"].map(\n lambda oid: frags.loc[oid][\"frag.id\"]\n )\n # Write out updated protein info\n write_tsv_or_parquet(prot_info, inpath / HOMOLOGY_FILE)\n # include phylogeny info in per-sequence info\n for prop in phylogeny_dict:\n prot_info[prop] = phylogeny_dict[prop]\n # write concatenated sequence info\n if clusters is None:\n fasta_path = concat_fasta_path\n info_to_fasta(None, fasta_path, append=True, infoobj=prot_info)\n else:\n for cluster_id, subframe in clusters.groupby(by=[\"cluster_id\"]):\n cluster_info = prot_info[prot_info.index.isin(subframe[\"members\"])]\n fasta_path = fasta_dir / f\"{cluster_id}.fa\"\n info_to_fasta(None, fasta_path, append=True, infoobj=cluster_info)", "def hq_isoforms_fa(self):\n return op.join(self.fasta_dir, \"hq_isoforms.fasta\")", "def get_sequences(data_path, gene):\n \n sequence_file = os.path.join(data_path, gene + \".fasta\")\n try:\n sequences_gene = sequence.Sequence.create(file = sequence_file, input_format = 'fasta')\n except FileNotFoundError:\n print(\"Did not found {} in {}.\".format(gene,data_path))\n sequences_gene = \"Did not found {} in {}.\".format(gene,data_path)\n except:\n print(\"Unexpected Error while trying to get the sequence from {}.\".format(sequence_file))\n sequences_gene = \"Unexpected Error while trying to get the sequence from {}.\".format(sequence_file)\n # print(\"sequences_gene\", sequences_gene)\n return sequences_gene", "def concatenate_detected_verified(fasta_name, PATH_FASTA_DETECTED, PATH_FASTA_VERIFIED, INFO_folder, PATH_FASTA_CONCATENATED):\n\n\tprint \"\\n#################\"\n\tprint \"# Concatetaned file\"\n\tprint \"#################\\n\"\n\n\t# NOTE Dictionaire avec en clef l'id espèce/système et en value une liste\n\t# NOTE [\"l'id espèce/système du verifié qui correspond\", [liste des sequences ATPase, IM ...]]\n\tdict_remove = {}\n\n\tprint \"\\n------------------------------------------\"\n\tprint \"| First read : Creation of the dictionnary\"\n\tprint \"------------------------------------------\\n\"\n\n\tfor fasta_file in fasta_name :\n\t\tverified_fasta=os.path.join(PATH_FASTA_VERIFIED, fasta_file)\n\t\tdetected_fasta=os.path.join(PATH_FASTA_DETECTED, fasta_file)\n\t\tconcatenated_fasta=os.path.join(PATH_FASTA_CONCATENATED, fasta_file)\n\n\t\tlist_seq_verified = list(SeqIO.parse(verified_fasta, \"fasta\"))\n\t\tlist_id_verified = [seq.id for seq in list_seq_verified]\n\t\tlist_seq_verified = [seq.seq for seq in list_seq_verified]\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\t\tnumber_seq = len(list(seq_parser))\n\t\tprogression = 1\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\n\t\t# IDEA Il faut tester au moins une fois pour voir si lors de la concatenation, je ne me retrouve pas avec des systems ou je n'ai pas tous enlevé. Exemple l'ATPase de X n'est pas la même que celle de Y mais l'IMplatform l'ai si c'est le cas X est a enlevé aussi pour son ATPase\n\t\t# IDEA Si idea précédente vrai alors il faut faire des fichiers temporaires des sequences que l'on garde et concatener par \"cat\" à la fin le fichier temporaire et son homonyme en verifié.\n\n\t\t# NOTE Il y avait un problème : le nom/id de l'epèce + système ne doit pas contenir le _NumX_ car ce Num fait référence au nombre de duplicat de la protéine (exemple deux ATPase gspE)\n\t\t# NOTE Quelques systèmes on des sequences qui sont similaire pour toutes les protéines sauf une exemple ESCO3 et NC_011993 qui sont identique pour tous sauf ATPase (98% seulement)\n\n\t\tfor seq in seq_parser :\n\n\t\t\tsys.stdout.write(\"File : {} -> {:.2f}% : {}/{} sequences detected read\\r\".format(fasta_file, progression/float(number_seq)*100, progression,number_seq))\n\t\t\tsys.stdout.flush()\n\t\t\tprogression += 1\n\n\t\t\tid_seq=seq.id.split(\"_\")\n\t\t\tid_seq=re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq[:id_seq.index(\"D\")]))\n\n\t\t\tif id_seq in dict_remove :\n\t\t\t\tcontinue\n\n\t\t\telif seq.seq in list_seq_verified :\n\t\t\t\tindex=list_seq_verified.index(seq.seq)\n\n\t\t\t\tid_seq_verif = list_id_verified[index].split(\"_\")\n\t\t\t\tid_seq_verif = re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq_verif[:id_seq_verif.index(\"V\")]))\n\n\t\t\t\t# NOTE dans le dictionnaire je met le système vérifié en premier, toutes les séquences du système identitique en deuxième et la séquence qui en est la cause en troisème\n\t\t\t\tdict_remove[id_seq]=[id_seq_verif,[], seq.id]\n\n\t\tprint\n\t\tprint(\"File : {} -> Done!\".format(fasta_file))\n\n\tprint \"\\n-----------------------------\"\n\tprint \"| Second read : Writing files\"\n\tprint \"-----------------------------\\n\"\n\n\tfor fasta_file in fasta_name :\n\t\tverified_fasta=os.path.join(PATH_FASTA_VERIFIED, fasta_file)\n\t\tdetected_fasta=os.path.join(PATH_FASTA_DETECTED, fasta_file)\n\t\tconcatenated_fasta=os.path.join(PATH_FASTA_CONCATENATED, fasta_file)\n\n\t\tos.system('cat \"{}\" > \"{}\"'.format(verified_fasta, concatenated_fasta))\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\t\tnumber_seq = len(list(seq_parser))\n\t\tprogression = 1\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\n\t\twith open(concatenated_fasta, \"a\") as w_file :\n\t\t\tfor seq in seq_parser :\n\n\t\t\t\tsys.stdout.write(\"File : {} -> {:.2f}% : {}/{} sequences detected read\\r\".format(fasta_file, progression/float(number_seq)*100, progression,number_seq))\n\t\t\t\tsys.stdout.flush()\n\t\t\t\tprogression += 1\n\n\t\t\t\tid_seq=seq.id.split(\"_\")\n\t\t\t\tid_seq=re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq[:id_seq.index(\"D\")]))\n\n\t\t\t\tif id_seq in dict_remove :\n\t\t\t\t\tdict_remove[id_seq][1].append(seq)\n\n\t\t\t\telse :\n\t\t\t\t\tSeqIO.write(seq, w_file, \"fasta\")\n\t\tprint\n\t\tprint(\"File : {} -> Done!\".format(fasta_file))\n\n\t# NOTE Dict remove complete and all concatenate write\n\twrite_remove_concatenate(dict_remove, INFO_folder)\n\n\treturn", "def _gene_ann(gene_ann_path):\n gene_ann = pd.read_csv(gene_ann_path)\n protein_gene = gene_ann[gene_ann.gene_type ==\n 'protein_coding'].gene_name.tolist()\n return(protein_gene)", "def ccs_fa(self):\n return op.join(self.fasta_dir, \"ccs.fasta\")", "def consensus_isoforms_fa(self):\n return op.join(self.fasta_dir, \"consensus_isoforms.fasta\")", "def hq_reseq_to_hg_bam(self):\n return op.join(self.reseq_to_hg_dir, \"hq_to_hg.bam\")", "def ori_all_reads_fasta(self):\n return op.join(self.out_dir, \"all_reads.fasta.ori\")", "def getDosFormattedFasta():\n return _getAbsPath('barcodes-ed65-450.fasta')", "def read_fasta_file(path):\n with open(path) as data_file:\n output = {}\n sequence_name = None\n for line in data_file.readlines():\n if line.startswith(\">\"):\n sequence_name = line[1:].strip()\n else:\n output.setdefault(sequence_name, \"\")\n line = \"\".join(re.findall(\"[acgtACGT]+\", line))\n\n output[sequence_name]+=line.upper()\n return output", "def PATH(self) -> str:\n return \".\".join(self.SEQ)", "def fast_Q2A(fastq_filepath):\n filein = open(fastq_filepath, \"r\")\n fileout = open(fastq_filepath[:-5] + \"fasta\", \"w\")\n found_id = 0\n num_of_seqs = 0\n for i in filein:\n if i[0] == \"@\":\n seq_id = \">\" + i[1:]\n found_id = 1\n num_of_seqs += 1\n continue\n if found_id == 1:\n seq = i\n found_id = 0\n fileout.write(seq_id + seq)\n filein.close()\n fileout.close()\n print num_of_seqs\n return os.path.abspath(fileout.name)", "def getAlignedBam():\n return _getAbsPath(\"m140905_042212_sidney_c100564852550000001823085912221377_s1_X0.aligned_subreads.bam\")", "def palindrome_reads_fasta(self):\n return op.join(self.out_dir, \"palindrome_subreads.fasta\")", "def read_write_protein_files(dir_path, heme_files):\n for i in number_of_files:\n# seqs = {}\n input_files = (dir_path + heme_files[i])\n f = open(input_files)\n count = 0\n# output_file = (dir_path + heme_files[i] + \".txt\")\n# g = open(output_file, \"x\")\n with open(input_files) as f:\n for line in f:\n if line.startswith('>'):\n name = line[1:].rstrip('\\n')\n count = count + 1\n seqs =[]\n else: # sequence, not header\n seqs[name] = seqs[name] + line\n# sequences += line[:-1]\n# output_file = open(\"out_\" + str(count) + \"_.txt\", \"a\")\n# output_file.write(str(len(sequences)))\n print(\"Number of proteins read:\" + count)\n f.close", "def transeq(seq):\n \n temp_file = 'PATH/TO/ROOT/Database_Outputs/temp.fasta'\n temp = open(temp_file, 'w')\n temp.write(\">Just a formality \\n\"+seq)\n temp.close()\n \n trans = \"PATH/TO/ROOT/BLISTR_support_programs/./transeq -sequence \"+temp_file+\" -outseq \"+temp_file[:-6]+\".faa\"\n proc = subprocess.Popen(trans, shell=True)\n proc.wait()\n \n temp = open(temp_file[:-6]+\".faa\", 'r')\n new_seq = \"\"\n for line in temp:\n if line.startswith(\">\"):\n continue\n new_seq += line\n \n os.remove(temp_file)\n os.remove(temp_file[:-6]+\".faa\")\n \n return new_seq", "def rc_all_reads_fasta(self):\n return op.join(self.out_dir, \"rc_all_reads.fasta\")", "def getcodetofilename(index_file_parameters,bamfile_id):\n index_file=index_file_parameters['index']\n relative_flg=index_file_parameters['relative']\n \n index_dict=dict([(lntxt.rstrip().split(',')[0],lntxt.rstrip().split(',')[1]) for lntxt in open(index_file).readlines()])\n \n if bamfile_id not in index_dict:\n return ''\n \n if relative_flg==0:\n return index_dict[bamfile_id]\n else:\n relative_dir='/'.join(index_file.split('/')[0:-1])\n return '%s/%s'%(relative_dir,index_dict[bamfile_id])", "def path(self):\n return self.alignment.matching_function_bestpath(self.idx)" ]
[ "0.6313968", "0.6223646", "0.60979223", "0.6060333", "0.6025667", "0.6019102", "0.5903507", "0.576424", "0.5727223", "0.5694345", "0.5682708", "0.56573516", "0.56486446", "0.56058353", "0.5595129", "0.55100703", "0.54998416", "0.548543", "0.54813707", "0.5469157", "0.5459987", "0.54488957", "0.5426146", "0.5411073", "0.5399744", "0.53942794", "0.5383473", "0.5350961", "0.5347223", "0.53324735" ]
0.6534873
0
Sets Driver's node list.
def set_nodes(self, nodes): self._drv_nodes = nodes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def nodes(self, nodes):\n\n self._nodes = nodes", "def set_node_positions(self):", "def nodes(self, nodes_array):\n self.nodes_set = nodes_array", "def set_node(self, node):\n self.__node = node", "def set_nodes(self, nodes_list):\n try:\n self.node_from = nodes_list[self.node_from_code]\n self.node_from.add_line(self)\n self.node_to = nodes_list[self.node_to_code]\n self.node_to.add_line(self)\n except AttributeError:\n raise Exception('line %r has no node(s)!' % self)", "def set_nodes(self, ndict):\n self.inode_ref = ndict[self.inode]\n self.jnode_ref = ndict[self.jnode]", "def setCurrentNode(self, newNode):\r\n\t\tself.currentNode = newNode", "def update_nodes(self):\n raise NotImplementedError('ERROR: sweeper has to implement update_nodes(self)')", "async def set_nodes(self, node_callq: Dict):\n for svc in self._services:\n await svc.set_nodes(node_callq)", "def setnodemodels(self):\n for n in self._objs:\n self.setnodemodel(n)", "def set_node(self, name, state):\n self.source_net.nodes[name] = state", "def change_driver(node, driver):\n module_node = node.find('./attstr')\n module_node.set(\"val\", driver)", "def set_submodule_list(self, L):\n\t\tself.submodule_list = L", "def set_class_list(self, L):\n\t\tself.class_list = L", "def get_nodes(self):\n with open('node_list.txt', 'r') as file:\n self.nodes = [line.rstrip('\\n') for line in file]", "def set_next(self, node):\n self.__next = node", "def setup_lists(self):\n pass", "def set_next(self, node):\r\n self.__next = node", "def initJoints(self):\n self.joints = list(self.tree.nodes)", "def __init__(self, nodes=None):\r\n self.nodes = nodes", "def __setitem__(self, nodename, node):\n\n for hash_ in self._repl_iterator(nodename):\n if hash_ in self._nodes:\n raise ValueError(\"Node name %r is \"\n \"already present\" % nodename)\n self._nodes[hash_] = node\n bisect.insort(self._keys, hash_)", "def __setitem__(self, nodename, node):\n\n for hash_ in self._repl_iterator(nodename):\n if hash_ in self._nodes:\n raise ValueError(\"Node name %r is \"\n \"already present\" % nodename)\n self._nodes[hash_] = node\n bisect.insort(self._keys, hash_)", "def set_libraries(self, lst):\n self.__libraries = lst", "def __set_port_list(self):\n\n self._coms = [str(i.device) for i in sorted(self.ports)]", "def __init__(self):\n\n self.head = None\n self.node_count = 0", "def __init__(self, nodes):\n\n self._nodes = nodes", "def setNext(self, next_node):\n self.__nextListNode = next_node", "def __init__(self):\n\n self.nodes = set()", "def set_driver(self, driver):\n self.driver = driver", "def set_nodeset(self, nodeset):\n self.nodeset = set(nodeset) # overwrite the existing nodeset with the input nodeset\n\n self.__check_validity() # check if graph is valid - throws exception if not" ]
[ "0.623272", "0.61300147", "0.58855206", "0.58536756", "0.58154875", "0.57913256", "0.57402974", "0.572732", "0.5669491", "0.5631962", "0.55938464", "0.55284536", "0.5516289", "0.5496053", "0.54852945", "0.54835105", "0.54743034", "0.5465144", "0.54392004", "0.5405577", "0.5403067", "0.5403067", "0.53793347", "0.5353934", "0.5317408", "0.5315532", "0.531139", "0.5293562", "0.52822286", "0.5264858" ]
0.7336902
0
Snapshot an image of the specified instance
def snapshot(self, context, instance, image_id, update_task_state): raise NotImplementedError()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def snapshot(self, instance, name):\n # TODO(imsplitbit): Need to implement vzdump\n pass", "def get_snapshot(project, zone, instance):\n snapshot_disks(project, zone, *get_disks(instance))", "def take_snapshot(instance_id, objects_created, instance_name=None,\n public=False, wait_for_available=120):\n if not instance_name:\n instance_name = instance_id\n command = 'nova image-create --show %s temp-snap-%s' % (instance_id,\n instance_name)\n snapshot = parse_output(Popen(command.split(), stdout=STDOUT, stderr=STDERR\n ).communicate()[0])\n if wait_for_available > 0:\n wait = 0\n again = False\n while wait < wait_for_available:\n time.sleep(5)\n wait += 5\n again = False\n command = 'glance image-show %s' % snapshot['id']\n status = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n )['status']\n if status == 'error':\n # clean up and create snapshot again\n command = 'glance image-delete %s' % snapshot['id']\n a = Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n command = 'nova image-create --show %s temp-snap-%s' % \\\n (instance_id, instance_name)\n snapshot = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0])\n again = True\n elif status == 'queued' or status == 'saving':\n again = True\n elif status == 'active':\n snapshot['status'] = status\n break\n if again: # loop ended due to timeout\n print 'Error snapshotting instance \\'%s\\'!' % instance_id\n print 'The following entities were created in the process:'\n print_objects_created(objects_created)\n sys.exit(-1)\n if public:\n command = 'glance image-update --visibility public %s' % snapshot['id']\n else:\n command = 'glance image-update --visibility private %s' % \\\n snapshot['id']\n snapshot = parse_output(Popen(\n command.split(), stdout=STDOUT, stderr=STDERR).communicate()[0])\n return snapshot", "def snapshot_image_on_provider(self, builder, provider, credentials, target, template, parameters):", "def snap(self, path=None):\n if path is None:\n path = \"/tmp\"\n else:\n path = path.rstrip(\"/\")\n day_dir = datetime.datetime.now().strftime(\"%d%m%Y\")\n hour_dir = datetime.datetime.now().strftime(\"%H%M\")\n ensure_snapshot_dir(path+\"/\"+self.cam_id+\"/\"+day_dir+\"/\"+hour_dir)\n f_path = \"{0}/{1}/{2}/{3}/{4}.jpg\".format(\n path,\n self.cam_id,\n day_dir,\n hour_dir,\n datetime.datetime.now().strftime(\"%S\"),\n )\n\n urllib.urlretrieve(\n 'http://{0}/snapshot.cgi?user={1}&pwd={2}'.format(\n self.address, \n self.user, \n self.pswd,\n ),\n f_path,\n )\n #print resp[1]['Content-disposition'].replace(\"filename=\\\"\",\"\")[:-1]", "def snapshot(self):\n pass", "def _take_snapshot(self, filename, frame):\n thumbnail_key = os.path.join(self._obj_key_prefix, \"{}.jpg\"\n .format(filename))\n drawn_image = im.draw_region(frame.image,\n self._detector.roi,\n EVENT_ALERT_COLOR_CODE,\n 0.4)\n shrunk_image = im.shrink_image(drawn_image)\n self._obj_store.save_image_obj(thumbnail_key, shrunk_image)\n return thumbnail_key", "def __take_snapshot(self):\n # Set the filename to\n # year_month_date_hour_minute_second.jpg\n time_label = datetime.datetime.now()\n filename = f\"{time_label.strftime('%Y-%m-%d_%H-%M-%S')}.jpg\"\n\n # Start the saving function in background\n save_img_back = threading.Thread(target=self.__upload,\n args=(filename,),\n daemon=True)\n save_img_back.start()", "def do_takesnapshot(self, str_arg):\n img = None\n fname = validateString(str_arg)\n try:\n # self.adbc.wake()\n printLog(self.threadName + 'taking snapshot (0,50,%d,%d) ...' %\n (self.scn_width, self.scn_height))\n img = self.adbc.takeSnapshot(reconnect=True)\n # PIL code\n img = img.crop((0, 50, self.scn_width, self.scn_height))\n img.save(fname, SNAPSHOT_IMAGE_FORMAT)\n # if self.scn_width>SNAPSHOT_WIDTH:\n # self.compressImage(fname)\n # os.remove(fname)\n # im.save(fname)\n printLog(self.threadName + 'snapshot saved as %s' % fname)\n except EnvironmentError:\n self.resultFlag = False\n if DEBUG:\n traceback.print_exc()\n finally:\n img = None", "def snapshot(self, filename=None):\n if filename:\n self.command(\"snapshot %(filename)s\" % locals())\n else:\n self.command(\"snapshot\")", "def snapshot(self):\n self._client.snapshot()", "def snapshot(source, destination):\n\n processutils.execute(\n 'qemu-img convert --force-share -O qcow2 %s %s'\n % (source, destination),\n shell=True)", "def snapshot_gen(self):\n \n # Generate snapshot\n snapshot, snapshot_param = make_snapshot.snapshot_gen(self._parent)\n # Save to ICobj\n self._parent.snapshot = snapshot\n self._parent.snapshot_param = snapshot_param", "def save(self, arguments):\n name = arguments['<name>']\n\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n if vmrun.snapshot(name) is None:\n puts_err(colored.red(\"Cannot take snapshot\"))\n else:\n puts_err(colored.green(\"Snapshot {} taken\".format(name)))", "def snapshot(self, snapshot):\n self._context[\"snapshot\"] = snapshot", "def capture_snapshot(db_conn, logger):\n Snapshots.__validate(db_conn=db_conn,\n logger=logger)\n #\n db_conn.execute_proc(name='DBMS_WORKLOAD_REPOSITORY.CREATE_SNAPSHOT',\n parameters={})", "def cam_snap(self):\r\n self.cam = CamActuator()\r\n self.cam.initializeCamera()\r\n \r\n exposure_time = self.CamExposureBox.value()\r\n self.Rawimage = self.cam.SnapImage(exposure_time)\r\n self.cam.Exit()\r\n print('Snap finished')\r\n \r\n self.MLtargetedImg_raw = self.Rawimage.copy()\r\n \r\n self.MLtargetedImg = self.convert_for_MaskRCNN(self.MLtargetedImg_raw)\r\n \r\n self.show_raw_image(self.MLtargetedImg)\r\n\r\n self.addedROIitemMask = np.zeros((self.MLtargetedImg.shape[0], self.MLtargetedImg.shape[1]))\r\n self.MLmask = np.zeros((self.MLtargetedImg.shape[0], self.MLtargetedImg.shape[1]))", "def get_image(self):\n logging.debug(\"%s get_image entered\" % str(self.machine_name))\n snapshots = cs.list_snapshots()\n # find the one for this server\n if self.cloudserver:\n server_id = self.cloudserver.id\n else:\n return self.image_id\n\n for snapshot in snapshots:\n img = snapshot.metadata.get(\"instance_uuid\", None)\n # print \"XXX:\", img\n\n if img == server_id:\n print \"Server %s has snapshot %s\" % (server_id, img)\n return img\n\n print \"Server %s has no snapshots\" % (server_id)\n return None", "def snapshot(self, file_path=None):\n \"\"\"default not write into file.\"\"\"\n screen = self.minicap.get_frame()\n\n if file_path:\n file_name = str(time.time()*1000) + '.jpg'\n file_path = os.path.join(file_path, file_name)\n ImgUtils.imwrite(file_path, screen)\n\n # '''t_img 需转换为cv2可解码的文件,不然会抛错 src is not a numpy array, neither a scalar'''\n # try:\n # screen = ImgUtils.str2img(screen)\n # except Exception:\n # # may be black/locked screen or other reason print exc for debugging\n # import traceback\n # traceback.print_exc()\n # return None\n\n return screen", "def create_snapshot(self):\n # Don't create if it already exists\n if self.image_available(self.snapshot_name):\n print('Snapshot already exists')\n return\n\n self.spawn()\n\n sleep_len = 10\n # Make sure the network is up\n t = 0\n networks = None\n while not networks:\n try:\n networks = self.instances[0].networks\n except:\n # not ready yet\n pass\n print('Waited {0}s for network to be up'.format(t))\n if not networks:\n time.sleep(sleep_len)\n t += sleep_len\n self.instances[0] = self.nova.servers.get(self.instances[0].id)\n\n # make sure an ip is received that we can ssh to\n # self.instances[0].add_floating_ip('129.16.125.236')\n t = 0\n ip = None\n while not ip:\n networks = self.instances[0].networks\n for key in networks:\n if 'IPv4' in key:\n ips = networks[key]\n for i in ips:\n # change to not if we want a floating ip\n if i.startswith('192'):\n ip = i\n break\n break\n if not ip:\n time.sleep(sleep_len)\n print('Waited {0}s for ip'.format(t))\n t += sleep_len\n self.instances[0] = self.nova.servers.get(self.instances[0].id)\n\n # make sure cloud init finishes\n t = 0\n while not self._exists_remote(ip):\n print('Waited {0}s for cloud-init to finish'.format(t))\n time.sleep(sleep_len*3)\n t += sleep_len*3\n # create snapshot and make sure it gets active\n self.nova.servers.create_image(self.instances[0].id, self.snapshot_name, None)\n snapshot = self.nova.glance.find_image(self.snapshot_name)\n\n # Wait until snap\n t = 0\n status = snapshot.status\n while status != 'active':\n print('Waited {0}s for snapshot. Status is {1}'.format(t, status))\n snapshot = self.nova.glance.find_image(self.snapshot_name)\n status = snapshot.status\n time.sleep(sleep_len*3)\n t += sleep_len*3\n print('Snapshot successfully uploaded. Now terminating worker.')\n # kill created worker\n self.terminate_all()", "def recorder_snapshot(recorder_name, width=None, height=None, selected_name=None):\n recorder = get_recorder_by_name(recorder_name)\n # get snapshot name for the recorder\n if selected_name is None:\n filename = cnfg.recorders[recorder_name].filename_snapshot()\n else:\n snapshot_path = os.path.dirname(os.path.abspath(cnfg.recorders[recorder_name].filename_snapshot()))\n filename = os.path.join(snapshot_path, selected_name+'.jpg')\n if os.path.isfile(filename):\n height = int(height)\n width = int(width)\n img = cv2.imread(filename)\n # resize image and preserve aspect ratio\n orig_height, orig_width, _ = img.shape\n if orig_height > height:\n scale_height = height / orig_height\n else:\n scale_height = 1\n if orig_width > width:\n scale_width = width / orig_width\n else:\n scale_width = 1\n scale = min(scale_height, scale_width) \n if scale < 1:\n height = math.floor(orig_height*scale)\n width = math.floor(orig_width*scale)\n img = cv2.resize(img, (width, height)) \n # make darker if snapshot was not updated\n if not recorder.status in ['started'] and not recorder.watcher:\n a = np.double(img)\n b = a * 0.2\n img = np.uint8(b)\n # encode to jpeg image\n _, img_jpg = cv2.imencode('.jpg', img)\n response = make_response(img_jpg.tostring())\n response.headers.set('Content-Type', 'image/jpeg')\n return response\n else:\n return send_file('templates/static/nosnapshot.gif')", "def snapshot(self):\n return self.camera.snapshot(0)", "def snapshot(self):\n return cv2.resize(cv2.imread(\"../../../images/obstacles_sample.jpeg\"), (1280, 980))", "def export_insertInstance( self, imageName, instanceName ):\n return gVirtualMachineDB.insertInstance( imageName, instanceName )", "def grab_image(self):\n _, camera_image = self.camera.read()\n with self.lock:\n self.image = camera_image", "def take_snapshot(self):\r\n self.snapshot = self.gain, 
self.block, self.locked, self.bucket_num", "def get_image():\n\n # Access the global variable and activate the saving for the last camera's\n # frame\n global _save_image\n _save_image = True", "def imgCopy(img):\n return sitk.Image(img)", "def save_img(self):\r\n self.extract_info_from_file()\r\n path_0 = os.path.join(self.output_path, self.field_id, self.patient_id + self.ext)\r\n path_1 = os.path.join(self.output_path, self.field_id + '_' + self.instance, self.patient_id + self.ext)\r\n if self.shot == '0': # first shot\r\n if os.path.exists(path_0) or os.path.exists(path_1):\r\n print(self.patient_id, 'already done')\r\n pass\r\n else:\r\n if not self.img_computed:\r\n self.compute_img()\r\n if self.instance == '0':\r\n self.img.save(path_0)\r\n else:\r\n self.img.save(path_1)\r\n else: # newer shot\r\n if not self.img_computed:\r\n self.compute_img()\r\n if self.instance == '0':\r\n self.img.save(path_0)\r\n else:\r\n self.img.save(path_1)", "def saveSnapshot(self, filename): \n\t\tpass" ]
[ "0.7108288", "0.6922023", "0.6767384", "0.6702588", "0.6634366", "0.6420016", "0.6365674", "0.63577205", "0.6300057", "0.6296758", "0.62549734", "0.61733276", "0.61012477", "0.60931534", "0.6068985", "0.60444653", "0.5981253", "0.5933683", "0.5925053", "0.59189266", "0.59120446", "0.5910599", "0.5864837", "0.5845723", "0.5839447", "0.58352876", "0.58326316", "0.5824446", "0.5815397", "0.5792903" ]
0.7582861
0
Power off the specified instance.
def power_off(self, instance, timeout=0, retry_interval=0):
    azure_name = self._get_omni_name_from_instance(instance)
    utils.stop_instance(self.compute_client, drv_conf.resource_group,
                        azure_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def power_off(self, instance, node=None):\n if not node:\n node = _get_baremetal_node_by_instance_uuid(instance['uuid'])\n pm = get_power_manager(node=node, instance=instance)\n pm.deactivate_node()\n if pm.state != baremetal_states.DELETED:\n raise exception.InstancePowerOffFailure(_(\n \"Baremetal power manager failed to stop node \"\n \"for instance %r\") % instance['uuid'])\n pm.stop_console()", "def unpause(self, instance):\n self.power_on(\n context=None,\n instance=instance,\n network_info=None,\n block_device_info=None)", "def pause(self, instance):\n self.power_off(instance)", "def power_off(self, ec2_session, ami_id):\n instance = self.aws_api.get_instance_by_id(ec2_session, ami_id)\n instance.stop()\n self.instance_waiter.wait(instance, self.instance_waiter.STOPPED)\n return True", "def power_off(self):\n LOG.info('Powering off system')\n self._run_shutdown_command('poweroff')", "def power_off(vmname):\n\n _conn.lookupByName(vmname).destroy() # cirros don't know shutdown command\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_OFF)\n return 'VM %s powered off' % vmname", "def stop_instance(InstanceId=None, Force=None):\n pass", "def off(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOffVM_Task())", "def _stop(self, instance):\n try:\n _, err = utils.execute('sudo', 'vzctl', 'stop', instance['id'])\n if err:\n LOG.error(err)\n except ProcessExecutionError:\n raise exception.Error('Failed to stop %s' % instance['id'])\n\n # Update instance state\n try:\n db.instance_set_state(context.get_admin_context(),\n instance['id'],\n power_state.SHUTDOWN)\n except exception.DBError as err:\n LOG.error(err)\n raise exception.Error('Failed to update db for %s' % instance['id'])\n \n return True", "def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)", "def reboot_instance(InstanceId=None):\n pass", "def suspend(self, context, instance):\n LOG.info(\"Suspending instance %s\" % instance.uuid)\n self.power_off(instance)", "def stop_instance():\n send_line('stop instance')\n os.system(f'gcloud compute instances stop {os.uname()[1]} --zone us-east1-b')", "def ShutdownInstance(self, instance, dry_run=False, no_remember=False,\n reason=None, **kwargs):\n query = []\n body = kwargs\n\n _AppendDryRunIf(query, dry_run)\n _AppendIf(query, no_remember, (\"no_remember\", 1))\n _AppendReason(query, reason)\n\n return self._SendRequest(HTTP_PUT,\n (\"/%s/instances/%s/shutdown\" %\n (GANETI_RAPI_VERSION, instance)), query, body)", "def down(self, arguments):\n force = arguments['--force']\n\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n if not force and vmrun.installedTools():\n stopped = vmrun.stop()\n else:\n stopped = vmrun.stop(mode='hard')\n if stopped is None:\n puts_err(colored.red(\"Not stopped\", vmrun))\n else:\n puts_err(colored.green(\"Stopped\", vmrun))", "def power_off(self, sync=True):\n self.vmomi_object.PowerOff()\n if sync: self._wait_for_power_off()", "def turn_off(self) -> None:\n self._monoprice.set_power(self._zone_id, False)", "def deregister_instance(InstanceId=None):\n pass", "def unassign_instance(InstanceId=None):\n pass", "def poweroff(*args, **kwargs):\n try:\n master.main_exit()\n except Exception:\n log.error(\"main_exit error\")\n with open('/tmp/reboot', 'w+') as f:\n log.info(\"Poweroff ...\")", "def turn_off(self, **kwargs: Any) -> None:\n 
self._device.power_on = False\n _LOGGER.debug(\"Turn off light %s\", self._device.ip)", "async def power_off(self):\n ...", "def stop_instance(self):\n instance_id = self._choose_among_running_instances()\n\n # Cancel\n if not instance_id:\n print 'Operation cancelled'\n return\n\n print '# Stopping the instance \"%s\"' % instance_id\n self.compute.stop_instance(instance_id)\n print 'The instance has been stopped'", "def power_off(self):\n for vm in self.vms:\n try:\n vm.name = \"%s_%s\" % (self.resource_pool, vm.name)\n vm.power_off(manager=self.manager)\n except:\n self.logger.error(\"Error with VM '%s'\" % vm.name)\n raise", "def turn_off(self, **kwargs):\n self.vacuum.stop()\n self.vacuum.home()", "async def async_turn_off(self):\n data_cmd = _command(COMMAND_POWER_OFF)\n await self._async_send_command(data_cmd)", "def do_power_down(self, *arg):\n print_info(\"Shutting down POCS instance, please wait\")\n self.pocs.power_down()\n\n while self.pocs.observatory.mount.is_parked is False:\n print_info('.')\n time.sleep(5)\n\n self.pocs = None", "def poweroff(self) -> None:\n pass", "def power_off(self, port):\n port = int(port)\n self._validate_port(\"power_off\", port)\n self.set_mode(OFF, port)", "def turn_off(self, **kwargs):\n if self.is_on:\n _LOGGER.debug(\"Sending STOP command to: %s\", self._name)\n self._api.control('STOP')\n self._mower_status = STATUS_EXECUTING_STOP\n self.schedule_update_ha_state()" ]
[ "0.8163168", "0.778168", "0.74489874", "0.7349463", "0.7315433", "0.726085", "0.71212804", "0.71180135", "0.7100586", "0.69825584", "0.6973217", "0.69285697", "0.69209427", "0.68579257", "0.6851049", "0.68323034", "0.67938197", "0.67203593", "0.66902196", "0.66892743", "0.66363204", "0.6623518", "0.6607544", "0.66072863", "0.6565908", "0.6554799", "0.6529543", "0.6465413", "0.6457588", "0.64470506" ]
0.7834359
1
Azure doesn't support pause and cannot save system state and hence we've implemented the closest functionality which is to poweroff the instance.
def pause(self, instance):
    self.power_off(instance)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def power_off(self):\n ...", "def suspend(self, context, instance):\n LOG.info(\"Suspending instance %s\" % instance.uuid)\n self.power_off(instance)", "def poweroff(self) -> None:\n pass", "def unpause(self, instance):\n self.power_on(\n context=None,\n instance=instance,\n network_info=None,\n block_device_info=None)", "def power_off(fast: bool = True, restart: bool = False) -> None:", "def poweroff(self):\n raise NotImplementedError()", "def power_off(timeout: int = 0) -> None:", "def power_off(vmname):\n\n _conn.lookupByName(vmname).destroy() # cirros don't know shutdown command\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_OFF)\n return 'VM %s powered off' % vmname", "def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)", "def pause(instance):\n if instance.state == STOPPED:\n return\n\n Queue.objects.add(function=\"pause\", instance=instance)", "def power_off(self):\n raise NotImplementedError", "def poweroff(*args, **kwargs):\n try:\n master.main_exit()\n except Exception:\n log.error(\"main_exit error\")\n with open('/tmp/reboot', 'w+') as f:\n log.info(\"Poweroff ...\")", "def resume(self, context, instance, network_info, block_device_info=None):\n LOG.info(\"Resuming instance %s\" % instance.uuid)\n self.power_on(context, instance, network_info, block_device_info)", "def power():\n request_command(tv_command=TVCommand.power)", "def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))", "def power_off(self):\n LOG.info('Powering off system')\n self._run_shutdown_command('poweroff')", "async def poweroff(ctx):\n await ctx.send(\"Bye\")\n await bot.logout()", "def power_off(self, ec2_session, ami_id):\n instance = self.aws_api.get_instance_by_id(ec2_session, ami_id)\n instance.stop()\n self.instance_waiter.wait(instance, self.instance_waiter.STOPPED)\n return True", "def suspend(self):\n\t\treturn Job(SDK.PrlVm_Suspend(self.handle)[0])", "def power_shutdown(self):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def pause(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n if vmrun.pause() is None:\n puts_err(colored.red(\"Not paused\", vmrun))\n else:\n puts_err(colored.yellow(\"Paused\", vmrun))", "def stop_instance():\n send_line('stop instance')\n os.system(f'gcloud compute instances stop {os.uname()[1]} --zone us-east1-b')", "def power_off(self):\n for vm in self.vms:\n try:\n vm.name = \"%s_%s\" % (self.resource_pool, vm.name)\n vm.power_off(manager=self.manager)\n except:\n self.logger.error(\"Error with VM '%s'\" % vm.name)\n raise", "def standby() -> None:", "def reboot_instance(InstanceId=None):\n pass", "async def power_on(self):\n ...", "def execute_pause(self):\n pass", "def suspend(host=None,time=10):\r\n if host:\r\n host.suspend(time)", "def do_power_down(self, *arg):\n print_info(\"Shutting down POCS instance, please wait\")\n self.pocs.power_down()\n\n while self.pocs.observatory.mount.is_parked is False:\n print_info('.')\n time.sleep(5)\n\n self.pocs = None", "def vm_power(self, vm_name, state):\n states = 
[\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True" ]
[ "0.69178873", "0.6913627", "0.6892047", "0.6817895", "0.67467296", "0.6689266", "0.66683525", "0.6530523", "0.6501536", "0.6460379", "0.6408786", "0.63833135", "0.6374186", "0.6365973", "0.6355397", "0.63161796", "0.6303939", "0.6271553", "0.62554705", "0.6237528", "0.6232611", "0.6201502", "0.6196673", "0.61949396", "0.6189511", "0.6162232", "0.61325735", "0.61052257", "0.6103384", "0.60539603" ]
0.73265564
0
Azure doesn't support suspend and cannot save system state and hence we've implemented the closest functionality which is to poweroff the instance.
def suspend(self, context, instance):
    LOG.info("Suspending instance %s" % instance.uuid)
    self.power_off(instance)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def power_off(self):\n ...", "def poweroff(self) -> None:\n pass", "def powerOff(self):\n self._sendCommand(self.SONY_CMD_ExtBackupCommunicator_ForcePowerOff, bufferSize=0)", "def poweroff(self):\n raise NotImplementedError()", "def power_off(vmname):\n\n _conn.lookupByName(vmname).destroy() # cirros don't know shutdown command\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_OFF)\n return 'VM %s powered off' % vmname", "def power_off(fast: bool = True, restart: bool = False) -> None:", "def poweroff(*args, **kwargs):\n try:\n master.main_exit()\n except Exception:\n log.error(\"main_exit error\")\n with open('/tmp/reboot', 'w+') as f:\n log.info(\"Poweroff ...\")", "def pause(self, instance):\n self.power_off(instance)", "def power_off(timeout: int = 0) -> None:", "def power_off(self):\n raise NotImplementedError", "def power_shutdown(self):\n raise NotImplementedError(\"ERROR: Unimplemented function.\")", "def unpause(self, instance):\n self.power_on(\n context=None,\n instance=instance,\n network_info=None,\n block_device_info=None)", "def power_off(self):\n LOG.info('Powering off system')\n self._run_shutdown_command('poweroff')", "def reboot_instance(InstanceId=None):\n pass", "def power_off(self):\n for vm in self.vms:\n try:\n vm.name = \"%s_%s\" % (self.resource_pool, vm.name)\n vm.power_off(manager=self.manager)\n except:\n self.logger.error(\"Error with VM '%s'\" % vm.name)\n raise", "def resume(self, context, instance, network_info, block_device_info=None):\n LOG.info(\"Resuming instance %s\" % instance.uuid)\n self.power_on(context, instance, network_info, block_device_info)", "def suspend(self):\n\t\treturn Job(SDK.PrlVm_Suspend(self.handle)[0])", "def power_off(self, ec2_session, ami_id):\n instance = self.aws_api.get_instance_by_id(ec2_session, ami_id)\n instance.stop()\n self.instance_waiter.wait(instance, self.instance_waiter.STOPPED)\n return True", "def power_off(self, instance, node=None):\n if not node:\n node = _get_baremetal_node_by_instance_uuid(instance['uuid'])\n pm = get_power_manager(node=node, instance=instance)\n pm.deactivate_node()\n if pm.state != baremetal_states.DELETED:\n raise exception.InstancePowerOffFailure(_(\n \"Baremetal power manager failed to stop node \"\n \"for instance %r\") % instance['uuid'])\n pm.stop_console()", "def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))", "def standby() -> None:", "def test_off_reboot_on(self):\n self.openstack('baremetal node power off {0}'\n .format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power off', show_prop['power_state'])\n\n self.openstack('baremetal node reboot {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])", "def off(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOffVM_Task())", "async def poweroff(ctx):\n await ctx.send(\"Bye\")\n await bot.logout()", "async def power_on(self):\n ...", "def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise 
OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True", "def request_shutdown(self, restart=False):", "def power_on(vmname):\n\n _conn.lookupByName(vmname).create()\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_ON)\n return 'VM %s powered on' % vmname", "def reboot(self):\n raise NotImplementedError", "def power_off(self, sync=True):\n self.vmomi_object.PowerOff()\n if sync: self._wait_for_power_off()" ]
[ "0.6929162", "0.6777848", "0.66953474", "0.66884506", "0.66733646", "0.6653961", "0.6581523", "0.65645", "0.6513084", "0.6512412", "0.6495168", "0.64791274", "0.6453871", "0.6394145", "0.62952673", "0.62936836", "0.6290737", "0.62712413", "0.62297374", "0.6217458", "0.6208989", "0.6201001", "0.6199978", "0.6173484", "0.6164515", "0.6136335", "0.61076385", "0.61010224", "0.6073538", "0.6069155" ]
0.70598346
0
Since Azure doesn't support resume and we cannot save system state, we've implemented the closest functionality which is to power on the instance.
def resume(self, context, instance, network_info, block_device_info=None):
    LOG.info("Resuming instance %s" % instance.uuid)
    self.power_on(context, instance, network_info, block_device_info)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def power_on(self, context, instance, network_info, block_device_info):\n azure_name = self._get_omni_name_from_instance(instance)\n utils.start_instance(self.compute_client, drv_conf.resource_group,\n azure_name)", "def power_on(vmname):\n\n _conn.lookupByName(vmname).create()\n infokeeper.update_status_vm(vmname, Instance.STATUS_POWER_ON)\n return 'VM %s powered on' % vmname", "def power_on(self, ec2_session, ami_id):\n instance = self.aws_api.get_instance_by_id(ec2_session, ami_id)\n instance.start()\n self.instance_waiter.wait(instance, self.instance_waiter.RUNNING)\n return True", "def vm_power(self, vm_name, state):\n states = [\"on\", \"off\"]\n if state not in states:\n raise OpenStackConnectorException(f\"Incorrect action was provided for the vm {vm_name} power state change\")\n \n vm_id = self._get_vm_id_by_name(vm_name)\n\n if not vm_id:\n return False\n \n try:\n if state == \"on\":\n self.connection.compute.start_server(vm_id)\n else:\n self.connection.compute.stop_server(vm_id)\n except ConflictException: # This exception block handles the situation when the VM is already in the required power state\n pass\n \n return True", "def resume(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n utils.index_active_instance(instance_name)\n\n vmrun = VMrun(self.vmx, user=self.user, password=self.password)\n\n # Try to unpause\n if vmrun.unpause(quiet=True) is not None:\n time.sleep(1)\n puts_err(colored.blue(\"Getting IP address...\"))\n lookup = self.get(\"enable_ip_lookup\", False)\n ip = vmrun.getGuestIPAddress(lookup=lookup)\n if ip:\n puts_err(colored.green(\"VM resumed on {}\".format(ip)))\n else:\n puts_err(colored.green(\"VM resumed on an unknown IP address\"))\n\n # Otherwise try starting\n else:\n started = vmrun.start()\n if started is None:\n puts_err(colored.red(\"VM not started\"))\n else:\n time.sleep(3)\n puts_err(colored.blue(\"Getting IP address...\"))\n lookup = self.get(\"enable_ip_lookup\", False)\n ip = vmrun.getGuestIPAddress(lookup=lookup)\n puts_err(colored.blue(\"Sharing current folder...\"))\n vmrun.enableSharedFolders()\n vmrun.addSharedFolder('mech', os.getcwd(), quiet=True)\n if ip:\n if started:\n puts_err(colored.green(\"VM started on {}\".format(ip)))\n else:\n puts_err(colored.yellow(\"VM already was started on {}\".format(ip)))\n else:\n if started:\n puts_err(colored.green(\"VM started on an unknown IP address\"))\n else:\n puts_err(colored.yellow(\"VM already was started on an unknown IP address\"))", "async def power_on(self):\n ...", "def power_on(self, context, instance, network_info, block_device_info=None,\n node=None):\n if not node:\n node = _get_baremetal_node_by_instance_uuid(instance['uuid'])\n pm = get_power_manager(node=node, instance=instance)\n pm.activate_node()\n if pm.state != baremetal_states.ACTIVE:\n raise exception.InstancePowerOnFailure(_(\n \"Baremetal power manager failed to start node \"\n \"for instance %r\") % instance['uuid'])\n pm.start_console()", "def _windows_power_control(self):\n\n os_power_command = 'shutdown /r /t 3' if self._power_event_type == 'restart' \\\n else 'shutdown /h /t 3'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))", "def suspend(self, context, instance):\n LOG.info(\"Suspending instance %s\" % instance.uuid)\n 
self.power_off(instance)", "def power_on(self):\n pass", "def power():\n request_command(tv_command=TVCommand.power)", "def reboot(self):\n raise NotImplementedError", "def on(cls, client_object):\n vm_mor = client_object.get_api()\n return cls._do_power_action(vm_mor.PowerOnVM_Task())", "def reboot_instance(InstanceId=None):\n pass", "def _linux_power_control(self):\n\n os_power_command = 'shutdown -r now' if self._power_event_type == 'restart' \\\n else 'shutdown -h now'\n\n exit_code, out = self._staf_start_proc(os_power_command,\n self._sut.bespoke_root,\n self._command_timeout,\n location = self._sut.network_address)\n\n if exit_code != 0:\n raise CoreError('Power control event \"{0}\" failed: {1}'.format(self._name, out))", "def power_on(self):\n for vm in self.vms:\n try:\n vm.name = \"%s_%s\" % (self.resource_pool, vm.name)\n vm.power_on(manager=self.manager)\n except:\n self.logger.error(\"Error with VM '%s'\" % vm.name)\n raise", "def test_power_on_or_off_after_provision(provisioner, prov_data, template_name, provider, started):\n vm_name = \"test_prov_dlg_{}\".format(fauxfactory.gen_alphanumeric())\n prov_data[\"vm_name\"] = vm_name\n prov_data[\"power_on\"] = started\n\n provisioner(template_name, prov_data)\n\n wait_for(\n lambda: provider.mgmt.does_vm_exist(vm_name) and\n (provider.mgmt.is_vm_running if started else provider.mgmt.is_vm_stopped)(vm_name),\n num_sec=240, delay=5\n )", "def acquire_restart(self):\n self.bus.write('ACQ:STATE RUN')", "def _doPowerState(self, state=False):\n if state:\n self._cmdPowerOn()\n else:\n self._cmdPowerOff()", "def poweron(self):\n raise NotImplementedError()", "def resume(vm='', env=''):\n local( main_dir + '/vagrant/bin/vm.sh resume ' + str(vm) + ' ' + str(env) )", "def resume(instance):\n if instance.state == STOPPED:\n return\n\n Queue.objects.add(function=\"resume\", instance=instance)", "def power_off(fast: bool = True, restart: bool = False) -> None:", "def standby() -> None:", "def resume(self):\n\t\tpass", "def restart(self):\n\t\treturn Job(SDK.PrlVm_Restart(self.handle)[0])", "def test_on_reboot_on(self):\n self.openstack('baremetal node power on {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])\n\n self.openstack('baremetal node reboot {0}'.format(self.node['uuid']))\n show_prop = self.node_show(self.node['uuid'], ['power_state'])\n self.assertEqual('power on', show_prop['power_state'])", "def resume(self, instance, callback):\n self._start(instance['id'])", "def resume(self):\n pass", "def resume(self):\n pass" ]
[ "0.6904723", "0.6538291", "0.64492124", "0.6384998", "0.6321347", "0.62968606", "0.62499666", "0.62253034", "0.6220497", "0.6195912", "0.6123734", "0.6116155", "0.6062253", "0.6062095", "0.6025022", "0.5974641", "0.59555715", "0.5953936", "0.5952198", "0.5934965", "0.5905421", "0.58719116", "0.585862", "0.5854987", "0.58452517", "0.58393085", "0.5829078", "0.58083063", "0.57886463", "0.57886463" ]
0.73453075
0
Return data about VM diagnostics.
def get_diagnostics(self, instance):
    # Fake diagnostics
    return {
        'cpu0_time': 17300000000,
        'memory': 524288,
        'vda_errors': -1,
        'vda_read': 262144,
        'vda_read_req': 112,
        'vda_write': 5778432,
        'vda_write_req': 488,
        'vnet1_rx': 2070139,
        'vnet1_rx_drop': 0,
        'vnet1_rx_errors': 0,
        'vnet1_rx_packets': 26701,
        'vnet1_tx': 140208,
        'vnet1_tx_drop': 0,
        'vnet1_tx_errors': 0,
        'vnet1_tx_packets': 662,
    }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def diagnostics(self):\r\n # NB: should be not None for multiprocessing works\r\n return {}", "def compute_diagnostics(self) -> Dict[str, Any]:\n return {}", "def get_diagnostics(self) -> List[Diagnostic]:\n raise NotImplementedError", "def diagnostics(self, oid):\n path = '/servers/%s/diagnostics' % oid\n res = self.client.call(path, 'GET', data='', token=self.manager.identity.token)\n self.logger.debug('Shows basic usage data for server %s: %s' % \n (oid, truncate(res)))\n return res[0]", "def extract_diagnostics (self):\n\t\t# TODO: something like this could move into the base class\n\t\tdiag = {}\n\t\tfilenames = [\n\t\t\tINSEQ_FILENAME,\n\t\t\tOUTALIGN_FILENAME,\n\t\t]\n\t\tfor item in filenames:\n\t\t\tfpath = os.path.join (self._curr_workdir, item)\n\t\t\tdiag[item] = utils.file_to_string (fpath)\n\t\treturn diag", "def diagnostics(self) -> bool | None:\n return self._data[ATTR_DIAGNOSTICS]", "def calculate_diagnostic_vars(self):\n pass", "def diagnostics(self):\r\n class DiagIterator:\r\n def __init__(self, tu):\r\n self.tu = tu\r\n\r\n def __len__(self):\r\n return int(conf.lib.clang_getNumDiagnostics(self.tu))\r\n\r\n def __getitem__(self, key):\r\n diag = conf.lib.clang_getDiagnostic(self.tu, key)\r\n if not diag:\r\n raise IndexError\r\n return Diagnostic(diag)\r\n\r\n return DiagIterator(self)", "def get_vm_data(self):\n\n raise NotImplementedError", "def get_diagnostic_list(self):\n return _get_diagnostic_list(self.run_dir)", "def get_server_diagnostics(request, server_id):\n log.debug('server_diagnostics %s', server_id)\n vm = util.get_vm(server_id, request.user_uniq)\n diagnostics = diagnostics_to_dict(vm.diagnostics.all())\n return render_diagnostics(request, diagnostics)", "def getDiagnostics(self):\n msg = DiagnosticStatus()\n msg.name = self.name\n msg.level = DiagnosticStatus.OK\n msg.message = \"OK\"\n if self.active():\n msg.values.append(KeyValue(\"State\", \"Active\"))\n else:\n msg.values.append(KeyValue(\"State\", \"Not Active\"))\n return msg", "def _build_module_diagnostics_info(module: VelbusModule) -> dict[str, Any]:\n data: dict[str, Any] = {\n \"type\": module.get_type_name(),\n \"address\": module.get_addresses(),\n \"name\": module.get_name(),\n \"sw_version\": module.get_sw_version(),\n \"is_loaded\": module.is_loaded(),\n \"channels\": _build_channels_diagnostics_info(module.get_channels()),\n }\n return data", "def getDataDict(self):\n # Used to compare data in MATLAB\n d = {'Vm': self.r_Vm,\n 'Va': self.r_Va,\n 'BusName': self.Busnam,\n 'BusNum': self.Extnum,\n }\n return d", "def diagnostics(self,\n *opts, # type: DiagnosticsOptions\n **kwargs # type: Dict[str, Any]\n ) -> DiagnosticsResult:\n\n return super().diagnostics(*opts, **kwargs)", "def diagnose(self):\n return self._diagnostic_plot()", "async def async_get_config_entry_diagnostics(\n hass: HomeAssistant, entry: ConfigEntry\n) -> dict[str, Any]:\n entry_data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id]\n client = entry_data.coordinator.client\n\n data = {\n \"setup\": await client.get_diagnostic_data(),\n \"server\": entry.data[CONF_HUB],\n \"execution_history\": [\n repr(execution) for execution in await client.get_execution_history()\n ],\n }\n\n return data", "def dump_diags():\n try:\n stat_log.info(\"=== DIAGNOSTICS ===\")\n for name, diags_function in _registered_diags:\n stat_log.info(\"--- %s ---\", name)\n diags_function(stat_log)\n stat_log.info(\"=== END OF DIAGNOSTICS ===\")\n except Exception:\n # We don't want to take down the process we're trying to 
diagnose...\n try:\n stat_log.exception(\"Failed to dump diagnostics\")\n except Exception:\n pass", "def print_vm_info(virtual_machine):\n summary = virtual_machine.summary\n print(summary.runtime.host)\n print(\"Name : \", summary.config.name)\n print(\"Template : \", summary.config.template)\n print(\"Path : \", summary.config.vmPathName)\n print(\"Guest : \", summary.config.guestFullName)\n print(\"Instance UUID : \", summary.config.instanceUuid)\n print(\"Bios UUID : \", summary.config.uuid)\n annotation = summary.config.annotation\n if annotation:\n print(\"Annotation : \", annotation)\n print(\"State : \", summary.runtime.powerState)\n if summary.guest is not None:\n ip_address = summary.guest.ipAddress\n tools_version = summary.guest.toolsStatus\n if tools_version is not None:\n print(\"VMware-tools: \", tools_version)\n else:\n print(\"Vmware-tools: None\")\n if ip_address:\n print(\"IP : \", ip_address)\n else:\n print(\"IP : None\")\n if summary.runtime.question is not None:\n print(\"Question : \", summary.runtime.question.text)\n print(\"\")", "def _getvmstat(self):\n\n vmstat_cmd = \"/usr/bin/vmstat -s\"\n\n (retval, output) = utils.safe_getstatusoutput( vmstat_cmd )\n\n if retval != 0:\n log.log( \"<system>system._getvmstat(): error calling '%s'\"%(vmstat_cmd), 5 )\n return None\n\n vmstat_dict = {}\n\n for l in string.split( output, '\\n' ):\n if string.find( l, 'swap ins' ) != -1:\n vmstat_dict['ctr_swap_ins'] = long(string.split(l)[0])\n elif string.find( l, 'swap outs' ) != -1:\n vmstat_dict['ctr_swap_outs'] = long(string.split(l)[0])\n elif string.find( l, 'pages swapped in' ) != -1:\n vmstat_dict['ctr_pages_swapped_in'] = long(string.split(l)[0])\n elif string.find( l, 'pages swapped out' ) != -1:\n vmstat_dict['ctr_pages_swapped_out'] = long(string.split(l)[0])\n elif string.find( l, 'total address trans. 
faults taken' ) != -1:\n vmstat_dict['ctr_total_address_trans_faults_taken'] = long(string.split(l)[0])\n elif string.find( l, 'page ins' ) != -1:\n vmstat_dict['ctr_page_ins'] = long(string.split(l)[0])\n elif string.find( l, 'page outs' ) != -1:\n vmstat_dict['ctr_page_outs'] = long(string.split(l)[0])\n elif string.find( l, 'pages paged in' ) != -1:\n vmstat_dict['ctr_pages_paged_in'] = long(string.split(l)[0])\n elif string.find( l, 'pages paged out' ) != -1:\n vmstat_dict['ctr_pages_paged_out'] = long(string.split(l)[0])\n elif string.find( l, 'reclaims from free list' ) != -1:\n vmstat_dict['ctr_reclaims_from_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'total page reclaims' ) != -1:\n vmstat_dict['ctr_total_page_reclaims'] = long(string.split(l)[0])\n elif string.find( l, 'intransit blocking page faults' ) != -1:\n vmstat_dict['ctr_intransit_blocking_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'zero fill pages created' ) != -1:\n vmstat_dict['ctr_zero_fill_pages_created'] = long(string.split(l)[0])\n elif string.find( l, 'zero fill page faults' ) != -1:\n vmstat_dict['ctr_zero_fill_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'executable fill pages created' ) != -1:\n vmstat_dict['ctr_executable_fill_pages_created'] = long(string.split(l)[0])\n elif string.find( l, 'executable fill page faults' ) != -1:\n vmstat_dict['ctr_executable_fill_page_faults'] = long(string.split(l)[0])\n elif string.find( l, 'swap text pages found in free list' ) != -1:\n vmstat_dict['ctr_swap_text_pages_found_in_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'inode text pages found in free list' ) != -1:\n vmstat_dict['ctr_inode_text_pages_found_in_free_list'] = long(string.split(l)[0])\n elif string.find( l, 'revolutions of the clock hand' ) != -1:\n vmstat_dict['ctr_revolutions_of_the_clock_hand'] = long(string.split(l)[0])\n elif string.find( l, 'pages scanned for page out' ) != -1:\n vmstat_dict['ctr_pages_scanned_for_page_out'] = long(string.split(l)[0])\n elif string.find( l, 'pages freed by the clock daemon' ) != -1:\n vmstat_dict['ctr_pages_freed_by_the_clock_daemon'] = long(string.split(l)[0])\n elif string.find( l, 'cpu context switches' ) != -1:\n vmstat_dict['ctr_cpu_context_switches'] = long(string.split(l)[0])\n elif string.find( l, 'device interrupts' ) != -1:\n vmstat_dict['ctr_device_interrupts'] = long(string.split(l)[0])\n elif string.find( l, 'traps' ) != -1:\n vmstat_dict['ctr_traps'] = long(string.split(l)[0])\n elif string.find( l, 'system calls' ) != -1:\n vmstat_dict['ctr_system_calls'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 4K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_4K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 64K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Successes for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Successes_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page 
size 64K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Select Size Failures for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Select_Size_Failures_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 4K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_4K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_16K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 64K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_64K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 256K' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_256K'] = long(string.split(l)[0])\n elif string.find( l, 'Page Allocate Successes for Page size 64M' ) != -1:\n vmstat_dict['ctr_Page_Allocate_Successes_for_Page_size_64M'] = long(string.split(l)[0])\n elif string.find( l, 'Page Demotions for Page size 16K' ) != -1:\n vmstat_dict['ctr_Page_Demotions_for_Page_size_16K'] = long(string.split(l)[0])\n\n return vmstat_dict", "def report_data(self):\n return {}", "def dataForMonitoring(self):\n dict = MinderBase.dataForMonitoring(self)\n \n dict['nTests'] = len(self.tests.keys())\n dict['done'] = self.isDone()\n dict['nTestsSuccess'] = len([s for s in self.finishedTests if s.result == 0])\n dict['nTestsFailure'] = len([s for s in self.finishedTests if s.result != 0])\n dict['nRetries'] = self.errorStateCurRetry\n dict['ppFailure'] = (self.postProcessingResult == 'error')\n dict['ppSuccess'] = (self.postProcessingResult == 'success')\n\n return dict", "def _get_debug_info(self):\n info = {'problem_file' : self._problem.problem_fname,\n 'domain_file' : self.domain.domain_fname }\n return info", "def run_diagnostics(self):\n request = {\n 'jsonrpc': '2.0',\n 'id': 0,\n 'method': 'ping'\n }\n result = CurlTestBase.send_request('&diag=1', request)\n response = '<html><body><pre>'\n response += cgi.escape(result.content)\n response += '</pre></body></html>'\n self.response.out.write(response)", "async def async_get_config_entry_diagnostics(\n hass: HomeAssistant, entry: ConfigEntry\n) -> dict[str, Any]:\n coordinator: PVOutputDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]\n # Round-trip via JSON to trigger serialization\n data: dict[str, Any] = json.loads(coordinator.data.json())\n return data", "def get_analysis(self) -> Dict[str, Any]:\n return {\n \"report\": self.report.to_text(),\n \"errors\": sorted(self.errors, key=lambda k: k[\"start\"]),\n \"has_errors\": self.has_errors(),\n }", "def describe_collect(self):\n logger.info(\"describe_collect()\")\n d = dict(\n source = \"elapsed time, s\",\n dtype = \"number\",\n shape = (1,)\n )\n return {\n self.name: {\n \"x\": d\n }\n }", "def getMemDetail(self):\n mem = {}\n if self.type in ['E', 'T', 'S', 'K', 'A', 'AX', 'W']:\n m = \"The percentage of CP memory utilization:\\s*([\\d\\.]+)%\\s+DP memory utilization:\\s*([\\d\\.]+)%\"\n rt = re.search(m, self.dut.cli(\"show memory detail\"))\n if rt:\n mem = {\"cp\": float(rt.groups()[0]), \"dp\": float(rt.groups()[1])}\n return mem", "def summary(self):\n\n result = dict()\n\n result[\"control_manager\"] = self._control_manager.summary()\n result[\"data_logger\"] = self._db_manager.summary()\n result[\"alarm_manager\"] = 
self._alarm_manager.summary()\n result[\"machine_manager\"] = self._machine_manager.summary()\n result[\"function_manager\"] = self._function_manager.summary()\n\n return result", "def get_D1_diagnostic(self):\n raise NotImplementedError('The get_D1_diagnostic method is not implemented for Orca Logs')" ]
[ "0.7394385", "0.7127574", "0.68339425", "0.6612228", "0.64488685", "0.6354231", "0.61432654", "0.61270887", "0.604643", "0.6037903", "0.5928791", "0.5822542", "0.58160037", "0.5722499", "0.56864506", "0.5682793", "0.56672037", "0.560802", "0.5604313", "0.5590717", "0.55809945", "0.5561858", "0.5556396", "0.555456", "0.553939", "0.5538245", "0.55341125", "0.5516846", "0.54992664", "0.5498311" ]
0.79608166
0
Return usage info for volumes attached to vms on a given host.
def get_all_volume_usage(self, context, compute_host_bdms):
    volusage = []
    return volusage
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_volume_info(host, disk_object, dc_obj):\n host_resource = get_host_resource_by_name(host)\n\n vol_id = disk_object.get_image_id()\n sd_id = disk_object.get_storage_domains().get_storage_domain()[0].get_id()\n image_id = disk_object.get_id()\n sp_id = dc_obj.get_id()\n\n args = {\n \"storagepoolID\": sp_id,\n \"storagedomainID\": sd_id,\n \"imageID\": image_id,\n \"volumeID\": vol_id,\n }\n\n return host_resource.vds_client(cmd=\"Volume.getInfo\", args=args)", "def host_info(vm_hostname):\n with _get_vm(vm_hostname) as vm:\n\n if vm.dataset_obj['datacenter_type'] != 'kvm.dct':\n raise NotImplementedError(\n 'This operation is not yet supported for {}'.format(\n vm.dataset_obj['datacenter_type'])\n )\n\n info = vm.info()\n\n # Disconnect fabric now to avoid messages after the table\n disconnect_all()\n\n categories = (\n ('General', (\n 'hypervisor',\n 'status',\n )),\n ('Network', (\n 'intern_ip',\n 'mac_address',\n )),\n ('Resources', (\n 'num_cpu',\n 'max_cpus',\n 'memory',\n 'memory_free',\n 'max_mem',\n 'disk',\n 'disk_size_gib',\n 'disk_free_gib',\n )),\n # Anything else will appear in this section\n ('Other', None),\n )\n\n def _progress_bar(free_key, capacity_key, result_key, unit):\n \"\"\"Helper to show nice progress bars.\"\"\"\n if free_key not in info or capacity_key not in info:\n return\n free = info[free_key]\n del info[free_key]\n capacity = info[capacity_key]\n del info[capacity_key]\n\n simple_stats = (\n 'Current: {} {unit}\\n'\n 'Free: {} {unit}\\n'\n 'Max: {} {unit}'.format(\n capacity - free, free, capacity, unit=unit))\n\n if not 0 <= free <= capacity > 0:\n log.warning(\n '{} ({}) and {} ({}) have weird ratio, skipping progress '\n 'calculation'.format(\n free_key, free, capacity_key, capacity)\n )\n info[result_key] = red(simple_stats)\n return\n\n assert 0 <= free <= capacity\n ratio = 1 - float(free) / float(capacity)\n if ratio >= 0.9:\n color = red\n elif ratio >= 0.8:\n color = yellow\n else:\n color = green\n\n max_bars = 20\n num_bars = int(round(ratio * max_bars))\n info[result_key] = (\n '[{}{}] {}%\\n{}'.format(\n color('#' * num_bars), ' ' * (max_bars - num_bars),\n int(round(ratio * 100)),\n simple_stats,\n )\n )\n\n _progress_bar('memory_free', 'memory', 'memory', 'MiB')\n _progress_bar('disk_free_gib', 'disk_size_gib', 'disk', 'GiB')\n\n max_key_len = max(len(k) for k in info.keys())\n for category, keys in categories:\n # Handle 'Other' section by defaulting to all keys\n keys = list(keys or info.keys())\n\n # Any info available for the category?\n if not any(k in info for k in keys):\n continue\n\n print('')\n print(white(category, bold=True))\n for k in keys:\n if k not in info:\n continue\n\n # Properly re-indent multiline values\n value = str(info.pop(k))\n value = ('\\n' + ' ' * (max_key_len + 3)).join(\n value.splitlines()\n )\n print('{} : {}'.format(k.ljust(max_key_len), value))", "def get_disk_usage():\n query = {\n \"type\": \"op\",\n \"cmd\": \"<show><system><disk-space></disk-space></system></show>\",\n }\n\n return __proxy__[\"panos.call\"](query)", "def _get_vms_on_host(self, host_ref):\n vm_data = []\n vm_ret = self._session._call_method(vutil,\n \"get_object_property\",\n host_ref,\n \"vm\")\n # if there are no VMs on the host, we don't need to look further\n if not vm_ret:\n return vm_data\n\n vm_mors = vm_ret.ManagedObjectReference\n result = self._session._call_method(vutil,\n \"get_properties_for_a_collection_of_objects\",\n \"VirtualMachine\", vm_mors,\n [\"config.instanceUuid\", \"runtime.powerState\",\n 
\"config.hardware.memoryMB\", \"config.managedBy\"])\n with vutil.WithRetrieval(self._session.vim, result) as objects:\n for obj in objects:\n vm_props = propset_dict(obj.propSet)\n # sometimes, the vCenter finds a file it thinks is a VM and it\n # doesn't even have a config attribute ... instead of crashing\n # with a KeyError, we assume this VM is not running and totally\n # doesn't matter as nova also will not be able to handle it\n if 'config.instanceUuid' not in vm_props:\n continue\n\n vm_data.append((\n vm_props['config.instanceUuid'],\n vm_props['config.hardware.memoryMB'],\n vm_props['runtime.powerState'],\n vm_props.get('config.managedBy'),\n vutil.get_moref_value(obj.obj)))\n return vm_data", "def _get_host_utilization(context, host, ram_mb, disk_gb):\n instances = instance_get_all_by_host(context, host)\n vms = len(instances)\n free_ram_mb = ram_mb - FLAGS.reserved_host_memory_mb\n free_disk_gb = disk_gb - (FLAGS.reserved_host_disk_mb * 1024)\n\n work = 0\n for instance in instances:\n free_ram_mb -= instance.memory_mb\n free_disk_gb -= instance.root_gb\n free_disk_gb -= instance.ephemeral_gb\n if instance.vm_state in [vm_states.BUILDING, vm_states.REBUILDING,\n vm_states.MIGRATING, vm_states.RESIZING]:\n work += 1\n return dict(free_ram_mb=free_ram_mb,\n free_disk_gb=free_disk_gb,\n current_workload=work,\n running_vms=vms)", "def _get_mount_status(self, vm=None):\n result = Shell.run(f\"multipass info {vm} --format=json\")\n\n if f'instance \"{vm}\" does not exist' in result:\n dict_result = {\n 'name': vm,\n 'status': \"instance does not exist\"\n }\n else:\n result = json.loads(result)\n dict_result = {\n 'name': vm,\n 'status': result[\"info\"][vm]['state'],\n 'mounts': result[\"info\"][vm]['mounts']\n }\n return dict_result", "def disk():\n run(env.disk_usage_command % env)", "def getvg(host, disk):\r\n sshCommand = \"lspv | grep '^%s ' | awk '{print $3}'\" % disk\r\n vgName = sub.Popen([\"ssh\", \"-q\", host, sshCommand],\r\n shell=False, stdout=sub.PIPE, stderr=sub.PIPE\r\n ).communicate()[0].strip()\r\n return vgName", "def get_disk_usage():\n\n disk_usage = {}\n diskinfo = subprocess.Popen(['df','-P'], shell=False, stdout=subprocess.PIPE)\n diskinfo.stdout.readline()\n for line in diskinfo.stdout:\n disk_usage[line.split()[5]] = { 'filesystem' : line.split()[0], 'size' : int(line.split()[1]), \\\n'used' : int(line.split()[2]), 'avail' : int(line.split()[3]), 'capacity' : line.split()[4] }\n diskinfo = subprocess.Popen(['df','-i','-P'], shell=False, stdout=subprocess.PIPE)\n diskinfo.stdout.readline()\n for line in diskinfo.stdout:\n disk_usage[line.split()[5]].update( { 'iused' : int(line.split()[2]), 'ifree' : int(line.split()[3]), 'icapacity' : line.split()[4] } )\n return disk_usage", "def get_lun_storage_info(lun_id):\n host = ll_hosts.get_spm_host(config.HOSTS)\n host_ip = ll_hosts.get_host_ip(host)\n executor = rhevm_helpers.get_host_executor(\n host_ip, config.VDC_ROOT_PASSWORD\n )\n # Execute 'pvscan' to display the latest volume info\n storage_resources.pvscan(host)\n logger.info(\"Executing command 'pvs | grep %s'\", lun_id)\n status, output, err = executor.run_cmd(\n shlex.split(PVS_SHOW_LUN_INFO % lun_id)\n )\n if status:\n logger.info(\n \"Status was False executing 'pvs | grep %s'. 
Err: %s\",\n lun_id, err\n )\n return 0, 0\n\n # Format the output into the 6 expected display parameters (PV, VG,\n # Format, LV Attributes, Physical size and Physical free size)\n formatted_output = shlex.split(output)\n logger.info(\n \"The output received when running pvs on LUN id %s is: %s\"\n % (lun_id, formatted_output)\n )\n # The 2nd last displayed data output is needed - Physical size\n lun_size = formatted_output[-2]\n lun_size = lun_size.replace(\"g\", \"\")\n lun_free_space = formatted_output[-1]\n lun_free_space = lun_free_space.replace(\"g\", \"\")\n lun_size_bytes = float(lun_size) * config.GB\n logger.info(\"The LUN size in bytes is '%s'\", str(lun_size_bytes))\n lun_free_bytes = float(lun_free_space) * config.GB\n logger.info(\"The LUN free space in bytes is '%s'\", str(lun_free_bytes))\n\n return int(lun_size_bytes), int(lun_free_bytes)", "def get_volume_info(volumes):\n if type(volumes) is not list:\n volumes = [volumes]\n volume_info_list = []\n for volume in volumes:\n command = 'cinder show %s' % volume['id']\n volume_info = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0])\n att = volume_info['attachments'].replace(\"'\", \"\\\"\").replace(\n \"u\\\"\", \"\\\"\").replace(\" None,\", \" \\\"None\\\",\")\n volume_info['device'] = json.loads(att)[0]['device']\n volume_info_list.append(volume_info)\n return volume_info_list", "def disk_usage(self):\n self.monitoring_object['disk_usage'] =\\\n psutil.disk_usage('/')", "def describe_volumes(InstanceId=None, StackId=None, RaidArrayId=None, VolumeIds=None):\n pass", "def _get_vm_stats(self, vm_name):\n host = VDS(hosts.get_host_vm_run_on(vm_name), config.VDC_ROOT_PASSWORD)\n return host.vds_client(\"VM.getStats\", {\"vmID\": self.vm_id})[0]", "def get_storage_devices(vm_name, filter='vd[a-z]'):\n vm_executor = get_vm_executor(vm_name)\n\n command = 'ls /sys/block | egrep \\\"%s\\\"' % filter\n rc, output, error = vm_executor.run_cmd(cmd=shlex.split(command))\n if rc:\n logger.error(\n \"Error while retrieving storage devices from VM '%s, output is \"\n \"'%s', error is '%s'\", output, error\n )\n return False\n return output.split()", "def update_volume_after_attached_to_vm(self, info, vms):\n path = info[0]['path']\n path_list = path.split(sep='/')\n machine_path_list = [\"~\", \"Home\"]\n machine_path_list.extend(path_list[3:])\n info[0]['machine_path'] = \"/\".join(machine_path_list)\n info[0]['AttachedToVm'] = vms\n info[0]['State'] = 'in-use'\n info[0]['time'] = datetime.datetime.now()\n return info", "def get_basic_volume_info_all():\n vl = None\n try:\n d, err = xml_parse.run_gluster_command(\n '/usr/sbin/gluster volume info all --xml')\n if err:\n raise Exception(err)\n\n root = d[\"root\"]\n\n # Get the admin vol name so it can be excluded from the list\n admin_vol_name, err = config.get_admin_vol_name()\n if err:\n raise Exception(err)\n\n # Now get the all the volume info for user created volumes\n vl, err = xml_parse.get_volume_info(root, admin_vol_name)\n if err:\n raise Exception(err)\n except Exception, e:\n return None, 'Error getting basic volume information for all volumes : %s' % str(e)\n else:\n return vl, None", "def collect():\n\n command = \"cat /proc/meminfo |grep MemTotal|awk -F' ' '{print $2}'\"\n memTotal_f = round(float(os.popen(command).read())/1024/1000,0)\n memTotal = int(memTotal_f)\n cmd = 'df -h |grep \"/dev/s\"'\n metric_disk = os.popen(cmd).readlines()\n hardNum=[]\n for i in metric_disk:\n hard_space = float((i.strip().split()[1])[:-1])\n 
hardNum.append(hard_space)\n\n disk_info = sum(hardNum)\n disk_use = {}\n metric_disks=os.popen('df -x tmpfs -x devtmpfs | grep -Eo \" /\\S*$\" ').readlines()\n for disk in metric_disks:\n cmd = 'df|grep -E \"%s$\"' % disk.strip()\n disks = os.popen(cmd).readlines()[0]\n disk_list = disks.split()\n disk_use[disk_list[5]]=disk_list[4]\n hard = {\n \"disk_used\" : disk_use,\n \"disk_total\":disk_info,\n \"mem_total\":memTotal\n }\n\n return hard", "def test_update_volume_stats_cached(self):\n self._fail_host_storage = True\n actual = self.driver.get_volume_stats(False)\n self.assertEqual('HGST', actual['vendor_name'])\n self.assertEqual('hgst', actual['storage_protocol'])\n self.assertEqual(90, actual['total_capacity_gb'])\n self.assertEqual(87, actual['free_capacity_gb'])\n self.assertEqual(0, actual['reserved_percentage'])", "def get_amount_of_file_type_volumes(host_ip, sp_id, sd_id, image_id):\n # Build the path to the Disk's location on the file system\n volume_path = FILE_SD_VOLUME_PATH_IN_FS % (sp_id, sd_id, image_id)\n command = GET_FILE_SD_NUM_DISK_VOLUMES % volume_path\n executor = rhevm_helpers.get_host_executor(\n ip=host_ip, password=config.VDC_ROOT_PASSWORD\n )\n rc, output, err = executor.run_cmd(shlex.split(command))\n\n assert not rc, errors.CommandExecutionError(\"Output: %s\" % output)\n # There are a total of 3 files/volume, the volume metadata (.meta),\n # the volume lease (.lease) and the volume content itself (no\n # extension)\n num_volumes = int(output)/3\n logger.debug(\n \"The number of file type volumes found is '%s'\",num_volumes\n )\n return num_volumes", "def main():\n results = []\n results.extend(check_mounts())\n results.extend(diskusage())\n return results", "def _get_system_volume(vm_):\n\n # Override system volume size if 'disk_size' is defined in cloud profile\n disk_size = get_size(vm_)[\"disk\"]\n if \"disk_size\" in vm_:\n disk_size = vm_[\"disk_size\"]\n\n # Construct the system volume\n volume = Volume(\n name=\"{} Storage\".format(vm_[\"name\"]),\n size=disk_size,\n disk_type=get_disk_type(vm_),\n )\n\n if \"image_password\" in vm_:\n image_password = vm_[\"image_password\"]\n volume.image_password = image_password\n\n # Retrieve list of SSH public keys\n ssh_keys = get_public_keys(vm_)\n volume.ssh_keys = ssh_keys\n\n if \"image_alias\" in vm_.keys():\n volume.image_alias = vm_[\"image_alias\"]\n else:\n volume.image = get_image(vm_)[\"id\"]\n # Set volume availability zone if defined in the cloud profile\n if \"disk_availability_zone\" in vm_:\n volume.availability_zone = vm_[\"disk_availability_zone\"]\n\n return volume", "def usage(self, host):", "def ls(cls):\n for vm in cls._vm_agents_for_host():\n with vm:\n running = vm.qemu.process_exists()\n\n if running:\n vm_mem = vm.qemu.proc().memory_full_info()\n\n expected_size = (\n vm.cfg[\"memory\"] * 1024 * 1024\n + vm.qemu.vm_expected_overhead * 1024 * 1024\n )\n\n log.info(\n \"online\",\n machine=vm.name,\n cores=vm.cfg[\"cores\"],\n memory_booked=\"{:,.0f}\".format(vm.cfg[\"memory\"]),\n memory_pss=\"{:,.0f}\".format(vm_mem.pss / MiB),\n memory_swap=\"{:,.0f}\".format(vm_mem.swap / MiB),\n )\n else:\n log.info(\"offline\", machine=vm.name)", "def do_hostinfo(self, args):\n host = opts = None\n if args:\n args = args.split()\n host = args.pop()\n\n if not host:\n print('Usage: hostinfo [-cdmu] host_name_or_ip')\n print(' uptime and load stats returned if no options specified')\n return\n\n try:\n ip = socket.gethostbyname(host)\n except socket.gaierror:\n print('cannot resolve', host, 
file=sys.stderr)\n return\n\n opts = []\n while args:\n arg = args.pop(0)\n if arg.startswith('--'):\n if arg == '--cpu':\n opts.append('c')\n elif arg == '--disk':\n opts.append('d')\n elif arg == '--memory':\n opts.append('m')\n elif arg == '--uptime':\n opts.append('u')\n else:\n print('unrecognized option:', arg, file=sys.stderr)\n return\n else:\n if arg[0] == '-':\n for ch in arg[1:]:\n if ch in ('cdmu') and ch not in opts:\n opts.append(ch)\n else:\n print('unrecognized option:', ch, file=sys.stderr)\n return\n\n stats = self._qm.get_host_stats(ip)\n\n if not opts:\n # Get uptime and load averages.\n up = stats['uptime']\n load = stats['cpu_load']\n print('Up for %s days, %s hours, %s minutes, '\n 'load averages: %s, %s, %s'\n % (up['days'], up['hours'], up['minutes'], load['one'],\n load['five'], load['fifteen']))\n return\n\n all_stats = []\n for opt in opts:\n if opt == 'd':\n # Get disk usage.\n disks = stats['disk_usage']\n st = ['Disk Usage:']\n for mount, disk_info in disks.viewitems():\n st.append(' Usage for: %s' % mount)\n for k, v in disk_info.viewitems():\n st.append(' %s: %s' % (k, v))\n all_stats.append('\\n'.join(st))\n all_stats.append('')\n elif opt == 'c':\n # Get CPU load.\n load_stats = stats['cpu_load']\n st = ['CPU Load Average:']\n st.append(' last one minute: %s' % load_stats['one'])\n st.append(' last five minutes: %s' % load_stats['five'])\n st.append(' last fifteen minutes: %s' % load_stats['fifteen'])\n all_stats.append('\\n'.join(st))\n all_stats.append('')\n elif opt == 'm':\n # Get Memory Usage.\n memory_usage = stats['memory_usage']\n st = ['Memory usage:']\n for k, v in memory_usage.viewitems():\n st.append(' %s: %s' % (k, v))\n all_stats.append('\\n'.join(st))\n all_stats.append('')\n elif opt == 'u':\n # Get uptime.\n up = stats['uptime']\n st = ['Uptime:']\n st.append(' Up for %s days, %s hours and %s minutes'\n % (up['days'], up['hours'], up['minutes']))\n all_stats.append('\\n'.join(st))\n all_stats.append('')\n\n print('\\n'.join(all_stats))", "def get_info(volpath):\n dhandle = vol_open_path(volpath, VMDK_OPEN_DISKCHAIN_NOIO)\n\n if not disk_is_valid(dhandle):\n logging.warning(\"Failed to open disk - %s\", volpath)\n return None\n\n sinfo = disk_info()\n res = lib.DiskLib_GetSize(dhandle, 0, VMDK_MAX_SNAPS, byref(sinfo))\n\n lib.DiskLib_Close(dhandle)\n if res != 0:\n logging.warning(\"Failed to get size of disk %s - %x\", volpath, res)\n return None\n\n return {VOL_SIZE: convert(sinfo.size), VOL_ALLOC: convert(sinfo.allocated)}", "def info(self, name=None):\n data = self.cloudman.list_servers(filters={'name': name})\n\n \"\"\"\n vms = self.list()\n print (\"VMS\", vms)\n data = None\n for entry in vms:\n print (\"FFF\", entry['name'])\n if entry['name'] == name:\n data = entry\n break\n \"\"\"\n\n if data is None:\n raise ValueError(f\"vm not found {name}\")\n\n r = self.update_dict(data, kind=\"vm\")\n return r", "def update_volume_after_detach(self, info, vms):\n info[0]['AttachedToVm'] = vms\n if len(vms) == 0:\n info[0]['machine_path'] = None\n info[0]['State'] = 'available'\n info[0]['time'] = datetime.datetime.now()\n return info", "def get_hdd():\n return {\n 'HDD': string_chopped_to_float(psutil.disk_usage('/'), 'percent=', ')'),\n }", "def getAnsibleInfo(host):\n #First do a ping to get more results\n data = runAnsibleCommand(host.getID(), 'ping')\n if data[0]['status'] == 'UNREACHABLE!':\n return None\n #Get the actual data\n return runAnsibleCommand(host.getID(), 'setup')[0]['json']" ]
[ "0.64672023", "0.62748015", "0.6236638", "0.60684156", "0.5916731", "0.5862034", "0.5846422", "0.57962835", "0.57623684", "0.57531667", "0.56755435", "0.5626542", "0.56177384", "0.560629", "0.5597559", "0.55715716", "0.55710214", "0.55689335", "0.55505204", "0.553369", "0.5529433", "0.55284214", "0.5526885", "0.5517558", "0.5474557", "0.5444127", "0.5433442", "0.54288256", "0.54276353", "0.5422043" ]
0.7157881
1
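For context on the first negative passage above (the pvs LUN-size parser), here is a minimal, self-contained sketch of its GiB-string-to-bytes conversion step. This is an illustration, not the snippet's code: the GB constant is an assumption standing in for the unshown config.GB, taken here as 1024**3 bytes:

GB = 1024 ** 3  # assumed value of config.GB; the real constant is not shown above

def size_string_to_bytes(size_str):
    # pvs reports sizes like "20.00g"; drop the unit suffix and scale to bytes
    return int(float(size_str.replace("g", "")) * GB)

assert size_string_to_bytes("20.00g") == 20 * GB
print(size_string_to_bytes("0.50g"))  # 536870912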
Return Azure Host Status of name, ram, disk, network.
def get_host_stats(self, refresh=False): stats = [] for nodename in self._drv_nodes: host_status = self.host_status_base.copy() host_status['hypervisor_hostname'] = nodename host_status['host_hostname'] = nodename host_status['host_name_label'] = nodename host_status['hypervisor_type'] = self.name host_status['vcpus'] = drv_conf.max_vcpus host_status['memory_mb'] = drv_conf.max_memory_mb host_status['local_gb'] = drv_conf.max_disk_gb stats.append(host_status) if len(stats) == 0: raise exception.NovaException("Azure Driver has no node") elif len(stats) == 1: return stats[0] else: return stats
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_host_stats(self, refresh=False):\n return self.host_status", "def poll_host(self, server, obj, name):\n\n self.log.debug('found host: %s' % (name,))\n\n status = 0\n cpu_total = cpu_usage = cpu_percent = cpu_count = cpu_mhz_per_core = 0\n mem_total = mem_usage = mem_percent = 0\n vms_total = vms_running = vms_stopped = 0\n\n if '.' in name and name.count('.') != 3:\n name = name.split('.')[0]\n\n props = server._retrieve_properties_traversal(property_names=[\n 'name',\n 'summary.overallStatus',\n 'summary.quickStats.overallMemoryUsage',\n 'summary.quickStats.overallCpuUsage',\n 'summary.hardware.memorySize',\n 'summary.hardware.numCpuCores',\n 'summary.hardware.cpuMhz',\n ], from_node=obj, obj_type='HostSystem')\n\n for prop_set in props:\n for prop in prop_set.PropSet:\n pn, pv = prop.Name, prop.Val\n\n if pn == 'summary.overallStatus':\n status = HOST_STATUS.index(pv)\n elif pn == 'summary.quickStats.overallMemoryUsage':\n mem_usage = pv\n elif pn == 'summary.quickStats.overallCpuUsage':\n cpu_usage = pv\n elif pn == 'summary.hardware.memorySize':\n mem_total = pv / MB\n elif pn == 'summary.hardware.numCpuCores':\n cpu_count = pv\n elif pn == 'summary.hardware.cpuMhz':\n cpu_mhz_per_core = pv\n\n vms_total = len(server.get_registered_vms(obj))\n vms_running = len(server.get_registered_vms(obj, status='poweredOn'))\n vms_stopped = len(server.get_registered_vms(obj, status='poweredOff'))\n\n cpu_total = cpu_count * cpu_mhz_per_core\n cpu_percent = cpu_usage / float(cpu_total) * 100\n mem_percent = mem_usage / float(mem_total) * 100\n\n stats = {\n 'status': status,\n 'cpu_total': cpu_total,\n 'cpu_usage': cpu_usage,\n 'cpu_percent': cpu_percent,\n 'cpu_count': cpu_count,\n 'mem_total': mem_total,\n 'mem_usage': mem_usage,\n 'mem_percent': mem_percent,\n 'vms_total': vms_total,\n 'vms_running': vms_running,\n 'vms_stopped': vms_stopped,\n }\n\n return stats", "def host_info(vm_hostname):\n with _get_vm(vm_hostname) as vm:\n\n if vm.dataset_obj['datacenter_type'] != 'kvm.dct':\n raise NotImplementedError(\n 'This operation is not yet supported for {}'.format(\n vm.dataset_obj['datacenter_type'])\n )\n\n info = vm.info()\n\n # Disconnect fabric now to avoid messages after the table\n disconnect_all()\n\n categories = (\n ('General', (\n 'hypervisor',\n 'status',\n )),\n ('Network', (\n 'intern_ip',\n 'mac_address',\n )),\n ('Resources', (\n 'num_cpu',\n 'max_cpus',\n 'memory',\n 'memory_free',\n 'max_mem',\n 'disk',\n 'disk_size_gib',\n 'disk_free_gib',\n )),\n # Anything else will appear in this section\n ('Other', None),\n )\n\n def _progress_bar(free_key, capacity_key, result_key, unit):\n \"\"\"Helper to show nice progress bars.\"\"\"\n if free_key not in info or capacity_key not in info:\n return\n free = info[free_key]\n del info[free_key]\n capacity = info[capacity_key]\n del info[capacity_key]\n\n simple_stats = (\n 'Current: {} {unit}\\n'\n 'Free: {} {unit}\\n'\n 'Max: {} {unit}'.format(\n capacity - free, free, capacity, unit=unit))\n\n if not 0 <= free <= capacity > 0:\n log.warning(\n '{} ({}) and {} ({}) have weird ratio, skipping progress '\n 'calculation'.format(\n free_key, free, capacity_key, capacity)\n )\n info[result_key] = red(simple_stats)\n return\n\n assert 0 <= free <= capacity\n ratio = 1 - float(free) / float(capacity)\n if ratio >= 0.9:\n color = red\n elif ratio >= 0.8:\n color = yellow\n else:\n color = green\n\n max_bars = 20\n num_bars = int(round(ratio * max_bars))\n info[result_key] = (\n '[{}{}] {}%\\n{}'.format(\n color('#' * num_bars), ' ' * 
(max_bars - num_bars),\n int(round(ratio * 100)),\n simple_stats,\n )\n )\n\n _progress_bar('memory_free', 'memory', 'memory', 'MiB')\n _progress_bar('disk_free_gib', 'disk_size_gib', 'disk', 'GiB')\n\n max_key_len = max(len(k) for k in info.keys())\n for category, keys in categories:\n # Handle 'Other' section by defaulting to all keys\n keys = list(keys or info.keys())\n\n # Any info available for the category?\n if not any(k in info for k in keys):\n continue\n\n print('')\n print(white(category, bold=True))\n for k in keys:\n if k not in info:\n continue\n\n # Properly re-indent multiline values\n value = str(info.pop(k))\n value = ('\\n' + ' ' * (max_key_len + 3)).join(\n value.splitlines()\n )\n print('{} : {}'.format(k.ljust(max_key_len), value))", "def node_host_status(self, node):\n if node.is_online() or node.is_unreachable():\n return self.HOST_MONITORED\n else:\n return self.HOST_UNMONITORED", "def get_host_stats(self):\n status, data, errors, messages = self._make_get_request(CraftyAPIRoutes.HOST_STATS)\n \n if status == 200:\n return data\n elif status == 500:\n self._check_errors(errors, messages)", "def getHostInfo():", "def get_host_power_status(self):\n\n data = self._get_host_details()\n return data['Power'].upper()", "def _status(self, host):\n pass", "def host_list(self):\n try:\n scode, hosts = Rest.get('Host')\n except Exception as e:\n Console.error(e.message)\n return\n if len(hosts) == 0:\n print(\"No hosts exist\")\n return\n\n n = 1\n e = {}\n for host in hosts:\n d = {}\n d['Ip'] = str(host['Ip'])\n d['Name'] = str(host['Name'])\n d['Port'] = str(host['Port'])\n d['Swarmmode'] = str(host['Swarmmode'])\n e[n] = d\n n = n + 1\n Console.ok(str(Printer.dict_table(e, order=['Ip', 'Name', 'Port', 'Swarmmode'])))", "def ls(cls):\n for vm in cls._vm_agents_for_host():\n with vm:\n running = vm.qemu.process_exists()\n\n if running:\n vm_mem = vm.qemu.proc().memory_full_info()\n\n expected_size = (\n vm.cfg[\"memory\"] * 1024 * 1024\n + vm.qemu.vm_expected_overhead * 1024 * 1024\n )\n\n log.info(\n \"online\",\n machine=vm.name,\n cores=vm.cfg[\"cores\"],\n memory_booked=\"{:,.0f}\".format(vm.cfg[\"memory\"]),\n memory_pss=\"{:,.0f}\".format(vm_mem.pss / MiB),\n memory_swap=\"{:,.0f}\".format(vm_mem.swap / MiB),\n )\n else:\n log.info(\"offline\", machine=vm.name)", "def _get_host_utilization(context, host, ram_mb, disk_gb):\n instances = instance_get_all_by_host(context, host)\n vms = len(instances)\n free_ram_mb = ram_mb - FLAGS.reserved_host_memory_mb\n free_disk_gb = disk_gb - (FLAGS.reserved_host_disk_mb * 1024)\n\n work = 0\n for instance in instances:\n free_ram_mb -= instance.memory_mb\n free_disk_gb -= instance.root_gb\n free_disk_gb -= instance.ephemeral_gb\n if instance.vm_state in [vm_states.BUILDING, vm_states.REBUILDING,\n vm_states.MIGRATING, vm_states.RESIZING]:\n work += 1\n return dict(free_ram_mb=free_ram_mb,\n free_disk_gb=free_disk_gb,\n current_workload=work,\n running_vms=vms)", "def get_host_stats(self):\n status, data, errors, messages = self._make_get_request(CraftyAPIRoutes.SERVER_STATS)\n \n if status == 200:\n return data\n elif status == 500:\n self._check_errors(errors, messages)", "def get_health_info(handle, timeout):\n health = dict()\n\n health['stat'] = ceph_mon_command(handle, 'health' , timeout)\n # TODO command not known with ceph_mon_command\n #health['detail'] = ceph_mon_command(handle, 'health detail', timeout)\n health['detail'] = shell_command('ceph health detail') + b'\\n'\n health['df'] = ceph_mon_command(handle, 'df' , 
timeout)\n health['report'] = ceph_mon_command(handle, 'report' , timeout)\n\n return health", "def hostname(ctx):\n ctl = ctx.ctl\n\n jobs = ctl('list-avail', '--partition', 'main', flatten=False)\n\n if len(jobs) == 0:\n click.echo('No jobs running', err=True)\n sys.exit(1)\n\n for job in jobs:\n host = ctl('get-host', '--jobid', job['id']).get('host')\n click.echo(host)\n\n return 0", "def cluster_health(self, host):\n\n h = self.call_to_cluster(host, '/_cluster/health')\n\n data = {\n 'number_of_nodes': h['number_of_nodes'],\n 'unassigned_shards': h['unassigned_shards'],\n 'timed_out': h['timed_out'],\n 'active_primary_shards': h['active_primary_shards'],\n 'relocating_shards': h['relocating_shards'],\n 'active_shards': h['active_shards'],\n 'initializing_shards': h['initializing_shards'],\n 'number_of_data_nodes': h['number_of_data_nodes']\n }\n\n return data", "def status(self):\n \n tmpl1 = \"\"\"%-20s%-52s[%s]\"\"\"\n tmpl2 = \"\"\"%-20s%-52s\\n\"\"\"\n # print tmpl1 % (\"Machine Name\", \"IP Addresses\", \"Status\")\n # print 80 * \"-\"\n # print self.get_image()\n if self.cloudserver:\n # let's build the IPs first\n status = self.cloudserver.status\n \n else:\n status = \"OFF\"\n\n res2=\"\"\n ip1 = \"%s:%s\" % (self.networks[0], self.ip_addresses[self.networks[0]])\n if len(self.networks) > 1:\n res2 += \"\\n\"\n for network in self.networks[1:]:\n ipstr = \"%s:%s\" % (network, self.ip_addresses[network])\n res2+=tmpl2 % (\"-\", ipstr)\n # print res2\n # if len(self.ip_addresses.keys()) > 1:\n # ip1 = self.ip_addresses.values()[0]\n res1 = tmpl1 % (self.machine_name, ip1, status)\n return res1 + res2", "def host_info(self, host):\n\n endpoint = '/Domain/Host/Info'\n\n params = {\n 'Host' : host,\n }\n \n response = self.__perform_get_request(endpoint, params)\n\n if response.status_code == 200:\n parsed_response = response.json()\n return parsed_response", "def get_hosts_info(self):\n result = []\n index = 0\n while index < self.host_numbers:\n host = self.get_generic_host_entry(index)\n result.append({\n 'ip': host['NewIPAddress'],\n 'name': host['NewHostName'],\n 'mac': host['NewMACAddress'],\n 'status': host['NewActive']})\n index += 1\n return result", "def get_host_info(self):\n\n if len(self.index) == 0:\n # Need to load index from cache\n self.load_index_from_cache()\n\n if not self.args.host in self.index:\n # try updating the cache\n self.do_api_calls_update_cache()\n if not self.args.host in self.index:\n # host might not exist anymore\n return self.json_format_dict({}, True)\n\n node_id = self.index[self.args.host]\n print \"NODE ID %s\" % node_id\n print \"INDEX: %s\" % self.index\n\n node = self.get_node(node_id)\n node_vars = {}\n for direct_attr in [\n \"api_id\",\n \"datacenter_id\",\n \"label\",\n \"display_group\",\n \"create_dt\",\n \"total_hd\",\n \"total_xfer\",\n \"total_ram\",\n \"status\",\n \"alert_cpu_enabled\",\n \"alert_cpu_threshold\",\n \"alert_diskio_enabled\",\n \"alert_diskio_threshold\",\n \"alert_bwin_enabled\",\n \"alert_bwin_threshold\",\n \"alert_bwout_enabled\",\n \"alert_bwout_threshold\",\n \"alert_bwquota_enabled\",\n \"alert_bwquota_threshold\",\n \"backup_weekly_daily\",\n \"backup_window\",\n \"watchdog\"\n ]:\n node_vars[direct_attr] = getattr(node, direct_attr)\n\n node_vars[\"datacenter_city\"] = self.get_datacenter_city(node)\n node_vars[\"public_ip\"] = [addr.address for addr in node.ipaddresses if addr.is_public][0]\n\n return self.json_format_dict(node_vars, True)", "def opencloud_fetch_host_info( hostname ):\n raise 
Exception(\"Opencloud support not implemented\")", "def hosts_cmd(args):\n r = requete(\"Hosts.Host:get\")\n if not r:\n return\n if len(args) > 0:\n for i in range(0, len(args)):\n for _, host in r['status'].items():\n if (host['MACAddress'].lower() == args[i].lower()\n or host['HostName'].lower() == args[i].lower()\n or host['IPAddress'] == args[i]):\n # pprint.pprint(host)\n json.dump(host, sys.stdout, indent=4)\n else:\n #pprint.pprint(r['status'])\n for _, host in r['status'].items():\n actif = \" \" if host['Active'] else \"*\"\n if mac_parser is None:\n s = \"%-18s %-15s %c %-35s %s\" % (host['MACAddress'], host['InterfaceType'], actif, host['HostName'], host['IPAddress'])\n else:\n s = \"%-18s %-12s %-15s %c %-35s %s\" % (host['MACAddress'], mac_parser.get_manuf(host['MACAddress']), host.get('InterfaceType', \"\"), actif, host['HostName'], host['IPAddress'])\n print(s)", "def status(self):\n if self.qemu.is_running():\n status = 0\n self.log.info(\"vm-status\", result=\"online\")\n for device in list(self.qemu.block_info().values()):\n self.log.info(\n \"disk-throttle\",\n device=device[\"device\"],\n iops=device[\"inserted\"][\"iops\"],\n )\n else:\n status = 1\n self.log.info(\"vm-status\", result=\"offline\")\n for volume in self.ceph.volumes:\n locker = volume.lock_status()\n self.log.info(\"rbd-status\", volume=volume.fullname, locker=locker)\n consul = locate_live_service(self.consul, \"qemu-\" + self.name)\n if consul:\n self.log.info(\n \"consul\", service=consul[\"Service\"], address=consul[\"Address\"]\n )\n else:\n self.log.info(\"consul\", service=\"<not registered>\")\n return status", "def get_host_stats(self, refresh=False):", "def get_homed_status():\n\ttarget = send_command('getstatus home')\n\tsplit_ans = target.split()\n\t\n\treturn split_ans", "def get_info_hosts():\n print(\"\\nMapeando...\")\n host_ip = socket.gethostbyname(socket.gethostname()).split('.')\n base_ip = \".\".join(host_ip[0:3]) + '.'\n host_validos = []\n return_codes = dict()\n for i in range(1, 255):\n return_codes[base_ip + str(i)] = retorna_codigo_ping(base_ip + str(i))\n if i %20 == 0:\n print(\".\", end = \"\")\n if return_codes[base_ip + str(i)] == 0:\n host_validos.append(base_ip + str(i))\n print(\"\\nMapeamento completo, informações sobre portas enviadas...\")\n \n return host_validos", "def _retrieve_health_data(self):\n return self._client.request('_cluster/health', query={'level': 'shards'}).data", "def get_available_resource(self, nodename):\n curent_time = time.time()\n if curent_time - self.cleanup_time > CONF.azure.cleanup_span:\n self.cleanup_time = curent_time\n self._cleanup_deleted_os_disks()\n self._cleanup_deleted_nics()\n usage_family = 'basicAFamily'\n try:\n page = self.compute.usage.list(CONF.azure.location)\n except Exception as e:\n msg = six.text_type(e)\n LOG.exception(msg)\n ex = exception.ComputeUsageListFailure(reason=six.text_type(e))\n raise ex\n usages = [i for i in page]\n cores = 0\n cores_used = 0\n for i in usages:\n if hasattr(i, 'name') and hasattr(i.name, 'value'):\n if usage_family == i.name.value:\n cores = i.limit if hasattr(i, 'limit') else 0\n cores_used = i.current_value \\\n if hasattr(i, 'current_value') else 0\n break\n return {'vcpus': cores,\n 'memory_mb': 100000000,\n 'local_gb': 100000000,\n 'vcpus_used': cores_used,\n 'memory_mb_used': 0,\n 'local_gb_used': 0,\n 'hypervisor_type': hv_type.HYPERV,\n 'hypervisor_version': 300,\n 'hypervisor_hostname': nodename,\n 'cpu_info': '{\"model\": [\"Intel(R) Xeon(R) CPU E5-2670 0 @ '\n '2.60GHz\"], 
\"topology\": {\"cores\": 16, \"threads\": '\n '32}}',\n 'supported_instances': [(arch.I686, hv_type.HYPERV,\n vm_mode.HVM),\n (arch.X86_64, hv_type.HYPERV,\n vm_mode.HVM)],\n 'numa_topology': None\n }", "def get_hypervisor_info(self):\n try:\n req = Request(self.compute_url +\n \"/os-hypervisors/detail\" )\n self._upgrade_to_authenticated_request(req)\n resp = urlopen(req)\n content = resp.read().decode('utf-8')\n encoded = json.loads(content)\n resp.close()\n except URLError as e:\n return {}\n except Exception as e:\n raise Exception(\"Unable to process compute reponse: %s\" % e)\n\n return encoded['hypervisors']", "def get_health(self):\n return {'status': 'ok'}", "def compute_hypervisors(self):\n path = '/os-hypervisors/detail'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack hypervisors: %s' % truncate(res))\n return res[0]['hypervisors']" ]
[ "0.6819568", "0.6763305", "0.65834033", "0.6454942", "0.64086723", "0.6327985", "0.62002283", "0.6168387", "0.6132985", "0.6122318", "0.60854673", "0.60717744", "0.6018915", "0.6010263", "0.6002129", "0.59691036", "0.59649265", "0.5933538", "0.5916599", "0.5898467", "0.58907485", "0.58585894", "0.5839572", "0.58347464", "0.57793343", "0.5752572", "0.57420063", "0.57403666", "0.5714569", "0.5709444" ]
0.6995629
0
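As a usage illustration of the record above, the following standalone sketch reproduces its per-node stats pattern: build one status dict per configured node, fail on an empty node list, and unwrap a single-node result. The resource limits and node name below are illustrative placeholders, not the driver's real configuration values, and RuntimeError stands in for the Nova exception type:

def get_host_stats(nodes, max_vcpus=8, max_memory_mb=16384, max_disk_gb=256):
    # One status dict per configured node, mirroring the driver loop above.
    stats = [{
        "hypervisor_hostname": nodename,
        "vcpus": max_vcpus,
        "memory_mb": max_memory_mb,
        "local_gb": max_disk_gb,
    } for nodename in nodes]
    if not stats:
        raise RuntimeError("driver has no node")  # stands in for NovaException
    return stats[0] if len(stats) == 1 else stats

print(get_host_stats(["azure-node-1"]))  # single node -> a single dict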
Check if inputs are video or not
def check_is_video(self, inputs): if isinstance(inputs, list): return True if isinstance(inputs, np.ndarray) and len(inputs.shape) == 4: return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check(self):\n #\n # *****************\n # *****************\n # TODO: Check really if video is valid\n # *****************\n # *****************\n return True", "def __check_for_video_file(self):\n formats = ('avi', 'mpg', 'mpeg', 'mp4')\n if os.path.splitext(self.args.input.name)[-1] in (\".%s\" % ext for ext in formats):\n # we got a valid (at least according to extension) file\n pass\n else:\n logging.critical(\"Input is not a video file. Only supports %s\" % \", \".join(formats))\n sys.exit(10)", "def is_video(self):\n val = False\n if self.__dict__['codec_type']:\n if self.__dict__['codec_type'] == 'video':\n val = True\n return val", "def has_video(self):\n return self.__video_format is not None", "def is_video(self):\n if self.settings.background_image is None:\n return False\n\n filename, _ = b64decode_file(self.settings.background_image)\n self.mimetype, _ = mimetypes.guess_type(filename)\n return 'video' in self.mimetype", "def supportedType(request, video_types):\n return request.FILES['file'].content_type in video_types.keys()", "def is_valid(video):\n return video.length != -1", "def isVideoFolder():", "def is_video(mine=None, file=None):\n if file:\n mine = get_file_mine(file)\n print(mine)\n\n if mine:\n return mine.find('video') != -1\n\n return False", "def get_video(self):\n if self.parsing_template.video and self.parsing_template.video in self.headline.url:\n return True\n return False", "def is_valid(self, value) -> 'True | str':\n err_str = super().is_valid()\n if isinstance(err_str, str):\n return err_str\n try:\n cv2.VideoCapture(value)\n except Exception as e:\n return str(e)\n return True", "def is_video_wanted(video: AnimeThemeVideo) -> bool:\n for k in ('nc','subbed','lyrics','uncen'):\n v = OPTIONS['filter'][k]\n if v is not None and video[k] ^ v:\n return False\n if video['resolution'] < OPTIONS['filter']['resolution']:\n return False\n if OPTIONS['filter']['source'] is not None and video['source'] != OPTIONS['filter']['source']:\n return False\n if OPTIONS['filter']['overlap'] is not None and video['overlap'] not in OPTIONS['filter']['overlap']: # uses lists\n return False\n \n return True", "def enable_video(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_video\")", "def main_func_video(param_list: list = None) -> bool:\r\n # index of param\r\n # noinspection PyPep8Naming\r\n PORT_RAW_PICT = 0\r\n\r\n # check if param OK\r\n if len(param_list) != 1:\r\n log_error_to_console(\"GET FRAME VIDEO MAIN FUNCTION PARAM NOK\", str(len(param_list)))\r\n return False\r\n else:\r\n port_image = get_port_from_wave(name=param_list[PORT_RAW_PICT])\r\n\r\n try:\r\n # noinspection PyUnresolvedReferences\r\n success, port_image.arr[:] = global_var_handler.VIDEO.read()\r\n if success is True:\r\n port_image.set_valid()\r\n except BaseException as error:\r\n is_error()\r\n # noinspection PyUnresolvedReferences\r\n log_error_to_console('RAW PICTURE NOK TO READ: ' + str(global_var_handler.VIDEO.__str__()), str(error))\r\n port_image.set_invalid()\r\n pass\r\n\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(str(global_var_handler.FRAME))\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(global_var_handler.STR_L0_SIZE)\r\n\r\n return True", "def valid_video_file(file):\r\n return file.lower().endswith(('.ogg', '.webm'))", "def can_upload_video(self):\n return self.userprofile.user.has_perm('distance_learning.add_video')", "def __sanitize_input(self):\n self.__check_for_video_file()\n self.__manage_output_folder()", "def 
post_video(self, url: str, text: str) -> bool:\n return False", "def is_video(self, given_file):\n video_extensions = ['mp4', 'flv', 'avi', 'mp3', 'flaac']\n\n if not isinstance(given_file, str):\n try: # iter in play cmd\n given_file = given_file[0]\n except TypeError:\n given_file = given_file\n return any([ext for ext in video_extensions\n if given_file.endswith(ext)])", "def check_video_format(movie_file, desired_format='.mp4', original_format='.avi'):\n\n if not os.path.isfile(movie_file+original_format):\n print 'Error. avi file does not exist:'+movie_file+'.avi'\n if not os.path.isfile(movie_file+desired_format):\n cmd = ['ffmpeg']\n cmd += ['-i', movie_file+original_format]\n cmd += [movie_file+desired_format]\n cmd_string = ''.join([\"%s \" % el for el in cmd])\n #print '-->Running: ', cmd_string\n p = subprocess.Popen(cmd, shell=False)\n p.wait()", "def main_func_video_camera(param_list: list = None) -> bool:\r\n # index of param\r\n # noinspection PyPep8Naming\r\n PORT_RAW_PICT = 0\r\n\r\n # check if param OK\r\n if len(param_list) != 1:\r\n log_error_to_console(\"GET FRAME VIDEO CAPTURE MAIN FUNCTION PARAM NOK\", str(len(param_list)))\r\n return False\r\n else:\r\n port_image = get_port_from_wave(name=param_list[PORT_RAW_PICT])\r\n\r\n try:\r\n # noinspection PyUnresolvedReferences\r\n success, port_image.arr[:] = global_var_handler.VIDEO.read()\r\n if success is True:\r\n port_image.set_valid()\r\n except BaseException as error:\r\n is_error()\r\n # noinspection PyUnresolvedReferences\r\n log_error_to_console('RAW PICTURE NOK TO READ: ' + str(global_var_handler.VIDEO.__str__()), str(error))\r\n port_image.set_invalid()\r\n pass\r\n\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(str(global_var_handler.FRAME))\r\n # noinspection PyUnresolvedReferences\r\n log_to_file(global_var_handler.STR_L0_SIZE)\r\n\r\n return True", "def check_media(self, media):\n return AbstractVLC.check_media(self, os.path.join(settings.get(\"path\", \"relative\", \"video\"), media))", "def allow_video(self, video_id):\n print(\"allow_video needs implementation\")", "def allow_video(self, video_id):\n print(\"allow_video needs implementation\")", "def allow_video(self, video_id):\n print(\"allow_video needs implementation\")", "def allow_video(self, video_id):\n print(\"allow_video needs implementation\")", "def movie_media_type(name):\n return name.endswith(('.ogv', '.vob', '.mp4', '.wmv', '.mov', '.mpeg'))", "def check_input(self):\n try:\n if(self.datatype == \"eeg\"):\n self.model.set_datatype(self.datatype)\n self.model.set_dyad(self.dyad)\n self.model.set_channel(self.channel_or_video)#causes loading of data\n elif(self.datatype == \"motion\"):\n self.model.set_datatype(self.datatype)\n self.model.set_filepath(self.database.dictionary[str(self.dyad)][\"video\"][str(self.channel_or_video)][\"motion\"][\"in_roi\"][\"1\"][\"path\"])#TODO NOT ALWAYS 1\n self.model.set_channel(self.channel_or_video)\n else:\n QMessageBox.about(self, \"Incorrect selection\", \"Choose datatype\")\n self.accept()\n except KeyError as e:\n QMessageBox.about(self, \"Incorrect selection\", \"Please choose wisely\" + str(e))", "def __check_video(self):\n stream_status = self.communications.get_video()\n if self.__video_status[\"last_video_streaming\"] != stream_status: # Set initial video status\n self.__video_status[\"last_video_streaming\"] = stream_status\n self.__live_video_stream(stream_status)", "def testVideoTrackType(self):\n\n trackLine = _buildTrackLine(0, 'video', {'hello': 'goodbye'})\n\n trackID, 
trackType, trackDict = tools._trackInfo(trackLine)\n\n self.assertEqual(\n 'video',\n trackType,\n )" ]
[ "0.7698217", "0.75833863", "0.7378489", "0.72596055", "0.70826477", "0.6927467", "0.6915077", "0.68082577", "0.65910804", "0.65723324", "0.65678036", "0.65032965", "0.64527184", "0.64243037", "0.64173114", "0.6349831", "0.63205665", "0.63025355", "0.6205517", "0.61707026", "0.6130693", "0.6116143", "0.6016516", "0.6016516", "0.6016516", "0.6016516", "0.5983702", "0.5875935", "0.5829726", "0.5806167" ]
0.7646629
1
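A short usage demonstration of the record above, written as an independent sketch of the same check: a list of frames or a 4-D ndarray (frames, height, width, channels) counts as video, while a single 3-D image does not (requires numpy):

import numpy as np

def check_is_video(inputs):
    # A list of frames, or a 4-D array (frames, H, W, C), is treated as video.
    if isinstance(inputs, list):
        return True
    if isinstance(inputs, np.ndarray) and inputs.ndim == 4:
        return True
    return False

assert check_is_video(np.zeros((8, 64, 64, 3)))   # 8-frame clip -> video
assert not check_is_video(np.zeros((64, 64, 3)))  # single image -> not video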
Evaluates the postfix expression 's'.
def eval_postfix(s): stack = Stack() s = s.split() for i in s: if operator(i) == False: stack.push(int(i)) else: b = stack.pop() a = stack.pop() result = evaluate(a, i, b) stack.push(result) return stack.pop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eval_postfix(s):\n stack = Stack()\n for x in s.split(): # rozděl 's' dle mezer\n if x == '+':\n stack.push(stack.pop() + stack.pop())\n elif x == '-':\n stack.push(-stack.pop() + stack.pop())\n elif x == '*':\n stack.push(stack.pop() * stack.pop())\n elif x == '/':\n second = stack.pop()\n stack.push(stack.pop() / second)\n else:\n stack.push(float(x))\n return stack.pop()", "def postfix_eval(postfix_expr):\n s = StackArray()\n expr = postfix_expr.split()\n for token in expr:\n if token[0] in '0123456789':\n res = token\n s.push(res)\n else: # token is operator\n op2 = s.pop()\n op2 = float(op2)\n if s.is_empty(): # token is ~\n # could also be ~ for non-empty stack\n res = -1 * op2\n else:\n op1 = s.pop()\n op1 = float(op1)\n if token == '^':\n res = op1 ** op2\n elif token == '~':\n s.push(op1)\n res = -1 * op2\n elif token == '*':\n res = op1 * op2\n elif token == '/':\n if op2 == 0:\n raise ZeroDivisionError\n else:\n res = op1 / op2\n elif token == '+':\n res = op1 + op2\n else: # token == '-'\n res = op1 - op2\n s.push(res)\n return res", "def evaluate_infix(string):\n return postfix(infix_to_postfix(string))", "def infix_to_postfix(s):\n result = \"\" # output string\n op = Stack() # operator stack\n i = 0 # index to 's'\n while i < len(s):\n if s[i] in \"0123456789\":\n while i < len(s) and s[i] in \"0123456789\":\n result += s[i]\n i += 1\n result += \" \"\n continue\n if s[i] == '(':\n op.push(s[i])\n elif s[i] == ')':\n top = op.pop()\n while top != '(':\n result += top + \" \"\n top = op.pop()\n else: # s[i] is +,-,*,/\n while not op.is_empty() and not higher_prec(s[i], op.peek()):\n result += op.pop() + \" \"\n op.push(s[i])\n i += 1\n while not op.is_empty():\n result += op.pop() + \" \"\n return result", "def postfix_eval(input_str):\n\n \"\"\"Input argument: a string containing a postfix expression where tokens \n are space separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns the result of the expression evaluation. \n Raises an PostfixFormatException if the input is not well-formed\"\"\"\n if input_str is None: raise PostfixFormatException\n # create list of operands and operators\n term_list = input_str.split()\n # initialize stack large enough to contain all operands\n operand_stack = Stack(2*len(term_list)//3+1)\n # iterate over term_list\n for term in term_list:\n # check for operatorm, evaluate operators on A & B if True\n if operator_present(term) is True:\n if operand_stack.size()<2: \n raise PostfixFormatException(\"Insufficient operands\")\n B = operand_stack.pop()\n A = operand_stack.pop()\n operand_stack.push(\n calculate(\n A, # A\n B, # B\n term) # operator\n )\n # check for operand, push to stack if True\n elif operand_present(term) is True:\n operand_stack.push(term)\n else: raise PostfixFormatException(\"Invalid token\")\n if len(term_list) % 3 != 0: raise PostfixFormatException(\"Too many operands\")\n return operand_stack.pop()", "def evaluatePostfixExp(self, postfixExpr):\n\n operandStack = []\n tokenList = postfixExpr.split(\" \")\n\n for token in tokenList:\n if self.isOperand(token):\n if \".\" in token:\n token = float(token)\n else:\n token = int(token)\n operandStack.append(token)\n else: # token is an operator\n operand2 = operandStack.pop()\n operand1 = operandStack.pop()\n try:\n result = self.applyOperator(operand1, operand2, token)\n except Exception as error:\n print(\"Invalid input. 
Please enter a valid arithmetic expression.\") # Most likely division by\n # zero error.\n return\n operandStack.append(result)\n return operandStack.pop()", "def infix_to_postfix(input_str): # postfix requires that all operators proceed after the two operands that they work on\n\n \"\"\"Input argument: a string containing an infix expression where tokens are \n space separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns a String containing a postfix expression \"\"\"\n if input_str is None: raise ValueError\n # Split input string\n term_list = input_str.split()\n #print(\"TERM LIST \",term_list) \n # Create output list, will be fed to postfix_eval() at end\n output_list = []\n # initialize stack large enough to contain all operators\n operator_stack = Stack(len(term_list)//3+1)\n for term in term_list:\n # check for operand, if present append to output list\n if operand_present(term) is True:\n output_list.append(term)\n # check for operator\n elif operator_present(term) or term == '(' or term == ')':\n #if operand_stack.size()<2: \n # raise PostfixFormatException(\"Insufficient operands\")\n # Check for open parentheses\n if term == '(': operator_stack.push(term)\n # Check for closing parentheses, pop stack until open parentheses found\n elif term == ')':\n while 1:\n token = operator_stack.pop()\n if token != '(': \n output_list.append(token)\n else: break\n # Otherwise push to stack but pop any higher/equal order operators\n else:\n sort_operators(term, operator_stack, output_list)\n #print(operator_stack.peek())\n #else: raise PostfixFormatException(\"Invalid token\")\n #if len(term_list) % 3 != 0: raise PostfixFormatException(\"Too many operands\")\n while operator_stack.size() != 0:\n output_list.append(operator_stack.pop())\n new_str = (\" \".join(output_list))\n #print(\"NEW STR \", new_str)\n return new_str", "def infix_to_postfix(string):\n \n # Validate and tokenize the string\n tokens = validate(string)\n \n # Initialize the stack\n s = Stack()\n\n # Ready the final postfix expression\n postfix = ''\n \n # List of operators that have to be handled\n operators = ['+', '-', '*', '/', '^', 'sqrt', 'u-', '(', ')']\n \n # Iterate through tokens\n for token in tokens:\n if token in operators:\n if token in ['sqrt', 'u-']:\n # Square root and unary minus have the highest precendence. So\n # they get pushed on to the stack immediately\n s.push(token)\n elif token == '^':\n top = s.peek()\n while top in ['sqrt', 'u-']:\n postfix += s.pop() + ' '\n top = s.peek()\n s.push(token)\n elif token in ['*', '/']:\n # Multiplication and division have the same precedence. Order\n # is determined by order of appearance\n top = s.peek()\n while top in ['sqrt', 'u-', '^']:\n postfix += s.pop() + ' '\n top = s.peek()\n s.push(token)\n elif token in ['+', '-']:\n # Addition and subtraction have the same precedence. 
Order is\n # determined by order of appearance\n top = s.peek()\n while top in ['sqrt', 'u-', '^', '*', '/']:\n postfix += s.pop() + ' '\n top = s.peek()\n s.push(token)\n elif token == '(':\n s.push(token)\n elif token == ')':\n top = s.peek()\n while top != '(':\n postfix += s.pop() + ' '\n top = s.peek()\n s.pop()\n else: # Token is a number or variable\n postfix += token + ' '\n\n # Pop out any more operators that might be sitting on the stack\n while(len(s)):\n postfix += s.pop() + ' '\n\n # Get rid of trailing whitespace and print\n postfix = postfix.strip()\n return postfix", "def eval(self, s):\n phi_s = self.basify(s)\n probs = softmax(np.dot(self.params, phi_s))\n return probs", "def expr(s):\n if isinstance(s, Expr): return s\n if isnumber(s): return Expr(s)\n ## Replace the alternative spellings of operators with canonical spellings\n s = s.replace('==>', '>>').replace('<==', '<<')\n s = s.replace('<=>', '%').replace('=/=', '^')\n ## Replace a symbol or number, such as 'P' with 'Expr(\"P\")'\n s = re.sub(r'([a-zA-Z0-9_.]+)', r'Expr(\"\\1\")', s)\n ## Now eval the string. (A security hole; do not use with an adversary.)\n return eval(s, {'Expr':Expr})", "def postfix_eval(input_str: str) -> Any:\n \"\"\"Input argument: a string containing a postfix expression where tokens \n are space separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns the result of the expression evaluation. \n Raises an PostfixFormatException if the input is not well-formed\"\"\"\n stack = Stack(30)\n if input_str == \"\":\n raise PostfixFormatException('Insufficient operands')\n op_list = [\"+\", \"-\", \"*\", \"/\", \"<<\", \">>\", \"**\"]\n split_list = input_str.split()\n for i in split_list:\n new_val = i.lstrip(\"-\")\n new_val = new_val.replace(\".\", \"\", 1)\n if i in op_list:\n try:\n num_val = stack.pop()\n num_val_initial = stack.pop()\n except IndexError:\n raise PostfixFormatException(\"Insufficient operands\")\n if i == \"+\":\n stack.push(num_val_initial + num_val)\n if i == \"-\":\n stack.push(num_val_initial - num_val)\n if i == \"*\":\n stack.push(num_val_initial * num_val)\n if i == \"/\":\n if num_val == 0:\n raise ValueError(\"0 not divisible\")\n stack.push(num_val_initial / num_val)\n if i == \"**\":\n stack.push(num_val_initial ** num_val)\n if i == \"<<\":\n t1 = type(num_val)\n t2 = type(num_val_initial)\n if t1 == float or t2 == float:\n raise PostfixFormatException(\"Illegal bit shift operand\")\n stack.push(num_val_initial << num_val)\n if i == \">>\":\n t1 = type(num_val)\n t2 = type(num_val_initial)\n if t1 == float or t2 == float:\n raise PostfixFormatException(\"Illegal bit shift operand\")\n stack.push(num_val_initial >> num_val)\n elif new_val.isdigit():\n if \".\" in i:\n stack.push(float(i))\n else:\n stack.push(int(i))\n else:\n raise PostfixFormatException(\"Invalid token\")\n val = stack.pop()\n if not stack.is_empty():\n raise PostfixFormatException(\"Too many operands\")\n return val", "def infix_to_postfix(self, exp):\n\n try:\n for i in exp:\n #if the character is an operand output it\n if self.is_operand(i):\n self.postfix.append(i)\n\n #if the character is '(' push it\n elif i is '(':\n self.push('(')\n\n elif i is ')':\n #if the character is ')\" pop until we encounter '(' in the stack\n while not self.isEmpty() and self.peek() is not '(':\n self.postfix.append(self.pop())\n if not self.isEmpty() and self.peek() is not '(':\n return -1\n else:\n self.pop()\n\n #if an operator is encountered\n else:\n while not 
self.isEmpty() and self.peek() is not '(' and self.not_greater(i):\n self.postfix.append(self.pop())\n self.push(i)\n while not self.isEmpty():\n self.postfix.append(self.pop())\n\n return ''.join(self.postfix)\n\n except Exception as e:\n print(\"Error occurred while performing infix to postfix conversion :\", e)\n traceback.print_exc()\n return -1", "def evaluatePostfix(postfix, variableList, variableLocation, methodVariables, output):\n\n stack = [] # Stack that will contain our pushed operands from the postfix expression\n immediateCount = 0 # Keeps count of how many immediate values are being expressed (not variables)\n sourceRegister = 1 # Source register starts at 1: \"B\", and increments as needed\n destRegister = 0 # Destination register starts at 0: 'A\" and increments as needed\n immFlag = 0 # Used to determine whether source or destination register holds an immediate\n\n for element in postfix:\n # Evaluate each postfix element one by one to determine appropriate action\n\n if sourceRegister > 6 or destRegister > 6:\n # We cap the total amount of registers used to 7 (0-6)\n raise ValueError(\"Too many operands in formula.\")\n\n if element in OPERATIONS:\n # Here, our element is an operator. This means we need to pop the top two values from the stack and\n # execute the given operation.\n operand1, operand2 = stack.pop(), stack.pop()\n\n if operand1 in variableList:\n # The operand is in the list of local variables, so we read the value from memory\n output.write(\" MEMR [4] #\" + str(variableLocation[operand1]) + \" $\" + REGISTERS[sourceRegister] + \"\\n\")\n operand1 = REGISTERS[sourceRegister]\n\n elif operand1 in methodVariables:\n # The operand is in the list of arguments passed into the method. We consult the methodVariables list\n # to determine the appropriate offset from the stack pointer register S2.\n output.write(\" MOV $A2 $S2\\n\")\n output.write(\" ADD #\" + str(int(methodVariables[operand1][1]) * 4) + \" $A2\\n\")\n output.write(\" MEMR [4] $A2 $\" + REGISTERS[sourceRegister] + \"\\n\")\n operand1 = REGISTERS[sourceRegister]\n\n elif operand1 in REGISTER_NAMES:\n # This is simply a register that was pushed onto the stack. We can keep it as is\n pass\n\n else:\n # The operand is an immediate value. We test to see if it's a valid integer\n try:\n isinstance(operand1, int)\n immediateCount += 1\n immFlag = 1\n except ValueError as e:\n raise ValueError(\"Invalid operand\")\n\n if operand2 in variableList:\n # The operand is in the list of local variables, so we read the value from memory\n output.write(\" MEMR [4] #\" + str(variableLocation[operand2]) + \" $\" + REGISTERS[destRegister] + \"\\n\")\n operand2 = REGISTERS[destRegister]\n\n elif operand2 in methodVariables:\n # The operand is in the list of arguments passed into the method. We consult the methodVariables list\n # to determine the appropriate offset from the stack pointer register S2.\n output.write(\" MOV $B2 $S2\\n\")\n output.write(\" ADD #\" + str(int(methodVariables[operand2][1]) * 4) + \" $B2\\n\")\n output.write(\" MEMR [4] $B2 $\" + REGISTERS[destRegister] + \"\\n\")\n operand2 = REGISTERS[destRegister]\n\n elif operand2 in REGISTER_NAMES:\n # This is simply a register that was pushed onto the stack. We can keep it as is\n pass\n\n else:\n # The operand is an immediate value. 
We test to see if it's a valid integer\n try:\n isinstance(operand2, int)\n immediateCount += 1\n immFlag = 2\n except ValueError as e:\n raise ValueError(\"Invalid operand\")\n\n if immediateCount == 2:\n # If we have two immediate values, we don't really need to calculate the arithmetic in Capua ASM.\n # We discretely do the calculations in the background and push the value to the stack. This avoids\n # unnecessary processing.\n try:\n stack.append(int(OPERATIONS[element]['function'](float(operand2), float(operand1))))\n\n except ZeroDivisionError:\n raise ValueError(\"Error: Division by zero! - {} {} {}\".format(operand2, element, operand1))\n\n else:\n if immediateCount == 1:\n # only one of the operands was an immediate value. We determine which one is the immediate value,\n # as the correct instruction output depends on it.\n if immFlag == 1:\n output.write(\" MOV #\" + str(int(operand1)) + \" $\" + REGISTERS[sourceRegister] + \"\\n\")\n operand1 = REGISTERS[sourceRegister]\n\n elif immFlag == 2:\n output.write(\" MOV #\" + str(int(operand2)) + \" $\" + REGISTERS[destRegister] + \"\\n\")\n operand2 = REGISTERS[destRegister]\n\n else:\n # No operands were immediate values. We can do the arithmetic operation as is.\n # We move the source and destination registers up one letter for the next operation\n sourceRegister += 1\n destRegister += 1\n\n output.write(\" \" + INSTRUCTIONS[element] + \" $\" + str(operand1) + \" $\" + str(operand2) + \"\\n\")\n stack.append(operand2)\n\n immediateCount = 0\n\n else:\n # We have an operand to push onto the stack\n stack.append(element)\n\n if len(stack) != 1:\n # If the stack has more than or less than one element, the expression is incorrect.\n raise ValueError(\"invalid expression.\")\n\n # our result is then \"saved\" into register A. 
The assignment can now be completed.\n result = stack.pop()\n\n if result in REGISTER_NAMES:\n # If we just have a register at the bottom of the stack, we assume the result is already in register A\n pass\n\n else:\n try:\n isinstance(int(result), int)\n output.write(\" MOV #\" + str(result) + \" $A\\n\")\n except ValueError as e:\n raise ValueError(\"Invalid mathematical expression\")", "def infix_to_postfix(self, expr: str) -> str:\n\n # The stack that we will be performing operations on\n stack: list[str] = []\n\n # The output\n output: str = \"\"\n\n # We always need surrounding parentheses\n expr = f\"({expr})\"\n\n # The tokenized expression\n expr = self.tokenize_expr(expr)\n\n\n \n # For every token in expression\n for token in expr:\n # Check what token it is\n if token == \"(\":\n # If it is a (, then append to stack\n stack.append(\"(\")\n elif token == \")\":\n # If it is a ), then iterate over stack\n while stack[-1] != '(':\n # Popping the last item from stack, to output\n # Include a trailing space\n # Until the last item in the stack is a (\n output += f\"{stack.pop()} \"\n # Pop the last ( from the stack\n stack.pop()\n elif re.match(r\"[a-zA-Z_][a-zA-Z0-9_]*\", token):\n # If it matches a name/variable\n # Append to output with a trailing space\n output += f\"{token} \"\n elif re.match(r\"\\d+\",token):\n # If it is a number\n # Then append with a trailing space\n output += f\"{token} \"\n else:\n if self.is_token(token):\n # If it is a token\n # Pop it from the stack while\n # It's priority is smaller than\n # the last priority of the stack\n # Put it into output with a trailing space\n while self.get_token_priority(token) <= self.get_token_priority(stack[-1]):\n output += f\"{stack.pop()} \"\n # And append token to stack\n stack.append(token)\n # Return output\n return output", "def postfix(t_input):\r\n # guardo se gli elementi contengono caratteri non validi\r\n if is_valid(t_input) == 1:\r\n # restituisco Invalid se sono stati trovati caratteri invalidi\r\n result = \"Invalid\"\r\n return result\r\n\r\n # scorri di nuovo gli elementi\r\n # NOTA: sarebbe piu' efficiente fare un unico ciclo\r\n for element in t_input.strip(\"\\0\").split(\" \"):\r\n if element in [\"-\", \"+\", \"*\", \"/\"]:\r\n # ho trovato operatore, ricavo operandi dallo stack\r\n right_operand = stack.pop()\r\n left_operand = stack.pop()\r\n\r\n # faccio l'operazione che serve\r\n if element == \"-\":\r\n op_result = left_operand - right_operand\r\n elif element == \"+\":\r\n op_result = left_operand + right_operand\r\n elif element == \"*\":\r\n op_result = left_operand * right_operand\r\n else:\r\n op_result = left_operand // right_operand\r\n\r\n if boold:\r\n print(\"[DEBUG] Ho trovato operatore '{}': {} {} {} = {}\".format(element, left_operand, element, right_operand, op_result))\r\n # inserisco nello stack il risultato dell'operazione\r\n stack.push(op_result)\r\n else:\r\n # ho trovato operando, lo metto nello stack\r\n # > NOTA: e' necessaria conversione stringa -> intero\r\n stack.push(int(element))\r\n \r\n if boold:\r\n stack.print()\r\n\r\n # il risultato e' l'ultimo elemento\r\n # > NOTA: e' necessaria conversione intero -> stringa\r\n result = str(stack.pop())\r\n return result", "def recursive_eval(sexpr):\n newexpr = rewrite_node(sexpr)\n newexpr.apply(recursive_eval)\n return newexpr", "def eval_one(self, s, a):\n return self.eval(s)[a]", "def infix_to_postfix(input_str: str) -> Any:\n \"\"\"Input argument: a string containing an infix expression where tokens are \n space 
separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns a String containing a postfix expression \"\"\"\n stack = Stack(30)\n if input_str == '':\n return ''\n op_list = [\"+\", \"-\", \"*\", \"/\", \"<<\", \">>\", \"**\"]\n order = {}\n order[\"+\"] = 1\n order[\"-\"] = 1\n order[\"*\"] = 2\n order[\"/\"] = 2\n order[\"**\"] = 3\n order[\"<<\"] = 4\n order[\">>\"] = 4\n pfix_str = ''\n split_list = input_str.split()\n for i in split_list:\n new_val = i.lstrip(\"-\")\n new_val = new_val.replace(\".\", \"\", 1)\n if new_val.isdigit() and pfix_str == \"\":\n pfix_str = pfix_str + i\n elif i in op_list:\n if not stack.is_empty():\n p = stack.peek()\n while 0 < stack.size():\n p = stack.peek()\n if p == \"(\":\n break\n if i == \"**\":\n if order[p] <= order[i]:\n break\n else:\n p1 = stack.pop()\n pfix_str = pfix_str + \" \" + p1\n elif order[p] < order[i]:\n break\n else:\n p2 = stack.pop()\n pfix_str = pfix_str + \" \" + p2\n stack.push(i)\n elif i == \"(\":\n stack.push(i)\n elif new_val.isdigit():\n pfix_str = pfix_str + \" \" + i\n elif i == \")\":\n p = stack.peek()\n while p != \"(\":\n pfix_str = pfix_str + \" \" + stack.pop()\n if not stack.is_empty():\n p = stack.peek()\n stack.pop()\n while not stack.is_empty():\n pop3 = stack.pop()\n pfix_str = pfix_str + \" \" + pop3\n return pfix_str", "def evaluate_postfix(list_input):\n stack_values = []\n\n for item in list_input:\n # debug stuff\n # print \"item\", item\n try:\n item_value = float(item)\n has_value = True\n except ValueError:\n has_value = False\n\n # value, operand, put on stack\n if has_value:\n stack_values.append(item_value)\n has_value = False\n\n # operator, pull two operands from stack\n elif (has_value == False\n and len(stack_values) >= 2):\n second_value = stack_values.pop()\n first_value = stack_values.pop()\n result = evaluate_op(item,\n first_value,\n second_value)\n stack_values.append(result)\n # debug stuff\n # print \"midstep\", result\n\n return stack_values.pop()", "def rep_of_s(s, final_rep_eqc):\n if final_rep_eqc == []:\n print(\"Error, did not find a rep for state s\")\n else:\n x_X = final_rep_eqc[0]\n if s in x_X[1]:\n return x_X[0]\n else:\n return q0_of(s, final_rep_eqc[1:])", "def _get_postfix_notation(self):\n postfix, operators_stack = list(), list() # initialize postfix list and auxiliary stack\n\n for element in self.expression.split():\n if element in self.OPERATORS:\n if operators_stack:\n # while stack isn't empty and \"stack top\" is stronger(e.g. 
multiplication is stronger than addition)\n # move \"stack top\" into postfix list\n while operators_stack \\\n and operators_stack[-1] in self.OPERATORS \\\n and self.OPERATOR_WEIGHT[operators_stack[-1]] >= self.OPERATOR_WEIGHT[element]:\n postfix.append(operators_stack.pop())\n\n operators_stack.append(element)\n\n elif element == self.BRACKET_LEFT:\n operators_stack.append(element)\n\n elif element == self.BRACKET_RIGHT:\n # searching for left bracket on stack, moving \"stack Top\" to postfix list\n while operators_stack and operators_stack[-1] != self.BRACKET_LEFT:\n postfix.append(operators_stack.pop())\n operators_stack.pop() # remove left bracket\n\n else: # numbers always goes into postfix list\n postfix.append(self._get_number_from_string(element))\n\n if operators_stack: # move others stack elements to postfix list\n postfix.extend(reversed(operators_stack))\n\n return postfix", "def toPostfix (self,infix):\n postfix = []\n stack = []\n # Loop over characters in the input string\n for char in infix:\n # If char is a number add it to postfix\n if isFloat(char):\n postfix.append(char)\n # If its a special number add it to postfix\n elif char in Calculator.specialNumbers:\n postfix.append(char)\n # If char is a function push it onto the stack\n elif char in Calculator.functions:\n stack.append(char)\n # If the char is a function argument separator (,) pop operators off the stack onto\n # postfix until ( is reached\n elif char == ',':\n while stack[-1] != '(':\n postfix.append(stack.pop())\n # If the size of the stack reaches 0 without finding a ( there are unmatched brackets.\n if len(stack) == 0:\n return \"Unmatched Error\"\n # If char is an operator O\n elif char in Calculator.operators:\n # While there is an operator, P, on the top of stack\n while len(stack)>0 and stack[-1] in Calculator.operators:\n stackTop = stack[-1]\n precChar = Calculator.operators[char][1]\n precStackTop = Calculator.operators[stackTop][1]\n # If O in -?+* and its precedence is <= P, pop P off stack\n if char in Calculator.operators and precChar <= precStackTop:\n postfix.append(stack.pop())\n else:\n break\n # Push O onto stack\n stack.append(char)\n # If char is (, push it onto the stack\n elif char == '(':\n stack.append(char)\n # If char is )\n elif char == ')':\n # If the size of the stack reaches 0 without finding a ( there are unmatched brackets.\n if len(stack) == 0:\n return \"Unmatched Error\"\n # While top of stack isn't ( pop operators off the top of the stack\n while stack[-1] != '(':\n postfix.append(stack.pop())\n # If the size of the stack reaches 0 without finding a ( there are unmatched brackets.\n if len(stack) == 0:\n return \"Unmatched Error\"\n # Pop ( off the stack, but not onto output queue\n stack.pop()\n # If the token at the top of the stack is a function pop it off the stack and add to postfix\n if len(stack) > 0 and stack[-1] in Calculator.functions:\n postfix.append(stack.pop())\n # Finally pop all the operators off the stack onto postfix\n while len(stack)>0:\n # If the operator on the top of the stack is () then there are unmatched brackets\n if stack[-1] in '()':\n return \"Unmatched Error\"\n postfix.append(stack.pop())\n return postfix", "def postfixCalc(self,tokens):\n if len(tokens) == 0:\n return 0\n stack = []\n # while expr is not empty\n while len(tokens)>0:\n toke = tokens.pop(0)\n # if token is a number push it onto the stack\n if isFloat(toke):\n stack.append(float(toke))\n # if token is a special number push it onto the stack\n elif toke in 
Calculator.specialNumbers:\n stack.append(Calculator.specialNumbers[toke])\n else:\n # Operators take 2 inputs, functions take 1 input except root which takes 2\n if toke in Calculator.operators or toke == 'root':\n n = 2\n elif toke in Calculator.functions:\n n = 1\n # If the length of the stack is less than the required number of operators the user has not \n # input enough values.\n if len(stack)<n:\n return \"Too Few Error\"\n # Pop the top n numbers from the stack\n popedVals = []\n for i in range(n):\n popedVals.append(stack.pop())\n # Evaluate the operator using the number(s) that were popped, and push back onto the stack\n if n == 2 and toke in Calculator.operators:\n stack.append(Calculator.operators[toke][0](popedVals[1], popedVals[0]))\n elif n == 2:\n stack.append(Calculator.functions[toke](popedVals[1], popedVals[0]))\n elif n == 1:\n stack.append(Calculator.functions[toke](popedVals[0]))\n # If there is more than one value left on the stack the user has input too many values\n if len(stack) > 1:\n return \"Too Many Error\"\n # Return the value on the stack (should only be 1 value left)\n return stack[-1]", "def infix_to_postfix(expr):\n ops = Stack()\n postfix = []\n toks = expr.split()\n def tests(chr):\n if chr.isdigit():\n postfix.append(chr)\n\n elif chr == '(':\n ops.push('(')\n\n elif ops.peek() == '(' or ops.empty():\n ops.push(chr)\n\n elif chr ==')':\n while ops.peek() != \"(\":\n postfix.append(ops.pop())\n ops.pop()\n\n elif chr in prec and prec[chr] > prec[ops.peek()]:\n ops.push(chr)\n\n elif chr in prec and prec[chr] == prec[ops.peek()]:\n postfix.append(ops.pop())\n ops.push(chr)\n\n elif chr in prec and prec[chr] < prec[ops.peek()]:\n postfix.append(ops.pop())\n tests(chr)\n\n for tok in toks:\n tests(tok)\n\n\n while not ops.empty():\n postfix.append(ops.pop())\n\n\n return ' '.join(postfix)", "def infix_to_postfix(string_input):\n stack_ops = []\n output = []\n value = \"\"\n\n for item in string_input:\n # item = operator\n if item in ops_prec.keys():\n value = value_to_output(value, output)\n\n # pop elements while they have lower precedence\n while (stack_ops\n and stack_ops[-1] in ops_prec.keys()\n and ops_prec[item] <= ops_prec[stack_ops[-1]]):\n output.append(stack_ops.pop())\n # else put item on stack\n stack_ops.append(item)\n\n # subexpression, delay precedence\n elif item == '(':\n value = value_to_output(value, output)\n\n stack_ops.append(item)\n elif item == ')':\n value = value_to_output(value, output)\n\n # flush output until ( is reached on stack\n while (stack_ops and stack_ops[-1] != '('):\n output.append(stack_ops.pop())\n # remove '('\n stack_ops.pop()\n\n # value = operand\n else:\n # concatenation of value for multidigit ones\n value += item\n # output.append(item) # this would be for one digit\n\n # flush stack to output\n value = value_to_output(value, output)\n\n while stack_ops:\n output.append(stack_ops.pop())\n\n return output", "def postfix(self,Line):\r\n\r\n stak = []\r\n expression = []\r\n infix = []\r\n i=0\r\n while( i <(len(Line))):\r\n if (Line[i] == '(') or (Line[i] == '['):\r\n if len(stak) > 0:\r\n if (Line[i] == '[') and ((stak[len(stak) - 1] == \"lengthof\") or (stak[len(stak) - 1] == \"dup\") or (stak[len(stak) - 1] == \"sizeof\") or (stak[len(stak) - 1] == \"type\")):\r\n return False\r\n if len(stak) > 0:\r\n if (Line[i] == '(') and ((stak[len(stak) - 1] == \"lengthof\") or (stak[len(stak) - 1] == \"sizeof\")):\r\n return False\r\n if (len(stak) == 0) and (Line[i] == '('):\r\n return False\r\n 
stak.append(Line[i])\r\n elif (Line[i] == ')') or (Line[i] == ']'):\r\n if len(stak) == 0:\r\n return False\r\n\r\n j = len(stak) - 1\r\n while j >= 0:\r\n if (stak[j] == '(') and (Line[i] == ')'):\r\n break\r\n elif (stak[j] == '(') and (Line[i] == ']'):\r\n return False\r\n elif (stak[j] == '[') and (Line[i] == ')'):\r\n return False\r\n elif (stak[j] == '[') and (Line[i] == ']'):\r\n break\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n j = j - 1\r\n if j < 0:\r\n break\r\n\r\n stak = stak[:-1]\r\n if (len(stak) > 0) and (stak[stak.__len__() - 1] == 'dup'):\r\n expression.append(stak[stak.__len__() - 1])\r\n stak = stak[:-1]\r\n elif Line[i] == ',':\r\n if expression.__len__() == 0:\r\n return False\r\n if stak.__len__() != 0:\r\n j = stak.__len__() - 1\r\n while (j >= 0):\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n j = j - 1\r\n if (expression.__len__() > 0)and(expression!=[\"dup\"]):\r\n infix.append(expression)\r\n expression = []\r\n elif Line[i][0].isdecimal():\r\n if Line[i][len(Line[i]) - 1] == 'h':\r\n tmp = extra_functions.is_hexa(Line[i])\r\n if not tmp:\r\n return False\r\n expression.append(tmp)\r\n\r\n elif Line[i][len(Line[i]) - 1] == 'o':\r\n tmp = extra_functions.is_octa(Line[i])\r\n if not tmp:\r\n return False\r\n expression.append(tmp)\r\n elif Line[i][len(Line[i]) - 1] == 'b':\r\n tmp = extra_functions.is_binary(Line[i])\r\n if not tmp:\r\n return False\r\n expression.append(tmp)\r\n elif Line[i][len(Line[i]) - 1] == 'd':\r\n tmp = int(Line[i][:-1], 10)\r\n expression.append(tmp)\r\n elif Line[i].isdecimal():\r\n expression.append(int(Line[i]))\r\n else:\r\n return False\r\n elif (Line[i] == \"lengthof\") or (Line[i] == \"sizeof\") or (Line[i] == \"type\") or (Line[i] == \"dup\"):\r\n if (Line[i] == \"dup\"):\r\n if stak.__len__()>0:\r\n j = stak.__len__() - 1\r\n while (j >= 0):\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n j = j - 1\r\n S = []\r\n L = []\r\n i = 1 + i\r\n while (i < len(Line)):\r\n if (Line[i] == '(') or (Line[i] == '['):\r\n S.append(Line[i])\r\n elif (Line[i] == ')') or (Line[i] == ']'):\r\n if len(S) == 0:\r\n return False\r\n j = len(S) - 1\r\n while j >= 0:\r\n if (S[j] == '(') and (Line[i] == ')'):\r\n break\r\n elif (S[j] == '(') and (Line[i] == ']'):\r\n return False\r\n elif (S[j] == '[') and (Line[i] == ')'):\r\n return False\r\n elif (S[j] == '[') and (Line[i] == ']'):\r\n break\r\n S = S[:-1]\r\n j = j - 1\r\n if j < 0:\r\n break\r\n S = S[:-1]\r\n\r\n L.append(Line[i])\r\n if len(S) == 0:\r\n break\r\n i += 1\r\n if L.__len__() > 1:\r\n if (L[L.__len__() - 1] == ')') and (L[0] == '('):\r\n L = L[:-1]\r\n L = L[1:]\r\n else:\r\n return False\r\n else:\r\n return False\r\n tmp = self.postfix(L)\r\n i = i + 1\r\n if tmp != False:\r\n tmp1 = self.Calc_infix(expression)\r\n if tmp1 != False:\r\n for j in range(0, tmp1[0]):\r\n infix = infix + tmp\r\n else:\r\n return False\r\n else:\r\n return False\r\n expression=[\"dup\"]\r\n continue\r\n stak.append(Line[i])\r\n else:\r\n if (Line[i] == '*') | (Line[i] == '-') | (Line[i] == '/') | (Line[i] == '+'):\r\n if len(stak) > 0:\r\n j = len(stak) - 1\r\n while (j >= 0):\r\n if ((stak[j] == '+') | (stak[j] == '-')) & ((Line[i] == '+') | (Line[i] == '-')):\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n elif ((stak[j] == '+') | (stak[j] == '-')) & ((Line[i] == '*') | (Line[i] == '/')):\r\n break\r\n elif ((stak[j] == '*') | (stak[j] == '/')) & ((Line[i] == '*') | (Line[i] == '/')):\r\n\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n elif ((stak[j] == 
'*') | (stak[j] == '/')) & ((Line[i] == '+') | (Line[i] == '-')):\r\n\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n elif (stak[j] == 'dup') | (stak[j] == 'lengthof') | (stak[j] == 'type') | (stak[j] == 'sizeof'):\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n else:\r\n break\r\n j = j - 1\r\n stak.append(Line[i])\r\n else:\r\n expression.append(Line[i])\r\n i += 1\r\n\r\n j = len(stak) - 1\r\n while j >= 0:\r\n if (stak[j] == '(') or (stak[j] == '['):\r\n return False\r\n expression.append(stak[j])\r\n stak = stak[:-1]\r\n j = j - 1\r\n\r\n if (expression.__len__() > 0)and(expression!=[\"dup\"]):\r\n infix.append(expression)\r\n return infix", "def parse_operand(s, i):\n value = ''\n while (s[i] not in operators):\n value += s[i]\n i += 1\n if s[i] == ')':\n break\n return float(value), i-1", "def infix_to_postfix(expr):\n # you may find the following precedence dictionary useful\n prec = {'*': 2, '/': 2,\n '+': 1, '-': 1}\n ops = Stack()\n postfix = []\n toks = expr.split()\n ### BEGIN SOLUTION\n opp = {'*', '/','+', '-'}\n for x in toks:\n if str.isdigit(x):\n postfix.append(x)\n elif ops.empty() or ops.peek() == '(':\n ops.push(x)\n elif x == '(':\n ops.push(x)\n elif x == ')':\n while not ops.empty():\n temp = ops.pop()\n if temp == '(':\n break\n else:\n postfix.append(temp)\n elif x in opp:\n while True:\n if prec.get(x) > prec.get(ops.peek()):\n ops.push(x)\n break\n elif prec.get(x) == prec.get(ops.peek()):\n postfix.append(ops.pop())\n ops.push(x)\n break\n elif prec.get(x) < prec.get(ops.peek()):\n postfix.append(ops.pop())\n if ops.empty():\n ops.push(x)\n break\n elif ops.empty():\n break\n\n while True:\n if not ops.empty():\n postfix.append(ops.pop())\n else:\n break\n\n ### END SOLUTION\n return ' '.join(str(x) for x in postfix)", "def substitute(self,s,x):\r\n\t\t\r\n\t\t# turn substitution into top line\r\n\t\ttry:\r\n\t\t\tt = Li(s)\r\n\t\t\tb = Li(1)\r\n\t\t\t\r\n\t\t# unless it is a list of lines\r\n\t\texcept:\r\n\t\t\tt = Li(s[0])\r\n\t\t\tb = Li(s[1])\r\n\t\t\r\n\t\t# split variable from power\r\n\t\th = Te._chop(x)\r\n\t\tx = h[0]\r\n\t\t\r\n\t\t# assume power of 1 for substituted variable, but revise if found in string\r\n\t\tp = 1\r\n\t\ttry:\r\n\t\t\tp = int(h[1])\r\n\t\texcept:\r\n\t\t\tpass\r\n\t\t\r\n\t\t# exponents in each term\r\n\t\te = [i.look(x) for i in self]\r\n\t\t\r\n\t\t# adjust for power of substituted variable\r\n\t\te = [i // p for i in e]\r\n\t\t\r\n\t\t# max, min powers of substitution\r\n\t\ttry:\r\n\t\t\ta = max(e)\r\n\t\t\tm = min(e)\r\n\t\texcept:\r\n\t\t\ta = 0\r\n\t\t\tm = 0\r\n\t\t\r\n\t\t# truncate max and min powers \r\n\t\tif a < 0:\r\n\t\t\ta = 0\r\n\t\tif m > 0:\r\n\t\t\tm = 0\r\n\t\t\t\r\n\t\t# dictionaries of calculated terms for top and bottom\r\n\t\tf = {}\r\n\t\tg = {}\r\n\t\t\t\r\n\t\t# expand top and bottom to truncated max and min\r\n\t\tq,f = Li._expand(t,-m,f)\r\n\t\tr,g = Li._expand(b,a,g)\r\n\t\tq = Li(q,c=False)\r\n\t\tr = Li(r,c=False)\r\n\t\t\r\n\t\t# store results in dictionaries\r\n\t\ty = {-m: q}\r\n\t\tz = {a: r}\r\n\t\t\r\n\t\t# make denominator\r\n\t\td = q.multiply(r)\r\n\t\t\r\n\t\t# convert each term\r\n\t\tl = Li([])\r\n\t\tfor n,i in enumerate(self):\r\n\t\t\t\r\n\t\t\t# exponent of substitution\r\n\t\t\tw = e[n]\r\n\t\t\t\r\n\t\t\t# divide out variable\r\n\t\t\tv = Te({x: -w * p})\r\n\t\t\ti = i.multiply(v)\r\n\t\t\t\r\n\t\t\t# retrieve top expansion\r\n\t\t\tif (w - m) in y:\r\n\t\t\t\tu = y[w - m]\r\n\t\t\t\t\r\n\t\t\t# or calculate\r\n\t\t\telse:\r\n\t\t\t\tu,f = Li._expand(t,w - 
m,f)\r\n\t\t\t\tu = Li(u,c=False)\r\n\t\t\t\ty[w - m] = u\r\n\t\t\t\r\n\t\t\t# retrieve bottom expansion\r\n\t\t\tif (a - w) in z:\r\n\t\t\t\tc = z[a - w]\r\n\t\t\t\r\n\t\t\t# or calculate\r\n\t\t\telse:\r\n\t\t\t\tc,g = Li._expand(b,a - w,g)\r\n\t\t\t\tc = Li(c,c=False)\r\n\t\t\t\tz[a - w] = c\r\n\t\t\t\r\n\t\t\t# multiply and add\r\n\t\t\tu = u.multiply(c)\r\n\t\t\tu = u.multiply(i)\r\n\t\t\tl = l.add(u)\r\n\t\t\r\n\t\treturn [l,d]", "def evaluate(self, p: Posting) -> Union[str, None]:\n return self.eval_fn(p)" ]
[ "0.7593586", "0.67240876", "0.6409349", "0.6357244", "0.6297136", "0.6115302", "0.6042468", "0.59812856", "0.59177285", "0.5893105", "0.5851467", "0.58478415", "0.58040977", "0.5790237", "0.5779237", "0.5760268", "0.56968087", "0.5645114", "0.5584172", "0.5554141", "0.55407107", "0.5537361", "0.5532497", "0.54971623", "0.5495714", "0.5490111", "0.5458186", "0.54400355", "0.54349595", "0.5414102" ]
0.79264116
0
Provide tests for the request to retrieve all of a way's Notifications.
def test_get_all(self): expected_response = [ { 'id': 101, 'start_time': '2019-11-27', 'end_time': '2020-12-27', 'week_day': 1, 'time': datetime.time(1, 12, 38), 'way': 100 }, { 'id': 100, 'start_time': '2019-10-29', 'end_time': '2019-12-29', 'week_day': 6, 'time': datetime.time(23, 58, 59), 'way': 100 } ] url = reverse('notification', kwargs={'way_id': self.notification.way_id}) response = self.client.get(url) self.assertEqual(response.status_code, 200) self.assertJSONEqual( str(response.content, encoding='utf8'), json.dumps(expected_response, cls=DjangoJSONEncoder) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_one(self):\n expected_response = {\n 'id': 100,\n 'start_time': datetime.date(2019, 10, 29),\n 'end_time': datetime.date(2019, 12, 29),\n 'week_day': 6,\n 'time': datetime.time(23, 58, 59),\n 'way': 100\n }\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(\n json.dumps(expected_response, cls=DjangoJSONEncoder, sort_keys=True),\n json.loads(response.content)\n )", "def test_get_from_another_way(self):\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)", "def test_retrieve_notifications_list(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'The winners will be announced in 1 minute'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 15,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n new_notification_message_two = 'There is a problem with one score'\n new_notification_category_two = 'Error'\n post_response = create_notification(\n client,\n new_notification_message_two,\n 10,\n new_notification_category_two)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 2\n\n get_first_page_url = url_for('service.notificationlistresource', _external=True)\n get_first_page_response = client.get(\n get_first_page_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_first_page_response.status_code == HttpStatus.ok_200.value\n\n get_first_page_response_data = json.loads(\n get_first_page_response.get_data(as_text=True))\n assert get_first_page_response_data['count'] == 2\n assert get_first_page_response_data['previous'] is None\n assert get_first_page_response_data['next'] is None\n assert get_first_page_response_data['results'] is not None\n assert len(get_first_page_response_data['results']) == 2\n assert get_first_page_response_data['results'][0]['message'] == \\\n new_notification_message_one\n assert get_first_page_response_data['results'][1]['message'] == \\\n new_notification_message_two\n\n get_second_page_url = url_for('service.notificationlistresource', page=2)\n get_second_page_response = client.get(\n get_second_page_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_second_page_response.status_code == HttpStatus.ok_200.value\n\n get_second_page_response_data = json.loads(\n get_second_page_response.get_data(as_text=True))\n assert get_second_page_response_data['previous'] is not None\n assert get_second_page_response_data['previous'] == url_for(\n 'service.notificationlistresource', page=1)\n assert get_second_page_response_data['next'] is None\n assert get_second_page_response_data['results'] is not None\n assert len(get_second_page_response_data['results']) == 0", "def test_notification(self, mock):\n mock.register_uri(\n CONST_HTTP_METHOD_POST,\n pyflume.constants.URL_OAUTH_TOKEN,\n text=load_fixture(CONST_TOKEN_FILE),\n )\n mock.register_uri(\n \"get\",\n pyflume.constants.API_NOTIFICATIONS_URL.format(user_id=CONST_USER_ID),\n 
text=load_fixture(\"notification.json\"),\n )\n flume_auth = pyflume.FlumeAuth(\n CONST_USERNAME,\n CONST_PASSWORD,\n CONST_CLIENT_ID,\n CONST_CLIENT_SECRET,\n CONST_FLUME_TOKEN,\n )\n\n flume_notifications = pyflume.FlumeNotificationList(flume_auth)\n notifications = flume_notifications.get_notifications()\n assert len(notifications) == 1 # noqa: S101\n assert notifications[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next # noqa: S101\n\n mock.register_uri(\n \"get\",\n flume_notifications.next_page,\n text=load_fixture(\"notification_next.json\"),\n )\n\n notifications_next = flume_notifications.get_next_notifications()\n assert len(notifications_next) == 1 # noqa: S101\n assert notifications_next[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next is False # noqa: S101\n\n mock.register_uri(\n \"get\",\n pyflume.constants.API_NOTIFICATIONS_URL.format(user_id=CONST_USER_ID),\n text=load_fixture(\"notification_nopage.json\"),\n )\n\n notifications_nopage = flume_notifications.get_notifications()\n assert len(notifications_nopage) == 1 # noqa: S101\n assert notifications_nopage[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next is False # noqa: S101", "def test_meeting_poll_get(self):\n pass", "def test_tenants_tenant_id_notifications_get(self):\n pass", "def test_get(db, session): # pylint: disable=unused-argument\n # get from method for notf-user\n user_id = 'notf-user'\n method_res = Notification.get(user_id)\n query_res = get_user_notifications(session, user_id) # check if the results are really same\n assert len(query_res) == len(method_res)", "def test_create_and_retrieve_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message = 'Welcome to the eSports Competition'\n new_notification_category = 'Information'\n post_response = create_notification(\n client,\n new_notification_message,\n 15,\n new_notification_category)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n # The notification should have created a new notification category as well\n assert NotificationCategory.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n assert post_response_data['message'] == new_notification_message\n\n new_notification_url = post_response_data['url']\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['message'] == new_notification_message\n assert get_response_data['notification_category']['name'] == \\\n new_notification_category", "def getInfo(notification):", "def test_get_not_found(self):\n url = reverse('notification', kwargs={'way_id': 999, 'notification_id': 100})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 400)\n\n url = reverse('notification', kwargs={'way_id': 100, 'notification_id': 999})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 400)", "def test_get_all(self):\n expected_response = [\n {\n 'id': 100, 'time': '23:58:59', 'transport_name': '', 'position': 0,\n 'way': 100, 'start_place': 100, 'end_place': 200\n },\n {\n 'id': 101, 'time': '01:02:03', 
'transport_name': '', 'position': 1,\n 'way': 100, 'start_place': 100, 'end_place': 200\n }\n ]\n url = reverse('route', kwargs={'way_id': self.route.way_id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(json.dumps(expected_response), json.loads(response.content))", "def test_basic_fetch(client):\n\n res = client.get('/api/reminders')\n assert res.status_code == 200\n assert res.content_type == 'application/json'", "def test_update_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'Fortnite has a new winner'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 30,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n new_notification_url = post_response_data['url']\n new_displayed_times = 1\n data = {'displayed_times': new_displayed_times}\n patch_response = client.patch(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS),\n data=json.dumps(data))\n assert patch_response.status_code == HttpStatus.ok_200.value\n\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['displayed_times'] == new_displayed_times", "def test_get_notification_count(self):\n request = self.factory.get(\"\")\n request.user = self.local_user\n\n result = views.get_notification_count(request)\n self.assertIsInstance(result, JsonResponse)\n data = json.loads(result.getvalue())\n self.assertEqual(data[\"count\"], 0)\n\n models.Notification.objects.create(\n notification_type=\"BOOST\", user=self.local_user\n )\n result = views.get_notification_count(request)\n self.assertIsInstance(result, JsonResponse)\n data = json.loads(result.getvalue())\n self.assertEqual(data[\"count\"], 1)", "def test_notification_schedule(self):\n\n response = self.client.get(self.dashboard_url)\n self.assertEqual(response.status_code, 200)", "def test_notification_schedule(self):\n\n response = self.client.get(self.dashboard_url)\n self.assertEqual(response.status_code, 200)", "def setUp(self):\n signals.post_save.disconnect(create_notification_task, sender=Notification)\n signals.post_delete.disconnect(revoke_notification_task, sender=Notification)\n\n user = CustomUser.objects.create(id=100, email='[email protected]', is_active=True)\n user.set_password('testpassword')\n user.save()\n\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')\n\n way_first = Way.objects.create(id=100, user=user)\n way_second = Way.objects.create(id=101, user=user)\n\n Notification.objects.create(\n id=100,\n way=way_first,\n start_time=datetime.date(2019, 10, 29),\n end_time=datetime.date(2019, 12, 29),\n week_day=6,\n time=datetime.time(23, 58, 59)\n )\n\n Notification.objects.create(\n id=101,\n way=way_first,\n start_time=datetime.date(2019, 11, 27),\n end_time=datetime.date(2020, 12, 27),\n week_day=1,\n time=datetime.time(1, 12, 38)\n )\n\n Notification.objects.create(\n id=102,\n way=way_second,\n 
start_time=datetime.date(2019, 3, 11),\n end_time=datetime.date(2019, 7, 31),\n week_day=2,\n time=datetime.time(11, 28, 25)\n )\n\n self.notification = Notification.objects.get(id=100)\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')", "def test_get_all_topics(mock_send_message_json):\n assert OranDmaap.get_all_topics_url == f\"{BASE_URL}/topics/listAll\"", "def test_api_promotions_get(self):\n default_api = DefaultApi(api_client=self.api_client)\n params = dlrnapi_client.Promotion()\n path, method = default_api.api_promotions_get(params)\n self.assertEqual(path, '/api/promotions')\n self.assertEqual(method, 'GET')", "def test_list(self):\n self.client.force_authenticate(user=self.admin)\n\n response = self.client.get(\n reverse('retreat:waitqueuenotification-list'),\n format='json',\n )\n\n response_data = json.loads(response.content)\n\n content = {\n 'count': 1,\n 'next': None,\n 'previous': None,\n 'results': [{\n 'created_at': response_data['results'][0]['created_at'],\n 'id': self.wait_queue_notif.id,\n 'retreat':\n 'http://testserver/retreat/retreats/' +\n str(self.retreat.id),\n 'url': 'http://testserver/retreat/'\n 'wait_queue_notifications/' +\n str(self.wait_queue_notif.id),\n 'user': 'http://testserver/users/' + str(self.user2.id)\n }]\n }\n\n self.assertEqual(response_data, content)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def get_user_notifications(self, login):", "def test_get_subscriptions(self):\n pass", "def test_get_answers_to_log(self):\r\n requests = [\r\n {\"event\": \"my_event\", \"event_type\": \"my_event_type\", \"page\": \"my_page\"},\r\n {\"event\": \"{'json': 'object'}\", \"event_type\": unichr(512), \"page\": \"my_page\"}\r\n ]\r\n with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_SQL_TRACKING_LOGS': True}):\r\n for request_params in requests:\r\n response = self.client.get(reverse(cms_user_track), request_params)\r\n self.assertEqual(response.status_code, 204)", "def _get_notifications(self):\r\n student = self._student('GET')\r\n if student is None:\r\n self._error_response()\r\n\r\n else:\r\n self._success_response({\r\n 'student_sub_count': self.server.DUMMY_DATA['student_sub_count'],\r\n 'count_required': student.num_required,\r\n 'count_graded': student.num_graded,\r\n 'count_available': student.num_pending\r\n })", "def test_20_notification_url(self):\n cr, uid, group_pigs = self.cr, self.uid, self.group_pigs\n\n # Partner data\n partner_raoul = self.res_partner.browse(cr, uid, self.partner_raoul_id)\n partner_bert_id = self.res_partner.create(cr, uid, {'name': 'bert'})\n partner_bert = self.res_partner.browse(cr, uid, partner_bert_id)\n # Mail data\n mail_mail_id = self.mail_mail.create(cr, uid, {'state': 'exception'})\n mail = self.mail_mail.browse(cr, uid, mail_mail_id)\n\n # Test: link for nobody -> None\n url = self.mail_mail._get_partner_access_link(cr, uid, mail)\n self.assertEqual(url, None,\n 'notification email: mails not send to a specific partner should not have any URL')\n\n # Test: link for partner -> signup URL\n url = self.mail_mail._get_partner_access_link(cr, uid, mail, partner=partner_bert)\n self.assertIn(partner_bert.signup_url, url,\n 'notification email: mails send to a not-user partner should contain the signup URL')\n\n # Test: link for user -> signin\n url = self.mail_mail._get_partner_access_link(cr, uid, mail, partner=partner_raoul)\n self.assertIn('action=mail.action_mail_redirect', url,\n 'notification email: link should contain the 
redirect action')\n self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url,\n 'notification email: link should contain the user login')", "def test_get_note(self):\n pass", "def test_notify(self):\n # self.client.force_authenticate(user=self.admin)\n\n FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)\n\n # Old notification that will be deleted\n with mock.patch(\n 'django.utils.timezone.now', return_value=FIXED_TIME):\n WaitQueueNotification.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user = WaitQueue.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user2 = WaitQueue.objects.create(\n user=self.user2,\n retreat=self.retreat,\n )\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n # Assert that the wait queue index is updated\n # All users (2) are notified since there are more (4) reserved_seats\n self.assertEqual(\n self.retreat.next_user_notified,\n 2,\n \"next_user_notified index invalid\"\n )\n\n # Assert that only 2 reserved seats remain (since only 2 users are\n # waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 2,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 2 new notifications are created (2 users in wait_queue)\n # Assert that 2 old notification has been deleted (too old)\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count + 2 - 2,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n\n self.assertEqual(len(mail.outbox), 2)\n\n waiting_user.delete()\n waiting_user2.delete()", "def test_get_all_workout(self):\n response = self.client.open(\n '/workout',\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_get_answers_to_log(self):\r\n requests = [\r\n {\"event\": \"my_event\", \"event_type\": \"my_event_type\", \"page\": \"my_page\"},\r\n {\"event\": \"{'json': 'object'}\", \"event_type\": unichr(512), \"page\": \"my_page\"}\r\n ]\r\n with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_SQL_TRACKING_LOGS': True}):\r\n for request_params in requests:\r\n try: # because /event maps to two different views in lms and cms, we're only going to test lms here\r\n response = self.client.get(reverse(user_track), request_params)\r\n except NoReverseMatch:\r\n raise SkipTest()\r\n self.assertEqual(response.status_code, 200)\r\n self.assertEqual(response.content, 'success')\r\n tracking_logs = TrackingLog.objects.order_by('-dtcreated')\r\n log = tracking_logs[0]\r\n self.assertEqual(log.event, request_params[\"event\"])\r\n self.assertEqual(log.event_type, request_params[\"event_type\"])\r\n self.assertEqual(log.page, request_params[\"page\"])", "def send_notification_by_get(request, notification_type, phone_number_receiver, phone_number_sender):\n return try_to_send_notification(notification_type, phone_number_sender, phone_number_receiver)" ]
[ "0.6836307", "0.6532002", "0.6530318", "0.642929", "0.63247854", "0.6228781", "0.6221926", "0.6138092", "0.60473317", "0.5991408", "0.5955423", "0.5921455", "0.58701456", "0.5856785", "0.5830087", "0.5830087", "0.5800119", "0.5795136", "0.57847846", "0.5759199", "0.57577235", "0.5742655", "0.57394326", "0.5736472", "0.572826", "0.5722208", "0.5710624", "0.5704941", "0.5673057", "0.5672173" ]
0.7460129
0
Provide tests for the request to retrieve a non-owner Notification instance.
def test_get_non_owner(self): another_user = CustomUser(id=101, email='[email protected]', is_active=True) another_user.set_password('testpassword') another_user.save() self.client.login(email='[email protected]', password='testpassword') url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id}) response = self.client.get(url) self.assertEqual(response.status_code, 403)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_and_retrieve_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message = 'Welcome to the eSports Competition'\n new_notification_category = 'Information'\n post_response = create_notification(\n client,\n new_notification_message,\n 15,\n new_notification_category)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n # The notification should have created a new notification category as well\n assert NotificationCategory.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n assert post_response_data['message'] == new_notification_message\n\n new_notification_url = post_response_data['url']\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['message'] == new_notification_message\n assert get_response_data['notification_category']['name'] == \\\n new_notification_category", "def test_tenants_tenant_id_notifications_get(self):\n pass", "def test_get_single(db, session): # pylint: disable=unused-argument\n # add a notification for user\n user_id = 'notf-user'\n request_id = 225\n request_type = 'registration'\n request_status = 3\n message = 'this is a test notification'\n notification = Notification(user_id=user_id, request_id=request_id, request_type=request_type,\n request_status=request_status, message=message)\n notification.add()\n notification_id = get_notification_id(session, request_id)\n notification_data = Notification.get_single(notification_id)\n notification_data_raw = get_single_notification(session, notification_id)\n\n # verify results of both\n assert notification_data_raw.id == notification_data.id\n assert notification_data_raw.user_id == notification_data.user_id\n assert notification_data_raw.request_id == notification_data.request_id\n assert notification_data_raw.request_status == notification_data.request_status\n assert notification_data_raw.message == notification_data.message", "def inner_test(param: models.Notification):\n self.assertEqual(param, notif)", "def test_get(db, session): # pylint: disable=unused-argument\n # get from method for notf-user\n user_id = 'notf-user'\n method_res = Notification.get(user_id)\n query_res = get_user_notifications(session, user_id) # check if the results are really same\n assert len(query_res) == len(method_res)", "def test_post_non_owner(self):\n another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n\n self.client.login(email='[email protected]', password='testpassword')\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_registered_no_notifications(self):\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n 
self.app.no_reminders)", "def test_registered_no_notifications(self):\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.no_reminders)", "def test_get_notification_count(self):\n request = self.factory.get(\"\")\n request.user = self.local_user\n\n result = views.get_notification_count(request)\n self.assertIsInstance(result, JsonResponse)\n data = json.loads(result.getvalue())\n self.assertEqual(data[\"count\"], 0)\n\n models.Notification.objects.create(\n notification_type=\"BOOST\", user=self.local_user\n )\n result = views.get_notification_count(request)\n self.assertIsInstance(result, JsonResponse)\n data = json.loads(result.getvalue())\n self.assertEqual(data[\"count\"], 1)", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_get_from_another_way(self):\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_notify(self):\n # self.client.force_authenticate(user=self.admin)\n\n FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)\n\n # Old notification that will be deleted\n with mock.patch(\n 'django.utils.timezone.now', return_value=FIXED_TIME):\n WaitQueueNotification.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user = WaitQueue.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user2 = WaitQueue.objects.create(\n user=self.user2,\n retreat=self.retreat,\n )\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n # Assert that the wait queue index is updated\n # All users (2) are notified since there are more (4) reserved_seats\n self.assertEqual(\n self.retreat.next_user_notified,\n 2,\n \"next_user_notified index invalid\"\n )\n\n # Assert that only 2 reserved seats remain (since only 2 users are\n # waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 2,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 2 new notifications are created (2 users in wait_queue)\n # Assert that 2 old notification has been deleted (too 
old)\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count + 2 - 2,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n\n self.assertEqual(len(mail.outbox), 2)\n\n waiting_user.delete()\n waiting_user2.delete()", "def create_notification(self, notifying_href, notifying_action, notified_href, owner):\n if self.id == owner.id:\n return\n new_notification = Notification()\n new_notification.eid = make_uuid()\n new_notification.notifier = self\n new_notification.notifying_href = notifying_href\n new_notification.notifying_action = notifying_action\n new_notification.notified_href = notified_href\n new_notification.owner = owner\n new_notification.save()", "def test_notification(self, mock):\n mock.register_uri(\n CONST_HTTP_METHOD_POST,\n pyflume.constants.URL_OAUTH_TOKEN,\n text=load_fixture(CONST_TOKEN_FILE),\n )\n mock.register_uri(\n \"get\",\n pyflume.constants.API_NOTIFICATIONS_URL.format(user_id=CONST_USER_ID),\n text=load_fixture(\"notification.json\"),\n )\n flume_auth = pyflume.FlumeAuth(\n CONST_USERNAME,\n CONST_PASSWORD,\n CONST_CLIENT_ID,\n CONST_CLIENT_SECRET,\n CONST_FLUME_TOKEN,\n )\n\n flume_notifications = pyflume.FlumeNotificationList(flume_auth)\n notifications = flume_notifications.get_notifications()\n assert len(notifications) == 1 # noqa: S101\n assert notifications[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next # noqa: S101\n\n mock.register_uri(\n \"get\",\n flume_notifications.next_page,\n text=load_fixture(\"notification_next.json\"),\n )\n\n notifications_next = flume_notifications.get_next_notifications()\n assert len(notifications_next) == 1 # noqa: S101\n assert notifications_next[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next is False # noqa: S101\n\n mock.register_uri(\n \"get\",\n pyflume.constants.API_NOTIFICATIONS_URL.format(user_id=CONST_USER_ID),\n text=load_fixture(\"notification_nopage.json\"),\n )\n\n notifications_nopage = flume_notifications.get_notifications()\n assert len(notifications_nopage) == 1 # noqa: S101\n assert notifications_nopage[0][CONST_USER_ID] == 1111 # noqa: S101,WPS432\n assert flume_notifications.has_next is False # noqa: S101", "async def test_on_revocation_published_no_notify(\n profile: Profile, responder: MockResponder\n):\n mock_rec = mock.MagicMock()\n mock_rec.cred_rev_id = \"mock\"\n mock_rec.delete_record = mock.CoroutineMock()\n\n MockRec = mock.MagicMock()\n MockRec.query_by_rev_reg_id = mock.CoroutineMock(return_value=[mock_rec])\n\n topic = f\"{REVOCATION_EVENT_PREFIX}{REVOCATION_PUBLISHED_EVENT}::mock\"\n event = Event(topic, {\"rev_reg_id\": \"mock\", \"crids\": [\"mock\"]})\n\n assert isinstance(profile.settings, Settings)\n profile.settings[\"revocation.notify\"] = False\n\n with mock.patch.object(test_module, \"RevNotificationRecord\", MockRec):\n await test_module.on_revocation_published(profile, event)\n\n MockRec.query_by_rev_reg_id.assert_called_once()\n mock_rec.delete_record.assert_called_once()\n assert not responder.messages", "async def test_on_revocation_published_x_not_found(\n profile: Profile, responder: MockResponder\n):\n MockRec = mock.MagicMock()\n MockRec.query_by_rev_reg_id = mock.CoroutineMock(side_effect=StorageNotFoundError)\n\n topic = f\"{REVOCATION_EVENT_PREFIX}{REVOCATION_PUBLISHED_EVENT}::mock\"\n event = Event(topic, {\"rev_reg_id\": \"mock\", \"crids\": [\"mock\"]})\n\n with mock.patch.object(test_module, 
\"RevNotificationRecord\", MockRec):\n await test_module.on_revocation_published(profile, event)\n\n MockRec.query_by_rev_reg_id.assert_called_once()\n assert not responder.messages", "def test_update_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'Fortnite has a new winner'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 30,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n new_notification_url = post_response_data['url']\n new_displayed_times = 1\n data = {'displayed_times': new_displayed_times}\n patch_response = client.patch(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS),\n data=json.dumps(data))\n assert patch_response.status_code == HttpStatus.ok_200.value\n\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['displayed_times'] == new_displayed_times", "def notification(self, notification_id):\r\n return Notification(self, notification_id)", "def test_get_create_page(self):\n\n url = reverse('create-notification')\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_get_create_page(self):\n\n url = reverse('create-notification')\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_notify_user(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users([self.user_a], foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 1)", "def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)", "def get_notification():\n condition.acquire()\n if not notifications:\n ret = condition.wait(2)\n if not ret:\n condition.release()\n raise TimeoutError(\"Timed out while waiting for notification\")\n\n notice = notifications.pop(0)\n condition.release()\n return notice", "def test_put_non_owner(self):\n another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n\n self.client.login(email='[email protected]', password='testpassword')\n\n data = {\n 'week_day': 3\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_get_one(self):\n expected_response = {\n 'id': 100,\n 'start_time': datetime.date(2019, 10, 29),\n 'end_time': datetime.date(2019, 12, 29),\n 'week_day': 6,\n 'time': 
datetime.time(23, 58, 59),\n 'way': 100\n }\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(\n json.dumps(expected_response, cls=DjangoJSONEncoder, sort_keys=True),\n json.loads(response.content)\n )", "def get_notifications(self, from_id):\n def _filter_noop(_):\n return True\n\n return get_page(\n mongoengine_model=Notification,\n extra_query_args={\n 'owner': self\n },\n extra_filter_func=_filter_noop,\n from_id=from_id,\n page_count=NotificationPageSize\n )", "def setUp(self):\n signals.post_save.disconnect(create_notification_task, sender=Notification)\n signals.post_delete.disconnect(revoke_notification_task, sender=Notification)\n\n user = CustomUser.objects.create(id=100, email='[email protected]', is_active=True)\n user.set_password('testpassword')\n user.save()\n\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')\n\n way_first = Way.objects.create(id=100, user=user)\n way_second = Way.objects.create(id=101, user=user)\n\n Notification.objects.create(\n id=100,\n way=way_first,\n start_time=datetime.date(2019, 10, 29),\n end_time=datetime.date(2019, 12, 29),\n week_day=6,\n time=datetime.time(23, 58, 59)\n )\n\n Notification.objects.create(\n id=101,\n way=way_first,\n start_time=datetime.date(2019, 11, 27),\n end_time=datetime.date(2020, 12, 27),\n week_day=1,\n time=datetime.time(1, 12, 38)\n )\n\n Notification.objects.create(\n id=102,\n way=way_second,\n start_time=datetime.date(2019, 3, 11),\n end_time=datetime.date(2019, 7, 31),\n week_day=2,\n time=datetime.time(11, 28, 25)\n )\n\n self.notification = Notification.objects.get(id=100)\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')", "async def find_notification(db_session: Session, notification_id: int):\n notification = await NotificaitonCRUD.find_notification_by_id(db_session, notification_id=notification_id)\n return notification", "def test_exists(db, session): # pylint: disable=unused-argument\n # add a new notification\n user_id = 'notf-user'\n request_id = 224\n request_type = 'registration'\n request_status = 7\n message = 'this is a test notification'\n notification = Notification(user_id=user_id, request_id=request_id, request_type=request_type,\n request_status=request_status, message=message)\n notification.add()\n\n # get notification id\n notification_id = get_notification_id(session, request_id)\n notification_bool = Notification.exists(notification_id)\n\n # check if it really exists\n res = exists_notification(session, notification_id)\n assert res is notification_bool" ]
[ "0.64804983", "0.6367157", "0.6242152", "0.61259156", "0.60889626", "0.60715795", "0.5970538", "0.5970538", "0.5964811", "0.5955176", "0.59446895", "0.59405893", "0.59327626", "0.5912421", "0.5892791", "0.57778025", "0.5776532", "0.57747215", "0.5748287", "0.5742399", "0.5742399", "0.57101345", "0.57094824", "0.5709064", "0.56947565", "0.5694575", "0.5688542", "0.56863713", "0.5676713", "0.56599164" ]
0.668843
0
Provide tests for the request to retrieve a Notification instance with another `way_id`.
def test_get_from_another_way(self): url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id}) response = self.client.get(url) self.assertEqual(response.status_code, 403)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_one(self):\n expected_response = {\n 'id': 100,\n 'start_time': datetime.date(2019, 10, 29),\n 'end_time': datetime.date(2019, 12, 29),\n 'week_day': 6,\n 'time': datetime.time(23, 58, 59),\n 'way': 100\n }\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(\n json.dumps(expected_response, cls=DjangoJSONEncoder, sort_keys=True),\n json.loads(response.content)\n )", "def test_get_not_found(self):\n url = reverse('notification', kwargs={'way_id': 999, 'notification_id': 100})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 400)\n\n url = reverse('notification', kwargs={'way_id': 100, 'notification_id': 999})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 400)", "def test_post_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 908, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_delete_another_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)", "def test_delete_wrong_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 38987, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_put_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 543, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_delete_non_notification_id(self):\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id})\n response = self.client.delete(url)\n self.assertEqual(response.status_code, 400)", "def setUp(self):\n signals.post_save.disconnect(create_notification_task, sender=Notification)\n signals.post_delete.disconnect(revoke_notification_task, sender=Notification)\n\n user = CustomUser.objects.create(id=100, email='[email protected]', is_active=True)\n user.set_password('testpassword')\n user.save()\n\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')\n\n way_first = Way.objects.create(id=100, user=user)\n way_second = Way.objects.create(id=101, user=user)\n\n Notification.objects.create(\n id=100,\n way=way_first,\n start_time=datetime.date(2019, 10, 29),\n end_time=datetime.date(2019, 12, 29),\n week_day=6,\n time=datetime.time(23, 58, 59)\n )\n\n Notification.objects.create(\n id=101,\n way=way_first,\n start_time=datetime.date(2019, 11, 27),\n end_time=datetime.date(2020, 12, 27),\n week_day=1,\n time=datetime.time(1, 12, 38)\n )\n\n Notification.objects.create(\n id=102,\n way=way_second,\n start_time=datetime.date(2019, 3, 11),\n end_time=datetime.date(2019, 7, 31),\n week_day=2,\n time=datetime.time(11, 28, 25)\n )\n\n self.notification = Notification.objects.get(id=100)\n self.client = Client()\n 
self.client.login(email='[email protected]', password='testpassword')", "def test_get_all(self):\n expected_response = [\n {\n 'id': 101,\n 'start_time': '2019-11-27',\n 'end_time': '2020-12-27',\n 'week_day': 1,\n 'time': datetime.time(1, 12, 38),\n 'way': 100\n },\n {\n 'id': 100,\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': datetime.time(23, 58, 59),\n 'way': 100\n }\n ]\n url = reverse('notification', kwargs={'way_id': self.notification.way_id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(\n str(response.content, encoding='utf8'),\n json.dumps(expected_response, cls=DjangoJSONEncoder)\n )", "def test_put_from_another_way(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_delete_wrong_notification_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_tenants_tenant_id_notifications_get(self):\n pass", "def test_get_single(db, session): # pylint: disable=unused-argument\n # add a notification for user\n user_id = 'notf-user'\n request_id = 225\n request_type = 'registration'\n request_status = 3\n message = 'this is a test notification'\n notification = Notification(user_id=user_id, request_id=request_id, request_type=request_type,\n request_status=request_status, message=message)\n notification.add()\n notification_id = get_notification_id(session, request_id)\n notification_data = Notification.get_single(notification_id)\n notification_data_raw = get_single_notification(session, notification_id)\n\n # verify results of both\n assert notification_data_raw.id == notification_data.id\n assert notification_data_raw.user_id == notification_data.user_id\n assert notification_data_raw.request_id == notification_data.request_id\n assert notification_data_raw.request_status == notification_data.request_status\n assert notification_data_raw.message == notification_data.message", "def test_put_wrong_notification_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 6778})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def inner_test(param: models.Notification):\n self.assertEqual(param, notif)", "def test_get_from_another_way(self):\n url = reverse('route', kwargs={'way_id': 101, 'route_id': self.route.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_get_specific_by_id(self):\n token = self.get_token()\n self.client.post('/api/v2/party', data=self.add_party,\n headers=dict(Authorization=\"Bearer \" + token),\n content_type='application/json')\n response = 
self.client.get('/api/v2/party/1',\n headers=dict(Authorization=\"Bearer \" + token),\n content_type='application/json',\n )\n self.assertEqual(response.status_code, 200)", "def test_retrieving_incident(self):\n resp = self.client.get(\n reverse('incident', kwargs={\n 'team_id': '7de98e0c-8bf9-414c-b397-05acb136935e', 'incident_id': '96e3d488-52b8-4b86-906e-8bc5b3b7504b'\n }),\n )\n\n self.assertEqual(resp.status_code, 200)\n self.assertEqual(resp.json(), {\n 'actionable': True,\n 'annotation': {\n 'annotation': 'Testing Annotation',\n 'created_at': self.creation_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),\n 'created_by': self.user.username\n },\n 'created_at': self.creation_time.strftime('%Y-%m-%dT%H:%M:%SZ'),\n 'description': 'Down Master DB',\n 'id': '96e3d488-52b8-4b86-906e-8bc5b3b7504b',\n 'incident_id': 'PIJK3SJ',\n 'status': 'triggered',\n 'summary': 'Down Master DB',\n 'title': 'Down Master DB',\n 'urgency': 'high'\n })", "def test_get__gate_some(self):\n testing_config.sign_out()\n self.vote_1_1.put() # Found.\n self.vote_2_1.put() # On a different gate.\n\n with test_app.test_request_context(self.request_path + '/1'):\n actual_response = self.handler.do_get(\n feature_id=self.feature_id, gate_id=self.gate_1_id)\n\n self.assertEqual({'votes': [self.vote_expected1]}, actual_response)", "def test_get_one(self):\n expected_response = {\n \"id\": 100, \"time\": \"23:58:59\", \"transport_name\": '', \"position\": 0,\n \"way\": 100, \"end_place\": 200, \"start_place\": 100\n }\n url = reverse('route', kwargs={'way_id': self.route.way_id, 'route_id': self.route.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertJSONEqual(json.dumps(expected_response), json.loads(response.content))", "def test_create_and_retrieve_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message = 'Welcome to the eSports Competition'\n new_notification_category = 'Information'\n post_response = create_notification(\n client,\n new_notification_message,\n 15,\n new_notification_category)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n # The notification should have created a new notification category as well\n assert NotificationCategory.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n assert post_response_data['message'] == new_notification_message\n\n new_notification_url = post_response_data['url']\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['message'] == new_notification_message\n assert get_response_data['notification_category']['name'] == \\\n new_notification_category", "def test_get_interest_by_id_no_xis(self):\n id = self.list_1.pk\n url = reverse('xds_api:interest-list', args=(id,))\n\n with patch('xds_api.views.get_request') as get_request, \\\n patch('xds_api.views.XDSConfiguration.objects') as conf_obj:\n conf_obj.return_value = conf_obj\n conf_obj.first.return_value = \\\n XDSConfiguration(target_xis_metadata_api=\"www.test.com\")\n http_resp = get_request.return_value\n get_request.return_value = http_resp\n http_resp.json.return_value = [{\n \"test\": \"value\"\n }]\n 
http_resp.status_code = 500\n response = self.client.get(url)\n\n self.assertEqual(response.status_code,\n status.HTTP_503_SERVICE_UNAVAILABLE)", "def test_get(db, session): # pylint: disable=unused-argument\n # get from method for notf-user\n user_id = 'notf-user'\n method_res = Notification.get(user_id)\n query_res = get_user_notifications(session, user_id) # check if the results are really same\n assert len(query_res) == len(method_res)", "async def test_async_supports_notification_id(hass: HomeAssistant) -> None:\n await async_setup_component(hass, pn.DOMAIN, {\"core\": {}})\n await async_setup_component(hass, notify.DOMAIN, {})\n await hass.async_block_till_done()\n\n message = {\n \"message\": \"Hello\",\n \"title\": \"Test notification\",\n \"data\": {\"notification_id\": \"my_id\"},\n }\n await hass.services.async_call(\n notify.DOMAIN, notify.SERVICE_PERSISTENT_NOTIFICATION, message\n )\n await hass.async_block_till_done()\n\n notifications = async_get_persistent_notifications(hass)\n assert len(notifications) == 1\n\n # Send second message with same ID\n\n message = {\n \"message\": \"Goodbye\",\n \"title\": \"Notification was updated\",\n \"data\": {\"notification_id\": \"my_id\"},\n }\n await hass.services.async_call(\n notify.DOMAIN, notify.SERVICE_PERSISTENT_NOTIFICATION, message\n )\n await hass.async_block_till_done()\n\n notifications = async_get_persistent_notifications(hass)\n assert len(notifications) == 1\n\n notification = notifications[list(notifications)[0]]\n assert notification[\"message\"] == \"Goodbye\"\n assert notification[\"title\"] == \"Notification was updated\"", "def test_christiandoctrines_id_get(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/christiandoctrines/{id}'.format(id='id_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_002_happy_path_get_conveyancer_by_conveyancer_id(self, mock_db_query):\n mock_db_query.get.return_value = conveyancer\n resp = self.app.get('/v1/conveyancers/1', headers={'accept': 'application/json'})\n self.assertEqual(resp.status_code, 200)\n self.assertEqual(resp.json['x500'], conveyancer_x500_dict)\n self.assertEqual(resp.json['company_name'], \"ConveyIt\")", "def test_post_success(self):\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n expected_data = {\n 'way': 100,\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n response_dict = json.loads(response.content)\n response_dict.pop('id')\n self.assertEqual(response.status_code, 201)\n self.assertDictEqual(response_dict, expected_data)", "def test_db_creating_post(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.views.Notification.create') as notification_create:\n notification_create.return_value = False\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n\n self.assertEqual(response.status_code, 400)", "def 
test_registered_with_notification_and_pin(self):\n now = datetime.datetime.now()\n self.contact.pin = '1234'\n self.contact.save()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1234')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')" ]
[ "0.70815724", "0.6331676", "0.6276467", "0.6243297", "0.6123713", "0.601003", "0.5946206", "0.59328985", "0.58851916", "0.58831716", "0.5841439", "0.58281595", "0.5776078", "0.5759275", "0.5752853", "0.5731845", "0.5693253", "0.5669554", "0.5616581", "0.5574464", "0.5537879", "0.550399", "0.5465689", "0.54624957", "0.5371642", "0.5350521", "0.5319954", "0.5316928", "0.53094786", "0.53044385" ]
0.70245445
1
Method that tests a successful POST request for creating a notification.
def test_post_success(self): data = { 'start_time': '2019-10-29', 'end_time': '2019-12-29', 'week_day': 6, 'time': '23:58:59' } expected_data = { 'way': 100, 'start_time': '2019-10-29', 'end_time': '2019-12-29', 'week_day': 6, 'time': '23:58:59' } url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id}) response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json') response_dict = json.loads(response.content) response_dict.pop('id') self.assertEqual(response.status_code, 201) self.assertDictEqual(response_dict, expected_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_successfully_create_notifications(self):\n pre_function_notifications = Notification.objects.all()\n self.assertEqual(len(pre_function_notifications), 0)\n\n create_notification(\n user=self.user_with_targets,\n title=\"Hi.\",\n body=\"Hello there, friend.\")\n\n post_function_notifications = Notification.objects.all()\n self.assertEqual(len(post_function_notifications), 1)", "def test_db_creating_post(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.views.Notification.create') as notification_create:\n notification_create.return_value = False\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n\n self.assertEqual(response.status_code, 400)", "def post_notification():\n try:\n data = json.loads(request.data)\n notification = Notification(**data).save()\n return jsonify({'data': notification}), 201\n except BaseException as e:\n print(e)\n return e, 400", "def test_create_notification(self):\n\n start_count = reminders.Notification.objects.count()\n url = reverse('create-notification')\n data = self.get_valid_data()\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count + 1)", "def test_create_notification(self):\n\n start_count = reminders.Notification.objects.count()\n url = reverse('create-notification')\n data = self.get_valid_data()\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count + 1)", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_create_valid_submission(self):\n with self.client:\n # valid submission registration\n sub_response = register_ok_submission(self, self.token)\n response_data = json.loads(sub_response.data.decode())\n self.assertTrue(response_data['status']=='success')", "def test_post_success_response(self):\n sender, recipient = UserFactory(), UserFactory()\n\n data = {\n 'senderId': 
sender.id,\n 'recipientId': recipient.id,\n 'text': 'Hello World!',\n }\n\n response = self.client.post(\n reverse('messages:list'),\n content_type='application/json',\n data=data,\n )\n actual_data = json.loads(response.content)\n\n self.assertEqual(201, response.status_code)\n self.assertEqual(data['senderId'], actual_data['sender']['id'])\n self.assertEqual(data['recipientId'], actual_data['recipient']['id'])\n self.assertEqual(data['text'], actual_data['text'])", "def test_create_and_retrieve_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message = 'Welcome to the eSports Competition'\n new_notification_category = 'Information'\n post_response = create_notification(\n client,\n new_notification_message,\n 15,\n new_notification_category)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n # The notification should have created a new notification category as well\n assert NotificationCategory.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n assert post_response_data['message'] == new_notification_message\n\n new_notification_url = post_response_data['url']\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['message'] == new_notification_message\n assert get_response_data['notification_category']['name'] == \\\n new_notification_category", "def post(self):\n try:\n data = request.get_json()\n notification = self.notification_serializer.load(data)\n except ValidationError as err:\n return InvalidRequest(err).to_rest()\n\n # Create a new Hello Message with this model\n try:\n return self.notification_service.create_new_notification(notification).to_rest()\n except BaseApiException as internal_err:\n self.logger.error(f'Notification creation failed with the error: {internal_err}')\n return internal_err.to_rest()\n except Exception as general_err:\n self.logger.error(f'Notification creation failed with the error: {general_err}')\n return BaseApiException.from_exception(general_err).to_rest()", "def test_post_internal_attendance_success(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-04T10:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-04T11:30'}\n response = self.client.post(\n reverse('tracking:internal-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n success_message = str(messages[0])\n self.assertEqual(success_message, 'Registro realizado exitosamente')", "def test_registered_with_notification_and_pin(self):\n now = datetime.datetime.now()\n self.contact.pin = '1234'\n self.contact.save()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1234')\n 
self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_registered_with_notification_and_pin(self):\n now = datetime.datetime.now()\n self.contact.pin = '1234'\n self.contact.save()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1234')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_create_success(self, mock_post):\n self.policies.create(\n name=self.policy_single_response['policy']['name'],\n incident_preference=self.policy_single_response['policy']['incident_preference']\n )\n\n mock_post.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies.json',\n headers=self.policies.headers,\n data=json.dumps({\n \"policy\": {\n \"name\": self.policy_single_response['policy']['name'],\n \"incident_preference\": self.policy_single_response['policy']['incident_preference']\n }\n })\n )", "def test_create_duplicated_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message = 'Welcome to the 4th eSports Competition'\n new_notification_category = 'Information'\n post_response = create_notification(\n client,\n new_notification_message,\n 25,\n new_notification_category)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n assert post_response_data['message'] == new_notification_message\n\n new_notification_url = post_response_data['url']\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['message'] == new_notification_message\n assert get_response_data['notification_category']['name'] == \\\n new_notification_category\n\n second_post_response = create_notification(\n client,\n new_notification_message,\n 15,\n new_notification_category)\n assert second_post_response.status_code == HttpStatus.bad_request_400.value\n assert Notification.query.count() == 1", "def test_create_multiple(self):\n contact = Contact.objects.first()\n sales_cycle = contact.sales_cycles.first()\n valid_data = [{'sales_cycle_id':sales_cycle.id, 'description':'test message', 'contact_id': contact.id}]\n url, parsed = self.prepare_urls('v1:activity-create-multiple', subdomain=self.company.subdomain)\n \n response = self.client.post(url, valid_data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n self.authenticate_user()\n response = self.client.post(url, valid_data, HTTP_HOST=parsed.netloc, format='json')\n self.assertEqual(response.status_code, 
status.HTTP_200_OK)\n\n content = json.loads(response.content)\n self.assertTrue(content.has_key('notification'))", "def test_create(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.post(\n reverse(\n 'retreat:waitqueuenotification-list',\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_create_successful(self):\n self.webkom.add_user(self.disallowed_user)\n request = self.factory.post(\"/permissiontest/\", self.test_update_object)\n force_authenticate(request, self.disallowed_user)\n view = TestViewSet.as_view({\"post\": \"create\"})\n\n response = view(request)\n created = response.data\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(created[\"name\"], self.test_update_object[\"name\"])", "def test_success_on_post(self, mock_create, mock_msg_mgr):\n\n url = '/%s/jobs/' % self.api\n\n User.objects.create_superuser(username='test', email='[email protected]', password='password')\n\n self.client.login(username='test', password='password',)\n response = self.client.post(url, data=self.json_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)\n\n result = json.loads(response.content)\n\n # Response should be new v6 job detail response\n self.assertEqual(result['execution'], None)\n self.assertTrue('/%s/jobs/' % self.api in response['location'])", "def test_notification_CreateProjectAndUser(self):\n setup_identity_cache()\n\n url = \"/v1/actions/CreateProjectAndUser\"\n data = {\"project_name\": \"test_project\", \"email\": \"[email protected]\"}\n response = self.client.post(url, data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)\n\n new_task = Task.objects.all()[0]\n\n headers = {\n \"project_name\": \"test_project\",\n \"project_id\": \"test_project_id\",\n \"roles\": \"admin,member\",\n \"username\": \"[email protected]\",\n \"user_id\": \"test_user_id\",\n \"authenticated\": True,\n }\n\n url = \"/v1/notifications\"\n response = self.client.get(url, headers=headers)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.json()[\"notifications\"][0][\"task\"], new_task.uuid)", "def test_api_can_create_a_post(self):\n self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)", "def test_post_external_attendance_successfull(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-03T19:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-03T20:00'}\n response = self.client.post(\n reverse('tracking:external-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n success_message = str(messages[0])\n self.assertEqual(success_message, 'Registro realizado exitosamente')", "def test_post_success_creates_message(self):\n sender, recipient = UserFactory(), UserFactory()\n\n data = {\n 'senderId': sender.id,\n 'recipientId': recipient.id,\n 'text': 'Hello World!',\n }\n\n response = self.client.post(\n reverse('messages:list'),\n content_type='application/json',\n 
data=data,\n )\n actual_message = Message.objects.get()\n self.assertEqual(sender.id, actual_message.sender.id)\n self.assertEqual(recipient.id, actual_message.recipient.id)\n self.assertEqual(data['text'], actual_message.text)", "def test_post_creation_success(self):\n url = reverse('post-list', args=[self.topic1.url_name])\n payload = {\n 'author': self.user1.id,\n 'title': 'Creating a post',\n 'content': 'Rich content 4',\n }\n self.client.credentials(\n HTTP_AUTHORIZATION = 'Token ' + self.user1.auth_token.key\n )\n response = self.client.post(url, payload)\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n new_post = Post.objects.filter(\n author=self.user1,\n title=payload.get('title'),\n content=payload.get('content'),\n topic=self.topic1\n )\n self.assertTrue(new_post.exists())", "def post(self):\n\n\t\treturn MessageStore.create(api.payload), 201", "def test_payload_patient_creation(self):\n self._authorize()\n data = {\n 'Subject_Number': '000-1111',\n 'Pin_Code': '1234',\n 'Date_Enrolled': datetime.datetime.now().strftime('%b %d %Y '),\n 'Mobile_Number': '12223334444',\n }\n patient = self.create_xml_patient(data)\n payload = self.create_xml_payload([patient])\n response = self._post(payload)\n self.assertEqual(response.status_code, 200)\n patients = reminders.Patient.objects.all()\n self.assertEqual(patients.count(), 1)", "def test_put_success(self):\n\n data = {\n 'time': '23:58:53'\n }\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 200)", "def test_topic_notification_create_has_access(self):\n TopicNotification.objects.all().delete()\n private = utils.create_private_topic(user=self.user)\n utils.create_comment(topic=private.topic)\n\n utils.login(self)\n form_data = {'is_active': True, }\n response = self.client.post(\n reverse(\n 'spirit:topic:notification:create',\n kwargs={'topic_id': private.topic.pk, }),\n form_data)\n self.assertRedirects(\n response, private.topic.get_absolute_url(), status_code=302)\n self.assertEqual(len(TopicNotification.objects.all()), 1)", "def test_post_empty_json(self):\n\n data = {}\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)" ]
[ "0.7629735", "0.7618578", "0.7364911", "0.7363563", "0.7363563", "0.7052963", "0.7042657", "0.69984055", "0.6988693", "0.6985212", "0.6984905", "0.68905956", "0.6883018", "0.68676347", "0.6836012", "0.6821067", "0.67853075", "0.6779246", "0.6734471", "0.6721667", "0.66970336", "0.66952074", "0.6679394", "0.6679207", "0.6676625", "0.6672355", "0.66686857", "0.66489226", "0.66428095", "0.663915" ]
0.7656309
0
Method that tests an unsuccessful POST request for creating a notification with invalid data.
def test_post_invalid_data(self):
    data = {
        'week_day': 'd',
        'time': 'd'
    }
    url = reverse('notification',
                  kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})
    response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    self.assertEqual(response.status_code, 400)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_invalid_submission(self):\n with self.client:\n # invalid submission registration\n sub_response = register_illegal_submission(self, self.token)\n response_data = json.loads(sub_response.data.decode())\n self.assertTrue(response_data['errors']!=None)", "def test_db_creating_post(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.views.Notification.create') as notification_create:\n notification_create.return_value = False\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n\n self.assertEqual(response.status_code, 400)", "def post_notification():\n try:\n data = json.loads(request.data)\n notification = Notification(**data).save()\n return jsonify({'data': notification}), 201\n except BaseException as e:\n print(e)\n return e, 400", "def test_post_invalid(self):\n sender = UserFactory()\n data = {\n 'senderId': sender.id,\n 'recipientId': 999,\n 'text': '...'\n }\n\n response = self.client.post(\n reverse('messages:list'),\n content_type='application/json',\n data=data,\n )\n self.assertEqual(400, response.status_code)", "def test_post_empty_json(self):\n\n data = {}\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def testMissingData(self):\n data = {\n \"title\": \"Example Post\",\n }\n\n response = self.client.post(\"/api/posts\",\n data=json.dumps(data),\n content_type=\"application/json\",\n headers=[(\"Accept\", \"application/json\")]\n )\n\n self.assertEqual(response.status_code, 422)\n data = json.loads(response.data)\n self.assertEqual(data[\"message\"], \"'rent' is a required property\")", "def test_not_created_with_invalid(self):\n payload = {'name': ''}\n res = self.client.post(TAGS_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)", "def test_post_invalid(self):\n self.post_data['name'] = ''\n response = self._post()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('object' in response.context)\n self.assertEquals(response.context['object'], self.obj)\n self.assertTrue('form' in response.context)\n self.assertTrue(response.context['form'].is_bound)\n self.assertFalse(response.context['form'].is_valid())\n self.assertEquals(response.context['form'].instance, self.obj)\n self._assert_no_change()", "def test_post_invalid(self):\n self.post_data['name'] = ''\n response = self._post()\n self.assertEquals(self.model.objects.count(), 0)\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('form' in response.context)\n self.assertTrue(response.context['form'].is_bound)\n self.assertFalse(response.context['form'].is_valid())", "def test_post_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 908, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n 
self.assertEqual(response.status_code, 400)", "def test_post_empty_data(self):\n response = self.app.post('/_ah/push-handlers/receive_message')\n self.assertEqual(response.status_int, 200)\n self.assertEqual(response.body, \"No request body received\")\n self.assertRaises(ValueError)", "def test_post_internal_attendance_fail_invalid_email(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-04T10:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-04T11:30'}\n response = self.client.post(\n reverse('tracking:internal-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n error_message = str(messages[0])\n invalid_email_error_msg = ('El correo electrónico no es válido. Si no cuentas con un correo de la Anáhuac, '\n 'usa el registro para invitados')\n self.assertEqual(error_message, invalid_email_error_msg)", "def testInvalidData(self):\n data = {\n \"title\": 32,\n \"rent\": 700\n }\n\n response = self.client.post(\"/api/posts\",\n data=json.dumps(data),\n content_type=\"application/json\",\n headers=[(\"Accept\", \"application/json\")]\n )\n\n self.assertEqual(response.status_code, 422)\n\n data = json.loads(response.data)\n\n self.assertEqual(data[\"message\"], \"32 is not of type 'string'\")", "def test_new_create_resgate_failed(self):\n payload = {\n 'value': '',\n 'user': self.user\n }\n\n response = self.client.post(RESGATE_URL, payload)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_uptimerobot_invalid_payload_with_missing_data(self) -> None:\n self.url = self.build_webhook_url()\n payload = self.get_body(\"uptimerobot_invalid_payload_with_missing_data\")\n result = self.client_post(self.url, payload, content_type=\"application/json\")\n self.assert_json_error(result, \"Invalid payload\")\n\n expected_message = MISCONFIGURED_PAYLOAD_ERROR_MESSAGE.format(\n bot_name=self.test_user.full_name,\n support_email=FromAddress.SUPPORT,\n ).strip()\n\n msg = self.get_last_message()\n self.assertEqual(msg.content, expected_message)\n self.assertEqual(msg.recipient.type, Recipient.PERSONAL)", "def test_invalid_registration(self):\n self.response = self.client.post(\n \"/api/users/\",\n self.invalid_reg_data,\n format=\"json\")\n self.assertEqual(self.response.status_code,\n status.HTTP_400_BAD_REQUEST)", "def test_validate_post(client):\n response = client.post(\n '/user/',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': '[email protected]',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def test_validate_post_existing_resource(client):\n response = client.post(\n '/user/',\n data=json.dumps({\n 'name': 'Jeff Knupp',\n 'email': '[email protected]',\n }),\n headers={'Content-Type': 'application/json'}\n )\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def test_post_non_owner(self):\n another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n\n self.client.login(email='[email protected]', password='testpassword')\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 
'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_invalid_patient_field(self):\n self._authorize()\n patient = self.create_xml_patient({'Mobile_Number': 'invalid'})\n payload = self.create_xml_payload([patient])\n response = self._post(payload)\n self.assertEqual(response.status_code, 500)", "def test_incorrect_token_post(self): \n request = self.build_request(token=\"incorrect_token\")\n response = self.app.post('/_ah/push-handlers/receive_message',json.dumps(request).encode('utf-8'),content_type=\"application/json\")\n self.assertEqual(response.status_int, 200)\n self.assertRaises(ValueError)", "def test_422_failure_create_new_quetion(self):\n res = self.client().post('/questions')\n\n data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 422)\n self.assertEqual(data['success'], False)\n self.assertEqual(data['error'], 422)\n self.assertEqual(data['message'], \"unprocessable\")", "def test_post_invalid_data_question(self):\n\n response = self.post_question(self.invalid_question)\n\n self.assertEqual(response.status_code, 400)", "def test_post_comment_to_task_chat_using_an_invalid_request_fails(self):\n response = self.client.post(\n self.endpoint_url,\n headers={\"Authorization\": self.test_author_token},\n json={\"message\": TEST_MESSAGE},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response_body[\"Error\"], \"Unable to add comment\")\n self.assertEqual(response_body[\"SubCode\"], \"InvalidData\")", "def test_registration_view_post_failure(self):\n response = self.client.post(reverse('rdef_web:user_register'),\n data={'username': 'bob',\n 'email1': '[email protected]',\n 'email2': '[email protected]'})\n self.assertEqual(response.status_code, 200)\n self.failIf(response.context['msg'] == 'SUCCESS')\n self.assertEqual(len(mail.outbox), 0)", "def test_add_user_invalid_payload(self):\n with self.client:\n auth_headers = login_test_user(self.client)\n response = self.client.post('/users',\n data = json.dumps(dict()),\n content_type='application/json',\n headers = auth_headers\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertIn('Invalid payload', data['message'])\n self.assertIn('fail', data['status'])", "def test_update_issue_with_invalid_request_data_fails(self):\n response = self.client.post(\n self.url,\n headers={\"Authorization\": self.test_user_token},\n json={\"issue_description\": TEST_ISSUE_DESCRIPTION, \"issue_name\": \"\"},\n )\n response_json = response.get_json()\n self.assertEqual(response.status_code, 400)\n self.assertEqual(\n response_json[\"Error\"], \"Unable to create a new mapping issue category\"\n )\n self.assertEqual(response_json[\"SubCode\"], \"InvalidData\")", "def test_post_error_parameters(self):\n data_github = {\"version_control\": \"github\", \"scm_commit\": \"AA\", \"oper\": \"AA\", \"hcnarb\": \"AA\", \"enabled\": \"AA\"}\n data_git = {\"version_control\": \"git\", \"scm_commit\": \"AA\", \"oper\": \"AA\", \"hcnarb\": \"AA\", \"enabled\": \"AA\"}\n\n for data in [data_git, data_github]:\n resp = self.client.post(\"/tracking\", json=data, content_type=\"application/json\", headers=self.auth)\n resp_dict = json.loads(resp.data)\n self.assertIn(\"code\", 
resp_dict, msg=\"Error in data format return\")\n self.assertEqual(\n ResponseCode.INPUT_PARAMETERS_ERROR, resp_dict.get(\"code\"), msg=\"Error in status code return\"\n )\n\n self.assertIn(\"msg\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(\n ResponseCode.CODE_MSG_MAP.get(ResponseCode.INPUT_PARAMETERS_ERROR),\n resp_dict.get(\"msg\"),\n msg=\"Error in status code return\"\n )\n\n self.assertIn(\"data\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(resp_dict.get(\"data\"), None, msg=\"Error in data information return\")", "def test_create_non_effective(self):\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(self.valid_payload),\n content_type='application/json'\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_email_sent_on_failure(self):\n self._authorize()\n data = {\n 'Subject_Number': '000-1111',\n 'Pin_Code': '1234',\n 'Date_Enrolled': datetime.datetime.now().strftime('%b %d %Y '),\n 'Mobile_Number': '2223334444',\n }\n patient = self.create_xml_patient(data)\n payload = self.create_xml_payload([patient])\n response = self._post(payload)\n self.assertEqual(response.status_code, 500)\n self.assertEqual(len(mail.outbox), 1)" ]
[ "0.748908", "0.7362392", "0.727116", "0.7229355", "0.7158507", "0.7016817", "0.69638264", "0.695092", "0.69368654", "0.689105", "0.6859958", "0.6832937", "0.68327", "0.68105704", "0.67939985", "0.6764749", "0.6753879", "0.6751137", "0.6747767", "0.67406535", "0.67373455", "0.67344", "0.6733985", "0.6714588", "0.6712248", "0.66855043", "0.66753715", "0.66628575", "0.6647952", "0.66464555" ]
0.77954644
0
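As a follow-up sketch, such triplets would typically feed a margin-based contrastive loss. Assumptions here: `encode` stands in for any text encoder that returns a fixed-size torch vector, and the margin value is arbitrary; neither is specified by this dataset.

import torch
import torch.nn.functional as F

def triplet_loss(encode, triplets, margin=0.2):
    # Embed anchors (queries), positives (documents) and negatives,
    # then apply the standard triplet margin loss.
    anchors = torch.stack([encode(a) for a, _, _ in triplets])
    positives = torch.stack([encode(p) for _, p, _ in triplets])
    negatives = torch.stack([encode(n) for _, _, n in triplets])
    return F.triplet_margin_loss(anchors, positives, negatives, margin=margin)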
Method that tests an unsuccessful POST request with empty JSON data.
def test_post_empty_json(self):
    data = {}
    url = reverse('notification',
                  kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})
    response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    self.assertEqual(response.status_code, 400)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testMissingData(self):\n data = {\n \"title\": \"Example Post\",\n }\n\n response = self.client.post(\"/api/posts\",\n data=json.dumps(data),\n content_type=\"application/json\",\n headers=[(\"Accept\", \"application/json\")]\n )\n\n self.assertEqual(response.status_code, 422)\n data = json.loads(response.data)\n self.assertEqual(data[\"message\"], \"'rent' is a required property\")", "def test_case_empty(self):\n data = {\"numbers\": \"\"}\n response = self.client.post(\"/api/hi\", data)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_missing_data(self):\n\n response = self.client.post(\n self.reg_url,\n {},\n format=\"json\")\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertIn(b\"required\", response.content)", "def testNotAJson(self):\n data = {'name': 'toto'}\n response = requests.post(url=self.url, data=data)\n headers = response.headers\n\n self.assertEqual(response.status_code, 400, WRONG_STATUS_CODE_MSG)\n self.assertEqual(\n headers['Content-Type'], 'text/html; charset=utf-8',\n WRONG_TYPE_RETURN_MSG)\n self.assertEqual(response.content, b'Not a JSON')", "def test_invalid_JSON_returns_error(self):\n\n response = self.client.post(\n reverse('transcript:record_telegram'),\n content_type='application/json',\n data='''{\"something\":''')\n\n self.assertEqual(response.status_code, 400)\n self.assertEqual(response.content, b\"Could not parse JSON\")\n self.assertEqual(Message.objects.count(), 0)", "def test_post_empty_data(self):\n response = self.app.post('/_ah/push-handlers/receive_message')\n self.assertEqual(response.status_int, 200)\n self.assertEqual(response.body, \"No request body received\")\n self.assertRaises(ValueError)", "def test_missing_body(self):\n self.is_authenticated()\n response = self.post_without_body()\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_empty_optionals(self):\n data = self.valid_payload\n data[\"telephone\"] = \"\"\n data[\"cellphone\"] = \"\"\n data[\"activity_description\"] = \"\"\n data[\"about\"] = \"\"\n data[\"institute\"] = \"\"\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_empty_optionals(self):\n data = self.valid_payload\n # data[\"telephone\"] = \"\"\n # data[\"cellphone\"] = \"\"\n data[\"activity_description\"] = \"\"\n # data[\"about\"] = \"\"\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def testNotAJson(self):\n data = {'text': 'toto'}\n response = requests.post(url=self.url, data=data)\n headers = response.headers\n\n self.assertEqual(response.status_code, 400, WRONG_STATUS_CODE_MSG)\n self.assertEqual(\n headers['Content-Type'], 'text/html; charset=utf-8',\n WRONG_TYPE_RETURN_MSG)\n self.assertEqual(response.content, b'Not a JSON')", "def test_storing_missing_data(self):\n data = {\"employer\": \"Trading Ltd\", \"jobTitle\": \"Assistant\", \"jobLocation\": \"5th street\",\n \"fromMonth\": \"january\", \"fromYear\": 2007}\n response = self.client.post(self.url, data=json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())", "def test_no_optionals(self):\n data = self.valid_payload\n # del data[\"telephone\"]\n # del data[\"cellphone\"]\n del 
data[\"activity_description\"]\n # del data[\"about\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_no_data():\n response = test_app.post(\"/bkt_service/unwind\", expect_errors=True)\n assert response.status == '400 Bad Request'\n assert \"No data\" in response.text", "def test_service_api_post_without_data(service_app):\n response = service_app.post('/predict')\n assert response.headers['Content-Type'] == 'application/json'\n assert response.status_code == 400\n assert json.loads(response.data) == {'error': 'Failed to decode JSON object'}", "def test_no_optionals(self):\n data = self.valid_payload\n del data[\"telephone\"]\n del data[\"cellphone\"]\n del data[\"activity_description\"]\n del data[\"about\"]\n del data[\"institute\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def testInvalidData(self):\n data = {\n \"title\": 32,\n \"rent\": 700\n }\n\n response = self.client.post(\"/api/posts\",\n data=json.dumps(data),\n content_type=\"application/json\",\n headers=[(\"Accept\", \"application/json\")]\n )\n\n self.assertEqual(response.status_code, 422)\n\n data = json.loads(response.data)\n\n self.assertEqual(data[\"message\"], \"32 is not of type 'string'\")", "def test_invalid_json(self):\r\n data = {\"Testing invalid\"}\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'),\r\n data=data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB: testing',\r\n HTTP_DATE='testdate'\r\n )\r\n self.assertIn('Invalid JSON', response.content)\r\n self.assertEqual(response.status_code, 400)", "def test_post_expected_fail_citelet_json(self):\n headers = {'content-type': 'application/json'}\n with open('citelet_invalid_sample_highwire.json') as test_data:\n payload = json.load(test_data)\n\n # retrieve (post) request\n response = requests.post(self.url, data=json.dumps(payload),\n headers=headers)\n\n # assert post request returns a status code 405 (user submission error)\n self.assertEqual(response.status_code, 405)", "def test_post_invalid(self):\n self.post_data['name'] = ''\n response = self._post()\n self.assertEquals(self.model.objects.count(), 0)\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('form' in response.context)\n self.assertTrue(response.context['form'].is_bound)\n self.assertFalse(response.context['form'].is_valid())", "def test_post_invalid(self):\n self.post_data['name'] = ''\n response = self._post()\n self.assertEquals(response.status_code, 200)\n self.assertTemplateUsed(response, self.template_name)\n self.assertTrue('object' in response.context)\n self.assertEquals(response.context['object'], self.obj)\n self.assertTrue('form' in response.context)\n self.assertTrue(response.context['form'].is_bound)\n self.assertFalse(response.context['form'].is_valid())\n self.assertEquals(response.context['form'].instance, self.obj)\n self._assert_no_change()", "def test_invalid_data(self):\n\n json_data = {\n \"input\" : {\n 'version': 'BAD',\n 'files': {'input_a': [self.source_file.id]},\n 'json': {}\n },\n \"job_type_id\" : self.job_type1.pk,\n \"configuration\" : self.configuration\n }\n\n url = '/%s/jobs/' % self.api\n response = self.client.generic('POST', url, 
json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.content)", "def test_create_empty_payload(self):\n response = self.client.post('/exercises/', data={})\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_not_json(self) -> None:\n issuer = unpaid_redemption()\n treq = treq_for_loopback_ristretto(issuer)\n d = treq.post(\n NOWHERE.child(\"v1\", \"redeem\").to_text().encode(\"ascii\"),\n b\"foo\",\n headers=Headers({\"content-type\": [\"application/json\"]}),\n )\n self.assertThat(\n d,\n succeeded(\n AfterPreprocessing(\n lambda response: response.code,\n Equals(BAD_REQUEST),\n ),\n ),\n )", "def test_post_no_content(self):\n response = self.post(title='foo')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_no_nationality(self):\n data = self.valid_payload\n data[\"nationality\"] = \"\"\n response1 = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n del data[\"nationality\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_post_missing_body(self):\n self._login_as_staff()\n response = self.client.post(self.path())\n assert response.status_code == 400", "def test_put_no_data(self):\n test_data = {}\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def test_no_profession(self):\n data = self.valid_payload\n data[\"address\"] = \"\"\n response1 = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n del data[\"profession\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_no_firstname(self):\n data = self.valid_payload\n data[\"first_name\"] = ''\n response1 = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n del data[\"first_name\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_no_objection(self):\n data = self.valid_payload\n data[\"objection\"] = \"\"\n response1 = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n del data[\"objection\"]\n response = self.client.post(\n reverse('contacts'),\n data=json.dumps(data),\n content_type='application/json'\n )\n self.assertEqual(response1.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)" ]
[ "0.80441666", "0.7749052", "0.77270865", "0.7574347", "0.7527644", "0.7512489", "0.7511428", "0.7503339", "0.750238", "0.7471226", "0.73760986", "0.73705995", "0.7363015", "0.73613787", "0.7355544", "0.73533607", "0.73203945", "0.72921515", "0.7282576", "0.72331876", "0.7231262", "0.72295904", "0.7197208", "0.7143605", "0.713983", "0.713589", "0.7085105", "0.70586145", "0.7043742", "0.70110965" ]
0.7766148
1
Method that tests an unsuccessful POST request for creating a notification with a wrong `way_id`.
def test_post_wrong_way_id(self):
    data = {
        'start_time': '2019-10-29',
        'end_time': '2019-12-29',
        'week_day': 6,
        'time': '23:58:59'
    }
    url = reverse('notification', kwargs={'way_id': 908, 'notification_id': self.notification.id})
    response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')
    self.assertEqual(response.status_code, 400)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_post_success(self):\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n expected_data = {\n 'way': 100,\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n response_dict = json.loads(response.content)\n response_dict.pop('id')\n self.assertEqual(response.status_code, 201)\n self.assertDictEqual(response_dict, expected_data)", "def test_db_creating_post(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.views.Notification.create') as notification_create:\n notification_create.return_value = False\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n\n self.assertEqual(response.status_code, 400)", "def test_put_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 543, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_from_another_way(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_db_creating_put(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.models.Notification.update') as notification_update:\n notification_update.return_value = False\n\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_post_empty_json(self):\n\n data = {}\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_wrong_notification_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 6778})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n 
self.assertEqual(response.status_code, 400)", "def test_post_internal_attendance_success(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-04T10:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-04T11:30'}\n response = self.client.post(\n reverse('tracking:internal-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n success_message = str(messages[0])\n self.assertEqual(success_message, 'Registro realizado exitosamente')", "def test_put_success(self):\n\n data = {\n 'time': '23:58:53'\n }\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 200)", "def test_post_invalid_data(self):\n data = {\n 'week_day': 'd',\n 'time': 'd'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_post_external_attendance_successfull(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-03T19:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-03T20:00'}\n response = self.client.post(\n reverse('tracking:external-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n success_message = str(messages[0])\n self.assertEqual(success_message, 'Registro realizado exitosamente')", "def test_post_non_owner(self):\n another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n\n self.client.login(email='[email protected]', password='testpassword')\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_registered_with_notification_and_pin(self):\n now = datetime.datetime.now()\n self.contact.pin = '1234'\n self.contact.save()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1234')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_registered_with_notification_and_pin(self):\n 
now = datetime.datetime.now()\n self.contact.pin = '1234'\n self.contact.save()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1234')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def setUp(self):\n signals.post_save.disconnect(create_notification_task, sender=Notification)\n signals.post_delete.disconnect(revoke_notification_task, sender=Notification)\n\n user = CustomUser.objects.create(id=100, email='[email protected]', is_active=True)\n user.set_password('testpassword')\n user.save()\n\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')\n\n way_first = Way.objects.create(id=100, user=user)\n way_second = Way.objects.create(id=101, user=user)\n\n Notification.objects.create(\n id=100,\n way=way_first,\n start_time=datetime.date(2019, 10, 29),\n end_time=datetime.date(2019, 12, 29),\n week_day=6,\n time=datetime.time(23, 58, 59)\n )\n\n Notification.objects.create(\n id=101,\n way=way_first,\n start_time=datetime.date(2019, 11, 27),\n end_time=datetime.date(2020, 12, 27),\n week_day=1,\n time=datetime.time(1, 12, 38)\n )\n\n Notification.objects.create(\n id=102,\n way=way_second,\n start_time=datetime.date(2019, 3, 11),\n end_time=datetime.date(2019, 7, 31),\n week_day=2,\n time=datetime.time(11, 28, 25)\n )\n\n self.notification = Notification.objects.get(id=100)\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')", "def post_notification():\n try:\n data = json.loads(request.data)\n notification = Notification(**data).save()\n return jsonify({'data': notification}), 201\n except BaseException as e:\n print(e)\n return e, 400", "def test_create(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.post(\n reverse(\n 'retreat:waitqueuenotification-list',\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_post_request_for_team(self):\n\n usual_user = UserFactory(\n username='Usual User',\n email='[email protected]',\n )\n token = Token.objects.get(user=usual_user)\n self.client.credentials(\n HTTP_AUTHORIZATION=f'Token {token.key}')\n data = {'team': self.team.id}\n response = self.client.post(reverse('api:user-team-requests-list'), data=data)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n notification = UserNotification.objects.last()\n notification_message = UserNotification.get_notification_text(\n UserNotification.TEAM_REQUEST_WAS_SENT_WITH_DEACTIVATED_EMAIL, username=usual_user.username\n )\n self.assertEqual(notification.message, notification_message)", "def test_create_notification(self):\n\n start_count = reminders.Notification.objects.count()\n url = reverse('create-notification')\n data = self.get_valid_data()\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = 
reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count + 1)", "def test_create_notification(self):\n\n start_count = reminders.Notification.objects.count()\n url = reverse('create-notification')\n data = self.get_valid_data()\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count + 1)", "def test_delete_wrong_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 38987, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_put_invalid_data(self):\n\n data = {\n 'start_time': '201-10-29'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_perturb_nodefail_post(self):\n Parameters = Parameters2()\n response = self.client.open('/perturb/nodefail',\n method='POST',\n data=json.dumps(Parameters),\n content_type='application/json')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def test_post__mismatched(self, mock_get_approvers):\n mock_get_approvers.return_value = ['[email protected]']\n params = {'state': Vote.NEEDS_WORK}\n\n self.gate_1.feature_id = 999\n self.gate_1.put() # This gate belongs to some other feature.\n\n testing_config.sign_in('[email protected]', 123567890)\n with test_app.test_request_context(self.request_path, json=params):\n with self.assertRaises(werkzeug.exceptions.BadRequest):\n self.handler.do_post(\n feature_id=self.feature_id, gate_id=self.gate_1_id)", "def test_new_Issue(self, requests_post, get_landowner):\n #requests_post.status_code.return_value = 200\n requests_post.json.return_value = {'features': []}\n get_landowner.return_value = 'TEST landowner'\n cat = Category(name=\"test category\")\n cat.save()\n issue = Issue(description=\"test issue\", position=Point(5, 23), category=cat)\n issue.save()\n self.assertEqual(len(Issue.objects.all()), 1)\n issue = Issue(id=666, description=\"test issue with defined id\", position=Point(5, 23), category=cat)\n issue.save()\n self.assertEqual(issue.id, 666)", "def test_post_internal_attendance_fail_already_registered_inside(self):\n building_name = 'Building 1'\n room_name = 'Room 1'\n\n building = Building.objects.create(\n name=building_name\n )\n room = Room.objects.create(\n name=room_name,\n building=building\n )\n\n AttendanceRecord.objects.create(\n room=room,\n entry_datetime=make_aware(datetime.strptime('2021-08-03T20:30', '%Y-%m-%dT%H:%M')),\n exit_datetime=make_aware(datetime.strptime('2021-08-03T22:00', '%Y-%m-%dT%H:%M')),\n attendee_email='[email protected]'\n )\n\n post_data = {'room': room.id,\n 'entry_datetime': '2021-08-03T19:00',\n 'attendee_email': '[email protected]',\n 'exit_datetime': '2021-08-03T23:00'}\n response = self.client.post(\n reverse('tracking:internal-register'),\n post_data\n )\n\n self.assertEqual(response.status_code, HTTPStatus.OK)\n messages = list(response.context['messages'])\n error_message = str(messages[0])\n invalid_date_error_msg = ('Ya existe un registro que coincide con ese lapso de tiempo: '\n 'Building 1 - Room 1 - 2021-08-03 20:30 - 2021-08-03 22:00')\n self.assertEqual(error_message, invalid_date_error_msg)", "def test_typical_post(self):\n 
self.seed_static_data()\n params = {\n 'event_id': 2,\n 'tag_type': 'RESPONSE',\n 'name': {\n 'en': 'English Tag 2 Event 2',\n 'fr': 'French Tag 2 Event 2',\n },\n 'description': {\n 'en': 'English Tag 2 Event 2 Description',\n 'fr': 'French Tag 2 Event 2 Description',\n }\n }\n response = self.app.post(\n '/api/v1/tag', \n headers=self.user2_headers, \n data=json.dumps(params),\n content_type='application/json')\n self.assertEqual(response.status_code, 201)\n data = json.loads(response.data)\n new_id = data['id']\n\n response = self.app.get('/api/v1/tag', headers=self.user2_headers, data={'id': new_id, 'event_id': 2})\n data = json.loads(response.data)\n\n self.assertEqual(data['id'], new_id)\n self.assertEqual(data['event_id'], 2)\n self.assertEqual(data['tag_type'], 'RESPONSE')\n self.assertDictEqual(data['name'], {\n 'en': 'English Tag 2 Event 2',\n 'fr': 'French Tag 2 Event 2'\n })\n self.assertDictEqual(data['description'], {\n 'en': 'English Tag 2 Event 2 Description',\n 'fr': 'French Tag 2 Event 2 Description'\n })", "def test_post(authenticated_client):\n observable_json = {'value': 'asd.com', 'type': 'domain-name'}\n rv = authenticated_client.post('/api/observables/',\n data=json.dumps(observable_json),\n content_type='application/json')\n response = json.loads(rv.data)\n assert isinstance(response['id'], int)", "def test_api_user_resend_confirmation_post(self):\n pass" ]
[ "0.76020175", "0.74337363", "0.7126387", "0.6757347", "0.6724071", "0.6624706", "0.65971375", "0.65209603", "0.64011407", "0.6349355", "0.634253", "0.62249684", "0.618803", "0.6092924", "0.6064552", "0.5966692", "0.5936303", "0.59089214", "0.58871806", "0.58849657", "0.58849657", "0.5840094", "0.582658", "0.5811073", "0.57961255", "0.5779021", "0.5765596", "0.5763352", "0.57590884", "0.57387006" ]
0.7806376
0
Method that tests a successful PUT request for updating a Notification.
def test_put_success(self): data = { 'time': '23:58:53' } url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id}) response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json') self.assertEqual(response.status_code, 200)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_update(self, mock_put):\n self.policies.update(id=333114, policy_update=self.policy_show_response)\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alert_policies/333114.json',\n headers=self.policies.headers,\n data=json.dumps(self.policy_show_response)\n )", "def test_update_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'Fortnite has a new winner'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 30,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n new_notification_url = post_response_data['url']\n new_displayed_times = 1\n data = {'displayed_times': new_displayed_times}\n patch_response = client.patch(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS),\n data=json.dumps(data))\n assert patch_response.status_code == HttpStatus.ok_200.value\n\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['displayed_times'] == new_displayed_times", "def test_update_success(self, mock_put):\n self.policies.update(\n id=self.policy_single_response['policy']['id'],\n name=self.policy_single_response['policy']['name'],\n incident_preference=self.policy_single_response['policy']['incident_preference']\n )\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alerts_policies/{0}.json'.format(\n self.policy_single_response['policy']['id']\n ),\n headers=self.policies.headers,\n data=json.dumps({\n \"policy\": {\n \"name\": self.policy_single_response['policy']['name'],\n \"incident_preference\": self.policy_single_response['policy']['incident_preference']\n }\n })\n )", "def test_put_success(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 200)", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_online_info_put(self):\n body = UpdateOnline()\n response = self.client.open(\n '/online_info',\n method='PUT',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_db_creating_put(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 
'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.models.Notification.update') as notification_update:\n notification_update.return_value = False\n\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_partial_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_put_from_another_way(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def do_PUT(self):\n note_details = NoteDetails\n if self.path == '/note/api/update':\n response_data=note_details.update_data(self)\n Response(self).jsonResponse(status=200, data=response_data)", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_update_a_todo(self):\n # hit the API endpoint\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=2,\n data=self.valid_data\n )\n self.assertEqual(response.data, self.valid_data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n # test with invalid data\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=3,\n data=self.invalid_data\n )\n self.assertEqual(\n response.data[\"message\"],\n \"TODO item requires state, due_date and text\"\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_updateContact(self):\n response = self.client.get(self.url)\n qs = response.json()\n contact = qs[0]\n to_update_value = 'address 2'\n contact['address'] = to_update_value\n response = self.client.put(self.url + str(contact['id']) + '/', contact, content_type=\"application/json\")\n self.assertEqual(response.status_code, 200)\n contact2 = response.json()\n self.assertEqual(contact2['address'], to_update_value)", "def test_update_telegram_id_success(self):\n test_data = {'telegram_id': 100}\n url = reverse('telegram_id')\n response = self.client.put(url, json.dumps(test_data), content_type='application/json')\n self.assertEqual(response.status_code, 200)", "def test_update_task(self):\n rv = TEST_CLIENT.patch(\n \"/tasks/foo\",\n json={\n \"name\": \"foo 2\",\n },\n )\n result = rv.json()\n expected = {\n \"message\": \"The specified task does not exist\",\n \"code\": \"TaskNotFound\",\n }\n self.assertDictEqual(expected, result)\n self.assertEqual(rv.status_code, 404)", "def put(self, request):\r\n new_status = request.body\r\n\r\n if not new_status in [\"success\", \"failure\"]:\r\n return HttpResponseBadRequest()\r\n\r\n else:\r\n # 
Configure all views to respond with the new status\r\n PaymentFakeView.PAYMENT_STATUS_RESPONSE = new_status\r\n return HttpResponse()", "def test_multiple_updates(self):\n response = self.api.put(self.assessment, {\"test_plan\": \"steps\"})\n self.assert200(response)\n\n response = self.api.put(self.assessment, {\"title\": \"new title\"})\n self.assert200(response)\n\n notifs, notif_data = common.get_daily_notifications()\n updated = notif_data[\"[email protected]\"][\"assessment_updated\"]\n self.assertEqual(len(notifs), 1)\n self.assertEqual(\n updated[self.assessment.id][\"updated_data\"][\"TITLE\"],\n (\"new title\", \"Assessment1\")\n )\n self.assertEqual(\n updated[self.assessment.id][\"updated_data\"][\"ASSESSMENT PROCEDURE\"],\n (\"steps\", \"\")\n )", "def put(self):\n request_data = json.loads(request.data)\n print(request_data)\n order_id = request_data['order_id']\n status = request_data['status']\n MM.update_order_status(ObjectId(order_id), status)\n return {\"message\": \"Order Status Updated\"}, 200", "def test_put_invalid_data(self):\n\n data = {\n 'start_time': '201-10-29'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_wrong_notification_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 6778})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def put(self):\n sample = request.get_json()\n if 'receipt_id' not in sample:\n return CustomError('No receipt_id in payload', status_code=400)\n if 'authorization_status' not in sample:\n return CustomError('No authorization_status in payload', status_code=400)\n\n DBHelper.update_receipt(sample)\n return {'message': 'updated!'}, 200", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_user_update_request(self):\n pass", "def update():\n return 'update api in put'", "def update(self, request, pk=None): #update a specific object\n return Response({'http_method': 'PUT'})", "def update(self,request,pk = None):\n return Response({'http_method':'PUT'})", "def test_comment_update(self):\n create_response = self.client.post(reverse('posts:comment_create'),\n data={\n 'post': self.post.id,\n 'user': self.user.id,\n 'comment_description':\n 'This is a test_comment'\n }, format='json')\n # assert that the comment is created. 
via json\n self.assertEqual(create_response.status_code, status.HTTP_201_CREATED)\n # the comment should be updated with a put response.\n update_response = self.client.put(reverse('posts:comment_update',\n kwargs={'pk': '1'}),\n data={\n 'post': self.post.id,\n 'user': self.user.id,\n 'comment_description':\n 'This is a test_comment '\n 'updated'\n }, format='json')\n self.assertEqual(update_response.status_code, status.HTTP_200_OK)\n # assert the updated data in the response\n self.assertEqual(update_response.data, {\n 'post': self.post.id,\n 'user': self.user.id,\n 'comment_description':\n 'This is a test_comment '\n 'updated'\n }, msg='The put request update test Passed!!')\n # the endpoint should also work with th http patch method as well.\n update_patch_response = self.client.patch(\n reverse('posts:comment_update', kwargs={'pk': '1'}),\n data={'comment_description': 'This is a test comment updated0'},\n format='json'\n )\n self.assertEqual(update_patch_response.status_code,\n status.HTTP_200_OK)\n self.assertEqual(update_patch_response.data, {\n 'post': self.post.id,\n 'user': self.user.id,\n 'comment_description':\n 'This is a test comment updated0'\n }, msg='patch method update test passed')", "def put(self, request, pk=None):\n return Response({'method': 'patch'})" ]
[ "0.749496", "0.7354682", "0.7336386", "0.72968656", "0.7277956", "0.726332", "0.7205782", "0.72020173", "0.7130923", "0.7110041", "0.70423526", "0.70081943", "0.69901866", "0.69531584", "0.6885239", "0.68768865", "0.68694884", "0.6864684", "0.68584836", "0.6839003", "0.68288124", "0.68000704", "0.678752", "0.6770968", "0.676781", "0.67530113", "0.6745747", "0.6738763", "0.67217565", "0.6716086" ]
0.8032294
0
Method that tests a request to update a Notification instance by a non-owner.
def test_put_non_owner(self): another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True) another_user.set_password('testpassword') another_user.save() self.client.login(email='[email protected]', password='testpassword') data = { 'week_day': 3 } url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id}) response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json') self.assertEqual(response.status_code, 403)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'Fortnite has a new winner'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 30,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n new_notification_url = post_response_data['url']\n new_displayed_times = 1\n data = {'displayed_times': new_displayed_times}\n patch_response = client.patch(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS),\n data=json.dumps(data))\n assert patch_response.status_code == HttpStatus.ok_200.value\n\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['displayed_times'] == new_displayed_times", "def test_resuableitem_submit_changerequest_public_owner_reject(self):\n original_reusableitem = setup_public_reusable_item_1(self)\n data1 = submit_change_request_1(self, self.user_1)\n\n # user 2 now votes against the change request\n Notification.objects.all().delete() # make sure no other notifications exist\n self.assertEqual(Notification.objects.count(), 0)\n\n self.client.force_authenticate(user=self.user_2)\n\n data2 = {'vote': 'no'}\n response = self.client.patch(get_reusable_item_1_url(self), data2, format='json')\n\n updated_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # the change request should be resolved\n self.assertEqual(updated_reusableitem.change_request, None)\n self.assertEqual(updated_reusableitem.change_request_votes_no.count(), 0)\n self.assertEqual(updated_reusableitem.change_request_votes_yes.count(), 0)\n\n # the editable properties should be unchanged\n self.assertEqual(updated_reusableitem.name, original_reusableitem.name)\n self.assertEqual(updated_reusableitem.definition, original_reusableitem.definition)\n self.assertEqual(updated_reusableitem.link, original_reusableitem.link)\n\n # history has been updated\n history_entry = updated_reusableitem.history[1]\n\n self.assertNotEqual(history_entry, None)\n self.assertEqual(history_entry['change_request_resolution'], 'rejected')\n self.assertNotEqual(history_entry['changed_request_resolved_at'], None)\n self.assertEqual(history_entry['changed_request_submitted_by_id'], self.user_1.id.__str__())\n self.assertEqual(history_entry['number_of_users'], 2)\n self.assertEqual(history_entry['change_request_votes_yes_count'], 1)\n self.assertEqual(history_entry['change_request_votes_no_count'], 1)\n\n self.assertEqual(history_entry['change_request']['name'], data1['name'])\n self.assertEqual(history_entry['change_request']['definition'], data1['definition'])\n self.assertEqual(history_entry['change_request']['link'], data1['link'])\n\n # User 1 and user 2 should each get a notification of the change request acceptance\n self.assertEqual(Notification.objects.count(), 2)\n\n notification1 = Notification.objects.get(created_by=self.user_1)\n self.assertEqual(notification1.context, 
'reusableItem')\n self.assertEqual(notification1.event, 'changeRequestRejected')\n self.assertEqual(notification1.reusableItem, updated_reusableitem)\n\n notification2 = Notification.objects.get(created_by=self.user_2)\n self.assertEqual(notification2.context, 'reusableItem')\n self.assertEqual(notification2.event, 'changeRequestRejected')\n self.assertEqual(notification2.reusableItem, updated_reusableitem)", "def test_resuableitem_submit_changerequest_public_not_owner_accept(self):\n\n original_reusableitem = setup_public_reusable_item_1(self)\n\n # user 2 can propose a change request\n # it does not update immediately\n data = submit_change_request_1(self, self.user_2)\n\n updated_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n # editable properties unchanged\n self.assertEqual(updated_reusableitem.name, original_reusableitem.name)\n self.assertEqual(updated_reusableitem.definition, original_reusableitem.definition)\n self.assertEqual(updated_reusableitem.link, original_reusableitem.link)\n\n # change request created\n self.assertEqual(updated_reusableitem.change_request['name'], data['name'])\n self.assertEqual(updated_reusableitem.change_request['definition'], data['definition'])\n self.assertEqual(updated_reusableitem.change_request['link'], data['link'])\n\n # user 2 has voted for it\n self.assertEqual(updated_reusableitem.change_request_votes_no.count(), 0)\n self.assertEqual(updated_reusableitem.change_request_votes_yes.count(), 1)\n self.assertEqual(updated_reusableitem.change_request_votes_yes.first(), self.user_2)\n\n # user 1 now votes for the change request\n self.client.force_authenticate(user=self.user_1)\n\n data3 = {'vote': 'yes'}\n response = self.client.patch(get_reusable_item_1_url(self), data3, format='json')\n\n updated_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # it should be resolved\n self.assertEqual(updated_reusableitem.change_request, None)\n self.assertEqual(updated_reusableitem.change_request_votes_no.count(), 0)\n self.assertEqual(updated_reusableitem.change_request_votes_yes.count(), 0)\n\n self.assertEqual(updated_reusableitem.name, data['name'])\n self.assertEqual(updated_reusableitem.definition, data['definition'])\n self.assertEqual(updated_reusableitem.link, data['link'])\n\n # the name of all referencing top ten items should be updated\n toptenitems1 = self.toptenlist_1.topTenItem.all()\n toptenitem_1_id = toptenitems1[0].id\n updated_user1_toptenitem1 = TopTenItem.objects.get(pk=toptenitem_1_id)\n\n self.assertEqual(updated_user1_toptenitem1.name, data['name'])\n\n #updated_user2_toptenitem = toptenlist_1_2\n toptenitems2 = self.toptenlist_2.topTenItem.all()\n toptenitem_2_id = toptenitems2[1].id\n updated_user1_toptenitem2 = TopTenItem.objects.get(pk=toptenitem_1_id)\n self.assertEqual(updated_user1_toptenitem2.name, data['name'])\n\n # history has been updated\n history_entry = updated_reusableitem.history[1]\n self.assertNotEqual(history_entry, None)\n self.assertEqual(history_entry['change_request_resolution'], 'accepted')\n self.assertNotEqual(history_entry['changed_request_resolved_at'], None)\n self.assertEqual(history_entry['changed_request_submitted_by_id'], self.user_2.id.__str__())\n self.assertEqual(history_entry['number_of_users'], 2)\n self.assertEqual(history_entry['change_request_votes_yes_count'], 2)\n self.assertEqual(history_entry['change_request_votes_no_count'], 0)\n\n 
self.assertEqual(history_entry['change_request']['name'], data['name'])\n self.assertEqual(history_entry['change_request']['definition'], data['definition'])\n self.assertEqual(history_entry['change_request']['link'], data['link'])", "def test_topic_notification_update_invalid_user(self):\n user = utils.create_user()\n notification = TopicNotification.objects.create(\n user=user, topic=self.topic, comment=self.comment)\n\n utils.login(self)\n form_data = {}\n response = self.client.post(\n reverse(\n 'spirit:topic:notification:update',\n kwargs={'pk': notification.pk, }),\n form_data)\n self.assertEqual(response.status_code, 404)", "def test_post_non_owner(self):\n another_user = CustomUser.objects.create(id=1067, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n\n self.client.login(email='[email protected]', password='testpassword')\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_notify(self):\n # self.client.force_authenticate(user=self.admin)\n\n FIXED_TIME = datetime(2018, 1, 1, tzinfo=LOCAL_TIMEZONE)\n\n # Old notification that will be deleted\n with mock.patch(\n 'django.utils.timezone.now', return_value=FIXED_TIME):\n WaitQueueNotification.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user = WaitQueue.objects.create(\n user=self.user,\n retreat=self.retreat,\n )\n\n waiting_user2 = WaitQueue.objects.create(\n user=self.user2,\n retreat=self.retreat,\n )\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n # Assert that the wait queue index is updated\n # All users (2) are notified since there are more (4) reserved_seats\n self.assertEqual(\n self.retreat.next_user_notified,\n 2,\n \"next_user_notified index invalid\"\n )\n\n # Assert that only 2 reserved seats remain (since only 2 users are\n # waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 2,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 2 new notifications are created (2 users in wait_queue)\n # Assert that 2 old notification has been deleted (too old)\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count + 2 - 2,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n\n self.assertEqual(len(mail.outbox), 2)\n\n waiting_user.delete()\n waiting_user2.delete()", "def test_user_update_request(self):\n pass", "def test_resuableitem_submit_changerequest_private(self):\n # add a second reference to this reusable item, by the same user\n reference_reusable_item(self, 'user_1', self.reusableitem_1.id, 'toptenlist_1', 1)\n\n self.client.force_authenticate(user=self.user_1)\n\n # ensure is_public is false to start with\n original_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n original_reusableitem.is_public = False\n original_reusableitem.save()\n \n # owner can change name directly when nobody else references the reusable item\n data = {'name': 'Not Jane Austen'}\n response = 
self.client.patch(get_reusable_item_1_url(self), data, format='json')\n\n updated_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(updated_reusableitem.name, data['name'])\n\n # owner can change definition, link directly\n data = {'definition': 'A writer', 'link': 'someurl'}\n response = self.client.patch(get_reusable_item_1_url(self), data, format='json')\n\n updated_object = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(updated_object.definition, data['definition'])\n self.assertEqual(updated_object.link, data['link'])\n\n # there should not be a notification\n self.assertEqual(Notification.objects.count(), 0)\n\n # Note: there should never be an existing change request for a reusable item referenced by only one user\n # it should have been resolved\n # should this occur through some bug, the user could withdraw their vote and then revote, that should trigger a count\n\n # other user cannot add change request\n self.client.force_authenticate(user=self.user_2)\n\n data = {'name': 'Not Jane Austen'}\n\n response = self.client.patch(get_reusable_item_1_url(self), data, format='json')\n\n updated_reusableitem = ReusableItem.objects.get(pk=self.reusableitem_1.id)\n\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)", "def test_update_note(self):\n pass", "def test_patch_project_owner(self):\n new_owner = self.make_user('new_owner')\n url = reverse(\n 'projectroles:api_project_update',\n kwargs={'project': self.project.sodar_uuid},\n )\n patch_data = {'owner': str(new_owner.sodar_uuid)}\n response = self.request_knox(url, method='PATCH', data=patch_data)\n self.assertEqual(response.status_code, 400, msg=response.content)", "def create_or_update_resource_request_notifications(\n sender, instance, created, **kwargs\n):\n Notification = import_module('apps.notifications.models').Notification\n\n # notify the User\n Notification.objects.filter(\n notify_id=instance.member.id, instance_id=instance.id\n ).delete()\n notification = Notification.objects.create(\n notify=instance.member,\n actor=instance.organization,\n instance=instance,\n actions=instance.member_notification_actions,\n message=instance.member_notification_message,\n picture_url=instance.organization.picture_url,\n )\n notification.created = instance.updated\n notification.save()\n\n if instance.status == REQUEST_APPROVED:\n # delete existing org notifications\n Notification.objects.filter(\n notify_id=instance.organization.id, instance_id=instance.id\n ).delete()\n\n # notify the Org\n notification = Notification.objects.create(\n notify=instance.organization,\n actor=instance.member,\n instance=instance,\n actions=[\n {\n 'url': reverse('member:records', args=[instance.member.id]),\n 'text': 'View Health Records',\n 'method': 'get',\n }\n ],\n message=\"\"\"<b>{instance.member.profile.name}</b> granted\n {instance.organization.name} access to their health records\"\"\",\n picture_url=instance.member.profile.picture_url,\n )\n notification.created = instance.updated\n notification.save()\n\n elif instance.status == REQUEST_DENIED:\n # delete existing org notifications\n Notification.objects.filter(\n notify_id=instance.organization.id, instance_id=instance.id\n ).delete()\n\n # notify the Org\n notification = Notification.objects.create(\n notify=instance.organization,\n actor=instance.member,\n instance=instance,\n 
message=\"\"\"<b>{instance.member.profile.name}</b> revoked\n or denied {instance.organization.name} access to their health records\"\"\",\n picture_url=instance.member.profile.picture_url,\n )\n notification.created = instance.updated\n notification.save()", "def test_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def inner_test(param: models.Notification):\n self.assertEqual(param, notif)", "def test_resuableitem_submit_changerequest_public_owner_not_referenced(self):\n\n original_reusableitem = make_reusable_item_public(self.reusableitem_1.id)\n\n self.client.force_authenticate(user=self.user_2)\n\n # user 2 tries to submit a change request to a reusable item they do not reference in their top ten lists\n data = {'name': 'Not Jane Austen', 'definition': 'A writer', 'link': 'someurl'}\n response = self.client.patch(get_reusable_item_1_url(self), data, format='json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_edit_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('edit-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count)", "def test_edit_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('edit-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count)", "def test_get_non_owner(self):\n another_user = CustomUser(id=101, email='[email protected]', is_active=True)\n another_user.set_password('testpassword')\n another_user.save()\n self.client.login(email='[email protected]', password='testpassword')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 403)", "def test_notify_user(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users([self.user_a], foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 1)", "def test_update(self):\n # this is tested graphically, as it is UI\n pass", "def test_client_nationlity_update(self):\n pass", "def test_db_creating_put(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.models.Notification.update') as notification_update:\n notification_update.return_value = False\n\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 
400)", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_meeting_update(self):\n pass", "def test_api_object_update_callable(self, api_object):\n attrs_dict = {'info': 'CREATING'}\n api_object.update_public_attrs(attrs_dict)\n assert api_object.info != 'CREATING'", "def test_update_subscription(self):\n pass", "def test_put_from_another_way(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')", "def test_update(self):\n\n # Test that instances without application information cannot be started\n incomplete_instance = Instance(self.client, 'foo')\n with self.assertRaises(ValueError):\n incomplete_instance.update()\n\n value = self.instance.update()\n update_instance = self.client.update_instance\n update_instance.assert_called_once_with('nginx', 'nginx', 'latest',\n parameters={\n 'SETTING': 'value'\n },\n options={\n 'storageBucket': 'custom'\n })\n self.assertEqual(value, update_instance.return_value)", "def test_update_domain_only(self):\n self.test_update()", "def test_client_verification_document_update(self):\n pass" ]
[ "0.6825002", "0.65637267", "0.6478547", "0.642229", "0.62722844", "0.6249249", "0.6242472", "0.6202908", "0.61824554", "0.6136467", "0.61097735", "0.60909486", "0.60642284", "0.60447806", "0.6032072", "0.6032072", "0.6029723", "0.60040575", "0.5982113", "0.5950152", "0.5922698", "0.59071416", "0.5900499", "0.58898973", "0.58755726", "0.5874132", "0.58721405", "0.58652365", "0.5851702", "0.58483505" ]
0.6854053
0
Provide tests for a PUT request updating a notification with a wrong `way_id`.
def test_put_wrong_way_id(self): data = { 'start_time': '2019-10-29', 'end_time': '2019-12-29', 'week_day': 6, 'time': '23:58:59' } url = reverse('notification', kwargs={'way_id': 543, 'notification_id': self.notification.id}) response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json') self.assertEqual(response.status_code, 400)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_post_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 908, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_from_another_way(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 403)", "def test_put_non_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_put_wrong_notification_id(self):\n\n data = {\n 'time': '23:38:54'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 6778})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_post_success(self):\n\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n expected_data = {\n 'way': 100,\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n response_dict = json.loads(response.content)\n response_dict.pop('id')\n self.assertEqual(response.status_code, 201)\n self.assertDictEqual(response_dict, expected_data)", "def test_put_success(self):\n\n data = {\n 'time': '23:58:53'\n }\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 200)", "def test_db_creating_put(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.models.Notification.update') as notification_update:\n notification_update.return_value = False\n\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_api_can_update_post(self):\n post = Post.objects.get()\n change_post = {'name': 'Something new'}\n response = self.client.put(\n reverse('details', kwargs={'pk': post.id}),\n change_post, format='json'\n )\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_api_can_update_post(self):\n post = Post.objects.get()\n change_post = {'name': 'Something new'}\n res = self.client.put(\n reverse('details', kwargs={'pk': post.id}),\n change_post, format='json'\n )\n self.assertEqual(res.status_code, 
status.HTTP_200_OK)", "def test_update_telegram_id_success(self):\n test_data = {'telegram_id': 100}\n url = reverse('telegram_id')\n response = self.client.put(url, json.dumps(test_data), content_type='application/json')\n self.assertEqual(response.status_code, 200)", "def test_update_notification(client):\n create_user_response = create_user(client, TEST_USER_NAME, TEST_USER_PASS)\n assert create_user_response.status_code == HttpStatus.created_201.value\n\n new_notification_message_one = 'Fortnite has a new winner'\n new_notification_category_one = 'Information'\n post_response = create_notification(\n client,\n new_notification_message_one,\n 30,\n new_notification_category_one)\n assert post_response.status_code == HttpStatus.created_201.value\n assert Notification.query.count() == 1\n\n post_response_data = json.loads(post_response.get_data(as_text=True))\n new_notification_url = post_response_data['url']\n new_displayed_times = 1\n data = {'displayed_times': new_displayed_times}\n patch_response = client.patch(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS),\n data=json.dumps(data))\n assert patch_response.status_code == HttpStatus.ok_200.value\n\n get_response = client.get(\n new_notification_url,\n headers=get_authentication_headers(TEST_USER_NAME, TEST_USER_PASS))\n assert get_response.status_code == HttpStatus.ok_200.value\n\n get_response_data = json.loads(get_response.get_data(as_text=True))\n assert get_response_data['displayed_times'] == new_displayed_times", "def test_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_update_workout(self):\n body = Workout()\n response = self.client.open(\n '/workout/{id}'.format(id='id_example'),\n method='PUT',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "async def test_update_dispatch_route_by_id(client):\n update_dispatch_route_params = null\n params = [('access_token', 'access_token_example')]\n headers = { \n 'Accept': 'application/json',\n 'Content-Type': 'application/json',\n }\n response = await client.request(\n method='PUT',\n path='/v1/fleet/dispatch/routes/{route_id}'.format(route_id=56),\n headers=headers,\n json=update_dispatch_route_params,\n params=params,\n )\n assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')", "def test_post__update_vote(self, mock_get_approvers, mock_notifier):\n mock_get_approvers.return_value = ['[email protected]']\n testing_config.sign_in('[email protected]', 123567890)\n self.vote_1_1.put() # Existing vote from reviewer1@.\n\n params = {'state': Vote.DENIED}\n with test_app.test_request_context(self.request_path, json=params):\n actual = self.handler.do_post(\n feature_id=self.feature_id, gate_id=self.gate_1_id)\n\n self.assertEqual(actual, {'message': 'Done'})\n updated_votes = Vote.get_votes(feature_id=self.feature_id)\n self.assertEqual(1, len(updated_votes))\n vote = updated_votes[0]\n self.assertEqual(vote.feature_id, self.feature_id)\n self.assertEqual(vote.gate_id, 1)\n self.assertEqual(vote.set_by, '[email 
protected]')\n self.assertEqual(vote.state, Vote.DENIED)\n\n mock_notifier.assert_called_once_with(self.feature_1,\n self.gate_1, '[email protected]', Vote.DENIED, Vote.NA)", "def taco_test_post_param_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('POST', '/item/4', body=body)\n result = webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n # webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))\n debug.log('result', result)", "def test_update(self):\n payload = {\n 'id': self.rout1.id,\n 'name': 'Tuesday routine',\n 'exercises': [self.exer1.id]\n }\n response = self.client.put(\n '/routines/{}/'.format(self.rout1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Routine.objects.get(id=self.rout1.id).name, payload['name'])", "def test_patch_household(self):\n payload = {\n 'name': 'Test Household Changed'\n }\n res = self.client.patch(get_household_detail_url(\n self.user.household.id), payload)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def test_partial_update(self):\n self.client.force_authenticate(user=self.admin)\n\n data = {\n 'retreat': reverse(\n 'retreat:retreat-detail', args=[self.retreat.id]\n ),\n 'user': reverse('user-detail', args=[self.user2.id]),\n }\n\n response = self.client.put(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n data,\n format='json',\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_update(self, sapid, hostname, loopback, mac_address, router_id):\n\n # Positive test case\n router_data = {\n 'id': self.router_details.id,\n 'sapid': sapid,\n 'hostname': hostname,\n 'loopback': loopback,\n 'mac_address': mac_address,\n }\n\n response = self.client.post(reverse('router-update'), data=router_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Test Case: Where record needs to be updated, not found\n router_data_not_found = {\n 'id': router_id,\n 'sapid': sapid,\n 'hostname': hostname,\n 'loopback': loopback,\n 'mac_address': mac_address,\n }\n\n response_not_found = self.client.post(reverse('router-update'), data=router_data_not_found, format='json')\n self.assertEqual(response_not_found.status_code, status.HTTP_404_NOT_FOUND)\n\n # Test Case: Where data is not validated\n router_data_not_valid = {\n 'id': self.router_details.id,\n 'sapid': sapid,\n 'hostname': '',\n 'loopback': loopback,\n 'mac_address': mac_address,\n }\n\n response_not_valid = self.client.post(reverse('router-update'), data=router_data_not_valid, format='json')\n self.assertEqual(response_not_valid.status_code, status.HTTP_400_BAD_REQUEST)", "def test_update_answer(self):\n self.app.post(\"/api/v2/answers/1/answer\", headers=self.headers,\n data=json.dumps(self.answer)) \n response = self.app.patch(\n \"/api/v2/answers/1/answer\", headers=self.headers, data=json.dumps(self.answer))\n result = json.loads(response.data)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(result['status'], 200)", "def test_put_invalid_data(self):\n\n data = {\n 'start_time': '201-10-29'\n }\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def 
test_db_creating_post(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n\n with mock.patch('notification.views.Notification.create') as notification_create:\n notification_create.return_value = False\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n\n self.assertEqual(response.status_code, 400)", "def test_update_note(self):\n\n url = reverse(\n 'crm-admin:note-update',\n kwargs={\n 'pk': self.object.id\n }\n )\n\n # Test that the page load first\n response = self.c.get(url)\n self.assertEqual(response.status_code, 200)\n\n # Send data\n data = {\n 'comment': 'other value'\n }\n response = self.c.post(url, data)\n self.assertEqual(response.status_code, 302)\n\n # Get the latest added object\n obj = Note.objects.get(id=self.object.id)\n self.assertEqual(obj.comment, 'other value')", "def test_mailpiece_put(self):\n mailPiecePK = MailPiece.objects.filter(user=self.testUser.pk)[0].pk\n url = reverse('MailPiece-detail', kwargs={'pk': mailPiecePK})\n response = self.client.put(url, self.data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(MailPiece.objects.get(pk=mailPiecePK).tracking,\n 1234)", "def test_beneficiaries_update_that_will_pass(self):\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n url = reverse('beneficiary:beneficiary-entity-by-id-update', kwargs={'pk': 1})\n response = self.client.post(url, content_type='application/json')\n return self.assertTrue(response.status_code, 200)", "def test_update_item_using_post(self):\n pass", "def test_update_a_todo(self):\n # hit the API endpoint\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=2,\n data=self.valid_data\n )\n self.assertEqual(response.data, self.valid_data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n # test with invalid data\n response = self.make_a_request(\n kind=\"put\",\n version=\"v1\",\n id=3,\n data=self.invalid_data\n )\n self.assertEqual(\n response.data[\"message\"],\n \"TODO item requires state, due_date and text\"\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)", "def test_delete_wrong_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 38987, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_mailpiece_patch(self):\n mailPiecePK = MailPiece.objects.filter(user=self.testUser.pk)[0].pk\n url = reverse('MailPiece-detail', kwargs={'pk': mailPiecePK})\n data = {'tracking': 9876543210}\n response = self.client.patch(url, data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(MailPiece.objects.get(pk=mailPiecePK).tracking,\n 9876543210)" ]
[ "0.7309353", "0.7287713", "0.70464474", "0.69682556", "0.68557084", "0.6814788", "0.6658717", "0.6463719", "0.64070153", "0.6393831", "0.63803446", "0.63698304", "0.6313824", "0.6292307", "0.62720644", "0.6193457", "0.61715186", "0.6160451", "0.6157474", "0.6146517", "0.6085609", "0.6062946", "0.602664", "0.60180193", "0.60137635", "0.6008124", "0.59893346", "0.5983161", "0.5941502", "0.59239715" ]
0.75480807
0
Provide tests for a request to delete a Notification instance with another `way_id`.
def test_delete_another_way_id(self): url = reverse('notification', kwargs={'way_id': 101, 'notification_id': self.notification.id}) response = self.client.delete(url) self.assertEqual(response.status_code, 403)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_delete_wrong_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 38987, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_delete_non_notification_id(self):\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id})\n response = self.client.delete(url)\n self.assertEqual(response.status_code, 400)", "def test_delete_wrong_notification_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_delete_success(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 200)", "def test_error_db_deleting(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n with mock.patch('notification.views.Notification.delete_by_id') as notification_delete:\n notification_delete.return_value = False\n response = self.client.delete(url)\n self.assertEqual(response.status_code, 400)", "def test_delete_device_by_id(self):\n pass", "def test_delete_workout(self):\n response = self.client.open(\n '/workout/{id}'.format(id='id_example'),\n method='DELETE')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_delete_device_by_id1(self):\n pass", "def test_delete_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count - 1)", "def test_delete_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count - 1)", "def test_get_delete_page(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_get_delete_page(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_delete_works(client):\n\n # Create one\n proto_reminder['message'] = 'test_delete_works'\n res = client.post('/api/reminders', json=proto_reminder)\n print(\"Got response:\", res.data)\n reminder = json.loads(res.data)\n print(\"Got response:\", reminder)\n # Delete it\n res = client.delete('/api/reminders/{}'.format(reminder['guid']))\n assert res.status_code == 200\n assert res.content_type == 'application/json'\n # Get and ensure it's not there\n res = client.get('/api/reminders')\n print(\"Got 
response:\", json.loads(res.data))\n assert proto_reminder['message'].encode() not in res.data", "def test_delete_alert_by_id(self):\n pass", "def test_delete_non_owner(self):\n another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)\n another_user.set_password('qwerty12345')\n another_user.save()\n\n self.client.login(email='[email protected]', password='qwerty12345')\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)", "def delete(thing, id_):\n pass", "def setUp(self):\n signals.post_save.disconnect(create_notification_task, sender=Notification)\n signals.post_delete.disconnect(revoke_notification_task, sender=Notification)\n\n user = CustomUser.objects.create(id=100, email='[email protected]', is_active=True)\n user.set_password('testpassword')\n user.save()\n\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')\n\n way_first = Way.objects.create(id=100, user=user)\n way_second = Way.objects.create(id=101, user=user)\n\n Notification.objects.create(\n id=100,\n way=way_first,\n start_time=datetime.date(2019, 10, 29),\n end_time=datetime.date(2019, 12, 29),\n week_day=6,\n time=datetime.time(23, 58, 59)\n )\n\n Notification.objects.create(\n id=101,\n way=way_first,\n start_time=datetime.date(2019, 11, 27),\n end_time=datetime.date(2020, 12, 27),\n week_day=1,\n time=datetime.time(1, 12, 38)\n )\n\n Notification.objects.create(\n id=102,\n way=way_second,\n start_time=datetime.date(2019, 3, 11),\n end_time=datetime.date(2019, 7, 31),\n week_day=2,\n time=datetime.time(11, 28, 25)\n )\n\n self.notification = Notification.objects.get(id=100)\n self.client = Client()\n self.client.login(email='[email protected]', password='testpassword')", "def test_delete_note(self):\n pass", "def test_delete(self):\n\n value = self.instance.delete()\n self.client.delete_instance.assert_called_once_with('nginx')\n self.assertEqual(value, self.client.delete_instance.return_value)", "def test_handle_delete_plan_bad_id(\n mock_send_reply, make_handler_params, make_time,\n):\n params = make_handler_params(\"delete-plan not-tjs\")\n\n plan = Plan(\"tjs\", make_time(12, 30), [])\n params.storage.get.return_value = {plan.uuid: plan}\n\n handle_delete_plan(params)\n\n mock_send_reply.assert_called_with(\n params.client,\n params.message,\n \"That lunch_id doesn't exist! 
Type show-plans to see each lunch_id and its associated lunch plan.\",\n )", "def test_post_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 908, 'notification_id': self.notification.id})\n response = self.client.post(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_issue_delete_subscription(self):\n pass", "def test_put_wrong_way_id(self):\n data = {\n 'start_time': '2019-10-29',\n 'end_time': '2019-12-29',\n 'week_day': 6,\n 'time': '23:58:59'\n }\n url = reverse('notification', kwargs={'way_id': 543, 'notification_id': self.notification.id})\n response = self.client.put(url, json.dumps(data, cls=DjangoJSONEncoder), content_type='application/json')\n self.assertEqual(response.status_code, 400)", "def test_handle_delete_plan_disambiguate(\n mock_send_reply, make_handler_params, make_time,\n):\n params = make_handler_params(\"delete-plan tjs 12:30\")\n\n plan1 = Plan(\"tjs\", make_time(11, 00), [])\n plan2 = Plan(\"tjs\", make_time(12, 30), [])\n params.storage.get.return_value = {\n plan1.uuid: plan1,\n plan2.uuid: plan2,\n }\n\n handle_delete_plan(params)\n\n params.storage.put.assert_called_with(\n params.storage.PLANS_ENTRY, {plan1.uuid: plan1}\n )\n params.cron.remove_event.assert_called_with(plan2.uuid)\n mock_send_reply.assert_called_with(\n params.client,\n params.message,\n \"You've successfully deleted lunch tjs @ 12:30pm.\",\n )", "def test_delete():\n sample_uuid = get_sample_id()\n response = requests.delete(f'http://localhost:5000/api/persons/{sample_uuid}')\n\n assert response.status_code == 200", "def delete(self, _id):", "def test_issue_delete_issue_reaction(self):\n pass", "def test_meeting_delete(self):\n pass", "def test_delete(self):\n self.client.force_authenticate(user=self.admin)\n\n response = self.client.delete(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_meeting_poll_delete(self):\n pass" ]
[ "0.78409946", "0.77346075", "0.75048524", "0.72533673", "0.7238364", "0.6442438", "0.6423715", "0.6412782", "0.63466054", "0.63466054", "0.62805015", "0.62805015", "0.6271731", "0.623744", "0.6173104", "0.61633974", "0.61517215", "0.6149768", "0.61421883", "0.60912126", "0.6089049", "0.60829705", "0.6082753", "0.6075105", "0.6040318", "0.60349596", "0.60149336", "0.6012638", "0.60057175", "0.60033756" ]
0.814844
0
Method that tests a request to delete a Notification instance by a non-owner user.
def test_delete_non_owner(self):
    another_user = CustomUser.objects.create(id=134, email='[email protected]', is_active=True)
    another_user.set_password('qwerty12345')
    another_user.save()

    self.client.login(email='[email protected]', password='qwerty12345')

    url = reverse('notification',
                  kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})

    response = self.client.delete(url)

    self.assertEqual(response.status_code, 403)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_delete_non_notification_id(self):\n\n url = reverse('notification', kwargs={'way_id': self.notification.way_id})\n response = self.client.delete(url)\n self.assertEqual(response.status_code, 400)", "def action_delete():\n try:\n deleted = delete_notification()\n except:\n raise HTTPResponse(body=\"Unexpected error\", status=400)\n \n if deleted:\n return dict(msg=\"Notification deleted\")\n else:\n return dict(msg=\"No notification to delete\")", "def test_delete_success(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 200)", "def test_error_db_deleting(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})\n with mock.patch('notification.views.Notification.delete_by_id') as notification_delete:\n notification_delete.return_value = False\n response = self.client.delete(url)\n self.assertEqual(response.status_code, 400)", "def test_delete_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count - 1)", "def test_delete_notification(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n start_count = reminders.Notification.objects.count()\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.post(url, data)\n self.assertRedirects(response, self.dashboard_url)\n end_count = reminders.Notification.objects.count()\n self.assertEqual(end_count, start_count - 1)", "def test_get_delete_page(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_get_delete_page(self):\n\n data = self.get_valid_data()\n notification = reminders.Notification.objects.create(**data)\n url = reverse('delete-notification', args=[notification.pk])\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_delete_another_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 101, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 403)", "def test_delete_wrong_notification_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': self.notification.way_id, 'notification_id': 87876})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)", "def test_destroy_not_owner(self):\n\n self.assertEqual(first=1, second=Post.objects.all().count())\n url = reverse('post-detail', args=(self.post.id,))\n self.client.credentials(HTTP_AUTHORIZATION=self.token_1)\n response = self.client.delete(path=url)\n self.assertEqual(first=403, second=response.status_code)\n self.assertEqual(first=1, second=Post.objects.all().count())", "def test_destroy_owner(self):\n\n self.assertEqual(first=1, second=Post.objects.all().count())\n url = reverse('post-detail', args=(self.post.id,))\n 
self.client.credentials(HTTP_AUTHORIZATION=self.token)\n response = self.client.delete(path=url)\n self.assertEqual(first=204, second=response.status_code)\n self.assertEqual(first=0, second=Post.objects.all().count())", "def test_delete(self):\n\n value = self.instance.delete()\n self.client.delete_instance.assert_called_once_with('nginx')\n self.assertEqual(value, self.client.delete_instance.return_value)", "def test_owner_delete_assessment(self):\n response = self.user_01.delete(self.assessment_custom_url)\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)\n response = self.user_01.get(self.assessment_custom_url)\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)", "def test_issue_delete_subscription(self):\n pass", "def delete_notification(request, noti_id):\n user = request.user\n Notification.objects.filter(id=noti_id, user=user).delete()\n return redirect('show_notifications')", "def test_delete(self):\n self.client.force_authenticate(user=self.admin)\n\n response = self.client.delete(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_not_owner(self):\n creating_user = create_user()\n creating_user.save()\n festival = create_festival('test', creating_user)\n festival.save()\n\n concert = create_concert(festival, 'test')\n concert.save()\n\n login(self.client)\n\n client = create_client('test')\n client.delete_access = True\n client.save()\n\n response = self.client.post('/backend/u/conc/', {'client': 'test', 'id': concert.pk})\n self.assertEqual(response.status_code, 200)\n self.assertEqual('Permission not granted', response.content.decode('utf-8'))", "def test_order_can_be_deleted_by_owner(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\tself.order_data) , content_type = 'application/json')\n\t\tself.assertEqual(response.status_code, 201)\n\n\t\tresponse = self.client().delete(\n\t\t\t'/api/v2/orders/1',\n\t\t\theaders={\"x-access-token\": access_token})\n\n\t\tresult = json.loads(response.data)\n\t\tself.assertEqual(response.status_code, 200)\n\t\tself.assertEqual(result[\"message\"], \"Order deleted succesfully\")", "def test_delete_note(self):\n pass", "def test_delete_object(self):\n u = self.d.user('example')\n u.delete()\n\n method, url, data, headers = self.d._fetcher.last_request\n self.assertEqual(method, 'DELETE')\n self.assertEqual(url, '/users/example')", "def delete_notification():\r\n name = request.args.get('notif')\r\n logging.info(\"Notification deleted in delete_notification(): \" + name)\r\n for notif in notifications:\r\n if notif['title'] == name:\r\n notifications.remove(notif)", "def _notify_delete(self, cuds_object):", "def test_order_cannot_be_deleted_if_not_owner(self):\n\n\t\tres = self.login_user()\n\t\tress = self.login_admin_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\t\ta_access_token = json.loads(ress.data.decode())['access_token']\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/orders',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\tself.order_data) , content_type = 'application/json')\n\t\tself.assertEqual(response.status_code, 201)\n\n\t\tresponse = 
self.client().delete(\n\t\t\t'/api/v2/orders/1',\n\t\t\theaders={\"x-access-token\": a_access_token})\n\n\t\tresult = json.loads(response.data)\n\t\tself.assertEqual(response.status_code, 401)\n\t\tself.assertEqual(result[\"message\"], \n\t\t\t\"Not authorized to perform this function!\")", "def test_delete_owner(self):\n self.assertEqual(RoleAssignment.objects.count(), 3)\n url = reverse(\n 'projectroles:api_role_destroy',\n kwargs={'roleassignment': self.owner_as.sodar_uuid},\n )\n response = self.request_knox(url, method='DELETE')\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(RoleAssignment.objects.count(), 3)", "def test_registration_delete_inactive(dummy_regform, api_delete, api_post):\n registration = dummy_regform.registrations[0]\n registration.is_deleted = True\n signals.event.registration_deleted.send(registration)\n assert api_delete.call_count == 0\n assert api_post.call_count == 0", "def test_registration_delete_inactive(dummy_regform, api_delete, api_post):\n registration = dummy_regform.registrations[0]\n registration.is_deleted = True\n signals.event.registration_deleted.send(registration)\n assert api_delete.call_count == 0\n assert api_post.call_count == 0", "def test_delete_request_by_owner(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.test_user2_token)\n response = client.post('/api/places/', self.restaurant_data, format='json')\n url = f\"/api/places/{response.data['id']}/\"\n\n response = client.delete(url, format='json')\n self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)", "def test_delete_request_by_non_owner(self):\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION=self.test_user2_token)\n response = client.post('/api/places/', self.restaurant_data, format='json')\n url = f\"/api/places/{response.data['id']}/\"\n\n client.credentials(HTTP_AUTHORIZATION=self.test_user1_token)\n response = client.delete(url, format='json')\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def test_delete_wrong_way_id(self):\n\n url = reverse('notification',\n kwargs={'way_id': 38987, 'notification_id': self.notification.id})\n response = self.client.delete(url)\n\n self.assertEqual(response.status_code, 400)" ]
[ "0.72411215", "0.69432837", "0.6825985", "0.68125653", "0.6811813", "0.6811813", "0.66271573", "0.66271573", "0.6599707", "0.65833205", "0.6556676", "0.6446615", "0.64434767", "0.64428777", "0.64428586", "0.6442772", "0.6441421", "0.6401548", "0.6354622", "0.63353646", "0.6299706", "0.6284152", "0.6267853", "0.6239162", "0.62244993", "0.62079126", "0.62079126", "0.61948586", "0.619137", "0.6179012" ]
0.75843847
0
Method that tests an unsuccessful delete request when DB deletion fails.
def test_error_db_deleting(self):
    url = reverse('notification',
                  kwargs={'way_id': self.notification.way_id, 'notification_id': self.notification.id})
    with mock.patch('notification.views.Notification.delete_by_id') as notification_delete:
        notification_delete.return_value = False
        response = self.client.delete(url)
        self.assertEqual(response.status_code, 400)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_invalid_db_delete(env_setup, env_table, db_delete_test_data, response_test_data):\n test_string = DbManager(SqLiteHelper, {\"db_path\": env_setup, \"master_table\": env_table}) \\\n .processor(db_delete_test_data.get(\"invalid\"))\n assert test_string == response_test_data.get(\"invalid_delete\")", "def test_delete_error(self):\n with self.assertRaises(QiitaDBExecutionError):\n PrepTemplate.delete(1)", "def test_delete_fail(self):\n self.user_api()\n self.base.metadata.create_all(self.engine)\n people = self.provision_users()\n p = {'id': people[2].id}\n self.delete('user', 403, params=p)", "def test_delete__invalid(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with register.app.test_request_context(self.request_path):\n with self.assertRaises(werkzeug.exceptions.BadRequest):\n self.handler.do_delete(None)\n\n revised_feature = models.Feature.get_by_id(self.feature_id)\n self.assertFalse(revised_feature.deleted)", "def test_validate_delete(client):\n response = client.delete('/user/1')\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def test_handle_delete_lookup_error(self):\n self.db.query.return_value = []\n self.assertTupleEqual(self.testcommand.handle(\"team delete brs\", user),\n (self.testcommand.lookup_error, 200))\n self.db.delete.assert_not_called()\n self.gh.org_delete_team.assert_not_called()", "def test_delete_unkonwn_id_error(self):\n with self.assertRaises(QiitaDBUnknownIDError):\n SampleTemplate.delete(5)", "def test_delete(self):\n pass", "def test_delete_failure(self):\r\n problem_url_name = 'H1P1'\r\n location = InstructorTaskModuleTestCase.problem_location(problem_url_name)\r\n self.define_option_problem(problem_url_name)\r\n self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])\r\n\r\n expected_message = \"bad things happened\"\r\n with patch('courseware.models.StudentModule.delete') as mock_delete:\r\n mock_delete.side_effect = ZeroDivisionError(expected_message)\r\n instructor_task = self.delete_problem_state('instructor', location)\r\n self._assert_task_failure(instructor_task.id, 'delete_problem_state', problem_url_name, expected_message)", "def test_delete_not_found(self):\n resp = self.client.delete(\n \"/tracking?repo=not_found1&branch=not_found1\", content_type=\"application/json\", headers=self.auth\n )\n resp_dict = json.loads(resp.data)\n self.assertIn(\"code\", resp_dict, msg=\"Error in data format return\")\n self.assertEqual(ResponseCode.DELETE_DB_NOT_FOUND, resp_dict.get(\"code\"), msg=\"Error in status code return\")", "def test_db_delete(env_setup, env_table, db_delete_test_data, response_test_data):\n test_string = DbManager(SqLiteHelper, {\"db_path\": env_setup, \"master_table\": env_table}) \\\n .processor(db_delete_test_data.get(\"valid\"))\n assert test_string == response_test_data.get(\"valid_delete\")", "def test_delete_unexpected_error(self, requests_mock, capsys):\n requests_mock.delete(data_url, exc=ConnectionError)\n with pytest.raises(ConnectionError):\n r = operations.delete(data_url)\n assert 'Unexpected error when connecting to' in capsys.readouterr().out", "def test_delete__not_found(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with register.app.test_request_context(self.request_path):\n with self.assertRaises(werkzeug.exceptions.NotFound):\n self.handler.do_delete(self.feature_id + 1)\n\n revised_feature = models.Feature.get_by_id(self.feature_id)\n self.assertFalse(revised_feature.deleted)", "def 
test_delete_unkonwn_id_error(self):\n with self.assertRaises(QiitaDBUnknownIDError):\n PrepTemplate.delete(5)", "def test_delete_item_incorrect_id(test_client):\n\n response = test_client.delete(GOOD_ITEM_URL)\n\n data = json.loads(response.get_data())\n\n assert response.status_code == 404\n assert data['error'] == app.NOT_FOUND", "def test_delete__invalid(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with test_app.test_request_context(self.request_path):\n with self.assertRaises(werkzeug.exceptions.BadRequest):\n self.handler.do_delete()\n\n unrevised_appuser = user_models.AppUser.get_by_id(self.appuser_id)\n self.assertEqual('[email protected]', unrevised_appuser.email)", "def testDeleteIsDenied(self):\n error = self.assertRaises(PermissionDeniedError, self.users.delete,\n [u'user'])\n self.assertEqual(self.user.username, error.username)\n self.assertEqual([(u'user', Operation.DELETE_USER)],\n error.pathsAndOperations)", "def testDeleteIsDenied(self):\n error = self.assertRaises(PermissionDeniedError, self.users.delete,\n [u'user'])\n self.assertEqual(self.user.username, error.username)\n self.assertEqual([(u'user', Operation.DELETE_USER)],\n error.pathsAndOperations)", "def test_delete_error(self):\n r = mock.Mock(spec=requests.Response)\n r.status_code = 201\n r.content = '{\"it\\'s all\": \"ok\"}'\n\n f = Fitbit(**self.client_kwargs)\n f.client._request = lambda *args, **kwargs: r\n self.assertRaises(exceptions.DeleteError, f.delete_activities, 12345)", "def test_delete_car_invalid_id():\n response = client.delete(\"/11111\")\n assert response.status_code == STATUS_NOT_FOUND", "def test_delete_run(self):\n pass", "def test_delete_review_fail(self):\n client = Client()\n response = client.delete('/api/review/1/')\n self.assertEqual(response.status_code, 401)\n client.login(username='TEST_USER_2',\n email='TEST_EMAIL_2', password='TEST_PW_2')\n review1_id = Review.objects.get(content='TEST_CONTENT').id\n review2_id = Review.objects.get(content='TEST_CONTENT2').id\n review3_id = Review.objects.get(content='TEST_CONTENT3').id\n no_review_id = review1_id + review2_id + review3_id\n response = client.delete('/api/review/'+str(review1_id)+'/')\n self.assertEqual(response.status_code, 403)\n client.login(username='TEST_USER_1',\n email='TEST_EMAIL_1', password='TEST_PW_1')\n response = client.delete('/api/review/'+str(no_review_id)+'/')\n self.assertEqual(response.status_code, 404)", "def test_delete__valid(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with register.app.test_request_context(self.request_path):\n actual_json = self.handler.do_delete(self.feature_id)\n self.assertEqual({'message': 'Done'}, actual_json)\n\n revised_feature = models.Feature.get_by_id(self.feature_id)\n self.assertTrue(revised_feature.deleted)", "def test_DELETE3(self):\n r = requests.delete(self.address + \"/cars/42\")\n self.assertEqual(r.status_code, 400)", "def delete_fail(self, id_, message):\n rv = self.post((id_, self.delete_url), dict(post='yes'))\n assert in_response(rv, message)\n assert self.verify_object({self.id_field: id_})\n return rv", "def test_DELETE4(self):\n r = requests.delete(self.address + \"/car/\")\n self.assertEqual(r.status_code, 400)", "def test_handle_delete_github_error(self):\n self.db.query.side_effect = GithubAPIException(\"error\")\n self.assertTupleEqual(self.testcommand.handle(\"team delete brs\", user),\n (\"Team delete was unsuccessful with \"\n \"the following error: \"\n \"error\", 200))\n self.db.delete.assert_not_called()\n 
self.gh.org_delete_team.assert_not_called()", "async def test_delete_invalid(database,valid_data):\n test_valid_insert(database,valid_data)\n N = 10\n for idx in range(N+1,N*2):\n try:\n await database.delete(_id=idx,user_id=idx)\n assert False\n except:\n assert True\n await database.close_pool()", "def test_delete(self):\n self.client.force_authenticate(user=self.admin)\n\n response = self.client.delete(\n reverse(\n 'retreat:waitqueuenotification-detail',\n kwargs={'pk': 1},\n ),\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_405_METHOD_NOT_ALLOWED\n )", "def test_delete__not_found(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with test_app.test_request_context(self.request_path):\n with self.assertRaises(werkzeug.exceptions.NotFound):\n self.handler.do_delete(account_id=self.appuser_id + 1)\n\n unrevised_appuser = user_models.AppUser.get_by_id(self.appuser_id)\n self.assertEqual('[email protected]', unrevised_appuser.email)" ]
[ "0.7845363", "0.7661133", "0.7618815", "0.7579205", "0.74916613", "0.74349666", "0.73349035", "0.7333713", "0.72599447", "0.72522247", "0.7246481", "0.722398", "0.72110033", "0.71876705", "0.7169879", "0.7162017", "0.7146241", "0.7146241", "0.70829624", "0.707316", "0.70727974", "0.7069416", "0.70672816", "0.70672065", "0.7036625", "0.7028593", "0.70278496", "0.7026495", "0.70241857", "0.7017002" ]
0.80490714
0
Function to get the coding of a text. text: text to inspect (string). Returns: coding string.
def get_coding(text):
    for line in text.splitlines()[:2]:
        result = CODING_RE.search(line)
        if result:
            return result.group(1)
    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def coding(text: str) -> str:\n text = list(itertools.chain(text.upper()))\n coded_text = []\n for letter in text:\n completed = False\n for coding in Encoder.__ALPHABET:\n if coding.code == letter:\n completed = True\n coded_text.append(coding.encode)\n if completed:\n break\n if not completed:\n coded_text.append(letter)\n coded_string = \"\".join(coded_text)\n return coded_string", "def applyCoder(text, coder):\n ### TODO\n ans=''\n for let in text:\n ans+=coder.get(let, let)\n return ans", "def applyCoder(text, coder):\n \n string1=''\n b=coder\n i=0\n while i<len(text):\n \n if text[i]==\" \" or text[i]==\"\\n\" or text[i] in string.digits or text[i] in string.punctuation:\n m=text[i]\n \n string1=string1+str(m)\n \n i=i+1\n \n else:\n\n m=b[text[i]]\n \n string1=string1+str(m)\n \n i=i+1\n\n return string1", "def applyCoder(text, coder):\n res=''\n for ch in text:\n if ch in string.ascii_lowercase:\n res = res + coder[ch]\n elif ch in string.ascii_uppercase:\n res = res + coder[ch]\n else:\n res = res + ch\n return res", "def applyCoder(text, coder):\n out = ''\n for i in text:\n if i in coder.keys():\n out += coder[i]\n else:\n out += i\n return out", "def get_encoded_text(self, text):\n\t\tencoded_text = \"\"\n\t\tfor character in text:\n\t\t\tencoded_text += self.codes[character]\n\t\treturn encoded_text", "def apply_coder(text, coder):\n ## TODO.\n encoded_text = ''\n for letter in text:\n if letter in coder:\n encoded_text += coder[letter]\n else:\n encoded_text += letter\n return encoded_text", "def extractCode(text):\n soup = BeautifulSoup(text, 'html.parser')\n code_str = str()\n for code in soup.find_all('code'):\n code_without_tags = code.get_text()\n code_str += code_without_tags\n return code_str", "def apply_coder(text, coder):\n ### TODO.", "def decode(self, text):\r\n\r\n decoded = \"\".join([self.chars[int(x)] for x in text if x > -1])\r\n decoded = self.remove_tokens(decoded)\r\n decoded = pp.text_standardize(decoded)\r\n\r\n return decoded", "def applyCoder(text, coder):\n ciphertext = str()\n #for each letter in the text find it, and grab shifted letter\n for letter in text:\n ciphertext += coder.get(letter, letter)\n return ciphertext", "def CODE(string):\n return ord(string[0])", "def extractText(text):\n soup = BeautifulSoup(text, 'html.parser')\n for code in soup.find_all('code'):\n code.decompose()\n return soup.get_text()", "def getText():", "def getText():", "def getText():", "def getText():", "def getText():", "def parseCodeLine(self, text):\n\t\tcodeRegex = re.compile(r\"^\\[0x([0-9a-f]{8})\\]\\t0x([0-9a-f]{8}) (.+)$\")\n\t\t# attempt to match the code\n\t\tm = codeRegex.match(text)\n\t\t# If no match found, return None.\n\t\tif (m is None): return None\n\t\tresult = {}\n\t\tresult['encoded_instruction'] = int(m.group(2),16)\n\t\tresult['address'] = int(m.group(1),16)\n\t\t# Do we have a comment?\n\t\tinstr = m.group(3)\n\t\tif (\";\" in instr):\n\t\t\tpos = instr.index(\";\")\n\t\t\tcomment = instr[pos+1:].strip()\n\t\t\tinstr = instr[:pos].strip()\n\t\telse:\n\t\t\tinstr = instr.strip()\n\t\t\tcomment = \"\"\n\t\tresult['comment'], result['instruction'] = comment, instr\n\t\treturn result", "def encoding(text: str) -> str:\n text = [text[i:i + 3] for i in range(0, len(text), 3)]\n encoded_text = []\n for letter in text:\n completed = False\n for coding in Encoder.__ALPHABET:\n if coding.encode == letter:\n completed = True\n encoded_text.append(coding.code)\n if completed:\n break\n if not completed:\n encoded_text.append(letter)\n encoded_string = 
\"\".join(encoded_text)\n return encoded_string.lower()", "def extract_code_text(bs, index):\r\n texts = ''\r\n code = ''\r\n try:\r\n for a in bs.find_all('div', class_='post')[index].find_all('p'):\r\n texts += a.text\r\n for a in bs.find_all('div', class_='post')[index].find_all('code'):\r\n code += a.text\r\n except:\r\n pass\r\n return texts, code", "def read_codes(self, filename=\"static/codes.txt\"):\n with open(filename, \"r\") as f:\n contents = f.read().splitlines()\n code = contents[0]\n \n return code", "def check(self, text):\n p = self.d\n i = 0\n j = 0\n result = []\n ln = len(text)\n while i + j < ln:\n t = text[i + j].lower()\n # print i,j,hex(ord(t))\n if not (t in p):\n j = 0\n i += 1\n p = self.d\n continue\n p = p[t]\n j += 1\n # print p,i,j\n if chr(11) in p:\n p = self.d\n result.append(text[i:i + j])\n i = i + j\n j = 0\n return result", "def syntax_text():", "def obtain_text():\n pass", "def preprocess(self, text):\r\n return text", "def gcode_text(self):\n return os.linesep.join(map(str, self.gcode))", "def code(self):\n return self.language()", "def csi_to_conky(match: re.Match) -> str:\n # Convert the string of code;code;code to a list of ints\n try:\n codes= [0 if _=='' else int(_) for _ in match.group(1).split(';')]\n except IndexError: \n print('csi_to_conky called with no group match', file=sys.stderr)\n return match.group(0) # if no group has matched return the string as is\n except ValueError as err: # problem converting to int\n print(f'csi_to_conky: {err}', file=sys.stderr)\n return match.group(0)\n \n # Initialize the string to be returned\n result=''\n\n # consume the list one code at a time, first to last\n while len(codes)>0:\n code=codes.pop(0)\n if code==0: # Reset\n # Clear the string and init it with default color and font\n result = '${color}${font}'\n continue\n \n elif code==1: # Bold\n result += '${font DejaVu Sans Mono:style=bold}'\n continue\n \n elif code>29 and code<38: # Set foreground color (0 to 7)\n result += conky_set_fg(code -30)\n continue\n \n elif code==38: # Advanced ANSI\n try:\n type=codes.pop(0)\n if type==2: # ESC[38;2;R;G;Bm => TODO\n # for now just consume the next 3 values in the list\n del codes[0:3]\n continue\n elif type==5: # ESC[38;5;xxm \n result += eightbit_to_conky(codes.pop(0))\n continue\n else:\n raise ValueError(f'Improper value {type} after code 38')\n except (IndexError, ValueError) as err:\n print(f'csi_to_conky: {err} while parsing advanced ANSI sequence {code};{type}', file=sys.stderr)\n continue\n \n elif code==39: # default fg\n result +='${color}'\n continue\n \n else:\n print(f'code {code} not implemented', file=sys.stderr)\n continue \n\n return result", "def from_trace_codes_text(codes_text: str) -> Mapping[int, str]:\n return {int(s[0], 16): s[1] for s in map(lambda l: l.split(), codes_text.splitlines())}" ]
[ "0.707069", "0.6810811", "0.6544954", "0.65068567", "0.65035826", "0.64848405", "0.6453819", "0.64496917", "0.6330471", "0.6172013", "0.6076432", "0.6043792", "0.59862036", "0.5981213", "0.5981213", "0.5981213", "0.5981213", "0.5981213", "0.59120387", "0.59068704", "0.5875831", "0.5850347", "0.5838844", "0.5828088", "0.57775134", "0.5738429", "0.5735371", "0.5697345", "0.5648758", "0.5625342" ]
0.805322
0
Function to encode a text. text: text to encode (string). orig_coding: type of the original coding (string). Returns: encoded text and encoding.
def encode(text, orig_coding):
    if orig_coding == 'utf-8-bom':
        return BOM_UTF8 + text.encode("utf-8"), 'utf-8-bom'
    # Try declared coding spec
    coding = get_coding(text)
    if coding:
        try:
            return text.encode(coding), coding
        except (UnicodeError, LookupError):
            raise RuntimeError("Incorrect encoding (%s)" % coding)
    if orig_coding and orig_coding.endswith('-default'):
        coding = orig_coding.replace("-default", "")
        try:
            return text.encode(coding), coding
        except (UnicodeError, LookupError):
            pass
    if orig_coding == 'utf-8-guessed':
        return text.encode('utf-8'), 'utf-8'
    # Try saving as ASCII
    try:
        return text.encode('ascii'), 'ascii'
    except UnicodeError:
        pass
    # Save as UTF-8 without BOM
    return text.encode('utf-8'), 'utf-8'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode(self, text):", "def encoding(text: str) -> str:\n text = [text[i:i + 3] for i in range(0, len(text), 3)]\n encoded_text = []\n for letter in text:\n completed = False\n for coding in Encoder.__ALPHABET:\n if coding.encode == letter:\n completed = True\n encoded_text.append(coding.code)\n if completed:\n break\n if not completed:\n encoded_text.append(letter)\n encoded_string = \"\".join(encoded_text)\n return encoded_string.lower()", "def apply_coder(text, coder):\n ## TODO.\n encoded_text = ''\n for letter in text:\n if letter in coder:\n encoded_text += coder[letter]\n else:\n encoded_text += letter\n return encoded_text", "def get_encoded_text(self, text):\n\t\tencoded_text = \"\"\n\t\tfor character in text:\n\t\t\tencoded_text += self.codes[character]\n\t\treturn encoded_text", "def encodetext(self, text, addstart=False, fixlength=None):\n # Tokenize text\n tokens = self.tokenizer.transform(text)\n return self.encodetokens(tokens, addstart, fixlength)", "def encodeText(text):\r\n#\treturn repr( quote_plus(text.replace(\"'\", '\"')) )\r\n\ttry:\r\n\t\treturn repr( quote_plus(text.replace(\"'\", '\"').encode('utf-8')) )\r\n\texcept:\r\n\t\tlogError(\"encodeText()\")\r\n\treturn repr(text.replace(\"'\", '\"'))", "def enc(text):\n if isinstance(text, str):\n return unicode(text, 'utf-8') # TODO: fix in Python 3\n elif isinstance(text, unicode):\n return text.encode('utf-8')\n else:\n raise Exception(\"Unsupported encode format.\")", "def coding(text: str) -> str:\n text = list(itertools.chain(text.upper()))\n coded_text = []\n for letter in text:\n completed = False\n for coding in Encoder.__ALPHABET:\n if coding.code == letter:\n completed = True\n coded_text.append(coding.encode)\n if completed:\n break\n if not completed:\n coded_text.append(letter)\n coded_string = \"\".join(coded_text)\n return coded_string", "def apply_coder(text, coder):\n ### TODO.", "def encode(self, decoded):", "def encode(self, strs):", "def encode(self, strs):", "def encode(music, encoding):\n # Extract notes\n notes = extract_notes(music, encoding[\"resolution\"])\n\n # Encode the notes\n codes = encode_notes(notes, encoding)\n\n return codes", "async def encode(ctx, text: Option(str, \"Text to encode in brainfuck\")):\n encoded = bot.brainfuck.encode(text)\n await send_code(ctx, encoded.code, lang=\"bf\")", "def convert( self, text ):\n if self.input_codec != self.output_codec:\n return unicode( text, self.input_codec, 'ignore' ).encode( self.output_codec, 'ignore' )\n else:\n return text", "def encode(text: str) -> str:\n b: bytes = text.encode()\n encoded: bytes = base64.b64encode(b)\n return encoded.decode()", "def applyCoder(text, coder):\n res=''\n for ch in text:\n if ch in string.ascii_lowercase:\n res = res + coder[ch]\n elif ch in string.ascii_uppercase:\n res = res + coder[ch]\n else:\n res = res + ch\n return res", "def encode(txt):\n encode_txt = \"\"\n size = len(txt)\n i = 0\n while i < size:\n current_char = txt[i]\n repeat_symbols = 1\n for j in range(i + 1, size):\n if txt[i] == txt[j]:\n repeat_symbols = repeat_symbols + 1\n else:\n break\n if repeat_symbols == 1:\n encode_txt = encode_txt + txt[i]\n i = i + 1\n else:\n encode_txt = encode_txt + current_char + str(repeat_symbols)\n i = i + repeat_symbols\n return encode_txt", "def xcode(text, encoding=\"utf8\", mode=\"ignore\"):\n return text.encode(encoding, mode) if isinstance(text, str) else text", "def _encode_code(self, text):\r\n replacements = [\r\n # Encode all ampersands; HTML entities are not\r\n # entities within a 
Markdown code span.\r\n ('&', '&amp;'),\r\n # Do the angle bracket song and dance:\r\n ('<', '&lt;'),\r\n ('>', '&gt;'),\r\n ]\r\n for before, after in replacements:\r\n text = text.replace(before, after)\r\n hashed = _hash_text(text)\r\n self._escape_table[text] = hashed\r\n return hashed", "def applyCoder(text, coder):\n ciphertext = str()\n #for each letter in the text find it, and grab shifted letter\n for letter in text:\n ciphertext += coder.get(letter, letter)\n return ciphertext", "def base64_encoder(cls, text, encoding: str = 'utf-8', base64_encoding_map='default') -> str:\n\t\tBASE64MAP = cls.BASE64MAP_dict.get(base64_encoding_map)\n\t\tif not BASE64MAP:\n\t\t\traise ValueError('Invalid base64_encoding_map: only [\"default\", \"RFC3501\", \"RFC4648\"] are allowed')\n\t\ttext_encoded = text.encode(encoding) if type(text) is str else text\n\t\tstr_encoded = ''\n\t\ttext_in_block_of_three = (\n\t\t\t''.join([bin(character).lstrip('0b').zfill(8) for character in text_encoded[i: i + 3]])\n\t\t\tfor i in range(0, len(text_encoded), 3))\n\t\ttext_in_base64_block = (['00' + block[index:index + 6] for index in range(0, 24, 6)] for block in\n\t\t\t\t\t\t\t\ttext_in_block_of_three)\n\t\tfor b64block in text_in_base64_block:\n\t\t\tfor character in b64block:\n\t\t\t\tc_length = len(character)\n\n\t\t\t\tif c_length == 6:\n\t\t\t\t\tcharacter += '00'\n\t\t\t\telif c_length == 4:\n\t\t\t\t\tcharacter += '0000'\n\t\t\t\telif c_length == 2:\n\t\t\t\t\tcharacter += '01' + '0' * 6\n\n\t\t\t\tcharacterToAppend = BASE64MAP[int(character, base=2)]\n\t\t\t\tif characterToAppend == '=' and base64_encoding_map == 'RFC3501':\n\t\t\t\t\tcharacterToAppend = ''\n\t\t\t\tstr_encoded += characterToAppend\n\n\t\treturn str_encoded", "def applyCoder(text, coder):\n ### TODO\n ans=''\n for let in text:\n ans+=coder.get(let, let)\n return ans", "def encode(text: str) -> str:\n reversed_text = \"\".join(char for char in text[-1::-1])\n return reversed_text", "def encodeString(*args, **kwargs)->AnyStr:\n pass", "def applyCoder(text, coder):\n out = ''\n for i in text:\n if i in coder.keys():\n out += coder[i]\n else:\n out += i\n return out", "def encode_data(self, data):\n if self.unit == \"char\":\n data = self.char_encoding(data)\n elif self.unit == \"char-ngram\":\n data = self.ngram_encoding(data)\n elif self.unit == \"morpheme\" or self.unit == \"oracle\":\n data = self.morpheme_encoding(data)\n else:\n data = self.data_to_word_ids(data, False)\n return data", "def encode(text, password):\r\n\tstep_index = 0\r\n\tencoded_text = ''\r\n\tfor letter in text:\r\n\t\tencoded_text += next_letter(letter, to_int(password[step_index]))\r\n\t\tstep_index += 1\r\n\t\tif step_index > len(password)-1:\r\n\t\t\tstep_index = 0\r\n\treturn encoded_text", "def body_encode( self, text, convert=1 ):\n if convert:\n text = self.convert(text)\n # 7bit/8bit encodings return the string unchanged (module conversions)\n if self.body_encoding is BASE64:\n return base64_encode(text)\n elif self.body_encoding is QP:\n return quopri_encode(text)\n else:\n return text", "def encode(encoding_scheme: str, val: str) -> str:\n if encoding_scheme == \"url\":\n return url_quote(val)\n\n # base64 utf8\n if encoding_scheme == \"base64\":\n return base64.b64encode(val.encode(\"utf8\")).decode(\"utf-8\")\n\n # returns original val if encoding_scheme not recognized\n return val" ]
[ "0.78429985", "0.73863465", "0.72532046", "0.7069336", "0.68364793", "0.6779326", "0.66536885", "0.66491073", "0.6570643", "0.6511079", "0.6476293", "0.6476293", "0.64474636", "0.6437557", "0.6430884", "0.6375071", "0.6368134", "0.6325877", "0.6323018", "0.62958544", "0.62450594", "0.62114304", "0.62021583", "0.6194901", "0.61619216", "0.61569196", "0.6156087", "0.614703", "0.6138827", "0.6116088" ]
0.7889577
0
Write 'text' to file ('filename') assuming 'encoding'. Return the (possibly new) encoding.
def write(text, filename, encoding='utf-8', mode='wb'):
    text, encoding = encode(text, encoding)
    with open(filename, mode) as textfile:
        textfile.write(text)
    return encoding
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode=\"w\", encoding=\"utf-8\") as m:\n return m.write(text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode='w', encoding=\"utf-8\") as myFile:\n chars_written = myFile.write(text)\n return chars_written", "def write_file(filename=\"\", text=\"\"):\n with open(filename, 'w', encoding='utf-8') as f:\n return f.write(text)", "def writeFile(fileName, text):\n with open(fileName, 'w', encoding='utf-8') as f:\n f.write(text)", "def save_txt(filename, data, encoding):\n with open(filename, \"w\") as f:\n f.write(dump(data, encoding))", "def write_text(file, text):\n\n with open(file, \"w\") as fin:\n fin.write(text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, \"w\") as f:\n return(f.write(text))", "def write_txt(data, out_path, type=\"w\"):\n with open(out_path, type) as f:\n f.write(data.encode(\"utf-8\"))", "def write(file, text):\n with open(file, 'w') as f:\n f.write(text)", "def write_text_tofile(text):\n try:\n with open(os.path.join(script_dir, 'output_file.txt'), 'a') as output:\n output.write(text + '\\n')\n except:\n pass", "def write_file(filename=\"\", text=\"\"):\n with open(filename, 'w') as f:\n return f.write(text)", "def write_file(filename=\"\", text=\"\"):\n if filename:\n with open(filename, mode='w', encoding='utf-8') as data:\n nb_written = data.write(text)\n return nb_written", "def strToFile(text, filename):\n output = open(filename,\"w\")\n output.write(text)\n output.close()", "def strToFile(text, filename):\n output = open(filename,\"w\")\n output.write(text)\n output.close()", "def write_file(filename=\"\", text=\"\"):\n with open(filename, 'w') as fl:\n wr = fl.write(text)\n return wr", "def txt_file_writer(path):\n return open(path, 'w', encoding=cfg.ENCODING)", "def save_file(self, file_name, text):\n\n with open(file_name, 'w') as content_file:\n content = content_file.write(text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode=\"w\", encoding=\"utf-8\") as f:\n f.write(text)\n\n return len(text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, 'w', encoding=\"utf-8\") as file:\n nb_characters = file.write(text)\n return nb_characters", "def store_file(text: str, file_path: str) -> None:\n with open(file=file_path, mode='w', encoding='utf8') as f:\n f.write(text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode='w', encoding='utf-8') as a_file:\n i = 0\n for char in text:\n a_file.write(char)\n i += 1\n return i", "def writeFile(self, name, text):\n\t\ttry:\n\t\t\tf = open(name, 'w')\n\t\t\tf.write (text)\n\t\t\tf.close()\n\t\texcept IOError:\n\t\t\tprint \"Error writing file %s\" % name", "def save_file(path, text):\n with path.open(mode='w') as f_stream:\n f_stream.write(text)", "def append_write(filename=\"\", text=\"\"):\n with open(filename, 'a', encoding=\"UTF8\") as f:\n return f.write(str(text))", "def writeFile(file_name, file_text, mode='w+'):\n with open(file_name, mode) as file:\n file.write(file_text)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode='w', encoding='utf-8') as f:\n f.write(text)\n with open(filename, encoding='utf-8') as f:\n chars_wrote = 0\n for line in f:\n for chrs in line:\n chars_wrote += 1\n return chars_wrote", "def write_text_file(path: Path, data: str) -> None:\n path.write_text(data, encoding='utf-8')", "def writeText(outputText, fileName):\n with open(fileName,\"w\") as fileObject:\n fileObject.write(outputText)", "def 
write(self, text):\n self.stream.write(text.encode(self.encoding, 'replace'))", "def writeToFile(outputFile, unicode_text):\n fp = outputFile\n # workaround problem if caller gives byte string instead\n unicode_text = safe_unicode(unicode_text)\n utf8_text = unicode_text.encode('utf-8')\n fp.write(utf8_text)\n #fp.close()" ]
[ "0.7557955", "0.7544506", "0.7533237", "0.75145644", "0.7479202", "0.7241678", "0.70433176", "0.70286334", "0.69647485", "0.69500077", "0.6922134", "0.6919635", "0.6918923", "0.6918923", "0.6891338", "0.6771927", "0.67655516", "0.6725522", "0.6717722", "0.671073", "0.6706911", "0.6703274", "0.6677338", "0.667236", "0.66619986", "0.6659957", "0.6621047", "0.65968955", "0.6578741", "0.65730125" ]
0.87326086
0
Write 'lines' to file ('filename') assuming 'encoding'. Return the (possibly new) encoding.
def writelines(lines, filename, encoding='utf-8', mode='wb'):
    return write(os.linesep.join(lines), filename, encoding, mode)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_file(filepath, lines):\n with open(filepath, 'w', encoding='utf-8') as f:\n f.write(''.join([line.replace('\\r\\n', '\\n') for line in lines]))", "def lines_to_file(file_name: str, write_dir: str, lines: Sequence[str]):\n with open(os.path.join(write_dir, file_name), \"w\", encoding=\"utf-8\") as f:\n for l in lines:\n f.write(f\"{l}\\n\")", "def write_lines(filename, lines, verbose=True):\n with open(filename, 'w', encoding=\"utf-8\") as fp:\n for line in lines:\n print(line, file=fp)\n if verbose:\n print(\"Done writing to file %s.\" % filename)", "def write_file(filename, content):\n codecs.open(filename, \"w\", encoding='utf-8').writelines(content)", "def write_lines_to_file(filename, lines):\n with open(filename, 'w') as fp:\n for line in lines:\n fp.write(\"%s\\n\" % line.strip('\\n'))", "def write(text, filename, encoding='utf-8', mode='wb'):\r\n text, encoding = encode(text, encoding)\r\n with open(filename, mode) as textfile:\r\n textfile.write(text)\r\n return encoding", "def write_file(filename, contents):\n try:\n f = open(filename, \"w\")\n for line in contents:\n f.write(line + \"\\n\")\n finally:\n f.close()", "def write_lines(file_lines, new_file):\n with open(new_file, 'w') as f:\n for l in file_lines:\n f.write(l)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode='w', encoding='utf-8') as f:\n f.write(text)\n with open(filename, encoding='utf-8') as f:\n chars_wrote = 0\n for line in f:\n for chrs in line:\n chars_wrote += 1\n return chars_wrote", "def save_review_to_file(lines, filename):\n data = '\\n'.join(lines)\n file = open(filename, 'w')\n file.write(data)\n file.close()", "def write_file(filename, contents, encoding = None):\n if not encoding:\n encoding = 'utf-8'\n\n import io\n fname = from_posix(filename)\n with io.open(fname, mode = 'w', encoding = encoding) as handle:\n handle.write(contents)", "def writeToFile(fileName, content, encoding = \"UTF-8\"):\n file = io.open(fileName, mode = \"w\", encoding = encoding)\n file.write(content)\n file.close()", "def __correct_encoding(self, encode, filename):\n if encode == 'None' or encode == self.__tencoding:\n return\n buffname = '~old' + filename\n self.__os.rename(filename, buffname)\n with open(buffname, 'r', encoding=self.__tencoding) as fr:\n with open(filename, 'w', encoding=self.__tencoding) as fw:\n for line in fr:\n fw.write(line[:-1] + '\\r\\n')\n self.__os.remove(buffname)", "def putTextFileContents(filename, contents, encoding=None):\n\n def _writeContents(output_file):\n if isinstance(contents, basestring):\n print(contents, file=output_file, end=\"\")\n else:\n for line in contents:\n print(line, file=output_file)\n\n with withFileLock(\"writing file %s\" % filename):\n with openTextFile(filename, \"w\", encoding=encoding) as output_file:\n _writeContents(output_file)", "def save_txt(filename, data, encoding):\n with open(filename, \"w\") as f:\n f.write(dump(data, encoding))", "def write_file(writer, filename):\n for line in txt_line_iterator(filename):\n writer.write(line)\n writer.write(\"\\n\")", "def write_lines(list_of_lines, file):\r\n for i in range(0, len(list_of_lines)):\r\n file.write(list_of_lines[i] + b\"\\n\")", "def write_file(content, file_path, mode='w', encoding='utf-8'):\n with codecs.open(file_path, mode, encoding=encoding) as fid:\n fid.write(content)", "def save_lines(lines, file_path):\n lines = list(map(lambda x: f'{x}\\n', lines))\n\n with open(file_path, 'w') as f:\n f.writelines(lines)", "def write_gzfile(lines, f):\n out = gzip.open(f, 
'wb')\n for line in lines:\n out.write('{}\\n'.format(line))\n out.close()", "def write(self, filename, chars_per_line=70):\n tofile_with_line_sep(self._dna, filename, chars_per_line)", "def _write_to_file(self):\n with open(self.filename + \".asm\", \"w+\") as file:\n file.writelines(\n [\"\\n\" + l if p != 0 else l for p, l in enumerate(self.lines)]\n )", "def _write_to_file(self):\n with open(self.filename + \".ir\", \"w+\") as file:\n file.writelines(\n [\"\\n\" + l if p != 0 else l for p, l in enumerate(self.lines)]\n )", "def txt_file_writer(path):\n return open(path, 'w', encoding=cfg.ENCODING)", "def writeFile(fileName, text):\n with open(fileName, 'w', encoding='utf-8') as f:\n f.write(text)", "async def writelines(self, lines):\n # first check if the file is binary or not\n if 'b' in self._mode:\n raise APIException(\n \"writelines on a binary file is not permitted: {}\".format(\n self._uri)\n )\n # write all but the last line with a line break\n for l in lines:\n await self.write((l+\"\\n\").encode('utf-8'))\n return True", "def write_txt(data, out_path, type=\"w\"):\n with open(out_path, type) as f:\n f.write(data.encode(\"utf-8\"))", "def write_file(a_file, lines):\r\n return append_file(a_file, lines, append=False)", "def write_file(a_file, lines):\r\n return append_file(a_file, lines, append=False)", "def write_file(filename=\"\", text=\"\"):\n with open(filename, mode='w', encoding='utf-8') as a_file:\n i = 0\n for char in text:\n a_file.write(char)\n i += 1\n return i" ]
[ "0.6922916", "0.66539276", "0.6571761", "0.65292937", "0.642268", "0.62827504", "0.60700613", "0.6031235", "0.59540445", "0.59286505", "0.59032005", "0.5897087", "0.5857801", "0.58426356", "0.58392614", "0.57965297", "0.57942283", "0.57908106", "0.5772628", "0.5734947", "0.5722258", "0.5710143", "0.569273", "0.5676538", "0.5609655", "0.5608046", "0.5604351", "0.55937654", "0.55937654", "0.5588106" ]
0.7489392
0
Read text from file ('filename'). Return text and encoding.
def read(filename, encoding='utf-8'):
    # decode() detects the actual encoding from the raw bytes
    text, encoding = decode(open(filename, 'rb').read())
    return text, encoding
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _read(filename, encodings=['ascii', 'utf-8', 'utf-16', 'latin-1']):\n text = None\n\n for encoding in encodings:\n try:\n f = open(filename, encoding=encoding)\n text = f.read()\n f.close()\n except UnicodeDecodeError:\n f.close()\n except UnicodeError:\n f.close()\n except FileNotFoundError:\n raise FileNotFoundError(\"Could not open file.\")\n\n if not text:\n raise UnicodeError(filename)\n\n return text", "def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()", "def read_file(filename):\n with codecs.open(filename, 'r', 'utf8') as f:\n return f.read()", "def read_from_file(filename):\n\twith open(filename, 'r') as myfile:\n\t\ttext=myfile.read()\n\treturn text", "def read_text_file(str_name_file: str):\n content: str = ''\n with open(str_name_file, mode=\"r\", encoding='utf-8') as file:\n print(\"file being read: \" + str_name_file + \"\\n\")\n content = file.read()\n return content", "def get_text(filename):\n with open(filename, 'r', encoding='utf-8') as file:\n file_text = file.read()\n return file_text", "def readFile(fileName):\n with open(fileName, 'r', encoding='utf-8') as f:\n text = f.read()\n return text", "def read_as_text(filename: str) -> str:\n with open(filename) as file_handle:\n txt = file_handle.read()\n return txt", "def readText(fileName):\n fileText = \"\"\n with open(fileName,\"r\") as fileObject:\n fileText = fileObject.read()\n \n return fileText", "def get_file_text(file_name):\n\tf = open(file_name, 'r')\n\ttext = f.read()\n\treturn text", "def read_from_file(file_name):\n with open(file_name, \"rb\") as text_file:\n return text_file.read()", "def load_text_file(file_name: str) -> str:\r\n try:\r\n with open(file_name, encoding='windows-1251') as file_object:\r\n return file_object.read()\r\n except FileNotFoundError as err:\r\n print(f\"{err}\\n\"\r\n f\"Please make sure the file you are trying to open exists!\")\r\n quit()", "def getFileContent(fileName, encoding = \"UTF-8\"):\n file = io.open(fileName, mode = \"r\", encoding = encoding)\n text = file.read()\n file.close()\n return text", "def read_file(filename):\n with codecs.open(os.path.join(here, filename), encoding='utf-8') as f:\n content = f.read()\n return content", "def read_file(name):\n with open(name, 'r') as my_file:\n return my_file.read().encode('utf-8')", "def read(self, filename):\n\t\treturn codecs.open(filename, 'r', 'utf8').read()", "def read_file(file_path, mode='r', encoding=\"utf-8\"):\n with codecs.open(file_path, mode, encoding=encoding) as fp:\n return fp.read().strip()", "def read_text_file(fpath, encoding, read_size=-1, force_unix_linebreaks=True):\n with codecs.open(fpath, encoding=encoding) as f:\n contents = f.read(read_size)\n\n if read_size > 0:\n contents = contents[:read_size]\n\n if force_unix_linebreaks:\n contents = linebreaks_win2unix(contents)\n\n return contents", "def read_file(file_name, enc=\"latin-1\"):\n f = open(file_name, \"r\", encoding=enc)\n content = \"\".join(f.readlines())\n f.close()\n return content", "def getText(filename):\n\n infile = open(filename, 'r')\n text = infile.read()\n infile.close()\n\n return text", "def readfile(filename):\n\n infile = open(filename, \"r\") # open file for reading\n\n # Use Python's file read function to read the file contents\n filetext = infile.read().splitlines()\n\n infile.close() # close the file\n\n return filetext # the text of the file, as a single string", "def read_file(file):\n f = open(file, \"r\", encoding=\"utf8\")\n return f.read()", "def readfile(filename):\n 
with open(filename, encoding=\"utf-8\") as file:\n raw = file.read()\n return raw", "def read_file(filename):\n open_kwargs = {}\n if sys.version_info.major == 3:\n open_kwargs = {'encoding': 'utf-8'}\n\n path = os.path.abspath(os.path.dirname(__file__))\n filepath = os.path.join(path, filename)\n with open(filepath, **open_kwargs) as filecontents:\n return filecontents.read()", "def read_text(filepath):\n\n text = open(filepath, encoding = \"utf8\").read()\n \n if text_lower:\n return text.lower()\n\n return text", "def loadTextFromFile(fullFilename, fileEncoding=\"utf-8\"):\n with codecs.open(fullFilename, 'r', encoding=fileEncoding) as fp:\n allText = fp.read()\n # logging.debug(\"Complete load text from %s\", fullFilename)\n return allText", "def readFromTextFile(self, file_name):\n with open(file_name, 'r') as file_obj:\n return file_obj.read()", "def read_text(self, encoding):\n with self.open(\"r\", encoding=encoding) as f:\n return f.read()", "def read_file(filename, encoding = None):\n filename = from_posix(filename)\n if not encoding:\n # Detect encoding\n encoding = detect_encoding(filename)\n\n # Finally, read the file in the detected encoding\n import io\n with io.open(filename, mode = 'r', encoding = encoding) as handle:\n return handle.read()", "def get_text_from_file(filepath):\n with open(filepath, 'r') as f:\n return f.read()" ]
[ "0.8138041", "0.7998457", "0.7998457", "0.79705226", "0.78938377", "0.7879223", "0.7831182", "0.7786197", "0.7733396", "0.7665402", "0.76034033", "0.7577817", "0.7550896", "0.75384855", "0.75316525", "0.7510268", "0.7503345", "0.74948895", "0.7489632", "0.74461335", "0.74427104", "0.74352473", "0.74277395", "0.7426796", "0.7387568", "0.7354924", "0.7337277", "0.73023796", "0.7301619", "0.73006445" ]
0.8679592
0
Read lines from file ('filename'). Return lines and encoding.
def readlines(filename, encoding='utf-8'):
    text, encoding = read(filename, encoding)
    return text.split(os.linesep), encoding
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_lines(file_name: str) -> List[str]:\n try:\n return open(file_name, encoding='utf8').readlines()\n except UnicodeDecodeError:\n return open(file_name, encoding='cp1252').readlines()", "def read_file(filename):\n with open(filename, encoding='utf-8') as src:\n return [line.strip() for line in src.readlines()]", "def read_lines(files):\n for file in files:\n for line in file.readlines():\n try:\n line = line.decode('utf-8')\n except UnicodeDecodeError:\n line = line.decode('latin-1')\n yield line.strip()", "def read_file(filename):\n\n infile = open(filename, 'r')\n lines = infile.readlines()\n infile.close()\n\n return lines", "def read_file_in_lines(filename):\r\n\twith open(filename) as infile:\r\n\t\tlines = infile.readlines()\r\n\treturn [line.strip() for line in lines]", "def read_from_file(filename):\n with open(filename, \"r\") as f:\n f.readlines()", "def read_lines(filename):\n with open(filename, 'r') as f:\n lines = f.readlines()\n return lines", "def readlines(filename):\n with open(filename, 'r') as f:\n lines = f.read().splitlines()\n return lines", "def read_file_lines(filepath):\n with open(filepath, 'r', encoding='utf-8', newline=os.linesep) as f:\n return f.readlines()", "def read_file(filename=\"\"):\n with open(filename, encoding=\"UTF-8\") as f:\n for line in f:\n print(line, end='')", "def read_file(file_name: str):\n with open(file_name) as fread:\n for line in fread:\n yield line", "def read_lines(filename):\n with file(filename) as f:\n for line in f:\n _line = line.strip()\n if _line:\n yield _line", "def read_file(file_name, enc=\"latin-1\"):\n f = open(file_name, \"r\", encoding=enc)\n content = \"\".join(f.readlines())\n f.close()\n return content", "def read_file(filename=\"\"):\n\n with open(filename, 'r', encoding='utf-8') as file:\n for line in file:\n print(line.rstrip())", "def read_lines(filename=\"\", nb_lines=0):\n\n with open(filename, encoding=\"UTF8\") as f:\n if nb_lines <= 0:\n print(f.read(), end=\"\")\n i = 0\n while i < nb_lines:\n print(f.readline(), end=\"\")\n i += 1", "def read_lines_from_file(fname):\n return []", "def read_file(path):\n with open(path, \"r\", encoding=\"utf8\") as f:\n lines = f.readlines()\n f.close()\n return lines", "def read_text_file(file_name):\n target_file = open(file_name)\n lines = target_file.readlines()\n\n target_file.close()\n return lines", "def read_lines_from_file(filename):\n with open(filename) as f:\n content = f.readlines()\n\n content = [x.strip() for x in content]\n return content", "def load_lines(filename):\r\n lines = []\r\n f = open(filename)\r\n for line in f.readlines():\r\n line = line.strip()\r\n lines.append(line)\r\n return lines", "def __read_file(self, filename):\n with open(filename) as f:\n content = f.readlines()\n \n return content", "def read_txt(cls, input_file):\n return open(input_file, \"r\", encoding=\"utf-8\").readlines()", "def read_txt(cls, input_file):\n return open(input_file, \"r\", encoding=\"utf-8\").readlines()", "def read_lines(filename, verbose=True):\n with open(filename, 'r') as fp:\n lines = fp.readlines()\n if verbose:\n print(\"Done reading file\", filename)\n \n return [line.strip() for line in lines]", "def load_file(filename):\n with open(filename, \"r\") as f:\n return f.readlines()", "def read_lines(filename=\"\", nb_lines=0):\n with open(filename, encoding=\"utf-8\") as myFile:\n if nb_lines <= 0:\n print(myFile.read(), end=\"\")\n for i in range(nb_lines):\n print(myFile.readline(), end=\"\")", "def kitti_readlines(filename):\n with 
open(filename, 'r') as f:\n lines = f.read().splitlines()\n return lines", "def read_text_file(fpath, encoding, read_size=-1, force_unix_linebreaks=True):\n with codecs.open(fpath, encoding=encoding) as f:\n contents = f.read(read_size)\n\n if read_size > 0:\n contents = contents[:read_size]\n\n if force_unix_linebreaks:\n contents = linebreaks_win2unix(contents)\n\n return contents", "def _lines(filename):\n \n handle = gzip.open(filename, 'rt') if _gz(filename) else open(filename)\n for line in handle:\n if not line.startswith('#'):\n yield line.strip().split('\\t')", "def read_file_lines(afile):\n with open(afile, 'r') as f:\n lines = f.read()\n return lines.splitlines()" ]
[ "0.7824536", "0.7728978", "0.7454107", "0.7385729", "0.73801655", "0.73738843", "0.7366425", "0.7290574", "0.7284927", "0.72171664", "0.7186186", "0.7184496", "0.71711564", "0.7166647", "0.7075612", "0.7070739", "0.69983697", "0.6996958", "0.6993472", "0.6980073", "0.6935585", "0.6934103", "0.6934103", "0.6924462", "0.6919167", "0.6905703", "0.69004357", "0.6898304", "0.6888825", "0.68355614" ]
0.81567216
0
Validate the access keys for this tenant.
def _ensure_tenant_and_validate(tenant_, access_key):
    tenant_data = registry.TENANT_DATA_GATEWAY
    tenant = tenant_data.tenant_by_name(tenant_)
    if tenant is None:
        raise TenantNotFoundError(
            "Tenant not found error. tenant='{}', access_key='{}'".format(
                tenant_, access_key))
    if not tenant.has_access_key(access_key):
        raise AccessKeyNotValidError(
            "The access key is not valid. tenant='{}', access_key='{}'".format(
                tenant_, access_key))
    return tenant
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_keystone_tenants(self, client):\n u.log.debug('Checking keystone tenants...')\n expected = [\n {'name': 'services',\n 'enabled': True,\n 'description': 'Created by Juju',\n 'id': u.not_null},\n {'name': 'demoTenant',\n 'enabled': True,\n 'description': 'demo tenant',\n 'id': u.not_null},\n {'name': 'admin',\n 'enabled': True,\n 'description': 'Created by Juju',\n 'id': u.not_null}\n ]\n if self.keystone_api_version == 2:\n actual = client.tenants.list()\n else:\n actual = client.projects.list()\n\n ret = u.validate_tenant_data(expected, actual)\n if ret:\n amulet.raise_status(amulet.FAIL, msg=ret)", "def validate(self, accessToken, requiredScopes=None, requiredSubject=None):\n\n # Reset properties that may have been set by the previous call.\n self.__resetValidation()\n\n try:\n # Call Authlete's /api/auth/introspection API.\n self._introspectionResponse = self.__callIntrospectionApi(\n accessToken, requiredScopes, requiredSubject)\n except Exception as cause:\n self._introspectionException = cause\n self._errorResponse = self.__buildErrorFromException(cause)\n self._valid = False\n return False\n\n # The 'action' parameter in the response from /api/auth/introspection\n # denotes the next action that the API caller should take.\n action = self._introspectionResponse.action\n\n if action == IntrospectionAction.OK:\n # The access token is valid.\n self._valid = True\n return True\n else:\n self._errorResponse = self.__buildErrorFromResponse(self._introspectionResponse)\n self._valid = False\n return False", "def access_key_and_tenant_required(f):\n @wraps(f)\n def wrapper(*args, **kwargs):\n tenant = _ensure_tenant_and_validate(args[0].tenant,\n args[0].access_key)\n\n kwargs['tenant'] = tenant\n return f(*args, **kwargs)\n return wrapper", "def validate_auth(self, required_keys):\n # The config validates that the 'auth' dict was loaded, but do a safety check here\n if not self.auth:\n raise AppAuthError('[{}] Auth config is empty'.format(self))\n\n auth_key_diff = required_keys.difference(set(self.auth))\n if not auth_key_diff:\n return True\n\n missing_auth_keys = ', '.join('\\'{}\\''.format(key) for key in auth_key_diff)\n raise AppAuthError('[{}] Auth config is missing the following '\n 'required keys: {}'.format(self, missing_auth_keys))", "def _auth_oauth_validate(self, provider, access_token):\n\t\toauth_provider = self.env['auth.oauth.provider'].browse(provider)\n\t\tvalidation = self._auth_oauth_rpc(oauth_provider.validation_endpoint, access_token)\n\t\tif validation.get(\"error\"):\n\t\t\traise Exception(validation['error'])\n\t\tif oauth_provider.data_endpoint:\n\t\t\tdata = self._auth_oauth_rpc(oauth_provider.data_endpoint, access_token)\n\t\t\tvalidation.update(data)\n\t\treturn validation", "def test_invalid_access_key(self):\r\n data = {\r\n \"EdX-ID\": self.receipt_id,\r\n \"Result\": \"Testing\",\r\n \"Reason\": \"Testing\",\r\n \"MessageType\": \"Testing\"\r\n }\r\n json_data = json.dumps(data)\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'),\r\n data=json_data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test testing:testing',\r\n HTTP_DATE='testdate'\r\n )\r\n self.assertIn('Access key invalid', response.content)\r\n self.assertEqual(response.status_code, 400)", "def _validate_credentials(self):\n\n # There should be a client_id and client secret\n return \"client_id\" in self.credentials.keys() and \"client_secret\" in self.credentials.keys() \\\n and self.credentials[\"client_id\"] and 
self.credentials[\"client_secret\"]", "def validate_request_token():\n if not g.x_tapis_token:\n raise errors.NoTokenError(\"No access token found in the request.\")\n claims = validate_token(g.x_tapis_token)\n g.token_claims = claims\n g.username = claims.get('username')\n g.tenant_id = claims.get('tenant_id')\n g.account_type = claims.get('account_type')\n g.delegation = claims.get('delegation')", "def validate(self):\n if not self.keys:\n raise ValueError(\"Virtual host missing keys\")\n for i in self.keys:\n i.validate()", "def api_auth_validate(request, access_key):\n if not request.is_json:\n return {'error' : 'Bad request, payload must be JSON', 'code' : 400}\n if not 'working_repo' in session:\n return {'error' : 'Operation requires authentication', 'code': 401}\n if session['working_repo'] != access_key:\n return {'error' : 'Not authorized for this operation', 'code' : 403}\n \n return True", "def validate_access_token(cmd, namespace):\n n = namespace\n\n if not n.access_token:\n n.access_token = get_config_value(cmd, 'communication', 'access_token', None)", "def validate_token(token):\n # first, decode the token data to determine the tenant associated with the token. We are not able to\n # check the signature until we know which tenant, and thus, which public key, to use for validation.\n try:\n data = jwt.decode(token, verify=False)\n except Exception as e:\n logger.debug(f\"got exception trying to parse data from the access_token jwt; exception: {e}\")\n raise errors.AuthenticationError(\"could not parse the access token.\")\n # get the tenant out of the jwt payload and get associated public key\n token_tenant_id = data['tenant_id']\n try:\n public_key_str = get_tenant_config(token_tenant_id)['public_key']\n except errors.BaseTapisError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; unexpected tenant_id.\")\n except KeyError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; no public key associated with the \"\n \"tenant_id.\")\n # try:\n # pub_key = get_pub_rsa_key(public_key_str)\n # except Exception as e:\n # logger.error(f\"got exception trying to create public RSA key object; e: {e} \")\n # raise errors.ServiceConfigError(\"Unable to process public key associated with tenant.\")\n try:\n return jwt.decode(token, public_key_str, algorithm='RS256')\n except Exception as e:\n logger.debug(f\"Got exception trying to decode token; exception: {e}\")\n raise errors.AuthenticationError(\"Invalid Tapis token.\")", "def validate_credentials(self, data):\n try:\n boolean_param_list = []\n get_service_data = app.config.get('JWT_CONFIG').get('CREDENTIAL')\n token_identity_param = app.config.get('JWT_CONFIG').get('TOKEN_IDENTITY_PARAM')\n expires_delta = app.config.get('JWT_CONFIG').get('TOKEN_EXPIRY')\n expires_delta = eval(expires_delta) if isinstance(expires_delta, str) else expires_delta\n credentials = data.get('credentials')\n identity_credentials_keys = list(get_service_data.keys())\n for key in identity_credentials_keys:\n if get_service_data[key] != credentials[key]:\n boolean_param_list.append(False)\n else:\n boolean_param_list.append(True)\n\n if False in boolean_param_list:\n return {'msg': \"Incorrect Credentials\"}, 401\n else:\n access_token = self.auth_token_generate(\n identity_param_val=credentials[token_identity_param], expires_delta=expires_delta)\n return {'access_token': access_token}, 200\n except Exception as e:\n print(e)\n return {'msg': \"Incorrect Credentials\"}, 401", "def validate_identical_access(self, ac):\n if 
self.user != ac.user:\n raise ValidationError(\n \"Instances have different users: %s, %s\" % (self.user, ac.user), code=\"different_user\"\n )\n\n non_overlapping_users_allowed = set(self.users_allowed.all()).symmetric_difference(ac.users_allowed.all())\n if len(non_overlapping_users_allowed) > 0:\n raise ValidationError(\n \"Instances allow different users access\", code=\"different_users_allowed\"\n )\n\n non_overlapping_groups_allowed = set(\n self.groups_allowed.all()).symmetric_difference(\n ac.groups_allowed.all())\n if len(non_overlapping_groups_allowed) > 0:\n raise ValidationError(\n \"Instances allow different groups access\", code=\"different_groups_allowed\"\n )", "def check_keys(self):", "def check_api_keys(self, request):\n app_id, api_obj = request.META.get(\"HTTP_APP_ID\"), None\n api_secret_key = request.META.get(\"HTTP_API_SECRET_KEY\")\n if app_id and api_secret_key:\n # validate app_id and api_secret_key\n app_id_bool = self._validate_app_id(app_id)\n if not app_id_bool:\n return False, self.app_id_message\n api_secret_key_bool = self._validate_api_secret_key(api_secret_key)\n if not api_secret_key:\n return False, self.api_secret_key_message\n try:\n api_obj = ApiApp.objects.get(app_id=app_id, api_secret_key=api_secret_key, active=True)\n if api_obj:\n self.app(request, api_obj)\n return True, ''\n except ApiApp.DoesNotExist:\n self.app(request, api_obj)\n return False, self.message\n else:\n self.app(request, api_obj)\n return False, self.message", "def test_valid_keys(client):\n response=client.post(\"/signin\",data=dict(username=TestSignin.email, password=TestSignin.password), content_type=\"multipart/form-data\")\n data=json.loads(response.data)\n assert response.status_code==400\n assert data[\"error\"] == \"Please provide email and password as keys\"", "def _is_valid(self):\n # TODO: Query Google to validate credentials\n return True", "def checkKeys( ):\n\n if (HMACKey is None) or (AESKey is None):\n loadKeys()\n\n if (int(time.time()) - creationTime) > const.KEY_ROTATION_TIME:\n rotateKeys()", "def validate(self):\n if self.first_name is None or not self.first_name.strip():\n raise AuthorizeInvalidError('First name on account is required.')\n if self.last_name is None or not self.last_name.strip():\n raise AuthorizeInvalidError('Last name on account is required.')\n if self.customer_type == 'business':\n if self.company is None or not self.company.strip():\n raise AuthorizeInvalidError('Company name is required.')\n if self.bank_name is None or not self.bank_name.strip():\n raise AuthorizeInvalidError('Bank name is required.')\n if self.routing_number is None or not self.routing_number.strip():\n raise AuthorizeInvalidError('Routing number is required.')\n if self.account_number is None or not self.account_number.strip():\n raise AuthorizeInvalidError('Account number is required.')\n if self.customer_type is None or not self.customer_type.strip():\n raise AuthorizeInvalidError('Customer type is required.')\n if self.customer_type not in CUSTOMER_TYPES:\n raise AuthorizeInvalidError('Customer type is not valid.')\n if self.account_type is None or not self.account_type.strip():\n raise AuthorizeInvalidError('Bank account type is required.')\n if self.account_type not in ACCOUNT_TYPES:\n raise AuthorizeInvalidError('Bank account type is not valid.')\n if self.routing_number_type is None \\\n or not self.routing_number_type.strip():\n raise AuthorizeInvalidError('Routing number type is required.')\n if self.routing_number_type not in ROUTING_NUMBER_TYPES:\n raise 
AuthorizeInvalidError('Routing number is not valid.')\n if self.echeck_type is None or not self.echeck_type.strip():\n raise AuthorizeInvalidError('eCheck type is required.')\n if self.echeck_type not in ECHECK_TYPES:\n raise AuthorizeInvalidError('eCheck type is not valid.')\n self._validate_account_number(self.account_number)\n self._validate_aba(self.routing_number)", "def test_valid(self):\n auth_tuple = imageroller.main.read_authconfig(\n imageroller.test.get_config_parser(self._valid))\n self.assertTupleEqual(auth_tuple, (AUTH_DATA[\"ApiUser\"],\n AUTH_DATA[\"ApiKey\"]))", "def check_key(request):\n\ttry:\n\t\taccess_key = request.session.get('access_key_tw', None)\n\t\tif not access_key:\n\t\t\treturn False\n\texcept KeyError:\n\t\treturn False\n\treturn True", "def validate(self, keypoints):\n for k in keypoints:\n self.validate_keypoints(k)", "def validate(self):\n if not self.key or not self.certificates:\n raise ValueError(\"Key or certificate missing in Keypair\")", "def check_token_validate(self, token):\n payload = {'key': self._lr_object._get_api_key(), 'secret': self._lr_object._get_api_secret(), 'access_token': token}\n url = SECURE_API_URL + \"api/v2/access_token/Validate/\"\n return self._lr_object._get_json(url, payload)", "def _valid_app_ids(app_ids):\n for app_id in app_ids:\n try:\n app_data = KNACK_CREDENTIALS[app_id]\n\n except KeyError:\n return False\n\n return True", "def is_valid(self):\n return self.access_token is not None \\\n and time.time() < self._expiration_timestamp", "def _validate_token(self):\n if not self.token:\n self.login()\n if not self.token:\n # TODO: create exception for this\n # Access is denied!!\n raise Exception(\"AccessDenied\")", "def test_authtoken_is_valid(self):\n auth_client = self.fixtures.auth_client\n # scenario 1: when validity is unlimited (0)\n tomriddle = models.User(username='voldemort', fullname='Tom Riddle')\n scope = ['id', 'email']\n tomriddle_token = models.AuthToken(\n auth_client=auth_client, user=tomriddle, scope=scope, validity=0\n )\n self.assertTrue(tomriddle_token.is_valid())\n\n # scenario 2: when validity has not been given\n draco = models.User(username='draco', fullname='Draco Malfoy')\n draco_token = models.AuthToken(auth_client=auth_client, user=draco, scope=scope)\n with self.assertRaises(TypeError):\n draco_token.is_valid()\n\n # scenario 3: when validity is limited\n harry = models.User(username='harry', fullname='Harry Potter')\n harry_token = models.AuthToken(\n auth_client=auth_client,\n user=harry,\n scope=scope,\n validity=3600,\n created_at=utcnow(),\n )\n self.assertTrue(harry_token.is_valid())\n\n # scenario 4: when validity is limited *and* the token has expired\n cedric = models.User(username='cedric', fullname='Cedric Diggory')\n cedric_token = models.AuthToken(\n auth_client=auth_client,\n user=cedric,\n scope=scope,\n validity=1,\n created_at=utcnow() - timedelta(1),\n )\n self.assertFalse(cedric_token.is_valid())", "def test_check_keys_exist_for_provider_string(self):\n\n secret_key = None\n provider_id = 'asu'\n\n serializer = serializers.CreditProviderCallbackSerializer()\n with pytest.raises(PermissionDenied):\n serializer._check_keys_exist_for_provider(secret_key, provider_id) # lint-amnesty, pylint: disable=protected-access" ]
[ "0.6086632", "0.59554845", "0.5888044", "0.5835416", "0.58168113", "0.5784612", "0.56935436", "0.5656515", "0.56534857", "0.5633115", "0.55855936", "0.5562494", "0.5473357", "0.5468281", "0.54314995", "0.5400806", "0.5358791", "0.5337157", "0.5333666", "0.5327647", "0.52998936", "0.52921003", "0.52811724", "0.5276412", "0.5276254", "0.52759343", "0.5273347", "0.52684754", "0.52589834", "0.5251339" ]
0.70454735
0
validates access to the tenant. REQUIRES that arg 0 of the calling function contains an object with the following fields (tenant (str), access_key(str))
def access_key_and_tenant_required(f): @wraps(f) def wrapper(*args, **kwargs): tenant = _ensure_tenant_and_validate(args[0].tenant, args[0].access_key) kwargs['tenant'] = tenant return f(*args, **kwargs) return wrapper
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _ensure_tenant_and_validate(tenant_, access_key):\n tenant_data = registry.TENANT_DATA_GATEWAY\n tenant = tenant_data.tenant_by_name(tenant_)\n if tenant is None:\n raise TenantNotFoundError(\n \"Tenant not found error. tenant='{}', access_key='{}'\".format(\n tenant_, access_key))\n\n if not tenant.has_access_key(access_key):\n raise AccessKeyNotValidError(\n \"The access key is not valid. tenant='{}', access_key='{}'\".format(\n tenant_, access_key))\n\n return tenant", "def validateTenant(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def tenant_access(self) -> Optional[pulumi.Input['ServiceTenantAccessArgs']]:\n return pulumi.get(self, \"tenant_access\")", "def validate_keystone_tenants(self, client):\n u.log.debug('Checking keystone tenants...')\n expected = [\n {'name': 'services',\n 'enabled': True,\n 'description': 'Created by Juju',\n 'id': u.not_null},\n {'name': 'demoTenant',\n 'enabled': True,\n 'description': 'demo tenant',\n 'id': u.not_null},\n {'name': 'admin',\n 'enabled': True,\n 'description': 'Created by Juju',\n 'id': u.not_null}\n ]\n if self.keystone_api_version == 2:\n actual = client.tenants.list()\n else:\n actual = client.projects.list()\n\n ret = u.validate_tenant_data(expected, actual)\n if ret:\n amulet.raise_status(amulet.FAIL, msg=ret)", "def __call__(self, access_token):", "def test_incompatible_subscription_and_tenant():\n pass", "def get_tenants(self):", "def validate_token(token):\n # first, decode the token data to determine the tenant associated with the token. 
We are not able to\n # check the signature until we know which tenant, and thus, which public key, to use for validation.\n try:\n data = jwt.decode(token, verify=False)\n except Exception as e:\n logger.debug(f\"got exception trying to parse data from the access_token jwt; exception: {e}\")\n raise errors.AuthenticationError(\"could not parse the access token.\")\n # get the tenant out of the jwt payload and get associated public key\n token_tenant_id = data['tenant_id']\n try:\n public_key_str = get_tenant_config(token_tenant_id)['public_key']\n except errors.BaseTapisError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; unexpected tenant_id.\")\n except KeyError:\n raise errors.AuthenticationError(\"Unable to process Tapis token; no public key associated with the \"\n \"tenant_id.\")\n # try:\n # pub_key = get_pub_rsa_key(public_key_str)\n # except Exception as e:\n # logger.error(f\"got exception trying to create public RSA key object; e: {e} \")\n # raise errors.ServiceConfigError(\"Unable to process public key associated with tenant.\")\n try:\n return jwt.decode(token, public_key_str, algorithm='RS256')\n except Exception as e:\n logger.debug(f\"Got exception trying to decode token; exception: {e}\")\n raise errors.AuthenticationError(\"Invalid Tapis token.\")", "def test_need_params(self):\n\n acme = ACMEAccount(client=self.client)\n # missing acme_id\n self.assertRaises(TypeError, acme.delete)", "def validate(self, apiobj, method, api, param, safe):", "def access_to_bucket_required(f):\n @wraps(f)\n def wrapper(*args, **kwargs):\n tenant = _ensure_tenant_and_validate(args[0].tenant,\n args[0].access_key)\n bucket = tenant.get_bucket(args[0].bucket)\n kwargs['bucket'] = bucket\n return f(*args, **kwargs)\n return wrapper", "def test_access_control_is_superuser_as_user_raises_access_control_error(\n self,\n ):\n # Arrange\n mock_request = create_mock_request(user=self.user1)\n\n # Act # Assert\n with self.assertRaises(AccessControlError):\n access_control_api.is_superuser(\n mock_function, request=mock_request\n )", "def test_need_acme_id(self):\n acme = ACMEAccount(client=self.client)\n self.assertRaises(TypeError, acme.get)", "def check_tenant_authorization(tenant_id, override_permission=None):\n claims = get_jwt_claims()\n if \"id\" in list(claims.keys()):\n tenant_user = identity.TenantUser.query.filter_by(id=claims[\"id\"]).first()\n if (\n tenant_user.tenant_id == tenant_id\n or override_permission in tenant_user.permissions\n ):\n return\n abort(403, \"Unauthorized Tenant\")", "def test_auth_public(self):\n self.do_visible(True, None, True, tenant='froggy')", "def test_specify_non_default_tenant():\n pass", "def validate_request_token():\n if not g.x_tapis_token:\n raise errors.NoTokenError(\"No access token found in the request.\")\n claims = validate_token(g.x_tapis_token)\n g.token_claims = claims\n g.username = claims.get('username')\n g.tenant_id = claims.get('tenant_id')\n g.account_type = claims.get('account_type')\n g.delegation = claims.get('delegation')", "def test_dashboards_v2_request_access(self):\n pass", "def access_token(*args, **kwargs):\n return None", "def testGetAccessAllowed(self):\n for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):\n response = self.runGet(user, sequencer=self.hiseq2000.vendor_id)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"sodar_uuid\"], str(self.hiseq2000.sodar_uuid))", "def tenant_access(self) -> 
pulumi.Output['outputs.ServiceTenantAccess']:\n return pulumi.get(self, \"tenant_access\")", "def validate_access(self, view, rights, prefix, scope_path, field):\n\n access_level = self.cleaned_data[field]\n\n if not has_access(rights, access_level, scope_path, prefix):\n self._errors[field] = ErrorList([DEF_NO_RIGHTS_FOR_ACL_MSG])\n del self.cleaned_data[field]", "def test_access_positive(self, api):\n self.builder.add_user(api.get_user())\n self.builder.upd_access(api.get_user(), False)\n r1 = api.access_user(api.get_user(), True)\n access_true = self.builder.get_access(api.get_user())\n self.builder.del_user(api.get_user())\n assert access_true == 1\n assert r1.status_code == 200", "def initial(self, request, *args, **kwargs):\n\n # It's checks the permissions for the third party endpoint or not. It give access if key present.\n bool_value, message = self.check_api_keys(request)\n if bool_value:\n super(ProjectRestrictedGenericViewSet, self).initial(request, *args, **kwargs)\n # Check action permissions\n self.check_action_permissions(request)\n else:\n self.app_permission_denied(request, message)", "def test_auth_private(self):\n self.do_visible(True, None, False, tenant='froggy')", "def test_admin_api_organization_accesses_request_get(self):\n admin = factories.UserFactory(is_staff=True, is_superuser=True)\n self.client.login(username=admin.username, password=\"password\")\n organization = factories.OrganizationFactory()\n organization_access = factories.UserOrganizationAccessFactory(\n organization=organization\n )\n response = self.client.get(\n f\"/api/v1.0/admin/organizations/{organization.id}/accesses/{organization_access.id}/\"\n )\n\n self.assertContains(\n response,\n 'Method \\\\\"GET\\\\\" not allowed.',\n status_code=405,\n )", "def test_get_tenant_by_id(sample_identity):\n access_token, tenant, tenant_user, tc = sample_identity\n new_access_token = tc.post(\n \"api/v1/authentication/login\",\n json={\"username\": tenant_user.username, \"password\": \"1234\"},\n ).json[\"data\"][\"access_token\"]\n headers = {\"Authorization\": \"Bearer \" + new_access_token}\n response = tc.get(f\"api/v1/identity/tenant/{tenant.id}\", headers=headers)\n assert response.status_code == 200, \"Failed to fetch Tenant By ID\"\n assert response.json[\"data\"][\"name\"] == tenant.name, \"Tenant name doesn't match\"", "def test_auth_public_owned(self):\n self.do_visible(True, 'pattieblack', True, tenant='pattieblack')", "def _check_user_entry(user):\n if \"tenant_name\" in user:\n keys = set(user.keys())\n if keys == {\"username\", \"password\", \"tenant_name\",\n \"project_domain_name\", \"user_domain_name\"}:\n if (user[\"user_domain_name\"] == \"\"\n and user[\"project_domain_name\"] == \"\"):\n # it is credentials of keystone v2 and they were created\n # --fromenv\n del user[\"user_domain_name\"]\n del user[\"project_domain_name\"]\n return True\n else:\n # it looks like keystone v3 credentials\n user[\"project_name\"] = user.pop(\"tenant_name\")\n return True" ]
[ "0.7270588", "0.61245686", "0.60244095", "0.60244095", "0.5603974", "0.5583817", "0.5576855", "0.5457372", "0.54482836", "0.53795445", "0.53709066", "0.5356738", "0.533243", "0.5332021", "0.53286314", "0.5317975", "0.5316835", "0.5304212", "0.5273722", "0.5255667", "0.52554893", "0.52483034", "0.5244654", "0.52372557", "0.5235541", "0.5230392", "0.5226447", "0.51878923", "0.51843035", "0.5183973" ]
0.7408233
0
validates access to the bucket for the given fields. REQUIRES that arg 0 of the calling function contains an object with the following fields (tenant (str), access_key(str), bucket(str))
def access_to_bucket_required(f): @wraps(f) def wrapper(*args, **kwargs): tenant = _ensure_tenant_and_validate(args[0].tenant, args[0].access_key) bucket = tenant.get_bucket(args[0].bucket) kwargs['bucket'] = bucket return f(*args, **kwargs) return wrapper
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def access_key_and_tenant_required(f):\n @wraps(f)\n def wrapper(*args, **kwargs):\n tenant = _ensure_tenant_and_validate(args[0].tenant,\n args[0].access_key)\n\n kwargs['tenant'] = tenant\n return f(*args, **kwargs)\n return wrapper", "def test_buckets(self):\n objectstore.bucket.Bucket.create('new_bucket', self.context)\n bucket = objectstore.bucket.Bucket('new_bucket')\n\n # creator is authorized to use bucket\n self.assert_(bucket.is_authorized(self.context))\n\n # another user is not authorized\n context2 = context.RequestContext('user2', 'proj2')\n self.assertFalse(bucket.is_authorized(context2))\n\n # admin is authorized to use bucket\n admin_context = context.RequestContext('admin_user', None)\n self.assertTrue(bucket.is_authorized(admin_context))\n\n # new buckets are empty\n self.assertTrue(bucket.list_keys()['Contents'] == [])\n\n # storing keys works\n bucket['foo'] = \"bar\"\n\n self.assertEquals(len(bucket.list_keys()['Contents']), 1)\n\n self.assertEquals(bucket['foo'].read(), 'bar')\n\n # md5 of key works\n self.assertEquals(bucket['foo'].md5, hashlib.md5('bar').hexdigest())\n\n # deleting non-empty bucket should throw a NotEmpty exception\n self.assertRaises(NotEmpty, bucket.delete)\n\n # deleting key\n del bucket['foo']\n\n # deleting empty bucket\n bucket.delete()\n\n # accessing deleted bucket throws exception\n self.assertRaises(NotFound, objectstore.bucket.Bucket, 'new_bucket')", "def _ensure_tenant_and_validate(tenant_, access_key):\n tenant_data = registry.TENANT_DATA_GATEWAY\n tenant = tenant_data.tenant_by_name(tenant_)\n if tenant is None:\n raise TenantNotFoundError(\n \"Tenant not found error. tenant='{}', access_key='{}'\".format(\n tenant_, access_key))\n\n if not tenant.has_access_key(access_key):\n raise AccessKeyNotValidError(\n \"The access key is not valid. 
tenant='{}', access_key='{}'\".format(\n tenant_, access_key))\n\n return tenant", "def bucket_exists(gs_client, test_bucket):\n bucket = gs_client.conn.bucket(test_bucket)\n if not bucket.exists():\n gs_client.conn.create_bucket(test_bucket, predefined_acl=\"project-private\")\n yield gs_client", "def __init__(self, project_id, bucket_name):\n self.project_id = project_id\n self.bucket_name = bucket_name\n self.client = storage.Client(project=project_id)\n self.bucket = self.client.get_bucket(bucket_name)", "def test_get_bucket(self):\n pass", "def test_id_of_bucket_to_be_edited_is_invalid(self):\n with self.client:\n # Get an auth token\n token = self.get_user_token()\n # Update the bucket name\n res = self.client.put(\n '/bucketlists/bucketid',\n headers=dict(Authorization='Bearer ' + token),\n data=json.dumps(dict(name='Adventure')),\n content_type='application/json'\n )\n data = json.loads(res.data.decode())\n self.assertEqual(res.status_code, 400)\n self.assertTrue(res.content_type == 'application/json')\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def __init__(self, *, bucket_arn: typing.Optional[str]=None, bucket_domain_name: typing.Optional[str]=None, bucket_dual_stack_domain_name: typing.Optional[str]=None, bucket_name: typing.Optional[str]=None, bucket_regional_domain_name: typing.Optional[str]=None, bucket_website_new_url_format: typing.Optional[bool]=None, bucket_website_url: typing.Optional[str]=None, encryption_key: typing.Optional[aws_cdk.aws_kms.IKey]=None):\n self._values = {\n }\n if bucket_arn is not None: self._values[\"bucket_arn\"] = bucket_arn\n if bucket_domain_name is not None: self._values[\"bucket_domain_name\"] = bucket_domain_name\n if bucket_dual_stack_domain_name is not None: self._values[\"bucket_dual_stack_domain_name\"] = bucket_dual_stack_domain_name\n if bucket_name is not None: self._values[\"bucket_name\"] = bucket_name\n if bucket_regional_domain_name is not None: self._values[\"bucket_regional_domain_name\"] = bucket_regional_domain_name\n if bucket_website_new_url_format is not None: self._values[\"bucket_website_new_url_format\"] = bucket_website_new_url_format\n if bucket_website_url is not None: self._values[\"bucket_website_url\"] = bucket_website_url\n if encryption_key is not None: self._values[\"encryption_key\"] = encryption_key", "def get_bucket_acl(Bucket=None):\n pass", "def test_authentication_with_valid_data(self):\n\n account_data = {\n \"email\": self.email,\n \"password\": self.password\n }\n response = self.client.post(\n self.url,\n account_data,\n format=\"json\")\n \"\"\"Test the api has bucket creation capability.\"\"\"\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertTrue(\"data\" in json.loads(response.content))", "def test_validate_bookstore_bucket():\n expected = {\n \"bookstore_valid\": True,\n \"publish_valid\": True,\n \"archive_valid\": True,\n \"clone_valid\": True,\n }\n settings = BookstoreSettings(s3_bucket=\"A_bucket\")\n assert validate_bookstore(settings) == expected", "def has_bucket_access(self, bucket, user_id):\n msg = \"has_bucket_access not implemented\"\n raise NotImplementedError(msg)", "def get_object_acl(Bucket=None, Key=None, VersionId=None, RequestPayer=None):\n pass", "def validate_get_bucket_list_data(func):\n @wraps(func)\n def validate_data(*args, **kwargs):\n limit_of_items = request.args.get('limit')\n page_no = request.args.get('page')\n query = request.args.get('q')\n if not 
isinstance(limit_of_items, int):\n return {\"message\": \"The limit must be an integer\"}, 400\n elif not isinstance(page_no, int):\n return {\"message\":\"The page number should be an integer\"}, 400\n elif not isinstance(query, str):\n return {\"message\": \"The query parameter should be a string\"}, 400\n return func(*args, **kwargs)\n return validate_data", "def valid_object(self,object_data):\n if not object_data.get('planId'):\n logging.error(\"Couldn't find planId, required field\")\n self.append_response(\"missing planId\")\n return False\n if not object_data.get(\"name\"):\n logging.warning(\"No name set for bucket\")\n return True", "def validate_bucket_list_data(func):\n @wraps(func)\n def validate_bucket_list(*args, **kwargs):\n bucket_list_data = request.get_json()\n if not bucket_list_data:\n return {\"message\": \"You have to provide the required data\"}, 400\n elif \"bucket_list_id\" not in bucket_list_data or \"bucket_list_name\" not in bucket_list_data:\n return {\"message\": \"You have to provide the required data\"}, 400\n elif bucket_list_data[\"bucket_list_id\"] == \"\"or bucket_list_data[\"bucket_list_name\"] == \"\":\n return {\"message\": \"You have to provide all the required data\"}, 400\n return func(*args, **kwargs)\n return validate_bucket_list", "def __init__(self, bucket):\n self.bucket = bucket", "def test_tag_public_bucket(self, test, object_storage):\n namespace_name, bucket_name = self._get_bucket_details(object_storage)\n session_factory = test.oci_session_factory()\n policy = test.load_policy(\n {\n \"name\": \"tag-public-buckets\",\n \"resource\": \"oci.bucket\",\n \"query\": [\n {\"namespace_name\": namespace_name},\n ],\n \"filters\": [\n {\n \"type\": \"attributes\",\n \"key\": \"public_access_type\",\n \"value\": \"ObjectRead\",\n \"op\": \"eq\",\n },\n ],\n \"actions\": [{\"type\": \"update\", \"freeform_tags\": {\"public_access\": \"true\"}}],\n },\n session_factory=session_factory,\n )\n policy.run()\n resource = self._fetch_bucket_validation_data(\n policy.resource_manager, namespace_name, bucket_name\n )\n test.assertEqual(resource[\"name\"], bucket_name)\n test.assertEqual(resource[\"freeform_tags\"][\"public_access\"], \"true\")", "def test_create_bucket(self):\n pass", "def test_creating_a_bucket(self):\n with self.client:\n self.create_bucket(self.get_user_token())", "def get_dropbox_policy ( bucket_name, requires_aspera = False ) :\n if requires_aspera :\n return \"\"\"{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"GrantUploadDownloadPermissionsToBucket\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:AbortMultipartUpload\",\n \"s3:DeleteObject\",\n \"s3:DeleteObjectVersion\",\n \"s3:GetBucketAcl\",\n \"s3:GetBucketLocation\",\n \"s3:GetBucketLogging\",\n \"s3:GetBucketNotification\",\n \"s3:GetBucketPolicy\",\n \"s3:GetBucketRequestPayment\",\n \"s3:GetBucketTagging\",\n \"s3:GetBucketVersioning\",\n \"s3:GetBucketWebsite\",\n \"s3:GetLifecycleConfiguration\",\n \"s3:GetObject\",\n \"s3:GetObjectAcl\",\n \"s3:GetObjectTorrent\",\n \"s3:GetObjectVersion\",\n \"s3:GetObjectVersionAcl\",\n \"s3:GetObjectVersionTorrent\",\n \"s3:ListAllMyBuckets\",\n \"s3:ListBucket\",\n \"s3:ListBucketMultipartUploads\",\n \"s3:ListBucketVersions\",\n \"s3:ListMultipartUploadParts\",\n \"s3:PutBucketVersioning\",\n \"s3:PutObject\"\n ],\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\",\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"/*\"\n ]\n },\n {\n \"Sid\": \"AllowAsperaRootLevelListingOfTheBucket\",\n 
\"Action\": [\"s3:ListBucket\"],\n \"Effect\": \"Allow\",\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\"\n ],\n \"Condition\":{\n \"StringEquals\":{\n \"s3:prefix\":[\"\"], \"s3:delimiter\":[\"/\"]\n }\n }\n },\n {\n \"Sid\" : \"AllowGroupToSeeBucketListInAsperaConsole\",\n \"Action\" : [\n \"s3:ListAllMyBuckets\",\n \"s3:GetBucketLocation\"\n ],\n \"Effect\" : \"Allow\",\n \"Resource\" : [ \"arn:aws:s3:::\" ]\n }\n ]\n}\"\"\"\n\n else :\n return \"\"\"{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"GrantUploadDownloadPermissionsToBucket\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:AbortMultipartUpload\",\n \"s3:DeleteObject\",\n \"s3:DeleteObjectVersion\",\n \"s3:GetBucketAcl\",\n \"s3:GetBucketLocation\",\n \"s3:GetBucketLogging\",\n \"s3:GetBucketNotification\",\n \"s3:GetBucketPolicy\",\n \"s3:GetBucketRequestPayment\",\n \"s3:GetBucketTagging\",\n \"s3:GetBucketVersioning\",\n \"s3:GetBucketWebsite\",\n \"s3:GetLifecycleConfiguration\",\n \"s3:GetObject\",\n \"s3:GetObjectAcl\",\n \"s3:GetObjectTorrent\",\n \"s3:GetObjectVersion\",\n \"s3:GetObjectVersionAcl\",\n \"s3:GetObjectVersionTorrent\",\n \"s3:ListAllMyBuckets\",\n \"s3:ListBucket\",\n \"s3:ListBucketMultipartUploads\",\n \"s3:ListBucketVersions\",\n \"s3:ListMultipartUploadParts\",\n \"s3:PutBucketVersioning\",\n \"s3:PutObject\"\n ],\n \"Resource\": [\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"\",\n \"arn:aws:s3:::\"\"\" + bucket_name + \"\"\"/*\"\n ]\n }\n ]\n}\"\"\"", "def validate_access(self, view, rights, prefix, scope_path, field):\n\n access_level = self.cleaned_data[field]\n\n if not has_access(rights, access_level, scope_path, prefix):\n self._errors[field] = ErrorList([DEF_NO_RIGHTS_FOR_ACL_MSG])\n del self.cleaned_data[field]", "def test_request_for_a_bucket_has_integer_id(self):\n with self.client:\n response = self.client.get(\n '/bucketlists/dsfgsdsg',\n headers=dict(Authorization='Bearer ' + self.get_user_token())\n )\n data = json.loads(response.data.decode())\n self.assertEqual(response.status_code, 400)\n self.assertTrue(data['status'] == 'failed')\n self.assertTrue(data['message'] == 'Please provide a valid Bucket Id')", "def test_update_public_bucket_to_private(self, test, object_storage):\n namespace_name, bucket_name = self._get_bucket_details(object_storage)\n session_factory = test.oci_session_factory()\n policy = test.load_policy(\n {\n \"name\": \"change-public-bucket-to-private\",\n \"resource\": \"oci.bucket\",\n \"query\": [\n {\"namespace_name\": namespace_name},\n ],\n \"filters\": [\n {\"type\": \"value\", \"key\": \"name\", \"value\": bucket_name},\n ],\n \"actions\": [{\"type\": \"update\", \"public_access_type\": \"NoPublicAccess\"}],\n },\n session_factory=session_factory,\n )\n policy.run()\n resource = self._fetch_bucket_validation_data(\n policy.resource_manager, namespace_name, bucket_name\n )\n test.assertEqual(resource[\"name\"], bucket_name)\n test.assertEqual(resource[\"public_access_type\"], \"NoPublicAccess\")", "def test_change_public_bucket_to_private(self, test, object_storage):\n namespace_name, bucket_name = self._get_bucket_details(object_storage)\n session_factory = test.oci_session_factory()\n policy = test.load_policy(\n {\n \"name\": \"change-public-bucket-to-private\",\n \"resource\": \"oci.bucket\",\n \"query\": [\n {\"namespace_name\": namespace_name},\n ],\n \"filters\": [\n {\"type\": \"value\", \"key\": \"name\", \"value\": bucket_name},\n ],\n \"actions\": [{\"type\": \"update\", \"public_access_type\": 
\"NoPublicAccess\"}],\n },\n session_factory=session_factory,\n )\n policy.run()\n resource = self._fetch_bucket_validation_data(\n policy.resource_manager, namespace_name, bucket_name\n )\n test.assertEqual(resource[\"name\"], bucket_name)\n test.assertEqual(resource[\"public_access_type\"], \"NoPublicAccess\")", "def __init__(self, name, bucket_id, quota):\n self.name = name\n self.id = bucket_id\n self.quota = quota", "def validate_client_parameters(namespace):\n n = namespace\n\n if not n.connection_string:\n n.connection_string = os.environ.get('AZURE_STORAGE_CONNECTION_STRING')\n\n # if connection string supplied or in environment variables, extract account key and name\n if n.connection_string:\n conn_dict = validate_key_value_pairs(n.connection_string)\n n.account_name = conn_dict['AccountName']\n n.account_key = conn_dict['AccountKey']\n\n # otherwise, simply try to retrieve the remaining variables from environment variables\n if not n.account_name:\n n.account_name = os.environ.get('AZURE_STORAGE_ACCOUNT')\n if not n.account_key:\n n.account_key = os.environ.get('AZURE_STORAGE_KEY')\n if not n.sas_token:\n n.sas_token = os.environ.get('AZURE_SAS_TOKEN')", "def _check_queryable(self):\n if not self._bucket:\n raise Exception('Bucket has not been selected')", "def test_need_params(self):\n\n acme = ACMEAccount(client=self.client)\n # missing acme_id\n self.assertRaises(TypeError, acme.delete)", "def validate(self, apiobj, method, api, param, safe):\n if method in ('PUT', 'DELETE'):\n validate_strlist('site_name', param, safe, RX_SITE)\n validate_strlist('ce', param, safe, RX_FQDN)\n validate_strlist('release', param, safe, RX_RELEASE)\n validate_strlist('arch', param, safe, RX_ARCH)\n validate_lengths(safe, 'site_name', 'ce', 'release', 'arch')\n # Delay authz until we have database connection for name remapping." ]
[ "0.6362296", "0.6169604", "0.60709274", "0.5856636", "0.5740477", "0.5719126", "0.5697819", "0.5665839", "0.5654232", "0.5581581", "0.5573217", "0.5561906", "0.5506819", "0.5506553", "0.546765", "0.5461047", "0.54366624", "0.543023", "0.5401997", "0.5385422", "0.5363991", "0.5363506", "0.5350716", "0.5345576", "0.53126156", "0.528448", "0.52797335", "0.52754015", "0.52563244", "0.5254044" ]
0.7128565
0
>>> validate_n_digits()("") False >>> validate_n_digits()("a") False >>> validate_n_digits()("asdf") False >>> validate_n_digits()("asdfghj") False >>> validate_n_digits()("123") False >>> validate_n_digits()("1234") True >>> validate_n_digits()("1235678") False >>> validate_n_digits(n=9)("000000001") True >>> validate_n_digits(n=9)("0123456789") False
def validate_n_digits(n: int = 4) -> bool: def func(s: str): if len(s) != n: return False if not s.isdigit(): return False return True return func
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_n_digits_range(min_val: int, max_val: int, n: int = 4) -> bool:\n def func(s: str):\n return validate_n_digits(n)(s) and min_val <= int(s) <= max_val\n return func", "def validate(value):\n if str.isdigit(value) or value == \"\":\n return True\n else:\n return False", "def input_validation(input_: str) -> bool:\n return fullmatch('[1-9]', input_) is not None", "def validate(number):\n number = compact(number)\n if len(number) != 11:\n raise InvalidLength()\n if not isdigits(number):\n raise InvalidFormat()\n if number.startswith('0'):\n raise InvalidFormat()\n # In the first 10 digits exactly one digit must be repeated two or\n # three times and other digits can appear only once.\n counter = defaultdict(int)\n for n in number[:10]:\n counter[n] += 1\n counts = [c for c in counter.values() if c > 1]\n if len(counts) != 1 or counts[0] not in (2, 3):\n raise InvalidFormat()\n return mod_11_10.validate(number)", "def validate(number):\n number = compact(number)\n if len(number) != 10:\n raise InvalidLength()\n if not _nipt_re.match(number):\n raise InvalidFormat()\n return number", "def verify(n):\n\n # Take the sum of all digits.\n sum_of_digits = sum(luhn_digits(n))\n\n # The number is valid iff the sum of digits modulo 10 is equal to 0\n return sum_of_digits % 10 == 0", "def validate(self):\n return (self.check_input_digits_count()\n and self.check_if_input_is_int()\n and self.check_if_input_digits_are_unique())", "def checknum(val):\n\n if len(val) == 0:\n return False\n\n for i in range(len(val)):\n if not val[i].isdigit():\n return False\n\n return True", "def validate(n = 5):", "def isnumber(n):\r\n N = str(n)\r\n if N.isdigit():\r\n return True\r\n else:\r\n return False", "def is_valid_number(self, text, widget):\n if len(text) > 2:\n return False\n for char in text:\n if not char.isdigit():\n return False\n if text != '' and int(text) == 0:\n return False\n return True", "def phone_number_validator(phone_number):\n if len(phone_number) != 10:\n return False\n if phone_number[0] == '0':\n return False\n try:\n int(phone_number)\n except ValueError:\n return False\n return True", "def validate(number):\n number = compact(number)\n if not isdigits(number):\n raise InvalidFormat()\n if len(number) != 10:\n raise InvalidLength()\n if checksum(number) != 0:\n raise InvalidChecksum()\n return number", "def validate_account_number(num, should_exist=True):\n if len(num) != 8:\n return False\n elif num[0] == '0':\n return False\n else:\n if should_exist:\n return account_number_exists(num)\n else:\n return not account_number_exists(num)", "def validate(number):\n number = compact(number)\n if len(number) != 9:\n raise InvalidLength()\n if not isdigits(number[2:]):\n raise InvalidFormat()\n if not isdigits(number[:2]) and not all(x in 'ABCEHKMOPT' for x in number[:2]):\n raise InvalidFormat()\n if number[0] not in '1234567ABCEHKM':\n raise InvalidComponent()\n if number[-1] != calc_check_digit(number):\n raise InvalidChecksum()\n return number", "def validate(self, cnpj):\n return bool(cnpj[-2:] == self.digits(cnpj))", "def pattern_with_digits_validate_regular_expression(cls, value):\n if value is None:\n return value\n\n if not re.match(r\"^\\d{10}$\", value):\n raise ValueError(r\"must validate the regular expression /^\\d{10}$/\")\n return value", "def check_input_digits_count(self):\n check = len(str(self.input)) == 4\n return check", "def is_digit_regex(s: str) -> bool:\n if re.match(\"^\\d+?\\.\\d+?$\", s) is None:\n return s.isdigit()\n return True", "def 
is_valid(n):\n\tif type(n) == int:\n\t\tn = str(n)\n\tfor index, c in enumerate(n):\n\t\tif index == 0:\n\t\t\tcontinue\n\t\tif n[index - 1] > n[index]:\n\t\t\treturn False\n\treturn True", "def check_number(number):\n digits = str(number)\n if len(digits) != 6:\n return False\n\n double = False\n last = '0'\n for digit in digits:\n if digit < last:\n return False\n\n if digit == last:\n double = True\n\n last = digit\n\n return double", "def validate_integer(self, p_str):\n # p_str is str\n if re.search(r\"^[1-9]\\d*$\", p_str) or p_str == \"\":\n return True\n self.frame.bell() # alert wrong input\n return False", "def isbn_10_check_digit(nine_digits):\r\n if len(nine_digits) != 9: return None\r\n try: int(nine_digits)\r\n except: return None\r\n remainder = int(sum((i + 2) * int(x) for i, x in enumerate(reversed(nine_digits))) % 11)\r\n if remainder == 0: tenth_digit = 0\r\n else: tenth_digit = 11 - remainder\r\n if tenth_digit == 10: tenth_digit = 'X'\r\n return str(tenth_digit)", "def validate(number):\n number = compact(number)\n if not isdigits(number):\n raise InvalidFormat()\n if len(number) != 10:\n raise InvalidLength()\n # check if birth date is valid\n get_birth_date(number)\n # TODO: check that the birth date is not in the future\n # check the check digit\n if calc_check_digit(number[:-1]) != number[-1]:\n raise InvalidChecksum()\n return number", "def test_non_numberic_validation(self):", "def test_non_numberic_validation(self):", "def check_number(self):\n digits = self.number\n _sum = 0\n alt = False\n ix = []\n for x in str(digits):\n ix.append(int(x))\n for d in reversed(ix):\n assert 0 <= d <= 9\n if alt:\n d *= 2\n if d > 9:\n d -= 9\n _sum += d\n alt = not alt\n return (_sum % 10) == 0", "def _multiple_choice_validate(s: str, len_options: int):\n if not s:\n raise ValueError('Please enter a value between {} and {}'.format(\n 1, len_options + 1))\n\n if not str.isnumeric(s):\n raise ValueError('Please enter a numeric value')\n\n if 1 <= int(s) <= (len_options + 1):\n return\n else:\n raise ValueError('Please enter a value between {} and {}'.format(\n 1, len_options + 1))", "def is_number(c):\n return '0' <= c <= '9'", "def check_digits_cpf(x: str, n: int) -> int:\n check_vec = np.flip(np.arange(2, 10 + n))\n digits = np.array(list(x[: 8 + n])).astype(\"int\")\n result = np.dot(check_vec, digits) % 11\n\n return 0 if result < 2 else 11 - result" ]
[ "0.6967983", "0.69240135", "0.6848611", "0.6823944", "0.67357725", "0.6591856", "0.6506189", "0.64677304", "0.64408034", "0.64399856", "0.6419439", "0.6367101", "0.6340759", "0.6191284", "0.6188791", "0.6156608", "0.61299425", "0.61085176", "0.6107948", "0.6074612", "0.60343254", "0.5941356", "0.5916217", "0.5899287", "0.58853835", "0.58853835", "0.58779806", "0.58636373", "0.58523124", "0.5835418" ]
0.8455768
0
>>> validate_n_digits_range(1920, 2002)("2002") True >>> validate_n_digits_range(1920, 2002)("2003") False
def validate_n_digits_range(min_val: int, max_val: int, n: int = 4) -> bool: def func(s: str): return validate_n_digits(n)(s) and min_val <= int(s) <= max_val return func
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_n_digits(n: int = 4) -> bool:\n def func(s: str):\n if len(s) != n:\n return False\n if not s.isdigit():\n return False\n return True\n return func", "def is_valid_birth_number(birth_number: int):\n if birth_number in range(1, 1000):\n return True\n return False", "def is_valid_birth_number(birth_number: int):\n if birth_number in range(1, 1000):\n return True\n else:\n return False", "def is_valid_year_number(year_number: int) -> bool:\n if year_number in range(100):\n return True\n else:\n return False", "def isRangeValid(self) -> bool:\n ...", "def is_valid_range(parser, arg, minimum=0, maximum=100):\n if arg < minimum:\n parser.error(\"%s < %s\", arg, minimum)\n else:\n if arg > maximum:\n parser.error(\"%s > %s\", arg, maximum)\n\n return arg", "def validate_range_str(range_str):\n if not isinstance(range_str, str):\n return False\n ranges = range_str.split(\",\")\n assert len(ranges) > 0\n for r in ranges:\n # a range may be either e.g. '64', or '128-256'\n try:\n c = [int(x) for x in r.split(\":\")]\n except:\n return False\n # c should be either e.g. [ 128 ], or [64,128].\n if len(c) == 1:\n if c[0] <= 0:\n return False\n elif len(c) == 2:\n if c[0] <= 0 or c[1] < c[0]:\n return False\n else:\n return False\n return True", "def check_ranges(ranges, value):\n for fromto in ranges:\n start, end = fromto.split('-')\n if int(value) in range(int(start), int(end) + 1):\n return True\n # else:\n # print('%s is not between %s and %s' % (value, start, end))\n return False", "def __verify_range(value, minimum, maximum):\n if value in range(minimum, maximum):\n return True\n else:\n return False", "def validate(number):\n number = compact(number)\n if len(number) != 11:\n raise InvalidLength()\n if not isdigits(number):\n raise InvalidFormat()\n if number.startswith('0'):\n raise InvalidFormat()\n # In the first 10 digits exactly one digit must be repeated two or\n # three times and other digits can appear only once.\n counter = defaultdict(int)\n for n in number[:10]:\n counter[n] += 1\n counts = [c for c in counter.values() if c > 1]\n if len(counts) != 1 or counts[0] not in (2, 3):\n raise InvalidFormat()\n return mod_11_10.validate(number)", "def is_valid_year_number(year_number: int) -> bool:\n if 0 <= int(year_number) < 100:\n return True\n return False", "def is_valid_year(year_range):\n\n if not year_range:\n return False\n\n if len(str(year_range)) != 8:\n return False\n\n year1 = year_range[:4]\n year2 = year_range[4:]\n\n try:\n if int(year2) - int(year1) == 1:\n if int(year1) <= int(get_current_hockey_year_start()):\n return True\n return False\n\n except Exception as e:\n print (\"inalid year passed\")\n print (str(e))\n print (traceback.print_exc())\n return False", "def is_valid_year(year_number):\n\n if (type(year_number) == int) and (START_YEAR <= year_number <= FINAL_YEAR):\n return True\n\n return False", "def _dateisrange(value):\n if len(re.findall('([0-9])/([0-9])', value)) > 1:\n NotImplemented\n elif len(re.findall('([0-9])/([0-9])', value)) == 1:\n return True\n else:\n return False", "def _is_range(cls, rng):\n match = re.search(\"([0-9][1-9]*)-([0-9][1-9]*)\", rng)\n # Group is a singular value.\n return match is not None", "def is_valid_birth_year(birth_year: int) -> bool:\n return birth_year.isnumeric() and 1920 <= int(birth_year) <= 2002", "def input_validation(input_: str) -> bool:\n return fullmatch('[1-9]', input_) is not None", "def _is_in_range(valid_values):\n\n def f(x):\n if x not in valid_values:\n raise ValueError('{} not in 
{}'.format(x, valid_values))", "def check_subseq_range(subseq_range):\n subseq_range_content = subseq_range.split(\"-\")\n if len(subseq_range_content) != 2:\n err_str = \"A subseq_range must have two arguments (start and stop)\"\n err_str += \" separated by a -\"\n raise ValueError(err_str)\n if int(subseq_range_content[0]) > int(subseq_range_content[1]):\n err_str = \"Start for a subseq_range must be lower than the stop\"\n raise ValueError(err_str)", "def range_between_0_and_9(self, user_num):\r\n if 0 <= user_num < 9:\r\n return True\r\n else:\r\n return False", "def is_valid(n):\n\tif type(n) == int:\n\t\tn = str(n)\n\tfor index, c in enumerate(n):\n\t\tif index == 0:\n\t\t\tcontinue\n\t\tif n[index - 1] > n[index]:\n\t\t\treturn False\n\treturn True", "def is_valid_month_number(month_number: int) -> bool:\n if month_number in range(13):\n return True\n else:\n return False", "def validate_query_range(query_range: str) -> (bool, str):\n try:\n start, end = query_range.split(\"-\")\n start = int(start.strip())\n end = int(end.strip())\n if start < 1:\n return False, \"Start of range should be >= 1\"\n if end < 1:\n return False, \"End of range should be >= 1\"\n\n return True, f\"{start-1}-{end-1}\" # subtract by one as index always start with 0\n\n except ValueError as err:\n return False, err", "def validate(n = 5):", "def validate(number):\n number = compact(number)\n if len(number) != 10:\n raise InvalidLength()\n if not _nipt_re.match(number):\n raise InvalidFormat()\n return number", "def test_validate_ranges():\n arr = np.array([1, 1, 1])\n assert (arr == validate_ranges(arr, 3)).all()\n\n with pytest.raises(ValueError):\n validate_ranges(arr, 2)\n\n with pytest.raises(ValueError):\n validate_ranges(np.array([-0.1, 0.1, 1]), 2)\n\n assert validate_ranges(None, 3) is None\n assert (validate_ranges([1, 1, 1], 3) == np.array([1, 1, 1])).all()", "def is_valid_day (val):\n if len(val) == 2 and count_digits(val) == 2:\n day = int(val)\n return day > 0 and day < 32\n return False", "def rg_valid(rg):\n return len(rg) != 9", "def validate(self, cnpj):\n return bool(cnpj[-2:] == self.digits(cnpj))", "def is_valid_issue_year(issue_year: int) -> bool:\n return issue_year.isnumeric() and 2010 <= int(issue_year) <= 2020" ]
[ "0.71524906", "0.7093412", "0.70692015", "0.69911283", "0.6924774", "0.6916429", "0.6841534", "0.66205734", "0.66151506", "0.661108", "0.6566998", "0.65198666", "0.6514695", "0.6487208", "0.64803034", "0.64262885", "0.64223665", "0.6400888", "0.6372396", "0.6336665", "0.633639", "0.6271358", "0.6235629", "0.62247914", "0.62161124", "0.62040186", "0.61773753", "0.6168788", "0.61395854", "0.6119432" ]
0.81118214
0
>>> validate_hgt("60in") True >>> validate_hgt("190cm") True >>> validate_hgt("190in") False >>> validate_hgt("190") False
def validate_hgt(hgt: str) -> bool: if hgt[-2:] == 'cm': return 150 <= int(hgt[:-2]) <= 193 if hgt[-2:] == 'in': return 59 <= int(hgt[:-2]) <= 76 return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def valid_hgt(cls, hgt):\n check = re.search(r\"^(\\d+)(in|cm)$\", hgt)\n if not check:\n raise ValueError(f\"Invalid hgt {hgt}\")\n n, unit = check.groups()\n n = int(n)\n if unit == \"in\":\n if not (76 >= n >= 59):\n raise ValueError(\"Invalid hgt\")\n if unit == \"cm\":\n if not (193 >= n >= 150):\n raise ValueError(\"Invalid hgt\")\n return hgt", "def is_valid_height(height: str) -> bool:\n split_height_at_centimeters = height.split(\"cm\")\n if len(split_height_at_centimeters) == 1:\n split_height_at_inches = height.split(\"in\")\n return split_height_at_inches[0].isnumeric() and 59 <= int(split_height_at_inches[0]) <= 76\n else:\n return split_height_at_centimeters[0].isnumeric() and 150 <= int(split_height_at_centimeters[0]) <= 193", "def validate_height(passport: map) -> bool:\n if passport.get('hgt'):\n if passport['hgt'].count('cm'):\n val = int(passport['hgt'][:passport['hgt'].find('cm')])\n if 150 <= val <= 193:\n return True\n elif passport['hgt'].count('in'):\n val = int(passport['hgt'][:passport['hgt'].find('in')])\n if 59 <= val <= 76:\n return True\n else:\n return False\n\n return False", "def check_hsdpa_tbi(self, ki):\r\r\n loggerCmw = logging.getLogger(__name__ + ' check_hsdpa_tbi')\r\r\n if ki >=0 and ki <= 62:\r\r\n return ki\r\r\n else:\r\r\n loggerCmw.error('non valid ki, valid range is 0 to 62')\r\r\n sys.exit(self.code.ERRCODE_TEST_FAILURE_PARAMCONFIG)", "def validate_hcl(hcl: str) -> bool:\n if len(hcl) != 7 or hcl[0] != '#':\n return False\n for x in hcl[1:]:\n if x not in list(map(str, range(9 + 1))) + \\\n list(map(chr, range(ord('a'), ord('f') + 1))):\n return False\n return True", "def checkIsHours(value):\n\n if not isinstance(value, str):\n return False\n\n if '*' in value:\n return False\n elif '+' in value:\n return False\n elif '-' in value:\n return False\n else:\n return True", "def HeightTest(string):\n\t# pull out the last 2 characters of the string\n\tunit = string[-2:]\n\n\t# if there are no units (unit is not 'in' or 'cm')\n\t# return False\n\tif (unit != 'in') & (unit != 'cm'):\n\t\treturn False\n\n\t# pull out the measurement, cast as integer\n\tmeasurement = int(string[:-2])\n\n\tif unit == 'cm':\n\t\treturn NumberTest(measurement, 150, 193)\n\tif unit == 'in':\n\t\treturn NumberTest(measurement, 59, 76)\n\treturn False", "def validate_gatetype(self, gt):\r\n if gt not in self.VALID_GATES:\r\n return False\r\n return True", "def validate_hash(h):\n if len(h) not in (32, 40, 64, 128):\n return False\n\n return bool(re.match(\"[0-9a-fA-F]*$\", h))", "def validateGTF(df):\n try:\n msg = ''\n if df.isnull().values.any() == True:\n msg = 'Missing values' + '\\n' + str(df.isnull().sum())\n return [False, msg]\n if (all(x in ['+', '-'] for x in df['strand'].cat.categories.tolist())) != True:\n msg = 'Bad strand symbol(has to be + or -'\n return [False, msg]\n return [True, msg] \n except (TypeError, AttributeError, KeyError):\n return [False, 'Not a valid dataframe']", "def isValid(t_id):\n\tstr_id=str(t_id).strip()\n\treturn str_id.isdigit()", "def oxygen_validation(oxygen: int) -> bool:\n if not str(oxygen).isnumeric() or isinstance(oxygen, str):\n return False\n\n return int(oxygen) < 101 and int(oxygen) >= 0", "def get_valid_emission_heading(self, heading):\n if heading or int(float(heading)) >= 0:\n try:\n heading = int(float(heading))\n if s.MIN_HEADING <= heading <= s.MAX_HEADING:\n return heading\n except:\n pass\n \n message = 'This vehicle heading is not valid. Try something between [%s, %s].' 
% (s.MIN_HEADING, s.MAX_HEADING)\n raise InvalidUsage(message)", "def parse_trex(input_f):\n\tstring = \"hgt : number of HGT(s) found = \"\n\tout_str = False\n\tfor line in input_f:\n\t\tif string in line:\n\t\t\tnumber_hgts = line.split(string)[1].strip()\n\t\t\tsys.stdout.write(number_hgts)\n\t\t\tout_str = True\n\tif not out_str:\n\t\tsys.stdout.write(\"NaN\")", "def check_gtis(gti):\n if len(gti) < 1:\n raise ValueError(\"Empty GTIs.\")\n\n for g in gti:\n if np.size(g) != 2 or np.ndim(g) != 1:\n raise TypeError(\n \"Please check the formatting of the GTIs. They need to be\"\n \" provided as [[gti00, gti01], [gti10, gti11], ...].\"\n )\n\n gti = np.array(gti)\n gti_start = gti[:, 0]\n gti_end = gti[:, 1]\n\n # Check that GTIs are well-behaved\n if not np.all(gti_end >= gti_start):\n raise ValueError(\"The GTI end times must be larger than the \" \"GTI start times.\")\n\n # Check that there are no overlaps in GTIs\n if not np.all(gti_start[1:] >= gti_end[:-1]):\n raise ValueError(\"This GTI has overlaps.\")\n\n return", "def validate_gin(val):\n match = re.match(GLOBAL_CONF.get('gin_regex'), val)\n if match is None:\n raise ValidationError(\"Government Identification Number must contain {range} digits\".format(range=GLOBAL_CONF.get('gin_length')))", "def test_validate_fr_lt_mh_mn(session, desc, valid, reg_type, message_content):\n # setup\n json_data = copy.deepcopy(FINANCING)\n json_data['type'] = reg_type\n if desc != DESC_EXCLUDES_LY:\n del json_data['lifeYears']\n if desc != DESC_INFINITY_INVALID:\n json_data['lifeInfinite'] = True\n else:\n json_data['lifeInfinite'] = False\n del json_data['trustIndenture']\n if desc != DESC_INCLUDES_GC:\n del json_data['generalCollateral']\n if desc == DESC_MISSING_VC:\n del json_data['vehicleCollateral']\n elif desc != DESC_VC_NOT_MH:\n json_data['vehicleCollateral'][0]['type'] = 'MH'\n\n error_msg = validator.validate(json_data)\n if valid:\n assert error_msg == ''\n elif message_content:\n # print(error_msg)\n assert error_msg != ''\n assert error_msg.find(message_content) != -1", "def test_parse_hgts_riatahgt(self):\n with open(self.riatahgt_output_hgt_fp, 'r') as f:\n output = parse_hgts(f, 'riata-hgt')\n self.assertEqual(int(output), 1)", "def input_validation(input_: str) -> bool:\n return fullmatch('[1-9]', input_) is not None", "def valid(h):\n h = bytes.decode(h)\n if h[0].islower():\n if set(h).issubset(ALNUM):\n # Yes! 
Digits, Upper- and lowercase are present\n return True\n return False", "def test_validate_input_valid(self):\n final_config = self.dtm1.validate_input('00001111')\n nose.assert_equal(final_config[0], 'q4')\n nose.assert_equal(str(final_config[1]), 'TMTape(\\'xxxxyyyy.\\')')", "def test_is_valid_label_value_valid_input():\n # test valid label values\n assert is_valid_label_value(value=None)\n assert is_valid_label_value(value=\"\")\n assert is_valid_label_value(value=\"l0L\")\n assert is_valid_label_value(value=\"L-l\")\n assert is_valid_label_value(value=\"L.L\")\n assert is_valid_label_value(value=\"l_4\")\n assert is_valid_label_value(value=\"4-you\")\n assert is_valid_label_value(value=\"You.2\")", "def test_height_valid(self):\n result = height_to_cm(\"5' 10\\\"\")\n self.assertEqual(result, 178)", "def validate_hash_difficulty(bhash: str, difficulty: int) -> bool:\n b = hex_to_bin(bhash)\n print(b[:4], type(b))\n return hex_to_bin(bhash).startswith('0' * difficulty)", "def validate_pkh(v):\n return _validate(v, prefixes=[b'tz1', b'tz2', b'tz3'])", "def _validate_heading(self, heading_parts: List[str]):\n # Validate heading row.\n assert len(heading_parts) >= 4\n assert \"oxygen\" in heading_parts\n assert \"pulse\" in heading_parts\n assert \"blood_pressure_systolic\" in heading_parts\n assert \"blood_pressure_diastolic\" in heading_parts", "def validate_range_str(range_str):\n if not isinstance(range_str, str):\n return False\n ranges = range_str.split(\",\")\n assert len(ranges) > 0\n for r in ranges:\n # a range may be either e.g. '64', or '128-256'\n try:\n c = [int(x) for x in r.split(\":\")]\n except:\n return False\n # c should be either e.g. [ 128 ], or [64,128].\n if len(c) == 1:\n if c[0] <= 0:\n return False\n elif len(c) == 2:\n if c[0] <= 0 or c[1] < c[0]:\n return False\n else:\n return False\n return True", "def parse_riatahgt(input_f):\n\tstring = \"There are \"\n\tout_str = False\n\tfor line in input_f:\n\t\tif string in line:\n\t\t\tnumber_hgts = line.split(string)[1].split(\" component(s)\")[0]\n\t\t\tsys.stdout.write(number_hgts)\n\t\t\tout_str = True\n\tif not out_str:\n\t\tsys.stdout.write(\"NaN\")", "def _is_valid_fmt(self, fmt):\n # make sure there is no leading or trailing whitespace\n fmt = fmt.strip()\n \n if fmt[0] != '%':\n return False\n \n # Handle business calendars first.\n # This does not check the calendar name.\n if fmt[1:3] == \"tb\" or fmt[1:4] == \"-tb\":\n return True if TB_FMT_RE.match(fmt) else False\n \n # date formats\n if fmt[1] == 't' or fmt[1:3] == '-t':\n return True if TIME_FMT_RE.match(fmt) else False\n \n # categorize using last character\n last_char = fmt[-1]\n if last_char == 's': # string\n m = STR_FMT_RE.match(fmt)\n if not m: return False\n width = int(m.group(3))\n if width == 0 or width > 244: return False\n return True\n elif last_char == 'H' or last_char == 'L': # binary\n # Valid binary formats are ^%(8|16)(H|L)$. 
Stata doesn't raise \n # error with -8 or -16, but the results are perhaps unexpected.\n return True if fmt[1:-1] in ('8', '16', '-8', '-16') else False\n elif last_char == 'x': # hexadecimal\n return True if fmt == '%21x' or fmt == '%-12x' else False\n elif last_char in {'f', 'g', 'e', 'c'}: # numeric\n m = NUM_FMT_RE.match(fmt)\n if not m: return False\n width = int(m.group(3))\n if width == 0 or width <= int(m.group(5)) or width > 244: \n return False\n return True\n \n return False", "def validate_query_range(query_range: str) -> (bool, str):\n try:\n start, end = query_range.split(\"-\")\n start = int(start.strip())\n end = int(end.strip())\n if start < 1:\n return False, \"Start of range should be >= 1\"\n if end < 1:\n return False, \"End of range should be >= 1\"\n\n return True, f\"{start-1}-{end-1}\" # subtract by one as index always start with 0\n\n except ValueError as err:\n return False, err" ]
[ "0.84475106", "0.6317095", "0.61406976", "0.5876957", "0.5799034", "0.5661624", "0.56512386", "0.56129754", "0.5500368", "0.54911727", "0.5486033", "0.5481811", "0.54454076", "0.54362947", "0.5415607", "0.53925407", "0.5391829", "0.53789145", "0.53327215", "0.5327332", "0.53048843", "0.5286116", "0.5277938", "0.52700794", "0.52495867", "0.5248052", "0.5207306", "0.5191801", "0.51810145", "0.5179091" ]
0.9066648
0
>>> validate_hcl("#123abc")
True
>>> validate_hcl("#123abz")
False
>>> validate_hcl("123abc")
False
>>> validate_hcl("#000000")
True
>>> validate_hcl("#999999")
True
>>> validate_hcl("#aaaaaa")
True
>>> validate_hcl("#ffffff")
True
def validate_hcl(hcl: str) -> bool:
    # A valid hair color is a '#' followed by exactly six characters 0-9 or a-f.
    if len(hcl) != 7 or hcl[0] != '#':
        return False
    for x in hcl[1:]:
        if x not in list(map(str, range(9 + 1))) + \
                list(map(chr, range(ord('a'), ord('f') + 1))):
            return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_hash(h):\n if len(h) not in (32, 40, 64, 128):\n return False\n\n return bool(re.match(\"[0-9a-fA-F]*$\", h))", "def valid(h):\n h = bytes.decode(h)\n if h[0].islower():\n if set(h).issubset(ALNUM):\n # Yes! Digits, Upper- and lowercase are present\n return True\n return False", "def validate(number):\n number = compact(number)\n if len(number) != 9:\n raise InvalidLength()\n if not isdigits(number[2:]):\n raise InvalidFormat()\n if not isdigits(number[:2]) and not all(x in 'ABCEHKMOPT' for x in number[:2]):\n raise InvalidFormat()\n if number[0] not in '1234567ABCEHKM':\n raise InvalidComponent()\n if number[-1] != calc_check_digit(number):\n raise InvalidChecksum()\n return number", "def input_validation(input_: str) -> bool:\n return fullmatch('[1-9]', input_) is not None", "def validate_hgt(hgt: str) -> bool:\n if hgt[-2:] == 'cm':\n return 150 <= int(hgt[:-2]) <= 193\n if hgt[-2:] == 'in':\n return 59 <= int(hgt[:-2]) <= 76\n return False", "def validate(input):\n regex = re.compile(r'(UL)?\\d{1,' + re.escape(str(barcode_digit_length)) + '}$', flags=re.IGNORECASE)\n if regex.match(input):\n is_valid = True\n else:\n is_valid = False\n return is_valid", "def test_minlength():\n assert has_min_length(None, 8) is None\n assert has_min_length('abcd1234', 8) is None\n assert has_min_length('a', 8)", "def check_input(input_string):\n if len(input_string) > 50: # check if length of name is less than 50 ir not\n return False\n else:\n return bool(re.match('[a-zA-Z\\s]+$', input_string)) # check is input contains only chars and spaces", "def validate_hash_difficulty(bhash: str, difficulty: int) -> bool:\n b = hex_to_bin(bhash)\n print(b[:4], type(b))\n return hex_to_bin(bhash).startswith('0' * difficulty)", "def isvalid(str_input):\r\n if str_input == \"\":\r\n return True\r\n if str_input[0] not in [\"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"-\", \".\"]:\r\n return False\r\n dot_count = 0\r\n for ch in str_input[1:]:\r\n if ch == \".\":\r\n dot_count += 1\r\n if dot_count == 2:\r\n return False\r\n if ch not in [\"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \".\"]:\r\n return False\r\n return True", "def is_valid(key):\n return key[0:2] == \"MR\" and key[2:].isdigit() and len(key) in [9, 10]", "def test_is_valid_hex(self):\n self.assertTrue(is_valid_hex('#aabb11'))\n self.assertTrue(is_valid_hex('#000'))\n self.assertTrue(is_valid_hex('#aaa'))\n self.assertFalse(is_valid_hex('black'))\n self.assertFalse(is_valid_hex('bl(ack'))", "def valid_format(s):\n if len(s) > 7:\n return False\n elif '0' in s and len(s) == 1:\n return True\n elif s[0] == '0' and len(s) > 1:\n return False\n elif introcs.isalpha(s):\n return False\n elif (len(s) > 3) and (introcs.count_str(s, ',') == 0):\n return False\n elif introcs.count_str(s, ',') == 0:\n return introcs.isdecimal(s)\n elif introcs.count_str(s, ',') > 1:\n return False\n elif ',' in s and introcs.count_str(s,',') == 1:\n comma_check = s[introcs.find_str(s, ',')+1:]\n before_comma_check = s[:introcs.find_str(s, ',')]\n introcs.isdecimal(before_comma_check)\n return (True if len(comma_check) == 3 else False) and introcs.isdecimal(before_comma_check)", "def validate_n_digits(n: int = 4) -> bool:\n def func(s: str):\n if len(s) != n:\n return False\n if not s.isdigit():\n return False\n return True\n return func", "def validate(value):\n if str.isdigit(value) or value == \"\":\n return True\n else:\n return False", "def validate(number):\n number = compact(number)\n if not 
isdigits(number):\n raise InvalidFormat()\n if len(number) != 10:\n raise InvalidLength()\n if checksum(number) != 0:\n raise InvalidChecksum()\n return number", "def validate(info):\n\n\tif info == \"\": \n\t\treturn False\n\telse:\n\t\tif len(info) < 5 or len(info) > 32:\n\t\t\treturn False\n\t\telse:\n\t\t\treturn True", "def test_allowed_chars(self):\n hash_val = self.reverse_hash.get_hash('123')\n self.assertEqual(hash_val['error'], 'allowed chars {}'.format(self.reverse_hash.letters))", "def valid_pw(password, h):\n salt = h.split(',')[0]\n return h == make_pw_hash(password, salt)", "def ascii_numeric(s: str) -> bool:\n return frozenset(s).issubset(_ascii_n)", "def validate_pkh(v):\n return _validate(v, prefixes=[b'tz1', b'tz2', b'tz3'])", "def test_valid_luhn(self):\n assert luhn_checksum(\"79927398713\") == 0", "def check_length(string):\n if 6 < len(string) < 12:\n return True\n\n print(\"Your password is not between 6 and 12 characters\")\n return False", "def __check_validation(input_string):\n if not input_string:\n raise NullInputException(\"Input string should be not empty\")\n if type(input_string) != str:\n raise NonStringInputException(\"Input value should be a string\")\n if len(input_string) >= 200:\n raise TooLongInputException(\"Input string should be less than 200 characters\")\n for i in input_string:\n if not i.isalpha():\n raise NonStringInputException(\"All input value characters should be an alpha\")", "def isValid(t_id):\n\tstr_id=str(t_id).strip()\n\treturn str_id.isdigit()", "def validate(number):\n number = compact(number)\n if len(number) != 11:\n raise InvalidLength()\n if not isdigits(number):\n raise InvalidFormat()\n if number.startswith('0'):\n raise InvalidFormat()\n # In the first 10 digits exactly one digit must be repeated two or\n # three times and other digits can appear only once.\n counter = defaultdict(int)\n for n in number[:10]:\n counter[n] += 1\n counts = [c for c in counter.values() if c > 1]\n if len(counts) != 1 or counts[0] not in (2, 3):\n raise InvalidFormat()\n return mod_11_10.validate(number)", "def __call__(self, value):\n if value is None:\n return value\n\n value = value.replace(\" \", \"\").replace(\".\", \"\")\n if not value.isdigit():\n raise ValidationError(_(\"AHV must contain numbers only\"))\n if len(value) != 13:\n raise ValidationError(_(\"AHV must be 13 numbers long.\"))\n\n if self.ahv_checksum(value[:-1]) != value[-1]:\n raise ValidationError(_(\"Not a valid AHV number.\"))", "def is_valid(t_input):\r\n eax = 1 # flag validita': inizialmente non valido (caso stringa di lunghezza 0)\r\n ecx = 0 # indice\r\n \r\n while t_input[ecx] != \"\\0\":\r\n eax = 1 # mi preparo il flag \"invalido\" per il carattere\r\n\r\n if is_valid_char(t_input[ecx]) == 0:\r\n # carattere valido\r\n eax = 0\r\n\r\n # se il carattere e' invalido\r\n if eax == 1:\r\n # salta fuori dal ciclo\r\n break\r\n\r\n ecx += 1\r\n # salta a inizio ciclo\r\n\r\n # eax e' 1 per stringhe vuote o \r\n # almeno un carattere invalido\r\n return eax", "def alphanumeric(s: str) -> bool:\n return len(re.findall(r'[^A-Za-z0-9]', s)) == 0", "def _validate(cls, pid_value):\n blop = re.compile('^[-\\w]+$')\n if not bool(blop.match(pid_value)):\n raise ValidationError(\n 'The ID should contain only letters with numbers or dashes.',\n field_name='id',\n )" ]
[ "0.66823274", "0.65741116", "0.63123316", "0.6178835", "0.6109796", "0.6095078", "0.60738504", "0.5964063", "0.59419316", "0.59182006", "0.5900173", "0.5891105", "0.58870363", "0.58630085", "0.5845556", "0.58441126", "0.5839956", "0.57940686", "0.5791709", "0.57306457", "0.5724601", "0.5711866", "0.57061064", "0.56986755", "0.5697111", "0.56953657", "0.5694137", "0.5678029", "0.5671987", "0.56674963" ]
0.74781597
0
>>> validate_ecl("brn")
True
>>> validate_ecl("wat")
False
def validate_ecl(ecl: str) -> bool:
    return ecl in ["amb", "blu", "brn", "gry", "grn", "hzl", "oth"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ecssa_verify(ec: EC, hf, m: bytes, P: Point, sig: ECSS) -> bool:\n\n # this is just a try/except wrapper\n # _ecssa_verify raises Errors\n try:\n return _ecssa_verify(ec, hf, m, P, sig)\n except Exception:\n return False", "def validate():", "def test_is_valid(self, address):\n self.test_string(address)\n self.test_alnum(address)", "def validate(self, s):\n if len(s) == 0:\n return False\n if s in self.whitelist:\n return True\n if s in self.blacklist:\n return False\n\n # SQL Types are rarely used\n if 't' in s and 'f(t' not in s and 'At' not in s:\n return False\n\n if '1nf' in s:\n return False\n if 's1o' in s:\n return False\n if 'oo' in s:\n return False\n if 'v,s' in s:\n return False\n if 's,v' in s:\n return False\n if 'v,v' in s:\n return False\n if 'v,1' in s:\n return False\n if 'v,n' in s:\n return False\n if 'n,v' in s:\n return False\n if '1,v' in s:\n return False\n if 'Eo(' in s:\n return False\n if '(o(' in s:\n return False\n if '(o1' in s:\n return False\n if '(on' in s:\n return False\n if '(os' in s:\n return False\n if '(of' in s:\n return False\n if '(ov' in s:\n return False\n if 'B(n)' in s:\n return False\n if 'oso' in s:\n return False\n if 'o1o' in s:\n return False\n if 'ono' in s:\n return False\n\n # only 1 special case for this\n # 1;foo:goto foo\n # 1;n:k\n # the 'foo' can only be a 'n' type\n if ':' in s and not 'n:' in s:\n return False\n\n if '11' in s:\n return False\n\n if '))' in s:\n return False\n if '((' in s:\n return False\n if 'v1' in s:\n return False\n\n if 'nv' in s and ';T' not in s:\n return False\n if 'nn' in s and ';T' not in s:\n return False\n\n # select @version foo is legit\n # but unlikely anywhere else\n if 'vn' in s and 'Evn' not in s:\n return False\n\n if 'oE' in s:\n return False\n\n if 'A1' in s:\n return False\n if 'An' in s:\n return False\n if 'A(1' in s:\n return False\n\n if 'vov' in s:\n return False\n if 'vo1' in s:\n return False\n if 'von' in s:\n return False\n\n if 'ns' in s:\n if 'U' in s:\n return True\n if 'T' in s:\n return True\n return False\n\n if 'sn' in s:\n # that is... 
Tsn is ok\n if s.find('T') != -1 and s.find('T') < s.find('sn'):\n return True\n return False\n\n # select foo (as) bar is only nn type i know\n if 'nn' in s and 'Enn' not in s and ';T' not in s:\n return False\n\n if ',o' in s:\n return False\n\n if 'kk' in s and 'Tkk' not in s:\n return False\n\n if 'ss' in s:\n return False\n\n if 'ff' in s:\n return False\n\n if '1no' in s:\n return False\n\n if 'kno' in s:\n return False\n\n if 'nEk' in s:\n return False\n\n if 'n(n' in s:\n return False\n if '1so' in s:\n return False\n if '1s1' in s:\n return False\n if 'noo' in s:\n return False\n if 'ooo' in s:\n return False\n\n if 'vvv' in s:\n return False\n\n if '1vn' in s:\n return False\n if '1n1' in s:\n return False\n if '&1n' in s:\n return False\n if '&1v' in s:\n return False\n if '&1s' in s:\n return False\n if 'nnk' in s:\n return False\n if 'n1f' in s:\n return False\n # folded away\n if s.startswith('('):\n return False\n\n if '&o' in s:\n return False\n\n if '1,1' in s:\n return False\n if '1,s' in s:\n return False\n if '1,n' in s:\n return False\n if 's,1' in s:\n return False\n if 's,s' in s:\n return False\n if 's,n' in s:\n return False\n if 'n,1' in s:\n return False\n if 'n,s' in s:\n return False\n if 'n,n' in s:\n return False\n if '1o1' in s:\n return False\n if '1on' in s:\n return False\n if 'no1' in s:\n return False\n if 'non' in s:\n return False\n if '1(v' in s:\n return False\n if '1(n' in s:\n return False\n if '1(s' in s:\n return False\n if '1(1' in s:\n return False\n if 's(s' in s:\n return False\n if 's(n' in s:\n return False\n if 's(1' in s:\n return False\n if 's(v' in s:\n return False\n if 'v(s' in s:\n return False\n if 'v(n' in s:\n return False\n if 'v(1' in s:\n return False\n if 'v(v' in s:\n return False\n\n if s.startswith('n('):\n return False\n\n if s.startswith('vs'):\n return False\n\n if s.startswith('o'):\n return False\n\n if ')(' in s:\n return False\n\n # need to investigate T(vv) to see\n # if it's correct\n if 'vv' in s and s != 'T(vv)':\n return False\n\n # unlikely to be sqli but case FP\n if s in ('so1n)', 'sonoE'):\n return False\n\n return True", "def validate_address(address:str) -> bool:\r\n return True", "def test_special_case(self):\n cases = [\n ('3467875434578764345789654', False),\n ('AAAAAAAAAAA', False),\n ('', False),\n ]\n for titulo_eleitoral, is_valid in cases:\n self.assertEqual(self.titulo_eleitoral.validate(titulo_eleitoral), is_valid)", "def isValid(text):\n return bool(re.search(r\"\\b((close|activate)\\ (check|tunnel|ubuntu|fedora|windows))\\b\", text, re.IGNORECASE))", "def cpf_valid(cpf):\n cpf_validator = CPF()\n return cpf_validator.validate(cpf)", "def validateFromString(cls,xmlstring,target):\n corpus = Corpus()\n if corpus.readFromString(xmlstring):\n return( Validator.validate(corpus,target) ) \n else:\n return(False)", "def validate(prop, string, node, match, entry_start, entry_end):\n return True", "def is_valid(name):\n return bool(name)", "def Check_is_valid(self, String):\r\n\r\n if self.Special_Names.__contains__(String):\r\n return False\r\n elif self.Special_Names_no_Operands.__contains__(String):\r\n return False\r\n elif self.Special_Names_one_Operands.__contains__(String):\r\n return False\r\n elif self.Special_Names_two_Operands.__contains__(String):\r\n return False\r\n elif self.Data_types.__contains__(String):\r\n return False\r\n elif self.Registers.__contains__(String):\r\n return False\r\n elif self.Irvine32_functions.__contains__(String):\r\n return False\r\n elif 
String.__contains__('\"'):\r\n return False\r\n elif String.__contains__('\\''):\r\n return False\r\n elif String.__contains__('.'):\r\n return False\r\n elif String[0].isdecimal():\r\n return False\r\n if len(self.Data_variables) > 0:\r\n if self.Data_variables.__contains__(String):\r\n return False\r\n if len(self.Functions_names) > 0:\r\n if self.Functions_names.__contains__(String):\r\n return False\r\n if len(self.Labels_names) > 0:\r\n if self.Labels_names.__contains__(String):\r\n return False\r\n return True", "def test_valid_country():\n assert valid_country(\"Democratic Republic of Lungary\") is True\n assert valid_country(\"Kraznoviklandstan\") is True\n assert valid_country(\"kraznoviklandstan\") is True\n assert valid_country(\"KRAZNOVIKLANDSTAN\") is True\n\n assert valid_country(\"Democratic_Republic982759\") is False\n assert valid_country(\"Kraznoviklandsta\") is False\n assert valid_country(\"Principalities of Fryed\") is False\n assert valid_country(\"FRY\") is False", "def _is_valid_certificate(c):\n\n # Read the graph\n if c.Dataset in HUFFNER_DATASETS:\n graph = read_huffner(\n ORIGINAL_DATA_DIR / 'huffner',\n c.Dataset + ORIGINAL_HUFFNER_DATA_EXT\n )\n elif c.Dataset in BEASLEY_DATASETS:\n graph = read_beasley(\n ORIGINAL_DATA_DIR / 'beasley',\n c.Dataset + BEASLEY_EXT\n )\n elif c.Dataset in BEASLEY_GKA:\n graph = read_beasley(\n ORIGINAL_DATA_DIR / 'gka',\n c.Dataset + BEASLEY_EXT\n )\n else:\n raise Exception('Unknown Dataset: {}'.format(c.Dataset))\n\n # Load the original oct set and names\n og_names = load_og_name_lookup(LOOKUP, '{}.lookup'.format(c.Dataset))\n pre_oct_set = load_pre_oct_set(OCT, '{}.oct'.format(c.Dataset))\n\n # Parse the certificate\n certificate = list(map(str, ast.literal_eval(c.Certificate)))\n\n # Convert certificate to OCT set with original names\n oct_set = convert_oct_set(certificate, og_names)\n\n # Remove oct verticies\n if pre_oct_set:\n graph.remove_nodes_from(pre_oct_set)\n if oct_set:\n graph.remove_nodes_from(oct_set)\n\n # Verify the remainder is bipartite\n return nx.is_bipartite(graph)", "def is_valid(passwd: str) -> bool:\n return (\n re.search(r'abc|bcd|cde|def|efg|fgh|ghi|hij|jkl|klm|lmn|mno|nop|opq|pqr|qrs|rst|stu|tuv|uvw|vwx|wxy|xyz', passwd) is not None and\n all([c not in passwd for c in 'iol']) and\n re.search(r'([a-z])\\1.*([a-z])\\2', passwd) is not None\n )", "def is_ascl(val):\n return ascl_regexp.match(val)", "def check_validity(self):", "def _not_valid_(s) :\n return not s.valid()", "def test_valid_text_str(self):\n f = lws.valid_text\n assert f('string', r'[a-z]*') is True\n assert f('string', r'string') is True\n assert f('string', r'[0-9]*') is False\n assert f('', r'.*') is False\n assert f('abcde', lambda x: 'e' in x) is True\n assert f('abcde', lambda x: 'f' in x) is False", "def simple_validator(passport):\n if len(passport) == 8:\n return True\n if len(passport) == 7 and \"cid\" not in passport:\n return True\n return False", "def check_afm(afm):\n \n if not isinstance(afm, str):\n raise TypeError( \"check_afm()\", \"You should feed to this function only strings to avoid exceptions and errors! 
Aborting.\" )\n if len(afm) == 11 and afm[:2].upper() == \"EL\":\n afm=afm[2:]\n if afm.isdigit() == True and len(afm) == 9:\n i, sums = 256, 0\n for digit in afm[:-1]:\n sums += int(digit) * i\n i /= 2\n checksum = sums % 11\n if int(afm[-1]) == int(checksum) or (checksum==10 and afm[-1]==\"0\"):\n return True\n return False", "def validate_hcl(hcl: str) -> bool:\n if len(hcl) != 7 or hcl[0] != '#':\n return False\n for x in hcl[1:]:\n if x not in list(map(str, range(9 + 1))) + \\\n list(map(chr, range(ord('a'), ord('f') + 1))):\n return False\n return True", "def is_valid_posse(posse: 'Tuple'):\n # todo: write this as iterator through test types\n # could use test types as blocks for lambda calculus\n # for test in attribute tests:\n\n # print('testing posse: %s' % str(posse))\n\n prefixes = set()\n cases = set()\n lengths = set()\n letters = set()\n\n for member in posse:\n # prefix validity\n prefixes.add(member[0])\n\n # case validity\n if ord(member[1]) in THIRD_CASE:\n cases.add(2)\n elif member[1].isupper():\n cases.add(1)\n else:\n cases.add(0)\n\n # length validity\n lengths.add(len(member[1:]))\n\n # letter validity\n # print('letter validity for %s' % member)\n for letter_type in LETTER_SETS:\n if ord(member[1]) in LETTER_SETS[letter_type]:\n letters.add(letter_type)\n\n prefix_is_valid = len(prefixes) == 1 or len(prefixes) == 3\n case_is_valid = len(cases) == 1 or len(cases) == 3\n length_is_valid = len(lengths) == 1 or len(lengths) == 3\n letter_is_valid = len(letters) == 1 or len(letters) == 3\n\n # print('prefix_is_valid: %s' % prefix_is_valid)\n # print('case_is_valid: %s' % case_is_valid)\n # print('length_is_valid: %s' % length_is_valid)\n # print('letter_is_valid: %s' % letter_is_valid)\n\n return all((prefix_is_valid,\n case_is_valid,\n length_is_valid,\n letter_is_valid))", "def test_is_valid():\n # Expected input.\n assert make_european.is_valid('12/31/2015:[email protected]')\n # Check 'at' typo.\n assert not make_european.is_valid('12/31/2015:john.a.grahamgmail.com')\n # Check colon typo.\n assert not make_european.is_valid('12/31/2015::[email protected]')\n # Check forward slash replacement.\n assert not make_european.is_valid('12.31.2015:[email protected]')\n # Check order.\n assert not make_european.is_valid('[email protected]:12/31/2015')", "def brepalgo_IsValid(*args):\n return _BRepAlgo.brepalgo_IsValid(*args)", "def is_valid(s):\n in_str = False\n bb = 0\n for c in s:\n if c == '(' and not in_str:\n bb += 1\n elif c == ')' and not in_str:\n bb -= 1\n if bb < 0:\n return False\n elif c == '\\\"':\n in_str = not in_str\n return bb == 0", "def test_valid_general_collateral():\n is_valid, errors = validate(GENERAL_COLLATERAL, 'generalCollateral', 'ppr')\n\n if errors:\n for err in errors:\n print(err.message)\n print(errors)\n\n assert is_valid", "def valid_format(s):\n if len(s) > 7:\n return False\n elif '0' in s and len(s) == 1:\n return True\n elif s[0] == '0' and len(s) > 1:\n return False\n elif introcs.isalpha(s):\n return False\n elif (len(s) > 3) and (introcs.count_str(s, ',') == 0):\n return False\n elif introcs.count_str(s, ',') == 0:\n return introcs.isdecimal(s)\n elif introcs.count_str(s, ',') > 1:\n return False\n elif ',' in s and introcs.count_str(s,',') == 1:\n comma_check = s[introcs.find_str(s, ',')+1:]\n before_comma_check = s[:introcs.find_str(s, ',')]\n introcs.isdecimal(before_comma_check)\n return (True if len(comma_check) == 3 else False) and introcs.isdecimal(before_comma_check)", "def IsValid(*args):\n return 
_BRepAlgo.brepalgo_IsValid(*args)", "def valid(h):\n h = bytes.decode(h)\n if h[0].islower():\n if set(h).issubset(ALNUM):\n # Yes! Digits, Upper- and lowercase are present\n return True\n return False" ]
[ "0.62015206", "0.61072654", "0.6074642", "0.6051566", "0.60084826", "0.5970571", "0.59182256", "0.59039706", "0.59022427", "0.58978814", "0.58941233", "0.58641183", "0.58630884", "0.5861052", "0.58596045", "0.58547425", "0.5842046", "0.5841408", "0.5838524", "0.5835987", "0.58297855", "0.5813473", "0.58111733", "0.5791664", "0.57896024", "0.57677186", "0.57654536", "0.5729928", "0.5710381", "0.5693404" ]
0.85189974
0
Takes a list of vectors and returns their average
def compute_average(vec_list):
    return np.sum(vec_list, axis=0) / len(vec_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def average_vectors(vectors_list):\n return np.mean(vectors_list, axis=0)", "def average(cls, vectors):\n return cls.sum(vectors) / len(vectors)", "def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors: List[Vector]) -> Vector:\n n = len(vectors)\n\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1/n, vector_sum(vectors))", "def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1 / n, vector_sum(vectors))", "def vector_mean(vectors):\n n = len(vectors)\n return scalar_multiply(1 / n, vector_sum(vectors))", "def avg(list):\n return sum(list) / len(list)", "def avg(vector):\n if len(vector) == 0:\n return 0\n return sum(vector) / len(vector)", "def average(lst):\n return sum(lst)/len(lst)", "def avg(l):\n return (sum(l)/float(len(l)))", "def avg(lst: list):\n return sum(lst) / len(lst)", "def func(lst):\n tot = 0\n for i in lst:\n tot = tot + i\n avg = tot / len(lst)\n return avg", "def average(l):\n return float(sum(l)/len(l))", "def CalculateListAverage(values):\n if not values:\n return 0\n return sum(values) / float(len(values))", "def list_mean(lst):\n return list_sum(lst) / len(lst)", "def mean(v):\n return sum(v)/len(v)", "def avg():\n\n # call sum method to add up the values in the collection & div by the num of items\n # call len method to compute the # of vals in collection which is divided by sum total \n mean = sum(inlist) / len(inlist)\n return mean \n\n # alternate method would be calling the reduce method with lamda \n # return reduce(lambda a, b: a + b, inlist) / len(inlist)", "def average(l: List[float]) -> float:\n n = len(l)\n if n == 0:\n return 0\n return sum(l) / n", "def getMean(list):\n return sum(list) / len(list)", "def get_mean(numlist):\n return np.mean(numlist)", "def price_average(lst):\n\n return sum(lst) / len(lst)", "def genre_average(genre_vectors):\n array = [vector for vector in genre_vectors]\n return np.average(array, axis=0)", "def lmean (inlist):\r\n sum = 0\r\n for item in inlist:\r\n sum = sum + item\r\n return sum/float(len(inlist))", "def _avg(cls, l):\n\n return sum(l) / float(len(l))", "def getAveragePositionFromList( positionsList ):\n \n vectors = [ vector.makeMVector( values = [x, y, z] ) for x, y, z in positionsList ]\n \n vectorsSum = vector.makeMVector()\n \n for v in vectors:\n \n vectorsSum += v\n \n vectorsAverage = vectorsSum / len( positionsList )\n \n return [ vectorsAverage[0], vectorsAverage[1], vectorsAverage[2] ]", "def mean_list(data):\n return sum(data) / len(data)" ]
[ "0.8681849", "0.79531354", "0.78980136", "0.78980136", "0.7865671", "0.7705014", "0.7705014", "0.7705014", "0.769987", "0.769987", "0.76556635", "0.7569094", "0.7538223", "0.753232", "0.7523211", "0.7509719", "0.7479618", "0.74456024", "0.737489", "0.72875494", "0.72845083", "0.72515714", "0.72444546", "0.72424406", "0.72221005", "0.72203624", "0.71964234", "0.718416", "0.7178103", "0.7164919" ]
0.8858376
0
Computes a sentence2vec embedding for preprocessed user input.
def compute_user_input_embedding(txt, model):
    # Look up the word2vec vector for every token, then average them
    # into a single sentence embedding.
    embeddings = []
    tokens = txt.split(" ")
    for word in tokens:
        embeddings.append(model.wv[word])
    sentence_embedding = compute_average(embeddings)
    return sentence_embedding
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_pre_trained_embedding(self):\n\n if self.embedding_type['type'] == 'glove':\n self.logging.info('use pre-trained glove word2vec')\n # a. load pre trained glove\n GLOVE_DIR = '../data/glove_pretrained/glove.6B'\n glove_suffix_name = 'glove.6B.' + str(self.embedding_size) + 'd.txt'\n import os\n import numpy as np\n\n embeddings_index = {}\n f = open(os.path.join(GLOVE_DIR, glove_suffix_name)) # 'glove.6B.100d.txt'))\n for line in f:\n values = line.split()\n word = values[0]\n coefs = np.asarray(values[1:], dtype='float32')\n embeddings_index[word] = coefs\n f.close()\n\n self.logging.info('')\n self.logging.info('Found %s word vectors.' % len(embeddings_index))\n\n # b. compute embedding matrix\n embedding_matrix = np.zeros((len(self.word_index) + 1, self.embedding_size))\n cnt = 0\n for word, i in self.word_index.items():\n embedding_vector = embeddings_index.get(word)\n if embedding_vector is not None:\n embedding_matrix[i] = embedding_vector # words not found in embedding index will be all-zeros.\n else:\n # self.logging.info('token in train missing in word2vec: ' + str(word))\n cnt += 1\n self.logging.info('total tokens missing: ' + str(cnt) + ' / ' + str(len(self.word_index)))\n\n # c. build embedding layer\n from keras.layers import Embedding\n embedding_layer = Embedding(len(self.word_index) + 1,\n self.embedding_size,\n weights=[embedding_matrix],\n input_length=self.maxlen,\n trainable=False)\n\n elif self.embedding_type['type'] == 'gensim':\n self.logging.info('use pre-trained gensim word2vec')\n\n import gzip\n import gensim\n from keras.layers import Embedding\n import numpy as np\n\n # fname = '../data/word2vec_pretrained/motors/d_300_k_712904_w_6_e_60_v_motors'\n # fname = '../data/word2vec_pretrained/fashion/d_300_k_1341062_w_6_e_70_v_fashion'\n\n self.logging.info('load word2vec path: ' + str(self.embedding_type['path']))\n model = gensim.models.Word2Vec.load(self.embedding_type['path'])\n pretrained_weights = model.wv.syn0\n vocab_size, vector_dim = pretrained_weights.shape\n\n method = 3\n if method == 1:\n self.logging.info('word2vec attempt to fit into embedding layer - middle complex')\n # convert the wv word vectors into a numpy matrix that is suitable for insertion\n # into our TensorFlow and Keras models\n\n embedding_matrix = np.zeros((len(model.wv.vocab), vector_dim))\n for i in range(len(model.wv.vocab)):\n embedding_vector = model.wv[model.wv.index2word[i]]\n if embedding_vector is not None:\n embedding_matrix[i] = embedding_vector\n\n embedding_layer = Embedding(input_dim=embedding_matrix.shape[0],\n output_dim=embedding_matrix.shape[1],\n # input_length=self.maxlen,\n weights=[embedding_matrix],\n trainable=False)\n elif method == 2:\n self.logging.info('word2vec simple embedding matching - simple complex')\n embedding_layer = Embedding(input_dim=vocab_size,\n output_dim=vector_dim,\n input_length=self.maxlen,\n weights=[pretrained_weights],\n trainable=False)\n elif method == 3:\n\n self.logging.info('word2vec match using word_index from keras tokenizer - as used in glove match above')\n # b. 
compute embedding matrix\n\n # sd = 1 / np.sqrt(len(self.word_index) + 1)\n # embedding_matrix = np.random.normal(0, scale=sd, size=(len(self.word_index) + 1, self.embedding_size))\n\n embedding_matrix = np.zeros((len(self.word_index) + 1, self.embedding_size))\n cnt = 0\n for word, i in self.word_index.items():\n if word in model.wv:\n embedding_vector = model.wv[word]\n embedding_matrix[i] = embedding_vector\n else:\n # self.logging.info('token in train missing in word2vec: ' + str(word))\n cnt += 1\n self.logging.info('total tokens missing: ' + str(cnt))\n\n\n # c. build embedding layer\n from keras.layers import Embedding\n embedding_layer = Embedding(len(self.word_index) + 1,\n self.embedding_size,\n weights=[embedding_matrix],\n input_length=self.maxlen,\n trainable=False)\n else:\n raise ValueError('unknown method value')\n\n else:\n raise ValueError('unknown embedding type')\n self.logging.info('create glove pre-trained embedding: ' + str(self.embedding_size))\n return embedding_layer", "def get_embeddings(self, in_data):\n context, da = in_data\n if self.fixed_divide:\n da_emb = super(PersonageContextDAEmbeddingSeq2SeqExtract, self).get_embeddings(da, pad=True)\n else:\n da_emb = super(PersonageContextDAEmbeddingSeq2SeqExtract, self).get_embeddings(da, pad=False)\n\n # Shubhangi: what this step essentially does is it replaces the context words by their token, with UNK as default.\n # again , we don't need this since our context data is essentially vectors therefore commenting this out\n # similary we don't need context embedding , that's exactly what context is already .\n\n # context_emb = []\n context_emb = [float(parameter[0]) for parameter in context]\n\n # for tok in context[-max_context_len:]:\n # context_emb.append(self.dict_token.get(tok, self.UNK_TOKEN))\n\n # Shubhangi: padding is needed because each context sentence could be of different length ,\n # we don't need to include context in padding as we're going to have a fixed size\n # (max_context_len - len(context)) = 0\n\n\n # padding = [self.UNK_TOKEN] * (max_context_len - len(context))\n\n # Shubhangi: padding might be harmless for now therefore not removing ,\n # essentially what this is doing is concatenating the arrays and sending\n if self.use_div_token:\n return context_emb + [self.DIV_TOKEN] + da_emb\n # return padding + context_emb + [self.DIV_TOKEN] + da_emb\n # return padding + context_emb + da_emb\n return context_emb + da_emb", "def sentence_to_embedding(sent, word_to_vec, seq_len, embedding_dim):\n embedding_vec = np.zeros((seq_len,embedding_dim))\n for i in range(min(len(sent),seq_len)):\n embedding_vec[i,:] = word_to_vec.get(sent[i])\n return embedding_vec", "def w2v_embedding(input_file, bin_file, output_file):\n print(\"Loading the training corpus.\")\n df = open(src_file_path, \"r\") # Open the text file stored at src_file_path\n df = df.read() # Read the text file\n tokens1 = word_tokenize(df) # Tokenize\n token_list = []\n print(\"Writing the bin file.\")\n models = KeyedVectors.load_word2vec_format(bin_destination_file_path, binary=True) # Save bin format\n print(\"Saving the word embeddings.\")\n models.save_word2vec_format(txt_file_destination_path, binary=False) # Save the word vectors\n \n return txt_file_destination_path", "def build_bilstm(self, verbose=True):\r\n word_ids = Input(batch_shape=(None, None), dtype='int32', name='word_input')\r\n inputs = [word_ids]\r\n\r\n if self._params.use_pretrain_embedding:\r\n if verbose: logging.info(\"initial word embedding with pretrained 
embeddings\")\r\n if self._params.word_embedding_dim == 100:\r\n glove_file = self._params.data_dir + '/glove.6B.100d.txt'\r\n elif self._params.word_embedding_dim == 300:\r\n glove_file = self._params.data_dir + '/glove.42B.300d.txt'\r\n else:\r\n logging.error(\"we only support glove embedding with dimension 100 or 300\")\r\n raise ValueError(\"unmatch word dimension, we only support glove embedding with dimension 100 or 300\")\r\n glove_embedding_index = load_glove(glove_file, self._params.word_embedding_dim)\r\n word_vocab = self.input_processor.word_vocab.vocab\r\n glove_embeddings_matrix = np.zeros([len(word_vocab), self._params.word_embedding_dim])\r\n for word, i in word_vocab.items():\r\n vector = glove_embedding_index.get(word)\r\n if vector is not None:\r\n glove_embeddings_matrix[i] = vector\r\n \r\n word_embeddings = Embedding(input_dim=glove_embeddings_matrix.shape[0],\r\n output_dim=glove_embeddings_matrix.shape[1],\r\n trainable=False,\r\n mask_zero=True,\r\n weights=[glove_embeddings_matrix],\r\n name='word_embedding')(word_ids)\r\n else:\r\n word_embeddings = Embedding(input_dim=self._params.word_vocab_size,\r\n output_dim=self._params.word_embedding_dim,\r\n mask_zero=True,\r\n name='word_embedding')(word_ids)\r\n\r\n input_embeddings = [word_embeddings]\r\n if self._params.use_char:\r\n char_ids = Input(batch_shape=(None, None, None), dtype='int32', name='char_input')\r\n inputs.append(char_ids)\r\n if self._params.char_feature == \"lstm\":\r\n char_embeddings = Embedding(input_dim=self._params.char_vocab_size,\r\n output_dim=self._params.char_embedding_dim,\r\n mask_zero=True,\r\n name='char_embedding')(char_ids)\r\n if verbose: logging.info(\"using charcter level lstm features\")\r\n char_feas = TimeDistributed(Bidirectional(LSTM(self._params.char_lstm_size)), name=\"char_lstm\")(char_embeddings)\r\n elif self._params.char_feature == \"cnn\":\r\n # cnn do not support mask\r\n char_embeddings = Embedding(input_dim=self._params.char_vocab_size,\r\n output_dim=self._params.char_embedding_dim,\r\n name='char_embedding')(char_ids)\r\n if verbose: logging.info(\"using charcter level cnn features\")\r\n char_feas = char_cnn_encode(char_embeddings, self._params.n_gram_filter_sizes, self._params.n_gram_filter_nums)\r\n else:\r\n raise ValueError('char feature must be lstm or cnn')\r\n\r\n input_embeddings.append(char_feas)\r\n\r\n if self._params.use_pos:\r\n if verbose: logging.info(\"use pos tag features\")\r\n pos_ids = Input(batch_shape=(None, None), dtype='int32', name='pos_input')\r\n inputs.append(pos_ids)\r\n\r\n\r\n pos_embeddings = Embedding(input_dim=self._params.pos_vocab_size,\r\n output_dim=self._params.pos_embedding_dim,\r\n mask_zero=True,\r\n name='pos_embedding')(pos_ids)\r\n input_embeddings.append(pos_embeddings)\r\n\r\n if self._params.use_dict:\r\n if verbose: logging.info(\"use user dict features\")\r\n dict_ids = Input(batch_shape=(None, None), dtype='int32', name='dict_input')\r\n inputs.append(dict_ids)\r\n\r\n dict_embeddings = Embedding(input_dim=self._params.dict_vocab_size,\r\n output_dim=self._params.dict_embedding_dim,\r\n mask_zero=True,\r\n name='dict_embedding')(dict_ids)\r\n input_embeddings.append(dict_embeddings)\r\n\r\n input_embedding = Concatenate(name=\"input_embedding\")(input_embeddings) if len(input_embeddings)>1 else input_embeddings[0]\r\n input_embedding_ln = LayerNormalization(name='input_layer_normalization')(input_embedding)\r\n #input_embedding_bn = BatchNormalization()(input_embedding_ln)\r\n input_embedding_drop = 
Dropout(self._params.dropout, name=\"input_embedding_dropout\")(input_embedding_ln)\r\n\r\n z = Bidirectional(LSTM(units=self._params.main_lstm_size, return_sequences=True, dropout=0.2, recurrent_dropout=0.2),\r\n name=\"main_bilstm\")(input_embedding_drop)\r\n z = Dense(self._params.fc_dim, activation='tanh', name=\"fc_dense\")(z)\r\n\r\n if self._params.use_crf:\r\n if verbose: logging.info('use crf decode layer')\r\n crf = CRF(self._params.num_labels, sparse_target=False,\r\n learn_mode='marginal', test_mode='marginal', name='crf_out')\r\n loss = crf.loss_function\r\n pred = crf(z)\r\n else:\r\n loss = 'categorical_crossentropy'\r\n pred = Dense(self._params.num_labels, activation='softmax', name='softmax_out')(z)\r\n\r\n model = Model(inputs=inputs, outputs=pred)\r\n model.summary(print_fn=lambda x: logging.info(x + '\\n'))\r\n model.compile(loss=loss, optimizer=self._params.optimizer)\r\n\r\n self.model = model", "def forward(self, input_sentence):\n sentence = self.word_embedding(input_sentence)\n embedding = self.encoder(sentence)\n return embedding", "def prepare_inputs(token_mapping, w2v_W, w2v_U, sentences):\n tokens = [tokenize(token_mapping, sentence) for sentence in sentences] \n \n depth = len(token_mapping)\n one_hot_tokens = []\n for sentence in tokens:\n one_hot_sentence = []\n for i, token in enumerate(sentence):\n if token != token_mapping['#UNK#']:\n one_hot_sentence.append(one_hot_encode(token, depth))\n else:\n if i <= 2:\n context_tokens = sentence[:i] + sentence[i+1:i+3]\n else:\n context_tokens = sentence[i-2:i] + sentence[i+1:i+3]\n context_one_hot = [one_hot_encode(token, depth) for token in context_tokens]\n context_mean = np.mean(np.asarray(context_one_hot), axis=0)\n one_hot_sentence.append(context_mean)\n one_hot_tokens.append(one_hot_sentence)\n \n one_hot_tokens = [np.asarray(ls) for ls in one_hot_tokens]\n vec_tokens = [word2vec(w2v_W, w2v_U, sentence) for sentence in tqdm(one_hot_tokens, desc='Vectorizing tokens')]\n return vec_tokens", "def gen_embedding(text, model, tokenizer):\n ### Tokenize the texts\n encoded_input = tokenizer(text, padding=True, truncation=True, max_length=512, return_tensors='pt')\n \n ### Encode the tokenized data with model\n with torch.no_grad():\n model_output = model(**encoded_input)\n \n ### Pool the outputs into a single vector\n sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])\n return sentence_embeddings", "def generate_sentence_embeddings():\n generate_embeddings_sentence(\"Data/en-train.json\", \"Data_Sent_Embds/en_sent.pkl\")\n generate_embeddings_sentence(\"Data/es-train.json\", \"Data_Sent_Embds/es_sent.pkl\")\n generate_embeddings_sentence(\"Data/pr-train.json\", \"Data_Sent_Embds/pr_sent.pkl\")", "def process_input_sentence(sess, char_dict, model_settings, model_vars, sentence):\n chars = list(sentence.strip())\n cids = char_dict.chars2cids(chars)\n \n in_embedding = np.eye(model_settings['input_classes'])\n one_hot = in_embedding[cids]\n one_hot_by_t = np.expand_dims(one_hot, 1)\n\n probs = process_sentence(sess, model_settings, model_vars, one_hot_by_t)\n labels = viterbi(probs)\n words = char_dict.chars2words(chars, labels)\n print('|'.join(words))", "def word2vec_generation(self, utterance, with_punctuations):\n vector = []\n\n #words = self.text_preparation(utterance)\n\n words = utterance\n\n #model_ = Word2Vec.load('model.bin')\n #if not self.is_word_in_word2vec_vocabulary(utterance, model_):\n # self.retrain_model([words])\n\n if with_punctuations:\n new_model = 
Word2Vec.load('./model/model_word2vec.bin')\n else:\n new_model = Word2Vec.load('./model/model_no_punctuation_word2vec.bin')\n\n\n\n # TODO: how generate word2vec vectors for each utterance using the vocabularies in Word2vec model?\n\n #First: average of Word2Vec vectors in each utterance\n for w in words:\n vector.append(new_model.wv[w])\n\n return np.mean(vector, axis=0)", "def Emojify_V2(input_shape, word_to_vec_map, word_to_index):\n \n ### START CODE HERE ###\n # Define sentence_indices as the input of the graph.\n # It should be of shape input_shape and dtype 'int32' (as it contains indices, which are integers).\n sentence_indices = Input(shape = input_shape, dtype = 'int32')\n \n # Create the embedding layer pretrained with GloVe Vectors (≈1 line)\n # def pretrained_embedding_layer(word_to_vec_map, word_to_index): # return embedding_layer\n embedding_layer = pretrained_embedding_layer(word_to_vec_map, word_to_index)\n \n # Propagate sentence_indices through your embedding layer\n # (See additional hints in the instructions).\n embeddings = embedding_layer(sentence_indices) \n \n # Propagate the embeddings through an LSTM layer with 128-dimensional hidden state\n # The returned output should be a batch of sequences.\n X = LSTM(units = 128, return_sequences = True)(embeddings)\n # Add dropout with a probability of 0.5\n X = Dropout(rate = 0.5)(X)\n # Propagate X trough another LSTM layer with 128-dimensional hidden state\n # The returned output should be a single hidden state, not a batch of sequences.\n X = LSTM(units = 128, return_sequences = False)(X)\n # Add dropout with a probability of 0.5\n X = Dropout(rate = 0.5)(X) \n # Propagate X through a Dense layer with 5 units\n X = Dense(units = 5)(X)\n # Add a softmax activation\n X = Activation(activation = 'softmax')(X)\n \n # Create Model instance which converts sentence_indices into X.\n model = Model(inputs = sentence_indices, outputs = X)\n \n ### END CODE HERE ###\n \n return model", "def sentence_to_vec(s, embeddings_dict, stop_words, tokenizer):\n \n words = str(s).lower()\n words = tokenizer(words)\n # remove stop words, if any, and only alpha-numeric tokens\n words = [w for w in words if not w in stop_words and w.isalpha()]\n \n embeddings = []\n for w in words:\n if w in embeddings_dict:\n embeddings.append(embeddings_dict[w])\n \n # dimensions = 300\n if len(embeddings)==0:\n return np.zeros(300)\n\n # list of embeddings to array\n embeddings = np.array(embeddings)\n\n # normalized vector\n sum = embeddings.sum(axis=0)\n return sum/np.sqrt((sum**2).sum())", "def text2vec(doc_tok, model, dim=300):\n doc_embedding = np.zeros(dim)\n valid_words = 0\n for word in doc_tok:\n if word in model:\n valid_words += 1\n doc_embedding += model.query(word)\n else:\n continue\n if valid_words > 0:\n return doc_embedding / valid_words\n else:\n return doc_embedding", "def pretrained_embedding_layer(word_to_vec_map, word_to_index):\n \n vocab_size = len(word_to_index) + 1 # adding 1 to fit Keras embedding (requirement)\n any_word = list(word_to_vec_map.keys())[0]\n emb_dim = word_to_vec_map[any_word].shape[0] # define dimensionality of your GloVe word vectors (= 50)\n \n ### START CODE HERE ###\n # Step 1\n # Initialize the embedding matrix as a numpy array of zeros.\n # See instructions above to choose the correct shape.\n emb_matrix = np.zeros((vocab_size, emb_dim))\n \n # Step 2\n # Set each row \"idx\" of the embedding matrix to be \n # the word vector representation of the idx'th word of the vocabulary\n for word, idx in 
word_to_index.items():\n emb_matrix[idx, :] = word_to_vec_map[word]\n\n # Step 3\n # Define Keras embedding layer with the correct input and output sizes\n # Make it non-trainable.\n embedding_layer = tensorflow.keras.layers.Embedding(input_dim = vocab_size, output_dim = emb_dim, trainable = False)\n ### END CODE HERE ###\n\n # Step 4 (already done for you; please do not modify)\n # Build the embedding layer, it is required before setting the weights of the embedding layer. \n embedding_layer.build((None,)) # Do not modify the \"None\". This line of code is complete as-is.\n \n # Set the weights of the embedding layer to the embedding matrix. Your layer is now pretrained.\n embedding_layer.set_weights([emb_matrix])\n \n return embedding_layer", "def vectorize(sentence, embeddings, size_limit, ignore_case=False):\n if ignore_case:\n sentence = sentence.lower()\n tokens = [w for w in sentence.strip().split() if w not in PUNCT]\n tokens = tokens[:size_limit] # truncating sentence\n\n vectors = []\n for t in tokens:\n if t in embeddings:\n vectors.append(embeddings[t])\n else: # OOV word\n random_vector = np.random.uniform(-0.25, 0.25,\n size=embeddings.dimension)\n vectors.append(random_vector)\n vectorized = np.array(vectors).T # dimension lines x len(tokens) columns\n return vectorized", "def create_embedding(self):\n self.embedding = []\n\n for index in range(1,self.args.window_size+1):\n print(\"\\nOptimization round: \" +str(index)+\"/\"+str(self.args.window_size)+\".\")\n print(\"Creating documents.\")\n clean_documents = self.walk_extracts(index)\n print(\"Fitting model.\")\n model = Word2Vec(clean_documents,\n size = self.args.dimensions,\n window = 1,\n min_count = self.args.min_count,\n sg = 1,\n workers = self.args.workers)\n\n new_embedding = self.get_embedding(model)\n self.embedding = self.embedding +[new_embedding]\n self.embedding = np.concatenate(self.embedding, axis = 1)", "def pretrained_embedding_layer(word_to_vec_map, word_to_index):\n vocab_len = len(word_to_index) + 1 # adding 1 to fit Keras embedding (requirement)\n emb_dim = word_to_vec_map[\"cucumber\"].shape[0] # define dimensionality of your GloVe word vectors (= 50)\n emb_matrix = np.zeros((vocab_len, emb_dim)) # Initialize the embedding matrix as a numpy array of zeros of shape (vocab_len, dimensions of word vectors = emb_dim)\n for word, index in word_to_index.items(): # Set each row \"index\" of the embedding matrix to be the word vector representation of the \"index\"th word of the vocabulary\n emb_matrix[index, :] = word_to_vec_map[word]\n embedding_layer = Embedding(vocab_len, emb_dim, trainable = False) # Define Keras embedding layer with the correct output/input sizes, make it trainable. Use Embedding(...). Make sure to set trainable=False. \n embedding_layer.build((None,)) # Build the embedding layer, it is required before setting the weights of the embedding layer. Do not modify the \"None\".\n embedding_layer.set_weights([emb_matrix]) # Set the weights of the embedding layer to the embedding matrix. 
Your layer is now pretrained.\n return embedding_layer", "def pretrained_embedding_layer(model,model2,model3, word_to_index,emb_dim_max):\n words_ignored = []\n vocab_len = len(word_to_index) + 1 \n emb_matrix = np.zeros([vocab_len,emb_dim_max])\n \n print(' Total words would be processed : '+str(vocab_len))\n for word, idx in word_to_index.items():\n if word in model:\n emb_matrix[idx,:200] = model[word]\n emb_matrix[idx,200:] = 0\n if word in model2:\n emb_matrix[idx, :100] = model2[word]\n emb_matrix[idx, 100:] = 0\n if word in model3.keys():\n emb_matrix[idx,:] = model3[word]\n else:\n words_ignored.append(word)\n print(str(len(words_ignored))+\" words ignored\")\n print(emb_matrix.shape) \n \n \n embedding_layer = Embedding(vocab_len,emb_dim_max,trainable = True)\n \n # Build the embedding layer, it is required before setting the weights of the embedding layer. \n embedding_layer.build((None,)) # Do not modify the \"None\". This line of code is complete as-is.\n # Set the weights of the embedding layer to the embedding matrix. Your layer is now pretrained.\n embedding_layer.set_weights([emb_matrix])\n \n return embedding_layer,words_ignored", "def embed(self, loader, model):\n print(\" ** Embedding words\")\n\n words = loader.words\n vectors = [model.get_word_vector(word) for word in words]\n\n return [(w, *v) for w, v in zip(words, vectors)]", "def construct_embedding(self):\n i = 0\n self.load_dicts()\n embedding_shape = (max(self.word2idx.values()) + 1,\n self.embedding_size)\n self.embedding = np.zeros(embedding_shape)\n\n with open(self.config.word_vec_fi_glove, 'r') as fi:\n for line in fi:\n word_vec = line.split(\" \")[1:]\n self.embedding[i, :] = np.array(word_vec, dtype=np.float32)\n i += 1\n\n self.write_embedding()", "def forward(self, input):\n if isinstance(input, tuple):\n check_input = input[0]\n else:\n check_input = input\n in_length, in_batch, nfeat = check_input.size()\n aeq(nfeat, len(self.emb_luts))\n\n emb = self.make_embedding(input)\n\n out_length, out_batch, emb_size = emb.size()\n aeq(in_length, out_length)\n aeq(in_batch, out_batch)\n aeq(emb_size, self.embedding_size)\n\n return emb", "def vectorize_sentence(sentence, model):\n final_vec = np.zeros(300, )\n count = 0\n for word in sentence:\n count += 1\n dummy_vec = np.zeros(300, )\n try:\n temp_vec = get_vector(word, model)\n final_vec += temp_vec\n except:\n final_vec += dummy_vec\n return final_vec / count", "def get_WS(w2v):\n # get set of MAX_NGRAM-grams in text\n lines = open(INFNAME_FORMAT.format(\"train\")).readlines() \\\n + open(INFNAME_FORMAT.format(\"test\")).readlines()\n raw = [process_line(l) for l in lines ]\n ngrams_in_data = set()\n for words in raw:\n for ngram in tweet_to_ngrams(words):\n ngrams_in_data.add(ngram)\n\n # load sentiment features from model\n clf_pipe = pickle.load(open(CLF_FNAME, 'rb')) # model\n\n vect = clf_pipe.best_estimator_.named_steps['vect']\n clf = clf_pipe.best_estimator_.named_steps['clf']\n\n features_to_sent_idx = vect.vocabulary_ # map from model features to sentiment index\n # currently, sentiment = 2 * (count_pos / (count_pos + count_neg)) - 1\n sentiments = clf.feature_count_[1,:] / np.sum(clf.feature_count_, axis=0) # in [0,1]\n sentiments = 2 * sentiments - 1 # rescale to [-1,1]\n\n features_to_sent = {feat: sentiments[idx] for (feat,idx) in features_to_sent_idx.items()}\n\n # build WS and ngram_idx_map for each MAX_NGRAM-gram in the text\n k = len(next(iter(w2v.values()))) # dimension of embedding\n WS = np.zeros(shape=(len(ngrams_in_data) + 1, k + 
MAX_NGRAM), dtype='float32')\n ngram_idx_map = {}\n\n index = 1 # first row is left 0, for padding in the cnn. This is also neutral sentiment.\n # For Vader Sentiment analysis\n# vader_analyzer = SentimentIntensityAnalyzer()\n\n\n for ngram in ngrams_in_data:\n ngram_idx_map[ngram] = index\n\n # set word embedding, note that unknown words already randomized in load_embedding \n words = ngram.split(' ')\n WS[index,:k] = w2v[words[-1]] # embedding of last word\n\n # set sentiment embedding\n for n in range(MAX_NGRAM): # for 1, 2, ... length ngrams\n sub_ngram = ' '.join(words[-1 - n:]) \n\n # Naive Bayes Sentiment feature --------------------------------\n sent = features_to_sent.get(sub_ngram, 0.0) # default to neutral 0\n # --------------------------------------------------------------\n\n# # TextBlob sentiment feature -----------------------------------\n# sent = TextBlob(sub_ngram).sentiment.polarity\n# # --------------------------------------------------------------\n\n# # Vader sentiment feature -------------------------------------\n# sent = vader_analyzer.polarity_scores(sub_ngram)['compound']\n# # -------------------------------------------------------------\n WS[index,k+n] = sent\n\n index += 1\n\n return WS, ngram_idx_map", "def load_word2vec_en_pretrained():\r\n log.info(\"Load W2V Model\")\r\n model = api.load(\"glove-wiki-gigaword-50\")\r\n return PreTrainedGensimEN(model)", "def embed_sentence(self, sentence:List[str]):\n # prepare the input that can be fed to bert model\n encoded_sentence, indices_subwords = self._prepare_input(sentence[0])\n with torch.no_grad():\n bert_output = self.bert_model.forward(input_ids=encoded_sentence)\n \n # take the sequence of the last four hidden states (the last element of the tuple returned by the bert model)\n # list of tensors (batch_size x num_of_splitted_words x embedding_dim)\n bert_output = list(bert_output[-1][-4:])\n bert_output.reverse()\n \n # stack the hidden states in a tensor (4 x batch_size x num_of_splitted_words x embedding_dim)\n hidden_states = torch.stack(bert_output, axis=0)\n # sum the hidden states (batch_size x num_of_splitted_words x embedding_dim)\n sum_hidden_states = torch.sum(hidden_states, axis=0)\n # merge the words splitted in subwords by the tokenizer (batch_size x sentence_length x embedding_dim)\n embed_output = self._merge_embeddings(sum_hidden_states[0], indices_subwords)\n return embed_output", "def _words_to_vec(self, sentence):\n return torch.FloatTensor([self._use_embeddings(word) for word in sentence])", "def build_input_data_from_word2vec(sentence, word2vec_vocab, word2vec_vec):\n X_data = []\n for word in sentence:\n try:\n word2vec_index = word2vec_vocab[word].index\n word_vector = word2vec_vec[word2vec_index]\n except:\n word2vec_index = word2vec_vocab['<un_known>'].index\n word_vector = word2vec_vec[word2vec_index]\n #word_vector = np.random.uniform(low=-0.25, high=0.25, size=word2vec_vec.shape[1])\n X_data.append(word_vector)\n X_data = np.asarray(X_data)\n return X_data", "def word2vec2embed(word2vec, word2idx):\n\temb_dim = word2vec['the'].shape[0]\n\temb = torch.nn.Embedding(len(word2idx), emb_dim) \n\temb_matrix = []\n\tfor w, idx in word2idx.items():\n\t\tif w in word2vec:\n\t\t\temb_matrix.append(word2vec[w])\n\t\telse:\n\t\t\temb_matrix.append(np.zeros(emb_dim,))\n\temb.weight.data.copy_(torch.from_numpy(np.array(emb_matrix)))\n\treturn emb", "def train_word2vec(sentence_matrix, vocabulary_inv,\r\n num_features=300, min_word_count=1, context=10):\r\n\r\n model_name = 
'predictor/model/word2vec'\r\n if exists(model_name):\r\n # embedding_model = word2vec.Word2Vec.load(model_name)\r\n embedding_model = gensim.models.Word2Vec.load('predictor/model/word2vec')\r\n print('Load existing Word2Vec model \\'%s\\'' % split(model_name)[-1])\r\n else:\r\n # Set values for various parameters\r\n num_workers = 2 # Number of threads to run in parallel\r\n downsampling = 1e-3 # Downsample setting for frequent words\r\n\r\n # Initialize and train the model\r\n print('Training Word2Vec model...')\r\n sentences = [[vocabulary_inv[w] for w in s] for s in sentence_matrix]\r\n embedding_model = word2vec.Word2Vec(sentences, workers=num_workers,\r\n size=num_features, min_count=min_word_count,\r\n window=context, sample=downsampling)\r\n\r\n # If we don't plan to train the model any further, calling\r\n # init_sims will make the model much more memory-efficient.\r\n embedding_model.init_sims(replace=True)\r\n\r\n # Saving the model for later use. You can load it later using Word2Vec.load()\r\n print('Saving Word2Vec model \\'%s\\'' % split(model_name)[-1])\r\n embedding_model.save(model_name)\r\n\r\n # add unknown words\r\n embedding_weights = {key: embedding_model[word] if word in embedding_model else\r\n np.random.uniform(-0.25, 0.25, embedding_model.vector_size)\r\n for key, word in embedding_model.wv.vocab.items()}\r\n return embedding_weights" ]
[ "0.6891252", "0.6638002", "0.66288036", "0.65824366", "0.6558864", "0.6556217", "0.64891624", "0.6487913", "0.6419915", "0.6406742", "0.6390424", "0.6382159", "0.63630146", "0.6351706", "0.63423693", "0.628914", "0.628678", "0.6255382", "0.62375826", "0.6213522", "0.6198622", "0.6197618", "0.6169275", "0.61457324", "0.6138302", "0.6120514", "0.6085526", "0.6049677", "0.60464483", "0.6044437" ]
0.7118841
0
Returns the n most similar products for a given user input embedding.
def get_similar_products(user_input_emb, ref_catalog, n=5):
    # Score every catalog entry by cosine similarity to the user input
    # embedding and keep the n best matches.
    sim_list = []
    for i in range(len(ref_catalog)):
        desc_id = ref_catalog.iloc[i]['id']
        emb = ref_catalog.iloc[i]['desc_embedding']
        cos_sim = compute_cosine_sim(emb, user_input_emb)
        sim_list.append((desc_id, cos_sim))
    top_n = sorted(sim_list, key=lambda tup: tup[1], reverse=True)[:n]
    return top_n
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def topn_similarity(word_vecs, word, n):\n vec = word_vecs[word]\n sim = dict()\n for w in word_vecs:\n if w != '<TOP>' and w != '<BOT>':\n # sim[w] = np.dot(vec, np.transpose(word_vecs[w]))\n sim[w] = 1 - spatial.distance.cosine(vec, word_vecs[w])\n # sim[w] = np.dot(vec, np.transpose(word_vecs[w]))/(mod(vec)*mod(np.transpose(word_vecs[w])))\n dd = OrderedDict(sorted(sim.items(), key=lambda x: x[1], reverse=True))\n return list(dd.items())[1:n+1]", "def test_most_similar_topn(self):\n self.assertEqual(len(self.vectors.most_similar('dog.n.01', topn=5)), 5)\n self.assertEqual(len(self.vectors.most_similar('dog.n.01', topn=10)), 10)\n\n predicted = self.vectors.most_similar('dog.n.01', topn=None)\n self.assertEqual(len(predicted), len(self.vectors.vocab) - 1)\n self.assertEqual(predicted[-1][0], 'gallant_fox.n.01')", "def get_mostFrequent(self, n=5):\r\n pass", "def get_mostFrequent(self, n=5):\r\n pass", "def most_similar(self, token, topn=10):\n vec = self.get_vector(token)\n assert vec is not None, \"Cannot compute similarity between None type vectors.\"\n return self.most_similar_embedding(vec, topn+1)[1:]", "def popularity(self,train = None,test = None,k = 8,nitem = 10):\n train = train or self.traindata\n test = test or self.testdata\n item_popularity = dict()\n for user ,items in train.items():\n for item in items.keys():\n item_popularity.setdefault(item,0)\n item_popularity[item] += 1\n ret = 0\n n = 0\n for user in train.keys():\n rank = self.recommend(user, train, k = k, nitem = nitem)\n for item ,_ in rank.items():\n ret += math.log(1+item_popularity[item])\n n += 1\n return ret / (n * 1.0)", "def top_n_similar(base_h_id, comp_hotels, n_hotels=None, axes_omissions=[]):\n axes = get_axes(axes_omissions)\n similar_hotels = []\n base_hotel_chromosomes = get_hotel_chromosomes([base_h_id])[base_h_id]\n comp_hotel_chromosomes = get_hotel_chromosomes(comp_hotels)\n for c in comp_hotels:\n aggregate_similarity, similarity = get_similarity(\n base_hotel_chromosomes, comp_hotel_chromosomes[c], axes)\n similar_hotels.append((c, aggregate_similarity, similarity))\n similar_hotels.sort(key=itemgetter(1), reverse=True)\n if n_hotels:\n return similar_hotels[:n_hotels]\n else:\n return similar_hotels", "def closest_composed(self, w_vectors, n=10):\r\n scores = self.m.dot(w_vectors)\r\n return heapq.nlargest(n, zip(scores, self.iw))", "def calculate_most_popular(text, n_populars, steam=False):\n fdist = calculate_fdist(text, steam)\n term = []\n for key, value in fdist.items():\n term.append((key, value))\n term.sort(key=lambda x: int(x[1]), reverse=True)\n return term[:n_populars]", "def test_most_similar(self):\n expected = [\n 'canine.n.02',\n 'hunting_dog.n.01',\n 'carnivore.n.01',\n 'placental.n.01',\n 'mammal.n.01'\n ]\n predicted = [result[0] for result in self.vectors.most_similar('dog.n.01', topn=5)]\n self.assertEqual(expected, predicted)", "def recommend(self, user_id, N=10):\n scores = self.user_factors[user_id] @ self.product_factors.T\n best = np.argpartition(scores, -N)[-N:]\n return sorted(zip(best, scores[best]), key=lambda x: -x[1])", "def get_frequency_based_priors(n_common=3000, width_under_sigmoid=10):\n freq_map = get_word_frequencies()\n words = np.array(list(freq_map.keys()))\n freqs = np.array([freq_map[w] for w in words])\n arg_sort = freqs.argsort()\n sorted_words = words[arg_sort]\n\n # We want to imagine taking this sorted list, and putting it on a number\n # line so that it's length is 10, situating it so that the n_common most common\n # words are positive, then 
applying a sigmoid\n x_width = width_under_sigmoid\n c = x_width * (-0.5 + n_common / len(words))\n xs = np.linspace(c - x_width / 2, c + x_width / 2, len(words))\n priors = dict()\n for word, x in zip(sorted_words, xs):\n priors[word] = sigmoid(x)\n return priors", "def get_top_n_words(word_list, n):\n words = []\n\n # Change all words to lowercase\n for word in word_list:\n word = str.lower(word)\n if word not in words:\n words.append(word)\n\n # Calculate frequency of each word\n frequency = []\n for word in words:\n word_count = 0\n for test in word_list:\n if word == test:\n word_count += 1\n frequency.append(word_count)\n\n dic = dict()\n for i, word in enumerate(words):\n dic[frequency[i]] = word\n\n # Sort dictionary to return ranks\n keys = dic.keys()\n keys = sorted(keys)\n words_ranked = []\n for key in keys:\n words_ranked.append(dic.get(key))\n words_ranked = words_ranked[::-1]\n words_ranked = words_ranked[:n]\n return words_ranked", "def test_top_n_counts():\n ngrams = NgramFrequencies()\n new_dic = {\n \"a\": 1,\n \"b\": 2,\n \"c\": 3,\n \"d\": 4\n }\n top_list = ngrams.top_n_counts(new_dic)\n assert top_list == [(\"d\", 4), (\"c\", 3), (\"b\", 2), (\"a\", 1)]", "def calculate_most_frequent_n_words(self, input_string: str, n: int) \\\n -> List[WordFrequencyStructure]:\n results = \\\n self._typed_sorted_result(input_string=input_string)\n\n return results[:n]", "def topMatches(prefs, person, n=5, similarity=sim_pearson):\n all_matches = [(similarity(prefs, person, other), other) \n for other in prefs.keys()\n if person != other]\n all_matches.sort()\n all_matches.reverse()\n return all_matches[0:n]", "def top_matches(prefs, person, n=5, similarity=sim_pearson):\n scores = [(similarity(prefs, person, other), other)\n for other in prefs if other != person]\n\n scores.sort()\n scores.reverse()\n return scores[0:n]", "def test_get_top_n_words_same_frequency(self):\n expected = ['happy', 'man']\n actual = get_top_n_words({'happy': 2, 'man': 2}, 2)\n self.assertEqual(expected, actual)\n expected = ['happy']\n actual = get_top_n_words({'happy': 2, 'man': 2}, 1)\n self.assertEqual(expected, actual)", "def top_n_satisfy(content, n):\n sum_satisfy = 0.0\n query_num = 0.0\n for qid in content:\n label_sort = []\n score = []\n all_info = content[qid]\n for info in all_info:\n label_sort.append([info[0], info[1]])\n score.append(info[1])\n label_sort.sort(key=take_first, reverse=True)\n score.sort(reverse=True)\n satisfy = 0.0\n count = 0\n size = len(label_sort)\n for i in range(size):\n cur_label = label_sort[i][0]\n cur_score = label_sort[i][1]\n if cur_label < 1:\n break\n if i >= n:\n break\n index = score.index(cur_score)\n count += 1\n if index < n:\n satisfy += 1\n if count == 0:\n sum_satisfy += 0.0\n query_num += 1\n else:\n sum_satisfy += satisfy / float(count)\n query_num += 1\n return sum_satisfy / query_num", "def top_n_satisfy2(content, n):\n #print(n)\n sum_satisfy = 0.0\n query_num = 0.0\n for qid in content:\n label_sort = []\n score = []\n all_info = content[qid]\n num_label1 = 0\n for info in all_info:\n if info[0] > 0:\n num_label1 += 1\n label_sort.append([info[0], info[1]])\n label_sort.sort(key=take_second, reverse=True)\n satisfy = 0.0\n count = 0\n size = len(label_sort)\n for i in range(min(n, size)):\n cur_label = label_sort[i][0]\n if cur_label > 0:\n satisfy += 1\n cur_satisfy = satisfy / min(n, num_label1)\n sum_satisfy += cur_satisfy\n query_num += 1\n return sum_satisfy / query_num", "def recommendations_similarity(aData, needed_param, user, products, n 
= 10, simfunc = sim_cosine):\n table_CF = preproc.make_CF_table(aData, needed_param)\n sim_measures_table = simfunc(table_CF) \n \n scores = sim_measures_table.dot(table_CF)\n mean_scores = np.array(np.sum(sim_measures_table, axis=1).T)\n mean_scores = pd.DataFrame(np.tile(mean_scores, (scores.shape[1],1))).T\n predicted_ratings = np.divide(scores, np.absolute(mean_scores))\n \n ratings = predicted_ratings[user].order(ascending= False)\n ratings = ratings[0:n]\n \n return (ratings.index[ratings.index.isin(products)==False])", "def get_top_n_words(column, n):\r\n frequencies = Counter()\r\n column.str.lower().str.split().apply(frequencies.update)\r\n return frequencies.most_common(n)", "def bow_top_n(corpus, n):\n bag_of_words_model_small = CountVectorizer(max_features=n)\n bag_of_word_df_small = pd.DataFrame(bag_of_words_model_small.fit_transform(corpus).todense())\n bag_of_word_df_small.columns = sorted(bag_of_words_model_small.vocabulary_)\n return bag_of_word_df_small", "def test_top_n_freqs():\n ngrams = NgramFrequencies()\n top_list = [(\"d\", 4), (\"c\", 3), (\"b\", 2), (\"a\", 1)]\n top_freq = ngrams.top_n_freq(top_list, 10)\n assert top_freq == [(\"d\", 0.4), (\"c\", 0.3), (\"b\", 0.2), (\"a\", 0.1)]", "def get_n_best(self):\n pass", "def get_top_n_words(word_list, n):\n d = dict()\n for w in word_list:\n d[w] = d.get(w, 0) + 1\n ordered_by_frequency = sorted(d, key=d.get, reverse=True)\n return ordered_by_frequency[0:n]", "def top_5_similar_2(list_string, my_nlp=nlp1, model_type=my_model, doc_topic=my_doc_topic):\n vec = my_nlp.transform(list_string)\n vtrans = model_type.transform(vec)\n array_5 = pairwise_distances(vtrans, doc_topic, metric='cosine').argsort()[0][0:5]\n # result_df = df_reviews[['game_link']].iloc[array_5]\n return df_reviews[['game']].iloc[array_5]\n # return(\"test\")\n return result_df", "def get_top_n(predictions, n=10):\n\n # First map the predictions to each user.\n top_n = defaultdict(list)\n for uid, iid, true_r, est, _ in predictions:\n top_n[uid].append((iid, est))\n\n # Then sort the predictions for each user and retrieve the k highest ones.\n for uid, user_ratings in top_n.items():\n user_ratings.sort(key=lambda x: x[1], reverse=True)\n top_n[uid] = user_ratings[:n]\n\n return top_n", "def get_top_n(predictions, n):\n # First map the predictions to each user.\n top_n = defaultdict(list)\n for uid, iid, true_r, est, _ in predictions:\n top_n[uid].append((iid, est))\n\n # Then sort the predictions for each user and retrieve the k highest ones.\n for uid, user_ratings in top_n.items():\n user_ratings.sort(key=lambda x: x[1], reverse=True)\n top_n[uid] = user_ratings[:n]\n\n return top_n", "def test_most_similar_with_vector_input(self):\n expected = [\n 'dog.n.01',\n 'canine.n.02',\n 'hunting_dog.n.01',\n 'carnivore.n.01',\n 'placental.n.01',\n ]\n input_vector = self.vectors['dog.n.01']\n predicted = [result[0] for result in self.vectors.most_similar([input_vector], topn=5)]\n self.assertEqual(expected, predicted)" ]
[ "0.6303444", "0.6231034", "0.6041034", "0.6041034", "0.59895635", "0.59817195", "0.5981174", "0.587914", "0.5833353", "0.5826179", "0.57493174", "0.57406974", "0.57392347", "0.57307845", "0.5707042", "0.5693564", "0.5679855", "0.567223", "0.5671902", "0.56702316", "0.5668979", "0.56620044", "0.5660335", "0.56526303", "0.5638089", "0.5634164", "0.5632821", "0.562015", "0.56125647", "0.56034535" ]
0.74809283
0
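The row closed by the rank above pairs a top-n similarity query with cosine-distance code. A minimal, self-contained sketch of that pattern — the toy vectors and words below are invented for illustration, not taken from the dataset — might look like:

import numpy as np

word_vecs = {
    "dog": np.array([1.0, 0.2, 0.0]),
    "cat": np.array([0.9, 0.3, 0.1]),
    "car": np.array([0.0, 0.9, 0.8]),
}

def topn_similarity(word_vecs, word, n):
    # Rank every other word by cosine similarity to the query word.
    vec = word_vecs[word]
    sims = {}
    for w, v in word_vecs.items():
        if w == word:
            continue
        sims[w] = float(np.dot(vec, v) / (np.linalg.norm(vec) * np.linalg.norm(v)))
    return sorted(sims.items(), key=lambda kv: kv[1], reverse=True)[:n]

print(topn_similarity(word_vecs, "dog", 2))  # [('cat', ~0.99), ('car', ~0.15)]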
Removes html tags from txt
def remove_html(txt):
    TAG_RE = re.compile(r'<[^>]+>')
    return TAG_RE.sub("", txt).strip()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_html_tags(text):\n import re\n clean = re.compile('<.*?>|\\\\n')\n return re.sub(clean, '', text)", "def remove_html_tags(text):\n print('VOU REMOVER AS TAGS DA STRING')\n clean = re.compile('<.*?>')\n print('',re.sub(clean, '', text))\n return re.sub(clean, '', text)", "def remove_html_tags(self,text):\n #https://medium.com/@jorlugaqui/how-to-strip-html-tags-from-a-string-in-python-7cb81a2bbf44\n clean = re.compile('<.*?>')\n return re.sub(clean, '', text)", "def remove_html_tags(text):\r\n clean = re.compile('<.*?>')\r\n return re.sub(clean, '', text)", "def remove_html_tags_fun(self):\n cleaner = re.compile('<.*?>')\n cleaned_text = re.sub(cleaner, '', self.doc)\n cleaned_text = re.sub('[\\n\\t]', '', cleaned_text)\n self.doc = cleaned_text", "def remove_Tags(self,text):\n cleaned_text = re.sub('<[^<]+?>', '', text)", "def remove_html_tags(text):\n tag_pattern = re.compile(r'<[^>]+>')\n return tag_pattern.sub('', text)", "def remove_html_tags(text: str) -> str:\n return re.sub('<.*?>', '', text).strip()", "def remove_html_tags(text):\n import re\n clean = re.compile('<.*?>')\n return re.sub(clean, '', text)", "def remove_html_tags(text):\n import re\n clean = re.compile('<.*?>')\n return re.sub(clean, '', text)", "def remove_html_tags(text):\n clean = re.compile('<.*?>')\n return re.sub(clean, '', text)", "def remove_html_tags(text: str) -> str:\n clean = re.compile('<.*?>')\n return re.sub(clean, '', str(text))", "def strip_html_tags(text):\r\n soup = BeautifulSoup(text, 'lxml')\r\n stripped_text = soup.get_text(separator=\" \")\r\n return stripped_text", "def remove_html_tags(text):\n import re\n clean = re.compile('<.*?>')\n return re.sub(clean, '', text).rstrip('...')", "def strip_tags(text):\n # Remove header tags\n p = re.compile(\"<\\?.+?\\?>\") \n text = re.sub(p, \"\", text)\n\n # Remove <HOO>, <p> and <s> tags\n text = text.replace(\"<p>\",\"\")\n text = text.replace(\"</p>\",\"\")\n text = text.replace(\"<s>\",\"\")\n text = text.replace(\"</s>\",\"\")\n text = text.replace(\"<HOO>\",\"\")\n text = text.replace(\"</HOO>\",\"\")\n\n return text", "def remove_html_tags(text):\n clean = re.compile('<.*?>|&ndash; ')\n return re.sub(clean, '', text)", "def remove_html_tags(text):\n clean = re.compile('<.*?>|&ndash; ')\n return re.sub(clean, '', text)", "def remove_html( html):\n return html2txt(html)", "def strip_html_tags(text):\n soup = BeautifulSoup(text, \"html.parser\")\n stripped_text = soup.get_text(separator=\" \")\n return stripped_text", "def strip_html_tags(text):\n soup = BeautifulSoup(text, \"html.parser\")\n stripped_text = soup.get_text(separator=\" \")\n return stripped_text", "def clean_html(text):\n cleanr = re.compile(\"<.*?>\")\n clean_text = re.sub(cleanr, \"\", text)\n return clean_text", "def remove_html(text):\n return re.sub(r'<.*?>', r'', text)", "def remove_tags(text):\n tree = html.fromstring(text)\n return tree.xpath(\"//text()\")", "def remove_html_tags(self, text, tags):\n\t\tcheck_if_any_type(text, [str, str])\n\n\t\tfor tag in tags:\n\t\t\tcheck_if_any_type(tag, [str, str])\n\t\t\ttext = re.compile('<\\/?%s\\/?>' % tag, re.U).sub('', text)\n\t\treturn text", "def strip_html_tags(text):\n if text is np.nan:\n return text\n regex = re.compile(r\"<.*?>\")\n return re.sub(regex, \"\", text)", "def _remove_html_tags(self, text: str) -> str:\n pattern = r\"\"\"\n (?x) # Turn on free-spacing\n <[^>]+> # Remove <html> tags\n | &([a-z0-9]+|\\#[0-9]{1,6}|\\#x[0-9a-f]{1,6}); # Remove &nbsp;\n \"\"\"\n return re.sub(pattern, \" \", str(text))", 
"def remove_html_tags(html_text: str) -> str:\n document = fromstring(html_text)\n text = document.text_content()\n return text.strip()", "def remove_tags(text):\n\n global cleanr\n global cleann\n global cleans\n try:\n text = BeautifulSoup(text)\n for table in text.findAll(\"table\"):\n table.extract()\n text = text.text\n text = re.sub(cleanr, '', text)\n text = re.sub(cleann, '', text)\n text = re.sub(cleans, ' ', text)\n\n except Exception as e:\n pass\n\n return text", "def _remove_tags(self, text):\n try:\n result = \"\".join(xml.etree.ElementTree.fromstring(text).itertext()).replace(\n \"\\n\\n\", \"\\n\"\n )\n except: # pylint: disable=bare-except\n result = text\n return result", "def remove_tags(text):\n # Remove HTML tags\n soup = BeautifulSoup(text, \"html.parser\")\n [s.extract() for s in soup(['iframe', 'script'])]\n stripped_text = soup.get_text()\n stripped_text = re.sub(r'[\\r|\\n|\\r\\n]+', '\\n', stripped_text)\n \n \n text = unicodedata.normalize('NFKD', stripped_text).encode('ascii', 'ignore').decode('utf-8', 'ignore') # Remove Accented characters\n text = re.sub(r'[^\\x00-\\x7F]+','', text) # Remove Non-Ascii characters\n text = re.sub(\"[a-z0-9\\.\\-+_]+@[a-z0-9\\.\\-+_]+\\.[a-z]+\", '', text) # Remove Emails\n text = re.sub(r\"http\\S+\", \"\", text) # Remove URLs\n return text" ]
[ "0.82413596", "0.81933254", "0.8151002", "0.81288743", "0.8118329", "0.8103889", "0.8095974", "0.8079586", "0.8077638", "0.8077638", "0.80603147", "0.7991694", "0.79077923", "0.7896608", "0.78903127", "0.7843595", "0.7843595", "0.7835371", "0.7812128", "0.7812128", "0.77750474", "0.7758151", "0.7731458", "0.7730978", "0.77290636", "0.77243036", "0.7694337", "0.7659961", "0.7592462", "0.75298274" ]
0.8495694
0
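The positive document for the HTML-stripping row compiles a regex but, as stored, omits its import. A runnable version under that assumption (the sample string and check are hypothetical) might be:

import re

TAG_RE = re.compile(r'<[^>]+>')

def remove_html(txt):
    # Drop anything of the form <...>, then trim surrounding whitespace.
    return TAG_RE.sub("", txt).strip()

assert remove_html(" <p>hello <b>world</b></p> ") == "hello world"

Regex stripping is a rough heuristic; for malformed markup an HTML parser (as several negatives in this row do with BeautifulSoup) is the safer choice.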
return the goal weight question and save the user's answers
def return_goal_weight_text_save_weight(self, data_dict, id_user): # get robot advice to user : defined this weight goal actual_weight = data_dict.get("actual_weight") if actual_weight is not False: # if the user answered to the goal weight question # the parser method returned an error message # and add in the context the weight goal question context = self.parser_weight(data_dict)[1] if context: context["goal_weight_text"] = self.goal_text context["dict_questions"] = self.dict_questions return context # if the user's answer is validate data_validate = self.parser_weight(data_dict)[0] if data_validate is True: # get data goal = self.new_weight_advice_goal.return_weight_advices_goal(data_dict)[0] advice = self.new_weight_advice_goal.return_weight_advices_goal(data_dict)[1] final_weight = self.new_weight_advice_goal.return_weight_advices_goal(data_dict)[2] # if user's goal weight is validate if goal != "impossible": # create the end text # of the questionnaire id_type = RobotQuestionType.objects.values_list("id").get(type="end start")[0] text = RobotQuestion.objects.values_list("text") start_text_end = text.get(robot_question_type=id_type)[0] text = advice + start_text_end context = {} try: user = get_user_model() user = user.objects.get(id=id_user) ProfileUser.objects.values_list("starting_weight").get(user=user)[0] text = "Ton premier objectif de poids a déjà " \ "été défini à - " + str(goal) + " kg." context["robot_answer"] = text # save user's data except ProfileUser.DoesNotExist: user = get_user_model() user = user.objects.get(id=id_user) ProfileUser.objects.create(user=user, starting_weight=actual_weight, actual_goal_weight=goal, final_weight=final_weight) ResultsUser.objects.create(user=user, weight=actual_weight) context["robot_answer"] = text # means that the user have # answered at all questions start user = HistoryUser.objects.get(user=id_user) user.start_questionnaire_completed = True user.save() self.end_questions_start = True # if user's goal weight is not validate else: context["robot_answer"] = advice return context
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_return_goal_weight_text_save_weight(self):\n # create user\n user_created = self.create_user_questionnaire_in_progress()\n\n # data\n data_dict = {\"height\": \"1,60\", \"actual_weight\": \"80\",\n \"cruising_weight\": \"50\", \"weight_goal\": \"70\"}\n\n # call method\n context = self.new_controller.return_goal_weight_text_save_weight(data_dict,\n user_created.id)\n\n self.assertEqual(len(context), 1)\n self.assertEqual(context[\"robot_answer\"][0:19], \"Alors c'est parti !\")", "def test_return_goal_weight_text_goal_defined(self):\n # create user\n user_created = self.create_user_start_program_number_days_ago(0)\n\n # data\n data_dict = {\"height\": \"1,60\", \"actual_weight\": \"80\",\n \"cruising_weight\": \"50\", \"weight_goal\": \"70\"}\n\n # call method\n context = self.new_controller.return_goal_weight_text_save_weight(data_dict,\n user_created.id)\n\n self.assertEqual(len(context), 1)\n self.assertEqual(context[\"robot_answer\"][:40], \"Ton premier objectif de poids a déjà été\")", "def return_weekly_questions_save_weight(self, weekly_weight, id_user):\n # get data\n context = {}\n weighing_date = ResultsUser.objects.values_list(\"weighing_date\")\n last_weighing_date = weighing_date.filter(user=id_user).order_by(\"weighing_date\").last()[0]\n one_week_after_weighing = last_weighing_date + timedelta(days=7)\n present = datetime.now()\n present_date = present.date()\n\n # one week after\n # the weighing last\n if present_date >= one_week_after_weighing:\n\n # if the user gave\n # his weekly weight\n if weekly_weight is not False:\n\n # if the user has reached\n # his weight goal\n final_weight = ProfileUser.objects.values_list(\"final_weight\").get(user=id_user)[0]\n if float(weekly_weight) <= final_weight:\n context[\"robot_comment\"] = self.return_text_congratulations_restart_program\\\n (id_user)\n self.end = True\n\n # save weight\n else:\n context[\"robot_comment\"] = \"J'ai bien pris note de ton poids, \" \\\n \"tu trouveras un récapitulatif dans \" \\\n \"l'onglet résultats.\"\n user = self.user.objects.get(id=id_user)\n ResultsUser.objects.create(user=user, weight=weekly_weight)\n self.new_week = True\n\n # create robot question\n else:\n context[\"robot_comment\"] = \"Bonjour ! J'éspère que ta semaine \" \\\n \"s'est bien passée ? 
Que donne ta pesée \" \\\n \"ce matin ?\"\n context[\"robot_weekly_weight\"] = True\n\n # during the first week after\n # the weighing last : create robot text\n else:\n month = calendar.month_name[one_week_after_weighing.month]\n date = \"\" + calendar.day_name[one_week_after_weighing.weekday()] + \\\n \" \" + str(one_week_after_weighing.day) \\\n + \" \" + month + \"\"\n context[\"robot_comment\"] = \"Retrouvons nous ici {} pour faire le point \" \\\n \"sur tes prochains résultats et voir ton nouveau \" \\\n \"challenge !\".format(date)\n\n return context", "def test_return_goal_weight_goal_weight_ok(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": \"60\",\n \"cruising_weight\": \"55\", \"weight_goal\": \"55\"}\n return_goal = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[2]\n\n self.assertEqual(return_goal, 55)", "def test_return_goal_goal_weight_ok(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": \"60\",\n \"cruising_weight\": \"55\", \"weight_goal\": \"55\"}\n return_goal = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[0]\n\n self.assertEqual(return_goal, 5)", "def test_weighted_exam(self):\r\n self.weighted_setup()\r\n self.submit_question_answer('FinalQuestion', {'2_1': 'Correct', '2_2': 'Correct'})\r\n self.check_grade_percent(0.75)", "def test_return_goal_weight_text_if_incorrect_data(self):\n # create user\n user_created = self.create_user_questionnaire_in_progress()\n\n # data\n data_dict = {\"height\": \"1,60\", \"actual_weight\": \"80\",\n \"cruising_weight\": \"50\", \"weight_goal\": \"90\"}\n\n # call method\n context = self.new_controller.return_goal_weight_text_save_weight(data_dict,\n user_created.id)\n\n dict_questions = {\"height\": \"Quelle taille fais-tu ? 
(au format x,xx)\",\n \"actual_weight\": \"Quel est ton poids actuel ?\",\n \"cruising_weight\": \"Quel est ton poids de croisière \"\n \"(poids le plus longtemps \"\n \"maintenu sans effort) ?\",\n \"weight_goal\": \"Quel est ton poids d'objectif ?\"}\n\n self.assertEqual(len(context), 3)\n self.assertEqual(context[\"dict_questions\"], dict_questions)\n self.assertTrue(context[\"goal_weight_text\"], \"Nous allons maintenant définir ton objectif.\")\n self.assertTrue(context[\"error_message\"], \"Ton objectif doit être inférieur \"\n \"à ton poids actuel.\")", "def test_weighted_total(self):\r\n self.weighted_setup()\r\n self.submit_question_answer('H1P1', {'2_1': 'Correct', '2_2': 'Correct'})\r\n self.submit_question_answer('FinalQuestion', {'2_1': 'Correct', '2_2': 'Correct'})\r\n self.check_grade_percent(1.0)", "def grade_this_guy(self):\n log.info(\"Function has been called\")\n answer = self._file_storage_path(self.raw_answer['sha1'], self.raw_answer['filename'])\n question = self._question_storage_path(self.raw_question['sha1'], self.raw_question['filename'])\n solution = self._solution_storage_path(self.raw_solution['sha1'], self.raw_solution['filename'])\n\n\n answer = os.path.join(IMAGEDIFF_ROOT, answer)\n question = os.path.join(IMAGEDIFF_ROOT, question)\n solution = os.path.join(IMAGEDIFF_ROOT, solution)\n\n self.score = grade(question, answer, solution)\n if self.score > self.points:\n self.score = self.points\n self.points=float(self.max_score())\n self.save()\n if self.score >= 0: \n self.runtime.publish(self, 'grade',{ 'value': self.score, 'max_value':self.max_score(),})\n log.info(\"runtime.publish-ed\")\n self.save()\n return Response(json_body=self.student_state())", "def test_return_goal_weight_goal_weight_is_too_low(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": \"60\",\n \"cruising_weight\": \"45\", \"weight_goal\": \"40\"}\n return_goal = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[2]\n\n self.assertEqual(return_goal, 47.4)", "def weight_input():\r\n\tif bool(eval(input('Do you want to use different weights? (y/n) '))):\r\n\t\treturn float(input('payload weight (lbm): ')), \\\r\n\t\t\t\tfloat(input('avionics bay weight (lbm): ')), \\\r\n\t\t\t\tfloat(input('booster weight (lbm): '))\r\n\r\n\telse:\r\n\t\treturn 9.489, 4.083, 11.483 #2016-17 PDR Weights\r", "def test_return_start_discussion_display_weight_question(self):\n # create user\n user_created = self.create_user_questionnaire_in_progress()\n\n # data\n data_dict = {\"height\": False, \"actual_weight\": False,\n \"cruising_weight\": False, \"weight_goal\": False}\n old_robot_question = \"Grignotes-tu après les repas ?\"\n user_answer = \"non\"\n\n # call method\n context = self.new_controller.return_start_discussion(user_created.id, old_robot_question,\n data_dict, user_answer)\n\n dict_questions = {\"height\": \"Quelle taille fais-tu ? 
(au format x,xx)\",\n \"actual_weight\": \"Quel est ton poids actuel ?\",\n \"cruising_weight\": \"Quel est ton poids de croisière \"\n \"(poids le plus longtemps \"\n \"maintenu sans effort) ?\",\n \"weight_goal\": \"Quel est ton poids d'objectif ?\"}\n self.assertEqual(context[\"robot_answer\"], None)\n self.assertEqual(context[\"goal_weight_text\"], \"Nous allons maintenant \"\n \"définir ton objectif.\")\n self.assertEqual(len(context[\"dict_questions\"]), 4)\n self.assertEqual(context[\"dict_questions\"], dict_questions)", "def test_return_weekly_questions_save_weight(self):\n # create user\n user_created = self.create_user_start_program_number_days_ago(7)\n\n # TEST NEW WEIGHT DON'T EXISTS\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n self.assertEqual(context[\"robot_comment\"], \"Bonjour ! J'éspère que ta semaine \"\n \"s'est bien passée ? \"\n \"Que donne ta pesée ce matin ?\")\n self.assertTrue(context[\"robot_weekly_weight\"])\n\n # TEST ADD THE NEW WEIGHT\n # data\n weekly_weight = 58\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weight = ResultsUser.objects.values_list(\"weight\").filter(user=user_created)\\\n .order_by(\"weighing_date\").last()[0]\n self.assertEqual(context[\"robot_comment\"], \"J'ai bien pris note de ton poids, \"\n \"tu trouveras un récapitulatif dans \"\n \"l'onglet résultats.\")\n self.assertEqual(last_weight, weekly_weight)\n\n # TEST AFTER ADD THE NEW WEIGHT\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weighing_date = ResultsUser.objects.values_list(\"weighing_date\")\\\n .filter(user=user_created).order_by(\"weighing_date\").last()[0]\n one_week_after_weighing = last_weighing_date + timedelta(days=7)\n month = calendar.month_name[one_week_after_weighing.month]\n date_next_challenge = \"\" + calendar.day_name[one_week_after_weighing.weekday()] + \" \" \\\n + str(one_week_after_weighing.day) + \" \" + month + \"\"\n self.assertEqual(context[\"robot_comment\"], \"Retrouvons nous ici {} pour faire le point sur \"\n \"tes prochains résultats et voir ton nouveau \"\n \"challenge !\".format(date_next_challenge))", "def get_answer_weight(self, list_question_int, answer_selected_str):\n\n quiz_question = self.get_quiz_question(list_question_int)\n answer_weight_key = \"answer_\" + answer_selected_str + \"_weight\"\n answer_weight = quiz_question[answer_weight_key]\n return answer_weight", "def get_answer(self, problem):\t\n\t\tprint problem.statement\n\t\tresp = self.get_float()\n\t\tfor i in range(3):\n\t\t\tif resp == problem.answer: break\n\t\t\tprint \"Nope, try again.\"\n\t\t\tif i > 1:\n\t\t\t\t#this is the hint\n\t\t\t\tprint \"Remember, you are dividing the recipe by {0}.\".format(problem.ratio)\n\t\t\tresp = self.get_float()\n\t\tif resp != problem.answer:\n\t\t\tself.fail_response(problem)\n\t\telse:\n\t\t\tprint \"Right!\"", "def test_return_goal_goal_weight_is_too_low(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": \"60\",\n \"cruising_weight\": \"45\", \"weight_goal\": \"40\"}\n return_goal = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[0]\n\n self.assertEqual(return_goal, 12.6)", "def test_return_advice_goal_weight_ok(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": 
\"60\",\n \"cruising_weight\": \"55\", \"weight_goal\": \"55\"}\n return_advice = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[1]\n\n advice = \"Alors c'est parti ! Partons sur un objectif de - 5 kg. \"\n self.assertEqual(return_advice, advice)", "def ask(self):\n # update the sources\n if len(self.current_subgraph) == 0:\n current_data = self._select_sources(self.data_graph.nodes)\n else:\n current_data = self._select_sources(self.current_subgraph)\n\n # ask for the node with the highest priority .. sorted by (height, num of outgoing edges)\n selected = current_data[0]\n response = self._post_question(\"Je pravda, že \\\"\" + selected.name + \"\\\"? [y/n/w]: \")\n\n \"\"\" Actions based on the response of the user: \"\"\"\n if response == \"y\":\n # specialize on the current subgraph\n self.current_subgraph = self.data_graph.get_subgraph(selected)\n implied = self.current_subgraph.copy()\n\n # check if this node led to a solution\n solution = selected.get_solution()\n if solution is not None:\n print(\"Řešení:\", solution.name)\n response = self._post_question(\"Chceš vysvětlit proč? [y/*]: \")\n if response == \"y\":\n self._explain(solution)\n return True\n\n # traverse the graph and remove all descendants which do not rely only on this node\n self.data_graph.remove_node_and_descendants(selected, ancestors=False, direct=True)\n\n # remove current node and all implied descendants from the subgraph\n intersection = self.current_subgraph.intersection(self.data_graph.nodes)\n self.current_subgraph = intersection\n\n # delete all remaining nodes that are not in the current subgraph from the whole graph\n diff = self.data_graph.nodes.difference(self.current_subgraph)\n self.data_graph.remove_nodes(diff)\n\n # add the implied nodes and the answered node\n implied = implied.symmetric_difference(self.current_subgraph)\n implied.remove(selected)\n self.answered_true.add(selected)\n self.implied_true = self.implied_true.union(implied)\n\n elif response == \"n\":\n # remove only the\n self.data_graph.remove_node_and_descendants(selected, ancestors=True, direct=False)\n\n # update the currently selected set to reflect the removed nodes\n self.current_subgraph = self.current_subgraph.intersection(self.data_graph.nodes)\n\n # check if there are any nodes left in the graph\n if len(self.data_graph.nodes) == 0:\n self._post_answer(\"Neznám řešení tvého problému.\")\n return True\n\n elif response == \"w\":\n self._why(current_data)\n\n else:\n self._post_answer(\"Neznámá odpověď.\")\n\n return False", "def test_return_goal_actual_weight_is_too_low(self):\n data_weight_user = {\"height\": \"1,60\", \"actual_weight\": \"45\",\n \"cruising_weight\": \"45\", \"weight_goal\": \"40\"}\n return_goal = self.new_weight_advice_goal.return_weight_advices_goal(data_weight_user)[0]\n\n user_goal = \"impossible\"\n self.assertEqual(return_goal, user_goal)", "def ask(self):\n # update the sources\n if len(self.current_subgraph) == 0:\n current_data = self._select_sources(self.data_graph.nodes)\n else:\n current_data = self._select_sources(self.current_subgraph)\n\n # ask for the node with the highest priority .. sorted by (height, num of outgoing edges)\n selected = current_data[0]\n response = self._post_question(\"Je pravda, že \\\"\" + selected.name + \"\\\"? 
[y/n/w/(0-1)]: \")\n\n # Check for numerical response\n response_num = None\n if response not in [\"y\", \"w\", \"s\", \"n\"]:\n try:\n response_num = float(response)\n\n if not (0 < response_num <= 1):\n self._post_answer(\"*Zadané číslo není z intervalu (0, 1>.\")\n return False\n except ValueError:\n self._post_answer(\"*Neznámá odpověď.\")\n return False\n\n \"\"\" Actions based on the response of the user: \"\"\"\n if response == \"y\" or response_num is not None:\n # not defining value is like answering with 1\n if response_num is None:\n response_num = 1\n\n # specialize on the current subgraph\n self.current_subgraph = self.data_graph.get_subgraph(selected)\n implied = self.current_subgraph.copy()\n\n # check if this node led to a solution\n solution = selected.get_solution()\n if solution is not None:\n print(\"*Found a solution\", solution.name)\n self.explanations.append(self._explain(solution))\n\n # traverse the graph and remove all descendants which do not rely only on this node\n self.data_graph.remove_node_and_descendants(node=selected, ancestors=False, direct=True, node_from=None,\n user_prob=response_num)\n\n # remove current node and all implied descendants from the subgraph\n intersection = self.current_subgraph.intersection(self.data_graph.nodes)\n self.current_subgraph = intersection\n\n # add the implied nodes and the answered node\n if solution is None:\n implied = implied.symmetric_difference(self.current_subgraph)\n implied.remove(selected)\n self.answered_true.add(selected)\n self.implied_true = self.implied_true.union(implied)\n\n if len(self.current_subgraph) == 0 and len(self.data_graph.nodes) == 0:\n self.end_bot()\n return True\n\n elif response == \"n\":\n # remove only the\n self.data_graph.remove_node_and_descendants(node=selected, ancestors=True, direct=False, node_from=None, user_prob=None)\n\n # update the currently selected set to reflect the removed nodes\n self.current_subgraph = self.current_subgraph.intersection(self.data_graph.nodes)\n\n # check if there are any nodes left in the graph\n if len(self.data_graph.nodes) == 0:\n if len(self.explanations) == 0:\n self._post_answer(\"*Neznám řešení tvého problému.\")\n else:\n self.end_bot()\n return True\n\n elif response == \"w\":\n self._why(current_data)\n\n elif response == \"s\":\n self.data_graph.graphviz_draw()\n\n else:\n self._post_answer(\"*Neznámá odpověď.\")\n\n return False", "def parser_weight(cls, data_dict):\n # get user's answer\n context = {}\n actual_weight = data_dict.get(\"actual_weight\")\n goal_weight = data_dict.get(\"weight_goal\")\n\n # if not valid\n # create an error message\n if float(goal_weight) >= float(actual_weight):\n text = \"Ton objectif doit être inférieur à ton poids actuel.\"\n context = {\"error_message\": text}\n validate = False\n\n # if valid\n else:\n validate = True\n\n return validate, context", "def heuristics(course, suggestedPlan, user):\n score = course.score\n bonus = 0\n return score + bonus", "def calculateBeliefs(self):\n\n belief = {}\n\n for question in self.getQuestions():\n q = str(question.id)\n belief[q] = self.HELPER_init_belief()\n\n #print belief[q]\n for answer in self.getQuestionCompletedAnswers(question):\n #print q\n #print str(answer.question.id)\n assert str(answer.question.id) == q\n w_skill = answer.worker.inference_results['EM']['skill']\n # answer.value must be \"0\" or \"1\"\n assert answer.value == \"0\" or answer.value == \"1\"\n #print answer.value, w_skill\n belief[q] = self.HELPER_update_belief(belief[q], answer.value, 
w_skill)\n #print belief[q]\n\n #print \"Question beliefs:\", belief\n #print \"##################\"\n return belief", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}", "def getWeights(self, gameState, action):\n return {'successorScore': 1.0}" ]
[ "0.7202105", "0.69144183", "0.6901956", "0.65887946", "0.6515546", "0.63579017", "0.63563627", "0.6148189", "0.6041054", "0.6025184", "0.60155094", "0.60127217", "0.6002536", "0.59930605", "0.59860015", "0.5915728", "0.58932304", "0.5890873", "0.5890676", "0.58757055", "0.5849008", "0.58142453", "0.5802668", "0.5796182", "0.5796182", "0.5796182", "0.5796182", "0.5796182", "0.5796182", "0.5796182" ]
0.7461492
0
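The goal-weight row accepts a goal only when it sits strictly below the current weight (see the parser_weight snippet among its negatives). A Django-free sketch of just that check — field names mirror the row's data_dict, the error message is illustrative — might be:

def parser_weight(data_dict):
    # Reject a goal that is not strictly below the current weight.
    actual = float(data_dict["actual_weight"])
    goal = float(data_dict["weight_goal"])
    if goal >= actual:
        return False, {"error_message": "Goal must be below the current weight."}
    return True, {}

print(parser_weight({"actual_weight": "80", "weight_goal": "70"}))  # (True, {})
print(parser_weight({"actual_weight": "80", "weight_goal": "90"}))  # (False, {'error_message': ...})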
save advices to user
def save_advices_to_user(self, user_answer_id, old_question_id, id_user): # get data id_advice = DiscussionSpace.objects.values_list("robot_advices"). \ filter(robot_question=old_question_id).get(user_answer=user_answer_id)[0] # if the user's answer # contains a robot advice if id_advice is not None: # get user's advices list user = self.user.objects.get(id=id_user) advices_user_id = user.advices_to_user.values_list("id") # get advices by question # in discussion space id_advices_question = DiscussionSpace.objects.values_list("robot_advices")\ .filter(robot_question=old_question_id) # if the user has already given # another answer to this question : # delete the old advice for advices_question in id_advices_question: for advices_user in advices_user_id: if advices_user[0] == advices_question[0]: user.advices_to_user.remove(advices_user) # add a new advice to user advice = RobotAdvices.objects.get(id=id_advice) AdvicesToUser.objects.create(user=user, advice=advice)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_advices_to_user(self, id_user):\n # get data\n advice_type_id = RobotAdviceType.objects.values_list(\"id\").get(type=\"default\")\n advices_id = RobotAdvices.objects.values_list(\"id\").filter(robot_advice_type=advice_type_id)\n\n # add new advices to user\n for advice_id in advices_id:\n advice = RobotAdvices.objects.get(id=advice_id[0])\n user = self.user.objects.get(id=id_user)\n AdvicesToUser.objects.create(user=user, advice=advice)", "def add_advice_to_user_created(cls, user, list_advice_id):\n for id_advice in list_advice_id:\n advice = RobotAdvices.objects.get(id=id_advice)\n AdvicesToUser.objects.create(user=user, advice=advice)", "def test_add_advices_to_user(self):\n # create user\n user_created = self.create_user_start_program_advices_list_empty()\n\n # count the number of challenges\n # before a call to the method\n user = self.user.objects.get(id=user_created.id)\n number_advice_to_user_before = user.advices_to_user.count()\n\n # call method\n self.new_controller.add_advices_to_user(user_created.id)\n\n # count the number of challenges\n # after a call to the method\n number_advice_to_user_after = user.advices_to_user.count()\n advice_to_user = user.advices_to_user.values_list(\"id\")\n self.assertEqual(number_advice_to_user_before, 0)\n self.assertEqual(number_advice_to_user_after, 5)\n for id_advice in advice_to_user:\n self.assertEqual([(27,), (28,), (29,), (25,), (26,)].count(id_advice), 1)", "def save_to_users(self):\n Data.add_data(self.user_data())", "def save_users(user):\n user.save_user()", "def save_profile(self):\n self.save()", "def save_model(self, request, obj, form, change):\n obj.propietario = request.user\n obj.save()", "def test_save_advices_to_user_first_answer(self):\n # create user\n user_created = self.create_new_user()\n\n # get the user's advices\n # before of called the method\n user = self.user.objects.get(id=user_created.id)\n advice_user_list_before = user.advices_to_user.values_list(\"id\")\\\n .order_by(\"robot_advice_type\")\n number_advices_before = len(advice_user_list_before)\n\n # call method\n user_answer_id = DiscussionSpace.objects.values_list(\"user_answer\")\\\n .filter(robot_advices__isnull=False).first()[0]\n old_question_id = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .filter(robot_advices__isnull=False).first()[0]\n self.new_controller.save_advices_to_user(user_answer_id, old_question_id, user_created.id)\n\n # get the user's advices\n # after of called the method\n advice_user_list_after = user.advices_to_user.values_list(\"id\")\\\n .order_by(\"robot_advice_type\")\n number_advices_after = len(advice_user_list_after)\n\n self.assertNotEqual(advice_user_list_before, advice_user_list_after)\n self.assertNotEqual(number_advices_before, number_advices_after)", "def save_user(user):\n User.save_user(user)", "def test_save_advices_to_user_other_answer(self):\n # create and get user\n user_created = self.create_new_user()\n user = self.user.objects.get(id=user_created.id)\n\n # test if the user answer to a question :\n # add a new advice to user\n data = DiscussionSpace.objects.values_list(\"robot_question\")\n for id_question in data:\n user_answer_id = DiscussionSpace.objects.values_list(\"user_answer\")\\\n .filter(robot_question=id_question).filter(robot_advices__isnull=False)\n if len(user_answer_id) >= 2:\n user_answer = user_answer_id[0][0]\n advice_to_add = DiscussionSpace.objects.values_list(\"robot_advices\")\\\n .filter(user_answer=user_answer_id[1][0])\\\n 
.filter(robot_question=id_question)[0][0]\n old_question_id = id_question\n list_advice_id = [advice_to_add]\n self.add_advice_to_user_created(user_created, list_advice_id)\n\n # get the user's advices\n # before called the method\n advice_user = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n number_advice_user = len(advice_user)\n id_question_1 = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .get(robot_advices=advice_user[0][0])\n\n # call method\n # test if the user change\n # this answer to this question\n self.new_controller.save_advices_to_user(user_answer, old_question_id,\n user_created.id)\n\n # get the user's advices\n # after called the method\n advice_user_after = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n number_advice_user_after = len(advice_user_after)\n id_question_2 = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .get(robot_advices=advice_user_after[0][0])\n\n self.assertEqual(id_question_1, id_question_2)\n self.assertNotEqual(advice_user, advice_user_after)\n self.assertEqual(number_advice_user, number_advice_user_after)", "def save_user(self):\n\n User.user_list.append(self)", "def save_user_profile(instance, **_):\n instance.profile.save()", "def save_user(self):\n User.user_list.append(self)", "def save_user(self):\n User.user_list.append(self)", "def save_user(self):\n db.session.add(self)\n db.session.commit()", "def save(self, db):\n db.query(\n \"INSERT INTO fellows (name, accomodation)\\\n VALUES(:name, :accomodation)\",\n name=self.name, accomodation=self.wants_accomodation\n )", "def save_user(self):\n\n User.user_list.append(self)", "def save(self, user):\n if not valid_kid(self.kid_id, user):\n return False\n\n # check whether saving or updating\n if self.id:\n db.session.execute(\n \"\"\"\n UPDATE allowance\n SET kid_id = :kid_id, period = :period, period_day = :period_day,\n amount = :amount\n WHERE id = :id\n \"\"\",\n {\n 'kid_id': self.kid_id,\n 'period': self.period,\n 'period_day': self.period_day,\n 'amount': self.amount,\n 'id': self.id\n }\n )\n else:\n self.is_active = True\n self.created = datetime.datetime.now()\n db.session.add(self)\n db.session.commit()\n return True", "def save(self, *args, **kwargs):\n pass", "def save_user_profile(sender, instance, **kwargs):\n instance.profile.save()", "def save_user_profile(sender, instance, **kwargs):\n instance.profile.save()", "def save():\n pass", "def save():\n user = users.get_current_user()\n if user:\n new_app = Applic(parent=base_key)\n new_app.user = user.user_id()\n new_app.username = user.nickname()\n new_app.content = request.forms.get('content')\n new_app.title = request.forms.get('title') \n new_app.put()\n redirect('/')\n else:\n redirect('/')", "def save_aliment(self, aliment_name):\n aliment = Aliment.objects.get(name=aliment_name)\n self.aliments_pref.add(aliment)", "def save(self, *args, **kwargs):\n return", "def save():", "def response_post_save_add(self, request, obj):\n\n # a simple hack to set the default prescribing officer\n if obj is not None and obj.prescribing_officer is None:\n obj.prescribing_officer = request.user\n obj.save()\n\n if obj is not None and obj.creator_id == 1:\n obj.creator = request.user\n obj.save()\n\n url = reverse('admin:prescription_prescription_detail',\n args=[str(obj.id)])\n return HttpResponseRedirect(url)", "def save(self):\n\n pass", "def save(self):\n pass", "def save(self):\n pass" ]
[ "0.7048428", "0.6602138", "0.6419206", "0.64026564", "0.6258406", "0.622147", "0.62191874", "0.6178238", "0.6152282", "0.61215186", "0.6086368", "0.59979254", "0.59565187", "0.59565187", "0.5946988", "0.5928566", "0.59229535", "0.5900002", "0.5873121", "0.58391154", "0.58391154", "0.57960886", "0.5789866", "0.57655185", "0.57641983", "0.5752273", "0.57328266", "0.5730522", "0.5697315", "0.5697315" ]
0.6839697
1
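The save-advices row implements a replace-on-reanswer rule: when a user changes their answer to a question, the advice earned from the old answer is removed before the new one is stored. An ORM-free sketch of that rule — the lookup table and names below are invented stand-ins for the Django models — might be:

discussion_space = [
    {"question": 1, "answer": "yes", "advice": "A"},
    {"question": 1, "answer": "no", "advice": "B"},
]

def save_advice(user_advices, question_id, answer):
    # Look up the advice attached to this (question, answer) pair, if any.
    new = next((r["advice"] for r in discussion_space
                if r["question"] == question_id and r["answer"] == answer), None)
    if new is None:
        return user_advices
    # Drop any advice previously earned from this question, then add the new one.
    same_q = {r["advice"] for r in discussion_space if r["question"] == question_id}
    user_advices = [a for a in user_advices if a not in same_q]
    user_advices.append(new)
    return user_advices

print(save_advice(["A"], 1, "no"))  # ['B'] — the old advice for question 1 is replaced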
return weekly question and save user's answer
def return_weekly_questions_save_weight(self, weekly_weight, id_user): # get data context = {} weighing_date = ResultsUser.objects.values_list("weighing_date") last_weighing_date = weighing_date.filter(user=id_user).order_by("weighing_date").last()[0] one_week_after_weighing = last_weighing_date + timedelta(days=7) present = datetime.now() present_date = present.date() # one week after # the weighing last if present_date >= one_week_after_weighing: # if the user gave # his weekly weight if weekly_weight is not False: # if the user has reached # his weight goal final_weight = ProfileUser.objects.values_list("final_weight").get(user=id_user)[0] if float(weekly_weight) <= final_weight: context["robot_comment"] = self.return_text_congratulations_restart_program\ (id_user) self.end = True # save weight else: context["robot_comment"] = "J'ai bien pris note de ton poids, " \ "tu trouveras un récapitulatif dans " \ "l'onglet résultats." user = self.user.objects.get(id=id_user) ResultsUser.objects.create(user=user, weight=weekly_weight) self.new_week = True # create robot question else: context["robot_comment"] = "Bonjour ! J'éspère que ta semaine " \ "s'est bien passée ? Que donne ta pesée " \ "ce matin ?" context["robot_weekly_weight"] = True # during the first week after # the weighing last : create robot text else: month = calendar.month_name[one_week_after_weighing.month] date = "" + calendar.day_name[one_week_after_weighing.weekday()] + \ " " + str(one_week_after_weighing.day) \ + " " + month + "" context["robot_comment"] = "Retrouvons nous ici {} pour faire le point " \ "sur tes prochains résultats et voir ton nouveau " \ "challenge !".format(date) return context
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_return_weekly_questions_save_weight(self):\n # create user\n user_created = self.create_user_start_program_number_days_ago(7)\n\n # TEST NEW WEIGHT DON'T EXISTS\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n self.assertEqual(context[\"robot_comment\"], \"Bonjour ! J'éspère que ta semaine \"\n \"s'est bien passée ? \"\n \"Que donne ta pesée ce matin ?\")\n self.assertTrue(context[\"robot_weekly_weight\"])\n\n # TEST ADD THE NEW WEIGHT\n # data\n weekly_weight = 58\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weight = ResultsUser.objects.values_list(\"weight\").filter(user=user_created)\\\n .order_by(\"weighing_date\").last()[0]\n self.assertEqual(context[\"robot_comment\"], \"J'ai bien pris note de ton poids, \"\n \"tu trouveras un récapitulatif dans \"\n \"l'onglet résultats.\")\n self.assertEqual(last_weight, weekly_weight)\n\n # TEST AFTER ADD THE NEW WEIGHT\n # data\n weekly_weight = False\n\n # call method\n context = self.new_controller.return_weekly_questions_save_weight(weekly_weight,\n user_created.id)\n\n last_weighing_date = ResultsUser.objects.values_list(\"weighing_date\")\\\n .filter(user=user_created).order_by(\"weighing_date\").last()[0]\n one_week_after_weighing = last_weighing_date + timedelta(days=7)\n month = calendar.month_name[one_week_after_weighing.month]\n date_next_challenge = \"\" + calendar.day_name[one_week_after_weighing.weekday()] + \" \" \\\n + str(one_week_after_weighing.day) + \" \" + month + \"\"\n self.assertEqual(context[\"robot_comment\"], \"Retrouvons nous ici {} pour faire le point sur \"\n \"tes prochains résultats et voir ton nouveau \"\n \"challenge !\".format(date_next_challenge))", "def weekly():", "def next_question(self, user):\n if not self.sessions[user]:\n return \"You aren't currently in a session. Type 'quiz <set>' to start.\"\n\n deck_id = self.sessions[user]['deck']\n current_buckets = self._fetch_buckets(user, deck_id)\n now = current_buckets['now']\n if not now:\n return \"You've answered all of the questions! 
\" + self.stop_session()\n\n self.sessions[user]['is_answering'] = True\n\n # ask question\n return str(now[0]['definition'])", "def personal_questions(user):\n questions = {\n 1: 'GBB: How long have you been a fan of the Packers?',\n 2: 'GBB: Why are you a fan of the Packers?',\n 3: \"GBB: What team do you hate the most?\",\n 4: \"GBB: Who's your favorite player on the Packers?\",\n 5: \"GBB: Who's your least favorite player on the Packers?\",\n 6: \"GBB: Do you come from a family of Packer fans, or are you a lone ranger?\"\n }\n\n while True:\n num = random.randint(1, 6)\n answered = user['personal questions asked']\n if num not in answered:\n user['personal questions asked'].append(num)\n return questions[num]\n if len(answered) == len(questions.keys()):\n return \"GBB: Look's like I know you so well that I don't even need to ask you a question!\"", "def test_return_weekly_advice(self):\n # create and get user\n user_created = self.create_user_start_program_number_days_ago(7)\n user = self.user.objects.get(id=user_created.id)\n\n # check the advice returned if new_week == False :\n # first challenge of the program\n self.new_controller.new_week = False\n return_advice_1 = self.new_controller.return_weekly_advice(user_created.id)\n new_advices_user_text_1 = \"J'analyse : La deuxième semaine est une semaine particulière\"\n self.assertEqual(new_advices_user_text_1, return_advice_1[:60])\n\n id_advice_returned = RobotAdvices.objects.values_list(\"id\").get(text=return_advice_1)[0]\n advice_user_list = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n self.assertEqual(id_advice_returned, advice_user_list[0][0])\n\n # check the advice returned if new_week == True :\n # second, ... challenges of the program\n self.new_controller.new_week = True\n return_advice_2 = self.new_controller.return_weekly_advice(user_created.id)\n new_advices_user_text_2 = \"Une faim de loup ! : Tu m'as indiqué dans ton questionnaire\"\n self.assertEqual(new_advices_user_text_2, return_advice_2[:59])\n\n id_advice_returned = RobotAdvices.objects.values_list(\"id\").get(text=return_advice_2)[0]\n advice_user_list = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n self.assertEqual(id_advice_returned, advice_user_list[0][0])", "def ask_question():\n title_question = request.form.get(\"title\")\n question = request.form.get(\"question\")\n\n date_string = datetime.today().strftime('%Y-%m-%d')\n \n ask = Question(user_id = session[\"user_id\"],question_created=date_string, title_question = title_question, question = question)\n\n db.session.add(ask)\n db.session.commit()\n\n return \"question added\"", "def send_weeklies():\n\n user = irc_handler.get_irc_user_name(line[0])\n\n today = datetime.datetime.now()", "def return_weekly_advice(self, id_user):\n # get user\n user = self.user.objects.get(id=id_user)\n\n # if it's a new week\n if self.new_week is True:\n # delete last user's advice\n last_advice = user.advices_to_user.values_list(\"id\").\\\n order_by(\"robot_advice_type\").first()\n user.advices_to_user.remove(last_advice)\n\n # get new user's advice\n text = user.advices_to_user.values_list(\"text\")\n new_advices_user_text = text.order_by(\"robot_advice_type\").first()[0]\n\n return new_advices_user_text", "def notAFan_questions(user):\n questions = {\n 1: \"GBB: How old are you? \",\n 2: \"GBB: What do you like to do in your free time? \",\n 3: \"GBB: What is your ethnicity? \",\n 4: \"GBB: What did you eat for breakfast? 
\",\n 5: \"GBB: Are you an early bird or a night owl? \",\n 6: \"GBB: Do you like football? \"\n }\n\n while True:\n num = random.randint(1, 6)\n answered = user['personal questions asked']\n if num not in answered:\n user['personal questions asked'].append(num)\n return questions[num]\n if len(answered) == len(questions.keys()):\n return \"GBB: Looks like I know you so well that I don't even need to ask you a question! Type anything to proceed.\"", "def test_weighted_exam(self):\r\n self.weighted_setup()\r\n self.submit_question_answer('FinalQuestion', {'2_1': 'Correct', '2_2': 'Correct'})\r\n self.check_grade_percent(0.75)", "def week(update: Update, _: CallbackContext) -> None:\n running_total, average_dose_per_day = return_weekly_figure()\n text = \\\n (\n \"\\n📅 *Rolling 7 Day Stats*\\n\" \n + \"\\n\\t\\t\\t📈 Rolling 7 Day Doses - \" + str('{:,}'.format(running_total))\n + \"\\n\\t\\t\\t💉 Average Daily Doses - \" + str('{:,}'.format(average_dose_per_day)) \n )\n update.message.reply_markdown(text)\n logger.info(\"Getting week update for \" + str(update.message.chat_id))", "def Answer(self):\r\n\r\n try:\r\n #estraggo i dati\r\n self.__TopicsPages()\r\n #invio le richiste ai vari YahooTopic\r\n for topic in self.__topicpages:\r\n topic['ask url']=u'https://' +topic['ask url']\r\n #print 'elaborazione di :', topic['ask url']\r\n \r\n _t=YahooTopic.YahooTopic(topic['ask url'])\r\n \r\n self.t.append(_t)\r\n\r\n return True\r\n# return self.t[0].MostVotedAnswer.getText(), self.t[0].sogliaMostVotedAnswer\r\n \r\n except Exception, e:\r\n ErrorLog2.ErrorLog(self.__class__.__name__, 'Answer', e)\r\n return False", "def question(update, context):\n bot = context.bot\n user = update.message.from_user\n inc_msg = str.lower(update.message.text)\n\n # answer why questions with a reasons from database\n if 'waarom' in inc_msg:\n\n # return a random reason from file\n with open(REASONS) as file:\n lines = file.readlines()\n msg = random.choice(lines)\n\n # answer other questions with\n else:\n # TODO: introduce random silence\n rng = random.random()\n\n if rng < 0.9 and not 'rob' not in inc_msg:\n return\n options = [\n f\"Vraag het maar niet aan mij, ik ben niet alwetend.\",\n (\"https://lmgtfy.com/?q=\" + inc_msg.replace(\" \", \"+\") + \"&pp=1&s=g&t=w\"),\n f\"Ja he dat weet ik toch ook niet, google dat maar ff {user.first_name}...\"\n ]\n\n msg = random.choice(options)\n time.sleep(HUMAN_DELAY * len(msg))\n\n bot.send_message(chat_id=update.message.chat_id, text=msg,\n reply_to_message_id=update.message.message_id,\n parse_mode=ParseMode.MARKDOWN)", "def set_qs():\n\n print \"Hi there! We're going to give you a fun grammar quiz.\"\n\n user_name = raw_input(\"To start, please enter your name: \")\n\n print \"Thanks, {}!\".format(user_name)\n\n user_num = int(raw_input(\"How many questions would you like us to generate for you? 
Enter a number: \"))\n\n num_qs = validate_num(user_num)\n\n print \"Ok, we'll make you a quiz with {} questions!\".format(num_qs)\n\n return num_qs", "def save_answer(self, data, system):\r\n # Once we close the problem, we should not allow students\r\n # to save answers\r\n error_message = \"\"\r\n closed, msg = self.check_if_closed()\r\n if closed:\r\n return msg\r\n\r\n if self.child_state != self.INITIAL:\r\n return self.out_of_sync_error(data)\r\n\r\n message = \"Successfully saved your submission.\"\r\n\r\n # add new history element with answer and empty score and hint.\r\n success, error_message, data = self.append_file_link_to_student_answer(data)\r\n if not success:\r\n message = error_message\r\n else:\r\n data['student_answer'] = OpenEndedModule.sanitize_html(data['student_answer'])\r\n success, error_message = self.send_to_grader(data['student_answer'], system)\r\n if not success:\r\n message = error_message\r\n # Store the answer instead\r\n self.store_answer(data, system)\r\n else:\r\n self.new_history_entry(data['student_answer'])\r\n self.change_state(self.ASSESSING)\r\n\r\n return {\r\n 'success': success,\r\n 'error': message,\r\n 'student_response': data['student_answer'].replace(\"\\n\", \"<br/>\")\r\n }", "def weekPay(self):\n pay = self.hourlyPay * self.hoursWorked\n return pay", "def fetch_response(self):\n if self.stored_answer:\n return \"Thanks for your answer. Your answer has been saved. \"\\\n \"I will get back to you when the destined asker, rates your response. \"\\\n \"Keep your fingers crossed. Hopefully the asker will give you good ratings, \"\\\n \"and your karma points will boost up.\"\\\n \"Meanwhile, you can ask another question, or post answer for requested question.\"\n else:\n self.stored_answer = True\n return \"Sorry, you did not enter the Answer in the required format. \"\\\n \"Eg - \\\"[Answer][qid:<placeholder for question_number>] <Placeholder for Answer>\\\". Try again\"", "def user_question():\n return input('What would you like? 
(espresso/latte/cappuccino): ')", "def process_question(qu):\n\n ## global ranking\n rank_info = {}\n rank_info_k = [\"viewcount\",\"score\",\"favoritecount\"]\n for k in rank_info_k:\n rank_info[k] = int(qu[k])\n qu.pop(k,None)\n\n rank_info[\"creationdate\"] = qu[\"creationdate\"]\n\n if qu[\"acceptedanswer\"]:\n qu[\"acceptedanswer\"] = list(qu[\"acceptedanswer\"])\n else:\n qu[\"acceptedanswer\"] = []\n\n qu.pop('comments',None) # discard comments, maybe add back later\n qu[\"rank_info\"] = rank_info\n\n return qu", "def weekly():\n\n response = {}\n\n # 0..6 => Sunday..Saturday\n for i in range(7):\n hours = []\n interactions = 0\n\n for j in range(25):\n try:\n wfile = open(common.stats_path + '/weekly-average/' + str(i) + '/' + str(j))\n data = wfile.read()\n\n if j == 24:\n interactions = int(data)\n else:\n hours.append(int(data))\n\n wfile.close()\n except IOError:\n if i < 24:\n hours.append(0)\n\n response[DAYS[i]] = {'hours': hours, 'interactions': interactions}\n\n return response", "def survivalQuizMethod(self, questionAndAnswer):\n questionsRight = 1\n for question in questionAndAnswer: \n answer = input(f\"\\n{bcolors.CYAN}{question[0]}\\n:{bcolors.ENDC}\")\n if answer == question[1]:\n questionsRight *= 1\n elif answer != question[1]:\n questionsRight *= 0\n elif answer.capitalize() == \"Quit\":\n # User wants to Quit the Game now\n print(f\"User wants to quit the game now\")\n questionsRight = \"Quit\"\n break\n \n return questionsRight", "def ask_saved(self, refresh_data=False, **kwargs):\n clean_kwargs = pytan.utils.clean_kwargs(kwargs=kwargs)\n sse = kwargs.get('sse', False)\n clean_kwargs['sse_format'] = clean_kwargs.get('sse_format', 'xml_obj')\n\n # get the saved_question object the user passed in\n h = \"Issue a GetObject to find saved question objects\"\n sq_objs = self.get(objtype='saved_question', pytan_help=h, **clean_kwargs)\n\n if len(sq_objs) != 1:\n err = (\n \"Multiple saved questions returned, can only ask one \"\n \"saved question!\\nArgs: {}\\nReturned saved questions:\\n\\t{}\"\n ).format\n sq_obj_str = '\\n\\t'.join([str(x) for x in sq_objs])\n raise pytan.exceptions.HandlerError(err(kwargs, sq_obj_str))\n\n sq_obj = sq_objs[0]\n\n h = (\n \"Issue a GetObject to get the full object of the last question asked by a saved \"\n \"question\"\n )\n q_obj = self._find(obj=sq_obj.question, pytan_help=h, **clean_kwargs)\n\n poller = None\n poller_success = None\n\n if refresh_data:\n # if GetResultInfo is issued on a saved question, Tanium will issue a new question\n # to fetch new/updated results\n h = (\n \"Issue a GetResultInfo for a saved question in order to issue a new question, \"\n \"which refreshes the data for that saved question\"\n )\n self.get_result_info(obj=sq_obj, pytan_help=h, **clean_kwargs)\n\n # re-fetch the saved question object to get the newly asked question info\n h = (\n \"Issue a GetObject for the saved question in order get the ID of the newly \"\n \"asked question\"\n )\n shrunk_obj = pytan.utils.shrink_obj(obj=sq_obj)\n sq_obj = self._find(obj=shrunk_obj, pytan_help=h, **clean_kwargs)\n\n h = (\n \"Issue a GetObject to get the full object of the last question asked by a saved \"\n \"question\"\n )\n q_obj = self._find(obj=sq_obj.question, pytan_help=h, **clean_kwargs)\n\n m = \"Question Added, ID: {}, query text: {!r}, expires: {}\".format\n self.mylog.debug(m(q_obj.id, q_obj.query_text, q_obj.expiration))\n\n # poll the new question for this saved question to wait for results\n poller = pytan.pollers.QuestionPoller(handler=self, 
obj=q_obj, **clean_kwargs)\n poller_success = poller.run(**clean_kwargs)\n\n # get the results\n if sse and self.session.platform_is_6_5(**clean_kwargs):\n h = (\n \"Issue a GetResultData for a server side export to get the answers for the last \"\n \"asked question of this saved question\"\n )\n\n rd = self.get_result_data_sse(obj=q_obj, pytan_help=h, **clean_kwargs)\n else:\n h = (\n \"Issue a GetResultData to get the answers for the last asked question of \"\n \"this saved question\"\n )\n rd = self.get_result_data(obj=q_obj, pytan_help=h, **clean_kwargs)\n\n if isinstance(rd, taniumpy.object_types.result_set.ResultSet):\n # add the sensors from this question to the ResultSet object for reporting\n rd.sensors = [x.sensor for x in q_obj.selects]\n\n ret = {\n 'saved_question_object': sq_obj,\n 'poller_object': poller,\n 'poller_success': poller_success,\n 'question_object': q_obj,\n 'question_results': rd,\n }\n\n return ret", "def ask_question(index, attributes):\n \n print(\"ask_question, index: \", str(index))\n\n curr_question = quiz.list_fragen[attributes[\"sess_questions\"][index]].get_frage()\n print(\"@ask_question: \", curr_question)\n\n print(\"@ask_question before if \")\n if len(attributes[\"scores\"]) > 1:\n print(\"@ask_question if > 1\")\n text = \"<s>Frage {0} an Spieler {1}:</s> <s>{2}</s>\".format(int(attributes[\"current_round\"]),\\\n attributes[\"current_player\"], curr_question)\n else:\n print(\"@ask_question else\")\n text = \"<s>Frage {0}:</s> <s>{1}</s>\".format(int(attributes[\"current_round\"]),\\\n curr_question)\n \n text = slower_speech(text)\n text += TICK_BEEP_SOUND\n \n print(\"@ask_question before setatts\")\n attributes[\"current_question\"] = curr_question\n print(\"@ask_question before setatts\")\n\n #returns string here excepcionally because response is formed elsewhere\n return text", "def answer():\n print(\"\\nJetzt befindest du dich in einer wunderschönen Welt, doch du stellst fest das es verschiedene Wege gibt. 
Du bemerkst sofort, dass es Im rechten dagegen hat es sehr viel Wasser mit Krokodile hat, der linke Weg dagegen sieht wunderschön aus!\")", "def reporting_week(self):\n\n print(\"Week Numbers:\")\n print(self.time_stamp)\n print(self.time_stamp_iso)\n print(\"Current = {}\".format(self.current_week()))\n print(\"Reporting = {}\".format(self.current_week() - 1))", "def store_answer():\n #breakpoint()\n answer = request.form['answer']\n response = session[ANSWERS_KEY]\n response.append(answer)\n session[ANSWERS_KEY] = response\n return redirect(f\"/questions/{len(session[ANSWERS_KEY])}\")", "def get_week_date():\n return timezone.now()+timezone.timedelta(days=6)", "def on_submit(self):\n\n if self.question_type == \"programming\":\n database_api.sendAnswers(Cache.get(\"info\", \"token\"),\n Cache.get(\"lect\", \"code\"),\n self.question_no,\n Cache.get(\"info\", \"nick\"),\n self.ids[\"input_code_answer\"].text.replace(\"\\n\",\n \"*[SEAS-SLASH-N]*\"\n )\n )\n\n return True\n elif self.question_type == \"short_answer\":\n database_api.sendAnswers(Cache.get(\"info\", \"token\"),\n Cache.get(\"lect\", \"code\"),\n self.question_no,\n Cache.get(\"info\", \"nick\"),\n self.ids[\"input_short_answer\"].text.replace(\"\\n\",\n \"*[SEAS-SLASH-N]*\"\n )\n )\n\n return True\n elif self.question_type == \"multiple_choice\":\n try:\n students_choice = self.multiple_choice_answer\n except:\n students_choice = \"\"\n\n database_api.sendAnswers(Cache.get(\"info\", \"token\"),\n Cache.get(\"lect\", \"code\"),\n self.question_no,\n Cache.get(\"info\", \"nick\"),\n students_choice\n )\n\n return True\n else:\n return False", "def get_day_of_week_from_user():\n while True:\n day = input('Select the month to explore. Enter from monday, tuesday, wednesday, thursday, friday, '\n 'saturday, sunday or all: ').lower()\n\n if day in VALID_DAYS:\n confirm = input(\"You have selected {}. Press 'y' to confirm: \".format(day.title()))\n\n if confirm == 'y':\n break\n else:\n print(\"Try again.\\n\")\n else:\n print(\"Invalid input: {}. Try again.\\n\".format(day))\n return day", "def _post_question(self, question):\n self.messages_sent.append(question)\n if self.user_input:\n return input(question)\n else:\n ans = self.lines.pop(0)\n print(question + ans)\n return ans" ]
[ "0.66659147", "0.6612688", "0.5977455", "0.57612425", "0.57137007", "0.55952173", "0.5567274", "0.55584353", "0.5535094", "0.5429015", "0.54175264", "0.5411817", "0.5410731", "0.54016536", "0.5363783", "0.5358611", "0.5342742", "0.53238475", "0.53221977", "0.5289323", "0.527541", "0.5268538", "0.52665365", "0.5256951", "0.5237265", "0.52330685", "0.52279985", "0.5217151", "0.5215977", "0.52147526" ]
0.7340768
0
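Each record above closes with its 30 negative_scores, the positive document_score, and a document_rank. Assuming the rank counts negatives that score at or above the positive document, rank 0 means the positive outscored every mined negative — here 0.7340768 against a best negative of 0.66659147. Below is a minimal consistency-check sketch over one parsed record; check_rank is an illustrative helper under that assumption, not part of the dataset:

def check_rank(record):
    # Assumed semantics: document_rank counts negatives scoring at or
    # above the positive document, so rank 0 means the positive beats
    # all 30 mined negatives.
    doc_score = float(record["document_score"])
    neg_scores = [float(s) for s in record["negative_scores"]]
    expected = sum(s >= doc_score for s in neg_scores)
    return expected == int(record["document_rank"])

# For the record above: 0.7340768 > max(neg_scores) == 0.66659147,
# so the stored rank of 0 is consistent.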
add new robot advice entries to the user
def add_advices_to_user(self, id_user): # get data advice_type_id = RobotAdviceType.objects.values_list("id").get(type="default") advices_id = RobotAdvices.objects.values_list("id").filter(robot_advice_type=advice_type_id) # add new advices to user for advice_id in advices_id: advice = RobotAdvices.objects.get(id=advice_id[0]) user = self.user.objects.get(id=id_user) AdvicesToUser.objects.create(user=user, advice=advice)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_advice_to_user_created(cls, user, list_advice_id):\n for id_advice in list_advice_id:\n advice = RobotAdvices.objects.get(id=id_advice)\n AdvicesToUser.objects.create(user=user, advice=advice)", "def test_add_advices_to_user(self):\n # create user\n user_created = self.create_user_start_program_advices_list_empty()\n\n # count the number of challenges\n # before a call to the method\n user = self.user.objects.get(id=user_created.id)\n number_advice_to_user_before = user.advices_to_user.count()\n\n # call method\n self.new_controller.add_advices_to_user(user_created.id)\n\n # count the number of challenges\n # after a call to the method\n number_advice_to_user_after = user.advices_to_user.count()\n advice_to_user = user.advices_to_user.values_list(\"id\")\n self.assertEqual(number_advice_to_user_before, 0)\n self.assertEqual(number_advice_to_user_after, 5)\n for id_advice in advice_to_user:\n self.assertEqual([(27,), (28,), (29,), (25,), (26,)].count(id_advice), 1)", "def add(self, user: U) -> None:\n ...", "def save_advices_to_user(self, user_answer_id, old_question_id, id_user):\n # get data\n id_advice = DiscussionSpace.objects.values_list(\"robot_advices\"). \\\n filter(robot_question=old_question_id).get(user_answer=user_answer_id)[0]\n\n # if the user's answer\n # contains a robot advice\n if id_advice is not None:\n\n # get user's advices list\n user = self.user.objects.get(id=id_user)\n advices_user_id = user.advices_to_user.values_list(\"id\")\n\n # get advices by question\n # in discussion space\n id_advices_question = DiscussionSpace.objects.values_list(\"robot_advices\")\\\n .filter(robot_question=old_question_id)\n\n # if the user has already given\n # another answer to this question :\n # delete the old advice\n for advices_question in id_advices_question:\n for advices_user in advices_user_id:\n if advices_user[0] == advices_question[0]:\n user.advices_to_user.remove(advices_user)\n\n # add a new advice to user\n advice = RobotAdvices.objects.get(id=id_advice)\n AdvicesToUser.objects.create(user=user, advice=advice)", "def add_bot(self, bot):\n self.add_user(bot)", "def add_user(self, attrs):\n pass", "def add_user(self):\n\n pin, code = self.get_auth_pin() \n print(\"Enter the PIN '{}' into the Add Application window and click Add Application\".format(pin))\n input(\"waiting press enter to continue...\")\n\n access_token, refresh_token = self.get_tokens(code)\n user_id = self.tokens.get_next_user_id()\n self.tokens.insert_user(user_id, access_token, refresh_token)\n tstat_ids = self.get_tstat_ids(access_token)\n for tstat_id in tstat_ids:\n logger.info(\"Adding Thermostat ID: {}\".format(tstat_id))\n self.tokens.insert_tstat(user_id, tstat_id)", "def test_save_advices_to_user_other_answer(self):\n # create and get user\n user_created = self.create_new_user()\n user = self.user.objects.get(id=user_created.id)\n\n # test if the user answer to a question :\n # add a new advice to user\n data = DiscussionSpace.objects.values_list(\"robot_question\")\n for id_question in data:\n user_answer_id = DiscussionSpace.objects.values_list(\"user_answer\")\\\n .filter(robot_question=id_question).filter(robot_advices__isnull=False)\n if len(user_answer_id) >= 2:\n user_answer = user_answer_id[0][0]\n advice_to_add = DiscussionSpace.objects.values_list(\"robot_advices\")\\\n .filter(user_answer=user_answer_id[1][0])\\\n .filter(robot_question=id_question)[0][0]\n old_question_id = id_question\n list_advice_id = [advice_to_add]\n self.add_advice_to_user_created(user_created, 
list_advice_id)\n\n # get the user's advices\n # before called the method\n advice_user = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n number_advice_user = len(advice_user)\n id_question_1 = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .get(robot_advices=advice_user[0][0])\n\n # call method\n # test if the user change\n # this answer to this question\n self.new_controller.save_advices_to_user(user_answer, old_question_id,\n user_created.id)\n\n # get the user's advices\n # after called the method\n advice_user_after = user.advices_to_user.values_list(\"id\").order_by(\"robot_advice_type\")\n number_advice_user_after = len(advice_user_after)\n id_question_2 = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .get(robot_advices=advice_user_after[0][0])\n\n self.assertEqual(id_question_1, id_question_2)\n self.assertNotEqual(advice_user, advice_user_after)\n self.assertEqual(number_advice_user, number_advice_user_after)", "def AddUser(parser, help_text):\n parser.add_argument('--user', help=help_text)", "def do_adduser(self, line):\n\t\tif isinstance(self.cl, Book):\n\t\t\tself.cl.add_contact()\n\t\telse:\n\t\t\tprint(\"To add contacts you need to open or create a book.\")", "def AddUser(self, usercount, user):\n for i in range(usercount):\n login = string.replace(user[i]['Login'], ' ', '')\n home = self.__homeprefix + login[0] + '/' + login\n action = 'userman -A ' + login + ' -p ' + user[i]['Passwd'] + ' -u ' + str(user[i]['UID']) + \\\n ' -g ' + str(user[i]['GID']) + ' -H ' + home + ' -s ' + user[i]['Shell'] \n output = commands.getstatusoutput(action)\n print output\n updatecount, update = self.__sqlData[\"UPDATE AccUser SET ToDo = 0 WHERE Login = '%s'\" % (login)]", "def manage_afterAdd(self, item, container) :\n item.manage_permission(Permissions.AddPortalContent,\n ['Manager'])\n item.manage_permission(Permissions.AccessContentsInformation,\n ['Member', 'Manager'])\n item.manage_permission(Permissions.View,\n ['Manager',])\n BaseTool.inheritedAttribute('manage_afterAdd')(self, item, container)", "def test_save_advices_to_user_first_answer(self):\n # create user\n user_created = self.create_new_user()\n\n # get the user's advices\n # before of called the method\n user = self.user.objects.get(id=user_created.id)\n advice_user_list_before = user.advices_to_user.values_list(\"id\")\\\n .order_by(\"robot_advice_type\")\n number_advices_before = len(advice_user_list_before)\n\n # call method\n user_answer_id = DiscussionSpace.objects.values_list(\"user_answer\")\\\n .filter(robot_advices__isnull=False).first()[0]\n old_question_id = DiscussionSpace.objects.values_list(\"robot_question\")\\\n .filter(robot_advices__isnull=False).first()[0]\n self.new_controller.save_advices_to_user(user_answer_id, old_question_id, user_created.id)\n\n # get the user's advices\n # after of called the method\n advice_user_list_after = user.advices_to_user.values_list(\"id\")\\\n .order_by(\"robot_advice_type\")\n number_advices_after = len(advice_user_list_after)\n\n self.assertNotEqual(advice_user_list_before, advice_user_list_after)\n self.assertNotEqual(number_advices_before, number_advices_after)", "def add_robot(self, robot):\n # ALTHOUGH THE DOCUMENTATION SAYS THAT MENU CHOICES CAN BE UPDATED,\n # THE PACKAGE DOES NOT ALLOW IT.\n # THUS THIS 'HACK' MUST BE DONE TO REFRESH THE UI WITH AN UPDATED LIST\n\n # Save the list of robot names\n new_list = []\n for name in self.__ui_controls.get('menu_robots').choices:\n new_list.append(name)\n # Add the new 
one\n new_list.append(robot.name)\n\n # Add robot to list\n self.__robots.append(robot)\n self.__selected_robot = len(self.__robots) - 1\n\n num_options = 4\n # Add spot for current robot settings\n self.__teachpanel.append([[0] * num_options] * robot.num_joints)\n\n # Add robot joint sliders\n i = 0\n for joint in robot.joints:\n if joint.qlim[0] == joint.qlim[1]:\n self.__teachpanel[self.__selected_robot][i] = [\n joint.qlim[0], joint.qlim[1],\n joint.theta, None]\n else:\n string = \"{:.2f} rad ({:.2f} deg)\".format(\n joint.theta, degrees(joint.theta))\n self.__teachpanel[self.__selected_robot][i] = [\n joint.qlim[0], joint.qlim[1],\n joint.theta, wtext(text=string)]\n i += 1\n\n # Refresh the caption\n self.__reload_caption(new_list)\n\n # Set it as selected\n self.__ui_controls.get('menu_robots').index = \\\n len(self.__robots) - 1\n\n # Place camera based on robots effective radius * 1.25\n if robot.robot is not None:\n radius = sum([abs(link.a) + abs(link.d) for link in robot.robot.links]) * 1.25\n self.scene.camera.pos = vector(radius, radius, radius) + get_pose_pos(robot.joints[1].get_pose())\n self.scene.camera.axis = vector(-radius, -radius, -radius)", "def createDeveloper(self):\n self.createUser()\n self.user.is_developer = True\n self.user.put()", "def add_user(self, user):\n\t\tself.users[user.username] = user", "def add(self, user_id, aspect_ids):\n for aid in aspect_ids: Aspect(self._connection, aid).addUser(user_id)", "def add_bot_user(self, effective_user, bot_id):\n self.execute(TABELLE['bot_users']['insert'], (bot_id, effective_user['id'], effective_user['language_code'],))", "def append_user_gate(self, user_gate):\n self.translation['g_sect'].append(user_gate)", "def add_admin(self, project_id, user_id):\n current_user = request.environ.get('repoze.who.identity')['user']\n user = controller_globals._get_user_from_email(current_user.email)\n\n # make sure we're actually the project lead\n if not self._current_user_leads_review(project_id):\n return \"<font color='red'>tsk, tsk. 
you're not the project lead, %s.</font>\" % user.fullname\n\n new_leader = Session.query(model.User).filter_by(id=user_id).one()\n review = self._get_review_from_id(project_id)\n review.leaders.append(new_leader)\n Session.add(review)\n Session.commit()\n\n redirect(url(controller=\"review\", action=\"admin\", project_id=project_id))", "def add_user_keywords(words, uid):\n profile = db.Profile.get(uid)\n for word in words:\n profile.keywords[word] = 100.0\n\n db.session.commit()", "def addUser(self, accountId, username, accesstype, **kwargs):\n #put your code here to implement this method\n raise NotImplementedError (\"not implemented method addUser\")", "def register_adhocs(self):\n aboutform = self.plugin['xep_0004'].makeForm('form', \"About SleekBot\")\n aboutform.addField('about', 'fixed', value= self.__doc__)\n self.plugin['xep_0050'].addCommand('about', 'About Sleekbot', aboutform)\n pluginform = self.plugin['xep_0004'].makeForm('form', 'Plugins')\n plugins = pluginform.addField('plugin', 'list-single', 'Plugins')\n for key in self.cmd_plugins:\n plugins.addOption(key, key)\n plugins = pluginform.addField('option', 'list-single', 'Commands')\n plugins.addOption('about', 'About')\n #plugins.addOption('config', 'Configure')\n self.plugin['xep_0050'].addCommand('plugins', 'Plugins', pluginform, self.form_plugin_command, True)", "def test_IdentityAdmin_responder_adds_user(self):\n responder = IdentityAdmin().locateResponder(AddUser.commandName)\n self.assertIsNotNone(responder)", "def add_user(self, user: User):\n raise NotImplementedError", "def add_admin(user):\n _add_owner(\n _lookup_user(user).biv_id,\n _add_model(pam.Admin())\n )", "def add_user(self, user_id, user_point, do_update=True):\n \n self.n_users += 1;\n self.user_ids.append(user_id);\n self.user_points.append(user_point);\n \n if do_update:\n self.update();", "def test_add_user(self):\n pass", "def add(self, PlugLead):\n\n self.check_conflicts(PlugLead)\n self.plugleads.append(PlugLead)", "def save_user(self):\n\n User.user_list.append(self)" ]
[ "0.65682614", "0.6457554", "0.6085961", "0.60834557", "0.6056406", "0.5792387", "0.5729461", "0.5710384", "0.5673903", "0.56236", "0.559872", "0.5590028", "0.55840814", "0.5548293", "0.5532204", "0.5513462", "0.54895794", "0.5481246", "0.5480195", "0.5451334", "0.54244864", "0.5388633", "0.53621686", "0.5360577", "0.53374493", "0.53373194", "0.53262293", "0.5320695", "0.53070426", "0.52830446" ]
0.73527414
0
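The positive document in the record above issues one INSERT per advice inside its loop. Below is a hypothetical bulk variant of the same Django-ORM operation — the model names (RobotAdviceType, RobotAdvices, AdvicesToUser) and lookups mirror the record, but the bulk_create rewrite is an illustrative optimisation, not the dataset's code, and assumes the same surrounding Django app context:

def add_advices_to_user_bulk(self, id_user):
    # same lookups as the original document
    advice_type_id = RobotAdviceType.objects.values_list("id").get(type="default")
    advices = RobotAdvices.objects.filter(robot_advice_type=advice_type_id)
    user = self.user.objects.get(id=id_user)
    # one bulk INSERT instead of one query per advice
    AdvicesToUser.objects.bulk_create(
        [AdvicesToUser(user=user, advice=advice) for advice in advices]
    )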
reads a slope text data file
def read_slope(fname): # http://milford.nserl.purdue.edu/weppdocs/usersummary/HillSlopeData.html meta = {} OFEs = [] meta['fname'] = fname meta['id'] = ''.join([L for L in fname if L in '0123456789']) fid = open(fname, 'r') lines = fid.readlines() lines = [L for L in lines if len(L) > 0] lines = [L.strip() for L in lines if L.lstrip()[0] != '#'] meta['dataver'] = lines[0] n = meta['nelem'] = int(lines[1]) line = lines[2].split() meta['azm'] = float(line[0]) meta['fwidth'] = float(line[1]) for i in range(3, 3+(n*2), 2): ofe = {} nslpts, slplen = lines[i].split() nslpts = ofe['nslpts'] = int(nslpts) ofe['slplen'] = float(slplen) line = lines[i+1].replace(',', '') points = [float(L) for L in line.split()] points = np.array(points).reshape(nslpts, 2) ofe['distance'] = points[:, 0] ofe['steepness'] = points[:, 1] OFEs.append(ofe) return meta,OFEs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_multinest_file(shower_name):\n\twith open(fit_dir+'fit_'+shower_name+'.out') as file:\n\t\tline = file.readline().split()\n\t\tslope = 1-float(line[0])\n\t\tslope_err_plus = -float(line[1])\n\t\tslope_err_minus = float(line[2])\n\t\ty_int = float(file.readline().split()[0])\n\treturn slope, slope_err_plus, slope_err_minus, y_int", "def get_data(dataf):\n with open(dataf) as f:\n label = []\n e_val = []\n for line in f:\n label.append(float(line.split()[1]))\n e_val.append(-1 * float(line.split()[0]))\n return label, e_val", "def read_slope():\n save = {}\n save2 = {}\n for head in ['samp', 'pi']:\n for inst in ['s', 'i']:\n for sign in ['n', 'p']:\n infile = data_dir + head + '_' + inst + '_' + sign + '_fitting_results'\n name = head + '_' + sign + '_' + inst\n [s_list, s_list2] = read_fitting_results(infile)\n save[name] = s_list\n save2[name] = s_list2\n\n for head in ['samp', 'pi']:\n for inst in ['s','i']:\n infile = data_dir + head + '_' + inst + '_center_fitting_results'\n [s_list, s_list2] = read_fitting_results(infile)\n name = head + '_' + inst + '_center'\n save[name] = s_list\n save2[name] = s_list2\n\n return [save, save2]", "def read_fitting_results(infile):\n try:\n data = mcf.read_data_file(infile)\n except:\n data = []\n\n s_list1 = []\n s_list2 = []\n for ent in data:\n atemp = re.split('\\s+', ent)\n if len(atemp) == 6:\n slope = atemp[1] + '+/-' + atemp[2]\n slope2 = atemp[4] + '+/-' + atemp[5]\n else:\n slope = atemp[2] + '+/-' + atemp[3]\n slope2 = atemp[5] + '+/-' + atemp[6]\n\n s_list1.append(slope)\n s_list2.append(slope2)\n\n return [s_list1, s_list2]", "def open_xy(data):\n twotheta, intensity = [], []\n with open(data) as f:\n for line in f:\n row = line.split()\n twotheta.append(row[0])\n intensity.append(row[1])\n xyarray = list(zip(twotheta, intensity))\n xyarray = np.asarray(xyarray)\n xyarray = xyarray.astype(np.float)\n return xyarray", "def read_data(self,filename):\n self.x = [] #Input values\n self.t = [] #Target values\n\n with open(filename, \"r\") as infile:\n lines = infile.readlines()\n self.n = len(lines)\n for line in lines:\n words = line.split()\n self.x.append(float(words[0]))\n self.t.append(float(words[1]))\n\n self.x = np.array(self.x)\n self.t = np.array(self.t)\n self.create_design_matrix()", "def read_data(self):\n data = np.genfromtxt(self.__file) # Planck SED\n self.__nu = 10.0**data[:,0]\n self.__nuF = 10.0**data[:,2]\n self.__err = 10.0**data[:,3]\n #self.__W = 10.0**data[:,4]\n self.__yerr = [ self.__nuF - self.__nuF / self.__err, \\\n self.__nuF * self.__err - self.__nuF ]\n self.__maxY = max( self.__nuF )\n self.__minY = min( self.__nuF )", "def read_points():\n\tpoints = []\n\tf = open(r'sample_points.txt')\n\twhile True:\n\t\tnstr = f.readline()\n\t\tif len(nstr) == 0:\n\t\t\tbreak\n\t\tline = nstr.rstrip('\\n').split(', ')\n\t\t# print(line)\n\n\t\tpoints.append((round(float(line[0]),3),round(float(line[1]),3))) \n\n\tprint(points)\n\treturn points", "def read_data(self, path):\n if self.data_format == 'twenty': \n length = 20\n else: raise ValueError(\"self.data_format = '%s' unknown.\" % \n self.data_format)\n data = []\n with open(path,'r') as f:\n for line in f:\n data.append([float(line[k:(k + length)]) for k in range(\n 0, len(line.strip('\\n')),length)])\n return np.array(data)", "def read_from(self, filename):\n self.x, self.y = np.loadtxt(filename, unpack=True, usecols=(0, 1))", "def loadpoly(file):\n out = []\n with open(file) as f:\n while True:\n try:\n density = float(f.readline())\n except ValueError:\n 
break\n l = []\n try:\n while True:\n line = f.readline().replace(\",\", \" \").split()\n l.append((float(line[0]), float(line[1])))\n except (ValueError, IndexError):\n out.append((l, density))\n if not out:\n raise ValueError(f\"Could not read '{file}'\")\n return out", "def readFile(self, fileName, d, addBias=False):\n\n dataSet = [];\n\n # open file and read lines from it, where each line contains a data point and label\n f = open(fileName, 'r');\n for line in f:\n # split line into list of strings, each string representing an element of the data point\n dataPt = (line.strip()).split(); \n \n # extract label for current data point\n label = int(dataPt[0]); \n if label == 0:\n label = -1; \n \n # create ndarray for data point with bias\n if addBias:\n fVector = np.zeros(d+1);\n fVector[-1] = 1;\n else:\n fVector = np.zeros(d);\n for i in range(1,len(dataPt)): \n fIndex, fVal = dataPt[i].split(':');\n fVector[int(fIndex)] = float(fVal);\n \n # add data point and label to data set\n dataSet.append((fVector,label));\n \n return dataSet;", "def readfile(self, path, filename):\n # The DataStudio software uses ISO-8859-1 encoding (especially for the degree sign in temperature files)\n file = open(path + filename, encoding=\"iso-8859-1\")\n rowlist = file.readlines()\n\n title = rowlist[0].strip(\"\\n\")\n labels = rowlist[1].strip(\"\\n\").split(sep=\"\\t\")\n\n data = np.zeros((len(rowlist)-2, 2))\n\n for i in range(2, len(rowlist)):\n columns = rowlist[i].split(sep=\"\\t\")\n data[i-2, 0] = float(columns[0].replace(\",\", \".\"))\n data[i-2, 1] = float(columns[1].replace(\",\", \".\"))\n\n return data, title, labels", "def loadData(name):\n inputs = []\n outputs = []\n with open(name) as file:\n data = file.readlines()[2:]\n lines = map(str.split, data)\n for line in lines:\n inputs.append(preparePatterns(line[:-1]))\n outputs.append(float(line[-1]))\n length = len(inputs[0])\n return inputs, outputs, length", "def read_text(filename):\n with open(filename, 'r') as f:\n com = f.readline()[0]\n wavelength, flux = np.loadtxt(filename, unpack=True,\n usecols=(0, 1), comments=com)\n return wavelength, flux", "def read_slope_aspect(filename):\n if not exists(filename):\n raise IOError('Missing file \"%s\"' % filename)\n \n ds = gdal.Open(str(filename))\n \n if ds is None:\n raise IOError('Unopenable file \"%s\"' % filename)\n \n slope = bytes2slope(ds.GetRasterBand(1).ReadAsArray())\n aspect = bytes2aspect(ds.GetRasterBand(2).ReadAsArray())\n \n return slope, aspect", "def load_get_landmark_was_pointed(path):\n with open(path) as f:\n rows = [rows.strip() for rows in f]\n \n \"\"\"Use the curly braces to find the start and end of the point data\"\"\" \n head = rows.index('{') + 1\n tail = rows.index('}')\n\n \"\"\"Select the point data split into coordinates\"\"\"\n raw_points = rows[head:tail]\n coords_set = [point.split() for point in raw_points]\n\n \"\"\"Convert entries from lists of strings to tuples of floats\"\"\"\n points = [tuple([float(point) for point in coords]) for coords in coords_set]\n return points", "def read_fault(filename):\n\n fault_x = []\n fault_y = []\n fault_file = open(filename)\n\n for segment in fault_file:\n x, y = segment.split()\n fault_x.append(float(x))\n fault_y.append(float(y))\n\n fault_file.close()\n\n return fault_x, fault_y", "def read_txt(path):\n mz = []\n i = []\n with open(path) as f:\n for line in f:\n line = line.split()\n mz.append(float(line[0]))\n i.append(float(line[1]))\n return mz, i", "def readData(self):\n f = open(self.filename)\n self.time = 
[]\n self.data = []\n for line in f:\n if line.find('BAD FLAG') > 0:\n self.badValue = float(line.split(':')[1].strip())\n if line.find('LONGITUDE') > 0:\n self.lon = line.split(':')[1].strip()\n if line.find('LATITUDE') > 0:\n self.lat = line.split(':')[1].strip()\n if len(line) > 6 and line[2] == '-' and line[6] == '-':\n parts = line.rsplit(None, 1)\n # data line\n timeStamp = datetime.datetime.strptime(parts[0], '%d-%b-%Y %H')\n t = timeArray.datetimeToEpochTime(timeStamp)\n self.time.append(t)\n val = float(parts[1])\n self.data.append(val)\n\n self.time = np.array(self.time)\n self.data = np.array(self.data)\n # remove bad values\n if self.badValue:\n goodIx = self.data != self.badValue\n self.time = self.time[goodIx]\n self.data = self.data[goodIx]\n self.fileIsRead = True", "def read_data(filename):\n data = np.genfromtxt(filename, delimiter=',', dtype=str)\n X = data[1:,2:].astype(np.float)\n y = data[1:,0]\n y[y==label0]='0'\n y[y==label1]='1'\n y[y==label2]='2'\n y=y.astype(np.float)\n return X, y", "def Read_Rcwa_Matlab(Path) : \n x,y=[],[]\n fs = open(Path, 'r') \n while 1: \n txt = fs.readline()\n if txt =='': \n break\n x.append(float(txt[0:25]))\n y.append(float(txt[29:-2])) \n fs.close()\n return x,y", "def read_pts_file(filename):\n lines = open(filename).read().splitlines()\n if int(lines[1:2][0].split('n_points:')[-1]) != 68:\n print ('No 68-landmark format founded')\n return None\n lines = lines[3:71]\n\n landmarks = []\n for l in lines:\n coords = l.split()\n landmarks.append([float(coords[0]), float(coords[1])])\n return landmarks", "def read_data(filename):\n data = np.genfromtxt(filename, delimiter=',', dtype = str)\n X = data[1:,2:].astype(np.float)\n y = data[1:,0]\n y[y==label0]='0' \n y[y==label1]='1' \n y[y==label2]='2'\n y.astype(np.float) \n return X, y", "def read_coefficients(path):\n columns = ['model index', 'intercept', 'slope']\n coeff_df = pd.read_csv(path, \n delim_whitespace=True, \n index_col=0,\n header=0, \n names=columns)\n coeff_df = coeff_df.dropna()\n coeff_df = coeff_df.apply(pd.to_numeric)\n return coeff_df", "def load_data():\n x = np.genfromtxt(X_FILE, usecols=(0, 1))\n y = np.genfromtxt(Y_FILE, usecols=(0))\n\n return x, y", "def parse_data(name):\n with open(name) as f:\n lines = f.read().splitlines()\n lines = filter(lambda x: x.split(' ')[0].isdigit(), lines)\n lx = [int(p.split(' ')[1]) for p in lines]\n ly = [int(p.split(' ')[2]) for p in lines]\n return lx, ly", "def read_points(from_file):\n points = []\n with open(from_file) as fp: \n for line in fp.readlines(): \n feats = line.strip().split()\n points.append((int(feats[0]), int(feats[1])))\n\n return points", "def datareader(self, path):\n\n f = open(path, 'r')\n data = f.read()\n data = data.split('\\n')\n data_tmp = []\n for idx in range(len(data)):\n if str(data[idx]).find('@data') >= 0:\n data_tmp = data[idx + 1:]\n break\n res = []\n for record in data_tmp:\n record = record.split(',')\n record = map(float, record)\n res.append(record)\n return res", "def load_vo_txt_raw(*, fname, sampling='1M'):\n # Set the day of month for time series depending on the MF sampling rate\n if sampling == '1M':\n day = 15\n elif sampling == '4M':\n day = 1\n\n # Positions given in degrees - co-latitude (0 to 180), longitude (\n df = pd.read_csv(fname, sep=\"\\s+\", header=14,\n names=[\"theta\", \"phi\", \"Year\", \"Month\", \"Time\", \"r\",\n \"Br\", \"Btheta\", \"Y\", \"sigma_r\", \"sigma_theta\",\n \"sigma_phi\", \"N_data\"], usecols=range(13))\n\n df[\"mjd2000\"] = mjd2000(df[\"Year\"], 
df[\"Month\"], day)\n df[\"dyear\"] = mjd_to_dyear(df[\"mjd2000\"], leap_year=True)\n df[\"X\"] = -df[\"Btheta\"] # -theta component\n df[\"Z\"] = -df[\"Br\"] # -radial component\n df.drop(columns=[\"Btheta\", \"Br\"], inplace=True)\n # To 00:00 on 1st or 15th each month\n # Multiplication by 10000 and 100 are needed to convert to datetime\n # (see documentation for pandas.datetime)\n df[\"date\"] = pd.to_datetime(df[\"Year\"]*10000+df[\"Month\"]*100+day,\n format=\"%Y%m%d\")\n\n return df" ]
[ "0.69637036", "0.62119883", "0.6132573", "0.6100909", "0.60567594", "0.60152274", "0.5964745", "0.59628475", "0.591921", "0.58862406", "0.5842147", "0.5832435", "0.58309996", "0.5762316", "0.5758946", "0.57343227", "0.5713052", "0.5700565", "0.5696346", "0.5679199", "0.5645833", "0.56449866", "0.561619", "0.5614471", "0.5609231", "0.5605018", "0.56007624", "0.55903625", "0.5576709", "0.55564994" ]
0.7150037
0
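The read_slope parser in the record above implies the WEPP hillslope layout: a version line, an element count, an azm/fwidth line, then two lines per OFE — point count plus slope length, followed by comma-separated distance/steepness pairs. A minimal usage sketch with a made-up single-OFE profile (file name and numbers are illustrative only):

sample = """97.3
1
270.0 10.0
2 50.0
0.0, 0.10 1.0, 0.25
"""
with open("slope_9001.slp", "w") as f:
    f.write(sample)

meta, ofes = read_slope("slope_9001.slp")
# meta["id"] == "9001", meta["nelem"] == 1, ofes[0]["slplen"] == 50.0
# ofes[0]["distance"] -> array([0., 1.]); ofes[0]["steepness"] -> array([0.1, 0.25])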
Create index, extracting substrings of length 'ln'
def __init__(self, t, ln): self.t = t self.ln = ln self.index = [] size = len(t) for i in range(len(t) - ln + 1): self.index.append((t[i:i + ln], i)) # add <substr, offset> pair self.index.sort() # sort pairs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def substrings(a, b, n):\n\n # TODO\n return []", "def retrieve_sub(s, n):\n subs = []\n for idx, char in enumerate(s):\n sub = char\n c = 1\n for next_char in s[idx + 1:]:\n if c >= n:\n break\n else:\n sub += next_char\n c += 1\n subs.append(sub)\n return [x for x in subs if len(x) == n]", "def gen_index_via_mod(s, n):\n if len(s) == 0:\n return 0\n\n return n % (len(s) + 1)", "def subStringMatchExact(target,key,length):\r\n index = find(target,key)\r\n if index < 0:\r\n return ()\r\n else:\r\n matches = subStringMatchExact(target[index+len(key):len(target)],key,length)\r\n index += (length - len(target))\r\n matches += (index,)\r\n print matches\r\n return matches", "def analyze_line(self, sentence, index):\r\n try:\r\n\r\n # generate a generic string from sentence\r\n sentence = generic_string(sentence)\r\n\r\n # insert all suffixes into the trie\r\n for sub_seq, offset in get_all_suffixes(sentence):\r\n comp = (index, offset)\r\n level = self.__trie\r\n\r\n for ch in sub_seq[:-1]:\r\n if level[ind(ch)] is None:\r\n level[ind(ch)] = empty_array()\r\n level = level[ind(ch)]\r\n\r\n if len(level[ind(END)]) < SUM_COMPLETE:\r\n level[ind(END)].append(comp)\r\n\r\n except MemoryError as e:\r\n raise e", "def match_all_cui(s,max_len = 10, Eterm_cui = Eterm_cui):\n if len(s) == 0: \n return []\n sub_label = np.zeros(len(s),dtype = 'int')\n location_term = {}\n i = 0\n while i < len(s):\n for j in range(max_len+1,0,-1):\n temp = ' '.join(s[i:i+j])\n if temp in Eterm_cui:\n sub_label[i:i+j] = 1\n location_term[i] = [Eterm_cui[temp]]\n break#matched maximum string, so break\n i += j\n output = []\n for i in range(len(s)):\n if sub_label[i] == 0:#no match\n output += [s[i]]\n elif i in location_term:\n for cui in location_term[i][: :-1]:\n output += [cui]\n return output", "def find_string(n, c_length, start=None):\n \n c = range(c_length)\n if start is None:\n i = get_minimum(n)\n else:\n i = start\n\n strings = [e for e in generate_strings(n, c)]\n while True:\n for x, s in enumerate(generate_strings(i, c)):\n if check_string(s, strings):\n return s\n\n if x % 1000000 == 0:\n print x\n i += 1\n print \"processing %s\" % i", "def get_substrings(string, n):\n substrings = set()\n\n for i in range(len(string) - n + 1):\n substrings.add(string[i:i+n])\n\n return [substring for substring in substrings]", "def buildindex4(invertedindex, index):\n sortedbycount = sorted(invertedindex.items(), key=lambda x: x[1][1])\n startindex = math.floor((2*len(sortedbycount))/100) + 1\n for word, count in sortedbycount[startindex:]:\n index[word] = count\n return", "def find_strings(line, result):\n\n idx = 0\n while idx != -1:\n idx = extract_string(line, idx, result)", "def convert_words_to_index(actual_text, dictionary,length):\n output_index=[]\n for words in actual_text:\n full_sentence = [dictionary[word] if word in dictionary else 0 for word in words]\n sen_len=len(full_sentence)\n if sen_len<length: # padding\n full_sentence.extend([0]*(length-sen_len))\n else:\n full_sentence=full_sentence[:length]\n output_index.append(full_sentence)\n return output_index", "def test_line_substring():\n for _x in range(100):\n l_str = random_str(50, 100)\n line = Line(l_str, random_str(10, 20), randint(1, 10000))\n # Try a single charater\n c_idx = randint(0, len(l_str)-1)\n sub_line = line[c_idx]\n assert sub_line == l_str[c_idx]\n assert isinstance(sub_line, Line)\n assert sub_line.file == line.file\n assert sub_line.number == line.number\n # Try a range\n s_idx = randint(0, (len(l_str) // 2) - 1)\n e_idx = 
randint(len(l_str) // 2, len(l_str) - 1)\n sub_line = line[s_idx:e_idx]\n assert sub_line == l_str[s_idx:e_idx]\n assert sub_line.file == line.file\n assert sub_line.number == line.number", "def shorten(strings, n):\n return sorted(strings, key=lambda x: x[n])", "def build_sublemma_index(self):\n terms = []\n with open(self.path_lemma_terms, 'r', encoding='utf8') as f:\n for line in f:\n term = line.strip('\\n')\n terms.append(term)\n terms = set(terms)\n for term in terms:\n term_list = term.split('_')\n subterms = get_sublists(term_list)\n self.sublemma_index[term] = [t for t in subterms if t in terms]", "def index(self, sub, start=0):\n br = \"([{\"[\")]}\".index(sub)]\n count = 0\n for i in range(start, len(self.string)):\n char = self.string[i]\n if char == br:\n count += 1\n elif char == sub:\n if count > 0:\n count -= 1\n else:\n return i\n raise SyntaxError(\"Bad string\")", "def lc_index(*args):\n index = []\n x = check_lc_data(args[0])\n i = 0\n for line in args[0].Data.LCData.lc_data:\n i += 1\n if line != x[i - 1]:\n index.append(0)\n elif line == x[i - 1]:\n index.append(i)\n return index", "def get_idx_from_sent(sent, word_index, max_l, pad):\n x = [0] * pad # left padding\n for word in sent:\n if word in word_index: # FIXME: skips unknown words\n if len(x) < max_l: # truncate long sent\n x.append(word_index[word])\n else:\n break\n # len(x) includes pad\n rpad = [0] * max(0, max_l + 2 * pad - len(x)) # right padding\n return x + rpad", "def findLongestCommonSubstringManyStrings(listOfStrings):", "def non_repeating_substring(str1: str) -> int:\n max_length = 0\n seen = {}\n window_start = 0\n for window_end in range(len(str1)):\n right_char = str1[window_end]\n if right_char in seen:\n window_start = max(window_start, seen[right_char] + 1)\n seen[right_char] = window_end\n max_length = max(max_length, window_end - window_start + 1)\n return max_length", "def longwords_Li_Comp(strings):\n # write your code here\n return [string for string in strings if len(string)>4]", "def longwords_Li_Comp(strings):\n return [string for string in strings if len(string)>4 ]", "def LPSubsequence(str):\n if str is None or len(str) == 0:\n return \"\"\n\n sl = len(str) # sl is string length\n\n # Create a table to store results of subproblems\n L = [[0 for x in range(sl)] for x in range(sl)]\n\n # Create palindrome of 1 for each character in input string (a)\n for i in range(sl):\n L[i][i] = str[i]\n\n # cl is check string length\n for cl in range(2, sl + 1):\n for start in range(sl - cl + 1):\n stop = start + cl - 1\n first = str[start]\n last = str[stop]\n if first == last and cl == 2:\n L[start][stop] = first * 2\n elif first == last:\n L[start][stop] = first + L[start + 1][stop - 1] + last\n else:\n L[start][stop] = LPSubsequenceLongest(\n L[start][stop - 1], L[start + 1][stop])\n\n return L[0][sl - 1]", "def checkUsernameSequences(n, ch, url, tableName, minLen = 1, maxLen = 2):\n if(minLen == 1):\n strLst = ch\n # assumes all of ch is a match\n else:\n strLst = []\n for k in range(minLen, maxLen + 1):\n lst = generateSubSequences(k, ch)\n sublst = [x for x in lst if userNameLike(x, url, tableName)]\n# list comprehensions with conditions:\n# https://stackoverflow.com/questions/6475314/python-for-in-loop-preceded-by-a-variable\n strLst += sublst\n return strLst", "def kmp_search(full_str, sub_str):\n n, m = len(full_str), len(sub_str)\n result = []\n pi = get_partial_match(sub_str)\n begin, matched = 0, 0\n while begin <= (n - m):\n if matched < m and full_str[begin + matched] == 
sub_str[matched]:\n matched += 1\n if matched == m:\n result.append(begin)\n else:\n if matched == 0:\n begin += 1\n else:\n begin += (matched - pi[matched - 1])\n matched = pi[matched - 1]\n return result", "def build_subtoken_index(self):\n terms = []\n with open(self.path_token_terms, 'r', encoding='utf8') as f:\n for line in f:\n term = line.strip('\\n')\n terms.append(term)\n terms = set(terms)\n for term in terms:\n term_list = term.split('_')\n subterms = get_sublists(term_list)\n self.subtoken_index[term] = [t for t in subterms\n if '_'.join(t) in terms]", "def substrings(a, b, n):\n identical_subs = []\n sublist = []\n temp = ''\n\n # Fill buffer\n for i in range(len(a) - (n - 1)):\n temp = a[i:(i + n)]\n sublist.append(temp)\n\n # Remove duplicates\n sublist = list(set(sublist))\n\n # Add to list\n for sub in sublist:\n if b.count(sub) > 0:\n identical_subs.append(sub)\n\n return identical_subs", "def LPSubsequenceLength(str):\n return len(LPSubsequence(str))", "def find_sub_string_index(self, sub):\n try:\n return self.__dna.index(sub)\n except ValueError:\n raise ValueError", "def lrs(st):\n\n length, shifts = __lrs(st.root, 0)\n result = [length, []]\n for shift in shifts:\n lrs_string = st.text[shift[0]-length:shift[0]]\n result[1].append((lrs_string, [x-length for x in shift]))\n return result", "def substrCount(n, s):\r\n lst = []\r\n character = s[0]\r\n count = 1\r\n result = 0\r\n for i in range(1, n):\r\n if s[i] == character:\r\n count += 1\r\n else:\r\n lst.append((character, count))\r\n character = s[i]\r\n count = 1\r\n lst.append((character, count))\r\n\r\n for tpl in lst:\r\n \"\"\"calculate all possible palindromes created from same characters that are close to each other\r\n E.g: aaa => 6 possibles (3*4//2 = 6)\r\n \"\"\"\r\n result += tpl[1] * (tpl[1] + 1) // 2\r\n\r\n for i in range(1, len(lst) - 1):\r\n if lst[i - 1][0] == lst[i + 1][0] and lst[i][1] == 1:\r\n \"\"\"\r\n check palindromes created from 3 tuples with a different character in between\r\n \"\"\"\r\n result += min(lst[i - 1][1], lst[i + 1][1])\r\n\r\n return result" ]
[ "0.58004004", "0.57035714", "0.5630862", "0.5502177", "0.5499872", "0.5449966", "0.54482114", "0.537926", "0.53484404", "0.53246045", "0.5313119", "0.5283813", "0.5277651", "0.5237471", "0.51850224", "0.51836663", "0.5134227", "0.51330423", "0.51146626", "0.507412", "0.5060999", "0.50456077", "0.50407207", "0.5028768", "0.50189555", "0.5010713", "0.5009411", "0.50051314", "0.49885315", "0.49618328" ]
0.6595219
0
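The constructor in the record above only builds the sorted (substring, offset) list; exact-match lookup over it is a binary search. Below is a sketch of the usual companion query — written as a standalone function since the record shows only __init__, so the name and signature are illustrative:

import bisect

def query(index_obj, p):
    # locate the first pair whose substring equals p's first ln
    # characters; -1 sorts before any real offset, so bisect_left
    # lands on the leftmost match
    key = p[:index_obj.ln]
    i = bisect.bisect_left(index_obj.index, (key, -1))
    hits = []
    while i < len(index_obj.index) and index_obj.index[i][0] == key:
        hits.append(index_obj.index[i][1])
        i += 1
    return hits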
Load classes from a given excel_data string containing three tab-separated columns per line. Load the schedule into a global variable for later access.
def load_classes(excel_data, slot_count): return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_class_list():\r\n try:\r\n firstLine = True #keeping track of the first line in the csv file (the header)\r\n index = 0\r\n if os.access(\"mySchedule.csv\", os.F_OK): #If the file exists\r\n f = open(\"mySchedule.csv\")\r\n for row in csv.reader(f):\r\n if firstLine:\r\n firstLine = False\r\n continue #skip first line\r\n classStringList.insert(index, row) #load file to classString list and to classes list\r\n classes.insert(index, Class(row[1], Day(row[0]), formatFromCSV(row[2]), formatFromCSV(row[3])))\r\n index += 1\r\n f.close()\r\n except Exception as e:\r\n print(\"Exception found:\" + e)", "def Excel_Load_Data( self, ExcelFilename ):\n pass", "def import_schedule(user, classes, start_date):\n\n for cls in classes:\n create_event(user, cls, start_date)", "def read_schedules(use, x):\n # read schedules from excel file\n occ = [x['Weekday_1'].values[:24], x['Saturday_1'].values[:24], x['Sunday_1'].values[:24]]\n el = [x['Weekday_2'].values[:24], x['Saturday_2'].values[:24], x['Sunday_2'].values[:24]]\n dhw = [x['Weekday_3'].values[:24], x['Saturday_3'].values[:24], x['Sunday_3'].values[:24]]\n month = x['month'].values[:12]\n\n if use == \"INDUSTRIAL\":\n pro = [x['Weekday_4'].values[:24], x['Saturday_4'].values[:24], x['Sunday_4'].values[:24]]\n else:\n pro = [np.zeros(24), np.zeros(24), np.zeros(24)]\n\n # read area per occupant\n area_per_occupant = x['density'].values[:1][0]\n\n return occ, el, dhw, pro, month, area_per_occupant", "def load_classes():\n \tfnm = \"../datasets/bbc/bbc.classes\"\n \tconverters = { \"docid\": toInt, \"docid\":toInt}\n \tX = pandas.read_table(fnm, header=None, sep=\" \", skiprows=4, comment=\"%\", names= [\"docid\", \"classid\"], converters=converters)\n \treturn X", "def nodes_data_excel_parser(excel_path,**kwargs):\n excel_parser_engine = kwargs.get(\"engine\",\"xlrd\")\n\n # Check if excel file exists\n if not excel_path or not os.path.isfile(excel_path):\n raise FileNotFoundError(\n \"Excel data file {} not found.\".format(excel_path)\n )\n\n xls = pd.ExcelFile(excel_path,engine=excel_parser_engine)\n\n try:\n # TODO for sheet in xls.sheet_names:\n # nodes_data[sheet] = xls.parse(sheet)\n nodes_data = {\n \"buses\": xls.parse(\"buses\").replace({np.nan:None}),\n \"commodity_sources\": xls.parse(\"commodity_sources\").replace({np.nan:None}),\n \"transformers\": xls.parse(\"transformers\").replace({np.nan:None}),\n \"transformers_chp\": xls.parse(\"transformers_chp\").replace({np.nan:None}),\n \"renewables\": xls.parse(\"renewables\").replace({np.nan:None}),\n \"demand\": xls.parse(\"demand\").replace({np.nan:None}),\n \"storages\": xls.parse(\"storages\").replace({np.nan:None}),\n \"powerlines\": xls.parse(\"powerlines\").replace({np.nan:None}),\n \"timeseries\": xls.parse(\"time_series\").replace({np.nan:None}),\n \"financial\":xls.parse(\"financial\").replace({np.nan:None})\n }\n except KeyError:\n err_msg = \"Excel file must contains: [buses, commodity_sources, transformers, renewables, demand, storages, powerlines, financial and timeseries].\\n\\\n The following sheets are found: {}\".format(xls.sheet_names)\n raise Exception(err_msg)\n\n # set datetime index\n nodes_data[\"timeseries\"].set_index(\"timestamp\", inplace=True)\n nodes_data[\"timeseries\"].index = pd.to_datetime(\n nodes_data[\"timeseries\"].index\n )\n\n logger.info(\"Data from Excel file {} imported in as nodes data.\".format(excel_path))\n\n return nodes_data", "def ParseSchedule(self, schedule_data):\n\n if self.verbose:\n\n print \"Parsing schedule data\"\n \n\n # 
Finds internal identifier for the schedule.\n if self._schedule_data.has_key('name'):\n\n self.name = schedule_data['name']\n\n if self.verbose:\n\n print \"Schedule name is '%s'\\n\" % self.name\n\n\n # Loads the appropriate services for loading a model\n # Such as the model_container\n if self._schedule_data.has_key('services'):\n\n services = schedule_data['services']\n\n self._ParseServices(services)\n\n # This Loads the default options for registered solvers\n # along with the service that it requires. In the case\n # of Heccer, the default required service is the model\n # container.\n if self._schedule_data.has_key('solverclasses'):\n\n solvers = schedule_data['solverclasses']\n\n self._ParseSolvers(solvers)\n \n\n\n # Set of options that define how to run this schedule.\n if self._schedule_data.has_key('apply'):\n \n apply_parameters = schedule_data['apply']\n \n self._ParseAppliedParameters(apply_parameters)\n\n\n # This retrieves the model identifier from the model that\n # was loaded via services and the type of solver to use.\n # for instance if the root identifier of the loaded model is\n # called \"/soma\" then this is the modelname and the solver is set\n # to look for this symbol.\n if self._schedule_data.has_key('models'):\n\n models = schedule_data['models']\n\n self._ParseModelParameters(models)\n\n\n # \n if self._schedule_data.has_key('application_classes'):\n\n application_classes = schedule_data['application_classes']\n\n self._ParseApplicationClasses(application_classes)\n\n\n # Set of options for configuring analyzers\n if self._schedule_data.has_key('analyzers'):\n \n self._analyzers = schedule_data['analyzers']\n\n \n # Here we parse for external simulation objects that generate input into\n # the model. \n if self._schedule_data.has_key('inputclasses'):\n\n inputclasses = schedule_data['inputclasses']\n \n inputs = None\n # Key contains the attributes for the inputclass objects that\n # were loaded.\n if self._schedule_data.has_key('inputs'):\n\n inputs = schedule_data['inputs']\n\n\n self._ParseInputs(inputclasses, inputs)\n\n\n # Specifies the output objects to use.\n if self._schedule_data.has_key('outputclasses'):\n\n outputclasses = schedule_data['outputclasses']\n\n \n # Attributes for the outputclass objects that were loaded.\n\n outputs = None\n \n if self._schedule_data.has_key('outputs'):\n \n outputs = schedule_data['outputs']\n\n self._ParseOutputs(outputclasses, outputs)", "def load_schedule(self, schedule):\n for sched in schedule:\n assert type(sched[\"num_batches\"]) == int\n if sched[\"weights\"] is not None: # schedule specificies specific variables for trainable vars\n assert type(sched[\"weights\"]) == list\n else: # scalar is used\n sched[\"weights\"] = self.get_trainable_variable_names()\n target_len = len(sched[\"weights\"])\n sched[\"weight_lr\"] = self.check_schedule_type(sched[\"weight_lr\"], float, target_len)\n sched[\"decay_steps\"] = self.check_schedule_type(sched[\"decay_steps\"], int, target_len)\n sched[\"decay_rate\"] = self.check_schedule_type(sched[\"decay_rate\"], float, target_len)\n sched[\"staircase\"] = self.check_schedule_type(sched[\"staircase\"], bool, target_len)", "def loadData( self, fileName, colClass, feaRowStart = None, \n feaRowEnd = None, delimiter=','):\n self.dF = pd.read_csv( fileName, delimiter = delimiter)\n if feaRowStart is not None and feaRowEnd is not None:\n self.feature = self.dF.iloc[feaRowStart:feaRowEnd, 0:colClass-1]\n self.Class = self.dF.loc[feaRowStart:feaRowEnd, colClass]\n else:\n 
self.feature = self.dF.iloc[:,1:29]\n self.Class = self.dF.Class \n #self.feature = self.dF.iloc[:, 0:self.dF.shape[1]-1]\n #self.Class = self.dF.loc[:, self.dF.shape[1]-1:self.dF.shape[1]] \n \n self.dataConvertToNumpy()", "def load(cls):\n \n # Loop through procedures and build patient procedure lists:\n procs = csv.reader(file(PROCEDURES_FILE,'U'),dialect='excel-tab')\n header = procs.next() \n for proc in procs:\n cls(dict(zip(header,proc))) # Create a procedure instance ", "def load_ztf_classes():\n # SQL statement\n sql_stmt = \\\n \"\"\"\n SELECT\n cls.id AS ObjectClassID,\n cls.name AS ObjectClassName\n FROM \n public.class AS cls\n ORDER BY ObjectClassID;\n \"\"\"\n\n # Run query and return DataFrame\n with db_engine.connect() as conn:\n df = pd.read_sql(sql_stmt, conn)\n\n # Map column names (ZTF database has case insensitive column names)\n mapper = {col.lower() : col for col in columns_cls}\n df.rename(mapper=mapper, axis='columns', inplace=True)\n\n return df", "def from_spreadsheet(spreadsheet, framework):\n\n # Basically the strategy is going to be\n # 1. Read in all of the stuff - pops, transfers, interpops can be directly added to Data\n # 2. Read in all the other TDVE content, and then store it in the data specs according to the variable type defined in the Framework\n # e.g. the fact that 'Alive' is a Characteristic is stored in the Framework and Data but not in the Databook. So for example, we read in\n # a TDVE table called 'Alive', but it needs to be stored in data.specs['charac']['ch_alive'] and the 'charac' and 'ch_alive' are only available in the Framework\n\n import openpyxl\n\n self = ProjectData(framework=framework)\n\n if not isinstance(spreadsheet, sc.Spreadsheet):\n spreadsheet = sc.Spreadsheet(spreadsheet)\n\n workbook = openpyxl.load_workbook(spreadsheet.tofile(), read_only=True, data_only=True) # Load in read-only mode for performance, since we don't parse comments etc.\n validate_category(workbook, \"atomica:databook\")\n\n # These sheets are optional - if none of these are provided in the databook\n # then they will remain empty\n self.transfers = list()\n self.interpops = list()\n\n for sheet in workbook.worksheets:\n\n if sheet.title.startswith(\"#ignore\"):\n continue\n\n if sheet.title == \"Population Definitions\":\n try:\n self._read_pops(sheet)\n except Exception as e:\n message = 'An error was detected on the \"Population Definitions\" sheet'\n raise Exception(\"%s -> %s\" % (message, e)) from e\n elif sheet.title == \"Transfers\":\n try:\n self._read_transfers(sheet)\n except Exception as e:\n message = 'An error was detected on the \"Transfers\" sheet'\n raise Exception(\"%s -> %s\" % (message, e)) from e\n elif sheet.title == \"Interactions\":\n try:\n self._read_interpops(sheet)\n except Exception as e:\n message = 'An error was detected on the \"Interactions\" sheet'\n raise Exception(\"%s -> %s\" % (message, e)) from e\n elif sheet.title == \"Metadata\":\n continue\n else:\n self.tdve_pages[sheet.title] = []\n tables, start_rows = read_tables(sheet)\n for table, start_row in zip(tables, start_rows):\n\n try:\n tdve = TimeDependentValuesEntry.from_rows(table)\n except Exception as e:\n message = 'Error on sheet \"%s\" while trying to read a TDVE table starting on row %d' % (sheet.title, start_row)\n raise Exception(\"%s -> %s\" % (message, e)) from e\n\n # If the TDVE is not in the Framework, that's a critical stop error, because the framework needs to at least declare\n # what kind of variable this is - otherwise, we don't know the allowed 
units and cannot write the databook back properly\n try:\n spec, item_type = framework.get_variable(tdve.name)\n except NotFoundError:\n message = 'Error on sheet \"%s\" while reading TDVE table \"%s\" (row %d). The variable was not found in the Framework' % (sheet.title, tdve.name, start_row)\n raise Exception(message)\n\n code_name = spec.name\n tdve.allowed_units = [framework.get_databook_units(code_name)]\n tdve.pop_type = spec[\"population type\"]\n\n # Migrate the units (20181114)\n # All TimeSeries instances in databook TDVE tables should have the same units as the allowed units\n # However, if the user entered something that is wrong, we need to keep it and alert them during validation\n # Therefore, we can migrate as long as the _old_ units made sense\n for ts in tdve.ts.values():\n if ts.units != tdve.allowed_units[0]:\n if not ts.units or ts.units.strip().lower() == tdve.allowed_units[0].strip().split()[0].strip().lower():\n ts.units = tdve.allowed_units[0]\n\n if not spec[\"databook page\"]:\n logger.warning('A TDVE table for \"%s\" (%s) was read in and will be used, but the Framework did not mark this quantity as appearing in the databook', tdve.name, code_name)\n tdve.comment = spec[\"guidance\"]\n\n if code_name in self.tdve:\n raise Exception('A TDVE table for \"%s\" (%s) appears more than once in the databook. The first table was on sheet \"%s\" and the first duplicate table is on sheet \"%s\" starting on row %d' % (tdve.name, code_name, [k for k, v in self.tdve_pages.items() if code_name in v][0], sheet.title, start_row))\n\n self.tdve[code_name] = tdve\n # Store the TDVE on the page it was actually on, rather than the one in the framework. Then, if users move anything around, the change will persist\n self.tdve_pages[sheet.title].append(code_name)\n\n tvals = set()\n for tdve in self.tdve.values():\n tvals.update(tdve.tvec)\n for tdc in self.transfers + self.interpops:\n tvals.update(tdc.tvec)\n self.tvec = np.array(sorted(tvals))\n\n return self", "def __init__(self, filename=None, filetype=None, instrument=None):\n if filename:\n if instrument == 'Element':\n skipfooter = 4\n header = 1\n drop = 9\n elif instrument == 'Agilent':\n skipfooter = 4\n header = 3\n drop = 3\n else:\n skipfooter = 0\n header = 0\n drop = 0\n\n if filetype == 'xlsx':\n pwd = os.getcwd()\n os.chdir(os.path.dirname(filename))\n self.imported = pd.ExcelFile(filename)\n self.data = self.imported.parse(\n 0, index_col=0, skipfooter=skipfooter, header=header)\n self.data = self.data.drop(self.data.index[:drop], axis=0)\n os.chdir(pwd)\n # TODO xlsx doesnt work with agilent type\n elif filetype == 'csv':\n pwd = os.getcwd()\n os.chdir(os.path.dirname(filename))\n self.data = pd.read_csv(filename, sep=',', index_col=0, skipfooter=skipfooter,\n header=header, engine='python')\n os.chdir(pwd)\n elif filetype == 'asc':\n pwd = os.getcwd()\n os.chdir(os.path.dirname(filename))\n self.data = pd.read_csv(filename, sep='\\t', index_col=0, skipfooter=skipfooter,\n header=header, engine='python')\n self.data = self.data.drop(self.data.index[:drop], axis=0)\n self.data.dropna(axis=1, how='all', inplace=True)\n self.data = self.data.apply(pd.to_numeric, errors='coerce')\n os.chdir(pwd)\n else:\n warnings.warn('File type not supported.')\n\n self.data.index = self.data.index.astype('float32')\n self.time = self.data.index\n self.elements = list(map(elem_resolution, self.data.columns))\n self.data.columns = self.elements\n\n self.srms = pd.ExcelFile('./SRM.xlsx').parse(index_col=0)\n self.sum_koeficients = 
pd.ExcelFile(\n './default_sum_koef.xlsx').parse(0, index_col=0, header=None).to_dict()[1]\n\n self.srm = None\n self.iolite = None\n self.names = None\n self.internal_std = None\n self.ablation_time = None\n\n self.laser_off = []\n self.laser_on = []\n self.skip = {'bcg_start': 0,\n 'bcg_end': 0,\n 'sample_start': 0,\n 'sample_end': 0} # time in seconds to skip from each bcg and sample\n\n self.filter_line = None\n self.starts = None\n self.ends = None\n self.bcg = None\n self.average_peaks = None\n self.ratio = None\n self.quantified = None\n self.lod = None\n self.correction_elements = None\n self.corrected_IS = None\n self.corrected_SO = None\n\n self.dx = None\n self.dy = None\n self.maps = {}\n self.qmaps = {}\n\n self.regression_values = {}\n self.regression_equations = {}", "def setUpClass(cls):\n cls.w = pd.read_csv(_df.WEATHER_TWO_WEEK, index_col='time',\n parse_dates=True)", "def import_heat_data(self):\n worksheet = (\n xlrd.open_workbook(filename=self.filename_heat).sheet_by_index(0)\n ) \n self.exh.corrected_reading = np.array(worksheet.col_values(0,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx)) \n self.exh.datum = worksheet.cell_value(2,4) # manometer datum (in) \n self.exh.pressure_drop = ( (self.exh.corrected_reading -\n self.exh.datum) * 2. * self.H2O_kPa ) \n # pressure drop across heat exchanger (kPa)\n self.cummins.torque = np.array(worksheet.col_values(1,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx))\n self.exh.T_inlet_array = np.array(worksheet.col_values(2,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx)) \n self.exh.T_outlet_array = np.array(worksheet.col_values(3,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx)) \n self.cool.T_inlet_array = np.array(worksheet.col_values(5,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx)) \n self.cool.T_outlet_array = np.array(worksheet.col_values(4,\n start_rowx=self.start_rowx, end_rowx=self.end_rowx))", "def load(cls):\n \n # Loop through problems and build patient problem lists:\n probs = csv.reader(file(PROBLEMS_FILE,'U'),dialect='excel-tab')\n header = probs.next() \n for prob in probs:\n cls(dict(zip(header,prob))) # Create a problem instance ", "def update_classes_from_schedule(schedule):\n\n\t# load all student, courses and room -information into variables\n\tallcourses = create_courses()\n\tchambers = create_rooms()\n\tstudent_list = create_students()\n\n\t# create student groups\n\tallcourses, student_list = create_student_groups(allcourses, student_list)\n\n\t# for each activity in new schedule\n\tfor roomlock, activity in schedule.items():\n\n\t\t# if it\"s not an empty roomlock\n\t\tif activity is not None:\n\n\t\t\t# if lecture\n\t\t\tif \"lecture\" in activity:\n\n\t\t\t\t# split text\n\t\t\t\tsplittext = activity.split(\" lecture \")\n\n\t\t\t\t# assign class\n\t\t\t\ttype_class = \"lecture\"\n\n\t\t\t\t# split text and determine group\n\t\t\t\tcoursename = splittext[0]\n\t\t\t\tgroup = 0\n\n\t\t\t# same for seminar\n\t\t\tif \"seminar\" in activity:\n\t\t\t\tsplittext = activity.split(\" seminar \")\n\t\t\t\ttype_class = \"seminar\"\n\t\t\t\tcoursename = splittext[0]\n\t\t\t\tgroup = int(float(splittext[1]))\n\n\t\t\t# and practical\n\t\t\tif \"practical\" in activity:\n\t\t\t\tsplittext = activity.split(\" practical \")\n\t\t\t\ttype_class = \"practical\"\n\t\t\t\tcoursename = splittext[0]\n\t\t\t\tgroup = int(float(splittext[1]))\n\n\t\t\t# for each course in course-list\n\t\t\tfor course in allcourses:\n\n\t\t\t\t# find adjusted course\n\t\t\t\tif coursename == 
course.name:\n\n\t\t\t\t# update course class with new activity\n\t\t\t\t\tcourse.update_schedule(roomlock, (coursename + \" \" + type_class), group)\n\n\t\t\t\t\t# update room class with new activity\n\t\t\t\t\troom, timelock = translate_roomlock(roomlock)\n\t\t\t\t\tchambers[room].add_booking(timelock)\n\n\t\t\t\t\t# update student class with new activity\n\t\t\t\t\tif type_class == \"lecture\":\n\t\t\t\t\t\tfor student in student_list:\n\t\t\t\t\t\t\tif course.name in student.courses:\n\t\t\t\t\t\t\t\tstudent.update_student_schedule(timelock, course.name)\n\n\t\t\t\t\tif type_class == \"seminar\":\n\t\t\t\t\t\tfor student in student_list:\n\t\t\t\t\t\t\tif course.name in student.courses:\n\t\t\t\t\t\t\t\tif student.last_name in course.seminargroups[group]:\n\t\t\t\t\t\t\t\t\tstudent.update_student_schedule(timelock, course.name)\n\n\t\t\t\t\tif type_class == \"practical\":\n\t\t\t\t\t\tfor student in student_list:\n\t\t\t\t\t\t\tif course.name in student.courses:\n\t\t\t\t\t\t\t\tif student.last_name in course.practicalgroups[group]:\n\t\t\t\t\t\t\t\t\tstudent.update_student_schedule(timelock, course.name)\n\n\treturn allcourses, student_list, chambers", "def import_schedule_section(self, filename_suffix='seq', replace_commas=True, filename_prefix=None):\n filename_prefix = self.model_name if filename_prefix is None else filename_prefix\n schedule = []\n n = self.nthemes\n with open('%s/%s.%s' % (self.model_path, filename_prefix, filename_suffix)) as f:\n for lnum, l in enumerate(f):\n if re.match('^\\s*(;|$)', l): continue # skip comments and blank lines\n l = l.lower().strip().partition(';')[0].strip() # strip leading whitespace and trailing comments\n t = re.split('\\s+', l)\n if len(t) != n + 5: break\n dtype_key = tuple(t[:n])\n age = int(t[n])\n area = float(t[n+1].replace(',', '')) if replace_commas else float(t[n+1])\n acode = t[n+2]\n period = int(t[n+3])\n etype = t[n+4] if len(t) >= n+4 else ''\n schedule.append((dtype_key, age, area, acode, period, etype))\n if area <= 0: print('area <= 0', l)\n return schedule", "def load_events_classes(fhandle: TextIO) -> list:\n reader = csv.reader(fhandle, delimiter=\",\")\n headers = next(reader)\n class_ids = headers[3:]\n return class_ids", "def dynamic_import_scheduler(module):\n model_class = dynamic_import(module, SCHEDULER_DICT)\n assert issubclass(\n model_class, SchedulerInterface\n ), f\"{module} does not implement SchedulerInterface\"\n return model_class", "def load_data(filename):\n\n # 'data' stores a list of 'InstanceData's as values.\n data = []\n\n # If this is training data, then 'labels' is a dict that contains instance_ids as keys and labels as values.\n training = False\n if filename.find('train') != -1:\n training = True\n\n if training:\n labels = dict()\n\n num_exercises = 0\n print('Loading instances...')\n\n with open(filename, 'rt') as f:\n for line in f:\n line = line.strip()\n\n # If there's nothing in the line, then we're done with the exercise. 
Print if needed, otherwise continue\n if len(line) == 0:\n num_exercises += 1\n if num_exercises % 100000 == 0:\n print('Loaded ' + str(len(data)) + ' instances across ' + str(num_exercises) + ' exercises...')\n\n # If the line starts with #, then we're beginning a new exercise\n elif line[0] == '#':\n list_of_exercise_parameters = line[2:].split()\n instance_properties = dict()\n for exercise_parameter in list_of_exercise_parameters:\n [key, value] = exercise_parameter.split(':')\n if key == 'countries':\n value = value.split('|')\n elif key == 'days':\n value = float(value)\n elif key == 'time':\n if value == 'null':\n value = None\n else:\n assert '.' not in value\n value = int(value)\n instance_properties[key] = value\n\n # Otherwise we're parsing a new Instance for the current exercise\n else:\n line = line.split()\n if training:\n assert len(line) == 7\n else:\n assert len(line) == 6\n assert len(line[0]) == 12\n\n instance_properties['instance_id'] = line[0]\n\n instance_properties['token'] = line[1]\n instance_properties['part_of_speech'] = line[2]\n\n instance_properties['morphological_features'] = dict()\n for l in line[3].split('|'):\n [key, value] = l.split('=')\n if key == 'Person':\n value = int(value)\n instance_properties['morphological_features'][key] = value\n\n instance_properties['dependency_label'] = line[4]\n instance_properties['dependency_edge_head'] = int(line[5])\n if training:\n label = float(line[6])\n labels[instance_properties['instance_id']] = label\n data.append(InstanceData(instance_properties=instance_properties))\n\n print('Done loading ' + str(len(data)) + ' instances across ' + str(num_exercises) +\n ' exercises.\\n')\n\n if training:\n return data, labels\n else:\n return data", "def load_submission_schedule():\n logger.info('Loading submission window schedule data')\n load_submission_window_schedule()", "def load_data(data_path='./data/'):\n # Electricity prices for 1.1.2014 - 14.10.2016 [e/MWh]\n prices = pd.read_csv(data_path + 'prices.csv', sep=\";\", decimal=\",\", names=['ts','blockid','price'], skiprows=1)\n prices['ts'] = pd.to_datetime(prices.ts, format='%d.%m.%Y %H:%M')\n prices.set_index('ts',inplace=True)\n \n # Paper item consumptions [MWh/block]\n items = pd.read_csv(data_path + 'items.csv', sep=\";\", decimal=\",\", names=['item','consumption'], skiprows=1)\n \n # One realized schedule, 75 blocks for 29.9.2016 - 11.10.2016\n schedule = pd.read_csv(data_path + 'schedule.csv', sep=\";\", decimal=\",\")\n schedule.columns = ['ts','blockid'] + itemnames() + ['price']\n schedule['ts'] = pd.to_datetime(schedule.ts, format='%d.%m.%Y %H:%M')\n return(prices,items,schedule)", "def load_data(self):\n overlength_num = title_num = 0\n with open(self.path, 'r', encoding='utf-8') as r:\n for line in r:\n inst = json.loads(line)\n is_title = inst['sent_id'].endswith('-3') and inst['tokens'][-1] != '.'\n if self.ignore_title and is_title:\n title_num += 1\n continue\n\n # TODO: add back coarse type\n for event in inst['event_mentions']:\n event_type = event['event_type']\n if ':' in event_type:\n event['event_type'] = event_type.split(':')[1].upper()\n self.data.append(inst)\n\n if title_num:\n print('Discarded {} titles'.format(title_num))\n print('Loaded {} instances from {}'.format(len(self), self.path))", "def import_data(self):\n\n self.worksheet = (\n xlrd.open_workbook(filename=self.source).sheet_by_index(0)\n )\n # Import conversion data from worksheet and store as scipy arrays\n self.T_exp = np.array(\n self.worksheet.col_values(0, 
start_rowx=4, end_rowx=None)\n ) + 273.15\n self.HCout_raw = np.array(\n self.worksheet.col_values(4, start_rowx=4, end_rowx=None)\n )\n self.HCin_raw = np.array(\n self.worksheet.col_values(8, start_rowx=4, end_rowx=None)\n )\n self.eta_exp = (\n (self.HCin_raw - self.HCout_raw) / self.HCin_raw\n )\n self.T_model = np.linspace(\n self.T_exp[0] - 50, self.T_exp[-1] + 50, 25\n )\n self.T_array = self.T_model", "def loadText(self,fileName,pickScheduleFile=None,imported=None):\n #--Localizing\n defs = self.defs\n log = self.log\n #--Re's\n reCell = re.compile(\"\\s*(\\\".*?\\\")\")\n reCodeCycle = re.compile(\"\\s*([1-4][ ,1-4]*)\")\n reComment = re.compile(r'\\s*\\#.*')\n reDef = re.compile(r'\\.([a-zA-Z]\\w+)')\n rePos = re.compile(\"-?\\d+\\s+-?\\d+\\s+-?\\d+\\s+-?\\d+\")\n reRepeat = re.compile('= (\\d)')\n reSleep = re.compile(r'([=+\\-\\*\\^~x])\\s+(.+)$')\n reWander = re.compile('wander +(\\d+)')\n reIsMember = re.compile('isMember +(\".+\")')\n #--Functions/Translators\n replDef = lambda a: defs[a.group(1)]\n #--0: awake, 1: sleep+trespass, 2: sleep 3: dim trespass\n sleepStates = {'=':None,'-':0,'+':1,'*':2,'^':3,'~':4,'x':5} \n #--Log\n header = os.path.split(fileName)[-1]\n if len(header) < 70: header += '='*(70-len(header))\n log.setHeader(header)\n #--Imported\n isTopFile = (imported == None)\n if isTopFile: imported = []\n #--Input variables\n section = None\n town = None\n townNpcs = set()\n townSchedule = None\n npcSchedule = None\n codeCycles = [0]\n #--Parse input file\n ins = file(fileName)\n for line in ins:\n #log(line.strip())\n #print line,\n #--Strip spaces and comments\n line = reComment.sub('',line)\n line = line.rstrip()\n #--Skip empty/comment lines\n if not line: continue\n #--Section header?\n if line[0] == '@':\n # (town|defs|night|code|npcName)[: npcCondition]\n parsed = line[1:].split(':',1)\n id = parsed[0].strip()\n #--Non-npc?\n if id in set(['town','defs','night','evening','code','import','project']):\n section = id\n if section in ('evening','night'):\n townSleep = self.sleep[town]\n elif section == 'code':\n cycles = [0]\n townCode = self.code[town] = [[],[],[],[],[]]\n else:\n section = 'npc'\n npc = id\n #--Any town,npc combination will overwrite any town,npc \n # combination from an imported file.\n if (town,npc) not in townNpcs:\n townNpcs.add((town,npc))\n townSchedule[npc] = []\n npcSchedule = [0,0,0,0]\n condition = (len(parsed) == 2 and parsed[1].strip())\n townSchedule[npc].append((condition,npcSchedule))\n if section not in set(('town','import','project')): \n log(' '+line[1:])\n #--Data \n else:\n #--Import\n if section == 'import':\n newPath = line.strip()\n log(_('IMPORT: ')+newPath)\n if not os.path.exists(newPath) and pickScheduleFile:\n caption = \"Find sub-import file %s:\" % (newPath,)\n newPath = pickScheduleFile(caption,newPath)\n if not (newPath and os.path.exists(newPath)):\n raise StateError(\"Unable to import schedule file: \"+line.strip())\n if newPath.lower() in [dir.lower() for dir in imported]:\n log(_(' [%s already imported.]') % (newPath,))\n else:\n log.indent += '> '\n imported.append(newPath)\n self.loadText(newPath,pickScheduleFile,imported)\n log.indent = log.indent[:-2]\n #--Project\n elif section == 'project' and isTopFile:\n self.project = line.strip()\n log(_('PROJECT: ')+self.project)\n #--Defs \n elif section == 'defs':\n (key,value) = line.strip().split(':',1)\n defs[key] = value.strip()\n #--Town\n elif section == 'town':\n town = line.strip()\n log.setHeader(town)\n if isTopFile:\n self.newTowns.add(town)\n if 
town not in self.schedule:\n self.schedule[town] = {}\n self.sleep[town] = {3:{},4:{}} \n townSchedule = self.schedule[town]\n npcSchedule = None\n codeCycles = []\n #--Code\n elif section == 'code':\n line = reDef.sub(replDef,line)\n maCodeCycle = reCodeCycle.match(line)\n if maCodeCycle:\n codeCycles = [int(x) for x in maCodeCycle.group(1).split(',')]\n continue\n for cycle in codeCycles:\n townCode[cycle].append(line)\n #--Evening/Night\n elif section in ('evening','night'):\n cycle = {'evening':3,'night':4}[section]\n line = reDef.sub(replDef,line)\n chunks = [chunk.strip() for chunk in line.split(';')]\n maSleep = reSleep.match(chunks[0])\n if not maSleep: continue\n (cell,defaultState) = (maSleep.group(2), sleepStates[maSleep.group(1)])\n cellStates = (defaultState,)\n for chunk in chunks[1:]:\n chunk = chunk.strip()\n maSleep = reSleep.match(chunk)\n if not maSleep or maSleep.group(1) == '=': \n raise MoshError(_('Bad sleep condition state for %s in %s: %s') \n % (section,town,line))\n condition,state = maSleep.group(2), sleepStates[maSleep.group(1)]\n condition = reIsMember.sub(r'getPCRank \\1 >= 0',condition)\n cellStates += ((condition,state),)\n townSleep[cycle][cell] = cellStates\n #--NPC\n elif section == 'npc':\n #--Get Cycle\n cycle = int(line[0])\n rem = line[2:]\n #--Repeater?\n maRepeat = reRepeat.match(rem)\n if maRepeat:\n oldCycle = int(maRepeat.group(1))\n npcSchedule[cycle-1] = npcSchedule[oldCycle-1]\n continue\n #--Replace defs\n rem = reDef.sub(replDef,rem)\n #--Cell\n maCell = reCell.match(rem)\n if not maCell:\n raise MoshError(_('Pos cell not defined for %s %s %d') % (town,npc,cycle))\n cell = maCell.group(1)\n rem = rem[len(cell):].strip()\n #--Pos\n maPos = rePos.match(rem)\n coords = maPos.group(0).strip().split()\n coords[-1] = `int(coords[-1])*57` #--Workaround interior rotation bug\n pos = 'positionCell %s %s' % (' '.join(coords),cell)\n rem = rem[len(maPos.group(0)):].strip()\n #--Wander/Travel\n ai = reWander.sub(r'wander \\1 5 10 ',rem)\n #--Save\n npcSchedule[cycle-1] = (pos,ai)\n ins.close()", "async def handle_tilknytningsrolle(self):\n rows = self._load_csv_if_newer(Tilknytningsrolle)\n return await self._create_classes_from_csv(Tilknytningsrolle, rows)", "def load_electrical_outputs(file_path):\r\n # Transform the .xls database into panda type\r\n excel = pd.ExcelFile(file_path)\r\n\r\n # Collect data from a particular tab\r\n collection_point = excel.parse('collection point', header=0, index_col=0)\r\n dynamic_cable = excel.parse('dynamic cable', header=0, index_col=0)\r\n static_cable = excel.parse('static cable', header=0, index_col=0)\r\n cable_route = excel.parse('cable route', header=0, index_col=0)\r\n connectors = excel.parse('connectors', header=0, index_col=0)\r\n external_protection = excel.parse('external protection', header=0, index_col=0)\r\n layout = excel.parse('layout', header=0, index_col=0)\r\n\r\n # Splits the different dataset through different dict keys()\r\n electrical_outputs = {'collection point': collection_point,\r\n 'dynamic cable': dynamic_cable,\r\n 'static cable': static_cable,\r\n 'cable route': cable_route,\r\n 'connectors': connectors,\r\n 'external protection': external_protection,\r\n 'layout': layout\r\n }\r\n\r\n return electrical_outputs", "def import_excel(self, filepath_excel,database_type):\n if database_type == \"render\":\n try:\n connection = sqlite3.connect(self.filepath_render_database)\n pointer = connection.cursor()\n\n sql_anweisung = \"\"\"\n INSERT INTO render_information (\n object_type,\n 
name,\n radius,\n polar_angle_min,\n polar_anglel_max,\n polar_angle_segments,\n polar_angle_random_rad,\n azimuth_angle_min,\n azimuth_angle_max,\n azimuth_angle_segments,\n azimuth_angle_random_rad,\n tracking_obj,\n segmentation\n )\n VALUES (\n :object_type,\n :name,\n :radius,\n :polar_angle_min,\n :polar_anglel_max,\n :polar_angle_segments,\n :polar_angle_random_rad,\n :azimuth_angle_min,\n :azimuth_angle_max,\n :azimuth_angle_segments,\n :azimuth_angle_random_rad,\n :tracking_obj,\n :segmentation\n )\n \"\"\"\n with open(filepath_excel) as csvdatei:\n csv_reader_object = csv.reader(csvdatei, delimiter=';')\n next(csv_reader_object)\n pointer.executemany(sql_anweisung, csv_reader_object)\n connection.commit()\n connection.close()\n print(\"render data addet from excel file\")\n except :\n print(\"adding render data from excel file failed\")\n\n elif database_type == \"object\":\n try:\n connection = sqlite3.connect(self.filepath_object_database)\n pointer = connection.cursor()\n\n sql_anweisung = \"\"\"\n INSERT INTO object_information (\n obj_filepath,\n obj_name,\n obj_type,\n obj_scale_factor,\n obj_type,\n obj_location_x,\n obj_location_y,\n obj_location_z,\n obj_rotation_x,\n obj_rotation_y,\n obj_rotation_z,\n obj_amount_percent,\n obj_material_path,\n obj_point_in_time,\n maximum_random_rotation_degree_z,\n maximum_random_translation,\n random_amount\n )\n VALUES (\n :obj_filepath,\n :obj_name,\n :obj_type,\n :obj_scale_factor,\n :obj_type,\n :obj_location_x,\n :obj_location_y,\n :obj_location_z,\n :obj_rotation_x,\n :obj_rotation_y,\n :obj_rotation_z,\n :obj_amount_percent,\n :obj_material_path,\n :obj_point_in_time,\n :maximum_random_rotation_degree_z,\n :maximum_random_translation,\n :random_amount\n )\n \"\"\"\n with open(filepath_excel) as csvdatei:\n csv_reader_object = csv.reader(csvdatei, delimiter=';')\n print(csv_reader_object)\n next(csv_reader_object)\n pointer.executemany(sql_anweisung, csv_reader_object)\n connection.commit()\n connection.close()\n print(\"object data added from excel file\")\n except :\n print(\"adding object data from excel file failed\")\n\n else:\n print(\"no Database found, maybe check spelling in method call??\")\n return", "def stringToClass(cls_str):\n import_stg1 = cls_str.split(\" \")[1]\n import_stg2 = import_stg1.replace(\"'\", \"\")\n import_stg3 = import_stg2.replace(\">\", \"\")\n import_parse = import_stg3.split(\".\")\n cls = import_parse[-1]\n import_path = '.'.join(import_parse[:-1])\n import_statement = \"from %s import %s\" % (import_path, cls)\n exec(import_statement)\n assign_statement = \"this_class = %s\" % cls\n exec(assign_statement)\n return this_class" ]
[ "0.62730616", "0.5840638", "0.5721444", "0.55272853", "0.5497005", "0.5484389", "0.53764915", "0.53338474", "0.52232695", "0.5209564", "0.5174315", "0.509816", "0.5041172", "0.50368327", "0.5036266", "0.5027175", "0.5022386", "0.49693894", "0.49673653", "0.49530467", "0.49271193", "0.49234453", "0.49010834", "0.4883578", "0.48486516", "0.48378053", "0.48019993", "0.47864085", "0.47773919", "0.47597227" ]
0.67227983
0
Get the names of exams available to pick for a given slot_number. Returns a list of exam names.
def get_potential_classes_for_slot(slot_number): return ["econ", "biz", "wtf"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_slots(intent_request):\n return intent_request[\"currentIntent\"][\"slots\"]", "def _list_of_availability_strings():\n names = [availability.name for availability in Availability]\n return names", "def getResiduesByName(self, resn):\n\n\t\treslist = []\n\t\tfor chn in self.chain:\n\t\t\tfor res in chn.residue:\n\t\t\t\tif res.name == resn:\n\t\t\t\t\treslist.append(res)\n\n\t\treturn reslist", "def get_slot_names(self, *args, **kwargs):\n return self._optimizer.get_slot_names(*args, **kwargs)", "def itemnames():\n g = ['KIS_NA_39', 'VII_57', 'MX_48', 'MX_56', 'KIS_NA_42', 'VII_54',\n 'MX_S_48', 'MX_S_52', 'MX_52', 'KIS_NA_45', 'KIS_NA_51', 'MIP_45',\n 'MIP_49', 'MIP_52', 'MIP_plus_48', 'MIP_plus_51', 'MX_42', 'MX_45',\n 'MIP_G_42', 'KIS_42', 'KIS_NA_48']\n return(g)", "def required_slots(tracker: Tracker) -> List[Text]:\n print(\"required_slots(tracker: Tracker)\")\n return [\"name\",\"roomcount\",\"roomtype\"]", "def required_slots(tracker: Tracker) -> List[Text]:\n\n return [\"product\", \"applicant_name\", \"applicant_dob\", \"applicant_phoneno\", \"applicant_address\"]", "def get_slots(self, slot_names_filter=None):\n if slot_names_filter:\n # show only particular slots\n #TODO\n raise Exception(\"Not implemented!\")\n else:\n slot_names_filter = self.memory.keys()\n\n return self.memory", "def get_parking_slot():\n return parking_slots", "def name_list(qbo_session):\n\n return qbo_session.name_list()", "def required_slots(tracker: Tracker) -> List[Text]:\n\n return [\"bug\", \"beverage\", \"second_person_plural\", \n \"cot_caught\", \"rain_sun\", \"crawfish\", \"halloween\",\n \"sandwich\", \"side_road\", \"shoes\", \"highway\", \"yard_sale\",\n \"rubbernecking\", \"frosting\", \"lawyer\", \"kitty_corner\",\n \"firefly\", \"verge\", \"brew_thru\", \"water_fountain\"]", "def get_owned_exams(self, obj):\n exams = []\n queryset = ExamSheet.objects.filter(owner=obj)\n for q in queryset:\n exams.append(q.title)\n return exams", "def get_exercise_recording_full_names(self):\n full_names = set()\n for er in self.exercise_recordings:\n full_names.add(er.full_name)\n return full_names", "def get_list_beams(self,typ,file_number):\n if typ == 'emis':\n # multiply by the mass due to ADAS\n return self.beam_emis[file_number].adas_beam\n elif typ == 'atte':\n return self.beam_atte[file_number].adas_beam\n else:\n raise NameError('No list with this name: {0}'.format(typ))", "def get_names(self):\n\n return self.mod_suites.keys()", "def get_supported_games(self):\n sg = []\n for game in c.supported_games.keys():\n sg.append(c.supported_games[game].game_name)\n return sg", "def availableSquares(self):\n List2=[]\n for item in self.all:\n if item.retrieve()==\"\":\n List2.append(item.name())\n return List2", "def power_pump_names(self):\n return self._power_pumps", "def getAvailableTimeslots(self, allTimeslots) -> [Timeslot]:\r\n # List with all Timeslots any of the Teachers is not available at.\r\n notAvailableTimeslotsTeachers = flatMap(lambda t: t.not_available_timeslots, self.teachers)\r\n # notAvailableTimeslotsTeachers = [item for sublist in map(lambda t: t.not_available_timeslots, self.teachers) for item in sublist]\r\n # If Lesson can only take place on forenoon, create list with all afternoon timeslots.\r\n if self.course.only_forenoon:\r\n notAvailableTimeslotsForenoon = list(filter(lambda t: t.number not in Timeslot.getForenoonTimeslotNumbers(), allTimeslots))\r\n else:\r\n notAvailableTimeslotsForenoon = []\r\n\r\n timeslots = [x for x in allTimeslots if x not in 
(notAvailableTimeslotsTeachers + notAvailableTimeslotsForenoon)]\r\n if self.available_timeslots: # If list is not empty. Else no restrictions.\r\n timeslots = [x for x in timeslots if x in self.available_timeslots]\r\n\r\n return timeslots", "def getOqiNames( self ):\n\n if self.oqiNames:\n return self.oqiNames.keys()\n\n n = self.adb.get( \"nSrss\" )\n for indx in xrange( n ):\n name = self.adb.get( \"srsName\", indx )\n self.oqiNames[ name ] = indx\n\n return self.oqiNames.keys()", "def find_usable_exits(room, stuff):\n usable = []\n for exit in room['exits']:\n if exit.get(\"hidden\", False):\n continue\n if \"required_key\" in exit:\n if exit[\"required_key\"] in stuff:\n usable.append(exit)\n continue\n usable.append(exit)\n return usable", "def get_available_time_slot():\n try:\n time_slot_set_list = list()\n # Read all time slot from database\n with open(InterviewCalendarApi.DB_FILE, \"r\") as fd:\n for line in fd:\n time_slot_list = list()\n (_,_,_, time_slots) = line.strip().split(\"|\")\n for time_slot in time_slots.split(\",\"):\n (from_time_slot, to_time_slot) = list(map(int, time_slot.split(\"-\")))\n time_slot_list.extend(range(from_time_slot, (to_time_slot + 1)))\n # Get all available time slot for every user\n time_slot_set_list.append(set(time_slot_list))\n \n # Find common time slot between multiple parties\n available_slots = list(set.intersection(*time_slot_set_list))\n\n msg = json.dumps({\"Status\": \"Success\", \"available_slots\": available_slots})\n return make_response(msg, 200, InterviewCalendarApi.HEADERS)\n except:\n err_msg = sys.exc_info()\n error = json.dumps({'error': 'Unable to find time slot due to error: %s' %str(err_msg)})\n return make_response(error, 401, InterviewCalendarApi.HEADERS)", "def get_room_exits(self, position):\n exits = []\n for text, direction in DIRECTIONS.items():\n if self.check_exit(position, direction):\n exits.append(text)\n return f'Exits: {\", \".join(exits)}'", "def get_available_data_asset_names(self) -> List[str]:\n raise NotImplementedError", "def get_oblist_from_outfit(self, loc):\n from world.fashion.exceptions import FashionError\n from world.fashion.fashion_commands import get_caller_outfit_from_args\n\n try:\n outfit = get_caller_outfit_from_args(self.caller, self.args)\n except FashionError as err:\n raise CommandError(err)\n return [ob for ob in outfit.fashion_items.all() if ob.location == loc]", "def get_search_results(self):\n return self.get_list_of_names(self.SEARCH_RESULTS)", "def get_openers(self, episode_num: int) -> Optional[List[str]]:\n if self._openers:\n return [random.choice(self._openers)]\n return None", "def get_nice_names(self) -> List[str]:\n result = []\n for elements in self._get_results_list():\n result.append(elements[1])\n return result", "def get_available_slots(iso_datetime):\n all_slots = AppointmentService.get_all_slots(iso_datetime)\n made_appointments = AppointmentService.get_made_appointments(iso_datetime)\n available_slots = []\n\n for slot in all_slots:\n if slot not in made_appointments:\n available_slots.append(slot)\n\n return available_slots", "def pump_names(self):\n return self._pumps" ]
[ "0.52379185", "0.5172876", "0.50573343", "0.5030471", "0.5021936", "0.49109137", "0.4860436", "0.4856028", "0.48363858", "0.48315743", "0.47921562", "0.47723097", "0.4765679", "0.47527313", "0.4746533", "0.47408932", "0.4725023", "0.47146904", "0.4711878", "0.46946704", "0.46866995", "0.4661624", "0.46411508", "0.463222", "0.4624029", "0.4616646", "0.46130794", "0.46042857", "0.46017528", "0.45941865" ]
0.52736354
0
Select a class_name for a certain slot_number. The class name is selected from one of get_potential_classes_for_slot(slot_number). Do the necessary manipulation.
def select_class_for_slot(class_name, slot_number): return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_potential_classes_for_slot(slot_number):\n return [\"econ\", \"biz\", \"wtf\"]", "def get_skill_class(cursor, _class):\n cursor.execute('SELECT id FROM classes WHERE temp_id = ?', (_class,))\n data = cursor.fetchone()\n try:\n return data[0]\n except TypeError:\n l.error(\"The Class {} doesn't exists.\".format(_class))", "def class_selection() -> str:\r\n dict_classes = {'1': 'Bounty Hunter', '2': 'Imperial Agent', '3': 'Jedi Consular', '4': 'Jedi Knight',\r\n '5': 'Sith Warrior', '6': 'Sith Inquisitor', '7': 'Smuggler', '8': 'Trooper'}\r\n print(\"(1) Bounty Hunter\\n(2) Imperial Agent\\n(3) Jedi Consular\\n(4) Jedi Knight\\n\"\r\n \"(5) Sith Warrior\\n(6) Sith Inquisitor\\n(7) Smuggler\\n(8) Trooper\\n\")\r\n chosen_class = str(input(\"Choose your class by entering a number from 1-8, ie. for Bounty Hunter type 1\\n\"))\r\n for key, value in dict_classes.items():\r\n if key == chosen_class:\r\n return value", "def chooseClass(self):\n #global dictionary of classes with 0 values in a list (ex. [str,dex,con,int,wis,cha,hp,suggested specialty])\n classes = {'barbarian': [0,0,0,0,0,0,self.con+12,'reaper'],\n 'cleric':[0,0,0,0,0,0,self.con+8,'mystical healer'],\n 'druid':[0,0,0,0,0,0,self.con+8,'hedge magician'],\n 'fighter':[0,0,0,0,0,0,self.con+10,'reaper'],\n 'monk':[0,0,0,0,0,0,self.con+8,'skirmisher'],\n 'paladin':[0,0,0,0,0,0,self.con+10,'defender'],\n 'ranger':[0,0,0,0,0,0,self.con+10,'sharpshooter'],\n 'rogue':[0,0,0,0,0,0,self.con+6,'specialist'],\n 'wizard':[0,0,0,0,0,0,self.con+6,'hedge magician']\n }\n\n #Ask which class he/she would like\n chosen_class = raw_input(\"Which class would you like? Please choose from:\\nBarbarian, Cleric, Druid, Fighter, Monk, Paladin, Ranger, Rogue, Wizard \" ).lower() \n while chosen_class not in ['barbarian','cleric','druid','fighter','monk','paladin','ranger','rogue','wizard']:\n chosen_class = raw_input(\"\\nIncorrect input\\n\\nWhich class would you like? Please choose from:\\nBarbarian, Cleric, Druid, Fighter, Monk, Paladin, Ranger, Rogue, Wizard \" ).lower() \n print\n\n #Adds character class to Class object for use in print statements\n self.classType = chosen_class.title()\n \n \n\n #Class specific conditional statements. These update the various ability scores\n #in the classes variable\n if chosen_class == 'barbarian':\n barb_choice = raw_input('Would you like to boost (1) Strength or (2) Constitution? ')\n print\n while barb_choice not in ['1','2']:\n barb_choice = raw_input('Would you like to boost (1) Strength or (2) Constitution? ')\n print\n if barb_choice == '1':\n classes['barbarian'][0] = 1\n elif barb_choice == '2':\n classes['barbarian'][2] = 1\n elif chosen_class == 'cleric':\n clerc_choice = raw_input('Would you like to boost (1) Wisdom, (2) Strength, or (3) Constitution? ')\n print\n while clerc_choice not in ['1','2','3']:\n clerc_choice = raw_input('Would you like to boost (1) Wisdom, (2) Strength, or (3) Constitution? ')\n print\n if clerc_choice == '1':\n classes['cleric'][4] = 1\n elif clerc_choice == '2':\n classes['cleric'][0] = 1\n elif clerc_choice == '3':\n classes['cleric'][2] = 1\n elif chosen_class == 'druid':\n druid_choice = raw_input('Would you like to boost (1) Wisdom or (2) Constitution? ')\n print\n while druid_choice not in ['1','2']:\n druid_choice = raw_input('Would you like to boost (1) Wisdom or (2) Constitution? 
')\n print\n if druid_choice == '1':\n classes['druid'][4] = 1\n elif druid_choice == '2':\n classes['druid'][2] = 1\n elif chosen_class == 'fighter':\n fight_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Constitution? ')\n print\n while fight_choice not in ['1','2','3']:\n fight_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Constitution? ')\n print\n if fight_choice == '1':\n classes['fighter'][0] = 1\n elif fight_choice == '2':\n classes['fighter'][1] = 1\n elif fight_choice == '3':\n classes['fighter'][2] = 1 \n elif chosen_class == 'monk':\n monk_choice = raw_input(\"Would you like to boost (1) Wisdom or (2) Dexterity? \")\n print\n while monk_choice not in ['1','2']:\n monk_choice = raw_input(\"Would you like to boost (1) Wisdom or (2) Dexterity? \")\n print\n if monk_choice == '1':\n classes['monk'][4] = 1\n elif monk_choice == '2':\n classes['monk'][1] = 1\n elif chosen_class == 'paladin':\n pal_choice = raw_input('Would you like to boost (1) Strength, (2) Constitution, or (3) Charisma? ')\n print\n while pal_choice not in ['1','2','3']:\n pal_choice = raw_input('Would you like to boost (1) Strength, (2) Constitution, or (3) Charisma? ')\n print\n if pal_choice == '1':\n classes['paladin'][0] = 1\n elif pal_choice == '2':\n classes['paladin'][2] = 1\n elif pal_choice == '3':\n classes['paladin'][5] = 1\n elif chosen_class == 'ranger':\n rang_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Constitution? ')\n print\n while rang_choice not in ['1','2','3']:\n rang_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Constitution? ')\n print\n if rang_choice == '1':\n classes['ranger'][0] = 1\n elif rang_choice == '2':\n classes['ranger'][1] = 1\n elif rang_choice == '3':\n classes['ranger'][2] = 1\n elif chosen_class == 'rogue':\n rog_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Intelligence? ')\n print\n while rog_choice not in ['1','2','3']:\n rog_choice = raw_input('Would you like to boost (1) Strength, (2) Dexterity, or (3) Intelligence? ')\n print\n if rog_choice == '1':\n classes['rogue'][0] = 1\n elif rog_choice == '2':\n classes['rogue'][1] = 1\n elif rog_choice == '3':\n classes['rogue'][3] = 1\n elif chosen_class == 'wizard':\n wiz_choice = raw_input('Would you like to boost (1) Intelligence or (2) Constitution? ')\n print\n while wiz_choice not in ['1','2']:\n wiz_choice = raw_input('Would you like to boost (1) Intelligence or (2) Constitution? ')\n print\n if wiz_choice == '1':\n classes['wizard'][3] = 1\n elif wiz_choice == '2':\n classes['wizard'][2] = 1\n \n #Update base stats\n\n #A basic list full of the types of ability scores\n stats_list = ['str','dex','con','int','wis','cha','hp']\n #loops through the stats_list and adds all numbers to character's\n #starting stats\n for i in range(len(stats_list)):\n self.stealthUpdate(stats_list[i],classes[chosen_class][i])\n \n\n #modify hp if character is starting out higher than level 1\n def update_hp_for_higher_level(chosen_class,level):\n \"\"\"\n Helper function for chooseClass(). Updates character for\n levels greater than 1.\n \"\"\"\n #Checks to see if your character is level 4,8,12,etc.\n def upgradedAbilityAt4(level):\n if level % 4 == 0:\n upgraded_ability = raw_input(\"Level \"+str(level)+\"!\\n Which two abilities would you like to upgrade? 
(Adds +1 to ability)\\n Please input two from str/dex/con/int/wis/cha with a space in between.\\n (ex: cha dex) \").split(' ')\n print\n #To write:\n #if either ability pushes ability score over 20, redo input\n\n \n for i in upgraded_ability:\n self.stealthUpdate(i,1)\n #class specific HP calculations\n if chosen_class == 'barbarian': \n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,12) + self.con + self.classMods[6]\n elif chosen_class == 'cleric':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,8) + self.con + self.classMods[6]\n elif chosen_class == 'druid':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,8) + self.con + self.classMods[6]\n elif chosen_class == 'fighter':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,10) + self.con + self.classMods[6]\n elif chosen_class == 'monk':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,8) + self.con + self.classMods[6]\n elif chosen_class == 'paladin':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,10) + self.con + self.classMods[6]\n elif chosen_class == 'ranger':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,10) + self.con + self.classMods[6]\n elif chosen_class == 'rogue':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,6) + self.con + self.classMods[6]\n elif chosen_class == 'wizard':\n for i in range(2,self.level+1):\n upgradedAbilityAt4(i)\n self.hp += r.randint(1,6) + self.con + self.classMods[6]\n \n\n if self.level > 1:\n update_hp_for_higher_level(chosen_class,self.level)", "def select_source(self, c, slot_number):\n source_found = yield self.find_source(c, slot_number)\n if source_found:\n c['slot_number'] = slot_number\n else:\n raise ValueError(self.slot_not_found_msg(slot_number))", "def classes():\n print(\"\"\"Here are all the classes:\n barbarian, bard, cleric, druid, fighter, monk, paladin, ranger, rogue, sorcerer, warlock, wizard, blood hunter\"\"\")\n my_class = input('What class do you want to play as?')\n my_class = my_class.lower()\n if my_class in \"barbarian, bard, cleric, druid, fighter, monk, paladin, \" \\\n \"ranger, rogue, sorcerer, warlock, wizard, blood hunter\":\n return my_class\n else:\n print('That is not a class')\n return classes() # If they don't pick a listed class, re-run the function until they do", "def findGPCRclass(num_scheme):\n if num_scheme == \"gpcrdba\" or num_scheme == \"gpcrdb\":\n current_class =\"A\"\n #active_class[\"A\"]=[\"active gpcrbold\",\"in active\"]\n elif num_scheme == \"gpcrdbb\":\n current_class =\"B\"\n #active_class[\"B\"]=[\"active gpcrbold\",\"in active\"]\n elif num_scheme == \"gpcrdbc\":\n current_class =\"C\"\n #active_class[\"C\"]=[\"active gpcrbold\",\"in active\"]\n elif num_scheme == \"gpcrdbf\":\n current_class =\"F\"\n #active_class[\"F\"]=[\"active gpcrbold\",\"in active\"]\n return current_class", "def get_python_classname(raw_classname):\n class_name = raw_classname.replace(\" \",\"\")\n class_name = class_name.replace(\"-\",\"\")\n return class_name", "def suggest_preset_name(self, classname):\n i = 1\n name = classname + \"-\" + str(i)\n while self.preset_name_exists(name):\n i += 1\n name = classname + \"-\" + str(i)\n return name", "def set_route_class_to_create_new_target_buy_policy(self, route_class):\n 
self.single_selection_from_static_kendo_dropdown(self.route_class_single_kendo_dropdown_arrow_locator, route_class)", "def pick_class(classes, sort=False, **kwargs):\n def _label(c):\n try:\n return c.LABEL\n except AttributeError:\n return c.__name__\n\n if sort:\n classes = sorted(classes, key=lambda x: _label(x))\n choices = [_label(c) for c in classes]\n return pick_item(classes, choices, **kwargs)", "def choose_class(self, *args, **kwargs):", "def get_step_class_at_index(self, index):\n return self.routine_template.get_step_class_at_index(index)", "def slot_number_for_registration_number(self, reg_no):\n\n if not self._is_valid():\n return\n\n slot_no = \"\"\n for pslot in self.slots.values():\n if not pslot.available and pslot.car and pslot.car.reg_no == reg_no:\n slot_no = pslot.slot_no\n break\n\n if slot_no:\n print(slot_no)\n else:\n print(\"Not found\")\n return\n\n return slot_no", "def slot(self,num):\n if num in ApexAP1000.SLOTS:\n self.__slot=num\n else:\n raise ValueError('Bad slot number !')", "def class_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"class_name\")", "def load_classes(excel_data, slot_count):\n return True", "def get_slot(self, c):\n if 'slot_number' in c.keys():\n slot_number = c['slot_number']\n return slot_number\n else:\n raise ValueError(self.no_selection_msg())\n \n # returnValue(voltage * units.V)", "def get_slot_name(cls, slot: SlotType) -> str:\n if slot is None:\n return 'No Slot Name'\n return slot.__name__", "def get_mpg_class(mpg):\n\n if(mpg >= 45):\n return 10\n elif(mpg >= 37 and mpg < 45):\n return 9\n elif(mpg >= 31 and mpg < 37):\n return 8\n elif(mpg >= 27 and mpg < 31):\n return 7\n elif(mpg >= 24 and mpg < 27):\n return 6\n elif(mpg >= 20 and mpg < 24):\n return 5\n elif(mpg >= 17 and mpg < 20):\n return 4\n elif(mpg >= 15 and mpg < 17):\n return 3\n elif(mpg >= 14 and mpg < 15):\n return 2\n else:\n return 1", "def GetEquipmentClass(typename):\n p_match = EQUIPMENT_CLASS_REGEX.match(typename)\n if p_match:\n return p_match.group(2)\n return None", "def parameterize_class_name(cls: Type, idx: int, input_dicts: Mapping[Any, Any]):\n suffix = \"_\".join(f\"{k}_{v}\" for k, v in input_dicts.items())\n return f\"{cls.__name__}_{suffix}\"", "def suggest_new_class_name(class_name):\n # TODO find all internal classes of this class, and use them to help generate suggestions\n if is_anonymous_class(class_name):\n # anonymous! leave this as is. 
(we'll just change the parent class)\n return None\n\n # get the list by count\n sorted_list = Counter(get_class_strings(class_name)).most_common()\n # pick the top contenders - take anything that has the same count as the the first in the list\n top_list = [element[0]\n for element in sorted_list if element[1] == sorted_list[0][1]]\n\n if not top_list:\n return None\n\n # remove bad chars\n BAD_CHARS = \":,/\\\\()[]$;#@!&^%*+'\\\"\"\n clean_top_list = []\n for top in top_list:\n for c in BAD_CHARS:\n top = top.replace(c,\"\")\n clean_top_list.append(top)\n top_list = clean_top_list\n \n # heuristics to pick between them - prefer things with dots, and things without \" \", and things that aren't empty\n if len(top_list) > 1:\n dot_list = [guess for guess in top_list if \".\" in guess]\n if dot_list:\n top_list = dot_list\n\n if len(top_list) > 1:\n no_space_list = [guess for guess in top_list if \" \" not in guess]\n if no_space_list:\n top_list = no_space_list\n\n # preference longer name\n top_list.sort(key=len, reverse=True)\n #print(top_list)\n new_name = top_list[0]\n\n # if there's a dot, split the name and take the last chunk\n if \".\" in new_name:\n new_name = new_name.split(\".\")[-1]\n\n # if there's a space, split the name and take the first chunk\n if \" \" in new_name:\n new_name = new_name.split(\" \")[0]\n\n # replace the old class name with this one\n # TODO handle internal classes (anonymous or otherwise)\n # should make sure, at minimum, they have the parent name in their symbol.\n package_elements, class_elements = split_class_name(class_name)\n\n class_elements[-1] = new_name\n new_name = \"L\" + \"/\".join(package_elements) + \\\n \"/\" + \"$\".join(class_elements) + \";\"\n\n return new_name.encode(\"utf-8\")", "def find_class(self, class_name: str) -> Type:\n pass", "def clas(self, x):\n if isinstance(x, tuple):\n index = self.tuple_to_index[x]\n elif isinstance(x, str):\n index = self.string_to_index[x]\n else:\n raise ValueError('x should be string or int; received {}'.format(\n x))\n return self.classes[index]", "def choose_slot(data):\n firebase_uid = data[\"session\"].split(\"/\")[-1]\n db = firebase.database()\n slot = data[\"queryResult\"][\"parameters\"][\"slot\"]\n for i in data[\"queryResult\"][\"outputContexts\"]:\n if \"ticket-id\" in i[\"name\"]:\n ticket_id = i[\"parameters\"][\"ticket_id\"]\n db.child(\"user_data\").child(firebase_uid).child(\"Complaints\").child(ticket_id).child(\"Time Slot Chosen\").set(str(int(slot)))\n break\n response = {\n \"fulfillmentText\": \"I have updated your preference.\"\n }\n return response", "def handle_slot_select(data: bytes) -> Tuple[bytes, str]:\n new_slot = struct.unpack('B', data[:1])[0]\n return data[1:], f'New slot: {new_slot}'", "def addClass():\r\n print(\"\\nEnter classes by day. For example enter all your Monday classes first, then Tuesday, etc.\")\r\n print(\"When asked to put in class meeting times enter in 24 hr format. 
Example: 1:00 p.m = 1300 8:00 a.m = 0800\")\r\n \r\n day = input(\"Day of Class: \")\r\n while not properDayInput(day): #While format is not correct, persist on getting the correct entry\r\n print(\"Please enter a day of the week\")\r\n day = input(\"Day of Class: \")\r\n \r\n className = input(\"Name of Class: \").strip()\r\n if className == \"\": #If user does not put in a field (or just a bunch of spaces)\r\n className = \"EMPTY ENTRY!\"\r\n \r\n startTime = input(\"Starting Time: \")\r\n while not properTimeInput(startTime):\r\n startTime = input(\"StartingTime: \")\r\n \r\n endTime = input(\"Ending Time: \")\r\n while not properTimeInput(endTime):\r\n endTime = input(\"Ending Time: \")\r\n \r\n class_ = Class(className, Day(day), startTime, endTime) #Creating class object from user's entries\r\n for i in range (0, len(classes),1): #Checking for overlaping/duplicate classes\r\n classInList = classes[i]\r\n if(class_ == classInList):\r\n print(\"\\nThere is a scheduling conflict with class: \" + str(classInList) + \" and \" + str(class_))\r\n print(\"The class you just entered was not added to schedule. Please try another entry or edit an existing class\\n\")\r\n return #Break out of function\r\n \r\n classes.append(Class(className.upper(), Day(day), startTime, endTime))\r\n print(\"\\nClass added to schedule\")\r\n classes.sort()\r\n delay()\r\n clearTerminal()", "def properMenuChoice(classIndex):\r\n if not classIndex.strip().isdigit():\r\n print (\"'\", classIndex, \"' needs to be a number corresponding to a class entry\\n\")\r\n return False\r\n if int(classIndex) < 1 or int(classIndex) > len(classes)+1:\r\n print (\"'\", classIndex, \"' needs to be a number corresponding to a class entry\\n\")\r\n return False\r\n return True", "def get_class(mod, class_name: str):\n for name_val in inspect.getmembers(mod, inspect.isclass):\n name = name_val[0]\n val = name_val[1]\n if name == class_name:\n return val\n return None" ]
[ "0.6997579", "0.590257", "0.56273097", "0.5527541", "0.54824317", "0.5280297", "0.5246553", "0.5108651", "0.5095877", "0.5084751", "0.5055355", "0.50318855", "0.49355", "0.49338344", "0.49330693", "0.49220127", "0.49219468", "0.4920611", "0.4900326", "0.4871079", "0.4848637", "0.4843189", "0.48405147", "0.48282775", "0.48060575", "0.47928292", "0.47672427", "0.47644573", "0.47513908", "0.4732468" ]
0.8108506
0
Resets all selections made, returning to the initial state.
def reset_selections(): return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Reset_Selection(self):\r\n #if previous selection\r\n if( self.selected != 0 ):\r\n self.canvas_one.delete( self.selected ) #remove bounding rectangle\r\n #return chosen node to branch_color\r\n self.canvas_one.itemconfig( self.selected_ls.line_handle , fill = self.branch_color )\r\n self.system.Set_Selected_Node(0)\r\n self.selected = 0\r\n self.selected_ls = 0", "def reset(self):\n self.selection_bounds = None\n self.selection = []\n for c in self.callbacks[\"reset_data\"]:\n c()\n if self.context is not None and self.context.doc is not None:\n self.context.doc.add_next_tick_callback(self.update_source)", "def reset(self):\n self.source_data = self.get_dict_from_range(None, None)\n self.selection_bounds = None\n self.selection = []\n for c in self.callbacks[\"reset_data\"]:\n c()\n if self.context is not None:\n self.context.doc.add_next_tick_callback(self.update_source)", "def reset(self):\n self.set_state(self._initial_state)", "def deselectall(self):\n if self.selection:\n for node in self.selection[:]: node.deselect()", "def reset(self):\n self.state.fill(EMPTY)", "def resetSelectionArea(self):\n for legend in self._selectionAreas:\n self.plot.remove(legend, kind='item')\n self._selectionAreas = set()", "def reset(self):\n \n pass", "def reset(self):\n self.liidx = 0\n self.clidx = 0", "def unselectAll(self):\n\t\tself.tree.UnselectAll()", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self, ):\n self.clear()\n self.create_all_menus()", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self):\n ...", "def reset(self):\n ...", "def reset(self):\n self.__sets = []\n self._computed = False", "def reset_states(self):\n self.model.reset_states()", "def __editDeselectAll(self):\n self.activeWindow().selectAll(False)", "def reset(self):\n self.setOptions(self._options)", "def reset(self):\r\n\t\tself.index = 0", "def reset(self):\n self._current_index = 0", "def reset(self):\n self.clear()", "def reset(self):\n self._idx = 0", "def reset(self) -> None:\n ...", "def reset(self) -> None:\n ..." ]
[ "0.79201347", "0.77571887", "0.7379118", "0.7340211", "0.73181266", "0.721911", "0.7197248", "0.7190832", "0.71431947", "0.71210617", "0.7112716", "0.7112716", "0.7112716", "0.7112716", "0.7075982", "0.70753473", "0.70753473", "0.70753473", "0.70569086", "0.70569086", "0.7054553", "0.7052165", "0.7049477", "0.70398146", "0.70371985", "0.7027873", "0.69949347", "0.69925994", "0.69908917", "0.69908917" ]
0.79584837
0
Returns a list of all pandigital primes up to ``n``.
def pandigital_primes(n=10000000): pri = [x for x in primes(n) if is_pandigital(x)] return pri
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_primes(n):\n\n return list(primes_sieve(n))", "def primes(n):\n return [i for i in xrange(1, n + 1) if mr_prime(i)]", "def get_n_primes(n):\n\n primes = [' ']\n num = 2\n while len(primes) < n + 1:\n if is_prime(num):\n primes.append(num)\n num += 1\n return primes", "def make_primes(n):\n out_list = []\n for i in range(2, n):\n if is_prime(i):\n out_list.append(i)\n return out_list", "def list_primes(n):\n primeList = []\n for i in range(n):\n if is_prime(i):\n primeList.append(i)\n return primeList", "def primes(n):\n if n == 0 or n == 1:\n return []\n else:\n p = primes(int(sqrt(n)))\n no_p = { j for i in p for j in xrange(i*2, n+1, i) }\n p = { x for x in xrange(2, n + 1) if x not in no_p }\n return p", "def primes_list(n):\n count = 0\n if n <= 7:\n p_list = [2, 3, 5, 7, 11, 13, 17]\n return p_list[:n]\n else:\n upper_bound = int(n * log(n) + n * log(log(n)))\n return primes(upper_bound)[:n]", "def primes(n):\n result = []\n i = 2\n while n > 0:\n if isPrime(i):\n result += [i]\n n -= 1\n i += 1\n return result", "def primes(n):\n return [i for i, v in enumerate(prime_cache(n)) if v]", "def find_n_primes(n):\n primes = [ ]\n\n if n < 2:\n return None;\n\n primes.append(2)\n\n for i in range(3, n + 1, 2):\n is_prime = True\n for p in primes:\n if i % p is 0:\n is_prime = False\n continue\n if is_prime:\n primes.append(i)\n return primes", "def find_prime_permutations(primes, n):\n\n candidates = [int(\"\".join(digits)) for digits in sorted(set(permutations(str(n))))]\n return [c for c in candidates if c in primes]", "def list_primes(n):\n\tarr = [True] * n\n\tarr[0] = False\n\tarr[1] = False\n\tfor i in range(2, int(math.sqrt(n)) + 1):\n\t\tif is_prime(i):\n\t\t\tfor j in range(2 * i, n, i):\n\t\t\t\tarr[j] = False\n\tprimes = []\n\tfor i in range(len(arr)):\n\t\tif arr[i]:\n\t\t\tprimes.append(i)\n\treturn primes", "def n_length_primes(n):\n assert n > 0, \"Cannot generate a list of %d length primes.\" % n\n a = []\n for i in range(10**(n-1), 10**n):\n if is_prime(i):\n a.append(str(i))\n return a", "def primes(n, DEBUG=False):\n\n return [x[0] for x in enumerate(_sieve(n, DEBUG=DEBUG)[0:n+1]) if x[1]]", "def get_primes(n):\n primes = [True] * (n / 2)\n for i in range(int((n / 2 - 1) / 2) >> 1):\n for j in range((i * (i + 3) << 1) + 3, n / 2, (i << 1) + 3): \n primes[j] = False\n return [2] + [((i << 1) + 3) for i in range(n / 2) if (primes[i])]", "def primesList(n):\n sieve = [True]*n\n for i in range(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[2*i::i] = [False]*(len(sieve[2*i::i]))\n return [2]+[i for i in range(3,n,2) if sieve[i]]", "def primes(n):\n sieve = [True] * n\n for i in range(3, int(n ** 0.5) + 1, 2):\n if sieve[i]:\n sieve[i * i::2 * i] = [False] * int(((n - i * i - 1) // (2 * i) + 1))\n return [2] + [i for i in range(3, n, 2) if sieve[i]]", "def generate_prime_less_than_n(n):\n\tif n <= 1:\n\t\treturn []\n\tlist_of_primes = [2]\n\tfor i in range(3, n, 2):\n\t\tis_prime = True\n\t\tfor j in list_of_primes:\n\t\t\tif i%j == 0:\n\t\t\t\tis_prime = False\n\t\t\t\tbreak\n\t\tif is_prime:\n\t\t\tlist_of_primes.append(i)\n\treturn list_of_primes", "def get_probable_prime(n: int) -> [int]:\n return [6*n-1, 6*n+1]", "def primes(n):\n sieve = [True] * n\n for i in range(3, int(n**0.5)+1,2):\n if sieve[i]:\n sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1)\n return [2] + [i for i in range(3,n,2) if sieve[i]]", "def primes(n):\n sieve = [True] * n\n for i in xrange(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1)\n return [2] + [i for i in 
xrange(3,n,2) if sieve[i]]", "def primes(n):\n sieve = [True] * n\n for i in xrange(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1)\n return [2] + [i for i in xrange(3,n,2) if sieve[i]]", "def primes(n):\n sieve = [True] * n\n for i in range(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[i*i::2*i]=[False]*int(((n-i*i-1)/(2*i)+1))\n return [2] + [i for i in range(3,n,2) if sieve[i]]", "def primes(n):\n sieve = [True] * n\n for i in xrange(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1)\n return [2] + [i for i in xrange(3,n,2) if sieve[i]]", "def first_n_primes(n): \n\tlist_of_primes = []\n\t# the current number that we're checking the primality of\n\tcandidate = 2\n\n\t# keep on finding primes until our list has enough elements\n\twhile len(list_of_primes) < n:\n\t\t# assume that we have a prime number\n\t\tis_prime = True\n\n\t\t# use trial division to determine if it's not prime\n\t\tfor i in range(2, candidate):\n\t\t\t# once we know it's not prime, break!\n\t\t\tif candidate % i == 0:\n\t\t\t\tis_prime = False\n\t\t\t\tbreak\n\t\tif is_prime:\n\t\t\tlist_of_primes.append(candidate)\n\t\tcandidate += 1\n\treturn list_of_primes", "def primeGen(n):\n primes = [2, 3, 5, 7, 11]\n if n in xrange(1, len(primes) + 1):\n return primes[:n]\n else:\n banlist = []\n count = 6\n while count <= n:\n Next = (primes[-2] + primes[-1]) - primes[-3]\n if not is_prime(Next):\n count -= 1\n banlist.append(Next)\n count += 1\n primes.append(Next)\n filterout(banlist, primes)\n return primes", "def n_primes(n):\n primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59,\n 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127,\n 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193,\n 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269,\n 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349,\n 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431,\n 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503,\n 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599,\n 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673,\n 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761,\n 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857,\n 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947,\n 953, 967, 971, 977, 983, 991, 997][:n]\n\n if len(primes) < n:\n big_number = 2000\n while 'Not enough primes':\n primes = primes_from_2_to(big_number)[:n]\n if len(primes) == n:\n break\n big_number += 1000\n\n return primes", "def primes(n):\n sqrtN=n**0.5\n odds=[2]\n odds+=[i for i in range(3,n) if i%2>0]\n\n for i in odds:\n if i!=0 and i<=sqrtN:\n for j in odds[odds.index(i)+1:]:\n if j%i==0:\n odds[odds.index(j)]=0\n return [i for i in odds if i!=0]", "def genPrimes(n):\n assert n>1\n p = gen_eratosthenes()\n prime_list = []\n prime_list.append(next(p))\n while n > prime_list[len(prime_list)-1]: #while input is less than the last term in the prime list\n prime_list.append(next(p)) #adds next term from generator\n if n < prime_list[len(prime_list)-1]: #deletes last term\n del prime_list[len(prime_list)-1]\n #print(prime_list) #for testing only\n return prime_list", "def primes(n):\n\tsieve = [True] * n\n\tyield 2\n\tfor i in xrange(3,int(n**0.5)+1,2):\n\t\tif sieve[i]:\n\t\t\tyield i\n\t\t\tsieve[i*i::2*i] = [False]*((n-i*i-1)/(2*i)+1)\n\tfor i in xrange(i+2,n,2):\n\t\tif sieve[i]: yield i" ]
[ "0.8037789", "0.79828346", "0.78280413", "0.7799131", "0.7784345", "0.77557623", "0.7705573", "0.7680708", "0.75602317", "0.75260603", "0.75113", "0.74893403", "0.7445755", "0.74427813", "0.74221474", "0.7414387", "0.739986", "0.736477", "0.7353285", "0.7333225", "0.73293346", "0.73293346", "0.7328853", "0.7324065", "0.7321882", "0.7288325", "0.7284062", "0.72696364", "0.7259145", "0.7237217" ]
0.83678913
0
Retrieve the local code for a device using the EufyHome account's username and password.
def get_local_code(username: str, password: str, ip_address: str): client_id = 'eufyhome-app' client_secret = 'GQCpr9dSp3uQpsOMgJ4xQ' login_payload = {'client_id': client_id, 'client_Secret': client_secret, 'email': username, 'password': password} login_request = requests.post("https://home-api.eufylife.com/v1/user/email/login", json=login_payload) if login_request.status_code != 200: raise EufyApiError('Could not authenticate with Eufy API. Is your username and password correct?') token = login_request.json()['access_token'] headers = {'token': token, 'category': 'Home'} devices_request = requests.get('https://home-api.eufylife.com/v1/device/list/devices-and-groups', headers=headers) if devices_request.status_code != 200: raise EufyApiError('Could not list devices from Eufy API.') devices_from_api = devices_request.json() for item in devices_from_api['items']: if 'device' in item and item['device']['wifi']['lan_ip_addr'] == ip_address: return item['device']['local_code'] raise EufyApiError('Cannot find local code for device with given IP address. Check that the IP address is correct.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_address():\n ret = subprocess.getoutput([\"swift auth\"])\n ret = ret.split(\"\\n\")[0]\n ret = ret.split(\"=\")[1]\n return ret", "def query_device_credential(self, device_code):\n raise NotImplementedError()", "def get_device_data():\n user = input('Username: ')\n password = input('Password: ')\n device_name = input('FQDN or IP of device: ')\n\n return user, password, device_name", "def code(ctx, show_hidden, query, single, password, remember):\n\n _init_session(ctx, password, remember)\n\n session = ctx.obj[\"session\"]\n entries = session.calculate_all()\n creds = _search(entries.keys(), query, show_hidden)\n\n if len(creds) == 1:\n cred = creds[0]\n code = entries[cred]\n if cred.touch_required:\n prompt_for_touch()\n try:\n if cred.oath_type == OATH_TYPE.HOTP:\n with prompt_timeout():\n # HOTP might require touch, we don't know.\n # Assume yes after 500ms.\n code = session.calculate_code(cred)\n elif code is None:\n code = session.calculate_code(cred)\n except ApduError as e:\n if e.sw == SW.SECURITY_CONDITION_NOT_SATISFIED:\n raise CliFail(\"Touch account timed out!\")\n entries[cred] = code\n\n elif single and len(creds) > 1:\n _error_multiple_hits(ctx, creds)\n\n elif single and len(creds) == 0:\n raise CliFail(\"No matching account found.\")\n\n if single and creds:\n if is_steam(cred):\n click.echo(calculate_steam(session, cred))\n else:\n click.echo(code.value)\n else:\n outputs = []\n for cred in sorted(creds):\n code = entries[cred]\n if code:\n if is_steam(cred):\n code = calculate_steam(session, cred)\n else:\n code = code.value\n elif cred.touch_required:\n code = \"[Requires Touch]\"\n elif cred.oath_type == OATH_TYPE.HOTP:\n code = \"[HOTP Account]\"\n else:\n code = \"\"\n outputs.append((_string_id(cred), code))\n\n longest_name = max(len(n) for (n, c) in outputs) if outputs else 0\n longest_code = max(len(c) for (n, c) in outputs) if outputs else 0\n format_str = \"{:<%d} {:>%d}\" % (longest_name, longest_code)\n\n for name, result in outputs:\n click.echo(format_str.format(name, result))", "def read_code() -> str:\n code = ''\n if settings.ZULIP_ENABLED:\n _code = zulip_read()\n if _code:\n logger.info(f'Read SMS Code from Zulip: {_code}')\n code = _code\n\n return code", "def code(ctx, show_hidden, query, single):\n\n ensure_validated(ctx)\n\n controller = ctx.obj['controller']\n creds = [(cr, c)\n for (cr, c) in controller.calculate_all()\n if show_hidden or not cr.is_hidden\n ]\n\n creds = _search(creds, query)\n\n if len(creds) == 1:\n cred, code = creds[0]\n if cred.touch:\n prompt_for_touch()\n try:\n if cred.oath_type == OATH_TYPE.HOTP:\n # HOTP might require touch, we don't know.\n # Assume yes after 500ms.\n hotp_touch_timer = Timer(0.500, prompt_for_touch)\n hotp_touch_timer.start()\n creds = [(cred, controller.calculate(cred))]\n hotp_touch_timer.cancel()\n elif code is None:\n creds = [(cred, controller.calculate(cred))]\n except APDUError as e:\n if e.sw == SW.SECURITY_CONDITION_NOT_SATISFIED:\n ctx.fail('Touch credential timed out!')\n\n elif single:\n _error_multiple_hits(ctx, [cr for cr, c in creds])\n\n if single:\n click.echo(creds[0][1].value)\n else:\n creds.sort()\n\n outputs = [\n (\n cr.printable_key,\n c.value if c\n else '[Touch Credential]' if cr.touch\n else '[HOTP Credential]' if cr.oath_type == OATH_TYPE.HOTP\n else ''\n ) for (cr, c) in creds\n ]\n\n longest_name = max(len(n) for (n, c) in outputs) if outputs else 0\n longest_code = max(len(c) for (n, c) in outputs) if outputs else 0\n format_str = u'{:<%d} {:>%d}' % 
(longest_name, longest_code)\n\n for name, result in outputs:\n click.echo(format_str.format(name, result))", "def run(self, code):\n access_token = self.app_client_cls().exchange_for_access_token(code)\n user_info = self.user_client_cls(access_token).get_me()\n entity = self.user_data_gateway_cls.create_from_me_response(user_info)\n return entity", "async def code(self) -> str:\n if self.shared_secret:\n return generate_one_time_code(self.shared_secret)\n print(\"Please enter a Steam guard code\")\n code = await utils.ainput(\">>> \")\n return code.strip()", "def getUserCodeJSON(self, puid=\"0\"):\n jsonPayload = pdReq.USER_CODE_REQUEST_JSON % (self.UID,puid)\n ucJSON = self.fetchInfo(jsonPayload)\n logger.debug('returning %s' % ucJSON)\n return ucJSON", "def login(self):\n url = self.base_url + \"/api/login\"\n creds = {'username': credentials.api['username'],\n 'password': credentials.api['password']}\n\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n resp = self.session.post(url, creds, verify=False)\n\n return json.loads(resp.text)['_global_result']['UIDARUBA']", "def get_code(self, account: ChecksumEvmAddress) -> str:\n return hex_or_bytes_to_str(self.w3.eth.getCode(account))", "def get_credential_fullname(cred_code):\n cred = get_credentials()\n return cred[cred_code]", "def _get_token() -> str:\n username = si.get_env_var(\"EODDATA_USERNAME\")\n password = si.get_env_var(\"EODDATA_PASSWORD\")\n\n _LOG.info(\"Logging into EODData API ...\")\n\n response = get_client().service.Login(Username=username, Password=password)\n\n if response[\"Token\"] is None:\n dbg.dfatal(\"Login Failed: '%s'\", response[\"Message\"])\n\n return str(response[\"Token\"])", "def get_token(self, code):\n\n # live need post a form to get token\n headers = {'Content-type': 'application/x-www-form-urlencoded'}\n data = {\n 'client_id': get_config('login.live.client_id'),\n 'client_secret': get_config('login.live.client_secret'),\n 'redirect_uri': get_config('login.live.redirect_uri'),\n 'grant_type': 'authorization_code',\n 'code': code\n }\n # Following is use urllib to post request\n url = get_config('login.live.access_token_url')\n r = requests.post(url, data=data, headers=headers)\n resp = r.json()\n\n if resp.get(\"error\") is not None:\n raise Exception(resp)\n\n return resp[\"access_token\"]", "def device_password(self) -> str:\n return pulumi.get(self, \"device_password\")", "def device_password(self) -> str:\n return pulumi.get(self, \"device_password\")", "def get():\n return login()", "def get_kcca_devices_codes():\n headers = {'x-api-key': CLARITY_API_KEY, 'Accept-Encoding': 'gzip'}\n api_url = CLARITY_API_BASE_URL + \"devices\"\n results = requests.get(api_url, headers=headers)\n\n device_data = pd.DataFrame(results.json())\n\n device_codes = []\n\n for index, row in device_data.iterrows():\n device_codes.append(row['code'])\n\n return device_codes", "def get_kcca_devices_codes():\n headers = {'x-api-key': CLARITY_API_KEY, 'Accept-Encoding': 'gzip'}\n api_url = CLARITY_API_BASE_URL + \"devices\"\n results = requests.get(api_url, headers=headers)\n\n device_data = pd.DataFrame(results.json())\n\n device_codes = []\n\n for index, row in device_data.iterrows():\n device_codes.append(row['code'])\n\n return device_codes", "def get_auth_token():\n url = 'https://sandboxdnac.cisco.com/dna/system/api/v1/auth/token' # Endpoint URL\n resp = requests.post(url, auth=HTTPBasicAuth(DNAC_USER, DNAC_PASSWORD)) # Make the POST Request\n token = resp.json()['Token'] # Retrieve the Token from 
the returned JSONhahhah\n return token # Create a return statement to send the token back for later use", "def get_phone_code():\n try:\n if check_magic_cookie(current_app.config):\n eppn = request.args.get('eppn')\n state = current_app.password_reset_state_db.get_state_by_eppn(eppn)\n return state.phone_code.code\n except Exception:\n current_app.logger.exception(\n 'Someone tried to use the backdoor to get the SMS verification code for a password reset'\n )\n\n abort(400)", "def accesscode(request, code):\n employee = Employee.objects.get(access_code=code)\n user = employee.user\n user.backend = 'django.contrib.auth.backends.ModelBackend'\n login(request, user)\n return HttpResponseRedirect('/')", "def getToken(email, password):\n r = requests.post(r\"https://opendata.hopefully.works/api/login\", json={\"email\":email, \"password\":password})\n if r.status_code == 200: \n return r.json()[\"accessToken\"]\n else:\n return \"\"", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def code(self) -> str:\n return pulumi.get(self, \"code\")", "def get_fullcode(self):\n raise NotImplementedError", "def send_auth_code(phone: str):\n\n # Replace with credentials from your Twilio account\n account_sid = \"ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\"\n auth_token = \"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\"\n client = Client(account_sid, auth_token)\n\n # secrets uses the most secure RNG available to the OS\n code = f'{secrets.randbelow(1000000):06}'\n\n # send SMS containing the code\n try:\n client.messages.create(\n to=f'+1{phone}',\n from_='+1XXXXXXXXXX',\n body=f'Your SecureLogin verification code is {code}')\n except TwilioRestException:\n return None\n else:\n return code", "def get_password_from_user():\n pwd = ''\n keyboard = xbmc.Keyboard('', ADDON_NAME + ': ' + localise(32022), True)\n keyboard.doModal()\n if keyboard.isConfirmed():\n pwd = keyboard.getText()\n return pwd", "def fl_whoami():\n _fl_whoami = library.cfuncproto(\n library.load_so_libforms(), \"fl_whoami\", \\\n xfdata.STRING, [],\\\n \"\"\"const char * fl_whoami()\"\"\")\n library.check_if_flinitialized()\n retval = _fl_whoami()\n if isinstance(retval, bytes):\n return retval.decode('utf-8')\n else: # str\n return retval" ]
[ "0.58795834", "0.5864797", "0.5834147", "0.57331693", "0.55483615", "0.5507485", "0.54629534", "0.54514444", "0.54253983", "0.54025495", "0.53247046", "0.53159016", "0.5302662", "0.5299826", "0.5280781", "0.5280781", "0.52279246", "0.52222973", "0.52222973", "0.51973945", "0.51948583", "0.5175754", "0.51713353", "0.5168976", "0.5168976", "0.5168976", "0.5164711", "0.5162357", "0.51604056", "0.5113727" ]
0.7983226
0
Encrypt data using the Eufy AES key and IV. Handles padding to a 16-byte interval.
def _encrypt(data):
    cipher = AES.new(bytes(_AES_KEY), AES.MODE_CBC, bytes(_AES_IV))

    # Pad to 16 bytes for AES CBC
    for i in range(16 - (len(data) % 16)):
        data += b'\0'

    return cipher.encrypt(data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encryptData(self, key, iv, data, align = True):\r\n\t\tif((len(data) % self.align) != 0 and align):\r\n\t\t\treturn AES.new(key, AES.MODE_CBC, iv).encrypt(data + (\"\\x00\" * (self.align - (len(data) % self.align))))\r\n\t\telse:\r\n\t\t\treturn AES.new(key, AES.MODE_CBC, iv).encrypt(data)", "def encrypt(data, key, iv, save_path=None):\n if isinstance(data, str):\n with open(data, 'rb') as f:\n data = f.read()\n length = str(len(data))\n length = _pad16(length)\n\n key = _pad16(key)\n iv = _pad16(iv)\n data = _pad16(data)\n cipher = AES.new(key, AES.MODE_CBC, iv)\n data = cipher.encrypt(data)\n data = length + data\n if save_path:\n with open(save_path, 'wb') as f:\n f.write(data)\n return data", "def aes_encrypt(mode, aes_key, aes_iv, *data):\n encryptor = Cipher(\n algorithms.AES(aes_key), mode(aes_iv), backend=default_backend()\n ).encryptor()\n\n result = None\n for value in data:\n result = encryptor.update(value)\n encryptor.finalize()\n\n return result, None if not hasattr(encryptor, \"tag\") else encryptor.tag", "def encrypt_data ( aes_key, data ) :\n salt = Crypto.Random.new( ).read( Crypto.Cipher.AES.block_size )\n cipher = Crypto.Cipher.AES.new( aes_key, Crypto.Cipher.AES.MODE_CFB, salt )\n encrypted_data = cipher.encrypt( data )\n\n return encode_data( salt + encrypted_data )", "def encrypt(self, key, data, mode, padding):\n # this can be disabled by _disable_encryption, so pylint: disable=method-hidden\n try:\n block_size = self.cipher.block_size\n iv_len = block_size // 8\n iv = os.urandom(iv_len)\n\n encryptor = Cipher(self.cipher(key), mode.build(iv), backend=default_backend()).encryptor()\n padder = padding.build(block_size).padder()\n\n padded_data = padder.update(data) + padder.finalize()\n return iv + encryptor.update(padded_data) + encryptor.finalize()\n except Exception:\n error_message = \"Encryption failed\"\n _LOGGER.exception(error_message)\n raise EncryptionError(error_message)", "def encrypt_data(data, encryption_key):\n assert isinstance(data, str)\n obj = AES.new(encryption_key, AES.MODE_CBC, 'This is an IV456')\n padded = Pad.pad(data.encode())\n ciphertext = obj.encrypt(padded)\n return ciphertext.hex()", "def Encrypt(self, data):\n\n if len(data) % 16 != 0:\n data += ' ' * (16 - len(data) % 16)\n es = AES.new(self.creds.aesKey, AES.MODE_CBC, self.creds.aesIV)\n data = es.encrypt(data)\n data = base64.b64encode(data)\n return data", "def aes_encrypt(data, key):\r\n cipher = aes_cipher_from_key(key)\r\n padded_data = pad(data)\r\n return cipher.encrypt(padded_data)", "def encrypt( raw, key, iv ):\n result = ''\n tmp_iv = iv \n text = pad(raw)\n\n for i in xrange(0, len(text) / BS):\n lower_bound = i * 16\n upper_bound = (i+1) * 16\n \n tmp = AES.new(key, AES.MODE_OFB, tmp_iv).decrypt( text[lower_bound:upper_bound] )\n tmp_iv = tmp\n result += tmp\n\n return result", "def encrypt_aes(msg, key, iv):\r\n #start timer\r\n start = timeit.default_timer()\r\n\r\n #converting key to bytes from hex\r\n key = bytes.fromhex(key)\r\n msg = pad(msg)\r\n obj = AES.new(key, AES.MODE_CBC, iv)\r\n ciphertxt = obj.encrypt(msg)#ciphertxt will be in 'bytes'\r\n\r\n #converting ciphertxt into hexadecimal\r\n ciphertxt = ciphertxt.hex()\r\n\r\n print(\"Ciper is: \",ciphertxt)\r\n\r\n #stop timer\r\n stop = timeit.default_timer()\r\n print('Encryption Running Time: ', stop-start)\r\n \r\n return ciphertxt", "def encrypt_aes256(data, key, iv=\"random\"):\n\n data = check_string(data)\n\n # Handle a list\n if type(data) == list:\n strings = []\n for string in data:\n if iv == 
\"random\":\n iv = os.urandom(16)\n encryptor = AES.new(key, AES.MODE_CBC, iv)\n strings.append([iv, encryptor.encrypt(string)])\n return strings\n\n # Handle a 16 char string\n else:\n if iv == \"random\":\n iv = os.urandom(16)\n encryptor = AES.new(key, AES.MODE_CBC, iv)\n return iv, encryptor.encrypt(data)\n elif type(iv) == str:\n if len(iv) != 16:\n raise TypeError, \"IV must be 16 chars\"", "def aes(encrypt, key, data):\n cipher = AES.new(key, AES.MODE_CBC, get_zero_vector(16))\n if encrypt:\n return cipher.encrypt(data)\n else:\n return cipher.decrypt(data)", "def encrypt(self, key, value):\n\n iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16))\n key = hashlib.sha256(key).digest()[:self.BLOCK_SIZE]\n cipher = AES.new(key, AES.MODE_CBC, iv)\n crypted = cipher.encrypt(self.pkcs5_pad(value))\n return iv+crypted", "def encrypt_data(data, encryption_key, iv=None):\n # Generate a random iv\n if iv is None:\n iv = get_random_bytes(IV_SIZE)\n generate_iv = True\n iv_length = IV_SIZE\n else:\n generate_iv = False\n iv_length = len(iv)\n cipher = AES.new(encryption_key, AES.MODE_GCM, iv)\n ciphered_data, tag = cipher.encrypt_and_digest(bytes(data))\n if generate_iv:\n # if iv passed by user is None, random iv generated\n # above is prepended in encrypted data\n # iv + Cipher + Tag\n result = iv + ciphered_data + tag\n else:\n # Cipher + Tag\n result = ciphered_data + tag\n return result", "def generate_aes_iv(key):\r\n return md5(key + md5(key).hexdigest()).hexdigest()[:AES.block_size]", "def encrypt(cls, plaintext, aad, key, iv):", "def aes_encrypt(self, buff):\n start = time.time()\n message = buff.encode()\n raw = pad(message)\n cipher = AES.new(\"DESCRYPTDESCRYPT\", AES.MODE_CBC, iv())\n enc = cipher.encrypt(raw)\n end = time.time()\n print \"Encrypt time: {0:.10f}\".format((end - start))\n return base64.b64encode(enc).decode('utf-8')", "def encrypt(self, input, iv):\n pass", "def encrypt(self, input, key, iv):\n pass", "def Encrypt(self, data):\n data = self.__Pad(data)\n iv_bytes = util.RandBytes(self.block_size)\n ciph_bytes = AES.new(self.key_bytes, AES.MODE_CBC, iv_bytes).encrypt(data)\n msg_bytes = self.Header() + iv_bytes + ciph_bytes\n sig_bytes = self.hmac_key.Sign(msg_bytes) # Sign bytes\n return msg_bytes + sig_bytes", "def encrypt(self):\n # Generate a randomized initialization vector\n iv = Random.new().read(AES.block_size)\n # Create a new AES object in Cipher Block Chaining mode\n cipher = AES.new(self.key, AES.MODE_CBC, iv)\n # Add a buffer so that the plaintext is a multiple of 16 characters in length\n pt_len = len(self.plaintext)\n buffer_size = AES.block_size - pt_len % AES.block_size\n strmsg = self.plaintext + \" \" * buffer_size\n return cipher.encrypt(str.encode(strmsg)), iv", "def encrypt(plaintext):\n # Pad plaintext\n plaintext = pad(plaintext)\n\n # AES encrypt\n iv = Random.new().read(BS)\n aes = AES.new(aes_key, AES.MODE_CBC, iv)\n return iv + aes.encrypt(plaintext)", "def encrypt(self, message):\n\n message = self._pad(message)\n iv = Random.new().read(AES.block_size)\n cipher = AES.new(self.key, AES.MODE_CBC, iv)\n return base64.b64encode(iv + cipher.encrypt(message)).decode('utf-8')", "def encrypt(self, message):\n\n IV = Random.new().read(self.BLOCK_SIZE)\n aes = AES.new(self.key, AES.MODE_CBC, IV)\n return base64.b64encode(IV + aes.encrypt(self._pad(message)))", "def test_encryption_cycle_aes_192_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n 
key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_192_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def test_encryption_cycle_aes_128_gcm_iv12_tag16_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_128_GCM_IV12_TAG16,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def test_encryption_cycle_aes_256_gcm_iv12_tag16_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_256_GCM_IV12_TAG16,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def test_encryption_cycle_aes_192_gcm_iv12_tag16_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_192_GCM_IV12_TAG16,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def encrypt(self, message, key):\n message = self.pkcs7_pad(message)\n iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(AES.block_size))\n cipher = AES.new(key, AES.MODE_CBC, iv, segment_size=64)\n return iv + cipher.encrypt(message)", "def test_encryption_cycle_aes_128_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_128_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]" ]
[ "0.6828749", "0.67845535", "0.673316", "0.6700717", "0.6700098", "0.66566104", "0.6576784", "0.65596473", "0.6475029", "0.6362078", "0.6355538", "0.6347096", "0.6343566", "0.6307382", "0.62541276", "0.62478405", "0.62115437", "0.61893487", "0.6187919", "0.61566067", "0.6155642", "0.61308", "0.60445744", "0.60256", "0.60056365", "0.5998931", "0.59931034", "0.59926045", "0.59632397", "0.5960573" ]
0.71610576
0
Compile the given mode and command into the bytes data sent to the RoboVac.
def _build_robovac_command(mode, command): mcu_ota_header_0xa5 = 0xA5 cmd_data = (mode.value + command.value) return bytes([mcu_ota_header_0xa5, mode.value, command.value, cmd_data, 0xFA])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binary_compile_cmd(self):\n ld = self.nvcc_options_json[\"ld\"]\n objcopy = self.nvcc_options_json[\"objcopy\"]\n cmd = \" \".join([ld, \"-r -b binary -o {target} {src}\"])\n # Support models with >2GB constants on Linux only\n if is_linux():\n cmd += (\n f\" && {objcopy} --rename-section\"\n \" .data=.lrodata,alloc,load,readonly,data,contents\"\n \" {target} {target}\"\n )\n return cmd", "def issue_binary_command (self, command_id, ch=None, BCAST=0, ALLCH=0, ADDM=0, RW=0, ACT=0, DEXT=0, value_int=0, addr_id_num=0x0000, n_lines_requested=2**31, target_errors=None, output_regex='(.*)', special_timeout = None):\r\n\t\t\r\n\t\t\r\n\t\tdef get_val(i):\r\n\t\t\t\"\"\"Function to convert uint16 to bytearray([uint8,uint8])\"\"\"\r\n\t\t\treturn bytearray([int(i/256),int(i)-int(i/256)*256])\r\n\t\t\r\n\t\tdef parity_odd(x):\r\n\t\t\t\"\"\"Function to compute whether a byte's parity is odd.\"\"\"\r\n\t\t\tx = x ^ (x >> 4)\r\n\t\t\tx = x ^ (x >> 2)\r\n\t\t\tx = x ^ (x >> 1)\r\n\t\t\treturn x & 1\r\n\t\t\r\n\t\t\r\n\t\t# Format header byte\r\n\t\theader_byte = 0x80\r\n\t\theader_byte += BCAST*0x40\r\n\t\theader_byte += ALLCH*0x20\r\n\t\theader_byte += ADDM*0x10\r\n\t\theader_byte += RW*0x08\r\n\t\theader_byte += ACT*0x04\r\n\t\theader_byte += DEXT*0x02\r\n\t\theader_byte += parity_odd(header_byte)\r\n\t\t\r\n\t\t\r\n\t\t# Format command byte\r\n\t\tif isinstance(command_id, str):\r\n\t\t\tcommand_byte = CMD_CODES[command_id.upper()]\r\n\t\telif isinstance(command_id, int):\r\n\t\t\tcommand_byte = command_id\r\n\t\t\r\n\t\t\r\n\t\t# Format channel address\r\n\t\taddress_bytes = bytearray()\r\n\t\tif ch is None:\r\n\t\t\tch = 0\r\n\t\tif ADDM == 1:\r\n\t\t\taddress_bytes.extend(get_val(addr_id_num))\r\n\t\t\taddress_bytes.append(ch)\r\n\t\telif ADDM == 0:\r\n\t\t\taddress_bytes.append(0)\r\n\t\t\taddress_bytes.extend(get_val(ch))\r\n\t\t\r\n\t\t\r\n\t\t# Format value bytes\r\n\t\t# value_int can be either an int or a list of ints (for vectorised input, DEXT = 1)\r\n\t\tdata_bytes = bytearray()\r\n\t\t\r\n\t\tif DEXT == 1:\r\n\t\t\t# Handle data extension length\r\n\t\t\tif isinstance(value_int, list):\r\n\t\t\t\tn_dext_words = len(value_int)\r\n\t\t\telse:\r\n\t\t\t\tn_dext_words = 1\r\n\t\t\tif n_dext_words > 0xFFFF:\r\n\t\t\t\tn_dext_words = 0xFFFF\r\n\t\t\tdata_bytes.extend(get_val(n_dext_words))\r\n\t\t\r\n\t\tif isinstance(value_int, int):\r\n\t\t\tdata_bytes.extend(get_val(value_int))\r\n\t\t\r\n\t\telif isinstance(value_int, list) and all([isinstance(e ,int) for e in value_int]):\r\n\t\t\tfor i,e in enumerate(value_int):\r\n\t\t\t\tdata_bytes.extend(get_val(e))\r\n\t\t\t\tif i == n_dext_words:\r\n\t\t\t\t\tbreak\r\n\t\t\r\n\t\telse:\r\n\t\t\traise AttributeError(\"value_int must be of type int, or of type list with all elements of type int (received type {:})\".format(type(value_int) ) )\r\n\t\t\r\n\t\t\r\n\t\t# Compose command byte string\r\n\t\ttx_str = bytearray()\r\n\t\ttx_str.append(header_byte)\t\t\t\t# Header byte\r\n\t\ttx_str.append(command_byte)\t\t\t\t# Command byte\r\n\t\ttx_str.extend(address_bytes)\t\t\t# Three bytes of channel address\r\n\t\ttx_str.extend(data_bytes)\t\t\t\t# 2 (DEXT=0) or 2*N+1 (DEXT=1) bytes of data\r\n\t\t\r\n\t\t# Transmit it\r\n\t\tself.transmit(tx_str, binary_mode = True)\r\n\t\t\r\n\t\t\r\n\t\t# Function to retry this command (in case of comms error)\r\n\t\tdef retry_function():\r\n\t\t\treturn self.issue_binary_command (command_id, ch, BCAST, ALLCH, ADDM, RW, ACT, DEXT, value_int, addr_id_num, n_lines_requested, target_errors, output_regex, 
special_timeout)\r\n\t\t\r\n\t\t# Wait for response\r\n\t\tif RW==1 or ((RW==0 or ACT) and self.wait_for_responses):\r\n\t\t\ttry:\r\n\t\t\t\tresult = self._issue_command_receive_response (retry_function, n_lines_requested, target_errors, output_regex, special_timeout)\r\n\t\t\t\treturn result\r\n\t\t\texcept RuntimeError as e:\r\n\t\t\t\tif RW == 1:\r\n\t\t\t\t\t# If we want a return value, raise an error\r\n\t\t\t\t\traise RuntimeError (\"Failed to read with command '{0}'. {1}\".format(tx_str, e))\r\n\t\t\t\telse:\r\n\t\t\t\t\t# If we are setting something, just warn the user\r\n\t\t\t\t\tprint(\"Qontroller.issue_command: Warning: Failed to write with command '{0}'. {1}\".format(tx_str, e))\r\n\t\t\t\t\treturn None", "def _write_cmd(self, cmd):\n cmd = cmd & 0x0fff\n for i in range(12):\n j = cmd & 0x0800\n cmd = cmd << 1\n j = j >> 11\n self.wr(0)\n self.data(j)\n self.wr(1)", "def __construct_data(self, cmd_mode, cmd_value):\n if cmd_mode not in command_mode.values():\n raise TypeError(\"%s: specified cmd_mode is not valid.\", self.sensor_name)\n if not isinstance(cmd_value, int):\n raise TypeError(\"%s: cmd_value must be of type %s.\", self.sensor_name, type(int))\n\n ret_val = bytearray()\n ret_val.append(cmd_mode)\n ret_val.append(cmd_value)\n return ret_val", "def command(dev, code, data='', verbose=False):\n communicate(dev, a2b_hex('A' + code) + data.encode('ascii'), a2b_hex('B' + code), verbose=verbose)", "def command(self,addr,cmd):\n if isinstance(cmd,int):\n cmd = struct.pack('!L',cmd)\n res = self._send(bytes([0xef,0xfe,0x05,addr<<1])+cmd)\n if res:\n self.wrcache[addr] = cmd", "def command(self, *cmd):\n assert(len(cmd) <= 32)\n self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd))", "def build_command_packet(self, command):\n packet = bytearray()\n # All option fields are 0\n packet.append(0)\n packet.append(0)\n packet.append(0)\n packet.append(command)\n return packet", "def prepare_operation(self, command: str, opcode: str, *args, **kwargs) -> bool:\n _opcode_hex, _args_hex = self._opcode_fetch(opcode, *args)\n self.super_memory.PC.write(_opcode_hex)\n _assembler = [_opcode_hex]\n for x in _args_hex:\n for y in x[::-1]:\n self.super_memory.PC.write(y)\n _assembler.append(y)\n self._assembler[command] = \" \".join(_assembler).lower()\n return True", "def command(self, inst_data: int, buf: bytes, /) -> None:", "def cmd(self, data, enable):\n pass", "def _command(self, servo_id, instruction, *params):\n length = 3 + len(params)\n #print('length', length)\n \"\"\"\n checksum calculation:\n checksum = ~(ID + length+instruction+parms) if the numbers in the brackets\n are calculated and exceeded 255, then it takes the lowest one byte, \"~\"\n means Negation\n \"\"\"\n checksum = 255 - ((servo_id + length + instruction + sum(params))% 256)\n #print('checksum', checksum)\n packet = [0x55, 0x55, servo_id, length, instruction, *params, checksum]\n #print('packet', packet)\n self._serial.write(bytearray(packet))\n #print('Sending packet', packet)", "def genCommand(char, command): \n \n if char == 'a':\n command = SModelRobotOutput();\n command.rACT = 1\n command.rGTO = 1\n command.rSPA = 255\n command.rFRA = 150\n\n if char == 'r':\n command = SModelRobotOutput();\n command.rACT = 0\n\n if char == 'c':\n command.rPRA = 255\n\n if char == 'o':\n command.rPRA = 0\n\n if char == 'b':\n command.rMOD = 0\n \n if char == 'p':\n command.rMOD = 1\n \n if char == 'w':\n command.rMOD = 2\n \n if char == 's':\n command.rMOD = 3\n\n #If the command entered is a int, assign this 
value to rPRA\n try: \n command.rPRA = int(char)\n if command.rPRA > 255:\n command.rPRA = 255\n if command.rPRA < 0:\n command.rPRA = 0\n except ValueError:\n pass \n \n if char == 'f':\n command.rSPA += 25\n if command.rSPA > 255:\n command.rSPA = 255\n \n if char == 'l':\n command.rSPA -= 25\n if command.rSPA < 0:\n command.rSPA = 0\n\n \n if char == 'i':\n command.rFRA += 25\n if command.rFRA > 255:\n command.rFRA = 255\n \n if char == 'd':\n command.rFRA -= 25\n if command.rFRA < 0:\n command.rFRA = 0\n\n return command", "def compile(self, code, options=''):\n try:\n data = self.client.cli.compile_contract(body=dict(\n code=code,\n options=options\n ))\n return data.bytecode\n except OpenAPIClientException as e:\n raise ContractError(e)", "def compile(self) -> str:\n compiled_command = (\n f\"{PUMP_ADDRESS[self.target_pump_num]}\"\n f\"{self.target_syringe}\"\n f\"{self.command}{self.command_value}\"\n )\n\n if self.parameter_value:\n compiled_command += f\"{self.optional_parameter}{self.parameter_value}\"\n\n return compiled_command + self.execution_command", "async def _compile(ctx, code: Option(str, \"Brainfuck code to compile into python\")):\n compiled = bot.brainfuck.compile(code)\n await send_code(ctx, compiled.code, lang=\"py\")", "def execute(self, devices, command_bytes):", "def gen_parse_packet_source(cmd_list):\n\t#TODO: check for count == 0\n\ts = \"\"\n\ts += \"void parse_packet(uint8_t *buf, uint16_t count){\\n\"\n\ts += \"\\tuint8_t cmd = buf[0];\\n\"\n\ts += \"\\tswitch(cmd){\\n\"\n\tfor c in cmd_list:\n\t\ts += \"\\t\\t/* %s */\\n\"%(c[\"name\"])\n\t\ts += \"\\t\\tcase 0x%02X: /* (Write form) */\\n\"%c[\"code\"]\n\t\ts += \"\\t\\t\\tparse_%s(buf, \"%cannon_name(c[\"name\"])\n\t\tadd_trigger = False\n\t\tfor a in c[\"argument\"]:\n\t\t\tif a[0] == \"*\":\n\t\t\t\ts += \"DataReal.%s, \"%(a[1])\n\t\t\t\tadd_trigger = True;\n\t\t\telse:\n\t\t\t\ts += \"&(DataReal.%s), \"%(a[1])\n\t\ts = s[0:-2] + \");\\n\"\n\t\ts += \"\\t\\t\\tbuf[0] = cmd;\\n\"\n\t\ts += \"\\t\\t\\tsend_packet(buf, 1);\\n\"\n\t\tif add_trigger:\n\t\t\ts += \"\\t\\t\\t%s_trigger();\\n\"%cannon_name(c[\"name\"])\n\t\ts += \"\\t\\t\\tbreak;\\n\"\n\t\t\n\t\ts += \"\\t\\tcase 0x%02X: /* (Read form) */\\n\"%(c[\"code\"] | 0x80)\n\t\ts += \"\\t\\t\\tsend_%s(\"%cannon_name(c[\"name\"])\n\t\tfor a in c[\"argument\"]:\n\t\t\ts += \"DataReal.%s, \"%(a[1])\n\t\ts = s[0:-2] + \");\\n\"\n\t\ts += \"\\t\\t\\tbreak;\\n\"\n\ts += \"\\t\\tdefault:\\n\"\n\ts += \"\\t\\t\\tbuf[0] = 0;\\n\"\n\ts += \"\\t\\t\\tsend_packet(buf, 1);\\n\"\n\ts += \"\\t\\t\\tbreak;\\n\"\n\ts += \"\\t}\\n}\\n\"\n\treturn s\n\t#TODO: writeable stuff ", "def genCommand(self,char, command): \n\t\t\n\t\tif char == 'a':\n\t\t\tcommand = outputMsg.Robotiq2FGripper_robot_output();\n\t\t\tcommand.rACT = 1\n\t\t\tcommand.rGTO = 1\n\t\t\tcommand.rSP = 255\n\t\t\tcommand.rFR = 150\n\n\t\tif char == 'r':\n\t\t\tcommand = outputMsg.Robotiq2FGripper_robot_output();\n\t\t\tcommand.rACT = 0\n\n\t\tif char == 'c':\n\t\t\tcommand.rPR = 255\n\n\t\tif char == 'o':\n\t\t\tcommand.rPR = 0 \n\n\t\t#If the command entered is a int, assign this value to rPRA\n\t\ttry: \n\t\t\tcommand.rPR = int(char)\n\t\t\tif command.rPR > 255:\n\t\t\t\tcommand.rPR = 255\n\t\t\tif command.rPR < 0:\n\t\t\t\tcommand.rPR = 0\n\t\texcept ValueError:\n\t\t\tpass \n\t\t\t\n\t\tif char == 'f':\n\t\t\tcommand.rSP += 25\n\t\t\tif command.rSP > 255:\n\t\t\t\tcommand.rSP = 255\n\t\t\t\t\n\t\tif char == 'l':\n\t\t\tcommand.rSP -= 25\n\t\t\tif command.rSP < 0:\n\t\t\t\tcommand.rSP = 
0\n\n\t\t\t\t\n\t\tif char == 'i':\n\t\t\tcommand.rFR += 25\n\t\t\tif command.rFR > 255:\n\t\t\t\tcommand.rFR = 255\n\t\t\t\t\n\t\tif char == 'd':\n\t\t\tcommand.rFR -= 25\n\t\t\tif command.rFR < 0:\n\t\t\t\tcommand.rFR = 0\n\n\t\treturn command", "def send_command(self, command):\n send_message = \"\"\n for i in command:\n send_message += chr(i)\n #send_message += bytes(i)\n\n for data in send_message:\n self.pymata.transport.write(data)", "def compile_commands(commands):\n return [ord(char) for char in ''.join([c + '\\n' for c in commands])]", "def sendScratchCommand(self, command):\n if sys.version[0] == \"2\":\n self.socket.send(self.getPacketLengthBytes(command) + command)\n elif sys.version[0] == \"3\":\n self.socket.send(self.getPacketLengthBytes(command) + command.encode('utf-8'))", "def _pack(self):\n\n opt = 0\n if self.notify:\n opt = opt | CQC_OPT_NOTIFY\n if self.block:\n opt = opt | CQC_OPT_BLOCK\n if self.action:\n opt = opt | CQC_OPT_ACTION\n\n cmdH = struct.pack(self.PACKAGING_FORMAT, self.qubit_id, self.instr, opt)\n return cmdH", "def write(self):\n # build up all commands into a single request to increase network perf\n connection = self.connection\n commands = self.commands\n try:\n connection.send_packed_command(connection.pack_commands([c.args for c in commands]))\n except ConnectionError as e:\n for c in commands:\n c.result = e", "def send_crab_command(*args, **kwargs):\n return crabCommand(*args, **kwargs)", "def _create_commands(self, data):\n lines = []\n idone, odone = False, False\n for line in data.split(b'\\n'):\n if line.startswith(b'@intext'):\n if self.inputastext is None:\n self.metadata['inputastext'] = True\n self.inputastext = True\n idone = True\n elif line.startswith(b'@outtext'):\n if self.outputastext is None:\n self.metadata['outputastext'] = True\n self.outputastext = True\n odone = True\n else:\n # remove eventual comment\n m = re.match(br'(.*?);', line)\n if m:\n line = m.group(1)\n line = line.rstrip()\n if line:\n lines.append(line)\n if not idone:\n if self.inputastext:\n self.metadata['inputastext'] = True\n if not odone:\n if self.outputastext:\n self.metadata['outputastext'] = True\n if not lines:\n raise CarError('no source code')\n min_indent = len(lines[0]) # temporary\n for line in lines:\n indent = len(line) - len(line.lstrip())\n if indent == 0:\n break\n if indent < min_indent:\n min_indent = indent\n else:\n lines = tuple(x[min_indent:] for x in lines)\n\n #self.raw_board = '\\n'.join(lines) # for an eventual curses simulator\n\n board = []\n has_car, has_exit = False, False\n y = 0\n for line in lines:\n row = array.array('B')\n x = 0\n for c in line:\n try:\n op = _opcode_to_const_map[c]\n except KeyError:\n op = NOP\n if op == CAR:\n if has_car:\n raise CarError('program can only have one car')\n has_car = True\n car_pos = (x, y)\n row.append(NOP)\n else:\n row.append(op)\n if op == EXIT:\n if has_exit:\n raise CarError('program can only have one exit')\n has_exit = True\n x += 1\n board.append(row)\n y += 1\n if not has_car:\n raise CarError('program must have one car')\n if not has_exit:\n raise CarError('program must have one exit')\n return self._board_to_commands(board, car_pos)", "def create_shellcode(self, _shellcode_type='', command='calc.exe', message='', encode=None, make_exe=0, debug=0):\n generator = ShellGenerator(self.OS_TARGET, self.OS_TARGET_ARCH)\n shellcode = generator.get_shellcode(_shellcode_type,\n connectback_ip=self.CONNECTBACK_IP,\n connectback_port=self.CONNECTBACK_PORT,\n command=command,\n 
message=message,\n make_exe=make_exe,\n debug=debug)\n if encode:\n if debug == 1:\n print \"[] Encode shellcode is on and started\"\n e = CodeEncoders(self.OS_SYSTEM, self.OS_TARGET, self.OS_TARGET_ARCH, self.BADCHARS)\n e_shellcode = e.encode_shellcode(shellcode, encode, debug)\n\n if debug == 1:\n print \"Length of encoded shellcode: %d\" % len(e_shellcode)\n print \"[] Encode shellcode finished\"\n if e_shellcode:\n shellcode = e_shellcode\n else:\n if debug == 1:\n print \"[] Encode shellcode is off\"\n return shellcode", "def _send_server_command(self, command: ServerCommand) -> None:\n import pickle\n assert current_thread() is self._subprocess_thread\n assert self._subprocess is not None\n assert self._subprocess.stdin is not None\n val = repr(pickle.dumps(command))\n assert '\\n' not in val\n execcode = (f'import ba._servermode;'\n f' ba._servermode._cmd({val})\\n').encode()\n self._subprocess.stdin.write(execcode)\n self._subprocess.stdin.flush()", "def compile(self, mode=None):\n ast_node = self.parse(mode=mode)\n mode = infer_compile_mode(ast_node)\n filename = str(self.filename or \"<unknown>\")\n return compile(ast_node, filename, mode)", "def send_command(self, cmd):\n # encrypt command\n encrypted_cmd = self.encrypt_command(cmd, self.nonce_w)\n \n # send data size header\n self.sock.send(struct.pack(\"!I\", len(encrypted_cmd)))\n\n # send encrypted command\n self.sock.send(encrypted_cmd)\n \n # waiting to receive data size\n data = self.sock.recv(4)\n if not data:\n raise Exception(\"no response size received\")\n \n # unpack response size\n (response_size,) = struct.unpack(\"!I\", data)\n\n # waiting to response response according to the response size\n data = self.sock.recv(response_size)\n while len(data) < response_size:\n data += self.sock.recv(response_size - len(data))\n\n # decrypt data\n r = self.decrypt_response(data, self.nonce_r)\n\n # incremente nonce for next command\n self.nonce_r = self.incremente_nonce(nonce=self.nonce_r)\n self.nonce_w = self.incremente_nonce(nonce=self.nonce_w)\n\n # return response output\n return r" ]
[ "0.6167799", "0.5937154", "0.59241605", "0.59028786", "0.5817669", "0.579884", "0.57838464", "0.5774759", "0.5737887", "0.570226", "0.56056356", "0.55847764", "0.55546546", "0.5552185", "0.54988945", "0.5478312", "0.5477653", "0.5468052", "0.5432817", "0.54185456", "0.54058385", "0.5402257", "0.5392226", "0.5379045", "0.5378384", "0.53676444", "0.53669375", "0.5362912", "0.53583986", "0.5347992" ]
0.71921897
0
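A usage sketch for the command builder above. The Mode and Command enums are hypothetical stand-ins (the record does not include the real enums); their values are chosen so that mode.value + command.value stays below 256, since bytes() rejects a larger checksum byte.

from enum import IntEnum

class Mode(IntEnum):        # hypothetical value for illustration
    WORK = 0xE0

class Command(IntEnum):     # hypothetical value for illustration
    AUTO_CLEAN = 0x02

def _build_robovac_command(mode, command):
    mcu_ota_header_0xa5 = 0xA5
    cmd_data = (mode.value + command.value)
    return bytes([mcu_ota_header_0xa5, mode.value, command.value, cmd_data, 0xFA])

packet = _build_robovac_command(Mode.WORK, Command.AUTO_CLEAN)
assert packet == bytes([0xA5, 0xE0, 0x02, 0xE2, 0xFA])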
Parse a decrypted response into a Protobuf Local Server Message.
def _parse_local_server_message_from_decrypted_response(decrypted_response):
    # First 2 bytes indicate length of the actual data
    length = struct.unpack("<H", decrypted_response[0:2])[0]
    protobuf_data = decrypted_response[2:length + 2]

    message = LocalServerInfo_pb2.LocalServerMessage()
    message.ParseFromString(protobuf_data)

    return message
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _parse_reply(self, msg_list): #{\n logger = self.logger\n\n if len(msg_list) < 4 or msg_list[0] != b'|':\n logger.error('bad reply: %r' % msg_list)\n return None\n\n msg_type = msg_list[2]\n data = msg_list[3:]\n result = None\n srv_id = None\n\n if msg_type == b'ACK':\n srv_id = data[0]\n elif msg_type in (b'OK', b'YIELD'):\n try:\n result = self._serializer.deserialize_result(data)\n except Exception, e:\n msg_type = b'FAIL'\n result = e\n elif msg_type == b'FAIL':\n try:\n error = jsonapi.loads(msg_list[3])\n if error['ename'] == 'StopIteration':\n result = StopIteration()\n elif error['ename'] == 'GeneratorExit':\n result = GeneratorExit()\n else:\n result = RemoteRPCError(error['ename'], error['evalue'], error['traceback'])\n except Exception, e:\n logger.error('unexpected error while decoding FAIL', exc_info=True)\n result = RPCError('unexpected error while decoding FAIL: %s' % e)\n else:\n result = RPCError('bad message type: %r' % msg_type)\n\n return dict(\n type = msg_type,\n req_id = msg_list[1],\n srv_id = srv_id,\n result = result,\n )", "def decode(cls, buffer):\n\n if len(buffer) < struct.calcsize(b\"<i\"):\n raise IncompleteMessageError\n size = struct.unpack(b\"<i\", buffer[:4])[0]\n if len(buffer) - struct.calcsize(b\"<i\") < size:\n raise IncompleteMessageError\n packet = buffer[:size + 4]\n buffer = buffer[size + 4:]\n id = struct.unpack(b\"<i\", packet[4:8])[0]\n type = struct.unpack(b\"<i\", packet[8:12])[0]\n body = packet[12:][:-2].decode(\"ascii\")\n return cls(id, type, body), buffer", "def _decode_fetch_response(self, resp: ImapFetchResponseType) -> Message:\n _, data = resp\n actual_data = data[0][1].decode() # type: str\n\n parser = Parser()\n msg = parser.parsestr(actual_data)\n return msg", "def __init__(self, buff):\n fmt = 'hiSSS[SY ]'\n response = struct_helpers.unpack_from(fmt, buff, 0)\n\n self.error_code = response[0]\n self.generation_id = response[1]\n self.group_protocol = response[2]\n self.leader_id = response[3]\n self.member_id = response[4]\n # TODO - parse metadata bytestring into ConsumerGroupProtocolMetadata?\n self.members = {_id: meta for _id, meta in response[5]}", "def decode(cls, data: bytes):\n\n response, = struct.unpack(Protocol.Formats.RESPONSE_FORMAT, data)\n return cls(response=response)", "def parse_response_proto(response_proto):\n model = local_cache[\"model\"]\n\n if model[\"type\"] == \"regression\":\n prediction_key = \"predictions\"\n if model[\"type\"] == \"classification\":\n prediction_key = \"class_ids\"\n\n if model[\"prediction_key\"]:\n prediction_key = model[\"prediction_key\"]\n\n results_dict = json_format.MessageToDict(response_proto)\n outputs = results_dict[\"outputs\"]\n value_key = DTYPE_TO_VALUE_KEY[outputs[prediction_key][\"dtype\"]]\n predicted = outputs[prediction_key][value_key][0]\n\n result = {}\n for key in outputs.keys():\n value_key = DTYPE_TO_VALUE_KEY[outputs[key][\"dtype\"]]\n result[key] = outputs[key][value_key]\n\n if model[\"type\"] == \"regression\":\n predicted = float(predicted)\n result[\"predicted_value\"] = predicted\n result[\"predicted_value_reversed\"] = reverse_transform(predicted)\n if model[\"type\"] == \"classification\":\n predicted = int(predicted)\n result[\"predicted_class\"] = predicted\n result[\"predicted_class_reversed\"] = reverse_transform(predicted)\n\n return result", "def parse_message(msg):\n idx = 8\n tag, nickLen = struct.unpack(\"<LL\", msg[:idx])\n if VERSION != (tag>>16):\n raise Exception(\"Wrong version\")\n sender_nickname = msg[idx:idx+nickLen]\n idx 
+= nickLen\n \n length = struct.unpack(\"<L\", msg[idx:idx+4])[0]\n idx += 4\n sender_pubkey = msg[idx:idx+length]\n idx += length\n\n length = struct.unpack(\"<L\", msg[idx:idx+4])[0]\n idx += 4\n dest_pubkey = msg[idx:idx+length]\n idx += length\n\n length = struct.unpack(\"<L\", msg[idx:idx+4])[0]\n idx += 4\n nonce = msg[idx:idx+length]\n idx += length\n\n length = struct.unpack(\"<L\", msg[idx:idx+4])[0]\n idx += 4\n cipher = msg[idx:idx+length]\n idx += length\n \n return sender_nickname, sender_pubkey, dest_pubkey, nonce, cipher", "def parse_response(self, raw_response):\n \n parsed_response = {\n 'success': False,\n 'raw_response': raw_response,\n }\n \n # Try to make sense of the response status\n try:\n status, msg = raw_response.split('\\r\\n')\n parsed_response['success'] = status == 'OK'\n parsed_response['message'] = msg\n except:\n msg = None\n \n # Try to parse the message ID\n try:\n key, val = msg.split('=')\n parsed_response[key] = val\n except:\n pass\n \n return parsed_response", "def decode_response(\n res_model: Type[T],\n resp: Response,\n) -> T:\n if resp.headers.get(HEADER_CONTENT_TYPE) == MSGPACK_CONTENT_TYPE:\n return msgpack.decode(resp.content, type=res_model)\n return parse_raw_as(res_model, resp.text)", "def _unpack_body(self, buff):\n\n # Unpack <return_code> and <count> (how many records affected or selected)\n self._return_code = struct_L.unpack_from(buff, offset=0)[0]\n\n # Separate return_code and completion_code\n self._completion_status = self._return_code & 0x00ff\n self._return_code >>= 8\n\n # In case of an error unpack the body as an error message\n if self._return_code != 0:\n self._return_message = unicode(buff[4:-1], self.charset, self.errors)\n if self._completion_status == 2:\n raise TarantoolError(self._return_code, self._return_message)\n\n # Unpack <count> (how many records affected or selected)\n self._rowcount = struct_L.unpack_from(buff, offset=4)[0]\n\n # If the response doesn't contain any tuple - there is nothing to unpack\n if self._body_length == 8:\n return\n\n # Parse response tuples (<fq_tuple>)\n if self._rowcount > 0:\n offset = 8 # The first 4 bytes in the response body is the <count> we have already read\n while offset < self._body_length:\n # In resonse tuples have the form <size><tuple> (<fq_tuple> ::= <size><tuple>).\n # Attribute <size> takes into account only size of tuple's <field> payload,\n # but does not include 4-byte of <cardinality> field.\n #Therefore the actual size of the <tuple> is greater to 4 bytes.\n tuple_size = struct.unpack_from(\"<L\", buff, offset)[0] + 4\n tuple_data = struct.unpack_from(\"<%ds\" % (tuple_size), buff, offset+4)[0]\n tuple_value = self._unpack_tuple(tuple_data)\n if self.field_types:\n self.append(self._cast_tuple(tuple_value))\n else:\n self.append(tuple_value)\n\n offset = offset + tuple_size + 4 # This '4' is a size of <size> attribute", "def decode(obj: bytes) -> Message:\n message_pb = ProtobufMessage()\n ml_trade_pb = ml_trade_pb2.MlTradeMessage()\n message_pb.ParseFromString(obj)\n message_id = message_pb.dialogue_message.message_id\n dialogue_reference = (\n message_pb.dialogue_message.dialogue_starter_reference,\n message_pb.dialogue_message.dialogue_responder_reference,\n )\n target = message_pb.dialogue_message.target\n\n ml_trade_pb.ParseFromString(message_pb.dialogue_message.content)\n performative = ml_trade_pb.WhichOneof(\"performative\")\n performative_id = MlTradeMessage.Performative(str(performative))\n performative_content = {} # type: Dict[str, Any]\n if 
performative_id == MlTradeMessage.Performative.CFP:\n pb2_query = ml_trade_pb.cfp.query\n query = Query.decode(pb2_query)\n performative_content[\"query\"] = query\n elif performative_id == MlTradeMessage.Performative.TERMS:\n pb2_terms = ml_trade_pb.terms.terms\n terms = Description.decode(pb2_terms)\n performative_content[\"terms\"] = terms\n elif performative_id == MlTradeMessage.Performative.ACCEPT:\n pb2_terms = ml_trade_pb.accept.terms\n terms = Description.decode(pb2_terms)\n performative_content[\"terms\"] = terms\n tx_digest = ml_trade_pb.accept.tx_digest\n performative_content[\"tx_digest\"] = tx_digest\n elif performative_id == MlTradeMessage.Performative.DATA:\n pb2_terms = ml_trade_pb.data.terms\n terms = Description.decode(pb2_terms)\n performative_content[\"terms\"] = terms\n payload = ml_trade_pb.data.payload\n performative_content[\"payload\"] = payload\n else:\n raise ValueError(\"Performative not valid: {}.\".format(performative_id))\n\n return MlTradeMessage(\n message_id=message_id,\n dialogue_reference=dialogue_reference,\n target=target,\n performative=performative,\n **performative_content\n )", "async def unpack_message(\n auth_manager: AuthKeyManager,\n schema: Schema,\n encrypted_message: bytes\n) -> EncryptedMessage:\n auth_key = await get_auth_key(auth_manager, encrypted_message)\n\n msg_key = load_int128(encrypted_message[8:]).value\n\n key_pair = generate_key_iv(\n auth_key,\n msg_key,\n key_type='client'\n )\n\n message_bytes = ige256_decrypt(\n encrypted_message[24:],\n key_pair.key,\n key_pair.iv\n )\n\n return await load_message(schema, message_bytes)", "def _parse_response(resp):\n for header in resp['payload']['headers']:\n if header['name'] == 'From':\n email = _parse_email_value(header['value'])\n sender_user_id = EMAIL_TO_USER_ID.get(email)\n if not sender_user_id:\n print(\"sender_user_id not found {}\".format(email))\n return\n\n if resp['payload']['mimeType'] in ['text/html', 'text/plain']:\n encoded_data = resp['payload']['body']['data'].encode('utf-8')\n body = base64.urlsafe_b64decode(encoded_data)\n else:\n # unclear if other options may come through\n print(\"found new mimeType: {}, id: {}\".format(resp['payload']['mimeType'], resp['id']))\n return\n\n # we only care about chat labels for now\n label = 'chats' if 'chats' in resp['labelIds'] else None\n time_secs = int(resp['internalDate']) / 1000 # convert to seconds\n timestamp = datetime.fromtimestamp(time_secs)\n\n return MessageData(\n body=body,\n timestamp=timestamp,\n message_id=resp['id'],\n label=label,\n data=json.dumps(resp),\n sender_user_id=sender_user_id,\n thread_id=resp['threadId']\n )", "def true_recvfrom(conn, buff):\n received, address = conn.recvfrom(buff)\n data = key.decrypt(received)\n try:\n return data.decode(), address\n except UnicodeDecodeError: # pickle data\n return pickle.loads(data), address", "async def package_response(self, response):\n struct = response.get_json_data_dict(JSONFlag.NET)\n\n # Sign response\n info_context = self.vasp.info_context\n my_key = info_context.get_my_compliance_signature_key(\n self.get_my_address().as_str()\n )\n\n signed_response = await my_key.sign_message(json.dumps(struct))\n\n net_message = NetMessage(\n self.myself, self.other, CommandResponseObject, signed_response, response\n )\n\n return net_message", "def parse_pasv_resp(self, msg_rec):\n num_ip_bytes = 4\n index_of_port_1 = 4\n index_of_port_2 = 5\n try:\n print_debug(msg_rec)\n # Parse out IP & Port from the parenthesis within the PASV resp.\n host_info = 
msg_rec[msg_rec.index(\"(\") + 1:msg_rec.rindex(\")\")]\n # Break up IP & Port based on comma separated delimiter.\n host_info_split = host_info.split(',')\n # Put octets together, delimited by periods.\n host_ip_list = [host_info_split[i] for i in range(num_ip_bytes)]\n host_ip = '.'.join(host_ip_list)\n # Get Port as a valid port number.\n host_port = int(host_info_split[index_of_port_1]) * 256 + \\\n int(host_info_split[index_of_port_2])\n except Exception as e:\n print_debug(\"Error: \" + str(e))\n return \"\", \"\"\n return host_ip, host_port", "def parse_recvd_data(data):\n parts = data.split(b'\\0')\n msgs = parts[:-1]\n rest = parts[-1]\n return (msgs, rest)", "def FromRpcMessage(self, message):\n self.content = message.content\n self.completed = message.completed", "def deserialize(self, raw, host, port):\n self._reader = BitStream(bytes=raw, length=(len(raw) * 8))\n version = self._reader.read(defines.VERSION_BITS).uint\n message_type = self._reader.read(defines.TYPE_BITS).uint\n token_length = self._reader.read(defines.TOKEN_LENGTH_BITS).uint\n code = self._reader.read(defines.CODE_BITS).uint\n mid = self._reader.read(defines.MESSAGE_ID_BITS).uint\n if self.is_response(code):\n message = Response()\n message.code = code\n elif self.is_request(code):\n message = Request()\n message.code = code\n else:\n message = Message()\n message.source = (host, port)\n message.destination = None\n message.version = version\n message.type = message_type\n message._mid = mid\n\n if token_length > 0:\n message.token = self._reader.read(token_length * 8).bytes\n else:\n message.token = None\n\n current_option = 0\n try:\n while self._reader.pos < self._reader.len:\n next_byte = self._reader.peek(8).uint\n if next_byte != int(defines.PAYLOAD_MARKER):\n # the first 4 bits of the byte represent the option delta\n delta = self._reader.read(4).uint\n # the second 4 bits represent the option length\n length = self._reader.read(4).uint\n current_option += self.read_option_value_from_nibble(delta)\n option_length = self.read_option_value_from_nibble(length)\n\n # read option\n try:\n option_name, option_type, option_repeatable, default = defines.options[current_option]\n except KeyError:\n log.err(\"unrecognized option\")\n return message, \"BAD_OPTION\"\n if option_length == 0:\n value = None\n elif option_type == defines.INTEGER:\n value = self._reader.read(option_length * 8).uint\n else:\n value = self._reader.read(option_length * 8).bytes\n\n option = Option()\n option.number = current_option\n option.value = self.convert_to_raw(current_option, value, option_length)\n\n message.add_option(option)\n else:\n self._reader.pos += 8 # skip payload marker\n if self._reader.len <= self._reader.pos:\n log.err(\"Payload Marker with no payload\")\n return message, \"BAD_REQUEST\"\n to_end = self._reader.len - self._reader.pos\n message.payload = self._reader.read(to_end).bytes\n return message\n except ReadError, e:\n log.err(\"Error parsing message: \" + str(e))\n return None", "def _decode(self):\n \n self.version = int(data_to_hex_str(self.packet[0])[2])\n self.header_len = int(data_to_hex_str(self.packet[0])[3]) * 4\n self.type_of_service = data_to_hex_str(self.packet[1:2])\n self.total_len = int(data_to_hex_str(self.packet[2:4]), 16)\n self.id = data_to_hex_str(self.packet[4:6])\n \n #parse the flags fields(reservedbit, don't fragment, more fragment)\n if ((ord(self.packet[6]) & (1 << 7)) != 0):\n self.flags_reservedbit = 1\n else:\n self.flags_reservedbit = 0\n #endof if\n \n if ((ord(self.packet[6]) & 
(1 << 6)) != 0):\n self.flags_dont_fragment = 1\n else:\n self.flags_dont_fragment = 0\n #endof if\n \n if ((ord(self.packet[6]) & (1 << 5)) != 0):\n self.flags_more_fragment = 1\n else:\n self.flags_more_fragment = 0\n #endof if\n \n #parse the offset field(in packet[6:7]): 00011111 & packet[6] (to filter flags) -->> get packet[6:7] in hex_str\n #tmp = str(31 & ord(self.packet[6]))\n self.fragment_offset = int(data_to_hex_str(self.packet[6:8]), 16)\n if (self.fragment_offset >= (1 << 13)):\n #take away the flags fields: 00011111 11111111 & self.fragment_offset\n self.fragment_offset = self.fragment_offset & ((1 << 13) - 1) \n \n self.TTL = ord(self.packet[8])\n self.protocol = IPPROTO[ord(self.packet[9])]\n self.header_checksum = data_to_hex_str(self.packet[10:12])\n \n self.src = str(ord(self.packet[12])) + '.' + str(ord(self.packet[13])) + '.' + \\\n str(ord(self.packet[14])) + '.' + str(ord(self.packet[15]))\n self.dst = str(ord(self.packet[16])) + '.' + str(ord(self.packet[17])) + '.' + \\\n str(ord(self.packet[18])) + '.' + str(ord(self.packet[19]))\n \n if (self.header_len > 20):\n self.opt_paddings = self.packet[20 : (self.header_len)]", "def _deserialize_response(self, response):\n text = response.content.decode(errors='replace')\n text = _remove_control_characters(text)\n doc = json.loads(text, cls=_TransmissionJSONDecoder)\n\n if doc['result'] != 'success':\n raise TransmissionError(\"Request failed: '%s'\" % doc['result'])\n\n if doc['tag'] != self.tag:\n raise TransmissionError(\"Tag mismatch: (got %d, expected %d)\" % (doc['tag'], self.tag))\n else:\n self.tag += 1\n\n if 'arguments' in doc:\n return doc['arguments'] or None\n\n return None", "def decode_message(self, raw):\n return raw.decode('utf-8')", "def parse_from_bytes(self, raw_buffer):\n\n try:\n (cpu_svn,\n self.misc_select,\n _,\n attributes,\n mr_enclave,\n _,\n mr_signer,\n _,\n self.isv_prod_id,\n self.isv_svn,\n _,\n report_data) = \\\n struct.unpack(self._format, raw_buffer)\n\n # Further parse embedded structures\n self.cpu_svn.parse_from_bytes(cpu_svn)\n self.attributes.parse_from_bytes(attributes)\n self.mr_enclave.parse_from_bytes(mr_enclave)\n self.mr_signer.parse_from_bytes(mr_signer)\n self.report_data.parse_from_bytes(report_data)\n except struct.error as se:\n raise ValueError('Unable to parse: {}'.format(se))", "def pre_dissect(self, s):\n if len(s) < 1:\n raise Exception(\"Invalid InnerPlaintext (too short).\")\n\n tmp_len = len(s) - 1\n if s[-1] != b\"\\x00\":\n msg_len = tmp_len\n else:\n n = 1\n while s[-n] != b\"\\x00\" and n < tmp_len:\n n += 1\n msg_len = tmp_len - n\n self.fields_desc[0].length_from = lambda pkt: msg_len\n\n self.type = struct.unpack(\"B\", s[msg_len:msg_len + 1])[0]\n\n return s", "def __nanojsonrpc_unpack(self, msg):\n try:\n pack = json.loads(msg)\n if 'method' not in pack:\n return None\n else:\n return pack\n except:\n traceback.print_exc()\n return None", "def processReadback(resp):\n a = np.fromstring(resp, dtype='<u1')\n return {\n 'build': a[51],\n 'serDAC': a[56],\n 'noPllLatch': bool((a[58] & 0x80) > 0),\n 'ackoutI2C': a[61],\n 'I2Cbytes': a[69:61:-1],\n 'executionCounter': (a[53] << 8) + a[52]\n }", "def parse_response(msg):\n start_line, header, body = _parse_message(msg)\n status, reason = _parse_status_line(start_line)\n return Response(status, reason, header, body)", "def stateless_unpack(buff, to_server):\n\tdecoder = PacketDecoder(to_server)\n\tdecoder.buff = buff\n\tpacket = decoder.read_packet()\n\treturn packet, decoder.buff", "def deserialize(self, 
str):\n try:\n if self.header is None:\n self.header = std_msgs.msg.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.header.frame_id = str[start:end]\n _x = self\n start = end\n end += 56\n (_x.command, _x.set_num, _x.paraset_byte54, _x.paraset_byte53, _x.paraset_byte52, _x.paraset_byte51, _x.paraset_byte50, _x.paraset_byte49, _x.paraset_byte48, _x.paraset_byte47, _x.paraset_byte46, _x.paraset_byte45, _x.paraset_byte44, _x.paraset_byte43, _x.paraset_byte42, _x.paraset_byte41, _x.paraset_byte40, _x.paraset_byte39, _x.paraset_byte38, _x.paraset_byte37, _x.paraset_byte36, _x.paraset_byte35, _x.paraset_byte34, _x.paraset_byte33, _x.paraset_byte32, _x.paraset_byte31, _x.paraset_byte30, _x.paraset_byte29, _x.paraset_byte28, _x.paraset_byte27, _x.paraset_byte26, _x.paraset_byte25, _x.paraset_byte24, _x.paraset_byte23, _x.paraset_byte22, _x.paraset_byte21, _x.paraset_byte20, _x.paraset_byte19, _x.paraset_byte18, _x.paraset_byte17, _x.paraset_byte16, _x.paraset_byte15, _x.paraset_byte14, _x.paraset_byte13, _x.paraset_byte12, _x.paraset_byte11, _x.paraset_byte10, _x.paraset_byte9, _x.paraset_byte8, _x.paraset_byte7, _x.paraset_byte6, _x.paraset_byte5, _x.paraset_byte4, _x.paraset_byte3, _x.paraset_byte2, _x.paraset_byte1,) = _get_struct_56B().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill", "def decode_packet(data):\n\n opcodes = [(\"AUTH_LOGON_CHALLENGE\", \"\\x00\"), (\"AUTH_LOGON_PROOF\", \"\\x01\")]\n opcode = data[0] # Opcode of the received packet (First byte)\n if opcode == opcodes[0][1]: # Auth Logon challenge\n srp_rcvd = {\n 'error': data[1], # (you should hope that it is always 0)\n 'B': data[3:35], # Read B and skip 1 field (Length_g)\n 'g': data[36:37], # Read g and skip 1 field (Length_n)\n 'N': data[38:70],\n 's': data[70:102], # Read salt\n 'crc': data[102:] # (useless for private servers)\n }\n return srp_rcvd\n if opcode == opcodes[1][1]:\n # Auth logon proof\n if data[1] == \"\\x00\": # Code error: 0\n srp_rcvd = {'login': 1}\n else:\n srp_rcvd = {'login': 0}\n return srp_rcvd" ]
[ "0.6336118", "0.6241895", "0.6204218", "0.61142474", "0.61068696", "0.6064688", "0.60642505", "0.60597825", "0.605668", "0.6025478", "0.5981453", "0.5966325", "0.59572643", "0.5868557", "0.5867732", "0.5841865", "0.5841581", "0.58406657", "0.5827033", "0.58100164", "0.57836545", "0.576718", "0.5762771", "0.57541174", "0.5737128", "0.5712065", "0.5708718", "0.5704675", "0.56903464", "0.56721586" ]
0.80403924
0
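The parser above relies on a generated LocalServerInfo_pb2 module that is not part of the record, so this sketch exercises only the framing it assumes: a 2-byte little-endian length prefix followed by the serialized message bytes.

import struct

def _frame(payload):
    # Prepend the 2-byte little-endian length header the parser reads.
    return struct.pack("<H", len(payload)) + payload

def _extract_protobuf_data(decrypted_response):
    # Same slicing as the document, minus the ParseFromString step.
    length = struct.unpack("<H", decrypted_response[0:2])[0]
    return decrypted_response[2:length + 2]

raw = b"\x08\x01"  # stand-in for serialized LocalServerMessage bytes
assert _extract_protobuf_data(_frame(raw)) == raw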
Get the status of the RoboVac device (battery level, mode, charging, etc.).
def get_status(self) -> RobovacStatus:
    message = self._build_get_device_status_user_data_message()
    robovac_response = self._send_packet(message, True)

    received_status_bytes = robovac_response.c.usr_data
    received_status_ints = [x for x in received_status_bytes]

    return RobovacStatus(
        1 if received_status_ints[6] & 4 > 0 else 0,
        1 if received_status_ints[6] & 2 > 0 else 0,
        received_status_ints[1] & 255,
        received_status_ints[8] & 255,
        received_status_ints[11] & 255,
        received_status_ints[10] & 255,
        received_status_ints[12] & 255,
        received_status_ints[13] & 255
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _do_get_status(self):\n logging.info(__name__ + ' : Get status of the device.')\n result = self._execute('X')\n usage = {\n 0: \"Channel not in use\",\n 1: \"Channel used for Nitrogen level\",\n 2: \"Channel used for Helium Level (Normal pulsed operation)\",\n 3: \"Channel used for Helium Level (Continuous measurement)\",\n 9: \"Error on channel (Usually means probe unplugged)\"\n }\n # current_flowing = {\n # 0 : \"Curent not flowing in Helium Probe Wire\",\n # 1 : \"Curent not flowing in Helium Probe Wire\"\n # }\n # auto_fill_status = {\n # 00 : \"End Fill (Level > FULL)\",\n # 01 : \"Not Filling (Level < FULL, Level > FILL)\",\n # 10 : \"Filling (Level < FULL, Level > FILL)\",\n # 11 : \"Start Filling (Level < FILL)\"\n # }\n return usage.get(int(result[1]), \"Unknown\")", "def get_status(self):\n return self.o.read_register(self.dev_id, STATUS)", "async def get_status(self) -> str:\n return await self.hw_device.status()", "def getStatus(self, request, context):\n \n statusDrone = str(self.vehicle.system_status).rpartition(':')[2]\n\t \n return droneconnect_pb2.Status(status = statusDrone)", "def get_status(self):\n return self.read_register(259, 0, 3)", "def get_roof_status_from_tcs():\n\t\n\ttarget = send_command('getstatus dome')\n\tsplit_ans = target.split()\n\t\n\treturn split_ans", "def status(self):\n ret = self.dev.ctrl_transfer(0xc0, 0x01, 0x0081, 0x0000, 0x0001)\n if ret[0] == 0xa0:\n return self.POWER_ON\n return self.POWER_OFF", "def cmd_battery_status(ensoapi):\n\n ACSTATUS_OFFLINE = 0\n ACSTATUS_ONLINE = 1\n ACSTATUS_UNKNOWN = 255\n BATTERYFLAG_HIGH = 1 # more than 66%\n BATTERYFLAG_NORMAL = 0 # more than 66%\n BATTERYFLAG_LOW = 2 # less than 33%\n BATTERYFLAG_CRITICAL = 4 # less than 5%\n BATTERYFLAG_CHARGING = 8\n BATTERYFLAG_NOBATTERY = 128\n BATTERYFLAG_UNKNOWNSTATUS = 255\n \n status = struct.Struct(\"BBBBll\")\n # print status.size\n buffer = ctypes.create_string_buffer(status.size)\n ctypes.windll.kernel32.GetSystemPowerStatus(buffer)\n # print status.unpack_from(buffer)\n (ac_line_status, \n battery_flag, \n battery_life_percent, \n _, \n battery_life_time, \n battery_full_life_time) = status.unpack_from(buffer)\n \n #print battery_flag\n if battery_flag == BATTERYFLAG_NOBATTERY:\n ensoapi.display_message(u\"This system has no battery attached.\")\n return\n\n #print battery_life_time/60/60\n\n battery_status = (\n \"good\" if battery_flag == BATTERYFLAG_HIGH \n else \"normal\" if battery_flag == BATTERYFLAG_NORMAL\n else \"low\" if battery_flag == BATTERYFLAG_LOW \n else \"critical\" if battery_flag == BATTERYFLAG_CRITICAL \n else \"charging\" if battery_flag == BATTERYFLAG_CHARGING \n else \"unknown\")\n\n ac_status = (\n \"Online, \" if ac_line_status == ACSTATUS_ONLINE \n else \"Offline, \" if ac_line_status == ACSTATUS_OFFLINE\n else \"\")\n\n if battery_life_time != -1:\n hours = battery_life_time / 60 / 60\n minutes = battery_life_time / 60 - hours * 60\n lifetime = \"%d:%02dh \" % (hours, minutes)\n else:\n lifetime = \"\"\n\n if ac_line_status == ACSTATUS_ONLINE:\n msg = u\"Online, %(status)s%(percentage)s\" % { \n \"status\" : \"charging \" if battery_flag == BATTERYFLAG_CHARGING else \"\",\n \"percentage\" : \"(%d%%)\" % battery_life_percent if battery_flag == BATTERYFLAG_CHARGING \n else \"%d%%\" % battery_life_percent\n }\n else:\n msg = u\"%(acstatus)s %(lifetime)s(%(percentage)d%%) remaining\" % { \n \"percentage\" : battery_life_percent, \n \"acstatus\" : ac_status,\n \"batterystatus\" : battery_status,\n \"lifetime\" : lifetime\n }\n 
ensoapi.display_message(msg, u\"Battery status\")", "def GetStatus(self):\r\n return self.status", "def status(self) -> VacuumStatus:\n return VacuumStatus(self.send(\"get_status\")[0])", "def read_status(ctl):\n\tr = ctl.bus_read_struct_coherent(tm.status_addr, 'BBBBI')\n\treturn r", "def status(self):\n\n # --- get 0 padded string representation of status register\n response = self.send_lens_cmd(['90', 'B9', '00'], fast_mode=True)\n state_str = bin(int('0x' + response['MISO'][2], 16))\n state_str = state_str[2:]\n for p in range(8 - len(state_str)):\n state_str = '0' + state_str\n\n self._status = dict(AF_switch=bool(int(state_str[0])),\n F_move=bool(int(state_str[5])),\n F_acc=bool(int(state_str[2])),\n FD_endStop=bool(int(state_str[3])),\n status_byte=state_str)\n\n return self._status", "def get_status(self):\n status = self._status.get_message()\n \n if status == \"N\":\n return \"offline\"\n \n elif status == \"Y\":\n return \"online\"\n \n elif status == \"A\":\n return \"away\"\n \n elif status == \"B\":\n return \"busy\"", "def status(self):\n return self._bp.get_motor_status(self._port)", "def get_status(self):\n\n return self._system", "def _get_status(self):\n return self.__status", "def rtt_get_status(self):\n status = structs.JLinkRTTerminalStatus()\n res = self.rtt_control(enums.JLinkRTTCommand.GETSTAT, status)\n return status", "def getstatus(self):\n return self.__status", "def state(self):\n return self.roller.battery", "def get_power_status(self):\n response = self.parent.power_manager.get_status()\n if response is not None:\n response = response[0]\n\n batteries = []\n for batt in response[\"batt_data\"]:\n batteries.append(batt)\n response[\"batt_data\"] = batteries\n\n return response", "def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def get_status():\n return ('off', 'off')", "def get_status(self):\n try:\n status = {\n 'camexptime': self.opt.getParameter(\"ExposureTime\"),\n 'camtemp': self.opt.getParameter(\"SensorTemperatureReading\"),\n 'camspeed': self.opt.getParameter(\"AdcSpeed\"),\n 'state': self.opt.getParameter(\"OutputSignal\")\n }\n logger.info(status)\n return status\n except Exception as e:\n logger.error(\"Error getting the camera status\", exc_info=True)\n return {\n \"error\": str(e), \"camexptime\": -9999,\n \"camtemp\": -9999, \"camspeed\": -999\n }", "def get_status(self):\n request_format = \"{oscillating:01d} {initialising:01d} {initialised:01d} {width:03d} \" \\\n \"{offset:+04d} {speed:02d} {acceleration:03d} {cycles:05d} {backlash:03d}\"\n status_string = request_format.format(\n oscillating=int(self.device.is_oscillating()),\n initialising=int(self.device.is_initialising()),\n initialised=int(self.device.has_been_initialised()),\n width=int(self.device.get_window_width()),\n offset=int(self.device.get_offset()),\n speed=int(self.device.get_speed()),\n acceleration=int(self.device.get_acceleration()),\n cycles=int(self.device.get_complete_cycles()),\n backlash=int(self.device.get_backlash())\n )\n return status_string", "def getStatus(self):\r\n return self.controller.getStatus()", "def getStatus():\n return json.dumps({'camera': Camera.status(), 'rover': rover.status()}), 200", "def comando_status(self):\r\n\tif args.tipo == 'web':\r\n return self.status_web()\r\n\r\n\tif args.tipo == 'nfce':\r\n return self.consulta_status_nfce()\r\n\r\n\tif args.tipo == 
'dual':\r\n return self.status_impressora_dual()" ]
[ "0.7337816", "0.7083738", "0.6949301", "0.69253254", "0.6892618", "0.67979217", "0.6779467", "0.6734082", "0.6590052", "0.65739864", "0.6554852", "0.6528611", "0.6488979", "0.6386227", "0.6384382", "0.6353202", "0.6342102", "0.63400286", "0.62900186", "0.62873715", "0.6286426", "0.62745947", "0.62745947", "0.62745947", "0.6242545", "0.62386566", "0.62350255", "0.62343454", "0.6212963", "0.6203065" ]
0.80862015
0
Tell the RoboVac to start its autoclean programme.
def start_auto_clean(self):
    command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.AUTO_CLEAN)
    message = self._build_command_user_data_message(command)

    self._send_packet(message, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run():\r\n autostartup()", "def manual_start(self):\n self.manual_seqnum = 0\n return self.send(\"app_rc_start\")", "def execute(self):\n\t\tself.drivetrain.bad_auto_drive()\n\t\tself.drivetrain.stop_robot()", "def start():\n # Have the car begin at a stop\n rc.drive.stop()\n # Print start message\n print(\">> Lab 4B - LIDAR Wall Following\")", "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n ReversePrograms()", "def activate(self):\n self.start()", "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n AddDeleteObjectsScript()", "def start():\n trio.run(_main)", "def start(self):\n self.active = True", "def start():\n # Have the car begin at a stop\n rc.drive.stop()\n\n # Print start message\n print(\">> Lab 3B - Depth Camera Cone Parking\")", "def start_bot() -> None:\n if sys.platform == 'darwin':\n force = False\n\n if force:\n deb = False\n else:\n deb = True\n\n main(debug=deb)\n else:\n force = True\n if force:\n main(debug=False)\n else:\n main(debug=True)", "def main():\n setup()\n master = Master()\n master.start()", "def __arm(self):\n self._running = True", "def start():", "def start():", "def start():", "def start():", "def maya_start_up():\n import maya.utils as mu\n mu.executeDeferred(\"import mliber;reload(mliber);mliber.show_in_maya()\")", "def start():\n import OnlineEnv as Online\n Online.end_config(False)\n #Online.end_config(True)", "def startapp():", "def startup(self) -> None:", "def start(self):\n self.start_time = dt.datetime.now()\n self.call = ' '.join(sys.argv)\n self.commands = []", "def set_automatic(self, mode):\n self.slam.controlled = not mode\n if mode:\n self.slam.resume()", "def onClick(self):\n self.app.setActiveMode(\"start\")", "def start_again():\n import functions\n functions.start_program()", "def activate(self):\n self.robot = self.behavior_system.robot\n self.cozmo = self.robot.cozmo\n\n self._start_loop()", "def cli(ctx):\n if not ctx.invoked_subcommand:\n auto()", "def autonomousInit(self):\n self.globalInit()\n self.autonomous.start()", "def start (self):\n pass", "def start (self):\n pass" ]
[ "0.6365878", "0.6246379", "0.60141903", "0.59858924", "0.59144974", "0.58120424", "0.5807724", "0.577712", "0.57759285", "0.57748395", "0.5772731", "0.5752366", "0.5745088", "0.57284355", "0.57284355", "0.57284355", "0.57284355", "0.57233566", "0.5711464", "0.5709139", "0.5688541", "0.5684208", "0.5671407", "0.56565714", "0.5651862", "0.56259453", "0.56097555", "0.56096804", "0.5602148", "0.5602148" ]
0.6807182
0
Tell the RoboVac to start its spotclean programme.
def start_spot_clean(self):
    command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.SPOT_CLEAN)
    message = self._build_command_user_data_message(command)

    self._send_packet(message, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n AddDeleteObjectsScript()", "def start_auto_clean(self):\n command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.AUTO_CLEAN)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def startapp():", "def startRun(self):\r\n #Ask user for verification\r\n usrData = self.getCurrentUserData()\r\n msg = QMessageBox()\r\n msg.setIcon(QMessageBox.Information)\r\n msg.setWindowTitle(\"Run data verification\")\r\n msg.setText((\"You are about to start a run for user %s with ID %s. \" +\r\n \"Please make sure this is correct. You can abort the run at any time \" +\r\n \"by pressing the Escape button.\") % (usrData['Name'], usrData['User_ID']))\r\n msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel)\r\n retval = msg.exec_()\r\n\r\n #Only start run if user pressed OK\r\n if retval == QMessageBox.Ok:\r\n self.runController = RunController(parent =self)", "def start():\n # Have the car begin at a stop\n rc.drive.stop()\n # Print start message\n print(\">> Lab 4B - LIDAR Wall Following\")", "def start():", "def start():", "def start():", "def start():", "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n ReversePrograms()", "def start():\n trio.run(_main)", "def run(self):\n # TODO: Clean this up to better facilitate running as client: There's \n # still too much being done in this function.\n #\n # Only start if we've been appropriately initialised\n # TODO: Are assertion checks stripped out in optimised builds? Is this\n # the wrong method for an important check?\n assert self.isInitialised, \"Detective must be initialise()d before \\\nrunning.\"\n #\n ## If not secretive, announce our cards ################################\n if not self.SECRETIVE:\n announcestr = \"Preparing for battle. I hold cards: \"\n for card in self.myCards:\n announcestr += game.CARDNAMES[card]+\", \"\n self.hook_notifydebug(announcestr[:-2],\"Velma.run\")\n #\n #\n # TODO: Move the following commented code stack to a test routine.\n # Miss Scarlet known to be culprit\n #ui.dbgstatus('tweak','Miss Scarlet known culprit')\n #for ixPlayer in range(1,self.nPlayers):\n # self.event_pass(character=4,room=8,weapon=19,player=ixPlayer)\n # Kitchen known to be scene\n #ui.dbgstatus('tweak','Kitchen known scene')\n #for ixPlayer in range(1,self.nPlayers):\n # self.event_pass(character=0,room=9,weapon=19,player=ixPlayer)\n # Unseen answer 1 Plum/Billiard/Wrench\n #ui.dbgstatus('tweak','Unseen answer from 1')\n #self.event_unseenresponse(character=1,room=12,weapon=20,shower=1,viewer=3)\n # 1 known to have Peacock\n #ui.dbgstatus('tweak','1 known has Peacock')\n #self.event_seenresponse(card=3,shower=1,viewer=0)\n # 1 known not to have candlestick\n #ui.dbgstatus('tweak','1 known without candlestick')\n #self.event_pass(character=0,room=8,weapon=16,player=1)\n # 2 known to have knife\n #ui.dbgstatus('tweak','2 known has knife')\n #self.event_seenresponse(card=15,shower=2,viewer=0)\n # 2 known to have either White or Lounge or Candlestick\n #ui.dbgstatus('tweak','Unseen answer from 2')\n #self.event_unseenresponse(character=5,room=7,weapon=16,shower=2,viewer=1)\n # 3 known has ballroom\n #ui.dbgstatus('tweak','3 known has ballroom')\n #self.event_seenresponse(card=10,shower=3,viewer=0)\n #\n #\n while not self.isGameOver:\n # Output everybody's identity and position on the board. 
This \n # information is not privileged, and should be helpful in ensuring\n # consistency between what Velma thinks is going on and the state\n # of the real-world board\n for ixPlayer in range(self.nPlayers):\n self.hook_notifydebug(\"Player \"+str(ixPlayer)+\" is \"+\n game.CARDNAMES[game.CHARS[self.playerCharIxs[ixPlayer]]]+\n \" at \"+\n str(self.charLocations[self.playerCharIxs[ixPlayer]]),\n \"Velma.run\")\n #\n # Remind our conversant of any pre-set scenario\n if self.DBGSCENARIOREMINDER:\n self.hook_notifydebug('Reminder: \\n' + self.DBGSCENARIOREMINDER,\n \"Velma.run\")\n #\n # If we're not competing with our conversant, plot our knowledge\n if not self.SECRETIVE:\n self.hook_displaysuspicions()\n #\n if self.ixHotSeat == 0:\n self.move()\n else:\n self.hook_observemove()\n #\n # The hot seat increments, and skips over any players previously\n # knocked out\n self.ixTurn += 1\n self.ixHotSeat = (self.ixHotSeat + 1) % self.nPlayers\n while self.playersOusted[self.ixHotSeat]:\n self.ixHotSeat = (self.ixHotSeat + 1) % self.nPlayers", "def do_quit(self, arg):\n cprint(('Thankyou for Using this todo Application!'), 'yellow')\n exit()", "def start(self):\n ...", "def run_cmd(self):\r\n self.run = True", "def run(self):\n sys.exit(-1)", "def start():\n # Have the car begin at a stop\n rc.drive.stop()\n\n # Print start message\n print(\">> Lab 3B - Depth Camera Cone Parking\")", "def execute(self):\n\t\tself.drivetrain.bad_auto_drive()\n\t\tself.drivetrain.stop_robot()", "def start(update, context):\n update.message.reply_text('Hi! \\n /traccia per tracciare instantaneamente i prezzi \\n /check per far partire il check periodico \\n /stopcheck per far fermare il check periodico')", "def start(self):\n self.start_time = dt.datetime.now()\n self.call = ' '.join(sys.argv)\n self.commands = []", "def start_game(self):\n\n\t\tpass", "def start(self):\r\n pass", "def main():\n setup()\n master = Master()\n master.start()", "def start():\n\n print(\"Hi. I'm your Amazon customer service assistant.\")\n print('What can I help you about your orders?')", "def startup(self) -> None:", "def start (self):\n pass", "def start (self):\n pass", "def start(self):\n pass", "def start(self):\n pass", "def start(self):\n pass" ]
[ "0.59675306", "0.59310967", "0.5914191", "0.5898601", "0.58888316", "0.5851248", "0.5851248", "0.5851248", "0.5851248", "0.5819597", "0.57119226", "0.57067347", "0.57007897", "0.56571686", "0.56343114", "0.562878", "0.5624732", "0.5615956", "0.56120574", "0.55993724", "0.5591775", "0.55836344", "0.5583502", "0.55774915", "0.555704", "0.5556548", "0.5556548", "0.55304766", "0.55304766", "0.55304766" ]
0.6891785
0
Tell the RoboVac to start its edgeclean programme.
def start_edge_clean(self):
    command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.EDGE_CLEAN)
    message = self._build_command_user_data_message(command)

    self._send_packet(message, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n AddDeleteObjectsScript()", "def _commitSuicide(self):\n\n # Kill multicast and zeroconf.\n self.multicast.kill()\n self.zeroconf.kill()\n\n # Stop us from running any further.\n self.alive = False", "def start_auto_clean(self):\n command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.AUTO_CLEAN)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def run(self):\n sys.exit(-1)", "def runmain():\n\n if roboapps.Unchecked():\n roboapps.Exit()\n else:\n ReversePrograms()", "def estop(self):\n self.status_message = \"EMERGENCY STOP - Check Rexarm and restart program\"\n self.current_state = \"estop\"\n self.rexarm.disable_torque()", "def force_exit(self):\n self.exit = 1", "def conclusion():\n\n print('Program ends')", "def execute(self):\n\t\tself.drivetrain.bad_auto_drive()\n\t\tself.drivetrain.stop_robot()", "def abort(self):\n try:\n self.acqRunning = False\n except:\n print('Cannot abort properly')", "def stopclean(self):\n raise Exception(\"Not implemented\")", "def on_StopNode_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n print(\"We will kill all gman process!\")\n reply = QMessageBox.question(self, '确认', '确认kill所有gman任务吗', QMessageBox.Yes | QMessageBox.No, QMessageBox.No)\n\n if reply == QMessageBox.Yes:\n autokillGman()\n self.OnlyDisplay(\"kill -9 |grep gman\")\n else:\n print(\"Keep GMAN run.......!\")", "def start():\n # Have the car begin at a stop\n rc.drive.stop()\n # Print start message\n print(\">> Lab 4B - LIDAR Wall Following\")", "def main(self):\n self.startup()\n if self.vehicle:\n try:\n while not self._loop_should_exit:\n self.tick()\n time.sleep(1)\n except KeyboardInterrupt:\n self.cleanup()\n self.cleanup()", "def terminate(self):", "def climb(self):\n print(\"Inside WoodElf.climb\")", "def run(self):\n self.workhorse_.run()\n try:\n while(True):\n self.workhorse_.heartbeat()\n self.periodic_snapshot()\n except workflow.NoMoreWork:\n print \"Fini.\"\n exit(0)\n exit(-1)", "def exit_engine(self):\n self.stop_flag = True", "def quit(self):\n log.debug(\"QUIT\")\n # self.__save_video_feed()\n self.drone.land()\n self.drone.quit()", "def start_check(self):\n pass", "def restart(self):\r\n self.agent_x = self.start_x\r\n self.agent_y = self.start_y\r\n self.terminated = False", "def start():", "def start():", "def start():", "def start():", "def handle_crash(self):\n if self.crash_test[0]:\n # assert self.alive, 'Something is wrong, dead bird is dying again'\n self.alive = False\n # assign the fitness\n self.genome.fitness = self.get_fitness()\n self.crash_test = False, False", "def __disarm(self):\n self._running = False", "async def async_start(self) -> None:\n await self._vacuum_bot.execute_command(Clean(CleanAction.START))", "def graceful(self):\n if self.debug:\n print(\"%s graceful\" % self.name)\n self.quit()", "def quit(self):\n\t\tpass" ]
[ "0.6004148", "0.5909393", "0.5895026", "0.5822715", "0.5812425", "0.58039004", "0.5768682", "0.57507056", "0.5685349", "0.5663432", "0.56619793", "0.548184", "0.54683137", "0.5454407", "0.5450739", "0.5444596", "0.543697", "0.5409681", "0.53943235", "0.5380903", "0.53745776", "0.5346474", "0.5346474", "0.5346474", "0.5346474", "0.5339093", "0.53296643", "0.5329622", "0.53131044", "0.52965456" ]
0.77722454
0